Abid Ali Awan committed
Commit a889234 · 1 Parent(s): 43d984b

feat: Add heart disease dataset and remove unused Gradio test file.

Files changed (5):
  1. .gitattributes +1 -0
  2. agent.py +17 -11
  3. app.py +1 -1
  4. heart.csv +3 -0
  5. test_warning.py +0 -8
.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+*.csv filter=lfs diff=lfs merge=lfs -text
agent.py CHANGED
@@ -3,7 +3,7 @@ import asyncio
 from typing import Any, Optional
 from mcp.client.streamable_http import streamablehttp_client
 from mcp.client.session import ClientSession
-from agents import Agent, Runner
+from agents import Agent, Runner, function_tool
 import openai
 
 # Define the MCP Server URL
@@ -31,20 +31,26 @@ class MCPAgent:
         # 2. Fetch Tools from MCP
         mcp_tools = await self.session.list_tools()
 
-        # 3. Convert MCP Tools to Agent Tools
-        # The Agents SDK expects python functions or specific tool objects.
-        # We'll create dynamic functions that call the MCP tools.
+        # 3. Convert MCP Tools to Agent Tools using function_tool decorator
         agent_tools = []
         for tool in mcp_tools.tools:
-            # Create a wrapper function for each tool
-            async def make_tool_func(t_name=tool.name):
+            # Create a wrapper function for each tool with proper closure
+            def make_tool_func(t_name, t_description):
+                @function_tool(strict_mode=False)
                 async def wrapper(**kwargs):
-                    return await self.session.call_tool(t_name, arguments=kwargs)
-                wrapper.__name__ = t_name
-                wrapper.__doc__ = tool.description
+                    """MCP tool wrapper"""
+                    result = await self.session.call_tool(t_name, arguments=kwargs)
+                    return result
+
+                # Set function attributes
+                clean_name = t_name.replace("Auto_Deployer_", "")
+                wrapper.__name__ = clean_name
+                wrapper.__doc__ = t_description or "MCP tool"
+
                 return wrapper
 
-            agent_tools.append(await make_tool_func())
+            tool_func = make_tool_func(tool.name, tool.description)
+            agent_tools.append(tool_func)
 
         # 4. Configure the Agent
         # Configure the client based on provider
@@ -70,7 +76,7 @@ class MCPAgent:
         await self.initialize()
 
         result = await Runner.run(self.agent, input=user_message)
-        return result.output
+        return result.final_output
 
     async def close(self):
         """Closes the MCP connection."""
app.py CHANGED
@@ -70,7 +70,7 @@ with gr.Blocks(title="MCP MLOps Agent") as demo:
     with gr.Row():
         file_input = gr.File(label="Upload Dataset (CSV)")
 
-    chatbot = gr.Chatbot(height=600, type="messages", allow_tags=False)
+    chatbot = gr.Chatbot(height=600, type="messages")
     msg = gr.Textbox(label="Message", placeholder="Type your message here...")
     clear = gr.ClearButton([msg, chatbot, file_input])
 
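Note on the app.py change: only the allow_tags argument is dropped from gr.Chatbot; the messages-style history stays. With type="messages", Gradio expects the chat history as a list of {"role": ..., "content": ...} dicts. A minimal sketch of that pattern (the respond handler below is illustrative, not the repo's actual callback, which presumably calls the MCP agent):

import gradio as gr

def respond(message, history):
    # With type="messages", history is a list of role/content dicts.
    history = history + [{"role": "user", "content": message}]
    history = history + [{"role": "assistant", "content": f"Echo: {message}"}]
    return "", history

with gr.Blocks(title="MCP MLOps Agent") as demo:
    chatbot = gr.Chatbot(height=600, type="messages")
    msg = gr.Textbox(label="Message", placeholder="Type your message here...")
    msg.submit(respond, [msg, chatbot], [msg, chatbot])

if __name__ == "__main__":
    demo.launch()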
heart.csv ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:948420b084d8a3a0ca42b8419fce9aee175879e43f8aedf712377899a67aa49b
+size 35921
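
Note on heart.csv: the committed file is only the three-line Git LFS pointer shown above, tracked via the new *.csv rule in .gitattributes; the real ~36 KB dataset is fetched by LFS. A minimal sketch of reading it once the actual file is present (no column names are assumed, since the diff does not show them):

import pandas as pd

# Assumes `git lfs pull` (or an LFS-aware clone) has replaced the pointer
# with the real CSV before reading it.
df = pd.read_csv("heart.csv")
print(df.shape)
print(df.columns.tolist())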
test_warning.py DELETED
@@ -1,8 +0,0 @@
1
- import gradio as gr
2
-
3
- with gr.Blocks() as demo:
4
- chatbot = gr.Chatbot(type="messages", allow_tags=False)
5
-
6
- if __name__ == "__main__":
7
- print("Running demo...")
8
- # demo.launch()