Abid Ali Awan committed on
Commit
475a1fb
·
1 Parent(s): cc4e23a

added fastmcp

Browse files
Files changed (2) hide show
  1. orchestrator.py +277 -100
  2. requirements.txt +1 -2
orchestrator.py CHANGED
@@ -1,44 +1,52 @@
1
  import os
2
  import json
3
- from contextlib import AsyncExitStack
4
  from typing import Any, List, Tuple, AsyncGenerator, Dict
5
 
6
  from openai import AsyncOpenAI
7
 
8
- # Try to import MCP client with different possible paths
9
  MCP_AVAILABLE = False
 
10
  ClientSession = None
11
- streamablehttp_client = None
12
 
13
- # Try multiple import paths for MCP
14
- try:
15
- import mcp
16
- print(f"MCP package found at: {mcp.__file__}")
17
- # Try to see what's available
18
- print(f"MCP module contents: {dir(mcp)}")
19
 
20
- # Try direct import
21
- from mcp import ClientSession, streamablehttp_client
 
 
22
  MCP_AVAILABLE = True
23
- print("MCP imported successfully via direct path")
24
- except ImportError as e:
25
- print(f"Direct MCP import failed: {e}")
 
26
  try:
27
- from mcp.client.session import ClientSession
28
- from mcp.client.streamablehttp import streamablehttp_client
29
  MCP_AVAILABLE = True
30
- print("MCP imported successfully via client.session path")
 
31
  except ImportError as e2:
32
- print(f"Client.session path failed: {e2}")
33
  try:
 
34
  from mcp.client.session import ClientSession
35
- from mcp.client.transport.streamablehttp import streamablehttp_client
36
  MCP_AVAILABLE = True
37
- print("MCP imported successfully via transport path")
38
  except ImportError as e3:
39
- print(f"All MCP imports failed. Final error: {e3}")
40
- # MCP not available
41
- pass
 
 
 
 
 
 
 
 
 
42
 
43
  MCP_SERVER_URL = "https://mcp-1st-birthday-auto-deployer.hf.space/gradio_api/mcp/"
44
 
@@ -54,107 +62,276 @@ def _is_data_or_model_question(message: str) -> bool:
54
  return any(keyword.lower() in message.lower() for keyword in keywords)
55
 
56
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
57
  async def _mcp_chat_stream(full_prompt: str) -> AsyncGenerator[Dict[str, Any], None]:
58
  """
59
  Simple autonomous AI that uses MCP tools for data/model questions
60
  """
61
  if not MCP_AVAILABLE:
62
- yield {"type": "error", "content": "❌ MCP tools not available. Please install MCP client."}
63
  return
64
 
65
- # Extract user message
66
- user_message = full_prompt.split("👤 **New request**:")[-1].strip() if "👤 **New request**:" in full_prompt else full_prompt
67
- file_url = full_prompt.split("HTTP URL:\n")[-1].split("\n\n")[0] if "HTTP URL:" in full_prompt else ""
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
68
 
69
  # Initialize OpenAI client
70
  client = AsyncOpenAI(api_key=os.environ.get("OPENAI_API_KEY", ""))
71
 
72
- # Connect to MCP
73
- async with AsyncExitStack() as stack:
74
- sse_transport = await stack.enter_async_context(streamablehttp_client(MCP_SERVER_URL))
75
- session = await stack.enter_async_context(ClientSession(sse_transport[0], sse_transport[1]))
76
- await session.initialize()
77
-
78
- # Get tools from MCP
79
- mcp_tools = await session.list_tools()
80
- openai_tools = []
81
-
82
- for tool in mcp_tools.tools:
83
- openai_tool = {
84
- "type": "function",
85
- "function": {
86
- "name": tool.name.replace("Auto_Deployer_", ""),
87
- "description": tool.description or "MCP tool",
88
- "parameters": tool.inputSchema or {}
89
- }
90
- }
91
- openai_tools.append(openai_tool)
92
-
93
- # System message for autonomous AI
94
- system_message = (
95
- "You are an intelligent MLOps assistant. "
96
- "Automatically use MCP tools when users ask about data analysis, model training, or deployment. "
97
- "Always analyze the CSV file first if available. "
98
- "Extract relevant information and provide clear, human-readable answers. "
99
- "Be proactive and helpful."
100
- )
101
-
102
- messages = [
103
- {"role": "system", "content": system_message},
104
- {"role": "user", "content": f"File URL: {file_url}\n\nQuestion: {user_message}"}
105
- ]
106
 
107
- # Get AI response with tool calls
108
- response = await client.chat.completions.create(
109
- model="gpt-5-mini",
110
- messages=messages,
111
- tools=openai_tools
112
- )
113
 
114
- message = response.choices[0].message
115
- tool_calls = message.tool_calls or []
 
 
 
116
 
117
- # Execute tool calls if any
118
- if tool_calls:
119
- yield {"type": "thinking", "content": "🤖 **Analyzing your request and using appropriate tools...**"}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
120
 
121
- for tool_call in tool_calls:
122
- args = json.loads(tool_call.function.arguments)
 
 
123
 
124
- # Add file_url if not present and needed
125
- if file_url and 'file_path' not in args and 'file_url' not in args:
126
- args['file_path'] = file_url
127
- elif file_url and 'file_url' in [p.lower() for p in args.keys()]:
128
- args['file_url'] = file_url
 
129
 
130
- yield {"type": "tool", "content": f"🔧 **Using**: {tool_call.function.name}"}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
131
 
132
- # Execute MCP tool
133
- result = await session.call_tool(f"Auto_Deployer_{tool_call.function.name}", args)
 
 
 
 
 
 
 
 
 
 
 
134
 
135
- # Extract and display results
136
- if hasattr(result, 'content'):
137
- content = result.content
138
- if isinstance(content, list):
139
- for item in content:
140
- if hasattr(item, 'text'):
141
- yield {"type": "result", "content": f"📊 **Result**: {item.text}"}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
142
  else:
143
- yield {"type": "result", "content": f"📊 **Result**: {content}"}
 
 
 
 
144
 
145
- # Get final analysis
146
- messages.append({"role": "assistant", "content": message.content or ""})
147
 
148
- final_response = await client.chat.completions.create(
149
- model="gpt-5-mini",
150
- messages=messages
151
- )
 
 
 
152
 
153
- yield {"type": "final", "content": f"🎯 **Answer**: {final_response.choices[0].message.content}"}
154
 
155
- else:
156
- # No tools needed, just return AI response
157
- yield {"type": "final", "content": f"🎯 **Answer**: {message.content}"}
158
 
159
 
160
  async def run_orchestrated_chat_stream(
 
1
  import os
2
  import json
 
3
  from typing import Any, List, Tuple, AsyncGenerator, Dict
4
 
5
  from openai import AsyncOpenAI
6
 
7
# Try to import an MCP client, preferring FastMCP's streamable-HTTP-capable
# Client and falling back to the reference `mcp` SDK. Import failures are
# tolerated: MCP_AVAILABLE records whether any client could be loaded so the
# rest of the module can degrade to raw HTTP requests.
MCP_AVAILABLE = False
FastMCPClient = None
ClientSession = None

print("Attempting to import FastMCP client...")

try:
    # Preferred: FastMCP exposes its client class as `fastmcp.Client`.
    from fastmcp import Client
    FastMCPClient = Client
    MCP_AVAILABLE = True
    print(" FastMCP Client imported successfully!")

except ImportError as e1:
    print(f"❌ Failed to import FastMCP Client: {e1}")
    try:
        # Alternative FastMCP import pattern used by some releases.
        from fastmcp import FastMCPClient
        MCP_AVAILABLE = True
        print(" FastMCPClient imported successfully!")

    except ImportError as e2:
        print(f" Failed to import FastMCPClient: {e2}")
        try:
            # Fallback to the reference `mcp` package.
            # BUG FIX: the transport module is `mcp.client.streamable_http`
            # (with underscore) — the previous path `mcp.client.streamablehttp`
            # does not exist in the SDK, so this branch could never succeed.
            from mcp.client.session import ClientSession
            from mcp.client.streamable_http import streamablehttp_client
            MCP_AVAILABLE = True
            print("MCP client imported successfully via mcp.client.streamable_http")
        except ImportError as e3:
            print(f" Failed to import via mcp.client.streamable_http: {e3}")
            try:
                # Last resort: ClientSession alone (no streamable-HTTP transport).
                from mcp.client.session import ClientSession
                MCP_AVAILABLE = True
                print("✅ MCP ClientSession imported (streamablehttp_client not available)")
            except ImportError as e4:
                print(f"❌ All MCP import attempts failed: {e4}")
                MCP_AVAILABLE = False

if not MCP_AVAILABLE:
    print("⚠️ MCP client not available - falling back to HTTP requests")

MCP_SERVER_URL = "https://mcp-1st-birthday-auto-deployer.hf.space/gradio_api/mcp/"
52
 
 
62
  return any(keyword.lower() in message.lower() for keyword in keywords)
63
 
64
 
65
async def _make_mcp_request(tool_name: str, arguments: dict) -> dict:
    """
    Call an MCP tool over plain HTTP (JSON-RPC 2.0) when no MCP client
    library is available.

    Args:
        tool_name: Tool name WITHOUT the "Auto_Deployer_" prefix; the prefix
            is re-added here before dispatch to the server.
        arguments: JSON-serializable arguments for the tool.

    Returns:
        The decoded JSON-RPC response body as a dict.
    """
    import httpx

    # BUG FIX: httpx's default timeout is 5 seconds, which aborts
    # long-running ML tool calls (data analysis, model training). Allow a
    # generous window before giving up.
    async with httpx.AsyncClient(timeout=300.0) as http_client:
        response = await http_client.post(
            MCP_SERVER_URL,
            json={
                "jsonrpc": "2.0",
                "id": "1",
                "method": "tools/call",
                "params": {
                    "name": f"Auto_Deployer_{tool_name}",
                    "arguments": arguments,
                },
            },
        )
        return response.json()
87
async def _mcp_chat_stream(full_prompt: str) -> AsyncGenerator[Dict[str, Any], None]:
    """
    Simple autonomous AI that uses MCP tools for data/model questions.

    Parses the user message and any uploaded-file URL out of the composed
    prompt, then delegates to the FastMCP client path when available, or to
    the raw-HTTP fallback otherwise, re-yielding every event dict produced.

    Yields:
        Event dicts of shape {"type": ..., "content": ...} where type is one
        of "thinking", "tool", "result", "final", or "error".
    """
    if not MCP_AVAILABLE:
        yield {"type": "error", "content": "❌ MCP tools not available. Please install proper MCP client package."}
        return

    # Extract the user message from the composed prompt.
    if "👤 **New request**:" in full_prompt:
        user_message = full_prompt.split("👤 **New request**:")[-1].strip()
    else:
        user_message = full_prompt.strip()

    # Extract the uploaded-file URL, trying both known prompt formats.
    file_url = ""
    if "📎 File available at: " in full_prompt:
        file_url = full_prompt.split("📎 File available at: ")[-1].split("\n\n")[0].strip()
    elif "📎 **File uploaded!**" in full_prompt:
        # The upload notice embeds the URL on its own "HTTP URL:" line.
        for line in full_prompt.split("\n"):
            if "HTTP URL:" in line:
                file_url = line.split("HTTP URL:")[-1].strip()
                break

    # Initialize OpenAI client
    client = AsyncOpenAI(api_key=os.environ.get("OPENAI_API_KEY", ""))

    # BUG FIX: the previous code used `return await _use_proper_mcp_client(...)`.
    # `return <value>` inside an async generator is a SyntaxError, and the
    # callees are themselves async generators that cannot be awaited. An async
    # generator must delegate by iterating the inner generator and re-yielding.
    if FastMCPClient and MCP_AVAILABLE:
        yield {"type": "thinking", "content": "🤖 **Using FastMCP client...**"}
        async for event in _use_proper_mcp_client(client, user_message, file_url):
            yield event
    else:
        yield {"type": "thinking", "content": "🤖 **Using HTTP fallback for MCP...**"}
        async for event in _use_http_fallback(client, user_message, file_url):
            yield event
127
async def _use_proper_mcp_client(client, user_message: str, file_url: str):
    """
    Drive one tool-using chat turn through the FastMCP client.

    Lists the server's tools, exposes them to the OpenAI model as function
    tools, executes any requested tool calls via MCP, and yields
    progress/result/final event dicts along the way.

    Args:
        client: An AsyncOpenAI client instance.
        user_message: The user's question, already extracted from the prompt.
        file_url: HTTP URL of an uploaded CSV file, or "" when none.
    """
    if not FastMCPClient:
        yield {"type": "error", "content": "❌ FastMCP client not available"}
        return

    try:
        # Connect to MCP server using FastMCP
        async with FastMCPClient(MCP_SERVER_URL) as mcp_client:
            # BUG FIX: FastMCP's Client.list_tools() returns a plain list of
            # tools, so `.tools` raised AttributeError; the reference SDK wraps
            # the list in an object with a `.tools` field. Accept both shapes.
            listed = await mcp_client.list_tools()
            tools = listed.tools if hasattr(listed, "tools") else listed

            openai_tools = [
                {
                    "type": "function",
                    "function": {
                        # Drop the server prefix so the model sees short names;
                        # the prefix is re-added when the tool is invoked.
                        "name": tool.name.replace("Auto_Deployer_", ""),
                        "description": tool.description or "MCP tool",
                        "parameters": tool.inputSchema or {},
                    },
                }
                for tool in tools
            ]

            # System message for the autonomous assistant.
            system_message = (
                "You are an intelligent MLOps assistant. "
                "Automatically use MCP tools when users ask about data analysis, model training, or deployment. "
                "Always analyze the CSV file first if available. "
                "Extract relevant information and provide clear, human-readable answers. "
                "Be proactive and helpful."
            )

            messages = [
                {"role": "system", "content": system_message},
                {"role": "user", "content": f"File URL: {file_url}\n\nQuestion: {user_message}"},
            ]

            # Get AI response with tool calls
            response = await client.chat.completions.create(
                model="gpt-4o",
                messages=messages,
                tools=openai_tools,
            )

            message = response.choices[0].message
            tool_calls = message.tool_calls or []

            if tool_calls:
                yield {"type": "thinking", "content": "🤖 **Analyzing your request and using appropriate tools...**"}

                # BUG FIX: tool outputs were never fed back to the model, so
                # the "final analysis" completion answered without ever seeing
                # the results. Collect them here and append below.
                tool_outputs: List[str] = []

                for tool_call in tool_calls:
                    args = json.loads(tool_call.function.arguments)

                    # Inject the uploaded-file URL when the model omitted it.
                    if file_url and 'file_path' not in args:
                        args['file_path'] = file_url

                    yield {"type": "tool", "content": f"🔧 **Using**: {tool_call.function.name}"}

                    try:
                        # Execute MCP tool via FastMCP client (re-add prefix).
                        result = await mcp_client.call_tool(f"Auto_Deployer_{tool_call.function.name}", args)

                        if hasattr(result, 'content'):
                            content = result.content
                            if isinstance(content, list):
                                for item in content:
                                    if hasattr(item, 'text'):
                                        tool_outputs.append(item.text)
                                        yield {"type": "result", "content": f"📊 **Result**: {item.text}"}
                            else:
                                tool_outputs.append(str(content))
                                yield {"type": "result", "content": f"📊 **Result**: {content}"}
                        else:
                            tool_outputs.append(str(result))
                            yield {"type": "result", "content": f"📊 **Result**: {str(result)}"}

                    except Exception as e:
                        yield {"type": "error", "content": f"❌ **Error executing tool**: {str(e)}"}

                # Get final analysis — now including the tool outputs so the
                # model can actually summarize them.
                messages.append({"role": "assistant", "content": message.content or ""})
                if tool_outputs:
                    messages.append({
                        "role": "user",
                        "content": "Tool results:\n" + "\n\n".join(tool_outputs)
                        + "\n\nPlease provide a clear final answer based on these results.",
                    })

                final_response = await client.chat.completions.create(
                    model="gpt-4o",
                    messages=messages,
                )

                yield {"type": "final", "content": f"🎯 **Answer**: {final_response.choices[0].message.content}"}

            else:
                # No tools needed, just return AI response
                yield {"type": "final", "content": f"🎯 **Answer**: {message.content}"}

    except Exception as e:
        yield {"type": "error", "content": f"❌ **FastMCP client error**: {str(e)}"}
223
async def _use_http_fallback(client, user_message: str, file_url: str):
    """
    Tool-using chat turn via direct HTTP JSON-RPC requests to the MCP server.

    Used when no MCP client library could be imported. The available tools
    are hardcoded here since we cannot list them without a client.

    Args:
        client: An AsyncOpenAI client instance.
        user_message: The user's question, already extracted from the prompt.
        file_url: HTTP URL of an uploaded CSV file, or "" when none.
    """
    # Define available tools (hardcoded for fallback)
    openai_tools = [
        {
            "type": "function",
            "function": {
                "name": "analyze_data_tool",
                "description": "Analyze a dataset to understand its structure, statistics, and insights",
                "parameters": {
                    "type": "object",
                    "properties": {
                        "file_path": {"type": "string", "description": "Path to the CSV file to analyze"}
                    },
                    "required": ["file_path"]
                }
            }
        },
        {
            "type": "function",
            "function": {
                "name": "train_model_tool",
                "description": "Train a machine learning model on the provided dataset",
                "parameters": {
                    "type": "object",
                    "properties": {
                        "file_path": {"type": "string", "description": "Path to the training CSV file"},
                        "target_column": {"type": "string", "description": "Name of the target column"},
                        "task_type": {"type": "string", "enum": ["classification", "regression", "time_series"]},
                        "test_size": {"type": "number", "default": 0.2},
                        "random_state": {"type": "integer", "default": 42}
                    },
                    "required": ["file_path", "target_column", "task_type"]
                }
            }
        }
    ]

    # System message for the autonomous assistant.
    system_message = (
        "You are an intelligent MLOps assistant. "
        "Automatically use MCP tools when users ask about data analysis, model training, or deployment. "
        "Always analyze the CSV file first if available. "
        "Extract relevant information and provide clear, human-readable answers. "
        "Be proactive and helpful."
    )

    messages = [
        {"role": "system", "content": system_message},
        {"role": "user", "content": f"File URL: {file_url}\n\nQuestion: {user_message}"},
    ]

    # Get AI response with tool calls
    response = await client.chat.completions.create(
        model="gpt-4o",
        messages=messages,
        tools=openai_tools,
    )

    message = response.choices[0].message
    tool_calls = message.tool_calls or []

    if tool_calls:
        yield {"type": "thinking", "content": "🤖 **Analyzing your request and using appropriate tools...**"}

        # BUG FIX: tool outputs were never fed back to the model; collect them
        # so the final completion can reference the results.
        tool_outputs: List[str] = []

        for tool_call in tool_calls:
            args = json.loads(tool_call.function.arguments)

            # Inject the uploaded-file URL when the model omitted it.
            if file_url and 'file_path' not in args:
                args['file_path'] = file_url

            yield {"type": "tool", "content": f"🔧 **Using**: {tool_call.function.name}"}

            try:
                # Execute MCP tool via HTTP request
                result = await _make_mcp_request(tool_call.function.name, args)

                if "result" in result:
                    tool_result = result["result"]
                    # BUG FIX: `_make_mcp_request` returns JSON-decoded dicts,
                    # so the previous `hasattr(tool_result, 'content')` /
                    # `hasattr(item, 'text')` checks were always False and
                    # real results fell through to str(). Use dict access per
                    # the MCP JSON-RPC response shape instead.
                    if isinstance(tool_result, dict) and "content" in tool_result:
                        content = tool_result["content"]
                        if isinstance(content, list):
                            for item in content:
                                if isinstance(item, dict) and "text" in item:
                                    tool_outputs.append(item["text"])
                                    yield {"type": "result", "content": f"📊 **Result**: {item['text']}"}
                        else:
                            tool_outputs.append(str(content))
                            yield {"type": "result", "content": f"📊 **Result**: {content}"}
                    else:
                        tool_outputs.append(str(tool_result))
                        yield {"type": "result", "content": f"📊 **Result**: {str(tool_result)}"}
                elif "error" in result:
                    yield {"type": "error", "content": f"❌ **Tool Error**: {result['error']}"}
                else:
                    tool_outputs.append(str(result))
                    yield {"type": "result", "content": f"📊 **Result**: {str(result)}"}

            except Exception as e:
                yield {"type": "error", "content": f"❌ **Error executing tool**: {str(e)}"}

        # Get final analysis — include the tool outputs so the model can
        # summarize them instead of answering blind.
        messages.append({"role": "assistant", "content": message.content or ""})
        if tool_outputs:
            messages.append({
                "role": "user",
                "content": "Tool results:\n" + "\n\n".join(tool_outputs)
                + "\n\nPlease provide a clear final answer based on these results.",
            })

        final_response = await client.chat.completions.create(
            model="gpt-4o",
            messages=messages,
        )

        yield {"type": "final", "content": f"🎯 **Answer**: {final_response.choices[0].message.content}"}

    else:
        # No tools needed, just return AI response
        yield {"type": "final", "content": f"🎯 **Answer**: {message.content}"}
 
337
  async def run_orchestrated_chat_stream(
requirements.txt CHANGED
@@ -1,3 +1,2 @@
1
  openai==2.8.1
2
- gradio[mcp]==6.0.0
3
- mcp
 
1
  openai==2.8.1
2
+ fastmcp>=2.13.1