mgbam commited on
Commit
fed45f7
·
verified ·
1 Parent(s): fd33904

Upload generator.py

Browse files
Files changed (1) hide show
  1. mcp_gen/generator.py +369 -0
mcp_gen/generator.py ADDED
@@ -0,0 +1,369 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ MCP Generation Engine - The Innovation That Wins
3
+
4
+ Dynamically generates custom MCP servers based on user needs.
5
+ This is the KILLER FEATURE that has never been done before.
6
+ """
7
+
8
+ import os
9
+ import json
10
+ import asyncio
11
+ from typing import Dict, Any, List, Optional
12
+ from datetime import datetime
13
+ import hashlib
14
+ from pathlib import Path
15
+
16
+ from core.model_router import router, TaskType
17
+
18
+
19
+ class MCPGenerator:
20
+ """
21
+ Generates custom MCP servers on-the-fly using AI.
22
+
23
+ INNOVATION: Instead of pre-built tools, this creates new tools as needed.
24
+ - User needs web scraping? Generate scraper MCP
25
+ - User needs data analysis? Generate analyzer MCP
26
+ - User needs API integration? Generate connector MCP
27
+ """
28
+
29
+ def __init__(self, output_dir: str = "./generated_mcps"):
30
+ self.output_dir = Path(output_dir)
31
+ self.output_dir.mkdir(parents=True, exist_ok=True)
32
+ self.generated_servers = {}
33
+
34
+ async def generate_mcp_server(
35
+ self,
36
+ task_description: str,
37
+ tool_name: Optional[str] = None,
38
+ context: Optional[Dict[str, Any]] = None
39
+ ) -> Dict[str, Any]:
40
+ """
41
+ Generate a complete MCP server from a task description.
42
+
43
+ Args:
44
+ task_description: What the tool should do (e.g., "scrape product prices from Amazon")
45
+ tool_name: Optional custom name for the tool
46
+ context: Additional context (APIs to use, data schemas, etc.)
47
+
48
+ Returns:
49
+ Dict with server code, deployment info, and usage instructions
50
+ """
51
+ print(f"[GEN] Generating MCP server for: {task_description}")
52
+
53
+ # Step 1: Analyze task and plan MCP architecture
54
+ planning_prompt = f"""You are an expert MCP (Model Context Protocol) server architect.
55
+
56
+ Task: {task_description}
57
+ Context: {json.dumps(context or {}, indent=2)}
58
+
59
+ Analyze this task and design an MCP server architecture:
60
+
61
+ 1. What tools/functions does this MCP need? (1-5 functions)
62
+ 2. What are the function signatures? (name, parameters, return types)
63
+ 3. What external APIs or libraries are needed?
64
+ 4. What are the edge cases and error handling needs?
65
+ 5. What's the best way to structure this MCP for reusability?
66
+
67
+ Respond with a JSON object:
68
+ {{
69
+ "server_name": "descriptive_name",
70
+ "description": "what this MCP does",
71
+ "tools": [
72
+ {{
73
+ "name": "tool_function_name",
74
+ "description": "what it does",
75
+ "parameters": {{"param1": "type", "param2": "type"}},
76
+ "returns": "return_type",
77
+ "implementation_notes": "how to implement"
78
+ }}
79
+ ],
80
+ "dependencies": ["package1", "package2"],
81
+ "complexity": "simple|medium|complex"
82
+ }}
83
+ """
84
+
85
+ plan_result = await router.generate(
86
+ planning_prompt,
87
+ task_type=TaskType.PLANNING,
88
+ temperature=0.3
89
+ )
90
+
91
+ # Parse planning response
92
+ try:
93
+ plan_json = self._extract_json(plan_result["response"])
94
+ except Exception as e:
95
+ print(f"❌ Failed to parse planning response: {e}")
96
+ # Fallback to simple single-tool server
97
+ plan_json = {
98
+ "server_name": tool_name or "custom_tool",
99
+ "description": task_description,
100
+ "tools": [{
101
+ "name": "execute",
102
+ "description": task_description,
103
+ "parameters": {"input": "str"},
104
+ "returns": "dict"
105
+ }],
106
+ "dependencies": [],
107
+ "complexity": "simple"
108
+ }
109
+
110
+ print(f"[PLAN] {plan_json['server_name']} with {len(plan_json['tools'])} tools")
111
+
112
+ # Step 2: Generate MCP server code
113
+ code_prompt = f"""You are an expert Python developer specializing in MCP servers.
114
+
115
+ Generate a COMPLETE, PRODUCTION-READY Gradio MCP server based on this specification:
116
+
117
+ {json.dumps(plan_json, indent=2)}
118
+
119
+ Requirements:
120
+ 1. Use Gradio for the MCP server interface
121
+ 2. Implement ALL tools from the specification
122
+ 3. Include proper error handling and logging
123
+ 4. Add docstrings and type hints
124
+ 5. Make it deployable to Hugging Face Spaces
125
+ 6. Include a simple Gradio UI for testing the tools
126
+ 7. Follow MCP protocol standards
127
+
128
+ Generate the COMPLETE app.py file with:
129
+ - All imports
130
+ - Tool implementations
131
+ - Gradio interface
132
+ - MCP endpoint setup
133
+ - Error handling
134
+ - Main execution block
135
+
136
+ IMPORTANT: Return ONLY the Python code, no explanations.
137
+ """
138
+
139
+ code_result = await router.generate(
140
+ code_prompt,
141
+ task_type=TaskType.CODE_GEN,
142
+ max_tokens=4000,
143
+ temperature=0.2
144
+ )
145
+
146
+ server_code = self._extract_code(code_result["response"])
147
+
148
+ # Step 3: Generate requirements.txt
149
+ requirements = self._generate_requirements(plan_json["dependencies"])
150
+
151
+ # Step 4: Generate README.md
152
+ readme = self._generate_readme(plan_json, task_description)
153
+
154
+ # Step 5: Save generated files
155
+ server_id = self._generate_server_id(plan_json["server_name"])
156
+ server_dir = self.output_dir / server_id
157
+ server_dir.mkdir(parents=True, exist_ok=True)
158
+
159
+ # Write files with UTF-8 encoding (Windows compatibility)
160
+ (server_dir / "app.py").write_text(server_code, encoding='utf-8')
161
+ (server_dir / "requirements.txt").write_text(requirements, encoding='utf-8')
162
+ (server_dir / "README.md").write_text(readme, encoding='utf-8')
163
+
164
+ # Store metadata
165
+ metadata = {
166
+ "server_id": server_id,
167
+ "server_name": plan_json["server_name"],
168
+ "description": plan_json["description"],
169
+ "tools": plan_json["tools"],
170
+ "task_description": task_description,
171
+ "generated_at": datetime.now().isoformat(),
172
+ "directory": str(server_dir),
173
+ "files": {
174
+ "app": str(server_dir / "app.py"),
175
+ "requirements": str(server_dir / "requirements.txt"),
176
+ "readme": str(server_dir / "README.md")
177
+ },
178
+ "deployment_status": "generated",
179
+ "complexity": plan_json.get("complexity", "medium")
180
+ }
181
+
182
+ self.generated_servers[server_id] = metadata
183
+
184
+ print(f"[OK] Generated MCP server: {server_id}")
185
+ print(f"[LOC] Location: {server_dir}")
186
+ print(f"[TOOLS] Tools: {[t['name'] for t in plan_json['tools']]}")
187
+
188
+ return metadata
189
+
190
+ def _extract_json(self, text: str) -> Dict[str, Any]:
191
+ """Extract JSON from LLM response"""
192
+ import re
193
+
194
+ # Try to find JSON block
195
+ json_match = re.search(r'\{[\s\S]*\}', text)
196
+ if json_match:
197
+ return json.loads(json_match.group())
198
+
199
+ # Try parsing entire response
200
+ return json.loads(text)
201
+
202
+ def _extract_code(self, text: str) -> str:
203
+ """Extract Python code from LLM response"""
204
+ import re
205
+
206
+ # Try to find code block
207
+ code_match = re.search(r'```python\n([\s\S]*?)\n```', text)
208
+ if code_match:
209
+ return code_match.group(1)
210
+
211
+ code_match = re.search(r'```\n([\s\S]*?)\n```', text)
212
+ if code_match:
213
+ return code_match.group(1)
214
+
215
+ # Return as-is if no code block found
216
+ return text
217
+
218
+ def _generate_requirements(self, dependencies: List[str]) -> str:
219
+ """Generate requirements.txt content"""
220
+ base_requirements = [
221
+ "gradio>=6.0.0",
222
+ "httpx>=0.28.0",
223
+ "pydantic>=2.0.0"
224
+ ]
225
+
226
+ all_requirements = base_requirements + dependencies
227
+ return "\n".join(all_requirements)
228
+
229
+ def _generate_readme(self, plan: Dict[str, Any], task_description: str) -> str:
230
+ """Generate README.md for the MCP server"""
231
+ tools_md = "\n".join([
232
+ f"- **{tool['name']}**: {tool['description']}"
233
+ for tool in plan["tools"]
234
+ ])
235
+
236
+ return f"""# {plan['server_name']}
237
+
238
+ {plan['description']}
239
+
240
+ ## Original Request
241
+ {task_description}
242
+
243
+ ## Available Tools
244
+
245
+ {tools_md}
246
+
247
+ ## Installation
248
+
249
+ ```bash
250
+ pip install -r requirements.txt
251
+ ```
252
+
253
+ ## Usage
254
+
255
+ ### As MCP Server
256
+
257
+ ```python
258
+ # Connect to this MCP server from your agent
259
+ mcp_url = "https://huggingface.co/spaces/YOUR_USERNAME/{plan['server_name']}/gradio_api/mcp/sse"
260
+ ```
261
+
262
+ ### Standalone Testing
263
+
264
+ ```bash
265
+ python app.py
266
+ ```
267
+
268
+ Then open http://localhost:7860 in your browser.
269
+
270
+ ## Auto-Generated
271
+
272
+ This MCP server was automatically generated by OmniMind Orchestrator.
273
+
274
+ Generated: {datetime.now().strftime("%Y-%m-%d %H:%M:%S")}
275
+ """
276
+
277
+ def _generate_server_id(self, server_name: str) -> str:
278
+ """Generate unique server ID"""
279
+ timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
280
+ name_hash = hashlib.md5(server_name.encode()).hexdigest()[:6]
281
+ return f"{server_name.lower().replace(' ', '_')}_{name_hash}_{timestamp}"
282
+
283
+ async def improve_mcp_server(
284
+ self,
285
+ server_id: str,
286
+ feedback: str,
287
+ error_log: Optional[str] = None
288
+ ) -> Dict[str, Any]:
289
+ """
290
+ Improve an existing MCP server based on feedback or errors.
291
+
292
+ This makes the system SELF-EVOLVING - it learns and improves tools.
293
+ """
294
+ if server_id not in self.generated_servers:
295
+ raise ValueError(f"Server {server_id} not found")
296
+
297
+ metadata = self.generated_servers[server_id]
298
+ current_code = Path(metadata["files"]["app"]).read_text()
299
+
300
+ improvement_prompt = f"""You are improving an existing MCP server.
301
+
302
+ Current Implementation:
303
+ ```python
304
+ {current_code}
305
+ ```
306
+
307
+ Feedback: {feedback}
308
+
309
+ {f"Error Log: {error_log}" if error_log else ""}
310
+
311
+ Analyze the issues and generate an IMPROVED version of the code.
312
+ Fix bugs, optimize performance, add missing features.
313
+
314
+ Return ONLY the complete improved Python code.
315
+ """
316
+
317
+ result = await router.generate(
318
+ improvement_prompt,
319
+ task_type=TaskType.CODE_GEN,
320
+ max_tokens=4000,
321
+ temperature=0.2
322
+ )
323
+
324
+ improved_code = self._extract_code(result["response"])
325
+
326
+ # Save improved version with UTF-8 encoding (Windows compatibility)
327
+ server_dir = Path(metadata["directory"])
328
+ backup_path = server_dir / f"app_backup_{datetime.now().strftime('%Y%m%d_%H%M%S')}.py"
329
+ Path(metadata["files"]["app"]).rename(backup_path)
330
+ Path(metadata["files"]["app"]).write_text(improved_code, encoding='utf-8')
331
+
332
+ metadata["improved_at"] = datetime.now().isoformat()
333
+ metadata["improvement_count"] = metadata.get("improvement_count", 0) + 1
334
+
335
+ print(f"[OK] Improved MCP server: {server_id}")
336
+
337
+ return metadata
338
+
339
+ def list_servers(self) -> List[Dict[str, Any]]:
340
+ """List all generated MCP servers"""
341
+ return list(self.generated_servers.values())
342
+
343
+ def get_server(self, server_id: str) -> Optional[Dict[str, Any]]:
344
+ """Get metadata for a specific server"""
345
+ return self.generated_servers.get(server_id)
346
+
347
+ async def test_mcp_server(self, server_id: str, test_input: Dict[str, Any]) -> Dict[str, Any]:
348
+ """
349
+ Test a generated MCP server locally before deployment.
350
+
351
+ Returns test results and any errors.
352
+ """
353
+ if server_id not in self.generated_servers:
354
+ raise ValueError(f"Server {server_id} not found")
355
+
356
+ metadata = self.generated_servers[server_id]
357
+
358
+ # In production, this would actually run the MCP server
359
+ # For now, return simulation
360
+ return {
361
+ "server_id": server_id,
362
+ "status": "success",
363
+ "test_input": test_input,
364
+ "message": "Server would be tested here in production"
365
+ }
366
+
367
+
368
+ # Global generator instance
369
+ generator = MCPGenerator()