Skip to content

Commit 80ee2f8

Browse files
Added MarkChat.
1 parent 5d448e5 commit 80ee2f8

13 files changed

Lines changed: 1278 additions & 1 deletion

File tree

.gitignore

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
.DS_Store
22
.venv
33
.idea
4-
4+
__pycache__
5+
*.pyc
Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,2 @@
1+
.env
2+
model.log
Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
3.13
Lines changed: 17 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,17 @@
1+
# 运行方法
2+
3+
1. 在当前目录中新建一个名为 `.env` 的文件,内容如下:
4+
5+
```text
6+
OPENROUTER_API_KEY="sk-xxx"
7+
```
8+
9+
其中 `sk-xxx` 替换为你的 OpenRouter API Key。
10+
11+
2. 启动服务器
12+
13+
```bash
14+
uv run start.py
15+
```
16+
17+
3. 打开浏览器,访问 `http://localhost:5000`
Lines changed: 182 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,182 @@
1+
import asyncio
2+
3+
import requests
4+
import json
5+
from dotenv import load_dotenv
6+
import os
7+
8+
from mcp_client import MCPClient
9+
10+
11+
def get_api_key() -> str:
    """Read the OpenRouter API key from the environment.

    Variables from a local ``.env`` file are loaded first, then
    ``OPENROUTER_API_KEY`` is looked up.

    Returns:
        The API key string.

    Raises:
        ValueError: If the variable is missing or empty.
    """
    load_dotenv()
    key = os.getenv("OPENROUTER_API_KEY")
    if key:
        return key
    raise ValueError("未找到 OPENROUTER_API_KEY 环境变量,请在 .env 文件中设置。")
18+
19+
20+
# The key is resolved once at import time so a missing key fails fast on startup.
OPENROUTER_API_KEY = get_api_key()
MODEL_NAME = "openai/gpt-4o-mini"

# Tool schema advertised to the model (OpenAI function-calling format).
# The name/parameters here must match what execute_tool() dispatches on.
TOOLS = [
    {
        "type": "function",
        "function": {
            "name": "search",
            "description": "搜索网络",
            "parameters": {
                "type": "object",
                "properties": {
                    "query": {
                        "type": "string",
                        "description": "要搜索的内容"
                    }
                },
                "required": ["query"]
            }
        }
    }
]
42+
43+
44+
class AppLogger:
    """Append-only file logger for the model request/response traces."""

    def __init__(self):
        """Create the logger and truncate the log file from any previous run."""
        # Log file lives in the current working directory.
        self.log_file = "model.log"
        # Truncate on startup so each run produces a fresh trace.
        # encoding is pinned to utf-8 because messages contain Chinese text
        # and the platform-default codec may not be able to encode it.
        with open(self.log_file, 'w', encoding='utf-8'):
            pass

    def log(self, message):
        """Append *message* plus a trailing newline to the log file.

        Note: this writes to the file only — the original docstring's claim
        of console output did not match the implementation.
        """
        with open(self.log_file, 'a', encoding='utf-8') as f:
            f.write(message + "\n")
58+
59+
60+
# Module-level logger instance shared by LLMProcessor below.
logger = AppLogger()
61+
62+
63+
class LLMProcessor:
    """Drives a two-step function-calling conversation with OpenRouter.

    Flow: send the user message together with the tool schema; if the model
    requests a tool call, execute it locally, append the result to the
    history, then call the model again for the final answer.
    """

    def __init__(self):
        self.api_key = OPENROUTER_API_KEY
        self.base_url = "https://openrouter.ai/api/v1/chat/completions"
        self.headers = {
            "Authorization": f"Bearer {self.api_key}",
            "Content-Type": "application/json"
        }
        # Full chat transcript (user / assistant / tool messages). It is kept
        # across calls so the model retains conversational context.
        self.history = []

    def process_user_query(self, query):
        """Handle one user message and describe the outcome.

        Args:
            query: The raw user message text.

        Returns:
            A dict always containing ``final_response``; when a tool was
            invoked it additionally contains ``tool_name``,
            ``tool_parameters``, ``tool_executed`` and ``tool_result``.
        """
        self.history.append({"role": "user", "content": query})

        first_model_response = self.call_model()
        first_model_message = first_model_response["choices"][0]["message"]
        self.history.append(first_model_message)

        # Did the model ask for a tool? Only the first tool call is honored.
        if "tool_calls" in first_model_message and first_model_message["tool_calls"]:
            tool_call = first_model_message["tool_calls"][0]
            tool_name = tool_call["function"]["name"]
            # Arguments arrive as a JSON-encoded string per the API contract.
            tool_args = json.loads(tool_call["function"]["arguments"])

            result = self.execute_tool(tool_name, tool_args)

            # Feed the tool result back so the model can compose its answer.
            self.history.append({
                "role": "tool",
                "tool_call_id": tool_call["id"],
                "name": tool_name,
                "content": result
            })

            second_response_data = self.call_model_after_tool_execution()
            final_message = second_response_data["choices"][0]["message"]
            self.history.append(final_message)

            return {
                "tool_name": tool_name,
                "tool_parameters": tool_args,
                "tool_executed": True,
                "tool_result": result,
                "final_response": final_message["content"],
            }
        else:
            return {
                "final_response": first_model_message["content"],
            }

    def execute_tool(self, function_name, args):
        """Dispatch a tool call by name and return its textual result.

        Raises:
            ValueError: If *function_name* is not a known tool.
        """
        if function_name == "search":
            # A real implementation would call a search API here; to keep the
            # demo simple, a canned result is returned for testing.
            return "纽约市今天的天气是晴天,明天的天气是多云。"
        else:
            raise ValueError(f"未知的工具名称:{function_name}")

    def _post_chat(self, request_body, label):
        """POST *request_body* to the chat endpoint, log both sides, return JSON.

        The HTTP status is checked *before* the body is parsed, so a non-JSON
        error page raises the intended exception instead of a JSON decoding
        error (the original code parsed the body for logging first).
        """
        response = requests.post(
            self.base_url,
            headers=self.headers,
            json=request_body
        )

        logger.log(f"{label}模型请求:\n{json.dumps(request_body, indent=2, ensure_ascii=False)}\n")

        if response.status_code != 200:
            raise Exception(f"API request failed with status {response.status_code}: {response.text}")

        payload = response.json()  # parse once; reused for the log and the caller
        logger.log(f"{label}模型返回:\n{json.dumps(payload, indent=2, ensure_ascii=False)}\n")
        return payload

    def call_model(self):
        """First round-trip: current history plus the advertised tool schema."""
        request_body = {
            "model": MODEL_NAME,
            "messages": self.history,
            "tools": TOOLS,
            "stream": False,
        }
        return self._post_chat(request_body, "第一次")

    def call_model_after_tool_execution(self):
        """Second round-trip, issued after the tool result joined the history."""
        second_request_body = {
            "model": MODEL_NAME,
            "messages": self.history,
            "tools": TOOLS,
        }
        return self._post_chat(second_request_body, "第二次")

    def execute_tool_with_mcp(self, function_name, args):
        """Synchronous wrapper around the async MCP tool call.

        Uses asyncio.run so the temporary event loop is created, run and
        always closed; the previous asyncio.new_event_loop() approach never
        closed the loop and leaked it on every call.
        """
        return asyncio.run(self.execute_tool_with_mcp_async(function_name, args))

    async def execute_tool_with_mcp_async(self, function_name, args):
        """Spawn the local MCP server as a subprocess and invoke one tool."""
        # Absolute path of mcp_server.py located next to this script.
        mcp_server_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "mcp_server.py"))

        # Start an MCP client (which launches the server via `uv run`) and
        # call the requested tool over stdio.
        async with MCPClient("uv", ["run", mcp_server_path]) as client:
            return await client.call_tool(function_name, args)
182+
Lines changed: 55 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,55 @@
1+
import asyncio
2+
from typing import Optional, List, Dict, Any
3+
from contextlib import AsyncExitStack
4+
import os
5+
6+
from mcp import ClientSession, StdioServerParameters
7+
from mcp.client.stdio import stdio_client
8+
9+
10+
class MCPClient:
    """Async context manager that launches an MCP server over stdio and
    exposes its tools through :meth:`call_tool`."""

    def __init__(self, command: str, args: List[str]):
        """Remember how to launch the server; no connection is made yet."""
        self.session: Optional[ClientSession] = None
        self.exit_stack = AsyncExitStack()
        self.command = command
        self.args = args

    async def call_tool(self, tool_name: str, tool_args: Dict[str, Any]) -> str:
        """Invoke *tool_name* on the connected server and return the first
        text content of its result."""
        outcome = await self.session.call_tool(tool_name, tool_args)
        return outcome.content[0].text

    async def connect_to_server(self):
        """Connect to the MCP server using the uv run command"""
        params = StdioServerParameters(
            command=self.command,
            args=self.args,
            env=None,
        )

        # Spawn the subprocess and register both the transport and the
        # session on the exit stack so teardown happens in reverse order.
        read_stream, write_stream = await self.exit_stack.enter_async_context(
            stdio_client(params)
        )
        self.session = await self.exit_stack.enter_async_context(
            ClientSession(read_stream, write_stream)
        )
        await self.session.initialize()

    async def __aenter__(self):
        await self.connect_to_server()
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        # Closing the exit stack tears down the session and the subprocess.
        await self.exit_stack.aclose()
40+
41+
42+
# Example usage: run this file directly to spawn mcp_server.py and invoke its
# "search" tool once.
if __name__ == "__main__":

    async def main():

        # Absolute path of mcp_server.py in the same directory as this script.
        mcp_server_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "mcp_server.py"))

        # Start the MCP client (which launches the server via `uv run`) and
        # call the MCP tool.
        async with MCPClient("uv", ["run", mcp_server_path]) as client:
            result = await client.call_tool("search", { "query": "weather in New York"})
            print(result)

    asyncio.run(main())
Lines changed: 23 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,23 @@
1+
from mcp.server.fastmcp import FastMCP
2+
3+
4+
# Initialize the FastMCP server.
# NOTE(review): log_level="ERROR" presumably silences informational logging so
# it does not interfere with the stdio transport — confirm against FastMCP docs.
mcp = FastMCP("search_mcp_server", log_level="ERROR")
6+
7+
8+
# Tool definitions. (The original "# Constants" label here was a leftover and
# did not describe the code that follows.)
@mcp.tool()
async def search(query: str) -> str:
    """搜索网络

    Args:
        query: 搜索内容
    """
    # NOTE(review): the docstring above is runtime-visible — FastMCP publishes
    # it as the tool description — so it is deliberately left untranslated.
    # A real implementation would call a search API here; to keep the demo
    # simple, a canned result is returned for testing.
    return "来自 MCP Server 的答案:纽约市今天的天气是晴天,明天的天气是多云。"
19+
20+
21+
if __name__ == "__main__":
    # Initialize and run the server over stdio — the transport mcp_client.py
    # uses when it spawns this script as a subprocess.
    mcp.run(transport='stdio')
Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,12 @@
1+
[project]
2+
name = "function-calling-vs-mcp"
3+
version = "0.1.0"
4+
description = "Demo comparing OpenAI-style function calling with MCP tool calls"
5+
readme = "README.md"
6+
requires-python = ">=3.13"
7+
dependencies = [
8+
"dotenv>=0.9.9",
9+
"flask>=3.1.0",
10+
"mcp>=1.8.0",
11+
"requests>=2.31.0",
12+
]
Lines changed: 27 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,27 @@
1+
from flask import Flask, render_template, request, jsonify
2+
from backend import LLMProcessor, MODEL_NAME
3+
4+
app = Flask(__name__)

# A single shared processor: all HTTP requests reuse one chat history.
llm_processor = LLMProcessor()
7+
8+
@app.route('/')
def index():
    """Serve the main HTML page, passing the configured model name to the template."""
    return render_template('index.html', model_name=MODEL_NAME)
12+
13+
@app.route('/chat', methods=['POST'])
def chat():
    """Handle one chat message from the user.

    Expects a JSON body like ``{"message": "..."}``. Returns the processor's
    result as JSON, or a 400 error when the message is missing.
    """
    # silent=True yields None (instead of raising) for a missing or malformed
    # JSON body, so such requests get the clean 400 below rather than an
    # unhandled exception from `request.json`.
    data = request.get_json(silent=True) or {}
    user_query = data.get('message')

    if not user_query:
        return jsonify({"error": "No message provided"}), 400

    response_steps = llm_processor.process_user_query(user_query)
    return jsonify(response_steps)
24+
25+
if __name__ == '__main__':
    print("Flask app running on http://127.0.0.1:5000/")
    # debug=True enables the auto-reloader and interactive debugger.
    # NOTE(review): do not leave debug mode on outside local development.
    app.run(debug=True)

0 commit comments

Comments
 (0)