🌐 MCP-Use is an open-source tool that connects **any large language model (LLM)** to any MCP server, letting developers build custom agents with access to tools such as web browsing and file operations, without relying on closed-source or proprietary clients.
💡 It makes it easy to hook any LLM up to tools like web browsing and file operations.
Key Features
| Feature | Description |
| --- | --- |
| 🔄 Easy to use | Create an MCP-capable agent in just 6 lines of code |
| 🤖 LLM flexibility | Works with any LangChain-compatible LLM that supports tool calling (e.g. OpenAI, Anthropic, Groq, LLaMA) |
| 🌐 HTTP support | Connect directly to MCP servers running on a specific HTTP port |
| 🧩 Multi-server support | A single agent can use several MCP servers at the same time |
| 🛡️ Tool access control | Restrict potentially dangerous tools such as file-system or network access |
Quick Start
Install:

```bash
pip install mcp-use
```
Or install from source:

```bash
git clone https://github.com/pietrozullo/mcp-use.git
cd mcp-use
pip install -e .
```
Install a LangChain Provider
Install the LangChain provider that matches the LLM you want to use. For example:

```bash
# For OpenAI
pip install langchain-openai

# For Anthropic
pip install langchain-anthropic
```
Add your API keys to a `.env` file:

```
OPENAI_API_KEY=
ANTHROPIC_API_KEY=
```
Note: only models with tool-calling capabilities are supported.
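To catch a missing key early, here is a minimal sanity-check sketch using python-dotenv and os.getenv, the same mechanism the examples below rely on (adjust the variable name to your provider):

```python
import os
from dotenv import load_dotenv

# Read variables from the .env file in the working directory
load_dotenv()

# Fail fast if the key for the chosen provider is missing
if not os.getenv("OPENAI_API_KEY"):
    raise RuntimeError("OPENAI_API_KEY is not set; add it to your .env file")
```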
Start the Agent
```python
import asyncio
import os
from dotenv import load_dotenv
from langchain_openai import ChatOpenAI
from mcp_use import MCPAgent, MCPClient

async def main():
    load_dotenv()

    config = {
        "mcpServers": {
            "playwright": {
                "command": "npx",
                "args": ["@playwright/mcp@latest"],
                "env": {"DISPLAY": ":1"}
            }
        }
    }

    client = MCPClient.from_dict(config)
    llm = ChatOpenAI(model="gpt-4o")
    agent = MCPAgent(llm=llm, client=client, max_steps=30)

    result = await agent.run("Find the best restaurant in San Francisco")
    print(f"\nResult: {result}")

if __name__ == "__main__":
    asyncio.run(main())
```
You can also create the client from a configuration file:

```python
client = MCPClient.from_config_file("browser_mcp.json")
```
Example configuration file (`browser_mcp.json`):
```json
{
  "mcpServers": {
    "playwright": {
      "command": "npx",
      "args": ["@playwright/mcp@latest"],
      "env": {"DISPLAY": ":1"}
    }
  }
}
```
Example Use Cases
Web Browsing (with Playwright)
```python
# Same pattern as the quick-start example
import asyncio
import os
from dotenv import load_dotenv
from langchain_openai import ChatOpenAI
from mcp_use import MCPAgent, MCPClient

async def main():
    # Load environment variables
    load_dotenv()

    # Create MCPClient from config file
    client = MCPClient.from_config_file(
        os.path.join(os.path.dirname(__file__), "browser_mcp.json")
    )

    # Create LLM
    llm = ChatOpenAI(model="gpt-4o")
    # Alternative models:
    # llm = ChatAnthropic(model="claude-3-5-sonnet-20240620")
    # llm = ChatGroq(model="llama3-8b-8192")

    # Create agent with the client
    agent = MCPAgent(llm=llm, client=client, max_steps=30)

    # Run the query
    result = await agent.run(
        "Find the best restaurant in San Francisco USING GOOGLE SEARCH",
        max_steps=30,
    )
    print(f"\nResult: {result}")

if __name__ == "__main__":
    asyncio.run(main())
```
Airbnb Search
```python
# Airbnb search example
import asyncio
import os
from dotenv import load_dotenv
from langchain_anthropic import ChatAnthropic
from mcp_use import MCPAgent, MCPClient

async def run_airbnb_example():
    # Load environment variables
    load_dotenv()

    # Create MCPClient with Airbnb configuration
    client = MCPClient.from_config_file(
        os.path.join(os.path.dirname(__file__), "airbnb_mcp.json")
    )

    # Create LLM - you can choose between different models
    llm = ChatAnthropic(model="claude-3-5-sonnet-20240620")

    # Create agent with the client
    agent = MCPAgent(llm=llm, client=client, max_steps=30)

    try:
        # Run a query to search for accommodations
        result = await agent.run(
            "Find me a nice place to stay in Barcelona for 2 adults "
            "for a week in August. I prefer places with a pool and "
            "good reviews. Show me the top 3 options.",
            max_steps=30,
        )
        print(f"\nResult: {result}")
    finally:
        # Ensure we clean up resources properly
        if client.sessions:
            await client.close_all_sessions()

if __name__ == "__main__":
    asyncio.run(run_airbnb_example())
```
Example configuration file (`airbnb_mcp.json`):
```json
{
  "mcpServers": {
    "airbnb": {
      "command": "npx",
      "args": ["-y", "@openbnb/mcp-server-airbnb"]
    }
  }
}
```
Blender 3D Creation
```python
# Blender 3D creation example
import asyncio
from dotenv import load_dotenv
from langchain_anthropic import ChatAnthropic
from mcp_use import MCPAgent, MCPClient

async def run_blender_example():
    # Load environment variables
    load_dotenv()

    # Create MCPClient with Blender MCP configuration
    config = {"mcpServers": {"blender": {"command": "uvx", "args": ["blender-mcp"]}}}
    client = MCPClient.from_dict(config)

    # Create LLM
    llm = ChatAnthropic(model="claude-3-5-sonnet-20240620")

    # Create agent with the client
    agent = MCPAgent(llm=llm, client=client, max_steps=30)

    try:
        # Run the query
        result = await agent.run(
            "Create an inflatable cube with soft material and a plane as ground.",
            max_steps=30,
        )
        print(f"\nResult: {result}")
    finally:
        # Ensure we clean up resources properly
        if client.sessions:
            await client.close_all_sessions()

if __name__ == "__main__":
    asyncio.run(run_blender_example())
```
Configuration File Support
```python
# Initialize an MCP session from a configuration file
import asyncio
from mcp_use import create_session_from_config

async def main():
    # Create an MCP session from a config file
    session = create_session_from_config("mcp-config.json")

    # Initialize the session
    await session.initialize()

    # Use the session...

    # Disconnect when done
    await session.disconnect()

if __name__ == "__main__":
    asyncio.run(main())
```
HTTP Connection Example
You can also connect to an MCP server running on an HTTP port:
```python
# HTTP connection example
import asyncio
import os
from dotenv import load_dotenv
from langchain_openai import ChatOpenAI
from mcp_use import MCPAgent, MCPClient

async def main():
    """Run the example using an HTTP server configuration."""
    # Load environment variables
    load_dotenv()

    config = {
        "mcpServers": {
            "http": {
                "url": "http://localhost:8931/sse"
            }
        }
    }

    # Create MCPClient from config
    client = MCPClient.from_dict(config)

    # Create LLM
    llm = ChatOpenAI(model="gpt-4o")

    # Create agent with the client
    agent = MCPAgent(llm=llm, client=client, max_steps=30)

    # Run the query
    result = await agent.run(
        "Find the best restaurant in San Francisco USING GOOGLE SEARCH",
        max_steps=30,
    )
    print(f"\nResult: {result}")

if __name__ == "__main__":
    asyncio.run(main())
```
Multi-Server Support
A single agent can use several MCP servers at the same time, defined in one configuration:
```json
{
  "mcpServers": {
    "airbnb": {
      "command": "npx",
      "args": ["-y", "@openbnb/mcp-server-airbnb"]
    },
    "playwright": {
      "command": "npx",
      "args": ["@playwright/mcp@latest"],
      "env": {"DISPLAY": ":1"}
    }
  }
}
```
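Below is a minimal sketch of running an agent on top of this multi-server configuration, reusing the MCPClient/MCPAgent pattern from the examples above; the file name `multi_server_mcp.json` and the query text are illustrative assumptions, not part of the upstream project:

```python
import asyncio
from dotenv import load_dotenv
from langchain_anthropic import ChatAnthropic
from mcp_use import MCPAgent, MCPClient

async def run_multi_server_example():
    load_dotenv()

    # Both servers above are defined in one (hypothetical) config file
    client = MCPClient.from_config_file("multi_server_mcp.json")
    llm = ChatAnthropic(model="claude-3-5-sonnet-20240620")
    agent = MCPAgent(llm=llm, client=client, max_steps=30)

    try:
        # The agent can call tools from either server within a single run
        result = await agent.run(
            "Search for an apartment in Barcelona on Airbnb, "
            "then open the listing page in the browser."
        )
        print(f"\nResult: {result}")
    finally:
        # Clean up sessions for all connected servers
        if client.sessions:
            await client.close_all_sessions()

if __name__ == "__main__":
    asyncio.run(run_multi_server_example())
```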
Tool Access Control
Restrict the tools the agent can access to improve security:
```python
agent = MCPAgent(
    llm=ChatOpenAI(model="gpt-4"),
    client=client,
    disallowed_tools=["file_system", "network"]
)
```
Roadmap
• Multi-server support
• Remote connection tests (HTTP, WS)
• More features under active development
Contributing
Issues and feature requests are welcome.
Requirements
• Python 3.11+
• An MCP implementation (e.g., Playwright MCP)
• LangChain and the corresponding model libraries
Citation
If you use MCP-Use in your research or project, please cite it as:
```bibtex
@software{mcp_use2025,
  author = {Zullo, Pietro},
  title = {MCP-Use: MCP Library for Python},
  year = {2025},
  publisher = {GitHub},
  url = {https://github.com/pietrozullo/mcp-use}
}
```
MIT License.
Note: this article was translated and compiled by 山行AI from https://github.com/mcp-use/mcp-use.