MCP
MCP (Model Context Protocol) is an open protocol that standardizes how applications provide context to LLMs. It gives AI models a uniform way to connect to different data sources and tools.
STDIO (Standard Input/Output) transport is primarily used for inter-process communication on the same machine: the client launches the MCP server as a subprocess, writes requests to the server's standard input, and reads responses from its standard output.
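As a quick orientation, here is a minimal sketch of a FastMCP server that exposes a single tool over stdio; the echo tool and the server name are placeholders, not part of the example that follows.

from mcp.server.fastmcp import FastMCP

# Any MCP host that launches this script can call the echo tool over stdio
mcp = FastMCP("Demo")

@mcp.tool()
def echo(text: str) -> str:
    """Echo the given text back to the caller."""
    return text

if __name__ == "__main__":
    # Read JSON-RPC messages from stdin, write responses to stdout
    mcp.run(transport="stdio")

The full server below builds on the same pattern: it exposes a restaurant-search tool that geocodes a location with the Google Maps Geocoding API and then queries the Places API (Nearby Search) for restaurants.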
from dataclasses import asdict, dataclass

import aiohttp
from mcp.server.fastmcp import FastMCP

# Create the MCP server (the stdio transport needs no web framework imports)
mcp = FastMCP("Demo")
@dataclass
class DisplayName:
    text: str
    languageCode: str

@dataclass
class Restaurant:
    id: str
    displayName: DisplayName
    # Google may omit these fields for some places, and ratings are floats (e.g. 4.3)
    rating: float = 0.0
    primaryTypeDisplayName: str = ""

@dataclass
class Location:
    latitude: float
    longitude: float
async def get_latitude_longitude(location_name):
    # Google Maps Geocoding API endpoint
    endpoint = "https://maps.googleapis.com/maps/api/geocode/json"
    # Set up the parameters for the request (use your own API key)
    params = {
        'address': location_name,
        'key': "AIzaSyBUguvUNCxqha21BhH6r3bOcF48ckoxTz8"
    }
    async with aiohttp.ClientSession() as session:
        async with session.get(endpoint, params=params) as response:
            data = await response.json()
    # Check if the request was successful
    if data['status'] == 'OK':
        # Extract the latitude and longitude of the first match
        latitude = data['results'][0]['geometry']['location']['lat']
        longitude = data['results'][0]['geometry']['location']['lng']
        return Location(latitude=latitude, longitude=longitude)
    else:
        return None
async def get_restaurants(max_result_count: int, location_info: Location):
    # Google Places API (New) Nearby Search endpoint
    url = 'https://places.googleapis.com/v1/places:searchNearby'
    request_body = {
        "languageCode": "zh-HK",
        "regionCode": "HK",
        "includedTypes": ["restaurant"],
        "maxResultCount": max_result_count,
        "locationRestriction": {
            "circle": {
                "center": {
                    "latitude": location_info.latitude,
                    "longitude": location_info.longitude
                },
                "radius": 500.0
            }
        }
    }
    # The Places API caps maxResultCount at 20; drop it and fall back to the API default
    if max_result_count > 20:
        request_body.pop('maxResultCount')
    async with aiohttp.ClientSession() as session:
        resp = await session.post(
            url,
            json=request_body,
            headers={
                "Content-Type": "application/json",
                "X-Goog-Api-Key": "AIzaSyBUguvUNCxqha21BhH6r3bOcF48ckoxTz8",
                "X-Goog-FieldMask": "places.id,places.displayName,places.rating,places.primaryTypeDisplayName"
            }
        )
        res = await resp.json()
    restaurants = []
    if res.get('places'):
        # displayName stays a nested dict; the dataclass only names the fields we requested
        restaurants = [Restaurant(**restaurant) for restaurant in res.get('places')]
    else:
        print(res)
    return restaurants
@mcp.tool()
async def get_restaurant_list(location: str, result_count: int) -> list:
    """Get a list of restaurants near a location.

    Args:
        location: The location to search around (e.g. Kwun Tong)
        result_count: The number of restaurants to return. Defaults to 10; increase in steps of 10
            if the user wants more, or set it to 100 to return as many as possible.
    """
    print(f"location: {location}, result_count: {result_count}")
    location_info = await get_latitude_longitude(location)
    if location_info is None:
        print("Location not found")
        return []
    restaurants = await get_restaurants(max_result_count=result_count, location_info=location_info)
    return [asdict(restaurant) for restaurant in restaurants]
if __name__ == "__main__":
    mcp.run(transport='stdio')
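With the server saved as server.py, the client below launches it as a subprocess over stdio, loads its tools through langchain-mcp-adapters, and hands them to a LangGraph ReAct agent backed by a Claude model served via OpenRouter: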
# Create server parameters for stdio connection
import asyncio

from mcp import ClientSession, StdioServerParameters
from mcp.client.stdio import stdio_client
from langchain_mcp_adapters.tools import load_mcp_tools
from langgraph.prebuilt import create_react_agent
from langchain_openai import ChatOpenAI
from langchain_core.messages import HumanMessage

model = ChatOpenAI(
    model="anthropic/claude-3.5-sonnet",
    base_url="https://openrouter.ai/api/v1",
    api_key="xxxx"
)

server_params = StdioServerParameters(
    command="python",
    # Make sure to use the full absolute path to your server.py file
    args=["server.py"],
)

async def main():
    async with stdio_client(server_params) as (read, write):
        async with ClientSession(read, write) as session:
            # Initialize the connection
            await session.initialize()
            # Load the MCP tools as LangChain tools
            tools = await load_mcp_tools(session)
            print("tools:", tools)
            # Give the tools to a ReAct agent and run an example query
            agent = create_react_agent(model, tools)
            response = await agent.ainvoke(
                {"messages": [HumanMessage(content="Find me some restaurants in Kwun Tong")]}
            )
            print(response["messages"][-1].content)

if __name__ == "__main__":
    asyncio.run(main())
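Alternatively, an MCP host application (for example Claude Desktop, via its claude_desktop_config.json) can launch the same server itself from an mcpServers entry; here uv runs server.py from the project directory: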
{
  "mcpServers": {
    "Demo": {
      "command": "uv",
      "args": [
        "--directory",
        "/Users/petercheng/Desktop/my-mcp-server",
        "run",
        "server.py"
      ]
    }
  }
}
Server-Sent Events (SSE) is a technology that lets a server push real-time updates to web clients over a single, long-lived HTTP connection. It provides efficient one-way streaming from server to client, and MCP pairs it with plain HTTP POST for client-to-server messages, which makes it a good fit when the server runs as a standalone network service rather than a local subprocess.
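If you only need a quick SSE server, FastMCP can serve this transport directly; this is a minimal sketch, assuming your installed version of the Python MCP SDK supports the sse transport and the host/port settings shown:

from mcp.server.fastmcp import FastMCP

# host and port are FastMCP settings; adjust as needed
mcp = FastMCP("SSE", host="0.0.0.0", port=8080)

@mcp.tool()
def ping() -> str:
    """Simple health-check tool."""
    return "pong"

if __name__ == "__main__":
    # Serves the SSE event stream and the client message endpoint over HTTP
    mcp.run(transport="sse")

The full example below instead wires the same restaurant-search tool into a Starlette application explicitly, which makes the routing visible and lets you mount the MCP server alongside other endpoints: clients hold a long-lived GET connection to /sse for server-to-client events and POST their messages to /messages/.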
from dataclasses import asdict, dataclass
import aiohttp
from mcp.server.fastmcp import FastMCP
from mcp.server.sse import SseServerTransport
from mcp.server import Server
from starlette.applications import Starlette
from starlette.requests import Request
from starlette.routing import Mount, Route
import uvicorn
# Create the MCP server
mcp = FastMCP("SSE")
@dataclass
class DisplayName:
    text: str
    languageCode: str

@dataclass
class Restaurant:
    id: str
    displayName: DisplayName
    # Google may omit these fields for some places, and ratings are floats (e.g. 4.3)
    rating: float = 0.0
    primaryTypeDisplayName: str = ""

@dataclass
class Location:
    latitude: float
    longitude: float
async def get_latitude_longitude(location_name):
    # Google Maps Geocoding API endpoint
    endpoint = "https://maps.googleapis.com/maps/api/geocode/json"
    # Set up the parameters for the request (use your own API key)
    params = {
        'address': location_name,
        'key': "AIzaSyBUguvUNCxqha21BhH6r3bOcF48ckoxTz8"
    }
    async with aiohttp.ClientSession() as session:
        async with session.get(endpoint, params=params) as response:
            data = await response.json()
    # Check if the request was successful
    if data['status'] == 'OK':
        # Extract the latitude and longitude of the first match
        latitude = data['results'][0]['geometry']['location']['lat']
        longitude = data['results'][0]['geometry']['location']['lng']
        return Location(latitude=latitude, longitude=longitude)
    else:
        return None
async def get_restaurants(max_result_count: int, location_info: Location):
    # Google Places API (New) Nearby Search endpoint
    url = 'https://places.googleapis.com/v1/places:searchNearby'
    request_body = {
        "languageCode": "zh-HK",
        "regionCode": "HK",
        "includedTypes": ["restaurant"],
        "maxResultCount": max_result_count,
        "locationRestriction": {
            "circle": {
                "center": {
                    "latitude": location_info.latitude,
                    "longitude": location_info.longitude
                },
                "radius": 500.0
            }
        }
    }
    # The Places API caps maxResultCount at 20; drop it and fall back to the API default
    if max_result_count > 20:
        request_body.pop('maxResultCount')
    async with aiohttp.ClientSession() as session:
        resp = await session.post(
            url,
            json=request_body,
            headers={
                "Content-Type": "application/json",
                "X-Goog-Api-Key": "AIzaSyBUguvUNCxqha21BhH6r3bOcF48ckoxTz8",
                "X-Goog-FieldMask": "places.id,places.displayName,places.rating,places.primaryTypeDisplayName"
            }
        )
        res = await resp.json()
    restaurants = []
    if res.get('places'):
        # displayName stays a nested dict; the dataclass only names the fields we requested
        restaurants = [Restaurant(**restaurant) for restaurant in res.get('places')]
    else:
        print(res)
    return restaurants
@mcp.tool()
async def get_restaurant_list(location: str, result_count: int) -> list:
    """Get a list of restaurants near a location.

    Args:
        location: The location to search around (e.g. Kwun Tong)
        result_count: The number of restaurants to return. Defaults to 10; increase in steps of 10
            if the user wants more, or set it to 100 to return as many as possible.
    """
    print(f"location: {location}, result_count: {result_count}")
    location_info = await get_latitude_longitude(location)
    if location_info is None:
        print("Location not found")
        return []
    restaurants = await get_restaurants(max_result_count=result_count, location_info=location_info)
    return [asdict(restaurant) for restaurant in restaurants]
def create_starlette_app(mcp_server: Server, *, debug: bool = False) -> Starlette:
    """Create a Starlette application that can serve the MCP server with SSE."""
    sse = SseServerTransport("/messages/")

    async def handle_sse(request: Request) -> None:
        async with sse.connect_sse(
            request.scope,
            request.receive,
            request._send,  # the SSE transport needs the raw ASGI send callable
        ) as (read_stream, write_stream):
            await mcp_server.run(
                read_stream,
                write_stream,
                mcp_server.create_initialization_options(),
            )

    return Starlette(
        debug=debug,
        routes=[
            Route("/sse", endpoint=handle_sse),
            Mount("/messages/", app=sse.handle_post_message),
        ],
    )
if __name__ == "__main__":
    # Get the underlying low-level MCP server from FastMCP
    mcp_server = mcp._mcp_server
    # Create the Starlette app with SSE support
    starlette_app = create_starlette_app(mcp_server, debug=True)
    port = 8080
    print(f"Starting MCP server with SSE transport on port {port}...")
    print(f"SSE endpoint available at: http://localhost:{port}/sse")
    # Run the server using uvicorn
    uvicorn.run(starlette_app, host="0.0.0.0", port=port)
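On the client side there is no subprocess to launch; MultiServerMCPClient from langchain-mcp-adapters connects to the already running SSE endpoint, and the loaded tools are passed to the same kind of ReAct agent: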
# Connect to the SSE server (which must already be running on port 8080)
import asyncio

from langchain_mcp_adapters.client import MultiServerMCPClient
from langgraph.prebuilt import create_react_agent
from langchain_openai import ChatOpenAI
from langchain_core.messages import HumanMessage

model = ChatOpenAI(
    model="anthropic/claude-3.5-sonnet",
    base_url="https://openrouter.ai/api/v1",
    api_key="xxxx"
)

async def main():
    async with MultiServerMCPClient(
        {
            "demo": {
                # make sure the SSE server above is running on port 8080
                "url": "http://0.0.0.0:8080/sse",
                "transport": "sse",
            }
        }
    ) as client:
        tools = client.get_tools()
        print("tools:", tools)
        # Give the tools to a ReAct agent and run an example query
        agent = create_react_agent(model, tools)
        response = await agent.ainvoke(
            {"messages": [HumanMessage(content="Find me some restaurants in Kwun Tong")]}
        )
        print(response["messages"][-1].content)

if __name__ == "__main__":
    asyncio.run(main())
{
  "mcpServers": {
    "server-name": {
      "url": "http://0.0.0.0:8080/sse"
    }
  }
}