3.1 [MCP] Intro
S - Extend the LLM with external actions. Unify the protocol, and implement once.

TA - Model Context Protocol
Arch overview: boundaries, categories, arch flow (a minimal client-side sketch of the flow follows the server examples below).
External services (UPWARD: exposed as capabilities to the server):
- Local file
- Local desktop: screenshot the local computer so the model can tell what you are seeing
- API call
- Another LLM

A - Each external service is wrapped by an MCP server

Local file <- with open(file, "a"/"r", encoding="utf-8") as f: f.write(content) / notes = f.read()

from mcp.server.fastmcp import FastMCP

mcp = FastMCP("LocalNotes")

@mcp.tool()
def add_note_to_file(content: str) -> str:
    """
    Appends the given content to the user's local notes.

    Args:
        content: The text content to append.
    """
    filename = "notes.txt"
    try:
        with open(filename, "a", encoding="utf-8") as f:
            f.write(content + "\n")
        return f"Content appended to {filename}."
    except Exception as e:
        return f"Error appending to file {filename}: {e}"

@mcp.tool()
def read_notes() -> str:
    """
    Reads and returns the contents of the user's local notes.
    """
    filename = "notes.txt"
    try:
        with open(filename, "r", encoding="utf-8") as f:
            notes = f.read()
        return notes if notes else "No notes found."
    except FileNotFoundError:
        return "No notes file found."
    except Exception as e:
        return f"Error reading file {filename}: {e}"

if __name__ == "__main__":
    mcp.run()

Local desktop <- pyautogui.screenshot()

from mcp.server.fastmcp import FastMCP
from mcp.server.fastmcp.utilities.types import Image
import pyautogui
import io

# Create server
mcp = FastMCP("ScreenshotDemo")

@mcp.tool()
def capture_screenshot() -> Image:
    """
    Capture the current screen and return the image.
    Use this tool whenever the user requests a screenshot of their activity.
    """
    buffer = io.BytesIO()
    # If the image exceeds ~1MB it will be rejected by Claude, so compress it to a small JPEG.
    screenshot = pyautogui.screenshot()
    screenshot.convert("RGB").save(buffer, format="JPEG", quality=60, optimize=True)
    return Image(data=buffer.getvalue(), format="jpeg")

if __name__ == "__main__":
    mcp.run()

API call <- requests.get()

from mcp.server.fastmcp import FastMCP
import requests

mcp = FastMCP("Crypto")

@mcp.tool()
def get_cryptocurrency_price(crypto: str) -> str:
    """
    Gets the price of a cryptocurrency.

    Args:
        crypto: id of the cryptocurrency (e.g., 'bitcoin', 'ethereum').
    """
    try:
        # Use the CoinGecko API to fetch the current price in USD
        url = "https://api.coingecko.com/api/v3/simple/price"
        params = {"ids": crypto.lower(), "vs_currencies": "usd"}
        response = requests.get(url, params=params, timeout=10)
        response.raise_for_status()
        data = response.json()
        price = data.get(crypto.lower(), {}).get("usd")
        if price is not None:
            return f"The price of {crypto} is ${price} USD."
        else:
            return f"Price for {crypto} not found."
    except Exception as e:
        return f"Error fetching price for {crypto}: {e}"

if __name__ == "__main__":
    mcp.run()

Another model <- client.chat.completions.create()

from mcp.server.fastmcp import FastMCP
from openai import OpenAI

YOUR_API_KEY = "xxx"   # DeepSeek key (unused below; kept as an alternative backend)
YOUR_API_KEY1 = "xxx"  # Perplexity key
DS = "https://api.deepseek.com"
PL = "https://api.perplexity.ai"

mcp = FastMCP("WebSearch")

@mcp.tool()
def perform_websearch(query: str) -> str:
    """
    Performs a web search for a query.

    Args:
        query: the query to web search.
    """
    messages = [
        {
            "role": "system",
            "content": "You are an AI assistant that searches the web and responds to questions.",
        },
        {
            "role": "user",
            "content": query,
        },
    ]
    client = OpenAI(api_key=YOUR_API_KEY1, base_url=PL)
    # Chat completion without streaming
    response = client.chat.completions.create(
        model="sonar-pro",
        messages=messages,
    )
    return response.choices[0].message.content

if __name__ == "__main__":
    mcp.run()
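To make the arch flow concrete, here is a minimal client-side sketch, assuming the official mcp Python SDK and assuming the LocalNotes server above is saved as notes_server.py (the file name and tool arguments are placeholders). The host launches the server as a subprocess over stdio, lists the tools it exposes upward, and calls one of them.

import asyncio

from mcp import ClientSession, StdioServerParameters
from mcp.client.stdio import stdio_client

# Assumption: the LocalNotes server above is saved locally as notes_server.py.
server_params = StdioServerParameters(command="python", args=["notes_server.py"])

async def main() -> None:
    # Launch the server as a subprocess and talk to it over stdio.
    async with stdio_client(server_params) as (read, write):
        async with ClientSession(read, write) as session:
            await session.initialize()

            # Discover the capabilities the server exposes upward.
            tools = await session.list_tools()
            print([t.name for t in tools.tools])

            # Call a tool, just as an LLM host would after deciding it is needed.
            result = await session.call_tool(
                "add_note_to_file", arguments={"content": "hello from the client"}
            )
            print(result.content)

if __name__ == "__main__":
    asyncio.run(main())

A host such as Claude Desktop performs the same initialize / list_tools / call_tool handshake; the only difference is that the LLM, not the script, decides when a tool should be called.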
""" messages = [ { "role": "system", "content": ( "You are an AI assistant that searches the web and responds to questions" ), }, { "role": "user", "content": ( query ), }, ] client = OpenAI(api_key=YOUR_API_KEY1, base_url=PL) # chat completion without streaming response = client.chat.completions.create( model="sonar-pro", messages=messages, ) return response.choices[0].message.content if __name__ == "__main__": mcp.run() MCP Server Public Choices mcp official servers: https://github.com/modelcontextprotocol Airbnb ...