LangGraph Integration¶
- async agentstr.mcp.langgraph.to_langgraph_tools(nostr_mcp_client: NostrMCPClient) list[BaseTool] [source]¶
Convert tools from the MCP client to LangGraph tools.
- Parameters:
nostr_mcp_client – An instance of NostrMCPClient to fetch tools from.
- Returns:
A list of LangGraph BaseTool objects that wrap the MCP tools.
This module provides integration with LangGraph tools, enabling conversion between MCP tools and LangGraph’s tool format.
Usage Example¶
from dotenv import load_dotenv

# Load environment variables before reading any of them below.
load_dotenv()

import os

from langchain_openai import ChatOpenAI
from langgraph.prebuilt import create_react_agent

from agentstr import NostrAgentServer, NostrMCPClient, ChatInput
from agentstr.mcp.langgraph import to_langgraph_tools

# Fail fast with a clear message if the relay list is missing; a bare
# os.getenv(...).split(",") would raise an opaque AttributeError on None.
relays_env = os.getenv("NOSTR_RELAYS")
if not relays_env:
    raise RuntimeError("NOSTR_RELAYS environment variable must be set (comma-separated relay URLs)")

# Create Nostr MCP client used both to discover tools and to serve the agent.
nostr_mcp_client = NostrMCPClient(
    relays=relays_env.split(","),
    private_key=os.getenv("EXAMPLE_LANGGRAPH_AGENT_NSEC"),
    mcp_pubkey=os.getenv("EXAMPLE_MCP_SERVER_PUBKEY"),
    nwc_str=os.getenv("MCP_CLIENT_NWC_CONN_STR"),
)


async def agent_server():
    """Build a LangGraph react agent from MCP tools and serve it over Nostr.

    Fetches the MCP server's tools, wraps them as LangGraph tools, creates a
    react agent backed by an OpenAI-compatible LLM, and runs it behind a
    NostrAgentServer until stopped.
    """
    # Convert MCP tools to LangGraph tools
    langgraph_tools = await to_langgraph_tools(nostr_mcp_client)

    for tool in langgraph_tools:
        print(f'Found {tool.name}: {tool.description}')

    # Create react agent (LLM endpoint/model come from the environment)
    agent = create_react_agent(
        model=ChatOpenAI(temperature=0,
                         base_url=os.getenv("LLM_BASE_URL"),
                         api_key=os.getenv("LLM_API_KEY"),
                         model_name=os.getenv("LLM_MODEL_NAME")),
        tools=langgraph_tools,
        prompt="You are a helpful assistant",
    )

    # Define agent callable. Parameter renamed from `input` so it no longer
    # shadows the `input` builtin.
    async def agent_callable(chat_input: ChatInput) -> str:
        # Only the most recent message is forwarded to the agent.
        result = await agent.ainvoke(
            {"messages": [{"role": "user", "content": chat_input.messages[-1]}]},
        )
        return result["messages"][-1].content

    # Create Nostr Agent Server
    server = NostrAgentServer(nostr_mcp_client=nostr_mcp_client,
                              agent_callable=agent_callable)

    # Start server (runs until interrupted)
    await server.start()


if __name__ == "__main__":
    import asyncio
    asyncio.run(agent_server())