"""
Integrates a LangChain ReAct agent with CData Connect AI MCP server.
The script demonstrates fetching, filtering, and using tools with an LLM for agent-based reasoning.
"""
import asyncio
import os

from langchain_mcp_adapters.client import MultiServerMCPClient
from langchain_openai import ChatOpenAI
from langgraph.prebuilt import create_react_agent

from config import Config
async def main() -> None:
    """Discover CData Connect AI MCP tools and run a ReAct agent over them.

    Connects to the MCP server configured in ``Config``, loads the tools the
    server exposes, builds a LangGraph ReAct agent around a ChatOpenAI model,
    sends one sample prompt, and prints the agent's final answer.

    Raises:
        RuntimeError: If the ``OPENAI_API_KEY`` environment variable is unset.
    """
    # Initialize the MCP client with one or more server connections.
    mcp_client = MultiServerMCPClient(
        connections={
            "default": {  # The connection name is arbitrary.
                "transport": "streamable_http",
                "url": Config.MCP_BASE_URL,
                "headers": {"Authorization": f"Basic {Config.MCP_AUTH}"},
            }
        }
    )

    # Load the remote MCP tools exposed by the server.
    all_mcp_tools = await mcp_client.get_tools()
    print("Discovered MCP tools:", [tool.name for tool in all_mcp_tools])

    # Read the OpenAI API key from the environment rather than hard-coding it
    # in source — keys committed to source control leak. Create a key at
    # https://platform.openai.com/ and export it as OPENAI_API_KEY.
    api_key = os.getenv("OPENAI_API_KEY")
    if not api_key:
        raise RuntimeError(
            "Set the OPENAI_API_KEY environment variable before running."
        )

    # Create the ReAct-style agent over the discovered tools.
    llm = ChatOpenAI(model="gpt-4o", temperature=0.2, api_key=api_key)
    agent = create_react_agent(llm, all_mcp_tools)

    user_prompt = "Tell me how many sales I had in Q1 for the current fiscal year."  # Change the prompt as needed.
    print(f"\nUser prompt: {user_prompt}")

    # Send the prompt; the agent decides which MCP tools to call.
    response = await agent.ainvoke(
        {"messages": [{"role": "user", "content": user_prompt}]}
    )

    # The last message in the returned state is the agent's final answer.
    final_msg = response["messages"][-1].content
    print("Agent final response:", final_msg)
# Run the async entry point only when executed as a script (not on import).
if __name__ == "__main__":
    asyncio.run(main())