Tool Calling example on Raspberry Pi
#14 by samairtimer - opened
I used Ollama to host a FunctionGemma model and built an agent that invokes my Gmail MCP actions via LangChain. It is wonderful!! It runs on a Raspberry Pi and only takes a few seconds.
```python
import asyncio
import time

from langchain.agents import create_agent
from langchain_mcp_adapters.client import MultiServerMCPClient
from langchain_ollama import ChatOllama


async def init():
    client = MultiServerMCPClient(
        {
            "email": {
                "transport": "http",  # HTTP-based remote server
                # Remote Gmail MCP server (n8n) exposing the Gmail actions
                "url": "https://n8n.samair.me/mcp/gmailv2",
                "headers": {
                    "Authorization": "Bearer "  # token omitted
                },
            },
        }
    )
    tools = await client.get_tools()

    model = ChatOllama(
        model="functiongemma",
        temperature=0,
        base_url="http://pi5.local:11434",
        # other params...
    )

    SYSTEM_PROMPT = """
    You are a helpful personal assistant; your job is to help your user.
    """

    agent = create_agent(model, tools=tools, system_prompt=SYSTEM_PROMPT)

    # Baseline timestamp before the agent starts streaming
    last_step_time = time.perf_counter()
    current_time = time.perf_counter()
    duration = current_time - last_step_time
    print(f"[{duration:.2f}s]")

    async for chunk in agent.astream(
        {"messages": [{"role": "user", "content": "Send a message asking if they are available on the weekend for a meeting to sameer@samair.me"}]},
    ):
        for step, data in chunk.items():
            current_time = time.perf_counter()
            duration = current_time - last_step_time
            print(f"[{duration:.2f}s] step: {step}")
            print(f"content: {data['messages'][-1].content_blocks}")
            last_step_time = current_time  # reset the timer so each step is measured separately


if __name__ == "__main__":
    asyncio.run(init())
```
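
If you want to sanity-check that the locally hosted FunctionGemma model actually emits tool calls before wiring up the MCP server, something like the minimal sketch below can help. It reuses the same model name and Pi hostname as above; the `send_email` tool here is a hypothetical stand-in for the real Gmail MCP action, not part of the original setup. If `tool_calls` comes back empty, the issue is likely with the Ollama/model side rather than the MCP wiring.

```python
import asyncio

from langchain_core.tools import tool
from langchain_ollama import ChatOllama


# Hypothetical stand-in tool, only used to confirm the model produces tool calls.
@tool
def send_email(to: str, subject: str, body: str) -> str:
    """Send an email to the given recipient."""
    return f"Email to {to} queued."


async def main():
    model = ChatOllama(
        model="functiongemma",              # same model name as in the agent above
        temperature=0,
        base_url="http://pi5.local:11434",  # same Ollama host as in the agent above
    )
    result = await model.bind_tools([send_email]).ainvoke(
        "Email sameer@samair.me asking if they are available this weekend."
    )
    # If tool calling works, tool_calls should contain a send_email entry.
    print(result.tool_calls)


if __name__ == "__main__":
    asyncio.run(main())
```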