Version: 0.1.7
Langfuse Agent MCP Server + A2A Agent
Agent for interacting with Langfuse Observability API
This repository is actively maintained - Contributions are welcome!
The MCP Server can be run in two modes: stdio (for local testing) or http (for networked access).
LANGFUSE_URL: The URL of the target Langfuse service.
LANGFUSE_TOKEN: The API token or access token.
export LANGFUSE_URL="http://localhost:8080"
export LANGFUSE_TOKEN="your_token"
langfuse-mcp --transport "stdio"

export LANGFUSE_URL="http://localhost:8080"
export LANGFUSE_TOKEN="your_token"
langfuse-mcp --transport "http" --host "0.0.0.0" --port "8000"

export LANGFUSE_URL="http://localhost:8080"
export LANGFUSE_TOKEN="your_token"
langfuse-agent --provider openai --model-id gpt-4o --api-key sk-...

docker build -t langfuse-agent .

docker run -d \
--name langfuse-agent \
-p 8000:8000 \
-e TRANSPORT=http \
-e LANGFUSE_URL="http://your-service:8080" \
-e LANGFUSE_TOKEN="your_token" \
knucklessg1/langfuse-agent:latest

services:
langfuse-agent:
image: knucklessg1/langfuse-agent:latest
environment:
- HOST=0.0.0.0
- PORT=8000
- TRANSPORT=http
- LANGFUSE_URL=http://your-service:8080
- LANGFUSE_TOKEN=your_token
ports:
- 8000:8000

{
"mcpServers": {
"langfuse": {
"command": "uv",
"args": [
"run",
"--with",
"langfuse-agent",
"langfuse-mcp"
],
"env": {
"LANGFUSE_URL": "http://your-service:8080",
"LANGFUSE_TOKEN": "your_token"
}
}
}
}

python -m pip install langfuse-agent

uv pip install langfuse-agent