"""MCP server for the web researcher.

Exposes a single tool `research` that delegates to WebResearcher.

Run with: python -m researchers.web.server
"""
import asyncio
import os
import sys
from typing import Optional
from mcp.server.fastmcp import FastMCP
from researchers.web.agent import WebResearcher
from researchers.web.models import ResearchConstraints
mcp = FastMCP(
|
||
|
|
name="marchwarden-web-researcher",
|
||
|
|
instructions=(
|
||
|
|
"A Marchwarden web research specialist. "
|
||
|
|
"Call the research tool with a question to get a grounded, "
|
||
|
|
"evidence-based answer with citations, gaps, open questions, "
|
||
|
|
"and confidence scoring."
|
||
|
|
),
|
||
|
|
)


def _read_secret(key: str) -> str:
|
||
|
|
"""Read a secret from ~/secrets file."""
|
||
|
|
secrets_path = os.path.expanduser("~/secrets")
|
||
|
|
with open(secrets_path) as f:
|
||
|
|
for line in f:
|
||
|
|
if line.startswith(f"{key}="):
|
||
|
|
return line.split("=", 1)[1].strip()
|
||
|
|
raise ValueError(f"Key {key} not found in {secrets_path}")


def _get_researcher() -> WebResearcher:
|
||
|
|
"""Create a WebResearcher with keys from ~/secrets."""
|
||
|
|
return WebResearcher(
|
||
|
|
anthropic_api_key=_read_secret("ANTHROPIC_API_KEY"),
|
||
|
|
tavily_api_key=_read_secret("TAVILY_API_KEY"),
|
||
|
|
model_id=os.environ.get("MARCHWARDEN_MODEL", "claude-sonnet-4-5-20250514"),
|
||
|
|
)


@mcp.tool()
|
||
|
|
async def research(
|
||
|
|
question: str,
|
||
|
|
context: Optional[str] = None,
|
||
|
|
depth: str = "balanced",
|
||
|
|
max_iterations: int = 5,
|
||
|
|
token_budget: int = 20000,
|
||
|
|
) -> str:
|
||
|
|
"""Research a question using web search and return a structured answer.
|
||
|
|
|
||
|
|
Args:
|
||
|
|
question: The question to investigate.
|
||
|
|
context: What the caller already knows (optional).
|
||
|
|
depth: Research depth — "shallow", "balanced", or "deep".
|
||
|
|
max_iterations: Maximum number of search/fetch iterations (1-20).
|
||
|
|
token_budget: Maximum tokens to spend (1000-100000).
|
||
|
|
|
||
|
|
Returns:
|
||
|
|
JSON string containing the full ResearchResult with answer,
|
||
|
|
citations, gaps, discovery_events, open_questions, confidence,
|
||
|
|
and cost_metadata.
|
||
|
|
"""
|
||
|
|
researcher = _get_researcher()
|
||
|
|
constraints = ResearchConstraints(
|
||
|
|
max_iterations=max_iterations,
|
||
|
|
token_budget=token_budget,
|
||
|
|
)
|
||
|
|
|
||
|
|
result = await researcher.research(
|
||
|
|
question=question,
|
||
|
|
context=context,
|
||
|
|
depth=depth,
|
||
|
|
constraints=constraints,
|
||
|
|
)
|
||
|
|
|
||
|
|
return result.model_dump_json(indent=2)


def main():
|
||
|
|
"""Run the MCP server on stdio."""
|
||
|
|
mcp.run(transport="stdio")


if __name__ == "__main__":
|
||
|
|
main()
|