#!/usr/bin/env python3
"""
Demo: Search for drug repurposing evidence.
This script demonstrates Phase 2 functionality:
- PubMed search (biomedical literature)
- Web search (DuckDuckGo)
- SearchHandler (parallel scatter-gather orchestration)
Usage:
# From project root:
uv run python examples/search_demo/run_search.py
# With custom query:
uv run python examples/search_demo/run_search.py "metformin cancer"
Requirements:
- Optional: NCBI_API_KEY in .env for higher PubMed rate limits
"""
import asyncio
import sys

from src.tools.pubmed import PubMedTool
from src.tools.search_handler import SearchHandler
from src.tools.websearch import WebTool


async def main(query: str) -> None:
"""Run search demo with the given query."""
print(f"\n{'='*60}")
print("DeepCritical Search Demo")
print(f"Query: {query}")
print(f"{'='*60}\n")
# Initialize tools
pubmed = PubMedTool()
web = WebTool()
handler = SearchHandler(tools=[pubmed, web], timeout=30.0)
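    # Note: per the module docstring, SearchHandler scatters the query to every
    # configured tool in parallel and gathers their evidence into one result.
    # The exact timeout behavior (e.g. whether a slow tool is dropped or its
    # failure is surfaced via result.errors) depends on the SearchHandler
    # implementation, which is not shown in this demo.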
    # Execute search
    print("Searching PubMed and Web in parallel...")
    result = await handler.execute(query, max_results_per_tool=5)
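    # The returned result object (as used below) aggregates evidence across
    # tools: result.total_found, result.sources_searched, result.errors, and
    # result.evidence, where each evidence item carries its content plus a
    # citation (source, title, URL).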
    # Display results
    print(f"\n{'='*60}")
    print(f"Results: {result.total_found} pieces of evidence")
    print(f"Sources: {', '.join(result.sources_searched)}")
    if result.errors:
        print(f"Errors: {result.errors}")
    print(f"{'='*60}\n")

    for i, evidence in enumerate(result.evidence, 1):
        print(f"[{i}] {evidence.citation.source.upper()}: {evidence.citation.title[:80]}...")
        print(f"    URL: {evidence.citation.url}")
        print(f"    Content: {evidence.content[:150]}...")
        print()


if __name__ == "__main__":
    # Use the query from the command line if provided, otherwise the default
    default_query = "metformin Alzheimer's disease drug repurposing"
    query = sys.argv[1] if len(sys.argv) > 1 else default_query
    asyncio.run(main(query))