{"id":"haiku-rag","name":"Haiku RAG","homepage":"https://github.com/ggozad/haiku.rag","repo_url":"https://github.com/ggozad/haiku.rag","category":"rag","subcategories":["mcp-server","document-qa","vector-search"],"tags":["rag","lancedb","pydantic-ai","docling","mcp-server","hybrid-search","reranking","python","local-first","multi-agent"],"what_it_does":"An opinionated local-first RAG system built on LanceDB, Pydantic AI, and Docling that provides hybrid vector/full-text search, citation-aware Q&A, multi-agent research workflows, and an MCP server for integration with AI assistants like Claude Desktop.","use_cases":["Indexing and querying a personal or enterprise document library with citation-backed answers (page numbers, section headings)","Running multi-agent research workflows that plan, search, evaluate, and synthesize across a document corpus","Integrating a local document knowledge base into Claude Desktop or other MCP clients via the built-in MCP server"],"not_for":["Cloud-first teams that need managed vector database infrastructure without self-hosting","Simple keyword search use cases where a full RAG pipeline adds unnecessary complexity","Non-Python shops or teams without Python 3.12+ capability"],"best_when":"You want a production-quality, local-first RAG system with strong document structure awareness, citations, and MCP integration for AI assistant workflows.","avoid_when":"You need a managed RAG-as-a-service solution or your documents are primarily unstructured web content rather than PDFs and structured documents.","alternatives":["graphlit-mcp-server","context7","llama-index"],"af_score":76.4,"security_score":68.0,"reliability_score":null,"package_type":"mcp_server","discovery_source":["github","mcp_registry"],"priority":"low","status":"evaluated","version_evaluated":"latest","last_evaluated":"2026-03-01T09:50:05.668431+00:00","performance":{"latency_p50_ms":null,"latency_p99_ms":null,"uptime_sla_percent":null,"rate_limits":null,"data_source":"llm_estimated","measured_on":null}}