-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathrag_query.py
More file actions
105 lines (82 loc) · 2.91 KB
/
rag_query.py
File metadata and controls
105 lines (82 loc) · 2.91 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
#!/usr/bin/env python3
"""
RAG Query Tool - Run similarity searches against ChromaDB
Usage:
docker exec genai-app python /app/rag_query.py "your query here"
docker exec -it genai-app python /app/rag_query.py # interactive mode
"""
import chromadb
import os
import sys
from langchain_ollama import OllamaEmbeddings
PERSIST_DIR = os.getenv("CHROMA_PERSIST_DIR", "/app/chroma_db")
OLLAMA_BASE_URL = os.getenv("OLLAMA_BASE_URL", "http://ollama:11434")
EMBEDDING_MODEL = os.getenv("EMBEDDING_MODEL", "nomic-embed-text")
COLLECTION_NAME = "documents"
N_RESULTS = 3
def get_embeddings():
    """Build an embeddings client bound to the configured Ollama model/endpoint."""
    client = OllamaEmbeddings(model=EMBEDDING_MODEL, base_url=OLLAMA_BASE_URL)
    return client
def search(collection, query, n_results=N_RESULTS, embeddings=None):
    """Run a similarity search against *collection* and print the top hits.

    Args:
        collection: ChromaDB collection to query.
        query: Natural-language query string.
        n_results: Maximum number of chunks to retrieve (default N_RESULTS).
        embeddings: Optional pre-built embeddings client. Created on demand
            when omitted — passing one lets repeat callers (e.g. the
            interactive loop) avoid constructing a new client per query.
    """
    print(f"\n{'='*60}")
    print(f"QUERY: {query}")
    print("=" * 60)
    if embeddings is None:
        embeddings = get_embeddings()
    query_embedding = embeddings.embed_query(query)
    results = collection.query(
        query_embeddings=[query_embedding],
        n_results=n_results,
        include=["documents", "metadatas", "distances"]
    )
    # Chroma returns one result list per query embedding; we sent exactly one.
    if not results["documents"][0]:
        print("No results found.")
        return
    print(f"Found {len(results['documents'][0])} results:\n")
    for rank, (doc, metadata, distance) in enumerate(zip(
        results["documents"][0],
        results["metadatas"][0],
        results["distances"][0]
    ), start=1):
        # NOTE(review): 1 - distance is a true cosine similarity only if the
        # collection was created with the cosine metric — confirm ingestion
        # config (Chroma defaults to l2).
        similarity = 1 - distance
        source = os.path.basename(metadata.get("source", "unknown"))
        page = metadata.get("page", "?")
        print(f"[{rank}] Similarity: {similarity:.3f} | Source: {source} | Page: {page}")
        print("-" * 60)
        # Show at most the first 400 characters of each chunk.
        print(doc[:400])
        print("...\n" if len(doc) > 400 else "\n")
def interactive_mode(collection):
    """Read queries from stdin in a loop and search *collection* for each.

    Terminates on 'quit'/'exit'/'q' (case-insensitive), EOF, or Ctrl-C.
    """
    banner = "=" * 60
    print(f"\n{banner}")
    print("INTERACTIVE RAG QUERY MODE")
    print("Type your queries (or 'quit' to exit)")
    print(banner)
    quit_words = ("quit", "exit", "q")
    while True:
        try:
            user_input = input("\nQuery> ").strip()
            if user_input.lower() in quit_words:
                break
            if user_input:
                search(collection, user_input)
        except (EOFError, KeyboardInterrupt):
            # Ctrl-D / Ctrl-C ends the session cleanly.
            break
    print("\nGoodbye!")
def main():
    """Connect to the persisted ChromaDB store and run a query session.

    With CLI arguments, joins them into a single one-shot query; without,
    drops into the interactive prompt. Exits with status 1 if the target
    collection does not exist.
    """
    client = chromadb.PersistentClient(path=PERSIST_DIR)
    try:
        collection = client.get_collection(COLLECTION_NAME)
    except Exception:
        # Broad catch kept deliberately: chromadb has raised different
        # exception types for a missing collection across versions.
        # Error text goes to stderr so stdout stays clean for results.
        print(f"Error: Collection '{COLLECTION_NAME}' not found.", file=sys.stderr)
        print("Upload documents via the Streamlit app first.", file=sys.stderr)
        sys.exit(1)
    count = collection.count()
    print(f"Connected to ChromaDB | Collection: {COLLECTION_NAME} | Chunks: {count:,}")
    # Check if query provided as argument
    if len(sys.argv) > 1:
        query = " ".join(sys.argv[1:])
        search(collection, query)
    else:
        # Interactive mode
        interactive_mode(collection)
# Entry point when executed as a script (e.g. `docker exec genai-app python /app/rag_query.py`).
if __name__ == "__main__":
    main()