@@ -271,62 +271,3 @@ def change_table(self, new_table_name: str):
             self.table_name, schema=self.schema, exist_ok=True
         )
         print(f"Switched to new table: {self.table_name}")
-
-
-# Usage example:
-if __name__ == "__main__":
-    from sentence_transformers import SentenceTransformer
-
-    # Initialize embedding model
-    embedding_model = SentenceTransformer("all-MiniLM-L6-v2", trust_remote_code=True)
-
-    # Initialize TextRAG
-    text_rag = SimpleRAG(
-        text_embedding_model=embedding_model,
-        db_path="~/visionrag_db",
-        table_name="default_table",
-    )
-
-    # Initialize OpenAI client
-    llm = OpenAI()
-
-    # Index PDFs
-    pdf_paths = ["path/to/pdf1.pdf", "path/to/pdf2.pdf"]
-    result = text_rag.index(
-        pdf_paths,
-        recursive=True,
-        chunking_strategy=FixedTokenChunker(chunk_size=500),
-        metadata={"source": "example_data"},
-        overwrite=True,
-        verbose=True,
-        ocr=True,
-    )
-    print(result)
-
-    # Search for relevant chunks
-    query = "What is the main topic of the documents?"
-    search_results = text_rag.search(query, k=5)
-    print("Search results:", search_results)
-
-    # Generate response using OpenAI
-    context = "\n".join([r["text"] for r in search_results])
-    response = llm.chat.completions.create(
-        model="gpt-4",
-        messages=[
-            {"role": "system", "content": "You are a helpful assistant."},
-            {"role": "user", "content": f"Context: {context}\n\nQuestion: {query}"},
-        ],
-    )
-    print("Generated response:", response.choices[0].message.content)
-
-    # Add new documents to the index
-    new_pdf_paths = ["path/to/new_pdf.pdf"]
-    result = text_rag.add_to_index(
-        new_pdf_paths,
-        recursive=False,
-        chunking_strategy=FixedTokenChunker(chunk_size=500),
-        metadata={"project": "new_project"},
-        verbose=True,
-        ocr=False,
-    )
-    print(result)