
"""

+from llama_index.core.vector_stores.types import ExactMatchFilter, MetadataFilters
+from llama_index.core import VectorStoreIndex, SimpleDirectoryReader, StorageContext
+import timeplus_connect
+import os
import importlib
import json
import logging
@@ -48,7 +52,8 @@ def escape_str(value: str) -> str:
    BS = "\\"
    must_escape = (BS, "'")
    return (
-        "".join(f"{BS}{c}" if c in must_escape else c for c in value) if value else ""
+        "".join(
+            f"{BS}{c}" if c in must_escape else c for c in value) if value else ""
    )


@@ -226,7 +231,8 @@ def __init__(
        self._column_config = column_config
        self._column_names = column_names
        self._column_type_names = column_type_names
-        dimension = len(Settings.embed_model.get_query_embedding("try this out"))
+        dimension = len(
+            Settings.embed_model.get_query_embedding("try this out"))
        self.create_table(dimension)

    @property
@@ -254,7 +260,8 @@ def _upload_batch(
        for idx, item in enumerate(batch):
            _row = []
            for column_name in self._column_names:
-                _row.append(self._column_config[column_name]["extract_func"](item))
+                _row.append(
+                    self._column_config[column_name]["extract_func"](item))
            _data.append(_row)

        self._client.insert(
@@ -264,11 +271,10 @@ def _upload_batch(
            column_type_names=self._column_type_names,
        )

-
    def _append_meta_filter_condition(
        self, where_str: Optional[str], exact_match_filter: list
    ) -> str:
-
+
        filter_str = " AND ".join(
            f"json_extract_string("
            f"{self.metadata_column}, '{filter_item.key}') "
@@ -354,13 +360,13 @@ def query(
        nodes = []
        ids = []
        similarities = []
-        #print(f"query: {query_statement}")
+        # print(f"query: {query_statement}")
        response = self._client.query(query_statement)
-        #print(f"response: {response}")
+        # print(f"response: {response}")
        column_names = response.column_names
-        #print(f"column_names: {column_names}")
-        #print(f"column_names: {response.result_columns}")
-
+        # print(f"column_names: {column_names}")
+        # print(f"column_names: {response.result_columns}")
+
        id_idx = column_names.index("id")
        text_idx = column_names.index("text")
        metadata_idx = column_names.index("metadata")
@@ -388,26 +394,21 @@ def query(
            similarities.append(r[score_idx])
            ids.append(r[id_idx])
        return VectorStoreQueryResult(nodes=nodes, similarities=similarities, ids=ids)
-

-# test code
-import os
-import timeplus_connect

-from llama_index.core import VectorStoreIndex, SimpleDirectoryReader, StorageContext
-from llama_index.core.vector_stores.types import ExactMatchFilter, MetadataFilters
+# test code


timeplus_host = os.getenv("TIMEPLUS_HOST") or "localhost"
timeplus_user = os.getenv("TIMEPLUS_USER") or "proton"
timeplus_password = os.getenv("TIMEPLUS_PASSWORD") or "timeplus@t+"

client = timeplus_connect.get_client(
-    host = timeplus_host,
-    port = 8123,
-    username = timeplus_user,
-    password = timeplus_password,
-    )
+    host=timeplus_host,
+    port=8123,
+    username=timeplus_user,
+    password=timeplus_password,
+)

# Load documents and build index
documents = SimpleDirectoryReader(
@@ -444,4 +445,3 @@ def query(
)
response = query_engine.query("what did the author do growing up?")
print(response)
-
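
The imports moved to the top of the module include ExactMatchFilter and MetadataFilters, but the test code in this diff never exercises the metadata-filter path (_append_meta_filter_condition). Below is a minimal sketch of how that path could be driven through the same setup; the index variable name, the file_name metadata key, and the file name value are assumptions for illustration, not part of this change.

# Hypothetical continuation of the test code: run the same question through a
# metadata-filtered query so the store builds its json_extract_string() WHERE clause.
# "index" is assumed to be the VectorStoreIndex built from "documents" above.
filters = MetadataFilters(
    filters=[ExactMatchFilter(key="file_name", value="paul_graham_essay.txt")]
)
filtered_engine = index.as_query_engine(filters=filters, similarity_top_k=2)
print(filtered_engine.query("what did the author do growing up?"))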