
Commit 5f85921

refactor: move initializations to class constructor.
Parent: ffe458d

File tree

1 file changed: +8, -4 lines

models/hybrid_search_retreiver.py

Lines changed: 8 additions & 4 deletions
@@ -53,9 +53,6 @@
 ###############################################################################
 # initializations
 ###############################################################################
-DEFAULT_MODEL_NAME = Config.OPENAI_PROMPT_MODEL_NAME
-pinecone.init(api_key=Credentials.PINECONE_API_KEY, environment=Config.PINECONE_ENVIRONMENT)
-set_llm_cache(InMemoryCache())
 logging.basicConfig(level=logging.DEBUG if Config.DEBUG_MODE else logging.INFO)


@@ -85,6 +82,11 @@ class HybridSearchRetriever:
     _text_splitter: TextSplitter = None
     _b25_encoder: BM25Encoder = None

+    def __init__(self):
+        """Constructor"""
+        pinecone.init(api_key=Credentials.PINECONE_API_KEY, environment=Config.PINECONE_ENVIRONMENT)
+        set_llm_cache(InMemoryCache())
+
     # prompting wrapper
     @property
     def chat(self):
@@ -158,7 +160,9 @@ def cached_chat_request(
         retval = self.chat(messages)
         return retval

-    def prompt_with_template(self, prompt: PromptTemplate, concept: str, model: str = DEFAULT_MODEL_NAME) -> str:
+    def prompt_with_template(
+        self, prompt: PromptTemplate, concept: str, model: str = Config.OPENAI_PROMPT_MODEL_NAME
+    ) -> str:
         """Prompt with template."""
         llm = OpenAI(model=model)
         retval = llm(prompt.format(concept=concept))
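
A minimal usage sketch of what this refactor means for callers, assuming the module's existing Config and Credentials setup; the call site and PromptTemplate below are hypothetical and not part of this commit. Pinecone initialization and the in-memory LLM cache are now set up when HybridSearchRetriever is instantiated rather than at module import time.

# Hypothetical call site, sketching the effect of the refactor:
# pinecone.init() and set_llm_cache() now run in __init__, not at import time.
from langchain.prompts import PromptTemplate

from models.hybrid_search_retreiver import HybridSearchRetriever

retriever = HybridSearchRetriever()  # constructor performs Pinecone init and LLM-cache setup

template = PromptTemplate(input_variables=["concept"], template="Explain {concept} in one paragraph.")
# model defaults to Config.OPENAI_PROMPT_MODEL_NAME after this change
print(retriever.prompt_with_template(prompt=template, concept="hybrid search"))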
