Skip to content

Commit 22c8a9b

Browse files
committed
chore: add more Config params
1 parent 84be774 commit 22c8a9b

File tree

3 files changed

+12
-23
lines changed

3 files changed

+12
-23
lines changed

Makefile

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@ SHELL := /bin/bash
33
ifneq ("$(wildcard .env)","")
44
include .env
55
else
6-
$(shell echo -e "OPENAI_API_ORGANIZATION=PLEASE-ADD-ME\nOPENAI_API_KEY=PLEASE-ADD-ME\nPINECONE_API_KEY=PLEASE-ADD-ME\nPINECONE_ENVIRONMENT=gcp-starter\nPINECONE_INDEX_NAME=netec-ssm\nDEBUG_MODE=True\n" >> .env)
6+
$(shell echo -e "OPENAI_API_ORGANIZATION=PLEASE-ADD-ME\nOPENAI_API_KEY=PLEASE-ADD-ME\nPINECONE_API_KEY=PLEASE-ADD-ME\nPINECONE_ENVIRONMENT=gcp-starter\nPINECONE_INDEX_NAME=hsr\nOPENAI_CHAT_MODEL_NAME=gpt-3.5-turbo\nOPENAI_PROMPT_MODEL_NAME=text-davinci-003\nOPENAI_CHAT_TEMPERATURE=0.0\nOPENAI_CHAT_MAX_RETRIES=3\nDEBUG_MODE=True\n" >> .env)
77
endif
88

99
.PHONY: analyze init activate test lint clean

models/const.py

Lines changed: 8 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -18,15 +18,21 @@
1818
PINECONE_INDEX_NAME = os.environ.get("PINECONE_INDEX_NAME", "hsr")
1919
OPENAI_CHAT_MODEL_NAME = os.environ.get("OPENAI_CHAT_MODEL_NAME", "gpt-3.5-turbo")
2020
OPENAI_PROMPT_MODEL_NAME = os.environ.get("OPENAI_PROMPT_MODEL_NAME", "text-davinci-003")
21+
OPENAI_CHAT_TEMPERATURE = float(os.environ.get("OPENAI_CHAT_TEMPERATURE", 0.0))
22+
OPENAI_CHAT_MAX_RETRIES = int(os.environ.get("OPENAI_CHAT_MAX_RETRIES", 3))
23+
OPENAI_CHAT_CACHE = bool(os.environ.get("OPENAI_CHAT_CACHE", True))
2124
else:
2225
raise FileNotFoundError("No .env file found in root directory of repository")
2326

2427

2528
class Config:
2629
"""Configuration parameters."""
2730

28-
OPENAI_CHAT_MODEL_NAME = OPENAI_CHAT_MODEL_NAME
29-
OPENAI_PROMPT_MODEL_NAME = OPENAI_PROMPT_MODEL_NAME
31+
OPENAI_CHAT_MODEL_NAME: str = OPENAI_CHAT_MODEL_NAME
32+
OPENAI_PROMPT_MODEL_NAME: str = OPENAI_PROMPT_MODEL_NAME
33+
OPENAI_CHAT_TEMPERATURE: float = OPENAI_CHAT_TEMPERATURE
34+
OPENAI_CHAT_MAX_RETRIES: int = OPENAI_CHAT_MAX_RETRIES
35+
OPENAI_CHAT_CACHE: bool = OPENAI_CHAT_CACHE
3036

3137

3238
class Credentials:

models/hybrid_search_retreiver.py

Lines changed: 3 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,6 @@
2020
import glob
2121
import os
2222
import textwrap
23-
from typing import List
2423

2524
# pinecone integration
2625
import pinecone
@@ -78,10 +77,10 @@ class HybridSearchRetriever:
7877
chat = ChatOpenAI(
7978
api_key=Credentials.OPENAI_API_KEY,
8079
organization=Credentials.OPENAI_API_ORGANIZATION,
81-
cache=True,
82-
max_retries=3,
80+
cache=Config.OPENAI_CHAT_CACHE,
81+
max_retries=Config.OPENAI_CHAT_MAX_RETRIES,
8382
model=Config.OPENAI_CHAT_MODEL_NAME,
84-
temperature=0.0,
83+
temperature=Config.OPENAI_CHAT_TEMPERATURE,
8584
)
8685

8786
# embeddings
@@ -110,22 +109,6 @@ def prompt_with_template(self, prompt: PromptTemplate, concept: str, model: str
110109
retval = llm(prompt.format(concept=concept))
111110
return retval
112111

113-
def fit_tf_idf_values(self, corpus: List[str]):
114-
"""Fit TF-IDF values.
115-
1. Fit the BM25 encoder on the corpus
116-
2. Encode the corpus
117-
3. Store the encoded corpus in Pinecone
118-
"""
119-
corpus = ["foo", "bar", "world", "hello"]
120-
121-
# fit tf-idf values on your corpus
122-
self.bm25_encoder.fit(corpus)
123-
124-
# persist the values to a json file
125-
self.bm25_encoder.dump("bm25_values.json")
126-
self.bm25_encoder = BM25Encoder().load("bm25_values.json")
127-
self.bm25_encoder.fit(corpus)
128-
129112
def load(self, filepath: str):
130113
"""
131114
Embed PDF.

0 commit comments

Comments (0)