# -*- coding: utf-8 -*-
- # pylint: disable=no-member
- # pylint: disable=E0213,C0103
+ # pylint: disable=E0213,C0103,C0301
"""
Configuration for Lambda functions.
@@ -34,6 +33,8 @@ def load_version() -> Dict[str, str]:
    """Stringify the __version__ module."""
    version_file_path = os.path.join(HERE, "__version__.py")
    spec = importlib.util.spec_from_file_location("__version__", version_file_path)
+     if spec is None or spec.loader is None:
+         raise ModelConfigurationError(f"Could not load version file: {version_file_path}")
    version_module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(version_module)
    return version_module.__dict__
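As background for the new guard: importlib.util.spec_from_file_location() can return None when no loader can be determined for the path, and type checkers treat spec.loader as optional, so dereferencing either without a check is unsafe. A minimal hedged sketch of that failure mode, outside the project code:

# Hedged sketch, not part of the diff: shows when the returned spec is None.
import importlib.util

# No loader is registered for this suffix, so the call returns None.
spec = importlib.util.spec_from_file_location("demo", "demo.unknown-suffix")
print(spec)  # None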
@@ -74,22 +75,22 @@ class SettingsDefaults:

    LANGCHAIN_MEMORY_KEY = "chat_history"

-     PINECONE_API_KEY: SecretStr = SecretStr(None)
-     PINECONE_ENVIRONMENT = "gcp-starter"
-     PINECONE_INDEX_NAME = "openai-embeddings"
-     PINECONE_VECTORSTORE_TEXT_KEY = "lc_id"
-     PINECONE_METRIC = "dotproduct"
-     PINECONE_DIMENSIONS = 1536
-
-     OPENAI_API_ORGANIZATION: str = None
-     OPENAI_API_KEY: SecretStr = SecretStr(None)
-     OPENAI_ENDPOINT_IMAGE_N = 4
-     OPENAI_ENDPOINT_IMAGE_SIZE = "1024x768"
-     OPENAI_CHAT_CACHE = True
-     OPENAI_CHAT_MODEL_NAME = "gpt-4"
-     OPENAI_PROMPT_MODEL_NAME = "gpt-4"
-     OPENAI_CHAT_TEMPERATURE = 0.0
-     OPENAI_CHAT_MAX_RETRIES = 3
+     PINECONE_API_KEY: Optional[SecretStr] = SecretStr(os.environ.get("PINECONE_API_KEY")) if os.environ.get("PINECONE_API_KEY") else None  # type: ignore[assignment]
+     PINECONE_ENVIRONMENT = os.environ.get("PINECONE_ENVIRONMENT", "gcp-starter")
+     PINECONE_INDEX_NAME = os.environ.get("PINECONE_INDEX_NAME", "openai-embeddings")
+     PINECONE_VECTORSTORE_TEXT_KEY = os.environ.get("PINECONE_VECTORSTORE_TEXT_KEY", "lc_id")
+     PINECONE_METRIC = os.environ.get("PINECONE_METRIC", "dotproduct")
+     PINECONE_DIMENSIONS = int(os.environ.get("PINECONE_DIMENSIONS", 1536))
+
+     OPENAI_API_ORGANIZATION: Optional[str] = os.environ.get("OPENAI_API_ORGANIZATION", None)
+     OPENAI_API_KEY: Optional[SecretStr] = SecretStr(os.environ.get("OPENAI_API_KEY")) if os.environ.get("OPENAI_API_KEY") else None  # type: ignore[assignment]
+     OPENAI_ENDPOINT_IMAGE_N = int(os.environ.get("OPENAI_ENDPOINT_IMAGE_N", 4))
+     OPENAI_ENDPOINT_IMAGE_SIZE = os.environ.get("OPENAI_ENDPOINT_IMAGE_SIZE", "1024x768")
+     OPENAI_CHAT_CACHE = os.environ.get("OPENAI_CHAT_CACHE", "true").lower() in ["true", "1", "t", "y", "yes"]
+     OPENAI_CHAT_MODEL_NAME = os.environ.get("OPENAI_CHAT_MODEL_NAME", "gpt-4o-mini")
+     OPENAI_PROMPT_MODEL_NAME = os.environ.get("OPENAI_PROMPT_MODEL_NAME", "gpt-4o-mini")
+     OPENAI_CHAT_TEMPERATURE = float(os.environ.get("OPENAI_CHAT_TEMPERATURE", 0.0))
+     OPENAI_CHAT_MAX_RETRIES = int(os.environ.get("OPENAI_CHAT_MAX_RETRIES", 3))

    @classmethod
    def to_dict(cls):
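The defaults above are now read from the environment at import time, with truthy-string handling for the boolean flag. A small hedged sketch of that parsing behavior (the values shown are illustrative):

# Hedged sketch of the OPENAI_CHAT_CACHE parsing used above.
import os

os.environ["OPENAI_CHAT_CACHE"] = "Yes"
cache_enabled = os.environ.get("OPENAI_CHAT_CACHE", "true").lower() in ["true", "1", "t", "y", "yes"]
print(cache_enabled)  # True; "false", "0", or "no" yield False, while unset falls back to "true"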
@@ -123,7 +124,7 @@ def empty_str_to_int_default(v: str, default: int) -> int:
class Settings(BaseSettings):
    """Settings for Lambda functions"""

-     _dump: dict = None
+     _dump: Optional[dict] = None
    _pinecone_api_key_source: str = "unset"
    _openai_api_key_source: str = "unset"
    _initialized: bool = False
@@ -142,59 +143,38 @@ def __init__(self, **data: Any):

    debug_mode: Optional[bool] = Field(
        SettingsDefaults.DEBUG_MODE,
-         env="DEBUG_MODE",
-         pre=True,
-         getter=lambda v: empty_str_to_bool_default(v, SettingsDefaults.DEBUG_MODE),
    )
    dump_defaults: Optional[bool] = Field(
        SettingsDefaults.DUMP_DEFAULTS,
-         env="DUMP_DEFAULTS",
-         pre=True,
-         getter=lambda v: empty_str_to_bool_default(v, SettingsDefaults.DUMP_DEFAULTS),
    )

-     langchain_memory_key: Optional[str] = Field(SettingsDefaults.LANGCHAIN_MEMORY_KEY, env="LANGCHAIN_MEMORY_KEY")
+     langchain_memory_key: Optional[str] = Field(SettingsDefaults.LANGCHAIN_MEMORY_KEY)

-     openai_api_organization: Optional[str] = Field(
-         SettingsDefaults.OPENAI_API_ORGANIZATION, env="OPENAI_API_ORGANIZATION"
-     )
-     openai_api_key: Optional[SecretStr] = Field(SettingsDefaults.OPENAI_API_KEY, env="OPENAI_API_KEY")
-     openai_endpoint_image_n: Optional[int] = Field(
-         SettingsDefaults.OPENAI_ENDPOINT_IMAGE_N, env="OPENAI_ENDPOINT_IMAGE_N"
-     )
-     openai_endpoint_image_size: Optional[str] = Field(
-         SettingsDefaults.OPENAI_ENDPOINT_IMAGE_SIZE, env="OPENAI_ENDPOINT_IMAGE_SIZE"
-     )
+     openai_api_organization: Optional[str] = Field(SettingsDefaults.OPENAI_API_ORGANIZATION)
+     openai_api_key: Optional[SecretStr] = Field(SettingsDefaults.OPENAI_API_KEY)
+     openai_endpoint_image_n: Optional[int] = Field(SettingsDefaults.OPENAI_ENDPOINT_IMAGE_N)
+     openai_endpoint_image_size: Optional[str] = Field(SettingsDefaults.OPENAI_ENDPOINT_IMAGE_SIZE)
    openai_chat_cache: Optional[bool] = Field(
        SettingsDefaults.OPENAI_CHAT_CACHE,
-         env="OPENAI_CHAT_CACHE",
-         pre=True,
-         getter=lambda v: empty_str_to_bool_default(v, SettingsDefaults.OPENAI_CHAT_CACHE),
-     )
-     openai_chat_model_name: Optional[str] = Field(SettingsDefaults.OPENAI_CHAT_MODEL_NAME, env="OPENAI_CHAT_MODEL_NAME")
-     openai_prompt_model_name: Optional[str] = Field(
-         SettingsDefaults.OPENAI_PROMPT_MODEL_NAME, env="OPENAI_PROMPT_MODEL_NAME"
    )
+     openai_chat_model_name: str = Field(SettingsDefaults.OPENAI_CHAT_MODEL_NAME)
+     openai_prompt_model_name: str = Field(SettingsDefaults.OPENAI_PROMPT_MODEL_NAME)
    openai_chat_temperature: Optional[float] = Field(
        SettingsDefaults.OPENAI_CHAT_TEMPERATURE,
-         env="OPENAI_CHAT_TEMPERATURE",
        ge=0.0,
        le=1.0,
    )
    openai_chat_max_retries: Optional[int] = Field(
        SettingsDefaults.OPENAI_CHAT_MAX_RETRIES,
-         env="OPENAI_CHAT_MAX_RETRIES",
        ge=0,
    )

-     pinecone_api_key: Optional[SecretStr] = Field(SettingsDefaults.PINECONE_API_KEY, env="PINECONE_API_KEY")
-     pinecone_environment: Optional[str] = Field(SettingsDefaults.PINECONE_ENVIRONMENT, env="PINECONE_ENVIRONMENT")
-     pinecone_index_name: Optional[str] = Field(SettingsDefaults.PINECONE_INDEX_NAME, env="PINECONE_INDEX_NAME")
-     pinecone_vectorstore_text_key: Optional[str] = Field(
-         SettingsDefaults.PINECONE_VECTORSTORE_TEXT_KEY, env="PINECONE_VECTORSTORE_TEXT_KEY"
-     )
-     pinecone_metric: Optional[str] = Field(SettingsDefaults.PINECONE_METRIC, env="PINECONE_METRIC")
-     pinecone_dimensions: Optional[int] = Field(SettingsDefaults.PINECONE_DIMENSIONS, env="PINECONE_DIMENSIONS", gt=0)
+     pinecone_api_key: Optional[SecretStr] = Field(SettingsDefaults.PINECONE_API_KEY)
+     pinecone_environment: Optional[str] = Field(SettingsDefaults.PINECONE_ENVIRONMENT)
+     pinecone_index_name: Optional[str] = Field(SettingsDefaults.PINECONE_INDEX_NAME)
+     pinecone_vectorstore_text_key: Optional[str] = Field(SettingsDefaults.PINECONE_VECTORSTORE_TEXT_KEY)
+     pinecone_metric: Optional[str] = Field(SettingsDefaults.PINECONE_METRIC)
+     pinecone_dimensions: Optional[int] = Field(SettingsDefaults.PINECONE_DIMENSIONS, gt=0)

    @property
    def pinecone_api_key_source(self) -> str:
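The dropped env=... keyword arguments follow the pydantic v2 / pydantic-settings convention, where BaseSettings resolves environment variables by field name (case-insensitively by default), so each field no longer needs an explicit binding. A minimal hedged sketch of that behavior; the class below is illustrative, not part of this module:

# Hedged sketch: pydantic-settings v2 matches env vars to field names
# without an env= argument on Field().
import os
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings

class DemoSettings(BaseSettings):
    openai_chat_temperature: Optional[float] = Field(0.0, ge=0.0, le=1.0)

os.environ["OPENAI_CHAT_TEMPERATURE"] = "0.25"
print(DemoSettings().openai_chat_temperature)  # 0.25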
@@ -331,18 +311,33 @@ def check_langchain_memory_key(cls, v) -> str:
        return v

    @field_validator("openai_api_organization")
-     def check_openai_api_organization(cls, v) -> str:
+     def check_openai_api_organization(cls, v) -> Optional[str]:
        """Check openai_api_organization"""
-         if v in [None, ""]:
-             return SettingsDefaults.OPENAI_API_ORGANIZATION
-         return v
+         if isinstance(v, str) and len(v.strip()) > 0:
+             return v.strip()
+         if (
+             isinstance(SettingsDefaults.OPENAI_API_ORGANIZATION, str)
+             and len(SettingsDefaults.OPENAI_API_ORGANIZATION.strip()) > 0
+         ):
+             return SettingsDefaults.OPENAI_API_ORGANIZATION.strip()
+         return None

    @field_validator("openai_api_key")
    def check_openai_api_key(cls, v) -> SecretStr:
        """Check openai_api_key"""
-         if v in [None, ""]:
+         if v is None:
+             raise ModelValueError(
+                 "OpenAI API key is required. Please set the OPENAI_API_KEY environment variable or pass it as an argument."
+             )
+         if isinstance(v, SecretStr):
+             return v
+         if isinstance(v, str):
+             return SecretStr(v)
+         if isinstance(SettingsDefaults.OPENAI_API_KEY, SecretStr):
            return SettingsDefaults.OPENAI_API_KEY
-         return v
+         raise ModelValueError(
+             "OpenAI API key must be a string or SecretStr. Please set the OPENAI_API_KEY environment variable or pass it as an argument."
+         )

    @field_validator("openai_endpoint_image_n")
    def check_openai_endpoint_image_n(cls, v) -> int:
@@ -400,9 +395,13 @@ def check_openai_chat_max_retries(cls, v) -> int:
    @field_validator("pinecone_api_key")
    def check_pinecone_api_key(cls, v) -> SecretStr:
        """Check pinecone_api_key"""
-         if v in [None, ""]:
+         if isinstance(v, SecretStr):
+             return v
+         if isinstance(SettingsDefaults.PINECONE_API_KEY, SecretStr):
            return SettingsDefaults.PINECONE_API_KEY
-         return v
+         raise ModelValueError(
+             "Pinecone API key must be a string or SecretStr. Please set the PINECONE_API_KEY environment variable or pass it as an argument."
+         )

    @field_validator("pinecone_environment")
    def check_pinecone_environment(cls, v) -> str:
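Taken together, the new validators coerce plain strings into SecretStr and fail fast when a required key is missing. A hedged end-to-end sketch of how the rewritten Settings is expected to behave; the import path is an assumption, not verified against the repository:

# Hedged usage sketch; module path and exact error behavior are assumptions
# based on the diff above.
import os

os.environ["OPENAI_API_KEY"] = "sk-test"        # any non-empty value
os.environ["OPENAI_CHAT_TEMPERATURE"] = "0.25"

from config import Settings  # hypothetical import path

settings = Settings()
print(settings.openai_api_key)                      # SecretStr('**********') -- masked in logs
print(settings.openai_api_key.get_secret_value())   # "sk-test"
print(settings.openai_chat_temperature)             # 0.25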