@@ -13,7 +13,7 @@
 from pathlib import Path
 from typing import List, Optional
 
-import openai
+from openai import OpenAI
 
 from ngwidgets.yamlable import lod_storable
 
@@ -127,6 +127,7 @@ def __init__(
             model(str): the model to use
             prompt_filepath(str): the filepath for the prompt logging
         """
+        self.client = None
         self.model = model
         self.token_size_limit = LLM.MODEL_SIZE_LIMITS.get(
             model, 4096
@@ -135,7 +136,7 @@ def __init__(
         openai_api_key = None
         if api_key:
             # If an API key is provided during object creation, set it.
-            openai.api_key = api_key
+            openai_api_key = api_key
         else:
             # Load the API key from the environment or a JSON file
             openai_api_key = os.getenv("OPENAI_API_KEY")
@@ -153,8 +154,8 @@ def __init__(
                 )
             else:
                 return
-        # set the global api key
-        openai.api_key = openai_api_key
+        # set the client using the api key
+        self.client = OpenAI(api_key=openai_api_key)
         # If prompts_filepath is None, use default path in the user's home directory with the current date
         if prompts_filepath is None:
             # Format: Year-Month-Day
@@ -187,7 +188,7 @@ def available(self):
         Returns:
             bool: True if the Large Language Model is available
         """
-        return openai.api_key is not None
+        return self.client is not None
 
     def ask(self, prompt_text: str, model: str = None, temperature: float = 0.7) -> str:
         """
@@ -211,9 +212,14 @@ def ask(self, prompt_text: str, model: str = None, temperature: float = 0.7) ->
         start_time = datetime.now()
 
         # Interact with the API
-        chat_completion = openai.chat.completions.create(
+        chat_completion = self.client.chat.completions.create(
             model=model,
-            messages=[{"role": "user", "content": prompt_text}],
+            messages=[
+                {
+                    "role": "user",
+                    "content": prompt_text
+                }
+            ],
             temperature=temperature,  # Include the temperature parameter here
         )
         result = chat_completion.choices[0].message.content
@@ -278,7 +284,7 @@ def analyze_image(self, image_path: str, auth: dict, prompt_text: str) -> str:
             }
         ]
 
-        chat_completion = openai.chat.completions.create(
+        chat_completion = self.client.chat.completions.create(
             model=self.model, messages=messages
         )
 
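
For reference, below is a minimal, self-contained sketch of the client-based call pattern this diff migrates to, assuming the openai>=1.0 Python SDK. Only OpenAI(api_key=...), chat.completions.create, and choices[0].message.content are taken from the diff; the model name and prompt are illustrative placeholders, not part of the change.

# Minimal sketch of the migrated client-based pattern (openai>=1.0 SDK assumed).
import os

from openai import OpenAI

# Mirror the constructor: keep the client as None until an API key is available.
api_key = os.getenv("OPENAI_API_KEY")
client = OpenAI(api_key=api_key) if api_key else None

# Mirror available(): the client object itself signals readiness.
if client is not None:
    chat_completion = client.chat.completions.create(
        model="gpt-3.5-turbo",  # illustrative model name
        messages=[
            {
                "role": "user",
                "content": "Say hello",  # illustrative prompt
            }
        ],
        temperature=0.7,
    )
    print(chat_completion.choices[0].message.content)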