
Commit ef3d91b

Apply litellm to get response (a huge refactoring), design file dialog's UI, prevent sending when prompt is empty
1 parent 4b4da93 commit ef3d91b

File tree

10 files changed: +67 additions, -124 deletions


pyproject.toml

Lines changed: 1 addition & 0 deletions
@@ -26,6 +26,7 @@ dependencies = [

     "g4f",
     "curl_cffi",
+    "litellm",

     "edge-tts"
 ]

pyqt_openai/__init__.py

Lines changed: 1 addition & 1 deletion
@@ -463,7 +463,7 @@ def move_bin(filename, dst_dir):
 # Dictionary that stores the platform and model pairs
 PROVIDER_MODEL_DICT = {
     "OpenAI": ["gpt-4o", "gpt-4o-mini"] + O1_MODELS,
-    "Gemini": ["gemini-1.5-flash", "gemini-1.5-pro"],
+    "Gemini": ["gemini/gemini-1.5-flash", "gemini/gemini-1.5-pro"],
     "Claude": ["claude-3-5-sonnet-20240620"],
 }
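
litellm routes each request by the provider prefix embedded in the model name, which is why the Gemini entries gain a "gemini/" prefix while the OpenAI and Claude names stay bare (litellm recognizes those without a prefix). A minimal sketch, not from this repo, of what the prefixed name does; the key value is a placeholder:

    import os
    from litellm import completion

    os.environ["GEMINI_API_KEY"] = "..."  # placeholder; litellm reads provider keys from the environment

    # The "gemini/" prefix tells litellm to call Google's API; a bare "gpt-4o" would go to OpenAI.
    response = completion(
        model="gemini/gemini-1.5-flash",
        messages=[{"role": "user", "content": "Say hello in one word."}],
    )
    print(response.choices[0].message.content)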

pyqt_openai/chat_widget/center/aiChatUnit.py

Lines changed: 7 additions & 4 deletions
@@ -43,17 +43,20 @@ def __initAIChatUi(self):

         self.__fileListBtn = Button()
         self.__fileListBtn.setStyleAndIcon(ICON_FILE)
+        self.__fileListBtn.setToolTip("File List")
         self.__fileListBtn.clicked.connect(self.__showFileListDialog)

         self.__speakerBtn = Button()
         self.__speakerBtn.setStyleAndIcon(ICON_SPEAKER)
         self.__speakerBtn.setCheckable(True)
         self.__speakerBtn.toggled.connect(self.__speak)
+
         self.thread = None

-        self.getMenuWidget().layout().insertWidget(3, self.__favoriteBtn)
-        self.getMenuWidget().layout().insertWidget(4, self.__infoBtn)
-        self.getMenuWidget().layout().insertWidget(5, self.__speakerBtn)
+        self.getMenuWidget().layout().insertWidget(3, self.__fileListBtn)
+        self.getMenuWidget().layout().insertWidget(4, self.__favoriteBtn)
+        self.getMenuWidget().layout().insertWidget(5, self.__infoBtn)
+        self.getMenuWidget().layout().insertWidget(6, self.__speakerBtn)

         self.setBackgroundRole(QPalette.ColorRole.AlternateBase)
         self.setAutoFillBackground(True)

@@ -76,7 +79,7 @@ def __showResponseInfoDialog(self):

     def __showFileListDialog(self):
         if self.__result_info:
-            dialog = FileTableDialog(self.__result_info, parent=self)
+            dialog = FileTableDialog(parent=self)
             dialog.exec()

     def afterResponse(self, arg):

pyqt_openai/chat_widget/center/chatWidget.py

Lines changed: 10 additions & 0 deletions
@@ -124,6 +124,16 @@ def getCurId(self):
         return self.__cur_id

     def __chat(self):
+        # If main prompt is empty, do nothing
+        # TODO LANGUAGE
+        if not self.__prompt.getContent():
+            QMessageBox.warning(
+                self,
+                LangClass.TRANSLATIONS["Warning"],
+                LangClass.TRANSLATIONS["Please write something before sending."],
+            )
+            return
         try:
             # Get necessary parameters
             stream = CONFIG_MANAGER.get_general_property("stream")
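
The guard relies on QMessageBox and LangClass already being imported in chatWidget.py, and the TODO LANGUAGE comment flags that "Please write something before sending." is used as a translation key that may not have an entry yet. A standalone sketch of the same check, with plain strings instead of translations and an illustrative helper name:

    from PySide6.QtWidgets import QMessageBox, QWidget

    def warn_if_empty(parent: QWidget, prompt_text: str) -> bool:
        # Returns True when the prompt is empty, i.e. sending should be aborted.
        if not prompt_text.strip():
            QMessageBox.warning(parent, "Warning", "Please write something before sending.")
            return True
        return False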

pyqt_openai/chat_widget/center/textEditPromptGroup.py

Lines changed: 3 additions & 3 deletions
@@ -188,9 +188,9 @@ def handlePaste(self):
                 # Emit the image data
                 self.onPasteFile.emit(image_data)
             # TXT file
-            elif mime_data.hasUrls() and mime_data.hasText():
-                text = mime_data.text()
-                self.onPasteText.emit(text)
+            # elif mime_data.hasUrls() and mime_data.hasText():
+            #     text = mime_data.text()
+            #     self.onPasteText.emit(text)
             else:
                 self.__textEdit.paste()

pyqt_openai/globals.py

Lines changed: 0 additions & 1 deletion
@@ -21,6 +21,5 @@
 OPENAI_CLIENT = OpenAI(api_key="")
 GEMINI_CLIENT = genai.GenerativeModel(DEFAULT_GEMINI_MODEL)
 CLAUDE_CLIENT = anthropic.Anthropic(api_key="")
-LLAMA_CLIENT = OpenAI(api_key="", base_url=LLAMA_REQUEST_URL)

 REPLICATE_CLIENT = ReplicateWrapper(api_key="")

pyqt_openai/ico/file.svg

Lines changed: 8 additions & 29 deletions

pyqt_openai/util/script.py

Lines changed: 12 additions & 84 deletions
@@ -19,6 +19,7 @@
 import traceback
 import wave
 import zipfile
+
 from datetime import datetime
 from io import BytesIO
 from pathlib import Path

@@ -28,6 +29,7 @@
 import psutil
 from g4f import ProviderType
 from g4f.providers.base_provider import ProviderModelMixin
+from litellm import completion

 from pyqt_openai.widgets.scrollableErrorDialog import ScrollableErrorDialog

@@ -72,8 +74,6 @@
     DB,
     OPENAI_CLIENT,
     CLAUDE_CLIENT,
-    LLAMA_CLIENT,
-    GEMINI_CLIENT,
     G4F_CLIENT,
     LLAMAINDEX_WRAPPER,
     REPLICATE_CLIENT,

@@ -718,12 +718,13 @@ def get_claude_argument(model, system, messages, cur_text, stream, images):
 def set_api_key(env_var_name, api_key):
     if env_var_name == "OPENAI_API_KEY":
         OPENAI_CLIENT.api_key = api_key
+        os.environ['OPENAI_API_KEY'] = api_key
     if env_var_name == "GEMINI_API_KEY":
         genai.configure(api_key=api_key)
+        os.environ["GEMINI_API_KEY"] = api_key
     if env_var_name == "CLAUDE_API_KEY":
         CLAUDE_CLIENT.api_key = api_key
-    if env_var_name == "LLAMA_API_KEY":
-        LLAMA_CLIENT.api_key = api_key
+        os.environ['ANTHROPIC_API_KEY'] = api_key
     if env_var_name == "REPLICATE_API_TOKEN":
         REPLICATE_CLIENT.api_key = api_key
         os.environ["REPLICATE_API_TOKEN"] = api_key

@@ -991,91 +992,18 @@ def stream_response(provider, response, is_g4f=False, get_content_only=True):
         for chunk in response:
             yield chunk
     else:
-        if provider == "OpenAI":
-            for chunk in response:
-                response_text = chunk.choices[0].delta.content
-                yield response_text
-        elif provider == "Gemini":
-            for chunk in response:
-                yield chunk.text
-        elif provider == "Claude":
-            with response as stream:
-                for text in stream.text_stream:
-                    yield text
-        elif provider == "Llama":
-            for chunk in response:
-                response_text = chunk.choices[0].delta.content
-                yield response_text
+        for part in response:
+            yield part.choices[0].delta.content or ''


 def get_api_response(args, get_content_only=True):
     try:
         provider = get_provider_from_model(args["model"])
-        if provider == "OpenAI":
-            response = OPENAI_CLIENT.chat.completions.create(**args)
-            print(response)
-            if args["stream"]:
-                return stream_response(provider, response)
-            else:
-                if get_content_only:
-                    if args["model"] in O1_MODELS:
-                        return str(response.choices[0].message.content)
-                    return response.choices[0].message.content
-                else:
-                    return response
-        elif provider == "Gemini":
-            for message in args["messages"]:
-                message["parts"] = message.pop("content")
-                if message["role"] == "assistant":
-                    message["role"] = "model"
-
-            if len(args.get("images", [])) > 0:
-                # Supposedly this don't support history of chat as well as stream
-                response = GEMINI_CLIENT.generate_content(
-                    [args["messages"][-1]["parts"]] + args["images"]
-                )
-                return response.text
-            else:
-                chat = GEMINI_CLIENT.start_chat(history=args["messages"])
-
-                if args["stream"]:
-                    response = chat.send_message(
-                        args["messages"][-1]["parts"], stream=args["stream"]
-                    )
-                    return stream_response(provider, response)
-                else:
-                    response = chat.send_message(args["messages"][-1]["parts"])
-                    if get_content_only:
-                        return response.text
-                    else:
-                        return response
-        elif provider == "Claude":
-            if args["stream"]:
-                response = CLAUDE_CLIENT.messages.stream(
-                    model=args["model"],
-                    max_tokens=DEFAULT_TOKEN_CHUNK_SIZE,
-                    messages=args["messages"],
-                )
-                return stream_response(provider, response)
-            else:
-                response = CLAUDE_CLIENT.messages.create(
-                    model=args["model"],
-                    max_tokens=DEFAULT_TOKEN_CHUNK_SIZE,
-                    messages=args["messages"],
-                )
-                if get_content_only:
-                    return response.content[0].text
-                else:
-                    return response
-        elif provider == "Llama":
-            response = LLAMA_CLIENT.chat.completions.create(**args)
-            if args["stream"]:
-                return stream_response(provider, response)
-            else:
-                if get_content_only:
-                    return response.choices[0].message.content
-                else:
-                    return response
+        response = completion(drop_params=True, **args)
+        if args["stream"]:
+            return stream_response(provider, response)
+        else:
+            return response.choices[0].message.content or ""
     except Exception as e:
         print(e)
         raise e
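
The net effect of this refactor: get_api_response no longer branches per provider. One completion(drop_params=True, **args) call covers OpenAI, Gemini and Claude, credentials come from the environment variables that set_api_key now exports, and streaming chunks arrive in the OpenAI delta shape for every provider, which is what lets stream_response collapse to a single loop. A condensed sketch of that pattern, with an illustrative ask() helper that is not part of the repo:

    import os
    from litellm import completion

    os.environ["OPENAI_API_KEY"] = "..."  # placeholder; litellm picks up provider keys from the environment

    def ask(model, messages, stream=False):
        # One call for every provider; drop_params discards kwargs a given provider doesn't accept.
        response = completion(model=model, messages=messages, stream=stream, drop_params=True)
        if stream:
            # Each chunk mirrors OpenAI's ChatCompletionChunk, regardless of the underlying provider.
            return (chunk.choices[0].delta.content or "" for chunk in response)
        return response.choices[0].message.content or ""

    # Usage: print("".join(ask("gpt-4o-mini", [{"role": "user", "content": "Hi"}], stream=True)))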
Lines changed: 24 additions & 2 deletions
@@ -1,4 +1,4 @@
-from PySide6.QtWidgets import QDialog
+from PySide6.QtWidgets import QDialog, QTableWidget, QHeaderView, QVBoxLayout


 class FileTableDialog(QDialog):

@@ -7,4 +7,26 @@ def __init__(self, parent=None):
         self.__initUi()

     def __initUi(self):
-        pass
+        self.setWindowTitle("File List")
+        # File tables
+        self.__fileTable = QTableWidget()
+        self.__fileTable.setColumnCount(2)
+        self.__fileTable.setHorizontalHeaderLabels(["File Name", "Type"])
+        self.__fileTable.setColumnWidth(0, 300)
+        self.__fileTable.setColumnWidth(1, 100)
+        self.__fileTable.setEditTriggers(QTableWidget.NoEditTriggers)
+        self.__fileTable.setSelectionBehavior(QTableWidget.SelectRows)
+        self.__fileTable.setSelectionMode(QTableWidget.SingleSelection)
+        self.__fileTable.setSortingEnabled(True)
+        self.__fileTable.setAlternatingRowColors(True)
+        self.__fileTable.setShowGrid(False)
+        self.__fileTable.verticalHeader().hide()
+        self.__fileTable.horizontalHeader().setStretchLastSection(True)
+        self.__fileTable.horizontalHeader().setHighlightSections(False)
+        self.__fileTable.horizontalHeader().setSectionsClickable(True)
+        self.__fileTable.horizontalHeader().setSectionResizeMode(0, QHeaderView.Stretch)
+        self.__fileTable.horizontalHeader().setSectionResizeMode(1, QHeaderView.ResizeToContents)
+        lay = QVBoxLayout()
+        lay.addWidget(self.__fileTable)
+        self.setLayout(lay)
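As committed, __initUi only builds an empty two-column table; nothing fills it yet, and the call site in aiChatUnit.py no longer passes __result_info into the dialog. A hypothetical populate helper (not part of this commit) showing how rows matching the "File Name" and "Type" headers could be added:

    from pathlib import Path
    from PySide6.QtWidgets import QTableWidget, QTableWidgetItem

    def populate(table: QTableWidget, file_paths: list) -> None:
        # Fill one row per file; column 0 is the file name, column 1 its extension.
        table.setRowCount(len(file_paths))
        for row, path in enumerate(file_paths):
            p = Path(path)
            table.setItem(row, 0, QTableWidgetItem(p.name))
            table.setItem(row, 1, QTableWidgetItem(p.suffix or ""))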
requirements.txt

Lines changed: 1 addition & 0 deletions
@@ -14,5 +14,6 @@ replicate

 g4f
 curl_cffi
+litellm

 edge-tts
