
Commit e93912e

Update (see detail)
1. Update docGPT.py: remove unused (garbage) code and sync with the deleted prompt.py.
2. Delete prompt.py.
3. Update app.py: add a chatbot window and cache qa_chain responses.
1 parent d037e22 commit e93912e


4 files changed: +51 additions, -112 deletions


app.py

Lines changed: 45 additions & 15 deletions
@@ -1,13 +1,15 @@
+import asyncio
 import os
+import tempfile
+from functools import lru_cache

 os.chdir(os.path.dirname(os.path.abspath(__file__)))
 os.environ['SERPAPI_API_KEY'] = ''

-import tempfile
-
 import langchain
 import streamlit as st
 from langchain.cache import InMemoryCache
+from streamlit_chat import message

 from agent import AgentHelper
 from docGPT import DocGPT
@@ -102,13 +104,14 @@ def load_api_key() -> None:
         chain_type='refine',
     )
     docGPT_spec_tool = agent_.create_doc_chat(docGPT_spec)
-except Exception:
-    st.error('#### ⚠️ :red[You have not pass OpenAPI key. (Or your api key cannot use.)]')
-
+except Exception as e:
+    print(e)
+    pass
+
 try:
     search_tool = agent_.get_searp_chain
 except Exception as e:
-    st.warning('⚠️ You have not pass SEARPAPI key. (Or your api key cannot use.) Try Refresh')
+    st.warning('⚠️ You have not pass SEARPAPI key. (Or your api key cannot use.)')

 try:
     calculate_tool = agent_.get_calculate_chain
@@ -118,18 +121,45 @@ def load_api_key() -> None:
         calculate_tool, search_tool
     ]
     agent_.initialize(tools)
-except Exception:
-    pass
+except Exception as e:
+    print(e)
+
+
+if not st.session_state['openai_api_key']:
+    st.error('⚠️ :red[You have not pass OpenAPI key. (Or your api key cannot use.)] Necessary')

 st.write('---')

-with st.container():
-    query = st.text_input('#### Question:')
-    response = None
+if 'response' not in st.session_state:
+    st.session_state['response'] = ['How can I help you?']
+
+if 'query' not in st.session_state:
+    st.session_state['query'] = ['Hi']
+

+@lru_cache(maxsize=20)
+async def get_response(query: str):
     if agent_ and query and query != '':
-        response = 'loading...'
-        response = agent_.query(query)
+        response = agent_.query(query)
+        return response
+
+
+query = st.text_input(
+    "#### Question:",
+    placeholder='Enter your question'
+)
+
+response_container = st.container()
+user_container = st.container()
+
+with user_container:
+    if query:
+        response = asyncio.run(get_response(query))
+        st.session_state.query.append(query)
+        st.session_state.response.append(response)

-    st.write('### :blue[Response]:')
-    st.write(response)
+with response_container:
+    if st.session_state['response']:
+        for i in range(len(st.session_state['response'])-1, -1, -1):
+            message(st.session_state["response"][i], key=str(i))
+            message(st.session_state['query'][i], is_user=True, key=str(i) + '_user')
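
One detail of the new caching setup worth noting: functools.lru_cache applied to an async def memoizes the returned coroutine object rather than its result, and a coroutine cannot be awaited a second time, so asking the same question twice would hand asyncio.run an already-consumed coroutine. A minimal sketch of a variant that caches the resolved answer instead, assuming agent_.query is a synchronous call (as it was used before this change) and that agent_ is the agent object built earlier in app.py:

from functools import lru_cache

@lru_cache(maxsize=20)
def get_cached_response(query: str) -> str:
    # Memoize the answer text keyed by the query string.
    # `agent_` is assumed to be the agent constructed above in app.py.
    return agent_.query(query)

# Usage in the Streamlit flow, in place of asyncio.run(get_response(query)):
# if query:
#     response = get_cached_response(query)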

docGPT/docGPT.py

Lines changed: 2 additions & 11 deletions
@@ -1,4 +1,3 @@
-import logging
 import os
 from abc import ABC, abstractmethod

@@ -11,12 +10,8 @@
 from langchain.prompts import PromptTemplate
 from langchain.vectorstores import Chroma

-from .prompt import SpecificationPromptor
-

 openai.api_key = os.getenv('OPENAI_API_KEY')
-# TODO: ADD logger
-logger = logging.getLogger('./logs/openai_callback.log')


 class BaseQaChain(ABC):
@@ -97,6 +92,8 @@ def __init__(self, docs):
         self.prompt_template = """
         Cite each reference using [Page Number] notation (every result has this number at the beginning).
         Only answer what is asked. The answer should be short and concise. Answer step-by-step.
+        If the content has sections, please summarize them in order and present them in a bulleted format.
+        For example, sequentially summarize the introduction, methods, results, and so on.

         {context}

@@ -108,12 +105,6 @@ def __init__(self, docs):
             input_variables=['context', 'question']
         )

-    def set_customer_prompt(self, promptor: str='specification') -> None:
-        if promptor == 'specification':
-            spec_prompt = SpecificationPromptor()
-            self.prompt_template = spec_prompt.prompt_template
-            self.prompt.template = self.prompt_template
-
     def _helper_prompt(self, chain_type: str) -> None:
         # TODO: Bug helper
         if chain_type == 'refine':
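
For reference, the extended prompt_template above still flows into a plain LangChain PromptTemplate with context and question placeholders, as the unchanged constructor lines show. A standalone sketch of that wiring (the trailing question placeholder is not visible in this diff and is assumed from input_variables):

from langchain.prompts import PromptTemplate

prompt_template = """
Cite each reference using [Page Number] notation (every result has this number at the beginning).
Only answer what is asked. The answer should be short and concise. Answer step-by-step.
If the content has sections, please summarize them in order and present them in a bulleted format.
For example, sequentially summarize the introduction, methods, results, and so on.

{context}

Question: {question}
"""

# Mirrors what __init__ does: both placeholders must be declared as input variables.
prompt = PromptTemplate(
    template=prompt_template,
    input_variables=['context', 'question']
)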

docGPT/prompt.py

Lines changed: 0 additions & 83 deletions
This file was deleted.

requirements.txt

Lines changed: 4 additions & 3 deletions
@@ -1,4 +1,5 @@
-langchain==0.0.222
+langchain==0.0.224
 openai==0.27.8
-streamlit==1.22.0
-pymupdf
+streamlit==1.24.0
+streamlit_chat==0.1.1
+pymupdf
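
The new streamlit_chat pin supplies the message widget that app.py now uses for the chat bubbles. A minimal usage sketch (keys are arbitrary but must be unique per bubble; run inside a script launched with streamlit run):

from streamlit_chat import message

# One assistant bubble and one user bubble, matching how app.py renders each turn.
message('How can I help you?', key='bot_0')
message('Hi', is_user=True, key='user_0')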

0 commit comments
