Skip to content

Commit 184d30b

Browse files
authored
Merge pull request #242 from FullStackWithLawrence/next
Next
2 parents e751dd5 + 53254e0 commit 184d30b

File tree

6 files changed

+12
-11
lines changed

6 files changed

+12
-11
lines changed

.vscode/settings.json

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
 {
-    "cornflakes.linter.executablePath": "/Users/mcdaniel/desktop/aws-openai/venv/bin/flake8",
+    "cornflakes.linter.executablePath": "./venv/bin/flake8",
     "[python]": {
         "editor.defaultFormatter": "ms-python.black-formatter"
     }

Makefile

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@ ifeq ($(OS),Windows_NT)
     PYTHON = python.exe
     ACTIVATE_VENV = venv\Scripts\activate
 else
-    PYTHON = python3.11
+    PYTHON = python3.12
     ACTIVATE_VENV = source venv/bin/activate
 endif
 PIP = $(PYTHON) -m pip

models/__version__.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,2 +1,2 @@
 # Managed via automated CI/CD in .github/workflows/semanticVersionBump.yml.
-__version__ = "1.3.2"
+__version__ = "1.3.4"

models/hybrid_search_retreiver.py

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -21,14 +21,14 @@
 import textwrap
 from typing import Union

-# pinecone integration
-from langchain.cache import InMemoryCache
-
 # embedding
 from langchain.globals import set_llm_cache
 from langchain.prompts import PromptTemplate
 from langchain.schema import BaseMessage, HumanMessage, SystemMessage

+# pinecone integration
+from langchain_community.cache import InMemoryCache
+
 # hybrid search capability
 from langchain_community.retrievers.pinecone_hybrid_search import (
     PineconeHybridSearchRetriever,
@@ -110,7 +110,8 @@ def cached_chat_request(
         human_message = HumanMessage(content=str(human_message))
         messages = [system_message, human_message]
         # pylint: disable=not-callable
-        retval = self.chat(messages)
+        # retval = self.chat(messages)
+        retval = self.chat.invoke(messages)
         return retval

     def prompt_with_template(

models/pinecone.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,7 @@
 # pinecone integration
 # import pinecone
 from pinecone import Pinecone, ServerlessSpec
-from pinecone.core.client.exceptions import PineconeApiException
+from pinecone.core.openapi.shared.exceptions import PineconeApiException
 from pinecone.models import IndexList

 # this project

requirements.txt

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,7 @@ flake8-coding==1.3.2
 pre-commit==4.0.1
 isort==6.0.0
 mypy==1.14.1
-pylint==3.3.3
+pylint==3.3.4
 bandit==1.7.10
 pydocstringformatter==0.7.3
 tox==4.23.2
@@ -21,10 +21,10 @@ codespell==2.4.1
 # ------------
 python-decouple==3.8
 langchainhub==0.1.21
-langchain-openai==0.3.3
+langchain-openai==0.1.25
 langchain-experimental
 openai>=1.40.0
-langchain==0.3.17
+langchain==0.2.11
 langchain-pinecone==0.1.3
 langchain-experimental
 pinecone-client==5.0.1

0 commit comments

Comments (0)