Replies: 1 comment
-
I found a similar unsolved discussion that might be relevant to your issue. Regarding your question, you can use `validate_cypher=True`. Here is the relevant part of the code:

@classmethod
def from_llm(
    cls,
    llm: Optional[BaseLanguageModel] = None,
    *,
    qa_prompt: Optional[BasePromptTemplate] = None,
    cypher_prompt: Optional[BasePromptTemplate] = None,
    cypher_llm: Optional[BaseLanguageModel] = None,
    qa_llm: Optional[Union[BaseLanguageModel, Any]] = None,
    # NOTE(review): mutable default arguments ([]) are a classic Python
    # pitfall — the same list object is shared across calls. Harmless only
    # if the elided body never mutates them; verify upstream.
    exclude_types: List[str] = [],
    include_types: List[str] = [],
    validate_cypher: bool = False,
    qa_llm_kwargs: Optional[Dict[str, Any]] = None,
    cypher_llm_kwargs: Optional[Dict[str, Any]] = None,
    use_function_response: bool = False,
    function_response_system: str = FUNCTION_RESPONSE_SYSTEM,
    **kwargs: Any,
) -> GraphCypherQAChain:
    """Initialize from LLM.

    Alternate constructor: builds the QA and Cypher-generation sub-chains
    (elided below) and, when ``validate_cypher`` is True, a
    ``CypherQueryCorrector`` derived from the graph's relationship schema.
    """
    # ... (other code)
    cypher_query_corrector = None
    if validate_cypher:
        # Build one Schema entry per relationship triple
        # (start label, relationship type, end label).
        # NOTE(review): assumes the caller passed ``graph=`` in kwargs —
        # a missing key raises KeyError here; and ``.get("relationships")``
        # returning None would raise TypeError in the comprehension.
        corrector_schema = [
            Schema(el["start"], el["type"], el["end"])
            for el in kwargs["graph"].structured_schema.get("relationships")
        ]
        cypher_query_corrector = CypherQueryCorrector(corrector_schema)
    # graph_schema, qa_chain and cypher_generation_chain are produced by
    # the elided code above.
    return cls(
        graph_schema=graph_schema,
        qa_chain=qa_chain,
        cypher_generation_chain=cypher_generation_chain,
        cypher_query_corrector=cypher_query_corrector,
        use_function_response=use_function_response,
        **kwargs,
    )

To use this, pass `validate_cypher=True` to `from_llm`. The error you encountered should be resolved by setting up the chain as shown below. Here is how you can modify your code:

from langchain.chains import GraphCypherQAChain
from langchain.graphs import Neo4jGraph
from langchain_core.prompts.prompt import PromptTemplate
# Configure the Azure OpenAI chat model used by the chain.
# NOTE(review): `AzureChatOpenAI` is not imported in this snippet — add
# `from langchain_openai import AzureChatOpenAI` (or the equivalent for your
# LangChain version) before running.
llm = AzureChatOpenAI(
    deployment_name="gpt-4",
    openai_api_version="2024-02-15-preview",
    model_version="1106-Preview",
    max_tokens=4096,
    temperature=0,  # deterministic output for Cypher generation
    openai_api_key=openai_api_key,  # assumes defined earlier — TODO confirm
    azure_endpoint=azure_endpoint
)
# Connect to Neo4j. NOTE(review): `url`, `username`, `password` must be
# defined elsewhere; confirm whether sanitize=False is intended here.
graph = Neo4jGraph(url=url, username=username, password=password, sanitize=False)
# Prompt for Cypher generation; `CYPHER_GENERATION_PROMPT` must be a template
# *string* defined elsewhere (it is not shown in this snippet).
CYPHER_CORRECTOR_PROMPT = PromptTemplate(
    input_variables=["question"], template=CYPHER_GENERATION_PROMPT
)
# validate_cypher=True makes from_llm build a CypherQueryCorrector from the
# graph's relationship schema (see the excerpt above).
chain = GraphCypherQAChain.from_llm(
    llm=llm,
    graph=graph,
    verbose=True,
    cypher_prompt=CYPHER_CORRECTOR_PROMPT,
    validate_cypher=True
)
chain.invoke("Question")

This should resolve the issue and allow you to use `validate_cypher=True` with `GraphCypherQAChain`.
Beta Was this translation helpful? Give feedback.
Uh oh!
There was an error while loading. Please reload this page.
-
Checked other resources
Commit to Help
Example Code
Beta Was this translation helpful? Give feedback.
All reactions