Replies: 1 comment 1 reply
-
Hey, @LZS1991! I'm here to help you out with any bugs, questions, or contributions you might have while waiting for a human maintainer. Let's squash those bugs together! 🤖 I found a similar issue that might help you resolve the problem.
Here is an example of how you can modify your code to include these steps:

import json
from langchain_core.exceptions import OutputParserException
def _call(
self,
inputs: Dict[str, Any],
run_manager: Optional[CallbackManagerForChainRun] = None,
) -> Dict[str, List]:
docs = self.text_splitter.create_documents([inputs[self.input_key]])
results = self.llm_chain.generate(
[{"text": d.page_content} for d in docs], run_manager=run_manager
)
qa = []
for res in results.generations:
try:
qa.append(json.loads(res[0].text))
except json.JSONDecodeError:
# Handle the invalid JSON output case
print(f"Invalid JSON output: {res[0].text}")
# You can choose to skip, log, or handle the invalid output as needed
continue
return {self.output_key: qa} Additionally, you can use the from langchain.output_parsers.regex import RegexParser
# Fallback parser for plain-text model output of the form
# "QUESTION: ...\nANSWER: ..."; the two capture groups are exposed
# under the "query" and "answer" output keys.
_QA_OUTPUT_PARSER = RegexParser(
    regex=r"QUESTION: (.*?)\n+ANSWER: (.*)", output_keys=["query", "answer"]
)
def _call(
self,
inputs: Dict[str, Any],
run_manager: Optional[CallbackManagerForChainRun] = None,
) -> Dict[str, List]:
docs = self.text_splitter.create_documents([inputs[self.input_key]])
results = self.llm_chain.generate(
[{"text": d.page_content} for d in docs], run_manager=run_manager
)
qa = []
for res in results.generations:
try:
parsed_output = _QA_OUTPUT_PARSER.parse(res[0].text)
qa.append(parsed_output)
except OutputParserException as e:
# Handle the invalid output case
print(f"Invalid output: {res[0].text}")
# You can choose to skip, log, or handle the invalid output as needed
continue
return {self.output_key: qa} This approach ensures that your application can handle cases where the language model's output does not match the expected format, thereby preventing the |
Beta Was this translation helpful? Give feedback.
Uh oh!
There was an error while loading. Please reload this page.
-
Checked other resources
Commit to Help
Example Code
Description
I am trying to use langchain to generate a dataset in Alpaca format from an input txt file by using an LLM (Qwen1.5-1.8B-Chat). I want to get a JSON file containing the result, but the code hit a problem:
Traceback (most recent call last):
File "D:\AnacondaREPO\envs\OpenLLM\Lib\site-packages\langchain_core\output_parsers\json.py", line 84, in parse_result
return parse_json_markdown(text)
^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\AnacondaREPO\envs\OpenLLM\Lib\site-packages\langchain_core\utils\json.py", line 147, in parse_json_markdown
return _parse_json(json_str, parser=parser)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\AnacondaREPO\envs\OpenLLM\Lib\site-packages\langchain_core\utils\json.py", line 163, in parse_json
return parser(json_str)
^^^^^^^^^^^^^^^^
File "D:\AnacondaREPO\envs\OpenLLM\Lib\site-packages\langchain_core\utils\json.py", line 118, in parse_partial_json
return json.loads(s, strict=strict)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\AnacondaREPO\envs\OpenLLM\Lib\json\__init__.py", line 359, in loads
return cls(**kw).decode(s)
^^^^^^^^^^^^^^^^^^^
File "D:\AnacondaREPO\envs\OpenLLM\Lib\json\decoder.py", line 337, in decode
obj, end = self.raw_decode(s, idx=_w(s, 0).end())
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\AnacondaREPO\envs\OpenLLM\Lib\json\decoder.py", line 355, in raw_decode
raise JSONDecodeError("Expecting value", s, err.value) from None
json.decoder.JSONDecodeError: Expecting value: line 1 column 1 (char 0)
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "E:\02-PythonProject\97_AITools\LLMTools\llm_for_training_dataset_generation.py", line 111, in
main()
File "E:\02-PythonProject\97_AITools\LLMTools\llm_for_training_dataset_generation.py", line 104, in main
out = chain.invoke({'text': doc.page_content})
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\AnacondaREPO\envs\OpenLLM\Lib\site-packages\langchain_core\runnables\base.py", line 2878, in invoke
input = context.run(step.invoke, input, config)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\AnacondaREPO\envs\OpenLLM\Lib\site-packages\langchain_core\output_parsers\base.py", line 192, in invoke
return self._call_with_config(
^^^^^^^^^^^^^^^^^^^^^^^
File "D:\AnacondaREPO\envs\OpenLLM\Lib\site-packages\langchain_core\runnables\base.py", line 1785, in _call_with_config
context.run(
File "D:\AnacondaREPO\envs\OpenLLM\Lib\site-packages\langchain_core\runnables\config.py", line 397, in call_func_with_variable_args
return func(input, **kwargs) # type: ignore[call-arg]
^^^^^^^^^^^^^^^^^^^^^
File "D:\AnacondaREPO\envs\OpenLLM\Lib\site-packages\langchain_core\output_parsers\base.py", line 193, in
lambda inner_input: self.parse_result([Generation(text=inner_input)]),
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\AnacondaREPO\envs\OpenLLM\Lib\site-packages\langchain_core\output_parsers\json.py", line 87, in parse_result
raise OutputParserException(msg, llm_output=text) from e
langchain_core.exceptions.OutputParserException: Invalid json output: System:
System Info
System Information
Package Information
Optional packages not installed
Other Dependencies
Beta Was this translation helpful? Give feedback.
All reactions