Getting error with create_csv_agent #27684
SomaKorada07 announced in Ask Dosu (Archived)
Replies: 2 comments · 12 replies
-
There is a similar closed issue related to the error "Object of type 'FieldInfo' is not JSON serializable" [1]. The solution suggested in that issue is to check for an extra comma in your code, which might be causing the serialization error. Here's an example of what to look for:

    class AnalyseInput(BaseModel):
        arg: str = Field(
            ...,
            description="Statistical data of process resource usage.",
        ),  # Remove this comma

Ensure that there are no trailing commas in your field definitions, as they can lead to such errors.
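To see why that comma matters, here is a minimal, self-contained sketch (the class and field names are illustrative, mirroring the snippet above, not taken from the reporter's code):

    import json
    from pydantic import BaseModel, Field

    class BrokenInput(BaseModel):
        # The trailing comma turns the assigned default into a tuple
        # containing a FieldInfo object instead of declaring the field.
        arg: str = Field(
            ...,
            description="Statistical data of process resource usage.",
        ),  # <-- remove this comma

    # Field(...) returns a FieldInfo; if one leaks into a request payload,
    # json.dumps fails exactly like the traceback further down this thread.
    try:
        json.dumps({"arg": Field(..., description="oops")})
    except TypeError as e:
        print(e)  # Object of type FieldInfo is not JSON serializable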
-
@dosu this is not the same error. I do not have any custom class defined; I am using LangChain's create_csv_agent.
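For reference, the setup that produces the traceback below is presumably along these lines (the file name, deployment, and API version are assumptions for illustration; the Azure client is inferred from the openai/lib/azure.py frame in the traceback):

    from langchain_openai import AzureChatOpenAI
    from langchain_experimental.agents.agent_toolkits import create_csv_agent

    # Assumes AZURE_OPENAI_API_KEY / AZURE_OPENAI_ENDPOINT are set in the environment.
    llm = AzureChatOpenAI(
        azure_deployment="gpt-4o",   # assumed deployment name
        api_version="2024-06-01",    # assumed API version
    )
    agent = create_csv_agent(
        llm,
        "data.csv",                  # assumed CSV path
        allow_dangerous_code=True,   # required by recent langchain_experimental
    )
    agent.run("What are the column names?")  # raises the TypeError below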
-
TypeError Traceback (most recent call last)
Cell In[60], line 1
----> 1 agent.run("What are the column names?")
File ~/Documents/Jupyter/jupyter_env/lib/python3.12/site-packages/langchain_core/_api/deprecation.py:182, in deprecated.<locals>.deprecate.<locals>.warning_emitting_wrapper(*args, **kwargs)
180 warned = True
181 emit_warning()
--> 182 return wrapped(*args, **kwargs)
File ~/Documents/Jupyter/jupyter_env/lib/python3.12/site-packages/langchain/chains/base.py:606, in Chain.run(self, callbacks, tags, metadata, *args, **kwargs)
604 if len(args) != 1:
605 raise ValueError("`run` supports only one positional argument.")
--> 606 return self(args[0], callbacks=callbacks, tags=tags, metadata=metadata)[
607 _output_key
608 ]
610 if kwargs and not args:
611 return self(kwargs, callbacks=callbacks, tags=tags, metadata=metadata)[
612 _output_key
613 ]
File ~/Documents/Jupyter/jupyter_env/lib/python3.12/site-packages/langchain_core/_api/deprecation.py:182, in deprecated.<locals>.deprecate.<locals>.warning_emitting_wrapper(*args, **kwargs)
180 warned = True
181 emit_warning()
--> 182 return wrapped(*args, **kwargs)
File ~/Documents/Jupyter/jupyter_env/lib/python3.12/site-packages/langchain/chains/base.py:389, in Chain.__call__(self, inputs, return_only_outputs, callbacks, tags, metadata, run_name, include_run_info)
357 """Execute the chain.
358
359 Args:
(...)
380 `Chain.output_keys`.
381 """
382 config = {
383 "callbacks": callbacks,
384 "tags": tags,
385 "metadata": metadata,
386 "run_name": run_name,
387 }
--> 389 return self.invoke(
390 inputs,
391 cast(RunnableConfig, {k: v for k, v in config.items() if v is not None}),
392 return_only_outputs=return_only_outputs,
393 include_run_info=include_run_info,
394 )
File ~/Documents/Jupyter/jupyter_env/lib/python3.12/site-packages/langchain/chains/base.py:170, in Chain.invoke(self, input, config, **kwargs)
168 except BaseException as e:
169 run_manager.on_chain_error(e)
--> 170 raise e
171 run_manager.on_chain_end(outputs)
173 if include_run_info:
File ~/Documents/Jupyter/jupyter_env/lib/python3.12/site-packages/langchain/chains/base.py:160, in Chain.invoke(self, input, config, **kwargs)
157 try:
158 self._validate_inputs(inputs)
159 outputs = (
--> 160 self._call(inputs, run_manager=run_manager)
161 if new_arg_supported
162 else self._call(inputs)
163 )
165 final_outputs: Dict[str, Any] = self.prep_outputs(
166 inputs, outputs, return_only_outputs
167 )
168 except BaseException as e:
File ~/Documents/Jupyter/jupyter_env/lib/python3.12/site-packages/langchain/agents/agent.py:1629, in AgentExecutor._call(self, inputs, run_manager)
1627 # We now enter the agent loop (until it returns something).
1628 while self._should_continue(iterations, time_elapsed):
-> 1629 next_step_output = self._take_next_step(
1630 name_to_tool_map,
1631 color_mapping,
1632 inputs,
1633 intermediate_steps,
1634 run_manager=run_manager,
1635 )
1636 if isinstance(next_step_output, AgentFinish):
1637 return self._return(
1638 next_step_output, intermediate_steps, run_manager=run_manager
1639 )
File ~/Documents/Jupyter/jupyter_env/lib/python3.12/site-packages/langchain/agents/agent.py:1337, in AgentExecutor._take_next_step(self, name_to_tool_map, color_mapping, inputs, intermediate_steps, run_manager)
1326 def _take_next_step(
1327 self,
1328 name_to_tool_map: Dict[str, BaseTool],
(...)
1332 run_manager: Optional[CallbackManagerForChainRun] = None,
1333 ) -> Union[AgentFinish, List[Tuple[AgentAction, str]]]:
1334 return self._consume_next_step(
1335 [
1336 a
-> 1337 for a in self._iter_next_step(
1338 name_to_tool_map,
1339 color_mapping,
1340 inputs,
1341 intermediate_steps,
1342 run_manager,
1343 )
1344 ]
1345 )
File ~/Documents/Jupyter/jupyter_env/lib/python3.12/site-packages/langchain/agents/agent.py:1363, in AgentExecutor._iter_next_step(self, name_to_tool_map, color_mapping, inputs, intermediate_steps, run_manager)
1360 intermediate_steps = self._prepare_intermediate_steps(intermediate_steps)
1362 # Call the LLM to see what to do.
-> 1363 output = self._action_agent.plan(
1364 intermediate_steps,
1365 callbacks=run_manager.get_child() if run_manager else None,
1366 **inputs,
1367 )
1368 except OutputParserException as e:
1369 if isinstance(self.handle_parsing_errors, bool):
File ~/Documents/Jupyter/jupyter_env/lib/python3.12/site-packages/langchain/agents/agent.py:464, in RunnableAgent.plan(self, intermediate_steps, callbacks, **kwargs)
456 final_output: Any = None
457 if self.stream_runnable:
458 # Use streaming to make sure that the underlying LLM is invoked in a
459 # streaming
(...)
462 # Because the response from the plan is not a generator, we need to
463 # accumulate the output into final output and return that.
--> 464 for chunk in self.runnable.stream(inputs, config={"callbacks": callbacks}):
465 if final_output is None:
466 final_output = chunk
File ~/Documents/Jupyter/jupyter_env/lib/python3.12/site-packages/langchain_core/runnables/base.py:3407, in RunnableSequence.stream(self, input, config, **kwargs)
3401 def stream(
3402 self,
3403 input: Input,
3404 config: Optional[RunnableConfig] = None,
3405 **kwargs: Optional[Any],
3406 ) -> Iterator[Output]:
-> 3407 yield from self.transform(iter([input]), config, **kwargs)
File ~/Documents/Jupyter/jupyter_env/lib/python3.12/site-packages/langchain_core/runnables/base.py:3394, in RunnableSequence.transform(self, input, config, **kwargs)
3388 def transform(
3389 self,
3390 input: Iterator[Input],
3391 config: Optional[RunnableConfig] = None,
3392 **kwargs: Optional[Any],
3393 ) -> Iterator[Output]:
-> 3394 yield from self._transform_stream_with_config(
3395 input,
3396 self._transform,
3397 patch_config(config, run_name=(config or {}).get("run_name") or self.name),
3398 **kwargs,
3399 )
File ~/Documents/Jupyter/jupyter_env/lib/python3.12/site-packages/langchain_core/runnables/base.py:2197, in Runnable._transform_stream_with_config(self, input, transformer, config, run_type, **kwargs)
2195 try:
2196 while True:
-> 2197 chunk: Output = context.run(next, iterator) # type: ignore
2198 yield chunk
2199 if final_output_supported:
File ~/Documents/Jupyter/jupyter_env/lib/python3.12/site-packages/langchain_core/runnables/base.py:3357, in RunnableSequence._transform(self, input, run_manager, config, **kwargs)
3354 else:
3355 final_pipeline = step.transform(final_pipeline, config)
-> 3357 yield from final_pipeline
File ~/Documents/Jupyter/jupyter_env/lib/python3.12/site-packages/langchain_core/runnables/base.py:1413, in Runnable.transform(self, input, config, **kwargs)
1410 final: Input
1411 got_first_val = False
-> 1413 for ichunk in input:
1414 # The default implementation of transform is to buffer input and
1415 # then call stream.
1416 # It'll attempt to gather all input into a single chunk using
1417 # the `+` operator.
1418 # If the input is not addable, then we'll assume that we can
1419 # only operate on the last chunk,
1420 # and we'll iterate until we get to the last chunk.
1421 if not got_first_val:
1422 final = ichunk
File ~/Documents/Jupyter/jupyter_env/lib/python3.12/site-packages/langchain_core/runnables/base.py:5561, in RunnableBindingBase.transform(self, input, config, **kwargs)
5555 def transform(
5556 self,
5557 input: Iterator[Input],
5558 config: Optional[RunnableConfig] = None,
5559 **kwargs: Any,
5560 ) -> Iterator[Output]:
-> 5561 yield from self.bound.transform(
5562 input,
5563 self._merge_configs(config),
5564 **{**self.kwargs, **kwargs},
5565 )
File ~/Documents/Jupyter/jupyter_env/lib/python3.12/site-packages/langchain_core/runnables/base.py:1431, in Runnable.transform(self, input, config, **kwargs)
1428 final = ichunk
1430 if got_first_val:
-> 1431 yield from self.stream(final, config, **kwargs)
File ~/Documents/Jupyter/jupyter_env/lib/python3.12/site-packages/langchain_core/language_models/chat_models.py:420, in BaseChatModel.stream(self, input, config, stop, **kwargs)
413 except BaseException as e:
414 run_manager.on_llm_error(
415 e,
416 response=LLMResult(
417 generations=[[generation]] if generation else []
418 ),
419 )
--> 420 raise e
421 else:
422 run_manager.on_llm_end(LLMResult(generations=[[generation]]))
File ~/Documents/Jupyter/jupyter_env/lib/python3.12/site-packages/langchain_core/language_models/chat_models.py:400, in BaseChatModel.stream(self, input, config, stop, **kwargs)
397 self.rate_limiter.acquire(blocking=True)
399 try:
--> 400 for chunk in self._stream(messages, stop=stop, **kwargs):
401 if chunk.message.id is None:
402 chunk.message.id = f"run-{run_manager.run_id}"
File ~/Documents/Jupyter/jupyter_env/lib/python3.12/site-packages/langchain_openai/chat_models/base.py:656, in BaseChatOpenAI._stream(self, messages, stop, run_manager, **kwargs)
654 base_generation_info = {"headers": dict(raw_response.headers)}
655 else:
--> 656 response = self.client.create(**payload)
657 with response:
658 is_first_chunk = True
File ~/Documents/Jupyter/jupyter_env/lib/python3.12/site-packages/openai/_utils/_utils.py:274, in required_args.<locals>.inner.<locals>.wrapper(*args, **kwargs)
272 msg = f"Missing required argument: {quote(missing[0])}"
273 raise TypeError(msg)
--> 274 return func(*args, **kwargs)
File ~/Documents/Jupyter/jupyter_env/lib/python3.12/site-packages/openai/resources/chat/completions.py:815, in Completions.create(self, messages, model, audio, frequency_penalty, function_call, functions, logit_bias, logprobs, max_completion_tokens, max_tokens, metadata, modalities, n, parallel_tool_calls, presence_penalty, response_format, seed, service_tier, stop, store, stream, stream_options, temperature, tool_choice, tools, top_logprobs, top_p, user, extra_headers, extra_query, extra_body, timeout)
775 @required_args(["messages", "model"], ["messages", "model", "stream"])
776 def create(
777 self,
(...)
812 timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
813 ) -> ChatCompletion | Stream[ChatCompletionChunk]:
814 validate_response_format(response_format)
--> 815 return self._post(
816 "/chat/completions",
817 body=maybe_transform(
818 {
819 "messages": messages,
820 "model": model,
821 "audio": audio,
822 "frequency_penalty": frequency_penalty,
823 "function_call": function_call,
824 "functions": functions,
825 "logit_bias": logit_bias,
826 "logprobs": logprobs,
827 "max_completion_tokens": max_completion_tokens,
828 "max_tokens": max_tokens,
829 "metadata": metadata,
830 "modalities": modalities,
831 "n": n,
832 "parallel_tool_calls": parallel_tool_calls,
833 "presence_penalty": presence_penalty,
834 "response_format": response_format,
835 "seed": seed,
836 "service_tier": service_tier,
837 "stop": stop,
838 "store": store,
839 "stream": stream,
840 "stream_options": stream_options,
841 "temperature": temperature,
842 "tool_choice": tool_choice,
843 "tools": tools,
844 "top_logprobs": top_logprobs,
845 "top_p": top_p,
846 "user": user,
847 },
848 completion_create_params.CompletionCreateParams,
849 ),
850 options=make_request_options(
851 extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
852 ),
853 cast_to=ChatCompletion,
854 stream=stream or False,
855 stream_cls=Stream[ChatCompletionChunk],
856 )
File ~/Documents/Jupyter/jupyter_env/lib/python3.12/site-packages/openai/_base_client.py:1277, in SyncAPIClient.post(self, path, cast_to, body, options, files, stream, stream_cls)
1263 def post(
1264 self,
1265 path: str,
(...)
1272 stream_cls: type[_StreamT] | None = None,
1273 ) -> ResponseT | _StreamT:
1274 opts = FinalRequestOptions.construct(
1275 method="post", url=path, json_data=body, files=to_httpx_files(files), **options
1276 )
-> 1277 return cast(ResponseT, self.request(cast_to, opts, stream=stream, stream_cls=stream_cls))
File ~/Documents/Jupyter/jupyter_env/lib/python3.12/site-packages/openai/_base_client.py:954, in SyncAPIClient.request(self, cast_to, options, remaining_retries, stream, stream_cls)
951 else:
952 retries_taken = 0
--> 954 return self._request(
955 cast_to=cast_to,
956 options=options,
957 stream=stream,
958 stream_cls=stream_cls,
959 retries_taken=retries_taken,
960 )
File ~/Documents/Jupyter/jupyter_env/lib/python3.12/site-packages/openai/_base_client.py:980, in SyncAPIClient._request(self, cast_to, options, retries_taken, stream, stream_cls)
977 options = self._prepare_options(options)
979 remaining_retries = options.get_max_retries(self.max_retries) - retries_taken
--> 980 request = self._build_request(options, retries_taken=retries_taken)
981 self._prepare_request(request)
983 kwargs: HttpxSendArgs = {}
File ~/Documents/Jupyter/jupyter_env/lib/python3.12/site-packages/openai/lib/azure.py:64, in BaseAzureClient._build_request(self, options, retries_taken)
61 if model is not None and not "/deployments" in str(self.base_url):
62 options.url = f"/deployments/{model}{options.url}"
---> 64 return super()._build_request(options, retries_taken=retries_taken)
File ~/Documents/Jupyter/jupyter_env/lib/python3.12/site-packages/openai/_base_client.py:505, in BaseClient._build_request(self, options, retries_taken)
502 kwargs["extensions"] = {"sni_hostname": prepared_url.host.replace("_", "-")}
504 # TODO: report this error to httpx
--> 505 return self._client.build_request( # pyright: ignore[reportUnknownMemberType]
506 headers=headers,
507 timeout=self.timeout if isinstance(options.timeout, NotGiven) else options.timeout,
508 method=options.method,
509 url=prepared_url,
510 # the `Query` type that we use is incompatible with qs'
511 # `Params` type as it needs to be typed as `Mapping[str, object]`
512 # so that passing a `TypedDict` doesn't cause an error.
513 # microsoft/pyright#3526 (comment)
514 params=self.qs.stringify(cast(Mapping[str, Any], params)) if params else None,
515 json=json_data,
516 files=files,
517 **kwargs,
518 )
File ~/Documents/Jupyter/jupyter_env/lib/python3.12/site-packages/httpx/_client.py:357, in BaseClient.build_request(self, method, url, content, data, files, json, params, headers, cookies, timeout, extensions)
351 timeout = (
352 self.timeout
353 if isinstance(timeout, UseClientDefault)
354 else Timeout(timeout)
355 )
356 extensions = dict(**extensions, timeout=timeout.as_dict())
--> 357 return Request(
358 method,
359 url,
360 content=content,
361 data=data,
362 files=files,
363 json=json,
364 params=params,
365 headers=headers,
366 cookies=cookies,
367 extensions=extensions,
368 )
File ~/Documents/Jupyter/jupyter_env/lib/python3.12/site-packages/httpx/_models.py:340, in Request.__init__(self, method, url, params, headers, cookies, content, data, files, json, stream, extensions)
338 if stream is None:
339 content_type: str | None = self.headers.get("content-type")
--> 340 headers, stream = encode_request(
341 content=content,
342 data=data,
343 files=files,
344 json=json,
345 boundary=get_multipart_boundary_from_content_type(
346 content_type=content_type.encode(self.headers.encoding)
347 if content_type
348 else None
349 ),
350 )
351 self._prepare(headers)
352 self.stream = stream
File ~/Documents/Jupyter/jupyter_env/lib/python3.12/site-packages/httpx/_content.py:212, in encode_request(content, data, files, json, boundary)
210 return encode_urlencoded_data(data)
211 elif json is not None:
--> 212 return encode_json(json)
214 return {}, ByteStream(b"")
File ~/Documents/Jupyter/jupyter_env/lib/python3.12/site-packages/httpx/_content.py:175, in encode_json(json)
174 def encode_json(json: Any) -> tuple[dict[str, str], ByteStream]:
--> 175 body = json_dumps(json).encode("utf-8")
176 content_length = str(len(body))
177 content_type = "application/json"
File /Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/json/__init__.py:231, in dumps(obj, skipkeys, ensure_ascii, check_circular, allow_nan, cls, indent, separators, default, sort_keys, **kw)
226 # cached encoder
227 if (not skipkeys and ensure_ascii and
228 check_circular and allow_nan and
229 cls is None and indent is None and separators is None and
230 default is None and not sort_keys and not kw):
--> 231 return _default_encoder.encode(obj)
232 if cls is None:
233 cls = JSONEncoder
File /Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/json/encoder.py:200, in JSONEncoder.encode(self, o)
196 return encode_basestring(o)
197 # This doesn't pass the iterator directly to ''.join() because the
198 # exceptions aren't as detailed. The list call should be roughly
199 # equivalent to the PySequence_Fast that ''.join() would do.
--> 200 chunks = self.iterencode(o, _one_shot=True)
201 if not isinstance(chunks, (list, tuple)):
202 chunks = list(chunks)
File /Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/json/encoder.py:258, in JSONEncoder.iterencode(self, o, _one_shot)
253 else:
254 _iterencode = _make_iterencode(
255 markers, self.default, _encoder, self.indent, floatstr,
256 self.key_separator, self.item_separator, self.sort_keys,
257 self.skipkeys, _one_shot)
--> 258 return _iterencode(o, 0)
File /Library/Frameworks/Python.framework/Versions/3.12/lib/python3.12/json/encoder.py:180, in JSONEncoder.default(self, o)
161 def default(self, o):
162 """Implement this method in a subclass such that it returns
163 a serializable object for ``o``, or calls the base implementation
164 (to raise a ``TypeError``).
(...)
178
179 """
--> 180 raise TypeError(f'Object of type {o.__class__.__name__} '
181 f'is not JSON serializable')
TypeError: Object of type FieldInfo is not JSON serializable
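The failing frame is httpx's json_dumps over the request body, which means a pydantic FieldInfo object made it into the chat-completions payload that langchain_openai passes to self.client.create(**payload) above. One way to locate the offending key is to walk the payload with a small helper. This is a hedged debugging sketch: payload stands for that dict, which you would capture with a debugger or by patching the call site.

    import json

    def find_unserializable(obj, path="payload"):
        """Print the path of every object the stdlib json encoder rejects."""
        if isinstance(obj, dict):
            for key, value in obj.items():
                find_unserializable(value, f"{path}.{key}")
        elif isinstance(obj, (list, tuple)):
            for index, value in enumerate(obj):
                find_unserializable(value, f"{path}[{index}]")
        else:
            try:
                json.dumps(obj)
            except TypeError:
                print(f"{path}: {type(obj).__name__} is not JSON serializable")

    # Example: find_unserializable(payload) would print something like
    # "payload....: FieldInfo is not JSON serializable", pinpointing where
    # the FieldInfo entered the request.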