Skip to content

Commit 633ca5f

Browse files
fix: stream bugs and update siliconflow_model_example.py (#2358)
Co-authored-by: Wendong <w3ndong.fan@gmail.com> Co-authored-by: Wendong-Fan <133094783+Wendong-Fan@users.noreply.github.com>
1 parent 19b6cea commit 633ca5f

File tree

3 files changed

+17
-4
lines changed

3 files changed

+17
-4
lines changed

camel/agents/chat_agent.py

Lines changed: 6 additions & 2 deletions
```diff
@@ -1311,7 +1311,9 @@ def _handle_stream_response(
         response_id: str = ""
         # All choices in one response share one role
         for chunk in response:
-            response_id = chunk.id
+            # Some model platforms like siliconflow may return None for the
+            # chunk.id
+            response_id = chunk.id if chunk.id else str(uuid.uuid4())
             self._handle_chunk(
                 chunk, content_dict, finish_reasons_dict, output_messages
             )
```
```diff
@@ -1351,7 +1353,9 @@ async def _ahandle_stream_response(
         response_id: str = ""
         # All choices in one response share one role
         async for chunk in response:
-            response_id = chunk.id
+            # Some model platforms like siliconflow may return None for the
+            # chunk.id
+            response_id = chunk.id if chunk.id else str(uuid.uuid4())
             self._handle_chunk(
                 chunk, content_dict, finish_reasons_dict, output_messages
             )
```

camel/types/enums.py

Lines changed: 8 additions & 1 deletion
```diff
@@ -15,8 +15,11 @@
 from enum import Enum, EnumMeta
 from typing import cast
 
+from camel.logger import get_logger
 from camel.types.unified_model_type import UnifiedModelType
 
+logger = get_logger(__name__)
+
 
 class RoleType(Enum):
     ASSISTANT = "assistant"
```
```diff
@@ -1200,7 +1203,11 @@ def token_limit(self) -> int:
         }:
             return 10_000_000
         else:
-            raise ValueError("Unknown model type")
+            logger.warning(
+                f"Unknown model type {self}, set maximum token limit "
+                f"to 999_999_999"
+            )
+            return 999_999_999
 
 
 class EmbeddingModelType(Enum):
```

examples/models/siliconflow_model_example.py

Lines changed: 3 additions & 1 deletion
```diff
@@ -19,7 +19,9 @@
 model = ModelFactory.create(
     model_platform=ModelPlatformType.SILICONFLOW,
     model_type="deepseek-ai/DeepSeek-R1",
-    model_config_dict=SiliconFlowConfig(temperature=0.2).as_dict(),
+    model_config_dict=SiliconFlowConfig(
+        temperature=0.2, stream=True
+    ).as_dict(),
 )
 
 # Define system message
```

0 commit comments

Comments (0)