5 changes: 4 additions & 1 deletion camel/configs/__init__.py
@@ -12,6 +12,7 @@
# limitations under the License.
# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
from .aiml_config import AIML_API_PARAMS, AIMLConfig
from .aihubmix_config import AIHUBMIX_API_PARAMS, AihubMixConfig
from .amd_config import AMD_API_PARAMS, AMDConfig
from .anthropic_config import ANTHROPIC_API_PARAMS, AnthropicConfig
from .base_config import BaseConfig
@@ -57,6 +58,8 @@
'BaseConfig',
'ChatGPTConfig',
'OPENAI_API_PARAMS',
'AihubMixConfig',
'AIHUBMIX_API_PARAMS',
'AnthropicConfig',
'ANTHROPIC_API_PARAMS',
'GROQ_API_PARAMS',
@@ -127,4 +130,4 @@
'QIANFAN_API_PARAMS',
'CrynuxConfig',
'CRYNUX_API_PARAMS',
]
]
90 changes: 90 additions & 0 deletions camel/configs/aihubmix_config.py
@@ -0,0 +1,90 @@
# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
from __future__ import annotations

from typing import Dict, Optional, Sequence, Type, Union

from pydantic import BaseModel

from camel.configs.base_config import BaseConfig


class AihubMixConfig(BaseConfig):
r"""Defines the parameters for generating chat completions using the
AihubMix API.

Args:
temperature (float, optional): Sampling temperature to use, between
:obj:`0` and :obj:`2`. Higher values make the output more random,
while lower values make it more focused and deterministic.
(default: :obj:`0.8`)
max_tokens (int, optional): The maximum number of tokens to generate
in the chat completion. The total length of input tokens and
generated tokens is limited by the model's context length.
(default: :obj:`1024`)
top_p (float, optional): An alternative to sampling with temperature,
called nucleus sampling, where the model considers the results of
the tokens with top_p probability mass. So :obj:`0.1` means only
the tokens comprising the top 10% probability mass are considered.
(default: :obj:`1`)
frequency_penalty (float, optional): Number between :obj:`-2.0` and
:obj:`2.0`. Positive values penalize new tokens based on their
existing frequency in the text so far, decreasing the model's
likelihood to repeat the same line verbatim.
(default: :obj:`0`)
presence_penalty (float, optional): Number between :obj:`-2.0` and
:obj:`2.0`. Positive values penalize new tokens based on whether
they appear in the text so far, increasing the model's likelihood
to talk about new topics.
(default: :obj:`0`)
stream (bool, optional): If True, partial message deltas will be sent
as data-only server-sent events as they become available.
(default: :obj:`False`)
web_search_options (dict, optional): Options for the model's web search
tool; only supported by specific search-enabled models.
(default: :obj:`None`)
tools (list[FunctionTool], optional): A list of tools the model may
call. Currently, only functions are supported as a tool. Use this
to provide a list of functions the model may generate JSON inputs
for. A max of 128 functions are supported.
tool_choice (Union[dict[str, str], str], optional): Controls which (if
any) tool is called by the model. :obj:`"none"` means the model
will not call any tool and instead generates a message.
:obj:`"auto"` means the model can pick between generating a
message or calling one or more tools. :obj:`"required"` means the
model must call one or more tools. Specifying a particular tool
via {"type": "function", "function": {"name": "my_function"}}
forces the model to call that tool. :obj:`"none"` is the default
when no tools are present. :obj:`"auto"` is the default if tools
are present.
parallel_tool_calls (bool, optional): A parameter specifying whether
the model should call tools in parallel or not.
(default: :obj:`None`)
extra_headers (Optional[Dict[str, str]], optional): Extra headers to use
for the model. (default: :obj:`None`)
"""

temperature: Optional[float] = 0.8
max_tokens: Optional[int] = 1024
top_p: Optional[float] = 1.0
frequency_penalty: Optional[float] = 0.0
presence_penalty: Optional[float] = 0.0
stream: Optional[bool] = False
web_search_options: Optional[Dict] = None
tool_choice: Optional[Union[Dict[str, str], str]] = None
parallel_tool_calls: Optional[bool] = None
extra_headers: Optional[Dict[str, str]] = None


AIHUBMIX_API_PARAMS = {param for param in AihubMixConfig.model_fields.keys()}
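
A minimal usage sketch for the config above, assuming only that AihubMixConfig inherits BaseConfig.as_dict() (the same helper the example script further down uses with ChatGPTConfig); the parameter values are arbitrary placeholders.

from camel.configs import AIHUBMIX_API_PARAMS, AihubMixConfig

# Override a couple of sampling parameters; everything else keeps the
# defaults declared on AihubMixConfig.
config = AihubMixConfig(temperature=0.3, max_tokens=512)

# as_dict() comes from BaseConfig and yields the keyword arguments that are
# eventually forwarded to the OpenAI-compatible client.
config_dict = config.as_dict()

# AIHUBMIX_API_PARAMS is just the set of field names declared on the config.
print(sorted(AIHUBMIX_API_PARAMS))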
2 changes: 2 additions & 0 deletions camel/models/__init__.py
@@ -11,6 +11,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
from .aihubmix_model import AihubMixModel
from .aiml_model import AIMLModel
from .amd_model import AMDModel
from .anthropic_model import AnthropicModel
@@ -106,4 +107,5 @@
'WatsonXModel',
'QianfanModel',
'CrynuxModel',
'AihubMixModel',
]
83 changes: 83 additions & 0 deletions camel/models/aihubmix_model.py
@@ -0,0 +1,83 @@
# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
import os
from typing import Any, Dict, Optional, Union

from camel.models.openai_compatible_model import OpenAICompatibleModel
from camel.types import ModelType
from camel.utils import (
BaseTokenCounter,
api_keys_required,
)


class AihubMixModel(OpenAICompatibleModel):
r"""AihubMix API in a unified OpenAICompatibleModel interface.

Args:
model_type (Union[ModelType, str]): Model for which a backend is
created.
model_config_dict (Optional[Dict[str, Any]], optional): A dictionary
that will be fed into OpenAI client. If :obj:`None`,
:obj:`{}` will be used.
(default: :obj:`None`)
api_key (Optional[str], optional): The API key for authenticating with
AihubMix service. (default: :obj:`None`)
url (Optional[str], optional): The URL to AihubMix service. If
not provided, :obj:`https://aihubmix.com/v1` will be used.
(default: :obj:`None`)
token_counter (Optional[BaseTokenCounter], optional): Token counter to
use for the model. If not provided, :obj:`OpenAITokenCounter(
ModelType.GPT_4O_MINI)` will be used.
(default: :obj:`None`)
timeout (Optional[float], optional): The timeout value in seconds for
API calls. If not provided, will fall back to the MODEL_TIMEOUT
environment variable or default to 180 seconds.
(default: :obj:`None`)
max_retries (int, optional): Maximum number of retries for API calls.
(default: :obj:`3`)
**kwargs (Any): Additional arguments to pass to the client
initialization.
"""

@api_keys_required([("api_key", "AIHUBMIX_API_KEY")])
def __init__(
self,
model_type: Union[ModelType, str],
model_config_dict: Optional[Dict[str, Any]] = None,
api_key: Optional[str] = None,
url: Optional[str] = None,
token_counter: Optional[BaseTokenCounter] = None,
timeout: Optional[float] = None,
max_retries: int = 3,
**kwargs: Any,
) -> None:
if model_config_dict is None:
model_config_dict = {}
api_key = api_key or os.environ.get("AIHUBMIX_API_KEY")
url = url or os.environ.get(
"AIHUBMIX_API_BASE_URL",
"https://aihubmix.com/v1",
)
timeout = timeout or float(os.environ.get("MODEL_TIMEOUT", 180))
super().__init__(
model_type=model_type,
model_config_dict=model_config_dict,
api_key=api_key,
url=url,
token_counter=token_counter,
timeout=timeout,
max_retries=max_retries,
**kwargs,
)
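
A minimal sketch of instantiating the backend directly, assuming AIHUBMIX_API_KEY is set in the environment; the model name is a placeholder and must be one that AihubMix actually serves.

from camel.models import AihubMixModel

# With api_key and url omitted, the constructor falls back to the
# AIHUBMIX_API_KEY and AIHUBMIX_API_BASE_URL environment variables,
# defaulting the base URL to https://aihubmix.com/v1.
model = AihubMixModel(
    model_type="gpt-4o-mini",  # placeholder; pick a model AihubMix serves
    model_config_dict={"temperature": 0.2},
)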
2 changes: 2 additions & 0 deletions camel/models/model_factory.py
@@ -23,6 +23,7 @@
from camel.models.base_model import BaseModelBackend
from camel.models.cohere_model import CohereModel
from camel.models.cometapi_model import CometAPIModel
from camel.models.aihubmix_model import AihubMixModel
from camel.models.crynux_model import CrynuxModel
from camel.models.deepseek_model import DeepSeekModel
from camel.models.gemini_model import GeminiModel
@@ -107,6 +108,7 @@ class ModelFactory:
ModelPlatformType.WATSONX: WatsonXModel,
ModelPlatformType.QIANFAN: QianfanModel,
ModelPlatformType.CRYNUX: CrynuxModel,
ModelPlatformType.AIHUBMIX: AihubMixModel,
}

@staticmethod
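
With the registry entry above in place, ModelFactory.create can resolve the new platform; a minimal sketch, again with a placeholder model name and AIHUBMIX_API_KEY taken from the environment.

from camel.configs import AihubMixConfig
from camel.models import ModelFactory
from camel.types import ModelPlatformType

# The factory looks up ModelPlatformType.AIHUBMIX in the table above and
# forwards the remaining arguments to AihubMixModel.
model = ModelFactory.create(
    model_platform=ModelPlatformType.AIHUBMIX,
    model_type="gpt-4o-mini",  # placeholder model name
    model_config_dict=AihubMixConfig(temperature=0.2).as_dict(),
)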
6 changes: 6 additions & 0 deletions camel/types/enums.py
@@ -1718,6 +1718,7 @@ class ModelPlatformType(Enum):
WATSONX = "watsonx"
QIANFAN = "qianfan"
CRYNUX = "crynux"
AIHUBMIX = "aihubmix"

@classmethod
def from_name(cls, name):
@@ -1898,6 +1899,11 @@ def is_crynux(self) -> bool:
r"""Returns whether this platform is Crynux."""
return self is ModelPlatformType.CRYNUX

@property
def is_aihubmix(self) -> bool:
r"""Returns whether this platform is AihubMix."""
return self is ModelPlatformType.AIHUBMIX


class AudioModelType(Enum):
TTS_1 = "tts-1"
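
A quick check of the new enum member and its helper property:

from camel.types import ModelPlatformType

platform = ModelPlatformType.AIHUBMIX
print(platform.value)        # "aihubmix"
print(platform.is_aihubmix)  # True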
100 changes: 100 additions & 0 deletions examples/models/aihubmix_model_example.py
@@ -0,0 +1,100 @@
#!/usr/bin/env python3

# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========

r"""Example of using AIHubMix models in CAMEL."""

import os

from camel.agents import ChatAgent
from camel.configs import ChatGPTConfig
from camel.models import ModelFactory
from camel.types import ModelPlatformType, ModelType


def test_aihubmix_model() -> None:
r"""Test AIHubMix model using ModelFactory."""

# Set up the model
# You need to set AIHUBMIX_API_KEY in your environment variables
# or directly pass it as api_key parameter
api_key = os.environ.get("AIHUBMIX_API_KEY")
if not api_key:
print("Skipping AIHubMix test - AIHUBMIX_API_KEY not set")
return

model = ModelFactory.create(
model_platform=ModelPlatformType.AIHUBMIX,
model_type=ModelType.GPT_5,
api_key=api_key,
model_config_dict=ChatGPTConfig(temperature=0.2).as_dict(),
)

# Set up the agent
agent = ChatAgent(
system_message="You are a helpful assistant.",
model=model,
)

# Test the agent
user_msg = "Say hi to CAMEL AI community in a friendly way."
response = agent.step(user_msg)

print(f"User message: {user_msg}")
print(f"Agent response: {response.msg.content}")


def test_aihubmix_with_custom_model() -> None:
r"""Test AIHubMix model with a custom model name."""

# Set up the model
api_key = os.environ.get("AIHUBMIX_API_KEY")
if not api_key:
print("Skipping AIHubMix custom model test - AIHUBMIX_API_KEY not set")
return

model = ModelFactory.create(
model_platform=ModelPlatformType.AIHUBMIX,
model_type="gpt-4", # Using a string directly
api_key=api_key,
model_config_dict=ChatGPTConfig(temperature=0.2).as_dict(),
)

# Set up the agent
agent = ChatAgent(
system_message="You are a helpful assistant.",
model=model,
)

# Test the agent
user_msg = "Explain what is an AI agent in one sentence."
response = agent.step(user_msg)

print(f"User message: {user_msg}")
print(f"Agent response: {response.msg.content}")


def main():
r"""Main function to test AIHubMix models."""
print("Testing AIHubMix models:")
print("=" * 40)

test_aihubmix_model()
print("-" * 40)
test_aihubmix_with_custom_model()


if __name__ == "__main__":
main()