feat: add tencent hunyuan api #1359

Open · wants to merge 5 commits into base: main
Changes from 1 commit
6 changes: 6 additions & 0 deletions config/examples/tencent-hunyuan.yaml
@@ -0,0 +1,6 @@
llm:
  api_type: "hunyuan"  # Tencent Hunyuan LLM
  model: "hunyuan-standard"  # options: hunyuan-lite, hunyuan-standard, hunyuan-standard-256K, hunyuan-pro
  endpoint: "hunyuan.tencentcloudapi.com"  # default: hunyuan.tencentcloudapi.com
  secret_id: ""  # Tencent Cloud account SecretId/SecretKey; keep them confidential. Apply at https://console.cloud.tencent.com/cam/capi
  secret_key: ""
5 changes: 5 additions & 0 deletions metagpt/configs/llm_config.py
@@ -34,6 +34,7 @@ class LLMType(Enum):
    OPENROUTER = "openrouter"
    BEDROCK = "bedrock"
    ARK = "ark"
    HUNYUAN = "hunyuan"  # Tencent Hunyuan

    def __missing__(self, key):
        return self.OPENAI
@@ -59,6 +60,10 @@ class LLMConfig(YamlModel):
    secret_key: Optional[str] = None
    endpoint: Optional[str] = None  # for self-deployed model on the cloud

    # For Tencent Hunyuan
    secret_id: Optional[str] = None
    secret_key: Optional[str] = None
Contributor: secret_key duplicates the existing field on line 60.

Contributor Author: Handled.

    # For Spark(Xunfei), maybe remove later
    app_id: Optional[str] = None
    api_secret: Optional[str] = None
2 changes: 2 additions & 0 deletions metagpt/provider/__init__.py
@@ -19,6 +19,7 @@
from metagpt.provider.anthropic_api import AnthropicLLM
from metagpt.provider.bedrock_api import BedrockLLM
from metagpt.provider.ark_api import ArkLLM
from metagpt.provider.hunyuan_api import HunYuanLLM

__all__ = [
"GeminiLLM",
Expand All @@ -34,4 +35,5 @@
"AnthropicLLM",
"BedrockLLM",
"ArkLLM",
"HunYuanLLM",
]
108 changes: 108 additions & 0 deletions metagpt/provider/hunyuan_api.py
@@ -0,0 +1,108 @@
# -*- coding: utf-8 -*-
import json
import types

from tencentcloud.common import credential
from tencentcloud.common.profile.client_profile import ClientProfile
from tencentcloud.common.profile.http_profile import HttpProfile
from tencentcloud.hunyuan.v20230901.hunyuan_client import HunyuanClient
from tencentcloud.hunyuan.v20230901.models import (
    ChatCompletionsRequest,
    ChatCompletionsResponse,
)

from metagpt.configs.llm_config import LLMConfig, LLMType
from metagpt.const import USE_CONFIG_TIMEOUT
from metagpt.logs import log_llm_stream
from metagpt.provider.base_llm import BaseLLM
from metagpt.provider.llm_provider_registry import register_provider
from metagpt.utils.cost_manager import CostManager
from metagpt.utils.token_counter import HUNYUAN_MODEL_TOKEN_COSTS


@register_provider(LLMType.HUNYUAN)
class HunYuanLLM(BaseLLM):
"""参考资料
腾讯混元大模型产品概述:https://cloud.tencent.com/document/product/1729/104753
Collaborator: It seems Hunyuan now provides an OpenAI-compatible interface (https://cloud.tencent.com/document/product/1729/111007); should we re-create the provider on top of that instead?

Contributor Author: The Hunyuan API recently became compatible with the OpenAI interface spec; at first glance that does look like the better approach.
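
For reference, a minimal sketch of what the compatible route might look like (the base_url and the console-issued API key are assumptions taken from the linked doc, not something this commit wires up):

from openai import OpenAI

# Sketch only: the OpenAI-compatible mode uses a console-issued API key rather than
# the CAM SecretId/SecretKey pair used by the SDK-based provider below.
client = OpenAI(
    api_key="YOUR_HUNYUAN_API_KEY",
    base_url="https://api.hunyuan.cloud.tencent.com/v1",  # assumed endpoint from the compatibility doc
)
resp = client.chat.completions.create(
    model="hunyuan-standard",
    messages=[{"role": "user", "content": "Hello"}],
)
print(resp.choices[0].message.content)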

    Tencent Hunyuan API reference: https://cloud.tencent.com/document/api/1729/105701
    Tencent Hunyuan Python SDK source: https://github.com/TencentCloud/tencentcloud-sdk-python/blob/master/tencentcloud/hunyuan/v20230901/models.py
    Tencent Cloud console API key management: https://console.cloud.tencent.com/cam/capi
    """

    def __init__(self, config: LLMConfig):
        self.config = config
        self.secret_id = self.config.secret_id
        self.secret_key = self.config.secret_key
        self.endpoint = self.config.endpoint
        self.model = self.config.model
        self.region = ""
Collaborator: Should we make this configurable, or use self.config.region_name?

Contributor Author: When self.region is an empty string, requests are auto-routed to the nearest region. I suggest keeping it as-is and turning it into a config option later if the need arises.

        self._init_client()
        self.cost_manager = CostManager(token_costs=HUNYUAN_MODEL_TOKEN_COSTS)

    def _init_client(self):
        """Instantiate an authenticated Hunyuan client."""
        cred = credential.Credential(self.secret_id, self.secret_key)
        httpProfile = HttpProfile()
        httpProfile.endpoint = self.endpoint
        clientProfile = ClientProfile()
        clientProfile.httpProfile = httpProfile
        self.aclient: HunyuanClient = HunyuanClient(cred, self.region, clientProfile)

    def _format_messages(self, messages: list[dict]) -> list[dict]:
        """Convert role/content keys into the SDK's Role/Content fields."""
        new_messages = []
        for message in messages:
            new_messages.append({"Role": message["role"], "Content": message["content"]})
        return new_messages

    def _make_request(
        self,
        messages: list[dict],
        timeout=USE_CONFIG_TIMEOUT,
Collaborator: timeout is not used.

Contributor Author: Removed.

        stream=True,
    ):
        """Build the ChatCompletionsRequest parameter object."""
        req = ChatCompletionsRequest()
        params = {
            "Model": self.model,
            "Messages": self._format_messages(messages),
            "Stream": stream,
        }
        req.from_json_string(json.dumps(params))
        return req

    async def _achat_completion(self, messages: list[dict], timeout=USE_CONFIG_TIMEOUT) -> ChatCompletionsResponse:
Collaborator: Missing get_choice_text implementation.

Contributor Author: There is no need to override the get_choice_text() method.
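
If the BaseLLM default ever fails to handle the SDK's capitalized response fields, an explicit override could look roughly like this (field names follow the tencentcloud ChatCompletionsResponse model; illustration only, not part of this commit):

    def get_choice_text(self, rsp: ChatCompletionsResponse) -> str:
        # Hunyuan responses expose Choices/Message/Content rather than choices/message/content.
        return rsp.Choices[0].Message.Content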

        resp: ChatCompletionsResponse = self.aclient.ChatCompletions(
            self._make_request(messages, timeout, stream=False)
        )
        # Convert usage into a plain dict
        usage = {
            "prompt_tokens": resp.Usage.PromptTokens,
            "completion_tokens": resp.Usage.CompletionTokens,
        }
        self._update_costs(usage)
        return resp

    async def acompletion(self, messages: list[dict], timeout=USE_CONFIG_TIMEOUT) -> ChatCompletionsResponse:
        return await self._achat_completion(messages, timeout)

    async def _achat_completion_stream(self, messages: list[dict], timeout: int = USE_CONFIG_TIMEOUT) -> str:
        resp = self.aclient.ChatCompletions(self._make_request(messages, timeout, stream=True))
        full_reply_content = ""
        usage = {}
        if isinstance(resp, types.GeneratorType):  # streaming response
            for event in resp:
                data = json.loads(event["data"])
                usage = data.get("Usage", {})
                for choice in data["Choices"]:
                    content = choice["Delta"]["Content"]
                    log_llm_stream(content)
                    full_reply_content += content
        self._update_costs(
            {
                "prompt_tokens": usage.get("PromptTokens", 0),
                "completion_tokens": usage.get("CompletionTokens", 0),
            }
        )
        log_llm_stream("\n")
        return full_reply_content
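
A rough end-to-end usage sketch for the new provider (placeholder credentials; constructing LLMConfig directly here is only for illustration, since MetaGPT normally loads it from the YAML config shown above):

import asyncio

from metagpt.configs.llm_config import LLMConfig, LLMType
from metagpt.provider.hunyuan_api import HunYuanLLM

# Placeholders only; real values come from https://console.cloud.tencent.com/cam/capi
config = LLMConfig(
    api_type=LLMType.HUNYUAN,
    model="hunyuan-lite",
    endpoint="hunyuan.tencentcloudapi.com",
    secret_id="YOUR_SECRET_ID",
    secret_key="YOUR_SECRET_KEY",
)
llm = HunYuanLLM(config)
print(asyncio.run(llm.aask("Say hello in one sentence.")))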
10 changes: 10 additions & 0 deletions metagpt/utils/token_counter.py
@@ -79,6 +79,16 @@
"llama3-8b-llama3-8b-instruct": {"prompt": 0.0, "completion": 0.0},
}

"""
腾讯混元大模型价格说明:https://cloud.tencent.com/document/product/1729/97731
"""
HUNYUAN_MODEL_TOKEN_COSTS = {
"hunyuan-pro": {"prompt": 0.03, "completion": 0.1},
"hunyuan-standard": {"prompt": 0.0045, "completion": 0.005},
"hunyuan-standard-256k": {"prompt": 0.015, "completion": 0.06},
"hunyuan-lite": {"prompt": 0.0, "completion": 0.0},
"hunyuan-embedding": {"prompt": 0.0007, "completion": 0.0007},
Contributor: It should be USD instead of CNY. Try:

{
    "hunyuan-pro": {"prompt": "0.00420", "completion": "0.01400"},
    "hunyuan-standard": {"prompt": "0.00063", "completion": "0.00070"},
    "hunyuan-standard-256k": {"prompt": "0.00210", "completion": "0.00840"},
    "hunyuan-lite": {"prompt": "0.0", "completion": "0.0"}
}

Contributor Author: Handled.
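
For context, a standalone illustration of how per-1K-token USD prices turn into a request cost, assuming the same per-1K convention as the other tables in this file (this is not the actual CostManager code):

USD_COSTS = {"hunyuan-standard": {"prompt": 0.00063, "completion": 0.00070}}

def estimate_cost(model: str, prompt_tokens: int, completion_tokens: int) -> float:
    price = USD_COSTS[model]
    return prompt_tokens / 1000 * price["prompt"] + completion_tokens / 1000 * price["completion"]

# 1200 prompt tokens + 300 completion tokens on hunyuan-standard ≈ $0.000966
print(estimate_cost("hunyuan-standard", 1200, 300))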

}

"""
QianFan Token Price https://cloud.baidu.com/doc/WENXINWORKSHOP/s/hlrk4akp7#tokens%E5%90%8E%E4%BB%98%E8%B4%B9
3 changes: 2 additions & 1 deletion requirements.txt
@@ -71,4 +71,5 @@ dashscope==1.14.1
rank-bm25==0.2.2 # for tool recommendation
gymnasium==0.29.1
boto3~=1.34.69
spark_ai_python~=0.3.30
spark_ai_python~=0.3.30
tencentcloud-sdk-python-hunyuan==3.0.1163 # for tencent hunyuan