diff --git a/livekit-plugins/livekit-plugins-openai/livekit/plugins/openai/llm.py b/livekit-plugins/livekit-plugins-openai/livekit/plugins/openai/llm.py
index ab5371d2ef..58d5b51546 100644
--- a/livekit-plugins/livekit-plugins-openai/livekit/plugins/openai/llm.py
+++ b/livekit-plugins/livekit-plugins-openai/livekit/plugins/openai/llm.py
@@ -40,6 +40,7 @@
 from .models import (
     CerebrasChatModels,
     ChatModels,
+    CometAPIChatModels,
     DeepSeekChatModels,
     NebiusChatModels,
     OctoChatModels,
@@ -476,6 +477,56 @@
             top_p=top_p,
         )
 
+    @staticmethod
+    def with_cometapi(
+        *,
+        model: str | CometAPIChatModels = "gpt-5-chat-latest",
+        api_key: str | None = None,
+        base_url: str = "https://api.cometapi.com/v1/",
+        client: openai.AsyncClient | None = None,
+        user: NotGivenOr[str] = NOT_GIVEN,
+        temperature: NotGivenOr[float] = NOT_GIVEN,
+        parallel_tool_calls: NotGivenOr[bool] = NOT_GIVEN,
+        tool_choice: ToolChoice = "auto",
+        reasoning_effort: NotGivenOr[ReasoningEffort] = NOT_GIVEN,
+        safety_identifier: NotGivenOr[str] = NOT_GIVEN,
+        prompt_cache_key: NotGivenOr[str] = NOT_GIVEN,
+        top_p: NotGivenOr[float] = NOT_GIVEN,
+    ) -> LLM:
+        """
+        Create a new instance of CometAPI LLM.
+
+        ``api_key`` must be set to your CometAPI API key, either using the argument or by setting
+        the ``COMETAPI_API_KEY`` environmental variable.
+
+        CometAPI provides access to 500+ AI models from multiple providers including OpenAI,
+        Anthropic, Google, xAI, DeepSeek, and Qwen through a unified API.
+
+        Get your API key at: https://api.cometapi.com/console/token
+        Learn more: https://www.cometapi.com/?utm_source=livekit&utm_campaign=integration&utm_medium=integration&utm_content=integration
+        """
+
+        api_key = api_key or os.environ.get("COMETAPI_API_KEY")
+        if api_key is None:
+            raise ValueError(
+                "CometAPI API key is required, either as argument or set COMETAPI_API_KEY environmental variable"  # noqa: E501
+            )
+
+        return LLM(
+            model=model,
+            api_key=api_key,
+            base_url=base_url,
+            client=client,
+            user=user,
+            temperature=temperature,
+            parallel_tool_calls=parallel_tool_calls,
+            tool_choice=tool_choice,
+            reasoning_effort=reasoning_effort,
+            safety_identifier=safety_identifier,
+            prompt_cache_key=prompt_cache_key,
+            top_p=top_p,
+        )
+
     @staticmethod
     def with_octo(
         *,
diff --git a/livekit-plugins/livekit-plugins-openai/livekit/plugins/openai/models.py b/livekit-plugins/livekit-plugins-openai/livekit/plugins/openai/models.py
index 185cab16f3..927226cbaa 100644
--- a/livekit-plugins/livekit-plugins-openai/livekit/plugins/openai/models.py
+++ b/livekit-plugins/livekit-plugins-openai/livekit/plugins/openai/models.py
@@ -141,6 +141,45 @@
     "deepseek-chat",
 ]
 
+CometAPIChatModels = Literal[
+    # GPT series
+    "gpt-5-chat-latest",
+    "gpt-5",
+    "gpt-5-pro",
+    "gpt-5-nano",
+    "gpt-4.1",
+    "gpt-4o-mini",
+    "o4-mini-2025-04-16",
+    "o3-pro-2025-06-10",
+    "chatgpt-4o-latest",
+    # Claude series
+    "claude-sonnet-4-5-20250929",
+    "claude-opus-4-1-20250805",
+    "claude-opus-4-1-20250805-thinking",
+    "claude-sonnet-4-20250514",
+    "claude-sonnet-4-20250514-thinking",
+    "claude-3-7-sonnet-latest",
+    "claude-3-5-haiku-latest",
+    # Gemini series
+    "gemini-2.5-pro",
+    "gemini-2.5-flash",
+    "gemini-2.5-flash-lite",
+    "gemini-2.0-flash",
+    # Grok series
+    "grok-4-0709",
+    "grok-4-fast-non-reasoning",
+    "grok-4-fast-reasoning",
+    # DeepSeek series
+    "deepseek-v3.1",
+    "deepseek-v3",
+    "deepseek-r1-0528",
+    "deepseek-chat",
+    "deepseek-reasoner",
+    # Qwen series
+    "qwen3-30b-a3b",
+    "qwen3-coder-plus-2025-07-22",
+]
+
 VertexModels = Literal[
     "google/gemini-2.0-flash-exp",
     "google/gemini-1.5-flash",