diff --git a/packages/core/src/index.ts b/packages/core/src/index.ts
index 3cc6a523..5128a08c 100644
--- a/packages/core/src/index.ts
+++ b/packages/core/src/index.ts
@@ -7,3 +7,10 @@ export type {
 export { executeRequest } from './http-client.js';
 export { getAiCoreDestination } from './context.js';
 export { OpenApiRequestBuilder } from './openapi-request-builder.js';
+export type {
+  AzureOpenAiChatModel,
+  AzureOpenAiEmbeddingModel,
+  GcpVertexAiChatModel,
+  AwsBedrockChatModel,
+  ChatModel
+} from './model-types.js';
diff --git a/packages/core/src/internal.ts b/packages/core/src/internal.ts
index 7af67b5d..979cc12c 100644
--- a/packages/core/src/internal.ts
+++ b/packages/core/src/internal.ts
@@ -1,3 +1,4 @@
 export * from './context.js';
 export * from './http-client.js';
 export * from './openapi-request-builder.js';
+export * from './model-types.js';
diff --git a/packages/core/src/model-types.ts b/packages/core/src/model-types.ts
new file mode 100644
index 00000000..b42139da
--- /dev/null
+++ b/packages/core/src/model-types.ts
@@ -0,0 +1,47 @@
+type LiteralUnion<T, U = string> = T | (U & Record<never, never>);
+
+/**
+ * Azure OpenAI models for chat completion.
+ */
+export type AzureOpenAiChatModel = LiteralUnion<
+  | 'gpt-4o-mini'
+  | 'gpt-4o'
+  | 'gpt-4'
+  | 'gpt-4-32k'
+  | 'gpt-35-turbo'
+  | 'gpt-35-turbo-0125'
+  | 'gpt-35-turbo-16k'
+>;
+
+/**
+ * Azure OpenAI models for embedding.
+ */
+export type AzureOpenAiEmbeddingModel = LiteralUnion<
+  'text-embedding-ada-002' | 'text-embedding-3-small' | 'text-embedding-3-large'
+>;
+
+/**
+ * GCP Vertex AI models for chat completion.
+ */
+export type GcpVertexAiChatModel = LiteralUnion<
+  'gemini-1.0-pro' | 'gemini-1.5-pro' | 'gemini-1.5-flash' | 'chat-bison'
+>;
+
+/**
+ * AWS Bedrock models for chat completion.
+ */
+export type AwsBedrockChatModel = LiteralUnion<
+  | 'amazon--titan-text-express'
+  | 'amazon--titan-text-lite'
+  | 'anthropic--claude-3-haiku'
+  | 'anthropic--claude-3-opus'
+  | 'anthropic--claude-3-sonnet'
+  | 'anthropic--claude-3.5-sonnet'
+>;
+
+/**
+ * All available models for chat completion.
+ */
+export type ChatModel = LiteralUnion<
+  AzureOpenAiChatModel | GcpVertexAiChatModel | AwsBedrockChatModel
+>;
diff --git a/packages/foundation-models/src/index.ts b/packages/foundation-models/src/index.ts
index ddb00d11..d891b6bd 100644
--- a/packages/foundation-models/src/index.ts
+++ b/packages/foundation-models/src/index.ts
@@ -1,6 +1,4 @@
 export type {
-  OpenAiChatModel,
-  OpenAiEmbeddingModel,
   OpenAiChatMessage,
   OpenAiChatSystemMessage,
   OpenAiChatUserMessage,
diff --git a/packages/foundation-models/src/openai/openai-chat-client.ts b/packages/foundation-models/src/openai/openai-chat-client.ts
index 61babdab..28140bb1 100644
--- a/packages/foundation-models/src/openai/openai-chat-client.ts
+++ b/packages/foundation-models/src/openai/openai-chat-client.ts
@@ -1,12 +1,13 @@
-import { type CustomRequestConfig, executeRequest } from '@sap-ai-sdk/core';
+import {
+  type CustomRequestConfig,
+  type AzureOpenAiChatModel,
+  executeRequest
+} from '@sap-ai-sdk/core';
 import {
   getDeploymentId,
   type ModelDeployment
 } from '@sap-ai-sdk/ai-api/internal.js';
-import type {
-  OpenAiChatCompletionParameters,
-  OpenAiChatModel
-} from './openai-types.js';
+import type { OpenAiChatCompletionParameters } from './openai-types.js';
 import { OpenAiChatCompletionResponse } from './openai-response.js';
 
 const apiVersion = '2024-02-01';
@@ -19,7 +20,7 @@ export class OpenAiChatClient {
    * Creates an instance of the OpenAI chat client.
   * @param modelDeployment - This configuration is used to retrieve a deployment. Depending on the configuration use either the given deployment ID or the model name to retrieve matching deployments. If model and deployment ID are given, the model is verified against the deployment.
    */
-  constructor(private modelDeployment: ModelDeployment<OpenAiChatModel>) {}
+  constructor(private modelDeployment: ModelDeployment<AzureOpenAiChatModel>) {}
 
   /**
    * Creates a completion for the chat messages.
diff --git a/packages/foundation-models/src/openai/openai-embedding-client.ts b/packages/foundation-models/src/openai/openai-embedding-client.ts
index d119db4a..b6f4bfbc 100644
--- a/packages/foundation-models/src/openai/openai-embedding-client.ts
+++ b/packages/foundation-models/src/openai/openai-embedding-client.ts
@@ -1,12 +1,15 @@
-import { type CustomRequestConfig, executeRequest } from '@sap-ai-sdk/core';
+import {
+  type CustomRequestConfig,
+  type AzureOpenAiEmbeddingModel,
+  executeRequest
+} from '@sap-ai-sdk/core';
 import {
   getDeploymentId,
   type ModelDeployment
 } from '@sap-ai-sdk/ai-api/internal.js';
 import type {
   OpenAiEmbeddingParameters,
-  OpenAiEmbeddingOutput,
-  OpenAiEmbeddingModel
+  OpenAiEmbeddingOutput
 } from './openai-types.js';
 
 const apiVersion = '2024-02-01';
@@ -19,7 +22,9 @@ export class OpenAiEmbeddingClient {
    * Creates an instance of the OpenAI embedding client.
   * @param modelDeployment - This configuration is used to retrieve a deployment. Depending on the configuration use either the given deployment ID or the model name to retrieve matching deployments. If model and deployment ID are given, the model is verified against the deployment.
    */
-  constructor(private modelDeployment: ModelDeployment<OpenAiEmbeddingModel>) {}
+  constructor(
+    private modelDeployment: ModelDeployment<AzureOpenAiEmbeddingModel>
+  ) {}
 
   /**
    * Creates an embedding vector representing the given text.
diff --git a/packages/foundation-models/src/openai/openai-types.ts b/packages/foundation-models/src/openai/openai-types.ts
index eb78a0ae..35def3cd 100644
--- a/packages/foundation-models/src/openai/openai-types.ts
+++ b/packages/foundation-models/src/openai/openai-types.ts
@@ -1,22 +1,3 @@
-/**
- * Available OpenAI models for chat completion.
- */
-export type OpenAiChatModel =
-  | 'gpt-4o'
-  | 'gpt-4'
-  | 'gpt-4-32k'
-  | 'gpt-35-turbo'
-  | 'gpt-35-turbo-0125'
-  | 'gpt-35-turbo-16k';
-
-/**
- * OpenAI embedding models.
- */
-export type OpenAiEmbeddingModel =
-  | 'text-embedding-ada-002'
-  | 'text-embedding-3-small'
-  | 'text-embedding-3-large';
-
 /**
  * OpenAI system message.
  */
diff --git a/packages/orchestration/src/orchestration-types.ts b/packages/orchestration/src/orchestration-types.ts
index 1a7bd790..aa26fb1c 100644
--- a/packages/orchestration/src/orchestration-types.ts
+++ b/packages/orchestration/src/orchestration-types.ts
@@ -1,3 +1,4 @@
+import { ChatModel } from '@sap-ai-sdk/core';
 import {
   ChatMessages,
   FilteringModuleConfig,
@@ -20,11 +21,13 @@ export interface Prompt {
   inputParams?: Record<string, string>;
 }
 
-// TODO: why do we have this extra type? and if there is a reason, why does it not apply to the filtering module?
 /**
  * LLMModule configuration.
  */
-export type LlmConfig = LLMModuleConfig;
+export type LlmConfig = LLMModuleConfig & {
+  /** */
+  model_name: ChatModel;
+};
 
 /**
  * Orchestration module configuration.
diff --git a/tests/type-tests/test/model.test-d.ts b/tests/type-tests/test/model.test-d.ts
new file mode 100644
index 00000000..cdb3153b
--- /dev/null
+++ b/tests/type-tests/test/model.test-d.ts
@@ -0,0 +1,6 @@
+import { expectAssignable } from 'tsd';
+import { ChatModel, AzureOpenAiChatModel } from '@sap-ai-sdk/core';
+
+expectAssignable<ChatModel>('custom-model');
+expectAssignable<AzureOpenAiChatModel>('custom-model');
+expectAssignable<AzureOpenAiChatModel>('gpt-4-32k');
diff --git a/tests/type-tests/test/openai.test-d.ts b/tests/type-tests/test/openai.test-d.ts
index ee4ee23f..2e6bef9c 100644
--- a/tests/type-tests/test/openai.test-d.ts
+++ b/tests/type-tests/test/openai.test-d.ts
@@ -18,9 +18,9 @@ expectType<Promise<OpenAiChatCompletionResponse>>(
 );
 
 /**
- * Chat Completion with invalid model.
+ * Chat Completion with custom model.
  */
-expectError(
+expectType<Promise<OpenAiChatCompletionResponse>>(
   new OpenAiChatClient('unknown').run({
     messages: [{ role: 'user', content: 'test prompt' }]
   })
@@ -108,6 +108,8 @@ expectType<Promise<OpenAiEmbeddingOutput>>(
   })
 );
 
+expectType<OpenAiEmbeddingClient>(new OpenAiEmbeddingClient('unknown'));
+
 /**
  * Embeddings with optional parameters.
  */
@@ -124,5 +126,3 @@ expectType<Promise<OpenAiEmbeddingOutput>>(
     { params: { 'api-version': 'custom-api-version' } }
   )
 );
-
-expectError(new OpenAiEmbeddingClient('gpt-35-turbo'));
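
Illustration (not part of the patch): a minimal sketch of how the new LiteralUnion-based model types behave for consumers of @sap-ai-sdk/core, matching what the type tests above assert. The variable and model names used here are examples only.

// Known model names keep editor autocompletion; arbitrary strings still compile.
import type { AzureOpenAiChatModel, ChatModel } from '@sap-ai-sdk/core';

// A listed literal is suggested by the editor and accepted as usual.
export const knownModel: AzureOpenAiChatModel = 'gpt-4o';

// An arbitrary string (e.g. a fine-tuned or newly released model) also
// type-checks, because LiteralUnion widens the union with
// `string & Record<never, never>` without collapsing the listed literals.
export const customModel: ChatModel = 'my-custom-model';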