From 87a7cde1dc2279af660e570fac19e53d56aff30a Mon Sep 17 00:00:00 2001
From: Tom Frenken
Date: Fri, 13 Sep 2024 17:23:57 +0200
Subject: [PATCH] add another mapping test

---
 .../src/openai/openai-response.test.ts        |  6 +-
 .../openai/__snapshots__/util.test.ts.snap    | 50 +++++++++++++++
 packages/langchain/src/openai/chat.ts         |  6 +-
 packages/langchain/src/openai/embedding.ts    |  2 +-
 packages/langchain/src/openai/util.test.ts    | 64 ++++++++++++++-----
 5 files changed, 106 insertions(+), 22 deletions(-)
 create mode 100644 packages/langchain/src/openai/__snapshots__/util.test.ts.snap

diff --git a/packages/foundation-models/src/openai/openai-response.test.ts b/packages/foundation-models/src/openai/openai-response.test.ts
index b108ff15..9ff6f797 100644
--- a/packages/foundation-models/src/openai/openai-response.test.ts
+++ b/packages/foundation-models/src/openai/openai-response.test.ts
@@ -1,6 +1,6 @@
-import { parseMockResponse } from '../../../../test-util/mock-http';
-import { OpenAiChatCompletionResponse } from './openai-response';
-import { OpenAiChatCompletionOutput } from './openai-types';
+import { parseMockResponse } from '../../../../test-util/mock-http.js';
+import { OpenAiChatCompletionResponse } from './openai-response.js';
+import { OpenAiChatCompletionOutput } from './openai-types.js';
 
 describe('OpenAI response', () => {
   const mockResponse = parseMockResponse(
diff --git a/packages/langchain/src/openai/__snapshots__/util.test.ts.snap b/packages/langchain/src/openai/__snapshots__/util.test.ts.snap
new file mode 100644
index 00000000..6556ecb3
--- /dev/null
+++ b/packages/langchain/src/openai/__snapshots__/util.test.ts.snap
@@ -0,0 +1,50 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`Mapping Functions should parse an OpenAi response to a (Langchain) chat response 1`] = `
+{
+  "generations": [
+    {
+      "generationInfo": {
+        "finish_reason": "stop",
+        "function_call": undefined,
+        "index": 0,
+        "tool_calls": undefined,
+      },
+      "message": {
+        "id": [
+          "langchain_core",
+          "messages",
+          "AIMessage",
+        ],
+        "kwargs": {
+          "additional_kwargs": {
+            "finish_reason": "stop",
+            "function_call": undefined,
+            "index": 0,
+            "tool_call_id": "",
+            "tool_calls": undefined,
+          },
+          "content": "The deepest place on Earth is located in the Western Pacific Ocean and is known as the Mariana Trench.",
+          "invalid_tool_calls": [],
+          "response_metadata": {},
+          "tool_calls": [],
+        },
+        "lc": 1,
+        "type": "constructor",
+      },
+      "text": "The deepest place on Earth is located in the Western Pacific Ocean and is known as the Mariana Trench.",
+    },
+  ],
+  "llmOutput": {
+    "created": 1725457796,
+    "id": "chatcmpl-A3kgOwg9B6j87n0IkoCFCUCxRSwQZ",
+    "model": "gpt-4-32k",
+    "object": "chat.completion",
+    "tokenUsage": {
+      "completionTokens": 22,
+      "promptTokens": 15,
+      "totalTokens": 37,
+    },
+  },
+}
+`;
diff --git a/packages/langchain/src/openai/chat.ts b/packages/langchain/src/openai/chat.ts
index 5b7643a7..cee8f8b6 100644
--- a/packages/langchain/src/openai/chat.ts
+++ b/packages/langchain/src/openai/chat.ts
@@ -14,7 +14,7 @@ export class OpenAiChatClient extends AzureChatOpenAI {
   private openAiChatClient: OpenAiChatClientBase;
 
   constructor(fields: OpenAiChatModelInput) {
-    const defaultValues = new AzureOpenAI();
+    const defaultValues = new AzureOpenAI({ apiKey: 'dummy' });
     const stop = fields.stop
       ? Array.isArray(fields.stop)
         ? fields.stop
@@ -25,8 +25,8 @@ export class OpenAiChatClient extends AzureChatOpenAI {
       ...fields,
       stop,
       // overrides the apikey values as they are not applicable for BTP
-      azureOpenAIApiKey: 'dummy',
-      openAIApiKey: 'dummy',
+      azureOpenAIApiKey: undefined,
+      openAIApiKey: undefined,
       apiKey: 'dummy'
     });
 
diff --git a/packages/langchain/src/openai/embedding.ts b/packages/langchain/src/openai/embedding.ts
index 0ac1f15c..6c358931 100644
--- a/packages/langchain/src/openai/embedding.ts
+++ b/packages/langchain/src/openai/embedding.ts
@@ -15,7 +15,7 @@ export class OpenAiEmbeddingClient extends AzureOpenAIEmbeddings {
 
   constructor(fields: OpenAiEmbeddingInput) {
     // overrides the apikey value as it is not applicable in BTP
-    super({ ...fields, azureOpenAIApiKey: 'dummy' });
+    super({ ...fields, apiKey: 'dummy', azureOpenAIApiKey: undefined });
     this.btpOpenAIClient = new OpenAiEmbeddingClientBase({ ...fields });
   }
 
diff --git a/packages/langchain/src/openai/util.test.ts b/packages/langchain/src/openai/util.test.ts
index 7c8bbc49..0e85f80c 100644
--- a/packages/langchain/src/openai/util.test.ts
+++ b/packages/langchain/src/openai/util.test.ts
@@ -1,23 +1,57 @@
 // mapResponseToChatResult
 // mapLangchainToAiClient
-import { OpenAiChatCompletionOutput } from '@sap-ai-sdk/foundation-models';
-import { parseMockResponse } from '../../../../test-util/mock-http.js';
+import { OpenAiChatCompletionOutput, OpenAiChatClient as OpenAiChatClientBase, OpenAiEmbeddingParameters } from '@sap-ai-sdk/foundation-models';
+import { jest } from '@jest/globals';
+import nock from 'nock';
+import { mockClientCredentialsGrantCall, mockInference, parseMockResponse } from '../../../../test-util/mock-http.js';
 import { mapResponseToChatResult } from './util.js';
+import { OpenAiChatClient } from './chat.js';
 
 describe('Mapping Functions', () => {
-  const openAiMockResponse = parseMockResponse(
-    'foundation-models',
-    'openai-chat-completion-success-response.json'
+  const openAiMockResponse = parseMockResponse(
+    'foundation-models',
+    'openai-chat-completion-success-response.json'
+  );
+
+  const chatCompletionEndpoint = {
+    url: 'inference/deployments/1234/chat/completions',
+    apiVersion: '2024-02-01'
+  };
+
+  beforeEach(() => {
+    mockClientCredentialsGrantCall();
+  });
+
+  afterEach(() => {
+    nock.cleanAll();
+  });
+
+  it('should parse an OpenAi response to a (Langchain) chat response', async () => {
+    const result = mapResponseToChatResult(openAiMockResponse);
+    expect(result).toMatchSnapshot();
+  });
+
+  it('should parse a Langchain input to an ai sdk input', async () => {
+    const prompt = {
+      input: ['AI is fascinating']
+    } as OpenAiEmbeddingParameters;
+
+    mockInference(
+      {
+        data: prompt
+      },
+      {
+        data: openAiMockResponse,
+        status: 200
+      },
+      chatCompletionEndpoint
   );
-  it('should parse an OpenAi response to a (Langchain) chat response', async () => {
-    const result = mapResponseToChatResult(openAiMockResponse);
-    expect(result).toMatchInlineSnapshot();
-  });
-
-  // it('should compute an embedding vector', async () => {
-  //   const result = await embedQuery();
-  //   expect(result).toBeDefined();
-  //   expect(result).not.toHaveLength(0);
-  // });
+
+    const client = new OpenAiChatClient({ deploymentId: '1234' });
+    const runSpy = jest.spyOn(OpenAiChatClientBase.prototype, 'run');
+    // eslint-disable-next-line @typescript-eslint/no-unused-vars
+    const response = await client.invoke('Test');
+    expect(runSpy).toHaveBeenCalled();
   });
+});