Skip to content

Commit

Permalink
add another mapping test
Browse files Browse the repository at this point in the history
  • Loading branch information
tomfrenken committed Sep 13, 2024
1 parent 115d6e9 commit 87a7cde
Show file tree
Hide file tree
Showing 5 changed files with 106 additions and 22 deletions.
6 changes: 3 additions & 3 deletions packages/foundation-models/src/openai/openai-response.test.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import { parseMockResponse } from '../../../../test-util/mock-http';
import { OpenAiChatCompletionResponse } from './openai-response';
import { OpenAiChatCompletionOutput } from './openai-types';
import { parseMockResponse } from '../../../../test-util/mock-http.js';
import { OpenAiChatCompletionResponse } from './openai-response.js';
import { OpenAiChatCompletionOutput } from './openai-types.js';

describe('OpenAI response', () => {
const mockResponse = parseMockResponse<OpenAiChatCompletionOutput>(
Expand Down
50 changes: 50 additions & 0 deletions packages/langchain/src/openai/__snapshots__/util.test.ts.snap
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP

exports[`Mapping Functions should parse an OpenAi response to a (Langchain) chat response 1`] = `
{
"generations": [
{
"generationInfo": {
"finish_reason": "stop",
"function_call": undefined,
"index": 0,
"tool_calls": undefined,
},
"message": {
"id": [
"langchain_core",
"messages",
"AIMessage",
],
"kwargs": {
"additional_kwargs": {
"finish_reason": "stop",
"function_call": undefined,
"index": 0,
"tool_call_id": "",
"tool_calls": undefined,
},
"content": "The deepest place on Earth is located in the Western Pacific Ocean and is known as the Mariana Trench.",
"invalid_tool_calls": [],
"response_metadata": {},
"tool_calls": [],
},
"lc": 1,
"type": "constructor",
},
"text": "The deepest place on Earth is located in the Western Pacific Ocean and is known as the Mariana Trench.",
},
],
"llmOutput": {
"created": 1725457796,
"id": "chatcmpl-A3kgOwg9B6j87n0IkoCFCUCxRSwQZ",
"model": "gpt-4-32k",
"object": "chat.completion",
"tokenUsage": {
"completionTokens": 22,
"promptTokens": 15,
"totalTokens": 37,
},
},
}
`;
6 changes: 3 additions & 3 deletions packages/langchain/src/openai/chat.ts
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ export class OpenAiChatClient extends AzureChatOpenAI {
private openAiChatClient: OpenAiChatClientBase;

constructor(fields: OpenAiChatModelInput) {
const defaultValues = new AzureOpenAI();
const defaultValues = new AzureOpenAI({ apiKey: 'dummy' });
const stop = fields.stop
? Array.isArray(fields.stop)
? fields.stop
Expand All @@ -25,8 +25,8 @@ export class OpenAiChatClient extends AzureChatOpenAI {
...fields,
stop,
// overrides the apikey values as they are not applicable for BTP
azureOpenAIApiKey: 'dummy',
openAIApiKey: 'dummy',
azureOpenAIApiKey: undefined,
openAIApiKey: undefined,
apiKey: 'dummy'
});

Expand Down
2 changes: 1 addition & 1 deletion packages/langchain/src/openai/embedding.ts
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ export class OpenAiEmbeddingClient extends AzureOpenAIEmbeddings {

constructor(fields: OpenAiEmbeddingInput) {
// overrides the apikey value as it is not applicable in BTP
super({ ...fields, azureOpenAIApiKey: 'dummy' });
super({ ...fields, apiKey: 'dummy', azureOpenAIApiKey: undefined });

this.btpOpenAIClient = new OpenAiEmbeddingClientBase({ ...fields });
}
Expand Down
64 changes: 49 additions & 15 deletions packages/langchain/src/openai/util.test.ts
Original file line number Diff line number Diff line change
@@ -1,23 +1,57 @@
// mapResponseToChatResult
// mapLangchainToAiClient

import { OpenAiChatCompletionOutput } from '@sap-ai-sdk/foundation-models';
import { parseMockResponse } from '../../../../test-util/mock-http.js';
import { OpenAiChatCompletionOutput, OpenAiChatClient as OpenAiChatClientBase, OpenAiEmbeddingParameters } from '@sap-ai-sdk/foundation-models';
import { jest } from '@jest/globals';
import nock from 'nock';
import { mockClientCredentialsGrantCall, mockInference, parseMockResponse } from '../../../../test-util/mock-http.js';
import { mapResponseToChatResult } from './util.js';
import { OpenAiChatClient } from './chat.js';

describe('Mapping Functions', () => {
const openAiMockResponse = parseMockResponse<OpenAiChatCompletionOutput>(
'foundation-models',
'openai-chat-completion-success-response.json'
const openAiMockResponse = parseMockResponse<OpenAiChatCompletionOutput>(
'foundation-models',
'openai-chat-completion-success-response.json'
);

const chatCompletionEndpoint = {
url: 'inference/deployments/1234/chat/completions',
apiVersion: '2024-02-01'
};

beforeEach(() => {
mockClientCredentialsGrantCall();
});

afterEach(() => {
nock.cleanAll();
});

it('should parse an OpenAi response to a (Langchain) chat response', async () => {
const result = mapResponseToChatResult(openAiMockResponse);
expect(result).toMatchSnapshot();
});

it('should parse a Langchain input to an ai sdk input', async () => {
const prompt = {
input: ['AI is fascinating']
} as OpenAiEmbeddingParameters;

mockInference(
{
data: prompt
},
{
data: openAiMockResponse,
status: 200
},
chatCompletionEndpoint
);
it('should parse an OpenAi response to a (Langchain) chat response', async () => {
const result = mapResponseToChatResult(openAiMockResponse);
expect(result).toMatchInlineSnapshot();
});

// it('should compute an embedding vector', async () => {
// const result = await embedQuery();
// expect(result).toBeDefined();
// expect(result).not.toHaveLength(0);
// });

const client = new OpenAiChatClient({ deploymentId: '1234' });
const runSpy = jest.spyOn(OpenAiChatClientBase.prototype, 'run');
// eslint-disable-next-line @typescript-eslint/no-unused-vars
const response = await client.invoke('Test');
expect(runSpy).toHaveBeenCalled();
});
});

0 comments on commit 87a7cde

Please sign in to comment.