Merge branch 'main' into harshbhat/recoverable-keys
harshsbhat authored Sep 18, 2024
2 parents ab6ad58 + 5fb8bb9 commit 197fe6e
Showing 2 changed files with 9 additions and 17 deletions.
apps/semantic-cache/src/pkg/streaming.ts (14 changes: 9 additions & 5 deletions)
@@ -2,7 +2,7 @@ import { streamSSE } from "hono/streaming";
 import type { OpenAI } from "openai";
 
 import type { Context } from "./hono/app";
-import { OpenAIResponse, createCompletionChunk, parseMessagesToString } from "./util";
+import { createCompletionChunk, parseMessagesToString } from "./util";
 
 import type { CacheError } from "@unkey/cache";
 import { BaseError, Err, Ok, type Result, wrap } from "@unkey/error";
@@ -125,7 +125,7 @@ export async function handleNonStreamingRequest(
 
   // Cache hit
   if (cached.val) {
-    return c.json(OpenAIResponse(cached.val));
+    return c.json(JSON.parse(cached.val));
   }
 
   // miss
@@ -142,15 +142,19 @@ export async function handleNonStreamingRequest(
   const tokens = chatCompletion.val.usage?.completion_tokens ?? 0;
   c.set("tokens", Promise.resolve(tokens));
 
-  const response = chatCompletion.val.choices.at(0)?.message.content || "";
-  const { err: updateCacheError } = await updateCache(c, embeddings.val, response, tokens);
+  const { err: updateCacheError } = await updateCache(
+    c,
+    embeddings.val,
+    JSON.stringify(chatCompletion),
+    tokens,
+  );
   if (updateCacheError) {
     logger.error("unable to update cache", {
       error: updateCacheError.message,
     });
   }
 
-  c.set("response", Promise.resolve(response));
+  c.set("response", Promise.resolve(JSON.stringify(chatCompletion, null, 2)));
   return c.json(chatCompletion);
 }

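In plain terms, the streaming.ts change stops rebuilding a minimal response on a cache hit and instead caches the full serialized completion: updateCache now receives JSON.stringify(chatCompletion), and a hit is returned with JSON.parse(cached.val). A minimal sketch of that round-trip follows; the Map-backed cache, the key, and the sample completion are illustrative stand-ins, not repo code.

// Illustrative only: a stand-in for the semantic cache, which stores strings as-is.
const cache = new Map<string, string>();

// Miss path: serialize the whole completion result and persist it.
const chatCompletion = {
  val: {
    choices: [{ message: { role: "assistant", content: "Hello!" } }],
    usage: { completion_tokens: 3 },
  },
};
cache.set("embedding-key", JSON.stringify(chatCompletion));

// Hit path: parse the stored string back and return it directly,
// instead of wrapping a bare string with the removed OpenAIResponse helper.
const cached = cache.get("embedding-key");
if (cached) {
  const hit = JSON.parse(cached);
  console.log(hit.val.choices[0].message.content); // "Hello!"
}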
apps/semantic-cache/src/pkg/util/index.ts (12 changes: 0 additions & 12 deletions)
@@ -21,18 +21,6 @@ export async function createCompletionChunk(content: string, stop = false) {
   };
 }
 
-export function OpenAIResponse(content: string) {
-  return {
-    choices: [
-      {
-        message: {
-          content,
-        },
-      },
-    ],
-  };
-}
-
 /**
  * Extracts the word enclosed in double quotes from the given chunk.
  *
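The util/index.ts deletion is the counterpart of that change: OpenAIResponse existed only to wrap a cached string back into a { choices: [{ message: { content } }] } body, and with the streaming.ts import removed it appears to have no remaining callers, hence the deletion. One practical effect, sketched below with hypothetical field values, is that a cache hit now carries the same serialized object as a miss, so fields beyond the message text (for example usage) survive the cache round-trip instead of being dropped by the minimal wrapper.

// Illustrative shapes only; field values are hypothetical, not taken from the repo.
// Before: a hit was reduced to the minimal body built by OpenAIResponse.
const oldHitBody = {
  choices: [{ message: { content: "Hello!" } }],
};

// After: a hit is JSON.parse of whatever JSON.stringify(chatCompletion) stored,
// so the full object, usage counts included, comes back unchanged.
const newHitBody = JSON.parse(
  JSON.stringify({
    val: {
      choices: [{ message: { role: "assistant", content: "Hello!" } }],
      usage: { completion_tokens: 3 },
    },
  }),
);
console.log(oldHitBody.choices[0].message.content, newHitBody.val.usage.completion_tokens);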
