diff --git a/apps/docs/package.json b/apps/docs/package.json index 0a282ff9..77719e37 100644 --- a/apps/docs/package.json +++ b/apps/docs/package.json @@ -31,7 +31,7 @@ "@docusaurus/tsconfig": "3.0.1", "@docusaurus/types": "3.0.1", "autoprefixer": "^10.4.16", - "postcss": "^8.4.31", + "postcss": "^8.4.32", "tailwindcss": "^3.3.5", "typescript": "~5.3.3" }, diff --git a/apps/unsaged/app/api/chat/route.ts b/apps/unsaged/app/api/chat/route.ts index 279f53e2..6feaea5b 100644 --- a/apps/unsaged/app/api/chat/route.ts +++ b/apps/unsaged/app/api/chat/route.ts @@ -2,6 +2,7 @@ import { getStream } from '@/utils/server/ai_vendors/stream'; import { getTokenCount } from '@/utils/server/ai_vendors/token-count'; import { ChatBody } from '@/types/chat'; +import { StreamingTextResponse } from 'ai'; export const runtime = 'edge'; @@ -47,9 +48,7 @@ const handler = async (req: Request): Promise => { }); } - return new Response(stream, { - headers: { 'Content-Type': 'text/event-stream; charset=utf-8' }, - }); + return new StreamingTextResponse(stream); }; export { handler as GET, handler as POST }; diff --git a/apps/unsaged/app/api/image/route.ts b/apps/unsaged/app/api/image/route.ts new file mode 100644 index 00000000..d8ea5c1e --- /dev/null +++ b/apps/unsaged/app/api/image/route.ts @@ -0,0 +1,36 @@ +import { getImage } from '@/utils/server/ai_vendors/image'; + +import { ImageBody } from '@/types/chat'; + +export const runtime = 'edge'; + +const handler = async (req: Request): Promise => { + const { model, prompt, apiKey, params } = + (await req.json()) as ImageBody; + + const { error: imageError, images } = await getImage( + model, + params, + apiKey, + prompt + ); + + if (imageError) { + let message = imageError; + + if (message.message) { + message = message.message; + } + + console.error(message); + + return new Response('Error', { + status: 500, + statusText: message, + }); + } + + return new Response(JSON.stringify(images), { status: 200 }); +}; + +export { handler as GET, 
handler as POST }; diff --git a/apps/unsaged/components/Home/components/ChatZone/Screens/Chat/ChatInput.tsx b/apps/unsaged/components/Home/components/ChatZone/Screens/Chat/ChatInput.tsx index f5de6e3c..44fd6542 100644 --- a/apps/unsaged/components/Home/components/ChatZone/Screens/Chat/ChatInput.tsx +++ b/apps/unsaged/components/Home/components/ChatZone/Screens/Chat/ChatInput.tsx @@ -70,15 +70,19 @@ export const ChatInput = ({ const handleChange = (e: React.ChangeEvent) => { const value = e.target.value; - const maxLength = selectedConversation?.model.maxLength; - - if (maxLength && value.length > maxLength) { - alert( - t('messageLimit', - { maxLength, valueLength: value.length }, - ), - ); - return; + + if (selectedConversation?.model.type == 'text') { + const maxLength = selectedConversation?.model.maxLength; + + if (maxLength && value.length > maxLength) { + alert( + t( + `messageLimit`, + { maxLength, valueLength: value.length }, + ), + ); + return; + } } setContent(value); @@ -326,8 +330,8 @@ export const ChatInput = ({ bottom: `${textareaRef?.current?.scrollHeight}px`, maxHeight: '400px', overflow: `${textareaRef.current && textareaRef.current.scrollHeight > 400 - ? 'auto' - : 'hidden' + ? 
'auto' + : 'hidden' }`, }} placeholder={ diff --git a/apps/unsaged/components/Home/components/ChatZone/Screens/Chat/ChatMessage.tsx b/apps/unsaged/components/Home/components/ChatZone/Screens/Chat/ChatMessage.tsx index 5b6dcefd..dfee5490 100644 --- a/apps/unsaged/components/Home/components/ChatZone/Screens/Chat/ChatMessage.tsx +++ b/apps/unsaged/components/Home/components/ChatZone/Screens/Chat/ChatMessage.tsx @@ -26,6 +26,7 @@ import rehypeKatex from 'rehype-katex'; import rehypeRaw from 'rehype-raw'; import remarkGfm from 'remark-gfm'; import remarkMath from 'remark-math'; +import { ImageChatMessage } from './ImageChatMessage'; export interface Props { message: Message; @@ -129,7 +130,11 @@ export const ChatMessage: FC = memo( const copyOnClick = () => { if (!navigator.clipboard) return; - navigator.clipboard.writeText(message.content).then(() => { + const copyText = selectedConversation?.model.type === 'text' + ? message.content + : JSON.parse(message.content)[0].url; + + navigator.clipboard.writeText(copyText).then(() => { setMessageCopied(true); setTimeout(() => { setMessageCopied(false); @@ -151,11 +156,10 @@ export const ChatMessage: FC = memo( return (
@@ -230,99 +234,94 @@ export const ChatMessage: FC = memo( )}
)} - {message.role === 'assistant' && ( + {message.role === 'assistant' && message.content && (
- + + ▍ + + ); + } + + children[0] = (children[0] as string).replace( + '`▍`', + '▍', + ); + } + + const match = /language-(\w+)/.exec(className || ''); + + return !inline ? ( + + ) : ( + + {children} + + ); + }, + table({ children }) { + return ( + + {children} +
+ ); + }, + th({ children }) { + return ( + + {children} + + ); + }, + td({ children }) { return ( - - ▍ - + + {children} + ); - } - - children[0] = (children[0] as string).replace( - '`▍`', - '▍', - ); - } - - const match = /language-(\w+)/.exec(className || ''); - - return !inline ? ( - - ) : ( - - {children} - - ); - }, - table({ children }) { - return ( - - {children} -
- ); - }, - th({ children }) { - return ( - - {children} - - ); - }, - td({ children }) { - return ( - - {children} - - ); - }, - a({ children, ...props }) { - return ( - - {children} - - ); - }, - img({ src, alt, width, height }) { - if (!width && !height) { - width = '1024px'; - height = '1024px'; - } - return ( - // eslint-disable-next-line @next/next/no-img-element - {alt!} - ); - }, - }} - > - {`${message.content}${ - messageIsStreaming && - messageIndex == - (selectedConversationMessages.length ?? 0) - 1 - ? '`▍`' - : '' - }`} -
+ }, + a({ children, ...props }) { + return ( + + {children} + + ); + }, + img({ src, alt, width, height }) { + return ( + + ); + }, + }} + > + {`${message.content}${messageIsStreaming && + messageIndex == + (selectedConversationMessages.length ?? 0) - 1 + ? '`▍`' + : '' + }`} +
+ : + <> + + + }
{messagedCopied ? ( @@ -343,7 +342,7 @@ export const ChatMessage: FC = memo( )}
-
+ ); }, ); diff --git a/apps/unsaged/components/Home/components/ChatZone/Screens/Chat/ImageChatMessage.tsx b/apps/unsaged/components/Home/components/ChatZone/Screens/Chat/ImageChatMessage.tsx new file mode 100644 index 00000000..a34e300c --- /dev/null +++ b/apps/unsaged/components/Home/components/ChatZone/Screens/Chat/ImageChatMessage.tsx @@ -0,0 +1,25 @@ +import { FC } from "react"; + +interface Props { + src: string | undefined; + alt?: string | undefined; + width?: string | number | undefined; + height?: string | number | undefined; +} + +export const ImageChatMessage: FC = ({ src, alt, width, height }) => { + if (!width && !height) { + width = '1024px'; + height = '1024px'; + } + return ( + // eslint-disable-next-line @next/next/no-img-element + {alt!} + ); +}; \ No newline at end of file diff --git a/apps/unsaged/components/Home/components/SecondaryMenu/components/Menu/components/Screens/ModelSettings/ModelSettings.tsx b/apps/unsaged/components/Home/components/SecondaryMenu/components/Menu/components/Screens/ModelSettings/ModelSettings.tsx index ea7f3748..de4df02a 100644 --- a/apps/unsaged/components/Home/components/SecondaryMenu/components/Menu/components/Screens/ModelSettings/ModelSettings.tsx +++ b/apps/unsaged/components/Home/components/SecondaryMenu/components/Menu/components/Screens/ModelSettings/ModelSettings.tsx @@ -12,40 +12,56 @@ import { TopKInput } from './components/top-k'; import { TopPSlider } from './components/top-p'; import HomeContext from '@/components/Home/home.context'; +const possibleParameters = { + 'system_prompt': { modelType: 'text', component: SystemPromptSelect }, + 'temperature': { modelType: 'text', component: TemperatureSlider }, + 'max_tokens': { modelType: 'text', component: MaxTokensSlider }, + 'top_p': { modelType: 'text', component: TopPSlider }, + 'top_k': { modelType: 'text', component: TopKInput }, + 'repeat_penalty': { modelType: 'text', component: RepeatPenaltySlider }, + 'presence_penalty': { modelType: 'text', 
component: PresencePenaltySlider }, + 'stop': { modelType: 'text', component: StopInput }, + 'seed': { modelType: 'text', component: SeedInput }, +} + const openAiSupportedParameters = [ - 'temperature', - 'max_tokens', - 'top_p', - 'repeat_penalty', - 'presence_penalty', - 'stop', - 'seed', + possibleParameters['system_prompt'], + possibleParameters['temperature'], + possibleParameters['max_tokens'], + possibleParameters['top_p'], + possibleParameters['repeat_penalty'], + possibleParameters['presence_penalty'], + possibleParameters['stop'], + possibleParameters['seed'], ]; const claudeSupportedParameters = [ - 'temperature', - 'max_tokens', - 'top_p', - 'top_k', - 'stop', + possibleParameters['system_prompt'], + possibleParameters['temperature'], + possibleParameters['max_tokens'], + possibleParameters['top_p'], + possibleParameters['top_k'], + possibleParameters['stop'], ]; const bardSupportedParameters = [ - 'temperature', - 'max_tokens', - 'top_p', - 'top_k', - 'stop', + possibleParameters['system_prompt'], + possibleParameters['temperature'], + possibleParameters['max_tokens'], + possibleParameters['top_p'], + possibleParameters['top_k'], + possibleParameters['stop'], ]; const ollamaSupportedParameters = [ - 'temperature', - 'max_tokens', - 'repeat_penalty', - 'top_p', - 'top_k', - 'stop', - 'seed', + possibleParameters['system_prompt'], + possibleParameters['temperature'], + possibleParameters['max_tokens'], + possibleParameters['repeat_penalty'], + possibleParameters['top_p'], + possibleParameters['top_k'], + possibleParameters['stop'], + possibleParameters['seed'], ]; export const ModelSettings = () => { @@ -78,27 +94,12 @@ export const ModelSettings = () => {
- - - {supportedParameters.includes('temperature') && } - - {supportedParameters.includes('max_tokens') && } - - {supportedParameters.includes('top_p') && } - - {supportedParameters.includes('top_k') && } - - {supportedParameters.includes('repeat_penalty') && ( - - )} - - {supportedParameters.includes('presence_penalty') && ( - - )} - - {supportedParameters.includes('stop') && } - - {supportedParameters.includes('seed') && } + {supportedParameters.map((parameter, index) => { + if (parameter.modelType === model?.type) { + const ParameterComponent = parameter.component; + return ; + } + })}
 ); }; diff --git a/apps/unsaged/components/Home/components/SecondaryMenu/components/Menu/components/Screens/ModelSettings/components/max-tokens.tsx b/apps/unsaged/components/Home/components/SecondaryMenu/components/Menu/components/Screens/ModelSettings/components/max-tokens.tsx index 84525fcd..34477f1c 100644 --- a/apps/unsaged/components/Home/components/SecondaryMenu/components/Menu/components/Screens/ModelSettings/components/max-tokens.tsx +++ b/apps/unsaged/components/Home/components/SecondaryMenu/components/Menu/components/Screens/ModelSettings/components/max-tokens.tsx @@ -33,7 +33,11 @@ export const MaxTokensSlider = () => { const model = PossibleAiModels[selectedConversation?.model?.id]; - return model?.tokenLimit ?? 128000; + if (!model || model.type == 'text') { + return model?.tokenLimit ?? 128000; + } else { + return 0; + } }, [selectedConversation?.model?.id]); const [value, setValue] = useState([ diff --git a/apps/unsaged/package.json b/apps/unsaged/package.json index 7a980bed..aa39435a 100644 --- a/apps/unsaged/package.json +++ b/apps/unsaged/package.json @@ -28,6 +28,7 @@ "@tabler/icons-react": "^2.42.0", "@vercel/analytics": "^1.1.1", "@vercel/edge-config": "^0.4.1", + "ai": "^2.2.27", "class-variance-authority": "^0.7.0", "clsx": "^2.0.0", "date-fns": "^2.30.0", diff --git a/apps/unsaged/types/ai-models.ts b/apps/unsaged/types/ai-models.ts index f5fcb0b5..7cd2f23b 100644 --- a/apps/unsaged/types/ai-models.ts +++ b/apps/unsaged/types/ai-models.ts @@ -1,12 +1,17 @@ -export interface AiModel { +export type AiModel = { id: string; maxLength: number; // maximum length of a message tokenLimit: number; requestLimit: number; vendor: 'OpenAI' | 'Anthropic' | 'Google' | 'Ollama'; + type: 'text'; +} | { + id: string; + vendor: 'OpenAI'; + type: 'image'; } -export interface GetAvailableOpenAIModelResponse { +export interface GetAvailableAIModelResponse { error?: any; data: any[]; } @@ -25,6 +30,7 @@ export const PossibleAiModels: PossibleAiModelsInterface = { 
tokenLimit: 4000, requestLimit: 3000, vendor: 'OpenAI', + type: 'text', }, 'gpt-3.5-turbo-16k': { id: 'gpt-3.5-turbo-16k', @@ -32,6 +38,7 @@ export const PossibleAiModels: PossibleAiModelsInterface = { tokenLimit: 16000, requestLimit: 12000, vendor: 'OpenAI', + type: 'text', }, 'gpt-35-az': { id: 'gpt-35-az', @@ -39,6 +46,7 @@ export const PossibleAiModels: PossibleAiModelsInterface = { tokenLimit: 4000, requestLimit: 3000, vendor: 'OpenAI', + type: 'text', }, 'gpt-4': { id: 'gpt-4', @@ -46,6 +54,7 @@ export const PossibleAiModels: PossibleAiModelsInterface = { tokenLimit: 8000, requestLimit: 6000, vendor: 'OpenAI', + type: 'text', }, 'gpt-4-32k': { id: 'gpt-4-32k', @@ -53,6 +62,7 @@ export const PossibleAiModels: PossibleAiModelsInterface = { tokenLimit: 32000, requestLimit: 30000, vendor: 'OpenAI', + type: 'text', }, 'gpt-4-1106-preview': { id: 'gpt-4-1106-preview', @@ -60,6 +70,17 @@ export const PossibleAiModels: PossibleAiModelsInterface = { tokenLimit: 128000, requestLimit: 120000, vendor: 'OpenAI', + type: 'text', + }, + 'dall-e-3': { + id: 'dall-e-3', + vendor: 'OpenAI', + type: 'image', + }, + 'dall-e-2': { + id: 'dall-e-2', + vendor: 'OpenAI', + type: 'image', }, // // Azure @@ -70,6 +91,7 @@ export const PossibleAiModels: PossibleAiModelsInterface = { tokenLimit: 4000, requestLimit: 3000, vendor: 'OpenAI', + type: 'text', }, 'gpt-35-turbo-16k': { id: 'will get from azure', @@ -77,6 +99,7 @@ export const PossibleAiModels: PossibleAiModelsInterface = { tokenLimit: 16000, requestLimit: 12000, vendor: 'OpenAI', + type: 'text', }, // // Anthropic @@ -87,6 +110,7 @@ export const PossibleAiModels: PossibleAiModelsInterface = { tokenLimit: 100000, requestLimit: 98000, vendor: 'Anthropic', + type: 'text', }, 'claude-2': { id: 'claude-2', @@ -94,6 +118,7 @@ export const PossibleAiModels: PossibleAiModelsInterface = { tokenLimit: 100000, requestLimit: 98000, vendor: 'Anthropic', + type: 'text', }, // // Google @@ -104,6 +129,7 @@ export const PossibleAiModels: 
PossibleAiModelsInterface = { tokenLimit: 4096, requestLimit: 3000, vendor: 'Google', + type: 'text', }, // // Ollama @@ -114,6 +140,7 @@ export const PossibleAiModels: PossibleAiModelsInterface = { tokenLimit: 4096, requestLimit: 3000, vendor: 'Ollama', + type: 'text', }, 'llama2:7b': { id: 'llama2:7b', @@ -121,6 +148,7 @@ export const PossibleAiModels: PossibleAiModelsInterface = { tokenLimit: 4096, requestLimit: 3000, vendor: 'Ollama', + type: 'text', }, 'llama2:13b': { id: 'llama2:13b', @@ -128,6 +156,7 @@ export const PossibleAiModels: PossibleAiModelsInterface = { tokenLimit: 4096, requestLimit: 3000, vendor: 'Ollama', + type: 'text', }, 'llama2:70b': { id: 'llama2:70b', @@ -135,6 +164,7 @@ export const PossibleAiModels: PossibleAiModelsInterface = { tokenLimit: 4096, requestLimit: 3000, vendor: 'Ollama', + type: 'text', }, 'codellama:latest': { id: 'codellama:latest', @@ -142,6 +172,7 @@ export const PossibleAiModels: PossibleAiModelsInterface = { tokenLimit: 4096, requestLimit: 3000, vendor: 'Ollama', + type: 'text', }, 'codellama:7b': { id: 'codellama:7b', @@ -149,6 +180,7 @@ export const PossibleAiModels: PossibleAiModelsInterface = { tokenLimit: 4096, requestLimit: 3000, vendor: 'Ollama', + type: 'text', }, 'codellama:13b': { id: 'codellama:13b', @@ -156,6 +188,7 @@ export const PossibleAiModels: PossibleAiModelsInterface = { tokenLimit: 4096, requestLimit: 3000, vendor: 'Ollama', + type: 'text', }, 'codellama:34b': { id: 'codellama:34b', @@ -163,6 +196,7 @@ export const PossibleAiModels: PossibleAiModelsInterface = { tokenLimit: 4096, requestLimit: 3000, vendor: 'Ollama', + type: 'text', }, 'wizardcoder:latest': { id: 'wizardcoder:latest', @@ -170,6 +204,7 @@ export const PossibleAiModels: PossibleAiModelsInterface = { tokenLimit: 4096, requestLimit: 3000, vendor: 'Ollama', + type: 'text', }, 'wizardcoder:7b-python': { id: 'wizardcoder:7b-python', @@ -177,6 +212,7 @@ export const PossibleAiModels: PossibleAiModelsInterface = { tokenLimit: 4096, 
requestLimit: 3000, vendor: 'Ollama', + type: 'text', }, 'wizardcoder:13b-python': { id: 'wizardcoder:13b-python', @@ -184,6 +220,7 @@ export const PossibleAiModels: PossibleAiModelsInterface = { tokenLimit: 4096, requestLimit: 3000, vendor: 'Ollama', + type: 'text', }, 'wizardcoder:34b-python': { id: 'wizardcoder:34b-python', @@ -191,6 +228,7 @@ export const PossibleAiModels: PossibleAiModelsInterface = { tokenLimit: 4096, requestLimit: 3000, vendor: 'Ollama', + type: 'text', }, 'phind-codellama:latest': { id: 'phind-codellama:latest', @@ -198,6 +236,7 @@ export const PossibleAiModels: PossibleAiModelsInterface = { tokenLimit: 4096, requestLimit: 3000, vendor: 'Ollama', + type: 'text', }, 'phind-codellama:34b': { id: 'phind-codellama:34b', @@ -205,6 +244,7 @@ export const PossibleAiModels: PossibleAiModelsInterface = { tokenLimit: 4096, requestLimit: 3000, vendor: 'Ollama', + type: 'text', }, 'phind-codellama:34b-v2': { id: 'phind-codellama:34b-v2', @@ -212,6 +252,7 @@ export const PossibleAiModels: PossibleAiModelsInterface = { tokenLimit: 4096, requestLimit: 3000, vendor: 'Ollama', + type: 'text', }, 'phind-codellama:34b-python': { id: 'phind-codellama:34b-python', @@ -219,6 +260,7 @@ export const PossibleAiModels: PossibleAiModelsInterface = { tokenLimit: 4096, requestLimit: 3000, vendor: 'Ollama', + type: 'text', }, 'mistral:latest': { id: 'mistral:latest', @@ -226,6 +268,7 @@ export const PossibleAiModels: PossibleAiModelsInterface = { tokenLimit: 4096, requestLimit: 3000, vendor: 'Ollama', + type: 'text', }, 'mistral-openorca:latest': { id: 'mistral-openorca:latest', @@ -233,6 +276,7 @@ export const PossibleAiModels: PossibleAiModelsInterface = { tokenLimit: 4096, requestLimit: 3000, vendor: 'Ollama', + type: 'text', }, 'openchat:latest': { id: 'openchat:latest', @@ -240,6 +284,7 @@ export const PossibleAiModels: PossibleAiModelsInterface = { tokenLimit: 4096, requestLimit: 3000, vendor: 'Ollama', + type: 'text', }, 'neural-chat:latest': { id: 
'neural-chat:latest', @@ -247,6 +292,7 @@ export const PossibleAiModels: PossibleAiModelsInterface = { tokenLimit: 4096, requestLimit: 3000, vendor: 'Ollama', + type: 'text', }, 'goliath:latest': { id: 'goliath:latest', @@ -254,6 +300,7 @@ export const PossibleAiModels: PossibleAiModelsInterface = { tokenLimit: 4096, requestLimit: 3000, vendor: 'Ollama', + type: 'text', }, 'vicuna:latest': { id: 'vicuna:latest', @@ -261,6 +308,7 @@ export const PossibleAiModels: PossibleAiModelsInterface = { tokenLimit: 4096, requestLimit: 3000, vendor: 'Ollama', + type: 'text', }, 'orca-mini:latest': { id: 'orca-mini:latest', @@ -268,6 +316,7 @@ export const PossibleAiModels: PossibleAiModelsInterface = { tokenLimit: 4096, requestLimit: 3000, vendor: 'Ollama', + type: 'text', }, 'llama2-uncensored:latest': { id: 'llama2-uncensored:latest', @@ -275,6 +324,7 @@ export const PossibleAiModels: PossibleAiModelsInterface = { tokenLimit: 4096, requestLimit: 3000, vendor: 'Ollama', + type: 'text', }, 'yarn-mistral:7b-128k': { id: 'yarn-mistral:7b-128k', @@ -282,6 +332,7 @@ export const PossibleAiModels: PossibleAiModelsInterface = { tokenLimit: 4096, requestLimit: 3000, vendor: 'Ollama', + type: 'text', }, 'deepseek-coder:latest': { id: 'deepseek-coder:latest', @@ -289,6 +340,7 @@ export const PossibleAiModels: PossibleAiModelsInterface = { tokenLimit: 4096, requestLimit: 3000, vendor: 'Ollama', + type: 'text', }, 'deepseek-coder:6.7b': { id: 'deepseek-coder:6.7b', @@ -296,6 +348,7 @@ export const PossibleAiModels: PossibleAiModelsInterface = { tokenLimit: 4096, requestLimit: 3000, vendor: 'Ollama', + type: 'text', }, 'deepseek-coder:33b': { id: 'deepseek-coder:33b', @@ -303,6 +356,7 @@ export const PossibleAiModels: PossibleAiModelsInterface = { tokenLimit: 4096, requestLimit: 3000, vendor: 'Ollama', + type: 'text', }, // // Ollama - Custom Models @@ -313,6 +367,7 @@ export const PossibleAiModels: PossibleAiModelsInterface = { tokenLimit: 4096, requestLimit: 3000, vendor: 'Ollama', + 
type: 'text', }, 'wizardcoder13b_python_2080:latest': { id: 'wizardcoder13b_python_2080:latest', @@ -320,6 +375,7 @@ export const PossibleAiModels: PossibleAiModelsInterface = { tokenLimit: 4000, requestLimit: 3000, vendor: 'Ollama', + type: 'text', }, 'phindcodellama-34b-_2080:latest': { id: 'phindcodellama-34b-_2080:latest', @@ -327,6 +383,7 @@ export const PossibleAiModels: PossibleAiModelsInterface = { tokenLimit: 4096, requestLimit: 3000, vendor: 'Ollama', + type: 'text', }, 'phind-codellama:34bv2-vram2080': { id: 'phind-codellama:34bv2-vram2080', @@ -334,6 +391,7 @@ export const PossibleAiModels: PossibleAiModelsInterface = { tokenLimit: 4096, requestLimit: 3000, vendor: 'Ollama', + type: 'text', }, }; @@ -346,4 +404,9 @@ export interface ModelParams { stop?: string[]; max_tokens?: number; seed?: number; + n?: number | null; + quality?: 'standard' | 'hd'; + response_format?: 'url' | 'b64_json' | null; + size?: '256x256' | '512x512' | '1024x1024' | '1792x1024' | '1024x1792' | null; + style?: 'vivid' | 'natural' | null; } diff --git a/apps/unsaged/types/chat.ts b/apps/unsaged/types/chat.ts index e1c934f6..261efa23 100644 --- a/apps/unsaged/types/chat.ts +++ b/apps/unsaged/types/chat.ts @@ -17,6 +17,13 @@ export interface ChatBody { apiKey?: string; } +export interface ImageBody { + model: AiModel; + prompt: string; + params: ModelParams; + apiKey?: string; +} + export interface Conversation { id: string; name: string; diff --git a/apps/unsaged/utils/app/const.ts b/apps/unsaged/utils/app/const.ts index a074f6b5..accaaf8f 100644 --- a/apps/unsaged/utils/app/const.ts +++ b/apps/unsaged/utils/app/const.ts @@ -104,7 +104,7 @@ export const OPENAI_API_TYPE = dockerEnvVarFix(process.env.OPENAI_API_TYPE) || 'openai'; export const OPENAI_API_VERSION = - dockerEnvVarFix(process.env.OPENAI_API_VERSION) || '2023-03-15-preview'; + dockerEnvVarFix(process.env.OPENAI_API_VERSION) || '2023-12-01-preview'; export const OPENAI_ORGANIZATION = 
dockerEnvVarFix(process.env.OPENAI_ORGANIZATION) || ''; diff --git a/apps/unsaged/utils/app/handlers/helpers/messageSender.ts b/apps/unsaged/utils/app/handlers/helpers/messageSender.ts index be56011e..5a6b6a26 100644 --- a/apps/unsaged/utils/app/handlers/helpers/messageSender.ts +++ b/apps/unsaged/utils/app/handlers/helpers/messageSender.ts @@ -5,6 +5,7 @@ import { SavedSetting } from '@/types/settings'; import { SystemPrompt } from '@/types/system-prompt'; import { sendChatRequest } from '../../chat'; +import { sendImageRequest } from '../../image'; export async function messageSender( builtInSystemPrompts: SystemPrompt[], @@ -12,7 +13,7 @@ export async function messageSender( messages: Message[], savedSettings: SavedSetting[], dispatch: React.Dispatch, -) { +): Promise<{ data: null; controller: null; } | { data: ReadableStream; controller: AbortController; }> { let customPrompt = selectedConversation.systemPrompt; if (!selectedConversation.systemPrompt) { @@ -27,25 +28,50 @@ export async function messageSender( systemPrompt: customPrompt, }; - const { response, controller } = await sendChatRequest( - promptInjectedConversation, - messages, - savedSettings, - ); + const model = selectedConversation.model; + if (model.type == 'text') { + const { response, controller } = await sendChatRequest( + promptInjectedConversation, + messages, + savedSettings, + ); + + if (!response.ok) { + dispatch({ field: 'loading', value: false }); + dispatch({ field: 'messageIsStreaming', value: false }); + toast.error(response.statusText); + return { data: null, controller: null }; + } + const data = response.body; + if (!data) { + dispatch({ field: 'loading', value: false }); + dispatch({ field: 'messageIsStreaming', value: false }); + return { data: null, controller: null }; + } - if (!response.ok) { dispatch({ field: 'loading', value: false }); - dispatch({ field: 'messageIsStreaming', value: false }); + return { data, controller }; + } else { + const messagesToSend = 
messages.filter(m => m.role === 'user').map(m => m.content); + const prompt = messagesToSend.join(' '); + + const { response, controller } = await sendImageRequest( + promptInjectedConversation, + prompt, + savedSettings, + ); + + if (response.ok) { + const data = response.body; + if (data) { + dispatch({ field: 'loading', value: false }); + return { data, controller }; + } + } + toast.error(response.statusText); - return { data: null, controller: null }; - } - const data = response.body; - if (!data) { + dispatch({ field: 'loading', value: false }); - dispatch({ field: 'messageIsStreaming', value: false }); return { data: null, controller: null }; } - - dispatch({ field: 'loading', value: false }); - return { data, controller }; } diff --git a/apps/unsaged/utils/app/image.ts b/apps/unsaged/utils/app/image.ts new file mode 100644 index 00000000..ed0da3ab --- /dev/null +++ b/apps/unsaged/utils/app/image.ts @@ -0,0 +1,36 @@ +import { Conversation, ImageBody } from '@/types/chat'; +import { SavedSetting } from '@/types/settings'; + +import { getSavedSettingValue } from './storage/local/settings'; + +export const sendImageRequest = async ( + conversation: Conversation, + prompt: string, + savedSetting: SavedSetting[], +) => { + const apiKey: string | undefined = getSavedSettingValue( + savedSetting, + conversation.model.vendor.toLowerCase(), + 'api_key', + ); + + const imageBody: ImageBody = { + model: conversation.model, + prompt, + apiKey: apiKey, + params: conversation.params, + }; + + let body = JSON.stringify(imageBody); + const controller = new AbortController(); + const response = await fetch('api/image', { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + signal: controller.signal, + body, + }); + + return { response: response, controller: controller }; +}; diff --git a/apps/unsaged/utils/server/ai_vendors/anthropic/models.ts b/apps/unsaged/utils/server/ai_vendors/anthropic/models.ts index 0edd51a4..907d079d 100644 --- 
a/apps/unsaged/utils/server/ai_vendors/anthropic/models.ts +++ b/apps/unsaged/utils/server/ai_vendors/anthropic/models.ts @@ -1,10 +1,10 @@ -import { AiModel, GetAvailableOpenAIModelResponse, PossibleAiModels } from '@/types/ai-models'; +import { AiModel, GetAvailableAIModelResponse, PossibleAiModels } from '@/types/ai-models'; export const config = { runtime: 'edge', }; -export async function getAvailableAnthropicModels(key: string): Promise { +export async function getAvailableAnthropicModels(key: string): Promise { if (!key) { return { data: [] }; } diff --git a/apps/unsaged/utils/server/ai_vendors/anthropic/stream.ts b/apps/unsaged/utils/server/ai_vendors/anthropic/stream.ts index 7ce5c090..bc61872c 100644 --- a/apps/unsaged/utils/server/ai_vendors/anthropic/stream.ts +++ b/apps/unsaged/utils/server/ai_vendors/anthropic/stream.ts @@ -12,6 +12,7 @@ import { ReconnectInterval, createParser, } from 'eventsource-parser'; +import { AnthropicStream } from 'ai'; export async function streamAnthropic( model: AiModel, @@ -20,7 +21,7 @@ export async function streamAnthropic( apiKey: string | undefined, messages: Message[], tokenCount: number, -) { +): Promise<{ error?: any, stream?: any }> { if (!apiKey) { if (!ANTHROPIC_API_KEY) { return { error: 'Missing API key' }; @@ -29,6 +30,10 @@ export async function streamAnthropic( } } + if (model.type != 'text') { + return { error: 'Chat Stream is only available for model type text' }; + } + let prompt = systemPrompt; let parsedMessages = ''; @@ -83,7 +88,6 @@ export async function streamAnthropic( body: JSON.stringify(body), }); - const encoder = new TextEncoder(); const decoder = new TextDecoder(); if (res.status !== 200) { @@ -98,34 +102,7 @@ export async function streamAnthropic( } } - const stream = new ReadableStream({ - async start(controller) { - const onParse = (event: ParsedEvent | ReconnectInterval) => { - if (event.type === 'event') { - const raw_data = event.data; - - try { - const data = JSON.parse(raw_data); - 
if (data.stop_reason != null) { - controller.close(); - return; - } - const text = data.completion; - const queue = encoder.encode(text); - controller.enqueue(queue); - } catch (e) { - controller.error(e); - } - } - }; - - const parser = createParser(onParse); - - for await (const chunk of res.body as any) { - parser.feed(decoder.decode(chunk)); - } - }, - }); + const stream = AnthropicStream(res); return { stream: stream }; } diff --git a/apps/unsaged/utils/server/ai_vendors/anthropic/token-count.ts b/apps/unsaged/utils/server/ai_vendors/anthropic/token-count.ts index b24708fb..54860ced 100644 --- a/apps/unsaged/utils/server/ai_vendors/anthropic/token-count.ts +++ b/apps/unsaged/utils/server/ai_vendors/anthropic/token-count.ts @@ -17,7 +17,7 @@ export async function countTokensAnthropic( tokenCount += text.length / 4; } - if (tokenCount > model.requestLimit) { + if (model.type == 'text' && tokenCount > model.requestLimit) { return { error: 'Token limit exceeded' }; } } diff --git a/apps/unsaged/utils/server/ai_vendors/google/models.ts b/apps/unsaged/utils/server/ai_vendors/google/models.ts index 89e62385..2b18bb99 100644 --- a/apps/unsaged/utils/server/ai_vendors/google/models.ts +++ b/apps/unsaged/utils/server/ai_vendors/google/models.ts @@ -1,10 +1,10 @@ -import { AiModel, GetAvailableOpenAIModelResponse, PossibleAiModels } from '@/types/ai-models'; +import { AiModel, GetAvailableAIModelResponse, PossibleAiModels } from '@/types/ai-models'; export const config = { runtime: 'edge', }; -export async function getAvailablePalm2Models(key: string): Promise { +export async function getAvailablePalm2Models(key: string): Promise { if (!key) { return { data: [] }; } diff --git a/apps/unsaged/utils/server/ai_vendors/google/stream.ts b/apps/unsaged/utils/server/ai_vendors/google/stream.ts index eae9a569..cdc848c2 100644 --- a/apps/unsaged/utils/server/ai_vendors/google/stream.ts +++ b/apps/unsaged/utils/server/ai_vendors/google/stream.ts @@ -10,7 +10,7 @@ export async 
function streamPaLM2( apiKey: string | undefined, messages: Message[], tokenCount: number, -) { +): Promise<{ error?: any, stream?: any }> { if (!apiKey) { if (!PALM_API_KEY) { return { error: 'Missing API key' }; diff --git a/apps/unsaged/utils/server/ai_vendors/google/token-count.ts b/apps/unsaged/utils/server/ai_vendors/google/token-count.ts index e5cf90fe..a9484604 100644 --- a/apps/unsaged/utils/server/ai_vendors/google/token-count.ts +++ b/apps/unsaged/utils/server/ai_vendors/google/token-count.ts @@ -17,7 +17,7 @@ export async function countTokensGoogle( tokenCount += text.length / 4; } - if (tokenCount > model.requestLimit) { + if (model.type == 'text' && tokenCount > model.requestLimit) { return { error: 'Token limit exceeded' }; } } diff --git a/apps/unsaged/utils/server/ai_vendors/image.ts b/apps/unsaged/utils/server/ai_vendors/image.ts new file mode 100644 index 00000000..e2f44285 --- /dev/null +++ b/apps/unsaged/utils/server/ai_vendors/image.ts @@ -0,0 +1,20 @@ +import { AiModel, ModelParams } from '@/types/ai-models'; + +import { imageOpenAI } from './openai/image'; + +export async function getImage( + model: AiModel, + params: ModelParams, + apiKey: string | undefined, + prompt: string, +) { + if (model.vendor === 'OpenAI') { + return imageOpenAI( + model, + params, + apiKey, + prompt + ); + } + return { error: 'Unknown vendor' }; +} diff --git a/apps/unsaged/utils/server/ai_vendors/ollama/models.ts b/apps/unsaged/utils/server/ai_vendors/ollama/models.ts index b831bd9c..9af997df 100644 --- a/apps/unsaged/utils/server/ai_vendors/ollama/models.ts +++ b/apps/unsaged/utils/server/ai_vendors/ollama/models.ts @@ -1,12 +1,12 @@ import { OLLAMA_HOST, OLLAMA_BASIC_USER, OLLAMA_BASIC_PWD, DEBUG_MODE } from '@/utils/app/const'; -import { GetAvailableOpenAIModelResponse, PossibleAiModels } from '@/types/ai-models'; +import { GetAvailableAIModelResponse, PossibleAiModels } from '@/types/ai-models'; export const config = { runtime: 'edge', }; -export async 
function getAvailableOllamaModels(): Promise { +export async function getAvailableOllamaModels(): Promise { if (OLLAMA_HOST == '') { return { data: [] }; } diff --git a/apps/unsaged/utils/server/ai_vendors/ollama/stream.ts b/apps/unsaged/utils/server/ai_vendors/ollama/stream.ts index d36edd61..92f02a24 100644 --- a/apps/unsaged/utils/server/ai_vendors/ollama/stream.ts +++ b/apps/unsaged/utils/server/ai_vendors/ollama/stream.ts @@ -12,7 +12,7 @@ export async function streamOllama( systemPrompt: string, params: ModelParams, messages: Message[], -) { +): Promise<{ error?: any, stream?: any }> { if (OLLAMA_HOST == '') { return { error: 'Missing OLLAMA_HOST' }; } diff --git a/apps/unsaged/utils/server/ai_vendors/ollama/token-count.ts b/apps/unsaged/utils/server/ai_vendors/ollama/token-count.ts index 7aec80b8..71c426bc 100644 --- a/apps/unsaged/utils/server/ai_vendors/ollama/token-count.ts +++ b/apps/unsaged/utils/server/ai_vendors/ollama/token-count.ts @@ -17,7 +17,7 @@ export async function countTokensOllama( tokenCount += text.length / 4; } - if (tokenCount > model.requestLimit) { + if (model.type == 'text' && tokenCount > model.requestLimit) { return { error: 'Token limit exceeded' }; } } diff --git a/apps/unsaged/utils/server/ai_vendors/openai/image.ts b/apps/unsaged/utils/server/ai_vendors/openai/image.ts new file mode 100644 index 00000000..9d3b1358 --- /dev/null +++ b/apps/unsaged/utils/server/ai_vendors/openai/image.ts @@ -0,0 +1,54 @@ +import { + OPENAI_API_KEY, +} from '@/utils/app/const'; + +import { AiModel, ModelParams } from '@/types/ai-models'; + +import { getOpenAiApi } from './openai'; +import { ImageGenerateParams } from 'openai/resources'; +import OpenAI from 'openai'; + +export async function imageOpenAI( + model: AiModel, + params: ModelParams, + apiKey: string | undefined, + prompt: string, +): Promise<{ error?: any, images?: any[] }> { + if (model.type != 'image') { + return { error: 'Image generation is only available for model type image' }; + } 
+ + if (!apiKey) { + if (!OPENAI_API_KEY) { + return { error: 'Missing API key' }; + } else { + apiKey = OPENAI_API_KEY; + } + } + + const openai = await getOpenAiApi(apiKey, model.id); + + const body: ImageGenerateParams = { + model: model.id, + prompt, + size: params.size, + response_format: params.response_format || "url", + n: params.n || 1, + } + + if (model.id !== 'dall-e-3') { + body.quality = params.quality || "standard"; + body.style = params.style || 'natural'; + } + + return openai.images.generate(body).then(({ data }) => { + return { images: data }; + }).catch((err) => { + if (err instanceof OpenAI.APIError) { + console.error(err.status, err.error); + return { error: err.error }; + } else { + throw err; + } + }); +} diff --git a/apps/unsaged/utils/server/ai_vendors/openai/models.ts b/apps/unsaged/utils/server/ai_vendors/openai/models.ts index a690ef4b..6168838b 100644 --- a/apps/unsaged/utils/server/ai_vendors/openai/models.ts +++ b/apps/unsaged/utils/server/ai_vendors/openai/models.ts @@ -1,25 +1,49 @@ import { DEBUG_MODE, + OPENAI_API_KEY, OPENAI_API_TYPE, + OPENAI_API_URL, + OPENAI_API_VERSION, } from '@/utils/app/const'; -import { AiModel, GetAvailableOpenAIModelResponse, PossibleAiModels } from '@/types/ai-models'; -import { getOpenAi } from './openai'; +import { AiModel, GetAvailableAIModelResponse, PossibleAiModels } from '@/types/ai-models'; +import { getOpenAiApi } from './openai'; export const config = { runtime: 'edge', }; -export async function getAvailableOpenAIModels(key: string): Promise { +export async function getAvailableOpenAIModels(key: string): Promise { if (!key) { return { data: [] }; } - const openai = await getOpenAi(key); + let responseData = null; + if (OPENAI_API_TYPE === 'azure') { + let url = `${OPENAI_API_URL}/openai/deployments?api-version=2023-03-15-preview`; - const list = await openai.models.list(); + const res = await fetch(url, { + headers: { + 'Content-Type': 'application/json', + 'api-key': `${key ? 
key : OPENAI_API_KEY}`, + }, + }); + + if (res.status !== 200) { + console.error('Error fetching OpenAi models', res.status, res.body); + return { error: res.status, data: [] }; + } + + const json = await res.json(); + responseData = json.data; + } else { + const openai = await getOpenAiApi(key); + + const list = await openai.models.list(); + responseData = list.data; + } - const models: (AiModel | null)[] = list.data + const models: (AiModel | null)[] = responseData .map((openaiModel: any) => { const model_name = OPENAI_API_TYPE === 'azure' ? openaiModel.model : openaiModel.id; diff --git a/apps/unsaged/utils/server/ai_vendors/openai/openai.ts b/apps/unsaged/utils/server/ai_vendors/openai/openai.ts index 088dc6a3..cfc2b806 100644 --- a/apps/unsaged/utils/server/ai_vendors/openai/openai.ts +++ b/apps/unsaged/utils/server/ai_vendors/openai/openai.ts @@ -7,23 +7,17 @@ import { OPENAI_ORGANIZATION, } from '@/utils/app/const'; -export function getOpenAi(apiKey: string, modelId?: string) { - if (OPENAI_API_TYPE === 'azure' && modelId) +export function getOpenAiApi(apiKey: string, modelId?: string) { + if (OPENAI_API_TYPE === 'azure' && !modelId) console.error('ModelId for Azure Deployment is not defined!') const configuration: ClientOptions = { apiKey: apiKey ? apiKey : OPENAI_API_KEY, organization: OPENAI_ORGANIZATION, ...(OPENAI_API_TYPE === 'azure' && { - baseOptions: { - headers: { - "api-key": apiKey ? apiKey : OPENAI_API_KEY, - }, - }, - basePath: `${OPENAI_API_URL}/openai/deployments/${modelId}`, - defaultQueryParams: new URLSearchParams({ - "api-version": OPENAI_API_VERSION, - }), + defaultHeaders: { 'api-key': apiKey ? 
apiKey : OPENAI_API_KEY }, + baseURL: `${OPENAI_API_URL}/openai/deployments/${modelId}`, + defaultQuery: { 'api-version': OPENAI_API_VERSION }, }), }; diff --git a/apps/unsaged/utils/server/ai_vendors/openai/stream.ts b/apps/unsaged/utils/server/ai_vendors/openai/stream.ts index e7b3d10d..b63e11b8 100644 --- a/apps/unsaged/utils/server/ai_vendors/openai/stream.ts +++ b/apps/unsaged/utils/server/ai_vendors/openai/stream.ts @@ -1,19 +1,14 @@ import { OPENAI_API_KEY, - OPENAI_API_TYPE, - OPENAI_API_URL, - OPENAI_API_VERSION, - OPENAI_ORGANIZATION, } from '@/utils/app/const'; import { AiModel, ModelParams } from '@/types/ai-models'; import { Message } from '@/types/chat'; -import { - ParsedEvent, - ReconnectInterval, - createParser, -} from 'eventsource-parser'; +import OpenAI from 'openai'; +import { OpenAIStream } from 'ai'; +import { getOpenAiApi } from './openai'; +import { ChatCompletionCreateParamsStreaming } from 'openai/resources'; export async function streamOpenAI( model: AiModel, @@ -22,7 +17,7 @@ export async function streamOpenAI( apiKey: string | undefined, messages: Message[], tokenCount: number, -) { +): Promise<{ error?: any, stream?: any }> { if (!apiKey) { if (!OPENAI_API_KEY) { return { error: 'Missing API key' }; @@ -31,6 +26,12 @@ export async function streamOpenAI( } } + if (model.type != 'text') { + return { error: 'Chat Stream is only available for model type text' }; + } + + const openai = await getOpenAiApi(apiKey, model.id); + let messagesToSend: any[] = []; for (let i = messages.length - 1; i >= 0; i--) { @@ -41,13 +42,8 @@ export async function streamOpenAI( messagesToSend = [message, ...messagesToSend]; } - let url = `${OPENAI_API_URL}/chat/completions`; - if (OPENAI_API_TYPE === 'azure') { - url = `${OPENAI_API_URL}/openai/deployments/${model.id}/chat/completions?api-version=${OPENAI_API_VERSION}`; - } - - const body: { [key: string]: any } = { - ...(OPENAI_API_TYPE === 'openai' && { model: model.id }), + const body: 
ChatCompletionCreateParamsStreaming = { + model: model.id, messages: [ { role: 'system', @@ -56,108 +52,50 @@ export async function streamOpenAI( ...messagesToSend, ], stream: true, - }; + } if (model.id !== 'gpt-4-1106-preview') { - body['max_tokens'] = model.tokenLimit - tokenCount; + body.max_tokens = model.tokenLimit - tokenCount; } if (params.temperature) { - body['temperature'] = params.temperature; + body.temperature = params.temperature; } if (params.max_tokens) { - body['max_tokens'] = params.max_tokens; + body.max_tokens = params.max_tokens; } if (params.repeat_penalty) { - body['frequency_penalty'] = params.repeat_penalty; + body.frequency_penalty = params.repeat_penalty; } if (params.presence_penalty) { - body['presence_penalty'] = params.presence_penalty; + body.presence_penalty = params.presence_penalty; } if (params.stop) { - body['stop'] = params.stop; + body.stop = params.stop; } if (params.top_p) { - body['top_p'] = params.top_p; + body.top_p = params.top_p; } if (params.seed) { - body['seed'] = params.seed; + body.seed = params.seed; } - const res = await fetch(url, { - headers: { - 'Content-Type': 'application/json', - ...(OPENAI_API_TYPE === 'openai' && { - Authorization: `Bearer ${apiKey}`, - }), - ...(OPENAI_API_TYPE === 'azure' && { - 'api-key': apiKey, - }), - ...(OPENAI_API_TYPE === 'openai' && - OPENAI_ORGANIZATION && { - 'OpenAI-Organization': OPENAI_ORGANIZATION, - }), - }, - method: 'POST', - body: JSON.stringify(body), - }); - - const encoder = new TextEncoder(); - const decoder = new TextDecoder(); + return openai.chat.completions.create(body).then((completions) => { + const stream = OpenAIStream(completions); - if (res.status !== 200) { - const result = await res.json(); - if (result.error) { - return { error: result.error }; + return { stream: stream }; + }).catch((err) => { + console.error(err.status, err); + if (err instanceof OpenAI.APIError) { + return { error: err.error }; } else { - throw new Error( - `OpenAI API returned an 
error: ${ - decoder.decode(result?.value) || result.statusText - }`, - ); + throw err; } - } - - const stream = new ReadableStream({ - async start(controller) { - const onParse = (event: ParsedEvent | ReconnectInterval) => { - if (event.type === 'event') { - const data = event.data; - if (data === '[DONE]') { - controller.close(); - return; - } - - try { - const json = JSON.parse(data); - if (json.choices.length > 0) { - if (json.choices[0].finish_reason != null) { - controller.close(); - return; - } - const text = json.choices[0].delta.content; - const queue = encoder.encode(text); - controller.enqueue(queue); - } - } catch (e) { - controller.error(e); - } - } - }; - - const parser = createParser(onParse); - - for await (const chunk of res.body as any) { - parser.feed(decoder.decode(chunk)); - } - }, }); - - return { stream: stream }; } diff --git a/apps/unsaged/utils/server/ai_vendors/openai/token-count.ts b/apps/unsaged/utils/server/ai_vendors/openai/token-count.ts index 504bc157..2e6fab0e 100644 --- a/apps/unsaged/utils/server/ai_vendors/openai/token-count.ts +++ b/apps/unsaged/utils/server/ai_vendors/openai/token-count.ts @@ -48,7 +48,7 @@ export async function countTokensOpenAI( if (tokens) { tokenCount += tokens.length + tokens_per_message; } - if (tokenCount > model.requestLimit) { + if (model.type == 'text' && tokenCount > model.requestLimit) { encoding.free(); return { error: 'Token limit exceeded' }; } diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 4f9d3e77..691a0872 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -67,7 +67,7 @@ importers: specifier: ^10.4.16 version: 10.4.16(postcss@8.4.32) postcss: - specifier: ^8.4.31 + specifier: ^8.4.32 version: 8.4.32 tailwindcss: specifier: ^3.3.5 @@ -120,6 +120,9 @@ importers: '@vercel/edge-config': specifier: ^0.4.1 version: 0.4.1 + ai: + specifier: ^2.2.27 + version: 2.2.29(react@18.2.0)(solid-js@1.8.7)(svelte@4.2.8)(vue@3.3.11) class-variance-authority: specifier: ^0.7.0 version: 0.7.0 @@ -5031,6 
+5034,89 @@ packages: pretty-format: 29.7.0 dev: true + /@vue/compiler-core@3.3.11: + resolution: {integrity: sha512-h97/TGWBilnLuRaj58sxNrsUU66fwdRKLOLQ9N/5iNDfp+DZhYH9Obhe0bXxhedl8fjAgpRANpiZfbgWyruQ0w==} + dependencies: + '@babel/parser': 7.23.5 + '@vue/shared': 3.3.11 + estree-walker: 2.0.2 + source-map-js: 1.0.2 + dev: false + + /@vue/compiler-dom@3.3.11: + resolution: {integrity: sha512-zoAiUIqSKqAJ81WhfPXYmFGwDRuO+loqLxvXmfUdR5fOitPoUiIeFI9cTTyv9MU5O1+ZZglJVTusWzy+wfk5hw==} + dependencies: + '@vue/compiler-core': 3.3.11 + '@vue/shared': 3.3.11 + dev: false + + /@vue/compiler-sfc@3.3.11: + resolution: {integrity: sha512-U4iqPlHO0KQeK1mrsxCN0vZzw43/lL8POxgpzcJweopmqtoYy9nljJzWDIQS3EfjiYhfdtdk9Gtgz7MRXnz3GA==} + dependencies: + '@babel/parser': 7.23.5 + '@vue/compiler-core': 3.3.11 + '@vue/compiler-dom': 3.3.11 + '@vue/compiler-ssr': 3.3.11 + '@vue/reactivity-transform': 3.3.11 + '@vue/shared': 3.3.11 + estree-walker: 2.0.2 + magic-string: 0.30.5 + postcss: 8.4.32 + source-map-js: 1.0.2 + dev: false + + /@vue/compiler-ssr@3.3.11: + resolution: {integrity: sha512-Zd66ZwMvndxRTgVPdo+muV4Rv9n9DwQ4SSgWWKWkPFebHQfVYRrVjeygmmDmPewsHyznCNvJ2P2d6iOOhdv8Qg==} + dependencies: + '@vue/compiler-dom': 3.3.11 + '@vue/shared': 3.3.11 + dev: false + + /@vue/reactivity-transform@3.3.11: + resolution: {integrity: sha512-fPGjH0wqJo68A0wQ1k158utDq/cRyZNlFoxGwNScE28aUFOKFEnCBsvyD8jHn+0kd0UKVpuGuaZEQ6r9FJRqCg==} + dependencies: + '@babel/parser': 7.23.5 + '@vue/compiler-core': 3.3.11 + '@vue/shared': 3.3.11 + estree-walker: 2.0.2 + magic-string: 0.30.5 + dev: false + + /@vue/reactivity@3.3.11: + resolution: {integrity: sha512-D5tcw091f0nuu+hXq5XANofD0OXnBmaRqMYl5B3fCR+mX+cXJIGNw/VNawBqkjLNWETrFW0i+xH9NvDbTPVh7g==} + dependencies: + '@vue/shared': 3.3.11 + dev: false + + /@vue/runtime-core@3.3.11: + resolution: {integrity: sha512-g9ztHGwEbS5RyWaOpXuyIVFTschclnwhqEbdy5AwGhYOgc7m/q3NFwr50MirZwTTzX55JY8pSkeib9BX04NIpw==} + dependencies: + '@vue/reactivity': 3.3.11 + '@vue/shared': 
3.3.11 + dev: false + + /@vue/runtime-dom@3.3.11: + resolution: {integrity: sha512-OlhtV1PVpbgk+I2zl+Y5rQtDNcCDs12rsRg71XwaA2/Rbllw6mBLMi57VOn8G0AjOJ4Mdb4k56V37+g8ukShpQ==} + dependencies: + '@vue/runtime-core': 3.3.11 + '@vue/shared': 3.3.11 + csstype: 3.1.3 + dev: false + + /@vue/server-renderer@3.3.11(vue@3.3.11): + resolution: {integrity: sha512-AIWk0VwwxCAm4wqtJyxBylRTXSy1wCLOKbWxHaHiu14wjsNYtiRCSgVuqEPVuDpErOlRdNnuRgipQfXRLjLN5A==} + peerDependencies: + vue: 3.3.11 + dependencies: + '@vue/compiler-ssr': 3.3.11 + '@vue/shared': 3.3.11 + vue: 3.3.11(typescript@5.3.3) + dev: false + + /@vue/shared@3.3.11: + resolution: {integrity: sha512-u2G8ZQ9IhMWTMXaWqZycnK4UthG1fA238CD+DP4Dm4WJi5hdUKKLg0RMRaRpDPNMdkTwIDkp7WtD0Rd9BH9fLw==} + dev: false + /@webassemblyjs/ast@1.11.6: resolution: {integrity: sha512-IN1xI7PwOvLPgjcf180gC1bqn3q/QaOCwYUahIOhbYUu8KA/3tw2RT/T0Gidi1l7Hhj5D/INhJxiICObqpMu4Q==} dependencies: @@ -5215,6 +5301,37 @@ packages: indent-string: 4.0.0 dev: false + /ai@2.2.29(react@18.2.0)(solid-js@1.8.7)(svelte@4.2.8)(vue@3.3.11): + resolution: {integrity: sha512-/zzSTTKF5LxMGQuNVUnNjs7X6PWYfb6M88Zn74gCUnM3KCYgh0CiAWhLyhKP6UtK0H5mHSmXgt0ZkZYUecRp0w==} + engines: {node: '>=14.6'} + peerDependencies: + react: ^18.2.0 + solid-js: ^1.7.7 + svelte: ^3.0.0 || ^4.0.0 + vue: ^3.3.4 + peerDependenciesMeta: + react: + optional: true + solid-js: + optional: true + svelte: + optional: true + vue: + optional: true + dependencies: + eventsource-parser: 1.0.0 + nanoid: 3.3.6 + react: 18.2.0 + solid-js: 1.8.7 + solid-swr-store: 0.10.7(solid-js@1.8.7)(swr-store@0.10.6) + sswr: 2.0.0(svelte@4.2.8) + svelte: 4.2.8 + swr: 2.2.0(react@18.2.0) + swr-store: 0.10.6 + swrv: 1.0.4(vue@3.3.11) + vue: 3.3.11(typescript@5.3.3) + dev: false + /ajv-formats@2.1.1(ajv@8.12.0): resolution: {integrity: sha512-Wx0Kx52hxE7C18hkMEggYlEifqWZtYaRgouJor+WMdPnQyEK13vgEWyVNup7SoeeoLMsr4kf5h6dOW11I15MUA==} peerDependencies: @@ -6040,6 +6157,16 @@ packages: engines: {node: ^14.17.0 || ^16.13.0 || 
>=18.0.0} dev: true + /code-red@1.0.4: + resolution: {integrity: sha512-7qJWqItLA8/VPVlKJlFXU+NBlo/qyfs39aJcuMT/2ere32ZqvF5OSxgdM5xOfJJ7O429gg2HM47y8v9P+9wrNw==} + dependencies: + '@jridgewell/sourcemap-codec': 1.4.15 + '@types/estree': 1.0.5 + acorn: 8.11.2 + estree-walker: 3.0.3 + periscopic: 3.1.0 + dev: false + /collapse-white-space@2.1.0: resolution: {integrity: sha512-loKTxY1zCOuG4j9f6EPnuyyYkf58RnhhWTvRoZEokgB+WbdXehfjFviyOVYkqzEWz1Q5kRiZdBYS5SwxbQYwzw==} dev: false @@ -6414,6 +6541,14 @@ packages: source-map: 0.6.1 dev: false + /css-tree@2.3.1: + resolution: {integrity: sha512-6Fv1DV/TYw//QF5IzQdqsNDjx/wc8TrMBZsqjL9eW01tWb7R7k/mq+/VXfJCl7SoD5emsJop9cOByJZfs8hYIw==} + engines: {node: ^10 || ^12.20.0 || ^14.13.0 || >=15.0.0} + dependencies: + mdn-data: 2.0.30 + source-map-js: 1.0.2 + dev: false + /css-what@6.1.0: resolution: {integrity: sha512-HTUrgRJ7r4dsZKU6GjmpfRK1O76h97Z8MfS1G0FozR+oF2kG6Vfe8JE6zwrkbxigziPHinCJ+gCPjA9EaBDtRw==} engines: {node: '>= 6'} @@ -7812,6 +7947,10 @@ packages: '@types/unist': 3.0.2 dev: false + /estree-walker@2.0.2: + resolution: {integrity: sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==} + dev: false + /estree-walker@3.0.3: resolution: {integrity: sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==} dependencies: @@ -7869,6 +8008,11 @@ packages: engines: {node: '>=14.18'} dev: false + /eventsource-parser@1.0.0: + resolution: {integrity: sha512-9jgfSCa3dmEme2ES3mPByGXfgZ87VbP97tng1G2nWwWx6bV2nYxm2AWCrbQjXToSe+yYlqaZNtxffR9IeQr95g==} + engines: {node: '>=14.18'} + dev: false + /execa@0.8.0: resolution: {integrity: sha512-zDWS+Rb1E8BlqqhALSt9kUhss8Qq4nN3iof3gsOdyINksElaPyNBtKUMTR62qhvgVWR0CqCX7sdnKe4MnUbFEA==} engines: {node: '>=4'} @@ -9964,6 +10108,10 @@ packages: pkg-types: 1.0.3 dev: true + /locate-character@3.0.0: + resolution: {integrity: sha512-SW13ws7BjaeJ6p7Q6CO2nchbYEc3X3J6WrmTTDto7yMPqVSZTUyY5Tjbid+Ab8gLnATtygYtiDIJGQRRn2ZOiA==} + 
dev: false + /locate-path@3.0.0: resolution: {integrity: sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==} engines: {node: '>=6'} @@ -10100,7 +10248,6 @@ packages: engines: {node: '>=12'} dependencies: '@jridgewell/sourcemap-codec': 1.4.15 - dev: true /make-dir@4.0.0: resolution: {integrity: sha512-hXdUTZYIVOt1Ex//jAQi+wTZZpUpwBj/0QsOzqegb3rGMMeJiSEu5xLHnYfBrRV4RH2+OCSOO95Is/7x1WJ4bw==} @@ -10576,6 +10723,10 @@ packages: resolution: {integrity: sha512-dn6wd0uw5GsdswPFfsgMp5NSB0/aDe6fK94YJV/AJDYXL6HVLWBsxeq7js7Ad+mU2K9LAlwpk6kN2D5mwCPVow==} dev: false + /mdn-data@2.0.30: + resolution: {integrity: sha512-GaqWWShW4kv/G9IEucWScBx9G1/vsFZZJUO+tD26M8J8z3Kw5RDQjaoZe03YAClgeS/SWPOcb4nkFBTEi5DUEA==} + dev: false + /media-typer@0.3.0: resolution: {integrity: sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==} engines: {node: '>= 0.6'} @@ -11493,6 +11644,12 @@ packages: object-assign: 4.1.1 thenify-all: 1.6.0 + /nanoid@3.3.6: + resolution: {integrity: sha512-BGcqMMJuToF7i1rt+2PWSNVnWIkGCU78jBG3RxO/bZlnZPK2Cmi2QaffxGO/2RvWi9sL+FAiRiXMgsyxQ1DIDA==} + engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1} + hasBin: true + dev: false + /nanoid@3.3.7: resolution: {integrity: sha512-eSRppjcPIatRIMC1U6UngP8XFcz8MQWGQdt1MTBQ7NaAmvXDfvNxbvWV3x2y6CdEUciCSsDHDQZbhYaB8QEo2g==} engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1} @@ -14049,6 +14206,11 @@ packages: dependencies: randombytes: 2.1.0 + /seroval@0.15.1: + resolution: {integrity: sha512-OPVtf0qmeC7RW+ScVX+7aOS+xoIM7pWcZ0jOWg2aTZigCydgRB04adfteBRbecZnnrO1WuGQ+C3tLeBBzX2zSQ==} + engines: {node: '>=10'} + dev: false + /serve-handler@6.1.5: resolution: {integrity: sha512-ijPFle6Hwe8zfmBxJdE+5fta53fdIY0lHISJvuikXB3VYFafRjMRpOffSPvCYsbKyBA7pvy9oYr/BT1O3EArlg==} dependencies: @@ -14247,6 +14409,24 @@ packages: websocket-driver: 0.7.4 dev: false + /solid-js@1.8.7: + resolution: {integrity: 
sha512-9dzrSVieh2zj3SnJ02II6xZkonR6c+j/91b7XZUNcC6xSaldlqjjGh98F1fk5cRJ8ZTkzqF5fPIWDxEOs6QZXA==} + dependencies: + csstype: 3.1.3 + seroval: 0.15.1 + dev: false + + /solid-swr-store@0.10.7(solid-js@1.8.7)(swr-store@0.10.6): + resolution: {integrity: sha512-A6d68aJmRP471aWqKKPE2tpgOiR5fH4qXQNfKIec+Vap+MGQm3tvXlT8n0I8UgJSlNAsSAUuw2VTviH2h3Vv5g==} + engines: {node: '>=10'} + peerDependencies: + solid-js: ^1.2 + swr-store: ^0.10 + dependencies: + solid-js: 1.8.7 + swr-store: 0.10.6 + dev: false + /sort-css-media-queries@2.1.0: resolution: {integrity: sha512-IeWvo8NkNiY2vVYdPa27MCQiR0MN0M80johAYFVxWWXQ44KU84WNxjslwBHmc/7ZL2ccwkM7/e6S5aiKZXm7jA==} engines: {node: '>= 6.3.0'} @@ -14332,6 +14512,15 @@ packages: engines: {node: '>=12'} dev: false + /sswr@2.0.0(svelte@4.2.8): + resolution: {integrity: sha512-mV0kkeBHcjcb0M5NqKtKVg/uTIYNlIIniyDfSGrSfxpEdM9C365jK0z55pl9K0xAkNTJi2OAOVFQpgMPUk+V0w==} + peerDependencies: + svelte: ^4.0.0 + dependencies: + svelte: 4.2.8 + swrev: 4.0.0 + dev: false + /stable@0.1.8: resolution: {integrity: sha512-ji9qxRnOVfcuLDySj9qzhGSEFVobyt1kIOSkj1qZzYLzq7Tos/oUUWvotUPQLlrsidqsK6tBH89Bc9kL5zHA6w==} deprecated: 'Modern JS already guarantees Array#sort() is a stable sort, so this library is deprecated. 
See the compatibility table on MDN: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/sort#browser_compatibility' @@ -14620,6 +14809,25 @@ packages: resolution: {integrity: sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==} engines: {node: '>= 0.4'} + /svelte@4.2.8: + resolution: {integrity: sha512-hU6dh1MPl8gh6klQZwK/n73GiAHiR95IkFsesLPbMeEZi36ydaXL/ZAb4g9sayT0MXzpxyZjR28yderJHxcmYA==} + engines: {node: '>=16'} + dependencies: + '@ampproject/remapping': 2.2.1 + '@jridgewell/sourcemap-codec': 1.4.15 + '@jridgewell/trace-mapping': 0.3.20 + acorn: 8.11.2 + aria-query: 5.3.0 + axobject-query: 3.2.1 + code-red: 1.0.4 + css-tree: 2.3.1 + estree-walker: 3.0.3 + is-reference: 3.0.2 + locate-character: 3.0.0 + magic-string: 0.30.5 + periscopic: 3.1.0 + dev: false + /svg-parser@2.0.4: resolution: {integrity: sha512-e4hG1hRwoOdRb37cIMSgzNsxyzKfayW6VOflrwvR+/bzrkyxY/31WkbgnQpgtrNp1SdpJvpUAGTa/ZoiPNDuRQ==} dev: false @@ -14638,6 +14846,34 @@ packages: stable: 0.1.8 dev: false + /swr-store@0.10.6: + resolution: {integrity: sha512-xPjB1hARSiRaNNlUQvWSVrG5SirCjk2TmaUyzzvk69SZQan9hCJqw/5rG9iL7xElHU784GxRPISClq4488/XVw==} + engines: {node: '>=10'} + dependencies: + dequal: 2.0.3 + dev: false + + /swr@2.2.0(react@18.2.0): + resolution: {integrity: sha512-AjqHOv2lAhkuUdIiBu9xbuettzAzWXmCEcLONNKJRba87WAefz8Ca9d6ds/SzrPc235n1IxWYdhJ2zF3MNUaoQ==} + peerDependencies: + react: ^16.11.0 || ^17.0.0 || ^18.0.0 + dependencies: + react: 18.2.0 + use-sync-external-store: 1.2.0(react@18.2.0) + dev: false + + /swrev@4.0.0: + resolution: {integrity: sha512-LqVcOHSB4cPGgitD1riJ1Hh4vdmITOp+BkmfmXRh4hSF/t7EnS4iD+SOTmq7w5pPm/SiPeto4ADbKS6dHUDWFA==} + dev: false + + /swrv@1.0.4(vue@3.3.11): + resolution: {integrity: sha512-zjEkcP8Ywmj+xOJW3lIT65ciY/4AL4e/Or7Gj0MzU3zBJNMdJiT8geVZhINavnlHRMMCcJLHhraLTAiDOTmQ9g==} + peerDependencies: + vue: '>=3.2.26 < 4' + dependencies: + vue: 3.3.11(typescript@5.3.3) + dev: false + 
/symbol-tree@3.2.4: resolution: {integrity: sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw==} dev: true @@ -15349,6 +15585,14 @@ packages: tslib: 2.6.2 dev: false + /use-sync-external-store@1.2.0(react@18.2.0): + resolution: {integrity: sha512-eEgnFxGQ1Ife9bzYs6VLi8/4X6CObHMw9Qr9tPY43iKwsPw8xE8+EFsf/2cFZ5S3esXgpWgtSCtLNS41F+sKPA==} + peerDependencies: + react: ^16.8.0 || ^17.0.0 || ^18.0.0 + dependencies: + react: 18.2.0 + dev: false + /util-deprecate@1.0.2: resolution: {integrity: sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==} @@ -15580,6 +15824,22 @@ packages: resolution: {integrity: sha512-AFbieoL7a5LMqcnOF04ji+rpXadgOXnZsxQr//r83kLPr7biP7am3g9zbaZIaBGwBRWeSvoMD4mgPdX3e4NWBg==} dev: false + /vue@3.3.11(typescript@5.3.3): + resolution: {integrity: sha512-d4oBctG92CRO1cQfVBZp6WJAs0n8AK4Xf5fNjQCBeKCvMI1efGQ5E3Alt1slFJS9fZuPcFoiAiqFvQlv1X7t/w==} + peerDependencies: + typescript: '*' + peerDependenciesMeta: + typescript: + optional: true + dependencies: + '@vue/compiler-dom': 3.3.11 + '@vue/compiler-sfc': 3.3.11 + '@vue/runtime-dom': 3.3.11 + '@vue/server-renderer': 3.3.11(vue@3.3.11) + '@vue/shared': 3.3.11 + typescript: 5.3.3 + dev: false + /w3c-xmlserializer@4.0.0: resolution: {integrity: sha512-d+BFHzbiCx6zGfz0HyQ6Rg69w9k19nviJspaj4yNscGjrHu94sVP+aRm75yEbCh+r2/yR+7q6hux9LVtbuTGBw==} engines: {node: '>=14'}