diff --git a/app/api/openai.ts b/app/api/openai.ts
index bbba69e569c..2b5deca8be3 100644
--- a/app/api/openai.ts
+++ b/app/api/openai.ts
@@ -14,7 +14,7 @@ function getModels(remoteModelRes: OpenAIListModelResponse) {
if (config.disableGPT4) {
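+ // hide gpt-4*, chatgpt-4o* and o1* models, but keep gpt-4o-mini*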
remoteModelRes.data = remoteModelRes.data.filter(
(m) =>
- !(m.id.startsWith("gpt-4") || m.id.startsWith("chatgpt-4o")) ||
+ !(m.id.startsWith("gpt-4") || m.id.startsWith("chatgpt-4o") || m.id.startsWith("o1")) ||
m.id.startsWith("gpt-4o-mini"),
);
}
diff --git a/app/client/platforms/openai.ts b/app/client/platforms/openai.ts
index 7c1588440b2..15cfb7ca602 100644
--- a/app/client/platforms/openai.ts
+++ b/app/client/platforms/openai.ts
@@ -224,7 +224,7 @@ export class ChatGPTApi implements LLMApi {
- // O1 not support image, tools (plugin in ChatGPTNextWeb) and system, stream, logprobs, temperature, top_p, n, presence_penalty, frequency_penalty yet.
+ // O1 does not yet support image input, tools (plugins in ChatGPTNextWeb), system messages, logprobs, temperature, top_p, n, presence_penalty, or frequency_penalty.
requestPayload = {
messages,
- stream: !isO1 ? options.config.stream : false,
+ stream: options.config.stream,
model: modelConfig.model,
temperature: !isO1 ? modelConfig.temperature : 1,
presence_penalty: !isO1 ? modelConfig.presence_penalty : 0,
@@ -247,7 +247,7 @@ export class ChatGPTApi implements LLMApi {
console.log("[Request] openai payload: ", requestPayload);
- const shouldStream = !isDalle3 && !!options.config.stream && !isO1;
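+ // o1 now supports streaming, so only DALL·E 3 requests are forced to be non-streaming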
+ const shouldStream = !isDalle3 && !!options.config.stream;
const controller = new AbortController();
options.onController?.(controller);
diff --git a/app/components/emoji.tsx b/app/components/emoji.tsx
index 6db746c462c..d75cdda9268 100644
--- a/app/components/emoji.tsx
+++ b/app/components/emoji.tsx
@@ -37,7 +37,8 @@ export function Avatar(props: { model?: ModelType; avatar?: string }) {
return (
<div className="no-dark">
{props.model?.startsWith("gpt-4") ||
- props.model?.startsWith("chatgpt-4o") ? (
+ props.model?.startsWith("chatgpt-4o") ||
+ props.model?.startsWith("o1") ? (
<BlackBotIcon className="user-avatar" />
) : (
<BotIcon className="user-avatar" />
diff --git a/app/config/server.ts b/app/config/server.ts
index 485f950da03..9d6b3c2b8da 100644
--- a/app/config/server.ts
+++ b/app/config/server.ts
@@ -129,14 +129,15 @@ export const getServerSideConfig = () => {
if (customModels) customModels += ",";
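+ // prefix the disabled gpt-4* / chatgpt-4o* / o1* models (except gpt-4o-mini*) with "-" so customModels excludes them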
customModels += DEFAULT_MODELS.filter(
(m) =>
- (m.name.startsWith("gpt-4") || m.name.startsWith("chatgpt-4o")) &&
+ (m.name.startsWith("gpt-4") || m.name.startsWith("chatgpt-4o") || m.name.startsWith("o1")) &&
!m.name.startsWith("gpt-4o-mini"),
)
.map((m) => "-" + m.name)
.join(",");
if (
(defaultModel.startsWith("gpt-4") ||
- defaultModel.startsWith("chatgpt-4o")) &&
+ defaultModel.startsWith("chatgpt-4o") ||
+ defaultModel.startsWith("o1")) &&
!defaultModel.startsWith("gpt-4o-mini")
)
defaultModel = "";
diff --git a/app/constant.ts b/app/constant.ts
index f1a1996b8c5..25c8d98eae3 100644
--- a/app/constant.ts
+++ b/app/constant.ts
@@ -264,6 +264,7 @@ export const KnowledgeCutOffDate: Record<string, string> = {
"gpt-4o": "2023-10",
"gpt-4o-2024-05-13": "2023-10",
"gpt-4o-2024-08-06": "2023-10",
+ "gpt-4o-2024-11-20": "2023-10",
"chatgpt-4o-latest": "2023-10",
"gpt-4o-mini": "2023-10",
"gpt-4o-mini-2024-07-18": "2023-10",
@@ -303,6 +304,7 @@ const openaiModels = [
"gpt-4o",
"gpt-4o-2024-05-13",
"gpt-4o-2024-08-06",
+ "gpt-4o-2024-11-20",
"chatgpt-4o-latest",
"gpt-4o-mini",
"gpt-4o-mini-2024-07-18",
@@ -318,6 +320,9 @@ const googleModels = [
"gemini-1.0-pro",
"gemini-1.5-pro-latest",
"gemini-1.5-flash-latest",
+ "gemini-exp-1114",
+ "gemini-exp-1121",
+ "learnlm-1.5-pro-experimental",
"gemini-pro-vision",
];
diff --git a/app/utils.ts b/app/utils.ts
index 1c359ef9508..b62bc126da7 100644
--- a/app/utils.ts
+++ b/app/utils.ts
@@ -260,6 +260,8 @@ export function isVisionModel(model: string) {
"gpt-4o",
"claude-3",
"gemini-1.5",
+ "gemini-exp",
+ "learnlm",
"qwen-vl",
"qwen2-vl",
];