feat: upgrade gpt-tokens
Peek-A-Booo committed Jun 30, 2023
1 parent 7b2c9a1 commit 54e0a8b
Showing 6 changed files with 51 additions and 60 deletions.
1 change: 1 addition & 0 deletions CHANGE_LOG.md
@@ -22,6 +22,7 @@
 - Remove unnecessary features to improve performance.
 - Optimize UI details and unify global icon styles. Replace react-icons with MingCute Icon.
 - Optimize OpenAI/Azure API to return error messages in a more user-friendly format
+- Upgrade gpt-tokens dependency
 
 ## v0.7.1
 
1 change: 1 addition & 0 deletions CHANGE_LOG.zh_CN.md
@@ -22,6 +22,7 @@
 - 删减部分多余功能,提升性能
 - 优化 UI 细节,统一全局图标样式。将 react-icons 替换为 MingCute Icon
 - 优化 OpenAI/Azure 接口返回错误格式
+- 升级 gpt-tokens 依赖
 
 ## v0.7.1
 
14 changes: 5 additions & 9 deletions pnpm-lock.yaml

Some generated files are not rendered by default.

75 changes: 34 additions & 41 deletions src/lib/gpt-tokens.ts
@@ -1,4 +1,4 @@
-import { Tiktoken, getEncoding, encodingForModel } from "js-tiktoken";
+import { encodingForModel, getEncoding, Tiktoken } from "js-tiktoken";
 import Decimal from "decimal.js";
 
 /**
@@ -30,12 +30,27 @@ export class GPTTokens {
   constructor(options: {
     model: supportModelType;
     messages: MessageItem[];
-    debug?: boolean;
     plus?: boolean;
   }) {
-    const { debug = false, model, messages, plus = false } = options;
+    const { model, messages, plus = false } = options;
 
+    if (model === "gpt-3.5-turbo")
+      this.warning(
+        `${model} may update over time. Returning num tokens assuming gpt-3.5-turbo-0613`
+      );
+    if (model === "gpt-3.5-turbo-16k")
+      this.warning(
+        `${model} may update over time. Returning num tokens assuming gpt-3.5-turbo-16k-0613`
+      );
+    if (model === "gpt-4")
+      this.warning(
+        `${model} may update over time. Returning num tokens assuming gpt-4-0613`
+      );
+    if (model === "gpt-4-32k")
+      this.warning(
+        `${model} may update over time. Returning num tokens assuming gpt-4-32k-0613`
+      );
+
-    this.debug = debug;
     this.model = model;
     this.plus = plus;
     this.messages = messages;
@@ -45,8 +60,6 @@ export class GPTTokens {
   public readonly model;
   public readonly messages;
 
-  private readonly debug!: boolean;
-
   // https://openai.com/pricing/
   // gpt-3.5-turbo
   // $0.002 / 1K tokens
@@ -98,7 +111,7 @@ export class GPTTokens {
     .toNumber();
 
   // Used Tokens
-  public get usedTokens(): number {
+  public get usedTokens() {
     return this.num_tokens_from_messages(this.messages, this.model);
   }
 
@@ -150,30 +163,18 @@ export class GPTTokens {
       price = promptUSD.add(completionUSD).toNumber();
     }
 
-    if (this.plus) {
-      if (
-        [
-          "gpt-3.5-turbo",
-          "gpt-3.5-turbo-0301",
-          "gpt-3.5-turbo-0613",
-          "gpt-3.5-turbo-16k",
-          "gpt-3.5-turbo-16k-0613",
-        ].includes(this.model)
-      ) {
-        price = new Decimal(price).mul(0.75).toNumber();
-      }
-    }
-
-    return price;
+    return this.plus && this.model.startsWith("gpt-3.5-turbo")
+      ? new Decimal(price).mul(0.75).toNumber()
+      : price;
   }
 
-  private get promptUsedTokens(): number {
+  private get promptUsedTokens() {
     const messages = this.messages.filter((item) => item.role !== "assistant");
 
     return this.num_tokens_from_messages(messages, this.model);
   }
 
-  private get completionUsedTokens(): number {
+  private get completionUsedTokens() {
     const messages = this.messages.filter((item) => item.role === "assistant");
 
     return this.num_tokens_from_messages(messages, this.model);
@@ -184,9 +185,7 @@ export class GPTTokens {
   * @param message The message to print. Will be prefixed with "Warning: ".
   * @returns void
   */
-  private warning(message: string): void {
-    if (!this.debug) return;
-
+  private warning(message: string) {
     console.warn("Warning:", message);
   }
 
@@ -200,29 +199,23 @@ export class GPTTokens {
   private num_tokens_from_messages(
     messages: MessageItem[],
     model: supportModelType
-  ): number {
+  ) {
     let encoding!: Tiktoken;
     let tokens_per_message!: number;
     let tokens_per_name!: number;
     let num_tokens = 0;
-    let modelType!: "gpt-3.5-turbo" | "gpt-4";
 
-    if (
-      [
-        "gpt-3.5-turbo",
-        "gpt-3.5-turbo-0301",
-        "gpt-3.5-turbo-0613",
-        "gpt-3.5-turbo-16k",
-        "gpt-3.5-turbo-16k-0613",
-      ].includes(model)
-    ) {
-      modelType = "gpt-3.5-turbo";
+    if (["gpt-3.5-turbo-0301"].includes(model)) {
       tokens_per_message = 4;
       tokens_per_name = -1;
     }
 
     if (
       [
+        "gpt-3.5-turbo",
+        "gpt-3.5-turbo-0613",
+        "gpt-3.5-turbo-16k",
+        "gpt-3.5-turbo-16k-0613",
         "gpt-4",
         "gpt-4-0314",
         "gpt-4-0613",
@@ -231,13 +224,12 @@ export class GPTTokens {
         "gpt-4-32k-0613",
       ].includes(model)
     ) {
-      modelType = "gpt-4";
       tokens_per_message = 3;
       tokens_per_name = 1;
     }
 
     try {
-      encoding = encodingForModel(modelType);
+      encoding = encodingForModel(model);
     } catch (e) {
       this.warning("model not found. Using cl100k_base encoding.");
 
@@ -259,6 +251,7 @@ export class GPTTokens {
     // Supplementary
     // encoding.free()
 
+    // every reply is primed with <|start|>assistant<|message|>
     return num_tokens + 3;
   }
 }
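For reference, a minimal usage sketch of the upgraded class (not part of the commit): the relative import path and the sample messages are illustrative assumptions, while the constructor options and the usedTokens getter follow the code shown above.

// Illustrative sketch — import path and messages are assumptions.
import { GPTTokens } from "./gpt-tokens";

const usage = new GPTTokens({
  model: "gpt-3.5-turbo", // now logs a warning that token counts assume gpt-3.5-turbo-0613
  plus: true,             // pricing discount now applies to any model starting with "gpt-3.5-turbo"
  messages: [
    { role: "system", content: "You are a helpful assistant." },
    { role: "user", content: "How many tokens does this conversation use?" },
  ],
});

// For this model, num_tokens_from_messages adds 3 tokens per message
// (tokens_per_message), 1 token per name field (tokens_per_name), and
// 3 tokens priming the assistant reply, on top of the encoded content.
console.log(usage.usedTokens);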
10 changes: 5 additions & 5 deletions src/locales/en.json
@@ -285,10 +285,10 @@
   },
   "zLog": {
     "full-log": "Click to view the full update log",
-    "title": "v0.7.1 Released 🔥🔥",
-    "text1": "Added Azure TTS feature, supporting conversion of conversation replies into voice (Premium).",
-    "text2": "Improve the display of error messages returned by OpenAI",
-    "text3": "Optimize some internationalization content",
-    "text4": "Other details optimizations"
+    "title": "v0.7.2 Released 🔥🔥",
+    "text1": "New: Support for automatic playback with Azure TTS (Premium)",
+    "text2": "Optimization: After actively switching language models, subsequent new sessions will use the same model",
+    "text3": "Unified global icon style",
+    "text4": "Refactored a large number of module codes to improve performance"
   }
 }
10 changes: 5 additions & 5 deletions src/locales/zh-CN.json
@@ -285,10 +285,10 @@
   },
   "zLog": {
     "full-log": "点击查看完整更新日志",
-    "title": "v0.7.1 版本发布 🔥🔥",
-    "text1": "新增 Azure TTS 功能,支持将会话回复的内容转换为语音(Premium)",
-    "text2": "完善 OpenAI 返回报错信息的展示",
-    "text3": "优化部分国际化内容",
-    "text4": "其它细节优化"
+    "title": "v0.7.2 版本发布 🔥🔥",
+    "text1": "新增:支持Azure TTS自动播放(Premium)",
+    "text2": "优化:主动切换语言模型后,后续新建会话时会沿用该模型",
+    "text3": "统一全局图标风格",
+    "text4": "重构大量模块代码,提升性能表现"
   }
 }

1 comment on commit 54e0a8b

@vercel vercel bot commented on 54e0a8b Jun 30, 2023
