Refactor variable declarations to use 'const' keyword
- Update variable declarations to use 'const' instead of 'let' for
  improved immutability and readability.

Generated by gpt-3.5-turbo
joone committed Mar 18, 2024
1 parent ece5d2f commit a9759ce
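
The change applied throughout this commit is the standard let-to-const refactor. A minimal TypeScript sketch for illustration (hypothetical variable names, not code from this repository):

// 'let' permits reassignment even when the binding never changes.
let maxTokens = 500;
maxTokens = 1000; // allowed

// 'const' fixes the binding, so reassignment is rejected at compile time.
const limit = 500;
// limit = 1000; // error: cannot assign to 'limit' because it is a constant

Note that 'const' freezes only the binding, not the object it refers to, which is why property assignments such as params.max_tokens = 500 in the diff below still compile after the change.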
Showing 4 changed files with 11 additions and 15 deletions.
src/config/index.ts (6 changes: 3 additions & 3 deletions)
@@ -173,10 +173,10 @@ export class Config implements ConfigInterface {
 
       return false;
     }
-    let rawData: any = fs.readFileSync(this.configFilePath);
-    let config = JSON.parse(rawData);
+    const rawData: any = fs.readFileSync(this.configFilePath);
+    const config = JSON.parse(rawData);
 
-    for (let item of config.items) {
+    for (const item of config.items) {
       this.set(item.name, item.value);
     }
     return true;
src/llm/index.ts (2 changes: 1 addition & 1 deletion)
@@ -81,7 +81,7 @@ export class OpenAiAPI extends LLMService {
       console.log("OpenAI stream completion");
       console.log("Model: " + params.model);
     }
-    let stream = await this.api.chat.completions.create(streaming_params);
+    const stream = await this.api.chat.completions.create(streaming_params);
     return stream;
   }
 }
src/loz.ts (14 changes: 5 additions & 9 deletions)
@@ -116,8 +116,7 @@ export class Loz {
   }
 
   public async completeUserPrompt(prompt: string): Promise<any> {
-    let params: LLMSettings;
-    params = this.defaultSettings;
+    const params = this.defaultSettings;
     params.max_tokens = 500;
     params.prompt = prompt;
     const completion = await this.llmAPI.completion(params);
@@ -141,8 +140,7 @@
 
     const prompt = promptForGIT + diff + "\n" + "Commit Message: ";
 
-    let params: LLMSettings;
-    params = this.defaultSettings;
+    const params = this.defaultSettings;
     params.max_tokens = 500;
     params.prompt = prompt;
 
@@ -178,8 +176,7 @@ export class Loz {
   // git diff | loz --git
   public async generateGitCommitMessage(diff: string): Promise<any> {
     if (DEBUG) console.log("writeGitCommitMessage");
-    let params: LLMSettings;
-    params = this.defaultSettings;
+    const params = this.defaultSettings;
     params.max_tokens = 500;
     params.prompt = promptForGIT + diff + "\n" + "Commit Message: ";
 
@@ -245,7 +242,7 @@ export class Loz {
 
   public runPromptInteractiveMode(): Promise<any> {
     return new Promise((resolve, reject) => {
-      let cli = new CommandLinePrompt(async (input: string) => {
+      const cli = new CommandLinePrompt(async (input: string) => {
         const tokens = input.split(" ");
         if (input === "exit" || input === "quit") {
           cli.exit();
@@ -254,8 +251,7 @@
         } else if (input.indexOf("config") === 0 && tokens.length <= 3) {
           await this.handleConfigCommand(tokens);
         } else if (input.length !== 0) {
-          let params: LLMSettings;
-          params = this.defaultSettings;
+          const params = this.defaultSettings;
           params.prompt = input;
           params.max_tokens = 4000;
           await this.runCompletion(params);
src/prompt/test.ts (4 changes: 2 additions & 2 deletions)
@@ -30,7 +30,7 @@ describe("Test prompt", () => {
       stdin.send("loz\n");
     });
 
-    let cli = new CommandLinePrompt(async (input: string) => {
+    const cli = new CommandLinePrompt(async (input: string) => {
       expect(input).to.equal("loz");
       return Promise.resolve();
     });
@@ -45,7 +45,7 @@ describe("Test prompt", () => {
       stdin.send("exit\n");
     });
 
-    let cli = new CommandLinePrompt(async (input: string) => {
+    const cli = new CommandLinePrompt(async (input: string) => {
       expect(input).to.equal("exit");
       return Promise.resolve();
     });