This repository has been archived by the owner on Sep 12, 2024. It is now read-only.
-
Notifications
You must be signed in to change notification settings - Fork 65
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
Showing 27 changed files with 234 additions and 27 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,29 @@ | ||
// Example: in-memory semantic search using llama-node embeddings with
// LangChain's MemoryVectorStore.
import { MemoryVectorStore } from "langchain/vectorstores/memory";
import { LLamaEmbeddings } from "llama-node/dist/extensions/langchain.js";
import { LLama } from "llama-node";
import { LLamaCpp } from "llama-node/dist/llm/llama-cpp.js";
import path from "path";

// Model weights are expected one directory above the working directory.
const model = path.resolve(process.cwd(), "../ggml-vicuna-7b-1.1-q4_1.bin");

const llama = new LLama(LLamaCpp);

const config = {
  path: model,
  enableLogging: true,
  nCtx: 1024,
  nParts: -1,
  seed: 0,
  f16Kv: false,
  logitsAll: false,
  vocabOnly: false,
  useMlock: false,
  // Embedding mode must be on so LLamaEmbeddings can query the model.
  embedding: true,
  useMmap: true,
};

llama.load(config);

const run = async () => {
  // Load the docs into the vector store
  const vectorStore = await MemoryVectorStore.fromTexts(
    ["Hello world", "Bye bye", "hello nice world"],
    [{ id: 2 }, { id: 1 }, { id: 3 }],
    new LLamaEmbeddings({ maxConcurrency: 1 }, llama)
  );

  // Search for the most similar document
  const resultOne = await vectorStore.similaritySearch("hello world", 1);
  console.log(resultOne);
};

// Report failures instead of leaving the promise rejection unhandled.
run().catch((err) => {
  console.error(err);
  process.exitCode = 1;
});
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,30 @@ | ||
// Example: compute an embedding vector for a prompt with llama.cpp.
import { LLama } from "llama-node";
import { LLamaCpp } from "llama-node/dist/llm/llama-cpp.js";
import path from "path";

// Model weights are expected one directory above the working directory.
const model = path.resolve(process.cwd(), "../ggml-vicuna-7b-1.1-q4_1.bin");

const llama = new LLama(LLamaCpp);

const config = {
  path: model,
  enableLogging: true,
  nCtx: 1024,
  nParts: -1,
  seed: 0,
  f16Kv: false,
  logitsAll: false,
  vocabOnly: false,
  useMlock: false,
  // Embedding mode must be on for getEmbedding to work.
  embedding: true,
  useMmap: true,
};

llama.load(config);

const prompt = `Who is the president of the United States?`;

const params = {
  nThreads: 4,
  nTokPredict: 2048,
  topK: 40,
  topP: 0.1,
  temp: 0.2,
  repeatPenalty: 1,
  prompt,
};

// Log the embedding; report failures instead of letting the
// rejection go unhandled.
llama.getEmbedding(params).then(console.log).catch((err) => {
  console.error(err);
  process.exitCode = 1;
});
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,37 @@ | ||
// Example: streaming text completion with llama.cpp via llama-node.
import { LLama } from "llama-node";
import { LLamaCpp } from "llama-node/dist/llm/llama-cpp.js";
import path from "path";

// Resolve the model file relative to the working directory's parent.
const model = path.resolve(process.cwd(), "../ggml-vicuna-7b-1.1-q4_1.bin");

const llama = new LLama(LLamaCpp);

llama.load({
  path: model,
  enableLogging: true,
  nCtx: 1024,
  nParts: -1,
  seed: 0,
  f16Kv: false,
  logitsAll: false,
  vocabOnly: false,
  useMlock: false,
  embedding: false,
  useMmap: true,
});

// Wrap the user message in the Vicuna-style chat template.
const userMessage = `How are you?`;
const prompt = `### Human:
${userMessage}
### Assistant:`;

const completionParams = {
  nThreads: 4,
  nTokPredict: 2048,
  topK: 40,
  topP: 0.1,
  temp: 0.2,
  repeatPenalty: 1,
  stopSequence: "### Human",
  prompt,
};

// Stream each generated token to stdout as it arrives.
const onToken = (response) => {
  process.stdout.write(response.token);
};

llama.createCompletion(completionParams, onToken);
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,21 @@ | ||
// Example: tokenize a string with llama.cpp via llama-node.
import { LLama } from "llama-node";
import { LLamaCpp } from "llama-node/dist/llm/llama-cpp.js";
import path from "path";

// Model file sits one directory above the working directory.
const modelPath = path.resolve(process.cwd(), "../ggml-vicuna-7b-1.1-q4_1.bin");

const llama = new LLama(LLamaCpp);

llama.load({
  path: modelPath,
  enableLogging: true,
  nCtx: 1024,
  nParts: -1,
  seed: 0,
  f16Kv: false,
  logitsAll: false,
  vocabOnly: false,
  useMlock: false,
  embedding: false,
  useMmap: true,
});

const content = "how are you?";

// Print the token ids for the given content.
llama.tokenize({ content, nCtx: 2048 }).then(console.log);
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,30 @@ | ||
// Example: compute embeddings for several prompts with llama-rs and save
// each vector as JSON for later comparison (see the cosine-similarity script).
import { LLama } from "llama-node";
import { LLamaRS } from "llama-node/dist/llm/llama-rs.js";
import path from "path";
import fs from "fs";

// Model weights are expected one directory above the working directory.
const model = path.resolve(process.cwd(), "../ggml-alpaca-7b-q4.bin");

const llama = new LLama(LLamaRS);

llama.load({ path: model });

/**
 * Embed `prompt` and persist the resulting vector as JSON at `file`
 * (resolved against the current working directory).
 */
const getWordEmbeddings = async (prompt, file) => {
  const data = await llama.getEmbedding({
    prompt,
    numPredict: 128,
    temp: 0.2,
    topP: 1,
    topK: 40,
    repeatPenalty: 1,
    repeatLastN: 64,
    seed: 0,
  });
  console.log(prompt, data);
  await fs.promises.writeFile(path.resolve(process.cwd(), file), JSON.stringify(data));
};

const run = async () => {
  // Embeddings are computed sequentially on the one shared model instance.
  const dog1 = `My favourite animal is the dog`;
  await getWordEmbeddings(dog1, "./example/semantic-compare/dog1.json");

  const dog2 = `I have just adopted a cute dog`;
  await getWordEmbeddings(dog2, "./example/semantic-compare/dog2.json");

  const cat1 = `My favourite animal is the cat`;
  await getWordEmbeddings(cat1, "./example/semantic-compare/cat1.json");
};

// Report failures instead of leaving the promise rejection unhandled.
run().catch((err) => {
  console.error(err);
  process.exitCode = 1;
});
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,27 @@ | ||
// Example: streaming text completion with the llama-rs backend.
import { LLama } from "llama-node";
import { LLamaRS } from "llama-node/dist/llm/llama-rs.js";
import path from "path";

// Resolve the model file relative to the working directory's parent.
const model = path.resolve(process.cwd(), "../ggml-alpaca-7b-q4.bin");

const llama = new LLama(LLamaRS);

llama.load({ path: model });

// Wrap the instruction in the Alpaca prompt template.
const instruction = `how are you`;
const prompt = `Below is an instruction that describes a task. Write a response that appropriately completes the request.
### Instruction:
${instruction}
### Response:`;

const completionParams = {
  prompt,
  numPredict: 128,
  temp: 0.2,
  topP: 1,
  topK: 40,
  repeatPenalty: 1,
  repeatLastN: 64,
  seed: 0,
  feedPrompt: true,
};

// Stream each generated token to stdout as it arrives.
llama.createCompletion(completionParams, (response) => {
  process.stdout.write(response.token);
});
Large diffs are not rendered by default.
Oops, something went wrong.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,16 @@ | ||
// Example: compare saved embedding vectors by cosine similarity.
import * as tf from "@tensorflow/tfjs-node";
import dog1 from "./dog1.json";
import dog2 from "./dog2.json";
import cat1 from "./cat1.json";

const dog1Tensor = tf.tensor(dog1);
const dog2Tensor = tf.tensor(dog2);
const cat1Tensor = tf.tensor(cat1);

/**
 * Cosine similarity of two 1-D tensors: dot(a, b) / (|a| * |b|).
 * Returns a plain number.
 */
const compareCosineSimilarity = (tensor1, tensor2) =>
  // tf.tidy disposes the intermediate tensors (dot product, norms,
  // quotient) instead of leaking them from each call.
  tf.tidy(() => {
    const dotProduct = tensor1.dot(tensor2);
    const norm1 = tensor1.norm();
    const norm2 = tensor2.norm();
    const cosineSimilarity = dotProduct.div(norm1.mul(norm2));
    return cosineSimilarity.dataSync()[0];
  });

console.log("dog1 vs dog2", compareCosineSimilarity(dog1Tensor, dog2Tensor));
console.log("dog1 vs cat1", compareCosineSimilarity(dog1Tensor, cat1Tensor));
Large diffs are not rendered by default.
Oops, something went wrong.
Large diffs are not rendered by default.
Oops, something went wrong.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,8 @@ | ||
// Example: tokenize a string with the llama-rs backend.
import { LLama } from "llama-node";
import { LLamaRS } from "llama-node/dist/llm/llama-rs.js";
import path from "path";

// Model file sits one directory above the working directory.
const modelPath = path.resolve(process.cwd(), "../ggml-alpaca-7b-q4.bin");

const llama = new LLama(LLamaRS);

llama.load({ path: modelPath });

const content = "how are you?";

// Print the token ids for the given content.
llama.tokenize(content).then(console.log);
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,27 +1,25 @@ | ||
{ | ||
"name": "@llama-node/examples", | ||
"version": "1.0.0", | ||
"version": "0.0.30", | ||
"description": "", | ||
"main": "index.js", | ||
"type": "module", | ||
"scripts": { | ||
"build": "tsc -p .", | ||
"langchain": "tsx src/langchain/langchain.ts", | ||
"llama-cpp": "tsx src/llama-cpp/llama-cpp.ts", | ||
"llama-rs": "tsx src/llama-rs/llama-rs.ts", | ||
"test": "echo \"Error: no test specified\" && exit 1" | ||
"langchain": "node js/langchain/langchain.js", | ||
"llama-cpp": "node js/llama-cpp/llama-cpp.js", | ||
"llama-rs": "node js/llama-rs/llama-rs.js" | ||
}, | ||
"author": "", | ||
"license": "MIT", | ||
"devDependencies": { | ||
"@types/node": "^18.15.11", | ||
"tsx": "^3.12.6", | ||
"typescript": "^5.0.4", | ||
"langchain": "^0.0.56" | ||
}, | ||
"dependencies": { | ||
"@llama-node/core": "file:../packages/core", | ||
"@llama-node/llama-cpp": "file:../packages/llama-cpp", | ||
"llama-node": "file:../" | ||
"@llama-node/core": "0.0.30", | ||
"@llama-node/llama-cpp": "0.0.30", | ||
"llama-node": "0.0.30" | ||
} | ||
} | ||
} |
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.
Oops, something went wrong.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters