llama2.js
A fork of llama2.js, rewritten to be usable as a library, with more functionality possibly added in the future.
A pure JavaScript port of Karpathy's llama2.c with a simple UI.
How to run
- Download Karpathy's Llama 2 model parameters (original instructions), pretrained on the TinyStories dataset:
wget https://huggingface.co/karpathy/tinyllamas/resolve/main/stories15M.bin
wget https://huggingface.co/karpathy/tinyllamas/resolve/main/stories42M.bin
wget https://huggingface.co/karpathy/tinyllamas/resolve/main/stories110M.bin
- Open run.html via a web server (a minimal example server is sketched below)
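run.html has to be served over HTTP rather than opened directly from the filesystem. Any static file server will do; as one hedged example, a minimal Node script (the file name serve.mjs and port 8080 are arbitrary choices, not part of this package) that serves the current directory could look like this:

import { createServer } from 'node:http'
import { readFile } from 'node:fs/promises'
import { extname } from 'node:path'

// Map the few file extensions this repo needs to content types.
const types = {
  '.html': 'text/html',
  '.js': 'text/javascript',
  '.bin': 'application/octet-stream'
}

createServer(async (req, res) => {
  // Serve run.html at the root, everything else relative to the current directory.
  const path = '.' + (req.url === '/' ? '/run.html' : req.url)
  try {
    const body = await readFile(path)
    res.writeHead(200, { 'Content-Type': types[extname(path)] ?? 'application/octet-stream' })
    res.end(body)
  } catch {
    res.writeHead(404)
    res.end('Not found')
  }
}).listen(8080, () => console.log('Serving on http://localhost:8080'))

Run it with node serve.mjs from the repository root, then open http://localhost:8080 in the browser.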
Usage
import { loadVocab, readCheckpoint, generator } from "llama2js"
import * as process from 'process'
import * as fs from 'fs'

async function init() {
  // Load the model checkpoint and tokenizer from disk and hand their ArrayBuffers to the library.
  const model = fs.readFileSync('pathto/stories15M.bin')
  readCheckpoint(model.buffer)
  const tokenizer = fs.readFileSync('pathto/tokenizer.bin')
  loadVocab(tokenizer.buffer)
  generate()
}

async function generate() {
  // generator() returns an async iterator that streams text as it is sampled.
  const iterator = await generator({
    prompt: "Once upon a time, there was a"
  })
  while (true) {
    const { value, done } = await iterator.next()
    if (done) {
      console.log(value)
      break
    }
    // Each intermediate value carries the newly generated piece of text in value.next.
    process.stdout.write(value.next)
  }
}

init()
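The generator streams text piece by piece; if the full completion is wanted as a single string instead, the same iterator can simply be drained into a buffer. The sketch below assumes the same imports and setup as above; generateText is a hypothetical helper name, not an export of this package:

// Hypothetical convenience wrapper around the streaming API shown above.
async function generateText(prompt) {
  const iterator = await generator({ prompt })
  let text = ""
  while (true) {
    const { value, done } = await iterator.next()
    if (done) return text   // generation finished
    text += value.next      // append the newly sampled piece of text
  }
}

// Example: log the whole story at once instead of streaming it.
generateText("Once upon a time, there was a").then(story => console.log(story))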
License
MIT