force model cache (#751)

Radamés Ajna
2023-09-05 20:53:31 -07:00
committed by GitHub
parent a4f40f3dc8
commit 16bf44f6e9
4 changed files with 44 additions and 20 deletions

View File

@@ -38,11 +38,11 @@
   },
   stories42M: {
     url: "stories42M.bin",
-    seq_len: 256,
+    seq_len: 1024,
   },
   stories110M: {
     url: "stories110M.bin",
-    seq_len: 256,
+    seq_len: 1024,
  },
};
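This hunk raises the per-model `seq_len` cap from 256 to 1024 tokens for the stories42M and stories110M checkpoints; the UI reads this field to bound how many tokens a run may generate. A minimal sketch of that use, assuming the `MODELS` table above — the `clampMaxTokens` helper and its `requested` parameter are hypothetical, not part of the commit:

// Hypothetical helper (not in the commit): clamp a user-requested token
// count to the selected model's seq_len from the MODELS table above.
function clampMaxTokens(modelID, requested) {
  const { seq_len } = MODELS[modelID];
  return Math.min(Math.max(1, requested), seq_len);
}

// e.g. clampMaxTokens("stories42M", 4000) === 1024 after this commit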
@@ -124,9 +124,17 @@
 const prompt = document.querySelector("#prompt");
 const clearBtn = document.querySelector("#clear-btn");
 const runBtn = document.querySelector("#run");
 const modelSelect = document.querySelector("#model");
 let runController = new AbortController();
 let isRunning = false;
+
+modelSelect.addEventListener("change", (e) => {
+  const model = MODELS[e.target.value];
+  document.querySelector("#max-seq").max = model.seq_len;
+  document.querySelector("#max-seq").nextElementSibling.value =
+    model.seq_len;
+});
+
 form.addEventListener("submit", async (e) => {
   e.preventDefault();
   if (isRunning) {
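The new listener keeps the `#max-seq` slider (and the `<output>` element it assumes as the slider's next sibling) in sync with the selected model's `seq_len`. Since it only fires on a change event, the slider keeps its hardcoded limits until the user touches the dropdown; one way to apply the default model's limit on load is to dispatch a synthetic change event. A sketch under that assumption, reusing the `modelSelect` element above — this initialization is not part of the commit:

// Sketch (not in the commit): sync the #max-seq slider with the default
// model's seq_len once the page loads, reusing the change handler above.
window.addEventListener("load", () => {
  modelSelect.dispatchEvent(new Event("change"));
});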

View File

@@ -1,13 +1,17 @@
 import init, { Model } from "./build/m.js";
 
 async function fetchArrayBuffer(url) {
-  const res = await fetch(url, {
-    cache: "force-cache",
-  });
-  const data = await res.arrayBuffer();
-  return new Uint8Array(data);
+  const cacheName = "llama2c-candle-cache";
+  const cache = await caches.open(cacheName);
+  const cachedResponse = await cache.match(url);
+  if (cachedResponse) {
+    const data = await cachedResponse.arrayBuffer();
+    return new Uint8Array(data);
+  }
+  const res = await fetch(url, { cache: "force-cache" });
+  cache.put(url, res.clone());
+  return new Uint8Array(await res.arrayBuffer());
 }
 
 class Llama2C {
   static instance = {};
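The rewritten fetchArrayBuffer checks a named Cache Storage bucket before hitting the network, so model weights are downloaded once per origin rather than relying on the HTTP cache alone. Note that `cache.put` stores whatever response comes back, including errors. A hedged variant that only caches successful downloads, plus a way to reset the cache, might look like the sketch below; the function names are hypothetical, only the cache name is taken from the diff:

// Hedged variant of the committed code (not the commit itself): skip
// caching on a failed fetch so an HTTP error response is never replayed
// from the cache on later visits.
async function fetchArrayBufferChecked(url) {
  const cache = await caches.open("llama2c-candle-cache");
  const cached = await cache.match(url);
  if (cached) return new Uint8Array(await cached.arrayBuffer());
  const res = await fetch(url);
  if (!res.ok) throw new Error(`download failed: ${res.status} ${url}`);
  await cache.put(url, res.clone());
  return new Uint8Array(await res.arrayBuffer());
}

// Deleting the bucket (e.g. from the devtools console) forces a fresh
// download of the model weights on the next run:
async function clearModelCache() {
  return caches.delete("llama2c-candle-cache");
}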