-
Notifications
You must be signed in to change notification settings - Fork 10
/
tfjs-text-demo.html
51 lines (42 loc) · 2.15 KB
/
tfjs-text-demo.html
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
<!DOCTYPE html>
<html>
<head>
<title>OpenAI CLIP JavaScript - Text Demo - tfjs</title>
</head>
<body>
<script src="https://cdn.jsdelivr.net/npm/@tensorflow/[email protected]/dist/tf.js"></script>
<script src="https://cdn.jsdelivr.net/npm/@tensorflow/[email protected]/dist/tf-backend-wasm.js"></script>
<h3>Note: To run this you need to clone <a href="https://github.com/josephrocca/openai-clip-js">this repo</a>, and then download the tfjs model folder from <a href="https://drive.google.com/drive/folders/1-GI6-OTDiJcjYKTavoobbubc9BYjQDzW?usp=sharing">here</a> and name the folder "clip-text-vit-32-tfjs", and then run a static file server in the repo directory.</h3>
<div>
input text <input id="textInputEl" value="hello world!">
backend: <select id="backendSelectEl">
<option>wasm</option>
<option>webgl</option>
</select>
<button id="startBtn" onclick="main()">start</button>
</div>
<p><a href="https://github.com/josephrocca/openai-clip-js">github repo</a> - <a href="https://huggingface.co/rocca/openai-clip-js/tree/main">huggingface repo</a></p>
<script>
// Entry point (wired to the "start" button's onclick).
// Loads the CLIP text-encoder tfjs graph model, tokenizes the text from
// #textInputEl with the CLIP BPE tokenizer, runs inference on the backend
// selected in #backendSelectEl, and logs the embedding data to the console.
async function main() {
  startBtn.disabled = true;
  startBtn.innerHTML = "see console";
  try {
    await tf.setBackend(backendSelectEl.value);
    console.log("Loading model... (see network tab for progress)");
    const modelPath = './clip-text-vit-32-tfjs/model.json';
    const model = await tf.loadGraphModel(modelPath);
    console.log("Model loaded.");
    // The tokenizer is an ES module fetched at runtime via dynamic import.
    const Tokenizer = (await import("https://deno.land/x/[email protected]/mod.js")).default;
    const tokenizer = new Tokenizer();
    let textTokens = tokenizer.encodeForCLIP(textInputEl.value);
    // CLIP uses a fixed context length of 77 tokens; the model takes float32 input.
    textTokens = Float32Array.from(textTokens);
    const input = {'input': tf.tensor(textTokens, [1, 77], "float32")};
    console.log("Running inference...");
    // GraphModel.execute() returns a Tensor (or Tensor[] for multiple named
    // outputs) — NOT an object keyed by output name, so normalize it here.
    const results = model.execute(input, ["output"]);
    const outputTensor = Array.isArray(results) ? results[0] : results;
    console.log("Finished inference.");
    // Tensor#data() is an async method returning a TypedArray; it must be
    // called and awaited (the original read it as a bare property).
    const data = await outputTensor.data();
    console.log(`data of result tensor 'output'`, data);
  } catch (err) {
    // Re-enable the button so the user can retry after a failure.
    startBtn.disabled = false;
    startBtn.innerHTML = "start";
    console.error(err);
    throw err;
  }
}
</script>
</body>
</html>