-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathindex.html
49 lines (47 loc) · 2.84 KB
/
index.html
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
<!DOCTYPE html>
<!-- Browser-based LLM demo page. All ids (modelSelector, textInput, generateBtn,
     maxTokens, runtimeSupport, outputLength, generatingStatus, output) are JS hooks
     used by script.js — do not rename. -->
<html lang="en">
<head>
    <!-- charset must come first, within the first 1024 bytes -->
    <meta charset="utf-8">
    <title>Browser LLM</title>
    <!-- no maximum-scale / user-scalable=no: disabling zoom is a WCAG 1.4.4 failure -->
    <meta name="viewport" content="width=device-width, initial-scale=1">
    <link rel="stylesheet" href="style.css">
</head>
<body>
    <div class="status-badges">
        <div class="status-badge" id="runtimeSupport" data-tooltip="Shows available runtimes. Web Assembly (WASM) is required and supported in most modern browsers. WebGPU enables faster processing but requires a GPU and compatible browser like Chrome.">
            <span class="badge-label">Runtime:</span>
            <!-- text replaced by script.js once runtime detection completes -->
            <span class="badge-value">Checking...</span>
        </div>
        <div class="status-badge" id="outputLength" data-tooltip="Controls the maximum length of generated text. Higher values allow longer responses but take more time.">
            <span class="badge-label">Max Tokens:</span>
            <span class="badge-value">256</span>
            <!-- aria-label gives the slider an accessible name; the "Max Tokens:" span
                 above is purely visual and not programmatically associated -->
            <input type="range" id="maxTokens" min="64" max="1024" value="256" step="64" aria-label="Maximum tokens">
        </div>
    </div>
    <div id="app">
        <div class="input-group">
            <label for="modelSelector">LLM</label>
            <select id="modelSelector">
                <option value="HuggingFaceTB/SmolLM2-135M-Instruct">HuggingFaceTB/SmolLM2-135M-Instruct</option>
                <option value="Felladrin/onnx-Smol-Llama-101M-Chat-v1">Felladrin/onnx-Smol-Llama-101M-Chat-v1</option>
                <option value="Felladrin/onnx-TinyMistral-248M-Chat-v2">Felladrin/onnx-TinyMistral-248M-Chat-v2</option>
                <option value="HuggingFaceTB/SmolLM2-360M-Instruct">HuggingFaceTB/SmolLM2-360M-Instruct</option>
                <option value="Xenova/OpenELM-270M-Instruct">Xenova/OpenELM-270M-Instruct</option>
                <option value="Xenova/Qwen1.5-0.5B-Chat">Xenova/Qwen1.5-0.5B-Chat</option>
                <option value="onnx-community/Qwen2.5-0.5B-Instruct">onnx-community/Qwen2.5-0.5B-Instruct</option>
                <option value="Xenova/TinyLlama-1.1B-Chat-v1.0">Xenova/TinyLlama-1.1B-Chat-v1.0</option>
                <!-- Larger models intentionally disabled (too heavy for in-browser use):
                     HuggingFaceTB/SmolLM2-1.7B-Instruct,
                     onnx-community/Llama-3.2-1B-Instruct,
                     onnx-community/Llama-3.2-3B-Instruct -->
            </select>
        </div>
        <!-- aria-label: placeholder text alone is not a reliable accessible name -->
        <textarea id="textInput" placeholder="Enter your prompt here" aria-label="Prompt"></textarea>
        <!-- explicit type="button": default button type is "submit" -->
        <button type="button" id="generateBtn">Generate</button>
        <!-- role="status" makes this a polite live region so screen readers announce
             progress text injected by script.js; inline display:none kept because
             script.js presumably toggles element.style — TODO confirm -->
        <div id="generatingStatus" class="status-container" role="status" style="display: none;">
            <div class="progress-bar"></div>
            <div class="status-text"></div>
        </div>
        <div id="output"></div>
    </div>
    <!-- type="module" is deferred by default, so end-of-body placement is safe -->
    <script type="module" src="script.js"></script>
</body>
</html>