Browse Source

add support for llama 3.2

Alex Cheema 9 months ago
parent
commit
777102c997
2 changed files with 9 additions and 1 deletion
  1. 6 0
      exo/models.py
  2. 3 1
      tinychat/examples/tinychat/index.html

+ 6 - 0
exo/models.py

@@ -2,6 +2,12 @@ from exo.inference.shard import Shard
 
 model_base_shards = {
   ### llama
+  "llama-3.2-1b": {
+    "MLXDynamicShardInferenceEngine": Shard(model_id="mlx-community/Llama-3.2-1B-Instruct-4bit", start_layer=0, end_layer=0, n_layers=16),
+  },
+  "llama-3.2-3b": {
+    "MLXDynamicShardInferenceEngine": Shard(model_id="mlx-community/Llama-3.2-3B-Instruct-4bit", start_layer=0, end_layer=0, n_layers=28),
+  },
   "llama-3.1-8b": {
     "MLXDynamicShardInferenceEngine": Shard(model_id="mlx-community/Meta-Llama-3.1-8B-Instruct-4bit", start_layer=0, end_layer=0, n_layers=32),
     "TinygradDynamicShardInferenceEngine": Shard(model_id="mlabonne/Meta-Llama-3.1-8B-Instruct-abliterated", start_layer=0, end_layer=0, n_layers=32),

+ 3 - 1
tinychat/examples/tinychat/index.html

@@ -27,7 +27,9 @@
 <main x-data="state" x-init="console.log(endpoint)">
 <div class="model-selector">
 <select @change="if (cstate) cstate.selectedModel = $event.target.value" x-model="cstate.selectedModel">
-<option selected="" value="llama-3.1-8b">Llama 3.1 8B</option>
+<option selected="" value="llama-3.2-1b">Llama 3.2 1B</option>
+<option value="llama-3.2-3b">Llama 3.2 3B</option>
+<option value="llama-3.1-8b">Llama 3.1 8B</option>
 <option value="llama-3.1-70b">Llama 3.1 70B</option>
 <option value="llama-3.1-70b-bf16">Llama 3.1 70B (BF16)</option>
 <option value="llama-3.1-405b">Llama 3.1 405B</option>