
empty space

Alex Cheema committed 1 year ago
parent commit c691205591
1 changed file with 0 additions and 1 deletion:
  exo/inference/mlx/test_sharded_llama.py

+ 0 - 1
exo/inference/mlx/test_sharded_llama.py

@@ -28,7 +28,6 @@ for _ in range(max_tokens):
 
 print("full response: ", full_tokenizer.decode(full_generated_tokens))
 
-
 sharded_generated_tokens = []
 sharded_resp = prompt_tokens
 for _ in range(max_tokens):