@@ -20,6 +20,7 @@ parser.add_argument("--node-id", type=str, default=None, help="Node ID")
 parser.add_argument("--node-host", type=str, default="0.0.0.0", help="Node host")
 parser.add_argument("--node-port", type=int, default=None, help="Node port")
 parser.add_argument("--listen-port", type=int, default=5678, help="Listening port for discovery")
+parser.add_argument("--download-quick-check", action="store_true", help="Quick check local path for shard download")
 parser.add_argument("--prometheus-client-port", type=int, default=None, help="Prometheus client port")
 parser.add_argument("--broadcast-port", type=int, default=5678, help="Broadcast port for discovery")
 parser.add_argument("--discovery-timeout", type=int, default=30, help="Discovery timeout in seconds")
@@ -36,7 +37,7 @@ print_yellow_exo()
 system_info = get_system_info()
 print(f"Detected system: {system_info}")

-shard_downloader: ShardDownloader = HFShardDownloader()
+shard_downloader: ShardDownloader = HFShardDownloader(quick_check=args.download_quick_check)
 inference_engine_name = args.inference_engine or ("mlx" if system_info == "Apple Silicon Mac" else "tinygrad")
 inference_engine = get_inference_engine(inference_engine_name, shard_downloader)
 print(f"Using inference engine: {inference_engine.__class__.__name__} with shard downloader: {shard_downloader.__class__.__name__}")
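
For reference, a minimal sketch of how the new flag threads from the CLI into the downloader. The import path is an assumption (it is not shown in this diff), and the comment about what `quick_check` does is inferred from the flag's help text rather than confirmed here.

```python
import argparse

# Assumed import path for HFShardDownloader; the diff does not show it.
from exo.download.hf.hf_shard_download import HFShardDownloader

parser = argparse.ArgumentParser()
parser.add_argument("--download-quick-check", action="store_true",
                    help="Quick check local path for shard download")
args = parser.parse_args(["--download-quick-check"])

# quick_check=True presumably lets the downloader trust shard files already
# present on disk instead of re-verifying them against the remote repo
# (inferred from the help text, not confirmed by this diff).
shard_downloader = HFShardDownloader(quick_check=args.download_quick_check)
```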