
formatting handle_delete_model

cadenmackenzie committed 8 months ago
commit a9838a8f45

1 changed file with 42 additions and 42 deletions

exo/api/chatgpt_api.py  (+42 -42)

@@ -420,48 +420,48 @@ class ChatGPTAPI:
 
   async def handle_delete_model(self, request):
     try:
-        model_name = request.match_info.get('model_name')
-        if DEBUG >= 2: print(f"Attempting to delete model: {model_name}")
-        
-        if not model_name or model_name not in model_cards:
-            return web.json_response(
-                {"detail": f"Invalid model name: {model_name}"}, 
-                status=400
-            )
-
-        shard = build_base_shard(model_name, self.inference_engine_classname)
-        if not shard:
-            return web.json_response(
-                {"detail": "Could not build shard for model"}, 
-                status=400
-            )
-
-        repo_id = get_repo(shard.model_id, self.inference_engine_classname)
-        if DEBUG >= 2: print(f"Repo ID for model: {repo_id}")
-        
-        # Get the HF cache directory using the helper function
-        hf_home = get_hf_home()
-        cache_dir = get_repo_root(repo_id)
-        
-        if DEBUG >= 2: print(f"Looking for model files in: {cache_dir}")
-        
-        if os.path.exists(cache_dir):
-            if DEBUG >= 2: print(f"Found model files at {cache_dir}, deleting...")
-            try:
-                shutil.rmtree(cache_dir)
-                return web.json_response({
-                    "status": "success", 
-                    "message": f"Model {model_name} deleted successfully",
-                    "path": str(cache_dir)
-                })
-            except Exception as e:
-                return web.json_response({
-                    "detail": f"Failed to delete model files: {str(e)}"
-                }, status=500)
-        else:
-            return web.json_response({
-                "detail": f"Model files not found at {cache_dir}"
-            }, status=404)
+      model_name = request.match_info.get('model_name')
+      if DEBUG >= 2: print(f"Attempting to delete model: {model_name}")
+      
+      if not model_name or model_name not in model_cards:
+        return web.json_response(
+          {"detail": f"Invalid model name: {model_name}"}, 
+          status=400
+        )
+
+      shard = build_base_shard(model_name, self.inference_engine_classname)
+      if not shard:
+        return web.json_response(
+          {"detail": "Could not build shard for model"}, 
+          status=400
+        )
+
+      repo_id = get_repo(shard.model_id, self.inference_engine_classname)
+      if DEBUG >= 2: print(f"Repo ID for model: {repo_id}")
+      
+      # Get the HF cache directory using the helper function
+      hf_home = get_hf_home()
+      cache_dir = get_repo_root(repo_id)
+      
+      if DEBUG >= 2: print(f"Looking for model files in: {cache_dir}")
+      
+      if os.path.exists(cache_dir):
+        if DEBUG >= 2: print(f"Found model files at {cache_dir}, deleting...")
+        try:
+          shutil.rmtree(cache_dir)
+          return web.json_response({
+            "status": "success", 
+            "message": f"Model {model_name} deleted successfully",
+            "path": str(cache_dir)
+          })
+        except Exception as e:
+          return web.json_response({
+            "detail": f"Failed to delete model files: {str(e)}"
+          }, status=500)
+      else:
+        return web.json_response({
+          "detail": f"Model files not found at {cache_dir}"
+        }, status=404)
             
     except Exception as e:
         print(f"Error in handle_delete_model: {str(e)}")