# grpc_peer_handle.py
  1. import grpc
  2. import numpy as np
  3. from typing import Optional
  4. # These would be generated from the .proto file
  5. from . import node_service_pb2
  6. from . import node_service_pb2_grpc
  7. from ..peer_handle import PeerHandle
  8. from inference.shard import Shard
  9. from topology.topology import Topology
  10. from topology.device_capabilities import DeviceCapabilities
  11. class GRPCPeerHandle(PeerHandle):
  12. def __init__(self, id: str, address: str, device_capabilities: DeviceCapabilities):
  13. self._id = id
  14. self.address = address
  15. self._device_capabilities = device_capabilities
  16. def id(self) -> str:
  17. return self._id
  18. def device_capabilities(self) -> DeviceCapabilities:
  19. return self._device_capabilities
  20. async def connect(self):
  21. self.channel = grpc.aio.insecure_channel(self.address)
  22. self.stub = node_service_pb2_grpc.NodeServiceStub(self.channel)
  23. async def disconnect(self):
  24. await self.channel.close()
  25. async def send_prompt(self, shard: Shard, prompt: str) -> Optional[np.array]:
  26. request = node_service_pb2.PromptRequest(prompt=prompt, shard=node_service_pb2.Shard(model_id=shard.model_id, start_layer=shard.start_layer, end_layer=shard.end_layer, n_layers=shard.n_layers))
  27. response = await self.stub.SendPrompt(request)
  28. print(f"Sent prompt to {self.address}: {prompt}")
  29. if not response.tensor_data or not response.shape or not response.dtype:
  30. return None
  31. return np.frombuffer(response.tensor_data, dtype=np.dtype(response.dtype)).reshape(response.shape)
  32. async def send_tensor(self, shard: Shard, tensor: np.ndarray) -> Optional[np.array]:
  33. request = node_service_pb2.TensorRequest(
  34. shard=node_service_pb2.Shard(model_id=shard.model_id, start_layer=shard.start_layer, end_layer=shard.end_layer, n_layers=shard.n_layers),
  35. tensor = node_service_pb2.Tensor(
  36. tensor_data=tensor.tobytes(),
  37. shape=tensor.shape,
  38. dtype=str(tensor.dtype)
  39. ),
  40. )
  41. response = await self.stub.SendTensor(request)
  42. if not response.tensor_data or not response.shape or not response.dtype:
  43. return None
  44. return np.frombuffer(response.tensor_data, dtype=np.dtype(response.dtype)).reshape(response.shape)
  45. async def reset_shard(self, shard: Shard) -> None:
  46. request = node_service_pb2.ResetShardRequest(shard=node_service_pb2.Shard(model_id=shard.model_id, start_layer=shard.start_layer, end_layer=shard.end_layer, n_layers=shard.n_layers))
  47. await self.stub.ResetShard(request)
  48. print(f"Reset shard {shard} on {self.address}")
  49. async def collect_topology(self, max_depth: int) -> Topology:
  50. request = node_service_pb2.CollectTopologyRequest(max_depth=max_depth)
  51. response = await self.stub.CollectTopology(request)
  52. topology = Topology()
  53. for node_id, capabilities in response.nodes.items():
  54. device_capabilities = DeviceCapabilities(model=capabilities.model, chip=capabilities.chip, memory=capabilities.memory)
  55. topology.update_node(node_id, device_capabilities)
  56. for node_id, peers in response.peer_graph.items():
  57. for peer_id in peers.peer_ids:
  58. topology.add_edge(node_id, peer_id)
  59. return topology