@@ -71,7 +71,6 @@ class TransformerConnectionHandler(ConnectionHandler):
             print("CLOSED RPC_INFERENCE")
 
     async def rpc_forward(self, request: runtime_pb2.ExpertRequest, context: P2PContext) -> runtime_pb2.ExpertResponse:
-        return await super().rpc_forward(request, context)
         # Parse request and prepare backends
         hidden_states = [deserialize_torch_tensor(tensor) for tensor in request.tensors]
         requested_uids = self._check_header(request)
@@ -97,8 +96,6 @@ class TransformerConnectionHandler(ConnectionHandler):
     async def rpc_forward_stream(
         self, requests: AsyncIterator[runtime_pb2.ExpertRequest], context: P2PContext
     ) -> AsyncIterator[runtime_pb2.ExpertRequest]:
-        async for response in super().rpc_forward_stream(requests, context):
-            yield response
         # Parse requests and prepare backends
         uids_header, hidden_states = await self._gather_inputs(requests, context)
         requested_uids = self._check_header_str(uids_header)
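
Both hunks make the same kind of change: they drop the placeholder delegation to the generic ConnectionHandler (the early `return await super().rpc_forward(...)` in the unary handler, and the `async for ... yield` pass-through in the streaming one), so the specialized parsing that follows each signature now actually runs. Below is a minimal, self-contained sketch of the resulting handler shape, assuming simplified stand-ins for `runtime_pb2.ExpertRequest`, `P2PContext`, tensor deserialization, and the `_check_header` / `_gather_inputs` helpers; all `Dummy*` names and helper bodies are hypothetical and only illustrate the control flow, not the actual library implementation.

# Minimal sketch: hypothetical stand-ins, not the real handler code.
import asyncio
from dataclasses import dataclass
from typing import AsyncIterator, List


@dataclass
class DummyRequest:               # stand-in for runtime_pb2.ExpertRequest / ExpertResponse
    uid: str                      # header naming the requested module(s)
    tensors: List[List[float]]    # stand-in for serialized torch tensors


@dataclass
class DummyContext:               # stand-in for P2PContext
    peer_id: str = "local-test"


class ConnectionHandler:
    """Generic base handler; the subclass no longer delegates to it."""

    async def rpc_forward(self, request: DummyRequest, context: DummyContext) -> DummyRequest:
        raise NotImplementedError


class TransformerConnectionHandler(ConnectionHandler):
    async def rpc_forward(self, request: DummyRequest, context: DummyContext) -> DummyRequest:
        # Parse request and prepare backends (mirrors the unary handler in the diff);
        # with the early `return await super().rpc_forward(...)` removed, this runs.
        hidden_states = [list(tensor) for tensor in request.tensors]   # "deserialize"
        requested_uids = self._check_header(request)
        # A real handler would now run the requested backends; here we just echo inputs.
        return DummyRequest(uid=",".join(requested_uids), tensors=hidden_states)

    async def rpc_forward_stream(
        self, requests: AsyncIterator[DummyRequest], context: DummyContext
    ) -> AsyncIterator[DummyRequest]:
        # Parse requests and prepare backends (mirrors the streaming handler);
        # gather the whole input stream instead of re-yielding super()'s responses.
        uids_header, hidden_states = await self._gather_inputs(requests, context)
        requested_uids = self._check_header_str(uids_header)
        yield DummyRequest(uid=",".join(requested_uids), tensors=hidden_states)

    def _check_header(self, request: DummyRequest) -> List[str]:
        # Hypothetical: split the UID header into individual module UIDs.
        return request.uid.split(".")

    def _check_header_str(self, uids_header: str) -> List[str]:
        return uids_header.split(".")

    async def _gather_inputs(self, requests, context):
        # Hypothetical: concatenate tensors from all streamed parts under one header.
        uids_header, parts = None, []
        async for request in requests:
            uids_header = uids_header or request.uid
            parts.extend(request.tensors)
        return uids_header, parts


async def _demo() -> None:
    handler = TransformerConnectionHandler()
    response = await handler.rpc_forward(DummyRequest("layer0.layer1", [[1.0, 2.0]]), DummyContext())
    print(response.uid, response.tensors)

    async def request_stream():
        yield DummyRequest("layer0.layer1", [[1.0, 2.0]])
        yield DummyRequest("layer0.layer1", [[3.0, 4.0]])

    async for streamed in handler.rpc_forward_stream(request_stream(), DummyContext()):
        print(streamed.uid, streamed.tensors)


if __name__ == "__main__":
    asyncio.run(_demo())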