This repository has been archived by the owner on Jan 9, 2025. It is now read-only.

Commit

chore: adopt latest sdk
heiruwu committed Jan 12, 2024
1 parent d94a76a commit abb61e8
Showing 1 changed file with 18 additions and 22 deletions.
40 changes: 18 additions & 22 deletions mobilenetv2/1/model.py
@@ -10,13 +10,10 @@
 from instill.helpers.const import DataType
 from instill.helpers.ray_io import serialize_byte_tensor, deserialize_bytes_tensor
 from instill.helpers.ray_config import instill_deployment, InstillDeployable

-from ray_pb2 import (
-    ModelMetadataRequest,
-    ModelMetadataResponse,
-    RayServiceCallRequest,
-    RayServiceCallResponse,
-    InferTensor,
+from instill.helpers import (
+    construct_infer_response,
+    construct_metadata_response,
+    Metadata,
 )
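One of the context imports above, instill.helpers.ray_io, provides the byte-tensor helpers the handler uses further down. A minimal sketch of that pattern, not part of this commit: the payload strings are invented, only the call shapes mirror the handler, and the wire encoding itself is internal to the SDK.

import numpy as np

from instill.helpers.ray_io import serialize_byte_tensor, deserialize_bytes_tensor

# Response direction, as used further down in __call__: pack per-image result
# strings into a single raw output tensor.
s_out = ['{"cat": 0.91}', '{"dog": 0.05}']        # hypothetical per-image results
out = serialize_byte_tensor(np.asarray(s_out))    # one packed byte tensor
out = np.expand_dims(out, axis=0)                 # handed back as the raw output content

# Request direction: req.raw_input_contents[0] arrives as packed bytes;
# deserialize_bytes_tensor unpacks it into a numpy array of per-element byte
# strings (encoding details are the SDK's).
# input_tensors = deserialize_bytes_tensor(req.raw_input_contents[0])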


@@ -48,29 +45,28 @@ def _image_labels(self) -> List[str]:
             categories.append(label.strip())
         return categories

-    def ModelMetadata(self, req: ModelMetadataRequest) -> ModelMetadataResponse:
-        resp = ModelMetadataResponse(
-            name=req.name,
-            versions=req.version,
-            framework="onnx",
+    def ModelMetadata(self, req):
+        resp = construct_metadata_response(
+            req=req,
             inputs=[
-                ModelMetadataResponse.TensorMetadata(
+                Metadata(
                     name="input",
                     datatype=str(DataType.TYPE_STRING.name),
                     shape=[1],
                 ),
             ],
             outputs=[
-                ModelMetadataResponse.TensorMetadata(
+                Metadata(
                     name="output",
                     datatype=str(DataType.TYPE_STRING.name),
                     shape=[1000],
                 ),
             ],
         )
         return resp

-    async def __call__(self, request: RayServiceCallRequest) -> RayServiceCallResponse:
-        b_tensors = request.raw_input_contents[0]
+    async def __call__(self, req):
+        b_tensors = req.raw_input_contents[0]

         input_tensors = deserialize_bytes_tensor(b_tensors)

@@ -95,16 +91,16 @@ async def __call__(self, request: RayServiceCallRequest) -> RayServiceCallResponse:
         out = serialize_byte_tensor(np.asarray(s_out))
         out = np.expand_dims(out, axis=0)

-        return RayServiceCallResponse(
-            model_name=request.model_name,
-            model_version=request.model_version,
+        return construct_infer_response(
+            req=req,
             outputs=[
-                InferTensor(
+                Metadata(
                     name="output",
                     datatype=str(DataType.TYPE_STRING.name),
                     shape=[len(batch_out), 1000],
-                ),
+                )
             ],
-            raw_output_contents=out,
+            raw_outputs=out,
         )
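Taken together, the hunks leave the handler looking roughly like the sketch below. This is a consolidation of what the diff shows, not the full file: the image decoding, preprocessing and ONNX inference in the middle of __call__ are collapsed in the diff and stubbed out here, the class name and decorator placement follow the SDK's usual pattern rather than lines visible in this commit, and the InstillDeployable wiring at the bottom of the real file is outside the diff entirely.

from typing import List

import numpy as np

from instill.helpers import (
    construct_infer_response,
    construct_metadata_response,
    Metadata,
)
from instill.helpers.const import DataType
from instill.helpers.ray_config import instill_deployment, InstillDeployable
from instill.helpers.ray_io import serialize_byte_tensor, deserialize_bytes_tensor


@instill_deployment  # decorator placement assumed; only the import is visible in this diff
class MobileNet:
    def _image_labels(self) -> List[str]:
        # Body collapsed in the diff; it returns the ImageNet category names.
        categories: List[str] = []
        return categories

    def ModelMetadata(self, req):
        return construct_metadata_response(
            req=req,
            inputs=[
                Metadata(name="input", datatype=str(DataType.TYPE_STRING.name), shape=[1]),
            ],
            outputs=[
                Metadata(name="output", datatype=str(DataType.TYPE_STRING.name), shape=[1000]),
            ],
        )

    async def __call__(self, req):
        b_tensors = req.raw_input_contents[0]
        input_tensors = deserialize_bytes_tensor(b_tensors)

        # ... image decoding, preprocessing and ONNX inference are collapsed in
        # the diff; they produce one 1000-class score vector per input ...
        batch_out = [[0.0] * 1000 for _ in input_tensors]  # placeholder scores
        s_out = [str(scores) for scores in batch_out]      # placeholder serialization

        out = serialize_byte_tensor(np.asarray(s_out))
        out = np.expand_dims(out, axis=0)

        return construct_infer_response(
            req=req,
            outputs=[
                Metadata(
                    name="output",
                    datatype=str(DataType.TYPE_STRING.name),
                    shape=[len(batch_out), 1000],
                )
            ],
            raw_outputs=out,
        )


# Deployment wiring sits below this in the real file but is outside the diff;
# the SDK's InstillDeployable (imported above) wraps the class there.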


