
feat: enable opentelemetry grpc instrumentation
fdupont committed Feb 7, 2024
1 parent 1f8e804 commit 3897fb8
Showing 4 changed files with 103 additions and 2 deletions.
1 change: 1 addition & 0 deletions README.md
@@ -52,6 +52,7 @@ The following models can be served by pythie-serving:
* `--port`: Port number to listen on.

#### Environment variables
* `OPENTELEMETRY_COLLECTOR_HOST`: Endpoint (host and port) of the OpenTelemetry Collector's OTLP/gRPC receiver. If not defined, OpenTelemetry will not be activated. See https://opentelemetry.io/docs/what-is-opentelemetry for more details.
For a treelite-served model:
* `TREELITE_NTHREAD`: Number of threads to use to compute predictions.
* `TREELITE_BIND_THREADS`: Set to `0` to deactivate thread pinning. See https://treelite.readthedocs.io/en/latest/treelite-runtime-api.html
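With this change, pointing `OPENTELEMETRY_COLLECTOR_HOST` at a collector's OTLP/gRPC receiver activates tracing, while leaving the variable unset keeps the server untraced. A value such as `otel-collector:4317` is typical (4317 is the conventional OTLP/gRPC port; the actual address is deployment-specific). The logic that consumes the variable lives in `src/pythie_serving/run.py` below.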
84 changes: 82 additions & 2 deletions pythie-serving-requirements.txt
@@ -4,10 +4,34 @@
#
# pip-compile --extra=serving --no-emit-index-url --output-file=pythie-serving-requirements.txt setup.py
#
backoff==2.2.1
    # via
    #   opentelemetry-exporter-otlp-proto-common
    #   opentelemetry-exporter-otlp-proto-grpc
    #   opentelemetry-exporter-otlp-proto-http
certifi==2024.2.2
    # via requests
charset-normalizer==3.3.2
    # via requests
cloudpickle==2.1.0
    # via pythie-serving (setup.py)
deprecated==1.2.14
    # via
    #   opentelemetry-api
    #   opentelemetry-exporter-otlp-proto-grpc
    #   opentelemetry-exporter-otlp-proto-http
googleapis-common-protos==1.62.0
    # via
    #   opentelemetry-exporter-otlp-proto-grpc
    #   opentelemetry-exporter-otlp-proto-http
grpcio==1.51.1
    # via
    #   opentelemetry-exporter-otlp-proto-grpc
    #   pythie-serving (setup.py)
idna==3.6
    # via requests
importlib-metadata==6.11.0
    # via opentelemetry-api
joblib==1.2.0
    # via scikit-learn
lightgbm==3.3.4
@@ -20,8 +44,50 @@ numpy==1.23.5
    #   scipy
    #   treelite-runtime
    #   xgboost
opentelemetry-api==1.22.0
    # via
    #   opentelemetry-exporter-otlp-proto-grpc
    #   opentelemetry-exporter-otlp-proto-http
    #   opentelemetry-instrumentation
    #   opentelemetry-instrumentation-grpc
    #   opentelemetry-sdk
    #   pythie-serving (setup.py)
opentelemetry-exporter-otlp==1.22.0
    # via pythie-serving (setup.py)
opentelemetry-exporter-otlp-proto-common==1.22.0
    # via
    #   opentelemetry-exporter-otlp-proto-grpc
    #   opentelemetry-exporter-otlp-proto-http
opentelemetry-exporter-otlp-proto-grpc==1.22.0
    # via opentelemetry-exporter-otlp
opentelemetry-exporter-otlp-proto-http==1.22.0
    # via opentelemetry-exporter-otlp
opentelemetry-instrumentation==0.43b0
    # via opentelemetry-instrumentation-grpc
opentelemetry-instrumentation-grpc==0.43b0
    # via pythie-serving (setup.py)
opentelemetry-proto==1.22.0
    # via
    #   opentelemetry-exporter-otlp-proto-common
    #   opentelemetry-exporter-otlp-proto-grpc
    #   opentelemetry-exporter-otlp-proto-http
opentelemetry-sdk==1.22.0
    # via
    #   opentelemetry-exporter-otlp-proto-grpc
    #   opentelemetry-exporter-otlp-proto-http
    #   opentelemetry-instrumentation-grpc
    #   pythie-serving (setup.py)
opentelemetry-semantic-conventions==0.43b0
    # via
    #   opentelemetry-instrumentation-grpc
    #   opentelemetry-sdk
protobuf==4.22.0
    # via
    #   googleapis-common-protos
    #   opentelemetry-proto
    #   pythie-serving (setup.py)
requests==2.31.0
    # via opentelemetry-exporter-otlp-proto-http
scikit-learn==1.2.0
    # via
    #   lightgbm
@@ -36,7 +102,21 @@ threadpoolctl==3.1.0
    # via scikit-learn
treelite-runtime==2.2.2
    # via pythie-serving (setup.py)
typing-extensions==4.9.0
    # via opentelemetry-sdk
urllib3==2.2.0
    # via requests
wheel==0.38.4
    # via lightgbm
wrapt==1.16.0
    # via
    #   deprecated
    #   opentelemetry-instrumentation
    #   opentelemetry-instrumentation-grpc
xgboost==0.90
    # via pythie-serving (setup.py)
zipp==3.17.0
    # via importlib-metadata

# The following packages are considered to be unsafe in a requirements file:
# setuptools
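The header of the lock file records how it was produced; rerunning the same pip-compile invocation against setup.py reproduces the pinned set above, OpenTelemetry additions included:

pip-compile --extra=serving --no-emit-index-url --output-file=pythie-serving-requirements.txt setup.py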
4 changes: 4 additions & 0 deletions setup.py
@@ -9,6 +9,10 @@
"treelite_runtime~=2.2.2",
"scikit-learn~=1.2.0",
"cloudpickle~=2.1.0",
"opentelemetry-instrumentation-grpc~=0.38b0",
"opentelemetry-api>=1.17.0, <2.0",
"opentelemetry-sdk>=1.17.0, <2.0",
"opentelemetry-exporter-otlp>=1.17.0, <2.0",
]
extras_require_test = [
    *extras_require_serving,
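For reference, the pins in the lock file above (1.22.0 for the OpenTelemetry API, SDK, and exporter; 0.43b0 for the instrumentation packages) all fall within these constraints, since `~=0.38b0` permits any 0.x release at or above 0.38b0.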
16 changes: 16 additions & 0 deletions src/pythie_serving/run.py
@@ -5,13 +5,27 @@
from logging.config import dictConfig

from google.protobuf import text_format
from opentelemetry import trace
from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter
from opentelemetry.instrumentation.grpc import GrpcInstrumentorServer
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import BatchSpanProcessor

from pythie_serving import create_grpc_server
from pythie_serving.tensorflow_proto.tensorflow_serving.config import (
    model_server_config_pb2,
)


def initialize_opentelemetry():
    # os.environ.get returns None when the variable is unset, so tracing is
    # only activated when a collector endpoint has been configured.
    otel_collector_host = os.environ.get("OPENTELEMETRY_COLLECTOR_HOST")
    if otel_collector_host is not None:
        trace.set_tracer_provider(TracerProvider())
        # Export finished spans in batches to the collector's OTLP/gRPC receiver.
        otlp_exporter = OTLPSpanExporter(endpoint=otel_collector_host, insecure=True)
        trace.get_tracer_provider().add_span_processor(BatchSpanProcessor(otlp_exporter))
        # Auto-instrument the gRPC server so every RPC produces a span.
        GrpcInstrumentorServer().instrument()


def run():
    model_choice_set = {"xgboost", "lightgbm", "treelite", "sklearn", "table"}
    model_choice_str = ",".join(model_choice_set)
@@ -76,6 +90,8 @@ def run():
    with open(ns.model_config_file_path) as opened_config_file:
        text_format.Parse(opened_config_file.read(), model_server_config)

    initialize_opentelemetry()

    maximum_concurrent_rpcs = ns.maximum_concurrent_rpcs
    if maximum_concurrent_rpcs < 0:
        maximum_concurrent_rpcs = None  # grpc.server takes None to accept an unlimited number of connections
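The OTLP exporter configured above requires a reachable collector. For a quick local check of the same tracing wiring, the SDK's console exporter can stand in for OTLP — a minimal sketch, not part of this commit, using standard opentelemetry-sdk APIs (the tracer and span names are arbitrary):

from opentelemetry import trace
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import ConsoleSpanExporter, SimpleSpanProcessor

provider = TracerProvider()
# Print each span to stdout as it finishes, instead of batching over OTLP.
provider.add_span_processor(SimpleSpanProcessor(ConsoleSpanExporter()))
trace.set_tracer_provider(provider)

tracer = trace.get_tracer("pythie_serving.smoke_test")
with tracer.start_as_current_span("predict"):
    pass  # on exit, the finished span is written to stdout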
