diff --git a/.bazelignore b/.bazelignore
index 9495b24c..b9bead00 100644
--- a/.bazelignore
+++ b/.bazelignore
@@ -1,4 +1,6 @@
 bazel-bidding-auction-server
+services/inference_sidecar/common
+services/inference_sidecar/modules
 google_internal/tools/cost
 production/aws/terraform
 scp
diff --git a/.bazelrc b/.bazelrc
index 9dd018c9..fd120bcc 100644
--- a/.bazelrc
+++ b/.bazelrc
@@ -4,6 +4,7 @@ build --config=clang
 build --compilation_mode=opt
 build --output_filter='^//((?!(third_party):).)*$'
 build --color=yes
+build --workspace_status_command="bash tools/get_workspace_status"

 build:run_all_tests --cache_test_results=no
 test:run_all_tests --test_verbose_timeout_warnings
@@ -16,11 +17,14 @@ build --copt=-DENABLE_LOGS_PREVIEW
 # Avoid linking to ICU shared libs for googleurl
 build --@com_google_googleurl//build_config:system_icu=0

+build:clang --config=cpp_no_warn
 build:clang --cxxopt=-fbracket-depth=512
 build:clang --client_env=CC=clang
 build:clang --cxxopt=-std=c++17
 build:clang --host_cxxopt=-std=c++17
 build:clang --client_env=BAZEL_CXXOPTS=-std=c++17
+# Makes thread safety analysis warnings into errors.
+build:clang --copt=-Werror=thread-safety

 #disable the "int conversion" warning for external/nitrokmscli_aws_c_http/source/websocket_decoder.c
 build:clang --per_file_copt=external/nitrokmscli_.*\.c@-Wno-int-conversion
@@ -37,23 +41,28 @@ build:cpp_no_warn --per_file_copt=services/common/telemetry/.*_test.cc@-Wno-unus
 # Compiler complains about main function being unused in the micro-benchmarks.
 build:cpp_no_warn --per_file_copt=services/.*_benchmarks.cc@-Wno-unused-function

+# Enable clang-tidy
+build:clang-tidy --aspects @bazel_clang_tidy//clang_tidy:clang_tidy.bzl%clang_tidy_aspect
+build:clang-tidy --output_groups=report
+build:clang-tidy --@bazel_clang_tidy//:clang_tidy_config=//:clang_tidy_config
+
 test --test_verbose_timeout_warnings

-build:instance_local --//:instance=local --config=cpp_no_warn
+build:instance_local --//:instance=local
 build:instance_local --@google_privacysandbox_servers_common//:instance=local

-build:instance_gcp --//:instance=gcp --config=cpp_no_warn
+build:instance_gcp --//:instance=gcp
 build:instance_gcp --@google_privacysandbox_servers_common//:instance=gcp

-build:instance_aws --//:instance=aws --config=cpp_no_warn
+build:instance_aws --//:instance=aws
 build:instance_aws --@google_privacysandbox_servers_common//:instance=aws

-build:platform_local --//:platform=local --config=cpp_no_warn
+build:platform_local --//:platform=local

-build:platform_aws --//:platform=aws --config=cpp_no_warn
+build:platform_aws --//:platform=aws
 build:platform_aws --@google_privacysandbox_servers_common//:platform=aws

-build:platform_gcp --//:platform=gcp --config=cpp_no_warn
+build:platform_gcp --//:platform=gcp
 build:platform_gcp --@google_privacysandbox_servers_common//:platform=gcp

 build:local_aws --config=instance_local
@@ -129,6 +138,7 @@ test:coretests --test_tag_filters=-flaky

 build:small-tests --action_env=PRIVACY_SANDBOX_SERVERS_SMALL_TESTS=1
 build:small-tests --compilation_mode=dbg
+build:small-tests --fission=yes
 test:small-tests --build_tests_only
 test:small-tests --test_size_filters=-large,-enormous
 test:small-tests --test_tag_filters=-requires-network
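[Editor's note: the new `clang-tidy` config above wires the bazel_clang_tidy aspect to the checked-in `.clang-tidy` settings. A minimal sketch of how it might be invoked from a dev checkout; the `//services/...` target pattern is illustrative, any buildable C++ target works:]

# Run the clang-tidy aspect over the services tree and collect its "report"
# output group; findings become errors per the WarningsAsErrors setting.
bazel build --config=clang-tidy //services/...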
diff --git a/.gitattributes b/.gitattributes
new file mode 100644
index 00000000..e1e94e72
--- /dev/null
+++ b/.gitattributes
@@ -0,0 +1 @@
+*.patch whitespace=-trailing-space
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 2343fa38..ac2dd613 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -19,6 +19,9 @@ exclude: (?x)^(
   version\.txt|
   .*\.patch|
   builders/.*|
+  services/inference_sidecar/common/builders/.*|
+  services/inference_sidecar/modules/pytorch_v2_1_1/builders/.*|
+  services/inference_sidecar/modules/tensorflow_v2_14_0/builders/.*|
   testing/functionaltest-system/.*
 )$
diff --git a/BUILD b/BUILD
index aa88d354..4f953eae 100644
--- a/BUILD
+++ b/BUILD
@@ -173,6 +173,60 @@ config_setting(
     visibility = ["//visibility:public"],
 )

+string_flag(
+    name = "inference_build",
+    build_setting_default = "no",
+    values = [
+        "yes",
+        "no",
+    ],
+)
+
+config_setting(
+    name = "inference",
+    flag_values = {
+        ":inference_build": "yes",
+    },
+    visibility = ["//visibility:public"],
+)
+
+string_flag(
+    name = "inference_runtime",
+    build_setting_default = "noop",
+    values = [
+        "noop",
+        "pytorch",
+        "tensorflow",
+    ],
+)
+
+config_setting(
+    name = "inference_noop",
+    flag_values = {
+        ":inference_build": "yes",
+        ":inference_runtime": "noop",
+    },
+    visibility = ["//visibility:public"],
+)
+
+config_setting(
+    name = "inference_pytorch",
+    flag_values = {
+        ":inference_build": "yes",
+        ":inference_runtime": "pytorch",
+    },
+    visibility = ["//visibility:public"],
+)
+
+config_setting(
+    name = "inference_tensorflow",
+    flag_values = {
+        ":inference_build": "yes",
+        ":inference_runtime": "tensorflow",
+    },
+    visibility = ["//visibility:public"],
+)
+
 string_flag(
     name = "build_for_test",
     build_setting_default = "non_test",
@@ -195,3 +249,9 @@ filegroup(
     srcs = ["version.txt"],
     visibility = ["//services/common/telemetry:__pkg__"],
 )
+
+filegroup(
+    name = "clang_tidy_config",
+    srcs = [".clang-tidy"],
+    visibility = ["//visibility:public"],
+)
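[Editor's note: the `inference_build` and `inference_runtime` flags above combine into the `inference_*` config_settings that targets can select() on. A sketch of building with inference enabled and the PyTorch runtime selected; the server target label is an assumption, while the flag names come from the BUILD additions:]

# Build the bidding server with the inference sidecar compiled in,
# choosing PyTorch as the runtime via the user-defined build settings.
bazel build //services/bidding_service:server \
  --//:inference_build=yes \
  --//:inference_runtime=pytorch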
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 45c2907e..2a7bac74 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -2,6 +2,45 @@ All notable changes to this project will be documented in this file. See [commit-and-tag-version](https://github.com/absolute-version/commit-and-tag-version) for commit guidelines.

+## 3.4.0 (2024-04-02)
+
+
+### Features
+
+* Add Bid Currency to PAS bidMetadata for scoreAd()
+* Add blob fetch library
+* Adds API for generating ciphertexts for server component auctions
+* Adds proto mapping functions for top level auction reactor
+* Adds reactor for handling top level auction in SFE
+* Adds utility for re-encrypting ciphertexts from device
+* Adds validator functions for top level auction reactor
+* Build version dynamically for telemetry
+* Check Bid Currency on Protected App Signals AdWithBids
+* create SellerCodeFetchManager for auction service UDF fetching logic
+* Enable cpp_no_warn mode by default
+* Enable ML inference in B&A
+* Implements server component auctions feature in SFE
+* Implements support for server component auctions in secure invoke
+* Implements top level auction feature in auction service
+* Implements top level auctions support in secure invoke
+
+
+### Bug Fixes
+
+* Add get_workspace_status script to github repo
+* Create extra function to only add zone attribute to gcp deployment
+* Delete Right AdWithBids for Currency Mismatch in SFE
+* Do not check trailing space for patch files
+* improve graceful shutdown of bidding server
+* Simplify V8Dispatcher::LoadSync, switch to absl::Notification
+* Update data plane library dependency
+* update generate bid to receive empty json object when no device signals are present
+
+
+### Dependencies
+
+* **deps:** Upgrade build-system to 0.57.0
+
 ## 3.3.0 (2024-03-11)
diff --git a/WORKSPACE b/WORKSPACE
index 2d5fc0f4..9102a7b0 100644
--- a/WORKSPACE
+++ b/WORKSPACE
@@ -3,15 +3,21 @@ load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
 ### register Python toolchain -- note this toolchain defines the path to a specific version of python
 load("//builders/bazel:deps.bzl", "python_deps")

+http_archive(
+    name = "io_bazel_rules_docker",
+    sha256 = "b1e80761a8a8243d03ebca8845e9cc1ba6c82ce7c5179ce2b295cd36f7e394bf",
+    urls = ["https://github.com/bazelbuild/rules_docker/releases/download/v0.25.0/rules_docker-v0.25.0.tar.gz"],
+)
+
 python_deps("//builders/bazel")

 http_archive(
     name = "google_privacysandbox_servers_common",
-    # 2024-02-15
-    sha256 = "4073bbbce12efe4d1f1d3586a86bfa712ddf373605f4626abf1c43ec1772c14b",
-    strip_prefix = "data-plane-shared-libraries-a4b61aeca9da3ce79a03a5ee4aa32e0acaa78cb6",
+    # 2024-04-01
+    sha256 = "e36cc26c917ec4b1066a32777b48ac8728ba13c276cdda2e91c36ad2037d9bcd",
+    strip_prefix = "data-plane-shared-libraries-1fbac466b6b88e00a4ca037f7359ee1942ade13e",
     urls = [
-        "https://github.com/privacysandbox/data-plane-shared-libraries/archive/a4b61aeca9da3ce79a03a5ee4aa32e0acaa78cb6.zip",
+        "https://github.com/privacysandbox/data-plane-shared-libraries/archive/1fbac466b6b88e00a4ca037f7359ee1942ade13e.zip",
     ],
 )
@@ -38,6 +44,17 @@ load("@google_privacysandbox_servers_common//third_party:deps4.bzl", data_plane_
 data_plane_shared_deps4()

+load(
+    "@io_bazel_rules_docker//repositories:repositories.bzl",
+    container_repositories = "repositories",
+)
+
+container_repositories()
+
+load("@io_bazel_rules_docker//repositories:deps.bzl", rules_docker_deps = "deps")
+
+rules_docker_deps()
+
 load("//third_party:container_deps.bzl", "container_deps")

 container_deps()
@@ -58,10 +75,57 @@ http_archive(

 http_archive(
     name = "service_value_key_fledge_privacysandbox",
-    # commit 1eee8e79e44f3ca735cfab0b716e57f81d95bd46 2023-10-26
-    sha256 = "f9705cfa29e7ba1fb196a30a8dd11fc314077d7f693091d24fa8597612ac160a",
-    strip_prefix = "protected-auction-key-value-service-1eee8e79e44f3ca735cfab0b716e57f81d95bd46",
+    # commit af184d649be5d9f0a62738db41ed1496de427bcd 2024-03-15
+    sha256 = "",
+    strip_prefix = "protected-auction-key-value-service-af184d649be5d9f0a62738db41ed1496de427bcd",
+    urls = [
+        "https://github.com/privacysandbox/protected-auction-key-value-service/archive/af184d649be5d9f0a62738db41ed1496de427bcd.zip",
+    ],
+)
+
+### Initialize Python headers
+
+http_archive(
+    name = "pybind11_bazel",
+    sha256 = "b72c5b44135b90d1ffaba51e08240be0b91707ac60bea08bb4d84b47316211bb",
+    strip_prefix = "pybind11_bazel-b162c7c88a253e3f6b673df0c621aca27596ce6b",
+    urls = ["https://github.com/pybind/pybind11_bazel/archive/b162c7c88a253e3f6b673df0c621aca27596ce6b.zip"],
+)
+
+load("@pybind11_bazel//:python_configure.bzl", "python_configure")
+
+python_configure(
+    name = "local_config_python",
+)
+
+### Initialize inference common (for bidding server inference utils)
+local_repository(
+    name = "inference_common",
+    path = "services/inference_sidecar/common",
+)
+
+### Initialize PyTorch sidecar local repository (for PyTorch sidecar binary)
+local_repository(
+    name = "pytorch_v2_1_1",
+    path = "services/inference_sidecar/modules/pytorch_v2_1_1",
+)
+
+### Initialize TensorFlow sidecar local repository
+
+local_repository(
+    name = "tensorflow_v2_14_0",
+    path = "services/inference_sidecar/modules/tensorflow_v2_14_0",
+)
+
+# TODO: Remove this once we sync to the common repo version that includes this
+# as a dependency.
+http_archive(
+    name = "bazel_clang_tidy",
+    patch_args = ["-p1"],
+    patches = ["//third_party:bazel_clang_tidy.patch"],
+    sha256 = "352aeb57ad7ed53ff6e02344885de426421fb6fd7a3890b04d14768d759eb598",
+    strip_prefix = "bazel_clang_tidy-4884c32e09c1ea9ac96b3f08c3004f3ac4c3fe39",
     urls = [
-        "https://github.com/privacysandbox/protected-auction-key-value-service/archive/1eee8e79e44f3ca735cfab0b716e57f81d95bd46.zip",
+        "https://github.com/erenon/bazel_clang_tidy/archive/4884c32e09c1ea9ac96b3f08c3004f3ac4c3fe39.zip",
     ],
 )
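[Editor's note: the sidecar trees are excluded from the main build via .bazelignore and re-enter as the local_repository rules above. A quick sanity check that the repository mappings resolve, under the assumption the repos build standalone:]

# List targets visible through the new local repositories.
bazel query @inference_common//...
bazel query @pytorch_v2_1_1//...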
"protected-auction-key-value-service-1eee8e79e44f3ca735cfab0b716e57f81d95bd46", + # commit af184d649be5d9f0a62738db41ed1496de427bcd 2024-03-15 + sha256 = "", + strip_prefix = "protected-auction-key-value-service-af184d649be5d9f0a62738db41ed1496de427bcd", + urls = [ + "https://github.com/privacysandbox/protected-auction-key-value-service/archive/af184d649be5d9f0a62738db41ed1496de427bcd.zip", + ], +) + +### Initialize Python headers + +http_archive( + name = "pybind11_bazel", + sha256 = "b72c5b44135b90d1ffaba51e08240be0b91707ac60bea08bb4d84b47316211bb", + strip_prefix = "pybind11_bazel-b162c7c88a253e3f6b673df0c621aca27596ce6b", + urls = ["https://github.com/pybind/pybind11_bazel/archive/b162c7c88a253e3f6b673df0c621aca27596ce6b.zip"], +) + +load("@pybind11_bazel//:python_configure.bzl", "python_configure") + +python_configure( + name = "local_config_python", +) + +### Initialize inference common (for bidding server inference utils) +local_repository( + name = "inference_common", + path = "services/inference_sidecar/common", +) + +### Initialize PyTorch sidecar local repository (for PyTorch sidecar binary) +local_repository( + name = "pytorch_v2_1_1", + path = "services/inference_sidecar/modules/pytorch_v2_1_1", +) + +### Initialize Tensorflow sidecar local respository + +local_repository( + name = "tensorflow_v2_14_0", + path = "services/inference_sidecar/modules/tensorflow_v2_14_0", +) + +# TODO: Remove this once we sync to the common repo version that includes this +# as a dependecy. +http_archive( + name = "bazel_clang_tidy", + patch_args = ["-p1"], + patches = ["//third_party:bazel_clang_tidy.patch"], + sha256 = "352aeb57ad7ed53ff6e02344885de426421fb6fd7a3890b04d14768d759eb598", + strip_prefix = "bazel_clang_tidy-4884c32e09c1ea9ac96b3f08c3004f3ac4c3fe39", urls = [ - "https://github.com/privacysandbox/protected-auction-key-value-service/archive/1eee8e79e44f3ca735cfab0b716e57f81d95bd46.zip", + "https://github.com/erenon/bazel_clang_tidy/archive/4884c32e09c1ea9ac96b3f08c3004f3ac4c3fe39.zip", ], ) diff --git a/api/BUILD b/api/BUILD index 34e9b1e4..c1ca3969 100644 --- a/api/BUILD +++ b/api/BUILD @@ -25,7 +25,7 @@ proto_library( deps = [ "@com_google_googleapis//google/api:annotations_proto", "@com_google_protobuf//:struct_proto", - "@google_privacysandbox_servers_common//src/cpp/logger:logger_proto", + "@google_privacysandbox_servers_common//src/logger:logger_proto", ], ) diff --git a/api/bidding_auction_servers.proto b/api/bidding_auction_servers.proto index 9b816b45..5e914e69 100644 --- a/api/bidding_auction_servers.proto +++ b/api/bidding_auction_servers.proto @@ -18,7 +18,7 @@ package privacy_sandbox.bidding_auction_servers; import "google/api/annotations.proto"; import "google/protobuf/struct.proto"; -import "src/cpp/logger/logger.proto"; +import "src/logger/logger.proto"; // ProtectedAudienceInput is generated and encrypted by the client, // passed through the untrusted Seller service, and decrypted by the @@ -49,6 +49,11 @@ message ProtectedAudienceInput { // Consented debugging field. server_common.ConsentedDebugConfiguration consented_debug_config = 5; + + // Optional. + // A boolean value which indicates whether temporary unlimited egress should + // be enabled. + bool enable_unlimited_egress = 6; } // ProtectedAuctionInput is generated and encrypted by the client, @@ -79,6 +84,11 @@ message ProtectedAuctionInput { // Consented debugging field. server_common.ConsentedDebugConfiguration consented_debug_config = 5; + + // Optional. 
+ // A boolean value which indicates whether temporary unlimited egress should + // be enabled. + bool enable_unlimited_egress = 6; } // Grouping of data pertaining to protected app signals. @@ -255,6 +265,8 @@ message AuctionResult { // Map from the buyer participating origin (that participated in the auction) // to interest group indices. + // Note: This is currently not populated for auctions + // originating from mobile devices (CLIENT_TYPE_ANDROID). map bidding_groups = 11; // In the event of an error during the SelectAd request, an Error object will @@ -285,7 +297,8 @@ message AuctionResult { // Arbitrary metadata to pass to the top-level seller. string ad_metadata = 16; - // Indicates the currency used for the bid price (expressed as ISO 4217 alpha code). + // Optional. Indicates the currency used for the bid price + // (expressed as ISO 4217 alpha code). string bid_currency = 17; message AuctionParams { @@ -307,6 +320,19 @@ message AuctionResult { AuctionParams auction_params = 18; } +message GetComponentAuctionCiphertextsRequest { + // Encrypted ProtectedAuctionInput from the device. + bytes protected_auction_ciphertext = 1; + // List of partner sellers that will participate in the server orchestrated + // component auctions. + repeated string component_sellers = 2; +} + +message GetComponentAuctionCiphertextsResponse { + // Map of sellers passed in request to their encrypted ProtectedAuctionInput. + map seller_component_ciphertexts = 1; +} + // SellerFrontEnd service (also known as SFE) operated by SSP / Seller. service SellerFrontEnd { // Selects a winning remarketing ad for the Publisher ad slot that may be @@ -317,6 +343,14 @@ service SellerFrontEnd { body: "*" }; } + // Returns encrypted Protected Auction request payload for component level + // sellers for server component auctions. + rpc GetComponentAuctionCiphertexts(GetComponentAuctionCiphertextsRequest) returns (GetComponentAuctionCiphertextsResponse) { + option (google.api.http) = { + post: "/v1/getComponentAuctionCiphertexts" + body: "*" + }; + } } enum EncryptionCloudPlatform { @@ -640,6 +674,11 @@ message GetBidsRequest { // The Id is specified by the buyer to support coordinated experiments // with the buyer's Key/Value services. optional int32 buyer_kv_experiment_group_id = 13; + + // Optional. + // A boolean value which indicates whether temporary unlimited egress should + // be enabled. + bool enable_unlimited_egress = 14; } // Encrypted GetBidsRawRequest. @@ -908,6 +947,11 @@ message GenerateProtectedAppSignalsBidsRequest { // Contextual data regarding PAS ads. ContextualProtectedAppSignalsData contextual_protected_app_signals_data = 9; + + // Optional. + // A boolean value which indicates whether temporary unlimited egress should + // be enabled. + bool enable_unlimited_egress = 10; } // Encrypted GenerateProtectedAppSignalsBidsRawRequest. @@ -1123,6 +1167,9 @@ message ScoreAdsRequest { // Domain of Buyer who owns app signals used for this ad. string owner = 7; + + // Indicates the currency used for the bid price. + string bid_currency = 8; } // Ad with bid for protected app signals. diff --git a/builders/CHANGELOG.md b/builders/CHANGELOG.md index d9ad2b9b..e323d8a3 100644 --- a/builders/CHANGELOG.md +++ b/builders/CHANGELOG.md @@ -2,6 +2,14 @@ All notable changes to this project will be documented in this file. See [commit-and-tag-version](https://github.com/absolute-version/commit-and-tag-version) for commit guidelines. 
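[Editor's note: a hypothetical call to the new GetComponentAuctionCiphertexts RPC; the host, port, and TLS setup are assumptions, and proto3 JSON requires the ciphertext bytes to be base64 encoded:]

# Ask a running SFE for per-seller re-encrypted ProtectedAuctionInput payloads.
grpcurl -d '{
  "protected_auction_ciphertext": "<base64 ciphertext>",
  "component_sellers": ["seller-a.example", "seller-b.example"]
}' sfe.example.com:443 \
  privacy_sandbox.bidding_auction_servers.SellerFrontEnd/GetComponentAuctionCiphertexts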
diff --git a/builders/CHANGELOG.md b/builders/CHANGELOG.md
index d9ad2b9b..e323d8a3 100644
--- a/builders/CHANGELOG.md
+++ b/builders/CHANGELOG.md
@@ -2,6 +2,14 @@ All notable changes to this project will be documented in this file. See [commit-and-tag-version](https://github.com/absolute-version/commit-and-tag-version) for commit guidelines.

+## 0.57.0 (2024-03-10)
+
+
+### Features
+
+* Add a generic pylintrc
+* Add clang-tidy to build-debian
+
 ## 0.56.0 (2024-02-29)
diff --git a/builders/etc/.clang-tidy b/builders/etc/.clang-tidy
new file mode 100644
index 00000000..2e9829d0
--- /dev/null
+++ b/builders/etc/.clang-tidy
@@ -0,0 +1,8 @@
+Checks: >
+  -*,
+  bugprone-*,
+  -bugprone-narrowing-conversions,
+  performance-*,
+
+WarningsAsErrors: '*'
+...
diff --git a/builders/etc/.pylintrc b/builders/etc/.pylintrc
new file mode 100644
index 00000000..967bd3ee
--- /dev/null
+++ b/builders/etc/.pylintrc
@@ -0,0 +1,118 @@
+[MESSAGES CONTROL]
+
+# List of checkers and warnings to enable.
+enable=
+    indexing-exception,
+    old-raise-syntax,
+
+# List of checkers and warnings to disable.
+# TODO: Shrink this list to as small as possible.
+disable=
+    attribute-defined-outside-init,
+    bad-option-value,
+    bare-except,
+    broad-except,
+    c-extension-no-member,
+    design,
+    file-ignored,
+    fixme,
+    global-statement,
+    import-error,
+    import-outside-toplevel,
+    locally-disabled,
+    misplaced-comparison-constant,
+    multiple-imports,
+    no-self-use,
+    relative-import,
+    similarities,
+    suppressed-message,
+    ungrouped-imports,
+    unsubscriptable-object,
+    useless-object-inheritance,  # Remove once all bots are on Python 3.
+    useless-suppression,
+    wrong-import-order,
+    wrong-import-position,
+    # FIXME: To be removed. Leftovers from Python 3 migration.
+    consider-using-with,
+    raise-missing-from,
+    super-with-arguments,
+    use-a-generator,
+    consider-using-generator,
+    consider-using-f-string,  # Too much legacy usages.
+    unspecified-encoding,  # Too much legacy usage.
+    broad-exception-raised,
+
+[BASIC]
+
+# Regular expression which should only match the name
+# of functions or classes which do not require a docstring.
+no-docstring-rgx=(__.*__|main)
+
+# Min length in lines of a function that requires a docstring.
+docstring-min-length=10
+
+# Regular expression which should only match correct module names. The
+# leading underscore is sanctioned for private modules by Google's style
+# guide.
+#
+# There are exceptions to the basic rule (_?[a-z][a-z0-9_]*) to cover
+# requirements of Python's module system and of the presubmit framework.
+module-rgx=^(_?[a-z][a-z0-9_]*)|__init__|__main__|PRESUBMIT|PRESUBMIT_unittest$
+
+# Regular expression which should only match correct module level names.
+const-rgx=^(_?[A-Z][A-Z0-9_]*|__[a-z0-9_]+__|_?[a-z][a-z0-9_]*)$
+
+# Regular expression which should only match correct class attribute.
+class-attribute-rgx=^(_?[A-Z][A-Z0-9_]*|__[a-z0-9_]+__|_?[a-z][a-z0-9_]*)$
+
+# Regular expression which should only match correct class names.
+class-rgx=^_?[A-Z][a-zA-Z0-9]*$
+
+# Regular expression which should only match correct function names.
+# 'camel_case' and 'snake_case' group names are used for consistency of naming
+# styles across functions and methods.
+function-rgx=^(?:(?P<exempt>setUp|tearDown|setUpModule|tearDownModule)|(?P<camel_case>_?[A-Z][a-zA-Z0-9]*)|(?P<snake_case>_?[a-z][a-z0-9_]*))$
+
+# Regular expression which should only match correct method names.
+# 'camel_case' and 'snake_case' group names are used for consistency of naming
+# styles across functions and methods. 'exempt' indicates a name which is
+# consistent with all naming styles.
+method-rgx=(?x)
+    ^(?:(?P<exempt>_[a-z0-9_]+__|runTest|setUp|tearDown|setUpTestCase
+         |tearDownTestCase|setupSelf|tearDownClass|setUpClass
+         |(test|assert)_*[A-Z0-9][a-zA-Z0-9_]*|next)
+    |(?P<camel_case>_{0,2}[A-Z][a-zA-Z0-9_]*)
+    |(?P<snake_case>_{0,2}[a-z][a-z0-9_]*))$
+
+# Regular expression which should only match correct instance attribute names.
+attr-rgx=^_{0,2}[a-z][a-z0-9_]*$
+
+# Regular expression which should only match correct argument names.
+argument-rgx=^[a-z][a-z0-9_]*$
+
+# Regular expression which should only match correct variable names.
+variable-rgx=^[a-z][a-z0-9_]*$
+
+# Regular expression which should only match correct list comprehension /
+# generator expression variable names.
+inlinevar-rgx=^[a-z][a-z0-9_]*$
+
+# Good variable names which should always be accepted, separated by a comma.
+good-names=main,_,maxDiff
+
+# Bad variable names which should always be refused, separated by a comma.
+bad-names=
+
+# FIXME: Re-enable this.
+# List of builtins function names that should not be used, separated by a comma.
+#bad-builtin=input
+
+[FORMAT]
+
+# Maximum number of characters on a single line.
+max-line-length=90
+
+# Maximum number of lines in a module
+max-module-lines=99999
+
+[TYPECHECK]
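[Editor's note: the changelog calls this a "generic pylintrc"; one plausible way to apply it locally, where the module path is illustrative:]

# Lint a module against the shared baseline config.
pylint --rcfile=builders/etc/.pylintrc path/to/module.py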
diff --git a/builders/images/build-debian/install_apps b/builders/images/build-debian/install_apps
index 6d49d892..fba47ca1 100755
--- a/builders/images/build-debian/install_apps
+++ b/builders/images/build-debian/install_apps
@@ -44,7 +44,7 @@ function apt_update() {
 }

 function install_python() {
-  DEBIAN_FRONTEND=noninteractive apt-get --quiet install -y --no-install-recommends \
+  apt-get --quiet install -y --no-install-recommends \
     python3.9-venv="3.9.*" python3.9-dev
   mkdir -p /opt/bin
   update-alternatives \
@@ -63,7 +63,7 @@ function install_python() {
 }

 function install_misc() {
-  DEBIAN_FRONTEND=noninteractive apt-get --quiet install -y --no-install-recommends \
+  apt-get --quiet install -y --no-install-recommends \
     apt-transport-https="2.0.*" \
     bsdmainutils \
     ca-certificates \
@@ -123,6 +123,12 @@ function install_docker() {
   apt-get --quiet install -y --no-install-recommends docker-ce docker-ce-cli containerd.io
 }

+function install_clang_tidy() {
+  apt-get --quiet install -y clang-tidy
+  printf "clang-tidy version: %s\n" "$(clang-tidy --version)"
+  printf "clang-tidy config: %s\n" "$(clang-tidy -dump-config)"
+}
+
 function cleanup() {
   apt-get --quiet autoremove -y
   apt-get autoclean
@@ -137,9 +143,12 @@ if [[ ${VERBOSE} -eq 1 ]]; then
   env
 fi

+declare -x -r DEBIAN_FRONTEND=noninteractive
+
 apt_update
 install_misc
 install_clang
+install_clang_tidy
 install_golang "${BUILD_ARCH}"
 install_docker "${BUILD_ARCH}"
 install_python  # should run after other install_*
diff --git a/builders/tests/data/hashes/build-debian b/builders/tests/data/hashes/build-debian
index cb2cfbba..c89114f2 100644
--- a/builders/tests/data/hashes/build-debian
+++ b/builders/tests/data/hashes/build-debian
@@ -1 +1 @@
-b1a855c1b44e4c7ae00a9c51ffc3d9fe28785ef887707100a9bdd510541144ec
+38cc8a23a6a56eb6567bef3685100cd3be1c0491dcc8b953993c42182da3fa40
diff --git a/builders/version.txt b/builders/version.txt
index c11ca46d..78756de3 100644
--- a/builders/version.txt
+++ b/builders/version.txt
@@ -1 +1 @@
-0.56.0
\ No newline at end of file
+0.57.0
\ No newline at end of file
diff --git a/production/deploy/aws/terraform/environment/demo/buyer/buyer.tf b/production/deploy/aws/terraform/environment/demo/buyer/buyer.tf
index a1f70462..fda0d10e 100644
--- a/production/deploy/aws/terraform/environment/demo/buyer/buyer.tf
+++ b/production/deploy/aws/terraform/environment/demo/buyer/buyer.tf
@@ -97,7 +97,6 @@ module "buyer" {
     # Note: turning on this flag will lead to higher memory consumption for AdTech code execution
     # and additional latency for parsing the logs.
-
     # Reach out to the Privacy Sandbox B&A team to enroll with Coordinators and update the following flag values.
     # More information on enrollment can be found here: https://github.com/privacysandbox/fledge-docs/blob/main/bidding_auction_services_api.md#enroll-with-coordinators
     # Coordinator-based attestation flags:
@@ -112,5 +111,9 @@ module "buyer" {
     KEY_REFRESH_FLOW_RUN_FREQUENCY_SECONDS = "" # Example: "10800"
     MAX_ALLOWED_SIZE_DEBUG_URL_BYTES       = "" # Example: "65536"
     MAX_ALLOWED_SIZE_ALL_DEBUG_URLS_KB     = "" # Example: "3000"
+
+    INFERENCE_SIDECAR_BINARY_PATH = "" # Example: "/server/bin/inference_sidecar"
+    INFERENCE_MODEL_BUCKET_NAME   = "" # Example: "<bucket_name>"
+    INFERENCE_MODEL_BUCKET_PATHS  = "" # Example: "<model_path1>,<model_path2>"
   }
 }
diff --git a/production/deploy/aws/terraform/services/dashboards/buyer_dashboard/main.tf b/production/deploy/aws/terraform/services/dashboards/buyer_dashboard/main.tf
index d47597b8..3650083b 100644
--- a/production/deploy/aws/terraform/services/dashboards/buyer_dashboard/main.tf
+++ b/production/deploy/aws/terraform/services/dashboards/buyer_dashboard/main.tf
@@ -76,7 +76,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
         "type": "metric",
         "properties": {
           "metrics": [
-            [ { "expression": "SEARCH(' deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"request.failed_count_by_status\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.error_status_code')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
+            [ { "expression": "SEARCH(' deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"request.failed_count\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.status_code')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
          ],
          "timezone": "UTC",
          "region": "${var.region}",
@@ -88,7 +88,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
              "showUnits": false
            }
          },
-         "title": "request.failed_count_by_status [MEAN]"
+         "title": "request.failed_count [MEAN]"
        }
      },
      {
@@ -260,7 +260,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
         "type": "metric",
         "properties": {
           "metrics": [
-            [ { "expression": "SEARCH(' deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"initiated_request.count_by_server\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.server name')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
+            [ { "expression": "SEARCH(' deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"initiated_request.count\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.server name')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
          ],
          "timezone": "UTC",
          "region": "${var.region}",
@@ -272,7 +272,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
              "showUnits": false
            }
          },
-         "title": "initiated_request.count_by_server [MEAN]"
+         "title": "initiated_request.count [MEAN]"
        }
      },
      {
@@ -352,7 +352,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
         "type": "metric",
         "properties": {
           "metrics": [
-            [ { "expression": "SEARCH(' service.name=\"bfe\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"bfe.error_code\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.error code')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
+            [ { "expression": "SEARCH(' service.name=\"bfe\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"bfe.errors_count\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.error_code')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
          ],
          "timezone": "UTC",
          "region": "${var.region}",
@@ -364,7 +364,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
              "showUnits": false
            }
          },
-         "title": "bfe.error_code [MEAN]"
+         "title": "bfe.errors_count [MEAN]"
        }
      },
      {
@@ -375,7 +375,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
         "type": "metric",
         "properties": {
           "metrics": [
-            [ { "expression": "SEARCH(' service.name=\"bidding\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"bidding.error_code\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.error code')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
+            [ { "expression": "SEARCH(' service.name=\"bidding\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"bidding.errors_count\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.error_code')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
          ],
          "timezone": "UTC",
          "region": "${var.region}",
@@ -387,7 +387,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
              "showUnits": false
            }
          },
-         "title": "bidding.error_code [MEAN]"
+         "title": "bidding.errors_count [MEAN]"
        }
      },
      {
@@ -398,7 +398,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
         "type": "metric",
         "properties": {
           "metrics": [
-            [ { "expression": "SEARCH(' service.name=\"bfe\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"initiated_request.bidding.duration_ms\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
+            [ { "expression": "SEARCH(' service.name=\"bfe\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"bfe.initiated_request.to_bidding.duration_ms\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
          ],
          "timezone": "UTC",
          "region": "${var.region}",
@@ -410,7 +410,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
              "showUnits": false
            }
          },
-         "title": "initiated_request.bidding.duration_ms [MEAN]"
+         "title": "bfe.initiated_request.to_bidding.duration_ms [MEAN]"
        }
      },
      {
@@ -421,7 +421,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
         "type": "metric",
         "properties": {
           "metrics": [
-            [ { "expression": "SEARCH(' service.name=\"bfe\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"initiated_request.bidding.errors_count_by_status\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.error_status_code')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
+            [ { "expression": "SEARCH(' service.name=\"bfe\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"bfe.initiated_request.to_bidding.errors_count\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.status_code')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
          ],
          "timezone": "UTC",
          "region": "${var.region}",
@@ -433,7 +433,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
              "showUnits": false
            }
          },
-         "title": "initiated_request.bidding.errors_count_by_status [MEAN]"
+         "title": "bfe.initiated_request.to_bidding.errors_count [MEAN]"
        }
      },
      {
@@ -444,7 +444,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
         "type": "metric",
         "properties": {
           "metrics": [
-            [ { "expression": "SEARCH(' service.name=\"bfe\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"initiated_request.bidding.size_bytes\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
+            [ { "expression": "SEARCH(' service.name=\"bfe\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"bfe.initiated_request.to_bidding.size_bytes\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
          ],
          "timezone": "UTC",
          "region": "${var.region}",
@@ -456,7 +456,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
              "showUnits": false
            }
          },
-         "title": "initiated_request.bidding.size_bytes [MEAN]"
+         "title": "bfe.initiated_request.to_bidding.size_bytes [MEAN]"
        }
      },
      {
@@ -467,7 +467,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
         "type": "metric",
         "properties": {
           "metrics": [
-            [ { "expression": "SEARCH(' service.name=\"bfe\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"initiated_request.kv.duration_ms\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
+            [ { "expression": "SEARCH(' service.name=\"bfe\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"initiated_request.to_kv.duration_ms\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
          ],
          "timezone": "UTC",
          "region": "${var.region}",
@@ -479,7 +479,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
              "showUnits": false
            }
          },
-         "title": "initiated_request.kv.duration_ms [MEAN]"
+         "title": "initiated_request.to_kv.duration_ms [MEAN]"
        }
      },
      {
@@ -490,7 +490,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
         "type": "metric",
         "properties": {
           "metrics": [
-            [ { "expression": "SEARCH(' service.name=\"bfe\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"initiated_request.kv.errors_count_by_status\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.error_status_code')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
+            [ { "expression": "SEARCH(' service.name=\"bfe\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"initiated_request.to_kv.errors_count\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.status_code')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
          ],
          "timezone": "UTC",
          "region": "${var.region}",
@@ -502,7 +502,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
              "showUnits": false
            }
          },
-         "title": "initiated_request.kv.errors_count_by_status [MEAN]"
+         "title": "initiated_request.to_kv.errors_count [MEAN]"
        }
      },
      {
@@ -513,7 +513,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
         "type": "metric",
         "properties": {
           "metrics": [
-            [ { "expression": "SEARCH(' service.name=\"bfe\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"initiated_request.kv.size_bytes\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
+            [ { "expression": "SEARCH(' service.name=\"bfe\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"initiated_request.to_kv.size_bytes\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
          ],
          "timezone": "UTC",
          "region": "${var.region}",
@@ -525,7 +525,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
              "showUnits": false
            }
          },
-         "title": "initiated_request.kv.size_bytes [MEAN]"
+         "title": "initiated_request.to_kv.size_bytes [MEAN]"
        }
      },
      {
@@ -536,7 +536,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
         "type": "metric",
         "properties": {
           "metrics": [
-            [ { "expression": "SEARCH(' service.name=\"bfe\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"initiated_response.bidding.size_bytes\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
+            [ { "expression": "SEARCH(' service.name=\"bfe\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"bfe.initiated_response.to_bidding.size_bytes\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
          ],
          "timezone": "UTC",
          "region": "${var.region}",
@@ -548,7 +548,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
              "showUnits": false
            }
          },
-         "title": "initiated_response.bidding.size_bytes [MEAN]"
+         "title": "bfe.initiated_response.to_bidding.size_bytes [MEAN]"
        }
      },
      {
@@ -559,7 +559,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
         "type": "metric",
         "properties": {
           "metrics": [
-            [ { "expression": "SEARCH(' service.name=\"bfe\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"initiated_response.kv.size_bytes\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
+            [ { "expression": "SEARCH(' service.name=\"bfe\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"bfe.initiated_response.to_kv.size_bytes\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
          ],
          "timezone": "UTC",
          "region": "${var.region}",
@@ -571,7 +571,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
              "showUnits": false
            }
          },
-         "title": "initiated_response.kv.size_bytes [MEAN]"
+         "title": "bfe.initiated_response.to_kv.size_bytes [MEAN]"
        }
      },
      {
@@ -605,7 +605,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
         "type": "metric",
         "properties": {
           "metrics": [
-            [ { "expression": "SEARCH(' service.name=\"bidding\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"js_execution.error.count\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
+            [ { "expression": "SEARCH(' service.name=\"bidding\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"js_execution.errors_count\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
          ],
          "timezone": "UTC",
          "region": "${var.region}",
@@ -617,7 +617,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
              "showUnits": false
            }
          },
-         "title": "js_execution.error.count [MEAN]"
+         "title": "js_execution.errors_count [MEAN]"
        }
      },
      {
@@ -628,7 +628,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
         "type": "metric",
         "properties": {
           "metrics": [
-            [ { "expression": "SEARCH(' service.name=\"bidding\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"business_logic.bidding.bids.count\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
+            [ { "expression": "SEARCH(' service.name=\"bidding\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"bidding.business_logic.bids_count\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
          ],
          "timezone": "UTC",
          "region": "${var.region}",
@@ -640,7 +640,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
              "showUnits": false
            }
          },
-         "title": "business_logic.bidding.bids.count [MEAN]"
+         "title": "bidding.business_logic.bids_count [MEAN]"
        }
      },
      {
@@ -651,7 +651,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
         "type": "metric",
         "properties": {
           "metrics": [
-            [ { "expression": "SEARCH(' service.name=\"bidding\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"business_logic.bidding.zero_bid.count\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.seller_rejection_reason')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
+            [ { "expression": "SEARCH(' service.name=\"bidding\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"bidding.business_logic.zero_bid_count\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.seller_rejection_reason')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
          ],
          "timezone": "UTC",
          "region": "${var.region}",
@@ -663,7 +663,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
              "showUnits": false
            }
          },
-         "title": "business_logic.bidding.zero_bid.count [MEAN]"
+         "title": "bidding.business_logic.zero_bid_count [MEAN]"
        }
      },
      {
@@ -674,7 +674,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
         "type": "metric",
         "properties": {
           "metrics": [
-            [ { "expression": "SEARCH(' service.name=\"bidding\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"business_logic.bidding.zero_bid.percent\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.seller_rejection_reason')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
+            [ { "expression": "SEARCH(' service.name=\"bidding\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"bidding.business_logic.zero_bid_percent\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.seller_rejection_reason')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
          ],
          "timezone": "UTC",
          "region": "${var.region}",
@@ -686,7 +686,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
              "showUnits": false
            }
          },
-         "title": "business_logic.bidding.zero_bid.percent [MEAN]"
+         "title": "bidding.business_logic.zero_bid_percent [MEAN]"
        }
      }
    ]
diff --git a/production/deploy/aws/terraform/services/dashboards/seller_dashboard/main.tf b/production/deploy/aws/terraform/services/dashboards/seller_dashboard/main.tf
index 3e9e1f8b..e14f3084 100644
--- a/production/deploy/aws/terraform/services/dashboards/seller_dashboard/main.tf
+++ b/production/deploy/aws/terraform/services/dashboards/seller_dashboard/main.tf
@@ -76,7 +76,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
         "type": "metric",
         "properties": {
           "metrics": [
-            [ { "expression": "SEARCH(' deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"request.failed_count_by_status\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.error_status_code')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
+            [ { "expression": "SEARCH(' deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"request.failed_count\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.status_code')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
          ],
          "timezone": "UTC",
          "region": "${var.region}",
@@ -88,7 +88,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
              "showUnits": false
            }
          },
-         "title": "request.failed_count_by_status [MEAN]"
+         "title": "request.failed_count [MEAN]"
        }
      },
      {
@@ -260,7 +260,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
         "type": "metric",
         "properties": {
           "metrics": [
-            [ { "expression": "SEARCH(' deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"initiated_request.count_by_server\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.server name')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
+            [ { "expression": "SEARCH(' deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"initiated_request.count\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.server name')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
          ],
          "timezone": "UTC",
          "region": "${var.region}",
@@ -272,7 +272,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
              "showUnits": false
            }
          },
-         "title": "initiated_request.count_by_server [MEAN]"
+         "title": "initiated_request.count [MEAN]"
        }
      },
      {
@@ -352,7 +352,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
         "type": "metric",
         "properties": {
           "metrics": [
-            [ { "expression": "SEARCH(' service.name=\"sfe\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"sfe.error_code\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.error code')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
+            [ { "expression": "SEARCH(' service.name=\"sfe\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"sfe.errors_count\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.error_code')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
          ],
          "timezone": "UTC",
          "region": "${var.region}",
@@ -364,7 +364,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
              "showUnits": false
            }
          },
-         "title": "sfe.error_code [MEAN]"
+         "title": "sfe.errors_count [MEAN]"
        }
      },
      {
@@ -375,7 +375,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
         "type": "metric",
         "properties": {
           "metrics": [
-            [ { "expression": "SEARCH(' service.name=\"auction\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"auction.error_code\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.error code')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
+            [ { "expression": "SEARCH(' service.name=\"auction\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"auction.errors_count\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.error_code')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
          ],
          "timezone": "UTC",
          "region": "${var.region}",
@@ -387,7 +387,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
              "showUnits": false
            }
          },
-         "title": "auction.error_code [MEAN]"
+         "title": "auction.errors_count [MEAN]"
        }
      },
      {
@@ -398,7 +398,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
         "type": "metric",
         "properties": {
           "metrics": [
-            [ { "expression": "SEARCH(' service.name=\"sfe\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"initiated_request.auction.duration_ms\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
+            [ { "expression": "SEARCH(' service.name=\"sfe\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"sfe.initiated_request.to_auction.duration_ms\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
          ],
          "timezone": "UTC",
          "region": "${var.region}",
@@ -410,7 +410,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
              "showUnits": false
            }
          },
-         "title": "initiated_request.auction.duration_ms [MEAN]"
+         "title": "sfe.initiated_request.to_auction.duration_ms [MEAN]"
        }
      },
      {
@@ -421,7 +421,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
         "type": "metric",
         "properties": {
           "metrics": [
-            [ { "expression": "SEARCH(' service.name=\"sfe\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"initiated_request.auction.errors_count_by_status\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.error_status_code')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
+            [ { "expression": "SEARCH(' service.name=\"sfe\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"sfe.initiated_request.to_auction.errors_count\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.status_code')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
          ],
          "timezone": "UTC",
          "region": "${var.region}",
@@ -433,7 +433,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
              "showUnits": false
            }
          },
-         "title": "initiated_request.auction.errors_count_by_status [MEAN]"
+         "title": "sfe.initiated_request.to_auction.errors_count [MEAN]"
        }
      },
      {
@@ -444,7 +444,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
         "type": "metric",
         "properties": {
           "metrics": [
-            [ { "expression": "SEARCH(' service.name=\"sfe\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"initiated_request.auction.size_bytes\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
+            [ { "expression": "SEARCH(' service.name=\"sfe\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"sfe.initiated_request.to_auction.size_bytes\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
          ],
          "timezone": "UTC",
          "region": "${var.region}",
@@ -456,7 +456,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
              "showUnits": false
            }
          },
-         "title": "initiated_request.auction.size_bytes [MEAN]"
+         "title": "sfe.initiated_request.to_auction.size_bytes [MEAN]"
        }
      },
      {
@@ -467,7 +467,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
         "type": "metric",
         "properties": {
           "metrics": [
-            [ { "expression": "SEARCH(' service.name=\"sfe\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"initiated_request.bfe.errors_count_by_status\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.error_status_code')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
+            [ { "expression": "SEARCH(' service.name=\"sfe\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"sfe.initiated_request.to_bfe.errors_count_by_status\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.status_code')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
          ],
          "timezone": "UTC",
          "region": "${var.region}",
@@ -479,7 +479,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
              "showUnits": false
            }
          },
-         "title": "initiated_request.bfe.errors_count_by_status [MEAN]"
+         "title": "sfe.initiated_request.to_bfe.errors_count_by_status [MEAN]"
        }
      },
      {
@@ -490,7 +490,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
         "type": "metric",
         "properties": {
           "metrics": [
-            [ { "expression": "SEARCH(' service.name=\"sfe\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"initiated_request.kv.duration_ms\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
+            [ { "expression": "SEARCH(' service.name=\"sfe\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"initiated_request.to_kv.duration_ms\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
          ],
          "timezone": "UTC",
          "region": "${var.region}",
@@ -502,7 +502,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
              "showUnits": false
            }
          },
-         "title": "initiated_request.kv.duration_ms [MEAN]"
+         "title": "initiated_request.to_kv.duration_ms [MEAN]"
        }
      },
      {
@@ -513,7 +513,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
         "type": "metric",
         "properties": {
           "metrics": [
-            [ { "expression": "SEARCH(' service.name=\"sfe\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"initiated_request.kv.errors_count_by_status\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.error_status_code')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
+            [ { "expression": "SEARCH(' service.name=\"sfe\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"initiated_request.to_kv.errors_count\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.status_code')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
          ],
          "timezone": "UTC",
          "region": "${var.region}",
@@ -525,7 +525,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
              "showUnits": false
            }
          },
-         "title": "initiated_request.kv.errors_count_by_status [MEAN]"
+         "title": "initiated_request.to_kv.errors_count [MEAN]"
        }
      },
      {
@@ -536,7 +536,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
         "type": "metric",
         "properties": {
           "metrics": [
-            [ { "expression": "SEARCH(' service.name=\"sfe\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"initiated_request.kv.size_bytes\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
+            [ { "expression": "SEARCH(' service.name=\"sfe\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"initiated_request.to_kv.size_bytes\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
          ],
          "timezone": "UTC",
          "region": "${var.region}",
@@ -548,7 +548,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
              "showUnits": false
            }
          },
-         "title": "initiated_request.kv.size_bytes [MEAN]"
+         "title": "initiated_request.to_kv.size_bytes [MEAN]"
        }
      },
      {
@@ -559,7 +559,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
         "type": "metric",
         "properties": {
           "metrics": [
-            [ { "expression": "SEARCH(' service.name=\"sfe\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"initiated_response.auction.size_bytes\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
+            [ { "expression": "SEARCH(' service.name=\"sfe\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"sfe.initiated_response.to_auction.size_bytes\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
          ],
          "timezone": "UTC",
          "region": "${var.region}",
@@ -571,7 +571,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
              "showUnits": false
            }
          },
-         "title": "initiated_response.auction.size_bytes [MEAN]"
+         "title": "sfe.initiated_response.to_auction.size_bytes [MEAN]"
        }
      },
      {
@@ -582,7 +582,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
         "type": "metric",
         "properties": {
           "metrics": [
-            [ { "expression": "SEARCH(' service.name=\"sfe\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"initiated_response.kv.size_bytes\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
+            [ { "expression": "SEARCH(' service.name=\"sfe\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"sfe.initiated_response.to_kv.size_bytes\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
          ],
          "timezone": "UTC",
          "region": "${var.region}",
@@ -594,7 +594,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
              "showUnits": false
            }
          },
-         "title": "initiated_response.kv.size_bytes [MEAN]"
+         "title": "sfe.initiated_response.to_kv.size_bytes [MEAN]"
        }
      },
      {
@@ -605,7 +605,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
         "type": "metric",
         "properties": {
           "metrics": [
-            [ { "expression": "SEARCH(' service.name=\"sfe\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"sfe.initiated_request.count_by_buyer\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.buyer')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
+            [ { "expression": "SEARCH(' service.name=\"sfe\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"sfe.initiated_request.to_bfe.count\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.buyer')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
          ],
          "timezone": "UTC",
          "region": "${var.region}",
@@ -617,7 +617,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
              "showUnits": false
            }
          },
-         "title": "sfe.initiated_request.count_by_buyer [MEAN]"
+         "title": "sfe.initiated_request.to_bfe.count [MEAN]"
        }
      },
      {
@@ -628,7 +628,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
         "type": "metric",
         "properties": {
           "metrics": [
-            [ { "expression": "SEARCH(' service.name=\"sfe\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"sfe.initiated_request.duration_by_buyer\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.buyer')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
+            [ { "expression": "SEARCH(' service.name=\"sfe\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"sfe.initiated_request.to_bfe.duration_ms\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.buyer')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
          ],
          "timezone": "UTC",
          "region": "${var.region}",
@@ -640,7 +640,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
              "showUnits": false
            }
          },
-         "title": "sfe.initiated_request.duration_by_buyer [MEAN]"
+         "title": "sfe.initiated_request.to_bfe.duration_ms [MEAN]"
        }
      },
      {
@@ -651,7 +651,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
         "type": "metric",
         "properties": {
           "metrics": [
-            [ { "expression": "SEARCH(' service.name=\"sfe\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"sfe.initiated_request.errors_count_by_buyer\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.buyer')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
+            [ { "expression": "SEARCH(' service.name=\"sfe\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"sfe.initiated_request.to_bfe.errors_count_by_buyer\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.buyer')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
          ],
          "timezone": "UTC",
          "region": "${var.region}",
@@ -663,7 +663,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
              "showUnits": false
            }
          },
-         "title": "sfe.initiated_request.errors_count_by_buyer [MEAN]"
+         "title": "sfe.initiated_request.to_bfe.errors_count_by_buyer [MEAN]"
        }
      },
      {
@@ -674,7 +674,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
         "type": "metric",
         "properties": {
           "metrics": [
-            [ { "expression": "SEARCH(' service.name=\"sfe\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"sfe.initiated_request.size_by_buyer\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.buyer')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
+            [ { "expression": "SEARCH(' service.name=\"sfe\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"sfe.initiated_request.to_bfe.size_bytes\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.buyer')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
          ],
          "timezone": "UTC",
          "region": "${var.region}",
@@ -686,7 +686,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
              "showUnits": false
            }
          },
-         "title": "sfe.initiated_request.size_by_buyer [MEAN]"
+         "title": "sfe.initiated_request.to_bfe.size_bytes [MEAN]"
        }
      },
      {
@@ -697,7 +697,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
         "type": "metric",
         "properties": {
           "metrics": [
-            [ { "expression": "SEARCH(' service.name=\"sfe\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"sfe.initiated_response.size_by_buyer\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.buyer')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
+            [ { "expression": "SEARCH(' service.name=\"sfe\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"sfe.initiated_response.to_bfe.size_bytes\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.buyer')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
          ],
          "timezone": "UTC",
          "region": "${var.region}",
@@ -709,7 +709,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
              "showUnits": false
            }
          },
-         "title": "sfe.initiated_response.size_by_buyer [MEAN]"
+         "title": "sfe.initiated_response.to_bfe.size_bytes [MEAN]"
        }
      },
      {
@@ -743,7 +743,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
         "type": "metric",
         "properties": {
           "metrics": [
-            [ { "expression": "SEARCH(' service.name=\"auction\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"js_execution.error.count\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
+            [ { "expression": "SEARCH(' service.name=\"auction\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"js_execution.errors_count\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
          ],
          "timezone": "UTC",
          "region": "${var.region}",
@@ -755,7 +755,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
              "showUnits": false
            }
          },
-         "title": "js_execution.error.count [MEAN]"
+         "title": "js_execution.errors_count [MEAN]"
        }
      },
      {
@@ -766,7 +766,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
         "type": "metric",
         "properties": {
           "metrics": [
-            [ { "expression": "SEARCH(' service.name=\"auction\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"business_logic.auction.bids.count\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
+            [ { "expression": "SEARCH(' service.name=\"auction\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"auction.business_logic.bids_count\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ]
          ],
          "timezone": "UTC",
          "region": "${var.region}",
@@ -778,7 +778,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
              "showUnits": false
            }
          },
-         "title": "business_logic.auction.bids.count [MEAN]"
+         "title": "auction.business_logic.bids_count [MEAN]"
        }
      },
      {
@@ -789,7 +789,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" {
         "type": "metric",
         "properties": {
           "metrics": [
-            [ {
"expression": "SEARCH(' service.name=\"auction\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"business_logic.auction.bid_rejected.count\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.seller_rejection_reason')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ] + [ { "expression": "SEARCH(' service.name=\"auction\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"auction.business_logic.bid_rejected_count\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.seller_rejection_reason')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ] ], "timezone": "UTC", "region": "${var.region}", @@ -801,7 +801,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" { "showUnits": false } }, - "title": "business_logic.auction.bid_rejected.count [MEAN]" + "title": "auction.business_logic.bid_rejected_count [MEAN]" } }, { @@ -812,7 +812,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" { "type": "metric", "properties": { "metrics": [ - [ { "expression": "SEARCH(' service.name=\"auction\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"business_logic.auction.bid_rejected.percent\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.seller_rejection_reason')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ] + [ { "expression": "SEARCH(' service.name=\"auction\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"auction.business_logic.bid_rejected_percent\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.seller_rejection_reason')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ] ], "timezone": "UTC", "region": "${var.region}", @@ -824,7 +824,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" { "showUnits": false } }, - "title": "business_logic.auction.bid_rejected.percent [MEAN]" + "title": "auction.business_logic.bid_rejected_percent [MEAN]" } }, { @@ -835,7 +835,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" { "type": "metric", "properties": { "metrics": [ - [ { "expression": "SEARCH(' service.name=\"sfe\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"business_logic.sfe.request_with_winner.count\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ] + [ { "expression": "SEARCH(' service.name=\"sfe\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"sfe.business_logic.request_with_winner_count\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ] ], "timezone": "UTC", "region": "${var.region}", @@ -847,7 +847,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" { "showUnits": false } }, - "title": "business_logic.sfe.request_with_winner.count [MEAN]" + "title": "sfe.business_logic.request_with_winner_count [MEAN]" } }, { @@ -858,7 +858,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" { "type": "metric", 
"properties": { "metrics": [ - [ { "expression": "SEARCH(' service.name=\"sfe\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"business_logic.sfe.request_with_winner.duration_ms\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ] + [ { "expression": "SEARCH(' service.name=\"sfe\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"sfe.business_logic.request_with_winner.duration_ms\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ] ], "timezone": "UTC", "region": "${var.region}", @@ -870,7 +870,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" { "showUnits": false } }, - "title": "business_logic.sfe.request_with_winner.duration_ms [MEAN]" + "title": "sfe.business_logic.request_with_winner.duration_ms [MEAN]" } }, { @@ -881,7 +881,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" { "type": "metric", "properties": { "metrics": [ - [ { "expression": "SEARCH(' service.name=\"sfe\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"protected_ciphertext.size_bytes\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ] + [ { "expression": "SEARCH(' service.name=\"sfe\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"sfe.protected_ciphertext.size_bytes\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ] ], "timezone": "UTC", "region": "${var.region}", @@ -893,7 +893,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" { "showUnits": false } }, - "title": "protected_ciphertext.size_bytes [MEAN]" + "title": "sfe.protected_ciphertext.size_bytes [MEAN]" } }, { @@ -904,7 +904,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" { "type": "metric", "properties": { "metrics": [ - [ { "expression": "SEARCH(' service.name=\"sfe\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"auction_config.size_bytes\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ] + [ { "expression": "SEARCH(' service.name=\"sfe\" deployment.environment=\"${var.environment}\" Noise=(\"Raw\" OR \"Noised\") MetricName=\"sfe.auction_config.size_bytes\" ', 'Average', 60)", "id": "e1", "label": "$${PROP('Dim.service.name')} $${PROP('Dim.deployment.environment')} $${PROP('Dim.Noise')} $${PROP('Dim.service.instance.id')}" } ] ], "timezone": "UTC", "region": "${var.region}", @@ -916,7 +916,7 @@ resource "aws_cloudwatch_dashboard" "environment_dashboard" { "showUnits": false } }, - "title": "auction_config.size_bytes [MEAN]" + "title": "sfe.auction_config.size_bytes [MEAN]" } } ] diff --git a/production/deploy/gcp/terraform/environment/demo/buyer/buyer.tf b/production/deploy/gcp/terraform/environment/demo/buyer/buyer.tf index 559e4a0b..bf43e450 100644 --- a/production/deploy/gcp/terraform/environment/demo/buyer/buyer.tf +++ b/production/deploy/gcp/terraform/environment/demo/buyer/buyer.tf @@ 
diff --git a/production/deploy/gcp/terraform/services/dashboards/buyer_dashboard/main.tf b/production/deploy/gcp/terraform/services/dashboards/buyer_dashboard/main.tf
index b9ee9602..23ff31c3 100644
--- a/production/deploy/gcp/terraform/services/dashboards/buyer_dashboard/main.tf
+++ b/production/deploy/gcp/terraform/services/dashboards/buyer_dashboard/main.tf
@@ -342,7 +342,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" {
      {
        "height": 19,
        "widget": {
-          "title": "initiated_request.bidding.errors_count_by_status [MEAN]",
+          "title": "bfe.initiated_request.to_bidding.errors_count [MEAN]",
          "xyChart": {
            "chartOptions": {},
            "dataSets": [
@@ -356,12 +356,12 @@ resource "google_monitoring_dashboard" "environment_dashboard" {
                  "alignmentPeriod": "60s",
                  "perSeriesAligner": "ALIGN_RATE"
                },
-                "filter": "metric.type=\"workload.googleapis.com/initiated_request.bidding.errors_count_by_status\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"",
+                "filter": "metric.type=\"workload.googleapis.com/bfe.initiated_request.to_bidding.errors_count\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"",
                "secondaryAggregation": {
                  "alignmentPeriod": "60s",
                  "crossSeriesReducer": "REDUCE_MEAN",
                  "groupByFields": [
-                    "metric.label.\"error_status_code\"",
+                    "metric.label.\"status_code\"",
                    "metric.label.\"service_name\"",
                    "metric.label.\"deployment_environment\"",
                    "metric.label.\"operator\"",
@@ -387,7 +387,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" {
      {
        "height": 19,
        "widget": {
-          "title": "js_execution.error.count [MEAN]",
+          "title": "js_execution.errors_count [MEAN]",
          "xyChart": {
            "chartOptions": {},
            "dataSets": [
@@ -401,7 +401,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" {
                  "alignmentPeriod": "60s",
                  "perSeriesAligner": "ALIGN_RATE"
                },
-                "filter": "metric.type=\"workload.googleapis.com/js_execution.error.count\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"",
+                "filter": "metric.type=\"workload.googleapis.com/js_execution.errors_count\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"",
                "secondaryAggregation": {
                  "alignmentPeriod": "60s",
                  "crossSeriesReducer": "REDUCE_MEAN",
@@ -431,7 +431,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" {
      {
        "height":
16, "widget": { - "title": "business_logic.bidding.bids.count [MEAN]", + "title": "bidding.business_logic.bids_count [MEAN]", "xyChart": { "chartOptions": {}, "dataSets": [ @@ -445,7 +445,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" { "alignmentPeriod": "60s", "perSeriesAligner": "ALIGN_RATE" }, - "filter": "metric.type=\"workload.googleapis.com/business_logic.bidding.bids.count\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"", + "filter": "metric.type=\"workload.googleapis.com/bidding.business_logic.bids_count\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"", "secondaryAggregation": { "alignmentPeriod": "60s", "crossSeriesReducer": "REDUCE_MEAN", @@ -474,7 +474,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" { { "height": 16, "widget": { - "title": "business_logic.bidding.zero_bid.count [MEAN]", + "title": "bidding.business_logic.zero_bid_count [MEAN]", "xyChart": { "chartOptions": {}, "dataSets": [ @@ -488,7 +488,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" { "alignmentPeriod": "60s", "perSeriesAligner": "ALIGN_RATE" }, - "filter": "metric.type=\"workload.googleapis.com/business_logic.bidding.zero_bid.count\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"", + "filter": "metric.type=\"workload.googleapis.com/bidding.business_logic.zero_bid_count\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"", "secondaryAggregation": { "alignmentPeriod": "60s", "crossSeriesReducer": "REDUCE_MEAN", @@ -518,7 +518,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" { { "height": 19, "widget": { - "title": "initiated_request.kv.duration_ms [95TH PERCENTILE]", + "title": "initiated_request.to_kv.duration_ms [95TH PERCENTILE]", "xyChart": { "chartOptions": {}, "dataSets": [ @@ -541,7 +541,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" { ], "perSeriesAligner": "ALIGN_DELTA" }, - "filter": "metric.type=\"workload.googleapis.com/initiated_request.kv.duration_ms\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"" + "filter": "metric.type=\"workload.googleapis.com/initiated_request.to_kv.duration_ms\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"" } } } @@ -558,7 +558,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" { { "height": 19, "widget": { - "title": "initiated_request.count_by_server [MEAN]", + "title": "initiated_request.count [MEAN]", "xyChart": { "chartOptions": {}, "dataSets": [ @@ -572,7 +572,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" { "alignmentPeriod": "60s", "perSeriesAligner": "ALIGN_RATE" }, - "filter": "metric.type=\"workload.googleapis.com/initiated_request.count_by_server\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"", + "filter": "metric.type=\"workload.googleapis.com/initiated_request.count\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"", "secondaryAggregation": { "alignmentPeriod": "60s", "crossSeriesReducer": "REDUCE_MEAN", @@ -602,7 +602,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" { { "height": 19, "widget": { - "title": "business_logic.bidding.zero_bid.percent [95TH PERCENTILE]", + "title": "bidding.business_logic.zero_bid_percent [95TH 
PERCENTILE]", "xyChart": { "chartOptions": {}, "dataSets": [ @@ -625,7 +625,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" { ], "perSeriesAligner": "ALIGN_DELTA" }, - "filter": "metric.type=\"workload.googleapis.com/business_logic.bidding.zero_bid.percent\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"" + "filter": "metric.type=\"workload.googleapis.com/bidding.business_logic.zero_bid_percent\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"" } } } @@ -641,7 +641,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" { { "height": 19, "widget": { - "title": "initiated_request.bidding.size_bytes [95TH PERCENTILE]", + "title": "bfe.initiated_request.to_bidding.size_bytes [95TH PERCENTILE]", "xyChart": { "chartOptions": {}, "dataSets": [ @@ -664,7 +664,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" { ], "perSeriesAligner": "ALIGN_DELTA" }, - "filter": "metric.type=\"workload.googleapis.com/initiated_request.bidding.size_bytes\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"" + "filter": "metric.type=\"workload.googleapis.com/bfe.initiated_request.to_bidding.size_bytes\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"" } } } @@ -680,7 +680,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" { { "height": 19, "widget": { - "title": "initiated_request.bidding.duration_ms [95TH PERCENTILE]", + "title": "bfe.initiated_request.to_bidding.duration_ms [95TH PERCENTILE]", "xyChart": { "chartOptions": {}, "dataSets": [ @@ -703,7 +703,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" { ], "perSeriesAligner": "ALIGN_DELTA" }, - "filter": "metric.type=\"workload.googleapis.com/initiated_request.bidding.duration_ms\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"" + "filter": "metric.type=\"workload.googleapis.com/bfe.initiated_request.to_bidding.duration_ms\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"" } } } @@ -719,7 +719,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" { { "height": 19, "widget": { - "title": "initiated_request.kv.size_bytes [95TH PERCENTILE]", + "title": "initiated_request.to_kv.size_bytes [95TH PERCENTILE]", "xyChart": { "chartOptions": {}, "dataSets": [ @@ -742,7 +742,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" { ], "perSeriesAligner": "ALIGN_DELTA" }, - "filter": "metric.type=\"workload.googleapis.com/initiated_request.kv.size_bytes\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\" metric.label.\"service_name\"=\"bfe\"" + "filter": "metric.type=\"workload.googleapis.com/initiated_request.to_kv.size_bytes\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\" metric.label.\"service_name\"=\"bfe\"" } } } @@ -759,7 +759,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" { { "height": 19, "widget": { - "title": "bfe.initiated_response.kv.size_bytes [95TH PERCENTILE]", + "title": "bfe.initiated_response.to_kv.size_bytes [95TH PERCENTILE]", "xyChart": { "chartOptions": {}, "dataSets": [ @@ -782,7 +782,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" { ], "perSeriesAligner": "ALIGN_DELTA" }, - "filter": 
"metric.type=\"workload.googleapis.com/bfe.initiated_response.kv.size_bytes\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\" metric.label.\"service_name\"=\"bfe\"" + "filter": "metric.type=\"workload.googleapis.com/bfe.initiated_response.to_kv.size_bytes\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\" metric.label.\"service_name\"=\"bfe\"" } } } @@ -799,7 +799,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" { { "height": 19, "widget": { - "title": "bfe.error_code [MEAN]", + "title": "bfe.errors_count [MEAN]", "xyChart": { "chartOptions": {}, "dataSets": [ @@ -813,7 +813,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" { "alignmentPeriod": "60s", "perSeriesAligner": "ALIGN_RATE" }, - "filter": "metric.type=\"workload.googleapis.com/bfe.error_code\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"", + "filter": "metric.type=\"workload.googleapis.com/bfe.errors_count\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"", "secondaryAggregation": { "alignmentPeriod": "60s", "crossSeriesReducer": "REDUCE_MEAN", @@ -843,7 +843,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" { { "height": 19, "widget": { - "title": "bidding.error_code [MEAN]", + "title": "bidding.errors_count [MEAN]", "xyChart": { "chartOptions": {}, "dataSets": [ @@ -857,7 +857,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" { "alignmentPeriod": "60s", "perSeriesAligner": "ALIGN_RATE" }, - "filter": "metric.type=\"workload.googleapis.com/bidding.error_code\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"", + "filter": "metric.type=\"workload.googleapis.com/bidding.errors_count\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"", "secondaryAggregation": { "alignmentPeriod": "60s", "crossSeriesReducer": "REDUCE_MEAN", @@ -888,7 +888,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" { { "height": 19, "widget": { - "title": "initiated_request.kv.errors_count_by_status [MEAN]", + "title": "initiated_request.to_kv.errors_count [MEAN]", "xyChart": { "chartOptions": {}, "dataSets": [ @@ -902,12 +902,12 @@ resource "google_monitoring_dashboard" "environment_dashboard" { "alignmentPeriod": "60s", "perSeriesAligner": "ALIGN_RATE" }, - "filter": "metric.type=\"workload.googleapis.com/initiated_request.kv.errors_count_by_status\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"", + "filter": "metric.type=\"workload.googleapis.com/initiated_request.to_kv.errors_count\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"", "secondaryAggregation": { "alignmentPeriod": "60s", "crossSeriesReducer": "REDUCE_MEAN", "groupByFields": [ - "metric.label.\"error_status_code\"", + "metric.label.\"status_code\"", "metric.label.\"service_name\"", "metric.label.\"deployment_environment\"", "metric.label.\"operator\"", @@ -932,7 +932,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" { { "height": 19, "widget": { - "title": "request.failed_count_by_status [MEAN]", + "title": "request.failed_count [MEAN]", "xyChart": { "chartOptions": {}, "dataSets": [ @@ -946,12 +946,12 @@ resource "google_monitoring_dashboard" "environment_dashboard" { "alignmentPeriod": "60s", "perSeriesAligner": 
"ALIGN_RATE" }, - "filter": "metric.type=\"workload.googleapis.com/request.failed_count_by_status\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"", + "filter": "metric.type=\"workload.googleapis.com/request.failed_count\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"", "secondaryAggregation": { "alignmentPeriod": "60s", "crossSeriesReducer": "REDUCE_MEAN", "groupByFields": [ - "metric.label.\"error_status_code\"", + "metric.label.\"status_code\"", "metric.label.\"service_name\"", "metric.label.\"deployment_environment\"", "metric.label.\"operator\"", @@ -976,7 +976,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" { { "height": 19, "widget": { - "title": "initiated_response.bidding.size_bytes [95TH PERCENTILE]", + "title": "bfe.initiated_response.to_bidding.size_bytes [95TH PERCENTILE]", "xyChart": { "chartOptions": {}, "dataSets": [ @@ -999,7 +999,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" { ], "perSeriesAligner": "ALIGN_DELTA" }, - "filter": "metric.type=\"workload.googleapis.com/initiated_response.bidding.size_bytes\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"" + "filter": "metric.type=\"workload.googleapis.com/bfe.initiated_response.to_bidding.size_bytes\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"" } } } diff --git a/production/deploy/gcp/terraform/services/dashboards/seller_dashboard/main.tf b/production/deploy/gcp/terraform/services/dashboards/seller_dashboard/main.tf index ffbfa47c..cff6d42c 100644 --- a/production/deploy/gcp/terraform/services/dashboards/seller_dashboard/main.tf +++ b/production/deploy/gcp/terraform/services/dashboards/seller_dashboard/main.tf @@ -342,7 +342,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" { { "height": 19, "widget": { - "title": "initiated_request.bfe.errors_count_by_status [MEAN]", + "title": "sfe.initiated_request.to_bfe.errors_count_by_status [MEAN]", "xyChart": { "chartOptions": {}, "dataSets": [ @@ -356,12 +356,12 @@ resource "google_monitoring_dashboard" "environment_dashboard" { "alignmentPeriod": "60s", "perSeriesAligner": "ALIGN_RATE" }, - "filter": "metric.type=\"workload.googleapis.com/initiated_request.bfe.errors_count_by_status\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"", + "filter": "metric.type=\"workload.googleapis.com/sfe.initiated_request.to_bfe.errors_count_by_status\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"", "secondaryAggregation": { "alignmentPeriod": "60s", "crossSeriesReducer": "REDUCE_MEAN", "groupByFields": [ - "metric.label.\"error_status_code\"", + "metric.label.\"status_code\"", "metric.label.\"service_name\"", "metric.label.\"deployment_environment\"", "metric.label.\"operator\"", @@ -387,7 +387,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" { { "height": 19, "widget": { - "title": "js_execution.error.count [MEAN]", + "title": "js_execution.errors_count [MEAN]", "xyChart": { "chartOptions": {}, "dataSets": [ @@ -401,7 +401,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" { "alignmentPeriod": "60s", "perSeriesAligner": "ALIGN_RATE" }, - "filter": "metric.type=\"workload.googleapis.com/js_execution.error.count\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"", + "filter": 
"metric.type=\"workload.googleapis.com/js_execution.errors_count\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"", "secondaryAggregation": { "alignmentPeriod": "60s", "crossSeriesReducer": "REDUCE_MEAN", @@ -431,7 +431,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" { { "height": 16, "widget": { - "title": "business_logic.auction.bids.count [MEAN]", + "title": "auction.business_logic.bids_count [MEAN]", "xyChart": { "chartOptions": {}, "dataSets": [ @@ -445,7 +445,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" { "alignmentPeriod": "60s", "perSeriesAligner": "ALIGN_RATE" }, - "filter": "metric.type=\"workload.googleapis.com/business_logic.auction.bids.count\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"", + "filter": "metric.type=\"workload.googleapis.com/auction.business_logic.bids_count\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"", "secondaryAggregation": { "alignmentPeriod": "60s", "crossSeriesReducer": "REDUCE_MEAN", @@ -474,7 +474,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" { { "height": 16, "widget": { - "title": "business_logic.auction.bid_rejected.count [MEAN]", + "title": "auction.business_logic.bid_rejected_count [MEAN]", "xyChart": { "chartOptions": {}, "dataSets": [ @@ -488,7 +488,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" { "alignmentPeriod": "60s", "perSeriesAligner": "ALIGN_RATE" }, - "filter": "metric.type=\"workload.googleapis.com/business_logic.auction.bid_rejected.count\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"", + "filter": "metric.type=\"workload.googleapis.com/auction.business_logic.bid_rejected_count\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"", "secondaryAggregation": { "alignmentPeriod": "60s", "crossSeriesReducer": "REDUCE_MEAN", @@ -519,7 +519,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" { { "height": 19, "widget": { - "title": "initiated_request.kv.duration_ms [95TH PERCENTILE]", + "title": "initiated_request.to_kv.duration_ms [95TH PERCENTILE]", "xyChart": { "chartOptions": {}, "dataSets": [ @@ -542,7 +542,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" { ], "perSeriesAligner": "ALIGN_DELTA" }, - "filter": "metric.type=\"workload.googleapis.com/initiated_request.kv.duration_ms\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"" + "filter": "metric.type=\"workload.googleapis.com/initiated_request.to_kv.duration_ms\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"" } } } @@ -558,7 +558,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" { { "height": 19, "widget": { - "title": "initiated_request.count_by_server [MEAN]", + "title": "initiated_request.count [MEAN]", "xyChart": { "chartOptions": {}, "dataSets": [ @@ -572,7 +572,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" { "alignmentPeriod": "60s", "perSeriesAligner": "ALIGN_RATE" }, - "filter": "metric.type=\"workload.googleapis.com/initiated_request.count_by_server\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"", + "filter": "metric.type=\"workload.googleapis.com/initiated_request.count\" resource.type=\"generic_task\" 
metric.label.\"deployment_environment\"=\"${var.environment}\"", "secondaryAggregation": { "alignmentPeriod": "60s", "crossSeriesReducer": "REDUCE_MEAN", @@ -602,7 +602,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" { { "height": 19, "widget": { - "title": "business_logic.auction.bid_rejected.percent [95TH PERCENTILE]", + "title": "auction.business_logic.bid_rejected_percent [95TH PERCENTILE]", "xyChart": { "chartOptions": {}, "dataSets": [ @@ -625,7 +625,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" { ], "perSeriesAligner": "ALIGN_DELTA" }, - "filter": "metric.type=\"workload.googleapis.com/business_logic.auction.bid_rejected.percent\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"" + "filter": "metric.type=\"workload.googleapis.com/auction.business_logic.bid_rejected_percent\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"" } } } @@ -641,7 +641,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" { { "height": 19, "widget": { - "title": "initiated_request.auction.size_bytes [95TH PERCENTILE]", + "title": "sfe.initiated_request.to_auction.size_bytes [95TH PERCENTILE]", "xyChart": { "chartOptions": {}, "dataSets": [ @@ -664,7 +664,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" { ], "perSeriesAligner": "ALIGN_DELTA" }, - "filter": "metric.type=\"workload.googleapis.com/initiated_request.auction.size_bytes\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"" + "filter": "metric.type=\"workload.googleapis.com/sfe.initiated_request.to_auction.size_bytes\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"" } } } @@ -680,7 +680,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" { { "height": 19, "widget": { - "title": "initiated_request.auction.duration_ms [95TH PERCENTILE]", + "title": "sfe.initiated_request.to_auction.duration_ms [95TH PERCENTILE]", "xyChart": { "chartOptions": {}, "dataSets": [ @@ -703,7 +703,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" { ], "perSeriesAligner": "ALIGN_DELTA" }, - "filter": "metric.type=\"workload.googleapis.com/initiated_request.auction.duration_ms\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"" + "filter": "metric.type=\"workload.googleapis.com/sfe.initiated_request.to_auction.duration_ms\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"" } } } @@ -719,7 +719,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" { { "height": 19, "widget": { - "title": "initiated_request.kv.size_bytes [95TH PERCENTILE]", + "title": "initiated_request.to_kv.size_bytes [95TH PERCENTILE]", "xyChart": { "chartOptions": {}, "dataSets": [ @@ -742,7 +742,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" { ], "perSeriesAligner": "ALIGN_DELTA" }, - "filter": "metric.type=\"workload.googleapis.com/initiated_request.kv.size_bytes\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\" metric.label.\"service_name\"=\"sfe\"" + "filter": "metric.type=\"workload.googleapis.com/initiated_request.to_kv.size_bytes\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\" metric.label.\"service_name\"=\"sfe\"" } } } @@ -758,7 +758,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" 
{ { "height": 19, "widget": { - "title": "sfe.initiated_response.kv.size_bytes [95TH PERCENTILE]", + "title": "sfe.initiated_response.to_kv.size_bytes [95TH PERCENTILE]", "xyChart": { "chartOptions": {}, "dataSets": [ @@ -781,7 +781,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" { ], "perSeriesAligner": "ALIGN_DELTA" }, - "filter": "metric.type=\"workload.googleapis.com/sfe.initiated_response.kv.size_bytes\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\" metric.label.\"service_name\"=\"sfe\"" + "filter": "metric.type=\"workload.googleapis.com/sfe.initiated_response.to_kv.size_bytes\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\" metric.label.\"service_name\"=\"sfe\"" } } } @@ -797,7 +797,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" { { "height": 19, "widget": { - "title": "sfe.error_code [MEAN]", + "title": "sfe.errors_count [MEAN]", "xyChart": { "chartOptions": {}, "dataSets": [ @@ -811,7 +811,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" { "alignmentPeriod": "60s", "perSeriesAligner": "ALIGN_RATE" }, - "filter": "metric.type=\"workload.googleapis.com/sfe.error_code\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"", + "filter": "metric.type=\"workload.googleapis.com/sfe.errors_count\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"", "secondaryAggregation": { "alignmentPeriod": "60s", "crossSeriesReducer": "REDUCE_MEAN", @@ -841,7 +841,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" { { "height": 19, "widget": { - "title": "auction.error_code [MEAN]", + "title": "auction.errors_count [MEAN]", "xyChart": { "chartOptions": {}, "dataSets": [ @@ -855,7 +855,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" { "alignmentPeriod": "60s", "perSeriesAligner": "ALIGN_RATE" }, - "filter": "metric.type=\"workload.googleapis.com/auction.error_code\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"", + "filter": "metric.type=\"workload.googleapis.com/auction.errors_count\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"", "secondaryAggregation": { "alignmentPeriod": "60s", "crossSeriesReducer": "REDUCE_MEAN", @@ -886,7 +886,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" { { "height": 19, "widget": { - "title": "initiated_request.kv.errors_count_by_status [MEAN]", + "title": "initiated_request.to_kv.errors_count [MEAN]", "xyChart": { "chartOptions": {}, "dataSets": [ @@ -900,12 +900,12 @@ resource "google_monitoring_dashboard" "environment_dashboard" { "alignmentPeriod": "60s", "perSeriesAligner": "ALIGN_RATE" }, - "filter": "metric.type=\"workload.googleapis.com/initiated_request.kv.errors_count_by_status\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"", + "filter": "metric.type=\"workload.googleapis.com/initiated_request.to_kv.errors_count\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"", "secondaryAggregation": { "alignmentPeriod": "60s", "crossSeriesReducer": "REDUCE_MEAN", "groupByFields": [ - "metric.label.\"error_status_code\"", + "metric.label.\"status_code\"", "metric.label.\"service_name\"", "metric.label.\"deployment_environment\"", "metric.label.\"operator\"", @@ -931,7 +931,7 @@ resource 
"google_monitoring_dashboard" "environment_dashboard" { { "height": 19, "widget": { - "title": "initiated_request.auction.errors_count_by_status [MEAN]", + "title": "sfe.initiated_request.to_auction.errors_count [MEAN]", "xyChart": { "chartOptions": {}, "dataSets": [ @@ -945,12 +945,12 @@ resource "google_monitoring_dashboard" "environment_dashboard" { "alignmentPeriod": "60s", "perSeriesAligner": "ALIGN_RATE" }, - "filter": "metric.type=\"workload.googleapis.com/initiated_request.auction.errors_count_by_status\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"", + "filter": "metric.type=\"workload.googleapis.com/sfe.initiated_request.to_auction.errors_count\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"", "secondaryAggregation": { "alignmentPeriod": "60s", "crossSeriesReducer": "REDUCE_MEAN", "groupByFields": [ - "metric.label.\"error_status_code\"", + "metric.label.\"status_code\"", "metric.label.\"service_name\"", "metric.label.\"deployment_environment\"", "metric.label.\"operator\"", @@ -976,7 +976,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" { { "height": 19, "widget": { - "title": "request.failed_count_by_status [MEAN]", + "title": "request.failed_count [MEAN]", "xyChart": { "chartOptions": {}, "dataSets": [ @@ -990,12 +990,12 @@ resource "google_monitoring_dashboard" "environment_dashboard" { "alignmentPeriod": "60s", "perSeriesAligner": "ALIGN_RATE" }, - "filter": "metric.type=\"workload.googleapis.com/request.failed_count_by_status\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"", + "filter": "metric.type=\"workload.googleapis.com/request.failed_count\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"", "secondaryAggregation": { "alignmentPeriod": "60s", "crossSeriesReducer": "REDUCE_MEAN", "groupByFields": [ - "metric.label.\"error_status_code\"", + "metric.label.\"status_code\"", "metric.label.\"service_name\"", "metric.label.\"deployment_environment\"", "metric.label.\"operator\"", @@ -1020,7 +1020,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" { { "height": 19, "widget": { - "title": "initiated_response.auction.size_bytes [95TH PERCENTILE]", + "title": "sfe.initiated_response.to_auction.size_bytes [95TH PERCENTILE]", "xyChart": { "chartOptions": {}, "dataSets": [ @@ -1043,7 +1043,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" { ], "perSeriesAligner": "ALIGN_DELTA" }, - "filter": "metric.type=\"workload.googleapis.com/initiated_response.auction.size_bytes\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"" + "filter": "metric.type=\"workload.googleapis.com/sfe.initiated_response.to_auction.size_bytes\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"" } } } @@ -1101,7 +1101,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" { { "height": 19, "widget": { - "title": "sfe.initiated_request.errors_count_by_buyer [MEAN]", + "title": "sfe.initiated_request.to_bfe.errors_count_by_buyer [MEAN]", "xyChart": { "chartOptions": {}, "dataSets": [ @@ -1115,7 +1115,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" { "alignmentPeriod": "60s", "perSeriesAligner": "ALIGN_RATE" }, - "filter": "metric.type=\"workload.googleapis.com/sfe.initiated_request.errors_count_by_buyer\" resource.type=\"generic_task\" 
metric.label.\"deployment_environment\"=\"${var.environment}\"", + "filter": "metric.type=\"workload.googleapis.com/sfe.initiated_request.to_bfe.errors_count_by_buyer\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"", "secondaryAggregation": { "alignmentPeriod": "60s", "crossSeriesReducer": "REDUCE_MEAN", @@ -1146,7 +1146,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" { { "height": 19, "widget": { - "title": "sfe.initiated_request.count_by_buyer [MEAN]", + "title": "sfe.initiated_request.to_bfe.count [MEAN]", "xyChart": { "chartOptions": {}, "dataSets": [ @@ -1160,7 +1160,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" { "alignmentPeriod": "60s", "perSeriesAligner": "ALIGN_RATE" }, - "filter": "metric.type=\"workload.googleapis.com/sfe.initiated_request.count_by_buyer\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"", + "filter": "metric.type=\"workload.googleapis.com/sfe.initiated_request.to_bfe.count\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"", "secondaryAggregation": { "alignmentPeriod": "60s", "crossSeriesReducer": "REDUCE_MEAN", @@ -1191,7 +1191,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" { { "height": 19, "widget": { - "title": "sfe.initiated_request.duration_by_buyer [MEAN]", + "title": "sfe.initiated_request.to_bfe.duration_ms [MEAN]", "xyChart": { "chartOptions": {}, "dataSets": [ @@ -1199,7 +1199,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" { "plotType": "LINE", "targetAxis": "Y1", "timeSeriesQuery": { - "timeSeriesQueryLanguage": "fetch generic_task\n| { metric 'workload.googleapis.com/sfe.initiated_request.duration_by_buyer'\n ; metric 'workload.googleapis.com/sfe.initiated_request.count_by_buyer' }\n| filter (metric.deployment_environment == '${var.environment}')\n| group_by\n [metric.buyer, metric.service_name, metric.deployment_environment,\n metric.operator, metric.Noise, resource.task_id, metric.service_version]\n| align rate(1m)\n| outer_join 0\n| div", + "timeSeriesQueryLanguage": "fetch generic_task\n| { metric 'workload.googleapis.com/sfe.initiated_request.to_bfe.duration_ms'\n ; metric 'workload.googleapis.com/sfe.initiated_request.to_bfe.count' }\n| filter (metric.deployment_environment == '${var.environment}')\n| group_by\n [metric.buyer, metric.service_name, metric.deployment_environment,\n metric.operator, metric.Noise, resource.task_id, metric.service_version]\n| align rate(1m)\n| outer_join 0\n| div", "unitOverride": "" } } @@ -1215,7 +1215,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" { { "height": 19, "widget": { - "title": "sfe.initiated_request.size_by_buyer [MEAN]", + "title": "sfe.initiated_request.to_bfe.size_bytes [MEAN]", "xyChart": { "chartOptions": {}, "dataSets": [ @@ -1229,7 +1229,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" { "alignmentPeriod": "60s", "perSeriesAligner": "ALIGN_RATE" }, - "filter": "metric.type=\"workload.googleapis.com/sfe.initiated_request.size_by_buyer\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"", + "filter": "metric.type=\"workload.googleapis.com/sfe.initiated_request.to_bfe.size_bytes\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"", "secondaryAggregation": { "alignmentPeriod": "60s", "crossSeriesReducer": "REDUCE_MEAN", @@ -1259,7 +1259,7 @@ resource 
"google_monitoring_dashboard" "environment_dashboard" { { "height": 19, "widget": { - "title": "sfe.initiated_response.size_by_buyer [MEAN]", + "title": "sfe.initiated_response.to_bfe.size_bytes [MEAN]", "xyChart": { "chartOptions": {}, "dataSets": [ @@ -1273,7 +1273,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" { "alignmentPeriod": "60s", "perSeriesAligner": "ALIGN_RATE" }, - "filter": "metric.type=\"workload.googleapis.com/sfe.initiated_response.size_by_buyer\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"", + "filter": "metric.type=\"workload.googleapis.com/sfe.initiated_response.to_bfe.size_bytes\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"", "secondaryAggregation": { "alignmentPeriod": "60s", "crossSeriesReducer": "REDUCE_MEAN", @@ -1465,7 +1465,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" { { "height": 19, "widget": { - "title": "business_logic.sfe.request_with_winner.count", + "title": "sfe.business_logic.request_with_winner_count", "xyChart": { "chartOptions": {}, "dataSets": [ @@ -1479,7 +1479,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" { "alignmentPeriod": "60s", "perSeriesAligner": "ALIGN_RATE" }, - "filter": "metric.type=\"workload.googleapis.com/business_logic.sfe.request_with_winner.count\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"", + "filter": "metric.type=\"workload.googleapis.com/sfe.business_logic.request_with_winner_count\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"", "secondaryAggregation": { "alignmentPeriod": "60s", "crossSeriesReducer": "REDUCE_MEAN", @@ -1509,7 +1509,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" { { "height": 19, "widget": { - "title": "business_logic.sfe.request_with_winner.duration_ms [95TH PERCENTILE]", + "title": "sfe.business_logic.request_with_winner.duration_ms [95TH PERCENTILE]", "xyChart": { "chartOptions": {}, "dataSets": [ @@ -1532,7 +1532,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" { ], "perSeriesAligner": "ALIGN_DELTA" }, - "filter": "metric.type=\"workload.googleapis.com/business_logic.sfe.request_with_winner.duration_ms\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"" + "filter": "metric.type=\"workload.googleapis.com/sfe.business_logic.request_with_winner.duration_ms\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"" } } } @@ -1549,7 +1549,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" { { "height": 19, "widget": { - "title": "protected_ciphertext.size_bytes [95TH PERCENTILE]", + "title": "sfe.protected_ciphertext.size_bytes [95TH PERCENTILE]", "xyChart": { "chartOptions": {}, "dataSets": [ @@ -1572,7 +1572,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" { ], "perSeriesAligner": "ALIGN_DELTA" }, - "filter": "metric.type=\"workload.googleapis.com/protected_ciphertext.size_bytes\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"" + "filter": "metric.type=\"workload.googleapis.com/sfe.protected_ciphertext.size_bytes\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"" } } } @@ -1589,7 +1589,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" { { "height": 19, "widget": { - "title": 
"auction_config.size_bytes [95TH PERCENTILE]", + "title": "sfe.auction_config.size_bytes [95TH PERCENTILE]", "xyChart": { "chartOptions": {}, "dataSets": [ @@ -1612,7 +1612,7 @@ resource "google_monitoring_dashboard" "environment_dashboard" { ], "perSeriesAligner": "ALIGN_DELTA" }, - "filter": "metric.type=\"workload.googleapis.com/auction_config.size_bytes\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"" + "filter": "metric.type=\"workload.googleapis.com/sfe.auction_config.size_bytes\" resource.type=\"generic_task\" metric.label.\"deployment_environment\"=\"${var.environment}\"" } } } diff --git a/production/packaging/aws/auction_service/BUILD b/production/packaging/aws/auction_service/BUILD index 4252eb08..461f6624 100644 --- a/production/packaging/aws/auction_service/BUILD +++ b/production/packaging/aws/auction_service/BUILD @@ -39,7 +39,7 @@ pkg_files( pkg_files( name = "kmstool_enclave_executable", srcs = [ - "@google_privacysandbox_servers_common//scp/cc/cpio/client_providers/kms_client_provider/src/aws:kms_cli", + "@google_privacysandbox_servers_common//src/cpio/client_providers/kms_client_provider/aws:kms_cli", ], attributes = pkg_attributes(mode = "0555"), prefix = "/", @@ -72,7 +72,7 @@ container_layer( pkg_tar( name = "libnsm-tar", srcs = [ - "@google_privacysandbox_servers_common//scp/cc/cpio/client_providers/kms_client_provider/src/aws:libnsm_so", + "@google_privacysandbox_servers_common//src/cpio/client_providers/kms_client_provider/aws:libnsm_so", ], mode = "0644", # TODO(b/294113069): Remove arch-specific path. @@ -116,10 +116,12 @@ container_image( ], env = LOG_ENV_VARS, layers = [ - "@google_privacysandbox_servers_common//scp/cc/aws/proxy/src:proxify_layer", ":server_binary_layer", ":kmstool_enclave_cli_layer", ], + tars = [ + "@google_privacysandbox_servers_common//src/aws/proxy:libnsm_and_proxify_tar", + ], ) container_test( diff --git a/production/packaging/aws/bidding_service/BUILD b/production/packaging/aws/bidding_service/BUILD index 961d6750..dbabb7bc 100644 --- a/production/packaging/aws/bidding_service/BUILD +++ b/production/packaging/aws/bidding_service/BUILD @@ -22,7 +22,6 @@ load( "@rules_pkg//pkg:mappings.bzl", "pkg_attributes", "pkg_files", - "strip_prefix", ) load("@rules_pkg//pkg:tar.bzl", "pkg_tar") load("@rules_pkg//pkg:zip.bzl", "pkg_zip") @@ -33,27 +32,25 @@ pkg_files( srcs = [ "//services/bidding_service:server", ] + select({ + "//:inference_noop": [ + "@inference_common//:inference_sidecar_test_target", + ], + "//:inference_pytorch": [ + "@pytorch_v2_1_1//:artifacts/inference_sidecar", + ], + "//:inference_tensorflow": [ + "@tensorflow_v2_14_0//:artifacts/inference_sidecar", + ], "//conditions:default": [], }), attributes = pkg_attributes(mode = "0555"), prefix = "/", ) -pkg_files( - name = "server_data", - srcs = [ - ] + select({ - "//conditions:default": [], - }), - attributes = pkg_attributes(mode = "0555"), - prefix = "/server/data", - strip_prefix = strip_prefix.from_pkg(), -) - pkg_files( name = "kmstool_enclave_executable", srcs = [ - "@google_privacysandbox_servers_common//scp/cc/cpio/client_providers/kms_client_provider/src/aws:kms_cli", + "@google_privacysandbox_servers_common//src/cpio/client_providers/kms_client_provider/aws:kms_cli", ], attributes = pkg_attributes(mode = "0555"), prefix = "/", @@ -61,7 +58,6 @@ pkg_files( server_binaries = [ ":kmstool_enclave_executable", - ":server_data", ":server_executables", ] @@ -79,7 +75,7 @@ pkg_tar( pkg_tar( name = "libnsm-tar", srcs = [ - 
"@google_privacysandbox_servers_common//scp/cc/cpio/client_providers/kms_client_provider/src/aws:libnsm_so", + "@google_privacysandbox_servers_common//src/cpio/client_providers/kms_client_provider/aws:libnsm_so", ], mode = "0644", # TODO(b/294113069): Remove arch-specific path. @@ -131,10 +127,12 @@ container_image( ], env = LOG_ENV_VARS, layers = [ - "@google_privacysandbox_servers_common//scp/cc/aws/proxy/src:proxify_layer", ":server_binary_layer", ":kmstool_enclave_cli_layer", ], + tars = [ + "@google_privacysandbox_servers_common//src/aws/proxy:libnsm_and_proxify_tar", + ], ) container_test( diff --git a/production/packaging/aws/buyer_frontend_service/BUILD b/production/packaging/aws/buyer_frontend_service/BUILD index 5bee24c5..6884dfaa 100644 --- a/production/packaging/aws/buyer_frontend_service/BUILD +++ b/production/packaging/aws/buyer_frontend_service/BUILD @@ -39,7 +39,7 @@ pkg_files( pkg_files( name = "kmstool_enclave_executable", srcs = [ - "@google_privacysandbox_servers_common//scp/cc/cpio/client_providers/kms_client_provider/src/aws:kms_cli", + "@google_privacysandbox_servers_common//src/cpio/client_providers/kms_client_provider/aws:kms_cli", ], attributes = pkg_attributes(mode = "0555"), prefix = "/", @@ -64,7 +64,7 @@ pkg_tar( pkg_tar( name = "libnsm-tar", srcs = [ - "@google_privacysandbox_servers_common//scp/cc/cpio/client_providers/kms_client_provider/src/aws:libnsm_so", + "@google_privacysandbox_servers_common//src/cpio/client_providers/kms_client_provider/aws:libnsm_so", ], mode = "0644", # TODO(b/294113069): Remove arch-specific path. @@ -116,10 +116,12 @@ container_image( ], env = LOG_ENV_VARS, layers = [ - "@google_privacysandbox_servers_common//scp/cc/aws/proxy/src:proxify_layer", ":server_binary_layer", ":kmstool_enclave_cli_layer", ], + tars = [ + "@google_privacysandbox_servers_common//src/aws/proxy:libnsm_and_proxify_tar", + ], ) container_test( diff --git a/production/packaging/aws/common/ami/BUILD b/production/packaging/aws/common/ami/BUILD index 1f90873b..91cc0620 100644 --- a/production/packaging/aws/common/ami/BUILD +++ b/production/packaging/aws/common/ami/BUILD @@ -30,7 +30,7 @@ pkg_zip( name = "nonenclave_artifacts", srcs = [ ":ami_config_group", - "@google_privacysandbox_servers_common//scp/cc/aws/proxy/src:proxy", + "@google_privacysandbox_servers_common//src/aws/proxy", ], ) diff --git a/production/packaging/aws/seller_frontend_service/BUILD b/production/packaging/aws/seller_frontend_service/BUILD index 49015eda..8ababfc3 100644 --- a/production/packaging/aws/seller_frontend_service/BUILD +++ b/production/packaging/aws/seller_frontend_service/BUILD @@ -39,7 +39,7 @@ pkg_files( pkg_files( name = "kmstool_enclave_executable", srcs = [ - "@google_privacysandbox_servers_common//scp/cc/cpio/client_providers/kms_client_provider/src/aws:kms_cli", + "@google_privacysandbox_servers_common//src/cpio/client_providers/kms_client_provider/aws:kms_cli", ], attributes = pkg_attributes(mode = "0555"), prefix = "/", @@ -64,7 +64,7 @@ pkg_tar( pkg_tar( name = "libnsm-tar", srcs = [ - "@google_privacysandbox_servers_common//scp/cc/cpio/client_providers/kms_client_provider/src/aws:libnsm_so", + "@google_privacysandbox_servers_common//src/cpio/client_providers/kms_client_provider/aws:libnsm_so", ], mode = "0644", # TODO(b/294113069): Remove arch-specific path. 
@@ -116,10 +116,12 @@ container_image( ], env = LOG_ENV_VARS, layers = [ - "@google_privacysandbox_servers_common//scp/cc/aws/proxy/src:proxify_layer", ":server_binary_layer", ":kmstool_enclave_cli_layer", ], + tars = [ + "@google_privacysandbox_servers_common//src/aws/proxy:libnsm_and_proxify_tar", + ], ) container_test( diff --git a/production/packaging/gcp/bidding_service/BUILD b/production/packaging/gcp/bidding_service/BUILD index da7faff2..7c949e00 100644 --- a/production/packaging/gcp/bidding_service/BUILD +++ b/production/packaging/gcp/bidding_service/BUILD @@ -22,7 +22,6 @@ load( "@rules_pkg//pkg:mappings.bzl", "pkg_attributes", "pkg_files", - "strip_prefix", ) load("@rules_pkg//pkg:tar.bzl", "pkg_tar") load("@rules_pkg//pkg:zip.bzl", "pkg_zip") @@ -31,27 +30,26 @@ load("//:config.bzl", "LOG_ENV_VARS") pkg_files( name = "server_executables", srcs = [ + "bin/init_server_basic", "//services/bidding_service:server", + "@com_github_grpc_grpc//test/cpp/util:grpc_cli", ] + select({ + "//:inference_noop": [ + "@inference_common//:inference_sidecar_test_target", + ], + "//:inference_pytorch": [ + "@pytorch_v2_1_1//:artifacts/inference_sidecar", + ], + "//:inference_tensorflow": [ + "@tensorflow_v2_14_0//:artifacts/inference_sidecar", + ], "//conditions:default": [], }), attributes = pkg_attributes(mode = "0555"), prefix = "/server/bin", ) -pkg_files( - name = "server_data", - srcs = [ - ] + select({ - "//conditions:default": [], - }), - attributes = pkg_attributes(mode = "0555"), - prefix = "/server/data", - strip_prefix = strip_prefix.from_pkg(), -) - server_binaries = [ - ":server_data", ":server_executables", ] @@ -85,10 +83,10 @@ container_image( "@platforms//cpu:x86_64": "@runtime-cc-debian-amd64//image", }), cmd = [ - "--init_config_client=true", + "/server/bin/init_server_basic", ], entrypoint = [ - "/server/bin/server", + "sh", ], env = LOG_ENV_VARS, labels = {"tee.launch_policy.log_redirect": "always"}, diff --git a/production/packaging/gcp/bidding_service/bin/init_server_basic b/production/packaging/gcp/bidding_service/bin/init_server_basic new file mode 100755 index 00000000..16d80daf --- /dev/null +++ b/production/packaging/gcp/bidding_service/bin/init_server_basic @@ -0,0 +1,55 @@ +#!/busybox/sh +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -o errexit +set -x + +######################################################################### +# # +# Mesh Startup # +# # +######################################################################### +# The following requires a GCP Metadata Server and a service account +# that is allowed to read instance metadata. 
+ +export MESH_NAME=$(wget -qO- --header "Metadata-Flavor: Google" "http://metadata.google.internal/computeMetadata/v1/instance/attributes/mesh-name") +export GRPC_XDS_BOOTSTRAP=/run/td-grpc-bootstrap.json +echo export GRPC_XDS_BOOTSTRAP=$GRPC_XDS_BOOTSTRAP | tee /etc/profile.d/grpc-xds-bootstrap.sh +wget https://storage.googleapis.com/traffic-director/td-grpc-bootstrap-0.14.0.tar.gz +cat td-grpc-bootstrap-0.14.0.tar.gz | tar -xz +./td-grpc-bootstrap-0.14.0/td-grpc-bootstrap --config-mesh-experimental $MESH_NAME | tee $GRPC_XDS_BOOTSTRAP + +echo "Starting backend reachability test..." + +# Either of these backends may or may not be present depending on adtech +# deployment so we don't block the service startup here. +set +e +/server/bin/grpc_cli ls xds:///kv-service-host --channel_creds_type=insecure +/server/bin/grpc_cli ls xds:///ad-retrieval-host --channel_creds_type=insecure +set -e + +######################################################################### +# # +# Bidding Startup # +# # +######################################################################### + + +export GLOG_logtostderr=1 +export GLOG_stderrthreshold=0 +export GRPC_DNS_RESOLVER=native + +# Start the server. +/server/bin/server --init_config_client=true diff --git a/production/packaging/gcp/bidding_service/test/structure.yaml b/production/packaging/gcp/bidding_service/test/structure.yaml index ed77978d..0019582e 100644 --- a/production/packaging/gcp/bidding_service/test/structure.yaml +++ b/production/packaging/gcp/bidding_service/test/structure.yaml @@ -22,6 +22,11 @@ fileExistenceTests: shouldExist: true isExecutableBy: any + - name: init_server_basic + path: /server/bin/init_server_basic + shouldExist: true + isExecutableBy: any + - name: ca-certs path: /etc/ssl/certs/ca-certificates.crt shouldExist: true diff --git a/services/auction_service/BUILD b/services/auction_service/BUILD index 8021b515..49347cce 100644 --- a/services/auction_service/BUILD +++ b/services/auction_service/BUILD @@ -39,16 +39,6 @@ cc_library( ], ) -cc_library( - name = "seller_adtech_reporting_wrapper", - hdrs = [ - "seller_adtech_reporting_wrapper.h", - ], - deps = [ - "@com_google_absl//absl/strings", - ], -) - cc_library( name = "buyer_adtech_reporting_wrapper", hdrs = [ @@ -78,9 +68,9 @@ cc_library( "//services/common/metric:server_definition", "@aws_sdk_cpp//:core", "@com_github_grpc_grpc//:grpc++", - "@google_privacysandbox_servers_common//scp/cc/public/cpio/interface:cpio", - "@google_privacysandbox_servers_common//src/cpp/encryption/key_fetcher/src:key_fetcher_manager", - "@google_privacysandbox_servers_common//src/cpp/telemetry", + "@google_privacysandbox_servers_common//src/encryption/key_fetcher:key_fetcher_manager", + "@google_privacysandbox_servers_common//src/public/cpio/interface:cpio", + "@google_privacysandbox_servers_common//src/telemetry", ], ) @@ -115,10 +105,10 @@ cc_library( "@com_google_absl//absl/container:flat_hash_set", "@com_google_absl//absl/status", "@com_google_absl//absl/strings", - "@google_privacysandbox_servers_common//src/cpp/encryption/key_fetcher/interface:key_fetcher_manager_interface", - "@google_privacysandbox_servers_common//src/cpp/logger:request_context_impl", - "@google_privacysandbox_servers_common//src/cpp/util/status_macro:status_macros", - "@google_privacysandbox_servers_common//src/cpp/util/status_macro:status_util", + "@google_privacysandbox_servers_common//src/encryption/key_fetcher/interface:key_fetcher_manager_interface", + 
"@google_privacysandbox_servers_common//src/logger:request_context_impl", + "@google_privacysandbox_servers_common//src/util/status_macro:status_macros", + "@google_privacysandbox_servers_common//src/util/status_macro:status_util", "@rapidjson", ], ) @@ -146,7 +136,21 @@ cc_test( "@com_google_absl//absl/time", "@com_google_googletest//:gtest", "@com_google_googletest//:gtest_main", - "@google_privacysandbox_servers_common//src/cpp/encryption/key_fetcher/mock:mock_key_fetcher_manager", + "@google_privacysandbox_servers_common//src/encryption/key_fetcher/mock:mock_key_fetcher_manager", + ], +) + +cc_test( + name = "score_ads_reactor_top_level_auction_test", + size = "small", + srcs = ["score_ads_reactor_top_level_auction_test.cc"], + deps = [ + ":auction_constants", + ":score_ads_reactor", + ":score_ads_reactor_test_util", + "//services/common/test:random", + "@com_google_googletest//:gtest", + "@com_google_googletest//:gtest_main", ], ) @@ -211,9 +215,65 @@ proto_library( cc_proto_library( name = "auction_code_fetch_config_cc_proto", + visibility = ["//services/auction_service:__subpackages__"], deps = [":auction_code_fetch_config_proto"], ) +cc_library( + name = "seller_code_fetch_manager", + srcs = [ + "seller_code_fetch_manager.cc", + ], + hdrs = [ + "seller_code_fetch_manager.h", + ], + deps = [ + ":auction_code_fetch_config_cc_proto", + ":auction_constants", + "//services/auction_service/code_wrapper:buyer_reporting_fetcher", + "//services/auction_service/code_wrapper:seller_code_wrapper", + "//services/common/clients/code_dispatcher:v8_dispatcher", + "//services/common/code_fetch:periodic_bucket_fetcher", + "//services/common/code_fetch:periodic_code_fetcher", + "//services/common/util:file_util", + "@com_google_absl//absl/status", + "@com_google_absl//absl/strings", + "@com_google_absl//absl/time", + "@google_privacysandbox_servers_common//src/concurrent:executor", + "@google_privacysandbox_servers_common//src/logger:request_context_logger", + "@google_privacysandbox_servers_common//src/public/core/interface:errors", + "@google_privacysandbox_servers_common//src/util/status_macro:status_macros", + ], +) + +cc_test( + name = "seller_code_fetch_manager_test", + size = "small", + srcs = ["seller_code_fetch_manager_test.cc"], + deps = [ + ":seller_code_fetch_manager", + "//services/auction_service/code_wrapper:seller_code_wrapper", + "//services/common/clients/code_dispatcher:v8_dispatcher", + "//services/common/clients/http:http_fetcher_async", + "//services/common/code_fetch:periodic_bucket_fetcher", + "//services/common/code_fetch:periodic_code_fetcher", + "//services/common/test:mocks", + "@com_google_absl//absl/functional:any_invocable", + "@com_google_absl//absl/status", + "@com_google_absl//absl/strings", + "@com_google_absl//absl/synchronization", + "@com_google_absl//absl/time", + "@com_google_googletest//:gtest", + "@com_google_googletest//:gtest_main", + "@google_privacysandbox_servers_common//src/concurrent:executor", + "@google_privacysandbox_servers_common//src/public/core/interface:errors", + "@google_privacysandbox_servers_common//src/public/core/interface:execution_result", + "@google_privacysandbox_servers_common//src/public/cpio/interface:cpio", + "@google_privacysandbox_servers_common//src/public/cpio/interface/blob_storage_client", + "@google_privacysandbox_servers_common//src/public/cpio/mock/blob_storage_client:blob_storage_client_mock", + ], +) + cc_binary( name = "server", srcs = ["auction_main.cc"], @@ -244,10 +304,10 @@ cc_binary( 
"@com_google_absl//absl/flags:parse", "@com_google_absl//absl/log:check", "@com_google_absl//absl/strings", - "@google_privacysandbox_servers_common//src/cpp/concurrent:executor", - "@google_privacysandbox_servers_common//src/cpp/encryption/key_fetcher/src:key_fetcher_manager", - "@google_privacysandbox_servers_common//src/cpp/util:rlimit_core_config", - "@google_privacysandbox_servers_common//src/cpp/util/status_macro:status_macros", + "@google_privacysandbox_servers_common//src/concurrent:executor", + "@google_privacysandbox_servers_common//src/encryption/key_fetcher:key_fetcher_manager", + "@google_privacysandbox_servers_common//src/util:rlimit_core_config", + "@google_privacysandbox_servers_common//src/util/status_macro:status_macros", ], ) @@ -276,7 +336,19 @@ cc_library( testonly = True, srcs = ["score_ads_reactor_test_util.cc"], hdrs = ["score_ads_reactor_test_util.h"], + visibility = [ + "//services/auction_service:__pkg__", + "//services/auction_service/utils:__pkg__", + ], deps = [ + ":auction_constants", + ":score_ads_reactor", + "//services/auction_service/benchmarking:score_ads_benchmarking_logger", + "//services/auction_service/benchmarking:score_ads_no_op_logger", + "//services/common/constants:common_service_flags", + "//services/common/encryption:key_fetcher_factory", + "//services/common/encryption:mock_crypto_client_wrapper", + "//services/common/test:mocks", "//services/common/test:random", "@com_google_absl//absl/strings", ], diff --git a/services/auction_service/auction_code_fetch_config.proto b/services/auction_service/auction_code_fetch_config.proto index 4773ab02..9343446a 100644 --- a/services/auction_service/auction_code_fetch_config.proto +++ b/services/auction_service/auction_code_fetch_config.proto @@ -18,11 +18,24 @@ syntax = "proto3"; package privacy_sandbox.bidding_auction_servers.auction_service; +// Specify how to fetch the code blob. +enum FetchMode { + // Fetch a single blob from an arbitrary url. + FETCH_MODE_URL = 0; + // Fetch all blobs from a specified bucket + // and rely on a default blob specification. + FETCH_MODE_BUCKET = 1; + // Fetch a blob from a local path. Dev-only. + FETCH_MODE_LOCAL = 2; +} + message SellerCodeFetchConfig { + // Required if fetch_mode = FETCH_MODE_LOCAL. // The javascript generateBid script. string auction_js_path = 1; + // Required if fetch_mode = FETCH_MODE_URL. // URL endpoint for fetching AdTech code blob, js file. string auction_js_url = 2; @@ -32,6 +45,20 @@ message SellerCodeFetchConfig { // A time out limit for HttpsFetcherAsync client to stop executing FetchUrl. int64 url_fetch_timeout_ms = 4; + // Required if fetch_mode = FETCH_MODE_BUCKET. + // The name of a bucket from which to fetch code blobs. + // All blobs will be fetched from this bucket. + string auction_js_bucket = 11; + + // Required if fetch_mode = FETCH_MODE_BUCKET. + // The name of the bucket's default code blob to use. + // The default will be used if the scoring request does not specify a version. + string auction_js_bucket_default_blob = 12; + + // Required. + // Specifies which mode to use when fetching UDF blobs. + FetchMode fetch_mode = 13; + // Allow seller debug URL generation. 
bool enable_seller_debug_url_generation = 5;
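The FetchMode enum above determines where the seller's scoring UDF comes from. As a rough sketch of how a consumer of SellerCodeFetchConfig might dispatch on it — the Load*/Start* helpers here are hypothetical names for illustration, not the service's actual API:

  absl::Status InitSellerCodeFetch(
      const auction_service::SellerCodeFetchConfig& config) {
    switch (config.fetch_mode()) {
      case auction_service::FETCH_MODE_LOCAL:
        // Dev-only: read the blob at auction_js_path from local disk.
        return LoadBlobFromFile(config.auction_js_path());
      case auction_service::FETCH_MODE_URL:
        // Periodically re-fetch the single blob at auction_js_url.
        return StartPeriodicUrlFetch(config.auction_js_url());
      case auction_service::FETCH_MODE_BUCKET:
        // Fetch every blob in auction_js_bucket; the default blob serves
        // scoring requests that do not name a version.
        return StartPeriodicBucketFetch(
            config.auction_js_bucket(),
            config.auction_js_bucket_default_blob());
      default:
        return absl::InvalidArgumentError("unsupported fetch_mode");
    }
  }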
count:" << ad_tech_code_blobs.size(); // Collect code blobs for protected audience. auto buyer_origin_code_map = GetCodeBlobMap( - /*start=*/0, num_protected_audience_endpoints, buyer_origins, + /*first=*/0, num_protected_audience_endpoints, buyer_origins, ad_tech_code_blobs, "buyer_report_win_js_urls"); // Collect code blobs for protected app signals. absl::flat_hash_map @@ -382,8 +382,6 @@ absl::Status RunServer() { if (code_fetcher) { code_fetcher->End(); } - PS_RETURN_IF_ERROR(dispatcher.Stop()) - << "Error shutting down code dispatcher."; return absl::OkStatus(); } } // namespace privacy_sandbox::bidding_auction_servers diff --git a/services/auction_service/auction_service.cc b/services/auction_service/auction_service.cc index 96d8e17d..a94f739d 100644 --- a/services/auction_service/auction_service.cc +++ b/services/auction_service/auction_service.cc @@ -18,7 +18,7 @@ #include "api/bidding_auction_servers.pb.h" #include "services/common/metric/server_definition.h" -#include "src/cpp/telemetry/telemetry.h" +#include "src/telemetry/telemetry.h" namespace privacy_sandbox::bidding_auction_servers { diff --git a/services/auction_service/auction_service.h b/services/auction_service/auction_service.h index 577a98f4..5d2938c3 100644 --- a/services/auction_service/auction_service.h +++ b/services/auction_service/auction_service.h @@ -22,10 +22,10 @@ #include "absl/functional/any_invocable.h" #include "api/bidding_auction_servers.grpc.pb.h" -#include "scp/cc/public/cpio/interface/crypto_client/crypto_client_interface.h" #include "services/auction_service/data/runtime_config.h" #include "services/auction_service/score_ads_reactor.h" -#include "src/cpp/encryption/key_fetcher/interface/key_fetcher_manager_interface.h" +#include "src/encryption/key_fetcher/interface/key_fetcher_manager_interface.h" +#include "src/public/cpio/interface/crypto_client/crypto_client_interface.h" namespace privacy_sandbox::bidding_auction_servers { diff --git a/services/auction_service/auction_service_integration_test.cc b/services/auction_service/auction_service_integration_test.cc index 6d63a0e2..3b0d9ef2 100644 --- a/services/auction_service/auction_service_integration_test.cc +++ b/services/auction_service/auction_service_integration_test.cc @@ -80,7 +80,7 @@ constexpr absl::string_view js_code = R"JS_CODE( bid, auction_config, scoring_signals, - device_signals, + bid_metadata, direct_from_seller_signals ) { // Do a random amount of work to generate the score: @@ -104,7 +104,7 @@ constexpr absl::string_view js_code_with_debug_urls = R"JS_CODE( bid, auction_config, scoring_signals, - device_signals, + bid_metadata, direct_from_seller_signals ) { // Do a random amount of work to generate the score: @@ -142,7 +142,7 @@ constexpr absl::string_view js_code_with_very_large_debug_urls = R"JS_CODE( bid, auction_config, scoring_signals, - device_signals, + bid_metadata, direct_from_seller_signals ) { // Do a random amount of work to generate the score: @@ -167,7 +167,7 @@ constexpr absl::string_view js_code_with_global_this_debug_urls = R"JS_CODE( bid, auction_config, scoring_signals, - device_signals, + bid_metadata, direct_from_seller_signals ) { // Do a random amount of work to generate the score: @@ -192,7 +192,7 @@ constexpr absl::string_view js_code_throws_exception = R"JS_CODE( bid, auction_config, scoring_signals, - device_signals, + bid_metadata, direct_from_seller_signals) { // Do a random amount of work to generate the score: const score = fibonacci(Math.floor(Math.random() * 10 + 1)); @@ -211,7 +211,7 @@ 
constexpr absl::string_view js_code_throws_exception_with_debug_urls = bid, auction_config, scoring_signals, - device_signals, + bid_metadata, direct_from_seller_signals) { // Do a random amount of work to generate the score: const score = fibonacci(Math.floor(Math.random() * 10 + 1)); @@ -254,7 +254,7 @@ constexpr absl::string_view js_code_with_reject_reasons = R"JS_CODE( bid, auction_config, scoring_signals, - device_signals, + bid_metadata, direct_from_seller_signals ) { // Do a random amount of work to generate the score: @@ -476,7 +476,6 @@ TEST_F(AuctionServiceIntegrationTest, ScoresAdsWithCustomScoringLogic) { EXPECT_EQ(scoredAd.render(), interest_group_to_ad.at(scoredAd.interest_group_name()).render()); } - EXPECT_TRUE(dispatcher.Stop().ok()); } void SellerCodeWrappingTestHelper( @@ -498,7 +497,7 @@ void SellerCodeWrappingTestHelper( ScoreAdsRequest::ScoreAdsRawRequest raw_request; raw_request.ParseFromString(request->request_ciphertext()); if (enable_report_win_url_generation) { - for (auto ad_bid : raw_request.ad_bids()) { + for (const auto& ad_bid : raw_request.ad_bids()) { if (enable_report_win_input_noising) { buyer_origin_code_map.try_emplace(ad_bid.interest_group_owner(), kBuyerBaseCodeWithValidation); @@ -556,7 +555,6 @@ void SellerCodeWrappingTestHelper( service.ScoreAds(&context, request, response); std::this_thread::sleep_for(absl::ToChronoSeconds(absl::Seconds(2))); // This line may NOT break if the ad_score() object is empty. - EXPECT_TRUE(dispatcher.Stop().ok()); } TEST_F(AuctionServiceIntegrationTest, ScoresAdsReturnsDebugUrlsForWinningAd) { @@ -1060,7 +1058,8 @@ TEST_F(AuctionServiceIntegrationTest, CapturesRejectionReasonForRejectedAds) { // in response. EXPECT_NE(ad_rejection_reason.rejection_reason(), SellerRejectionReason::SELLER_REJECTION_REASON_NOT_AVAILABLE); - std::string interest_group_name = ad_rejection_reason.interest_group_name(); + const std::string& interest_group_name = + ad_rejection_reason.interest_group_name(); EXPECT_EQ(ad_rejection_reason.rejection_reason(), interest_group_to_rejection_reason.at(interest_group_name)); } @@ -1098,5 +1097,45 @@ TEST_F(AuctionServiceIntegrationTest, FiltersUnallowedAdsForComponentAuction) { raw_response.ParseFromString(response.response_ciphertext()); EXPECT_FALSE(raw_response.has_ad_score()); } + +void BuildTopLevelAuctionScoreAdsRequest( + ScoreAdsRequest* request, bool enable_debug_reporting = false, + int desired_ad_count = 20, absl::string_view seller = kTestSeller) { + ScoreAdsRequest::ScoreAdsRawRequest raw_request; + std::string generation_id = MakeARandomString(); + for (int i = 0; i < desired_ad_count; i++) { + auto auction_result = + MakeARandomComponentAuctionResult(generation_id, absl::StrCat(seller)); + *raw_request.mutable_component_auction_results()->Add() = auction_result; + } + if (enable_debug_reporting) { + raw_request.set_enable_debug_reporting(enable_debug_reporting); + } + raw_request.set_seller(seller); + *request->mutable_request_ciphertext() = raw_request.SerializeAsString(); + request->set_key_id(kKeyId); +} + +TEST_F(AuctionServiceIntegrationTest, + ProcessTopLevelAuctionInputAndReturnWinner) { + ScoreAdsRequest request; + BuildTopLevelAuctionScoreAdsRequest(&request); + ScoreAdsResponse response; + SellerCodeWrappingTestHelper(&request, &response, kTopLevelAuctionCode, + /*enable_seller_debug_url_generation=*/false, + /*enable_debug_reporting=*/false); + ScoreAdsResponse::ScoreAdsRawResponse raw_response; + raw_response.ParseFromString(response.response_ciphertext()); + const auto& 
scoredAd = raw_response.ad_score(); + EXPECT_GT(scoredAd.desirability(), 0); + EXPECT_FLOAT_EQ(scoredAd.incoming_bid_in_seller_currency(), 1.868); + + // Should not be populated. + EXPECT_FALSE(scoredAd.allow_component_auction()); + EXPECT_TRUE(scoredAd.ad_metadata().empty()); + EXPECT_EQ(scoredAd.bid(), 0); + EXPECT_TRUE(scoredAd.bid_currency().empty()); +} + } // namespace } // namespace privacy_sandbox::bidding_auction_servers diff --git a/services/auction_service/benchmarking/BUILD b/services/auction_service/benchmarking/BUILD index fef1118f..c576e7ff 100644 --- a/services/auction_service/benchmarking/BUILD +++ b/services/auction_service/benchmarking/BUILD @@ -55,6 +55,6 @@ cc_binary( "//services/common/test:random", "@google_benchmark//:benchmark", "@google_benchmark//:benchmark_main", - "@google_privacysandbox_servers_common//src/cpp/encryption/key_fetcher/mock:mock_key_fetcher_manager", + "@google_privacysandbox_servers_common//src/encryption/key_fetcher/mock:mock_key_fetcher_manager", ], ) diff --git a/services/auction_service/benchmarking/score_ads_reactor_benchmarks.cc b/services/auction_service/benchmarking/score_ads_reactor_benchmarks.cc index eb7c8a9d..2d81b83f 100644 --- a/services/auction_service/benchmarking/score_ads_reactor_benchmarks.cc +++ b/services/auction_service/benchmarking/score_ads_reactor_benchmarks.cc @@ -241,6 +241,7 @@ ProtectedAppSignalsAdWithBidMetadata BuildProtectedAppSignalsAdWithBid(int id) { std::vector<AdWithBidMetadata> BuildAdWithBids( int num_interest_groups = kNumInterestGroups) { std::vector<AdWithBidMetadata> ads_with_bid_metadata; + ads_with_bid_metadata.reserve(num_interest_groups); for (int i = 0; i < num_interest_groups; ++i) { ads_with_bid_metadata.emplace_back(BuildAdWithBid(i, kNumComponentsPerAd)); } @@ -251,6 +252,7 @@ std::vector<ProtectedAppSignalsAdWithBidMetadata> BuildProtectedAppSignalsAdWithBids(int num_pas_ads = kNumPasAds) { std::vector<ProtectedAppSignalsAdWithBidMetadata> protected_app_signals_ads_with_bid_metadata; + protected_app_signals_ads_with_bid_metadata.reserve(num_pas_ads); for (int i = 0; i < num_pas_ads; ++i) { protected_app_signals_ads_with_bid_metadata.emplace_back( BuildProtectedAppSignalsAdWithBid(i)); @@ -262,6 +264,7 @@ std::string BuildScoringSignals(int num_interest_groups = kNumInterestGroups, int num_pas_ads = kNumPasAds) { std::vector<std::string> scoring_signals; absl::BitGen bit_gen; + scoring_signals.reserve(num_interest_groups + num_pas_ads); for (int i = 0; i < num_interest_groups; ++i) { scoring_signals.emplace_back(absl::StrFormat( kScoringSignalKvTemplate, absl::StrFormat(kRenderUrlTemplate, i),
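The reserve() calls added to these benchmark helpers are the usual guard against repeated reallocation when the element count is known before the loop; the same idiom in isolation, with illustrative names only:

  #include <string>
  #include <vector>
  std::vector<std::string> values;
  values.reserve(total_count);  // one allocation up front
  for (int i = 0; i < total_count; ++i) {
    values.emplace_back(MakeValue(i));  // MakeValue is hypothetical
  }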
"@google_privacysandbox_servers_common//src/logger:request_context_logger", + "@google_privacysandbox_servers_common//src/util/status_macro:status_macros", + "@google_privacysandbox_servers_common//src/util/status_macro:status_util", + ], +) diff --git a/services/auction_service/code_wrapper/buyer_reporting_fetcher.cc b/services/auction_service/code_wrapper/buyer_reporting_fetcher.cc new file mode 100644 index 00000000..87070169 --- /dev/null +++ b/services/auction_service/code_wrapper/buyer_reporting_fetcher.cc @@ -0,0 +1,120 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "services/auction_service/code_wrapper/buyer_reporting_fetcher.h" + +#include +#include +#include +#include + +#include "absl/status/statusor.h" +#include "absl/synchronization/notification.h" +#include "absl/time/time.h" +#include "services/common/clients/http/http_fetcher_async.h" +#include "src/logger/request_context_logger.h" +#include "src/util/status_macro/status_macros.h" + +namespace privacy_sandbox::bidding_auction_servers { + +absl::Status BuyerReportingFetcher::Start() { + if (config_.enable_report_win_url_generation()) { + PeriodicBuyerReportingFetchSync(); + } + return absl::OkStatus(); +} + +void BuyerReportingFetcher::End() { + if (task_id_.has_value()) { + executor_.Cancel(*task_id_); + task_id_ = absl::nullopt; + } +} + +absl::flat_hash_map +BuyerReportingFetcher::GetProtectedAuctionReportingByOrigin() + ABSL_LOCKS_EXCLUDED(code_blob_per_origin_mu_) { + absl::MutexLock lock(&code_blob_per_origin_mu_); + return protected_auction_code_blob_per_origin_; +} + +absl::flat_hash_map +BuyerReportingFetcher::GetProtectedAppSignalsReportingByOrigin() + ABSL_LOCKS_EXCLUDED(code_blob_per_origin_mu_) { + absl::MutexLock lock(&code_blob_per_origin_mu_); + return protected_app_signals_code_blob_per_origin_; +} + +void BuyerReportingFetcher::PeriodicBuyerReportingFetchSync() { + std::vector requests; + requests.reserve( + config_.buyer_report_win_js_urls().size() + + config_.protected_app_signals_buyer_report_win_js_urls().size()); + std::vector buyer_origins; + for (const auto& [buyer_origin, report_win_endpoint] : + config_.buyer_report_win_js_urls()) { + requests.push_back({.url = report_win_endpoint}); + buyer_origins.push_back(buyer_origin); + } + + for (const auto& [buyer_origin, report_win_endpoint] : + config_.protected_app_signals_buyer_report_win_js_urls()) { + requests.push_back({.url = report_win_endpoint}); + buyer_origins.push_back(buyer_origin); + } + + if (requests.empty()) { + PS_VLOG(1) << "No buyer reporting UDFs to fetch in config."; + return; + } + + absl::Notification fetched; + auto done_callback = + [&fetched, &requests, &buyer_origins, + this](std::vector> results) mutable { + absl::MutexLock lock(&code_blob_per_origin_mu_); + for (int i = 0; i < results.size(); i++) { + auto& result = results[i]; + if (!result.ok()) { + PS_VLOG(1) << "Failed origin " << buyer_origins[i] << " fetch at " + << requests[i].url + << " with status: " << 
diff --git a/services/auction_service/code_wrapper/buyer_reporting_fetcher.h b/services/auction_service/code_wrapper/buyer_reporting_fetcher.h new file mode 100644 index 00000000..86ea8e04 --- /dev/null +++ b/services/auction_service/code_wrapper/buyer_reporting_fetcher.h @@ -0,0 +1,95 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef SERVICES_BUYER_REPORTING_FETCHER_H_ +#define SERVICES_BUYER_REPORTING_FETCHER_H_ + +#include <memory> +#include <optional> +#include <string> +#include <vector> + +#include "absl/container/flat_hash_map.h" +#include "absl/synchronization/mutex.h" +#include "services/auction_service/auction_code_fetch_config.pb.h" +#include "services/common/clients/http/http_fetcher_async.h" +#include "services/common/code_fetch/code_fetcher_interface.h" +#include "src/concurrent/executor.h" + +namespace privacy_sandbox::bidding_auction_servers { + +// Periodically fetch all code at buyer reporting urls. +// Provide an API to allow callers to retrieve the content +// of the urls. +class BuyerReportingFetcher : public CodeFetcherInterface { + public: + explicit BuyerReportingFetcher( + const auction_service::SellerCodeFetchConfig& config, + HttpFetcherAsync* http_fetcher, server_common::Executor* executor) + : config_(config), http_fetcher_(*http_fetcher), executor_(*executor) {} + + ~BuyerReportingFetcher() { End(); } + + // Not copyable or movable. + BuyerReportingFetcher(const BuyerReportingFetcher&) = delete; + BuyerReportingFetcher& operator=(const BuyerReportingFetcher&) = delete; + + // Starts a periodic code blob fetching process by fetching url with + // MultiCurlHttpFetcherAsync and loading the updated content into a publicly + // accessible map. + absl::Status Start() override; + + // Ends the periodic fetching process by canceling the last scheduled task + // using task_id_. + void End() override; + + // WARNING: do not use in critical paths, this returns a copy of a map. + // return a map of buyer origin to reporting blob + absl::flat_hash_map<std::string, std::string> + GetProtectedAuctionReportingByOrigin() + ABSL_LOCKS_EXCLUDED(code_blob_per_origin_mu_); + + // WARNING: do not use in critical paths, this returns a copy of a map. + // return a map of buyer origin to reporting blob + absl::flat_hash_map<std::string, std::string> + GetProtectedAppSignalsReportingByOrigin() + ABSL_LOCKS_EXCLUDED(code_blob_per_origin_mu_); + + private: + void PeriodicBuyerReportingFetchSync(); + + // Acts as a lock on the code_blob_per_origin_ maps. + absl::Mutex code_blob_per_origin_mu_; + // Keeps track of the next task to be performed on the executor. + std::optional<server_common::TaskId> task_id_; + + const auction_service::SellerCodeFetchConfig& config_; + + // Configured as a constructor parameter to aid testing. + HttpFetcherAsync& http_fetcher_; + server_common::Executor& executor_; + + absl::flat_hash_map<std::string, std::string> + protected_auction_code_blob_per_origin_ + ABSL_GUARDED_BY(code_blob_per_origin_mu_); + absl::flat_hash_map<std::string, std::string> + protected_app_signals_code_blob_per_origin_ + ABSL_GUARDED_BY(code_blob_per_origin_mu_); +}; + +} // namespace privacy_sandbox::bidding_auction_servers + +#endif // SERVICES_BUYER_REPORTING_FETCHER_H_
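Given the header above, the caller owns the config, HTTP fetcher, and executor, and passes the latter two by pointer; a minimal usage sketch (construction of those dependencies is elided and assumed):

  BuyerReportingFetcher reporting_fetcher(seller_code_fetch_config,
                                          &http_fetcher, &executor);
  CHECK(reporting_fetcher.Start().ok());  // kicks off the periodic fetch loop
  // Off the critical path only: the getter copies the map under the mutex.
  auto reporting_blobs =
      reporting_fetcher.GetProtectedAuctionReportingByOrigin();
  // End() is also invoked automatically by the destructor.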
diff --git a/services/auction_service/code_wrapper/seller_code_wrapper_test_constants.h b/services/auction_service/code_wrapper/seller_code_wrapper_test_constants.h index 919bb8d1..5da329a5 100644 --- a/services/auction_service/code_wrapper/seller_code_wrapper_test_constants.h +++ b/services/auction_service/code_wrapper/seller_code_wrapper_test_constants.h @@ -140,7 +140,7 @@ constexpr absl::string_view kSellerBaseCode = R"JS_CODE( return fibonacci(num - 1) + fibonacci(num - 2); } - function scoreAd(ad_metadata, bid, auction_config, scoring_signals, device_signals, directFromSellerSignals){ + function scoreAd(ad_metadata, bid, auction_config, scoring_signals, bid_metadata, directFromSellerSignals){ // Do a random amount of work to generate the score: const score = fibonacci(Math.floor(Math.random() * 10 + 1)); console.log("Logging from ScoreAd") @@ -164,9 +164,9 @@ constexpr absl::string_view kSellerBaseCode = R"JS_CODE( )JS_CODE"; constexpr absl::string_view kComponentAuctionCode = R"JS_CODE( - function scoreAd(ad_metadata, bid, auction_config, scoring_signals, device_signals, directFromSellerSignals){ + function scoreAd(ad_metadata, bid, auction_config, scoring_signals, bid_metadata, directFromSellerSignals){ return { - ad: device_signals["topLevelSeller"], + ad: bid_metadata["topLevelSeller"], desirability: 1, bid: 2, incomingBidInSellerCurrency: 1.868, @@ -188,9 +188,9 @@ constexpr absl::string_view kComponentAuctionCode = R"JS_CODE( )JS_CODE"; constexpr absl::string_view kComponentAuctionCodeWithNoModifiedBid = R"JS_CODE( - function scoreAd(ad_metadata, bid, auction_config, scoring_signals, device_signals, directFromSellerSignals){ + function scoreAd(ad_metadata, bid, auction_config, scoring_signals, bid_metadata, directFromSellerSignals){ return { - ad: device_signals["topLevelSeller"], + ad: bid_metadata["topLevelSeller"], desirability: 1, allowComponentAuction: true } @@ -209,9 +209,9 @@ constexpr absl::string_view kComponentAuctionCodeWithNoModifiedBid = R"JS_CODE( )JS_CODE"; constexpr absl::string_view kSkipAdComponentAuctionCode = R"JS_CODE( - function scoreAd(ad_metadata, bid, auction_config, scoring_signals, device_signals, directFromSellerSignals){ + function scoreAd(ad_metadata, bid, auction_config, scoring_signals, bid_metadata, directFromSellerSignals){ return { - ad: device_signals["topLevelSeller"], + ad: bid_metadata["topLevelSeller"], desirability: 1, bid: 2, allowComponentAuction: false @@ -219,6 +219,25 @@ constexpr absl::string_view kSkipAdComponentAuctionCode = R"JS_CODE( } )JS_CODE"; +constexpr absl::string_view kTopLevelAuctionCode =
R"JS_CODE( + function scoreAd(ad_metadata, bid, auction_config, scoring_signals, bid_metadata, directFromSellerSignals){ + let desirability = 0; + + if (bid_metadata["componentSeller"] !== undefined && + bid_metadata["componentSeller"].length > 0) { + desirability = 1; + } + return { + ad: bid_metadata["componentSeller"], + desirability: desirability, + incomingBidInSellerCurrency: 1.868, + bidCurrency: "USD", + bid: bid, + allowComponentAuction: true + } + } +)JS_CODE"; + constexpr absl::string_view kExpectedFinalCode = R"JS_CODE( function scoreAdEntryFunction(adMetadata, bid, auctionConfig, trustedScoringSignals, browserSignals, directFromSellerSignals, featureFlags){ @@ -451,7 +470,7 @@ constexpr absl::string_view kExpectedFinalCode = R"JS_CODE( return fibonacci(num - 1) + fibonacci(num - 2); } - function scoreAd(ad_metadata, bid, auction_config, scoring_signals, device_signals, directFromSellerSignals){ + function scoreAd(ad_metadata, bid, auction_config, scoring_signals, bid_metadata, directFromSellerSignals){ // Do a random amount of work to generate the score: const score = fibonacci(Math.floor(Math.random() * 10 + 1)); console.log("Logging from ScoreAd") @@ -823,7 +842,7 @@ constexpr absl::string_view kExpectedProtectedAppSignalsFinalCode = R"JS_CODE( return fibonacci(num - 1) + fibonacci(num - 2); } - function scoreAd(ad_metadata, bid, auction_config, scoring_signals, device_signals, directFromSellerSignals){ + function scoreAd(ad_metadata, bid, auction_config, scoring_signals, bid_metadata, directFromSellerSignals){ // Do a random amount of work to generate the score: const score = fibonacci(Math.floor(Math.random() * 10 + 1)); console.log("Logging from ScoreAd") @@ -990,7 +1009,7 @@ constexpr absl::string_view kExpectedCodeWithReportWinDisabled = R"JS_CODE( return fibonacci(num - 1) + fibonacci(num - 2); } - function scoreAd(ad_metadata, bid, auction_config, scoring_signals, device_signals, directFromSellerSignals){ + function scoreAd(ad_metadata, bid, auction_config, scoring_signals, bid_metadata, directFromSellerSignals){ // Do a random amount of work to generate the score: const score = fibonacci(Math.floor(Math.random() * 10 + 1)); console.log("Logging from ScoreAd") @@ -1071,7 +1090,7 @@ constexpr absl::string_view kExpectedCodeWithReportingDisabled = R"JS_CODE( return fibonacci(num - 1) + fibonacci(num - 2); } - function scoreAd(ad_metadata, bid, auction_config, scoring_signals, device_signals, directFromSellerSignals){ + function scoreAd(ad_metadata, bid, auction_config, scoring_signals, bid_metadata, directFromSellerSignals){ // Do a random amount of work to generate the score: const score = fibonacci(Math.floor(Math.random() * 10 + 1)); console.log("Logging from ScoreAd") diff --git a/services/auction_service/reporting/BUILD b/services/auction_service/reporting/BUILD index 7b8389e1..78dae8b3 100644 --- a/services/auction_service/reporting/BUILD +++ b/services/auction_service/reporting/BUILD @@ -22,7 +22,7 @@ cc_library( "@com_google_absl//absl/container:flat_hash_map", "@com_google_absl//absl/status", "@com_google_absl//absl/strings", - "@google_privacysandbox_servers_common//src/cpp/util/status_macro:status_util", + "@google_privacysandbox_servers_common//src/util/status_macro:status_util", ], ) @@ -43,9 +43,9 @@ cc_library( "@com_google_absl//absl/container:flat_hash_map", "@com_google_absl//absl/status", "@com_google_absl//absl/strings", - "@google_privacysandbox_servers_common//src/cpp/logger:request_context_impl", - 
"@google_privacysandbox_servers_common//src/cpp/util/status_macro:status_macros", - "@google_privacysandbox_servers_common//src/cpp/util/status_macro:status_util", + "@google_privacysandbox_servers_common//src/logger:request_context_impl", + "@google_privacysandbox_servers_common//src/util/status_macro:status_macros", + "@google_privacysandbox_servers_common//src/util/status_macro:status_util", "@rapidjson", ], ) @@ -59,7 +59,7 @@ cc_library( "@boringssl//:crypto", "@boringssl//:ssl", "@com_google_absl//absl/random", - "@google_privacysandbox_servers_common//src/cpp/util/status_macro:status_macros", + "@google_privacysandbox_servers_common//src/util/status_macro:status_macros", ], ) @@ -82,7 +82,7 @@ cc_test( "@com_google_absl//absl/strings", "@com_google_googletest//:gtest", "@com_google_googletest//:gtest_main", - "@google_privacysandbox_servers_common//src/cpp/util/status_macro:status_util", + "@google_privacysandbox_servers_common//src/util/status_macro:status_util", "@rapidjson", ], ) @@ -100,7 +100,7 @@ cc_test( "@com_google_absl//absl/strings", "@com_google_googletest//:gtest", "@com_google_googletest//:gtest_main", - "@google_privacysandbox_servers_common//src/cpp/util/status_macro:status_macros", - "@google_privacysandbox_servers_common//src/cpp/util/status_macro:status_util", + "@google_privacysandbox_servers_common//src/util/status_macro:status_macros", + "@google_privacysandbox_servers_common//src/util/status_macro:status_util", ], ) diff --git a/services/auction_service/reporting/noiser_and_bucketer.cc b/services/auction_service/reporting/noiser_and_bucketer.cc index 57d7bd58..1481beee 100644 --- a/services/auction_service/reporting/noiser_and_bucketer.cc +++ b/services/auction_service/reporting/noiser_and_bucketer.cc @@ -19,7 +19,7 @@ #include "absl/status/statusor.h" #include "openssl/rand.h" -#include "src/cpp/util/status_macro/status_macros.h" +#include "src/util/status_macro/status_macros.h" namespace privacy_sandbox::bidding_auction_servers { diff --git a/services/auction_service/reporting/noiser_and_bucketer_test.cc b/services/auction_service/reporting/noiser_and_bucketer_test.cc index e65a0409..5d75cc1b 100644 --- a/services/auction_service/reporting/noiser_and_bucketer_test.cc +++ b/services/auction_service/reporting/noiser_and_bucketer_test.cc @@ -64,7 +64,7 @@ TEST(RandGenerator, RandGeneratorIsUniform) { // way, so that's what we use in the test. 
constexpr uint64_t kTopOfRange = (std::numeric_limits<uint64_t>::max() / 4ULL) * 3ULL; - constexpr double kExpectedAverage = static_cast<double>(kTopOfRange / 2); + constexpr double kExpectedAverage = static_cast<double>(kTopOfRange) / 2.0; constexpr double kAllowedVariance = kExpectedAverage / 50.0; // +/- 2% constexpr int kMinAttempts = 1000; constexpr int kMaxAttempts = 1000000; diff --git a/services/auction_service/reporting/reporting_helper.cc b/services/auction_service/reporting/reporting_helper.cc index e3079d98..76797487 100644 --- a/services/auction_service/reporting/reporting_helper.cc +++ b/services/auction_service/reporting/reporting_helper.cc @@ -28,7 +28,7 @@ #include "services/common/util/post_auction_signals.h" #include "services/common/util/reporting_util.h" #include "services/common/util/request_response_constants.h" -#include "src/cpp/util/status_macro/status_macros.h" +#include "src/util/status_macro/status_macros.h" namespace privacy_sandbox::bidding_auction_servers { diff --git a/services/auction_service/reporting/reporting_helper.h b/services/auction_service/reporting/reporting_helper.h index 7f4f9bf5..b53de6aa 100644 --- a/services/auction_service/reporting/reporting_helper.h +++ b/services/auction_service/reporting/reporting_helper.h @@ -26,8 +26,8 @@ #include "services/auction_service/reporting/reporting_response.h" #include "services/common/clients/code_dispatcher/v8_dispatcher.h" #include "services/common/util/post_auction_signals.h" -#include "src/cpp/logger/request_context_impl.h" -#include "src/cpp/util/status_macro/status_util.h" +#include "src/logger/request_context_impl.h" +#include "src/util/status_macro/status_util.h" namespace privacy_sandbox::bidding_auction_servers { @@ -123,7 +123,7 @@ struct ReportingDispatchRequestData { absl::string_view egress_features; }; -// Device signals passed as input to reportResult and reportWin +// Bid metadata passed as input to reportResult and reportWin struct SellerReportingMetadata { std::string top_window_hostname; std::string top_level_seller; diff --git a/services/auction_service/reporting/reporting_helper_test.cc b/services/auction_service/reporting/reporting_helper_test.cc index 38a7ef73..6f24fe29 100644 --- a/services/auction_service/reporting/reporting_helper_test.cc +++ b/services/auction_service/reporting/reporting_helper_test.cc @@ -27,7 +27,7 @@ #include "services/common/util/json_util.h" #include "services/common/util/reporting_util.h" #include "services/common/util/request_response_constants.h" -#include "src/cpp/util/status_macro/status_macros.h" +#include "src/util/status_macro/status_macros.h" namespace privacy_sandbox::bidding_auction_servers { diff --git a/services/auction_service/score_ads_reactor.cc b/services/auction_service/score_ads_reactor.cc index bd281311..f9a51f4d 100644 --- a/services/auction_service/score_ads_reactor.cc +++ b/services/auction_service/score_ads_reactor.cc @@ -34,8 +34,8 @@ #include "services/common/util/auction_scope_util.h" #include "services/common/util/json_util.h" #include "services/common/util/request_response_constants.h" -#include "src/cpp/util/status_macro/status_macros.h" -#include "src/cpp/util/status_macro/status_util.h" +#include "src/util/status_macro/status_macros.h" +#include "src/util/status_macro/status_util.h" namespace privacy_sandbox::bidding_auction_servers { namespace { @@ -112,7 +112,7 @@ bool IsIncomingBidInSellerCurrencyIllegallyModified( // Then check that the incomingBidInSellerCurrency was set (non-zero).
(incoming_bid_in_seller_currency > kCurrencyFloatComparisonEpsilon) && // Only now do we check if it was modified. - (fabs(incoming_bid_in_seller_currency - buyer_bid) > + (fabsf(incoming_bid_in_seller_currency - buyer_bid) > kCurrencyFloatComparisonEpsilon); } @@ -215,29 +215,130 @@ absl::btree_map<std::string, std::string> ScoreAdsReactor::GetLoggingContext( {kSellerDebugId, log_context.adtech_debug_id()}}; } +absl::Status ScoreAdsReactor::PopulateTopLevelAuctionDispatchRequests( + bool enable_debug_reporting, + const std::shared_ptr<std::string>& auction_config, + google::protobuf::RepeatedPtrField<AuctionResult>& + component_auction_results) { + absl::string_view generation_id; + PS_VLOG(8, log_context_) << __func__; + for (auto& auction_result : component_auction_results) { + if (auction_result.is_chaff() || + auction_result.auction_params().component_seller().empty()) { + continue; + } + if (generation_id.empty()) { + generation_id = + auction_result.auction_params().ciphertext_generation_id(); + } else if (generation_id != + auction_result.auction_params().ciphertext_generation_id()) { + // Ignore auction result for different generation id. + return absl::Status( + absl::StatusCode::kInvalidArgument, + "Mismatched generation ids in component auction results."); + } + + auto dispatch_request = BuildScoreAdRequest( + auction_result.ad_render_url(), auction_result.ad_metadata(), + /*scoring_signals = */ "{}", auction_result.bid(), auction_config, + MakeBidMetadataForTopLevelAuction( + raw_request_.publisher_hostname(), + auction_result.interest_group_owner(), + auction_result.ad_render_url(), + auction_result.ad_component_render_urls(), + auction_result.auction_params().component_seller(), + auction_result.bid_currency()), + log_context_, enable_adtech_code_logging_, enable_debug_reporting); + if (!dispatch_request.ok()) { + PS_VLOG(2, log_context_) + << "Failed to create scoring request for protected audience: " + << dispatch_request.status(); + continue; + } + + // Map all fields from a component auction result to a + // AdWithBidMetadata used in this reactor. This way all the parsing + // and result handling logic for single seller auctions can be + // re-used for top-level auctions. + auto [unused_it, inserted] = + ad_data_.emplace(dispatch_request->id, + MapAuctionResultToAdWithBidMetadata(auction_result)); + if (!inserted) { + PS_VLOG(2, log_context_) << "Protected Audience ScoreAd Request id " "conflict detected: " + << dispatch_request->id; + continue; + } + + dispatch_request->tags[kRomaTimeoutMs] = roma_timeout_ms_; + dispatch_requests_.push_back(*std::move(dispatch_request)); + } + return absl::OkStatus(); +}
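One invariant in the loop above is worth calling out: every non-chaff component auction result must carry the ciphertext generation id pinned by the first valid result, otherwise the whole request is rejected. Restated on its own (this mirrors the code above rather than adding a new API):

  absl::string_view generation_id;  // empty until the first valid result
  for (const auto& result : component_auction_results) {
    const auto& id = result.auction_params().ciphertext_generation_id();
    if (generation_id.empty()) {
      generation_id = id;  // pin to the first result's generation id
    } else if (generation_id != id) {
      return absl::InvalidArgumentError(
          "Mismatched generation ids in component auction results.");
    }
  }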
+ +void ScoreAdsReactor::PopulateProtectedAudienceDispatchRequests( + bool enable_debug_reporting, + const absl::flat_hash_map<std::string, rapidjson::StringBuffer>& + scoring_signals, + const std::shared_ptr<std::string>& auction_config, + google::protobuf::RepeatedPtrField<AdWithBidMetadata>& ads) { + while (!ads.empty()) { + std::unique_ptr<AdWithBidMetadata> ad(ads.ReleaseLast()); + if (scoring_signals.contains(ad->render())) { + auto dispatch_request = BuildScoreAdRequest( + *ad, auction_config, scoring_signals, enable_debug_reporting, + log_context_, enable_adtech_code_logging_, + MakeBidMetadata(raw_request_.publisher_hostname(), + ad->interest_group_owner(), ad->render(), + ad->ad_components(), raw_request_.top_level_seller(), + ad->bid_currency())); + if (!dispatch_request.ok()) { + PS_VLOG(2, log_context_) + << "Failed to create scoring request for protected audience: " + << dispatch_request.status(); + continue; + } + auto [unused_it, inserted] = + ad_data_.emplace(dispatch_request->id, std::move(ad)); + if (!inserted) { + PS_VLOG(2, log_context_) << "Protected Audience ScoreAd Request id " "conflict detected: " + << dispatch_request->id; + continue; + } + + dispatch_request->tags[kRomaTimeoutMs] = roma_timeout_ms_; + dispatch_requests_.push_back(*std::move(dispatch_request)); + } + } +} + void ScoreAdsReactor::MayPopulateProtectedAppSignalsDispatchRequests( bool enable_debug_reporting, const absl::flat_hash_map<std::string, rapidjson::StringBuffer>& scoring_signals, - std::shared_ptr<std::string> auction_config, + const std::shared_ptr<std::string>& auction_config, RepeatedPtrField<ProtectedAppSignalsAdWithBidMetadata>& protected_app_signals_ad_bids) { PS_VLOG(8, log_context_) << __func__; while (!protected_app_signals_ad_bids.empty()) { - std::unique_ptr<ProtectedAppSignalsAdWithBidMetadata> ad( + std::unique_ptr<ProtectedAppSignalsAdWithBidMetadata> pas_ad_with_bid( protected_app_signals_ad_bids.ReleaseLast()); - if (!scoring_signals.contains(ad->render())) { + if (!scoring_signals.contains(pas_ad_with_bid->render())) { PS_VLOG(5, log_context_) << "Skipping protected app signals ad since render " "URL is not found in the scoring signals: " - << ad->render(); + << pas_ad_with_bid->render(); continue; } auto dispatch_request = BuildScoreAdRequest( - *ad, auction_config, scoring_signals, enable_debug_reporting, - log_context_, enable_adtech_code_logging_, - /*device_signals=*/"\"\""); + *pas_ad_with_bid, auction_config, scoring_signals, + enable_debug_reporting, log_context_, enable_adtech_code_logging_, + MakeBidMetadata( + raw_request_.publisher_hostname(), pas_ad_with_bid->owner(), + pas_ad_with_bid->render(), GetEmptyAdComponentRenderUrls(), + raw_request_.top_level_seller(), pas_ad_with_bid->bid_currency())); if (!dispatch_request.ok()) { PS_VLOG(2, log_context_) << "Failed to create scoring request for protected app signals ad: " @@ -246,7 +347,7 @@ void ScoreAdsReactor::MayPopulateProtectedAppSignalsDispatchRequests( } auto [unused_it, inserted] = protected_app_signals_ad_data_.emplace( - dispatch_request->id, std::move(ad)); + dispatch_request->id, std::move(pas_ad_with_bid)); if (!inserted) { PS_VLOG(2, log_context_) << "ProtectedAppSignals ScoreAd Request id conflict detected: " @@ -260,17 +361,11 @@ } void
ScoreAdsReactor::Execute() { - benchmarking_logger_->BuildInputBegin(); - PS_VLOG(kEncrypted, log_context_) << "Encrypted ScoreAdsRequest:\n" << request_->ShortDebugString(); PS_VLOG(kPlain, log_context_) << "ScoreAdsRawRequest:\n" << raw_request_.DebugString(); - auto ads = raw_request_.ad_bids(); - auto protected_app_signals_ad_bids = - raw_request_.protected_app_signals_ad_bids(); - DCHECK(raw_request_.protected_app_signals_ad_bids().empty() || enable_protected_app_signals_) << "Found protected app signals in score ads request even when feature " @@ -288,54 +383,49 @@ void ScoreAdsReactor::Execute() { return; } - absl::StatusOr<absl::flat_hash_map<std::string, rapidjson::StringBuffer>> - scoring_signals = BuildTrustedScoringSignals(raw_request_, log_context_); - - if (!scoring_signals.ok()) { - PS_VLOG(1, log_context_) << "No scoring signals found, finishing RPC: " - << scoring_signals.status(); - FinishWithStatus(server_common::FromAbslStatus(scoring_signals.status())); - return; - } - + benchmarking_logger_->BuildInputBegin(); std::shared_ptr<std::string> auction_config = BuildAuctionConfig(raw_request_); bool enable_debug_reporting = enable_seller_debug_url_generation_ && raw_request_.enable_debug_reporting(); - benchmarking_logger_->BuildInputEnd(); - while (!ads.empty()) { - std::unique_ptr<AdWithBidMetadata> ad(ads.ReleaseLast()); - if (scoring_signals->contains(ad->render())) { - auto dispatch_request = BuildScoreAdRequest( - *ad, auction_config, *scoring_signals, enable_debug_reporting, - log_context_, enable_adtech_code_logging_, - MakeDeviceSignals( - raw_request_.publisher_hostname(), ad->interest_group_owner(), - ad->render(), ad->ad_components(), - raw_request_.top_level_seller(), ad->bid_currency())); - if (!dispatch_request.ok()) { - PS_VLOG(2, log_context_) - << "Failed to create scoring request for protected audience: " - << dispatch_request.status(); - continue; - } - auto [unused_it, inserted] = - ad_data_.emplace(dispatch_request->id, std::move(ad)); - if (!inserted) { - PS_VLOG(2, log_context_) << "Protected Audience ScoreAd Request id " - "conflict detected: " - << dispatch_request->id; - continue; - } - - dispatch_request->tags[kRomaTimeoutMs] = roma_timeout_ms_; - dispatch_requests_.push_back(*std::move(dispatch_request)); + if (auction_scope_ == AuctionScope::AUCTION_SCOPE_SERVER_TOP_LEVEL_SELLER) { + if (raw_request_.component_auction_results_size() == 0) { + // Internal error so that this is not propagated back to ad server. + FinishWithStatus(::grpc::Status(grpc::StatusCode::INTERNAL, + kNoValidComponentAuctions)); + return; } + absl::Status top_level_req_status = PopulateTopLevelAuctionDispatchRequests( + enable_debug_reporting, auction_config, + *raw_request_.mutable_component_auction_results()); + if (!top_level_req_status.ok()) { + // Invalid Argument error so that this is propagated back to ad server.
+ FinishWithStatus(::grpc::Status(grpc::StatusCode::INVALID_ARGUMENT, + top_level_req_status.message().data())); + return; + } + } else { + auto ads = raw_request_.ad_bids(); + auto protected_app_signals_ad_bids = + raw_request_.protected_app_signals_ad_bids(); + absl::StatusOr<absl::flat_hash_map<std::string, rapidjson::StringBuffer>> + scoring_signals = + BuildTrustedScoringSignals(raw_request_, log_context_); + + if (!scoring_signals.ok()) { + PS_VLOG(1, log_context_) << "No scoring signals found, finishing RPC: " + << scoring_signals.status(); + FinishWithStatus(server_common::FromAbslStatus(scoring_signals.status())); + return; + } + PopulateProtectedAudienceDispatchRequests( + enable_debug_reporting, *scoring_signals, auction_config, ads); + MayPopulateProtectedAppSignalsDispatchRequests( + enable_debug_reporting, *scoring_signals, auction_config, + protected_app_signals_ad_bids); } - MayPopulateProtectedAppSignalsDispatchRequests( - enable_debug_reporting, *scoring_signals, auction_config, - protected_app_signals_ad_bids); + benchmarking_logger_->BuildInputEnd(); if (dispatch_requests_.empty()) { // Internal error so that this is not propagated back to ad server. @@ -371,6 +461,16 @@ void ScoreAdsReactor::Execute() void ScoreAdsReactor::PerformReporting( const ScoreAdsResponse::AdScore& winning_ad_score, absl::string_view id) { + if (auction_scope_ == AuctionScope::AUCTION_SCOPE_SERVER_TOP_LEVEL_SELLER) { + // TODO: Implement reporting for top level auction + // The following properties will not be available: + // raw_request_.per_buyer_signals() + // ad->join_count() + // ad->recency() + // ad->modeling_signals(), + // ad->ad_cost() + return; + } if (auto ad_it = ad_data_.find(id); ad_it != ad_data_.end()) { const auto& ad = ad_it->second; BuyerReportingMetadata buyer_reporting_metadata; @@ -417,12 +517,14 @@ void ScoreAdsReactor::PerformReporting( } } -void ScoreAdsReactor::HandleScoredAd( - int index, float buyer_bid, absl::string_view ad_with_bid_currency, - absl::string_view interest_group_name, - absl::string_view interest_group_owner, - const rapidjson::Document& response_json, AdType ad_type, - ScoreAdsResponse::AdScore& score_ads_response, ScoringData& scoring_data) { +void ScoreAdsReactor::HandleScoredAd(int index, float buyer_bid, + absl::string_view ad_with_bid_currency, + absl::string_view interest_group_name, + absl::string_view interest_group_owner, + const rapidjson::Document& response_json, + AdType ad_type, + ScoreAdsResponse::AdScore& ad_score, + ScoringData& scoring_data) { // Get ad rejection reason before updating the scoring data. std::optional<ScoreAdsResponse::AdScore::AdRejectionReason> ad_rejection_reason; @@ -432,83 +534,84 @@ void ScoreAdsReactor::HandleScoredAd( response_json, interest_group_owner, interest_group_name, log_context_); } + ad_score.set_interest_group_name(interest_group_name); + ad_score.set_interest_group_owner(interest_group_owner); + ad_score.set_ad_type(ad_type); + ad_score.set_buyer_bid(buyer_bid); + // Used for debug reporting. + // Should include bids rejected by bid currency mismatch + // and those not allowed in component auctions. + ad_scores_.push_back(std::make_unique<ScoreAdsResponse::AdScore>(ad_score)); + if (CheckAndUpdateModifiedBid(auction_scope_, buyer_bid, ad_with_bid_currency, - &score_ads_response)) { + &ad_score)) { PS_VLOG(1, log_context_) << "Setting modified bid value to original buyer bid value (and " "currency) as the " "modified bid is not set.
For interest group: " - << interest_group_name << ": " << score_ads_response.DebugString(); + << interest_group_name << ": " << ad_score.DebugString(); } if (IsBidCurrencyMismatched(auction_scope_, raw_request_.seller_currency(), - score_ads_response.bid(), - score_ads_response.bid_currency())) { + ad_score.bid(), ad_score.bid_currency())) { // Ignore currency-mismatched winner. // TODO(b/311234165): Add metric for ads rejected for mismatched // currency. PS_VLOG(1, log_context_) << "Skipping component bid as it does not match seller_currency: " - << interest_group_name << ": " << score_ads_response.DebugString(); + << interest_group_name << ": " << ad_score.DebugString(); return; } if (IsIncomingBidInSellerCurrencyIllegallyModified( raw_request_.seller_currency(), ad_with_bid_currency, - score_ads_response.incoming_bid_in_seller_currency(), buyer_bid)) { + ad_score.incoming_bid_in_seller_currency(), buyer_bid)) { // Ignore illegally modified AdScore. // TODO(b/311234165): Add metric for ads rejected for mismatched currency. PS_VLOG(1, log_context_) << "Skipping ad_score as its incomingBidInSellerCurrency was modified " "when it should not have been: " - << interest_group_name << ": " << score_ads_response.DebugString(); + << interest_group_name << ": " << ad_score.DebugString(); return; } - score_ads_response.set_interest_group_name(interest_group_name); - score_ads_response.set_interest_group_owner(interest_group_owner); - score_ads_response.set_ad_type(ad_type); - score_ads_response.set_buyer_bid(buyer_bid); - const bool is_valid_ad = !ad_rejection_reason.has_value() || ad_rejection_reason->rejection_reason() == SellerRejectionReason::SELLER_REJECTION_REASON_NOT_AVAILABLE; // Consider only ads that are not explicitly rejected and the ones that have // a positive desirability score. - if (is_valid_ad && score_ads_response.desirability() > + if (is_valid_ad && ad_score.desirability() > scoring_data.desirability_of_most_desirable_ad) { if (auction_scope_ == AuctionScope::AUCTION_SCOPE_DEVICE_COMPONENT_MULTI_SELLER && - !score_ads_response.allow_component_auction()) { + !ad_score.allow_component_auction()) { // Ignore component level winner if it is not allowed to // participate in the top level auction. // TODO(b/311234165): Add metric for rejected component ads. PS_VLOG(1, log_context_) << "Skipping component bid as it is not allowed for " - << interest_group_name << ": " << score_ads_response.DebugString(); + << interest_group_name << ": " << ad_score.DebugString(); return; } - scoring_data.UpdateWinner(index, score_ads_response); + scoring_data.UpdateWinner(index, ad_score); } - scoring_data.score_ad_map[score_ads_response.desirability()].push_back(index); - ad_scores_.push_back( - std::make_unique(score_ads_response)); - if (is_valid_ad && score_ads_response.desirability() > 0) { + if (is_valid_ad && ad_score.desirability() > 0) { // Consider scored ad as valid (i.e. not rejected) when it has a positive // desirability and either: // 1. scoreAd returned a number. // 2. scoreAd returned an object but the reject reason was not populated. // 3. scoreAd returned an object and the reject reason was explicitly set to // "not-available". + // Only consider valid bids for populating other highest bids. + scoring_data.score_ad_map[ad_score.desirability()].push_back(index); return; } // Populate a default rejection reason if needed when we didn't get a positive // desirability. 
- if (score_ads_response.desirability() <= 0 && - !ad_rejection_reason.has_value()) { + if (ad_score.desirability() <= 0 && !ad_rejection_reason.has_value()) { PS_VLOG(5, log_context_) << "Non-positive desirability for ad and no rejection reason populated " "by seller, providing a default rejection reason"; @@ -633,6 +736,9 @@ void ScoreAdsReactor::PopulateHighestScoringOtherBidsData( const absl::flat_hash_map<float, std::list<int>>& score_ad_map, const std::vector<absl::StatusOr<DispatchResponse>>& responses, ScoreAdsResponse::AdScore& winning_ad_score) { + if (auction_scope_ == AUCTION_SCOPE_SERVER_TOP_LEVEL_SELLER) { + return; + } std::vector<float> scores_list; // Logic to calculate the list of highest scoring other bids and // corresponding IG owners. @@ -860,6 +966,9 @@ void ScoreAdsReactor::ReportingCallback( void ScoreAdsReactor::PerformDebugReporting( const std::optional<ScoreAdsResponse::AdScore>& winning_ad_score) { + if (auction_scope_ == AuctionScope::AUCTION_SCOPE_SERVER_TOP_LEVEL_SELLER) { + return; + } PostAuctionSignals post_auction_signals = GeneratePostAuctionSignals(winning_ad_score); for (const auto& ad_score : ad_scores_) { @@ -882,6 +991,7 @@ void ScoreAdsReactor::PerformDebugReporting( absl::AnyInvocable<void(absl::StatusOr<absl::string_view>)> done_callback; if (PS_VLOG_IS_ON(5)) { done_callback = [ig_owner, ig_name]( + // NOLINTNEXTLINE absl::StatusOr<absl::string_view> result) mutable { if (result.ok()) { PS_VLOG(5) << "Performed debug reporting for:" << ig_owner @@ -893,6 +1003,7 @@ void ScoreAdsReactor::PerformDebugReporting( } }; } else { + // NOLINTNEXTLINE done_callback = [](absl::StatusOr<absl::string_view> result) {}; } const HTTPRequest http_request = CreateDebugReportingHttpRequest( diff --git a/services/auction_service/score_ads_reactor.h b/services/auction_service/score_ads_reactor.h index 04263341..5c40757a 100644 --- a/services/auction_service/score_ads_reactor.h +++ b/services/auction_service/score_ads_reactor.h @@ -39,8 +39,8 @@ #include "services/common/encryption/crypto_client_wrapper_interface.h" #include "services/common/metric/server_definition.h" #include "services/common/reporters/async_reporter.h" -#include "src/cpp/encryption/key_fetcher/interface/key_fetcher_manager_interface.h" -#include "src/cpp/logger/request_context_impl.h" +#include "src/encryption/key_fetcher/interface/key_fetcher_manager_interface.h" +#include "src/logger/request_context_impl.h" namespace privacy_sandbox::bidding_auction_servers { @@ -49,6 +49,8 @@ inline constexpr char kDeviceComponentAuctionWithPAS[] = "Device Component Auction"; inline constexpr char kNoAdsWithValidScoringSignals[] = "No ads with valid scoring signals."; +inline constexpr char kNoValidComponentAuctions[] = + "No component auction results in request."; // An aggregate of the data we track when scoring all the ads. struct ScoringData { @@ -91,7 +93,7 @@ class ScoreAdsReactor const AuctionServiceRuntimeConfig& runtime_config); // Initiates the asynchronous execution of the ScoreAdsRequest.
- virtual void Execute(); + void Execute() override; private: using AdWithBidMetadata = @@ -144,7 +146,7 @@ void DispatchReportingRequestForPA( const ScoreAdsResponse::AdScore& winning_ad_score, - std::shared_ptr<std::string> auction_config, + const std::shared_ptr<std::string>& auction_config, const BuyerReportingMetadata& buyer_reporting_metadata) { ReportingDispatchRequestData dispatch_request_data = { .handler_name = kReportingDispatchHandlerFunctionName, @@ -171,7 +173,7 @@ void DispatchReportingRequestForPAS( const ScoreAdsResponse::AdScore& winning_ad_score, - std::shared_ptr<std::string> auction_config, + const std::shared_ptr<std::string>& auction_config, const BuyerReportingMetadata& buyer_reporting_metadata, std::string_view egress_features) { DispatchReportingRequest( @@ -225,11 +227,32 @@ class ScoreAdsReactor void ReportingCallback( const std::vector<absl::StatusOr<DispatchResponse>>& responses); + // Creates and populates dispatch requests using AdWithBidMetadata objects + // in the input proto for single seller and component auctions. + void PopulateProtectedAudienceDispatchRequests( + bool enable_debug_reporting, + const absl::flat_hash_map<std::string, rapidjson::StringBuffer>& + scoring_signals, + const std::shared_ptr<std::string>& auction_config, + google::protobuf::RepeatedPtrField<AdWithBidMetadata>& ads); + + // Creates and populates dispatch requests using ComponentAuctionResult + // objects in the input proto for top-level auctions. + absl::Status PopulateTopLevelAuctionDispatchRequests( + bool enable_debug_reporting, + const std::shared_ptr<std::string>& auction_config, + google::protobuf::RepeatedPtrField<AuctionResult>& + component_auction_results); + + // Creates and populates dispatch requests using + // ProtectedAppSignalsAdWithBidMetadata objects + // in the input proto for single seller auctions + // if the feature flag is enabled. void MayPopulateProtectedAppSignalsDispatchRequests( bool enable_debug_reporting, const absl::flat_hash_map<std::string, rapidjson::StringBuffer>& scoring_signals, - std::shared_ptr<std::string> auction_config, + const std::shared_ptr<std::string>& auction_config, google::protobuf::RepeatedPtrField<ProtectedAppSignalsAdWithBidMetadata>& protected_app_signals_ad_bids); @@ -282,6 +305,13 @@ class ScoreAdsReactor // Impacts the creation of scoreAd input params and // parsing of scoreAd output.
AuctionScope auction_scope_; + + google::protobuf::RepeatedPtrField<std::string> + GetEmptyAdComponentRenderUrls() { + static google::protobuf::RepeatedPtrField<std::string> + empty_ad_component_render_urls; + return empty_ad_component_render_urls; + } }; } // namespace privacy_sandbox::bidding_auction_servers #endif // SERVICES_AUCTION_SERVICE_SCORE_ADS_REACTOR_H_ diff --git a/services/auction_service/score_ads_reactor_test.cc b/services/auction_service/score_ads_reactor_test.cc index 3db9f29a..e1d42a75 100644 --- a/services/auction_service/score_ads_reactor_test.cc +++ b/services/auction_service/score_ads_reactor_test.cc @@ -40,14 +40,12 @@ #include "services/common/metric/server_definition.h" #include "services/common/test/mocks.h" #include "services/common/test/random.h" -#include "src/cpp/encryption/key_fetcher/interface/key_fetcher_manager_interface.h" -#include "src/cpp/encryption/key_fetcher/mock/mock_key_fetcher_manager.h" +#include "src/encryption/key_fetcher/interface/key_fetcher_manager_interface.h" +#include "src/encryption/key_fetcher/mock/mock_key_fetcher_manager.h" namespace privacy_sandbox::bidding_auction_servers { namespace { -constexpr char kSecret[] = "secret"; -constexpr char kKeyId[] = "keyid"; constexpr char kTestReportingResponseJson[] = R"({"reportResultResponse":{"reportResultUrl":"http://reportResultUrl.com","signalsForWinner":"{testKey:testValue}","sendReportToInvoked":true,"registerAdBeaconInvoked":true,"interactionReportingUrls":{"click":"http://event.com"}},"sellerLogs":["testLog"]})"; constexpr char kTestComponentReportingWinResponseJson[] = @@ -292,31 +290,9 @@ constexpr char kTestProtectedAppScoringSignals[] = R"json( )json"; constexpr char kTestPublisherHostname[] = "publisher_hostname"; -absl::Status FakeExecute(std::vector<DispatchRequest>& batch, - BatchDispatchDoneCallback done_callback, - std::vector<std::string> json_ad_scores, - const bool call_wrapper_method = false, - const bool enable_adtech_code_logging = false) { - std::vector<absl::StatusOr<DispatchResponse>> responses; - auto json_ad_score_itr = json_ad_scores.begin(); - - for (const auto& request : batch) { - if (std::strcmp(request.handler_name.c_str(), - kReportingDispatchHandlerFunctionName) != 0) { - EXPECT_EQ(request.handler_name, "scoreAdEntryFunction"); - } - DispatchResponse dispatch_response = {}; - dispatch_response.resp = *json_ad_score_itr++; - dispatch_response.id = request.id; - responses.emplace_back(dispatch_response); - } - done_callback(responses); - return absl::OkStatus(); -} - namespace { -void BuildRawRequest(const std::vector<AdWithBidMetadata> ads_with_bids_to_add, +void BuildRawRequest(std::vector<AdWithBidMetadata> ads_with_bids_to_add, absl::string_view seller_signals, absl::string_view auction_signals, absl::string_view scoring_signals, @@ -353,10 +329,10 @@ void BuildRawRequestForComponentAuction( RawRequest& output, const bool enable_debug_reporting = false, absl::string_view seller_currency = "") { output.set_top_level_seller(kTestTopLevelSeller); - BuildRawRequest(ads_with_bids_to_add, seller_signals, auction_signals, - scoring_signals, publisher_hostname, output, - enable_debug_reporting, /*enable_adtech_code_logging*/ false, - /*top_level_seller*/ "", seller_currency); + BuildRawRequest(std::move(ads_with_bids_to_add), seller_signals, + auction_signals, scoring_signals, publisher_hostname, output, + enable_debug_reporting, /*enable_adtech_code_logging=*/false, + /*top_level_seller=*/"", seller_currency); } } // namespace @@ -457,44 +433,15 @@ void SetupMockCryptoClientWrapper(Request request, class ScoreAdsReactorTest : public ::testing::Test { protected: - void SetUp() override {
- server_common::telemetry::TelemetryConfig config_proto; - config_proto.set_mode(server_common::telemetry::TelemetryConfig::PROD); - metric::MetricContextMap( - server_common::telemetry::BuildDependentConfig(config_proto)) - ->Get(&request_); - } + void SetUp() override {} ScoreAdsResponse ExecuteScoreAds( - RawRequest& raw_request, MockCodeDispatchClient& dispatcher, - AuctionServiceRuntimeConfig runtime_config, + const RawRequest& raw_request, MockCodeDispatchClient& dispatcher, + const AuctionServiceRuntimeConfig& runtime_config, bool enable_report_result_url_generation = false) { - ScoreAdsResponse response; - *request_.mutable_request_ciphertext() = raw_request.SerializeAsString(); - std::unique_ptr benchmarkingLogger = - std::make_unique(); - - // Mock Reporting Client. - std::unique_ptr async_reporter = - std::make_unique( - std::make_unique()); - MockCryptoClientWrapper crypto_client; - SetupMockCryptoClientWrapper(raw_request, crypto_client); - TrustedServersConfigClient config_client({}); - config_client.SetFlagForTest(kTrue, TEST_MODE); - auto key_fetcher_manager = CreateKeyFetcherManager( - config_client, /* public_key_fetcher= */ nullptr); - request_.set_key_id(kKeyId); - // The last parameter is a flag for whether to parse the Trusted Scoring - // Signals. It must be set to true for this test to pass. - ScoreAdsReactor reactor(dispatcher, &request_, &response, - std::move(benchmarkingLogger), - key_fetcher_manager.get(), &crypto_client, - async_reporter.get(), runtime_config); - reactor.Execute(); - return response; + ScoreAdsReactorTestHelper test_helper; + return test_helper.ExecuteScoreAds(raw_request, dispatcher, runtime_config, + enable_report_result_url_generation); } - - ScoreAdsRequest request_; }; TEST_F( @@ -564,9 +511,8 @@ TEST_F(ScoreAdsReactorTest, EXPECT_CALL(dispatcher, BatchExecute) .WillOnce([](std::vector& batch, BatchDispatchDoneCallback done_callback) { - for (auto request : batch) { - auto input = request.input; - CheckInputCorrectForFoo(input); + for (const auto& request : batch) { + CheckInputCorrectForFoo(request.input); } return absl::OkStatus(); }); @@ -585,9 +531,8 @@ TEST_F(ScoreAdsReactorTest, EXPECT_CALL(dispatcher, BatchExecute) .WillOnce([](std::vector& batch, BatchDispatchDoneCallback done_callback) { - for (auto request : batch) { - auto input = request.input; - CheckInputCorrectForBar(input); + for (const auto& request : batch) { + CheckInputCorrectForBar(request.input); } return absl::OkStatus(); }); @@ -606,7 +551,7 @@ TEST_F(ScoreAdsReactorTest, EXPECT_CALL(dispatcher, BatchExecute) .WillOnce([](std::vector& batch, BatchDispatchDoneCallback done_callback) { - for (auto request : batch) { + for (const auto& request : batch) { auto input = request.input; EXPECT_EQ(*input[0], R"JSON(["brisket","pulled_pork","smoked_chicken"])JSON"); @@ -638,7 +583,7 @@ TEST_F(ScoreAdsReactorTest, EXPECT_CALL(dispatcher, BatchExecute) .WillOnce([](std::vector& batch, BatchDispatchDoneCallback done_callback) { - for (auto request : batch) { + for (const auto& request : batch) { auto input = request.input; EXPECT_EQ(*input[0], R"JSON(["brisket","pulled_pork","smoked_chicken"])JSON"); @@ -740,7 +685,7 @@ TEST_F(ScoreAdsReactorTest, RawRequest raw_request_copy = raw_request; absl::flat_hash_map id_to_ad; - for (auto ad : raw_request_copy.ad_bids()) { + for (const auto& ad : raw_request_copy.ad_bids()) { // Make sure id is tracked to render urls to stay unique. 
id_to_ad.insert_or_assign(ad.render(), ad); } @@ -756,7 +701,7 @@ TEST_F(ScoreAdsReactorTest, // expected score and later compared to the output AdScore with the // matching score. std::vector score_logic; - for (auto request : batch) { + for (const auto& request : batch) { ABSL_LOG(INFO) << "Accessing id ad mapping for " << request.id; ++current_score; score_to_ad.insert_or_assign(current_score, id_to_ad.at(request.id)); @@ -835,7 +780,7 @@ TEST_F(ScoreAdsReactorTest, RawRequest raw_request_copy = raw_request; absl::flat_hash_map id_to_ad; - for (auto ad : raw_request_copy.ad_bids()) { + for (const auto& ad : raw_request_copy.ad_bids()) { // Make sure id is tracked to render urls to stay unique. id_to_ad.insert_or_assign(ad.render(), ad); } @@ -851,7 +796,7 @@ TEST_F(ScoreAdsReactorTest, // expected score and later compared to the output AdScore with the // matching score. std::vector json_ad_scores; - for (auto request : batch) { + for (const auto& request : batch) { ABSL_LOG(INFO) << "Accessing id ad mapping for " << request.id; score_to_ad.insert_or_assign( (request.id == foo_two.render()) ? low_score : high_score, @@ -930,7 +875,7 @@ TEST_F(ScoreAdsReactorTest, RawRequest raw_request_copy = raw_request; absl::flat_hash_map id_to_ad; - for (auto ad : raw_request_copy.ad_bids()) { + for (const auto& ad : raw_request_copy.ad_bids()) { // Make sure id is tracked to render urls to stay unique. id_to_ad.insert_or_assign(ad.render(), ad); } @@ -944,7 +889,7 @@ TEST_F(ScoreAdsReactorTest, // expected score and later compared to the output AdScore with the // matching score. std::vector score_logic; - for (auto request : batch) { + for (const auto& request : batch) { score_to_ad.insert_or_assign(current_score, id_to_ad.at(request.id)); score_logic.push_back(absl::Substitute( R"( @@ -1031,7 +976,7 @@ TEST_F(ScoreAdsReactorTest, /*enable_debug_reporting=*/false, kUsdIsoCode); RawRequest raw_request_copy = raw_request; absl::flat_hash_map id_to_ad; - for (auto ad : raw_request_copy.ad_bids()) { + for (const auto& ad : raw_request_copy.ad_bids()) { // Make sure id is tracked to render urls to stay unique. id_to_ad.insert_or_assign(ad.render(), ad); } @@ -1045,7 +990,7 @@ TEST_F(ScoreAdsReactorTest, // expected score and later compared to the output AdScore with the // matching score. std::vector score_logic; - for (auto request : batch) { + for (const auto& request : batch) { score_to_ad.insert_or_assign(current_score, id_to_ad.at(request.id)); score_logic.push_back(absl::Substitute( kComponentWithCurrencyAdScoreTemplate, current_score++, @@ -1101,7 +1046,7 @@ TEST_F(ScoreAdsReactorTest, /*enable_debug_reporting=*/false, kUsdIsoCode); RawRequest raw_request_copy = raw_request; absl::flat_hash_map id_to_ad; - for (auto ad : raw_request_copy.ad_bids()) { + for (const auto& ad : raw_request_copy.ad_bids()) { // Make sure id is tracked to render urls to stay unique. id_to_ad.insert_or_assign(ad.render(), ad); } @@ -1116,18 +1061,21 @@ TEST_F(ScoreAdsReactorTest, // expected score and later compared to the output AdScore with the // matching score. std::vector score_logic; - for (auto request : batch) { + for (const auto& request : batch) { score_to_ad.insert_or_assign(current_score, id_to_ad.at(request.id)); - score_logic.push_back(absl::Substitute( - kComponentNoCurrencyAdScoreTemplate, - /*desirability=*/current_score++, - // This is set to 0 so the buyer's (original) bid will be used. 
- /*(modified) bid=*/0, - // Since seller_currency is set to USD, and bar's currency is USD, - // this needs to match the original bid, else the AdScore - // will be rejected. - /*incomingBidInSellerCurrency=*/bar.bid(), - (allowComponentAuction) ? "true" : "false")); + score_logic.push_back( + absl::Substitute(kComponentNoCurrencyAdScoreTemplate, + // NOLINTNEXTLINE + /*desirability=*/current_score++, + // This is set to 0 so the buyer's (original) bid + // will be used. NOLINTNEXTLINE + /*(modified) bid=*/0, + // Since seller_currency is set to USD, and bar's + // currency is USD, this needs to match the + // original bid, else the AdScore will be + // rejected. NOLINTNEXTLINE + /*incomingBidInSellerCurrency=*/bar.bid(), + (allowComponentAuction) ? "true" : "false")); // Component auction and modified bid of 0 means // device will receive the original bid. // No explicitly set bid currency means that @@ -1186,7 +1134,7 @@ TEST_F(ScoreAdsReactorTest, BidRejectedForModifiedIncomingBidInSellerCurrency) { /*enable_debug_reporting=*/false, kUsdIsoCode); RawRequest raw_request_copy = raw_request; absl::flat_hash_map id_to_ad; - for (auto ad : raw_request_copy.ad_bids()) { + for (const auto& ad : raw_request_copy.ad_bids()) { // Make sure id is tracked to render urls to stay unique. id_to_ad.insert_or_assign(ad.render(), ad); } @@ -1201,10 +1149,11 @@ TEST_F(ScoreAdsReactorTest, BidRejectedForModifiedIncomingBidInSellerCurrency) { // expected score and later compared to the output AdScore with the // matching score. std::vector score_logic; - for (auto request : batch) { + for (const auto& request : batch) { score_to_ad.insert_or_assign(current_score, id_to_ad.at(request.id)); score_logic.push_back(absl::Substitute( kComponentNoCurrencyAdScoreTemplate, + // NOLINTNEXTLINE /*desirability=*/current_score++, // This is set to 0 so the buyer's (original) bid will be used. /*(modified) bid=*/0, @@ -1212,6 +1161,7 @@ TEST_F(ScoreAdsReactorTest, BidRejectedForModifiedIncomingBidInSellerCurrency) { // this needs to match the original bid, else the AdScore // will be rejected. // We are deliberately setting it wrong so it is rejected. + // NOLINTNEXTLINE /*incomingBidInSellerCurrency=*/bar.bid() + 1, (allowComponentAuction) ? "true" : "false")); // Component auction and modified bid of 0 means @@ -1249,7 +1199,7 @@ TEST_F(ScoreAdsReactorTest, /*enable_debug_reporting=*/false, kUsdIsoCode); RawRequest raw_request_copy = raw_request; absl::flat_hash_map id_to_ad; - for (auto ad : raw_request_copy.ad_bids()) { + for (const auto& ad : raw_request_copy.ad_bids()) { // Make sure id is tracked to render urls to stay unique. id_to_ad.insert_or_assign(ad.render(), ad); } @@ -1264,10 +1214,11 @@ TEST_F(ScoreAdsReactorTest, // expected score and later compared to the output AdScore with the // matching score. std::vector score_logic; - for (auto request : batch) { + for (const auto& request : batch) { score_to_ad.insert_or_assign(current_score, id_to_ad.at(request.id)); score_logic.push_back(absl::Substitute( kComponentWithCurrencyAdScoreTemplate, + // NOLINTNEXTLINE /*desirability=*/current_score++, // This is set to 0 so the buyer's (original) bid will be used. /*(modified) bid=*/0, @@ -1275,8 +1226,10 @@ TEST_F(ScoreAdsReactorTest, // but in this case the adScore will pass // because EUR will be IGNORED as it is the currency // of the modified bid, and the modified bid was not set. + // NOLINTNEXTLINE /*bidCurrency=*/kEuroIsoCode, // Since seller_currency is set, this must match the original bid. 
+ // NOLINTNEXTLINE /*incomingBidInSellerCurrency=*/bar.bid(), (allowComponentAuction) ? "true" : "false")); // Component auction and modified bid of 0 means @@ -1335,7 +1288,7 @@ TEST_F(ScoreAdsReactorTest, BidCurrencyCanBeModifiedWhenNoSellerCurrencySet) { /*enable_debug_reporting=*/false); RawRequest raw_request_copy = raw_request; absl::flat_hash_map id_to_ad; - for (auto ad : raw_request_copy.ad_bids()) { + for (const auto& ad : raw_request_copy.ad_bids()) { // Make sure id is tracked to render urls to stay unique. id_to_ad.insert_or_assign(ad.render(), ad); } @@ -1349,16 +1302,19 @@ TEST_F(ScoreAdsReactorTest, BidCurrencyCanBeModifiedWhenNoSellerCurrencySet) { // expected score and later compared to the output AdScore with the // matching score. std::vector score_logic; - for (auto request : batch) { + for (const auto& request : batch) { score_to_ad.insert_or_assign(current_score, id_to_ad.at(request.id)); score_logic.push_back(absl::Substitute( kComponentWithCurrencyAdScoreTemplate, + // NOLINTNEXTLINE /*desirability=*/current_score++, // This is set to a positive and nonzero value so it will be used. /*(modified) bid=*/1.689, // Setting this to EUR is fine as no seller_currency is set. + // NOLINTNEXTLINE /*bidCurrency=*/kEuroIsoCode, // Since seller_currency is not set, this is ignored. + // NOLINTNEXTLINE /*incomingBidInSellerCurrency=*/1.776, (allowComponentAuction) ? "true" : "false")); // Component auction and modified bid of 0 means @@ -1433,7 +1389,7 @@ TEST_F(ScoreAdsReactorTest, /*enable_debug_reporting=*/false, kEuroIsoCode); RawRequest raw_request_copy = raw_request; absl::flat_hash_map id_to_ad; - for (auto ad : raw_request_copy.ad_bids()) { + for (const auto& ad : raw_request_copy.ad_bids()) { // Make sure id is tracked to render urls to stay unique. id_to_ad.insert_or_assign(ad.render(), ad); } @@ -1447,16 +1403,18 @@ TEST_F(ScoreAdsReactorTest, // expected score and later compared to the output AdScore with the // matching score. std::vector score_logic; - for (auto request : batch) { + for (const auto& request : batch) { score_to_ad.insert_or_assign(current_score, id_to_ad.at(request.id)); score_logic.push_back(absl::Substitute( kComponentNoCurrencyAdScoreTemplate, + // NOLINTNEXTLINE /*desirability=*/current_score++, // This is set to 0 so the buyer's (original) bid will be used. /*(modified) bid=*/0, // Since seller_currency is set to EUR, and bar's currency is USD, // scoreAd() is responsible for setting this correctly, // but B&A can't verify that it is set correctly. + // NOLINTNEXTLINE /*incomingBidInSellerCurrency=*/1.776, (allowComponentAuction) ? "true" : "false")); // Component auction and modified bid of 0 means @@ -1491,7 +1449,7 @@ TEST_F(ScoreAdsReactorTest, ModifiedBidOnScoreRejectedForCurrencyMismatch) { /*enable_debug_reporting=*/false, kEuroIsoCode); RawRequest raw_request_copy = raw_request; absl::flat_hash_map id_to_ad; - for (auto ad : raw_request_copy.ad_bids()) { + for (const auto& ad : raw_request_copy.ad_bids()) { // Make sure id is tracked to render urls to stay unique. id_to_ad.insert_or_assign(ad.render(), ad); } @@ -1505,7 +1463,7 @@ TEST_F(ScoreAdsReactorTest, ModifiedBidOnScoreRejectedForCurrencyMismatch) { // expected score and later compared to the output AdScore with the // matching score. 
std::vector score_logic; - for (auto request : batch) { + for (const auto& request : batch) { score_to_ad.insert_or_assign(current_score, id_to_ad.at(request.id)); score_logic.push_back(absl::Substitute( kComponentWithCurrencyButNoIncomingAdScoreTemplate, @@ -1559,7 +1517,6 @@ TEST_F(ScoreAdsReactorTest, ParsesJsonAdMetadataInComponentAuction) { TEST_F(ScoreAdsReactorTest, CreatesDebugUrlsForAllAds) { MockCodeDispatchClient dispatcher; - int current_score = 0; bool allowComponentAuction = false; RawRequest raw_request; AdWithBidMetadata foo, bar; @@ -1575,30 +1532,32 @@ TEST_F(ScoreAdsReactorTest, CreatesDebugUrlsForAllAds) { /*seller_currency=*/kUsdIsoCode); EXPECT_CALL(dispatcher, BatchExecute) - .WillRepeatedly([¤t_score, &allowComponentAuction]( - std::vector& batch, - BatchDispatchDoneCallback done_callback) { - // Each original ad request (AdWithBidMetadata) is stored by its - // expected score and later compared to the output AdScore with the - // matching score. - std::vector score_logic; - for (auto request : batch) { - score_logic.push_back(absl::StrCat( - "{\"response\":" - "{\"desirability\": ", - current_score++, ", \"bid\": ", 1 + (std::rand() % 20), - ", \"allowComponentAuction\": ", - ((allowComponentAuction) ? "true" : "false"), - ", \"debugReportUrls\": {", - " \"auctionDebugLossUrl\" : " - "\"https://example-ssp.com/debugLoss\",", - " \"auctionDebugWinUrl\" : " - "\"https://example-ssp.com/debugWin\"", - "}}, \"logs\":[]}")); - } - return FakeExecute(batch, std::move(done_callback), - std::move(score_logic), true, true); - }); + .WillRepeatedly( + [&allowComponentAuction](std::vector& batch, + BatchDispatchDoneCallback done_callback) { + // Each original ad request (AdWithBidMetadata) is stored by its + // expected score and later compared to the output AdScore with the + // matching score. + std::vector score_logic; + score_logic.reserve(batch.size()); + for (int current_score = 0; current_score < batch.size(); + ++current_score) { + score_logic.push_back(absl::StrCat( + "{\"response\":" + "{\"desirability\": ", + current_score, ", \"bid\": ", 1 + (std::rand() % 20), + ", \"allowComponentAuction\": ", + ((allowComponentAuction) ? "true" : "false"), + ", \"debugReportUrls\": {", + " \"auctionDebugLossUrl\" : " + "\"https://example-ssp.com/debugLoss\",", + " \"auctionDebugWinUrl\" : " + "\"https://example-ssp.com/debugWin\"", + "}}, \"logs\":[]}")); + } + return FakeExecute(batch, std::move(done_callback), + std::move(score_logic), true, true); + }); AuctionServiceRuntimeConfig runtime_config = { .enable_seller_debug_url_generation = true}; auto response = ExecuteScoreAds(raw_request, dispatcher, runtime_config); @@ -2126,8 +2085,9 @@ TEST_F(ScoreAdsReactorTest, VerifyDecryptionEncryptionSuccessful) { kTestScoringSignals, kTestPublisherHostname, raw_request); ScoreAdsResponse response; - request_.set_key_id("key_id"); - request_.set_request_ciphertext("ciphertext"); + ScoreAdsRequest request; + request.set_key_id("key_id"); + request.set_request_ciphertext("ciphertext"); std::unique_ptr benchmarkingLogger = std::make_unique(); @@ -2154,11 +2114,13 @@ TEST_F(ScoreAdsReactorTest, VerifyDecryptionEncryptionSuccessful) { EXPECT_CALL(crypto_client, AeadEncrypt) .WillOnce(testing::Return(encrypt_response)); + SetupTelemetryCheck(request); + // Mock Reporting Client. 
std::unique_ptr<AsyncReporter> async_reporter = std::make_unique<AsyncReporter>( std::make_unique<MockHttpFetcherAsync>()); - ScoreAdsReactor reactor(dispatcher, &request_, &response, + ScoreAdsReactor reactor(dispatcher, &request, &response, std::move(benchmarkingLogger), &key_fetcher_manager, &crypto_client, async_reporter.get(), AuctionServiceRuntimeConfig()); @@ -2195,7 +2157,7 @@ TEST_F(ScoreAdsReactorTest, CaptureRejectionReasonsForRejectedAds) { // expected score and later compared to the output AdScore with the // matching score. std::vector<std::string> score_logic; - for (auto request : batch) { + for (const auto& request : batch) { std::string rejection_reason = id_to_rejection_reason.at(request.id); score_logic.push_back(absl::Substitute( R"( @@ -2340,8 +2302,7 @@ TEST_F(ScoreAdsReactorTest, SingleNumberResponseFromScoreAdIsValid) { } void BuildProtectedAppSignalsRawRequest( - const std::vector<ProtectedAppSignalsAdWithBidMetadata>& - ads_with_bids_to_add, + std::vector<ProtectedAppSignalsAdWithBidMetadata> ads_with_bids_to_add, const std::string& seller_signals, const std::string& auction_signals, const std::string& scoring_signals, const std::string& publisher_hostname, RawRequest& output, bool enable_debug_reporting = false) { @@ -2365,6 +2326,7 @@ ProtectedAppSignalsAdWithBidMetadata GetProtectedAppSignalsAdWithBidMetadata( MakeAnAd(render_url, "arbitraryMetadataKey", 2)); ad.set_render(render_url); ad.set_bid(bid); + ad.set_bid_currency(kUsdIsoCode); ad.set_owner(kTestProtectedAppSignalsAdOwner); ad.set_egress_features(kTestEgressFeatures); return ad; @@ -2372,47 +2334,15 @@ ProtectedAppSignalsAdWithBidMetadata GetProtectedAppSignalsAdWithBidMetadata( class ScoreAdsReactorProtectedAppSignalsTest : public ScoreAdsReactorTest { protected: - void SetUp() override { - server_common::telemetry::TelemetryConfig config_proto; - config_proto.set_mode(server_common::telemetry::TelemetryConfig::PROD); - metric::MetricContextMap<ScoreAdsRequest>( - server_common::telemetry::BuildDependentConfig(config_proto)) - ->Get(&request_); - - config_client_.SetFlagForTest(kTrue, TEST_MODE); - - runtime_config_.enable_protected_app_signals = true; - - key_fetcher_manager_ = - CreateKeyFetcherManager(config_client_, /*public_key_fetcher=*/nullptr); - } + void SetUp() override { runtime_config_.enable_protected_app_signals = true; } ScoreAdsResponse ExecuteScoreAds(const RawRequest& raw_request, MockCodeDispatchClient& dispatcher) { - SetupMockCryptoClientWrapper(raw_request, crypto_client_); - *request_.mutable_request_ciphertext() = raw_request.SerializeAsString(); - request_.set_key_id(kKeyId); - - ScoreAdsResponse response; - ScoreAdsReactor reactor(dispatcher, &request_, &response, - std::move(benchmarkingLogger_), - key_fetcher_manager_.get(), &crypto_client_, - async_reporter_.get(), runtime_config_); - reactor.Execute(); - return response; + return ScoreAdsReactorTest::ExecuteScoreAds(raw_request, dispatcher, + runtime_config_); } - ScoreAdsRequest request_; AuctionServiceRuntimeConfig runtime_config_; - TrustedServersConfigClient config_client_{{}}; - std::unique_ptr<AsyncReporter> async_reporter_ = - std::make_unique<AsyncReporter>( - std::make_unique<MockHttpFetcherAsync>()); - MockCryptoClientWrapper crypto_client_; - std::unique_ptr<ScoreAdsBenchmarkingLogger> benchmarkingLogger_ = - std::make_unique<ScoreAdsNoOpLogger>(); - std::unique_ptr<server_common::KeyFetcherManagerInterface> - key_fetcher_manager_; }; TEST_F(ScoreAdsReactorProtectedAppSignalsTest, @@ -2437,8 +2367,9 @@ TEST_F(ScoreAdsReactorProtectedAppSignalsTest, EXPECT_EQ(*req.input[static_cast<int>(ScoreAdArgs::kScoringSignals)], "{\"renderUrl\":{\"testAppAds.com/" "render_ad?id=bar\":[\"test_signal\"]}}"); - EXPECT_EQ(*req.input[static_cast<int>(ScoreAdArgs::kDeviceSignals)], - "\"\""); + EXPECT_EQ( +
*req.input[static_cast<int>(ScoreAdArgs::kBidMetadata)], + R"JSON({"interestGroupOwner":"https://PAS-Ad-Owner.com","topWindowHostname":"publisher_hostname","bidCurrency":"USD","renderUrl":"testAppAds.com/render_ad?id=bar"})JSON"); EXPECT_EQ( *req.input[static_cast<int>(ScoreAdArgs::kDirectFromSellerSignals)], "{}"); diff --git a/services/auction_service/score_ads_reactor_test_util.cc b/services/auction_service/score_ads_reactor_test_util.cc index a508dbfe..132d7032 100644 --- a/services/auction_service/score_ads_reactor_test_util.cc +++ b/services/auction_service/score_ads_reactor_test_util.cc @@ -14,10 +14,60 @@ #include "services/auction_service/score_ads_reactor_test_util.h" +#include "services/common/constants/common_service_flags.h" #include "services/common/test/random.h" namespace privacy_sandbox::bidding_auction_servers { +namespace { +template <typename Request> +void SetupMockCryptoClientWrapper(Request request, + MockCryptoClientWrapper& crypto_client) { + // Mock the HpkeEncrypt() call on the crypto client. + google::cmrt::sdk::crypto_service::v1::HpkeEncryptResponse + hpke_encrypt_response; + hpke_encrypt_response.set_secret(kSecret); + hpke_encrypt_response.mutable_encrypted_data()->set_key_id(kKeyId); + hpke_encrypt_response.mutable_encrypted_data()->set_ciphertext( + request.SerializeAsString()); + EXPECT_CALL(crypto_client, HpkeEncrypt) + .Times(testing::AnyNumber()) + .WillOnce(testing::Return(hpke_encrypt_response)); + + // Mock the HpkeDecrypt() call on the crypto_client. + google::cmrt::sdk::crypto_service::v1::HpkeDecryptResponse + hpke_decrypt_response; + hpke_decrypt_response.set_payload(request.SerializeAsString()); + hpke_decrypt_response.set_secret(kSecret); + EXPECT_CALL(crypto_client, HpkeDecrypt) + .Times(testing::AnyNumber()) + .WillRepeatedly(testing::Return(hpke_decrypt_response)); + + // Mock the AeadEncrypt() call on the crypto_client. This is used to encrypt + // the response coming back from the service. + + EXPECT_CALL(crypto_client, AeadEncrypt) + .Times(testing::AnyNumber()) + .WillOnce( + [](absl::string_view plaintext_payload, absl::string_view secret) { + google::cmrt::sdk::crypto_service::v1::AeadEncryptedData data; + data.set_ciphertext(plaintext_payload); + google::cmrt::sdk::crypto_service::v1::AeadEncryptResponse + aead_encrypt_response; + *aead_encrypt_response.mutable_encrypted_data() = std::move(data); + return aead_encrypt_response; + }); + + // Mock the AeadDecrypt() call on the crypto_client.
+ google::cmrt::sdk::crypto_service::v1::AeadDecryptResponse + aead_decrypt_response; + aead_decrypt_response.set_payload(request.SerializeAsString()); + EXPECT_CALL(crypto_client, AeadDecrypt) + .Times(testing::AnyNumber()) + .WillOnce(testing::Return(aead_decrypt_response)); +} +} // namespace + ProtectedAppSignalsAdWithBidMetadata GetProtectedAppSignalsAdWithBidMetadata( absl::string_view render_url, float bid) { ProtectedAppSignalsAdWithBidMetadata ad; @@ -29,4 +79,60 @@ ProtectedAppSignalsAdWithBidMetadata GetProtectedAppSignalsAdWithBidMetadata( return ad; } +void SetupTelemetryCheck(const ScoreAdsRequest& request) { + server_common::telemetry::TelemetryConfig config_proto; + config_proto.set_mode(server_common::telemetry::TelemetryConfig::PROD); + metric::MetricContextMap<ScoreAdsRequest>( + server_common::telemetry::BuildDependentConfig(config_proto)) + ->Get(&request); +} + +ScoreAdsReactorTestHelper::ScoreAdsReactorTestHelper() { + SetupTelemetryCheck(request_); + config_client_.SetFlagForTest(kTrue, TEST_MODE); + + key_fetcher_manager_ = + CreateKeyFetcherManager(config_client_, /*public_key_fetcher=*/nullptr); +} + +ScoreAdsResponse ScoreAdsReactorTestHelper::ExecuteScoreAds( + const ScoreAdsRequest::ScoreAdsRawRequest& raw_request, + MockCodeDispatchClient& dispatcher, + const AuctionServiceRuntimeConfig& runtime_config, + bool enable_report_result_url_generation) { + SetupMockCryptoClientWrapper(raw_request, crypto_client_); + *request_.mutable_request_ciphertext() = raw_request.SerializeAsString(); + request_.set_key_id(kKeyId); + + ScoreAdsResponse response; + ScoreAdsReactor reactor(dispatcher, &request_, &response, + std::move(benchmarkingLogger_), + key_fetcher_manager_.get(), &crypto_client_, + async_reporter.get(), runtime_config); + reactor.Execute(); + return response; +} + +absl::Status FakeExecute(std::vector<DispatchRequest>& batch, + BatchDispatchDoneCallback done_callback, + std::vector<std::string> json_ad_scores, + const bool call_wrapper_method, + const bool enable_adtech_code_logging) { + std::vector<absl::StatusOr<DispatchResponse>> responses; + auto json_ad_score_itr = json_ad_scores.begin(); + + for (const auto& request : batch) { + if (std::strcmp(request.handler_name.c_str(), + kReportingDispatchHandlerFunctionName) != 0) { + EXPECT_EQ(request.handler_name, "scoreAdEntryFunction"); + } + DispatchResponse dispatch_response = {}; + dispatch_response.resp = *json_ad_score_itr++; + dispatch_response.id = request.id; + responses.emplace_back(dispatch_response); + } + done_callback(responses); + return absl::OkStatus(); +} + } // namespace privacy_sandbox::bidding_auction_servers diff --git a/services/auction_service/score_ads_reactor_test_util.h b/services/auction_service/score_ads_reactor_test_util.h index f6d4e662..ab1a5ac0 100644 --- a/services/auction_service/score_ads_reactor_test_util.h +++ b/services/auction_service/score_ads_reactor_test_util.h @@ -17,20 +17,62 @@ #ifndef SERVICES_AUCTION_SERVICE_SCORE_ADS_REACTOR_TEST_UTIL_H_ #define SERVICES_AUCTION_SERVICE_SCORE_ADS_REACTOR_TEST_UTIL_H_ +#include +#include +#include +#include + #include "absl/strings/string_view.h" #include "api/bidding_auction_servers.pb.h" +#include "services/auction_service/benchmarking/score_ads_benchmarking_logger.h" +#include "services/auction_service/benchmarking/score_ads_no_op_logger.h" +#include "services/common/encryption/key_fetcher_factory.h" +#include "services/common/encryption/mock_crypto_client_wrapper.h" +#include "services/common/test/mocks.h" namespace privacy_sandbox::bidding_auction_servers { using ProtectedAppSignalsAdWithBidMetadata =
ScoreAdsRequest::ScoreAdsRawRequest::ProtectedAppSignalsAdWithBidMetadata; +constexpr char kKeyId[] = "keyid"; +constexpr char kSecret[] = "secret"; constexpr float kTestBid = 1.25; constexpr char kTestProtectedAppSignalsAdOwner[] = "https://PAS-Ad-Owner.com"; ProtectedAppSignalsAdWithBidMetadata GetProtectedAppSignalsAdWithBidMetadata( absl::string_view render_url, float bid = kTestBid); +void SetupTelemetryCheck(const ScoreAdsRequest& request); + +class ScoreAdsReactorTestHelper { + public: + ScoreAdsReactorTestHelper(); + + ScoreAdsResponse ExecuteScoreAds( + const ScoreAdsRequest::ScoreAdsRawRequest& raw_request, + MockCodeDispatchClient& dispatcher, + const AuctionServiceRuntimeConfig& runtime_config = + AuctionServiceRuntimeConfig(), + bool enable_report_result_url_generation = false); + ScoreAdsRequest request_; + TrustedServersConfigClient config_client_{{}}; + std::unique_ptr<AsyncReporter> async_reporter = + std::make_unique<AsyncReporter>( + std::make_unique<MockHttpFetcherAsync>()); + MockCryptoClientWrapper crypto_client_; + std::unique_ptr<ScoreAdsBenchmarkingLogger> benchmarkingLogger_ = + std::make_unique<ScoreAdsNoOpLogger>(); + std::unique_ptr<server_common::KeyFetcherManagerInterface> + key_fetcher_manager_; +}; + +absl::Status FakeExecute(std::vector<DispatchRequest>& batch, + BatchDispatchDoneCallback done_callback, + std::vector<std::string> json, + const bool call_wrapper_method = false, + const bool enable_adtech_code_logging = false); + } // namespace privacy_sandbox::bidding_auction_servers #endif // SERVICES_AUCTION_SERVICE_SCORE_ADS_REACTOR_TEST_UTIL_H_ diff --git a/services/auction_service/score_ads_reactor_top_level_auction_test.cc b/services/auction_service/score_ads_reactor_top_level_auction_test.cc new file mode 100644 index 00000000..6a662faa --- /dev/null +++ b/services/auction_service/score_ads_reactor_top_level_auction_test.cc @@ -0,0 +1,274 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "gmock/gmock.h" +#include "gtest/gtest.h" +#include "services/auction_service/auction_constants.h" +#include "services/auction_service/score_ads_reactor.h" +#include "services/auction_service/score_ads_reactor_test_util.h" +#include "services/common/test/mocks.h" +#include "services/common/test/random.h" + +namespace privacy_sandbox::bidding_auction_servers { +namespace { + +constexpr char kTestSellerSignals[] = R"json({"seller_signal": "test 1"})json"; +constexpr char kTestAuctionSignals[] = + R"json({"auction_signal": "test 2"})json"; +constexpr char kTestPublisherHostname[] = "publisher_hostname"; +constexpr char kTestTopLevelSeller[] = "top_level_seller"; +constexpr char kTestGenerationId[] = "test_generation_id"; + +using RawRequest = ScoreAdsRequest::ScoreAdsRawRequest; + +RawRequest BuildTopLevelAuctionRawRequest( + const std::vector<AuctionResult>& component_auctions, + const std::string& seller_signals, const std::string& auction_signals, + const std::string& publisher_hostname) { + RawRequest output; + for (int i = 0; i < component_auctions.size(); i++) { + // Copy to preserve original for test verification.
+ *output.mutable_component_auction_results()->Add() = component_auctions[i]; + } + output.set_seller_signals(seller_signals); + output.set_auction_signals(auction_signals); + output.clear_scoring_signals(); + output.set_publisher_hostname(publisher_hostname); + output.set_enable_debug_reporting(false); + return output; +} + +TEST(ScoreAdsReactorTopLevelAuctionTest, SendsComponentAuctionsToDispatcher) { + MockCodeDispatchClient dispatcher; + AuctionResult car_1 = + MakeARandomComponentAuctionResult(kTestGenerationId, kTestTopLevelSeller); + AuctionResult car_2 = + MakeARandomComponentAuctionResult(kTestGenerationId, kTestTopLevelSeller); + RawRequest raw_request = BuildTopLevelAuctionRawRequest( + {car_1, car_2}, kTestSellerSignals, kTestAuctionSignals, + kTestPublisherHostname); + EXPECT_EQ(raw_request.component_auction_results_size(), 2); + EXPECT_CALL(dispatcher, BatchExecute) + .WillOnce([&car_1, &car_2](std::vector& batch, + BatchDispatchDoneCallback done_callback) { + EXPECT_EQ(batch.size(), 2); + // Actual mapping of other fields from component auction + // to dispatch request handled by proto_utils. + EXPECT_EQ(batch.at(0).id, car_1.ad_render_url()); + EXPECT_EQ(batch.at(0).input.size(), 7); + EXPECT_EQ(batch.at(1).id, car_2.ad_render_url()); + EXPECT_EQ(batch.at(1).input.size(), 7); + return absl::OkStatus(); + }); + ScoreAdsReactorTestHelper test_helper; + test_helper.ExecuteScoreAds(raw_request, dispatcher); +} + +TEST(ScoreAdsReactorTopLevelAuctionTest, + DoesNotRunAuctionForMismatchedGenerationId) { + MockCodeDispatchClient dispatcher; + AuctionResult car_1 = MakeARandomComponentAuctionResult(MakeARandomString(), + kTestTopLevelSeller); + AuctionResult car_2 = MakeARandomComponentAuctionResult(MakeARandomString(), + kTestTopLevelSeller); + RawRequest raw_request = BuildTopLevelAuctionRawRequest( + {car_1, car_2}, kTestSellerSignals, kTestAuctionSignals, + kTestPublisherHostname); + EXPECT_EQ(raw_request.component_auction_results_size(), 2); + EXPECT_CALL(dispatcher, BatchExecute).Times(0); + ScoreAdsReactorTestHelper test_helper; + auto response = test_helper.ExecuteScoreAds(raw_request, dispatcher); + EXPECT_TRUE(response.response_ciphertext().empty()); +} + +TEST(ScoreAdsReactorTopLevelAuctionTest, ReturnsWinningAdFromDispatcher) { + MockCodeDispatchClient dispatcher; + AuctionResult car_1 = MakeARandomComponentAuctionResult(MakeARandomString(), + kTestTopLevelSeller); + RawRequest raw_request = BuildTopLevelAuctionRawRequest( + {car_1}, kTestSellerSignals, kTestAuctionSignals, kTestPublisherHostname); + EXPECT_CALL(dispatcher, BatchExecute) + .WillOnce([](std::vector& batch, + BatchDispatchDoneCallback done_callback) { + std::vector score_logic(batch.size(), + R"( + { + "response" : { + "ad": {"key1":"adMetadata"}, + "desirability" : 1, + "bid" : 0.1, + "allowComponentAuction" : true + }, + "logs":[] + } + )"); + return FakeExecute(batch, std::move(done_callback), + std::move(score_logic), false); + }); + ScoreAdsReactorTestHelper test_helper; + auto response = test_helper.ExecuteScoreAds(raw_request, dispatcher); + ScoreAdsResponse::ScoreAdsRawResponse raw_response; + ASSERT_TRUE(raw_response.ParseFromString(response.response_ciphertext())); + const auto& scored_ad = raw_response.ad_score(); + EXPECT_EQ(scored_ad.desirability(), 1); + EXPECT_EQ(scored_ad.render(), car_1.ad_render_url()); + EXPECT_EQ(scored_ad.component_renders_size(), + car_1.ad_component_render_urls_size()); + EXPECT_EQ(scored_ad.interest_group_name(), car_1.interest_group_name()); + 
EXPECT_EQ(scored_ad.interest_group_owner(), car_1.interest_group_owner()); + EXPECT_EQ(scored_ad.buyer_bid(), car_1.bid()); + + // Since in the above test we are assuming non-component auctions, check that + // the required fields for component auctions are not set. + EXPECT_FALSE(scored_ad.allow_component_auction()); + EXPECT_TRUE(scored_ad.ad_metadata().empty()); + EXPECT_EQ(scored_ad.bid(), 0); +} + +TEST(ScoreAdsReactorTopLevelAuctionTest, DoesNotPopulateHighestOtherBid) { + MockCodeDispatchClient dispatcher; + AuctionResult car_1 = + MakeARandomComponentAuctionResult(kTestGenerationId, kTestTopLevelSeller); + AuctionResult car_2 = + MakeARandomComponentAuctionResult(kTestGenerationId, kTestTopLevelSeller); + RawRequest raw_request = BuildTopLevelAuctionRawRequest( + {car_1, car_2}, kTestSellerSignals, kTestAuctionSignals, + kTestPublisherHostname); + EXPECT_EQ(raw_request.component_auction_results_size(), 2); + EXPECT_CALL(dispatcher, BatchExecute) + .WillOnce([](std::vector& batch, + BatchDispatchDoneCallback done_callback) { + std::vector score_logic(batch.size(), + R"( + { + "response" : { + "ad": {"key1":"adMetadata"}, + "desirability" : 1, + "bid" : 0.1, + }, + "logs":[] + } + )"); + return FakeExecute(batch, std::move(done_callback), + std::move(score_logic), false); + }); + ScoreAdsReactorTestHelper test_helper; + auto response = test_helper.ExecuteScoreAds(raw_request, dispatcher); + ScoreAdsResponse::ScoreAdsRawResponse raw_response; + ASSERT_TRUE(raw_response.ParseFromString(response.response_ciphertext())); + const auto& scored_ad = raw_response.ad_score(); + // Do not populate highest scoring other bids. + EXPECT_TRUE(scored_ad.ig_owner_highest_scoring_other_bids_map().empty()); +} + +TEST(ScoreAdsReactorTopLevelAuctionTest, DoesNotSupportWinReporting) { + MockCodeDispatchClient dispatcher; + AuctionResult car_1 = MakeARandomComponentAuctionResult(MakeARandomString(), + kTestTopLevelSeller); + RawRequest raw_request = BuildTopLevelAuctionRawRequest( + {car_1}, kTestSellerSignals, kTestAuctionSignals, kTestPublisherHostname); + EXPECT_CALL(dispatcher, BatchExecute) + .WillOnce([](std::vector& batch, + BatchDispatchDoneCallback done_callback) { + std::vector score_logic(batch.size(), + R"( + { + "response" : { + "ad": {"key1":"adMetadata"}, + "desirability" : 1, + "bid" : 0.1, + "allowComponentAuction" : true + }, + "logs":[] + } + )"); + return FakeExecute(batch, std::move(done_callback), + std::move(score_logic), false); + }); + AuctionServiceRuntimeConfig runtime_config = { + .enable_seller_debug_url_generation = false, + .enable_adtech_code_logging = false, + .enable_report_result_url_generation = true, + .enable_report_win_url_generation = true}; + ScoreAdsReactorTestHelper test_helper; + auto response = + test_helper.ExecuteScoreAds(raw_request, dispatcher, runtime_config); + ScoreAdsResponse::ScoreAdsRawResponse raw_response; + ASSERT_TRUE(raw_response.ParseFromString(response.response_ciphertext())); + const auto& scored_ad = raw_response.ad_score(); + EXPECT_TRUE(scored_ad.win_reporting_urls() + .top_level_seller_reporting_urls() + .reporting_url() + .empty()); + EXPECT_EQ(scored_ad.win_reporting_urls() + .top_level_seller_reporting_urls() + .interaction_reporting_urls() + .size(), + 0); + EXPECT_TRUE(scored_ad.win_reporting_urls() + .component_seller_reporting_urls() + .reporting_url() + .empty()); + EXPECT_EQ(scored_ad.win_reporting_urls() + .component_seller_reporting_urls() + .interaction_reporting_urls() + .size(), + 0); + 
EXPECT_TRUE(scored_ad.win_reporting_urls() + .buyer_reporting_urls() + .reporting_url() + .empty()); + EXPECT_EQ(scored_ad.win_reporting_urls() + .buyer_reporting_urls() + .interaction_reporting_urls() + .size(), + 0); +} + +TEST(ScoreAdsReactorTopLevelAuctionTest, DoesNotPerformDebugReporting) { + MockCodeDispatchClient dispatcher; + AuctionResult car_1 = MakeARandomComponentAuctionResult(MakeARandomString(), + kTestTopLevelSeller); + RawRequest raw_request = BuildTopLevelAuctionRawRequest( + {car_1}, kTestSellerSignals, kTestAuctionSignals, kTestPublisherHostname); + EXPECT_CALL(dispatcher, BatchExecute) + .WillOnce([](std::vector& batch, + BatchDispatchDoneCallback done_callback) { + std::vector score_logic(batch.size(), + R"( + { + "response" : { + "ad": {"key1":"adMetadata"}, + "desirability" : 1, + "bid" : 0.1, + "allowComponentAuction" : true + }, + "logs":[] + } + )"); + return FakeExecute(batch, std::move(done_callback), + std::move(score_logic), false); + }); + AuctionServiceRuntimeConfig runtime_config = { + .enable_seller_debug_url_generation = false, + .enable_adtech_code_logging = false, + .enable_report_result_url_generation = true, + .enable_report_win_url_generation = true}; + ScoreAdsReactorTestHelper test_helper; + EXPECT_CALL(*test_helper.async_reporter, DoReport).Times(0); + test_helper.ExecuteScoreAds(raw_request, dispatcher, runtime_config); +} + +} // namespace +} // namespace privacy_sandbox::bidding_auction_servers diff --git a/services/auction_service/seller_adtech_reporting_wrapper.h b/services/auction_service/seller_adtech_reporting_wrapper.h deleted file mode 100644 index 7ae5abd8..00000000 --- a/services/auction_service/seller_adtech_reporting_wrapper.h +++ /dev/null @@ -1,79 +0,0 @@ -// Copyright 2023 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#ifndef FLEDGE_SERVICES_SELLER_ADTECH_REPORTING_WRAPPER_H_ -#define FLEDGE_SERVICES_SELLER_ADTECH_REPORTING_WRAPPER_H_ - -#include - -#include "absl/strings/str_cat.h" -#include "absl/strings/string_view.h" - -namespace privacy_sandbox::bidding_auction_servers { - -// Wrapper Javascript Wasm for the reportResult function provided by Seller -// AdTech. This includes sendReport and registerAdBeacon functions. 
-inline constexpr absl::string_view kReportResultWrapperJavascriptWasm = - R"JSCODE( - var ps_report_result_response = { - var signalsForWinner = undefined; - var reportResultUrl = undefined; - var interactionReportingUrls = new Map(); - var reportResultUrlGenerationErrorCount = 0; - var sendReportToInvoked = false; - var registerAdBeaconInvoked = false; - } - - globalThis.sendReportTo = function sendReportTo(url){ - if(!ps_report_result_response.sendReportToInvoked) { - throw new Exception("sendReportTo function invoked more than once"); - } - ps_report_result_response.reportResultUrl = url; - ps_report_result_response.sendReportToInvoked = true; - } - - globalThis.registerAdBeacon =function registerAdBeacon(eventUrlMap){ - if(!ps_report_result_response.registerAdBeaconInvoked) { - throw new Exception("registerAdBeaconInvoked function invoked more than once"); - } - for (const [key, value] of eventUrlMap) { - ps_report_result_response.interactionReportingUrls.set(key. value); - } - ps_report_result_response.sendReportToInvoked = true; - } - - // Handler method to call adTech provided reportResult method and wrap the - // response with reportResult url and interaction reporting urls. - function reportResultWrapper(auctionConfig, sellerReportingSignals) { - var reportResultResponse = {}; - var signalsForWinner = ""; - try { - signalsForWinner = reportResult(auctionConfig, sellerReportingSignals); - } catch(ignored) { - ps_report_result_response.reportResultUrlGenerationErrorCount = 1; - } - ps_report_result_response.signalsForWinner = signalsForWinner; - return ps_report_result_response; -)JSCODE"; - -// Wraps the Ad Tech provided code for reportResult with wrapper code allowing -// Ad Techs to send URLs for event level and fenced frame reporting. -std::string GetWrappedAdtechCodeForReportResult( - absl::string_view adtech_code_blob) { - return absl::StrCat(KReportResultWrapperJavascriptWasm, adtech_code_blob); -} - -} // namespace privacy_sandbox::bidding_auction_servers - -#endif // FLEDGE_SERVICES_SELLER_ADTECH_REPORTING_WRAPPER_H_ diff --git a/services/auction_service/seller_code_fetch_manager.cc b/services/auction_service/seller_code_fetch_manager.cc new file mode 100644 index 00000000..e1c48836 --- /dev/null +++ b/services/auction_service/seller_code_fetch_manager.cc @@ -0,0 +1,170 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
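The deleted wrapper header above and the SellerCodeFetchManager that follows share one mechanism: the seller-provided scoreAd()/reportResult() blob is wrapped by plain string concatenation before it is loaded into the dispatcher. A minimal sketch of that pattern, not part of this change; WrapUdf is a hypothetical name standing in for GetWrappedAdtechCodeForReportResult / GetSellerWrappedCode:

#include <string>

#include "absl/strings/str_cat.h"
#include "absl/strings/string_view.h"

// Hypothetical helper: prepend the wrapper JS to the fetched ad tech blob so
// the dispatcher loads a single self-contained UDF.
std::string WrapUdf(absl::string_view wrapper_js,
                    absl::string_view adtech_code_blob) {
  return absl::StrCat(wrapper_js, adtech_code_blob);
}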
+ +#include "services/auction_service/seller_code_fetch_manager.h" + +#include +#include +#include +#include +#include + +#include "services/auction_service/auction_code_fetch_config.pb.h" +#include "services/auction_service/auction_constants.h" +#include "services/auction_service/code_wrapper/buyer_reporting_fetcher.h" +#include "services/auction_service/code_wrapper/seller_code_wrapper.h" +#include "services/common/clients/code_dispatcher/v8_dispatcher.h" +#include "services/common/code_fetch/code_fetcher_interface.h" +#include "services/common/code_fetch/periodic_bucket_fetcher.h" +#include "services/common/code_fetch/periodic_code_fetcher.h" +#include "services/common/util/file_util.h" +#include "src/concurrent/event_engine_executor.h" +#include "src/core/interface/errors.h" +#include "src/util/status_macro/status_macros.h" + +using ::google::scp::core::errors::GetErrorMessage; + +namespace privacy_sandbox::bidding_auction_servers { +namespace { + +constexpr int kJsBlobIndex = 0; + +} // namespace + +absl::Status SellerCodeFetchManager::Init() { + if (udf_config_.fetch_mode() != auction_service::FETCH_MODE_LOCAL) { + buyer_reporting_fetcher_ = std::make_unique<BuyerReportingFetcher>( + udf_config_, &buyer_reporting_http_fetcher_, &executor_); + PS_RETURN_IF_ERROR(buyer_reporting_fetcher_->Start()) + << kBuyerReportingFailedStartup; + } + + switch (udf_config_.fetch_mode()) { + case auction_service::FETCH_MODE_LOCAL: { + return InitializeLocalCodeFetch(); + } + case auction_service::FETCH_MODE_BUCKET: { + PS_ASSIGN_OR_RETURN(seller_code_fetcher_, InitializeBucketCodeFetch()); + return absl::OkStatus(); + } + case auction_service::FETCH_MODE_URL: { + PS_ASSIGN_OR_RETURN(seller_code_fetcher_, InitializeUrlCodeFetch()); + return absl::OkStatus(); + } + default: { + return absl::InvalidArgumentError("Fetch mode invalid."); + } + } +} + +absl::Status SellerCodeFetchManager::End() { + if (udf_config_.fetch_mode() != auction_service::FETCH_MODE_LOCAL) { + buyer_reporting_fetcher_->End(); + seller_code_fetcher_->End(); + } + return absl::OkStatus(); +} + +WrapCodeForDispatch SellerCodeFetchManager::GetUdfWrapper() { + return [this](const std::vector<std::string>& ad_tech_code_blobs) { + auto protected_auction_reporting = + buyer_reporting_fetcher_->GetProtectedAuctionReportingByOrigin(); + auto protected_app_signals_reporting = + buyer_reporting_fetcher_->GetProtectedAppSignalsReportingByOrigin(); + return GetSellerWrappedCode( + ad_tech_code_blobs[kJsBlobIndex], + udf_config_.enable_report_result_url_generation(), + enable_protected_app_signals_, + udf_config_.enable_report_win_url_generation(), + protected_auction_reporting, protected_app_signals_reporting); + }; +} + +absl::Status SellerCodeFetchManager::InitializeLocalCodeFetch() { + if (udf_config_.auction_js_path().empty()) { + return absl::UnavailableError( + "Local fetch mode requires a non-empty path."); + } + + PS_ASSIGN_OR_RETURN(auto adtech_code_blob, + GetFileContent(udf_config_.auction_js_path(), + /*log_on_error=*/true)); + + adtech_code_blob = GetSellerWrappedCode( + adtech_code_blob, udf_config_.enable_report_result_url_generation(), + false, {}); + + return dispatcher_.LoadSync(kScoreAdBlobVersion, adtech_code_blob); +} + +absl::StatusOr<std::unique_ptr<CodeFetcherInterface>> +SellerCodeFetchManager::InitializeBucketCodeFetch() { + PS_RETURN_IF_ERROR(InitBucketClient()); + + if (udf_config_.auction_js_bucket().empty()) { + return absl::InvalidArgumentError( + "Bucket fetch mode requires a non-empty bucket name."); + } else if (udf_config_.auction_js_bucket_default_blob().empty()) { + return absl::InvalidArgumentError( + "Bucket fetch mode requires a non-empty bucket default object " + "name."); + } + auto seller_code_fetcher = std::make_unique<PeriodicBucketFetcher>( + udf_config_.auction_js_bucket(), + absl::Milliseconds(udf_config_.url_fetch_period_ms()), &dispatcher_, + &executor_, GetUdfWrapper(), blob_storage_client_.get()); + PS_RETURN_IF_ERROR(seller_code_fetcher->Start()) + << kSellerUDFLoadFailedStartup; + return seller_code_fetcher; +} + +absl::StatusOr<std::unique_ptr<CodeFetcherInterface>> +SellerCodeFetchManager::InitializeUrlCodeFetch() { + if (udf_config_.auction_js_url().empty()) { + return absl::InvalidArgumentError( + "URL fetch mode requires a non-empty url."); + } + std::vector<std::string> seller_endpoints = {udf_config_.auction_js_url()}; + + auto seller_code_fetcher = std::make_unique<PeriodicCodeFetcher>( + seller_endpoints, absl::Milliseconds(udf_config_.url_fetch_period_ms()), + &seller_http_fetcher_, &dispatcher_, &executor_, + absl::Milliseconds(udf_config_.url_fetch_timeout_ms()), GetUdfWrapper(), + kScoreAdBlobVersion); + PS_RETURN_IF_ERROR(seller_code_fetcher->Start()) + << kSellerUDFLoadFailedStartup; + return seller_code_fetcher; +} + +absl::Status SellerCodeFetchManager::InitBucketClient() { + auto result = blob_storage_client_->Init(); + if (!result.Successful()) { + return absl::UnavailableError( + absl::StrFormat("Failed to init BlobStorageClient (status_code: %s)\n", + GetErrorMessage(result.status_code))); + } + + result = blob_storage_client_->Run(); + if (!result.Successful()) { + return absl::UnavailableError( + absl::StrFormat("Failed to run BlobStorageClient (status_code: %s)\n", + GetErrorMessage(result.status_code))); + } + return absl::OkStatus(); +} + +} // namespace privacy_sandbox::bidding_auction_servers diff --git a/services/auction_service/seller_code_fetch_manager.h b/services/auction_service/seller_code_fetch_manager.h new file mode 100644 index 00000000..77245d6b --- /dev/null +++ b/services/auction_service/seller_code_fetch_manager.h @@ -0,0 +1,97 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License.
+
+#ifndef SERVICES_SELLER_CODE_FETCH_MANAGER_H_
+#define SERVICES_SELLER_CODE_FETCH_MANAGER_H_
+
+#include <memory>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "services/auction_service/auction_code_fetch_config.pb.h"
+#include "services/auction_service/code_wrapper/buyer_reporting_fetcher.h"
+#include "services/auction_service/code_wrapper/seller_code_wrapper.h"
+#include "services/common/clients/code_dispatcher/v8_dispatcher.h"
+#include "services/common/clients/http/http_fetcher_async.h"
+#include "services/common/code_fetch/code_fetcher_interface.h"
+#include "services/common/code_fetch/periodic_bucket_fetcher.h"
+#include "services/common/code_fetch/periodic_code_fetcher.h"
+#include "src/concurrent/event_engine_executor.h"
+#include "src/util/status_macro/status_macros.h"
+
+namespace privacy_sandbox::bidding_auction_servers {
+constexpr char kBuyerReportingFailedStartup[] =
+    "Buyer reporting code fetcher failed startup.";
+constexpr char kSellerUDFLoadFailedStartup[] =
+    "Failed loading code blob on startup.";
+
+// SellerCodeFetchManager acts as a wrapper for all logic related to fetching
+// auction service UDFs. This class consumes a SellerCodeFetchConfig and uses
+// it, along with various other dependencies, to create and own all instances
+// of CodeFetcherInterface in the auction service.
+class SellerCodeFetchManager {
+ public:
+  SellerCodeFetchManager(
+      std::unique_ptr<google::scp::cpio::BlobStorageClientInterface>
+          blob_storage_client,
+      server_common::Executor* executor, HttpFetcherAsync* seller_http_fetcher,
+      HttpFetcherAsync* buyer_reporting_http_fetcher, V8Dispatcher* dispatcher,
+      const auction_service::SellerCodeFetchConfig& udf_config,
+      bool enable_protected_app_signals)
+      : executor_(*executor),
+        seller_http_fetcher_(*seller_http_fetcher),
+        buyer_reporting_http_fetcher_(*buyer_reporting_http_fetcher),
+        dispatcher_(*dispatcher),
+        enable_protected_app_signals_(enable_protected_app_signals),
+        udf_config_(udf_config),
+        blob_storage_client_(std::move(blob_storage_client)) {}
+
+  // Not copyable or movable.
+  SellerCodeFetchManager(const SellerCodeFetchManager&) = delete;
+  SellerCodeFetchManager& operator=(const SellerCodeFetchManager&) = delete;
+
+  absl::Status Init();
+
+  absl::Status End();
+
+ private:
+  WrapCodeForDispatch GetUdfWrapper();
+
+  absl::Status InitializeLocalCodeFetch();
+
+  absl::Status InitBucketClient();
+
+  absl::StatusOr<std::unique_ptr<CodeFetcherInterface>>
+  InitializeBucketCodeFetch();
+
+  absl::StatusOr<std::unique_ptr<CodeFetcherInterface>>
+  InitializeUrlCodeFetch();
+
+  server_common::Executor& executor_;
+  HttpFetcherAsync& seller_http_fetcher_;
+  HttpFetcherAsync& buyer_reporting_http_fetcher_;
+  V8Dispatcher& dispatcher_;
+  const bool enable_protected_app_signals_;
+  const auction_service::SellerCodeFetchConfig& udf_config_;
+  std::unique_ptr<google::scp::cpio::BlobStorageClientInterface>
+      blob_storage_client_;
+
+  std::unique_ptr<BuyerReportingFetcher> buyer_reporting_fetcher_;
+  std::unique_ptr<CodeFetcherInterface> seller_code_fetcher_;
+};
+
+}  // namespace privacy_sandbox::bidding_auction_servers
+
+#endif  // SERVICES_SELLER_CODE_FETCH_MANAGER_H_
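
For orientation, a minimal usage sketch (illustrative only; the surrounding objects are hypothetical and the real wiring lives in the auction service startup path). The manager is constructed with its fetch dependencies, Init() starts the fetchers for the configured mode, and End() tears them down on shutdown:

    // Assumes `executor`, `http_fetcher`, `dispatcher`, and `udf_config` were
    // built by server startup code; the blob storage client is only exercised
    // in FETCH_MODE_BUCKET.
    SellerCodeFetchManager code_fetch_manager(
        google::scp::cpio::BlobStorageClientFactory::Create(), &executor,
        &http_fetcher, &http_fetcher, &dispatcher, udf_config,
        /*enable_protected_app_signals=*/true);
    CHECK_OK(code_fetch_manager.Init());  // local file load, or periodic bucket/URL fetch
    // ... serve traffic ...
    CHECK_OK(code_fetch_manager.End());   // stops the periodic fetchers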
diff --git a/services/auction_service/seller_code_fetch_manager_test.cc b/services/auction_service/seller_code_fetch_manager_test.cc
new file mode 100644
index 00000000..0e304956
--- /dev/null
+++ b/services/auction_service/seller_code_fetch_manager_test.cc
@@ -0,0 +1,255 @@
+// Copyright 2024 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "services/auction_service/seller_code_fetch_manager.h"
+
+#include "absl/strings/str_cat.h"
+#include "absl/synchronization/blocking_counter.h"
+#include "absl/synchronization/notification.h"
+#include "absl/time/time.h"
+#include "gtest/gtest.h"
+#include "services/auction_service/auction_constants.h"
+#include "services/auction_service/code_wrapper/seller_code_wrapper.h"
+#include "services/common/test/mocks.h"
+#include "services/common/util/file_util.h"
+#include "src/core/interface/async_context.h"
+#include "src/public/cpio/interface/blob_storage_client/blob_storage_client_interface.h"
+#include "src/public/cpio/interface/cpio.h"
+#include "src/public/cpio/interface/error_codes.h"
+#include "src/public/cpio/mock/blob_storage_client/mock_blob_storage_client.h"
+
+namespace privacy_sandbox::bidding_auction_servers {
+namespace {
+
+using auction_service::FetchMode;
+using auction_service::SellerCodeFetchConfig;
+
+using ::google::cmrt::sdk::blob_storage_service::v1::BlobMetadata;
+using ::google::cmrt::sdk::blob_storage_service::v1::GetBlobRequest;
+using ::google::cmrt::sdk::blob_storage_service::v1::GetBlobResponse;
+using ::google::cmrt::sdk::blob_storage_service::v1::ListBlobsMetadataRequest;
+using ::google::cmrt::sdk::blob_storage_service::v1::ListBlobsMetadataResponse;
+using ::google::scp::core::AsyncContext;
+using ::google::scp::core::ExecutionResult;
+using ::google::scp::core::FailureExecutionResult;
+using ::google::scp::core::SuccessExecutionResult;
+using ::google::scp::cpio::MockBlobStorageClient;
+using ::testing::Return;
+
+class SellerCodeFetchManagerTest : public testing::Test {
+ protected:
+  void SetUp() override {
+    executor_ = std::make_unique<MockExecutor>();
+    http_fetcher_ = std::make_unique<MockHttpFetcherAsync>();
+    dispatcher_ = std::make_unique<MockV8Dispatcher>();
+    blob_storage_client_ = std::make_unique<MockBlobStorageClient>();
+  }
+  std::unique_ptr<MockExecutor> executor_;
+  std::unique_ptr<MockHttpFetcherAsync> http_fetcher_;
+  std::unique_ptr<MockV8Dispatcher> dispatcher_;
+  std::unique_ptr<MockBlobStorageClient> blob_storage_client_;
+};
+
+TEST_F(SellerCodeFetchManagerTest, FetchModeLocalTriesFileLoad) {
+  EXPECT_CALL(*blob_storage_client_, Init).Times(0);
+  EXPECT_CALL(*blob_storage_client_, Run).Times(0);
+
+  SellerCodeFetchConfig udf_config;
+  udf_config.set_fetch_mode(FetchMode::FETCH_MODE_LOCAL);
+  const std::string bad_path = "error";
+  const bool enable_protected_app_signals = true;
+
+  udf_config.set_auction_js_path(bad_path);
+  SellerCodeFetchManager udf_fetcher(std::move(blob_storage_client_),
+                                     executor_.get(), http_fetcher_.get(),
+                                     http_fetcher_.get(), dispatcher_.get(),
+                                     udf_config, enable_protected_app_signals);
+  absl::Status load_status = udf_fetcher.Init();
+  EXPECT_FALSE(load_status.ok());
+  EXPECT_EQ(load_status.message(), absl::StrCat(kPathFailed, bad_path));
+}
+
+TEST_F(SellerCodeFetchManagerTest,
+       FetchModeBucketSucceedsLoadingWrappedBucketBlobs) {
+  std::string fake_udf = "udf_data";
+  std::string pa_buyer_origin = "pa_origin";
+  std::string pas_buyer_origin = "pas_origin";
+  std::string pa_reporting_udf_data = "pa_reporting_udf_data";
+  std::string pas_reporting_udf_data = "pas_reporting_udf_data";
+  const bool enable_protected_app_signals = true;
+
+  SellerCodeFetchConfig udf_config;
+  udf_config.set_fetch_mode(FetchMode::FETCH_MODE_BUCKET);
+  udf_config.set_enable_report_win_url_generation(true);
+  udf_config.set_enable_report_result_url_generation(true);
+  udf_config.mutable_buyer_report_win_js_urls()->try_emplace(pa_buyer_origin,
+                                                             "foo.com");
+  udf_config.mutable_protected_app_signals_buyer_report_win_js_urls()
+      ->try_emplace(pas_buyer_origin, "bar.com");
+  udf_config.set_auction_js_bucket("js");
+  udf_config.set_auction_js_bucket_default_blob("default");
+
+  EXPECT_CALL(*blob_storage_client_, Init)
+      .WillOnce(Return(SuccessExecutionResult()));
+  EXPECT_CALL(*blob_storage_client_, Run)
+      .WillOnce(Return(SuccessExecutionResult()));
+  EXPECT_CALL(*executor_, RunAfter).Times(2);
+  EXPECT_CALL(*http_fetcher_, FetchUrls)
+      .Times(1)
+      .WillOnce([&](const std::vector<HTTPRequest>& requests,
+                    absl::Duration timeout, OnDoneFetchUrls done_callback) {
+        EXPECT_EQ(requests[0].url,
+                  udf_config.buyer_report_win_js_urls().at(pa_buyer_origin));
+        EXPECT_EQ(
+            requests[1].url,
+            udf_config.protected_app_signals_buyer_report_win_js_urls().at(
+                pas_buyer_origin));
+
+        std::move(done_callback)(
+            {pa_reporting_udf_data, pas_reporting_udf_data});
+      });
+
+  EXPECT_CALL(*blob_storage_client_, ListBlobsMetadata)
+      .WillOnce(
+          [&](AsyncContext<ListBlobsMetadataRequest, ListBlobsMetadataResponse>
+                  context) {
+            EXPECT_EQ(context.request->blob_metadata().bucket_name(),
+                      udf_config.auction_js_bucket());
+            BlobMetadata md;
+            md.set_bucket_name(udf_config.auction_js_bucket());
+            md.set_blob_name(udf_config.auction_js_bucket_default_blob());
+
+            context.response = std::make_shared<ListBlobsMetadataResponse>();
+            context.response->mutable_blob_metadatas()->Add(std::move(md));
+            context.Finish(SuccessExecutionResult());
+            return SuccessExecutionResult();
+          });
+
+  EXPECT_CALL(*blob_storage_client_, GetBlob)
+      .WillOnce(
+          [&](AsyncContext<GetBlobRequest, GetBlobResponse> async_context) {
+            auto async_bucket_name =
+                async_context.request->blob_metadata().bucket_name();
+            auto async_blob_name =
+                async_context.request->blob_metadata().blob_name();
+            EXPECT_EQ(async_bucket_name, udf_config.auction_js_bucket());
+            EXPECT_EQ(async_blob_name,
+                      udf_config.auction_js_bucket_default_blob());
+
+            async_context.response = std::make_shared<GetBlobResponse>();
+            async_context.response->mutable_blob()->set_data(fake_udf);
+            async_context.result = SuccessExecutionResult();
+            async_context.Finish();
+
+            return SuccessExecutionResult();
+          });
+
+  EXPECT_CALL(*dispatcher_, LoadSync)
+      .WillOnce([&](std::string_view version, absl::string_view blob_data) {
+        EXPECT_EQ(version, udf_config.auction_js_bucket_default_blob());
+        absl::flat_hash_map<std::string, std::string> pa_reporting_udfs;
+        pa_reporting_udfs.try_emplace(pa_buyer_origin, pa_reporting_udf_data);
+        absl::flat_hash_map<std::string, std::string> pas_reporting_udfs;
+        pas_reporting_udfs.try_emplace(pas_buyer_origin,
+                                       pas_reporting_udf_data);
+
+        EXPECT_EQ(
+            blob_data,
+            GetSellerWrappedCode(
+                fake_udf, udf_config.enable_report_result_url_generation(),
+                enable_protected_app_signals,
+                udf_config.enable_report_win_url_generation(),
+                pa_reporting_udfs, pas_reporting_udfs));
+        return absl::OkStatus();
+      });
+
+  SellerCodeFetchManager udf_fetcher(std::move(blob_storage_client_),
+                                     executor_.get(), http_fetcher_.get(),
+                                     http_fetcher_.get(), dispatcher_.get(),
+                                     udf_config, enable_protected_app_signals);
+  absl::Status load_status = udf_fetcher.Init();
+  EXPECT_TRUE(load_status.ok());
+}
+
+TEST_F(SellerCodeFetchManagerTest, FetchModeUrlSucceedsLoadingWrappedUrlBlobs) {
+  std::string fake_udf = "udf_data";
+  std::string pa_buyer_origin = "pa_origin";
+  std::string pas_buyer_origin = "pas_origin";
+  std::string pa_reporting_udf_data = "pa_reporting_udf_data";
+  std::string pas_reporting_udf_data = "pas_reporting_udf_data";
+  const bool enable_protected_app_signals = true;
+
+  SellerCodeFetchConfig udf_config;
+  udf_config.set_fetch_mode(FetchMode::FETCH_MODE_URL);
+  udf_config.set_enable_report_win_url_generation(true);
+  udf_config.set_enable_report_result_url_generation(true);
+  udf_config.mutable_buyer_report_win_js_urls()->try_emplace(pa_buyer_origin,
+                                                             "foo.com");
+  udf_config.mutable_protected_app_signals_buyer_report_win_js_urls()
+      ->try_emplace(pas_buyer_origin, "bar.com");
+  udf_config.set_auction_js_url("auction.com");
+  udf_config.set_url_fetch_timeout_ms(70000000);
+  udf_config.set_url_fetch_period_ms(70000001);
+  EXPECT_CALL(*blob_storage_client_, Init).Times(0);
+  EXPECT_CALL(*blob_storage_client_, Run).Times(0);
+  EXPECT_CALL(*executor_, RunAfter).Times(2);
+  EXPECT_CALL(*http_fetcher_, FetchUrls)
+      .Times(2)
+      .WillOnce([&](const std::vector<HTTPRequest>& requests,
+                    absl::Duration timeout, OnDoneFetchUrls done_callback) {
+        EXPECT_EQ(requests[0].url,
+                  udf_config.buyer_report_win_js_urls().at(pa_buyer_origin));
+        EXPECT_EQ(
+            requests[1].url,
+            udf_config.protected_app_signals_buyer_report_win_js_urls().at(
+                pas_buyer_origin));
+
+        std::move(done_callback)(
+            {pa_reporting_udf_data, pas_reporting_udf_data});
+      })
+      .WillOnce([&](const std::vector<HTTPRequest>& requests,
+                    absl::Duration timeout, OnDoneFetchUrls done_callback) {
+        EXPECT_EQ(requests[0].url, udf_config.auction_js_url());
+        std::move(done_callback)({fake_udf});
+      });
+
+  EXPECT_CALL(*dispatcher_, LoadSync)
+      .WillOnce([&](std::string_view version, absl::string_view blob_data) {
+        EXPECT_EQ(version, kScoreAdBlobVersion);
+        absl::flat_hash_map<std::string, std::string> pa_reporting_udfs;
+        pa_reporting_udfs.try_emplace(pa_buyer_origin, pa_reporting_udf_data);
+        absl::flat_hash_map<std::string, std::string> pas_reporting_udfs;
+        pas_reporting_udfs.try_emplace(pas_buyer_origin,
+                                       pas_reporting_udf_data);
+
+        EXPECT_EQ(
+            blob_data,
+            GetSellerWrappedCode(
+                fake_udf, udf_config.enable_report_result_url_generation(),
+                enable_protected_app_signals,
+                udf_config.enable_report_win_url_generation(),
+                pa_reporting_udfs, pas_reporting_udfs));
+        return absl::OkStatus();
+      });
+
+  SellerCodeFetchManager udf_fetcher(std::move(blob_storage_client_),
+                                     executor_.get(), http_fetcher_.get(),
+                                     http_fetcher_.get(), dispatcher_.get(),
+                                     udf_config, enable_protected_app_signals);
+  absl::Status load_status = udf_fetcher.Init();
+  EXPECT_TRUE(load_status.ok());
+}
+
+}  // namespace
+}  // namespace privacy_sandbox::bidding_auction_servers
diff --git a/services/auction_service/utils/BUILD b/services/auction_service/utils/BUILD
index 337ddc95..bddbe20c 100644
--- a/services/auction_service/utils/BUILD
+++ b/services/auction_service/utils/BUILD
@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
-load("@rules_cc//cc:defs.bzl", "cc_library") +load("@rules_cc//cc:defs.bzl", "cc_library", "cc_test") package(default_visibility = ["//services/auction_service:__pkg__"]) @@ -35,8 +35,22 @@ cc_library( "@com_google_absl//absl/container:flat_hash_set", "@com_google_absl//absl/status", "@com_google_absl//absl/strings", - "@google_privacysandbox_servers_common//src/cpp/logger:request_context_impl", - "@google_privacysandbox_servers_common//src/cpp/util/status_macro:status_macros", + "@google_privacysandbox_servers_common//src/logger:request_context_impl", + "@google_privacysandbox_servers_common//src/util/status_macro:status_macros", "@rapidjson", ], ) + +cc_test( + name = "proto_utils_test", + srcs = [ + "proto_utils_test.cc", + ], + deps = [ + ":proto_utils", + "//services/auction_service:score_ads_reactor_test_util", + "//services/common/test:random", + "@com_google_googletest//:gtest_main", + "@google_privacysandbox_servers_common//src/core/test/utils", + ], +) diff --git a/services/auction_service/utils/proto_utils.cc b/services/auction_service/utils/proto_utils.cc index d95d1a44..f01466e1 100644 --- a/services/auction_service/utils/proto_utils.cc +++ b/services/auction_service/utils/proto_utils.cc @@ -148,6 +148,44 @@ absl::StatusOr AddComponentSignals( return combined_signals_for_this_bid; } +// Create bid metadata json string but doesn't close the json object +// for adding more fields. +std::string MakeOpenBidMetadataJson( + absl::string_view publisher_hostname, + absl::string_view interest_group_owner, absl::string_view render_url, + const google::protobuf::RepeatedPtrField& + ad_component_render_urls, + absl::string_view bid_currency) { + std::string bid_metadata = "{"; + if (!interest_group_owner.empty()) { + absl::StrAppend(&bid_metadata, R"JSON(")JSON", kIGOwnerPropertyForScoreAd, + R"JSON(":")JSON", interest_group_owner, R"JSON(",)JSON"); + } + if (!publisher_hostname.empty()) { + absl::StrAppend(&bid_metadata, R"JSON(")JSON", + kTopWindowHostnamePropertyForScoreAd, R"JSON(":")JSON", + publisher_hostname, R"JSON(",)JSON"); + } + if (!ad_component_render_urls.empty()) { + absl::StrAppend(&bid_metadata, R"("adComponents":[)"); + for (int i = 0; i < ad_component_render_urls.size(); i++) { + absl::StrAppend(&bid_metadata, "\"", ad_component_render_urls.at(i), + "\""); + if (i != ad_component_render_urls.size() - 1) { + absl::StrAppend(&bid_metadata, ","); + } + } + absl::StrAppend(&bid_metadata, R"(],)"); + } + // Only add bid currency to bid metadata if it's non-empty. 
+ if (!bid_currency.empty()) { + absl::StrAppend(&bid_metadata, R"JSON(")JSON", + kBidCurrencyPropertyForScoreAd, R"JSON(":")JSON", + bid_currency, R"JSON(",)JSON"); + } + return bid_metadata; +} + } // namespace void MayLogScoreAdsInput(const std::vector>& input, @@ -160,8 +198,8 @@ void MayLogScoreAdsInput(const std::vector>& input, << *(input[ScoreArgIndex(ScoreAdArgs::kAuctionConfig)]) << "\nScoring Signals:\n" << *(input[ScoreArgIndex(ScoreAdArgs::kScoringSignals)]) - << "\nDevice Signals:\n" - << *(input[ScoreArgIndex(ScoreAdArgs::kDeviceSignals)]) + << "\nBid Metadata:\n" + << *(input[ScoreArgIndex(ScoreAdArgs::kBidMetadata)]) << "\nDirectFromSellerSignals:\n" << (input[ScoreArgIndex(ScoreAdArgs::kDirectFromSellerSignals)]); } @@ -178,42 +216,41 @@ std::shared_ptr BuildAuctionConfig( "}")); } -std::string MakeDeviceSignals( +std::string MakeBidMetadata( absl::string_view publisher_hostname, absl::string_view interest_group_owner, absl::string_view render_url, const google::protobuf::RepeatedPtrField& ad_component_render_urls, absl::string_view top_level_seller, absl::string_view bid_currency) { - std::string device_signals = - absl::StrCat("{", R"("interestGroupOwner":")", interest_group_owner, "\"", - R"(,"topWindowHostname":")", publisher_hostname, "\""); - - if (!ad_component_render_urls.empty()) { - absl::StrAppend(&device_signals, R"(,"adComponents":[)"); - for (int i = 0; i < ad_component_render_urls.size(); i++) { - absl::StrAppend(&device_signals, "\"", ad_component_render_urls.at(i), - "\""); - if (i != ad_component_render_urls.size() - 1) { - absl::StrAppend(&device_signals, ","); - } - } - absl::StrAppend(&device_signals, R"(])"); - } - // Only add bid currency to device signals if it's non-empty. - if (!bid_currency.empty()) { - absl::StrAppend(&device_signals, R"JSON(,")JSON", - kBidCurrencyPropertyForScoreAd, R"JSON(":")JSON", - bid_currency, R"JSON(")JSON"); - } - // Only add top level seller to device signals if it's non empty. + std::string bid_metadata = MakeOpenBidMetadataJson( + publisher_hostname, interest_group_owner, render_url, + ad_component_render_urls, bid_currency); + // Only add top level seller to bid metadata if it's non empty. 
if (!top_level_seller.empty()) { - absl::StrAppend(&device_signals, R"JSON(,")JSON", + absl::StrAppend(&bid_metadata, R"JSON(")JSON", kTopLevelSellerFieldPropertyForScoreAd, R"JSON(":")JSON", - top_level_seller, R"JSON(")JSON"); + top_level_seller, R"JSON(",)JSON"); } - absl::StrAppend(&device_signals, ",\"", kRenderUrlsPropertyForScoreAd, - "\":\"", render_url, "\"}"); - return device_signals; + absl::StrAppend(&bid_metadata, R"JSON(")JSON", kRenderUrlsPropertyForScoreAd, + R"JSON(":")JSON", render_url, R"JSON("})JSON"); + return bid_metadata; +} + +std::string MakeBidMetadataForTopLevelAuction( + absl::string_view publisher_hostname, + absl::string_view interest_group_owner, absl::string_view render_url, + const google::protobuf::RepeatedPtrField& + ad_component_render_urls, + absl::string_view component_seller, absl::string_view bid_currency) { + std::string bid_metadata = MakeOpenBidMetadataJson( + publisher_hostname, interest_group_owner, render_url, + ad_component_render_urls, bid_currency); + absl::StrAppend(&bid_metadata, R"JSON(")JSON", + kComponentSellerFieldPropertyForScoreAd, R"JSON(":")JSON", + component_seller, R"JSON(",)JSON"); + absl::StrAppend(&bid_metadata, R"JSON(")JSON", kRenderUrlsPropertyForScoreAd, + R"JSON(":")JSON", render_url, R"JSON("})JSON"); + return bid_metadata; } absl::StatusOr> @@ -422,6 +459,7 @@ absl::StatusOr ParseScoreAdResponse( } // Parse component auction fields. + // For now this is only valid for ProtectedAuction ads. if (device_component_auction) { auto component_auction_itr = score_ad_resp.FindMember(kAllowComponentAuctionPropertyForScoreAd); @@ -504,4 +542,60 @@ absl::StatusOr ParseScoreAdResponse( return score_ads_response; } +absl::StatusOr BuildScoreAdRequest( + absl::string_view ad_render_url, absl::string_view ad_metadata_json, + absl::string_view scoring_signals, float ad_bid, + const std::shared_ptr& auction_config, + absl::string_view bid_metadata, + server_common::log::ContextImpl& log_context, + const bool enable_adtech_code_logging, const bool enable_debug_reporting) { + // Construct the wrapper struct for our V8 Dispatch Request. + DispatchRequest score_ad_request; + // TODO(b/250893468) Revisit dispatch id. + score_ad_request.id = ad_render_url; + // TODO(b/258790164) Update after code is fetched periodically. + score_ad_request.version_string = "v1"; + score_ad_request.handler_name = DispatchHandlerFunctionWithSellerWrapper; + + score_ad_request.input = std::vector>( + kArgSizeWithWrapper); // ScoreAdArgs size + + score_ad_request.input[ScoreArgIndex(ScoreAdArgs::kAdMetadata)] = + std::make_shared(ad_metadata_json); + score_ad_request.input[ScoreArgIndex(ScoreAdArgs::kBid)] = + std::make_shared(std::to_string(ad_bid)); + score_ad_request.input[ScoreArgIndex(ScoreAdArgs::kAuctionConfig)] = + auction_config; + // TODO(b/258697130): Roma client string support bug + score_ad_request.input[ScoreArgIndex(ScoreAdArgs::kScoringSignals)] = + std::make_shared(scoring_signals); + score_ad_request.input[ScoreArgIndex(ScoreAdArgs::kBidMetadata)] = + std::make_shared(bid_metadata); + // This is only added to prevent errors in the score ad script, and + // will always be an empty object. 
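
To make the shape of the output concrete (illustrative values only; the property names are what the constants above resolve to, as asserted by the unit tests later in this patch): with all optional fields present, MakeBidMetadata produces

    {"interestGroupOwner":"ig_owner","topWindowHostname":"publisher.com",
     "adComponents":["https://component_1","https://component_2"],
     "bidCurrency":"USD","topLevelSeller":"top_seller",
     "renderUrl":"https://render_url"}

and MakeBidMetadataForTopLevelAuction emits "componentSeller" in place of "topLevelSeller".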
 
 absl::StatusOr<rapidjson::Document>
@@ -422,6 +459,7 @@ absl::StatusOr<ScoreAdsResponse::AdScore> ParseScoreAdResponse(
   }
 
   // Parse component auction fields.
+  // For now this is only valid for ProtectedAuction ads.
   if (device_component_auction) {
     auto component_auction_itr =
         score_ad_resp.FindMember(kAllowComponentAuctionPropertyForScoreAd);
@@ -504,4 +542,60 @@ absl::StatusOr<ScoreAdsResponse::AdScore> ParseScoreAdResponse(
   return score_ads_response;
 }
 
+absl::StatusOr<DispatchRequest> BuildScoreAdRequest(
+    absl::string_view ad_render_url, absl::string_view ad_metadata_json,
+    absl::string_view scoring_signals, float ad_bid,
+    const std::shared_ptr<std::string>& auction_config,
+    absl::string_view bid_metadata,
+    server_common::log::ContextImpl& log_context,
+    const bool enable_adtech_code_logging, const bool enable_debug_reporting) {
+  // Construct the wrapper struct for our V8 Dispatch Request.
+  DispatchRequest score_ad_request;
+  // TODO(b/250893468) Revisit dispatch id.
+  score_ad_request.id = ad_render_url;
+  // TODO(b/258790164) Update after code is fetched periodically.
+  score_ad_request.version_string = "v1";
+  score_ad_request.handler_name = DispatchHandlerFunctionWithSellerWrapper;
+
+  score_ad_request.input = std::vector<std::shared_ptr<std::string>>(
+      kArgSizeWithWrapper);  // ScoreAdArgs size
+
+  score_ad_request.input[ScoreArgIndex(ScoreAdArgs::kAdMetadata)] =
+      std::make_shared<std::string>(ad_metadata_json);
+  score_ad_request.input[ScoreArgIndex(ScoreAdArgs::kBid)] =
+      std::make_shared<std::string>(std::to_string(ad_bid));
+  score_ad_request.input[ScoreArgIndex(ScoreAdArgs::kAuctionConfig)] =
+      auction_config;
+  // TODO(b/258697130): Roma client string support bug
+  score_ad_request.input[ScoreArgIndex(ScoreAdArgs::kScoringSignals)] =
+      std::make_shared<std::string>(scoring_signals);
+  score_ad_request.input[ScoreArgIndex(ScoreAdArgs::kBidMetadata)] =
+      std::make_shared<std::string>(bid_metadata);
+  // This is only added to prevent errors in the score ad script, and
+  // will always be an empty object.
+  score_ad_request.input[ScoreArgIndex(ScoreAdArgs::kDirectFromSellerSignals)] =
+      std::make_shared<std::string>("{}");
+  score_ad_request.input[ScoreArgIndex(ScoreAdArgs::kFeatureFlags)] =
+      std::make_shared<std::string>(GetFeatureFlagJson(
+          enable_adtech_code_logging, enable_debug_reporting));
+
+  MayLogScoreAdsInput(score_ad_request.input, log_context);
+  return score_ad_request;
+}
+
+std::unique_ptr<ScoreAdsRequest::ScoreAdsRawRequest::AdWithBidMetadata>
+MapAuctionResultToAdWithBidMetadata(AuctionResult& auction_result) {
+  auto ad = std::make_unique<
+      ScoreAdsRequest::ScoreAdsRawRequest::AdWithBidMetadata>();
+  ad->set_bid(auction_result.bid());
+  ad->set_allocated_render(auction_result.release_ad_render_url());
+  ad->mutable_ad_components()->Swap(
+      auction_result.mutable_ad_component_render_urls());
+  ad->set_allocated_interest_group_name(
+      auction_result.release_interest_group_name());
+  ad->set_allocated_interest_group_owner(
+      auction_result.release_interest_group_owner());
+  ad->set_allocated_bid_currency(auction_result.release_bid_currency());
+  return ad;
+}
+
 }  // namespace privacy_sandbox::bidding_auction_servers
diff --git a/services/auction_service/utils/proto_utils.h b/services/auction_service/utils/proto_utils.h
index 0aa76083..dad6b471 100644
--- a/services/auction_service/utils/proto_utils.h
+++ b/services/auction_service/utils/proto_utils.h
@@ -32,18 +32,25 @@
 #include "services/auction_service/auction_constants.h"
 #include "services/auction_service/code_wrapper/seller_code_wrapper.h"
 #include "services/common/clients/code_dispatcher/v8_dispatcher.h"
-#include "src/cpp/logger/request_context_impl.h"
-#include "src/cpp/util/status_macro/status_macros.h"
+#include "src/logger/request_context_impl.h"
+#include "src/util/status_macro/status_macros.h"
 
 namespace privacy_sandbox::bidding_auction_servers {
 
-std::string MakeDeviceSignals(
+std::string MakeBidMetadata(
     absl::string_view publisher_hostname,
     absl::string_view interest_group_owner, absl::string_view render_url,
     const google::protobuf::RepeatedPtrField<std::string>&
         ad_component_render_urls,
     absl::string_view top_level_seller, absl::string_view bid_currency);
 
+std::string MakeBidMetadataForTopLevelAuction(
+    absl::string_view publisher_hostname,
+    absl::string_view interest_group_owner, absl::string_view render_url,
+    const google::protobuf::RepeatedPtrField<std::string>&
+        ad_component_render_urls,
+    absl::string_view component_seller, absl::string_view bid_currency);
+
 std::shared_ptr<std::string> BuildAuctionConfig(
     const ScoreAdsRequest::ScoreAdsRawRequest& raw_request);
 
@@ -80,6 +87,16 @@ absl::StatusOr<ScoreAdsResponse::AdScore> ParseScoreAdResponse(
 constexpr int ScoreArgIndex(ScoreAdArgs arg) {
   return static_cast<std::underlying_type_t<ScoreAdArgs>>(arg);
 }
+
+/**
+ * Maps a component auction result object to an AdWithBidMetadata object for
+ * creating dispatch requests and performing post-auction operations.
+ * The auction result object is invalidated after this method is called,
+ * since this method moves values rather than copying them.
+ */
+std::unique_ptr<ScoreAdsRequest::ScoreAdsRawRequest::AdWithBidMetadata>
+MapAuctionResultToAdWithBidMetadata(AuctionResult& auction_result);
+
 /**
  * Builds the ScoreAdInput, following the description here:
  * https://github.com/privacysandbox/fledge-docs/blob/main/bidding_auction_services_api.md#scoreads
@@ -87,71 +104,44 @@ constexpr int ScoreArgIndex(ScoreAdArgs arg) {
  * https://github.com/WICG/turtledove/blob/main/FLEDGE.md#23-scoring-bids.
  *
  * NOTE: All inputs MUST be valid JSON, not just something Javascript would
- * accept. Property names need to be in quotes!
+ * accept. Property names need to be in quotes! Additionally there are
+ * considerations for how ROMA parses these -
+ *   null can be passed
+ *   undefined doesn't work
+ *   empty string is mapped to undefined
+ *   all strings have to be valid JSON objects ({}) or escaped JSON Strings
+ *   ("hello-world").
 */
-template <typename T>
-absl::StatusOr<std::vector<std::shared_ptr<std::string>>> ScoreAdInput(
-    const T& ad, std::shared_ptr<std::string> auction_config,
-    const absl::flat_hash_map<std::string, rapidjson::StringBuffer>&
-        scoring_signals,
+absl::StatusOr<DispatchRequest> BuildScoreAdRequest(
+    absl::string_view ad_render_url, absl::string_view ad_metadata_json,
+    absl::string_view scoring_signals, float ad_bid,
+    const std::shared_ptr<std::string>& auction_config,
+    absl::string_view bid_metadata,
     server_common::log::ContextImpl& log_context,
-    bool enable_adtech_code_logging, bool enable_debug_reporting,
-    absl::string_view device_signals) {
-  std::vector<std::shared_ptr<std::string>> input(
-      kArgSizeWithWrapper);  // ScoreAdArgs size
-
-  // TODO: b/260265272
-  std::string adMetadataAsJson;
-  const auto& it = ad.ad().struct_value().fields().find("metadata");
-  if (it != ad.ad().struct_value().fields().end()) {
-    PS_RETURN_IF_ERROR(google::protobuf::util::MessageToJsonString(
-        it->second, &adMetadataAsJson));
-  }
-  input[ScoreArgIndex(ScoreAdArgs::kAdMetadata)] =
-      std::make_shared<std::string>(adMetadataAsJson);
-  input[ScoreArgIndex(ScoreAdArgs::kBid)] =
-      std::make_shared<std::string>(std::to_string(ad.bid()));
-  input[ScoreArgIndex(ScoreAdArgs::kAuctionConfig)] = auction_config;
-  // TODO(b/258697130): Roma client string support bug
-  input[ScoreArgIndex(ScoreAdArgs::kScoringSignals)] =
-      std::make_shared<std::string>(
-          scoring_signals.at(ad.render()).GetString());
-  input[ScoreArgIndex(ScoreAdArgs::kDeviceSignals)] =
-      std::make_shared<std::string>(device_signals);
-  // This is only added to prevent errors in the score ad script, and
-  // will always be an empty object.
-  input[ScoreArgIndex(ScoreAdArgs::kDirectFromSellerSignals)] =
-      std::make_shared<std::string>("{}");
-  input[ScoreArgIndex(ScoreAdArgs::kFeatureFlags)] =
-      std::make_shared<std::string>(GetFeatureFlagJson(
-          enable_adtech_code_logging, enable_debug_reporting));
-
-  MayLogScoreAdsInput(input, log_context);
-  return input;
-}
+    const bool enable_adtech_code_logging, const bool enable_debug_reporting);
 
+/**
+ * Builds ScoreAdInput with AdWithBid or ProtectedAppSignalsAdWithBid.
+ */
 template <typename T>
 absl::StatusOr<DispatchRequest> BuildScoreAdRequest(
-    const T& ad, std::shared_ptr<std::string> auction_config,
+    const T& ad, const std::shared_ptr<std::string>& auction_config,
     const absl::flat_hash_map<std::string, rapidjson::StringBuffer>&
         scoring_signals,
     const bool enable_debug_reporting,
     server_common::log::ContextImpl& log_context,
-    const bool enable_adtech_code_logging, absl::string_view device_signals) {
-  // Construct the wrapper struct for our V8 Dispatch Request.
-  DispatchRequest score_ad_request;
-  // TODO(b/250893468) Revisit dispatch id.
-  score_ad_request.id = ad.render();
-  // TODO(b/258790164) Update after code is fetched periodically.
-  score_ad_request.version_string = "v1";
-  score_ad_request.handler_name = DispatchHandlerFunctionWithSellerWrapper;
-
-  PS_ASSIGN_OR_RETURN(score_ad_request.input,
-                      ScoreAdInput(ad, auction_config, scoring_signals,
-                                   log_context, enable_adtech_code_logging,
-                                   enable_debug_reporting, device_signals));
-  return score_ad_request;
+    const bool enable_adtech_code_logging, absl::string_view bid_metadata) {
+  std::string ad_metadata_as_json;
+  const auto& it = ad.ad().struct_value().fields().find("metadata");
+  if (it != ad.ad().struct_value().fields().end()) {
+    PS_RETURN_IF_ERROR(google::protobuf::util::MessageToJsonString(
+        it->second, &ad_metadata_as_json));
+  }
+  return BuildScoreAdRequest(ad.render(), ad_metadata_as_json,
+                             scoring_signals.at(ad.render()).GetString(),
+                             ad.bid(), auction_config, bid_metadata,
+                             log_context, enable_adtech_code_logging,
+                             enable_debug_reporting);
 }
 
 }  // namespace privacy_sandbox::bidding_auction_servers
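
A small illustration of the ROMA formatting rules noted above (a sketch with hypothetical values, mirroring how BuildScoreAdRequest populates DispatchRequest::input; every element must itself parse as JSON):

    auto ok_object = std::make_shared<std::string>("{}");          // valid: empty JSON object
    auto ok_string = std::make_shared<std::string>(R"("hello")");  // valid: escaped JSON string
    auto bad_empty = std::make_shared<std::string>("");            // becomes undefined in the UDF

This is why the kDirectFromSellerSignals slot is always populated with the literal "{}" rather than left empty.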
+ +#include "services/auction_service/utils/proto_utils.h" + +#include + +#include "gmock/gmock.h" +#include "gtest/gtest.h" +#include "rapidjson/document.h" +#include "rapidjson/pointer.h" +#include "rapidjson/writer.h" +#include "services/auction_service/score_ads_reactor_test_util.h" +#include "services/common/test/random.h" +#include "src/core/test/utils/proto_test_utils.h" + +namespace privacy_sandbox::bidding_auction_servers { +namespace { + +constexpr char kTestPublisher[] = "publisher.com"; +constexpr char kTestIGOwner[] = "interest_group_owner"; +constexpr char kTestRenderUrl[] = "https://render_url"; +constexpr char kTestComponentSeller[] = "component_seller_origin"; +constexpr char kTestTopLevelSeller[] = "top_level_seller_origin"; +constexpr char kTestAdComponentUrl_1[] = "https://compoennt_1"; +constexpr char kTestAdComponentUrl_2[] = "https://component_2"; +constexpr char kTestBidCurrency[] = "ABC"; + +google::protobuf::RepeatedPtrField MakeMockAdComponentUrls() { + google::protobuf::RepeatedPtrField component_urls; + component_urls.Add(kTestAdComponentUrl_1); + component_urls.Add(kTestAdComponentUrl_2); + return component_urls; +} + +rapidjson::Document ParseJson(absl::string_view output) { + rapidjson::Document parsed_output; + rapidjson::ParseResult parsed_result = + parsed_output.Parse(output.data()); + EXPECT_FALSE(parsed_result.IsError()) << parsed_result.Code(); + return parsed_output; +} + +TEST(MakeBidMetadataForTopLevelAuctionTest, PopulatesExpectedValues) { + std::string output = MakeBidMetadataForTopLevelAuction( + kTestPublisher, kTestIGOwner, kTestRenderUrl, MakeMockAdComponentUrls(), + kTestComponentSeller, kTestBidCurrency); + + auto parsed_output = ParseJson(output); + EXPECT_EQ(parsed_output["topWindowHostname"], kTestPublisher); + EXPECT_EQ(parsed_output["interestGroupOwner"], kTestIGOwner); + EXPECT_EQ(parsed_output["renderUrl"], kTestRenderUrl); + EXPECT_EQ(parsed_output["componentSeller"], kTestComponentSeller); + ASSERT_TRUE(parsed_output["adComponents"].IsArray()); + EXPECT_EQ(parsed_output["adComponents"].GetArray()[0], kTestAdComponentUrl_1); + EXPECT_EQ(parsed_output["adComponents"].GetArray()[1], kTestAdComponentUrl_2); + EXPECT_EQ(parsed_output["bidCurrency"], kTestBidCurrency); +} + +TEST(MakeBidMetadataTest, PopulatesExpectedValues) { + std::string output = + MakeBidMetadata(kTestPublisher, kTestIGOwner, kTestRenderUrl, + MakeMockAdComponentUrls(), "", kTestBidCurrency); + + auto parsed_output = ParseJson(output); + EXPECT_EQ(parsed_output["topWindowHostname"], kTestPublisher); + EXPECT_EQ(parsed_output["interestGroupOwner"], kTestIGOwner); + EXPECT_EQ(parsed_output["renderUrl"], kTestRenderUrl); + ASSERT_TRUE(parsed_output["adComponents"].IsArray()); + EXPECT_EQ(parsed_output["adComponents"].GetArray()[0], kTestAdComponentUrl_1); + EXPECT_EQ(parsed_output["adComponents"].GetArray()[1], kTestAdComponentUrl_2); + EXPECT_EQ(parsed_output["bidCurrency"], kTestBidCurrency); +} + +TEST(MakeBidMetadataTest, PopulatesExpectedValuesForComponentAuction) { + std::string output = MakeBidMetadata( + kTestPublisher, kTestIGOwner, kTestRenderUrl, MakeMockAdComponentUrls(), + kTestTopLevelSeller, kTestBidCurrency); + + auto parsed_output = ParseJson(output); + EXPECT_EQ(parsed_output["topWindowHostname"], kTestPublisher); + EXPECT_EQ(parsed_output["interestGroupOwner"], kTestIGOwner); + EXPECT_EQ(parsed_output["renderUrl"], kTestRenderUrl); + ASSERT_TRUE(parsed_output["adComponents"].IsArray()); + EXPECT_EQ(parsed_output["adComponents"].GetArray()[0], 
kTestAdComponentUrl_1); + EXPECT_EQ(parsed_output["adComponents"].GetArray()[1], kTestAdComponentUrl_2); + EXPECT_EQ(parsed_output["topLevelSeller"], kTestTopLevelSeller); + EXPECT_EQ(parsed_output["bidCurrency"], kTestBidCurrency); +} + +using AdWithBidMetadata = + ScoreAdsRequest::ScoreAdsRawRequest::AdWithBidMetadata; +using google::scp::core::test::EqualsProto; + +TEST(MapAuctionResultToAdWithBidMetadataTest, PopulatesExpectedValues) { + AuctionResult auction_result = MakeARandomComponentAuctionResult( + MakeARandomString(), kTestTopLevelSeller); + + // copy since this will be invalidated after this function. + AuctionResult input; + input.CopyFrom(auction_result); + auto output = MapAuctionResultToAdWithBidMetadata(input); + + EXPECT_EQ(auction_result.bid(), output->bid()); + EXPECT_EQ(auction_result.ad_render_url(), output->render()); + EXPECT_EQ(auction_result.interest_group_name(), + output->interest_group_name()); + EXPECT_EQ(auction_result.interest_group_owner(), + output->interest_group_owner()); + EXPECT_EQ(auction_result.bid_currency(), output->bid_currency()); + EXPECT_EQ(output->ad_components_size(), + auction_result.ad_component_render_urls_size()); + for (auto& ad_component_url : auction_result.ad_component_render_urls()) { + EXPECT_THAT(output->ad_components(), testing::Contains(ad_component_url)); + } +} + +constexpr char kTestAdMetadataJson[] = R"JSON({"arbitraryMetadataKey":2})JSON"; +constexpr char kTestScoringSignals[] = R"JSON({"RandomScoringSignals":{}})JSON"; +constexpr char kTestAuctionConfig[] = R"JSON({"RandomAuctionConfig":{}})JSON"; +constexpr char kTestBidMetadata[] = R"JSON({"RandomBidMetadata":{}})JSON"; +server_common::log::ContextImpl log_context{ + {}, server_common::ConsentedDebugConfiguration()}; + +TEST(BuildScoreAdRequestTest, PopulatesExpectedValuesInDispatchRequest) { + float test_bid = MakeARandomNumber(0.1, 10.1); + auto output = BuildScoreAdRequest( + kTestRenderUrl, kTestAdMetadataJson, kTestScoringSignals, test_bid, + std::make_shared(kTestAuctionConfig), kTestBidMetadata, + log_context, + /*enable_adtech_code_logging = */ false, + /*enable_debug_reporting = */ false); + ASSERT_TRUE(output.ok()); + EXPECT_EQ(output->id, kTestRenderUrl); + EXPECT_EQ(output->version_string, "v1"); + EXPECT_EQ(output->handler_name, DispatchHandlerFunctionWithSellerWrapper); + EXPECT_EQ(*output->input[ScoreArgIndex(ScoreAdArgs::kAdMetadata)], + kTestAdMetadataJson); + EXPECT_EQ(*output->input[ScoreArgIndex(ScoreAdArgs::kBid)], + std::to_string(test_bid)); + EXPECT_EQ(*output->input[ScoreArgIndex(ScoreAdArgs::kAuctionConfig)], + kTestAuctionConfig); + EXPECT_EQ(*output->input[ScoreArgIndex(ScoreAdArgs::kScoringSignals)], + kTestScoringSignals); + EXPECT_EQ(*output->input[ScoreArgIndex(ScoreAdArgs::kBidMetadata)], + kTestBidMetadata); + EXPECT_EQ( + *output->input[ScoreArgIndex(ScoreAdArgs::kDirectFromSellerSignals)], + "{}"); +} + +AdWithBidMetadata MakeAnAdWithMetadata(float bid) { + AdWithBidMetadata ad; + ad.set_render(kTestRenderUrl); + ad.set_bid(bid); + ad.mutable_ad()->mutable_struct_value()->MergeFrom( + MakeAnAd(kTestRenderUrl, "arbitraryMetadataKey", 2)); + return ad; +} + +absl::flat_hash_map +MakeScoringSignalsForAd() { + absl::flat_hash_map + combined_formatted_ad_signals; + rapidjson::Document scoring_signal; + scoring_signal.Parse(kTestScoringSignals); + rapidjson::StringBuffer buffer; + rapidjson::Writer writer(buffer); + scoring_signal.Accept(writer); + combined_formatted_ad_signals.try_emplace(kTestRenderUrl, std::move(buffer)); + return 
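+
+// Illustrative note (not part of the upstream test): the CopyFrom above is
+// load-bearing. MapAuctionResultToAdWithBidMetadata releases/swaps the string
+// fields out of its argument, so a sketch of the wrong usage would be:
+//   auto ad = MapAuctionResultToAdWithBidMetadata(auction_result);
+//   auction_result.ad_render_url();  // now empty; the value moved into `ad`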
+
+constexpr char kTestAdMetadataJson[] = R"JSON({"arbitraryMetadataKey":2})JSON";
+constexpr char kTestScoringSignals[] = R"JSON({"RandomScoringSignals":{}})JSON";
+constexpr char kTestAuctionConfig[] = R"JSON({"RandomAuctionConfig":{}})JSON";
+constexpr char kTestBidMetadata[] = R"JSON({"RandomBidMetadata":{}})JSON";
+server_common::log::ContextImpl log_context{
+    {}, server_common::ConsentedDebugConfiguration()};
+
+TEST(BuildScoreAdRequestTest, PopulatesExpectedValuesInDispatchRequest) {
+  float test_bid = MakeARandomNumber<float>(0.1, 10.1);
+  auto output = BuildScoreAdRequest(
+      kTestRenderUrl, kTestAdMetadataJson, kTestScoringSignals, test_bid,
+      std::make_shared<std::string>(kTestAuctionConfig), kTestBidMetadata,
+      log_context,
+      /*enable_adtech_code_logging = */ false,
+      /*enable_debug_reporting = */ false);
+  ASSERT_TRUE(output.ok());
+  EXPECT_EQ(output->id, kTestRenderUrl);
+  EXPECT_EQ(output->version_string, "v1");
+  EXPECT_EQ(output->handler_name, DispatchHandlerFunctionWithSellerWrapper);
+  EXPECT_EQ(*output->input[ScoreArgIndex(ScoreAdArgs::kAdMetadata)],
+            kTestAdMetadataJson);
+  EXPECT_EQ(*output->input[ScoreArgIndex(ScoreAdArgs::kBid)],
+            std::to_string(test_bid));
+  EXPECT_EQ(*output->input[ScoreArgIndex(ScoreAdArgs::kAuctionConfig)],
+            kTestAuctionConfig);
+  EXPECT_EQ(*output->input[ScoreArgIndex(ScoreAdArgs::kScoringSignals)],
+            kTestScoringSignals);
+  EXPECT_EQ(*output->input[ScoreArgIndex(ScoreAdArgs::kBidMetadata)],
+            kTestBidMetadata);
+  EXPECT_EQ(
+      *output->input[ScoreArgIndex(ScoreAdArgs::kDirectFromSellerSignals)],
+      "{}");
+}
+
+AdWithBidMetadata MakeAnAdWithMetadata(float bid) {
+  AdWithBidMetadata ad;
+  ad.set_render(kTestRenderUrl);
+  ad.set_bid(bid);
+  ad.mutable_ad()->mutable_struct_value()->MergeFrom(
+      MakeAnAd(kTestRenderUrl, "arbitraryMetadataKey", 2));
+  return ad;
+}
+
+absl::flat_hash_map<std::string, rapidjson::StringBuffer>
+MakeScoringSignalsForAd() {
+  absl::flat_hash_map<std::string, rapidjson::StringBuffer>
+      combined_formatted_ad_signals;
+  rapidjson::Document scoring_signal;
+  scoring_signal.Parse(kTestScoringSignals);
+  rapidjson::StringBuffer buffer;
+  rapidjson::Writer<rapidjson::StringBuffer> writer(buffer);
+  scoring_signal.Accept(writer);
+  combined_formatted_ad_signals.try_emplace(kTestRenderUrl, std::move(buffer));
+  return combined_formatted_ad_signals;
+}
+
+TEST(BuildScoreAdRequestTest, PopulatesExpectedValuesForAdWithBidMetadata) {
+  float test_bid = MakeARandomNumber<float>(0.1, 10.1);
+  auto output = BuildScoreAdRequest(
+      MakeAnAdWithMetadata(test_bid),
+      std::make_shared<std::string>(kTestAuctionConfig),
+      MakeScoringSignalsForAd(), /*enable_debug_reporting = */ false,
+      log_context, /*enable_adtech_code_logging = */ false, kTestBidMetadata);
+  ASSERT_TRUE(output.ok());
+  EXPECT_EQ(output->id, kTestRenderUrl);
+  EXPECT_EQ(output->version_string, "v1");
+  EXPECT_EQ(output->handler_name, DispatchHandlerFunctionWithSellerWrapper);
+  EXPECT_EQ(*output->input[ScoreArgIndex(ScoreAdArgs::kAdMetadata)],
+            kTestAdMetadataJson);
+  EXPECT_EQ(*output->input[ScoreArgIndex(ScoreAdArgs::kBid)],
+            std::to_string(test_bid));
+  EXPECT_EQ(*output->input[ScoreArgIndex(ScoreAdArgs::kAuctionConfig)],
+            kTestAuctionConfig);
+  EXPECT_EQ(*output->input[ScoreArgIndex(ScoreAdArgs::kScoringSignals)],
+            kTestScoringSignals);
+  EXPECT_EQ(*output->input[ScoreArgIndex(ScoreAdArgs::kBidMetadata)],
+            kTestBidMetadata);
+  EXPECT_EQ(
+      *output->input[ScoreArgIndex(ScoreAdArgs::kDirectFromSellerSignals)],
+      "{}");
+}
+
+TEST(BuildScoreAdRequestTest,
+     PopulatesExpectedValuesForProtectedAppSignalsAd) {
+  float test_bid = MakeARandomNumber<float>(0.1, 10.1);
+  auto output = BuildScoreAdRequest(
+      GetProtectedAppSignalsAdWithBidMetadata(kTestRenderUrl, test_bid),
+      std::make_shared<std::string>(kTestAuctionConfig),
+      MakeScoringSignalsForAd(), /*enable_debug_reporting = */ false,
+      log_context, /*enable_adtech_code_logging = */ false, kTestBidMetadata);
+  ASSERT_TRUE(output.ok());
+  EXPECT_EQ(output->id, kTestRenderUrl);
+  EXPECT_EQ(output->version_string, "v1");
+  EXPECT_EQ(output->handler_name, DispatchHandlerFunctionWithSellerWrapper);
+  EXPECT_EQ(*output->input[ScoreArgIndex(ScoreAdArgs::kAdMetadata)],
+            kTestAdMetadataJson);
+  EXPECT_EQ(*output->input[ScoreArgIndex(ScoreAdArgs::kBid)],
+            std::to_string(test_bid));
+  EXPECT_EQ(*output->input[ScoreArgIndex(ScoreAdArgs::kAuctionConfig)],
+            kTestAuctionConfig);
+  EXPECT_EQ(*output->input[ScoreArgIndex(ScoreAdArgs::kScoringSignals)],
+            kTestScoringSignals);
+  EXPECT_EQ(*output->input[ScoreArgIndex(ScoreAdArgs::kBidMetadata)],
+            kTestBidMetadata);
+  EXPECT_EQ(
+      *output->input[ScoreArgIndex(ScoreAdArgs::kDirectFromSellerSignals)],
+      "{}");
+}
+
+std::string MakeFeatureFlagJson(bool enable_adtech_code_logging,
+                                bool enable_debug_reporting) {
+  return absl::StrCat("{\"", "enable_logging\": ",
+                      enable_adtech_code_logging ? "true" : "false",
+                      ",\"enable_debug_url_generation\": ",
+                      enable_debug_reporting ? "true" : "false", "}");
+}
+
+TEST(BuildScoreAdRequestTest, PopulatesFeatureFlagsInDispatchRequest) {
+  std::vector<bool> flag_values = {true, false};
+  for (auto flag_1 : flag_values) {
+    for (auto flag_2 : flag_values) {
+      auto output = BuildScoreAdRequest(
+          kTestRenderUrl, kTestAdMetadataJson, kTestScoringSignals,
+          MakeARandomNumber<float>(0.1, 10.1),
+          std::make_shared<std::string>(kTestAuctionConfig), kTestBidMetadata,
+          log_context,
+          /*enable_adtech_code_logging = */ flag_1,
+          /*enable_debug_reporting = */ flag_2);
+      ASSERT_TRUE(output.ok());
+      EXPECT_EQ(*output->input[ScoreArgIndex(ScoreAdArgs::kFeatureFlags)],
+                MakeFeatureFlagJson(flag_1, flag_2));
+    }
+  }
+}
+
+TEST(BuildScoreAdRequestTest, PopulatesFeatureFlagsForAdWithBidMetadata) {
+  std::vector<bool> flag_values = {true, false};
+  for (auto flag_1 : flag_values) {
+    for (auto flag_2 : flag_values) {
+      auto output = BuildScoreAdRequest(
+          MakeAnAdWithMetadata(MakeARandomNumber<float>(0.1, 10.1)),
+          std::make_shared<std::string>(kTestAuctionConfig),
+          MakeScoringSignalsForAd(), /*enable_debug_reporting = */ flag_2,
+          log_context, /*enable_adtech_code_logging = */ flag_1,
+          kTestBidMetadata);
+      ASSERT_TRUE(output.ok());
+      EXPECT_EQ(*output->input[ScoreArgIndex(ScoreAdArgs::kFeatureFlags)],
+                MakeFeatureFlagJson(flag_1, flag_2));
+    }
+  }
+}
+
+TEST(BuildScoreAdRequestTest, PopulatesFeatureFlagsForPASAdWithBidMetadata) {
+  std::vector<bool> flag_values = {true, false};
+  for (auto flag_1 : flag_values) {
+    for (auto flag_2 : flag_values) {
+      auto output = BuildScoreAdRequest(
+          GetProtectedAppSignalsAdWithBidMetadata(kTestRenderUrl),
+          std::make_shared<std::string>(kTestAuctionConfig),
+          MakeScoringSignalsForAd(), /*enable_debug_reporting = */ flag_2,
+          log_context, /*enable_adtech_code_logging = */ flag_1,
+          kTestBidMetadata);
+      ASSERT_TRUE(output.ok());
+      EXPECT_EQ(*output->input[ScoreArgIndex(ScoreAdArgs::kFeatureFlags)],
+                MakeFeatureFlagJson(flag_1, flag_2));
+    }
+  }
+}
+
+}  // namespace
+}  // namespace privacy_sandbox::bidding_auction_servers
diff --git a/services/bidding_service/BUILD b/services/bidding_service/BUILD
index 5ce45fa1..f36c64e3 100644
--- a/services/bidding_service/BUILD
+++ b/services/bidding_service/BUILD
@@ -23,6 +23,7 @@ cc_library(
     ],
     visibility = ["//visibility:public"],
     deps = [
+        "//services/bidding_service/inference:inference_flags",
         "//services/common/constants:common_service_flags",
         "@com_google_absl//absl/strings",
     ],
@@ -46,12 +47,13 @@ cc_library(
         "@com_google_absl//absl/status",
         "@com_google_absl//absl/strings",
         "@com_google_absl//absl/time",
-        "@google_privacysandbox_servers_common//scp/cc/public/core/interface:errors",
-        "@google_privacysandbox_servers_common//scp/cc/public/core/interface:execution_result",
-        "@google_privacysandbox_servers_common//scp/cc/public/cpio/interface:cpio",
-        "@google_privacysandbox_servers_common//scp/cc/public/cpio/interface/blob_storage_client",
-        "@google_privacysandbox_servers_common//src/cpp/concurrent:executor",
-        "@google_privacysandbox_servers_common//src/cpp/util/status_macro:status_macros",
+        "@google_privacysandbox_servers_common//src/concurrent:executor",
+        "@google_privacysandbox_servers_common//src/logger:request_context_impl",
+        "@google_privacysandbox_servers_common//src/public/core/interface:errors",
+        "@google_privacysandbox_servers_common//src/public/core/interface:execution_result",
+        "@google_privacysandbox_servers_common//src/public/cpio/interface:cpio",
+        "@google_privacysandbox_servers_common//src/public/cpio/interface/blob_storage_client",
+
"@google_privacysandbox_servers_common//src/util/status_macro:status_macros", ], ) @@ -74,7 +76,7 @@ cc_test( "@com_google_absl//absl/time", "@com_google_googletest//:gtest", "@com_google_googletest//:gtest_main", - "@google_privacysandbox_servers_common//scp/cc/public/cpio/mock/blob_storage_client:blob_storage_client_mock", + "@google_privacysandbox_servers_common//src/public/cpio/mock/blob_storage_client:blob_storage_client_mock", ], ) @@ -107,9 +109,9 @@ cc_library( "@com_github_grpc_grpc//:grpc++", "@com_google_absl//absl/container:flat_hash_map", "@com_google_absl//absl/container:flat_hash_set", - "@google_privacysandbox_servers_common//src/cpp/logger:request_context_impl", - "@google_privacysandbox_servers_common//src/cpp/util/status_macro:status_macros", - "@google_privacysandbox_servers_common//src/cpp/util/status_macro:status_util", + "@google_privacysandbox_servers_common//src/logger:request_context_impl", + "@google_privacysandbox_servers_common//src/util/status_macro:status_macros", + "@google_privacysandbox_servers_common//src/util/status_macro:status_util", "@rapidjson", ], ) @@ -135,8 +137,8 @@ cc_test( "@com_google_absl//absl/time", "@com_google_googletest//:gtest", "@com_google_googletest//:gtest_main", - "@google_privacysandbox_servers_common//src/cpp/encryption/key_fetcher/mock:mock_key_fetcher_manager", - "@google_privacysandbox_servers_common//src/cpp/encryption/key_fetcher/src:key_fetcher_manager", + "@google_privacysandbox_servers_common//src/encryption/key_fetcher:key_fetcher_manager", + "@google_privacysandbox_servers_common//src/encryption/key_fetcher/mock:mock_key_fetcher_manager", ], ) @@ -158,7 +160,7 @@ cc_test( "@com_google_absl//absl/time", "@com_google_googletest//:gtest", "@com_google_googletest//:gtest_main", - "@google_privacysandbox_servers_common//src/cpp/encryption/key_fetcher/src:key_fetcher_manager", + "@google_privacysandbox_servers_common//src/encryption/key_fetcher:key_fetcher_manager", ], ) @@ -207,9 +209,9 @@ cc_library( "//services/common/metric:server_definition", "@aws_sdk_cpp//:core", "@com_github_grpc_grpc//:grpc++", - "@google_privacysandbox_servers_common//scp/cc/public/cpio/interface:cpio", - "@google_privacysandbox_servers_common//src/cpp/encryption/key_fetcher/src:key_fetcher_manager", - "@google_privacysandbox_servers_common//src/cpp/telemetry", + "@google_privacysandbox_servers_common//src/encryption/key_fetcher:key_fetcher_manager", + "@google_privacysandbox_servers_common//src/public/cpio/interface:cpio", + "@google_privacysandbox_servers_common//src/telemetry", ], ) @@ -231,6 +233,7 @@ cc_binary( name = "server", srcs = ["bidding_main.cc"], defines = select({ + "//:inference": ["PS_INFERENCE_NON_PROD=1"], "//conditions:default": [], }), linkopts = [ @@ -249,6 +252,8 @@ cc_binary( "//services/bidding_service/benchmarking:bidding_no_op_logger", "//services/bidding_service/code_wrapper:buyer_code_wrapper", "//services/bidding_service/data:runtime_config", + "//services/bidding_service/inference:inference_utils", + "//services/common/blob_fetch:blob_fetcher", "//services/common/clients/config:config_client_util", "//services/common/clients/http:multi_curl_http_fetcher_async", "//services/common/code_fetch:periodic_code_fetcher", @@ -264,12 +269,14 @@ cc_binary( "@com_google_absl//absl/flags:parse", "@com_google_absl//absl/log:check", "@com_google_absl//absl/strings", - "@google_privacysandbox_servers_common//scp/cc/public/cpio/interface/blob_storage_client", - "@google_privacysandbox_servers_common//src/cpp/concurrent:executor", - 
"@google_privacysandbox_servers_common//src/cpp/encryption/key_fetcher/src:key_fetcher_manager", - "@google_privacysandbox_servers_common//src/cpp/telemetry", - "@google_privacysandbox_servers_common//src/cpp/util:rlimit_core_config", - "@google_privacysandbox_servers_common//src/cpp/util/status_macro:status_macros", + "@com_google_sandboxed_api//sandboxed_api/sandbox2:comms", + "@google_privacysandbox_servers_common//src/concurrent:executor", + "@google_privacysandbox_servers_common//src/encryption/key_fetcher:key_fetcher_manager", + "@google_privacysandbox_servers_common//src/public/cpio/interface/blob_storage_client", + "@google_privacysandbox_servers_common//src/telemetry", + "@google_privacysandbox_servers_common//src/util:rlimit_core_config", + "@google_privacysandbox_servers_common//src/util/status_macro:status_macros", + "@inference_common//sandbox:sandbox_executor", ], ) @@ -277,6 +284,7 @@ cc_library( name = "bidding_constants", hdrs = ["constants.h"], visibility = ["//visibility:public"], + deps = ["@com_google_absl//absl/strings"], ) cc_test( @@ -298,8 +306,8 @@ cc_test( "@com_google_absl//absl/time", "@com_google_googletest//:gtest", "@com_google_googletest//:gtest_main", - "@google_privacysandbox_servers_common//src/cpp/encryption/key_fetcher/mock:mock_key_fetcher_manager", - "@google_privacysandbox_servers_common//src/cpp/encryption/key_fetcher/src:key_fetcher_manager", + "@google_privacysandbox_servers_common//src/encryption/key_fetcher:key_fetcher_manager", + "@google_privacysandbox_servers_common//src/encryption/key_fetcher/mock:mock_key_fetcher_manager", "@service_value_key_fledge_privacysandbox//public/query/v2:get_values_v2_cc_grpc", "@service_value_key_fledge_privacysandbox//public/query/v2:get_values_v2_cc_proto", ], @@ -326,9 +334,9 @@ cc_library( "@com_google_absl//absl/time", "@com_google_googletest//:gtest", "@com_google_googletest//:gtest_main", - "@google_privacysandbox_servers_common//src/cpp/encryption/key_fetcher/mock:mock_key_fetcher_manager", - "@google_privacysandbox_servers_common//src/cpp/encryption/key_fetcher/src:key_fetcher_manager", - "@google_privacysandbox_servers_common//src/cpp/util/status_macro:status_macros", + "@google_privacysandbox_servers_common//src/encryption/key_fetcher:key_fetcher_manager", + "@google_privacysandbox_servers_common//src/encryption/key_fetcher/mock:mock_key_fetcher_manager", + "@google_privacysandbox_servers_common//src/util/status_macro:status_macros", "@service_value_key_fledge_privacysandbox//public/query/v2:get_values_v2_cc_proto", ], ) diff --git a/services/bidding_service/base_generate_bids_reactor.h b/services/bidding_service/base_generate_bids_reactor.h index 1b707ba9..529841e2 100644 --- a/services/bidding_service/base_generate_bids_reactor.h +++ b/services/bidding_service/base_generate_bids_reactor.h @@ -23,7 +23,7 @@ #include "services/bidding_service/data/runtime_config.h" #include "services/common/code_dispatch/code_dispatch_reactor.h" #include "services/common/util/request_response_constants.h" -#include "src/cpp/logger/request_context_impl.h" +#include "src/logger/request_context_impl.h" namespace privacy_sandbox::bidding_auction_servers { diff --git a/services/bidding_service/benchmarking/BUILD b/services/bidding_service/benchmarking/BUILD index 13f2b3b3..2c6e80a2 100644 --- a/services/bidding_service/benchmarking/BUILD +++ b/services/bidding_service/benchmarking/BUILD @@ -58,7 +58,7 @@ cc_binary( "//services/common/test:random", "@com_google_benchmark//:benchmark", 
"@com_google_benchmark//:benchmark_main", - "@google_privacysandbox_servers_common//src/cpp/concurrent:executor", - "@google_privacysandbox_servers_common//src/cpp/encryption/key_fetcher/mock:mock_key_fetcher_manager", + "@google_privacysandbox_servers_common//src/concurrent:executor", + "@google_privacysandbox_servers_common//src/encryption/key_fetcher/mock:mock_key_fetcher_manager", ], ) diff --git a/services/bidding_service/benchmarking/generate_bids_reactor_benchmarks.cc b/services/bidding_service/benchmarking/generate_bids_reactor_benchmarks.cc index ab297925..46c3bf98 100644 --- a/services/bidding_service/benchmarking/generate_bids_reactor_benchmarks.cc +++ b/services/bidding_service/benchmarking/generate_bids_reactor_benchmarks.cc @@ -30,7 +30,7 @@ #include "services/common/reporters/async_reporter.h" #include "services/common/test/mocks.h" #include "services/common/test/random.h" -#include "src/cpp/concurrent/event_engine_executor.h" +#include "src/concurrent/event_engine_executor.h" namespace privacy_sandbox::bidding_auction_servers { namespace { diff --git a/services/bidding_service/bidding_main.cc b/services/bidding_service/bidding_main.cc index fa975886..7911206f 100644 --- a/services/bidding_service/bidding_main.cc +++ b/services/bidding_service/bidding_main.cc @@ -30,8 +30,6 @@ #include "grpcpp/ext/proto_server_reflection_plugin.h" #include "grpcpp/grpcpp.h" #include "grpcpp/health_check_service_interface.h" -#include "public/cpio/interface/cpio.h" -#include "scp/cc/public/cpio/proto/blob_storage_service/v1/blob_storage_service.pb.h" #include "services/bidding_service/benchmarking/bidding_benchmarking_logger.h" #include "services/bidding_service/benchmarking/bidding_no_op_logger.h" #include "services/bidding_service/bidding_code_fetch_config.pb.h" @@ -52,10 +50,20 @@ #include "services/common/telemetry/configure_telemetry.h" #include "services/common/util/file_util.h" #include "services/common/util/request_response_constants.h" -#include "src/cpp/concurrent/event_engine_executor.h" -#include "src/cpp/encryption/key_fetcher/src/key_fetcher_manager.h" -#include "src/cpp/util/rlimit_core_config.h" -#include "src/cpp/util/status_macro/status_macros.h" +#include "src/concurrent/event_engine_executor.h" +#include "src/encryption/key_fetcher/key_fetcher_manager.h" +#include "src/public/cpio/interface/blob_storage_client/blob_storage_client_interface.h" +#include "src/public/cpio/interface/cpio.h" +#include "src/public/cpio/proto/blob_storage_service/v1/blob_storage_service.pb.h" +#include "src/util/rlimit_core_config.h" +#include "src/util/status_macro/status_macros.h" + +#ifdef PS_INFERENCE_NON_PROD +#include "sandbox/sandbox_executor.h" +#include "sandboxed_api/sandbox2/comms.h" +#include "services/bidding_service/inference/inference_utils.h" +#include "services/common/blob_fetch/blob_fetcher.h" +#endif ABSL_FLAG(std::optional, port, std::nullopt, "Port the server is listening on."); @@ -90,6 +98,7 @@ namespace privacy_sandbox::bidding_auction_servers { using bidding_service::BuyerCodeFetchConfig; using ::google::scp::cpio::BlobStorageClientFactory; +using ::google::scp::cpio::BlobStorageClientInterface; using ::google::scp::cpio::Cpio; using ::google::scp::cpio::CpioOptions; using ::google::scp::cpio::LogOption; @@ -97,7 +106,7 @@ using ::grpc::Server; using ::grpc::ServerBuilder; absl::StatusOr GetConfigClient( - std::string config_param_prefix) { + absl::string_view config_param_prefix) { TrustedServersConfigClient config_client(GetServiceFlags()); 
config_client.SetFlag(FLAGS_port, PORT); config_client.SetFlag(FLAGS_healthcheck_port, HEALTHCHECK_PORT); @@ -154,6 +163,14 @@ absl::StatusOr GetConfigClient( config_client.SetFlag(FLAGS_ad_retrieval_kv_server_egress_tls, AD_RETRIEVAL_KV_SERVER_EGRESS_TLS); config_client.SetFlag(FLAGS_kv_server_egress_tls, KV_SERVER_EGRESS_TLS); + config_client.SetFlag(FLAGS_inference_sidecar_binary_path, + INFERENCE_SIDECAR_BINARY_PATH); + config_client.SetFlag(FLAGS_inference_model_local_paths, + INFERENCE_MODEL_LOCAL_PATHS); + config_client.SetFlag(FLAGS_inference_model_bucket_name, + INFERENCE_MODEL_BUCKET_NAME); + config_client.SetFlag(FLAGS_inference_model_bucket_paths, + INFERENCE_MODEL_BUCKET_PATHS); if (absl::GetFlag(FLAGS_init_config_client)) { PS_RETURN_IF_ERROR(config_client.Init(config_param_prefix)).LogError() @@ -180,6 +197,14 @@ absl::StatusOr GetConfigClient( return config_client; } +absl::string_view GetStringParameter(const TrustedServersConfigClient& client, + absl::string_view name) { + if (client.HasParameter(name)) { + return client.GetStringParameter(name); + } + return ""; +} + // Brings up the gRPC BiddingService on FLAGS_port. absl::Status RunServer() { TrustedServerConfigUtil config_util(absl::GetFlag(FLAGS_init_config_client)); @@ -192,12 +217,40 @@ absl::Status RunServer() { CHECK(!config_client.GetStringParameter(BUYER_CODE_FETCH_CONFIG).empty()) << "BUYER_CODE_FETCH_CONFIG is a mandatory flag."; - auto dispatcher = V8Dispatcher([&config_client]() { + std::string_view inference_sidecar_binary_path = + GetStringParameter(config_client, INFERENCE_SIDECAR_BINARY_PATH); + const bool enable_inference = !inference_sidecar_binary_path.empty(); + + auto dispatcher = V8Dispatcher([&config_client, &enable_inference]() { DispatchConfig config; config.worker_queue_max_items = config_client.GetIntParameter(JS_WORKER_QUEUE_LEN); config.number_of_workers = config_client.GetIntParameter(JS_NUM_WORKERS); + if (enable_inference) { +#ifdef PS_INFERENCE_NON_PROD + PS_VLOG(1) << "Register runInference API."; + auto run_inference_function_object = + std::make_unique>(); + run_inference_function_object->function_name = + std::string(inference::kInferenceFunctionName); + run_inference_function_object->function = inference::RunInference; + config.RegisterFunctionBinding(std::move(run_inference_function_object)); + PS_VLOG(1) << "RunInference registered."; + + PS_VLOG(1) << "Start the inference sidecar."; + // This usage of flag is not consistent with rest of the codebase, where + // we use the parameter from the config client directly instead of passing + // it back to the absl flag. 
+ absl::SetFlag( + &FLAGS_inference_sidecar_binary_path, + config_client.GetStringParameter(INFERENCE_SIDECAR_BINARY_PATH) + .data()); + inference::SandboxExecutor& inference_executor = inference::Executor(); + CHECK_EQ(inference_executor.StartSandboxee().code(), + absl::StatusCode::kOk); +#endif + } return config; }()); CodeDispatchClient client(dispatcher); @@ -218,9 +271,6 @@ absl::Status RunServer() { "a proto message: " << result.message(); - auto curl_http_fetcher = - std::make_unique(executor.get()); - bool enable_buyer_debug_url_generation = udf_config.enable_buyer_debug_url_generation(); bool enable_adtech_code_logging = udf_config.enable_adtech_code_logging(); @@ -239,6 +289,42 @@ absl::Status RunServer() { enable_protected_app_signals); PS_RETURN_IF_ERROR(udf_fetcher.Init()) << "Failed to initialize UDF fetch."; +#ifdef PS_INFERENCE_NON_PROD + bool init_config_client = absl::GetFlag(FLAGS_init_config_client); + if (enable_inference) { + if (init_config_client) { + PS_VLOG(1) << "Start blob fetcher to read from a cloud bucket."; + std::unique_ptr blob_storage_client = + BlobStorageClientFactory::Create(); + std::string_view bucket_name = + GetStringParameter(config_client, INFERENCE_MODEL_BUCKET_NAME); + std::string_view bucket_paths = + GetStringParameter(config_client, INFERENCE_MODEL_BUCKET_PATHS); + std::vector models = absl::StrSplit(bucket_paths, ','); + + auto blob_fetcher = std::make_unique( + bucket_name, executor.get(), std::move(blob_storage_client)); + CHECK(blob_fetcher->FetchSync().ok()) << "FetchSync() failed."; + const std::vector& files = blob_fetcher->snapshot(); + PS_VLOG(1) << "Register models from bucket."; + if (absl::Status status = + inference::RegisterModelsFromBucket(bucket_name, models, files); + !status.ok()) { + PS_VLOG(1) << "Skip registering models from bucket: " + << status.message(); + } + } + PS_VLOG(1) << "Register models from local."; + std::string_view local_paths = + GetStringParameter(config_client, INFERENCE_MODEL_LOCAL_PATHS); + if (absl::Status status = inference::RegisterModelsFromLocal( + absl::StrSplit(local_paths, ',')); + !status.ok()) { + PS_VLOG(1) << "Skip registering models from local: " << status.message(); + } + } +#endif + bool enable_bidding_service_benchmark = config_client.GetBooleanParameter(ENABLE_BIDDING_SERVICE_BENCHMARK); @@ -370,9 +456,6 @@ absl::Status RunServer() { // responsible for shutting down the server for this call to ever return. 
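Both `RegisterModelsFromBucket` and `RegisterModelsFromLocal` above receive the output of `absl::StrSplit(..., ',')`. One detail worth making explicit, since the registration helpers later special-case it: splitting an empty string yields one empty element, not zero elements. A small self-contained check of that documented Abseil behavior:

```cpp
// Demonstrates the comma-splitting applied to INFERENCE_MODEL_BUCKET_PATHS
// and INFERENCE_MODEL_LOCAL_PATHS. absl::StrSplit("") yields a single empty
// element, which is why the registration code checks for {""} and reports
// "no model to register".
#include <cassert>
#include <string>
#include <vector>

#include "absl/strings/str_split.h"

int main() {
  std::vector<std::string> none = absl::StrSplit("", ',');
  assert(none.size() == 1 && none[0].empty());  // unset flag -> {""}

  std::vector<std::string> two = absl::StrSplit("models/a,models/b", ',');
  assert(two.size() == 2 && two[1] == "models/b");
  return 0;
}
```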
PS_VLOG(1) << "Server listening on " << server_address; server->Wait(); - PS_RETURN_IF_ERROR(udf_fetcher.End()) << "Error shutting down UDF fetcher."; - PS_RETURN_IF_ERROR(dispatcher.Stop()) - << "Error shutting down code dispatcher."; return absl::OkStatus(); } } // namespace privacy_sandbox::bidding_auction_servers diff --git a/services/bidding_service/bidding_service.cc b/services/bidding_service/bidding_service.cc index 2db11194..7420d9ae 100644 --- a/services/bidding_service/bidding_service.cc +++ b/services/bidding_service/bidding_service.cc @@ -19,7 +19,7 @@ #include "api/bidding_auction_servers.pb.h" #include "services/bidding_service/generate_bids_reactor.h" #include "services/common/metric/server_definition.h" -#include "src/cpp/telemetry/telemetry.h" +#include "src/telemetry/telemetry.h" namespace privacy_sandbox::bidding_auction_servers { diff --git a/services/bidding_service/bidding_service.h b/services/bidding_service/bidding_service.h index 3139fdfb..086e63ac 100644 --- a/services/bidding_service/bidding_service.h +++ b/services/bidding_service/bidding_service.h @@ -26,9 +26,9 @@ #include "services/common/clients/async_grpc/default_async_grpc_client.h" #include "services/common/clients/http/multi_curl_http_fetcher_async.h" #include "services/common/clients/kv_server/kv_async_client.h" +#include "src/concurrent/event_engine_executor.h" #include "src/core/lib/event_engine/default_event_engine.h" -#include "src/cpp/concurrent/event_engine_executor.h" -#include "src/cpp/encryption/key_fetcher/interface/key_fetcher_manager_interface.h" +#include "src/encryption/key_fetcher/interface/key_fetcher_manager_interface.h" namespace privacy_sandbox::bidding_auction_servers { diff --git a/services/bidding_service/bidding_service_integration_test.cc b/services/bidding_service/bidding_service_integration_test.cc index 789d8237..6d1512f7 100644 --- a/services/bidding_service/bidding_service_integration_test.cc +++ b/services/bidding_service/bidding_service_integration_test.cc @@ -574,7 +574,6 @@ TEST_F(GenerateBidsReactorIntegrationTest, GeneratesBidsByInterestGroupCode) { .number_value(), 1.0); } - EXPECT_TRUE(dispatcher.Stop().ok()); } TEST_F(GenerateBidsReactorIntegrationTest, @@ -662,7 +661,6 @@ TEST_F(GenerateBidsReactorIntegrationTest, .number_value(), 1.0); } - EXPECT_TRUE(dispatcher.Stop().ok()); } TEST_F(GenerateBidsReactorIntegrationTest, ReceivesTrustedBiddingSignals) { @@ -733,7 +731,6 @@ TEST_F(GenerateBidsReactorIntegrationTest, ReceivesTrustedBiddingSignals) { ad_with_bid.ad().struct_value().fields().at("tbsLength").number_value(), 1); } - EXPECT_TRUE(dispatcher.Stop().ok()); } TEST_F(GenerateBidsReactorIntegrationTest, ReceivesTrustedBiddingSignalsKeys) { @@ -806,7 +803,6 @@ TEST_F(GenerateBidsReactorIntegrationTest, ReceivesTrustedBiddingSignalsKeys) { raw_request.interest_group_for_bidding(0) .trusted_bidding_signals_keys_size()); } - EXPECT_TRUE(dispatcher.Stop().ok()); } /* @@ -900,7 +896,6 @@ TEST_F(GenerateBidsReactorIntegrationTest, // All instances of the script should have crashed; no bids should have been // generated. 
ASSERT_EQ(raw_response.bids_size(), 0); - EXPECT_TRUE(dispatcher.Stop().ok()); } TEST_F(GenerateBidsReactorIntegrationTest, GeneratesBidsFromDevice) { @@ -997,7 +992,6 @@ TEST_F(GenerateBidsReactorIntegrationTest, GeneratesBidsFromDevice) { .number_value(), 1.0); } - EXPECT_TRUE(dispatcher.Stop().ok()); } void GenerateBidCodeWrapperTestHelper( @@ -1059,7 +1053,7 @@ void GenerateBidCodeWrapperTestHelper( key_fetcher_manager = CreateKeyFetcherManager( config_client, /* public_key_fetcher= */ nullptr); - const BiddingServiceRuntimeConfig& runtime_config = { + BiddingServiceRuntimeConfig runtime_config{ .enable_buyer_debug_url_generation = enable_buyer_debug_url_generation, .enable_adtech_code_logging = enable_adtech_code_logging}; BiddingService service( @@ -1069,7 +1063,6 @@ void GenerateBidCodeWrapperTestHelper( service.GenerateBids(&context, &request, response); std::this_thread::sleep_for( absl::ToChronoSeconds(absl::Seconds(kGenerateBidExecutionTimeSeconds))); - EXPECT_TRUE(dispatcher.Stop().ok()); } TEST_F(GenerateBidsReactorIntegrationTest, BuyerDebugUrlGenerationDisabled) { diff --git a/services/bidding_service/bidding_service_test.cc b/services/bidding_service/bidding_service_test.cc index 3052e1f5..637a89e7 100644 --- a/services/bidding_service/bidding_service_test.cc +++ b/services/bidding_service/bidding_service_test.cc @@ -33,7 +33,7 @@ #include "services/common/metric/server_definition.h" #include "services/common/test/mocks.h" #include "services/common/test/utils/service_utils.h" -#include "src/cpp/encryption/key_fetcher/interface/key_fetcher_manager_interface.h" +#include "src/encryption/key_fetcher/interface/key_fetcher_manager_interface.h" namespace privacy_sandbox::bidding_auction_servers { namespace { diff --git a/services/bidding_service/buyer_code_fetch_manager.cc b/services/bidding_service/buyer_code_fetch_manager.cc index 852a1b5e..62eb5792 100644 --- a/services/bidding_service/buyer_code_fetch_manager.cc +++ b/services/bidding_service/buyer_code_fetch_manager.cc @@ -27,8 +27,9 @@ #include "services/common/code_fetch/periodic_bucket_fetcher.h" #include "services/common/code_fetch/periodic_code_fetcher.h" #include "services/common/util/file_util.h" -#include "src/cpp/concurrent/event_engine_executor.h" -#include "src/cpp/util/status_macro/status_macros.h" +#include "src/concurrent/event_engine_executor.h" +#include "src/logger/request_context_logger.h" +#include "src/util/status_macro/status_macros.h" #include "buyer_code_fetch_manager.h" @@ -41,8 +42,16 @@ constexpr int kMinNumCodeBlobs = 1; constexpr int kMaxNumCodeBlobs = 2; constexpr char kUnusedWasmBlob[] = ""; +using ::google::scp::core::errors::GetErrorMessage; + } // namespace +BuyerCodeFetchManager::~BuyerCodeFetchManager() { + if (absl::Status shutdown = End(); !shutdown.ok()) { + PS_VLOG(1) << "BuyerCodeFetchManager shutdown failed. 
" << shutdown; + } +} + absl::Status BuyerCodeFetchManager::Init() { switch (udf_config_.fetch_mode()) { case bidding_service::FETCH_MODE_LOCAL: diff --git a/services/bidding_service/buyer_code_fetch_manager.h b/services/bidding_service/buyer_code_fetch_manager.h index 3c2139d3..ac3d3055 100644 --- a/services/bidding_service/buyer_code_fetch_manager.h +++ b/services/bidding_service/buyer_code_fetch_manager.h @@ -26,8 +26,8 @@ #include "services/common/clients/http/http_fetcher_async.h" #include "services/common/code_fetch/periodic_bucket_fetcher.h" #include "services/common/code_fetch/periodic_code_fetcher.h" -#include "src/cpp/concurrent/event_engine_executor.h" -#include "src/cpp/util/status_macro/status_macros.h" +#include "src/concurrent/event_engine_executor.h" +#include "src/util/status_macro/status_macros.h" namespace privacy_sandbox::bidding_auction_servers { @@ -57,7 +57,7 @@ class BuyerCodeFetchManager { V8Dispatcher* dispatcher, std::unique_ptr blob_storage_client, - const bidding_service::BuyerCodeFetchConfig udf_config, + const bidding_service::BuyerCodeFetchConfig& udf_config, bool enable_protected_audience, bool enable_protected_app_signals) : executor_(*executor), http_fetcher_(*http_fetcher), @@ -67,6 +67,8 @@ class BuyerCodeFetchManager { enable_protected_audience_(enable_protected_audience), enable_protected_app_signals_(enable_protected_app_signals) {} + ~BuyerCodeFetchManager(); + // Not copyable or movable. BuyerCodeFetchManager(const BuyerCodeFetchManager&) = delete; BuyerCodeFetchManager& operator=(const BuyerCodeFetchManager&) = delete; @@ -76,13 +78,13 @@ class BuyerCodeFetchManager { // A successful Init means that Roma has succeeded in loading a UDF. absl::Status Init(); + private: // Must be called exactly once. This should only be called on server shutdown, // and only after Init has returned (either a success or error is fine). // Failure to End means there was an issue releasing resources and should // be investigated to ensure that requests are being terminated gracefully. 
absl::Status End(); - private: absl::Status InitializeLocalCodeFetch(); absl::Status InitBucketClient(); diff --git a/services/bidding_service/buyer_code_fetch_manager_test.cc b/services/bidding_service/buyer_code_fetch_manager_test.cc index 44cdc18d..dfe9ac85 100644 --- a/services/bidding_service/buyer_code_fetch_manager_test.cc +++ b/services/bidding_service/buyer_code_fetch_manager_test.cc @@ -24,14 +24,14 @@ #include "absl/synchronization/notification.h" #include "absl/time/time.h" #include "gtest/gtest.h" -#include "scp/cc/core/interface/async_context.h" -#include "scp/cc/public/cpio/interface/blob_storage_client/blob_storage_client_interface.h" -#include "scp/cc/public/cpio/interface/cpio.h" -#include "scp/cc/public/cpio/interface/error_codes.h" -#include "scp/cc/public/cpio/mock/blob_storage_client/mock_blob_storage_client.h" #include "services/bidding_service/code_wrapper/buyer_code_wrapper.h" #include "services/common/test/mocks.h" #include "services/common/util/file_util.h" +#include "src/core/interface/async_context.h" +#include "src/public/cpio/interface/blob_storage_client/blob_storage_client_interface.h" +#include "src/public/cpio/interface/cpio.h" +#include "src/public/cpio/interface/error_codes.h" +#include "src/public/cpio/mock/blob_storage_client/mock_blob_storage_client.h" namespace privacy_sandbox::bidding_auction_servers { namespace { @@ -188,16 +188,16 @@ TEST_F(BuyerCodeFetchManagerTest, context.Finish(SuccessExecutionResult()); return SuccessExecutionResult(); }) - .WillOnce( - [&](AsyncContext - context) { - EXPECT_EQ(context.request->blob_metadata().bucket_name(), - ads_bucket); - return FailureExecutionResult(SC_UNKNOWN); - }); + .WillOnce([&ads_bucket]( + const AsyncContext& context) { + EXPECT_EQ(context.request->blob_metadata().bucket_name(), ads_bucket); + return FailureExecutionResult(SC_UNKNOWN); + }); EXPECT_CALL(*blob_storage_client_, GetBlob) .WillOnce( - [&](AsyncContext async_context) { + [&pas_bucket, &pas_object, &pas_data]( + AsyncContext async_context) { auto async_bucket_name = async_context.request->blob_metadata().bucket_name(); auto async_blob_name = @@ -214,7 +214,8 @@ TEST_F(BuyerCodeFetchManagerTest, }); EXPECT_CALL(*dispatcher_, LoadSync) - .WillOnce([&](std::string_view version, absl::string_view blob_data) { + .WillOnce([&pas_object, &pas_data](std::string_view version, + absl::string_view blob_data) { EXPECT_EQ(version, pas_object); EXPECT_EQ( blob_data, @@ -264,24 +265,27 @@ TEST_F(BuyerCodeFetchManagerTest, FetchModeUrlTriesUrlLoad) { EXPECT_CALL(*dispatcher_, LoadSync) .WillRepeatedly( - [&](std::string_view version, absl::string_view blob_data) { + [](std::string_view version, absl::string_view blob_data) { return absl::OkStatus(); }); EXPECT_CALL(*http_fetcher_, FetchUrls) - .WillOnce([&](const std::vector& requests, + .WillOnce([&pa_wasm_url, &pa_url]( + const std::vector& requests, absl::Duration timeout, OnDoneFetchUrls done_callback) { EXPECT_EQ(requests[0].url, pa_url); EXPECT_EQ(requests[1].url, pa_wasm_url); std::move(done_callback)({""}); }) - .WillOnce([&](const std::vector& requests, + .WillOnce([&pas_url, &pas_wasm_url]( + const std::vector& requests, absl::Duration timeout, OnDoneFetchUrls done_callback) { EXPECT_EQ(requests[0].url, pas_url); EXPECT_EQ(requests[1].url, pas_wasm_url); std::move(done_callback)({""}); }) - .WillOnce([&](const std::vector& requests, + .WillOnce([&ads_retrieval_wasm_url, &ads_retrieval_url]( + const std::vector& requests, absl::Duration timeout, OnDoneFetchUrls done_callback) { 
EXPECT_EQ(requests[0].url, ads_retrieval_url); EXPECT_EQ(requests[1].url, ads_retrieval_wasm_url); diff --git a/services/bidding_service/code_wrapper/BUILD b/services/bidding_service/code_wrapper/BUILD index 6d00664b..1d8718a9 100644 --- a/services/bidding_service/code_wrapper/BUILD +++ b/services/bidding_service/code_wrapper/BUILD @@ -30,7 +30,7 @@ cc_library( "//services/common/constants:common_service_flags", "//services/common/util:request_response_constants", "@com_google_absl//absl/strings", - "@google_privacysandbox_servers_common//src/cpp/logger:request_context_impl", + "@google_privacysandbox_servers_common//src/logger:request_context_impl", ], ) diff --git a/services/bidding_service/code_wrapper/buyer_code_wrapper.cc b/services/bidding_service/code_wrapper/buyer_code_wrapper.cc index d26b60f2..02d784c6 100644 --- a/services/bidding_service/code_wrapper/buyer_code_wrapper.cc +++ b/services/bidding_service/code_wrapper/buyer_code_wrapper.cc @@ -21,7 +21,7 @@ #include "absl/strings/str_format.h" #include "absl/strings/string_view.h" #include "services/bidding_service/constants.h" -#include "src/cpp/logger/request_context_impl.h" +#include "src/logger/request_context_impl.h" namespace privacy_sandbox::bidding_auction_servers { namespace { diff --git a/services/bidding_service/constants.h b/services/bidding_service/constants.h index cf32398e..97daa032 100644 --- a/services/bidding_service/constants.h +++ b/services/bidding_service/constants.h @@ -19,6 +19,8 @@ #include +#include "absl/strings/string_view.h" + namespace privacy_sandbox::bidding_auction_servers { inline constexpr char kProtectedAudienceGenerateBidBlobVersion[] = "v1"; diff --git a/services/bidding_service/generate_bids_reactor.cc b/services/bidding_service/generate_bids_reactor.cc index 7cf281c3..0dc3dde5 100644 --- a/services/bidding_service/generate_bids_reactor.cc +++ b/services/bidding_service/generate_bids_reactor.cc @@ -29,8 +29,8 @@ #include "services/bidding_service/constants.h" #include "services/common/util/json_util.h" #include "services/common/util/request_response_constants.h" -#include "src/cpp/util/status_macro/status_macros.h" -#include "src/cpp/util/status_macro/status_util.h" +#include "src/util/status_macro/status_macros.h" +#include "src/util/status_macro/status_util.h" namespace privacy_sandbox::bidding_auction_servers { namespace { @@ -115,6 +115,7 @@ constexpr char kJsonStringValueStart[] = R"JSON(":")JSON"; constexpr char kJsonValueStart[] = R"JSON(":)JSON"; constexpr char kJsonValueEnd[] = R"JSON(,")JSON"; constexpr char kJsonEmptyString[] = R"JSON("")JSON"; +constexpr char kEmptyDeviceSignals[] = R"JSON("{}")JSON"; std::string MakeBrowserSignalsForScript(absl::string_view publisher_name, absl::string_view seller, @@ -139,7 +140,8 @@ std::string MakeBrowserSignalsForScript(absl::string_view publisher_name, } absl::StatusOr SerializeRepeatedStringField( - google::protobuf::RepeatedPtrField repeated_string_field) { + const google::protobuf::RepeatedPtrField& + repeated_string_field) { rapidjson::Document json_array; json_array.SetArray(); for (const auto& item : repeated_string_field) { @@ -161,7 +163,7 @@ constexpr char kUserBiddingSignals[] = "userBiddingSignals"; // No default, null, or dummy values are filled in. // Device signals are not serialized since they are passed to generateBid() // in a different parameter. -absl::StatusOr SerializeIG(IGForBidding ig) { +absl::StatusOr SerializeIG(const IGForBidding& ig) { // Insert the name in quotes. 
std::string serialized_ig = absl::StrFormat(R"JSON({"%s":"%s")JSON", kName, ig.name()); @@ -327,11 +329,11 @@ absl::StatusOr BuildGenerateBidRequest( ProtoToJson(interest_group.android_signals())); generate_bid_request.input[ArgIndex(GenerateBidArgs::kDeviceSignals)] = std::make_shared((serialized_android_signals.empty()) - ? R"JSON("")JSON" + ? kEmptyDeviceSignals : serialized_android_signals); } else { generate_bid_request.input[ArgIndex(GenerateBidArgs::kDeviceSignals)] = - std::make_shared(R"JSON("")JSON"); + std::make_shared(kEmptyDeviceSignals); } generate_bid_request.input[ArgIndex(GenerateBidArgs::kFeatureFlags)] = std::make_shared( diff --git a/services/bidding_service/generate_bids_reactor_test.cc b/services/bidding_service/generate_bids_reactor_test.cc index e26c49a0..fd70dc6c 100644 --- a/services/bidding_service/generate_bids_reactor_test.cc +++ b/services/bidding_service/generate_bids_reactor_test.cc @@ -36,7 +36,7 @@ #include "services/common/metric/server_definition.h" #include "services/common/test/mocks.h" #include "services/common/test/random.h" -#include "src/cpp/encryption/key_fetcher/interface/key_fetcher_manager_interface.h" +#include "src/encryption/key_fetcher/interface/key_fetcher_manager_interface.h" namespace privacy_sandbox::bidding_auction_servers { namespace { @@ -123,7 +123,7 @@ class GenerateBidsReactorTest : public testing::Test { } void CheckGenerateBids(const RawRequest& raw_request, - Response expected_response, + const Response& expected_response, bool enable_buyer_debug_url_generation = false, bool enable_adtech_code_logging = false) { Response response; @@ -135,8 +135,7 @@ class GenerateBidsReactorTest : public testing::Test { request_.set_request_ciphertext(raw_request.SerializeAsString()); GenerateBidsReactor reactor( dispatcher_, &request_, &response, std::move(benchmarkingLogger), - key_fetcher_manager_.get(), crypto_client_.get(), - std::move(runtime_config)); + key_fetcher_manager_.get(), crypto_client_.get(), runtime_config); reactor.Execute(); google::protobuf::util::MessageDifferencer diff; std::string diff_output; @@ -678,8 +677,8 @@ TEST_F(GenerateBidsReactorTest, GeneratesBidDespiteNoBrowserSignals) { auto input = batch.at(0).input; IGForBidding received; EXPECT_EQ(*input[3], expected_signals); - // Check that browser signals are an empty JSON string. - EXPECT_EQ(*input[4], R"JSON("")JSON"); + // Check that device signals are an empty JSON object. 
+ EXPECT_EQ(*input[4], R"JSON("{}")JSON"); return FakeExecute(batch, std::move(batch_callback), response_json); }); RawRequest raw_request; @@ -833,10 +832,9 @@ TEST_F(GenerateBidsReactorTest, AddsTrustedBiddingSignalsKeysToScriptInput) { BiddingServiceRuntimeConfig runtime_config = { .enable_buyer_debug_url_generation = false, }; - GenerateBidsReactor reactor(dispatcher_, &request_, &response, - std::move(benchmarkingLogger), - key_fetcher_manager_.get(), crypto_client_.get(), - std::move(runtime_config)); + GenerateBidsReactor reactor( + dispatcher_, &request_, &response, std::move(benchmarkingLogger), + key_fetcher_manager_.get(), crypto_client_.get(), runtime_config); reactor.Execute(); notification.WaitForNotification(); } @@ -876,10 +874,9 @@ TEST_F(GenerateBidsReactorTest, std::make_unique(); BiddingServiceRuntimeConfig runtime_config; - GenerateBidsReactor reactor(dispatcher_, &request_, &response, - std::move(benchmarkingLogger), - key_fetcher_manager_.get(), crypto_client_.get(), - std::move(runtime_config)); + GenerateBidsReactor reactor( + dispatcher_, &request_, &response, std::move(benchmarkingLogger), + key_fetcher_manager_.get(), crypto_client_.get(), runtime_config); reactor.Execute(); notification.WaitForNotification(); diff --git a/services/bidding_service/generate_bids_reactor_test_utils.cc b/services/bidding_service/generate_bids_reactor_test_utils.cc index 509b3bec..7fcccb02 100644 --- a/services/bidding_service/generate_bids_reactor_test_utils.cc +++ b/services/bidding_service/generate_bids_reactor_test_utils.cc @@ -27,8 +27,8 @@ #include "services/common/encryption/mock_crypto_client_wrapper.h" #include "services/common/metric/server_definition.h" #include "services/common/test/mocks.h" -#include "src/cpp/encryption/key_fetcher/interface/key_fetcher_manager_interface.h" -#include "src/cpp/util/status_macro/status_macros.h" +#include "src/encryption/key_fetcher/interface/key_fetcher_manager_interface.h" +#include "src/util/status_macro/status_macros.h" namespace privacy_sandbox::bidding_auction_servers { @@ -80,8 +80,8 @@ void SetupMockCryptoClientWrapper(MockCryptoClientWrapper& crypto_client) { GenerateProtectedAppSignalsBidsRawRequest CreateRawProtectedAppSignalsRequest( const std::string& auction_signals, const std::string& buyer_signals, - const ProtectedAppSignals& protected_app_signals, const std::string seller, - const std::string publisher_name, + const ProtectedAppSignals& protected_app_signals, const std::string& seller, + const std::string& publisher_name, absl::optional contextual_pas_data) { GenerateProtectedAppSignalsBidsRawRequest raw_request; raw_request.set_auction_signals(auction_signals); diff --git a/services/bidding_service/generate_bids_reactor_test_utils.h b/services/bidding_service/generate_bids_reactor_test_utils.h index 51e8ac1a..98757545 100644 --- a/services/bidding_service/generate_bids_reactor_test_utils.h +++ b/services/bidding_service/generate_bids_reactor_test_utils.h @@ -79,8 +79,8 @@ void SetupMockCryptoClientWrapper(MockCryptoClientWrapper& crypto_client); // provided parameters. 
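The `kEmptyDeviceSignals` change above replaces a JSON-encoded empty string with a JSON-encoded `"{}"`, so a generateBid() wrapper can parse the device-signals argument as an (empty) object instead of failing on `""`. A tiny check of what the two raw-string literals actually contain:

```cpp
// What the raw-string literals in generate_bids_reactor.cc expand to: the
// old default was the 2-character payload "" (a JSON empty string), the new
// kEmptyDeviceSignals is the 4-character payload "{}" (a JSON object).
#include <cassert>
#include <string>

int main() {
  constexpr char kOld[] = R"JSON("")JSON";
  constexpr char kEmptyDeviceSignals[] = R"JSON("{}")JSON";
  assert(std::string(kOld) == "\"\"");
  assert(std::string(kEmptyDeviceSignals) == "\"{}\"");
  return 0;
}
```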
GenerateProtectedAppSignalsBidsRawRequest CreateRawProtectedAppSignalsRequest( const std::string& auction_signals, const std::string& buyer_signals, - const ProtectedAppSignals& protected_app_signals, const std::string seller, - const std::string publisher_name, + const ProtectedAppSignals& protected_app_signals, const std::string& seller, + const std::string& publisher_name, absl::optional contextual_pas_data = absl::nullopt); diff --git a/services/bidding_service/inference/BUILD b/services/bidding_service/inference/BUILD new file mode 100644 index 00000000..e798b19d --- /dev/null +++ b/services/bidding_service/inference/BUILD @@ -0,0 +1,80 @@ +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +load("@rules_cc//cc:defs.bzl", "cc_library", "cc_test") + +cc_library( + name = "inference_utils", + srcs = [ + "inference_utils.cc", + ], + hdrs = [ + "inference_utils.h", + ], + visibility = ["//visibility:public"], + deps = [ + ":inference_flags", + "//services/common/blob_fetch:blob_fetcher", + "@com_github_grpc_grpc//:grpc++", + "@com_google_absl//absl/base:core_headers", + "@com_google_absl//absl/flags:flag", + "@com_google_absl//absl/status", + "@com_google_absl//absl/synchronization", + "@google_privacysandbox_servers_common//src/logger:request_context_logger", + "@google_privacysandbox_servers_common//src/roma/roma_service", + "@google_privacysandbox_servers_common//src/util/status_macro:status_macros", + "@google_privacysandbox_servers_common//src/util/status_macro:status_util", + "@inference_common//proto:inference_sidecar_cc_grpc_proto", + "@inference_common//proto:inference_sidecar_cc_proto", + "@inference_common//sandbox:sandbox_executor", + "@inference_common//utils:file_util", + ], +) + +cc_test( + name = "inference_utils_test", + size = "small", + srcs = ["inference_utils_test.cc"], + data = [ + "@inference_common//:inference_sidecar", + "@inference_common//testdata:models/tensorflow_1_mib_saved_model.pb", + ], + flaky = True, + deps = [ + ":inference_flags", + ":inference_utils", + "@com_google_absl//absl/flags:flag", + "@com_google_absl//absl/flags:reflection", + "@com_google_absl//absl/strings", + "@com_google_googletest//:gtest", + "@com_google_googletest//:gtest_main", + "@google_privacysandbox_servers_common//src/roma/roma_service", + ], +) + +cc_library( + name = "inference_flags", + srcs = [ + "inference_flags.cc", + ], + hdrs = [ + "inference_flags.h", + ], + visibility = [ + "//services/bidding_service:__subpackages__", + ], + deps = [ + "@com_google_absl//absl/flags:flag", + ], +) diff --git a/services/bidding_service/inference/inference_flags.cc b/services/bidding_service/inference/inference_flags.cc new file mode 100644 index 00000000..97102937 --- /dev/null +++ b/services/bidding_service/inference/inference_flags.cc @@ -0,0 +1,37 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "services/bidding_service/inference/inference_flags.h" + +#include +#include + +#include "absl/flags/flag.h" + +ABSL_FLAG(std::optional, inference_sidecar_binary_path, + std::nullopt, "The binary path of the inference sidecar."); +// Used to load local models for debugging purposes. In the prod setting, this +// flag is not used, and we fetch models from cloud buckets. +// TODO(b/330817239): Bidding server should throw an error if this flag is set +// in the prod mode. +ABSL_FLAG(std::optional, inference_model_local_paths, std::nullopt, + "Comma separated list of inference model paths to read from the " + "local disk. It's mainly used for testing."); +ABSL_FLAG(std::optional, inference_model_bucket_name, std::nullopt, + "Bucket name for fetching models."); +// TODO(b/330942801): Clean up the flag. +ABSL_FLAG(std::optional, inference_model_bucket_paths, + std::nullopt, + "Comma separated list of bucket paths. Used to specify a list of " + "directories to fetch the blobs from."); diff --git a/services/bidding_service/inference/inference_flags.h b/services/bidding_service/inference/inference_flags.h new file mode 100644 index 00000000..f9591482 --- /dev/null +++ b/services/bidding_service/inference/inference_flags.h @@ -0,0 +1,45 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
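The `ABSL_FLAG` declarations above lost their angle-bracketed type parameters in extraction; judging from the comma-splitting applied to the values later in this diff, `std::optional<std::string>` is the likely type. Abseil supports optional flags directly, where `std::nullopt` distinguishes "unset" from "set to empty". A sketch under that assumption (the flag name below is illustrative, not from the diff):

```cpp
// Optional-flag pattern assumed for the inference flags; std::nullopt means
// the flag was never passed, which the server uses to disable inference.
#include <optional>
#include <string>

#include "absl/flags/flag.h"

ABSL_FLAG(std::optional<std::string>, example_model_local_paths, std::nullopt,
          "Comma separated list of inference model paths (illustrative).");

bool HasLocalModels() {
  // has_value() is false unless the flag was set on the command line.
  return absl::GetFlag(FLAGS_example_model_local_paths).has_value();
}
```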
+ +#ifndef SERVICES_BIDDING_SERVICE_INFERENCE_INFERENCE_FLAGS_H_ +#define SERVICES_BIDDING_SERVICE_INFERENCE_INFERENCE_FLAGS_H_ + +#include +#include + +#include "absl/flags/declare.h" +#include "absl/flags/flag.h" + +ABSL_DECLARE_FLAG(std::optional, inference_sidecar_binary_path); +ABSL_DECLARE_FLAG(std::optional, inference_model_local_paths); +ABSL_DECLARE_FLAG(std::optional, inference_model_bucket_name); +ABSL_DECLARE_FLAG(std::optional, inference_model_bucket_paths); + +namespace privacy_sandbox::bidding_auction_servers { + +inline constexpr char INFERENCE_SIDECAR_BINARY_PATH[] = + "INFERENCE_SIDECAR_BINARY_PATH"; +inline constexpr char INFERENCE_MODEL_LOCAL_PATHS[] = + "INFERENCE_MODEL_LOCAL_PATHS"; +inline constexpr char INFERENCE_MODEL_BUCKET_NAME[] = + "INFERENCE_MODEL_BUCKET_NAME"; +inline constexpr char INFERENCE_MODEL_BUCKET_PATHS[] = + "INFERENCE_MODEL_BUCKET_PATHS"; +inline constexpr absl::string_view kInferenceFlags[] = { + INFERENCE_SIDECAR_BINARY_PATH, INFERENCE_MODEL_LOCAL_PATHS, + INFERENCE_MODEL_BUCKET_NAME, INFERENCE_MODEL_BUCKET_PATHS}; + +} // namespace privacy_sandbox::bidding_auction_servers + +#endif // SERVICES_BIDDING_SERVICE_INFERENCE_INFERENCE_FLAGS_H_ diff --git a/services/bidding_service/inference/inference_utils.cc b/services/bidding_service/inference/inference_utils.cc new file mode 100644 index 00000000..d971a635 --- /dev/null +++ b/services/bidding_service/inference/inference_utils.cc @@ -0,0 +1,157 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "services/bidding_service/inference/inference_utils.h" + +#include +#include +#include +#include +#include +#include + +#include +#include +#include + +#include "absl/base/const_init.h" +#include "absl/flags/flag.h" +#include "absl/status/status.h" +#include "absl/strings/match.h" +#include "absl/strings/str_cat.h" +#include "absl/strings/str_split.h" +#include "absl/synchronization/mutex.h" +#include "proto/inference_sidecar.grpc.pb.h" +#include "services/bidding_service/inference/inference_flags.h" +#include "src/logger/request_context_logger.h" +#include "src/roma/interface/roma.h" +#include "src/util/status_macro/status_macros.h" +#include "src/util/status_macro/status_util.h" +#include "utils/file_util.h" + +namespace privacy_sandbox::bidding_auction_servers::inference { + +SandboxExecutor& Executor() { + // TODO(b/314976301): Use absl::NoDestructor when it becomes available. + // Static object will be lazily initialized within static storage. + + // TODO(b/317124648): Pass a SandboxExecutor object via Roma's `TMetadata`. + static SandboxExecutor* executor = new SandboxExecutor( + *absl::GetFlag(FLAGS_inference_sidecar_binary_path), {""}); + return *executor; +} + +std::shared_ptr InferenceChannel( + const SandboxExecutor& executor) { + // TODO(b/314976301): Use absl::NoDestructor when it becomes available. + // Static object will be lazily initialized within static storage. + + // TODO(b/317124648): Pass a gRPC channel object via Roma's `TMetadata`.
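`Executor()` and `InferenceChannel()` both use the function-local leaked-pointer idiom that the `absl::NoDestructor` TODOs above reference. Its generic form:

```cpp
// Generic form of the idiom: a function-local static pointer is initialized
// once (thread-safe since C++11 "magic statics") and intentionally never
// deleted, so the object cannot be torn down during static destruction.
#include <utility>

template <typename T, typename... Args>
T& LeakySingleton(Args&&... args) {
  static T* instance = new T(std::forward<Args>(args)...);  // never freed
  return *instance;
}

// Usage sketch: LeakySingleton<SandboxExecutor>(binary_path, args) would
// mirror the hand-written statics in this file.
```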
+ static std::shared_ptr client_channel = + grpc::CreateInsecureChannelFromFd("GrpcChannel", + executor.FileDescriptor()); + return client_channel; +} + +absl::Status RegisterModelsFromLocal(const std::vector& paths) { + if (paths.size() == 0 || (paths.size() == 1 && paths[0].empty())) { + return absl::NotFoundError("No model to register in local disk"); + } + + SandboxExecutor& executor = Executor(); + std::unique_ptr stub = + InferenceService::NewStub(InferenceChannel(executor)); + + for (const auto& path : paths) { + RegisterModelRequest register_request; + PS_RETURN_IF_ERROR(PopulateRegisterModelRequest(path, register_request)); + grpc::ClientContext context; + RegisterModelResponse register_response; + grpc::Status status = + stub->RegisterModel(&context, register_request, &register_response); + + if (!status.ok()) { + return server_common::ToAbslStatus(status); + } + } + return absl::OkStatus(); +} + +absl::Status RegisterModelsFromBucket( + absl::string_view bucket_name, const std::vector& paths, + const std::vector& blobs) { + if (bucket_name.empty()) { + return absl::InvalidArgumentError("Cloud bucket name is not set"); + } + if (paths.size() == 0 || blobs.size() == 0 || + (paths.size() == 1 && paths[0].empty())) { + return absl::NotFoundError("No model to register in the cloud bucket"); + } + + SandboxExecutor& executor = Executor(); + std::unique_ptr stub = + InferenceService::NewStub(InferenceChannel(executor)); + + for (const auto& model_path : paths) { + RegisterModelRequest request; + request.mutable_model_spec()->set_model_path(model_path); + PS_VLOG(10) << "model_path: " << model_path; + + for (const BlobFetcher::Blob& blob : blobs) { + if (absl::StartsWith(blob.path, model_path)) { + (*request.mutable_model_files())[blob.path] = blob.bytes; + PS_VLOG(10) << "model_files: " << blob.path; + } + } + + grpc::ClientContext context; + RegisterModelResponse response; + grpc::Status status = stub->RegisterModel(&context, request, &response); + + if (!status.ok()) { + return server_common::ToAbslStatus(status); + } + } + // TODO(b/316960066): Handle the register models response once the proto has + // been fleshed out. + return absl::OkStatus(); +} + +void RunInference(google::scp::roma::FunctionBindingPayload<>& wrapper) { + const std::string& payload = wrapper.io_proto.input_string(); + + SandboxExecutor& executor = Executor(); + std::unique_ptr stub = + InferenceService::NewStub(InferenceChannel(executor)); + + PS_VLOG(1) << "RunInference input: " << payload; + PredictRequest predict_request; + predict_request.set_input(payload); + + grpc::ClientContext context; + PredictResponse predict_response; + grpc::Status rpc_status = + stub->Predict(&context, predict_request, &predict_response); + if (rpc_status.ok()) { + wrapper.io_proto.set_output_string(predict_response.output()); + PS_VLOG(10) << "Inference response received: " + << predict_response.DebugString(); + return; + } + absl::Status status = server_common::ToAbslStatus(rpc_status); + // TODO(b/321284008): Communicate inference failure with JS caller.
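`RegisterModelsFromBucket` above groups fetched blobs under each model directory by path prefix. The same logic, lifted out as a standalone sketch (`Blob` mirrors the `{path, bytes}` usage in the hunk; it is not the real `BlobFetcher::Blob` declaration):

```cpp
// Standalone sketch of the path-prefix grouping RegisterModelsFromBucket
// performs: each blob whose path begins with the model directory becomes a
// model file attached to that model's register request.
#include <map>
#include <string>
#include <vector>

#include "absl/strings/match.h"

struct Blob {
  std::string path;
  std::string bytes;
};

std::map<std::string, std::string> CollectModelFiles(
    const std::string& model_dir, const std::vector<Blob>& blobs) {
  std::map<std::string, std::string> files;
  for (const Blob& blob : blobs) {
    if (absl::StartsWith(blob.path, model_dir)) {
      files[blob.path] = blob.bytes;  // e.g. "models/a/saved_model.pb"
    }
  }
  return files;
}
```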
+ PS_VLOG(1) << "Response error: " << status.message(); +} + +} // namespace privacy_sandbox::bidding_auction_servers::inference diff --git a/services/bidding_service/inference/inference_utils.h b/services/bidding_service/inference/inference_utils.h new file mode 100644 index 00000000..a6e69576 --- /dev/null +++ b/services/bidding_service/inference/inference_utils.h @@ -0,0 +1,49 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#ifndef SERVICES_BIDDING_SERVICE_INFERENCE_INFERENCE_UTILS_H_ +#define SERVICES_BIDDING_SERVICE_INFERENCE_INFERENCE_UTILS_H_ + +#include +#include + +#include "absl/strings/string_view.h" +#include "proto/inference_sidecar.pb.h" +#include "sandbox/sandbox_executor.h" +#include "services/common/blob_fetch/blob_fetcher.h" +#include "src/roma/interface/roma.h" + +namespace privacy_sandbox::bidding_auction_servers::inference { + +constexpr absl::string_view kInferenceFunctionName = "runInference"; + +// Accesses a sandbox exectuor that uses static storage. +SandboxExecutor& Executor(); + +// Registers AdTech models with the inference sidecar. These models are +// downloaded to the bidding server and sent to the inference sidecar via IPC. +absl::Status RegisterModelsFromLocal(const std::vector& paths); +absl::Status RegisterModelsFromBucket( + absl::string_view bucket_name, const std::vector& paths, + const std::vector& blobs); + +// Registered with Roma to provide an inference API in JS code. It sends a +// single inference request to the inference sidecar. +// +// wrapper: Inference request backed by JS string. +void RunInference(google::scp::roma::FunctionBindingPayload<>& wrapper); + +} // namespace privacy_sandbox::bidding_auction_servers::inference + +#endif // SERVICES_BIDDING_SERVICE_INFERENCE_INFERENCE_UTILS_H_ diff --git a/services/bidding_service/inference/inference_utils_test.cc b/services/bidding_service/inference/inference_utils_test.cc new file mode 100644 index 00000000..1a2ed135 --- /dev/null +++ b/services/bidding_service/inference/inference_utils_test.cc @@ -0,0 +1,93 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#include "services/bidding_service/inference/inference_utils.h" + +#include "absl/flags/flag.h" +#include "absl/flags/reflection.h" +#include "absl/status/status.h" +#include "absl/strings/string_view.h" +#include "gtest/gtest.h" +#include "services/bidding_service/inference/inference_flags.h" +#include "src/roma/interface/roma.h" + +namespace privacy_sandbox::bidding_auction_servers::inference { +namespace { + +constexpr absl::string_view kSidecarBinary = + "__main__/external/inference_common/inference_sidecar"; +constexpr absl::string_view kInit = "non-empty"; +constexpr absl::string_view kTestModelPath = + "external/inference_common/testdata/models/tensorflow_1_mib_saved_model.pb"; +constexpr absl::string_view kBucketName = "test_bucket"; + +class InferenceUtilsTest : public ::testing::Test { + protected: + void SetUp() override { + absl::SetFlag(&FLAGS_testonly_allow_policies_for_bazel, true); + absl::SetFlag(&FLAGS_inference_sidecar_binary_path, + GetFilePath(kSidecarBinary)); + } + + private: + absl::FlagSaver flag_saver_; +}; + +// TODO(b/322030670): Making static SandboxExecutor compatible with multiple +// tests. + +TEST_F(InferenceUtilsTest, ReturnValueIsSet) { + SandboxExecutor& inference_executor = Executor(); + CHECK_EQ(inference_executor.StartSandboxee().code(), absl::StatusCode::kOk); + ASSERT_TRUE(RegisterModelsFromLocal({std::string(kTestModelPath)}).ok()); + google::scp::roma::proto::FunctionBindingIoProto input_output_proto; + google::scp::roma::FunctionBindingPayload<> wrapper{input_output_proto, {}}; + wrapper.io_proto.set_input_string(absl::StrCat("1.0")); + wrapper.io_proto.set_output_string(kInit); + RunInference(wrapper); + // TODO(b/317124477): Update the output string after Tensorflow execution + // logic. Currently, this test uses a test inference module that doesn't + // populate the output string. 
+ ASSERT_EQ(wrapper.io_proto.output_string(), "0.57721"); + + absl::StatusOr result = inference_executor.StopSandboxee(); + ASSERT_TRUE(result.ok()); + ASSERT_EQ(result->final_status(), sandbox2::Result::EXTERNAL_KILL); + ASSERT_EQ(result->reason_code(), 0); +} + +TEST_F(InferenceUtilsTest, RegisterModelsFromLocal_NoPath_Error) { + EXPECT_EQ(RegisterModelsFromLocal({}).code(), absl::StatusCode::kNotFound); +} + +TEST_F(InferenceUtilsTest, RegisterModelsFromLocal_EmptyPath_Error) { + EXPECT_EQ(RegisterModelsFromLocal({""}).code(), absl::StatusCode::kNotFound); +} + +TEST_F(InferenceUtilsTest, RegisterModelsFromBucket_Error) { + EXPECT_EQ( + RegisterModelsFromBucket("", {std::string(kTestModelPath)}, {}).code(), + absl::StatusCode::kInvalidArgument); + EXPECT_EQ( + RegisterModelsFromBucket(kBucketName, {std::string(kTestModelPath)}, {}) + .code(), + absl::StatusCode::kNotFound); + EXPECT_EQ(RegisterModelsFromBucket(kBucketName, {}, {}).code(), + absl::StatusCode::kNotFound); + EXPECT_EQ(RegisterModelsFromBucket(kBucketName, {""}, {{"", ""}}).code(), + absl::StatusCode::kNotFound); +} + +} // namespace +} // namespace privacy_sandbox::bidding_auction_servers::inference diff --git a/services/bidding_service/protected_app_signals_generate_bids_reactor.cc b/services/bidding_service/protected_app_signals_generate_bids_reactor.cc index 612ee59b..1e0e342b 100644 --- a/services/bidding_service/protected_app_signals_generate_bids_reactor.cc +++ b/services/bidding_service/protected_app_signals_generate_bids_reactor.cc @@ -30,7 +30,7 @@ #include "services/common/util/json_util.h" #include "services/common/util/reporting_util.h" #include "services/common/util/request_metadata.h" -#include "src/cpp/util/status_macro/status_macros.h" +#include "src/util/status_macro/status_macros.h" namespace privacy_sandbox::bidding_auction_servers { @@ -108,7 +108,7 @@ absl::Status ProtectedAppSignalsGenerateBidsReactor::ValidateRomaResponse( } std::unique_ptr -ProtectedAppSignalsGenerateBidsReactor::CreateAdsRetrievalOrKVLookupRequest( +ProtectedAppSignalsGenerateBidsReactor::CreateAdsRetrievalRequest( const std::string& prepare_data_for_ads_retrieval_response, absl::optional ad_render_ids) { @@ -126,13 +126,24 @@ ProtectedAppSignalsGenerateBidsReactor::CreateAdsRetrievalOrKVLookupRequest( raw_request_.buyer_signals(), std::move(ad_render_ids_list))); } +std::unique_ptr +ProtectedAppSignalsGenerateBidsReactor::CreateKVLookupRequest( + const ProtectedAppSignalsGenerateBidsReactor::AdRenderIds& ad_render_ids) { + std::vector ad_render_ids_list = + std::vector(ad_render_ids.begin(), ad_render_ids.end()); + auto request = std::make_unique( + kv_server::application_pas::BuildLookupRequest( + std::move(ad_render_ids_list))); + PS_VLOG(8) << __func__ + << " Created KV lookup request: " << request->DebugString(); + return request; +} + void ProtectedAppSignalsGenerateBidsReactor::FetchAds( const std::string& prepare_data_for_ads_retrieval_response) { PS_VLOG(8, log_context_) << __func__; auto status = ad_retrieval_async_client_->ExecuteInternal( - CreateAdsRetrievalOrKVLookupRequest( - prepare_data_for_ads_retrieval_response), - {}, + CreateAdsRetrievalRequest(prepare_data_for_ads_retrieval_response), {}, [this, prepare_data_for_ads_retrieval_response]( KVLookUpResult ad_retrieval_result) { if (!ad_retrieval_result.ok()) { @@ -346,11 +357,10 @@ void ProtectedAppSignalsGenerateBidsReactor::FetchAdsMetadata( .ad_render_ids(), ", "); auto status = kv_async_client_->ExecuteInternal( - CreateAdsRetrievalOrKVLookupRequest( - 
prepare_data_for_ads_retrieval_response, ad_render_ids), - {}, + CreateKVLookupRequest(ad_render_ids), {}, [this, prepare_data_for_ads_retrieval_response]( KVLookUpResult kv_look_up_result) { + PS_VLOG(8) << "On KV response"; if (!kv_look_up_result.ok()) { PS_VLOG(2, log_context_) << "KV metadata request failed: " << kv_look_up_result.status(); diff --git a/services/bidding_service/protected_app_signals_generate_bids_reactor.h b/services/bidding_service/protected_app_signals_generate_bids_reactor.h index a33d58af..13245fd4 100644 --- a/services/bidding_service/protected_app_signals_generate_bids_reactor.h +++ b/services/bidding_service/protected_app_signals_generate_bids_reactor.h @@ -72,11 +72,13 @@ class ProtectedAppSignalsGenerateBidsReactor void StartNonContextualAdsRetrieval(); using AdRenderIds = google::protobuf::RepeatedPtrField; - std::unique_ptr - CreateAdsRetrievalOrKVLookupRequest( + std::unique_ptr CreateAdsRetrievalRequest( const std::string& prepare_data_for_ads_retrieval_response, absl::optional ad_render_ids = absl::nullopt); + std::unique_ptr CreateKVLookupRequest( + const AdRenderIds& ad_render_ids); + void FetchAds(const std::string& prepare_data_for_ads_retrieval_response); void FetchAdsMetadata( const std::string& prepare_data_for_ads_retrieval_response); diff --git a/services/bidding_service/protected_app_signals_generate_bids_reactor_test.cc b/services/bidding_service/protected_app_signals_generate_bids_reactor_test.cc index 8e4c2c74..9eb48fff 100644 --- a/services/bidding_service/protected_app_signals_generate_bids_reactor_test.cc +++ b/services/bidding_service/protected_app_signals_generate_bids_reactor_test.cc @@ -37,15 +37,16 @@ #include "services/common/test/mocks.h" #include "services/common/test/random.h" #include "services/common/util/json_util.h" -#include "src/cpp/encryption/key_fetcher/interface/key_fetcher_manager_interface.h" +#include "src/encryption/key_fetcher/interface/key_fetcher_manager_interface.h" namespace privacy_sandbox::bidding_auction_servers { namespace { constexpr int kPreparedRetrievalDataIndex = 0; constexpr int kContextualSignalsIndex = 2; -constexpr int kAdRenderIdsIndex = 3; -constexpr int kNumAdRetrievalOrKvLookupUdfArguments = 4; +constexpr int kAdRenderIdsIndex = 0; +constexpr int kNumAdRetrievalUdfArguments = 4; +constexpr int kNumKVLookupUdfArguments = 1; constexpr char kTestAdRenderId[] = "TestAdId"; using Request = GenerateProtectedAppSignalsBidsRequest; @@ -228,7 +229,7 @@ TEST_F(GenerateBidsReactorTest, AdRetrievalClientInputIsCorrect) { absl::Duration timeout) { EXPECT_EQ(raw_request->partitions().size(), 1); const auto& udf_arguments = raw_request->partitions()[0].arguments(); - EXPECT_EQ(udf_arguments.size(), kNumAdRetrievalOrKvLookupUdfArguments); + EXPECT_EQ(udf_arguments.size(), kNumAdRetrievalUdfArguments); auto parsed_retrieval_data = ParseJsonString( udf_arguments[kPreparedRetrievalDataIndex].data().string_value()); auto decoded_signals_itr = @@ -565,17 +566,7 @@ TEST_F(GenerateBidsReactorTest, KvInputIsCorrect) { EXPECT_EQ(raw_request->partitions().size(), 1); const auto& udf_arguments = raw_request->partitions()[0].arguments(); - EXPECT_EQ(udf_arguments.size(), - kNumAdRetrievalOrKvLookupUdfArguments); - EXPECT_TRUE(udf_arguments[kPreparedRetrievalDataIndex] - .data() - .string_value() - .empty()); - - EXPECT_EQ(std::string(udf_arguments[kContextualSignalsIndex] - .data() - .string_value()), - kTestBuyerSignals); + EXPECT_EQ(udf_arguments.size(), kNumKVLookupUdfArguments); const auto& ad_render_ids = 
udf_arguments[kAdRenderIdsIndex].data().list_value().values(); diff --git a/services/bidding_service/runtime_flags.h b/services/bidding_service/runtime_flags.h index 411d1735..783f281f 100644 --- a/services/bidding_service/runtime_flags.h +++ b/services/bidding_service/runtime_flags.h @@ -18,6 +18,7 @@ #include #include "absl/strings/string_view.h" +#include "services/bidding_service/inference/inference_flags.h" #include "services/common/constants/common_service_flags.h" namespace privacy_sandbox::bidding_auction_servers { @@ -59,6 +60,10 @@ inline std::vector GetServiceFlags() { flags.push_back(flag); } + for (absl::string_view flag : kInferenceFlags) { + flags.push_back(flag); + } + return flags; } diff --git a/services/buyer_frontend_service/BUILD b/services/buyer_frontend_service/BUILD index 8c1ad137..56a2d05f 100644 --- a/services/buyer_frontend_service/BUILD +++ b/services/buyer_frontend_service/BUILD @@ -76,10 +76,10 @@ cc_library( "//services/common/clients/http:multi_curl_http_fetcher_async", "//services/common/metric:server_definition", "@com_github_grpc_grpc//:grpc++", - "@google_privacysandbox_servers_common//scp/cc/public/cpio/interface:cpio", - "@google_privacysandbox_servers_common//src/cpp/concurrent:executor", - "@google_privacysandbox_servers_common//src/cpp/encryption/key_fetcher/src:key_fetcher_manager", - "@google_privacysandbox_servers_common//src/cpp/telemetry", + "@google_privacysandbox_servers_common//src/concurrent:executor", + "@google_privacysandbox_servers_common//src/encryption/key_fetcher:key_fetcher_manager", + "@google_privacysandbox_servers_common//src/public/cpio/interface:cpio", + "@google_privacysandbox_servers_common//src/telemetry", ], ) @@ -97,8 +97,8 @@ cc_test( "//services/common/test:random", "//services/common/test/utils:test_utils", "@com_google_googletest//:gtest_main", - "@google_privacysandbox_servers_common//src/cpp/encryption/key_fetcher/mock:mock_key_fetcher_manager", - "@google_privacysandbox_servers_common//src/cpp/encryption/key_fetcher/src:key_fetcher_manager", + "@google_privacysandbox_servers_common//src/encryption/key_fetcher:key_fetcher_manager", + "@google_privacysandbox_servers_common//src/encryption/key_fetcher/mock:mock_key_fetcher_manager", ], ) @@ -130,11 +130,11 @@ cc_binary( "@com_google_absl//absl/flags:parse", "@com_google_absl//absl/log:check", "@com_google_absl//absl/strings", - "@google_privacysandbox_servers_common//src/cpp/concurrent:executor", - "@google_privacysandbox_servers_common//src/cpp/encryption/key_fetcher/src:key_fetcher_manager", - "@google_privacysandbox_servers_common//src/cpp/telemetry", - "@google_privacysandbox_servers_common//src/cpp/util:rlimit_core_config", - "@google_privacysandbox_servers_common//src/cpp/util/status_macro:status_macros", + "@google_privacysandbox_servers_common//src/concurrent:executor", + "@google_privacysandbox_servers_common//src/encryption/key_fetcher:key_fetcher_manager", + "@google_privacysandbox_servers_common//src/telemetry", + "@google_privacysandbox_servers_common//src/util:rlimit_core_config", + "@google_privacysandbox_servers_common//src/util/status_macro:status_macros", ], ) @@ -158,7 +158,7 @@ cc_test( "@com_google_absl//absl/log:scoped_mock_log", "@com_google_absl//absl/strings", "@com_google_googletest//:gtest_main", - "@google_privacysandbox_servers_common//src/cpp/encryption/key_fetcher/mock:mock_key_fetcher_manager", + "@google_privacysandbox_servers_common//src/encryption/key_fetcher/mock:mock_key_fetcher_manager", ], ) diff --git 
a/services/buyer_frontend_service/buyer_frontend_main.cc b/services/buyer_frontend_service/buyer_frontend_main.cc index 99484e09..274e5892 100644 --- a/services/buyer_frontend_service/buyer_frontend_main.cc +++ b/services/buyer_frontend_service/buyer_frontend_main.cc @@ -28,7 +28,6 @@ #include "grpcpp/ext/proto_server_reflection_plugin.h" #include "grpcpp/grpcpp.h" #include "grpcpp/health_check_service_interface.h" -#include "scp/cc/public/cpio/interface/cpio.h" #include "services/buyer_frontend_service/buyer_frontend_service.h" #include "services/buyer_frontend_service/providers/http_bidding_signals_async_provider.h" #include "services/buyer_frontend_service/runtime_flags.h" @@ -41,11 +40,12 @@ #include "services/common/encryption/crypto_client_factory.h" #include "services/common/encryption/key_fetcher_factory.h" #include "services/common/telemetry/configure_telemetry.h" +#include "src/concurrent/event_engine_executor.h" #include "src/core/lib/event_engine/default_event_engine.h" -#include "src/cpp/concurrent/event_engine_executor.h" -#include "src/cpp/encryption/key_fetcher/src/key_fetcher_manager.h" -#include "src/cpp/util/rlimit_core_config.h" -#include "src/cpp/util/status_macro/status_macros.h" +#include "src/encryption/key_fetcher/key_fetcher_manager.h" +#include "src/public/cpio/interface/cpio.h" +#include "src/util/rlimit_core_config.h" +#include "src/util/status_macro/status_macros.h" ABSL_FLAG(std::optional, port, std::nullopt, "Port the server is listening on."); @@ -93,7 +93,7 @@ using ::grpc::Server; using ::grpc::ServerBuilder; absl::StatusOr GetConfigClient( - std::string config_param_prefix) { + absl::string_view config_param_prefix) { TrustedServersConfigClient config_client(GetServiceFlags()); config_client.SetFlag(FLAGS_port, PORT); config_client.SetFlag(FLAGS_healthcheck_port, HEALTHCHECK_PORT); diff --git a/services/buyer_frontend_service/buyer_frontend_service.cc b/services/buyer_frontend_service/buyer_frontend_service.cc index d718f3a4..acacbe8f 100644 --- a/services/buyer_frontend_service/buyer_frontend_service.cc +++ b/services/buyer_frontend_service/buyer_frontend_service.cc @@ -22,7 +22,7 @@ #include "api/bidding_auction_servers.pb.h" #include "services/buyer_frontend_service/get_bids_unary_reactor.h" #include "services/common/metric/server_definition.h" -#include "src/cpp/telemetry/telemetry.h" +#include "src/telemetry/telemetry.h" namespace privacy_sandbox::bidding_auction_servers { @@ -35,7 +35,7 @@ BuyerFrontEndService::BuyerFrontEndService( const GetBidsConfig config, bool enable_benchmarking) : bidding_signals_async_provider_( std::move(bidding_signals_async_provider)), - config_(std::move(config)), + config_(config), enable_benchmarking_(enable_benchmarking), key_fetcher_manager_(std::move(key_fetcher_manager)), crypto_client_(std::move(crypto_client)), @@ -54,11 +54,11 @@ BuyerFrontEndService::BuyerFrontEndService( } BuyerFrontEndService::BuyerFrontEndService(ClientRegistry client_registry, - GetBidsConfig config, + const GetBidsConfig config, bool enable_benchmarking) : bidding_signals_async_provider_( std::move(client_registry.bidding_signals_async_provider)), - config_(std::move(config)), + config_(config), enable_benchmarking_(enable_benchmarking), key_fetcher_manager_(std::move(client_registry.key_fetcher_manager)), crypto_client_(std::move(client_registry.crypto_client)), diff --git a/services/buyer_frontend_service/buyer_frontend_service.h b/services/buyer_frontend_service/buyer_frontend_service.h index e08008e6..bcaf0e37 100644 --- 
a/services/buyer_frontend_service/buyer_frontend_service.h +++ b/services/buyer_frontend_service/buyer_frontend_service.h @@ -23,7 +23,7 @@ #include "services/buyer_frontend_service/data/get_bids_config.h" #include "services/buyer_frontend_service/providers/bidding_signals_async_provider.h" #include "services/common/clients/bidding_server/bidding_async_client.h" -#include "src/cpp/encryption/key_fetcher/interface/key_fetcher_manager_interface.h" +#include "src/encryption/key_fetcher/interface/key_fetcher_manager_interface.h" namespace privacy_sandbox::bidding_auction_servers { @@ -56,7 +56,7 @@ class BuyerFrontEndService final : public BuyerFrontEnd::CallbackService { const GetBidsConfig config, bool enable_benchmarking = false); explicit BuyerFrontEndService(ClientRegistry client_registry, - GetBidsConfig config, + const GetBidsConfig config, bool enable_benchmarking = false); // Returns bid for the top eligible ad candidate. diff --git a/services/buyer_frontend_service/buyer_frontend_service_test.cc b/services/buyer_frontend_service/buyer_frontend_service_test.cc index e272ae8c..0a975af7 100644 --- a/services/buyer_frontend_service/buyer_frontend_service_test.cc +++ b/services/buyer_frontend_service/buyer_frontend_service_test.cc @@ -41,7 +41,7 @@ #include "services/common/test/mocks.h" #include "services/common/test/utils/service_utils.h" #include "services/common/test/utils/test_utils.h" -#include "src/cpp/encryption/key_fetcher/mock/mock_key_fetcher_manager.h" +#include "src/encryption/key_fetcher/mock/mock_key_fetcher_manager.h" namespace privacy_sandbox::bidding_auction_servers { namespace { diff --git a/services/buyer_frontend_service/get_bids_unary_reactor.cc b/services/buyer_frontend_service/get_bids_unary_reactor.cc index ec8dacc0..c391ec36 100644 --- a/services/buyer_frontend_service/get_bids_unary_reactor.cc +++ b/services/buyer_frontend_service/get_bids_unary_reactor.cc @@ -142,9 +142,9 @@ GetBidsUnaryReactor::GetBidsUnaryReactor( CryptoClientWrapperInterface* crypto_client, bool enable_benchmarking) : GetBidsUnaryReactor(context, get_bids_request, get_bids_response, bidding_signals_async_provider, bidding_async_client, - config, - /*pas_bidding_client=*/nullptr, key_fetcher_manager, - crypto_client, enable_benchmarking) {} + config, /*pas_bidding_async_client=*/nullptr, + key_fetcher_manager, crypto_client, + enable_benchmarking) {} void GetBidsUnaryReactor::OnAllBidsDone(bool any_successful_bids) { if (context_->IsCancelled()) { @@ -198,7 +198,7 @@ grpc::Status GetBidsUnaryReactor::DecryptRequest() { if (!decrypt_response.ok()) { return {grpc::StatusCode::INVALID_ARGUMENT, kMalformedCiphertext}; } - hpke_secret_ = std::move(decrypt_response->secret()); + hpke_secret_ = std::move(*decrypt_response->mutable_secret()); if (!raw_request_.ParseFromString(decrypt_response->payload())) { return {grpc::StatusCode::INVALID_ARGUMENT, kMalformedCiphertext}; } diff --git a/services/buyer_frontend_service/get_bids_unary_reactor.h b/services/buyer_frontend_service/get_bids_unary_reactor.h index 9847776e..334f20bc 100644 --- a/services/buyer_frontend_service/get_bids_unary_reactor.h +++ b/services/buyer_frontend_service/get_bids_unary_reactor.h @@ -37,8 +37,8 @@ #include "services/common/loggers/benchmarking_logger.h" #include "services/common/metric/server_definition.h" #include "services/common/util/async_task_tracker.h" -#include "src/cpp/encryption/key_fetcher/interface/key_fetcher_manager_interface.h" -#include "src/cpp/logger/request_context_impl.h" +#include 
"src/encryption/key_fetcher/interface/key_fetcher_manager_interface.h" +#include "src/logger/request_context_impl.h" namespace privacy_sandbox::bidding_auction_servers { diff --git a/services/buyer_frontend_service/get_bids_unary_reactor_test.cc b/services/buyer_frontend_service/get_bids_unary_reactor_test.cc index b42bb95d..e8f87f60 100644 --- a/services/buyer_frontend_service/get_bids_unary_reactor_test.cc +++ b/services/buyer_frontend_service/get_bids_unary_reactor_test.cc @@ -30,7 +30,7 @@ #include "services/common/test/mocks.h" #include "services/common/test/random.h" #include "services/common/test/utils/test_utils.h" -#include "src/cpp/encryption/key_fetcher/interface/key_fetcher_manager_interface.h" +#include "src/encryption/key_fetcher/interface/key_fetcher_manager_interface.h" namespace privacy_sandbox::bidding_auction_servers { namespace { @@ -382,8 +382,8 @@ TEST_F(GetProtectedAppSignalsTest, CorrectGenerateBidSentToBiddingService) { EXPECT_CALL( protected_app_signals_bidding_client_mock_, ExecuteInternal( - Pointee(EqGenerateProtectedAppSignalsBidsRawRequest( - std::move(expected_request))), + Pointee( + EqGenerateProtectedAppSignalsBidsRawRequest(expected_request)), An(), An(); if (bidding_signals_value.has_value()) { diff --git a/services/common/blob_fetch/BUILD b/services/common/blob_fetch/BUILD new file mode 100644 index 00000000..137a3861 --- /dev/null +++ b/services/common/blob_fetch/BUILD @@ -0,0 +1,55 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +load("@rules_cc//cc:defs.bzl", "cc_library", "cc_test") + +package(default_visibility = ["//:__subpackages__"]) + +cc_library( + name = "blob_fetcher", + srcs = ["blob_fetcher.cc"], + hdrs = ["blob_fetcher.h"], + deps = [ + "@com_google_absl//absl/log:check", + "@com_google_absl//absl/status", + "@com_google_absl//absl/strings", + "@com_google_absl//absl/synchronization", + "@google_privacysandbox_servers_common//src/concurrent:executor", + "@google_privacysandbox_servers_common//src/logger:request_context_logger", + "@google_privacysandbox_servers_common//src/public/core/interface:errors", + "@google_privacysandbox_servers_common//src/public/core/interface:execution_result", + "@google_privacysandbox_servers_common//src/public/cpio/interface/blob_storage_client", + ], +) + +cc_test( + name = "blob_fetcher_test", + size = "small", + srcs = ["blob_fetcher_test.cc"], + deps = [ + ":blob_fetcher", + "//services/common/test:mocks", + "@com_google_absl//absl/functional:any_invocable", + "@com_google_absl//absl/status", + "@com_google_absl//absl/strings", + "@com_google_absl//absl/synchronization", + "@com_google_googletest//:gtest", + "@com_google_googletest//:gtest_main", + "@google_privacysandbox_servers_common//src/concurrent:executor", + "@google_privacysandbox_servers_common//src/public/core/interface:errors", + "@google_privacysandbox_servers_common//src/public/core/interface:execution_result", + "@google_privacysandbox_servers_common//src/public/cpio/interface/blob_storage_client", + "@google_privacysandbox_servers_common//src/public/cpio/mock/blob_storage_client:blob_storage_client_mock", + ], +) diff --git a/services/common/blob_fetch/blob_fetcher.cc b/services/common/blob_fetch/blob_fetcher.cc new file mode 100644 index 00000000..bdae3a7e --- /dev/null +++ b/services/common/blob_fetch/blob_fetcher.cc @@ -0,0 +1,170 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#include "services/common/blob_fetch/blob_fetcher.h" + +#include +#include + +#include "absl/log/check.h" +#include "absl/status/status.h" +#include "absl/strings/str_format.h" +#include "absl/synchronization/notification.h" +#include "src/core/interface/async_context.h" +#include "src/core/interface/errors.h" +#include "src/logger/request_context_logger.h" +#include "src/public/core/interface/execution_result.h" +#include "src/public/cpio/interface/blob_storage_client/blob_storage_client_interface.h" +#include "src/public/cpio/proto/blob_storage_service/v1/blob_storage_service.pb.h" + +using ::google::cmrt::sdk::blob_storage_service::v1::GetBlobRequest; +using ::google::cmrt::sdk::blob_storage_service::v1::GetBlobResponse; +using ::google::cmrt::sdk::blob_storage_service::v1::ListBlobsMetadataRequest; +using ::google::cmrt::sdk::blob_storage_service::v1::ListBlobsMetadataResponse; +using ::google::scp::core::AsyncContext; +using ::google::scp::core::ExecutionResult; +using ::google::scp::core::errors::GetErrorMessage; +using ::google::scp::cpio::BlobStorageClientInterface; + +namespace privacy_sandbox::bidding_auction_servers { + +BlobFetcher::BlobFetcher( + absl::string_view bucket_name, server_common::Executor* executor, + std::unique_ptr blob_storage_client) + : bucket_name_(bucket_name), + executor_(executor), + blob_storage_client_(std::move(blob_storage_client)) { + InitAndRunConfigClient(); +} + +absl::Status BlobFetcher::FetchSync() { + absl::Status status; + absl::Notification done; + executor_->Run([&status, &done, this]() { + status = InternalFetch(); + done.Notify(); + }); + done.WaitForNotification(); + return status; +} + +void BlobFetcher::InitAndRunConfigClient() { + auto result = blob_storage_client_->Init(); + CHECK(result.Successful()) + << absl::StrFormat("Failed to init BlobStorageClient (status_code: %s)\n", + GetErrorMessage(result.status_code)); + + result = blob_storage_client_->Run(); + CHECK(result.Successful()) + << absl::StrFormat("Failed to run BlobStorageClient (status_code: %s)\n", + GetErrorMessage(result.status_code)); +} + +absl::Status BlobFetcher::InternalFetch() { + absl::Status status; + std::vector blob_names; + + // List all the blobs in the bucket. + auto list_blobs_request = std::make_shared(); + absl::Notification notification; + list_blobs_request->mutable_blob_metadata()->set_bucket_name(bucket_name_); + AsyncContext + list_blobs_context(list_blobs_request, [&status, &blob_names, + ¬ification](auto& context) { + if (!context.result.Successful()) { + PS_VLOG(0) << "Failed to list blobs: " + << GetErrorMessage(context.result.status_code); + status = absl::InternalError("Failed to list blobs"); + } else { + PS_VLOG(10) << "BlobStorageClient ListBlobsMetadata() Response: " + << context.response->DebugString(); + for (int i = 0; i < context.response->blob_metadatas_size(); i++) { + blob_names.push_back( + context.response->blob_metadatas(i).blob_name()); + } + } + // The caller waits for the notification. + // Please note that the callback might not be called by + // ListBlobsMetadata. + // TODO(b/316960066): Inspect the BlobStorageClient code and fix + // bugs. + notification.Notify(); + }); + + auto list_blobs_execution = + blob_storage_client_->ListBlobsMetadata(list_blobs_context); + if (!list_blobs_execution.Successful()) { + // If ListBlobsMetadata fails fast, we return the error early without + // waiting for `notification`. 
+ return absl::InternalError( + absl::StrCat("BlobStorageClient -> ListBlobsMetadata() failed: ", + GetErrorMessage(list_blobs_execution.status_code))); + } + notification.WaitForNotification(); + // Checks the error from the callback. + if (!status.ok()) { + return status; + } + + std::vector new_file_snapshot; + // Fetches all the blobs in the bucket. + // TODO(b/329674737): Fetch blobs in parallel. + for (const std::string& blob_name : blob_names) { + absl::Notification per_blob_notification; + auto get_blob_request = std::make_shared(); + get_blob_request->mutable_blob_metadata()->set_bucket_name(bucket_name_); + get_blob_request->mutable_blob_metadata()->set_blob_name(blob_name); + + AsyncContext get_blob_context( + get_blob_request, + [&status, &new_file_snapshot, &per_blob_notification](auto& context) { + if (!context.result.Successful()) { + PS_VLOG(0) << "Failed to fetch blobs: " + << GetErrorMessage(context.result.status_code); + status = absl::InternalError("Failed to fetch blobs"); + } else { + // Should not log blob().data(), which can be very large bytes. + PS_VLOG(10) << "BlobStorageClient GetBlob() Response: " + << context.response->blob().metadata().DebugString(); + + const std::string& path = + context.response->blob().metadata().blob_name(); + const std::string& bytes = context.response->blob().data(); + new_file_snapshot.emplace_back(path, bytes); + } + // TODO(b/316960066): Inspect the BlobStorageClient code and fix bugs. + per_blob_notification.Notify(); + }); + + auto get_blob_execution = blob_storage_client_->GetBlob(get_blob_context); + if (!get_blob_execution.Successful()) { + // If GetBlob fails fast, we return the error early. We update the file + // snapshot only when all the file fetching is successfully done. + return absl::InternalError( + absl::StrCat("BlobStorageClient -> GetBlob() failed: ", + GetErrorMessage(get_blob_execution.status_code))); + } + per_blob_notification.WaitForNotification(); + // Checks the error from the callback. + if (!status.ok()) { + return status; + } + } + + // All the blobs are successfully fetched. + snapshot_ = std::move(new_file_snapshot); + return status; +} + +} // namespace privacy_sandbox::bidding_auction_servers diff --git a/services/common/blob_fetch/blob_fetcher.h b/services/common/blob_fetch/blob_fetcher.h new file mode 100644 index 00000000..cc4eacd1 --- /dev/null +++ b/services/common/blob_fetch/blob_fetcher.h @@ -0,0 +1,78 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef SERVICES_COMMON_BLOB_FETCH_BLOB_FETCHER_H_ +#define SERVICES_COMMON_BLOB_FETCH_BLOB_FETCHER_H_ + +#include +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "src/concurrent/executor.h" +#include "src/public/cpio/interface/blob_storage_client/blob_storage_client_interface.h" + +namespace privacy_sandbox::bidding_auction_servers { + +// Blob fetching system to read AdTech's files from the cloud storage buckets. +// TODO(b/316960066): Support periodic fetching. 
+// TODO(b/316960066): Write the common lib with PeriodicBucketFetcher +class BlobFetcher { + public: + // A pair of file path and byte string. + struct Blob { + std::string path; + std::string bytes; + + Blob(const std::string& path, const std::string& bytes) + : path(path), bytes(bytes) {} + }; + + // Constructs a new BlobFetcher. + // `bucket_name`: The cloud storage bucket name to read from. + BlobFetcher(absl::string_view bucket_name, server_common::Executor* executor, + std::unique_ptr + blob_storage_client); + + // Not copyable or movable. + BlobFetcher(const BlobFetcher&) = delete; + BlobFetcher& operator=(const BlobFetcher&) = delete; + + const std::vector& snapshot() const { return snapshot_; } + + // Fetches the bucket synchronously. + absl::Status FetchSync(); + + private: + // Performs bucket fetching with BlobStorageClient. + // It's not thread-safe. + absl::Status InternalFetch(); + // Initializes BlobStorageClient. + // Prerequisite: Caller must call google::scp::cpio::Cpio::InitCpio() ahead. + void InitAndRunConfigClient(); + + const std::string bucket_name_; + server_common::Executor* executor_; // not owned + std::unique_ptr + blob_storage_client_; + // Keeps the latest snapshot of the storage bucket. + std::vector snapshot_; +}; + +} // namespace privacy_sandbox::bidding_auction_servers + +#endif // SERVICES_COMMON_BLOB_FETCH_BLOB_FETCHER_H_ diff --git a/services/common/blob_fetch/blob_fetcher_test.cc b/services/common/blob_fetch/blob_fetcher_test.cc new file mode 100644 index 00000000..2861901c --- /dev/null +++ b/services/common/blob_fetch/blob_fetcher_test.cc @@ -0,0 +1,146 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
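The new BlobFetcher hides CPIO's callback-based BlobStorageClient behind a small synchronous API: construct it with a bucket name, an executor, and a client, call FetchSync(), then read the fetched files back out of snapshot(). A minimal usage sketch, assuming google::scp::cpio::Cpio::InitCpio() has already run at startup; the DumpBucket wrapper, the bucket name, and the zero-argument factory call are illustrative assumptions, not part of this change:

    // Sketch only. BlobFetcher's constructor immediately Init()s and Run()s
    // the client, so CPIO must be initialized before this point.
    absl::Status DumpBucket(server_common::Executor* executor) {
      std::unique_ptr<google::scp::cpio::BlobStorageClientInterface> client =
          google::scp::cpio::BlobStorageClientFactory::Create();  // assumed
      BlobFetcher fetcher(/*bucket_name=*/"adtech-model-bucket",  // assumed
                          executor, std::move(client));
      if (absl::Status status = fetcher.FetchSync(); !status.ok()) {
        return status;  // propagated from ListBlobsMetadata()/GetBlob()
      }
      for (const BlobFetcher::Blob& blob : fetcher.snapshot()) {
        PS_VLOG(10) << blob.path << ": " << blob.bytes.size() << " bytes";
      }
      return absl::OkStatus();
    }

FetchSync() parks the calling thread while InternalFetch() runs on the executor, and snapshot_ is swapped in only after every blob has been fetched, so readers never observe a partially updated snapshot.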
+ +#include "services/common/blob_fetch/blob_fetcher.h" + +#include +#include + +#include "absl/synchronization/blocking_counter.h" +#include "gtest/gtest.h" +#include "services/common/test/mocks.h" +#include "src/core/interface/async_context.h" +#include "src/public/cpio/interface/blob_storage_client/blob_storage_client_interface.h" +#include "src/public/cpio/interface/error_codes.h" +#include "src/public/cpio/mock/blob_storage_client/mock_blob_storage_client.h" + +namespace privacy_sandbox::bidding_auction_servers { +namespace { + +using ::google::cmrt::sdk::blob_storage_service::v1::GetBlobRequest; +using ::google::cmrt::sdk::blob_storage_service::v1::GetBlobResponse; +using ::google::cmrt::sdk::blob_storage_service::v1::ListBlobsMetadataRequest; +using ::google::cmrt::sdk::blob_storage_service::v1::ListBlobsMetadataResponse; +using ::google::scp::core::AsyncContext; +using ::google::scp::core::ExecutionResult; +using ::google::scp::core::FailureExecutionResult; +using ::google::scp::core::SuccessExecutionResult; +using ::google::scp::cpio::MockBlobStorageClient; + +constexpr char kSampleBucketName[] = "BucketName"; +constexpr char kSampleBlobName[] = "blob_name"; +constexpr char kSampleData[] = "test"; + +TEST(BlobFetcherTest, FetchBucket) { + auto executor = std::make_unique(); + auto blob_storage_client = std::make_unique(); + + EXPECT_CALL(*blob_storage_client, Run).WillOnce([&]() { + return SuccessExecutionResult(); + }); + + EXPECT_CALL(*executor, Run).WillOnce([&](absl::AnyInvocable closure) { + closure(); + }); + + EXPECT_CALL(*blob_storage_client, ListBlobsMetadata) + .WillOnce( + [&](AsyncContext + async_context) { + auto async_bucket_name = + async_context.request->blob_metadata().bucket_name(); + EXPECT_EQ(async_bucket_name, kSampleBucketName); + + async_context.response = + std::make_shared(); + auto* blob_metadata = async_context.response->add_blob_metadatas(); + blob_metadata->set_bucket_name(kSampleBucketName); + blob_metadata->set_blob_name(kSampleBlobName); + async_context.result = SuccessExecutionResult(); + async_context.Finish(); + + return SuccessExecutionResult(); + }); + + EXPECT_CALL(*blob_storage_client, GetBlob) + .WillOnce( + [&](AsyncContext async_context) { + auto async_bucket_name = + async_context.request->blob_metadata().bucket_name(); + auto async_blob_name = + async_context.request->blob_metadata().blob_name(); + EXPECT_EQ(async_bucket_name, kSampleBucketName); + EXPECT_EQ(async_blob_name, kSampleBlobName); + + async_context.response = std::make_shared(); + async_context.response->mutable_blob()->set_data(kSampleData); + async_context.result = SuccessExecutionResult(); + async_context.Finish(); + + return SuccessExecutionResult(); + }); + + BlobFetcher bucket_fetcher(kSampleBucketName, executor.get(), + std::move(blob_storage_client)); + EXPECT_TRUE(bucket_fetcher.FetchSync().ok()); +} + +TEST(BlobFetcherTest, FetchBucket_Failure) { + auto executor = std::make_unique(); + auto blob_storage_client = std::make_unique(); + + EXPECT_CALL(*blob_storage_client, Run).WillOnce([&]() { + return SuccessExecutionResult(); + }); + + EXPECT_CALL(*executor, Run).WillOnce([&](absl::AnyInvocable closure) { + closure(); + }); + + EXPECT_CALL(*blob_storage_client, ListBlobsMetadata) + .WillOnce( + [&](AsyncContext + async_context) { + auto async_bucket_name = + async_context.request->blob_metadata().bucket_name(); + EXPECT_EQ(async_bucket_name, kSampleBucketName); + + async_context.response = + std::make_shared(); + auto* blob_metadata = 
async_context.response->add_blob_metadatas(); + blob_metadata->set_bucket_name(kSampleBucketName); + blob_metadata->set_blob_name(kSampleBlobName); + async_context.result = SuccessExecutionResult(); + async_context.Finish(); + + return SuccessExecutionResult(); + }); + + EXPECT_CALL(*blob_storage_client, GetBlob) + .WillOnce( + [&](AsyncContext async_context) { + async_context.result = FailureExecutionResult(SC_UNKNOWN); + async_context.Finish(); + + return FailureExecutionResult(SC_UNKNOWN); + }); + + BlobFetcher bucket_fetcher(kSampleBucketName, executor.get(), + std::move(blob_storage_client)); + auto status = bucket_fetcher.FetchSync(); + EXPECT_EQ(status.code(), absl::StatusCode::kInternal); +} + +} // namespace +} // namespace privacy_sandbox::bidding_auction_servers diff --git a/services/common/clients/BUILD b/services/common/clients/BUILD index c073265b..84a27473 100644 --- a/services/common/clients/BUILD +++ b/services/common/clients/BUILD @@ -42,7 +42,7 @@ cc_library( "@com_google_absl//absl/functional:any_invocable", "@com_google_absl//absl/status", "@com_google_absl//absl/time", - "@google_privacysandbox_servers_common//src/cpp/logger:request_context_logger", + "@google_privacysandbox_servers_common//src/logger:request_context_logger", ], ) diff --git a/services/common/clients/async_grpc/BUILD b/services/common/clients/async_grpc/BUILD index 17d03297..c86f2691 100644 --- a/services/common/clients/async_grpc/BUILD +++ b/services/common/clients/async_grpc/BUILD @@ -45,9 +45,9 @@ cc_library( "//services/common/encryption:crypto_client_wrapper_interface", "//services/common/encryption:key_fetcher_factory", "//services/common/util:error_categories", - "@google_privacysandbox_servers_common//src/cpp/encryption/key_fetcher/src:key_fetcher_manager", - "@google_privacysandbox_servers_common//src/cpp/logger:request_context_logger", - "@google_privacysandbox_servers_common//src/cpp/util/status_macro:status_macros", + "@google_privacysandbox_servers_common//src/encryption/key_fetcher:key_fetcher_manager", + "@google_privacysandbox_servers_common//src/logger:request_context_logger", + "@google_privacysandbox_servers_common//src/util/status_macro:status_macros", ], ) @@ -66,7 +66,7 @@ cc_test( "@com_google_absl//absl/synchronization", "@com_google_googletest//:gtest", "@com_google_googletest//:gtest_main", - "@google_privacysandbox_servers_common//src/cpp/encryption/key_fetcher/mock:mock_key_fetcher_manager", + "@google_privacysandbox_servers_common//src/encryption/key_fetcher/mock:mock_key_fetcher_manager", ], ) @@ -85,7 +85,7 @@ cc_test( "@com_google_absl//absl/synchronization", "@com_google_googletest//:gtest", "@com_google_googletest//:gtest_main", - "@google_privacysandbox_servers_common//src/cpp/encryption/key_fetcher/mock:mock_key_fetcher_manager", + "@google_privacysandbox_servers_common//src/encryption/key_fetcher/mock:mock_key_fetcher_manager", ], ) @@ -121,7 +121,7 @@ cc_test( "@com_google_absl//absl/synchronization", "@com_google_googletest//:gtest", "@com_google_googletest//:gtest_main", - "@google_privacysandbox_servers_common//src/cpp/encryption/key_fetcher/mock:mock_key_fetcher_manager", + "@google_privacysandbox_servers_common//src/encryption/key_fetcher/mock:mock_key_fetcher_manager", ], ) @@ -138,8 +138,8 @@ cc_library( "//services/common/encryption:key_fetcher_factory", "//services/common/util:hpke_utils", "@com_google_absl//absl/status", - "@google_privacysandbox_servers_common//src/cpp/encryption/key_fetcher/src:key_fetcher_manager", - 
"@google_privacysandbox_servers_common//src/cpp/logger:request_context_logger", - "@google_privacysandbox_servers_common//src/cpp/util/status_macro:status_macros", + "@google_privacysandbox_servers_common//src/encryption/key_fetcher:key_fetcher_manager", + "@google_privacysandbox_servers_common//src/logger:request_context_logger", + "@google_privacysandbox_servers_common//src/util/status_macro:status_macros", ], ) diff --git a/services/common/clients/async_grpc/bidding_async_grpc_client_stub_test.h b/services/common/clients/async_grpc/bidding_async_grpc_client_stub_test.h index 68bc5828..c3d80687 100644 --- a/services/common/clients/async_grpc/bidding_async_grpc_client_stub_test.h +++ b/services/common/clients/async_grpc/bidding_async_grpc_client_stub_test.h @@ -35,7 +35,7 @@ #include "services/common/encryption/mock_crypto_client_wrapper.h" #include "services/common/test/mocks.h" #include "services/common/test/random.h" -#include "src/cpp/encryption/key_fetcher/mock/mock_key_fetcher_manager.h" +#include "src/encryption/key_fetcher/mock/mock_key_fetcher_manager.h" namespace privacy_sandbox::bidding_auction_servers { constexpr char kTestSecret[] = "secret"; diff --git a/services/common/clients/async_grpc/default_async_grpc_client.h b/services/common/clients/async_grpc/default_async_grpc_client.h index 42e0b121..a7a38d10 100644 --- a/services/common/clients/async_grpc/default_async_grpc_client.h +++ b/services/common/clients/async_grpc/default_async_grpc_client.h @@ -26,9 +26,9 @@ #include "services/common/clients/client_params.h" #include "services/common/encryption/crypto_client_wrapper_interface.h" #include "services/common/util/error_categories.h" -#include "src/cpp/encryption/key_fetcher/src/key_fetcher_manager.h" -#include "src/cpp/logger/request_context_logger.h" -#include "src/cpp/util/status_macro/status_macros.h" +#include "src/encryption/key_fetcher/key_fetcher_manager.h" +#include "src/logger/request_context_logger.h" +#include "src/util/status_macro/status_macros.h" namespace privacy_sandbox::bidding_auction_servers { @@ -166,7 +166,7 @@ inline std::shared_ptr CreateChannel( // Set the default compression algorithm for the channel. 
args.SetCompressionAlgorithm(GRPC_COMPRESS_GZIP); } - return grpc::CreateCustomChannel(server_addr.data(), std::move(creds), args); + return grpc::CreateCustomChannel(server_addr.data(), creds, args); } } // namespace privacy_sandbox::bidding_auction_servers diff --git a/services/common/clients/async_grpc/default_async_grpc_client_stub_test.h b/services/common/clients/async_grpc/default_async_grpc_client_stub_test.h index 1b77d9cc..fa93a82b 100644 --- a/services/common/clients/async_grpc/default_async_grpc_client_stub_test.h +++ b/services/common/clients/async_grpc/default_async_grpc_client_stub_test.h @@ -35,7 +35,7 @@ #include "services/common/encryption/mock_crypto_client_wrapper.h" #include "services/common/test/mocks.h" #include "services/common/test/random.h" -#include "src/cpp/encryption/key_fetcher/mock/mock_key_fetcher_manager.h" +#include "src/encryption/key_fetcher/mock/mock_key_fetcher_manager.h" namespace privacy_sandbox::bidding_auction_servers { constexpr char kTestSecret[] = "secret"; diff --git a/services/common/clients/async_grpc/default_async_grpc_client_test.cc b/services/common/clients/async_grpc/default_async_grpc_client_test.cc index 9007d0a7..8f39133e 100644 --- a/services/common/clients/async_grpc/default_async_grpc_client_test.cc +++ b/services/common/clients/async_grpc/default_async_grpc_client_test.cc @@ -119,7 +119,7 @@ class TestDefaultAsyncGrpcClient absl::Duration expected_timeout) : DefaultAsyncGrpcClient(key_fetcher_manager, crypto_client), notification_(notification), - req_(req), + req_(std::move(req)), expected_timeout_(expected_timeout) {} void SendRpc(const std::string& hpke_secret, diff --git a/services/common/clients/async_grpc/default_raw_async_grpc_client_stub_test.h b/services/common/clients/async_grpc/default_raw_async_grpc_client_stub_test.h index 6ce1f756..4fecbb22 100644 --- a/services/common/clients/async_grpc/default_raw_async_grpc_client_stub_test.h +++ b/services/common/clients/async_grpc/default_raw_async_grpc_client_stub_test.h @@ -31,7 +31,7 @@ #include "services/common/encryption/mock_crypto_client_wrapper.h" #include "services/common/test/mocks.h" #include "services/common/test/random.h" -#include "src/cpp/encryption/key_fetcher/mock/mock_key_fetcher_manager.h" +#include "src/encryption/key_fetcher/mock/mock_key_fetcher_manager.h" namespace privacy_sandbox::bidding_auction_servers { diff --git a/services/common/clients/async_grpc/grpc_client_utils.h b/services/common/clients/async_grpc/grpc_client_utils.h index d2229eba..6a4bfd7d 100644 --- a/services/common/clients/async_grpc/grpc_client_utils.h +++ b/services/common/clients/async_grpc/grpc_client_utils.h @@ -24,9 +24,9 @@ #include "absl/status/statusor.h" #include "services/common/encryption/crypto_client_wrapper_interface.h" #include "services/common/util/hpke_utils.h" -#include "src/cpp/encryption/key_fetcher/src/key_fetcher_manager.h" -#include "src/cpp/logger/request_context_logger.h" -#include "src/cpp/util/status_macro/status_macros.h" +#include "src/encryption/key_fetcher/key_fetcher_manager.h" +#include "src/logger/request_context_logger.h" +#include "src/util/status_macro/status_macros.h" namespace privacy_sandbox::bidding_auction_servers { diff --git a/services/common/clients/auction_server/BUILD b/services/common/clients/auction_server/BUILD index 4cfd7777..d94a43ea 100644 --- a/services/common/clients/auction_server/BUILD +++ b/services/common/clients/auction_server/BUILD @@ -44,7 +44,7 @@ cc_library( "@com_google_absl//absl/status", 
"@com_google_absl//absl/status:statusor", "@com_google_absl//absl/time", - "@google_privacysandbox_servers_common//src/cpp/encryption/key_fetcher/interface:key_fetcher_manager_interface", + "@google_privacysandbox_servers_common//src/encryption/key_fetcher/interface:key_fetcher_manager_interface", ], ) @@ -61,7 +61,7 @@ cc_test( "@com_google_absl//absl/status:statusor", "@com_google_googletest//:gtest", "@com_google_googletest//:gtest_main", - "@google_privacysandbox_servers_common//src/cpp/encryption/key_fetcher/mock:mock_key_fetcher_manager", + "@google_privacysandbox_servers_common//src/encryption/key_fetcher/mock:mock_key_fetcher_manager", ], ) diff --git a/services/common/clients/auction_server/scoring_async_client.cc b/services/common/clients/auction_server/scoring_async_client.cc index 5f0e79df..ce5d5787 100644 --- a/services/common/clients/auction_server/scoring_async_client.cc +++ b/services/common/clients/auction_server/scoring_async_client.cc @@ -15,7 +15,7 @@ #include "services/common/clients/auction_server/scoring_async_client.h" #include "absl/status/statusor.h" -#include "scp/cc/public/cpio/interface/crypto_client/crypto_client_interface.h" +#include "src/public/cpio/interface/crypto_client/crypto_client_interface.h" namespace privacy_sandbox::bidding_auction_servers { @@ -24,7 +24,7 @@ using ::google::cmrt::sdk::public_key_service::v1::PublicKey; ScoringAsyncGrpcClient::ScoringAsyncGrpcClient( server_common::KeyFetcherManagerInterface* key_fetcher_manager, CryptoClientWrapperInterface* crypto_client, - AuctionServiceClientConfig client_config) + const AuctionServiceClientConfig& client_config) : DefaultAsyncGrpcClient(key_fetcher_manager, crypto_client) { stub_ = Auction::NewStub(CreateChannel(client_config.server_addr, client_config.compression, @@ -38,7 +38,7 @@ void ScoringAsyncGrpcClient::SendRpc( PS_VLOG(5) << "ScoringAsyncGrpcClient SendRpc invoked ..."; stub_->async()->ScoreAds( params->ContextRef(), params->RequestRef(), params->ResponseRef(), - [this, params, hpke_secret](grpc::Status status) { + [this, params, hpke_secret](const grpc::Status& status) { if (!status.ok()) { PS_VLOG(1) << "SendRPC completion status not ok: " << server_common::ToAbslStatus(status); diff --git a/services/common/clients/auction_server/scoring_async_client.h b/services/common/clients/auction_server/scoring_async_client.h index d2e6c1cb..42e3950c 100644 --- a/services/common/clients/auction_server/scoring_async_client.h +++ b/services/common/clients/auction_server/scoring_async_client.h @@ -26,7 +26,7 @@ #include "api/bidding_auction_servers.grpc.pb.h" #include "api/bidding_auction_servers.pb.h" #include "services/common/clients/async_grpc/default_async_grpc_client.h" -#include "src/cpp/encryption/key_fetcher/src/key_fetcher_manager.h" +#include "src/encryption/key_fetcher/key_fetcher_manager.h" namespace privacy_sandbox::bidding_auction_servers { using ScoringAsyncClient = AsyncClientasync()->GenerateBids( params->ContextRef(), params->RequestRef(), params->ResponseRef(), - [this, params, hpke_secret](grpc::Status status) { + [this, params, hpke_secret](const grpc::Status& status) { OnRpcDone( status, params, @@ -92,7 +92,7 @@ void ProtectedAppSignalsBiddingAsyncGrpcClient::SendRpc( PS_VLOG(5) << "ProtectedAppSignalsBiddingAsyncGrpcClient SendRpc invoked ..."; stub_->async()->GenerateProtectedAppSignalsBids( params->ContextRef(), params->RequestRef(), params->ResponseRef(), - [this, params, hpke_secret](grpc::Status status) { + [this, params, hpke_secret](const grpc::Status& status) { 
OnRpcDone( diff --git a/services/common/clients/buyer_frontend_server/BUILD b/services/common/clients/buyer_frontend_server/BUILD index 7d95ff19..409f3fd3 100644 --- a/services/common/clients/buyer_frontend_server/BUILD +++ b/services/common/clients/buyer_frontend_server/BUILD @@ -28,7 +28,7 @@ cc_library( "@com_github_grpc_grpc//:grpc++", "@com_google_absl//absl/functional:any_invocable", "@com_google_absl//absl/time", - "@google_privacysandbox_servers_common//src/cpp/encryption/key_fetcher/src:key_fetcher_manager", + "@google_privacysandbox_servers_common//src/encryption/key_fetcher:key_fetcher_manager", "@rapidjson", ], ) diff --git a/services/common/clients/buyer_frontend_server/buyer_frontend_async_client.cc b/services/common/clients/buyer_frontend_server/buyer_frontend_async_client.cc index 68256107..ed7a3f79 100644 --- a/services/common/clients/buyer_frontend_server/buyer_frontend_async_client.cc +++ b/services/common/clients/buyer_frontend_server/buyer_frontend_async_client.cc @@ -14,7 +14,7 @@ #include "services/common/clients/buyer_frontend_server/buyer_frontend_async_client.h" -#include "scp/cc/public/cpio/interface/crypto_client/crypto_client_interface.h" +#include "src/public/cpio/interface/crypto_client/crypto_client_interface.h" namespace privacy_sandbox::bidding_auction_servers { @@ -23,7 +23,7 @@ using ::google::cmrt::sdk::public_key_service::v1::PublicKey; BuyerFrontEndAsyncGrpcClient::BuyerFrontEndAsyncGrpcClient( server_common::KeyFetcherManagerInterface* key_fetcher_manager, CryptoClientWrapperInterface* crypto_client, - BuyerServiceClientConfig client_config, + const BuyerServiceClientConfig& client_config, std::unique_ptr<BuyerFrontEnd::StubInterface> stub) : DefaultAsyncGrpcClient(key_fetcher_manager, crypto_client, client_config.cloud_platform), @@ -42,7 +42,7 @@ void BuyerFrontEndAsyncGrpcClient::SendRpc( PS_VLOG(5) << "BuyerFrontEndAsyncGrpcClient SendRpc invoked ..."; stub_->async()->GetBids( params->ContextRef(), params->RequestRef(), params->ResponseRef(), - [this, params, hpke_secret](grpc::Status status) { + [this, params, hpke_secret](const grpc::Status& status) { if (!status.ok()) { PS_VLOG(1) << "SendRPC completion status not ok: " << server_common::ToAbslStatus(status); diff --git a/services/common/clients/buyer_frontend_server/buyer_frontend_async_client.h b/services/common/clients/buyer_frontend_server/buyer_frontend_async_client.h index f16b1654..e27d4669 100644 --- a/services/common/clients/buyer_frontend_server/buyer_frontend_async_client.h +++ b/services/common/clients/buyer_frontend_server/buyer_frontend_async_client.h @@ -25,7 +25,7 @@ #include "api/bidding_auction_servers.pb.h" #include "services/common/clients/async_client.h" #include "services/common/clients/async_grpc/default_async_grpc_client.h" -#include "src/cpp/encryption/key_fetcher/src/key_fetcher_manager.h" +#include "src/encryption/key_fetcher/key_fetcher_manager.h" namespace privacy_sandbox::bidding_auction_servers { using BuyerFrontEndAsyncClient = @@ -50,7 +50,7 @@ class BuyerFrontEndAsyncGrpcClient explicit BuyerFrontEndAsyncGrpcClient( server_common::KeyFetcherManagerInterface* key_fetcher_manager, CryptoClientWrapperInterface* crypto_client, - BuyerServiceClientConfig client_config, + const BuyerServiceClientConfig& client_config, std::unique_ptr<BuyerFrontEnd::StubInterface> stub = nullptr); protected: diff --git a/services/common/clients/buyer_frontend_server/buyer_frontend_async_client_factory.h b/services/common/clients/buyer_frontend_server/buyer_frontend_async_client_factory.h index 0b2cec51..7d5d7d81 100644 ---
a/services/common/clients/buyer_frontend_server/buyer_frontend_async_client_factory.h +++ b/services/common/clients/buyer_frontend_server/buyer_frontend_async_client_factory.h @@ -24,7 +24,7 @@ #include "services/common/clients/client_factory.h" #include "services/common/concurrent/local_cache.h" #include "services/seller_frontend_service/util/config_param_parser.h" -#include "src/cpp/encryption/key_fetcher/src/key_fetcher_manager.h" +#include "src/encryption/key_fetcher/key_fetcher_manager.h" namespace privacy_sandbox::bidding_auction_servers { diff --git a/services/common/clients/client_params.h b/services/common/clients/client_params.h index 98dfc500..06771278 100644 --- a/services/common/clients/client_params.h +++ b/services/common/clients/client_params.h @@ -25,7 +25,7 @@ #include "absl/functional/any_invocable.h" #include "absl/status/statusor.h" #include "absl/time/time.h" -#include "src/cpp/logger/request_context_logger.h" +#include "src/logger/request_context_logger.h" namespace privacy_sandbox::bidding_auction_servers { diff --git a/services/common/clients/code_dispatcher/BUILD b/services/common/clients/code_dispatcher/BUILD index cc04c096..244bdcbd 100644 --- a/services/common/clients/code_dispatcher/BUILD +++ b/services/common/clients/code_dispatcher/BUILD @@ -18,27 +18,21 @@ package(default_visibility = ["//:__subpackages__"]) cc_library( name = "v8_dispatcher", - srcs = [ - "v8_dispatcher.cc", - ], - hdrs = [ - "v8_dispatcher.h", - ], + srcs = ["v8_dispatcher.cc"], + hdrs = ["v8_dispatcher.h"], deps = [ "@com_google_absl//absl/functional:any_invocable", "@com_google_absl//absl/status", - "@google_privacysandbox_servers_common//scp/cc/roma/roma_service:roma_service_lib", + "@com_google_absl//absl/synchronization", + "@google_privacysandbox_servers_common//src/logger:request_context_logger", + "@google_privacysandbox_servers_common//src/roma/roma_service", ], ) cc_library( name = "code_dispatch_client", - srcs = [ - "code_dispatch_client.cc", - ], - hdrs = [ - "code_dispatch_client.h", - ], + srcs = ["code_dispatch_client.cc"], + hdrs = ["code_dispatch_client.h"], deps = [ ":v8_dispatcher", "@com_google_absl//absl/functional:any_invocable", diff --git a/services/common/clients/code_dispatcher/code_dispatch_client.h b/services/common/clients/code_dispatcher/code_dispatch_client.h index 5d7e155c..01b84f41 100644 --- a/services/common/clients/code_dispatcher/code_dispatch_client.h +++ b/services/common/clients/code_dispatcher/code_dispatch_client.h @@ -20,8 +20,8 @@ #include "absl/functional/any_invocable.h" #include "absl/status/status.h" #include "absl/status/statusor.h" -#include "scp/cc/roma/interface/roma.h" #include "services/common/clients/code_dispatcher/v8_dispatcher.h" +#include "src/roma/interface/roma.h" namespace privacy_sandbox::bidding_auction_servers { diff --git a/services/common/clients/code_dispatcher/code_dispatch_client_integration_test.cc b/services/common/clients/code_dispatcher/code_dispatch_client_integration_test.cc index 9161e056..04ce4d66 100644 --- a/services/common/clients/code_dispatcher/code_dispatch_client_integration_test.cc +++ b/services/common/clients/code_dispatcher/code_dispatch_client_integration_test.cc @@ -79,7 +79,6 @@ TEST(CodeDispatchClientTest, PassesJavascriptResultToCallback) { ASSERT_TRUE(status.ok()); done.Wait(); - ASSERT_TRUE(dispatcher.Stop().ok()); } TEST(CodeDispatchClientTest, RunsLatestCodeVersion) { @@ -108,7 +107,6 @@ TEST(CodeDispatchClientTest, RunsLatestCodeVersion) { ASSERT_TRUE(status.ok()); done.Wait(); } - 
ASSERT_TRUE(dispatcher.Stop().ok()); } } // namespace } // namespace privacy_sandbox::bidding_auction_servers diff --git a/services/common/clients/code_dispatcher/v8_dispatcher.cc b/services/common/clients/code_dispatcher/v8_dispatcher.cc index f038c237..8c612c42 100644 --- a/services/common/clients/code_dispatcher/v8_dispatcher.cc +++ b/services/common/clients/code_dispatcher/v8_dispatcher.cc @@ -20,8 +20,9 @@ #include #include "absl/status/status.h" -#include "absl/synchronization/blocking_counter.h" -#include "scp/cc/roma/interface/roma.h" +#include "absl/synchronization/notification.h" +#include "src/logger/request_context_logger.h" +#include "src/roma/interface/roma.h" namespace privacy_sandbox::bidding_auction_servers { @@ -32,33 +33,37 @@ using LoadDoneCallback = ::google::scp::roma::Callback; V8Dispatcher::V8Dispatcher(DispatchConfig&& config) : roma_service_(std::move(config)) {} -absl::Status V8Dispatcher::Init() { return roma_service_.Init(); } +V8Dispatcher::~V8Dispatcher() { + PS_VLOG(1) << "Stopping roma service..."; + absl::Status stop_status = roma_service_.Stop(); + PS_VLOG(1) << "Roma service stop status: " << stop_status; +} -absl::Status V8Dispatcher::Stop() { return roma_service_.Stop(); } +absl::Status V8Dispatcher::Init() { return roma_service_.Init(); } absl::Status V8Dispatcher::LoadSync(absl::string_view version, absl::string_view js) { - LoadRequest request; - request.version_string = version; - request.js = js; - absl::BlockingCounter is_loading(1); - + auto request = std::make_unique<LoadRequest>(LoadRequest{ + .version_string = std::string(version), + .js = std::string(js), + }); + absl::Notification load_finished; absl::Status load_status; - absl::Status try_load = roma_service_.LoadCodeObj( - std::make_unique<LoadRequest>(request), - [&is_loading, &load_status](const absl::StatusOr<ResponseObject>& res) { - if (!res.ok()) { - load_status.Update(res.status()); - } - is_loading.DecrementCount(); - }); - if (!try_load.ok()) { + if (absl::Status try_load = roma_service_.LoadCodeObj( + std::move(request), + [&load_finished, + &load_status](absl::StatusOr<ResponseObject> res) { // NOLINT + if (!res.ok()) { + load_status.Update(res.status()); + } + load_finished.Notify(); + }); + !try_load.ok()) { // Load callback won't be called, we can return. return try_load; - } else { - is_loading.Wait(); - return load_status; } + load_finished.WaitForNotification(); + return load_status; } absl::Status V8Dispatcher::Execute(std::unique_ptr<DispatchRequest> request, diff --git a/services/common/clients/code_dispatcher/v8_dispatcher.h b/services/common/clients/code_dispatcher/v8_dispatcher.h index c5950ef6..eb66f8bf 100644 --- a/services/common/clients/code_dispatcher/v8_dispatcher.h +++ b/services/common/clients/code_dispatcher/v8_dispatcher.h @@ -20,8 +20,8 @@ #include #include "absl/status/status.h" -#include "scp/cc/roma/interface/roma.h" -#include "scp/cc/roma/roma_service/roma_service.h" +#include "src/roma/interface/roma.h" +#include "src/roma/roma_service/roma_service.h" namespace privacy_sandbox::bidding_auction_servers { @@ -45,6 +45,8 @@ class V8Dispatcher { public: explicit V8Dispatcher(DispatchConfig&& config = DispatchConfig()); + ~V8Dispatcher(); + // Init the dispatcher. Note that this call may bring up multiple processes, // which can be slow and should only happen on server startup. // @@ -54,14 +56,6 @@ class V8Dispatcher { // fails, a client may retry. absl::Status Init(); - // Stop the dispatcher. Note that this call may bring down multiple processes, - // which can be slow and should only happen on server shutdown.
- // - // This will clean up all running processes the dispatcher owns. - // return: a status indicated success or failure in stopping. If stopping - // fails, a client may retry. - absl::Status Stop(); - // Load new execution code synchronously. This is a blocking wrapper around // the google::scp::roma::LoadCodeObj method. // diff --git a/services/common/clients/config/BUILD b/services/common/clients/config/BUILD index e31441ca..c1267b6a 100644 --- a/services/common/clients/config/BUILD +++ b/services/common/clients/config/BUILD @@ -29,9 +29,9 @@ cc_library( "@com_google_absl//absl/status:statusor", "@com_google_absl//absl/strings", "@com_google_absl//absl/synchronization", - "@google_privacysandbox_servers_common//scp/cc/public/core/interface:execution_result", - "@google_privacysandbox_servers_common//scp/cc/public/cpio/interface/instance_client", - "@google_privacysandbox_servers_common//src/cpp/logger:request_context_logger", + "@google_privacysandbox_servers_common//src/logger:request_context_logger", + "@google_privacysandbox_servers_common//src/public/core/interface:execution_result", + "@google_privacysandbox_servers_common//src/public/cpio/interface/instance_client", ], ) @@ -46,11 +46,11 @@ cc_library( "@com_google_absl//absl/log:check", "@com_google_absl//absl/status", "@com_google_absl//absl/strings:str_format", - "@google_privacysandbox_servers_common//scp/cc/public/core/interface:errors", - "@google_privacysandbox_servers_common//scp/cc/public/cpio/interface/parameter_client", - "@google_privacysandbox_servers_common//src/cpp/logger:request_context_logger", - "@google_privacysandbox_servers_common//src/cpp/telemetry/flag:telemetry_flag", - "@google_privacysandbox_servers_common//src/cpp/util/status_macro:status_macros", + "@google_privacysandbox_servers_common//src/logger:request_context_logger", + "@google_privacysandbox_servers_common//src/public/core/interface:errors", + "@google_privacysandbox_servers_common//src/public/cpio/interface/parameter_client", + "@google_privacysandbox_servers_common//src/telemetry/flag:telemetry_flag", + "@google_privacysandbox_servers_common//src/util/status_macro:status_macros", ], ) @@ -64,7 +64,7 @@ cc_test( "@com_google_absl//absl/time", "@com_google_googletest//:gtest", "@com_google_googletest//:gtest_main", - "@google_privacysandbox_servers_common//scp/cc/public/cpio/interface:cpio", - "@google_privacysandbox_servers_common//scp/cc/public/cpio/mock/parameter_client:parameter_client_mock", + "@google_privacysandbox_servers_common//src/public/cpio/interface:cpio", + "@google_privacysandbox_servers_common//src/public/cpio/mock/parameter_client:parameter_client_mock", ], ) diff --git a/services/common/clients/config/add_zone_aws.cc b/services/common/clients/config/add_zone_aws.cc index c75069c2..1a2e0407 100644 --- a/services/common/clients/config/add_zone_aws.cc +++ b/services/common/clients/config/add_zone_aws.cc @@ -14,8 +14,8 @@ #include -#include "scp/cc/cpio/client_providers/instance_client_provider/src/aws/aws_instance_client_utils.h" #include "services/common/clients/config/trusted_server_config_client_util.h" +#include "src/cpio/client_providers/instance_client_provider/aws/aws_instance_client_utils.h" using google::scp::cpio::client_providers::AwsInstanceClientUtils; using google::scp::cpio::client_providers::AwsResourceNameDetails; @@ -26,4 +26,8 @@ void TrustedServerConfigUtil::ComputeZone(absl::string_view resource_name) { AwsInstanceClientUtils::GetResourceNameDetails(resource_name, details); region_ = details.region; } 
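Two things in the V8Dispatcher change above deserve a note. First, the explicit Stop() method is gone: the destructor now stops the Roma service, which is why the integration tests no longer call dispatcher.Stop() themselves. Second, LoadSync() waits on a one-shot absl::Notification instead of an absl::BlockingCounter, and it returns early when LoadCodeObj itself fails, because in that case the callback will never fire and waiting would hang. A generic sketch of that wait shape, with a toy StartAsync standing in for roma_service_.LoadCodeObj (not the real Roma API):

    #include <thread>

    #include "absl/functional/any_invocable.h"
    #include "absl/status/status.h"
    #include "absl/synchronization/notification.h"

    // Toy stand-in for an async API that invokes `done` once on another thread.
    absl::Status StartAsync(absl::AnyInvocable<void(absl::Status)> done) {
      std::thread([done = std::move(done)]() mutable { done(absl::OkStatus()); })
          .detach();
      return absl::OkStatus();
    }

    absl::Status RunSync() {
      absl::Notification finished;
      absl::Status result;
      if (absl::Status started = StartAsync([&finished, &result](absl::Status s) {
            result.Update(s);
            finished.Notify();
          });
          !started.ok()) {
        return started;  // Callback will never run; waiting would deadlock.
      }
      finished.WaitForNotification();
      return result;
    }

absl::Notification fits better than a BlockingCounter of one: it is single-shot by construction and makes the intent, waiting for exactly one completion, explicit.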
+absl::btree_map<std::string, std::string> +TrustedServerConfigUtil::GetAttribute() const { + return {}; +} } // namespace privacy_sandbox::bidding_auction_servers diff --git a/services/common/clients/config/add_zone_default.cc b/services/common/clients/config/add_zone_default.cc index f68ad691..7c04eb00 100644 --- a/services/common/clients/config/add_zone_default.cc +++ b/services/common/clients/config/add_zone_default.cc @@ -17,5 +17,9 @@ #include "services/common/clients/config/trusted_server_config_client_util.h" namespace privacy_sandbox::bidding_auction_servers { +absl::btree_map<std::string, std::string> +TrustedServerConfigUtil::GetAttribute() const { + return {}; +} void TrustedServerConfigUtil::ComputeZone(absl::string_view resource_name) {} } // namespace privacy_sandbox::bidding_auction_servers diff --git a/services/common/clients/config/add_zone_gcp.cc b/services/common/clients/config/add_zone_gcp.cc index f59be52b..d9bb4125 100644 --- a/services/common/clients/config/add_zone_gcp.cc +++ b/services/common/clients/config/add_zone_gcp.cc @@ -14,8 +14,8 @@ #include -#include "scp/cc/cpio/client_providers/instance_client_provider/src/gcp/gcp_instance_client_utils.h" #include "services/common/clients/config/trusted_server_config_client_util.h" +#include "src/cpio/client_providers/instance_client_provider/gcp/gcp_instance_client_utils.h" using google::scp::cpio::client_providers::GcpInstanceClientUtils; using google::scp::cpio::client_providers::GcpInstanceResourceNameDetails; @@ -31,4 +31,8 @@ void TrustedServerConfigUtil::ComputeZone(absl::string_view resource_name) { // of the zone id. region_ = zone_.substr(0, zone_.size() - 2); } +absl::btree_map<std::string, std::string> +TrustedServerConfigUtil::GetAttribute() const { + return {{kZone.data(), TrustedServerConfigUtil::GetZone().data()}}; +} } // namespace privacy_sandbox::bidding_auction_servers diff --git a/services/common/clients/config/trusted_server_config_client.cc b/services/common/clients/config/trusted_server_config_client.cc index 18a6fd1b..1e9ae631 100644 --- a/services/common/clients/config/trusted_server_config_client.cc +++ b/services/common/clients/config/trusted_server_config_client.cc @@ -25,10 +25,10 @@ #include "absl/strings/ascii.h" #include "absl/strings/str_format.h" #include "absl/synchronization/blocking_counter.h" -#include "scp/cc/core/interface/errors.h" -#include "scp/cc/public/core/interface/execution_result.h" -#include "scp/cc/public/cpio/interface/parameter_client/parameter_client_interface.h" -#include "src/cpp/util/status_macro/status_macros.h" +#include "src/core/interface/errors.h" +#include "src/public/core/interface/execution_result.h" +#include "src/public/cpio/interface/parameter_client/parameter_client_interface.h" +#include "src/util/status_macro/status_macros.h" using ::google::cmrt::sdk::parameter_service::v1::GetParameterRequest; using ::google::cmrt::sdk::parameter_service::v1::GetParameterResponse; diff --git a/services/common/clients/config/trusted_server_config_client.h b/services/common/clients/config/trusted_server_config_client.h index 4af2f3c9..725180af 100644 --- a/services/common/clients/config/trusted_server_config_client.h +++ b/services/common/clients/config/trusted_server_config_client.h @@ -20,15 +20,14 @@ #include #include -#include - #include "absl/container/flat_hash_map.h" #include "absl/flags/flag.h" #include "absl/functional/any_invocable.h" #include "absl/log/check.h" #include "absl/status/status.h" -#include "scp/cc/core/interface/type_def.h" -#include "src/cpp/telemetry/flag/telemetry_flag.h" +#include "src/core/interface/type_def.h"
+#include "src/public/cpio/interface/parameter_client/parameter_client_interface.h" +#include "src/telemetry/flag/telemetry_flag.h" namespace privacy_sandbox::bidding_auction_servers { @@ -60,10 +59,10 @@ class TrustedServersConfigClient { std::unique_ptr<google::scp::cpio::ParameterClientInterface>( google::scp::cpio::ParameterClientOptions) &&> config_client_provider_fn = - [](google::scp::cpio::ParameterClientOptions + [](const google::scp::cpio::ParameterClientOptions& parameter_client_options) { return google::scp::cpio::ParameterClientFactory::Create( - std::move(parameter_client_options)); + parameter_client_options); }); // Creates and initializes the config client to fetch config values diff --git a/services/common/clients/config/trusted_server_config_client_test.cc b/services/common/clients/config/trusted_server_config_client_test.cc index 02aefbf5..c516e07d 100644 --- a/services/common/clients/config/trusted_server_config_client_test.cc +++ b/services/common/clients/config/trusted_server_config_client_test.cc @@ -24,10 +24,10 @@ #include "absl/time/time.h" #include "gmock/gmock.h" #include "gtest/gtest.h" -#include "scp/cc/public/cpio/interface/error_codes.h" -#include "scp/cc/public/cpio/interface/parameter_client/parameter_client_interface.h" -#include "scp/cc/public/cpio/mock/parameter_client/mock_parameter_client.h" #include "services/common/test/mocks.h" +#include "src/public/cpio/interface/error_codes.h" +#include "src/public/cpio/interface/parameter_client/parameter_client_interface.h" +#include "src/public/cpio/mock/parameter_client/mock_parameter_client.h" ABSL_FLAG(std::optional<std::string>, config_param_1, std::nullopt, "test flag 1"); @@ -73,7 +73,7 @@ TEST(TrustedServerConfigClientTest, CanReadFlagsPassedThroughConstructor) { std::vector<std::future<void>> f; TrustedServersConfigClient config_client( kFlags, - [&](ParameterClientOptions parameter_client_options) + [&f](const ParameterClientOptions& parameter_client_options) -> std::unique_ptr<ParameterClientInterface> { std::unique_ptr<MockParameterClient> mock_config_client = std::make_unique<MockParameterClient>(); @@ -82,8 +82,8 @@ TEST(TrustedServerConfigClientTest, CanReadFlagsPassedThroughConstructor) { EXPECT_CALL(*mock_config_client, Run) .WillOnce(Return(SuccessExecutionResult())); EXPECT_CALL(*mock_config_client, GetParameter) - .WillRepeatedly([&](GetParameterRequest get_param_req, - Callback<GetParameterResponse> callback) + .WillRepeatedly([&f](const GetParameterRequest& get_param_req, + Callback<GetParameterResponse> callback) -> ExecutionResult { // async reading parameter like the real case.
f.push_back(std::async(std::launch::async, [cb = std::move( @@ -132,7 +132,8 @@ TEST(TrustedServerConfigClientTest, FetchesConfigValueFromConfigClient) { std::vector<std::future<void>> f; TrustedServersConfigClient config_client( kFlags, - [&](ParameterClientOptions parameter_client_options) + [&f, &expected_param_values]( + const ParameterClientOptions& parameter_client_options) -> std::unique_ptr<ParameterClientInterface> { std::unique_ptr<MockParameterClient> mock_config_client = std::make_unique<MockParameterClient>(); @@ -141,7 +142,8 @@ TEST(TrustedServerConfigClientTest, FetchesConfigValueFromConfigClient) { EXPECT_CALL(*mock_config_client, Run) .WillOnce(Return(SuccessExecutionResult())); EXPECT_CALL(*mock_config_client, GetParameter) - .WillRepeatedly([&](GetParameterRequest get_param_req, + .WillRepeatedly([&f, &expected_param_values]( + GetParameterRequest get_param_req, Callback<GetParameterResponse> callback) -> ExecutionResult { // async reading parameter like the real case @@ -185,7 +187,8 @@ TEST(TrustedServerConfigClientTest, OverwritesConfigValueFromCloud) { std::vector<std::future<void>> f; TrustedServersConfigClient config_client( {key}, - [&](ParameterClientOptions parameter_client_options) + [&f, &expected_param_values]( + const ParameterClientOptions& parameter_client_options) -> std::unique_ptr<ParameterClientInterface> { std::unique_ptr<MockParameterClient> mock_config_client = std::make_unique<MockParameterClient>(); @@ -194,7 +197,8 @@ TEST(TrustedServerConfigClientTest, OverwritesConfigValueFromCloud) { EXPECT_CALL(*mock_config_client, Run) .WillOnce(Return(SuccessExecutionResult())); EXPECT_CALL(*mock_config_client, GetParameter) - .WillRepeatedly([&](GetParameterRequest get_param_req, + .WillRepeatedly([&f, &expected_param_values]( + GetParameterRequest get_param_req, Callback<GetParameterResponse> callback) -> ExecutionResult { // async reading parameter like the real case @@ -227,7 +231,7 @@ TEST(TrustedServerConfigClientTest, ThrowsUnavailableErrorOnClientInitFail) { TrustedServersConfigClient config_client( {"config_param_1"}, - [](ParameterClientOptions parameter_client_options) + [](const ParameterClientOptions& parameter_client_options) -> std::unique_ptr<ParameterClientInterface> { std::unique_ptr<MockParameterClient> mock_config_client = std::make_unique<MockParameterClient>(); @@ -246,7 +250,7 @@ TEST(TrustedServerConfigClientTest, ThrowsUnavailableErrorOnClientInitFail) { TEST(TrustedServerConfigClientTest, PrependsFlagNamesWithTag) { TrustedServersConfigClient config_client( {"config_param_1"}, - [](ParameterClientOptions parameter_client_options) + [](const ParameterClientOptions& parameter_client_options) -> std::unique_ptr<ParameterClientInterface> { std::unique_ptr<MockParameterClient> mock_config_client = std::make_unique<MockParameterClient>(); @@ -255,18 +259,18 @@ TEST(TrustedServerConfigClientTest, PrependsFlagNamesWithTag) { EXPECT_CALL(*mock_config_client, Run()) .WillOnce(Return(SuccessExecutionResult())); EXPECT_CALL(*mock_config_client, GetParameter) - .WillOnce( - [](GetParameterRequest get_param_req, - Callback<GetParameterResponse> callback) -> ExecutionResult { - // Verify we query for fetched config values with the prefix. - EXPECT_EQ(get_param_req.parameter_name(), - "MyConfigParamPrefix-config_param_1"); + .WillOnce([](const GetParameterRequest& get_param_req, + const Callback<GetParameterResponse>& callback) + -> ExecutionResult { + // Verify we query for fetched config values with the prefix.
+ EXPECT_EQ(get_param_req.parameter_name(), + "MyConfigParamPrefix-config_param_1"); - GetParameterResponse response; - response.set_parameter_value("config_value_1"); - callback(SuccessExecutionResult(), response); - return SuccessExecutionResult(); - }); + GetParameterResponse response; + response.set_parameter_value("config_value_1"); + callback(SuccessExecutionResult(), response); + return SuccessExecutionResult(); + }); return std::move(mock_config_client); }); ASSERT_TRUE(config_client.Init("MyConfigParamPrefix-").ok()); diff --git a/services/common/clients/config/trusted_server_config_client_util.cc b/services/common/clients/config/trusted_server_config_client_util.cc index 47adf814..5d4159b9 100644 --- a/services/common/clients/config/trusted_server_config_client_util.cc +++ b/services/common/clients/config/trusted_server_config_client_util.cc @@ -24,8 +24,8 @@ #include "absl/strings/str_format.h" #include "absl/strings/string_view.h" #include "absl/synchronization/notification.h" -#include "scp/cc/public/core/interface/execution_result.h" -#include "scp/cc/public/cpio/interface/instance_client/instance_client_interface.h" +#include "src/public/core/interface/execution_result.h" +#include "src/public/cpio/interface/instance_client/instance_client_interface.h" namespace privacy_sandbox::bidding_auction_servers { @@ -62,7 +62,7 @@ absl::Status HandleFailure(absl::string_view error) noexcept { } absl::StatusOr<std::string> GetResourceName( - std::shared_ptr<google::scp::cpio::InstanceClientInterface> client) { + std::shared_ptr<google::scp::cpio::InstanceClientInterface> client) { // NOLINT std::string resource_name; absl::Notification done; @@ -103,8 +103,7 @@ TrustedServerConfigUtil::TrustedServerConfigUtil(bool init_config_client) } std::shared_ptr<google::scp::cpio::InstanceClientInterface> client = - google::scp::cpio::InstanceClientFactory::Create( - std::move(InstanceClientOptions())); + google::scp::cpio::InstanceClientFactory::Create(InstanceClientOptions()); client->Init(); absl::StatusOr<std::string> resource_name = GetResourceName(client); CHECK_OK(resource_name) << "Could not fetch host resource name."; @@ -138,7 +137,7 @@ TrustedServerConfigUtil::TrustedServerConfigUtil(bool init_config_client) } } -// Returns the string to preprend the names of all keys/flags fetched from the +// Returns the string to prepend the names of all keys/flags fetched from the // Parameter Store. The prepended string follows the format // "{operator}-{environment}-" where {operator} and {environment} are values for // the EC2 tag keys by these names. diff --git a/services/common/clients/config/trusted_server_config_client_util.h b/services/common/clients/config/trusted_server_config_client_util.h index 76bcb46e..3eb9cbc4 100644 --- a/services/common/clients/config/trusted_server_config_client_util.h +++ b/services/common/clients/config/trusted_server_config_client_util.h @@ -17,10 +17,13 @@ #include +#include "absl/container/btree_map.h" #include "absl/strings/string_view.h" namespace privacy_sandbox::bidding_auction_servers { +inline constexpr std::string_view kZone = "zone"; + class TrustedServerConfigUtil { public: // Uses an SCP provided client to parse details of the running instance. @@ -42,6 +45,8 @@ class TrustedServerConfigUtil { void ComputeZone(absl::string_view resource_name); + absl::btree_map<std::string, std::string> GetAttribute() const; + // Returns a config param prefix that is to be prepended to all keys fetched // from the metadata store.
std::string GetConfigParameterPrefix() noexcept; diff --git a/services/common/clients/http/BUILD b/services/common/clients/http/BUILD index aed7081c..28eee87e 100644 --- a/services/common/clients/http/BUILD +++ b/services/common/clients/http/BUILD @@ -23,6 +23,8 @@ cc_library( ], visibility = ["//visibility:public"], deps = [ + "@com_google_absl//absl/functional:any_invocable", + "@com_google_absl//absl/status:statusor", "@com_google_absl//absl/time", ], ) @@ -44,8 +46,8 @@ cc_library( "@com_google_absl//absl/strings", "@com_google_absl//absl/time", "@curl", - "@google_privacysandbox_servers_common//src/cpp/concurrent:executor", - "@google_privacysandbox_servers_common//src/cpp/logger:request_context_logger", + "@google_privacysandbox_servers_common//src/concurrent:executor", + "@google_privacysandbox_servers_common//src/logger:request_context_logger", ], ) @@ -64,7 +66,7 @@ cc_test( "@com_google_absl//absl/time", "@com_google_googletest//:gtest", "@com_google_googletest//:gtest_main", - "@google_privacysandbox_servers_common//src/cpp/concurrent:executor", + "@google_privacysandbox_servers_common//src/concurrent:executor", "@rapidjson", ], ) diff --git a/services/common/clients/http/multi_curl_http_fetcher_async.cc b/services/common/clients/http/multi_curl_http_fetcher_async.cc index 1aeddc25..0e80fc4c 100644 --- a/services/common/clients/http/multi_curl_http_fetcher_async.cc +++ b/services/common/clients/http/multi_curl_http_fetcher_async.cc @@ -23,7 +23,7 @@ #include "absl/strings/str_cat.h" #include "absl/strings/string_view.h" #include "absl/time/time.h" -#include "src/cpp/logger/request_context_logger.h" +#include "src/logger/request_context_logger.h" namespace privacy_sandbox::bidding_auction_servers { using ::grpc_event_engine::experimental::EventEngine; @@ -269,7 +269,7 @@ void MultiCurlHttpFetcherAsync::ExecuteCurlRequest( // Release request data ownership so it can be tracked // completely through the curl easy handle. This will be manually cleaned // when the request completes or when this class is destroyed. 
- request.release(); + request.release(); // NOLINT return; case CURLM_BAD_HANDLE: case CURLM_BAD_EASY_HANDLE: @@ -304,8 +304,8 @@ void MultiCurlHttpFetcherAsync::PutUrl(const HTTPRequest& http_request, CreateCurlRequest(http_request, timeout_ms, keepalive_idle_sec_, keepalive_interval_sec_, std::move(done_callback)); - request->body = std::make_unique<DataToUpload>( - DataToUpload{std::move(http_request.body)}); + request->body = + std::make_unique<DataToUpload>(DataToUpload{http_request.body}); curl_easy_setopt(request->req_handle, CURLOPT_UPLOAD, 1L); curl_easy_setopt(request->req_handle, CURLOPT_PUT, 1L); curl_easy_setopt(request->req_handle, CURLOPT_POSTFIELDSIZE_LARGE, diff --git a/services/common/clients/http/multi_curl_http_fetcher_async.h b/services/common/clients/http/multi_curl_http_fetcher_async.h index 77339eff..2077c552 100644 --- a/services/common/clients/http/multi_curl_http_fetcher_async.h +++ b/services/common/clients/http/multi_curl_http_fetcher_async.h @@ -28,7 +28,7 @@ #include "absl/synchronization/notification.h" #include "services/common/clients/http/http_fetcher_async.h" #include "services/common/clients/http/multi_curl_request_manager.h" -#include "src/cpp/concurrent/event_engine_executor.h" +#include "src/concurrent/event_engine_executor.h" namespace privacy_sandbox::bidding_auction_servers { diff --git a/services/common/clients/http/multi_curl_http_fetcher_async_test.cc b/services/common/clients/http/multi_curl_http_fetcher_async_test.cc index 2cdf7903..2badab6d 100644 --- a/services/common/clients/http/multi_curl_http_fetcher_async_test.cc +++ b/services/common/clients/http/multi_curl_http_fetcher_async_test.cc @@ -107,6 +107,7 @@ TEST_F(MultiCurlHttpFetcherAsyncTest, TEST_F(MultiCurlHttpFetcherAsyncTest, HandlesTimeoutByReturningError) { std::string msg; absl::BlockingCounter done(1); + // NOLINTNEXTLINE auto done_cb = [&done](absl::StatusOr<std::string> result) { done.DecrementCount(); ASSERT_FALSE(result.ok()); @@ -120,6 +121,7 @@ TEST_F(MultiCurlHttpFetcherAsyncTest, HandlesTimeoutByReturningError) { TEST_F(MultiCurlHttpFetcherAsyncTest, HandlesMalformattedUrlByReturningError) { std::string msg; absl::BlockingCounter done(1); + // NOLINTNEXTLINE auto done_cb = [&done](absl::StatusOr<std::string> result) { done.DecrementCount(); ASSERT_FALSE(result.ok()); @@ -158,9 +160,9 @@ TEST_F(MultiCurlHttpFetcherAsyncTest, CanFetchMultipleUrlsInParallel) { std::vector<HTTPRequest> test_requests = { {kUrlA.begin(), {}}, {kUrlB.begin(), {}}, {kUrlC.begin(), {}}}; auto done_cb = [&done, &test_requests]( - std::vector<absl::StatusOr<std::string>> results) { + const std::vector<absl::StatusOr<std::string>>& results) { EXPECT_EQ(results.size(), test_requests.size()); - for (auto result : results) { + for (const auto& result : results) { ASSERT_TRUE(result.ok()) << result.status(); } done.DecrementCount(); @@ -188,6 +190,7 @@ TEST_F(MultiCurlHttpFetcherAsyncTest, PutsUrlSuccessfully) { TEST_F(MultiCurlHttpFetcherAsyncTest, PutsUrlFails) { std::string msg; absl::Notification notification; + // NOLINTNEXTLINE auto done_cb = [&notification](absl::StatusOr<std::string> result) { EXPECT_FALSE(result.ok()) << result.status(); EXPECT_THAT(result.status().message(), diff --git a/services/common/clients/http_kv_server/buyer/buyer_key_value_async_http_client_test.cc b/services/common/clients/http_kv_server/buyer/buyer_key_value_async_http_client_test.cc index 82326623..1d21b884 100644 --- a/services/common/clients/http_kv_server/buyer/buyer_key_value_async_http_client_test.cc +++ b/services/common/clients/http_kv_server/buyer/buyer_key_value_async_http_client_test.cc @@ -117,7 +117,7 @@ TEST_F(KeyValueAsyncHttpClientTest,
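For context on the PutUrl hunk above, a standalone sketch of the libcurl upload options it sets; error handling and the read-callback wiring are omitted, and the helper name is an assumption:

#include <curl/curl.h>
#include <string>

// Sketch: mark the easy handle as an upload (PUT) and declare the body size.
void ConfigurePut(CURL* handle, const std::string& body) {
  curl_easy_setopt(handle, CURLOPT_UPLOAD, 1L);
  curl_easy_setopt(handle, CURLOPT_POSTFIELDSIZE_LARGE,
                   static_cast<curl_off_t>(body.size()));
  // A CURLOPT_READFUNCTION/CURLOPT_READDATA pair would stream `body` out.
}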
"interestGroupNames=ig_name_likes_boots")); // Now we define what we expect to get back out of the client, which is a // GetBuyerValuesOutput struct. - const std::string expectedResult = R"json({ + const std::string expected_result = R"json({ "keys": { "1j1043317685": { "constitution_author": "madison", @@ -136,10 +136,9 @@ TEST_F(KeyValueAsyncHttpClientTest, } })json"; - const std::string actualResult = expectedResult; std::unique_ptr expectedOutputStructUPtr = std::make_unique( - GetBuyerValuesOutput({expectedResult})); + GetBuyerValuesOutput({expected_result})); // Define the lambda function which is the callback. // Inside this callback, we will actually check that the client correctly @@ -167,14 +166,14 @@ TEST_F(KeyValueAsyncHttpClientTest, // the following: // (This part is NOT an assertion of expected behavior but rather a mock // defining what it shall be) - .WillOnce([actualResult, &expected_urls]( - HTTPRequest request, int timeout_ms, + .WillOnce([actual_result = expected_result, &expected_urls]( + const HTTPRequest& request, int timeout_ms, absl::AnyInvocable) &&> done_callback) { EXPECT_TRUE(expected_urls.contains(request.url)); // Pack said string into a statusOr absl::StatusOr resp = - absl::StatusOr(actualResult); + absl::StatusOr(actual_result); // Now, call the callback (Note: we defined it above!) with the // 'response' from the 'server' std::move(done_callback)(resp); @@ -305,7 +304,7 @@ TEST_F(KeyValueAsyncHttpClientTest, // (This part is NOT an assertion of expected behavior but rather a mock // defining what it shall be) .WillOnce([actualResult, &expected_urls]( - HTTPRequest request, int timeout_ms, + const HTTPRequest& request, int timeout_ms, absl::AnyInvocable) &&> done_callback) { EXPECT_TRUE(expected_urls.contains(request.url)) << request.url; @@ -333,7 +332,7 @@ TEST_F(KeyValueAsyncHttpClientTest, MakesDSPUrlCorrectlyWithDuplicateKey) { EXPECT_CALL(*mock_http_fetcher_async_, FetchUrl) .WillOnce( [expected_url]( - HTTPRequest request, int timeout_ms, + const HTTPRequest& request, int timeout_ms, absl::AnyInvocable) &&> done_callback) { EXPECT_EQ(expected_url, request.url); }); CheckGetValuesFromKeysViaHttpClient(std::move(input)); @@ -348,7 +347,7 @@ TEST_F(KeyValueAsyncHttpClientTest, MakesDSPUrlCorrectlyWithNoKeys) { EXPECT_CALL(*mock_http_fetcher_async_, FetchUrl) .WillOnce( [expected_url]( - HTTPRequest request, int timeout_ms, + const HTTPRequest& request, int timeout_ms, absl::AnyInvocable) &&> done_callback) { EXPECT_EQ(expected_url, request.url); }); CheckGetValuesFromKeysViaHttpClient(std::move(input)); @@ -362,7 +361,7 @@ TEST_F(KeyValueAsyncHttpClientTest, std::make_unique(getValuesClientInput); EXPECT_CALL(*mock_http_fetcher_async_, FetchUrl) .WillOnce([expected_urls_1 = &(expected_urls_1)]( - HTTPRequest request, int timeout_ms, + const HTTPRequest& request, int timeout_ms, absl::AnyInvocable) &&> done_callback) { EXPECT_TRUE(expected_urls_1->contains(request.url)); @@ -384,7 +383,7 @@ TEST_F(KeyValueAsyncHttpClientTest, // testing to always include a client_type. 
EXPECT_CALL(*mock_http_fetcher_async_, FetchUrl) .WillOnce([expected_urls_1 = &(expected_urls_1)]( - HTTPRequest request, int timeout_ms, + const HTTPRequest& request, int timeout_ms, absl::AnyInvocable<void(absl::StatusOr<std::string>) &&> done_callback) { EXPECT_TRUE(expected_urls_1->contains(request.url)); @@ -415,7 +414,7 @@ TEST_F(KeyValueAsyncHttpClientTest, MakesDSPUrlCorrectlyWithClientTypeAndroid) { "?client_type=1&keys=clementine,lloyd_george,birkenhead")}; EXPECT_CALL(*mock_http_fetcher_async_, FetchUrl) .WillOnce([&expected_urls]( - HTTPRequest request, int timeout_ms, + const HTTPRequest& request, int timeout_ms, absl::AnyInvocable<void(absl::StatusOr<std::string>) &&> done_callback) { EXPECT_TRUE(expected_urls.contains(request.url)); @@ -466,20 +465,21 @@ TEST_F(KeyValueAsyncHttpClientTest, AddsMandatoryHeaders) { std::unique_ptr<GetBuyerValuesInput> input = std::make_unique<GetBuyerValuesInput>(getValuesClientInput); - std::vector<std::string> expectedHeaders; + std::vector<std::string> expected_headers; + expected_headers.reserve(kMandatoryHeaders.size()); for (const auto& mandatory_header : kMandatoryHeaders) { - expectedHeaders.push_back(absl::StrCat(mandatory_header, ":")); + expected_headers.push_back(absl::StrCat(mandatory_header, ":")); } // Assert that the mocked fetcher will have the method FetchUrl called on it, // with the metadata. EXPECT_CALL(*mock_http_fetcher_async_, FetchUrl) - .WillOnce([&expectedHeaders]( + .WillOnce([&expected_headers]( HTTPRequest request, int timeout_ms, absl::AnyInvocable<void(absl::StatusOr<std::string>) &&> done_callback) { - std::sort(expectedHeaders.begin(), expectedHeaders.end()); + std::sort(expected_headers.begin(), expected_headers.end()); std::sort(request.headers.begin(), request.headers.end()); - EXPECT_EQ(expectedHeaders, request.headers); + EXPECT_EQ(expected_headers, request.headers); }); absl::AnyInvocable< @@ -514,7 +514,7 @@ TEST_F(KeyValueAsyncHttpClientTest, SpacesInKeysGetEncoded) { // Now we define what we expect to get back out of the client, which is a // GetBuyerValuesOutput struct. // Note that this test is trivial; we use the same string - const std::string expectedResult = R"json({ + const std::string expected_result = R"json({ "keys": { "1j1043317685": { "constitution_author": "madison", @@ -526,10 +526,9 @@ }, })json"; - const std::string actualResult = expectedResult; std::unique_ptr<GetBuyerValuesOutput> expectedOutputStructUPtr = std::make_unique<GetBuyerValuesOutput>( - GetBuyerValuesOutput({expectedResult})); + GetBuyerValuesOutput({expected_result})); // Define the lambda function which is the callback. // Inside this callback, we will actually check that the client correctly @@ -557,14 +556,14 @@ TEST_F(KeyValueAsyncHttpClientTest, SpacesInKeysGetEncoded) { // the following: // (This part is NOT an assertion of expected behavior but rather a mock // defining what it shall be) - .WillOnce([actualResult, &expectedUrl]( - HTTPRequest request, int timeout_ms, + .WillOnce([actual_result = expected_result, &expectedUrl]( + const HTTPRequest& request, int timeout_ms, absl::AnyInvocable<void(absl::StatusOr<std::string>) &&> done_callback) { EXPECT_EQ(request.url, expectedUrl); // Pack said string into a statusOr absl::StatusOr<std::string> resp = - absl::StatusOr<std::string>(actualResult); + absl::StatusOr<std::string>(actual_result); // Now, call the callback (Note: we defined it above!)
with the // 'response' from the 'server' std::move(done_callback)(resp); diff --git a/services/common/clients/http_kv_server/buyer/fake_buyer_key_value_async_http_client_test.cc b/services/common/clients/http_kv_server/buyer/fake_buyer_key_value_async_http_client_test.cc index 96aa5e63..20b1cd66 100644 --- a/services/common/clients/http_kv_server/buyer/fake_buyer_key_value_async_http_client_test.cc +++ b/services/common/clients/http_kv_server/buyer/fake_buyer_key_value_async_http_client_test.cc @@ -34,7 +34,7 @@ TEST(FakeBuyerKeyValueAsyncHttpClient, match_request) { auto buyer_input = std::make_unique<GetBuyerValuesInput>(); absl::btree_set<std::string> interest_group = {"1j386134098"}; buyer_input->interest_group_names = interest_group; - buyer_input->buyer_kv_experiment_group_id = 100; + buyer_input->buyer_kv_experiment_group_id = "100"; FakeBuyerKeyValueAsyncHttpClient client("not used", RequestToPath()); } diff --git a/services/common/clients/http_kv_server/seller/seller_key_value_async_http_client_test.cc b/services/common/clients/http_kv_server/seller/seller_key_value_async_http_client_test.cc index 477d2936..6ddeefb6 100644 --- a/services/common/clients/http_kv_server/seller/seller_key_value_async_http_client_test.cc +++ b/services/common/clients/http_kv_server/seller/seller_key_value_async_http_client_test.cc @@ -173,7 +173,7 @@ TEST_F(KeyValueAsyncHttpClientTest, // Now we define what we expect to get back out of the client, which is a // GetSellerValuesOutput struct. // Note that this test is trivial; we use the same string - const std::string expectedResult = R"json({ + const std::string expected_result = R"json({ "render_urls": { "https%3A%2F%2Fams.creativecdn.com%2Fcreatives%3Ffls%3Dtrue%26id%3D000rHxs8auvkixVNAyYw%26c%3DVMR8favvTg6zsLGCra37%26s%3Drtbhfledge": { "constitution_author": "madison", @@ -195,10 +195,9 @@ } })json"; - const std::string actualResult = expectedResult; std::unique_ptr<GetSellerValuesOutput> expectedOutputStructUPtr = std::make_unique<GetSellerValuesOutput>( - GetSellerValuesOutput({expectedResult})); + GetSellerValuesOutput({expected_result})); // Define the lambda function which is the callback. // Inside this callback, we will actually check that the client correctly @@ -226,14 +225,15 @@ // the following: // (This part is NOT an assertion of expected behavior but rather a mock // defining what it shall be) - .WillOnce([actualResult, expected_urls = &(expected_urls_with_egid_)]( + .WillOnce([actual_result = expected_result, + expected_urls = &(expected_urls_with_egid_)]( const HTTPRequest& request, int timeout_ms, absl::AnyInvocable<void(absl::StatusOr<std::string>) &&> done_callback) { EXPECT_TRUE(expected_urls->contains(request.url)); // Pack said string into a statusOr absl::StatusOr<std::string> resp = - absl::StatusOr<std::string>(actualResult); + absl::StatusOr<std::string>(actual_result); // Now, call the callback (Note: we defined it above!)
with the // 'response' from the 'server' std::move(done_callback)(resp); @@ -282,7 +282,7 @@ TEST_F(KeyValueAsyncHttpClientTest, } })json"; - const std::string actualResult = R"json({ + const std::string actual_result = R"json({ "render_urls": { "https%3A%2F%2Fams.creativecdn.com%2Fcreatives%3Ffls%3Dtrue%26id%3D000rHxs8auvkixVNAyYw%26c%3DVMR8favvTg6zsLGCra37%26s%3Drtbhfledge": { "constitution_author": "WRONG_NAME", @@ -333,14 +333,14 @@ // the following: // (This part is NOT an assertion of expected behavior but rather a mock // defining what it shall be) - .WillOnce([actualResult, expected_urls = &(expected_urls_)]( - HTTPRequest request, int timeout_ms, + .WillOnce([actual_result, expected_urls = &(expected_urls_)]( + const HTTPRequest& request, int timeout_ms, absl::AnyInvocable<void(absl::StatusOr<std::string>) &&> done_callback) { EXPECT_TRUE(expected_urls->contains(request.url)); // Pack said string into a statusOr absl::StatusOr<std::string> resp = - absl::StatusOr<std::string>(actualResult); + absl::StatusOr<std::string>(actual_result); // Now, call the callback (Note: we defined it above!) with the // 'response' from the 'server' std::move(done_callback)(resp); @@ -355,6 +355,7 @@ TEST_F(KeyValueAsyncHttpClientTest, TEST_F(KeyValueAsyncHttpClientTest, MakesSSPUrlCorrectlyWithNoAdComponentRenderUrlsAndADuplicateKey) { const GetSellerValuesInput getValuesClientInput3 = { + // NOLINTNEXTLINE {"https://ams.creativecdn.com/" "creatives?fls=true&id=000rHxs8auvkixVNAyYw&c=VMR8favvTg6zsLGCra37&s=" "rtbhfledge", @@ -389,7 +390,7 @@ TEST_F(KeyValueAsyncHttpClientTest, MakesSSPUrlCorrectlyWithNoClientType) { EXPECT_CALL(*mock_http_fetcher_async_, FetchUrl) .WillOnce( [&expected_url]( - HTTPRequest request, int timeout_ms, + const HTTPRequest& request, int timeout_ms, absl::AnyInvocable<void(absl::StatusOr<std::string>) &&> done_callback) { EXPECT_EQ(request.url, expected_url); }); CheckGetValuesFromKeysViaHttpClient(std::move(input)); @@ -417,7 +418,7 @@ TEST_F(KeyValueAsyncHttpClientTest, MakesSSPUrlCorrectlyWithClientTypeBrowser) { EXPECT_CALL(*mock_http_fetcher_async_, FetchUrl) .WillOnce( [&expected_url]( - HTTPRequest request, int timeout_ms, + const HTTPRequest& request, int timeout_ms, absl::AnyInvocable<void(absl::StatusOr<std::string>) &&> done_callback) { EXPECT_EQ(request.url, expected_url); }); CheckGetValuesFromKeysViaHttpClient(std::move(input)); @@ -441,7 +442,7 @@ TEST_F(KeyValueAsyncHttpClientTest, MakesSSPUrlCorrectlyWithClientTypeAndroid) { EXPECT_CALL(*mock_http_fetcher_async_, FetchUrl) .WillOnce( [&expected_url]( - HTTPRequest request, int timeout_ms, + const HTTPRequest& request, int timeout_ms, absl::AnyInvocable<void(absl::StatusOr<std::string>) &&> done_callback) { EXPECT_EQ(request.url, expected_url); }); CheckGetValuesFromKeysViaHttpClient(std::move(input)); @@ -485,7 +486,7 @@ TEST_F(KeyValueAsyncHttpClientTest, AddsMetadataToHeaders) { // with the metadata.
EXPECT_CALL(*mock_http_fetcher_async_, FetchUrl) .WillOnce([&expectedHeaders]( - HTTPRequest request, int timeout_ms, + const HTTPRequest& request, int timeout_ms, absl::AnyInvocable<void(absl::StatusOr<std::string>) &&> done_callback) { EXPECT_EQ(expectedHeaders, request.headers); diff --git a/services/common/clients/kv_server/BUILD b/services/common/clients/kv_server/BUILD index 29a001da..4bb4c595 100644 --- a/services/common/clients/kv_server/BUILD +++ b/services/common/clients/kv_server/BUILD @@ -75,6 +75,6 @@ cc_test( "@com_github_grpc_grpc//:grpc++", "@com_google_googletest//:gtest", "@com_google_googletest//:gtest_main", - "@google_privacysandbox_servers_common//src/cpp/encryption/key_fetcher/src:key_fetcher_manager", + "@google_privacysandbox_servers_common//src/encryption/key_fetcher:key_fetcher_manager", ], ) diff --git a/services/common/clients/kv_server/kv_async_client.cc b/services/common/clients/kv_server/kv_async_client.cc index 93a03173..cb0e9d3d 100644 --- a/services/common/clients/kv_server/kv_async_client.cc +++ b/services/common/clients/kv_server/kv_async_client.cc @@ -17,9 +17,9 @@ #include #include "include/grpcpp/support/status_code_enum.h" -#include "scp/cc/public/cpio/interface/crypto_client/crypto_client_interface.h" -#include "scp/cc/public/cpio/proto/public_key_service/v1/public_key_service.pb.h" #include "services/common/clients/async_grpc/default_async_grpc_client.h" +#include "src/public/cpio/interface/crypto_client/crypto_client_interface.h" +#include "src/public/cpio/proto/public_key_service/v1/public_key_service.pb.h" namespace privacy_sandbox::bidding_auction_servers { @@ -49,7 +49,7 @@ void KVAsyncGrpcClient::SendRpc( stub_->async()->ObliviousGetValues( params->ContextRef(), params->RequestRef(), params->ResponseRef(), [captured_oblivious_http_context = oblivious_http_context.release(), - params](grpc::Status status) { + params](const grpc::Status& status) { auto oblivious_http_request_uptr = ObliviousHttpRequestUptr(captured_oblivious_http_context); if (!status.ok()) { diff --git a/services/common/clients/kv_server/kv_async_client.h b/services/common/clients/kv_server/kv_async_client.h index 95b9a557..e2519834 100644 --- a/services/common/clients/kv_server/kv_async_client.h +++ b/services/common/clients/kv_server/kv_async_client.h @@ -40,11 +40,11 @@ #include "services/common/encryption/crypto_client_wrapper_interface.h" #include "services/common/util/binary_http_utils.h" #include "services/common/util/oblivious_http_utils.h" -#include "src/cpp/encryption/key_fetcher/src/key_fetcher_manager.h" -#include "src/cpp/encryption/key_fetcher/src/key_fetcher_utils.h" -#include "src/cpp/logger/request_context_logger.h" -#include "src/cpp/util/status_macro/status_macros.h" +#include "src/encryption/key_fetcher/key_fetcher_manager.h" +#include "src/encryption/key_fetcher/key_fetcher_utils.h" #include "src/include/openssl/hpke.h" +#include "src/logger/request_context_logger.h" +#include "src/util/status_macro/status_macros.h" using google::protobuf::util::JsonStringToMessage; diff --git a/services/common/clients/kv_server/kv_async_client_test.cc b/services/common/clients/kv_server/kv_async_client_test.cc index 193ff8c2..b922de61 100644 --- a/services/common/clients/kv_server/kv_async_client_test.cc +++ b/services/common/clients/kv_server/kv_async_client_test.cc @@ -21,7 +21,7 @@ #include "services/common/constants/common_service_flags.h" #include "services/common/encryption/key_fetcher_factory.h" #include "services/common/test/mocks.h" -#include
"src/cpp/encryption/key_fetcher/interface/key_fetcher_manager_interface.h" +#include "src/encryption/key_fetcher/interface/key_fetcher_manager_interface.h" namespace privacy_sandbox::bidding_auction_servers { namespace { diff --git a/services/common/clients/seller_frontend_server/seller_frontend_async_client.cc b/services/common/clients/seller_frontend_server/seller_frontend_async_client.cc index ec441538..b3a0a115 100644 --- a/services/common/clients/seller_frontend_server/seller_frontend_async_client.cc +++ b/services/common/clients/seller_frontend_server/seller_frontend_async_client.cc @@ -34,7 +34,7 @@ SellerFrontEndGrpcClient::SellerFrontEndGrpcClient( args.SetCompressionAlgorithm(GRPC_COMPRESS_GZIP); } stub_ = SellerFrontEnd::NewStub(grpc::CreateCustomChannel( - absl::StrCat(client_config.server_addr), std::move(creds), args)); + absl::StrCat(client_config.server_addr), creds, args)); absl::MutexLock l(&active_calls_mutex_); active_calls_count_ = 0; } @@ -53,7 +53,7 @@ absl::Status SellerFrontEndGrpcClient::Execute( ++active_calls_count_; stub_->async()->SelectAd( params->ContextRef(), params->RequestRef(), params->ResponseRef(), - [params_ptr = params.release(), this](grpc::Status status) { + [params_ptr = params.release(), this](const grpc::Status& status) { params_ptr->OnDone(status); absl::MutexLock l(&active_calls_mutex_); --active_calls_count_; diff --git a/services/common/code_dispatch/BUILD b/services/common/code_dispatch/BUILD index 46e64501..2629a8ec 100644 --- a/services/common/code_dispatch/BUILD +++ b/services/common/code_dispatch/BUILD @@ -27,7 +27,7 @@ cc_library( "//services/common/encryption:crypto_client_wrapper_interface", "@com_github_grpc_grpc//:grpc++", "@com_google_absl//absl/status", - "@google_privacysandbox_servers_common//src/cpp/encryption/key_fetcher/interface:key_fetcher_manager_interface", - "@google_privacysandbox_servers_common//src/cpp/logger:request_context_logger", + "@google_privacysandbox_servers_common//src/encryption/key_fetcher/interface:key_fetcher_manager_interface", + "@google_privacysandbox_servers_common//src/logger:request_context_logger", ], ) diff --git a/services/common/code_dispatch/code_dispatch_reactor.h b/services/common/code_dispatch/code_dispatch_reactor.h index 3ffa94ea..5653c253 100644 --- a/services/common/code_dispatch/code_dispatch_reactor.h +++ b/services/common/code_dispatch/code_dispatch_reactor.h @@ -28,8 +28,8 @@ #include "services/common/clients/code_dispatcher/code_dispatch_client.h" #include "services/common/constants/user_error_strings.h" #include "services/common/encryption/crypto_client_wrapper_interface.h" -#include "src/cpp/encryption/key_fetcher/interface/key_fetcher_manager_interface.h" -#include "src/cpp/logger/request_context_logger.h" +#include "src/encryption/key_fetcher/interface/key_fetcher_manager_interface.h" +#include "src/logger/request_context_logger.h" namespace privacy_sandbox::bidding_auction_servers { @@ -113,7 +113,7 @@ class CodeDispatchReactor : public grpc::ServerUnaryReactor { return false; } - hpke_secret_ = std::move(decrypt_response->secret()); + hpke_secret_ = std::move(*decrypt_response->mutable_secret()); return raw_request_.ParseFromString(decrypt_response->payload()); } diff --git a/services/common/code_fetch/BUILD b/services/common/code_fetch/BUILD index 1c195f9f..a5bacc1a 100644 --- a/services/common/code_fetch/BUILD +++ b/services/common/code_fetch/BUILD @@ -20,6 +20,9 @@ cc_library( name = "code_fetcher_interface", hdrs = ["code_fetcher_interface.h"], visibility = 
["//visibility:public"], + deps = [ + "@com_google_absl//absl/status", + ], ) cc_library( @@ -36,8 +39,8 @@ cc_library( "@com_google_absl//absl/status:statusor", "@com_google_absl//absl/strings", "@com_google_absl//absl/time", - "@google_privacysandbox_servers_common//src/cpp/concurrent:executor", - "@google_privacysandbox_servers_common//src/cpp/logger:request_context_logger", + "@google_privacysandbox_servers_common//src/concurrent:executor", + "@google_privacysandbox_servers_common//src/logger:request_context_logger", ], ) @@ -55,13 +58,13 @@ cc_library( "@com_google_absl//absl/strings", "@com_google_absl//absl/synchronization", "@com_google_absl//absl/time", - "@google_privacysandbox_servers_common//scp/cc/public/core/interface:errors", - "@google_privacysandbox_servers_common//scp/cc/public/core/interface:execution_result", - "@google_privacysandbox_servers_common//scp/cc/public/cpio/interface/blob_storage_client", - "@google_privacysandbox_servers_common//src/cpp/concurrent:executor", - "@google_privacysandbox_servers_common//src/cpp/logger:request_context_logger", - "@google_privacysandbox_servers_common//src/cpp/util/status_macro:status_macros", - "@google_privacysandbox_servers_common//src/cpp/util/status_macro:status_util", + "@google_privacysandbox_servers_common//src/concurrent:executor", + "@google_privacysandbox_servers_common//src/logger:request_context_logger", + "@google_privacysandbox_servers_common//src/public/core/interface:errors", + "@google_privacysandbox_servers_common//src/public/core/interface:execution_result", + "@google_privacysandbox_servers_common//src/public/cpio/interface/blob_storage_client", + "@google_privacysandbox_servers_common//src/util/status_macro:status_macros", + "@google_privacysandbox_servers_common//src/util/status_macro:status_util", ], ) @@ -78,7 +81,7 @@ cc_test( "@com_google_absl//absl/status", "@com_google_googletest//:gtest", "@com_google_googletest//:gtest_main", - "@google_privacysandbox_servers_common//src/cpp/concurrent:executor", + "@google_privacysandbox_servers_common//src/concurrent:executor", ], ) @@ -97,12 +100,12 @@ cc_test( "@com_google_absl//absl/time", "@com_google_googletest//:gtest", "@com_google_googletest//:gtest_main", - "@google_privacysandbox_servers_common//scp/cc/core/interface:async_context_lib", - "@google_privacysandbox_servers_common//scp/cc/public/core/interface:errors", - "@google_privacysandbox_servers_common//scp/cc/public/core/interface:execution_result", - "@google_privacysandbox_servers_common//scp/cc/public/cpio/interface:cpio", - "@google_privacysandbox_servers_common//scp/cc/public/cpio/interface/blob_storage_client", - "@google_privacysandbox_servers_common//scp/cc/public/cpio/mock/blob_storage_client:blob_storage_client_mock", - "@google_privacysandbox_servers_common//src/cpp/concurrent:executor", + "@google_privacysandbox_servers_common//src/concurrent:executor", + "@google_privacysandbox_servers_common//src/core/interface:async_context", + "@google_privacysandbox_servers_common//src/public/core/interface:errors", + "@google_privacysandbox_servers_common//src/public/core/interface:execution_result", + "@google_privacysandbox_servers_common//src/public/cpio/interface:cpio", + "@google_privacysandbox_servers_common//src/public/cpio/interface/blob_storage_client", + "@google_privacysandbox_servers_common//src/public/cpio/mock/blob_storage_client:blob_storage_client_mock", ], ) diff --git a/services/common/code_fetch/periodic_bucket_fetcher.cc b/services/common/code_fetch/periodic_bucket_fetcher.cc 
index c5733f9f..21fe1743 100644 --- a/services/common/code_fetch/periodic_bucket_fetcher.cc +++ b/services/common/code_fetch/periodic_bucket_fetcher.cc @@ -28,14 +28,14 @@ #include "absl/strings/str_format.h" #include "absl/synchronization/blocking_counter.h" #include "absl/time/time.h" -#include "scp/cc/core/interface/async_context.h" -#include "scp/cc/core/interface/errors.h" -#include "scp/cc/public/core/interface/execution_result.h" -#include "scp/cc/public/cpio/interface/blob_storage_client/blob_storage_client_interface.h" -#include "scp/cc/public/cpio/proto/blob_storage_service/v1/blob_storage_service.pb.h" #include "services/common/code_fetch/code_fetcher_interface.h" -#include "src/cpp/logger/request_context_logger.h" -#include "src/cpp/util/status_macro/status_macros.h" +#include "src/core/interface/async_context.h" +#include "src/core/interface/errors.h" +#include "src/logger/request_context_logger.h" +#include "src/public/core/interface/execution_result.h" +#include "src/public/cpio/interface/blob_storage_client/blob_storage_client_interface.h" +#include "src/public/cpio/proto/blob_storage_service/v1/blob_storage_service.pb.h" +#include "src/util/status_macro/status_macros.h" using ::google::cmrt::sdk::blob_storage_service::v1::BlobMetadata; using ::google::cmrt::sdk::blob_storage_service::v1::GetBlobRequest; @@ -74,7 +74,7 @@ absl::Status PeriodicBucketFetcher::Start() { void PeriodicBucketFetcher::End() { if (task_id_.has_value()) { - executor_.Cancel(*std::move(task_id_)); + executor_.Cancel(*task_id_); task_id_ = absl::nullopt; } } @@ -88,8 +88,7 @@ PeriodicBucketFetcher::ListBlobsSync() { AsyncContext<ListBlobsMetadataRequest, ListBlobsMetadataResponse> list_blobs_context( - std::move(list_blobs_request), - [&notification, &result](auto& context) { + list_blobs_request, [&notification, &result](auto& context) { if (!context.result.Successful()) { std::string error_msg = absl::StrCat("Failed to list available blobs: ", @@ -162,8 +161,7 @@ void PeriodicBucketFetcher::PeriodicBucketFetchSync() { md.bucket_name()); get_blob_request->mutable_blob_metadata()->set_blob_name(md.blob_name()); AsyncContext<GetBlobRequest, GetBlobResponse> get_blob_context( - std::move(get_blob_request), - [&blobs_remaining, this](const auto& context) { + get_blob_request, [&blobs_remaining, this](const auto& context) { HandleBlobFetchResult(context); blobs_remaining.DecrementCount(); }); diff --git a/services/common/code_fetch/periodic_bucket_fetcher.h b/services/common/code_fetch/periodic_bucket_fetcher.h index 15685185..631f95e6 100644 --- a/services/common/code_fetch/periodic_bucket_fetcher.h +++ b/services/common/code_fetch/periodic_bucket_fetcher.h @@ -25,10 +25,10 @@ #include "absl/status/status.h" #include "absl/strings/string_view.h" #include "absl/time/time.h" -#include "scp/cc/public/cpio/interface/blob_storage_client/blob_storage_client_interface.h" #include "services/common/clients/code_dispatcher/v8_dispatcher.h" #include "services/common/code_fetch/code_fetcher_interface.h" -#include "src/cpp/concurrent/executor.h" +#include "src/concurrent/executor.h" +#include "src/public/cpio/interface/blob_storage_client/blob_storage_client_interface.h" namespace privacy_sandbox::bidding_auction_servers { diff --git a/services/common/code_fetch/periodic_bucket_fetcher_test.cc b/services/common/code_fetch/periodic_bucket_fetcher_test.cc index a77ea65e..b8477675 100644 --- a/services/common/code_fetch/periodic_bucket_fetcher_test.cc +++ b/services/common/code_fetch/periodic_bucket_fetcher_test.cc @@ -25,12 +25,12 @@ #include "absl/synchronization/notification.h" #include "absl/time/time.h"
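ListBlobsSync above wraps an async CPIO call into a blocking one; a generic sketch of that pattern (the RunSync helper and std::string payload are assumptions):

#include <string>
#include <utility>
#include "absl/status/statusor.h"
#include "absl/synchronization/notification.h"

template <typename AsyncFn>
absl::StatusOr<std::string> RunSync(AsyncFn async_call) {
  absl::Notification done;
  absl::StatusOr<std::string> result;
  async_call([&](absl::StatusOr<std::string> r) {
    result = std::move(r);
    done.Notify();  // unblock the waiting periodic tick
  });
  done.WaitForNotification();
  return result;
}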
#include "gtest/gtest.h" -#include "scp/cc/core/interface/async_context.h" -#include "scp/cc/public/cpio/interface/blob_storage_client/blob_storage_client_interface.h" -#include "scp/cc/public/cpio/interface/cpio.h" -#include "scp/cc/public/cpio/interface/error_codes.h" -#include "scp/cc/public/cpio/mock/blob_storage_client/mock_blob_storage_client.h" #include "services/common/test/mocks.h" +#include "src/core/interface/async_context.h" +#include "src/public/cpio/interface/blob_storage_client/blob_storage_client_interface.h" +#include "src/public/cpio/interface/cpio.h" +#include "src/public/cpio/interface/error_codes.h" +#include "src/public/cpio/mock/blob_storage_client/mock_blob_storage_client.h" namespace privacy_sandbox::bidding_auction_servers { namespace { diff --git a/services/common/code_fetch/periodic_code_fetcher.cc b/services/common/code_fetch/periodic_code_fetcher.cc index 007f76f7..b7afc479 100644 --- a/services/common/code_fetch/periodic_code_fetcher.cc +++ b/services/common/code_fetch/periodic_code_fetcher.cc @@ -24,7 +24,7 @@ #include "absl/log/check.h" #include "absl/status/statusor.h" #include "absl/time/time.h" -#include "src/cpp/logger/request_context_logger.h" +#include "src/logger/request_context_logger.h" namespace privacy_sandbox::bidding_auction_servers { @@ -61,7 +61,7 @@ absl::Status PeriodicCodeFetcher::Start() { void PeriodicCodeFetcher::End() { if (task_id_.has_value()) { - executor_.Cancel(*std::move(task_id_)); + executor_.Cancel(*task_id_); task_id_ = absl::nullopt; } } @@ -70,7 +70,7 @@ void PeriodicCodeFetcher::PeriodicCodeFetchSync() { absl::Notification notification; auto done_callback = [¬ification, - this](std::vector> results) mutable { + this](const std::vector>& results) mutable { bool all_status_ok = true; std::vector results_value; @@ -108,10 +108,10 @@ void PeriodicCodeFetcher::PeriodicCodeFetchSync() { // Create a HTTPRequest object from the url_endpoint_ std::vector requests; - for (std::string endpoint : url_endpoints_) { + for (const std::string& endpoint : url_endpoints_) { HTTPRequest request; - PS_VLOG(1) << "Requesting UDF from: " << std::string(endpoint); - request.url = std::string(endpoint); + PS_VLOG(1) << "Requesting UDF from: " << endpoint; + request.url = endpoint; requests.push_back(request); } diff --git a/services/common/code_fetch/periodic_code_fetcher.h b/services/common/code_fetch/periodic_code_fetcher.h index ecbd1292..a843ad9c 100644 --- a/services/common/code_fetch/periodic_code_fetcher.h +++ b/services/common/code_fetch/periodic_code_fetcher.h @@ -27,7 +27,7 @@ #include "services/common/clients/code_dispatcher/v8_dispatcher.h" #include "services/common/clients/http/http_fetcher_async.h" #include "services/common/code_fetch/code_fetcher_interface.h" -#include "src/cpp/concurrent/executor.h" +#include "src/concurrent/executor.h" namespace privacy_sandbox::bidding_auction_servers { diff --git a/services/common/code_fetch/periodic_code_fetcher_test.cc b/services/common/code_fetch/periodic_code_fetcher_test.cc index 100f61a1..b070044e 100644 --- a/services/common/code_fetch/periodic_code_fetcher_test.cc +++ b/services/common/code_fetch/periodic_code_fetcher_test.cc @@ -23,7 +23,7 @@ #include "absl/synchronization/blocking_counter.h" #include "gtest/gtest.h" #include "services/common/test/mocks.h" -#include "src/cpp/concurrent/event_engine_executor.h" +#include "src/concurrent/event_engine_executor.h" namespace privacy_sandbox::bidding_auction_servers { namespace { @@ -224,7 +224,7 @@ TEST(PeriodicCodeFetcherTest, 
LoadFetchFrequencyMustBeGreaterThan1Min) { {"fake-test-code-blob.com"}, absl::Minutes(1), curl_http_fetcher.get(), &dispatcher, executor.get(), absl::Milliseconds(100), [](const std::vector<std::string>& ad_tech_code_blobs) { return ""; }, - /*version_num=*/"v25"); + /*version=*/"v25"); EXPECT_DEATH(auto status = code_fetcher.Start(), ""); } diff --git a/services/common/constants/BUILD b/services/common/constants/BUILD index 8381abf1..c85d284d 100644 --- a/services/common/constants/BUILD +++ b/services/common/constants/BUILD @@ -36,7 +36,7 @@ cc_library( deps = [ "@com_google_absl//absl/flags:flag", "@com_google_absl//absl/flags:parse", - "@google_privacysandbox_servers_common//src/cpp/telemetry/flag:telemetry_flag", + "@google_privacysandbox_servers_common//src/telemetry/flag:telemetry_flag", ], ) diff --git a/services/common/constants/common_service_flags.h b/services/common/constants/common_service_flags.h index c60d3e95..5339da8b 100644 --- a/services/common/constants/common_service_flags.h +++ b/services/common/constants/common_service_flags.h @@ -22,7 +22,7 @@ #include "absl/flags/declare.h" #include "absl/flags/flag.h" -#include "src/cpp/telemetry/flag/telemetry_flag.h" +#include "src/telemetry/flag/telemetry_flag.h" ABSL_DECLARE_FLAG(std::optional<bool>, test_mode); ABSL_DECLARE_FLAG(std::optional<std::string>, public_key_endpoint); diff --git a/services/common/constants/user_error_strings.h b/services/common/constants/user_error_strings.h index 78886a8d..680f7697 100644 --- a/services/common/constants/user_error_strings.h +++ b/services/common/constants/user_error_strings.h @@ -28,6 +28,8 @@ inline constexpr char kMalformedCiphertext[] = "Malformed request ciphertext."; inline constexpr char kMissingInputs[] = "Missing inputs"; inline constexpr char kInternalServerError[] = "Internal server error."; +inline constexpr char kSellerDomainEmpty[] = + "Seller domain in server must be configured."; } // namespace privacy_sandbox::bidding_auction_servers diff --git a/services/common/encryption/BUILD b/services/common/encryption/BUILD index b84307e1..ff4cda19 100644 --- a/services/common/encryption/BUILD +++ b/services/common/encryption/BUILD @@ -51,11 +51,11 @@ cc_library( "@com_github_grpc_grpc//:grpc++", "@com_google_absl//absl/strings", "@com_google_absl//absl/time", - "@google_privacysandbox_servers_common//src/cpp/concurrent:executor", - "@google_privacysandbox_servers_common//src/cpp/encryption/key_fetcher/src:fake_key_fetcher_manager", - "@google_privacysandbox_servers_common//src/cpp/encryption/key_fetcher/src:key_fetcher_manager", - "@google_privacysandbox_servers_common//src/cpp/encryption/key_fetcher/src:private_key_fetcher", - "@google_privacysandbox_servers_common//src/cpp/encryption/key_fetcher/src:public_key_fetcher", + "@google_privacysandbox_servers_common//src/concurrent:executor", + "@google_privacysandbox_servers_common//src/encryption/key_fetcher:fake_key_fetcher_manager", + "@google_privacysandbox_servers_common//src/encryption/key_fetcher:key_fetcher_manager", + "@google_privacysandbox_servers_common//src/encryption/key_fetcher:private_key_fetcher", + "@google_privacysandbox_servers_common//src/encryption/key_fetcher:public_key_fetcher", ], ) @@ -66,10 +66,10 @@ cc_library( ], deps = [ "@com_google_absl//absl/status:statusor", - "@google_privacysandbox_servers_common//scp/cc/public/cpio/interface/crypto_client", - "@google_privacysandbox_servers_common//scp/cc/public/cpio/proto/crypto_service/v1:crypto_service_cc_proto", -
"@google_privacysandbox_servers_common//src/cpp/encryption/key_fetcher/interface:private_key_fetcher_interface", - "@google_privacysandbox_servers_common//src/cpp/encryption/key_fetcher/interface:public_key_fetcher_interface", + "@google_privacysandbox_servers_common//src/encryption/key_fetcher/interface:private_key_fetcher_interface", + "@google_privacysandbox_servers_common//src/encryption/key_fetcher/interface:public_key_fetcher_interface", + "@google_privacysandbox_servers_common//src/public/cpio/interface/crypto_client", + "@google_privacysandbox_servers_common//src/public/cpio/proto/crypto_service/v1:crypto_service_cc_proto", ], ) @@ -85,10 +85,10 @@ cc_library( deps = [ ":crypto_client_wrapper_interface", "@com_google_absl//absl/status:statusor", - "@google_privacysandbox_servers_common//scp/cc/public/cpio/interface/crypto_client", - "@google_privacysandbox_servers_common//scp/cc/public/cpio/proto/crypto_service/v1:crypto_service_cc_proto", - "@google_privacysandbox_servers_common//src/cpp/encryption/key_fetcher/interface:private_key_fetcher_interface", - "@google_privacysandbox_servers_common//src/cpp/util/status_macro:status_macros", + "@google_privacysandbox_servers_common//src/encryption/key_fetcher/interface:private_key_fetcher_interface", + "@google_privacysandbox_servers_common//src/public/cpio/interface/crypto_client", + "@google_privacysandbox_servers_common//src/public/cpio/proto/crypto_service/v1:crypto_service_cc_proto", + "@google_privacysandbox_servers_common//src/util/status_macro:status_macros", ], ) diff --git a/services/common/encryption/crypto_client_factory.cc b/services/common/encryption/crypto_client_factory.cc index 43eca66e..14ace0c1 100644 --- a/services/common/encryption/crypto_client_factory.cc +++ b/services/common/encryption/crypto_client_factory.cc @@ -17,9 +17,9 @@ #include #include -#include "scp/cc/public/cpio/interface/crypto_client/crypto_client_interface.h" #include "services/common/encryption/crypto_client_wrapper.h" #include "services/common/encryption/crypto_client_wrapper_interface.h" +#include "src/public/cpio/interface/crypto_client/crypto_client_interface.h" namespace privacy_sandbox::bidding_auction_servers { diff --git a/services/common/encryption/crypto_client_wrapper.cc b/services/common/encryption/crypto_client_wrapper.cc index 3d4b01dd..8446e953 100644 --- a/services/common/encryption/crypto_client_wrapper.cc +++ b/services/common/encryption/crypto_client_wrapper.cc @@ -22,9 +22,9 @@ #include "absl/strings/str_format.h" #include "proto/hpke.pb.h" #include "proto/tink.pb.h" -#include "scp/cc/core/interface/errors.h" -#include "scp/cc/public/cpio/interface/crypto_client/type_def.h" -#include "src/cpp/util/status_macro/status_macros.h" +#include "src/core/interface/errors.h" +#include "src/public/cpio/interface/crypto_client/type_def.h" +#include "src/util/status_macro/status_macros.h" namespace privacy_sandbox::bidding_auction_servers { @@ -104,7 +104,7 @@ absl::StatusOr CryptoClientWrapper::HpkeEncrypt( std::move(request), [&response, &success]( const google::scp::core::ExecutionResult& result, - const google::cmrt::sdk::crypto_service::v1::HpkeEncryptResponse& + google::cmrt::sdk::crypto_service::v1::HpkeEncryptResponse encrypt_response) { if (result.Successful()) { success = true; @@ -134,7 +134,7 @@ absl::StatusOr CryptoClientWrapper::AeadEncrypt( ExecutionResult execution_result = crypto_client_->AeadEncrypt( std::move(request), [&response, &success](const google::scp::core::ExecutionResult& result, - const AeadEncryptResponse& 
encrypt_response) { + AeadEncryptResponse encrypt_response) { if (result.Successful()) { success = true; response = std::move(encrypt_response); @@ -171,7 +171,7 @@ absl::StatusOr<HpkeDecryptResponse> CryptoClientWrapper::HpkeDecrypt( // Only the ciphertext field needs to be set for decryption. HpkeEncryptedData encrypted_data; - encrypted_data.set_ciphertext(std::move(ciphertext)); + encrypted_data.set_ciphertext(ciphertext); HpkeDecryptRequest request; *request.mutable_private_key() = std::move(key); @@ -188,7 +188,7 @@ absl::StatusOr<HpkeDecryptResponse> CryptoClientWrapper::HpkeDecrypt( std::move(request), [&response, &success]( const google::scp::core::ExecutionResult& result, - const google::cmrt::sdk::crypto_service::v1::HpkeDecryptResponse& + google::cmrt::sdk::crypto_service::v1::HpkeDecryptResponse decrypt_response) { if (result.Successful()) { success = true; @@ -218,7 +218,7 @@ absl::StatusOr<AeadDecryptResponse> CryptoClientWrapper::AeadDecrypt( ExecutionResult execution_result = crypto_client_->AeadDecrypt( std::move(request), [&response, &success](const google::scp::core::ExecutionResult& result, - const AeadDecryptResponse& decrypt_response) { + AeadDecryptResponse decrypt_response) { if (result.Successful()) { success = true; response = std::move(decrypt_response); diff --git a/services/common/encryption/crypto_client_wrapper.h b/services/common/encryption/crypto_client_wrapper.h index c09ad7ab..c5e0e77e 100644 --- a/services/common/encryption/crypto_client_wrapper.h +++ b/services/common/encryption/crypto_client_wrapper.h @@ -23,9 +23,9 @@ #include "absl/status/statusor.h" #include "absl/strings/string_view.h" -#include "scp/cc/public/cpio/interface/crypto_client/crypto_client_interface.h" #include "services/common/encryption/crypto_client_wrapper_interface.h" -#include "src/cpp/encryption/key_fetcher/interface/private_key_fetcher_interface.h" +#include "src/encryption/key_fetcher/interface/private_key_fetcher_interface.h" +#include "src/public/cpio/interface/crypto_client/crypto_client_interface.h" namespace privacy_sandbox::bidding_auction_servers { diff --git a/services/common/encryption/crypto_client_wrapper_interface.h b/services/common/encryption/crypto_client_wrapper_interface.h index 72c0b86b..8477274b 100644 --- a/services/common/encryption/crypto_client_wrapper_interface.h +++ b/services/common/encryption/crypto_client_wrapper_interface.h @@ -23,8 +23,8 @@ #include "absl/status/statusor.h" #include "absl/strings/string_view.h" -#include "scp/cc/public/cpio/interface/crypto_client/crypto_client_interface.h" -#include "src/cpp/encryption/key_fetcher/interface/private_key_fetcher_interface.h" +#include "src/encryption/key_fetcher/interface/private_key_fetcher_interface.h" +#include "src/public/cpio/interface/crypto_client/crypto_client_interface.h" namespace privacy_sandbox::bidding_auction_servers { diff --git a/services/common/encryption/crypto_client_wrapper_test.cc b/services/common/encryption/crypto_client_wrapper_test.cc index 4f65635f..fade6cdb 100644 --- a/services/common/encryption/crypto_client_wrapper_test.cc +++ b/services/common/encryption/crypto_client_wrapper_test.cc @@ -64,18 +64,22 @@ class MockCryptoClientProvider ExecutionResult stop_result_mock = SuccessExecutionResult(); ExecutionResult Stop() noexcept override { return stop_result_mock; } + // NOLINTNEXTLINE MOCK_METHOD(ExecutionResult, HpkeEncrypt, (HpkeEncryptRequest request, Callback<HpkeEncryptResponse> callback), (override, noexcept)); + // NOLINTNEXTLINE MOCK_METHOD(ExecutionResult, HpkeDecrypt, (HpkeDecryptRequest request, Callback<HpkeDecryptResponse> callback), (override, noexcept)); +
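CryptoClientWrapper above repeatedly bridges SCP's callback-style API into an absl::StatusOr return; a self-contained sketch of the pattern with stand-in types (not the real SCP signatures):

#include <string>
#include <utility>
#include "absl/status/status.h"
#include "absl/status/statusor.h"

struct ResultLike { bool Successful() const { return ok; } bool ok = false; };
struct ResponseLike { std::string payload; };

template <typename Client, typename Request>
absl::StatusOr<ResponseLike> CallSync(Client& client, Request request) {
  ResponseLike response;
  bool success = false;
  // The wrapper relies on the client invoking the callback before returning.
  client.Call(std::move(request),
              [&](const ResultLike& result, ResponseLike r) {
                if (result.Successful()) {
                  success = true;
                  response = std::move(r);
                }
              });
  if (!success) return absl::InternalError("call failed");
  return response;
}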
// NOLINTNEXTLINE MOCK_METHOD(ExecutionResult, AeadEncrypt, (AeadEncryptRequest request, Callback<AeadEncryptResponse> callback), (override, noexcept)); + // NOLINTNEXTLINE MOCK_METHOD(ExecutionResult, AeadDecrypt, (AeadDecryptRequest request, Callback<AeadDecryptResponse> callback), @@ -107,8 +111,9 @@ TEST(CryptoClientWrapperTest, HpkeEncrypt_Success) { std::unique_ptr<MockCryptoClientProvider> mock_crypto_client = std::make_unique<MockCryptoClientProvider>(); EXPECT_CALL(*mock_crypto_client, HpkeEncrypt) - .WillOnce([&](HpkeEncryptRequest request, - Callback<HpkeEncryptResponse> callback) -> ExecutionResult { + .WillOnce([&](const HpkeEncryptRequest& request, + const Callback<HpkeEncryptResponse>& callback) + -> ExecutionResult { EXPECT_TRUE(google::protobuf::util::MessageDifferencer::Equals( request, expected_request)); callback(SuccessExecutionResult(), mock_response); @@ -127,8 +132,9 @@ TEST(CryptoClientWrapperTest, HpkeEncrypt_Failure) { std::unique_ptr<MockCryptoClientProvider> mock_crypto_client = std::make_unique<MockCryptoClientProvider>(); EXPECT_CALL(*mock_crypto_client, HpkeEncrypt) - .WillOnce([&](HpkeEncryptRequest request, - Callback<HpkeEncryptResponse> callback) -> ExecutionResult { + .WillOnce([&](const HpkeEncryptRequest& request, + const Callback<HpkeEncryptResponse>& callback) + -> ExecutionResult { callback(FailureExecutionResult(0), HpkeEncryptResponse()); return SuccessExecutionResult(); }); @@ -179,8 +185,9 @@ TEST(CryptoClientWrapperTest, HpkeDecrypt_Success) { std::unique_ptr<MockCryptoClientProvider> mock_crypto_client = std::make_unique<MockCryptoClientProvider>(); EXPECT_CALL(*mock_crypto_client, HpkeDecrypt) - .WillOnce([&](HpkeDecryptRequest request, - Callback<HpkeDecryptResponse> callback) -> ExecutionResult { + .WillOnce([&](const HpkeDecryptRequest& request, + const Callback<HpkeDecryptResponse>& callback) + -> ExecutionResult { EXPECT_TRUE(google::protobuf::util::MessageDifferencer::Equals( request, expected_request)); callback(SuccessExecutionResult(), mock_response); @@ -211,8 +218,9 @@ TEST(CryptoClientWrapperTest, AeadEncrypt_Success) { std::unique_ptr<MockCryptoClientProvider> mock_crypto_client = std::make_unique<MockCryptoClientProvider>(); EXPECT_CALL(*mock_crypto_client, AeadEncrypt) - .WillOnce([&](AeadEncryptRequest request, - Callback<AeadEncryptResponse> callback) -> ExecutionResult { + .WillOnce([&](const AeadEncryptRequest& request, + const Callback<AeadEncryptResponse>& callback) + -> ExecutionResult { EXPECT_TRUE(google::protobuf::util::MessageDifferencer::Equals( request, expected_request)); callback(SuccessExecutionResult(), mock_response); @@ -244,8 +252,9 @@ TEST(CryptoClientWrapperTest, AeadDecrypt_Success) { std::unique_ptr<MockCryptoClientProvider> mock_crypto_client = std::make_unique<MockCryptoClientProvider>(); EXPECT_CALL(*mock_crypto_client, AeadDecrypt) - .WillOnce([&](AeadDecryptRequest request, - Callback<AeadDecryptResponse> callback) -> ExecutionResult { + .WillOnce([&](const AeadDecryptRequest& request, + const Callback<AeadDecryptResponse>& callback) + -> ExecutionResult { EXPECT_TRUE(google::protobuf::util::MessageDifferencer::Equals( request, expected_request)); callback(SuccessExecutionResult(), mock_response); diff --git a/services/common/encryption/key_fetcher_factory.cc b/services/common/encryption/key_fetcher_factory.cc index b5e822d0..73b9a21d 100644 --- a/services/common/encryption/key_fetcher_factory.cc +++ b/services/common/encryption/key_fetcher_factory.cc @@ -26,11 +26,11 @@ #include "services/common/clients/config/trusted_server_config_client.h" #include "services/common/constants/common_service_flags.h" +#include "src/concurrent/event_engine_executor.h" #include "src/core/lib/event_engine/default_event_engine.h" -#include "src/cpp/concurrent/event_engine_executor.h" -#include "src/cpp/encryption/key_fetcher/interface/key_fetcher_manager_interface.h" -#include "src/cpp/encryption/key_fetcher/src/fake_key_fetcher_manager.h" -#include "src/cpp/logger/request_context_logger.h" +#include
"src/encryption/key_fetcher/fake_key_fetcher_manager.h" +#include "src/encryption/key_fetcher/interface/key_fetcher_manager_interface.h" +#include "src/logger/request_context_logger.h" namespace privacy_sandbox::bidding_auction_servers { diff --git a/services/common/encryption/key_fetcher_factory.h b/services/common/encryption/key_fetcher_factory.h index 656f5f35..5d9449a3 100644 --- a/services/common/encryption/key_fetcher_factory.h +++ b/services/common/encryption/key_fetcher_factory.h @@ -20,7 +20,7 @@ #define SERVICES_ENCRYPTION_KEY_FETCHER_UTIL_H_ #include "services/common/clients/config/trusted_server_config_client.h" -#include "src/cpp/encryption/key_fetcher/interface/key_fetcher_manager_interface.h" +#include "src/encryption/key_fetcher/interface/key_fetcher_manager_interface.h" namespace privacy_sandbox::bidding_auction_servers { diff --git a/services/common/encryption/mock_crypto_client_wrapper.h b/services/common/encryption/mock_crypto_client_wrapper.h index 6a83df5b..854363bd 100644 --- a/services/common/encryption/mock_crypto_client_wrapper.h +++ b/services/common/encryption/mock_crypto_client_wrapper.h @@ -32,23 +32,27 @@ class MockCryptoClientWrapper : public CryptoClientWrapperInterface { public: virtual ~MockCryptoClientWrapper() = default; + // NOLINTNEXTLINE MOCK_METHOD(absl::StatusOr< google::cmrt::sdk::crypto_service::v1::HpkeEncryptResponse>, HpkeEncrypt, (const google::cmrt::sdk::public_key_service::v1::PublicKey&, const std::string&), (noexcept, override)); + // NOLINTNEXTLINE MOCK_METHOD(absl::StatusOr< google::cmrt::sdk::crypto_service::v1::HpkeDecryptResponse>, HpkeDecrypt, (const server_common::PrivateKey& private_key, const std::string& ciphertext), (noexcept, override)); + // NOLINTNEXTLINE MOCK_METHOD(absl::StatusOr< google::cmrt::sdk::crypto_service::v1::AeadEncryptResponse>, AeadEncrypt, (const std::string& plaintext_payload, const std::string& secret), (noexcept, override)); + // NOLINTNEXTLINE MOCK_METHOD(absl::StatusOr< google::cmrt::sdk::crypto_service::v1::AeadDecryptResponse>, AeadDecrypt, diff --git a/services/common/loggers/BUILD b/services/common/loggers/BUILD index e3e8d2d9..213ba918 100644 --- a/services/common/loggers/BUILD +++ b/services/common/loggers/BUILD @@ -36,7 +36,7 @@ cc_library( "//services/common/loggers:timer", "@com_google_absl//absl/strings", "@com_google_absl//absl/time", - "@google_privacysandbox_servers_common//src/cpp/logger:request_context_logger", + "@google_privacysandbox_servers_common//src/logger:request_context_logger", ], ) @@ -56,6 +56,6 @@ cc_library( "source_location_context.h", ], deps = [ - "@google_privacysandbox_servers_common//src/cpp/util/status_macro:source_location", + "@google_privacysandbox_servers_common//src/util/status_macro:source_location", ], ) diff --git a/services/common/loggers/benchmarking_logger.cc b/services/common/loggers/benchmarking_logger.cc index dddb0998..c4fd45cd 100644 --- a/services/common/loggers/benchmarking_logger.cc +++ b/services/common/loggers/benchmarking_logger.cc @@ -19,7 +19,7 @@ #include #include "absl/strings/str_cat.h" -#include "src/cpp/logger/request_context_logger.h" +#include "src/logger/request_context_logger.h" namespace privacy_sandbox::bidding_auction_servers { diff --git a/services/common/loggers/source_location_context.h b/services/common/loggers/source_location_context.h index 23880a65..ef4cafec 100644 --- a/services/common/loggers/source_location_context.h +++ b/services/common/loggers/source_location_context.h @@ -18,7 +18,7 @@ #define 
SERVICES_COMMON_LOGGERS_SOURCE_LOCATION_CONTEXT_H_ #include -#include "src/cpp/util/status_macro/source_location.h" +#include "src/util/status_macro/source_location.h" namespace privacy_sandbox::bidding_auction_servers::log { template <typename T> struct ParamWithSourceLoc { diff --git a/services/common/metric/BUILD b/services/common/metric/BUILD index 79248f46..be56f50b 100644 --- a/services/common/metric/BUILD +++ b/services/common/metric/BUILD @@ -35,8 +35,8 @@ cc_library( ":error_code", "//services/common/util:read_system", "//services/common/util:reporting_util", - "@google_privacysandbox_servers_common//src/cpp/metric:context_map", - "@google_privacysandbox_servers_common//src/cpp/metric:key_fetch", + "@google_privacysandbox_servers_common//src/metric:context_map", + "@google_privacysandbox_servers_common//src/metric:key_fetch", ], ) diff --git a/services/common/metric/server_definition.h b/services/common/metric/server_definition.h index 852419a5..767a1f13 100644 --- a/services/common/metric/server_definition.h +++ b/services/common/metric/server_definition.h @@ -23,8 +23,9 @@ #include "services/common/metric/error_code.h" #include "services/common/util/read_system.h" #include "services/common/util/reporting_util.h" -#include "src/cpp/metric/context_map.h" -#include "src/cpp/metric/key_fetch.h" +#include "src/metric/context_map.h" +#include "src/metric/definition.h" +#include "src/metric/key_fetch.h" // Defines API used by Bidding auction servers, and B&A specific metrics. namespace privacy_sandbox::bidding_auction_servers { @@ -53,14 +54,14 @@ constexpr int kMaxBuyersSolicited = 2; inline constexpr server_common::metrics::Definition< int, server_common::metrics::Privacy::kNonImpacting, server_common::metrics::Instrument::kHistogram> - kProtectedCiphertextSize("protected_ciphertext.size_bytes", + kProtectedCiphertextSize("sfe.protected_ciphertext.size_bytes", "Size of protected ciphertext in Bytes", server_common::metrics::kSizeHistogram); inline constexpr server_common::metrics::Definition< int, server_common::metrics::Privacy::kNonImpacting, server_common::metrics::Instrument::kHistogram> - kAuctionConfigSize("auction_config.size_bytes", + kAuctionConfigSize("sfe.auction_config.size_bytes", "Size of auction config in Bytes", server_common::metrics::kSizeHistogram); @@ -74,7 +75,7 @@ inline constexpr server_common::metrics::Definition< int, server_common::metrics::Privacy::kImpacting, server_common::metrics::Instrument::kUpDownCounter> - kJSExecutionErrorCount("js_execution.error.count", + kJSExecutionErrorCount("js_execution.errors_count", "No.
of times js execution returned status != OK", 1, 0); @@ -82,49 +83,49 @@ inline constexpr server_common::metrics::Definition< int, server_common::metrics::Privacy::kImpacting, server_common::metrics::Instrument::kHistogram> kInitiatedRequestKVDuration( - "initiated_request.kv.duration_ms", + "initiated_request.to_kv.duration_ms", "Total duration request takes to get response back from KV server", server_common::metrics::kTimeHistogram, 1'000, 10); inline constexpr server_common::metrics::Definition< int, server_common::metrics::Privacy::kImpacting, server_common::metrics::Instrument::kHistogram> kInitiatedRequestBiddingDuration( - "initiated_request.bidding.duration_ms", + "bfe.initiated_request.to_bidding.duration_ms", "Total duration request takes to get response back from bidding server", server_common::metrics::kTimeHistogram, 1'000, 10); inline constexpr server_common::metrics::Definition< int, server_common::metrics::Privacy::kImpacting, server_common::metrics::Instrument::kHistogram> kInitiatedRequestAuctionDuration( - "initiated_request.auction.duration_ms", + "sfe.initiated_request.to_auction.duration_ms", "Total duration request takes to get response back from Auction server", server_common::metrics::kTimeHistogram, 1'000, 10); inline constexpr server_common::metrics::Definition< int, server_common::metrics::Privacy::kImpacting, server_common::metrics::Instrument::kHistogram> kInitiatedRequestKVSize( - "initiated_request.kv.size_bytes", + "initiated_request.to_kv.size_bytes", "Size of the Initiated Request to KV server in Bytes", server_common::metrics::kSizeHistogram, 25'000, 100); inline constexpr server_common::metrics::Definition< int, server_common::metrics::Privacy::kImpacting, server_common::metrics::Instrument::kHistogram> kInitiatedRequestBiddingSize( - "initiated_request.bidding.size_bytes", + "bfe.initiated_request.to_bidding.size_bytes", "Size of the Initiated Request to Bidding server in Bytes", server_common::metrics::kSizeHistogram, 5'000'000, 5'000); inline constexpr server_common::metrics::Definition< int, server_common::metrics::Privacy::kImpacting, server_common::metrics::Instrument::kHistogram> kInitiatedRequestAuctionSize( - "initiated_request.auction.size_bytes", + "sfe.initiated_request.to_auction.size_bytes", "Size of the initiated Request to Auction server in Bytes", server_common::metrics::kSizeHistogram, 1'000'000, 1'000); inline constexpr server_common::metrics::Definition< int, server_common::metrics::Privacy::kImpacting, server_common::metrics::Instrument::kHistogram> kBfeInitiatedResponseKVSize( - "bfe.initiated_response.kv.size_bytes", + "bfe.initiated_response.to_kv.size_bytes", "Size of the Initiated Response by BFE KV server in Bytes", server_common::metrics::kSizeHistogram, 5'000'000, 5'000); @@ -132,7 +133,7 @@ inline constexpr server_common::metrics::Definition< int, server_common::metrics::Privacy::kImpacting, server_common::metrics::Instrument::kHistogram> kSfeInitiatedResponseKVSize( - "sfe.initiated_response.kv.size_bytes", + "sfe.initiated_response.to_kv.size_bytes", "Size of the Initiated Response by SFE KV server in Bytes", server_common::metrics::kSizeHistogram, 100'000, 1'000); @@ -140,14 +141,14 @@ inline constexpr server_common::metrics::Definition< int, server_common::metrics::Privacy::kImpacting, server_common::metrics::Instrument::kHistogram> kInitiatedResponseBiddingSize( - "initiated_response.bidding.size_bytes", + "bfe.initiated_response.to_bidding.size_bytes", "Size of the Initiated Response by Bidding server in Bytes", 
server_common::metrics::kSizeHistogram, 100'000, 1'000); inline constexpr server_common::metrics::Definition< int, server_common::metrics::Privacy::kImpacting, server_common::metrics::Instrument::kHistogram> kInitiatedResponseAuctionSize( - "initiated_response.auction.size_bytes", + "sfe.initiated_response.to_auction.size_bytes", "Size of the initiated Response by Auction server in Bytes", server_common::metrics::kSizeHistogram, 100'000, 1'000); @@ -155,7 +156,7 @@ inline constexpr server_common::metrics::Definition< int, server_common::metrics::Privacy::kImpacting, server_common::metrics::Instrument::kPartitionedCounter> kInitiatedRequestCountByServer( - /*name*/ "initiated_request.count_by_server", + /*name*/ "initiated_request.count", /*description*/ "Total number of requests initiated by the server partitioned by " "outgoing server", @@ -169,14 +170,14 @@ inline constexpr server_common::metrics::Definition< int, server_common::metrics::Privacy::kImpacting, server_common::metrics::Instrument::kUpDownCounter> kBiddingZeroBidCount( - "business_logic.bidding.zero_bid.count", + "bidding.business_logic.zero_bid_count", "Total number of times bidding service returns a zero bid", 15, 0); inline constexpr server_common::metrics::Definition< double, server_common::metrics::Privacy::kImpacting, server_common::metrics::Instrument::kHistogram> kBiddingZeroBidPercent( - "business_logic.bidding.zero_bid.percent", + "bidding.business_logic.zero_bid_percent", "Percentage of times bidding service returns a zero bid", kPercentHistogram, 1, 0); @@ -197,7 +198,7 @@ inline constexpr server_common::metrics::Definition< int, server_common::metrics::Privacy::kImpacting, server_common::metrics::Instrument::kPartitionedCounter> kAuctionBidRejectedCount( - /*name*/ "business_logic.auction.bid_rejected.count", + /*name*/ "auction.business_logic.bid_rejected_count", /*description*/ "Total number of times auction service rejects a bid partitioned by the" "seller rejection reason", @@ -211,7 +212,7 @@ inline constexpr server_common::metrics::Definition< double, server_common::metrics::Privacy::kImpacting, server_common::metrics::Instrument::kHistogram> kAuctionBidRejectedPercent( - /*name*/ "business_logic.auction.bid_rejected.percent", + /*name*/ "auction.business_logic.bid_rejected_percent", /*description*/ "Percentage of times auction service rejects a bid", kPercentHistogram, /*upper_bound*/ 1, @@ -221,7 +222,7 @@ inline constexpr server_common::metrics::Definition< int, server_common::metrics::Privacy::kImpacting, server_common::metrics::Instrument::kUpDownCounter> kBiddingTotalBidsCount( - /*name*/ "business_logic.bidding.bids.count", + /*name*/ "bidding.business_logic.bids_count", /*description*/ "Total number of bids generated by bidding service", /*upper_bound*/ 100, /*lower_bound*/ 0); @@ -235,7 +236,7 @@ inline constexpr server_common::metrics::Definition< int, server_common::metrics::Privacy::kImpacting, server_common::metrics::Instrument::kUpDownCounter> kAuctionTotalBidsCount( - /*name*/ "business_logic.auction.bids.count", + /*name*/ "auction.business_logic.bids_count", /*description*/ "Total number of bids used to score in auction service", /*upper_bound*/ 100, @@ -245,7 +246,7 @@ inline constexpr server_common::metrics::Definition< int, server_common::metrics::Privacy::kImpacting, server_common::metrics::Instrument::kUpDownCounter> kRequestWithWinnerCount( - /*name*/ "business_logic.sfe.request_with_winner.count", + /*name*/ "sfe.business_logic.request_with_winner_count", /*description*/ "Total 
number of SFE request with winner ads", 1, 0); @@ -253,7 +254,7 @@ inline constexpr server_common::metrics::Definition< int, server_common::metrics::Privacy::kImpacting, server_common::metrics::Instrument::kHistogram> kSfeWithWinnerTimeMs( - /*name*/ "business_logic.sfe.request_with_winner.duration_ms", + /*name*/ "sfe.business_logic.request_with_winner.duration_ms", /*description*/ "Total time taken by SFE to execute the request with winner ads", server_common::metrics::kTimeHistogram, 1000, 100); @@ -264,10 +265,10 @@ inline constexpr server_common::metrics::Definition< int, server_common::metrics::Privacy::kImpacting, server_common::metrics::Instrument::kPartitionedCounter> kAuctionErrorCountByErrorCode( - /*name*/ "auction.error_code", + /*name*/ "auction.errors_count", /*description*/ "Number of errors in the auction server by error code", - /*partition_type*/ "error code", + /*partition_type*/ "error_code", /*max_partitions_contributed*/ 1, /*public_partitions*/ kAuctionErrorCode, /*upper_bound*/ 1, @@ -278,10 +279,10 @@ inline constexpr server_common::metrics::Definition< int, server_common::metrics::Privacy::kImpacting, server_common::metrics::Instrument::kPartitionedCounter> kBfeErrorCountByErrorCode( - /*name*/ "bfe.error_code", + /*name*/ "bfe.errors_count", /*description*/ "Number of errors in the BFE server by error code", - /*partition_type*/ "error code", + /*partition_type*/ "error_code", /*max_partitions_contributed*/ 1, /*public_partitions*/ kBfeErrorCode, /*upper_bound*/ 1, @@ -292,10 +293,10 @@ inline constexpr server_common::metrics::Definition< int, server_common::metrics::Privacy::kImpacting, server_common::metrics::Instrument::kPartitionedCounter> kBiddingErrorCountByErrorCode( - /*name*/ "bidding.error_code", + /*name*/ "bidding.errors_count", /*description*/ "Number of errors in the bidding server by error code", - /*partition_type*/ "error code", + /*partition_type*/ "error_code", /*max_partitions_contributed*/ 1, /*public_partitions*/ kBiddingErrorCode, /*upper_bound*/ 1, @@ -306,10 +307,10 @@ inline constexpr server_common::metrics::Definition< int, server_common::metrics::Privacy::kImpacting, server_common::metrics::Instrument::kPartitionedCounter> kSfeErrorCountByErrorCode( - /*name*/ "sfe.error_code", + /*name*/ "sfe.errors_count", /*description*/ "Number of errors in the SFE server by error code", - /*partition_type*/ "error code", + /*partition_type*/ "error_code", /*max_partitions_contributed*/ 1, /*public_partitions*/ kSfeErrorCode, /*upper_bound*/ 1, @@ -320,7 +321,7 @@ inline constexpr server_common::metrics::Definition< int, server_common::metrics::Privacy::kImpacting, server_common::metrics::Instrument::kPartitionedCounter> kSfeInitiatedRequestErrorsCountByBuyer( - /*name*/ "sfe.initiated_request.errors_count_by_buyer", + /*name*/ "sfe.initiated_request.to_bfe.errors_count_by_buyer", /*description*/ "Total number of initiated requests failed per buyer", /*partition_type*/ "buyer", @@ -333,7 +334,7 @@ inline constexpr server_common::metrics::Definition< int, server_common::metrics::Privacy::kImpacting, server_common::metrics::Instrument::kPartitionedCounter> kSfeInitiatedRequestCountByBuyer( - /*name*/ "sfe.initiated_request.count_by_buyer", + /*name*/ "sfe.initiated_request.to_bfe.count", /*description*/ "Total number of initiated requests per buyer", /*partition_type*/ "buyer", @@ -346,7 +347,7 @@ inline constexpr server_common::metrics::Definition< int, server_common::metrics::Privacy::kImpacting, 
server_common::metrics::Instrument::kPartitionedCounter> kSfeInitiatedRequestDurationByBuyer( - /*name*/ "sfe.initiated_request.duration_by_buyer", + /*name*/ "sfe.initiated_request.to_bfe.duration_ms", /*description*/ "Initiated requests duration per buyer", /*partition_type*/ "buyer", @@ -359,7 +360,7 @@ inline constexpr server_common::metrics::Definition< int, server_common::metrics::Privacy::kImpacting, server_common::metrics::Instrument::kPartitionedCounter> kSfeInitiatedRequestSizeByBuyer( - /*name*/ "sfe.initiated_request.size_by_buyer", + /*name*/ "sfe.initiated_request.to_bfe.size_bytes", /*description*/ "Initiated requests size per buyer", /*partition_type*/ "buyer", @@ -372,7 +373,7 @@ inline constexpr server_common::metrics::Definition< int, server_common::metrics::Privacy::kImpacting, server_common::metrics::Instrument::kPartitionedCounter> kSfeInitiatedResponseSizeByBuyer( - /*name*/ "sfe.initiated_response.size_by_buyer", + /*name*/ "sfe.initiated_response.to_bfe.size_bytes", /*description*/ "Initiated response size per buyer", /*partition_type*/ "buyer", @@ -385,22 +386,22 @@ inline constexpr server_common::metrics::Definition< int, server_common::metrics::Privacy::kNonImpacting, server_common::metrics::Instrument::kPartitionedCounter> kRequestFailedCountByStatus( - /*name*/ "request.failed_count_by_status", + /*name*/ "request.failed_count", /*description*/ "Total number of requests that resulted in failure partitioned by " - "Error Code", - /*partition_type*/ "error_status_code", + "status code", + /*partition_type*/ "status_code", /*public_partitions*/ server_common::metrics::kEmptyPublicPartition); inline constexpr server_common::metrics::Definition< int, server_common::metrics::Privacy::kImpacting, server_common::metrics::Instrument::kPartitionedCounter> kInitiatedRequestKVErrorCountByStatus( - /*name*/ "initiated_request.kv.errors_count_by_status", + /*name*/ "initiated_request.to_kv.errors_count", /*description*/ "Initiated requests by KV that resulted in failure partitioned by " - "Error Code", - /*partition_type*/ "error_status_code", + "status code", + /*partition_type*/ "status_code", /*max_partitions_contributed*/ 1, /*public_partitions*/ server_common::metrics::kEmptyPublicPartition, /*upper_bound*/ 1, @@ -410,11 +411,11 @@ inline constexpr server_common::metrics::Definition< int, server_common::metrics::Privacy::kImpacting, server_common::metrics::Instrument::kPartitionedCounter> kInitiatedRequestAuctionErrorCountByStatus( - /*name*/ "initiated_request.auction.errors_count_by_status", + /*name*/ "sfe.initiated_request.to_auction.errors_count", /*description*/ "Initiated requests by auction that resulted in failure partitioned by " - "Error Code", - /*partition_type*/ "error_status_code", + "status code", + /*partition_type*/ "status_code", /*max_partitions_contributed*/ 1, /*public_partitions*/ server_common::metrics::kEmptyPublicPartition, /*upper_bound*/ 1, @@ -423,11 +424,11 @@ inline constexpr server_common::metrics::Definition< int, server_common::metrics::Privacy::kImpacting, server_common::metrics::Instrument::kPartitionedCounter> kInitiatedRequestBiddingErrorCountByStatus( - /*name*/ "initiated_request.bidding.errors_count_by_status", + /*name*/ "bfe.initiated_request.to_bidding.errors_count", /*description*/ - "Initiated requests by KV that resulted in failure partitioned by " - "Error Code", - /*partition_type*/ "error_status_code", + "Initiated requests by bidding that resulted in failure partitioned by " + "status code", + /*partition_type*/ 
"status_code", /*max_partitions_contributed*/ 1, /*public_partitions*/ server_common::metrics::kEmptyPublicPartition, /*upper_bound*/ 1, @@ -436,11 +437,11 @@ inline constexpr server_common::metrics::Definition< int, server_common::metrics::Privacy::kImpacting, server_common::metrics::Instrument::kPartitionedCounter> kInitiatedRequestBfeErrorCountByStatus( - /*name*/ "initiated_request.bfe.errors_count_by_status", + /*name*/ "sfe.initiated_request.to_bfe.errors_count_by_status", /*description*/ - "Initiated requests by KV that resulted in failure partitioned by " - "Error Code", - /*partition_type*/ "error_status_code", + "Initiated requests by BFE that resulted in failure partitioned by " + "status code", + /*partition_type*/ "status_code", /*max_partitions_contributed*/ 1, /*public_partitions*/ server_common::metrics::kEmptyPublicPartition, /*upper_bound*/ 1, @@ -612,7 +613,7 @@ class InitiatedRequest { void SetResponseSize(int response_size) { response_size_ = response_size; } - void SetBuyer(std::string buyer) { buyer_ = buyer; } + void SetBuyer(std::string buyer) { buyer_ = std::move(buyer); } ~InitiatedRequest() { LogMetrics(); } @@ -715,11 +716,11 @@ inline void AddSystemMetric(T* context_map) { server_common::metrics::kKeyFetchFailureCount, server_common::KeyFetchResultCounter::GetKeyFetchFailureCount); context_map->AddObserverable( - server_common::metrics::kNumKeysParsedOnRecentFetch, - server_common::KeyFetchResultCounter::GetNumKeysParsedOnRecentFetch); + server_common::metrics::kNumKeysParsed, + server_common::KeyFetchResultCounter::GetNumKeysParsed); context_map->AddObserverable( - server_common::metrics::kNumKeysCachedAfterRecentFetch, - server_common::KeyFetchResultCounter::GetNumKeysCachedAfterRecentFetch); + server_common::metrics::kNumKeysCached, + server_common::KeyFetchResultCounter::GetNumKeysCached); } inline void AddBuyerPartition( diff --git a/services/common/providers/BUILD b/services/common/providers/BUILD index b1a13538..40e9b853 100644 --- a/services/common/providers/BUILD +++ b/services/common/providers/BUILD @@ -24,6 +24,7 @@ cc_library( deps = [ "@com_google_absl//absl/functional:any_invocable", "@com_google_absl//absl/status", + "@com_google_absl//absl/status:statusor", "@com_google_absl//absl/time", ], ) diff --git a/services/common/reporters/async_reporter_test.cc b/services/common/reporters/async_reporter_test.cc index c7ead639..4dea1ce9 100644 --- a/services/common/reporters/async_reporter_test.cc +++ b/services/common/reporters/async_reporter_test.cc @@ -46,6 +46,7 @@ class AsyncReporterTest : public ::testing::Test { TEST_F(AsyncReporterTest, DoReportSuccessfully) { absl::BlockingCounter done(1); + // NOLINTNEXTLINE auto done_cb = [&done](absl::StatusOr result) { done.DecrementCount(); ASSERT_TRUE(result.ok()); @@ -57,6 +58,7 @@ TEST_F(AsyncReporterTest, DoReportSuccessfully) { TEST_F(AsyncReporterTest, DoReportEmptyUrl) { absl::BlockingCounter done(1); + // NOLINTNEXTLINE auto done_cb = [&done](absl::StatusOr result) { done.DecrementCount(); ASSERT_FALSE(result.ok()); @@ -68,6 +70,7 @@ TEST_F(AsyncReporterTest, DoReportEmptyUrl) { TEST_F(AsyncReporterTest, DoReportMalformedUrl) { absl::BlockingCounter done(1); + // NOLINTNEXTLINE auto done_cb = [&done](absl::StatusOr result) { done.DecrementCount(); ASSERT_FALSE(result.ok()); diff --git a/services/common/telemetry/BUILD b/services/common/telemetry/BUILD index d34dc7dc..f3536eb3 100644 --- a/services/common/telemetry/BUILD +++ b/services/common/telemetry/BUILD @@ -28,7 +28,8 @@ cc_library( 
"//services/common/clients/config:config_client_util", "//services/common/constants:common_service_flags", "//services/common/metric:server_definition", - "@google_privacysandbox_servers_common//src/cpp/telemetry", + "//services/common/util:build_info", + "@google_privacysandbox_servers_common//src/telemetry", "@io_opentelemetry_cpp//sdk/src/resource", ], ) diff --git a/services/common/telemetry/configure_telemetry.h b/services/common/telemetry/configure_telemetry.h index 16698e52..4c33f44d 100644 --- a/services/common/telemetry/configure_telemetry.h +++ b/services/common/telemetry/configure_telemetry.h @@ -29,17 +29,15 @@ #include "services/common/clients/config/trusted_server_config_client_util.h" #include "services/common/constants/common_service_flags.h" #include "services/common/metric/server_definition.h" -#include "src/cpp/logger/request_context_impl.h" -#include "src/cpp/telemetry/flag/telemetry_flag.h" -#include "src/cpp/telemetry/telemetry.h" +#include "services/common/util/build_info.h" +#include "src/logger/request_context_impl.h" +#include "src/telemetry/flag/telemetry_flag.h" +#include "src/telemetry/telemetry.h" namespace privacy_sandbox::bidding_auction_servers { -// TODO (b/278899152): get version dynamically inline constexpr std::string_view kOpenTelemetryVersion = "1.9.1"; -inline constexpr std::string_view kBuildVersion = "3.3.0"; inline constexpr std::string_view kOperator = "operator"; -inline constexpr std::string_view kZone = "zone"; inline constexpr std::string_view kRegion = "region"; template @@ -54,6 +52,20 @@ void InitTelemetry(const TrustedServerConfigUtil& config_util, namespace semantic_conventions = ::opentelemetry::sdk::resource::SemanticConventions; + ResourceAttributes resource_attributes = { + {semantic_conventions::kServiceName, config_util.GetService().data()}, + {semantic_conventions::kDeploymentEnvironment, + config_util.GetEnvironment().data()}, + {semantic_conventions::kServiceInstanceId, + config_util.GetInstanceId().data()}, + {semantic_conventions::kServiceVersion, kBuildVersion.data()}, + {kOperator.data(), config_util.GetOperator().data()}, + {kRegion.data(), config_util.GetRegion().data()}}; + + absl::btree_map zone_attribute = + config_util.GetAttribute(); + resource_attributes.insert(zone_attribute.begin(), zone_attribute.end()); + server_common::telemetry::BuildDependentConfig telemetry_config( config_client .GetCustomParameter( @@ -72,16 +84,7 @@ void InitTelemetry(const TrustedServerConfigUtil& config_util, config_util.GetService().data(), kOpenTelemetryVersion.data(), telemetry_config.TraceAllowed(), telemetry_config.MetricAllowed(), consented_log_enabled); - Resource server_info = Resource::Create(ResourceAttributes{ - {semantic_conventions::kServiceName, config_util.GetService().data()}, - {semantic_conventions::kDeploymentEnvironment, - config_util.GetEnvironment().data()}, - {semantic_conventions::kServiceInstanceId, - config_util.GetInstanceId().data()}, - {semantic_conventions::kServiceVersion, kBuildVersion.data()}, - {kOperator.data(), config_util.GetOperator().data()}, - {kZone.data(), config_util.GetZone().data()}, - {kRegion.data(), config_util.GetRegion().data()}}); + Resource server_info = Resource::Create(resource_attributes); server_common::ConfigureTracer(server_info, collector_endpoint); static LoggerProvider* log_provider = diff --git a/services/common/test/BUILD b/services/common/test/BUILD index 2a044fed..c5a97f2b 100644 --- a/services/common/test/BUILD +++ b/services/common/test/BUILD @@ -39,7 +39,7 @@ cc_library( 
"//services/common/reporters:async_reporter", "@com_google_absl//absl/time", "@com_google_googletest//:gtest_main", - "@google_privacysandbox_servers_common//src/cpp/concurrent:executor", + "@google_privacysandbox_servers_common//src/concurrent:executor", "@service_value_key_fledge_privacysandbox//public/query/v2:get_values_v2_cc_grpc", "@service_value_key_fledge_privacysandbox//public/query/v2:get_values_v2_cc_proto", ], diff --git a/services/common/test/mocks.h b/services/common/test/mocks.h index 394d1239..4dc7402b 100644 --- a/services/common/test/mocks.h +++ b/services/common/test/mocks.h @@ -43,7 +43,7 @@ #include "services/common/concurrent/local_cache.h" #include "services/common/providers/async_provider.h" #include "services/common/reporters/async_reporter.h" -#include "src/cpp/concurrent/executor.h" +#include "src/concurrent/executor.h" namespace privacy_sandbox::bidding_auction_servers { @@ -376,8 +376,7 @@ class MockScoreAdsReactor : public ScoreAdsReactor { const AsyncReporter* async_reporter, absl::string_view js) : ScoreAdsReactor(dispatcher, request, response, std::move(benchmarking_logger), key_fetcher_manager, - crypto_client, async_reporter, - std::move(runtime_config)) {} + crypto_client, async_reporter, runtime_config) {} MOCK_METHOD(void, Execute, (), (override)); }; @@ -392,7 +391,7 @@ class MockGenerateBidsReactor : public GenerateBidsReactor { const BiddingServiceRuntimeConfig& runtime_config) : GenerateBidsReactor(dispatcher, request, response, std::move(benchmarkingLogger), key_fetcher_manager, - crypto_client_, std::move(runtime_config)) {} + crypto_client_, runtime_config) {} MOCK_METHOD(void, Execute, (), (override)); }; diff --git a/services/common/test/random.cc b/services/common/test/random.cc index b4a04179..cc20bd6a 100644 --- a/services/common/test/random.cc +++ b/services/common/test/random.cc @@ -450,7 +450,7 @@ ScoreAdsResponse::AdScore MakeARandomAdScore( ad_score.set_interest_group_owner(MakeARandomString()); ad_score.set_ad_metadata(MakeARandomString()); ad_score.set_allow_component_auction(false); - ad_score.set_bid(bid); + ad_score.set_bid(MakeARandomNumber(1, 2.5)); *ad_score.mutable_debug_report_urls() = MakeARandomDebugReportUrls(); *ad_score.mutable_win_reporting_urls() = MakeARandomWinReportingUrls(); for (int index = 0; index < hob_buyer_entries; index++) { @@ -520,7 +520,7 @@ GetBidsRequest::GetBidsRawRequest MakeARandomGetBidsRawRequest() { GetBidsRequest::GetBidsRawRequest raw_request; raw_request.set_publisher_name("publisher_name"); raw_request.set_allocated_auction_signals( - std::move(MakeARandomStructJsonString(1).release())); + MakeARandomStructJsonString(1).release()); return raw_request; } @@ -583,20 +583,43 @@ ProtectedAuctionInput MakeARandomProtectedAuctionInput(ClientType client_type) { return input; } -AuctionResult MakeARandomSingleSellerAuctionResult() { +AuctionResult MakeARandomSingleSellerAuctionResult( + std::vector buyer_list) { AuctionResult result; result.set_ad_render_url(MakeARandomString()); result.set_interest_group_name(MakeARandomString()); result.set_interest_group_owner(MakeARandomString()); result.set_score(MakeARandomNumber(0.0, 1.0)); - AuctionResult::InterestGroupIndex ig_indices; - ig_indices.add_index(MakeARandomInt(1, 5)); - result.mutable_bidding_groups()->try_emplace(MakeARandomString(), - std::move(ig_indices)); + if (buyer_list.empty()) { + buyer_list.push_back(MakeARandomString()); + } + for (const auto& buyer : buyer_list) { + AuctionResult::InterestGroupIndex ig_indices; + 
ig_indices.add_index(MakeARandomInt(1, 5)); + result.mutable_bidding_groups()->try_emplace(buyer, std::move(ig_indices)); + } + // TODO(b/287074572): Add reporting URLs and other reporting fields here // when adding support for reporting. return result; } +AuctionResult MakeARandomComponentAuctionResult( + std::string generation_id, std::string top_level_seller, + std::vector<std::string> buyer_list) { + AuctionResult result = + MakeARandomSingleSellerAuctionResult(std::move(buyer_list)); + result.set_top_level_seller(std::move(top_level_seller)); + result.set_ad_type(AdType::AD_TYPE_PROTECTED_AUDIENCE_AD); + result.mutable_auction_params()->set_ciphertext_generation_id( + std::move(generation_id)); + result.mutable_auction_params()->set_component_seller(MakeARandomString()); + result.mutable_ad_component_render_urls()->Add(MakeARandomString()); + result.mutable_ad_component_render_urls()->Add(MakeARandomString()); + result.set_bid(MakeARandomNumber(0.1, 1.0)); + result.set_bid_currency(MakeARandomString()); + return result; +} + } // namespace privacy_sandbox::bidding_auction_servers diff --git a/services/common/test/random.h b/services/common/test/random.h index 4326c1eb..0c769238 100644 --- a/services/common/test/random.h +++ b/services/common/test/random.h @@ -29,7 +29,7 @@ #include "api/bidding_auction_servers.pb.h" #include "services/common/test/utils/cbor_test_utils.h" #include "services/seller_frontend_service/test/app_test_utils.h" -#include "src/cpp/util/status_macro/status_macros.h" +#include "src/util/status_macro/status_macros.h" // helper functions to generate random objects for testing namespace privacy_sandbox::bidding_auction_servers { @@ -224,8 +224,14 @@ BuyerInput MakeARandomBuyerInput(); ProtectedAuctionInput MakeARandomProtectedAuctionInput(ClientType client_type); -AuctionResult MakeARandomSingleSellerAuctionResult(); +// Populates fields for an auction result object for a single seller auction. +AuctionResult MakeARandomSingleSellerAuctionResult( + std::vector<std::string> buyer_list = {}); +// Populates fields for an auction result object for a component seller auction.
+AuctionResult MakeARandomComponentAuctionResult( + std::string generation_id, std::string top_level_seller, + std::vector<std::string> buyer_list = {}); } // namespace privacy_sandbox::bidding_auction_servers #endif // SERVICES_COMMON_TEST_RANDOM_H_ diff --git a/services/common/test/utils/BUILD b/services/common/test/utils/BUILD index dac5d52f..affd673a 100644 --- a/services/common/test/utils/BUILD +++ b/services/common/test/utils/BUILD @@ -51,7 +51,7 @@ cc_library( "@com_google_absl//absl/status", "@com_google_absl//absl/status:statusor", "@com_google_absl//absl/strings", - "@google_privacysandbox_servers_common//src/cpp/util/status_macro:status_macros", + "@google_privacysandbox_servers_common//src/util/status_macro:status_macros", "@libcbor//:cbor", "@rapidjson", ], diff --git a/services/common/test/utils/cbor_test_utils.cc b/services/common/test/utils/cbor_test_utils.cc index 95683bd2..e8b32ecb 100644 --- a/services/common/test/utils/cbor_test_utils.cc +++ b/services/common/test/utils/cbor_test_utils.cc @@ -32,7 +32,7 @@ #include "services/common/util/request_response_constants.h" #include "services/common/util/scoped_cbor.h" #include "services/seller_frontend_service/util/web_utils.h" -#include "src/cpp/util/status_macro/status_macros.h" +#include "src/util/status_macro/status_macros.h" #include "cbor.h" @@ -56,7 +56,7 @@ absl::Status CborSerializeKeyValue(absl::string_view key, T invocable_builder, .key = cbor_move(cbor_build_stringn(key.data(), key.size())), .value = cbor_move(invocable_builder(std::forward(value_args)...))}; - if (!cbor_map_add(&map, std::move(kv))) { + if (!cbor_map_add(&map, kv)) { return absl::InternalError( absl::StrCat("Failed to serialize ", key, " to CBOR")); } @@ -98,7 +98,7 @@ absl::Status CborSerializeStringArray( struct cbor_pair kv = { .key = cbor_move(cbor_build_stringn(key.data(), key.size())), .value = *array_encoded}; - if (!cbor_map_add(&root, std::move(kv))) { + if (!cbor_map_add(&root, kv)) { return absl::InternalError( absl::StrCat("Failed to add serialized ", key, " to overall CBOR")); } @@ -170,7 +170,7 @@ absl::Status CborSerializeBrowserSignals(absl::string_view key, struct cbor_pair kv = { .key = cbor_move(cbor_build_stringn(kPrevWins, sizeof(kPrevWins) - 1)), .value = *wins}; - if (!cbor_map_add(*browser_signals_map, std::move(kv))) { + if (!cbor_map_add(*browser_signals_map, kv)) { return absl::InvalidArgumentError( "Unable to serialize the complete prev wins data"); } @@ -178,7 +178,7 @@ absl::Status CborSerializeBrowserSignals(absl::string_view key, struct cbor_pair kv = { .key = cbor_move(cbor_build_stringn(key.data(), key.size())), .value = *browser_signals_map}; - if (!cbor_map_add(&interest_group_root, std::move(kv))) { + if (!cbor_map_add(&interest_group_root, kv)) { return absl::InternalError( absl::StrCat("Failed to serialize ", key, " to CBOR")); } @@ -227,7 +227,7 @@ absl::Status CborSerializeBuyerInput(const BuyerInputMapEncoded& buyer_inputs, struct cbor_pair kv = {.key = cbor_move(cbor_build_stringn( kInterestGroups, sizeof(kInterestGroups) - 1)), .value = cbor_move(serialized_buyer_input)}; - if (!cbor_map_add(&root, std::move(kv))) { + if (!cbor_map_add(&root, kv)) { return absl::InternalError( absl::StrCat("Failed to serialize ", kInterestGroups, " to CBOR")); } @@ -256,7 +256,7 @@ absl::Status CborSerializeConsentedDebugConfig( sizeof(kConsentedDebugConfig) - 1)), .value = *serialized_consented_debug_config, }; - if (!cbor_map_add(&root, std::move(kv))) { + if (!cbor_map_add(&root, kv)) { return
absl::InternalError(absl::StrCat("Failed to serialize ", kConsentedDebugConfig, " to CBOR")); } diff --git a/services/common/test/utils/service_utils.h b/services/common/test/utils/service_utils.h index 03bab727..82604486 100644 --- a/services/common/test/utils/service_utils.h +++ b/services/common/test/utils/service_utils.h @@ -22,6 +22,10 @@ #include "absl/strings/str_format.h" #include "api/bidding_auction_servers.grpc.pb.h" +#include "include/grpcpp/create_channel.h" +#include "include/grpcpp/security/credentials.h" +#include "include/grpcpp/server.h" +#include "include/grpcpp/server_builder.h" namespace privacy_sandbox::bidding_auction_servers { diff --git a/services/common/util/BUILD b/services/common/util/BUILD index 202b8a04..4ec44099 100644 --- a/services/common/util/BUILD +++ b/services/common/util/BUILD @@ -119,8 +119,9 @@ cc_library( hdrs = ["error_reporter.h"], deps = [ ":error_categories", + "//services/common/loggers:source_location_context", "@com_google_absl//absl/strings", - "@google_privacysandbox_servers_common//src/cpp/util/status_macro:source_location", + "@google_privacysandbox_servers_common//src/util/status_macro:source_location", ], ) @@ -134,8 +135,8 @@ cc_library( "//services/common/loggers:source_location_context", "@com_google_absl//absl/container:flat_hash_map", "@com_google_absl//absl/strings", - "@google_privacysandbox_servers_common//src/cpp/logger:request_context_impl", - "@google_privacysandbox_servers_common//src/cpp/util/status_macro:source_location", + "@google_privacysandbox_servers_common//src/logger:request_context_impl", + "@google_privacysandbox_servers_common//src/util/status_macro:source_location", ], ) @@ -157,6 +158,7 @@ cc_library( name = "post_auction_signals", hdrs = ["post_auction_signals.h"], deps = [ + "//api:bidding_auction_servers_cc_proto", "@com_google_absl//absl/container:flat_hash_map", "@com_google_absl//absl/strings", ], @@ -178,8 +180,8 @@ cc_library( "@com_google_absl//absl/status", "@com_google_absl//absl/status:statusor", "@com_google_absl//absl/strings", - "@google_privacysandbox_servers_common//src/cpp/logger:request_context_impl", - "@google_privacysandbox_servers_common//src/cpp/util/status_macro:status_macros", + "@google_privacysandbox_servers_common//src/logger:request_context_impl", + "@google_privacysandbox_servers_common//src/util/status_macro:status_macros", "@rapidjson", ], ) @@ -224,7 +226,7 @@ cc_library( name = "signal_handler", hdrs = ["signal_handler.h"], deps = [ - "@google_privacysandbox_servers_common//src/cpp/util/status_macro:examine_stack", + "@google_privacysandbox_servers_common//src/util/status_macro:examine_stack", ], ) @@ -237,8 +239,8 @@ cc_library( "@com_google_absl//absl/log:check", "@com_google_absl//absl/strings:str_format", "@com_google_absl//absl/synchronization", - "@google_privacysandbox_servers_common//src/cpp/logger:request_context_impl", - "@google_privacysandbox_servers_common//src/cpp/util/status_macro:source_location", + "@google_privacysandbox_servers_common//src/logger:request_context_impl", + "@google_privacysandbox_servers_common//src/util/status_macro:source_location", ], ) @@ -280,8 +282,8 @@ cc_library( "@com_google_absl//absl/status:statusor", "@com_google_absl//absl/strings", "@com_google_protobuf//:protobuf", - "@google_privacysandbox_servers_common//src/cpp/logger:request_context_impl", - "@google_privacysandbox_servers_common//src/cpp/util/status_macro:status_macros", + "@google_privacysandbox_servers_common//src/logger:request_context_impl", + 
"@google_privacysandbox_servers_common//src/util/status_macro:status_macros", ], ) @@ -344,8 +346,8 @@ cc_library( "@com_google_absl//absl/status:statusor", "@com_google_absl//absl/strings", "@com_google_protobuf//:protobuf", - "@google_privacysandbox_servers_common//src/cpp/logger:request_context_impl", - "@google_privacysandbox_servers_common//src/cpp/util/status_macro:status_macros", + "@google_privacysandbox_servers_common//src/logger:request_context_impl", + "@google_privacysandbox_servers_common//src/util/status_macro:status_macros", ], ) @@ -370,13 +372,16 @@ cc_library( hdrs = [ "hpke_utils.h", ], + visibility = [ + "//visibility:public", + ], deps = [ "//services/common/constants:user_error_strings", "//services/common/encryption:crypto_client_factory", "//services/common/encryption:crypto_client_wrapper_interface", "//services/common/encryption:key_fetcher_factory", "@com_google_absl//absl/status", - "@google_privacysandbox_servers_common//src/cpp/encryption/key_fetcher/src:key_fetcher_manager", + "@google_privacysandbox_servers_common//src/encryption/key_fetcher:key_fetcher_manager", ], ) @@ -391,6 +396,25 @@ cc_test( "//services/common/encryption:mock_crypto_client_wrapper", "//services/common/test:random", "@com_google_googletest//:gtest_main", - "@google_privacysandbox_servers_common//src/cpp/encryption/key_fetcher/mock:mock_key_fetcher_manager", + "@google_privacysandbox_servers_common//src/encryption/key_fetcher/mock:mock_key_fetcher_manager", ], ) + +genrule( + name = "generate_build_info", + outs = ["build_info.cc"], + cmd = "$(execpath :generate_build_info_cc) >$@", + # Undocumented attr to depend on workspace status files. + # Used here because generate_build_info_cc depends on the workspace status files. + stamp = 1, + tools = [":generate_build_info_cc"], + visibility = ["//visibility:private"], +) + +cc_library( + name = "build_info", + srcs = ["build_info.cc"], + hdrs = ["build_info.h"], + visibility = ["//services:__subpackages__"], + deps = ["@com_google_absl//absl/strings"], +) diff --git a/services/common/util/async_task_tracker.h b/services/common/util/async_task_tracker.h index 577ec52d..cd6ad975 100644 --- a/services/common/util/async_task_tracker.h +++ b/services/common/util/async_task_tracker.h @@ -23,7 +23,7 @@ #include "absl/base/thread_annotations.h" #include "absl/functional/any_invocable.h" #include "absl/synchronization/mutex.h" -#include "src/cpp/logger/request_context_impl.h" +#include "src/logger/request_context_impl.h" namespace privacy_sandbox::bidding_auction_servers { diff --git a/services/common/util/async_task_tracker_test.cc b/services/common/util/async_task_tracker_test.cc index dde3884c..560dcb18 100644 --- a/services/common/util/async_task_tracker_test.cc +++ b/services/common/util/async_task_tracker_test.cc @@ -153,6 +153,7 @@ TEST_F(AsyncTasksTrackerTest, CallbackCalledOnlyOnce) { }); std::vector threads; + threads.reserve(kNumMaxThreads); for (int i = 0; i < kNumMaxThreads; ++i) { threads.emplace_back( [&task_tracker]() { task_tracker.TaskCompleted(TaskStatus::SUCCESS); }); @@ -171,6 +172,7 @@ TEST_F(AsyncTasksTrackerTest, OnSingleTaskDoneCalledWithLock) { [this](bool any_successful) { notification_.Notify(); }); std::vector threads; + threads.reserve(kNumMaxThreads); int count = 0; for (int i = 0; i < kNumMaxThreads; ++i) { threads.emplace_back([&task_tracker, &count]() { diff --git a/services/common/util/binary_http_utils.h b/services/common/util/binary_http_utils.h index 894d77bb..db6d6773 100644 --- 
a/services/common/util/binary_http_utils.h +++ b/services/common/util/binary_http_utils.h @@ -23,8 +23,8 @@ #include "absl/status/statusor.h" #include "google/protobuf/util/json_util.h" #include "quiche/binary_http/binary_http_message.h" -#include "src/cpp/logger/request_context_logger.h" -#include "src/cpp/util/status_macro/status_macros.h" +#include "src/logger/request_context_logger.h" +#include "src/util/status_macro/status_macros.h" namespace privacy_sandbox::bidding_auction_servers { diff --git a/services/common/util/build_info.h b/services/common/util/build_info.h new file mode 100644 index 00000000..b100de3b --- /dev/null +++ b/services/common/util/build_info.h @@ -0,0 +1,26 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#ifndef SERVICES_COMMON_UTIL_BUILD_INFO_H_ +#define SERVICES_COMMON_UTIL_BUILD_INFO_H_ + +#include "absl/strings/string_view.h" + +namespace privacy_sandbox::bidding_auction_servers { + +extern const std::string_view kBuildVersion; + +} // namespace privacy_sandbox::bidding_auction_servers + +#endif // SERVICES_COMMON_UTIL_BUILD_INFO_H_ diff --git a/services/common/util/error_accumulator.h b/services/common/util/error_accumulator.h index 98298959..c8b55cd8 100644 --- a/services/common/util/error_accumulator.h +++ b/services/common/util/error_accumulator.h @@ -26,8 +26,8 @@ #include "absl/strings/string_view.h" #include "services/common/loggers/source_location_context.h" #include "services/common/util/error_reporter.h" -#include "src/cpp/logger/request_context_impl.h" -#include "src/cpp/util/status_macro/source_location.h" +#include "src/logger/request_context_impl.h" +#include "src/util/status_macro/source_location.h" namespace privacy_sandbox::bidding_auction_servers { diff --git a/services/common/util/error_categories.h b/services/common/util/error_categories.h index c315d83e..353769d1 100644 --- a/services/common/util/error_categories.h +++ b/services/common/util/error_categories.h @@ -45,19 +45,64 @@ inline constexpr char kNonEmptyBuyerInputMalformed[] = "BuyerInput map is present but malformed: %s"; inline constexpr char kBadProtectedAudienceBinaryProto[] = "Unable to decode ProtectedAudienceInput binary proto"; +inline constexpr char kBadBinaryProto[] = "Unable to decode binary proto"; inline constexpr char kBadCompressedBuyerInput[] = "Unable to decompress buyer input for buyer: %s"; inline constexpr char kBadBuyerInputProto[] = "Unable to decode BuyerInput binary proto for buyer: %s"; +inline constexpr char kPASSignalsForComponentAuction[] = + "Unsupported component auction input (protected signals) for buyer: %s"; inline constexpr char kMalformedBuyerInput[] = "Malformed buyer input."; +inline constexpr char kEmptyProtectedAuctionCiphertextError[] = + "protected_auction_ciphertext must be non-null."; +inline constexpr char kUnsupportedClientType[] = "Unsupported client type."; inline constexpr char kErrorDecryptingCiphertextError[] = "Error while decrypting protected_auction_ciphertext: %s."; +inline constexpr 
char kErrorDecryptingAuctionResultError[] = + "Error while decrypting auction_result_ciphertext: %s."; +inline constexpr char kErrorDecodingAuctionResultError[] = + "Error while decoding component auction_result: %s."; +inline constexpr char kErrorInAuctionResult[] = + "Error while validating component auction_result: %s."; +inline constexpr char kMismatchedGenerationIdInAuctionResultError[] = + "Mismatched generationId."; +inline constexpr char kEmptyComponentSellerInAuctionResultError[] = + "Component Seller Auction Result must contain component seller domain"; +inline constexpr char kMismatchedTopLevelSellerInAuctionResultError[] = + "Mismatched top level seller."; +inline constexpr char kUnsupportedAdTypeInAuctionResultError[] = + "Unsupported ad type."; +inline constexpr char kTopLevelWinReportingUrlsInAuctionResultError[] = + "Top Level Win Reporting URLs should not be present."; // Server side errors listed here. inline constexpr char kInternalError[] = "Internal Error"; inline constexpr char kEncryptionFailed[] = "Encryption Failure."; inline constexpr char kRequestCancelled[] = "Request Cancelled by Client."; +// Constants for bad Ad server provided inputs. +inline constexpr char kEmptySellerSignals[] = + "Seller signals missing in auction config"; +inline constexpr char kEmptyAuctionSignals[] = + "Auction signals missing in auction config"; +inline constexpr char kEmptyBuyerList[] = "No buyers specified"; +inline constexpr char kEmptySeller[] = + "Seller origin missing in auction config"; +inline constexpr char kEmptyBuyerSignals[] = + "Buyer signals missing in auction config for buyer: %s"; +inline constexpr char kUnknownClientType[] = + "Unknown client type in SelectAdRequest"; +inline constexpr char kWrongSellerDomain[] = + "Seller domain passed in request does not match this server's domain"; +inline constexpr char kEmptyBuyerInPerBuyerConfig[] = + "One or more buyer keys are empty in per buyer config map"; +inline constexpr char kDeviceComponentAuctionWithAndroid[] = + "Device orchestrated Component Auctions not supported for Android"; +inline constexpr char kNoComponentAuctionResults[] = + "No Component Auction Results for Top Level Seller auction"; +inline constexpr char kEmptyComponentAuctionResults[] = + "Empty Component Auction Results for Top Level Seller auction"; + // Error handling related constants. inline constexpr char kErrorDelimiter[] = "; "; diff --git a/services/common/util/error_reporter.h b/services/common/util/error_reporter.h index ffe0a050..5ee03b62 100644 --- a/services/common/util/error_reporter.h +++ b/services/common/util/error_reporter.h @@ -21,8 +21,9 @@ #include #include "absl/strings/string_view.h" +#include "services/common/loggers/source_location_context.h" #include "services/common/util/error_categories.h" -#include "src/cpp/util/status_macro/source_location.h" +#include "src/util/status_macro/source_location.h" namespace privacy_sandbox::bidding_auction_servers { diff --git a/services/common/util/generate_build_info_cc b/services/common/util/generate_build_info_cc new file mode 100755 index 00000000..4fd20cd9 --- /dev/null +++ b/services/common/util/generate_build_info_cc @@ -0,0 +1,34 @@ +#!/bin/bash +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +function value_for() { + local -r var="$1" + local -r line="$(grep -wh "^${var}" bazel-out/{volatile,stable}-status.txt)" + printf "%s\n" "${line//${var} }" +} + +VERSION="$(value_for STABLE_VERSION)" +readonly VERSION + +cat < HpkeEncrypt( const server_common::CloudPlatform& cloud_platform) { auto key = key_fetcher_manager.GetPublicKey(cloud_platform); if (!key.ok()) { - const std::string error = + std::string error = absl::StrCat("Could not find public key for HPKE encryption: ", key.status().message()); ABSL_LOG(ERROR) << error; @@ -35,8 +35,8 @@ absl::StatusOr HpkeEncrypt( auto encrypt_response = crypto_client.HpkeEncrypt(key.value(), plaintext); if (!encrypt_response.ok()) { - const std::string error = absl::StrCat("Failed encrypting request: ", - encrypt_response.status().message()); + std::string error = absl::StrCat("Failed encrypting request: ", + encrypt_response.status().message()); ABSL_LOG(ERROR) << error; return absl::InternalError(std::move(error)); } @@ -67,8 +67,8 @@ absl::StatusOr HpkeDecrypt( } auto decrypt_response = crypto_client.HpkeDecrypt(*private_key, ciphertext); if (!decrypt_response.ok()) { - const std::string error = absl::StrCat("Failed decrypting request: ", - decrypt_response.status().message()); + std::string error = absl::StrCat("Failed decrypting request: ", + decrypt_response.status().message()); ABSL_LOG(ERROR) << error; return absl::Status(absl::StatusCode::kInvalidArgument, std::move(error)); } diff --git a/services/common/util/hpke_utils.h b/services/common/util/hpke_utils.h index b6e67f81..00d846c1 100644 --- a/services/common/util/hpke_utils.h +++ b/services/common/util/hpke_utils.h @@ -22,7 +22,7 @@ #include "absl/status/statusor.h" #include "services/common/constants/user_error_strings.h" #include "services/common/encryption/crypto_client_wrapper_interface.h" -#include "src/cpp/encryption/key_fetcher/src/key_fetcher_manager.h" +#include "src/encryption/key_fetcher/key_fetcher_manager.h" namespace privacy_sandbox::bidding_auction_servers { diff --git a/services/common/util/hpke_utils_test.cc b/services/common/util/hpke_utils_test.cc index 1dcc3097..21e196de 100644 --- a/services/common/util/hpke_utils_test.cc +++ b/services/common/util/hpke_utils_test.cc @@ -19,7 +19,7 @@ #include "gtest/gtest.h" #include "services/common/encryption/mock_crypto_client_wrapper.h" -#include "src/cpp/encryption/key_fetcher/mock/mock_key_fetcher_manager.h" +#include "src/encryption/key_fetcher/mock/mock_key_fetcher_manager.h" namespace privacy_sandbox::bidding_auction_servers { namespace { diff --git a/services/common/util/json_util.h b/services/common/util/json_util.h index 8f4b566c..264dafba 100644 --- a/services/common/util/json_util.h +++ b/services/common/util/json_util.h @@ -30,9 +30,9 @@ namespace privacy_sandbox::bidding_auction_servers { -#define PS_ASSIGN_IF_PRESENT(dst, src, key, getter) \ - if (auto it = src.FindMember(key); it != src.MemberEnd()) { \ - dst = it->value.getter(); \ +#define PS_ASSIGN_IF_PRESENT(dst, src, key, getter) \ + if (auto it = (src).FindMember(key); it != (src).MemberEnd()) { \ + (dst) = it->value.getter(); \ } inline 
constexpr char kMissingMember[] = "Missing %s in the JSON document"; diff --git a/services/common/util/oblivious_http_utils.cc b/services/common/util/oblivious_http_utils.cc index ad826253..37bd5264 100644 --- a/services/common/util/oblivious_http_utils.cc +++ b/services/common/util/oblivious_http_utils.cc @@ -16,9 +16,9 @@ #include "absl/strings/escaping.h" #include "quiche/oblivious_http/oblivious_http_client.h" -#include "src/cpp/logger/request_context_logger.h" -#include "src/cpp/util/status_macro/status_macros.h" #include "src/include/openssl/hpke.h" +#include "src/logger/request_context_logger.h" +#include "src/util/status_macro/status_macros.h" namespace privacy_sandbox::bidding_auction_servers { diff --git a/services/common/util/oblivious_http_utils_test.cc b/services/common/util/oblivious_http_utils_test.cc index 534dbd56..a16643e3 100644 --- a/services/common/util/oblivious_http_utils_test.cc +++ b/services/common/util/oblivious_http_utils_test.cc @@ -22,7 +22,7 @@ #include "public/constants.h" #include "quiche/oblivious_http/common/oblivious_http_header_key_config.h" #include "services/common/test/utils/ohttp_utils.h" -#include "src/cpp/util/status_macro/status_macros.h" +#include "src/util/status_macro/status_macros.h" namespace privacy_sandbox::bidding_auction_servers { namespace { diff --git a/services/common/util/post_auction_signals.h b/services/common/util/post_auction_signals.h index de9db8be..9060954b 100644 --- a/services/common/util/post_auction_signals.h +++ b/services/common/util/post_auction_signals.h @@ -20,6 +20,7 @@ #include "absl/container/flat_hash_map.h" #include "absl/strings/string_view.h" +#include "api/bidding_auction_servers.pb.h" namespace privacy_sandbox::bidding_auction_servers { // Captures the signals from the auction winner. diff --git a/services/common/util/read_system.cc b/services/common/util/read_system.cc index 91bf30cf..32679d1d 100644 --- a/services/common/util/read_system.cc +++ b/services/common/util/read_system.cc @@ -83,7 +83,7 @@ Utilization ReadCpuTime(const std::vector& cpu_times, // calculate and update total_time size_t total_time_new = - std::accumulate(cpu_times.begin(), cpu_times.end(), 0); + std::accumulate(cpu_times.begin(), cpu_times.end(), 0UL); size_t total_time_diff = total_time_new >= total_time ? 
total_time_new - total_time : 0; total_time = total_time_new; diff --git a/services/common/util/reporting_util.cc b/services/common/util/reporting_util.cc index 75434ebc..b9011186 100644 --- a/services/common/util/reporting_util.cc +++ b/services/common/util/reporting_util.cc @@ -23,7 +23,7 @@ #include "absl/strings/str_cat.h" #include "absl/strings/str_replace.h" #include "services/common/util/json_util.h" -#include "src/cpp/util/status_macro/status_macros.h" +#include "src/util/status_macro/status_macros.h" namespace privacy_sandbox::bidding_auction_servers { @@ -188,7 +188,9 @@ SellerRejectionReason ToSellerRejectionReason( absl::string_view rejection_reason_str) { if (rejection_reason_str.empty()) { return SellerRejectionReason::SELLER_REJECTION_REASON_NOT_AVAILABLE; - } else if (kRejectionReasonInvalidBid == rejection_reason_str) { + } + + if (kRejectionReasonInvalidBid == rejection_reason_str) { return SellerRejectionReason::INVALID_BID; } else if (kRejectionReasonBidBelowAuctionFloor == rejection_reason_str) { return SellerRejectionReason::BID_BELOW_AUCTION_FLOOR; diff --git a/services/common/util/reporting_util.h b/services/common/util/reporting_util.h index 57549b33..443af3e8 100644 --- a/services/common/util/reporting_util.h +++ b/services/common/util/reporting_util.h @@ -24,7 +24,7 @@ #include "api/bidding_auction_servers.pb.h" #include "services/common/clients/http/http_fetcher_async.h" #include "services/common/util/post_auction_signals.h" -#include "src/cpp/logger/request_context_impl.h" +#include "src/logger/request_context_impl.h" namespace privacy_sandbox::bidding_auction_servers { diff --git a/services/common/util/reporting_util_test.cc b/services/common/util/reporting_util_test.cc index 84e17884..5985d93f 100644 --- a/services/common/util/reporting_util_test.cc +++ b/services/common/util/reporting_util_test.cc @@ -159,8 +159,7 @@ TEST(CreateDebugReportingHttpRequestTest, GetWithHighestOtherBidSuccess) { .rejection_reason = SellerRejectionReason::SELLER_REJECTION_REASON_NOT_AVAILABLE}; expected_url = "https://wikipedia.org?hob=2.18&m_hob=false"; - request = - CreateDebugReportingHttpRequest(url, std::move(placeholder_2), true); + request = CreateDebugReportingHttpRequest(url, placeholder_2, true); EXPECT_EQ(request.url, expected_url); } @@ -178,7 +177,7 @@ TEST(CreateDebugReportingHttpRequestTest, GetWithHighestOtherBidAsZero) { SellerRejectionReason::SELLER_REJECTION_REASON_NOT_AVAILABLE}; absl::string_view expected_url = "https://wikipedia.org?hob=0.00&m_hob=false"; HTTPRequest request = - CreateDebugReportingHttpRequest(url, std::move(placeholder_1), true); + CreateDebugReportingHttpRequest(url, placeholder_1, true); EXPECT_EQ(request.url, expected_url); } diff --git a/services/common/util/signal_handler.h b/services/common/util/signal_handler.h index 5b3cc89f..0298fa4f 100644 --- a/services/common/util/signal_handler.h +++ b/services/common/util/signal_handler.h @@ -22,7 +22,7 @@ #include #include -#include "src/cpp/util/status_macro/examine_stack.h" +#include "src/util/status_macro/examine_stack.h" namespace privacy_sandbox::bidding_auction_servers { diff --git a/services/inference_sidecar/README.md b/services/inference_sidecar/README.md new file mode 100644 index 00000000..78e395f4 --- /dev/null +++ b/services/inference_sidecar/README.md @@ -0,0 +1,174 @@ +# Privacy Sandbox - ML Inference in Bidding and Auction Services + +This directory contains the implementation of the inference sidecar, designed to execute ML +inference with the bidding server. 
+ +For a comprehensive overview, please refer to the +[Inference Overview](https://github.com/privacysandbox/protected-auction-services-docs/blob/main/inference_overview.md). + +## Status + +Experimental. A special build flag is required for Docker image creation. + +- Platform: Currently supports GCP. AWS support is in progress. +- ML Frameworks: TensorFlow and PyTorch are supported. + +## Build the B&A Inference Docker Image + +Use the `production/packaging/build_and_test_all_in_docker` script to build and push Docker images +of B&A to your GCP image repo. Please make sure you use the `inference_non_prod` build flag. + +Here's how to enable TensorFlow v2.14: + +```shell +cd services/inference_sidecar/modules/tensorflow_v2_14_0 +./builders/tools/bazel-debian run //:generate_artifacts + +export BAZEL_EXTRA_ARGS=--//:inference_runtime=tensorflow +cd $(git rev-parse --show-toplevel) +./production/packaging/build_and_test_all_in_docker \ + --service-path bidding_service --instance gcp --platform gcp \ + --no-precommit --no-tests \ + --gcp-image-repo $REPO_PATH \ + --build-flavor inference_non_prod --gcp-image-tag $IMAGE_TAG +unset BAZEL_EXTRA_ARGS +``` + +Similarly, to enable PyTorch v2.1, you can navigate to the +`services/inference_sidecar/modules/pytorch_v2_1_1` directory and execute the `generate_artifacts` +tool. During the `build_and_test_all_in_docker` step, set the `BAZEL_EXTRA_ARGS` environment +variable to `--//:inference_runtime=pytorch` instead. + +## Start the B&A servers in GCP + +- Create a GCS bucket and store ML models in it. +- Set runtime flags: `INFERENCE_SIDECAR_BINARY_PATH`, `INFERENCE_MODEL_BUCKET_NAME`, and + `INFERENCE_MODEL_BUCKET_PATHS`. + - Set `INFERENCE_SIDECAR_BINARY_PATH` to `/server/bin/inference_sidecar`. +- Refer to + [README.md](https://github.com/privacysandbox/bidding-auction-servers/tree/main/production/deploy/gcp/terraform/environment/demo/README.md). + +## Start the B&A servers locally for testing/debugging + +The servers run locally, and the ML models are read directly from the local disk. + +- Use the command-line flags: `--inference_sidecar_binary_path` and + `--inference_model_local_paths`. +- Use the `services/inference_sidecar/common/tools/debug/start_inference` script. + +## Trigger ML Inference + +To invoke ML inference within `generateBid()` or any other UDF: + +1. Create a `batchInferenceRequest` object. For each model: + + - Specify the `model_path` (where the model is located). + - Specify input `tensors` containing the data for the model. + +2. Serialize the `batchInferenceRequest` into a JSON string compatible with the inference API. + +3. Trigger inference. + + - Call the `runInference(json_string)` function, passing your serialized request as the + argument. This initiates the inference execution. + +4. Process results. + - Parse the inference output returned by `runInference()`, as sketched in the comments of + the example below. + - Extract the model predictions for your bid calculations. + +Here's the UDF example code: + +```javascript +function generateBid() { + const batchInferenceRequest = { + request: [ + { + model_path: '/model/path', + tensors: [ + { + tensor_name: 'input', + data_type: 'INT32', + tensor_shape: [1, 1], + tensor_content: ['8'], + }, + ], + }, + ], + }; + + const jsonRequest = JSON.stringify(batchInferenceRequest); + const inferenceResult = runInference(jsonRequest); + + // Implement parsing logic based on your model's output format.
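+  // NOTE: parseInferenceResult() below is a user-defined helper, not part of
+  // the inference API. As a hypothetical sketch, assuming the sidecar returns
+  // a JSON string whose "response" array mirrors the request layout, parsing
+  // might look like:
+  //   const parsed = JSON.parse(inferenceResult);
+  //   const prediction =
+  //       parseFloat(parsed.response[0].tensors[0].tensor_content[0]);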
+ const bidValue = parseInferenceResult(inferenceResult); + return { bid: bidValue }; +} +``` + +## Inference API + +A Batch Inference Request (called `request` in the JSON API) is an array of single inference requests. +Each single inference request contains a required `model_path` string field (the same path you +used to register the ML model), as well as a `tensors` array field. Each input tensor has three +required fields: `data_type`, `tensor_shape` and `tensor_content`. TensorFlow additionally requires a +`tensor_name` field, which should match the tensor's name in the SavedModel. + +`tensor_shape` is an array representing the shape of the tensor. The order of entries indicates the +layout of the values in the tensor's in-memory representation. The first entry is the outermost +dimension, which usually represents the input batch size. The last entry is the innermost dimension. +We support only dense tensors. + +`tensor_content` holds the flattened representation of the tensor, which can be reconstructed using +`tensor_shape`. Only the representation corresponding to the `data_type` field can be set. The number of +elements in `tensor_content` should be equal to the product of the `tensor_shape` elements; for example, a +tensor of shape `[1, 4]` will expect a flat array of 4 elements (e.g. `["1", "2", "7", "4"]`), and +one with a shape of `[2, 3]` will expect a 6-element one. + +Currently we support 6 tensor types (in both TensorFlow and PyTorch): double, float, int8, int16, +int32, int64. + +All numbers in `tensor_content` MUST be surrounded by quotes. + +See an example of a Batch Inference Request in JSON format with 2 models: + +```json +{ + "request" : [ + { + "model_path" : "my_bucket/models/pcvr/1/", + "tensors" : [ + { + "tensor_name": "feature1", + "data_type": "DOUBLE", + "tensor_shape": [2, 1], + "tensor_content": ["0.454920", "-0.25752"] + } + ] + }, + { + "model_path" : "my_bucket/models/pctr/2/", + "tensors" : [ + { + "tensor_name": "feature1", + "data_type": "INT32", + "tensor_shape": [2, 1], + "tensor_content": ["5", "6"] + }, + { + "tensor_name": "feature2", + "data_type": "FLOAT", + "tensor_shape": [2, 3], + "tensor_content": ["0.5", "0.6", "0.7", "0.8", "0.21", "-0.99"] + } + ] + } + ] +} +``` + +- Please refer to + [request_parser.h](https://github.com/privacysandbox/bidding-auction-servers/tree/main/services/inference_sidecar/common/utils/request_parser.h) + and + [inference_payload.proto](https://github.com/privacysandbox/bidding-auction-servers/tree/main/services/inference_sidecar/common/proto/inference_payload.proto). +- Note: Protocol buffer API support is not currently available; `inference_payload.proto` provides + a documentation-only schema. diff --git a/services/inference_sidecar/common/.bazelrc b/services/inference_sidecar/common/.bazelrc new file mode 100644 index 00000000..dd13100e --- /dev/null +++ b/services/inference_sidecar/common/.bazelrc @@ -0,0 +1,54 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
+
+build --announce_rc
+build --verbose_failures
+build --config=clang
+build --compilation_mode=opt
+build --output_filter='^//((?!(third_party):).)*$'
+build --color=yes
+
+build:clang --cxxopt=-fbracket-depth=512
+build:clang --client_env=CC=clang
+build:clang --cxxopt=-std=c++17
+build:clang --host_cxxopt=-std=c++17
+build:clang --client_env=BAZEL_CXXOPTS=-std=c++17
+# Makes thread safety analysis warnings into errors.
+build:clang --copt=-Werror=thread-safety
+
+# Address sanitizer, set action_env to segregate cache entries
+build:asan --action_env=PRIVACY_SANDBOX_SERVERS_ASAN=1
+build:asan --strip=never
+build:asan --compilation_mode=dbg
+build:asan --copt=-fsanitize=address
+build:asan --copt=-DADDRESS_SANITIZER
+build:asan --copt=-O1
+build:asan --copt=-g
+build:asan --copt=-fno-omit-frame-pointer
+build:asan --linkopt=-fsanitize=address
+build:asan --linkopt=-fuse-ld=lld
+build:asan --action_env=ASAN_OPTIONS=detect_leaks=1:color=always
+
+# Thread sanitizer, set action_env to segregate cache entries
+build:tsan --action_env=PRIVACY_SANDBOX_SERVERS_TSAN=1
+build:tsan --strip=never
+build:tsan --copt=-fsanitize=thread
+build:tsan --copt=-fno-omit-frame-pointer
+build:tsan --copt=-DGPR_NO_DIRECT_SYSCALLS
+build:tsan --copt=-DGRPC_TSAN
+build:tsan --copt=-g
+build:tsan --linkopt=-fsanitize=thread
+# This is needed to address false positive problem with abseil.
+# https://github.com/google/sanitizers/issues/953
+build:tsan --test_env=TSAN_OPTIONS=report_atomic_races=0
diff --git a/services/inference_sidecar/common/.bazelversion b/services/inference_sidecar/common/.bazelversion
new file mode 100644
index 00000000..798e3899
--- /dev/null
+++ b/services/inference_sidecar/common/.bazelversion
@@ -0,0 +1 @@
+6.3.0
diff --git a/services/inference_sidecar/common/.gitignore b/services/inference_sidecar/common/.gitignore
new file mode 100644
index 00000000..d5c5f7a0
--- /dev/null
+++ b/services/inference_sidecar/common/.gitignore
@@ -0,0 +1,11 @@
+# Bazel symlinks
+/bazel-*
+
+# Docker
+docker-buildx-*.log
+
+# Artifacts
+/artifacts
+
+# Tests
+/dist
diff --git a/services/inference_sidecar/common/BUILD b/services/inference_sidecar/common/BUILD
new file mode 100644
index 00000000..4464a27b
--- /dev/null
+++ b/services/inference_sidecar/common/BUILD
@@ -0,0 +1,121 @@
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("@rules_cc//cc:defs.bzl", "cc_binary", "cc_library", "cc_test")
+
+package(default_visibility = ["//visibility:public"])
+
+# We export inference_sidecar's main and test files. Inference sidecar binaries
+# with different inference backends need to be built in their respective
+# workspaces.
+exports_files([
+    "inference_sidecar_main.cc",
+    "inference_sidecar_test.cc",
+])
+
+cc_library(
+    name = "ipc_sidecar",
+    srcs = ["ipc_sidecar.cc"],
+    hdrs = ["ipc_sidecar.h"],
+    deps = [
+        "//modules:module_interface",
+        "//proto:inference_sidecar_cc_proto",
+        "//sandbox:sandbox_worker",
+        "@com_google_absl//absl/log",
+        "@google_privacysandbox_servers_common//src/util/status_macro:status_macros",
+    ],
+)
+
+cc_library(
+    name = "grpc_sidecar",
+    srcs = ["grpc_sidecar.cc"],
+    hdrs = ["grpc_sidecar.h"],
+    deps = [
+        "//modules:module_interface",
+        "//proto:inference_sidecar_cc_grpc_proto",
+        "//proto:inference_sidecar_cc_proto",
+        "//sandbox:sandbox_worker",
+        "@com_github_grpc_grpc//:grpc++",
+        "@com_google_absl//absl/log",
+        "@com_google_absl//absl/log:absl_log",
+        "@com_google_absl//absl/status",
+        "@google_privacysandbox_servers_common//src/util/status_macro:status_util",
+    ],
+)
+
+# Builds the inference sidecar using the test module.
+cc_binary(
+    name = "inference_sidecar_bin",
+    srcs = ["inference_sidecar_main.cc"],
+    deps = [
+        ":grpc_sidecar",
+        "//modules:test_module",
+        "@com_google_absl//absl/log:check",
+    ],
+)
+
+# Generates the inference sidecar as "inference_sidecar" at test time.
+genrule(
+    name = "inference_sidecar_test_target",
+    srcs = [":inference_sidecar_bin"],
+    outs = ["inference_sidecar"],
+    cmd = "cp $< $@",
+)
+
+# Renames a specific test model to "test_model" so that inference_sidecar_test.cc
+# can use a single hardcoded string to refer to the test model across
+# inference backends.
+genrule(
+    name = "gen_test_model",
+    srcs = ["//testdata:models/tensorflow_1_mib_saved_model.pb"],
+    outs = ["test_model"],
+    cmd = "cp $< $@",
+)
+
+cc_test(
+    name = "inference_sidecar_test",
+    size = "medium",
+    timeout = "short",
+    srcs = ["inference_sidecar_test.cc"],
+    data = [
+        ":gen_test_model",
+        ":inference_sidecar_test_target",
+    ],
+    flaky = True,
+    deps = [
+        "//proto:inference_sidecar_cc_grpc_proto",
+        "//proto:inference_sidecar_cc_proto",
+        "//sandbox:sandbox_executor",
+        "//utils:file_util",
+        "@com_github_grpc_grpc//:grpc++",
+        "@com_google_absl//absl/flags:flag",
+        "@com_google_absl//absl/flags:reflection",
+        "@com_google_absl//absl/status",
+        "@com_google_absl//absl/strings",
+        "@com_google_absl//absl/time",
+        "@com_google_googletest//:gtest",
+        "@com_google_googletest//:gtest_main",
+    ],
+)
+
+genrule(
+    name = "collect-logs",
+    outs = ["collect_logs.bin"],
+    cmd_bash = """cat << EOF > '$@'
+tools/collect-logs "\\$$@"
+EOF""",
+    executable = True,
+    local = True,
+    message = "copy bazel build and test logs",
+)
diff --git a/services/inference_sidecar/common/README.md b/services/inference_sidecar/common/README.md
new file mode 100644
index 00000000..ce8c2c14
--- /dev/null
+++ b/services/inference_sidecar/common/README.md
@@ -0,0 +1,15 @@
+# Privacy Sandbox - Inference Common Libraries
+
+`services/inference_sidecar/common` provides the libraries shared between the B&A servers and the
+inference sidecars.
+
+## How to build the inference common libraries
+
+This directory has its own `WORKSPACE` file, so change into it before running bazel:
+
+```sh
+cd services/inference_sidecar/common
+./builders/tools/bazel-debian build //:inference_sidecar_bin
+./builders/tools/bazel-debian test //...
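+# For example, run a single target such as the sidecar test defined in the
+# BUILD file above (usage sketch):
+./builders/tools/bazel-debian test //:inference_sidecar_test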
+```
diff --git a/services/inference_sidecar/common/WORKSPACE b/services/inference_sidecar/common/WORKSPACE
new file mode 100644
index 00000000..20f3ca92
--- /dev/null
+++ b/services/inference_sidecar/common/WORKSPACE
@@ -0,0 +1,77 @@
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
+load("//builders/bazel:deps.bzl", "python_deps")
+
+python_deps("//builders/bazel")
+
+### Dataplane dependencies
+http_archive(
+    name = "google_privacysandbox_servers_common",
+    # commit a7515f8 2024-03-18
+    sha256 = "0a0bbe51c193f9dbbd0c17ff96db9883d3cd4980df150c8747d8520071dce0a7",
+    strip_prefix = "data-plane-shared-libraries-a7515f845ef463450baddff60099a78a2e8eadc3",
+    urls = [
+        "https://github.com/privacysandbox/data-plane-shared-libraries/archive/a7515f845ef463450baddff60099a78a2e8eadc3.zip",
+    ],
+)
+
+load(
+    "@google_privacysandbox_servers_common//third_party:cpp_deps.bzl",
+    data_plane_shared_deps_cpp = "cpp_dependencies",
+)
+
+data_plane_shared_deps_cpp()
+
+load("@google_privacysandbox_servers_common//third_party:deps1.bzl", data_plane_shared_deps1 = "deps1")
+
+data_plane_shared_deps1()
+
+load("@com_google_googleapis//:repository_rules.bzl", "switched_rules_by_language")
+
+switched_rules_by_language(
+    name = "com_google_googleapis_imports",
+    cc = True,
+    grpc = True,
+)
+
+http_archive(
+    name = "rapidjson",
+    build_file = "//:third_party/rapidjson.BUILD.bazel",
+    sha256 = "bf7ced29704a1e696fbccf2a2b4ea068e7774fa37f6d7dd4039d0787f8bed98e",
+    strip_prefix = "rapidjson-1.1.0",
+    url = "https://github.com/Tencent/rapidjson/archive/v1.1.0.tar.gz",
+)
+
+### gRPC dependencies
+load("@com_github_grpc_grpc//bazel:grpc_deps.bzl", "grpc_deps")
+
+grpc_deps()
+
+load("@com_google_protobuf//:protobuf_deps.bzl", "protobuf_deps")
+
+protobuf_deps()
+
+load("@google_privacysandbox_servers_common//build_defs/cc/shared:sandboxed_api.bzl", "sandboxed_api")
+
+### Sandbox2 dependencies
+sandboxed_api()
+
+load("@com_google_sandboxed_api//sandboxed_api/bazel:llvm_config.bzl", "llvm_disable_optional_support_deps")
+load("@com_google_sandboxed_api//sandboxed_api/bazel:sapi_deps.bzl", "sapi_deps")
+
+llvm_disable_optional_support_deps()
+
+sapi_deps()
diff --git a/services/inference_sidecar/common/benchmark/BUILD b/services/inference_sidecar/common/benchmark/BUILD
new file mode 100644
index 00000000..bc3b5454
--- /dev/null
+++ b/services/inference_sidecar/common/benchmark/BUILD
@@ -0,0 +1,58 @@
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("@rules_cc//cc:defs.bzl", "cc_binary")
+
+cc_binary(
+    name = "sandbox_benchmark",
+    srcs = [
+        "sandbox_benchmark.cc",
+    ],
+    data = [
+        ":inference_sidecar_ipc_test_target",
+        "//:gen_test_model",
+        "//:inference_sidecar_test_target",
+    ],
+    deps = [
+        "//proto:inference_sidecar_cc_grpc_proto",
+        "//proto:inference_sidecar_cc_proto",
+        "//sandbox:sandbox_executor",
+        "//utils:file_util",
+        "@com_github_grpc_grpc//:grpc++",
+        "@com_google_absl//absl/log:check",
+        "@com_google_absl//absl/status",
+        "@com_google_absl//absl/strings",
+        "@com_google_absl//absl/synchronization",
+        "@com_google_benchmark//:benchmark",
+        "@com_google_benchmark//:benchmark_main",
+    ],
+)
+
+# Builds the IPC inference sidecar for testing.
+cc_binary(
+    name = "ipc_inference_sidecar_test_bin",
+    srcs = ["ipc_sidecar_main.cc"],
+    deps = [
+        "//:ipc_sidecar",
+        "//modules:test_module",
+        "@com_google_absl//absl/log:check",
+    ],
+)
+
+genrule(
+    name = "inference_sidecar_ipc_test_target",
+    srcs = [":ipc_inference_sidecar_test_bin"],
+    outs = ["ipc_inference_sidecar"],
+    cmd = "cp $< $@",
+)
diff --git a/services/inference_sidecar/common/benchmark/ipc_sidecar_main.cc b/services/inference_sidecar/common/benchmark/ipc_sidecar_main.cc
new file mode 100644
index 00000000..e2917ce9
--- /dev/null
+++ b/services/inference_sidecar/common/benchmark/ipc_sidecar_main.cc
@@ -0,0 +1,25 @@
+// Copyright 2024 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// IPC sidecar for testing.
+
+#include "absl/log/check.h"
+
+#include "ipc_sidecar.h"
+
+int main(int argc, char** argv) {
+  CHECK(privacy_sandbox::bidding_auction_servers::inference::Run().ok())
+      << "Unsuccessful run of the inference sidecar.";
+  return 0;
+}
diff --git a/services/inference_sidecar/common/benchmark/sandbox_benchmark.cc b/services/inference_sidecar/common/benchmark/sandbox_benchmark.cc
new file mode 100644
index 00000000..85252e53
--- /dev/null
+++ b/services/inference_sidecar/common/benchmark/sandbox_benchmark.cc
@@ -0,0 +1,323 @@
+// Copyright 2024 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// This benchmark measures the performance of the sandbox executor (e.g.,
+// bidding server) and the sandbox worker (e.g., inference sidecar).
+//
+// Run the benchmark as follows:
+// builders/tools/bazel-debian run //benchmark:sandbox_benchmark -- \
+//   --benchmark_counters_tabular=true --benchmark_repetitions=5 \
+//   --benchmark_min_warmup_time=1 > /tmp/report.txt
+
+// How to read the benchmark results:
+//
+// Benchmark name:
+// * `BM_Multiworker_*`: Triggers multiple sandbox workers. The default
+//   benchmarks always use a single sandbox worker.
+// * `process_time/real_time`: Measures the wall time for latency and the CPU
+//   time of all threads, not just the main thread.
+// * `threads:8`: The number of threads concurrently executing the benchmark.
+//
+// Metrics:
+// * `Time`: The total elapsed wall clock time per Iteration.
+// * `CPU`: The total CPU time spent by all the threads per Iteration.
+// * `Iterations`: The number of serial executions.
+// * `Throughput` & `items_per_second`: The number of Iterations per second.
+// * `Latency`: Average time spent per Iteration.
+// * `NumWorkers`: The number of sandbox workers (or, inference sidecars)
+//   running.
+
+#include <unistd.h>
+
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "absl/log/check.h"
+#include "absl/status/status.h"
+#include "absl/strings/string_view.h"
+#include "absl/synchronization/mutex.h"
+#include "benchmark/benchmark.h"
+#include "proto/inference_sidecar.grpc.pb.h"
+#include "proto/inference_sidecar.pb.h"
+#include "sandbox/sandbox_executor.h"
+#include "utils/file_util.h"
+
+namespace privacy_sandbox::bidding_auction_servers::inference {
+namespace {
+
+constexpr absl::string_view kGrpcInferenceSidecarBinary = "inference_sidecar";
+constexpr absl::string_view kIpcInferenceSidecarBinary =
+    "benchmark/ipc_inference_sidecar";
+constexpr absl::string_view kTestModelPath = "test_model";
+constexpr char kJsonString[] = R"json({
+  "request" : [{
+    "model_path" : "my_bucket/models/pcvr_models/1/",
+    "tensors" : [
+      {
+        "data_type": "DOUBLE",
+        "tensor_shape": [
+          1,
+          1
+        ],
+        "tensor_content": ["3.14"]
+      }
+    ]
+  }]
+})json";
+constexpr char kNumWorkers[] = "NumWorkers";
+constexpr int kMaxThreads = 32;
+
+static void ExportMetrics(benchmark::State& state) {
+  state.SetItemsProcessed(state.iterations());
+
+  state.counters["Throughput"] =
+      benchmark::Counter(state.iterations(), benchmark::Counter::kIsRate);
+  state.counters["Latency"] =
+      benchmark::Counter(state.iterations(), benchmark::Counter::kIsRate |
+                                                 benchmark::Counter::kInvert);
+}
+
+static void BM_Multiworker_Predict_GRPC(benchmark::State& state) {
+  SandboxExecutor executor(kGrpcInferenceSidecarBinary, {""});
+  CHECK_EQ(executor.StartSandboxee().code(), absl::StatusCode::kOk);
+
+  std::shared_ptr<grpc::Channel> client_channel =
+      grpc::CreateInsecureChannelFromFd("GrpcChannel",
+                                        executor.FileDescriptor());
+  std::unique_ptr<InferenceService::Stub> stub =
+      InferenceService::NewStub(client_channel);
+
+  RegisterModelRequest register_model_request;
+  RegisterModelResponse register_model_response;
+  CHECK(PopulateRegisterModelRequest(kTestModelPath, register_model_request)
+            .ok());
+  grpc::ClientContext context;
+  grpc::Status status = stub->RegisterModel(&context, register_model_request,
+                                            &register_model_response);
+  CHECK(status.ok()) << status.error_message();
+
+  for (auto _ : state) {
+    PredictRequest predict_request;
+    predict_request.set_input(kJsonString);
+
+    PredictResponse predict_response;
+    grpc::ClientContext context;
+    grpc::Status status =
+        stub->Predict(&context, predict_request, &predict_response);
+    CHECK(status.ok()) << status.error_message();
+  }
+
+  absl::StatusOr<sandbox2::Result> result = executor.StopSandboxee();
+  CHECK(result.ok());
+  CHECK_EQ(result->final_status(), sandbox2::Result::EXTERNAL_KILL);
+  CHECK_EQ(result->reason_code(), 0);
+
+  state.counters[kNumWorkers] = 1;
+  ExportMetrics(state);
+}
+
+static void BM_Predict_GRPC(benchmark::State& state) {
+  static std::unique_ptr<SandboxExecutor> executor = nullptr;
+  static std::unique_ptr<InferenceService::Stub> stub = nullptr;
+
+  if (state.thread_index() == 0) {
+    const std::vector<std::string> arg = {""};
+    executor =
+        std::make_unique<SandboxExecutor>(kGrpcInferenceSidecarBinary, arg);
+    CHECK_EQ(executor->StartSandboxee().code(), absl::StatusCode::kOk);
+
+    std::shared_ptr<grpc::Channel> client_channel =
+        grpc::CreateInsecureChannelFromFd("GrpcChannel",
+                                          executor->FileDescriptor());
+    stub = InferenceService::NewStub(client_channel);
+
+    RegisterModelRequest register_model_request;
+    RegisterModelResponse register_model_response;
+    CHECK(PopulateRegisterModelRequest(kTestModelPath, register_model_request)
+              .ok());
+    grpc::ClientContext context;
+    grpc::Status status = stub->RegisterModel(&context, register_model_request,
+                                              &register_model_response);
+    CHECK(status.ok()) << status.error_message();
+  }
+
+  for (auto _ : state) {
+    PredictRequest predict_request;
+    predict_request.set_input(kJsonString);
+
+    PredictResponse predict_response;
+    grpc::ClientContext context;
+    grpc::Status status =
+        stub->Predict(&context, predict_request, &predict_response);
+    CHECK(status.ok()) << status.error_message();
+  }
+
+  if (state.thread_index() == 0) {
+    absl::StatusOr<sandbox2::Result> result = executor->StopSandboxee();
+    CHECK(result.ok());
+    CHECK_EQ(result->final_status(), sandbox2::Result::EXTERNAL_KILL);
+    CHECK_EQ(result->reason_code(), 0);
+
+    state.counters[kNumWorkers] = 1;
+  }
+
+  ExportMetrics(state);
+}
+
+static void BM_Multiworker_Predict_IPC(benchmark::State& state) {
+  SandboxExecutor executor(kIpcInferenceSidecarBinary, {""});
+  CHECK_EQ(executor.StartSandboxee().code(), absl::StatusCode::kOk);
+  sandbox2::Comms comms(executor.FileDescriptor());
+
+  RegisterModelRequest register_request;
+  CHECK(PopulateRegisterModelRequest(kTestModelPath, register_request).ok());
+  CHECK(comms.SendProtoBuf(register_request));
+
+  for (auto _ : state) {
+    PredictRequest predict_request;
+    predict_request.set_input(kJsonString);
+    CHECK(comms.SendProtoBuf(predict_request));
+
+    PredictResponse predict_response;
+    const int kNumTrials = 10;
+    bool success = false;
+    for (int i = 0; i < kNumTrials; i++) {
+      success = comms.RecvProtoBuf(&predict_response);
+      if (success) break;
+      absl::SleepFor(absl::Microseconds(500));
+    }
+    CHECK(success);
+  }
+
+  absl::StatusOr<sandbox2::Result> result = executor.StopSandboxee();
+  CHECK(result.ok());
+  CHECK_EQ(result->final_status(), sandbox2::Result::EXTERNAL_KILL);
+  CHECK_EQ(result->reason_code(), 0);
+
+  state.counters[kNumWorkers] = 1;
+  ExportMetrics(state);
+}
+
+static void BM_Predict_IPC(benchmark::State& state) {
+  static absl::Mutex mutex(absl::kConstInit);
+  static std::unique_ptr<SandboxExecutor> executor = nullptr;
+  static std::unique_ptr<sandbox2::Comms> comms = nullptr;
+
+  if (state.thread_index() == 0) {
+    const std::vector<std::string> arg = {""};
+    executor =
+        std::make_unique<SandboxExecutor>(kIpcInferenceSidecarBinary, arg);
+    CHECK_EQ(executor->StartSandboxee().code(), absl::StatusCode::kOk);
+    comms = std::make_unique<sandbox2::Comms>(executor->FileDescriptor());
+
+    RegisterModelRequest register_request;
+    CHECK(PopulateRegisterModelRequest(kTestModelPath, register_request).ok());
+    CHECK(comms->SendProtoBuf(register_request));
+  }
+
+  for (auto _ : state) {
+    PredictRequest predict_request;
+    predict_request.set_input(kJsonString);
+
+    absl::MutexLock lock(&mutex);
+    CHECK(comms->SendProtoBuf(predict_request));
+
+    PredictResponse predict_response;
+    const int kNumTrials = 10;
+    bool success = false;
+    for (int i = 0; i < kNumTrials; i++) {
+      success = comms->RecvProtoBuf(&predict_response);
+      if (success) break;
+      absl::SleepFor(absl::Microseconds(500));
+    }
+    CHECK(success);
+  }
+
+  if (state.thread_index() == 0) {
+    absl::StatusOr<sandbox2::Result> result = executor->StopSandboxee();
+    CHECK(result.ok());
+    CHECK_EQ(result->final_status(), sandbox2::Result::EXTERNAL_KILL);
+    CHECK_EQ(result->reason_code(), 0);
+
+    state.counters[kNumWorkers] = 1;
+  }
+
+  ExportMetrics(state);
+}
+
+// BM_Register_IPC is not implemented. It's too slow to run the microbenchmark
+// because it requires a new sandbox worker per model registration.
+static void BM_Register_GRPC(benchmark::State& state) {
+  SandboxExecutor executor(kGrpcInferenceSidecarBinary, {""});
+  CHECK_EQ(executor.StartSandboxee().code(), absl::StatusCode::kOk);
+
+  std::shared_ptr<grpc::Channel> client_channel =
+      grpc::CreateInsecureChannelFromFd("GrpcChannel",
+                                        executor.FileDescriptor());
+  std::unique_ptr<InferenceService::Stub> stub =
+      InferenceService::NewStub(client_channel);
+
+  for (auto _ : state) {
+    RegisterModelRequest register_model_request;
+    RegisterModelResponse register_model_response;
+    CHECK(PopulateRegisterModelRequest(kTestModelPath, register_model_request)
+              .ok());
+    grpc::ClientContext context;
+    grpc::Status status = stub->RegisterModel(&context, register_model_request,
+                                              &register_model_response);
+    CHECK(status.ok()) << status.error_message();
+  }
+
+  absl::StatusOr<sandbox2::Result> result = executor.StopSandboxee();
+  CHECK(result.ok());
+  CHECK_EQ(result->final_status(), sandbox2::Result::EXTERNAL_KILL);
+  CHECK_EQ(result->reason_code(), 0);
+
+  state.counters[kNumWorkers] = 1;
+  ExportMetrics(state);
+}
+
+// Register the function as a benchmark
+BENCHMARK(BM_Register_GRPC)->MeasureProcessCPUTime()->UseRealTime();
+
+// Use a single sandbox worker (or, inference sidecar) to run the execution in
+// parallel.
+BENCHMARK(BM_Predict_IPC)
+    ->ThreadRange(1, kMaxThreads)
+    ->MeasureProcessCPUTime()
+    ->UseRealTime();
+BENCHMARK(BM_Predict_GRPC)
+    ->ThreadRange(1, kMaxThreads)
+    ->MeasureProcessCPUTime()
+    ->UseRealTime();
+
+// BM_Multiworker benchmarks trigger one sandbox worker (or, inference
+// sidecar) per Thread. The NumWorkers counter is the number of running
+// sandbox workers in the benchmark.
+BENCHMARK(BM_Multiworker_Predict_IPC) + ->ThreadRange(1, kMaxThreads) + ->MeasureProcessCPUTime() + ->UseRealTime(); +BENCHMARK(BM_Multiworker_Predict_GRPC) + ->ThreadRange(1, kMaxThreads) + ->MeasureProcessCPUTime() + ->UseRealTime(); + +// Run the benchmark +BENCHMARK_MAIN(); + +} // namespace +} // namespace privacy_sandbox::bidding_auction_servers::inference diff --git a/services/inference_sidecar/common/builders/.coverage.bazelrc b/services/inference_sidecar/common/builders/.coverage.bazelrc new file mode 100644 index 00000000..58b34d6e --- /dev/null +++ b/services/inference_sidecar/common/builders/.coverage.bazelrc @@ -0,0 +1,17 @@ +coverage --action_env=PRIVACY_SANDBOX_SERVERS_BUILD_CONFIG_COVERAGE=1 +coverage --config=clang_coverage +coverage --build_tests_only + +# clang_coverage -- the set of args to configure C++ code coverage in bazel with clang +coverage:clang_coverage --compilation_mode=opt +coverage:clang_coverage --combined_report=lcov +coverage:clang_coverage --nocache_test_results +coverage:clang_coverage --experimental_use_llvm_covmap +coverage:clang_coverage --experimental_generate_llvm_lcov +coverage:clang_coverage --action_env=BAZEL_USE_LLVM_NATIVE_COVERAGE=1 +coverage:clang_coverage --action_env=GCOV=/usr/bin/llvm-profdata +coverage:clang_coverage --action_env=BAZEL_LLVM_COV=/usr/bin/llvm-cov +coverage:clang_coverage --action_env=BAZEL_LLVM_PROFDATA=/usr/bin/llvm-profdata +coverage:clang_coverage --strategy=TestRunner=sandboxed,local +coverage:clang_coverage --strategy=CoverageReport=sandboxed,local +coverage:clang_coverage --experimental_scale_timeouts=1.5 diff --git a/services/inference_sidecar/common/builders/.github/workflows/scorecard.yaml b/services/inference_sidecar/common/builders/.github/workflows/scorecard.yaml new file mode 100644 index 00000000..dbcb6200 --- /dev/null +++ b/services/inference_sidecar/common/builders/.github/workflows/scorecard.yaml @@ -0,0 +1,86 @@ +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Workflow for the OSSF Scorecards Action +# https://github.com/ossf/scorecard-action#installation + +name: Scorecard supply-chain security +on: + # For Branch-Protection check. Only the default branch is supported. See + # https://github.com/ossf/scorecard/blob/main/docs/checks.md#branch-protection + branch_protection_rule: + # To guarantee Maintained check is occasionally updated. See + # https://github.com/ossf/scorecard/blob/main/docs/checks.md#maintained + schedule: + - cron: '35 10 * * 4' + push: + branches: + - main + +# Declare default permissions as read only. +permissions: read-all + +jobs: + analysis: + name: Scorecard analysis + runs-on: ubuntu-latest + permissions: + # Needed to upload the results to code-scanning dashboard. + security-events: write + # Needed to publish results and get a badge (see publish_results below). + id-token: write + # Uncomment the permissions below if installing in a private repository. 
+ # contents: read + # actions: read + + steps: + - name: Checkout code + uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # v3.1.0 + with: + persist-credentials: false + + - name: Run analysis + uses: ossf/scorecard-action@e38b1902ae4f44df626f11ba0734b14fb91f8f86 # v2.1.2 + with: + results_file: results.sarif + results_format: sarif + # (Optional) "write" PAT token. Uncomment the `repo_token` line below if: + # - you want to enable the Branch-Protection check on a *public* repository, or + # - you are installing Scorecard on a *private* repository + # To create the PAT, follow the steps in https://github.com/ossf/scorecard-action#authentication-with-pat. + # repo_token: ${{ secrets.SCORECARD_TOKEN }} + + # Public repositories: + # - Publish results to OpenSSF REST API for easy access by consumers + # - Allows the repository to include the Scorecard badge. + # - See https://github.com/ossf/scorecard-action#publishing-results. + # For private repositories: + # - `publish_results` will always be set to `false`, regardless + # of the value entered here. + publish_results: true + + # Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF + # format to the repository Actions tab. + - name: Upload artifact + uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8 # v3.1.0 + with: + name: SARIF file + path: results.sarif + retention-days: 5 + + # Upload the results to GitHub's code scanning dashboard. + - name: Upload to code-scanning + uses: github/codeql-action/upload-sarif@17573ee1cc1b9d061760f3a006fc4aac4f944fd5 # v2.2.4 + with: + sarif_file: results.sarif diff --git a/services/inference_sidecar/common/builders/.gitignore b/services/inference_sidecar/common/builders/.gitignore new file mode 100644 index 00000000..da211595 --- /dev/null +++ b/services/inference_sidecar/common/builders/.gitignore @@ -0,0 +1,4 @@ +*~ +*.tmp +*.log +*.eif diff --git a/services/inference_sidecar/common/builders/.markdownlint-cli2.yaml b/services/inference_sidecar/common/builders/.markdownlint-cli2.yaml new file mode 120000 index 00000000..212461e1 --- /dev/null +++ b/services/inference_sidecar/common/builders/.markdownlint-cli2.yaml @@ -0,0 +1 @@ +etc/.markdownlint-cli2.yaml \ No newline at end of file diff --git a/services/inference_sidecar/common/builders/.pre-commit-config.yaml b/services/inference_sidecar/common/builders/.pre-commit-config.yaml new file mode 100644 index 00000000..c7997b3e --- /dev/null +++ b/services/inference_sidecar/common/builders/.pre-commit-config.yaml @@ -0,0 +1,150 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# See https://pre-commit.com for more information +# See https://pre-commit.com/hooks.html for more hooks +exclude: (?x)^( + version.txt + )$ + +fail_fast: true +repos: +- repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.4.0 + hooks: + - id: end-of-file-fixer + - id: fix-byte-order-marker + - id: mixed-line-ending + - id: trailing-whitespace + - id: check-case-conflict + - id: check-merge-conflict + - id: check-yaml + - id: check-json + - id: check-symlinks + - id: check-added-large-files + - id: check-vcs-permalinks + - id: check-executables-have-shebangs + - id: detect-private-key + +- repo: https://github.com/jumanjihouse/pre-commit-hooks + rev: 3.0.0 + hooks: + - id: git-check + - id: script-must-not-have-extension + - id: script-must-have-extension + - id: require-ascii + - id: shellcheck + +- repo: https://github.com/pre-commit/mirrors-clang-format + rev: v16.0.6 + hooks: + - id: clang-format + types_or: + - c++ + - c + +- repo: https://github.com/bufbuild/buf + rev: v1.23.1 + hooks: + - id: buf-format + +- repo: local + hooks: + - id: addlicense + name: addlicense + language: golang + additional_dependencies: + - github.com/google/addlicense@v1.1.1 + always_run: false + pass_filenames: true + entry: addlicense -v + types_or: + - text + + - id: addlicense-check + name: addlicense check + language: golang + additional_dependencies: + - github.com/google/addlicense@v1.1.1 + always_run: false + pass_filenames: true + entry: addlicense -check + types_or: + - text + + - id: terraform-fmt + name: terraform fmt + description: Run terraform via docker to format Terraform files + language: script + pass_filenames: false + entry: tools/terraform fmt -write=true -recursive + types_or: + - terraform + + - id: hadolint + name: Lint Dockerfiles + description: Run hadolint via docker to lint Dockerfiles + language: script + types_or: + - dockerfile + entry: tools/hadolint + +- repo: https://github.com/pre-commit/mirrors-prettier + rev: v3.0.0 + hooks: + - id: prettier + name: prettier markdown + types_or: + - markdown + +- repo: https://github.com/DavidAnson/markdownlint-cli2 + rev: v0.8.1 + hooks: + - id: markdownlint-cli2 + name: lint markdown + +- repo: local + hooks: + - id: buildifier + name: buildifier + description: Format bazel WORKSPACE, BUILD and .bzl files with a standard convention. 
+ language: golang + additional_dependencies: + - github.com/bazelbuild/buildtools/buildifier@6.1.1 + always_run: true + pass_filenames: true + types_or: + - bazel + entry: buildifier + args: + - -lint=fix + - -mode=fix + - -warnings=all + +- repo: https://github.com/cpplint/cpplint + rev: 1.6.1 + hooks: + - id: cpplint + types_or: + - c++ + - c + args: + - --filter=-build/c++11,+build/c++17,-build/header_guard,-build/include_order,+build/include_what_you_use,-build/include_subdir,-readability/casting,-readability/todo,-runtime/references + - --quiet + +- repo: https://github.com/psf/black + rev: 23.7.0 + hooks: + - id: black + name: black python formatter diff --git a/services/inference_sidecar/common/builders/.prettierignore b/services/inference_sidecar/common/builders/.prettierignore new file mode 100644 index 00000000..f38e52f8 --- /dev/null +++ b/services/inference_sidecar/common/builders/.prettierignore @@ -0,0 +1,3 @@ +/.git/ +/CHANGELOG.md +/google_internal/LATEST_RELEASE.md diff --git a/services/inference_sidecar/common/builders/.prettierrc.yaml b/services/inference_sidecar/common/builders/.prettierrc.yaml new file mode 120000 index 00000000..5f7e36f6 --- /dev/null +++ b/services/inference_sidecar/common/builders/.prettierrc.yaml @@ -0,0 +1 @@ +etc/.prettierrc.yaml \ No newline at end of file diff --git a/services/inference_sidecar/common/builders/.versionrc.json b/services/inference_sidecar/common/builders/.versionrc.json new file mode 120000 index 00000000..f0385a95 --- /dev/null +++ b/services/inference_sidecar/common/builders/.versionrc.json @@ -0,0 +1 @@ +etc/.versionrc.json \ No newline at end of file diff --git a/services/inference_sidecar/common/builders/CHANGELOG.md b/services/inference_sidecar/common/builders/CHANGELOG.md new file mode 100644 index 00000000..36788bbb --- /dev/null +++ b/services/inference_sidecar/common/builders/CHANGELOG.md @@ -0,0 +1,835 @@ +# Changelog + +All notable changes to this project will be documented in this file. See [commit-and-tag-version](https://github.com/absolute-version/commit-and-tag-version) for commit guidelines. 
+ +## 0.55.1 (2024-02-22) + + +### Bug Fixes + +* Do not invoke normalize-bazel-symlinks for cbuild --cmd + +## 0.55.0 (2024-02-22) + + +### Bug Fixes + +* Normalize bazel symlinks to a resolved path +* Pass the correct path for normalize-bazel-symlinks + +## 0.54.0 (2024-02-09) + + +### Features + +* Set cbuild workdir to pwd relative to root workspace + +## 0.53.0 (2024-01-25) + + +### Features + +* Add support to collect-coverage tool for custom lcov report + + +### Bug Fixes + +* Improve --cmd-profiler support + +## 0.52.0 (2023-12-02) + + +### Features + +* add python3.9 dev to bazel-debian + +## 0.51.0 (2023-11-30) + + +### Bug Fixes + +* Clean go build cache at the end of image build script + + +### Dependencies + +* **deps:** Upgrade bazelisk to 1.19.0 + +## 0.50.0 (2023-11-06) + + +### Features + +* Add openssh-client to build-debian image + +## 0.49.1 (2023-10-30) + + +### Bug Fixes + +* Add tools/wrk2 wrapper script + +## 0.49.0 (2023-10-27) + + +### Features + +* Add wrk2 to test-tools image +* Extend normalize-bazel-symlink to normalize within containers + + +### Dependencies + +* **deps:** Update versions in test-tools image + +## 0.48.0 (2023-10-11) + + +### Features + +* Add tools/bazel-amazonlinux2023 + + +### Bug Fixes + +* Add lldb symlink + +## 0.47.0 (2023-10-05) + + +### Features + +* Add --cmd-profiler flag to tools/cbuild +* Add google-pprof to coverage-tools image +* Add libprofiler.so to build-debian image + + +### Bug Fixes + +* Indicate default image in help text + +## 0.46.1 (2023-10-04) + + +### Bug Fixes + +* cbuild exits normally even if bazel symlinks aren't normalized +* Revert to clang 15.x +* Use bash when normalize-dist runs inside docker +* Use DOCKER_NETWORK if already set + +## 0.46.0 (2023-09-22) + + +### Bug Fixes + +* Hide normalize-bazel-symlinks in cbuild + + +### Dependencies + +* **deps:** Update amazonlinux images to Aug 2023 +* **deps:** Upgrade clang to v16 in build-debian + +## 0.45.0 (2023-09-19) + + +### Features + +* Invoke normalize-bazel-symlinks at cbuild exit + +## 0.44.0 (2023-09-11) + + +### Features + +* Add coverage bazel config and collect-coverage tool + +## 0.43.0 (2023-08-15) + + +### Features + +* Allow terraform version to be specified + + +### Bug Fixes + +* Move feat(deps) earlier for precedence over feat + +## 0.42.1 (2023-08-14) + + +### Bug Fixes + +* Revert override of /usr/bin/python links in build-amazonlinux2 + +## 0.42.0 (2023-08-07) + + +### Features + +* Add amazonlinux2 support to convert-docker-to-nitro +* Add bsdmainutils for hexdump +* Add build-amazonlinux2023 image +* Add support for amazonlinux2023 in builder.sh +* Configure python3.9 for python/python3 links +* Remove jdk from build-amazonlinux2 image + +### Dependencies + +* **deps:** Upgrade amazonlinux2 to 20230719 + + +### Bug Fixes + +* Empty bazel-* arg list not an error condition + +## 0.41.1 (2023-08-04) + + +### Bug Fixes + +* Remove debug statement + +## 0.41.0 (2023-08-03) + + +### Features + +* Create links for lld and ld.lld in build-debian + +## 0.40.0 (2023-08-03) + + +### Features + +* Add Dependencies section for release notes +* **deps:** Upgrade rules_python to 0.24.0 +* Ensure bazel-* scripts handle non-bazel args too + +## 0.39.0 (2023-08-02) + + +### Features + +* Run bazel containers in seccomp=unconfined mode + +## 0.38.1 (2023-08-02) + + +### Bug Fixes + +* Ensure python3.9 is found first in PATH + +## 0.38.0 (2023-07-28) + + +### Features + +* Add cbuild flag --seccomp-unconfined + +## 0.37.0 (2023-07-27) + + +### Features + +* 
Add patch tool to build-debian image + +## 0.36.1 (2023-07-25) + + +### Bug Fixes + +* Rename convert_docker_to_nitro to convert-docker-to-nitro + +## 0.36.0 (2023-07-24) + + +### Features + +* Add convert_docker_to_nitro + +## 0.35.0 (2023-07-21) + + +### Features + +* Add LICENSE file + +## 0.34.0 (2023-07-21) + + +### Features + +* Add OSSF Scorecard badge to top-level README + +## 0.33.0 (2023-07-20) + + +### Features + +* Install python libclang 15 in build-debian +* Add OSSF Scorecard GitHub Action + +## 0.32.0 (2023-07-14) + + +### Features + +* Set PYTHON_BIN_PATH/PYTHON_LIB_PATH in build-amazonlinux2 + +## 0.31.0 (2023-07-12) + + +### Features + +* Add cbuild --docker-network flag +* Add coverage-tool image plus lcov scripts +* Mount gcloud config dir into container + + +### Bug Fixes + +* Add hash for coverage-tools +* Add hash for coverage-tools +* Improve error handling for flag values +* Print build log path on error condition +* Specify latest image tag explicitly +* Upgrade pre-commit hooks + +## 0.30.1 (2023-06-27) + + +### Bug Fixes + +* Use = for --env flag +* Use = for --env flag for all tools + +## 0.30.0 (2023-06-26) + + +### Features + +* Install numpy for python3.9 +* Set PYTHON_BIN_PATH/PYTHON_LIB_PATH in build-debian +* Upgrade AmazonLinux2 to 20230530 +* Upgrade packer to v1.9.1 + + +### Bug Fixes + +* Add links for llvm-{cov,profdata} + +## 0.29.0 (2023-06-05) + + +### Features + +* Update pre-commit hook versions + + +### Bug Fixes + +* Catch error when shifting multiple args +* Remove golang from test-tools image +* Resolve WORKSPACE using realpath +* Use correct exit code in --fast mode + +## 0.28.0 (2023-05-24) + + +### Features + +* Update ca-certificates + + +### Bug Fixes + +* Downgrade to clang v15 +* Use builders version.txt for tarfile tag + +## 0.27.0 (2023-05-23) + + +### Features + +* Add buf to presubmit image + + +### Documentation + +* Add CONTRIBUTING.md + +## 0.26.0 (2023-05-16) + + +### Features + +* Remove zlib-dev package +* Upgrade clang to v16 +* Upgrade go to v1.20.4 + +## 0.25.0 (2023-05-11) + + +### Features + +* Update default bazel version to 5.4.1 +* Upgrade rules_python to 0.21.0 + + +### Bug Fixes + +* Add file utility to build-debian image + +## 0.24.0 (2023-05-05) + + +### Features + +* Add --build-images flag to tests/run-tests +* Reuse tar image if available + + +### Bug Fixes + +* Address linter warnings +* Address linter warnings for tools +* Correct mangled usage text +* Pin pre-commit to 3.x +* Remove .gz suffix from tar file +* Remove function keyword for busybox sh script +* Remove Release in changelog title +* Upgrade pre-commit hooks + +## 0.23.0 (2023-04-13) + + +### Features + +* Add wrapper for commit-and-tag-version +* Upgrade curl to version 8 +* Upgrade to amazonlinux 2.0.20230320.0 + + +### Bug Fixes + +* Use commit-and-tag-version wrapper + +## 0.22.0 (2023-04-03) + + +### Features + +* Add awscurl wrapper script +* Add tests for misc CLI wrappers +* Correctly quote bash args +* Extend test-tool to support the release image +* Use login shell for interactive container + + +### Documentation + +* Add section on tools to README +* Remove section on building images directly + +## 0.21.1 (2023-03-07) + + +### Bug Fixes + +* Relax pinned version for apache2-utils + +## 0.21.0 (2023-03-06) + + +### Features + +* Add wrapper scripts for utils + +## 0.20.0 (2023-03-01) + + +### Features + +* Add docker buildkit plugin +* Add tests for CLI wrapper scripts +* Permit testing of a single image +* Relax pinned versions in 
build-debian, presubmit and test-tools + +## 0.19.0 (2023-03-01) + + +### Features + +* Add jq wrapper + + +### Bug Fixes + +* Relax pinned version of openjdk to 11.0.* + +## 0.18.0 (2023-02-23) + + +### Features + +* Relax pinned versions for apk and yum packages to semver + +## 0.17.0 (2023-02-21) + + +### Features + +* Upgrade black to 23.1.0 +* Upgrade tar to 1.34-r1 +* Upgrade to buildozer 6.0.1 + + +### Bug Fixes + +* Minor code cleanup in images/presubmit/install_apps +* Upgrade ghz to 0.114.0 + +## 0.16.0 (2023-02-05) + + +### Features + +* Run test tools in docker interactive mode to admit std streams + +## 0.15.1 (2023-02-04) + + +### Bug Fixes + +* Return value from get_docker_workspace_mount() + +## 0.15.0 (2023-02-03) + + +### Features + +* Use WORKSPACE_MOUNT if set + + +### Bug Fixes + +* Pin commit-and-tag-version to v10.1.0 + +## 0.14.0 (2023-01-27) + + +### Features + +* Improve verbose output for get-builder-image-tagged + +## 0.13.1 (2023-01-26) + + +### Bug Fixes + +* Upgrade software-properties-common + +## 0.13.0 (2023-01-23) + + +### Features + +* Add ab tool +* Add cassowary http load testing tool +* Add h2load tool +* Add slowhttptest tool +* Adjust get-builder-image-tagged verbose output +* Upgrade to packer v1.8.5 + + +### Bug Fixes + +* Re-pin dependencies in test-tools image +* Relax version pins to semver +* Upgrade amazonlinux2 base image +* Upgrade git on amazonlinux2 + +## 0.12.0 (2023-01-10) + + +### Features + +* Modify ghz wrapper for generic use. Add curl +* Use test-tools image for grpcurl + +## 0.11.0 (2023-01-09) + + +### Features + +* Add chrpath + + +### Bug Fixes + +* Clean up tmpdir via RETURN trap + +## 0.10.0 (2023-01-06) + + +### Features + +* Drop ubuntu package version minor for curl + +## 0.9.0 (2023-01-04) + + +### Features + +* Add zlib1g-dev to build-debian per scp build dependency +* Update hook versions + + +### Bug Fixes + +* Correct non-zero error message and drop sourcing of tools/builder.sh +* Elide warnings from bazel info +* Revert from clang-format v15 to v14 + +## 0.8.0 (2022-12-29) + + +### Features + +* Add bash and jq to test-tools image +* Add script to normalize bazel- symlinks +* Ensure run-tests includes all images +* Skip symlinks that resolve in normalize-bazel-symlink + +## 0.7.0 (2022-12-27) + + +### Features + +* Add ghz wrapper script +* Add test-tools image + +## 0.6.0 (2022-12-12) + + +### Features + +* Add EXTRA_DOCKER_RUN_ARGS support in aws-cli + + +### Bug Fixes + +* Emit docker build output only on non-zero exit +* Remove tempfile before exiting + +## 0.5.0 (2022-12-06) + + +### Features + +* Pin versions in presubmit image + + +### Bug Fixes + +* Avoid cache for tar image +* Update version pin for ca-certificates +* Use images subdirs for image list + +## 0.4.4 (2022-11-18) + + +### Bug Fixes + +* Retain execute permissions when normalizing dist + +## 0.4.3 (2022-11-17) + + +### Bug Fixes + +* Add gettext package for envsubst +* Avoid yum package version strings specific to CPU architecture +* Improve verbose output for get-builder-image-tagged +* Pin apt and yum package versions + +## 0.4.2 (2022-11-17) + + +### Bug Fixes + +* Generate SHA within docker container + +## 0.4.1 (2022-11-15) + + +### Bug Fixes + +* Reduce noise creating presubmit image +* Remove docker run --interactive flag + +## 0.4.0 (2022-11-14) + + +### Features + +* Add buildozer to release image +* Add grpcurl helper script +* Substitute variables in EXTRA_DOCKER_RUN_ARGS +* Use specific version of GNU tar + + +### Bug Fixes + +* Add 
/opt/bin/python link +* Add CC=clang env var +* Add xz to build-debian +* Explicitly add machine type and OS release to toolchains hash + +## 0.3.1 (2022-11-01) + + +### Bug Fixes + +* Add OpenJDK 11 in build-amazonlinux2 + +## 0.3.0 (2022-11-01) + + +### Features + +* Add git to build-amazonlinux2 image +* Add google-java-format pre-commit hook +* Add OpenJDK 11 +* Move python3 to /opt/bin/python3 + + +### Bug Fixes + +* Ensure builder::set_workspace does not overwrite WORKSPACE + +## 0.2.0 (2022-10-26) + + +### Features + +* Add bazel support to register container-based python toolchain +* Add docker uri env var as override +* Add tools/terraform +* Use image etc files in workspace root + + +### Bug Fixes + +* Avoid empty tar error if no workspace etc files + + +### Documentation + +* Add Getting Started section to README.md + +## 0.1.0 (2022-10-25) + + +### Features + +* Add --env flag to cbuild +* Add arg processing to cbuild script +* Add aws-cli helper script +* Add bazel-debian helper script +* Add builders/utils docker image +* Add hadolint to lint Dockerfiles +* Add optional flags to cbuild tool +* Add preliminary support for commit-and-tag-version and copybara +* Add the release-please tool +* Add toolchain short hash to bazel output_user_root path +* Add tools/lib/builder.sh +* **build:** Add GitHub CLI tool https://cli.github.com/ +* Determine workspace mount point from docker inspect if inside docker container +* Inject clang-version as a bazel action_env +* Migrate generation of builders/release container image to Dockerfile +* Move image directories to images/ top-level directory +* Overhaul building on amazonlinux2 +* Remove python build dependencies from bazel +* Set BUILD_ARCH env var in docker images +* Update release image to node v18 +* Upgrade to bazel 5.3.2 +* Upgrade to clang v14 on bazel-debian +* Use Packer to build AMI. 
+ + +### Bug Fixes + +* Add builders/tools/normalize-dist to chmod/chgrp/chown dist/ directory tree +* Add get_workspace_mount function to encapsulate code block +* Add python version to action_env +* Add/remove basic pre-commit hooks +* Adopt shellcheck +* Avoid installing recommended debian packages +* Avoid use of git rev-parse to determine tools path +* Bump to latest version of bazelisk +* Clean bazel_root for smaller docker image +* Correct argument handling in cbuild script +* Correct errors generating Amazon Linux 2-based builder image +* Define python3 toolchain +* Drop packer from build-debian image +* Ensure /etc/gitconfig is readable by all +* Improve cbuild help text +* Install bazel version as specified in .bazelversion +* Invoke addlicense for all text files +* Modifications as indicated by shellcheck +* Mount $HOME/aws in aws-cli container +* Move bazel env vars from comments to help text +* Move builder-related configs to builders/etc +* Move WORKSPACE definition to cbuild script global +* Only propagate AWS env vars into amazonlinux2 build container +* Pin version of bazelisk +* Pin version of libc++-dev +* Pin version of python3.8 +* Print pre-commit version rather than help +* Remove container when get-architecture exits +* Remove debugging statement +* Remove dockerfile linter ignore and correct ENTRYPOINT +* Remove pre-commit config from build-debian +* Remove shellcheck from build-debian +* Remove unused nitro_enclave_image bazel rule +* Set bazel output_base to accommodate distinct workspaces +* Set bazel output_user_root in image bazelrc +* Set locale in build-debian +* Set WORKSPACE correctly from submodule +* Set WORKSPACE variable +* Switch from hardcoded arch to using dpkg --print-architecture +* Update normalize-dist to function inside build container +* Update pre-commit to use cbuild +* Use PRE_COMMIT_TOOL env var +* Various path-related fixes + + +### Build System + +* Add arch to docker image tags +* Add get_builder_image_tagged tool to determine a content-based tag +* Add get-architecture helper script +* Add missing imports into nitro BUILD +* Add tools/pre-commit +* Correct propagation of quoted args in gh wrapper +* Move commit-and-tag-version into tools dir +* Move gh into tools dir +* Optionally build the AMI +* Propagate status code in exit functions +* Reduce redundant installation commands +* Remove release-please tool +* Rename builders/bazel to build-debian +* Simplify use of npm image in container_run_and_commit() +* Support GH_TOKEN env var + + +### Documentation + +* Add top-level README.md +* Improve prose in README.md +* Move docker build instructions to README.md +* Reformat and lint markdown diff --git a/services/inference_sidecar/common/builders/CONTRIBUTING.md b/services/inference_sidecar/common/builders/CONTRIBUTING.md new file mode 100644 index 00000000..0d0e3e85 --- /dev/null +++ b/services/inference_sidecar/common/builders/CONTRIBUTING.md @@ -0,0 +1,3 @@ +# How to Contribute + +Presently this project is not accepting contributions. diff --git a/services/inference_sidecar/common/builders/LICENSE b/services/inference_sidecar/common/builders/LICENSE new file mode 100644 index 00000000..d6456956 --- /dev/null +++ b/services/inference_sidecar/common/builders/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. 
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
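The README introduced below explains that these builders are designed to be consumed as a `git submodule` by other Privacy Sandbox repos. As a minimal sketch of that setup (the remote URL and destination path here are illustrative assumptions, not part of this patch):

```sh
# Hypothetical setup: vendor the Privacy Sandbox build-system repo as a
# submodule at the path this patch populates (URL and path assumed).
git submodule add https://github.com/privacysandbox/build-system \
  services/inference_sidecar/common/builders
git submodule update --init
```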
diff --git a/services/inference_sidecar/common/builders/README.md b/services/inference_sidecar/common/builders/README.md new file mode 100644 index 00000000..3ba69a91 --- /dev/null +++ b/services/inference_sidecar/common/builders/README.md @@ -0,0 +1,35 @@ +# Privacy Sandbox Builders + +Build tools and docker images used by the [Privacy Sandbox](https://github.com/privacysandbox) +open-source ecosystem. The docker images generated using the Privacy Sandbox Builders are used as +the build environment(s) for other Privacy Sandbox software. These docker images can be used as part +of CI/CD workflows. + +## Getting Started + +This repo is designed to be used via `git submodule` by other Privacy Sandbox repos. This is not a +requirement per se; it can also be used standalone or via other approaches like `git subtree`. + +To use Privacy Sandbox Builders, you need: + +- `docker` with [BuildKit](https://docs.docker.com/build/buildkit/). + +## Building Docker Images + +To build a docker image directly, you can use the `tools/get-builder-image-tagged` tool. + +## Tools + +The `tools` directory contains wrapper scripts along with some non-wrapper scripts. The wrapper +scripts, such as `terraform` and `curl`, execute the corresponding tool installed in one of the build +images. + +For example, to execute `curl`: + +```sh +tools/curl --help +``` + +--- + +[![OpenSSF Scorecard](https://api.securityscorecards.dev/projects/github.com/privacysandbox/build-system/badge)](https://securityscorecards.dev/viewer/?uri=github.com/privacysandbox/build-system) diff --git a/services/inference_sidecar/common/builders/bazel/BUILD b/services/inference_sidecar/common/builders/bazel/BUILD new file mode 100644 index 00000000..9d103c15 --- /dev/null +++ b/services/inference_sidecar/common/builders/bazel/BUILD @@ -0,0 +1,40 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Register container-based python toolchain.""" + +load("@bazel_tools//tools/python:toolchain.bzl", "py_runtime_pair") +load("@rules_python//python:defs.bzl", "py_runtime") + +# define the python3 runtime; this path must exist in the bazel build environment +PY3_PATH = "/opt/bin/python3" + +py_runtime( + name = "py_runtime", + interpreter_path = PY3_PATH, + python_version = "PY3", + visibility = ["//visibility:public"], +) + +py_runtime_pair( + name = "py_runtime_pair", + py2_runtime = None, + py3_runtime = ":py_runtime", +) + +toolchain( + name = "py_toolchain", + toolchain = ":py_runtime_pair", + toolchain_type = "@bazel_tools//tools/python:toolchain_type", +) diff --git a/services/inference_sidecar/common/builders/bazel/deps.bzl b/services/inference_sidecar/common/builders/bazel/deps.bzl new file mode 100644 index 00000000..0fe6743b --- /dev/null +++ b/services/inference_sidecar/common/builders/bazel/deps.bzl @@ -0,0 +1,35 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Load definitions for use in WORKSPACE files.""" + +load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive") + +def python_deps(bazel_package): + """Load rules_python and register container-based python toolchain + + Note: the bazel_package arg will depend on the import/submodule location in your workspace + + Args: + bazel_package: repo-relative bazel package to builders/bazel/BUILD eg. "//builders/bazel" + """ + http_archive( + name = "rules_python", + sha256 = "0a8003b044294d7840ac7d9d73eef05d6ceb682d7516781a4ec62eeb34702578", + strip_prefix = "rules_python-0.24.0", + urls = [ + "https://github.com/bazelbuild/rules_python/releases/download/0.24.0/rules_python-0.24.0.tar.gz", + ], + ) + native.register_toolchains("{}:py_toolchain".format(bazel_package)) diff --git a/services/inference_sidecar/common/builders/etc/.bazelversion b/services/inference_sidecar/common/builders/etc/.bazelversion new file mode 100644 index 00000000..ade65226 --- /dev/null +++ b/services/inference_sidecar/common/builders/etc/.bazelversion @@ -0,0 +1 @@ +5.4.1 diff --git a/services/inference_sidecar/common/builders/etc/.clang-format b/services/inference_sidecar/common/builders/etc/.clang-format new file mode 100644 index 00000000..8c45f0ce --- /dev/null +++ b/services/inference_sidecar/common/builders/etc/.clang-format @@ -0,0 +1,33 @@ +BasedOnStyle: Google +Standard: c++17 +DerivePointerAlignment: false +SortIncludes: true +IncludeBlocks: Regroup +IncludeCategories: + # gtest, this should be put first in tests. + - Regex: '^<gtest\/' + Priority: 10 + # 1st level .h headers in <>, from OS. + - Regex: '^<[_A-Za-z0-9-]+\.h>' + Priority: 30 + # 2nd level .h headers in <>, POSIX standard. + - Regex: '^<(sys|arpa|net|netinet)\/[_A-Za-z0-9-]+\.h>' + Priority: 30 + # Linux-specific .h headers in <>. + - Regex: '^<linux\/[_A-Za-z0-9-]+\.h>' + Priority: 40 + # Headers in <> without extension, these are basically C++ STL headers + - Regex: '^<[\/_A-Za-z0-9-]+>' + Priority: 50 + # Headers in <> from specific external libraries. + - Regex: '^<(grpcpp|absl)\/' + Priority: 60 + # Any other uncaught headers in <> + - Regex: '^<' + Priority: 70 + # Headers in "" of current directory, this should be the last category. + - Regex: '^"[_A-Za-z0-9-]+\.h' + Priority: 200 + # Headers in "" with directory hierarchy + - Regex: '^"[\/_A-Za-z0-9-]+' + Priority: 80 diff --git a/services/inference_sidecar/common/builders/etc/.hadolint.yaml b/services/inference_sidecar/common/builders/etc/.hadolint.yaml new file mode 100644 index 00000000..73dc9121 --- /dev/null +++ b/services/inference_sidecar/common/builders/etc/.hadolint.yaml @@ -0,0 +1,20 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and +# limitations under the License. + +# See https://github.com/hadolint/hadolint for more information + +failure-threshold: error + +trustedRegistries: +- docker.io diff --git a/services/inference_sidecar/common/builders/etc/.markdownlint-cli2.yaml b/services/inference_sidecar/common/builders/etc/.markdownlint-cli2.yaml new file mode 100644 index 00000000..0671ea1a --- /dev/null +++ b/services/inference_sidecar/common/builders/etc/.markdownlint-cli2.yaml @@ -0,0 +1,43 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +config: + line-length: + line_length: 120 + #stern: true + code_blocks: false + + # these are apparently in conflict with prettier's markdown formatting + list-marker-space: false + list-indent: false + ul-indent: false + + headings: false + + proper-names: + code_blocks: false + names: + - CommonMark + - JavaScript + - Markdown + - markdown-it + - markdownlint + - markdownlint-cli2 + - Node.js + +fix: true + +ignores: +- CHANGELOG.md +- google_internal/LATEST_RELEASE.md diff --git a/services/inference_sidecar/common/builders/etc/.pre-commit-config.yaml b/services/inference_sidecar/common/builders/etc/.pre-commit-config.yaml new file mode 100644 index 00000000..6a3ebd44 --- /dev/null +++ b/services/inference_sidecar/common/builders/etc/.pre-commit-config.yaml @@ -0,0 +1,156 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# See https://pre-commit.com for more information +# See https://pre-commit.com/hooks.html for more hooks +exclude: (?x)^( + bazel-(bin|out|testlogs|workspace)/.*| + .bazel_output/.*| + version.txt + )$ + +fail_fast: true +repos: +- repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.4.0 + hooks: + - id: end-of-file-fixer + - id: fix-byte-order-marker + - id: mixed-line-ending + - id: trailing-whitespace + - id: check-case-conflict + - id: check-merge-conflict + - id: check-yaml + - id: check-json + - id: check-symlinks + - id: check-added-large-files + - id: check-vcs-permalinks + - id: check-executables-have-shebangs + - id: detect-private-key + +- repo: https://github.com/jumanjihouse/pre-commit-hooks + rev: 3.0.0 + hooks: + - id: git-check + - id: script-must-not-have-extension + exclude: '^google_internal/.*/kokoro_(presubmit|continuous).sh$' + - id: script-must-have-extension + - id: require-ascii + - id: shellcheck + exclude: '^(production|tools|google_internal|builders/images)/.*$' + +- repo: https://github.com/pre-commit/mirrors-clang-format + rev: v14.0.6 + hooks: + - id: clang-format + types_or: + - c++ + - c + +- repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks + rev: v2.4.0 + hooks: + - id: pretty-format-java + name: Google Java Formatter + args: [--autofix] + +- repo: local + hooks: + - id: addlicense + name: addlicense + language: golang + additional_dependencies: + - github.com/google/addlicense@v1.1.0 + always_run: false + pass_filenames: true + entry: addlicense -v + types_or: + - text + + - id: addlicense-check + name: addlicense check + language: golang + additional_dependencies: + - github.com/google/addlicense@v1.1.0 + always_run: false + pass_filenames: true + entry: addlicense -check + types_or: + - text + +# - id: terraform-fmt +# name: terraform fmt +# description: Run terraform via docker to format Terraform files +# language: script +# pass_filenames: false +# entry: builders/tools/terraform fmt -write=true -recursive +# types_or: +# - terraform + +# - id: hadolint +# name: Lint Dockerfiles +# description: Run hadolint via docker to lint Dockerfiles +# language: script +# types_or: +# - dockerfile +# entry: builders/tools/hadolint + +- repo: https://github.com/pre-commit/mirrors-prettier + rev: v2.7.1 + hooks: + - id: prettier + name: prettier markdown + types_or: + - markdown + +- repo: https://github.com/DavidAnson/markdownlint-cli2 + rev: v0.6.0 + hooks: + - id: markdownlint-cli2 + name: lint markdown + +- repo: local + hooks: + - id: buildifier + name: buildifier + description: Format bazel WORKSPACE, BUILD and .bzl files with a standard convention. 
+ language: golang + additional_dependencies: + - github.com/bazelbuild/buildtools/buildifier@5.1.0 + always_run: true + pass_filenames: true + types_or: + - bazel + entry: buildifier + args: + - -lint=fix + - -mode=fix + - -warnings=all + +- repo: https://github.com/cpplint/cpplint + rev: 1.6.1 + hooks: + - id: cpplint + types_or: + - c++ + - c + args: + - --filter=-build/c++11,+build/c++17,-build/header_guard,-build/include_order,+build/include_what_you_use,-build/include_subdir,-readability/casting,-readability/todo,-runtime/references + - --quiet + +- repo: https://github.com/psf/black + rev: 23.1.0 + hooks: + - id: black + name: black python formatter diff --git a/services/inference_sidecar/common/builders/etc/.prettierrc.yaml b/services/inference_sidecar/common/builders/etc/.prettierrc.yaml new file mode 100644 index 00000000..bf26ea63 --- /dev/null +++ b/services/inference_sidecar/common/builders/etc/.prettierrc.yaml @@ -0,0 +1,25 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +printWidth: 120 +trailingComma: es5 +tabWidth: 2 +useTabs: false +singleQuote: true +proseWrap: always +overrides: +- files: '*.md' + options: + tabWidth: 4 + printWidth: 100 diff --git a/services/inference_sidecar/common/builders/etc/.versionrc.json b/services/inference_sidecar/common/builders/etc/.versionrc.json new file mode 100644 index 00000000..7d175563 --- /dev/null +++ b/services/inference_sidecar/common/builders/etc/.versionrc.json @@ -0,0 +1,27 @@ +{ + "bumpFiles": [ + { + "filename": "version.txt", + "type": "plain-text" + } + ], + "commitUrlFormat": " ", + "issueUrlFormat": " ", + "packageFiles": [ + { + "filename": "version.txt", + "type": "plain-text" + } + ], + "tagPrefix": "release-", + "types": [ + { "type": "feat", "scope": "deps", "section": "Dependencies" }, + { "type": "feat", "section": "Features" }, + { "type": "fix", "section": "Bug Fixes" }, + { "type": "docs", "section": "Documentation" }, + { "type": "internal", "hidden": true }, + { "type": "chore", "hidden": true }, + { "type": "test", "hidden": true }, + { "type": "refactor", "hidden": true } + ] +} diff --git a/services/inference_sidecar/common/builders/images/build-amazonlinux2/.bazelversion b/services/inference_sidecar/common/builders/images/build-amazonlinux2/.bazelversion new file mode 120000 index 00000000..8f79a5af --- /dev/null +++ b/services/inference_sidecar/common/builders/images/build-amazonlinux2/.bazelversion @@ -0,0 +1 @@ +../../etc/.bazelversion \ No newline at end of file diff --git a/services/inference_sidecar/common/builders/images/build-amazonlinux2/Dockerfile b/services/inference_sidecar/common/builders/images/build-amazonlinux2/Dockerfile new file mode 100644 index 00000000..bb6c1edc --- /dev/null +++ b/services/inference_sidecar/common/builders/images/build-amazonlinux2/Dockerfile @@ -0,0 +1,35 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +FROM amazonlinux:2.0.20230822.0 + +COPY /install_apps install_golang_apps install_go.sh generate_system_bazelrc .bazelversion /scripts/ +COPY get_workspace_mount /usr/local/bin +COPY gitconfig /etc + +ARG TARGETARCH +ENV BUILD_ARCH="${TARGETARCH}" \ + WORKSPACE=/src/workspace \ + CC=clang + +RUN \ + chmod 644 /etc/gitconfig && \ + /scripts/install_apps && \ + /scripts/generate_system_bazelrc --user-root-name amazonlinux2 && \ + /scripts/install_golang_apps && \ + rm -rf /scripts + +ENV PATH="${PATH}:/usr/local/go/bin:/opt/bin" \ + PYTHON_BIN_PATH="/opt/bin/python3" \ + PYTHON_LIB_PATH="/usr/lib/python3.8" diff --git a/services/inference_sidecar/common/builders/images/build-amazonlinux2/generate_system_bazelrc b/services/inference_sidecar/common/builders/images/build-amazonlinux2/generate_system_bazelrc new file mode 120000 index 00000000..8f22303f --- /dev/null +++ b/services/inference_sidecar/common/builders/images/build-amazonlinux2/generate_system_bazelrc @@ -0,0 +1 @@ +../generate_system_bazelrc \ No newline at end of file diff --git a/services/inference_sidecar/common/builders/images/build-amazonlinux2/get_workspace_mount b/services/inference_sidecar/common/builders/images/build-amazonlinux2/get_workspace_mount new file mode 120000 index 00000000..581579a3 --- /dev/null +++ b/services/inference_sidecar/common/builders/images/build-amazonlinux2/get_workspace_mount @@ -0,0 +1 @@ +../get_workspace_mount \ No newline at end of file diff --git a/services/inference_sidecar/common/builders/images/build-amazonlinux2/gitconfig b/services/inference_sidecar/common/builders/images/build-amazonlinux2/gitconfig new file mode 120000 index 00000000..db62a00c --- /dev/null +++ b/services/inference_sidecar/common/builders/images/build-amazonlinux2/gitconfig @@ -0,0 +1 @@ +../gitconfig \ No newline at end of file diff --git a/services/inference_sidecar/common/builders/images/build-amazonlinux2/install_apps b/services/inference_sidecar/common/builders/images/build-amazonlinux2/install_apps new file mode 100755 index 00000000..00b6f31e --- /dev/null +++ b/services/inference_sidecar/common/builders/images/build-amazonlinux2/install_apps @@ -0,0 +1,107 @@ +#!/bin/bash + +set -o pipefail +set -o errexit + +VERBOSE=0 + +usage() { + local exitval=${1-1} + cat >&2 << USAGE +usage: + $0 + --verbose Emit verbose info. Default: false +USAGE + # shellcheck disable=SC2086 + exit ${exitval} +} + +while [[ $# -gt 0 ]]; do + case "$1" in + --verbose) + VERBOSE=1 + shift + ;; + -h | --help) + usage 0 + break + ;; + *) + usage + break + ;; + esac +done + +SCRIPT_DIR="$(dirname "$(readlink -f "$0")")" +readonly SCRIPT_DIR +# shellcheck disable=SC1090 +. 
"${SCRIPT_DIR}"/install_go.sh + +function yum_update() { + yum -y update +} + +function install_python() { + amazon-linux-extras install -y \ + python3.8 + mkdir -p /opt/bin + update-alternatives \ + --install /opt/bin/python3 python3-opt /usr/bin/python3.8 100 \ + --slave /opt/bin/python python /usr/bin/python3.8 + +} + +function install_nitro() { + amazon-linux-extras install -y aws-nitro-enclaves-cli + yum install -y aws-nitro-enclaves-cli-devel +} + +function install_gcc() { + # install gcc, which is required to build socat + yum install -y "gcc-7.3.1*" +} + +function install_misc() { + yum install -y \ + "gettext-0.19.8.*" \ + "git-2.38*" \ + "tar-1.26*" \ + "unzip-6.0*" \ + "which-2.*" \ + "zip-3.0*" +} + +function install_packer() { + yum install -y "yum-utils-1.1.31*" + yum-config-manager --add-repo https://rpm.releases.hashicorp.com/AmazonLinux/hashicorp.repo + yum -y install "packer-1.9.1*" + update-alternatives --install /usr/local/bin/packer packer /usr/bin/packer 100 + + /usr/local/bin/packer version +} + +function install_clang() { + yum install -y "clang-11.1.0*" + clang --version +} + +function cleanup() { + cd / + go clean -cache +} + +if [[ ${VERBOSE} -eq 1 ]]; then + printf "=== SHELL ENVIRONMENT ===\n" + env +fi + +yum_update +install_nitro +install_misc +install_clang +install_golang "${BUILD_ARCH}" +install_gcc +install_packer +install_python +cleanup diff --git a/services/inference_sidecar/common/builders/images/build-amazonlinux2/install_go.sh b/services/inference_sidecar/common/builders/images/build-amazonlinux2/install_go.sh new file mode 120000 index 00000000..211fd009 --- /dev/null +++ b/services/inference_sidecar/common/builders/images/build-amazonlinux2/install_go.sh @@ -0,0 +1 @@ +../install_go.sh \ No newline at end of file diff --git a/services/inference_sidecar/common/builders/images/build-amazonlinux2/install_golang_apps b/services/inference_sidecar/common/builders/images/build-amazonlinux2/install_golang_apps new file mode 120000 index 00000000..acc9d5a3 --- /dev/null +++ b/services/inference_sidecar/common/builders/images/build-amazonlinux2/install_golang_apps @@ -0,0 +1 @@ +../install_golang_apps \ No newline at end of file diff --git a/services/inference_sidecar/common/builders/images/build-amazonlinux2/test/commands.yaml b/services/inference_sidecar/common/builders/images/build-amazonlinux2/test/commands.yaml new file mode 100644 index 00000000..41823ae5 --- /dev/null +++ b/services/inference_sidecar/common/builders/images/build-amazonlinux2/test/commands.yaml @@ -0,0 +1,29 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + # refer to docs at https://github.com/GoogleContainerTools/container-structure-test + +schemaVersion: 2.0.0 + +# command tests require the docker toolchain +commandTests: + - name: nitro-cli version + command: "nitro-cli" + args: ["--version"] + exitCode: 0 + + - name: nitro-cli build-enclave help + command: "nitro-cli" + args: ["build-enclave", "--help"] + exitCode: 0 diff --git a/services/inference_sidecar/common/builders/images/build-amazonlinux2023/.bazelversion b/services/inference_sidecar/common/builders/images/build-amazonlinux2023/.bazelversion new file mode 120000 index 00000000..8f79a5af --- /dev/null +++ b/services/inference_sidecar/common/builders/images/build-amazonlinux2023/.bazelversion @@ -0,0 +1 @@ +../../etc/.bazelversion \ No newline at end of file diff --git a/services/inference_sidecar/common/builders/images/build-amazonlinux2023/Dockerfile b/services/inference_sidecar/common/builders/images/build-amazonlinux2023/Dockerfile new file mode 100644 index 00000000..d24ba9af --- /dev/null +++ b/services/inference_sidecar/common/builders/images/build-amazonlinux2023/Dockerfile @@ -0,0 +1,35 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +FROM amazonlinux:2023.1.20230825.0 + +COPY /install_apps install_golang_apps install_go.sh generate_system_bazelrc .bazelversion /scripts/ +COPY get_workspace_mount /usr/local/bin +COPY gitconfig /etc + +ARG TARGETARCH +ENV BUILD_ARCH="${TARGETARCH}" \ WORKSPACE=/src/workspace \ CC=clang + +RUN \ chmod 644 /etc/gitconfig && \ /scripts/install_apps && \ /scripts/generate_system_bazelrc --user-root-name amazonlinux2023 && \ /scripts/install_golang_apps && \ rm -rf /scripts + +ENV PATH="${PATH}:/usr/local/go/bin:/opt/bin" \ PYTHON_BIN_PATH="/opt/bin/python3" \ PYTHON_LIB_PATH="/usr/lib/python3.9" diff --git a/services/inference_sidecar/common/builders/images/build-amazonlinux2023/generate_system_bazelrc b/services/inference_sidecar/common/builders/images/build-amazonlinux2023/generate_system_bazelrc new file mode 120000 index 00000000..8f22303f --- /dev/null +++ b/services/inference_sidecar/common/builders/images/build-amazonlinux2023/generate_system_bazelrc @@ -0,0 +1 @@ +../generate_system_bazelrc \ No newline at end of file diff --git a/services/inference_sidecar/common/builders/images/build-amazonlinux2023/get_workspace_mount b/services/inference_sidecar/common/builders/images/build-amazonlinux2023/get_workspace_mount new file mode 120000 index 00000000..581579a3 --- /dev/null +++ b/services/inference_sidecar/common/builders/images/build-amazonlinux2023/get_workspace_mount @@ -0,0 +1 @@ +../get_workspace_mount \ No newline at end of file diff --git a/services/inference_sidecar/common/builders/images/build-amazonlinux2023/gitconfig b/services/inference_sidecar/common/builders/images/build-amazonlinux2023/gitconfig new file mode 120000 index 00000000..db62a00c --- /dev/null +++ b/services/inference_sidecar/common/builders/images/build-amazonlinux2023/gitconfig @@ -0,0 +1 @@ +../gitconfig \ No
newline at end of file diff --git a/services/inference_sidecar/common/builders/images/build-amazonlinux2023/install_apps b/services/inference_sidecar/common/builders/images/build-amazonlinux2023/install_apps new file mode 100755 index 00000000..2cf10696 --- /dev/null +++ b/services/inference_sidecar/common/builders/images/build-amazonlinux2023/install_apps @@ -0,0 +1,106 @@ +#!/bin/bash + +set -o pipefail +set -o errexit + +VERBOSE=0 + +usage() { + local exitval=${1-1} + cat >&2 << USAGE +usage: + $0 + --verbose Emit verbose info. Default: false +USAGE + # shellcheck disable=SC2086 + exit ${exitval} +} + +while [[ $# -gt 0 ]]; do + case "$1" in + --verbose) + VERBOSE=1 + shift + ;; + -h | --help) + usage 0 + break + ;; + *) + usage + break + ;; + esac +done + +SCRIPT_DIR="$(dirname "$(readlink -f "$0")")" +readonly SCRIPT_DIR +# shellcheck disable=SC1090 +. "${SCRIPT_DIR}"/install_go.sh + +function dnf_update() { + dnf -y update +} + +function install_python() { + mkdir -p /opt/bin + alternatives \ + --install /opt/bin/python3 python3-opt /usr/bin/python3.9 100 \ + --slave /usr/bin/python3 python3-usr /usr/bin/python3.9 \ + --slave /usr/bin/python python-usr /usr/bin/python3.9 \ + --slave /opt/bin/python python /usr/bin/python3.9 +} + +function install_nitro() { + dnf install -y \ + "aws-nitro-enclaves-cli-1.2.*" \ + "aws-nitro-enclaves-cli-devel-1.2.*" +} + +function install_gcc() { + # install gcc, which is required to build socat + dnf install -y "gcc-11.3.*" +} + +function install_misc() { + dnf install -y \ + "gettext-0.21*" \ + "git-2.40.*" \ + "tar-2:1.34*" \ + "unzip-6.0*" \ + "which-2.*" \ + "zip-3.0*" +} + +function install_packer() { + dnf install -y "dnf-utils-4.1.*" + dnf config-manager --add-repo https://rpm.releases.hashicorp.com/AmazonLinux/hashicorp.repo + dnf -y install "packer-1.9.2*" + update-alternatives --install /usr/local/bin/packer packer /usr/bin/packer 100 + /usr/local/bin/packer version +} + +function install_clang() { + dnf install -y "clang-15.*" + clang --version +} + +function cleanup() { + cd / + go clean -cache +} + +if [[ ${VERBOSE} -eq 1 ]]; then + printf "=== SHELL ENVIRONMENT ===\n" + env +fi + +install_python +dnf_update +install_nitro +install_misc +install_clang +install_golang "${BUILD_ARCH}" +install_gcc +install_packer +cleanup diff --git a/services/inference_sidecar/common/builders/images/build-amazonlinux2023/install_go.sh b/services/inference_sidecar/common/builders/images/build-amazonlinux2023/install_go.sh new file mode 120000 index 00000000..211fd009 --- /dev/null +++ b/services/inference_sidecar/common/builders/images/build-amazonlinux2023/install_go.sh @@ -0,0 +1 @@ +../install_go.sh \ No newline at end of file diff --git a/services/inference_sidecar/common/builders/images/build-amazonlinux2023/install_golang_apps b/services/inference_sidecar/common/builders/images/build-amazonlinux2023/install_golang_apps new file mode 120000 index 00000000..acc9d5a3 --- /dev/null +++ b/services/inference_sidecar/common/builders/images/build-amazonlinux2023/install_golang_apps @@ -0,0 +1 @@ +../install_golang_apps \ No newline at end of file diff --git a/services/inference_sidecar/common/builders/images/build-amazonlinux2023/test/commands.yaml b/services/inference_sidecar/common/builders/images/build-amazonlinux2023/test/commands.yaml new file mode 100644 index 00000000..41823ae5 --- /dev/null +++ b/services/inference_sidecar/common/builders/images/build-amazonlinux2023/test/commands.yaml @@ -0,0 +1,29 @@ +# Copyright 2022 Google LLC +# +# Licensed under 
the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# refer to docs at https://github.com/GoogleContainerTools/container-structure-test + +schemaVersion: 2.0.0 + +# command tests require the docker toolchain +commandTests: + - name: nitro-cli version + command: "nitro-cli" + args: ["--version"] + exitCode: 0 + + - name: nitro-cli build-enclave help + command: "nitro-cli" + args: ["build-enclave", "--help"] + exitCode: 0 diff --git a/services/inference_sidecar/common/builders/images/build-debian/.bazelversion b/services/inference_sidecar/common/builders/images/build-debian/.bazelversion new file mode 120000 index 00000000..8f79a5af --- /dev/null +++ b/services/inference_sidecar/common/builders/images/build-debian/.bazelversion @@ -0,0 +1 @@ +../../etc/.bazelversion \ No newline at end of file diff --git a/services/inference_sidecar/common/builders/images/build-debian/Dockerfile b/services/inference_sidecar/common/builders/images/build-debian/Dockerfile new file mode 100644 index 00000000..eb370730 --- /dev/null +++ b/services/inference_sidecar/common/builders/images/build-debian/Dockerfile @@ -0,0 +1,61 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +ARG BASE_IMAGE=ubuntu:20.04 + +# ignore this hadolint error as BASE_IMAGE contains an image tag +# hadolint ignore=DL3006 +FROM ${BASE_IMAGE} as libprofiler-builder +ENV CC=clang \ + CXX=clang +ADD https://github.com/gperftools/gperftools/releases/download/gperftools-2.13/gperftools-2.13.tar.gz /build/gperftools.tar.gz +ADD https://apt.llvm.org/llvm.sh /build/llvm.sh +COPY compile_libprofiler /scripts/ +RUN /scripts/compile_libprofiler + +FROM docker/buildx-bin:v0.10 AS buildx-bin + +# ignore this hadolint error as BASE_IMAGE contains an image tag +# hadolint ignore=DL3006 +FROM ${BASE_IMAGE} +ARG LOCALE=en_US.UTF-8 +ARG TARGETARCH +COPY install_apps install_golang_apps install_go.sh generate_system_bazelrc .bazelversion /scripts/ +COPY get_workspace_mount /usr/local/bin +COPY gitconfig /etc +COPY --from=buildx-bin /buildx /usr/libexec/docker/cli-plugins/docker-buildx +COPY --from=libprofiler-builder /usr/lib/libprofiler.so.* /usr/lib/x86_64-linux-gnu/ +RUN \ + ln -rs /usr/lib/x86_64-linux-gnu/libprofiler.so.0.* /usr/lib/x86_64-linux-gnu/libprofiler.so.0 && \ + ln -rs /usr/lib/x86_64-linux-gnu/libprofiler.so.0.* /usr/lib/x86_64-linux-gnu/libprofiler.so + +ENV BUILD_ARCH="${TARGETARCH}" \ + WORKSPACE=/src/workspace \ + CC=clang \ + TZ=Etc/UTC \ + LANG=${LOCALE} \ + LANGUAGE=${LOCALE} \ + LC_ALL=${LOCALE} \ + LC_CTYPE=${LOCALE} + +RUN \ + chmod 644 /etc/gitconfig && \ + /scripts/install_apps --locale ${LOCALE} && \ + /scripts/generate_system_bazelrc --user-root-name ubuntu && \ + /scripts/install_golang_apps && \ + rm -rf /scripts + +ENV PATH="/opt/bin:${PATH}:/usr/local/go/bin" \ + PYTHON_BIN_PATH="/opt/bin/python3" \ + PYTHON_LIB_PATH="/usr/lib/python3.9" diff --git a/services/inference_sidecar/common/builders/images/build-debian/compile_libprofiler b/services/inference_sidecar/common/builders/images/build-debian/compile_libprofiler new file mode 100755 index 00000000..f05397fb --- /dev/null +++ b/services/inference_sidecar/common/builders/images/build-debian/compile_libprofiler @@ -0,0 +1,45 @@ +#!/bin/bash + +set -o pipefail +set -o errexit + +declare -r -i CLANG_VER=15 + +function apt_update() { + apt-get --quiet -o 'Acquire::https::No-Cache=True' -o 'Acquire::http::No-Cache=True' update +} + +function install_build_tools() { + DEBIAN_FRONTEND=noninteractive apt-get --quiet install -y --no-install-recommends \ + gnupg="2.2.*" \ + lsb-release="11.1.*" \ + make="4.*" \ + software-properties-common="0.99.*" \ + wget="1.20.*" +} + +function install_clang() { + ls -l /build + chmod +x /build/llvm.sh + /build/llvm.sh ${CLANG_VER} + apt-get --quiet install -y --no-install-recommends libc++-${CLANG_VER}-dev + update-alternatives --install /usr/bin/clang clang /usr/bin/clang-${CLANG_VER} 100 + rm -f llvm.sh + + clang --version +} + +function install_profiler() { + cd /build + tar xz --strip-components 1 -f gperftools.tar.gz + ./configure + make libprofiler.la + declare -r ver=0.5.9 + cp .libs/libprofiler.so.${ver} /usr/lib + ls -l /usr/lib/libprofiler* +} + +apt_update +install_build_tools +install_clang +install_profiler diff --git a/services/inference_sidecar/common/builders/images/build-debian/generate_system_bazelrc b/services/inference_sidecar/common/builders/images/build-debian/generate_system_bazelrc new file mode 120000 index 00000000..8f22303f --- /dev/null +++ b/services/inference_sidecar/common/builders/images/build-debian/generate_system_bazelrc @@ -0,0 +1 @@ +../generate_system_bazelrc \ No newline at end of file diff --git 
a/services/inference_sidecar/common/builders/images/build-debian/get_workspace_mount b/services/inference_sidecar/common/builders/images/build-debian/get_workspace_mount new file mode 120000 index 00000000..581579a3 --- /dev/null +++ b/services/inference_sidecar/common/builders/images/build-debian/get_workspace_mount @@ -0,0 +1 @@ +../get_workspace_mount \ No newline at end of file diff --git a/services/inference_sidecar/common/builders/images/build-debian/gitconfig b/services/inference_sidecar/common/builders/images/build-debian/gitconfig new file mode 120000 index 00000000..db62a00c --- /dev/null +++ b/services/inference_sidecar/common/builders/images/build-debian/gitconfig @@ -0,0 +1 @@ +../gitconfig \ No newline at end of file diff --git a/services/inference_sidecar/common/builders/images/build-debian/install_apps b/services/inference_sidecar/common/builders/images/build-debian/install_apps new file mode 100755 index 00000000..8734f014 --- /dev/null +++ b/services/inference_sidecar/common/builders/images/build-debian/install_apps @@ -0,0 +1,146 @@ +#!/bin/bash + +set -o pipefail +set -o errexit + +declare -i VERBOSE=0 +declare INSTALL_LOCALE=en_US.UTF-8 +declare -r -i CLANG_VER=15 + +usage() { + local exitval=${1-1} + cat >&2 << USAGE +usage: + $0 + --locale Set locale. Default: ${INSTALL_LOCALE} + --verbose Emit verbose info. Default: false +USAGE + # shellcheck disable=SC2086 + exit ${exitval} +} + +while [[ $# -gt 0 ]]; do + case "$1" in + --locale) + INSTALL_LOCALE="$2" + shift 2 || usage + ;; + --verbose) + VERBOSE=1 + shift + ;; + -h | --help) usage 0 ;; + *) usage 0 ;; + esac +done + +SCRIPT_DIR="$(dirname "$(readlink -f "$0")")" +readonly SCRIPT_DIR +# shellcheck disable=SC1090 +. "${SCRIPT_DIR}"/install_go.sh + +function apt_update() { + apt-get --quiet -o 'Acquire::https::No-Cache=True' -o 'Acquire::http::No-Cache=True' update +} + +function install_python() { + DEBIAN_FRONTEND=noninteractive apt-get --quiet install -y --no-install-recommends \ + python3.9-venv="3.9.*" python3.9-dev + mkdir -p /opt/bin + update-alternatives \ + --force \ + --install /opt/bin/python3 python3-opt /usr/bin/python3.9 100 \ + --slave /usr/bin/python3 python3-usr /usr/bin/python3.9 \ + --slave /usr/bin/python python-usr /usr/bin/python3.9 \ + --slave /opt/bin/python python /usr/bin/python3.9 + curl https://bootstrap.pypa.io/get-pip.py -o /tmp/get-pip.py + /usr/bin/python3 /tmp/get-pip.py + rm -f /tmp/get-pip.py + /usr/bin/python3 -m pip --version + /usr/bin/python3 -m pip install \ + "libclang~=${CLANG_VER}.0" \ + "numpy~=1.25" +} + +function install_misc() { + DEBIAN_FRONTEND=noninteractive apt-get --quiet install -y --no-install-recommends \ + apt-transport-https="2.0.*" \ + bsdmainutils \ + ca-certificates \ + chrpath="0.16-*" \ + libcurl4="7.68.*" \ + curl="7.68.*" \ + file="1:5.*" \ + gettext="0.19.*" \ + git="1:2.25.*" \ + gnupg="2.2.*" \ + google-perftools="2.*" \ + locales="2.31-*" \ + lsb-release="11.1.*" \ + openssh-client="1:8.2*" \ + patch="2.7.*" \ + rename="1.10-*" \ + software-properties-common="0.99.*" \ + unzip="6.0-*" \ + wget="1.20.*" \ + xz-utils="5.2.*" \ + zip="3.0-*" + if [[ -n ${INSTALL_LOCALE} ]]; then + printf "\nSetting locale to: %s\n" "${INSTALL_LOCALE}" + locale-gen "${INSTALL_LOCALE}" + update-locale LANG="${INSTALL_LOCALE}" + fi +} + +function install_clang() { + curl --silent --fail --show-error --location --remote-name https://apt.llvm.org/llvm.sh + chmod +x llvm.sh + ./llvm.sh ${CLANG_VER} + apt-get --quiet install -y --no-install-recommends 
libc++-${CLANG_VER}-dev + for prog in clang lldb lld ld.lld llvm-cov llvm-profdata; do + update-alternatives --install /usr/bin/${prog} ${prog} /usr/bin/${prog}-${CLANG_VER} 100 + done + rm -f llvm.sh + + clang --version + llvm-cov --version + llvm-profdata show --version +} + +# Install Docker (https://docs.docker.com/engine/install/debian/) +function install_docker() { + declare -r arch="$1" + apt-get --quiet remove docker docker.io containerd runc + mkdir -p /etc/apt/keyrings + declare -r dist=ubuntu + curl --silent --fail --show-error --location https://download.docker.com/linux/${dist}/gpg \ + | gpg --dearmor -o /etc/apt/keyrings/docker.gpg + declare lsb_release + lsb_release="$(lsb_release -cs)" + echo "deb [arch=${arch} signed-by=/etc/apt/keyrings/docker.gpg] https://download.docker.com/linux/${dist} ${lsb_release} stable" \ + | tee /etc/apt/sources.list.d/docker.list + apt_update + apt-get --quiet install -y --no-install-recommends docker-ce docker-ce-cli containerd.io +} + +function cleanup() { + apt-get --quiet autoremove -y + apt-get autoclean + apt-get clean + rm -rf /var/lib/apt/lists + cd / + go clean -cache +} + +if [[ ${VERBOSE} -eq 1 ]]; then + printf "=== SHELL ENVIRONMENT ===\n" + env +fi + +apt_update +install_misc +install_clang +install_golang "${BUILD_ARCH}" +install_docker "${BUILD_ARCH}" +install_python # should run after other install_* +cleanup diff --git a/services/inference_sidecar/common/builders/images/build-debian/install_go.sh b/services/inference_sidecar/common/builders/images/build-debian/install_go.sh new file mode 120000 index 00000000..211fd009 --- /dev/null +++ b/services/inference_sidecar/common/builders/images/build-debian/install_go.sh @@ -0,0 +1 @@ +../install_go.sh \ No newline at end of file diff --git a/services/inference_sidecar/common/builders/images/build-debian/install_golang_apps b/services/inference_sidecar/common/builders/images/build-debian/install_golang_apps new file mode 120000 index 00000000..acc9d5a3 --- /dev/null +++ b/services/inference_sidecar/common/builders/images/build-debian/install_golang_apps @@ -0,0 +1 @@ +../install_golang_apps \ No newline at end of file diff --git a/services/inference_sidecar/common/builders/images/build-debian/test/commands.yaml b/services/inference_sidecar/common/builders/images/build-debian/test/commands.yaml new file mode 100644 index 00000000..b3fc2beb --- /dev/null +++ b/services/inference_sidecar/common/builders/images/build-debian/test/commands.yaml @@ -0,0 +1,39 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# refer to docs at https://github.com/GoogleContainerTools/container-structure-test + +schemaVersion: 2.0.0 + +# command tests require the docker toolchain +commandTests: + - name: "curl version" + command: "curl" + args: ["--version"] + exitCode: 0 + + - name: "clang version" + command: "clang" + args: ["--version"] + exitCode: 0 + + - name: "bazel version" + command: "bazelisk" + args: ["version"] + exitCode: 0 + + - name: "docker image help" + command: "docker" + args: ["help", "image"] + exitCode: 0 diff --git a/services/inference_sidecar/common/builders/images/coverage-tools/Dockerfile b/services/inference_sidecar/common/builders/images/coverage-tools/Dockerfile new file mode 100644 index 00000000..c2f25551 --- /dev/null +++ b/services/inference_sidecar/common/builders/images/coverage-tools/Dockerfile @@ -0,0 +1,21 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +FROM ubuntu:20.04 + +COPY install_apps /scripts/ + +RUN \ + /scripts/install_apps && \ + rm -rf /scripts diff --git a/services/inference_sidecar/common/builders/images/coverage-tools/install_apps b/services/inference_sidecar/common/builders/images/coverage-tools/install_apps new file mode 100755 index 00000000..72fd822a --- /dev/null +++ b/services/inference_sidecar/common/builders/images/coverage-tools/install_apps @@ -0,0 +1,54 @@ +#!/bin/bash + +set -o pipefail +set -o errexit + +declare -i VERBOSE=0 + +usage() { + local exitval=${1-1} + cat >&2 << USAGE +usage: + $0 + --verbose Emit verbose info. Default: false +USAGE + # shellcheck disable=SC2086 + exit ${exitval} +} + +while [[ $# -gt 0 ]]; do + case "$1" in + --verbose) + VERBOSE=1 + shift + ;; + -h | --help) usage 0 ;; + *) usage 0 ;; + esac +done + +function apt_update() { + apt-get --quiet -o 'Acquire::https::No-Cache=True' -o 'Acquire::http::No-Cache=True' update +} + +function install_misc() { + DEBIAN_FRONTEND=noninteractive apt-get --quiet install -y --no-install-recommends \ + lcov="1.*" \ + google-perftools="2.*" +} + +function clean_debian() { + apt-get --quiet autoremove -y + apt-get autoclean + apt-get clean + rm -rf /var/lib/apt/lists +} + +if [[ ${VERBOSE} -eq 1 ]]; then + printf "=== SHELL ENVIRONMENT ===\n" + env +fi + +apt_update +install_misc +clean_debian diff --git a/services/inference_sidecar/common/builders/images/coverage-tools/test/commands.yaml b/services/inference_sidecar/common/builders/images/coverage-tools/test/commands.yaml new file mode 100644 index 00000000..bf962e40 --- /dev/null +++ b/services/inference_sidecar/common/builders/images/coverage-tools/test/commands.yaml @@ -0,0 +1,39 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# refer to docs at https://github.com/GoogleContainerTools/container-structure-test + +schemaVersion: 2.0.0 + +# command tests require the docker toolchain +commandTests: + - name: "lcov version" + command: "lcov" + args: ["--version"] + exitCode: 0 + + - name: "clang version" + command: "clang" + args: ["--version"] + exitCode: 0 + + - name: "bazel version" + command: "bazelisk" + args: ["version"] + exitCode: 0 + + - name: "docker image help" + command: "docker" + args: ["help", "image"] + exitCode: 0 diff --git a/services/inference_sidecar/common/builders/images/generate_system_bazelrc b/services/inference_sidecar/common/builders/images/generate_system_bazelrc new file mode 100755 index 00000000..9c8e419f --- /dev/null +++ b/services/inference_sidecar/common/builders/images/generate_system_bazelrc @@ -0,0 +1,83 @@ +#!/bin/bash + +# Generate a system bazelrc file +# Designed to be executed when generating a container image, for example from Dockerfile. +# For info on bazelrc files, refer to https://bazel.build/run/bazelrc?hl=en. + +set -o pipefail +set -o errexit + +function usage() { + local exitval=${1-1} + cat &>/dev/stderr << USAGE +usage: + $0 + --user-root-name Name of bazel user-root directory (within bazel cache dir) +USAGE + # shellcheck disable=SC2086 + exit ${exitval} +} + +while [[ $# -gt 0 ]]; do + case "$1" in + --user-root-name) + USER_ROOT_NAME="$2" + shift + shift + ;; + -h | --help) usage 0 ;; + *) + printf "unrecognized arg: %s\n" "$1" + usage + break + ;; + esac +done + +if [[ -z ${USER_ROOT_NAME} ]]; then + printf -- "error: --user-root-name must be specified\n" &>/dev/stderr + usage 1 +fi + +function _get_toolchains_hash() { + { + # emit versions of all tools relevant to builds + uname --machine + cat /etc/os-release + clang --version + /opt/bin/python3 --version + } | sha256sum | cut --delimiter=" " --fields=1 +} + +TOOLCHAINS_HASH=$(_get_toolchains_hash) +readonly TOOLCHAINS_HASH + +readonly BAZELRC="/etc/bazel.bazelrc" +readonly BAZEL_ROOT=/bazel_root +readonly BAZEL_OUTPUT_USER_ROOT="${BAZEL_ROOT}/build_${USER_ROOT_NAME}_${TOOLCHAINS_HASH:0:7}" +mkdir -p "${BAZEL_OUTPUT_USER_ROOT}" + +# Within the running container, globally set these variables. This is less +# obvious than using Dockerfile's ENV, but this particular value isn't +# available to the Dockerfile +# Note: The /etc/profile.d/bazel_env.sh will often be sourced automatically +# but this may only occur for login shells. For bash, the --login +# flag should be supplied. Otherwise, the bazel_env.sh file can be +# sourced directly. +cat >/etc/profile.d/bazel_env.sh <<EOF +echo "startup --output_base=${BAZEL_OUTPUT_USER_ROOT}/\$(id --user)" >> \$HOME/.bazelrc +EOF + +printf "generating %s\n" "${BAZELRC}" &>/dev/stderr + +# the output_user_root setting will have no effect as long as output_base is set.
However, in the +# case that $HOME/.bazelrc is not created via sourcing /etc/profile.d/bazel_env.sh, at least +# the appropriate container-specific output_user_root directory will be used +cat <<BAZELRC >"${BAZELRC}" +startup --output_user_root="${BAZEL_OUTPUT_USER_ROOT}" +# set a variable based on the hash of all build tool dependencies other than bazel itself +# primarily to avoid spurious cache hits for distinct sets of toolchains +build --action_env=TOOLCHAINS_HASH=${TOOLCHAINS_HASH} +BAZELRC diff --git a/services/inference_sidecar/common/builders/images/get_workspace_mount b/services/inference_sidecar/common/builders/images/get_workspace_mount new file mode 100755 index 00000000..517b2ce3 --- /dev/null +++ b/services/inference_sidecar/common/builders/images/get_workspace_mount @@ -0,0 +1,35 @@ +#!/bin/bash + +function get_docker_workspace_mount() { + # when running inside a docker container, expect /.dockerenv to exist + if ! [[ -f /.dockerenv ]]; then + printf "Error: %s must execute inside a docker container\n" "$(basename "${BASH_SOURCE[0]}")" &>/dev/stderr + exit 1 + fi + + # if running inside a docker container, the workspace mount point cannot be + # determined by git or bazel or inspecting the filesystem itself. Instead, we + # need to use docker to expose its mount info for the /src/workspace path. + # determine the current container's ID + local -r CONTAINER_ID="$(uname --nodename)" + # use docker inspect to extract the current mount path for /src/workspace + # this format string is a golang template (https://pkg.go.dev/text/template) processed + # by docker's --format flag, per https://docs.docker.com/config/formatting/ + # shellcheck disable=SC2016 + local -r FORMAT_STR=' + {{- range $v := .HostConfig.Binds -}} + {{$pathpair := split $v ":" -}} + {{if eq (index $pathpair 1) "/src/workspace" -}} + {{print (index $pathpair 0) -}} + {{end -}} + {{end -}} + ' + local -r MOUNT_PATH="$(docker inspect --format "${FORMAT_STR}" "${CONTAINER_ID}")" + if [[ -z ${MOUNT_PATH} ]]; then + printf "Error: Unable to determine mount point for /src/workspace. Exiting\n" &>/dev/stderr + exit 1 + fi + printf "%s" "${MOUNT_PATH}" +} + +get_docker_workspace_mount diff --git a/services/inference_sidecar/common/builders/images/gitconfig b/services/inference_sidecar/common/builders/images/gitconfig new file mode 100644 index 00000000..eaf257f4 --- /dev/null +++ b/services/inference_sidecar/common/builders/images/gitconfig @@ -0,0 +1,2 @@ +[safe] + directory = /src/workspace diff --git a/services/inference_sidecar/common/builders/images/install_go.sh b/services/inference_sidecar/common/builders/images/install_go.sh new file mode 100644 index 00000000..a436d20a --- /dev/null +++ b/services/inference_sidecar/common/builders/images/install_go.sh @@ -0,0 +1,50 @@ +#!/bin/bash +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
+ +# shell library to install and uninstall golang + +function _golang_install_dir() { + printf "/usr/local/go\n" +} + +function install_golang() { + declare -r _ARCH="$1" + declare -r FNAME=gobin.tar.gz + declare -r VERSION=1.20.4 + # shellcheck disable=SC2155 + declare -r GO_INSTALL_DIR="$(_golang_install_dir)" + declare -r -A GO_HASHES=( + [amd64]="698ef3243972a51ddb4028e4a1ac63dc6d60821bf18e59a807e051fee0a385bd" + [arm64]="105889992ee4b1d40c7c108555222ca70ae43fccb42e20fbf1eebb822f5e72c6" + ) + declare -r GO_HASH=${GO_HASHES[${_ARCH}]} + if [[ -z ${GO_HASH} ]]; then + printf "Unrecognized or unsupported architecture for golang: %s\n" "${_ARCH}" &>/dev/stderr + exit 1 + fi + + curl --silent -fSL --output ${FNAME} "https://go.dev/dl/go${VERSION}.linux-${_ARCH}.tar.gz" + echo "${GO_HASH} ${FNAME}" | sha256sum -c + tar --directory /usr/local -xzf ${FNAME} + rm -f ${FNAME} + update-alternatives --install /usr/bin/go go "${GO_INSTALL_DIR}"/bin/go 100 + + go version +} + +function remove_golang() { + update-alternatives --remove-all go + rm -rf "$(_golang_install_dir)" +} diff --git a/services/inference_sidecar/common/builders/images/install_golang_apps b/services/inference_sidecar/common/builders/images/install_golang_apps new file mode 100755 index 00000000..4254e438 --- /dev/null +++ b/services/inference_sidecar/common/builders/images/install_golang_apps @@ -0,0 +1,46 @@ +#!/bin/bash + +set -o pipefail +set -o errexit + +BAZEL_PATH=/usr/bin + +usage() { + local exitval=${1-1} + cat >&2 << USAGE +usage: + $0 + --verbose Emit verbose info. Default: false +USAGE + # shellcheck disable=SC2086 + exit ${exitval} +} + +while [[ $# -gt 0 ]]; do + case "$1" in + --verbose) + set -o xtrace + shift + ;; + -h | --help) usage 0 ;; + *) + usage + break + ;; + esac +done + +function install_bazelisk() { + go install github.com/bazelbuild/bazelisk@v1.19.0 + BAZELISK="$(go env GOPATH)"/bin/bazelisk + if [[ -n ${BAZEL_PATH} ]] && [[ -d ${BAZEL_PATH} ]]; then + ln -s "${BAZELISK}" "${BAZEL_PATH}"/bazel + fi + # install the specified version of bazel + USE_BAZEL_VERSION="$(cat /scripts/.bazelversion)" + export USE_BAZEL_VERSION + "${BAZELISK}" version + rm -rf /bazel_root/* +} + +install_bazelisk diff --git a/services/inference_sidecar/common/builders/images/presubmit/.pre-commit-config.yaml b/services/inference_sidecar/common/builders/images/presubmit/.pre-commit-config.yaml new file mode 120000 index 00000000..9971a06a --- /dev/null +++ b/services/inference_sidecar/common/builders/images/presubmit/.pre-commit-config.yaml @@ -0,0 +1 @@ +../../etc/.pre-commit-config.yaml \ No newline at end of file diff --git a/services/inference_sidecar/common/builders/images/presubmit/Dockerfile b/services/inference_sidecar/common/builders/images/presubmit/Dockerfile new file mode 100644 index 00000000..024c05fe --- /dev/null +++ b/services/inference_sidecar/common/builders/images/presubmit/Dockerfile @@ -0,0 +1,33 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +FROM ubuntu:20.04 + +COPY install_apps install_go.sh .pre-commit-config.yaml /scripts/ +COPY gitconfig /etc + +ARG PRE_COMMIT_VENV_DIR=/usr/pre-commit-venv +ARG TARGETARCH +ENV BUILD_ARCH="${TARGETARCH}" \ + WORKSPACE=/src/workspace \ + PRE_COMMIT_HOME=/var/cache/pre-commit \ + PRE_COMMIT_TOOL=${PRE_COMMIT_VENV_DIR}/bin/pre-commit \ + TZ=Etc/UTC + +RUN \ + chmod 644 /etc/gitconfig && \ + /usr/bin/env -v PRE_COMMIT_VENV_DIR=${PRE_COMMIT_VENV_DIR} /scripts/install_apps && \ + rm -rf /scripts + +ENV PATH="${PATH}:/usr/local/go/bin" diff --git a/services/inference_sidecar/common/builders/images/presubmit/gitconfig b/services/inference_sidecar/common/builders/images/presubmit/gitconfig new file mode 120000 index 00000000..db62a00c --- /dev/null +++ b/services/inference_sidecar/common/builders/images/presubmit/gitconfig @@ -0,0 +1 @@ +../gitconfig \ No newline at end of file diff --git a/services/inference_sidecar/common/builders/images/presubmit/install_apps b/services/inference_sidecar/common/builders/images/presubmit/install_apps new file mode 100755 index 00000000..883ef650 --- /dev/null +++ b/services/inference_sidecar/common/builders/images/presubmit/install_apps @@ -0,0 +1,128 @@ +#!/bin/bash + +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -o pipefail +set -o errexit + +declare -i VERBOSE=0 + +usage() { + local exitval=${1-1} + cat >&2 << USAGE +usage: + $0 + --verbose Emit verbose info. 
Default: false +USAGE + # shellcheck disable=SC2086 + exit ${exitval} +} + +while [[ $# -gt 0 ]]; do + case "$1" in + --verbose) + VERBOSE=1 + set -o xtrace + shift + ;; + -h | --help) usage 0 ;; + *) usage 0 ;; + esac +done + +SCRIPT_DIR="$(dirname "$(readlink -f "$0")")" +readonly SCRIPT_DIR + +function apt_update() { + apt-get --quiet -o 'Acquire::https::No-Cache=True' -o 'Acquire::http::No-Cache=True' update +} + +function install_packages() { + DEBIAN_FRONTEND=noninteractive apt-get --quiet install -y --no-install-recommends \ + apt-transport-https="2.0.*" \ + ca-certificates \ + libcurl4="7.68.*" \ + curl="7.68.*" \ + gnupg="2.2.*" \ + lsb-release="11.1.*" \ + openjdk-11-jre="11.0.*" \ + python3.9-venv="3.9.*" \ + shellcheck="0.7.*" \ + software-properties-common="0.99.*" \ + wget="1.20.*" + update-alternatives --install /usr/bin/python3 python3 /usr/bin/python3.9 100 +} + +# Install Docker (https://docs.docker.com/engine/install/debian/) +function install_docker() { + apt-get --quiet remove docker docker.io containerd runc + local -r KEYRING_DIR=/etc/apt/keyrings + mkdir -p "${KEYRING_DIR}" + local -r DIST=ubuntu + curl --silent --fail --show-error --location https://download.docker.com/linux/${DIST}/gpg \ + | /usr/bin/gpg --dearmor -o "${KEYRING_DIR}"/docker.gpg + local -r LSB_RELEASE="$(/usr/bin/lsb_release -cs)" + echo "deb [arch=${ARCH} signed-by=${KEYRING_DIR}/docker.gpg] https://download.docker.com/linux/${DIST} ${LSB_RELEASE} stable" \ + | tee /etc/apt/sources.list.d/docker.list + apt_update + DEBIAN_FRONTEND=noninteractive apt-get --quiet install -y docker-ce docker-ce-cli containerd.io +} + +function install_precommit() { + /usr/bin/python3.9 -m venv "${PRE_COMMIT_VENV_DIR}" + "${PRE_COMMIT_VENV_DIR}"/bin/pip install pre-commit~=3.1 + "${PRE_COMMIT_TOOL}" --version + + # initialize pre-commit cache, which needs a git repo (a temporary will suffice) + local -r GIT_REPO="$(mktemp -d)" + git init "${GIT_REPO}" + cd "${GIT_REPO}" + # run pre-commit with no files in scope to only trigger population of the cache + { + cat "${SCRIPT_DIR}"/.pre-commit-config.yaml + printf "\nfiles: ^$\n" + } > .pre-commit-config.yaml + git add .pre-commit-config.yaml + "${PRE_COMMIT_TOOL}" run --config .pre-commit-config.yaml || true + rm -rf "${GIT_REPO}" +} + +function cleanup() { + apt-get --quiet autoremove -y + apt-get autoclean + apt-get clean + rm -rf /var/lib/apt/lists + cd / + go clean -cache +} + +if [[ ${VERBOSE} -eq 1 ]]; then + printf "=== SHELL ENVIRONMENT ===\n" + env +fi + +ARCH=$(dpkg --print-architecture) +readonly ARCH + +apt_update +install_packages + +# shellcheck disable=SC1090 +. 
"${SCRIPT_DIR}"/install_go.sh +# golang is used to compile go-based hooks +install_golang "${ARCH}" +install_docker +install_precommit +cleanup diff --git a/services/inference_sidecar/common/builders/images/presubmit/install_go.sh b/services/inference_sidecar/common/builders/images/presubmit/install_go.sh new file mode 120000 index 00000000..211fd009 --- /dev/null +++ b/services/inference_sidecar/common/builders/images/presubmit/install_go.sh @@ -0,0 +1 @@ +../install_go.sh \ No newline at end of file diff --git a/services/inference_sidecar/common/builders/images/release/Dockerfile b/services/inference_sidecar/common/builders/images/release/Dockerfile new file mode 100644 index 00000000..e7a05704 --- /dev/null +++ b/services/inference_sidecar/common/builders/images/release/Dockerfile @@ -0,0 +1,33 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +FROM golang:1.19.3-alpine3.16 AS golang-1.19 +FROM node:18.9.0-alpine3.16 + +COPY --from=golang-1.19 /usr/local/go/ /usr/local/go/ +ENV PATH="${PATH}:/usr/local/go/bin" + +COPY install_release_apps /scripts/ +COPY gitconfig /etc + +ARG TARGETARCH +ENV BUILD_ARCH="${TARGETARCH}" \ + WORKSPACE=/src/workspace + +RUN \ + chmod 644 /etc/gitconfig && \ + /scripts/install_release_apps && \ + rm -rf /scripts + +ENTRYPOINT ["/bin/sh"] diff --git a/services/inference_sidecar/common/builders/images/release/gitconfig b/services/inference_sidecar/common/builders/images/release/gitconfig new file mode 100644 index 00000000..a72cd197 --- /dev/null +++ b/services/inference_sidecar/common/builders/images/release/gitconfig @@ -0,0 +1,3 @@ +[user] + email = nobody@google.com + name = Privacy Sandbox Release System diff --git a/services/inference_sidecar/common/builders/images/release/install_release_apps b/services/inference_sidecar/common/builders/images/release/install_release_apps new file mode 100755 index 00000000..e6c12253 --- /dev/null +++ b/services/inference_sidecar/common/builders/images/release/install_release_apps @@ -0,0 +1,8 @@ +#!/bin/sh + +npm install --global commit-and-tag-version@10.1.0 + +# Install the GitHub CLI tool (https://cli.github.com/) +apk add github-cli + +GOBIN=/usr/local/go/bin go install github.com/bazelbuild/buildtools/buildozer@6.0.1 diff --git a/services/inference_sidecar/common/builders/images/release/test/commands.yaml b/services/inference_sidecar/common/builders/images/release/test/commands.yaml new file mode 100644 index 00000000..62ac6631 --- /dev/null +++ b/services/inference_sidecar/common/builders/images/release/test/commands.yaml @@ -0,0 +1,29 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# refer to docs at https://github.com/GoogleContainerTools/container-structure-test + +schemaVersion: 2.0.0 + +# command tests require the docker toolchain +commandTests: + - name: npm version + command: "npm" + args: ["--version"] + exitCode: 0 + + - name: commit-and-tag-version help + command: "commit-and-tag-version" + args: ["--help"] + exitCode: 0 diff --git a/services/inference_sidecar/common/builders/images/release/test/structure.yaml b/services/inference_sidecar/common/builders/images/release/test/structure.yaml new file mode 100644 index 00000000..1d704640 --- /dev/null +++ b/services/inference_sidecar/common/builders/images/release/test/structure.yaml @@ -0,0 +1,23 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# refer to docs at https://github.com/GoogleContainerTools/container-structure-test + +schemaVersion: 2.0.0 + +fileExistenceTests: + - name: git config + path: /etc/gitconfig + shouldExist: true + permissions: "-r--r--r--" diff --git a/services/inference_sidecar/common/builders/images/test-tools/Dockerfile b/services/inference_sidecar/common/builders/images/test-tools/Dockerfile new file mode 100644 index 00000000..45fab3cf --- /dev/null +++ b/services/inference_sidecar/common/builders/images/test-tools/Dockerfile @@ -0,0 +1,47 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
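The commands.yaml and structure.yaml files above are container-structure-test configs; a hedged example of running them against a locally built release image, where the image tag is illustrative:

    # commandTests exercise binaries baked into the image;
    # fileExistenceTests verify /etc/gitconfig and its permissions
    container-structure-test test \
      --image privacysandbox/builders/release:latest \
      --config test/commands.yaml \
      --config test/structure.yaml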
+ +FROM alpine:3.18 as slowhttptest_builder +# hadolint ignore=DL3018 +RUN apk add --no-cache autoconf automake build-base git openssl-dev +WORKDIR /build +ADD https://github.com/shekyan/slowhttptest/archive/refs/tags/v1.9.0.tar.gz /build/src.tar.gz +RUN tar xz --strip-components 1 -f src.tar.gz && ./configure && make + +FROM alpine:3.18 as wrk_builder +ARG TARGETARCH +ENV BUILD_ARCH="${TARGETARCH}" +COPY build_wrk /build/ +WORKDIR /build +ADD https://github.com/giltene/wrk2/archive/44a94c17d8e6a0bac8559b53da76848e430cb7a7.tar.gz /build/src.tar.gz +RUN /build/build_wrk + +FROM golang:1.21-alpine3.18 AS golang +ENV GOBIN=/usr/local/go/bin +COPY build_golang_apps /scripts/ +RUN /scripts/build_golang_apps + +FROM fullstorydev/grpcurl:v1.8.9-alpine AS grpcurl + +FROM alpine:3.18 +COPY --from=golang /usr/local/go/bin/* /usr/local/bin/ +COPY --from=grpcurl /bin/grpcurl /usr/local/bin/ +ARG TARGETARCH +ENV BUILD_ARCH="${TARGETARCH}" \ + PATH="${PATH}:/usr/local/go/bin" \ + GOBIN=/usr/local/go/bin +COPY install_apps /scripts/ +RUN /scripts/install_apps +COPY --from=slowhttptest_builder /build/src/slowhttptest /usr/bin/ +COPY --from=wrk_builder /build/wrk /usr/bin/wrk2 diff --git a/services/inference_sidecar/common/builders/images/test-tools/build_golang_apps b/services/inference_sidecar/common/builders/images/test-tools/build_golang_apps new file mode 100755 index 00000000..b79e20c4 --- /dev/null +++ b/services/inference_sidecar/common/builders/images/test-tools/build_golang_apps @@ -0,0 +1,16 @@ +#!/bin/busybox sh + +set -o errexit + +install_ghz() { + go install github.com/bojand/ghz/cmd/ghz@v0.114.0 + ghz --help +} + +install_cassowary() { + go install github.com/rogerwelin/cassowary/cmd/cassowary@v0.16.0 + cassowary --help +} + +install_ghz +install_cassowary diff --git a/services/inference_sidecar/common/builders/images/test-tools/build_wrk b/services/inference_sidecar/common/builders/images/test-tools/build_wrk new file mode 100755 index 00000000..d843dd06 --- /dev/null +++ b/services/inference_sidecar/common/builders/images/test-tools/build_wrk @@ -0,0 +1,27 @@ +#!/bin/busybox sh + +set -o errexit +set -o xtrace + +install_no_wrk() { + cat >/build/wrk <<EOF +#!/bin/busybox sh +PROGRAM_NAME="\$(basename \$0)" +printf "Error: %s not supported on Aarch64\n" "\${PROGRAM_NAME}" +printf "build_arch: %s\n" "${BUILD_ARCH}" >/dev/stderr +exit 1 +EOF + chmod 755 /build/wrk +} + +install_wrk() { + apk add --no-cache build-base git openssl-dev zlib-dev + tar xz --strip-components 1 -f src.tar.gz + make -j6 +} + +if [[ ${BUILD_ARCH} == arm64 ]]; then + install_no_wrk +else + install_wrk +fi diff --git a/services/inference_sidecar/common/builders/images/test-tools/install_apps b/services/inference_sidecar/common/builders/images/test-tools/install_apps new file mode 100755 index 00000000..597f937b --- /dev/null +++ b/services/inference_sidecar/common/builders/images/test-tools/install_apps @@ -0,0 +1,28 @@ +#!/bin/busybox sh + +set -o errexit + +install_packages() { + apk --no-cache add \ + bash~=5 \ + curl~=8 \ + jq~=1 \ + libstdc++ +} + +install_nghttp2() { + apk --no-cache add \ + nghttp2~=1 + h2load --version +} + +install_apache2_utils() { + apk --no-cache add \ + apache2-utils~=2.4 + ab -V +} + +apk --no-cache update +install_packages +install_nghttp2 +install_apache2_utils diff --git a/services/inference_sidecar/common/builders/images/utils/Dockerfile b/services/inference_sidecar/common/builders/images/utils/Dockerfile new file mode 100644 index 00000000..0d1defd6 --- /dev/null +++
b/services/inference_sidecar/common/builders/images/utils/Dockerfile @@ -0,0 +1,19 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +FROM alpine:3.16 + +RUN apk --no-cache add \ + unzip~=6.0 \ + zip~=3.0 diff --git a/services/inference_sidecar/common/builders/tests/data/hashes/build-amazonlinux2 b/services/inference_sidecar/common/builders/tests/data/hashes/build-amazonlinux2 new file mode 100644 index 00000000..b9750ae4 --- /dev/null +++ b/services/inference_sidecar/common/builders/tests/data/hashes/build-amazonlinux2 @@ -0,0 +1 @@ +b35ee4e2b6210bc66e97083805642945433b0ef648a9d297a2a4ccdfb518413f diff --git a/services/inference_sidecar/common/builders/tests/data/hashes/build-amazonlinux2023 b/services/inference_sidecar/common/builders/tests/data/hashes/build-amazonlinux2023 new file mode 100644 index 00000000..7b708170 --- /dev/null +++ b/services/inference_sidecar/common/builders/tests/data/hashes/build-amazonlinux2023 @@ -0,0 +1 @@ +7501f9f920a794ba6fa7c6cabb17d076ed3db0faf3fe5393e2904ce71978dec5 diff --git a/services/inference_sidecar/common/builders/tests/data/hashes/build-debian b/services/inference_sidecar/common/builders/tests/data/hashes/build-debian new file mode 100644 index 00000000..0e9a76c9 --- /dev/null +++ b/services/inference_sidecar/common/builders/tests/data/hashes/build-debian @@ -0,0 +1 @@ +a99a8af64d61e3eb635aecfb81d229437890693cd6f6d33bd269e9f1c55ca760 diff --git a/services/inference_sidecar/common/builders/tests/data/hashes/coverage-tools b/services/inference_sidecar/common/builders/tests/data/hashes/coverage-tools new file mode 100644 index 00000000..f0336331 --- /dev/null +++ b/services/inference_sidecar/common/builders/tests/data/hashes/coverage-tools @@ -0,0 +1 @@ +cd3fb189dd23793af3bdfa02d6774ccb35bddbec7059761e25c4f7be4c1e8ca1 diff --git a/services/inference_sidecar/common/builders/tests/data/hashes/presubmit b/services/inference_sidecar/common/builders/tests/data/hashes/presubmit new file mode 100644 index 00000000..ada65387 --- /dev/null +++ b/services/inference_sidecar/common/builders/tests/data/hashes/presubmit @@ -0,0 +1 @@ +79f2509c74607ab33bc5fe3f425f61ddf89e2f75efc299b1a5a7a3039367c90e diff --git a/services/inference_sidecar/common/builders/tests/data/hashes/release b/services/inference_sidecar/common/builders/tests/data/hashes/release new file mode 100644 index 00000000..e5218ba2 --- /dev/null +++ b/services/inference_sidecar/common/builders/tests/data/hashes/release @@ -0,0 +1 @@ +d60fb40a53b1704f7ac353d0d036f49eac10bfd08ccb19f9b436acf8bdf2cb79 diff --git a/services/inference_sidecar/common/builders/tests/data/hashes/test-tools b/services/inference_sidecar/common/builders/tests/data/hashes/test-tools new file mode 100644 index 00000000..fc5e0b5c --- /dev/null +++ b/services/inference_sidecar/common/builders/tests/data/hashes/test-tools @@ -0,0 +1 @@ +dd1ec6137d4dd22fec555044cd85f484adfa6c7b686880ea5449cff936bad34e diff --git a/services/inference_sidecar/common/builders/tests/data/hashes/utils 
b/services/inference_sidecar/common/builders/tests/data/hashes/utils new file mode 100644 index 00000000..da29b2fa --- /dev/null +++ b/services/inference_sidecar/common/builders/tests/data/hashes/utils @@ -0,0 +1 @@ +9fca27d931acc2bc96fa0560466cc0914a0d1cc73fb8749af057caacf2911f85 diff --git a/services/inference_sidecar/common/builders/tests/run-tests b/services/inference_sidecar/common/builders/tests/run-tests new file mode 100755 index 00000000..f98389a4 --- /dev/null +++ b/services/inference_sidecar/common/builders/tests/run-tests @@ -0,0 +1,264 @@ +#!/bin/bash + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Test runner for the build system. This script should preferably be run +# "directly" and not inside docker, permitting the test cases to execute +# both inside and outside a docker container. + +set -o pipefail +set -o errexit + +trap _cleanup EXIT +function _cleanup() { + local -r -i STATUS=$? + if [[ -d ${TMP_HASHES_DIR1} ]]; then + rm -rf "${TMP_HASHES_DIR1}" "${TMP_HASHES_DIR2}" + fi + if [[ ${STATUS} -ne 0 ]]; then + printf "Error: run-tests status code: %d\n" "${STATUS}" + sleep 5s + fi + exit ${STATUS} +} + +function get_image_list() { + local -r _images_dir="$1" + find "${_images_dir}" -maxdepth 1 -mindepth 1 -type d -printf "%P\n" \ + | sort +} + +function usage() { + declare -r -i exitval=${1-1} + cat &>/dev/stderr <<USAGE +usage: + $0 <options> + --image Run tests only for specified image + --fast Only generate hashes directly rather than also using cbuild + --build-images Build the images + --verbose Produce verbose output +USAGE + # shellcheck disable=SC2086 + exit ${exitval} +} + +declare -i FAST=0 +declare -i VERBOSE=0 +declare -i BUILD_IMAGES=0 +declare IMAGE + +while [[ $# -gt 0 ]]; do + case "$1" in + --image) + IMAGE="$2" + if [[ -z ${IMAGE} ]]; then + usage + fi + shift 2 || usage + ;; + --fast) + FAST=1 + shift + ;; + --build-images) + BUILD_IMAGES=1 + shift + ;; + --verbose) + VERBOSE=1 + shift + ;; + -h | --help) usage 0 ;; + *) + printf "unrecognized arg: %s\n" "$1" + usage + ;; + esac +done + +TESTS_DIR="$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")" +readonly TESTS_DIR +readonly TOOLS_DIR="${TESTS_DIR}"/../tools +readonly HASHES_DIR="${TESTS_DIR}"/data/hashes +readonly IMAGES_DIR="${TESTS_DIR}"/../images +if [[ -n ${IMAGE} ]]; then + IMAGE_LIST="${IMAGE}" +else + IMAGE_LIST="$(get_image_list "${IMAGES_DIR}")" +fi +readonly IMAGE_LIST +declare -i RETCODE=0 + +function cli_tests_test-tools() { + declare -r _image="$1" + if [[ ${FAST} -eq 1 ]] || [[ ${_image} != test-tools ]]; then + return + fi + declare -a -r TOOLS=( + "${TOOLS_DIR}/curl --version" + "${TOOLS_DIR}/ghz --version" + "${TOOLS_DIR}/ab -V" + "${TOOLS_DIR}/cassowary --version" + "${TOOLS_DIR}/grpcurl -version" + "${TOOLS_DIR}/h2load --version" + "${TOOLS_DIR}/hadolint --version" + "${TOOLS_DIR}/jq --version" + ) + printf "Testing test CLI tool wrappers\n" + for tool_cmd in "${TOOLS[@]}"; do + printf " %s\n" "${tool_cmd}" + ${tool_cmd} &>/dev/null + done +} + +function cli_tests_misc() { + declare -a -r TOOLS=( +
"${TOOLS_DIR}/aws-cli help" + "${TOOLS_DIR}/awscurl --help" + ) + printf "Testing utils CLI tool wrappers\n" + for tool_cmd in "${TOOLS[@]}"; do + printf " %s\n" "${tool_cmd}" + ${tool_cmd} &>/dev/null + done +} + +function cli_tests_utils() { + declare -r _image="$1" + if [[ ${FAST} -eq 1 ]] || [[ ${_image} != utils ]]; then + return + fi + declare -a -r TOOLS=( + "${TOOLS_DIR}/unzip -h" + "${TOOLS_DIR}/zip -h" + ) + printf "Testing utils CLI tool wrappers\n" + for tool_cmd in "${TOOLS[@]}"; do + printf " %s\n" "${tool_cmd}" + ${tool_cmd} &>/dev/null + done +} + +function cli_tests_release() { + declare -r _image="$1" + if [[ ${FAST} -eq 1 ]] || [[ ${_image} != release ]]; then + return + fi + declare -a -r TOOLS=( + "${TOOLS_DIR}/buildozer -version" + ) + printf "Testing release CLI tool wrappers\n" + for tool_cmd in "${TOOLS[@]}"; do + printf " %s\n" "${tool_cmd}" + ${tool_cmd} &>/dev/null + done +} + +function create_temp_hash_dir() { + local -r DIR="$(mktemp --directory)" + cp "${HASHES_DIR}"/* "${DIR}" + printf "%s" "${DIR}" +} + +# warning when running inside a docker container +if [[ -f /.dockerenv ]]; then + printf "warning: Executing within docker container, which obviates testing in a non-docker environment\n" &>/dev/stderr +fi + +declare -a GET_BUILDER_IMAGE_ARGS=( + --sha-only +) +if [[ ${BUILD_IMAGES} -eq 0 ]]; then + GET_BUILDER_IMAGE_ARGS+=(--no-build) +fi + +function generate_hashes_direct() { + TMP_HASHES_DIR1=$(create_temp_hash_dir) + printf "Generating image hashes (direct mode)\n" + + for img in ${IMAGE_LIST}; do + # shellcheck disable=SC2086 + if ! "${TOOLS_DIR}"/get-builder-image-tagged "${GET_BUILDER_IMAGE_ARGS[@]}" --image ${img} >"${TMP_HASHES_DIR1}/${img}"; then + printf "Error generating image hash: %s\n" "${img}" &>/dev/stderr + RETCODE+=1 + fi + done + + BASELINE_UPDATES="$(diff --brief "${HASHES_DIR}" "${TMP_HASHES_DIR1}" || true)" + readonly BASELINE_UPDATES + if [[ -n $BASELINE_UPDATES ]]; then + # shellcheck disable=SC2086 + printf "detected shift in baseline files:\n%s\n" "${BASELINE_UPDATES}" + if [[ ${VERBOSE} -eq 1 ]]; then + diff "${HASHES_DIR}" "${TMP_HASHES_DIR1}" || true + fi + cp --force "${TMP_HASHES_DIR1}"/* "${HASHES_DIR}" + RETCODE+=10 + else + printf "hashes unchanged\n" + fi +} + +function generate_hashes_cbuild() { + TMP_HASHES_DIR2=$(create_temp_hash_dir) + + printf "Generating image hashes (cbuild mode)\n" + for img in ${IMAGE_LIST}; do + if ! "${TOOLS_DIR}"/cbuild --image build-debian --cmd "tools/get-builder-image-tagged ${GET_BUILDER_IMAGE_ARGS[*]} --image ${img}" >"${TMP_HASHES_DIR2}/${img}"; then + printf "Error generating image hash: %s\n" "${img}" &>/dev/stderr + RETCODE+=1 + fi + done + + MODE_MISMATCH="$(diff --brief "${TMP_HASHES_DIR1}" "${TMP_HASHES_DIR2}" || true)" + readonly MODE_MISMATCH + if [[ -n $MODE_MISMATCH ]]; then + # shellcheck disable=SC2086 + printf "Error: mismatch between direct and cbuild modes\n%s" "${MODE_MISMATCH}" &>/dev/stderr + if [[ ${VERBOSE} -eq 1 ]]; then + diff "${TMP_HASHES_DIR1}" "${TMP_HASHES_DIR2}" || true + fi + RETCODE+=100 + else + printf "hashes unchanged\n" + fi +} + +generate_hashes_direct + +if [[ ${FAST} -eq 1 ]]; then + exit ${RETCODE} +fi + +generate_hashes_cbuild + +# CLI tests + +cli_tests_misc +for img in ${IMAGE_LIST}; do + cli_tests_test-tools "${img}" + cli_tests_utils "${img}" + cli_tests_release "${img}" +done + +printf "test for error in direct execution of underlying wrapper scripts\n" +for tool in test-tool utils-tool; do + printf " %s: " "${tool}" + if ! 
"${TOOLS_DIR}"/${tool} &>/dev/null; then + printf "ok\n" + else + printf "failed to exit non-zero\n" + fi +done + +exit ${RETCODE} diff --git a/services/inference_sidecar/common/builders/tools/ab b/services/inference_sidecar/common/builders/tools/ab new file mode 120000 index 00000000..d609abda --- /dev/null +++ b/services/inference_sidecar/common/builders/tools/ab @@ -0,0 +1 @@ +test-tool \ No newline at end of file diff --git a/services/inference_sidecar/common/builders/tools/aws-cli b/services/inference_sidecar/common/builders/tools/aws-cli new file mode 100755 index 00000000..0ded1392 --- /dev/null +++ b/services/inference_sidecar/common/builders/tools/aws-cli @@ -0,0 +1,70 @@ +#!/usr/bin/env bash + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# environment variables (all optional): +# WORKSPACE repo root directory, must be an absolute path +# EXTRA_DOCKER_RUN_ARGS additional arguments to pass to docker run invocations +# +# environment variables exported to AWS CLI +# For more info on supported env vars, see: +# https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-envvars.html +# +# AWS_ACCESS_KEY_ID +# AWS_SECRET_ACCESS_KEY +# AWS_SESSION_TOKEN +# AWS_REGION +# AWS_DEFAULT_REGION +# AWS_PROFILE + +set -o errexit + +# shellcheck disable=SC1090 +source "$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")"/builder.sh + +declare -a ENV_VARS +builder::add_aws_env_vars ENV_VARS +ENV_VARS+=( + "HOME=/home" +) + +declare -a DOCKER_RUN_ARGS +DOCKER_RUN_ARGS+=( + "--rm" + "$(echo "${EXTRA_DOCKER_RUN_ARGS}" | envsubst)" +) +for evar in "${ENV_VARS[@]}" +do + DOCKER_RUN_ARGS+=( + "--env=${evar}" + ) +done +if [[ -t 0 ]] && [[ -t 1 ]]; then + # stdin and stdout are open, assume it's an interactive tty session + DOCKER_RUN_ARGS+=( + "--interactive" + "--tty" + ) +fi + +REL_PWD="$(realpath --relative-to="${WORKSPACE}" "$(pwd)")" +readonly REL_PWD + +# shellcheck disable=SC2068 +docker run \ + ${DOCKER_RUN_ARGS[@]} \ + --user "$(id -u):$(id -g)" \ + --volume "${HOME}"/.aws/:/home/.aws/ \ + --volume "${WORKSPACE}":/src/workspace \ + --workdir /src/workspace/"${REL_PWD}" \ + amazon/aws-cli:latest "$@" diff --git a/services/inference_sidecar/common/builders/tools/awscurl b/services/inference_sidecar/common/builders/tools/awscurl new file mode 100755 index 00000000..355d58e1 --- /dev/null +++ b/services/inference_sidecar/common/builders/tools/awscurl @@ -0,0 +1,71 @@ +#!/usr/bin/env bash + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# environment variables (all optional): +# WORKSPACE repo root directory, must be an absolute path +# EXTRA_DOCKER_RUN_ARGS additional arguments to pass to docker run invocations +# +# environment variables exported to AWS CLI +# For more info on supported env vars, see: +# https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-envvars.html +# +# AWS_ACCESS_KEY_ID +# AWS_SECRET_ACCESS_KEY +# AWS_SESSION_TOKEN +# AWS_REGION +# AWS_DEFAULT_REGION +# AWS_PROFILE + +set -o errexit + +# shellcheck disable=SC1090 +source "$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")"/builder.sh + +declare -a ENV_VARS +builder::add_aws_env_vars ENV_VARS +ENV_VARS+=( + "HOME=/home" + "PYTHONPATH=/" +) + +declare -a DOCKER_RUN_ARGS +DOCKER_RUN_ARGS+=( + --rm + --interactive + "$(echo "${EXTRA_DOCKER_RUN_ARGS}" | envsubst)" +) +for evar in "${ENV_VARS[@]}" +do + DOCKER_RUN_ARGS+=( + "--env=${evar}" + ) +done +if [[ -t 0 ]] && [[ -t 1 ]]; then + # stdin and stdout are open, assume it's an interactive tty session + DOCKER_RUN_ARGS+=( + --tty + ) +fi + +REL_PWD="$(realpath --relative-to="${WORKSPACE}" "$(pwd)")" +readonly REL_PWD + +# shellcheck disable=SC2068 +docker run \ + ${DOCKER_RUN_ARGS[@]} \ + --user "$(id -u):$(id -g)" \ + --volume "${HOME}"/.aws/:/home/.aws/ \ + --volume "${WORKSPACE}":/src/workspace \ + --workdir /src/workspace/"${REL_PWD}" \ + ghcr.io/okigan/awscurl:latest "$@" diff --git a/services/inference_sidecar/common/builders/tools/bazel-amazonlinux2 b/services/inference_sidecar/common/builders/tools/bazel-amazonlinux2 new file mode 120000 index 00000000..98d6d309 --- /dev/null +++ b/services/inference_sidecar/common/builders/tools/bazel-amazonlinux2 @@ -0,0 +1 @@ +bazel-debian \ No newline at end of file diff --git a/services/inference_sidecar/common/builders/tools/bazel-amazonlinux2023 b/services/inference_sidecar/common/builders/tools/bazel-amazonlinux2023 new file mode 120000 index 00000000..98d6d309 --- /dev/null +++ b/services/inference_sidecar/common/builders/tools/bazel-amazonlinux2023 @@ -0,0 +1 @@ +bazel-debian \ No newline at end of file diff --git a/services/inference_sidecar/common/builders/tools/bazel-debian b/services/inference_sidecar/common/builders/tools/bazel-debian new file mode 100755 index 00000000..843b3b6e --- /dev/null +++ b/services/inference_sidecar/common/builders/tools/bazel-debian @@ -0,0 +1,69 @@ +#!/usr/bin/env bash + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
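Both AWS wrappers above run the real tool inside a container, forwarding the AWS_* variables listed in their headers and mounting the host's ~/.aws directory; a usage sketch in which the profile, memory limit, and URL are placeholders:

    # aws-cli: arguments pass straight through to the containerized CLI
    AWS_PROFILE=dev tools/aws-cli s3 ls
    # awscurl: extra docker-run flags can be injected via EXTRA_DOCKER_RUN_ARGS
    EXTRA_DOCKER_RUN_ARGS="--memory 512m" tools/awscurl --region us-east-1 https://example.com/api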
+ +# environment variables supported by cbuild (all optional): +# WORKSPACE Set the path to the workspace (repo root) +# AWS_ACCESS_KEY_ID AWS auth token +# AWS_SECRET_ACCESS_KEY AWS auth token +# BAZEL_STARTUP_ARGS Additional startup arguments to pass to bazel invocations +# BAZEL_EXTRA_ARGS Additional command arguments to pass to bazel invocations +# EXTRA_DOCKER_RUN_ARGS Additional arguments to pass to docker run invocations + +set -o pipefail +set -o errexit + +function partition_array() { + if [[ ${#@} -ne 3 ]]; then + printf "insufficient args\n" &>/dev/stderr + return 1 + fi + declare -n arr=$1 + declare -n pre=$2 + declare -n post=$3 + if [[ ${#arr[@]} -eq 0 ]]; then + return + fi + + declare -r delim="--" + declare target=pre + for i in "${!arr[@]}"; do + if [[ ${arr[$i]} == "${delim}" ]]; then + target=post + fi + if [[ ${target} == pre ]]; then + pre+=("${arr[$i]}") + else + post+=("${arr[$i]}") + fi + done +} + +SCRIPT_NAME="$(basename "${BASH_SOURCE[0]}")" +readonly SCRIPT_NAME +readonly IMAGE=build-"${SCRIPT_NAME/bazel-}" +CBUILD=$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")/cbuild +readonly CBUILD + +# partition the command-line args into "bazel args" and "app args", where the +# app args are all those after the presence of the "--" arg, if any +declare -a BAZEL_ARGS +declare -a APP_ARGS +# shellcheck disable=SC2034,SC2206 +declare -r -a ARGLIST=("$@") +partition_array ARGLIST BAZEL_ARGS APP_ARGS + +"${CBUILD}" --seccomp-unconfined --image "${IMAGE}" --cmd " +printf 'bazel output_base: [%s]\n' \"\$(bazel info output_base 2>/dev/null)\" +bazel ${BAZEL_STARTUP_ARGS} ${BAZEL_ARGS[*]@Q} ${BAZEL_EXTRA_ARGS} ${APP_ARGS[*]@Q} +" diff --git a/services/inference_sidecar/common/builders/tools/builder.sh b/services/inference_sidecar/common/builders/tools/builder.sh new file mode 100644 index 00000000..77317aae --- /dev/null +++ b/services/inference_sidecar/common/builders/tools/builder.sh @@ -0,0 +1,220 @@ +#!/bin/bash +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# +# shell package enabling support for docker +# + +# require this script to be sourced rather than executed +if ! (return 0 2>/dev/null); then + printf "Error: Script %s must be sourced\n" "${BASH_SOURCE[0]}" &>/dev/stderr + exit 1 +fi + +####################################### +# Configure and export the WORKSPACE variable. Does not +# overwrite existing WORKSPACE value. 
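To make the argument partitioning in bazel-debian above concrete: everything before the first "--" is passed to bazel, while the "--" and everything after it are forwarded to the program being run; a hypothetical invocation (target and flag are illustrative):

    # partition_array yields BAZEL_ARGS=(run //tools/foo) and APP_ARGS=(-- --port=50051)
    tools/bazel-debian run //tools/foo -- --port=50051

Note that the delimiter itself lands in the post array, matching bazel's own convention for separating binary arguments.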
+####################################### +function builder::set_workspace() { + export WORKSPACE + if [[ -v WORKSPACE ]]; then + return + fi + local -r GIT_TOPLEVEL="$(git rev-parse --show-superproject-working-tree)" + if [[ -n ${GIT_TOPLEVEL} ]]; then + WORKSPACE="${GIT_TOPLEVEL}" + else + WORKSPACE="$(git rev-parse --show-toplevel)" + fi + local -r ws_path="$(realpath "${WORKSPACE}"/WORKSPACE)" + WORKSPACE="$(dirname "${ws_path}")" +} + +####################################### +# Return the path to the build workspace, for use with +# the docker volume or mount argument +####################################### +function builder::get_docker_workspace_mount() { + if [[ -v WORKSPACE_MOUNT ]]; then + printf "%s" "${WORKSPACE_MOUNT}" + return + fi + # when running inside a docker container, expect /.dockerenv to exist + if ! [[ -f /.dockerenv ]]; then + printf "%s" "${WORKSPACE}" + return + fi + + # if running inside a docker container, the workspace mount point cannot be + # determined by git or bazel or inspecting the filesystem itself. Instead, we + # need to use docker to expose its mount info for the /src/workspace path. + # determine the current container's ID + local -r CONTAINER_ID="$(uname --nodename)" + # use docker inspect to extract the current mount path for /src/workspace + # this format string is a golang template (https://pkg.go.dev/text/template) processed + # by docker's --format flag, per https://docs.docker.com/config/formatting/ + # shellcheck disable=SC2016 + declare -r FORMAT_STR=' + {{- range $v := .HostConfig.Binds -}} + {{$pathpair := split $v ":" -}} + {{if eq (index $pathpair 1) "/src/workspace" -}} + {{print (index $pathpair 0) -}} + {{end -}} + {{end -}} + ' + local -r MOUNT_PATH="$(docker inspect --format "${FORMAT_STR}" "${CONTAINER_ID}")" + if [[ -z ${MOUNT_PATH} ]]; then + printf "Error: Unable to determine mount point for /src/workspace. 
Exiting\n" &>/dev/stderr + exit 1 + fi + printf "%s" "${MOUNT_PATH}" +} + +function builder::get_tools_dir() { + dirname "$(readlink -f "${BASH_SOURCE[0]}")" +} + +####################################### +# Invoke cbuild tool in a build-debian container +####################################### +function builder::cbuild_debian() { + local -r CBUILD="$(builder::get_tools_dir)"/cbuild + printf "=== cbuild debian action envs ===\n" + # shellcheck disable=SC2086 + "${CBUILD}" ${CBUILD_ARGS} --image build-debian --cmd "grep -o 'action_env.*' /etc/bazel.bazelrc 1>/dev/stderr 2>/dev/null" + # shellcheck disable=SC2086 + "${CBUILD}" ${CBUILD_ARGS} --image build-debian --cmd "$*" +} + +####################################### +# Add AWS env vars to a specified array +# Arguments: +# * the name of an array to which to append values +####################################### +function builder::add_aws_env_vars() { + declare -n args=$1 + args+=( + "AWS_ACCESS_KEY_ID" + "AWS_SECRET_ACCESS_KEY" + "AWS_SESSION_TOKEN" + "AWS_REGION" + "AWS_DEFAULT_REGION" + "AWS_PROFILE" + ) +} + +####################################### +# Build a Nitro EIF from a docker image tarfile +# Arguments: +# DOCKER_IMAGE_TAR Docker image tarfile +# DOCKER_IMAGE_URI Docker image uri corresponding to tarfile (without tag) +# DOCKER_IMAGE_TAG Docker image tag corresponding to tarfile +# OUTPUT_PATH Output path, must be within the workspace +# EIF_NAME Output base filename +# BUILDER_IMAGE_NAME (optional) Amazon Linux builder image name +####################################### +function builder::docker_img_to_nitro() { + local -r docker_image_tar="$1" + local -r docker_image_uri="$2" + local -r docker_image_tag="$3" + local -r output_path="$4" + local -r eif_name="$5" + local -r image="${6-build-amazonlinux2}" + local -r temp_tag="$(mktemp --dry-run temp-XXXXXX)" + docker load -i "${docker_image_tar}" + # add a temp tag to reduce the chance of conflicts or race conditions + docker tag "${docker_image_uri}:${docker_image_tag}" "${docker_image_uri}:${temp_tag}" + builder::docker_uri_to_nitro \ + "${docker_image_uri}:${temp_tag}" \ + "${output_path}/${eif_name}" \ + "${image}" + # remove the temp tag + docker image rm "${docker_image_uri}:${temp_tag}" +} + +####################################### +# Build a Nitro EIF from a docker image +# Arguments: +# DOCKER_URI (with tag) +# OUTPUT_BASE_FILENAME path and base filename +# BUILDER_IMAGE_NAME (optional) Amazon Linux builder image name +####################################### +function builder::docker_uri_to_nitro() { + local -r docker_uri="$1" + local -r output_base_fname="$2" + local -r image="${3-build-amazonlinux2}" + local -r output_eif="${output_base_fname}".eif + local -r output_json="${output_base_fname}".json + local -r output_pcr0_json="${output_base_fname}".pcr0.json + local -r output_pcr0_txt="${output_base_fname}".pcr0.txt + builder::cbuild_al "${image}" " +nitro-cli build-enclave --docker-uri ${docker_uri} --output-file ${output_eif@Q} >${output_json@Q} +jq --compact-output '{PCR0: .Measurements.PCR0}' ${output_json@Q} >${output_pcr0_json@Q} +jq --compact-output --raw-output '.Measurements.PCR0' ${output_json@Q} >${output_pcr0_txt@Q} +" +} + +####################################### +# Invoke cbuild tool in an amazonlinux build container +# Arguments: +# BUILDER_IMAGE_NAME Amazon Linux builder image name +####################################### +function builder::cbuild_al() { + local -r image="$1" + shift + local -r cbuild="$(builder::get_tools_dir)"/cbuild + declare -a 
env_vars + builder::add_aws_env_vars env_vars + declare env_args + for evar in "${env_vars[@]}"; do + env_args+=(--env "${evar}") + done + printf "=== cbuild %s action envs ===\n" "${image}" + # shellcheck disable=SC2086 + "${cbuild}" ${CBUILD_ARGS} "${env_args[@]}" --image "${image}" --cmd "grep -o 'action_env.*' /etc/bazel.bazelrc 1>/dev/stderr 2>/dev/null" + # shellcheck disable=SC2086 + "${cbuild}" ${CBUILD_ARGS} "${env_args[@]}" --image "${image}" --cmd "$*" +} + +function builder::cbuild_al2() { + builder::cbuild_al build-amazonlinux2 "$@" +} + +function builder::cbuild_al2023() { + builder::cbuild_al build-amazonlinux2023 "$@" +} + +####################################### +# Return the numeric id of the user or group +# Arguments: +# single character, either "u" for user or "g" for group +####################################### +function builder::id() { + declare -r mode="$1" + declare -i _id + _id=$(id "-${mode}") + if [[ ${_id} -ne 0 ]]; then + printf "%s" "${_id}" + else + # id is 0 (root), use the owner/group of the WORKSPACE file instead + stat -c "%${mode}" "${WORKSPACE}"/WORKSPACE + fi +} + +####################################### +# invoke functions to configure the build environment +####################################### + +builder::set_workspace diff --git a/services/inference_sidecar/common/builders/tools/buildozer b/services/inference_sidecar/common/builders/tools/buildozer new file mode 120000 index 00000000..78c3d50c --- /dev/null +++ b/services/inference_sidecar/common/builders/tools/buildozer @@ -0,0 +1 @@ +release-tool \ No newline at end of file diff --git a/services/inference_sidecar/common/builders/tools/cassowary b/services/inference_sidecar/common/builders/tools/cassowary new file mode 120000 index 00000000..d609abda --- /dev/null +++ b/services/inference_sidecar/common/builders/tools/cassowary @@ -0,0 +1 @@ +test-tool \ No newline at end of file diff --git a/services/inference_sidecar/common/builders/tools/cbuild b/services/inference_sidecar/common/builders/tools/cbuild new file mode 100755 index 00000000..c40a3aed --- /dev/null +++ b/services/inference_sidecar/common/builders/tools/cbuild @@ -0,0 +1,252 @@ +#!/usr/bin/env bash + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -o pipefail +set -o errexit + +IMAGE="build-debian" +VERBOSE=0 +declare -r -a IMAGE_LIST=( + "build-debian" + "build-amazonlinux2" + "build-amazonlinux2023" + "presubmit" +) + +declare -a ENV_VARS +ENV_VARS+=( + "BAZEL_STARTUP_ARGS" + "BAZEL_EXTRA_ARGS" +) + +function usage() { + local exitval=${1-1} + cat &>/dev/stderr << USAGE +usage: + $0 + --cmd bash command string to execute within the docker container + --cmd-profiler enable profiler for the command + --image Image name for the build runtime. 
Valid names: +USAGE + + for elem in "${IMAGE_LIST[@]}" + do + if [[ ${elem} == "${IMAGE}" ]]; then + default_text=" (default)" + fi + printf " * %s%s\n" "${elem}" "${default_text}" &>/dev/stderr + default_text="" + done + + cat &>/dev/stderr << USAGE + --env [=] Name (or name=value) of exported environment variable, propagated into container + --without-shared-cache Containers will not mount ${HOME}/.cache/bazel + --without-embedded-docker Disable docker client within container + --docker-network Specify docker network type or name, value passed to docker run --network. Default: ${DOCKER_NETWORK} + --seccomp-unconfined Run docker container without a seccomp profile + --verbose Enable verbose output + +Environment variables (all optional): + WORKSPACE Full path to the workspace (repo root) + WORKSPACE_MOUNT Full path to the workspace on the host filesystem + EXTRA_DOCKER_RUN_ARGS Additional arguments to pass to docker run invocations + CBUILD_IMAGE docker URI to the specific image to run + DOCKER_NETWORK Specify docker network, equivalent of --docker-network flag. + +Environment variables propagated into container: + BAZEL_STARTUP_ARGS + BAZEL_EXTRA_ARGS +USAGE + # shellcheck disable=SC2086 + exit ${exitval} +} + +declare -i WITH_SHARED_CACHE=1 +declare -i WITH_DOCKER_SOCK=1 +declare -i WITH_CMD_PROFILER=0 +DOCKER_NETWORK="${DOCKER_NETWORK:-bridge}" +declare -i DOCKER_SECCOMP_UNCONFINED=0 + +while [[ $# -gt 0 ]]; do + case "$1" in + --cmd) + CMD="$2" + shift 2 || usage + ;; + --cmd-profiler) + WITH_CMD_PROFILER=1 + shift + ;; + --env) + ENV_VARS+=("$2") + shift 2 || usage + ;; + --image) + IMAGE="$2" + shift 2 || usage + ;; + --without-shared-cache) + WITH_SHARED_CACHE=0 + shift + ;; + --without-embedded-docker) + WITH_DOCKER_SOCK=0 + shift + ;; + --docker-network) + DOCKER_NETWORK="$2" + shift 2 || usage + ;; + --seccomp-unconfined) + DOCKER_SECCOMP_UNCONFINED=1 + shift + ;; + --verbose) + VERBOSE=1 + shift + ;; + -h | --help) usage 0 ;; + *) + printf "unrecognized arg: %s\n" "$1" + usage + ;; + esac +done + +if [[ -z ${IMAGE} ]]; then + printf -- "error: --image must be specified\n" &>/dev/stderr + usage 1 +fi +# shellcheck disable=SC2076 +if ! [[ " ${IMAGE_LIST[*]} " =~ " ${IMAGE} " ]]; then + printf -- "error: image [%s] not recognized\n" "${IMAGE}" &>/dev/stderr + usage 1 +fi + +TOOLS_DIR="$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")" +readonly TOOLS_DIR +# shellcheck disable=SC1090 +source "${TOOLS_DIR}"/builder.sh + +trap _cleanup EXIT +function _cleanup() { + local -r -i status=$? 
+ "${TOOLS_DIR}"/normalize-bazel-symlinks &>/dev/null || true + exit ${status} +} + +WORKSPACE_MOUNT="$(builder::get_docker_workspace_mount)" +readonly WORKSPACE_MOUNT +export WORKSPACE_MOUNT +if [[ ${VERBOSE} -eq 1 ]]; then + printf "mounting workspace into container: %s\n" "${WORKSPACE_MOUNT}" &>/dev/stderr +fi + +TOOLS_RELDIR="$(realpath "${TOOLS_DIR}" --relative-to="${PWD}")" +readonly TOOLS_RELDIR + +if [[ -n ${CBUILD_IMAGE} ]]; then + IMAGE_TAGGED=${CBUILD_IMAGE} +else + IMAGE_TAGGED=$("${TOOLS_DIR}"/get-builder-image-tagged --image "${IMAGE}") +fi +readonly IMAGE_TAGGED + +PWD_WORKSPACE_REL_PATH="$(realpath --relative-base="${WORKSPACE}" "${PWD}")" +readonly PWD_WORKSPACE_REL_PATH +WORKDIR=/src/workspace +if [[ ${PWD_WORKSPACE_REL_PATH:0:1} != / ]]; then + WORKDIR="/src/workspace/${PWD_WORKSPACE_REL_PATH}" +fi +readonly WORKDIR + +declare -a DOCKER_RUN_ARGS +DOCKER_RUN_ARGS+=( + "--rm" + "--entrypoint=/bin/bash" + "--volume=${WORKSPACE_MOUNT}:/src/workspace" + "--workdir=${WORKDIR}" + "--network=${DOCKER_NETWORK}" + "$(echo "${EXTRA_DOCKER_RUN_ARGS}" | envsubst)" +) + +if [[ ${DOCKER_SECCOMP_UNCONFINED} -eq 1 ]]; then + DOCKER_RUN_ARGS+=("--security-opt=seccomp=unconfined") +fi + +if [[ ${WITH_CMD_PROFILER} -eq 1 ]]; then + if [[ ${IMAGE} != build-debian ]]; then + printf "error: --cmd-profiler is only compatible with build-debian\n" &>/dev/stderr + usage 1 + fi + DOCKER_RUN_ARGS+=( + "--env=CMD_PROFILER=LD_PRELOAD=/usr/lib/x86_64-linux-gnu/libprofiler.so" + "--env=CPUPROFILE=benchmark.prof" + ) +fi + +# inside the docker build images, /bazel_root is the bazel cache dir, per the system-wide bazelrc +readonly BAZEL_ROOT=/bazel_root +if [[ ${WITH_SHARED_CACHE} -eq 0 ]]; then + # use tmpfs for as temporary, container-bound bazel cache + DOCKER_RUN_ARGS+=( + "--tmpfs ${BAZEL_ROOT}:exec" + ) +else + # mount host filesystem for "shared" use by multiple docker container invocations + DOCKER_RUN_ARGS+=( + "--volume ${HOME}/.cache/bazel:${BAZEL_ROOT}" + ) +fi +if [[ ${WITH_DOCKER_SOCK} -eq 1 ]]; then + DOCKER_RUN_ARGS+=( + "--volume /var/run/docker.sock:/var/run/docker.sock" + ) +fi +for evar in "${ENV_VARS[@]}" +do + DOCKER_RUN_ARGS+=( + "--env=${evar}" + ) +done +if [[ -t 0 ]] && [[ -t 1 ]]; then + # stdin and stdout are open, assume it's an interactive tty session + DOCKER_RUN_ARGS+=( + --interactive + --tty + ) +fi + +if [[ ${VERBOSE} -eq 1 ]]; then + set -o xtrace +fi +if [[ -z ${CMD} ]]; then + # shellcheck disable=SC2068 + docker run \ + ${DOCKER_RUN_ARGS[@]} \ + "${IMAGE_TAGGED}" \ + --login +elif [[ ${WITH_CMD_PROFILER} -eq 1 ]]; then + # shellcheck disable=SC2068 + docker run \ + ${DOCKER_RUN_ARGS[@]} \ + "${IMAGE_TAGGED}" \ + --login -c "'${TOOLS_RELDIR}'/normalize-bazel-symlinks; env \${CMD_PROFILER} ${CMD}" +else + # shellcheck disable=SC2068 + docker run \ + ${DOCKER_RUN_ARGS[@]} \ + "${IMAGE_TAGGED}" \ + --login -c "${CMD}" +fi diff --git a/services/inference_sidecar/common/builders/tools/collect-coverage b/services/inference_sidecar/common/builders/tools/collect-coverage new file mode 100755 index 00000000..746185c2 --- /dev/null +++ b/services/inference_sidecar/common/builders/tools/collect-coverage @@ -0,0 +1,105 @@ +#!/usr/bin/env bash + +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# environment variables: +# WORKSPACE Set the path to the workspace (repo root) + +set -o pipefail +set -o errexit + +declare LCOV_REPORT="${WORKSPACE}/bazel-out/_coverage/_coverage_report.dat" +declare COVERAGE_FILENAME=coverage.zip + +function usage() { + local exitval=${1-1} + cat &>/dev/stderr << USAGE +usage: + $0 + --lcov_report path to lcov report relative to the WORKSPACE + --coverage_output_filename name of ZIP file that will contain artifacts from coverage collection +USAGE + # shellcheck disable=SC2086 + exit ${exitval} +} + +while [[ $# -gt 0 ]]; do + case "$1" in + --lcov_report) + LCOV_REPORT="${WORKSPACE}/$2" + shift 2 || usage + ;; + --coverage_output_filename) + COVERAGE_FILENAME="$2" + shift 2 || usage + if [[ ${COVERAGE_FILENAME##*.} != zip ]]; then + printf "error: --coverage_output_filename must be a ZIP file\n" &>/dev/stderr + exit 1 + fi + ;; + -h | --help) + usage 0 + ;; + *) + usage + ;; + esac +done + +trap _cleanup EXIT +function _cleanup() { + declare -r -i status=$? + if [[ ${status} -ne 0 ]]; then + printf "collect-coverage exit code: %d\n" ${status} &>/dev/stderr + sleep 5s + fi + exit ${status} +} + +function generate_coverage_report() { + local -r cov_dir="$(mktemp --tmpdir="${WORKSPACE}" --directory coverage-XXXX)" + trap 'rm -rf "${cov_dir}"' RETURN EXIT + cp "${LCOV_REPORT}" "${cov_dir}"/_coverage_report.dat + local -r dist_dir="${WORKSPACE}"/dist + cp "${LCOV_REPORT}" "${dist_dir}"/_coverage_report.dat + chmod -x {"${cov_dir}","${dist_dir}"}/_coverage_report.dat + + "${TOOLS_DIR}"/lcov --list dist/_coverage_report.dat >"${dist_dir}"/coverage_report.txt + "${TOOLS_DIR}"/lcov --summary dist/_coverage_report.dat + + local -r commit_sha=$(git rev-parse HEAD) + "${TOOLS_DIR}"/genhtml \ + --output-directory="$(basename "${cov_dir}")" \ + --title "coverage for commit ${commit_sha:0:7}" \ + --show-details \ + --legend \ + --quiet \ + dist/_coverage_report.dat + + if pushd "${cov_dir}" &>/dev/null; then + local -r zipfile=dist/"${COVERAGE_FILENAME}" + "${TOOLS_DIR}"/zip -q -r "../${zipfile}" ./* + printf "coverage archived to: %s\n" "${zipfile}" + popd &>/dev/null + fi +} + +# shellcheck disable=SC1090 +source "$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")"/builder.sh +TOOLS_DIR="$(builder::get_tools_dir)" +readonly TOOLS_DIR + +generate_coverage_report +"${TOOLS_DIR}"/normalize-dist diff --git a/services/inference_sidecar/common/builders/tools/commit-and-tag-version b/services/inference_sidecar/common/builders/tools/commit-and-tag-version new file mode 120000 index 00000000..78c3d50c --- /dev/null +++ b/services/inference_sidecar/common/builders/tools/commit-and-tag-version @@ -0,0 +1 @@ +release-tool \ No newline at end of file diff --git a/services/inference_sidecar/common/builders/tools/convert-docker-to-nitro b/services/inference_sidecar/common/builders/tools/convert-docker-to-nitro new file mode 100755 index 00000000..7586e039 --- /dev/null +++ b/services/inference_sidecar/common/builders/tools/convert-docker-to-nitro @@ -0,0 +1,104 @@ +#!/bin/bash +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the 
"License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -o pipefail +set -o errexit + +trap cleanup EXIT +function cleanup() { + local -r -i status=$? + exit ${status} +} + +declare BUILDER_IMAGE_NAME=build-amazonlinux2 + +function usage() { + declare -r -i exitval=${1-1} + cat >&2 << USAGE +usage: + $0 + --docker-image-tar Docker image tarfile + --docker-image-uri Docker image uri (from tarfile) + --docker-image-tag Docker image tag (from tarfile) + --builder-image Amazon Linux builder image name. Default: ${BUILDER_IMAGE_NAME} + --outdir Output path for EIF files + --eif-name Base filename for EIF files + +environment variables (all optional): + WORKSPACE Set the path to the workspace (repo root) +USAGE + # shellcheck disable=SC2086 + exit ${exitval} +} + +while [[ $# -gt 0 ]]; do + case "$1" in + --docker-image-tar) + DOCKER_IMAGE_TAR="$2" + shift 2 || usage + ;; + --docker-image-uri) + DOCKER_IMAGE_URI="$2" + shift 2 || usage + ;; + --docker-image-tag) + DOCKER_IMAGE_TAG="$2" + shift 2 || usage + ;; + --outdir) + OUTPUT_DIR="$2" + shift 2 || usage + ;; + --eif-name) + EIF_NAME="$2" + shift 2 || usage + ;; + --builder-image) + BUILDER_IMAGE_NAME="$2" + shift 2 || usage + ;; + -h | --help) usage 0 ;; + *) usage ;; + esac +done + +: "${DOCKER_IMAGE_TAR?"--docker-image-tar not specified"}" +: "${DOCKER_IMAGE_URI?"--docker-image-uri not specified"}" +: "${DOCKER_IMAGE_TAG?"--docker-image-tag not specified"}" +: "${OUTPUT_DIR?"--outdir not specified"}" +: "${EIF_NAME?"--eif-name not specified"}" + +# shellcheck disable=SC1090 +source "$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")"/builder.sh + +WS_OUTPUT_RELDIR="$(realpath "${OUTPUT_DIR}" --relative-to="${WORKSPACE}")" +if [[ ${WS_OUTPUT_RELDIR} =~ ^\.\..*\/ ]]; then + printf "Output dir must be within the workspace directory tree\n" &>/dev/stderr + exit 1 +fi +if ! 
[[ -d ${WORKSPACE}/${WS_OUTPUT_RELDIR} ]]; then + printf "Output dir [%s] is not found\n" "${WORKSPACE}/${WS_OUTPUT_RELDIR}" &>/dev/stderr + exit 1 +fi + +readonly WS_OUTPUT_DIR=/src/workspace/"${WS_OUTPUT_RELDIR}" +printf "==== building AWS Nitro image using %s =====\n" "${BUILDER_IMAGE_NAME}" +builder::docker_img_to_nitro \ + "${DOCKER_IMAGE_TAR}" \ + "${DOCKER_IMAGE_URI}" \ + "${DOCKER_IMAGE_TAG}" \ + "${WS_OUTPUT_DIR}" \ + "${EIF_NAME}" \ + "${BUILDER_IMAGE_NAME}" diff --git a/services/inference_sidecar/common/builders/tools/coverage-tool b/services/inference_sidecar/common/builders/tools/coverage-tool new file mode 120000 index 00000000..d609abda --- /dev/null +++ b/services/inference_sidecar/common/builders/tools/coverage-tool @@ -0,0 +1 @@ +test-tool \ No newline at end of file diff --git a/services/inference_sidecar/common/builders/tools/curl b/services/inference_sidecar/common/builders/tools/curl new file mode 120000 index 00000000..d609abda --- /dev/null +++ b/services/inference_sidecar/common/builders/tools/curl @@ -0,0 +1 @@ +test-tool \ No newline at end of file diff --git a/services/inference_sidecar/common/builders/tools/genhtml b/services/inference_sidecar/common/builders/tools/genhtml new file mode 120000 index 00000000..f475fb1e --- /dev/null +++ b/services/inference_sidecar/common/builders/tools/genhtml @@ -0,0 +1 @@ +coverage-tool \ No newline at end of file diff --git a/services/inference_sidecar/common/builders/tools/get-architecture b/services/inference_sidecar/common/builders/tools/get-architecture new file mode 100755 index 00000000..a4415602 --- /dev/null +++ b/services/inference_sidecar/common/builders/tools/get-architecture @@ -0,0 +1,31 @@ +#!/usr/bin/env bash +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Print the CPU architecture + +set -o errexit + +trap _cleanup EXIT +function _cleanup() { + local -r -i STATUS=$? + if [[ ${STATUS} -ne 0 ]]; then + printf "Error: get-architecture status code: %s\n" "${STATUS}" &>/dev/stderr + fi + exit ${STATUS} +} + +if [[ -n ${BUILD_ARCH} ]]; then + printf "%s\n" "${BUILD_ARCH}" +else + docker run --rm --entrypoint=/usr/bin/dpkg ubuntu:20.04 --print-architecture +fi diff --git a/services/inference_sidecar/common/builders/tools/get-builder-image-tagged b/services/inference_sidecar/common/builders/tools/get-builder-image-tagged new file mode 100755 index 00000000..35371aea --- /dev/null +++ b/services/inference_sidecar/common/builders/tools/get-builder-image-tagged @@ -0,0 +1,256 @@ +#!/usr/bin/env bash + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Return an image:tag for the specified builder docker image +# Given the input files (Dockerfile, configs, installation scripts etc) for building the docker +# image, first generate a hash of the file contents (ie. exclude timestamps, file ownership etc). +# This hash will be used as the image tag + +set -o pipefail +set -o errexit + +trap _cleanup EXIT +function _cleanup() { + declare -r -i status=$? + if [[ -n ${TEMPTAR} ]]; then + rm -f "${TEMPTAR}" "${SHAFILE}" + fi + if [[ ${status} -ne 0 ]]; then + if [[ -s ${BUILD_OUTPUT} ]] && [[ ${VERBOSE} -eq 1 ]]; then + cat "${BUILD_OUTPUT}" + fi + printf "Error: get-builder-image-tagged status code: %d\n" ${status} &>/dev/stderr + printf "Docker build log: %s\n" "${BUILD_OUTPUT}" &>/dev/stderr + if [[ -v KOKORO_ARTIFACTS_DIR ]]; then + sleep 5s + fi + elif [[ -f ${BUILD_OUTPUT} ]] && [[ ${VERBOSE} -eq 0 ]]; then + rm -f "${BUILD_OUTPUT}" + fi + exit ${status} +} + +function make_temp() { + declare _suffix="$1" + mktemp --tmpdir="${WORKSPACE}" --dry-run "docker-buildx-${IMG}-XXXX" --suffix="${_suffix}" +} + +function get_image_list() { + declare -r _images_dir="$1" + find "${_images_dir}" -maxdepth 1 -mindepth 1 -type d -printf "%P\n" | sort +} + +function get_image_fullname() { + declare -r _img="$1" + declare -r _image_name=privacysandbox/builders/${_img} + if [[ -z ${_image_name} ]]; then + printf -- "error: image [%s] not recognized\n" "${_img}" &>/dev/stderr + return 1 + fi + printf "%s" "${_image_name}" +} + +function usage() { + declare -r -i exitval=${1-1} + cat &>/dev/stderr < + --no-build Do not build image if it doesn't exist + --image Image name for the build runtime. Valid names: +USAGE + + for elem in $(get_image_list "${IMAGES_DIR}"); do + printf " * %s\n" "${elem}" &>/dev/stderr + done + + cat &>/dev/stderr </dev/stderr + usage 1 +fi + +if ! IMAGE_NAME="$(get_image_fullname "${IMG}")"; then + usage 1 +fi +readonly IMAGE_NAME + +IMAGE_PATH_FULL="${IMAGES_DIR}/${IMG}" +if ! [[ -s ${IMAGE_PATH_FULL}/Dockerfile ]]; then + printf "error: unable to locate [%s/Dockerfile]\n" "${IMAGE_PATH_FULL}" &>/dev/stderr + exit 1 +fi + +# create an image containing gnu tar +function generate_image() { + { + cat <"${BUILD_OUTPUT}" + rm -f "${BUILD_OUTPUT}" + if ! 
docker image inspect "${TAR_IMAGE}" &>/dev/null; then + printf "error creating docker image [%s]\n" "${TAR_IMAGE}" &>/dev/stderr + exit 1 + fi +} + +BUILD_OUTPUT="$(make_temp .log)" +readonly BUILD_OUTPUT +BUILDSYS_VERSION="$(<"${BUILDERS_DIR}"/version.txt)" +readonly BUILDSYS_VERSION +readonly TAR_IMAGE="builders/tar-get-builder-image-tagged:v${BUILDSYS_VERSION}" +TAR_IMAGE_HASH="$(docker image ls --filter "reference=${TAR_IMAGE}" --quiet)" +readonly TAR_IMAGE_HASH +if [[ -z ${TAR_IMAGE_HASH} ]]; then + generate_image +fi + +# Create a deterministic tar file for the specified file path, returning +# the SHA for the tar file content +# Any etc files in the $WORKSPACE (root) directory override the etc +# files in the image dir, as long as it's a file also found in the +# builders etc directory +# the TARFILE and SHAFILE args must be paths located in /tmp +function _tar_for_dir() { + local -r FILE_TAR="$1" + local -r FILE_SHA="$2" + local -r FILEPATH="$3" + local -r TMP_IMAGE_DIR="$(mktemp --tmpdir="${WORKSPACE}" --directory)" + # shellcheck disable=SC2064 + # expand TMP_IMAGE_DIR immediately + trap "rm -rf '${TMP_IMAGE_DIR}'" RETURN + + local -r WS_FILE_TAR="$(realpath "${FILE_TAR}" --relative-to="${WORKSPACE}")" + local -r WS_FILE_SHA="$(realpath "${FILE_SHA}" --relative-to="${WORKSPACE}")" + local -r WS_FILEPATH="$(realpath "${FILEPATH}" --relative-to="${WORKSPACE}")" + local -r WS_TMP_IMAGE_DIR="$(realpath "${TMP_IMAGE_DIR}" --relative-to="${WORKSPACE}")" + # find workspace etc files that are also in the image dir and the builders etc dir + local -r WORKSPACE_ETC_FILES="$({ + # shellcheck disable=SC2012 + ls -A -1 "${FILEPATH}" "${ETC_DIR}" | sort | uniq -d + ls -A -1 "${WORKSPACE}" + } | sort | uniq -d)" + # create a deterministic tarball of the collected files + docker run \ + --rm \ + --entrypoint /bin/sh \ + --volume "${WORKSPACE_MOUNT}":/workspace \ + --workdir /workspace \ + "${TAR_IMAGE}" -c " +tar --create --dereference --directory='${WS_FILEPATH}' --exclude=test . \ + | tar --extract --overwrite --directory='${WS_TMP_IMAGE_DIR}' + +# overwrite etc files in the image with the WORKSPACE's etc files +if [ -n '${WORKSPACE_ETC_FILES}' ]; then + tar --create --dereference --exclude=test ${WORKSPACE_ETC_FILES} \ + | tar --extract --overwrite --directory='${WS_TMP_IMAGE_DIR}' +fi + +tar --create --dereference --sort=name --owner=0 --group=0 --numeric-owner --format=gnu --directory='${WS_TMP_IMAGE_DIR}' --file='${WS_FILE_TAR}' . +{ + printf 'blob %d\0' \$(wc -c <'${WS_FILE_TAR}') + tar --extract --file='${WS_FILE_TAR}' --to-stdout +} \ + | sha256sum \ + | cut -f1 -d' ' \ + >'${WS_FILE_SHA}' +" +} + +TEMPTAR="$(make_temp .tar)" +readonly TEMPTAR +SHAFILE="$(make_temp .sha)" +readonly SHAFILE +# use the tarfile size and file content to generate a sha256 hash +_tar_for_dir "${TEMPTAR}" "${SHAFILE}" "${IMAGE_PATH_FULL}" +SHA="$(<"${SHAFILE}")" +readonly SHA +ARCH="$("${TOOLS_DIR}"/get-architecture)" +readonly ARCH +readonly IMAGE_TAG="${ARCH}-${SHA}" +readonly IMAGE_TAGGED="${IMAGE_NAME}:${IMAGE_TAG}" +# generate output +if [[ ${SHA_ONLY} -eq 0 ]]; then + printf "%s\n" "${IMAGE_TAGGED}" +else + printf "%s\n" "${SHA}" +fi + +if [[ ${BUILD_IMAGE_IF_NEEDED} -eq 1 ]]; then + # Create a builder docker image + # build container image and load it into the local docker client + if ! 
docker image inspect "${IMAGE_TAGGED}" &>/dev/null; then + printf "generating docker image %s\n" "${IMAGE_TAGGED}" &>/dev/stderr + docker buildx build "${DOCKER_BUILD_ARGS[@]}" --output=type=docker --tag "${IMAGE_TAGGED}" - <"${TEMPTAR}" &>"${BUILD_OUTPUT}" + fi +fi diff --git a/services/inference_sidecar/common/builders/tools/ghz b/services/inference_sidecar/common/builders/tools/ghz new file mode 120000 index 00000000..d609abda --- /dev/null +++ b/services/inference_sidecar/common/builders/tools/ghz @@ -0,0 +1 @@ +test-tool \ No newline at end of file diff --git a/services/inference_sidecar/common/builders/tools/git-hooks/pre-commit b/services/inference_sidecar/common/builders/tools/git-hooks/pre-commit new file mode 100755 index 00000000..21d56711 --- /dev/null +++ b/services/inference_sidecar/common/builders/tools/git-hooks/pre-commit @@ -0,0 +1,18 @@ +#!/bin/bash + +TOP_LEVEL_DIR="$(git rev-parse --show-toplevel)" +GIT_HOOKS_DIR="${TOP_LEVEL_DIR}/$(git rev-parse --git-dir)/hooks" +declare -a ARGS=( + hook-impl + "--hook-type=pre-commit" + --hook-dir "${GIT_HOOKS_DIR}" + -- + "$@" +) + +if command -v "${TOP_LEVEL_DIR}"/builders/tools/pre-commit > /dev/null; then + exec "${TOP_LEVEL_DIR}"/builders/tools/pre-commit "${ARGS[@]}" +else + printf "[builders/tools/pre-commit] not found" 2>&1 + exit 1 +fi diff --git a/services/inference_sidecar/common/builders/tools/google-pprof b/services/inference_sidecar/common/builders/tools/google-pprof new file mode 120000 index 00000000..f475fb1e --- /dev/null +++ b/services/inference_sidecar/common/builders/tools/google-pprof @@ -0,0 +1 @@ +coverage-tool \ No newline at end of file diff --git a/services/inference_sidecar/common/builders/tools/grpcurl b/services/inference_sidecar/common/builders/tools/grpcurl new file mode 120000 index 00000000..d609abda --- /dev/null +++ b/services/inference_sidecar/common/builders/tools/grpcurl @@ -0,0 +1 @@ +test-tool \ No newline at end of file diff --git a/services/inference_sidecar/common/builders/tools/h2load b/services/inference_sidecar/common/builders/tools/h2load new file mode 120000 index 00000000..d609abda --- /dev/null +++ b/services/inference_sidecar/common/builders/tools/h2load @@ -0,0 +1 @@ +test-tool \ No newline at end of file diff --git a/services/inference_sidecar/common/builders/tools/hadolint b/services/inference_sidecar/common/builders/tools/hadolint new file mode 100755 index 00000000..2ab1de00 --- /dev/null +++ b/services/inference_sidecar/common/builders/tools/hadolint @@ -0,0 +1,42 @@ +#!/usr/bin/env bash +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +set -o errexit + +# shellcheck disable=SC1090 +source "$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")"/builder.sh + +declare -a DOCKER_RUN_ARGS +DOCKER_RUN_ARGS+=( + "--rm" +) +if [[ -t 0 ]] && [[ -t 1 ]]; then + # stdin and stdout are open, assume it's an interactive tty session + DOCKER_RUN_ARGS+=( + "--interactive" + "--tty" + ) +fi + +WORKSPACE_MOUNT="$(builder::get_docker_workspace_mount)" +readonly WORKSPACE_MOUNT + +docker run \ + "${DOCKER_RUN_ARGS[@]}" \ + --user="$(id -u):$(id -g)" \ + --volume="${WORKSPACE_MOUNT}":/src \ + --workdir=/src \ + ghcr.io/hadolint/hadolint:v2.12.0 \ + hadolint "$@" diff --git a/services/inference_sidecar/common/builders/tools/jq b/services/inference_sidecar/common/builders/tools/jq new file mode 120000 index 00000000..d609abda --- /dev/null +++ b/services/inference_sidecar/common/builders/tools/jq @@ -0,0 +1 @@ +test-tool \ No newline at end of file diff --git a/services/inference_sidecar/common/builders/tools/lcov b/services/inference_sidecar/common/builders/tools/lcov new file mode 120000 index 00000000..f475fb1e --- /dev/null +++ b/services/inference_sidecar/common/builders/tools/lcov @@ -0,0 +1 @@ +coverage-tool \ No newline at end of file diff --git a/services/inference_sidecar/common/builders/tools/normalize-bazel-symlinks b/services/inference_sidecar/common/builders/tools/normalize-bazel-symlinks new file mode 100755 index 00000000..8506ac96 --- /dev/null +++ b/services/inference_sidecar/common/builders/tools/normalize-bazel-symlinks @@ -0,0 +1,56 @@ +#!/usr/bin/env bash + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +declare -r BAZEL_CACHE_DIR="${HOME}/.cache/bazel" + +function normalize_symlink() { + declare -r link_name="$1" + if readlink --canonicalize-existing "${link_name}" &>/dev/null ; then + printf "symlink %s resolves fully, skipping\n" "${link_name}" + return + fi + local -r link_path="$(readlink "${link_name}")" + local -r output_user_root="${link_path///bazel_root/}" + rm -f "${link_name}" + ln -s "$(realpath "${BAZEL_CACHE_DIR}/${output_user_root}")" "${link_name}" +} + +function normalize_symlink_docker() { + declare -r link_name="$1" + if readlink --canonicalize-existing "${link_name}" &>/dev/null ; then + printf "symlink %s resolves fully, skipping\n" "${link_name}" + return + fi + local -r link_path="$(readlink "${link_name}")" + local -r output_user_root="${link_path##*/.cache/bazel/}" + rm -f "${link_name}" + ln -s "$(realpath "/bazel_root/${output_user_root}")" "${link_name}" +} + +declare _normalize_fn=normalize_symlink +if [[ -f /.dockerenv ]]; then + _normalize_fn=normalize_symlink_docker +fi + +declare -a -r LINK_DIRS=( + bazel-bin + bazel-out + bazel-testlogs + bazel-workspace +) +for link in "${LINK_DIRS[@]}"; do + if [[ -L ${link} ]]; then + ${_normalize_fn} "${link}" + fi +done diff --git a/services/inference_sidecar/common/builders/tools/normalize-dist b/services/inference_sidecar/common/builders/tools/normalize-dist new file mode 100755 index 00000000..93627d25 --- /dev/null +++ b/services/inference_sidecar/common/builders/tools/normalize-dist @@ -0,0 +1,61 @@ +#!/usr/bin/env bash + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# environment variables supported by cbuild (all optional): +# WORKSPACE Set the path to the workspace (repo root) +# EXTRA_DOCKER_RUN_ARGS Additional arguments to pass to docker run invocations + +set -o pipefail +set -o errexit + +trap _cleanup EXIT +function _cleanup() { + local -r -i STATUS=$? + if [[ ${STATUS} -eq 0 ]]; then + printf "normalize-dist completed successfully\n" &>/dev/stderr + else + printf "Error: normalize-dist completed with status code: %s\n" "${STATUS}" &>/dev/stderr + fi + exit 0 +} + +TOOLS_DIR="$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")" +readonly TOOLS_DIR +# shellcheck disable=SC1090 +source "${TOOLS_DIR}"/builder.sh +readonly IMAGE=build-debian +GROUP="$(builder::id g)" +readonly GROUP +USER="$(builder::id u)" +readonly USER + +readonly TOP_LEVEL_DIRS="dist" + +printf "Setting file ownership [%s], group [%s] in dirs [%s]\n" "${USER}" "${GROUP}" "${TOP_LEVEL_DIRS}" +declare -a runner=() +if [[ -f /.dockerenv ]]; then + runner+=(bash -c) +else + runner+=("${TOOLS_DIR}"/cbuild "--image" "${IMAGE}" "--cmd") +fi + +"${runner[@]}" " +for TOP_LEVEL_DIR in ${TOP_LEVEL_DIRS}; do + find \${TOP_LEVEL_DIR} -type f ! 
-executable -exec chmod 644 {} \; + find \${TOP_LEVEL_DIR} -type f -executable -exec chmod 755 {} \; + find \${TOP_LEVEL_DIR} -type d -exec chmod 755 {} \; + chgrp --recursive ${GROUP} \${TOP_LEVEL_DIR} + chown --recursive ${USER} \${TOP_LEVEL_DIR} +done +" diff --git a/services/inference_sidecar/common/builders/tools/pre-commit b/services/inference_sidecar/common/builders/tools/pre-commit new file mode 100755 index 00000000..be8c5c7c --- /dev/null +++ b/services/inference_sidecar/common/builders/tools/pre-commit @@ -0,0 +1,165 @@ +#!/bin/bash +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -o pipefail +set -o errexit + +TOOLS_DIR="$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")" +readonly TOOLS_DIR +# shellcheck disable=SC1090 +source "${TOOLS_DIR}"/builder.sh + +trap __cleanup EXIT +function __cleanup() { + declare -r -i STATUS=$? + readonly LOGFILE=/var/cache/pre-commit/pre-commit.log + if [[ -s ${LOGFILE} ]]; then + printf "==== %s ====\n" ${LOGFILE} &>/dev/stderr + cat ${LOGFILE} &>/dev/stderr + printf "===========\n" &>/dev/stderr + fi + if [[ ${CLEANUP} -eq 0 ]]; then + exit ${STATUS} + fi + # shellcheck disable=SC2086 + "${TOOLS_DIR}"/cbuild "${CBUILD_COMMON_ARGS[@]}" --cmd $" +# change file ownership back to user +{ + git ls-files . --modified + git diff --staged --name-only +} | \ + sort -u | \ + xargs --replace={} /bin/bash -c '{ chown -f $(builder::id u) {}; chgrp -f $(builder::id g) {};}' +# clean up 'empty' node_modules dir created by prettier 2.x +if [[ -d node_modules ]]; then + rmdir node_modules/.cache/prettier/ node_modules/.cache/ node_modules/ +fi +" + exit ${STATUS} +} + +function __exit_msg() { + declare -r MSG="$1" + printf "%s. Exiting\n" "${MSG}" &>/dev/stderr + exit 1 +} + +function __install_git_hooks() { + git rev-parse --is-inside-work-tree >/dev/null || __exit_msg "Not in a git repository" + declare -r GIT_HOOKS_SRC_DIR="${TOOLS_DIR}/git-hooks" + declare -r GIT_HOOKS_DIR="${WORKSPACE}/.git/hooks" + chmod +x "${GIT_HOOKS_SRC_DIR}" + cp -p "${GIT_HOOKS_SRC_DIR}"/* "${GIT_HOOKS_DIR}" + printf "installed git hooks\n" +} + +function __usage() { + declare -r -i exitval=${1-1} + cat &>/dev/stderr << USAGE +usage: + $0 [command] + +A CLI tool to run pre-commit checks. 
+ + if command not specified, run the pre-commit tool on all files in entire workspace + install install as a git pre-commit hook in the current git repo + autoupdate update hook versions in .pre-commit-config.yaml + +environment variables (all optional): + SKIP comma-delimited list of hook ids to skip + PRESUBMIT_IMAGE docker URI to the specific presubmit image to run + IMAGE_BUILD_VERBOSE capture docker build output if set +USAGE + # shellcheck disable=SC2086 + exit ${exitval} +} + +function __init() { + if [[ -n ${PRESUBMIT_IMAGE} ]]; then + IMAGE_TAGGED="${PRESUBMIT_IMAGE}" + else + IMAGE_TAGGED="$("${TOOLS_DIR}"/get-builder-image-tagged --image presubmit)" + fi + readonly IMAGE_TAGGED + export CBUILD_IMAGE="${IMAGE_TAGGED}" + + local -r ARCH="$("${TOOLS_DIR}"/get-architecture)" + if [[ ${ARCH} == arm64 ]]; then + if [[ -z ${SKIP} ]]; then + SKIP_HOOKS="terraform-fmt" + else + SKIP_HOOKS="${SKIP},terraform-fmt" + fi + else + SKIP_HOOKS="${SKIP}" + fi + if [[ -n ${SKIP_HOOKS} ]]; then + printf "Skipping pre-commit hooks: %s\n" "${SKIP_HOOKS}" + fi + SKIP_ENV="SKIP=${SKIP_HOOKS}" +} + +declare -i CLEANUP=0 +declare -a -r CBUILD_COMMON_ARGS=( + "--without-shared-cache" + "--image" + presubmit +) +declare -r PRECOMMIT=/usr/pre-commit-venv/bin/pre-commit + +# TODO: run bazel //:precommit-hooks rather than just the pre-commit tool +if [[ $# -gt 0 ]]; then + case "$1" in + install) __install_git_hooks ;; + + help | --help | -h) __usage 0 ;; + + autoupdate) + PRECOMMIT_CMD="$1" + shift + __init + # shellcheck disable=SC2086 + "${TOOLS_DIR}"/cbuild "${CBUILD_COMMON_ARGS[@]}" --env "${SKIP_ENV}" --cmd "${PRECOMMIT} ${PRECOMMIT_CMD} --config ./.pre-commit-config.yaml $*" + ;; + + hook-impl) + PRECOMMIT_CMD="$1" + CLEANUP=1 + shift + __init + docker run --rm \ + --entrypoint=/usr/pre-commit-venv/bin/pre-commit \ + --volume "${WORKSPACE}":/src/workspace \ + -v /var/run/docker.sock:/var/run/docker.sock \ + --env="${SKIP_ENV}" \ + --workdir /src/workspace \ + "${IMAGE_TAGGED}" \ + "${PRECOMMIT_CMD}" --config ./.pre-commit-config.yaml "$@" + ;; + + *) + __init + CLEANUP=1 + for HOOK in "$@"; do + # shellcheck disable=SC2086 + "${TOOLS_DIR}"/cbuild "${CBUILD_COMMON_ARGS[@]}" --env "${SKIP_ENV}" --cmd "${PRECOMMIT} run --config ./.pre-commit-config.yaml --all-files ${HOOK}" + done + esac +else + __init + CLEANUP=1 + # shellcheck disable=SC2086 + "${TOOLS_DIR}"/cbuild "${CBUILD_COMMON_ARGS[@]}" --env "${SKIP_ENV}" --cmd "${PRECOMMIT} run --config ./.pre-commit-config.yaml --all-files" +fi diff --git a/services/inference_sidecar/common/builders/tools/release-tool b/services/inference_sidecar/common/builders/tools/release-tool new file mode 120000 index 00000000..d609abda --- /dev/null +++ b/services/inference_sidecar/common/builders/tools/release-tool @@ -0,0 +1 @@ +test-tool \ No newline at end of file diff --git a/services/inference_sidecar/common/builders/tools/slowhttptest b/services/inference_sidecar/common/builders/tools/slowhttptest new file mode 120000 index 00000000..d609abda --- /dev/null +++ b/services/inference_sidecar/common/builders/tools/slowhttptest @@ -0,0 +1 @@ +test-tool \ No newline at end of file diff --git a/services/inference_sidecar/common/builders/tools/terraform b/services/inference_sidecar/common/builders/tools/terraform new file mode 100755 index 00000000..71a3e708 --- /dev/null +++ b/services/inference_sidecar/common/builders/tools/terraform @@ -0,0 +1,85 @@ +#!/usr/bin/env bash +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); 
+# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# environment variables (all optional): +# TERRAFORM_VERSION specify the terraform version to use +# WORKSPACE repo root directory, must be an absolute path +# +# AWS-related environment variables exported into the terraform container: +# For more info on supported env vars, see: +# https://registry.terraform.io/providers/hashicorp/aws/latest/docs#environment-variables +# +# AWS_ACCESS_KEY_ID +# AWS_SECRET_ACCESS_KEY +# AWS_SESSION_TOKEN +# AWS_REGION +# AWS_DEFAULT_REGION +# AWS_PROFILE + +set -o errexit + +# shellcheck disable=SC1090 +source "$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")"/builder.sh + +if ! [[ -v TERRAFORM_VERSION ]]; then + TERRAFORM_VERSION=1.0.4 +fi + +declare -a ENV_VARS +builder::add_aws_env_vars ENV_VARS +ENV_VARS+=( + "HOME=/home" +) + +declare -a DOCKER_RUN_ARGS=( + "--rm" +) +for evar in "${ENV_VARS[@]}" +do + DOCKER_RUN_ARGS+=( + "--env=${evar}" + ) +done +if [[ -t 0 ]] && [[ -t 1 ]]; then + # stdin and stdout are open, assume it's an interactive tty session + DOCKER_RUN_ARGS+=( + "--interactive" + "--tty" + ) +fi + +readonly IMAGE_TAGGED=hashicorp/terraform:"${TERRAFORM_VERSION}" +REL_PWD="$(realpath --relative-to="${WORKSPACE}" "$(pwd)")" +readonly REL_PWD +WORKSPACE_MOUNT="$(builder::get_docker_workspace_mount)" +readonly WORKSPACE_MOUNT + +if [[ -d ${HOME}/.aws ]]; then + DOCKER_RUN_ARGS+=("--volume=${HOME}/.aws/:/home/.aws/") +fi +if [[ -d ${HOME}/.config/gcloud ]]; then + DOCKER_RUN_ARGS+=("--volume=${HOME}/.config/gcloud/:/home/.config/gcloud/") +fi + +DOCKER_RUN_ARGS+=( + "--user=$(id -u):$(id -g)" + "--volume=${WORKSPACE_MOUNT}:/src/workspace" + "--workdir=/src/workspace/${REL_PWD}" +) + +# shellcheck disable=SC2068 +docker run \ + "${DOCKER_RUN_ARGS[@]}" \ + ${IMAGE_TAGGED} "$@" diff --git a/services/inference_sidecar/common/builders/tools/test-tool b/services/inference_sidecar/common/builders/tools/test-tool new file mode 100755 index 00000000..6a275370 --- /dev/null +++ b/services/inference_sidecar/common/builders/tools/test-tool @@ -0,0 +1,90 @@ +#!/usr/bin/env bash + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# environment variables (all optional): +# WORKSPACE repo root directory, must be an absolute path + +set -o errexit + +APP="$(basename "${BASH_SOURCE[0]}")" +readonly APP +APP_LINK_TARGET=$(readlink "${BASH_SOURCE[0]}") || true +readonly APP_LINK_TARGET +if [[ -z ${APP_LINK_TARGET} ]] || [[ ${APP_LINK_TARGET} == test-tool && ${APP} =~ ^(release|utils|coverage)-tool$ ]]; then + printf "%s designed for use via symlink with target program name\n" "${APP}" >/dev/stderr + exit 1 +fi + +declare -i APP_AS_ENTRYPOINT=0 + +case "${APP_LINK_TARGET}" in + release-tool) + IMAGE="release" + APP_AS_ENTRYPOINT=1 + ;; + coverage-tool) + IMAGE="coverage-tools" ;; + test-tool) + IMAGE="test-tools" ;; + utils-tool) + IMAGE="utils" ;; + *) + printf "Unsupported image target: %s\n" "${APP_LINK_TARGET}" >/dev/stderr + exit 1 +esac +readonly IMAGE + +# shellcheck disable=SC1090 +source "$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")"/builder.sh +TOOLS_DIR="$(builder::get_tools_dir)" +readonly TOOLS_DIR +IMAGE_TAGGED="$("${TOOLS_DIR}"/get-builder-image-tagged --image "${IMAGE}")" +readonly IMAGE_TAGGED + +REL_PWD="$(realpath --relative-to="${WORKSPACE}" "$(pwd)")" +readonly REL_PWD +WORKSPACE_MOUNT="$(builder::get_docker_workspace_mount)" +readonly WORKSPACE_MOUNT + +declare -a DOCKER_RUN_ARGS=( + "--rm" + "--interactive" + "--user=$(id -u):$(id -g)" + "--volume=${WORKSPACE_MOUNT}:/src/workspace" + "--workdir=/src/workspace/${REL_PWD}" +) +if [[ -n ${EXTRA_DOCKER_RUN_ARGS} ]]; then + # shellcheck disable=SC2207 + DOCKER_RUN_ARGS+=( + $(echo "${EXTRA_DOCKER_RUN_ARGS}" | envsubst) + ) +fi +if [[ -t 0 ]] && [[ -t 1 ]]; then + # stdin and stdout are open, assume it's an interactive tty session + DOCKER_RUN_ARGS+=("--tty") +fi +if [[ ${APP_AS_ENTRYPOINT} -eq 1 ]]; then + # shellcheck disable=SC2068 + docker run \ + "${DOCKER_RUN_ARGS[@]}" \ + --entrypoint="${APP}" \ + "${IMAGE_TAGGED}" \ + "$@" +else + # shellcheck disable=SC2068 + docker run \ + "${DOCKER_RUN_ARGS[@]}" \ + "${IMAGE_TAGGED}" \ + "${APP}" "$@" +fi diff --git a/services/inference_sidecar/common/builders/tools/unzip b/services/inference_sidecar/common/builders/tools/unzip new file mode 120000 index 00000000..33a2cc95 --- /dev/null +++ b/services/inference_sidecar/common/builders/tools/unzip @@ -0,0 +1 @@ +utils-tool \ No newline at end of file diff --git a/services/inference_sidecar/common/builders/tools/utils-tool b/services/inference_sidecar/common/builders/tools/utils-tool new file mode 120000 index 00000000..d609abda --- /dev/null +++ b/services/inference_sidecar/common/builders/tools/utils-tool @@ -0,0 +1 @@ +test-tool \ No newline at end of file diff --git a/services/inference_sidecar/common/builders/tools/wrk2 b/services/inference_sidecar/common/builders/tools/wrk2 new file mode 120000 index 00000000..d609abda --- /dev/null +++ b/services/inference_sidecar/common/builders/tools/wrk2 @@ -0,0 +1 @@ +test-tool \ No newline at end of file diff --git a/services/inference_sidecar/common/builders/tools/zip b/services/inference_sidecar/common/builders/tools/zip new file mode 120000 index 00000000..33a2cc95 --- /dev/null +++ b/services/inference_sidecar/common/builders/tools/zip @@ -0,0 +1 @@ +utils-tool \ No newline at end of file diff --git a/services/inference_sidecar/common/builders/version.txt b/services/inference_sidecar/common/builders/version.txt new file mode 100644 index 00000000..b9daa0c1 --- /dev/null +++ b/services/inference_sidecar/common/builders/version.txt @@ -0,0 +1 @@ +0.55.1 \ No newline at end of file diff --git 
a/services/inference_sidecar/common/grpc_sidecar.cc b/services/inference_sidecar/common/grpc_sidecar.cc
new file mode 100644
index 00000000..74d7ed7d
--- /dev/null
+++ b/services/inference_sidecar/common/grpc_sidecar.cc
@@ -0,0 +1,97 @@
+// Copyright 2024 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Runs a simple gRPC server.
+
+#include "grpc_sidecar.h"
+
+#include <memory>
+#include <string>
+#include <utility>
+
+#include <grpcpp/grpcpp.h>
+#include <grpcpp/server.h>
+#include <grpcpp/server_builder.h>
+#include <grpcpp/server_posix.h>
+
+#include "absl/log/absl_log.h"
+#include "absl/log/check.h"
+#include "absl/status/status.h"
+#include "absl/time/clock.h"
+#include "modules/module_interface.h"
+#include "proto/inference_sidecar.grpc.pb.h"
+#include "proto/inference_sidecar.pb.h"
+#include "sandbox/sandbox_worker.h"
+#include "sandboxed_api/sandbox2/comms.h"
+#include "src/util/status_macro/status_util.h"
+
+namespace privacy_sandbox::bidding_auction_servers::inference {
+namespace {
+
+// Inference service implementation.
+class InferenceServiceImpl final : public InferenceService::Service {
+ public:
+  explicit InferenceServiceImpl(
+      std::unique_ptr<ModuleInterface> inference_module)
+      : inference_module_(std::move(inference_module)) {}
+
+  grpc::Status RegisterModel(grpc::ServerContext* context,
+                             const RegisterModelRequest* request,
+                             RegisterModelResponse* response) override {
+    absl::StatusOr<RegisterModelResponse> register_model_response =
+        inference_module_->RegisterModel(*request);
+    if (!register_model_response.ok()) {
+      return server_common::FromAbslStatus(register_model_response.status());
+    }
+    *response = register_model_response.value();
+    return grpc::Status::OK;
+  }
+
+  grpc::Status Predict(grpc::ServerContext* context,
+                       const PredictRequest* request,
+                       PredictResponse* response) override {
+    absl::StatusOr<PredictResponse> predict_response =
+        inference_module_->Predict(*request);
+    if (!predict_response.ok()) {
+      return server_common::FromAbslStatus(predict_response.status());
+    }
+    *response = predict_response.value();
+    return grpc::Status::OK;
+  }
+
+ private:
+  std::unique_ptr<ModuleInterface> inference_module_;
+};
+
+}  // namespace
+
+absl::Status Run() {
+  SandboxWorker worker;
+  grpc::ServerBuilder builder;
+
+  auto server_impl =
+      std::make_unique<InferenceServiceImpl>(ModuleInterface::Create());
+  builder.RegisterService(server_impl.get());
+  std::unique_ptr<grpc::Server> server(builder.BuildAndStart());
+  if (server == nullptr) {
+    ABSL_LOG(ERROR) << "Cannot start the gRPC sidecar.";
+    return absl::UnavailableError("Cannot start the gRPC sidecar");
+  }
+
+  // Starts up gRPC over IPC.
+  grpc::AddInsecureChannelFromFd(server.get(), worker.FileDescriptor());
+  server->Wait();
+  return absl::OkStatus();
+}
+
+}  // namespace privacy_sandbox::bidding_auction_servers::inference
diff --git a/services/inference_sidecar/common/grpc_sidecar.h b/services/inference_sidecar/common/grpc_sidecar.h
new file mode 100644
index 00000000..7d9c1960
--- /dev/null
+++ b/services/inference_sidecar/common/grpc_sidecar.h
@@ -0,0 +1,27 @@
+// Copyright 2024 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef SERVICES_INFERENCE_SIDECAR_COMMON_GRPC_SIDECAR_H_
+#define SERVICES_INFERENCE_SIDECAR_COMMON_GRPC_SIDECAR_H_
+
+#include "absl/status/status.h"
+
+namespace privacy_sandbox::bidding_auction_servers::inference {
+
+// Runs a simple gRPC server. It is thread safe.
+absl::Status Run();
+
+}  // namespace privacy_sandbox::bidding_auction_servers::inference
+
+#endif  // SERVICES_INFERENCE_SIDECAR_COMMON_GRPC_SIDECAR_H_
diff --git a/services/inference_sidecar/common/inference_sidecar_main.cc b/services/inference_sidecar/common/inference_sidecar_main.cc
new file mode 100644
index 00000000..daaa1940
--- /dev/null
+++ b/services/inference_sidecar/common/inference_sidecar_main.cc
@@ -0,0 +1,25 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Inference sidecar binary.
+
+#include "absl/log/check.h"
+
+#include "grpc_sidecar.h"
+
+int main(int argc, char** argv) {
+  CHECK(privacy_sandbox::bidding_auction_servers::inference::Run().ok())
+      << "Unsuccessful run of the inference sidecar.";
+  return 0;
+}
diff --git a/services/inference_sidecar/common/inference_sidecar_test.cc b/services/inference_sidecar/common/inference_sidecar_test.cc
new file mode 100644
index 00000000..d4003c29
--- /dev/null
+++ b/services/inference_sidecar/common/inference_sidecar_test.cc
@@ -0,0 +1,107 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include <memory>
+#include <thread>
+#include <vector>
+
+#include <grpcpp/create_channel_posix.h>
+
+#include "absl/flags/flag.h"
+#include "absl/flags/reflection.h"
+#include "absl/status/status.h"
+#include "absl/strings/string_view.h"
+#include "absl/time/clock.h"
+#include "gtest/gtest.h"
+#include "proto/inference_sidecar.grpc.pb.h"
+#include "proto/inference_sidecar.pb.h"
+#include "sandbox/sandbox_executor.h"
+#include "utils/file_util.h"
+
+namespace privacy_sandbox::bidding_auction_servers::inference {
+namespace {
+
+constexpr absl::string_view kInferenceSidecarBinary = "inference_sidecar";
+constexpr absl::string_view kTestModelPath = "test_model";
+constexpr char kJsonString[] = R"json({
+  "request" : [{
+    "model_path" : "test_model",
+    "tensors" : [
+      {
+        "tensor_name": "serving_default_double1:0",
+        "data_type": "DOUBLE",
+        "tensor_shape": [
+          1,
+          1
+        ],
+        "tensor_content": ["3.14"]
+      }
+    ]
+  }]
+})json";
+
+TEST(InferenceSidecarTest, RegisterModelAndRunInference_Grpc) {
+  absl::FlagSaver flag_saver;
+  absl::SetFlag(&FLAGS_testonly_allow_policies_for_bazel, true);
+
+  SandboxExecutor executor(kInferenceSidecarBinary, {""});
+  ASSERT_EQ(executor.StartSandboxee().code(), absl::StatusCode::kOk);
+
+  std::shared_ptr<grpc::Channel> client_channel =
+      grpc::CreateInsecureChannelFromFd("GrpcChannel",
+                                        executor.FileDescriptor());
+  std::unique_ptr<InferenceService::Stub> stub =
+      InferenceService::NewStub(client_channel);
+
+  RegisterModelRequest register_model_request;
+  RegisterModelResponse register_model_response;
+  ASSERT_TRUE(
+      PopulateRegisterModelRequest(kTestModelPath, register_model_request)
+          .ok());
+  {
+    grpc::ClientContext context;
+    grpc::Status status = stub->RegisterModel(&context, register_model_request,
+                                              &register_model_response);
+    EXPECT_TRUE(status.ok()) << status.error_message();
+  }
+
+  const int kNumThreads = 100;
+  const int kNumIterations = 5;
+  std::vector<std::thread> threads;
+  threads.reserve(kNumThreads);
+  for (int i = 0; i < kNumThreads; i++) {
+    threads.push_back(std::thread([&stub]() {
+      PredictRequest predict_request;
+      predict_request.set_input(kJsonString);
+      PredictResponse predict_response;
+      for (int j = 0; j < kNumIterations; j++) {
+        grpc::ClientContext context;
+        grpc::Status status =
+            stub->Predict(&context, predict_request, &predict_response);
+        EXPECT_TRUE(status.ok()) << status.error_message();
+      }
+    }));
+  }
+
+  // Waits for all threads to finish.
+  for (auto& thread : threads) {
+    thread.join();
+  }
+
+  absl::StatusOr<sandbox2::Result> result = executor.StopSandboxee();
+  ASSERT_TRUE(result.ok());
+  ASSERT_EQ(result->final_status(), sandbox2::Result::EXTERNAL_KILL);
+  ASSERT_EQ(result->reason_code(), 0);
+}
+
+}  // namespace
+}  // namespace privacy_sandbox::bidding_auction_servers::inference
diff --git a/services/inference_sidecar/common/ipc_sidecar.cc b/services/inference_sidecar/common/ipc_sidecar.cc
new file mode 100644
index 00000000..d0e54b9b
--- /dev/null
+++ b/services/inference_sidecar/common/ipc_sidecar.cc
@@ -0,0 +1,90 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+ +// Runs a simple IPC single-threaded processor. + +#include "ipc_sidecar.h" + +#include + +#include "absl/log/absl_log.h" +#include "absl/log/check.h" +#include "absl/time/clock.h" +#include "absl/time/time.h" +#include "modules/module_interface.h" +#include "proto/inference_sidecar.pb.h" +#include "sandbox/sandbox_worker.h" +#include "sandboxed_api/sandbox2/comms.h" +#include "src/util/status_macro/status_macros.h" + +namespace privacy_sandbox::bidding_auction_servers::inference { +namespace { + +using ::sandbox2::Comms; + +// Multiple receive trials to handle IPC non-blocking receive failure. +constexpr int kRecvNumTrials = 10; + +} // namespace + +absl::Status Run() { + SandboxWorker worker; + sandbox2::Comms comms(worker.FileDescriptor()); + + std::unique_ptr inference_module = ModuleInterface::Create(); + + // TODO(b/322109220): Handles arbitrary request proto simultaneously. + // TODO(b/325123788): Remove retry logic with gRPC over IPC. + RegisterModelRequest register_request; + bool register_request_received = false; + for (int i = 0; i < kRecvNumTrials; i++) { + register_request_received = comms.RecvProtoBuf(®ister_request); + if (register_request_received) break; + ABSL_LOG(INFO) << "Recv RegisterModelRequest failure #" << i + 1; + absl::SleepFor(absl::Milliseconds(100)); + } + if (!register_request_received || + !inference_module->RegisterModel(register_request).ok()) { + return absl::FailedPreconditionError("Failure to register model."); + } + + RegisterModelResponse register_response; + ABSL_LOG_IF(ERROR, !comms.SendProtoBuf(register_response)) + << "Failure to send RegisterModelResponse"; + + while (true) { + PredictRequest predict_request; + bool predict_request_received = false; + // TODO(b/325123788): Remove retry logic with gRPC over IPC. + // Retry to mitigate "Resource temporary not available" errors. + for (int i = 0; i < kRecvNumTrials; i++) { + predict_request_received = comms.RecvProtoBuf(&predict_request); + if (predict_request_received) break; + ABSL_LOG(INFO) << "Recv PredictRequest failure #" << i + 1; + absl::SleepFor(absl::Microseconds(500)); + } + + if (!predict_request_received) { + ABSL_LOG(INFO) << "No PredictRequest received."; + } else { + // Stops the sidecar if any inference request fails for now. + PS_ASSIGN_OR_RETURN(PredictResponse inference_result, + inference_module->Predict(predict_request)); + ABSL_LOG_IF(ERROR, !comms.SendProtoBuf(inference_result)) + << "Failure to send PredictResponse"; + } + } +} + +} // namespace privacy_sandbox::bidding_auction_servers::inference diff --git a/services/inference_sidecar/common/ipc_sidecar.h b/services/inference_sidecar/common/ipc_sidecar.h new file mode 100644 index 00000000..1d81f590 --- /dev/null +++ b/services/inference_sidecar/common/ipc_sidecar.h @@ -0,0 +1,27 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
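The receive loop above fixes the wire protocol for the IPC mode: exactly one RegisterModelRequest (retried up to kRecvNumTrials times on non-blocking receive failures), one RegisterModelResponse back, then an open-ended stream of PredictRequest/PredictResponse pairs. A minimal sketch of the parent-side counterpart of that handshake, assuming a hypothetical helper name RegisterThenPredict and that worker_fd is the parent's end of the descriptor pair created by SandboxWorker (only the Comms calls shown in the sidecar source are used):

// Parent-side sketch of the IPC protocol driven by ipc_sidecar.cc above.
// Hypothetical; worker_fd is assumed to be the parent's end of the IPC pair.
#include <string>

#include "proto/inference_sidecar.pb.h"
#include "sandboxed_api/sandbox2/comms.h"

namespace inference = privacy_sandbox::bidding_auction_servers::inference;

bool RegisterThenPredict(int worker_fd) {
  sandbox2::Comms comms(worker_fd);

  // Mirrors the sidecar's startup: one RegisterModelRequest first.
  inference::RegisterModelRequest register_request;
  register_request.mutable_model_spec()->set_model_path("test_model");
  if (!comms.SendProtoBuf(register_request)) return false;
  inference::RegisterModelResponse register_response;
  if (!comms.RecvProtoBuf(&register_response)) return false;

  // Then any number of PredictRequests, each answered by a PredictResponse.
  inference::PredictRequest predict_request;
  predict_request.set_input(R"json({"request": []})json");
  if (!comms.SendProtoBuf(predict_request)) return false;
  inference::PredictResponse predict_response;
  return comms.RecvProtoBuf(&predict_response);
}

Each Send/Recv pair here mirrors one iteration of the sidecar loop; a real caller would populate model_files and tolerate transient receive failures rather than returning on the first false result.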
+ +#ifndef SERVICES_INFERENCE_SIDECAR_COMMON_IPC_SIDECAR_H_ +#define SERVICES_INFERENCE_SIDECAR_COMMON_IPC_SIDECAR_H_ + +#include "absl/status/status.h" + +namespace privacy_sandbox::bidding_auction_servers::inference { + +// Runs a simple IPC single-threaded processor. +absl::Status Run(); + +} // namespace privacy_sandbox::bidding_auction_servers::inference + +#endif // SERVICES_INFERENCE_SIDECAR_COMMON_IPC_SIDECAR_H_ diff --git a/services/inference_sidecar/common/modules/BUILD b/services/inference_sidecar/common/modules/BUILD new file mode 100644 index 00000000..933b2f5a --- /dev/null +++ b/services/inference_sidecar/common/modules/BUILD @@ -0,0 +1,85 @@ +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +load("@rules_cc//cc:defs.bzl", "cc_library", "cc_test") + +package(default_visibility = ["//visibility:public"]) + +# We export the test files where different inference backends can run the test in their respective workspaces. +exports_files([ + "module_concurrency_test.cc", +]) + +cc_library( + name = "test_module", + srcs = ["test_module.cc"], + hdrs = ["test_module.h"], + deps = [ + ":module_interface", + "//proto:inference_sidecar_cc_proto", + "@com_google_absl//absl/log:absl_log", + "@com_google_absl//absl/log:check", + "@com_google_absl//absl/status:statusor", + "@com_google_absl//absl/strings", + "@com_google_sandboxed_api//sandboxed_api/util:runfiles", + ], +) + +cc_test( + name = "test_module_test", + size = "small", + srcs = ["test_module_test.cc"], + data = [ + "//testdata:models/tensorflow_1_mib_saved_model.pb", + ], + deps = [ + ":test_module", + "//proto:inference_sidecar_cc_proto", + "@com_google_absl//absl/status:statusor", + "@com_google_googletest//:gtest", + "@com_google_googletest//:gtest_main", + ], +) + +cc_library( + name = "module_interface", + hdrs = [ + "module_interface.h", + ], + deps = [ + "//proto:inference_sidecar_cc_proto", + "@com_google_absl//absl/status:statusor", + ], +) + +# TODO(b/327907675): Enable the concurrency test for TensorFlow. +# TODO(b/327860817): Enable ASAN/TSAN continuous tests for inference. +cc_test( + name = "module_concurrency_test", + size = "small", + srcs = ["module_concurrency_test.cc"], + data = [ + "//:gen_test_model", + ], + deps = [ + ":test_module", + "//proto:inference_sidecar_cc_proto", + "//utils:file_util", + "@com_google_absl//absl/status", + "@com_google_absl//absl/status:statusor", + "@com_google_absl//absl/strings", + "@com_google_googletest//:gtest", + "@com_google_googletest//:gtest_main", + ], +) diff --git a/services/inference_sidecar/common/modules/module_concurrency_test.cc b/services/inference_sidecar/common/modules/module_concurrency_test.cc new file mode 100644 index 00000000..060b4db5 --- /dev/null +++ b/services/inference_sidecar/common/modules/module_concurrency_test.cc @@ -0,0 +1,157 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include <map>
+#include <memory>
+#include <string>
+#include <thread>
+#include <vector>
+
+#include "absl/status/status.h"
+#include "absl/status/statusor.h"
+#include "absl/strings/str_cat.h"
+#include "absl/strings/str_split.h"
+#include "absl/strings/string_view.h"
+#include "gtest/gtest.h"
+#include "modules/module_interface.h"
+#include "proto/inference_sidecar.pb.h"
+#include "utils/file_util.h"
+
+namespace privacy_sandbox::bidding_auction_servers::inference {
+namespace {
+
+const int kNumThreads = 100;
+
+constexpr absl::string_view kTestModelPath = "test_model";
+constexpr char kJsonString[] = R"json({
+  "request" : [{
+    "model_path" : "test_model",
+    "tensors" : [
+      {
+        "tensor_name": "serving_default_double1:0",
+        "data_type": "DOUBLE",
+        "tensor_shape": [
+          1,
+          1
+        ],
+        "tensor_content": ["3.14"]
+      }
+    ]
+  }]
+})json";
+
+// Creates a new request with model files under the new model path.
+RegisterModelRequest CreateNewRegisterRequest(
+    const RegisterModelRequest& register_request,
+    const std::string& new_model_path) {
+  RegisterModelRequest new_register_request = register_request;
+  new_register_request.mutable_model_spec()->set_model_path(new_model_path);
+
+  std::map<std::string, std::string> new_model_files;
+
+  absl::string_view request_model_path =
+      register_request.model_spec().model_path();
+  for (const auto& [relative_path, file_content] :
+       register_request.model_files()) {
+    std::string new_file_path = absl::StrCat(
+        new_model_path, relative_path.substr(request_model_path.length()));
+    new_model_files[new_file_path] = file_content;
+  }
+  new_register_request.clear_model_files();
+  for (const auto& [new_file_path, file_content] : new_model_files) {
+    (*new_register_request.mutable_model_files())[new_file_path] =
+        file_content;
+  }
+  return new_register_request;
+}
+
+TEST(ModuleConcurrencyTest, RegisterModel_Success) {
+  std::unique_ptr<ModuleInterface> module = ModuleInterface::Create();
+  RegisterModelRequest register_request;
+  ASSERT_TRUE(
+      PopulateRegisterModelRequest(kTestModelPath, register_request).ok());
+  ASSERT_TRUE(module->RegisterModel(register_request).ok());
+
+  std::vector<std::thread> threads;
+  threads.reserve(kNumThreads);
+  for (int i = 0; i < kNumThreads; i++) {
+    threads.push_back(std::thread([&module, &register_request, i]() {
+      std::string new_model_path =
+          absl::StrCat(register_request.model_spec().model_path(), i);
+      RegisterModelRequest new_register_request =
+          CreateNewRegisterRequest(register_request, new_model_path);
+
+      EXPECT_TRUE(module->RegisterModel(new_register_request).ok());
+    }));
+  }
+  // Waits for all threads to finish.
+  for (auto& thread : threads) {
+    thread.join();
+  }
+}
+
+TEST(ModuleConcurrencyTest, Predict_Success) {
+  std::unique_ptr<ModuleInterface> module = ModuleInterface::Create();
+  RegisterModelRequest register_request;
+  ASSERT_TRUE(
+      PopulateRegisterModelRequest(kTestModelPath, register_request).ok());
+  ASSERT_TRUE(module->RegisterModel(register_request).ok());
+
+  std::vector<std::thread> threads;
+  threads.reserve(kNumThreads);
+  for (int i = 0; i < kNumThreads; i++) {
+    threads.push_back(std::thread([&module]() {
+      PredictRequest predict_request;
+      predict_request.set_input(kJsonString);
+      EXPECT_TRUE(module->Predict(predict_request).ok());
+    }));
+  }
+  // Waits for all threads to finish.
+  for (auto& thread : threads) {
+    thread.join();
+  }
+}
+
+TEST(ModuleConcurrencyTest, RegisterModel_Predict_Success) {
+  std::unique_ptr<ModuleInterface> module = ModuleInterface::Create();
+  RegisterModelRequest register_request;
+  ASSERT_TRUE(
+      PopulateRegisterModelRequest(kTestModelPath, register_request).ok());
+  ASSERT_TRUE(module->RegisterModel(register_request).ok());
+
+  std::vector<std::thread> threads;
+  threads.reserve(kNumThreads * 2);
+  for (int i = 0; i < kNumThreads; i++) {
+    threads.push_back(std::thread([&module, &register_request, i]() {
+      std::string new_model_path =
+          absl::StrCat(register_request.model_spec().model_path(), i);
+      RegisterModelRequest new_register_request =
+          CreateNewRegisterRequest(register_request, new_model_path);
+
+      EXPECT_TRUE(module->RegisterModel(new_register_request).ok());
+    }));
+  }
+  for (int i = 0; i < kNumThreads; i++) {
+    threads.push_back(std::thread([&module]() {
+      PredictRequest predict_request;
+      // Runs concurrent predictions against a single model.
+      predict_request.set_input(kJsonString);
+      EXPECT_TRUE(module->Predict(predict_request).ok());
+    }));
+  }
+  // Waits for all threads to finish.
+  for (auto& thread : threads) {
+    thread.join();
+  }
+}
+
+}  // namespace
+}  // namespace privacy_sandbox::bidding_auction_servers::inference
diff --git a/services/inference_sidecar/common/modules/module_interface.h b/services/inference_sidecar/common/modules/module_interface.h
new file mode 100644
index 00000000..ab49382d
--- /dev/null
+++ b/services/inference_sidecar/common/modules/module_interface.h
@@ -0,0 +1,45 @@
+/*
+ * Copyright 2023 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SERVICES_INFERENCE_SIDECAR_COMMON_MODULES_MODULE_INTERFACE_H_
+#define SERVICES_INFERENCE_SIDECAR_COMMON_MODULES_MODULE_INTERFACE_H_
+
+#include <memory>
+
+#include "absl/status/statusor.h"
+#include "proto/inference_sidecar.pb.h"
+
+namespace privacy_sandbox::bidding_auction_servers::inference {
+
+class ModuleInterface {
+ public:
+  // Different implementations of ModuleInterface return specialized modules,
+  // for example, the TensorFlow module and the PyTorch module. These
+  // specialized modules can only be called through the methods defined by
+  // this interface.
+  static std::unique_ptr<ModuleInterface> Create();
+  virtual ~ModuleInterface() = default;
+
+  // Executes inference on a registered ML model.
+  virtual absl::StatusOr<PredictResponse> Predict(
+      const PredictRequest& request) = 0;
+  // Registers a new model.
+  virtual absl::StatusOr<RegisterModelResponse> RegisterModel(
+      const RegisterModelRequest& request) = 0;
+};
+
+}  // namespace privacy_sandbox::bidding_auction_servers::inference
+
+#endif  // SERVICES_INFERENCE_SIDECAR_COMMON_MODULES_MODULE_INTERFACE_H_
diff --git a/services/inference_sidecar/common/modules/test_module.cc b/services/inference_sidecar/common/modules/test_module.cc
new file mode 100644
index 00000000..813d70e3
--- /dev/null
+++ b/services/inference_sidecar/common/modules/test_module.cc
@@ -0,0 +1,82 @@
+/*
+ * Copyright 2023 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "modules/test_module.h"
+
+#include <fcntl.h>
+#include <sys/mman.h>
+#include <unistd.h>
+
+#include <memory>
+#include <string>
+
+#include "absl/log/absl_log.h"
+#include "absl/log/check.h"
+#include "absl/status/statusor.h"
+#include "absl/strings/string_view.h"
+#include "modules/module_interface.h"
+#include "proto/inference_sidecar.pb.h"
+#include "sandboxed_api/util/runfiles.h"
+
+namespace privacy_sandbox::bidding_auction_servers::inference {
+
+TestModule::~TestModule() {
+  if (model_ptr_ != nullptr) {
+    CHECK(munmap(model_ptr_, model_size_) == 0) << "Failed to munmap.";
+  }
+  if (model_fd_ != -1) {
+    close(model_fd_);
+  }
+}
+
+absl::StatusOr<PredictResponse> TestModule::Predict(
+    const PredictRequest& request) {
+  // Returns a placeholder value.
+  PredictResponse response;
+  response.set_output("0.57721");
+  return response;
+}
+
+absl::StatusOr<RegisterModelResponse> TestModule::RegisterModel(
+    const RegisterModelRequest& request) {
+  // Returns an empty response.
+  RegisterModelResponse response;
+
+  if (!model_path_.empty()) {
+    std::string path = sapi::GetDataDependencyFilePath(model_path_);
+    model_fd_ = open(path.c_str(), O_RDONLY);
+    if (model_fd_ == -1) {
+      ABSL_LOG(ERROR) << "Failed to read the model: " << path;
+      return response;
+    }
+    model_size_ = lseek(model_fd_, 0, SEEK_END);
+    if (model_size_ <= 0) {
+      ABSL_LOG(ERROR) << "Empty file. Size=" << model_size_;
+      return response;
+    }
+    model_ptr_ = mmap(0, model_size_, PROT_READ, MAP_PRIVATE, model_fd_, 0);
+    if (model_ptr_ == MAP_FAILED) {
+      ABSL_LOG(ERROR) << "Failed to mmap.";
+      return response;
+    }
+  }
+  return response;
+}
+
+std::unique_ptr<ModuleInterface> ModuleInterface::Create() {
+  return std::make_unique<TestModule>();
+}
+
+}  // namespace privacy_sandbox::bidding_auction_servers::inference
diff --git a/services/inference_sidecar/common/modules/test_module.h b/services/inference_sidecar/common/modules/test_module.h
new file mode 100644
index 00000000..3aaaa604
--- /dev/null
+++ b/services/inference_sidecar/common/modules/test_module.h
@@ -0,0 +1,62 @@
+/*
+ * Copyright 2023 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SERVICES_INFERENCE_SIDECAR_COMMON_MODULES_TEST_MODULE_H_
+#define SERVICES_INFERENCE_SIDECAR_COMMON_MODULES_TEST_MODULE_H_
+
+#include <memory>
+#include <string>
+
+#include "absl/status/statusor.h"
+#include "absl/strings/string_view.h"
+#include "modules/module_interface.h"
+#include "proto/inference_sidecar.pb.h"
+
+namespace privacy_sandbox::bidding_auction_servers::inference {
+
+// TestModule is the module used for testing. The module is buildable in the
+// main B&A workspace, while the TensorFlow and PyTorch modules should be
+// built in separate workspaces. The module implements very simple logic for
+// each API; oftentimes, the implementation is a no-op.
+class TestModule final : public ModuleInterface {
+ public:
+  TestModule() = default;
+  // Unmaps and closes the model file, if one was mapped by RegisterModel().
+  ~TestModule() override;
+
+  absl::StatusOr<PredictResponse> Predict(
+      const PredictRequest& request) override;
+  // Maps the model file at `model_path_`, if set, into memory.
+  absl::StatusOr<RegisterModelResponse> RegisterModel(
+      const RegisterModelRequest& request) override;
+
+  void set_model_path(absl::string_view path) { model_path_ = path; }
+
+  int model_size() const { return model_size_; }
+
+ private:
+  // The file descriptor of the memory mapped model file.
+  int model_fd_ = -1;
+  // The size of the model file.
+  int model_size_ = 0;
+  // The memory address of the memory mapped model file.
+  void* model_ptr_ = nullptr;
+  // The model path.
+  std::string model_path_ = "";
+};
+
+}  // namespace privacy_sandbox::bidding_auction_servers::inference
+
+#endif  // SERVICES_INFERENCE_SIDECAR_COMMON_MODULES_TEST_MODULE_H_
diff --git a/services/inference_sidecar/common/modules/test_module_test.cc b/services/inference_sidecar/common/modules/test_module_test.cc
new file mode 100644
index 00000000..db789a16
--- /dev/null
+++ b/services/inference_sidecar/common/modules/test_module_test.cc
@@ -0,0 +1,54 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "modules/test_module.h"
+
+#include <memory>
+
+#include "absl/status/statusor.h"
+#include "absl/strings/string_view.h"
+#include "gtest/gtest.h"
+#include "modules/module_interface.h"
+#include "proto/inference_sidecar.pb.h"
+
+namespace privacy_sandbox::bidding_auction_servers::inference {
+namespace {
+
+constexpr absl::string_view kModelPath =
+    "__main__/testdata/models/tensorflow_1_mib_saved_model.pb";
+
+TEST(TestModule, Success_Predict) {
+  std::unique_ptr<ModuleInterface> module = ModuleInterface::Create();
+  PredictRequest request;
+  auto result = module->Predict(request);
+  EXPECT_TRUE(result.ok());
+}
+
+TEST(TestModule, Success_RegisterModel) {
+  std::unique_ptr<ModuleInterface> module = ModuleInterface::Create();
+  RegisterModelRequest request;
+  auto result = module->RegisterModel(request);
+  EXPECT_TRUE(result.ok());
+}
+
+TEST(TestModule, Success_ReadModel) {
+  std::unique_ptr<TestModule> module = std::make_unique<TestModule>();
+  module->set_model_path(kModelPath);
+  RegisterModelRequest request;
+  auto result = module->RegisterModel(request);
+  EXPECT_TRUE(result.ok());
+  EXPECT_GT(module->model_size(), 0);
+}
+
+}  // namespace
+}  // namespace privacy_sandbox::bidding_auction_servers::inference
diff --git a/services/inference_sidecar/common/proto/BUILD b/services/inference_sidecar/common/proto/BUILD
new file mode 100644
index 00000000..3d91f56c
--- /dev/null
+++ b/services/inference_sidecar/common/proto/BUILD
@@ -0,0 +1,60 @@
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("@com_github_grpc_grpc//bazel:cc_grpc_library.bzl", "cc_grpc_library")
+load("@rules_cc//cc:defs.bzl", "cc_proto_library")
+load("@rules_proto//proto:defs.bzl", "proto_descriptor_set", "proto_library")
+
+package(default_visibility = ["//visibility:public"])
+
+proto_library(
+    name = "inference_sidecar_proto",
+    srcs = ["inference_sidecar.proto"],
+    deps = [
+    ],
+)
+
+proto_descriptor_set(
+    name = "inference_sidecar_proto_descriptor_set",
+    deps = [":inference_sidecar_proto"],
+)
+
+cc_proto_library(
+    name = "inference_sidecar_cc_proto",
+    deps = [":inference_sidecar_proto"],
+)
+
+cc_grpc_library(
+    name = "inference_sidecar_cc_grpc_proto",
+    srcs = [":inference_sidecar_proto"],
+    grpc_only = True,
+    deps = [":inference_sidecar_cc_proto"],
+)
+
+proto_library(
+    name = "testproto_proto",
+    srcs = ["testproto.proto"],
+)
+
+cc_proto_library(
+    name = "testproto_cc_proto",
+    deps = [":testproto_proto"],
+)
+
+cc_grpc_library(
+    name = "testproto_cc_grpc_proto",
+    srcs = [":testproto_proto"],
+    grpc_only = True,
+    deps = [":testproto_cc_proto"],
+)
diff --git a/services/inference_sidecar/common/proto/inference_payload.proto b/services/inference_sidecar/common/proto/inference_payload.proto
new file mode 100644
index 00000000..e1ceb74d
--- /dev/null
+++ b/services/inference_sidecar/common/proto/inference_payload.proto
@@ -0,0 +1,72 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package privacy_sandbox.bidding_auction_servers.inference;
+
+// A BatchInferenceRequest can run across multiple models. Each
+// InferenceRequest targets a single model.
+// WARNING: This proto is currently unused; it guides the conversion from a JS
+// request to C++. See services/inference_sidecar/common/utils/request_parser.h.
+message BatchInferenceRequest {
+  repeated InferenceRequest request = 1;
+}
+
+message BatchInferenceResponse {
+  repeated InferenceResponse response = 1;
+}
+
+message InferenceRequest {
+  // Required servable model path; e.g. "my_bucket/models/pcvr_models/1".
+  string model_path = 1;
+  repeated Tensor tensors = 2;
+}
+
+message InferenceResponse {
+  // Required servable model path; e.g. "my_bucket/models/pcvr/1".
+  string model_path = 1;
+  repeated Tensor tensors = 2;
+}
+
+message Tensor {
+  // Type of data stored in tensor_content. A tensor exclusively holds data of
+  // a uniform type.
+  DataType data_type = 1;
+
+  // Tensor shape.
+  // The order of entries in "tensor_shape" matters: It indicates the layout of
+  // the values in the tensor in-memory representation. The first entry is
+  // the outermost dimension. The last entry is the innermost dimension.
+  repeated int64 tensor_shape = 2;
+
+  // Optional name of the tensor.
+  string tensor_name = 3;
+
+  // Serialized raw tensor content. It holds the flattened representation of
+  // the tensor in row-major order. Only the representation corresponding to
+  // the "data_type" field can be set. The number of elements in
+  // tensor_content should be equal to the product of the tensor_shape
+  // elements; for example, a tensor of shape [1,4] expects a flat array of 4
+  // elements (e.g. [1, 2, 7, 4]), and one with shape [2,3] expects a
+  // 6-element one.
+  bytes tensor_content = 4;
+}
+
+// Supported tensor data types.
+enum DataType {
+  FLOAT = 0;   // 32-bit floating point
+  INT64 = 1;   // 64-bit integer (signed)
+  INT32 = 2;   // 32-bit integer (signed)
+  DOUBLE = 3;  // 64-bit floating point
+}
diff --git a/services/inference_sidecar/common/proto/inference_sidecar.proto b/services/inference_sidecar/common/proto/inference_sidecar.proto
new file mode 100644
index 00000000..048e7daf
--- /dev/null
+++ b/services/inference_sidecar/common/proto/inference_sidecar.proto
@@ -0,0 +1,54 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package privacy_sandbox.bidding_auction_servers.inference;
+
+service InferenceService {
+  // Runs inference.
+  rpc Predict(PredictRequest) returns (PredictResponse) {
+  }
+  // Registers a model.
+  rpc RegisterModel(RegisterModelRequest) returns (RegisterModelResponse) {
+  }
+}
+
+message PredictRequest {
+  // Input data.
+  bytes input = 1;
+}
+
+// Response for PredictRequest on a successful run.
+message PredictResponse {
+  // Output data.
+  bytes output = 1;
+}
+
+message ModelSpec {
+  // Required servable model path; e.g. "my_bucket/models/pcvr_models/1".
+  string model_path = 1;
+}
+
+// RegisterModelRequest specifies a model to register.
+message RegisterModelRequest {
+  // Model specification.
+  ModelSpec model_spec = 1;
+  // Raw payload of an ML model.
+  // This represents a list of file path and content pairs.
+  map<string, bytes> model_files = 2;
+}
+
+message RegisterModelResponse {
+}
diff --git a/services/inference_sidecar/common/proto/testproto.proto b/services/inference_sidecar/common/proto/testproto.proto
new file mode 100644
index 00000000..311e31de
--- /dev/null
+++ b/services/inference_sidecar/common/proto/testproto.proto
@@ -0,0 +1,28 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package privacy_sandbox.bidding_auction_servers.inference;
+
+// For testing purposes.
+message Empty {
+}
+
+// For testing purposes.
+service TestService {
+  // Runs inference.
+  rpc Predict(Empty) returns (Empty) {
+  }
+}
diff --git a/services/inference_sidecar/common/sandbox/BUILD b/services/inference_sidecar/common/sandbox/BUILD
new file mode 100644
index 00000000..da936fc4
--- /dev/null
+++ b/services/inference_sidecar/common/sandbox/BUILD
@@ -0,0 +1,117 @@
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
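To make the protos above concrete, here is a short sketch of populating them from C++. Field and type names come from the .proto files above; the helper functions themselves are illustrative only:

#include <string>
#include <utility>

#include "proto/inference_payload.pb.h"
#include "proto/inference_sidecar.pb.h"

namespace inference = privacy_sandbox::bidding_auction_servers::inference;

// Pack a [2, 3] float tensor: shape [2,3] implies 6 row-major floats in
// tensor_content.
inference::Tensor MakeFloatTensor() {
  inference::Tensor t;
  t.set_data_type(inference::FLOAT);
  t.add_tensor_shape(2);
  t.add_tensor_shape(3);
  const float values[6] = {1, 2, 3, 4, 5, 6};
  t.set_tensor_content(
      std::string(reinterpret_cast<const char*>(values), sizeof(values)));
  return t;
}

// Assemble a RegisterModelRequest for a single-file model: the model_files
// map carries one path/content pair.
inference::RegisterModelRequest MakeRegisterModelRequest(
    const std::string& path, std::string file_bytes) {
  inference::RegisterModelRequest request;
  request.mutable_model_spec()->set_model_path(path);
  (*request.mutable_model_files())[path] = std::move(file_bytes);
  return request;
}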
+ +load("@rules_cc//cc:defs.bzl", "cc_binary", "cc_library", "cc_test") + +package(default_visibility = ["//visibility:public"]) + +cc_library( + name = "sandbox_worker", + srcs = ["sandbox_worker.cc"], + hdrs = ["sandbox_worker.h"], + deps = [ + "@com_google_absl//absl/log:check", + "@com_google_absl//absl/status", + "@com_google_sandboxed_api//sandboxed_api/sandbox2", + "@com_google_sandboxed_api//sandboxed_api/sandbox2:comms", + ], +) + +cc_library( + name = "sandbox_executor", + srcs = ["sandbox_executor.cc"], + hdrs = ["sandbox_executor.h"], + deps = [ + ":sandbox_worker", + "@com_google_absl//absl/flags:flag", + "@com_google_absl//absl/log:absl_log", + "@com_google_absl//absl/status", + "@com_google_absl//absl/status:statusor", + "@com_google_absl//absl/strings", + "@com_google_absl//absl/time", + "@com_google_sandboxed_api//sandboxed_api/sandbox2", + "@com_google_sandboxed_api//sandboxed_api/sandbox2/util:bpf_helper", + "@com_google_sandboxed_api//sandboxed_api/util:runfiles", + ], +) + +cc_test( + name = "sandbox_executor_test", + size = "small", + srcs = ["sandbox_executor_test.cc"], + data = [ + ":sandboxee_exit_test_bin", + ":sandboxee_grpc_test_bin", + ":sandboxee_ipc_test_bin", + ], + flaky = True, + deps = [ + ":sandbox_executor", + "//proto:testproto_cc_grpc_proto", + "//proto:testproto_cc_proto", + "@com_github_grpc_grpc//:grpc++", + "@com_google_absl//absl/flags:flag", + "@com_google_absl//absl/flags:reflection", + "@com_google_absl//absl/status", + "@com_google_absl//absl/strings", + "@com_google_absl//absl/time", + "@com_google_googletest//:gtest", + "@com_google_googletest//:gtest_main", + ], +) + +cc_binary( + name = "sandboxee_exit_test_bin", + testonly = 1, + srcs = ["sandboxee_exit_test_bin.cc"], + features = [ + "-pie", + "fully_static_link", + ], + deps = [ + ":sandbox_worker", + ], +) + +cc_binary( + name = "sandboxee_ipc_test_bin", + testonly = 1, + srcs = ["sandboxee_ipc_test_bin.cc"], + features = [ + "-pie", + "fully_static_link", + ], + deps = [ + ":sandbox_worker", + "//proto:testproto_cc_proto", + "@com_google_absl//absl/log:absl_log", + ], +) + +cc_binary( + name = "sandboxee_grpc_test_bin", + testonly = 1, + srcs = ["sandboxee_grpc_test_bin.cc"], + features = [ + "-pie", + "fully_static_link", + ], + deps = [ + ":sandbox_worker", + "//proto:testproto_cc_grpc_proto", + "//proto:testproto_cc_proto", + "@com_github_grpc_grpc//:grpc++", + "@com_google_absl//absl/log:absl_log", + ], +) diff --git a/services/inference_sidecar/common/sandbox/sandbox_executor.cc b/services/inference_sidecar/common/sandbox/sandbox_executor.cc new file mode 100644 index 00000000..5d3686c3 --- /dev/null +++ b/services/inference_sidecar/common/sandbox/sandbox_executor.cc @@ -0,0 +1,275 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
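The targets above all build on sandbox2::Comms, the small IPC layer that ships protobufs over a Unix socket. As a self-contained illustration of the Comms calls the executor and worker below rely on, here is a sketch that exercises the same API over an ordinary socketpair, with no sandbox involved (assuming the fd constructor and proto helpers used elsewhere in this patch):

#include <sys/socket.h>

#include "proto/testproto.pb.h"
#include "sandboxed_api/sandbox2/comms.h"

namespace inference = privacy_sandbox::bidding_auction_servers::inference;

// Send one Empty proto from one end of a Unix socket pair and receive it on
// the other, using the same SendProtoBuf/RecvProtoBuf calls as the sandbox
// code below.
bool PingPong() {
  int fds[2];
  if (socketpair(AF_UNIX, SOCK_STREAM, 0, fds) != 0) return false;
  sandbox2::Comms sender(fds[0]);
  sandbox2::Comms receiver(fds[1]);
  inference::Empty msg;
  return sender.SendProtoBuf(msg) && receiver.RecvProtoBuf(&msg);
}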
+ +#include "sandbox/sandbox_executor.h" + +#include +#include + +#include +#include + +#include "absl/flags/flag.h" +#include "absl/log/absl_log.h" +#include "absl/status/status.h" +#include "absl/status/statusor.h" +#include "absl/strings/str_cat.h" +#include "absl/strings/str_format.h" +#include "absl/time/time.h" +#include "sandbox/sandbox_worker.h" +#include "sandboxed_api/sandbox2/allow_all_syscalls.h" +#include "sandboxed_api/sandbox2/policy.h" +#include "sandboxed_api/sandbox2/policybuilder.h" +#include "sandboxed_api/sandbox2/result.h" +#include "sandboxed_api/sandbox2/sandbox2.h" +#include "sandboxed_api/sandbox2/util/bpf_helper.h" +#include "sandboxed_api/util/runfiles.h" + +ABSL_FLAG(bool, testonly_disable_sandbox, false, + "Disable sandbox restricted policies for testing purposes."); +ABSL_FLAG(bool, testonly_allow_policies_for_bazel, false, + "Allow sandbox policies used in bazel for testing purposes."); + +namespace privacy_sandbox::bidding_auction_servers::inference { +namespace { + +// The same sandbox policy that Roma enforces. +// https://github.com/privacysandbox/data-plane-shared-libraries/blob/3db380a699ff33e359bfac60130b027948261a6a/src/roma/sandbox/worker_api/sapi/src/worker_sandbox_api.h#L182 +void AllowSamePolicyAsRoma(sandbox2::PolicyBuilder& builder) { + builder.AllowRead() + .AllowWrite() + .AllowOpen() + .AllowSystemMalloc() + .AllowHandleSignals() + .AllowExit() + .AllowStat() + .AllowTime() + .AllowGetIDs() + .AllowGetPIDs() + .AllowReadlink() + .AllowMmap() + .AllowFork() +#ifdef UNDEFINED_BEHAVIOR_SANITIZER + .AllowPipe() + .AllowLlvmSanitizers() +#endif + .AllowSyscall(__NR_tgkill) + .AllowSyscall(__NR_recvmsg) + .AllowSyscall(__NR_sendmsg) + .AllowSyscall(__NR_lseek) + .AllowSyscall(__NR_futex) + .AllowSyscall(__NR_close) + .AllowSyscall(__NR_nanosleep) + .AllowSyscall(__NR_sched_getaffinity) + .AllowSyscall(__NR_mprotect) + .AllowSyscall(__NR_clone3) + .AllowSyscall(__NR_rseq) + .AllowSyscall(__NR_set_robust_list) + .AllowSyscall(__NR_prctl) + .AllowSyscall(__NR_uname) + .AllowSyscall(__NR_pkey_alloc) + .AllowSyscall(__NR_madvise) + .AllowSyscall(__NR_ioctl) + .AllowSyscall(__NR_prlimit64) + //------------------------ + // These are for TCMalloc: + .AllowTcMalloc() + .AllowSyscall(__NR_sched_getparam) + .AllowSyscall(__NR_sched_getscheduler) + .AllowSyscall(__NR_clock_nanosleep) + .AllowSyscall(__NR_sched_yield) + .AllowSyscall(__NR_rseq) + //------------------------ + .AllowDynamicStartup() + .DisableNamespaces() + .CollectStacktracesOnViolation(false) + .CollectStacktracesOnSignal(false) + .CollectStacktracesOnTimeout(false) + .CollectStacktracesOnKill(false) + .CollectStacktracesOnExit(false); + +// Stack traces are only collected in DEBUG mode. +#ifndef NDEBUG + builder.CollectStacktracesOnViolation(true) + .CollectStacktracesOnSignal(true) + .CollectStacktracesOnTimeout(true) + .CollectStacktracesOnKill(true) + .CollectStacktracesOnExit(true); +#endif +} + +void AllowBazelTest(sandbox2::PolicyBuilder& builder) { + // For `bazel coverage`. 
+  builder.AllowMkdir().AllowSyscall(__NR_ftruncate);
+}
+
+void AllowGrpc(sandbox2::PolicyBuilder& builder) {
+  builder.AllowSyscall(__NR_eventfd2)
+      .AllowEpoll()
+      .AllowSyscall(__NR_getsockname)
+      .AddPolicyOnSyscall(__NR_setsockopt,
+                          {
+                              ARG(1) /*level*/,
+                              JNE(IPPROTO_TCP, DENY),
+                              ARG(2), /*option_name*/
+                              JEQ(36 /*TCP_INQ*/, ALLOW),
+                          })
+
+      .AllowSafeFcntl()
+      .AllowSyscall(__NR_getrandom)
+      .AllowSyscalls({__NR_recvmsg, __NR_sendmsg})
+      .AddPolicyOnSyscall(__NR_tgkill,
+                          {ARG_32(2) /*sig*/, JEQ32(SIGABRT, ALLOW)});
+}
+
+void AllowPyTorch(sandbox2::PolicyBuilder& builder) {
+  // TODO(b/323601668): Rewrite the PyTorch sidecar in order not to call
+  // sysinfo.
+  builder.AllowSyscall(__NR_sysinfo);
+}
+
+void AllowTensorFlow(sandbox2::PolicyBuilder& builder) {
+  // TODO(b/316960066): Do not read directories in the image.
+  builder.AllowReaddir();
+}
+
+std::unique_ptr<sandbox2::Policy> MakePolicy() {
+  if (absl::GetFlag(FLAGS_testonly_disable_sandbox)) {
+    return sandbox2::PolicyBuilder()
+        .DisableNamespaces()
+        .DefaultAction(sandbox2::AllowAllSyscalls())
+        .BuildOrDie();
+  }
+
+  sandbox2::PolicyBuilder builder = sandbox2::PolicyBuilder();
+  if (absl::GetFlag(FLAGS_testonly_allow_policies_for_bazel)) {
+    AllowBazelTest(builder);
+  }
+  AllowSamePolicyAsRoma(builder);
+  AllowGrpc(builder);
+  AllowPyTorch(builder);
+  AllowTensorFlow(builder);
+  builder.AllowStaticStartup().AllowLogForwarding();
+
+  return builder.BuildOrDie();
+}
+
+}  // namespace
+
+std::string SandboxeeStateToString(SandboxeeState sandboxee_state) {
+  switch (sandboxee_state) {
+    case SandboxeeState::kNotStarted:
+      return "NotStarted";
+    case SandboxeeState::kRunning:
+      return "Running";
+    case SandboxeeState::kStopped:
+      return "Stopped";
+  }
+
+  int sandboxee_state_raw = static_cast<int>(sandboxee_state);
+  ABSL_LOG(ERROR) << "Unexpected sandboxee_state_ value: "
+                  << sandboxee_state_raw;
+  return absl::StrCat(sandboxee_state_raw);
+}
+
+std::string GetFilePath(absl::string_view relative_binary_path) {
+  return sapi::GetDataDependencyFilePath(relative_binary_path);
+}
+
+SandboxExecutor::SandboxExecutor(absl::string_view binary_path,
+                                 const std::vector<std::string>& args) {
+  auto executor = std::make_unique<sandbox2::Executor>(binary_path, args);
+  executor->limits()
+      ->set_rlimit_cpu(RLIM64_INFINITY)
+      .set_rlimit_as(RLIM64_INFINITY)
+      .set_rlimit_core(0)
+      .set_walltime_limit(absl::InfiniteDuration());
+  executor->ipc()->EnableLogServer();
+
+  // The sandboxee should call `SandboxMeHere()` manually to start applying
+  // the syscall policy.
+  executor->set_enable_sandbox_before_exec(false);
+
+  // The executor receives the file descriptor shared with the sandboxee.
+  file_descriptor_ = executor->ipc()->ReceiveFd(kFileDescriptorName);
+  sandbox_ =
+      std::make_unique<sandbox2::Sandbox2>(std::move(executor), MakePolicy());
+}
+
+SandboxExecutor::~SandboxExecutor() { StopSandboxee().IgnoreError(); }
+
+absl::Status SandboxExecutor::StartSandboxee() {
+  if (sandboxee_state_ != SandboxeeState::kNotStarted) {
+    return absl::FailedPreconditionError(
+        absl::StrCat("Sandboxee cannot start. Current state: ",
+                     SandboxeeStateToString(sandboxee_state_)));
+  }
+
+  ABSL_LOG(INFO) << "SandboxExecutor: Starting sandboxee";
+  if (!sandbox_->RunAsync()) {
+    sandboxee_state_ = SandboxeeState::kStopped;
+    run_result_ = sandbox_->AwaitResult();
+    ABSL_LOG(ERROR) << "SandboxExecutor: Failed to start: "
+                    << run_result_.ToString();
+    return run_result_.ToStatus();
+  }
+  sandboxee_state_ = SandboxeeState::kRunning;
+  ABSL_LOG(INFO) << "SandboxExecutor: Started sandboxee";
+  return absl::OkStatus();
+}
+
+absl::StatusOr<sandbox2::Result> SandboxExecutor::StopSandboxee() {
+  if (sandboxee_state_ == SandboxeeState::kStopped) {
+    return run_result_;
+  }
+
+  if (sandboxee_state_ == SandboxeeState::kNotStarted) {
+    ABSL_LOG(ERROR) << "Sandboxee wasn't started when StopSandboxee was called";
+    sandboxee_state_ = SandboxeeState::kStopped;
+    return absl::FailedPreconditionError(
+        "Sandboxee wasn't started when StopSandboxee was called");
+  }
+
+  sandboxee_state_ = SandboxeeState::kStopped;
+
+  ABSL_LOG(INFO)
+      << "SandboxExecutor: Checking if sandboxee stopped on its own";
+  absl::StatusOr<sandbox2::Result> self_stopped_result =
+      sandbox_->AwaitResultWithTimeout(absl::ZeroDuration());
+  if (self_stopped_result.ok()) {
+    ABSL_LOG(INFO) << "SandboxExecutor: Sandboxee stopped on its own";
+    run_result_ = self_stopped_result.value();
+    return run_result_;
+  }
+
+  // sandbox_->Kill() can be called only if the sandboxee was started and
+  // setup did not fail.
+  ABSL_LOG(INFO) << "SandboxExecutor: Sending SIGKILL to sandboxee";
+  sandbox_->Kill();
+  absl::StatusOr<sandbox2::Result> kill_result =
+      sandbox_->AwaitResultWithTimeout(absl::Minutes(1));
+  // Note: SIGKILL can be delayed.
+  if (kill_result.ok()) {
+    ABSL_LOG(INFO) << "SandboxExecutor: Killed sandboxee";
+    run_result_ = kill_result.value();
+    return run_result_;
+  } else {
+    ABSL_LOG(ERROR)
+        << "SandboxExecutor: Failed to send SIGKILL or to wait until the "
+           "sandboxee is killed. Continuing with shutdown";
+    return kill_result;
+  }
+}
+
+}  // namespace privacy_sandbox::bidding_auction_servers::inference
diff --git a/services/inference_sidecar/common/sandbox/sandbox_executor.h b/services/inference_sidecar/common/sandbox/sandbox_executor.h
new file mode 100644
index 00000000..dce4e643
--- /dev/null
+++ b/services/inference_sidecar/common/sandbox/sandbox_executor.h
@@ -0,0 +1,89 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef SANDBOX_SANDBOX_EXECUTOR_H_
+#define SANDBOX_SANDBOX_EXECUTOR_H_
+
+#include <memory>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "absl/flags/declare.h"
+#include "absl/status/status.h"
+#include "absl/status/statusor.h"
+#include "absl/strings/string_view.h"
+#include "sandboxed_api/sandbox2/sandbox2.h"
+
+ABSL_DECLARE_FLAG(bool, testonly_disable_sandbox);
+ABSL_DECLARE_FLAG(bool, testonly_allow_policies_for_bazel);
+
+namespace privacy_sandbox::bidding_auction_servers::inference {
+
+// State of the sandboxee.
+enum class SandboxeeState {
+  // The initial state before the sandboxee is started.
+  kNotStarted = 0,
+  // The state indicating that the sandboxee has started successfully.
+  kRunning,
+  // The final state after the sandboxee has been stopped or killed; cannot be
+  // switched to any other state.
+  kStopped,
+};
+
+// Transforms a sandboxee state to a string for logging purposes.
+std::string SandboxeeStateToString(SandboxeeState sandboxee_state);
+
+// Returns the full path of the sandboxee binary added as the data of the
+// executor binary.
+std::string GetFilePath(absl::string_view relative_binary_path);
+
+// SandboxExecutor runs a given binary as a child process under Sandbox2.
+// It also sets up the IPC communication with `SandboxWorker`.
+// Not thread safe.
+class SandboxExecutor {
+ public:
+  SandboxExecutor(absl::string_view binary_path,
+                  const std::vector<std::string>& args);
+  ~SandboxExecutor();
+
+  SandboxExecutor(const SandboxExecutor&) = delete;
+  SandboxExecutor& operator=(const SandboxExecutor&) = delete;
+
+  // Starts a sandboxee instance asynchronously.
+  // Must not be called more than once, and must not be called after
+  // `StopSandboxee`.
+  absl::Status StartSandboxee();
+
+  // Stops the sandboxee instance. Returns an error status if the sandboxee
+  // fails to stop, or the `sandbox2::Result` if stopped successfully. Safe to
+  // call multiple times, but no other operations such as `StartSandboxee` may
+  // be called afterwards.
+  absl::StatusOr<sandbox2::Result> StopSandboxee();
+
+  // Returns the open file descriptor used to communicate with the sandboxee
+  // via IPC.
+  int FileDescriptor() const { return file_descriptor_; }
+
+ private:
+  std::unique_ptr<sandbox2::Sandbox2> sandbox_;
+  // File descriptor used for IPC.
+  int file_descriptor_;
+  SandboxeeState sandboxee_state_ = SandboxeeState::kNotStarted;
+  // Populated on the first successful stop of the sandboxee.
+  sandbox2::Result run_result_;
+};
+
+}  // namespace privacy_sandbox::bidding_auction_servers::inference
+
+#endif  // SANDBOX_SANDBOX_EXECUTOR_H_
diff --git a/services/inference_sidecar/common/sandbox/sandbox_executor_test.cc b/services/inference_sidecar/common/sandbox/sandbox_executor_test.cc
new file mode 100644
index 00000000..5936ac2b
--- /dev/null
+++ b/services/inference_sidecar/common/sandbox/sandbox_executor_test.cc
@@ -0,0 +1,221 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
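Condensing the header above into a usage sketch (the binary path is a placeholder; the tests that follow do the same thing with real test binaries):

#include <vector>

#include "absl/status/statusor.h"
#include "sandbox/sandbox_executor.h"
#include "sandboxed_api/sandbox2/comms.h"

namespace inference = privacy_sandbox::bidding_auction_servers::inference;

// Start a sandboxee, talk to it over the shared fd, then stop it.
absl::StatusOr<sandbox2::Result> RunSandboxeeOnce() {
  inference::SandboxExecutor executor(
      inference::GetFilePath("__main__/sandbox/some_sandboxee_bin"),  // placeholder
      {""});
  sandbox2::Comms comms(executor.FileDescriptor());
  if (absl::Status status = executor.StartSandboxee(); !status.ok()) {
    return status;
  }
  // ... exchange protos over `comms`, or attach a gRPC channel to the fd ...
  return executor.StopSandboxee();
}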
+
+#include "sandbox/sandbox_executor.h"
+
+#include <grpcpp/create_channel_posix.h>
+
+#include <memory>
+#include <string>
+#include <thread>
+#include <vector>
+
+#include "absl/flags/flag.h"
+#include "absl/flags/reflection.h"
+#include "absl/status/status.h"
+#include "absl/strings/string_view.h"
+#include "absl/time/clock.h"
+#include "gmock/gmock.h"
+#include "gtest/gtest.h"
+#include "proto/testproto.grpc.pb.h"
+#include "proto/testproto.pb.h"
+
+namespace privacy_sandbox::bidding_auction_servers::inference {
+namespace {
+
+using ::testing::HasSubstr;
+using ::testing::TestWithParam;
+
+constexpr absl::string_view kExitBinary =
+    "__main__/sandbox/sandboxee_exit_test_bin";
+constexpr absl::string_view kGrpcBinary =
+    "__main__/sandbox/sandboxee_grpc_test_bin";
+constexpr absl::string_view kIpcBinary =
+    "__main__/sandbox/sandboxee_ipc_test_bin";
+constexpr absl::string_view kNonExistentBinary = "non_existent_binary_path";
+
+class SandboxExecutorTest : public ::testing::Test {
+ protected:
+  void SetUp() override {
+    absl::SetFlag(&FLAGS_testonly_allow_policies_for_bazel, true);
+  }
+
+ private:
+  absl::FlagSaver flag_saver_;
+};
+
+TEST_F(SandboxExecutorTest, NonExistentBinary) {
+  SandboxExecutor executor(GetFilePath(kNonExistentBinary), {""});
+
+  absl::Status status = executor.StartSandboxee();
+  ASSERT_EQ(status.code(), absl::StatusCode::kInternal);
+  ASSERT_THAT(status.message(), HasSubstr("SETUP_ERROR"));
+
+  absl::StatusOr<sandbox2::Result> result = executor.StopSandboxee();
+  ASSERT_TRUE(result.ok());
+  ASSERT_EQ(result->final_status(), sandbox2::Result::SETUP_ERROR);
+}
+
+TEST_F(SandboxExecutorTest, StopBeforeStart) {
+  SandboxExecutor executor(GetFilePath(kNonExistentBinary), {""});
+
+  absl::StatusOr<sandbox2::Result> result = executor.StopSandboxee();
+  ASSERT_EQ(result.status().code(), absl::StatusCode::kFailedPrecondition);
+
+  absl::Status status = executor.StartSandboxee();
+  ASSERT_EQ(status.code(), absl::StatusCode::kFailedPrecondition);
+}
+
+TEST_F(SandboxExecutorTest, DoubleStop) {
+  SandboxExecutor executor(GetFilePath(kExitBinary), {""});
+  ASSERT_EQ(executor.StartSandboxee().code(), absl::StatusCode::kOk);
+
+  // Wait for sandboxee to stop on its own.
+  absl::SleepFor(absl::Seconds(1));
+
+  absl::StatusOr<sandbox2::Result> result = executor.StopSandboxee();
+  ASSERT_TRUE(result.ok());
+  ASSERT_EQ(result->final_status(), sandbox2::Result::OK);
+  result = executor.StopSandboxee();
+  ASSERT_TRUE(result.ok());
+  ASSERT_EQ(result->final_status(), sandbox2::Result::OK);
+}
+
+TEST_F(SandboxExecutorTest, Kill) {
+  // `sandboxee_ipc_test_bin` waits for the proto message.
+  SandboxExecutor executor(GetFilePath(kIpcBinary), {""});
+  absl::Status status = executor.StartSandboxee();
+  ASSERT_EQ(status.code(), absl::StatusCode::kOk);
+
+  absl::StatusOr<sandbox2::Result> result = executor.StopSandboxee();
+  ASSERT_TRUE(result.ok());
+  ASSERT_EQ(result->final_status(), sandbox2::Result::EXTERNAL_KILL);
+}
+
+TEST_F(SandboxExecutorTest, DoubleStart) {
+  SandboxExecutor executor(GetFilePath(kIpcBinary), {""});
+  ASSERT_EQ(executor.StartSandboxee().code(), absl::StatusCode::kOk);
+  ASSERT_EQ(executor.StartSandboxee().code(),
+            absl::StatusCode::kFailedPrecondition);
+}
+
+TEST_F(SandboxExecutorTest, Ipc) {
+  SandboxExecutor executor(GetFilePath(kIpcBinary), {""});
+  sandbox2::Comms comms(executor.FileDescriptor());
+
+  ASSERT_EQ(executor.StartSandboxee().code(), absl::StatusCode::kOk);
+
+  // Send and receive message.
+  Empty request;
+  ASSERT_TRUE(comms.SendProtoBuf(request));
+
+  Empty response;
+  ASSERT_TRUE(comms.RecvProtoBuf(&response));
+
+  // Wait for sandboxee to stop on its own.
+  absl::SleepFor(absl::Seconds(1));
+
+  absl::StatusOr<sandbox2::Result> result = executor.StopSandboxee();
+  ASSERT_TRUE(result.ok());
+  ASSERT_EQ(result->final_status(), sandbox2::Result::OK);
+  ASSERT_EQ(result->reason_code(), 0);
+}
+
+TEST_F(SandboxExecutorTest, Grpc) {
+  SandboxExecutor executor(GetFilePath(kGrpcBinary), {""});
+  sandbox2::Comms comms(executor.FileDescriptor());
+
+  ASSERT_EQ(executor.StartSandboxee().code(), absl::StatusCode::kOk);
+
+  // Wait for sandboxee to start gRPC server.
+  absl::SleepFor(absl::Seconds(1));
+
+  std::shared_ptr<grpc::Channel> client_channel =
+      grpc::CreateInsecureChannelFromFd("GrpcChannel",
+                                        executor.FileDescriptor());
+  std::unique_ptr<TestService::Stub> stub =
+      TestService::NewStub(client_channel);
+
+  Empty request, response;
+  const int kNumIterations = 1000;
+  for (int i = 0; i < kNumIterations; i++) {
+    grpc::ClientContext context;
+    grpc::Status status = stub->Predict(&context, request, &response);
+    ASSERT_TRUE(status.ok()) << status.error_message();
+  }
+  absl::StatusOr<sandbox2::Result> result = executor.StopSandboxee();
+  ASSERT_TRUE(result.ok());
+  ASSERT_EQ(result->final_status(), sandbox2::Result::EXTERNAL_KILL);
+  ASSERT_EQ(result->reason_code(), 0);
+}
+
+TEST_F(SandboxExecutorTest, ConcurrentGrpc) {
+  SandboxExecutor executor(GetFilePath(kGrpcBinary), {""});
+  sandbox2::Comms comms(executor.FileDescriptor());
+
+  ASSERT_EQ(executor.StartSandboxee().code(), absl::StatusCode::kOk);
+
+  // Wait for sandboxee to start gRPC server.
+  absl::SleepFor(absl::Seconds(1));
+
+  std::shared_ptr<grpc::Channel> client_channel =
+      grpc::CreateInsecureChannelFromFd("GrpcChannel",
+                                        executor.FileDescriptor());
+  std::unique_ptr<TestService::Stub> stub =
+      TestService::NewStub(client_channel);
+
+  const int kNumThreads = 1000;
+  const int kNumIterations = 5;
+  std::vector<std::thread> threads;
+  threads.reserve(kNumThreads);
+  for (int i = 0; i < kNumThreads; i++) {
+    threads.push_back(std::thread([&stub, &kNumIterations]() {
+      Empty request, response;
+      for (int j = 0; j < kNumIterations; j++) {
+        grpc::ClientContext context;
+        grpc::Status status = stub->Predict(&context, request, &response);
+        EXPECT_TRUE(status.ok()) << status.error_message();
+      }
+    }));
+  }
+
+  // Waits for all threads to finish.
+  for (auto& thread : threads) {
+    thread.join();
+  }
+
+  absl::StatusOr<sandbox2::Result> result = executor.StopSandboxee();
+  ASSERT_TRUE(result.ok());
+  ASSERT_EQ(result->final_status(), sandbox2::Result::EXTERNAL_KILL);
+  ASSERT_EQ(result->reason_code(), 0);
+}
+
+TEST(SandboxeeStateToString, NotStarted) {
+  EXPECT_EQ(SandboxeeStateToString(SandboxeeState::kNotStarted), "NotStarted");
+}
+
+TEST(SandboxeeStateToString, Running) {
+  EXPECT_EQ(SandboxeeStateToString(SandboxeeState::kRunning), "Running");
+}
+
+TEST(SandboxeeStateToString, Stopped) {
+  EXPECT_EQ(SandboxeeStateToString(SandboxeeState::kStopped), "Stopped");
+}
+
+TEST(SandboxeeStateToString, Invalid) {
+  ASSERT_EQ(SandboxeeStateToString(static_cast<SandboxeeState>(-1)), "-1");
+}
+
+}  // namespace
+}  // namespace privacy_sandbox::bidding_auction_servers::inference
diff --git a/services/inference_sidecar/common/sandbox/sandbox_worker.cc b/services/inference_sidecar/common/sandbox/sandbox_worker.cc
new file mode 100644
index 00000000..9ee53e27
--- /dev/null
+++ b/services/inference_sidecar/common/sandbox/sandbox_worker.cc
@@ -0,0 +1,51 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "sandbox/sandbox_worker.h"
+
+#include <fcntl.h>
+
+#include "absl/log/check.h"
+#include "absl/status/status.h"
+#include "sandboxed_api/sandbox2/client.h"
+#include "sandboxed_api/sandbox2/comms.h"
+
+namespace privacy_sandbox::bidding_auction_servers::inference {
+namespace {
+
+// Makes `file_descriptor` non-blocking to allow communication via gRPC.
+absl::Status MakeFileDescriptorNonBlocking(int file_descriptor) {
+  int flags = fcntl(file_descriptor, F_GETFL, 0);
+  if (fcntl(file_descriptor, F_SETFL, flags | O_NONBLOCK) == -1) {
+    return absl::InternalError("Failed to set `O_NONBLOCK`");
+  }
+  return absl::OkStatus();
+}
+
+}  // namespace
+
+SandboxWorker::SandboxWorker()
+    : comms_(sandbox2::Comms::kSandbox2ClientCommsFD),
+      sandbox2_client_(&comms_) {
+  // Enable sandboxing from here. Getting FDs should happen after this call.
+  sandbox2_client_.SandboxMeHere();
+  sandbox2_client_.SendLogsToSupervisor();
+
+  file_descriptor_ = sandbox2_client_.GetMappedFD(kFileDescriptorName);
+  CHECK_OK(MakeFileDescriptorNonBlocking(file_descriptor_))
+      << "Unable to make fd=" << file_descriptor_
+      << " non-blocking, which prevents communication via gRPC";
+}
+
+}  // namespace privacy_sandbox::bidding_auction_servers::inference
diff --git a/services/inference_sidecar/common/sandbox/sandbox_worker.h b/services/inference_sidecar/common/sandbox/sandbox_worker.h
new file mode 100644
index 00000000..2c99a3db
--- /dev/null
+++ b/services/inference_sidecar/common/sandbox/sandbox_worker.h
@@ -0,0 +1,49 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef SANDBOX_SANDBOX_WORKER_H_
+#define SANDBOX_SANDBOX_WORKER_H_
+
+#include
+
+#include "sandboxed_api/sandbox2/client.h"
+#include "sandboxed_api/sandbox2/comms.h"
+
+namespace privacy_sandbox::bidding_auction_servers::inference {
+
+// Name of the `file_descriptor` used to establish the IPC connection.
+// A host sends a request to a sandboxee via this `file_descriptor`.
+inline constexpr char kFileDescriptorName[] = "CommFD";
+
+// Sandbox worker.
+// It's attached to the sandboxee and provides a communication channel between
+// the host and the sandboxee.
+class SandboxWorker {
+ public:
+  SandboxWorker();
+  SandboxWorker(const SandboxWorker&) = delete;
+  SandboxWorker& operator=(const SandboxWorker&) = delete;
+
+  int FileDescriptor() { return file_descriptor_; }
+
+ private:
+  sandbox2::Comms comms_;
+  sandbox2::Client sandbox2_client_;
+  // File descriptor used for IPC.
+  int file_descriptor_;
+};
+
+}  // namespace privacy_sandbox::bidding_auction_servers::inference
+
+#endif  // SANDBOX_SANDBOX_WORKER_H_
diff --git a/services/inference_sidecar/common/sandbox/sandboxee_exit_test_bin.cc b/services/inference_sidecar/common/sandbox/sandboxee_exit_test_bin.cc
new file mode 100644
index 00000000..15e78ecb
--- /dev/null
+++ b/services/inference_sidecar/common/sandbox/sandboxee_exit_test_bin.cc
@@ -0,0 +1,22 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Sandboxee binary that exits on its own.
+
+#include "sandbox/sandbox_worker.h"
+
+int main(int argc, char** argv) {
+  privacy_sandbox::bidding_auction_servers::inference::SandboxWorker worker;
+  return 0;
+}
diff --git a/services/inference_sidecar/common/sandbox/sandboxee_grpc_test_bin.cc b/services/inference_sidecar/common/sandbox/sandboxee_grpc_test_bin.cc
new file mode 100644
index 00000000..b2bd60a4
--- /dev/null
+++ b/services/inference_sidecar/common/sandbox/sandboxee_grpc_test_bin.cc
@@ -0,0 +1,70 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Sandboxee binary to test gRPC over IPC.
+
+#include
+
+#include <memory>
+
+#include <grpcpp/grpcpp.h>
+#include <grpcpp/server.h>
+#include <grpcpp/server_builder.h>
+#include <grpcpp/server_posix.h>
+
+#include "absl/log/absl_log.h"
+#include "absl/status/status.h"
+#include "proto/testproto.grpc.pb.h"
+#include "proto/testproto.pb.h"
+#include "sandbox/sandbox_worker.h"
+
+namespace privacy_sandbox::bidding_auction_servers::inference {
+namespace {
+
+// Test service implementation.
+class TestServiceImpl final : public TestService::Service {
+ public:
+  TestServiceImpl() = default;
+
+  grpc::Status Predict(grpc::ServerContext* context, const Empty* request,
+                       Empty* response) override {
+    ABSL_LOG(INFO) << "Received Predict gRPC call";
+    return grpc::Status::OK;
+  }
+};
+
+void Run() {
+  SandboxWorker worker;
+  grpc::ServerBuilder builder;
+
+  auto server_impl = std::make_unique<TestServiceImpl>();
+  builder.RegisterService(server_impl.get());
+  std::unique_ptr<grpc::Server> server(builder.BuildAndStart());
+  if (server == nullptr) {
+    ABSL_LOG(ERROR) << "Cannot start the server.";
+    return;
+  }
+
+  // Set up gRPC over IPC.
+  AddInsecureChannelFromFd(server.get(), worker.FileDescriptor());
+  server->Wait();
+}
+
+}  // namespace
+}  // namespace privacy_sandbox::bidding_auction_servers::inference
+
+int main(int argc, char** argv) {
+  privacy_sandbox::bidding_auction_servers::inference::Run();
+  return 0;
+}
diff --git a/services/inference_sidecar/common/sandbox/sandboxee_ipc_test_bin.cc b/services/inference_sidecar/common/sandbox/sandboxee_ipc_test_bin.cc
new file mode 100644
index 00000000..6660e510
--- /dev/null
+++ b/services/inference_sidecar/common/sandbox/sandboxee_ipc_test_bin.cc
@@ -0,0 +1,54 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Sandboxee binary to test IPC via `sandbox2::Comms`.
+
+#include "absl/log/absl_log.h"
+#include "absl/log/check.h"
+#include "absl/time/clock.h"
+#include "absl/time/time.h"
+#include "proto/testproto.pb.h"
+#include "sandbox/sandbox_worker.h"
+#include "sandboxed_api/sandbox2/comms.h"
+
+namespace privacy_sandbox::bidding_auction_servers::inference {
+namespace {
+
+using ::sandbox2::Comms;
+
+void Run() {
+  SandboxWorker worker;
+  Comms comms(worker.FileDescriptor());
+
+  // Retry to mitigate transient "Resource temporarily unavailable" (EAGAIN)
+  // errors on the non-blocking file descriptor.
+  Empty request;
+  bool request_received = false;
+  const int attempts_count = 5;
+  for (int i = 0; i < attempts_count; i++) {
+    request_received = comms.RecvProtoBuf(&request);
+    if (request_received) break;
+    ABSL_LOG(INFO) << "Receive attempt " << i + 1 << " failed";
+  }
+  CHECK(request_received);
+  Empty response;
+  CHECK(comms.SendProtoBuf(response));
+}
+
+}  // namespace
+}  // namespace privacy_sandbox::bidding_auction_servers::inference
+
+int main(int argc, char** argv) {
+  privacy_sandbox::bidding_auction_servers::inference::Run();
+  return 0;
+}
diff --git a/services/inference_sidecar/common/testdata/BUILD b/services/inference_sidecar/common/testdata/BUILD
new file mode 100644
index 00000000..38871392
--- /dev/null
+++ b/services/inference_sidecar/common/testdata/BUILD
@@ -0,0 +1,28 @@
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+package(default_visibility = ["//visibility:public"])
+
+exports_files([
+    "models/tensorflow_1_mib_saved_model.pb",
+    # This PyTorch model takes a float as input and returns the same float.
+    "models/pytorch_simple_model.pt",
+    # This PyTorch model uses a realistic input/output format for the e2e test
+    # flow.
+    "models/pytorch_e2e_model1.pt",
+    # Same architecture as e2e_model1 with different parameters.
+ "models/pytorch_e2e_model2.pt", + # This pytorch model takes one int64 value and three floats as input. It + # returns two tensors: a float embedding of size 4 and one int64 value. + "models/pytorch_mixed_inputs_mixed_outputs_model.pt", +]) diff --git a/services/inference_sidecar/common/testdata/models/pytorch_e2e_model1.pt b/services/inference_sidecar/common/testdata/models/pytorch_e2e_model1.pt new file mode 100644 index 00000000..a17283f0 Binary files /dev/null and b/services/inference_sidecar/common/testdata/models/pytorch_e2e_model1.pt differ diff --git a/services/inference_sidecar/common/testdata/models/pytorch_e2e_model2.pt b/services/inference_sidecar/common/testdata/models/pytorch_e2e_model2.pt new file mode 100644 index 00000000..b5b2ea55 Binary files /dev/null and b/services/inference_sidecar/common/testdata/models/pytorch_e2e_model2.pt differ diff --git a/services/inference_sidecar/common/testdata/models/pytorch_mixed_inputs_mixed_outputs_model.pt b/services/inference_sidecar/common/testdata/models/pytorch_mixed_inputs_mixed_outputs_model.pt new file mode 100644 index 00000000..3446b687 Binary files /dev/null and b/services/inference_sidecar/common/testdata/models/pytorch_mixed_inputs_mixed_outputs_model.pt differ diff --git a/services/inference_sidecar/common/testdata/models/pytorch_simple_model.pt b/services/inference_sidecar/common/testdata/models/pytorch_simple_model.pt new file mode 100644 index 00000000..026443b9 Binary files /dev/null and b/services/inference_sidecar/common/testdata/models/pytorch_simple_model.pt differ diff --git a/services/inference_sidecar/common/testdata/models/tensorflow_1_mib_saved_model.pb b/services/inference_sidecar/common/testdata/models/tensorflow_1_mib_saved_model.pb new file mode 100644 index 00000000..2a8b4609 Binary files /dev/null and b/services/inference_sidecar/common/testdata/models/tensorflow_1_mib_saved_model.pb differ diff --git a/services/inference_sidecar/common/testdata/models/tensorflow_simple_model/BUILD b/services/inference_sidecar/common/testdata/models/tensorflow_simple_model/BUILD new file mode 100644 index 00000000..54c30e67 --- /dev/null +++ b/services/inference_sidecar/common/testdata/models/tensorflow_simple_model/BUILD @@ -0,0 +1,23 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +filegroup( + name = "test_model", + srcs = [ + "saved_model.pb", + "variables/variables.data-00000-of-00001", + "variables/variables.index", + ], + visibility = ["//visibility:public"], +) diff --git a/services/inference_sidecar/common/testdata/models/tensorflow_simple_model/saved_model.pb b/services/inference_sidecar/common/testdata/models/tensorflow_simple_model/saved_model.pb new file mode 100644 index 00000000..01b39566 Binary files /dev/null and b/services/inference_sidecar/common/testdata/models/tensorflow_simple_model/saved_model.pb differ diff --git a/services/inference_sidecar/common/testdata/models/tensorflow_simple_model/variables/variables.data-00000-of-00001 b/services/inference_sidecar/common/testdata/models/tensorflow_simple_model/variables/variables.data-00000-of-00001 new file mode 100644 index 00000000..e603f3eb Binary files /dev/null and b/services/inference_sidecar/common/testdata/models/tensorflow_simple_model/variables/variables.data-00000-of-00001 differ diff --git a/services/inference_sidecar/common/testdata/models/tensorflow_simple_model/variables/variables.index b/services/inference_sidecar/common/testdata/models/tensorflow_simple_model/variables/variables.index new file mode 100644 index 00000000..92276eeb Binary files /dev/null and b/services/inference_sidecar/common/testdata/models/tensorflow_simple_model/variables/variables.index differ diff --git a/services/inference_sidecar/common/third_party/BUILD b/services/inference_sidecar/common/third_party/BUILD new file mode 100644 index 00000000..633bb7f1 --- /dev/null +++ b/services/inference_sidecar/common/third_party/BUILD @@ -0,0 +1,13 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
diff --git a/services/inference_sidecar/common/third_party/rapidjson.BUILD.bazel b/services/inference_sidecar/common/third_party/rapidjson.BUILD.bazel new file mode 100644 index 00000000..7589f81d --- /dev/null +++ b/services/inference_sidecar/common/third_party/rapidjson.BUILD.bazel @@ -0,0 +1,44 @@ +licenses(["notice"]) # MIT + +cc_library( + name = "rapidjson", + srcs = [ + "include/rapidjson/internal/biginteger.h", + "include/rapidjson/internal/diyfp.h", + "include/rapidjson/internal/dtoa.h", + "include/rapidjson/internal/ieee754.h", + "include/rapidjson/internal/itoa.h", + "include/rapidjson/internal/meta.h", + "include/rapidjson/internal/pow10.h", + "include/rapidjson/internal/regex.h", + "include/rapidjson/internal/stack.h", + "include/rapidjson/internal/strfunc.h", + "include/rapidjson/internal/strtod.h", + "include/rapidjson/internal/swap.h", + ], + hdrs = [ + "include/rapidjson/allocators.h", + "include/rapidjson/document.h", + "include/rapidjson/encodedstream.h", + "include/rapidjson/encodings.h", + "include/rapidjson/error/en.h", + "include/rapidjson/error/error.h", + "include/rapidjson/filereadstream.h", + "include/rapidjson/filewritestream.h", + "include/rapidjson/fwd.h", + "include/rapidjson/istreamwrapper.h", + "include/rapidjson/memorybuffer.h", + "include/rapidjson/memorystream.h", + "include/rapidjson/ostreamwrapper.h", + "include/rapidjson/pointer.h", + "include/rapidjson/prettywriter.h", + "include/rapidjson/rapidjson.h", + "include/rapidjson/reader.h", + "include/rapidjson/schema.h", + "include/rapidjson/stream.h", + "include/rapidjson/stringbuffer.h", + "include/rapidjson/writer.h", + ], + includes = ["include"], + visibility = ["//visibility:public"], +) diff --git a/services/inference_sidecar/common/tools/collect-logs b/services/inference_sidecar/common/tools/collect-logs new file mode 100755 index 00000000..3924139e --- /dev/null +++ b/services/inference_sidecar/common/tools/collect-logs @@ -0,0 +1,89 @@ +#!/usr/bin/env bash + +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# environment variables (all optional): +# WORKSPACE Set the path to the workspace (repo root) + +set -o pipefail +set -o errexit + +trap _cleanup EXIT +function _cleanup() { + declare -r -i STATUS=$? 
+  if [[ ${STATUS} -ne 0 ]]; then
+    printf "collect-logs exit code: %d\n" ${STATUS} &>/dev/stderr
+    sleep 5s
+  fi
+  exit ${STATUS}
+}
+
+function copy_log_outputs() {
+  declare -r _rootdest="$1"
+  declare -r _prune_to_dir="$2"
+  declare -r _filepath="$3"
+  declare -r _logpath="${_filepath##*/${_prune_to_dir}/}"
+  declare -r _destdir="${_rootdest}/${_logpath%/*}"
+  declare -r _fname="${_filepath##*/}"
+  declare -r _destfname="${_fname/#test./sponge_log.}"
+  mkdir -p "${_destdir}"
+  cp "${_filepath}" "${_destdir}/${_destfname}"
+}
+export -f copy_log_outputs
+
+function extract_test_outputs() {
+  declare -r _rootdest="$1"
+  declare -r _filepath="$2"
+  declare -r _logpath="${_filepath##*bazel-testlogs/}"
+  declare -r _destdir="${_rootdest}/${_logpath%/*/*}"
+  mkdir -p "${_destdir}"
+  unzip -q -d "${_destdir}" "${_filepath}"
+}
+export -f extract_test_outputs
+
+declare ZIP_FILENAME="$1"
+if [[ ${ZIP_FILENAME##*.} != zip ]]; then
+  ZIP_FILENAME=logs.zip
+fi
+SCRIPT_DIR="$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")"
+readonly SCRIPT_DIR
+WORKSPACE="${WORKSPACE-"$(readlink -f "${SCRIPT_DIR}"/..)"}"
+readonly WORKSPACE
+OUTDIR="$(mktemp --directory)"
+readonly OUTDIR
+export OUTDIR
+mkdir -p "${OUTDIR}"/{test,other}
+
+if [[ -d "${WORKSPACE}"/bazel-testlogs ]]; then
+  # copy all test.log and test.xml files
+  find -L "${WORKSPACE}"/bazel-testlogs -type f '(' -name test.log -o -name test.xml ')' -exec bash -c 'copy_log_outputs "${OUTDIR}"/test bazel-testlogs "$0"' {} ';'
+  # extract test outputs
+  find -L "${WORKSPACE}"/bazel-testlogs -type f -name outputs.zip -exec bash -c 'extract_test_outputs "${OUTDIR}"/test "$0"' {} ';'
+fi
+if [[ -d "${WORKSPACE}"/bazel-out ]]; then
+  # copy log files under bazel-out (except for test.log)
+  find -L "${WORKSPACE}"/bazel-out -type f -name "*.log" ! -name test.log -exec bash -c 'copy_log_outputs "${OUTDIR}"/other bazel-out "$0"' {} ';'
+fi
+
+declare -r DISTDIR="${WORKSPACE}"/dist
+mkdir -p "${DISTDIR}"
+(
+  cd "${OUTDIR}"
+  zip -r -q "${DISTDIR}/${ZIP_FILENAME}" -- *
+)
+printf "stored bazel logs to %s\n" "${DISTDIR}/${ZIP_FILENAME}" &>/dev/stderr
+unzip -Z -h "${DISTDIR}/${ZIP_FILENAME}" &>/dev/stderr
+rm -rf "${OUTDIR}"
diff --git a/services/inference_sidecar/common/tools/debug/start_inference b/services/inference_sidecar/common/tools/debug/start_inference
new file mode 100755
index 00000000..b5026451
--- /dev/null
+++ b/services/inference_sidecar/common/tools/debug/start_inference
@@ -0,0 +1,149 @@
+#!/usr/bin/env bash
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This script is used in place of //tools/debug/start_bidding with the other
+# server scripts in //tools/debug to create a local debug server stack.
+# It requires a prebuilt bidding server and an inference sidecar. After
+# building the targets, modify the docker mount variables as needed.
+
+# To build the inference sidecar, choose an inference backend workspace, e.g.,
+# cd services/inference_sidecar/modules/pytorch_v2_1_1
+# ./builders/tools/bazel-debian run //:generate_artifacts
+
+# To build the bidding server, run from the B&A root
+# cd $(git rev-parse --show-toplevel)
+# ./builders/tools/bazel-debian build services/bidding_service/server --//:inference_build=yes --//:inference_runtime=pytorch
+
+export PROJECT_ROOT=$(git rev-parse --show-toplevel)
+
+# Default mount variables:
+BIDDING_SERVER_MOUNT=${PROJECT_ROOT}/bazel-bin/services/bidding_service/server:/server
+INFERENCE_SIDECAR_MOUNT=${PROJECT_ROOT}/services/inference_sidecar/modules/pytorch_v2_1_1/artifacts/inference_sidecar:/inference_sidecar
+INFERENCE_MODEL_MOUNT=${PROJECT_ROOT}/services/inference_sidecar/common/testdata/models/pytorch_e2e_model1.pt:/test_model
+
+# Default client JS code:
+BIDDING_JS_PATH="/src/workspace/testing/functional/suts/inference/data/buyer-bidding-service/generateBidRunInference.js"
+
+print_usage() {
+  cat << USAGE
+This script is used in place of //tools/debug/start_bidding with the other
+server scripts in //tools/debug to create a local debug server stack.
+It requires a prebuilt bidding server and an inference sidecar. After building
+the targets, modify the docker mount variables as needed.
+
+To build the inference sidecar, choose an inference backend workspace, e.g.,
+cd services/inference_sidecar/modules/pytorch_v2_1_1
+./builders/tools/bazel-debian run //:generate_artifacts
+
+To build the bidding server, run the build_and_test_all_in_docker script, e.g.,
+export BAZEL_EXTRA_ARGS="--//:inference_runtime=pytorch"
+./production/packaging/build_and_test_all_in_docker --service-path bidding_service --instance local --platform aws --build-flavor inference_non_prod
+
+Usage:
+
+  -h, --help                    Print usage information
+  --gdb                         Run with GDB
+  --bidding-server-mount PATH   Set the bidding server mount path (default: $BIDDING_SERVER_MOUNT)
+  --inference-mount PATH        Set the inference sidecar mount path (default: $INFERENCE_SIDECAR_MOUNT)
+  --model-mount PATH            Set the inference model mount path (default: $INFERENCE_MODEL_MOUNT)
+  --bidding-js-path PATH        Set the bidding JS path (default: $BIDDING_JS_PATH)
+USAGE
+  exit 0
+}
+
+# Parse command-line options
+while [[ $# -gt 0 ]]; do
+  case $1 in
+    -h|--help)
+      print_usage
+      ;;
+    --gdb)
+      GDB_OPTION="--gdb"
+      shift
+      ;;
+    --bidding-server-mount)
+      BIDDING_SERVER_MOUNT=$2
+      shift 2
+      ;;
+    --inference-mount)
+      INFERENCE_SIDECAR_MOUNT=$2
+      shift 2
+      ;;
+    --model-mount)
+      INFERENCE_MODEL_MOUNT=$2
+      shift 2
+      ;;
+    --bidding-js-path)
+      BIDDING_JS_PATH=$2
+      shift 2
+      ;;
+    *)
+      echo "Unknown option: $1"
+      echo ""
+      print_usage
+      ;;
+  esac
+done
+
+declare -a -r DOCKER_RUN_ARGS=(
+  "--volume=${BIDDING_SERVER_MOUNT}"
+  "--volume=${INFERENCE_SIDECAR_MOUNT}"
+  "--volume=${INFERENCE_MODEL_MOUNT}"
+  "--name=bidding"
)
+
+source ${PROJECT_ROOT}/tools/debug/common
+export EXTRA_DOCKER_RUN_ARGS="${COMMON_DOCKER_RUN_ARGS[@]} ${DOCKER_RUN_ARGS[@]}"
+
+echo $EXTRA_DOCKER_RUN_ARGS
+
+export SERVER_START_CMD=$(cat << END
+/server \
+--inference_sidecar_binary_path="/inference_sidecar" \
+--inference_model_local_paths="/test_model" \
+--enable_bidding_service_benchmark="true" \
+--init_config_client="false" --port=50057 \
+--js_num_workers=4 \
+--js_worker_queue_len=100 \
+--test_mode="true" \
+--telemetry_config="mode: EXPERIMENT" \
+--roma_timeout_ms="120000" \
+--buyer_code_fetch_config='{
+    "fetchMode": 2,
+    "biddingJsPath": "${BIDDING_JS_PATH}",
+    "protectedAppSignalsBiddingJsUrl": "https://td.doubleclick.net/td/bjs",
"https://td.doubleclick.net/td/bjs", + "biddingWasmHelperUrl": "", + "protectedAppSignalsBiddingWasmHelperUrl": "", + "urlFetchPeriodMs": 13000000, + "urlFetchTimeoutMs": 30000, + "enableBuyerDebugUrlGeneration": true, + "enableAdtechCodeLogging": false, + "prepareDataForAdsRetrievalJsUrl": "", + "prepareDataForAdsRetrievalWasmHelperUrl": "" + }' \ +--enable_otel_based_logging="false" \ +--consented_debug_token="" \ +--enable_protected_app_signals="false" \ +--ps_verbosity=2 \ +--ad_retrieval_timeout_ms="60000" && exit +END +) + +if [[ $1 == "--gdb" ]]; then + ${PROJECT_ROOT}/builders/tools/cbuild --seccomp-unconfined \ + --docker-network host --image build-debian \ + --cmd "apt-get update && apt-get -y install gdb && gdb -ex=r --args ${SERVER_START_CMD}" +else + ${PROJECT_ROOT}/builders/tools/cbuild --seccomp-unconfined --docker-network host --image build-debian --cmd "${SERVER_START_CMD}" +fi diff --git a/services/inference_sidecar/common/utils/BUILD b/services/inference_sidecar/common/utils/BUILD new file mode 100644 index 00000000..dfed33bb --- /dev/null +++ b/services/inference_sidecar/common/utils/BUILD @@ -0,0 +1,69 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +load("@rules_cc//cc:defs.bzl", "cc_library", "cc_test") + +package(default_visibility = ["//visibility:public"]) + +cc_library( + name = "file_util", + srcs = ["file_util.cc"], + hdrs = ["file_util.h"], + deps = [ + "//proto:inference_sidecar_cc_proto", + "@com_google_absl//absl/base:log_severity", + "@com_google_absl//absl/log:absl_log", + "@com_google_absl//absl/status:statusor", + "@com_google_absl//absl/strings", + ], +) + +cc_library( + name = "json_util", + hdrs = [ + "json_util.h", + ], + visibility = ["//visibility:public"], + deps = [ + "@com_google_absl//absl/status:statusor", + "@rapidjson", + ], +) + +cc_library( + name = "request_parser", + srcs = [ + "request_parser.cc", + ], + hdrs = [ + "request_parser.h", + ], + deps = [ + ":json_util", + "@google_privacysandbox_servers_common//src/util/status_macro:status_macros", + "@rapidjson", + ], +) + +cc_test( + name = "request_parser_test", + size = "small", + timeout = "short", + srcs = ["request_parser_test.cc"], + deps = [ + ":request_parser", + "@com_google_googletest//:gtest", + "@com_google_googletest//:gtest_main", + ], +) diff --git a/services/inference_sidecar/common/utils/file_util.cc b/services/inference_sidecar/common/utils/file_util.cc new file mode 100644 index 00000000..57663e95 --- /dev/null +++ b/services/inference_sidecar/common/utils/file_util.cc @@ -0,0 +1,74 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "utils/file_util.h"
+
+#include <filesystem>
+#include <fstream>
+#include <iterator>
+#include <string>
+
+#include "absl/base/log_severity.h"
+#include "absl/log/absl_log.h"
+#include "absl/strings/str_cat.h"
+
+namespace privacy_sandbox::bidding_auction_servers::inference {
+namespace {
+
+absl::StatusOr<std::string> ReadFile(absl::string_view path) {
+  std::ifstream ifs(path.data(), std::ios::in | std::ios::binary);
+  if (!ifs.good()) {
+    return absl::InvalidArgumentError(
+        absl::StrCat("Failed to load file from path: ", path));
+  }
+  return std::string(std::istreambuf_iterator<char>(ifs),
+                     std::istreambuf_iterator<char>());
+}
+
+}  // namespace
+
+absl::Status PopulateRegisterModelRequest(absl::string_view path,
+                                          RegisterModelRequest& request,
+                                          bool log_on_error) {
+  request.mutable_model_spec()->set_model_path(std::string(path));
+
+  if (std::filesystem::is_directory(path)) {
+    // Tensorflow models consist of multiple files under a directory.
+    for (const auto& entry :
+         std::filesystem::recursive_directory_iterator(path)) {
+      if (entry.is_regular_file()) {
+        const std::string& entry_path = entry.path().string();
+        absl::StatusOr<std::string> bytes = ReadFile(entry_path);
+        if (!bytes.ok()) {
+          ABSL_LOG_IF(ERROR, log_on_error) << bytes.status();
+          return bytes.status();
+        }
+        (*request.mutable_model_files())[entry_path] = std::move(*bytes);
+      }
+    }
+    return absl::OkStatus();
+  }
+
+  // A PyTorch model is exactly one file. We read it here and put the byte
+  // string in the request. The PyTorch sidecar loads the model with
+  // RegisterModelRequest.
+  absl::StatusOr<std::string> bytes = ReadFile(path);
+  if (!bytes.ok()) {
+    ABSL_LOG_IF(ERROR, log_on_error) << bytes.status();
+    return bytes.status();
+  }
+  (*request.mutable_model_files())[path] = std::move(*bytes);
+  return absl::OkStatus();
+}
+
+}  // namespace privacy_sandbox::bidding_auction_servers::inference
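For orientation, a minimal, hypothetical caller of `PopulateRegisterModelRequest` might look like the sketch below. It is not part of this change; the test-model path and the `main` harness are illustrative assumptions.

```cpp
// Hypothetical usage sketch: load a model file into a RegisterModelRequest
// before handing it to the inference sidecar.
#include <iostream>

#include "absl/status/status.h"
#include "proto/inference_sidecar.pb.h"
#include "utils/file_util.h"

namespace inference = privacy_sandbox::bidding_auction_servers::inference;

int main() {
  inference::RegisterModelRequest request;
  // A single .pt file for PyTorch; a TensorFlow SavedModel directory would
  // also work, since directories are walked recursively.
  absl::Status status = inference::PopulateRegisterModelRequest(
      "testdata/models/pytorch_e2e_model1.pt", request, /*log_on_error=*/true);
  if (!status.ok()) {
    std::cerr << status << std::endl;
    return 1;
  }
  std::cout << "Model files loaded: " << request.model_files().size()
            << std::endl;
  return 0;
}
```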
diff --git a/services/inference_sidecar/common/utils/file_util.h b/services/inference_sidecar/common/utils/file_util.h
new file mode 100644
index 00000000..a63ef6c6
--- /dev/null
+++ b/services/inference_sidecar/common/utils/file_util.h
@@ -0,0 +1,36 @@
+/*
+ * Copyright 2024 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SERVICES_INFERENCE_SIDECAR_COMMON_UTILS_FILE_UTIL_H_
+#define SERVICES_INFERENCE_SIDECAR_COMMON_UTILS_FILE_UTIL_H_
+
+#include "absl/status/statusor.h"
+#include "absl/strings/string_view.h"
+#include "proto/inference_sidecar.pb.h"
+
+namespace privacy_sandbox::bidding_auction_servers::inference {
+
+// Reads the contents of a provided model and populates the given
+// RegisterModelRequest.
+// TODO(b/322106194): Depending on the final implementation of model loading,
+// this function might need to read from tmpfs or ramfs.
+absl::Status PopulateRegisterModelRequest(absl::string_view path,
+                                          RegisterModelRequest& request,
+                                          bool log_on_error = false);
+
+}  // namespace privacy_sandbox::bidding_auction_servers::inference
+
+#endif  // SERVICES_INFERENCE_SIDECAR_COMMON_UTILS_FILE_UTIL_H_
diff --git a/services/inference_sidecar/common/utils/json_util.h b/services/inference_sidecar/common/utils/json_util.h
new file mode 100644
index 00000000..6a1b2d2b
--- /dev/null
+++ b/services/inference_sidecar/common/utils/json_util.h
@@ -0,0 +1,179 @@
+/*
+ * Copyright 2023 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// TODO(b/322271490): This file is copied from the parent B&A workspace at
+// services/common/util/json_util.h so that the inference sidecar common
+// workspace doesn't need to depend on the parent workspace. We will need
+// to move code shared between B&A and inference sidecar common to the data
+// plane common repository.
+
+#ifndef SERVICES_INFERENCE_SIDECAR_COMMON_UTILS_JSON_UTIL_H_
+#define SERVICES_INFERENCE_SIDECAR_COMMON_UTILS_JSON_UTIL_H_
+
+#include <memory>
+#include <string>
+
+#include "absl/status/statusor.h"
+#include "absl/strings/str_cat.h"
+#include "absl/strings/str_format.h"
+#include "rapidjson/document.h"
+#include "rapidjson/error/en.h"
+#include "rapidjson/pointer.h"
+#include "rapidjson/writer.h"
+
+namespace privacy_sandbox::bidding_auction_servers::inference {
+
+#define PS_ASSIGN_IF_PRESENT(dst, src, key, getter)           \
+  if (auto it = src.FindMember(key); it != src.MemberEnd()) { \
+    dst = it->value.getter();                                 \
+  }
+
+inline constexpr char kMissingMember[] = "Missing %s in the JSON document";
+inline constexpr char kUnexpectedMemberType[] =
+    "Value of member: %s, has unexpected member type (expected: %d, observed: "
+    "%d)";
+inline constexpr char kEmptyStringMember[] =
+    "Value of member: %s, is unexpectedly an empty string.";
+
+// This is a custom class that implements the necessary methods for
+// serialization of a Rapid JSON document in a shared string.
+class SharedStringHolder {
+ public:
+  using Ch = char;  // Character type for the stream
+
+  SharedStringHolder() : shared_string_(std::make_shared<std::string>()) {}
+  explicit SharedStringHolder(int size)
+      : shared_string_(std::make_shared<std::string>()) {
+    shared_string_->reserve(size);
+  }
+
+  ~SharedStringHolder() { Flush(); }
+
+  void Put(Ch c) { shared_string_->push_back(c); }
+
+  void Clear() { shared_string_->clear(); }
+  void Flush() { return; }
+  size_t Size() const { return shared_string_->size(); }
+
+  std::string GetString() const { return *shared_string_; }
+
+  std::shared_ptr<std::string> GetSharedPointer() const {
+    return shared_string_;
+  }
+
+ private:
+  std::shared_ptr<std::string> shared_string_;
+};
+
+// Parses a string into a rapidjson::Document. Returns an error status or the
+// document.
+inline absl::StatusOr<rapidjson::Document> ParseJsonString(
+    absl::string_view str) {
+  rapidjson::Document doc;
+  // Parse into struct
+  rapidjson::ParseResult parse_result = doc.Parse(str.data());
+  if (parse_result.IsError()) {
+    return absl::InvalidArgumentError(
+        absl::StrCat("JSON Parse Error: ",
+                     rapidjson::GetParseError_En(parse_result.Code())));
+  }
+  return doc;
+}
+
+// Converts a rapidjson::Document to a shared string. This provides a
+// shared string to prevent copying large string parameters required
+// by the ROMA engine interface. The reserve_string_len argument helps
+// reserve a large string size up front to prevent reallocation and copying.
+inline absl::StatusOr<std::shared_ptr<std::string>> SerializeJsonDoc(
+    const rapidjson::Document& document, int reserve_string_len) {
+  SharedStringHolder shared_string_holder(reserve_string_len);
+  rapidjson::Writer<SharedStringHolder> writer(shared_string_holder);
+  if (document.Accept(writer)) {
+    return shared_string_holder.GetSharedPointer();
+  }
+  return absl::InternalError("Unknown JSON to string serialization error");
+}
+
+// Converts a rapidjson::Value& to a string.
+inline absl::StatusOr<std::string> SerializeJsonDoc(
+    const rapidjson::Value& document) {
+  rapidjson::StringBuffer string_buffer;
+  rapidjson::Writer<rapidjson::StringBuffer> writer(string_buffer);
+  if (document.Accept(writer)) {
+    return std::string(string_buffer.GetString());
+  }
+  return absl::InternalError("Error converting inner Json to String.");
+}
+
+// Converts a rapidjson::Document to a string.
+inline absl::StatusOr<std::string> SerializeJsonDoc(
+    const rapidjson::Document& document) {
+  rapidjson::StringBuffer string_buffer;
+  rapidjson::Writer<rapidjson::StringBuffer> writer(string_buffer);
+  if (document.Accept(writer)) {
+    return std::string(string_buffer.GetString());
+  }
+  return absl::InternalError("Unknown JSON to string serialization error");
+}
+
+// Retrieves the string value of the specified member in the document.
+template <typename T>
+inline absl::StatusOr<std::string> GetStringMember(
+    const T& document, const std::string& member_name,
+    bool is_empty_ok = false) {
+  auto it = document.FindMember(member_name.c_str());
+  if (it == document.MemberEnd()) {
+    return absl::InvalidArgumentError(
+        absl::StrFormat(kMissingMember, member_name));
+  }
+
+  if (!it->value.IsString()) {
+    return absl::InvalidArgumentError(
+        absl::StrFormat(kUnexpectedMemberType, member_name,
+                        rapidjson::kStringType, it->value.GetType()));
+  }
+
+  const auto result = std::string(it->value.GetString());
+  if (!is_empty_ok && result.empty()) {
+    return absl::InvalidArgumentError(
+        absl::StrFormat(kEmptyStringMember, member_name));
+  }
+
+  return result;
+}
+
+// Retrieves the array value of the specified member in the document.
+template <typename T>
+inline absl::StatusOr<rapidjson::GenericValue<rapidjson::UTF8<>>::ConstArray>
+GetArrayMember(const T& document, const std::string& member_name) {
+  auto it = document.FindMember(member_name.c_str());
+  if (it == document.MemberEnd()) {
+    return absl::InvalidArgumentError(
+        absl::StrFormat(kMissingMember, member_name));
+  }
+
+  if (!it->value.IsArray()) {
+    return absl::InvalidArgumentError(
+        absl::StrFormat(kUnexpectedMemberType, member_name,
+                        rapidjson::kArrayType, it->value.GetType()));
+  }
+
+  return it->value.GetArray();
+}
+
+}  // namespace privacy_sandbox::bidding_auction_servers::inference
+
+#endif  // SERVICES_INFERENCE_SIDECAR_COMMON_UTILS_JSON_UTIL_H_
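For orientation, here is a hypothetical round trip through these JSON helpers; the payload and variable names are invented for illustration and are not part of this change.

```cpp
// Illustrative sketch: parse, read a member, then serialize back out.
#include <iostream>

#include "utils/json_util.h"

namespace inference = privacy_sandbox::bidding_auction_servers::inference;

int main() {
  // Parse a JSON string into a rapidjson::Document.
  absl::StatusOr<rapidjson::Document> doc =
      inference::ParseJsonString(R"({"model_path": "m/1", "tags": ["a"]})");
  if (!doc.ok()) return 1;

  // Typed member accessors return absl::StatusOr instead of asserting.
  absl::StatusOr<std::string> model_path =
      inference::GetStringMember(*doc, "model_path");
  if (model_path.ok()) std::cout << "model_path: " << *model_path << std::endl;

  // Serialize back into a shared string, reserving capacity up front to
  // avoid reallocation for large documents.
  absl::StatusOr<std::shared_ptr<std::string>> serialized =
      inference::SerializeJsonDoc(*doc, /*reserve_string_len=*/64);
  if (serialized.ok()) std::cout << **serialized << std::endl;
  return 0;
}
```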
diff --git a/services/inference_sidecar/common/utils/request_parser.cc b/services/inference_sidecar/common/utils/request_parser.cc
new file mode 100644
index 00000000..a49466c8
--- /dev/null
+++ b/services/inference_sidecar/common/utils/request_parser.cc
@@ -0,0 +1,158 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "utils/request_parser.h"
+
+#include <utility>
+
+#include "absl/strings/str_join.h"
+#include "rapidjson/document.h"
+#include "src/util/status_macro/status_macros.h"
+#include "utils/json_util.h"
+
+namespace privacy_sandbox::bidding_auction_servers::inference {
+
+std::string Tensor::DebugString() const {
+  std::string result;
+
+  if (!tensor_name.empty()) {
+    absl::StrAppend(&result, "Tensor name: ", tensor_name, "\n");
+  }
+
+  std::string data_type_str =
+      (kDataTypeToStringMap.find(data_type) != kDataTypeToStringMap.end())
+          ? kDataTypeToStringMap.at(data_type)
+          : "unknown DataType";
+
+  absl::StrAppend(&result, "Data type: ", data_type_str, "\n");
+
+  absl::StrAppend(&result, "Tensor shape: [", absl::StrJoin(tensor_shape, ", "),
+                  "]\n");
+
+  absl::StrAppend(&result, "Tensor content: [",
+                  absl::StrJoin(tensor_content, ", "), "]\n");
+
+  return result;
+}
+
+std::string InferenceRequest::DebugString() const {
+  std::string result;
+
+  absl::StrAppend(&result, "Model path: ", model_path, "\n");
+  absl::StrAppend(&result, "Tensors: [\n");
+
+  absl::StrAppend(
+      &result,
+      absl::StrJoin(inputs, "\n", [](std::string* out, const Tensor& tensor) {
+        absl::StrAppend(out, tensor.DebugString());
+      }));
+
+  absl::StrAppend(&result, "]\n");
+  return result;
+}
+
+absl::StatusOr<DataType> StringToDataType(std::string data_type) {
+  auto it = kStringToDataTypeMap.find(data_type);
+  if (it != kStringToDataTypeMap.end()) {
+    return it->second;
+  } else {
+    return absl::InvalidArgumentError(absl::StrFormat(
+        "Invalid JSON format: Unsupported 'data_type' field %s", data_type));
+  }
+}
+
+// Converts a rapidjson object to a dense tensor struct.
+absl::StatusOr<Tensor> ParseTensor(const rapidjson::Value& tensor_value) {
+  Tensor tensor;
+  // Tensor name is optional.
+  PS_ASSIGN_IF_PRESENT(tensor.tensor_name, tensor_value, "tensor_name",
+                       GetString);
+
+  std::string data_type;
+  PS_ASSIGN_OR_RETURN(data_type, GetStringMember(tensor_value, "data_type"));
+
+  PS_ASSIGN_OR_RETURN(tensor.data_type, StringToDataType(data_type));
+
+  PS_ASSIGN_OR_RETURN(
+      rapidjson::GenericValue<rapidjson::UTF8<>>::ConstArray tensor_shape,
+      GetArrayMember(tensor_value, "tensor_shape"), _.LogError());
+
+  // Only dense tensors are supported.
+  std::size_t product_of_dimensions = 1;
+  for (const rapidjson::Value& shape_value : tensor_shape) {
+    int dim = shape_value.GetInt();
+    if (dim < 1) {
+      return absl::InvalidArgumentError(
+          "Invalid tensor dimension: it has to be greater than 0");
+    }
+    product_of_dimensions *= dim;
+    tensor.tensor_shape.push_back(dim);
+  }
+
+  PS_ASSIGN_OR_RETURN(
+      rapidjson::GenericValue<rapidjson::UTF8<>>::ConstArray tensor_content,
+      GetArrayMember(tensor_value, "tensor_content"), _.LogError());
+
+  for (const rapidjson::Value& content_value : tensor_content) {
+    if (!content_value.IsString()) {
+      return absl::InvalidArgumentError(
+          "All numbers within the tensor_content must be enclosed in quotes");
+    }
+    tensor.tensor_content.push_back(content_value.GetString());
+  }
+  std::size_t num_tensors = tensor.tensor_content.size();
+  if (num_tensors != product_of_dimensions) {
+    return absl::InvalidArgumentError(
+        absl::StrFormat("Mismatch between the size of tensor_content (%d) and "
                        "a product of tensor dimensions (%d)",
+                        num_tensors, product_of_dimensions));
+  }
+  return tensor;
+}
+
+absl::StatusOr<std::vector<InferenceRequest>> ParseJsonInferenceRequest(
+    absl::string_view json_string) {
+  PS_ASSIGN_OR_RETURN(rapidjson::Document document,
+                      ParseJsonString(json_string), _.LogError());
+
+  PS_ASSIGN_OR_RETURN(rapidjson::GenericValue<rapidjson::UTF8<>>::ConstArray
+                          batch_inference_request,
+                      GetArrayMember(document, "request"), _.LogError());
+
+  std::vector<InferenceRequest> parsed_inputs;
+  // Extract values.
+  for (const rapidjson::Value& inference_request : batch_inference_request) {
+    InferenceRequest model_input;
+
+    // Get model_path.
+    PS_ASSIGN_OR_RETURN(model_input.model_path,
+                        GetStringMember(inference_request, "model_path"),
+                        _.LogError());
+
+    // Get tensors.
+    PS_ASSIGN_OR_RETURN(
+        rapidjson::GenericValue<rapidjson::UTF8<>>::ConstArray tensors,
+        GetArrayMember(inference_request, "tensors"), _.LogError());
+    for (const rapidjson::Value& tensor_value : tensors) {
+      PS_ASSIGN_OR_RETURN(Tensor result, ParseTensor(tensor_value),
+                          _.LogError());
+      model_input.inputs.push_back(std::move(result));
+    }
+    parsed_inputs.push_back(std::move(model_input));
+  }
+  // TODO(b/319500803): Implement input validation against the model signature.
+  return parsed_inputs;
+}
+
+}  // namespace privacy_sandbox::bidding_auction_servers::inference
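A minimal sketch of driving the parser end to end follows, assuming the quoted-number convention enforced above; the model path and JSON payload are invented for illustration.

```cpp
// Illustrative sketch: parse a single-model batch with one 1x2 FLOAT tensor.
#include <iostream>

#include "utils/request_parser.h"

namespace inference = privacy_sandbox::bidding_auction_servers::inference;

int main() {
  // Numbers inside tensor_content must be quoted strings per the parser.
  constexpr char kRequest[] = R"json({
    "request": [{
      "model_path": "my_bucket/models/pcvr/1/",
      "tensors": [{
        "data_type": "FLOAT",
        "tensor_shape": [1, 2],
        "tensor_content": ["0.5", "0.6"]
      }]
    }]
  })json";

  absl::StatusOr<std::vector<inference::InferenceRequest>> requests =
      inference::ParseJsonInferenceRequest(kRequest);
  if (!requests.ok()) {
    std::cerr << requests.status() << std::endl;
    return 1;
  }
  std::cout << (*requests)[0].DebugString();
  return 0;
}
```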
diff --git a/services/inference_sidecar/common/utils/request_parser.h b/services/inference_sidecar/common/utils/request_parser.h
new file mode 100644
index 00000000..2fca5ef9
--- /dev/null
+++ b/services/inference_sidecar/common/utils/request_parser.h
@@ -0,0 +1,96 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef SERVICES_INFERENCE_SIDECAR_COMMON_UTILS_REQUEST_PARSER_H_
+#define SERVICES_INFERENCE_SIDECAR_COMMON_UTILS_REQUEST_PARSER_H_
+
+#include <cstdint>
+#include <string>
+#include <unordered_map>
+#include <vector>
+
+#include "absl/status/statusor.h"
+
+namespace privacy_sandbox::bidding_auction_servers::inference {
+
+enum DataType {
+  kFloat,   // tf::float32, torch::ScalarType::Float (32-bit single-precision
+            // floating-point number)
+  kDouble,  // tf::float64, torch::ScalarType::Double (64-bit double-precision
+            // floating-point number)
+  kInt8,    // tf::int8, torch::ScalarType::Char (8-bit signed integer)
+  kInt16,   // tf::int16, torch::ScalarType::Short (16-bit signed integer)
+  kInt32,   // tf::int32, torch::ScalarType::Int (32-bit signed integer)
+  kInt64    // tf::int64, torch::ScalarType::Long (64-bit signed integer)
+};
+
+static const std::unordered_map<std::string, DataType> kStringToDataTypeMap = {
+    {"FLOAT", DataType::kFloat}, {"DOUBLE", DataType::kDouble},
+    {"INT8", DataType::kInt8},   {"INT16", DataType::kInt16},
+    {"INT32", DataType::kInt32}, {"INT64", DataType::kInt64}};
+
+static const std::unordered_map<DataType, std::string> kDataTypeToStringMap = {
+    {DataType::kFloat, "FLOAT"}, {DataType::kDouble, "DOUBLE"},
+    {DataType::kInt8, "INT8"},   {DataType::kInt16, "INT16"},
+    {DataType::kInt32, "INT32"}, {DataType::kInt64, "INT64"}};
+
+// A struct representing a framework-agnostic tensor object.
+// TODO(b/319272024): Replace this struct with a native Tensor type once TF or
+// Pytorch library is available in B&A.
+struct Tensor {
+  // Optional tensor name.
+  std::string tensor_name;
+
+  // Type of the tensor.
+  DataType data_type;
+
+  // Tensor shape.
+  // The order of entries matters: It indicates the layout of the
+  // values in the tensor in-memory representation.
+  //
+  // The first entry is the outermost dimension. The last entry is the
+  // innermost dimension.
+  std::vector<int64_t> tensor_shape;
+
+  // Serialized raw tensor content. It holds the flattened representation of
+  // the tensor. Only the representation corresponding to the "data_type"
+  // field can be set. The number of elements in tensor_content should be
+  // equal to the product of the tensor_shape elements; for example, a tensor
+  // of shape [1,4] expects a flat array of 4 elements (e.g. [1, 2, 7, 4]),
+  // and one with shape [2,3] expects a 6-element one.
+  std::vector<std::string> tensor_content;
+
+  std::string DebugString() const;
+};
+
+// A struct representing input data for a specific Ads ML model.
+// TODO(b/323592662): Use a proto instead of a C++ struct.
+struct InferenceRequest {
+  // A unique path to the model.
+  std::string model_path;
+
+  // A vector of input tensors.
+  std::vector<Tensor> inputs;
+
+  std::string DebugString() const;
+};
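To make the flattening rule above concrete, a hypothetical `Tensor` for shape [2, 3] could be built as follows; the names and values are illustrative only.

```cpp
// Illustrative only: a shape [2, 3] tensor carries 2 * 3 = 6 quoted values,
// laid out row by row, outermost dimension first.
#include "utils/request_parser.h"

namespace inference = privacy_sandbox::bidding_auction_servers::inference;

inference::Tensor MakeExampleTensor() {
  inference::Tensor tensor;
  tensor.tensor_name = "embedding";
  tensor.data_type = inference::DataType::kFloat;
  tensor.tensor_shape = {2, 3};
  // Rows [0.1, 0.2, 0.3] and [0.4, 0.5, 0.6], flattened in order.
  tensor.tensor_content = {"0.1", "0.2", "0.3", "0.4", "0.5", "0.6"};
  return tensor;
}
```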
+
+// Validates and parses a JSON inference request into an internal data
+// structure. For a practical example, see generateBidRunInference.js.
+absl::StatusOr<std::vector<InferenceRequest>> ParseJsonInferenceRequest(
+    absl::string_view json_string);
+
+}  // namespace privacy_sandbox::bidding_auction_servers::inference
+
+#endif  // SERVICES_INFERENCE_SIDECAR_COMMON_UTILS_REQUEST_PARSER_H_
diff --git a/services/inference_sidecar/common/utils/request_parser_test.cc b/services/inference_sidecar/common/utils/request_parser_test.cc
new file mode 100644
index 00000000..ba34f9ce
--- /dev/null
+++ b/services/inference_sidecar/common/utils/request_parser_test.cc
@@ -0,0 +1,388 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "utils/request_parser.h"
+
+#include "absl/status/statusor.h"
+#include "googletest/include/gtest/gtest.h"
+
+namespace privacy_sandbox::bidding_auction_servers::inference {
+namespace {
+
+TEST(Test, Failure_EmptyString) {
+  const absl::StatusOr<std::vector<InferenceRequest>> output =
+      ParseJsonInferenceRequest("");
+  ASSERT_FALSE(output.ok());
+  EXPECT_EQ(output.status().code(), absl::StatusCode::kInvalidArgument);
+  EXPECT_EQ(output.status().message(),
+            "JSON Parse Error: The document is empty.");
+}
+
+TEST(Test, Failure_WrongFormat) {
+  const absl::StatusOr<std::vector<InferenceRequest>> output =
+      ParseJsonInferenceRequest("1.0");
+  ASSERT_FALSE(output.ok());
+  EXPECT_EQ(output.status().code(), absl::StatusCode::kInvalidArgument);
+  EXPECT_EQ(output.status().message(), "Missing request in the JSON document");
+}
+
+constexpr char kJsonStringBasic[] = R"json({
+  "request" : [{
+    "model_path" : "my_bucket/models/pcvr_models/1/",
+    "tensors" : [
+      {
+        "data_type": "INT64",
+        "tensor_shape": [
+          1,
+          1
+        ],
+        "tensor_content": ["1"]
+      }
+    ]
+}]
+  })json";
+
+TEST(Test, Success_BasicInput) {
+  const absl::StatusOr<std::vector<InferenceRequest>> output =
+      ParseJsonInferenceRequest(kJsonStringBasic);
+  ASSERT_TRUE(output.ok());
+  ASSERT_EQ(output.value().size(), 1);
+  const InferenceRequest parsed_data = output.value()[0];
+  ASSERT_EQ(parsed_data.model_path, "my_bucket/models/pcvr_models/1/");
+  std::vector<Tensor> inputs = parsed_data.inputs;
+  ASSERT_EQ(inputs.size(), 1);
+  constexpr char kExpectedString[] =
+      R"(Model path: my_bucket/models/pcvr_models/1/
+Tensors: [
+Data type: INT64
+Tensor shape: [1, 1]
+Tensor content: [1]
+]
+)";
+  ASSERT_EQ(parsed_data.DebugString(), kExpectedString);
+}
+
+constexpr char kJsonStringWithNumber[] = R"json({
+  "request" : [{
+    "model_path" : "my_bucket/models/pcvr_models/1/",
+    "tensors" : [
+      {
+        "data_type": "INT64",
+        "tensor_shape": [
+          1,
+          1
+        ],
+        "tensor_content": [1]
+      }
+    ]
+}]
+  })json";
+
+TEST(Test, Failure_NumberInsteadOfString) {
+  const absl::StatusOr<std::vector<InferenceRequest>> output =
+      ParseJsonInferenceRequest(kJsonStringWithNumber);
+  ASSERT_FALSE(output.ok());
+  ASSERT_EQ(output.status().message(),
+            "All numbers within the tensor_content must be enclosed in quotes");
+}
+
+constexpr char kJsonString[] = R"json({
+  "request" : [{
+    "model_path" : "my_bucket/models/pcvr/1/",
+    "tensors" : [
+      {
+        "tensor_name": "scalar1",
+        "data_type": "INT64",
+        "tensor_shape": [
+          1,
+          1
+        ],
+        "tensor_content": ["1"]
+      },
+      {
+        "tensor_name": "scalar2",
"INT64", + "tensor_shape": [ + 1, + 1 + ], + "tensor_content": ["2"] + }, + { + "tensor_name": "scalar3", + "data_type": "INT64", + "tensor_shape": [ + 1, + 1 + ], + "tensor_content": ["3"] + }, + { + "tensor_name": "scalar4", + "data_type": "INT64", + "tensor_shape": [ + 1, + 1 + ], + "tensor_content": ["4"] + }, + { + "tensor_name": "scalar5", + "data_type": "INT64", + "tensor_shape": [ + 1, + 1 + ], + "tensor_content": ["5"] + }, + { + "tensor_name": "embedding1", + "data_type": "FLOAT", + "tensor_shape": [ + 1, + 3 + ], + "tensor_content": ["0.32", "0.12", "0.98"] + }, + { + "tensor_name": "embedding2", + "data_type": "FLOAT", + "tensor_shape": [ + 1, + 3 + ], + "tensor_content": ["0.62", "0.18", "0.23"] + } + ] +}] + })json"; + +TEST(Test, SmallModel) { + absl::StatusOr> output = + ParseJsonInferenceRequest(kJsonString); + ASSERT_TRUE(output.ok()); + ASSERT_EQ(output.value().size(), 1); + for (const privacy_sandbox::bidding_auction_servers::inference:: + InferenceRequest& parsed_data : output.value()) { + ASSERT_EQ(parsed_data.model_path, "my_bucket/models/pcvr/1/"); + std::vector inputs = parsed_data.inputs; + ASSERT_EQ(inputs.size(), 7); + constexpr char kExpectedString[] = + R"(Model path: my_bucket/models/pcvr/1/ +Tensors: [ +Tensor name: scalar1 +Data type: INT64 +Tensor shape: [1, 1] +Tensor content: [1] + +Tensor name: scalar2 +Data type: INT64 +Tensor shape: [1, 1] +Tensor content: [2] + +Tensor name: scalar3 +Data type: INT64 +Tensor shape: [1, 1] +Tensor content: [3] + +Tensor name: scalar4 +Data type: INT64 +Tensor shape: [1, 1] +Tensor content: [4] + +Tensor name: scalar5 +Data type: INT64 +Tensor shape: [1, 1] +Tensor content: [5] + +Tensor name: embedding1 +Data type: FLOAT +Tensor shape: [1, 3] +Tensor content: [0.32, 0.12, 0.98] + +Tensor name: embedding2 +Data type: FLOAT +Tensor shape: [1, 3] +Tensor content: [0.62, 0.18, 0.23] +] +)"; + ASSERT_EQ(parsed_data.DebugString(), kExpectedString); + } +} + +constexpr char kJsonStringTwoModels[] = R"json({ + "request" : [ + { + "model_path" : "my_bucket/models/pcvr/1/", + "tensors" : [ + { + "tensor_name": "scalar1", + "data_type": "INT64", + "tensor_shape": [1, 1], + "tensor_content": ["1"] + } + ] + }, + { + "model_path" : "my_bucket/models/pctr/2/", + "tensors" : [ + { + "tensor_name": "vector1", + "data_type": "FLOAT", + "tensor_shape": [1, 4], + "tensor_content": ["0.5", "0.6", "0.7", "0.8"] + } + ] + } + ] +})json"; + +TEST(Test, TwoModels) { + absl::StatusOr> output = + ParseJsonInferenceRequest(kJsonStringTwoModels); + ASSERT_TRUE(output.ok()); + ASSERT_EQ(output.value().size(), 2); + + constexpr char kExpectedStringPcvrModel[] = + R"(Model path: my_bucket/models/pcvr/1/ +Tensors: [ +Tensor name: scalar1 +Data type: INT64 +Tensor shape: [1, 1] +Tensor content: [1] +] +)"; + + constexpr char kExpectedStringPctrModel[] = + R"(Model path: my_bucket/models/pctr/2/ +Tensors: [ +Tensor name: vector1 +Data type: FLOAT +Tensor shape: [1, 4] +Tensor content: [0.5, 0.6, 0.7, 0.8] +] +)"; + + bool found_pcvr = false, found_pctr = false; + + for (const auto& model_data : output.value()) { + std::string model_path = model_data.model_path; + if (model_path == "my_bucket/models/pcvr/1/") { + found_pcvr = true; + ASSERT_EQ(model_data.DebugString(), kExpectedStringPcvrModel); + } else if (model_path == "my_bucket/models/pctr/2/") { + found_pctr = true; + ASSERT_EQ(model_data.DebugString(), kExpectedStringPctrModel); + } + } + + ASSERT_TRUE(found_pcvr); + ASSERT_TRUE(found_pctr); +} + +constexpr char kJsonStringIntTypes[] = R"json({ 
+ "request" : [{ + "model_path" : "my_bucket/models/pcvr/1/", + "tensors" : [ + { + "tensor_name": "scalar1", + "data_type": "INT8", + "tensor_shape": [ + 1, + 1 + ], + "tensor_content": ["2"] + }, + { + "tensor_name": "scalar2", + "data_type": "INT16", + "tensor_shape": [ + 1, + 1 + ], + "tensor_content": ["2"] + }, + { + "tensor_name": "scalar3", + "data_type": "INT32", + "tensor_shape": [ + 1, + 1 + ], + "tensor_content": ["2"] + } + ] +}] + })json"; + +TEST(Test, IntTypes) { + absl::StatusOr> output = + ParseJsonInferenceRequest(kJsonStringIntTypes); + ASSERT_TRUE(output.ok()); + EXPECT_EQ(output.value().size(), 1); + for (const privacy_sandbox::bidding_auction_servers::inference:: + InferenceRequest& parsed_data : output.value()) { + EXPECT_EQ(parsed_data.model_path, "my_bucket/models/pcvr/1/"); + std::vector inputs = parsed_data.inputs; + EXPECT_EQ(inputs.size(), 3); + constexpr char kExpectedString[] = + R"(Model path: my_bucket/models/pcvr/1/ +Tensors: [ +Tensor name: scalar1 +Data type: INT8 +Tensor shape: [1, 1] +Tensor content: [2] + +Tensor name: scalar2 +Data type: INT16 +Tensor shape: [1, 1] +Tensor content: [2] + +Tensor name: scalar3 +Data type: INT32 +Tensor shape: [1, 1] +Tensor content: [2] +] +)"; + EXPECT_EQ(parsed_data.DebugString(), kExpectedString); + } +} + +constexpr char kJsonStringFloat16[] = R"json({ + "request" : [{ + "model_path" : "my_bucket/models/pcvr/1/", + "tensors" : [ + { + "tensor_name": "scalar1", + "data_type": "FLOAT16", + "tensor_shape": [ + 1, + 1 + ], + "tensor_content": ["1.2"] + } + ] +}] + })json"; + +TEST(Test, UnsupportedFloat16Type) { + absl::StatusOr> output = + ParseJsonInferenceRequest(kJsonStringFloat16); + ASSERT_FALSE(output.ok()); + EXPECT_EQ(output.status().message(), + "Invalid JSON format: Unsupported 'data_type' field FLOAT16"); +} + +} // namespace +} // namespace privacy_sandbox::bidding_auction_servers::inference diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/.bazelrc b/services/inference_sidecar/modules/pytorch_v2_1_1/.bazelrc new file mode 100644 index 00000000..7303ce32 --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/.bazelrc @@ -0,0 +1,63 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +build --announce_rc +build --verbose_failures +build --config=clang +build --compilation_mode=opt +build --output_filter='^//((?!(third_party):).)*$'` +build --color=yes + +build:clang --cxxopt=-fbracket-depth=512 +build:clang --client_env=CC=clang +build:clang --cxxopt=-std=c++17 +build:clang --host_cxxopt=-std=c++17 +build:clang --client_env=BAZEL_CXXOPTS=-std=c++17 +# Makes thread safety analysis warnings into errors. +build:clang --copt=-Werror=thread-safety + +build --copt=-I. 
+build --cxxopt=-fopenmp=libgomp +build --linkopt=-fopenmp=libgomp + +build --copt=-isystem --copt bazel-out/k8-opt/bin +build --copt=-isystem --copt bazel-out/k8-dbg/bin + +build --per_file_copt='^external/pytorch_v2_1_1*'/@-w + +# Address sanitizer, set action_env to segregate cache entries +build:asan --action_env=PRIVACY_SANDBOX_SERVERS_ASAN=1 +build:asan --strip=never +build:asan --compilation_mode=dbg +build:asan --copt=-fsanitize=address +build:asan --copt=-DADDRESS_SANITIZER +build:asan --copt=-O1 +build:asan --copt=-g +build:asan --copt=-fno-omit-frame-pointer +build:asan --linkopt=-fsanitize=address +build:asan --linkopt=-fuse-ld=lld +build:asan --action_env=ASAN_OPTIONS=detect_leaks=1:color=always + +# Thread sanitizer, set action_env to segregate cache entries +build:tsan --action_env=PRIVACY_SANDBOX_SERVERS_TSAN=1 +build:tsan --strip=never +build:tsan --copt=-fsanitize=thread +build:tsan --copt=-fno-omit-frame-pointer +build:tsan --copt=-DGPR_NO_DIRECT_SYSCALLS +build:tsan --copt=-DGRPC_TSAN +build:tsan --copt=-g +build:tsan --linkopt=-fsanitize=thread +# This is needed to address false positive problem with abseil. +# https://github.com/google/sanitizers/issues/953 +build:tsan --test_env=TSAN_OPTIONS=report_atomic_races=0 diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/.bazelversion b/services/inference_sidecar/modules/pytorch_v2_1_1/.bazelversion new file mode 100644 index 00000000..798e3899 --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/.bazelversion @@ -0,0 +1 @@ +6.3.0 diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/.gitignore b/services/inference_sidecar/modules/pytorch_v2_1_1/.gitignore new file mode 100644 index 00000000..d5c5f7a0 --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/.gitignore @@ -0,0 +1,11 @@ +# Bazel symlinks +/bazel-* + +# Docker +docker-buildx-*.log + +# Artifacts +/artifacts + +# Tests +/dist diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/BUILD b/services/inference_sidecar/modules/pytorch_v2_1_1/BUILD new file mode 100644 index 00000000..080fd9a6 --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/BUILD @@ -0,0 +1,220 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +load("@rules_cc//cc:defs.bzl", "cc_binary", "cc_library", "cc_test") + +cc_binary( + name = "inference_sidecar_bin", + srcs = ["@inference_common//:inference_sidecar_main.cc"], + deps = [ + ":pytorch", + "@inference_common//:grpc_sidecar", + ], +) + +# Generate inference sidecar as "inference_sidecar" during test time. +genrule( + name = "inference_sidecar_test_target", + srcs = [":inference_sidecar_bin"], + outs = ["inference_sidecar"], + cmd = "cp $< $@", +) + +# Rename a specific test model as "test_model" so that the inference_sidecar_test.cc +# can have a single hardcoded string to refer to the test model for various +# inference backends. 
+genrule( + name = "test_model_target", + srcs = ["@inference_common//testdata:models/pytorch_simple_model.pt"], + outs = ["test_model"], + cmd = "cp $< $@", +) + +cc_test( + name = "inference_sidecar_test", + size = "medium", + timeout = "short", + srcs = ["@inference_common//:inference_sidecar_test.cc"], + data = [ + ":inference_sidecar_test_target", + ":test_model_target", + ], + flaky = True, + deps = [ + "@com_github_grpc_grpc//:grpc++", + "@com_google_absl//absl/flags:flag", + "@com_google_absl//absl/flags:reflection", + "@com_google_absl//absl/status", + "@com_google_absl//absl/strings", + "@com_google_absl//absl/time", + "@com_google_googletest//:gtest", + "@com_google_googletest//:gtest_main", + "@inference_common//proto:inference_sidecar_cc_grpc_proto", + "@inference_common//proto:inference_sidecar_cc_proto", + "@inference_common//sandbox:sandbox_executor", + "@inference_common//utils:file_util", + ], +) + +cc_library( + name = "pytorch", + srcs = ["pytorch.cc"], + deps = [ + ":pytorch_parser", + "@com_google_absl//absl/base:core_headers", + "@com_google_absl//absl/container:flat_hash_map", + "@com_google_absl//absl/status", + "@com_google_absl//absl/status:statusor", + "@com_google_absl//absl/strings", + "@com_google_absl//absl/synchronization", + "@inference_common//modules:module_interface", + "@inference_common//proto:inference_sidecar_cc_proto", + "@inference_common//utils:request_parser", + "@pytorch_v2_1_1//:torch", + ], +) + +genrule( + name = "test_simple_model_target", + srcs = ["@inference_common//testdata:models/pytorch_simple_model.pt"], + outs = ["simple_model"], + cmd = "cp $< $@", +) + +genrule( + name = "test_e2e_model1_target", + srcs = ["@inference_common//testdata:models/pytorch_e2e_model1.pt"], + outs = ["e2e_model1"], + cmd = "cp $< $@", +) + +genrule( + name = "test_e2e_model2_target", + srcs = ["@inference_common//testdata:models/pytorch_e2e_model2.pt"], + outs = ["e2e_model2"], + cmd = "cp $< $@", +) + +genrule( + name = "test_mixed_inputs_mixed_outputs_target", + srcs = ["@inference_common//testdata:models/pytorch_mixed_inputs_mixed_outputs_model.pt"], + outs = ["mixed_inputs_mixed_outputs_model"], + cmd = "cp $< $@", +) + +cc_test( + name = "pytorch_test", + size = "small", + srcs = ["pytorch_test.cc"], + data = [ + ":test_e2e_model1_target", + ":test_e2e_model2_target", + ":test_mixed_inputs_mixed_outputs_target", + ":test_simple_model_target", + ], + deps = [ + ":pytorch", + "@com_google_absl//absl/status", + "@com_google_absl//absl/status:statusor", + "@com_google_absl//absl/strings", + "@com_google_absl//absl/synchronization", + "@com_google_googletest//:gtest", + "@com_google_googletest//:gtest_main", + "@inference_common//proto:inference_sidecar_cc_proto", + "@inference_common//utils:file_util", + ], +) + +cc_library( + name = "pytorch_parser", + srcs = ["pytorch_parser.cc"], + hdrs = [ + "pytorch_parser.h", + ], + deps = [ + "@com_google_absl//absl/status", + "@com_google_absl//absl/status:statusor", + "@com_google_absl//absl/strings", + "@inference_common//proto:inference_sidecar_cc_proto", + "@inference_common//utils:request_parser", + "@pytorch_v2_1_1//:torch", + ], +) + +cc_test( + name = "pytorch_parser_test", + size = "small", + srcs = ["pytorch_parser_test.cc"], + deps = [ + ":pytorch_parser", + "@com_google_absl//absl/status", + "@com_google_absl//absl/status:statusor", + "@com_google_absl//absl/strings", + "@com_google_googletest//:gtest", + "@com_google_googletest//:gtest_main", + "@inference_common//proto:inference_sidecar_cc_proto", + 
+    ],
+)
+
+cc_test(
+    name = "module_concurrency_test",
+    size = "small",
+    srcs = ["@inference_common//modules:module_concurrency_test.cc"],
+    data = [
+        ":test_model_target",
+    ],
+    deps = [
+        ":pytorch",
+        "@com_google_absl//absl/status",
+        "@com_google_absl//absl/status:statusor",
+        "@com_google_absl//absl/strings",
+        "@com_google_googletest//:gtest",
+        "@com_google_googletest//:gtest_main",
+        "@inference_common//proto:inference_sidecar_cc_proto",
+        "@inference_common//utils:file_util",
+    ],
+)
+
+genrule(
+    name = "generate_artifacts",
+    srcs = [
+        ":inference_sidecar_bin",
+    ],
+    outs = ["inference_sidecar_binary"],
+    cmd_bash = """cat << EOF > '$@'
+mkdir -p artifacts
+cp $(execpath :inference_sidecar_bin) artifacts/inference_sidecar
+EOF""",
+    executable = True,
+    local = True,
+    message = "generate inference sidecar artifacts",
+)
+
+# Exports the PyTorch inference sidecar binary to be packaged in the B&A workspace.
+exports_files(
+    [
+        "artifacts/inference_sidecar",
+    ],
+)
+
+genrule(
+    name = "collect-logs",
+    outs = ["collect_logs.bin"],
+    cmd_bash = """cat << EOF > '$@'
+tools/collect-logs "\\$$@"
+EOF""",
+    executable = True,
+    local = True,
+    message = "copy bazel build and test logs",
+)
diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/README.md b/services/inference_sidecar/modules/pytorch_v2_1_1/README.md
new file mode 100644
index 00000000..81cc08af
--- /dev/null
+++ b/services/inference_sidecar/modules/pytorch_v2_1_1/README.md
@@ -0,0 +1,24 @@
+# Privacy Sandbox - PyTorch Sidecar
+
+`services/inference_sidecar/modules/pytorch_v2_1_1` provides the PyTorch-specific code to build the
+inference sidecar with PyTorch version 2.1.1 as the inference backend.
+
+## How to build the PyTorch sidecar binary
+
+The directory has its own `WORKSPACE` file. You should change the current working directory before
+running bazel.
+
+```sh
+cd services/inference_sidecar/modules/pytorch_v2_1_1
+./builders/tools/bazel-debian build //:inference_sidecar_bin
+./builders/tools/bazel-debian test //:pytorch_test
+```
+
+To enable packaging of the inference sidecar with the bidding server, we need to generate the
+inference sidecar binary into a local artifact directory. Specifically, run the following
+command before `build_and_test_all_in_docker`.
+
+```sh
+cd services/inference_sidecar/modules/pytorch_v2_1_1
+./builders/tools/bazel-debian run //:generate_artifacts
+```
diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/WORKSPACE b/services/inference_sidecar/modules/pytorch_v2_1_1/WORKSPACE
new file mode 100644
index 00000000..a23c1fc1
--- /dev/null
+++ b/services/inference_sidecar/modules/pytorch_v2_1_1/WORKSPACE
@@ -0,0 +1,117 @@
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
+
+### PyTorch dependencies
+load("//third_party:pytorch_v2_1_1_deps1.bzl", "pytorch_v2_1_1_deps1")
+load("//third_party:pytorch_v2_1_1_deps2.bzl", "pytorch_v2_1_1_deps2")
+
+pytorch_v2_1_1_deps1()
+
+pytorch_v2_1_1_deps2()
+
+### Dataplane dependencies
+http_archive(
+    name = "google_privacysandbox_servers_common",
+    # 2024-03-18
+    sha256 = "0a0bbe51c193f9dbbd0c17ff96db9883d3cd4980df150c8747d8520071dce0a7",
+    strip_prefix = "data-plane-shared-libraries-a7515f845ef463450baddff60099a78a2e8eadc3",
+    urls = [
+        "https://github.com/privacysandbox/data-plane-shared-libraries/archive/a7515f845ef463450baddff60099a78a2e8eadc3.zip",
+    ],
+)
+
+load(
+    "@google_privacysandbox_servers_common//third_party:cpp_deps.bzl",
+    data_plane_shared_deps_cpp = "cpp_dependencies",
+)
+
+data_plane_shared_deps_cpp()
+
+load("@google_privacysandbox_servers_common//third_party:deps1.bzl", data_plane_shared_deps1 = "deps1")
+
+data_plane_shared_deps1()
+
+load("@com_google_googleapis//:repository_rules.bzl", "switched_rules_by_language")
+
+switched_rules_by_language(
+    name = "com_google_googleapis_imports",
+    cc = True,
+    grpc = True,
+)
+
+### Inference Common
+local_repository(
+    name = "inference_common",
+    path = "../../common",
+)
+
+http_archive(
+    name = "rapidjson",
+    build_file = "@inference_common//:third_party/rapidjson.BUILD.bazel",
+    sha256 = "bf7ced29704a1e696fbccf2a2b4ea068e7774fa37f6d7dd4039d0787f8bed98e",
+    strip_prefix = "rapidjson-1.1.0",
+    url = "https://github.com/Tencent/rapidjson/archive/v1.1.0.tar.gz",
+)
+
+### gRPC dependencies
+load("@com_github_grpc_grpc//bazel:grpc_deps.bzl", "grpc_deps")
+
+grpc_deps()
+
+load("@com_google_protobuf//:protobuf_deps.bzl", "protobuf_deps")
+
+protobuf_deps()
+
+load("@google_privacysandbox_servers_common//build_defs/cc/shared:sandboxed_api.bzl", "sandboxed_api")
+
+### Sandbox2 dependencies
+sandboxed_api()
+
+load("@com_google_sandboxed_api//sandboxed_api/bazel:llvm_config.bzl", "llvm_disable_optional_support_deps")
+load("@com_google_sandboxed_api//sandboxed_api/bazel:sapi_deps.bzl", "sapi_deps")
+
+llvm_disable_optional_support_deps()
+
+sapi_deps()
+
+### Configure Python
+load("//builders/bazel:deps.bzl", "python_deps")
+
+python_deps("//builders/bazel")
+
+http_archive(
+    name = "pybind11_bazel",
+    sha256 = "b72c5b44135b90d1ffaba51e08240be0b91707ac60bea08bb4d84b47316211bb",
+    strip_prefix = "pybind11_bazel-b162c7c88a253e3f6b673df0c621aca27596ce6b",
+    urls = ["https://github.com/pybind/pybind11_bazel/archive/b162c7c88a253e3f6b673df0c621aca27596ce6b.zip"],
+)
+
+load("@pybind11_bazel//:python_configure.bzl", "python_configure")
+
+python_configure(
+    name = "local_config_python",
+)
+
+load("@rules_python//python:pip.bzl", "pip_parse")
+
+pip_parse(
+    name = "pytorch_v2_1_1_pip_deps",
+    requirements_lock = "@pytorch_v2_1_1//:tools/build/bazel/requirements.txt",
+)
+
+load("@pytorch_v2_1_1_pip_deps//:requirements.bzl", pytorch_v2_1_1_pip_install_deps = "install_deps")
+
+pytorch_v2_1_1_pip_install_deps()
diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/.coverage.bazelrc b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/.coverage.bazelrc
new file mode 100644
index 00000000..58b34d6e
--- /dev/null
+++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/.coverage.bazelrc
@@ -0,0 +1,17 @@
+coverage --action_env=PRIVACY_SANDBOX_SERVERS_BUILD_CONFIG_COVERAGE=1
+coverage --config=clang_coverage
+coverage --build_tests_only
+
+# clang_coverage -- the set of args to configure C++ code coverage in bazel with clang
+coverage:clang_coverage --compilation_mode=opt
+coverage:clang_coverage --combined_report=lcov
+coverage:clang_coverage --nocache_test_results
+coverage:clang_coverage --experimental_use_llvm_covmap
+coverage:clang_coverage --experimental_generate_llvm_lcov
+coverage:clang_coverage --action_env=BAZEL_USE_LLVM_NATIVE_COVERAGE=1
+coverage:clang_coverage --action_env=GCOV=/usr/bin/llvm-profdata
+coverage:clang_coverage --action_env=BAZEL_LLVM_COV=/usr/bin/llvm-cov
+coverage:clang_coverage --action_env=BAZEL_LLVM_PROFDATA=/usr/bin/llvm-profdata
+coverage:clang_coverage --strategy=TestRunner=sandboxed,local
+coverage:clang_coverage --strategy=CoverageReport=sandboxed,local
+coverage:clang_coverage --experimental_scale_timeouts=1.5
diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/.github/workflows/scorecard.yaml b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/.github/workflows/scorecard.yaml
new file mode 100644
index 00000000..dbcb6200
--- /dev/null
+++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/.github/workflows/scorecard.yaml
@@ -0,0 +1,86 @@
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Workflow for the OSSF Scorecards Action
+# https://github.com/ossf/scorecard-action#installation
+
+name: Scorecard supply-chain security
+on:
+  # For Branch-Protection check. Only the default branch is supported. See
+  # https://github.com/ossf/scorecard/blob/main/docs/checks.md#branch-protection
+  branch_protection_rule:
+  # To guarantee Maintained check is occasionally updated. See
+  # https://github.com/ossf/scorecard/blob/main/docs/checks.md#maintained
+  schedule:
+    - cron: '35 10 * * 4'
+  push:
+    branches:
+      - main
+
+# Declare default permissions as read only.
+permissions: read-all
+
+jobs:
+  analysis:
+    name: Scorecard analysis
+    runs-on: ubuntu-latest
+    permissions:
+      # Needed to upload the results to code-scanning dashboard.
+      security-events: write
+      # Needed to publish results and get a badge (see publish_results below).
+      id-token: write
+      # Uncomment the permissions below if installing in a private repository.
+      # contents: read
+      # actions: read
+
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # v3.1.0
+        with:
+          persist-credentials: false
+
+      - name: Run analysis
+        uses: ossf/scorecard-action@e38b1902ae4f44df626f11ba0734b14fb91f8f86 # v2.1.2
+        with:
+          results_file: results.sarif
+          results_format: sarif
+          # (Optional) "write" PAT token. Uncomment the `repo_token` line below if:
+          # - you want to enable the Branch-Protection check on a *public* repository, or
+          # - you are installing Scorecard on a *private* repository
+          # To create the PAT, follow the steps in https://github.com/ossf/scorecard-action#authentication-with-pat.
+ # repo_token: ${{ secrets.SCORECARD_TOKEN }} + + # Public repositories: + # - Publish results to OpenSSF REST API for easy access by consumers + # - Allows the repository to include the Scorecard badge. + # - See https://github.com/ossf/scorecard-action#publishing-results. + # For private repositories: + # - `publish_results` will always be set to `false`, regardless + # of the value entered here. + publish_results: true + + # Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF + # format to the repository Actions tab. + - name: Upload artifact + uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8 # v3.1.0 + with: + name: SARIF file + path: results.sarif + retention-days: 5 + + # Upload the results to GitHub's code scanning dashboard. + - name: Upload to code-scanning + uses: github/codeql-action/upload-sarif@17573ee1cc1b9d061760f3a006fc4aac4f944fd5 # v2.2.4 + with: + sarif_file: results.sarif diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/.gitignore b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/.gitignore new file mode 100644 index 00000000..da211595 --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/.gitignore @@ -0,0 +1,4 @@ +*~ +*.tmp +*.log +*.eif diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/.markdownlint-cli2.yaml b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/.markdownlint-cli2.yaml new file mode 120000 index 00000000..212461e1 --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/.markdownlint-cli2.yaml @@ -0,0 +1 @@ +etc/.markdownlint-cli2.yaml \ No newline at end of file diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/.pre-commit-config.yaml b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/.pre-commit-config.yaml new file mode 100644 index 00000000..c7997b3e --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/.pre-commit-config.yaml @@ -0,0 +1,150 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# See https://pre-commit.com for more information +# See https://pre-commit.com/hooks.html for more hooks +exclude: (?x)^( + version.txt + )$ + +fail_fast: true +repos: +- repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.4.0 + hooks: + - id: end-of-file-fixer + - id: fix-byte-order-marker + - id: mixed-line-ending + - id: trailing-whitespace + - id: check-case-conflict + - id: check-merge-conflict + - id: check-yaml + - id: check-json + - id: check-symlinks + - id: check-added-large-files + - id: check-vcs-permalinks + - id: check-executables-have-shebangs + - id: detect-private-key + +- repo: https://github.com/jumanjihouse/pre-commit-hooks + rev: 3.0.0 + hooks: + - id: git-check + - id: script-must-not-have-extension + - id: script-must-have-extension + - id: require-ascii + - id: shellcheck + +- repo: https://github.com/pre-commit/mirrors-clang-format + rev: v16.0.6 + hooks: + - id: clang-format + types_or: + - c++ + - c + +- repo: https://github.com/bufbuild/buf + rev: v1.23.1 + hooks: + - id: buf-format + +- repo: local + hooks: + - id: addlicense + name: addlicense + language: golang + additional_dependencies: + - github.com/google/addlicense@v1.1.1 + always_run: false + pass_filenames: true + entry: addlicense -v + types_or: + - text + + - id: addlicense-check + name: addlicense check + language: golang + additional_dependencies: + - github.com/google/addlicense@v1.1.1 + always_run: false + pass_filenames: true + entry: addlicense -check + types_or: + - text + + - id: terraform-fmt + name: terraform fmt + description: Run terraform via docker to format Terraform files + language: script + pass_filenames: false + entry: tools/terraform fmt -write=true -recursive + types_or: + - terraform + + - id: hadolint + name: Lint Dockerfiles + description: Run hadolint via docker to lint Dockerfiles + language: script + types_or: + - dockerfile + entry: tools/hadolint + +- repo: https://github.com/pre-commit/mirrors-prettier + rev: v3.0.0 + hooks: + - id: prettier + name: prettier markdown + types_or: + - markdown + +- repo: https://github.com/DavidAnson/markdownlint-cli2 + rev: v0.8.1 + hooks: + - id: markdownlint-cli2 + name: lint markdown + +- repo: local + hooks: + - id: buildifier + name: buildifier + description: Format bazel WORKSPACE, BUILD and .bzl files with a standard convention. 
+ language: golang + additional_dependencies: + - github.com/bazelbuild/buildtools/buildifier@6.1.1 + always_run: true + pass_filenames: true + types_or: + - bazel + entry: buildifier + args: + - -lint=fix + - -mode=fix + - -warnings=all + +- repo: https://github.com/cpplint/cpplint + rev: 1.6.1 + hooks: + - id: cpplint + types_or: + - c++ + - c + args: + - --filter=-build/c++11,+build/c++17,-build/header_guard,-build/include_order,+build/include_what_you_use,-build/include_subdir,-readability/casting,-readability/todo,-runtime/references + - --quiet + +- repo: https://github.com/psf/black + rev: 23.7.0 + hooks: + - id: black + name: black python formatter diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/.prettierignore b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/.prettierignore new file mode 100644 index 00000000..f38e52f8 --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/.prettierignore @@ -0,0 +1,3 @@ +/.git/ +/CHANGELOG.md +/google_internal/LATEST_RELEASE.md diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/.prettierrc.yaml b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/.prettierrc.yaml new file mode 120000 index 00000000..5f7e36f6 --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/.prettierrc.yaml @@ -0,0 +1 @@ +etc/.prettierrc.yaml \ No newline at end of file diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/.versionrc.json b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/.versionrc.json new file mode 120000 index 00000000..f0385a95 --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/.versionrc.json @@ -0,0 +1 @@ +etc/.versionrc.json \ No newline at end of file diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/CHANGELOG.md b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/CHANGELOG.md new file mode 100644 index 00000000..36788bbb --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/CHANGELOG.md @@ -0,0 +1,835 @@ +# Changelog + +All notable changes to this project will be documented in this file. See [commit-and-tag-version](https://github.com/absolute-version/commit-and-tag-version) for commit guidelines. 
+ +## 0.55.1 (2024-02-22) + + +### Bug Fixes + +* Do not invoke normalize-bazel-symlinks for cbuild --cmd + +## 0.55.0 (2024-02-22) + + +### Bug Fixes + +* Normalize bazel symlinks to a resolved path +* Pass the correct path for normalize-bazel-symlinks + +## 0.54.0 (2024-02-09) + + +### Features + +* Set cbuild workdir to pwd relative to root workspace + +## 0.53.0 (2024-01-25) + + +### Features + +* Add support to collect-coverage tool for custom lcov report + + +### Bug Fixes + +* Improve --cmd-profiler support + +## 0.52.0 (2023-12-02) + + +### Features + +* add python3.9 dev to bazel-debian + +## 0.51.0 (2023-11-30) + + +### Bug Fixes + +* Clean go build cache at the end of image build script + + +### Dependencies + +* **deps:** Upgrade bazelisk to 1.19.0 + +## 0.50.0 (2023-11-06) + + +### Features + +* Add openssh-client to build-debian image + +## 0.49.1 (2023-10-30) + + +### Bug Fixes + +* Add tools/wrk2 wrapper script + +## 0.49.0 (2023-10-27) + + +### Features + +* Add wrk2 to test-tools image +* Extend normalize-bazel-symlink to normalize within containers + + +### Dependencies + +* **deps:** Update versions in test-tools image + +## 0.48.0 (2023-10-11) + + +### Features + +* Add tools/bazel-amazonlinux2023 + + +### Bug Fixes + +* Add lldb symlink + +## 0.47.0 (2023-10-05) + + +### Features + +* Add --cmd-profiler flag to tools/cbuild +* Add google-pprof to coverage-tools image +* Add libprofiler.so to build-debian image + + +### Bug Fixes + +* Indicate default image in help text + +## 0.46.1 (2023-10-04) + + +### Bug Fixes + +* cbuild exits normally even if bazel symlinks aren't normalized +* Revert to clang 15.x +* Use bash when normalize-dist runs inside docker +* Use DOCKER_NETWORK if already set + +## 0.46.0 (2023-09-22) + + +### Bug Fixes + +* Hide normalize-bazel-symlinks in cbuild + + +### Dependencies + +* **deps:** Update amazonlinux images to Aug 2023 +* **deps:** Upgrade clang to v16 in build-debian + +## 0.45.0 (2023-09-19) + + +### Features + +* Invoke normalize-bazel-symlinks at cbuild exit + +## 0.44.0 (2023-09-11) + + +### Features + +* Add coverage bazel config and collect-coverage tool + +## 0.43.0 (2023-08-15) + + +### Features + +* Allow terraform version to be specified + + +### Bug Fixes + +* Move feat(deps) earlier for precedence over feat + +## 0.42.1 (2023-08-14) + + +### Bug Fixes + +* Revert override of /usr/bin/python links in build-amazonlinux2 + +## 0.42.0 (2023-08-07) + + +### Features + +* Add amazonlinux2 support to convert-docker-to-nitro +* Add bsdmainutils for hexdump +* Add build-amazonlinux2023 image +* Add support for amazonlinux2023 in builder.sh +* Configure python3.9 for python/python3 links +* Remove jdk from build-amazonlinux2 image + +### Dependencies + +* **deps:** Upgrade amazonlinux2 to 20230719 + + +### Bug Fixes + +* Empty bazel-* arg list not an error condition + +## 0.41.1 (2023-08-04) + + +### Bug Fixes + +* Remove debug statement + +## 0.41.0 (2023-08-03) + + +### Features + +* Create links for lld and ld.lld in build-debian + +## 0.40.0 (2023-08-03) + + +### Features + +* Add Dependencies section for release notes +* **deps:** Upgrade rules_python to 0.24.0 +* Ensure bazel-* scripts handle non-bazel args too + +## 0.39.0 (2023-08-02) + + +### Features + +* Run bazel containers in seccomp=unconfined mode + +## 0.38.1 (2023-08-02) + + +### Bug Fixes + +* Ensure python3.9 is found first in PATH + +## 0.38.0 (2023-07-28) + + +### Features + +* Add cbuild flag --seccomp-unconfined + +## 0.37.0 (2023-07-27) + + +### Features + +* 
Add patch tool to build-debian image + +## 0.36.1 (2023-07-25) + + +### Bug Fixes + +* Rename convert_docker_to_nitro to convert-docker-to-nitro + +## 0.36.0 (2023-07-24) + + +### Features + +* Add convert_docker_to_nitro + +## 0.35.0 (2023-07-21) + + +### Features + +* Add LICENSE file + +## 0.34.0 (2023-07-21) + + +### Features + +* Add OSSF Scorecard badge to top-level README + +## 0.33.0 (2023-07-20) + + +### Features + +* Install python libclang 15 in build-debian +* Add OSSF Scorecard GitHub Action + +## 0.32.0 (2023-07-14) + + +### Features + +* Set PYTHON_BIN_PATH/PYTHON_LIB_PATH in build-amazonlinux2 + +## 0.31.0 (2023-07-12) + + +### Features + +* Add cbuild --docker-network flag +* Add coverage-tool image plus lcov scripts +* Mount gcloud config dir into container + + +### Bug Fixes + +* Add hash for coverage-tools +* Add hash for coverage-tools +* Improve error handling for flag values +* Print build log path on error condition +* Specify latest image tag explicitly +* Upgrade pre-commit hooks + +## 0.30.1 (2023-06-27) + + +### Bug Fixes + +* Use = for --env flag +* Use = for --env flag for all tools + +## 0.30.0 (2023-06-26) + + +### Features + +* Install numpy for python3.9 +* Set PYTHON_BIN_PATH/PYTHON_LIB_PATH in build-debian +* Upgrade AmazonLinux2 to 20230530 +* Upgrade packer to v1.9.1 + + +### Bug Fixes + +* Add links for llvm-{cov,profdata} + +## 0.29.0 (2023-06-05) + + +### Features + +* Update pre-commit hook versions + + +### Bug Fixes + +* Catch error when shifting multiple args +* Remove golang from test-tools image +* Resolve WORKSPACE using realpath +* Use correct exit code in --fast mode + +## 0.28.0 (2023-05-24) + + +### Features + +* Update ca-certificates + + +### Bug Fixes + +* Downgrade to clang v15 +* Use builders version.txt for tarfile tag + +## 0.27.0 (2023-05-23) + + +### Features + +* Add buf to presubmit image + + +### Documentation + +* Add CONTRIBUTING.md + +## 0.26.0 (2023-05-16) + + +### Features + +* Remove zlib-dev package +* Upgrade clang to v16 +* Upgrade go to v1.20.4 + +## 0.25.0 (2023-05-11) + + +### Features + +* Update default bazel version to 5.4.1 +* Upgrade rules_python to 0.21.0 + + +### Bug Fixes + +* Add file utility to build-debian image + +## 0.24.0 (2023-05-05) + + +### Features + +* Add --build-images flag to tests/run-tests +* Reuse tar image if available + + +### Bug Fixes + +* Address linter warnings +* Address linter warnings for tools +* Correct mangled usage text +* Pin pre-commit to 3.x +* Remove .gz suffix from tar file +* Remove function keyword for busybox sh script +* Remove Release in changelog title +* Upgrade pre-commit hooks + +## 0.23.0 (2023-04-13) + + +### Features + +* Add wrapper for commit-and-tag-version +* Upgrade curl to version 8 +* Upgrade to amazonlinux 2.0.20230320.0 + + +### Bug Fixes + +* Use commit-and-tag-version wrapper + +## 0.22.0 (2023-04-03) + + +### Features + +* Add awscurl wrapper script +* Add tests for misc CLI wrappers +* Correctly quote bash args +* Extend test-tool to support the release image +* Use login shell for interactive container + + +### Documentation + +* Add section on tools to README +* Remove section on building images directly + +## 0.21.1 (2023-03-07) + + +### Bug Fixes + +* Relax pinned version for apache2-utils + +## 0.21.0 (2023-03-06) + + +### Features + +* Add wrapper scripts for utils + +## 0.20.0 (2023-03-01) + + +### Features + +* Add docker buildkit plugin +* Add tests for CLI wrapper scripts +* Permit testing of a single image +* Relax pinned versions in 
build-debian, presubmit and test-tools + +## 0.19.0 (2023-03-01) + + +### Features + +* Add jq wrapper + + +### Bug Fixes + +* Relax pinned version of openjdk to 11.0.* + +## 0.18.0 (2023-02-23) + + +### Features + +* Relax pinned versions for apk and yum packages to semver + +## 0.17.0 (2023-02-21) + + +### Features + +* Upgrade black to 23.1.0 +* Upgrade tar to 1.34-r1 +* Upgrade to buildozer 6.0.1 + + +### Bug Fixes + +* Minor code cleanup in images/presubmit/install_apps +* Upgrade ghz to 0.114.0 + +## 0.16.0 (2023-02-05) + + +### Features + +* Run test tools in docker interactive mode to admit std streams + +## 0.15.1 (2023-02-04) + + +### Bug Fixes + +* Return value from get_docker_workspace_mount() + +## 0.15.0 (2023-02-03) + + +### Features + +* Use WORKSPACE_MOUNT if set + + +### Bug Fixes + +* Pin commit-and-tag-version to v10.1.0 + +## 0.14.0 (2023-01-27) + + +### Features + +* Improve verbose output for get-builder-image-tagged + +## 0.13.1 (2023-01-26) + + +### Bug Fixes + +* Upgrade software-properties-common + +## 0.13.0 (2023-01-23) + + +### Features + +* Add ab tool +* Add cassowary http load testing tool +* Add h2load tool +* Add slowhttptest tool +* Adjust get-builder-image-tagged verbose output +* Upgrade to packer v1.8.5 + + +### Bug Fixes + +* Re-pin dependencies in test-tools image +* Relax version pins to semver +* Upgrade amazonlinux2 base image +* Upgrade git on amazonlinux2 + +## 0.12.0 (2023-01-10) + + +### Features + +* Modify ghz wrapper for generic use. Add curl +* Use test-tools image for grpcurl + +## 0.11.0 (2023-01-09) + + +### Features + +* Add chrpath + + +### Bug Fixes + +* Clean up tmpdir via RETURN trap + +## 0.10.0 (2023-01-06) + + +### Features + +* Drop ubuntu package version minor for curl + +## 0.9.0 (2023-01-04) + + +### Features + +* Add zlib1g-dev to build-debian per scp build dependency +* Update hook versions + + +### Bug Fixes + +* Correct non-zero error message and drop sourcing of tools/builder.sh +* Elide warnings from bazel info +* Revert from clang-format v15 to v14 + +## 0.8.0 (2022-12-29) + + +### Features + +* Add bash and jq to test-tools image +* Add script to normalize bazel- symlinks +* Ensure run-tests includes all images +* Skip symlinks that resolve in normalize-bazel-symlink + +## 0.7.0 (2022-12-27) + + +### Features + +* Add ghz wrapper script +* Add test-tools image + +## 0.6.0 (2022-12-12) + + +### Features + +* Add EXTRA_DOCKER_RUN_ARGS support in aws-cli + + +### Bug Fixes + +* Emit docker build output only on non-zero exit +* Remove tempfile before exiting + +## 0.5.0 (2022-12-06) + + +### Features + +* Pin versions in presubmit image + + +### Bug Fixes + +* Avoid cache for tar image +* Update version pin for ca-certificates +* Use images subdirs for image list + +## 0.4.4 (2022-11-18) + + +### Bug Fixes + +* Retain execute permissions when normalizing dist + +## 0.4.3 (2022-11-17) + + +### Bug Fixes + +* Add gettext package for envsubst +* Avoid yum package version strings specific to CPU architecture +* Improve verbose output for get-builder-image-tagged +* Pin apt and yum package versions + +## 0.4.2 (2022-11-17) + + +### Bug Fixes + +* Generate SHA within docker container + +## 0.4.1 (2022-11-15) + + +### Bug Fixes + +* Reduce noise creating presubmit image +* Remove docker run --interactive flag + +## 0.4.0 (2022-11-14) + + +### Features + +* Add buildozer to release image +* Add grpcurl helper script +* Substitute variables in EXTRA_DOCKER_RUN_ARGS +* Use specific version of GNU tar + + +### Bug Fixes + +* Add 
/opt/bin/python link +* Add CC=clang env var +* Add xz to build-debian +* Explicitly add machine type and OS release to toolchains hash + +## 0.3.1 (2022-11-01) + + +### Bug Fixes + +* Add OpenJDK 11 in build-amazonlinux2 + +## 0.3.0 (2022-11-01) + + +### Features + +* Add git to build-amazonlinux2 image +* Add google-java-format pre-commit hook +* Add OpenJDK 11 +* Move python3 to /opt/bin/python3 + + +### Bug Fixes + +* Ensure builder::set_workspace does not overwrite WORKSPACE + +## 0.2.0 (2022-10-26) + + +### Features + +* Add bazel support to register container-based python toolchain +* Add docker uri env var as override +* Add tools/terraform +* Use image etc files in workspace root + + +### Bug Fixes + +* Avoid empty tar error if no workspace etc files + + +### Documentation + +* Add Getting Started section to README.md + +## 0.1.0 (2022-10-25) + + +### Features + +* Add --env flag to cbuild +* Add arg processing to cbuild script +* Add aws-cli helper script +* Add bazel-debian helper script +* Add builders/utils docker image +* Add hadolint to lint Dockerfiles +* Add optional flags to cbuild tool +* Add preliminary support for commit-and-tag-version and copybara +* Add the release-please tool +* Add toolchain short hash to bazel output_user_root path +* Add tools/lib/builder.sh +* **build:** Add GitHub CLI tool https://cli.github.com/ +* Determine workspace mount point from docker inspect if inside docker container +* Inject clang-version as a bazel action_env +* Migrate generation of builders/release container image to Dockerfile +* Move image directories to images/ top-level directory +* Overhaul building on amazonlinux2 +* Remove python build dependencies from bazel +* Set BUILD_ARCH env var in docker images +* Update release image to node v18 +* Upgrade to bazel 5.3.2 +* Upgrade to clang v14 on bazel-debian +* Use Packer to build AMI. 
+ + +### Bug Fixes + +* Add builders/tools/normalize-dist to chmod/chgrp/chown dist/ directory tree +* Add get_workspace_mount function to encapsulate code block +* Add python version to action_env +* Add/remove basic pre-commit hooks +* Adopt shellcheck +* Avoid installing recommended debian packages +* Avoid use of git rev-parse to determine tools path +* Bump to latest version of bazelisk +* Clean bazel_root for smaller docker image +* Correct argument handling in cbuild script +* Correct errors generating Amazon Linux 2-based builder image +* Define python3 toolchain +* Drop packer from build-debian image +* Ensure /etc/gitconfig is readable by all +* Improve cbuild help text +* Install bazel version as specified in .bazelversion +* Invoke addlicense for all text files +* Modifications as indicated by shellcheck +* Mount $HOME/aws in aws-cli container +* Move bazel env vars from comments to help text +* Move builder-related configs to builders/etc +* Move WORKSPACE definition to cbuild script global +* Only propagate AWS env vars into amazonlinux2 build container +* Pin version of bazelisk +* Pin version of libc++-dev +* Pin version of python3.8 +* Print pre-commit version rather than help +* Remove container when get-architecture exits +* Remove debugging statement +* Remove dockerfile linter ignore and correct ENTRYPOINT +* Remove pre-commit config from build-debian +* Remove shellcheck from build-debian +* Remove unused nitro_enclave_image bazel rule +* Set bazel output_base to accommodate distinct workspaces +* Set bazel output_user_root in image bazelrc +* Set locale in build-debian +* Set WORKSPACE correctly from submodule +* Set WORKSPACE variable +* Switch from hardcoded arch to using dpkg --print-architecture +* Update normalize-dist to function inside build container +* Update pre-commit to use cbuild +* Use PRE_COMMIT_TOOL env var +* Various path-related fixes + + +### Build System + +* Add arch to docker image tags +* Add get_builder_image_tagged tool to determine a content-based tag +* Add get-architecture helper script +* Add missing imports into nitro BUILD +* Add tools/pre-commit +* Correct propagation of quoted args in gh wrapper +* Move commit-and-tag-version into tools dir +* Move gh into tools dir +* Optionally build the AMI +* Propagate status code in exit functions +* Reduce redundant installation commands +* Remove release-please tool +* Rename builders/bazel to build-debian +* Simplify use of npm image in container_run_and_commit() +* Support GH_TOKEN env var + + +### Documentation + +* Add top-level README.md +* Improve prose in README.md +* Move docker build instructions to README.md +* Reformat and lint markdown diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/CONTRIBUTING.md b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/CONTRIBUTING.md new file mode 100644 index 00000000..0d0e3e85 --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/CONTRIBUTING.md @@ -0,0 +1,3 @@ +# How to Contribute + +Presently this project is not accepting contributions. diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/LICENSE b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/LICENSE new file mode 100644 index 00000000..d6456956 --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. 
Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
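The appendix above is the license's operative how-to: paste the boilerplate notice into each source file, replace the bracketed `[yyyy]` and `[name of copyright owner]` fields, and wrap it in the comment syntax of the file at hand. A minimal sketch for the `#`-comment files that make up most of this tree (shell, Starlark, YAML), filled in the same way as the headers that appear throughout this diff:

```sh
#!/bin/bash
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
```

The builders tree automates this step with the `addlicense` and `addlicense-check` pre-commit hooks defined in `etc/.pre-commit-config.yaml` below (`addlicense -v` inserts headers, `addlicense -check` verifies them).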
diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/README.md b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/README.md new file mode 100644 index 00000000..3ba69a91 --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/README.md @@ -0,0 +1,35 @@ +# Privacy Sandbox Builders + +Build tools and docker images used by the [Privacy Sandbox](https://github.com/privacysandbox) +open-source ecosystem. The docker images generated using the Privacy Sandbox Builders are used as +the build environment(s) for other Privacy Sandbox software. These docker images can be used as part +of CI/CD workflows. + +## Getting Started + +This repo is designed to be used via `git submodule` by other Privacy Sandbox repos. This is not a +requirement per se; it can also be used standalone or via other approaches such as `git subtree`. + +To use Privacy Sandbox Builders, you need: + +- `docker` with [BuildKit](https://docs.docker.com/build/buildkit/). + +## Building Docker Images + +To build a docker image directly, you can use the `tools/get-builder-image-tagged` tool. + +## Tools + +The `tools` directory contains wrapper scripts along with some non-wrapper scripts. The wrapper +scripts, such as `terraform` and `curl`, execute the corresponding tool installed in one of the build +images. + +For example, to execute `curl`: + +```sh +tools/curl --help +``` + +--- + +[![OpenSSF Scorecard](https://api.securityscorecards.dev/projects/github.com/privacysandbox/build-system/badge)](https://securityscorecards.dev/viewer/?uri=github.com/privacysandbox/build-system) diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/bazel/BUILD b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/bazel/BUILD new file mode 100644 index 00000000..9d103c15 --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/bazel/BUILD @@ -0,0 +1,40 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Register container-based python toolchain.""" + +load("@bazel_tools//tools/python:toolchain.bzl", "py_runtime_pair") +load("@rules_python//python:defs.bzl", "py_runtime") + +# define the python3 runtime; this path must exist in the bazel build environment +PY3_PATH = "/opt/bin/python3" + +py_runtime( + name = "py_runtime", + interpreter_path = PY3_PATH, + python_version = "PY3", + visibility = ["//visibility:public"], +) + +py_runtime_pair( + name = "py_runtime_pair", + py2_runtime = None, + py3_runtime = ":py_runtime", +) + +toolchain( + name = "py_toolchain", + toolchain = ":py_runtime_pair", + toolchain_type = "@bazel_tools//tools/python:toolchain_type", +) diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/bazel/deps.bzl b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/bazel/deps.bzl new file mode 100644 index 00000000..0fe6743b --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/bazel/deps.bzl @@ -0,0 +1,35 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Load definitions for use in WORKSPACE files.""" + +load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive") + +def python_deps(bazel_package): + """Load rules_python and register container-based python toolchain + + Note: the bazel_package arg will depend on the import/submodule location in your workspace + + Args: + bazel_package: repo-relative bazel package to builders/bazel/BUILD eg. "//builders/bazel" + """ + http_archive( + name = "rules_python", + sha256 = "0a8003b044294d7840ac7d9d73eef05d6ceb682d7516781a4ec62eeb34702578", + strip_prefix = "rules_python-0.24.0", + urls = [ + "https://github.com/bazelbuild/rules_python/releases/download/0.24.0/rules_python-0.24.0.tar.gz", + ], + ) + native.register_toolchains("{}:py_toolchain".format(bazel_package)) diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/etc/.bazelversion b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/etc/.bazelversion new file mode 100644 index 00000000..ade65226 --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/etc/.bazelversion @@ -0,0 +1 @@ +5.4.1 diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/etc/.clang-format b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/etc/.clang-format new file mode 100644 index 00000000..8c45f0ce --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/etc/.clang-format @@ -0,0 +1,33 @@ +BasedOnStyle: Google +Standard: c++17 +DerivePointerAlignment: false +SortIncludes: true +IncludeBlocks: Regroup +IncludeCategories: + # gtest, this should be put first in tests. + - Regex: '^ from OS. + - Regex: '^<[_A-Za-z0-9-]+\.h>' + Priority: 30 + # 2nd level .h headers in <>, POSIX standard. + - Regex: '^<(sys|arpa|net|netinet)\/[_A-Za-z0-9-]+\.h>' + Priority: 30 + # Linux-specific .h headers in <>. 
+ - Regex: '^<linux\/[_A-Za-z0-9-]+\.h>' + Priority: 40 + # Headers in <> without extension, these are basically C++ STL headers + - Regex: '^<[\/_A-Za-z0-9-]+>' + Priority: 50 + # Headers in <> from specific external libraries. + - Regex: '^<(grpcpp|absl)\/' + Priority: 60 + # Any other uncaught headers in <> + - Regex: '^<' + Priority: 70 + # Headers in "" of current directory this should be the last category. + - Regex: '^"[_A-Za-z0-9-]+\.h' + Priority: 200 + # Headers in "" with directory hierarchy + - Regex: '^"[\/_A-Za-z0-9-]+' + Priority: 80 diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/etc/.hadolint.yaml b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/etc/.hadolint.yaml new file mode 100644 index 00000000..73dc9121 --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/etc/.hadolint.yaml @@ -0,0 +1,20 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# See https://github.com/hadolint/hadolint for more information + +failure-threshold: error + +trustedRegistries: +- docker.io diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/etc/.markdownlint-cli2.yaml b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/etc/.markdownlint-cli2.yaml new file mode 100644 index 00000000..0671ea1a --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/etc/.markdownlint-cli2.yaml @@ -0,0 +1,43 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +config: + line-length: + line_length: 120 + #stern: true + code_blocks: false + + # these are apparently in conflict with prettier's markdown formatting + list-marker-space: false + list-indent: false + ul-indent: false + + headings: false + + proper-names: + code_blocks: false + names: + - CommonMark + - JavaScript + - Markdown + - markdown-it + - markdownlint + - markdownlint-cli2 + - Node.js + +fix: true + +ignores: +- CHANGELOG.md +- google_internal/LATEST_RELEASE.md diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/etc/.pre-commit-config.yaml b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/etc/.pre-commit-config.yaml new file mode 100644 index 00000000..6a3ebd44 --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/etc/.pre-commit-config.yaml @@ -0,0 +1,156 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# See https://pre-commit.com for more information +# See https://pre-commit.com/hooks.html for more hooks +exclude: (?x)^( + bazel-(bin|out|testlogs|workspace)/.*| + .bazel_output/.*| + version.txt + )$ + +fail_fast: true +repos: +- repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.4.0 + hooks: + - id: end-of-file-fixer + - id: fix-byte-order-marker + - id: mixed-line-ending + - id: trailing-whitespace + - id: check-case-conflict + - id: check-merge-conflict + - id: check-yaml + - id: check-json + - id: check-symlinks + - id: check-added-large-files + - id: check-vcs-permalinks + - id: check-executables-have-shebangs + - id: detect-private-key + +- repo: https://github.com/jumanjihouse/pre-commit-hooks + rev: 3.0.0 + hooks: + - id: git-check + - id: script-must-not-have-extension + exclude: '^google_internal/.*/kokoro_(presubmit|continuous).sh$' + - id: script-must-have-extension + - id: require-ascii + - id: shellcheck + exclude: '^(production|tools|google_internal|builders/images)/.*$' + +- repo: https://github.com/pre-commit/mirrors-clang-format + rev: v14.0.6 + hooks: + - id: clang-format + types_or: + - c++ + - c + +- repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks + rev: v2.4.0 + hooks: + - id: pretty-format-java + name: Google Java Formatter + args: [--autofix] + +- repo: local + hooks: + - id: addlicense + name: addlicense + language: golang + additional_dependencies: + - github.com/google/addlicense@v1.1.0 + always_run: false + pass_filenames: true + entry: addlicense -v + types_or: + - text + + - id: addlicense-check + name: addlicense check + language: golang + additional_dependencies: + - github.com/google/addlicense@v1.1.0 + always_run: false + pass_filenames: true + entry: addlicense -check + types_or: + - text + +# - id: terraform-fmt +# name: terraform fmt +# description: Run terraform via docker to format Terraform files +# language: script +# pass_filenames: false +# entry: builders/tools/terraform fmt -write=true -recursive +# types_or: +# - terraform + +# - id: hadolint +# name: Lint Dockerfiles +# description: Run hadolint via docker to lint Dockerfiles +# language: script +# types_or: +# - dockerfile +# entry: builders/tools/hadolint + +- repo: https://github.com/pre-commit/mirrors-prettier + rev: v2.7.1 + hooks: + - id: prettier + name: prettier markdown + types_or: + - markdown + +- repo: https://github.com/DavidAnson/markdownlint-cli2 + rev: v0.6.0 + hooks: + - id: markdownlint-cli2 + name: lint markdown + +- repo: local + hooks: + - id: buildifier + name: buildifier + description: Format bazel WORKSPACE, BUILD and .bzl files with a standard convention. 
+ language: golang + additional_dependencies: + - github.com/bazelbuild/buildtools/buildifier@5.1.0 + always_run: true + pass_filenames: true + types_or: + - bazel + entry: buildifier + args: + - -lint=fix + - -mode=fix + - -warnings=all + +- repo: https://github.com/cpplint/cpplint + rev: 1.6.1 + hooks: + - id: cpplint + types_or: + - c++ + - c + args: + - --filter=-build/c++11,+build/c++17,-build/header_guard,-build/include_order,+build/include_what_you_use,-build/include_subdir,-readability/casting,-readability/todo,-runtime/references + - --quiet + +- repo: https://github.com/psf/black + rev: 23.1.0 + hooks: + - id: black + name: black python formatter diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/etc/.prettierrc.yaml b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/etc/.prettierrc.yaml new file mode 100644 index 00000000..bf26ea63 --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/etc/.prettierrc.yaml @@ -0,0 +1,25 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +printWidth: 120 +trailingComma: es5 +tabWidth: 2 +useTabs: false +singleQuote: true +proseWrap: always +overrides: +- files: '*.md' + options: + tabWidth: 4 + printWidth: 100 diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/etc/.versionrc.json b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/etc/.versionrc.json new file mode 100644 index 00000000..7d175563 --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/etc/.versionrc.json @@ -0,0 +1,27 @@ +{ + "bumpFiles": [ + { + "filename": "version.txt", + "type": "plain-text" + } + ], + "commitUrlFormat": " ", + "issueUrlFormat": " ", + "packageFiles": [ + { + "filename": "version.txt", + "type": "plain-text" + } + ], + "tagPrefix": "release-", + "types": [ + { "type": "feat", "scope": "deps", "section": "Dependencies" }, + { "type": "feat", "section": "Features" }, + { "type": "fix", "section": "Bug Fixes" }, + { "type": "docs", "section": "Documentation" }, + { "type": "internal", "hidden": true }, + { "type": "chore", "hidden": true }, + { "type": "test", "hidden": true }, + { "type": "refactor", "hidden": true } + ] +} diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-amazonlinux2/.bazelversion b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-amazonlinux2/.bazelversion new file mode 120000 index 00000000..8f79a5af --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-amazonlinux2/.bazelversion @@ -0,0 +1 @@ +../../etc/.bazelversion \ No newline at end of file diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-amazonlinux2/Dockerfile b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-amazonlinux2/Dockerfile new file mode 100644 index 00000000..bb6c1edc --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-amazonlinux2/Dockerfile @@ -0,0 
+1,35 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +FROM amazonlinux:2.0.20230822.0 + +COPY /install_apps install_golang_apps install_go.sh generate_system_bazelrc .bazelversion /scripts/ +COPY get_workspace_mount /usr/local/bin +COPY gitconfig /etc + +ARG TARGETARCH +ENV BUILD_ARCH="${TARGETARCH}" \ + WORKSPACE=/src/workspace \ + CC=clang + +RUN \ + chmod 644 /etc/gitconfig && \ + /scripts/install_apps && \ + /scripts/generate_system_bazelrc --user-root-name amazonlinux2 && \ + /scripts/install_golang_apps && \ + rm -rf /scripts + +ENV PATH="${PATH}:/usr/local/go/bin:/opt/bin" \ + PYTHON_BIN_PATH="/opt/bin/python3" \ + PYTHON_LIB_PATH="/usr/lib/python3.8" diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-amazonlinux2/generate_system_bazelrc b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-amazonlinux2/generate_system_bazelrc new file mode 120000 index 00000000..8f22303f --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-amazonlinux2/generate_system_bazelrc @@ -0,0 +1 @@ +../generate_system_bazelrc \ No newline at end of file diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-amazonlinux2/get_workspace_mount b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-amazonlinux2/get_workspace_mount new file mode 120000 index 00000000..581579a3 --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-amazonlinux2/get_workspace_mount @@ -0,0 +1 @@ +../get_workspace_mount \ No newline at end of file diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-amazonlinux2/gitconfig b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-amazonlinux2/gitconfig new file mode 120000 index 00000000..db62a00c --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-amazonlinux2/gitconfig @@ -0,0 +1 @@ +../gitconfig \ No newline at end of file diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-amazonlinux2/install_apps b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-amazonlinux2/install_apps new file mode 100755 index 00000000..00b6f31e --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-amazonlinux2/install_apps @@ -0,0 +1,107 @@ +#!/bin/bash + +set -o pipefail +set -o errexit + +VERBOSE=0 + +usage() { + local exitval=${1-1} + cat >&2 << USAGE +usage: + $0 + --verbose Emit verbose info. Default: false +USAGE + # shellcheck disable=SC2086 + exit ${exitval} +} + +while [[ $# -gt 0 ]]; do + case "$1" in + --verbose) + VERBOSE=1 + shift + ;; + -h | --help) + usage 0 + break + ;; + *) + usage + break + ;; + esac +done + +SCRIPT_DIR="$(dirname "$(readlink -f "$0")")" +readonly SCRIPT_DIR +# shellcheck disable=SC1090 +. 
"${SCRIPT_DIR}"/install_go.sh + +function yum_update() { + yum -y update +} + +function install_python() { + amazon-linux-extras install -y \ + python3.8 + mkdir -p /opt/bin + update-alternatives \ + --install /opt/bin/python3 python3-opt /usr/bin/python3.8 100 \ + --slave /opt/bin/python python /usr/bin/python3.8 + +} + +function install_nitro() { + amazon-linux-extras install -y aws-nitro-enclaves-cli + yum install -y aws-nitro-enclaves-cli-devel +} + +function install_gcc() { + # install gcc, which is required to build socat + yum install -y "gcc-7.3.1*" +} + +function install_misc() { + yum install -y \ + "gettext-0.19.8.*" \ + "git-2.38*" \ + "tar-1.26*" \ + "unzip-6.0*" \ + "which-2.*" \ + "zip-3.0*" +} + +function install_packer() { + yum install -y "yum-utils-1.1.31*" + yum-config-manager --add-repo https://rpm.releases.hashicorp.com/AmazonLinux/hashicorp.repo + yum -y install "packer-1.9.1*" + update-alternatives --install /usr/local/bin/packer packer /usr/bin/packer 100 + + /usr/local/bin/packer version +} + +function install_clang() { + yum install -y "clang-11.1.0*" + clang --version +} + +function cleanup() { + cd / + go clean -cache +} + +if [[ ${VERBOSE} -eq 1 ]]; then + printf "=== SHELL ENVIRONMENT ===\n" + env +fi + +yum_update +install_nitro +install_misc +install_clang +install_golang "${BUILD_ARCH}" +install_gcc +install_packer +install_python +cleanup diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-amazonlinux2/install_go.sh b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-amazonlinux2/install_go.sh new file mode 120000 index 00000000..211fd009 --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-amazonlinux2/install_go.sh @@ -0,0 +1 @@ +../install_go.sh \ No newline at end of file diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-amazonlinux2/install_golang_apps b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-amazonlinux2/install_golang_apps new file mode 120000 index 00000000..acc9d5a3 --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-amazonlinux2/install_golang_apps @@ -0,0 +1 @@ +../install_golang_apps \ No newline at end of file diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-amazonlinux2/test/commands.yaml b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-amazonlinux2/test/commands.yaml new file mode 100644 index 00000000..41823ae5 --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-amazonlinux2/test/commands.yaml @@ -0,0 +1,29 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# refer to docs at https://github.com/GoogleContainerTools/container-structure-test + +schemaVersion: 2.0.0 + +# command tests require the docker toolchain +commandTests: + - name: nitro-cli version + command: "nitro-cli" + args: ["--version"] + exitCode: 0 + + - name: nitro-cli build-enclave help + command: "nitro-cli" + args: ["build-enclave", "--help"] + exitCode: 0 diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-amazonlinux2023/.bazelversion b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-amazonlinux2023/.bazelversion new file mode 120000 index 00000000..8f79a5af --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-amazonlinux2023/.bazelversion @@ -0,0 +1 @@ +../../etc/.bazelversion \ No newline at end of file diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-amazonlinux2023/Dockerfile b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-amazonlinux2023/Dockerfile new file mode 100644 index 00000000..d24ba9af --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-amazonlinux2023/Dockerfile @@ -0,0 +1,35 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +FROM amazonlinux:2023.1.20230825.0 + +COPY /install_apps install_golang_apps install_go.sh generate_system_bazelrc .bazelversion /scripts/ +COPY get_workspace_mount /usr/local/bin +COPY gitconfig /etc + +ARG TARGETARCH +ENV BUILD_ARCH="${TARGETARCH}" \ + WORKSPACE=/src/workspace \ + CC=clang + +RUN \ + chmod 644 /etc/gitconfig && \ + /scripts/install_apps && \ + /scripts/generate_system_bazelrc --user-root-name amazonlinux2023 && \ + /scripts/install_golang_apps && \ + rm -rf /scripts + +ENV PATH="${PATH}:/usr/local/go/bin:/opt/bin" \ + PYTHON_BIN_PATH="/opt/bin/python3" \ + PYTHON_LIB_PATH="/usr/lib/python3.8" diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-amazonlinux2023/generate_system_bazelrc b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-amazonlinux2023/generate_system_bazelrc new file mode 120000 index 00000000..8f22303f --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-amazonlinux2023/generate_system_bazelrc @@ -0,0 +1 @@ +../generate_system_bazelrc \ No newline at end of file diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-amazonlinux2023/get_workspace_mount b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-amazonlinux2023/get_workspace_mount new file mode 120000 index 00000000..581579a3 --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-amazonlinux2023/get_workspace_mount @@ -0,0 +1 @@ +../get_workspace_mount \ No newline at end of file diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-amazonlinux2023/gitconfig b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-amazonlinux2023/gitconfig new file mode 120000 index 00000000..db62a00c --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-amazonlinux2023/gitconfig @@ -0,0 +1 @@ +../gitconfig \ No newline at end of file diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-amazonlinux2023/install_apps b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-amazonlinux2023/install_apps new file mode 100755 index 00000000..2cf10696 --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-amazonlinux2023/install_apps @@ -0,0 +1,106 @@ +#!/bin/bash + +set -o pipefail +set -o errexit + +VERBOSE=0 + +usage() { + local exitval=${1-1} + cat >&2 << USAGE +usage: + $0 + --verbose Emit verbose info. Default: false +USAGE + # shellcheck disable=SC2086 + exit ${exitval} +} + +while [[ $# -gt 0 ]]; do + case "$1" in + --verbose) + VERBOSE=1 + shift + ;; + -h | --help) + usage 0 + break + ;; + *) + usage + break + ;; + esac +done + +SCRIPT_DIR="$(dirname "$(readlink -f "$0")")" +readonly SCRIPT_DIR +# shellcheck disable=SC1090 +. 
"${SCRIPT_DIR}"/install_go.sh + +function dnf_update() { + dnf -y update +} + +function install_python() { + mkdir -p /opt/bin + alternatives \ + --install /opt/bin/python3 python3-opt /usr/bin/python3.9 100 \ + --slave /usr/bin/python3 python3-usr /usr/bin/python3.9 \ + --slave /usr/bin/python python-usr /usr/bin/python3.9 \ + --slave /opt/bin/python python /usr/bin/python3.9 +} + +function install_nitro() { + dnf install -y \ + "aws-nitro-enclaves-cli-1.2.*" \ + "aws-nitro-enclaves-cli-devel-1.2.*" +} + +function install_gcc() { + # install gcc, which is required to build socat + dnf install -y "gcc-11.3.*" +} + +function install_misc() { + dnf install -y \ + "gettext-0.21*" \ + "git-2.40.*" \ + "tar-2:1.34*" \ + "unzip-6.0*" \ + "which-2.*" \ + "zip-3.0*" +} + +function install_packer() { + dnf install -y "dnf-utils-4.1.*" + dnf config-manager --add-repo https://rpm.releases.hashicorp.com/AmazonLinux/hashicorp.repo + dnf -y install "packer-1.9.2*" + update-alternatives --install /usr/local/bin/packer packer /usr/bin/packer 100 + /usr/local/bin/packer version +} + +function install_clang() { + dnf install -y "clang-15.*" + clang --version +} + +function cleanup() { + cd / + go clean -cache +} + +if [[ ${VERBOSE} -eq 1 ]]; then + printf "=== SHELL ENVIRONMENT ===\n" + env +fi + +install_python +dnf_update +install_nitro +install_misc +install_clang +install_golang "${BUILD_ARCH}" +install_gcc +install_packer +cleanup diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-amazonlinux2023/install_go.sh b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-amazonlinux2023/install_go.sh new file mode 120000 index 00000000..211fd009 --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-amazonlinux2023/install_go.sh @@ -0,0 +1 @@ +../install_go.sh \ No newline at end of file diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-amazonlinux2023/install_golang_apps b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-amazonlinux2023/install_golang_apps new file mode 120000 index 00000000..acc9d5a3 --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-amazonlinux2023/install_golang_apps @@ -0,0 +1 @@ +../install_golang_apps \ No newline at end of file diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-amazonlinux2023/test/commands.yaml b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-amazonlinux2023/test/commands.yaml new file mode 100644 index 00000000..41823ae5 --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-amazonlinux2023/test/commands.yaml @@ -0,0 +1,29 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# refer to docs at https://github.com/GoogleContainerTools/container-structure-test + +schemaVersion: 2.0.0 + +# command tests require the docker toolchain +commandTests: + - name: nitro-cli version + command: "nitro-cli" + args: ["--version"] + exitCode: 0 + + - name: nitro-cli build-enclave help + command: "nitro-cli" + args: ["build-enclave", "--help"] + exitCode: 0 diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-debian/.bazelversion b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-debian/.bazelversion new file mode 120000 index 00000000..8f79a5af --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-debian/.bazelversion @@ -0,0 +1 @@ +../../etc/.bazelversion \ No newline at end of file diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-debian/Dockerfile b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-debian/Dockerfile new file mode 100644 index 00000000..eb370730 --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-debian/Dockerfile @@ -0,0 +1,61 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +ARG BASE_IMAGE=ubuntu:20.04 + +# ignore this hadolint error as BASE_IMAGE contains an image tag +# hadolint ignore=DL3006 +FROM ${BASE_IMAGE} as libprofiler-builder +ENV CC=clang \ + CXX=clang +ADD https://github.com/gperftools/gperftools/releases/download/gperftools-2.13/gperftools-2.13.tar.gz /build/gperftools.tar.gz +ADD https://apt.llvm.org/llvm.sh /build/llvm.sh +COPY compile_libprofiler /scripts/ +RUN /scripts/compile_libprofiler + +FROM docker/buildx-bin:v0.10 AS buildx-bin + +# ignore this hadolint error as BASE_IMAGE contains an image tag +# hadolint ignore=DL3006 +FROM ${BASE_IMAGE} +ARG LOCALE=en_US.UTF-8 +ARG TARGETARCH +COPY install_apps install_golang_apps install_go.sh generate_system_bazelrc .bazelversion /scripts/ +COPY get_workspace_mount /usr/local/bin +COPY gitconfig /etc +COPY --from=buildx-bin /buildx /usr/libexec/docker/cli-plugins/docker-buildx +COPY --from=libprofiler-builder /usr/lib/libprofiler.so.* /usr/lib/x86_64-linux-gnu/ +RUN \ + ln -rs /usr/lib/x86_64-linux-gnu/libprofiler.so.0.* /usr/lib/x86_64-linux-gnu/libprofiler.so.0 && \ + ln -rs /usr/lib/x86_64-linux-gnu/libprofiler.so.0.* /usr/lib/x86_64-linux-gnu/libprofiler.so + +ENV BUILD_ARCH="${TARGETARCH}" \ + WORKSPACE=/src/workspace \ + CC=clang \ + TZ=Etc/UTC \ + LANG=${LOCALE} \ + LANGUAGE=${LOCALE} \ + LC_ALL=${LOCALE} \ + LC_CTYPE=${LOCALE} + +RUN \ + chmod 644 /etc/gitconfig && \ + /scripts/install_apps --locale ${LOCALE} && \ + /scripts/generate_system_bazelrc --user-root-name ubuntu && \ + /scripts/install_golang_apps && \ + rm -rf /scripts + +ENV PATH="/opt/bin:${PATH}:/usr/local/go/bin" \ + PYTHON_BIN_PATH="/opt/bin/python3" \ + PYTHON_LIB_PATH="/usr/lib/python3.9" diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-debian/compile_libprofiler 
b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-debian/compile_libprofiler new file mode 100755 index 00000000..f05397fb --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-debian/compile_libprofiler @@ -0,0 +1,45 @@ +#!/bin/bash + +set -o pipefail +set -o errexit + +declare -r -i CLANG_VER=15 + +function apt_update() { + apt-get --quiet -o 'Acquire::https::No-Cache=True' -o 'Acquire::http::No-Cache=True' update +} + +function install_build_tools() { + DEBIAN_FRONTEND=noninteractive apt-get --quiet install -y --no-install-recommends \ + gnupg="2.2.*" \ + lsb-release="11.1.*" \ + make="4.*" \ + software-properties-common="0.99.*" \ + wget="1.20.*" +} + +function install_clang() { + ls -l /build + chmod +x /build/llvm.sh + /build/llvm.sh ${CLANG_VER} + apt-get --quiet install -y --no-install-recommends libc++-${CLANG_VER}-dev + update-alternatives --install /usr/bin/clang clang /usr/bin/clang-${CLANG_VER} 100 + rm -f /build/llvm.sh + + clang --version +} + +function install_profiler() { + cd /build + tar xz --strip-components 1 -f gperftools.tar.gz + ./configure + make libprofiler.la + declare -r ver=0.5.9 + cp .libs/libprofiler.so.${ver} /usr/lib + ls -l /usr/lib/libprofiler* +} + +apt_update +install_build_tools +install_clang +install_profiler diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-debian/generate_system_bazelrc b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-debian/generate_system_bazelrc new file mode 120000 index 00000000..8f22303f --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-debian/generate_system_bazelrc @@ -0,0 +1 @@ +../generate_system_bazelrc \ No newline at end of file diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-debian/get_workspace_mount b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-debian/get_workspace_mount new file mode 120000 index 00000000..581579a3 --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-debian/get_workspace_mount @@ -0,0 +1 @@ +../get_workspace_mount \ No newline at end of file diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-debian/gitconfig b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-debian/gitconfig new file mode 120000 index 00000000..db62a00c --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-debian/gitconfig @@ -0,0 +1 @@ +../gitconfig \ No newline at end of file diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-debian/install_apps b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-debian/install_apps new file mode 100755 index 00000000..8734f014 --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-debian/install_apps @@ -0,0 +1,146 @@ +#!/bin/bash + +set -o pipefail +set -o errexit + +declare -i VERBOSE=0 +declare INSTALL_LOCALE=en_US.UTF-8 +declare -r -i CLANG_VER=15 + +usage() { + local exitval=${1-1} + cat >&2 << USAGE +usage: + $0 + --locale Set locale. Default: ${INSTALL_LOCALE} + --verbose Emit verbose info.
Default: false +USAGE + # shellcheck disable=SC2086 + exit ${exitval} +} + +while [[ $# -gt 0 ]]; do + case "$1" in + --locale) + INSTALL_LOCALE="$2" + shift 2 || usage + ;; + --verbose) + VERBOSE=1 + shift + ;; + -h | --help) usage 0 ;; + *) usage 0 ;; + esac +done + +SCRIPT_DIR="$(dirname "$(readlink -f "$0")")" +readonly SCRIPT_DIR +# shellcheck disable=SC1090 +. "${SCRIPT_DIR}"/install_go.sh + +function apt_update() { + apt-get --quiet -o 'Acquire::https::No-Cache=True' -o 'Acquire::http::No-Cache=True' update +} + +function install_python() { + DEBIAN_FRONTEND=noninteractive apt-get --quiet install -y --no-install-recommends \ + python3.9-venv="3.9.*" python3.9-dev + mkdir -p /opt/bin + update-alternatives \ + --force \ + --install /opt/bin/python3 python3-opt /usr/bin/python3.9 100 \ + --slave /usr/bin/python3 python3-usr /usr/bin/python3.9 \ + --slave /usr/bin/python python-usr /usr/bin/python3.9 \ + --slave /opt/bin/python python /usr/bin/python3.9 + curl https://bootstrap.pypa.io/get-pip.py -o /tmp/get-pip.py + /usr/bin/python3 /tmp/get-pip.py + rm -f /tmp/get-pip.py + /usr/bin/python3 -m pip --version + /usr/bin/python3 -m pip install \ + "libclang~=${CLANG_VER}.0" \ + "numpy~=1.25" +} + +function install_misc() { + DEBIAN_FRONTEND=noninteractive apt-get --quiet install -y --no-install-recommends \ + apt-transport-https="2.0.*" \ + bsdmainutils \ + ca-certificates \ + chrpath="0.16-*" \ + libcurl4="7.68.*" \ + curl="7.68.*" \ + file="1:5.*" \ + gettext="0.19.*" \ + git="1:2.25.*" \ + gnupg="2.2.*" \ + google-perftools="2.*" \ + locales="2.31-*" \ + lsb-release="11.1.*" \ + openssh-client="1:8.2*" \ + patch="2.7.*" \ + rename="1.10-*" \ + software-properties-common="0.99.*" \ + unzip="6.0-*" \ + wget="1.20.*" \ + xz-utils="5.2.*" \ + zip="3.0-*" + if [[ -n ${INSTALL_LOCALE} ]]; then + printf "\nSetting locale to: %s\n" "${INSTALL_LOCALE}" + locale-gen "${INSTALL_LOCALE}" + update-locale LANG="${INSTALL_LOCALE}" + fi +} + +function install_clang() { + curl --silent --fail --show-error --location --remote-name https://apt.llvm.org/llvm.sh + chmod +x llvm.sh + ./llvm.sh ${CLANG_VER} + apt-get --quiet install -y --no-install-recommends libc++-${CLANG_VER}-dev + for prog in clang lldb lld ld.lld llvm-cov llvm-profdata; do + update-alternatives --install /usr/bin/${prog} ${prog} /usr/bin/${prog}-${CLANG_VER} 100 + done + rm -f llvm.sh + + clang --version + llvm-cov --version + llvm-profdata show --version +} + +# Install Docker (https://docs.docker.com/engine/install/debian/) +function install_docker() { + declare -r arch="$1" + apt-get --quiet remove docker docker.io containerd runc + mkdir -p /etc/apt/keyrings + declare -r dist=ubuntu + curl --silent --fail --show-error --location https://download.docker.com/linux/${dist}/gpg \ + | gpg --dearmor -o /etc/apt/keyrings/docker.gpg + declare lsb_release + lsb_release="$(lsb_release -cs)" + echo "deb [arch=${arch} signed-by=/etc/apt/keyrings/docker.gpg] https://download.docker.com/linux/${dist} ${lsb_release} stable" \ + | tee /etc/apt/sources.list.d/docker.list + apt_update + apt-get --quiet install -y --no-install-recommends docker-ce docker-ce-cli containerd.io +} + +function cleanup() { + apt-get --quiet autoremove -y + apt-get autoclean + apt-get clean + rm -rf /var/lib/apt/lists + cd / + go clean -cache +} + +if [[ ${VERBOSE} -eq 1 ]]; then + printf "=== SHELL ENVIRONMENT ===\n" + env +fi + +apt_update +install_misc +install_clang +install_golang "${BUILD_ARCH}" +install_docker "${BUILD_ARCH}" +install_python # should run after other 
install_* +cleanup diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-debian/install_go.sh b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-debian/install_go.sh new file mode 120000 index 00000000..211fd009 --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-debian/install_go.sh @@ -0,0 +1 @@ +../install_go.sh \ No newline at end of file diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-debian/install_golang_apps b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-debian/install_golang_apps new file mode 120000 index 00000000..acc9d5a3 --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-debian/install_golang_apps @@ -0,0 +1 @@ +../install_golang_apps \ No newline at end of file diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-debian/test/commands.yaml b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-debian/test/commands.yaml new file mode 100644 index 00000000..b3fc2beb --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/build-debian/test/commands.yaml @@ -0,0 +1,39 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# refer to docs at https://github.com/GoogleContainerTools/container-structure-test + +schemaVersion: 2.0.0 + +# command tests require the docker toolchain +commandTests: + - name: "curl version" + command: "curl" + args: ["--version"] + exitCode: 0 + + - name: "clang version" + command: "clang" + args: ["--version"] + exitCode: 0 + + - name: "bazel version" + command: "bazelisk" + args: ["version"] + exitCode: 0 + + - name: "docker image help" + command: "docker" + args: ["help", "image"] + exitCode: 0 diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/coverage-tools/Dockerfile b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/coverage-tools/Dockerfile new file mode 100644 index 00000000..c2f25551 --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/coverage-tools/Dockerfile @@ -0,0 +1,21 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
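+ +# Builds a minimal coverage-tools image: the install_apps script copied in below +# layers lcov and google-perftools onto the ubuntu:20.04 base.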
+ +FROM ubuntu:20.04 + +COPY install_apps /scripts/ + +RUN \ + /scripts/install_apps && \ + rm -rf /scripts diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/coverage-tools/install_apps b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/coverage-tools/install_apps new file mode 100755 index 00000000..72fd822a --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/coverage-tools/install_apps @@ -0,0 +1,54 @@ +#!/bin/bash + +set -o pipefail +set -o errexit + +declare -i VERBOSE=0 + +usage() { + local exitval=${1-1} + cat >&2 << USAGE +usage: + $0 + --verbose Emit verbose info. Default: false +USAGE + # shellcheck disable=SC2086 + exit ${exitval} +} + +while [[ $# -gt 0 ]]; do + case "$1" in + --verbose) + VERBOSE=1 + shift + ;; + -h | --help) usage 0 ;; + *) usage 0 ;; + esac +done + +function apt_update() { + apt-get --quiet -o 'Acquire::https::No-Cache=True' -o 'Acquire::http::No-Cache=True' update +} + +function install_misc() { + DEBIAN_FRONTEND=noninteractive apt-get --quiet install -y --no-install-recommends \ + lcov="1.*" \ + google-perftools="2.*" +} + +function clean_debian() { + apt-get --quiet autoremove -y + apt-get autoclean + apt-get clean + rm -rf /var/lib/apt/lists +} + +if [[ ${VERBOSE} -eq 1 ]]; then + printf "=== SHELL ENVIRONMENT ===\n" + env +fi + +apt_update +install_misc +clean_debian diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/coverage-tools/test/commands.yaml b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/coverage-tools/test/commands.yaml new file mode 100644 index 00000000..bf962e40 --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/coverage-tools/test/commands.yaml @@ -0,0 +1,39 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# refer to docs at https://github.com/GoogleContainerTools/container-structure-test + +schemaVersion: 2.0.0 + +# command tests require the docker toolchain +commandTests: + - name: "lcov version" + command: "lcov" + args: ["--version"] + exitCode: 0 + + - name: "clang version" + command: "clang" + args: ["--version"] + exitCode: 0 + + - name: "bazel version" + command: "bazelisk" + args: ["version"] + exitCode: 0 + + - name: "docker image help" + command: "docker" + args: ["help", "image"] + exitCode: 0 diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/generate_system_bazelrc b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/generate_system_bazelrc new file mode 100755 index 00000000..9c8e419f --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/generate_system_bazelrc @@ -0,0 +1,83 @@ +#!/bin/bash + +# Generate a system bazelrc file +# Designed to be executed when generating a container image, for example from Dockerfile. +# For info on bazelrc files, refer to https://bazel.build/run/bazelrc?hl=en. 
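+# Bazel reads this system rc file (/etc/bazel.bazelrc) before the workspace and +# user rc files, so the settings generated here apply to every bazel invocation in +# the container unless a later rc file overrides them.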
+ +set -o pipefail +set -o errexit + +function usage() { + local exitval=${1-1} + cat &>/dev/stderr << USAGE usage: + $0 + --user-root-name Name of bazel user-root directory (within bazel cache dir) USAGE + # shellcheck disable=SC2086 + exit ${exitval} +} + +while [[ $# -gt 0 ]]; do + case "$1" in + --user-root-name) + USER_ROOT_NAME="$2" + shift + shift + ;; + -h | --help) usage 0 ;; + *) + printf "unrecognized arg: %s\n" "$1" + usage + break + ;; + esac +done + +if [[ -z ${USER_ROOT_NAME} ]]; then + printf -- "error: --user-root-name must be specified\n" &>/dev/stderr + usage 1 +fi + +function _get_toolchains_hash() { + { + # emit versions of all tools relevant to builds + uname --machine + cat /etc/os-release + clang --version + /opt/bin/python3 --version + } | sha256sum | cut --delimiter=" " --fields=1 +} + +TOOLCHAINS_HASH=$(_get_toolchains_hash) +readonly TOOLCHAINS_HASH + +readonly BAZELRC="/etc/bazel.bazelrc" +readonly BAZEL_ROOT=/bazel_root +readonly BAZEL_OUTPUT_USER_ROOT="${BAZEL_ROOT}/build_${USER_ROOT_NAME}_${TOOLCHAINS_HASH:0:7}" +mkdir -p "${BAZEL_OUTPUT_USER_ROOT}" + +# Within the running container, globally set these variables. This is less +# obvious than using Dockerfile's ENV, but this particular value isn't +# available to the Dockerfile. +# Note: The /etc/profile.d/bazel_env.sh will often be sourced automatically, +# but this may only occur for login shells. For bash, the --login +# flag should be supplied. Otherwise, the bazel_env.sh file can be +# sourced directly. +cat >/etc/profile.d/bazel_env.sh <<EOF +echo "startup --output_base=${BAZEL_OUTPUT_USER_ROOT}/\$(pwd | sha256sum | cut --delimiter=' ' --fields=1)" >> \$HOME/.bazelrc +EOF + +printf "generating %s\n" "${BAZELRC}" &>/dev/stderr + +# the output_user_root setting will have no effect as long as output_base is set. However, in the +# case that $HOME/.bazelrc is not created via sourcing /etc/profile.d/bazel_env.sh, at least +# the appropriate container-specific output_user_root directory will be used +cat <<BAZELRC >"${BAZELRC}" +startup --output_user_root="${BAZEL_OUTPUT_USER_ROOT}" +# set a variable based on the hash of all build tool dependencies other than bazel itself +# primarily to avoid spurious cache hits for distinct sets of toolchains +build --action_env=TOOLCHAINS_HASH=${TOOLCHAINS_HASH} +BAZELRC diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/get_workspace_mount b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/get_workspace_mount new file mode 100755 index 00000000..517b2ce3 --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/get_workspace_mount @@ -0,0 +1,35 @@ +#!/bin/bash + +function get_docker_workspace_mount() { + # when running inside a docker container, expect /.dockerenv to exist + if ! [[ -f /.dockerenv ]]; then + printf "Error: %s must execute inside a docker container\n" "$(basename "${BASH_SOURCE[0]}")" &>/dev/stderr + exit 1 + fi + + # if running inside a docker container, the workspace mount point cannot be + # determined by git or bazel or inspecting the filesystem itself. Instead, we + # need to use docker to expose its mount info for the /src/workspace path.
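+ # For example, given a container started with a (hypothetical) host checkout, e.g. + # docker run --volume /home/user/repo:/src/workspace ..., the docker inspect call + # below prints /home/user/repo.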
+ # determine the current container's ID + local -r CONTAINER_ID="$(uname --nodename)" + # use docker inspect to extract the current mount path for /src/workspace + # this format string is a golang template (https://pkg.go.dev/text/template) processed + # by docker's --format flag, per https://docs.docker.com/config/formatting/ + # shellcheck disable=SC2016 + local -r FORMAT_STR=' + {{- range $v := .HostConfig.Binds -}} + {{$pathpair := split $v ":" -}} + {{if eq (index $pathpair 1) "/src/workspace" -}} + {{print (index $pathpair 0) -}} + {{end -}} + {{end -}} + ' + local -r MOUNT_PATH="$(docker inspect --format "${FORMAT_STR}" "${CONTAINER_ID}")" + if [[ -z ${MOUNT_PATH} ]]; then + printf "Error: Unable to determine mount point for /src/workspace. Exiting\n" &>/dev/stderr + exit 1 + fi + printf "%s" "${MOUNT_PATH}" +} + +get_docker_workspace_mount diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/gitconfig b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/gitconfig new file mode 100644 index 00000000..eaf257f4 --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/gitconfig @@ -0,0 +1,2 @@ +[safe] + directory = /src/workspace diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/install_go.sh b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/install_go.sh new file mode 100644 index 00000000..a436d20a --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/install_go.sh @@ -0,0 +1,50 @@ +#!/bin/bash +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# shell library to install and uninstall golang + +function _golang_install_dir() { + printf "/usr/local/go\n" +} + +function install_golang() { + declare -r _ARCH="$1" + declare -r FNAME=gobin.tar.gz + declare -r VERSION=1.20.4 + # shellcheck disable=SC2155 + declare -r GO_INSTALL_DIR="$(_golang_install_dir)" + declare -r -A GO_HASHES=( + [amd64]="698ef3243972a51ddb4028e4a1ac63dc6d60821bf18e59a807e051fee0a385bd" + [arm64]="105889992ee4b1d40c7c108555222ca70ae43fccb42e20fbf1eebb822f5e72c6" + ) + declare -r GO_HASH=${GO_HASHES[${_ARCH}]} + if [[ -z ${GO_HASH} ]]; then + printf "Unrecognized or unsupported architecture for golang: %s\n" "${_ARCH}" &>/dev/stderr + exit 1 + fi + + curl --silent -fSL --output ${FNAME} "https://go.dev/dl/go${VERSION}.linux-${_ARCH}.tar.gz" + echo "${GO_HASH} ${FNAME}" | sha256sum -c + tar --directory /usr/local -xzf ${FNAME} + rm -f ${FNAME} + update-alternatives --install /usr/bin/go go "${GO_INSTALL_DIR}"/bin/go 100 + + go version +} + +function remove_golang() { + update-alternatives --remove-all go + rm -rf "$(_golang_install_dir)" +} diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/install_golang_apps b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/install_golang_apps new file mode 100755 index 00000000..4254e438 --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/install_golang_apps @@ -0,0 +1,46 @@ +#!/bin/bash + +set -o pipefail +set -o errexit + +BAZEL_PATH=/usr/bin + +usage() { + local exitval=${1-1} + cat >&2 << USAGE +usage: + $0 + --verbose Emit verbose info. Default: false +USAGE + # shellcheck disable=SC2086 + exit ${exitval} +} + +while [[ $# -gt 0 ]]; do + case "$1" in + --verbose) + set -o xtrace + shift + ;; + -h | --help) usage 0 ;; + *) + usage + break + ;; + esac +done + +function install_bazelisk() { + go install github.com/bazelbuild/bazelisk@v1.19.0 + BAZELISK="$(go env GOPATH)"/bin/bazelisk + if [[ -n ${BAZEL_PATH} ]] && [[ -d ${BAZEL_PATH} ]]; then + ln -s "${BAZELISK}" "${BAZEL_PATH}"/bazel + fi + # install the specified version of bazel + USE_BAZEL_VERSION="$(cat /scripts/.bazelversion)" + export USE_BAZEL_VERSION + "${BAZELISK}" version + rm -rf /bazel_root/* +} + +install_bazelisk diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/presubmit/.pre-commit-config.yaml b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/presubmit/.pre-commit-config.yaml new file mode 120000 index 00000000..9971a06a --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/presubmit/.pre-commit-config.yaml @@ -0,0 +1 @@ +../../etc/.pre-commit-config.yaml \ No newline at end of file diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/presubmit/Dockerfile b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/presubmit/Dockerfile new file mode 100644 index 00000000..024c05fe --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/presubmit/Dockerfile @@ -0,0 +1,33 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +FROM ubuntu:20.04 + +COPY install_apps install_go.sh .pre-commit-config.yaml /scripts/ +COPY gitconfig /etc + +ARG PRE_COMMIT_VENV_DIR=/usr/pre-commit-venv +ARG TARGETARCH +ENV BUILD_ARCH="${TARGETARCH}" \ + WORKSPACE=/src/workspace \ + PRE_COMMIT_HOME=/var/cache/pre-commit \ + PRE_COMMIT_TOOL=${PRE_COMMIT_VENV_DIR}/bin/pre-commit \ + TZ=Etc/UTC + +RUN \ + chmod 644 /etc/gitconfig && \ + /usr/bin/env -v PRE_COMMIT_VENV_DIR=${PRE_COMMIT_VENV_DIR} /scripts/install_apps && \ + rm -rf /scripts + +ENV PATH="${PATH}:/usr/local/go/bin" diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/presubmit/gitconfig b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/presubmit/gitconfig new file mode 120000 index 00000000..db62a00c --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/presubmit/gitconfig @@ -0,0 +1 @@ +../gitconfig \ No newline at end of file diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/presubmit/install_apps b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/presubmit/install_apps new file mode 100755 index 00000000..883ef650 --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/presubmit/install_apps @@ -0,0 +1,128 @@ +#!/bin/bash + +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -o pipefail +set -o errexit + +declare -i VERBOSE=0 + +usage() { + local exitval=${1-1} + cat >&2 << USAGE +usage: + $0 + --verbose Emit verbose info. 
Default: false +USAGE + # shellcheck disable=SC2086 + exit ${exitval} +} + +while [[ $# -gt 0 ]]; do + case "$1" in + --verbose) + VERBOSE=1 + set -o xtrace + shift + ;; + -h | --help) usage 0 ;; + *) usage 0 ;; + esac +done + +SCRIPT_DIR="$(dirname "$(readlink -f "$0")")" +readonly SCRIPT_DIR + +function apt_update() { + apt-get --quiet -o 'Acquire::https::No-Cache=True' -o 'Acquire::http::No-Cache=True' update +} + +function install_packages() { + DEBIAN_FRONTEND=noninteractive apt-get --quiet install -y --no-install-recommends \ + apt-transport-https="2.0.*" \ + ca-certificates \ + libcurl4="7.68.*" \ + curl="7.68.*" \ + gnupg="2.2.*" \ + lsb-release="11.1.*" \ + openjdk-11-jre="11.0.*" \ + python3.9-venv="3.9.*" \ + shellcheck="0.7.*" \ + software-properties-common="0.99.*" \ + wget="1.20.*" + update-alternatives --install /usr/bin/python3 python3 /usr/bin/python3.9 100 +} + +# Install Docker (https://docs.docker.com/engine/install/debian/) +function install_docker() { + apt-get --quiet remove docker docker.io containerd runc + local -r KEYRING_DIR=/etc/apt/keyrings + mkdir -p "${KEYRING_DIR}" + local -r DIST=ubuntu + curl --silent --fail --show-error --location https://download.docker.com/linux/${DIST}/gpg \ + | /usr/bin/gpg --dearmor -o "${KEYRING_DIR}"/docker.gpg + local -r LSB_RELEASE="$(/usr/bin/lsb_release -cs)" + echo "deb [arch=${ARCH} signed-by=${KEYRING_DIR}/docker.gpg] https://download.docker.com/linux/${DIST} ${LSB_RELEASE} stable" \ + | tee /etc/apt/sources.list.d/docker.list + apt_update + DEBIAN_FRONTEND=noninteractive apt-get --quiet install -y docker-ce docker-ce-cli containerd.io +} + +function install_precommit() { + /usr/bin/python3.9 -m venv "${PRE_COMMIT_VENV_DIR}" + "${PRE_COMMIT_VENV_DIR}"/bin/pip install pre-commit~=3.1 + "${PRE_COMMIT_TOOL}" --version + + # initialize pre-commit cache, which needs a git repo (a temporary will suffice) + local -r GIT_REPO="$(mktemp -d)" + git init "${GIT_REPO}" + cd "${GIT_REPO}" + # run pre-commit with no files in scope to only trigger population of the cache + { + cat "${SCRIPT_DIR}"/.pre-commit-config.yaml + printf "\nfiles: ^$\n" + } > .pre-commit-config.yaml + git add .pre-commit-config.yaml + "${PRE_COMMIT_TOOL}" run --config .pre-commit-config.yaml || true + rm -rf "${GIT_REPO}" +} + +function cleanup() { + apt-get --quiet autoremove -y + apt-get autoclean + apt-get clean + rm -rf /var/lib/apt/lists + cd / + go clean -cache +} + +if [[ ${VERBOSE} -eq 1 ]]; then + printf "=== SHELL ENVIRONMENT ===\n" + env +fi + +ARCH=$(dpkg --print-architecture) +readonly ARCH + +apt_update +install_packages + +# shellcheck disable=SC1090 +. 
"${SCRIPT_DIR}"/install_go.sh +# golang is used to compile go-based hooks +install_golang "${ARCH}" +install_docker +install_precommit +cleanup diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/presubmit/install_go.sh b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/presubmit/install_go.sh new file mode 120000 index 00000000..211fd009 --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/presubmit/install_go.sh @@ -0,0 +1 @@ +../install_go.sh \ No newline at end of file diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/release/Dockerfile b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/release/Dockerfile new file mode 100644 index 00000000..e7a05704 --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/release/Dockerfile @@ -0,0 +1,33 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +FROM golang:1.19.3-alpine3.16 AS golang-1.19 +FROM node:18.9.0-alpine3.16 + +COPY --from=golang-1.19 /usr/local/go/ /usr/local/go/ +ENV PATH="${PATH}:/usr/local/go/bin" + +COPY install_release_apps /scripts/ +COPY gitconfig /etc + +ARG TARGETARCH +ENV BUILD_ARCH="${TARGETARCH}" \ + WORKSPACE=/src/workspace + +RUN \ + chmod 644 /etc/gitconfig && \ + /scripts/install_release_apps && \ + rm -rf /scripts + +ENTRYPOINT ["/bin/sh"] diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/release/gitconfig b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/release/gitconfig new file mode 100644 index 00000000..a72cd197 --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/release/gitconfig @@ -0,0 +1,3 @@ +[user] + email = nobody@google.com + name = Privacy Sandbox Release System diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/release/install_release_apps b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/release/install_release_apps new file mode 100755 index 00000000..e6c12253 --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/release/install_release_apps @@ -0,0 +1,8 @@ +#!/bin/sh + +npm install --global commit-and-tag-version@10.1.0 + +# Install the GitHub CLI tool (https://cli.github.com/) +apk add github-cli + +GOBIN=/usr/local/go/bin go install github.com/bazelbuild/buildtools/buildozer@6.0.1 diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/release/test/commands.yaml b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/release/test/commands.yaml new file mode 100644 index 00000000..62ac6631 --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/release/test/commands.yaml @@ -0,0 +1,29 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# refer to docs at https://github.com/GoogleContainerTools/container-structure-test + +schemaVersion: 2.0.0 + +# command tests require the docker toolchain +commandTests: + - name: npm version + command: "npm" + args: ["--version"] + exitCode: 0 + + - name: commit-and-tag-version help + command: "commit-and-tag-version" + args: ["--help"] + exitCode: 0 diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/release/test/structure.yaml b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/release/test/structure.yaml new file mode 100644 index 00000000..1d704640 --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/release/test/structure.yaml @@ -0,0 +1,23 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# refer to docs at https://github.com/GoogleContainerTools/container-structure-test + +schemaVersion: 2.0.0 + +fileExistenceTests: + - name: git config + path: /etc/gitconfig + shouldExist: true + permissions: "-r--r--r--" diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/test-tools/Dockerfile b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/test-tools/Dockerfile new file mode 100644 index 00000000..45fab3cf --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/test-tools/Dockerfile @@ -0,0 +1,47 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
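+ +# Multi-stage build: compile slowhttptest and wrk2 from source, build the go-based +# load tools (ghz, cassowary), then assemble a small alpine image that also bundles +# grpcurl and the packages added by install_apps.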
+ +FROM alpine:3.18 as slowhttptest_builder +# hadolint ignore=DL3018 +RUN apk add --no-cache autoconf automake build-base git openssl-dev +WORKDIR /build +ADD https://github.com/shekyan/slowhttptest/archive/refs/tags/v1.9.0.tar.gz /build/src.tar.gz +RUN tar xz --strip-components 1 -f src.tar.gz && ./configure && make + +FROM alpine:3.18 as wrk_builder +ARG TARGETARCH +ENV BUILD_ARCH="${TARGETARCH}" +COPY build_wrk /build/ +WORKDIR /build +ADD https://github.com/giltene/wrk2/archive/44a94c17d8e6a0bac8559b53da76848e430cb7a7.tar.gz /build/src.tar.gz +RUN /build/build_wrk + +FROM golang:1.21-alpine3.18 AS golang +ENV GOBIN=/usr/local/go/bin +COPY build_golang_apps /scripts/ +RUN /scripts/build_golang_apps + +FROM fullstorydev/grpcurl:v1.8.9-alpine AS grpcurl + +FROM alpine:3.18 +COPY --from=golang /usr/local/go/bin/* /usr/local/bin/ +COPY --from=grpcurl /bin/grpcurl /usr/local/bin/ +ARG TARGETARCH +ENV BUILD_ARCH="${TARGETARCH}" \ + PATH="${PATH}:/usr/local/go/bin" \ + GOBIN=/usr/local/go/bin +COPY install_apps /scripts/ +RUN /scripts/install_apps +COPY --from=slowhttptest_builder /build/src/slowhttptest /usr/bin/ +COPY --from=wrk_builder /build/wrk /usr/bin/wrk2 diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/test-tools/build_golang_apps b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/test-tools/build_golang_apps new file mode 100755 index 00000000..b79e20c4 --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/test-tools/build_golang_apps @@ -0,0 +1,16 @@ +#!/bin/busybox sh + +set -o errexit + +install_ghz() { + go install github.com/bojand/ghz/cmd/ghz@v0.114.0 + ghz --help +} + +install_cassowary() { + go install github.com/rogerwelin/cassowary/cmd/cassowary@v0.16.0 + cassowary --help +} + +install_ghz +install_cassowary diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/test-tools/build_wrk b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/test-tools/build_wrk new file mode 100755 index 00000000..d843dd06 --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/test-tools/build_wrk @@ -0,0 +1,27 @@ +#!/bin/busybox sh + +set -o errexit +set -o xtrace + +install_no_wrk() { + cat <<EOF >/build/wrk +#!/bin/busybox sh +PROGRAM_NAME="\$(basename \$0)" +printf "Error: %s not supported on Aarch64\n" "\${PROGRAM_NAME}" +printf "build_arch: %s\n" "${BUILD_ARCH}" >/dev/stderr +exit 1 +EOF + chmod 755 /build/wrk +} + +install_wrk() { + apk add --no-cache build-base git openssl-dev zlib-dev + tar xz --strip-components 1 -f src.tar.gz + make -j6 +} + +if [[ ${BUILD_ARCH} == arm64 ]]; then + install_no_wrk +else + install_wrk +fi diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/test-tools/install_apps b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/test-tools/install_apps new file mode 100755 index 00000000..597f937b --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/test-tools/install_apps @@ -0,0 +1,28 @@ +#!/bin/busybox sh + +set -o errexit + +install_packages() { + apk --no-cache add \ + bash~=5 \ + curl~=8 \ + jq~=1 \ + libstdc++ +} + +install_nghttp2() { + apk --no-cache add \ + nghttp2~=1 + h2load --version +} + +install_apache2_utils() { + apk --no-cache add \ + apache2-utils~=2.4 + ab -V +} + +apk --no-cache update +install_packages +install_nghttp2 +install_apache2_utils diff --git
a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/utils/Dockerfile b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/utils/Dockerfile new file mode 100644 index 00000000..0d1defd6 --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/images/utils/Dockerfile @@ -0,0 +1,19 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +FROM alpine:3.16 + +RUN apk --no-cache add \ + unzip~=6.0 \ + zip~=3.0 diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tests/data/hashes/build-amazonlinux2 b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tests/data/hashes/build-amazonlinux2 new file mode 100644 index 00000000..b9750ae4 --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tests/data/hashes/build-amazonlinux2 @@ -0,0 +1 @@ +b35ee4e2b6210bc66e97083805642945433b0ef648a9d297a2a4ccdfb518413f diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tests/data/hashes/build-amazonlinux2023 b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tests/data/hashes/build-amazonlinux2023 new file mode 100644 index 00000000..7b708170 --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tests/data/hashes/build-amazonlinux2023 @@ -0,0 +1 @@ +7501f9f920a794ba6fa7c6cabb17d076ed3db0faf3fe5393e2904ce71978dec5 diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tests/data/hashes/build-debian b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tests/data/hashes/build-debian new file mode 100644 index 00000000..0e9a76c9 --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tests/data/hashes/build-debian @@ -0,0 +1 @@ +a99a8af64d61e3eb635aecfb81d229437890693cd6f6d33bd269e9f1c55ca760 diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tests/data/hashes/coverage-tools b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tests/data/hashes/coverage-tools new file mode 100644 index 00000000..f0336331 --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tests/data/hashes/coverage-tools @@ -0,0 +1 @@ +cd3fb189dd23793af3bdfa02d6774ccb35bddbec7059761e25c4f7be4c1e8ca1 diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tests/data/hashes/presubmit b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tests/data/hashes/presubmit new file mode 100644 index 00000000..ada65387 --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tests/data/hashes/presubmit @@ -0,0 +1 @@ +79f2509c74607ab33bc5fe3f425f61ddf89e2f75efc299b1a5a7a3039367c90e diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tests/data/hashes/release b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tests/data/hashes/release new file mode 100644 index 00000000..e5218ba2 --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tests/data/hashes/release @@ -0,0 +1 @@ 
+d60fb40a53b1704f7ac353d0d036f49eac10bfd08ccb19f9b436acf8bdf2cb79 diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tests/data/hashes/test-tools b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tests/data/hashes/test-tools new file mode 100644 index 00000000..fc5e0b5c --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tests/data/hashes/test-tools @@ -0,0 +1 @@ +dd1ec6137d4dd22fec555044cd85f484adfa6c7b686880ea5449cff936bad34e diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tests/data/hashes/utils b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tests/data/hashes/utils new file mode 100644 index 00000000..da29b2fa --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tests/data/hashes/utils @@ -0,0 +1 @@ +9fca27d931acc2bc96fa0560466cc0914a0d1cc73fb8749af057caacf2911f85 diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tests/run-tests b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tests/run-tests new file mode 100755 index 00000000..f98389a4 --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tests/run-tests @@ -0,0 +1,264 @@ +#!/bin/bash + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Test runner for the build system. This script should preferably be run +# "directly" and not inside docker, permitting the test cases to execute +# both inside and outside a docker container. + +set -o pipefail +set -o errexit + +trap _cleanup EXIT +function _cleanup() { + local -r -i STATUS=$? 
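+ # STATUS captures the exit code that triggered the trap, so the cleanup below + # cannot mask it; it is reported and re-raised via exit.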
+ if [[ -d ${TMP_HASHES_DIR1} ]]; then + rm -rf "${TMP_HASHES_DIR1}" "${TMP_HASHES_DIR2}" + fi + if [[ ${STATUS} -ne 0 ]]; then + printf "Error: run-tests status code: %d\n" "${STATUS}" + sleep 5s + fi + exit ${STATUS} +} + +function get_image_list() { + local -r _images_dir="$1" + find "${_images_dir}" -maxdepth 1 -mindepth 1 -type d -printf "%P\n" \ + | sort +} + +function usage() { + declare -r -i exitval=${1-1} + cat &>/dev/stderr <<USAGE usage: + $0 <options> + --image Run tests only for specified image + --fast Only generate hashes directly rather than also using cbuild + --build-images Build the images + --verbose Produce verbose output +USAGE + # shellcheck disable=SC2086 + exit ${exitval} +} + +declare -i FAST=0 +declare -i VERBOSE=0 +declare -i BUILD_IMAGES=0 +declare IMAGE + +while [[ $# -gt 0 ]]; do + case "$1" in + --image) + IMAGE="$2" + if [[ -z ${IMAGE} ]]; then + usage + fi + shift 2 || usage + ;; + --fast) + FAST=1 + shift + ;; + --build-images) + BUILD_IMAGES=1 + shift + ;; + --verbose) + VERBOSE=1 + shift + ;; + -h | --help) usage 0 ;; + *) + printf "unrecognized arg: %s\n" "$1" + usage + ;; + esac +done + +TESTS_DIR="$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")" +readonly TESTS_DIR +readonly TOOLS_DIR="${TESTS_DIR}"/../tools +readonly HASHES_DIR="${TESTS_DIR}"/data/hashes +readonly IMAGES_DIR="${TESTS_DIR}"/../images +if [[ -n ${IMAGE} ]]; then + IMAGE_LIST="${IMAGE}" +else + IMAGE_LIST="$(get_image_list "${IMAGES_DIR}")" +fi +readonly IMAGE_LIST +declare -i RETCODE=0 + +function cli_tests_test-tools() { + declare -r _image="$1" + if [[ ${FAST} -eq 1 ]] || [[ ${_image} != test-tools ]]; then + return + fi + declare -a -r TOOLS=( + "${TOOLS_DIR}/curl --version" + "${TOOLS_DIR}/ghz --version" + "${TOOLS_DIR}/ab -V" + "${TOOLS_DIR}/cassowary --version" + "${TOOLS_DIR}/grpcurl -version" + "${TOOLS_DIR}/h2load --version" + "${TOOLS_DIR}/hadolint --version" + "${TOOLS_DIR}/jq --version" + ) + printf "Testing test CLI tool wrappers\n" + for tool_cmd in "${TOOLS[@]}"; do + printf " %s\n" "${tool_cmd}" + ${tool_cmd} &>/dev/null + done +} + +function cli_tests_misc() { + declare -a -r TOOLS=( + "${TOOLS_DIR}/aws-cli help" + "${TOOLS_DIR}/awscurl --help" + ) + printf "Testing misc CLI tool wrappers\n" + for tool_cmd in "${TOOLS[@]}"; do + printf " %s\n" "${tool_cmd}" + ${tool_cmd} &>/dev/null + done +} + +function cli_tests_utils() { + declare -r _image="$1" + if [[ ${FAST} -eq 1 ]] || [[ ${_image} != utils ]]; then + return + fi + declare -a -r TOOLS=( + "${TOOLS_DIR}/unzip -h" + "${TOOLS_DIR}/zip -h" + ) + printf "Testing utils CLI tool wrappers\n" + for tool_cmd in "${TOOLS[@]}"; do + printf " %s\n" "${tool_cmd}" + ${tool_cmd} &>/dev/null + done +} + +function cli_tests_release() { + declare -r _image="$1" + if [[ ${FAST} -eq 1 ]] || [[ ${_image} != release ]]; then + return + fi + declare -a -r TOOLS=( + "${TOOLS_DIR}/buildozer -version" + ) + printf "Testing release CLI tool wrappers\n" + for tool_cmd in "${TOOLS[@]}"; do + printf " %s\n" "${tool_cmd}" + ${tool_cmd} &>/dev/null + done +} + +function create_temp_hash_dir() { + local -r DIR="$(mktemp --directory)" + cp "${HASHES_DIR}"/* "${DIR}" + printf "%s" "${DIR}" +} + +# warning when running inside a docker container +if [[ -f /.dockerenv ]]; then + printf "warning: Executing within docker container, which obviates testing in a non-docker environment\n" &>/dev/stderr +fi + +declare -a GET_BUILDER_IMAGE_ARGS=( + --sha-only +) +if [[ ${BUILD_IMAGES} -eq 0 ]]; then + GET_BUILDER_IMAGE_ARGS+=(--no-build) +fi + +function generate_hashes_direct() {
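+ # hash each builder image via get-builder-image-tagged into a temp dir; any drift + # from the checked-in baselines in data/hashes is reported, copied over the + # baselines, and reflected in a non-zero RETCODE +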
TMP_HASHES_DIR1=$(create_temp_hash_dir) + printf "Generating image hashes (direct mode)\n" + + for img in ${IMAGE_LIST}; do + # shellcheck disable=SC2086 + if ! "${TOOLS_DIR}"/get-builder-image-tagged "${GET_BUILDER_IMAGE_ARGS[@]}" --image ${img} >"${TMP_HASHES_DIR1}/${img}"; then + printf "Error generating image hash: %s\n" "${img}" &>/dev/stderr + RETCODE+=1 + fi + done + + BASELINE_UPDATES="$(diff --brief "${HASHES_DIR}" "${TMP_HASHES_DIR1}" || true)" + readonly BASELINE_UPDATES + if [[ -n $BASELINE_UPDATES ]]; then + # shellcheck disable=SC2086 + printf "detected shift in baseline files:\n%s\n" "${BASELINE_UPDATES}" + if [[ ${VERBOSE} -eq 1 ]]; then + diff "${HASHES_DIR}" "${TMP_HASHES_DIR1}" || true + fi + cp --force "${TMP_HASHES_DIR1}"/* "${HASHES_DIR}" + RETCODE+=10 + else + printf "hashes unchanged\n" + fi +} + +function generate_hashes_cbuild() { + TMP_HASHES_DIR2=$(create_temp_hash_dir) + + printf "Generating image hashes (cbuild mode)\n" + for img in ${IMAGE_LIST}; do + if ! "${TOOLS_DIR}"/cbuild --image build-debian --cmd "tools/get-builder-image-tagged ${GET_BUILDER_IMAGE_ARGS[*]} --image ${img}" >"${TMP_HASHES_DIR2}/${img}"; then + printf "Error generating image hash: %s\n" "${img}" &>/dev/stderr + RETCODE+=1 + fi + done + + MODE_MISMATCH="$(diff --brief "${TMP_HASHES_DIR1}" "${TMP_HASHES_DIR2}" || true)" + readonly MODE_MISMATCH + if [[ -n $MODE_MISMATCH ]]; then + # shellcheck disable=SC2086 + printf "Error: mismatch between direct and cbuild modes\n%s" "${MODE_MISMATCH}" &>/dev/stderr + if [[ ${VERBOSE} -eq 1 ]]; then + diff "${TMP_HASHES_DIR1}" "${TMP_HASHES_DIR2}" || true + fi + RETCODE+=100 + else + printf "hashes unchanged\n" + fi +} + +generate_hashes_direct + +if [[ ${FAST} -eq 1 ]]; then + exit ${RETCODE} +fi + +generate_hashes_cbuild + +# CLI tests + +cli_tests_misc +for img in ${IMAGE_LIST}; do + cli_tests_test-tools "${img}" + cli_tests_utils "${img}" + cli_tests_release "${img}" +done + +printf "test for error in direct execution of underlying wrapper scripts\n" +for tool in test-tool utils-tool; do + printf " %s: " "${tool}" + if ! "${TOOLS_DIR}"/${tool} &>/dev/null; then + printf "ok\n" + else + printf "failed to exit non-zero\n" + fi +done + +exit ${RETCODE} diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/ab b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/ab new file mode 120000 index 00000000..d609abda --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/ab @@ -0,0 +1 @@ +test-tool \ No newline at end of file diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/aws-cli b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/aws-cli new file mode 100755 index 00000000..0ded1392 --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/aws-cli @@ -0,0 +1,70 @@ +#!/usr/bin/env bash + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
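+ +# Wrapper that runs the AWS CLI inside the amazon/aws-cli docker image, forwarding +# the AWS credential env vars listed below and mounting ~/.aws and the workspace +# into the container.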
+ +# environment variables (all optional): +# WORKSPACE repo root directory, must be an absolute path +# EXTRA_DOCKER_RUN_ARGS additional arguments to pass to docker run invocations +# +# environment variables exported to AWS CLI +# For more info on supported env vars, see: +# https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-envvars.html +# +# AWS_ACCESS_KEY_ID +# AWS_SECRET_ACCESS_KEY +# AWS_SESSION_TOKEN +# AWS_REGION +# AWS_DEFAULT_REGION +# AWS_PROFILE + +set -o errexit + +# shellcheck disable=SC1090 +source "$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")"/builder.sh + +declare -a ENV_VARS +builder::add_aws_env_vars ENV_VARS +ENV_VARS+=( + "HOME=/home" +) + +declare -a DOCKER_RUN_ARGS +DOCKER_RUN_ARGS+=( + "--rm" + "$(echo "${EXTRA_DOCKER_RUN_ARGS}" | envsubst)" +) +for evar in "${ENV_VARS[@]}" +do + DOCKER_RUN_ARGS+=( + "--env=${evar}" + ) +done +if [[ -t 0 ]] && [[ -t 1 ]]; then + # stdin and stdout are open, assume it's an interactive tty session + DOCKER_RUN_ARGS+=( + "--interactive" + "--tty" + ) +fi + +REL_PWD="$(realpath --relative-to="${WORKSPACE}" "$(pwd)")" +readonly REL_PWD + +# shellcheck disable=SC2068 +docker run \ + ${DOCKER_RUN_ARGS[@]} \ + --user "$(id -u):$(id -g)" \ + --volume "${HOME}"/.aws/:/home/.aws/ \ + --volume "${WORKSPACE}":/src/workspace \ + --workdir /src/workspace/"${REL_PWD}" \ + amazon/aws-cli:latest "$@" diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/awscurl b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/awscurl new file mode 100755 index 00000000..355d58e1 --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/awscurl @@ -0,0 +1,71 @@ +#!/usr/bin/env bash + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
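+ +# Wrapper that runs awscurl via the ghcr.io/okigan/awscurl docker image, with the +# same env-var forwarding and ~/.aws and workspace mounts as the aws-cli wrapper.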
+ +# environment variables (all optional): +# WORKSPACE repo root directory, must be an absolute path +# EXTRA_DOCKER_RUN_ARGS additional arguments to pass to docker run invocations +# +# environment variables exported to AWS CLI +# For more info on supported env vars, see: +# https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-envvars.html +# +# AWS_ACCESS_KEY_ID +# AWS_SECRET_ACCESS_KEY +# AWS_SESSION_TOKEN +# AWS_REGION +# AWS_DEFAULT_REGION +# AWS_PROFILE + +set -o errexit + +# shellcheck disable=SC1090 +source "$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")"/builder.sh + +declare -a ENV_VARS +builder::add_aws_env_vars ENV_VARS +ENV_VARS+=( + "HOME=/home" + "PYTHONPATH=/" +) + +declare -a DOCKER_RUN_ARGS +DOCKER_RUN_ARGS+=( + --rm + --interactive + "$(echo "${EXTRA_DOCKER_RUN_ARGS}" | envsubst)" +) +for evar in "${ENV_VARS[@]}" +do + DOCKER_RUN_ARGS+=( + "--env=${evar}" + ) +done +if [[ -t 0 ]] && [[ -t 1 ]]; then + # stdin and stdout are open, assume it's an interactive tty session + DOCKER_RUN_ARGS+=( + --tty + ) +fi + +REL_PWD="$(realpath --relative-to="${WORKSPACE}" "$(pwd)")" +readonly REL_PWD + +# shellcheck disable=SC2068 +docker run \ + ${DOCKER_RUN_ARGS[@]} \ + --user "$(id -u):$(id -g)" \ + --volume "${HOME}"/.aws/:/home/.aws/ \ + --volume "${WORKSPACE}":/src/workspace \ + --workdir /src/workspace/"${REL_PWD}" \ + ghcr.io/okigan/awscurl:latest "$@" diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/bazel-amazonlinux2 b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/bazel-amazonlinux2 new file mode 120000 index 00000000..98d6d309 --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/bazel-amazonlinux2 @@ -0,0 +1 @@ +bazel-debian \ No newline at end of file diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/bazel-amazonlinux2023 b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/bazel-amazonlinux2023 new file mode 120000 index 00000000..98d6d309 --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/bazel-amazonlinux2023 @@ -0,0 +1 @@ +bazel-debian \ No newline at end of file diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/bazel-debian b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/bazel-debian new file mode 100755 index 00000000..843b3b6e --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/bazel-debian @@ -0,0 +1,69 @@ +#!/usr/bin/env bash + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
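+ +# Runs bazel inside the build-debian container via cbuild; arguments after a "--" +# are kept separate from the bazel arguments so they can be quoted and appended as +# application args (e.g. for bazel run).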
+ +# environment variables supported by cbuild (all optional): +# WORKSPACE Set the path to the workspace (repo root) +# AWS_ACCESS_KEY_ID AWS auth token +# AWS_SECRET_ACCESS_KEY AWS auth token +# BAZEL_STARTUP_ARGS Additional startup arguments to pass to bazel invocations +# BAZEL_EXTRA_ARGS Additional command arguments to pass to bazel invocations +# EXTRA_DOCKER_RUN_ARGS Additional arguments to pass to docker run invocations + +set -o pipefail +set -o errexit + +function partition_array() { + if [[ ${#@} -ne 3 ]]; then + printf "insufficient args\n" &>/dev/stderr + return 1 + fi + declare -n arr=$1 + declare -n pre=$2 + declare -n post=$3 + if [[ ${#arr[@]} -eq 0 ]]; then + return + fi + + declare -r delim="--" + declare target=pre + for i in "${!arr[@]}"; do + if [[ ${arr[$i]} == "${delim}" ]]; then + target=post + fi + if [[ ${target} == pre ]]; then + pre+=("${arr[$i]}") + else + post+=("${arr[$i]}") + fi + done +} + +SCRIPT_NAME="$(basename "${BASH_SOURCE[0]}")" +readonly SCRIPT_NAME +readonly IMAGE=build-"${SCRIPT_NAME/bazel-}" +CBUILD=$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")/cbuild +readonly CBUILD + +# partition the command-line args into "bazel args" and "app args", where the +# app args are all those after the presence of the "--" arg, if any +declare -a BAZEL_ARGS +declare -a APP_ARGS +# shellcheck disable=SC2034,SC2206 +declare -r -a ARGLIST=("$@") +partition_array ARGLIST BAZEL_ARGS APP_ARGS + +"${CBUILD}" --seccomp-unconfined --image "${IMAGE}" --cmd " +printf 'bazel output_base: [%s]\n' \"\$(bazel info output_base 2>/dev/null)\" +bazel ${BAZEL_STARTUP_ARGS} ${BAZEL_ARGS[*]@Q} ${BAZEL_EXTRA_ARGS} ${APP_ARGS[*]@Q} +" diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/builder.sh b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/builder.sh new file mode 100644 index 00000000..77317aae --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/builder.sh @@ -0,0 +1,220 @@ +#!/bin/bash +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# +# shell package enabling support for docker +# + +# require this script to be sourced rather than executed +if ! (return 0 2>/dev/null); then + printf "Error: Script %s must be sourced\n" "${BASH_SOURCE[0]}" &>/dev/stderr + exit 1 +fi + +####################################### +# Configure and export the WORKSPACE variable. Does not +# overwrite existing WORKSPACE value. 
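+# If WORKSPACE is already set it is left unchanged; otherwise the repo root is +# derived via git, preferring the superproject working tree when the current +# checkout is a submodule.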
+####################################### +function builder::set_workspace() { + export WORKSPACE + if [[ -v WORKSPACE ]]; then + return + fi + local -r GIT_TOPLEVEL="$(git rev-parse --show-superproject-working-tree)" + if [[ -n ${GIT_TOPLEVEL} ]]; then + WORKSPACE="${GIT_TOPLEVEL}" + else + WORKSPACE="$(git rev-parse --show-toplevel)" + fi + local -r ws_path="$(realpath "${WORKSPACE}"/WORKSPACE)" + WORKSPACE="$(dirname "${ws_path}")" +} + +####################################### +# Return the path to the build workspace, for use with +# the docker volume or mount argument +####################################### +function builder::get_docker_workspace_mount() { + if [[ -v WORKSPACE_MOUNT ]]; then + printf "%s" "${WORKSPACE_MOUNT}" + return + fi + # when running inside a docker container, expect /.dockerenv to exist + if ! [[ -f /.dockerenv ]]; then + printf "%s" "${WORKSPACE}" + return + fi + + # if running inside a docker container, the workspace mount point cannot be + # determined by git or bazel or inspecting the filesystem itself. Instead, we + # need to use docker to expose its mount info for the /src/workspace path. + # determine the current container's ID + local -r CONTAINER_ID="$(uname --nodename)" + # use docker inspect to extract the current mount path for /src/workspace + # this format string is a golang template (https://pkg.go.dev/text/template) processed + # by docker's --format flag, per https://docs.docker.com/config/formatting/ + # shellcheck disable=SC2016 + declare -r FORMAT_STR=' + {{- range $v := .HostConfig.Binds -}} + {{$pathpair := split $v ":" -}} + {{if eq (index $pathpair 1) "/src/workspace" -}} + {{print (index $pathpair 0) -}} + {{end -}} + {{end -}} + ' + local -r MOUNT_PATH="$(docker inspect --format "${FORMAT_STR}" "${CONTAINER_ID}")" + if [[ -z ${MOUNT_PATH} ]]; then + printf "Error: Unable to determine mount point for /src/workspace. 
Exiting\n" &>/dev/stderr + exit 1 + fi + printf "%s" "${MOUNT_PATH}" +} + +function builder::get_tools_dir() { + dirname "$(readlink -f "${BASH_SOURCE[0]}")" +} + +####################################### +# Invoke cbuild tool in a build-debian container +####################################### +function builder::cbuild_debian() { + local -r CBUILD="$(builder::get_tools_dir)"/cbuild + printf "=== cbuild debian action envs ===\n" + # shellcheck disable=SC2086 + "${CBUILD}" ${CBUILD_ARGS} --image build-debian --cmd "grep -o 'action_env.*' /etc/bazel.bazelrc 1>/dev/stderr 2>/dev/null" + # shellcheck disable=SC2086 + "${CBUILD}" ${CBUILD_ARGS} --image build-debian --cmd "$*" +} + +####################################### +# Add AWS env vars to a specified array +# Arguments: +# * the name of an array to which to append values +####################################### +function builder::add_aws_env_vars() { + declare -n args=$1 + args+=( + "AWS_ACCESS_KEY_ID" + "AWS_SECRET_ACCESS_KEY" + "AWS_SESSION_TOKEN" + "AWS_REGION" + "AWS_DEFAULT_REGION" + "AWS_PROFILE" + ) +} + +####################################### +# Build a Nitro EIF from a docker image tarfile +# Arguments: +# DOCKER_IMAGE_TAR Docker image tarfile +# DOCKER_IMAGE_URI Docker image uri corresponding to tarfile (without tag) +# DOCKER_IMAGE_TAG Docker image tag corresponding to tarfile +# OUTPUT_PATH Output path, must be within the workspace +# EIF_NAME Output base filename +# BUILDER_IMAGE_NAME (optional) Amazon Linux builder image name +####################################### +function builder::docker_img_to_nitro() { + local -r docker_image_tar="$1" + local -r docker_image_uri="$2" + local -r docker_image_tag="$3" + local -r output_path="$4" + local -r eif_name="$5" + local -r image="${6-build-amazonlinux2}" + local -r temp_tag="$(mktemp --dry-run temp-XXXXXX)" + docker load -i "${docker_image_tar}" + # add a temp tag to reduce the chance of conflicts or race conditions + docker tag "${docker_image_uri}:${docker_image_tag}" "${docker_image_uri}:${temp_tag}" + builder::docker_uri_to_nitro \ + "${docker_image_uri}:${temp_tag}" \ + "${output_path}/${eif_name}" \ + "${image}" + # remove the temp tag + docker image rm "${docker_image_uri}:${temp_tag}" +} + +####################################### +# Build a Nitro EIF from a docker image +# Arguments: +# DOCKER_URI (with tag) +# OUTPUT_BASE_FILENAME path and base filename +# BUILDER_IMAGE_NAME (optional) Amazon Linux builder image name +####################################### +function builder::docker_uri_to_nitro() { + local -r docker_uri="$1" + local -r output_base_fname="$2" + local -r image="${3-build-amazonlinux2}" + local -r output_eif="${output_base_fname}".eif + local -r output_json="${output_base_fname}".json + local -r output_pcr0_json="${output_base_fname}".pcr0.json + local -r output_pcr0_txt="${output_base_fname}".pcr0.txt + builder::cbuild_al "${image}" " +nitro-cli build-enclave --docker-uri ${docker_uri} --output-file ${output_eif@Q} >${output_json@Q} +jq --compact-output '{PCR0: .Measurements.PCR0}' ${output_json@Q} >${output_pcr0_json@Q} +jq --compact-output --raw-output '.Measurements.PCR0' ${output_json@Q} >${output_pcr0_txt@Q} +" +} + +####################################### +# Invoke cbuild tool in an amazonlinux build container +# Arguments: +# BUILDER_IMAGE_NAME Amazon Linux builder image name +####################################### +function builder::cbuild_al() { + local -r image="$1" + shift + local -r cbuild="$(builder::get_tools_dir)"/cbuild + declare -a 
env_vars + builder::add_aws_env_vars env_vars + declare env_args + for evar in "${env_vars[@]}"; do + env_args+=(--env "${evar}") + done + printf "=== cbuild %s action envs ===\n" "${image}" + # shellcheck disable=SC2086 + "${cbuild}" ${CBUILD_ARGS} "${env_args[@]}" --image "${image}" --cmd "grep -o 'action_env.*' /etc/bazel.bazelrc 1>/dev/stderr 2>/dev/null" + # shellcheck disable=SC2086 + "${cbuild}" ${CBUILD_ARGS} "${env_args[@]}" --image "${image}" --cmd "$*" +} + +function builder::cbuild_al2() { + builder::cbuild_al build-amazonlinux2 "$@" +} + +function builder::cbuild_al2023() { + builder::cbuild_al build-amazonlinux2023 "$@" +} + +####################################### +# Return the numeric id of the user or group +# Arguments: +# single character, either "u" for user or "g" for group +####################################### +function builder::id() { + declare -r mode="$1" + declare -i _id + _id=$(id "-${mode}") + if [[ ${_id} -ne 0 ]]; then + printf "%s" "${_id}" + else + # id is 0 (root), use the owner/group of the WORKSPACE file instead + stat -c "%${mode}" "${WORKSPACE}"/WORKSPACE + fi +} + +####################################### +# invoke functions to configure the build environment +####################################### + +builder::set_workspace diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/buildozer b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/buildozer new file mode 120000 index 00000000..78c3d50c --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/buildozer @@ -0,0 +1 @@ +release-tool \ No newline at end of file diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/cassowary b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/cassowary new file mode 120000 index 00000000..d609abda --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/cassowary @@ -0,0 +1 @@ +test-tool \ No newline at end of file diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/cbuild b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/cbuild new file mode 100755 index 00000000..c40a3aed --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/cbuild @@ -0,0 +1,252 @@ +#!/usr/bin/env bash + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -o pipefail +set -o errexit + +IMAGE="build-debian" +VERBOSE=0 +declare -r -a IMAGE_LIST=( + "build-debian" + "build-amazonlinux2" + "build-amazonlinux2023" + "presubmit" +) + +declare -a ENV_VARS +ENV_VARS+=( + "BAZEL_STARTUP_ARGS" + "BAZEL_EXTRA_ARGS" +) + +function usage() { + local exitval=${1-1} + cat &>/dev/stderr << USAGE +usage: + $0 + --cmd bash command string to execute within the docker container + --cmd-profiler enable profiler for the command + --image Image name for the build runtime. 
Valid names: +USAGE + + for elem in "${IMAGE_LIST[@]}" + do + if [[ ${elem} == "${IMAGE}" ]]; then + default_text=" (default)" + fi + printf " * %s%s\n" "${elem}" "${default_text}" &>/dev/stderr + default_text="" + done + + cat &>/dev/stderr << USAGE + --env [=] Name (or name=value) of exported environment variable, propagated into container + --without-shared-cache Containers will not mount ${HOME}/.cache/bazel + --without-embedded-docker Disable docker client within container + --docker-network Specify docker network type or name, value passed to docker run --network. Default: ${DOCKER_NETWORK} + --seccomp-unconfined Run docker container without a seccomp profile + --verbose Enable verbose output + +Environment variables (all optional): + WORKSPACE Full path to the workspace (repo root) + WORKSPACE_MOUNT Full path to the workspace on the host filesystem + EXTRA_DOCKER_RUN_ARGS Additional arguments to pass to docker run invocations + CBUILD_IMAGE docker URI to the specific image to run + DOCKER_NETWORK Specify docker network, equivalent of --docker-network flag. + +Environment variables propagated into container: + BAZEL_STARTUP_ARGS + BAZEL_EXTRA_ARGS +USAGE + # shellcheck disable=SC2086 + exit ${exitval} +} + +declare -i WITH_SHARED_CACHE=1 +declare -i WITH_DOCKER_SOCK=1 +declare -i WITH_CMD_PROFILER=0 +DOCKER_NETWORK="${DOCKER_NETWORK:-bridge}" +declare -i DOCKER_SECCOMP_UNCONFINED=0 + +while [[ $# -gt 0 ]]; do + case "$1" in + --cmd) + CMD="$2" + shift 2 || usage + ;; + --cmd-profiler) + WITH_CMD_PROFILER=1 + shift + ;; + --env) + ENV_VARS+=("$2") + shift 2 || usage + ;; + --image) + IMAGE="$2" + shift 2 || usage + ;; + --without-shared-cache) + WITH_SHARED_CACHE=0 + shift + ;; + --without-embedded-docker) + WITH_DOCKER_SOCK=0 + shift + ;; + --docker-network) + DOCKER_NETWORK="$2" + shift 2 || usage + ;; + --seccomp-unconfined) + DOCKER_SECCOMP_UNCONFINED=1 + shift + ;; + --verbose) + VERBOSE=1 + shift + ;; + -h | --help) usage 0 ;; + *) + printf "unrecognized arg: %s\n" "$1" + usage + ;; + esac +done + +if [[ -z ${IMAGE} ]]; then + printf -- "error: --image must be specified\n" &>/dev/stderr + usage 1 +fi +# shellcheck disable=SC2076 +if ! [[ " ${IMAGE_LIST[*]} " =~ " ${IMAGE} " ]]; then + printf -- "error: image [%s] not recognized\n" "${IMAGE}" &>/dev/stderr + usage 1 +fi + +TOOLS_DIR="$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")" +readonly TOOLS_DIR +# shellcheck disable=SC1090 +source "${TOOLS_DIR}"/builder.sh + +trap _cleanup EXIT +function _cleanup() { + local -r -i status=$? 
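+  # re-point the bazel-* convenience symlinks at the host cache so they stay
+  # valid after the container-backed invocation exits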
+ "${TOOLS_DIR}"/normalize-bazel-symlinks &>/dev/null || true + exit ${status} +} + +WORKSPACE_MOUNT="$(builder::get_docker_workspace_mount)" +readonly WORKSPACE_MOUNT +export WORKSPACE_MOUNT +if [[ ${VERBOSE} -eq 1 ]]; then + printf "mounting workspace into container: %s\n" "${WORKSPACE_MOUNT}" &>/dev/stderr +fi + +TOOLS_RELDIR="$(realpath "${TOOLS_DIR}" --relative-to="${PWD}")" +readonly TOOLS_RELDIR + +if [[ -n ${CBUILD_IMAGE} ]]; then + IMAGE_TAGGED=${CBUILD_IMAGE} +else + IMAGE_TAGGED=$("${TOOLS_DIR}"/get-builder-image-tagged --image "${IMAGE}") +fi +readonly IMAGE_TAGGED + +PWD_WORKSPACE_REL_PATH="$(realpath --relative-base="${WORKSPACE}" "${PWD}")" +readonly PWD_WORKSPACE_REL_PATH +WORKDIR=/src/workspace +if [[ ${PWD_WORKSPACE_REL_PATH:0:1} != / ]]; then + WORKDIR="/src/workspace/${PWD_WORKSPACE_REL_PATH}" +fi +readonly WORKDIR + +declare -a DOCKER_RUN_ARGS +DOCKER_RUN_ARGS+=( + "--rm" + "--entrypoint=/bin/bash" + "--volume=${WORKSPACE_MOUNT}:/src/workspace" + "--workdir=${WORKDIR}" + "--network=${DOCKER_NETWORK}" + "$(echo "${EXTRA_DOCKER_RUN_ARGS}" | envsubst)" +) + +if [[ ${DOCKER_SECCOMP_UNCONFINED} -eq 1 ]]; then + DOCKER_RUN_ARGS+=("--security-opt=seccomp=unconfined") +fi + +if [[ ${WITH_CMD_PROFILER} -eq 1 ]]; then + if [[ ${IMAGE} != build-debian ]]; then + printf "error: --cmd-profiler is only compatible with build-debian\n" &>/dev/stderr + usage 1 + fi + DOCKER_RUN_ARGS+=( + "--env=CMD_PROFILER=LD_PRELOAD=/usr/lib/x86_64-linux-gnu/libprofiler.so" + "--env=CPUPROFILE=benchmark.prof" + ) +fi + +# inside the docker build images, /bazel_root is the bazel cache dir, per the system-wide bazelrc +readonly BAZEL_ROOT=/bazel_root +if [[ ${WITH_SHARED_CACHE} -eq 0 ]]; then + # use tmpfs for as temporary, container-bound bazel cache + DOCKER_RUN_ARGS+=( + "--tmpfs ${BAZEL_ROOT}:exec" + ) +else + # mount host filesystem for "shared" use by multiple docker container invocations + DOCKER_RUN_ARGS+=( + "--volume ${HOME}/.cache/bazel:${BAZEL_ROOT}" + ) +fi +if [[ ${WITH_DOCKER_SOCK} -eq 1 ]]; then + DOCKER_RUN_ARGS+=( + "--volume /var/run/docker.sock:/var/run/docker.sock" + ) +fi +for evar in "${ENV_VARS[@]}" +do + DOCKER_RUN_ARGS+=( + "--env=${evar}" + ) +done +if [[ -t 0 ]] && [[ -t 1 ]]; then + # stdin and stdout are open, assume it's an interactive tty session + DOCKER_RUN_ARGS+=( + --interactive + --tty + ) +fi + +if [[ ${VERBOSE} -eq 1 ]]; then + set -o xtrace +fi +if [[ -z ${CMD} ]]; then + # shellcheck disable=SC2068 + docker run \ + ${DOCKER_RUN_ARGS[@]} \ + "${IMAGE_TAGGED}" \ + --login +elif [[ ${WITH_CMD_PROFILER} -eq 1 ]]; then + # shellcheck disable=SC2068 + docker run \ + ${DOCKER_RUN_ARGS[@]} \ + "${IMAGE_TAGGED}" \ + --login -c "'${TOOLS_RELDIR}'/normalize-bazel-symlinks; env \${CMD_PROFILER} ${CMD}" +else + # shellcheck disable=SC2068 + docker run \ + ${DOCKER_RUN_ARGS[@]} \ + "${IMAGE_TAGGED}" \ + --login -c "${CMD}" +fi diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/collect-coverage b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/collect-coverage new file mode 100755 index 00000000..746185c2 --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/collect-coverage @@ -0,0 +1,105 @@ +#!/usr/bin/env bash + +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# environment variables: +# WORKSPACE Set the path to the workspace (repo root) + +set -o pipefail +set -o errexit + +declare LCOV_REPORT="${WORKSPACE}/bazel-out/_coverage/_coverage_report.dat" +declare COVERAGE_FILENAME=coverage.zip + +function usage() { + local exitval=${1-1} + cat &>/dev/stderr << USAGE +usage: + $0 + --lcov_report path to lcov report relative to the WORKSPACE + --coverage_output_filename name of ZIP file that will contain artifacts from coverage collection +USAGE + # shellcheck disable=SC2086 + exit ${exitval} +} + +while [[ $# -gt 0 ]]; do + case "$1" in + --lcov_report) + LCOV_REPORT="${WORKSPACE}/$2" + shift 2 || usage + ;; + --coverage_output_filename) + COVERAGE_FILENAME="$2" + shift 2 || usage + if [[ ${COVERAGE_FILENAME##*.} != zip ]]; then + printf "error: --coverage_output_filename must be a ZIP file\n" &>/dev/stderr + exit 1 + fi + ;; + -h | --help) + usage 0 + ;; + *) + usage + ;; + esac +done + +trap _cleanup EXIT +function _cleanup() { + declare -r -i status=$? + if [[ ${status} -ne 0 ]]; then + printf "collect-coverage exit code: %d\n" ${status} &>/dev/stderr + sleep 5s + fi + exit ${status} +} + +function generate_coverage_report() { + local -r cov_dir="$(mktemp --tmpdir="${WORKSPACE}" --directory coverage-XXXX)" + trap 'rm -rf "${cov_dir}"' RETURN EXIT + cp "${LCOV_REPORT}" "${cov_dir}"/_coverage_report.dat + local -r dist_dir="${WORKSPACE}"/dist + cp "${LCOV_REPORT}" "${dist_dir}"/_coverage_report.dat + chmod -x {"${cov_dir}","${dist_dir}"}/_coverage_report.dat + + "${TOOLS_DIR}"/lcov --list dist/_coverage_report.dat >"${dist_dir}"/coverage_report.txt + "${TOOLS_DIR}"/lcov --summary dist/_coverage_report.dat + + local -r commit_sha=$(git rev-parse HEAD) + "${TOOLS_DIR}"/genhtml \ + --output-directory="$(basename "${cov_dir}")" \ + --title "coverage for commit ${commit_sha:0:7}" \ + --show-details \ + --legend \ + --quiet \ + dist/_coverage_report.dat + + if pushd "${cov_dir}" &>/dev/null; then + local -r zipfile=dist/"${COVERAGE_FILENAME}" + "${TOOLS_DIR}"/zip -q -r "../${zipfile}" ./* + printf "coverage archived to: %s\n" "${zipfile}" + popd &>/dev/null + fi +} + +# shellcheck disable=SC1090 +source "$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")"/builder.sh +TOOLS_DIR="$(builder::get_tools_dir)" +readonly TOOLS_DIR + +generate_coverage_report +"${TOOLS_DIR}"/normalize-dist diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/commit-and-tag-version b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/commit-and-tag-version new file mode 120000 index 00000000..78c3d50c --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/commit-and-tag-version @@ -0,0 +1 @@ +release-tool \ No newline at end of file diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/convert-docker-to-nitro b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/convert-docker-to-nitro new file mode 100755 index 00000000..7586e039 --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/convert-docker-to-nitro @@ -0,0 +1,104 @@ +#!/bin/bash 
+# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -o pipefail +set -o errexit + +trap cleanup EXIT +function cleanup() { + local -r -i status=$? + exit ${status} +} + +declare BUILDER_IMAGE_NAME=build-amazonlinux2 + +function usage() { + declare -r -i exitval=${1-1} + cat >&2 << USAGE +usage: + $0 + --docker-image-tar Docker image tarfile + --docker-image-uri Docker image uri (from tarfile) + --docker-image-tag Docker image tag (from tarfile) + --builder-image Amazon Linux builder image name. Default: ${BUILDER_IMAGE_NAME} + --outdir Output path for EIF files + --eif-name Base filename for EIF files + +environment variables (all optional): + WORKSPACE Set the path to the workspace (repo root) +USAGE + # shellcheck disable=SC2086 + exit ${exitval} +} + +while [[ $# -gt 0 ]]; do + case "$1" in + --docker-image-tar) + DOCKER_IMAGE_TAR="$2" + shift 2 || usage + ;; + --docker-image-uri) + DOCKER_IMAGE_URI="$2" + shift 2 || usage + ;; + --docker-image-tag) + DOCKER_IMAGE_TAG="$2" + shift 2 || usage + ;; + --outdir) + OUTPUT_DIR="$2" + shift 2 || usage + ;; + --eif-name) + EIF_NAME="$2" + shift 2 || usage + ;; + --builder-image) + BUILDER_IMAGE_NAME="$2" + shift 2 || usage + ;; + -h | --help) usage 0 ;; + *) usage ;; + esac +done + +: "${DOCKER_IMAGE_TAR?"--docker-image-tar not specified"}" +: "${DOCKER_IMAGE_URI?"--docker-image-uri not specified"}" +: "${DOCKER_IMAGE_TAG?"--docker-image-tag not specified"}" +: "${OUTPUT_DIR?"--outdir not specified"}" +: "${EIF_NAME?"--eif-name not specified"}" + +# shellcheck disable=SC1090 +source "$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")"/builder.sh + +WS_OUTPUT_RELDIR="$(realpath "${OUTPUT_DIR}" --relative-to="${WORKSPACE}")" +if [[ ${WS_OUTPUT_RELDIR} =~ ^\.\..*\/ ]]; then + printf "Output dir must be within the workspace directory tree\n" &>/dev/stderr + exit 1 +fi +if ! 
[[ -d ${WORKSPACE}/${WS_OUTPUT_RELDIR} ]]; then + printf "Output dir [%s] is not found\n" "${WORKSPACE}/${WS_OUTPUT_RELDIR}" &>/dev/stderr + exit 1 +fi + +readonly WS_OUTPUT_DIR=/src/workspace/"${WS_OUTPUT_RELDIR}" +printf "==== building AWS Nitro image using %s =====\n" "${BUILDER_IMAGE_NAME}" +builder::docker_img_to_nitro \ + "${DOCKER_IMAGE_TAR}" \ + "${DOCKER_IMAGE_URI}" \ + "${DOCKER_IMAGE_TAG}" \ + "${WS_OUTPUT_DIR}" \ + "${EIF_NAME}" \ + "${BUILDER_IMAGE_NAME}" diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/coverage-tool b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/coverage-tool new file mode 120000 index 00000000..d609abda --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/coverage-tool @@ -0,0 +1 @@ +test-tool \ No newline at end of file diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/curl b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/curl new file mode 120000 index 00000000..d609abda --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/curl @@ -0,0 +1 @@ +test-tool \ No newline at end of file diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/genhtml b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/genhtml new file mode 120000 index 00000000..f475fb1e --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/genhtml @@ -0,0 +1 @@ +coverage-tool \ No newline at end of file diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/get-architecture b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/get-architecture new file mode 100755 index 00000000..a4415602 --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/get-architecture @@ -0,0 +1,31 @@ +#!/usr/bin/env bash +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Print the CPU architecture + +set -o errexit + +trap _cleanup EXIT +function _cleanup() { + local -r -i STATUS=$? + if [[ ${STATUS} -ne 0 ]]; then + printf "Error: get-architecture status code: %s\n" "${STATUS}" &>/dev/stderr + fi + exit ${STATUS} +} + +if [[ -n ${BUILD_ARCH} ]]; then + printf "%s\n" "${BUILD_ARCH}" +else + docker run --rm --entrypoint=/usr/bin/dpkg ubuntu:20.04 --print-architecture +fi diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/get-builder-image-tagged b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/get-builder-image-tagged new file mode 100755 index 00000000..35371aea --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/get-builder-image-tagged @@ -0,0 +1,256 @@ +#!/usr/bin/env bash + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Return an image:tag for the specified builder docker image +# Given the input files (Dockerfile, configs, installation scripts etc) for building the docker +# image, first generate a hash of the file contents (ie. exclude timestamps, file ownership etc). +# This hash will be used as the image tag + +set -o pipefail +set -o errexit + +trap _cleanup EXIT +function _cleanup() { + declare -r -i status=$? + if [[ -n ${TEMPTAR} ]]; then + rm -f "${TEMPTAR}" "${SHAFILE}" + fi + if [[ ${status} -ne 0 ]]; then + if [[ -s ${BUILD_OUTPUT} ]] && [[ ${VERBOSE} -eq 1 ]]; then + cat "${BUILD_OUTPUT}" + fi + printf "Error: get-builder-image-tagged status code: %d\n" ${status} &>/dev/stderr + printf "Docker build log: %s\n" "${BUILD_OUTPUT}" &>/dev/stderr + if [[ -v KOKORO_ARTIFACTS_DIR ]]; then + sleep 5s + fi + elif [[ -f ${BUILD_OUTPUT} ]] && [[ ${VERBOSE} -eq 0 ]]; then + rm -f "${BUILD_OUTPUT}" + fi + exit ${status} +} + +function make_temp() { + declare _suffix="$1" + mktemp --tmpdir="${WORKSPACE}" --dry-run "docker-buildx-${IMG}-XXXX" --suffix="${_suffix}" +} + +function get_image_list() { + declare -r _images_dir="$1" + find "${_images_dir}" -maxdepth 1 -mindepth 1 -type d -printf "%P\n" | sort +} + +function get_image_fullname() { + declare -r _img="$1" + declare -r _image_name=privacysandbox/builders/${_img} + if [[ -z ${_image_name} ]]; then + printf -- "error: image [%s] not recognized\n" "${_img}" &>/dev/stderr + return 1 + fi + printf "%s" "${_image_name}" +} + +function usage() { + declare -r -i exitval=${1-1} + cat &>/dev/stderr < + --no-build Do not build image if it doesn't exist + --image Image name for the build runtime. Valid names: +USAGE + + for elem in $(get_image_list "${IMAGES_DIR}"); do + printf " * %s\n" "${elem}" &>/dev/stderr + done + + cat &>/dev/stderr </dev/stderr + usage 1 +fi + +if ! IMAGE_NAME="$(get_image_fullname "${IMG}")"; then + usage 1 +fi +readonly IMAGE_NAME + +IMAGE_PATH_FULL="${IMAGES_DIR}/${IMG}" +if ! [[ -s ${IMAGE_PATH_FULL}/Dockerfile ]]; then + printf "error: unable to locate [%s/Dockerfile]\n" "${IMAGE_PATH_FULL}" &>/dev/stderr + exit 1 +fi + +# create an image containing gnu tar +function generate_image() { + { + cat <"${BUILD_OUTPUT}" + rm -f "${BUILD_OUTPUT}" + if ! 
docker image inspect "${TAR_IMAGE}" &>/dev/null; then + printf "error creating docker image [%s]\n" "${TAR_IMAGE}" &>/dev/stderr + exit 1 + fi +} + +BUILD_OUTPUT="$(make_temp .log)" +readonly BUILD_OUTPUT +BUILDSYS_VERSION="$(<"${BUILDERS_DIR}"/version.txt)" +readonly BUILDSYS_VERSION +readonly TAR_IMAGE="builders/tar-get-builder-image-tagged:v${BUILDSYS_VERSION}" +TAR_IMAGE_HASH="$(docker image ls --filter "reference=${TAR_IMAGE}" --quiet)" +readonly TAR_IMAGE_HASH +if [[ -z ${TAR_IMAGE_HASH} ]]; then + generate_image +fi + +# Create a deterministic tar file for the specified file path, returning +# the SHA for the tar file content +# Any etc files in the $WORKSPACE (root) directory override the etc +# files in the image dir, as long as it's a file also found in the +# builders etc directory +# the TARFILE and SHAFILE args must be paths located in /tmp +function _tar_for_dir() { + local -r FILE_TAR="$1" + local -r FILE_SHA="$2" + local -r FILEPATH="$3" + local -r TMP_IMAGE_DIR="$(mktemp --tmpdir="${WORKSPACE}" --directory)" + # shellcheck disable=SC2064 + # expand TMP_IMAGE_DIR immediately + trap "rm -rf '${TMP_IMAGE_DIR}'" RETURN + + local -r WS_FILE_TAR="$(realpath "${FILE_TAR}" --relative-to="${WORKSPACE}")" + local -r WS_FILE_SHA="$(realpath "${FILE_SHA}" --relative-to="${WORKSPACE}")" + local -r WS_FILEPATH="$(realpath "${FILEPATH}" --relative-to="${WORKSPACE}")" + local -r WS_TMP_IMAGE_DIR="$(realpath "${TMP_IMAGE_DIR}" --relative-to="${WORKSPACE}")" + # find workspace etc files that are also in the image dir and the builders etc dir + local -r WORKSPACE_ETC_FILES="$({ + # shellcheck disable=SC2012 + ls -A -1 "${FILEPATH}" "${ETC_DIR}" | sort | uniq -d + ls -A -1 "${WORKSPACE}" + } | sort | uniq -d)" + # create a deterministic tarball of the collected files + docker run \ + --rm \ + --entrypoint /bin/sh \ + --volume "${WORKSPACE_MOUNT}":/workspace \ + --workdir /workspace \ + "${TAR_IMAGE}" -c " +tar --create --dereference --directory='${WS_FILEPATH}' --exclude=test . \ + | tar --extract --overwrite --directory='${WS_TMP_IMAGE_DIR}' + +# overwrite etc files in the image with the WORKSPACE's etc files +if [ -n '${WORKSPACE_ETC_FILES}' ]; then + tar --create --dereference --exclude=test ${WORKSPACE_ETC_FILES} \ + | tar --extract --overwrite --directory='${WS_TMP_IMAGE_DIR}' +fi + +tar --create --dereference --sort=name --owner=0 --group=0 --numeric-owner --format=gnu --directory='${WS_TMP_IMAGE_DIR}' --file='${WS_FILE_TAR}' . +{ + printf 'blob %d\0' \$(wc -c <'${WS_FILE_TAR}') + tar --extract --file='${WS_FILE_TAR}' --to-stdout +} \ + | sha256sum \ + | cut -f1 -d' ' \ + >'${WS_FILE_SHA}' +" +} + +TEMPTAR="$(make_temp .tar)" +readonly TEMPTAR +SHAFILE="$(make_temp .sha)" +readonly SHAFILE +# use the tarfile size and file content to generate a sha256 hash +_tar_for_dir "${TEMPTAR}" "${SHAFILE}" "${IMAGE_PATH_FULL}" +SHA="$(<"${SHAFILE}")" +readonly SHA +ARCH="$("${TOOLS_DIR}"/get-architecture)" +readonly ARCH +readonly IMAGE_TAG="${ARCH}-${SHA}" +readonly IMAGE_TAGGED="${IMAGE_NAME}:${IMAGE_TAG}" +# generate output +if [[ ${SHA_ONLY} -eq 0 ]]; then + printf "%s\n" "${IMAGE_TAGGED}" +else + printf "%s\n" "${SHA}" +fi + +if [[ ${BUILD_IMAGE_IF_NEEDED} -eq 1 ]]; then + # Create a builder docker image + # build container image and load it into the local docker client + if ! 
docker image inspect "${IMAGE_TAGGED}" &>/dev/null; then
+    printf "generating docker image %s\n" "${IMAGE_TAGGED}" &>/dev/stderr
+    docker buildx build "${DOCKER_BUILD_ARGS[@]}" --output=type=docker --tag "${IMAGE_TAGGED}" - <"${TEMPTAR}" &>"${BUILD_OUTPUT}"
+  fi
+fi
diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/ghz b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/ghz
new file mode 120000
index 00000000..d609abda
--- /dev/null
+++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/ghz
@@ -0,0 +1 @@
+test-tool
\ No newline at end of file
diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/git-hooks/pre-commit b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/git-hooks/pre-commit
new file mode 100755
index 00000000..21d56711
--- /dev/null
+++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/git-hooks/pre-commit
@@ -0,0 +1,18 @@
+#!/bin/bash
+
+TOP_LEVEL_DIR="$(git rev-parse --show-toplevel)"
+GIT_HOOKS_DIR="${TOP_LEVEL_DIR}/$(git rev-parse --git-dir)/hooks"
+declare -a ARGS=(
+  hook-impl
+  "--hook-type=pre-commit"
+  --hook-dir "${GIT_HOOKS_DIR}"
+  --
+  "$@"
+)
+
+if command -v "${TOP_LEVEL_DIR}"/builders/tools/pre-commit > /dev/null; then
+  exec "${TOP_LEVEL_DIR}"/builders/tools/pre-commit "${ARGS[@]}"
+else
+  printf "[builders/tools/pre-commit] not found\n" >&2
+  exit 1
+fi
diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/google-pprof b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/google-pprof
new file mode 120000
index 00000000..f475fb1e
--- /dev/null
+++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/google-pprof
@@ -0,0 +1 @@
+coverage-tool
\ No newline at end of file
diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/grpcurl b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/grpcurl
new file mode 120000
index 00000000..d609abda
--- /dev/null
+++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/grpcurl
@@ -0,0 +1 @@
+test-tool
\ No newline at end of file
diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/h2load b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/h2load
new file mode 120000
index 00000000..d609abda
--- /dev/null
+++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/h2load
@@ -0,0 +1 @@
+test-tool
\ No newline at end of file
diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/hadolint b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/hadolint
new file mode 100755
index 00000000..2ab1de00
--- /dev/null
+++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/hadolint
@@ -0,0 +1,42 @@
+#!/usr/bin/env bash
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
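+
+# example (illustrative; the Dockerfile path is hypothetical):
+#   builders/tools/hadolint production/packaging/Dockerfile
+# runs hadolint v2.12.0 in docker against files under the workspace mount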
+ +set -o errexit + +# shellcheck disable=SC1090 +source "$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")"/builder.sh + +declare -a DOCKER_RUN_ARGS +DOCKER_RUN_ARGS+=( + "--rm" +) +if [[ -t 0 ]] && [[ -t 1 ]]; then + # stdin and stdout are open, assume it's an interactive tty session + DOCKER_RUN_ARGS+=( + "--interactive" + "--tty" + ) +fi + +WORKSPACE_MOUNT="$(builder::get_docker_workspace_mount)" +readonly WORKSPACE_MOUNT + +docker run \ + "${DOCKER_RUN_ARGS[@]}" \ + --user="$(id -u):$(id -g)" \ + --volume="${WORKSPACE_MOUNT}":/src \ + --workdir=/src \ + ghcr.io/hadolint/hadolint:v2.12.0 \ + hadolint "$@" diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/jq b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/jq new file mode 120000 index 00000000..d609abda --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/jq @@ -0,0 +1 @@ +test-tool \ No newline at end of file diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/lcov b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/lcov new file mode 120000 index 00000000..f475fb1e --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/lcov @@ -0,0 +1 @@ +coverage-tool \ No newline at end of file diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/normalize-bazel-symlinks b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/normalize-bazel-symlinks new file mode 100755 index 00000000..8506ac96 --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/normalize-bazel-symlinks @@ -0,0 +1,56 @@ +#!/usr/bin/env bash + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
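+
+# bazel runs inside the build container write symlinks that point at
+# /bazel_root; on the host those links dangle, so this script re-points them
+# at ${HOME}/.cache/bazel (and, when run inside a container, back at
+# /bazel_root).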
+ +declare -r BAZEL_CACHE_DIR="${HOME}/.cache/bazel" + +function normalize_symlink() { + declare -r link_name="$1" + if readlink --canonicalize-existing "${link_name}" &>/dev/null ; then + printf "symlink %s resolves fully, skipping\n" "${link_name}" + return + fi + local -r link_path="$(readlink "${link_name}")" + local -r output_user_root="${link_path///bazel_root/}" + rm -f "${link_name}" + ln -s "$(realpath "${BAZEL_CACHE_DIR}/${output_user_root}")" "${link_name}" +} + +function normalize_symlink_docker() { + declare -r link_name="$1" + if readlink --canonicalize-existing "${link_name}" &>/dev/null ; then + printf "symlink %s resolves fully, skipping\n" "${link_name}" + return + fi + local -r link_path="$(readlink "${link_name}")" + local -r output_user_root="${link_path##*/.cache/bazel/}" + rm -f "${link_name}" + ln -s "$(realpath "/bazel_root/${output_user_root}")" "${link_name}" +} + +declare _normalize_fn=normalize_symlink +if [[ -f /.dockerenv ]]; then + _normalize_fn=normalize_symlink_docker +fi + +declare -a -r LINK_DIRS=( + bazel-bin + bazel-out + bazel-testlogs + bazel-workspace +) +for link in "${LINK_DIRS[@]}"; do + if [[ -L ${link} ]]; then + ${_normalize_fn} "${link}" + fi +done diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/normalize-dist b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/normalize-dist new file mode 100755 index 00000000..93627d25 --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/normalize-dist @@ -0,0 +1,61 @@ +#!/usr/bin/env bash + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# environment variables supported by cbuild (all optional): +# WORKSPACE Set the path to the workspace (repo root) +# EXTRA_DOCKER_RUN_ARGS Additional arguments to pass to docker run invocations + +set -o pipefail +set -o errexit + +trap _cleanup EXIT +function _cleanup() { + local -r -i STATUS=$? + if [[ ${STATUS} -eq 0 ]]; then + printf "normalize-dist completed successfully\n" &>/dev/stderr + else + printf "Error: normalize-dist completed with status code: %s\n" "${STATUS}" &>/dev/stderr + fi + exit 0 +} + +TOOLS_DIR="$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")" +readonly TOOLS_DIR +# shellcheck disable=SC1090 +source "${TOOLS_DIR}"/builder.sh +readonly IMAGE=build-debian +GROUP="$(builder::id g)" +readonly GROUP +USER="$(builder::id u)" +readonly USER + +readonly TOP_LEVEL_DIRS="dist" + +printf "Setting file ownership [%s], group [%s] in dirs [%s]\n" "${USER}" "${GROUP}" "${TOP_LEVEL_DIRS}" +declare -a runner=() +if [[ -f /.dockerenv ]]; then + runner+=(bash -c) +else + runner+=("${TOOLS_DIR}"/cbuild "--image" "${IMAGE}" "--cmd") +fi + +"${runner[@]}" " +for TOP_LEVEL_DIR in ${TOP_LEVEL_DIRS}; do + find \${TOP_LEVEL_DIR} -type f ! 
-executable -exec chmod 644 {} \; + find \${TOP_LEVEL_DIR} -type f -executable -exec chmod 755 {} \; + find \${TOP_LEVEL_DIR} -type d -exec chmod 755 {} \; + chgrp --recursive ${GROUP} \${TOP_LEVEL_DIR} + chown --recursive ${USER} \${TOP_LEVEL_DIR} +done +" diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/pre-commit b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/pre-commit new file mode 100755 index 00000000..be8c5c7c --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/pre-commit @@ -0,0 +1,165 @@ +#!/bin/bash +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -o pipefail +set -o errexit + +TOOLS_DIR="$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")" +readonly TOOLS_DIR +# shellcheck disable=SC1090 +source "${TOOLS_DIR}"/builder.sh + +trap __cleanup EXIT +function __cleanup() { + declare -r -i STATUS=$? + readonly LOGFILE=/var/cache/pre-commit/pre-commit.log + if [[ -s ${LOGFILE} ]]; then + printf "==== %s ====\n" ${LOGFILE} &>/dev/stderr + cat ${LOGFILE} &>/dev/stderr + printf "===========\n" &>/dev/stderr + fi + if [[ ${CLEANUP} -eq 0 ]]; then + exit ${STATUS} + fi + # shellcheck disable=SC2086 + "${TOOLS_DIR}"/cbuild "${CBUILD_COMMON_ARGS[@]}" --cmd $" +# change file ownership back to user +{ + git ls-files . --modified + git diff --staged --name-only +} | \ + sort -u | \ + xargs --replace={} /bin/bash -c '{ chown -f $(builder::id u) {}; chgrp -f $(builder::id g) {};}' +# clean up 'empty' node_modules dir created by prettier 2.x +if [[ -d node_modules ]]; then + rmdir node_modules/.cache/prettier/ node_modules/.cache/ node_modules/ +fi +" + exit ${STATUS} +} + +function __exit_msg() { + declare -r MSG="$1" + printf "%s. Exiting\n" "${MSG}" &>/dev/stderr + exit 1 +} + +function __install_git_hooks() { + git rev-parse --is-inside-work-tree >/dev/null || __exit_msg "Not in a git repository" + declare -r GIT_HOOKS_SRC_DIR="${TOOLS_DIR}/git-hooks" + declare -r GIT_HOOKS_DIR="${WORKSPACE}/.git/hooks" + chmod +x "${GIT_HOOKS_SRC_DIR}" + cp -p "${GIT_HOOKS_SRC_DIR}"/* "${GIT_HOOKS_DIR}" + printf "installed git hooks\n" +} + +function __usage() { + declare -r -i exitval=${1-1} + cat &>/dev/stderr << USAGE +usage: + $0 [command] + +A CLI tool to run pre-commit checks. 
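+
+examples (illustrative):
+  $0                 run all hooks over the entire workspace
+  $0 install         install the git pre-commit hook for this repo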
+ + if command not specified, run the pre-commit tool on all files in entire workspace + install install as a git pre-commit hook in the current git repo + autoupdate update hook versions in .pre-commit-config.yaml + +environment variables (all optional): + SKIP comma-delimited list of hook ids to skip + PRESUBMIT_IMAGE docker URI to the specific presubmit image to run + IMAGE_BUILD_VERBOSE capture docker build output if set +USAGE + # shellcheck disable=SC2086 + exit ${exitval} +} + +function __init() { + if [[ -n ${PRESUBMIT_IMAGE} ]]; then + IMAGE_TAGGED="${PRESUBMIT_IMAGE}" + else + IMAGE_TAGGED="$("${TOOLS_DIR}"/get-builder-image-tagged --image presubmit)" + fi + readonly IMAGE_TAGGED + export CBUILD_IMAGE="${IMAGE_TAGGED}" + + local -r ARCH="$("${TOOLS_DIR}"/get-architecture)" + if [[ ${ARCH} == arm64 ]]; then + if [[ -z ${SKIP} ]]; then + SKIP_HOOKS="terraform-fmt" + else + SKIP_HOOKS="${SKIP},terraform-fmt" + fi + else + SKIP_HOOKS="${SKIP}" + fi + if [[ -n ${SKIP_HOOKS} ]]; then + printf "Skipping pre-commit hooks: %s\n" "${SKIP_HOOKS}" + fi + SKIP_ENV="SKIP=${SKIP_HOOKS}" +} + +declare -i CLEANUP=0 +declare -a -r CBUILD_COMMON_ARGS=( + "--without-shared-cache" + "--image" + presubmit +) +declare -r PRECOMMIT=/usr/pre-commit-venv/bin/pre-commit + +# TODO: run bazel //:precommit-hooks rather than just the pre-commit tool +if [[ $# -gt 0 ]]; then + case "$1" in + install) __install_git_hooks ;; + + help | --help | -h) __usage 0 ;; + + autoupdate) + PRECOMMIT_CMD="$1" + shift + __init + # shellcheck disable=SC2086 + "${TOOLS_DIR}"/cbuild "${CBUILD_COMMON_ARGS[@]}" --env "${SKIP_ENV}" --cmd "${PRECOMMIT} ${PRECOMMIT_CMD} --config ./.pre-commit-config.yaml $*" + ;; + + hook-impl) + PRECOMMIT_CMD="$1" + CLEANUP=1 + shift + __init + docker run --rm \ + --entrypoint=/usr/pre-commit-venv/bin/pre-commit \ + --volume "${WORKSPACE}":/src/workspace \ + -v /var/run/docker.sock:/var/run/docker.sock \ + --env="${SKIP_ENV}" \ + --workdir /src/workspace \ + "${IMAGE_TAGGED}" \ + "${PRECOMMIT_CMD}" --config ./.pre-commit-config.yaml "$@" + ;; + + *) + __init + CLEANUP=1 + for HOOK in "$@"; do + # shellcheck disable=SC2086 + "${TOOLS_DIR}"/cbuild "${CBUILD_COMMON_ARGS[@]}" --env "${SKIP_ENV}" --cmd "${PRECOMMIT} run --config ./.pre-commit-config.yaml --all-files ${HOOK}" + done + esac +else + __init + CLEANUP=1 + # shellcheck disable=SC2086 + "${TOOLS_DIR}"/cbuild "${CBUILD_COMMON_ARGS[@]}" --env "${SKIP_ENV}" --cmd "${PRECOMMIT} run --config ./.pre-commit-config.yaml --all-files" +fi diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/release-tool b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/release-tool new file mode 120000 index 00000000..d609abda --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/release-tool @@ -0,0 +1 @@ +test-tool \ No newline at end of file diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/slowhttptest b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/slowhttptest new file mode 120000 index 00000000..d609abda --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/slowhttptest @@ -0,0 +1 @@ +test-tool \ No newline at end of file diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/terraform b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/terraform new file mode 100755 index 00000000..71a3e708 --- /dev/null +++ 
b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/terraform @@ -0,0 +1,85 @@ +#!/usr/bin/env bash +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# environment variables (all optional): +# TERRAFORM_VERSION specify the terraform version to use +# WORKSPACE repo root directory, must be an absolute path +# +# AWS-related environment variables exported into the terraform container: +# For more info on supported env vars, see: +# https://registry.terraform.io/providers/hashicorp/aws/latest/docs#environment-variables +# +# AWS_ACCESS_KEY_ID +# AWS_SECRET_ACCESS_KEY +# AWS_SESSION_TOKEN +# AWS_REGION +# AWS_DEFAULT_REGION +# AWS_PROFILE + +set -o errexit + +# shellcheck disable=SC1090 +source "$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")"/builder.sh + +if ! [[ -v TERRAFORM_VERSION ]]; then + TERRAFORM_VERSION=1.0.4 +fi + +declare -a ENV_VARS +builder::add_aws_env_vars ENV_VARS +ENV_VARS+=( + "HOME=/home" +) + +declare -a DOCKER_RUN_ARGS=( + "--rm" +) +for evar in "${ENV_VARS[@]}" +do + DOCKER_RUN_ARGS+=( + "--env=${evar}" + ) +done +if [[ -t 0 ]] && [[ -t 1 ]]; then + # stdin and stdout are open, assume it's an interactive tty session + DOCKER_RUN_ARGS+=( + "--interactive" + "--tty" + ) +fi + +readonly IMAGE_TAGGED=hashicorp/terraform:"${TERRAFORM_VERSION}" +REL_PWD="$(realpath --relative-to="${WORKSPACE}" "$(pwd)")" +readonly REL_PWD +WORKSPACE_MOUNT="$(builder::get_docker_workspace_mount)" +readonly WORKSPACE_MOUNT + +if [[ -d ${HOME}/.aws ]]; then + DOCKER_RUN_ARGS+=("--volume=${HOME}/.aws/:/home/.aws/") +fi +if [[ -d ${HOME}/.config/gcloud ]]; then + DOCKER_RUN_ARGS+=("--volume=${HOME}/.config/gcloud/:/home/.config/gcloud/") +fi + +DOCKER_RUN_ARGS+=( + "--user=$(id -u):$(id -g)" + "--volume=${WORKSPACE_MOUNT}:/src/workspace" + "--workdir=/src/workspace/${REL_PWD}" +) + +# shellcheck disable=SC2068 +docker run \ + "${DOCKER_RUN_ARGS[@]}" \ + ${IMAGE_TAGGED} "$@" diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/test-tool b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/test-tool new file mode 100755 index 00000000..6a275370 --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/test-tool @@ -0,0 +1,90 @@ +#!/usr/bin/env bash + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
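+
+# this script is executed via symlinks (e.g. curl, ghz, jq -> test-tool); the
+# symlink target selects the builder docker image and the symlink name selects
+# the tool run inside it, e.g. (illustrative):
+#   builders/tools/curl --version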
+ +# environment variables (all optional): +# WORKSPACE repo root directory, must be an absolute path + +set -o errexit + +APP="$(basename "${BASH_SOURCE[0]}")" +readonly APP +APP_LINK_TARGET=$(readlink "${BASH_SOURCE[0]}") || true +readonly APP_LINK_TARGET +if [[ -z ${APP_LINK_TARGET} ]] || [[ ${APP_LINK_TARGET} == test-tool && ${APP} =~ ^(release|utils|coverage)-tool$ ]]; then + printf "%s designed for use via symlink with target program name\n" "${APP}" >/dev/stderr + exit 1 +fi + +declare -i APP_AS_ENTRYPOINT=0 + +case "${APP_LINK_TARGET}" in + release-tool) + IMAGE="release" + APP_AS_ENTRYPOINT=1 + ;; + coverage-tool) + IMAGE="coverage-tools" ;; + test-tool) + IMAGE="test-tools" ;; + utils-tool) + IMAGE="utils" ;; + *) + printf "Unsupported image target: %s\n" "${APP_LINK_TARGET}" >/dev/stderr + exit 1 +esac +readonly IMAGE + +# shellcheck disable=SC1090 +source "$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")"/builder.sh +TOOLS_DIR="$(builder::get_tools_dir)" +readonly TOOLS_DIR +IMAGE_TAGGED="$("${TOOLS_DIR}"/get-builder-image-tagged --image "${IMAGE}")" +readonly IMAGE_TAGGED + +REL_PWD="$(realpath --relative-to="${WORKSPACE}" "$(pwd)")" +readonly REL_PWD +WORKSPACE_MOUNT="$(builder::get_docker_workspace_mount)" +readonly WORKSPACE_MOUNT + +declare -a DOCKER_RUN_ARGS=( + "--rm" + "--interactive" + "--user=$(id -u):$(id -g)" + "--volume=${WORKSPACE_MOUNT}:/src/workspace" + "--workdir=/src/workspace/${REL_PWD}" +) +if [[ -n ${EXTRA_DOCKER_RUN_ARGS} ]]; then + # shellcheck disable=SC2207 + DOCKER_RUN_ARGS+=( + $(echo "${EXTRA_DOCKER_RUN_ARGS}" | envsubst) + ) +fi +if [[ -t 0 ]] && [[ -t 1 ]]; then + # stdin and stdout are open, assume it's an interactive tty session + DOCKER_RUN_ARGS+=("--tty") +fi +if [[ ${APP_AS_ENTRYPOINT} -eq 1 ]]; then + # shellcheck disable=SC2068 + docker run \ + "${DOCKER_RUN_ARGS[@]}" \ + --entrypoint="${APP}" \ + "${IMAGE_TAGGED}" \ + "$@" +else + # shellcheck disable=SC2068 + docker run \ + "${DOCKER_RUN_ARGS[@]}" \ + "${IMAGE_TAGGED}" \ + "${APP}" "$@" +fi diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/unzip b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/unzip new file mode 120000 index 00000000..33a2cc95 --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/unzip @@ -0,0 +1 @@ +utils-tool \ No newline at end of file diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/utils-tool b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/utils-tool new file mode 120000 index 00000000..d609abda --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/utils-tool @@ -0,0 +1 @@ +test-tool \ No newline at end of file diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/wrk2 b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/wrk2 new file mode 120000 index 00000000..d609abda --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/wrk2 @@ -0,0 +1 @@ +test-tool \ No newline at end of file diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/zip b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/zip new file mode 120000 index 00000000..33a2cc95 --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/tools/zip @@ -0,0 +1 @@ +utils-tool \ No newline at end of file diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/builders/version.txt 
b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/version.txt
new file mode 100644
index 00000000..b9daa0c1
--- /dev/null
+++ b/services/inference_sidecar/modules/pytorch_v2_1_1/builders/version.txt
@@ -0,0 +1 @@
+0.55.1
\ No newline at end of file
diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/pytorch.cc b/services/inference_sidecar/modules/pytorch_v2_1_1/pytorch.cc
new file mode 100644
index 00000000..f05e655b
--- /dev/null
+++ b/services/inference_sidecar/modules/pytorch_v2_1_1/pytorch.cc
@@ -0,0 +1,175 @@
+/*
+ * Copyright 2024 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <future>
+#include <memory>
+#include <sstream>
+
+#include <torch/script.h>
+
+#include "absl/base/thread_annotations.h"
+#include "absl/container/flat_hash_map.h"
+#include "absl/status/status.h"
+#include "absl/status/statusor.h"
+#include "absl/strings/numbers.h"
+#include "absl/strings/str_cat.h"
+#include "absl/synchronization/mutex.h"
+#include "modules/module_interface.h"
+#include "proto/inference_sidecar.pb.h"
+#include "src/util/status_macro/status_macros.h"
+#include "utils/request_parser.h"
+
+#include "pytorch_parser.h"
+
+namespace privacy_sandbox::bidding_auction_servers::inference {
+namespace {
+
+// Called on a worker thread to dispatch a per-request inference task. It
+// returns the inference result for a single request in a batched predict
+// request. The forward method of a torch module is non-const although we
+// disallow mutable models.
+absl::StatusOr<torch::IValue> PredictInternal(
+    torch::jit::script::Module* model, const InferenceRequest& request) {
+  absl::string_view model_key = request.model_path;
+  std::vector<torch::jit::IValue> inputs;
+  for (Tensor tensor : request.inputs) {
+    const absl::StatusOr<torch::Tensor> torch_tensor =
+        ConvertFlatArrayToTensor(tensor);
+    if (!torch_tensor.ok()) {
+      return absl::InvalidArgumentError(absl::StrCat(
+          "Model ", model_key, " encounters tensor parsing error: ",
+          torch_tensor.status().message()));
+    }
+    inputs.push_back(*std::move(torch_tensor));
+  }
+  torch::IValue inference_result;
+  // Convert PyTorch exceptions to absl status.
+  try {
+    // Guard against Autograd.
+    c10::InferenceMode guard;
+    return model->forward(inputs);
+  } catch (const std::exception& e) {
+    return absl::InternalError(absl::StrCat(
+        "Model ", model_key,
+        " encounters an exception during evaluation: ", std::string(e.what())));
+  } catch (...) {
+    return absl::InternalError(
+        absl::StrCat("Model ", model_key,
+                     " encounters an unknown exception during evaluation"));
+  }
+}
+
+class PyTorchModule final : public ModuleInterface {
+ public:
+  absl::StatusOr<PredictResponse> Predict(
+      const PredictRequest& request) override ABSL_LOCKS_EXCLUDED(mu_);
+  absl::StatusOr<RegisterModelResponse> RegisterModel(
+      const RegisterModelRequest& request) override ABSL_LOCKS_EXCLUDED(mu_);
+
+ private:
+  // The key to `model_map_` is the `model_path` field in an inference request.
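+  // e.g. an InferenceRequest whose model_path is "my_bucket/my_model" (an
+  // illustrative value) resolves to the torch Module registered under that
+  // same key.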
+  absl::flat_hash_map<std::string, std::unique_ptr<torch::jit::script::Module>>
+      model_map_ ABSL_GUARDED_BY(mu_);
+  absl::Mutex mu_;
+};
+
+absl::StatusOr<PredictResponse> PyTorchModule::Predict(
+    const PredictRequest& request) {
+  absl::ReaderMutexLock lock(&mu_);
+
+  absl::StatusOr<std::vector<InferenceRequest>> parsed_requests =
+      ParseJsonInferenceRequest(request.input());
+  if (!parsed_requests.ok()) {
+    return absl::InvalidArgumentError(
+        absl::StrCat("Encounters batch inference request parsing error: ",
+                     parsed_requests.status().message()));
+  }
+
+  std::vector<std::future<absl::StatusOr<torch::IValue>>> tasks;
+  for (const InferenceRequest& inference_request : (*parsed_requests)) {
+    absl::string_view model_key = inference_request.model_path;
+    auto it = model_map_.find(model_key);
+    if (it == model_map_.end()) {
+      return absl::NotFoundError(
+          absl::StrCat("Model ", model_key, " has not been registered"));
+    }
+    tasks.push_back(std::async(std::launch::async, &PredictInternal,
+                               it->second.get(), inference_request));
+  }
+
+  std::vector<PerModelOutput> batch_result_outputs;
+  for (size_t task_id = 0; task_id < tasks.size(); ++task_id) {
+    tasks[task_id].wait();
+    // TODO(b/329280402): Handle partial results for a batched request.
+    // Currently, if inference succeeds for some models but fails for others,
+    // the batch result returns the error code of the first failed task.
+    PS_ASSIGN_OR_RETURN(torch::IValue task_result, tasks[task_id].get());
+    PerModelOutput output;
+    output.model_path = (*parsed_requests)[task_id].model_path;
+    // TODO(b/330899867): consider changing inference_output from torch::IValue
+    // to a json value so that the conversion is done in the worker threads
+    // instead of the main thread.
+    output.inference_output = task_result;
+    batch_result_outputs.push_back(output);
+  }
+  PS_ASSIGN_OR_RETURN(std::string output_json,
+                      ConvertBatchOutputsToJson(batch_result_outputs));
+  PredictResponse response;
+  response.set_output(output_json);
+  return response;
+}
+
+absl::StatusOr<RegisterModelResponse> PyTorchModule::RegisterModel(
+    const RegisterModelRequest& request) {
+  absl::string_view model_key = request.model_spec().model_path();
+  if (model_key.empty()) {
+    return absl::InvalidArgumentError("Empty model key during registration");
+  }
+  if (request.model_files().size() != 1) {
+    return absl::InvalidArgumentError(absl::StrCat(
+        "The number of model files should be exactly one, but got size()=",
+        request.model_files().size()));
+  }
+
+  absl::WriterMutexLock lock(&mu_);
+  if (model_map_.find(model_key) != model_map_.end()) {
+    return absl::AlreadyExistsError(
+        absl::StrCat("Model ", model_key, " has already been registered"));
+  }
+
+  // Convert PyTorch exceptions to absl status.
+  try {
+    const std::string& model_payload = request.model_files().begin()->second;
+    std::istringstream is(model_payload);
+    model_map_[model_key] =
+        std::make_unique<torch::jit::script::Module>(torch::jit::load(is));
+    // Turn on eval mode for layers that behave differently during train and
+    // eval times, for example, dropout and batch norm layers.
+    model_map_[model_key]->eval();
+  } catch (...) {
{
+    return absl::InternalError("Error loading model");
+  }
+
+  return RegisterModelResponse();
+}
+
+}  // namespace
+
+std::unique_ptr<ModuleInterface> ModuleInterface::Create() {
+  return std::make_unique<PyTorchModule>();
+}
+
+}  // namespace privacy_sandbox::bidding_auction_servers::inference
diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/pytorch_parser.cc b/services/inference_sidecar/modules/pytorch_v2_1_1/pytorch_parser.cc
new file mode 100644
index 00000000..ec26a1e7
--- /dev/null
+++ b/services/inference_sidecar/modules/pytorch_v2_1_1/pytorch_parser.cc
@@ -0,0 +1,266 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "pytorch_parser.h"
+
+#include <limits>
+#include <string>
+#include <vector>
+
+#include <rapidjson/stringbuffer.h>
+#include <rapidjson/writer.h>
+
+#include "rapidjson/document.h"
+#include "src/util/status_macro/status_macros.h"
+
+namespace privacy_sandbox::bidding_auction_servers::inference {
+namespace {
+
+// Template specializations for the different supported data types.
+template <typename T>
+absl::StatusOr<T> Convert(const std::string& str);
+
+template <>
+absl::StatusOr<float> Convert<float>(const std::string& str) {
+  float result;
+  if (absl::SimpleAtof(str, &result)) {
+    return result;
+  } else {
+    return absl::FailedPreconditionError("Error in float conversion");
+  }
+}
+
+template <>
+absl::StatusOr<double> Convert<double>(const std::string& str) {
+  double result;
+  if (absl::SimpleAtod(str, &result)) {
+    return result;
+  } else {
+    return absl::FailedPreconditionError("Error in double conversion");
+  }
+}
+
+template <>
+absl::StatusOr<int8_t> Convert<int8_t>(const std::string& str) {
+  int result;
+  if (absl::SimpleAtoi(str, &result)) {
+    if (result < std::numeric_limits<int8_t>::min() ||
+        result > std::numeric_limits<int8_t>::max()) {
+      return absl::FailedPreconditionError(
+          "The number is outside of bounds of int8_t.");
+    }
+    return result;  // Implicit conversion to int8_t.
+  } else {
+    return absl::FailedPreconditionError("Error in int8 conversion");
+  }
+}
+
+template <>
+absl::StatusOr<int16_t> Convert<int16_t>(const std::string& str) {
+  int result;
+  if (absl::SimpleAtoi(str, &result)) {
+    if (result < std::numeric_limits<int16_t>::min() ||
+        result > std::numeric_limits<int16_t>::max()) {
+      return absl::FailedPreconditionError(
+          "The number is outside of bounds of int16_t.");
+    }
+    return result;  // Implicit conversion to int16_t.
+  } else {
+    return absl::FailedPreconditionError("Error in int16 conversion");
+  }
+}
+
+template <>
+absl::StatusOr<int32_t> Convert<int32_t>(const std::string& str) {
+  int result;
+  if (absl::SimpleAtoi(str, &result)) {
+    return result;
+  } else {
+    return absl::FailedPreconditionError("Error in int32 conversion");
+  }
+}
+
+template <>
+absl::StatusOr<int64_t> Convert<int64_t>(const std::string& str) {
+  int64_t result;
+  if (absl::SimpleAtoi(str, &result)) {
+    return result;
+  } else {
+    return absl::FailedPreconditionError("Error in int64 conversion");
+  }
+}
+
+template <typename T>
+absl::StatusOr<torch::Tensor> ConvertFlatArrayToTensorInternal(
+    const Tensor& tensor) {
+  std::vector<T> data_array;
+  for (const std::string& str : tensor.tensor_content) {
+    PS_ASSIGN_OR_RETURN(T result, Convert<T>(str));
+    data_array.push_back(result);
+  }
+
+  torch::Tensor py_torch_tensor =
+      torch::tensor(data_array, torch::dtype<T>()).view(tensor.tensor_shape);
+
+  return py_torch_tensor;
+}
+
+// Converts a rapidjson::Value to a string.
+absl::StatusOr<std::string> SerializeJsonDoc(
+    const rapidjson::Value& document) {
+  rapidjson::StringBuffer string_buffer;
+  rapidjson::Writer<rapidjson::StringBuffer> writer(string_buffer);
+  if (document.Accept(writer)) {
+    return std::string(string_buffer.GetString());
+  }
+  return absl::InternalError("Error converting inner Json to String.");
+}
+
+// Converts a PyTorch tensor to a rapidjson::Value.
+absl::StatusOr<rapidjson::Value> TensorToJsonValue(
+    const torch::Tensor& tensor, rapidjson::MemoryPoolAllocator<>& allocator) {
+  rapidjson::Value json_tensor(rapidjson::kObjectType);
+
+  std::vector<int64_t> tensor_shape = tensor.sizes().vec();
+  rapidjson::Value tensor_shape_json(rapidjson::kArrayType);
+  for (size_t i = 0; i < tensor_shape.size(); ++i) {
+    tensor_shape_json.PushBack(tensor_shape[i], allocator);
+  }
+  json_tensor.AddMember("tensor_shape", tensor_shape_json.Move(), allocator);
+
+  // Flatten the tensor.
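+  // For example, a contiguous [2, 3] tensor {{1, 2, 3}, {4, 5, 6}} is viewed
+  // as the length-6 vector [1, 2, 3, 4, 5, 6], so tensor_content below is
+  // emitted in row-major order.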
+  torch::Tensor reshaped_tensor = tensor.view({tensor.numel()});
+
+  rapidjson::Value tensor_content(rapidjson::kArrayType);
+  caffe2::TypeMeta dtype = tensor.dtype();
+  if (dtype == torch::ScalarType::Float) {
+    json_tensor.AddMember("data_type", "FLOAT", allocator);
+    for (size_t i = 0; i < tensor.numel(); ++i) {
+      tensor_content.PushBack(reshaped_tensor[i].item<float>(), allocator);
+    }
+  } else if (dtype == torch::ScalarType::Double) {
+    json_tensor.AddMember("data_type", "DOUBLE", allocator);
+    for (size_t i = 0; i < tensor.numel(); ++i) {
+      tensor_content.PushBack(reshaped_tensor[i].item<double>(), allocator);
+    }
+  } else if (dtype == torch::ScalarType::Char) {
+    json_tensor.AddMember("data_type", "INT8", allocator);
+    for (size_t i = 0; i < tensor.numel(); ++i) {
+      tensor_content.PushBack(reshaped_tensor[i].item<int8_t>(), allocator);
+    }
+  } else if (dtype == torch::ScalarType::Short) {
+    json_tensor.AddMember("data_type", "INT16", allocator);
+    for (size_t i = 0; i < tensor.numel(); ++i) {
+      tensor_content.PushBack(reshaped_tensor[i].item<int16_t>(), allocator);
+    }
+  } else if (dtype == torch::ScalarType::Int) {
+    json_tensor.AddMember("data_type", "INT32", allocator);
+    for (size_t i = 0; i < tensor.numel(); ++i) {
+      tensor_content.PushBack(reshaped_tensor[i].item<int32_t>(), allocator);
+    }
+  } else if (dtype == torch::ScalarType::Long) {
+    json_tensor.AddMember("data_type", "INT64", allocator);
+    for (size_t i = 0; i < tensor.numel(); ++i) {
+      tensor_content.PushBack(reshaped_tensor[i].item<int64_t>(), allocator);
+    }
+  } else {
+    return absl::InternalError(
+        absl::StrCat("Unsupported type ", std::string(dtype.name())));
+  }
+  json_tensor.AddMember("tensor_content", tensor_content.Move(), allocator);
+  return json_tensor;
+}
+
+// Extracts PyTorch tensors from inference_result and converts them to
+// rapidjson::Value.
+absl::StatusOr<rapidjson::Value> IValueToJsonValue(
+    const std::string model_path, const torch::IValue& inference_result,
+    rapidjson::MemoryPoolAllocator<>& allocator) {
+  rapidjson::Value tensors_value(rapidjson::kArrayType);
+
+  if (inference_result.isTensor()) {
+    const torch::Tensor tensor = inference_result.toTensor();
+    PS_ASSIGN_OR_RETURN(rapidjson::Value json,
+                        TensorToJsonValue(tensor, allocator));
+    tensors_value.PushBack(json, allocator);
+  } else if (inference_result.isTuple()) {
+    auto output_tuple = inference_result.toTuple();
+    for (int i = 0; i < output_tuple->elements().size(); i++) {
+      at::Tensor tensor = output_tuple->elements()[i].toTensor();
+      PS_ASSIGN_OR_RETURN(rapidjson::Value json,
+                          TensorToJsonValue(tensor, allocator));
+      tensors_value.PushBack(json, allocator);
+    }
+  } else {
+    // TODO(b/329850065): Check if we need to support any additional types.
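+    // Only a single tensor or a tuple of tensors is handled above; a model
+    // whose forward() returns, e.g., a dict or a list of tensors currently
+    // lands in this branch.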
+    return absl::InternalError(absl::StrCat(
+        "Model ", model_path, " produces an unsupported output type"));
+  }
+  return tensors_value;
+}
+
+}  // namespace
+
+absl::StatusOr<torch::Tensor> ConvertFlatArrayToTensor(const Tensor& tensor) {
+  switch (tensor.data_type) {
+    case DataType::kFloat: {
+      return ConvertFlatArrayToTensorInternal<float>(tensor);
+    }
+    case DataType::kDouble: {
+      return ConvertFlatArrayToTensorInternal<double>(tensor);
+    }
+    case DataType::kInt8: {
+      return ConvertFlatArrayToTensorInternal<int8_t>(tensor);
+    }
+    case DataType::kInt16: {
+      return ConvertFlatArrayToTensorInternal<int16_t>(tensor);
+    }
+    case DataType::kInt32: {
+      return ConvertFlatArrayToTensorInternal<int32_t>(tensor);
+    }
+    case DataType::kInt64: {
+      return ConvertFlatArrayToTensorInternal<int64_t>(tensor);
+    }
+    default:
+      return absl::InvalidArgumentError(
+          absl::StrFormat("Unsupported data type %d", tensor.data_type));
+  }
+}
+
+absl::StatusOr<std::string> ConvertBatchOutputsToJson(
+    const std::vector<PerModelOutput>& batch_outputs) {
+  rapidjson::Document document;
+  document.SetObject();
+  rapidjson::MemoryPoolAllocator<>& allocator = document.GetAllocator();
+
+  rapidjson::Value batch(rapidjson::kArrayType);
+  for (const PerModelOutput& output : batch_outputs) {
+    const std::string model_path = output.model_path;
+    const torch::IValue tensors = output.inference_output;
+
+    rapidjson::Value nested_object(rapidjson::kObjectType);
+    rapidjson::Value model_path_value;
+    model_path_value.SetString(model_path.c_str(), allocator);
+    nested_object.AddMember("model_path", model_path_value, allocator);
+
+    PS_ASSIGN_OR_RETURN(rapidjson::Value tensors_value,
+                        IValueToJsonValue(model_path, tensors, allocator));
+
+    nested_object.AddMember("tensors", tensors_value.Move(), allocator);
+    batch.PushBack(nested_object.Move(), allocator);
+  }
+  document.AddMember("response", batch.Move(), allocator);
+
+  return SerializeJsonDoc(document);
+}
+}  // namespace privacy_sandbox::bidding_auction_servers::inference
diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/pytorch_parser.h b/services/inference_sidecar/modules/pytorch_v2_1_1/pytorch_parser.h
new file mode 100644
index 00000000..3b283abc
--- /dev/null
+++ b/services/inference_sidecar/modules/pytorch_v2_1_1/pytorch_parser.h
@@ -0,0 +1,44 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef SERVICES_INFERENCE_SIDECAR_MODULES_PYTORCH_V2_1_1_PYTORCH_PARSER_H_
+#define SERVICES_INFERENCE_SIDECAR_MODULES_PYTORCH_V2_1_1_PYTORCH_PARSER_H_
+
+#include <string>
+#include <utility>
+#include <vector>
+
+#include <torch/script.h>
+
+#include "absl/status/statusor.h"
+#include "utils/request_parser.h"
+
+namespace privacy_sandbox::bidding_auction_servers::inference {
+
+struct PerModelOutput {
+  std::string model_path;
+  torch::IValue inference_output;
+};
+
+// Transforms the internal generic dense tensor representation (a
+// one-dimensional array) into a PyTorch tensor with the desired tensor shape.
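+//
+// Illustrative usage with hypothetical values (mirroring the unit tests):
+//
+//   Tensor t;
+//   t.data_type = DataType::kFloat;
+//   t.tensor_shape = {2, 2};
+//   t.tensor_content = {"1.0", "2.0", "3.0", "4.0"};
+//   absl::StatusOr<torch::Tensor> result = ConvertFlatArrayToTensor(t);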
+absl::StatusOr<torch::Tensor> ConvertFlatArrayToTensor(const Tensor& tensor);
+
+// Converts the inference output corresponding to each model to a JSON string.
+absl::StatusOr<std::string> ConvertBatchOutputsToJson(
+    const std::vector<PerModelOutput>& batch_outputs);
+
+}  // namespace privacy_sandbox::bidding_auction_servers::inference
+
+#endif  // SERVICES_INFERENCE_SIDECAR_MODULES_PYTORCH_V2_1_1_PYTORCH_PARSER_H_
diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/pytorch_parser_test.cc b/services/inference_sidecar/modules/pytorch_v2_1_1/pytorch_parser_test.cc
new file mode 100644
index 00000000..91a845a0
--- /dev/null
+++ b/services/inference_sidecar/modules/pytorch_v2_1_1/pytorch_parser_test.cc
@@ -0,0 +1,252 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "pytorch_parser.h"
+
+#include <torch/torch.h>
+
+#include <string>
+#include <vector>
+
+#include "absl/status/statusor.h"
+#include "googletest/include/gtest/gtest.h"
+#include "utils/request_parser.h"
+
+namespace privacy_sandbox::bidding_auction_servers::inference {
+namespace {
+
+TEST(PyTorchModuleTest, TestConversion) {
+  Tensor tensor;
+  tensor.data_type = DataType::kFloat;
+  tensor.tensor_content = {"1.2", "-2", "3", "4", "5", "6"};
+  tensor.tensor_shape = {2, 3};
+
+  // Call the conversion function.
+  const absl::StatusOr<torch::Tensor> result =
+      ConvertFlatArrayToTensor(tensor);
+  EXPECT_TRUE(result.ok());
+  const torch::Tensor torch_tensor = *std::move(result);
+
+  // Check the dimensions of the resulting tensor.
+  EXPECT_EQ(torch_tensor.dim(), 2);
+  EXPECT_EQ(torch_tensor.size(0), 2);
+  EXPECT_EQ(torch_tensor.size(1), 3);
+
+  // Check the values in the tensor.
+  EXPECT_FLOAT_EQ(torch_tensor[0][0].item<float>(), 1.2);
+  EXPECT_FLOAT_EQ(torch_tensor[0][1].item<float>(), -2.0);
+  EXPECT_FLOAT_EQ(torch_tensor[0][2].item<float>(), 3.0);
+  EXPECT_FLOAT_EQ(torch_tensor[1][0].item<float>(), 4.0);
+  EXPECT_FLOAT_EQ(torch_tensor[1][1].item<float>(), 5.0);
+  EXPECT_FLOAT_EQ(torch_tensor[1][2].item<float>(), 6.0);
+}
+
+TEST(PyTorchModuleTest, TestConversionInt8) {
+  Tensor tensor;
+  tensor.data_type = DataType::kInt8;
+  tensor.tensor_content = {"5"};
+  tensor.tensor_shape = {1};
+
+  const absl::StatusOr<torch::Tensor> result =
+      ConvertFlatArrayToTensor(tensor);
+  EXPECT_TRUE(result.ok());
+  const torch::Tensor torch_tensor = *std::move(result);
+
+  caffe2::TypeMeta data_type = torch_tensor.dtype();
+  EXPECT_EQ(data_type, torch::ScalarType::Char);
+
+  EXPECT_EQ(torch_tensor[0].item<int8_t>(), 5);
+}
+
+TEST(PyTorchModuleTest, TestConversionInt16) {
+  Tensor tensor;
+  tensor.data_type = DataType::kInt16;
+  tensor.tensor_content = {"5"};
+  tensor.tensor_shape = {1};
+
+  const absl::StatusOr<torch::Tensor> result =
+      ConvertFlatArrayToTensor(tensor);
+  EXPECT_TRUE(result.ok());
+  const torch::Tensor torch_tensor = *std::move(result);
+
+  caffe2::TypeMeta data_type = torch_tensor.dtype();
+  EXPECT_EQ(data_type, torch::ScalarType::Short);
+
+  EXPECT_EQ(torch_tensor[0].item<int16_t>(), 5);
+}
+
+TEST(PyTorchModuleTest, TestConversionInt32) {
+  Tensor tensor;
+  tensor.data_type = DataType::kInt32;
+  tensor.tensor_content = {"5"};
+  tensor.tensor_shape = {1};
+
+  const absl::StatusOr<torch::Tensor> result =
+      ConvertFlatArrayToTensor(tensor);
+  EXPECT_TRUE(result.ok());
+  const torch::Tensor torch_tensor = *std::move(result);
+
+  caffe2::TypeMeta data_type = torch_tensor.dtype();
+  EXPECT_EQ(data_type, torch::ScalarType::Int);
+
+  EXPECT_EQ(torch_tensor[0].item<int32_t>(), 5);
+}
+
+TEST(PyTorchModuleTest, TestConversionInt64) {
+  Tensor tensor;
+  tensor.data_type = DataType::kInt64;
+  tensor.tensor_content = {"5"};
+  tensor.tensor_shape = {1};
+
+  const absl::StatusOr<torch::Tensor> result =
+      ConvertFlatArrayToTensor(tensor);
+  EXPECT_TRUE(result.ok());
+  const torch::Tensor torch_tensor = *std::move(result);
+
+  caffe2::TypeMeta data_type = torch_tensor.dtype();
+  EXPECT_EQ(data_type, torch::ScalarType::Long);
+
+  EXPECT_EQ(torch_tensor[0].item<int64_t>(), 5);
+}
+
+TEST(PyTorchModuleTest, TestConversion_BatchSizeOne) {
+  Tensor tensor;
+  tensor.data_type = DataType::kInt64;
+  tensor.tensor_content = {"7"};
+  tensor.tensor_shape = {1, 1};
+
+  // Call the conversion function.
+  const absl::StatusOr<torch::Tensor> result =
+      ConvertFlatArrayToTensor(tensor);
+  EXPECT_TRUE(result.ok());
+  const torch::Tensor torch_tensor = *std::move(result);
+
+  // Check the dimensions of the resulting tensor.
+  EXPECT_EQ(torch_tensor.dim(), 2);
+  EXPECT_EQ(torch_tensor.size(0), 1);
+  EXPECT_EQ(torch_tensor.size(1), 1);
+
+  // Check the values in the tensor.
+  EXPECT_EQ(torch_tensor[0][0].item<int64_t>(), 7);
+}
+
+TEST(PyTorchModuleTest, TestConversion_WrongType) {
+  Tensor tensor;
+  tensor.data_type = DataType::kInt64;
+  tensor.tensor_content = {"seven"};  // Incompatible type.
+  tensor.tensor_shape = {1, 1};
+
+  const absl::StatusOr<torch::Tensor> result =
+      ConvertFlatArrayToTensor(tensor);
+
+  EXPECT_FALSE(result.ok());
+  EXPECT_EQ(result.status().code(), absl::StatusCode::kFailedPrecondition);
+  EXPECT_EQ(result.status().message(), "Error in int64 conversion");
+}
+
+TEST(PyTorchModuleTest, ConvertBatchOutputsToJson_UnsupportedType) {
+  PerModelOutput output;
+  output.model_path = "/path/to/model";
+  torch::IValue tensor_ivalue = torch::tensor({1, 2, 3}, torch::kByte);
+  output.inference_output = tensor_ivalue;
+
+  std::vector<PerModelOutput> batch_outputs = {output};
+
+  absl::StatusOr<std::string> result =
+      ConvertBatchOutputsToJson(batch_outputs);
+  EXPECT_FALSE(result.ok());
+  EXPECT_EQ(result.status().message(), "Unsupported type unsigned char");
+}
+
+TEST(PyTorchModuleTest, ConvertBatchOutputsToJson_SimpleTest) {
+  PerModelOutput output;
+  output.model_path = "/path/to/model";
+  torch::IValue tensor_ivalue = torch::tensor({1, 2, 3});
+  output.inference_output = tensor_ivalue;
+
+  std::vector<PerModelOutput> batch_outputs = {output};
+
+  absl::StatusOr<std::string> result =
+      ConvertBatchOutputsToJson(batch_outputs);
+  ASSERT_TRUE(result.ok());
+
+  const std::string expected_json =
+      R"({"response":[{"model_path":"/path/to/model","tensors":[{"tensor_shape":[3],"data_type":"INT64","tensor_content":[1,2,3]}]}]})";
+
+  EXPECT_EQ(result.value(), expected_json);
+}
+
+TEST(PyTorchModuleTest, ConvertBatchOutputsToJsonTest_FloatDoubleInputTypes) {
+  PerModelOutput output;
+  output.model_path = "/path/to/model";
+  torch::IValue double_tensor =
+      torch::tensor({{1.5, 2., 3.}, {4., 5., 6.}}, torch::kDouble);
+  torch::IValue float_tensor = torch::tensor({3.14}, torch::kFloat);
+  // Use a tuple.
+  std::vector<torch::IValue> elements = {double_tensor, float_tensor};
+  torch::IValue tuple = c10::ivalue::Tuple::create(elements);
+  output.inference_output = tuple;
+
+  std::vector<PerModelOutput> batch_outputs = {output};
+
+  absl::StatusOr<std::string> result =
+      ConvertBatchOutputsToJson(batch_outputs);
+  EXPECT_EQ(result.status().message(), "");
+  EXPECT_TRUE(result.ok());
+
+  const std::string expected_json =
+      R"({"response":[{"model_path":"/path/to/model","tensors":[{"tensor_shape":[2,3],"data_type":"DOUBLE","tensor_content":[1.5,2.0,3.0,4.0,5.0,6.0]},{"tensor_shape":[1],"data_type":"FLOAT","tensor_content":[3.140000104904175]}]}]})";
+
+  EXPECT_EQ(result.value(), expected_json);
+}
+
+TEST(PyTorchModuleTest, ConvertBatchOutputsToJsonTest_IntTypes) {
+  PerModelOutput output;
+  output.model_path = "/path/to/model";
+  torch::IValue int8_tensor = torch::tensor({1}, torch::ScalarType::Char);
+  torch::IValue int16_tensor = torch::tensor({1}, torch::ScalarType::Short);
+  torch::IValue int32_tensor = torch::tensor({1}, torch::ScalarType::Int);
+  torch::IValue int64_tensor = torch::tensor({1}, torch::ScalarType::Long);
+  // Use a tuple.
+  std::vector<torch::IValue> elements = {int8_tensor, int16_tensor,
+                                         int32_tensor, int64_tensor};
+  torch::IValue tuple = c10::ivalue::Tuple::create(elements);
+  output.inference_output = tuple;
+
+  std::vector<PerModelOutput> batch_outputs = {output};
+
+  absl::StatusOr<std::string> result =
+      ConvertBatchOutputsToJson(batch_outputs);
+  EXPECT_EQ(result.status().message(), "");
+  EXPECT_TRUE(result.ok());
+
+  const std::string expected_json =
+      R"({"response":[{"model_path":"/path/to/model","tensors":[{"tensor_shape":[1],"data_type":"INT8","tensor_content":[1]},{"tensor_shape":[1],"data_type":"INT16","tensor_content":[1]},{"tensor_shape":[1],"data_type":"INT32","tensor_content":[1]},{"tensor_shape":[1],"data_type":"INT64","tensor_content":[1]}]}]})";
+
+  EXPECT_EQ(result.value(), expected_json);
+}
+
+TEST(PyTorchModuleTest, ConvertBatchOutputsToJsonTest_2Models) {
+  PerModelOutput output1;
+  output1.model_path = "/path/to/model/1";
+  torch::IValue tensor_ivalue1 = torch::tensor({1, 2, 3});
+  output1.inference_output = tensor_ivalue1;
+
+  PerModelOutput output2;
+  output2.model_path = "/path/to/model/2";
+  torch::IValue tensor_ivalue2 = torch::tensor({125}, torch::kDouble);
+  output2.inference_output = tensor_ivalue2;
+
+  std::vector<PerModelOutput> batch_outputs = {output1, output2};
+
+  absl::StatusOr<std::string> result =
+      ConvertBatchOutputsToJson(batch_outputs);
+  ASSERT_TRUE(result.ok());
+
+  const std::string expected_json =
+      R"({"response":[{"model_path":"/path/to/model/1","tensors":[{"tensor_shape":[3],"data_type":"INT64","tensor_content":[1,2,3]}]},{"model_path":"/path/to/model/2","tensors":[{"tensor_shape":[1],"data_type":"DOUBLE","tensor_content":[125.0]}]}]})";
+
+  EXPECT_EQ(result.value(), expected_json);
+}
+
+}  // namespace
+}  // namespace privacy_sandbox::bidding_auction_servers::inference
diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/pytorch_test.cc b/services/inference_sidecar/modules/pytorch_v2_1_1/pytorch_test.cc
new file mode 100644
index 00000000..c3a0800b
--- /dev/null
+++ b/services/inference_sidecar/modules/pytorch_v2_1_1/pytorch_test.cc
@@ -0,0 +1,1004 @@
+// Copyright 2024 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
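+
+// The tests below run against pre-built TorchScript model files loaded from
+// test data via PopulateRegisterModelRequest. As an illustration only (the
+// training scripts are not part of this change), an identity model such as
+// "simple_model" could be produced with Python code along these lines:
+//
+//   import torch
+//
+//   class Identity(torch.nn.Module):
+//       def forward(self, x):
+//           return x
+//
+//   torch.jit.script(Identity()).save("simple_model")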
+
+#include <memory>
+
+#include <thread>
+
+#include <vector>
+
+#include "absl/status/status.h"
+#include "absl/status/statusor.h"
+#include "absl/strings/match.h"
+#include "absl/strings/str_cat.h"
+#include "absl/synchronization/blocking_counter.h"
+#include "gtest/gtest.h"
+#include "modules/module_interface.h"
+#include "proto/inference_sidecar.pb.h"
+#include "utils/file_util.h"
+
+namespace privacy_sandbox::bidding_auction_servers::inference {
+namespace {
+
+// The simple model returns its input tensor as the output.
+constexpr absl::string_view kSimpleModel = "simple_model";
+constexpr absl::string_view kTestModelVariedInputs1 = "e2e_model1";
+constexpr absl::string_view kTestModelVariedInputs2 = "e2e_model2";
+constexpr absl::string_view kTestModelMixedInputsMixedOutputs =
+    "mixed_inputs_mixed_outputs_model";
+constexpr int kNumThreads = 100;
+
+TEST(PyTorchModuleRegisterModelTest,
+     RegisterModelWithEmptyKeyReturnsInvalidArgument) {
+  std::unique_ptr<ModuleInterface> torch_module = ModuleInterface::Create();
+  RegisterModelRequest register_request;
+  ASSERT_EQ(torch_module->RegisterModel(register_request).status().code(),
+            absl::StatusCode::kInvalidArgument);
+}
+
+TEST(PyTorchModuleRegisterModelTest,
+     RegisterModelWithExistingKeyReturnsAlreadyExists) {
+  std::unique_ptr<ModuleInterface> torch_module = ModuleInterface::Create();
+  RegisterModelRequest register_request;
+  ASSERT_TRUE(
+      PopulateRegisterModelRequest(kSimpleModel, register_request).ok());
+  ASSERT_TRUE(torch_module->RegisterModel(register_request).ok());
+  EXPECT_EQ(torch_module->RegisterModel(register_request).status().code(),
+            absl::StatusCode::kAlreadyExists);
+}
+
+TEST(PyTorchModuleRegisterModelTest,
+     RegisterModelWithNoModelContentReturnsInvalidArgument) {
+  std::unique_ptr<ModuleInterface> torch_module = ModuleInterface::Create();
+  RegisterModelRequest register_request;
+  register_request.mutable_model_spec()->set_model_path("e2e_model1");
+  EXPECT_EQ(torch_module->RegisterModel(register_request).status().code(),
+            absl::StatusCode::kInvalidArgument);
+}
+
+TEST(PyTorchModuleRegisterModelTest, RegisterModelOk) {
+  std::unique_ptr<ModuleInterface> torch_module = ModuleInterface::Create();
+  RegisterModelRequest register_request;
+  ASSERT_TRUE(
+      PopulateRegisterModelRequest(kSimpleModel, register_request).ok());
+  ASSERT_TRUE(torch_module->RegisterModel(register_request).ok());
+}
+
+TEST(PyTorchModulePredictTest,
+     PredictWithoutValidBatchRequestInputReturnsInvalidArgument) {
+  std::unique_ptr<ModuleInterface> torch_module = ModuleInterface::Create();
+  RegisterModelRequest register_request;
+  ASSERT_TRUE(
+      PopulateRegisterModelRequest(kSimpleModel, register_request).ok());
+  ASSERT_TRUE(torch_module->RegisterModel(register_request).ok());
+
+  PredictRequest predict_request;
+  const absl::StatusOr<PredictResponse> predict_response =
+      torch_module->Predict(predict_request);
+  ASSERT_EQ(predict_response.status().code(),
+            absl::StatusCode::kInvalidArgument);
+  EXPECT_TRUE(
+      absl::StrContains(predict_response.status().message(),
+                        "Encounters batch inference request parsing error:"));
+}
+
+constexpr char kInvalidTensorContentRequest[] = R"json({
+  "request" : [{
+    "model_path" : "simple_model",
+    "tensors" : [
+    {
+      "data_type": "DOUBLE",
+      "tensor_shape": [
+        1
+      ],
+      "tensor_content": ["seven"]
+    }
+  ]
+}]
+  })json";
+
+TEST(PyTorchModulePredictTest,
+     PredictWithInvalidTensorContentReturnsInvalidArgument) {
+  std::unique_ptr<ModuleInterface> torch_module = ModuleInterface::Create();
+  RegisterModelRequest register_request;
+  ASSERT_TRUE(
+      PopulateRegisterModelRequest(kSimpleModel, register_request).ok());
+  ASSERT_TRUE(torch_module->RegisterModel(register_request).ok());
+
+  PredictRequest predict_request;
+  predict_request.set_input(kInvalidTensorContentRequest);
+  const absl::StatusOr<PredictResponse> result =
+      torch_module->Predict(predict_request);
+  ASSERT_EQ(result.status().code(), absl::StatusCode::kInvalidArgument);
+  EXPECT_TRUE(
+      absl::StrContains(result.status().message(), "tensor parsing error"));
+}
+
+constexpr char kSimpleRequest[] = R"json({
+  "request" : [{
+    "model_path" : "simple_model",
+    "tensors" : [
+    {
+      "data_type": "DOUBLE",
+      "tensor_shape": [
+        1
+      ],
+      "tensor_content": ["3.14"]
+    }
+  ]
+}]
+  })json";
+
+TEST(PyTorchModulePredictTest, PredictWithoutValidModelReturnsNotFound) {
+  std::unique_ptr<ModuleInterface> torch_module = ModuleInterface::Create();
+  RegisterModelRequest register_request;
+  ASSERT_EQ(torch_module->RegisterModel(register_request).status().code(),
+            absl::StatusCode::kInvalidArgument);
+
+  PredictRequest predict_request;
+  *predict_request.mutable_input() = kSimpleRequest;
+  const absl::StatusOr<PredictResponse> result =
+      torch_module->Predict(predict_request);
+  ASSERT_EQ(result.status().code(), absl::StatusCode::kNotFound);
+  EXPECT_TRUE(
+      absl::StrContains(result.status().message(), "has not been registered"));
+}
+
+TEST(PyTorchModulePredictTest, PredictSimpleSuccess) {
+  std::unique_ptr<ModuleInterface> torch_module = ModuleInterface::Create();
+  RegisterModelRequest register_request;
+  ASSERT_TRUE(
+      PopulateRegisterModelRequest(kSimpleModel, register_request).ok());
+  ASSERT_TRUE(torch_module->RegisterModel(register_request).ok());
+
+  PredictRequest predict_request;
+  predict_request.set_input(kSimpleRequest);
+
+  const absl::StatusOr<PredictResponse> result =
+      torch_module->Predict(predict_request);
+  EXPECT_TRUE(result.ok());
+  EXPECT_EQ(
+      result->output(),
+      "{\"response\":[{\"model_path\":\"simple_model\",\"tensors\":[{\"tensor_"
+      "shape\":[1],\"data_type\":\"DOUBLE\",\"tensor_content\":[3.14]}]}]}");
+}
+
+constexpr char kNotRegisteredModelRequest[] = R"json({
+  "request" : [{
+    "model_path" : "not_registered",
+    "tensors" : [
+    {
+      "data_type": "DOUBLE",
+      "tensor_shape": [
+        1
+      ],
+      "tensor_content": ["3.14"]
+    }
+  ]
+}]
+  })json";
+
+TEST(PyTorchModulePredictTest, PredictReturnsNotFoundError) {
+  std::unique_ptr<ModuleInterface> torch_module = ModuleInterface::Create();
+  RegisterModelRequest register_request;
+  ASSERT_TRUE(
+      PopulateRegisterModelRequest(kTestModelVariedInputs1, register_request)
+          .ok());
+  ASSERT_TRUE(torch_module->RegisterModel(register_request).ok());
+
+  PredictRequest predict_request;
+  *predict_request.mutable_input() = kNotRegisteredModelRequest;
+  const absl::StatusOr<PredictResponse> result =
+      torch_module->Predict(predict_request);
+  EXPECT_FALSE(result.ok());
+  EXPECT_EQ(result.status().code(), absl::StatusCode::kNotFound);
+  EXPECT_EQ(result.status().message(),
+            "Model not_registered has not been registered");
+}
+
+constexpr char kMismatchedInput[] = R"json({
+  "request" : [{
+    "model_path" : "e2e_model1",
+    "tensors" : [
+    {
+      "data_type": "DOUBLE",
+      "tensor_shape": [
+        1
+      ],
+      "tensor_content": ["3.14"]
+    }
+  ]
+}]
+  })json";
+
+TEST(PyTorchModulePredictTest, PredictReturnsInternalError) {
+  std::unique_ptr<ModuleInterface> torch_module = ModuleInterface::Create();
+  RegisterModelRequest register_request;
+  ASSERT_TRUE(
+      PopulateRegisterModelRequest(kTestModelVariedInputs1, register_request)
+          .ok());
+  ASSERT_TRUE(torch_module->RegisterModel(register_request).ok());
+
+  PredictRequest predict_request;
+  *predict_request.mutable_input() =
+      kMismatchedInput;  // kMismatchedInput is not compatible with
+                         // the kTestModelVariedInputs1 model.
+  const absl::StatusOr<PredictResponse> result =
+      torch_module->Predict(predict_request);
+  EXPECT_FALSE(result.ok());
+  EXPECT_EQ(result.status().code(), absl::StatusCode::kInternal);
+  EXPECT_TRUE(absl::StrContains(result.status().message(),
+                                "encounters an exception during evaluation"));
+}
+
+constexpr char kSimpleRequest2Elements[] = R"json({
+  "request" : [{
+    "model_path" : "simple_model",
+    "tensors" : [
+    {
+      "data_type": "DOUBLE",
+      "tensor_shape": [
+        1, 2
+      ],
+      "tensor_content": ["3.14", "2.718"]
+    }
+  ]
+}]
+  })json";
+
+TEST(PyTorchModulePredictTest, PredictSimpleSuccessShape1x2) {
+  std::unique_ptr<ModuleInterface> torch_module = ModuleInterface::Create();
+  RegisterModelRequest register_request;
+  ASSERT_TRUE(
+      PopulateRegisterModelRequest(kSimpleModel, register_request).ok());
+  ASSERT_TRUE(torch_module->RegisterModel(register_request).ok());
+
+  PredictRequest predict_request;
+  *predict_request.mutable_input() = kSimpleRequest2Elements;
+  const absl::StatusOr<PredictResponse> result =
+      torch_module->Predict(predict_request);
+  EXPECT_TRUE(result.ok());
+  EXPECT_EQ(result->output(),
+            "{\"response\":[{\"model_path\":\"simple_model\",\"tensors\":[{"
+            "\"tensor_shape\":[1,2],\"data_type\":\"DOUBLE\",\"tensor_"
+            "content\":[3.14,2.718]}]}]}");
+}
+
+constexpr char kSimpleRequestBatchSize2[] = R"json({
+  "request" : [{
+    "model_path" : "simple_model",
+    "tensors" : [
+    {
+      "data_type": "DOUBLE",
+      "tensor_shape": [
+        2, 1
+      ],
+      "tensor_content": ["3.14", "2.718"]
+    }
+  ]
+}]
+  })json";
+
+TEST(PyTorchModulePredictTest, PredictSimpleBatchSize2Success) {
+  std::unique_ptr<ModuleInterface> torch_module = ModuleInterface::Create();
+  RegisterModelRequest register_request;
+  ASSERT_TRUE(
+      PopulateRegisterModelRequest(kSimpleModel, register_request).ok());
+  ASSERT_TRUE(torch_module->RegisterModel(register_request).ok());
+
+  PredictRequest predict_request;
+  predict_request.set_input(kSimpleRequestBatchSize2);
+
+  const absl::StatusOr<PredictResponse> result =
+      torch_module->Predict(predict_request);
+  ASSERT_TRUE(result.ok());
+  EXPECT_EQ(result->output(),
+            "{\"response\":[{\"model_path\":\"simple_model\",\"tensors\":[{"
+            "\"tensor_shape\":[2,1],\"data_type\":\"DOUBLE\",\"tensor_"
+            "content\":[3.14,2.718]}]}]}");
+}
+
+constexpr char kSameModelSameBatchSizeMultipleRequests[] = R"json({
+  "request" : [{
+    "model_path" : "simple_model",
+    "tensors" : [
+    {
+      "data_type": "DOUBLE",
+      "tensor_shape": [
+        1
+      ],
+      "tensor_content": ["3.14"]
+    }
+  ]
+},
+{
+  "model_path" : "simple_model",
+  "tensors" : [
+    {
+      "data_type": "DOUBLE",
+      "tensor_shape": [
+        1
+      ],
+      "tensor_content": ["2.718"]
+    }
+  ]
+}]
+  })json";
+
+TEST(PyTorchModulePredictTest,
+     PredictSameModelSameBatchSizeMultipleRequestsSuccess) {
+  std::unique_ptr<ModuleInterface> torch_module = ModuleInterface::Create();
+  RegisterModelRequest register_request;
+  ASSERT_TRUE(
+      PopulateRegisterModelRequest(kSimpleModel, register_request).ok());
+  ASSERT_TRUE(torch_module->RegisterModel(register_request).ok());
+
+  PredictRequest predict_request;
+  predict_request.set_input(kSameModelSameBatchSizeMultipleRequests);
+
+  const absl::StatusOr<PredictResponse> result =
+      torch_module->Predict(predict_request);
+  ASSERT_TRUE(result.ok());
+  EXPECT_EQ(
+      result->output(),
+      "{\"response\":[{\"model_path\":\"simple_model\",\"tensors\":[{\"tensor_"
+      "shape\":[1],\"data_type\":\"DOUBLE\",\"tensor_content\":[3.14]}]},{"
+      "\"model_path\":\"simple_model\",\"tensors\":[{\"tensor_shape\":[1],"
+      "\"data_type\":\"DOUBLE\",\"tensor_content\":[2.718]}]}]}");
+}
+
+constexpr char kSameModelVariedBatchSizesMultipleRequests[] = R"json({
+  "request" : [{
+    "model_path" : "simple_model",
+    "tensors" : [
+    {
+      "data_type": "DOUBLE",
+      "tensor_shape": [
+        2, 1
+      ],
+      "tensor_content": ["3.14", "1.00"]
+    }
+  ]
+},
+{
+  "model_path" : "simple_model",
+  "tensors" : [
+    {
+      "data_type": "DOUBLE",
+      "tensor_shape": [
+        3, 1
+      ],
+      "tensor_content": ["2.718", "1.00", "1.00"]
+    }
+  ]
+}]
+  })json";
+
+TEST(PyTorchModulePredictTest,
+     PredictSameModelVariedBatchSizesMultipleRequestsSuccess) {
+  std::unique_ptr<ModuleInterface> torch_module = ModuleInterface::Create();
+  RegisterModelRequest register_request;
+  ASSERT_TRUE(
+      PopulateRegisterModelRequest(kSimpleModel, register_request).ok());
+  ASSERT_TRUE(torch_module->RegisterModel(register_request).ok());
+
+  PredictRequest predict_request;
+  predict_request.set_input(kSameModelVariedBatchSizesMultipleRequests);
+
+  const absl::StatusOr<PredictResponse> result =
+      torch_module->Predict(predict_request);
+  ASSERT_TRUE(result.ok());
+  EXPECT_EQ(
+      result->output(),
+      "{\"response\":[{\"model_path\":\"simple_model\",\"tensors\":[{\"tensor_"
+      "shape\":[2,1],\"data_type\":\"DOUBLE\",\"tensor_content\":[3.14,1.0]}]},"
+      "{\"model_path\":\"simple_model\",\"tensors\":[{\"tensor_shape\":[3,1],"
+      "\"data_type\":\"DOUBLE\",\"tensor_content\":[2.718,1.0,1.0]}]}]}");
+}
+
+constexpr char kRequestsWithMultipleInvalidInputs[] = R"json({
+  "request" : [{
+    "model_path" : "simple_model",
+    "tensors" : [
+    {
+      "data_type": "DOUBLE",
+      "tensor_shape": [
+        1
+      ],
+      "tensor_content": ["seven"]
+    }
+  ]
+},
+{
+  "model_path" : "simple_model",
+  "tensors" : [
+    {
+      "data_type": "DOUBLE",
+      "tensor_shape": [
+        1, 2
+      ],
+      "tensor_content": ["3.14", "2.718"]
+    }
+  ]
+}]
+  })json";
+
+TEST(PyTorchModulePredictTest, PredictMultipleInvalidInputsReturnsFirstError) {
+  std::unique_ptr<ModuleInterface> torch_module = ModuleInterface::Create();
+  RegisterModelRequest register_request;
+  ASSERT_TRUE(
+      PopulateRegisterModelRequest(kSimpleModel, register_request).ok());
+  ASSERT_TRUE(torch_module->RegisterModel(register_request).ok());
+
+  PredictRequest predict_request;
+  predict_request.set_input(kRequestsWithMultipleInvalidInputs);
+
+  const absl::StatusOr<PredictResponse> result =
+      torch_module->Predict(predict_request);
+  ASSERT_EQ(result.status().code(), absl::StatusCode::kInvalidArgument);
+  EXPECT_TRUE(
+      absl::StrContains(result.status().message(), "tensor parsing error"));
+}
+
+constexpr char kBothValidAndInvalidInputs[] = R"json({
+  "request" : [{
+    "model_path" : "simple_model",
+    "tensors" : [
+    {
+      "data_type": "DOUBLE",
+      "tensor_shape": [
+        1
+      ],
+      "tensor_content": ["3.14"]
+    }
+  ]
+},
+{
+  "model_path" : "simple_model",
+  "tensors" : [
+    {
+      "data_type": "DOUBLE",
+      "tensor_shape": [
+        1
+      ],
+      "tensor_content": ["seven"]
+    }
+  ]
+}]
+  })json";
+
+TEST(PyTorchModulePredictTest, PredictBothValidAndInvalidInputsReturnsError) {
+  std::unique_ptr<ModuleInterface> torch_module = ModuleInterface::Create();
+  RegisterModelRequest register_request;
+  ASSERT_TRUE(
+      PopulateRegisterModelRequest(kSimpleModel, register_request).ok());
+  ASSERT_TRUE(torch_module->RegisterModel(register_request).ok());
+
+  PredictRequest predict_request;
+  predict_request.set_input(kBothValidAndInvalidInputs);
+
+  const absl::StatusOr<PredictResponse> result =
+      torch_module->Predict(predict_request);
+  ASSERT_EQ(result.status().code(), absl::StatusCode::kInvalidArgument);
+  EXPECT_TRUE(
+      absl::StrContains(result.status().message(), "tensor parsing error"));
+}
+
+constexpr char kVariedInputsRequestBatchSize1[] = R"json({
+  "request" : [{
+    "model_path" :
"e2e_model1", + "tensors" : [ + { + "tensor_name": "serving_default_int_input1:0", + "data_type": "INT64", + "tensor_shape": [ + 1 + ], + "tensor_content": ["8"] + }, + { + "tensor_name": "serving_default_int_input2:0", + "data_type": "INT64", + "tensor_shape": [ + 1 + ], + "tensor_content": ["6"] + }, + { + "tensor_name": "serving_default_int_input3:0", + "data_type": "INT64", + "tensor_shape": [ + 1 + ], + "tensor_content": ["18"] + }, + { + "tensor_name": "serving_default_int_input4:0", + "data_type": "INT64", + "tensor_shape": [ + 1 + ], + "tensor_content": ["11"] + }, + { + "tensor_name": "serving_default_int_input5:0", + "data_type": "INT64", + "tensor_shape": [ + 1 + ], + "tensor_content": ["9"] + }, + { + "tensor_name": "serving_default_double1:0", + "data_type": "FLOAT", + "tensor_shape": [ + 1, 10 + ], + "tensor_content": ["0.4313", "0.3381", "0.3103", "0.3150", "0.9595", "0.7862", "0.6386", "0.9695", "0.0469", "0.5807"] + }, + { + "tensor_name": "serving_default_double2:0", + "data_type": "FLOAT", + "tensor_shape": [ + 1, 10 + ], + "tensor_content": ["0.8201", "0.8321", "0.1021", "0.6779", "0.2152", "0.4805", "0.3957", "0.0825", "0.0230", "0.1711"] + } + ] +}] + })json"; + +TEST(PyTorchModulePredictTest, PredictVariedInputsBatchSize1Success) { + std::unique_ptr torch_module = ModuleInterface::Create(); + RegisterModelRequest register_request; + ASSERT_TRUE( + PopulateRegisterModelRequest(kTestModelVariedInputs1, register_request) + .ok()); + ASSERT_TRUE(torch_module->RegisterModel(register_request).ok()); + + PredictRequest predict_request; + predict_request.set_input(kVariedInputsRequestBatchSize1); + + const absl::StatusOr result = + torch_module->Predict(predict_request); + ASSERT_TRUE(result.ok()); + EXPECT_EQ(result->output(), + "{\"response\":[{\"model_path\":\"e2e_model1\",\"tensors\":[{" + "\"tensor_shape\":[1,1],\"data_type\":\"FLOAT\",\"tensor_content\":" + "[0.4846605658531189]}]}]}"); +} + +constexpr char kVariedInputsRequestBatchSize2[] = R"json({ + "request" : [{ + "model_path" : "e2e_model1", + "tensors" : [ + { + "tensor_name": "serving_default_int_input1:0", + "data_type": "INT64", + "tensor_shape": [ + 2 + ], + "tensor_content": ["8", "6"] + }, + { + "tensor_name": "serving_default_int_input2:0", + "data_type": "INT64", + "tensor_shape": [ + 2 + ], + "tensor_content": ["18", "11"] + }, + { + "tensor_name": "serving_default_int_input3:0", + "data_type": "INT64", + "tensor_shape": [ + 2 + ], + "tensor_content": ["9", "14"] + }, + { + "tensor_name": "serving_default_int_input4:0", + "data_type": "INT64", + "tensor_shape": [ + 2 + ], + "tensor_content": ["2", "1"] + }, + { + "tensor_name": "serving_default_int_input5:0", + "data_type": "INT64", + "tensor_shape": [ + 2 + ], + "tensor_content": ["10", "4"] + }, + { + "tensor_name": "serving_default_double1:0", + "data_type": "FLOAT", + "tensor_shape": [ + 2, 10 + ], + "tensor_content": [ + "0.7862", "0.6386", "0.9695", "0.0469", "0.5807", "0.8201", "0.8321", + "0.1021", "0.6779", "0.2152", "0.4805", "0.3957", "0.0825", "0.0230", + "0.1711", "0.7269", "0.7287", "0.0651", "0.3122", "0.5082" + ] + }, + { + "tensor_name": "serving_default_double2:0", + "data_type": "FLOAT", + "tensor_shape": [ + 2, 10 + ], + "tensor_content": [ + "0.0677", "0.7817", "0.9529", "0.5884", "0.1285", "0.6166", "0.6815", + "0.8959", "0.2340", "0.0520", "0.7197", "0.5311", "0.3371", "0.2905", + "0.2422", "0.9047", "0.4137", "0.8606", "0.9463", "0.5633" + ] + } + ] +}] + })json"; + +TEST(PyTorchModulePredictTest, 
PredictVariedInputsBatchSize2Success) { + std::unique_ptr torch_module = ModuleInterface::Create(); + RegisterModelRequest register_request; + ASSERT_TRUE( + PopulateRegisterModelRequest(kTestModelVariedInputs1, register_request) + .ok()); + ASSERT_TRUE(torch_module->RegisterModel(register_request).ok()); + + PredictRequest predict_request; + predict_request.set_input(kVariedInputsRequestBatchSize2); + + const absl::StatusOr result = + torch_module->Predict(predict_request); + ASSERT_TRUE(result.ok()); + EXPECT_EQ(result->output(), + "{\"response\":[{\"model_path\":\"e2e_model1\",\"tensors\":[{" + "\"tensor_shape\":[2,1],\"data_type\":\"FLOAT\",\"tensor_content\":" + "[0.5412338972091675,0.4757022559642792]}]}]}"); +} + +constexpr char kMixedInputsBatchSize1[] = R"json({ + "request" : [{ + "model_path" : "mixed_inputs_mixed_outputs_model", + "tensors" : [ + { + "data_type": "INT64", + "tensor_shape": [ + 1 + ], + "tensor_content": ["3"] + }, + { + "data_type": "FLOAT", + "tensor_shape": [ + 1, 3 + ], + "tensor_content": ["0.0394", "1.7093", "-0.2127"] + } + ] +}] + })json"; + +TEST(PyTorchModulePredictTest, PredictMixedInputsMixedOutputsSuccess) { + std::unique_ptr torch_module = ModuleInterface::Create(); + RegisterModelRequest register_request; + ASSERT_TRUE(PopulateRegisterModelRequest(kTestModelMixedInputsMixedOutputs, + register_request) + .ok()); + ASSERT_TRUE(torch_module->RegisterModel(register_request).ok()); + + PredictRequest predict_request; + predict_request.set_input(kMixedInputsBatchSize1); + + const absl::StatusOr result = + torch_module->Predict(predict_request); + EXPECT_TRUE(result.ok()); + EXPECT_EQ( + result->output(), + "{\"response\":[{\"model_path\":\"mixed_inputs_mixed_outputs_model\"," + "\"tensors\":[{\"tensor_shape\":[1,4],\"data_type\":\"FLOAT\",\"tensor_" + "content\":[-0.3637504279613495,-0.16950955986976624,-0." 
+ "21080102026462556,-0.2136428952217102]},{\"tensor_shape\":[1,1]," + "\"data_" + "type\":\"INT64\",\"tensor_content\":[0]}]}]}"); +} + +constexpr char kVariedInputsMultipleModelsRequest[] = R"json({ + "request" : [{ + "model_path" : "e2e_model1", + "tensors" : [ + { + "tensor_name": "serving_default_int_input1:0", + "data_type": "INT64", + "tensor_shape": [ + 2 + ], + "tensor_content": ["8", "6"] + }, + { + "tensor_name": "serving_default_int_input2:0", + "data_type": "INT64", + "tensor_shape": [ + 2 + ], + "tensor_content": ["18", "11"] + }, + { + "tensor_name": "serving_default_int_input3:0", + "data_type": "INT64", + "tensor_shape": [ + 2 + ], + "tensor_content": ["9", "14"] + }, + { + "tensor_name": "serving_default_int_input4:0", + "data_type": "INT64", + "tensor_shape": [ + 2 + ], + "tensor_content": ["2", "1"] + }, + { + "tensor_name": "serving_default_int_input5:0", + "data_type": "INT64", + "tensor_shape": [ + 2 + ], + "tensor_content": ["10", "4"] + }, + { + "tensor_name": "serving_default_double1:0", + "data_type": "FLOAT", + "tensor_shape": [ + 2, 10 + ], + "tensor_content": [ + "0.7862", "0.6386", "0.9695", "0.0469", "0.5807", "0.8201", "0.8321", + "0.1021", "0.6779", "0.2152", "0.4805", "0.3957", "0.0825", "0.0230", + "0.1711", "0.7269", "0.7287", "0.0651", "0.3122", "0.5082" + ] + }, + { + "tensor_name": "serving_default_double2:0", + "data_type": "FLOAT", + "tensor_shape": [ + 2, 10 + ], + "tensor_content": [ + "0.0677", "0.7817", "0.9529", "0.5884", "0.1285", "0.6166", "0.6815", + "0.8959", "0.2340", "0.0520", "0.7197", "0.5311", "0.3371", "0.2905", + "0.2422", "0.9047", "0.4137", "0.8606", "0.9463", "0.5633" + ] + } + ] +}, +{ + "model_path" : "e2e_model2", + "tensors" : [ + { + "tensor_name": "serving_default_int_input1:0", + "data_type": "INT64", + "tensor_shape": [ + 2 + ], + "tensor_content": ["8", "6"] + }, + { + "tensor_name": "serving_default_int_input2:0", + "data_type": "INT64", + "tensor_shape": [ + 2 + ], + "tensor_content": ["18", "11"] + }, + { + "tensor_name": "serving_default_int_input3:0", + "data_type": "INT64", + "tensor_shape": [ + 2 + ], + "tensor_content": ["9", "14"] + }, + { + "tensor_name": "serving_default_int_input4:0", + "data_type": "INT64", + "tensor_shape": [ + 2 + ], + "tensor_content": ["2", "1"] + }, + { + "tensor_name": "serving_default_int_input5:0", + "data_type": "INT64", + "tensor_shape": [ + 2 + ], + "tensor_content": ["10", "4"] + }, + { + "tensor_name": "serving_default_double1:0", + "data_type": "FLOAT", + "tensor_shape": [ + 2, 10 + ], + "tensor_content": [ + "0.7862", "0.6386", "0.9695", "0.0469", "0.5807", "0.8201", "0.8321", + "0.1021", "0.6779", "0.2152", "0.4805", "0.3957", "0.0825", "0.0230", + "0.1711", "0.7269", "0.7287", "0.0651", "0.3122", "0.5082" + ] + }, + { + "tensor_name": "serving_default_double2:0", + "data_type": "FLOAT", + "tensor_shape": [ + 2, 10 + ], + "tensor_content": [ + "0.0677", "0.7817", "0.9529", "0.5884", "0.1285", "0.6166", "0.6815", + "0.8959", "0.2340", "0.0520", "0.7197", "0.5311", "0.3371", "0.2905", + "0.2422", "0.9047", "0.4137", "0.8606", "0.9463", "0.5633" + ] + } + ] +}] + })json"; + +TEST(PyTorchModulePredictTest, + PredictVariedInputsMultipleModelsBatchSize1Success) { + std::unique_ptr torch_module = ModuleInterface::Create(); + RegisterModelRequest register_request1; + ASSERT_TRUE( + PopulateRegisterModelRequest(kTestModelVariedInputs1, register_request1) + .ok()); + ASSERT_TRUE(torch_module->RegisterModel(register_request1).ok()); + RegisterModelRequest register_request2; + 
ASSERT_TRUE( + PopulateRegisterModelRequest(kTestModelVariedInputs2, register_request2) + .ok()); + ASSERT_TRUE(torch_module->RegisterModel(register_request2).ok()); + + PredictRequest predict_request; + predict_request.set_input(kVariedInputsMultipleModelsRequest); + + const absl::StatusOr result = + torch_module->Predict(predict_request); + ASSERT_TRUE(result.ok()); + EXPECT_EQ( + result->output(), + "{\"response\":[{\"model_path\":\"e2e_model1\",\"tensors\":[{\"tensor_" + "shape\":[2,1],\"data_type\":\"FLOAT\",\"tensor_content\":[0." + "5412338972091675,0.4757022559642792]}]},{\"model_path\":\"e2e_model2\"," + "\"tensors\":[{\"tensor_shape\":[2,1],\"data_type\":\"FLOAT\",\"tensor_" + "content\":[0.3857767879962921,0.458008348941803]}]}]}"); +} + +TEST(PyTorchModuleConcurrencyTest, RegisterSameModelWithMultipleThreads) { + std::unique_ptr torch_module = ModuleInterface::Create(); + RegisterModelRequest register_request; + ASSERT_TRUE( + PopulateRegisterModelRequest(kSimpleModel, register_request).ok()); + + absl::BlockingCounter num_returns_ok(1); + absl::BlockingCounter num_returns_already_exists(kNumThreads - 1); + + std::vector threads; + threads.reserve(kNumThreads); + for (int i = 0; i < kNumThreads; i++) { + threads.push_back( + std::thread([&torch_module, ®ister_request, &num_returns_ok, + &num_returns_already_exists]() { + absl::StatusCode code = + torch_module->RegisterModel(register_request).status().code(); + if (code == absl::StatusCode::kOk) { + num_returns_ok.DecrementCount(); + } else if (code == absl::StatusCode::kAlreadyExists) { + num_returns_already_exists.DecrementCount(); + } + })); + } + + num_returns_ok.Wait(); + num_returns_already_exists.Wait(); + + for (auto& thread : threads) { + thread.join(); + } +} + +TEST(PyTorchModuleConcurrencyTest, + ThreadsMakingInferenceRequestAgainstSingleModel) { + std::unique_ptr torch_module = ModuleInterface::Create(); + RegisterModelRequest register_request; + ASSERT_TRUE( + PopulateRegisterModelRequest(kSimpleModel, register_request).ok()); + ASSERT_TRUE(torch_module->RegisterModel(register_request).ok()); + + PredictRequest predict_request; + predict_request.set_input(kSimpleRequest); + + std::vector threads; + threads.reserve(kNumThreads); + for (int i = 0; i < kNumThreads; i++) { + threads.push_back(std::thread([&torch_module, &predict_request]() { + const absl::StatusOr result = + torch_module->Predict(predict_request); + EXPECT_TRUE(result.ok()); + if (result.ok()) { + EXPECT_EQ(result->output(), + "{\"response\":[{\"model_path\":\"simple_model\",\"tensors\":" + "[{\"tensor_shape\":[1],\"data_type\":\"DOUBLE\",\"tensor_" + "content\":[3.14]}]}]}"); + } + })); + } + + for (auto& thread : threads) { + thread.join(); + } +} + +TEST(PyTorchModuleConcurrencyTest, + ThreadsMakingInferenceRequestAgainstMultipleModels) { + std::unique_ptr torch_module = ModuleInterface::Create(); + + std::thread register_request_thread1([&torch_module]() { + RegisterModelRequest register_request1; + ASSERT_TRUE( + PopulateRegisterModelRequest(kTestModelVariedInputs1, register_request1) + .ok()); + EXPECT_TRUE(torch_module->RegisterModel(register_request1).ok()); + }); + + std::thread register_request_thread2([&torch_module]() { + RegisterModelRequest register_request2; + ASSERT_TRUE( + PopulateRegisterModelRequest(kTestModelVariedInputs2, register_request2) + .ok()); + EXPECT_TRUE(torch_module->RegisterModel(register_request2).ok()); + }); + + register_request_thread1.join(); + register_request_thread2.join(); + + std::vector threads; + 
PredictRequest predict_request; + predict_request.set_input(kVariedInputsMultipleModelsRequest); + for (int i = 0; i < kNumThreads; i++) { + threads.push_back(std::thread([&torch_module, &predict_request]() { + const absl::StatusOr result = + torch_module->Predict(predict_request); + EXPECT_TRUE(result.ok()); + if (result.ok()) { + EXPECT_EQ(result->output(), + "{\"response\":[{\"model_path\":\"e2e_model1\",\"tensors\":[{" + "\"tensor_shape\":[2,1],\"data_type\":\"FLOAT\",\"tensor_" + "content\":[0.5412338972091675,0.4757022559642792]}]},{" + "\"model_path\":\"e2e_model2\",\"tensors\":[{\"tensor_" + "shape\":[2,1],\"data_type\":\"FLOAT\",\"tensor_content\":[0." + "3857767879962921,0.458008348941803]}]}]}"); + } + })); + } + + for (auto& thread : threads) { + thread.join(); + } +} + +} // namespace +} // namespace privacy_sandbox::bidding_auction_servers::inference diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/third_party/BUILD b/services/inference_sidecar/modules/pytorch_v2_1_1/third_party/BUILD new file mode 100644 index 00000000..dd5d6a3d --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/third_party/BUILD @@ -0,0 +1,17 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +package( + default_visibility = ["//visibility:public"], +) diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/third_party/pytorch_v2_1_1.patch b/services/inference_sidecar/modules/pytorch_v2_1_1/third_party/pytorch_v2_1_1.patch new file mode 100644 index 00000000..bd985fb3 --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/third_party/pytorch_v2_1_1.patch @@ -0,0 +1,265 @@ +diff --git a/BUILD.bazel b/BUILD.bazel +index 8776fcb2ecf..0ef44a5b738 100644 +--- a/BUILD.bazel ++++ b/BUILD.bazel +@@ -26,6 +26,7 @@ COMMON_COPTS = [ + "-DAT_PER_OPERATOR_HEADERS", + "-DATEN_THREADING=NATIVE", + "-DNO_CUDNN_DESTROY_HANDLE", ++ "-DC10_DISABLE_LEGACY_IMPORT" + ] + if_cuda([ + "-DUSE_CUDA", + "-DUSE_CUDNN", +@@ -187,7 +187,6 @@ filegroup( + name = "aten_native_mkl_cpp", + srcs = glob([ + "aten/src/ATen/native/mkl/*.cpp", +- "aten/src/ATen/mkl/*.cpp", + ]), + ) + +@@ -264,16 +263,16 @@ header_template_rule( + out = "aten/src/ATen/Config.h", + include = "aten/src", + substitutions = { +- "@AT_MKLDNN_ENABLED@": "1", ++ "@AT_MKLDNN_ENABLED@": "0", + "@AT_MKLDNN_ACL_ENABLED@": "0", +- "@AT_MKL_ENABLED@": "1", ++ "@AT_MKL_ENABLED@": "0", + "@AT_MKL_SEQUENTIAL@": "0", + "@AT_FFTW_ENABLED@": "0", + "@AT_POCKETFFT_ENABLED@": "0", + "@AT_NNPACK_ENABLED@": "0", + "@CAFFE2_STATIC_LINK_CUDA_INT@": "0", +- "@AT_BUILD_WITH_BLAS@": "1", +- "@AT_BUILD_WITH_LAPACK@": "1", ++ "@AT_BUILD_WITH_BLAS@": "0", ++ "@AT_BUILD_WITH_LAPACK@": "0", + "@AT_PARALLEL_OPENMP@": "0", + "@AT_PARALLEL_NATIVE@": "1", + "@AT_PARALLEL_NATIVE_TBB@": "0", +@@ -336,7 +335,6 @@ intern_build_aten_ops( + deps = [ + ":aten_headers", + "@fbgemm", +- "@mkl", + "@sleef", + ], + ) +@@ -371,7 +369,6 @@ cc_library( + ":caffe2_for_aten_headers", + ":torch_headers", + "@fbgemm", +- "@ideep", + ], 
+ alwayslink = True, + ) +@@ -448,6 +445,7 @@ CAFFE2_COPTS = COMMON_COPTS + [ + "-Dcaffe2_EXPORTS", + "-DCAFFE2_USE_GLOO", + "-DCAFFE2_USE_CUDNN", ++ "-DCAFFE2_USE_EIGEN_FOR_BLAS", + "-DCAFFE2_BUILD_MAIN_LIB", + "-fvisibility-inlines-hidden", + "-fno-math-errno", +@@ -570,7 +569,7 @@ filegroup( + filegroup( + name = "caffe2_onnx_srcs", + srcs = [ +- "caffe2/onnx/backend.cc", ++ # "caffe2/onnx/backend.cc", + "caffe2/onnx/backend_rep.cc", + "caffe2/onnx/device.cc", + "caffe2/onnx/helper.cc", +@@ -1388,7 +1386,6 @@ cc_library( + ":caffe2_contrib_srcs", + ":caffe2_core_srcs", + ":caffe2_distributed_srcs", +- ":caffe2_ideep_srcs", + ":caffe2_onnx_srcs", + ":caffe2_operators_srcs", + ":caffe2_opt_srcs", +@@ -1625,7 +1623,11 @@ cc_library( + "torch/csrc/distributed/c10d/quantization/quantization_gpu.cu", + ], + )) + torch_sources, +- copts = TORCH_COPTS, ++ copts = TORCH_COPTS + ["-I.", ++ "-fopenmp=libgomp", ++ "-isystembazel-out/k8-opt/bin", ++ "-isystembazel-out/k8-dbg/bin", ++ ], + defines = [ + "CAFFE2_NIGHTLY_VERSION=20200115", + ], +@@ -1640,6 +1642,9 @@ cc_library( + "@cutlass", + ]), + alwayslink = True, ++ linkopts = ["-ldl", ++ "-fopenmp=libgomp", ++ ], + ) + + cc_library( +diff --git a/aten.bzl b/aten.bzl +index 9c6325d16ab..055d1da40b1 100644 +--- a/aten.bzl ++++ b/aten.bzl +@@ -69,7 +69,7 @@ def generate_aten_impl(ctx): + command = ctx.executable.generator.path + " $@", + arguments = [ + "--source-path", +- "aten/src/ATen", ++ "external/pytorch_v2_1_1/aten/src/ATen", + "--per-operator-headers", + "--install_dir", + install_dir, +diff --git a/build.bzl b/build.bzl +index 1324323bb2c..60dbfe98434 100644 +--- a/build.bzl ++++ b/build.bzl +@@ -22,7 +22,6 @@ def define_targets(rules): + "CAFFE2_PERF_WITH_AVX512", + "CAFFE2_USE_EXCEPTION_PTR", + "CAFFE2_USE_CUDNN", +- "USE_MKLDNN", + "CAFFE2_USE_ITT", + "TORCH_DISABLE_GPU_ASSERTS", + "EIGEN_MPL2_ONLY", +diff --git a/build_variables.bzl b/build_variables.bzl +index bf9cf2b46e1..cb0a7045e14 100644 +--- a/build_variables.bzl ++++ b/build_variables.bzl +@@ -671,7 +671,6 @@ libtorch_extra_sources = libtorch_core_jit_sources + [ + "torch/csrc/jit/serialization/export_bytecode.cpp", + "torch/csrc/jit/serialization/export_module.cpp", + "torch/csrc/jit/serialization/flatbuffer_serializer.cpp", +- "torch/csrc/jit/serialization/import_legacy.cpp", + "torch/csrc/utils/byte_order.cpp", + "torch/csrc/utils/out_types.cpp", + ] +diff --git a/c10/BUILD.bazel b/c10/BUILD.bazel +index aadcf5ee3e5..dc7e1fa9476 100644 +--- a/c10/BUILD.bazel ++++ b/c10/BUILD.bazel +@@ -16,7 +16,7 @@ bool_flag( + + bool_flag( + name = "use_glog", +- build_setting_default = True, ++ build_setting_default = False, + ) + + config_setting( +@@ -26,5 +26,5 @@ config_setting( + + config_setting( + name = "using_glog", +- flag_values = {":use_glog": "true"}, ++ flag_values = {":use_glog": "false"}, + ) +diff --git a/c10/util/build.bzl b/c10/util/build.bzl +index fb06a151783..6a06212e8ba 100644 +--- a/c10/util/build.bzl ++++ b/c10/util/build.bzl +@@ -43,6 +43,7 @@ def define_targets(rules): + "//c10:using_glog": ["@com_github_glog//:glog"], + "//conditions:default": [], + }), ++ includes = ["../.."], + # This library uses flags and registration. Do not let the + # linker remove them. 
+ alwayslink = True, +diff --git a/third_party/sleef.BUILD b/third_party/sleef.BUILD +index 573f9c5b54a..eac2f252bc2 100644 +--- a/third_party/sleef.BUILD ++++ b/third_party/sleef.BUILD +@@ -36,8 +36,8 @@ SLEEF_PUBLIC_HEADERS = [ + ] + + SLEEF_PRIVATE_INCLUDES = [ +- "-Iexternal/sleef/src/arch", +- "-Iexternal/sleef/src/common", ++ "-Iexternal/pytorch_v2_1_1_sleef/src/arch", ++ "-Iexternal/pytorch_v2_1_1_sleef/src/common", + ] + + SLEEF_PUBLIC_INCLUDES = [ +diff --git a/third_party/tbb.BUILD b/third_party/tbb.BUILD +index b11e6584733..70e5cab4103 100644 +--- a/third_party/tbb.BUILD ++++ b/third_party/tbb.BUILD +@@ -5,7 +5,7 @@ licenses(["notice"]) # Apache 2.0 + + template_rule( + name = "version_string", +- src = "@//:aten/src/ATen/cpu/tbb/extra/version_string.ver.in", ++ src = "@pytorch_v2_1_1//:aten/src/ATen/cpu/tbb/extra/version_string.ver.in", + out = "version_string.h", + substitutions = { + "@CMAKE_SYSTEM_NAME@": "Unknown", +@@ -42,8 +42,8 @@ cc_library( + exclude = ["include/tbb/scalable_allocator.h"], + ), + copts = [ +- "-Iexternal/tbb/src/rml/include", +- "-Iexternal/tbb/src", ++ "-Iexternal/pytorch_v2_1_1_tbb/src/rml/include", ++ "-Iexternal/pytorch_v2_1_1_tbb/src", + "-pthread", + "-DDO_ITT_NOTIFY=1", + "-DUSE_PTHREAD=1", +diff --git a/tools/config/defs.bzl b/tools/config/defs.bzl +index 6ddd0e99156..97bd4f02077 100644 +--- a/tools/config/defs.bzl ++++ b/tools/config/defs.bzl +@@ -8,7 +8,7 @@ load("@bazel_skylib//lib:selects.bzl", "selects") + def if_cuda(if_true, if_false = []): + """Helper for selecting based on the whether CUDA is configured. """ + return selects.with_or({ +- "@//tools/config:cuda_enabled_and_capable": if_true, ++ "@pytorch_v2_1_1//tools/config:cuda_enabled_and_capable": if_true, + "//conditions:default": if_false, + }) + +diff --git a/tools/setup_helpers/generate_code.py b/tools/setup_helpers/generate_code.py +index afdd168d179..656759cf89a 100644 +--- a/tools/setup_helpers/generate_code.py ++++ b/tools/setup_helpers/generate_code.py +@@ -12,8 +12,8 @@ try: + except ImportError: + from yaml import SafeLoader as YamlLoader # type: ignore[assignment, misc] + +-NATIVE_FUNCTIONS_PATH = "aten/src/ATen/native/native_functions.yaml" +-TAGS_PATH = "aten/src/ATen/native/tags.yaml" ++NATIVE_FUNCTIONS_PATH = "external/pytorch_v2_1_1/aten/src/ATen/native/native_functions.yaml" ++TAGS_PATH = "external/pytorch_v2_1_1/aten/src/ATen/native/tags.yaml" + + + def generate_code( +@@ -201,8 +201,8 @@ def main() -> None: + if options.gen_lazy_ts_backend: + aten_path = os.path.dirname(os.path.dirname(options.native_functions_path)) + ts_backend_yaml = os.path.join(aten_path, "native/ts_native_functions.yaml") +- ts_native_functions = "torch/csrc/lazy/ts_backend/ts_native_functions.cpp" +- ts_node_base = "torch/csrc/lazy/ts_backend/ts_node.h" ++ ts_native_functions = "external/pytorch_v2_1_1/torch/csrc/lazy/ts_backend/ts_native_functions.cpp" ++ ts_node_base = "external/pytorch_v2_1_1/torch/csrc/lazy/ts_backend/ts_node.h" + install_dir = options.install_dir or os.fspath(options.gen_dir / "torch/csrc") + lazy_install_dir = os.path.join(install_dir, "lazy/generated") + os.makedirs(lazy_install_dir, exist_ok=True) +diff --git a/torchgen/gen_lazy_tensor.py b/torchgen/gen_lazy_tensor.py +index 3e4e4b04142..bbe19ce284c 100644 +--- a/torchgen/gen_lazy_tensor.py ++++ b/torchgen/gen_lazy_tensor.py +@@ -134,7 +134,7 @@ def validate_shape_inference_header( + shape_inference_hdr: str, expected_shape_infr_decls: List[str] + ) -> None: + try: +- with open(shape_inference_hdr) as f: ++ with 
open("external/pytorch_v2_1_1/" + shape_inference_hdr) as f: + shape_infr_decls = f.read() + shape_infr_decl_lines = set(shape_infr_decls.split("\n")) + except OSError as e: diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/third_party/pytorch_v2_1_1_deps1.bzl b/services/inference_sidecar/modules/pytorch_v2_1_1/third_party/pytorch_v2_1_1_deps1.bzl new file mode 100644 index 00000000..dfeb41bf --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/third_party/pytorch_v2_1_1_deps1.bzl @@ -0,0 +1,211 @@ +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +#  http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Copied and modified from https://github.com/pytorch/pytorch/blob/4c55dc50355d5e923642c59ad2a23d6ad54711e7/WORKSPACE + +load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive") +load("@bazel_tools//tools/build_defs/repo:utils.bzl", "maybe") + +def pytorch_v2_1_1_deps1(): + maybe( + http_archive, + name = "pytorch_v2_1_1", + sha256 = "9b73a7d3e1a2186d80de20b12eec319e740d9ec9ee59e7e1b6b04f34fa961226", + strip_prefix = "pytorch-4c55dc50355d5e923642c59ad2a23d6ad54711e7", + patch_args = ["-p1"], + patches = ["//:third_party/pytorch_v2_1_1.patch"], + urls = ["https://github.com/pytorch/pytorch/archive/4c55dc50355d5e923642c59ad2a23d6ad54711e7.tar.gz"], + repo_mapping = { + "@asmjit": "@pytorch_v2_1_1_asmjit", + "@com_github_glog": "@pytorch_v2_1_1_glog", + "@com_github_google_flatbuffers": "@pytorch_v2_1_1_flatbuffers", + "@eigen": "@pytorch_v2_1_1_eigen", + "@fbgemm": "@pytorch_v2_1_1_fbgemm", + "@fmt": "@pytorch_v2_1_1_fmt", + "@foxi": "@pytorch_v2_1_1_foxi", + "@gloo": "@pytorch_v2_1_1_gloo", + "@kineto": "@pytorch_v2_1_1_kineto", + "@onnx": "@pytorch_v2_1_1_onnx", + "@org_pytorch_cpuinfo": "@pytorch_v2_1_1_cpuinfo", + "@pip_deps": "@pytorch_v2_1_1_pip_deps", + "@pytorch": "@pytorch_v2_1_1", + "@rules_cuda": "@pytorch_v2_1_1_rules_cuda", + "@sleef": "@pytorch_v2_1_1_sleef", + "@tensorpipe": "@pytorch_v2_1_1_tensorpipe", + }, + ) + maybe( + http_archive, + name = "pytorch_v2_1_1_glog", + build_file_content = """ +licenses(['notice']) + +load(':bazel/glog.bzl', 'glog_library') +glog_library(with_gflags=0) + """, + strip_prefix = "glog-0.4.0", + sha256 = "f28359aeba12f30d73d9e4711ef356dc842886968112162bc73002645139c39c", + urls = [ + "https://github.com/google/glog/archive/v0.4.0.tar.gz", + ], + ) + maybe( + http_archive, + name = "pytorch_v2_1_1_fbgemm", + strip_prefix = "FBGEMM-cdae5d97e3aa9fda4222f31c04dbd80249c918d1", + sha256 = "4d180de8471ebea1f89db65572b79461cef1ae714e8dacc9650bcc0a6e1a1ea2", + urls = [ + "https://github.com/pytorch/fbgemm/archive/cdae5d97e3aa9fda4222f31c04dbd80249c918d1.tar.gz", + ], + repo_mapping = { + "@asmjit": "@pytorch_v2_1_1_asmjit", + "@cpuinfo": "@pytorch_v2_1_1_cpuinfo", + }, + ) + maybe( + http_archive, + name = "pytorch_v2_1_1_flatbuffers", + strip_prefix = "flatbuffers-01834de25e4bf3975a9a00e816292b1ad0fe184b", + sha256 = "5a1f2a116b49a1aaa3563c3d9e3baf6ea256eebf57c7bc144a2d28aa73deba7b", + urls = [ + 
"https://github.com/google/flatbuffers/archive/01834de25e4bf3975a9a00e816292b1ad0fe184b.tar.gz", + ], + ) + maybe( + http_archive, + name = "pytorch_v2_1_1_cpuinfo", + strip_prefix = "cpuinfo-6481e8bef08f606ddd627e4d3be89f64d62e1b8a", + sha256 = "e5c47db587d3ef15537fdd32f929e070aabbcf607e86ab1d86196385dedd2db9", + urls = [ + "https://github.com/pytorch/cpuinfo/archive/6481e8bef08f606ddd627e4d3be89f64d62e1b8a.tar.gz", + ], + repo_mapping = {"@org_pytorch_cpuinfo": "@pytorch_v2_1_1_cpuinfo"}, + ) + maybe( + http_archive, + name = "pytorch_v2_1_1_sleef", + build_file = "@pytorch_v2_1_1//:third_party/sleef.BUILD", + strip_prefix = "sleef-e0a003ee838b75d11763aa9c3ef17bf71a725bff", + sha256 = "fdf8901ebeb58924e197dd14d264f7c04e9f80da720f4c38deb13992f1bb89ac", + urls = [ + "https://github.com/shibatch/sleef/archive/e0a003ee838b75d11763aa9c3ef17bf71a725bff.tar.gz", + ], + repo_mapping = {"@pytorch": "@pytorch_v2_1_1"}, + ) + maybe( + http_archive, + name = "pytorch_v2_1_1_asmjit", + build_file = "@pytorch_v2_1_1_fbgemm//:third_party/asmjit.BUILD", + strip_prefix = "asmjit-d3fbf7c9bc7c1d1365a94a45614b91c5a3706b81", + sha256 = "bd19e308b30f0c8c8b0d34be356422b4ccce12f9dc590f77ecb886c21bcfedab", + urls = [ + "https://github.com/asmjit/asmjit/archive/d3fbf7c9bc7c1d1365a94a45614b91c5a3706b81.tar.gz", + ], + ) + maybe( + http_archive, + name = "pytorch_v2_1_1_gloo", + build_file = "@pytorch_v2_1_1//:third_party/gloo.BUILD", + strip_prefix = "gloo-597accfd79f5b0f9d57b228dec088ca996686475", + sha256 = "cb734b41f3f5f772312645e44ec5d583e0f4da2b1253e2b3d0ffcba7f6da190a", + urls = [ + "https://github.com/facebookincubator/gloo/archive/597accfd79f5b0f9d57b228dec088ca996686475.tar.gz", + ], + repo_mapping = {"@pytorch": "@pytorch_v2_1_1"}, + ) + maybe( + http_archive, + name = "pytorch_v2_1_1_tensorpipe", + build_file = "//third_party:pytorch_v2_1_1_tensorpipe.BUILD", + strip_prefix = "tensorpipe-52791a2fd214b2a9dc5759d36725909c1daa7f2e", + sha256 = "7ff0b84c0623f3360ec7c34b8c4fe02e7f9a87f8fa559c303f9574e44be0bc56", + urls = [ + "https://github.com/pytorch/tensorpipe/archive/52791a2fd214b2a9dc5759d36725909c1daa7f2e.tar.gz", + ], + repo_mapping = { + "@libnop": "@pytorch_v2_1_1_libnop", + "@libuv": "@pytorch_v2_1_1_libuv", + "@pytorch": "@pytorch_v2_1_1", + }, + ) + maybe( + http_archive, + name = "pytorch_v2_1_1_eigen", + build_file = "@pytorch_v2_1_1//:third_party/eigen.BUILD", + strip_prefix = "eigen-git-mirror-3147391d946bb4b6c68edd901f2add6ac1f31f8c", + sha256 = "0974e0d92afc7eb581057a904559155564cbdf65b6c60be13fe4b925b34e3966", + urls = [ + "https://github.com/eigenteam/eigen-git-mirror/archive/3147391d946bb4b6c68edd901f2add6ac1f31f8c.tar.gz", + ], + ) + maybe( + http_archive, + name = "pytorch_v2_1_1_foxi", + build_file = "@pytorch_v2_1_1//:third_party/foxi.BUILD", + strip_prefix = "foxi-c278588e34e535f0bb8f00df3880d26928038cad", + sha256 = "52665aae3c7352a1fa68d017a87c825559c514072409e30753f36aa6eb0c7b4d", + urls = [ + "https://github.com/houseroad/foxi/archive/c278588e34e535f0bb8f00df3880d26928038cad.tar.gz", + ], + ) + maybe( + http_archive, + name = "pytorch_v2_1_1_kineto", + build_file = "@pytorch_v2_1_1//:third_party/kineto.BUILD", + strip_prefix = "kineto-49e854d805d916b2031e337763928d2f8d2e1fbf", + sha256 = "f260688160d043db4ffc2338502a1343b0ecaec0accc6dd5e535ee898eb619a1", + urls = [ + "https://github.com/pytorch/kineto/archive/49e854d805d916b2031e337763928d2f8d2e1fbf.tar.gz", + ], + ) + maybe( + http_archive, + name = "pytorch_v2_1_1_fmt", + build_file = "@pytorch_v2_1_1//:third_party/fmt.BUILD", 
+ strip_prefix = "fmt-e57ca2e3685b160617d3d95fcd9e789c4e06ca88", + sha256 = "2d18d7b1c393791a180c8321cedd702093b5527625be955178bef8af2b1cf6db", + urls = [ + "https://github.com/fmtlib/fmt/archive/e57ca2e3685b160617d3d95fcd9e789c4e06ca88.tar.gz", + ], + ) + maybe( + http_archive, + name = "pytorch_v2_1_1_onnx", + build_file = "@pytorch_v2_1_1//:third_party/onnx.BUILD", + strip_prefix = "onnx-1014f41f17ecc778d63e760a994579d96ba471ff", + sha256 = "0dbfa8eaf48c983aabbca8add6ba9ec7cf12268daecbd081bdbd81dac192b668", + urls = [ + "https://github.com/onnx/onnx/archive/1014f41f17ecc778d63e760a994579d96ba471ff.tar.gz", + ], + ) + maybe( + http_archive, + name = "pytorch_v2_1_1_libuv", + build_file = "//third_party:pytorch_v2_1_1_libuv.BUILD", + strip_prefix = "libuv-1dff88e5161cba5c59276d2070d2e304e4dcb242", + sha256 = "c66c8a024bb31c6134347a0d5630f6275c0bfe94d4144504d45099412fd7a054", + urls = [ + "https://github.com/libuv/libuv/archive/1dff88e5161cba5c59276d2070d2e304e4dcb242.tar.gz", + ], + ) + maybe( + http_archive, + name = "pytorch_v2_1_1_libnop", + build_file = "//third_party:pytorch_v2_1_1_libnop.BUILD", + strip_prefix = "libnop-910b55815be16109f04f4180e9adee14fb4ce281", + sha256 = "ec3604671f8ea11aed9588825f9098057ebfef7a8908e97459835150eea9f63a", + urls = [ + "https://github.com/google/libnop/archive/910b55815be16109f04f4180e9adee14fb4ce281.tar.gz", + ], + ) diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/third_party/pytorch_v2_1_1_deps2.bzl b/services/inference_sidecar/modules/pytorch_v2_1_1/third_party/pytorch_v2_1_1_deps2.bzl new file mode 100644 index 00000000..a3906600 --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/third_party/pytorch_v2_1_1_deps2.bzl @@ -0,0 +1,65 @@ +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +#  http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Copied and modified from https://github.com/tensorflow/runtime/blob/b1c7cce21ba4661c17ac72421c6a0e2015e7bef3/third_party/rules_cuda/cuda/dependencies.bzl + +load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive") +load("@bazel_tools//tools/build_defs/repo:utils.bzl", "maybe") + +def _local_cuda_impl(repository_ctx): + # cuda has not been enabled for the build, and this repository is not local + # cuda lib at /usr/local/cuda. 
+ defs_template = "def if_local_cuda(true, false = []):\n return %s" + repository_ctx.file("BUILD") # Empty file + repository_ctx.file("defs.bzl", defs_template % "false") + +_local_cuda = repository_rule( + implementation = _local_cuda_impl, + environ = ["CUDA_PATH", "PATH"], + # remotable = True, +) + +def pytorch_v2_1_1_deps2(): + maybe( + name = "pytorch_v2_1_1_bazel_skylib", + repo_rule = http_archive, + sha256 = "97e70364e9249702246c0e9444bccdc4b847bed1eb03c5a3ece4f83dfe6abc44", + urls = [ + "https://mirror.bazel.build/github.com/bazelbuild/bazel-skylib/releases/download/1.0.2/bazel-skylib-1.0.2.tar.gz", + "https://github.com/bazelbuild/bazel-skylib/releases/download/1.0.2/bazel-skylib-1.0.2.tar.gz", + ], + ) + maybe( + name = "pytorch_v2_1_1_platforms", + repo_rule = http_archive, + sha256 = "48a2d8d343863989c232843e01afc8a986eb8738766bfd8611420a7db8f6f0c3", + urls = [ + "https://mirror.bazel.build/github.com/bazelbuild/platforms/releases/download/0.0.2/platforms-0.0.2.tar.gz", + "https://github.com/bazelbuild/platforms/releases/download/0.0.2/platforms-0.0.2.tar.gz", + ], + ) + _local_cuda(name = "pytorch_v2_1_1_local_cuda") + maybe( + name = "pytorch_v2_1_1_rules_cuda", + repo_rule = http_archive, + strip_prefix = "runtime-b1c7cce21ba4661c17ac72421c6a0e2015e7bef3/third_party/rules_cuda", + sha256 = "f80438bee9906e9ecb1a8a4ae2365374ac1e8a283897281a2db2fb7fcf746333", + urls = ["https://github.com/tensorflow/runtime/archive/b1c7cce21ba4661c17ac72421c6a0e2015e7bef3.tar.gz"], + repo_mapping = { + "@bazel_skylib": "@pytorch_v2_1_1_bazel_skylib", + "@local_cuda": "@pytorch_v2_1_1_local_cuda", + "@platforms": "@pytorch_v2_1_1_platforms", + "@rules_cuda": "@pytorch_v2_1_1_rules_cuda", + }, + ) diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/third_party/pytorch_v2_1_1_libnop.BUILD b/services/inference_sidecar/modules/pytorch_v2_1_1/third_party/pytorch_v2_1_1_libnop.BUILD new file mode 100644 index 00000000..b4782518 --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/third_party/pytorch_v2_1_1_libnop.BUILD @@ -0,0 +1,26 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Copied and modified from https://github.com/pytorch/pytorch/blob/ +# 4c55dc50355d5e923642c59ad2a23d6ad54711e7/third_party/tensorpipe.BUILD + +load("@rules_cc//cc:defs.bzl", "cc_library") + +cc_library( + name = "libnop", + srcs = [], + hdrs = glob(["include/**/*.h"]), + includes = ["include"], + visibility = ["//visibility:public"], +) diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/third_party/pytorch_v2_1_1_libuv.BUILD b/services/inference_sidecar/modules/pytorch_v2_1_1/third_party/pytorch_v2_1_1_libuv.BUILD new file mode 100644 index 00000000..8cd67893 --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/third_party/pytorch_v2_1_1_libuv.BUILD @@ -0,0 +1,80 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Copied and modified from https://github.com/pytorch/pytorch/blob/ +# 4c55dc50355d5e923642c59ad2a23d6ad54711e7/third_party/tensorpipe.BUILD + +load("@rules_cc//cc:defs.bzl", "cc_library") + +LIBUV_COMMON_SRCS = [ + "src/fs-poll.c", + "src/idna.c", + "src/inet.c", + "src/random.c", + "src/strscpy.c", + "src/threadpool.c", + "src/timer.c", + "src/uv-common.c", + "src/uv-data-getter-setters.c", + "src/version.c", +] + +LIBUV_POSIX_SRCS = [ + "src/unix/async.c", + "src/unix/core.c", + "src/unix/dl.c", + "src/unix/fs.c", + "src/unix/getaddrinfo.c", + "src/unix/getnameinfo.c", + "src/unix/loop.c", + "src/unix/loop-watcher.c", + "src/unix/pipe.c", + "src/unix/poll.c", + "src/unix/process.c", + "src/unix/random-devurandom.c", + "src/unix/signal.c", + "src/unix/stream.c", + "src/unix/tcp.c", + "src/unix/thread.c", + "src/unix/tty.c", + "src/unix/udp.c", +] + +LIBUV_LINUX_SRCS = LIBUV_POSIX_SRCS + [ + "src/unix/proctitle.c", + "src/unix/linux-core.c", + "src/unix/linux-inotify.c", + "src/unix/linux-syscalls.c", + "src/unix/procfs-exepath.c", + "src/unix/random-getrandom.c", + "src/unix/random-sysctl-linux.c", +] + +cc_library( + name = "libuv", + srcs = LIBUV_COMMON_SRCS + LIBUV_LINUX_SRCS, + hdrs = glob( + [ + "include/*.h", + "include/uv/*.h", + "src/*.h", + "src/unix/*.h", + ], + ), + includes = [ + "include", + "src", + ], + visibility = ["//visibility:public"], +) diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/third_party/pytorch_v2_1_1_tensorpipe.BUILD b/services/inference_sidecar/modules/pytorch_v2_1_1/third_party/pytorch_v2_1_1_tensorpipe.BUILD new file mode 100644 index 00000000..66a3738c --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/third_party/pytorch_v2_1_1_tensorpipe.BUILD @@ -0,0 +1,128 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
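+ +# Note: the header_template_rules below replace the #cmakedefine01 placeholders +# with hard-coded values, so the SHM/IBV transports, the CMA channel, and the +# CUDA GDR/IPC channels are all unconditionally enabled in this build.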
+ +# Copied and modified from https://github.com/pytorch/pytorch/blob/ +# 4c55dc50355d5e923642c59ad2a23d6ad54711e7/third_party/tensorpipe.BUILD + +load("@pytorch//third_party:substitution.bzl", "header_template_rule") +load("@rules_cc//cc:defs.bzl", "cc_library") + +header_template_rule( + name = "tensorpipe_cpu_config_header", + src = "tensorpipe/config.h.in", + out = "tensorpipe/config.h", + substitutions = { + "#cmakedefine01 TENSORPIPE_HAS_CMA_CHANNEL": "#define TENSORPIPE_HAS_CMA_CHANNEL 1", + "#cmakedefine01 TENSORPIPE_HAS_IBV_TRANSPORT": "#define TENSORPIPE_HAS_IBV_TRANSPORT 1", + "#cmakedefine01 TENSORPIPE_HAS_SHM_TRANSPORT": "#define TENSORPIPE_HAS_SHM_TRANSPORT 1", + }, +) + +header_template_rule( + name = "tensorpipe_cuda_config_header", + src = "tensorpipe/config_cuda.h.in", + out = "tensorpipe/config_cuda.h", + substitutions = { + "#cmakedefine01 TENSORPIPE_HAS_CUDA_GDR_CHANNEL": "#define TENSORPIPE_HAS_CUDA_GDR_CHANNEL 1", + "#cmakedefine01 TENSORPIPE_HAS_CUDA_IPC_CHANNEL": "#define TENSORPIPE_HAS_CUDA_IPC_CHANNEL 1", + }, +) + +# We explicitly list the CUDA headers & sources, and we consider everything else +# as CPU (using a catch-all glob). This is both because there's fewer CUDA files +# (thus making it easier to list them exhaustively) and because it will make it +# more likely to catch a misclassified file: if we forget to mark a file as CUDA +# we'll try to build it on CPU and that's likely to fail. + +TENSORPIPE_CUDA_HEADERS = [ + "tensorpipe/tensorpipe_cuda.h", + "tensorpipe/channel/cuda_basic/*.h", + "tensorpipe/channel/cuda_gdr/*.h", + "tensorpipe/channel/cuda_ipc/*.h", + "tensorpipe/channel/cuda_xth/*.h", + "tensorpipe/common/cuda.h", + "tensorpipe/common/cuda_buffer.h", + "tensorpipe/common/cuda_lib.h", + "tensorpipe/common/cuda_loop.h", + "tensorpipe/common/nvml_lib.h", +] + +TENSORPIPE_CUDA_SOURCES = [ + "tensorpipe/channel/cuda_basic/*.cc", + "tensorpipe/channel/cuda_gdr/*.cc", + "tensorpipe/channel/cuda_ipc/*.cc", + "tensorpipe/channel/cuda_xth/*.cc", + "tensorpipe/common/cuda_buffer.cc", + "tensorpipe/common/cuda_loop.cc", +] + +TENSORPIPE_CPU_HEADERS = glob( + [ + "tensorpipe/*.h", + "tensorpipe/channel/*.h", + "tensorpipe/channel/*/*.h", + "tensorpipe/common/*.h", + "tensorpipe/core/*.h", + "tensorpipe/transport/*.h", + "tensorpipe/transport/*/*.h", + ], + exclude = TENSORPIPE_CUDA_HEADERS, +) + +TENSORPIPE_CPU_SOURCES = glob( + [ + "tensorpipe/*.cc", + "tensorpipe/channel/*.cc", + "tensorpipe/channel/*/*.cc", + "tensorpipe/common/*.cc", + "tensorpipe/core/*.cc", + "tensorpipe/transport/*.cc", + "tensorpipe/transport/*/*.cc", + ], + exclude = TENSORPIPE_CUDA_SOURCES, +) + +cc_library( + name = "tensorpipe_cpu", + srcs = TENSORPIPE_CPU_SOURCES, + hdrs = TENSORPIPE_CPU_HEADERS + [":tensorpipe_cpu_config_header"], + copts = [ + "-std=c++14", + ], + includes = [ + ".", + ], + visibility = ["//visibility:public"], + deps = [ + "@libnop", + "@libuv", + ], +) + +cc_library( + name = "tensorpipe_cuda", + srcs = glob(TENSORPIPE_CUDA_SOURCES), + hdrs = glob(TENSORPIPE_CUDA_HEADERS) + [":tensorpipe_cuda_config_header"], + copts = [ + "-std=c++14", + ], + includes = [ + ".", + ], + visibility = ["//visibility:public"], + deps = [ + ":tensorpipe_cpu", + "@cuda", + ], +) diff --git a/services/inference_sidecar/modules/pytorch_v2_1_1/tools/collect-logs b/services/inference_sidecar/modules/pytorch_v2_1_1/tools/collect-logs new file mode 100755 index 00000000..3924139e --- /dev/null +++ b/services/inference_sidecar/modules/pytorch_v2_1_1/tools/collect-logs @@ -0,0 +1,89 @@ 
+#!/usr/bin/env bash + +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# environment variables (all optional): +# WORKSPACE Set the path to the workspace (repo root) + +set -o pipefail +set -o errexit + +trap _cleanup EXIT +function _cleanup() { + declare -r -i STATUS=$? + if [[ ${STATUS} -ne 0 ]]; then + printf "collect-logs exit code: %d\n" ${STATUS} &>/dev/stderr + sleep 5s + fi + exit ${STATUS} +} + +function copy_log_outputs() { + declare -r _rootdest="$1" + declare -r _prune_to_dir="$2" + declare -r _filepath="$3" + declare -r _logpath="${_filepath##*/${_prune_to_dir}/}" + declare -r _destdir="${_rootdest}/${_logpath%/*}" + declare -r _fname="${_filepath##*/}" + declare -r _destfname="${_fname/#test./sponge_log.}" + mkdir -p "${_destdir}" + cp "${_filepath}" "${_destdir}/${_destfname}" +} +export -f copy_log_outputs + +function extract_test_outputs() { + declare -r _rootdest="$1" + declare -r _filepath="$2" + declare -r _logpath="${_filepath##*bazel-testlogs/}" + declare -r _destdir="${_rootdest}/${_logpath%/*/*}" + mkdir -p "${_destdir}" + unzip -q -d "${_destdir}" "${_filepath}" +} +export -f extract_test_outputs + + +declare ZIP_FILENAME="$1" +if [[ ${ZIP_FILENAME##*.} != zip ]]; then + ZIP_FILENAME=logs.zip +fi +SCRIPT_DIR="$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")" +readonly SCRIPT_DIR +WORKSPACE="${WORKSPACE-"$(readlink -f "${SCRIPT_DIR}"/..)"}" +readonly WORKSPACE +OUTDIR="$(mktemp --directory)" +readonly OUTDIR +export OUTDIR +mkdir -p "${OUTDIR}"/{test,other} + +if [[ -d "${WORKSPACE}"/bazel-testlogs ]]; then + # copy all test.log and test.xml files + find -L "${WORKSPACE}"/bazel-testlogs -type f '(' -name test.log -o -name test.xml ')' -exec bash -c 'copy_log_outputs "${OUTDIR}"/test bazel-testlogs "$0"' {} ';' + # extract test outputs + find -L "${WORKSPACE}"/bazel-testlogs -type f -name outputs.zip -exec bash -c 'extract_test_outputs "${OUTDIR}"/test "$0"' {} ';' +fi +if [[ -d "${WORKSPACE}"/bazel-out ]]; then + # copy log files under bazel-out (except for test.log) + find -L "${WORKSPACE}"/bazel-out -type f -name "*.log" ! -name test.log -exec bash -c 'copy_log_outputs "${OUTDIR}"/other bazel-out "$0"' {} ';' +fi + +declare -r DISTDIR="${WORKSPACE}"/dist +mkdir -p "${DISTDIR}" +( + cd "${OUTDIR}" + zip -r -q "${DISTDIR}/${ZIP_FILENAME}" -- * +) +printf "stored bazel logs to %s\n" "${DISTDIR}/${ZIP_FILENAME}" &>/dev/stderr +unzip -Z -h "${DISTDIR}/${ZIP_FILENAME}" &>/dev/stderr +rm -rf "${OUTDIR}" diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/.bazelrc b/services/inference_sidecar/modules/tensorflow_v2_14_0/.bazelrc new file mode 100644 index 00000000..bc11676a --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/.bazelrc @@ -0,0 +1,64 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +build --experimental_repo_remote_exec + +build --announce_rc +build --verbose_failures +build --config=clang +build --compilation_mode=opt +build --output_filter='^//((?!(third_party):).)*$' +build --color=yes + +build:clang --cxxopt=-fbracket-depth=512 +build:clang --client_env=CC=clang +build:clang --cxxopt=-std=c++17 +build:clang --host_cxxopt=-std=c++17 +build:clang --client_env=BAZEL_CXXOPTS=-std=c++17 +# Makes thread safety analysis warnings into errors. +build:clang --copt=-Werror=thread-safety + +build --copt=-I. +build --per_file_copt='^external/tensorflow_v2_14_0*'/@-w +# Ignores thread-safety warnings from the TensorFlow repo. +build --per_file_copt='^external/org_tensorflow*'@-Wno-error=thread-safety + +build --copt=-isystem --copt bazel-out/k8-opt/bin +build --copt=-isystem --copt bazel-out/k8-dbg/bin + +# Address sanitizer, set action_env to segregate cache entries +build:asan --action_env=PRIVACY_SANDBOX_SERVERS_ASAN=1 +build:asan --strip=never +build:asan --compilation_mode=dbg +build:asan --copt=-fsanitize=address +build:asan --copt=-DADDRESS_SANITIZER +build:asan --copt=-O1 +build:asan --copt=-g +build:asan --copt=-fno-omit-frame-pointer +build:asan --linkopt=-fsanitize=address +build:asan --linkopt=-fuse-ld=lld +build:asan --action_env=ASAN_OPTIONS=detect_leaks=1:color=always + +# Thread sanitizer, set action_env to segregate cache entries +build:tsan --action_env=PRIVACY_SANDBOX_SERVERS_TSAN=1 +build:tsan --strip=never +build:tsan --copt=-fsanitize=thread +build:tsan --copt=-fno-omit-frame-pointer +build:tsan --copt=-DGPR_NO_DIRECT_SYSCALLS +build:tsan --copt=-DGRPC_TSAN +build:tsan --copt=-g +build:tsan --linkopt=-fsanitize=thread +# This is needed to address false positive problem with abseil. +# https://github.com/google/sanitizers/issues/953 +build:tsan --test_env=TSAN_OPTIONS=report_atomic_races=0 diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/.bazelversion b/services/inference_sidecar/modules/tensorflow_v2_14_0/.bazelversion new file mode 100644 index 00000000..798e3899 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/.bazelversion @@ -0,0 +1 @@ +6.3.0 diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/.gitignore b/services/inference_sidecar/modules/tensorflow_v2_14_0/.gitignore new file mode 100644 index 00000000..349c7dc2 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/.gitignore @@ -0,0 +1,11 @@ +# Bazel symlinks +/bazel-* + +# Bzlmod files +/MODULE* + +# Docker +docker-buildx-*.log + +# Tests +/dist diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/BUILD b/services/inference_sidecar/modules/tensorflow_v2_14_0/BUILD new file mode 100644 index 00000000..fbe5d5d6 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/BUILD @@ -0,0 +1,247 @@ +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +load("@rules_cc//cc:defs.bzl", "cc_binary", "cc_library", "cc_test") +load( + "@rules_pkg//pkg:mappings.bzl", + "pkg_attributes", + "pkg_files", + "strip_prefix", +) +load("@rules_pkg//pkg:zip.bzl", "pkg_zip") + +cc_binary( + name = "inference_sidecar_bin", + srcs = ["@inference_common//:inference_sidecar_main.cc"], + deps = [ + ":tensorflow", + "@inference_common//:grpc_sidecar", + ], +) + +# Generate inference sidecar as "inference_sidecar" during test time. +genrule( + name = "inference_sidecar_test_target", + srcs = [":inference_sidecar_bin"], + outs = ["inference_sidecar"], + cmd = "cp $< $@", +) + +# Copy a set of model files to "test_model" directory so that +# inference_sidecar_test.cc can pass the file path to the TensorFlow +# inference sidecar. +genrule( + name = "test_model_target", + srcs = [":test_model_zip"], + outs = ["test_model"], + cmd = "mkdir -p $(OUTS) && unzip $(SRCS) -d $(OUTS)", +) + +pkg_zip( + name = "test_model_zip", + srcs = ["test_model_files"], +) + +pkg_files( + name = "test_model_files", + srcs = ["@inference_common//testdata/models/tensorflow_simple_model:test_model"], + attributes = pkg_attributes(mode = "0555"), + strip_prefix = strip_prefix.from_pkg(), +) + +cc_test( + name = "inference_sidecar_test", + size = "medium", + timeout = "short", + srcs = ["@inference_common//:inference_sidecar_test.cc"], + data = [ + ":inference_sidecar_test_target", + ":test_model_target", + ], + flaky = True, + deps = [ + "@com_github_grpc_grpc//:grpc++", + "@com_google_absl//absl/flags:flag", + "@com_google_absl//absl/flags:reflection", + "@com_google_absl//absl/status", + "@com_google_absl//absl/strings", + "@com_google_absl//absl/time", + "@com_google_googletest//:gtest", + "@com_google_googletest//:gtest_main", + "@inference_common//proto:inference_sidecar_cc_grpc_proto", + "@inference_common//proto:inference_sidecar_cc_proto", + "@inference_common//sandbox:sandbox_executor", + "@inference_common//utils:file_util", + ], +) + +cc_library( + name = "tensorflow_parser", + srcs = ["tensorflow_parser.cc"], + hdrs = [ + "tensorflow_parser.h", + ], + deps = [ + "@com_google_absl//absl/status", + "@com_google_absl//absl/status:statusor", + "@com_google_absl//absl/strings", + "@inference_common//proto:inference_sidecar_cc_proto", + "@inference_common//utils:request_parser", + "@org_tensorflow//tensorflow/core:framework", + ], +) + +cc_test( + name = "tensorflow_parser_test", + size = "small", + srcs = ["tensorflow_parser_test.cc"], + deps = [ + ":tensorflow_parser", + "@com_google_absl//absl/status", + "@com_google_absl//absl/status:statusor", + "@com_google_absl//absl/strings", + "@com_google_googletest//:gtest", + "@com_google_googletest//:gtest_main", + "@inference_common//proto:inference_sidecar_cc_proto", + ], +) + +cc_library( + name = "tensorflow", + srcs = ["tensorflow.cc"], + deps = [ + ":tensorflow_parser", + "@com_google_absl//absl/base:core_headers", + "@com_google_absl//absl/container:flat_hash_map", + "@com_google_absl//absl/status", + "@com_google_absl//absl/status:statusor", + "@com_google_absl//absl/strings", + 
"@com_google_absl//absl/synchronization", + "@google_privacysandbox_servers_common//src/util/status_macro:status_macros", + "@inference_common//modules:module_interface", + "@inference_common//proto:inference_sidecar_cc_proto", + "@inference_common//utils:request_parser", + "@org_tensorflow//tensorflow/cc:cc_ops", + "@org_tensorflow//tensorflow/cc:client_session", + "@org_tensorflow//tensorflow/cc:ops", + "@org_tensorflow//tensorflow/cc/saved_model:constants", + "@org_tensorflow//tensorflow/cc/saved_model:loader", + "@org_tensorflow//tensorflow/cc/saved_model:signature_constants", + "@org_tensorflow//tensorflow/cc/saved_model:tag_constants", + "@org_tensorflow//tensorflow/core:core_cpu", + "@org_tensorflow//tensorflow/core:example_parser_configuration", + "@org_tensorflow//tensorflow/core:framework", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:protos_all_cc", + "@org_tensorflow//tensorflow/core/profiler/lib:traceme", + "@org_tensorflow//tensorflow/tsl/platform:env", + ], +) + +cc_test( + name = "tensorflow_test", + size = "small", + srcs = ["tensorflow_test.cc"], + data = [ + "//benchmark_models/embedding:embedding_model", + "//benchmark_models/pctr:pctr_model", + "//benchmark_models/pcvr:pcvr_model", + ], + deps = [ + ":tensorflow", + "@com_google_absl//absl/status", + "@com_google_absl//absl/status:statusor", + "@com_google_absl//absl/strings", + "@com_google_googletest//:gtest", + "@com_google_googletest//:gtest_main", + "@inference_common//proto:inference_sidecar_cc_proto", + "@inference_common//utils:file_util", + "@org_tensorflow//tensorflow/core:protos_all_cc", + "@org_tensorflow//tensorflow/core/profiler/lib:traceme", + ], +) + +cc_test( + name = "ram_file_system_test", + size = "small", + srcs = ["ram_file_system_test.cc"], + data = [ + ":test_model_target", + ], + deps = [ + "@com_google_absl//absl/status", + "@com_google_absl//absl/strings", + "@com_google_googletest//:gtest", + "@com_google_googletest//:gtest_main", + "@org_tensorflow//tensorflow/cc/saved_model:constants", + "@org_tensorflow//tensorflow/cc/saved_model:loader", + "@org_tensorflow//tensorflow/cc/saved_model:signature_constants", + "@org_tensorflow//tensorflow/cc/saved_model:tag_constants", + "@org_tensorflow//tensorflow/core:framework", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/tsl/platform:env", + ], +) + +cc_test( + name = "module_concurrency_test", + size = "small", + srcs = ["@inference_common//modules:module_concurrency_test.cc"], + data = [ + ":test_model_target", + ], + deps = [ + ":tensorflow", + "@com_google_absl//absl/status", + "@com_google_absl//absl/status:statusor", + "@com_google_absl//absl/strings", + "@com_google_googletest//:gtest", + "@com_google_googletest//:gtest_main", + "@inference_common//proto:inference_sidecar_cc_proto", + "@inference_common//utils:file_util", + ], +) + +genrule( + name = "generate_artifacts", + srcs = [ + ":inference_sidecar_bin", + ], + outs = ["inference_sidecar_binary"], + cmd_bash = """cat << EOF > '$@' +mkdir -p artifacts +cp $(execpath :inference_sidecar_bin) artifacts/inference_sidecar +EOF""", + executable = True, + local = True, + message = "generate inference sidecar artifacts", +) + +# Exports TensorFlow inference sidecar binary to be packaged in the B&A workspace. 
+exports_files( + [ + "artifacts/inference_sidecar", + ], +) + +genrule( + name = "collect-logs", + outs = ["collect_logs.bin"], + cmd_bash = """cat << EOF > '$@' +tools/collect-logs "\\$$@" +EOF""", + executable = True, + local = True, + message = "copy bazel build and test logs", +) diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/README.md b/services/inference_sidecar/modules/tensorflow_v2_14_0/README.md new file mode 100644 index 00000000..c16a7d08 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/README.md @@ -0,0 +1,24 @@ +# Privacy Sandbox - TensorFlow Sidecar + +`services/inference_sidecar/modules/tensorflow_v2_14_0` provides the TensorFlow-specific code to +build the inference sidecar with TensorFlow version 2.14.0 as the inference backend. + +## How to build the TensorFlow sidecar binary + +The directory has its own `WORKSPACE` file. You should change the current working directory before +running bazel. + +```sh +cd services/inference_sidecar/modules/tensorflow_v2_14_0 +./builders/tools/bazel-debian build //:inference_sidecar_bin +./builders/tools/bazel-debian test //:tensorflow_test +``` + +To enable packaging of the inference sidecar with the bidding server, we need to generate the +inference sidecar binary into a local artifacts directory. Specifically, we need to run the +following command before `build_and_test_all_in_docker`. + +```sh +cd services/inference_sidecar/modules/tensorflow_v2_14_0 +./builders/tools/bazel-debian run //:generate_artifacts +``` diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/WORKSPACE b/services/inference_sidecar/modules/tensorflow_v2_14_0/WORKSPACE new file mode 100644 index 00000000..fdf0f8fd --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/WORKSPACE @@ -0,0 +1,134 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
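+ +# The TensorFlow-related setup below follows the upstream TensorFlow WORKSPACE: +# the hermetic Python toolchain is registered first, then the tf_workspace3/2/1/0 +# macros run in their required order.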
+ +load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive") +load("//builders/bazel:deps.bzl", "python_deps") + +python_deps("//builders/bazel") + +http_archive( + name = "com_google_absl", + # commit f845e60 2023-12-05 + sha256 = "b1e113eaf442b817f2a9e3bb471cb36129cd456dd999b0e0360fa891f177013b", + strip_prefix = "abseil-cpp-f845e60acd880dbf07788a5a2c0dbad0f9c57231", + urls = ["https://github.com/abseil/abseil-cpp/archive/f845e60acd880dbf07788a5a2c0dbad0f9c57231.zip"], +) + +http_archive( + name = "google_privacysandbox_servers_common", + patch_args = ["-p1"], + patches = [ + "@//third_party:google_privacysandbox_servers_common.patch", + ], + sha256 = "0a0bbe51c193f9dbbd0c17ff96db9883d3cd4980df150c8747d8520071dce0a7", + strip_prefix = "data-plane-shared-libraries-a7515f845ef463450baddff60099a78a2e8eadc3", + urls = [ + "https://github.com/privacysandbox/data-plane-shared-libraries/archive/a7515f845ef463450baddff60099a78a2e8eadc3.zip", + ], +) + +load("@bazel_tools//tools/build_defs/repo:utils.bzl", "maybe") + +http_archive( + name = "org_tensorflow", + patch_args = [ + "-p1", + ], + patches = [ + "@//third_party:org_tensorflow_compatibility_fixes.diff", + ], + sha256 = "ce357fd0728f0d1b0831d1653f475591662ec5bca736a94ff789e6b1944df19f", + strip_prefix = "tensorflow-2.14.0", + urls = [ + "https://github.com/tensorflow/tensorflow/archive/refs/tags/v2.14.0.tar.gz", + ], +) + +http_archive( + name = "org_kernel_libcap", + build_file = "@com_google_sandboxed_api//sandboxed_api:bazel/external/libcap.BUILD", + sha256 = "260b549c154b07c3cdc16b9ccc93c04633c39f4fb6a4a3b8d1fa5b8a9c3f5fe8", # 2019-04-16 + strip_prefix = "libcap-2.27", + urls = ["https://www.kernel.org/pub/linux/libs/security/linux-privs/libcap2/libcap-2.27.tar.gz"], +) + +# Copied from `@org_tensorflow//:WORKSPACE`. +load("@rules_python//python:repositories.bzl", "py_repositories", "python_register_toolchains") + +py_repositories() + +load("@org_tensorflow//tensorflow/tools/toolchains/python:python_repo.bzl", "python_repository") + +python_repository(name = "python_version_repo") + +# TF workspace scripts below requires `@python` toolchains repo. +# Toolchain setup here is to please the TF workspace scripts, +# and we do not use this Python version to build pip packages. 
+python_register_toolchains( + name = "python", + ignore_root_user_error = True, + python_version = "3.9", +) + +load("@org_tensorflow//tensorflow:workspace3.bzl", "tf_workspace3") + +tf_workspace3() + +load("@org_tensorflow//tensorflow:workspace2.bzl", "tf_workspace2") + +tf_workspace2() + +load("@org_tensorflow//tensorflow:workspace1.bzl", "tf_workspace1") + +tf_workspace1(with_rules_cc = False) + +load("@org_tensorflow//tensorflow:workspace0.bzl", "tf_workspace0") + +tf_workspace0() + +local_repository( + name = "inference_common", + path = "../../common", +) + +maybe( + http_archive, + name = "rapidjson", + build_file = "@inference_common//:third_party/rapidjson.BUILD.bazel", + sha256 = "bf7ced29704a1e696fbccf2a2b4ea068e7774fa37f6d7dd4039d0787f8bed98e", + strip_prefix = "rapidjson-1.1.0", + url = "https://github.com/Tencent/rapidjson/archive/v1.1.0.tar.gz", +) + +load( + "@google_privacysandbox_servers_common//third_party:cpp_deps.bzl", + data_plane_shared_deps_cpp = "cpp_dependencies", +) + +data_plane_shared_deps_cpp() + +load("@google_privacysandbox_servers_common//third_party:deps1.bzl", data_plane_shared_deps1 = "deps1") + +data_plane_shared_deps1() + +load("@google_privacysandbox_servers_common//build_defs/cc/shared:sandboxed_api.bzl", "sandboxed_api") + +sandboxed_api() + +load("@com_google_sandboxed_api//sandboxed_api/bazel:llvm_config.bzl", "llvm_disable_optional_support_deps") +load("@com_google_sandboxed_api//sandboxed_api/bazel:sapi_deps.bzl", "sapi_deps") + +llvm_disable_optional_support_deps() + +sapi_deps() diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/benchmark_models/embedding/BUILD b/services/inference_sidecar/modules/tensorflow_v2_14_0/benchmark_models/embedding/BUILD new file mode 100644 index 00000000..66586850 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/benchmark_models/embedding/BUILD @@ -0,0 +1,23 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +filegroup( + name = "embedding_model", + srcs = [ + "saved_model.pb", + "variables/variables.data-00000-of-00001", + "variables/variables.index", + ], + visibility = ["//visibility:public"], +) diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/benchmark_models/embedding/saved_model.pb b/services/inference_sidecar/modules/tensorflow_v2_14_0/benchmark_models/embedding/saved_model.pb new file mode 100644 index 00000000..dc6a2eb4 Binary files /dev/null and b/services/inference_sidecar/modules/tensorflow_v2_14_0/benchmark_models/embedding/saved_model.pb differ diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/benchmark_models/embedding/variables/variables.data-00000-of-00001 b/services/inference_sidecar/modules/tensorflow_v2_14_0/benchmark_models/embedding/variables/variables.data-00000-of-00001 new file mode 100644 index 00000000..1320a6a5 Binary files /dev/null and b/services/inference_sidecar/modules/tensorflow_v2_14_0/benchmark_models/embedding/variables/variables.data-00000-of-00001 differ diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/benchmark_models/embedding/variables/variables.index b/services/inference_sidecar/modules/tensorflow_v2_14_0/benchmark_models/embedding/variables/variables.index new file mode 100644 index 00000000..3afbf857 Binary files /dev/null and b/services/inference_sidecar/modules/tensorflow_v2_14_0/benchmark_models/embedding/variables/variables.index differ diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/benchmark_models/pctr/BUILD b/services/inference_sidecar/modules/tensorflow_v2_14_0/benchmark_models/pctr/BUILD new file mode 100644 index 00000000..2adffdb6 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/benchmark_models/pctr/BUILD @@ -0,0 +1,23 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +filegroup( + name = "pctr_model", + srcs = [ + "saved_model.pb", + "variables/variables.data-00000-of-00001", + "variables/variables.index", + ], + visibility = ["//visibility:public"], +) diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/benchmark_models/pctr/saved_model.pb b/services/inference_sidecar/modules/tensorflow_v2_14_0/benchmark_models/pctr/saved_model.pb new file mode 100644 index 00000000..a37a96a2 Binary files /dev/null and b/services/inference_sidecar/modules/tensorflow_v2_14_0/benchmark_models/pctr/saved_model.pb differ diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/benchmark_models/pctr/variables/variables.data-00000-of-00001 b/services/inference_sidecar/modules/tensorflow_v2_14_0/benchmark_models/pctr/variables/variables.data-00000-of-00001 new file mode 100644 index 00000000..11ea9774 Binary files /dev/null and b/services/inference_sidecar/modules/tensorflow_v2_14_0/benchmark_models/pctr/variables/variables.data-00000-of-00001 differ diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/benchmark_models/pctr/variables/variables.index b/services/inference_sidecar/modules/tensorflow_v2_14_0/benchmark_models/pctr/variables/variables.index new file mode 100644 index 00000000..3056e2bc Binary files /dev/null and b/services/inference_sidecar/modules/tensorflow_v2_14_0/benchmark_models/pctr/variables/variables.index differ diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/benchmark_models/pcvr/BUILD b/services/inference_sidecar/modules/tensorflow_v2_14_0/benchmark_models/pcvr/BUILD new file mode 100644 index 00000000..005c8b2b --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/benchmark_models/pcvr/BUILD @@ -0,0 +1,23 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +filegroup( + name = "pcvr_model", + srcs = [ + "saved_model.pb", + "variables/variables.data-00000-of-00001", + "variables/variables.index", + ], + visibility = ["//visibility:public"], +) diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/benchmark_models/pcvr/saved_model.pb b/services/inference_sidecar/modules/tensorflow_v2_14_0/benchmark_models/pcvr/saved_model.pb new file mode 100644 index 00000000..a37a96a2 Binary files /dev/null and b/services/inference_sidecar/modules/tensorflow_v2_14_0/benchmark_models/pcvr/saved_model.pb differ diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/benchmark_models/pcvr/variables/variables.data-00000-of-00001 b/services/inference_sidecar/modules/tensorflow_v2_14_0/benchmark_models/pcvr/variables/variables.data-00000-of-00001 new file mode 100644 index 00000000..dbabf2ae Binary files /dev/null and b/services/inference_sidecar/modules/tensorflow_v2_14_0/benchmark_models/pcvr/variables/variables.data-00000-of-00001 differ diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/benchmark_models/pcvr/variables/variables.index b/services/inference_sidecar/modules/tensorflow_v2_14_0/benchmark_models/pcvr/variables/variables.index new file mode 100644 index 00000000..e9da6946 Binary files /dev/null and b/services/inference_sidecar/modules/tensorflow_v2_14_0/benchmark_models/pcvr/variables/variables.index differ diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/.coverage.bazelrc b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/.coverage.bazelrc new file mode 100644 index 00000000..58b34d6e --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/.coverage.bazelrc @@ -0,0 +1,17 @@ +coverage --action_env=PRIVACY_SANDBOX_SERVERS_BUILD_CONFIG_COVERAGE=1 +coverage --config=clang_coverage +coverage --build_tests_only + +# clang_coverage -- the set of args to configure C++ code coverage in bazel with clang +coverage:clang_coverage --compilation_mode=opt +coverage:clang_coverage --combined_report=lcov +coverage:clang_coverage --nocache_test_results +coverage:clang_coverage --experimental_use_llvm_covmap +coverage:clang_coverage --experimental_generate_llvm_lcov +coverage:clang_coverage --action_env=BAZEL_USE_LLVM_NATIVE_COVERAGE=1 +coverage:clang_coverage --action_env=GCOV=/usr/bin/llvm-profdata +coverage:clang_coverage --action_env=BAZEL_LLVM_COV=/usr/bin/llvm-cov +coverage:clang_coverage --action_env=BAZEL_LLVM_PROFDATA=/usr/bin/llvm-profdata +coverage:clang_coverage --strategy=TestRunner=sandboxed,local +coverage:clang_coverage --strategy=CoverageReport=sandboxed,local +coverage:clang_coverage --experimental_scale_timeouts=1.5 diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/.github/workflows/scorecard.yaml b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/.github/workflows/scorecard.yaml new file mode 100644 index 00000000..dbcb6200 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/.github/workflows/scorecard.yaml @@ -0,0 +1,86 @@ +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Workflow for the OSSF Scorecards Action +# https://github.com/ossf/scorecard-action#installation + +name: Scorecard supply-chain security +on: + # For Branch-Protection check. Only the default branch is supported. See + # https://github.com/ossf/scorecard/blob/main/docs/checks.md#branch-protection + branch_protection_rule: + # To guarantee Maintained check is occasionally updated. See + # https://github.com/ossf/scorecard/blob/main/docs/checks.md#maintained + schedule: + - cron: '35 10 * * 4' + push: + branches: + - main + +# Declare default permissions as read only. +permissions: read-all + +jobs: + analysis: + name: Scorecard analysis + runs-on: ubuntu-latest + permissions: + # Needed to upload the results to code-scanning dashboard. + security-events: write + # Needed to publish results and get a badge (see publish_results below). + id-token: write + # Uncomment the permissions below if installing in a private repository. + # contents: read + # actions: read + + steps: + - name: Checkout code + uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # v3.1.0 + with: + persist-credentials: false + + - name: Run analysis + uses: ossf/scorecard-action@e38b1902ae4f44df626f11ba0734b14fb91f8f86 # v2.1.2 + with: + results_file: results.sarif + results_format: sarif + # (Optional) "write" PAT token. Uncomment the `repo_token` line below if: + # - you want to enable the Branch-Protection check on a *public* repository, or + # - you are installing Scorecard on a *private* repository + # To create the PAT, follow the steps in https://github.com/ossf/scorecard-action#authentication-with-pat. + # repo_token: ${{ secrets.SCORECARD_TOKEN }} + + # Public repositories: + # - Publish results to OpenSSF REST API for easy access by consumers + # - Allows the repository to include the Scorecard badge. + # - See https://github.com/ossf/scorecard-action#publishing-results. + # For private repositories: + # - `publish_results` will always be set to `false`, regardless + # of the value entered here. + publish_results: true + + # Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF + # format to the repository Actions tab. + - name: Upload artifact + uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8 # v3.1.0 + with: + name: SARIF file + path: results.sarif + retention-days: 5 + + # Upload the results to GitHub's code scanning dashboard. 
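+ # (results.sarif from the "Run analysis" step above is the file uploaded here, + # which is what appears in the repository's code scanning dashboard.)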
+ - name: Upload to code-scanning + uses: github/codeql-action/upload-sarif@17573ee1cc1b9d061760f3a006fc4aac4f944fd5 # v2.2.4 + with: + sarif_file: results.sarif diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/.gitignore b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/.gitignore new file mode 100644 index 00000000..da211595 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/.gitignore @@ -0,0 +1,4 @@ +*~ +*.tmp +*.log +*.eif diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/.markdownlint-cli2.yaml b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/.markdownlint-cli2.yaml new file mode 120000 index 00000000..212461e1 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/.markdownlint-cli2.yaml @@ -0,0 +1 @@ +etc/.markdownlint-cli2.yaml \ No newline at end of file diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/.pre-commit-config.yaml b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/.pre-commit-config.yaml new file mode 100644 index 00000000..c7997b3e --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/.pre-commit-config.yaml @@ -0,0 +1,150 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# See https://pre-commit.com for more information +# See https://pre-commit.com/hooks.html for more hooks +exclude: (?x)^( + version.txt + )$ + +fail_fast: true +repos: +- repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.4.0 + hooks: + - id: end-of-file-fixer + - id: fix-byte-order-marker + - id: mixed-line-ending + - id: trailing-whitespace + - id: check-case-conflict + - id: check-merge-conflict + - id: check-yaml + - id: check-json + - id: check-symlinks + - id: check-added-large-files + - id: check-vcs-permalinks + - id: check-executables-have-shebangs + - id: detect-private-key + +- repo: https://github.com/jumanjihouse/pre-commit-hooks + rev: 3.0.0 + hooks: + - id: git-check + - id: script-must-not-have-extension + - id: script-must-have-extension + - id: require-ascii + - id: shellcheck + +- repo: https://github.com/pre-commit/mirrors-clang-format + rev: v16.0.6 + hooks: + - id: clang-format + types_or: + - c++ + - c + +- repo: https://github.com/bufbuild/buf + rev: v1.23.1 + hooks: + - id: buf-format + +- repo: local + hooks: + - id: addlicense + name: addlicense + language: golang + additional_dependencies: + - github.com/google/addlicense@v1.1.1 + always_run: false + pass_filenames: true + entry: addlicense -v + types_or: + - text + + - id: addlicense-check + name: addlicense check + language: golang + additional_dependencies: + - github.com/google/addlicense@v1.1.1 + always_run: false + pass_filenames: true + entry: addlicense -check + types_or: + - text + + - id: terraform-fmt + name: terraform fmt + description: Run terraform via docker to format Terraform files + language: script + pass_filenames: false + entry: tools/terraform fmt -write=true -recursive + types_or: + - terraform + + - id: hadolint + name: Lint Dockerfiles + description: Run hadolint via docker to lint Dockerfiles + language: script + types_or: + - dockerfile + entry: tools/hadolint + +- repo: https://github.com/pre-commit/mirrors-prettier + rev: v3.0.0 + hooks: + - id: prettier + name: prettier markdown + types_or: + - markdown + +- repo: https://github.com/DavidAnson/markdownlint-cli2 + rev: v0.8.1 + hooks: + - id: markdownlint-cli2 + name: lint markdown + +- repo: local + hooks: + - id: buildifier + name: buildifier + description: Format bazel WORKSPACE, BUILD and .bzl files with a standard convention. 
+ language: golang + additional_dependencies: + - github.com/bazelbuild/buildtools/buildifier@6.1.1 + always_run: true + pass_filenames: true + types_or: + - bazel + entry: buildifier + args: + - -lint=fix + - -mode=fix + - -warnings=all + +- repo: https://github.com/cpplint/cpplint + rev: 1.6.1 + hooks: + - id: cpplint + types_or: + - c++ + - c + args: + - --filter=-build/c++11,+build/c++17,-build/header_guard,-build/include_order,+build/include_what_you_use,-build/include_subdir,-readability/casting,-readability/todo,-runtime/references + - --quiet + +- repo: https://github.com/psf/black + rev: 23.7.0 + hooks: + - id: black + name: black python formatter diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/.prettierignore b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/.prettierignore new file mode 100644 index 00000000..f38e52f8 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/.prettierignore @@ -0,0 +1,3 @@ +/.git/ +/CHANGELOG.md +/google_internal/LATEST_RELEASE.md diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/.prettierrc.yaml b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/.prettierrc.yaml new file mode 120000 index 00000000..5f7e36f6 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/.prettierrc.yaml @@ -0,0 +1 @@ +etc/.prettierrc.yaml \ No newline at end of file diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/.versionrc.json b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/.versionrc.json new file mode 120000 index 00000000..f0385a95 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/.versionrc.json @@ -0,0 +1 @@ +etc/.versionrc.json \ No newline at end of file diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/CHANGELOG.md b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/CHANGELOG.md new file mode 100644 index 00000000..36788bbb --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/CHANGELOG.md @@ -0,0 +1,835 @@ +# Changelog + +All notable changes to this project will be documented in this file. See [commit-and-tag-version](https://github.com/absolute-version/commit-and-tag-version) for commit guidelines. 
+ +## 0.55.1 (2024-02-22) + + +### Bug Fixes + +* Do not invoke normalize-bazel-symlinks for cbuild --cmd + +## 0.55.0 (2024-02-22) + + +### Bug Fixes + +* Normalize bazel symlinks to a resolved path +* Pass the correct path for normalize-bazel-symlinks + +## 0.54.0 (2024-02-09) + + +### Features + +* Set cbuild workdir to pwd relative to root workspace + +## 0.53.0 (2024-01-25) + + +### Features + +* Add support to collect-coverage tool for custom lcov report + + +### Bug Fixes + +* Improve --cmd-profiler support + +## 0.52.0 (2023-12-02) + + +### Features + +* add python3.9 dev to bazel-debian + +## 0.51.0 (2023-11-30) + + +### Bug Fixes + +* Clean go build cache at the end of image build script + + +### Dependencies + +* **deps:** Upgrade bazelisk to 1.19.0 + +## 0.50.0 (2023-11-06) + + +### Features + +* Add openssh-client to build-debian image + +## 0.49.1 (2023-10-30) + + +### Bug Fixes + +* Add tools/wrk2 wrapper script + +## 0.49.0 (2023-10-27) + + +### Features + +* Add wrk2 to test-tools image +* Extend normalize-bazel-symlink to normalize within containers + + +### Dependencies + +* **deps:** Update versions in test-tools image + +## 0.48.0 (2023-10-11) + + +### Features + +* Add tools/bazel-amazonlinux2023 + + +### Bug Fixes + +* Add lldb symlink + +## 0.47.0 (2023-10-05) + + +### Features + +* Add --cmd-profiler flag to tools/cbuild +* Add google-pprof to coverage-tools image +* Add libprofiler.so to build-debian image + + +### Bug Fixes + +* Indicate default image in help text + +## 0.46.1 (2023-10-04) + + +### Bug Fixes + +* cbuild exits normally even if bazel symlinks aren't normalized +* Revert to clang 15.x +* Use bash when normalize-dist runs inside docker +* Use DOCKER_NETWORK if already set + +## 0.46.0 (2023-09-22) + + +### Bug Fixes + +* Hide normalize-bazel-symlinks in cbuild + + +### Dependencies + +* **deps:** Update amazonlinux images to Aug 2023 +* **deps:** Upgrade clang to v16 in build-debian + +## 0.45.0 (2023-09-19) + + +### Features + +* Invoke normalize-bazel-symlinks at cbuild exit + +## 0.44.0 (2023-09-11) + + +### Features + +* Add coverage bazel config and collect-coverage tool + +## 0.43.0 (2023-08-15) + + +### Features + +* Allow terraform version to be specified + + +### Bug Fixes + +* Move feat(deps) earlier for precedence over feat + +## 0.42.1 (2023-08-14) + + +### Bug Fixes + +* Revert override of /usr/bin/python links in build-amazonlinux2 + +## 0.42.0 (2023-08-07) + + +### Features + +* Add amazonlinux2 support to convert-docker-to-nitro +* Add bsdmainutils for hexdump +* Add build-amazonlinux2023 image +* Add support for amazonlinux2023 in builder.sh +* Configure python3.9 for python/python3 links +* Remove jdk from build-amazonlinux2 image + +### Dependencies + +* **deps:** Upgrade amazonlinux2 to 20230719 + + +### Bug Fixes + +* Empty bazel-* arg list not an error condition + +## 0.41.1 (2023-08-04) + + +### Bug Fixes + +* Remove debug statement + +## 0.41.0 (2023-08-03) + + +### Features + +* Create links for lld and ld.lld in build-debian + +## 0.40.0 (2023-08-03) + + +### Features + +* Add Dependencies section for release notes +* **deps:** Upgrade rules_python to 0.24.0 +* Ensure bazel-* scripts handle non-bazel args too + +## 0.39.0 (2023-08-02) + + +### Features + +* Run bazel containers in seccomp=unconfined mode + +## 0.38.1 (2023-08-02) + + +### Bug Fixes + +* Ensure python3.9 is found first in PATH + +## 0.38.0 (2023-07-28) + + +### Features + +* Add cbuild flag --seccomp-unconfined + +## 0.37.0 (2023-07-27) + + +### Features + +* 
Add patch tool to build-debian image + +## 0.36.1 (2023-07-25) + + +### Bug Fixes + +* Rename convert_docker_to_nitro to convert-docker-to-nitro + +## 0.36.0 (2023-07-24) + + +### Features + +* Add convert_docker_to_nitro + +## 0.35.0 (2023-07-21) + + +### Features + +* Add LICENSE file + +## 0.34.0 (2023-07-21) + + +### Features + +* Add OSSF Scorecard badge to top-level README + +## 0.33.0 (2023-07-20) + + +### Features + +* Install python libclang 15 in build-debian +* Add OSSF Scorecard GitHub Action + +## 0.32.0 (2023-07-14) + + +### Features + +* Set PYTHON_BIN_PATH/PYTHON_LIB_PATH in build-amazonlinux2 + +## 0.31.0 (2023-07-12) + + +### Features + +* Add cbuild --docker-network flag +* Add coverage-tool image plus lcov scripts +* Mount gcloud config dir into container + + +### Bug Fixes + +* Add hash for coverage-tools +* Add hash for coverage-tools +* Improve error handling for flag values +* Print build log path on error condition +* Specify latest image tag explicitly +* Upgrade pre-commit hooks + +## 0.30.1 (2023-06-27) + + +### Bug Fixes + +* Use = for --env flag +* Use = for --env flag for all tools + +## 0.30.0 (2023-06-26) + + +### Features + +* Install numpy for python3.9 +* Set PYTHON_BIN_PATH/PYTHON_LIB_PATH in build-debian +* Upgrade AmazonLinux2 to 20230530 +* Upgrade packer to v1.9.1 + + +### Bug Fixes + +* Add links for llvm-{cov,profdata} + +## 0.29.0 (2023-06-05) + + +### Features + +* Update pre-commit hook versions + + +### Bug Fixes + +* Catch error when shifting multiple args +* Remove golang from test-tools image +* Resolve WORKSPACE using realpath +* Use correct exit code in --fast mode + +## 0.28.0 (2023-05-24) + + +### Features + +* Update ca-certificates + + +### Bug Fixes + +* Downgrade to clang v15 +* Use builders version.txt for tarfile tag + +## 0.27.0 (2023-05-23) + + +### Features + +* Add buf to presubmit image + + +### Documentation + +* Add CONTRIBUTING.md + +## 0.26.0 (2023-05-16) + + +### Features + +* Remove zlib-dev package +* Upgrade clang to v16 +* Upgrade go to v1.20.4 + +## 0.25.0 (2023-05-11) + + +### Features + +* Update default bazel version to 5.4.1 +* Upgrade rules_python to 0.21.0 + + +### Bug Fixes + +* Add file utility to build-debian image + +## 0.24.0 (2023-05-05) + + +### Features + +* Add --build-images flag to tests/run-tests +* Reuse tar image if available + + +### Bug Fixes + +* Address linter warnings +* Address linter warnings for tools +* Correct mangled usage text +* Pin pre-commit to 3.x +* Remove .gz suffix from tar file +* Remove function keyword for busybox sh script +* Remove Release in changelog title +* Upgrade pre-commit hooks + +## 0.23.0 (2023-04-13) + + +### Features + +* Add wrapper for commit-and-tag-version +* Upgrade curl to version 8 +* Upgrade to amazonlinux 2.0.20230320.0 + + +### Bug Fixes + +* Use commit-and-tag-version wrapper + +## 0.22.0 (2023-04-03) + + +### Features + +* Add awscurl wrapper script +* Add tests for misc CLI wrappers +* Correctly quote bash args +* Extend test-tool to support the release image +* Use login shell for interactive container + + +### Documentation + +* Add section on tools to README +* Remove section on building images directly + +## 0.21.1 (2023-03-07) + + +### Bug Fixes + +* Relax pinned version for apache2-utils + +## 0.21.0 (2023-03-06) + + +### Features + +* Add wrapper scripts for utils + +## 0.20.0 (2023-03-01) + + +### Features + +* Add docker buildkit plugin +* Add tests for CLI wrapper scripts +* Permit testing of a single image +* Relax pinned versions in 
build-debian, presubmit and test-tools + +## 0.19.0 (2023-03-01) + + +### Features + +* Add jq wrapper + + +### Bug Fixes + +* Relax pinned version of openjdk to 11.0.* + +## 0.18.0 (2023-02-23) + + +### Features + +* Relax pinned versions for apk and yum packages to semver + +## 0.17.0 (2023-02-21) + + +### Features + +* Upgrade black to 23.1.0 +* Upgrade tar to 1.34-r1 +* Upgrade to buildozer 6.0.1 + + +### Bug Fixes + +* Minor code cleanup in images/presubmit/install_apps +* Upgrade ghz to 0.114.0 + +## 0.16.0 (2023-02-05) + + +### Features + +* Run test tools in docker interactive mode to admit std streams + +## 0.15.1 (2023-02-04) + + +### Bug Fixes + +* Return value from get_docker_workspace_mount() + +## 0.15.0 (2023-02-03) + + +### Features + +* Use WORKSPACE_MOUNT if set + + +### Bug Fixes + +* Pin commit-and-tag-version to v10.1.0 + +## 0.14.0 (2023-01-27) + + +### Features + +* Improve verbose output for get-builder-image-tagged + +## 0.13.1 (2023-01-26) + + +### Bug Fixes + +* Upgrade software-properties-common + +## 0.13.0 (2023-01-23) + + +### Features + +* Add ab tool +* Add cassowary http load testing tool +* Add h2load tool +* Add slowhttptest tool +* Adjust get-builder-image-tagged verbose output +* Upgrade to packer v1.8.5 + + +### Bug Fixes + +* Re-pin dependencies in test-tools image +* Relax version pins to semver +* Upgrade amazonlinux2 base image +* Upgrade git on amazonlinux2 + +## 0.12.0 (2023-01-10) + + +### Features + +* Modify ghz wrapper for generic use. Add curl +* Use test-tools image for grpcurl + +## 0.11.0 (2023-01-09) + + +### Features + +* Add chrpath + + +### Bug Fixes + +* Clean up tmpdir via RETURN trap + +## 0.10.0 (2023-01-06) + + +### Features + +* Drop ubuntu package version minor for curl + +## 0.9.0 (2023-01-04) + + +### Features + +* Add zlib1g-dev to build-debian per scp build dependency +* Update hook versions + + +### Bug Fixes + +* Correct non-zero error message and drop sourcing of tools/builder.sh +* Elide warnings from bazel info +* Revert from clang-format v15 to v14 + +## 0.8.0 (2022-12-29) + + +### Features + +* Add bash and jq to test-tools image +* Add script to normalize bazel- symlinks +* Ensure run-tests includes all images +* Skip symlinks that resolve in normalize-bazel-symlink + +## 0.7.0 (2022-12-27) + + +### Features + +* Add ghz wrapper script +* Add test-tools image + +## 0.6.0 (2022-12-12) + + +### Features + +* Add EXTRA_DOCKER_RUN_ARGS support in aws-cli + + +### Bug Fixes + +* Emit docker build output only on non-zero exit +* Remove tempfile before exiting + +## 0.5.0 (2022-12-06) + + +### Features + +* Pin versions in presubmit image + + +### Bug Fixes + +* Avoid cache for tar image +* Update version pin for ca-certificates +* Use images subdirs for image list + +## 0.4.4 (2022-11-18) + + +### Bug Fixes + +* Retain execute permissions when normalizing dist + +## 0.4.3 (2022-11-17) + + +### Bug Fixes + +* Add gettext package for envsubst +* Avoid yum package version strings specific to CPU architecture +* Improve verbose output for get-builder-image-tagged +* Pin apt and yum package versions + +## 0.4.2 (2022-11-17) + + +### Bug Fixes + +* Generate SHA within docker container + +## 0.4.1 (2022-11-15) + + +### Bug Fixes + +* Reduce noise creating presubmit image +* Remove docker run --interactive flag + +## 0.4.0 (2022-11-14) + + +### Features + +* Add buildozer to release image +* Add grpcurl helper script +* Substitute variables in EXTRA_DOCKER_RUN_ARGS +* Use specific version of GNU tar + + +### Bug Fixes + +* Add 
/opt/bin/python link +* Add CC=clang env var +* Add xz to build-debian +* Explicitly add machine type and OS release to toolchains hash + +## 0.3.1 (2022-11-01) + + +### Bug Fixes + +* Add OpenJDK 11 in build-amazonlinux2 + +## 0.3.0 (2022-11-01) + + +### Features + +* Add git to build-amazonlinux2 image +* Add google-java-format pre-commit hook +* Add OpenJDK 11 +* Move python3 to /opt/bin/python3 + + +### Bug Fixes + +* Ensure builder::set_workspace does not overwrite WORKSPACE + +## 0.2.0 (2022-10-26) + + +### Features + +* Add bazel support to register container-based python toolchain +* Add docker uri env var as override +* Add tools/terraform +* Use image etc files in workspace root + + +### Bug Fixes + +* Avoid empty tar error if no workspace etc files + + +### Documentation + +* Add Getting Started section to README.md + +## 0.1.0 (2022-10-25) + + +### Features + +* Add --env flag to cbuild +* Add arg processing to cbuild script +* Add aws-cli helper script +* Add bazel-debian helper script +* Add builders/utils docker image +* Add hadolint to lint Dockerfiles +* Add optional flags to cbuild tool +* Add preliminary support for commit-and-tag-version and copybara +* Add the release-please tool +* Add toolchain short hash to bazel output_user_root path +* Add tools/lib/builder.sh +* **build:** Add GitHub CLI tool https://cli.github.com/ +* Determine workspace mount point from docker inspect if inside docker container +* Inject clang-version as a bazel action_env +* Migrate generation of builders/release container image to Dockerfile +* Move image directories to images/ top-level directory +* Overhaul building on amazonlinux2 +* Remove python build dependencies from bazel +* Set BUILD_ARCH env var in docker images +* Update release image to node v18 +* Upgrade to bazel 5.3.2 +* Upgrade to clang v14 on bazel-debian +* Use Packer to build AMI. 
+ + +### Bug Fixes + +* Add builders/tools/normalize-dist to chmod/chgrp/chown dist/ directory tree +* Add get_workspace_mount function to encapsulate code block +* Add python version to action_env +* Add/remove basic pre-commit hooks +* Adopt shellcheck +* Avoid installing recommended debian packages +* Avoid use of git rev-parse to determine tools path +* Bump to latest version of bazelisk +* Clean bazel_root for smaller docker image +* Correct argument handling in cbuild script +* Correct errors generating Amazon Linux 2-based builder image +* Define python3 toolchain +* Drop packer from build-debian image +* Ensure /etc/gitconfig is readable by all +* Improve cbuild help text +* Install bazel version as specified in .bazelversion +* Invoke addlicense for all text files +* Modifications as indicated by shellcheck +* Mount $HOME/aws in aws-cli container +* Move bazel env vars from comments to help text +* Move builder-related configs to builders/etc +* Move WORKSPACE definition to cbuild script global +* Only propagate AWS env vars into amazonlinux2 build container +* Pin version of bazelisk +* Pin version of libc++-dev +* Pin version of python3.8 +* Print pre-commit version rather than help +* Remove container when get-architecture exits +* Remove debugging statement +* Remove dockerfile linter ignore and correct ENTRYPOINT +* Remove pre-commit config from build-debian +* Remove shellcheck from build-debian +* Remove unused nitro_enclave_image bazel rule +* Set bazel output_base to accommodate distinct workspaces +* Set bazel output_user_root in image bazelrc +* Set locale in build-debian +* Set WORKSPACE correctly from submodule +* Set WORKSPACE variable +* Switch from hardcoded arch to using dpkg --print-architecture +* Update normalize-dist to function inside build container +* Update pre-commit to use cbuild +* Use PRE_COMMIT_TOOL env var +* Various path-related fixes + + +### Build System + +* Add arch to docker image tags +* Add get_builder_image_tagged tool to determine a content-based tag +* Add get-architecture helper script +* Add missing imports into nitro BUILD +* Add tools/pre-commit +* Correct propagation of quoted args in gh wrapper +* Move commit-and-tag-version into tools dir +* Move gh into tools dir +* Optionally build the AMI +* Propagate status code in exit functions +* Reduce redundant installation commands +* Remove release-please tool +* Rename builders/bazel to build-debian +* Simplify use of npm image in container_run_and_commit() +* Support GH_TOKEN env var + + +### Documentation + +* Add top-level README.md +* Improve prose in README.md +* Move docker build instructions to README.md +* Reformat and lint markdown diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/CONTRIBUTING.md b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/CONTRIBUTING.md new file mode 100644 index 00000000..0d0e3e85 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/CONTRIBUTING.md @@ -0,0 +1,3 @@ +# How to Contribute + +Presently this project is not accepting contributions. 
diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/LICENSE b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/LICENSE new file mode 100644 index 00000000..d6456956 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. 
We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/README.md b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/README.md new file mode 100644 index 00000000..3ba69a91 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/README.md @@ -0,0 +1,35 @@ +# Privacy Sandbox Builders + +Build tools and docker images used by the [Privacy Sandbox](https://github.com/privacysandbox) +open-source ecosystem. The docker images generated using the Privacy Sandbox Builders are used as +the build environment(s) for other Privacy Sandbox software. These docker images can be used as part +of CI/CD workflows. + +## Getting Started + +This repo is designed to be used via `git submodule` by other Privacy Sandbox repos. This is not a +requirement per se; it can also be used standalone or via other approaches such as `git subtree`. + +To use Privacy Sandbox Builders, you need: + +- `docker` with [BuildKit](https://docs.docker.com/build/buildkit/). + +## Building Docker Images + +To build a docker image directly, you can use the `tools/get-builder-image-tagged` tool. + +## Tools + +The `tools` directory contains wrapper scripts along with some non-wrapper scripts. The wrapper +scripts, such as `terraform` and `curl`, execute the corresponding tool installed in one of the build +images. + +For example, to execute `curl`: + +```sh +tools/curl --help +``` + +--- + +[![OpenSSF Scorecard](https://api.securityscorecards.dev/projects/github.com/privacysandbox/build-system/badge)](https://securityscorecards.dev/viewer/?uri=github.com/privacysandbox/build-system) diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/bazel/BUILD b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/bazel/BUILD new file mode 100644 index 00000000..9d103c15 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/bazel/BUILD @@ -0,0 +1,40 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
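The README above describes consuming the builders via `git submodule` but leaves the commands implicit. A minimal sketch, assuming the repository URL shown in the OpenSSF Scorecard badge and a conventional `builders/` destination path (both assumptions for illustration):

```sh
# Vendor the builders into a consuming repo (URL and path are assumptions).
git submodule add https://github.com/privacysandbox/build-system builders
git submodule update --init

# Wrapper scripts then run the corresponding tool from a build image.
builders/tools/curl --help
```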
+ +"""Register container-based python toolchain.""" + +load("@bazel_tools//tools/python:toolchain.bzl", "py_runtime_pair") +load("@rules_python//python:defs.bzl", "py_runtime") + +# define the python3 runtime; this path must exist in the bazel build environment +PY3_PATH = "/opt/bin/python3" + +py_runtime( + name = "py_runtime", + interpreter_path = PY3_PATH, + python_version = "PY3", + visibility = ["//visibility:public"], +) + +py_runtime_pair( + name = "py_runtime_pair", + py2_runtime = None, + py3_runtime = ":py_runtime", +) + +toolchain( + name = "py_toolchain", + toolchain = ":py_runtime_pair", + toolchain_type = "@bazel_tools//tools/python:toolchain_type", +) diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/bazel/deps.bzl b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/bazel/deps.bzl new file mode 100644 index 00000000..0fe6743b --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/bazel/deps.bzl @@ -0,0 +1,35 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Load definitions for use in WORKSPACE files.""" + +load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive") + +def python_deps(bazel_package): + """Load rules_python and register container-based python toolchain + + Note: the bazel_package arg will depend on the import/submodule location in your workspace + + Args: + bazel_package: repo-relative bazel package to builders/bazel/BUILD eg. "//builders/bazel" + """ + http_archive( + name = "rules_python", + sha256 = "0a8003b044294d7840ac7d9d73eef05d6ceb682d7516781a4ec62eeb34702578", + strip_prefix = "rules_python-0.24.0", + urls = [ + "https://github.com/bazelbuild/rules_python/releases/download/0.24.0/rules_python-0.24.0.tar.gz", + ], + ) + native.register_toolchains("{}:py_toolchain".format(bazel_package)) diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/etc/.bazelversion b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/etc/.bazelversion new file mode 100644 index 00000000..ade65226 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/etc/.bazelversion @@ -0,0 +1 @@ +5.4.1 diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/etc/.clang-format b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/etc/.clang-format new file mode 100644 index 00000000..8c45f0ce --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/etc/.clang-format @@ -0,0 +1,33 @@ +BasedOnStyle: Google +Standard: c++17 +DerivePointerAlignment: false +SortIncludes: true +IncludeBlocks: Regroup +IncludeCategories: + # gtest, this should be put first in tests. + - Regex: '^ from OS. + - Regex: '^<[_A-Za-z0-9-]+\.h>' + Priority: 30 + # 2nd level .h headers in <>, POSIX standard. + - Regex: '^<(sys|arpa|net|netinet)\/[_A-Za-z0-9-]+\.h>' + Priority: 30 + # Linux-specific .h headers in <>. 
diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/etc/.clang-format b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/etc/.clang-format new file mode 100644 index 00000000..8c45f0ce --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/etc/.clang-format @@ -0,0 +1,33 @@ +BasedOnStyle: Google +Standard: c++17 +DerivePointerAlignment: false +SortIncludes: true +IncludeBlocks: Regroup +IncludeCategories: + # gtest, this should be put first in tests. + - Regex: '^<gtest/.*' + Priority: 10 + # 1st level .h headers in <> from OS. + - Regex: '^<[_A-Za-z0-9-]+\.h>' + Priority: 30 + # 2nd level .h headers in <>, POSIX standard. + - Regex: '^<(sys|arpa|net|netinet)\/[_A-Za-z0-9-]+\.h>' + Priority: 30 + # Linux-specific .h headers in <>. + - Regex: '^<linux\/[_A-Za-z0-9-]+\.h>' + Priority: 40 + # Headers in <> without extension, these are basically C++ STL headers + - Regex: '^<[\/_A-Za-z0-9-]+>' + Priority: 50 + # Headers in <> from specific external libraries. + - Regex: '^<(grpcpp|absl)\/' + Priority: 60 + # Any other uncaught headers in <> + - Regex: '^<' + Priority: 70 + # Headers in "" of current directory this should be the last category. + - Regex: '^"[_A-Za-z0-9-]+\.h' + Priority: 200 + # Headers in "" with directory hierarchy + - Regex: '^"[\/_A-Za-z0-9-]+' + Priority: 80 diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/etc/.hadolint.yaml b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/etc/.hadolint.yaml new file mode 100644 index 00000000..73dc9121 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/etc/.hadolint.yaml @@ -0,0 +1,20 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# See https://github.com/hadolint/hadolint for more information + +failure-threshold: error + +trustedRegistries: +- docker.io diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/etc/.markdownlint-cli2.yaml b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/etc/.markdownlint-cli2.yaml new file mode 100644 index 00000000..0671ea1a --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/etc/.markdownlint-cli2.yaml @@ -0,0 +1,43 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
+ +config: + line-length: + line_length: 120 + #stern: true + code_blocks: false + + # these are apparently in conflict with prettier's markdown formatting + list-marker-space: false + list-indent: false + ul-indent: false + + headings: false + + proper-names: + code_blocks: false + names: + - CommonMark + - JavaScript + - Markdown + - markdown-it + - markdownlint + - markdownlint-cli2 + - Node.js + +fix: true + +ignores: +- CHANGELOG.md +- google_internal/LATEST_RELEASE.md diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/etc/.pre-commit-config.yaml b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/etc/.pre-commit-config.yaml new file mode 100644 index 00000000..6a3ebd44 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/etc/.pre-commit-config.yaml @@ -0,0 +1,156 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# See https://pre-commit.com for more information +# See https://pre-commit.com/hooks.html for more hooks +exclude: (?x)^( + bazel-(bin|out|testlogs|workspace)/.*| + .bazel_output/.*| + version.txt + )$ + +fail_fast: true +repos: +- repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.4.0 + hooks: + - id: end-of-file-fixer + - id: fix-byte-order-marker + - id: mixed-line-ending + - id: trailing-whitespace + - id: check-case-conflict + - id: check-merge-conflict + - id: check-yaml + - id: check-json + - id: check-symlinks + - id: check-added-large-files + - id: check-vcs-permalinks + - id: check-executables-have-shebangs + - id: detect-private-key + +- repo: https://github.com/jumanjihouse/pre-commit-hooks + rev: 3.0.0 + hooks: + - id: git-check + - id: script-must-not-have-extension + exclude: '^google_internal/.*/kokoro_(presubmit|continuous).sh$' + - id: script-must-have-extension + - id: require-ascii + - id: shellcheck + exclude: '^(production|tools|google_internal|builders/images)/.*$' + +- repo: https://github.com/pre-commit/mirrors-clang-format + rev: v14.0.6 + hooks: + - id: clang-format + types_or: + - c++ + - c + +- repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks + rev: v2.4.0 + hooks: + - id: pretty-format-java + name: Google Java Formatter + args: [--autofix] + +- repo: local + hooks: + - id: addlicense + name: addlicense + language: golang + additional_dependencies: + - github.com/google/addlicense@v1.1.0 + always_run: false + pass_filenames: true + entry: addlicense -v + types_or: + - text + + - id: addlicense-check + name: addlicense check + language: golang + additional_dependencies: + - github.com/google/addlicense@v1.1.0 + always_run: false + pass_filenames: true + entry: addlicense -check + types_or: + - text + +# - id: terraform-fmt +# name: terraform fmt +# description: Run terraform via docker to format Terraform files +# language: script +# pass_filenames: false +# entry: builders/tools/terraform fmt -write=true -recursive +# types_or: +# - terraform + +# - id: hadolint +# name: Lint Dockerfiles +# description: Run 
hadolint via docker to lint Dockerfiles +# language: script +# types_or: +# - dockerfile +# entry: builders/tools/hadolint + +- repo: https://github.com/pre-commit/mirrors-prettier + rev: v2.7.1 + hooks: + - id: prettier + name: prettier markdown + types_or: + - markdown + +- repo: https://github.com/DavidAnson/markdownlint-cli2 + rev: v0.6.0 + hooks: + - id: markdownlint-cli2 + name: lint markdown + +- repo: local + hooks: + - id: buildifier + name: buildifier + description: Format bazel WORKSPACE, BUILD and .bzl files with a standard convention. + language: golang + additional_dependencies: + - github.com/bazelbuild/buildtools/buildifier@5.1.0 + always_run: true + pass_filenames: true + types_or: + - bazel + entry: buildifier + args: + - -lint=fix + - -mode=fix + - -warnings=all + +- repo: https://github.com/cpplint/cpplint + rev: 1.6.1 + hooks: + - id: cpplint + types_or: + - c++ + - c + args: + - --filter=-build/c++11,+build/c++17,-build/header_guard,-build/include_order,+build/include_what_you_use,-build/include_subdir,-readability/casting,-readability/todo,-runtime/references + - --quiet + +- repo: https://github.com/psf/black + rev: 23.1.0 + hooks: + - id: black + name: black python formatter diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/etc/.prettierrc.yaml b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/etc/.prettierrc.yaml new file mode 100644 index 00000000..bf26ea63 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/etc/.prettierrc.yaml @@ -0,0 +1,25 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +printWidth: 120 +trailingComma: es5 +tabWidth: 2 +useTabs: false +singleQuote: true +proseWrap: always +overrides: +- files: '*.md' + options: + tabWidth: 4 + printWidth: 100 diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/etc/.versionrc.json b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/etc/.versionrc.json new file mode 100644 index 00000000..7d175563 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/etc/.versionrc.json @@ -0,0 +1,27 @@ +{ + "bumpFiles": [ + { + "filename": "version.txt", + "type": "plain-text" + } + ], + "commitUrlFormat": " ", + "issueUrlFormat": " ", + "packageFiles": [ + { + "filename": "version.txt", + "type": "plain-text" + } + ], + "tagPrefix": "release-", + "types": [ + { "type": "feat", "scope": "deps", "section": "Dependencies" }, + { "type": "feat", "section": "Features" }, + { "type": "fix", "section": "Bug Fixes" }, + { "type": "docs", "section": "Documentation" }, + { "type": "internal", "hidden": true }, + { "type": "chore", "hidden": true }, + { "type": "test", "hidden": true }, + { "type": "refactor", "hidden": true } + ] +} diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-amazonlinux2/.bazelversion b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-amazonlinux2/.bazelversion new file mode 120000 index 00000000..8f79a5af --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-amazonlinux2/.bazelversion @@ -0,0 +1 @@ +../../etc/.bazelversion \ No newline at end of file diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-amazonlinux2/Dockerfile b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-amazonlinux2/Dockerfile new file mode 100644 index 00000000..bb6c1edc --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-amazonlinux2/Dockerfile @@ -0,0 +1,35 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
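The `.versionrc.json` above configures commit-and-tag-version: `version.txt` serves as both the package file and the bump file, tags carry a `release-` prefix, and only feat/fix/docs commits surface in the changelog. A sketch of cutting a release with this config, assuming Node.js is available and the config sits at the repo root:

```sh
# Preview the version bump and changelog entry without writing anything.
npx commit-and-tag-version --dry-run

# Cut the release: version.txt and CHANGELOG.md are updated, and the release
# commit is tagged with the release- prefix taken from .versionrc.json.
npx commit-and-tag-version
```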
+ +FROM amazonlinux:2.0.20230822.0 + +COPY /install_apps install_golang_apps install_go.sh generate_system_bazelrc .bazelversion /scripts/ +COPY get_workspace_mount /usr/local/bin +COPY gitconfig /etc + +ARG TARGETARCH +ENV BUILD_ARCH="${TARGETARCH}" \ + WORKSPACE=/src/workspace \ + CC=clang + +RUN \ + chmod 644 /etc/gitconfig && \ + /scripts/install_apps && \ + /scripts/generate_system_bazelrc --user-root-name amazonlinux2 && \ + /scripts/install_golang_apps && \ + rm -rf /scripts + +ENV PATH="${PATH}:/usr/local/go/bin:/opt/bin" \ + PYTHON_BIN_PATH="/opt/bin/python3" \ + PYTHON_LIB_PATH="/usr/lib/python3.8" diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-amazonlinux2/generate_system_bazelrc b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-amazonlinux2/generate_system_bazelrc new file mode 120000 index 00000000..8f22303f --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-amazonlinux2/generate_system_bazelrc @@ -0,0 +1 @@ +../generate_system_bazelrc \ No newline at end of file diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-amazonlinux2/get_workspace_mount b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-amazonlinux2/get_workspace_mount new file mode 120000 index 00000000..581579a3 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-amazonlinux2/get_workspace_mount @@ -0,0 +1 @@ +../get_workspace_mount \ No newline at end of file diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-amazonlinux2/gitconfig b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-amazonlinux2/gitconfig new file mode 120000 index 00000000..db62a00c --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-amazonlinux2/gitconfig @@ -0,0 +1 @@ +../gitconfig \ No newline at end of file diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-amazonlinux2/install_apps b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-amazonlinux2/install_apps new file mode 100755 index 00000000..00b6f31e --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-amazonlinux2/install_apps @@ -0,0 +1,107 @@ +#!/bin/bash + +set -o pipefail +set -o errexit + +VERBOSE=0 + +usage() { + local exitval=${1-1} + cat >&2 << USAGE +usage: + $0 + --verbose Emit verbose info. Default: false +USAGE + # shellcheck disable=SC2086 + exit ${exitval} +} + +while [[ $# -gt 0 ]]; do + case "$1" in + --verbose) + VERBOSE=1 + shift + ;; + -h | --help) + usage 0 + break + ;; + *) + usage + break + ;; + esac +done + +SCRIPT_DIR="$(dirname "$(readlink -f "$0")")" +readonly SCRIPT_DIR +# shellcheck disable=SC1090 +. 
"${SCRIPT_DIR}"/install_go.sh + +function yum_update() { + yum -y update +} + +function install_python() { + amazon-linux-extras install -y \ + python3.8 + mkdir -p /opt/bin + update-alternatives \ + --install /opt/bin/python3 python3-opt /usr/bin/python3.8 100 \ + --slave /opt/bin/python python /usr/bin/python3.8 + +} + +function install_nitro() { + amazon-linux-extras install -y aws-nitro-enclaves-cli + yum install -y aws-nitro-enclaves-cli-devel +} + +function install_gcc() { + # install gcc, which is required to build socat + yum install -y "gcc-7.3.1*" +} + +function install_misc() { + yum install -y \ + "gettext-0.19.8.*" \ + "git-2.38*" \ + "tar-1.26*" \ + "unzip-6.0*" \ + "which-2.*" \ + "zip-3.0*" +} + +function install_packer() { + yum install -y "yum-utils-1.1.31*" + yum-config-manager --add-repo https://rpm.releases.hashicorp.com/AmazonLinux/hashicorp.repo + yum -y install "packer-1.9.1*" + update-alternatives --install /usr/local/bin/packer packer /usr/bin/packer 100 + + /usr/local/bin/packer version +} + +function install_clang() { + yum install -y "clang-11.1.0*" + clang --version +} + +function cleanup() { + cd / + go clean -cache +} + +if [[ ${VERBOSE} -eq 1 ]]; then + printf "=== SHELL ENVIRONMENT ===\n" + env +fi + +yum_update +install_nitro +install_misc +install_clang +install_golang "${BUILD_ARCH}" +install_gcc +install_packer +install_python +cleanup diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-amazonlinux2/install_go.sh b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-amazonlinux2/install_go.sh new file mode 120000 index 00000000..211fd009 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-amazonlinux2/install_go.sh @@ -0,0 +1 @@ +../install_go.sh \ No newline at end of file diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-amazonlinux2/install_golang_apps b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-amazonlinux2/install_golang_apps new file mode 120000 index 00000000..acc9d5a3 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-amazonlinux2/install_golang_apps @@ -0,0 +1 @@ +../install_golang_apps \ No newline at end of file diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-amazonlinux2/test/commands.yaml b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-amazonlinux2/test/commands.yaml new file mode 100644 index 00000000..41823ae5 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-amazonlinux2/test/commands.yaml @@ -0,0 +1,29 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# refer to docs at https://github.com/GoogleContainerTools/container-structure-test + +schemaVersion: 2.0.0 + +# command tests require the docker toolchain +commandTests: + - name: nitro-cli version + command: "nitro-cli" + args: ["--version"] + exitCode: 0 + + - name: nitro-cli build-enclave help + command: "nitro-cli" + args: ["build-enclave", "--help"] + exitCode: 0 diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-amazonlinux2023/.bazelversion b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-amazonlinux2023/.bazelversion new file mode 120000 index 00000000..8f79a5af --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-amazonlinux2023/.bazelversion @@ -0,0 +1 @@ +../../etc/.bazelversion \ No newline at end of file diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-amazonlinux2023/Dockerfile b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-amazonlinux2023/Dockerfile new file mode 100644 index 00000000..d24ba9af --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-amazonlinux2023/Dockerfile @@ -0,0 +1,35 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
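The `commands.yaml` above follows the container-structure-test schema linked in its comment. A sketch of running those checks against a locally built image; the image name, tag, and config path here are assumptions for illustration:

```sh
# Verify nitro-cli works inside the image (image name/tag assumed).
container-structure-test test \
  --image build-amazonlinux2:latest \
  --config test/commands.yaml
```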
+ +FROM amazonlinux:2023.1.20230825.0 + +COPY /install_apps install_golang_apps install_go.sh generate_system_bazelrc .bazelversion /scripts/ +COPY get_workspace_mount /usr/local/bin +COPY gitconfig /etc + +ARG TARGETARCH +ENV BUILD_ARCH="${TARGETARCH}" \ + WORKSPACE=/src/workspace \ + CC=clang + +RUN \ + chmod 644 /etc/gitconfig && \ + /scripts/install_apps && \ + /scripts/generate_system_bazelrc --user-root-name amazonlinux2023 && \ + /scripts/install_golang_apps && \ + rm -rf /scripts + +ENV PATH="${PATH}:/usr/local/go/bin:/opt/bin" \ + PYTHON_BIN_PATH="/opt/bin/python3" \ + PYTHON_LIB_PATH="/usr/lib/python3.8" diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-amazonlinux2023/generate_system_bazelrc b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-amazonlinux2023/generate_system_bazelrc new file mode 120000 index 00000000..8f22303f --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-amazonlinux2023/generate_system_bazelrc @@ -0,0 +1 @@ +../generate_system_bazelrc \ No newline at end of file diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-amazonlinux2023/get_workspace_mount b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-amazonlinux2023/get_workspace_mount new file mode 120000 index 00000000..581579a3 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-amazonlinux2023/get_workspace_mount @@ -0,0 +1 @@ +../get_workspace_mount \ No newline at end of file diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-amazonlinux2023/gitconfig b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-amazonlinux2023/gitconfig new file mode 120000 index 00000000..db62a00c --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-amazonlinux2023/gitconfig @@ -0,0 +1 @@ +../gitconfig \ No newline at end of file diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-amazonlinux2023/install_apps b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-amazonlinux2023/install_apps new file mode 100755 index 00000000..2cf10696 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-amazonlinux2023/install_apps @@ -0,0 +1,106 @@ +#!/bin/bash + +set -o pipefail +set -o errexit + +VERBOSE=0 + +usage() { + local exitval=${1-1} + cat >&2 << USAGE +usage: + $0 + --verbose Emit verbose info. Default: false +USAGE + # shellcheck disable=SC2086 + exit ${exitval} +} + +while [[ $# -gt 0 ]]; do + case "$1" in + --verbose) + VERBOSE=1 + shift + ;; + -h | --help) + usage 0 + break + ;; + *) + usage + break + ;; + esac +done + +SCRIPT_DIR="$(dirname "$(readlink -f "$0")")" +readonly SCRIPT_DIR +# shellcheck disable=SC1090 +. 
"${SCRIPT_DIR}"/install_go.sh + +function dnf_update() { + dnf -y update +} + +function install_python() { + mkdir -p /opt/bin + alternatives \ + --install /opt/bin/python3 python3-opt /usr/bin/python3.9 100 \ + --slave /usr/bin/python3 python3-usr /usr/bin/python3.9 \ + --slave /usr/bin/python python-usr /usr/bin/python3.9 \ + --slave /opt/bin/python python /usr/bin/python3.9 +} + +function install_nitro() { + dnf install -y \ + "aws-nitro-enclaves-cli-1.2.*" \ + "aws-nitro-enclaves-cli-devel-1.2.*" +} + +function install_gcc() { + # install gcc, which is required to build socat + dnf install -y "gcc-11.3.*" +} + +function install_misc() { + dnf install -y \ + "gettext-0.21*" \ + "git-2.40.*" \ + "tar-2:1.34*" \ + "unzip-6.0*" \ + "which-2.*" \ + "zip-3.0*" +} + +function install_packer() { + dnf install -y "dnf-utils-4.1.*" + dnf config-manager --add-repo https://rpm.releases.hashicorp.com/AmazonLinux/hashicorp.repo + dnf -y install "packer-1.9.2*" + update-alternatives --install /usr/local/bin/packer packer /usr/bin/packer 100 + /usr/local/bin/packer version +} + +function install_clang() { + dnf install -y "clang-15.*" + clang --version +} + +function cleanup() { + cd / + go clean -cache +} + +if [[ ${VERBOSE} -eq 1 ]]; then + printf "=== SHELL ENVIRONMENT ===\n" + env +fi + +install_python +dnf_update +install_nitro +install_misc +install_clang +install_golang "${BUILD_ARCH}" +install_gcc +install_packer +cleanup diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-amazonlinux2023/install_go.sh b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-amazonlinux2023/install_go.sh new file mode 120000 index 00000000..211fd009 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-amazonlinux2023/install_go.sh @@ -0,0 +1 @@ +../install_go.sh \ No newline at end of file diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-amazonlinux2023/install_golang_apps b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-amazonlinux2023/install_golang_apps new file mode 120000 index 00000000..acc9d5a3 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-amazonlinux2023/install_golang_apps @@ -0,0 +1 @@ +../install_golang_apps \ No newline at end of file diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-amazonlinux2023/test/commands.yaml b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-amazonlinux2023/test/commands.yaml new file mode 100644 index 00000000..41823ae5 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-amazonlinux2023/test/commands.yaml @@ -0,0 +1,29 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# refer to docs at https://github.com/GoogleContainerTools/container-structure-test + +schemaVersion: 2.0.0 + +# command tests require the docker toolchain +commandTests: + - name: nitro-cli version + command: "nitro-cli" + args: ["--version"] + exitCode: 0 + + - name: nitro-cli build-enclave help + command: "nitro-cli" + args: ["build-enclave", "--help"] + exitCode: 0 diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-debian/.bazelversion b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-debian/.bazelversion new file mode 120000 index 00000000..8f79a5af --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-debian/.bazelversion @@ -0,0 +1 @@ +../../etc/.bazelversion \ No newline at end of file diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-debian/Dockerfile b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-debian/Dockerfile new file mode 100644 index 00000000..eb370730 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-debian/Dockerfile @@ -0,0 +1,61 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +ARG BASE_IMAGE=ubuntu:20.04 + +# ignore this hadolint error as BASE_IMAGE contains an image tag +# hadolint ignore=DL3006 +FROM ${BASE_IMAGE} as libprofiler-builder +ENV CC=clang \ + CXX=clang++ +ADD https://github.com/gperftools/gperftools/releases/download/gperftools-2.13/gperftools-2.13.tar.gz /build/gperftools.tar.gz +ADD https://apt.llvm.org/llvm.sh /build/llvm.sh +COPY compile_libprofiler /scripts/ +RUN /scripts/compile_libprofiler + +FROM docker/buildx-bin:v0.10 AS buildx-bin + +# ignore this hadolint error as BASE_IMAGE contains an image tag +# hadolint ignore=DL3006 +FROM ${BASE_IMAGE} +ARG LOCALE=en_US.UTF-8 +ARG TARGETARCH +COPY install_apps install_golang_apps install_go.sh generate_system_bazelrc .bazelversion /scripts/ +COPY get_workspace_mount /usr/local/bin +COPY gitconfig /etc +COPY --from=buildx-bin /buildx /usr/libexec/docker/cli-plugins/docker-buildx +COPY --from=libprofiler-builder /usr/lib/libprofiler.so.* /usr/lib/x86_64-linux-gnu/ +RUN \ + ln -rs /usr/lib/x86_64-linux-gnu/libprofiler.so.0.* /usr/lib/x86_64-linux-gnu/libprofiler.so.0 && \ + ln -rs /usr/lib/x86_64-linux-gnu/libprofiler.so.0.* /usr/lib/x86_64-linux-gnu/libprofiler.so + +ENV BUILD_ARCH="${TARGETARCH}" \ + WORKSPACE=/src/workspace \ + CC=clang \ + TZ=Etc/UTC \ + LANG=${LOCALE} \ + LANGUAGE=${LOCALE} \ + LC_ALL=${LOCALE} \ + LC_CTYPE=${LOCALE} + +RUN \ + chmod 644 /etc/gitconfig && \ + /scripts/install_apps --locale ${LOCALE} && \ + /scripts/generate_system_bazelrc --user-root-name ubuntu && \ + /scripts/install_golang_apps && \ + rm -rf /scripts + +ENV PATH="/opt/bin:${PATH}:/usr/local/go/bin" \ + PYTHON_BIN_PATH="/opt/bin/python3" \ + PYTHON_LIB_PATH="/usr/lib/python3.9" diff --git
a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-debian/compile_libprofiler b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-debian/compile_libprofiler new file mode 100755 index 00000000..f05397fb --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-debian/compile_libprofiler @@ -0,0 +1,45 @@ +#!/bin/bash + +set -o pipefail +set -o errexit + +declare -r -i CLANG_VER=15 + +function apt_update() { + apt-get --quiet -o 'Acquire::https::No-Cache=True' -o 'Acquire::http::No-Cache=True' update +} + +function install_build_tools() { + DEBIAN_FRONTEND=noninteractive apt-get --quiet install -y --no-install-recommends \ + gnupg="2.2.*" \ + lsb-release="11.1.*" \ + make="4.*" \ + software-properties-common="0.99.*" \ + wget="1.20.*" +} + +function install_clang() { + ls -l /build + chmod +x /build/llvm.sh + /build/llvm.sh ${CLANG_VER} + apt-get --quiet install -y --no-install-recommends libc++-${CLANG_VER}-dev + update-alternatives --install /usr/bin/clang clang /usr/bin/clang-${CLANG_VER} 100 + rm -f llvm.sh + + clang --version +} + +function install_profiler() { + cd /build + tar xz --strip-components 1 -f gperftools.tar.gz + ./configure + make libprofiler.la + declare -r ver=0.5.9 + cp .libs/libprofiler.so.${ver} /usr/lib + ls -l /usr/lib/libprofiler* +} + +apt_update +install_build_tools +install_clang +install_profiler diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-debian/generate_system_bazelrc b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-debian/generate_system_bazelrc new file mode 120000 index 00000000..8f22303f --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-debian/generate_system_bazelrc @@ -0,0 +1 @@ +../generate_system_bazelrc \ No newline at end of file diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-debian/get_workspace_mount b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-debian/get_workspace_mount new file mode 120000 index 00000000..581579a3 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-debian/get_workspace_mount @@ -0,0 +1 @@ +../get_workspace_mount \ No newline at end of file diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-debian/gitconfig b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-debian/gitconfig new file mode 120000 index 00000000..db62a00c --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-debian/gitconfig @@ -0,0 +1 @@ +../gitconfig \ No newline at end of file diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-debian/install_apps b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-debian/install_apps new file mode 100755 index 00000000..8734f014 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-debian/install_apps @@ -0,0 +1,146 @@ +#!/bin/bash + +set -o pipefail +set -o errexit + +declare -i VERBOSE=0 +declare INSTALL_LOCALE=en_US.UTF-8 +declare -r -i CLANG_VER=15 + +usage() { + local exitval=${1-1} + cat >&2 << USAGE +usage: + $0 + --locale Set locale. Default: ${INSTALL_LOCALE} + --verbose Emit verbose info. 
Default: false +USAGE + # shellcheck disable=SC2086 + exit ${exitval} +} + +while [[ $# -gt 0 ]]; do + case "$1" in + --locale) + INSTALL_LOCALE="$2" + shift 2 || usage + ;; + --verbose) + VERBOSE=1 + shift + ;; + -h | --help) usage 0 ;; + *) usage 0 ;; + esac +done + +SCRIPT_DIR="$(dirname "$(readlink -f "$0")")" +readonly SCRIPT_DIR +# shellcheck disable=SC1090 +. "${SCRIPT_DIR}"/install_go.sh + +function apt_update() { + apt-get --quiet -o 'Acquire::https::No-Cache=True' -o 'Acquire::http::No-Cache=True' update +} + +function install_python() { + DEBIAN_FRONTEND=noninteractive apt-get --quiet install -y --no-install-recommends \ + python3.9-venv="3.9.*" python3.9-dev + mkdir -p /opt/bin + update-alternatives \ + --force \ + --install /opt/bin/python3 python3-opt /usr/bin/python3.9 100 \ + --slave /usr/bin/python3 python3-usr /usr/bin/python3.9 \ + --slave /usr/bin/python python-usr /usr/bin/python3.9 \ + --slave /opt/bin/python python /usr/bin/python3.9 + curl https://bootstrap.pypa.io/get-pip.py -o /tmp/get-pip.py + /usr/bin/python3 /tmp/get-pip.py + rm -f /tmp/get-pip.py + /usr/bin/python3 -m pip --version + /usr/bin/python3 -m pip install \ + "libclang~=${CLANG_VER}.0" \ + "numpy~=1.25" +} + +function install_misc() { + DEBIAN_FRONTEND=noninteractive apt-get --quiet install -y --no-install-recommends \ + apt-transport-https="2.0.*" \ + bsdmainutils \ + ca-certificates \ + chrpath="0.16-*" \ + libcurl4="7.68.*" \ + curl="7.68.*" \ + file="1:5.*" \ + gettext="0.19.*" \ + git="1:2.25.*" \ + gnupg="2.2.*" \ + google-perftools="2.*" \ + locales="2.31-*" \ + lsb-release="11.1.*" \ + openssh-client="1:8.2*" \ + patch="2.7.*" \ + rename="1.10-*" \ + software-properties-common="0.99.*" \ + unzip="6.0-*" \ + wget="1.20.*" \ + xz-utils="5.2.*" \ + zip="3.0-*" + if [[ -n ${INSTALL_LOCALE} ]]; then + printf "\nSetting locale to: %s\n" "${INSTALL_LOCALE}" + locale-gen "${INSTALL_LOCALE}" + update-locale LANG="${INSTALL_LOCALE}" + fi +} + +function install_clang() { + curl --silent --fail --show-error --location --remote-name https://apt.llvm.org/llvm.sh + chmod +x llvm.sh + ./llvm.sh ${CLANG_VER} + apt-get --quiet install -y --no-install-recommends libc++-${CLANG_VER}-dev + for prog in clang lldb lld ld.lld llvm-cov llvm-profdata; do + update-alternatives --install /usr/bin/${prog} ${prog} /usr/bin/${prog}-${CLANG_VER} 100 + done + rm -f llvm.sh + + clang --version + llvm-cov --version + llvm-profdata show --version +} + +# Install Docker (https://docs.docker.com/engine/install/debian/) +function install_docker() { + declare -r arch="$1" + apt-get --quiet remove docker docker.io containerd runc + mkdir -p /etc/apt/keyrings + declare -r dist=ubuntu + curl --silent --fail --show-error --location https://download.docker.com/linux/${dist}/gpg \ + | gpg --dearmor -o /etc/apt/keyrings/docker.gpg + declare lsb_release + lsb_release="$(lsb_release -cs)" + echo "deb [arch=${arch} signed-by=/etc/apt/keyrings/docker.gpg] https://download.docker.com/linux/${dist} ${lsb_release} stable" \ + | tee /etc/apt/sources.list.d/docker.list + apt_update + apt-get --quiet install -y --no-install-recommends docker-ce docker-ce-cli containerd.io +} + +function cleanup() { + apt-get --quiet autoremove -y + apt-get autoclean + apt-get clean + rm -rf /var/lib/apt/lists + cd / + go clean -cache +} + +if [[ ${VERBOSE} -eq 1 ]]; then + printf "=== SHELL ENVIRONMENT ===\n" + env +fi + +apt_update +install_misc +install_clang +install_golang "${BUILD_ARCH}" +install_docker "${BUILD_ARCH}" +install_python # should run after other 
install_* +cleanup diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-debian/install_go.sh b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-debian/install_go.sh new file mode 120000 index 00000000..211fd009 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-debian/install_go.sh @@ -0,0 +1 @@ +../install_go.sh \ No newline at end of file diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-debian/install_golang_apps b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-debian/install_golang_apps new file mode 120000 index 00000000..acc9d5a3 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-debian/install_golang_apps @@ -0,0 +1 @@ +../install_golang_apps \ No newline at end of file diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-debian/test/commands.yaml b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-debian/test/commands.yaml new file mode 100644 index 00000000..b3fc2beb --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/build-debian/test/commands.yaml @@ -0,0 +1,39 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# refer to docs at https://github.com/GoogleContainerTools/container-structure-test + +schemaVersion: 2.0.0 + +# command tests require the docker toolchain +commandTests: + - name: "curl version" + command: "curl" + args: ["--version"] + exitCode: 0 + + - name: "clang version" + command: "clang" + args: ["--version"] + exitCode: 0 + + - name: "bazel version" + command: "bazelisk" + args: ["version"] + exitCode: 0 + + - name: "docker image help" + command: "docker" + args: ["help", "image"] + exitCode: 0 diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/coverage-tools/Dockerfile b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/coverage-tools/Dockerfile new file mode 100644 index 00000000..c2f25551 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/coverage-tools/Dockerfile @@ -0,0 +1,21 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +FROM ubuntu:20.04 + +COPY install_apps /scripts/ + +RUN \ + /scripts/install_apps && \ + rm -rf /scripts diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/coverage-tools/install_apps b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/coverage-tools/install_apps new file mode 100755 index 00000000..72fd822a --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/coverage-tools/install_apps @@ -0,0 +1,54 @@ +#!/bin/bash + +set -o pipefail +set -o errexit + +declare -i VERBOSE=0 + +usage() { + local exitval=${1-1} + cat >&2 << USAGE +usage: + $0 + --verbose Emit verbose info. Default: false +USAGE + # shellcheck disable=SC2086 + exit ${exitval} +} + +while [[ $# -gt 0 ]]; do + case "$1" in + --verbose) + VERBOSE=1 + shift + ;; + -h | --help) usage 0 ;; + *) usage 0 ;; + esac +done + +function apt_update() { + apt-get --quiet -o 'Acquire::https::No-Cache=True' -o 'Acquire::http::No-Cache=True' update +} + +function install_misc() { + DEBIAN_FRONTEND=noninteractive apt-get --quiet install -y --no-install-recommends \ + lcov="1.*" \ + google-perftools="2.*" +} + +function clean_debian() { + apt-get --quiet autoremove -y + apt-get autoclean + apt-get clean + rm -rf /var/lib/apt/lists +} + +if [[ ${VERBOSE} -eq 1 ]]; then + printf "=== SHELL ENVIRONMENT ===\n" + env +fi + +apt_update +install_misc +clean_debian diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/coverage-tools/test/commands.yaml b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/coverage-tools/test/commands.yaml new file mode 100644 index 00000000..bf962e40 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/coverage-tools/test/commands.yaml @@ -0,0 +1,39 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# refer to docs at https://github.com/GoogleContainerTools/container-structure-test + +schemaVersion: 2.0.0 + +# command tests require the docker toolchain +commandTests: + - name: "lcov version" + command: "lcov" + args: ["--version"] + exitCode: 0 + + - name: "clang version" + command: "clang" + args: ["--version"] + exitCode: 0 + + - name: "bazel version" + command: "bazelisk" + args: ["version"] + exitCode: 0 + + - name: "docker image help" + command: "docker" + args: ["help", "image"] + exitCode: 0 diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/generate_system_bazelrc b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/generate_system_bazelrc new file mode 100755 index 00000000..9c8e419f --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/generate_system_bazelrc @@ -0,0 +1,83 @@ +#!/bin/bash + +# Generate a system bazelrc file +# Designed to be executed when generating a container image, for example from Dockerfile. +# For info on bazelrc files, refer to https://bazel.build/run/bazelrc?hl=en. 
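For orientation before the script body: the generator below writes a system bazelrc of roughly this shape (a sketch with assumed values; the real suffix comes from _get_toolchains_hash):

  # illustrative /etc/bazel.bazelrc produced by generate_system_bazelrc
  startup --output_user_root=/bazel_root/build_ubuntu_1a2b3c4
  build --action_env=TOOLCHAINS_HASH=1a2b3c4...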
+ +set -o pipefail +set -o errexit + +function usage() { + local exitval=${1-1} + cat &>/dev/stderr << USAGE +usage: + $0 + --user-root-name Name of bazel user-root directory (within bazel cache dir) +USAGE + # shellcheck disable=SC2086 + exit ${exitval} +} + +while [[ $# -gt 0 ]]; do + case "$1" in + --user-root-name) + USER_ROOT_NAME="$2" + shift + shift + ;; + -h | --help) usage 0 ;; + *) + printf "unrecognized arg: %s\n" "$1" + usage + break + ;; + esac +done + +if [[ -z ${USER_ROOT_NAME} ]]; then + printf -- "error: --user-root-name must be specified\n" &>/dev/stderr + usage 1 +fi + +function _get_toolchains_hash() { + { + # emit versions of all tools relevant to builds + uname --machine + cat /etc/os-release + clang --version + /opt/bin/python3 --version + } | sha256sum | cut --delimiter=" " --fields=1 +} + +TOOLCHAINS_HASH=$(_get_toolchains_hash) +readonly TOOLCHAINS_HASH + +readonly BAZELRC="/etc/bazel.bazelrc" +readonly BAZEL_ROOT=/bazel_root +readonly BAZEL_OUTPUT_USER_ROOT="${BAZEL_ROOT}/build_${USER_ROOT_NAME}_${TOOLCHAINS_HASH:0:7}" +mkdir -p "${BAZEL_OUTPUT_USER_ROOT}" + +# Within the running container, globally set these variables. This is less +# obvious than using Dockerfile's ENV, but this particular value isn't +# available to the Dockerfile +# Note: The /etc/profile.d/bazel_env.sh will often be sourced automatically +# but this may only occur for login shells. For bash, the --login +# flag should be supplied. Otherwise, the bazel_env.sh file can be +# sourced directly. +cat >/etc/profile.d/bazel_env.sh <<EOF +export BAZEL_OUTPUT_USER_ROOT="${BAZEL_OUTPUT_USER_ROOT}" +printf "startup --output_base=%s/%s\n" "${BAZEL_OUTPUT_USER_ROOT}" "\$(basename \${WORKSPACE})" >> \$HOME/.bazelrc +EOF + +printf "generating %s\n" "${BAZELRC}" &>/dev/stderr + +# the output_user_root setting will have no effect as long as output_base is set. However, in the +# case that $HOME/.bazelrc is not created via sourcing /etc/profile.d/bazel_env.sh, at least +# the appropriate container-specific output_user_root directory will be used +cat <<BAZELRC >"${BAZELRC}" +startup --output_user_root="${BAZEL_OUTPUT_USER_ROOT}" +# set a variable based on the hash of all build tool dependencies other than bazel itself +# primarily to avoid spurious cache hits for distinct sets of toolchains +build --action_env=TOOLCHAINS_HASH=${TOOLCHAINS_HASH} +BAZELRC diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/get_workspace_mount b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/get_workspace_mount new file mode 100755 index 00000000..517b2ce3 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/get_workspace_mount @@ -0,0 +1,35 @@ +#!/bin/bash + +function get_docker_workspace_mount() { + # when running inside a docker container, expect /.dockerenv to exist + if ! [[ -f /.dockerenv ]]; then + printf "Error: %s must execute inside a docker container\n" "$(basename "${BASH_SOURCE[0]}")" &>/dev/stderr + exit 1 + fi + + # if running inside a docker container, the workspace mount point cannot be + # determined by git or bazel or inspecting the filesystem itself. Instead, we + # need to use docker to expose its mount info for the /src/workspace path.
+ # determine the current container's ID + local -r CONTAINER_ID="$(uname --nodename)" + # use docker inspect to extract the current mount path for /src/workspace + # this format string is a golang template (https://pkg.go.dev/text/template) processed + # by docker's --format flag, per https://docs.docker.com/config/formatting/ + # shellcheck disable=SC2016 + local -r FORMAT_STR=' + {{- range $v := .HostConfig.Binds -}} + {{$pathpair := split $v ":" -}} + {{if eq (index $pathpair 1) "/src/workspace" -}} + {{print (index $pathpair 0) -}} + {{end -}} + {{end -}} + ' + local -r MOUNT_PATH="$(docker inspect --format "${FORMAT_STR}" "${CONTAINER_ID}")" + if [[ -z ${MOUNT_PATH} ]]; then + printf "Error: Unable to determine mount point for /src/workspace. Exiting\n" &>/dev/stderr + exit 1 + fi + printf "%s" "${MOUNT_PATH}" +} + +get_docker_workspace_mount diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/gitconfig b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/gitconfig new file mode 100644 index 00000000..eaf257f4 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/gitconfig @@ -0,0 +1,2 @@ +[safe] + directory = /src/workspace diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/install_go.sh b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/install_go.sh new file mode 100644 index 00000000..a436d20a --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/install_go.sh @@ -0,0 +1,50 @@ +#!/bin/bash +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
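The file that follows is a shell library meant to be sourced rather than executed; a minimal usage sketch (assuming a root shell on an amd64 host):

  # sketch: source the library, install golang, verify; remove_golang undoes it
  . ./install_go.sh
  install_golang amd64
  go version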
+ +# shell library to install and uninstall golang + +function _golang_install_dir() { + printf "/usr/local/go\n" +} + +function install_golang() { + declare -r _ARCH="$1" + declare -r FNAME=gobin.tar.gz + declare -r VERSION=1.20.4 + # shellcheck disable=SC2155 + declare -r GO_INSTALL_DIR="$(_golang_install_dir)" + declare -r -A GO_HASHES=( + [amd64]="698ef3243972a51ddb4028e4a1ac63dc6d60821bf18e59a807e051fee0a385bd" + [arm64]="105889992ee4b1d40c7c108555222ca70ae43fccb42e20fbf1eebb822f5e72c6" + ) + declare -r GO_HASH=${GO_HASHES[${_ARCH}]} + if [[ -z ${GO_HASH} ]]; then + printf "Unrecognized or unsupported architecture for golang: %s\n" "${_ARCH}" &>/dev/stderr + exit 1 + fi + + curl --silent -fSL --output ${FNAME} "https://go.dev/dl/go${VERSION}.linux-${_ARCH}.tar.gz" + echo "${GO_HASH} ${FNAME}" | sha256sum -c + tar --directory /usr/local -xzf ${FNAME} + rm -f ${FNAME} + update-alternatives --install /usr/bin/go go "${GO_INSTALL_DIR}"/bin/go 100 + + go version +} + +function remove_golang() { + update-alternatives --remove-all go + rm -rf "$(_golang_install_dir)" +} diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/install_golang_apps b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/install_golang_apps new file mode 100755 index 00000000..4254e438 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/install_golang_apps @@ -0,0 +1,46 @@ +#!/bin/bash + +set -o pipefail +set -o errexit + +BAZEL_PATH=/usr/bin + +usage() { + local exitval=${1-1} + cat >&2 << USAGE +usage: + $0 + --verbose Emit verbose info. Default: false +USAGE + # shellcheck disable=SC2086 + exit ${exitval} +} + +while [[ $# -gt 0 ]]; do + case "$1" in + --verbose) + set -o xtrace + shift + ;; + -h | --help) usage 0 ;; + *) + usage + break + ;; + esac +done + +function install_bazelisk() { + go install github.com/bazelbuild/bazelisk@v1.19.0 + BAZELISK="$(go env GOPATH)"/bin/bazelisk + if [[ -n ${BAZEL_PATH} ]] && [[ -d ${BAZEL_PATH} ]]; then + ln -s "${BAZELISK}" "${BAZEL_PATH}"/bazel + fi + # install the specified version of bazel + USE_BAZEL_VERSION="$(cat /scripts/.bazelversion)" + export USE_BAZEL_VERSION + "${BAZELISK}" version + rm -rf /bazel_root/* +} + +install_bazelisk diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/presubmit/.pre-commit-config.yaml b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/presubmit/.pre-commit-config.yaml new file mode 120000 index 00000000..9971a06a --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/presubmit/.pre-commit-config.yaml @@ -0,0 +1 @@ +../../etc/.pre-commit-config.yaml \ No newline at end of file diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/presubmit/Dockerfile b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/presubmit/Dockerfile new file mode 100644 index 00000000..024c05fe --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/presubmit/Dockerfile @@ -0,0 +1,33 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +FROM ubuntu:20.04 + +COPY install_apps install_go.sh .pre-commit-config.yaml /scripts/ +COPY gitconfig /etc + +ARG PRE_COMMIT_VENV_DIR=/usr/pre-commit-venv +ARG TARGETARCH +ENV BUILD_ARCH="${TARGETARCH}" \ + WORKSPACE=/src/workspace \ + PRE_COMMIT_HOME=/var/cache/pre-commit \ + PRE_COMMIT_TOOL=${PRE_COMMIT_VENV_DIR}/bin/pre-commit \ + TZ=Etc/UTC + +RUN \ + chmod 644 /etc/gitconfig && \ + /usr/bin/env -v PRE_COMMIT_VENV_DIR=${PRE_COMMIT_VENV_DIR} /scripts/install_apps && \ + rm -rf /scripts + +ENV PATH="${PATH}:/usr/local/go/bin" diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/presubmit/gitconfig b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/presubmit/gitconfig new file mode 120000 index 00000000..db62a00c --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/presubmit/gitconfig @@ -0,0 +1 @@ +../gitconfig \ No newline at end of file diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/presubmit/install_apps b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/presubmit/install_apps new file mode 100755 index 00000000..883ef650 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/presubmit/install_apps @@ -0,0 +1,128 @@ +#!/bin/bash + +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -o pipefail +set -o errexit + +declare -i VERBOSE=0 + +usage() { + local exitval=${1-1} + cat >&2 << USAGE +usage: + $0 + --verbose Emit verbose info. 
Default: false +USAGE + # shellcheck disable=SC2086 + exit ${exitval} +} + +while [[ $# -gt 0 ]]; do + case "$1" in + --verbose) + VERBOSE=1 + set -o xtrace + shift + ;; + -h | --help) usage 0 ;; + *) usage 0 ;; + esac +done + +SCRIPT_DIR="$(dirname "$(readlink -f "$0")")" +readonly SCRIPT_DIR + +function apt_update() { + apt-get --quiet -o 'Acquire::https::No-Cache=True' -o 'Acquire::http::No-Cache=True' update +} + +function install_packages() { + DEBIAN_FRONTEND=noninteractive apt-get --quiet install -y --no-install-recommends \ + apt-transport-https="2.0.*" \ + ca-certificates \ + libcurl4="7.68.*" \ + curl="7.68.*" \ + gnupg="2.2.*" \ + lsb-release="11.1.*" \ + openjdk-11-jre="11.0.*" \ + python3.9-venv="3.9.*" \ + shellcheck="0.7.*" \ + software-properties-common="0.99.*" \ + wget="1.20.*" + update-alternatives --install /usr/bin/python3 python3 /usr/bin/python3.9 100 +} + +# Install Docker (https://docs.docker.com/engine/install/debian/) +function install_docker() { + apt-get --quiet remove docker docker.io containerd runc + local -r KEYRING_DIR=/etc/apt/keyrings + mkdir -p "${KEYRING_DIR}" + local -r DIST=ubuntu + curl --silent --fail --show-error --location https://download.docker.com/linux/${DIST}/gpg \ + | /usr/bin/gpg --dearmor -o "${KEYRING_DIR}"/docker.gpg + local -r LSB_RELEASE="$(/usr/bin/lsb_release -cs)" + echo "deb [arch=${ARCH} signed-by=${KEYRING_DIR}/docker.gpg] https://download.docker.com/linux/${DIST} ${LSB_RELEASE} stable" \ + | tee /etc/apt/sources.list.d/docker.list + apt_update + DEBIAN_FRONTEND=noninteractive apt-get --quiet install -y docker-ce docker-ce-cli containerd.io +} + +function install_precommit() { + /usr/bin/python3.9 -m venv "${PRE_COMMIT_VENV_DIR}" + "${PRE_COMMIT_VENV_DIR}"/bin/pip install pre-commit~=3.1 + "${PRE_COMMIT_TOOL}" --version + + # initialize pre-commit cache, which needs a git repo (a temporary will suffice) + local -r GIT_REPO="$(mktemp -d)" + git init "${GIT_REPO}" + cd "${GIT_REPO}" + # run pre-commit with no files in scope to only trigger population of the cache + { + cat "${SCRIPT_DIR}"/.pre-commit-config.yaml + printf "\nfiles: ^$\n" + } > .pre-commit-config.yaml + git add .pre-commit-config.yaml + "${PRE_COMMIT_TOOL}" run --config .pre-commit-config.yaml || true + rm -rf "${GIT_REPO}" +} + +function cleanup() { + apt-get --quiet autoremove -y + apt-get autoclean + apt-get clean + rm -rf /var/lib/apt/lists + cd / + go clean -cache +} + +if [[ ${VERBOSE} -eq 1 ]]; then + printf "=== SHELL ENVIRONMENT ===\n" + env +fi + +ARCH=$(dpkg --print-architecture) +readonly ARCH + +apt_update +install_packages + +# shellcheck disable=SC1090 +. 
"${SCRIPT_DIR}"/install_go.sh +# golang is used to compile go-based hooks +install_golang "${ARCH}" +install_docker +install_precommit +cleanup diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/presubmit/install_go.sh b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/presubmit/install_go.sh new file mode 120000 index 00000000..211fd009 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/presubmit/install_go.sh @@ -0,0 +1 @@ +../install_go.sh \ No newline at end of file diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/release/Dockerfile b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/release/Dockerfile new file mode 100644 index 00000000..e7a05704 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/release/Dockerfile @@ -0,0 +1,33 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +FROM golang:1.19.3-alpine3.16 AS golang-1.19 +FROM node:18.9.0-alpine3.16 + +COPY --from=golang-1.19 /usr/local/go/ /usr/local/go/ +ENV PATH="${PATH}:/usr/local/go/bin" + +COPY install_release_apps /scripts/ +COPY gitconfig /etc + +ARG TARGETARCH +ENV BUILD_ARCH="${TARGETARCH}" \ + WORKSPACE=/src/workspace + +RUN \ + chmod 644 /etc/gitconfig && \ + /scripts/install_release_apps && \ + rm -rf /scripts + +ENTRYPOINT ["/bin/sh"] diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/release/gitconfig b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/release/gitconfig new file mode 100644 index 00000000..a72cd197 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/release/gitconfig @@ -0,0 +1,3 @@ +[user] + email = nobody@google.com + name = Privacy Sandbox Release System diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/release/install_release_apps b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/release/install_release_apps new file mode 100755 index 00000000..e6c12253 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/release/install_release_apps @@ -0,0 +1,8 @@ +#!/bin/sh + +npm install --global commit-and-tag-version@10.1.0 + +# Install the GitHub CLI tool (https://cli.github.com/) +apk add github-cli + +GOBIN=/usr/local/go/bin go install github.com/bazelbuild/buildtools/buildozer@6.0.1 diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/release/test/commands.yaml b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/release/test/commands.yaml new file mode 100644 index 00000000..62ac6631 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/release/test/commands.yaml @@ -0,0 +1,29 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with 
the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# refer to docs at https://github.com/GoogleContainerTools/container-structure-test + +schemaVersion: 2.0.0 + +# command tests require the docker toolchain +commandTests: + - name: npm version + command: "npm" + args: ["--version"] + exitCode: 0 + + - name: commit-and-tag-version help + command: "commit-and-tag-version" + args: ["--help"] + exitCode: 0 diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/release/test/structure.yaml b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/release/test/structure.yaml new file mode 100644 index 00000000..1d704640 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/release/test/structure.yaml @@ -0,0 +1,23 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# refer to docs at https://github.com/GoogleContainerTools/container-structure-test + +schemaVersion: 2.0.0 + +fileExistenceTests: + - name: git config + path: /etc/gitconfig + shouldExist: true + permissions: "-r--r--r--" diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/test-tools/Dockerfile b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/test-tools/Dockerfile new file mode 100644 index 00000000..45fab3cf --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/test-tools/Dockerfile @@ -0,0 +1,47 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
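The structure.yaml above can also be spot-checked by hand; a sketch, assuming RELEASE_IMAGE names a locally built release image:

  # sketch: confirm the read-only gitconfig asserted by fileExistenceTests
  docker run --rm --entrypoint /bin/sh "${RELEASE_IMAGE:?}" -c 'ls -l /etc/gitconfig'
  # expect a -r--r--r-- mode in the listing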
+ +FROM alpine:3.18 as slowhttptest_builder +# hadolint ignore=DL3018 +RUN apk add --no-cache autoconf automake build-base git openssl-dev +WORKDIR /build +ADD https://github.com/shekyan/slowhttptest/archive/refs/tags/v1.9.0.tar.gz /build/src.tar.gz +RUN tar xz --strip-components 1 -f src.tar.gz && ./configure && make + +FROM alpine:3.18 as wrk_builder +ARG TARGETARCH +ENV BUILD_ARCH="${TARGETARCH}" +COPY build_wrk /build/ +WORKDIR /build +ADD https://github.com/giltene/wrk2/archive/44a94c17d8e6a0bac8559b53da76848e430cb7a7.tar.gz /build/src.tar.gz +RUN /build/build_wrk + +FROM golang:1.21-alpine3.18 AS golang +ENV GOBIN=/usr/local/go/bin +COPY build_golang_apps /scripts/ +RUN /scripts/build_golang_apps + +FROM fullstorydev/grpcurl:v1.8.9-alpine AS grpcurl + +FROM alpine:3.18 +COPY --from=golang /usr/local/go/bin/* /usr/local/bin/ +COPY --from=grpcurl /bin/grpcurl /usr/local/bin/ +ARG TARGETARCH +ENV BUILD_ARCH="${TARGETARCH}" \ + PATH="${PATH}:/usr/local/go/bin" \ + GOBIN=/usr/local/go/bin +COPY install_apps /scripts/ +RUN /scripts/install_apps +COPY --from=slowhttptest_builder /build/src/slowhttptest /usr/bin/ +COPY --from=wrk_builder /build/wrk /usr/bin/wrk2 diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/test-tools/build_golang_apps b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/test-tools/build_golang_apps new file mode 100755 index 00000000..b79e20c4 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/test-tools/build_golang_apps @@ -0,0 +1,16 @@ +#!/bin/busybox sh + +set -o errexit + +install_ghz() { + go install github.com/bojand/ghz/cmd/ghz@v0.114.0 + ghz --help +} + +install_cassowary() { + go install github.com/rogerwelin/cassowary/cmd/cassowary@v0.16.0 + cassowary --help +} + +install_ghz +install_cassowary diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/test-tools/build_wrk b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/test-tools/build_wrk new file mode 100755 index 00000000..d843dd06 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/test-tools/build_wrk @@ -0,0 +1,27 @@ +#!/bin/busybox sh + +set -o errexit +set -o xtrace + +install_no_wrk() { + cat <<EOF >/build/wrk +#!/bin/busybox sh +PROGRAM_NAME="\$(basename \$0)" +printf "Error: %s not supported on Aarch64\n" "\${PROGRAM_NAME}" +printf "build_arch: %s\n" "${BUILD_ARCH}" >/dev/stderr +exit 1 +EOF + chmod 755 /build/wrk +} + +install_wrk() { + apk add --no-cache build-base git openssl-dev zlib-dev + tar xz --strip-components 1 -f src.tar.gz + make -j6 +} + +if [[ ${BUILD_ARCH} == arm64 ]]; then + install_no_wrk +else + install_wrk +fi diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/test-tools/install_apps b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/test-tools/install_apps new file mode 100755 index 00000000..597f937b --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/test-tools/install_apps @@ -0,0 +1,28 @@ +#!/bin/busybox sh + +set -o errexit + +install_packages() { + apk --no-cache add \ + bash~=5 \ + curl~=8 \ + jq~=1 \ + libstdc++ +} + +install_nghttp2() { + apk --no-cache add \ + nghttp2~=1 + h2load --version +} + +install_apache2_utils() { + apk --no-cache add \ + apache2-utils~=2.4 + ab -V +} + +apk --no-cache update +install_packages +install_nghttp2 +install_apache2_utils diff --git
a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/utils/Dockerfile b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/utils/Dockerfile new file mode 100644 index 00000000..0d1defd6 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/images/utils/Dockerfile @@ -0,0 +1,19 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +FROM alpine:3.16 + +RUN apk --no-cache add \ + unzip~=6.0 \ + zip~=3.0 diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tests/data/hashes/build-amazonlinux2 b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tests/data/hashes/build-amazonlinux2 new file mode 100644 index 00000000..b9750ae4 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tests/data/hashes/build-amazonlinux2 @@ -0,0 +1 @@ +b35ee4e2b6210bc66e97083805642945433b0ef648a9d297a2a4ccdfb518413f diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tests/data/hashes/build-amazonlinux2023 b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tests/data/hashes/build-amazonlinux2023 new file mode 100644 index 00000000..7b708170 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tests/data/hashes/build-amazonlinux2023 @@ -0,0 +1 @@ +7501f9f920a794ba6fa7c6cabb17d076ed3db0faf3fe5393e2904ce71978dec5 diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tests/data/hashes/build-debian b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tests/data/hashes/build-debian new file mode 100644 index 00000000..0e9a76c9 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tests/data/hashes/build-debian @@ -0,0 +1 @@ +a99a8af64d61e3eb635aecfb81d229437890693cd6f6d33bd269e9f1c55ca760 diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tests/data/hashes/coverage-tools b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tests/data/hashes/coverage-tools new file mode 100644 index 00000000..f0336331 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tests/data/hashes/coverage-tools @@ -0,0 +1 @@ +cd3fb189dd23793af3bdfa02d6774ccb35bddbec7059761e25c4f7be4c1e8ca1 diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tests/data/hashes/presubmit b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tests/data/hashes/presubmit new file mode 100644 index 00000000..ada65387 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tests/data/hashes/presubmit @@ -0,0 +1 @@ +79f2509c74607ab33bc5fe3f425f61ddf89e2f75efc299b1a5a7a3039367c90e diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tests/data/hashes/release b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tests/data/hashes/release new file mode 100644 index 00000000..e5218ba2 --- /dev/null +++ 
b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tests/data/hashes/release @@ -0,0 +1 @@ +d60fb40a53b1704f7ac353d0d036f49eac10bfd08ccb19f9b436acf8bdf2cb79 diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tests/data/hashes/test-tools b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tests/data/hashes/test-tools new file mode 100644 index 00000000..fc5e0b5c --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tests/data/hashes/test-tools @@ -0,0 +1 @@ +dd1ec6137d4dd22fec555044cd85f484adfa6c7b686880ea5449cff936bad34e diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tests/data/hashes/utils b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tests/data/hashes/utils new file mode 100644 index 00000000..da29b2fa --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tests/data/hashes/utils @@ -0,0 +1 @@ +9fca27d931acc2bc96fa0560466cc0914a0d1cc73fb8749af057caacf2911f85 diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tests/run-tests b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tests/run-tests new file mode 100755 index 00000000..f98389a4 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tests/run-tests @@ -0,0 +1,264 @@ +#!/bin/bash + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Test runner for the build system. This script should preferably be run +# "directly" and not inside docker, permitting the test cases to execute +# both inside and outside a docker container. + +set -o pipefail +set -o errexit + +trap _cleanup EXIT +function _cleanup() { + local -r -i STATUS=$? 
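+ # STATUS must be captured above before any other command runs, since every command overwrites $?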
+ if [[ -d ${TMP_HASHES_DIR1} ]]; then + rm -rf "${TMP_HASHES_DIR1}" "${TMP_HASHES_DIR2}" + fi + if [[ ${STATUS} -ne 0 ]]; then + printf "Error: run-tests status code: %d\n" "${STATUS}" + sleep 5s + fi + exit ${STATUS} +} + +function get_image_list() { + local -r _images_dir="$1" + find "${_images_dir}" -maxdepth 1 -mindepth 1 -type d -printf "%P\n" \ + | sort +} + +function usage() { + declare -r -i exitval=${1-1} + cat &>/dev/stderr <<USAGE +usage: + $0 + --image Run tests only for specified image + --fast Only generate hashes directly rather than also using cbuild + --build-images Build the images + --verbose Produce verbose output +USAGE + # shellcheck disable=SC2086 + exit ${exitval} +} + +declare -i FAST=0 +declare -i VERBOSE=0 +declare -i BUILD_IMAGES=0 +declare IMAGE + +while [[ $# -gt 0 ]]; do + case "$1" in + --image) + IMAGE="$2" + if [[ -z ${IMAGE} ]]; then + usage + fi + shift 2 || usage + ;; + --fast) + FAST=1 + shift + ;; + --build-images) + BUILD_IMAGES=1 + shift + ;; + --verbose) + VERBOSE=1 + shift + ;; + -h | --help) usage 0 ;; + *) + printf "unrecognized arg: %s\n" "$1" + usage + ;; + esac +done + +TESTS_DIR="$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")" +readonly TESTS_DIR +readonly TOOLS_DIR="${TESTS_DIR}"/../tools +readonly HASHES_DIR="${TESTS_DIR}"/data/hashes +readonly IMAGES_DIR="${TESTS_DIR}"/../images +if [[ -n ${IMAGE} ]]; then + IMAGE_LIST="${IMAGE}" +else + IMAGE_LIST="$(get_image_list "${IMAGES_DIR}")" +fi +readonly IMAGE_LIST +declare -i RETCODE=0 + +function cli_tests_test-tools() { + declare -r _image="$1" + if [[ ${FAST} -eq 1 ]] || [[ ${_image} != test-tools ]]; then + return + fi + declare -a -r TOOLS=( + "${TOOLS_DIR}/curl --version" + "${TOOLS_DIR}/ghz --version" + "${TOOLS_DIR}/ab -V" + "${TOOLS_DIR}/cassowary --version" + "${TOOLS_DIR}/grpcurl -version" + "${TOOLS_DIR}/h2load --version" + "${TOOLS_DIR}/hadolint --version" + "${TOOLS_DIR}/jq --version" + ) + printf "Testing test CLI tool wrappers\n" + for tool_cmd in "${TOOLS[@]}"; do + printf " %s\n" "${tool_cmd}" + ${tool_cmd} &>/dev/null + done +} + +function cli_tests_misc() { + declare -a -r TOOLS=( + "${TOOLS_DIR}/aws-cli help" + "${TOOLS_DIR}/awscurl --help" + ) + printf "Testing misc CLI tool wrappers\n" + for tool_cmd in "${TOOLS[@]}"; do + printf " %s\n" "${tool_cmd}" + ${tool_cmd} &>/dev/null + done +} + +function cli_tests_utils() { + declare -r _image="$1" + if [[ ${FAST} -eq 1 ]] || [[ ${_image} != utils ]]; then + return + fi + declare -a -r TOOLS=( + "${TOOLS_DIR}/unzip -h" + "${TOOLS_DIR}/zip -h" + ) + printf "Testing utils CLI tool wrappers\n" + for tool_cmd in "${TOOLS[@]}"; do + printf " %s\n" "${tool_cmd}" + ${tool_cmd} &>/dev/null + done +} + +function cli_tests_release() { + declare -r _image="$1" + if [[ ${FAST} -eq 1 ]] || [[ ${_image} != release ]]; then + return + fi + declare -a -r TOOLS=( + "${TOOLS_DIR}/buildozer -version" + ) + printf "Testing release CLI tool wrappers\n" + for tool_cmd in "${TOOLS[@]}"; do + printf " %s\n" "${tool_cmd}" + ${tool_cmd} &>/dev/null + done +} + +function create_temp_hash_dir() { + local -r DIR="$(mktemp --directory)" + cp "${HASHES_DIR}"/* "${DIR}" + printf "%s" "${DIR}" +} + +# warning when running inside a docker container +if [[ -f /.dockerenv ]]; then + printf "warning: Executing within docker container, which obviates testing in a non-docker environment\n" &>/dev/stderr +fi + +declare -a GET_BUILDER_IMAGE_ARGS=( + --sha-only +) +if [[ ${BUILD_IMAGES} -eq 0 ]]; then + GET_BUILDER_IMAGE_ARGS+=(--no-build) +fi + +function generate_hashes_direct() {
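+ # hash each image directly on the host and diff against the checked-in baselines; + # drift updates the baseline files and is reflected in RETCODE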
TMP_HASHES_DIR1=$(create_temp_hash_dir) + printf "Generating image hashes (direct mode)\n" + + for img in ${IMAGE_LIST}; do + # shellcheck disable=SC2086 + if ! "${TOOLS_DIR}"/get-builder-image-tagged "${GET_BUILDER_IMAGE_ARGS[@]}" --image ${img} >"${TMP_HASHES_DIR1}/${img}"; then + printf "Error generating image hash: %s\n" "${img}" &>/dev/stderr + RETCODE+=1 + fi + done + + BASELINE_UPDATES="$(diff --brief "${HASHES_DIR}" "${TMP_HASHES_DIR1}" || true)" + readonly BASELINE_UPDATES + if [[ -n $BASELINE_UPDATES ]]; then + # shellcheck disable=SC2086 + printf "detected shift in baseline files:\n%s\n" "${BASELINE_UPDATES}" + if [[ ${VERBOSE} -eq 1 ]]; then + diff "${HASHES_DIR}" "${TMP_HASHES_DIR1}" || true + fi + cp --force "${TMP_HASHES_DIR1}"/* "${HASHES_DIR}" + RETCODE+=10 + else + printf "hashes unchanged\n" + fi +} + +function generate_hashes_cbuild() { + TMP_HASHES_DIR2=$(create_temp_hash_dir) + + printf "Generating image hashes (cbuild mode)\n" + for img in ${IMAGE_LIST}; do + if ! "${TOOLS_DIR}"/cbuild --image build-debian --cmd "tools/get-builder-image-tagged ${GET_BUILDER_IMAGE_ARGS[*]} --image ${img}" >"${TMP_HASHES_DIR2}/${img}"; then + printf "Error generating image hash: %s\n" "${img}" &>/dev/stderr + RETCODE+=1 + fi + done + + MODE_MISMATCH="$(diff --brief "${TMP_HASHES_DIR1}" "${TMP_HASHES_DIR2}" || true)" + readonly MODE_MISMATCH + if [[ -n $MODE_MISMATCH ]]; then + # shellcheck disable=SC2086 + printf "Error: mismatch between direct and cbuild modes\n%s" "${MODE_MISMATCH}" &>/dev/stderr + if [[ ${VERBOSE} -eq 1 ]]; then + diff "${TMP_HASHES_DIR1}" "${TMP_HASHES_DIR2}" || true + fi + RETCODE+=100 + else + printf "hashes unchanged\n" + fi +} + +generate_hashes_direct + +if [[ ${FAST} -eq 1 ]]; then + exit ${RETCODE} +fi + +generate_hashes_cbuild + +# CLI tests + +cli_tests_misc +for img in ${IMAGE_LIST}; do + cli_tests_test-tools "${img}" + cli_tests_utils "${img}" + cli_tests_release "${img}" +done + +printf "test for error in direct execution of underlying wrapper scripts\n" +for tool in test-tool utils-tool; do + printf " %s: " "${tool}" + if ! "${TOOLS_DIR}"/${tool} &>/dev/null; then + printf "ok\n" + else + printf "failed to exit non-zero\n" + fi +done + +exit ${RETCODE} diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/ab b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/ab new file mode 120000 index 00000000..d609abda --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/ab @@ -0,0 +1 @@ +test-tool \ No newline at end of file diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/aws-cli b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/aws-cli new file mode 100755 index 00000000..0ded1392 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/aws-cli @@ -0,0 +1,70 @@ +#!/usr/bin/env bash + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
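A usage sketch for the wrapper below (the profile name is a placeholder; WORKSPACE must be the absolute repo root, derived here via git):

  # sketch: list S3 buckets through the containerized AWS CLI
  WORKSPACE="$(git rev-parse --show-toplevel)" \
    AWS_PROFILE=my-profile \
    tools/aws-cli s3 ls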
+ +# environment variables (all optional): +# WORKSPACE repo root directory, must be an absolute path +# EXTRA_DOCKER_RUN_ARGS additional arguments to pass to docker run invocations +# +# environment variables exported to AWS CLI +# For more info on supported env vars, see: +# https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-envvars.html +# +# AWS_ACCESS_KEY_ID +# AWS_SECRET_ACCESS_KEY +# AWS_SESSION_TOKEN +# AWS_REGION +# AWS_DEFAULT_REGION +# AWS_PROFILE + +set -o errexit + +# shellcheck disable=SC1090 +source "$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")"/builder.sh + +declare -a ENV_VARS +builder::add_aws_env_vars ENV_VARS +ENV_VARS+=( + "HOME=/home" +) + +declare -a DOCKER_RUN_ARGS +DOCKER_RUN_ARGS+=( + "--rm" + "$(echo "${EXTRA_DOCKER_RUN_ARGS}" | envsubst)" +) +for evar in "${ENV_VARS[@]}" +do + DOCKER_RUN_ARGS+=( + "--env=${evar}" + ) +done +if [[ -t 0 ]] && [[ -t 1 ]]; then + # stdin and stdout are open, assume it's an interactive tty session + DOCKER_RUN_ARGS+=( + "--interactive" + "--tty" + ) +fi + +REL_PWD="$(realpath --relative-to="${WORKSPACE}" "$(pwd)")" +readonly REL_PWD + +# shellcheck disable=SC2068 +docker run \ + ${DOCKER_RUN_ARGS[@]} \ + --user "$(id -u):$(id -g)" \ + --volume "${HOME}"/.aws/:/home/.aws/ \ + --volume "${WORKSPACE}":/src/workspace \ + --workdir /src/workspace/"${REL_PWD}" \ + amazon/aws-cli:latest "$@" diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/awscurl b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/awscurl new file mode 100755 index 00000000..355d58e1 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/awscurl @@ -0,0 +1,71 @@ +#!/usr/bin/env bash + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
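A matching sketch for the awscurl wrapper below (service, region, and URL are placeholders for whatever SigV4-signed endpoint is being probed):

  # sketch: issue a SigV4-signed request through the containerized awscurl
  WORKSPACE="$(git rev-parse --show-toplevel)" \
    tools/awscurl --service execute-api --region us-east-1 \
    https://example.execute-api.us-east-1.amazonaws.com/prod/ping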
+ +# environment variables (all optional): +# WORKSPACE repo root directory, must be an absolute path +# EXTRA_DOCKER_RUN_ARGS additional arguments to pass to docker run invocations +# +# environment variables exported to AWS CLI +# For more info on supported env vars, see: +# https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-envvars.html +# +# AWS_ACCESS_KEY_ID +# AWS_SECRET_ACCESS_KEY +# AWS_SESSION_TOKEN +# AWS_REGION +# AWS_DEFAULT_REGION +# AWS_PROFILE + +set -o errexit + +# shellcheck disable=SC1090 +source "$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")"/builder.sh + +declare -a ENV_VARS +builder::add_aws_env_vars ENV_VARS +ENV_VARS+=( + "HOME=/home" + "PYTHONPATH=/" +) + +declare -a DOCKER_RUN_ARGS +DOCKER_RUN_ARGS+=( + --rm + --interactive + "$(echo "${EXTRA_DOCKER_RUN_ARGS}" | envsubst)" +) +for evar in "${ENV_VARS[@]}" +do + DOCKER_RUN_ARGS+=( + "--env=${evar}" + ) +done +if [[ -t 0 ]] && [[ -t 1 ]]; then + # stdin and stdout are open, assume it's an interactive tty session + DOCKER_RUN_ARGS+=( + --tty + ) +fi + +REL_PWD="$(realpath --relative-to="${WORKSPACE}" "$(pwd)")" +readonly REL_PWD + +# shellcheck disable=SC2068 +docker run \ + ${DOCKER_RUN_ARGS[@]} \ + --user "$(id -u):$(id -g)" \ + --volume "${HOME}"/.aws/:/home/.aws/ \ + --volume "${WORKSPACE}":/src/workspace \ + --workdir /src/workspace/"${REL_PWD}" \ + ghcr.io/okigan/awscurl:latest "$@" diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/bazel-amazonlinux2 b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/bazel-amazonlinux2 new file mode 120000 index 00000000..98d6d309 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/bazel-amazonlinux2 @@ -0,0 +1 @@ +bazel-debian \ No newline at end of file diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/bazel-amazonlinux2023 b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/bazel-amazonlinux2023 new file mode 120000 index 00000000..98d6d309 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/bazel-amazonlinux2023 @@ -0,0 +1 @@ +bazel-debian \ No newline at end of file diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/bazel-debian b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/bazel-debian new file mode 100755 index 00000000..843b3b6e --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/bazel-debian @@ -0,0 +1,69 @@ +#!/usr/bin/env bash + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
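The wrapper below splits its argument list at "--": everything before it is passed to bazel, everything from "--" onward goes to the program under bazel run; a sketch (the target label is a placeholder):

  # sketch: bazel args before "--", application args after
  tools/bazel-debian run //services/example:server -- --port=50051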
+ +# environment variables supported by cbuild (all optional): +# WORKSPACE Set the path to the workspace (repo root) +# AWS_ACCESS_KEY_ID AWS auth token +# AWS_SECRET_ACCESS_KEY AWS auth token +# BAZEL_STARTUP_ARGS Additional startup arguments to pass to bazel invocations +# BAZEL_EXTRA_ARGS Additional command arguments to pass to bazel invocations +# EXTRA_DOCKER_RUN_ARGS Additional arguments to pass to docker run invocations + +set -o pipefail +set -o errexit + +function partition_array() { + if [[ ${#@} -ne 3 ]]; then + printf "insufficient args\n" &>/dev/stderr + return 1 + fi + declare -n arr=$1 + declare -n pre=$2 + declare -n post=$3 + if [[ ${#arr[@]} -eq 0 ]]; then + return + fi + + declare -r delim="--" + declare target=pre + for i in "${!arr[@]}"; do + if [[ ${arr[$i]} == "${delim}" ]]; then + target=post + fi + if [[ ${target} == pre ]]; then + pre+=("${arr[$i]}") + else + post+=("${arr[$i]}") + fi + done +} + +SCRIPT_NAME="$(basename "${BASH_SOURCE[0]}")" +readonly SCRIPT_NAME +readonly IMAGE=build-"${SCRIPT_NAME/bazel-}" +CBUILD=$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")/cbuild +readonly CBUILD + +# partition the command-line args into "bazel args" and "app args", where the +# app args are all those after the presence of the "--" arg, if any +declare -a BAZEL_ARGS +declare -a APP_ARGS +# shellcheck disable=SC2034,SC2206 +declare -r -a ARGLIST=("$@") +partition_array ARGLIST BAZEL_ARGS APP_ARGS + +"${CBUILD}" --seccomp-unconfined --image "${IMAGE}" --cmd " +printf 'bazel output_base: [%s]\n' \"\$(bazel info output_base 2>/dev/null)\" +bazel ${BAZEL_STARTUP_ARGS} ${BAZEL_ARGS[*]@Q} ${BAZEL_EXTRA_ARGS} ${APP_ARGS[*]@Q} +" diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/builder.sh b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/builder.sh new file mode 100644 index 00000000..77317aae --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/builder.sh @@ -0,0 +1,220 @@ +#!/bin/bash +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# +# shell package enabling support for docker +# + +# require this script to be sourced rather than executed +if ! (return 0 2>/dev/null); then + printf "Error: Script %s must be sourced\n" "${BASH_SOURCE[0]}" &>/dev/stderr + exit 1 +fi + +####################################### +# Configure and export the WORKSPACE variable. Does not +# overwrite existing WORKSPACE value. 
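+# Resolution order, per the implementation below: an already-exported
+# WORKSPACE wins; otherwise the git superproject root is used if present,
+# else the git toplevel, finally anchored at the WORKSPACE file's directory.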
+####################################### +function builder::set_workspace() { + export WORKSPACE + if [[ -v WORKSPACE ]]; then + return + fi + local -r GIT_TOPLEVEL="$(git rev-parse --show-superproject-working-tree)" + if [[ -n ${GIT_TOPLEVEL} ]]; then + WORKSPACE="${GIT_TOPLEVEL}" + else + WORKSPACE="$(git rev-parse --show-toplevel)" + fi + local -r ws_path="$(realpath "${WORKSPACE}"/WORKSPACE)" + WORKSPACE="$(dirname "${ws_path}")" +} + +####################################### +# Return the path to the build workspace, for use with +# the docker volume or mount argument +####################################### +function builder::get_docker_workspace_mount() { + if [[ -v WORKSPACE_MOUNT ]]; then + printf "%s" "${WORKSPACE_MOUNT}" + return + fi + # when running inside a docker container, expect /.dockerenv to exist + if ! [[ -f /.dockerenv ]]; then + printf "%s" "${WORKSPACE}" + return + fi + + # if running inside a docker container, the workspace mount point cannot be + # determined by git or bazel or inspecting the filesystem itself. Instead, we + # need to use docker to expose its mount info for the /src/workspace path. + # determine the current container's ID + local -r CONTAINER_ID="$(uname --nodename)" + # use docker inspect to extract the current mount path for /src/workspace + # this format string is a golang template (https://pkg.go.dev/text/template) processed + # by docker's --format flag, per https://docs.docker.com/config/formatting/ + # shellcheck disable=SC2016 + declare -r FORMAT_STR=' + {{- range $v := .HostConfig.Binds -}} + {{$pathpair := split $v ":" -}} + {{if eq (index $pathpair 1) "/src/workspace" -}} + {{print (index $pathpair 0) -}} + {{end -}} + {{end -}} + ' + local -r MOUNT_PATH="$(docker inspect --format "${FORMAT_STR}" "${CONTAINER_ID}")" + if [[ -z ${MOUNT_PATH} ]]; then + printf "Error: Unable to determine mount point for /src/workspace. 
Exiting\n" &>/dev/stderr + exit 1 + fi + printf "%s" "${MOUNT_PATH}" +} + +function builder::get_tools_dir() { + dirname "$(readlink -f "${BASH_SOURCE[0]}")" +} + +####################################### +# Invoke cbuild tool in a build-debian container +####################################### +function builder::cbuild_debian() { + local -r CBUILD="$(builder::get_tools_dir)"/cbuild + printf "=== cbuild debian action envs ===\n" + # shellcheck disable=SC2086 + "${CBUILD}" ${CBUILD_ARGS} --image build-debian --cmd "grep -o 'action_env.*' /etc/bazel.bazelrc 1>/dev/stderr 2>/dev/null" + # shellcheck disable=SC2086 + "${CBUILD}" ${CBUILD_ARGS} --image build-debian --cmd "$*" +} + +####################################### +# Add AWS env vars to a specified array +# Arguments: +# * the name of an array to which to append values +####################################### +function builder::add_aws_env_vars() { + declare -n args=$1 + args+=( + "AWS_ACCESS_KEY_ID" + "AWS_SECRET_ACCESS_KEY" + "AWS_SESSION_TOKEN" + "AWS_REGION" + "AWS_DEFAULT_REGION" + "AWS_PROFILE" + ) +} + +####################################### +# Build a Nitro EIF from a docker image tarfile +# Arguments: +# DOCKER_IMAGE_TAR Docker image tarfile +# DOCKER_IMAGE_URI Docker image uri corresponding to tarfile (without tag) +# DOCKER_IMAGE_TAG Docker image tag corresponding to tarfile +# OUTPUT_PATH Output path, must be within the workspace +# EIF_NAME Output base filename +# BUILDER_IMAGE_NAME (optional) Amazon Linux builder image name +####################################### +function builder::docker_img_to_nitro() { + local -r docker_image_tar="$1" + local -r docker_image_uri="$2" + local -r docker_image_tag="$3" + local -r output_path="$4" + local -r eif_name="$5" + local -r image="${6-build-amazonlinux2}" + local -r temp_tag="$(mktemp --dry-run temp-XXXXXX)" + docker load -i "${docker_image_tar}" + # add a temp tag to reduce the chance of conflicts or race conditions + docker tag "${docker_image_uri}:${docker_image_tag}" "${docker_image_uri}:${temp_tag}" + builder::docker_uri_to_nitro \ + "${docker_image_uri}:${temp_tag}" \ + "${output_path}/${eif_name}" \ + "${image}" + # remove the temp tag + docker image rm "${docker_image_uri}:${temp_tag}" +} + +####################################### +# Build a Nitro EIF from a docker image +# Arguments: +# DOCKER_URI (with tag) +# OUTPUT_BASE_FILENAME path and base filename +# BUILDER_IMAGE_NAME (optional) Amazon Linux builder image name +####################################### +function builder::docker_uri_to_nitro() { + local -r docker_uri="$1" + local -r output_base_fname="$2" + local -r image="${3-build-amazonlinux2}" + local -r output_eif="${output_base_fname}".eif + local -r output_json="${output_base_fname}".json + local -r output_pcr0_json="${output_base_fname}".pcr0.json + local -r output_pcr0_txt="${output_base_fname}".pcr0.txt + builder::cbuild_al "${image}" " +nitro-cli build-enclave --docker-uri ${docker_uri} --output-file ${output_eif@Q} >${output_json@Q} +jq --compact-output '{PCR0: .Measurements.PCR0}' ${output_json@Q} >${output_pcr0_json@Q} +jq --compact-output --raw-output '.Measurements.PCR0' ${output_json@Q} >${output_pcr0_txt@Q} +" +} + +####################################### +# Invoke cbuild tool in an amazonlinux build container +# Arguments: +# BUILDER_IMAGE_NAME Amazon Linux builder image name +####################################### +function builder::cbuild_al() { + local -r image="$1" + shift + local -r cbuild="$(builder::get_tools_dir)"/cbuild + declare -a 
env_vars + builder::add_aws_env_vars env_vars + declare env_args + for evar in "${env_vars[@]}"; do + env_args+=(--env "${evar}") + done + printf "=== cbuild %s action envs ===\n" "${image}" + # shellcheck disable=SC2086 + "${cbuild}" ${CBUILD_ARGS} "${env_args[@]}" --image "${image}" --cmd "grep -o 'action_env.*' /etc/bazel.bazelrc 1>/dev/stderr 2>/dev/null" + # shellcheck disable=SC2086 + "${cbuild}" ${CBUILD_ARGS} "${env_args[@]}" --image "${image}" --cmd "$*" +} + +function builder::cbuild_al2() { + builder::cbuild_al build-amazonlinux2 "$@" +} + +function builder::cbuild_al2023() { + builder::cbuild_al build-amazonlinux2023 "$@" +} + +####################################### +# Return the numeric id of the user or group +# Arguments: +# single character, either "u" for user or "g" for group +####################################### +function builder::id() { + declare -r mode="$1" + declare -i _id + _id=$(id "-${mode}") + if [[ ${_id} -ne 0 ]]; then + printf "%s" "${_id}" + else + # id is 0 (root), use the owner/group of the WORKSPACE file instead + stat -c "%${mode}" "${WORKSPACE}"/WORKSPACE + fi +} + +####################################### +# invoke functions to configure the build environment +####################################### + +builder::set_workspace diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/buildozer b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/buildozer new file mode 120000 index 00000000..78c3d50c --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/buildozer @@ -0,0 +1 @@ +release-tool \ No newline at end of file diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/cassowary b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/cassowary new file mode 120000 index 00000000..d609abda --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/cassowary @@ -0,0 +1 @@ +test-tool \ No newline at end of file diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/cbuild b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/cbuild new file mode 100755 index 00000000..c40a3aed --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/cbuild @@ -0,0 +1,252 @@ +#!/usr/bin/env bash + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -o pipefail +set -o errexit + +IMAGE="build-debian" +VERBOSE=0 +declare -r -a IMAGE_LIST=( + "build-debian" + "build-amazonlinux2" + "build-amazonlinux2023" + "presubmit" +) + +declare -a ENV_VARS +ENV_VARS+=( + "BAZEL_STARTUP_ARGS" + "BAZEL_EXTRA_ARGS" +) + +function usage() { + local exitval=${1-1} + cat &>/dev/stderr << USAGE +usage: + $0 + --cmd bash command string to execute within the docker container + --cmd-profiler enable profiler for the command + --image Image name for the build runtime. 
Valid names:
+USAGE
+
+  for elem in "${IMAGE_LIST[@]}"
+  do
+    if [[ ${elem} == "${IMAGE}" ]]; then
+      default_text=" (default)"
+    fi
+    printf " * %s%s\n" "${elem}" "${default_text}" &>/dev/stderr
+    default_text=""
+  done
+
+  cat &>/dev/stderr << USAGE
+  --env <name>[=<value>]     Name (or name=value) of exported environment variable, propagated into container
+  --without-shared-cache     Containers will not mount ${HOME}/.cache/bazel
+  --without-embedded-docker  Disable docker client within container
+  --docker-network           Specify docker network type or name, value passed to docker run --network. Default: ${DOCKER_NETWORK}
+  --seccomp-unconfined       Run docker container without a seccomp profile
+  --verbose                  Enable verbose output
+
+Environment variables (all optional):
+  WORKSPACE              Full path to the workspace (repo root)
+  WORKSPACE_MOUNT        Full path to the workspace on the host filesystem
+  EXTRA_DOCKER_RUN_ARGS  Additional arguments to pass to docker run invocations
+  CBUILD_IMAGE           docker URI to the specific image to run
+  DOCKER_NETWORK         Specify docker network, equivalent of --docker-network flag.
+
+Environment variables propagated into container:
+  BAZEL_STARTUP_ARGS
+  BAZEL_EXTRA_ARGS
+USAGE
+  # shellcheck disable=SC2086
+  exit ${exitval}
+}
+
+declare -i WITH_SHARED_CACHE=1
+declare -i WITH_DOCKER_SOCK=1
+declare -i WITH_CMD_PROFILER=0
+DOCKER_NETWORK="${DOCKER_NETWORK:-bridge}"
+declare -i DOCKER_SECCOMP_UNCONFINED=0
+
+while [[ $# -gt 0 ]]; do
+  case "$1" in
+    --cmd)
+      CMD="$2"
+      shift 2 || usage
+      ;;
+    --cmd-profiler)
+      WITH_CMD_PROFILER=1
+      shift
+      ;;
+    --env)
+      ENV_VARS+=("$2")
+      shift 2 || usage
+      ;;
+    --image)
+      IMAGE="$2"
+      shift 2 || usage
+      ;;
+    --without-shared-cache)
+      WITH_SHARED_CACHE=0
+      shift
+      ;;
+    --without-embedded-docker)
+      WITH_DOCKER_SOCK=0
+      shift
+      ;;
+    --docker-network)
+      DOCKER_NETWORK="$2"
+      shift 2 || usage
+      ;;
+    --seccomp-unconfined)
+      DOCKER_SECCOMP_UNCONFINED=1
+      shift
+      ;;
+    --verbose)
+      VERBOSE=1
+      shift
+      ;;
+    -h | --help) usage 0 ;;
+    *)
+      printf "unrecognized arg: %s\n" "$1"
+      usage
+      ;;
+  esac
+done
+
+if [[ -z ${IMAGE} ]]; then
+  printf -- "error: --image must be specified\n" &>/dev/stderr
+  usage 1
+fi
+# shellcheck disable=SC2076
+if ! [[ " ${IMAGE_LIST[*]} " =~ " ${IMAGE} " ]]; then
+  printf -- "error: image [%s] not recognized\n" "${IMAGE}" &>/dev/stderr
+  usage 1
+fi
+
+TOOLS_DIR="$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")"
+readonly TOOLS_DIR
+# shellcheck disable=SC1090
+source "${TOOLS_DIR}"/builder.sh
+
+trap _cleanup EXIT
+function _cleanup() {
+  local -r -i status=$?
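+  # On exit, repoint bazel convenience symlinks at the host cache; failures
+  # here are ignored and the original exit status is preserved.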
+ "${TOOLS_DIR}"/normalize-bazel-symlinks &>/dev/null || true + exit ${status} +} + +WORKSPACE_MOUNT="$(builder::get_docker_workspace_mount)" +readonly WORKSPACE_MOUNT +export WORKSPACE_MOUNT +if [[ ${VERBOSE} -eq 1 ]]; then + printf "mounting workspace into container: %s\n" "${WORKSPACE_MOUNT}" &>/dev/stderr +fi + +TOOLS_RELDIR="$(realpath "${TOOLS_DIR}" --relative-to="${PWD}")" +readonly TOOLS_RELDIR + +if [[ -n ${CBUILD_IMAGE} ]]; then + IMAGE_TAGGED=${CBUILD_IMAGE} +else + IMAGE_TAGGED=$("${TOOLS_DIR}"/get-builder-image-tagged --image "${IMAGE}") +fi +readonly IMAGE_TAGGED + +PWD_WORKSPACE_REL_PATH="$(realpath --relative-base="${WORKSPACE}" "${PWD}")" +readonly PWD_WORKSPACE_REL_PATH +WORKDIR=/src/workspace +if [[ ${PWD_WORKSPACE_REL_PATH:0:1} != / ]]; then + WORKDIR="/src/workspace/${PWD_WORKSPACE_REL_PATH}" +fi +readonly WORKDIR + +declare -a DOCKER_RUN_ARGS +DOCKER_RUN_ARGS+=( + "--rm" + "--entrypoint=/bin/bash" + "--volume=${WORKSPACE_MOUNT}:/src/workspace" + "--workdir=${WORKDIR}" + "--network=${DOCKER_NETWORK}" + "$(echo "${EXTRA_DOCKER_RUN_ARGS}" | envsubst)" +) + +if [[ ${DOCKER_SECCOMP_UNCONFINED} -eq 1 ]]; then + DOCKER_RUN_ARGS+=("--security-opt=seccomp=unconfined") +fi + +if [[ ${WITH_CMD_PROFILER} -eq 1 ]]; then + if [[ ${IMAGE} != build-debian ]]; then + printf "error: --cmd-profiler is only compatible with build-debian\n" &>/dev/stderr + usage 1 + fi + DOCKER_RUN_ARGS+=( + "--env=CMD_PROFILER=LD_PRELOAD=/usr/lib/x86_64-linux-gnu/libprofiler.so" + "--env=CPUPROFILE=benchmark.prof" + ) +fi + +# inside the docker build images, /bazel_root is the bazel cache dir, per the system-wide bazelrc +readonly BAZEL_ROOT=/bazel_root +if [[ ${WITH_SHARED_CACHE} -eq 0 ]]; then + # use tmpfs for as temporary, container-bound bazel cache + DOCKER_RUN_ARGS+=( + "--tmpfs ${BAZEL_ROOT}:exec" + ) +else + # mount host filesystem for "shared" use by multiple docker container invocations + DOCKER_RUN_ARGS+=( + "--volume ${HOME}/.cache/bazel:${BAZEL_ROOT}" + ) +fi +if [[ ${WITH_DOCKER_SOCK} -eq 1 ]]; then + DOCKER_RUN_ARGS+=( + "--volume /var/run/docker.sock:/var/run/docker.sock" + ) +fi +for evar in "${ENV_VARS[@]}" +do + DOCKER_RUN_ARGS+=( + "--env=${evar}" + ) +done +if [[ -t 0 ]] && [[ -t 1 ]]; then + # stdin and stdout are open, assume it's an interactive tty session + DOCKER_RUN_ARGS+=( + --interactive + --tty + ) +fi + +if [[ ${VERBOSE} -eq 1 ]]; then + set -o xtrace +fi +if [[ -z ${CMD} ]]; then + # shellcheck disable=SC2068 + docker run \ + ${DOCKER_RUN_ARGS[@]} \ + "${IMAGE_TAGGED}" \ + --login +elif [[ ${WITH_CMD_PROFILER} -eq 1 ]]; then + # shellcheck disable=SC2068 + docker run \ + ${DOCKER_RUN_ARGS[@]} \ + "${IMAGE_TAGGED}" \ + --login -c "'${TOOLS_RELDIR}'/normalize-bazel-symlinks; env \${CMD_PROFILER} ${CMD}" +else + # shellcheck disable=SC2068 + docker run \ + ${DOCKER_RUN_ARGS[@]} \ + "${IMAGE_TAGGED}" \ + --login -c "${CMD}" +fi diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/collect-coverage b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/collect-coverage new file mode 100755 index 00000000..746185c2 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/collect-coverage @@ -0,0 +1,105 @@ +#!/usr/bin/env bash + +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# environment variables: +# WORKSPACE Set the path to the workspace (repo root) + +set -o pipefail +set -o errexit + +declare LCOV_REPORT="${WORKSPACE}/bazel-out/_coverage/_coverage_report.dat" +declare COVERAGE_FILENAME=coverage.zip + +function usage() { + local exitval=${1-1} + cat &>/dev/stderr << USAGE +usage: + $0 + --lcov_report path to lcov report relative to the WORKSPACE + --coverage_output_filename name of ZIP file that will contain artifacts from coverage collection +USAGE + # shellcheck disable=SC2086 + exit ${exitval} +} + +while [[ $# -gt 0 ]]; do + case "$1" in + --lcov_report) + LCOV_REPORT="${WORKSPACE}/$2" + shift 2 || usage + ;; + --coverage_output_filename) + COVERAGE_FILENAME="$2" + shift 2 || usage + if [[ ${COVERAGE_FILENAME##*.} != zip ]]; then + printf "error: --coverage_output_filename must be a ZIP file\n" &>/dev/stderr + exit 1 + fi + ;; + -h | --help) + usage 0 + ;; + *) + usage + ;; + esac +done + +trap _cleanup EXIT +function _cleanup() { + declare -r -i status=$? + if [[ ${status} -ne 0 ]]; then + printf "collect-coverage exit code: %d\n" ${status} &>/dev/stderr + sleep 5s + fi + exit ${status} +} + +function generate_coverage_report() { + local -r cov_dir="$(mktemp --tmpdir="${WORKSPACE}" --directory coverage-XXXX)" + trap 'rm -rf "${cov_dir}"' RETURN EXIT + cp "${LCOV_REPORT}" "${cov_dir}"/_coverage_report.dat + local -r dist_dir="${WORKSPACE}"/dist + cp "${LCOV_REPORT}" "${dist_dir}"/_coverage_report.dat + chmod -x {"${cov_dir}","${dist_dir}"}/_coverage_report.dat + + "${TOOLS_DIR}"/lcov --list dist/_coverage_report.dat >"${dist_dir}"/coverage_report.txt + "${TOOLS_DIR}"/lcov --summary dist/_coverage_report.dat + + local -r commit_sha=$(git rev-parse HEAD) + "${TOOLS_DIR}"/genhtml \ + --output-directory="$(basename "${cov_dir}")" \ + --title "coverage for commit ${commit_sha:0:7}" \ + --show-details \ + --legend \ + --quiet \ + dist/_coverage_report.dat + + if pushd "${cov_dir}" &>/dev/null; then + local -r zipfile=dist/"${COVERAGE_FILENAME}" + "${TOOLS_DIR}"/zip -q -r "../${zipfile}" ./* + printf "coverage archived to: %s\n" "${zipfile}" + popd &>/dev/null + fi +} + +# shellcheck disable=SC1090 +source "$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")"/builder.sh +TOOLS_DIR="$(builder::get_tools_dir)" +readonly TOOLS_DIR + +generate_coverage_report +"${TOOLS_DIR}"/normalize-dist diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/commit-and-tag-version b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/commit-and-tag-version new file mode 120000 index 00000000..78c3d50c --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/commit-and-tag-version @@ -0,0 +1 @@ +release-tool \ No newline at end of file diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/convert-docker-to-nitro b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/convert-docker-to-nitro new file mode 100755 index 00000000..7586e039 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/convert-docker-to-nitro @@ -0,0 
+1,104 @@ +#!/bin/bash +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -o pipefail +set -o errexit + +trap cleanup EXIT +function cleanup() { + local -r -i status=$? + exit ${status} +} + +declare BUILDER_IMAGE_NAME=build-amazonlinux2 + +function usage() { + declare -r -i exitval=${1-1} + cat >&2 << USAGE +usage: + $0 + --docker-image-tar Docker image tarfile + --docker-image-uri Docker image uri (from tarfile) + --docker-image-tag Docker image tag (from tarfile) + --builder-image Amazon Linux builder image name. Default: ${BUILDER_IMAGE_NAME} + --outdir Output path for EIF files + --eif-name Base filename for EIF files + +environment variables (all optional): + WORKSPACE Set the path to the workspace (repo root) +USAGE + # shellcheck disable=SC2086 + exit ${exitval} +} + +while [[ $# -gt 0 ]]; do + case "$1" in + --docker-image-tar) + DOCKER_IMAGE_TAR="$2" + shift 2 || usage + ;; + --docker-image-uri) + DOCKER_IMAGE_URI="$2" + shift 2 || usage + ;; + --docker-image-tag) + DOCKER_IMAGE_TAG="$2" + shift 2 || usage + ;; + --outdir) + OUTPUT_DIR="$2" + shift 2 || usage + ;; + --eif-name) + EIF_NAME="$2" + shift 2 || usage + ;; + --builder-image) + BUILDER_IMAGE_NAME="$2" + shift 2 || usage + ;; + -h | --help) usage 0 ;; + *) usage ;; + esac +done + +: "${DOCKER_IMAGE_TAR?"--docker-image-tar not specified"}" +: "${DOCKER_IMAGE_URI?"--docker-image-uri not specified"}" +: "${DOCKER_IMAGE_TAG?"--docker-image-tag not specified"}" +: "${OUTPUT_DIR?"--outdir not specified"}" +: "${EIF_NAME?"--eif-name not specified"}" + +# shellcheck disable=SC1090 +source "$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")"/builder.sh + +WS_OUTPUT_RELDIR="$(realpath "${OUTPUT_DIR}" --relative-to="${WORKSPACE}")" +if [[ ${WS_OUTPUT_RELDIR} =~ ^\.\..*\/ ]]; then + printf "Output dir must be within the workspace directory tree\n" &>/dev/stderr + exit 1 +fi +if ! 
[[ -d ${WORKSPACE}/${WS_OUTPUT_RELDIR} ]]; then + printf "Output dir [%s] is not found\n" "${WORKSPACE}/${WS_OUTPUT_RELDIR}" &>/dev/stderr + exit 1 +fi + +readonly WS_OUTPUT_DIR=/src/workspace/"${WS_OUTPUT_RELDIR}" +printf "==== building AWS Nitro image using %s =====\n" "${BUILDER_IMAGE_NAME}" +builder::docker_img_to_nitro \ + "${DOCKER_IMAGE_TAR}" \ + "${DOCKER_IMAGE_URI}" \ + "${DOCKER_IMAGE_TAG}" \ + "${WS_OUTPUT_DIR}" \ + "${EIF_NAME}" \ + "${BUILDER_IMAGE_NAME}" diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/coverage-tool b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/coverage-tool new file mode 120000 index 00000000..d609abda --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/coverage-tool @@ -0,0 +1 @@ +test-tool \ No newline at end of file diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/curl b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/curl new file mode 120000 index 00000000..d609abda --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/curl @@ -0,0 +1 @@ +test-tool \ No newline at end of file diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/genhtml b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/genhtml new file mode 120000 index 00000000..f475fb1e --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/genhtml @@ -0,0 +1 @@ +coverage-tool \ No newline at end of file diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/get-architecture b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/get-architecture new file mode 100755 index 00000000..a4415602 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/get-architecture @@ -0,0 +1,31 @@ +#!/usr/bin/env bash +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Print the CPU architecture + +set -o errexit + +trap _cleanup EXIT +function _cleanup() { + local -r -i STATUS=$? + if [[ ${STATUS} -ne 0 ]]; then + printf "Error: get-architecture status code: %s\n" "${STATUS}" &>/dev/stderr + fi + exit ${STATUS} +} + +if [[ -n ${BUILD_ARCH} ]]; then + printf "%s\n" "${BUILD_ARCH}" +else + docker run --rm --entrypoint=/usr/bin/dpkg ubuntu:20.04 --print-architecture +fi diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/get-builder-image-tagged b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/get-builder-image-tagged new file mode 100755 index 00000000..35371aea --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/get-builder-image-tagged @@ -0,0 +1,256 @@ +#!/usr/bin/env bash + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Return an image:tag for the specified builder docker image.
+# Given the input files (Dockerfile, configs, installation scripts etc) for building the docker
+# image, first generate a hash of the file contents (i.e. excluding timestamps, file ownership etc).
+# This hash will be used as the image tag.
+
+set -o pipefail
+set -o errexit
+
+trap _cleanup EXIT
+function _cleanup() {
+  declare -r -i status=$?
+  if [[ -n ${TEMPTAR} ]]; then
+    rm -f "${TEMPTAR}" "${SHAFILE}"
+  fi
+  if [[ ${status} -ne 0 ]]; then
+    if [[ -s ${BUILD_OUTPUT} ]] && [[ ${VERBOSE} -eq 1 ]]; then
+      cat "${BUILD_OUTPUT}"
+    fi
+    printf "Error: get-builder-image-tagged status code: %d\n" ${status} &>/dev/stderr
+    printf "Docker build log: %s\n" "${BUILD_OUTPUT}" &>/dev/stderr
+    if [[ -v KOKORO_ARTIFACTS_DIR ]]; then
+      sleep 5s
+    fi
+  elif [[ -f ${BUILD_OUTPUT} ]] && [[ ${VERBOSE} -eq 0 ]]; then
+    rm -f "${BUILD_OUTPUT}"
+  fi
+  exit ${status}
+}
+
+function make_temp() {
+  declare _suffix="$1"
+  mktemp --tmpdir="${WORKSPACE}" --dry-run "docker-buildx-${IMG}-XXXX" --suffix="${_suffix}"
+}
+
+function get_image_list() {
+  declare -r _images_dir="$1"
+  find "${_images_dir}" -maxdepth 1 -mindepth 1 -type d -printf "%P\n" | sort
+}
+
+function get_image_fullname() {
+  declare -r _img="$1"
+  declare -r _image_name=privacysandbox/builders/${_img}
+  if [[ -z ${_image_name} ]]; then
+    printf -- "error: image [%s] not recognized\n" "${_img}" &>/dev/stderr
+    return 1
+  fi
+  printf "%s" "${_image_name}"
+}
+
+function usage() {
+  declare -r -i exitval=${1-1}
+  cat &>/dev/stderr <<USAGE
+usage:
+  $0 <options>
+  --no-build      Do not build image if it doesn't exist
+  --image <name>  Image name for the build runtime. Valid names:
+USAGE
+
+  for elem in $(get_image_list "${IMAGES_DIR}"); do
+    printf " * %s\n" "${elem}" &>/dev/stderr
+  done
+
+  cat &>/dev/stderr <<USAGE
+  --sha-only      Emit only the content SHA rather than the image:tag
+  --verbose       Enable verbose output
+
+environment variables (all optional):
+  WORKSPACE       Set the path to the workspace (repo root)
+USAGE
+  # shellcheck disable=SC2086
+  exit ${exitval}
+}
+
+declare -i SHA_ONLY=0
+declare -i BUILD_IMAGE_IF_NEEDED=1
+declare -i VERBOSE=0
+declare -a DOCKER_BUILD_ARGS
+
+while [[ $# -gt 0 ]]; do
+  case "$1" in
+    --image)
+      IMG="$2"
+      shift 2 || usage
+      ;;
+    --no-build)
+      BUILD_IMAGE_IF_NEEDED=0
+      shift
+      ;;
+    --sha-only)
+      SHA_ONLY=1
+      shift
+      ;;
+    --verbose)
+      VERBOSE=1
+      shift
+      ;;
+    -h | --help) usage 0 ;;
+    *) usage ;;
+  esac
+done
+
+TOOLS_DIR="$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")"
+readonly TOOLS_DIR
+BUILDERS_DIR="$(dirname "${TOOLS_DIR}")"
+readonly BUILDERS_DIR
+readonly ETC_DIR="${BUILDERS_DIR}/etc"
+readonly IMAGES_DIR="${BUILDERS_DIR}/images"
+# shellcheck disable=SC1090
+source "${TOOLS_DIR}"/builder.sh
+WORKSPACE_MOUNT="$(builder::get_docker_workspace_mount)"
+readonly WORKSPACE_MOUNT
+
+if [[ -z ${IMG} ]]; then
+  printf -- "error: --image must be specified\n" &>/dev/stderr
+  usage 1
+fi
+
+if ! IMAGE_NAME="$(get_image_fullname "${IMG}")"; then
+  usage 1
+fi
+
+IMAGE_PATH_FULL="${IMAGES_DIR}/${IMG}"
+if ! [[ -s ${IMAGE_PATH_FULL}/Dockerfile ]]; then
+  printf "error: unable to locate [%s/Dockerfile]\n" "${IMAGE_PATH_FULL}" &>/dev/stderr
+  exit 1
+fi
+
+# create an image containing gnu tar
+function generate_image() {
+  {
+    cat <<Dockerfile
+FROM alpine:3.16
+RUN apk --no-cache add tar
+Dockerfile
+  } | docker buildx build "${DOCKER_BUILD_ARGS[@]}" --output=type=docker --tag "${TAR_IMAGE}" - &>"${BUILD_OUTPUT}"
+  rm -f "${BUILD_OUTPUT}"
+  if !
docker image inspect "${TAR_IMAGE}" &>/dev/null; then + printf "error creating docker image [%s]\n" "${TAR_IMAGE}" &>/dev/stderr + exit 1 + fi +} + +BUILD_OUTPUT="$(make_temp .log)" +readonly BUILD_OUTPUT +BUILDSYS_VERSION="$(<"${BUILDERS_DIR}"/version.txt)" +readonly BUILDSYS_VERSION +readonly TAR_IMAGE="builders/tar-get-builder-image-tagged:v${BUILDSYS_VERSION}" +TAR_IMAGE_HASH="$(docker image ls --filter "reference=${TAR_IMAGE}" --quiet)" +readonly TAR_IMAGE_HASH +if [[ -z ${TAR_IMAGE_HASH} ]]; then + generate_image +fi + +# Create a deterministic tar file for the specified file path, returning +# the SHA for the tar file content +# Any etc files in the $WORKSPACE (root) directory override the etc +# files in the image dir, as long as it's a file also found in the +# builders etc directory +# the TARFILE and SHAFILE args must be paths located in /tmp +function _tar_for_dir() { + local -r FILE_TAR="$1" + local -r FILE_SHA="$2" + local -r FILEPATH="$3" + local -r TMP_IMAGE_DIR="$(mktemp --tmpdir="${WORKSPACE}" --directory)" + # shellcheck disable=SC2064 + # expand TMP_IMAGE_DIR immediately + trap "rm -rf '${TMP_IMAGE_DIR}'" RETURN + + local -r WS_FILE_TAR="$(realpath "${FILE_TAR}" --relative-to="${WORKSPACE}")" + local -r WS_FILE_SHA="$(realpath "${FILE_SHA}" --relative-to="${WORKSPACE}")" + local -r WS_FILEPATH="$(realpath "${FILEPATH}" --relative-to="${WORKSPACE}")" + local -r WS_TMP_IMAGE_DIR="$(realpath "${TMP_IMAGE_DIR}" --relative-to="${WORKSPACE}")" + # find workspace etc files that are also in the image dir and the builders etc dir + local -r WORKSPACE_ETC_FILES="$({ + # shellcheck disable=SC2012 + ls -A -1 "${FILEPATH}" "${ETC_DIR}" | sort | uniq -d + ls -A -1 "${WORKSPACE}" + } | sort | uniq -d)" + # create a deterministic tarball of the collected files + docker run \ + --rm \ + --entrypoint /bin/sh \ + --volume "${WORKSPACE_MOUNT}":/workspace \ + --workdir /workspace \ + "${TAR_IMAGE}" -c " +tar --create --dereference --directory='${WS_FILEPATH}' --exclude=test . \ + | tar --extract --overwrite --directory='${WS_TMP_IMAGE_DIR}' + +# overwrite etc files in the image with the WORKSPACE's etc files +if [ -n '${WORKSPACE_ETC_FILES}' ]; then + tar --create --dereference --exclude=test ${WORKSPACE_ETC_FILES} \ + | tar --extract --overwrite --directory='${WS_TMP_IMAGE_DIR}' +fi + +tar --create --dereference --sort=name --owner=0 --group=0 --numeric-owner --format=gnu --directory='${WS_TMP_IMAGE_DIR}' --file='${WS_FILE_TAR}' . +{ + printf 'blob %d\0' \$(wc -c <'${WS_FILE_TAR}') + tar --extract --file='${WS_FILE_TAR}' --to-stdout +} \ + | sha256sum \ + | cut -f1 -d' ' \ + >'${WS_FILE_SHA}' +" +} + +TEMPTAR="$(make_temp .tar)" +readonly TEMPTAR +SHAFILE="$(make_temp .sha)" +readonly SHAFILE +# use the tarfile size and file content to generate a sha256 hash +_tar_for_dir "${TEMPTAR}" "${SHAFILE}" "${IMAGE_PATH_FULL}" +SHA="$(<"${SHAFILE}")" +readonly SHA +ARCH="$("${TOOLS_DIR}"/get-architecture)" +readonly ARCH +readonly IMAGE_TAG="${ARCH}-${SHA}" +readonly IMAGE_TAGGED="${IMAGE_NAME}:${IMAGE_TAG}" +# generate output +if [[ ${SHA_ONLY} -eq 0 ]]; then + printf "%s\n" "${IMAGE_TAGGED}" +else + printf "%s\n" "${SHA}" +fi + +if [[ ${BUILD_IMAGE_IF_NEEDED} -eq 1 ]]; then + # Create a builder docker image + # build container image and load it into the local docker client + if ! 
docker image inspect "${IMAGE_TAGGED}" &>/dev/null; then + printf "generating docker image %s\n" "${IMAGE_TAGGED}" &>/dev/stderr + docker buildx build "${DOCKER_BUILD_ARGS[@]}" --output=type=docker --tag "${IMAGE_TAGGED}" - <"${TEMPTAR}" &>"${BUILD_OUTPUT}" + fi +fi diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/ghz b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/ghz new file mode 120000 index 00000000..d609abda --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/ghz @@ -0,0 +1 @@ +test-tool \ No newline at end of file diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/git-hooks/pre-commit b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/git-hooks/pre-commit new file mode 100755 index 00000000..21d56711 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/git-hooks/pre-commit @@ -0,0 +1,18 @@ +#!/bin/bash + +TOP_LEVEL_DIR="$(git rev-parse --show-toplevel)" +GIT_HOOKS_DIR="${TOP_LEVEL_DIR}/$(git rev-parse --git-dir)/hooks" +declare -a ARGS=( + hook-impl + "--hook-type=pre-commit" + --hook-dir "${GIT_HOOKS_DIR}" + -- + "$@" +) + +if command -v "${TOP_LEVEL_DIR}"/builders/tools/pre-commit > /dev/null; then + exec "${TOP_LEVEL_DIR}"/builders/tools/pre-commit "${ARGS[@]}" +else + printf "[builders/tools/pre-commit] not found" 2>&1 + exit 1 +fi diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/google-pprof b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/google-pprof new file mode 120000 index 00000000..f475fb1e --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/google-pprof @@ -0,0 +1 @@ +coverage-tool \ No newline at end of file diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/grpcurl b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/grpcurl new file mode 120000 index 00000000..d609abda --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/grpcurl @@ -0,0 +1 @@ +test-tool \ No newline at end of file diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/h2load b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/h2load new file mode 120000 index 00000000..d609abda --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/h2load @@ -0,0 +1 @@ +test-tool \ No newline at end of file diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/hadolint b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/hadolint new file mode 100755 index 00000000..2ab1de00 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/hadolint @@ -0,0 +1,42 @@ +#!/usr/bin/env bash +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
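+
+# Usage sketch (illustrative; the Dockerfile path is a hypothetical example):
+#   builders/tools/hadolint services/bidding_service/Dockerfile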
+ +set -o errexit + +# shellcheck disable=SC1090 +source "$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")"/builder.sh + +declare -a DOCKER_RUN_ARGS +DOCKER_RUN_ARGS+=( + "--rm" +) +if [[ -t 0 ]] && [[ -t 1 ]]; then + # stdin and stdout are open, assume it's an interactive tty session + DOCKER_RUN_ARGS+=( + "--interactive" + "--tty" + ) +fi + +WORKSPACE_MOUNT="$(builder::get_docker_workspace_mount)" +readonly WORKSPACE_MOUNT + +docker run \ + "${DOCKER_RUN_ARGS[@]}" \ + --user="$(id -u):$(id -g)" \ + --volume="${WORKSPACE_MOUNT}":/src \ + --workdir=/src \ + ghcr.io/hadolint/hadolint:v2.12.0 \ + hadolint "$@" diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/jq b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/jq new file mode 120000 index 00000000..d609abda --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/jq @@ -0,0 +1 @@ +test-tool \ No newline at end of file diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/lcov b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/lcov new file mode 120000 index 00000000..f475fb1e --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/lcov @@ -0,0 +1 @@ +coverage-tool \ No newline at end of file diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/normalize-bazel-symlinks b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/normalize-bazel-symlinks new file mode 100755 index 00000000..8506ac96 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/normalize-bazel-symlinks @@ -0,0 +1,56 @@ +#!/usr/bin/env bash + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
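+
+# Repoints dangling bazel-* convenience symlinks for the current environment:
+# on the host they are resolved against ${HOME}/.cache/bazel, and inside a
+# container (detected via /.dockerenv) against /bazel_root.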
+ +declare -r BAZEL_CACHE_DIR="${HOME}/.cache/bazel" + +function normalize_symlink() { + declare -r link_name="$1" + if readlink --canonicalize-existing "${link_name}" &>/dev/null ; then + printf "symlink %s resolves fully, skipping\n" "${link_name}" + return + fi + local -r link_path="$(readlink "${link_name}")" + local -r output_user_root="${link_path///bazel_root/}" + rm -f "${link_name}" + ln -s "$(realpath "${BAZEL_CACHE_DIR}/${output_user_root}")" "${link_name}" +} + +function normalize_symlink_docker() { + declare -r link_name="$1" + if readlink --canonicalize-existing "${link_name}" &>/dev/null ; then + printf "symlink %s resolves fully, skipping\n" "${link_name}" + return + fi + local -r link_path="$(readlink "${link_name}")" + local -r output_user_root="${link_path##*/.cache/bazel/}" + rm -f "${link_name}" + ln -s "$(realpath "/bazel_root/${output_user_root}")" "${link_name}" +} + +declare _normalize_fn=normalize_symlink +if [[ -f /.dockerenv ]]; then + _normalize_fn=normalize_symlink_docker +fi + +declare -a -r LINK_DIRS=( + bazel-bin + bazel-out + bazel-testlogs + bazel-workspace +) +for link in "${LINK_DIRS[@]}"; do + if [[ -L ${link} ]]; then + ${_normalize_fn} "${link}" + fi +done diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/normalize-dist b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/normalize-dist new file mode 100755 index 00000000..93627d25 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/normalize-dist @@ -0,0 +1,61 @@ +#!/usr/bin/env bash + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# environment variables supported by cbuild (all optional): +# WORKSPACE Set the path to the workspace (repo root) +# EXTRA_DOCKER_RUN_ARGS Additional arguments to pass to docker run invocations + +set -o pipefail +set -o errexit + +trap _cleanup EXIT +function _cleanup() { + local -r -i STATUS=$? + if [[ ${STATUS} -eq 0 ]]; then + printf "normalize-dist completed successfully\n" &>/dev/stderr + else + printf "Error: normalize-dist completed with status code: %s\n" "${STATUS}" &>/dev/stderr + fi + exit 0 +} + +TOOLS_DIR="$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")" +readonly TOOLS_DIR +# shellcheck disable=SC1090 +source "${TOOLS_DIR}"/builder.sh +readonly IMAGE=build-debian +GROUP="$(builder::id g)" +readonly GROUP +USER="$(builder::id u)" +readonly USER + +readonly TOP_LEVEL_DIRS="dist" + +printf "Setting file ownership [%s], group [%s] in dirs [%s]\n" "${USER}" "${GROUP}" "${TOP_LEVEL_DIRS}" +declare -a runner=() +if [[ -f /.dockerenv ]]; then + runner+=(bash -c) +else + runner+=("${TOOLS_DIR}"/cbuild "--image" "${IMAGE}" "--cmd") +fi + +"${runner[@]}" " +for TOP_LEVEL_DIR in ${TOP_LEVEL_DIRS}; do + find \${TOP_LEVEL_DIR} -type f ! 
-executable -exec chmod 644 {} \; + find \${TOP_LEVEL_DIR} -type f -executable -exec chmod 755 {} \; + find \${TOP_LEVEL_DIR} -type d -exec chmod 755 {} \; + chgrp --recursive ${GROUP} \${TOP_LEVEL_DIR} + chown --recursive ${USER} \${TOP_LEVEL_DIR} +done +" diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/pre-commit b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/pre-commit new file mode 100755 index 00000000..be8c5c7c --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/pre-commit @@ -0,0 +1,165 @@ +#!/bin/bash +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -o pipefail +set -o errexit + +TOOLS_DIR="$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")" +readonly TOOLS_DIR +# shellcheck disable=SC1090 +source "${TOOLS_DIR}"/builder.sh + +trap __cleanup EXIT +function __cleanup() { + declare -r -i STATUS=$? + readonly LOGFILE=/var/cache/pre-commit/pre-commit.log + if [[ -s ${LOGFILE} ]]; then + printf "==== %s ====\n" ${LOGFILE} &>/dev/stderr + cat ${LOGFILE} &>/dev/stderr + printf "===========\n" &>/dev/stderr + fi + if [[ ${CLEANUP} -eq 0 ]]; then + exit ${STATUS} + fi + # shellcheck disable=SC2086 + "${TOOLS_DIR}"/cbuild "${CBUILD_COMMON_ARGS[@]}" --cmd $" +# change file ownership back to user +{ + git ls-files . --modified + git diff --staged --name-only +} | \ + sort -u | \ + xargs --replace={} /bin/bash -c '{ chown -f $(builder::id u) {}; chgrp -f $(builder::id g) {};}' +# clean up 'empty' node_modules dir created by prettier 2.x +if [[ -d node_modules ]]; then + rmdir node_modules/.cache/prettier/ node_modules/.cache/ node_modules/ +fi +" + exit ${STATUS} +} + +function __exit_msg() { + declare -r MSG="$1" + printf "%s. Exiting\n" "${MSG}" &>/dev/stderr + exit 1 +} + +function __install_git_hooks() { + git rev-parse --is-inside-work-tree >/dev/null || __exit_msg "Not in a git repository" + declare -r GIT_HOOKS_SRC_DIR="${TOOLS_DIR}/git-hooks" + declare -r GIT_HOOKS_DIR="${WORKSPACE}/.git/hooks" + chmod +x "${GIT_HOOKS_SRC_DIR}" + cp -p "${GIT_HOOKS_SRC_DIR}"/* "${GIT_HOOKS_DIR}" + printf "installed git hooks\n" +} + +function __usage() { + declare -r -i exitval=${1-1} + cat &>/dev/stderr << USAGE +usage: + $0 [command] + +A CLI tool to run pre-commit checks. 
+ + if command not specified, run the pre-commit tool on all files in entire workspace + install install as a git pre-commit hook in the current git repo + autoupdate update hook versions in .pre-commit-config.yaml + +environment variables (all optional): + SKIP comma-delimited list of hook ids to skip + PRESUBMIT_IMAGE docker URI to the specific presubmit image to run + IMAGE_BUILD_VERBOSE capture docker build output if set +USAGE + # shellcheck disable=SC2086 + exit ${exitval} +} + +function __init() { + if [[ -n ${PRESUBMIT_IMAGE} ]]; then + IMAGE_TAGGED="${PRESUBMIT_IMAGE}" + else + IMAGE_TAGGED="$("${TOOLS_DIR}"/get-builder-image-tagged --image presubmit)" + fi + readonly IMAGE_TAGGED + export CBUILD_IMAGE="${IMAGE_TAGGED}" + + local -r ARCH="$("${TOOLS_DIR}"/get-architecture)" + if [[ ${ARCH} == arm64 ]]; then + if [[ -z ${SKIP} ]]; then + SKIP_HOOKS="terraform-fmt" + else + SKIP_HOOKS="${SKIP},terraform-fmt" + fi + else + SKIP_HOOKS="${SKIP}" + fi + if [[ -n ${SKIP_HOOKS} ]]; then + printf "Skipping pre-commit hooks: %s\n" "${SKIP_HOOKS}" + fi + SKIP_ENV="SKIP=${SKIP_HOOKS}" +} + +declare -i CLEANUP=0 +declare -a -r CBUILD_COMMON_ARGS=( + "--without-shared-cache" + "--image" + presubmit +) +declare -r PRECOMMIT=/usr/pre-commit-venv/bin/pre-commit + +# TODO: run bazel //:precommit-hooks rather than just the pre-commit tool +if [[ $# -gt 0 ]]; then + case "$1" in + install) __install_git_hooks ;; + + help | --help | -h) __usage 0 ;; + + autoupdate) + PRECOMMIT_CMD="$1" + shift + __init + # shellcheck disable=SC2086 + "${TOOLS_DIR}"/cbuild "${CBUILD_COMMON_ARGS[@]}" --env "${SKIP_ENV}" --cmd "${PRECOMMIT} ${PRECOMMIT_CMD} --config ./.pre-commit-config.yaml $*" + ;; + + hook-impl) + PRECOMMIT_CMD="$1" + CLEANUP=1 + shift + __init + docker run --rm \ + --entrypoint=/usr/pre-commit-venv/bin/pre-commit \ + --volume "${WORKSPACE}":/src/workspace \ + -v /var/run/docker.sock:/var/run/docker.sock \ + --env="${SKIP_ENV}" \ + --workdir /src/workspace \ + "${IMAGE_TAGGED}" \ + "${PRECOMMIT_CMD}" --config ./.pre-commit-config.yaml "$@" + ;; + + *) + __init + CLEANUP=1 + for HOOK in "$@"; do + # shellcheck disable=SC2086 + "${TOOLS_DIR}"/cbuild "${CBUILD_COMMON_ARGS[@]}" --env "${SKIP_ENV}" --cmd "${PRECOMMIT} run --config ./.pre-commit-config.yaml --all-files ${HOOK}" + done + esac +else + __init + CLEANUP=1 + # shellcheck disable=SC2086 + "${TOOLS_DIR}"/cbuild "${CBUILD_COMMON_ARGS[@]}" --env "${SKIP_ENV}" --cmd "${PRECOMMIT} run --config ./.pre-commit-config.yaml --all-files" +fi diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/release-tool b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/release-tool new file mode 120000 index 00000000..d609abda --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/release-tool @@ -0,0 +1 @@ +test-tool \ No newline at end of file diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/slowhttptest b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/slowhttptest new file mode 120000 index 00000000..d609abda --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/slowhttptest @@ -0,0 +1 @@ +test-tool \ No newline at end of file diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/terraform b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/terraform new file mode 100755 index 00000000..71a3e708 --- /dev/null +++ 
b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/terraform @@ -0,0 +1,85 @@ +#!/usr/bin/env bash +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# environment variables (all optional): +# TERRAFORM_VERSION specify the terraform version to use +# WORKSPACE repo root directory, must be an absolute path +# +# AWS-related environment variables exported into the terraform container: +# For more info on supported env vars, see: +# https://registry.terraform.io/providers/hashicorp/aws/latest/docs#environment-variables +# +# AWS_ACCESS_KEY_ID +# AWS_SECRET_ACCESS_KEY +# AWS_SESSION_TOKEN +# AWS_REGION +# AWS_DEFAULT_REGION +# AWS_PROFILE + +set -o errexit + +# shellcheck disable=SC1090 +source "$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")"/builder.sh + +if ! [[ -v TERRAFORM_VERSION ]]; then + TERRAFORM_VERSION=1.0.4 +fi + +declare -a ENV_VARS +builder::add_aws_env_vars ENV_VARS +ENV_VARS+=( + "HOME=/home" +) + +declare -a DOCKER_RUN_ARGS=( + "--rm" +) +for evar in "${ENV_VARS[@]}" +do + DOCKER_RUN_ARGS+=( + "--env=${evar}" + ) +done +if [[ -t 0 ]] && [[ -t 1 ]]; then + # stdin and stdout are open, assume it's an interactive tty session + DOCKER_RUN_ARGS+=( + "--interactive" + "--tty" + ) +fi + +readonly IMAGE_TAGGED=hashicorp/terraform:"${TERRAFORM_VERSION}" +REL_PWD="$(realpath --relative-to="${WORKSPACE}" "$(pwd)")" +readonly REL_PWD +WORKSPACE_MOUNT="$(builder::get_docker_workspace_mount)" +readonly WORKSPACE_MOUNT + +if [[ -d ${HOME}/.aws ]]; then + DOCKER_RUN_ARGS+=("--volume=${HOME}/.aws/:/home/.aws/") +fi +if [[ -d ${HOME}/.config/gcloud ]]; then + DOCKER_RUN_ARGS+=("--volume=${HOME}/.config/gcloud/:/home/.config/gcloud/") +fi + +DOCKER_RUN_ARGS+=( + "--user=$(id -u):$(id -g)" + "--volume=${WORKSPACE_MOUNT}:/src/workspace" + "--workdir=/src/workspace/${REL_PWD}" +) + +# shellcheck disable=SC2068 +docker run \ + "${DOCKER_RUN_ARGS[@]}" \ + ${IMAGE_TAGGED} "$@" diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/test-tool b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/test-tool new file mode 100755 index 00000000..6a275370 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/test-tool @@ -0,0 +1,90 @@ +#!/usr/bin/env bash + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
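+
+# Dispatch pattern: this script is invoked through symlinks (e.g. jq, curl,
+# ghz, lcov); the symlink's target selects the docker image below, and the
+# symlink's own name is the program executed inside that image.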
+ +# environment variables (all optional): +# WORKSPACE repo root directory, must be an absolute path + +set -o errexit + +APP="$(basename "${BASH_SOURCE[0]}")" +readonly APP +APP_LINK_TARGET=$(readlink "${BASH_SOURCE[0]}") || true +readonly APP_LINK_TARGET +if [[ -z ${APP_LINK_TARGET} ]] || [[ ${APP_LINK_TARGET} == test-tool && ${APP} =~ ^(release|utils|coverage)-tool$ ]]; then + printf "%s designed for use via symlink with target program name\n" "${APP}" >/dev/stderr + exit 1 +fi + +declare -i APP_AS_ENTRYPOINT=0 + +case "${APP_LINK_TARGET}" in + release-tool) + IMAGE="release" + APP_AS_ENTRYPOINT=1 + ;; + coverage-tool) + IMAGE="coverage-tools" ;; + test-tool) + IMAGE="test-tools" ;; + utils-tool) + IMAGE="utils" ;; + *) + printf "Unsupported image target: %s\n" "${APP_LINK_TARGET}" >/dev/stderr + exit 1 +esac +readonly IMAGE + +# shellcheck disable=SC1090 +source "$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")"/builder.sh +TOOLS_DIR="$(builder::get_tools_dir)" +readonly TOOLS_DIR +IMAGE_TAGGED="$("${TOOLS_DIR}"/get-builder-image-tagged --image "${IMAGE}")" +readonly IMAGE_TAGGED + +REL_PWD="$(realpath --relative-to="${WORKSPACE}" "$(pwd)")" +readonly REL_PWD +WORKSPACE_MOUNT="$(builder::get_docker_workspace_mount)" +readonly WORKSPACE_MOUNT + +declare -a DOCKER_RUN_ARGS=( + "--rm" + "--interactive" + "--user=$(id -u):$(id -g)" + "--volume=${WORKSPACE_MOUNT}:/src/workspace" + "--workdir=/src/workspace/${REL_PWD}" +) +if [[ -n ${EXTRA_DOCKER_RUN_ARGS} ]]; then + # shellcheck disable=SC2207 + DOCKER_RUN_ARGS+=( + $(echo "${EXTRA_DOCKER_RUN_ARGS}" | envsubst) + ) +fi +if [[ -t 0 ]] && [[ -t 1 ]]; then + # stdin and stdout are open, assume it's an interactive tty session + DOCKER_RUN_ARGS+=("--tty") +fi +if [[ ${APP_AS_ENTRYPOINT} -eq 1 ]]; then + # shellcheck disable=SC2068 + docker run \ + "${DOCKER_RUN_ARGS[@]}" \ + --entrypoint="${APP}" \ + "${IMAGE_TAGGED}" \ + "$@" +else + # shellcheck disable=SC2068 + docker run \ + "${DOCKER_RUN_ARGS[@]}" \ + "${IMAGE_TAGGED}" \ + "${APP}" "$@" +fi diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/unzip b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/unzip new file mode 120000 index 00000000..33a2cc95 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/unzip @@ -0,0 +1 @@ +utils-tool \ No newline at end of file diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/utils-tool b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/utils-tool new file mode 120000 index 00000000..d609abda --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/utils-tool @@ -0,0 +1 @@ +test-tool \ No newline at end of file diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/wrk2 b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/wrk2 new file mode 120000 index 00000000..d609abda --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/wrk2 @@ -0,0 +1 @@ +test-tool \ No newline at end of file diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/zip b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/zip new file mode 120000 index 00000000..33a2cc95 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/tools/zip @@ -0,0 +1 @@ +utils-tool \ No newline at end of file diff --git 
a/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/version.txt b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/version.txt
new file mode 100644
index 00000000..b9daa0c1
--- /dev/null
+++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/builders/version.txt
@@ -0,0 +1 @@
+0.55.1
\ No newline at end of file
diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/ram_file_system_test.cc b/services/inference_sidecar/modules/tensorflow_v2_14_0/ram_file_system_test.cc
new file mode 100644
index 00000000..43e486d1
--- /dev/null
+++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/ram_file_system_test.cc
@@ -0,0 +1,89 @@
+/*
+ * Copyright 2024 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <memory>
+#include <string>
+#include <unordered_set>
+
+#include "absl/status/status.h"
+#include "absl/strings/str_cat.h"
+#include "absl/strings/string_view.h"
+#include "gtest/gtest.h"
+#include "tensorflow/cc/saved_model/loader.h"
+#include "tensorflow/cc/saved_model/tag_constants.h"
+#include "tensorflow/core/platform/env.h"
+#include "tensorflow/core/public/session.h"
+#include "tensorflow/core/public/session_options.h"
+#include "tensorflow/tsl/platform/env.h"
+#include "tensorflow/tsl/platform/file_system.h"
+
+namespace privacy_sandbox::bidding_auction_servers::inference {
+namespace {
+
+constexpr absl::string_view kModelPath = "test_model";
+constexpr absl::string_view kRamFileSystemModelPath = "ram://test_model";
+constexpr absl::string_view kModelFiles[] = {
+    "saved_model.pb",
+    "variables/variables.index",
+    "variables/variables.data-00000-of-00001",
+};
+
+TEST(RamFileSystemTest, LoadModelFromLocalDisk_Success) {
+  tensorflow::SessionOptions session_options;
+  const std::unordered_set<std::string> tags = {"serve"};
+
+  auto model_bundle = std::make_unique<tensorflow::SavedModelBundle>();
+  auto status = tensorflow::LoadSavedModel(
+      session_options, {}, std::string(kModelPath), tags, model_bundle.get());
+  EXPECT_TRUE(status.ok());
+}
+
+TEST(RamFileSystemTest, LoadModelFromMemory_NotFound) {
+  tensorflow::SessionOptions session_options;
+  const std::unordered_set<std::string> tags = {"serve"};
+
+  auto model_bundle = std::make_unique<tensorflow::SavedModelBundle>();
+  auto status = tensorflow::LoadSavedModel(session_options, {},
+                                           std::string(kRamFileSystemModelPath),
+                                           tags, model_bundle.get());
+  EXPECT_FALSE(status.ok());
+  EXPECT_EQ(status.code(), absl::StatusCode::kNotFound);
+}
+
+void CopyModelToRamFileSystem() {
+  for (const auto file : kModelFiles) {
+    const std::string local_file = absl::StrCat(kModelPath, "/", file);
+    const std::string memory_file =
+        absl::StrCat(kRamFileSystemModelPath, "/", file);
+    ASSERT_TRUE(tsl::Env::Default()->CopyFile(local_file, memory_file).ok());
+  }
+}
+
+TEST(RamFileSystemTest, LoadModelFromMemory_Success) {
+  CopyModelToRamFileSystem();
+
+  tensorflow::SessionOptions session_options;
+  const std::unordered_set<std::string> tags = {"serve"};
+
+  auto model_bundle = std::make_unique<tensorflow::SavedModelBundle>();
+  auto status = tensorflow::LoadSavedModel(session_options, {},
+                                           std::string(kRamFileSystemModelPath),
tags, model_bundle.get()); + EXPECT_TRUE(status.ok()); +} + +} // namespace +} // namespace privacy_sandbox::bidding_auction_servers::inference diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/tensorflow.cc b/services/inference_sidecar/modules/tensorflow_v2_14_0/tensorflow.cc new file mode 100644 index 00000000..c5c4fac6 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/tensorflow.cc @@ -0,0 +1,210 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include +#include +#include +#include +#include +#include +#include + +#include "absl/base/thread_annotations.h" +#include "absl/container/flat_hash_map.h" +#include "absl/status/status.h" +#include "absl/status/statusor.h" +#include "absl/strings/numbers.h" +#include "absl/strings/str_cat.h" +#include "absl/synchronization/mutex.h" +#include "modules/module_interface.h" +#include "proto/inference_sidecar.pb.h" +#include "src/util/status_macro/status_macros.h" +#include "tensorflow/cc/client/client_session.h" +#include "tensorflow/cc/saved_model/loader.h" +#include "tensorflow/cc/saved_model/tag_constants.h" +#include "tensorflow/core/framework/tensor.h" +#include "tensorflow/core/platform/env.h" +#include "tensorflow/core/public/session.h" +#include "tensorflow/tsl/platform/env.h" +#include "tensorflow/tsl/platform/file_system.h" +#include "utils/request_parser.h" + +#include "tensorflow_parser.h" + +namespace privacy_sandbox::bidding_auction_servers::inference { +namespace { + +constexpr absl::string_view kRamFileSystemScheme = "ram://"; + +// TODO(b/316960066): Delete all files in RamFileSystem after model loading. +// TODO(b/316960066): Move the code to a separate file with more unit tests. 
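The registration path below stages the request's model bytes in TensorFlow's in-process ram:// file system, so LoadSavedModel never touches local disk. As a reading aid, a minimal round trip through that file system looks roughly like this (an illustrative sketch, not part of the change; the function name and path are hypothetical):

    absl::Status RamFileSystemRoundTrip() {
      tsl::Env* env = tsl::Env::Default();
      const std::string path = "ram://demo/saved_model.pb";  // hypothetical path
      // Stage bytes in process-local memory under the ram:// scheme.
      PS_RETURN_IF_ERROR(tsl::WriteStringToFile(env, path, "model bytes"));
      std::string contents;
      // Read them back; any TF API that accepts a file path can do the same.
      PS_RETURN_IF_ERROR(tsl::ReadFileToString(env, path, &contents));
      return absl::OkStatus();  // contents == "model bytes"
    }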
+absl::Status SaveToRamFileSystem(const RegisterModelRequest& request) { + for (const auto& pair : request.model_files()) { + const std::string path = absl::StrCat(kRamFileSystemScheme, pair.first); + const std::string& bytes = pair.second; + PS_RETURN_IF_ERROR( + tsl::WriteStringToFile(tsl::Env::Default(), path, bytes)); + } + return absl::OkStatus(); +} + +absl::StatusOr<std::pair<std::string, std::vector<tensorflow::Tensor>>> +PredictPerModel(const tensorflow::SavedModelBundle* model, + const InferenceRequest& inference_request) { + std::vector<std::pair<std::string, tensorflow::Tensor>> inputs; + for (const auto& tensor : inference_request.inputs) { + if (tensor.tensor_name.empty()) { + return absl::InvalidArgumentError( + "Name is required for each TensorFlow tensor input"); + } + auto tf_tensor = ConvertFlatArrayToTensor(tensor); + if (!tf_tensor.ok()) { + return absl::InvalidArgumentError(tf_tensor.status().message()); + } + inputs.emplace_back(tensor.tensor_name, *tf_tensor); + } + + absl::string_view model_key = inference_request.model_path; + const auto& signature_map = model->meta_graph_def.signature_def(); + if (signature_map.find("serving_default") == signature_map.end()) { + return absl::InvalidArgumentError(absl::StrCat( + "The 'serving_default' signature was not found for model '", + model_key, "'")); + } + + const auto& signature = signature_map.at("serving_default"); + std::vector<std::string> output_names; + for (const auto& output : signature.outputs()) { + output_names.push_back(output.second.name()); + } + + std::vector<tensorflow::Tensor> outputs; + tensorflow::Status status = + model->session->Run(inputs, output_names, {}, &outputs); + if (!status.ok()) { + return absl::InternalError(absl::StrCat( + "Inference failed for model '", model_key, "': ", status.ToString())); + } + + return std::make_pair(std::string(model_key), outputs); +} + +class TensorflowModule final : public ModuleInterface { + public: + absl::StatusOr<PredictResponse> Predict( + const PredictRequest& request) override ABSL_LOCKS_EXCLUDED(mu_); + absl::StatusOr<RegisterModelResponse> RegisterModel( + const RegisterModelRequest& request) override ABSL_LOCKS_EXCLUDED(mu_); + + private: + // Maps each `model_path` from an inference request to its corresponding + // tensorflow::SavedModelBundle instance. + absl::flat_hash_map<std::string, std::unique_ptr<tensorflow::SavedModelBundle>> + model_map_ ABSL_GUARDED_BY(mu_); + // TODO(b/327907675) : Add a test for concurrency + absl::Mutex mu_; +}; + +absl::StatusOr<PredictResponse> TensorflowModule::Predict( + const PredictRequest& request) { + absl::ReaderMutexLock lock(&mu_); + + absl::StatusOr<std::vector<InferenceRequest>> parsed_requests = + ParseJsonInferenceRequest(request.input()); + if (!parsed_requests.ok()) { + return absl::InvalidArgumentError(parsed_requests.status().message()); + } + + std::vector<std::future<absl::StatusOr<std::pair<std::string, std::vector<tensorflow::Tensor>>>>> + tasks; + + for (const InferenceRequest& inference_request : *parsed_requests) { + absl::string_view model_key = inference_request.model_path; + auto it = model_map_.find(model_key); + if (it == model_map_.end()) { + return absl::NotFoundError( + absl::StrCat("Requested model '", model_key, "' is not registered")); + } + tasks.push_back(std::async(std::launch::async, &PredictPerModel, + it->second.get(), inference_request)); + } + + std::vector<std::pair<std::string, std::vector<tensorflow::Tensor>>> + batch_outputs; + for (size_t task_id = 0; task_id < tasks.size(); ++task_id) { + auto result_status_or = tasks[task_id].get(); + if (!result_status_or.ok()) { + const auto& model_key = (*parsed_requests)[task_id].model_path; + return absl::Status(result_status_or.status().code(), + absl::StrCat("Error during inference for model '", + model_key, "', Task ID: ", task_id, ". ", + result_status_or.status().message())); + } + + batch_outputs.push_back(result_status_or.value()); + } + + auto output_json = ConvertTensorsToJson(batch_outputs); + if (!output_json.ok()) { + return absl::InternalError("Error during output parsing to json"); + } + + PredictResponse predict_response; + predict_response.set_output(output_json.value()); + return predict_response; +} + +absl::StatusOr<RegisterModelResponse> TensorflowModule::RegisterModel( + const RegisterModelRequest& request) { + tensorflow::SessionOptions session_options; + const std::unordered_set<std::string> tags = {"serve"}; + const auto& model_path = request.model_spec().model_path(); + + if (model_path.empty()) { + return absl::InvalidArgumentError("Model path is empty"); + } + + absl::WriterMutexLock lock(&mu_); + auto it = model_map_.find(model_path); + if (it != model_map_.end()) { + return absl::AlreadyExistsError( + absl::StrCat("Model '", model_path, "' already registered")); + } + + PS_RETURN_IF_ERROR(SaveToRamFileSystem(request)); + + auto model_bundle = std::make_unique<tensorflow::SavedModelBundle>(); + auto status = tensorflow::LoadSavedModel( + session_options, {}, absl::StrCat(kRamFileSystemScheme, model_path), tags, + model_bundle.get()); + + if (!status.ok()) { + return absl::InternalError( + absl::StrCat("Error loading model: ", model_path)); + } + model_map_[model_path] = std::move(model_bundle); + return RegisterModelResponse(); +} + +} // namespace + +std::unique_ptr<ModuleInterface> ModuleInterface::Create() { + return std::make_unique<TensorflowModule>(); +} + +} // namespace privacy_sandbox::bidding_auction_servers::inference
diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/tensorflow_parser.cc b/services/inference_sidecar/modules/tensorflow_v2_14_0/tensorflow_parser.cc new file mode 100644 index 00000000..66b1f59f --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/tensorflow_parser.cc @@ -0,0 +1,273 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +#include "tensorflow_parser.h" + +#include <algorithm> +#include <limits> +#include <string> +#include <vector> + +#include <rapidjson/writer.h> + +#include "absl/strings/str_format.h" +#include "rapidjson/document.h" +#include "src/util/status_macro/status_macros.h" +#include "tensorflow/core/framework/tensor.h" +#include "tensorflow/core/framework/tensor_shape.h" +#include "utils/request_parser.h" + +namespace privacy_sandbox::bidding_auction_servers::inference { + +// TODO(b/331363417): Share common code across TF and PyTorch.
+template <typename T> +absl::StatusOr<T> Convert(const std::string& str); + +template <> +absl::StatusOr<float> Convert(const std::string& str) { + float result; + if (absl::SimpleAtof(str, &result)) { + return result; + } else { + return absl::FailedPreconditionError("Error in float conversion"); + } +} + +template <> +absl::StatusOr<double> Convert(const std::string& str) { + double result; + if (absl::SimpleAtod(str, &result)) { + return result; + } else { + return absl::FailedPreconditionError("Error in double conversion"); + } +} + +template <> +absl::StatusOr<int8_t> Convert(const std::string& str) { + int result; + if (absl::SimpleAtoi(str, &result)) { + if (result < std::numeric_limits<int8_t>::min() || + result > std::numeric_limits<int8_t>::max()) { + return absl::FailedPreconditionError( + "The number is outside of bounds of int8_t."); + } + return result; // Implicit conversion to int8_t. + } else { + return absl::FailedPreconditionError("Error in int8 conversion"); + } +} + +template <> +absl::StatusOr<int16_t> Convert(const std::string& str) { + int result; + if (absl::SimpleAtoi(str, &result)) { + if (result < std::numeric_limits<int16_t>::min() || + result > std::numeric_limits<int16_t>::max()) { + return absl::FailedPreconditionError( + "The number is outside of bounds of int16_t."); + } + return result; // Implicit conversion to int16_t. + } else { + return absl::FailedPreconditionError("Error in int16 conversion"); + } +} + +template <> +absl::StatusOr<int32_t> Convert(const std::string& str) { + int result; + if (absl::SimpleAtoi(str, &result)) { + return result; + } else { + return absl::FailedPreconditionError("Error in int32 conversion"); + } +} + +template <> +absl::StatusOr<int64_t> Convert(const std::string& str) { + int64_t result; + if (absl::SimpleAtoi(str, &result)) { + return result; + } else { + return absl::FailedPreconditionError("Error in int64 conversion"); + } +} + +template <typename T> +absl::StatusOr<tensorflow::Tensor> ConvertFlatArrayToTensorInternal( + const Tensor& tensor) { + tensorflow::TensorShape shape; + for (int dim : tensor.tensor_shape) { + shape.AddDim(dim); + } + tensorflow::Tensor tf_tensor(tensorflow::DataTypeToEnum<T>::v(), shape); + + std::vector<T> data_array; + for (const std::string& str : tensor.tensor_content) { + PS_ASSIGN_OR_RETURN(T result, Convert<T>(str)); + data_array.push_back(result); + } + auto tensor_data = tf_tensor.flat<T>().data(); + // Copy data from a vector to tensor's data. + std::copy(data_array.begin(), data_array.end(), tensor_data); + + return tf_tensor; +} + +absl::StatusOr<tensorflow::Tensor> ConvertFlatArrayToTensor( + const Tensor& tensor) { + switch (tensor.data_type) { + case DataType::kFloat: { + return ConvertFlatArrayToTensorInternal<float>(tensor); + } + case DataType::kDouble: { + return ConvertFlatArrayToTensorInternal<double>(tensor); + } + case DataType::kInt8: { + return ConvertFlatArrayToTensorInternal<int8_t>(tensor); + } + case DataType::kInt16: { + return ConvertFlatArrayToTensorInternal<int16_t>(tensor); + } + case DataType::kInt32: { + return ConvertFlatArrayToTensorInternal<int32_t>(tensor); + } + case DataType::kInt64: { + return ConvertFlatArrayToTensorInternal<int64_t>(tensor); + } + default: + return absl::InvalidArgumentError( + absl::StrFormat("Unsupported data type %d", tensor.data_type)); + } +} + +// Converts rapidjson::Value& to a string +absl::StatusOr<std::string> SerializeJsonDoc(const rapidjson::Value& document) { + rapidjson::StringBuffer string_buffer; + rapidjson::Writer<rapidjson::StringBuffer> writer(string_buffer); + if (document.Accept(writer)) { + return std::string(string_buffer.GetString()); + } + return absl::InternalError("Error converting inner Json to String."); +}
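For orientation, the converters below serialize one object per model, carrying model_path plus a tensors array whose entries record tensor_shape, data_type, and a flat tensor_content. A sketch of intended use (values and the bucket path are hypothetical; the output format matches the parser unit tests later in this patch):

    tensorflow::Tensor t(tensorflow::DT_FLOAT, tensorflow::TensorShape({1, 2}));
    t.flat<float>()(0) = 0.5f;
    t.flat<float>()(1) = 1.0f;
    std::vector<std::pair<std::string, std::vector<tensorflow::Tensor>>> batch;
    batch.emplace_back("my_bucket/models/pcvr/1/",
                       std::vector<tensorflow::Tensor>{t});
    absl::StatusOr<std::string> json = ConvertTensorsToJson(batch);
    // *json ==
    //   {"response":[{"model_path":"my_bucket/models/pcvr/1/","tensors":
    //     [{"tensor_shape":[1,2],"data_type":"FLOAT","tensor_content":[0.5,1.0]}]}]}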
+// Converts a single tensor to json. +absl::StatusOr<rapidjson::Value> TensorToJsonValue( + const tensorflow::Tensor& tensor, + rapidjson::MemoryPoolAllocator<>& allocator) { + rapidjson::Value json_tensor(rapidjson::kObjectType); + + tensorflow::TensorShape tensor_shape = tensor.shape(); + rapidjson::Value tensor_shape_json(rapidjson::kArrayType); + for (int i = 0; i < tensor_shape.dims(); ++i) { + tensor_shape_json.PushBack(tensor_shape.dim_size(i), allocator); + } + json_tensor.AddMember("tensor_shape", tensor_shape_json.Move(), allocator); + + rapidjson::Value tensor_content(rapidjson::kArrayType); + const auto data_type = tensor.dtype(); + switch (data_type) { + case tensorflow::DataType::DT_FLOAT: { + json_tensor.AddMember("data_type", "FLOAT", allocator); + auto tensor_data = tensor.flat<float>(); + for (int i = 0; i < tensor_data.size(); ++i) { + tensor_content.PushBack(tensor_data(i), allocator); + } + break; + } + case tensorflow::DataType::DT_DOUBLE: { + json_tensor.AddMember("data_type", "DOUBLE", allocator); + auto tensor_data = tensor.flat<double>(); + for (int i = 0; i < tensor_data.size(); ++i) { + tensor_content.PushBack(tensor_data(i), allocator); + } + break; + } + case tensorflow::DataType::DT_INT8: { + json_tensor.AddMember("data_type", "INT8", allocator); + auto tensor_data = tensor.flat<int8_t>(); + for (int i = 0; i < tensor_data.size(); ++i) { + tensor_content.PushBack(tensor_data(i), allocator); + } + break; + } + case tensorflow::DataType::DT_INT16: { + json_tensor.AddMember("data_type", "INT16", allocator); + auto tensor_data = tensor.flat<int16_t>(); + for (int i = 0; i < tensor_data.size(); ++i) { + tensor_content.PushBack(tensor_data(i), allocator); + } + break; + } + case tensorflow::DataType::DT_INT32: { + json_tensor.AddMember("data_type", "INT32", allocator); + auto tensor_data = tensor.flat<int32_t>(); + for (int i = 0; i < tensor_data.size(); ++i) { + tensor_content.PushBack(tensor_data(i), allocator); + } + break; + } + case tensorflow::DataType::DT_INT64: { + json_tensor.AddMember("data_type", "INT64", allocator); + auto tensor_data = tensor.flat<int64_t>(); + for (int i = 0; i < tensor_data.size(); ++i) { + tensor_content.PushBack(tensor_data(i), allocator); + } + break; + } + default: { + return absl::InvalidArgumentError( + absl::StrFormat("Unsupported data type %d", data_type)); + } + } + json_tensor.AddMember("tensor_content", tensor_content.Move(), allocator); + return json_tensor; +} + +absl::StatusOr<std::string> ConvertTensorsToJson( + const std::vector<std::pair<std::string, std::vector<tensorflow::Tensor>>>& + batch_outputs) { + rapidjson::Document document; + document.SetObject(); + rapidjson::MemoryPoolAllocator<>& allocator = document.GetAllocator(); + + rapidjson::Value batch(rapidjson::kArrayType); + for (const auto& pair : batch_outputs) { + const std::string& model_path = pair.first; + const std::vector<tensorflow::Tensor>& tensors = pair.second; + + rapidjson::Value nested_object(rapidjson::kObjectType); + rapidjson::Value model_path_value; + model_path_value.SetString(model_path.c_str(), allocator); + nested_object.AddMember("model_path", model_path_value, allocator); + + rapidjson::Value tensors_value(rapidjson::kArrayType); + for (const auto& tensor : tensors) { + absl::StatusOr<rapidjson::Value> json = + TensorToJsonValue(tensor, allocator); + if (json.ok()) { + tensors_value.PushBack(json.value(), allocator); + } else { + return json.status(); + } + } + nested_object.AddMember("tensors", tensors_value.Move(), allocator); + batch.PushBack(nested_object.Move(), allocator); + } + document.AddMember("response", batch.Move(), allocator); + + return SerializeJsonDoc(document); +} + +} // namespace
privacy_sandbox::bidding_auction_servers::inference
diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/tensorflow_parser.h b/services/inference_sidecar/modules/tensorflow_v2_14_0/tensorflow_parser.h new file mode 100644 index 00000000..7f2280ef --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/tensorflow_parser.h @@ -0,0 +1,43 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#ifndef SERVICES_INFERENCE_SIDECAR_MODULES_TENSORFLOW_V2_14_0_TENSORFLOW_PARSER_H_ +#define SERVICES_INFERENCE_SIDECAR_MODULES_TENSORFLOW_V2_14_0_TENSORFLOW_PARSER_H_ + +#include <string> +#include <utility> +#include <vector> + +#include "absl/status/statusor.h" +#include "tensorflow/core/framework/tensor.h" +#include "utils/request_parser.h" + +namespace privacy_sandbox::bidding_auction_servers::inference { + +// Transforms an internal ML framework agnostic dense tensor representation +// into a Tensorflow tensor. +absl::StatusOr<tensorflow::Tensor> ConvertFlatArrayToTensor( + const Tensor& tensor); + +// Converts inference output (Tensorflow tensors) corresponding to each model +// to a JSON string. batch_outputs contains a collection of +// (model_path, tensors) pairs. +absl::StatusOr<std::string> ConvertTensorsToJson( + const std::vector<std::pair<std::string, std::vector<tensorflow::Tensor>>>& + batch_outputs); + +} // namespace privacy_sandbox::bidding_auction_servers::inference + +#endif // SERVICES_INFERENCE_SIDECAR_MODULES_TENSORFLOW_V2_14_0_TENSORFLOW_PARSER_H_
diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/tensorflow_parser_test.cc b/services/inference_sidecar/modules/tensorflow_v2_14_0/tensorflow_parser_test.cc new file mode 100644 index 00000000..e1959208 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/tensorflow_parser_test.cc @@ -0,0 +1,270 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License.
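A note before the tests: they drive the parser through the framework-agnostic Tensor struct declared in utils/request_parser.h (common sidecar code, outside this diff). Its definition is not shown here; inferred purely from usage in these tests, it carries approximately these fields (an assumption for orientation, not the authoritative definition):

    // Assumed shape, inferred from usage; the authoritative definition is in
    // services/inference_sidecar/common/utils/request_parser.h.
    struct Tensor {
      DataType data_type;                       // kFloat, kDouble, kInt8, ...
      std::string tensor_name;                  // e.g. "serving_default_double1:0"
      std::vector<int64_t> tensor_shape;        // dense dimensions (element type assumed)
      std::vector<std::string> tensor_content;  // flat values encoded as strings
    };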
+ +#include "tensorflow_parser.h" + +#include +#include + +#include "absl/status/statusor.h" +#include "googletest/include/gtest/gtest.h" +#include "tensorflow/core/framework/tensor_types.h" + +namespace privacy_sandbox::bidding_auction_servers::inference { +namespace { + +TEST(TensorflowParserTest, TestConversion) { + Tensor tensor; + tensor.data_type = DataType::kFloat; + tensor.tensor_content = {"1.2", "-2", "3", "4", "5", "6"}; + tensor.tensor_shape = {2, 3}; + + // Call the conversion function + const absl::StatusOr result = + ConvertFlatArrayToTensor(tensor); + EXPECT_TRUE(result.ok()); + const tensorflow::Tensor tf_tensor = *std::move(result); + + // Check the dimensions of the resulting tensor + tensorflow::TensorShape tensorShape = tf_tensor.shape(); + EXPECT_EQ(tensorShape.dims(), 2); + EXPECT_EQ(tensorShape.dim_size(0), 2); + EXPECT_EQ(tensorShape.dim_size(1), 3); + + // Check the values in the tensor + for (int i = 0; i < tensor.tensor_content.size(); i++) { + EXPECT_FLOAT_EQ(tf_tensor.flat()(i), + std::stof(tensor.tensor_content[i])); + } +} + +TEST(TensorflowParserTest, TestConversionInt8) { + Tensor tensor; + tensor.data_type = DataType::kInt8; + tensor.tensor_content = {"5"}; + tensor.tensor_shape = {1, 1}; + + const absl::StatusOr result = + ConvertFlatArrayToTensor(tensor); + EXPECT_TRUE(result.ok()); + const tensorflow::Tensor tf_tensor = *std::move(result); + + tensorflow::DataType data_type = tf_tensor.dtype(); + EXPECT_EQ(data_type, tensorflow::DataType::DT_INT8); + + EXPECT_EQ(tf_tensor.flat()(0), 5); +} + +TEST(TensorflowParserTest, TestConversionInt16) { + Tensor tensor; + tensor.data_type = DataType::kInt16; + tensor.tensor_content = {"5"}; + tensor.tensor_shape = {1, 1}; + + const absl::StatusOr result = + ConvertFlatArrayToTensor(tensor); + EXPECT_TRUE(result.ok()); + const tensorflow::Tensor tf_tensor = *std::move(result); + + tensorflow::DataType data_type = tf_tensor.dtype(); + EXPECT_EQ(data_type, tensorflow::DataType::DT_INT16); + + EXPECT_EQ(tf_tensor.flat()(0), 5); +} + +TEST(TensorflowParserTest, TestConversionInt32) { + Tensor tensor; + tensor.data_type = DataType::kInt32; + tensor.tensor_content = {"5"}; + tensor.tensor_shape = {1, 1}; + + const absl::StatusOr result = + ConvertFlatArrayToTensor(tensor); + EXPECT_TRUE(result.ok()); + const tensorflow::Tensor tf_tensor = *std::move(result); + + tensorflow::DataType data_type = tf_tensor.dtype(); + EXPECT_EQ(data_type, tensorflow::DataType::DT_INT32); + + EXPECT_EQ(tf_tensor.flat()(0), 5); +} + +TEST(TensorflowParserTest, TestConversion_BatchSizeOne) { + Tensor tensor; + tensor.data_type = DataType::kInt64; + tensor.tensor_content = {"7"}; + tensor.tensor_shape = {1, 1}; + + // Call the conversion function + const absl::StatusOr result = + ConvertFlatArrayToTensor(tensor); + EXPECT_TRUE(result.ok()); + const tensorflow::Tensor tf_tensor = *std::move(result); + + // Check the dimensions of the resulting tensor + tensorflow::TensorShape tensorShape = tf_tensor.shape(); + EXPECT_EQ(tensorShape.dims(), 2); + EXPECT_EQ(tensorShape.dim_size(0), 1); + EXPECT_EQ(tensorShape.dim_size(1), 1); + + // Check the values in the tensor + EXPECT_EQ(tf_tensor.flat()(0), 7); +} + +TEST(TensorflowParserTest, TestConversion_WrongType) { + Tensor tensor; + tensor.data_type = DataType::kInt64; + tensor.tensor_content = {"seven"}; // Incompatible type. 
+ tensor.tensor_shape = {1, 1}; + + const absl::StatusOr result = ConvertFlatArrayToTensor(tensor); + + EXPECT_FALSE(result.ok()); + EXPECT_EQ(result.status().code(), absl::StatusCode::kFailedPrecondition); + EXPECT_EQ(result.status().message(), "Error in int64 conversion"); +} + +TEST(TensorflowParserTest, ConvertTensorsToJson) { + tensorflow::TensorShape shape({2, 3}); // 2x3 tensor + tensorflow::Tensor tensor(tensorflow::DT_FLOAT, shape); + + // Access and modify tensor elements. + auto tensor_data = tensor.flat(); + for (int i = 0; i < 6; ++i) { + tensor_data(i) = i * 2.0f; // Set values (i * 2) + } + std::vector tensors; + tensors.push_back(tensor); + std::vector>> + batch_tensors; + batch_tensors.emplace_back("my_bucket/models/pcvr_models/1/", tensors); + + auto output = ConvertTensorsToJson(batch_tensors); + EXPECT_TRUE(output.ok()) << output.status(); + EXPECT_EQ( + output.value(), + R"({"response":[{"model_path":"my_bucket/models/pcvr_models/1/","tensors":[{"tensor_shape":[2,3],"data_type":"FLOAT","tensor_content":[0.0,2.0,4.0,6.0,8.0,10.0]}]}]})"); +} + +TEST(TensorflowParserTest, ConvertTensorsToJson_UnsupportedFloat16Type) { + tensorflow::Tensor half_tensor(tensorflow::DT_BFLOAT16); + + std::vector tensors; + tensors.push_back(half_tensor); + std::vector>> + batch_tensors; + batch_tensors.emplace_back("my_bucket/models/pcvr_models/1/", tensors); + + auto output = ConvertTensorsToJson(batch_tensors); + EXPECT_FALSE(output.ok()); + EXPECT_EQ(output.status().message(), "Unsupported data type 14"); +} + +TEST(TensorflowParserTest, ConvertTensorsToJson_int8) { + std::vector int8_data = {10, -5}; + tensorflow::Tensor int8_tensor(tensorflow::DT_INT8, {2}); + auto int8_data_ptr = int8_tensor.flat().data(); + std::copy(int8_data.begin(), int8_data.end(), int8_data_ptr); + + std::vector tensors; + tensors.push_back(int8_tensor); + std::vector>> + batch_tensors; + batch_tensors.emplace_back("my_bucket/models/pcvr_models/1/", tensors); + + auto output = ConvertTensorsToJson(batch_tensors); + EXPECT_TRUE(output.ok()); + EXPECT_EQ( + output.value(), + R"({"response":[{"model_path":"my_bucket/models/pcvr_models/1/","tensors":[{"tensor_shape":[2],"data_type":"INT8","tensor_content":[10,-5]}]}]})"); +} + +TEST(TensorflowParserTest, ConvertTensorsToJson_int16) { + std::vector int16_data = {10, -5}; + tensorflow::Tensor int16_tensor(tensorflow::DT_INT16, {2}); + auto int16_data_ptr = int16_tensor.flat().data(); + std::copy(int16_data.begin(), int16_data.end(), int16_data_ptr); + + std::vector tensors; + tensors.push_back(int16_tensor); + std::vector>> + batch_tensors; + batch_tensors.emplace_back("my_bucket/models/pcvr_models/1/", tensors); + + auto output = ConvertTensorsToJson(batch_tensors); + EXPECT_TRUE(output.ok()); + EXPECT_EQ( + output.value(), + R"({"response":[{"model_path":"my_bucket/models/pcvr_models/1/","tensors":[{"tensor_shape":[2],"data_type":"INT16","tensor_content":[10,-5]}]}]})"); +} + +TEST(TensorflowParserTest, TestConvertTensorsToJson_MultipleTensors) { + // Double tensor. + tensorflow::TensorShape shape_1({1, 1}); + tensorflow::Tensor tensor_1(tensorflow::DT_DOUBLE, shape_1); + auto tensor_data_1 = tensor_1.flat(); + tensor_data_1(0) = 3.14; + + // Int64 tensor. 
+ tensorflow::TensorShape shape_2({1, 1}); + tensorflow::Tensor tensor_2(tensorflow::DT_INT64, shape_2); + auto tensor_data_2 = tensor_2.flat(); + tensor_data_2(0) = 1000; + + std::vector tensors; + tensors.push_back(tensor_1); + tensors.push_back(tensor_2); + + std::vector>> + batch_tensors; + batch_tensors.emplace_back("my_bucket/models/pcvr_models/1/", tensors); + + auto output = ConvertTensorsToJson(batch_tensors); + EXPECT_TRUE(output.ok()) << output.status(); + EXPECT_EQ( + output.value(), + R"({"response":[{"model_path":"my_bucket/models/pcvr_models/1/","tensors":[{"tensor_shape":[1,1],"data_type":"DOUBLE","tensor_content":[3.14]},{"tensor_shape":[1,1],"data_type":"INT64","tensor_content":[1000]}]}]})"); +} + +TEST(TensorflowParserTest, TestConvertTensorsToJson_BatchOdModels) { + // Double tensor. + tensorflow::TensorShape shape_1({1, 1}); + tensorflow::Tensor tensor_1(tensorflow::DT_DOUBLE, shape_1); + auto tensor_data_1 = tensor_1.flat(); + tensor_data_1(0) = 3.14; + std::vector tensors1; + tensors1.push_back(tensor_1); + + // Int64 tensor. + tensorflow::TensorShape shape_2({1, 1}); + tensorflow::Tensor tensor_2(tensorflow::DT_INT64, shape_2); + auto tensor_data_2 = tensor_2.flat(); + tensor_data_2(0) = 1000; + std::vector tensors2; + tensors2.push_back(tensor_2); + + std::vector>> + batch_tensors; + batch_tensors.emplace_back("my_bucket/models/pcvr_models/1/", tensors1); + batch_tensors.emplace_back("my_bucket/models/pctr_models/1/", tensors2); + + auto output = ConvertTensorsToJson(batch_tensors); + EXPECT_TRUE(output.ok()) << output.status(); + EXPECT_EQ( + output.value(), + R"({"response":[{"model_path":"my_bucket/models/pcvr_models/1/","tensors":[{"tensor_shape":[1,1],"data_type":"DOUBLE","tensor_content":[3.14]}]},{"model_path":"my_bucket/models/pctr_models/1/","tensors":[{"tensor_shape":[1,1],"data_type":"INT64","tensor_content":[1000]}]}]})"); +} + +} // namespace +} // namespace privacy_sandbox::bidding_auction_servers::inference diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/tensorflow_test.cc b/services/inference_sidecar/modules/tensorflow_v2_14_0/tensorflow_test.cc new file mode 100644 index 00000000..ead05269 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/tensorflow_test.cc @@ -0,0 +1,774 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
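The module tests that follow build their RegisterModelRequest via PopulateRegisterModelRequest from utils/file_util.h in the common sidecar code, which is not part of this diff. Judging from SaveToRamFileSystem above, it reads each SavedModel file into the request's model_files map keyed by its path; a rough sketch under that assumption (the file list mirrors kModelFiles in ram_file_system_test.cc; the real helper may differ):

    absl::Status PopulateRegisterModelRequest(absl::string_view model_dir,
                                              RegisterModelRequest& request) {
      request.mutable_model_spec()->set_model_path(std::string(model_dir));
      for (absl::string_view file :
           {"saved_model.pb", "variables/variables.index",
            "variables/variables.data-00000-of-00001"}) {
        const std::string path = absl::StrCat(model_dir, "/", file);
        std::string bytes;
        // Read the on-disk file so RegisterModel can stage it under ram://.
        PS_RETURN_IF_ERROR(
            tsl::ReadFileToString(tsl::Env::Default(), path, &bytes));
        (*request.mutable_model_files())[path] = std::move(bytes);
      }
      return absl::OkStatus();
    }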
+#include <memory> + +#include <string> + +#include "absl/status/status.h" +#include "absl/status/statusor.h" +#include "absl/strings/match.h" +#include "gmock/gmock.h" +#include "gtest/gtest.h" +#include "modules/module_interface.h" +#include "proto/inference_sidecar.pb.h" +#include "utils/file_util.h" + +namespace privacy_sandbox::bidding_auction_servers::inference { +namespace { + +using ::testing::HasSubstr; +constexpr absl::string_view kModel1Dir = "./benchmark_models/pcvr"; +constexpr absl::string_view kModel2Dir = "./benchmark_models/pctr"; +constexpr absl::string_view kEmbeddingModelDir = "./benchmark_models/embedding"; + +TEST(TensorflowModuleTest, Failure_RegisterModelWithEmptyPath) { + std::unique_ptr<ModuleInterface> tensorflow_module = + ModuleInterface::Create(); + RegisterModelRequest register_request; + register_request.mutable_model_spec()->set_model_path(""); + absl::StatusOr<RegisterModelResponse> status_or = + tensorflow_module->RegisterModel(register_request); + ASSERT_FALSE(status_or.ok()); + + EXPECT_EQ(status_or.status().code(), absl::StatusCode::kInvalidArgument); + EXPECT_EQ(status_or.status().message(), "Model path is empty"); +} + +TEST(TensorflowModuleTest, Failure_RegisterModelAlreadyRegistered) { + std::unique_ptr<ModuleInterface> tensorflow_module = + ModuleInterface::Create(); + + // First model registration + RegisterModelRequest register_request_1; + ASSERT_TRUE( + PopulateRegisterModelRequest(kModel1Dir, register_request_1).ok()); + absl::StatusOr<RegisterModelResponse> response_1 = + tensorflow_module->RegisterModel(register_request_1); + ASSERT_TRUE(response_1.ok()); + + // Second registration attempt with the same model path + RegisterModelRequest register_request_2; + // Same model path as the first attempt + ASSERT_TRUE( + PopulateRegisterModelRequest(kModel1Dir, register_request_2).ok()); + absl::StatusOr<RegisterModelResponse> response_2 = + tensorflow_module->RegisterModel(register_request_2); + + ASSERT_FALSE(response_2.ok()); + EXPECT_EQ(response_2.status().code(), absl::StatusCode::kAlreadyExists); + EXPECT_EQ(response_2.status().message(), + "Model '" + std::string(kModel1Dir) + "' already registered"); +} + +TEST(TensorflowModuleTest, Failure_RegisterModelLoadError) { + std::unique_ptr<ModuleInterface> tensorflow_module = + ModuleInterface::Create(); + + // Setting the model path to a non-existent directory + RegisterModelRequest register_request; + register_request.mutable_model_spec()->set_model_path("./pcvr"); + absl::StatusOr<RegisterModelResponse> status_or = + tensorflow_module->RegisterModel(register_request); + + // Verifying that the registration fails with an InternalError due to loading + // failure + ASSERT_FALSE(status_or.ok()); + EXPECT_EQ(status_or.status().code(), absl::StatusCode::kInternal); + EXPECT_NE(std::string::npos, + status_or.status().message().find("Error loading model: ./pcvr")); +} + +TEST(TensorflowModuleTest, Success_RegisterModel) { + std::unique_ptr<ModuleInterface> tensorflow_module = + ModuleInterface::Create(); + RegisterModelRequest register_request; + ASSERT_TRUE(PopulateRegisterModelRequest(kModel1Dir, register_request).ok()); + absl::StatusOr<RegisterModelResponse> status_or = + tensorflow_module->RegisterModel(register_request); + ASSERT_TRUE(status_or.ok()); +} + +TEST(TensorflowModuleTest, Failure_PredictInvalidJson) { + std::unique_ptr<ModuleInterface> tensorflow_module = + ModuleInterface::Create(); + PredictRequest predict_request; + predict_request.set_input(""); + + absl::StatusOr<PredictResponse> predict_response = + tensorflow_module->Predict(predict_request); + + ASSERT_FALSE(predict_response.ok()); + EXPECT_EQ(predict_response.status().code(), + absl::StatusCode::kInvalidArgument); +} + +constexpr char kJsonString[] = R"json({ + "request" : [{ +
"model_path" : "./benchmark_models/pcvr", + "tensors" : [ + { + "tensor_name": "serving_default_double1:0", + "data_type": "DOUBLE", + "tensor_shape": [ + 1, 10 + ], + "tensor_content": ["0.32", "0.12", "0.98", "0.32", "0.12", "0.98", "0.32", "0.12", "0.98", "0.11"] + }, + { + "tensor_name": "serving_default_double2:0", + "data_type": "DOUBLE", + "tensor_shape": [ + 1, 10 + ], + "tensor_content": ["0.32", "0.12", "0.98", "0.32", "0.12", "0.98", "0.32", "0.12", "0.98", "0.11"] + }, + { + "tensor_name": "serving_default_int_input1:0", + "data_type": "INT64", + "tensor_shape": [ + 1, 1 + ], + "tensor_content": ["7"] + }, + { + "tensor_name": "serving_default_int_input2:0", + "data_type": "INT64", + "tensor_shape": [ + 1, 1 + ], + "tensor_content": ["7"] + }, + { + "tensor_name": "serving_default_int_input3:0", + "data_type": "INT64", + "tensor_shape": [ + 1, 1 + ], + "tensor_content": ["7"] + }, + { + "tensor_name": "serving_default_int_input4:0", + "data_type": "INT64", + "tensor_shape": [ + 1, 1 + ], + "tensor_content": ["7"] + }, + { + "tensor_name": "serving_default_int_input5:0", + "data_type": "INT64", + "tensor_shape": [ + 1, 1 + ], + "tensor_content": ["7"] + } + ] +}] + })json"; + +TEST(TensorflowModuleTest, Failure_PredictModelNotRegistered) { + std::unique_ptr tensorflow_module = + ModuleInterface::Create(); + + PredictRequest predict_request; + predict_request.set_input(kJsonString); + + absl::StatusOr predict_response = + tensorflow_module->Predict(predict_request); + + ASSERT_FALSE(predict_response.ok()); + EXPECT_EQ(predict_response.status().code(), absl::StatusCode::kNotFound); +} + +TEST(TensorflowModuleTest, Success_Predict) { + std::unique_ptr tensorflow_module = + ModuleInterface::Create(); + RegisterModelRequest register_request; + ASSERT_TRUE(PopulateRegisterModelRequest(kModel1Dir, register_request).ok()); + ASSERT_TRUE(tensorflow_module->RegisterModel(register_request).ok()); + + PredictRequest predict_request; + predict_request.set_input(kJsonString); + absl::StatusOr predict_status = tensorflow_module->Predict(predict_request); + ASSERT_TRUE(predict_status.ok()); + PredictResponse response = predict_status.value(); + ASSERT_FALSE(response.output().empty()); + ASSERT_EQ(response.output(), + "{\"response\":[{\"model_path\":\"./benchmark_models/" + "pcvr\",\"tensors\":[{\"tensor_shape\":[1,1],\"data_type\":" + "\"FLOAT\",\"tensor_content\":[0.019116628915071489]}]}]}"); +} + +constexpr char kJsonStringBatchSize2[] = R"json({ + "request" : [{ + "model_path" : "./benchmark_models/pcvr", + "tensors" : [ + { + "tensor_name": "serving_default_double1:0", + "data_type": "DOUBLE", + "tensor_shape": [ + 2, 10 + ], + "tensor_content": ["0.32", "0.12", "0.98", "0.32", "0.12", "0.98", "0.32", "0.12", "0.98", "0.11", "0.32", "0.12", "0.98", "0.32", "0.12", "0.98", "0.32", "0.12", "0.98", "0.11"] + }, + { + "tensor_name": "serving_default_double2:0", + "data_type": "DOUBLE", + "tensor_shape": [ + 2, 10 + ], + "tensor_content": ["0.32", "0.12", "0.98", "0.32", "0.12", "0.98", "0.32", "0.12", "0.98", "0.11", "0.32", "0.12", "0.98", "0.32", "0.12", "0.98", "0.32", "0.12", "0.98", "0.11"] + }, + { + "tensor_name": "serving_default_int_input1:0", + "data_type": "INT64", + "tensor_shape": [ + 2, 1 + ], + "tensor_content": ["7", "3"] + }, + { + "tensor_name": "serving_default_int_input2:0", + "data_type": "INT64", + "tensor_shape": [ + 2, 1 + ], + "tensor_content": ["7", "3"] + }, + { + "tensor_name": "serving_default_int_input3:0", + "data_type": "INT64", + "tensor_shape": [ + 2, 1 + ], + 
"tensor_content": ["7", "3"] + }, + { + "tensor_name": "serving_default_int_input4:0", + "data_type": "INT64", + "tensor_shape": [ + 2, 1 + ], + "tensor_content": ["7", "3"] + }, + { + "tensor_name": "serving_default_int_input5:0", + "data_type": "INT64", + "tensor_shape": [ + 2, 1 + ], + "tensor_content": ["7", "3"] + } + ] +}] + })json"; + +TEST(TensorflowModuleTest, Success_PredictBatchSize2) { + std::unique_ptr tensorflow_module = + ModuleInterface::Create(); + RegisterModelRequest register_request; + ASSERT_TRUE(PopulateRegisterModelRequest(kModel1Dir, register_request).ok()); + ASSERT_TRUE(tensorflow_module->RegisterModel(register_request).ok()); + + PredictRequest predict_request; + predict_request.set_input(kJsonStringBatchSize2); + absl::StatusOr predict_status = tensorflow_module->Predict(predict_request); + ASSERT_TRUE(predict_status.ok()); + PredictResponse response = predict_status.value(); + ASSERT_FALSE(response.output().empty()); + ASSERT_EQ( + response.output(), + "{\"response\":[{\"model_path\":\"./benchmark_models/" + "pcvr\",\"tensors\":[{\"tensor_shape\":[2,1],\"data_type\":\"FLOAT\"," + "\"tensor_content\":[0.019116630777716638,0.1847093403339386]}]}]}"); +} + +constexpr char kJsonStringMissingTensorName[] = R"json({ + "request" : [{ + "model_path" : "./benchmark_models/pcvr", + "tensors" : [ + { + "data_type": "INT64", + "tensor_shape": [ + 1, 1 + ], + "tensor_content": ["7"] + } + ] +}] + })json"; + +TEST(TensorflowModuleTest, Failure_PredictMissingTensorName) { + std::unique_ptr tensorflow_module = + ModuleInterface::Create(); + RegisterModelRequest register_request; + ASSERT_TRUE(PopulateRegisterModelRequest(kModel1Dir, register_request).ok()); + ASSERT_TRUE(tensorflow_module->RegisterModel(register_request).ok()); + + PredictRequest predict_request; + predict_request.set_input(kJsonStringMissingTensorName); + absl::StatusOr predict_status = + tensorflow_module->Predict(predict_request); + ASSERT_FALSE(predict_status.ok()); + EXPECT_EQ(predict_status.status().code(), absl::StatusCode::kInvalidArgument); + EXPECT_THAT(predict_status.status().message(), + HasSubstr("Name is required for each TensorFlow tensor input")); +} + +constexpr char kJsonStringWith2Model[] = R"json({ + "request" : [{ + "model_path" : "./benchmark_models/pcvr", + "tensors" : [ + { + "tensor_name": "serving_default_double1:0", + "data_type": "DOUBLE", + "tensor_shape": [ + 2, 10 + ], + "tensor_content": ["0.32", "0.12", "0.98", "0.32", "0.12", "0.98", "0.32", "0.12", "0.98", "0.11", "0.32", "0.12", "0.98", "0.32", "0.12", "0.98", "0.32", "0.12", "0.98", "0.11"] + }, + { + "tensor_name": "serving_default_double2:0", + "data_type": "DOUBLE", + "tensor_shape": [ + 2, 10 + ], + "tensor_content": ["0.32", "0.12", "0.98", "0.32", "0.12", "0.98", "0.32", "0.12", "0.98", "0.11", "0.32", "0.12", "0.98", "0.32", "0.12", "0.98", "0.32", "0.12", "0.98", "0.11"] + }, + { + "tensor_name": "serving_default_int_input1:0", + "data_type": "INT64", + "tensor_shape": [ + 2, 1 + ], + "tensor_content": ["7", "3"] + }, + { + "tensor_name": "serving_default_int_input2:0", + "data_type": "INT64", + "tensor_shape": [ + 2, 1 + ], + "tensor_content": ["7", "3"] + }, + { + "tensor_name": "serving_default_int_input3:0", + "data_type": "INT64", + "tensor_shape": [ + 2, 1 + ], + "tensor_content": ["7", "3"] + }, + { + "tensor_name": "serving_default_int_input4:0", + "data_type": "INT64", + "tensor_shape": [ + 2, 1 + ], + "tensor_content": ["7", "3"] + }, + { + "tensor_name": "serving_default_int_input5:0", + "data_type": 
"INT64", + "tensor_shape": [ + 2, 1 + ], + "tensor_content": ["7", "3"] + } + ] +}, +{ + "model_path" : "./benchmark_models/pctr", + "tensors" : [ + { + "tensor_name": "serving_default_double1:0", + "data_type": "DOUBLE", + "tensor_shape": [ + 2, 10 + ], + "tensor_content": ["0.33", "0.13", "0.97", "0.33", "0.13", "0.97", "0.33", "0.13", "0.97", "0.12", "0.33", "0.13", "0.97", "0.33", "0.13", "0.97", "0.33", "0.13", "0.97", "0.12"] + }, + { + "tensor_name": "serving_default_double2:0", + "data_type": "DOUBLE", + "tensor_shape": [ + 2, 10 + ], + "tensor_content": ["0.33", "0.13", "0.97", "0.33", "0.13", "0.97", "0.33", "0.13", "0.97", "0.12", "0.33", "0.13", "0.97", "0.33", "0.13", "0.97", "0.33", "0.13", "0.97", "0.12"] + }, + { + "tensor_name": "serving_default_int_input1:0", + "data_type": "INT64", + "tensor_shape": [ + 2, 1 + ], + "tensor_content": ["8", "4"] + }, + { + "tensor_name": "serving_default_int_input2:0", + "data_type": "INT64", + "tensor_shape": [ + 2, 1 + ], + "tensor_content": ["8", "4"] + }, + { + "tensor_name": "serving_default_int_input3:0", + "data_type": "INT64", + "tensor_shape": [ + 2, 1 + ], + "tensor_content": ["8", "4"] + }, + { + "tensor_name": "serving_default_int_input4:0", + "data_type": "INT64", + "tensor_shape": [ + 2, 1 + ], + "tensor_content": ["8", "4"] + }, + { + "tensor_name": "serving_default_int_input5:0", + "data_type": "INT64", + "tensor_shape": [ + 2, 1 + ], + "tensor_content": ["8", "4"] + } + ] +}] + })json"; + +TEST(TensorflowModuleTest, Success_PredictWith2Models) { + std::unique_ptr tensorflow_module = + ModuleInterface::Create(); + RegisterModelRequest register_request_1; + ASSERT_TRUE( + PopulateRegisterModelRequest(kModel1Dir, register_request_1).ok()); + ASSERT_TRUE(tensorflow_module->RegisterModel(register_request_1).ok()); + + RegisterModelRequest register_request_2; + ASSERT_TRUE( + PopulateRegisterModelRequest(kModel2Dir, register_request_2).ok()); + ASSERT_TRUE(tensorflow_module->RegisterModel(register_request_2).ok()); + + PredictRequest predict_request; + predict_request.set_input(kJsonStringWith2Model); + absl::StatusOr predict_status = tensorflow_module->Predict(predict_request); + ASSERT_TRUE(predict_status.ok()); + PredictResponse response = predict_status.value(); + ASSERT_FALSE(response.output().empty()); + EXPECT_EQ( + response.output(), + "{\"response\":[{\"model_path\":\"./benchmark_models/" + "pcvr\",\"tensors\":[{\"tensor_shape\":[2,1],\"data_type\":\"FLOAT\"," + "\"tensor_content\":[0.019116630777716638,0.1847093403339386]}]},{" + "\"model_path\":\"./benchmark_models/" + "pctr\",\"tensors\":[{\"tensor_shape\":[2,1],\"data_type\":\"FLOAT\"," + "\"tensor_content\":[0.14649735391139985,0.2522672712802887]}]}]}"); +} + +constexpr char kJsonStringWith1ModelVariedSize[] = R"json({ + "request" : [{ + "model_path" : "./benchmark_models/pcvr", + "tensors" : [ + { + "tensor_name": "serving_default_double1:0", + "data_type": "DOUBLE", + "tensor_shape": [ + 2, 10 + ], + "tensor_content": ["0.32", "0.12", "0.98", "0.32", "0.12", "0.98", "0.32", "0.12", "0.98", "0.11", "0.32", "0.12", "0.98", "0.32", "0.12", "0.98", "0.32", "0.12", "0.98", "0.11"] + }, + { + "tensor_name": "serving_default_double2:0", + "data_type": "DOUBLE", + "tensor_shape": [ + 2, 10 + ], + "tensor_content": ["0.32", "0.12", "0.98", "0.32", "0.12", "0.98", "0.32", "0.12", "0.98", "0.11", "0.32", "0.12", "0.98", "0.32", "0.12", "0.98", "0.32", "0.12", "0.98", "0.11"] + }, + { + "tensor_name": "serving_default_int_input1:0", + "data_type": "INT64", + "tensor_shape": 
[ + 2, 1 + ], + "tensor_content": ["7", "3"] + }, + { + "tensor_name": "serving_default_int_input2:0", + "data_type": "INT64", + "tensor_shape": [ + 2, 1 + ], + "tensor_content": ["7", "3"] + }, + { + "tensor_name": "serving_default_int_input3:0", + "data_type": "INT64", + "tensor_shape": [ + 2, 1 + ], + "tensor_content": ["7", "3"] + }, + { + "tensor_name": "serving_default_int_input4:0", + "data_type": "INT64", + "tensor_shape": [ + 2, 1 + ], + "tensor_content": ["7", "3"] + }, + { + "tensor_name": "serving_default_int_input5:0", + "data_type": "INT64", + "tensor_shape": [ + 2, 1 + ], + "tensor_content": ["7", "3"] + } + ] +}, +{ + "model_path" : "./benchmark_models/pcvr", + "tensors" : [ + { + "tensor_name": "serving_default_double1:0", + "data_type": "DOUBLE", + "tensor_shape": [ + 1, 10 + ], + "tensor_content": ["0.33", "0.13", "0.97", "0.33", "0.13", "0.97", "0.33", "0.13", "0.97", "0.12"] + }, + { + "tensor_name": "serving_default_double2:0", + "data_type": "DOUBLE", + "tensor_shape": [ + 1, 10 + ], + "tensor_content": ["0.33", "0.13", "0.97", "0.33", "0.13", "0.97", "0.33", "0.13", "0.97", "0.12"] + }, + { + "tensor_name": "serving_default_int_input1:0", + "data_type": "INT64", + "tensor_shape": [ + 1, 1 + ], + "tensor_content": ["8"] + }, + { + "tensor_name": "serving_default_int_input2:0", + "data_type": "INT64", + "tensor_shape": [ + 1, 1 + ], + "tensor_content": ["8"] + }, + { + "tensor_name": "serving_default_int_input3:0", + "data_type": "INT64", + "tensor_shape": [ + 1, 1 + ], + "tensor_content": ["8"] + }, + { + "tensor_name": "serving_default_int_input4:0", + "data_type": "INT64", + "tensor_shape": [ + 1, 1 + ], + "tensor_content": ["8"] + }, + { + "tensor_name": "serving_default_int_input5:0", + "data_type": "INT64", + "tensor_shape": [ + 1, 1 + ], + "tensor_content": ["8"] + } + ] +}] + })json"; + +TEST(TensorflowModuleTest, + PredictSameModelVariedBatchSizesMultipleRequestsSuccess) { + std::unique_ptr tensorflow_module = + ModuleInterface::Create(); + RegisterModelRequest register_request; + ASSERT_TRUE( + PopulateRegisterModelRequest(std::string(kModel1Dir), register_request) + .ok()); + ASSERT_TRUE(tensorflow_module->RegisterModel(register_request).ok()); + + PredictRequest predict_request; + predict_request.set_input(kJsonStringWith1ModelVariedSize); + absl::StatusOr predict_status = tensorflow_module->Predict(predict_request); + ASSERT_TRUE(predict_status.ok()); + PredictResponse response = predict_status.value(); + ASSERT_FALSE(response.output().empty()); + EXPECT_EQ(response.output(), + "{\"response\":[{\"model_path\":\"./benchmark_models/" + "pcvr\",\"tensors\":[{\"tensor_shape\":[2,1],\"data_type\":" + "\"FLOAT\",\"tensor_content\":[0.019116630777716638,0." 
+ "1847093403339386]}]},{\"model_path\":\"./benchmark_models/" + "pcvr\",\"tensors\":[{\"tensor_shape\":[1,1],\"data_type\":" + "\"FLOAT\",\"tensor_content\":[0.010360434651374817]}]}]}"); +} + +constexpr char kJsonStringEmbeddingModel[] = R"json({ + "request" : [{ + "model_path" : "./benchmark_models/embedding", + "tensors" : [ + { + "tensor_name": "serving_default_ad_embed:0", + "data_type": "DOUBLE", + "tensor_shape": [ + 1, 10 + ], + "tensor_content": ["0.32", "0.12", "0.98", "0.32", "0.12", "0.98", "0.32", "0.12", "0.98", "0.11"] + }, + { + "tensor_name": "serving_default_pub_embed:0", + "data_type": "DOUBLE", + "tensor_shape": [ + 1, 10 + ], + "tensor_content": ["0.32", "0.12", "0.98", "0.32", "0.12", "0.98", "0.32", "0.12", "0.98", "0.11"] + }, + { + "tensor_name": "serving_default_int_input1:0", + "data_type": "INT64", + "tensor_shape": [ + 1, 1 + ], + "tensor_content": ["7"] + }, + { + "tensor_name": "serving_default_int_input2:0", + "data_type": "INT64", + "tensor_shape": [ + 1, 1 + ], + "tensor_content": ["7"] + }, + { + "tensor_name": "serving_default_int_input3:0", + "data_type": "INT64", + "tensor_shape": [ + 1, 1 + ], + "tensor_content": ["7"] + }, + { + "tensor_name": "serving_default_int_input4:0", + "data_type": "INT64", + "tensor_shape": [ + 1, 1 + ], + "tensor_content": ["7"] + }, + { + "tensor_name": "serving_default_int_input5:0", + "data_type": "INT64", + "tensor_shape": [ + 1, 1 + ], + "tensor_content": ["7"] + } + ] +}] + })json"; + +TEST(TensorflowModuleTest, Success_PredictEmbed) { + std::unique_ptr tensorflow_module = + ModuleInterface::Create(); + RegisterModelRequest register_request; + ASSERT_TRUE( + PopulateRegisterModelRequest(kEmbeddingModelDir, register_request).ok()); + ASSERT_TRUE(tensorflow_module->RegisterModel(register_request).ok()); + + PredictRequest predict_request; + predict_request.set_input(kJsonStringEmbeddingModel); + absl::StatusOr predict_status = tensorflow_module->Predict(predict_request); + ASSERT_TRUE(predict_status.ok()); + PredictResponse response = predict_status.value(); + ASSERT_FALSE(response.output().empty()); + EXPECT_TRUE(absl::StrContains( + response.output(), + "{\"tensor_shape\":[1,6],\"data_type\":\"FLOAT\",\"tensor_content\":[0." + "7276111245155335,0.0728105902671814,0.11053494364023209,0." 
+ "6876803636550903,0.3626940846443176,0.13941356539726258]}")); + EXPECT_TRUE(absl::StrContains(response.output(), + "{\"tensor_shape\":[1,1],\"data_type\":" + "\"INT32\",\"tensor_content\":[0]}")); +} + +constexpr char kJsonStringWithMultipleInvalidInputs[] = R"json({ + "request" : [{ + "model_path" : "./benchmark_models/pcvr", + "tensors" : [ + { + "tensor_name": "serving_default_int_input1:0", + "data_type": "INT64", + "tensor_shape": [ + 2, 1 + ], + "tensor_content": ["7", "3"] + } + ] +}, +{ + "model_path" : "./benchmark_models/pctr", + "tensors" : [ + { + "tensor_name": "serving_default_double1:0", + "data_type": "DOUBLE", + "tensor_shape": [ + 1, 10 + ], + "tensor_content": ["0.33", "0.13", "0.97", "0.33", "0.13", "0.97", "0.33", "0.13", "0.97", "0.12"] + } + ] +}] + })json"; + +TEST(TensorflowModuleTest, PredictMultipleInvalidInputsReturnsFirstError) { + std::unique_ptr tensorflow_module = + ModuleInterface::Create(); + RegisterModelRequest register_request_1; + ASSERT_TRUE( + PopulateRegisterModelRequest(std::string(kModel1Dir), register_request_1) + .ok()); + ASSERT_TRUE(tensorflow_module->RegisterModel(register_request_1).ok()); + RegisterModelRequest register_request_2; + ASSERT_TRUE( + PopulateRegisterModelRequest(std::string(kModel2Dir), register_request_2) + .ok()); + ASSERT_TRUE(tensorflow_module->RegisterModel(register_request_2).ok()); + + PredictRequest predict_request; + predict_request.set_input(kJsonStringWithMultipleInvalidInputs); + absl::StatusOr predict_status = tensorflow_module->Predict(predict_request); + ASSERT_FALSE(predict_status.ok()); + EXPECT_EQ(predict_status.status().code(), absl::StatusCode::kInternal); + EXPECT_THAT( + predict_status.status().message(), + HasSubstr("Error during inference for model './benchmark_models/pcvr'")); +} + +} // namespace +} // namespace privacy_sandbox::bidding_auction_servers::inference diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/third_party/BUILD b/services/inference_sidecar/modules/tensorflow_v2_14_0/third_party/BUILD new file mode 100644 index 00000000..4d504fd2 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/third_party/BUILD @@ -0,0 +1,17 @@ +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +package( + default_visibility = ["//visibility:public"], +) diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/third_party/google_privacysandbox_servers_common.patch b/services/inference_sidecar/modules/tensorflow_v2_14_0/third_party/google_privacysandbox_servers_common.patch new file mode 100644 index 00000000..81ce4127 --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/third_party/google_privacysandbox_servers_common.patch @@ -0,0 +1,22 @@ +diff --git a/src/util/status_macro/status_util.h b/src/util/status_macro/status_util.h +index 68688367..3268233f 100644 +--- a/src/util/status_macro/status_util.h ++++ b/src/util/status_macro/status_util.h +@@ -17,6 +17,8 @@ + #ifndef FLEDGE_SERVICES_COMMON_UTIL_STATUS_UTIL_H_ + #define FLEDGE_SERVICES_COMMON_UTIL_STATUS_UTIL_H_ + ++#include ++ + #include "absl/status/status.h" + #include "google/rpc/status.pb.h" + #include "include/grpcpp/grpcpp.h" +@@ -32,7 +34,7 @@ absl::Status ToAbslStatus(const grpc::Status& status); + inline google::rpc::Status SaveStatusAsRpcStatus(const absl::Status& status) { + google::rpc::Status ret; + ret.set_code(static_cast(status.code())); +- ret.set_message(status.message()); ++ ret.set_message(std::string(status.message())); + return ret; + } + diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/third_party/org_tensorflow_compatibility_fixes.diff b/services/inference_sidecar/modules/tensorflow_v2_14_0/third_party/org_tensorflow_compatibility_fixes.diff new file mode 100644 index 00000000..01e6026a --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/third_party/org_tensorflow_compatibility_fixes.diff @@ -0,0 +1,13 @@ +diff --git a/tensorflow/tsl/lib/io/cache.h b/tensorflow/tsl/lib/io/cache.h +index f894c5916d51c..e49d09b74500e 100644 +--- a/tensorflow/tsl/lib/io/cache.h ++++ b/tensorflow/tsl/lib/io/cache.h +@@ -16,6 +16,8 @@ limitations under the License. + #ifndef TENSORFLOW_TSL_LIB_IO_CACHE_H_ + #define TENSORFLOW_TSL_LIB_IO_CACHE_H_ + ++#include ++ + #include "tensorflow/tsl/platform/stringpiece.h" + + // A Cache is an interface that maps keys to values. It has internal diff --git a/services/inference_sidecar/modules/tensorflow_v2_14_0/tools/collect-logs b/services/inference_sidecar/modules/tensorflow_v2_14_0/tools/collect-logs new file mode 100755 index 00000000..3924139e --- /dev/null +++ b/services/inference_sidecar/modules/tensorflow_v2_14_0/tools/collect-logs @@ -0,0 +1,89 @@ +#!/usr/bin/env bash + +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# environment variables (all optional): +# WORKSPACE Set the path to the workspace (repo root) + +set -o pipefail +set -o errexit + +trap _cleanup EXIT +function _cleanup() { + declare -r -i STATUS=$? 
+ if [[ ${STATUS} -ne 0 ]]; then + printf "collect-logs exit code: %d\n" ${STATUS} &>/dev/stderr + sleep 5s + fi + exit ${STATUS} +} + +function copy_log_outputs() { + declare -r _rootdest="$1" + declare -r _prune_to_dir="$2" + declare -r _filepath="$3" + declare -r _logpath="${_filepath##*/${_prune_to_dir}/}" + declare -r _destdir="${_rootdest}/${_logpath%/*}" + declare -r _fname="${_filepath##*/}" + declare -r _destfname="${_fname/#test./sponge_log.}" + mkdir -p "${_destdir}" + cp "${_filepath}" "${_destdir}/${_destfname}" +} +export -f copy_log_outputs + +function extract_test_outputs() { + declare -r _rootdest="$1" + declare -r _filepath="$2" + declare -r _logpath="${_filepath##*bazel-testlogs/}" + declare -r _destdir="${_rootdest}/${_logpath%/*/*}" + mkdir -p "${_destdir}" + unzip -q -d "${_destdir}" "${_filepath}" +} +export -f extract_test_outputs + + +declare ZIP_FILENAME="$1" +if [[ ${ZIP_FILENAME##*.} != zip ]]; then + ZIP_FILENAME=logs.zip +fi +SCRIPT_DIR="$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")" +readonly SCRIPT_DIR +WORKSPACE="${WORKSPACE-"$(readlink -f "${SCRIPT_DIR}"/..)"}" +readonly WORKSPACE +OUTDIR="$(mktemp --directory)" +readonly OUTDIR +export OUTDIR +mkdir -p "${OUTDIR}"/{test,other} + +if [[ -d "${WORKSPACE}"/bazel-testlogs ]]; then + # copy all test.log and test.xml files + find -L "${WORKSPACE}"/bazel-testlogs -type f '(' -name test.log -o -name test.xml ')' -exec bash -c 'copy_log_outputs "${OUTDIR}"/test bazel-testlogs "$0"' {} ';' + # extract test outputs + find -L "${WORKSPACE}"/bazel-testlogs -type f -name outputs.zip -exec bash -c 'extract_test_outputs "${OUTDIR}"/test "$0"' {} ';' +fi +if [[ -d "${WORKSPACE}"/bazel-out ]]; then + # copy log files under bazel-out (except for test.log) + find -L "${WORKSPACE}"/bazel-out -type f -name "*.log" ! 
-name test.log -exec bash -c 'copy_log_outputs "${OUTDIR}"/other bazel-out "$0"' {} ';' +fi + +declare -r DISTDIR="${WORKSPACE}"/dist +mkdir -p "${DISTDIR}" +( + cd "${OUTDIR}" + zip -r -q "${DISTDIR}/${ZIP_FILENAME}" -- * +) +printf "stored bazel logs to %s\n" "${DISTDIR}/${ZIP_FILENAME}" &>/dev/stderr +unzip -Z -h "${DISTDIR}/${ZIP_FILENAME}" &>/dev/stderr +rm -rf "${OUTDIR}" diff --git a/services/seller_frontend_service/BUILD b/services/seller_frontend_service/BUILD index b5a47487..b922531c 100644 --- a/services/seller_frontend_service/BUILD +++ b/services/seller_frontend_service/BUILD @@ -35,6 +35,7 @@ cc_library( "select_ad_reactor_app.cc", "select_ad_reactor_invalid_client.cc", "select_ad_reactor_web.cc", + "select_auction_result_reactor.cc", "seller_frontend_service.cc", ], hdrs = [ @@ -42,6 +43,7 @@ cc_library( "select_ad_reactor_app.h", "select_ad_reactor_invalid_client.h", "select_ad_reactor_web.h", + "select_auction_result_reactor.h", "seller_frontend_service.h", ], deps = [ @@ -71,6 +73,8 @@ cc_library( "//services/seller_frontend_service/providers:seller_frontend_providers", "//services/seller_frontend_service/util:encryption_util", "//services/seller_frontend_service/util:framing_utils", + "//services/seller_frontend_service/util:key_fetcher_utils", + "//services/seller_frontend_service/util:proto_mapping_util", "//services/seller_frontend_service/util:startup_param_parser", "//services/seller_frontend_service/util:web_utils", "@aws_sdk_cpp//:core", @@ -79,13 +83,14 @@ cc_library( "@com_google_absl//absl/container:flat_hash_map", "@com_google_absl//absl/container:flat_hash_set", "@com_google_absl//absl/synchronization", - "@google_privacysandbox_servers_common//scp/cc/public/cpio/interface:cpio", - "@google_privacysandbox_servers_common//src/cpp/communication:encoding_utils", - "@google_privacysandbox_servers_common//src/cpp/communication:ohttp_utils", - "@google_privacysandbox_servers_common//src/cpp/concurrent:executor", - "@google_privacysandbox_servers_common//src/cpp/encryption/key_fetcher/interface:key_fetcher_manager_interface", - "@google_privacysandbox_servers_common//src/cpp/encryption/key_fetcher/src:key_fetcher_manager", - "@google_privacysandbox_servers_common//src/cpp/telemetry", + "@google_privacysandbox_servers_common//src/communication:encoding_utils", + "@google_privacysandbox_servers_common//src/communication:ohttp_utils", + "@google_privacysandbox_servers_common//src/concurrent:executor", + "@google_privacysandbox_servers_common//src/encryption/key_fetcher:key_fetcher_manager", + "@google_privacysandbox_servers_common//src/encryption/key_fetcher/interface:key_fetcher_manager_interface", + "@google_privacysandbox_servers_common//src/public/cpio/interface:cpio", + "@google_privacysandbox_servers_common//src/telemetry", + "@google_privacysandbox_servers_common//src/util/status_macro:status_macros", "@libcbor//:cbor", ], ) @@ -112,9 +117,9 @@ cc_binary( "@com_google_absl//absl/flags:parse", "@com_google_absl//absl/log:check", "@com_google_absl//absl/strings", - "@google_privacysandbox_servers_common//src/cpp/telemetry", - "@google_privacysandbox_servers_common//src/cpp/util:rlimit_core_config", - "@google_privacysandbox_servers_common//src/cpp/util/status_macro:status_macros", + "@google_privacysandbox_servers_common//src/telemetry", + "@google_privacysandbox_servers_common//src/util:rlimit_core_config", + "@google_privacysandbox_servers_common//src/util/status_macro:status_macros", "@rapidjson", ], ) @@ -134,7 +139,7 @@ cc_test( 
"//services/seller_frontend_service/util:select_ad_reactor_test_utils", "//services/seller_frontend_service/util:web_utils", "@com_google_googletest//:gtest_main", - "@google_privacysandbox_servers_common//src/cpp/encryption/key_fetcher/mock:mock_key_fetcher_manager", + "@google_privacysandbox_servers_common//src/encryption/key_fetcher/mock:mock_key_fetcher_manager", ], ) @@ -153,12 +158,32 @@ cc_test( "//services/seller_frontend_service/util:select_ad_reactor_test_utils", "@com_github_google_quiche//quiche:oblivious_http_unstable_api", "@com_google_googletest//:gtest_main", - "@google_privacysandbox_servers_common//src/cpp/communication:ohttp_utils", - "@google_privacysandbox_servers_common//src/cpp/encryption/key_fetcher/mock:mock_key_fetcher_manager", + "@google_privacysandbox_servers_common//src/communication:ohttp_utils", + "@google_privacysandbox_servers_common//src/encryption/key_fetcher/mock:mock_key_fetcher_manager", "@libcbor//:cbor", ], ) +cc_test( + name = "select_auction_result_reactor_test", + size = "small", + srcs = [ + "select_auction_result_reactor_test.cc", + ], + deps = [ + ":seller_frontend_service", + "//api:bidding_auction_servers_cc_grpc_proto", + "//api:bidding_auction_servers_cc_proto", + "//services/common/test:mocks", + "//services/common/test:random", + "//services/seller_frontend_service/util:select_ad_reactor_test_utils", + "@com_google_absl//absl/synchronization", + "@com_google_googletest//:gtest_main", + "@google_privacysandbox_servers_common//src/core/test/utils", + "@google_privacysandbox_servers_common//src/encryption/key_fetcher/mock:mock_key_fetcher_manager", + ], +) + cc_test( name = "seller_frontend_service_test", size = "small", @@ -176,7 +201,7 @@ cc_test( "@com_google_absl//absl/log:scoped_mock_log", "@com_google_absl//absl/strings", "@com_google_googletest//:gtest_main", - "@google_privacysandbox_servers_common//src/cpp/encryption/key_fetcher/mock:mock_key_fetcher_manager", + "@google_privacysandbox_servers_common//src/encryption/key_fetcher/mock:mock_key_fetcher_manager", ], ) @@ -196,8 +221,8 @@ cc_test( "//services/seller_frontend_service/util:select_ad_reactor_test_utils", "@com_github_google_quiche//quiche:oblivious_http_unstable_api", "@com_google_googletest//:gtest_main", - "@google_privacysandbox_servers_common//src/cpp/communication:encoding_utils", - "@google_privacysandbox_servers_common//src/cpp/communication:ohttp_utils", - "@google_privacysandbox_servers_common//src/cpp/encryption/key_fetcher/mock:mock_key_fetcher_manager", + "@google_privacysandbox_servers_common//src/communication:encoding_utils", + "@google_privacysandbox_servers_common//src/communication:ohttp_utils", + "@google_privacysandbox_servers_common//src/encryption/key_fetcher/mock:mock_key_fetcher_manager", ], ) diff --git a/services/seller_frontend_service/benchmarking/BUILD b/services/seller_frontend_service/benchmarking/BUILD index 66cd76f9..85107cc7 100644 --- a/services/seller_frontend_service/benchmarking/BUILD +++ b/services/seller_frontend_service/benchmarking/BUILD @@ -32,6 +32,6 @@ cc_binary( "//services/seller_frontend_service/util:select_ad_reactor_test_utils", "@google_benchmark//:benchmark", "@google_benchmark//:benchmark_main", - "@google_privacysandbox_servers_common//src/cpp/encryption/key_fetcher/mock:mock_key_fetcher_manager", + "@google_privacysandbox_servers_common//src/encryption/key_fetcher/mock:mock_key_fetcher_manager", ], ) diff --git a/services/seller_frontend_service/benchmarking/select_ad_reactor_benchmarks.cc 
b/services/seller_frontend_service/benchmarking/select_ad_reactor_benchmarks.cc index be25f811..a5a55a69 100644 --- a/services/seller_frontend_service/benchmarking/select_ad_reactor_benchmarks.cc +++ b/services/seller_frontend_service/benchmarking/select_ad_reactor_benchmarks.cc @@ -27,6 +27,12 @@ namespace privacy_sandbox::bidding_auction_servers { namespace { constexpr int kDebugUrlLength = 500; +constexpr char kBaseAdRenderUrl[] = "ads.barbecue.com"; +constexpr char kInterestGroupName[] = "meat_lovers"; +constexpr char kEurIsoCode[] = "EUR"; +constexpr char kUsdIsoCode[] = "USD"; + +enum class BuyerMockType { DEBUG_REPORTING, BID_CURRENCY }; class AsyncReporterStub : public AsyncReporter { public: @@ -51,7 +57,8 @@ void AsyncReporterStub::DoReport( class BuyerFrontEndAsyncClientStub : public BuyerFrontEndAsyncClient { public: - explicit BuyerFrontEndAsyncClientStub(const std::string& ad_render_url); + explicit BuyerFrontEndAsyncClientStub(const std::string& ad_render_url, + const BuyerMockType buyer_mock_type); absl::Status Execute( std::unique_ptr<GetBidsRequest> request, const RequestMetadata& metadata, @@ -71,11 +78,12 @@ class BuyerFrontEndAsyncClientStub : public BuyerFrontEndAsyncClient { private: const std::string ad_render_url_; + const BuyerMockType buyer_mock_type_; }; BuyerFrontEndAsyncClientStub::BuyerFrontEndAsyncClientStub( - const std::string& ad_render_url) - : ad_render_url_(ad_render_url) {} + const std::string& ad_render_url, const BuyerMockType buyer_mock_type) + : ad_render_url_(ad_render_url), buyer_mock_type_(buyer_mock_type) {} absl::Status BuyerFrontEndAsyncClientStub::Execute( std::unique_ptr<GetBidsRequest> request, const RequestMetadata& metadata, @@ -93,16 +101,33 @@ absl::Status BuyerFrontEndAsyncClientStub::ExecuteInternal( std::unique_ptr<GetBidsRequest::GetBidsRawRequest> request, const RequestMetadata& metadata, absl::AnyInvocable<void(absl::StatusOr<std::unique_ptr< GetBidsResponse::GetBidsRawResponse>>) &&> on_done, absl::Duration timeout) const { - AdWithBid bid = - BuildNewAdWithBid(ad_render_url_, "testIG", /*bid_value=*/1.0, - /*enable_event_level_debug_reporting=*/true); - auto* debug_report_urls = bid.mutable_debug_report_urls(); - *debug_report_urls->mutable_auction_debug_loss_url() = - std::string(kDebugUrlLength, 'C'); - *debug_report_urls->mutable_auction_debug_win_url() = - std::string(kDebugUrlLength, 'D'); auto response = std::make_unique<GetBidsResponse::GetBidsRawResponse>(); - response->mutable_bids()->Add(std::move(bid)); + switch (buyer_mock_type_) { + case BuyerMockType::DEBUG_REPORTING: { + AdWithBid bid = + BuildNewAdWithBid(ad_render_url_, "testIG", /*bid_value=*/1.0, + /*enable_event_level_debug_reporting=*/true); + auto* debug_report_urls = bid.mutable_debug_report_urls(); + *debug_report_urls->mutable_auction_debug_loss_url() = + std::string(kDebugUrlLength, 'C'); + *debug_report_urls->mutable_auction_debug_win_url() = + std::string(kDebugUrlLength, 'D'); + response->mutable_bids()->Add(std::move(bid)); + break; + } + case BuyerMockType::BID_CURRENCY: { + std::vector<AdWithBid> ads_with_bids = GetAdWithBidsInMultipleCurrencies( + /*num_ad_with_bids=*/40, /*num_mismatched=*/15, + /*matching_currency=*/kUsdIsoCode, + /*mismatching_currency=*/kEurIsoCode, kBaseAdRenderUrl, + kInterestGroupName); + for (const auto& ad_with_bid : ads_with_bids) { + // Add all AwBs so all are returned by mock.
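Since the loop above iterates ads_with_bids by const reference, each message is deep-copied into the response. A brief aside on the two idiomatic ways to append to a protobuf RepeatedPtrField — a minimal sketch with a hypothetical local `awb`; both calls are standard protobuf API, and the move form is used elsewhere in this diff:

  // Copy: leaves the source message intact and reusable.
  response->mutable_bids()->Add()->CopyFrom(awb);
  // Move: avoids the deep copy, but consumes the source message.
  response->mutable_bids()->Add(std::move(awb));

The stub needs nothing further from the vector, so either form would do; the copy below simply keeps the loop variable const.
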
+ response->mutable_bids()->Add()->CopyFrom(ad_with_bid); + } + break; + } + } std::move(on_done)(std::move(response)); return absl::OkStatus(); } @@ -152,7 +177,7 @@ absl::Status ScoringClientStub::ExecuteInternal( score.set_render(bid.render()); score.mutable_component_renders()->CopyFrom(bid.ad_components()); score.set_desirability(i++); - score.set_buyer_bid(i); + score.set_buyer_bid(bid.bid()); score.set_interest_group_name(bid.interest_group_name()); score.set_interest_group_owner(bid.interest_group_owner()); *response.mutable_ad_score() = score; @@ -172,7 +197,8 @@ class BuyerFrontEndAsyncClientFactoryStub public: BuyerFrontEndAsyncClientFactoryStub( const SelectAdRequest& request, - const ProtectedAuctionInput& protected_auction_input); + const ProtectedAuctionInput& protected_auction_input, + const BuyerMockType buyer_mock_type); std::shared_ptr<BuyerFrontEndAsyncClient> Get( absl::string_view client_key) const override; @@ -185,15 +211,17 @@ BuyerFrontEndAsyncClientFactoryStub::BuyerFrontEndAsyncClientFactoryStub( const SelectAdRequest& request, - const ProtectedAuctionInput& protected_auction_input) + const ProtectedAuctionInput& protected_auction_input, + const BuyerMockType buyer_mock_type) : request_(request) { ErrorAccumulator error_accumulator; absl::flat_hash_map<std::string, std::string> buyer_to_ad_url = BuildBuyerWinningAdUrlMap(request_); for (const auto& buyer_ig_owner : request_.auction_config().buyer_list()) { - buyer_clients_.emplace(buyer_ig_owner, - std::make_shared<BuyerFrontEndAsyncClientStub>( - buyer_to_ad_url.at(buyer_ig_owner))); + buyer_clients_.emplace( + buyer_ig_owner, + std::make_shared<BuyerFrontEndAsyncClientStub>( + buyer_to_ad_url.at(buyer_ig_owner), buyer_mock_type)); } } @@ -300,14 +328,18 @@ static void BM_PerformDebugReporting(benchmark::State& state) { auto async_reporter = std::make_unique( std::make_unique()); - BuyerFrontEndAsyncClientFactoryStub buyer_clients(request, - protected_auction_input); + BuyerFrontEndAsyncClientFactoryStub buyer_clients( + request, protected_auction_input, BuyerMockType::DEBUG_REPORTING); KeyFetcherManagerStub key_fetcher_manager; TrustedServersConfigClient config_client = CreateConfig(); config_client.SetFlagForTest("", CONSENTED_DEBUG_TOKEN); config_client.SetFlagForTest(kFalse, ENABLE_PROTECTED_APP_SIGNALS); - ClientRegistry clients{scoring_provider, scoring_client, buyer_clients, - key_fetcher_manager, std::move(async_reporter)}; + ClientRegistry clients{scoring_provider, + scoring_client, + buyer_clients, + key_fetcher_manager, + /* crypto_client = */ nullptr, + std::move(async_reporter)}; server_common::telemetry::TelemetryConfig config_proto; config_proto.set_mode(server_common::telemetry::TelemetryConfig::OFF); @@ -327,6 +359,58 @@ static void BM_PerformDebugReporting(benchmark::State& state) { BENCHMARK(BM_PerformDebugReporting); +static void BM_PerformCurrencyCheckingAndFiltering(benchmark::State& state) { + ProtectedAuctionInput protected_auction_input = + MakeARandomProtectedAuctionInput(); + SelectAdRequest request = MakeARandomSelectAdRequest( + kSellerOriginDomain, protected_auction_input, /*set_buyer_egid=*/false, + /*set_seller_egid=*/false, /*seller_currency=*/kUsdIsoCode, + /*buyer_currency=*/kEurIsoCode); + auto [encrypted_protected_auction_input, encryption_context] = + GetCborEncodedEncryptedInputAndOhttpContext( + protected_auction_input); + *request.mutable_protected_auction_ciphertext() = + std::move(encrypted_protected_auction_input); + auto context = std::make_unique( + std::move(encryption_context)); + ScoringClientStub scoring_client; + + // Scoring signal provider
+ ScoringSignalsProviderStub scoring_provider(request); + + auto async_reporter = std::make_unique( + std::make_unique()); + BuyerFrontEndAsyncClientFactoryStub buyer_clients( + request, protected_auction_input, BuyerMockType::BID_CURRENCY); + KeyFetcherManagerStub key_fetcher_manager; + TrustedServersConfigClient config_client = CreateConfig(); + config_client.SetFlagForTest("", CONSENTED_DEBUG_TOKEN); + config_client.SetFlagForTest(kTrue, ENABLE_PROTECTED_APP_SIGNALS); + ClientRegistry clients{scoring_provider, + scoring_client, + buyer_clients, + key_fetcher_manager, + /* crypto_client = */ nullptr, + std::move(async_reporter)}; + + server_common::telemetry::TelemetryConfig config_proto; + config_proto.set_mode(server_common::telemetry::TelemetryConfig::OFF); + + for (auto _ : state) { + // This code gets timed. + grpc::CallbackServerContext context; + SelectAdResponse response; + metric::MetricContextMap<SelectAdRequest>( + server_common::telemetry::BuildDependentConfig(config_proto)) + ->Get(&request); + SelectAdReactorForWeb reactor(&context, &request, &response, clients, + config_client); + reactor.Execute(); + } +} + +BENCHMARK(BM_PerformCurrencyCheckingAndFiltering); + BENCHMARK_MAIN(); } // namespace diff --git a/services/seller_frontend_service/select_ad_reactor.cc b/services/seller_frontend_service/select_ad_reactor.cc index 72c89f1f..50f5a794 100644 --- a/services/seller_frontend_service/select_ad_reactor.cc +++ b/services/seller_frontend_service/select_ad_reactor.cc @@ -34,10 +34,11 @@ #include "services/common/util/auction_scope_util.h" #include "services/common/util/reporting_util.h" #include "services/common/util/request_response_constants.h" +#include "services/seller_frontend_service/util/key_fetcher_utils.h" #include "services/seller_frontend_service/util/web_utils.h" -#include "src/cpp/communication/ohttp_utils.h" -#include "src/cpp/encryption/key_fetcher/src/key_fetcher_manager.h" -#include "src/cpp/telemetry/telemetry.h" +#include "src/communication/ohttp_utils.h" +#include "src/encryption/key_fetcher/key_fetcher_manager.h" +#include "src/telemetry/telemetry.h" namespace privacy_sandbox::bidding_auction_servers { namespace { @@ -60,6 +61,7 @@ SelectAdReactor::SelectAdReactor( response_(response), clients_(clients), config_client_(config_client), + auction_scope_(GetAuctionScope(*request_)), // TODO(b/278039901): Add integration test for metadata forwarding. buyer_metadata_(GrpcMetadataToRequestMetadata(context->client_metadata(), kBuyerMetadataKeysMap)), @@ -68,12 +70,13 @@ SelectAdReactor::SelectAdReactor( error_accumulator_(&log_context_), fail_fast_(fail_fast), is_protected_auction_request_(false), + // PAS should only be enabled for single seller auctions.
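A note on why this hunk moves auction_scope_ to the top of the initializer list (and, further down in this diff, moves its declaration up in select_ad_reactor.h): C++ initializes non-static members in declaration order, so a member initializer may only read members declared before it. A standalone sketch of the rule, with hypothetical names:

  #include <iostream>

  struct Reactor {
    const int auction_scope_;  // declared (and thus initialized) first
    const bool pas_enabled_;   // safe: auction_scope_ is already set
    explicit Reactor(int scope)
        : auction_scope_(scope), pas_enabled_(auction_scope_ == 1) {}
  };

  int main() { std::cout << Reactor(1).pas_enabled_ << "\n"; }  // prints 1

Were auction_scope_ declared after pas_enabled_, the initializer would read an uninitialized member — undefined behavior that compilers only sometimes diagnose. With the ordering settled, the initializer that follows can consult the scope safely:
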
is_pas_enabled_( - config_client_.GetBooleanParameter(ENABLE_PROTECTED_APP_SIGNALS)), + config_client_.GetBooleanParameter(ENABLE_PROTECTED_APP_SIGNALS) && + (auction_scope_ == AuctionScope::AUCTION_SCOPE_SINGLE_SELLER)), is_protected_audience_enabled_( config_client_.GetBooleanParameter(ENABLE_PROTECTED_AUDIENCE)), max_buyers_solicited_(max_buyers_solicited), - auction_scope_(GetAuctionScope(*request_)), async_task_tracker_( request->auction_config().buyer_list_size(), log_context_, [this](bool successful) { OnAllBidsDone(successful); }) { @@ -109,8 +112,7 @@ AdWithBidMetadata SelectAdReactor::BuildAdWithBidMetadata( const BuyerInput& buyer_input = buyer_inputs_->find(interest_group_owner)->second; for (const auto& interest_group : buyer_input.interest_groups()) { - if (std::strcmp(interest_group.name().c_str(), - result.interest_group_name().c_str())) { + if (interest_group.name() == result.interest_group_name()) { if (request_->client_type() == CLIENT_TYPE_BROWSER) { result.set_join_count(interest_group.browser_signals().join_count()); PS_VLOG(1, log_context_) << "BrowserSignal: Recency:" @@ -362,8 +364,7 @@ void SelectAdReactor::Execute() { std::visit( [this](const auto& protected_auction_input) { - ValidateProtectedAuctionInput(protected_auction_input, - auction_scope_); + ValidateProtectedAuctionInput(protected_auction_input); }, protected_auction_input_); } @@ -635,7 +636,7 @@ void SelectAdReactor::OnAllBidsDone(bool any_successful_bids) { // That the shared_buyer_bids_map_ is non-empty was just checked above // That each buyer has bids was checked in OnFetchBidsDone(). // Thus the preconditions of the following method are satisfied. - if (is_protected_audience_enabled_ && !FilterBidsWithMismatchingCurrency()) { + if (!FilterBidsWithMismatchingCurrency()) { PS_VLOG(2, log_context_) << kAllBidsRejectedBuyerCurrencyMismatch; FinishWithStatus(grpc::Status(grpc::INVALID_ARGUMENT, kAllBidsRejectedBuyerCurrencyMismatch)); @@ -644,15 +645,42 @@ void SelectAdReactor::OnAllBidsDone(bool any_successful_bids) { } } +template <typename T> +void FilterBidsWithMismatchingCurrencyHelper( + google::protobuf::RepeatedPtrField<T>* ads_with_bids, + absl::string_view buyer_currency) { + int i = 0; + int remove_starting_at = ads_with_bids->size(); + while (i < remove_starting_at) { + const T& ad_with_bid = (*ads_with_bids)[i]; + if (!ad_with_bid.bid_currency().empty() && + buyer_currency != ad_with_bid.bid_currency()) { + // Swap to last. Mark for removal and make sure we don't check it again + ads_with_bids->SwapElements(i, --remove_starting_at); + // TODO: Record metric. + // Leave index un-incremented so swapped element is checked. + } else { + i++; + } + } + // Delete all mismatched bids. + if (remove_starting_at < ads_with_bids->size()) { + ads_with_bids->DeleteSubrange(remove_starting_at, + ads_with_bids->size() - remove_starting_at); + } +} + bool SelectAdReactor::FilterBidsWithMismatchingCurrency() { DCHECK(!shared_buyer_bids_map_.empty()) << "PRECONDITION 1: shared_buyer_bids_map_ must have at least one entry."; - DCHECK(([shared_buyer_bids_map_ptr = &(this->shared_buyer_bids_map_), - is_pas_enabled = this->is_pas_enabled_]() { + DCHECK(([shared_buyer_bids_map_ptr = &(shared_buyer_bids_map_), + is_protected_audience_enabled = is_protected_audience_enabled_, + is_pas_enabled = is_pas_enabled_]() { for (const auto& [buyer_ig_owner, get_bid_response] : *shared_buyer_bids_map_ptr) { // Check if no bids are present.
- if (get_bid_response->bids().empty() && + if ((!is_protected_audience_enabled || + get_bid_response->bids().empty()) && (!is_pas_enabled || get_bid_response->protected_app_signals_bids().empty())) { return false; @@ -665,7 +693,7 @@ bool SelectAdReactor::FilterBidsWithMismatchingCurrency() { bool any_valid_bids = false; // Check that each AdWithBid's currency is as-expected. // Throw out the AdWithBids which do not match. - for (auto& [buyer_ig_owner, get_bid_response] : shared_buyer_bids_map_) { + for (auto& [buyer_ig_owner, get_bids_raw_response] : shared_buyer_bids_map_) { // It is possible for a buyer to have no buyer_config. const auto& buyer_config_itr = request_->auction_config().per_buyer_config().find(buyer_ig_owner); @@ -683,45 +711,32 @@ bool SelectAdReactor::FilterBidsWithMismatchingCurrency() { any_valid_bids = true; continue; } - // Remove each AdWithBid for which its currency does not match. - int i = 0, remove_starting_at = get_bid_response->bids_size(); - while (i < remove_starting_at) { - const AdWithBid& ad_with_bid = get_bid_response->bids(i); - if (!ad_with_bid.bid_currency().empty() && - buyer_currency != ad_with_bid.bid_currency()) { - // Swap to last. - get_bid_response->mutable_bids()->SwapElements( - i, get_bid_response->bids_size() - 1); - // Mark for removal and make sure we don't check it again - remove_starting_at--; - // TODO: Record metric. - // Leave index un-incremented so swapped element is checked. - } else { - i++; - } - } - // Delete all mismatched bids. - if (remove_starting_at < get_bid_response->bids_size()) { - get_bid_response->mutable_bids()->DeleteSubrange( - remove_starting_at, - get_bid_response->bids_size() - remove_starting_at); - } + + FilterBidsWithMismatchingCurrencyHelper( + get_bids_raw_response->mutable_bids(), buyer_currency); + FilterBidsWithMismatchingCurrencyHelper( + get_bids_raw_response->mutable_protected_app_signals_bids(), + buyer_currency); + // Check if any bids remain. - if (!get_bid_response->bids().empty() || + if ((is_protected_audience_enabled_ && + !get_bids_raw_response->bids().empty()) || (is_pas_enabled_ && - !get_bid_response->protected_app_signals_bids().empty())) { + !get_bids_raw_response->protected_app_signals_bids().empty())) { any_valid_bids = true; } } // Clear buyers with no remaining bids. - absl::erase_if(shared_buyer_bids_map_, [is_pas_enabled = is_pas_enabled_]( - const auto& buyer_to_bids_pair) { - const auto& get_bid_response = buyer_to_bids_pair.second; - // Check if any bids remain. - return (get_bid_response->bids().empty() && - (!is_pas_enabled || - get_bid_response->protected_app_signals_bids().empty())); - }); + absl::erase_if( + shared_buyer_bids_map_, + [is_p_aud_enabled = is_protected_audience_enabled_, + is_pas_enabled = is_pas_enabled_](const auto& buyer_to_bids_pair) { + const auto& get_bids_raw_response = buyer_to_bids_pair.second; + // Check if any bids remain. 
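The helper above is a single O(n) swap-and-delete pass: a mismatched bid is swapped to the tail, the tail boundary moves left, and the vacated slot is re-examined, so surviving bids come out partially reordered. A standalone sketch — plain pairs standing in for the RepeatedPtrField — that reproduces the exact survivor order the currency test later in this diff asserts (40 bids, of which indices 1 and 3 carry the mismatching currency):

  #include <cstdio>
  #include <utility>
  #include <vector>

  int main() {
    std::vector<std::pair<int, bool>> bids;  // {index, currency_matches}
    for (int n = 0; n < 40; ++n) bids.push_back({n, n != 1 && n != 3});
    int i = 0;
    int remove_starting_at = static_cast<int>(bids.size());
    while (i < remove_starting_at) {
      if (!bids[i].second) {
        // Swap to last; slot i is re-checked on the next iteration.
        std::swap(bids[i], bids[--remove_starting_at]);
      } else {
        i++;
      }
    }
    bids.resize(remove_starting_at);  // drop the mismatched tail
    for (const auto& b : bids) std::printf("%d ", b.first);
    // Prints: 0 39 2 38 4 5 ... 37 -- 38 survivors.
  }

Back in the reactor, the predicate that follows erases a buyer's entry only when every bid list enabled for this auction came out of the filter empty:
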
+ return ((!is_p_aud_enabled || get_bids_raw_response->bids().empty()) && + (!is_pas_enabled || + get_bids_raw_response->protected_app_signals_bids().empty())); + }); return any_valid_bids; } @@ -964,7 +979,7 @@ void SelectAdReactor::OnScoreAdsDone( error = std::move(error_); } absl::StatusOr<std::string> non_encrypted_response = - GetNonEncryptedResponse(high_score, std::move(error)); + GetNonEncryptedResponse(high_score, error); if (!non_encrypted_response.ok()) { return; } @@ -986,12 +1001,30 @@ DecodedBuyerInputs SelectAdReactor::GetDecodedBuyerinputs( } bool SelectAdReactor::EncryptResponse(std::string plaintext_response) { - // Encrypt with corresponding private key. - // we're now done with the request so we can release context. - absl::StatusOr<std::string> encapsulated_response = - server_common::EncryptAndEncapsulateResponse( - std::move(plaintext_response), decrypted_request_->private_key, - decrypted_request_->context, decrypted_request_->request_label); + absl::StatusOr<std::string> encapsulated_response; + if (auction_scope_ == + AuctionScope::AUCTION_SCOPE_SERVER_COMPONENT_MULTI_SELLER) { + // If this will be decrypted by another SFE, encrypt with public key. + auto encrypted_request = + HpkeEncrypt(plaintext_response, *clients_.crypto_client_ptr_, + clients_.key_fetcher_manager_, + ProtoCloudPlatformToScpCloudPlatform( + request_->auction_config().top_level_cloud_platform())); + if (!encrypted_request.ok()) { + PS_VLOG(1, log_context_) << "Error while encrypting response: " + << encrypted_request.status().message(); + FinishWithStatus( + grpc::Status(grpc::StatusCode::INTERNAL, kInternalServerError)); + return false; + } + encapsulated_response = std::move(encrypted_request->ciphertext); + *response_->mutable_key_id() = std::move(encrypted_request->key_id); + } else { + // Decrypted by device -> Encrypt with corresponding private key. + encapsulated_response = server_common::EncryptAndEncapsulateResponse( + std::move(plaintext_response), decrypted_request_->private_key, + decrypted_request_->context, decrypted_request_->request_label); + } if (!encapsulated_response.ok()) { PS_VLOG(4, log_context_) @@ -1010,6 +1043,16 @@ bool SelectAdReactor::EncryptResponse(std::string plaintext_response) { void SelectAdReactor::PerformDebugReporting( const std::optional<ScoreAdsResponse::AdScore>& high_score) { + // Create new metric context. + metric::MetricContextMap<SelectAdRequest>()->Get(request_); + // Make metric_context a unique_ptr by releasing the ownership of the context + // from ContextMap.
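The Get-then-Remove pair here is an ownership handoff: the context map owns the metric context while the request is in flight, and Remove releases it so the debug-reporting callbacks can keep it alive after the reactor itself is gone. A generic sketch of the pattern — a hypothetical ContextMap, not the real telemetry API:

  #include <map>
  #include <memory>

  struct Context {};

  class ContextMap {
   public:
    // Creates the slot on first use and returns a borrowed pointer.
    Context* Get(const void* request) {
      auto& slot = contexts_[request];
      if (!slot) slot = std::make_unique<Context>();
      return slot.get();
    }
    // Transfers ownership out of the map (null if never created).
    std::unique_ptr<Context> Remove(const void* request) {
      auto node = contexts_.extract(request);
      return node ? std::move(node.mapped()) : nullptr;
    }
   private:
    std::map<const void*, std::unique_ptr<Context>> contexts_;
  };

The unique_ptr released below is then converted to a shared_ptr so each pending report callback can hold a reference:
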
+ absl::StatusOr<std::unique_ptr<metric::SfeContext>> metric_context = + metric::MetricContextMap<SelectAdRequest>()->Remove(request_); + CHECK_OK(metric_context); + std::shared_ptr<metric::SfeContext> shared_context = + *std::move(metric_context); + bool enable_debug_reporting = false; std::visit( [&enable_debug_reporting](const auto& protected_auction_input) { @@ -1046,19 +1089,20 @@ void SelectAdReactor::PerformDebugReporting( absl::AnyInvocable<void(absl::StatusOr<absl::string_view>)> done_cb; if (server_common::log::PS_VLOG_IS_ON(5)) { - done_cb = [ig_owner = ig_owner, - ig_name](absl::StatusOr<absl::string_view> result) mutable { - if (result.ok()) { - PS_VLOG(5) << "Performed debug reporting for:" << ig_owner - << ", interest_group: " << ig_name; - } else { - PS_VLOG(5) << "Error while performing debug reporting for:" - << ig_owner << ", interest_group: " << ig_name - << " ,status:" << result.status(); - } - }; + done_cb = + [ig_owner = ig_owner, ig_name]( + absl::StatusOr<absl::string_view> result) mutable { // NOLINT + if (result.ok()) { + PS_VLOG(5) << "Performed debug reporting for:" << ig_owner + << ", interest_group: " << ig_name; + } else { + PS_VLOG(5) << "Error while performing debug reporting for:" + << ig_owner << ", interest_group: " << ig_name + << " ,status:" << result.status(); + } + }; } else { - done_cb = [](absl::StatusOr<absl::string_view> result) {}; + done_cb = [](absl::StatusOr<absl::string_view> result) {}; // NOLINT } HTTPRequest http_request = CreateDebugReportingHttpRequest( debug_url, diff --git a/services/seller_frontend_service/select_ad_reactor.h b/services/seller_frontend_service/select_ad_reactor.h index d0faed54..52cd2fd9 100644 --- a/services/seller_frontend_service/select_ad_reactor.h +++ b/services/seller_frontend_service/select_ad_reactor.h @@ -49,29 +49,7 @@ inline constexpr std::array<std::pair<absl::string_view, absl::string_view>, 3> {"x-user-agent", "x-user-agent"}, {"x-bna-client-ip", "x-bna-client-ip"}}}; -// Constants for user errors. -inline constexpr char kEmptyProtectedAuctionCiphertextError[] = - "protected_auction_ciphertext must be non-null."; -inline constexpr char kUnsupportedClientType[] = "Unsupported client type."; - -// Constants for bad Ad server provided inputs. -inline constexpr char kEmptySellerSignals[] = - "Seller signals missing in auction config"; -inline constexpr char kEmptyAuctionSignals[] = - "Auction signals missing in auction config"; -inline constexpr char kEmptyBuyerList[] = "No buyers specified"; -inline constexpr char kEmptySeller[] = - "Seller origin missing in auction config"; -inline constexpr char kEmptyBuyerSignals[] = - "Buyer signals missing in auction config for buyer: %s"; -inline constexpr char kUnknownClientType[] = - "Unknown client type in SelectAdRequest"; -inline constexpr char kWrongSellerDomain[] = - "Seller domain passed in request does not match this server's domain"; -inline constexpr char kEmptyBuyerInPerBuyerConfig[] = - "One or more buyer keys are empty in per buyer config map"; -inline constexpr char kDeviceComponentAuctionWithAndroid[] = - "Device orchestrated Component Auctions not supported for Android"; +// Constants for service errors. inline constexpr char kInvalidBuyerCurrency[] = "Invalid Buyer Currency"; inline constexpr char kInvalidSellerCurrency[] = "Invalid Seller Currency"; @@ -149,8 +127,7 @@ class SelectAdReactor : public grpc::ServerUnaryReactor { // Validates the mandatory fields in the request. Reports any errors to the // error accumulator.
template <typename T> - void ValidateProtectedAuctionInput(const T& protected_auction_input, - AuctionScope auction_scope) { + void ValidateProtectedAuctionInput(const T& protected_auction_input) { if (protected_auction_input.generation_id().empty()) { ReportError(ErrorVisibility::CLIENT_VISIBLE, kMissingGenerationId, ErrorCode::CLIENT_SIDE); @@ -180,18 +157,6 @@ class SelectAdReactor : public grpc::ServerUnaryReactor { kMissingInterestGroupsAndProtectedSignals, buyer)); any_error = true; } - if (!buyer_input.protected_app_signals() - .app_install_signals() - .empty() && - auction_scope == - AuctionScope::AUCTION_SCOPE_DEVICE_COMPONENT_MULTI_SELLER) { - // Invalid input. Device component auctions can't contain PAS - // audience. This path should not be possible since app install - // signals will not be created for web reactor and - // DeviceComponentSeller cannot be conducted for app reactor. - PS_VLOG(2, log_context_) - << "Device component auctions can't contain PAS audience"; - } if (any_error) { continue; } @@ -321,6 +286,8 @@ class SelectAdReactor : public grpc::ServerUnaryReactor { AuctionResult::Error error_; const ClientRegistry& clients_; const TrustedServersConfigClient& config_client_; + // Scope for current auction (single seller, top level or component) + const AuctionScope auction_scope_; // Key Value Fetch Result. std::unique_ptr<ScoringSignals> scoring_signals_; @@ -373,9 +340,6 @@ class SelectAdReactor : public grpc::ServerUnaryReactor { // Temporary workaround for compliance, will be removed (b/308032414). const int max_buyers_solicited_; - // Scope for current auction (single seller, top level or component) - const AuctionScope auction_scope_; - private: // Keeps track of how many buyer bids were expected initially and how many // were erroneous. If all bids ended up in an error state then that should be diff --git a/services/seller_frontend_service/select_ad_reactor_app.cc b/services/seller_frontend_service/select_ad_reactor_app.cc index 1de58c9f..ad734050 100644 --- a/services/seller_frontend_service/select_ad_reactor_app.cc +++ b/services/seller_frontend_service/select_ad_reactor_app.cc @@ -21,8 +21,9 @@ #include "services/common/compression/gzip.h" #include "services/common/util/request_response_constants.h" #include "services/seller_frontend_service/util/framing_utils.h" -#include "src/cpp/communication/encoding_utils.h" -#include "src/cpp/util/status_macro/status_macros.h" +#include "services/seller_frontend_service/util/proto_mapping_util.h" +#include "src/communication/encoding_utils.h" +#include "src/util/status_macro/status_macros.h" namespace privacy_sandbox::bidding_auction_servers { @@ -42,7 +43,7 @@ namespace { template <typename T> T GetDecodedProtectedAuctionInputHelper(absl::string_view encoded_data, bool fail_fast, - ReportErrorSignature ReportError, + const ReportErrorSignature& ReportError, ErrorAccumulator& error_accumulator) { T protected_auction_input; absl::StatusOr<server_common::DecodedRequest> decoded_request = @@ -63,32 +64,6 @@ T GetDecodedProtectedAuctionInputHelper(absl::string_view encoded_data, return protected_auction_input; } -void SetReportingUrls(const WinReportingUrls& win_reporting_urls, - AuctionResult& auction_result) { - auto* mutable_win_reporting_urls = - auction_result.mutable_win_reporting_urls(); - mutable_win_reporting_urls->mutable_buyer_reporting_urls()->set_reporting_url( - win_reporting_urls.buyer_reporting_urls().reporting_url()); - *mutable_win_reporting_urls->mutable_buyer_reporting_urls() - ->mutable_interaction_reporting_urls() =
win_reporting_urls.buyer_reporting_urls().interaction_reporting_urls(); - mutable_win_reporting_urls->mutable_top_level_seller_reporting_urls() - ->set_reporting_url( - win_reporting_urls.top_level_seller_reporting_urls().reporting_url()); - *mutable_win_reporting_urls->mutable_top_level_seller_reporting_urls() - ->mutable_interaction_reporting_urls() = - win_reporting_urls.top_level_seller_reporting_urls() - .interaction_reporting_urls(); - mutable_win_reporting_urls->mutable_component_seller_reporting_urls() - ->set_reporting_url( - win_reporting_urls.component_seller_reporting_urls().reporting_url()); - - *mutable_win_reporting_urls->mutable_component_seller_reporting_urls() - ->mutable_interaction_reporting_urls() = - win_reporting_urls.component_seller_reporting_urls() - .interaction_reporting_urls(); -} - } // namespace SelectAdReactorForApp::SelectAdReactorForApp( @@ -102,25 +77,19 @@ absl::StatusOr<std::string> SelectAdReactorForApp::GetNonEncryptedResponse( const std::optional<ScoreAdsResponse::AdScore>& high_score, const std::optional<AuctionResult::Error>& error) { AuctionResult auction_result; - if (error.has_value()) { - *auction_result.mutable_error() = *error; - } else if (high_score.has_value()) { - auction_result.set_is_chaff(false); - auction_result.set_bid(high_score->bid()); - auction_result.set_score(high_score->desirability()); - auction_result.set_interest_group_name( - std::move(high_score->interest_group_name())); - auction_result.set_interest_group_owner( - std::move(high_score->interest_group_owner())); - auction_result.set_ad_render_url(std::move(high_score->render())); - SetReportingUrls(high_score->win_reporting_urls(), auction_result); - *auction_result.mutable_ad_component_render_urls() = - high_score->component_renders(); - auction_result.set_ad_type(high_score->ad_type()); + if (high_score.has_value()) { + auction_result = AdScoreToAuctionResult( + high_score, GetBiddingGroups(shared_buyer_bids_map_, *buyer_inputs_), + error, auction_scope_, request_->auction_config().seller(), + protected_auction_input_, + request_->auction_config().top_level_seller()); } else { - auction_result.set_is_chaff(true); + auction_result = AdScoreToAuctionResult( + high_score, /*maybe_bidding_groups=*/std::nullopt, error, + auction_scope_, request_->auction_config().seller(), + protected_auction_input_, + request_->auction_config().top_level_seller()); } - PS_VLOG(kPlain, log_context_) << "AuctionResult:\n" << auction_result.DebugString(); @@ -130,8 +99,8 @@ absl::StatusOr<std::string> SelectAdReactorForApp::GetNonEncryptedResponse( // Compress the bytes array before framing it with pre-amble and padding.
absl::StatusOr<std::string> compressed_data = GzipCompress(serialized_result); if (!compressed_data.ok()) { - std::string error = "Failed to compress the serialized response data\n"; - FinishWithStatus(grpc::Status(grpc::INTERNAL, error)); + std::string error_str = "Failed to compress the serialized response data\n"; + FinishWithStatus(grpc::Status(grpc::INTERNAL, error_str)); return absl::InternalError(""); } @@ -182,7 +151,7 @@ DecodedBuyerInputs SelectAdReactorForApp::GetDecodedBuyerinputs( continue; } - decoded_buyer_inputs.insert({std::move(owner), std::move(buyer_input)}); + decoded_buyer_inputs.insert({owner, std::move(buyer_input)}); } return decoded_buyer_inputs; @@ -261,7 +230,7 @@ SelectAdReactorForApp::CreateScoreAdsRequest() { ProtectedAppSignalsAdWithBidMetadata SelectAdReactorForApp::BuildProtectedAppSignalsAdWithBidMetadata( - absl::string_view buyer, const ProtectedAppSignalsAdWithBid& input) { + absl::string_view buyer_owner, const ProtectedAppSignalsAdWithBid& input) { ProtectedAppSignalsAdWithBidMetadata result; if (input.has_ad()) { *result.mutable_ad() = input.ad(); @@ -271,7 +240,8 @@ SelectAdReactorForApp::BuildProtectedAppSignalsAdWithBidMetadata( result.set_modeling_signals(input.modeling_signals()); result.set_ad_cost(input.ad_cost()); result.set_egress_features(input.egress_features()); - result.set_owner(buyer); + result.set_owner(buyer_owner); + result.set_bid_currency(input.bid_currency()); return result; } @@ -285,11 +255,11 @@ void SelectAdReactorForApp::MayPopulateProtectedAppSignalsBids( PS_VLOG(3, log_context_) << "Protected App signals, may add protected app " "signals bids to score ads request"; - for (const auto& [buyer, get_bid_response] : shared_buyer_bids_map_) { + for (const auto& [buyer_owner, get_bid_response] : shared_buyer_bids_map_) { for (int i = 0; i < get_bid_response->protected_app_signals_bids_size(); i++) { auto ad_with_bid_metadata = BuildProtectedAppSignalsAdWithBidMetadata( - buyer, get_bid_response->protected_app_signals_bids().at(i)); + buyer_owner, get_bid_response->protected_app_signals_bids().at(i)); score_ads_raw_request->mutable_protected_app_signals_ad_bids()->Add( std::move(ad_with_bid_metadata)); } diff --git a/services/seller_frontend_service/select_ad_reactor_app_test.cc b/services/seller_frontend_service/select_ad_reactor_app_test.cc index eb54bca3..9ae5e06f 100644 --- a/services/seller_frontend_service/select_ad_reactor_app_test.cc +++ b/services/seller_frontend_service/select_ad_reactor_app_test.cc @@ -41,10 +41,10 @@ #include "services/seller_frontend_service/seller_frontend_service.h" #include "services/seller_frontend_service/util/framing_utils.h" #include "services/seller_frontend_service/util/select_ad_reactor_test_utils.h" -#include "src/cpp/communication/encoding_utils.h" -#include "src/cpp/communication/ohttp_utils.h" -#include "src/cpp/encryption/key_fetcher/mock/mock_key_fetcher_manager.h" -#include "src/cpp/util/status_macro/status_macros.h" +#include "src/communication/encoding_utils.h" +#include "src/communication/ohttp_utils.h" +#include "src/encryption/key_fetcher/mock/mock_key_fetcher_manager.h" +#include "src/util/status_macro/status_macros.h" namespace privacy_sandbox::bidding_auction_servers { namespace { @@ -127,15 +127,10 @@ TYPED_TEST(SelectAdReactorForAppTest, VerifyEncoding) { EXPECT_EQ(payload_size, 1 << log_2_payload); EXPECT_GE(payload_size, kMinAuctionResultBytes); - // Unframe the framed response.
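These app-reactor tests used to unframe and then gunzip in two explicit steps; the hunks below fold both into a single UnframeAndDecompressAuctionResult helper with one status to check. Its presumable composition, inferred from the two deleted steps it replaces (a sketch only, not the actual test utility; PS_ASSIGN_OR_RETURN is the status macro pulled in via status_macros above):

  absl::StatusOr<std::string> UnframeAndDecompressAuctionResult(
      absl::string_view framed_response) {
    // Unframe the framed response.
    PS_ASSIGN_OR_RETURN(auto unframed_response,
                        server_common::DecodeRequestPayload(framed_response));
    // Decompress the unframed payload.
    return GzipDecompress(unframed_response.compressed_data);
  }

The deletions that follow show exactly those two steps being retired:
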
- absl::StatusOr<server_common::DecodedRequest> unframed_response = - server_common::DecodeRequestPayload(*decrypted_response); - ASSERT_TRUE(unframed_response.ok()) << unframed_response.status().message(); - // Decompress the encoded response. absl::StatusOr<std::string> decompressed_response = - GzipDecompress(unframed_response->compressed_data); - EXPECT_TRUE(decompressed_response.ok()) + UnframeAndDecompressAuctionResult(*decrypted_response); + ASSERT_TRUE(decompressed_response.ok()) << decompressed_response.status().message(); AuctionResult deserialized_auction_result; EXPECT_TRUE(deserialized_auction_result.ParseFromArray( @@ -258,6 +253,69 @@ TYPED_TEST(SelectAdReactorForAppTest, VerifyEncodingWithReportingUrls) { kTestInteractionUrl); } +TYPED_TEST(SelectAdReactorForAppTest, VerifyEncodingForServerComponentAuction) { + MockAsyncProvider<ScoringSignalsRequest, ScoringSignals> + scoring_signals_provider; + ScoringAsyncClientMock scoring_client; + BuyerFrontEndAsyncClientFactoryMock buyer_front_end_async_client_factory_mock; + BuyerBidsResponseMap expected_buyer_bids; + std::unique_ptr<server_common::MockKeyFetcherManager> key_fetcher_manager = + std::make_unique<server_common::MockKeyFetcherManager>(); + EXPECT_CALL(*key_fetcher_manager, GetPrivateKey) + .WillRepeatedly(Return(GetPrivateKey())); + EXPECT_CALL(*key_fetcher_manager, GetPublicKey) + .WillOnce(Return(google::cmrt::sdk::public_key_service::v1::PublicKey())); + MockCryptoClientWrapper crypto_client; + SetupMockCrytoClient(crypto_client); + auto [request_with_context, clients] = + GetSelectAdRequestAndClientRegistryForTest( + CLIENT_TYPE_ANDROID, kNonZeroBidValue, scoring_signals_provider, + scoring_client, buyer_front_end_async_client_factory_mock, + key_fetcher_manager.get(), expected_buyer_bids, kSellerOriginDomain, + /*expect_all_buyers_solicited=*/true, kTestTopLevelSellerOriginDomain, + /*enable_reporting=*/false, + /*force_set_modified_bid_to_zero=*/false, + {&crypto_client, + EncryptionCloudPlatform::ENCRYPTION_CLOUD_PLATFORM_GCP}); + + SelectAdResponse encrypted_response = + RunReactorRequest<SelectAdReactorForApp>( + this->config_, clients, request_with_context.select_ad_request); + ASSERT_FALSE(encrypted_response.auction_result_ciphertext().empty()); + + // Decrypt the response. + absl::string_view decrypted_response = + encrypted_response.auction_result_ciphertext(); + + // Expect the payload to be of length that is a power of 2. + const size_t payload_size = decrypted_response.size(); + int log_2_payload = log2(payload_size); + EXPECT_EQ(payload_size, 1 << log_2_payload); + EXPECT_GE(payload_size, kMinAuctionResultBytes); + + // Decompress the encoded response. + absl::StatusOr<std::string> decompressed_response = + UnframeAndDecompressAuctionResult(decrypted_response); + EXPECT_TRUE(decompressed_response.ok()) + << decompressed_response.status().message(); + AuctionResult deserialized_auction_result; + EXPECT_TRUE(deserialized_auction_result.ParseFromArray( + decompressed_response->data(), decompressed_response->size())); + EXPECT_EQ(deserialized_auction_result.ad_type(), + AdType::AD_TYPE_PROTECTED_AUDIENCE_AD); + + // Validate chaff bit is not set and error is not populated. + EXPECT_FALSE(deserialized_auction_result.is_chaff()); + EXPECT_FALSE(deserialized_auction_result.has_error()); + + // Validate server component auction fields.
+ EXPECT_EQ(deserialized_auction_result.auction_params().component_seller(), + kSellerOriginDomain); + EXPECT_EQ( + deserialized_auction_result.auction_params().ciphertext_generation_id(), + kSampleGenerationId); +} + TYPED_TEST(SelectAdReactorForAppTest, VerifyChaffedResponse) { MockAsyncProvider<ScoringSignalsRequest, ScoringSignals> scoring_signals_provider; @@ -291,15 +349,10 @@ TYPED_TEST(SelectAdReactorForAppTest, VerifyChaffedResponse) { EXPECT_EQ(payload_size, 1 << log_2_payload); EXPECT_GE(payload_size, kMinAuctionResultBytes); - // Unframe the framed response. - absl::StatusOr<server_common::DecodedRequest> unframed_response = - server_common::DecodeRequestPayload(*decrypted_response); - ASSERT_TRUE(unframed_response.ok()) << unframed_response.status().message(); - - // Decompress the encoded response. + // Unframe and decompress the framed response. absl::StatusOr<std::string> decompressed_response = - GzipDecompress(unframed_response->compressed_data); - EXPECT_TRUE(decompressed_response.ok()) + UnframeAndDecompressAuctionResult(*decrypted_response); + ASSERT_TRUE(decompressed_response.ok()) << decompressed_response.status().message(); AuctionResult deserialized_auction_result; EXPECT_TRUE(deserialized_auction_result.ParseFromArray( @@ -362,16 +415,10 @@ TYPED_TEST(SelectAdReactorForAppTest, VerifyErrorForProtoDecodingFailure) { EXPECT_EQ(payload_size, 1 << log_2_payload); EXPECT_GE(payload_size, kMinAuctionResultBytes); - // Unframe the framed response. - absl::StatusOr<server_common::DecodedRequest> unframed_response = - server_common::DecodeRequestPayload(*decrypted_response); - ASSERT_TRUE(unframed_response.ok()) << unframed_response.status().message(); - // Decompress the encoded response. absl::StatusOr<std::string> decompressed_response = - GzipDecompress(unframed_response->compressed_data); - EXPECT_TRUE(decompressed_response.ok()) - << decompressed_response.status().message(); + UnframeAndDecompressAuctionResult(*decrypted_response); + ASSERT_TRUE(decompressed_response.ok()); // Validate the error message returned in the response.
AuctionResult deserialized_auction_result; @@ -497,7 +544,7 @@ class SelectAdReactorPASTest : public ::testing::Test { std::optional app_install_signals = std::nullopt) { auto [request, protected_auction_input] = CreateRawSelectAdRequest( seller_origin_domain, add_interest_group, add_protected_app_signals, - std::move(app_install_signals)); + app_install_signals); auto [encrypted_request, context] = GetProtoEncodedEncryptedInputAndOhttpContext(protected_auction_input); *request.mutable_protected_auction_ciphertext() = @@ -531,9 +578,11 @@ class SelectAdReactorPASTest : public ::testing::Test { BuyerBidsResponseMap expected_buyer_bids_; std::unique_ptr key_fetcher_manager_ = std::make_unique(); - ClientRegistry clients_{scoring_signals_provider_, scoring_client_, + ClientRegistry clients_{scoring_signals_provider_, + scoring_client_, buyer_front_end_async_client_factory_mock_, *key_fetcher_manager_, + /* *crypto_client = */ nullptr, std::make_unique( std::make_unique())}; diff --git a/services/seller_frontend_service/select_ad_reactor_test.cc b/services/seller_frontend_service/select_ad_reactor_test.cc index d985d9f9..575a07df 100644 --- a/services/seller_frontend_service/select_ad_reactor_test.cc +++ b/services/seller_frontend_service/select_ad_reactor_test.cc @@ -48,7 +48,7 @@ #include "services/seller_frontend_service/select_ad_reactor_web.h" #include "services/seller_frontend_service/util/select_ad_reactor_test_utils.h" #include "services/seller_frontend_service/util/web_utils.h" -#include "src/cpp/encryption/key_fetcher/mock/mock_key_fetcher_manager.h" +#include "src/encryption/key_fetcher/mock/mock_key_fetcher_manager.h" namespace privacy_sandbox::bidding_auction_servers { namespace { @@ -56,10 +56,6 @@ namespace { inline constexpr absl::string_view kProtectedAudienceInput = "ProtectedAudienceInput"; -inline constexpr char kEurosIsoCode[] = "EUR"; -inline constexpr char kUsdIsoCode[] = "USD"; -inline constexpr char kYenIsoCode[] = "JPY"; - using ::google::protobuf::TextFormat; using ::testing::_; using ::testing::Return; @@ -97,7 +93,7 @@ class SellerFrontEndServiceTest : public ::testing::Test { context_ = std::make_unique( std::move(context)); config_.SetFlagForTest("", CONSENTED_DEBUG_TOKEN); - config_.SetFlagForTest(kFalse, ENABLE_PROTECTED_APP_SIGNALS); + config_.SetFlagForTest(kTrue, ENABLE_PROTECTED_APP_SIGNALS); config_.SetFlagForTest(kTrue, ENABLE_PROTECTED_AUDIENCE); EXPECT_CALL(key_fetcher_manager_, GetPrivateKey) .Times(testing::AnyNumber()) @@ -135,9 +131,9 @@ class SellerFrontEndServiceTest : public ::testing::Test { void SetProtectedAuctionCipherText(const std::string& ciphertext) { const auto* descriptor = protected_auction_input_.GetDescriptor(); if (descriptor->name() == kProtectedAudienceInput) { - *request_.mutable_protected_audience_ciphertext() = std::move(ciphertext); + *request_.mutable_protected_audience_ciphertext() = ciphertext; } else { - *request_.mutable_protected_auction_ciphertext() = std::move(ciphertext); + *request_.mutable_protected_auction_ciphertext() = ciphertext; } } @@ -244,8 +240,12 @@ TYPED_TEST(SellerFrontEndServiceTest, FetchesBidsFromAllBuyers) { std::make_unique()); // Client Registry - ClientRegistry clients{scoring_provider, scoring_client, buyer_clients, - this->key_fetcher_manager_, std::move(async_reporter)}; + ClientRegistry clients{scoring_provider, + scoring_client, + buyer_clients, + this->key_fetcher_manager_, + /* crypto_client = */ nullptr, + std::move(async_reporter)}; Response response = RunRequest(this->config_, clients, 
this->request_); @@ -343,8 +343,12 @@ TYPED_TEST(SellerFrontEndServiceTest, std::make_unique()); // Client Registry - ClientRegistry clients{scoring_provider, scoring_client, buyer_clients, - this->key_fetcher_manager_, std::move(async_reporter)}; + ClientRegistry clients{scoring_provider, + scoring_client, + buyer_clients, + this->key_fetcher_manager_, + /* crypto_client = */ nullptr, + std::move(async_reporter)}; Response response = RunRequest( this->config_, clients, this->request_, /*max_buyers_solicited=*/3); @@ -423,8 +427,12 @@ TYPED_TEST(SellerFrontEndServiceTest, FetchesTwoBidsGivenThreeBuyers) { std::make_unique()); // Client Registry - ClientRegistry clients{scoring_provider, scoring_client, buyer_clients, - this->key_fetcher_manager_, std::move(async_reporter)}; + ClientRegistry clients{scoring_provider, + scoring_client, + buyer_clients, + this->key_fetcher_manager_, + /* crypto_client = */ nullptr, + std::move(async_reporter)}; Response response = RunRequest(this->config_, clients, this->request_); @@ -493,8 +501,12 @@ TYPED_TEST(SellerFrontEndServiceTest, std::make_unique( std::make_unique()); // Client Registry - ClientRegistry clients{scoring_provider, scoring_client, buyer_clients, - this->key_fetcher_manager_, std::move(async_reporter)}; + ClientRegistry clients{scoring_provider, + scoring_client, + buyer_clients, + this->key_fetcher_manager_, + /* crypto_client = */ nullptr, + std::move(async_reporter)}; this->PopulateProtectedAudienceInputCiphertextOnRequest(); Response response = @@ -509,7 +521,7 @@ TYPED_TEST(SellerFrontEndServiceTest, TYPED_TEST(SellerFrontEndServiceTest, FetchesScoringSignalsWithBidResponseAdRenderUrls) { this->SetupRequest(/*num_buyers=*/2, /*set_buyer_egid=*/false, - /*set_seller_egid=*/true, /*buyer_currency=*/kUsdIsoCode); + /*set_seller_egid=*/true, /*seller_currency=*/kUsdIsoCode); // Scoring Client ScoringAsyncClientMock scoring_client; // Expects no calls because we do not finish fetching the decision logic @@ -540,7 +552,7 @@ TYPED_TEST(SellerFrontEndServiceTest, SetupScoringProviderMock( /*provider=*/scoring_signals_provider, /*expected_buyer_bids=*/expected_buyer_bids, - /*ad_render_urls=*/std::nullopt, + /*scoring_signals_value=*/std::nullopt, /*repeated_get_allowed=*/false, /*server_error_to_return=*/std::nullopt, /*expected_num_bids=*/-1, @@ -553,9 +565,10 @@ TYPED_TEST(SellerFrontEndServiceTest, std::make_unique( std::make_unique()); // Client Registry - ClientRegistry clients{scoring_signals_provider, scoring_client, - buyer_clients, this->key_fetcher_manager_, - std::move(async_reporter)}; + ClientRegistry clients{ + scoring_signals_provider, scoring_client, buyer_clients, + this->key_fetcher_manager_, + /* crypto_client = */ nullptr, std::move(async_reporter)}; this->PopulateProtectedAudienceInputCiphertextOnRequest(); Response response = RunRequest(this->config_, clients, this->request_); @@ -645,9 +658,10 @@ TYPED_TEST(SellerFrontEndServiceTest, ScoresAdsAfterGettingSignals) { std::make_unique( std::make_unique()); // Client Registry - ClientRegistry clients{scoring_signals_provider, scoring_client, - buyer_clients, this->key_fetcher_manager_, - std::move(async_reporter)}; + ClientRegistry clients{ + scoring_signals_provider, scoring_client, buyer_clients, + this->key_fetcher_manager_, + /* crypto_client = */ nullptr, std::move(async_reporter)}; Response response = RunRequest(this->config_, clients, this->request_); } @@ -690,9 +704,10 @@ TYPED_TEST(SellerFrontEndServiceTest, DoesNotScoreAdsAfterGettingEmptySignals) { 
std::make_unique( std::make_unique()); // Client Registry - ClientRegistry clients{scoring_signals_provider, scoring_client, - buyer_clients, this->key_fetcher_manager_, - std::move(async_reporter)}; + ClientRegistry clients{ + scoring_signals_provider, scoring_client, buyer_clients, + this->key_fetcher_manager_, + /* crypto_client = */ nullptr, std::move(async_reporter)}; Response response = RunRequest(this->config_, clients, this->request_); // Decrypt to examine whether the result really is chaff. @@ -766,8 +781,7 @@ TYPED_TEST(SellerFrontEndServiceTest, ReturnsWinningAdAfterScoring) { EXPECT_EQ(decoded_buyer_input.interest_groups_size(), 1); for (const BuyerInput::InterestGroup& interest_group : decoded_buyer_input.interest_groups()) { - if (std::strcmp(interest_group.name().c_str(), - bid.interest_group_name().c_str())) { + if (interest_group.name() == bid.interest_group_name()) { EXPECT_EQ(bid.join_count(), interest_group.browser_signals().join_count()); EXPECT_EQ(bid.recency(), @@ -799,9 +813,10 @@ TYPED_TEST(SellerFrontEndServiceTest, ReturnsWinningAdAfterScoring) { std::make_unique()); // Client Registry - ClientRegistry clients{scoring_signals_provider, scoring_client, - buyer_clients, this->key_fetcher_manager_, - std::move(async_reporter)}; + ClientRegistry clients{ + scoring_signals_provider, scoring_client, buyer_clients, + this->key_fetcher_manager_, + /* crypto_client = */ nullptr, std::move(async_reporter)}; Response response = RunRequest(this->config_, clients, this->request_); scoring_done.Wait(); @@ -832,6 +847,10 @@ TYPED_TEST(SellerFrontEndServiceTest, std::string decision_logic = "function scoreAds(){}"; const int num_buyers = 2; + const int num_ad_with_bids = 40; + const int num_mismatched = 2; + const int num_matched = num_ad_with_bids - num_mismatched; // By setting the buyer_currency to USD, we ensure the bids will undergo // currency checking, since they will have currencies set on them below. @@ -855,64 +874,125 @@ TYPED_TEST(SellerFrontEndServiceTest, ASSERT_EQ(decoded_buyer_inputs.size(), num_buyers); for (const auto& [buyer, buyerInput] : decoded_buyer_inputs) { EXPECT_EQ(buyerInput.interest_groups_size(), 1); - // Set the bid currency on the AdWithBids to EUR. - auto actual_bids_for_mock_to_return = BuildGetBidsResponseWithSingleAd( - /*ad_url = */ absl::StrCat(buyer_to_ad_url.at(buyer), - "/wrong_currency_eur"), - /*interest_group = */ buyerInput.interest_groups().Get(0).name(), - /*bid_value = */ 1, - /*enable_event_level_debug_reporting = */ false, - /*number_ad_component_render_urls = */ kDefaultNumAdComponents, - /*bid_currency = */ kEurosIsoCode); // This is NOT USD! - // The above AwB has a mismatched currency: - // euros do not match specified buyer currency of USD. - // Neither does yen below. - // So the below AwBs which are USD should be the only ones not being - // filtered out by buyer currency match checking.
- auto first_USD_AwB = BuildNewAdWithBid( - absl::StrCat(buyer_to_ad_url.at(buyer), "/right_currency_usd_1"), - std::move(buyerInput.interest_groups().Get(0).name()), - /*bid_value=*/1, - /*enable_event_level_debug_reporting=*/false, - /*number_ad_component_render_urls=*/kDefaultNumAdComponents, - kUsdIsoCode); - auto yen_AwB = BuildNewAdWithBid( - absl::StrCat(buyer_to_ad_url.at(buyer), "/wrong_currency_yen"), - std::move(buyerInput.interest_groups().Get(0).name()), - /*bid_value=*/1, - /*enable_event_level_debug_reporting=*/false, - /*number_ad_component_render_urls=*/kDefaultNumAdComponents, - kYenIsoCode); - auto second_USD_AwB = BuildNewAdWithBid( - absl::StrCat(buyer_to_ad_url.at(buyer), "/right_currency_usd_2"), - std::move(buyerInput.interest_groups().Get(0).name()), - /*bid_value=*/1, - /*enable_event_level_debug_reporting=*/false, - /*number_ad_component_render_urls=*/kDefaultNumAdComponents, - kUsdIsoCode); - // Add all AwBs so all are returned by mock. - actual_bids_for_mock_to_return.mutable_bids()->Add()->CopyFrom( - first_USD_AwB); - actual_bids_for_mock_to_return.mutable_bids()->Add()->CopyFrom(yen_AwB); - actual_bids_for_mock_to_return.mutable_bids()->Add()->CopyFrom( - second_USD_AwB); - // In fetching scoring signals we expect only the bids with matching - // currency (and they will be in reversed order). + std::vector<AdWithBid> ads_with_bids = GetAdWithBidsInMultipleCurrencies( + num_ad_with_bids, num_mismatched, + /*matching_currency=*/kUsdIsoCode, + /*mismatching_currency=*/kEurosIsoCode, buyer_to_ad_url.at(buyer), + buyerInput.interest_groups().Get(0).name()); + std::vector<ProtectedAppSignalsAdWithBid> pas_ads_with_bids = + GetPASAdWithBidsInMultipleCurrencies( + num_ad_with_bids, num_mismatched, + /*matching_currency=*/kUsdIsoCode, + /*mismatching_currency=*/kEurosIsoCode, buyer_to_ad_url.at(buyer)); + GetBidsResponse::GetBidsRawResponse actual_bids_for_mock_to_return; GetBidsResponse::GetBidsRawResponse expected_bids_once_filtered; - expected_bids_once_filtered.mutable_bids()->Add()->CopyFrom(second_USD_AwB); - expected_bids_once_filtered.mutable_bids()->Add()->CopyFrom(first_USD_AwB); + // Add all AwBs so all are returned by mock. + for (const auto& ad_with_bid : ads_with_bids) { + actual_bids_for_mock_to_return.mutable_bids()->Add()->CopyFrom( + ad_with_bid); + } + for (const auto& pas_ad_with_bid : pas_ads_with_bids) { + actual_bids_for_mock_to_return.mutable_protected_app_signals_bids() + ->Add() + ->CopyFrom(pas_ad_with_bid); + } + + // The check function in scoringSignalsMock cares about order. + // Thus we construct the expected bids list in the order in which it will + // come from the reactor. If nothing else this demonstrates that the + // swap-and-delete algorithm works as expected. + // First for protected audience. + expected_bids_once_filtered.mutable_bids()->Add()->CopyFrom( + ads_with_bids[0]); + expected_bids_once_filtered.mutable_bids()->Add()->CopyFrom( + ads_with_bids[39]); + expected_bids_once_filtered.mutable_bids()->Add()->CopyFrom( + ads_with_bids[2]); + expected_bids_once_filtered.mutable_bids()->Add()->CopyFrom( + ads_with_bids[38]); + for (int i = 4; i < num_matched; i++) { + expected_bids_once_filtered.mutable_bids()->Add()->CopyFrom( + ads_with_bids[i]); + } + // Then PAS.
+ expected_bids_once_filtered.mutable_protected_app_signals_bids() + ->Add() + ->CopyFrom(pas_ads_with_bids[0]); + expected_bids_once_filtered.mutable_protected_app_signals_bids() + ->Add() + ->CopyFrom(pas_ads_with_bids[39]); + expected_bids_once_filtered.mutable_protected_app_signals_bids() + ->Add() + ->CopyFrom(pas_ads_with_bids[2]); + expected_bids_once_filtered.mutable_protected_app_signals_bids() + ->Add() + ->CopyFrom(pas_ads_with_bids[38]); + for (int i = 4; i < num_matched; i++) { + expected_bids_once_filtered.mutable_protected_app_signals_bids() + ->Add() + ->CopyFrom(pas_ads_with_bids[i]); + } - // Check that everything was constructed correctly. - ASSERT_EQ(actual_bids_for_mock_to_return.bids_size(), 4); + ASSERT_EQ(expected_bids_once_filtered.bids_size(), num_matched); + ASSERT_EQ(expected_bids_once_filtered.protected_app_signals_bids_size(), + num_matched); + // Check a few. + ASSERT_EQ(expected_bids_once_filtered.bids(0).render(), + absl::StrCat(buyer_to_ad_url.at(buyer), "/0_USD")); + ASSERT_EQ(expected_bids_once_filtered.bids(1).render(), + absl::StrCat(buyer_to_ad_url.at(buyer), "/39_USD")); + ASSERT_EQ(expected_bids_once_filtered.bids(2).render(), + absl::StrCat(buyer_to_ad_url.at(buyer), "/2_USD")); + ASSERT_EQ(expected_bids_once_filtered.bids(3).render(), + absl::StrCat(buyer_to_ad_url.at(buyer), "/38_USD")); + ASSERT_EQ(expected_bids_once_filtered.bids(4).render(), + absl::StrCat(buyer_to_ad_url.at(buyer), "/4_USD")); + ASSERT_EQ(expected_bids_once_filtered.bids(37).render(), + absl::StrCat(buyer_to_ad_url.at(buyer), "/37_USD")); + // And for PAS. + ASSERT_EQ( + expected_bids_once_filtered.protected_app_signals_bids(0).render(), + absl::StrCat(buyer_to_ad_url.at(buyer), "/0_USD")); + ASSERT_EQ( + expected_bids_once_filtered.protected_app_signals_bids(1).render(), + absl::StrCat(buyer_to_ad_url.at(buyer), "/39_USD")); + ASSERT_EQ( + expected_bids_once_filtered.protected_app_signals_bids(2).render(), + absl::StrCat(buyer_to_ad_url.at(buyer), "/2_USD")); + ASSERT_EQ( + expected_bids_once_filtered.protected_app_signals_bids(3).render(), + absl::StrCat(buyer_to_ad_url.at(buyer), "/38_USD")); + ASSERT_EQ( + expected_bids_once_filtered.protected_app_signals_bids(4).render(), + absl::StrCat(buyer_to_ad_url.at(buyer), "/4_USD")); + ASSERT_EQ( + expected_bids_once_filtered.protected_app_signals_bids(37).render(), + absl::StrCat(buyer_to_ad_url.at(buyer), "/37_USD")); + + // Check that a few were constructed correctly. 
+ ASSERT_EQ(actual_bids_for_mock_to_return.bids_size(), num_ad_with_bids); + ASSERT_EQ(actual_bids_for_mock_to_return.protected_app_signals_bids_size(), + num_ad_with_bids); ASSERT_EQ(actual_bids_for_mock_to_return.bids(0).render(), - absl::StrCat(buyer_to_ad_url.at(buyer), "/wrong_currency_eur")); + absl::StrCat(buyer_to_ad_url.at(buyer), "/0_USD")); ASSERT_EQ(actual_bids_for_mock_to_return.bids(1).render(), - absl::StrCat(buyer_to_ad_url.at(buyer), "/right_currency_usd_1")); + absl::StrCat(buyer_to_ad_url.at(buyer), "/1_EUR")); ASSERT_EQ(actual_bids_for_mock_to_return.bids(2).render(), - absl::StrCat(buyer_to_ad_url.at(buyer), "/wrong_currency_yen")); + absl::StrCat(buyer_to_ad_url.at(buyer), "/2_USD")); ASSERT_EQ(actual_bids_for_mock_to_return.bids(3).render(), - absl::StrCat(buyer_to_ad_url.at(buyer), "/right_currency_usd_2")); - ASSERT_EQ(expected_bids_once_filtered.bids_size(), 2); + absl::StrCat(buyer_to_ad_url.at(buyer), "/3_EUR")); + ASSERT_EQ( + actual_bids_for_mock_to_return.protected_app_signals_bids(0).render(), + absl::StrCat(buyer_to_ad_url.at(buyer), "/0_USD")); + ASSERT_EQ( + actual_bids_for_mock_to_return.protected_app_signals_bids(1).render(), + absl::StrCat(buyer_to_ad_url.at(buyer), "/1_EUR")); + ASSERT_EQ( + actual_bids_for_mock_to_return.protected_app_signals_bids(2).render(), + absl::StrCat(buyer_to_ad_url.at(buyer), "/2_USD")); + ASSERT_EQ( + actual_bids_for_mock_to_return.protected_app_signals_bids(3).render(), + absl::StrCat(buyer_to_ad_url.at(buyer), "/3_EUR")); SetupBuyerClientMock(buyer, buyer_clients, actual_bids_for_mock_to_return); expected_filtered_buyer_bids_map.try_emplace( @@ -950,9 +1030,7 @@ TYPED_TEST(SellerFrontEndServiceTest, ScoreAdsResponse::ScoreAdsRawResponse response; float i = 1; ErrorAccumulator error_accumulator; - // Exactly two AdWithBids per buyer (of which there are two) should have - // survived filtering. - EXPECT_EQ(score_ads_request->ad_bids_size(), num_buyers * 2); + EXPECT_EQ(score_ads_request->ad_bids_size(), num_buyers * num_matched); // Last ad_with_bid_metadata wins. for (const auto& ad_with_bid_metadata : score_ads_request->ad_bids()) { EXPECT_FALSE(ad_with_bid_metadata.render().empty()); @@ -969,9 +1047,8 @@ TYPED_TEST(SellerFrontEndServiceTest, EXPECT_EQ(decoded_buyer_input.interest_groups_size(), 1); for (const BuyerInput::InterestGroup& interest_group : decoded_buyer_input.interest_groups()) { - if (std::strcmp( - interest_group.name().c_str(), - ad_with_bid_metadata.interest_group_name().c_str())) { + if (interest_group.name() == + ad_with_bid_metadata.interest_group_name()) { EXPECT_EQ(ad_with_bid_metadata.join_count(), interest_group.browser_signals().join_count()); EXPECT_EQ(ad_with_bid_metadata.recency(), @@ -1005,9 +1082,10 @@ TYPED_TEST(SellerFrontEndServiceTest, std::make_unique()); // Client Registry - ClientRegistry clients{scoring_signals_provider, scoring_client, - buyer_clients, this->key_fetcher_manager_, - std::move(async_reporter)}; + ClientRegistry clients{ + scoring_signals_provider, scoring_client, buyer_clients, + this->key_fetcher_manager_, + /* crypto_client = */ nullptr, std::move(async_reporter)}; // Filtered bids checked above in the scoring signals provider mock. 
Response response = RunRequest(this->config_, clients, this->request_); @@ -1085,11 +1163,11 @@ TYPED_TEST(SellerFrontEndServiceTest, ReturnsBiddingGroups) { for (const auto& [local_buyer, unused] : this->protected_auction_input_.buyer_input()) { AdUrl url = buyer_to_ad_url.at(local_buyer); - AdWithBid bid1 = BuildNewAdWithBid(url, std::move(expected_ig_1), 1); - AdWithBid bid2 = BuildNewAdWithBid(url, std::move(expected_ig_2), 10); - AdWithBid bid3 = BuildNewAdWithBid(url, std::move(unexpected_ig_1), 0); + AdWithBid bid1 = BuildNewAdWithBid(url, expected_ig_1, 1); + AdWithBid bid2 = BuildNewAdWithBid(url, expected_ig_2, 10); + AdWithBid bid3 = BuildNewAdWithBid(url, unexpected_ig_1, 0); GetBidsResponse::GetBidsRawResponse response; - auto mutable_bids = response.mutable_bids(); + auto* mutable_bids = response.mutable_bids(); mutable_bids->Add(std::move(bid1)); mutable_bids->Add(std::move(bid2)); mutable_bids->Add(std::move(bid3)); @@ -1139,9 +1217,10 @@ TYPED_TEST(SellerFrontEndServiceTest, ReturnsBiddingGroups) { std::make_unique()); // Client Registry - ClientRegistry clients{scoring_signals_provider, scoring_client, - buyer_clients, this->key_fetcher_manager_, - std::move(async_reporter)}; + ClientRegistry clients{ + scoring_signals_provider, scoring_client, buyer_clients, + this->key_fetcher_manager_, + /* crypto_client = */ nullptr, std::move(async_reporter)}; auto [encrypted_protected_auction_input, encrypted_context] = GetCborEncodedEncryptedInputAndOhttpContext( this->protected_auction_input_); @@ -1255,9 +1334,10 @@ TYPED_TEST(SellerFrontEndServiceTest, PerformsDebugReportingAfterScoring) { }); // Client Registry - ClientRegistry clients{scoring_signals_provider, scoring_client, - buyer_clients, this->key_fetcher_manager_, - std::move(async_reporter)}; + ClientRegistry clients{ + scoring_signals_provider, scoring_client, buyer_clients, + this->key_fetcher_manager_, + /* crypto_client = */ nullptr, std::move(async_reporter)}; Response response = RunRequest(this->config_, clients, this->request_); @@ -1317,9 +1397,10 @@ TYPED_TEST(SellerFrontEndServiceTest, std::make_unique()); EXPECT_CALL(*async_reporter, DoReport).Times(0); // Client Registry - ClientRegistry clients{scoring_signals_provider, scoring_client, - buyer_clients, this->key_fetcher_manager_, - std::move(async_reporter)}; + ClientRegistry clients{ + scoring_signals_provider, scoring_client, buyer_clients, + this->key_fetcher_manager_, + /* crypto_client = */ nullptr, std::move(async_reporter)}; Response response = RunRequest(this->config_, clients, this->request_); scoring_done.WaitForNotification(); @@ -1385,12 +1466,89 @@ TYPED_TEST(SellerFrontEndServiceTest, std::make_unique()); // Client Registry - ClientRegistry clients{scoring_signals_provider, scoring_client, - buyer_clients, this->key_fetcher_manager_, - std::move(async_reporter)}; + ClientRegistry clients{ + scoring_signals_provider, scoring_client, buyer_clients, + this->key_fetcher_manager_, + /* crypto_client = */ nullptr, std::move(async_reporter)}; + Response response = + RunRequest(this->config_, clients, this->request_); + scoring_done.WaitForNotification(); +} + +TYPED_TEST(SellerFrontEndServiceTest, PassesFieldsForServerComponentAuction) { + std::string top_level_seller = "top_level_seller"; + this->SetupRequest(/*num_buyers=*/2); + this->request_.mutable_auction_config()->set_top_level_seller( + top_level_seller); + this->request_.mutable_auction_config()->set_top_level_cloud_platform( + EncryptionCloudPlatform::ENCRYPTION_CLOUD_PLATFORM_GCP); + 
absl::flat_hash_map buyer_to_ad_url = + BuildBuyerWinningAdUrlMap(this->request_); + + // Buyer Clients + BuyerFrontEndAsyncClientFactoryMock buyer_clients; + absl::flat_hash_map bids; + BuyerBidsResponseMap expected_buyer_bids; + for (const auto& [buyer, unused] : + this->protected_auction_input_.buyer_input()) { + AdUrl url = buyer_to_ad_url.at(buyer); + GetBidsResponse::GetBidsRawResponse response = + BuildGetBidsResponseWithSingleAd(url); + bids.insert_or_assign(url, response.bids().at(0)); + SetupBuyerClientMock(buyer, buyer_clients, response, + /*repeated_get_allowed = */ false, + /*expect_all_buyers_solicited =*/true, + /*num_buyers_solicited =*/nullptr, top_level_seller); + expected_buyer_bids.try_emplace( + buyer, std::make_unique(response)); + } + + // Scoring signal provider + MockAsyncProvider + scoring_signals_provider; + // Scoring signals must be nonzero for scoring to be attempted. + std::string scoring_signals_value = + R"JSON({"someAdRenderUrl":{"someKey":"someValue"}})JSON"; + SetupScoringProviderMock(scoring_signals_provider, expected_buyer_bids, + scoring_signals_value); + + // Scoring Client + ScoringAsyncClientMock scoring_client; + absl::Notification scoring_done; + EXPECT_CALL(scoring_client, ExecuteInternal) + .Times(1) + .WillOnce([&top_level_seller, &scoring_done]( + std::unique_ptr + score_ads_request, + const RequestMetadata& metadata, + ScoreAdsDoneCallback on_done, absl::Duration timeout) { + EXPECT_EQ(top_level_seller, score_ads_request->top_level_seller()); + std::move(on_done)( + std::make_unique()); + scoring_done.Notify(); + return absl::OkStatus(); + }); + + // Reporting Client. + std::unique_ptr async_reporter = + std::make_unique( + std::make_unique()); + + google::cmrt::sdk::public_key_service::v1::PublicKey key; + key.set_key_id("key_id"); + EXPECT_CALL(this->key_fetcher_manager_, GetPublicKey).WillOnce(Return(key)); + MockCryptoClientWrapper crypto_client; + SetupMockCrytoClient(crypto_client); + + // Client Registry + ClientRegistry clients{ + scoring_signals_provider, scoring_client, buyer_clients, + this->key_fetcher_manager_, &crypto_client, std::move(async_reporter)}; Response response = RunRequest(this->config_, clients, this->request_); scoring_done.WaitForNotification(); + ASSERT_FALSE(response.auction_result_ciphertext().empty()); + EXPECT_EQ(response.key_id(), key.key_id()); } } // namespace diff --git a/services/seller_frontend_service/select_ad_reactor_web.cc b/services/seller_frontend_service/select_ad_reactor_web.cc index 44ddb7d3..e6d8d57c 100644 --- a/services/seller_frontend_service/select_ad_reactor_web.cc +++ b/services/seller_frontend_service/select_ad_reactor_web.cc @@ -23,9 +23,10 @@ #include "services/common/compression/gzip.h" #include "services/common/util/request_response_constants.h" #include "services/seller_frontend_service/util/framing_utils.h" +#include "services/seller_frontend_service/util/proto_mapping_util.h" #include "services/seller_frontend_service/util/web_utils.h" -#include "src/cpp/communication/encoding_utils.h" -#include "src/cpp/util/status_macro/status_macros.h" +#include "src/communication/encoding_utils.h" +#include "src/util/status_macro/status_macros.h" namespace privacy_sandbox::bidding_auction_servers { @@ -42,7 +43,7 @@ namespace { template T GetDecodedProtectedAuctionInputHelper(absl::string_view encoded_data, bool fail_fast, - ReportErrorSignature ReportError, + const ReportErrorSignature& ReportError, ErrorAccumulator& error_accumulator) { absl::StatusOr decoded_request = 
server_common::DecodeRequestPayload(encoded_data); @@ -66,56 +67,55 @@ SelectAdReactorForWeb::SelectAdReactorForWeb( : SelectAdReactor(context, request, response, clients, config_client, fail_fast, max_buyers_solicited) {} -BiddingGroupsMap SelectAdReactorForWeb::GetBiddingGroups() { - BiddingGroupsMap bidding_groups; - for (const auto& [buyer, ad_with_bids] : shared_buyer_bids_map_) { - // Mapping from buyer to interest groups that are associated with non-zero - // bids. - absl::flat_hash_set buyer_interest_groups; - for (const auto& ad_with_bid : ad_with_bids->bids()) { - if (ad_with_bid.bid() > 0) { - buyer_interest_groups.insert(ad_with_bid.interest_group_name()); - } - } - const auto& buyer_input = buyer_inputs_->at(buyer); - AuctionResult::InterestGroupIndex ar_interest_group_index; - int ig_index = 0; - for (const auto& interest_group : buyer_input.interest_groups()) { - // If the interest group name is one of the groups returned by the bidding - // service then record its index. - if (buyer_interest_groups.contains(interest_group.name())) { - ar_interest_group_index.add_index(ig_index); - } - ig_index++; - } - bidding_groups.try_emplace(buyer, std::move(ar_interest_group_index)); - } - return bidding_groups; -} - absl::StatusOr SelectAdReactorForWeb::GetNonEncryptedResponse( const std::optional& high_score, const std::optional& error) { auto error_handler = absl::bind_front(&SelectAdReactorForWeb::FinishWithStatus, this); std::string encoded_data; + const auto decode_lambda = [&encoded_data]() { + auto result = CborDecodeAuctionResultToProto(encoded_data); + return result.ok() ? result->DebugString() : result.status().ToString(); + }; + if (auction_scope_ == AuctionScope::AUCTION_SCOPE_DEVICE_COMPONENT_MULTI_SELLER) { PS_ASSIGN_OR_RETURN( encoded_data, - EncodeComponent(request_->auction_config().top_level_seller(), - high_score, GetBiddingGroups(), error, error_handler)); + EncodeComponent( + request_->auction_config().top_level_seller(), high_score, + GetBiddingGroups(shared_buyer_bids_map_, *buyer_inputs_), error, + error_handler)); + PS_VLOG(kPlain, log_context_) << "AuctionResult:\n" << (decode_lambda()); + } else if (auction_scope_ == + AuctionScope::AUCTION_SCOPE_SERVER_COMPONENT_MULTI_SELLER) { + // If this is a server component auction, serialize as proto. + AuctionResult auction_result; + if (high_score.has_value()) { + auction_result = AdScoreToAuctionResult( + high_score, GetBiddingGroups(shared_buyer_bids_map_, *buyer_inputs_), + error, auction_scope_, request_->auction_config().seller(), + protected_auction_input_, + request_->auction_config().top_level_seller()); + } else { + auction_result = AdScoreToAuctionResult( + high_score, std::nullopt, error, auction_scope_, + request_->auction_config().seller(), protected_auction_input_); + } + // Serialize the data to a byte array. + encoded_data = auction_result.SerializeAsString(); + + PS_VLOG(kPlain, log_context_) << "AuctionResult:\n" + << auction_result.DebugString(); } else { - PS_ASSIGN_OR_RETURN(encoded_data, Encode(high_score, GetBiddingGroups(), - error, error_handler)); + // SINGLE_SELLER or SERVER_TOP_LEVEL auction. + PS_ASSIGN_OR_RETURN( + encoded_data, + Encode(high_score, + GetBiddingGroups(shared_buyer_bids_map_, *buyer_inputs_), error, + error_handler)); + PS_VLOG(kPlain, log_context_) << "AuctionResult:\n" << (decode_lambda()); } - PS_VLOG(kPlain, log_context_) - << "AuctionResult:\n" - << ([&]() { - auto result = CborDecodeAuctionResultToProto(encoded_data); - return result.ok() ? 
result->DebugString() - : result.status().ToString(); - }()); absl::string_view data_to_compress = absl::string_view( reinterpret_cast(encoded_data.data()), encoded_data.size()); diff --git a/services/seller_frontend_service/select_ad_reactor_web.h b/services/seller_frontend_service/select_ad_reactor_web.h index 9a30b803..d957c838 100644 --- a/services/seller_frontend_service/select_ad_reactor_web.h +++ b/services/seller_frontend_service/select_ad_reactor_web.h @@ -55,10 +55,6 @@ class SelectAdReactorForWeb : public SelectAdReactor { absl::flat_hash_map GetDecodedBuyerinputs( const google::protobuf::Map& encoded_buyer_inputs) override; - - // Gets the bidding groups after scoring is done. - google::protobuf::Map - GetBiddingGroups(); }; } // namespace privacy_sandbox::bidding_auction_servers diff --git a/services/seller_frontend_service/select_ad_reactor_web_test.cc b/services/seller_frontend_service/select_ad_reactor_web_test.cc index e0e2305a..87543de6 100644 --- a/services/seller_frontend_service/select_ad_reactor_web_test.cc +++ b/services/seller_frontend_service/select_ad_reactor_web_test.cc @@ -77,11 +77,9 @@ class SelectAdReactorForWebTest : public ::testing::Test { SelectAdRequest& select_ad_request) { const auto* descriptor = protected_auction_input.GetDescriptor(); if (descriptor->name() == kProtectedAuctionInput) { - *select_ad_request.mutable_protected_auction_ciphertext() = - std::move(ciphertext); + *select_ad_request.mutable_protected_auction_ciphertext() = ciphertext; } else { - *select_ad_request.mutable_protected_audience_ciphertext() = - std::move(ciphertext); + *select_ad_request.mutable_protected_audience_ciphertext() = ciphertext; } } @@ -250,9 +248,12 @@ TYPED_TEST(SelectAdReactorForWebTest, VerifyLogContextPropagates) { std::unique_ptr async_reporter = std::make_unique( std::make_unique()); - ClientRegistry clients{scoring_signals_provider, scoring_client, + ClientRegistry clients{scoring_signals_provider, + scoring_client, buyer_front_end_async_client_factory_mock, - *key_fetcher_manager, std::move(async_reporter)}; + *key_fetcher_manager, + /* crypto_client= */ nullptr, + std::move(async_reporter)}; // Setup expectation on buyer client to receive appropriate log context from // SFE. @@ -278,7 +279,7 @@ TYPED_TEST(SelectAdReactorForWebTest, VerifyLogContextPropagates) { &MockGetBids](std::unique_ptr buyer) { EXPECT_CALL(*buyer, ExecuteInternal(Pointee(EqGetBidsRawRequestWithLogContext( - std::move(expected_get_bid_request))), + expected_get_bid_request)), _, _, _)) .WillRepeatedly(MockGetBids); return buyer; @@ -587,9 +588,12 @@ TYPED_TEST(SelectAdReactorForWebTest, VerifyConsentedDebugConfigPropagates) { std::unique_ptr async_reporter = std::make_unique( std::make_unique()); - ClientRegistry clients{scoring_signals_provider, scoring_client, + ClientRegistry clients{scoring_signals_provider, + scoring_client, buyer_front_end_async_client_factory_mock, - *key_fetcher_manager, std::move(async_reporter)}; + *key_fetcher_manager, + /* crypto_client= */ nullptr, + std::move(async_reporter)}; // Setup expectation on buyer client from SFE. 
GetBidsRequest::GetBidsRawRequest expected_get_bid_request; @@ -616,7 +620,7 @@ TYPED_TEST(SelectAdReactorForWebTest, VerifyConsentedDebugConfigPropagates) { EXPECT_CALL( *buyer, ExecuteInternal(Pointee(EqGetBidsRawRequestWithConsentedDebugConfig( - std::move(expected_get_bid_request))), + expected_get_bid_request)), _, _, _)) .WillRepeatedly(MockGetBids); return buyer; @@ -673,7 +677,8 @@ TYPED_TEST(SelectAdReactorForWebTest, VerifyConsentedDebugConfigPropagates) { RunReactorRequest(this->config_, clients, request); } -TYPED_TEST(SelectAdReactorForWebTest, VerifyComponentAuctionCborEncoding) { +TYPED_TEST(SelectAdReactorForWebTest, + VerifyDeviceComponentAuctionCborEncoding) { MockAsyncProvider scoring_signals_provider; ScoringAsyncClientMock scoring_client; @@ -755,6 +760,7 @@ TYPED_TEST(SelectAdReactorForWebTest, FailsEncodingWhenModifiedBidIsZero) { std::make_unique(); EXPECT_CALL(*key_fetcher_manager, GetPrivateKey) .WillRepeatedly(Return(GetPrivateKey())); + auto [request_with_context, clients] = GetSelectAdRequestAndClientRegistryForTest( CLIENT_TYPE_BROWSER, kNonZeroBidValue, scoring_signals_provider, @@ -776,6 +782,71 @@ TYPED_TEST(SelectAdReactorForWebTest, FailsEncodingWhenModifiedBidIsZero) { request_with_context.context, kBiddingAuctionOhttpResponseLabel); EXPECT_FALSE(decrypted_response.ok()) << decrypted_response.status(); } +TYPED_TEST(SelectAdReactorForWebTest, + VerifyServerComponentAuctionProtoEncoding) { + MockAsyncProvider + scoring_signals_provider; + ScoringAsyncClientMock scoring_client; + BuyerFrontEndAsyncClientFactoryMock buyer_front_end_async_client_factory_mock; + BuyerBidsResponseMap expected_buyer_bids; + std::unique_ptr key_fetcher_manager = + std::make_unique(); + EXPECT_CALL(*key_fetcher_manager, GetPrivateKey) + .WillRepeatedly(Return(GetPrivateKey())); + + EXPECT_CALL(*key_fetcher_manager, GetPublicKey) + .WillOnce(Return(google::cmrt::sdk::public_key_service::v1::PublicKey())); + MockCryptoClientWrapper crypto_client; + SetupMockCrytoClient(crypto_client); + auto [request_with_context, clients] = + GetSelectAdRequestAndClientRegistryForTest( + CLIENT_TYPE_BROWSER, kNonZeroBidValue, scoring_signals_provider, + scoring_client, buyer_front_end_async_client_factory_mock, + key_fetcher_manager.get(), expected_buyer_bids, kSellerOriginDomain, + /*expect_all_buyers_solicited=*/true, kTestTopLevelSellerOriginDomain, + /*enable_reporting=*/false, + /*force_set_modified_bid_to_zero=*/false, + {&crypto_client, + EncryptionCloudPlatform::ENCRYPTION_CLOUD_PLATFORM_GCP}); + + SelectAdResponse response_with_proto = + RunReactorRequest( + this->config_, clients, request_with_context.select_ad_request); + ABSL_LOG(INFO) << "Encrypted SelectAdResponse:\n" + << MessageToJson(response_with_proto); + ASSERT_FALSE(response_with_proto.auction_result_ciphertext().empty()); + + // "Decrypt" the response; the mock crypto client passes the payload + // through unchanged, so the ciphertext can be read directly. + absl::string_view decrypted_response = + response_with_proto.auction_result_ciphertext(); + + // Expect the payload length to be a power of 2; the framing pads + // responses up to the next power-of-2 bucket size. + const size_t payload_size = decrypted_response.size(); + int log_2_payload = log2(payload_size); + EXPECT_EQ(payload_size, 1 << log_2_payload); + EXPECT_GE(payload_size, kMinAuctionResultBytes); + + // Decompress the encoded response. + absl::StatusOr decompressed_response = + UnframeAndDecompressAuctionResult(decrypted_response); + EXPECT_TRUE(decompressed_response.ok()); + + // Deserialize and validate the response payload. 
+ AuctionResult deserialized_auction_result; + EXPECT_TRUE(deserialized_auction_result.ParseFromArray( + decompressed_response->data(), decompressed_response->size())); + + // Validate that neither the chaff bit nor an error is set. + EXPECT_FALSE(deserialized_auction_result.is_chaff()); + EXPECT_FALSE(deserialized_auction_result.has_error()); + + // Validate server component auction fields. + EXPECT_EQ(deserialized_auction_result.auction_params().component_seller(), + kSellerOriginDomain); + EXPECT_EQ( + deserialized_auction_result.auction_params().ciphertext_generation_id(), + kSampleGenerationId); +} } // namespace } // namespace privacy_sandbox::bidding_auction_servers diff --git a/services/seller_frontend_service/select_auction_result_reactor.cc b/services/seller_frontend_service/select_auction_result_reactor.cc new file mode 100644 index 00000000..0f193faa --- /dev/null +++ b/services/seller_frontend_service/select_auction_result_reactor.cc @@ -0,0 +1,325 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "services/seller_frontend_service/select_auction_result_reactor.h" + +#include "services/common/constants/user_error_strings.h" +#include "services/common/util/request_response_constants.h" +#include "services/seller_frontend_service/util/validation_utils.h" +#include "src/util/status_macro/status_util.h" + +namespace privacy_sandbox::bidding_auction_servers { + +// Helper functions. 
+namespace { +absl::string_view GetGenerationId( + const std::variant& pai) { + absl::string_view request_generation_id; + std::visit( + [&request_generation_id](const auto& protected_auction_input) { + request_generation_id = protected_auction_input.generation_id(); + }, + pai); + return request_generation_id; +} +} // namespace + +void SelectAuctionResultReactor::SetLoggingContextWithProtectedAuctionInput() { + log_context_.Update( + std::visit( + [this](const auto& protected_input) + -> absl::btree_map { + return { + {kGenerationId, protected_input.generation_id()}, + {kSellerDebugId, request_->auction_config().seller_debug_id()}}; + }, + protected_auction_input_), + std::visit( + [](const auto& protected_input) { + return protected_input.consented_debug_config(); + }, + protected_auction_input_)); +} + +void SelectAuctionResultReactor::LogRequestMetrics() { + absl::string_view encapsulated_req; + if (is_protected_auction_request_) { + encapsulated_req = request_->protected_auction_ciphertext(); + } else { + encapsulated_req = request_->protected_audience_ciphertext(); + } + LogIfError(metric_context_->LogHistogram( + (int)encapsulated_req.size())); + LogIfError(metric_context_->LogHistogram( + (int)request_->auction_config().ByteSizeLong())); +} + +void SelectAuctionResultReactor::ScoreAds( + std::vector& component_auction_results) { + auto raw_request = CreateTopLevelScoreAdsRawRequest( + request_->auction_config(), protected_auction_input_, + component_auction_results); + PS_VLOG(2, log_context_) << "\nScoreAdsRawRequest:\n" + << raw_request->DebugString(); + auto auction_request_metric = + metric::MakeInitiatedRequest(metric::kAs, metric_context_.get()); + auction_request_metric->SetRequestSize((int)raw_request->ByteSizeLong()); + auto on_scoring_done = + [this, auction_request_metric = std::move(auction_request_metric)]( + absl::StatusOr> + result) mutable { + { + int response_size = + result.ok() ? (int)result->get()->ByteSizeLong() : 0; + auction_request_metric->SetResponseSize(response_size); + // Destruct auction_request_metric; its destructor measures the + // request time. + auto not_used = std::move(auction_request_metric); + } + OnScoreAdsDone(std::move(result)); + }; + absl::Status execute_result = clients_.scoring.ExecuteInternal( + std::move(raw_request), {}, std::move(on_scoring_done), + absl::Milliseconds( + config_client_.GetIntParameter(SCORE_ADS_RPC_TIMEOUT_MS))); + if (!execute_result.ok()) { + LogIfError( + metric_context_->AccumulateMetric( + 1, metric::kSfeScoreAdsFailedToCall)); + PS_LOG(ERROR, log_context_) + << "Failed to make async ScoreAds call with error: " + << execute_result.ToString(); + FinishWithStatus(grpc::Status(grpc::INTERNAL, kInternalServerError)); + } +} + +void SelectAuctionResultReactor::OnScoreAdsDone( + absl::StatusOr> + response) { + PS_VLOG(2, log_context_) << "ScoreAdsResponse status: " << response.status(); + auto scoring_return_status = server_common::FromAbslStatus(response.status()); + if (!response.ok()) { + LogIfError( + metric_context_->AccumulateMetric( + 1, metric::kSfeScoreAdsResponseError)); + LogIfError(metric_context_->AccumulateMetric< + metric::kInitiatedRequestAuctionErrorCountByStatus>( + 1, StatusCodeToString(response.status().code()))); + // Any INTERNAL errors from auction service will be suppressed by SFE and + // will cause a chaff to be sent back. Non-INTERNAL errors, on the other + // hand, are propagated back to the seller ad service. 
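+ // For example, a kInvalidArgument status from the scoring call is returned + // to the seller ad service as-is by the branch below, while a kInternal + // status falls through to the chaff path at the end of this method.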
+ if (scoring_return_status.error_code() != grpc::StatusCode::INTERNAL) { + FinishWithStatus(scoring_return_status); + return; + } + // Otherwise the error is INTERNAL; fall through and send a chaff + // response below. + } else { + // Map AdScore to AuctionResult. + const auto& score_ad_response = *response; + if (score_ad_response->has_debug_info()) { + server_common::DebugInfo& auction_log = + *response_->mutable_debug_info()->add_downstream_servers(); + auction_log = std::move(*score_ad_response->mutable_debug_info()); + auction_log.set_server_name("auction"); + } + if (score_ad_response->has_ad_score() && + score_ad_response->ad_score().buyer_bid() > 0) { + // Set metric signals for winner, used to collect + // metrics for requests with winners. + metric_context_->SetCustomState(kWinningAuctionAd, ""); + + FinishWithResponse(CreateWinningAuctionResultCiphertext( + score_ad_response->ad_score(), + GetBuyerIgsWithBidsMap(component_auction_bidding_groups_), + request_->client_type(), *decrypted_request_, log_context_)); + return; + } + } + FinishWithResponse(CreateChaffAuctionResultCiphertext( + request_->client_type(), *decrypted_request_, log_context_)); +} + +void SelectAuctionResultReactor::FinishWithStatus(const grpc::Status& status) { + if (status.error_code() != grpc::StatusCode::OK) { + PS_LOG(ERROR, log_context_) << "RPC failed: " << status.error_message(); + metric_context_->SetRequestResult(server_common::ToAbslStatus(status)); + } else { + PS_VLOG(kEncrypted, log_context_) << "Encrypted SelectAdResponse:\n" + << response_->ShortDebugString(); + } + if (metric_context_->CustomState(kWinningAuctionAd).ok()) { + LogIfError( + metric_context_->LogUpDownCounter(1)); + LogIfError(metric_context_->LogHistogram( + static_cast((absl::Now() - start_) / absl::Milliseconds(1)))); + } + Finish(status); +} + +void SelectAuctionResultReactor::FinishWithResponse( + absl::StatusOr auction_result_ciphertext) { + if (!auction_result_ciphertext.ok()) { + FinishWithStatus( + grpc::Status(grpc::StatusCode::INTERNAL, kInternalServerError)); + return; + } + *response_->mutable_auction_result_ciphertext() = + std::move(*auction_result_ciphertext); + FinishWithStatus(grpc::Status::OK); +} + +void SelectAuctionResultReactor::FinishWithClientVisibleErrors( + absl::string_view message) { + PS_VLOG(2, log_context_) + << "Finishing the SelectAdRequest RPC with client visible error: " + << message; + AuctionResult::Error auction_error; + auction_error.set_code(static_cast(ErrorCode::CLIENT_SIDE)); + auction_error.set_message(message); + FinishWithResponse( + CreateErrorAuctionResultCiphertext(auction_error, request_->client_type(), + *decrypted_request_, log_context_)); +} + +void SelectAuctionResultReactor::FinishWithServerVisibleErrors( + absl::string_view message) { + PS_VLOG(2, log_context_) + << "Finishing the SelectAdRequest RPC with ad server visible error"; + // A string_view is not guaranteed to be NUL-terminated; copy it into a + // std::string before constructing the status. + FinishWithStatus(grpc::Status(grpc::StatusCode::INVALID_ARGUMENT, + std::string(message))); +} + +void SelectAuctionResultReactor::Execute() { + // Do not go further if server is misconfigured. + if (seller_domain_.empty()) { + FinishWithServerVisibleErrors(kSellerDomainEmpty); + return; + } + + LogRequestMetrics(); + // Perform validation on the request. + if (!ValidateEncryptedSelectAdRequest( + *request_, AuctionScope::AUCTION_SCOPE_SERVER_TOP_LEVEL_SELLER, + seller_domain_, error_accumulator_)) { + FinishWithServerVisibleErrors(error_accumulator_.GetAccumulatedErrorString( + ErrorVisibility::AD_SERVER_VISIBLE)); + return; + } + + // Decrypt and set PAI. 
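+ // The request ciphertext is OHTTP-encapsulated HPKE. The helper below + // resolves the private key through the key fetcher manager and returns the + // decrypted payload along with the request context that is reused later + // when encrypting the AuctionResult response.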
+ absl::string_view encapsulated_req; + if (is_protected_auction_request_) { + encapsulated_req = request_->protected_auction_ciphertext(); + } else { + encapsulated_req = request_->protected_audience_ciphertext(); + } + auto decrypt_req_status = DecryptOHTTPEncapsulatedHpkeCiphertext( + encapsulated_req, clients_.key_fetcher_manager_); + // Could not decrypt PAI. + if (!decrypt_req_status.ok()) { + PS_VLOG(1, log_context_) + << "Error decrypting the protected " + << (is_protected_auction_request_ ? "auction" : "audience") + << " input ciphertext: " << decrypt_req_status.status(); + // Client side errors. + FinishWithClientVisibleErrors( + absl::StrFormat(kErrorDecryptingCiphertextError, + decrypt_req_status.status().message())); + return; + } + decrypted_request_ = std::move(*decrypt_req_status); + + if (is_protected_auction_request_) { + protected_auction_input_ = DecryptProtectedAuctionInput( + decrypted_request_->plaintext, clients_.key_fetcher_manager_, + request_->client_type(), error_accumulator_); + } else { + protected_auction_input_ = DecryptProtectedAudienceInput( + decrypted_request_->plaintext, clients_.key_fetcher_manager_, + request_->client_type(), error_accumulator_); + } + + // Populate the logging context needed for request tracing. If decryption + // failed, we still want to log the request and header with an empty + // context. + SetLoggingContextWithProtectedAuctionInput(); + // Log the request only after the log context is set. + PS_VLOG(kEncrypted, log_context_) << "Encrypted SelectAdRequest:\n" + << request_->ShortDebugString(); + + // Validate PAI. + request_generation_id_ = GetGenerationId(protected_auction_input_); + if (request_generation_id_.empty()) { + PS_VLOG(1, log_context_) << kMissingGenerationId; + // Client side errors. + FinishWithClientVisibleErrors(kMissingGenerationId); + return; + } + + // Decrypt and validate AuctionResults. + std::vector component_auction_results = + DecryptAndValidateComponentAuctionResults( + request_, seller_domain_, request_generation_id_, + *clients_.crypto_client_ptr_, clients_.key_fetcher_manager_, + error_accumulator_, log_context_); + + // No valid auction results found. + if (component_auction_results.empty()) { + std::string error_msg = error_accumulator_.GetAccumulatedErrorString( + ErrorVisibility::AD_SERVER_VISIBLE); + PS_VLOG(1, log_context_) << error_msg; + FinishWithServerVisibleErrors(error_msg); + return; + } + + // Keep bidding groups for adding to response. + for (auto& car : component_auction_results) { + component_auction_bidding_groups_.push_back( + std::move(*car.mutable_bidding_groups())); + } + + // Map and Call Auction Service. + ScoreAds(component_auction_results); +} + +void SelectAuctionResultReactor::OnDone() { delete this; } + +void SelectAuctionResultReactor::OnCancel() { + // TODO(b/245982466): Handle early abort and errors. 
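+ // Note that gRPC still invokes OnDone() after a cancellation, and OnDone() + // deletes this reactor, so no state leaks while cancellation handling is + // unimplemented.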
+} + +SelectAuctionResultReactor::SelectAuctionResultReactor( + grpc::CallbackServerContext* context, const SelectAdRequest* request, + SelectAdResponse* response, const ClientRegistry& clients, + const TrustedServersConfigClient& config_client) + : request_(request), + response_(response), + is_protected_auction_request_( + !request_->protected_auction_ciphertext().empty()), + clients_(clients), + config_client_(config_client), + log_context_({}, server_common::ConsentedDebugConfiguration(), + [this]() { return response_->mutable_debug_info(); }), + error_accumulator_(&log_context_) { + seller_domain_ = config_client_.GetStringParameter(SELLER_ORIGIN_DOMAIN); + CHECK_OK([this]() { + PS_ASSIGN_OR_RETURN(metric_context_, + metric::SfeContextMap()->Remove(request_)); + return absl::OkStatus(); + }()) << "SfeContextMap()->Get(request) should have been called"; +} + +} // namespace privacy_sandbox::bidding_auction_servers diff --git a/services/seller_frontend_service/select_auction_result_reactor.h b/services/seller_frontend_service/select_auction_result_reactor.h new file mode 100644 index 00000000..fdf1400c --- /dev/null +++ b/services/seller_frontend_service/select_auction_result_reactor.h @@ -0,0 +1,131 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef SERVICES_SELLER_FRONTEND_SERVICE_SELECT_AUCTION_RESULT_REACTOR_WEB_H_ +#define SERVICES_SELLER_FRONTEND_SERVICE_SELECT_AUCTION_RESULT_REACTOR_WEB_H_ + +#include +#include +#include +#include + +#include + +#include "api/bidding_auction_servers.grpc.pb.h" +#include "api/bidding_auction_servers.pb.h" +#include "include/grpcpp/impl/codegen/server_callback.h" +#include "services/common/clients/config/trusted_server_config_client.h" +#include "services/common/metric/server_definition.h" +#include "services/common/util/error_accumulator.h" +#include "services/seller_frontend_service/seller_frontend_service.h" +#include "services/seller_frontend_service/util/encryption_util.h" +#include "services/seller_frontend_service/util/proto_mapping_util.h" + +namespace privacy_sandbox::bidding_auction_servers { +// Marker to set state of request in metric context. +inline constexpr absl::string_view kWinningAuctionAd = "winning_auction_ad"; + +// This is a gRPC reactor that serves a single SelectAdRequest for a top +// level auction, which involves AuctionResults from other sellers. +// It stores state relevant to the request and after the +// response is finished being served, SelectAuctionResultReactor cleans up all +// necessary state and grpc releases the reactor from memory. +class SelectAuctionResultReactor : public grpc::ServerUnaryReactor { + public: + explicit SelectAuctionResultReactor( + grpc::CallbackServerContext* context, const SelectAdRequest* request, + SelectAdResponse* response, const ClientRegistry& clients, + const TrustedServersConfigClient& config_client); + virtual ~SelectAuctionResultReactor() = default; + + // SelectAuctionResultReactor is neither copyable nor movable. 
+ SelectAuctionResultReactor(const SelectAuctionResultReactor&) = delete; + SelectAuctionResultReactor& operator=(const SelectAuctionResultReactor&) = + delete; + + // Initiate the asynchronous execution of the SelectAdRequest. + void Execute(); + + // Cleans up and deletes the SelectAuctionResultReactor. Called by the grpc + // library after the response has finished. + void OnDone() override; + + // Abandons the entire SelectAuctionResultReactor. Called by the grpc library + // if the client cancels the request. + void OnCancel() override; + + private: + // Initialization + const SelectAdRequest* request_; + SelectAdResponse* response_; + std::variant + protected_auction_input_; + absl::string_view seller_domain_; + absl::string_view request_generation_id_; + bool is_protected_auction_request_; + std::vector component_auction_bidding_groups_; + const ClientRegistry& clients_; + const TrustedServersConfigClient& config_client_; + + server_common::log::ContextImpl log_context_; + + // Used to log metrics; same lifetime as the reactor. + std::unique_ptr metric_context_; + + // Object that accumulates all the errors and aggregates them based on their + // intended visibility. + ErrorAccumulator error_accumulator_; + + // Encryption context needed throughout the lifecycle of the request. + std::unique_ptr decrypted_request_; + + // Start time of request. + absl::Time start_ = absl::Now(); + + // Finishes the RPC call with a status and publishes metrics. + void FinishWithStatus(const grpc::Status& status); + + // Logs metrics from request size. + void LogRequestMetrics(); + + // Called to start the score ads RPC. + // This function moves the elements from component_auction_results and + // signals fields from auction_config and protected_auction_input. + // These fields should not be used after this function has been called. + void ScoreAds(std::vector& component_auction_results); + + // Called after score ads RPC is complete to collate the result. + void OnScoreAdsDone( + absl::StatusOr> + response); + + // Finishes the RPC with the given ciphertext; callers should move the + // auction result ciphertext into this method. + void FinishWithResponse( + absl::StatusOr auction_result_ciphertext); + + void FinishWithClientVisibleErrors(absl::string_view message); + + void FinishWithServerVisibleErrors(absl::string_view message); + + // Sets logging context from decrypted Protected Audience Input. + // Will use the empty protected audience object if this is not + // populated from the request. + void SetLoggingContextWithProtectedAuctionInput(); +}; + +} // namespace privacy_sandbox::bidding_auction_servers + +#endif // SERVICES_SELLER_FRONTEND_SERVICE_SELECT_AUCTION_RESULT_REACTOR_WEB_H_ diff --git a/services/seller_frontend_service/select_auction_result_reactor_test.cc b/services/seller_frontend_service/select_auction_result_reactor_test.cc new file mode 100644 index 00000000..6869ea6c --- /dev/null +++ b/services/seller_frontend_service/select_auction_result_reactor_test.cc @@ -0,0 +1,343 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
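+ +// A minimal sketch of the reactor lifecycle these tests exercise (hypothetical +// wiring, not part of the tests themselves): the service heap-allocates the +// reactor and calls Execute(); gRPC invokes OnDone() once the response has +// finished, which deletes the instance. +// +// auto* reactor = new SelectAuctionResultReactor( +// &context, &request, &response, clients, config_client); +// reactor->Execute(); // Finishes the RPC asynchronously; self-deletes in +// // OnDone().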
+ +#include "services/seller_frontend_service/select_auction_result_reactor.h" + +#include + +#include "absl/synchronization/notification.h" +#include "api/bidding_auction_servers.grpc.pb.h" +#include "gtest/gtest.h" +#include "services/common/test/random.h" +#include "services/seller_frontend_service/util/select_ad_reactor_test_utils.h" +#include "src/core/test/utils/proto_test_utils.h" + +namespace privacy_sandbox::bidding_auction_servers { +namespace { + +using google::scp::core::test::EqualsProto; + +SelectAdResponse RunRequest(const TrustedServersConfigClient& config_client, + const ClientRegistry& clients, + const SelectAdRequest& request) { + grpc::CallbackServerContext context; + SelectAdResponse response; + SelectAuctionResultReactor reactor(&context, &request, &response, clients, + config_client); + reactor.Execute(); + return response; +} + +template +class SelectAuctionResultReactorTest : public ::testing::Test { + protected: + void SetUp() override { + SetupRequest(); + config_.SetFlagForTest("", CONSENTED_DEBUG_TOKEN); + config_.SetFlagForTest(kFalse, ENABLE_PROTECTED_APP_SIGNALS); + config_.SetFlagForTest(kTrue, ENABLE_PROTECTED_AUDIENCE); + + // Return hard coded key for decryption. + EXPECT_CALL(key_fetcher_manager_, GetPrivateKey) + .Times(testing::AnyNumber()) + .WillRepeatedly( + [](const google::scp::cpio::PublicPrivateKeyPairId& key_id) { + EXPECT_EQ(key_id, std::to_string(HpkeKeyset{}.key_id)); + return GetPrivateKey(); + }); + + // Initialization for telemetry. + server_common::telemetry::TelemetryConfig config_proto; + config_proto.set_mode(server_common::telemetry::TelemetryConfig::PROD); + metric::MetricContextMap( + server_common::telemetry::BuildDependentConfig(config_proto)) + ->Get(&request_); + } + + void SetupRequest() { + auto [protected_auction_input, request, context] = + GetSampleSelectAdRequest(CLIENT_TYPE_ANDROID, kSellerOriginDomain); + protected_auction_input_ = std::move(protected_auction_input); + request_ = std::move(request); + context_ = std::make_unique( + std::move(context)); + } + + void SetupComponentAuctionResults(int num = 10) { + // The key that will be returned by mock key fetcher. + auto key_id = std::to_string(HpkeKeyset{}.key_id); + for (int i = 0; i < num; ++i) { + AuctionResult ar = MakeARandomComponentAuctionResult( + protected_auction_input_.generation_id(), kSellerOriginDomain); + auto* car = this->request_.mutable_component_auction_results()->Add(); + car->set_key_id(key_id); + car->set_auction_result_ciphertext( + FrameAndCompressProto(ar.SerializeAsString())); + component_auction_results_.push_back(std::move(ar)); + } + // Return plaintext as is. + EXPECT_CALL(crypto_client_, HpkeDecrypt) + .WillRepeatedly([](const server_common::PrivateKey& private_key, + const std::string& ciphertext) { + // Mock the HpkeDecrypt() call on the crypto client. 
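+ // The mock echoes the ciphertext back as the decrypted payload, so the + // reactor's decryption path runs without real HPKE keys.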
+ google::cmrt::sdk::crypto_service::v1::HpkeDecryptResponse + hpke_decrypt_response; + hpke_decrypt_response.set_payload(ciphertext); + return hpke_decrypt_response; + }); + } + + T protected_auction_input_; + std::vector component_auction_results_; + SelectAdRequest request_; + std::unique_ptr context_; + TrustedServersConfigClient config_ = CreateConfig(); + server_common::MockKeyFetcherManager key_fetcher_manager_; + MockCryptoClientWrapper crypto_client_; + // Scoring Client + ScoringAsyncClientMock scoring_client_; +}; + +using ProtectedAuctionInputTypes = + ::testing::Types; +TYPED_TEST_SUITE(SelectAuctionResultReactorTest, ProtectedAuctionInputTypes); + +TYPED_TEST(SelectAuctionResultReactorTest, CallsScoringWithComponentAuctions) { + absl::Notification scoring_done; + this->SetupComponentAuctionResults(2); + EXPECT_CALL(this->scoring_client_, ExecuteInternal) + .Times(1) + .WillOnce([this, &scoring_done]( + std::unique_ptr + score_ads_request, + const RequestMetadata& metadata, + absl::AnyInvocable< + void(absl::StatusOr>) &&> + on_done, + absl::Duration timeout) { + EXPECT_EQ(score_ads_request->auction_signals(), + this->request_.auction_config().auction_signals()); + EXPECT_EQ(score_ads_request->seller_signals(), + this->request_.auction_config().seller_signals()); + EXPECT_EQ(score_ads_request->seller(), + this->request_.auction_config().seller()); + EXPECT_EQ(score_ads_request->seller_currency(), + this->request_.auction_config().seller_currency()); + EXPECT_EQ(score_ads_request->component_auction_results_size(), + this->request_.component_auction_results_size()); + for (int i = 0; i < score_ads_request->component_auction_results_size(); + i++) { + // bidding groups are not sent to Auction server. + this->component_auction_results_[i].clear_bidding_groups(); + EXPECT_THAT(score_ads_request->component_auction_results(i), + EqualsProto(this->component_auction_results_[i])); + } + std::move(on_done)( + std::make_unique()); + scoring_done.Notify(); + return absl::OkStatus(); + }); + ClientRegistry clients = { + MockAsyncProvider(), + this->scoring_client_, + BuyerFrontEndAsyncClientFactoryMock(), + this->key_fetcher_manager_, + &this->crypto_client_, + std::make_unique( + std::make_unique())}; + auto response = RunRequest(this->config_, clients, this->request_); + scoring_done.WaitForNotification(); +} + +TYPED_TEST(SelectAuctionResultReactorTest, + ReturnsErrorForNoValidComponentAuctions) { + this->SetupComponentAuctionResults(0); + EXPECT_CALL(this->scoring_client_, ExecuteInternal).Times(0); + ClientRegistry clients = { + MockAsyncProvider(), + this->scoring_client_, + BuyerFrontEndAsyncClientFactoryMock(), + this->key_fetcher_manager_, + &this->crypto_client_, + std::make_unique( + std::make_unique())}; + auto response = RunRequest(this->config_, clients, this->request_); + EXPECT_EQ(response.auction_result_ciphertext().size(), 0); +} + +TYPED_TEST(SelectAuctionResultReactorTest, + ReturnsResponseOnWinnerFromScoringClient) { + absl::Notification scoring_done; + this->SetupComponentAuctionResults(2); + ScoreAdsResponse::AdScore winner = MakeARandomAdScore( + /*hob_buyer_entries = */ 2, + /*rejection_reason_ig_owners = */ 2, + /*rejection_reason_ig_per_owner = */ 2); + EXPECT_CALL(this->scoring_client_, ExecuteInternal) + .Times(1) + .WillOnce([&scoring_done, &winner]( + std::unique_ptr + score_ads_request, + const RequestMetadata& metadata, + absl::AnyInvocable< + void(absl::StatusOr>) &&> + on_done, + absl::Duration timeout) { + auto response = + std::make_unique(); + 
response->mutable_ad_score()->MergeFrom(winner); + std::move(on_done)(std::move(response)); + scoring_done.Notify(); + return absl::OkStatus(); + }); + ClientRegistry clients = { + MockAsyncProvider(), + this->scoring_client_, + BuyerFrontEndAsyncClientFactoryMock(), + this->key_fetcher_manager_, + &this->crypto_client_, + std::make_unique( + std::make_unique())}; + auto response = RunRequest(this->config_, clients, this->request_); + scoring_done.WaitForNotification(); + // Conversion from ad score to auction result ciphertext will be + // unit tested in util function unit tests. + EXPECT_GT(response.auction_result_ciphertext().size(), 0); +} + +TYPED_TEST(SelectAuctionResultReactorTest, + DoesNotReturnChaffForErrorFromScoringClient) { + absl::Notification scoring_done; + this->SetupComponentAuctionResults(2); + EXPECT_CALL(this->scoring_client_, ExecuteInternal) + .Times(1) + .WillOnce([&scoring_done]( + std::unique_ptr + score_ads_request, + const RequestMetadata& metadata, + absl::AnyInvocable< + void(absl::StatusOr>) &&> + on_done, + absl::Duration timeout) { + scoring_done.Notify(); + return absl::Status(absl::StatusCode::kInternal, "test msg"); + }); + ClientRegistry clients = { + MockAsyncProvider(), + this->scoring_client_, + BuyerFrontEndAsyncClientFactoryMock(), + this->key_fetcher_manager_, + &this->crypto_client_, + std::make_unique( + std::make_unique())}; + auto response = RunRequest(this->config_, clients, this->request_); + scoring_done.WaitForNotification(); + // Conversion from ad score to auction result ciphertext will be + // unit tested in util function unit tests. + EXPECT_EQ(response.auction_result_ciphertext().size(), 0); +} + +TYPED_TEST(SelectAuctionResultReactorTest, + DoesNotReturnChaffIfSellerNotConfiguredInConfig) { + this->SetupComponentAuctionResults(2); + EXPECT_CALL(this->scoring_client_, ExecuteInternal).Times(0); + ClientRegistry clients = { + MockAsyncProvider(), + this->scoring_client_, + BuyerFrontEndAsyncClientFactoryMock(), + this->key_fetcher_manager_, + &this->crypto_client_, + std::make_unique( + std::make_unique())}; + auto config = CreateConfig(); + config.SetFlagForTest("", SELLER_ORIGIN_DOMAIN); + auto response = RunRequest(config, clients, this->request_); + // Conversion from ad score to auction result ciphertext will be + // unit tested in util function unit tests. 
+ EXPECT_EQ(response.auction_result_ciphertext().size(), 0); +} + +TYPED_TEST(SelectAuctionResultReactorTest, + DoesNotReturnChaffForNonInternalErrorFromAuctionServer) { + absl::Notification scoring_done; + this->SetupComponentAuctionResults(2); + EXPECT_CALL(this->scoring_client_, ExecuteInternal) + .Times(1) + .WillOnce([&scoring_done]( + std::unique_ptr + score_ads_request, + const RequestMetadata& metadata, + absl::AnyInvocable< + void(absl::StatusOr>) &&> + on_done, + absl::Duration timeout) { + std::move(on_done)( + absl::Status(absl::StatusCode::kInvalidArgument, "test msg")); + scoring_done.Notify(); + return absl::OkStatus(); + }); + ClientRegistry clients = { + MockAsyncProvider(), + this->scoring_client_, + BuyerFrontEndAsyncClientFactoryMock(), + this->key_fetcher_manager_, + &this->crypto_client_, + std::make_unique( + std::make_unique())}; + auto response = RunRequest(this->config_, clients, this->request_); + scoring_done.WaitForNotification(); + EXPECT_EQ(response.auction_result_ciphertext().size(), 0); +} + +TYPED_TEST(SelectAuctionResultReactorTest, + ReturnsResponseOnInternalErrorFromAuctionServer) { + absl::Notification scoring_done; + this->SetupComponentAuctionResults(2); + EXPECT_CALL(this->scoring_client_, ExecuteInternal) + .Times(1) + .WillOnce([&scoring_done]( + std::unique_ptr + score_ads_request, + const RequestMetadata& metadata, + absl::AnyInvocable< + void(absl::StatusOr>) &&> + on_done, + absl::Duration timeout) { + std::move(on_done)( + absl::Status(absl::StatusCode::kInternal, "test msg")); + scoring_done.Notify(); + return absl::OkStatus(); + }); + ClientRegistry clients = { + MockAsyncProvider(), + this->scoring_client_, + BuyerFrontEndAsyncClientFactoryMock(), + this->key_fetcher_manager_, + &this->crypto_client_, + std::make_unique( + std::make_unique())}; + auto response = RunRequest(this->config_, clients, this->request_); + scoring_done.WaitForNotification(); + // Conversion from ad score to auction result ciphertext will be + // unit tested in util function unit tests. 
+ EXPECT_GT(response.auction_result_ciphertext().size(), 0); +} + +} // namespace +} // namespace privacy_sandbox::bidding_auction_servers diff --git a/services/seller_frontend_service/seller_frontend_main.cc b/services/seller_frontend_service/seller_frontend_main.cc index 4012e927..1e2ad9f7 100644 --- a/services/seller_frontend_service/seller_frontend_main.cc +++ b/services/seller_frontend_service/seller_frontend_main.cc @@ -30,7 +30,6 @@ #include "grpcpp/ext/proto_server_reflection_plugin.h" #include "grpcpp/grpcpp.h" #include "grpcpp/health_check_service_interface.h" -#include "scp/cc/public/cpio/interface/cpio.h" #include "services/common/clients/config/trusted_server_config_client.h" #include "services/common/clients/config/trusted_server_config_client_util.h" #include "services/common/encryption/crypto_client_factory.h" @@ -39,9 +38,10 @@ #include "services/seller_frontend_service/runtime_flags.h" #include "services/seller_frontend_service/seller_frontend_service.h" #include "services/seller_frontend_service/util/key_fetcher_utils.h" -#include "src/cpp/encryption/key_fetcher/src/key_fetcher_manager.h" -#include "src/cpp/util/rlimit_core_config.h" -#include "src/cpp/util/status_macro/status_macros.h" +#include "src/encryption/key_fetcher/key_fetcher_manager.h" +#include "src/public/cpio/interface/cpio.h" +#include "src/util/rlimit_core_config.h" +#include "src/util/status_macro/status_macros.h" ABSL_FLAG(std::optional, port, std::nullopt, "Port the server is listening on."); @@ -95,7 +95,7 @@ using ::grpc::Server; using ::grpc::ServerBuilder; absl::StatusOr GetConfigClient( - std::string config_param_prefix) { + absl::string_view config_param_prefix) { TrustedServersConfigClient config_client(GetServiceFlags()); config_client.SetFlag(FLAGS_port, PORT); config_client.SetFlag(FLAGS_healthcheck_port, HEALTHCHECK_PORT); diff --git a/services/seller_frontend_service/seller_frontend_service.cc b/services/seller_frontend_service/seller_frontend_service.cc index fb64bd0a..49275c0a 100644 --- a/services/seller_frontend_service/seller_frontend_service.cc +++ b/services/seller_frontend_service/seller_frontend_service.cc @@ -27,7 +27,7 @@ #include "services/seller_frontend_service/select_ad_reactor_app.h" #include "services/seller_frontend_service/select_ad_reactor_invalid_client.h" #include "services/seller_frontend_service/select_ad_reactor_web.h" -#include "src/cpp/telemetry/telemetry.h" +#include "src/telemetry/telemetry.h" namespace privacy_sandbox::bidding_auction_servers { diff --git a/services/seller_frontend_service/seller_frontend_service.h b/services/seller_frontend_service/seller_frontend_service.h index af4be4b6..435b7a10 100644 --- a/services/seller_frontend_service/seller_frontend_service.h +++ b/services/seller_frontend_service/seller_frontend_service.h @@ -33,9 +33,9 @@ #include "services/seller_frontend_service/providers/scoring_signals_async_provider.h" #include "services/seller_frontend_service/runtime_flags.h" #include "services/seller_frontend_service/util/config_param_parser.h" +#include "src/concurrent/event_engine_executor.h" #include "src/core/lib/event_engine/default_event_engine.h" -#include "src/cpp/concurrent/event_engine_executor.h" -#include "src/cpp/encryption/key_fetcher/interface/key_fetcher_manager_interface.h" +#include "src/encryption/key_fetcher/interface/key_fetcher_manager_interface.h" namespace privacy_sandbox::bidding_auction_servers { @@ -47,6 +47,7 @@ struct ClientRegistry { const ClientFactory& buyer_factory; server_common::KeyFetcherManagerInterface& 
key_fetcher_manager_; + CryptoClientWrapperInterface* const crypto_client_ptr_; std::unique_ptr reporting; }; @@ -98,8 +99,11 @@ class SellerFrontEndService final : public SellerFrontEnd::CallbackService { config_client_.GetBooleanParameter(BUYER_EGRESS_TLS)}); }()), clients_{ - *scoring_signals_async_provider_, *scoring_, *buyer_factory_, + *scoring_signals_async_provider_, + *scoring_, + *buyer_factory_, *key_fetcher_manager_, + crypto_client_.get(), std::make_unique( std::make_unique(executor_.get()))} { } diff --git a/services/seller_frontend_service/seller_frontend_service_test.cc b/services/seller_frontend_service/seller_frontend_service_test.cc index 77b5ad9c..25198244 100644 --- a/services/seller_frontend_service/seller_frontend_service_test.cc +++ b/services/seller_frontend_service/seller_frontend_service_test.cc @@ -38,7 +38,7 @@ #include "services/common/test/utils/service_utils.h" #include "services/seller_frontend_service/select_ad_reactor.h" #include "services/seller_frontend_service/util/select_ad_reactor_test_utils.h" -#include "src/cpp/encryption/key_fetcher/mock/mock_key_fetcher_manager.h" +#include "src/encryption/key_fetcher/mock/mock_key_fetcher_manager.h" namespace privacy_sandbox::bidding_auction_servers { namespace { @@ -104,8 +104,12 @@ TYPED_TEST(SellerFrontEndServiceTest, ReturnsInvalidInputOnEmptyCiphertext) { MockAsyncProvider(); auto scoring = ScoringAsyncClientMock(); auto bfe_client = BuyerFrontEndAsyncClientFactoryMock(); - ClientRegistry clients{async_provider, scoring, bfe_client, - key_fetcher_manager, std::move(async_reporter)}; + ClientRegistry clients{async_provider, + scoring, + bfe_client, + key_fetcher_manager, + /* crypto_client= */ nullptr, + std::move(async_reporter)}; SellerFrontEndService seller_frontend_service(&this->config_, std::move(clients)); auto start_sfe_result = StartLocalService(&seller_frontend_service); @@ -144,8 +148,12 @@ TYPED_TEST(SellerFrontEndServiceTest, ReturnsInvalidArgumentOnKeyNotFound) { MockAsyncProvider(); auto scoring = ScoringAsyncClientMock(); auto bfe_client = BuyerFrontEndAsyncClientFactoryMock(); - ClientRegistry clients{async_provider, scoring, bfe_client, - key_fetcher_manager, std::move(async_reporter)}; + ClientRegistry clients{async_provider, + scoring, + bfe_client, + key_fetcher_manager, + /* crypto_client= */ nullptr, + std::move(async_reporter)}; SellerFrontEndService seller_frontend_service(&this->config_, std::move(clients)); @@ -175,8 +183,12 @@ TYPED_TEST(SellerFrontEndServiceTest, ReturnsInvalidInputOnInvalidClientType) { MockAsyncProvider(); auto scoring = ScoringAsyncClientMock(); auto bfe_client = BuyerFrontEndAsyncClientFactoryMock(); - ClientRegistry clients{async_provider, scoring, bfe_client, - key_fetcher_manager, std::move(async_reporter)}; + ClientRegistry clients{async_provider, + scoring, + bfe_client, + key_fetcher_manager, + /* crypto_client= */ nullptr, + std::move(async_reporter)}; SellerFrontEndService seller_frontend_service(&this->config_, std::move(clients)); @@ -209,8 +221,12 @@ TYPED_TEST(SellerFrontEndServiceTest, ReturnsInvalidInputOnEmptyBuyerList) { MockAsyncProvider(); auto scoring = ScoringAsyncClientMock(); auto bfe_client = BuyerFrontEndAsyncClientFactoryMock(); - ClientRegistry clients{async_provider, scoring, bfe_client, - key_fetcher_manager, std::move(async_reporter)}; + ClientRegistry clients{async_provider, + scoring, + bfe_client, + key_fetcher_manager, + /* crypto_client= */ nullptr, + std::move(async_reporter)}; SellerFrontEndService 
seller_frontend_service(&this->config_, std::move(clients)); @@ -246,8 +262,12 @@ TYPED_TEST(SellerFrontEndServiceTest, MockAsyncProvider(); auto scoring = ScoringAsyncClientMock(); auto bfe_client = BuyerFrontEndAsyncClientFactoryMock(); - ClientRegistry clients{async_provider, scoring, bfe_client, - key_fetcher_manager, std::move(async_reporter)}; + ClientRegistry clients{async_provider, + scoring, + bfe_client, + key_fetcher_manager, + /*crypto_client = */ nullptr, + std::move(async_reporter)}; SellerFrontEndService seller_frontend_service(&this->config_, std::move(clients)); @@ -282,8 +302,12 @@ TYPED_TEST(SellerFrontEndServiceTest, MockAsyncProvider(); auto scoring = ScoringAsyncClientMock(); auto bfe_client = BuyerFrontEndAsyncClientFactoryMock(); - ClientRegistry clients{async_provider, scoring, bfe_client, - key_fetcher_manager, std::move(async_reporter)}; + ClientRegistry clients{async_provider, + scoring, + bfe_client, + key_fetcher_manager, + /*crypto_client_ptr = */ nullptr, + std::move(async_reporter)}; SellerFrontEndService seller_frontend_service(&this->config_, std::move(clients)); @@ -327,8 +351,12 @@ TYPED_TEST(SellerFrontEndServiceTest, ErrorsOnMissingBuyerInputs) { MockAsyncProvider(); auto scoring = ScoringAsyncClientMock(); auto bfe_client = BuyerFrontEndAsyncClientFactoryMock(); - ClientRegistry clients{async_provider, scoring, bfe_client, - key_fetcher_manager, std::move(async_reporter)}; + ClientRegistry clients{async_provider, + scoring, + bfe_client, + key_fetcher_manager, + /* crypto_client= */ nullptr, + std::move(async_reporter)}; SellerFrontEndService seller_frontend_service(&this->config_, std::move(clients)); @@ -381,8 +409,12 @@ TYPED_TEST(SellerFrontEndServiceTest, SendsChaffOnMissingBuyerClient) { auto async_provider = MockAsyncProvider(); auto scoring = ScoringAsyncClientMock(); - ClientRegistry clients{async_provider, scoring, client_factory_mock, - key_fetcher_manager, std::move(async_reporter)}; + ClientRegistry clients{async_provider, + scoring, + client_factory_mock, + key_fetcher_manager, + /* crypto_client = */ nullptr, + std::move(async_reporter)}; SellerFrontEndService seller_frontend_service(&this->config_, std::move(clients)); @@ -465,9 +497,10 @@ TYPED_TEST(SellerFrontEndServiceTest, SendsChaffOnEmptyGetBidsResponse) { std::unique_ptr async_reporter = std::make_unique( std::make_unique()); - ClientRegistry clients{scoring_signals_provider, scoring_client, - buyer_clients, key_fetcher_manager, - std::move(async_reporter)}; + ClientRegistry clients{ + scoring_signals_provider, scoring_client, buyer_clients, + key_fetcher_manager, + /* crypto_client= */ nullptr, std::move(async_reporter)}; SellerFrontEndService seller_frontend_service(&this->config_, std::move(clients)); @@ -611,9 +644,10 @@ TYPED_TEST(SellerFrontEndServiceTest, RawRequestFinishWithSuccess) { std::unique_ptr async_reporter = std::make_unique( std::make_unique()); - ClientRegistry clients{scoring_signals_provider, scoring_client, - buyer_clients, key_fetcher_manager, - std::move(async_reporter)}; + ClientRegistry clients{ + scoring_signals_provider, scoring_client, buyer_clients, + key_fetcher_manager, + /* crypto_client= */ nullptr, std::move(async_reporter)}; SellerFrontEndService seller_frontend_service(&this->config_, std::move(clients)); @@ -711,9 +745,10 @@ TYPED_TEST(SellerFrontEndServiceTest, ErrorsWhenCannotContactSellerKVServer) { std::unique_ptr async_reporter = std::make_unique( std::make_unique()); - ClientRegistry clients{scoring_signals_provider, scoring_client, - 
buyer_clients, key_fetcher_manager, - std::move(async_reporter)}; + ClientRegistry clients{ + scoring_signals_provider, scoring_client, buyer_clients, + key_fetcher_manager, + /* crypto_client= */ nullptr, std::move(async_reporter)}; SellerFrontEndService seller_frontend_service(&this->config_, std::move(clients)); @@ -786,9 +821,10 @@ TYPED_TEST(SellerFrontEndServiceTest, std::unique_ptr async_reporter = std::make_unique( std::make_unique()); - ClientRegistry clients{scoring_signals_provider, scoring_client, - buyer_clients, key_fetcher_manager, - std::move(async_reporter)}; + ClientRegistry clients{ + scoring_signals_provider, scoring_client, buyer_clients, + key_fetcher_manager, + /* crypto_client= */ nullptr, std::move(async_reporter)}; SellerFrontEndService seller_frontend_service(&this->config_, std::move(clients)); @@ -846,9 +882,10 @@ TYPED_TEST(SellerFrontEndServiceTest, AnyBuyerNotErroringMeansOverallSuccess) { std::unique_ptr async_reporter = std::make_unique( std::make_unique()); - ClientRegistry clients{scoring_signals_provider, scoring_client, - buyer_clients, key_fetcher_manager, - std::move(async_reporter)}; + ClientRegistry clients{ + scoring_signals_provider, scoring_client, buyer_clients, + key_fetcher_manager, + /* crypto_client= */ nullptr, std::move(async_reporter)}; SellerFrontEndService seller_frontend_service(&this->config_, std::move(clients)); @@ -951,9 +988,10 @@ TYPED_TEST(SellerFrontEndServiceTest, std::unique_ptr async_reporter = std::make_unique( std::make_unique()); - ClientRegistry clients{scoring_signals_provider, scoring_client, - buyer_clients, key_fetcher_manager, - std::move(async_reporter)}; + ClientRegistry clients{ + scoring_signals_provider, scoring_client, buyer_clients, + key_fetcher_manager, + /* crypto_client= */ nullptr, std::move(async_reporter)}; SellerFrontEndService seller_frontend_service(&this->config_, std::move(clients)); @@ -1004,8 +1042,8 @@ TYPED_TEST(SellerFrontEndServiceTest, SkipsBuyerCallsAfterLimit) { get_bids_response.mutable_bids()->Add(std::move(bid)); SetupBuyerClientMock(buyer, buyer_clients, get_bids_response, /*repeated_get_allowed=*/false, - /*each_call_required=*/false, - /*buyer_calls_counter=*/&buyer_calls_counter); + /*expect_all_buyers_solicited=*/false, + /*num_buyers_solicited=*/&buyer_calls_counter); expected_buyer_bids.try_emplace( buyer, std::make_unique( get_bids_response)); @@ -1059,9 +1097,10 @@ TYPED_TEST(SellerFrontEndServiceTest, SkipsBuyerCallsAfterLimit) { std::unique_ptr async_reporter = std::make_unique( std::make_unique()); - ClientRegistry clients{scoring_signals_provider, scoring_client, - buyer_clients, key_fetcher_manager, - std::move(async_reporter)}; + ClientRegistry clients{ + scoring_signals_provider, scoring_client, buyer_clients, + key_fetcher_manager, + /* crypto_client= */ nullptr, std::move(async_reporter)}; SellerFrontEndService seller_frontend_service(&this->config_, std::move(clients)); @@ -1113,7 +1152,7 @@ TYPED_TEST(SellerFrontEndServiceTest, InternalErrorsFromScoringCauseAChaff) { get_bids_response.mutable_bids()->Add(std::move(bid)); SetupBuyerClientMock(buyer, buyer_clients, get_bids_response, /*repeated_get_allowed=*/false, - /*each_call_required=*/false); + /*expect_all_buyers_solicited=*/false); expected_buyer_bids.try_emplace( buyer, std::make_unique( get_bids_response)); @@ -1144,9 +1183,10 @@ TYPED_TEST(SellerFrontEndServiceTest, InternalErrorsFromScoringCauseAChaff) { std::unique_ptr async_reporter = std::make_unique( std::make_unique()); - ClientRegistry 
clients{scoring_signals_provider, scoring_client, - buyer_clients, key_fetcher_manager, - std::move(async_reporter)}; + ClientRegistry clients{ + scoring_signals_provider, scoring_client, buyer_clients, + key_fetcher_manager, + /* crypto_client= */ nullptr, std::move(async_reporter)}; SellerFrontEndService seller_frontend_service(&this->config_, std::move(clients)); @@ -1196,7 +1236,7 @@ TYPED_TEST(SellerFrontEndServiceTest, get_bids_response.mutable_bids()->Add(std::move(bid)); SetupBuyerClientMock(buyer, buyer_clients, get_bids_response, /*repeated_get_allowed=*/false, - /*each_call_required=*/false); + /*expect_all_buyers_solicited=*/false); expected_buyer_bids.try_emplace( buyer, std::make_unique( get_bids_response)); @@ -1228,9 +1268,10 @@ TYPED_TEST(SellerFrontEndServiceTest, std::unique_ptr async_reporter = std::make_unique( std::make_unique()); - ClientRegistry clients{scoring_signals_provider, scoring_client, - buyer_clients, key_fetcher_manager, - std::move(async_reporter)}; + ClientRegistry clients{ + scoring_signals_provider, scoring_client, buyer_clients, + key_fetcher_manager, + /* crypto_client= */ nullptr, std::move(async_reporter)}; SellerFrontEndService seller_frontend_service(&this->config_, std::move(clients)); @@ -1276,9 +1317,10 @@ TYPED_TEST(SellerFrontEndServiceTest, ReturnsErrorForAndroidComponentAuction) { std::unique_ptr async_reporter = std::make_unique( std::make_unique()); - ClientRegistry clients{scoring_signals_provider, scoring_client, - buyer_clients, key_fetcher_manager, - std::move(async_reporter)}; + ClientRegistry clients{ + scoring_signals_provider, scoring_client, buyer_clients, + key_fetcher_manager, + /* crypto_client= */ nullptr, std::move(async_reporter)}; SellerFrontEndService seller_frontend_service(&this->config_, std::move(clients)); diff --git a/services/seller_frontend_service/util/BUILD b/services/seller_frontend_service/util/BUILD index e24cf4e3..51ba2022 100644 --- a/services/seller_frontend_service/util/BUILD +++ b/services/seller_frontend_service/util/BUILD @@ -33,8 +33,8 @@ cc_library( "@com_google_absl//absl/container:flat_hash_map", "@com_google_absl//absl/status", "@com_google_absl//absl/strings", - "@google_privacysandbox_servers_common//src/cpp/encryption/key_fetcher/interface:public_key_fetcher_interface", - "@google_privacysandbox_servers_common//src/cpp/util/status_macro:status_macros", + "@google_privacysandbox_servers_common//src/encryption/key_fetcher/interface:public_key_fetcher_interface", + "@google_privacysandbox_servers_common//src/util/status_macro:status_macros", "@rapidjson", ], ) @@ -80,8 +80,8 @@ cc_library( "@com_google_absl//absl/status:statusor", "@com_google_absl//absl/strings", "@com_google_absl//absl/strings:str_format", - "@google_privacysandbox_servers_common//src/cpp/communication:compression", - "@google_privacysandbox_servers_common//src/cpp/util/status_macro:status_macros", + "@google_privacysandbox_servers_common//src/communication:compression", + "@google_privacysandbox_servers_common//src/util/status_macro:status_macros", "@libcbor//:cbor", "@rapidjson", ], @@ -132,8 +132,8 @@ cc_library( "@com_google_absl//absl/functional:any_invocable", "@com_google_absl//absl/status:statusor", "@com_google_absl//absl/strings", - "@google_privacysandbox_servers_common//src/cpp/communication:encoding_utils", - "@google_privacysandbox_servers_common//src/cpp/encryption/key_fetcher/mock:mock_key_fetcher_manager", + "@google_privacysandbox_servers_common//src/communication:encoding_utils", + 
"@google_privacysandbox_servers_common//src/encryption/key_fetcher/mock:mock_key_fetcher_manager", ], ) @@ -156,14 +156,15 @@ cc_library( srcs = ["key_fetcher_utils.cc"], hdrs = ["key_fetcher_utils.h"], deps = [ + "//api:bidding_auction_servers_cc_proto", "//services/common/clients/config:config_client", "//services/common/constants:common_service_flags", "//services/common/util:json_util", "//services/seller_frontend_service:runtime_flags", - "@google_privacysandbox_servers_common//src/cpp/encryption/key_fetcher/src:fake_key_fetcher_manager", - "@google_privacysandbox_servers_common//src/cpp/encryption/key_fetcher/src:key_fetcher_manager", - "@google_privacysandbox_servers_common//src/cpp/encryption/key_fetcher/src:private_key_fetcher", - "@google_privacysandbox_servers_common//src/cpp/encryption/key_fetcher/src:public_key_fetcher", + "@google_privacysandbox_servers_common//src/encryption/key_fetcher:fake_key_fetcher_manager", + "@google_privacysandbox_servers_common//src/encryption/key_fetcher:key_fetcher_manager", + "@google_privacysandbox_servers_common//src/encryption/key_fetcher:private_key_fetcher", + "@google_privacysandbox_servers_common//src/encryption/key_fetcher:public_key_fetcher", ], ) @@ -188,11 +189,12 @@ cc_library( deps = [ "//api:bidding_auction_servers_cc_proto", "//services/common/encryption:key_fetcher_factory", + "//services/common/util:oblivious_http_utils", "@com_google_absl//absl/status", - "@google_privacysandbox_servers_common//src/cpp/communication:ohttp_utils", - "@google_privacysandbox_servers_common//src/cpp/encryption/key_fetcher/src:key_fetcher_manager", - "@google_privacysandbox_servers_common//src/cpp/logger:request_context_logger", - "@google_privacysandbox_servers_common//src/cpp/util/status_macro:status_macros", + "@google_privacysandbox_servers_common//src/communication:ohttp_utils", + "@google_privacysandbox_servers_common//src/encryption/key_fetcher:key_fetcher_manager", + "@google_privacysandbox_servers_common//src/logger:request_context_logger", + "@google_privacysandbox_servers_common//src/util/status_macro:status_macros", ], ) @@ -205,6 +207,81 @@ cc_test( ":select_ad_reactor_test_utils", "//services/common/test:random", "@com_google_googletest//:gtest", - "@google_privacysandbox_servers_common//src/cpp/communication:encoding_utils", + "@google_privacysandbox_servers_common//src/communication:encoding_utils", + "@google_privacysandbox_servers_common//src/encryption/key_fetcher:fake_key_fetcher_manager", + ], +) + +cc_library( + name = "validation_utils", + srcs = [ + "validation_utils.cc", + ], + hdrs = [ + "validation_utils.h", + ], + deps = [ + "//api:bidding_auction_servers_cc_proto", + "//services/common/util:error_accumulator", + ], +) + +cc_test( + name = "validation_utils_test", + size = "small", + srcs = [ + "validation_utils_test.cc", + ], + deps = [ + ":select_ad_reactor_test_utils", + ":validation_utils", + "//services/common/test:random", + "@com_google_googletest//:gtest", + ], +) + +cc_library( + name = "proto_mapping_util", + srcs = [ + "proto_mapping_util.cc", + ], + hdrs = [ + "proto_mapping_util.h", + ], + visibility = [ + "//services/seller_frontend_service:__pkg__", + "//tools/secure_invoke/payload_generator:__pkg__", + ], + deps = [ + ":encryption_util", + ":validation_utils", + "//api:bidding_auction_servers_cc_proto", + "//services/common/compression:gzip", + "//services/common/util:error_categories", + "//services/common/util:hpke_utils", + "//services/seller_frontend_service/data:seller_frontend_data", + 
"//services/seller_frontend_service/util:framing_utils", + "//services/seller_frontend_service/util:web_utils", + "@google_privacysandbox_servers_common//src/communication:encoding_utils", + "@google_privacysandbox_servers_common//src/logger:request_context_impl", + "@google_privacysandbox_servers_common//src/util/status_macro:status_macros", + ], +) + +cc_test( + name = "proto_mapping_util_test", + size = "small", + srcs = [ + "proto_mapping_util_test.cc", + ], + deps = [ + ":proto_mapping_util", + ":select_ad_reactor_test_utils", + "//services/common/encryption:crypto_client_factory", + "//services/common/test:random", + "//services/common/test/utils:ohttp_test_utils", + "@com_google_googletest//:gtest", + "@google_privacysandbox_servers_common//src/core/test/utils", + "@google_privacysandbox_servers_common//src/encryption/key_fetcher:fake_key_fetcher_manager", ], ) diff --git a/services/seller_frontend_service/util/config_param_parser.cc b/services/seller_frontend_service/util/config_param_parser.cc index de4521e8..b1cd69a6 100644 --- a/services/seller_frontend_service/util/config_param_parser.cc +++ b/services/seller_frontend_service/util/config_param_parser.cc @@ -25,7 +25,7 @@ #include "absl/strings/match.h" #include "rapidjson/document.h" #include "services/common/util/json_util.h" -#include "src/cpp/util/status_macro/status_macros.h" +#include "src/util/status_macro/status_macros.h" namespace privacy_sandbox::bidding_auction_servers { diff --git a/services/seller_frontend_service/util/config_param_parser.h b/services/seller_frontend_service/util/config_param_parser.h index ef085292..9edc1c50 100644 --- a/services/seller_frontend_service/util/config_param_parser.h +++ b/services/seller_frontend_service/util/config_param_parser.h @@ -21,7 +21,7 @@ #include "absl/container/flat_hash_map.h" #include "absl/status/statusor.h" -#include "src/cpp/encryption/key_fetcher/interface/public_key_fetcher_interface.h" +#include "src/encryption/key_fetcher/interface/public_key_fetcher_interface.h" namespace privacy_sandbox::bidding_auction_servers { diff --git a/services/seller_frontend_service/util/encryption_util.cc b/services/seller_frontend_service/util/encryption_util.cc index b6506806..283095eb 100644 --- a/services/seller_frontend_service/util/encryption_util.cc +++ b/services/seller_frontend_service/util/encryption_util.cc @@ -19,6 +19,8 @@ #include #include "absl/strings/escaping.h" +#include "services/common/util/oblivious_http_utils.h" +#include "src/include/openssl/hpke.h" namespace privacy_sandbox::bidding_auction_servers { @@ -55,9 +57,8 @@ DecryptOHTTPEncapsulatedHpkeCiphertext( if (!private_key.has_value()) { PS_VLOG(2) << "Unable to retrieve private key for key ID: " << str_key_id; - return absl::Status( - absl::StatusCode::kNotFound, - absl::StrFormat(kMissingPrivateKey, std::move(str_key_id))); + return absl::Status(absl::StatusCode::kNotFound, + absl::StrFormat(kMissingPrivateKey, str_key_id)); } PS_VLOG(3) << "Private Key Id: " << private_key->key_id << ", Key Hex: " @@ -80,6 +81,57 @@ DecryptOHTTPEncapsulatedHpkeCiphertext( *ohttp_request, *private_key, parsed_encapsulated_request->request_label); } +absl::StatusOr HpkeEncryptAndOHTTPEncapsulate( + std::string plaintext, absl::string_view request_label, + server_common::KeyFetcherManagerInterface& key_fetcher_manager, + const server_common::CloudPlatform& cloud_platform) { + if (request_label != server_common::kBiddingAuctionOhttpRequestLabel && + request_label != + quiche::ObliviousHttpHeaderKeyConfig::kOhttpRequestLabel) { + 
return absl::Status( + absl::StatusCode::kInvalidArgument, + absl::StrCat("Request label must be one of: ", + server_common::kBiddingAuctionOhttpRequestLabel, ", ", + quiche::ObliviousHttpHeaderKeyConfig::kOhttpRequestLabel)); + } + auto public_key = key_fetcher_manager.GetPublicKey(cloud_platform); + if (!public_key.ok()) { + std::string error = + absl::StrCat("Could not find public key for encryption: ", + public_key.status().message()); + ABSL_LOG(ERROR) << error; + return absl::InternalError(std::move(error)); + } + + std::string unescaped_public_key_bytes; + if (!absl::Base64Unescape(public_key->public_key(), + &unescaped_public_key_bytes)) { + return absl::InternalError( + absl::StrCat("Failed to base64 decode the fetched public key: ", + public_key->public_key())); + } + + auto request = ToObliviousHTTPRequest( + plaintext, unescaped_public_key_bytes, std::stoi(public_key->key_id()), + // Must match the ones used by server_common::DecryptEncapsulatedRequest. + EVP_HPKE_DHKEM_X25519_HKDF_SHA256, EVP_HPKE_HKDF_SHA256, + EVP_HPKE_AES_256_GCM, request_label); + if (!request.ok()) { + return request.status(); + } + return OhttpHpkeEncryptedMessage(*std::move(request), request_label); +} + +OhttpHpkeEncryptedMessage::OhttpHpkeEncryptedMessage( + quiche::ObliviousHttpRequest ohttp_request, absl::string_view request_label) + // Prepend with a zero byte to follow the new B&A request format that uses + // custom media types for request encryption/request decryption. + : ciphertext(request_label == + server_common::kBiddingAuctionOhttpRequestLabel + ? '\0' + ohttp_request.EncapsulateAndSerialize() + : ohttp_request.EncapsulateAndSerialize()), + context(std::move(ohttp_request).ReleaseContext()) {} + OhttpHpkeDecryptedMessage::OhttpHpkeDecryptedMessage( quiche::ObliviousHttpRequest& decrypted_request, server_common::PrivateKey& private_key, absl::string_view request_label) @@ -88,4 +140,12 @@ OhttpHpkeDecryptedMessage::OhttpHpkeDecryptedMessage( plaintext(decrypted_request.GetPlaintextData()), context(std::move(decrypted_request).ReleaseContext()) {} +OhttpHpkeDecryptedMessage::OhttpHpkeDecryptedMessage( + std::string plaintext, quiche::ObliviousHttpRequest::Context& context, + server_common::PrivateKey& private_key, absl::string_view request_label) + : private_key(std::move(private_key)), + request_label(request_label), + plaintext(std::move(plaintext)), + context(std::move(context)) {} + } // namespace privacy_sandbox::bidding_auction_servers diff --git a/services/seller_frontend_service/util/encryption_util.h b/services/seller_frontend_service/util/encryption_util.h index 58e37db2..92b25122 100644 --- a/services/seller_frontend_service/util/encryption_util.h +++ b/services/seller_frontend_service/util/encryption_util.h @@ -22,9 +22,9 @@ #include "absl/status/statusor.h" #include "quiche/oblivious_http/oblivious_http_gateway.h" -#include "src/cpp/communication/ohttp_utils.h" -#include "src/cpp/encryption/key_fetcher/src/key_fetcher_manager.h" -#include "src/cpp/logger/request_context_logger.h" +#include "src/communication/ohttp_utils.h" +#include "src/encryption/key_fetcher/key_fetcher_manager.h" +#include "src/logger/request_context_logger.h" namespace privacy_sandbox::bidding_auction_servers { @@ -59,6 +59,10 @@ struct OhttpHpkeDecryptedMessage { explicit OhttpHpkeDecryptedMessage( quiche::ObliviousHttpRequest& decrypted_request, server_common::PrivateKey& private_key, absl::string_view request_label); + + explicit OhttpHpkeDecryptedMessage( + std::string plaintext, 
quiche::ObliviousHttpRequest::Context& context, + server_common::PrivateKey& private_key, absl::string_view request_label); + }; // Decrypt a payload encrypted with OHTTP based HPKE using the common library @@ -70,6 +74,34 @@ DecryptOHTTPEncapsulatedHpkeCiphertext( absl::string_view ciphertext, server_common::KeyFetcherManagerInterface& key_fetcher_manager); +// This struct holds elements related to an HPKE +// encrypted message which was encrypted using the OHTTP +// framing standard. The related artifacts can be used +// to decrypt responses from the clients to whom this +// encrypted message was sent. +struct OhttpHpkeEncryptedMessage { + // Encrypted message. + std::string ciphertext; + + // OHTTP context required for decryption. + quiche::ObliviousHttpRequest::Context context; + + explicit OhttpHpkeEncryptedMessage(quiche::ObliviousHttpRequest ohttp_request, + absl::string_view request_label); +}; + +// Encrypt a payload using an HPKE key and encapsulate it in OHTTP format. +// It first creates an OHTTP object with CreateClientObliviousRequest +// and then encrypts it using EncapsulateAndSerialize. +// This is used for encrypting strings that have already been +// encoded and compressed as required. +absl::StatusOr HpkeEncryptAndOHTTPEncapsulate( + // Taken by value because the plaintext is consumed inside this + // method. + std::string plaintext, absl::string_view request_label, + server_common::KeyFetcherManagerInterface& key_fetcher_manager, + const server_common::CloudPlatform& cloud_platform); + } // namespace privacy_sandbox::bidding_auction_servers #endif // SERVICES_SELLER_FRONTEND_SERVICE_UTIL_ENCRYPTION_UTIL_H_ diff --git a/services/seller_frontend_service/util/encryption_util_test.cc b/services/seller_frontend_service/util/encryption_util_test.cc index 3ef44074..09289dc3 100644 --- a/services/seller_frontend_service/util/encryption_util_test.cc +++ b/services/seller_frontend_service/util/encryption_util_test.cc @@ -15,9 +15,12 @@ #include "services/seller_frontend_service/util/encryption_util.h" +#include + #include "gtest/gtest.h" #include "services/seller_frontend_service/util/select_ad_reactor_test_utils.h" -#include "src/cpp/communication/encoding_utils.h" +#include "src/communication/encoding_utils.h" +#include "src/encryption/key_fetcher/fake_key_fetcher_manager.h" namespace privacy_sandbox::bidding_auction_servers { namespace { @@ -26,14 +29,41 @@ std::unique_ptr MockKeyFetcherReturningDefaultPrivateKey() { auto key_fetcher_manager = std::make_unique(); - server_common::PrivateKey private_key; EXPECT_CALL(*key_fetcher_manager, GetPrivateKey) .Times(1) .WillOnce([](const google::scp::cpio::PublicPrivateKeyPairId& key_id) { + // Returns test mode keys + server_common::FakeKeyFetcherManager fake_key_fetcher_manager; + auto key = fake_key_fetcher_manager.GetPrivateKey(key_id); // Verify key id is sent as is. - EXPECT_EQ(key_id, std::to_string(HpkeKeyset{}.key_id)); + EXPECT_EQ(key_id, key->key_id); // Return default private key from test utils. - return GetPrivateKey(); + return key; + }); + return key_fetcher_manager; +} + +std::unique_ptr +MockKeyFetcherReturningPublicAndPrivateKey( + const server_common::CloudPlatform& expected_cp) { + auto key_fetcher_manager = + std::make_unique(); + EXPECT_CALL(*key_fetcher_manager, GetPublicKey) + .Times(1) + .WillOnce( + [expected_cp](const server_common::CloudPlatform& cloud_platform) { + // Verify cloud platform is sent as is.
+ EXPECT_EQ(expected_cp, cloud_platform); + // Returns test mode keys + server_common::FakeKeyFetcherManager fake_key_fetcher_manager; + return fake_key_fetcher_manager.GetPublicKey(cloud_platform); + }); + EXPECT_CALL(*key_fetcher_manager, GetPrivateKey) + .Times(1) + .WillOnce([](const google::scp::cpio::PublicPrivateKeyPairId& key_id) { + // Returns test mode keys + server_common::FakeKeyFetcherManager fake_key_fetcher_manager; + return fake_key_fetcher_manager.GetPrivateKey(key_id); }); return key_fetcher_manager; } @@ -51,6 +81,15 @@ MockKeyFetcherReturningNoPrivateKey() { return key_fetcher_manager; } +void DecodeAndTestProtectedAuctionInput( + std::string& plaintext, + const ProtectedAuctionInput& protected_auction_input) { + auto deframed_req = server_common::DecodeRequestPayload(plaintext); + ASSERT_TRUE(deframed_req.ok()) << deframed_req.status(); + EXPECT_EQ(deframed_req->compressed_data, + protected_auction_input.SerializeAsString()); +} + TEST(DecryptOHTTPEncapsulatedHpkeCiphertextTest, DecryptsRequestSuccessfully) { auto key_fetcher_manager = MockKeyFetcherReturningDefaultPrivateKey(); auto [protected_auction_input, request, context] = @@ -65,10 +104,8 @@ TEST(DecryptOHTTPEncapsulatedHpkeCiphertextTest, DecryptsRequestSuccessfully) { EXPECT_EQ((*output)->private_key.private_key, expected_private_key.private_key); - auto deframed_req = server_common::DecodeRequestPayload((*output)->plaintext); - ASSERT_TRUE(deframed_req.ok()) << deframed_req.status(); - EXPECT_EQ(deframed_req->compressed_data, - protected_auction_input.SerializeAsString()); + DecodeAndTestProtectedAuctionInput((*output)->plaintext, + protected_auction_input); } TEST(DecryptOHTTPEncapsulatedHpkeCiphertextTest, @@ -93,5 +130,86 @@ TEST(DecryptOHTTPEncapsulatedHpkeCiphertextTest, ASSERT_FALSE(output.ok()); EXPECT_EQ(output.status().code(), absl::StatusCode::kNotFound); } + +TEST(HpkeEncryptAndOHTTPEncapsulateTest, EncryptsString_WithBhttpLabel) { + auto key_fetcher_manager = MockKeyFetcherReturningPublicAndPrivateKey( + server_common::CloudPlatform::kGcp); + std::string input = "random-string"; + auto output = HpkeEncryptAndOHTTPEncapsulate( + input, quiche::ObliviousHttpHeaderKeyConfig::kOhttpRequestLabel, + *key_fetcher_manager, server_common::CloudPlatform::kGcp); + ASSERT_TRUE(output.ok()) << output.status(); + + // Decrypt and test equality with input object. 
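+  // Round-tripping through DecryptOHTTPEncapsulatedHpkeCiphertext also
+  // covers label detection: with the bhttp label no zero byte is prepended
+  // to the ciphertext (see the OhttpHpkeEncryptedMessage constructor).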
+ absl::StatusOr> decrypted_output = + DecryptOHTTPEncapsulatedHpkeCiphertext(output->ciphertext, + *key_fetcher_manager); + ASSERT_TRUE(decrypted_output.ok()) << decrypted_output.status(); + EXPECT_EQ((*decrypted_output)->plaintext, input); +} + +TEST(HpkeEncryptAndOHTTPEncapsulateTest, + FailsStringEncryption_WithRandomLabel) { + std::string input = "random-string"; + std::string label = "message/random label"; + auto key_fetcher_manager = + std::make_unique(); + auto output = HpkeEncryptAndOHTTPEncapsulate( + input, label, *key_fetcher_manager, server_common::CloudPlatform::kGcp); + ASSERT_FALSE(output.ok()); +} + +std::pair GetProtectedAuctionInput() { + auto [protected_auction_input, request, context] = + GetSampleSelectAdRequest(CLIENT_TYPE_ANDROID, ""); + std::string encoded_request = protected_auction_input.SerializeAsString(); + absl::StatusOr framed_request = + server_common::EncodeResponsePayload( + server_common::CompressionType::kGzip, encoded_request, + GetEncodedDataSize(encoded_request.size())); + EXPECT_TRUE(framed_request.ok()) << framed_request.status(); + return {std::move(*framed_request), std::move(protected_auction_input)}; +} + +TEST(HpkeEncryptAndOHTTPEncapsulateTest, + EncryptsProtectedAuctionInput_WithBiddingAuctionOhttpLabel_ForGcp) { + auto key_fetcher_manager = MockKeyFetcherReturningPublicAndPrivateKey( + server_common::CloudPlatform::kGcp); + auto [framed_request, protected_auction_input] = GetProtectedAuctionInput(); + + auto output = HpkeEncryptAndOHTTPEncapsulate( + framed_request, kBiddingAuctionOhttpRequestLabel, *key_fetcher_manager, + server_common::CloudPlatform::kGcp); + ASSERT_TRUE(output.ok()) << output.status(); + + // Decrypt and test equality with input object. + absl::StatusOr> decrypted_output = + DecryptOHTTPEncapsulatedHpkeCiphertext(output->ciphertext, + *key_fetcher_manager); + ASSERT_TRUE(decrypted_output.ok()) << decrypted_output.status(); + DecodeAndTestProtectedAuctionInput((*decrypted_output)->plaintext, + protected_auction_input); +} + +TEST(HpkeEncryptAndOHTTPEncapsulateTest, + EncryptsProtectedAuctionInput_WithBiddingAuctionOhttpLabel_ForAws) { + auto key_fetcher_manager = MockKeyFetcherReturningPublicAndPrivateKey( + server_common::CloudPlatform::kAws); + auto [framed_request, protected_auction_input] = GetProtectedAuctionInput(); + + auto output = HpkeEncryptAndOHTTPEncapsulate( + framed_request, kBiddingAuctionOhttpRequestLabel, *key_fetcher_manager, + server_common::CloudPlatform::kAws); + ASSERT_TRUE(output.ok()) << output.status(); + + // Decrypt and test equality with input object.
+ absl::StatusOr> decrypted_output = + DecryptOHTTPEncapsulatedHpkeCiphertext(output->ciphertext, + *key_fetcher_manager); + ASSERT_TRUE(decrypted_output.ok()) << decrypted_output.status(); + DecodeAndTestProtectedAuctionInput((*decrypted_output)->plaintext, + protected_auction_input); +} + } // namespace } // namespace privacy_sandbox::bidding_auction_servers diff --git a/services/seller_frontend_service/util/key_fetcher_utils.cc b/services/seller_frontend_service/util/key_fetcher_utils.cc index 58aafbb7..979dc008 100644 --- a/services/seller_frontend_service/util/key_fetcher_utils.cc +++ b/services/seller_frontend_service/util/key_fetcher_utils.cc @@ -24,9 +24,9 @@ #include "services/common/constants/common_service_flags.h" #include "services/common/util/json_util.h" #include "services/seller_frontend_service/runtime_flags.h" -#include "src/cpp/encryption/key_fetcher/interface/key_fetcher_manager_interface.h" -#include "src/cpp/encryption/key_fetcher/src/fake_key_fetcher_manager.h" -#include "src/cpp/util/status_macro/status_macros.h" +#include "src/encryption/key_fetcher/fake_key_fetcher_manager.h" +#include "src/encryption/key_fetcher/interface/key_fetcher_manager_interface.h" +#include "src/util/status_macro/status_macros.h" namespace privacy_sandbox::bidding_auction_servers { @@ -94,4 +94,16 @@ CreateSfePublicKeyFetcher(const TrustedServersConfigClient& config_client) { return PublicKeyFetcherFactory::Create(endpoints_map); } +server_common::CloudPlatform ProtoCloudPlatformToScpCloudPlatform( + EncryptionCloudPlatform cloud_platform) { + switch (cloud_platform) { + case EncryptionCloudPlatform::ENCRYPTION_CLOUD_PLATFORM_AWS: + return server_common::CloudPlatform::kAws; + case EncryptionCloudPlatform::ENCRYPTION_CLOUD_PLATFORM_GCP: + return server_common::CloudPlatform::kGcp; + default: + return server_common::CloudPlatform::kLocal; + } +} + } // namespace privacy_sandbox::bidding_auction_servers diff --git a/services/seller_frontend_service/util/key_fetcher_utils.h b/services/seller_frontend_service/util/key_fetcher_utils.h index e50ecb37..bd3aaa58 100644 --- a/services/seller_frontend_service/util/key_fetcher_utils.h +++ b/services/seller_frontend_service/util/key_fetcher_utils.h @@ -20,8 +20,9 @@ #include #include +#include "api/bidding_auction_servers.pb.h" #include "services/common/clients/config/trusted_server_config_client.h" -#include "src/cpp/encryption/key_fetcher/interface/public_key_fetcher_interface.h" +#include "src/encryption/key_fetcher/interface/public_key_fetcher_interface.h" namespace privacy_sandbox::bidding_auction_servers { @@ -51,6 +52,9 @@ ParseCloudPlatformPublicKeysMap(absl::string_view public_keys_endpoint_map_str); absl::StatusOr> CreateSfePublicKeyFetcher(const TrustedServersConfigClient& config_client); +server_common::CloudPlatform ProtoCloudPlatformToScpCloudPlatform( + EncryptionCloudPlatform cloud_platform); + } // namespace privacy_sandbox::bidding_auction_servers #endif // SERVICES_SELLER_FRONTEND_SERVICE_UTIL_KEY_FETCHER_UTILS_H_ diff --git a/services/seller_frontend_service/util/key_fetcher_utils_test.cc b/services/seller_frontend_service/util/key_fetcher_utils_test.cc index 5d93bf30..e4295fae 100644 --- a/services/seller_frontend_service/util/key_fetcher_utils_test.cc +++ b/services/seller_frontend_service/util/key_fetcher_utils_test.cc @@ -68,5 +68,24 @@ TEST(KeyFetcherUtilsTest, EXPECT_EQ(map.status().message(), kEmptyEndpointError); } +TEST(ProtoCloudPlatformToScpCloudPlatformTest, ReturnsGcpForGcp) { + 
EXPECT_EQ(server_common::CloudPlatform::kGcp, + ProtoCloudPlatformToScpCloudPlatform( + EncryptionCloudPlatform::ENCRYPTION_CLOUD_PLATFORM_GCP)); +} + +TEST(ProtoCloudPlatformToScpCloudPlatformTest, ReturnsAwsForAws) { + EXPECT_EQ(server_common::CloudPlatform::kAws, + ProtoCloudPlatformToScpCloudPlatform( + EncryptionCloudPlatform::ENCRYPTION_CLOUD_PLATFORM_AWS)); +} + +TEST(ProtoCloudPlatformToScpCloudPlatformTest, ReturnsLocalForUnspecified) { + EXPECT_EQ( + server_common::CloudPlatform::kLocal, + ProtoCloudPlatformToScpCloudPlatform( + EncryptionCloudPlatform::ENCRYPTION_CLOUD_PLATFORM_UNSPECIFIED)); +} + } // namespace } // namespace privacy_sandbox::bidding_auction_servers diff --git a/services/seller_frontend_service/util/proto_mapping_util.cc b/services/seller_frontend_service/util/proto_mapping_util.cc new file mode 100644 index 00000000..a8a82b4e --- /dev/null +++ b/services/seller_frontend_service/util/proto_mapping_util.cc @@ -0,0 +1,476 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "services/seller_frontend_service/util/proto_mapping_util.h" + +#include "services/seller_frontend_service/util/framing_utils.h" + +namespace privacy_sandbox::bidding_auction_servers { + +namespace { + +using server_common::log::ContextImpl; + +void SetReportingUrls(const WinReportingUrls& win_reporting_urls, + AuctionResult& auction_result) { + auto* mutable_win_reporting_urls = + auction_result.mutable_win_reporting_urls(); + mutable_win_reporting_urls->mutable_buyer_reporting_urls()->set_reporting_url( + win_reporting_urls.buyer_reporting_urls().reporting_url()); + *mutable_win_reporting_urls->mutable_buyer_reporting_urls() + ->mutable_interaction_reporting_urls() = + win_reporting_urls.buyer_reporting_urls().interaction_reporting_urls(); + mutable_win_reporting_urls->mutable_top_level_seller_reporting_urls() + ->set_reporting_url( + win_reporting_urls.top_level_seller_reporting_urls().reporting_url()); + *mutable_win_reporting_urls->mutable_top_level_seller_reporting_urls() + ->mutable_interaction_reporting_urls() = + win_reporting_urls.top_level_seller_reporting_urls() + .interaction_reporting_urls(); + mutable_win_reporting_urls->mutable_component_seller_reporting_urls() + ->set_reporting_url( + win_reporting_urls.component_seller_reporting_urls().reporting_url()); + + *mutable_win_reporting_urls->mutable_component_seller_reporting_urls() + ->mutable_interaction_reporting_urls() = + win_reporting_urls.component_seller_reporting_urls() + .interaction_reporting_urls(); +} + +AuctionResult MapAdScoreToAuctionResult( + const std::optional& high_score, + const std::optional& error) { + AuctionResult auction_result; + if (error.has_value()) { + *auction_result.mutable_error() = *error; + } else if (high_score.has_value()) { + auction_result.set_is_chaff(false); + if (high_score->bid() > 0) { + auction_result.set_bid(high_score->bid()); + } + auction_result.set_score(high_score->desirability()); + 
auction_result.set_interest_group_name(high_score->interest_group_name()); + auction_result.set_interest_group_owner(high_score->interest_group_owner()); + auction_result.set_ad_render_url(high_score->render()); + auction_result.mutable_win_reporting_urls() + ->mutable_buyer_reporting_urls() + ->set_reporting_url(high_score->win_reporting_urls() + .buyer_reporting_urls() + .reporting_url()); + for (auto& [event, url] : high_score->win_reporting_urls() + .buyer_reporting_urls() + .interaction_reporting_urls()) { + auction_result.mutable_win_reporting_urls() + ->mutable_buyer_reporting_urls() + ->mutable_interaction_reporting_urls() + ->try_emplace(event, url); + } + SetReportingUrls(high_score->win_reporting_urls(), auction_result); + *auction_result.mutable_ad_component_render_urls() = + high_score->component_renders(); + auction_result.set_ad_type(high_score->ad_type()); + } else { + auction_result.set_is_chaff(true); + } + return auction_result; +} + +absl::StatusOr PackageAuctionResultCiphertext( + absl::string_view serialized_auction_result, + OhttpHpkeDecryptedMessage& decrypted_request) { + // Gzip compress. + PS_ASSIGN_OR_RETURN(std::string compressed_data, + GzipCompress(serialized_auction_result)); + + // Add framing bits and pad. + PS_ASSIGN_OR_RETURN( + std::string encoded_plaintext, + server_common::EncodeResponsePayload( + server_common::CompressionType::kGzip, compressed_data, + GetEncodedDataSize(compressed_data.size()))); + + // Encapsulate and encrypt with corresponding private key. + return server_common::EncryptAndEncapsulateResponse( + std::move(encoded_plaintext), decrypted_request.private_key, + decrypted_request.context, decrypted_request.request_label); +} + +absl::StatusOr PackageAuctionResultForWeb( + const std::optional& high_score, + const std::optional& maybe_bidding_group_map, + const std::optional& error, + OhttpHpkeDecryptedMessage& decrypted_request, ContextImpl& log_context) { + std::string error_msg; + absl::Notification wait_for_error_callback; + auto error_handler = [&wait_for_error_callback, + &error_msg](const grpc::Status& status) { + error_msg = status.error_message(); + wait_for_error_callback.Notify(); + }; + PS_VLOG(2, log_context) + << "IG bids map size: " + << (maybe_bidding_group_map.has_value() + ? absl::StrCat("found: ", maybe_bidding_group_map->size()) + : "Not found"); + absl::StatusOr serialized_data = + Encode(high_score, + maybe_bidding_group_map.has_value() ? *maybe_bidding_group_map + : IgsWithBidsMap(), + error, error_handler); + if (!serialized_data.ok()) { + wait_for_error_callback.WaitForNotification(); + return absl::Status(serialized_data.status().code(), error_msg); + } else { + PS_VLOG(1, log_context) + << "AuctionResult:\n" + << [](absl::string_view encoded_data) { + auto result = CborDecodeAuctionResultToProto(encoded_data); + return result.ok() ? result->DebugString() + : result.status().ToString(); + }(*serialized_data); + } + absl::string_view data_to_compress = + absl::string_view(reinterpret_cast((*serialized_data).data()), + (*serialized_data).size()); + return PackageAuctionResultCiphertext(data_to_compress, decrypted_request); +} + +absl::StatusOr PackageAuctionResultForApp( + const std::optional& high_score, + const std::optional& error, + OhttpHpkeDecryptedMessage& decrypted_request, ContextImpl& log_context) { + // Map to an AuctionResult proto and serialize it to a byte array.
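+  // Unlike the web path above, which CBOR-encodes via Encode(), Android
+  // clients consume the serialized proto directly.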
+ std::string serialized_result = + MapAdScoreToAuctionResult(high_score, error).SerializeAsString(); + PS_VLOG(1, log_context) << "AuctionResult:\n" << serialized_result; + return PackageAuctionResultCiphertext(serialized_result, decrypted_request); +} + +absl::StatusOr PackageAuctionResultForInvalid( + ClientType client_type) { + return absl::Status(absl::StatusCode::kInvalidArgument, + absl::StrCat(kUnsupportedClientType, " (", + ClientType_Name(client_type), ")")); +} + +} // namespace + +AuctionResult AdScoreToAuctionResult( + const std::optional& high_score, + std::optional bidding_groups, + const std::optional& error, + AuctionScope auction_scope, absl::string_view seller, + const std::variant& + protected_auction_input, + absl::string_view top_level_seller) { + AuctionResult auction_result = MapAdScoreToAuctionResult(high_score, error); + if (high_score.has_value() && + auction_scope == + AuctionScope::AUCTION_SCOPE_SERVER_COMPONENT_MULTI_SELLER) { + absl::string_view generation_id; + std::visit( + [&generation_id](const auto& protected_auction_input) { + generation_id = protected_auction_input.generation_id(); + }, + protected_auction_input); + auction_result.mutable_auction_params()->set_ciphertext_generation_id( + generation_id); + auction_result.mutable_auction_params()->set_component_seller(seller); + auction_result.set_top_level_seller(top_level_seller); + if (bidding_groups.has_value()) { + *auction_result.mutable_bidding_groups() = (*std::move(bidding_groups)); + } + } + return auction_result; +} + +std::unique_ptr +CreateTopLevelScoreAdsRawRequest( + const SelectAdRequest::AuctionConfig& auction_config, + std::variant& + protected_auction_input, + std::vector& component_auction_results) { + auto raw_request = std::make_unique(); + *raw_request->mutable_auction_signals() = auction_config.auction_signals(); + *raw_request->mutable_seller_signals() = auction_config.seller_signals(); + *raw_request->mutable_seller() = auction_config.seller(); + *raw_request->mutable_seller_currency() = auction_config.seller_currency(); + // Move Component Auctions Results. 
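+  // Chaff results carry no usable bids, so they are dropped here rather
+  // than forwarded to the top-level scoring request.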
+ for (auto component_auction_itr = + std::make_move_iterator(component_auction_results.begin()); + component_auction_itr != + std::make_move_iterator(component_auction_results.end()); + ++component_auction_itr) { + if (!component_auction_itr->is_chaff()) { + raw_request->mutable_component_auction_results()->Add( + std::move(*component_auction_itr)); + } + } + std::visit( + [&raw_request, &auction_config](const auto& protected_auction_input) { + raw_request->set_publisher_hostname( + protected_auction_input.publisher_name()); + raw_request->set_enable_debug_reporting( + protected_auction_input.enable_debug_reporting()); + auto* log_context = raw_request->mutable_log_context(); + log_context->set_generation_id(protected_auction_input.generation_id()); + log_context->set_adtech_debug_id(auction_config.seller_debug_id()); + if (protected_auction_input.has_consented_debug_config()) { + *raw_request->mutable_consented_debug_config() = + protected_auction_input.consented_debug_config(); + } + }, + protected_auction_input); + return raw_request; +} + +absl::StatusOr CreateWinningAuctionResultCiphertext( + const ScoreAdsResponse::AdScore& ad_score, + const std::optional& bidding_group_map, + ClientType client_type, OhttpHpkeDecryptedMessage& decrypted_request, + ContextImpl& log_context) { + absl::StatusOr auction_result_ciphertext; + switch (client_type) { + case CLIENT_TYPE_ANDROID: + return PackageAuctionResultForApp(ad_score, /*error =*/std::nullopt, + decrypted_request, log_context); + case CLIENT_TYPE_BROWSER: + return PackageAuctionResultForWeb(ad_score, bidding_group_map, + /*error =*/std::nullopt, + decrypted_request, log_context); + default: + return PackageAuctionResultForInvalid(client_type); + } +} + +absl::StatusOr CreateErrorAuctionResultCiphertext( + const AuctionResult::Error& auction_error, ClientType client_type, + OhttpHpkeDecryptedMessage& decrypted_request, ContextImpl& log_context) { + switch (client_type) { + case CLIENT_TYPE_ANDROID: + return PackageAuctionResultForApp( + /*high_score=*/std::nullopt, auction_error, decrypted_request, + log_context); + case CLIENT_TYPE_BROWSER: + return PackageAuctionResultForWeb( + /*high_score=*/std::nullopt, /*maybe_bidding_group_map=*/std::nullopt, + auction_error, decrypted_request, log_context); + default: + return PackageAuctionResultForInvalid(client_type); + } +} + +absl::StatusOr CreateChaffAuctionResultCiphertext( + ClientType client_type, OhttpHpkeDecryptedMessage& decrypted_request, + ContextImpl& log_context) { + switch (client_type) { + case CLIENT_TYPE_ANDROID: + return PackageAuctionResultForApp( + /*high_score=*/std::nullopt, /*error =*/std::nullopt, + decrypted_request, log_context); + case CLIENT_TYPE_BROWSER: + return PackageAuctionResultForWeb( + /*high_score=*/std::nullopt, /*maybe_bidding_group_map=*/std::nullopt, + /*error =*/std::nullopt, decrypted_request, log_context); + default: + return PackageAuctionResultForInvalid(client_type); + } +} + +IgsWithBidsMap GetBuyerIgsWithBidsMap( + std::vector& component_auction_bidding_groups) { + absl::flat_hash_map> + colliding_buyer_sets; + IgsWithBidsMap buyer_to_ig_idx_map; + for (IgsWithBidsMap& component_auction_ig_group : + component_auction_bidding_groups) { + PS_VLOG(1) << "Size of input buyer bids map " + << component_auction_ig_group.size(); + for (auto& [buyer, ig_idx] : component_auction_ig_group) { + // Check if the key already exists in the output map + const auto& it = buyer_to_ig_idx_map.find(buyer); + if (it == buyer_to_ig_idx_map.end()) { + // Insert the 
entire entry + buyer_to_ig_idx_map.insert({buyer, std::move(ig_idx)}); + PS_VLOG(1) << "Inserted for " << buyer; + continue; + } else if (auto buyer_ig_set = colliding_buyer_sets.find(buyer); + buyer_ig_set == colliding_buyer_sets.end()) { + // Not in colliding buyer set but present in buyer_to_ig_idx_map. + // Add values from previous CARs. + absl::flat_hash_set ig_set(it->second.index().begin(), + it->second.index().end()); + PS_VLOG(1) << "Inserted in colliding for " << buyer << " " + << ig_set.size(); + colliding_buyer_sets.insert({buyer, std::move(ig_set)}); + } + // Already in colliding buyer set. Add all values from current CAR. + PS_VLOG(1) << "Inserted in colliding for " << buyer << " " + << ig_idx.index().size(); + colliding_buyer_sets.at(buyer).insert(ig_idx.index().begin(), + ig_idx.index().end()); + } + } + + // Replace colliding ig index sets in proto. + for (auto& [buyer, ig_idx_set] : colliding_buyer_sets) { + buyer_to_ig_idx_map.at(buyer).mutable_index()->Assign(ig_idx_set.begin(), + ig_idx_set.end()); + } + PS_VLOG(1) << "Size of output buyer bids map " << buyer_to_ig_idx_map.size(); + return buyer_to_ig_idx_map; +} + +absl::StatusOr UnpackageServerAuctionComponentResult( + const SelectAdRequest::ComponentAuctionResult& component_auction_result, + CryptoClientWrapperInterface& crypto_client, + server_common::KeyFetcherManagerInterface& key_fetcher_manager) { + PS_ASSIGN_OR_RETURN( + std::string encoded, + HpkeDecrypt(component_auction_result.auction_result_ciphertext(), + component_auction_result.key_id(), crypto_client, + key_fetcher_manager)); + AuctionResult proto; + PS_ASSIGN_OR_RETURN((server_common::DecodedRequest decoded_request), + server_common::DecodeRequestPayload(encoded)); + + PS_ASSIGN_OR_RETURN(std::string payload, + GzipDecompress(decoded_request.compressed_data)); + PS_VLOG(2) << "Parsing Auction Result: " << payload; + if (!proto.ParseFromArray(payload.data(), payload.size())) { + return absl::Status(absl::StatusCode::kInvalidArgument, kBadBinaryProto); + } + return proto; +} + +std::vector DecryptAndValidateComponentAuctionResults( + const SelectAdRequest* request, absl::string_view seller_domain, + absl::string_view request_generation_id, + CryptoClientWrapperInterface& crypto_client, + server_common::KeyFetcherManagerInterface& key_fetcher_manager, + ErrorAccumulator& error_accumulator, ContextImpl& log_context) { + std::vector component_auction_results; + component_auction_results.reserve(request->component_auction_results_size()); + for (const auto& enc_auction_result : request->component_auction_results()) { + auto auction_result = UnpackageServerAuctionComponentResult( + enc_auction_result, crypto_client, key_fetcher_manager); + if (!auction_result.ok()) { + std::string error_msg = + absl::StrFormat(kErrorDecryptingAuctionResultError, + auction_result.status().message()); + PS_VLOG(2, log_context) << error_msg; + // Report the error. It will be returned to the client later if none + // of the auction results could be decoded. + error_accumulator.ReportError(ErrorVisibility::AD_SERVER_VISIBLE, + std::move(error_msg), + ErrorCode::CLIENT_SIDE); + continue; + } + PS_VLOG(2, log_context) + << "Successfully decrypted auction result ciphertext for: " + << auction_result->auction_params().component_seller(); + PS_VLOG(1, log_context) + << "Bidding group size: " << auction_result->bidding_groups().size(); + // Add errors from AuctionResult to error_accumulator in + // ValidateComponentAuctionResult.
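+    // A failed validation only skips this result; the request can still
+    // proceed as long as at least one component result decodes and
+    // validates.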
+ if (!ValidateComponentAuctionResult(*auction_result, request_generation_id, + seller_domain, error_accumulator)) { + PS_VLOG(1, log_context) + << "Auction result skipped with failed validation for: " + << auction_result->auction_params().component_seller(); + continue; + } + PS_VLOG(2, log_context) + << "Successfully validated auction result for: " + << auction_result->auction_params().component_seller(); + component_auction_results.push_back(*std::move(auction_result)); + } + return component_auction_results; +} + +ProtectedAudienceInput DecryptProtectedAudienceInput( + absl::string_view encapsulated_req, + server_common::KeyFetcherManagerInterface& key_fetcher_manager, + ClientType client_type, ErrorAccumulator& error_accumulator) { + ProtectedAudienceInput protected_auction_input; + switch (client_type) { + case CLIENT_TYPE_ANDROID: + protected_auction_input = + AppProtectedAuctionInputDecodeHelper( + encapsulated_req, error_accumulator); + break; + case CLIENT_TYPE_BROWSER: + protected_auction_input = + WebProtectedAuctionInputDecodeHelper( + encapsulated_req, true, error_accumulator); + break; + default: + break; + } + PS_VLOG(2) << "Successfully decrypted protected audience input ciphertext"; + return protected_auction_input; +} + +ProtectedAuctionInput DecryptProtectedAuctionInput( + absl::string_view encapsulated_req, + server_common::KeyFetcherManagerInterface& key_fetcher_manager, + ClientType client_type, ErrorAccumulator& error_accumulator) { + ProtectedAuctionInput protected_auction_input; + switch (client_type) { + case CLIENT_TYPE_ANDROID: + protected_auction_input = + AppProtectedAuctionInputDecodeHelper( + encapsulated_req, error_accumulator); + break; + case CLIENT_TYPE_BROWSER: + protected_auction_input = + WebProtectedAuctionInputDecodeHelper( + encapsulated_req, true, error_accumulator); + break; + default: + break; + } + PS_VLOG(2) << "Successfully decrypted protected audience input ciphertext"; + return protected_auction_input; +} + +IgsWithBidsMap GetBiddingGroups( + const BuyerBidsResponseMap& shared_buyer_bids_map, + const absl::flat_hash_map& buyer_inputs) { + IgsWithBidsMap bidding_groups; + for (const auto& [buyer, ad_with_bids] : shared_buyer_bids_map) { + // Mapping from buyer to interest groups that are associated with non-zero + // bids. + absl::flat_hash_set buyer_interest_groups; + for (const auto& ad_with_bid : ad_with_bids->bids()) { + if (ad_with_bid.bid() > 0) { + buyer_interest_groups.insert(ad_with_bid.interest_group_name()); + } + } + const auto& buyer_input = buyer_inputs.at(buyer); + AuctionResult::InterestGroupIndex ar_interest_group_index; + int ig_index = 0; + for (const auto& interest_group : buyer_input.interest_groups()) { + // If the interest group name is one of the groups returned by the bidding + // service then record its index. 
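+      // The index refers to the group's position in the buyer's original
+      // interest_groups list, letting the client map results back without
+      // the server echoing group names.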
+ if (buyer_interest_groups.contains(interest_group.name())) { + ar_interest_group_index.add_index(ig_index); + } + ig_index++; + } + bidding_groups.try_emplace(buyer, std::move(ar_interest_group_index)); + } + return bidding_groups; +} + +} // namespace privacy_sandbox::bidding_auction_servers diff --git a/services/seller_frontend_service/util/proto_mapping_util.h b/services/seller_frontend_service/util/proto_mapping_util.h new file mode 100644 index 00000000..482de57e --- /dev/null +++ b/services/seller_frontend_service/util/proto_mapping_util.h @@ -0,0 +1,164 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#ifndef SERVICES_SELLER_FRONTEND_SERVICE_UTIL_PROTO_MAPPING_UTIL_H_ +#define SERVICES_SELLER_FRONTEND_SERVICE_UTIL_PROTO_MAPPING_UTIL_H_ + +#include +#include +#include +#include + +#include "absl/status/status.h" +#include "api/bidding_auction_servers.pb.h" +#include "services/common/compression/gzip.h" +#include "services/common/util/error_categories.h" +#include "services/common/util/hpke_utils.h" +#include "services/seller_frontend_service/data/scoring_signals.h" +#include "services/seller_frontend_service/util/encryption_util.h" +#include "services/seller_frontend_service/util/validation_utils.h" +#include "services/seller_frontend_service/util/web_utils.h" +#include "src/communication/encoding_utils.h" +#include "src/encryption/key_fetcher/key_fetcher_manager.h" +#include "src/logger/request_context_impl.h" +#include "src/util/status_macro/status_macros.h" + +namespace privacy_sandbox::bidding_auction_servers { + +using IgsWithBidsMap = + ::google::protobuf::Map; + +// Map fields from an AdScore proto to an Auction Result proto object. +AuctionResult AdScoreToAuctionResult( + const std::optional& high_score, + std::optional maybe_bidding_groups, + const std::optional& error, + AuctionScope auction_scope, absl::string_view seller, + const std::variant& + protected_auction_input, + absl::string_view top_level_seller = ""); + +// Builds a ScoreAdsRequest using component_auction_results. +// This function moves the elements from component_auction_results and +// signals fields from auction_config and protected_auction_input. +// These fields should not be used after this function has been called. +std::unique_ptr +CreateTopLevelScoreAdsRawRequest( + const SelectAdRequest::AuctionConfig& auction_config, + std::variant& + protected_auction_input, + std::vector& component_auction_results); + +// Encodes, compresses and encrypts AdScore and bidding groups map +// as auction_result_ciphertext. +absl::StatusOr CreateWinningAuctionResultCiphertext( + const ScoreAdsResponse::AdScore& high_score, + const std::optional& bidding_group_maps, + ClientType client_type, OhttpHpkeDecryptedMessage& decrypted_request, + server_common::log::ContextImpl& log_context); + +// Encodes, compresses and encrypts client error +// as auction_result_ciphertext. 
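+// The error payload takes the same compress/frame/pad/encrypt path as a
+// winning result, so its shape on the wire matches a regular response.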
+absl::StatusOr CreateErrorAuctionResultCiphertext( + const AuctionResult::Error& auction_error, ClientType client_type, + OhttpHpkeDecryptedMessage& decrypted_request, + server_common::log::ContextImpl& log_context); + +// Encodes, compresses and encrypts chaff response +// as auction_result_ciphertext. +absl::StatusOr CreateChaffAuctionResultCiphertext( + ClientType client_type, OhttpHpkeDecryptedMessage& decrypted_request, + server_common::log::ContextImpl& log_context); + +// Collate all Igs that received bids from all server component auction results. +IgsWithBidsMap GetBuyerIgsWithBidsMap( + std::vector& component_auction_bidding_groups); + +// This function is used to decode an encrypted and compressed +// Server Component Auction Result. +absl::StatusOr UnpackageServerAuctionComponentResult( + const SelectAdRequest::ComponentAuctionResult& component_auction_result, + CryptoClientWrapperInterface& crypto_client, + server_common::KeyFetcherManagerInterface& key_fetcher_manager); + +// Decrypts Component Auction Result ciphertext and validate AuctionResult +// objects. +std::vector DecryptAndValidateComponentAuctionResults( + const SelectAdRequest* request, absl::string_view seller_domain, + absl::string_view request_generation_id, + CryptoClientWrapperInterface& crypto_client, + server_common::KeyFetcherManagerInterface& key_fetcher_manager, + ErrorAccumulator& error_accumulator, + server_common::log::ContextImpl& log_context); + +template +T AppProtectedAuctionInputDecodeHelper(absl::string_view encoded_data, + ErrorAccumulator& error_accumulator) { + T protected_auction_input; + absl::StatusOr decoded_request = + server_common::DecodeRequestPayload(encoded_data); + if (!decoded_request.ok()) { + error_accumulator.ReportError( + ErrorVisibility::CLIENT_VISIBLE, + std::string(decoded_request.status().message()), + ErrorCode::CLIENT_SIDE); + return protected_auction_input; + } + + std::string payload = std::move(decoded_request->compressed_data); + if (!protected_auction_input.ParseFromArray(payload.data(), payload.size())) { + error_accumulator.ReportError(ErrorVisibility::CLIENT_VISIBLE, + kBadProtectedAudienceBinaryProto, + ErrorCode::CLIENT_SIDE); + } + + return protected_auction_input; +} + +template +T WebProtectedAuctionInputDecodeHelper(absl::string_view encoded_data, + bool fail_fast, + ErrorAccumulator& error_accumulator) { + absl::StatusOr decoded_request = + server_common::DecodeRequestPayload(encoded_data); + if (!decoded_request.ok()) { + error_accumulator.ReportError( + ErrorVisibility::CLIENT_VISIBLE, + std::string(decoded_request.status().message()), + ErrorCode::CLIENT_SIDE); + return T{}; + } + return Decode(decoded_request->compressed_data, error_accumulator, + fail_fast); +} + +ProtectedAudienceInput DecryptProtectedAudienceInput( + absl::string_view encapsulated_req, + server_common::KeyFetcherManagerInterface& key_fetcher_manager, + ClientType client_type, ErrorAccumulator& error_accumulator); + +ProtectedAuctionInput DecryptProtectedAuctionInput( + absl::string_view encapsulated_req, + server_common::KeyFetcherManagerInterface& key_fetcher_manager, + ClientType client_type, ErrorAccumulator& error_accumulator); + +// Gets the bidding groups after scoring is done. 
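+// Only interest groups whose ads came back with a bid greater than zero
+// are counted as bidding.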
+google::protobuf::Map +GetBiddingGroups( + const BuyerBidsResponseMap& shared_buyer_bids_map, + const absl::flat_hash_map& buyer_inputs); + +} // namespace privacy_sandbox::bidding_auction_servers + +#endif // SERVICES_SELLER_FRONTEND_SERVICE_UTIL_PROTO_MAPPING_UTIL_H_ diff --git a/services/seller_frontend_service/util/proto_mapping_util_test.cc b/services/seller_frontend_service/util/proto_mapping_util_test.cc new file mode 100644 index 00000000..4b0a064c --- /dev/null +++ b/services/seller_frontend_service/util/proto_mapping_util_test.cc @@ -0,0 +1,529 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "services/seller_frontend_service/util/proto_mapping_util.h" + +#include + +#include + +#include "absl/container/btree_map.h" +#include "gtest/gtest.h" +#include "services/common/encryption/crypto_client_factory.h" +#include "services/common/test/random.h" +#include "services/common/test/utils/ohttp_utils.h" +#include "services/seller_frontend_service/util/select_ad_reactor_test_utils.h" +#include "src/core/test/utils/proto_test_utils.h" +#include "src/encryption/key_fetcher/fake_key_fetcher_manager.h" + +namespace privacy_sandbox::bidding_auction_servers { +namespace { + +using google::scp::core::test::EqualsProto; +using server_common::log::ContextImpl; + +constexpr char kTestSeller[] = "sample-seller"; +constexpr char kBuyer1[] = "bg1"; +constexpr char kBuyer2[] = "bg2"; + +ScoreAdsResponse::AdScore MakeAnAdScore() { + // Populate rejection reasons and highest other buyers. 
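+  // Random values keep the mapping tests sensitive to dropped or swapped
+  // fields.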
+ return MakeARandomAdScore( + /*hob_buyer_entries = */ 2, + /*rejection_reason_ig_owners = */ 2, + /*rejection_reason_ig_per_owner = */ 2); +} + +AuctionResult::Error MakeAnError() { + AuctionResult::Error error; + error.set_code(MakeARandomInt(0, 10)); + error.set_message(MakeARandomString()); + return error; +} + +AuctionResult MapBasicScoreFieldsToAuctionResult( + const ScoreAdsResponse::AdScore& high_score) { + AuctionResult auction_result; + auction_result.set_is_chaff(false); + if (high_score.bid() > 0) { + auction_result.set_bid(high_score.bid()); + } + auction_result.set_score(high_score.desirability()); + auction_result.set_interest_group_name(high_score.interest_group_name()); + auction_result.set_interest_group_owner(high_score.interest_group_owner()); + auction_result.set_ad_render_url(high_score.render()); + auction_result.mutable_win_reporting_urls() + ->mutable_buyer_reporting_urls() + ->set_reporting_url(high_score.win_reporting_urls() + .buyer_reporting_urls() + .reporting_url()); + for (auto& [event, url] : high_score.win_reporting_urls() + .buyer_reporting_urls() + .interaction_reporting_urls()) { + auction_result.mutable_win_reporting_urls() + ->mutable_buyer_reporting_urls() + ->mutable_interaction_reporting_urls() + ->try_emplace(event, url); + } + auction_result.mutable_win_reporting_urls() + ->mutable_top_level_seller_reporting_urls() + ->set_reporting_url(high_score.win_reporting_urls() + .top_level_seller_reporting_urls() + .reporting_url()); + for (auto& [event, url] : high_score.win_reporting_urls() + .top_level_seller_reporting_urls() + .interaction_reporting_urls()) { + auction_result.mutable_win_reporting_urls() + ->mutable_top_level_seller_reporting_urls() + ->mutable_interaction_reporting_urls() + ->try_emplace(event, url); + } + + auction_result.mutable_win_reporting_urls() + ->mutable_component_seller_reporting_urls() + ->set_reporting_url(high_score.win_reporting_urls() + .component_seller_reporting_urls() + .reporting_url()); + for (auto& [event, url] : high_score.win_reporting_urls() + .component_seller_reporting_urls() + .interaction_reporting_urls()) { + auction_result.mutable_win_reporting_urls() + ->mutable_component_seller_reporting_urls() + ->mutable_interaction_reporting_urls() + ->try_emplace(event, url); + } + *auction_result.mutable_ad_component_render_urls() = + high_score.component_renders(); + auction_result.set_ad_type(high_score.ad_type()); + return auction_result; +} + +AuctionResult MapBasicErrorFieldsToAuctionResult( + const AuctionResult::Error& error) { + AuctionResult auction_result; + *auction_result.mutable_error() = error; + return auction_result; +} + +void MapServerComponentAuctionFieldsToAuctionResult( + AuctionResult* auction_result, absl::string_view seller, + absl::string_view generation_id) { + ASSERT_GT(generation_id.length(), 0); + ASSERT_GT(seller.length(), 0); + auction_result->mutable_auction_params()->set_ciphertext_generation_id( + generation_id); + auction_result->mutable_auction_params()->set_component_seller(seller); +} + +void MapServerComponentAuctionFieldsToAuctionResult( + AuctionResult* auction_result, absl::string_view seller, + const ProtectedAuctionInput& protected_auction_input) { + MapServerComponentAuctionFieldsToAuctionResult( + auction_result, seller, protected_auction_input.generation_id()); +} + +void MapServerComponentAuctionFieldsToAuctionResult( + AuctionResult* auction_result, absl::string_view seller, + const ProtectedAudienceInput& protected_audience_input) { + 
MapServerComponentAuctionFieldsToAuctionResult( + auction_result, seller, protected_audience_input.generation_id()); +} + +class AdScoreToAuctionResultTest : public testing::Test { + void SetUp() override { + valid_score_ = MakeAnAdScore(); + valid_seller_ = MakeARandomString(); + valid_error_ = MakeAnError(); + valid_audience_input_.set_generation_id(MakeARandomString()); + valid_auction_input_ = + MakeARandomProtectedAuctionInput(ClientType::CLIENT_TYPE_ANDROID); + } + + protected: + std::string empty_seller_; + ProtectedAudienceInput empty_audience_; + std::string valid_seller_; + ProtectedAudienceInput valid_audience_input_; + ProtectedAuctionInput valid_auction_input_; + ScoreAdsResponse::AdScore valid_score_; + AuctionResult::Error valid_error_; +}; + +TEST_F(AdScoreToAuctionResultTest, MapsAllFieldsForSingleSellerAuction) { + AuctionResult expected = MapBasicScoreFieldsToAuctionResult(valid_score_); + AuctionResult output = AdScoreToAuctionResult( + valid_score_, /*maybe_bidding_groups=*/std::nullopt, + /*error=*/std::nullopt, AuctionScope::AUCTION_SCOPE_SINGLE_SELLER, + empty_seller_, empty_audience_); + EXPECT_THAT(expected, EqualsProto(output)); +} + +TEST_F(AdScoreToAuctionResultTest, MapsAllFieldsForTopLevelAuction) { + AuctionResult expected = MapBasicScoreFieldsToAuctionResult(valid_score_); + AuctionResult output = AdScoreToAuctionResult( + valid_score_, /*maybe_bidding_groups=*/std::nullopt, + /*error=*/std::nullopt, + AuctionScope::AUCTION_SCOPE_SERVER_TOP_LEVEL_SELLER, empty_seller_, + empty_audience_); + EXPECT_THAT(expected, EqualsProto(output)); +} + +TEST_F(AdScoreToAuctionResultTest, MapsAllFieldsForDeviceComponentAuction) { + AuctionResult expected = MapBasicScoreFieldsToAuctionResult(valid_score_); + AuctionResult output = AdScoreToAuctionResult( + valid_score_, /*maybe_bidding_groups=*/std::nullopt, + /*error=*/std::nullopt, + AuctionScope::AUCTION_SCOPE_DEVICE_COMPONENT_MULTI_SELLER, empty_seller_, + empty_audience_); + EXPECT_THAT(expected, EqualsProto(output)); +} + +TEST_F(AdScoreToAuctionResultTest, + MapsAllFieldsForServerAuctionForProtectedAuctionInput) { + AuctionResult expected = MapBasicScoreFieldsToAuctionResult(valid_score_); + MapServerComponentAuctionFieldsToAuctionResult(&expected, valid_seller_, + valid_auction_input_); + AuctionResult output = AdScoreToAuctionResult( + valid_score_, /*maybe_bidding_groups=*/std::nullopt, + /*error=*/std::nullopt, + AuctionScope::AUCTION_SCOPE_SERVER_COMPONENT_MULTI_SELLER, valid_seller_, + valid_auction_input_); + EXPECT_THAT(expected, EqualsProto(output)); +} + +TEST_F(AdScoreToAuctionResultTest, + MapsAllFieldsForServerAuctionForProtectedAudienceInput) { + AuctionResult expected = MapBasicScoreFieldsToAuctionResult(valid_score_); + MapServerComponentAuctionFieldsToAuctionResult(&expected, valid_seller_, + valid_audience_input_); + AuctionResult output = AdScoreToAuctionResult( + valid_score_, /*maybe_bidding_groups=*/std::nullopt, + /*error=*/std::nullopt, + AuctionScope::AUCTION_SCOPE_SERVER_COMPONENT_MULTI_SELLER, valid_seller_, + valid_audience_input_); + EXPECT_THAT(expected, EqualsProto(output)); +} + +TEST_F(AdScoreToAuctionResultTest, MapsErrorOverAdScoreIfPresent) { + AuctionResult expected = MapBasicErrorFieldsToAuctionResult(valid_error_); + AuctionResult output = AdScoreToAuctionResult( + std::nullopt, /*maybe_bidding_groups=*/std::nullopt, valid_error_, + AuctionScope::AUCTION_SCOPE_SINGLE_SELLER, empty_seller_, + empty_audience_); + EXPECT_THAT(expected, EqualsProto(output)); +} + 
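+// Test-local mirror of the production field mapping: builds the +// ScoreAdsRawRequest that CreateTopLevelScoreAdsRawRequest is expected to +// produce from the same auction config, protected input variant and +// component auction results.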
+ScoreAdsRequest::ScoreAdsRawRequest MapScoreAdsRawRequest( + const SelectAdRequest::AuctionConfig& auction_config, + std::variant<ProtectedAudienceInput, ProtectedAuctionInput>& + protected_auction_input, + std::vector<AuctionResult>& component_auction_results) { + ScoreAdsRequest::ScoreAdsRawRequest raw_request; + *raw_request.mutable_auction_signals() = auction_config.auction_signals(); + *raw_request.mutable_seller_signals() = auction_config.seller_signals(); + *raw_request.mutable_seller() = auction_config.seller(); + *raw_request.mutable_seller_currency() = auction_config.seller_currency(); + + raw_request.mutable_component_auction_results()->Assign( + component_auction_results.begin(), component_auction_results.end()); + std::visit( + [&raw_request, &auction_config](const auto& protected_auction_input) { + raw_request.set_publisher_hostname( + protected_auction_input.publisher_name()); + raw_request.set_enable_debug_reporting( + protected_auction_input.enable_debug_reporting()); + auto* log_context = raw_request.mutable_log_context(); + log_context->set_generation_id(protected_auction_input.generation_id()); + log_context->set_adtech_debug_id(auction_config.seller_debug_id()); + if (protected_auction_input.has_consented_debug_config()) { + *raw_request.mutable_consented_debug_config() = + protected_auction_input.consented_debug_config(); + } + }, + protected_auction_input); + return raw_request; +} + +template <typename T> +class CreateScoreAdsRawRequestTest : public ::testing::Test { + protected: + void SetUp() override { + for (int i = 0; i < 10; i++) { + component_auctions_list_.emplace_back( + MakeARandomSingleSellerAuctionResult()); + } + auto [protected_auction_input, request, context] = + GetSampleSelectAdRequest<T>(CLIENT_TYPE_BROWSER, kTestSeller, + /*is_consented_debug=*/true); + protected_auction_input_ = {protected_auction_input}; + request_ = request; + expected_ = MapScoreAdsRawRequest(request_.auction_config(), + protected_auction_input_, + component_auctions_list_); + } + + public: + std::vector<AuctionResult> component_auctions_list_; + ScoreAdsRequest::ScoreAdsRawRequest expected_; + std::variant<ProtectedAudienceInput, ProtectedAuctionInput> + protected_auction_input_; + SelectAdRequest request_; +}; + +using ProtectedAuctionInputTypes = + ::testing::Types<ProtectedAudienceInput, ProtectedAuctionInput>; +TYPED_TEST_SUITE(CreateScoreAdsRawRequestTest, ProtectedAuctionInputTypes); + +TYPED_TEST(CreateScoreAdsRawRequestTest, MapsAllFields) { + MapScoreAdsRawRequest(this->request_.auction_config(), + this->protected_auction_input_, + this->component_auctions_list_); + auto output = CreateTopLevelScoreAdsRawRequest( + this->request_.auction_config(), this->protected_auction_input_, + this->component_auctions_list_); + EXPECT_THAT(this->expected_, EqualsProto(*output)); +} + +TYPED_TEST(CreateScoreAdsRawRequestTest, IgnoresChaffResults) { + AuctionResult chaff_res; + chaff_res.set_is_chaff(true); + std::vector<AuctionResult> list_with_chaff = this->component_auctions_list_; + list_with_chaff.push_back(chaff_res); + MapScoreAdsRawRequest(this->request_.auction_config(), + this->protected_auction_input_, + this->component_auctions_list_); + auto output = CreateTopLevelScoreAdsRawRequest( + this->request_.auction_config(), this->protected_auction_input_, + list_with_chaff); + EXPECT_THAT(this->expected_, EqualsProto(*output)); +} + +TEST(GetBuyerIgsWithBidsMapTest, CollatesMapsFromAllAuctionResults) { + std::vector<IgsWithBidsMap> input; + input.emplace_back(MakeARandomComponentAuctionResult( + MakeARandomString(), MakeARandomString(), {kBuyer1}) + .bidding_groups()); + input.emplace_back(MakeARandomComponentAuctionResult( + MakeARandomString(), MakeARandomString(), {kBuyer2}) +
.bidding_groups()); + + // Copy to expected. + std::vector<IgsWithBidsMap> expected = input; + IgsWithBidsMap output = GetBuyerIgsWithBidsMap(input); + EXPECT_EQ(output.size(), 2); + ASSERT_TRUE(output.find(kBuyer1) != output.end()); + EXPECT_THAT(output.at(kBuyer1), EqualsProto(expected[0].at(kBuyer1))); + ASSERT_TRUE(output.find(kBuyer2) != output.end()); + EXPECT_THAT(output.at(kBuyer2), EqualsProto(expected[1].at(kBuyer2))); +} + +TEST(GetBuyerIgsWithBidsMapTest, MergesKeysAcrossAuctionResults) { + std::vector<IgsWithBidsMap> input; + input.emplace_back(MakeARandomComponentAuctionResult( + MakeARandomString(), MakeARandomString(), {kBuyer1}) + .bidding_groups()); + input.emplace_back(MakeARandomComponentAuctionResult( + MakeARandomString(), MakeARandomString(), {kBuyer1}) + .bidding_groups()); + + absl::flat_hash_set<int32_t> ig_idx_set; + ig_idx_set.insert(input[0].at(kBuyer1).index().begin(), + input[0].at(kBuyer1).index().end()); + ig_idx_set.insert(input[1].at(kBuyer1).index().begin(), + input[1].at(kBuyer1).index().end()); + + AuctionResult::InterestGroupIndex expected_ig_idx; + expected_ig_idx.mutable_index()->Assign(ig_idx_set.begin(), ig_idx_set.end()); + + IgsWithBidsMap output = GetBuyerIgsWithBidsMap(input); + EXPECT_EQ(output.size(), 1); + ASSERT_TRUE(output.find(kBuyer1) != output.end()); + EXPECT_THAT(output.at(kBuyer1), EqualsProto(expected_ig_idx)); +} + +class UnpackageServerAuctionComponentResultTest : public ::testing::Test { + protected: + void SetUp() override { + crypto_client_ = CreateCryptoClient(); + key_fetcher_manager_ = + std::make_unique<server_common::FakeKeyFetcherManager>(); + } + std::unique_ptr<server_common::FakeKeyFetcherManager> + key_fetcher_manager_; + std::unique_ptr<CryptoClientWrapperInterface> crypto_client_; +}; + +TEST_F(UnpackageServerAuctionComponentResultTest, UnpacksAuctionResult) { + AuctionResult expected = MakeARandomSingleSellerAuctionResult(); + std::string plaintext_response = + FrameAndCompressProto(expected.SerializeAsString()); + auto encrypted_request = + HpkeEncrypt(plaintext_response, *crypto_client_, *key_fetcher_manager_, + server_common::CloudPlatform::kGcp); + ASSERT_TRUE(encrypted_request.ok()) << encrypted_request.status(); + SelectAdRequest::ComponentAuctionResult input; + *input.mutable_key_id() = std::move(encrypted_request.value().key_id); + *input.mutable_auction_result_ciphertext() = + std::move(encrypted_request.value().ciphertext); + + auto output = UnpackageServerAuctionComponentResult(input, *crypto_client_, + *key_fetcher_manager_); + ASSERT_TRUE(output.ok()) << output.status(); + EXPECT_THAT(*output, EqualsProto(expected)); +} + +TEST_F(UnpackageServerAuctionComponentResultTest, ReturnsErrorForInvalidData) { + SelectAdRequest::ComponentAuctionResult input; + // Tink crashes if the payload is < 32 chars.
+ *input.mutable_auction_result_ciphertext() = + absl::StrCat(MakeARandomString(), MakeARandomString()); + *input.mutable_key_id() = MakeARandomString(); + auto output = UnpackageServerAuctionComponentResult(input, *crypto_client_, + *key_fetcher_manager_); + EXPECT_FALSE(output.ok()); +} + +class CreateAuctionResultCiphertextTest : public testing::Test { + void SetUp() override { + valid_score_ = MakeAnAdScore(); + valid_error_ = MakeAnError(); + bidding_group_map_ = + MakeARandomSingleSellerAuctionResult().bidding_groups(); + absl::btree_map<std::string, std::string> context_map; + log_context_ = std::make_unique<ContextImpl>( + context_map, server_common::ConsentedDebugConfiguration()); + } + + protected: + ScoreAdsResponse::AdScore valid_score_; + AuctionResult::Error valid_error_; + IgsWithBidsMap bidding_group_map_; + std::unique_ptr<ContextImpl> log_context_; + std::unique_ptr<OhttpHpkeDecryptedMessage> MakeDecryptedMessage() { + auto [request, context] = + GetFramedInputAndOhttpContext(MakeARandomString()); + auto private_key = GetPrivateKey(); + return std::make_unique<OhttpHpkeDecryptedMessage>( + MakeARandomString(), context, private_key, + kBiddingAuctionOhttpRequestLabel); + } +}; + +TEST_F(CreateAuctionResultCiphertextTest, ConvertsAdScoreForAndroid) { + AuctionResult expected = + MapBasicScoreFieldsToAuctionResult(this->valid_score_); + auto decrypted_message = MakeDecryptedMessage(); + auto output = CreateWinningAuctionResultCiphertext( + this->valid_score_, std::nullopt, ClientType::CLIENT_TYPE_ANDROID, + *decrypted_message, *this->log_context_); + + ASSERT_TRUE(output.ok()) << output.status(); + EXPECT_THAT(DecryptAppProtoAuctionResult(*output, decrypted_message->context), + EqualsProto(expected)); +} + +TEST_F(CreateAuctionResultCiphertextTest, ConvertsAdScoreForWeb) { + AuctionResult expected = + MapBasicScoreFieldsToAuctionResult(this->valid_score_); + // Should not populate bid value for single seller auction.
+ expected.clear_bid(); + expected.mutable_bidding_groups()->insert(this->bidding_group_map_.begin(), + this->bidding_group_map_.end()); + auto decrypted_message = MakeDecryptedMessage(); + auto output = CreateWinningAuctionResultCiphertext( + this->valid_score_, this->bidding_group_map_, + ClientType::CLIENT_TYPE_BROWSER, *decrypted_message, *this->log_context_); + + ASSERT_TRUE(output.ok()) << output.status(); + EXPECT_THAT(DecryptBrowserAuctionResult(*output, decrypted_message->context), + EqualsProto(expected)); +} + +TEST_F(CreateAuctionResultCiphertextTest, ReturnsErrorForInvalidClientAdScore) { + auto decrypted_message = MakeDecryptedMessage(); + auto output = CreateWinningAuctionResultCiphertext( + this->valid_score_, this->bidding_group_map_, + ClientType::CLIENT_TYPE_UNKNOWN, *decrypted_message, *this->log_context_); + + ASSERT_FALSE(output.ok()); +} + +TEST_F(CreateAuctionResultCiphertextTest, ConvertsClientErrorForAndroid) { + AuctionResult expected = + MapBasicErrorFieldsToAuctionResult(this->valid_error_); + auto decrypted_message = MakeDecryptedMessage(); + auto output = CreateErrorAuctionResultCiphertext( + this->valid_error_, ClientType::CLIENT_TYPE_ANDROID, *decrypted_message, + *this->log_context_); + + ASSERT_TRUE(output.ok()) << output.status(); + EXPECT_THAT(DecryptAppProtoAuctionResult(*output, decrypted_message->context), + EqualsProto(expected)); +} + +TEST_F(CreateAuctionResultCiphertextTest, ConvertsClientErrorForWeb) { + AuctionResult expected = + MapBasicErrorFieldsToAuctionResult(this->valid_error_); + auto decrypted_message = MakeDecryptedMessage(); + auto output = CreateErrorAuctionResultCiphertext( + this->valid_error_, ClientType::CLIENT_TYPE_BROWSER, *decrypted_message, + *this->log_context_); + + ASSERT_TRUE(output.ok()) << output.status(); + EXPECT_THAT(DecryptBrowserAuctionResult(*output, decrypted_message->context), + EqualsProto(expected)); +} + +TEST_F(CreateAuctionResultCiphertextTest, + ReturnsErrorForInvalidClientErrorMessage) { + auto decrypted_message = MakeDecryptedMessage(); + auto output = CreateErrorAuctionResultCiphertext( + this->valid_error_, ClientType::CLIENT_TYPE_UNKNOWN, *decrypted_message, + *this->log_context_); + + ASSERT_FALSE(output.ok()); +} + +TEST_F(CreateAuctionResultCiphertextTest, ConvertsChaffForAndroid) { + auto decrypted_message = MakeDecryptedMessage(); + auto output = CreateChaffAuctionResultCiphertext( + ClientType::CLIENT_TYPE_ANDROID, *decrypted_message, *this->log_context_); + + ASSERT_TRUE(output.ok()) << output.status(); + AuctionResult decrypted_output = + DecryptAppProtoAuctionResult(*output, decrypted_message->context); + EXPECT_TRUE(decrypted_output.is_chaff()); +} + +TEST_F(CreateAuctionResultCiphertextTest, ConvertsChaffForWeb) { + auto decrypted_message = MakeDecryptedMessage(); + auto output = CreateChaffAuctionResultCiphertext( + ClientType::CLIENT_TYPE_BROWSER, *decrypted_message, *this->log_context_); + + ASSERT_TRUE(output.ok()) << output.status(); + AuctionResult decrypted_output = + DecryptBrowserAuctionResult(*output, decrypted_message->context); + EXPECT_TRUE(decrypted_output.is_chaff()); +} + +TEST_F(CreateAuctionResultCiphertextTest, ReturnsErrorForInvalidClientChaff) { + auto decrypted_message = MakeDecryptedMessage(); + auto output = CreateChaffAuctionResultCiphertext( + ClientType::CLIENT_TYPE_UNKNOWN, *decrypted_message, *this->log_context_); + + ASSERT_FALSE(output.ok()); +} + +} // namespace +} // namespace privacy_sandbox::bidding_auction_servers diff --git 
a/services/seller_frontend_service/util/select_ad_reactor_test_utils.cc b/services/seller_frontend_service/util/select_ad_reactor_test_utils.cc index 0f7f434c..f3dee1ab 100644 --- a/services/seller_frontend_service/util/select_ad_reactor_test_utils.cc +++ b/services/seller_frontend_service/util/select_ad_reactor_test_utils.cc @@ -29,12 +29,10 @@ #include "absl/strings/escaping.h" #include "absl/strings/string_view.h" #include "gmock/gmock.h" -#include "services/common/compression/gzip.h" #include "services/common/test/utils/cbor_test_utils.h" #include "services/common/util/oblivious_http_utils.h" #include "services/seller_frontend_service/select_ad_reactor.h" #include "services/seller_frontend_service/util/framing_utils.h" -#include "src/cpp/communication/encoding_utils.h" namespace privacy_sandbox::bidding_auction_servers { @@ -64,6 +62,18 @@ absl::flat_hash_map<std::string, std::string> BuildBuyerWinningAdUrlMap( return buyer_to_ad_url; } +void SetupMockCrytoClient(MockCryptoClientWrapper& crypto_client) { + EXPECT_CALL(crypto_client, HpkeEncrypt) + .WillOnce([](const PublicKey& key, const std::string& plaintext_payload) { + // Mock the HpkeEncrypt() call on the crypto client. + google::cmrt::sdk::crypto_service::v1::HpkeEncryptResponse + hpke_encrypt_response; + hpke_encrypt_response.mutable_encrypted_data()->set_ciphertext( + plaintext_payload); + return hpke_encrypt_response; + }); +} + Cardinality BuyerCallCardinality(bool expect_all_buyers_solicited, bool repeated_get_allowed) { if (expect_all_buyers_solicited && repeated_get_allowed) { @@ -82,11 +92,11 @@ GetBidsResponse::GetBidsRawResponse BuildGetBidsResponseWithSingleAd( absl::optional<float> bid_value, const bool enable_event_level_debug_reporting, int number_ad_component_render_urls, - absl::optional<absl::string_view> bid_currency) { - AdWithBid bid = BuildNewAdWithBid( - ad_url, std::move(interest_group_name), std::move(bid_value), - enable_event_level_debug_reporting, number_ad_component_render_urls, - bid_currency); + const absl::optional<absl::string_view>& bid_currency) { + AdWithBid bid = + BuildNewAdWithBid(ad_url, std::move(interest_group_name), bid_value, + enable_event_level_debug_reporting, + number_ad_component_render_urls, bid_currency); GetBidsResponse::GetBidsRawResponse response; response.mutable_bids()->Add(std::move(bid)); return response; @@ -156,7 +166,7 @@ AdWithBid BuildNewAdWithBid( absl::optional<float> bid_value, const bool enable_event_level_debug_reporting, int number_ad_component_render_urls, - absl::optional<absl::string_view> bid_currency) { + const absl::optional<absl::string_view>& bid_currency) { AdWithBid bid; bid.set_render(ad_url); for (int i = 0; i < number_ad_component_render_urls; i++) { @@ -170,7 +180,7 @@ AdWithBid BuildNewAdWithBid( bid.set_interest_group_name(*interest_group_name); } if (bid_currency.has_value()) { - bid.set_bid_currency(bid_currency.value()); + bid.set_bid_currency(*bid_currency); } bid.set_ad_cost(kAdCost); bid.set_modeling_signals(kModelingSignals); @@ -186,6 +196,32 @@ AdWithBid BuildNewAdWithBid( return bid; } +ProtectedAppSignalsAdWithBid BuildNewPASAdWithBid( + const std::string& ad_render_url, absl::optional<float> bid_value, + const bool enable_event_level_debug_reporting, + absl::optional<absl::string_view> bid_currency) { + ProtectedAppSignalsAdWithBid pas_ad_with_bid; + pas_ad_with_bid.set_render(ad_render_url); + if (bid_value.has_value()) { + pas_ad_with_bid.set_bid(bid_value.value()); + } + if (bid_currency.has_value()) { + pas_ad_with_bid.set_bid_currency(bid_currency.value()); + } + pas_ad_with_bid.set_ad_cost(kAdCost); + pas_ad_with_bid.set_modeling_signals(kModelingSignals); + + if
(enable_event_level_debug_reporting) { + DebugReportUrls debug_report_urls; + debug_report_urls.set_auction_debug_win_url( + "https://pas_test.com/debugWin?render=" + ad_render_url); + debug_report_urls.set_auction_debug_loss_url( + "https://pas_test.com/debugLoss?render=" + ad_render_url); + *pas_ad_with_bid.mutable_debug_report_urls() = debug_report_urls; + } + return pas_ad_with_bid; +} + server_common::PrivateKey GetPrivateKey() { HpkeKeyset default_keyset = HpkeKeyset{}; server_common::PrivateKey private_key; @@ -200,7 +236,7 @@ void SetupScoringProviderMock( const std::optional<std::string>& scoring_signals_value, bool repeated_get_allowed, const std::optional<absl::Status>& server_error_to_return, - int expected_num_bids, std::string seller_egid) { + int expected_num_bids, const std::string& seller_egid) { auto MockScoringSignalsProvider = [&expected_buyer_bids, scoring_signals_value, server_error_to_return, expected_num_bids, seller_egid]( @@ -236,8 +272,7 @@ void SetupScoringProviderMock( GetByteSize get_byte_size; if (server_error_to_return.has_value()) { - std::move(on_done)(std::move(server_error_to_return.value()), - get_byte_size); + std::move(on_done)(*server_error_to_return, get_byte_size); } else { auto scoring_signals = std::make_unique<ScoringSignals>(); if (scoring_signals_value.has_value()) { @@ -262,6 +297,7 @@ TrustedServersConfigClient CreateConfig() { config.SetFlagForTest(kAuctionHost, AUCTION_SERVER_HOST); config.SetFlagForTest(kTrue, ENABLE_SELLER_FRONTEND_BENCHMARKING); config.SetFlagForTest(kSellerOriginDomain, SELLER_ORIGIN_DOMAIN); + config.SetFlagForTest(kTrue, ENABLE_PROTECTED_AUDIENCE); return config; } @@ -335,4 +371,106 @@ absl::StatusOr<std::string> UnframeAndDecompressAuctionResult( return GzipDecompress(unframed_response->compressed_data); } +std::string FrameAndCompressProto(absl::string_view serialized_proto) { + // Compress the byte array before framing it with preamble and padding.
+ absl::StatusOr<std::string> compressed_data = GzipCompress(serialized_proto); + EXPECT_TRUE(compressed_data.ok()) << compressed_data.status().message(); + absl::StatusOr<std::string> framed_request = + server_common::EncodeResponsePayload( + server_common::CompressionType::kGzip, *compressed_data, + GetEncodedDataSize(compressed_data->size())); + EXPECT_TRUE(framed_request.ok()) << framed_request.status().message(); + return *framed_request; +} + +void FillInAdRenderUrlAndCurrency( + absl::string_view matching_currency, absl::string_view mismatching_currency, + absl::string_view base_ad_render_url, const int current_iteration, + int& mismatched_left_to_add, int& matched_left_to_add, + std::string& ad_render_url, std::string& bid_currency) { + if ((mismatched_left_to_add > 0) && (matched_left_to_add > 0)) { + if ((current_iteration % 2) == 0) { + ad_render_url = absl::StrCat(base_ad_render_url, "/", current_iteration, + "_", matching_currency); + bid_currency = matching_currency; + matched_left_to_add--; + } else { + ad_render_url = absl::StrCat(base_ad_render_url, "/", current_iteration, + "_", mismatching_currency); + bid_currency = mismatching_currency; + mismatched_left_to_add--; + } + } else if (matched_left_to_add > 0) { + ad_render_url = absl::StrCat(base_ad_render_url, "/", current_iteration, + "_", matching_currency); + bid_currency = matching_currency; + matched_left_to_add--; + } else { + ad_render_url = absl::StrCat(base_ad_render_url, "/", current_iteration, + "_", mismatching_currency); + bid_currency = mismatching_currency; + mismatched_left_to_add--; + } +} + +std::vector<AdWithBid> GetAdWithBidsInMultipleCurrencies( + const int num_ad_with_bids, const int num_mismatched, + absl::string_view matching_currency, absl::string_view mismatching_currency, + absl::string_view base_ad_render_url, absl::string_view base_ig_name) { + DCHECK(num_ad_with_bids >= num_mismatched); + + int mismatched_left_to_add = num_mismatched; + int matched_left_to_add = num_ad_with_bids - num_mismatched; + + std::vector<AdWithBid> ads_with_bids; + ads_with_bids.reserve(num_ad_with_bids); + for (int i = 0; i < num_ad_with_bids; i++) { + std::string ad_render_url; + std::string bid_currency; + + FillInAdRenderUrlAndCurrency(matching_currency, mismatching_currency, + base_ad_render_url, i, mismatched_left_to_add, + matched_left_to_add, ad_render_url, + bid_currency); + ads_with_bids.push_back(BuildNewAdWithBid( + ad_render_url, absl::StrCat(base_ig_name, "_", i), + /*bid_value=*/1 + 0.001 * i, + /*enable_event_level_debug_reporting=*/false, + /*number_ad_component_render_urls=*/kDefaultNumAdComponents, + bid_currency)); + } + DCHECK_EQ(matched_left_to_add, 0); + DCHECK_EQ(mismatched_left_to_add, 0); + return ads_with_bids; +} + +std::vector<ProtectedAppSignalsAdWithBid> GetPASAdWithBidsInMultipleCurrencies( + const int num_ad_with_bids, const int num_mismatched, + absl::string_view matching_currency, absl::string_view mismatching_currency, + absl::string_view base_ad_render_url) { + DCHECK(num_ad_with_bids >= num_mismatched); + + int mismatched_left_to_add = num_mismatched; + int matched_left_to_add = num_ad_with_bids - num_mismatched; + + std::vector<ProtectedAppSignalsAdWithBid> pas_ads_with_bids; + pas_ads_with_bids.reserve(num_ad_with_bids); + for (int i = 0; i < num_ad_with_bids; i++) { + std::string ad_render_url; + std::string bid_currency; + + FillInAdRenderUrlAndCurrency(matching_currency, mismatching_currency, + base_ad_render_url, i, mismatched_left_to_add, + matched_left_to_add, ad_render_url, + bid_currency); + pas_ads_with_bids.push_back(BuildNewPASAdWithBid( + ad_render_url, + /*bid_value=*/1 +
0.001 * i, + /*enable_event_level_debug_reporting=*/false, bid_currency)); + } + DCHECK_EQ(matched_left_to_add, 0); + DCHECK_EQ(mismatched_left_to_add, 0); + return pas_ads_with_bids; +} + } // namespace privacy_sandbox::bidding_auction_servers diff --git a/services/seller_frontend_service/util/select_ad_reactor_test_utils.h b/services/seller_frontend_service/util/select_ad_reactor_test_utils.h index d81c42be..b30c0d74 100644 --- a/services/seller_frontend_service/util/select_ad_reactor_test_utils.h +++ b/services/seller_frontend_service/util/select_ad_reactor_test_utils.h @@ -21,12 +21,15 @@ #include #include #include +#include #include "absl/container/flat_hash_map.h" #include "absl/strings/string_view.h" #include "api/bidding_auction_servers.grpc.pb.h" #include "quiche/oblivious_http/oblivious_http_client.h" #include "quiche/oblivious_http/oblivious_http_gateway.h" +#include "services/common/compression/gzip.h" +#include "services/common/encryption/mock_crypto_client_wrapper.h" #include "services/common/test/mocks.h" #include "services/common/test/random.h" #include "services/common/test/utils/cbor_test_utils.h" @@ -36,7 +39,8 @@ #include "services/seller_frontend_service/test/app_test_utils.h" #include "services/seller_frontend_service/util/framing_utils.h" #include "services/seller_frontend_service/util/web_utils.h" -#include "src/cpp/encryption/key_fetcher/mock/mock_key_fetcher_manager.h" +#include "src/communication/encoding_utils.h" +#include "src/encryption/key_fetcher/mock/mock_key_fetcher_manager.h" namespace privacy_sandbox::bidding_auction_servers { @@ -69,6 +73,9 @@ inline constexpr char kTestComponentSellerReportingUrl[] = "http://componentReportResult.com"; inline constexpr char kTestBuyerReportingUrl[] = "http://reportWin.com"; inline constexpr char kTestAdMetadata[] = "testAdMetadata"; +inline constexpr char kEurosIsoCode[] = "EUR"; +inline constexpr char kUsdIsoCode[] = "USD"; +inline constexpr char kYenIsoCode[] = "JPY"; template <typename T> struct EncryptedSelectAdRequestWithContext { @@ -90,7 +97,9 @@ GetBidsResponse::GetBidsRawResponse BuildGetBidsResponseWithSingleAd( absl::optional<float> bid_value = absl::nullopt, const bool enable_event_level_debug_reporting = false, int number_ad_component_render_urls = kDefaultNumAdComponents, - absl::optional<absl::string_view> bid_currency = absl::nullopt); + const absl::optional<absl::string_view>& bid_currency = absl::nullopt); + +void SetupMockCrytoClient(MockCryptoClientWrapper& crypto_client); void SetupBuyerClientMock( absl::string_view hostname, @@ -110,7 +119,12 @@ AdWithBid BuildNewAdWithBid( absl::optional<float> bid_value = absl::nullopt, const bool enable_event_level_debug_reporting = false, int number_ad_component_render_urls = kDefaultNumAdComponents, - absl::optional<absl::string_view> bid_currency = absl::nullopt); + const absl::optional<absl::string_view>& bid_currency = absl::nullopt); + +ProtectedAppSignalsAdWithBid BuildNewPASAdWithBid( + const std::string& ad_render_url, absl::optional<float> bid_value, + const bool enable_event_level_debug_reporting, + absl::optional<absl::string_view> bid_currency); void SetupScoringProviderMock( const MockAsyncProvider<ScoringSignalsRequest, ScoringSignals>& provider, @@ -118,7 +132,17 @@ void SetupScoringProviderMock( const std::optional<std::string>& scoring_signals_value, bool repeated_get_allowed = false, const std::optional<absl::Status>& server_error_to_return = std::nullopt, - int expected_num_bids = -1, std::string seller_egid = ""); + int expected_num_bids = -1, const std::string& seller_egid = ""); + +std::vector<AdWithBid> GetAdWithBidsInMultipleCurrencies( + int num_ad_with_bids, int num_mismatched, + absl::string_view matching_currency, absl::string_view
mismatching_currency, + absl::string_view base_ad_render_url, absl::string_view base_ig_name); + +std::vector<ProtectedAppSignalsAdWithBid> GetPASAdWithBidsInMultipleCurrencies( + const int num_ad_with_bids, const int num_mismatched, + absl::string_view matching_currency, absl::string_view mismatching_currency, + absl::string_view base_ad_render_url); TrustedServersConfigClient CreateConfig(); @@ -190,7 +214,9 @@ GetProtoEncodedEncryptedInputAndOhttpContext(const T& protected_auction_input) { template <typename T> EncryptedSelectAdRequestWithContext<T> GetSampleSelectAdRequest( ClientType client_type, absl::string_view seller_origin_domain, - bool is_consented_debug = false, absl::string_view top_level_seller = "") { + bool is_consented_debug = false, absl::string_view top_level_seller = "", + EncryptionCloudPlatform top_seller_cloud_platform = + EncryptionCloudPlatform::ENCRYPTION_CLOUD_PLATFORM_UNSPECIFIED) { BuyerInput buyer_input; auto* interest_group = buyer_input.mutable_interest_groups()->Add(); interest_group->set_name(kSampleInterestGroupName); @@ -236,6 +262,11 @@ EncryptedSelectAdRequestWithContext<T> GetSampleSelectAdRequest( } protected_auction_input.set_publisher_name(MakeARandomString()); request.mutable_auction_config()->set_seller(seller_origin_domain); + request.mutable_auction_config()->set_top_level_cloud_platform( + top_seller_cloud_platform); + if (!top_level_seller.empty()) { + request.mutable_auction_config()->set_top_level_seller(top_level_seller); + } request.set_client_type(client_type); const auto* descriptor = protected_auction_input.GetDescriptor(); @@ -267,16 +298,18 @@ EncryptedSelectAdRequestWithContext<T> GetSampleSelectAdRequest( *request.mutable_protected_audience_ciphertext() = std::move(encrypted_request); } - if (!top_level_seller.empty()) { - request.mutable_auction_config()->set_top_level_seller( - top_level_seller); - } return {std::move(protected_auction_input), std::move(request), std::move(context)}; } } } +struct ServerComponentAuctionParams { + MockCryptoClientWrapper* crypto_client = nullptr; + EncryptionCloudPlatform top_level_cloud_platform = + EncryptionCloudPlatform::ENCRYPTION_CLOUD_PLATFORM_UNSPECIFIED; +}; + template <typename T> std::pair<EncryptedSelectAdRequestWithContext<T>, ClientRegistry> GetSelectAdRequestAndClientRegistryForTest( @@ -291,10 +324,12 @@ GetSelectAdRequestAndClientRegistryForTest( absl::string_view seller_origin_domain, bool expect_all_buyers_solicited = true, absl::string_view top_level_seller = "", bool enable_reporting = false, - bool force_set_modified_bid_to_zero = false) { + bool force_set_modified_bid_to_zero = false, + ServerComponentAuctionParams server_component_auction_params = {}) { auto encrypted_request_with_context = GetSampleSelectAdRequest<T>( client_type, seller_origin_domain, - /*is_consented_debug=*/false, top_level_seller); + /*is_consented_debug=*/false, top_level_seller, + server_component_auction_params.top_level_cloud_platform); // Sets up the buyer client while populating the expected buyer bids that can then // be used to set up the scoring signals provider.
@@ -385,10 +420,14 @@ GetSelectAdRequestAndClientRegistryForTest( std::unique_ptr<AsyncReporter> async_reporter = std::make_unique<AsyncReporter>( std::make_unique<MockHttpFetcherAsync>()); + + // Sets up the client registry. - ClientRegistry clients{scoring_signals_provider, scoring_client, + ClientRegistry clients{scoring_signals_provider, + scoring_client, buyer_front_end_async_client_factory_mock, - *mock_key_fetcher_manager, std::move(async_reporter)}; + *mock_key_fetcher_manager, + server_component_auction_params.crypto_client, + std::move(async_reporter)}; return {std::move(encrypted_request_with_context), std::move(clients)}; } @@ -417,6 +456,8 @@ AuctionResult DecryptBrowserAuctionResult( absl::StatusOr<std::string> UnframeAndDecompressAuctionResult( absl::string_view framed_response); +std::string FrameAndCompressProto(absl::string_view serialized_proto); + } // namespace privacy_sandbox::bidding_auction_servers #endif // SERVICES_SELLER_FRONTEND_SERVICE_UTIL_SELECT_AD_REACTOR_TEST_UTILS_H_ diff --git a/services/seller_frontend_service/util/validation_utils.cc b/services/seller_frontend_service/util/validation_utils.cc new file mode 100644 index 00000000..cb73aa6e --- /dev/null +++ b/services/seller_frontend_service/util/validation_utils.cc @@ -0,0 +1,116 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License.
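+ +// Validation helpers for the seller frontend service: plaintext checks on +// encrypted SelectAdRequests and on decrypted component auction results.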
+ +#include "services/seller_frontend_service/util/validation_utils.h" + +namespace privacy_sandbox::bidding_auction_servers { + +bool ValidateEncryptedSelectAdRequest(const SelectAdRequest& request, + AuctionScope auction_scope, + absl::string_view seller_domain, + ErrorAccumulator& error_accumulator) { + bool valid = true; + auto report_error_lambda = [&error_accumulator, &valid]( + bool has_error, absl::string_view error_msg) { + if (has_error) { + valid = false; + error_accumulator.ReportError(ErrorVisibility::AD_SERVER_VISIBLE, + error_msg, ErrorCode::CLIENT_SIDE); + } + }; + report_error_lambda(request.protected_auction_ciphertext().empty() && + request.protected_audience_ciphertext().empty(), + kEmptyProtectedAuctionCiphertextError); + const SelectAdRequest::AuctionConfig& auction_config = + request.auction_config(); + + report_error_lambda(auction_config.seller_signals().empty(), + kEmptySellerSignals); + report_error_lambda(auction_config.auction_signals().empty(), + kEmptyAuctionSignals); + report_error_lambda(auction_config.seller().empty(), kEmptySeller); + report_error_lambda(seller_domain != auction_config.seller(), + kWrongSellerDomain); + report_error_lambda(request.client_type() == CLIENT_TYPE_UNKNOWN, + kUnknownClientType); + + if (auction_scope == AuctionScope::AUCTION_SCOPE_SERVER_TOP_LEVEL_SELLER) { + report_error_lambda(request.component_auction_results_size() == 0, + kNoComponentAuctionResults); + int valid_results = 0; + for (const auto& auction_result : request.component_auction_results()) { + if (auction_result.auction_result_ciphertext().empty() || + auction_result.key_id().empty()) { + continue; + } + ++valid_results; + } + report_error_lambda( + valid_results == 0 && request.component_auction_results_size() != 0, + kEmptyComponentAuctionResults); + } + return valid; +} + +bool ValidateComponentAuctionResult(const AuctionResult& auction_result, + absl::string_view request_generation_id, + absl::string_view seller_domain, + ErrorAccumulator& error_accumulator) { + // If chaff result or erroneous request, don't do any more validations and + // pass as is to auction server to make sure auction server is called. 
+ if (auction_result.is_chaff() || !auction_result.error().message().empty()) { + return true; + } + + bool valid = true; + auto report_error_lambda = [&error_accumulator, &valid]( + bool has_error, absl::string_view error_msg) { + if (has_error) { + valid = false; + error_accumulator.ReportError(ErrorVisibility::AD_SERVER_VISIBLE, + error_msg, ErrorCode::CLIENT_SIDE); + } + }; + + report_error_lambda( + auction_result.auction_params().ciphertext_generation_id() != + request_generation_id, + absl::StrFormat(kErrorInAuctionResult, + kMismatchedGenerationIdInAuctionResultError)); + report_error_lambda( + auction_result.top_level_seller() != seller_domain, + absl::StrFormat(kErrorInAuctionResult, + kMismatchedTopLevelSellerInAuctionResultError)); + + report_error_lambda( + auction_result.auction_params().component_seller().empty(), + absl::StrFormat(kErrorInAuctionResult, + kEmptyComponentSellerInAuctionResultError)); + + report_error_lambda( + auction_result.ad_type() != AdType::AD_TYPE_PROTECTED_AUDIENCE_AD, + absl::StrFormat(kErrorInAuctionResult, + kUnsupportedAdTypeInAuctionResultError)); + + report_error_lambda( + !auction_result.win_reporting_urls() + .top_level_seller_reporting_urls() + .reporting_url() + .empty(), + absl::StrFormat(kErrorInAuctionResult, + kTopLevelWinReportingUrlsInAuctionResultError)); + return valid; +} + +} // namespace privacy_sandbox::bidding_auction_servers diff --git a/services/seller_frontend_service/util/validation_utils.h b/services/seller_frontend_service/util/validation_utils.h new file mode 100644 index 00000000..aa6e5e6e --- /dev/null +++ b/services/seller_frontend_service/util/validation_utils.h @@ -0,0 +1,42 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#ifndef SERVICES_SELLER_FRONTEND_SERVICE_UTIL_VALIDATION_UTILS_H_ +#define SERVICES_SELLER_FRONTEND_SERVICE_UTIL_VALIDATION_UTILS_H_ + +#include "absl/strings/string_view.h" +#include "api/bidding_auction_servers.pb.h" +#include "services/common/util/error_accumulator.h" +#include "services/common/util/error_categories.h" + +namespace privacy_sandbox::bidding_auction_servers { + +// Validates a single Auction Result and adds errors to error accumulator. +// Returns false if AuctionResult is invalid. +bool ValidateComponentAuctionResult(const AuctionResult& auction_result, + absl::string_view request_generation_id, + absl::string_view seller_domain, + ErrorAccumulator& error_accumulator); + +// Validates an encrypted SelectAdRequest and adds all errors to error +// accumulator. +// Returns false if plaintext fields in SelectAdRequest are invalid. 
+bool ValidateEncryptedSelectAdRequest(const SelectAdRequest& request, + AuctionScope auction_scope, + absl::string_view seller_domain, + ErrorAccumulator& error_accumulator); + +} // namespace privacy_sandbox::bidding_auction_servers + +#endif // SERVICES_SELLER_FRONTEND_SERVICE_UTIL_VALIDATION_UTILS_H_ diff --git a/services/seller_frontend_service/util/validation_utils_test.cc b/services/seller_frontend_service/util/validation_utils_test.cc new file mode 100644 index 00000000..c9af0db1 --- /dev/null +++ b/services/seller_frontend_service/util/validation_utils_test.cc @@ -0,0 +1,269 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "services/seller_frontend_service/util/validation_utils.h" + +#include + +#include + +#include "gtest/gtest.h" +#include "services/common/test/random.h" +#include "services/seller_frontend_service/util/select_ad_reactor_test_utils.h" + +namespace privacy_sandbox::bidding_auction_servers { +namespace { + +constexpr char kTestSeller[] = "sample-seller"; + +using ::testing::Contains; + +TEST(ValidateEncryptedSelectAdRequest, ValidatesSingleSellerInput) { + ErrorAccumulator error_accumulator; + auto [protected_auction_input, request, context] = + GetSampleSelectAdRequest( + CLIENT_TYPE_BROWSER, kTestSeller, + /*is_consented_debug=*/true); + bool output = ValidateEncryptedSelectAdRequest( + request, AuctionScope::AUCTION_SCOPE_SINGLE_SELLER, kTestSeller, + error_accumulator); + ASSERT_TRUE(output); + const auto& errors_map = + error_accumulator.GetErrors(ErrorVisibility::AD_SERVER_VISIBLE); + EXPECT_EQ(errors_map.find(ErrorCode::CLIENT_SIDE), errors_map.end()); +} + +TEST(ValidateEncryptedSelectAdRequest, AddsErrorsToAccumulatorForSingleSeller) { + ErrorAccumulator error_accumulator; + SelectAdRequest empty_request; + bool output = ValidateEncryptedSelectAdRequest( + empty_request, AuctionScope::AUCTION_SCOPE_SINGLE_SELLER, kTestSeller, + error_accumulator); + ASSERT_FALSE(output); + const auto& errors = + error_accumulator.GetErrors(ErrorVisibility::AD_SERVER_VISIBLE) + .at(ErrorCode::CLIENT_SIDE); + EXPECT_THAT(errors, Contains(kEmptyProtectedAuctionCiphertextError)); + EXPECT_THAT(errors, Contains(kEmptySellerSignals)); + EXPECT_THAT(errors, Contains(kEmptyAuctionSignals)); + EXPECT_THAT(errors, Contains(kEmptySeller)); + EXPECT_THAT(errors, Contains(kWrongSellerDomain)); + EXPECT_THAT(errors, Contains(kUnknownClientType)); +} + +TEST(ValidateEncryptedSelectAdRequest, ValidatesDeviceComponentSellerInput) { + ErrorAccumulator error_accumulator; + auto [protected_auction_input, request, context] = + GetSampleSelectAdRequest( + CLIENT_TYPE_BROWSER, kTestSeller, + /*is_consented_debug=*/true); + bool output = ValidateEncryptedSelectAdRequest( + request, AuctionScope::AUCTION_SCOPE_DEVICE_COMPONENT_MULTI_SELLER, + kTestSeller, error_accumulator); + ASSERT_TRUE(output); + const auto& errors_map = + error_accumulator.GetErrors(ErrorVisibility::AD_SERVER_VISIBLE); + EXPECT_EQ(errors_map.find(ErrorCode::CLIENT_SIDE),
errors_map.end()); +} + +TEST(ValidateEncryptedSelectAdRequest, + AddsErrorsToAccumulatorForDeviceComponent) { + ErrorAccumulator error_accumulator; + SelectAdRequest empty_request; + bool output = ValidateEncryptedSelectAdRequest( + empty_request, AuctionScope::AUCTION_SCOPE_DEVICE_COMPONENT_MULTI_SELLER, + kTestSeller, error_accumulator); + ASSERT_FALSE(output); + const auto& errors = + error_accumulator.GetErrors(ErrorVisibility::AD_SERVER_VISIBLE) + .at(ErrorCode::CLIENT_SIDE); + EXPECT_THAT(errors, Contains(kEmptyProtectedAuctionCiphertextError)); + EXPECT_THAT(errors, Contains(kEmptySellerSignals)); + EXPECT_THAT(errors, Contains(kEmptyAuctionSignals)); + EXPECT_THAT(errors, Contains(kEmptySeller)); + EXPECT_THAT(errors, Contains(kWrongSellerDomain)); + EXPECT_THAT(errors, Contains(kUnknownClientType)); +} + +TEST(ValidateEncryptedSelectAdRequest, ValidatesServerComponentSellerInput) { + ErrorAccumulator error_accumulator; + auto [protected_auction_input, request, context] = + GetSampleSelectAdRequest( + CLIENT_TYPE_BROWSER, kTestSeller, + /*is_consented_debug=*/true); + bool output = ValidateEncryptedSelectAdRequest( + request, AuctionScope::AUCTION_SCOPE_SERVER_COMPONENT_MULTI_SELLER, + kTestSeller, error_accumulator); + ASSERT_TRUE(output); + const auto& errors_map = + error_accumulator.GetErrors(ErrorVisibility::AD_SERVER_VISIBLE); + EXPECT_EQ(errors_map.find(ErrorCode::CLIENT_SIDE), errors_map.end()); +} + +TEST(ValidateEncryptedSelectAdRequest, + AddsErrorsToAccumulatorForServerComponent) { + ErrorAccumulator error_accumulator; + SelectAdRequest empty_request; + bool output = ValidateEncryptedSelectAdRequest( + empty_request, AuctionScope::AUCTION_SCOPE_SERVER_COMPONENT_MULTI_SELLER, + kTestSeller, error_accumulator); + ASSERT_FALSE(output); + const auto& errors = + error_accumulator.GetErrors(ErrorVisibility::AD_SERVER_VISIBLE) + .at(ErrorCode::CLIENT_SIDE); + EXPECT_THAT(errors, Contains(kEmptyProtectedAuctionCiphertextError)); + EXPECT_THAT(errors, Contains(kEmptySellerSignals)); + EXPECT_THAT(errors, Contains(kEmptyAuctionSignals)); + EXPECT_THAT(errors, Contains(kEmptySeller)); + EXPECT_THAT(errors, Contains(kWrongSellerDomain)); + EXPECT_THAT(errors, Contains(kUnknownClientType)); +} + +TEST(ValidateEncryptedSelectAdRequest, + AddsErrorsToAccumulatorForServerTopLevel) { + ErrorAccumulator error_accumulator; + SelectAdRequest empty_request; + bool output = ValidateEncryptedSelectAdRequest( + empty_request, AuctionScope::AUCTION_SCOPE_SERVER_TOP_LEVEL_SELLER, + kTestSeller, error_accumulator); + ASSERT_FALSE(output); + const auto& errors = + error_accumulator.GetErrors(ErrorVisibility::AD_SERVER_VISIBLE) + .at(ErrorCode::CLIENT_SIDE); + EXPECT_THAT(errors, Contains(kEmptyProtectedAuctionCiphertextError)); + EXPECT_THAT(errors, Contains(kEmptySellerSignals)); + EXPECT_THAT(errors, Contains(kEmptyAuctionSignals)); + EXPECT_THAT(errors, Contains(kEmptySeller)); + EXPECT_THAT(errors, Contains(kWrongSellerDomain)); + EXPECT_THAT(errors, Contains(kUnknownClientType)); + EXPECT_THAT(errors, Contains(kNoComponentAuctionResults)); +} + +TEST(ValidateEncryptedSelectAdRequest, + AddsErrorForServerTopLevelForMissingCiphertext) { + ErrorAccumulator error_accumulator; + auto [protected_auction_input, request, context] = + GetSampleSelectAdRequest( + CLIENT_TYPE_BROWSER, kTestSeller, + /*is_consented_debug=*/true); + auto* auction_res = request.mutable_component_auction_results()->Add(); + auction_res->set_key_id(MakeARandomString()); + bool output = ValidateEncryptedSelectAdRequest( + 
request, AuctionScope::AUCTION_SCOPE_SERVER_TOP_LEVEL_SELLER, kTestSeller, + error_accumulator); + ASSERT_FALSE(output); + const auto& errors = + error_accumulator.GetErrors(ErrorVisibility::AD_SERVER_VISIBLE) + .at(ErrorCode::CLIENT_SIDE); + EXPECT_THAT(errors, Contains(kEmptyComponentAuctionResults)); +} + +TEST(ValidateEncryptedSelectAdRequest, + AddsErrorForServerTopLevelForMissingKeyId) { + ErrorAccumulator error_accumulator; + auto [protected_auction_input, request, context] = + GetSampleSelectAdRequest( + CLIENT_TYPE_BROWSER, kTestSeller, + /*is_consented_debug=*/true); + auto* auction_res = request.mutable_component_auction_results()->Add(); + auction_res->set_auction_result_ciphertext(MakeARandomString()); + bool output = ValidateEncryptedSelectAdRequest( + request, AuctionScope::AUCTION_SCOPE_SERVER_TOP_LEVEL_SELLER, kTestSeller, + error_accumulator); + ASSERT_FALSE(output); + const auto& errors = + error_accumulator.GetErrors(ErrorVisibility::AD_SERVER_VISIBLE) + .at(ErrorCode::CLIENT_SIDE); + EXPECT_THAT(errors, Contains(kEmptyComponentAuctionResults)); +} + +TEST(ValidateEncryptedSelectAdRequest, ValidatesTopLevelSellerInput) { + ErrorAccumulator error_accumulator; + auto [protected_auction_input, request, context] = + GetSampleSelectAdRequest( + CLIENT_TYPE_BROWSER, kTestSeller, + /*is_consented_debug=*/true); + auto* auction_res = request.mutable_component_auction_results()->Add(); + auction_res->set_auction_result_ciphertext(MakeARandomString()); + auction_res->set_key_id(MakeARandomString()); + bool output = ValidateEncryptedSelectAdRequest( + request, AuctionScope::AUCTION_SCOPE_SERVER_TOP_LEVEL_SELLER, kTestSeller, + error_accumulator); + ASSERT_TRUE(output); + const auto& errors_map = + error_accumulator.GetErrors(ErrorVisibility::AD_SERVER_VISIBLE); + EXPECT_EQ(errors_map.find(ErrorCode::CLIENT_SIDE), errors_map.end()); +} + +TEST(ValidateComponentAuctionResultTest, ReturnsNoErrorForChaff) { + ErrorAccumulator error_accumulator; + std::string generation_id = MakeARandomString(); + AuctionResult input; + input.set_is_chaff(true); + bool output = ValidateComponentAuctionResult(input, generation_id, + kTestSeller, error_accumulator); + ASSERT_TRUE(output); + const auto& errors_map = + error_accumulator.GetErrors(ErrorVisibility::AD_SERVER_VISIBLE); + EXPECT_EQ(errors_map.find(ErrorCode::CLIENT_SIDE), errors_map.end()); +} + +TEST(ValidateComponentAuctionResultTest, ValidatesAuctionResult) { + ErrorAccumulator error_accumulator; + std::string generation_id = MakeARandomString(); + AuctionResult input = + MakeARandomComponentAuctionResult(generation_id, kTestSeller); + bool output = ValidateComponentAuctionResult(input, generation_id, + kTestSeller, error_accumulator); + ASSERT_TRUE(output); + const auto& errors_map = + error_accumulator.GetErrors(ErrorVisibility::AD_SERVER_VISIBLE); + EXPECT_EQ(errors_map.find(ErrorCode::CLIENT_SIDE), errors_map.end()); +} + +TEST(ValidateComponentAuctionResultTest, ReportsErrors) { + ErrorAccumulator error_accumulator; + std::string generation_id = MakeARandomString(); + AuctionResult input = MakeARandomComponentAuctionResult(MakeARandomString(), + MakeARandomString()); + input.mutable_auction_params()->clear_component_seller(); + input.set_ad_type(AdType::AD_TYPE_PROTECTED_APP_SIGNALS_AD); + input.mutable_win_reporting_urls() + ->mutable_top_level_seller_reporting_urls() + ->set_reporting_url(MakeARandomString()); + bool output = ValidateComponentAuctionResult(input, generation_id, + kTestSeller, error_accumulator); + 
ASSERT_FALSE(output); + const auto& errors = + error_accumulator.GetErrors(ErrorVisibility::AD_SERVER_VISIBLE) + .at(ErrorCode::CLIENT_SIDE); + EXPECT_THAT(errors, Contains(absl::StrFormat( + kErrorInAuctionResult, + kMismatchedGenerationIdInAuctionResultError))); + EXPECT_THAT(errors, Contains(absl::StrFormat( + kErrorInAuctionResult, + kMismatchedTopLevelSellerInAuctionResultError))); + EXPECT_THAT(errors, Contains(absl::StrFormat( + kErrorInAuctionResult, + kEmptyComponentSellerInAuctionResultError))); + EXPECT_THAT(errors, Contains(absl::StrFormat( + kErrorInAuctionResult, + kUnsupportedAdTypeInAuctionResultError))); + EXPECT_THAT(errors, Contains(absl::StrFormat( + kErrorInAuctionResult, + kTopLevelWinReportingUrlsInAuctionResultError))); +} + +} // namespace +} // namespace privacy_sandbox::bidding_auction_servers diff --git a/services/seller_frontend_service/util/web_utils.cc b/services/seller_frontend_service/util/web_utils.cc index 2f781b46..f0ebb492 100644 --- a/services/seller_frontend_service/util/web_utils.cc +++ b/services/seller_frontend_service/util/web_utils.cc @@ -27,7 +27,7 @@ #include "rapidjson/stringbuffer.h" #include "rapidjson/writer.h" #include "services/common/compression/gzip.h" -#include "src/cpp/util/status_macro/status_macros.h" +#include "src/util/status_macro/status_macros.h" #include "cbor.h" @@ -239,7 +239,7 @@ absl::Status CborSerializeString(absl::string_view key, absl::string_view value, struct cbor_pair kv = { .key = cbor_move(cbor_build_stringn(key.data(), key.size())), .value = cbor_move(cbor_build_stringn(value.data(), value.size()))}; - if (!cbor_map_add(&root, std::move(kv))) { + if (!cbor_map_add(&root, kv)) { error_handler(grpc::Status( grpc::INTERNAL, absl::StrCat("Failed to serialize ", key, " to CBOR"))); return absl::InternalError(""); @@ -254,7 +254,7 @@ absl::Status CborSerializeFloat(absl::string_view key, double value, struct cbor_pair kv = { .key = cbor_move(cbor_build_stringn(key.data(), key.size())), .value = cbor_move(float_val)}; - if (!cbor_map_add(&root, std::move(kv))) { + if (!cbor_map_add(&root, kv)) { error_handler(grpc::Status( grpc::INTERNAL, absl::StrCat("Failed to serialize ", key, " to CBOR"))); return absl::InternalError(""); @@ -268,7 +268,7 @@ absl::Status CborSerializeBool(absl::string_view key, bool value, struct cbor_pair kv = { .key = cbor_move(cbor_build_stringn(key.data(), key.size())), .value = cbor_move(cbor_build_bool(value))}; - if (!cbor_map_add(&root, std::move(kv))) { + if (!cbor_map_add(&root, kv)) { error_handler(grpc::Status( grpc::INTERNAL, absl::StrCat("Failed to serialize ", key, " to CBOR"))); return absl::InternalError(""); @@ -297,7 +297,7 @@ absl::Status CborSerializeAdComponentUrls( struct cbor_pair kv = { .key = cbor_move(cbor_build_stringn(key.data(), key.size())), .value = *serialized_component_renders}; - if (!cbor_map_add(&root, std::move(kv))) { + if (!cbor_map_add(&root, kv)) { error_handler(grpc::Status( grpc::INTERNAL, absl::StrCat("Failed to serialize ", key, " to CBOR"))); return absl::InternalError(""); @@ -404,7 +404,7 @@ absl::Status CborSerializeError(const AuctionResult::Error& error, struct cbor_pair code_kv = { .key = cbor_move(cbor_build_stringn(kCode, sizeof(kCode) - 1)), .value = cbor_move(cbor_build_uint(error.code()))}; - if (!cbor_map_add(*serialized_error_map, std::move(code_kv))) { + if (!cbor_map_add(*serialized_error_map, code_kv)) { error_handler(grpc::Status( grpc::INTERNAL, absl::StrCat("Failed to serialize error ", kCode, " to CBOR"))); @@ -415,7 +415,7 @@ 
absl::Status CborSerializeError(const AuctionResult::Error& error, struct cbor_pair message_kv = { .key = cbor_move(cbor_build_stringn(kMessage, sizeof(kMessage) - 1)), .value = cbor_move(cbor_build_stringn(message.data(), message.size()))}; - if (!cbor_map_add(*serialized_error_map, std::move(message_kv))) { + if (!cbor_map_add(*serialized_error_map, message_kv)) { error_handler(grpc::Status( grpc::INTERNAL, absl::StrCat("Failed to serialize error ", kMessage, " to CBOR"))); @@ -425,7 +425,7 @@ absl::Status CborSerializeError(const AuctionResult::Error& error, struct cbor_pair kv = { .key = cbor_move(cbor_build_stringn(kError, sizeof(kError) - 1)), .value = *serialized_error_map}; - if (!cbor_map_add(&root, std::move(kv))) { + if (!cbor_map_add(&root, kv)) { error_handler( grpc::Status(grpc::INTERNAL, absl::StrCat("Failed to serialize ", kError, " to CBOR"))); @@ -707,7 +707,7 @@ EncodedBuyerInputs DecodeBuyerInputKeys( continue; } - const std::string owner = DecodeCborString(interest_group.key); + std::string owner = DecodeCborString(interest_group.key); // The value is a gzip compressed bytestring. bool is_ig_val_valid_type = IsTypeValid(&cbor_isa_bytestring, interest_group.value, kIgValue, @@ -720,7 +720,7 @@ EncodedBuyerInputs DecodeBuyerInputKeys( const std::string compressed_igs( reinterpret_cast<char*>(cbor_bytestring_handle(interest_group.value)), cbor_bytestring_length(interest_group.value)); - encoded_buyer_inputs.insert({std::move(owner), std::move(compressed_igs)}); + encoded_buyer_inputs.insert({std::move(owner), compressed_igs}); return encoded_buyer_inputs; @@ -854,7 +854,7 @@ absl::Status CborSerializeBiddingGroups(const BiddingGroupMap& bidding_groups, struct cbor_pair kv = { .key = cbor_move(cbor_build_stringn(origin.data(), origin.size())), .value = *serialized_group_indices}; - if (!cbor_map_add(*serialized_group_map, std::move(kv))) { + if (!cbor_map_add(*serialized_group_map, kv)) { error_handler(grpc::Status( grpc::INTERNAL, "Failed to serialize an <origin, interest group> pair to CBOR")); @@ -864,7 +864,7 @@ absl::Status CborSerializeBiddingGroups(const BiddingGroupMap& bidding_groups, struct cbor_pair kv = {.key = cbor_move(cbor_build_stringn( kBiddingGroups, sizeof(kBiddingGroups) - 1)), .value = *serialized_group_map}; - if (!cbor_map_add(&root, std::move(kv))) { + if (!cbor_map_add(&root, kv)) { error_handler(grpc::Status( grpc::INTERNAL, absl::StrCat("Failed to serialize ", kBiddingGroups, " to CBOR"))); @@ -892,7 +892,7 @@ absl::Status CborSerializeInteractionReportingUrls( .key = cbor_move(cbor_build_stringn( kInteractionReportingUrls, sizeof(kInteractionReportingUrls) - 1)), .value = *serialized_interaction_url_map}; - if (!cbor_map_add(&root, std::move(kv))) { + if (!cbor_map_add(&root, kv)) { error_handler(grpc::Status( grpc::INTERNAL, absl::StrCat("Failed to serialize ", kInteractionReportingUrls, " to CBOR"))); @@ -1138,7 +1138,7 @@ absl::Status CborSerializeReportingUrls( .value = *serialized_reporting_urls, }; - if (!cbor_map_add(&root, std::move(serialized_reporting_urls_kv))) { + if (!cbor_map_add(&root, serialized_reporting_urls_kv)) { error_handler(grpc::Status( grpc::INTERNAL, absl::StrCat("Failed to serialize ", key, " to CBOR"))); return absl::InternalError(""); @@ -1178,7 +1178,7 @@ absl::Status CborSerializeWinReportingUrls( cbor_build_stringn(kWinReportingUrls, sizeof(kWinReportingUrls) - 1)), .value = *serialized_win_reporting_urls, }; - if (!cbor_map_add(&root, std::move(serialized_win_reporting_urls_kv))) { + if (!cbor_map_add(&root, serialized_win_reporting_urls_kv))
{ error_handler(grpc::Status( grpc::INTERNAL, absl::StrCat("Failed to serialize ", kWinReportingUrls, " to CBOR"))); diff --git a/services/seller_frontend_service/util/web_utils.h b/services/seller_frontend_service/util/web_utils.h index 58ac03c2..af39afd1 100644 --- a/services/seller_frontend_service/util/web_utils.h +++ b/services/seller_frontend_service/util/web_utils.h @@ -123,7 +123,7 @@ inline constexpr auto kComparator = [](absl::string_view a, }; #define RETURN_IF_PREV_ERRORS(error_accumulator, should_fail_fast, to_return) \ - if (error_accumulator.HasErrors() && should_fail_fast) { \ + if ((error_accumulator).HasErrors() && (should_fail_fast)) { \ return to_return; \ } diff --git a/services/seller_frontend_service/util/web_utils_test.cc b/services/seller_frontend_service/util/web_utils_test.cc index e561909d..6f5e55f8 100644 --- a/services/seller_frontend_service/util/web_utils_test.cc +++ b/services/seller_frontend_service/util/web_utils_test.cc @@ -445,7 +445,7 @@ TEST(ChromeResponseUtils, VerifyBiddingGroupBuyerOriginOrdering) { // Convert the bidding group map to CBOR. ScopedCbor cbor_data_root(cbor_new_definite_map(kNumAuctionResultKeys)); auto* cbor_internal = cbor_data_root.get(); - auto err_handler = [](grpc::Status status) {}; + auto err_handler = [](const grpc::Status& status) {}; auto result = CborSerializeBiddingGroups(bidding_group_map, err_handler, *cbor_internal); ASSERT_TRUE(result.ok()) << result; @@ -573,7 +573,7 @@ TEST(ChromeResponseUtils, CborSerializeWinReportingUrls) { ScopedCbor cbor_data_root(cbor_new_definite_map(kNumWinReportingUrlsKeys)); auto* cbor_internal = cbor_data_root.get(); - auto err_handler = [](grpc::Status status) {}; + auto err_handler = [](const grpc::Status& status) {}; auto result = CborSerializeWinReportingUrls(win_reporting_urls, err_handler, *cbor_internal); ASSERT_TRUE(result.ok()) << result; @@ -600,7 +600,7 @@ TEST(ChromeResponseUtils, NoCborGeneratedWithEmptyWinReportingUrl) { WinReportingUrls win_reporting_urls; ScopedCbor cbor_data_root(cbor_new_definite_map(kNumWinReportingUrlsKeys)); auto* cbor_internal = cbor_data_root.get(); - auto err_handler = [](grpc::Status status) {}; + auto err_handler = [](const grpc::Status& status) {}; auto result = CborSerializeWinReportingUrls(win_reporting_urls, err_handler, *cbor_internal); ASSERT_TRUE(result.ok()) << result; @@ -632,7 +632,7 @@ TEST(ChromeResponseUtils, CborWithOnlySellerReprotingUrls) { ScopedCbor cbor_data_root(cbor_new_definite_map(kNumWinReportingUrlsKeys)); auto* cbor_internal = cbor_data_root.get(); - auto err_handler = [](grpc::Status status) {}; + auto err_handler = [](const grpc::Status& status) {}; auto result = CborSerializeWinReportingUrls(win_reporting_urls, err_handler, *cbor_internal); ASSERT_TRUE(result.ok()) << result; @@ -659,7 +659,7 @@ TEST(ChromeResponseUtils, CborWithNoInteractionReportingUrls) { ->set_reporting_url(kTestReportResultUrl); ScopedCbor cbor_data_root(cbor_new_definite_map(kNumWinReportingUrlsKeys)); auto* cbor_internal = cbor_data_root.get(); - auto err_handler = [](grpc::Status status) {}; + auto err_handler = [](const grpc::Status& status) {}; auto result = CborSerializeWinReportingUrls(win_reporting_urls, err_handler, *cbor_internal); ASSERT_TRUE(result.ok()) << result; @@ -722,8 +722,8 @@ TEST(ChromeResponseUtils, VerifyCborEncodingWithWinReportingUrls) { bidding_group_map.try_emplace(interest_group_owner, std::move(ig_indices)); auto response_with_cbor = - Encode(winner, std::move(bidding_group_map), /*error=*/std::nullopt, - 
[](grpc::Status status) {}); + Encode(winner, bidding_group_map, /*error=*/std::nullopt, + [](const grpc::Status& status) {}); ASSERT_TRUE(response_with_cbor.ok()) << response_with_cbor.status(); ABSL_LOG(INFO) << "Encoded CBOR: " @@ -801,8 +801,8 @@ TEST(ChromeResponseUtils, VerifyCborEncoding) { bidding_group_map.try_emplace(interest_group_owner, std::move(ig_indices)); auto response_with_cbor = - Encode(winner, std::move(bidding_group_map), /*error=*/std::nullopt, - [](grpc::Status status) {}); + Encode(winner, bidding_group_map, /*error=*/std::nullopt, + [](const grpc::Status& status) {}); ASSERT_TRUE(response_with_cbor.ok()) << response_with_cbor.status(); ABSL_LOG(INFO) << "Encoded CBOR: " @@ -867,9 +867,9 @@ TEST(ChromeResponseUtils, CborEncodesComponentAuctionResult) { bidding_group_map.try_emplace(winner.interest_group_owner(), std::move(ig_indices)); - auto response_with_cbor = - EncodeComponent(top_level_seller, winner, std::move(bidding_group_map), - /*error=*/std::nullopt, [](grpc::Status status) {}); + auto response_with_cbor = EncodeComponent( + top_level_seller, winner, bidding_group_map, + /*error=*/std::nullopt, [](const grpc::Status& status) {}); ASSERT_TRUE(response_with_cbor.ok()) << response_with_cbor.status(); absl::StatusOr<AuctionResult> decoded_result = @@ -935,8 +935,8 @@ TEST(ChromeResponseUtils, VerifyCBOREncodedError) { AuctionResult::Error error; error.set_message(kSampleErrorMessage); error.set_code(kSampleErrorCode); - auto response_with_cbor = Encode(winner, std::move(bidding_group_map), error, - [](grpc::Status status) {}); + auto response_with_cbor = Encode(winner, bidding_group_map, error, + [](const grpc::Status& status) {}); absl::StatusOr<AuctionResult> decoded_result = CborDecodeAuctionResultToProto(*response_with_cbor); @@ -964,7 +964,7 @@ TEST(WebRequestUtils, Decode_FailsAndGetsAllErrors) { sizeof(kPublisher) - 1)), cbor_move( cbor_build_stringn(kSamplePublisher, sizeof(kSamplePublisher) - 1))}; - EXPECT_TRUE(cbor_map_add(*protected_auction_input, std::move(publisher_kv))); + EXPECT_TRUE(cbor_map_add(*protected_auction_input, publisher_kv)); expected_errors.emplace(ErrStr(/*field_name=*/kRootCborKey, /*expected_type=*/kCborTypeString, /*observed_type=*/kCborTypeByteString)); @@ -975,7 +975,7 @@ TEST(WebRequestUtils, Decode_FailsAndGetsAllErrors) { cbor_move(cbor_build_bytestring( reinterpret_cast<const uint8_t*>(kSampleGenerationId), sizeof(kSampleGenerationId) - 1))}; - EXPECT_TRUE(cbor_map_add(*protected_auction_input, std::move(gen_id_kv))); + EXPECT_TRUE(cbor_map_add(*protected_auction_input, gen_id_kv)); expected_errors.emplace(ErrStr(/*field_name=*/kGenerationId, /*expected_type=*/kCborTypeString, /*observed_type=*/kCborTypeByteString)); @@ -987,7 +987,7 @@ TEST(WebRequestUtils, Decode_FailsAndGetsAllErrors) { cbor_pair ig_name_kv = { cbor_move(cbor_build_stringn(kName, sizeof(kName) - 1)), cbor_move(cbor_build_uint32(1))}; - EXPECT_TRUE(cbor_map_add(interest_group, std::move(ig_name_kv))); + EXPECT_TRUE(cbor_map_add(interest_group, ig_name_kv)); expected_errors.emplace(ErrStr(/*field_name=*/kIgName, /*expected_type=*/kCborTypeString, /*observed_type=*/kCborTypePositiveInt)); @@ -999,7 +999,7 @@ TEST(WebRequestUtils, Decode_FailsAndGetsAllErrors) { cbor_move(cbor_build_stringn(kBrowserSignalsKey, sizeof(kBrowserSignalsKey) - 1)), cbor_move(browser_signals_array)}; - EXPECT_TRUE(cbor_map_add(interest_group, std::move(browser_signals_kv))); + EXPECT_TRUE(cbor_map_add(interest_group, browser_signals_kv)); expected_errors.emplace(ErrStr(/*field_name=*/kBrowserSignalsKey,
/*expected_type=*/kCborTypeString, /*observed_type=*/kCborTypePositiveInt)); @@ -1011,7 +1011,7 @@ TEST(WebRequestUtils, Decode_FailsAndGetsAllErrors) { cbor_move(cbor_build_stringn(kBiddingSignalsKeys, sizeof(kBiddingSignalsKeys) - 1)), cbor_move(bidding_signals_array)}; - EXPECT_TRUE(cbor_map_add(interest_group, std::move(bidding_signals_kv))); + EXPECT_TRUE(cbor_map_add(interest_group, bidding_signals_kv)); expected_errors.emplace(ErrStr(/*field_name=*/kIgBiddingSignalKeysEntry, /*expected_type=*/kCborTypeString, /*observed_type=*/kCborTypePositiveInt)); @@ -1177,8 +1177,8 @@ TEST(ChromeResponseUtils, VerifyMinimalResponseEncoding) { bidding_group_map.try_emplace(interest_group_owner_3, indices); bidding_group_map.try_emplace("owner1", std::move(indices)); - auto ret = Encode(std::move(winner), std::move(bidding_group_map), - std::nullopt, [](auto error) {}); + auto ret = Encode(std::move(winner), bidding_group_map, std::nullopt, + [](auto error) {}); ASSERT_TRUE(ret.ok()) << ret.status(); // Conversion can be verified at: https://cbor.me/ EXPECT_EQ( @@ -1232,9 +1232,9 @@ TEST(ChromeResponseUtils, VerifyMinimalComponentResponseEncoding) { bidding_group_map.try_emplace(interest_group_owner_3, indices); bidding_group_map.try_emplace("owner1", std::move(indices)); - auto ret = EncodeComponent("https://top-level-adtech.com", std::move(winner), - std::move(bidding_group_map), std::nullopt, - [](auto error) {}); + auto ret = + EncodeComponent("https://top-level-adtech.com", std::move(winner), + bidding_group_map, std::nullopt, [](auto error) {}); ASSERT_TRUE(ret.ok()) << ret.status(); // Conversion can be verified at: https://cbor.me/ EXPECT_EQ( @@ -1290,9 +1290,9 @@ TEST(ChromeResponseUtils, VerifyMinimalComponentResponseEncodingNoBidCurrency) { bidding_group_map.try_emplace(interest_group_owner_3, indices); bidding_group_map.try_emplace("owner1", std::move(indices)); - auto ret = EncodeComponent("https://top-level-adtech.com", std::move(winner), - std::move(bidding_group_map), std::nullopt, - [](auto error) {}); + auto ret = + EncodeComponent("https://top-level-adtech.com", std::move(winner), + bidding_group_map, std::nullopt, [](auto error) {}); ASSERT_TRUE(ret.ok()) << ret.status(); // Conversion can be verified at: https://cbor.me/ EXPECT_EQ( diff --git a/third_party/bazel_clang_tidy.patch b/third_party/bazel_clang_tidy.patch new file mode 100644 index 00000000..5999f748 --- /dev/null +++ b/third_party/bazel_clang_tidy.patch @@ -0,0 +1,18 @@ +diff --git a/clang_tidy/run_clang_tidy.sh b/clang_tidy/run_clang_tidy.sh +index 5cddb59..c33d0cf 100755 +--- a/clang_tidy/run_clang_tidy.sh ++++ b/clang_tidy/run_clang_tidy.sh +@@ -35,6 +35,13 @@ trap 'if (($?)); then cat "$logfile" 1>&2; fi; rm "$logfile"' EXIT + set -- \ + --checks=-clang-diagnostic-builtin-macro-redefined \ + --warnings-as-errors=-clang-diagnostic-builtin-macro-redefined \ ++ -extra-arg=-Wno-unknown-warning-option \ ++ -extra-arg=-Wno-unused-result \ ++ -extra-arg=-Wno-macro-redefined \ ++ -extra-arg=-Wno-deprecated-declarations \ ++ -extra-arg=-Wno-unused-const-variable \ ++ -extra-arg=-Wno-format \ ++ -extra-arg=-Wno-unused-function \ + "$@" + + "${CLANG_TIDY_BIN}" "$@" >"$logfile" 2>&1 diff --git a/tools/get_workspace_status b/tools/get_workspace_status new file mode 100644 index 00000000..7ef03c7d --- /dev/null +++ b/tools/get_workspace_status @@ -0,0 +1,27 @@ +#!/bin/bash +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with 
the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +if ! command -v git; then + exit 0 +fi + +TOPLEVEL_DIR="$(git rev-parse --show-toplevel)" +readonly TOPLEVEL_DIR +VERSION="$(cat "${TOPLEVEL_DIR}"/version.txt)" +readonly VERSION + +cat < GetBuyerInputMap( buyer_input_map.end()); } +SelectAdRequest::ComponentAuctionResult GetComponentAuctionResult( + rapidjson::Value* component_auction_json, absl::string_view generation_id, + const HpkeKeyset& keyset) { + CHECK(component_auction_json != nullptr) << "Input JSON must be non null"; + AuctionResult auction_result; + std::string auction_result_json_str = ValueToJson(component_auction_json); + google::protobuf::json::ParseOptions parse_options; + parse_options.ignore_unknown_fields = true; + auto auction_result_parse = google::protobuf::util::JsonStringToMessage( + auction_result_json_str, &auction_result, parse_options); + CHECK(auction_result_parse.ok()) << auction_result_parse; + auction_result.mutable_auction_params()->set_ciphertext_generation_id( + generation_id); + + // Encode, compress and encrypt Auction Result. + auto encrypted_request = + PackageServerComponentAuctionResult(auction_result, keyset); + CHECK(encrypted_request.ok()) << encrypted_request.status(); + + SelectAdRequest::ComponentAuctionResult component_auction_result; + *component_auction_result.mutable_key_id() = + std::move(encrypted_request.value().key_id); + *component_auction_result.mutable_auction_result_ciphertext() = + std::move(encrypted_request.value().ciphertext); + return component_auction_result; +} + } // namespace std::pair, @@ -179,32 +210,44 @@ PackagePlainTextSelectAdRequest(absl::string_view input_json_str, bool enable_debug_reporting, absl::string_view protected_app_signals_json) { rapidjson::Document input_json = ParseRequestInputJson(input_json_str); - google::protobuf::Map buyer_map_proto = - GetBuyerInputMap(client_type, &input_json, protected_app_signals_json); - // Encode buyer map. - absl::StatusOr> - encoded_buyer_map; - switch (client_type) { - case CLIENT_TYPE_BROWSER: - encoded_buyer_map = PackageBuyerInputsForBrowser(buyer_map_proto); - break; - case CLIENT_TYPE_ANDROID: - encoded_buyer_map = PackageBuyerInputsForApp(buyer_map_proto); - default: - break; - } - CHECK(encoded_buyer_map.ok()) << encoded_buyer_map.status(); - + auto select_ad_request = std::make_unique(); ProtectedAuctionInput protected_auction_input = GetProtectedAuctionInput(&input_json, enable_debug_reporting); - // Set encoded BuyerInput. - protected_auction_input.mutable_buyer_input()->swap(*encoded_buyer_map); + if (input_json.HasMember(kComponentAuctionsField)) { + for (auto& component_auction_json : + input_json[kComponentAuctionsField].GetArray()) { + if (component_auction_json.ObjectEmpty()) { + continue; + } + *select_ad_request->mutable_component_auction_results()->Add() = + GetComponentAuctionResult(&component_auction_json, + protected_auction_input.generation_id(), + keyset); + } + } else { + google::protobuf::Map buyer_map_proto = + GetBuyerInputMap(client_type, &input_json, protected_app_signals_json); + // Encode buyer map. 
+ absl::StatusOr<google::protobuf::Map<std::string, std::string>> + encoded_buyer_map; + switch (client_type) { + case CLIENT_TYPE_BROWSER: + encoded_buyer_map = PackageBuyerInputsForBrowser(buyer_map_proto); + break; + case CLIENT_TYPE_ANDROID: + encoded_buyer_map = PackageBuyerInputsForApp(buyer_map_proto); + default: + break; + } + CHECK(encoded_buyer_map.ok()) << encoded_buyer_map.status(); + // Set encoded BuyerInput. + protected_auction_input.mutable_buyer_input()->swap(*encoded_buyer_map); + } // Package protected_auction_input. auto pa_ciphertext_encryption_context_pair = PackagePayload(protected_auction_input, client_type, keyset); CHECK(pa_ciphertext_encryption_context_pair.ok()) << pa_ciphertext_encryption_context_pair.status(); - auto select_ad_request = std::make_unique<SelectAdRequest>(); *(select_ad_request->mutable_auction_config()) = GetAuctionConfig(&input_json); select_ad_request->set_protected_auction_ciphertext( diff --git a/tools/secure_invoke/payload_generator/payload_packaging.h b/tools/secure_invoke/payload_generator/payload_packaging.h index 429093c0..bdef4ea1 100644 --- a/tools/secure_invoke/payload_generator/payload_packaging.h +++ b/tools/secure_invoke/payload_generator/payload_packaging.h @@ -29,10 +29,12 @@ const char kAuctionConfigField[] = "auction_config"; const char kProtectedAuctionInputField[] = "raw_protected_audience_input"; const char kOldBuyerInputMapField[] = "buyer_input"; const char kBuyerInputMapField[] = "raw_buyer_input"; +const char kComponentAuctionsField[] = "raw_component_auction_results"; namespace privacy_sandbox::bidding_auction_servers { -// This method expects a plaintext json with the following fields. +// This method expects a plaintext json. For single seller/component +// auctions, it expects the following fields - // { // "auction_config" : { // "seller_signals": "...", @@ -54,22 +56,71 @@ namespace privacy_sandbox::bidding_auction_servers { // ..... // } // } -// It returns a SelectAdRequest for testing B&A servers in "test_mode" +// For this input, it returns a SelectAdRequest for testing B&A servers // by performing the following operations with the input fields - -// 1. Create a ProtectedAudienceInput object. +// 1. Create a ProtectedAuctionInput object. // 2. Encode and compress the Buyer Input Map, and copy over to the -// ProtectedAudienceInput object from 1. +// ProtectedAuctionInput object from 1. // 3. Copy over the other fields from raw_protected_audience_input into the -// ProtectedAudienceInput object from 1. -// 4. Encode the ProtectedAudienceInput object from 1. -// 5. Encrypt the ProtectedAudienceInput object from 1 using the B&A "test_mode" +// ProtectedAuctionInput object from 1. +// 4. Encode the ProtectedAuctionInput object from 1. +// 5. Encrypt the ProtectedAuctionInput object from 1 using the B&A "test_mode" // hardcoded keys. -// 6. Copy the encrypted ProtectedAudienceInput object and auction_config from +// 6. Copy the encrypted ProtectedAuctionInput object and auction_config from // the input JSON to a SelectAdRequest object. // 7. Convert the SelectAdRequest object to a JSON string. // The protected_audience_ciphertext is encoded, encrypted and compressed in the // same format as expected in the ciphertext from a CLIENT_TYPE_BROWSER. // This will be extended in the future to support CLIENT_TYPE_ANDROID as well. + +// For top level auctions, this method expects a plaintext json +// with the following fields - +// { +// "auction_config" : { +// "seller_signals": "...", +// "per_buyer_config": {"buyer1" : { +// "buyer_signals": "...", +// .... +// }} +// .....
+// } +// "raw_protected_audience_input": { +// "publisher_name": "testPublisher.com", +// ..... +// }, +// "raw_component_auction_results": [{ +// "ad_render_url": "URL", +// "interest_group_origin": "testIG.com", +// "interest_group_name": "testIG", +// "interest_group_owner": "testIG.com", +// "bidding_groups": {}, +// "score": 4.9, +// "bid": 0.2, +// "bid_currency": "USD", +// "ad_metadata": "{}", +// "top_level_seller": "SFE-domain.com", +// "auction_params": { +// "component_seller": "test_seller.com" +// } +// }, +// ..... +// ] +// } +// For this input, it returns a SelectAdRequest for testing B&A servers +// by performing the following operations with the input fields - +// 1. Create a ProtectedAuctionInput object with empty Buyer Input Map. +// 2. Copy over the other fields from raw_protected_audience_input into the +// ProtectedAuctionInput object from 1. +// 3. Encode the ProtectedAuctionInput object from 1. +// 4. Encrypt the ProtectedAuctionInput object from 1 using the provided keys. +// 5. Copy the encrypted ProtectedAuctionInput object and auction_config from +// the input JSON to a SelectAdRequest object. +// 6. Create ComponentAuctionResults from the raw_component_auction_results +// field. +// 7. Encode, compress and HPKE encrypt the ComponentAuctionResults in step 6. +// 8. Convert the SelectAdRequest object to a JSON string. +// The protected_audience_ciphertext is encoded, encrypted and compressed in the +// same format as expected in the ciphertext from a Server Component Auction. std::pair<std::unique_ptr<SelectAdRequest>, quiche::ObliviousHttpRequest::Context> PackagePlainTextSelectAdRequest( diff --git a/tools/secure_invoke/payload_generator/payload_packaging_test.cc b/tools/secure_invoke/payload_generator/payload_packaging_test.cc index 09eb9705..4034232c 100644 --- a/tools/secure_invoke/payload_generator/payload_packaging_test.cc +++ b/tools/secure_invoke/payload_generator/payload_packaging_test.cc @@ -14,7 +14,9 @@ #include "tools/secure_invoke/payload_generator/payload_packaging.h" +#include #include +#include #include @@ -26,8 +28,8 @@ #include "services/common/util/json_util.h" #include "services/seller_frontend_service/util/select_ad_reactor_test_utils.h" #include "services/seller_frontend_service/util/web_utils.h" -#include "src/cpp/communication/encoding_utils.h" -#include "src/cpp/communication/ohttp_utils.h" +#include "src/communication/encoding_utils.h" +#include "src/communication/ohttp_utils.h" #include "tools/secure_invoke/payload_generator/payload_packaging_utils.h" namespace privacy_sandbox::bidding_auction_servers { @@ -55,22 +57,34 @@ rapidjson::Document ProtoToDocument(const google::protobuf::Message& message) { } // Creates a valid input json doc.
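// A detail worth noting in the rewritten helper below: rapidjson's AddMember() and PushBack() take their Value arguments by move, leaving the source Value empty afterwards. That is why component_auction_arr is added to the document first and then re-fetched via input[kComponentAuctionsField] before PushBack(), and why each element is deep-copied into car_val with CopyFrom() using the document's allocator.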
-std::string GetValidInput(const SelectAdRequest& expected_output) { - rapidjson::Document input; - input.SetObject(); +std::string GetValidInput( + const SelectAdRequest& expected_output, + const std::vector<AuctionResult>& component_auction_results = {}) { + rapidjson::Document input(rapidjson::kObjectType); rapidjson::Value protected_audience_doc; protected_audience_doc.SetObject(); - rapidjson::Value buyerMapValue; - buyerMapValue.SetObject(); - protected_audience_doc.AddMember(kBuyerInputMapField, buyerMapValue, - input.GetAllocator()); + if (component_auction_results.empty()) { + rapidjson::Value buyer_map_value(rapidjson::kObjectType); + protected_audience_doc.AddMember(kBuyerInputMapField, buyer_map_value, + input.GetAllocator()); + } else { + rapidjson::Value component_auction_arr(rapidjson::kArrayType); + input.AddMember(kComponentAuctionsField, component_auction_arr, + input.GetAllocator()); + for (auto& car : component_auction_results) { + rapidjson::Value car_val; + car_val.CopyFrom(ProtoToDocument(car), input.GetAllocator()); + input[kComponentAuctionsField].PushBack(car_val, input.GetAllocator()); + } + } + + input.AddMember(kProtectedAuctionInputField, protected_audience_doc, + input.GetAllocator()); // Copy auction config from expected output. auto document = ProtoToDocument(expected_output.auction_config()); input.AddMember(kAuctionConfigField, document, input.GetAllocator()); - input.AddMember(kProtectedAuctionInputField, protected_audience_doc, - input.GetAllocator()); auto input_str = SerializeJsonDoc(input); CHECK(input_str.ok()) << input_str.status(); @@ -118,7 +132,8 @@ TEST(PaylodPackagingTest, FailsOnInputJsonMissingProtectedAudience) { ""); } -TEST(PaylodPackagingTest, FailsOnInputJsonMissingBuyerInputMap) { +TEST(PaylodPackagingTest, + FailsOnInputJsonMissingBuyerInputMapAndComponentAuctionResults) { rapidjson::Document input; input.SetObject(); @@ -224,6 +239,30 @@ TEST(PaylodPackagingTest, PassesTopLevelSellerForComponentAuction) { EXPECT_EQ(actual.auction_config().top_level_seller(), top_level_seller); } +TEST(PaylodPackagingTest, + PassesTopLevelSellerAndCloudPlatformForServerComponentAuction) { + absl::string_view top_level_seller = "https://toplevel-seller.com"; + auto protected_audience_input = + MakeARandomProtectedAuctionInput<ProtectedAuctionInput>(); + SelectAdRequest expected = + MakeARandomSelectAdRequest(kSellerOriginDomain, protected_audience_input); + expected.mutable_auction_config()->set_top_level_seller(top_level_seller); + expected.mutable_auction_config()->set_top_level_cloud_platform( + EncryptionCloudPlatform::ENCRYPTION_CLOUD_PLATFORM_GCP); + std::string input = GetValidInput(expected); + + HpkeKeyset keyset; + std::string output = + PackagePlainTextSelectAdRequestToJson(input, CLIENT_TYPE_BROWSER, keyset); + SelectAdRequest actual; + auto parse_output = + google::protobuf::util::JsonStringToMessage(output, &actual); + ASSERT_TRUE(parse_output.ok()) << parse_output; + EXPECT_EQ(actual.auction_config().top_level_seller(), top_level_seller); + EXPECT_EQ(actual.auction_config().top_level_cloud_platform(), + EncryptionCloudPlatform::ENCRYPTION_CLOUD_PLATFORM_GCP); +} + TEST(PaylodPackagingTest, SetsTheCorrectClientType) { auto protected_audience_input = MakeARandomProtectedAuctionInput<ProtectedAuctionInput>(); @@ -373,6 +412,109 @@ TEST(PaylodPackagingTest, HandlesProtectedAppSignals) { .empty()); } +TEST(PaylodPackagingTest, PopulatesComponentAuctionResultsForTopLevelAuction) { + auto protected_audience_input = + MakeARandomProtectedAuctionInput<ProtectedAuctionInput>(); + protected_audience_input.clear_buyer_input(); +
SelectAdRequest expected = + MakeARandomSelectAdRequest(kSellerOriginDomain, protected_audience_input); + std::vector<AuctionResult> expected_component_auctions = { + MakeARandomComponentAuctionResult( + protected_audience_input.generation_id(), kSellerOriginDomain), + MakeARandomComponentAuctionResult( + protected_audience_input.generation_id(), kSellerOriginDomain)}; + std::string input = GetValidInput(expected, expected_component_auctions); + + HpkeKeyset hardcoded_keyset; + std::string output = PackagePlainTextSelectAdRequestToJson( + input, CLIENT_TYPE_BROWSER, hardcoded_keyset); + SelectAdRequest actual; + auto parse_output = + google::protobuf::util::JsonStringToMessage(output, &actual); + ASSERT_TRUE(parse_output.ok()) << parse_output; + // Actual encoding tested by payload_packaging_util_test. + ASSERT_EQ(actual.component_auction_results_size(), + expected_component_auctions.size()); + for (auto& car : actual.component_auction_results()) { + EXPECT_FALSE(car.key_id().empty()); + EXPECT_FALSE(car.auction_result_ciphertext().empty()); + } +} + +TEST(PaylodPackagingTest, PopulatesAuctionConfigForTopLevelAuction) { + auto protected_audience_input = + MakeARandomProtectedAuctionInput<ProtectedAuctionInput>(); + protected_audience_input.clear_buyer_input(); + SelectAdRequest expected = + MakeARandomSelectAdRequest(kSellerOriginDomain, protected_audience_input); + std::vector<AuctionResult> expected_component_auctions = { + MakeARandomComponentAuctionResult( + protected_audience_input.generation_id(), kSellerOriginDomain), + MakeARandomComponentAuctionResult( + protected_audience_input.generation_id(), kSellerOriginDomain)}; + std::string input = GetValidInput(expected, expected_component_auctions); + + HpkeKeyset hardcoded_keyset; + std::string output = PackagePlainTextSelectAdRequestToJson( + input, CLIENT_TYPE_BROWSER, hardcoded_keyset); + SelectAdRequest actual; + auto parse_output = + google::protobuf::util::JsonStringToMessage(output, &actual); + ASSERT_TRUE(parse_output.ok()) << parse_output; + + google::protobuf::util::MessageDifferencer diff; + std::string difference; + diff.ReportDifferencesToString(&difference); + EXPECT_TRUE(diff.Compare(actual.auction_config(), expected.auction_config())) + << difference; +} + +TEST(PaylodPackagingTest, PopulatesProtectedAuctionInputForTopLevelAuction) { + auto protected_audience_input = + MakeARandomProtectedAuctionInput<ProtectedAuctionInput>(); + protected_audience_input.clear_buyer_input(); + SelectAdRequest expected = + MakeARandomSelectAdRequest(kSellerOriginDomain, protected_audience_input); + std::vector<AuctionResult> expected_component_auctions = { + MakeARandomComponentAuctionResult( + protected_audience_input.generation_id(), kSellerOriginDomain), + MakeARandomComponentAuctionResult( + protected_audience_input.generation_id(), kSellerOriginDomain)}; + std::string input = GetValidInput(expected, expected_component_auctions); + + HpkeKeyset hardcoded_keyset; + std::string output = PackagePlainTextSelectAdRequestToJson( + input, CLIENT_TYPE_BROWSER, hardcoded_keyset); + SelectAdRequest actual; + auto parse_output = + google::protobuf::util::JsonStringToMessage(output, &actual); + ASSERT_TRUE(parse_output.ok()) << parse_output; + + EXPECT_FALSE(actual.protected_auction_ciphertext().empty()); +} + +TEST(PaylodPackagingTest, + IgnoresEmptyComponentAuctionResultsForTopLevelAuction) { + std::string input = R"JSON( + { + "auction_config" : {}, + "client_type" : "CLIENT_TYPE_ANDROID", + "raw_protected_audience_input" : { + "generation_id" : "1" + }, + "raw_component_auction_results": [{}] + })JSON"; + + HpkeKeyset
hardcoded_keyset; + std::string output = PackagePlainTextSelectAdRequestToJson( + input, CLIENT_TYPE_BROWSER, hardcoded_keyset); + SelectAdRequest actual; + auto parse_output = + google::protobuf::util::JsonStringToMessage(output, &actual); + ASSERT_TRUE(parse_output.ok()) << parse_output; + EXPECT_EQ(actual.component_auction_results_size(), 0); +} + } // namespace } // namespace privacy_sandbox::bidding_auction_servers diff --git a/tools/secure_invoke/payload_generator/payload_packaging_utils.cc b/tools/secure_invoke/payload_generator/payload_packaging_utils.cc index d3102ad3..93be0415 100644 --- a/tools/secure_invoke/payload_generator/payload_packaging_utils.cc +++ b/tools/secure_invoke/payload_generator/payload_packaging_utils.cc @@ -14,15 +14,21 @@ #include "tools/secure_invoke/payload_generator/payload_packaging_utils.h" +#include + #include "api/bidding_auction_servers.grpc.pb.h" #include "services/common/compression/gzip.h" +#include "services/common/encryption/crypto_client_factory.h" +#include "services/common/encryption/key_fetcher_factory.h" #include "services/common/test/random.h" #include "services/common/test/utils/cbor_test_utils.h" #include "services/common/util/oblivious_http_utils.h" #include "services/seller_frontend_service/util/framing_utils.h" +#include "services/seller_frontend_service/util/proto_mapping_util.h" #include "services/seller_frontend_service/util/select_ad_reactor_test_utils.h" -#include "src/cpp/communication/encoding_utils.h" -#include "src/cpp/util/status_macro/status_macros.h" +#include "src/communication/encoding_utils.h" +#include "src/encryption/key_fetcher/fake_key_fetcher_manager.h" +#include "src/util/status_macro/status_macros.h" namespace privacy_sandbox::bidding_auction_servers { @@ -119,4 +125,38 @@ absl::StatusOr<AuctionResult> UnpackageAuctionResult( } } +absl::StatusOr<HpkeMessage> PackageServerComponentAuctionResult( + const AuctionResult& auction_result, const HpkeKeyset& keyset) { + // Serialize the auction result and frame it.
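+ // The steps below mirror the server-to-server wire format: proto-serialize, + // gzip, add the length-prefixed framing via EncodeResponsePayload(), then + // HPKE-encrypt with a fake key fetcher built from the test keyset.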
+ std::string serialized_result = auction_result.SerializeAsString(); + + PS_ASSIGN_OR_RETURN(std::string compressed_data, + GzipCompress(serialized_result)); + PS_ASSIGN_OR_RETURN( + std::string plaintext_response, + server_common::EncodeResponsePayload( + server_common::CompressionType::kGzip, compressed_data, + GetEncodedDataSize(compressed_data.size()))); + auto key_fetcher_manager = + std::make_unique<server_common::FakeKeyFetcherManager>( + keyset.public_key, keyset.private_key, std::to_string(keyset.key_id)); + auto crypto_client = CreateCryptoClient(); + return HpkeEncrypt(plaintext_response, *crypto_client, *key_fetcher_manager, + server_common::CloudPlatform::kGcp); +} + +absl::StatusOr<AuctionResult> UnpackageResultForServerComponentAuction( + absl::string_view ciphertext, absl::string_view key_id, + const HpkeKeyset& keyset) { + auto key_fetcher_manager = + std::make_unique<server_common::FakeKeyFetcherManager>( + keyset.public_key, keyset.private_key, absl::StrCat(key_id)); + auto crypto_client = CreateCryptoClient(); + SelectAdRequest::ComponentAuctionResult car; + car.set_key_id(key_id); + car.set_auction_result_ciphertext(ciphertext); + return UnpackageServerAuctionComponentResult(car, *crypto_client, + *key_fetcher_manager); +} + } // namespace privacy_sandbox::bidding_auction_servers diff --git a/tools/secure_invoke/payload_generator/payload_packaging_utils.h b/tools/secure_invoke/payload_generator/payload_packaging_utils.h index 6d6c6791..c2c54cb8 100644 --- a/tools/secure_invoke/payload_generator/payload_packaging_utils.h +++ b/tools/secure_invoke/payload_generator/payload_packaging_utils.h @@ -23,6 +23,7 @@ #include "api/bidding_auction_servers.grpc.pb.h" #include "quiche/oblivious_http/oblivious_http_client.h" #include "services/common/test/utils/ohttp_utils.h" +#include "services/common/util/hpke_utils.h" // The methods in this util file can be used for packaging // raw requests for testing. The packaging is done as follows - @@ -85,6 +86,17 @@ absl::StatusOr<AuctionResult> UnpackageAuctionResult( quiche::ObliviousHttpRequest::Context& oblivious_request_context, const HpkeKeyset& keyset); +// Returns an encoded, compressed and encrypted auction result +// for a server component auction. +absl::StatusOr<HpkeMessage> PackageServerComponentAuctionResult( + const AuctionResult& auction_result, const HpkeKeyset& keyset); + +// Returns a decrypted, decompressed and decoded proto response +// for response strings generated for a server component auction.
+absl::StatusOr<AuctionResult> UnpackageResultForServerComponentAuction( + absl::string_view ciphertext, absl::string_view key_id, + const HpkeKeyset& keyset); + } // namespace privacy_sandbox::bidding_auction_servers #endif // TOOLS_TEST_PAYLOAD_GENERATOR_UTILS_H_ diff --git a/tools/secure_invoke/payload_generator/payload_packaging_utils_test.cc b/tools/secure_invoke/payload_generator/payload_packaging_utils_test.cc index d90549d4..5f3eed35 100644 --- a/tools/secure_invoke/payload_generator/payload_packaging_utils_test.cc +++ b/tools/secure_invoke/payload_generator/payload_packaging_utils_test.cc @@ -16,6 +16,7 @@ #include #include +#include #include #include @@ -23,13 +24,19 @@ #include "include/gmock/gmock.h" #include "include/gtest/gtest.h" #include "services/common/compression/gzip.h" +#include "services/common/encryption/crypto_client_factory.h" +#include "services/common/encryption/key_fetcher_factory.h" #include "services/common/test/random.h" +#include "services/common/util/hpke_utils.h" #include "services/common/util/request_response_constants.h" #include "services/seller_frontend_service/util/framing_utils.h" +#include "services/seller_frontend_service/util/proto_mapping_util.h" +#include "services/seller_frontend_service/util/select_ad_reactor_test_utils.h" #include "services/seller_frontend_service/util/web_utils.h" -#include "src/cpp/communication/encoding_utils.h" -#include "src/cpp/communication/ohttp_utils.h" -#include "src/cpp/encryption/key_fetcher/interface/key_fetcher_manager_interface.h" +#include "src/communication/encoding_utils.h" +#include "src/communication/ohttp_utils.h" +#include "src/encryption/key_fetcher/fake_key_fetcher_manager.h" +#include "src/encryption/key_fetcher/interface/key_fetcher_manager_interface.h" namespace privacy_sandbox::bidding_auction_servers { @@ -134,7 +141,7 @@ TEST(UnpackageBrowserAuctionResultTest, GeneratesAValidResponse) { // Encode. auto encoded_data = Encode(input, expected.bidding_groups(), /*error=*/std::nullopt, - [](grpc::Status status) {}); + [](const grpc::Status& status) {}); ASSERT_TRUE(encoded_data.ok()) << encoded_data.status(); // Compress. @@ -269,6 +276,61 @@ TEST(UnpackageAppAuctionResultTest, GeneratesAValidResponse) { EXPECT_TRUE(diff.Compare(*actual, expected)) << difference; } +TEST(PackageServerComponentAuctionResultTest, GeneratesAValidResponse) { + AuctionResult expected = MakeARandomComponentAuctionResult( + MakeARandomString(), MakeARandomString()); + HpkeKeyset hardcoded_keyset; + auto output = PackageServerComponentAuctionResult(expected, hardcoded_keyset); + ASSERT_TRUE(output.ok()) << output.status(); + SelectAdRequest::ComponentAuctionResult car; + car.set_key_id(output->key_id); + car.set_auction_result_ciphertext(output->ciphertext); + + // Decrypt.
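+ // The decrypt path below reuses the server-side helper + // (UnpackageServerAuctionComponentResult) with the same fake key fetcher, + // so a successful Compare() at the end shows the round trip is lossless.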
+ auto key_fetcher_manager = + std::make_unique<server_common::FakeKeyFetcherManager>( + hardcoded_keyset.public_key, hardcoded_keyset.private_key, + std::to_string(hardcoded_keyset.key_id)); + auto crypto_client = CreateCryptoClient(); + auto actual = UnpackageServerAuctionComponentResult(car, *crypto_client, + *key_fetcher_manager); + ASSERT_TRUE(actual.ok()) << actual.status(); + + google::protobuf::util::MessageDifferencer diff; + std::string difference; + diff.ReportDifferencesToString(&difference); + EXPECT_TRUE(diff.Compare(*actual, expected)) << difference; +} + +TEST(UnpackageResultForServerComponentAuctionTest, UnpacksPayloadSuccessfully) { + AuctionResult expected = MakeARandomComponentAuctionResult( + MakeARandomString(), MakeARandomString()); + // Encode + std::string plaintext_response = + FrameAndCompressProto(expected.SerializeAsString()); + // Encrypt + HpkeKeyset hardcoded_keyset; + auto key_fetcher_manager = + std::make_unique<server_common::FakeKeyFetcherManager>( + hardcoded_keyset.public_key, hardcoded_keyset.private_key, + std::to_string(hardcoded_keyset.key_id)); + auto crypto_client = CreateCryptoClient(); + auto output_hpke = + HpkeEncrypt(plaintext_response, *crypto_client, *key_fetcher_manager, + server_common::CloudPlatform::kGcp); + ASSERT_TRUE(output_hpke.ok()) << output_hpke.status(); + ASSERT_EQ(output_hpke->key_id, std::to_string(hardcoded_keyset.key_id)); + + auto actual = UnpackageResultForServerComponentAuction( + output_hpke->ciphertext, output_hpke->key_id, hardcoded_keyset); + ASSERT_TRUE(actual.ok()) << actual.status(); + + google::protobuf::util::MessageDifferencer diff; + std::string difference; + diff.ReportDifferencesToString(&difference); + EXPECT_TRUE(diff.Compare(*actual, expected)) << difference; +} + } // namespace } // namespace privacy_sandbox::bidding_auction_servers diff --git a/tools/secure_invoke/secure_invoke.cc b/tools/secure_invoke/secure_invoke.cc index 93a0eff6..a0680a81 100644 --- a/tools/secure_invoke/secure_invoke.cc +++ b/tools/secure_invoke/secure_invoke.cc @@ -23,7 +23,7 @@ #include "absl/synchronization/notification.h" #include "api/bidding_auction_servers.grpc.pb.h" #include "services/common/test/utils/ohttp_utils.h" -#include "src/cpp/encryption/key_fetcher/src/key_fetcher_utils.h" +#include "src/encryption/key_fetcher/key_fetcher_utils.h" #include "tools/secure_invoke/flags.h" #include "tools/secure_invoke/payload_generator/payload_packaging.h" #include "tools/secure_invoke/secure_invoke_lib.h" @@ -75,13 +75,18 @@ int main(int argc, char** argv) { << "Failed to unescape public key."; std::string public_key_hex = absl::BytesToHexString(public_key_bytes); + std::string private_key_bytes; + CHECK(absl::Base64Unescape(absl::GetFlag(FLAGS_private_key), + &private_key_bytes)) + << "Failed to unescape private key."; + std::string private_key_hex = absl::BytesToHexString(private_key_bytes); + std::string id = privacy_sandbox::server_common::ToOhttpKeyId(absl::GetFlag(FLAGS_key_id)); - uint8_t key_id = stoi(id); const HpkeKeyset keyset = { - .public_key = public_key_hex, - .private_key = "unused", - .key_id = key_id, + .public_key = std::move(public_key_hex), + .private_key = std::move(private_key_hex), + .key_id = static_cast<uint8_t>(stoi(id)), }; bool enable_debug_reporting = absl::GetFlag(FLAGS_enable_debug_reporting); diff --git a/tools/secure_invoke/secure_invoke_lib.cc b/tools/secure_invoke/secure_invoke_lib.cc index 6b6adc17..4a9bd8af 100644 --- a/tools/secure_invoke/secure_invoke_lib.cc +++ b/tools/secure_invoke/secure_invoke_lib.cc @@ -30,7 +30,7 @@ #include
"services/common/constants/common_service_flags.h" #include "services/common/encryption/crypto_client_factory.h" #include "services/common/encryption/key_fetcher_factory.h" -#include "src/cpp/encryption/key_fetcher/src/fake_key_fetcher_manager.h" +#include "src/encryption/key_fetcher/fake_key_fetcher_manager.h" #include "tools/secure_invoke/flags.h" #include "tools/secure_invoke/payload_generator/payload_packaging.h" #include "tools/secure_invoke/payload_generator/payload_packaging_utils.h" @@ -44,8 +44,16 @@ constexpr char kJsonFormat[] = "JSON"; absl::StatusOr<std::string> ParseSelectAdResponse( std::unique_ptr<SelectAdResponse> resp, ClientType client_type, quiche::ObliviousHttpRequest::Context& context, const HpkeKeyset& keyset) { - absl::StatusOr<AuctionResult> res = UnpackageAuctionResult( - *resp->mutable_auction_result_ciphertext(), client_type, context, keyset); + absl::StatusOr<AuctionResult> res; + // Server component Auction + if (!resp->key_id().empty()) { + res = UnpackageResultForServerComponentAuction( + *resp->mutable_auction_result_ciphertext(), resp->key_id(), keyset); + } else { + res = UnpackageAuctionResult(*resp->mutable_auction_result_ciphertext(), + client_type, context, keyset); + } + if (!res.ok()) { return res.status(); } diff --git a/tools/secure_invoke/secure_invoke_lib_test.cc b/tools/secure_invoke/secure_invoke_lib_test.cc index 22271b28..557dbe82 100644 --- a/tools/secure_invoke/secure_invoke_lib_test.cc +++ b/tools/secure_invoke/secure_invoke_lib_test.cc @@ -160,7 +160,7 @@ TEST_F(SecureInvokeLib, RequestToBfeNeedsClientIp) { BuyerFrontEndService buyer_front_end_service{ std::move(bidding_signals_provider_), bidding_service_client_config_, std::move(key_fetcher_manager_), std::move(crypto_client_), - std::move(get_bids_config_)}; + get_bids_config_}; auto start_service_result = StartLocalService(&buyer_front_end_service); absl::SetFlag(&FLAGS_host_addr, @@ -173,7 +173,7 @@ TEST_F(SecureInvokeLib, RequestToBfeNeedsServerAddress) { BuyerFrontEndService buyer_front_end_service{ std::move(bidding_signals_provider_), bidding_service_client_config_, std::move(key_fetcher_manager_), std::move(crypto_client_), - std::move(get_bids_config_)}; + get_bids_config_}; auto start_service_result = StartLocalService(&buyer_front_end_service); EXPECT_DEATH(auto unused = SendRequestToBfe(default_keyset_, false), @@ -184,7 +184,7 @@ TEST_F(SecureInvokeLib, RequestToBfeNeedsUserAgent) { BuyerFrontEndService buyer_front_end_service{ std::move(bidding_signals_provider_), bidding_service_client_config_, std::move(key_fetcher_manager_), std::move(crypto_client_), - std::move(get_bids_config_)}; + get_bids_config_}; auto start_service_result = StartLocalService(&buyer_front_end_service); absl::SetFlag(&FLAGS_host_addr, @@ -200,7 +200,7 @@ TEST_F(SecureInvokeLib, RequestToBfeNeedsAcceptLanguage) { BuyerFrontEndService buyer_front_end_service{ std::move(bidding_signals_provider_), bidding_service_client_config_, std::move(key_fetcher_manager_), std::move(crypto_client_), - std::move(get_bids_config_)}; + get_bids_config_}; auto start_service_result = StartLocalService(&buyer_front_end_service); absl::SetFlag(&FLAGS_host_addr, @@ -216,7 +216,7 @@ TEST_F(SecureInvokeLib, RequestToBfeReturnsAResponse) { BuyerFrontEndService buyer_front_end_service{ std::move(bidding_signals_provider_), bidding_service_client_config_, std::move(key_fetcher_manager_), std::move(crypto_client_), - std::move(get_bids_config_)}; + get_bids_config_}; auto start_service_result = StartLocalService(&buyer_front_end_service); std::unique_ptr stub = @@ -237,7 +237,7 @@
TEST_F(SecureInvokeLib, RequestToBfeReturnsAResponseWithDebugReportingEnabled) { BuyerFrontEndService buyer_front_end_service{ std::move(bidding_signals_provider_), bidding_service_client_config_, std::move(key_fetcher_manager_), std::move(crypto_client_), - std::move(get_bids_config_)}; + get_bids_config_}; auto start_service_result = StartLocalService(&buyer_front_end_service); bool enable_debug_reporting = true; @@ -260,7 +260,7 @@ TEST_F(SecureInvokeLib, IncludesTopLevelSellerInBfeInput) { BuyerFrontEndService buyer_front_end_service{ std::move(bidding_signals_provider_), bidding_service_client_config_, std::move(key_fetcher_manager_), std::move(crypto_client_), - std::move(get_bids_config_)}; + get_bids_config_}; auto start_service_result = StartLocalService(&buyer_front_end_service); bool enable_debug_reporting = true; @@ -283,7 +283,7 @@ TEST_F(SecureInvokeLib, RequestToBfeNeedsValidKey) { BuyerFrontEndService buyer_front_end_service{ std::move(bidding_signals_provider_), bidding_service_client_config_, std::move(key_fetcher_manager_), std::move(crypto_client_), - std::move(get_bids_config_)}; + get_bids_config_}; auto start_service_result = StartLocalService(&buyer_front_end_service); std::unique_ptr stub = @@ -305,7 +305,7 @@ TEST_F(SecureInvokeLib, UsesKeyForBfeEncryption) { BuyerFrontEndService buyer_front_end_service{ std::move(bidding_signals_provider_), bidding_service_client_config_, std::move(key_fetcher_manager_), std::move(crypto_client_), - std::move(get_bids_config_)}; + get_bids_config_}; auto start_service_result = StartLocalService(&buyer_front_end_service); std::unique_ptr stub = diff --git a/version.txt b/version.txt index 0fa4ae48..fbcbf738 100644 --- a/version.txt +++ b/version.txt @@ -1 +1 @@ -3.3.0 \ No newline at end of file +3.4.0 \ No newline at end of file
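A note on the recurring cbor_map_add change above: libcbor's cbor_map_add(cbor_item_t*, struct cbor_pair) takes the pair by value, and cbor_pair is a trivially copyable aggregate of two cbor_item_t* handles, so wrapping the argument in std::move never moved anything; it only trips clang-tidy checks such as performance-move-const-arg now that tidy runs over this code. A minimal sketch of the warning-free pattern, assuming only libcbor's public API:

#include <cbor.h>

int main() {
  cbor_item_t* map = cbor_new_definite_map(1);
  // cbor_move() hands refcount ownership to the container; the pair itself
  // is passed by value, so no std::move is needed (or useful).
  struct cbor_pair kv = {
      .key = cbor_move(cbor_build_string("key")),
      .value = cbor_move(cbor_build_string("value")),
  };
  bool ok = cbor_map_add(map, kv);
  cbor_decref(&map);  // releases the map and its children
  return ok ? 0 : 1;
}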
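The RETURN_IF_PREV_ERRORS change in web_utils.h is the standard macro-hygiene fix: a parameter expanded without parentheses re-associates with neighboring operators at the call site. A small self-contained sketch (hypothetical macro and variable names, not the server's types) of how the unparenthesized form goes wrong once the argument is itself an expression:

#include <iostream>

// Unparenthesized: `cond` splices into the surrounding && expression.
#define FAIL_FAST_BAD(has_errors, cond) ((has_errors) && cond)
// Parenthesized, as web_utils.h now does it.
#define FAIL_FAST_GOOD(has_errors, cond) ((has_errors) && (cond))

int main() {
  bool has_errors = false, a = false, b = true;
  // Expands to ((has_errors) && a || b); since && binds tighter than ||,
  // this is (false && false) || true, i.e. true -- the macro would bail out
  // even though no errors were accumulated.
  std::cout << FAIL_FAST_BAD(has_errors, a || b) << '\n';   // prints 1
  // Expands to ((has_errors) && (a || b)), i.e. false, as intended.
  std::cout << FAIL_FAST_GOOD(has_errors, a || b) << '\n';  // prints 0
  return 0;
}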