diff --git a/source/neuropod/backends/neuropod_backend.cc b/source/neuropod/backends/neuropod_backend.cc
index 09df0a3d..8b991293 100644
--- a/source/neuropod/backends/neuropod_backend.cc
+++ b/source/neuropod/backends/neuropod_backend.cc
@@ -219,7 +219,7 @@ NeuropodBackend::NeuropodBackend(const std::string &neuropod_path, RuntimeOption
     : model_config_(load_model_config(neuropod_path)),
       neuropod_path_(neuropod_path),
       options_(std::move(options)),
-      sealer_(stdx::make_unique(get_device_mapping(*model_config_, options_)))
+      sealer_(std::make_unique(get_device_mapping(*model_config_, options_)))
 {
     loader_ = get_loader(neuropod_path);
 }
@@ -301,7 +301,7 @@ std::unique_ptr NeuropodBackend::infer_internal(const Neuropod
     // Run inference and get all the outputs
     auto data = infer_internal(inputs);
 
-    auto out = stdx::make_unique();
+    auto out = std::make_unique();
 
     // Filter to the requested outputs
     for (const auto &tensor_name : requested_outputs)
diff --git a/source/neuropod/backends/python_bridge/python_bridge.cc b/source/neuropod/backends/python_bridge/python_bridge.cc
index a01a2191..8064c632 100644
--- a/source/neuropod/backends/python_bridge/python_bridge.cc
+++ b/source/neuropod/backends/python_bridge/python_bridge.cc
@@ -165,7 +165,7 @@ std::unique_ptr maybe_initialize()
     // TODO: shutdown the interpreter once we know that there are no more python objects left
     // atexit(py::finalize_interpreter);
 
-    return stdx::make_unique();
+    return std::make_unique();
 }
 
 // Handle interpreter startup and shutdown
@@ -205,7 +205,7 @@ void PythonBridge::load_model_internal()
     const auto local_path = loader_->ensure_local();
 
     // Load the neuropod and save a reference to it
-    neuropod_ = stdx::make_unique(load_neuropod(local_path));
+    neuropod_ = std::make_unique(load_neuropod(local_path));
 }
 
 PythonBridge::~PythonBridge()
@@ -249,7 +249,7 @@ std::unique_ptr PythonBridge::infer_internal(const NeuropodVal
     auto outputs = from_numpy_dict(*get_tensor_allocator(), model_outputs);
 
     // We need a unique pointer
-    return stdx::make_unique(std::move(outputs));
+    return std::make_unique(std::move(outputs));
 }
 
 REGISTER_NEUROPOD_BACKEND(PythonBridge, "python", PY_VERSION)
diff --git a/source/neuropod/backends/tensorflow/tf_backend.cc b/source/neuropod/backends/tensorflow/tf_backend.cc
index 2a85fe7f..523cb351 100644
--- a/source/neuropod/backends/tensorflow/tf_backend.cc
+++ b/source/neuropod/backends/tensorflow/tf_backend.cc
@@ -385,7 +385,7 @@ std::unique_ptr TensorflowNeuropodBackend::infer_internal(
     check_tf_status(session_->RunCallable(handle, tf_inputs, &outputs, nullptr));
 
     // Read the outputs and wrap them in `NeuropodTensor`s
-    auto to_return = stdx::make_unique();
+    auto to_return = std::make_unique();
     size_t position = 0;
     for (const auto &item : tensor_fetches)
     {
diff --git a/source/neuropod/backends/torchscript/torch_backend.cc b/source/neuropod/backends/torchscript/torch_backend.cc
index e84029d2..fa8161d6 100644
--- a/source/neuropod/backends/torchscript/torch_backend.cc
+++ b/source/neuropod/backends/torchscript/torch_backend.cc
@@ -171,7 +171,7 @@ void insert_value_in_output(NeuropodValueMap & output,
         (!has_type && list[0].isString()))
     {
         // Make a TorchNeuropodTensor
-        auto neuropod_tensor = stdx::make_unique>(tensor);
+        auto neuropod_tensor = std::make_unique>(tensor);
 
         // Add it to our output
         auto &to_set = output[name];
@@ -416,7 +416,7 @@ std::unique_ptr TorchNeuropodBackend::infer_internal(const Neu
     c10::IValue result = model_->forward(torch_inputs);
 
     // Get outputs
-    auto to_return = stdx::make_unique();
+    auto to_return = std::make_unique();
 
     if (result.isGenericDict())
     {
diff --git a/source/neuropod/bindings/neuropod_native.cc b/source/neuropod/bindings/neuropod_native.cc
index 910dcc81..648a4d62 100644
--- a/source/neuropod/bindings/neuropod_native.cc
+++ b/source/neuropod/bindings/neuropod_native.cc
@@ -144,7 +144,7 @@ template
 std::unique_ptr make_neuropod(py::kwargs kwargs, Params &&... params)
 {
     auto options = get_options_from_kwargs(kwargs);
-    return stdx::make_unique(std::forward(params)..., options);
+    return std::make_unique(std::forward(params)..., options);
 }
 
 } // namespace
diff --git a/source/neuropod/core/generic_tensor.cc b/source/neuropod/core/generic_tensor.cc
index 46e4253b..379eca31 100644
--- a/source/neuropod/core/generic_tensor.cc
+++ b/source/neuropod/core/generic_tensor.cc
@@ -25,7 +25,7 @@ namespace neuropod
 
 std::unique_ptr get_generic_tensor_allocator()
 {
-    return stdx::make_unique>();
+    return std::make_unique>();
 }
 
 } // namespace neuropod
diff --git a/source/neuropod/internal/backend_registration.cc b/source/neuropod/internal/backend_registration.cc
index 31359b52..00d80b90 100644
--- a/source/neuropod/internal/backend_registration.cc
+++ b/source/neuropod/internal/backend_registration.cc
@@ -46,7 +46,7 @@ std::unique_ptr> registered_backend
 void init_registrar_if_needed()
 {
     std::call_once(registrar_initialized, []() {
-        registered_backends_by_type = stdx::make_unique>();
+        registered_backends_by_type = std::make_unique>();
 
         // Make sure our logging is initialized
         init_logging();
diff --git a/source/neuropod/internal/backend_registration.hh b/source/neuropod/internal/backend_registration.hh
index 9b6ede67..0e521e21 100644
--- a/source/neuropod/internal/backend_registration.hh
+++ b/source/neuropod/internal/backend_registration.hh
@@ -38,7 +38,7 @@ typedef std::unique_ptr (*BackendFactoryFunction)(const std::st
 template
 std::unique_ptr createNeuropodBackend(const std::string &neuropod_path, const RuntimeOptions &options)
 {
-    return stdx::make_unique(neuropod_path, options);
+    return std::make_unique(neuropod_path, options);
 }
 
 // Register a backend for a set of specific types
diff --git a/source/neuropod/internal/config_utils.cc b/source/neuropod/internal/config_utils.cc
index 8414fd94..26d2603e 100644
--- a/source/neuropod/internal/config_utils.cc
+++ b/source/neuropod/internal/config_utils.cc
@@ -264,7 +264,7 @@ std::unique_ptr load_model_config(std::istream &input_stream)
     }
 
     // Not directly using make_unique because of brace initialization
-    return stdx::make_unique(
+    return std::make_unique(
         ModelConfig{name, platform, platform_version_semver, inputs, outputs, custom_ops, input_tensor_device});
 }
 
diff --git a/source/neuropod/internal/config_utils.hh b/source/neuropod/internal/config_utils.hh
index 2e33ff5f..a31ac18e 100644
--- a/source/neuropod/internal/config_utils.hh
+++ b/source/neuropod/internal/config_utils.hh
@@ -15,7 +15,6 @@ limitations under the License.
 
 #pragma once
 
-#include "neuropod/internal/memory_utils.hh"
 #include "neuropod/internal/tensor_types.hh"
 
 #include
diff --git a/source/neuropod/internal/memory_utils.hh b/source/neuropod/internal/memory_utils.hh
deleted file mode 100644
index 9fcc0cc6..00000000
--- a/source/neuropod/internal/memory_utils.hh
+++ /dev/null
@@ -1,65 +0,0 @@
-/* Copyright (c) 2020 The Neuropod Authors
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-
-#pragma once
-
-#include
-#include
-
-namespace neuropod
-{
-namespace stdx
-{
-namespace detail
-{
-
-template
-struct unique_if
-{
-    using unique_ptr = std::unique_ptr;
-};
-
-template
-struct unique_if
-{
-    using unique_ptr_array_unknown_bound = std::unique_ptr;
-};
-
-template
-struct unique_if
-{
-    using unique_ptr_array_known_bound = void;
-};
-
-} // namespace detail
-
-// See http://en.cppreference.com/w/cpp/memory/unique_ptr/make_unique
-template
-typename detail::unique_if::unique_ptr make_unique(Args &&... args)
-{
-    return std::unique_ptr(new T(std::forward(args)...));
-}
-
-template
-typename detail::unique_if::unique_ptr_array_unknown_bound make_unique(std::size_t n)
-{
-    return std::unique_ptr(new typename std::remove_extent::type[n]());
-}
-
-template
-typename detail::unique_if::unique_ptr_array_known_bound make_unique(Args &&...) = delete;
-
-} // namespace stdx
-} // namespace neuropod
diff --git a/source/neuropod/internal/neuropod_loader.cc b/source/neuropod/internal/neuropod_loader.cc
index ba3a0235..4d5c1b33 100644
--- a/source/neuropod/internal/neuropod_loader.cc
+++ b/source/neuropod/internal/neuropod_loader.cc
@@ -48,7 +48,7 @@ class LocalLoader : public NeuropodLoader
 
     std::unique_ptr get_istream_for_file(const std::string &path) override
     {
-        auto ret = stdx::make_unique(get_file_path(path));
+        auto ret = std::make_unique(get_file_path(path));
         if (!(*ret))
         {
             return nullptr;
@@ -99,7 +99,7 @@ class ZipLoader : public NeuropodLoader
 
     std::unique_ptr get_istream_for_file(const std::string &path) override
     {
-        auto out = stdx::make_unique();
+        auto out = std::make_unique();
         if (!unzipper_.extractEntryToStream(path, *out))
         {
             return nullptr;
@@ -174,10 +174,10 @@ std::unique_ptr get_loader(const std::string &neuropod_path)
 
     if (fs::is_directory(neuropod_path))
     {
-        return stdx::make_unique(neuropod_path);
+        return std::make_unique(neuropod_path);
     }
 
-    return stdx::make_unique(neuropod_path);
+    return std::make_unique(neuropod_path);
 }
 
 } // namespace neuropod
diff --git a/source/neuropod/internal/neuropod_tensor.hh b/source/neuropod/internal/neuropod_tensor.hh
index 3ecbb11a..e5c777eb 100644
--- a/source/neuropod/internal/neuropod_tensor.hh
+++ b/source/neuropod/internal/neuropod_tensor.hh
@@ -644,7 +644,7 @@ protected:
 // Utility to make a tensor of a specific type
 #define MAKE_TENSOR(CPP_TYPE, NEUROPOD_TYPE)                                 \
     case NEUROPOD_TYPE: {                                                    \
-        return stdx::make_unique>(std::forward(params)...); \
+        return std::make_unique>(std::forward(params)...); \
     }
 
 template
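The net effect across all of these files: call sites stop going through the project-local `neuropod::stdx::make_unique` backport (whose implementation, memory_utils.hh, is deleted above) and use the C++14 standard facility directly. Below is a minimal sketch of the resulting call pattern, assuming a C++14 toolchain; `Widget` and `make_widget` are hypothetical placeholders, not names from this diff.

    #include <memory>

    // Hypothetical stand-in for the classes the diff actually touches
    struct Widget
    {
        explicit Widget(int v) : value(v) {}
        int value;
    };

    std::unique_ptr<Widget> make_widget(int v)
    {
        // Previously: neuropod::stdx::make_unique<...>(...), provided by memory_utils.hh
        // Now: std::make_unique, available in the standard library since C++14
        return std::make_unique<Widget>(v);
    }

    int main()
    {
        auto w = make_widget(42);
        return w->value == 42 ? 0 : 1;
    }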