diff --git a/.github/workflows/dotnet-ci.yml b/.github/workflows/dotnet-ci.yml deleted file mode 100644 index 75e2aa14..00000000 --- a/.github/workflows/dotnet-ci.yml +++ /dev/null @@ -1,37 +0,0 @@ -name: .NET CI - -on: - push: - branches: - - main - pull_request: - branches: - - main - -jobs: - static_code_analysis: - runs-on: ubuntu-20.04 - steps: - - name: Checkout repository and submodules - uses: actions/checkout@v3 - - name: Install .NET 6.0.119 - uses: actions/setup-dotnet@v3 - with: - dotnet-version: 6.0.119 - global-json-file: cloud_connectors/azure/digital_twins_connector/global.json - - name: Cache NuGet dependencies - uses: actions/cache@v3 - with: - path: ~/.nuget/packages - # Look to see if there is a cache hit for the corresponding requirements file - key: ${{ runner.os }}-nuget-${{ hashFiles('**/packages.lock.json') }} - restore-keys: | - ${{ runner.os }}-nuget - - name: Build Digital Twins Connector - run: ./cloud_connectors/azure/digital_twins_connector/build.sh - - name: Build MQTT Connector's Azure Function - run: dotnet build cloud_connectors/azure/mqtt_connector/res/azure_function/src/function.csproj - - name: Digital Twins Connector Tests - run: dotnet test cloud_connectors/azure/digital_twins_connector/tests/**/*.csproj - - name: MQTT Connector's Azure Function Tests - run: dotnet test cloud_connectors/azure/mqtt_connector/res/azure_function/tests/MQTTConnectorAzureFunction.Tests.csproj \ No newline at end of file diff --git a/.github/workflows/security-audit.yml b/.github/workflows/security-audit.yml index 186d7927..09d029fa 100644 --- a/.github/workflows/security-audit.yml +++ b/.github/workflows/security-audit.yml @@ -7,7 +7,6 @@ on: - "**/Cargo.toml" - "**/Cargo.lock" - ".github/workflows/security-audit.yaml" - - "cloud_connector/**" schedule: - cron: "0 0 * * *" # once a day at midnight UTC # NB: that cron trigger on GH actions runs only on the default branch @@ -25,41 +24,4 @@ jobs: uses: actions-rs/cargo@v1 with: command: audit - args: --deny warnings - - security_audit_csharp: - runs-on: ubuntu-latest - strategy: - fail-fast: false - matrix: - language: [ 'csharp' ] - steps: - - name: Checkout repository and submodules - uses: actions/checkout@v3 - with: - submodules: recursive - - name: Install .NET 6.0.119 - uses: actions/setup-dotnet@v3 - with: - dotnet-version: 6.0.119 - global-json-file: cloud_connectors/azure/digital_twins_connector/global.json - - name: Cache NuGet dependencies - uses: actions/cache@v3 - with: - path: ~/.nuget/packages - # Look to see if there is a cache hit for the corresponding requirements file - key: ${{ runner.os }}-nuget-${{ hashFiles('**/packages.lock.json') }} - restore-keys: | - ${{ runner.os }}-nuget - - name: Check .NET version - run: dotnet --info - - name: Initialize CodeQL - uses: github/codeql-action/init@v2 - with: - languages: ${{ matrix.language }} - - name: Build Digital Twins Connector - run: ./cloud_connectors/azure/digital_twins_connector/build.sh - - name: Build MQTT Connector's Azure Function - run: dotnet build cloud_connectors/azure/mqtt_connector/res/azure_function/src/function.csproj - - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v2 \ No newline at end of file + args: --deny warnings \ No newline at end of file diff --git a/Cargo.toml b/Cargo.toml index d2a72788..6fd834fb 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -7,13 +7,9 @@ resolver = "2" members = [ "build_common", - "cloud_adapters/azure_cloud_connector_adapter", "cloud_adapters/in_memory_mock_cloud_adapter", - 
"cloud_connectors/azure/mqtt_connector", - "cloud_connectors/azure/proto-build", "common", "contracts", - "digital_twin_adapters/ibeji_adapter", "digital_twin_adapters/in_memory_mock_digital_twin_adapter", "digital_twin_adapters/mock_digital_twin_adapter", "freyja", @@ -29,7 +25,6 @@ members = [ [workspace.dependencies] # Freyja dependencies -azure-cloud-connector-proto = { path = "cloud_connectors/azure/proto-build" } freyja-build-common = { path = "build_common" } freyja-common = { path = "common" } freyja-contracts = { path = "contracts" } @@ -51,15 +46,12 @@ crossbeam = "0.8.2" env_logger = "0.10.0" futures = "0.3.28" home = "0.5.5" -httptest = "0.15.4" log = "^0.4" mockall = "0.11.4" paho-mqtt = "0.12" proc-macro2 = "1.0.52" -prost = "0.11.9" quote = "1.0.23" reqwest = { version = "0.11.4", features = ["json"] } -rstest = "0.18.1" serde = { version = "1.0.88", features = ["derive"] } serde_json = "1.0.88" strum = "0.25.0" diff --git a/cloud_adapters/azure_cloud_connector_adapter/Cargo.toml b/cloud_adapters/azure_cloud_connector_adapter/Cargo.toml deleted file mode 100644 index 9a4bfeb0..00000000 --- a/cloud_adapters/azure_cloud_connector_adapter/Cargo.toml +++ /dev/null @@ -1,24 +0,0 @@ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT license. -# SPDX-License-Identifier: MIT - -[package] -name = "azure-cloud-connector-adapter" -version = "0.1.0" -edition = "2021" -license = "MIT" - -[dependencies] -async-trait = { workspace = true } -azure-cloud-connector-proto = { workspace = true } -freyja-common = { workspace = true } -freyja-contracts = { workspace = true } -futures = { workspace = true } -log = { workspace = true } -serde = { workspace = true } -serde_json = { workspace = true } -tempfile = { workspace = true } -tonic = { workspace = true } -tokio = { workspace = true } -tokio-stream = { workspace = true } -tower = { workspace = true } \ No newline at end of file diff --git a/cloud_adapters/azure_cloud_connector_adapter/README.md b/cloud_adapters/azure_cloud_connector_adapter/README.md deleted file mode 100644 index 593f3a31..00000000 --- a/cloud_adapters/azure_cloud_connector_adapter/README.md +++ /dev/null @@ -1,19 +0,0 @@ -# Azure Cloud Connector Adapter - -This is an example implementation of an adapter for the #[Azure Cloud Connectors](../../cloud_connectors/azure/README.md). - -This adapter is used to communicate with an Azure Cloud Connector to synchronize in-vehicle signals with the cloud. - -## Prerequisites - -### Azure Cloud Connector - -You will need to either have the #[Azure Digital Twins Connector](../../cloud_connectors/azure/digital_twins_connector/README.md) or #[Azure MQTT Connector](../../cloud_connectors/azure/mqtt_connector/README.md) running. - -## Build - -1. Before building, please ensure that the `cloud_connector_url` field in `res/azure_cloud_connector_adapter_config.sample.json` matches the url that the Azure Cloud Connector uses. - -```shell -cargo build -``` diff --git a/cloud_adapters/azure_cloud_connector_adapter/build.rs b/cloud_adapters/azure_cloud_connector_adapter/build.rs deleted file mode 100644 index 9b75cf80..00000000 --- a/cloud_adapters/azure_cloud_connector_adapter/build.rs +++ /dev/null @@ -1,21 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. 
-// SPDX-License-Identifier: MIT - -use std::{env, fs, path::Path}; - -const OUT_DIR: &str = "OUT_DIR"; -const SAMPLE_CONFIG_FILE: &str = "res/azure_cloud_connector_adapter_config.sample.json"; -const CONFIG_FILE: &str = "azure_cloud_connector_adapter_config.json"; - -fn main() { - // The current directory of the build script is the package's root directory - let config_path = env::current_dir().unwrap().join(SAMPLE_CONFIG_FILE); - - let target_dir = env::var(OUT_DIR).unwrap(); - let dest_path = Path::new(&target_dir).join(CONFIG_FILE); - - fs::copy(&config_path, dest_path).unwrap(); - - println!("cargo:rerun-if-changed={}", config_path.to_str().unwrap()); -} diff --git a/cloud_adapters/azure_cloud_connector_adapter/res/azure_cloud_connector_adapter_config.sample.json b/cloud_adapters/azure_cloud_connector_adapter/res/azure_cloud_connector_adapter_config.sample.json deleted file mode 100644 index 71a7c2c1..00000000 --- a/cloud_adapters/azure_cloud_connector_adapter/res/azure_cloud_connector_adapter_config.sample.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "max_retries": 5, - "retry_interval_ms": 1000, - "cloud_connector_url": "http://[::1]:8890" -} \ No newline at end of file diff --git a/cloud_adapters/azure_cloud_connector_adapter/src/azure_cloud_connector_adapter.rs b/cloud_adapters/azure_cloud_connector_adapter/src/azure_cloud_connector_adapter.rs deleted file mode 100644 index cb9b9048..00000000 --- a/cloud_adapters/azure_cloud_connector_adapter/src/azure_cloud_connector_adapter.rs +++ /dev/null @@ -1,269 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -// SPDX-License-Identifier: MIT - -use std::{env, fs, path::Path, time::Duration}; - -use async_trait::async_trait; -use azure_cloud_connector_proto::azure_cloud_connector::{ - azure_cloud_connector_client::AzureCloudConnectorClient, UpdateDigitalTwinRequest, -}; -use log::debug; -use serde::{Deserialize, Serialize}; -use tonic::transport::Channel; - -use crate::azure_cloud_connector_adapter_config::{Config, CONFIG_FILE}; -use freyja_common::retry_utils::execute_with_retry; -use freyja_contracts::cloud_adapter::{ - CloudAdapter, CloudAdapterError, CloudMessageRequest, CloudMessageResponse, -}; - -const MODEL_ID_KEY: &str = "model_id"; -const INSTANCE_ID_KEY: &str = "instance_id"; -const INSTANCE_PROPERTY_PATH_KEY: &str = "instance_property_path"; - -/// The Cloud Connector Adapter for communicating with the Cloud Connector -pub struct AzureCloudConnectorAdapter { - // A gRPC Client for communicating with the Azure Cloud Connector - cloud_connector_client: AzureCloudConnectorClient, -} - -/// Contains info about a digital twin instance -#[derive(Debug, Default, Clone, Serialize, Deserialize)] -struct CloudDigitalTwinInstance { - /// The id of the target signal's model - pub model_id: String, - - /// The id of the target signal's instance - pub instance_id: String, - - /// The path of the property within the instance to target - pub instance_property_path: String, -} - -impl AzureCloudConnectorAdapter { - /// Gets info about an instance from a cloud message - /// - /// # Arguments - /// - `cloud_message`: represents a message to send to the cloud canonical model - fn get_instance_info_from_message( - cloud_message: &CloudMessageRequest, - ) -> Result { - Ok(CloudDigitalTwinInstance { - model_id: cloud_message - .cloud_signal - .get(MODEL_ID_KEY) - .ok_or_else(|| { - CloudAdapterError::key_not_found(format!("Cannot find key: {MODEL_ID_KEY:}")) - })? 
- .clone(), - instance_id: cloud_message - .cloud_signal - .get(INSTANCE_ID_KEY) - .ok_or_else(|| { - CloudAdapterError::key_not_found(format!("Cannot find key: {INSTANCE_ID_KEY:}")) - })? - .clone(), - instance_property_path: cloud_message - .cloud_signal - .get(INSTANCE_PROPERTY_PATH_KEY) - .ok_or_else(|| { - CloudAdapterError::key_not_found(format!( - "Cannot find key: {INSTANCE_PROPERTY_PATH_KEY:}" - )) - })? - .clone(), - }) - } -} - -#[async_trait] -impl CloudAdapter for AzureCloudConnectorAdapter { - /// Creates a new instance of a CloudAdapter with default settings - fn create_new() -> Result { - let cloud_connector_client = futures::executor::block_on(async { - let config_file = fs::read_to_string(Path::new(env!("OUT_DIR")).join(CONFIG_FILE)) - .map_err(CloudAdapterError::io)?; - // Load the config - let config: Config = - serde_json::from_str(&config_file).map_err(CloudAdapterError::deserialize)?; - - execute_with_retry( - config.max_retries, - Duration::from_millis(config.retry_interval_ms), - || AzureCloudConnectorClient::connect(config.cloud_connector_url.clone()), - Some(String::from( - "Connection retry for connecting to Azure Cloud Connector", - )), - ) - .await - .map_err(CloudAdapterError::communication) - })?; - - Ok(Self { - cloud_connector_client, - }) - } - - /// Sends the signal to the cloud - /// - /// # Arguments - /// - `cloud_message`: represents a message to send to the cloud canonical model - async fn send_to_cloud( - &self, - cloud_message: CloudMessageRequest, - ) -> Result { - debug!("Received a request to send to the cloud"); - let cloud_message_string = - serde_json::to_string_pretty(&cloud_message).map_err(CloudAdapterError::serialize)?; - debug!("Cloud canonical value:\n{cloud_message_string}"); - - let cloud_digital_twin_instance = Self::get_instance_info_from_message(&cloud_message)?; - - let request = tonic::Request::new(UpdateDigitalTwinRequest { - model_id: cloud_digital_twin_instance.model_id, - instance_id: cloud_digital_twin_instance.instance_id, - instance_property_path: cloud_digital_twin_instance.instance_property_path, - data: cloud_message.signal_value, - }); - - let response = self - .cloud_connector_client - .clone() - .update_digital_twin(request) - .await - .map_err(CloudAdapterError::communication)?; - debug!("Response from cloud connector {response:?}"); - - Ok(CloudMessageResponse {}) - } -} - -#[cfg(test)] -mod azure_cloud_connector_tests { - use super::*; - - use std::collections::HashMap; - - #[tokio::test] - async fn get_instance_info_from_message_test() { - let cloud_message = CloudMessageRequest { - cloud_signal: HashMap::new(), - signal_value: String::new(), - signal_timestamp: String::new(), - }; - let cloud_digital_twin_instance = - AzureCloudConnectorAdapter::get_instance_info_from_message(&cloud_message); - assert!(cloud_digital_twin_instance.is_err()); - - let mut cloud_signal_map = HashMap::new(); - cloud_signal_map.insert(String::from(MODEL_ID_KEY), String::from("some-model-id")); - cloud_signal_map.insert( - String::from(INSTANCE_ID_KEY), - String::from("some-instance-id"), - ); - cloud_signal_map.insert( - String::from(INSTANCE_PROPERTY_PATH_KEY), - String::from("some-instance-property-path"), - ); - } - - /// The tests below uses Unix sockets to create a channel between a gRPC client and a gRPC server. - /// Unix sockets are more ideal than using TCP/IP sockets since Rust tests will run in parallel - /// so you would need to set an arbitrary port per test for TCP/IP sockets. 
- #[cfg(unix)] - mod unix_tests { - use super::*; - - use std::sync::Arc; - - use azure_cloud_connector_proto::azure_cloud_connector::azure_cloud_connector_server::{ - AzureCloudConnector, AzureCloudConnectorServer, - }; - use azure_cloud_connector_proto::azure_cloud_connector::UpdateDigitalTwinResponse; - use tempfile::TempPath; - use tokio::net::{UnixListener, UnixStream}; - use tokio_stream::wrappers::UnixListenerStream; - use tonic::transport::{Channel, Endpoint, Server, Uri}; - use tonic::{Request, Response, Status}; - use tower::service_fn; - - pub struct MockAzureConnector {} - - #[tonic::async_trait] - impl AzureCloudConnector for MockAzureConnector { - /// Updates a digital twin instance - /// - /// # Arguments - /// - `request`: the request to send - async fn update_digital_twin( - &self, - _request: Request, - ) -> Result, Status> { - let response = UpdateDigitalTwinResponse { - reply: String::new(), - }; - Ok(Response::new(response)) - } - } - - async fn create_test_grpc_client( - bind_path: Arc, - ) -> AzureCloudConnectorClient { - let channel = Endpoint::try_from("http://URI_IGNORED") // Devskim: ignore DS137138 - .unwrap() - .connect_with_connector(service_fn(move |_: Uri| { - let bind_path = bind_path.clone(); - async move { UnixStream::connect(bind_path.as_ref()).await } - })) - .await - .unwrap(); - - AzureCloudConnectorClient::new(channel) - } - - async fn run_test_grpc_server(uds_stream: UnixListenerStream) { - let mock_azure_connector = MockAzureConnector {}; - Server::builder() - .add_service(AzureCloudConnectorServer::new(mock_azure_connector)) - .serve_with_incoming(uds_stream) - .await - .unwrap(); - } - - #[tokio::test] - async fn send_request_to_provider() { - // Create the Unix Socket - let bind_path = Arc::new(tempfile::NamedTempFile::new().unwrap().into_temp_path()); - let uds = match UnixListener::bind(bind_path.as_ref()) { - Ok(unix_listener) => unix_listener, - Err(_) => { - std::fs::remove_file(bind_path.as_ref()).unwrap(); - UnixListener::bind(bind_path.as_ref()).unwrap() - } - }; - let uds_stream = UnixListenerStream::new(uds); - - let request_future = async { - let mut client = create_test_grpc_client(bind_path.clone()).await; - - let request = tonic::Request::new(UpdateDigitalTwinRequest { - model_id: String::from( - "dtmi:sdv:Cloud:Vehicle:Cabin:HVAC:AmbientAirTemperature;1", - ), - instance_id: String::from("hvac"), - instance_property_path: String::from("/AmbientAirTemperature"), - data: String::from("12.00"), - }); - assert!(client.update_digital_twin(request).await.is_ok()) - }; - - tokio::select! { - _ = run_test_grpc_server(uds_stream) => (), - _ = request_future => () - } - - std::fs::remove_file(bind_path.as_ref()).unwrap(); - } - } -} diff --git a/cloud_adapters/azure_cloud_connector_adapter/src/azure_cloud_connector_adapter_config.rs b/cloud_adapters/azure_cloud_connector_adapter/src/azure_cloud_connector_adapter_config.rs deleted file mode 100644 index 3e985fd9..00000000 --- a/cloud_adapters/azure_cloud_connector_adapter/src/azure_cloud_connector_adapter_config.rs +++ /dev/null @@ -1,20 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. 
-// SPDX-License-Identifier: MIT - -use serde::{Deserialize, Serialize}; - -pub(crate) const CONFIG_FILE: &str = "azure_cloud_connector_adapter_config.json"; - -/// A config entry for the Azure Cloud Connector Adapter -#[derive(Clone, Debug, Serialize, Deserialize)] -pub struct Config { - /// Max retries for connecting to an Azure Cloud Connector - pub max_retries: u32, - - /// Retry interval in milliseconds - pub retry_interval_ms: u64, - - /// The url for the cloud connector server - pub cloud_connector_url: String, -} diff --git a/cloud_adapters/azure_cloud_connector_adapter/src/lib.rs b/cloud_adapters/azure_cloud_connector_adapter/src/lib.rs deleted file mode 100644 index 26ee911f..00000000 --- a/cloud_adapters/azure_cloud_connector_adapter/src/lib.rs +++ /dev/null @@ -1,6 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -// SPDX-License-Identifier: MIT - -pub mod azure_cloud_connector_adapter; -mod azure_cloud_connector_adapter_config; diff --git a/cloud_adapters/in_memory_mock_cloud_adapter/Cargo.toml b/cloud_adapters/in_memory_mock_cloud_adapter/Cargo.toml index b6a45d2b..1728ae66 100644 --- a/cloud_adapters/in_memory_mock_cloud_adapter/Cargo.toml +++ b/cloud_adapters/in_memory_mock_cloud_adapter/Cargo.toml @@ -12,7 +12,8 @@ license = "MIT" async-trait = { workspace = true } freyja-contracts = { workspace = true } log = { workspace = true } -serde = { workspace = true } serde_json = { workspace = true } tokio = { workspace = true } + +[dev-dependencies] time = { workspace = true } \ No newline at end of file diff --git a/cloud_connectors/azure/README.md b/cloud_connectors/azure/README.md deleted file mode 100644 index 4a973879..00000000 --- a/cloud_connectors/azure/README.md +++ /dev/null @@ -1,43 +0,0 @@ -# Azure Cloud Connector - -The role of the Cloud Connector is to take the data emitted by Freyja, and update the data of your cloud digital twin which could be hosted in Azure, AWS, etc. - -The [MQTT Connector](./mqtt_connector/README.md) and [Digital Twins Connector](./digital_twins_connector/README.md) are two sample implementations using Azure Digital Twins. - -These two examples integrates Freyja with Azure Digital Twins. - -However, Freyja is not tightly coupled with Azure and can synchronize data with any cloud solution, provided an appropriate Cloud Connector and adapter are written. - -The [MQTT Connector](./mqtt_connector/README.md) relays the data emitted by Freyja to an [Azure Event Grid](https://learn.microsoft.com/en-us/azure/event-grid/overview) using the [MQTT](https://mqtt.org/) protocol. Data coming into the Event Grid will trigger an Azure function which updates the Azure Digital Twin instance. - -The [Digital Twins Connector](./digital_twins_connector/README.md) updates an Azure Digital Twin instance directly with the data emitted by Freyja. - -## Prerequisites for Automated Deployment of Azure Resources - -The deployment scripts in the `{freyja-root-dir}/cloud_connectors/azure/scripts` directory will automate the deployment of necessary Azure resources depending on which Azure Cloud Connector sample you wish to use. - -To run the deployment scripts, you will need to complete each prerequisite section specified below. - -Alternatively, you can create Azure resources via the Azure portal. 
Please see [Manual Azure Digital Twins Setup](./digital_twins_connector/README.md#manual-azure-digital-twins-setup) for the Digital Twins Connector sample, and [Manual Deployment of Azure Key Vault, Event Grid, and Azure Function App](./mqtt_connector/README.md#manual-deployment-of-azure-key-vault-event-grid-and-azure-function-app) for the MQTT Connector sample. - -### Azure CLI and Extensions - -You must install the following: - -* [Azure CLI](https://learn.microsoft.com/en-us/cli/azure/install-azure-cli) - -* [Azure IoT CLI Extension](https://github.com/Azure/azure-iot-cli-extension) - -* [Azure Functions Core Tools](https://learn.microsoft.com/en-us/azure/azure-functions/functions-run-local?tabs=windows%2Cportal%2Cv2%2Cbash&pivots=programming-language-csharp) (required only for the [MQTT Connector](./mqtt_connector/README.md)). - -### Azure Resource Group Role-Based Access Control - -You will need to be an Owner or a Contributor for your Azure resource group to deploy Azure resources using the scripts. Please see [Azure built-in roles](https://learn.microsoft.com/en-us/azure/role-based-access-control/built-in-roles) for more details. - -## Automated Deployment of Azure Resources - -Please see [Automated Azure Digital Twins Setup](./digital_twins_connector/README.md#automated-azure-digital-twins-setup) for the Digital Twins Connector sample, and [Automated Deployment of Azure Key Vault, Event Grid, and Azure Function App](./mqtt_connector/README.md#automated-deployment-of-azure-key-vault-event-grid-and-azure-function-app) for the MQTT Connector sample. - -If you experience permission or deployment errors, try running the script again as sometimes it takes a while for some dependencies to be fully deployed. If you use the same name or identifier for each Azure resource, the script will not create additional copies of that Azure resource. - -You may also follow the [Manual Azure Digital Twins Setup](./digital_twins_connector/README.md#manual-azure-digital-twins-setup) for the Digital Twins Connector sample, or the [Manual Deployment of Azure Key Vault, Event Grid, and Azure Function App](./mqtt_connector/README.md#manual-deployment-of-azure-key-vault-event-grid-and-azure-function-app) for the MQTT Connector sample sections to deploy the respective Azure resource that is failing to be deployed by the script. diff --git a/cloud_connectors/azure/digital_twins_connector/README.md b/cloud_connectors/azure/digital_twins_connector/README.md deleted file mode 100644 index c53ce97a..00000000 --- a/cloud_connectors/azure/digital_twins_connector/README.md +++ /dev/null @@ -1,102 +0,0 @@ -# Azure Digital Twins Cloud Connector - -This is an example implementation of an Azure Cloud Connector. - -Freyja is not tightly coupled with Azure and can synchronize data with any cloud solution, provided an appropriate Cloud Connector and adapter are written. - -## Architecture - -The Azure Digital Twins Cloud Connector updates an Azure Digital Twin instance directly with the data emitted by Freyja. 
- -Below is a high-level diagram that illustrates Freyja communicating with the Azure Digital Twins Cloud Connector: - -![Component Diagram](../../../docs/diagrams/azure_digital_twins_cloud_connector.svg) - -## Prerequisites - -### .NET 6.0 Setup - -Install .NET 6.0 - -```shell -sudo apt install dotnet-sdk-6.0 -``` - -### Automated Azure Digital Twins Setup - -Before starting this section, please view [Prerequisites for Automated Deployment of Azure Resources](../README.md#prerequisites-for-automated-deployment-of-azure-resources). - -1. Sign in with Azure CLI. Follow the prompts after entering the following command. - -```shell -az login --use-device-code -``` - -1. Deploy Azure Digital Twins to your resource group. - -```shell -cd {freyja-root-dir}/cloud_connectors/azure/scripts -chmod +x digital_twins_setup.sh -./digital_twins_setup.sh -r myRG -l westus2 -d myADT -``` - -### Manual Azure Digital Twins Setup - -If you have successfully ran the `digital_twins_setup.sh`, you do not need to follow this section. - -The steps below will guide you on manually deploying the Azure Digital Twins resource to your resource group, and creating your Azure Digital Twins instances. - -1. Set up your [Azure Digital Twin Instance](https://learn.microsoft.com/en-us/azure/digital-twins/quickstart-azure-digital-twins-explorer#set-up-azure-digital-twins). - - If you wish to use the default mappings in `{repo-root-dir}/mapping_clients/in_memory_mock_mapping_client/res/config.json` and `{repo-root-dir}/mocks/mock_mapping_service/res/config.json`, create the following instances: - - * vehicle - * hvac - * obd - - For each instance, use the respective DTDL provided in `{repo-root-dir}/cloud_connector/azure/sample-dtdl`. - - In your hvac instance, name the two properties as AmbientAirTemperature and IsAirConditioningActive. - - In your obd instance, name the single property as HybridBatteryRemaining. - -1. Follow the *Open instance in Azure Digital Twins Explorer* section under [Set up Azure Digital Twins](https://learn.microsoft.com/en-us/azure/digital-twins/quickstart-azure-digital-twins-explorer#set-up-azure-digital-twins) to get the Azure Digital Twin URL of your Azure Digital Twin instance. - -1. After [building](#build) the Digital Twins Connector, you should see an `adt_instance_config.json` file in your `src/core/bin/Debug/net6.0/config` directory. If you do not see the `adt_instance_config.json` file in `src/core/bin/Debug/net6.0/config`, you can create one manually by copying the `src/core/adt_instance_config.json` file and pasting it into the `src/core/bin/Debug/net6.0/config` directory. - -1. Replace the `AzureDigitalTwinsInstanceUrl` field in `src/core/bin/net6.0/config/adt_instance_config.json` with the URL to your Azure Digital Twin instance that you have obtained in step 2. - -1. Ensure that you have [Azure CLI](https://learn.microsoft.com/en-us/cli/azure/install-azure-cli) installed. - -1. Acquire the access token that's scoped to the Azure Digital Twin service. - -```shell -az login --use-device-code --scope https://digitaltwins.azure.net/.default -``` - -## Configuration - -The URL for the Cloud Connector's GRPC server can be configured via the `src/Properties/launchSetting.json`. - -* `applicationUrl`: specifies the url to use for the Cloud Connector's GRPC server. 
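For reference, below is a minimal sketch (not part of this change) of a Freyja-side gRPC client sending a single update to this connector, assuming the `AzureCloudConnectorClient` and `UpdateDigitalTwinRequest` types generated by the `azure-cloud-connector-proto` crate removed elsewhere in this diff, and the default `applicationUrl` of `http://localhost:5176` from `launchSettings.json`:

```rust
// Illustrative sketch only: one update sent to the Digital Twins Connector's gRPC server.
// Types come from the azure-cloud-connector-proto crate; the URL must match the
// `applicationUrl` configured in src/core/Properties/launchSettings.json.
use azure_cloud_connector_proto::azure_cloud_connector::{
    azure_cloud_connector_client::AzureCloudConnectorClient, UpdateDigitalTwinRequest,
};

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let mut client = AzureCloudConnectorClient::connect("http://localhost:5176").await?;

    let request = tonic::Request::new(UpdateDigitalTwinRequest {
        model_id: String::from("dtmi:sdv:Cloud:Vehicle:Cabin:HVAC:AmbientAirTemperature;1"),
        instance_id: String::from("hvac"),
        instance_property_path: String::from("/AmbientAirTemperature"),
        data: String::from("44.5"),
    });

    let response = client.update_digital_twin(request).await?;
    println!("Connector reply: {}", response.into_inner().reply);
    Ok(())
}
```

Note that the first such call may be slow, since the connector's initial request to the Azure Digital Twins service must authenticate (see the Run section below).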
- -## Build - -```shell -chmod +x build.sh -./build.sh -``` - -## Tests - -```shell -dotnet test tests/**/*.csproj -``` - -## Run - -The first gRPC client call to the Digital Twins Connector's gRPC server could be delayed, since the initial API call to the Azure Digital Twins service requires authentication. - -```shell -dotnet run --project src/core/ -``` diff --git a/cloud_connectors/azure/digital_twins_connector/build.sh b/cloud_connectors/azure/digital_twins_connector/build.sh deleted file mode 100755 index 9b1b69fe..00000000 --- a/cloud_connectors/azure/digital_twins_connector/build.sh +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/bash - -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT license. -# SPDX-License-Identifier: MIT - -# Set the current directory to the directory of this script. -cd "$(dirname "$0")" - -dotnet build src/core/DigitalTwinsConnector.csproj -dotnet build src/DigitalTwinsClientWrapper/DigitalTwinsClientWrapper.csproj -dotnet build tests/DigitalTwinsClientWrapper.Tests/DigitalTwinsClientWrapper.Tests.csproj \ No newline at end of file diff --git a/cloud_connectors/azure/digital_twins_connector/global.json b/cloud_connectors/azure/digital_twins_connector/global.json deleted file mode 100644 index 08c83960..00000000 --- a/cloud_connectors/azure/digital_twins_connector/global.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "sdk": { - "version": "6.0.119" - } -} \ No newline at end of file diff --git a/cloud_connectors/azure/digital_twins_connector/src/DigitalTwinsClientWrapper/DigitalTwinsClientWrapper.cs b/cloud_connectors/azure/digital_twins_connector/src/DigitalTwinsClientWrapper/DigitalTwinsClientWrapper.cs deleted file mode 100644 index 181814e7..00000000 --- a/cloud_connectors/azure/digital_twins_connector/src/DigitalTwinsClientWrapper/DigitalTwinsClientWrapper.cs +++ /dev/null @@ -1,102 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -// SPDX-License-Identifier: MIT - -using System.ComponentModel; - -using Azure; -using Azure.DigitalTwins.Core; -using Microsoft.Extensions.Logging; - -namespace Microsoft.ESDV.CloudConnector.Azure -{ - /// - /// This class wraps the DigitalTwinsClient class in the Azure Digital Twins SDK - /// Before calling the UpdateDigitalTwinAsync(...) method, you will need to be authenticated via your terminal by typing - /// `az login --use-device-code --scope https://digitaltwins.azure.net/.default` - /// - public class DigitalTwinsClientWrapper - { - // The Azure Digital Twins Client. - private readonly DigitalTwinsClient _client; - - // The logger. - private readonly ILogger _logger; - - /// - /// Checks if a path starts with a slash. - /// - /// the path. - /// Returns true if the path starts with a slash, otherwise false. - private bool DoesPathStartsWithSlash(string path) - { - return path.StartsWith('/'); - } - - /// - /// Constructor for DigitalTwinsClientWrapper - /// - /// A DigitalTwinsClient - /// An ILogger - public DigitalTwinsClientWrapper(DigitalTwinsClient client, ILogger logger) - { - _client = client; - _logger = logger; - _logger.LogInformation("Starting Azure Digital Client"); - } - - /// - /// Updates a digital twin's property. - /// - /// - /// Invoking UpdateDigitalTwinAsync("dtmi:sdv:Cloud:Vehicle:Cabin:HVAC:AmbientAirTemperature;1", "44") - /// sets the dtmi "dtmi:sdv:Cloud:Vehicle:Cabin:HVAC:AmbientAirTemperature;1" to 44. - /// - /// the model ID that a digital twin instance is based on. - /// the digital twin instance ID. 
- /// the property path of a digital twin instance to update. - /// the data used to update a digital twin instance's property. - /// Returns a task for updating a digital twin instance. - public async Task UpdateDigitalTwinAsync(string modelID, string instanceID, string instancePropertyPath, string data) - { - List dataTypes = new List() { typeof(Double), typeof(Boolean), typeof(Int32) }; - var jsonPatchDocument = new JsonPatchDocument(); - - foreach (Type type in dataTypes) - { - try - { - // Parse the data string to a type - dynamic value = TypeDescriptor.GetConverter(type).ConvertFromInvariantString(data); - - if (!DoesPathStartsWithSlash(instancePropertyPath)) - { - instancePropertyPath = "$/{instancePropertyPath}"; - } - // Once we're able to parse the data string to a type - // we append it to the jsonPatchDocument - jsonPatchDocument.AppendAdd(instancePropertyPath, value); - - // First UpdateDigitalTwinAsync call may block due to initial authorization. - await _client.UpdateDigitalTwinAsync(instanceID, jsonPatchDocument); - _logger.LogInformation($"Successfully set instance {instanceID}{instancePropertyPath} based on model {modelID} to {data}"); - return; - } - catch (RequestFailedException ex) - { - _logger.LogError($"Cannot set instance {instanceID}{instancePropertyPath} based on model {modelID} to {data} due to {ex.Message}"); - throw ex; - } - // Try to parse string data with the next type if we're unsuccessful. - catch (Exception ex) when (ex is NotSupportedException || ex is ArgumentException || ex is FormatException) - { - continue; - } - } - - string errorMessage = $"Failed to parse {data}. Cannot set instance {instanceID}{instancePropertyPath} based on model {modelID} to {data}"; - _logger.LogError(errorMessage); - throw new NotSupportedException(errorMessage); - } - } -} diff --git a/cloud_connectors/azure/digital_twins_connector/src/DigitalTwinsClientWrapper/DigitalTwinsClientWrapper.csproj b/cloud_connectors/azure/digital_twins_connector/src/DigitalTwinsClientWrapper/DigitalTwinsClientWrapper.csproj deleted file mode 100644 index 6f27d36d..00000000 --- a/cloud_connectors/azure/digital_twins_connector/src/DigitalTwinsClientWrapper/DigitalTwinsClientWrapper.csproj +++ /dev/null @@ -1,13 +0,0 @@ - - - - net6.0 - enable - - - - - - - - diff --git a/cloud_connectors/azure/digital_twins_connector/src/core/DigitalTwinsConnector.csproj b/cloud_connectors/azure/digital_twins_connector/src/core/DigitalTwinsConnector.csproj deleted file mode 100644 index c0b4b738..00000000 --- a/cloud_connectors/azure/digital_twins_connector/src/core/DigitalTwinsConnector.csproj +++ /dev/null @@ -1,27 +0,0 @@ - - - - Exe - net6.0 - enable - - - - - - - - - - - - - - - - - - - - - diff --git a/cloud_connectors/azure/digital_twins_connector/src/core/DigitalTwinsConnectorService.cs b/cloud_connectors/azure/digital_twins_connector/src/core/DigitalTwinsConnectorService.cs deleted file mode 100644 index ae6882f8..00000000 --- a/cloud_connectors/azure/digital_twins_connector/src/core/DigitalTwinsConnectorService.cs +++ /dev/null @@ -1,57 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -// SPDX-License-Identifier: MIT - -using Grpc.Core; - -namespace Microsoft.ESDV.CloudConnector.Azure.GrpcService.Services -{ - /// - /// This class implements the gRPC service for the Azure Cloud Connector - /// - public class DigitalTwinsConnectorService : AzureCloudConnector.AzureCloudConnectorBase - { - // The logger. 
- private readonly ILogger _logger; - - // Used to update the cloud digital twin instances' values. - private readonly DigitalTwinsClientWrapper _digitalTwinClient; - - /// - /// Constructor for DigitalTwinsConnectorService - /// - /// - /// - public DigitalTwinsConnectorService(ILogger logger, DigitalTwinsClientWrapper digitalTwinClient) - { - _logger = logger; - _digitalTwinClient = digitalTwinClient; - } - - /// - /// Updates an Azure Digital Twin instance. - /// - /// the request to send. - /// the context for the server-side call. - /// An exception is thrown if the digital twin client cannot perform an update. - /// The response status of the update. - public override async Task UpdateDigitalTwin(UpdateDigitalTwinRequest request, ServerCallContext context) - { - try - { - await _digitalTwinClient.UpdateDigitalTwinAsync(request.ModelId, request.InstanceId, request.InstancePropertyPath, request.Data); - } - catch (Exception ex) - { - _logger.LogError(ex.Message); - throw; - } - - return new UpdateDigitalTwinResponse - { - Reply = $"Successfully set instance {request.InstanceId}{request.InstancePropertyPath} based on model {request.ModelId} to {request.Data}" - }; - } - } -} - diff --git a/cloud_connectors/azure/digital_twins_connector/src/core/Program.cs b/cloud_connectors/azure/digital_twins_connector/src/core/Program.cs deleted file mode 100644 index 695692ef..00000000 --- a/cloud_connectors/azure/digital_twins_connector/src/core/Program.cs +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -// SPDX-License-Identifier: MIT - -using System.Text.Json; - -using Azure.DigitalTwins.Core; -using Azure.Identity; - -using Microsoft.ESDV.CloudConnector.Azure.GrpcService.Services; - -namespace Microsoft.ESDV.CloudConnector.Azure -{ - class AzureDigitalTwinsInstanceConfig - { - public string AzureDigitalTwinsInstanceUrl { get; set; } - } - - class Program - { - static void Main(string[] args) - { - const string AZURE_DIGITAL_TWINS_INSTANCE_CONFIG_PATH = @"bin/Debug/net6.0/config/adt_instance_config.json"; - string contents = File.ReadAllText(AZURE_DIGITAL_TWINS_INSTANCE_CONFIG_PATH); - AzureDigitalTwinsInstanceConfig adtInstanceConfig = JsonSerializer.Deserialize(contents); - - // Configure the builder - WebApplicationBuilder builder = WebApplication.CreateBuilder(args); - - string adtInstanceUrl = adtInstanceConfig.AzureDigitalTwinsInstanceUrl; - var credential = new DefaultAzureCredential(); - DigitalTwinsClient client = new DigitalTwinsClient(new Uri(adtInstanceUrl), credential); - - ILoggerFactory loggerFactory = LoggerFactory.Create(builder => builder.AddSimpleConsole(c => - { - c.TimestampFormat = "[yyyy-MM-ddTHH:mm::ssZ] "; - c.UseUtcTimestamp = true; - })); - loggerFactory.CreateLogger("Main").LogInformation("Started the Azure Digital Twins Connector"); - - // Instantiate the DigitalTwinClient first before adding it as a service for dependency injection. - // Otherwise, if the constructor throws an exception due to invalid configurations, this exception - // would be handled by the GRPC service every time a new request is sent from the client, so the program won't crash. - // This is not ideal since we should fail fast with invalid configurations. 
- builder.Host.ConfigureLogging(logging => - { - logging.ClearProviders(); - logging.AddSimpleConsole(settings => - { - settings.TimestampFormat = "[yyyy-MM-ddTHH:mm:ssZ] "; - settings.UseUtcTimestamp = true; - }); - }); - - builder.Services.AddSingleton(client); - builder.Services.AddSingleton(); - builder.Services.AddGrpc(options => options.EnableDetailedErrors = true); - - WebApplication app = builder.Build(); - app.MapGrpcService(); - app.Run(); - } - } -} diff --git a/cloud_connectors/azure/digital_twins_connector/src/core/Properties/launchSettings.json b/cloud_connectors/azure/digital_twins_connector/src/core/Properties/launchSettings.json deleted file mode 100644 index 1b06422e..00000000 --- a/cloud_connectors/azure/digital_twins_connector/src/core/Properties/launchSettings.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "profiles": { - "Cloud.Connector.Digital.Twin.Proxy.GrpcService": { - "commandName": "Project", - "dotnetRunMessages": true, - "launchBrowser": false, - "applicationUrl": "http://localhost:5176", - "environmentVariables": { - "ASPNETCORE_ENVIRONMENT": "Development" - } - } - } -} diff --git a/cloud_connectors/azure/digital_twins_connector/src/core/adt_instance_config.sample.json b/cloud_connectors/azure/digital_twins_connector/src/core/adt_instance_config.sample.json deleted file mode 100644 index 4b7d26cf..00000000 --- a/cloud_connectors/azure/digital_twins_connector/src/core/adt_instance_config.sample.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "AzureDigitalTwinsInstanceUrl": "https://{your_azure_digital_twins_url}" -} \ No newline at end of file diff --git a/cloud_connectors/azure/digital_twins_connector/src/core/appsettings.Development.json b/cloud_connectors/azure/digital_twins_connector/src/core/appsettings.Development.json deleted file mode 100644 index c678177f..00000000 --- a/cloud_connectors/azure/digital_twins_connector/src/core/appsettings.Development.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "Logging": { - "LogLevel": { - "Default": "Information", - "Microsoft.AspNetCore": "None" - } - } -} diff --git a/cloud_connectors/azure/digital_twins_connector/src/core/appsettings.json b/cloud_connectors/azure/digital_twins_connector/src/core/appsettings.json deleted file mode 100644 index e1e84356..00000000 --- a/cloud_connectors/azure/digital_twins_connector/src/core/appsettings.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "Logging": { - "LogLevel": { - "Default": "Information", - "Microsoft.Hosting.Lifetime": "Warning", - "Microsoft.AspNetCore": "None" - } - }, - "AllowedHosts": "*", - "Kestrel": { - "EndpointDefaults": { - "Protocols": "Http2" - } - } -} \ No newline at end of file diff --git a/cloud_connectors/azure/digital_twins_connector/tests/DigitalTwinsClientWrapper.Tests/DigitalTwinsClientWrapper.Tests.cs b/cloud_connectors/azure/digital_twins_connector/tests/DigitalTwinsClientWrapper.Tests/DigitalTwinsClientWrapper.Tests.cs deleted file mode 100644 index 09be11fa..00000000 --- a/cloud_connectors/azure/digital_twins_connector/tests/DigitalTwinsClientWrapper.Tests/DigitalTwinsClientWrapper.Tests.cs +++ /dev/null @@ -1,47 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. 
-// SPDX-License-Identifier: MIT - -using Azure.DigitalTwins.Core; -using Moq; -using Microsoft.Extensions.Logging; -using NUnit.Framework; - -namespace Microsoft.ESDV.CloudConnector.Azure.Tests -{ - [TestFixture] - public class DigitalTwinsClientWrapperTests - { - private DigitalTwinsClientWrapper _client; - - [SetUp] - public void Setup() - { - var mockLogger = new Mock>(); - var mockDigitalTwinsClient = new Mock(); - _client = new DigitalTwinsClientWrapper(mockDigitalTwinsClient.Object, mockLogger.Object); - } - - [Test] - public async Task UpdateDigitalTwinAsync_ShouldSucceed() - { - const string modelID = "some-model"; - const string instanceID = "some-instance"; - const string instancePropertyPath = "some-instance-property"; - await _client.UpdateDigitalTwinAsync(modelID, instanceID, instancePropertyPath, "44.5"); - Assert.Pass(); - } - - [Test] - public void UpdateDigitalTwinAsync_ThrowNotSupported() - { - const string modelID = "some-model"; - const string instanceID = "some-instance"; - const string instancePropertyPath = "some-instance-property"; - Assert.ThrowsAsync(async () => await _client.UpdateDigitalTwinAsync(modelID, instanceID, instancePropertyPath, "test1234")); - Assert.ThrowsAsync(async () => await _client.UpdateDigitalTwinAsync(modelID, instanceID, instancePropertyPath, "1234test")); - Assert.ThrowsAsync(async () => await _client.UpdateDigitalTwinAsync(modelID, instanceID, instancePropertyPath, "")); - } - } -} - diff --git a/cloud_connectors/azure/digital_twins_connector/tests/DigitalTwinsClientWrapper.Tests/DigitalTwinsClientWrapper.Tests.csproj b/cloud_connectors/azure/digital_twins_connector/tests/DigitalTwinsClientWrapper.Tests/DigitalTwinsClientWrapper.Tests.csproj deleted file mode 100644 index b0409a1b..00000000 --- a/cloud_connectors/azure/digital_twins_connector/tests/DigitalTwinsClientWrapper.Tests/DigitalTwinsClientWrapper.Tests.csproj +++ /dev/null @@ -1,23 +0,0 @@ - - - - net6.0 - enable - false - - - - - - - - - - - - - - - - - diff --git a/cloud_connectors/azure/mqtt_connector/Cargo.toml b/cloud_connectors/azure/mqtt_connector/Cargo.toml deleted file mode 100644 index 3324c099..00000000 --- a/cloud_connectors/azure/mqtt_connector/Cargo.toml +++ /dev/null @@ -1,28 +0,0 @@ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT license. -# SPDX-License-Identifier: MIT - -[package] -name = "azure-mqtt-connector" -version = "0.1.0" -edition = "2021" -license = "MIT" - -[dependencies] -async-trait = { workspace = true } -azure-cloud-connector-proto = { path = "../proto-build/" } -env_logger = { workspace = true } -freyja-common = { path = "../../../common" } -futures = { workspace = true } -log = { workspace = true } -time = { workspace = true } -tokio = { workspace = true } -tonic = { workspace = true } -paho-mqtt = { workspace = true } -proc-macros = { path = "../../../proc_macros" } -prost = { workspace = true } -serde = { workspace = true } -serde_json = { workspace = true } - -[build-dependencies] -tonic-build = { workspace = true } \ No newline at end of file diff --git a/cloud_connectors/azure/mqtt_connector/README.md b/cloud_connectors/azure/mqtt_connector/README.md deleted file mode 100644 index 60b5fec9..00000000 --- a/cloud_connectors/azure/mqtt_connector/README.md +++ /dev/null @@ -1,222 +0,0 @@ -# Azure MQTT Cloud Connector - -This is an example implementation of an Azure Cloud Connector. 
- -Freyja is not tightly coupled with Azure and can synchronize data with any cloud solution, provided an appropriate Cloud Connector and adapter are written. - -For this Cloud Connector sample, you will use Azure to deploy an Azure Key Vault, an Event Grid with MQTT v5, and a Function App. - -## Architecture - -When signals are propagated from Freyja, the Azure MQTT Cloud Connector will publish these signals to an Azure Event Grid topic using the MQTT protocol. When signals are published to a topic on the Event Grid, an Azure Function gets triggered and updates an Azure Digital Twin instance with the data emitted by Freyja. - -Below is a high-level diagram that illustrates Freyja communicating with the Azure MQTT Cloud Connector: - -![Component Diagram](../../../docs/diagrams/azure_mqtt_cloud_connector.svg) - -## Prerequisites - -### Azure Digital Twins Deployed - -In your Azure Digital Twins resource, you will also need to create digital twin instances. Sample DTDL models are located in the `{freyja-root-dir}/cloud_connectors/azure/sample-dtdl` directory. - -Please see [Automated Azure Digital Twins Setup](../digital_twins_connector/README.md#automated-azure-digital-twins-setup) or [Manual Azure Digital Twins Setup](../digital_twins_connector/README.md#manual-azure-digital-twins-setup) for additional info on setting up Azure Digital Twins. - -### Self-Signed X.509 Certificate - -Please see steps 1-3 in [Azure Event Grid with MQTT](#2-azure-event-grid-with-mqtt) for additional info on generating an X.509 self-signed certificate and getting its thumbprint. - -## Automated Deployment of Azure Key Vault, Event Grid, and Azure Function App - -Before starting this section, please view [Prerequisites for Automated Deployment of Azure Resources](../README.md#prerequisites-for-automated-deployment-of-azure-resources). - -1. Sign in with Azure CLI. Follow the prompts after entering the following command. - -```shell -az login --use-device-code -``` - -1. You can either pass in a config or arguments to the `mqtt_connector_setup.sh` script. - -If you wish to pass a config, then please copy the `mqtt_connector_setup.template.json` in the `{freyja-root-dir}/cloud_connectors/azure/scripts` directory and fill in the placeholders. - -```shell -cd {freyja-root-dir}/cloud_connectors/azure/scripts -chmod +x mqtt_connector_setup.sh -./mqtt_connector_setup.sh -c -``` - -Field descriptions: - -* `resource_group`: The name of your resource group. - -* `subscription_id`: The subscription ID that your resource group is under. - -* `digital_twins_name`: The name of your Digital Twins resource. - -* `thumbprint_of_cert_in_der_format`: The thumbprint of your X.509 certificate in DER format. - -* `storage_account_name`: The desired name for the Storage Account you want to create. - -* `function_app_name`: The desired name for the Azure Function App you want to create. - -* `key_vault_name`: The desired name for the Key Vault you want to create. - -* `event_grid_topic`: The desired name for the Event Grid Topic you want to create. - -* `event_grid_subscription_name`: The desired name for the Event Grid Subscription you want to create. - -* `event_grid_namespace`: The desired name for the Event Grid Namespace you want to create. - -* `mqtt_client_authentication_name`: The desired name for the Event Grid Client Authentication you want to use to authenticate. 
- -If you wish to pass in arguments, then please do the following: - -```shell -cd {freyja-root-dir}/cloud_connectors/azure/scripts -chmod +x mqtt_connector_setup.sh - ./mqtt_connector_setup.sh -r myResourceGroup -s mySubscriptionId -d myDigitalTwinsName \ - -t myThumbprint -S myStorageAccountName -f myFunctionAppName \ - -k myKeyVaultName -x myEventGridTopic -y myEventGridSubscriptionName \ - -z myEventgridNamespace -m myMqttClientAuthenticationName -``` - -## Manual Deployment of Azure Key Vault, Event Grid, and Azure Function App - -### 1. Azure Key Vault - -1. Follow the *Open instance in Azure Digital Twins Explorer* section under [Set up Azure Digital Twins](https://learn.microsoft.com/en-us/azure/digital-twins/quickstart-azure-digital-twins-explorer#set-up-azure-digital-twins) to get the Azure Digital Twin URL of your Azure Digital Twin instance. - -1. [Create an Azure Key Vault using the Azure portal](https://learn.microsoft.com/en-us/azure/key-vault/general/quick-create-portal). - -1. Create a secret with `ADT-INSTANCE-URL` as the name, and the value should be the **Azure Digital Twin URL** that you obtained in step 1. - -You have successfully deployed your Key Vault if you see an `ADT-INSTANCE-URL` secret and the status of that secret is enabled. - -### 2. Azure Event Grid with MQTT - -1. Create a private key. Replace the `{PrivateKeyName}` placeholder with the name you wish to use. - - ```shell - openssl genpkey -out {PrivateKeyName}.key -algorithm RSA -pkeyopt rsa_keygen_bits:2048 - ``` - -1. Create a certificate signing request. Replace the placeholders with their respective values, and fill in the prompts of the certificate signing request. - - ```shell - openssl req -new -key {PrivateKeyName}.key -out {CertificateSigningRequestName}.csr - ``` - -1. Create an X.509 self-signed certificate. Replace the placeholders with their respective values. - - ```shell - openssl x509 -req -days 365 -in {CertificateSigningRequestName}.csr -signkey {PrivateKeyName}.key -out {CertificateName}.cer - ``` - -1. Get the thumbprint of your certificate in DER format. You will need the thumbprint when [creating a client](https://learn.microsoft.com/en-us/azure/event-grid/mqtt-publish-and-subscribe-portal#create-clients) for your Event Grid in step 6. - - ```shell - openssl x509 -in {CertificateName}.cer -outform DER -out {CertificateName}.crt - sha256sum {CertificateName}.crt | awk '{print $1}' - rm {CertificateName}.crt - ``` - -1. Follow the [Quickstart: Publish and subscribe to MQTT messages on Event Grid Namespace with Azure portal](https://learn.microsoft.com/en-us/azure/event-grid/mqtt-publish-and-subscribe-portal) guide for creating an Azure Event Grid, topic namespace, and client. You can skip the *Generate sample client certificate and thumbprint* section as you have generated a self-signed certificate in steps 1-3. - -1. Once you have successfully deployed an Event Grid namespace, navigate to it then copy the `MQTT Hostname` field. You will need it later for the `mqtt_event_grid_host_name` field in the configuration file that is described in the [Configuration](#configuration) section. - -1. In the [Create clients](https://learn.microsoft.com/en-us/azure/event-grid/mqtt-publish-and-subscribe-portal#create-clients) section, use the thumbprint you obtained in step 4 for thumbprint match authentication. Also keep note of what you set for the **Client Authentication Name**. 
You will need it later for the `mqtt_client_authentication_name` field in the configuration file that is described in the [Configuration](#configuration) section. - -1. When you [create a topic space](https://learn.microsoft.com/en-us/azure/event-grid/mqtt-publish-and-subscribe-portal#create-topic-spaces), keep note of the name you used for the **topic template**. You will need it later for the `mqtt_event_grid_topic` field in the configuration file that is described in the [Configuration](#configuration) section. - -You have successfully deployed your Event Grid Namespace if you have a publisher permission binding, a client and a client group, and a topic space. -Navigate to the client that you have created in your Event Grid Namespace, and validate that the `Client Certificate Authentication Validation Scheme` is set to `Thumbprint Match`, and the thumbprint matches to your self-signed certificate obtained in [Azure Event Grid with MQTT](#2-azure-event-grid-with-mqtt). - -### 3. Azure Function App - -1. [Create an Azure Function app](https://learn.microsoft.com/en-us/azure/event-grid/custom-event-to-function#create-azure-function-app) that triggers your Azure Event Grid. Ensure you set the Runtime stack to .NET and version 6.0. - -1. Replace the code in your Azure Function with the code in the `res/azure_function/src/run.cs` file. - -1. Add `src/function.json` and `src/function.csproj` from `{freyja-root-dir}/cloud_connectors/azure/mqtt_connector/res/azure_function` to your Azure Function. - -1. Go back to your Azure Function App homepage, and click on **Configuration** on the side-panel. - -1. Click on **New application setting**. - -1. Set the name to `KEYVAULT_SETTINGS`, and the value to `@Microsoft.KeyVault(SecretUri={YOUR_ADT_INSTANCE_URI_SECRET_IN_KEY_VAULT})` - -1. Replace the placeholder `{YOUR_ADT_INSTANCE_URI_SECRET_IN_KEY_VAULT}` with the secret URI to your `ADT-INSTANCE-URL` secret in Key Vault obtained from step 3 of [Azure Key Vault](#1-azure-key-vault). To find the URI to your `ADT-INSTANCE-URL` secret, click on your Key Vault then Secrets. Click on ADT-INSTANCE-URL -> Current version, and copy the secret identifier. - -You have successfully deployed your Azure Function App if you see the files in steps 1-2 uploaded. If you navigate to `Configuration` under the `Settings` of your Azure Function App then under `Application settings`, you see a green check mark beside the `Key vault Reference` label for `KEYVAULT_SETTINGS`. - -### 4. Enable Managed System Identity in Azure Function App - -Your Azure Function App will need the Azure Digital Twins Data Owner role to read/write to your Azure Digital Twin instances. -Also your Function App will need the Key Vault Reader role to read the `ADT-INSTANCE-URL` secret you had set up in step 3 of [Azure Key Vault](#1-azure-key-vault). - -1. Navigate to the homepage of your Azure Function App. - -1. Click on **Identity** on the side-panel, then click on **Azure role assignments**. - -1. Click **On** button for the Status to enable Managed System Identity. - -1. Click on **Add role assignment**. - -1. Use the following settings for the Azure Digital Twins Data Owner role: - * Scope: Resource Group - * Subscription: {YOUR_SUBSCRIPTION} - * Resource group: {YOUR_RESOURCE_GROUP} - * Role: Azure Digital Twins Data Owner - -1. Repeat step 4, but use the following settings for the Key Vault Reader role: - * Scope: Key Vault - * Subscription: {YOUR_SUBSCRIPTION} - * Resource: {YOUR_KEYVAULT} - * Role: Key Vault Reader - -## Build - -1. 
Before building, please replace the placeholders in `res/mqtt_config.template.json` with their respective values. - - Field descriptions: - - * `max_retries`: Max retries for connecting to Azure Event Grid - - * `retry_interval_ms`: Retry interval in milliseconds - - - * `grpc_server_authority`: The gRPC server authority you wish to use to host the MQTT Connector's gRPC server. Example `"grpc_server_authority": "[::1]:8890"` - - * `cert_path`: The absolute path to the self-signed certificate generated in step 3 of [Azure Event Grid with MQTT](#2-azure-event-grid-with-mqtt). This file ends in *.cer. - - * `private_key_path`: The absolute path to the private key generated in step 1 of [Azure Event Grid with MQTT](#2-azure-event-grid-with-mqtt). This file ends in *.key. - - * `mqtt_client_id`: The client ID for identifying the MQTT client used in this Cloud Connector. You can keep the default value or change it. The client ID can be any unique value, as long as it's not the same client ID of another client that's publishing to your Azure Event Grid. - - * `mqtt_client_authentication_name`: The client authentication name that you specified in step 1 of [Automated Deployment of Azure Key Vault, Event Grid, and Azure Function App](#automated-deployment-of-azure-key-vault-event-grid-and-azure-function-app), or step 6 of [Azure Event Grid with MQTT](#2-azure-event-grid-with-mqtt) for manual deployment. - - * `event_grid_topic`: The topic that you specified in step 1 of [Automated Deployment of Azure Key Vault, Event Grid, and Azure Function App](#automated-deployment-of-azure-key-vault-event-grid-and-azure-function-app), or step 7 of [Azure Event Grid with MQTT](#2-azure-event-grid-with-mqtt) for manual deployment. - - * `event_grid_namespace_host_name`: The Event Grid Namespace MQTT hostname. You can find the hostname by clicking on your event grid namespace, then copy the MQTT hostname. - -```shell -cargo build -``` - -## Configuration - -1. Change to the directory with the build artifacts `{freyja-root-dir}/target/debug`. Replace `{freyja-root-dir}` with the repository's root directory. - - ```shell - cd {freyja-root-dir}/target/debug - ``` - -## Run - -Change to the directory with the build artifacts `{freyja-root-dir}/target/debug`. Replace `{freyja-root-dir}` with the repository's root directory. - -```shell -cd {freyja-root-dir}/target/debug -./azure-mqtt-connector -``` diff --git a/cloud_connectors/azure/mqtt_connector/build.rs b/cloud_connectors/azure/mqtt_connector/build.rs deleted file mode 100644 index df4689b0..00000000 --- a/cloud_connectors/azure/mqtt_connector/build.rs +++ /dev/null @@ -1,37 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -// SPDX-License-Identifier: MIT - -use std::{ - env, fs, - path::{Path, PathBuf}, -}; - -const OUTPUT_DIR: &str = "OUT_DIR"; -const TEMPLATE_CONFIG_FILE: &str = "res/mqtt_config.template.json"; -const CONFIG_FILE: &str = "mqtt_config.json"; - -fn main() { - let target_dir = env::var(OUTPUT_DIR).unwrap(); - - // Copy the mqtt_config.template.json template to target/debug - let config_template = env::current_dir().unwrap().join(TEMPLATE_CONFIG_FILE); - let dest_path = Path::new(&target_dir).join(CONFIG_FILE); - copy(config_template, dest_path); -} - -/// Copies a file to the destination path. -/// -/// # Arguments -/// - `source_path`: the source path to a file. -/// - `dest_path`: the destination path. 
-fn copy(source_path: PathBuf, dest_path: PathBuf) { - fs::copy(&source_path, dest_path).unwrap(); - println!( - "cargo:rerun-if-changed={}", - source_path - .to_str() - .ok_or(format!("Check the file {source_path:?}")) - .unwrap() - ); -} diff --git a/cloud_connectors/azure/mqtt_connector/res/azure_function/src/function.csproj b/cloud_connectors/azure/mqtt_connector/res/azure_function/src/function.csproj deleted file mode 100644 index 35c9ee13..00000000 --- a/cloud_connectors/azure/mqtt_connector/res/azure_function/src/function.csproj +++ /dev/null @@ -1,20 +0,0 @@ - - - net6.0 - v4 - - - - - - - - - PreserveNewest - Never - - - PreserveNewest - - - \ No newline at end of file diff --git a/cloud_connectors/azure/mqtt_connector/res/azure_function/src/function.json b/cloud_connectors/azure/mqtt_connector/res/azure_function/src/function.json deleted file mode 100644 index 52e43368..00000000 --- a/cloud_connectors/azure/mqtt_connector/res/azure_function/src/function.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "bindings": [ - { - "type": "eventGridTrigger", - "name": "cloudEvent", - "direction": "in" - } - ] -} \ No newline at end of file diff --git a/cloud_connectors/azure/mqtt_connector/res/azure_function/src/host.json b/cloud_connectors/azure/mqtt_connector/res/azure_function/src/host.json deleted file mode 100644 index 0d5cfed9..00000000 --- a/cloud_connectors/azure/mqtt_connector/res/azure_function/src/host.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "version": "2.0", - "extensions": { - "http": { - "routePrefix": "api", - "maxOutstandingRequests": 200, - "maxConcurrentRequests": 100, - "dynamicThrottlesEnabled": true, - "hsts": { - "isEnabled": true, - "maxAge": "10" - }, - "customHeaders": { - "X-Content-Type-Options": "nosniff" - } - } - } -} \ No newline at end of file diff --git a/cloud_connectors/azure/mqtt_connector/res/azure_function/src/local.settings.json b/cloud_connectors/azure/mqtt_connector/res/azure_function/src/local.settings.json deleted file mode 100644 index 4fce9ff3..00000000 --- a/cloud_connectors/azure/mqtt_connector/res/azure_function/src/local.settings.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "IsEncrypted": false, - "Values": { - "AzureWebJobsStorage": "UseDevelopmentStorage=true", - "FUNCTIONS_WORKER_RUNTIME": "dotnet" - } -} \ No newline at end of file diff --git a/cloud_connectors/azure/mqtt_connector/res/azure_function/src/run.cs b/cloud_connectors/azure/mqtt_connector/res/azure_function/src/run.cs deleted file mode 100644 index 3fe8523d..00000000 --- a/cloud_connectors/azure/mqtt_connector/res/azure_function/src/run.cs +++ /dev/null @@ -1,128 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -// SPDX-License-Identifier: MIT - -using System; -using System.Collections.Generic; -using System.ComponentModel; -using System.Threading.Tasks; - -using Azure; -using Azure.DigitalTwins.Core; -using Azure.Identity; -using Azure.Messaging; -using Microsoft.Azure.WebJobs; -using Microsoft.Azure.WebJobs.Extensions.EventGrid; -using Microsoft.Extensions.Logging; - -namespace Microsoft.ESDV.CloudConnector.Azure { - /// - /// This class contains the info to target an Azure Digital Twin instance. 
- /// - public class DigitalTwinsInstance { - public string model_id { get; set; } - public string instance_id { get; set; } - public string instance_property_path { get; set; } - public string data { get; set; } - } - - public class MQTTConnectorAzureFunction { - private readonly ILogger _logger; - - private const string KEYVAULT_SETTINGS = "KEYVAULT_SETTINGS"; - - // Maps a string data type name to its concrete data type. - private static readonly Dictionary dataTypeNameToConverterMap = new Dictionary { - { "int", typeof(int) }, - { "double", typeof(double) }, - { "boolean", typeof(bool) } - }; - - public MQTTConnectorAzureFunction(ILogger logger) - { - _logger = logger; - } - - /// - /// Checks if a path starts with a slash. - /// - /// the path. - /// Returns true if the path starts with a slash, otherwise false. - public static bool DoesPathStartsWithSlash(string path) { - return path.StartsWith('/'); - } - - /// - /// Gets the data type from a data type name. - /// - /// the name of the data type. - /// Returns a task for updating a digital twin instance. - public Type GetDataTypeFromString(string dataTypeName) { - if (!dataTypeNameToConverterMap.ContainsKey(dataTypeName)) { - throw new NotSupportedException($"No conversion for {dataTypeName}"); - } - return dataTypeNameToConverterMap[dataTypeName]; - } - - /// - /// Updates a digital twin's property. - /// - /// the Azure Digital Twins client. - /// the digital twin instance to update. - /// the name of the data type. - /// Returns a task for updating a digital twin instance. - public async Task UpdateDigitalTwinAsync(DigitalTwinsClient client, DigitalTwinsInstance instance, string dataTypeName = "double") { - JsonPatchDocument jsonPatchDocument = new JsonPatchDocument(); - - try { - // Get the concrete data type of an instance's data based on its string data type name - // then uses that concrete data type to change the data from string to its concrete data type. - Type dataType = GetDataTypeFromString(dataTypeName); - dynamic convertedDataToType = Convert.ChangeType(instance.data, dataType); - - if (!DoesPathStartsWithSlash(instance.instance_property_path)) - { - instance.instance_property_path = $"/{instance.instance_property_path}"; - } - jsonPatchDocument.AppendAdd(instance.instance_property_path, convertedDataToType); - } - catch (Exception ex) when (ex is NotSupportedException || ex is InvalidCastException || ex is FormatException) { - throw new NotSupportedException($"Cannot convert {instance.data}. {ex.Message}"); - } - - try { - await client.UpdateDigitalTwinAsync(instance.instance_id, jsonPatchDocument); - } - catch(RequestFailedException ex) { - string errorMessage = @$"Cannot set instance {instance.instance_id}{instance.instance_property_path} - based on model {instance.model_id} to {instance.data} due to {ex.Message}"; - throw new NotSupportedException(errorMessage); - } - } - - /// - /// An Azure Function that updates an Azure Digital Twin based on the request. - /// - /// the cloudEvent request that is received. - /// the logger - /// An exception is thrown if the Azure Digital Twin client cannot update an instance. 
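- /// A hypothetical example of the cloudEvent data payload this function handles (not from the original source; field names follow the DigitalTwinsInstance class above, and the model ID, twin ID, and property reuse the sample HVAC twin created by digital_twins_setup.sh, with an illustrative value): - /// { "model_id": "dtmi:sdv:Cloud:Vehicle:Cabin:HVAC;1", "instance_id": "hvac", "instance_property_path": "/AmbientAirTemperature", "data": "25.0" }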
- /// - [FunctionName("MQTTConnectorAzureFunction")] - public async Task Run([EventGridTrigger] CloudEvent cloudEvent) { - DigitalTwinsInstance instance = cloudEvent.Data.ToObjectFromJson(); - - try { - DefaultAzureCredential credential = new DefaultAzureCredential(); - string adt_instance_url = Environment.GetEnvironmentVariable(KEYVAULT_SETTINGS, EnvironmentVariableTarget.Process); - DigitalTwinsClient client = new DigitalTwinsClient(new Uri(adt_instance_url), credential); - await UpdateDigitalTwinAsync(client, instance); - _logger.LogInformation(@$"Successfully set instance {instance.instance_id}{instance.instance_property_path} - based on model {instance.model_id} to {instance.data}"); - } - catch (Exception ex) { - _logger.LogError(ex.Message); - throw; - } - } - } -} diff --git a/cloud_connectors/azure/mqtt_connector/res/azure_function/tests/MQTTConnectorAzureFunction.Tests.cs b/cloud_connectors/azure/mqtt_connector/res/azure_function/tests/MQTTConnectorAzureFunction.Tests.cs deleted file mode 100644 index 19bde42c..00000000 --- a/cloud_connectors/azure/mqtt_connector/res/azure_function/tests/MQTTConnectorAzureFunction.Tests.cs +++ /dev/null @@ -1,70 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -// SPDX-License-Identifier: MIT - -using Azure.DigitalTwins.Core; -using Microsoft.Extensions.Logging; -using Moq; -using NUnit.Framework; -namespace Microsoft.ESDV.CloudConnector.Azure.Tests -{ - [TestFixture] - public class MQTTConnectorAzureFunctionTests - { - private DigitalTwinsClient _client; - private DigitalTwinsInstance _instance; - private MQTTConnectorAzureFunction _connector; - - [SetUp] - public void Setup() - { - _client = new Mock().Object; - _connector = new MQTTConnectorAzureFunction(new Mock>().Object); - _instance = new DigitalTwinsInstance - { - model_id = "some-model", - instance_id = "some-instance", - instance_property_path = "some-instance-property", - data = null - }; - } - - [Test] - public void ConvertStringToDataType_ShouldSucceed() - { - Assert.That(_connector.GetDataTypeFromString("int"), Is.EqualTo(typeof(int))); - Assert.That(_connector.GetDataTypeFromString("double"), Is.EqualTo(typeof(double))); - Assert.That(_connector.GetDataTypeFromString("boolean"), Is.EqualTo(typeof(bool))); - Assert.Throws(() => _connector.GetDataTypeFromString("invalid-converter")); - } - - [Test] - public async Task UpdateDigitalTwinAsync_ShouldSucceed() - { - _instance.data = "44.5"; - await _connector.UpdateDigitalTwinAsync(_client, _instance, "double"); - Assert.Pass(); - - _instance.data = "44"; - await _connector.UpdateDigitalTwinAsync(_client, _instance, "int"); - Assert.Pass(); - - _instance.data = "true"; - await _connector.UpdateDigitalTwinAsync(_client, _instance, "boolean"); - Assert.Pass(); - } - - [Test] - public void UpdateDigitalTwinAsync_ThrowNotSupported() - { - _instance.data = null; - Assert.ThrowsAsync(async () => await _connector.UpdateDigitalTwinAsync(_client, _instance)); - - _instance.data = "test1234"; - Assert.ThrowsAsync(async () => await _connector.UpdateDigitalTwinAsync(_client, _instance, "invalid-converter")); - - _instance.data = ""; - Assert.ThrowsAsync(async () => await _connector.UpdateDigitalTwinAsync(_client, _instance, "double")); - } - } -} diff --git a/cloud_connectors/azure/mqtt_connector/res/azure_function/tests/MQTTConnectorAzureFunction.Tests.csproj b/cloud_connectors/azure/mqtt_connector/res/azure_function/tests/MQTTConnectorAzureFunction.Tests.csproj deleted file mode 100644 index 24c4e2d8..00000000 
--- a/cloud_connectors/azure/mqtt_connector/res/azure_function/tests/MQTTConnectorAzureFunction.Tests.csproj +++ /dev/null @@ -1,23 +0,0 @@ - - - - net6.0 - enable - false - - - - - - - - - - - - - - - - - diff --git a/cloud_connectors/azure/mqtt_connector/res/mqtt_config.template.json b/cloud_connectors/azure/mqtt_connector/res/mqtt_config.template.json deleted file mode 100644 index 6e8d33a5..00000000 --- a/cloud_connectors/azure/mqtt_connector/res/mqtt_config.template.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "max_retries": 5, - "retry_interval_ms": 1000, - "grpc_server_authority": "{GRPC_SERVER_AUTHORITY}", - "cert_path": "{PATH_TO_YOUR_CERT_FILE}", - "private_key_path": "{PATH_TO_YOUR_PRIVATE_KEY_FILE}", - "mqtt_client_id": "mqtt_connector", - "mqtt_client_authentication_name": "{YOUR_EVENT_GRID_MQTT_CLIENT_AUTHENTICATION_NAME}", - "event_grid_topic": "{YOUR_EVENT_GRID_MQTT_TOPIC}", - "event_grid_namespace_host_name": "{YOUR_EVENT_GRID_NAMESPACE_HOST_NAME}", - "event_grid_port": "8883" -} \ No newline at end of file diff --git a/cloud_connectors/azure/mqtt_connector/src/main.rs b/cloud_connectors/azure/mqtt_connector/src/main.rs deleted file mode 100644 index b2a08a14..00000000 --- a/cloud_connectors/azure/mqtt_connector/src/main.rs +++ /dev/null @@ -1,40 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -// SPDX-License-Identifier: MIT - -mod mqtt_connector; -mod mqtt_connector_config; - -use std::{fs, path::Path}; - -use env_logger::{Builder, Target}; -use log::{info, LevelFilter}; -use tonic::transport::Server; - -use azure_cloud_connector_proto::azure_cloud_connector::azure_cloud_connector_server::AzureCloudConnectorServer; -use mqtt_connector::MQTTConnector; -use mqtt_connector_config::{Config, CONFIG_FILE}; - -#[tokio::main] -async fn main() -> Result<(), Box> { - // Load the configuration settings - let config_file = fs::read_to_string(Path::new(env!("OUT_DIR")).join(CONFIG_FILE)).unwrap(); - let config: Config = serde_json::from_str(&config_file).unwrap(); - - // Setup logging - Builder::new() - .filter(None, LevelFilter::Info) - .target(Target::Stdout) - .init(); - - info!("Starting the Azure MQTT Cloud Connector."); - - // Start a gRPC server and MQTT client - let grpc_server_authority = config.grpc_server_authority.parse()?; - let mqtt_connector = MQTTConnector::new(config).expect("Unable to read MQTT config"); - Server::builder() - .add_service(AzureCloudConnectorServer::new(mqtt_connector)) - .serve(grpc_server_authority) - .await?; - Ok(()) -} diff --git a/cloud_connectors/azure/mqtt_connector/src/mqtt_connector.rs b/cloud_connectors/azure/mqtt_connector/src/mqtt_connector.rs deleted file mode 100644 index 51ab3dd9..00000000 --- a/cloud_connectors/azure/mqtt_connector/src/mqtt_connector.rs +++ /dev/null @@ -1,162 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. 
-// SPDX-License-Identifier: MIT - -use std::time::Duration; - -use log::info; -use paho_mqtt::{self as mqtt, MQTT_VERSION_5}; -use serde::{Deserialize, Serialize}; -use tonic::{Request, Response, Status}; - -use crate::mqtt_connector_config::Config; -use azure_cloud_connector_proto::azure_cloud_connector::azure_cloud_connector_server::AzureCloudConnector; -use azure_cloud_connector_proto::azure_cloud_connector::{ - UpdateDigitalTwinRequest, UpdateDigitalTwinResponse, -}; -use freyja_common::retry_utils::execute_with_retry; - -/// Implementation of the MQTTConnector gRPC trait -pub struct MQTTConnector { - mqtt_event_grid_client: mqtt::AsyncClient, - mqtt_event_grid_topic: String, -} - -/// The event grid payload for updating a digital twin instance -#[derive(Debug, Serialize, Deserialize)] -struct EventGridDigitalTwinPayload { - model_id: String, - instance_id: String, - instance_property_path: String, - data: String, -} - -impl MQTTConnector { - /// Creates an instance of MQTTConnector - /// - /// # Arguments - /// - `config`: the config file - pub fn new(config: Config) -> Result { - let event_grid_mqtt_uri = format!( - "mqtts://{}:{}", - config.event_grid_namespace_host_name, config.event_grid_port - ); - - let mqtt_event_grid_client = mqtt::CreateOptionsBuilder::new() - .server_uri(event_grid_mqtt_uri) - .client_id(config.mqtt_client_id) - .mqtt_version(MQTT_VERSION_5) - .max_buffered_messages(100) - .create_client() - .map_err(MQTTConnectorError::communication)?; - - // The key_store option uses a self-signed certificate - let ssl_options = mqtt::SslOptionsBuilder::new() - .key_store(config.cert_path) - .map_err(MQTTConnectorError::io)? - .private_key(config.private_key_path) - .map_err(MQTTConnectorError::io)? - .finalize(); - let conn_opts = mqtt::ConnectOptionsBuilder::new_v5() - .ssl_options(ssl_options) - .user_name(config.mqtt_client_authentication_name) - .clean_start(true) - .finalize(); - - futures::executor::block_on(async { - execute_with_retry( - config.max_retries, - Duration::from_millis(config.retry_interval_ms), - || mqtt_event_grid_client.connect(conn_opts.clone()), - Some(String::from( - "Connection retry for connecting to your Azure Event Grid", - )), - ) - .await - .map_err(MQTTConnectorError::communication) - })?; - - Ok(MQTTConnector { - mqtt_event_grid_client, - mqtt_event_grid_topic: config.event_grid_topic, - }) - } -} - -#[tonic::async_trait] -impl AzureCloudConnector for MQTTConnector { - /// Updates a digital twin instance - /// - /// # Arguments - /// - `request`: the request to send - async fn update_digital_twin( - &self, - request: Request, - ) -> Result, Status> { - let request_inner = request.into_inner(); - - let mqtt_payload = EventGridDigitalTwinPayload { - model_id: request_inner.model_id.clone(), - instance_id: request_inner.instance_id.clone(), - instance_property_path: request_inner.instance_property_path.clone(), - data: request_inner.data.clone(), - }; - - let message = mqtt::MessageBuilder::new() - .topic(self.mqtt_event_grid_topic.clone()) - .payload( - serde_json::to_vec(&mqtt_payload) - .map_err(|error| Status::failed_precondition(error.to_string()))?, - ) - .qos(1) - .finalize(); - - self.mqtt_event_grid_client - .publish(message) - .await - .map_err(|error| Status::internal(error.to_string()))?; - - let reply = format!( - "Successfully set {}{} based on model {} to {}", - request_inner.instance_id, - request_inner.instance_property_path, - request_inner.model_id, - request_inner.data - ); - - info!("{reply}"); - - 
Ok(Response::new(UpdateDigitalTwinResponse { reply })) - } -} - -#[cfg(test)] -mod azure_cloud_connector_tests { - use super::*; - - #[tokio::test] - async fn update_digital_twin_with_no_broker_test() { - let consumer_impl = MQTTConnector { - mqtt_event_grid_client: mqtt::CreateOptionsBuilder::new().create_client().unwrap(), - mqtt_event_grid_topic: String::new(), - }; - - let request = tonic::Request::new(UpdateDigitalTwinRequest { - model_id: String::new(), - instance_id: String::new(), - instance_property_path: String::new(), - data: String::new(), - }); - - let result = consumer_impl.update_digital_twin(request).await; - - assert!(result.is_err()); - } -} - -proc_macros::error! { - MQTTConnectorError { - Io, - Communication, - } -} diff --git a/cloud_connectors/azure/mqtt_connector/src/mqtt_connector_config.rs b/cloud_connectors/azure/mqtt_connector/src/mqtt_connector_config.rs deleted file mode 100644 index 7b5af9f2..00000000 --- a/cloud_connectors/azure/mqtt_connector/src/mqtt_connector_config.rs +++ /dev/null @@ -1,43 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -// SPDX-License-Identifier: MIT - -use serde::{Deserialize, Serialize}; - -pub(crate) const CONFIG_FILE: &str = "mqtt_config.json"; - -/// Configuration for the MQTT Connector -#[derive(Clone, Debug, Serialize, Deserialize)] -pub struct Config { - /// Max retries for connecting to Azure Event Grid - pub max_retries: u32, - - /// Retry interval in milliseconds - pub retry_interval_ms: u64, - - /// gRPC Server Authority - pub grpc_server_authority: String, - - /// Absolute path to certificate - pub cert_path: String, - - /// Absolute path to private key - pub private_key_path: String, - - /// The mqtt client id - pub mqtt_client_id: String, - - /// The client authentication name to use, which is different from mqtt_client_id. - /// The mqtt_client_id field is used to identify the client, whereas this field - /// is used for authentication purposes. - pub mqtt_client_authentication_name: String, - - /// The Event Grid topic to use for updating an Azure Digital Twin instance. - pub event_grid_topic: String, - - /// The Event Grid Namespace hostname. - pub event_grid_namespace_host_name: String, - - /// The Event Grid port number - pub event_grid_port: String, -} diff --git a/cloud_connectors/azure/proto-build/Cargo.toml b/cloud_connectors/azure/proto-build/Cargo.toml deleted file mode 100644 index 871a9546..00000000 --- a/cloud_connectors/azure/proto-build/Cargo.toml +++ /dev/null @@ -1,17 +0,0 @@ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT license. -# SPDX-License-Identifier: MIT - -[package] -name = "azure-cloud-connector-proto" -version = "0.1.0" -edition = "2021" -license = "MIT" - -[dependencies] -tonic = { workspace = true } -tokio = { workspace = true, features = ["macros", "rt-multi-thread"] } -prost = { workspace = true } - -[build-dependencies] -tonic-build = { workspace = true } \ No newline at end of file diff --git a/cloud_connectors/azure/proto-build/build.rs b/cloud_connectors/azure/proto-build/build.rs deleted file mode 100644 index e3cea876..00000000 --- a/cloud_connectors/azure/proto-build/build.rs +++ /dev/null @@ -1,7 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. 
-// SPDX-License-Identifier: MIT - -fn main() { - tonic_build::compile_protos("../proto/azure_cloud_connector.proto").unwrap(); -} diff --git a/cloud_connectors/azure/proto-build/src/lib.rs b/cloud_connectors/azure/proto-build/src/lib.rs deleted file mode 100644 index 52cf01a8..00000000 --- a/cloud_connectors/azure/proto-build/src/lib.rs +++ /dev/null @@ -1,7 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -// SPDX-License-Identifier: MIT - -pub mod azure_cloud_connector { - tonic::include_proto!("azure_cloud_connector"); -} diff --git a/cloud_connectors/azure/proto/azure_cloud_connector.proto b/cloud_connectors/azure/proto/azure_cloud_connector.proto deleted file mode 100644 index dd871e98..00000000 --- a/cloud_connectors/azure/proto/azure_cloud_connector.proto +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -// SPDX-License-Identifier: MIT - -syntax = "proto3"; - -option csharp_namespace = "Microsoft.ESDV.CloudConnector.Azure.GrpcService"; - -package azure_cloud_connector; - -service AzureCloudConnector { - rpc UpdateDigitalTwin (UpdateDigitalTwinRequest) returns (UpdateDigitalTwinResponse); -} - -message UpdateDigitalTwinRequest { - string model_id = 1; - string instance_id = 2; - string instance_property_path = 3; - string data = 4; -} - -message UpdateDigitalTwinResponse { - string reply = 1; -} \ No newline at end of file diff --git a/cloud_connectors/azure/sample-dtdl/hvac.json b/cloud_connectors/azure/sample-dtdl/hvac.json deleted file mode 100644 index b71adb5d..00000000 --- a/cloud_connectors/azure/sample-dtdl/hvac.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "@context": [ - "dtmi:dtdl:context;2" - ], - "@type": "Interface", - "@id": "dtmi:sdv:Cloud:Vehicle:Cabin:HVAC;1", - "description": "Heat, Ventilation and Air Conditioning", - "contents": [ - { - "@type": [ - "Property", - "Temperature" - ], - "@id": "dtmi:sdv:Cloud:Vehicle:Cabin:HVAC:AmbientAirTemperature;1", - "name": "AmbientAirTemperature", - "description": "The immediate surroundings air temperature (in Celsius).", - "schema": "double", - "unit": "degreeCelsius" - }, - { - "@type": "Property", - "@id": "dtmi:sdv:Cloud:Vehicle:Cabin:HVAC:IsAirConditioningActive;1", - "name": "IsAirConditioningActive", - "description": "Is air conditioning active?", - "schema": "boolean" - } - ] -} \ No newline at end of file diff --git a/cloud_connectors/azure/sample-dtdl/obd.json b/cloud_connectors/azure/sample-dtdl/obd.json deleted file mode 100644 index 010eed3d..00000000 --- a/cloud_connectors/azure/sample-dtdl/obd.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "@context": [ - "dtmi:dtdl:context;2" - ], - "@type": "Interface", - "@id": "dtmi:sdv:Cloud:Vehicle:OBD;1", - "description": "On-board Diagnostics Interface", - "contents": [ - { - "@type": "Property", - "@id": "dtmi:sdv:Cloud:Vehicle:OBD:HybridBatteryRemaining;1", - "name": "HybridBatteryRemaining", - "description": "The remaining hybrid battery life.", - "schema": "double" - } - ] -} \ No newline at end of file diff --git a/cloud_connectors/azure/sample-dtdl/vehicle.json b/cloud_connectors/azure/sample-dtdl/vehicle.json deleted file mode 100644 index 670f8697..00000000 --- a/cloud_connectors/azure/sample-dtdl/vehicle.json +++ /dev/null @@ -1,22 +0,0 @@ -{ - "@context": [ - "dtmi:dtdl:context;2" - ], - "@type": "Interface", - "@id": "dtmi:sdv:Cloud:Vehicle;1", - "description": "Vehicle", - "contents": [ - { - "@type": "Relationship", - "@id": "dtmi:sdv:Cloud:Vehicle:rel_has_hvac;1", - 
"name": "rel_has_hvac", - "target": "dtmi:sdv:Cloud:Vehicle:Cabin:HVAC;1" - }, - { - "@type": "Relationship", - "@id": "dtmi:sdv:Cloud:Vehicle:rel_has_obd;1", - "name": "rel_has_obd", - "target": "dtmi:sdv:Cloud:Vehicle:OBD;1" - } - ] -} \ No newline at end of file diff --git a/cloud_connectors/azure/scripts/digital_twins_setup.sh b/cloud_connectors/azure/scripts/digital_twins_setup.sh deleted file mode 100755 index 71322328..00000000 --- a/cloud_connectors/azure/scripts/digital_twins_setup.sh +++ /dev/null @@ -1,101 +0,0 @@ -#!/bin/bash - -set -e - -# Set the current directory to where the script lives. -cd "$(dirname "$0")" - -# Function to display usage information -usage() { - echo "Usage: $0 [-r|--resource-group-name] [-l|--location] [-d|--digital-twins-name] " - echo "Example:" - echo " $0 -r myRG -l westus2 -d myADT" -} - -# Parse command line arguments -while [[ $# -gt 0 ]] -do -key="$1" - -case $key in - -r|--resource-group-name) - resource_group="$2" - shift # past argument - shift # past value - ;; - -l|--location) - location="$2" - shift # past argument - shift # past value - ;; - -d|--digital-twins-name) - digital_twin_name="$2" - shift # past argument - shift # past value - ;; - -h|--help) - usage - exit 0 - ;; - *) - echo "Unknown argument: $key" - usage - exit 1 -esac -done - -# Check if all required arguments have been set -if [[ -z "${resource_group}" || -z "${location}" || -z "${digital_twin_name}" ]]; then - echo "Error: Missing required arguments:" - [[ -z "${resource_group}" ]] && echo " -r|--resource-group-name" - [[ -z "${location}" ]] && echo " -l|--location" - [[ -z "${digital_twin_name}" ]] && echo " -d|--digital-twins-name" - echo -e "\n" - usage - exit 1 -fi - -# Check if the Digital Twins instance exists -if az dt show -n "$digital_twin_name" > /dev/null 2>&1; then - echo "Digital Twins instance '$digital_twin_name' already exists in resource group '$resource_group'" -else - echo -e "\nCreating the Azure Digital Twins resource" - az dt create --dt-name "$digital_twin_name" --resource-group "$resource_group" --location "$location" -fi - -# Assign the Digital Twins Data Owner role -echo -e "\nAssigning the Azure Digital Twins Data Owner role" -userObjectID=$(az ad signed-in-user show --query id -o tsv) -az dt role-assignment create --dt-name "$digital_twin_name" --assignee "$userObjectID" --role "Azure Digital Twins Data Owner" - -# Upload the sample-dtdl models -echo -e "\nUploading sample-dtdl models" -for file in $(find ../sample-dtdl -name "*.json"); do - if ! az dt model create --dt-name ${digital_twin_name} --models $file; then - echo "$file" dtdl already uploaded. 
- fi -done - -# Create the Azure Digital Twin instances -echo -e "\nCreating the Azure Digital Twin instances" -az dt twin create --dt-name "$digital_twin_name" --dtmi "dtmi:sdv:Cloud:Vehicle;1" --twin-id vehicle -az dt twin create --dt-name "$digital_twin_name" --dtmi "dtmi:sdv:Cloud:Vehicle:OBD;1" --twin-id obd -az dt twin create --dt-name "$digital_twin_name" --dtmi "dtmi:sdv:Cloud:Vehicle:Cabin:HVAC;1" --twin-id hvac - -# Create the relationships -echo -e "\nCreating the Azure Digital Twin instance relationships" -az dt twin relationship create \ - --dt-name "$digital_twin_name" \ - --relationship-id rel_has_hvac \ - --relationship rel_has_hvac \ - --twin-id vehicle \ - --target hvac -az dt twin relationship create \ - --dt-name "$digital_twin_name" \ - --relationship-id rel_has_obd \ - --relationship rel_has_obd \ - --twin-id vehicle \ - --target obd - -echo -e "\nSetup finished for Freyja's Sample Azure Digital Twins" -exit 0 \ No newline at end of file diff --git a/cloud_connectors/azure/scripts/mqtt_connector_setup.sh b/cloud_connectors/azure/scripts/mqtt_connector_setup.sh deleted file mode 100755 index 4427c9c8..00000000 --- a/cloud_connectors/azure/scripts/mqtt_connector_setup.sh +++ /dev/null @@ -1,428 +0,0 @@ -#!/bin/bash - -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT license. -# SPDX-License-Identifier: MIT - -set -e - -required_vars=( - "resource_group" - "subscription_id" - "digital_twins_name" - "thumbprint_of_cert_in_der_format" - "storage_account_name" - "function_app_name" - "key_vault_name" - "event_grid_topic" - "event_grid_subscription_name" - "event_grid_namespace" - "mqtt_client_authentication_name" -) - -parse_config_file() { - local config_file="$1" - # Parse the configuration file - while read -r line; do - key=$(echo "$line" | sed -e 's/[{}"]//g' | awk -F: '{print $1}') - value=$(echo "$line" | sed -e 's/[{}"]//g' | awk -F: '{print $2}'| xargs) - case "$key" in - resource_group) resource_group="$value" ;; - subscription_id) subscription_id="$value" ;; - digital_twins_name) digital_twins_name="$value" ;; - thumbprint_of_cert_in_der_format) thumbprint_of_cert_in_der_format="$value" ;; - storage_account_name) storage_account_name="$value" ;; - function_app_name) function_app_name="$value" ;; - key_vault_name) key_vault_name="$value" ;; - event_grid_topic) event_grid_topic="$value" ;; - event_grid_subscription_name) event_grid_subscription_name="$value" ;; - event_grid_namespace) event_grid_namespace="$value" ;; - mqtt_client_authentication_name) mqtt_client_authentication_name="$value" ;; - esac - done < <(cat "$config_file" | grep -Eo '"[^"]*"\s*:\s*"[^"]*"') - - # Required values from the mqtt_config.json file - - - # Check if all required variables have been set - missing_vars=() - for var in "${required_vars[@]}"; do - if [[ -z "${!var}" ]]; then - missing_vars+=("$var") - fi - done - - # If we have missing key-value pairs, then print all the pairs that are missing from the config file. - if [[ ${#missing_vars[@]} -gt 0 ]]; then - echo "Error: Missing required values in config file:" - for var in "${missing_vars[@]}"; do - echo " $var" - done - exit 1 - fi -} - -# Set the current directory to where the script lives. 
-cd "$(dirname "$0")" - -# Function to display usage information -usage() { - echo "Usage: $0 [-c|--config-file] " - echo " $0 [-r|--resource-group-name] " - echo " [-s|--subscription-id] " - echo " [-d|--digital-twins-name] " - echo " [-t|--thumbprint] " - echo " [-S|--storage-account-name] " - echo " [-f|--function-app-name] " - echo " [-k|--key-vault-name] " - echo " [-x|--event-grid-topic] " - echo " [-y|--event-grid-subscription-name] " - echo " [-z|--event-grid-namespace] " - echo " [-m|--mqtt-client-auth-name] " - echo "" - echo "Example:" - echo " $0 -c mqtt_connector_setup.json" - echo " $0 -r myResourceGroup -s mySubscriptionId -d myDigitalTwinsName \\" - echo " -t myThumbprint -S myStorageAccountName -f myFunctionAppName \\" - echo " -k myKeyVaultName -x myEventGridTopic -y myEventGridSubscriptionName \\" - echo " -z myEventgridNamespace -m myMqttClientAuthenticationName" -} - -check_argument_value() { - if [[ -z "$2" ]]; then - echo "Error: Missing value for option $1" - usage - exit 1 - fi -} - -# Function to check if all required arguments have been set -check_required_arguments() { - # Array to store the names of the missing arguments - local missing_arguments=() - - # Loop through the array of required argument names - for arg_name in "${required_vars[@]}"; do - # Check if the argument value is empty - if [[ -z "${!arg_name}" ]]; then - # Add the name of the missing argument to the array - missing_arguments+=("${arg_name}") - fi - done - - # Check if any required argument is missing - if [[ ${#missing_arguments[@]} -gt 0 ]]; then - echo -e "\nError: Missing required arguments:" - printf ' %s\n' "${missing_arguments[@]}" - [ ! \( \( $# == 1 \) -a \( "$1" == "-c" \) \) ] && echo " Either provide a config file path or all the arguments, but not both at the same time." - [ ! \( $# == 22 \) ] && echo " All arguments must be provided." 
- echo "" - usage - exit 1 - fi -} - -# Parse command line arguments -while [[ $# -gt 0 ]] -do -key="$1" - -case $key in - -c|--config-file) - config_file="$2" - parse_config_file "$config_file" - shift # past argument - shift # past value - break # break out of case statement if config file is provided - ;; - -r|--resource-group-name) - check_argument_value "$@" - resource_group="$2" - shift # past argument - shift # past value - ;; - -s|--subscription-id) - check_argument_value "$@" - subscription_id="$2" - shift # past argument - shift # past value - ;; - -d|--digital-twins-name) - check_argument_value "$@" - digital_twins_name="$2" - shift # past argument - shift # past value - ;; - -t|--thumbprint) - check_argument_value "$@" - thumbprint_of_cert_in_der_format="$2" - shift # past argument - shift # past value - ;; - -S|--storage-account-name) - check_argument_value "$@" - storage_account_name="$2" - shift # past argument - shift # past value - ;; - -f|--function-app-name) - check_argument_value "$@" - function_app_name="$2" - shift # past argument - shift # past value - ;; - -k|--key-vault-name) - check_argument_value "$@" - key_vault_name="$2" - shift # past argument - shift # past value - ;; - -x|--event-grid-topic) - check_argument_value "$@" - event_grid_topic="$2" - shift # past argument - shift # past value - ;; - -y|--event-grid-subscription-name) - check_argument_value "$@" - event_grid_subscription_name="$2" - shift # past argument - shift # past value - ;; - -z|--event-grid-namespace) - check_argument_value "$@" - event_grid_namespace="$2" - shift # past argument - shift # past value - ;; - -m|--mqtt-client-auth-name) - check_argument_value "$@" - mqtt_client_authentication_name="$2" - shift # past argument - shift # past value - ;; - -h|--help) - usage - exit 0 - ;; - *) - echo "Unknown argument: $key" - usage - exit 1 -esac -done - -# Check if all required arguments have been set -check_required_arguments - -az account set --subscription "$subscription_id" -azure_providers_id_path="/subscriptions/$subscription_id/resourceGroups/$resource_group/providers" - -storage_account_query=$(az storage account list --query "[?name=='$storage_account_name']") -if [ "$storage_account_query" == "[]" ]; then - echo -e "\nCreating an Azure Storage Account" - az storage account create --name "$storage_account_name" \ - --location westus --resource-group "$resource_group" \ - --sku Standard_LRS --allow-blob-public-access false -else - echo "Storage Account $storage_account_name already exists." -fi - -function_app_query=$(az functionapp list --query "[?name=='$function_app_name']") -if [ "$function_app_query" == "[]" ]; then - echo -e "\nCreating an Azure Function App" - az functionapp create --resource-group "$resource_group" \ - --consumption-plan-location westus \ - --runtime dotnet \ - --functions-version 4 \ - --name "$function_app_name" \ - --storage-account "$storage_account_name" -else - echo "Azure Function App $function_app_name already exists." -fi - -# When you create an Azure Function App for the first time, it takes some time to deploy fully. -# Retry publishing the MQTT Connector Function to your Azure Function App. -cd "../mqtt_connector/res/azure_function/src" -echo -e "\nDeploying Freyja's MQTT Connector Azure Function to $function_app_name" -max_attempts=10 -attempt=0 -success=false -while [ $attempt -lt $max_attempts ] && ! 
$success; do - if func azure functionapp publish "$function_app_name" --csharp; then - success=true - else - echo "Retrying deployment of Freyja's MQTT Connector Azure Function to $function_app_name" - fi -done -if ! $success; then - echo "Failed to publish Freyja's MQTT Connector Azure Function after $max_attempts attempts" - echo "Please try running this script again." - exit 1 -fi -cd "$(dirname "$0")" - -# Key Vault -keyvault_query=$(az keyvault list --query "[?name=='$key_vault_name']") -if [ "$keyvault_query" == "[]" ]; then - echo -e "\nCreating an Azure Key Vault" - az keyvault create --name "$key_vault_name" --resource-group "$resource_group" --location "westus2" -else - echo "Key Vault $key_vault_name already exists." -fi -echo -e "\nSetting a secret for ADT-INSTANCE-URL in your Azure Key Vault" -adt_instance_url=$(az dt show --dt-name "$digital_twins_name" -g "$resource_group" --query hostName -o tsv) -az keyvault secret set --name ADT-INSTANCE-URL --vault-name "$key_vault_name" --value "https://$adt_instance_url" - -# Event Grid -event_grid_topic_query=$(az eventgrid topic list --resource-group "$resource_group" --query "[?name=='$event_grid_topic']") -if [ "$event_grid_topic_query" == "[]" ]; then - echo -e "\nCreating the event grid topic '$event_grid_topic'" - az eventgrid topic create --name "$event_grid_topic" -l westus2 -g "$resource_group" --input-schema cloudeventschemav1_0 -else - echo "Event Grid topic $event_grid_topic already exists." -fi - -echo -e "\nAssigning EventGrid Data Sender Role" -# Gets the signed-in Azure CLI user's object ID -userObjectID=$(az ad signed-in-user show --query id -o tsv) -az role assignment create --assignee "$userObjectID" --role "EventGrid Data Sender" \ - --scope "$azure_providers_id_path/Microsoft.EventGrid/topics/$event_grid_topic" - -event_grid_subscription_name_query=$(az eventgrid event-subscription list \ - --source-resource-id "$azure_providers_id_path/Microsoft.EventGrid/topics/$event_grid_topic" \ - --query "[?name=='$event_grid_subscription_name']") -if [ "$event_grid_subscription_name_query" == "[]" ]; then - echo -e "\nCreating Event Grid Subscription" - az eventgrid event-subscription create --name $event_grid_subscription_name \ - --source-resource-id "$azure_providers_id_path/Microsoft.EventGrid/topics/$event_grid_topic" \ - --endpoint "$azure_providers_id_path/Microsoft.Web/sites/$function_app_name/functions/MQTTConnectorAzureFunction" \ - --endpoint-type "azurefunction" -else - echo "Event Grid Subscription $event_grid_subscription_name already exists." 
-fi - -event_grid_namespace_query=$(az resource list --resource-group "$resource_group" \ - --resource-type "Microsoft.EventGrid/namespaces" \ - --query "[?name=='$event_grid_namespace']") -namespace_properties=$(cat <", - "subscription_id": "", - "digital_twins_name": "", - "thumbprint_of_cert_in_der_format": "", - "storage_account_name": "", - "function_app_name": "", - "key_vault_name": "", - "event_grid_topic": "", - "event_grid_subscription_name": "", - "event_grid_namespace": "", - "mqtt_client_authentication_name": "" -} \ No newline at end of file diff --git a/cloud_connectors/azure/shared/license_url_to_type.json b/cloud_connectors/azure/shared/license_url_to_type.json deleted file mode 100644 index a4f02904..00000000 --- a/cloud_connectors/azure/shared/license_url_to_type.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "https://raw.githubusercontent.com/moq/moq4/main/License.txt": "BSD 3-Clause", - "https://www.nuget.org/packages/NUnit/3.13.3/License": "MIT", - "https://www.nuget.org/packages/NUnit.Analyzers/3.3.0/License": "MIT" -} \ No newline at end of file diff --git a/common/Cargo.toml b/common/Cargo.toml index a67595dc..177b5cf7 100644 --- a/common/Cargo.toml +++ b/common/Cargo.toml @@ -14,5 +14,4 @@ freyja-contracts = { workspace = true } home = { workspace = true } log = { workspace = true } serde = { workspace = true } -serde_json = { workspace = true } tokio = { workspace = true } \ No newline at end of file diff --git a/contracts/Cargo.toml b/contracts/Cargo.toml index e70536d6..999d6801 100644 --- a/contracts/Cargo.toml +++ b/contracts/Cargo.toml @@ -11,10 +11,8 @@ license = "MIT" [dependencies] async-trait = { workspace = true } crossbeam = { workspace = true } -log = { workspace = true } proc-macros = { workspace = true } serde = { workspace = true } -serde_json = { workspace = true } strum = { workspace = true } strum_macros = { workspace = true } tokio = { workspace = true } \ No newline at end of file diff --git a/digital_twin_adapters/ibeji_adapter/Cargo.toml b/digital_twin_adapters/ibeji_adapter/Cargo.toml deleted file mode 100644 index 02dc71f5..00000000 --- a/digital_twin_adapters/ibeji_adapter/Cargo.toml +++ /dev/null @@ -1,29 +0,0 @@ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT license. -# SPDX-License-Identifier: MIT - -[package] -name = "ibeji-adapter" -version = "0.1.0" -edition = "2021" -license = "MIT" - -[dependencies] -async-trait = { workspace = true } -core-protobuf-data-access = { workspace = true } -freyja-common = { workspace = true } -freyja-contracts = { workspace = true } -futures = { workspace = true } -log = { workspace = true } -proc-macros = { workspace = true } -provider-proxy-selector = { workspace = true } -serde = { workspace = true } -serde_json = { workspace = true } -service_discovery_proto = { workspace = true } -strum = { workspace = true } -strum_macros = { workspace = true } -tempfile = { workspace = true } -tokio = { workspace = true } -tokio-stream = { workspace = true } -tonic = { workspace = true } -tower = { workspace = true } \ No newline at end of file diff --git a/digital_twin_adapters/ibeji_adapter/README.md b/digital_twin_adapters/ibeji_adapter/README.md deleted file mode 100644 index 64a116b6..00000000 --- a/digital_twin_adapters/ibeji_adapter/README.md +++ /dev/null @@ -1,19 +0,0 @@ -# Ibeji Adapter - -The Ibeji Adapter is used to integrate with [Eclipse-Ibeji's In-vehicle Digital Twin Service](https://github.com/eclipse-ibeji/ibeji). 
- -## Behavior - -The adapter shares an `entity_map` map with Freyja's emitter that maps `entity_id`s to its entity info. When the emitter receives new mappings from the cartographer, it will update this shared state and insert `None` values for the corresponding ID. The adapter will detect empty entries for each `entity_id` in our `entity_map` then call `find_by_id` to send a request to [Ibeji's In-vehicle Digital Twin Service](https://github.com/eclipse-ibeji/ibeji), to populate the entity info. - -### Ibeji Without Chariott - -By default, running `cargo build` will copy the `ibeji_adapter_config.sample.json` file from the `res` directory. Before building, please edit the `uri` field in `res/ibeji_adapter_config.sample.json`, so that the URI matches with the URI that Ibeji's In-Vehicle Digital Twin service uses. - -### Ibeji With Chariott - -If Ibeji is registered with [Chariott's Service Discovery system](https://github.com/eclipse-chariott/chariott/blob/main/service_discovery/README.md) and you wish to discover Ibeji through Chariott, then copy the contents from `res/ibeji_adapter_config_with_chariott.sample.json`, and paste it into `res/ibeji_adapter_config.sample.json`. - -Before building, please edit the `uri` field in `res/ibeji_adapter_config.sample.json`, so that the URI matches with the URI that Chariott's Service Discovery uses. - -The Ibeji Adapter will discover Ibeji's In-Vehicle Digital Twin Service URI through Chariott. diff --git a/digital_twin_adapters/ibeji_adapter/build.rs b/digital_twin_adapters/ibeji_adapter/build.rs deleted file mode 100644 index 871645bc..00000000 --- a/digital_twin_adapters/ibeji_adapter/build.rs +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -// SPDX-License-Identifier: MIT - -use std::{env, fs, path::Path}; - -const OUT_DIR: &str = "OUT_DIR"; -const SAMPLE_CONFIG_FILE: &str = "res/ibeji_adapter_config.sample.json"; -const CONFIG_FILE: &str = "ibeji_adapter_config.json"; - -fn main() { - // The current directory of the build script is the package's root directory - let config_path = env::current_dir().unwrap().join(SAMPLE_CONFIG_FILE); - - let target_dir = env::var(OUT_DIR).unwrap(); - let dest_path = Path::new(&target_dir).join(CONFIG_FILE); - - fs::copy(&config_path, dest_path).unwrap(); - - println!( - "The config ibeji_adapter_config.json is located in the {} directory", - target_dir - ); - println!("cargo:rerun-if-changed={}", config_path.to_str().unwrap()); -} diff --git a/digital_twin_adapters/ibeji_adapter/res/ibeji_adapter_config.sample.json b/digital_twin_adapters/ibeji_adapter/res/ibeji_adapter_config.sample.json deleted file mode 100644 index e81d159b..00000000 --- a/digital_twin_adapters/ibeji_adapter/res/ibeji_adapter_config.sample.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "service_type": "InVehicleDigitalTwinService", - "uri": "http://[::1]:50001", - "max_retries": 5, - "retry_interval_ms": 1000 -} \ No newline at end of file diff --git a/digital_twin_adapters/ibeji_adapter/res/ibeji_adapter_config_with_chariott.sample.json b/digital_twin_adapters/ibeji_adapter/res/ibeji_adapter_config_with_chariott.sample.json deleted file mode 100644 index 66a983b8..00000000 --- a/digital_twin_adapters/ibeji_adapter/res/ibeji_adapter_config_with_chariott.sample.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "service_type": "ChariottDiscoveryService", - "uri": "http://0.0.0.0:50000", - "max_retries": 5, - "retry_interval_ms": 1000, - "metadata": { - "namespace": "sdv.ibeji", - "name": 
"invehicle_digital_twin", - "version": "1.0" - } -} \ No newline at end of file diff --git a/digital_twin_adapters/ibeji_adapter/src/config.rs b/digital_twin_adapters/ibeji_adapter/src/config.rs deleted file mode 100644 index 82a8c332..00000000 --- a/digital_twin_adapters/ibeji_adapter/src/config.rs +++ /dev/null @@ -1,36 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -// SPDX-License-Identifier: MIT - -use serde::{Deserialize, Serialize}; - -pub(crate) const CONFIG_FILE: &str = "ibeji_adapter_config.json"; - -/// Configuration setting variants for selecting the service -/// that the Ibeji Adapter should communicate with to interact with Ibeji -#[derive(Clone, Serialize, Deserialize)] -#[serde(tag = "service_type")] -pub enum Settings { - /// In-Vehicle Digital Twin Service - InVehicleDigitalTwinService { - uri: String, - max_retries: u32, - retry_interval_ms: u64, - }, - - /// Chariott's Service Discovery to discover Ibeji - ChariottDiscoveryService { - uri: String, - max_retries: u32, - retry_interval_ms: u64, - metadata: IbejiDiscoveryMetadata, - }, -} - -/// Configuration metadata for discovering Ibeji using Chariott -#[derive(Clone, Serialize, Deserialize)] -pub struct IbejiDiscoveryMetadata { - pub namespace: String, - pub name: String, - pub version: String, -} diff --git a/digital_twin_adapters/ibeji_adapter/src/ibeji_adapter.rs b/digital_twin_adapters/ibeji_adapter/src/ibeji_adapter.rs deleted file mode 100644 index 3055e022..00000000 --- a/digital_twin_adapters/ibeji_adapter/src/ibeji_adapter.rs +++ /dev/null @@ -1,345 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -// SPDX-License-Identifier: MIT - -use std::{fs, path::Path, str::FromStr, time::Duration}; - -use async_trait::async_trait; -use core_protobuf_data_access::invehicle_digital_twin::v1::{ - invehicle_digital_twin_client::InvehicleDigitalTwinClient, EndpointInfo, FindByIdRequest, -}; -use log::{info, warn}; -use service_discovery_proto::service_registry::v1::service_registry_client::ServiceRegistryClient; -use service_discovery_proto::service_registry::v1::DiscoverRequest; -use tonic::{transport::Channel, Request}; - -use crate::config::{IbejiDiscoveryMetadata, Settings, CONFIG_FILE}; -use freyja_common::retry_utils::execute_with_retry; -use freyja_contracts::{ - digital_twin_adapter::{ - DigitalTwinAdapter, DigitalTwinAdapterError, GetDigitalTwinProviderRequest, - GetDigitalTwinProviderResponse, - }, - entity::Entity, - provider_proxy::OperationKind, -}; - -const GET_OPERATION: &str = "Get"; -const SUBSCRIBE_OPERATION: &str = "Subscribe"; - -/// Contacts the In-Vehicle Digital Twin Service in Ibeji -pub struct IbejiAdapter { - client: InvehicleDigitalTwinClient, -} - -impl IbejiAdapter { - /// Retrieves Ibeji's In-Vehicle Digital Twin URI from Chariott - /// - /// # Arguments - /// - `chariott_service_discovery_uri`: the uri for Chariott's service discovery - /// - `metadata`: optional configuration metadata for discovering Ibeji using Chariott - async fn retrieve_ibeji_invehicle_digital_twin_uri_from_chariott( - chariott_service_discovery_uri: &str, - chariott_ibeji_config: IbejiDiscoveryMetadata, - ) -> Result { - let mut service_registry_client = - ServiceRegistryClient::connect(String::from(chariott_service_discovery_uri)) - .await - .map_err(DigitalTwinAdapterError::communication)?; - - let discover_request = Request::new(DiscoverRequest { - namespace: chariott_ibeji_config.namespace, - name: chariott_ibeji_config.name, - version: 
chariott_ibeji_config.version, - }); - - let service = service_registry_client - .discover(discover_request) - .await - .map_err(DigitalTwinAdapterError::communication)? - .into_inner() - .service - .ok_or_else(|| { - DigitalTwinAdapterError::communication( - "Cannot discover the uri of Ibeji's In-Vehicle Digital Twin Service", - ) - })?; - - Ok(service.uri) - } -} - -#[async_trait] -impl DigitalTwinAdapter for IbejiAdapter { - /// Creates a new instance of a DigitalTwinAdapter with default settings - fn create_new() -> Result { - let settings_content = - fs::read_to_string(Path::new(env!("OUT_DIR")).join(CONFIG_FILE)).unwrap(); - let settings: Settings = serde_json::from_str(settings_content.as_str()).unwrap(); - - let (invehicle_digital_twin_service_uri, max_retries, retry_interval_ms) = match settings { - Settings::InVehicleDigitalTwinService { - uri, - max_retries, - retry_interval_ms, - } => (uri, max_retries, retry_interval_ms), - Settings::ChariottDiscoveryService { - uri, - max_retries, - retry_interval_ms, - metadata, - } => { - let invehicle_digital_twin_service_uri = futures::executor::block_on(async { - execute_with_retry( - max_retries, - Duration::from_millis(retry_interval_ms), - || { - Self::retrieve_ibeji_invehicle_digital_twin_uri_from_chariott( - &uri, - metadata.clone(), - ) - }, - Some(String::from("Connection retry for connecting to Chariott")), - ) - .await - }) - .unwrap(); - info!("Discovered the uri of the In-Vehicle Digital Twin Service via Chariott: {invehicle_digital_twin_service_uri}"); - - ( - invehicle_digital_twin_service_uri, - max_retries, - retry_interval_ms, - ) - } - }; - - let client = futures::executor::block_on(async { - execute_with_retry( - max_retries, - Duration::from_millis(retry_interval_ms), - || InvehicleDigitalTwinClient::connect(invehicle_digital_twin_service_uri.clone()), - Some(String::from("Connection retry for connecting to Ibeji")), - ) - .await - .map_err(DigitalTwinAdapterError::communication) - }) - .unwrap(); - - Ok(Self { client }) - } - - /// Gets entity access information - /// - /// # Arguments - /// - `request`: the request for finding an entity's access information - async fn find_by_id( - &self, - request: GetDigitalTwinProviderRequest, - ) -> Result { - let entity_id = request.entity_id; - let request = tonic::Request::new(FindByIdRequest { - id: entity_id.clone(), - }); - - let response = self - .client - .clone() - .find_by_id(request) - .await - .map_err(DigitalTwinAdapterError::entity_not_found)?; - - // Extract the response from find_by_id - let entity_access_info = response - .into_inner() - .entity_access_info - .ok_or(format!("Cannot find {entity_id} with find_by_id")) - .map_err(DigitalTwinAdapterError::entity_not_found)?; - let entity_endpoint_info_list = entity_access_info.endpoint_info_list; - - let endpoint: Option<(EndpointInfo, String)> = entity_endpoint_info_list - .into_iter() - .find_map(|endpoint_info| { - endpoint_info.operations.iter().find_map(|operation| { - if *operation == SUBSCRIBE_OPERATION || *operation == GET_OPERATION { - return Some((endpoint_info.clone(), operation.clone())); - } - None - }) - }); - - if endpoint.is_none() { - let message = format!("No access info to connect with {entity_id}"); - warn!("{message}"); - return Err(DigitalTwinAdapterError::communication(message)); - } - - let (endpoint, _) = endpoint.unwrap(); - - // If both Subscribe and Get are supported, then we pick Subscribe over Get - let operation = if endpoint - .operations - .iter() - .any(|op| op == 
SUBSCRIBE_OPERATION) - { - String::from(SUBSCRIBE_OPERATION) - } else { - String::from(GET_OPERATION) - }; - - let operation = - OperationKind::from_str(&operation).map_err(DigitalTwinAdapterError::parse_error)?; - let entity = Entity { - id: entity_id, - description: Some(entity_access_info.description), - name: Some(entity_access_info.name), - operation, - uri: endpoint.uri, - protocol: endpoint.protocol, - }; - - Ok(GetDigitalTwinProviderResponse { entity }) - } -} - -#[cfg(test)] -mod ibeji_digital_twin_adapter_tests { - use super::*; - - use core_protobuf_data_access::invehicle_digital_twin::v1::{ - invehicle_digital_twin_server::InvehicleDigitalTwin, EntityAccessInfo, FindByIdRequest, - FindByIdResponse, RegisterRequest, RegisterResponse, - }; - use tonic::{Request, Response, Status}; - - const AMBIENT_AIR_TEMPERATURE_ID: &str = "dtmi:sdv:Vehicle:Cabin:HVAC:AmbientAirTemperature;1"; - - pub struct MockInVehicleTwin {} - - #[tonic::async_trait] - impl InvehicleDigitalTwin for MockInVehicleTwin { - async fn find_by_id( - &self, - request: Request, - ) -> Result, Status> { - let entity_id = request.into_inner().id; - - if entity_id != AMBIENT_AIR_TEMPERATURE_ID { - return Err(Status::not_found( - "Unable to find the entity with id {entity_id}", - )); - } - let endpoint_info = EndpointInfo { - protocol: String::from("grpc"), - uri: String::from("http://[::1]:40010"), // Devskim: ignore DS137138 - context: String::from("dtmi:sdv:Vehicle:Cabin:HVAC:AmbientAirTemperature;1"), - operations: vec![String::from("Get"), String::from("Subscribe")], - }; - - let entity_access_info = EntityAccessInfo { - name: String::from("AmbientAirTemperature"), - id: String::from("dtmi:sdv:Vehicle:Cabin:HVAC:AmbientAirTemperature;1"), - description: String::from("Ambient air temperature"), - endpoint_info_list: vec![endpoint_info], - }; - - let response = FindByIdResponse { - entity_access_info: Some(entity_access_info), - }; - - Ok(Response::new(response)) - } - - async fn register( - &self, - _request: Request, - ) -> Result, Status> { - let response = RegisterResponse {}; - Ok(Response::new(response)) - } - } - - /// The tests below uses Unix sockets to create a channel between a gRPC client and a gRPC server. - /// Unix sockets are more ideal than using TCP/IP sockets since Rust tests will run in parallel - /// so you would need to set an arbitrary port per test for TCP/IP sockets. 
- #[cfg(unix)] - mod unix_tests { - use super::*; - - use std::sync::Arc; - - use core_protobuf_data_access::invehicle_digital_twin::v1::invehicle_digital_twin_server::InvehicleDigitalTwinServer; - use tempfile::TempPath; - use tokio::net::{UnixListener, UnixStream}; - use tokio_stream::wrappers::UnixListenerStream; - use tonic::transport::{Channel, Endpoint, Server, Uri}; - use tower::service_fn; - - async fn create_test_grpc_client( - bind_path: Arc, - ) -> InvehicleDigitalTwinClient { - let channel = Endpoint::try_from("http://URI_IGNORED") // Devskim: ignore DS137138 - .unwrap() - .connect_with_connector(service_fn(move |_: Uri| { - let bind_path = bind_path.clone(); - async move { UnixStream::connect(bind_path.as_ref()).await } - })) - .await - .unwrap(); - - InvehicleDigitalTwinClient::new(channel) - } - - async fn run_test_grpc_server(uds_stream: UnixListenerStream) { - let mock_in_vehicle_twin = MockInVehicleTwin {}; - Server::builder() - .add_service(InvehicleDigitalTwinServer::new(mock_in_vehicle_twin)) - .serve_with_incoming(uds_stream) - .await - .unwrap(); - } - - #[tokio::test] - async fn find_by_id_test() { - // Create the Unix Socket - let bind_path = Arc::new(tempfile::NamedTempFile::new().unwrap().into_temp_path()); - let uds = match UnixListener::bind(bind_path.as_ref()) { - Ok(unix_listener) => unix_listener, - Err(_) => { - std::fs::remove_file(bind_path.as_ref()).unwrap(); - UnixListener::bind(bind_path.as_ref()).unwrap() - } - }; - let uds_stream = UnixListenerStream::new(uds); - - let request_future = async { - let client = create_test_grpc_client(bind_path.clone()).await; - let ibeji_digital_twin_adapter = IbejiAdapter { client }; - - let request = GetDigitalTwinProviderRequest { - entity_id: String::from("invalid_entity"), - }; - - let result = ibeji_digital_twin_adapter.find_by_id(request).await; - - assert!(result.is_err()); - - let request = GetDigitalTwinProviderRequest { - entity_id: String::from(AMBIENT_AIR_TEMPERATURE_ID), - }; - let result = ibeji_digital_twin_adapter.find_by_id(request).await; - assert!(result.is_ok()); - - let response = result.unwrap(); - assert_eq!(response.entity.operation, OperationKind::Subscribe); - }; - - tokio::select! { - _ = run_test_grpc_server(uds_stream) => (), - _ = request_future => () - } - - std::fs::remove_file(bind_path.as_ref()).unwrap(); - } - } -} diff --git a/digital_twin_adapters/ibeji_adapter/src/lib.rs b/digital_twin_adapters/ibeji_adapter/src/lib.rs deleted file mode 100644 index cc095976..00000000 --- a/digital_twin_adapters/ibeji_adapter/src/lib.rs +++ /dev/null @@ -1,6 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. 
-// SPDX-License-Identifier: MIT - -mod config; -pub mod ibeji_adapter; diff --git a/digital_twin_adapters/mock_digital_twin_adapter/Cargo.toml b/digital_twin_adapters/mock_digital_twin_adapter/Cargo.toml index cdd4d41e..a1b31839 100644 --- a/digital_twin_adapters/mock_digital_twin_adapter/Cargo.toml +++ b/digital_twin_adapters/mock_digital_twin_adapter/Cargo.toml @@ -11,10 +11,7 @@ license = "MIT" [dependencies] async-trait = { workspace = true } freyja-contracts = { workspace = true } -httptest = { workspace = true } mock-digital-twin = { workspace = true } reqwest = { workspace = true } serde = { workspace = true } -serde_json = { workspace = true } -tokio = { workspace = true } -tower = { workspace = true } \ No newline at end of file +serde_json = { workspace = true } \ No newline at end of file diff --git a/freyja/Cargo.toml b/freyja/Cargo.toml index f4666a5e..9dbb2735 100644 --- a/freyja/Cargo.toml +++ b/freyja/Cargo.toml @@ -9,7 +9,6 @@ edition = "2021" license = "MIT" [dependencies] -async-trait = { workspace = true } crossbeam = { workspace = true } env_logger = { workspace = true } freyja-contracts = { workspace = true } @@ -23,6 +22,7 @@ tokio = { workspace = true } [dev-dependencies] # Dependencies for testing mockall = { workspace = true } +async-trait = { workspace = true } # Dependencies for examples in-memory-mock-cloud-adapter = { path = "../cloud_adapters/in_memory_mock_cloud_adapter" } diff --git a/mapping_clients/mock_mapping_service_client/Cargo.toml b/mapping_clients/mock_mapping_service_client/Cargo.toml index 630d7b7b..2016ee7e 100644 --- a/mapping_clients/mock_mapping_service_client/Cargo.toml +++ b/mapping_clients/mock_mapping_service_client/Cargo.toml @@ -14,7 +14,6 @@ freyja-common = { workspace = true } freyja-contracts = { workspace = true } reqwest = { workspace = true } serde = { workspace = true } -serde_json = { workspace = true } [build-dependencies] freyja-build-common = { workspace = true } \ No newline at end of file diff --git a/mocks/mock_digital_twin/Cargo.toml b/mocks/mock_digital_twin/Cargo.toml index 9c13b478..474da3f2 100644 --- a/mocks/mock_digital_twin/Cargo.toml +++ b/mocks/mock_digital_twin/Cargo.toml @@ -9,7 +9,6 @@ edition = "2021" license = "MIT" [dependencies] -async-trait = { workspace = true } axum = { workspace = true } env_logger = { workspace = true } freyja-contracts = { workspace = true } diff --git a/mocks/mock_mapping_service/Cargo.toml b/mocks/mock_mapping_service/Cargo.toml index ecdd3fc7..ee76f6fc 100644 --- a/mocks/mock_mapping_service/Cargo.toml +++ b/mocks/mock_mapping_service/Cargo.toml @@ -15,7 +15,6 @@ freyja-common = { workspace = true } freyja-contracts = { workspace = true } log = { workspace = true } serde = { workspace = true } -serde_json = { workspace = true } tokio = { workspace = true } [build-dependencies] diff --git a/provider_proxies/grpc/v1/Cargo.toml b/provider_proxies/grpc/v1/Cargo.toml index 51b760b7..823ea4d1 100644 --- a/provider_proxies/grpc/v1/Cargo.toml +++ b/provider_proxies/grpc/v1/Cargo.toml @@ -15,15 +15,15 @@ freyja-common = { workspace = true } freyja-contracts = { workspace = true } futures = { workspace = true } log = { workspace = true } -proc-macros = { workspace = true } samples-protobuf-data-access = { workspace = true } serde = { workspace = true } -serde_json = { workspace = true } tempfile = { workspace = true } -tokio = { workspace = true } -tokio-stream = { workspace = true } tonic = { workspace = true } tower = { workspace = true } +[dev-dependencies] +tokio = { workspace = true } 
+tokio-stream = { workspace = true }
+
 [build-dependencies]
 freyja-build-common = { workspace = true }
\ No newline at end of file
diff --git a/provider_proxies/http_mock_provider_proxy/Cargo.toml b/provider_proxies/http_mock_provider_proxy/Cargo.toml
index 71d84b00..7755ce11 100644
--- a/provider_proxies/http_mock_provider_proxy/Cargo.toml
+++ b/provider_proxies/http_mock_provider_proxy/Cargo.toml
@@ -15,15 +15,8 @@ crossbeam = { workspace = true }
 freyja-common = { workspace = true }
 freyja-contracts = { workspace = true }
 log = { workspace = true }
-mock-digital-twin = { workspace = true }
-proc-macros = { workspace = true }
 reqwest = { workspace = true }
 serde = { workspace = true }
-serde_json = { workspace = true }
-tokio = { workspace = true }
-tokio-stream = { workspace = true }
-tonic = { workspace = true }
-tower = { workspace = true }

 [build-dependencies]
 freyja-build-common = { workspace = true }
\ No newline at end of file
diff --git a/provider_proxies/in_memory_mock_provider_proxy/Cargo.toml b/provider_proxies/in_memory_mock_provider_proxy/Cargo.toml
index c47c4a22..ccdb9c11 100644
--- a/provider_proxies/in_memory_mock_provider_proxy/Cargo.toml
+++ b/provider_proxies/in_memory_mock_provider_proxy/Cargo.toml
@@ -14,12 +14,8 @@ crossbeam = { workspace = true }
 freyja-common = { workspace = true }
 freyja-contracts = { workspace = true }
 log = { workspace = true }
-proc-macros = { workspace = true }
 serde = { workspace = true }
-serde_json = { workspace = true }
 tokio = { workspace = true }
-tonic = { workspace = true }
-tower = { workspace = true }

 [build-dependencies]
 freyja-build-common = { workspace = true }
\ No newline at end of file
diff --git a/provider_proxy_selector/Cargo.toml b/provider_proxy_selector/Cargo.toml
index a10cfcdc..5dc75d9b 100644
--- a/provider_proxy_selector/Cargo.toml
+++ b/provider_proxy_selector/Cargo.toml
@@ -9,7 +9,6 @@ edition = "2021"
 license = "MIT"

 [dependencies]
-async-trait = { workspace = true }
 crossbeam = { workspace = true }
 freyja-contracts = { workspace = true }
 grpc-provider-proxy-v1 = { path = "../provider_proxies/grpc/v1" }
diff --git a/tools/cg/about.toml b/tools/cg/about.toml
index 354f3e4e..2bcab157 100644
--- a/tools/cg/about.toml
+++ b/tools/cg/about.toml
@@ -1,11 +1,12 @@
 accepted = [
-    "MIT",
     "Apache-2.0",
+    "BSD-2-Clause",
+    "BSD-3-Clause",
+    "CC0-1.0",
     "EPL-1.0",
     "EPL-2.0",
+    "ISC",
+    "MIT",
     "Unicode-DFS-2016",
-    "BSD-2-Clause",
-    "BSD-3-Clause",
     "Zlib",
-    "CC0-1.0",
 ]
\ No newline at end of file
diff --git a/tools/dotnet_append_to_notice.sh b/tools/dotnet_append_to_notice.sh
deleted file mode 100755
index fa145723..00000000
--- a/tools/dotnet_append_to_notice.sh
+++ /dev/null
@@ -1,54 +0,0 @@
-#!/bin/bash
-
-set -e
-
-# Check if the correct number of arguments are provided
-if [ "$#" -ne 2 ]; then
-    echo "Usage: $0 path_to_markdown_file path_to_json_file"
-    exit 1
-fi
-
-# Assign arguments to variables for clarity
-markdown_file="$1"
-json_file="$2"
-
-# Check if the markdown file exists
-if [ ! -f "$markdown_file" ]; then
-    echo "Error: markdown file '$markdown_file' not found"
-    exit 1
-fi
-
-# Check if the JSON file exists
-if [ ! -f "$json_file" ]; then
-f "$json_file" ]; then - echo "Error: JSON file '$json_file' not found" - exit 1 -fi - -# Append header to markdown file -echo -e "\n\n# .NET Third Party Licenses\nThe following lists the licenses of the .NET projects used.\n" >> "$markdown_file" - -# Read JSON file and append information to markdown file -while read -r line; do - # Extract values from JSON object - license_type=$(echo "$line" | jq -r '.LicenseType') - package_name=$(echo "$line" | jq -r '.PackageName') - package_version=$(echo "$line" | jq -r '.PackageVersion') - package_url=$(echo "$line" | jq -r '.PackageUrl') - license_description=$(echo "$line" | jq -r '.LicenseDescription') - - # Append information to markdown file in specified format - echo "### $license_type" >> "$markdown_file" - echo -e "\n#### Used by\n" >> "$markdown_file" - echo "- [$package_name]( $package_url ) $package_version" >> "$markdown_file" - echo -e "\n#### License\n" >> "$markdown_file" - echo '```text' >> "$markdown_file" - echo -e "$license_description" >> "$markdown_file" - echo '```' >> "$markdown_file" -done < <(jq -c '.[]' "$json_file") - -echo -e "\n## Disclaimer" >> "$markdown_file" -echo -e " -This .NET Third Party Licenses list has been generated with [nuget-license](https://github.com/tomchavakis/nuget-license), \ -licensed under [Apache License 2.0](https://github.com/tomchavakis/nuget-license/blob/master/LICENSE)" >> "$markdown_file" - -exit 0 \ No newline at end of file diff --git a/tools/dotnet_get_licenses.sh b/tools/dotnet_get_licenses.sh deleted file mode 100755 index f580156d..00000000 --- a/tools/dotnet_get_licenses.sh +++ /dev/null @@ -1,59 +0,0 @@ -#!/bin/bash - -set -e - -# Check if the correct number of arguments are provided -if [ "$#" -ne 2 ]; then - echo "Usage: $0 path_to_json_file path_to_text_files_directory" - exit 1 -fi - -# Assign arguments to variables for clarity -json_file="$1" -text_files_dir="$2" - -# Check if the JSON file exists -if [ ! -f "$json_file" ]; then - echo "Error: JSON file '$json_file' not found" - exit 1 -fi - -# Check if the text files directory exists -if [ ! -d "$text_files_dir" ]; then - echo "Error: text files directory '$text_files_dir' not found" - exit 1 -fi - -# Create a temporary file to store the updated JSON -temp_file=$(mktemp) - -# Read JSON file and update elements with LicenseDescription field -while read -r line; do - # Extract values from JSON object - package_name=$(echo "$line" | jq -r '.PackageName') - package_version=$(echo "$line" | jq -r '.PackageVersion') - - # Construct path to license description text file - license_description_file="${text_files_dir}/${package_name}_${package_version}.txt" - - # Check if the license description text file exists - if [ ! -f "$license_description_file" ]; then - echo "Error: license description text file '$license_description_file' not found" - exit 1 - fi - - # Read license description text file and add LicenseDescription field to JSON object - license_description=$(cat "$license_description_file") - updated_json=$(echo "$line" | jq --arg desc "$license_description" '. + {LicenseDescription: $desc}') - - # Write updated JSON object to temporary file - echo "$updated_json" >> "$temp_file" -done < <(jq -c '.[]' "$json_file") - -# Overwrite original JSON file with updated JSON from temporary file -jq -s '.' 
"$temp_file" > "$json_file" - -# Remove temporary file -rm "$temp_file" - -exit 0 \ No newline at end of file diff --git a/tools/dotnet_notice_generation.sh b/tools/dotnet_notice_generation.sh deleted file mode 100755 index a1f98875..00000000 --- a/tools/dotnet_notice_generation.sh +++ /dev/null @@ -1,44 +0,0 @@ -#!/bin/bash - -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT license. -# SPDX-License-Identifier: MIT - -set -e - -cd "$(dirname "$0")/.." - -# Check if the correct number of argments are passed -if [ "$#" -lt 3 ] ; then - echo "Usage: $0 " - exit 1 -fi - -# Assign notice_file_path and dotnet_directory to arguments -notice_file_path="$1" -dotnet_directory="$2" -license_url_to_license_mappings="$3" - -# Check if the notice file exists -if [ ! -f "$notice_file_path" ]; then - echo "Error: Notice file '$notice_file_path' not found" - exit 1 -fi - -if ! dotnet tool list --global | grep -q 'dotnet-project-licenses'; then - dotnet tool install --global dotnet-project-licenses -fi - -dotnet_licenses_output_directory="$dotnet_directory/dotnet_licenses_output" -mkdir -p "$dotnet_licenses_output_directory" -echo "Getting the .NET Third Party licenses" - -dotnet-project-licenses -i $dotnet_directory -o -f "$dotnet_licenses_output_directory" -u --json -e -c \ - --licenseurl-to-license-mappings "$license_url_to_license_mappings" - -./tools/dotnet_get_licenses.sh "$dotnet_licenses_output_directory/licenses.json" "$dotnet_directory/dotnet_licenses_output" -./tools/dotnet_append_to_notice.sh "$notice_file_path" "$dotnet_licenses_output_directory/licenses.json" - -rm -r "$dotnet_licenses_output_directory" - -exit 0 \ No newline at end of file diff --git a/tools/notice_generation.sh b/tools/notice_generation.sh index 0b70d359..d3d92247 100755 --- a/tools/notice_generation.sh +++ b/tools/notice_generation.sh @@ -36,10 +36,6 @@ NOTICE_FILENAME="NOTICE" echo "Running cargo-about for NOTICE file generation..." cargo about generate --workspace tools/cg/about.hbs --config tools/cg/about.toml > $NOTICE_FILENAME -CLOUD_CONNECTORS_AZURE_DIRECTORY="cloud_connectors/azure/" -echo "Appending Azure Cloud Connectors' .NET Third Party licenses to $NOTICE_FILENAME" -./tools/dotnet_notice_generation.sh $NOTICE_FILENAME $CLOUD_CONNECTORS_AZURE_DIRECTORY $CLOUD_CONNECTORS_AZURE_DIRECTORY/shared/license_url_to_type.json - if [ -z "$(git diff --name-only $NOTICE_FILENAME)" ] then echo "File not changed"