From 8809076bcaad6afaf8a5460f848565c26b827a7d Mon Sep 17 00:00:00 2001 From: Daniel McCoy Stephenson Date: Thu, 1 Feb 2024 10:04:28 -0700 Subject: [PATCH 1/5] Removed unnecessary `RUN apk update` instructions in dockerfiles --- Dockerfile | 3 --- Dockerfile.dev | 3 --- Dockerfile.standalone | 3 --- 3 files changed, 9 deletions(-) diff --git a/Dockerfile b/Dockerfile index 27834a5..b670165 100644 --- a/Dockerfile +++ b/Dockerfile @@ -4,9 +4,6 @@ USER root WORKDIR /asn1_codec VOLUME ["/asn1_codec_share"] -# update the package manager -RUN apk update - # add build dependencies RUN apk add --upgrade --no-cache --virtual .build-deps \ cmake \ diff --git a/Dockerfile.dev b/Dockerfile.dev index 76bcef4..9421e78 100644 --- a/Dockerfile.dev +++ b/Dockerfile.dev @@ -4,9 +4,6 @@ USER root WORKDIR /asn1_codec VOLUME ["/asn1_codec_share"] -# update the package manager -RUN apk update - # add build dependencies RUN apk add --upgrade --no-cache --virtual .build-deps \ cmake \ diff --git a/Dockerfile.standalone b/Dockerfile.standalone index ec2f1d2..5188063 100644 --- a/Dockerfile.standalone +++ b/Dockerfile.standalone @@ -3,9 +3,6 @@ FROM alpine:3.12 as builder USER root WORKDIR /asn1_codec -# update the package manager -RUN apk update - # add build dependencies RUN apk add --upgrade --no-cache --virtual .build-deps \ cmake \ From 65383c1352b35d1d1617583dbd1faadc0323c590 Mon Sep 17 00:00:00 2001 From: Daniel McCoy Stephenson Date: Thu, 1 Feb 2024 10:38:06 -0700 Subject: [PATCH 2/5] Added documentation for contents of the `data` & `unit-test-data` directories --- data/README.md | 20 ++++++++++++++++---- unit-test-data/README.md | 12 ++++++++++++ 2 files changed, 28 insertions(+), 4 deletions(-) create mode 100644 unit-test-data/README.md diff --git a/data/README.md b/data/README.md index d699e7e..9c96676 100644 --- a/data/README.md +++ b/data/README.md @@ -3,11 +3,11 @@ - You can construct test data files by working with the BSM MessageFrame and working up. 
j2735.MessageFrame.Bsm.xml : apply converter-example -ixer -oper -p MessageFrame -j2735.MessageFrame.Bsm.per : apply xxd -p +j2735.MessageFrame.Bsm.uper : apply xxd -p j2735.MessageFrame.Bsm.hex : cut and paste into Ieee1609Dot2Data xml structure. Ieee1609Dot2Data.unsecuredData.xml : apply converter-example -ixer -oper -p Ieee1609Dot2Data -Ieee1609Dot2Data.unsecuredData.per : apply xxd -p +Ieee1609Dot2Data.unsecuredData.uper : apply xxd -p Ieee1609Dot2Data.unsecuredData.hex : cut and paste into the BAH packet that goes into the input Kafka stream. BAH.Input.xml : the data that is received on the Kafka input stream. @@ -35,5 +35,17 @@ BAH.Input.xml : the data that is received on the Kafka input stream. encryptedData EncryptedData, signedCertificateRequest Opaque, - - +# Data Files +The data files in this directory are referenced in the following files: +| Data File | File | Test Name / Context | +| --------- | --------- | --------- | +| InputData.encoding.tim.odetimpayload.xml | src/tests.cpp | Encode TIM with payload type 'OdeTimPayload' | +| InputData.encoding.tim.odeasdpayload.xml | src/tests.cpp | Encode TIM with payload type 'OdeAsdPayload' | +| InputData.decoding.bsm.xml | src/tests.cpp | Decode BSM | +| producer_test_xml.txt | do_kafka_test.sh | ./test-scripts/standalone.sh config/test/c1.properties data/producer_test_xml.txt encode 0 | +| InputData.Ieee1609Dot2Data.packed.xml | testing.md | Testing Documentation | +| j2735.MessageFrame.Bsm.xml | data/README.md | Building Test Data Files | +| j2735.MessageFrame.Bsm.uper | data/README.md | Building Test Data Files | +| j2735.MessageFrame.Bsm.hex | data/README.md | Building Test Data Files | + +The rest of the files in this directory (and subdirectories) are provided as examples and are not referenced anywhere. 
\ No newline at end of file diff --git a/unit-test-data/README.md b/unit-test-data/README.md new file mode 100644 index 0000000..4257d54 --- /dev/null +++ b/unit-test-data/README.md @@ -0,0 +1,12 @@ +# Data Usage +The data files in this directory are used in the following tests: +| Data File | Test File | Test Name | +| --------- | --------- | --------- | +| 1609_BSM.xml | src/tests.cpp | Encode 1609_BSM | +| 1609.xml | src/tests.cpp | Encode 1609 | +| ASD_1609_BSM.xml | src/tests.cpp | Encode ASD_1609_BSM | +| ASD_1609.xml | src/tests.cpp | Encode ASD_1609 | +| ASD_BSM.xml | src/tests.cpp | Encode ASD_BSM | +| ASD.xml | src/tests.cpp | Encode ASD | +| BSM.xml | src/tests.cpp | Encode BSM | +| empty.xml | N/A | N/A | From d98246dadf810d86a2201277fd9f0d4d088e511b Mon Sep 17 00:00:00 2001 From: Daniel McCoy Stephenson Date: Thu, 1 Feb 2024 10:46:47 -0700 Subject: [PATCH 3/5] Modified kafkaType check in `acm.cpp` --- src/acm.cpp | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/src/acm.cpp b/src/acm.cpp index 183bf1d..6b4fcda 100644 --- a/src/acm.cpp +++ b/src/acm.cpp @@ -488,10 +488,6 @@ bool ASN1_Codec::configure() { // confluent cloud integration std::string kafkaType = getEnvironmentVariable("KAFKA_TYPE"); - if (kafkaType == "") { - logger->warn(fnname + ": KAFKA_TYPE environment variable not set. A local kafka broker will be targeted."); - } - if (kafkaType == "CONFLUENT") { // get username and password std::string username = getEnvironmentVariable("CONFLUENT_KEY"); @@ -507,6 +503,9 @@ bool ASN1_Codec::configure() { conf->set("api.version.fallback.ms", "0", error_string); conf->set("broker.version.fallback", "0.10.0.0", error_string); } + else { + logger->warn(fnname + ": KAFKA_TYPE environment variable not set to 'CONFLUENT'. 
A local kafka broker will be targeted."); + } // end of confluent cloud integration if ( getOption('g').isSet() && conf->set("group.id", optString('g'), error_string) != RdKafka::Conf::CONF_OK) { From 346265bc2c049e7c1b0b061f690f94c9fb4d1482 Mon Sep 17 00:00:00 2001 From: Daniel McCoy Stephenson Date: Thu, 1 Feb 2024 10:48:27 -0700 Subject: [PATCH 4/5] Added KAFKA_TYPE & confluent env vars to sample.env --- sample.env | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/sample.env b/sample.env index dea1a16..adb4451 100644 --- a/sample.env +++ b/sample.env @@ -9,4 +9,12 @@ ACM_LOG_TO_FILE= # The log level to use. # Valid values are: "DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL", "OFF" -ACM_LOG_LEVEL= \ No newline at end of file +ACM_LOG_LEVEL= + +# If unset, a local kafka broker will be targeted. +# If set to "CONFLUENT", the application will target a Confluent Cloud cluster. +KAFKA_TYPE= + +# Confluent Cloud Integration (if KAFKA_TYPE is set to "CONFLUENT") +CONFLUENT_KEY= +CONFLUENT_SECRET= \ No newline at end of file From f10bc0866e29f2913dc4576956a5de05f71376fb Mon Sep 17 00:00:00 2001 From: Daniel McCoy Stephenson Date: Thu, 1 Feb 2024 11:07:50 -0700 Subject: [PATCH 5/5] Refactored logger setup code to improve clarity. --- include/acm.hpp | 10 +++++++--- include/acm_blob_producer.hpp | 2 +- src/acm.cpp | 12 ++++++++---- src/acm_blob_producer.cpp | 4 ++-- src/tests.cpp | 20 ++++++++++---------- 5 files changed, 28 insertions(+), 20 deletions(-) diff --git a/include/acm.hpp b/include/acm.hpp index 781800c..46a2605 100644 --- a/include/acm.hpp +++ b/include/acm.hpp @@ -221,7 +221,7 @@ class ASN1_Codec : public tool::Tool { const char* getEnvironmentVariable(const char* variableName); /** - * @brief Create and setup the two loggers used for the ASN1_Codec. The locations and filenames for the logs can be specified + * @brief Create and setup the logger used for the ASN1_Codec. 
The locations and filenames for the logs can be specified * using command line parameters. The CANNOT be set via the configuration file, since these loggers are setup * prior to the configuration file being read. * @@ -231,8 +231,12 @@ class ASN1_Codec : public tool::Tool { * * @return true upon success; false if some failure occurred during logger setup. */ - bool make_loggers( bool remove_files ); - bool make_loggers_testing(); + bool setup_logger( bool remove_files ); + + /** + * @brief Set up the logger for testing + */ + bool setup_logger_for_testing(); private: diff --git a/include/acm_blob_producer.hpp b/include/acm_blob_producer.hpp index d9fcc49..7ee51bb 100644 --- a/include/acm_blob_producer.hpp +++ b/include/acm_blob_producer.hpp @@ -81,7 +81,7 @@ class ACMBlobProducer : public tool::Tool { * * @return true upon success; false if some failure occurred during logger setup. */ - bool make_loggers( bool remove_files ); + bool setup_logger( bool remove_files ); private: diff --git a/src/acm.cpp b/src/acm.cpp index 6b4fcda..60f0cfe 100644 --- a/src/acm.cpp +++ b/src/acm.cpp @@ -654,7 +654,7 @@ bool ASN1_Codec::launch_consumer(){ return true; } -bool ASN1_Codec::make_loggers( bool remove_files ) { +bool ASN1_Codec::setup_logger( bool remove_files ) { // defaults. std::string path{ "logs/" }; std::string logname{ "log.info" }; @@ -702,8 +702,12 @@ bool ASN1_Codec::make_loggers( bool remove_files ) { return true; } -bool ASN1_Codec::make_loggers_testing() { - logger = std::make_shared("testlog"); +/** + * @brief This method is used to setup the logger for testing purposes. + */ +bool ASN1_Codec::setup_logger_for_testing() { + std::string TEST_LOGGER_FILE_NAME = "test_logger_file.log"; + logger = std::make_shared(TEST_LOGGER_FILE_NAME); return true; } @@ -1970,7 +1974,7 @@ int main( int argc, char* argv[] ) } // can set levels if needed here. 
- if ( !asn1_codec.make_loggers( asn1_codec.optIsSet('R') )) { + if ( !asn1_codec.setup_logger( asn1_codec.optIsSet('R') )) { std::exit( EXIT_FAILURE ); } diff --git a/src/acm_blob_producer.cpp b/src/acm_blob_producer.cpp index 3763603..33bfc52 100644 --- a/src/acm_blob_producer.cpp +++ b/src/acm_blob_producer.cpp @@ -375,7 +375,7 @@ bool ACMBlobProducer::launch_producer() return true; } -bool ACMBlobProducer::make_loggers( bool remove_files ) +bool ACMBlobProducer::setup_logger( bool remove_files ) { // defaults. std::string path{ "logs/" }; @@ -533,7 +533,7 @@ int main(int argc, char* argv[]) } // can set levels if needed here. - if (!acm_blob_producer.make_loggers((acm_blob_producer.optIsSet('R')))) { + if (!acm_blob_producer.setup_logger((acm_blob_producer.optIsSet('R')))) { std::exit(EXIT_FAILURE); } diff --git a/src/tests.cpp b/src/tests.cpp index 54e4b06..a325ebf 100644 --- a/src/tests.cpp +++ b/src/tests.cpp @@ -43,7 +43,7 @@ const char *ASD_ONE609_HEX = "44400000000084782786283B90A7148D2B0A89C49F8A85A776 */ // TEST_CASE("Encode BSM", "[encoding]" ) { // TODO: fix test case failing // // prepare -// asn1_codec.make_loggers_testing(); +// asn1_codec.setup_logger_for_testing(); // std::stringstream out1; // CHECK(asn1_codec.file_test("unit-test-data/BSM.xml", out1) == EXIT_SUCCESS); @@ -60,7 +60,7 @@ TEST_CASE("Encode ASD", "[encoding]" ) { std::cout << "=== Encode ASD ===" << std::endl; // prepare - asn1_codec.make_loggers_testing(); + asn1_codec.setup_logger_for_testing(); std::stringstream out2; CHECK(asn1_codec.file_test("unit-test-data/ASD.xml", out2) == EXIT_SUCCESS); @@ -79,7 +79,7 @@ TEST_CASE("Encode ASD", "[encoding]" ) { */ // TEST_CASE("Encode ASD_BSM", "[encoding]" ) { // TODO: fix test case failing // // prepare -// asn1_codec.make_loggers_testing(); +// asn1_codec.setup_logger_for_testing(); // std::stringstream out3; // CHECK(asn1_codec.file_test("unit-test-data/ASD_BSM.xml", out3) == EXIT_SUCCESS); // parse_result = output_doc.load(out3, 
pugi::parse_default | pugi::parse_declaration | pugi::parse_doctype | pugi::parse_trim_pcdata); @@ -98,7 +98,7 @@ TEST_CASE("Encode 1609", "[encoding]" ) { std::cout << "=== Encode 1609 ===" << std::endl; // prepare - asn1_codec.make_loggers_testing(); + asn1_codec.setup_logger_for_testing(); std::stringstream out4; CHECK(asn1_codec.file_test("unit-test-data/1609.xml", out4) == EXIT_SUCCESS); @@ -115,7 +115,7 @@ TEST_CASE("Encode ASD_1609", "[encoding]" ) { std::cout << "=== Encode ASD_1609 ===" << std::endl; // prepare - asn1_codec.make_loggers_testing(); + asn1_codec.setup_logger_for_testing(); std::stringstream out5; CHECK(asn1_codec.file_test("unit-test-data/ASD_1609.xml", out5) == EXIT_SUCCESS); @@ -137,7 +137,7 @@ TEST_CASE("Encode ASD_1609", "[encoding]" ) { */ // TEST_CASE("Encode 1609_BSM", "[encoding]") { // TODO: fix test case failing // // prepare -// asn1_codec.make_loggers_testing(); +// asn1_codec.setup_logger_for_testing(); // std::stringstream out6; // CHECK(asn1_codec.file_test("unit-test-data/1609_BSM.xml", out6) == EXIT_SUCCESS); @@ -159,7 +159,7 @@ TEST_CASE("Encode ASD_1609", "[encoding]" ) { */ // TEST_CASE("Encode ASD_1609_BSM", "[encoding]") { // TODO: fix test case failing // // prepare -// asn1_codec.make_loggers_testing(); +// asn1_codec.setup_logger_for_testing(); // std::stringstream out7; // CHECK(asn1_codec.file_test("unit-test-data/ASD_1609_BSM.xml", out7) == EXIT_SUCCESS); @@ -182,7 +182,7 @@ TEST_CASE("Encode TIM with payload type 'OdeTimPayload'", "[encoding][odetimpayl std::cout << "=== Encode TIM with payload type 'OdeTimPayload' ===" << std::endl; // prepare - asn1_codec.make_loggers_testing(); + asn1_codec.setup_logger_for_testing(); std::stringstream out8; CHECK(asn1_codec.file_test("data/InputData.encoding.tim.odetimpayload.xml", out8) == EXIT_SUCCESS); @@ -196,7 +196,7 @@ TEST_CASE("Encode TIM with payload type 'OdeAsdPayload'", "[encoding][odeasdpayl std::cout << "=== Encode TIM with payload type 'OdeAsdPayload' ===" << 
std::endl; // prepare - asn1_codec.make_loggers_testing(); + asn1_codec.setup_logger_for_testing(); std::stringstream out8; CHECK(asn1_codec.file_test("data/InputData.encoding.tim.odeasdpayload.xml", out8) == EXIT_SUCCESS); @@ -212,7 +212,7 @@ TEST_CASE("Decode BSM", "[decoding]") { std::cout << "=== Decode BSM ===" << std::endl; // prepare - asn1_codec.make_loggers_testing(); + asn1_codec.setup_logger_for_testing(); std::stringstream out9; CHECK(asn1_codec.file_test("data/InputData.decoding.bsm.xml", out9, false) == EXIT_SUCCESS);