From b5a17a2329cb39184b3c75911e120bfb034c6893 Mon Sep 17 00:00:00 2001 From: Michael7371 <40476797+Michael7371@users.noreply.github.com> Date: Thu, 8 Aug 2024 18:01:37 -0600 Subject: [PATCH] spring boot refactor initial commit --- .../AwsDepositor.java => AwsDepositor.java | 0 jpo-s3-depositor/.gitignore | 33 +++ .../.mvn/wrapper/maven-wrapper.properties | 19 ++ jpo-s3-depositor/mvnw | 259 ++++++++++++++++++ jpo-s3-depositor/mvnw.cmd | 149 ++++++++++ jpo-s3-depositor/pom.xml | 106 +++++++ ...aysContinueProductionExceptionHandler.java | 17 ++ .../ode/depositor/DepositorApplication.java | 45 +++ .../ode/depositor/DepositorProperties.java | 251 +++++++++++++++++ .../depositor/DepositorServiceController.java | 57 ++++ .../its/jpo/ode/depositor/KafkaConfig.java | 51 ++++ .../its/jpo/ode/depositor/SystemConfig.java | 52 ++++ .../jpo/ode/depositor/SystemConfigMBean.java | 32 +++ .../ode/depositor/services/MqttService.java | 27 ++ .../depositor/topologies/StreamsTopology.java | 46 ++++ .../jpo/ode/depositor/utils/CommonUtils.java | 18 ++ .../src/main/resources/application.yaml | 29 ++ .../src/main/resources/logback.xml | 20 ++ .../JpoS3DepositorApplicationTests.java | 13 + pom.xml | 168 ------------ src/main/resources/log4j.properties | 10 - src/main/resources/logback.xml | 15 - .../depositor/AddConfluentPropertiesTest.java | 19 -- .../depositor/BuildFirehoseClientTest.java | 18 -- .../ConvertStringToByteBufferTest.java | 20 -- .../ode/aws/depositor/CreateS3ClientTest.java | 25 -- .../aws/depositor/CreateSampleFileTest.java | 23 -- .../aws/depositor/DepositToFirehoseTest.java | 75 ----- .../ode/aws/depositor/DepositToGCSTest.java | 35 --- .../ode/aws/depositor/DepositToS3Test.java | 41 --- .../aws/depositor/GenerateAWSProfileTest.java | 71 ----- .../depositor/GetEnvironmentVariableTest.java | 31 --- .../its/jpo/ode/aws/depositor/RunTest.java | 86 ------ 33 files changed, 1224 insertions(+), 637 deletions(-) rename src/main/java/us/dot/its/jpo/ode/aws/depositor/AwsDepositor.java => AwsDepositor.java (100%) create mode 100644 jpo-s3-depositor/.gitignore create mode 100644 jpo-s3-depositor/.mvn/wrapper/maven-wrapper.properties create mode 100644 jpo-s3-depositor/mvnw create mode 100644 jpo-s3-depositor/mvnw.cmd create mode 100644 jpo-s3-depositor/pom.xml create mode 100644 jpo-s3-depositor/src/main/java/us/dot/its/jpo/ode/depositor/AlwaysContinueProductionExceptionHandler.java create mode 100644 jpo-s3-depositor/src/main/java/us/dot/its/jpo/ode/depositor/DepositorApplication.java create mode 100644 jpo-s3-depositor/src/main/java/us/dot/its/jpo/ode/depositor/DepositorProperties.java create mode 100644 jpo-s3-depositor/src/main/java/us/dot/its/jpo/ode/depositor/DepositorServiceController.java create mode 100644 jpo-s3-depositor/src/main/java/us/dot/its/jpo/ode/depositor/KafkaConfig.java create mode 100644 jpo-s3-depositor/src/main/java/us/dot/its/jpo/ode/depositor/SystemConfig.java create mode 100644 jpo-s3-depositor/src/main/java/us/dot/its/jpo/ode/depositor/SystemConfigMBean.java create mode 100644 jpo-s3-depositor/src/main/java/us/dot/its/jpo/ode/depositor/services/MqttService.java create mode 100644 jpo-s3-depositor/src/main/java/us/dot/its/jpo/ode/depositor/topologies/StreamsTopology.java create mode 100644 jpo-s3-depositor/src/main/java/us/dot/its/jpo/ode/depositor/utils/CommonUtils.java create mode 100644 jpo-s3-depositor/src/main/resources/application.yaml create mode 100644 jpo-s3-depositor/src/main/resources/logback.xml create mode 100644 
jpo-s3-depositor/src/test/java/us/dot/its/jpo/ode/jpo_s3_depositor/JpoS3DepositorApplicationTests.java delete mode 100644 pom.xml delete mode 100644 src/main/resources/log4j.properties delete mode 100644 src/main/resources/logback.xml delete mode 100644 src/test/java/us/dot/its/jpo/ode/aws/depositor/AddConfluentPropertiesTest.java delete mode 100644 src/test/java/us/dot/its/jpo/ode/aws/depositor/BuildFirehoseClientTest.java delete mode 100644 src/test/java/us/dot/its/jpo/ode/aws/depositor/ConvertStringToByteBufferTest.java delete mode 100644 src/test/java/us/dot/its/jpo/ode/aws/depositor/CreateS3ClientTest.java delete mode 100644 src/test/java/us/dot/its/jpo/ode/aws/depositor/CreateSampleFileTest.java delete mode 100644 src/test/java/us/dot/its/jpo/ode/aws/depositor/DepositToFirehoseTest.java delete mode 100644 src/test/java/us/dot/its/jpo/ode/aws/depositor/DepositToGCSTest.java delete mode 100644 src/test/java/us/dot/its/jpo/ode/aws/depositor/DepositToS3Test.java delete mode 100644 src/test/java/us/dot/its/jpo/ode/aws/depositor/GenerateAWSProfileTest.java delete mode 100644 src/test/java/us/dot/its/jpo/ode/aws/depositor/GetEnvironmentVariableTest.java delete mode 100644 src/test/java/us/dot/its/jpo/ode/aws/depositor/RunTest.java diff --git a/src/main/java/us/dot/its/jpo/ode/aws/depositor/AwsDepositor.java b/AwsDepositor.java similarity index 100% rename from src/main/java/us/dot/its/jpo/ode/aws/depositor/AwsDepositor.java rename to AwsDepositor.java diff --git a/jpo-s3-depositor/.gitignore b/jpo-s3-depositor/.gitignore new file mode 100644 index 0000000..549e00a --- /dev/null +++ b/jpo-s3-depositor/.gitignore @@ -0,0 +1,33 @@ +HELP.md +target/ +!.mvn/wrapper/maven-wrapper.jar +!**/src/main/**/target/ +!**/src/test/**/target/ + +### STS ### +.apt_generated +.classpath +.factorypath +.project +.settings +.springBeans +.sts4-cache + +### IntelliJ IDEA ### +.idea +*.iws +*.iml +*.ipr + +### NetBeans ### +/nbproject/private/ +/nbbuild/ +/dist/ +/nbdist/ +/.nb-gradle/ +build/ +!**/src/main/**/build/ +!**/src/test/**/build/ + +### VS Code ### +.vscode/ diff --git a/jpo-s3-depositor/.mvn/wrapper/maven-wrapper.properties b/jpo-s3-depositor/.mvn/wrapper/maven-wrapper.properties new file mode 100644 index 0000000..8f96f52 --- /dev/null +++ b/jpo-s3-depositor/.mvn/wrapper/maven-wrapper.properties @@ -0,0 +1,19 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+wrapperVersion=3.3.2 +distributionType=only-script +distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.9.7/apache-maven-3.9.7-bin.zip diff --git a/jpo-s3-depositor/mvnw b/jpo-s3-depositor/mvnw new file mode 100644 index 0000000..d7c358e --- /dev/null +++ b/jpo-s3-depositor/mvnw @@ -0,0 +1,259 @@ +#!/bin/sh +# ---------------------------------------------------------------------------- +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# ---------------------------------------------------------------------------- + +# ---------------------------------------------------------------------------- +# Apache Maven Wrapper startup batch script, version 3.3.2 +# +# Optional ENV vars +# ----------------- +# JAVA_HOME - location of a JDK home dir, required when download maven via java source +# MVNW_REPOURL - repo url base for downloading maven distribution +# MVNW_USERNAME/MVNW_PASSWORD - user and password for downloading maven +# MVNW_VERBOSE - true: enable verbose log; debug: trace the mvnw script; others: silence the output +# ---------------------------------------------------------------------------- + +set -euf +[ "${MVNW_VERBOSE-}" != debug ] || set -x + +# OS specific support. +native_path() { printf %s\\n "$1"; } +case "$(uname)" in +CYGWIN* | MINGW*) + [ -z "${JAVA_HOME-}" ] || JAVA_HOME="$(cygpath --unix "$JAVA_HOME")" + native_path() { cygpath --path --windows "$1"; } + ;; +esac + +# set JAVACMD and JAVACCMD +set_java_home() { + # For Cygwin and MinGW, ensure paths are in Unix format before anything is touched + if [ -n "${JAVA_HOME-}" ]; then + if [ -x "$JAVA_HOME/jre/sh/java" ]; then + # IBM's JDK on AIX uses strange locations for the executables + JAVACMD="$JAVA_HOME/jre/sh/java" + JAVACCMD="$JAVA_HOME/jre/sh/javac" + else + JAVACMD="$JAVA_HOME/bin/java" + JAVACCMD="$JAVA_HOME/bin/javac" + + if [ ! -x "$JAVACMD" ] || [ ! -x "$JAVACCMD" ]; then + echo "The JAVA_HOME environment variable is not defined correctly, so mvnw cannot run." >&2 + echo "JAVA_HOME is set to \"$JAVA_HOME\", but \"\$JAVA_HOME/bin/java\" or \"\$JAVA_HOME/bin/javac\" does not exist." >&2 + return 1 + fi + fi + else + JAVACMD="$( + 'set' +e + 'unset' -f command 2>/dev/null + 'command' -v java + )" || : + JAVACCMD="$( + 'set' +e + 'unset' -f command 2>/dev/null + 'command' -v javac + )" || : + + if [ ! -x "${JAVACMD-}" ] || [ ! -x "${JAVACCMD-}" ]; then + echo "The java/javac command does not exist in PATH nor is JAVA_HOME set, so mvnw cannot run." 
>&2 + return 1 + fi + fi +} + +# hash string like Java String::hashCode +hash_string() { + str="${1:-}" h=0 + while [ -n "$str" ]; do + char="${str%"${str#?}"}" + h=$(((h * 31 + $(LC_CTYPE=C printf %d "'$char")) % 4294967296)) + str="${str#?}" + done + printf %x\\n $h +} + +verbose() { :; } +[ "${MVNW_VERBOSE-}" != true ] || verbose() { printf %s\\n "${1-}"; } + +die() { + printf %s\\n "$1" >&2 + exit 1 +} + +trim() { + # MWRAPPER-139: + # Trims trailing and leading whitespace, carriage returns, tabs, and linefeeds. + # Needed for removing poorly interpreted newline sequences when running in more + # exotic environments such as mingw bash on Windows. + printf "%s" "${1}" | tr -d '[:space:]' +} + +# parse distributionUrl and optional distributionSha256Sum, requires .mvn/wrapper/maven-wrapper.properties +while IFS="=" read -r key value; do + case "${key-}" in + distributionUrl) distributionUrl=$(trim "${value-}") ;; + distributionSha256Sum) distributionSha256Sum=$(trim "${value-}") ;; + esac +done <"${0%/*}/.mvn/wrapper/maven-wrapper.properties" +[ -n "${distributionUrl-}" ] || die "cannot read distributionUrl property in ${0%/*}/.mvn/wrapper/maven-wrapper.properties" + +case "${distributionUrl##*/}" in +maven-mvnd-*bin.*) + MVN_CMD=mvnd.sh _MVNW_REPO_PATTERN=/maven/mvnd/ + case "${PROCESSOR_ARCHITECTURE-}${PROCESSOR_ARCHITEW6432-}:$(uname -a)" in + *AMD64:CYGWIN* | *AMD64:MINGW*) distributionPlatform=windows-amd64 ;; + :Darwin*x86_64) distributionPlatform=darwin-amd64 ;; + :Darwin*arm64) distributionPlatform=darwin-aarch64 ;; + :Linux*x86_64*) distributionPlatform=linux-amd64 ;; + *) + echo "Cannot detect native platform for mvnd on $(uname)-$(uname -m), use pure java version" >&2 + distributionPlatform=linux-amd64 + ;; + esac + distributionUrl="${distributionUrl%-bin.*}-$distributionPlatform.zip" + ;; +maven-mvnd-*) MVN_CMD=mvnd.sh _MVNW_REPO_PATTERN=/maven/mvnd/ ;; +*) MVN_CMD="mvn${0##*/mvnw}" _MVNW_REPO_PATTERN=/org/apache/maven/ ;; +esac + +# apply MVNW_REPOURL and calculate MAVEN_HOME +# maven home pattern: ~/.m2/wrapper/dists/{apache-maven-,maven-mvnd--}/ +[ -z "${MVNW_REPOURL-}" ] || distributionUrl="$MVNW_REPOURL$_MVNW_REPO_PATTERN${distributionUrl#*"$_MVNW_REPO_PATTERN"}" +distributionUrlName="${distributionUrl##*/}" +distributionUrlNameMain="${distributionUrlName%.*}" +distributionUrlNameMain="${distributionUrlNameMain%-bin}" +MAVEN_USER_HOME="${MAVEN_USER_HOME:-${HOME}/.m2}" +MAVEN_HOME="${MAVEN_USER_HOME}/wrapper/dists/${distributionUrlNameMain-}/$(hash_string "$distributionUrl")" + +exec_maven() { + unset MVNW_VERBOSE MVNW_USERNAME MVNW_PASSWORD MVNW_REPOURL || : + exec "$MAVEN_HOME/bin/$MVN_CMD" "$@" || die "cannot exec $MAVEN_HOME/bin/$MVN_CMD" +} + +if [ -d "$MAVEN_HOME" ]; then + verbose "found existing MAVEN_HOME at $MAVEN_HOME" + exec_maven "$@" +fi + +case "${distributionUrl-}" in +*?-bin.zip | *?maven-mvnd-?*-?*.zip) ;; +*) die "distributionUrl is not valid, must match *-bin.zip or maven-mvnd-*.zip, but found '${distributionUrl-}'" ;; +esac + +# prepare tmp dir +if TMP_DOWNLOAD_DIR="$(mktemp -d)" && [ -d "$TMP_DOWNLOAD_DIR" ]; then + clean() { rm -rf -- "$TMP_DOWNLOAD_DIR"; } + trap clean HUP INT TERM EXIT +else + die "cannot create temp dir" +fi + +mkdir -p -- "${MAVEN_HOME%/*}" + +# Download and Install Apache Maven +verbose "Couldn't find MAVEN_HOME, downloading and installing it ..." +verbose "Downloading from: $distributionUrl" +verbose "Downloading to: $TMP_DOWNLOAD_DIR/$distributionUrlName" + +# select .zip or .tar.gz +if ! 
command -v unzip >/dev/null; then + distributionUrl="${distributionUrl%.zip}.tar.gz" + distributionUrlName="${distributionUrl##*/}" +fi + +# verbose opt +__MVNW_QUIET_WGET=--quiet __MVNW_QUIET_CURL=--silent __MVNW_QUIET_UNZIP=-q __MVNW_QUIET_TAR='' +[ "${MVNW_VERBOSE-}" != true ] || __MVNW_QUIET_WGET='' __MVNW_QUIET_CURL='' __MVNW_QUIET_UNZIP='' __MVNW_QUIET_TAR=v + +# normalize http auth +case "${MVNW_PASSWORD:+has-password}" in +'') MVNW_USERNAME='' MVNW_PASSWORD='' ;; +has-password) [ -n "${MVNW_USERNAME-}" ] || MVNW_USERNAME='' MVNW_PASSWORD='' ;; +esac + +if [ -z "${MVNW_USERNAME-}" ] && command -v wget >/dev/null; then + verbose "Found wget ... using wget" + wget ${__MVNW_QUIET_WGET:+"$__MVNW_QUIET_WGET"} "$distributionUrl" -O "$TMP_DOWNLOAD_DIR/$distributionUrlName" || die "wget: Failed to fetch $distributionUrl" +elif [ -z "${MVNW_USERNAME-}" ] && command -v curl >/dev/null; then + verbose "Found curl ... using curl" + curl ${__MVNW_QUIET_CURL:+"$__MVNW_QUIET_CURL"} -f -L -o "$TMP_DOWNLOAD_DIR/$distributionUrlName" "$distributionUrl" || die "curl: Failed to fetch $distributionUrl" +elif set_java_home; then + verbose "Falling back to use Java to download" + javaSource="$TMP_DOWNLOAD_DIR/Downloader.java" + targetZip="$TMP_DOWNLOAD_DIR/$distributionUrlName" + cat >"$javaSource" <<-END + public class Downloader extends java.net.Authenticator + { + protected java.net.PasswordAuthentication getPasswordAuthentication() + { + return new java.net.PasswordAuthentication( System.getenv( "MVNW_USERNAME" ), System.getenv( "MVNW_PASSWORD" ).toCharArray() ); + } + public static void main( String[] args ) throws Exception + { + setDefault( new Downloader() ); + java.nio.file.Files.copy( java.net.URI.create( args[0] ).toURL().openStream(), java.nio.file.Paths.get( args[1] ).toAbsolutePath().normalize() ); + } + } + END + # For Cygwin/MinGW, switch paths to Windows format before running javac and java + verbose " - Compiling Downloader.java ..." + "$(native_path "$JAVACCMD")" "$(native_path "$javaSource")" || die "Failed to compile Downloader.java" + verbose " - Running Downloader.java ..." + "$(native_path "$JAVACMD")" -cp "$(native_path "$TMP_DOWNLOAD_DIR")" Downloader "$distributionUrl" "$(native_path "$targetZip")" +fi + +# If specified, validate the SHA-256 sum of the Maven distribution zip file +if [ -n "${distributionSha256Sum-}" ]; then + distributionSha256Result=false + if [ "$MVN_CMD" = mvnd.sh ]; then + echo "Checksum validation is not supported for maven-mvnd." >&2 + echo "Please disable validation by removing 'distributionSha256Sum' from your maven-wrapper.properties." >&2 + exit 1 + elif command -v sha256sum >/dev/null; then + if echo "$distributionSha256Sum $TMP_DOWNLOAD_DIR/$distributionUrlName" | sha256sum -c >/dev/null 2>&1; then + distributionSha256Result=true + fi + elif command -v shasum >/dev/null; then + if echo "$distributionSha256Sum $TMP_DOWNLOAD_DIR/$distributionUrlName" | shasum -a 256 -c >/dev/null 2>&1; then + distributionSha256Result=true + fi + else + echo "Checksum validation was requested but neither 'sha256sum' or 'shasum' are available." >&2 + echo "Please install either command, or disable validation by removing 'distributionSha256Sum' from your maven-wrapper.properties." >&2 + exit 1 + fi + if [ $distributionSha256Result = false ]; then + echo "Error: Failed to validate Maven distribution SHA-256, your Maven distribution might be compromised." >&2 + echo "If you updated your Maven version, you need to update the specified distributionSha256Sum property." 
>&2 + exit 1 + fi +fi + +# unzip and move +if command -v unzip >/dev/null; then + unzip ${__MVNW_QUIET_UNZIP:+"$__MVNW_QUIET_UNZIP"} "$TMP_DOWNLOAD_DIR/$distributionUrlName" -d "$TMP_DOWNLOAD_DIR" || die "failed to unzip" +else + tar xzf${__MVNW_QUIET_TAR:+"$__MVNW_QUIET_TAR"} "$TMP_DOWNLOAD_DIR/$distributionUrlName" -C "$TMP_DOWNLOAD_DIR" || die "failed to untar" +fi +printf %s\\n "$distributionUrl" >"$TMP_DOWNLOAD_DIR/$distributionUrlNameMain/mvnw.url" +mv -- "$TMP_DOWNLOAD_DIR/$distributionUrlNameMain" "$MAVEN_HOME" || [ -d "$MAVEN_HOME" ] || die "fail to move MAVEN_HOME" + +clean || : +exec_maven "$@" diff --git a/jpo-s3-depositor/mvnw.cmd b/jpo-s3-depositor/mvnw.cmd new file mode 100644 index 0000000..6f779cf --- /dev/null +++ b/jpo-s3-depositor/mvnw.cmd @@ -0,0 +1,149 @@ +<# : batch portion +@REM ---------------------------------------------------------------------------- +@REM Licensed to the Apache Software Foundation (ASF) under one +@REM or more contributor license agreements. See the NOTICE file +@REM distributed with this work for additional information +@REM regarding copyright ownership. The ASF licenses this file +@REM to you under the Apache License, Version 2.0 (the +@REM "License"); you may not use this file except in compliance +@REM with the License. You may obtain a copy of the License at +@REM +@REM https://www.apache.org/licenses/LICENSE-2.0 +@REM +@REM Unless required by applicable law or agreed to in writing, +@REM software distributed under the License is distributed on an +@REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +@REM KIND, either express or implied. See the License for the +@REM specific language governing permissions and limitations +@REM under the License. +@REM ---------------------------------------------------------------------------- + +@REM ---------------------------------------------------------------------------- +@REM Apache Maven Wrapper startup batch script, version 3.3.2 +@REM +@REM Optional ENV vars +@REM MVNW_REPOURL - repo url base for downloading maven distribution +@REM MVNW_USERNAME/MVNW_PASSWORD - user and password for downloading maven +@REM MVNW_VERBOSE - true: enable verbose log; others: silence the output +@REM ---------------------------------------------------------------------------- + +@IF "%__MVNW_ARG0_NAME__%"=="" (SET __MVNW_ARG0_NAME__=%~nx0) +@SET __MVNW_CMD__= +@SET __MVNW_ERROR__= +@SET __MVNW_PSMODULEP_SAVE=%PSModulePath% +@SET PSModulePath= +@FOR /F "usebackq tokens=1* delims==" %%A IN (`powershell -noprofile "& {$scriptDir='%~dp0'; $script='%__MVNW_ARG0_NAME__%'; icm -ScriptBlock ([Scriptblock]::Create((Get-Content -Raw '%~f0'))) -NoNewScope}"`) DO @( + IF "%%A"=="MVN_CMD" (set __MVNW_CMD__=%%B) ELSE IF "%%B"=="" (echo %%A) ELSE (echo %%A=%%B) +) +@SET PSModulePath=%__MVNW_PSMODULEP_SAVE% +@SET __MVNW_PSMODULEP_SAVE= +@SET __MVNW_ARG0_NAME__= +@SET MVNW_USERNAME= +@SET MVNW_PASSWORD= +@IF NOT "%__MVNW_CMD__%"=="" (%__MVNW_CMD__% %*) +@echo Cannot start maven from wrapper >&2 && exit /b 1 +@GOTO :EOF +: end batch / begin powershell #> + +$ErrorActionPreference = "Stop" +if ($env:MVNW_VERBOSE -eq "true") { + $VerbosePreference = "Continue" +} + +# calculate distributionUrl, requires .mvn/wrapper/maven-wrapper.properties +$distributionUrl = (Get-Content -Raw "$scriptDir/.mvn/wrapper/maven-wrapper.properties" | ConvertFrom-StringData).distributionUrl +if (!$distributionUrl) { + Write-Error "cannot read distributionUrl property in $scriptDir/.mvn/wrapper/maven-wrapper.properties" +} + +switch -wildcard 
-casesensitive ( $($distributionUrl -replace '^.*/','') ) { + "maven-mvnd-*" { + $USE_MVND = $true + $distributionUrl = $distributionUrl -replace '-bin\.[^.]*$',"-windows-amd64.zip" + $MVN_CMD = "mvnd.cmd" + break + } + default { + $USE_MVND = $false + $MVN_CMD = $script -replace '^mvnw','mvn' + break + } +} + +# apply MVNW_REPOURL and calculate MAVEN_HOME +# maven home pattern: ~/.m2/wrapper/dists/{apache-maven-,maven-mvnd--}/ +if ($env:MVNW_REPOURL) { + $MVNW_REPO_PATTERN = if ($USE_MVND) { "/org/apache/maven/" } else { "/maven/mvnd/" } + $distributionUrl = "$env:MVNW_REPOURL$MVNW_REPO_PATTERN$($distributionUrl -replace '^.*'+$MVNW_REPO_PATTERN,'')" +} +$distributionUrlName = $distributionUrl -replace '^.*/','' +$distributionUrlNameMain = $distributionUrlName -replace '\.[^.]*$','' -replace '-bin$','' +$MAVEN_HOME_PARENT = "$HOME/.m2/wrapper/dists/$distributionUrlNameMain" +if ($env:MAVEN_USER_HOME) { + $MAVEN_HOME_PARENT = "$env:MAVEN_USER_HOME/wrapper/dists/$distributionUrlNameMain" +} +$MAVEN_HOME_NAME = ([System.Security.Cryptography.MD5]::Create().ComputeHash([byte[]][char[]]$distributionUrl) | ForEach-Object {$_.ToString("x2")}) -join '' +$MAVEN_HOME = "$MAVEN_HOME_PARENT/$MAVEN_HOME_NAME" + +if (Test-Path -Path "$MAVEN_HOME" -PathType Container) { + Write-Verbose "found existing MAVEN_HOME at $MAVEN_HOME" + Write-Output "MVN_CMD=$MAVEN_HOME/bin/$MVN_CMD" + exit $? +} + +if (! $distributionUrlNameMain -or ($distributionUrlName -eq $distributionUrlNameMain)) { + Write-Error "distributionUrl is not valid, must end with *-bin.zip, but found $distributionUrl" +} + +# prepare tmp dir +$TMP_DOWNLOAD_DIR_HOLDER = New-TemporaryFile +$TMP_DOWNLOAD_DIR = New-Item -Itemtype Directory -Path "$TMP_DOWNLOAD_DIR_HOLDER.dir" +$TMP_DOWNLOAD_DIR_HOLDER.Delete() | Out-Null +trap { + if ($TMP_DOWNLOAD_DIR.Exists) { + try { Remove-Item $TMP_DOWNLOAD_DIR -Recurse -Force | Out-Null } + catch { Write-Warning "Cannot remove $TMP_DOWNLOAD_DIR" } + } +} + +New-Item -Itemtype Directory -Path "$MAVEN_HOME_PARENT" -Force | Out-Null + +# Download and Install Apache Maven +Write-Verbose "Couldn't find MAVEN_HOME, downloading and installing it ..." +Write-Verbose "Downloading from: $distributionUrl" +Write-Verbose "Downloading to: $TMP_DOWNLOAD_DIR/$distributionUrlName" + +$webclient = New-Object System.Net.WebClient +if ($env:MVNW_USERNAME -and $env:MVNW_PASSWORD) { + $webclient.Credentials = New-Object System.Net.NetworkCredential($env:MVNW_USERNAME, $env:MVNW_PASSWORD) +} +[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12 +$webclient.DownloadFile($distributionUrl, "$TMP_DOWNLOAD_DIR/$distributionUrlName") | Out-Null + +# If specified, validate the SHA-256 sum of the Maven distribution zip file +$distributionSha256Sum = (Get-Content -Raw "$scriptDir/.mvn/wrapper/maven-wrapper.properties" | ConvertFrom-StringData).distributionSha256Sum +if ($distributionSha256Sum) { + if ($USE_MVND) { + Write-Error "Checksum validation is not supported for maven-mvnd. `nPlease disable validation by removing 'distributionSha256Sum' from your maven-wrapper.properties." + } + Import-Module $PSHOME\Modules\Microsoft.PowerShell.Utility -Function Get-FileHash + if ((Get-FileHash "$TMP_DOWNLOAD_DIR/$distributionUrlName" -Algorithm SHA256).Hash.ToLower() -ne $distributionSha256Sum) { + Write-Error "Error: Failed to validate Maven distribution SHA-256, your Maven distribution might be compromised. If you updated your Maven version, you need to update the specified distributionSha256Sum property." 
+  }
+}
+
+# unzip and move
+Expand-Archive "$TMP_DOWNLOAD_DIR/$distributionUrlName" -DestinationPath "$TMP_DOWNLOAD_DIR" | Out-Null
+Rename-Item -Path "$TMP_DOWNLOAD_DIR/$distributionUrlNameMain" -NewName $MAVEN_HOME_NAME | Out-Null
+try {
+  Move-Item -Path "$TMP_DOWNLOAD_DIR/$MAVEN_HOME_NAME" -Destination $MAVEN_HOME_PARENT | Out-Null
+} catch {
+  if (! (Test-Path -Path "$MAVEN_HOME" -PathType Container)) {
+    Write-Error "fail to move MAVEN_HOME"
+  }
+} finally {
+  try { Remove-Item $TMP_DOWNLOAD_DIR -Recurse -Force | Out-Null }
+  catch { Write-Warning "Cannot remove $TMP_DOWNLOAD_DIR" }
+}
+
+Write-Output "MVN_CMD=$MAVEN_HOME/bin/$MVN_CMD"
diff --git a/jpo-s3-depositor/pom.xml b/jpo-s3-depositor/pom.xml
new file mode 100644
index 0000000..d073072
--- /dev/null
+++ b/jpo-s3-depositor/pom.xml
@@ -0,0 +1,106 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.springframework.boot</groupId>
+    <artifactId>spring-boot-starter-parent</artifactId>
+    <version>3.2.8</version>
+    <relativePath/>
+  </parent>
+  <groupId>usdot.jpo.ode</groupId>
+  <artifactId>jpo-s3-depositor</artifactId>
+  <version>1.5.0-SNAPSHOT</version>
+  <name>JPO S3 Depositor</name>
+  <description>Demo project for Spring Boot</description>
+  <url/>
+  <licenses>
+    <license/>
+  </licenses>
+  <developers>
+    <developer/>
+  </developers>
+  <scm>
+    <connection/>
+    <developerConnection/>
+    <tag/>
+    <url/>
+  </scm>
+  <properties>
+    <java.version>21</java.version>
+    <spring-cloud-gcp.version>5.4.3</spring-cloud-gcp.version>
+    <spring-cloud.version>2023.0.3</spring-cloud.version>
+  </properties>
+  <dependencies>
+    <dependency>
+      <groupId>com.google.cloud</groupId>
+      <artifactId>spring-cloud-gcp-starter-storage</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.kafka</groupId>
+      <artifactId>kafka-streams</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.springframework.kafka</groupId>
+      <artifactId>spring-kafka</artifactId>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.commons</groupId>
+      <artifactId>commons-lang3</artifactId>
+      <version>3.16.0</version>
+    </dependency>
+
+    <dependency>
+      <groupId>com.amazonaws</groupId>
+      <artifactId>aws-java-sdk-s3</artifactId>
+      <version>1.12.767</version>
+    </dependency>
+
+    <dependency>
+      <groupId>org.projectlombok</groupId>
+      <artifactId>lombok</artifactId>
+      <version>1.18.30</version>
+    </dependency>
+
+    <dependency>
+      <groupId>org.springframework.boot</groupId>
+      <artifactId>spring-boot-starter-test</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.springframework.kafka</groupId>
+      <artifactId>spring-kafka-test</artifactId>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
+  <dependencyManagement>
+    <dependencies>
+      <dependency>
+        <groupId>org.springframework.cloud</groupId>
+        <artifactId>spring-cloud-dependencies</artifactId>
+        <version>${spring-cloud.version}</version>
+        <type>pom</type>
+        <scope>import</scope>
+      </dependency>
+      <dependency>
+        <groupId>com.google.cloud</groupId>
+        <artifactId>spring-cloud-gcp-dependencies</artifactId>
+        <version>${spring-cloud-gcp.version}</version>
+        <type>pom</type>
+        <scope>import</scope>
+      </dependency>
+    </dependencies>
+  </dependencyManagement>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.springframework.boot</groupId>
+        <artifactId>spring-boot-maven-plugin</artifactId>
+      </plugin>
+    </plugins>
+  </build>
+</project>
diff --git a/jpo-s3-depositor/src/main/java/us/dot/its/jpo/ode/depositor/AlwaysContinueProductionExceptionHandler.java b/jpo-s3-depositor/src/main/java/us/dot/its/jpo/ode/depositor/AlwaysContinueProductionExceptionHandler.java
new file mode 100644
index 0000000..9692f92
--- /dev/null
+++ b/jpo-s3-depositor/src/main/java/us/dot/its/jpo/ode/depositor/AlwaysContinueProductionExceptionHandler.java
@@ -0,0 +1,17 @@
+package us.dot.its.jpo.ode.depositor;
+
+import java.util.Map;
+
+import org.apache.kafka.clients.producer.ProducerRecord;
+import org.apache.kafka.streams.errors.ProductionExceptionHandler;
+
+public class AlwaysContinueProductionExceptionHandler implements ProductionExceptionHandler {
+    @Override
+    public void configure(Map<String, ?> configs) {
+        // Nothing to configure
+    }
+
+    @Override
+    public ProductionExceptionHandlerResponse handle(ProducerRecord<byte[], byte[]> record, Exception exception) {
+        return ProductionExceptionHandlerResponse.CONTINUE;
+    }
+}
\ No newline at end of file
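The handler above makes the Streams producer swallow every send-side error, so records can be dropped silently. A minimal JUnit 5 sketch (not part of this patch; it assumes the JUnit 5 that spring-boot-starter-test already brings in) that pins down the CONTINUE contract:

    package us.dot.its.jpo.ode.depositor;

    import static org.junit.jupiter.api.Assertions.assertEquals;

    import org.apache.kafka.streams.errors.ProductionExceptionHandler.ProductionExceptionHandlerResponse;
    import org.junit.jupiter.api.Test;

    class AlwaysContinueProductionExceptionHandlerTest {

        @Test
        void handleAlwaysReturnsContinue() {
            AlwaysContinueProductionExceptionHandler handler = new AlwaysContinueProductionExceptionHandler();
            // The handler ignores the record argument, so null is sufficient here.
            assertEquals(ProductionExceptionHandlerResponse.CONTINUE,
                    handler.handle(null, new RuntimeException("simulated send failure")));
        }
    }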
diff --git a/jpo-s3-depositor/src/main/java/us/dot/its/jpo/ode/depositor/DepositorApplication.java b/jpo-s3-depositor/src/main/java/us/dot/its/jpo/ode/depositor/DepositorApplication.java
new file mode 100644
index 0000000..76e1873
--- /dev/null
+++ b/jpo-s3-depositor/src/main/java/us/dot/its/jpo/ode/depositor/DepositorApplication.java
@@ -0,0 +1,45 @@
+package us.dot.its.jpo.ode.depositor;
+
+import java.lang.management.ManagementFactory;
+import javax.management.InstanceAlreadyExistsException;
+import javax.management.MBeanRegistrationException;
+import javax.management.MBeanServer;
+import javax.management.MalformedObjectNameException;
+import javax.management.NotCompliantMBeanException;
+import javax.management.ObjectName;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.springframework.boot.SpringApplication;
+import org.springframework.boot.autoconfigure.SpringBootApplication;
+import org.springframework.boot.context.properties.EnableConfigurationProperties;
+import org.springframework.kafka.annotation.EnableKafka;
+import org.springframework.kafka.annotation.EnableKafkaStreams;
+
+@SpringBootApplication
+@EnableKafka
+@EnableKafkaStreams
+@EnableConfigurationProperties(DepositorProperties.class)
+public class DepositorApplication {
+    private static final Logger logger = LoggerFactory.getLogger(DepositorApplication.class);
+
+    static final int DEFAULT_NO_THREADS = 10;
+    static final String DEFAULT_SCHEMA = "default";
+
+    public static void main(String[] args) throws MalformedObjectNameException, InterruptedException,
+            InstanceAlreadyExistsException, MBeanRegistrationException, NotCompliantMBeanException {
+
+        SpringApplication.run(DepositorApplication.class, args);
+
+        // Register the SystemConfig MBean so thread count and schema name are visible over JMX
+        MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
+        SystemConfig mBean = new SystemConfig(DEFAULT_NO_THREADS, DEFAULT_SCHEMA);
+        ObjectName name = new ObjectName("us.dot.its.jpo.ode.depositor:type=SystemConfig");
+        mbs.registerMBean(mBean, name);
+    }
+}
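As a quick illustration (not part of the patch), the MBean registered in main can be read back through the platform MBean server; the attribute names follow from the SystemConfigMBean getters:

    import java.lang.management.ManagementFactory;
    import javax.management.MBeanServer;
    import javax.management.ObjectName;

    class SystemConfigJmxCheck {
        public static void main(String[] args) throws Exception {
            // Assumes DepositorApplication.main has already run in this JVM and registered the bean.
            MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
            ObjectName name = new ObjectName("us.dot.its.jpo.ode.depositor:type=SystemConfig");
            System.out.println(mbs.getAttribute(name, "ThreadCount")); // 10 (DEFAULT_NO_THREADS)
            System.out.println(mbs.getAttribute(name, "SchemaName"));  // "default" (DEFAULT_SCHEMA)
        }
    }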
diff --git a/jpo-s3-depositor/src/main/java/us/dot/its/jpo/ode/depositor/DepositorProperties.java b/jpo-s3-depositor/src/main/java/us/dot/its/jpo/ode/depositor/DepositorProperties.java
new file mode 100644
index 0000000..4fb9d70
--- /dev/null
+++ b/jpo-s3-depositor/src/main/java/us/dot/its/jpo/ode/depositor/DepositorProperties.java
@@ -0,0 +1,251 @@
+package us.dot.its.jpo.ode.depositor;
+
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+import java.util.Properties;
+import java.util.UUID;
+
+import jakarta.annotation.PostConstruct;
+
+import org.apache.commons.lang3.SystemUtils;
+import org.apache.kafka.clients.producer.ProducerConfig;
+import org.apache.kafka.common.serialization.Serdes;
+import org.apache.kafka.streams.StreamsConfig;
+import org.apache.kafka.streams.errors.LogAndContinueExceptionHandler;
+import org.apache.kafka.streams.processor.LogAndSkipOnInvalidTimestamp;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import lombok.Getter;
+import lombok.Setter;
+import lombok.AccessLevel;
+
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.boot.context.properties.ConfigurationProperties;
+import org.springframework.boot.info.BuildProperties;
+import org.springframework.context.EnvironmentAware;
+import org.springframework.core.env.Environment;
+
+import us.dot.its.jpo.ode.depositor.utils.CommonUtils;
+
+@Getter
+@Setter
+@ConfigurationProperties
+public class DepositorProperties implements EnvironmentAware {
+    private static final Logger logger = LoggerFactory.getLogger(DepositorProperties.class);
+
+    @Autowired
+    @Setter(AccessLevel.NONE)
+    private Environment env;
+
+    // Confluent Properties
+    private boolean confluentCloudEnabled = false;
+    private String confluentKey = null;
+    private String confluentSecret = null;
+
+    public Boolean getConfluentCloudStatus() {
+        return confluentCloudEnabled;
+    }
+
+    /*
+     * General Properties
+     */
+    private String version;
+
+    @Setter(AccessLevel.NONE)
+    private String kafkaBrokers = null;
+
+    private static final String DEFAULT_KAFKA_PORT = "9092";
+
+    @Setter(AccessLevel.NONE)
+    private String hostId;
+
+    @Setter(AccessLevel.NONE)
+    private String kafkaBrokerIP = null;
+
+    /*
+     * S3 Properties
+     */
+    private String K_AWS_ACCESS_KEY_ID;
+    private String K_AWS_SECRET_ACCESS_KEY;
+    private String K_AWS_SESSION_TOKEN;
+    private String K_AWS_EXPIRATION;
+    private String API_ENDPOINT;
+    private String HEADER_Accept;
+    private String HEADER_X_API_KEY;
+
+    private String AWS_ACCESS_KEY_ID;
+    private String AWS_SECRET_ACCESS_KEY;
+    private String AWS_SESSION_TOKEN;
+    private String AWS_EXPIRATION;
+
+    private String S3_BUCKET_NAME;
+    private String S3_KEY_NAME;
+    private String AWS_REGION_NAME;
+
+    @Setter(AccessLevel.NONE)
+    @Autowired
+    BuildProperties buildProperties;
+
+    @PostConstruct
+    void initialize() {
+        setVersion(buildProperties.getVersion());
+        logger.info("groupId: {}", buildProperties.getGroup());
+        logger.info("artifactId: {}", buildProperties.getArtifact());
+        logger.info("version: {}", version);
+
+        String hostname;
+        try {
+            hostname = InetAddress.getLocalHost().getHostName();
+        } catch (UnknownHostException e) {
+            // Let's just use a random hostname
+            hostname = UUID.randomUUID().toString();
+            logger.warn("Unknown host error, using random hostname instead", e);
+        }
+
+        hostId = hostname;
+        logger.info("Host ID: {}", hostId);
+        logger.info("Initializing services on host {}", hostId);
+
+        if (kafkaBrokers == null) {
+            String kafkaBroker = CommonUtils.getEnvironmentVariable("KAFKA_BROKER_IP");
+
+            logger.info("ode.kafkaBrokers property not defined. Will try KAFKA_BROKER_IP => {}", kafkaBroker);
+
+            if (kafkaBroker == null) {
+                logger.warn(
+                        "Neither the ode.kafkaBrokers property nor the KAFKA_BROKER_IP environment variable is defined. Defaulting to localhost.");
+                kafkaBroker = "localhost";
+            }
+
+            kafkaBrokers = kafkaBroker + ":" + DEFAULT_KAFKA_PORT;
+        }
+
+        String kafkaType = CommonUtils.getEnvironmentVariable("KAFKA_TYPE");
+        if (kafkaType != null) {
+            confluentCloudEnabled = kafkaType.equals("CONFLUENT");
+            if (confluentCloudEnabled) {
+                logger.info("Enabling Confluent Cloud Integration");
+
+                confluentKey = CommonUtils.getEnvironmentVariable("CONFLUENT_KEY");
+                confluentSecret = CommonUtils.getEnvironmentVariable("CONFLUENT_SECRET");
+            }
+        }
+
+        // S3 Properties
+        S3_BUCKET_NAME = CommonUtils.getEnvironmentVariable("DEPOSIT_BUCKET_NAME", "");
+        AWS_REGION_NAME = CommonUtils.getEnvironmentVariable("REGION", "us-east-1");
+        S3_KEY_NAME = CommonUtils.getEnvironmentVariable("DEPOSIT_KEY_NAME", "");
+
+        K_AWS_ACCESS_KEY_ID = CommonUtils.getEnvironmentVariable("AWS_ACCESS_KEY_ID", "AccessKeyId");
+        K_AWS_SECRET_ACCESS_KEY = CommonUtils.getEnvironmentVariable("AWS_SECRET_ACCESS_KEY", "SecretAccessKey");
+        K_AWS_SESSION_TOKEN = CommonUtils.getEnvironmentVariable("AWS_SESSION_TOKEN", "SessionToken");
+        K_AWS_EXPIRATION = CommonUtils.getEnvironmentVariable("AWS_EXPIRATION", "Expiration");
+        API_ENDPOINT = CommonUtils.getEnvironmentVariable("API_ENDPOINT", "");
+        HEADER_Accept = CommonUtils.getEnvironmentVariable("HEADER_ACCEPT", "application/json");
+        HEADER_X_API_KEY = CommonUtils.getEnvironmentVariable("HEADER_X_API_KEY", "");
+    }
+
+    public Properties createStreamProperties(String name) {
+        Properties streamProps = new Properties();
+        streamProps.put(StreamsConfig.APPLICATION_ID_CONFIG, name);
+
+        streamProps.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaBrokers);
+
+        streamProps.put(StreamsConfig.DEFAULT_DESERIALIZATION_EXCEPTION_HANDLER_CLASS_CONFIG,
+                LogAndContinueExceptionHandler.class.getName());
+
+        streamProps.put(StreamsConfig.DEFAULT_TIMESTAMP_EXTRACTOR_CLASS_CONFIG,
+                LogAndSkipOnInvalidTimestamp.class.getName());
+
+
streamProps.put(StreamsConfig.DEFAULT_PRODUCTION_EXCEPTION_HANDLER_CLASS_CONFIG, + AlwaysContinueProductionExceptionHandler.class.getName()); + + streamProps.put(StreamsConfig.NUM_STREAM_THREADS_CONFIG, 2); + + // streamProps.put(StreamsConfig.producerPrefix("acks"), "all"); + streamProps.put(StreamsConfig.producerPrefix(ProducerConfig.ACKS_CONFIG), "all"); + + // Reduce cache buffering per topology to 1MB + streamProps.put(StreamsConfig.CACHE_MAX_BYTES_BUFFERING_CONFIG, 1 * 1024 * 1024L); + + // Decrease default commit interval. Default for 'at least once' mode of 30000ms + // is too slow. + streamProps.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, 100); + + // All the keys are Strings in this app + streamProps.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName()); + + // Configure the state store location + if (SystemUtils.IS_OS_LINUX) { + streamProps.put(StreamsConfig.STATE_DIR_CONFIG, "/var/lib/ode/kafka-streams"); + } else if (SystemUtils.IS_OS_WINDOWS) { + streamProps.put(StreamsConfig.STATE_DIR_CONFIG, "C:/temp/ode"); + } + // streamProps.put(StreamsConfig.STATE_DIR_CONFIG, "/var/lib/")\ + + // Increase max.block.ms and delivery.timeout.ms for streams + final int FIVE_MINUTES_MS = 5 * 60 * 1000; + streamProps.put(ProducerConfig.MAX_BLOCK_MS_CONFIG, FIVE_MINUTES_MS); + streamProps.put(ProducerConfig.DELIVERY_TIMEOUT_MS_CONFIG, FIVE_MINUTES_MS); + + // Disable batching + streamProps.put(ProducerConfig.BATCH_SIZE_CONFIG, 0); + + if (confluentCloudEnabled) { + streamProps.put("ssl.endpoint.identification.algorithm", "https"); + streamProps.put("security.protocol", "SASL_SSL"); + streamProps.put("sasl.mechanism", "PLAIN"); + + if (confluentKey != null && confluentSecret != null) { + String auth = "org.apache.kafka.common.security.plain.PlainLoginModule required " + + "username=\"" + confluentKey + "\" " + + "password=\"" + confluentSecret + "\";"; + streamProps.put("sasl.jaas.config", auth); + } + else { + logger.error("Environment variables CONFLUENT_KEY and CONFLUENT_SECRET are not set. 
Set these in the .env file to use Confluent Cloud"); + } + } + + + return streamProps; + } + + + + public String getProperty(String key) { + return env.getProperty(key); + } + + public String getProperty(String key, String defaultValue) { + return env.getProperty(key, defaultValue); + } + + public Object getProperty(String key, int i) { + return env.getProperty(key, Integer.class, i); + } + + + @Value("${spring.kafka.bootstrap-servers}") + public void setKafkaBrokers(String kafkaBrokers) { + this.kafkaBrokers = kafkaBrokers; + } + + @Override + public void setEnvironment(Environment environment) { + env = environment; + } +} diff --git a/jpo-s3-depositor/src/main/java/us/dot/its/jpo/ode/depositor/DepositorServiceController.java b/jpo-s3-depositor/src/main/java/us/dot/its/jpo/ode/depositor/DepositorServiceController.java new file mode 100644 index 0000000..95e3fe3 --- /dev/null +++ b/jpo-s3-depositor/src/main/java/us/dot/its/jpo/ode/depositor/DepositorServiceController.java @@ -0,0 +1,57 @@ +package us.dot.its.jpo.ode.depositor; + +import org.apache.kafka.streams.KafkaStreams; + +import java.util.concurrent.ConcurrentHashMap; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + + +import lombok.Getter; +import lombok.Setter; +import lombok.AccessLevel; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.DependsOn; +import org.springframework.context.annotation.Profile; +import org.springframework.kafka.core.KafkaTemplate; +import org.springframework.stereotype.Controller; + +import us.dot.its.jpo.ode.depositor.DepositorProperties; +import us.dot.its.jpo.ode.depositor.topologies.StreamsTopology; + + +@Controller +@DependsOn("createKafkaTopics") +@Profile("!test && !testConfig") +public class DepositorServiceController { + private static final Logger logger = LoggerFactory.getLogger(DepositorServiceController.class); + org.apache.kafka.common.serialization.Serdes bas; + + // Temporary for KafkaStreams that don't implement the Algorithm interface + @Getter + final ConcurrentHashMap streamsMap = new ConcurrentHashMap(); + + @Getter + final ConcurrentHashMap algoMap = new ConcurrentHashMap(); + + + + @Autowired + public DepositorServiceController(final DepositorProperties depositorProperties, + final KafkaTemplate kafkaTemplate) { + try { + logger.info("Starting {}", this.getClass().getSimpleName()); + + + + + + logger.info("All services started!"); + } catch (Exception e) { + logger.error("Encountered issue with creating topologies", e); + } + + } +} diff --git a/jpo-s3-depositor/src/main/java/us/dot/its/jpo/ode/depositor/KafkaConfig.java b/jpo-s3-depositor/src/main/java/us/dot/its/jpo/ode/depositor/KafkaConfig.java new file mode 100644 index 0000000..a1fe9f7 --- /dev/null +++ b/jpo-s3-depositor/src/main/java/us/dot/its/jpo/ode/depositor/KafkaConfig.java @@ -0,0 +1,51 @@ +// package us.dot.its.jpo.ode.depositor; + +// import java.util.ArrayList; +// import java.util.HashMap; +// import java.util.List; +// import java.util.Map; +// import java.util.Properties; +// import java.util.Set; +// import java.util.concurrent.ExecutionException; + +// import org.apache.kafka.clients.admin.Admin; +// import org.apache.kafka.clients.admin.ListTopicsOptions; +// import org.apache.kafka.clients.admin.ListTopicsResult; +// import org.apache.kafka.clients.admin.NewTopic; +// import org.apache.kafka.common.KafkaFuture; +// import org.apache.kafka.common.serialization.StringSerializer; +// import org.slf4j.Logger; +// import 
org.slf4j.LoggerFactory; +// import org.springframework.beans.factory.annotation.Autowired; +// import org.springframework.boot.context.properties.ConfigurationProperties; +// import org.springframework.context.annotation.Bean; +// import org.springframework.kafka.config.TopicBuilder; +// import org.springframework.kafka.core.DefaultKafkaProducerFactory; +// import org.springframework.kafka.core.KafkaAdmin; +// import org.springframework.kafka.core.KafkaAdmin.NewTopics; +// import org.springframework.kafka.core.KafkaTemplate; +// import org.springframework.kafka.core.ProducerFactory; +// import org.springframework.stereotype.Component; +// import org.apache.kafka.clients.producer.ProducerConfig; + +// @Configuration +// public class KafkaConfig { + +// @Autowired +// private DepositorProperties properties; + +// @Value(value = "${spring.kafka.bootstrap-servers}") +// private String bootstrapAddress; + +// @Bean(name = KafkaStreamsDefaultConfiguration.DEFAULT_STREAMS_CONFIG_BEAN_NAME) +// KafkaStreamsConfiguration kStreamsConfig() { +// Map props = new HashMap<>(); +// props.put(APPLICATION_ID_CONFIG, "jpo-s3-depositor"); +// props.put(BOOTSTRAP_SERVERS_CONFIG, bootstrapAddress); +// props.put(DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName()); +// props.put(DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName()); + +// return new KafkaStreamsConfiguration(props); +// } + +// } diff --git a/jpo-s3-depositor/src/main/java/us/dot/its/jpo/ode/depositor/SystemConfig.java b/jpo-s3-depositor/src/main/java/us/dot/its/jpo/ode/depositor/SystemConfig.java new file mode 100644 index 0000000..e64de9f --- /dev/null +++ b/jpo-s3-depositor/src/main/java/us/dot/its/jpo/ode/depositor/SystemConfig.java @@ -0,0 +1,52 @@ +/******************************************************************************* + * Copyright 2018 572682 + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy + * of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. 
+ ******************************************************************************/ +package us.dot.its.jpo.ode.depositor; + +public class SystemConfig implements SystemConfigMBean { + + private int threadCount; + private String schemaName; + + public SystemConfig(int numThreads, String schema) { + this.threadCount = numThreads; + this.schemaName = schema; + } + + @Override + public void setThreadCount(int noOfThreads) { + this.threadCount = noOfThreads; + } + + @Override + public int getThreadCount() { + return this.threadCount; + } + + @Override + public void setSchemaName(String schemaName) { + this.schemaName = schemaName; + } + + @Override + public String getSchemaName() { + return this.schemaName; + } + + @Override + public String doConfig() { + return "No of Threads=" + this.threadCount + " and DB Schema Name=" + this.schemaName; + } +} diff --git a/jpo-s3-depositor/src/main/java/us/dot/its/jpo/ode/depositor/SystemConfigMBean.java b/jpo-s3-depositor/src/main/java/us/dot/its/jpo/ode/depositor/SystemConfigMBean.java new file mode 100644 index 0000000..4f9a18d --- /dev/null +++ b/jpo-s3-depositor/src/main/java/us/dot/its/jpo/ode/depositor/SystemConfigMBean.java @@ -0,0 +1,32 @@ +/******************************************************************************* + * Copyright 2018 572682 + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not + * use this file except in compliance with the License. You may obtain a copy + * of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations under + * the License. + ******************************************************************************/ +package us.dot.its.jpo.ode.depositor; + +public interface SystemConfigMBean { + + public void setThreadCount(int noOfThreads); + + public int getThreadCount(); + + public void setSchemaName(String schemaName); + + public String getSchemaName(); + + // any method starting with get and set are considered + // as attributes getter and setter methods, so I am + // using do* for operation. 
+    public String doConfig();
+}
diff --git a/jpo-s3-depositor/src/main/java/us/dot/its/jpo/ode/depositor/services/MqttService.java b/jpo-s3-depositor/src/main/java/us/dot/its/jpo/ode/depositor/services/MqttService.java
new file mode 100644
index 0000000..b5f06df
--- /dev/null
+++ b/jpo-s3-depositor/src/main/java/us/dot/its/jpo/ode/depositor/services/MqttService.java
@@ -0,0 +1,27 @@
+package us.dot.its.jpo.ode.depositor.services;
+
+import jakarta.annotation.PreDestroy;
+
+import org.eclipse.paho.client.mqttv3.IMqttClient;
+import org.eclipse.paho.client.mqttv3.MqttClient;
+import org.eclipse.paho.client.mqttv3.MqttException;
+import org.eclipse.paho.client.mqttv3.MqttMessage;
+import org.springframework.stereotype.Service;
+
+@Service
+public class MqttService {
+
+    private final IMqttClient mqttClient;
+
+    public MqttService() throws MqttException {
+        mqttClient = new MqttClient("tcp://broker.hivemq.com:1883", MqttClient.generateClientId());
+        mqttClient.connect();
+    }
+
+    public void publish(String topic, String message) throws MqttException {
+        MqttMessage mqttMessage = new MqttMessage(message.getBytes());
+        mqttMessage.setQos(2);
+        mqttClient.publish(topic, mqttMessage);
+    }
+
+    @PreDestroy
+    public void cleanup() throws MqttException {
+        if (mqttClient.isConnected()) {
+            mqttClient.disconnect();
+        }
+    }
+}
\ No newline at end of file
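A usage sketch for the service above (not part of the patch). It assumes the Eclipse Paho client (org.eclipse.paho:org.eclipse.paho.client.mqttv3) is on the classpath — the new pom.xml does not declare it yet — and that the hardcoded public HiveMQ broker is reachable; the topic name is illustrative only:

    import us.dot.its.jpo.ode.depositor.services.MqttService;

    class MqttServiceSmokeTest {
        public static void main(String[] args) throws Exception {
            MqttService service = new MqttService();             // connects to tcp://broker.hivemq.com:1883
            service.publish("jpo/ode/depositor/test", "hello");  // QoS 2 publish; hypothetical topic
        }
    }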
diff --git a/jpo-s3-depositor/src/main/java/us/dot/its/jpo/ode/depositor/topologies/StreamsTopology.java b/jpo-s3-depositor/src/main/java/us/dot/its/jpo/ode/depositor/topologies/StreamsTopology.java
new file mode 100644
index 0000000..4e27462
--- /dev/null
+++ b/jpo-s3-depositor/src/main/java/us/dot/its/jpo/ode/depositor/topologies/StreamsTopology.java
@@ -0,0 +1,46 @@
+package us.dot.its.jpo.ode.depositor.topologies;
+
+import java.util.Properties;
+
+import org.apache.kafka.streams.KafkaStreams;
+import org.apache.kafka.streams.KafkaStreams.StateListener;
+import org.apache.kafka.streams.Topology;
+import org.apache.kafka.streams.errors.StreamsUncaughtExceptionHandler;
+import org.slf4j.Logger;
+
+/**
+ * General interface for an algorithm implemented as a Kafka Streams
+ * topology that takes streams parameters and exposes the underlying
+ * topology and streams objects to do queries on and get diagnostics from.
+ */
+public interface StreamsTopology {
+
+    void setStreamsProperties(Properties streamsProperties);
+
+    Properties getStreamsProperties();
+
+    KafkaStreams getStreams();
+
+    Topology getTopology();
+
+    Topology buildTopology();
+
+    /**
+     * Register a callback method that receives notifications when the KafkaStreams state changes.
+     * @param stateListener
+     */
+    void registerStateListener(StateListener stateListener);
+
+    /**
+     * Register exception handler for exceptions thrown from streams processes.
+     * @param exceptionHandler
+     */
+    void registerUncaughtExceptionHandler(StreamsUncaughtExceptionHandler exceptionHandler);
+}
diff --git a/jpo-s3-depositor/src/main/java/us/dot/its/jpo/ode/depositor/utils/CommonUtils.java b/jpo-s3-depositor/src/main/java/us/dot/its/jpo/ode/depositor/utils/CommonUtils.java
new file mode 100644
index 0000000..12558fb
--- /dev/null
+++ b/jpo-s3-depositor/src/main/java/us/dot/its/jpo/ode/depositor/utils/CommonUtils.java
@@ -0,0 +1,18 @@
+package us.dot.its.jpo.ode.depositor.utils;
+
+public class CommonUtils {
+    public static String getEnvironmentVariable(String variableName) {
+        return System.getenv(variableName);
+    }
+
+    public static String getEnvironmentVariable(String variableName, String defaultValue) {
+        String value = System.getenv(variableName);
+        if (value == null || value.isEmpty()) {
+            System.out.println("Environment variable " + variableName + " is not set or is empty; using default value: " + defaultValue);
+            return defaultValue;
+        }
+        return value;
+    }
+}
diff --git a/jpo-s3-depositor/src/main/resources/application.yaml b/jpo-s3-depositor/src/main/resources/application.yaml
new file mode 100644
index 0000000..57b70d1
--- /dev/null
+++ b/jpo-s3-depositor/src/main/resources/application.yaml
@@ -0,0 +1,29 @@
+# General Properties
+# ==================
+groupId: ^project.groupId^
+artifactId: ^project.artifactId^
+version: ^project.version^
+# server.port: 8082
+
+# Kafka properties
+spring.kafka.bootstrap-servers: ${KAFKA_BROKER_IP:localhost}:9092
+
+logging.level.org.apache.kafka: INFO
+logging.level: INFO
+log4j.logger.kafka: OFF
+log4j.logger.org.apache.kafka: OFF
+
+# Custom kafka properties
+kafka.topics:
+  autoCreateTopics: true # Override auto-create in test properties
+  numPartitions: 1
+  numReplicas: 1
+  sinkTopics:
+    - name:
+
+  # createTopics:
+  #   - name: topic.CmAppHealthNotifications
+  #     cleanupPolicy: delete
+  #     retentionMs: 300000
diff --git a/jpo-s3-depositor/src/main/resources/logback.xml b/jpo-s3-depositor/src/main/resources/logback.xml
new file mode 100644
index 0000000..91e7718
--- /dev/null
+++ b/jpo-s3-depositor/src/main/resources/logback.xml
@@ -0,0 +1,20 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<configuration>
+
+    <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
+        <encoder>
+            <pattern>%date{"yyyy-MM-dd HH:mm:ss", UTC} [%thread] %-5level %logger{0} - %msg %n</pattern>
+        </encoder>
+    </appender>
+
+    <root level="INFO">
+        <appender-ref ref="STDOUT" />
+    </root>
+
+</configuration>
diff --git a/jpo-s3-depositor/src/test/java/us/dot/its/jpo/ode/jpo_s3_depositor/JpoS3DepositorApplicationTests.java b/jpo-s3-depositor/src/test/java/us/dot/its/jpo/ode/jpo_s3_depositor/JpoS3DepositorApplicationTests.java
new file mode 100644
index 0000000..6d8c905
--- /dev/null
+++ b/jpo-s3-depositor/src/test/java/us/dot/its/jpo/ode/jpo_s3_depositor/JpoS3DepositorApplicationTests.java
@@ -0,0 +1,13 @@
+package us.dot.its.jpo.ode.jpo_s3_depositor;
+
+import org.junit.jupiter.api.Test;
+import org.springframework.boot.test.context.SpringBootTest;
+
+@SpringBootTest
+class JpoS3DepositorApplicationTests {
+
+    @Test
+    void contextLoads() {
+    }
+
+}
diff --git a/pom.xml b/pom.xml
deleted file mode 100644
index 6b52fa1..0000000
--- a/pom.xml
+++ /dev/null
@@ -1,168 +0,0 @@
- 
- 4.0.0
- 
- usdot.jpo.ode
- jpo-aws-depositor
- 1.5.0-SNAPSHOT
- jar
- JPO AWS Depositor
- 
- 21
- 21
- 1.49
- 
- usdot-jpo-ode
- 
- -javaagent:${user.home}/.m2/repository/org/jmockit/jmockit/${jmockit.version}/jmockit-${jmockit.version}.jar
- 
- 0.8.11
- 
- 
- 
- org.jmockit
- jmockit
- ${jmockit.version}
- test
- 
- 
- org.junit.jupiter
- junit-jupiter-api
- 5.9.3
- test
- 
- 
- junit
- junit
- 4.13.2
- test
- 
- 
- org.mockito
-
mockito-core - 3.3.3 - test - - - - - - org.apache.kafka - kafka_2.11 - 2.4.1 - - - - commons-cli - commons-cli - 1.3 - - - - org.apache.commons - commons-lang3 - 3.0 - - - - com.amazonaws - aws-java-sdk - 1.11.288 - - - - ch.qos.logback - logback-classic - 1.2.13 - - - javax.xml.bind - jaxb-api - 2.3.0 - - - - org.json - json - 20231013 - - - - org.slf4j - slf4j-api - 1.7.36 - compile - - - - org.slf4j - slf4j-simple - 1.7.36 - compile - - - - com.google.cloud - google-cloud-storage - 2.37.0 - - - - ${project.artifactId} - - - - org.apache.maven.plugins - maven-assembly-plugin - 3.5.0 - - - - us.dot.its.jpo.ode.aws.depositor.AwsDepositor - - - - jar-with-dependencies - - - - - org.apache.maven.plugins - maven-surefire-plugin - 3.2.5 - - -javaagent:${user.home}/.m2/repository/org/jmockit/jmockit/${jmockit.version}/jmockit-${jmockit.version}.jar -Xshare:off - - ${loader.path} - ${project.build.directory} - - - testValue - - testAccessKey - testSecretKey - testSessionToken - 2020-01-01 00:00:00 - testApiEndpoint - testConfluentKey - testConfluentSecret - - - - - org.apache.maven.plugins - maven-surefire-plugin - 3.2.5 - - - - - - - - github - GitHub Packages - https://maven.pkg.github.com/${github_organization}/jpo-s3-deposit - - - diff --git a/src/main/resources/log4j.properties b/src/main/resources/log4j.properties deleted file mode 100644 index c9dd86c..0000000 --- a/src/main/resources/log4j.properties +++ /dev/null @@ -1,10 +0,0 @@ -# Define the appender for AwsDepositor -log4j.appender.awsDepositor=org.apache.log4j.ConsoleAppender -log4j.appender.awsDepositor.layout=org.apache.log4j.PatternLayout -log4j.appender.awsDepositor.layout.ConversionPattern=%d [%t] %-5p %c{1} - %m%n - -# Set the log level for AwsDepositor -log4j.logger.us.dot.its.jpo.ode.aws.depositor.AwsDepositor=WARN -log4j.additivity.us.dot.its.jpo.ode.aws.depositor.AwsDepositor=false -log4j.appender.awsDepositor.threshold=DEBUG -log4j.appender.awsDepositor.Target=System.out diff --git a/src/main/resources/logback.xml b/src/main/resources/logback.xml deleted file mode 100644 index 52059b2..0000000 --- a/src/main/resources/logback.xml +++ /dev/null @@ -1,15 +0,0 @@ - - - - - - - %date{"yyyy-MM-dd HH:mm:ss.SSS", UTC} [%thread] %-5level %logger{0} - %msg %n - - - - - - - - diff --git a/src/test/java/us/dot/its/jpo/ode/aws/depositor/AddConfluentPropertiesTest.java b/src/test/java/us/dot/its/jpo/ode/aws/depositor/AddConfluentPropertiesTest.java deleted file mode 100644 index c3862b1..0000000 --- a/src/test/java/us/dot/its/jpo/ode/aws/depositor/AddConfluentPropertiesTest.java +++ /dev/null @@ -1,19 +0,0 @@ -package us.dot.its.jpo.ode.aws.depositor; - -import java.util.Properties; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import org.junit.jupiter.api.Test; - -public class AddConfluentPropertiesTest { - @Test - public void testAddConfluentProperties() { - Properties props = new Properties(); - AwsDepositor.addConfluentProperties(props); - - assertEquals("https", props.getProperty("ssl.endpoint.identification.algorithm")); - assertEquals("SASL_SSL", props.getProperty("security.protocol")); - assertEquals("PLAIN", props.getProperty("sasl.mechanism")); - assertEquals("org.apache.kafka.common.security.plain.PlainLoginModule required username=\"testConfluentKey\" password=\"testConfluentSecret\";" , props.getProperty("sasl.jaas.config")); - } -} diff --git a/src/test/java/us/dot/its/jpo/ode/aws/depositor/BuildFirehoseClientTest.java b/src/test/java/us/dot/its/jpo/ode/aws/depositor/BuildFirehoseClientTest.java 
deleted file mode 100644
index 1683c59..0000000
--- a/src/test/java/us/dot/its/jpo/ode/aws/depositor/BuildFirehoseClientTest.java
+++ /dev/null
@@ -1,18 +0,0 @@
-package us.dot.its.jpo.ode.aws.depositor;
-
-import static org.junit.jupiter.api.Assertions.assertNotNull;
-import org.junit.jupiter.api.Test;
-
-import com.amazonaws.services.kinesisfirehose.AmazonKinesisFirehoseAsync;
-
-public class BuildFirehoseClientTest {
-    @Test
-    public void testBuildFirehoseClient() {
-        AwsDepositor awsDepositor = new AwsDepositor();
-        String awsRegion = "us-east-1";
-
-        AmazonKinesisFirehoseAsync firehose = awsDepositor.buildFirehoseClient(awsRegion);
-
-        assertNotNull(firehose);
-    }
-}
diff --git a/src/test/java/us/dot/its/jpo/ode/aws/depositor/ConvertStringToByteBufferTest.java b/src/test/java/us/dot/its/jpo/ode/aws/depositor/ConvertStringToByteBufferTest.java
deleted file mode 100644
index f3ea403..0000000
--- a/src/test/java/us/dot/its/jpo/ode/aws/depositor/ConvertStringToByteBufferTest.java
+++ /dev/null
@@ -1,20 +0,0 @@
-package us.dot.its.jpo.ode.aws.depositor;
-
-import java.nio.ByteBuffer;
-import java.nio.charset.StandardCharsets;
-
-import static org.junit.jupiter.api.Assertions.assertEquals;
-import org.junit.jupiter.api.Test;
-
-public class ConvertStringToByteBufferTest {
-    @Test
-    public void testConvertStringToByteBuffer() {
-        AwsDepositor awsDepositor = new AwsDepositor();
-        String input = "Test";
-        ByteBuffer expected = ByteBuffer.wrap(input.getBytes(StandardCharsets.UTF_8));
-
-        ByteBuffer result = awsDepositor.convertStringToByteBuffer(input, StandardCharsets.UTF_8);
-
-        assertEquals(expected, result);
-    }
-}
diff --git a/src/test/java/us/dot/its/jpo/ode/aws/depositor/CreateS3ClientTest.java b/src/test/java/us/dot/its/jpo/ode/aws/depositor/CreateS3ClientTest.java
deleted file mode 100644
index 79bb555..0000000
--- a/src/test/java/us/dot/its/jpo/ode/aws/depositor/CreateS3ClientTest.java
+++ /dev/null
@@ -1,25 +0,0 @@
-package us.dot.its.jpo.ode.aws.depositor;
-
-import static org.junit.jupiter.api.Assertions.assertNotNull;
-import static org.junit.jupiter.api.Assertions.assertThrows;
-import org.junit.jupiter.api.Test;
-
-import com.amazonaws.services.s3.AmazonS3;
-
-public class CreateS3ClientTest {
-
-    @Test
-    public void testCreateS3Client() {
-        AwsDepositor awsDepositor = new AwsDepositor();
-        AmazonS3 s3Client = awsDepositor.createS3Client("us-east-1");
-        assertNotNull(s3Client);
-    }
-
-    @Test
-    public void testCreateS3Client_InvalidCredentials() {
-        AwsDepositor awsDepositor = new AwsDepositor();
-        assertThrows(IllegalArgumentException.class, () -> {
-            awsDepositor.createS3Client("invalid-region");
-        });
-    }
-}
diff --git a/src/test/java/us/dot/its/jpo/ode/aws/depositor/CreateSampleFileTest.java b/src/test/java/us/dot/its/jpo/ode/aws/depositor/CreateSampleFileTest.java
deleted file mode 100644
index 6570fd4..0000000
--- a/src/test/java/us/dot/its/jpo/ode/aws/depositor/CreateSampleFileTest.java
+++ /dev/null
@@ -1,23 +0,0 @@
-package us.dot.its.jpo.ode.aws.depositor;
-
-import java.io.File;
-import java.io.IOException;
-
-import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.junit.jupiter.api.Assertions.assertNotNull;
-import static org.junit.jupiter.api.Assertions.assertTrue;
-import org.junit.jupiter.api.Test;
-
-public class CreateSampleFileTest {
-    @Test
-    public void testCreateSampleFile() throws IOException {
-        AwsDepositor awsDepositor = new AwsDepositor();
-        String json = "{\"key\": \"value\"}";
-        File file = awsDepositor.createSampleFile(json);
-        assertNotNull(file);
-        assertTrue(file.exists());
-        assertTrue(file.isFile());
-        assertEquals(".json", file.getName().substring(file.getName().lastIndexOf(".")));
-        file.delete();
-    }
-}
diff --git a/src/test/java/us/dot/its/jpo/ode/aws/depositor/DepositToFirehoseTest.java b/src/test/java/us/dot/its/jpo/ode/aws/depositor/DepositToFirehoseTest.java
deleted file mode 100644
index 5fd06e8..0000000
--- a/src/test/java/us/dot/its/jpo/ode/aws/depositor/DepositToFirehoseTest.java
+++ /dev/null
@@ -1,75 +0,0 @@
-package us.dot.its.jpo.ode.aws.depositor;
-
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.nio.charset.Charset;
-import java.time.LocalDateTime;
-import java.util.concurrent.ExecutionException;
-
-import org.apache.kafka.clients.consumer.ConsumerRecord;
-import org.json.JSONObject;
-import static org.junit.jupiter.api.Assertions.assertEquals;
-import org.junit.jupiter.api.Test;
-import org.mockito.ArgumentCaptor;
-import static org.mockito.Mockito.doReturn;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.spy;
-import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.when;
-
-import com.amazonaws.services.kinesisfirehose.AmazonKinesisFirehoseAsync;
-import com.amazonaws.services.kinesisfirehose.model.PutRecordRequest;
-
-public class DepositToFirehoseTest {
-
-    @Test
-    public void testDepositToFirehose() throws InterruptedException, ExecutionException, IOException {
-
-        // Create a mock AmazonKinesisFirehoseAsync instance
-        AmazonKinesisFirehoseAsync firehose = mock(AmazonKinesisFirehoseAsync.class);
-
-        // Create a mock ConsumerRecord
-        ConsumerRecord mockRecord = mock(ConsumerRecord.class);
-        when(mockRecord.value()).thenReturn("Test Record");
-
-        AwsDepositor depositor = spy(new AwsDepositor());
-        doReturn(LocalDateTime.of(2024, 6, 26, 12, 0, 0)).when(depositor).getLocalDateTime();
-        doReturn(LocalDateTime.of(2024, 6, 26, 10, 0, 0)).when(depositor).getExpirationDateTime();
-
-        JSONObject generateAwsReturnVal = new JSONObject();
-        generateAwsReturnVal.put("testAccessKey", "test-access-key-id");
-        generateAwsReturnVal.put("testSecretKey", "test-secret-key");
-        generateAwsReturnVal.put("testSessionToken", "test-token");
-        generateAwsReturnVal.put("2020-01-01 00:00:00", "test-expiration");
-
-        doReturn(generateAwsReturnVal).when(depositor).generateAWSProfile();
-
-        // pull in necessary environment variables
-        depositor.depositorSetup();
-
-        // Call the depositToFirehose method
-        depositor.depositToFirehose(firehose, mockRecord);
-
-        // Verify that the putRecordAsync method was called on the mock AmazonKinesisFirehoseAsync instance
-        ArgumentCaptor putRecordRequestCaptor = ArgumentCaptor.forClass(PutRecordRequest.class);
-        verify(firehose).putRecordAsync(putRecordRequestCaptor.capture());
-
-        // Assert PutRecordRequest value is as expected
-        PutRecordRequest putRecordRequestResult = putRecordRequestCaptor.getValue();
-        assertEquals("Test Record\n", convertByteBufferToString(putRecordRequestResult.getRecord().getData()));
-    }
-
-    @Test
-    public void testGetExpirationDateTime() {
-        AwsDepositor depositor = new AwsDepositor();
-        depositor.depositorSetup();
-        LocalDateTime result = depositor.getExpirationDateTime();
-        assertEquals(LocalDateTime.of(2020, 01, 01, 0, 0, 0), result);
-    }
-
-    private String convertByteBufferToString(ByteBuffer buffer) {
-        byte[] bytes = new byte[buffer.remaining()];
-        buffer.get(bytes);
-        return new String(bytes, Charset.defaultCharset());
-    }
-}
diff --git a/src/test/java/us/dot/its/jpo/ode/aws/depositor/DepositToGCSTest.java b/src/test/java/us/dot/its/jpo/ode/aws/depositor/DepositToGCSTest.java
deleted file mode 100644
index b0cbdef..0000000
--- a/src/test/java/us/dot/its/jpo/ode/aws/depositor/DepositToGCSTest.java
+++ /dev/null
@@ -1,35 +0,0 @@
-package us.dot.its.jpo.ode.aws.depositor;
-
-import org.apache.kafka.clients.consumer.ConsumerRecord;
-import org.junit.jupiter.api.Test;
-import static org.mockito.ArgumentMatchers.any;
-import static org.mockito.ArgumentMatchers.anyString;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.when;
-
-import com.google.cloud.storage.Blob;
-import com.google.cloud.storage.Bucket;
-import com.google.cloud.storage.Storage;
-
-public class DepositToGCSTest {
-    @Test
-    public void testDepositToGCS() {
-        Storage gcsStorage = mock(Storage.class);
-        Bucket bucket = mock(Bucket.class);
-        Blob blob = mock(Blob.class);
-
-        ConsumerRecord record = mock(ConsumerRecord.class);
-        when(record.value()).thenReturn("test");
-
-        when(gcsStorage.get(anyString())).thenReturn(bucket);
-        when(bucket.create(anyString(), any(byte[].class))).thenReturn(blob);
-
-        AwsDepositor awsDepositor = new AwsDepositor();
-
-        awsDepositor.depositToGCS(gcsStorage, "depositBucket", record);
-
-        verify(gcsStorage).get("depositBucket");
-        verify(bucket).create(anyString(), any(byte[].class));
-    }
-}
diff --git a/src/test/java/us/dot/its/jpo/ode/aws/depositor/DepositToS3Test.java b/src/test/java/us/dot/its/jpo/ode/aws/depositor/DepositToS3Test.java
deleted file mode 100644
index 6da5f0a..0000000
--- a/src/test/java/us/dot/its/jpo/ode/aws/depositor/DepositToS3Test.java
+++ /dev/null
@@ -1,41 +0,0 @@
-package us.dot.its.jpo.ode.aws.depositor;
-
-import java.io.IOException;
-
-import org.apache.kafka.clients.consumer.ConsumerRecord;
-import static org.junit.Assert.assertNotNull;
-import org.junit.jupiter.api.Test;
-import org.mockito.ArgumentCaptor;
-import static org.mockito.ArgumentMatchers.any;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.when;
-
-import com.amazonaws.services.s3.AmazonS3;
-import com.amazonaws.services.s3.model.PutObjectRequest;
-import com.amazonaws.services.s3.model.PutObjectResult;
-
-
-public class DepositToS3Test {
-    @Test
-    public void testDepositToS3() throws IOException {
-        // Mock necessary classes
-        AmazonS3 s3 = mock(AmazonS3.class);
-        ConsumerRecord mockRecord = mock(ConsumerRecord.class);
-
-        PutObjectResult result = new PutObjectResult();
-        when(mockRecord.value()).thenReturn("Test Record");
-        when(s3.putObject(any())).thenReturn(result);
-
-        AwsDepositor awsDepositor = new AwsDepositor();
-        awsDepositor.depositToS3(s3, mockRecord);
-
-        // Verify that the putObject method was called on the mock AmazonS3 instance
-        ArgumentCaptor putObjectRequestCaptor = ArgumentCaptor.forClass(PutObjectRequest.class);
-        verify(s3).putObject(putObjectRequestCaptor.capture());
-
-        // Assert that the putObjectRequest was created correctly
-        PutObjectRequest putObjectRequestResult = putObjectRequestCaptor.getValue();
-        assertNotNull(putObjectRequestResult);
-    }
-}
diff --git a/src/test/java/us/dot/its/jpo/ode/aws/depositor/GenerateAWSProfileTest.java b/src/test/java/us/dot/its/jpo/ode/aws/depositor/GenerateAWSProfileTest.java
deleted file mode 100644
index 21225a3..0000000
--- a/src/test/java/us/dot/its/jpo/ode/aws/depositor/GenerateAWSProfileTest.java
+++ /dev/null
@@ -1,71 +0,0 @@
-package us.dot.its.jpo.ode.aws.depositor;
-
-import java.io.IOException;
-
-import org.apache.http.HttpVersion;
-import org.apache.http.client.methods.CloseableHttpResponse;
-import org.apache.http.client.methods.HttpPost;
-import org.apache.http.entity.StringEntity;
-import org.apache.http.impl.client.CloseableHttpClient;
-import org.apache.http.message.BasicStatusLine;
-import org.json.JSONObject;
-import static org.junit.Assert.assertThrows;
-import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.junit.jupiter.api.Assertions.assertNotNull;
-import org.junit.jupiter.api.Test;
-import static org.mockito.ArgumentMatchers.any;
-import static org.mockito.Mockito.doReturn;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.spy;
-import static org.mockito.Mockito.times;
-import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.when;
-
-public class GenerateAWSProfileTest {
-    @Test
-    void testGenerateAWSProfileSuccess() throws Exception {
-
-        AwsDepositor depositor = spy(new AwsDepositor());
-
-        // Mock the CloseableHttpResponse
-        CloseableHttpResponse mockResponse = mock(CloseableHttpResponse.class);
-        when(mockResponse.getStatusLine()).thenReturn(new BasicStatusLine(HttpVersion.HTTP_1_1, 200, "OK"));
-        when(mockResponse.getEntity()).thenReturn(new StringEntity("{\"key\":\"value\"}"));
-
-        // Mock the CloseableHttpClient
-        CloseableHttpClient mockClient = mock(CloseableHttpClient.class);
-        when(mockClient.execute(any())).thenReturn(mockResponse);
-
-        doReturn(mockClient).when(depositor).getHttpClient();
-
-        depositor.depositorSetup();
-        JSONObject result = depositor.generateAWSProfile();
-
-        assertNotNull(result);
-        assertEquals("value", result.getString("key"));
-
-        verify(mockClient, times(1)).execute((HttpPost) any());
-        verify(mockResponse, times(1)).close();
-        verify(mockClient, times(1)).close();
-    }
-
-    @Test
-    void testGenerateAWSProfileException() throws IOException {
-        AwsDepositor depositor = spy(new AwsDepositor());
-
-        // Mock the CloseableHttpResponse
-        CloseableHttpResponse mockResponse = mock(CloseableHttpResponse.class);
-        when(mockResponse.getStatusLine()).thenReturn(new BasicStatusLine(HttpVersion.HTTP_1_1, 200, "OK"));
-        when(mockResponse.getEntity()).thenReturn(null);
-
-        // Mock the CloseableHttpClient
-        CloseableHttpClient mockClient = mock(CloseableHttpClient.class);
-        when(mockClient.execute(any())).thenReturn(mockResponse);
-
-        doReturn(mockClient).when(depositor).getHttpClient();
-        Exception exception = assertThrows(Exception.class, depositor::generateAWSProfile);
-
-        // Verify the exception
-        assertNotNull(exception);
-    }
-}
diff --git a/src/test/java/us/dot/its/jpo/ode/aws/depositor/GetEnvironmentVariableTest.java b/src/test/java/us/dot/its/jpo/ode/aws/depositor/GetEnvironmentVariableTest.java
deleted file mode 100644
index ddc1c83..0000000
--- a/src/test/java/us/dot/its/jpo/ode/aws/depositor/GetEnvironmentVariableTest.java
+++ /dev/null
@@ -1,31 +0,0 @@
-package us.dot.its.jpo.ode.aws.depositor;
-
-import static org.junit.jupiter.api.Assertions.assertEquals;
-import org.junit.jupiter.api.Test;
-
-public class GetEnvironmentVariableTest {
-    private final String TEST_VARIABLE = "TEST_VARIABLE";
-    private final String TEST_VARIABLE_NO_ENV = "TEST_VARIABLE_NO_ENV";
-    private final String TEST_VARIABLE_EMPTY = "TEST_VARIABLE_EMPTY";
-    private final String DEFAULT_VALUE = "default";
-
-    @Test
-    void testGetEnvironmentVariableExists() throws Exception {
-        String expectedValue = "testValue";
-
-        // Test when the environment variable is set
-        String result = AwsDepositor.getEnvironmentVariable(TEST_VARIABLE, "");
-        assertEquals(expectedValue, result);
-    }
-
-    @Test
-    void testGetEnvironmentVariableNotSetOrEmpty() {
-        // Test when the environment variable is not set
-        String notSetResult = AwsDepositor.getEnvironmentVariable(TEST_VARIABLE_NO_ENV, DEFAULT_VALUE);
-        assertEquals(DEFAULT_VALUE, notSetResult);
-
-        // Test when the environment variable is empty
-        String emptyResult = AwsDepositor.getEnvironmentVariable(TEST_VARIABLE_EMPTY, DEFAULT_VALUE);
-        assertEquals(DEFAULT_VALUE, emptyResult);
-    }
-}
\ No newline at end of file
diff --git a/src/test/java/us/dot/its/jpo/ode/aws/depositor/RunTest.java b/src/test/java/us/dot/its/jpo/ode/aws/depositor/RunTest.java
deleted file mode 100644
index 6bbec89..0000000
--- a/src/test/java/us/dot/its/jpo/ode/aws/depositor/RunTest.java
+++ /dev/null
@@ -1,86 +0,0 @@
-package us.dot.its.jpo.ode.aws.depositor;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.kafka.clients.consumer.ConsumerRecord;
-import org.apache.kafka.clients.consumer.ConsumerRecords;
-import org.apache.kafka.clients.consumer.KafkaConsumer;
-import org.apache.kafka.common.TopicPartition;
-import org.json.JSONObject;
-import org.junit.jupiter.api.Test;
-import static org.mockito.ArgumentMatchers.any;
-import static org.mockito.Mockito.doNothing;
-import static org.mockito.Mockito.doReturn;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.spy;
-import static org.mockito.Mockito.times;
-import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.when;
-
-public class RunTest {
-    @Test
-    public void testRunNoRecords() throws Exception {
-        AwsDepositor depositor = spy(new AwsDepositor());
-
-        KafkaConsumer mockConsumer = mock(KafkaConsumer.class);
-        when(mockConsumer.poll(any())).thenReturn(null);
-
-        doReturn(mockConsumer).when(depositor).getKafkaConsumer(any());
-        doReturn(true, true, false).when(depositor).getRunDepositor();
-
-        JSONObject generateAwsReturnVal = new JSONObject();
-        generateAwsReturnVal.put("testAccessKey", "test-access-key-id");
-        generateAwsReturnVal.put("testSecretKey", "test-secret-key");
-        generateAwsReturnVal.put("testSessionToken", "test-token");
-        generateAwsReturnVal.put("2020-01-01 00:00:00", "test-expiration");
-
-        doReturn(generateAwsReturnVal).when(depositor).generateAWSProfile();
-
-        depositor.run();
-
-        verify(depositor, times(1)).getKafkaConsumer(any());
-        verify(depositor, times(4)).getRunDepositor();
-    }
-
-    @Test
-    public void testRunRecords() throws Exception {
-        AwsDepositor depositor = spy(new AwsDepositor());
-
-        KafkaConsumer mockConsumer = mock(KafkaConsumer.class);
-        when(mockConsumer.poll(any())).thenReturn(null);
-
-        doReturn(mockConsumer).when(depositor).getKafkaConsumer(any());
-        doReturn(true, true, false).when(depositor).getRunDepositor();
-
-        JSONObject generateAwsReturnVal = new JSONObject();
-        generateAwsReturnVal.put("testAccessKey", "test-access-key-id");
-        generateAwsReturnVal.put("testSecretKey", "test-secret-key");
-        generateAwsReturnVal.put("testSessionToken", "test-token");
-        generateAwsReturnVal.put("2020-01-01 00:00:00", "test-expiration");
-
-        doReturn(generateAwsReturnVal).when(depositor).generateAWSProfile();
-
-        doNothing().when(depositor).depositToFirehose(any(), any());
-
-        List<ConsumerRecord<String, String>> records = new ArrayList<>();
-        records.add(new ConsumerRecord<>("topic", 0, 0, "test", "test-value"));
-
-        TopicPartition topicPartition = new TopicPartition("topic", 0);
-
-        Map<TopicPartition, List<ConsumerRecord<String, String>>> recordsMap = new HashMap<>();
-        recordsMap.put(topicPartition, records);
-
-        ConsumerRecords mockRecords = new ConsumerRecords<>(recordsMap);
-
-        when(mockConsumer.poll(any())).thenReturn(mockRecords);
-
-        depositor.run();
-
-        verify(depositor, times(1)).getKafkaConsumer(any());
-        verify(depositor, times(4)).getRunDepositor();
-        verify(depositor, times(1)).depositToFirehose(any(), any());
-    }
-}