Merge pull request #227 from xtuml/226_start_stop_start
job:  #226  start/stop/start config
cortlandstarrett authored Jun 19, 2024
2 parents 01eebda + 978d2cf commit 9d927fa
Showing 6 changed files with 183 additions and 11 deletions.
2 changes: 1 addition & 1 deletion bin/.env
@@ -1 +1 @@
-MASL_VERSION=4.4.4-kafka-polling
+MASL_VERSION=4.4.5
2 changes: 1 addition & 1 deletion deploy/.env
@@ -1 +1 @@
-MASL_VERSION=4.4.4-kafka-polling
+MASL_VERSION=4.4.5
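The pinned MASL_VERSION feeds the ${MASL_VERSION:-latest} image tags in the compose files below. A quick sanity check of the resolved tag (a sketch only, assuming it is run from the deploy directory so docker compose picks up ./.env automatically):

# Sketch: confirm the pinned version resolves into the rendered compose config.
cd deploy
grep MASL_VERSION .env                                        # expect MASL_VERSION=4.4.5
docker compose -f docker-compose.kafka.yml config | grep image:
# expect tags such as levistarrett/masl-dev:4.4.5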
6 changes: 3 additions & 3 deletions deploy/docker-compose.kafka.yml
@@ -24,7 +24,7 @@ services:
logging:
driver: local
working_dir: /root
-command: bash -c "source /work/build/Release/generators/conanrun.sh && export LD_LIBRARY_PATH=$${LD_LIBRARY_PATH}:/work/build/Release/lib && /work/build/Release/bin/JM_PROC_transient -log-config config/log-pv-kafka.properties -configFile ${CONFIG_FILE:-pv-config.json} -util Kafka -kafka-broker-list kafka:9093 -util MetaData"
+command: bash -c "source /work/build/Release/generators/conanrun.sh && export LD_LIBRARY_PATH=$${LD_LIBRARY_PATH}:/work/build/Release/lib && /work/build/Release/bin/JM_PROC_transient -log-config config/log-pv-kafka.properties -configFile ${CONFIG_FILE:-pv-config.json} -util Kafka -kafka-broker-list kafka:9093 -kafka-group-id PVKafkaGroup -util MetaData"

istore_proc:
image: levistarrett/masl-dev:${MASL_VERSION:-latest}
@@ -44,7 +44,7 @@ services:
logging:
driver: local
working_dir: /root
-command: bash -c "source /work/build/Release/generators/conanrun.sh && export LD_LIBRARY_PATH=$${LD_LIBRARY_PATH}:/work/build/Release/lib && /work/build/Release/bin/ISTORE_PROC_sqlite -db ./InvariantStore/InvariantStore.db -log-config config/log-pv-kafka.properties -util Kafka -kafka-broker-list kafka:9093 -util MetaData"
+command: bash -c "source /work/build/Release/generators/conanrun.sh && export LD_LIBRARY_PATH=$${LD_LIBRARY_PATH}:/work/build/Release/lib && /work/build/Release/bin/ISTORE_PROC_sqlite -db ./InvariantStore/InvariantStore.db -log-config config/log-pv-kafka.properties -util Kafka -kafka-broker-list kafka:9093 -kafka-offset-reset latest -util MetaData"

aeo_svdc_proc:
image: levistarrett/masl-dev:${MASL_VERSION:-latest}
@@ -65,7 +65,7 @@ services:
logging:
driver: local
working_dir: /root
-command: bash -c "source /work/build/Release/generators/conanrun.sh && export LD_LIBRARY_PATH=$${LD_LIBRARY_PATH}:/work/build/Release/lib && /work/build/Release/bin/AEO_SVDC_PROC_transient -log-config config/log-pv-kafka.properties -configFile ${CONFIG_FILE:-pv-config.json} -util Kafka -kafka-broker-list kafka:9093 -util MetaData"
+command: bash -c "source /work/build/Release/generators/conanrun.sh && export LD_LIBRARY_PATH=$${LD_LIBRARY_PATH}:/work/build/Release/lib && /work/build/Release/bin/AEO_SVDC_PROC_transient -log-config config/log-pv-kafka.properties -configFile ${CONFIG_FILE:-pv-config.json} -util Kafka -kafka-broker-list kafka:9093 -kafka-offset-reset latest -util MetaData"

zookeeper:
image: levistarrett/zookeeper:3.4.6
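The added -kafka-group-id PVKafkaGroup and -kafka-offset-reset latest arguments presumably map to the standard Kafka consumer settings group.id and auto.offset.reset: a fixed group id lets a stopped and restarted deployment resume from its committed offsets, and "latest" makes a consumer with no committed offset start at the end of the topic rather than replaying history. A minimal way to inspect the group after a stop/start cycle (a sketch only, assuming the broker service is named kafka and its image bundles the stock Kafka CLI tools):

# Sketch: show committed offsets and lag for the pinned consumer group.
docker compose -f docker-compose.kafka.yml exec kafka \
  kafka-consumer-groups.sh --bootstrap-server kafka:9093 --describe --group PVKafkaGroup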
6 changes: 3 additions & 3 deletions deploy/docker-compose.onlypv.yml
@@ -21,7 +21,7 @@ services:
logging:
driver: local
working_dir: /root
-command: bash -c "source /work/build/Release/generators/conanrun.sh && export LD_LIBRARY_PATH=$${LD_LIBRARY_PATH}:/work/build/Release/lib && /work/build/Release/bin/JM_PROC_transient -log-config config/log-pv-kafka.properties -configFile ${CONFIG_FILE:-pv-config.json} -util Kafka -kafka-broker-list kafka:9093 -util MetaData"
+command: bash -c "source /work/build/Release/generators/conanrun.sh && export LD_LIBRARY_PATH=$${LD_LIBRARY_PATH}:/work/build/Release/lib && /work/build/Release/bin/JM_PROC_transient -log-config config/log-pv-kafka.properties -configFile ${CONFIG_FILE:-pv-config.json} -util Kafka -kafka-broker-list kafka:9093 -kafka-group-id PVKafkaGroup -util MetaData"

istore_proc:
image: levistarrett/masl-dev:${MASL_VERSION:-latest}
@@ -38,7 +38,7 @@ services:
logging:
driver: local
working_dir: /root
-command: bash -c "source /work/build/Release/generators/conanrun.sh && export LD_LIBRARY_PATH=$${LD_LIBRARY_PATH}:/work/build/Release/lib && /work/build/Release/bin/ISTORE_PROC_sqlite -db ./InvariantStore/InvariantStore.db -log-config config/log-pv-kafka.properties -util Kafka -kafka-broker-list kafka:9093 -util MetaData"
+command: bash -c "source /work/build/Release/generators/conanrun.sh && export LD_LIBRARY_PATH=$${LD_LIBRARY_PATH}:/work/build/Release/lib && /work/build/Release/bin/ISTORE_PROC_sqlite -db ./InvariantStore/InvariantStore.db -log-config config/log-pv-kafka.properties -util Kafka -kafka-broker-list kafka:9093 -kafka-offset-reset latest -util MetaData"

aeo_svdc_proc:
image: levistarrett/masl-dev:${MASL_VERSION:-latest}
@@ -56,5 +56,5 @@ services:
logging:
driver: local
working_dir: /root
-command: bash -c "source /work/build/Release/generators/conanrun.sh && export LD_LIBRARY_PATH=$${LD_LIBRARY_PATH}:/work/build/Release/lib && /work/build/Release/bin/AEO_SVDC_PROC_transient -log-config config/log-pv-kafka.properties -configFile ${CONFIG_FILE:-pv-config.json} -util Kafka -kafka-broker-list kafka:9093 -util MetaData"
+command: bash -c "source /work/build/Release/generators/conanrun.sh && export LD_LIBRARY_PATH=$${LD_LIBRARY_PATH}:/work/build/Release/lib && /work/build/Release/bin/AEO_SVDC_PROC_transient -log-config config/log-pv-kafka.properties -configFile ${CONFIG_FILE:-pv-config.json} -util Kafka -kafka-broker-list kafka:9093 -kafka-offset-reset latest -util MetaData"

6 changes: 3 additions & 3 deletions deploy/docker-compose.prod.yml
@@ -15,7 +15,7 @@ services:
logging:
driver: local
working_dir: /root
-command: JM_PROC_transient -log-config config/log-pv-kafka.properties -configFile ${CONFIG_FILE:-pv-config.json} -util Kafka -kafka-broker-list kafka:9093 -util MetaData
+command: JM_PROC_transient -log-config config/log-pv-kafka.properties -configFile ${CONFIG_FILE:-pv-config.json} -util Kafka -kafka-broker-list kafka:9093 -kafka-group-id PVKafkaGroup -util MetaData

istore_proc:
image: ghcr.io/xtuml/protocol_verifier:latest
@@ -31,7 +31,7 @@ services:
logging:
driver: local
working_dir: /root
-command: ISTORE_PROC_sqlite -db ./InvariantStore/InvariantStore.db -log-config config/log-pv-kafka.properties -util Kafka -kafka-broker-list kafka:9093 -util MetaData
+command: ISTORE_PROC_sqlite -db ./InvariantStore/InvariantStore.db -log-config config/log-pv-kafka.properties -util Kafka -kafka-broker-list kafka:9093 -kafka-offset-reset latest -util MetaData

aeo_svdc_proc:
image: ghcr.io/xtuml/protocol_verifier:latest
@@ -48,7 +48,7 @@ services:
logging:
driver: local
working_dir: /root
-command: AEO_SVDC_PROC_transient -log-config config/log-pv-kafka.properties -configFile ${CONFIG_FILE:-pv-config.json} -util Kafka -kafka-broker-list kafka:9093 -util MetaData
+command: AEO_SVDC_PROC_transient -log-config config/log-pv-kafka.properties -configFile ${CONFIG_FILE:-pv-config.json} -util Kafka -kafka-broker-list kafka:9093 -kafka-offset-reset latest -util MetaData

zookeeper:
image: levistarrett/zookeeper:3.4.6
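With the group id and offset-reset behavior fixed, the start/stop/start scenario from #226 can be exercised against the prod stack roughly as follows (a sketch only; the whole stack is cycled rather than individual services):

# Sketch of a start/stop/start cycle against the prod compose file.
cd deploy
docker compose -f docker-compose.prod.yml up -d --wait
# ...play some events, then cycle the stack...
docker compose -f docker-compose.prod.yml stop
docker compose -f docker-compose.prod.yml start
# After the restart the consumers should rejoin PVKafkaGroup and continue from committed offsets.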
172 changes: 172 additions & 0 deletions metrics/test_group_id.sh
@@ -0,0 +1,172 @@
#!/bin/bash
set -e

# Usage:
# test_group_id.sh [rate (events/second)] [total number of events] [prepopulated events] [reception topic]
# Execution defaults to: test_group_id.sh 1000 100000 0 Protocol_Verifier_Reception
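# An illustrative run (values chosen only for this sketch):
#   ./test_group_id.sh 2000 200000 0 Protocol_Verifier_Reception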

export CONFIG_FILE=benchmarking-config.json

P2J="python ../bin/plus2json.pyz"
# Define batches of events for p2j to play.
BATCH_OF_EVENTS=10000
EVENTS_PER_SECOND=1000
TOTAL_EVENTS=100000
if [[ $# -ge 2 ]] ; then
EVENTS_PER_SECOND=$1
TOTAL_EVENTS=$2
if [[ $BATCH_OF_EVENTS -gt $TOTAL_EVENTS ]] ; then
BATCH_OF_EVENTS=$TOTAL_EVENTS
fi
fi
ITERATIONS=$(($TOTAL_EVENTS / $BATCH_OF_EVENTS))
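# With the defaults above: 100000 / 10000 = 10 batches per pass.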

# Define prepopulation quantity.
PREPOPULATION_QUANTITY=0
if [[ $# -ge 3 ]] ; then
PREPOPULATION_QUANTITY=$3
fi

# Allow overriding the Kafka topic for reception.
RECEPTION_TOPIC="Protocol_Verifier_Reception"
if [[ $# -ge 4 ]] ; then
RECEPTION_TOPIC=$4
fi

# prepare the deploy folder
echo "Preparing deploy location..."
cd ../deploy
git clean -dxf .
echo "Done."

# get list of puml files (stripping DOS CR)
puml_files=$(cat ../metrics/benchmark_job_definitions.txt | sed "s/\r$//")
puml_file_for_injection="../tests/PumlForTesting/PumlRegression/ComplexNoEventDataJob.puml"
puml_file_for_alarm="../tests/PumlForTesting/PumlRegression/ACritical1.puml"
puml_file_for_prepopulation="../tests/PumlForTesting/PumlRegression/SimpleSequenceJob.puml"

# generate job definitions
echo "Generating job definitions..."
echo ${puml_files} | xargs $P2J --job -o config/job_definitions
echo "../tests/PumlForTesting/PumlRegression/AAExtraJobInvariantSourceJob.puml" | xargs $P2J --job -o config/job_definitions
echo "Done."


# launch the broker
echo "Launching the message broker..."
docker compose -f docker-compose.onlykafka.yml up -d --wait
echo "Done."

if [[ $PREPOPULATION_QUANTITY -gt 0 ]] ; then
echo "Prepopulating broker with" $PREPOPULATION_QUANTITY "events..."
echo ${puml_file_for_prepopulation} | xargs $P2J --play --msgbroker localhost:9092 --topic $RECEPTION_TOPIC --shuffle --rate $EVENTS_PER_SECOND --num-events $PREPOPULATION_QUANTITY
echo "Done."
fi

# launch the application
echo "Launching the application..."
docker compose -f docker-compose.onlypv.yml up -d --wait
echo "Done."

# generate source job
echo "Generating invariant source runtime event stream..."
# short delay to ensure everything is initialized
sleep 5
echo "../tests/PumlForTesting/PumlRegression/AAExtraJobInvariantSourceJob.puml" | xargs $P2J --play --msgbroker localhost:9092 --topic $RECEPTION_TOPIC
echo "Done."

# generate test event data
echo "Generating audit event stream..."
sleep 1
echo start `date` $TOTAL_EVENTS "at" $EVENTS_PER_SECOND "on" `hostname` >> runtime.txt
start_seconds=`date +%s`
# plus2json leaks memory when running continuously.
# Loop over small batches of events so memory is freed between batches.
echo "0 of " $TOTAL_EVENTS
LOOP_COUNT=0
for ((i = 0; i < $ITERATIONS; i++)); do
echo ${puml_files} | xargs $P2J --play --msgbroker localhost:9092 --topic $RECEPTION_TOPIC --shuffle --rate $EVENTS_PER_SECOND --num-events $BATCH_OF_EVENTS
if [[ $# -lt 3 ]] ; then
# Inject an error to fail one job.
echo "Inject error to fail a job."
$P2J ${puml_file_for_injection} --play --msgbroker localhost:9092 --topic $RECEPTION_TOPIC --omit C
echo "Inject error to alarm a job."
$P2J ${puml_file_for_alarm} --play --msgbroker localhost:9092 --topic $RECEPTION_TOPIC --sibling CSJC
fi
LOOP_COUNT=$(($LOOP_COUNT + 1))
echo $(($LOOP_COUNT * $BATCH_OF_EVENTS)) " of " $TOTAL_EVENTS
done
stop_seconds=`date +%s`
echo "stop " `date` >> runtime.txt
runtime=$(($stop_seconds - $start_seconds))
events_per_second=$(($TOTAL_EVENTS / $runtime))
echo $runtime "seconds at rate:" $events_per_second >> runtime.txt
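# e.g. 100000 events finishing in 100 seconds would append "100 seconds at rate: 1000".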
echo "Done."
if [[ $# -ge 3 ]] ; then
echo "Press ENTER to continue..."
read a
else
sleep 20
fi

# bring down application
echo "Bringing down the application... (ctrl-c to leave it running)"
sleep 10
docker compose -f docker-compose.onlypv.yml down
echo "Done."

if [[ $PREPOPULATION_QUANTITY -gt 0 ]] ; then
echo "Prepopulating broker with" $PREPOPULATION_QUANTITY "events..."
echo ${puml_file_for_prepopulation} | xargs $P2J --play --msgbroker localhost:9092 --topic $RECEPTION_TOPIC --shuffle --rate $EVENTS_PER_SECOND --num-events $PREPOPULATION_QUANTITY
echo "Done."
fi

# launch the application
echo "Launching the application (again)..."
docker compose -f docker-compose.onlypv.yml up -d --wait
echo "Done."

# generate test event data
echo "Generating audit event stream..."
sleep 1
echo start `date` $TOTAL_EVENTS "at" $EVENTS_PER_SECOND "on" `hostname` >> runtime.txt
start_seconds=`date +%s`
# plus2json leaks memory when running continuously.
# Loop over small batches of events so memory is freed between batches.
echo "0 of " $TOTAL_EVENTS
LOOP_COUNT=0
for ((i = 0; i < $ITERATIONS; i++)); do
echo ${puml_files} | xargs $P2J --play --msgbroker localhost:9092 --topic $RECEPTION_TOPIC --shuffle --rate $EVENTS_PER_SECOND --num-events $BATCH_OF_EVENTS
if [[ $# -lt 3 ]] ; then
# Inject an error to fail one job.
echo "Inject error to fail a job."
$P2J ${puml_file_for_injection} --play --msgbroker localhost:9092 --topic $RECEPTION_TOPIC --omit C
echo "Inject error to alarm a job."
$P2J ${puml_file_for_alarm} --play --msgbroker localhost:9092 --topic $RECEPTION_TOPIC --sibling CSJC
fi
LOOP_COUNT=$(($LOOP_COUNT + 1))
echo $(($LOOP_COUNT * $BATCH_OF_EVENTS)) " of " $TOTAL_EVENTS
done
stop_seconds=`date +%s`
echo "stop " `date` >> runtime.txt
runtime=$(($stop_seconds - $start_seconds))
events_per_second=$(($TOTAL_EVENTS / $runtime))
echo $runtime "seconds at rate:" $events_per_second >> runtime.txt
echo "Done."
if [[ $# -ge 3 ]] ; then
echo "Press ENTER to continue..."
read a
fi

# bring down application
echo "Bringing down the application... (ctrl-c to leave it running)"
docker compose -f docker-compose.onlypv.yml down
echo "Done."

# bring down kafka
echo "Bringing down kafka... (ctrl-c to leave it running)"
docker compose -f docker-compose.onlykafka.yml down
echo "Done."

exit_code=0
exit ${exit_code}
