From 984dd8e8db2839d7e1f5729d019d8d486d1cc221 Mon Sep 17 00:00:00 2001
From: Robert Gildein <robert.gildein@canonical.com>
Date: Mon, 14 Oct 2024 09:54:53 +0200
Subject: [PATCH] Add integration tests for log forwarding

Signed-off-by: Robert Gildein <robert.gildein@canonical.com>
---
 tests/integration/integration-tests.sh | 69 +++++++++++++++++++++++++
 1 file changed, 69 insertions(+)

diff --git a/tests/integration/integration-tests.sh b/tests/integration/integration-tests.sh
index 0ef55517..3b045457 100755
--- a/tests/integration/integration-tests.sh
+++ b/tests/integration/integration-tests.sh
@@ -53,6 +53,19 @@ validate_metrics() {
   fi
 }
 
+# Assert that the fake-Loki access log shows the log-forwarding pebble layer
+# was applied. $1 is the path of the captured server log.
+validate_logs() {
+  local log=$1
+  # Each message must appear at least twice: once for the driver pod and
+  # once for the executor pod.
+  if [ "$(grep -ci "Configuring log-forwarding to Loki." "$log")" -lt 2 ]; then
+    echo "ERROR: log-forwarding configuration message missing from $log" >&2
+    exit 1
+  fi
+  if [ "$(grep -ci 'Layer \\\\"logging\\\\" added successfully from \\\\"/tmp/rendered_log_layer.yaml\\\\"' "$log")" -lt 2 ]; then
+    echo "ERROR: logging layer confirmation missing from $log" >&2
+    exit 1
+  fi
+}
+
 setup_user() {
 
   echo "setup_user() ${1} ${2}"
@@ -422,6 +435,56 @@ run_example_job_in_pod_with_metrics() {
 }
 
 
+# Run the SparkPi example with LOKI_URL pointing at a local stand-in HTTP
+# server, then verify the log-forwarding layer was configured on both pods.
+run_example_job_in_pod_with_log_forwarding() {
+  NAMESPACE=${1-$NAMESPACE}
+  USERNAME=${2-spark}
+  SPARK_EXAMPLES_JAR_NAME="spark-examples_2.12-$(get_spark_version).jar"
+
+  PREVIOUS_JOB=$(kubectl -n "$NAMESPACE" get pods --sort-by=.metadata.creationTimestamp | grep driver | tail -n 1 | cut -d' ' -f1)
+
+  # Start a simple HTTP server that stands in for Loki and records requests.
+  LOG_FILE="/tmp/server.log"
+  SERVER_PORT=9091
+  python3 tests/integration/resources/test_web_server.py "$SERVER_PORT" > "$LOG_FILE" &
+  HTTP_SERVER_PID=$!
+  # Ensure the helper server is killed even when an assertion exits early.
+  trap 'kill "$HTTP_SERVER_PID" 2>/dev/null || true' EXIT
+
+  # Host address that is reachable from inside the cluster.
+  IP_ADDRESS=$(hostname -I | cut -d' ' -f1)
+  echo "IP: $IP_ADDRESS"
+
+  kubectl -n "$NAMESPACE" exec testpod -- env PORT="$SERVER_PORT" IP="$IP_ADDRESS" UU="$USERNAME" NN="$NAMESPACE" JJ="$SPARK_EXAMPLES_JAR_NAME" IM="$(spark_image)" \
+    /bin/bash -c 'spark-client.spark-submit \
+    --username $UU --namespace $NN \
+    --conf spark.kubernetes.driver.request.cores=100m \
+    --conf spark.kubernetes.executor.request.cores=100m \
+    --conf spark.kubernetes.container.image=$IM \
+    --conf spark.executorEnv.LOKI_URL="$IP:$PORT" \
+    --class org.apache.spark.examples.SparkPi \
+    local:///opt/spark/examples/jars/$JJ 1000'
+
+  DRIVER_JOB=$(kubectl -n "$NAMESPACE" get pods --sort-by=.metadata.creationTimestamp | grep driver | tail -n 1 | cut -d' ' -f1)
+
+  if [[ "${DRIVER_JOB}" == "${PREVIOUS_JOB}" ]]
+  then
+    echo "ERROR: Sample job has not run!"
+    exit 1
+  fi
+
+  # Check job output; sample: "Pi is roughly 3.13956232343"
+  pi=$(kubectl logs "$DRIVER_JOB" -n "$NAMESPACE" | grep 'Pi is roughly' | rev | cut -d' ' -f1 | rev | cut -c 1-3)
+  echo -e "Spark Pi Job Output: \n ${pi}"
+
+  validate_pi_value "$pi"
+  validate_logs "$LOG_FILE"
+
+  # Stop the helper HTTP server and clear the cleanup trap.
+  kill "$HTTP_SERVER_PID"
+  trap - EXIT
+}
+
+
 run_example_job_with_error_in_pod() {
 
   SPARK_EXAMPLES_JAR_NAME="spark-examples_2.12-$(get_spark_version).jar"
@@ -657,6 +720,12 @@ echo -e "########################################"
 
 (setup_user_context && test_example_job_in_pod_with_metrics && cleanup_user_success) || cleanup_user_failure_in_pod
 
+echo -e "########################################"
+echo -e "RUN EXAMPLE JOB WITH LOG FORWARDING"
+echo -e "########################################"
+
+(setup_user_context && run_example_job_in_pod_with_log_forwarding && cleanup_user_success) || cleanup_user_failure_in_pod
+
 echo -e "########################################"
 echo -e "RUN EXAMPLE JOB WITH ERRORS"
 echo -e "########################################"