Use latest Bitnami Kafka image
slominskir committed Aug 2, 2023
1 parent cedc74b commit b86f464
Showing 11 changed files with 79 additions and 55 deletions.
13 changes: 8 additions & 5 deletions Dockerfile
@@ -1,5 +1,6 @@
ARG BUILD_IMAGE=gradle:7.4-jdk17-alpine
ARG RUN_IMAGE=slominskir/epics2kafka-base:1.0.0
ARG RUN_IMAGE=bitnami/kafka:3.5.0
ARG CUSTOM_CRT_URL=http://pki.jlab.org/JLabCA.crt

################## Stage 0
FROM ${BUILD_IMAGE} as builder
@@ -18,13 +19,15 @@ RUN cd /app && gradle build -x test --no-watch-fs $OPTIONAL_CERT_ARG
################## Stage 1
FROM ${RUN_IMAGE} as runner
ARG CUSTOM_CRT_URL
ARG RUN_USER=kafka
ARG RUN_USER=1001
USER root
ENV PATH="/kafka/bin:${PATH}"
ENV KAFKA_HOME="/opt/bitnami/kafka"
ENV KAFKA_CONNECT_PLUGINS_DIR="/plugins"
ENV PATH="$KAfKA_HOME/bin:${PATH}"
COPY --from=builder /app/build/install $KAFKA_CONNECT_PLUGINS_DIR
COPY --from=builder /app/scripts /scripts
COPY --from=builder /app/examples/logging/log4j.properties /kafka/config
COPY --from=builder /app/examples/logging/logging.properties /kafka/config
COPY --from=builder /app/examples/logging/log4j.properties $KAFKA_HOME/config
COPY --from=builder /app/examples/logging/logging.properties $KAFKA_HOME/config
RUN chown -R ${RUN_USER}:0 /scripts \
&& chmod -R g+rw /scripts
USER ${RUN_USER}
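
For context, the runner stage is now based on bitnami/kafka rather than the epics2kafka-base image, with the Connect plugin copied to /plugins and the logging config placed under $KAFKA_HOME/config. A minimal build-and-run sketch in bash (the image tag, broker address, and published port below are assumptions, not taken from this commit):

# Build the image from the repository root (tag name is hypothetical)
docker build -t epics2kafka:local .

# Run it against an existing broker; BOOTSTRAP_SERVERS is the env var the
# entrypoint script reads, and 8083 is the Connect REST listener port
docker run --rm -e BOOTSTRAP_SERVERS=kafka:9092 -p 8083:8083 epics2kafka:local
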
5 changes: 0 additions & 5 deletions bind.yml
@@ -4,11 +4,6 @@ services:
file: build.yml
service: softioc

zookeeper:
extends:
file: build.yml
service: zookeeper

kafka:
extends:
file: build.yml
3 changes: 3 additions & 0 deletions build.gradle
@@ -50,6 +50,9 @@ task integrationTest(type: Test) {
testClassesDirs = sourceSets.integration.output.classesDirs
classpath = sourceSets.integration.runtimeClasspath

//environment 'BOOTSTRAP_SERVERS', 'localhost:9094'
//environment 'EPICS_CA_ADDR_LIST', 'localhost'

testLogging {
events "passed", "skipped", "failed"
showStandardStreams = true
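
The commented-out lines hint that the integration tests read BOOTSTRAP_SERVERS and EPICS_CA_ADDR_LIST from the environment rather than hard-coding them. One way to run them against the compose stack (a sketch; the compose file choice and Gradle invocation are assumptions):

# Start the test dependencies defined in the compose files
docker compose -f test.yml up -d

# Point the tests at the externally advertised listener on 9094, matching
# the localhost:9094 fallback in getBootstrapServers()
BOOTSTRAP_SERVERS=localhost:9094 EPICS_CA_ADDR_LIST=localhost gradle integrationTest
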
9 changes: 1 addition & 8 deletions build.yml
@@ -4,11 +4,6 @@ services:
file: docker-compose.yml
service: softioc

zookeeper:
extends:
file: docker-compose.yml
service: zookeeper

kafka:
extends:
file: docker-compose.yml
@@ -20,6 +15,4 @@ services:
service: connect
build:
context: .
dockerfile: Dockerfile
args:
- CUSTOM_CRT_URL=http://pki.jlab.org/JLabCA.crt
dockerfile: Dockerfile
39 changes: 23 additions & 16 deletions deps.yml
@@ -1,31 +1,38 @@
services:
softioc:
image: slominskir/softioc:1.1.0
image: jeffersonlab/softioc:1.1.0
tty: true
stdin_open: true
hostname: softioc
container_name: softioc
ports:
- "5065:5065/tcp"
- "5064:5064/tcp"
- "5065:5065/udp"
- "5064:5064/udp"
volumes:
- ./examples/softioc-db:/db
- ./examples/softioc-scripts:/scripts

zookeeper:
image: debezium/zookeeper:1.9.2.Final
hostname: zookeeper
container_name: zookeeper
ports:
- "2181:2181"
- "2888:2888"
- "3888:3888"

kafka:
image: debezium/kafka:1.9.2.Final
image: bitnami/kafka:3.5.0
hostname: kafka
container_name: kafka
depends_on:
- zookeeper
ports:
- "9092:9092"
- "9094:9094"
environment:
ZOOKEEPER_CONNECT: 'zookeeper:2181'
CREATE_TOPICS: 'channels:1:1:compact,topic1:1:1:compact,topic2:1:1:compact,topic3:1:1:compact'
- ALLOW_PLAINTEXT_LISTENER=yes
- KAFKA_KRAFT_CLUSTER_ID=Cl1akjnHRoG9df2FiSicNg
- KAFKA_CFG_LISTENERS=PLAINTEXT://:9092,CONTROLLER://:9093,EXTERNAL://:9094
- KAFKA_CFG_ADVERTISED_LISTENERS=PLAINTEXT://kafka:9092,EXTERNAL://localhost:9094
- KAFKA_CFG_LISTENER_SECURITY_PROTOCOL_MAP=CONTROLLER:PLAINTEXT,EXTERNAL:PLAINTEXT,PLAINTEXT:PLAINTEXT
- KAFKA_CFG_LOG_CLEANER_MIN_CLEANABLE_RATIO=0.1
- KAFKA_CFG_LOG_CLEANER_MAX_COMPACTION_LAG_MS=600000
- KAFKA_CFG_LOG_CLEANER_MIN_COMPACTION_LAG_MS=300000
- KAFKA_CFG_LOG_ROLL_MS=300000
healthcheck:
test: kafka-topics.sh --bootstrap-server kafka:9092 --list
start_period: 10s
interval: 5s
timeout: 10s
retries: 5
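
The broker now runs single-node Bitnami Kafka 3.5 in KRaft mode (no ZooKeeper), with an internal PLAINTEXT listener on 9092, an EXTERNAL listener advertised as localhost:9094, and broker-wide log cleaner/roll settings tuned for faster compaction. A quick sanity check mirroring the healthcheck above (container name per this file; running the CLI from the host is an assumption):

# Inside the container, against the internal listener
docker exec kafka kafka-topics.sh --bootstrap-server kafka:9092 --list

# Or from the host, against the advertised EXTERNAL listener
kafka-topics.sh --bootstrap-server localhost:9094 --list
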
6 changes: 1 addition & 5 deletions docker-compose.yml
@@ -4,11 +4,6 @@ services:
file: deps.yml
service: softioc

zookeeper:
extends:
file: deps.yml
service: zookeeper

kafka:
extends:
file: deps.yml
@@ -29,5 +24,6 @@ services:
CONNECT_MAX_REQUEST_SIZE: 5242880
#MONITOR_CHANNELS: "/config/channels"
MONITOR_CHANNELS: "channel1|topic1|v|key1,channel2|topic2|v|key2,channel3|topic3|v|key3"
#CREATE_TOPICS: "topic1:1:1:compact,topic2:1:1:compact,topic3:1:1:compact"
volumes:
- ./examples/connect-config/distributed:/config
25 changes: 25 additions & 0 deletions examples/connect-config/distributed/connect-distributed.properties
@@ -0,0 +1,25 @@
bootstrap.servers=kafka:9092
group.id=connect-cluster
key.converter=org.apache.kafka.connect.json.JsonConverter
value.converter=org.apache.kafka.connect.json.JsonConverter
key.converter.schemas.enable=true
value.converter.schemas.enable=true
offset.storage.topic=connect-offsets
offset.storage.replication.factor=1
#offset.storage.partitions=25
config.storage.topic=connect-configs
config.storage.replication.factor=1
status.storage.topic=connect-status
status.storage.replication.factor=1
#status.storage.partitions=5
offset.flush.interval.ms=10000
listeners=HTTP://:8083

# If not set, it uses the value for "listeners" if configured.
#rest.advertised.host.name=
#rest.advertised.port=
#rest.advertised.listener=

plugin.path=/plugins

max.request.size=5242880
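
This worker config uses JSON converters with schemas enabled, single-replica internal topics (fine for a single-broker dev stack), plugins loaded from /plugins, and a REST listener on 8083. Once the worker is up, the standard Connect REST endpoints can confirm it found the plugin (the localhost address assumes the port is published to the host):

# Plugins the worker discovered under plugin.path
curl -s http://localhost:8083/connector-plugins

# Connectors currently registered with this worker
curl -s http://localhost:8083/connectors
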
13 changes: 6 additions & 7 deletions scripts/docker-entrypoint.sh
@@ -1,8 +1,10 @@
#!/bin/bash

# We need to configure java.util.logging option on JVM eventually launched by /kafka/bin/connect-distributed.sh
export KAFKA_HOME=/opt/bitnami/kafka

# We need to configure java.util.logging option on JVM eventually launched by kafka/bin/connect-distributed.sh
# - all this just to quiet some noisy log messages from some third party dependency
export EXTRA_ARGS="-Djava.util.logging.config.file=/kafka/config/logging.properties"
export EXTRA_ARGS="-Djava.util.logging.config.file=$KAFKA_HOME/config/logging.properties"

# Grab first SERVER from SERVERS CSV env
IFS=','
@@ -11,9 +13,6 @@ read -ra tmpArray <<< "$BOOTSTRAP_SERVERS"
export BOOTSTRAP_SERVER=${tmpArray[0]}
echo "BOOTSTRAP_SERVER: $BOOTSTRAP_SERVER"

# tools-log4j.properties doesn't exist initially
cp -rn $KAFKA_HOME/config.orig/* $KAFKA_HOME/config

echo "----------------------------------------------"
echo " Step 1: Waiting for Kafka to start listening "
echo "----------------------------------------------"
@@ -28,8 +27,8 @@ echo "----------------------"
echo "---------------------------"
echo " Step 3: Launching Connect "
echo "---------------------------"
# Launch original container ENTRYPOINT in background
/docker-entrypoint.sh start &
# Launch Connect in background
/opt/bitnami/kafka/bin/connect-distributed.sh /config/connect-distributed.properties &

echo "------------------------------------------------"
echo " Step 4: Waiting for Connect to start listening "
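
The IFS=',' and read -ra lines split the BOOTSTRAP_SERVERS CSV into an array so only the first broker is used for the wait-for-Kafka check. A standalone illustration of that idiom (the server names are made up):

#!/bin/bash
# Example CSV of brokers, as might be passed via the environment
BOOTSTRAP_SERVERS="kafka1:9092,kafka2:9092,kafka3:9092"

# Split on commas into an array and keep the first entry
IFS=','
read -ra tmpArray <<< "$BOOTSTRAP_SERVERS"
BOOTSTRAP_SERVER=${tmpArray[0]}

echo "BOOTSTRAP_SERVER: $BOOTSTRAP_SERVER"   # -> kafka1:9092
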
14 changes: 11 additions & 3 deletions src/integration/java/org/jlab/kafka/connect/CommandTopicTest.java
@@ -10,8 +10,10 @@
import org.junit.Assert;
import org.junit.Test;

import java.time.Instant;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.Properties;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
@@ -21,6 +23,8 @@ public class CommandTopicTest {

@Test
public void testCommandTopic() throws ExecutionException, InterruptedException, TimeoutException {
System.err.println("BOOTSTRAP_SERVERS: " + System.getenv("BOOTSTRAP_SERVERS"));

LinkedHashMap<CommandKey, EventSourceRecord<CommandKey, CommandValue>> results = new LinkedHashMap<>();

CommandKey expectedKey = new CommandKey("topic1", "channela");
@@ -38,13 +42,17 @@ public void highWaterOffset(LinkedHashMap<CommandKey, EventSourceRecord<CommandK
Future<RecordMetadata> future = producer.send(expectedKey, expectedValue);

// Block until sent or an exception is thrown
future.get(2, TimeUnit.SECONDS);
future.get(5, TimeUnit.SECONDS);
}

consumer.start();

// highWaterOffset method is called before this method returns, so we should be good!
consumer.awaitHighWaterOffset(2, TimeUnit.SECONDS);
boolean reached = consumer.awaitHighWaterOffset(5, TimeUnit.SECONDS);

if(!reached) {
throw new TimeoutException("Timeout while waiting for highwater");
}

ArrayList<EventSourceRecord<CommandKey, CommandValue>> list = new ArrayList<>(results.values());

@@ -64,7 +72,7 @@ public void highWaterOffset(LinkedHashMap<CommandKey, EventSourceRecord<CommandK
Future<RecordMetadata> future = producer.send(expectedKey, null);

// Block until sent or an exception is thrown
future.get(2, TimeUnit.SECONDS);
future.get(5, TimeUnit.SECONDS);
}
}
}
@@ -32,7 +32,7 @@ private String getBootstrapServers() {
String bootstrapServers = System.getenv("BOOTSTRAP_SERVERS");

if(bootstrapServers == null) {
bootstrapServers = "localhost:9092";
bootstrapServers = "localhost:9094";
}

return bootstrapServers;
5 changes: 0 additions & 5 deletions test.yml
@@ -1,9 +1,4 @@
services:
zookeeper:
extends:
file: build.yml
service: zookeeper

kafka:
extends:
file: build.yml
