diff --git a/Gopkg.lock b/Gopkg.lock
deleted file mode 100644
index 26df7d2..0000000
--- a/Gopkg.lock
+++ /dev/null
@@ -1,222 +0,0 @@
-# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'.
-
-
-[[projects]]
-  branch = "master"
-  digest = "1:8b55be8259cd15a9b5740a7326d238843daf68a2a885a9a8ef1e1df45825533d"
-  name = "github.com/GeertJohan/go.rice"
-  packages = [
-    ".",
-    "embedded",
-  ]
-  pruneopts = "UT"
-  revision = "d954009f7238df62c766980934c8ea7f161d0e59"
-
-[[projects]]
-  digest = "1:8e8da6cc8cca12851d4e089d970a0f7387b3a6bcc8c459ff432213b03076a66d"
-  name = "github.com/daaku/go.zipexe"
-  packages = ["."]
-  pruneopts = "UT"
-  revision = "74d766ac1dde7458348221869a7d1e7e5fa0597e"
-  version = "v1.0.1"
-
-[[projects]]
-  digest = "1:26d73f0911e537e89d04011f7a65a5cd2d28113d551d4366480b4706b2833e3a"
-  name = "github.com/dlclark/regexp2"
-  packages = [
-    ".",
-    "syntax",
-  ]
-  pruneopts = "UT"
-  revision = "92c702aa5812963ef9193a9624191932d44073d7"
-  version = "v1.2.0"
-
-[[projects]]
-  branch = "master"
-  digest = "1:2e1d1e868e4480cf86a148971c16b6ff2794d3f5189eeba316117c57abecf257"
-  name = "github.com/dop251/goja"
-  packages = [
-    ".",
-    "ast",
-    "file",
-    "parser",
-    "token",
-  ]
-  pruneopts = "UT"
-  revision = "77e84ffb8c65af72e4a3bc53c31328265eac7c81"
-
-[[projects]]
-  branch = "master"
-  digest = "1:e3f7a201f18abb4b5b39c266446dabfb8ad9958047e941c82df71e04114a4748"
-  name = "github.com/dustin/go-humanize"
-  packages = ["."]
-  pruneopts = "UT"
-  revision = "afde56e7acacd811f6c94228c2c61af2b0e93158"
-
-[[projects]]
-  digest = "1:3ef6db4aec5ff4301a775e03a4ba0d66638a62ce7b9dd8cb985ba66b10a66272"
-  name = "github.com/go-sourcemap/sourcemap"
-  packages = [
-    ".",
-    "internal/base64vlq",
-  ]
-  pruneopts = "UT"
-  revision = "2588a51d69f1c3e296d57f75815ef40a20d54245"
-  version = "v2.1.3"
-
-[[projects]]
-  digest = "1:e4f5819333ac698d294fe04dbf640f84719658d5c7ce195b10060cc37292ce79"
-  name = "github.com/golang/snappy"
-  packages = ["."]
-  pruneopts = "UT"
-  revision = "2a8bb927dd31d8daada140a5d09578521ce5c36a"
-  version = "v0.0.1"
-
-[[projects]]
-  digest = "1:09cb61dc19af93deae01587e2fdb1c081e0bf48f1a5ad5fa24f48750dc57dce8"
-  name = "github.com/konsorten/go-windows-terminal-sequences"
-  packages = ["."]
-  pruneopts = "UT"
-  revision = "edb144dfd453055e1e49a3d8b410a660b5a87613"
-  version = "v1.0.3"
-
-[[projects]]
-  digest = "1:fc5db2e6a4f78b11ab8fdee6e2cd9a61c2c6885479de2ef2cd20ad589c05b099"
-  name = "github.com/loadimpact/k6"
-  packages = [
-    "js/compiler",
-    "lib",
-    "lib/fsext",
-    "lib/scheduler",
-    "lib/types",
-    "loader",
-    "stats",
-  ]
-  pruneopts = "UT"
-  revision = "459da79ef51b37e5eaba4575c9065d9e592e5c49"
-  version = "v0.26.2"
-
-[[projects]]
-  digest = "1:aff0e9185b5df855488a42e064cb479e994636d3cefa47d053495123d086add4"
-  name = "github.com/mitchellh/mapstructure"
-  packages = ["."]
-  pruneopts = "UT"
-  revision = "9e1e4717f8567d7ead72d070d064ad17d444a67e"
-  version = "v1.3.3"
-
-[[projects]]
-  branch = "master"
-  digest = "1:8ad686b49b509bf03549281c04b5fe11f609e9b12bf34a0690acad8cecc94f15"
-  name = "github.com/oxtoacart/bpool"
-  packages = ["."]
-  pruneopts = "UT"
-  revision = "03653db5a59cd88b481403d312d7c324b56af377"
-
-[[projects]]
-  digest = "1:9e1d37b58d17113ec3cb5608ac0382313c5b59470b94ed97d0976e69c7022314"
-  name = "github.com/pkg/errors"
-  packages = ["."]
-  pruneopts = "UT"
-  revision = "614d223910a179a466c1767a985424175c39b465"
-  version = "v0.9.1"
-
-[[projects]]
-  digest = "1:296753adb808361c576fd439b1e09dd8ea803104bf4e36853d2d829b6eb737ea"
-  name = "github.com/segmentio/kafka-go"
-  packages = [
-    ".",
-    "sasl",
-  ]
-  pruneopts = "UT"
-  revision = "e0af1cfbb8dd463571748e350262e2c81754bb73"
-  version = "v0.3.6"
-
-[[projects]]
-  branch = "master"
-  digest = "1:05eebdd5727fea23083fce0d98d307d70c86baed644178e81608aaa9f09ea469"
-  name = "github.com/sirupsen/logrus"
-  packages = ["."]
-  pruneopts = "UT"
-  revision = "60c74ad9be0d874af0ab0daef6ab07c5c5911f0d"
-
-[[projects]]
-  branch = "master"
-  digest = "1:27f2257a03f0dd1f8e6634f477d30af83980ccbc9ab73f2db9928b8cb8073609"
-  name = "github.com/spf13/afero"
-  packages = [
-    ".",
-    "mem",
-  ]
-  pruneopts = "UT"
-  revision = "c8608266dfb4546f38a16622d1b5aed0291033b9"
-
-[[projects]]
-  branch = "master"
-  digest = "1:020620a097c2bfd056c8db7d31a69ea2cfed874ce985763dcd9ae00f9fa5f74b"
-  name = "golang.org/x/sys"
-  packages = [
-    "internal/unsafeheader",
-    "unix",
-  ]
-  pruneopts = "UT"
-  revision = "1151b9dac4a98d49ef7f80f07ddd826ff51e0b36"
-
-[[projects]]
-  digest = "1:8d964db8360c12a4063c24530c3462ae8af38ef4036db992841eb86478db6490"
-  name = "golang.org/x/text"
-  packages = [
-    "cases",
-    "collate",
-    "collate/build",
-    "internal",
-    "internal/colltab",
-    "internal/gen",
-    "internal/language",
-    "internal/language/compact",
-    "internal/tag",
-    "internal/triegen",
-    "internal/ucd",
-    "language",
-    "transform",
-    "unicode/cldr",
-    "unicode/norm",
-  ]
-  pruneopts = "UT"
-  revision = "342b2e1fbaa52c93f31447ad2c6abc048c63e475"
-  version = "v0.3.2"
-
-[[projects]]
-  branch = "master"
-  digest = "1:908ad1a739c1afa54078eec5dc8a56b8ed01c0576b52ca61600471682fce4c33"
-  name = "golang.org/x/time"
-  packages = ["rate"]
-  pruneopts = "UT"
-  revision = "89c76fbcd5d1cd4969e5d2fe19d48b19d5ad94a0"
-
-[[projects]]
-  digest = "1:2d4e0b42440441453b41485a070c94fb690782b2b7b9f0d3519d62df4f8d612d"
-  name = "gopkg.in/guregu/null.v3"
-  packages = ["."]
-  pruneopts = "UT"
-  revision = "1e6a5b0d3fb642d7bf50a270adf19b54006c0269"
-  version = "v3.5.0"
-
-[[projects]]
-  digest = "1:d95b8bb3ac8504c162b886d9396005640c1bd5c4523ef78f542d14d514802e62"
-  name = "gopkg.in/linkedin/goavro.v2"
-  packages = ["."]
-  pruneopts = "UT"
-  revision = "58a83ea1539a0c8c3946781abac4103abac92535"
-  version = "v2.9.8"
-
-[solve-meta]
-  analyzer-name = "dep"
-  analyzer-version = 1
-  input-imports = [
-    "github.com/loadimpact/k6/lib",
-    "github.com/loadimpact/k6/stats",
-    "github.com/segmentio/kafka-go",
-    "gopkg.in/linkedin/goavro.v2",
-  ]
-  solver-name = "gps-cdcl"
-  solver-version = 1
diff --git a/Gopkg.toml b/Gopkg.toml
deleted file mode 100644
index cee7782..0000000
--- a/Gopkg.toml
+++ /dev/null
@@ -1,15 +0,0 @@
-[prune]
-  go-tests = true
-  unused-packages = true
-
-[[constraint]]
-  name = "github.com/segmentio/kafka-go"
-  version = "0.3.6"
-
-[[constraint]]
-  name = "gopkg.in/linkedin/goavro.v2"
-  version = "2.9.8"
-
-[[constraint]]
-  name = "github.com/loadimpact/k6"
-  version = "0.26.2"
diff --git a/README.md b/README.md
index 7faeb81..64785fb 100644
--- a/README.md
+++ b/README.md
@@ -1,50 +1,38 @@
-# Disclaimer
+# xk6-kafka
 
-The [k6](https://github.com/loadimpact/k6) [plugin system](https://github.com/loadimpact/k6/issues/1353) is currently experimental. This plugin is a proof of concept, and it isn't supported by the k6 team, and may break in the future. USE IT AT YOUR OWN RISK!
+This is a [k6](https://github.com/loadimpact/k6) extension using the [xk6](https://github.com/k6io/xk6) system.
 
-This project is also a WIP, so it is not feature-complete, nor something to rely on. Over time, I'll try to add a better API that is more natural to both Go and JavaScript.
+| :exclamation: This is a proof of concept, isn't supported by the k6 team, and may break in the future. USE AT YOUR OWN RISK! |
+| ----------------------------------------------------------------------------------------------------------------------------- |
 
 ----
 
-# k6-plugin-kafka
+This project is a k6 extension that can be used to load test Kafka, using a producer. For each connection to Kafka, many messages can be sent. These messages are an array of objects, each containing a key and a value, as sketched below. There is also a consumer for testing purposes, i.e. to make sure you send the correct data to Kafka. The consumer is not meant to be used for testing Kafka under load. The extension supports producing and consuming messages in Avro format, given a schema for the key and/or the value.
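+
+As a minimal, purely illustrative sketch, the list of messages handed to the extension's `produce` function (shown later in this README) looks like this:
+
+```javascript
+// Each message is an object with a key and a value; both are plain strings here.
+const messages = [
+    { key: "message-key", value: "message-value" },
+];
+```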
 
-This project is a k6 plugin that can be used to load test Kafka, using a producer. Per each connection to Kafka, many messages can be sent, which is basically an array of objects containing a key and a value. There's also a consumer for testing purposes, e.g. to make sure you send the correct data to Kafka. The consumer is not meant to be used for testing Kafka under load. The plugin supports producing and consuming messages in Avro format, given a schema for key and/or value.
+The real purpose of this extension is not only to test Apache Kafka, but also the system you've designed that uses Apache Kafka. So, you can test your consumers, and hence your system, by auto-generating messages and sending them to your system via Apache Kafka.
 
-The real purpose of this plugin is not only to test Apache Kafka, but also the system you've designed that uses Apache Kafka. So, you can test your consumers, and hence your system, by auto-generating messages and sending them to your system via Apache Kafka.
+In order to build the source, you should have the latest version of Go (go1.15) installed. I recommend using [gvm](https://github.com/moovweb/gvm), the Go version manager.
 
-In order to build the source, you should have the latest version of Go installed, which I recommend you to have [gvm](https://github.com/moovweb/gvm), Go version manager, installed.
+## Build
 
-## Build k6 from source (with plugin support)
+To build a `k6` binary with this extension, first ensure you have the prerequisites in place. Then, install [xk6](https://github.com/k6io/xk6) and build your custom k6 binary with the Kafka extension:
 
-This step will be removed once [the plugin support PR](https://github.com/loadimpact/k6/pull/1396) is merged and in production.
-
-```bash
-$ go get -d github.com/loadimpact/k6
-$ cd $GOPATH/src/github.com/loadimpact/k6
-$ git checkout -b andremedeiros-feature/plugins tags/v0.26.2
-$ git pull -f https://github.com/andremedeiros/k6.git feature/plugins
-$ make
-```
+1. Install `xk6`:
+
+   ```shell
+   $ go get -u github.com/k6io/xk6/cmd/xk6
+   ```
 
-## Build plugin from source
-
-```bash
-$ go get -d github.com/mostafa/k6-plugin-kafka
-$ cd $GOPATH/src/github.com/mostafa/k6-plugin-kafka
-$ ./build.sh
-$ cp $GOPATH/src/github.com/loadimpact/k6/k6 $GOPATH/src/github.com/mostafa/k6-plugin-kafka
-```
+2. Build the binary:
+
+   ```shell
+   $ xk6 build v0.29.0 --with github.com/mostafa/xk6-kafka
+   ```
 
 ## Run & Test
 
-First, you need to have your Kafka development environment setup. I recommend you to use [Lenses.io fast-data-dev Docker image](https://github.com/lensesio/fast-data-dev), which is a complete Kafka setup for development that includes: Kafka, Zookeeper, Schema Registry, Kafka-Connect, Landoop Tools, 20+ connectors. It is fairly easy to setup, if you have Docker installed. Just make sure to monitor Docker logs to have a working setup, before attempting to test.
+First, you need to have your Kafka development environment set up. I recommend using the [Lenses.io fast-data-dev Docker image](https://github.com/lensesio/fast-data-dev), a complete Kafka setup for development that includes Kafka, Zookeeper, Schema Registry, Kafka-Connect, Landoop Tools, and 20+ connectors. It is fairly easy to set up if you have Docker installed; just monitor the Docker logs to confirm a working setup before attempting to test, since initial setup, leader election, and test data ingestion take time.
 
 ### Development Environment
 
@@ -57,24 +45,24 @@ $ sudo docker logs -f lensesio
 
 ### k6 Test
 
-The following k6 test script is used to test this plugin and Apache Kafka in turn. The script is availale as `test.js` with more code and commented sections. The script has 4 parts:
+The following k6 test script is used to test this extension and, in turn, Apache Kafka. The script is available as `test.js`, with more code and commented sections. The script has 4 parts:
 
-1. The __imports__ at the top shows the exposed functions that are imported from k6 and the plugin, `check` from k6 and the `writer`, `produce`, `reader`, `consume` from the plugin using the `k6-plugin/kafka` plugin loading convention.
+1. The __imports__ at the top show the exposed functions that are imported from k6 and the extension: `check` from k6, and `writer`, `produce`, `reader`, and `consume` from the extension, using the `k6/x/kafka` extension loading convention.
 2. The __Avro schema__ defines a value schema that is used by both the producer and the consumer, according to the [Avro schema specification](https://avro.apache.org/docs/current/spec.html).
 3. The __Avro message producer__:
-    1. The `writer` function is used to open a connection to the bootstrap servers. The first arguments is an array of string that signifies the bootstrap server addresses and the second is the topic you want to write to. You can reuse this writer object to produce as many messages as you want.
+    1. The `writer` function is used to open a connection to the bootstrap servers. The first argument is an array of strings that signifies the bootstrap server addresses and the second is the topic you want to write to. You can reuse this writer object to produce as many messages as you want.
     2. The `produce` function is used to send a list of messages to Kafka. The first argument is the `producer` object, the second is the list of messages (with key and value), and the third and fourth are the key schema and value schema in Avro format. If the schemas are not passed to the function, the values are treated as plain strings, as with the key schema here, where an empty string, `""`, is passed (see the plain-string sketch after this list). The `produce` function returns an `error` if it fails. The check is optional, but `error` being `undefined` means that the `produce` function successfully sent the messages.
     3. The `producer.close()` function closes the `producer` object.
 4. The __Avro message consumer__:
-    1. The `reader` function is used to open a connection to the bootstrap servers. The first arguments is an array of string that signifies the bootstrap server addresses and the second is the topic you want to reader from.
+    1. The `reader` function is used to open a connection to the bootstrap servers. The first argument is an array of strings that signifies the bootstrap server addresses and the second is the topic you want to read from.
     2. The `consume` function is used to read a list of messages from Kafka. The first argument is the `consumer` object, the second is the number of messages to read in one go, and the third and fourth are the key schema and value schema in Avro format. If the schemas are not passed to the function, the values are treated as plain strings, as with the key schema here, where an empty string, `""`, is passed. The `consume` function returns an empty array if it fails. The check is optional, but here it checks that the length of the message array is exactly 10.
     3. The `consumer.close()` function closes the `consumer` object.
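+
+If you don't need Avro at all, messages can be plain strings. The following is a minimal, hypothetical sketch of that schema-less mode, based on the descriptions above; the broker address and topic name are illustrative, and `teardown()` is just one possible place to close the objects. The full Avro-based test script follows right after:
+
+```javascript
+import { check } from 'k6';
+import { writer, produce, reader, consume } from 'k6/x/kafka';
+
+const producer = writer(["localhost:9092"], "test-k6-extension-topic");
+const consumer = reader(["localhost:9092"], "test-k6-extension-topic");
+
+export default function () {
+    // With no Avro schemas passed, the key and value are sent as plain strings.
+    const error = produce(producer, [{ key: "plain-key", value: "plain-value" }]);
+    check(error, { "plain message is sent": (err) => err == undefined });
+
+    // Read one message back; consume() returns an empty array if it fails.
+    const messages = consume(consumer, 1);
+    check(messages, { "one message received": (msgs) => msgs.length == 1 });
+}
+
+export function teardown() {
+    producer.close();
+    consumer.close();
+}
+```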
 
 ```javascript
 import { check } from 'k6';
-import { writer, produce, reader, consume } from 'k6-plugin/kafka'; // import kafka plugin
+import { writer, produce, reader, consume } from 'k6/x/kafka'; // import kafka extension
 
 // Avro value schema
 const value_schema = JSON.stringify({
@@ -92,7 +80,7 @@ export default function () {
   // Avro message producer
   const producer = writer(
     ["localhost:9092"], // bootstrap servers
-    "test-k6-plugin-topic", // Kafka topic
+    "test-k6-extension-topic", // Kafka topic
   )
 
   for (let index = 0; index < 100; index++) {
@@ -100,10 +88,10 @@ export default function () {
     [{
       key: "DA KEY!",
       value: JSON.stringify({
-        "name": "k6-plugin-kafka",
+        "name": "k6-extension-kafka",
         "version": "0.0.1",
         "author": "Mostafa Moradian",
-        "description": "k6 Plugin to Load Test Apache Kafka"
+        "description": "k6 Extension to Load Test Apache Kafka"
       })
     }],
     "", value_schema);
@@ -116,7 +104,7 @@ export default function () {
   // Avro message consumer
   const consumer = reader(
     ["localhost:9092"], // bootstrap servers
-    "test-k6-plugin-topic", // Kafka topic
+    "test-k6-extension-topic", // Kafka topic
   )
 
   // Read 10 messages only
@@ -132,7 +120,7 @@ You can run k6 with the Kafka extension using the following command:
 
 ```bash
-$ ./k6 run --vus 1 --duration 10s test.js
+$ ./k6 run --vus 50 --duration 60s test.js
 ```
 
 And here's the test result output:
diff --git a/build.sh b/build.sh
deleted file mode 100755
index 18bb12f..0000000
--- a/build.sh
+++ /dev/null
@@ -1,10 +0,0 @@
-#!/bin/bash
-
-# This fetches the k6-plugin-kafka from GitHub and
-# builds it in your $GOPATH/src/github.com/mostafa/k6-plugin-kafka
-
-# Install dependencies defined in Gopkg.toml
-go get -d
-
-# Build k6-plugin-kafka
-go build -buildmode=plugin -ldflags="-s -w" -o kafka.so github.com/mostafa/k6-plugin-kafka