Commit 9a87d8c

WIP
rmoff committed Jan 22, 2019
1 parent d3f45ff commit 9a87d8c
Showing 3 changed files with 285 additions and 0 deletions.
1 change: 1 addition & 0 deletions connect-error-handling/.gitignore
@@ -0,0 +1 @@
data
135 changes: 135 additions & 0 deletions connect-error-handling/config/jmx_minimal.json
@@ -0,0 +1,135 @@
{
  "servers": [
    {
      "alias": "KSQL",
      "host": "ksql-server",
      "port": "30002",
      "queries": [
        {
          "obj": "io.confluent.ksql.metrics:type=ksql-engine-query-stats",
          "attr": [
            "error-rate",
            "messages-consumed-per-sec",
            "num-active-queries",
            "num-idle-queries",
            "num-persistent-queries"
          ],
          "resultAlias": "ksql-engine-query-stats",
          "outputWriters": [
            {
              "@class": "com.googlecode.jmxtrans.model.output.InfluxDbWriterFactory",
              "url": "http://influxdb:8086",
              "username": "root",
              "password": "root",
              "database": "influx"
            }
          ]
        },
        {
          "obj": "io.confluent.ksql.metrics:type=consumer-metrics,key=*,id=*",
          "attr": [
            "consumer-failed-messages",
            "consumer-messages-per-sec",
            "consumer-total-message-bytes",
            "consumer-total-messages",
            "failed-messages-per-sec"
          ],
          "resultAlias": "ksql-consumer-metrics",
          "outputWriters": [
            {
              "@class": "com.googlecode.jmxtrans.model.output.InfluxDbWriterFactory",
              "url": "http://influxdb:8086",
              "username": "root",
              "password": "root",
              "database": "influx"
            }
          ]
        },
        {
          "obj": "io.confluent.ksql.metrics:type=producer-metrics,key=*,id=*",
          "attr": [
            "failed-messages",
            "failed-messages-per-sec",
            "messages-per-sec",
            "total-messages"
          ],
          "resultAlias": "ksql-producer-metrics",
          "outputWriters": [
            {
              "@class": "com.googlecode.jmxtrans.model.output.InfluxDbWriterFactory",
              "url": "http://influxdb:8086",
              "username": "root",
              "password": "root",
              "database": "influx"
            }
          ]
        },
        {
          "obj": "kafka.consumer:type=consumer-metrics,client-id=*",
          "attr": [
            "incoming-byte-rate",
            "io-time-ns-avg",
            "io-wait-time-ns-avg",
            "network-io-rate",
            "outgoing-byte-rate",
            "request-rate",
            "response-rate"
          ],
          "resultAlias": "kafka-consumer-metrics",
          "outputWriters": [
            {
              "@class": "com.googlecode.jmxtrans.model.output.InfluxDbWriterFactory",
              "url": "http://influxdb:8086",
              "username": "root",
              "password": "root",
              "database": "influx"
            }
          ]
        },
        {
          "obj": "kafka.producer:client-id=*,type=producer-metrics",
          "attr": [
            "batch-size-avg",
            "incoming-byte-rate",
            "io-time-ns-avg",
            "io-wait-time-ns-avg",
            "network-io-rate",
            "outgoing-byte-rate"
          ],
          "resultAlias": "kafka-producer-metrics",
          "outputWriters": [
            {
              "@class": "com.googlecode.jmxtrans.model.output.InfluxDbWriterFactory",
              "url": "http://influxdb:8086",
              "username": "root",
              "password": "root",
              "database": "influx"
            }
          ]
        },
        {
          "obj": "kafka.streams:client-id=*,type=stream-metrics",
          "attr": [
            "commit-rate",
            "poll-rate",
            "process-rate",
            "punctuate-rate",
            "skipped-records-rate",
            "task-closed-rate",
            "task-created-rate"
          ],
          "resultAlias": "kafka-stream-metrics",
          "outputWriters": [
            {
              "@class": "com.googlecode.jmxtrans.model.output.InfluxDbWriterFactory",
              "url": "http://influxdb:8086",
              "username": "root",
              "password": "root",
              "database": "influx"
            }
          ]
        }
      ]
    }
  ]
}
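This file is a jmxtrans configuration: it polls the listed JMX MBeans on ksql-server and writes the metrics to InfluxDB. Neither jmxtrans nor InfluxDB is defined in this commit, so as a sketch only, a service along these lines could be added to the docker-compose.yml that follows — the jmxtrans/jmxtrans image, its /var/lib/jmxtrans config directory, and the influxdb service it would write to are all assumptions, not part of this commit:

  jmxtrans:
    # Sketch only, not part of this commit: mounts ./config so that
    # jmx_minimal.json is picked up (assumes jmxtrans reads *.json from
    # /var/lib/jmxtrans). An influxdb service reachable at
    # http://influxdb:8086 on the same network would also be needed.
    image: jmxtrans/jmxtrans
    depends_on:
      - ksql-server
    volumes:
      - $PWD/config:/var/lib/jmxtrans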
149 changes: 149 additions & 0 deletions connect-error-handling/docker-compose.yml
@@ -0,0 +1,149 @@
---
version: '2'
services:
  zookeeper:
    image: "confluentinc/cp-zookeeper:5.1.0"
    ports:
      - '18086:18086'
    environment:
      ZOOKEEPER_CLIENT_PORT: 2181
      ZOOKEEPER_TICK_TIME: 2000
      KAFKA_JMX_HOSTNAME: "localhost"
      KAFKA_JMX_PORT: 18086

  kafka:
    # "`-._,-'"`-._,-'"`-._,-'"`-._,-'"`-._,-'"`-._,-'"`-._,-'"`-._,-'"`-._,-
    # An important note about accessing Kafka from clients on other machines:
    # -----------------------------------------------------------------------
    #
    # The config used here exposes port 9092 for _external_ connections to the broker,
    # i.e. those from _outside_ the Docker network. This could be from the host machine
    # running Docker, or maybe further afield if you've got a more complicated setup.
    # If the latter is true, you will need to change the value 'localhost' in
    # KAFKA_ADVERTISED_LISTENERS to one that is resolvable to the Docker host from those
    # remote clients.
    #
    # For connections _internal_ to the Docker network, such as from other services
    # and components, use kafka:29092.
    #
    # See https://rmoff.net/2018/08/02/kafka-listeners-explained/ for details
    # "`-._,-'"`-._,-'"`-._,-'"`-._,-'"`-._,-'"`-._,-'"`-._,-'"`-._,-'"`-._,-
    #
    image: "confluentinc/cp-enterprise-kafka:5.1.0"
    ports:
      - '9092:9092'
      - '18087:18087'
    depends_on:
      - zookeeper
    environment:
      KAFKA_BROKER_ID: 1
      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
      KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
      KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:29092,PLAINTEXT_HOST://localhost:9092
      KAFKA_AUTO_CREATE_TOPICS_ENABLE: "true"
      KAFKA_METRIC_REPORTERS: io.confluent.metrics.reporter.ConfluentMetricsReporter
      KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
      KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 100
      KAFKA_JMX_HOSTNAME: "localhost"
      KAFKA_JMX_PORT: 18087
      CONFLUENT_METRICS_REPORTER_BOOTSTRAP_SERVERS: kafka:29092
      CONFLUENT_METRICS_REPORTER_ZOOKEEPER_CONNECT: zookeeper:2181
      CONFLUENT_METRICS_REPORTER_TOPIC_REPLICAS: 1
      CONFLUENT_METRICS_ENABLE: 'false'
      CONFLUENT_SUPPORT_CUSTOMER_ID: 'anonymous'

  schema-registry:
    image: "confluentinc/cp-schema-registry:5.1.0"
    depends_on:
      - zookeeper
      - kafka
    ports:
      - '8081:8081'
    environment:
      SCHEMA_REGISTRY_HOST_NAME: schema-registry
      SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: zookeeper:2181

  ksql-server:
    image: confluentinc/cp-ksql-server:5.1.0
    ports:
      - "18089:18089"
    depends_on:
      - kafka
      - schema-registry
    environment:
      KSQL_BOOTSTRAP_SERVERS: kafka:29092
      KSQL_LISTENERS: http://0.0.0.0:8088
      KSQL_KSQL_SCHEMA_REGISTRY_URL: http://schema-registry:8081
      KSQL_KSQL_SERVICE_ID: confluent_rmoff_01
      KSQL_JMX_HOSTNAME: "localhost"
      KSQL_JMX_PORT: 18089

  ksql-cli:
    image: confluentinc/cp-ksql-cli:5.1.0
    depends_on:
      - ksql-server
    entrypoint: /bin/sh
    tty: true

  kafka-connect:
    image: confluentinc/cp-kafka-connect:5.1.0
    depends_on:
      - zookeeper
      - kafka
      - schema-registry
    ports:
      - "8083:8083"
      - "18088:18088"
    environment:
      CONNECT_BOOTSTRAP_SERVERS: "kafka:29092"
      CONNECT_REST_PORT: 8083
      CONNECT_GROUP_ID: compose-connect-group
      CONNECT_CONFIG_STORAGE_TOPIC: docker-connect-configs
      CONNECT_OFFSET_STORAGE_TOPIC: docker-connect-offsets
      CONNECT_STATUS_STORAGE_TOPIC: docker-connect-status
      CONNECT_KEY_CONVERTER: io.confluent.connect.avro.AvroConverter
      CONNECT_KEY_CONVERTER_SCHEMA_REGISTRY_URL: 'http://schema-registry:8081'
      CONNECT_VALUE_CONVERTER: io.confluent.connect.avro.AvroConverter
      CONNECT_VALUE_CONVERTER_SCHEMA_REGISTRY_URL: 'http://schema-registry:8081'
      CONNECT_INTERNAL_KEY_CONVERTER: "org.apache.kafka.connect.json.JsonConverter"
      CONNECT_INTERNAL_VALUE_CONVERTER: "org.apache.kafka.connect.json.JsonConverter"
      CONNECT_REST_ADVERTISED_HOST_NAME: "kafka-connect-cp"
      CONNECT_LOG4J_ROOT_LOGLEVEL: "INFO"
      CONNECT_LOG4J_LOGGERS: "org.apache.kafka.connect.runtime.rest=WARN,org.reflections=ERROR"
      CONNECT_CONFIG_STORAGE_REPLICATION_FACTOR: "1"
      CONNECT_OFFSET_STORAGE_REPLICATION_FACTOR: "1"
      CONNECT_STATUS_STORAGE_REPLICATION_FACTOR: "1"
      CONNECT_PLUGIN_PATH: '/usr/share/java'
      # CONNECT_JMX_HOSTNAME: "localhost"
      # CONNECT_JMX_PORT: 18088
      KAFKA_JMX_HOSTNAME: "localhost"
      KAFKA_JMX_PORT: 18088
    volumes:
      - $PWD/data:/data
      # - /Users/Robin/git/kafka-connect-transform-common/target/kafka-connect-target/usr/share/kafka-connect/kafka-connect-transform-common:/usr/share/java/kafka-connect-transform-common


  kafkacat:
    image: confluentinc/cp-kafkacat:latest
    depends_on:
      - kafka
    command:
      - bash
      - -c
      - |
        while [ 1 -eq 1 ]
        do
          for i in {1..20}
          do
            echo '{"foo":"bar'$$i'"}'
            echo '{"foo":"bar'$$i'"}' | kafkacat -b kafka:29092 -P -t test_topic
            sleep 0.5
          done
          for i in {1..5}
          do
            echo '{brokenjson-:"bar'$$i'"}'
            echo '{brokenjson-:"bar'$$i'"}' | kafkacat -b kafka:29092 -P -t test_topic
            sleep 0.5
          done
        done
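
The kafkacat service above continually produces a mix of well-formed and deliberately broken JSON to test_topic, which gives Kafka Connect's error-handling behaviour something to exercise. As a sketch only — the connector name, output file, and dead letter queue topic are illustrative choices, not part of this commit — a sink connector reading the topic with the JSON converter, errors.tolerance=all, and a dead letter queue could be created against the Connect REST API like so:

# Sketch only: a FileStreamSink reading test_topic as schemaless JSON, routing
# records that fail conversion to a dead letter queue instead of failing the task.
# The output file lands in the /data volume mounted on the kafka-connect container.
curl -X POST http://localhost:8083/connectors \
     -H "Content-Type: application/json" \
     -d '{
       "name": "sink_file_error_demo",
       "config": {
         "connector.class": "org.apache.kafka.connect.file.FileStreamSinkConnector",
         "topics": "test_topic",
         "file": "/data/sink_file_error_demo.txt",
         "value.converter": "org.apache.kafka.connect.json.JsonConverter",
         "value.converter.schemas.enable": "false",
         "errors.tolerance": "all",
         "errors.log.enable": "true",
         "errors.log.include.messages": "true",
         "errors.deadletterqueue.topic.name": "dlq_test_topic",
         "errors.deadletterqueue.topic.replication.factor": "1"
       }
     }'

With that in place, the malformed records (the brokenjson lines) should end up on the dead letter queue topic rather than killing the task, and can be inspected with, for example:

docker-compose exec kafkacat kafkacat -b kafka:29092 -C -t dlq_test_topic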
