forked from confluentinc/demo-scene
-
Notifications
You must be signed in to change notification settings - Fork 0
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Fix bug in Docker Compose in which HTTP status codes aren't correctly specified
- Loading branch information
Showing
11 changed files
with
285 additions
and
39 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,246 @@ | ||
---
version: '2'
services:
  # Single-node ZooKeeper: broker coordination for the Kafka broker below.
  zookeeper:
    image: confluentinc/cp-zookeeper:5.3.0
    container_name: zookeeper
    environment:
      ZOOKEEPER_CLIENT_PORT: 2181  # port clients (the broker) connect to inside the network
      ZOOKEEPER_TICK_TIME: 2000    # ms; ZooKeeper's base time unit for session timeouts

kafka: | ||
image: confluentinc/cp-enterprise-kafka:5.3.0 | ||
container_name: kafka | ||
depends_on: | ||
- zookeeper | ||
ports: | ||
# "`-._,-'"`-._,-'"`-._,-'"`-._,-'"`-._,-'"`-._,-'"`-._,-'"`-._,-'"`-._,- | ||
# An important note about accessing Kafka from clients on other machines: | ||
# ----------------------------------------------------------------------- | ||
# | ||
# The config used here exposes port 9092 for _external_ connections to the broker | ||
# i.e. those from _outside_ the docker network. This could be from the host machine | ||
# running docker, or maybe further afield if you've got a more complicated setup. | ||
# If the latter is true, you will need to change the value 'localhost' in | ||
# KAFKA_ADVERTISED_LISTENERS to one that is resolvable to the docker host from those | ||
# remote clients | ||
# | ||
# For connections _internal_ to the docker network, such as from other services | ||
# and components, use kafka:29092. | ||
# | ||
# See https://rmoff.net/2018/08/02/kafka-listeners-explained/ for details | ||
# "`-._,-'"`-._,-'"`-._,-'"`-._,-'"`-._,-'"`-._,-'"`-._,-'"`-._,-'"`-._,- | ||
# | ||
- 9092:9092 | ||
environment: | ||
KAFKA_BROKER_ID: 1 | ||
KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 | ||
KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT | ||
KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT | ||
KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:29092,PLAINTEXT_HOST://localhost:9092 | ||
KAFKA_METRIC_REPORTERS: io.confluent.metrics.reporter.ConfluentMetricsReporter | ||
KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 | ||
KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 100 | ||
KAFKA_AUTO_CREATE_TOPICS_ENABLE: "true" | ||
CONFLUENT_METRICS_REPORTER_BOOTSTRAP_SERVERS: kafka:29092 | ||
CONFLUENT_METRICS_REPORTER_ZOOKEEPER_CONNECT: zookeeper:2181 | ||
CONFLUENT_METRICS_REPORTER_TOPIC_REPLICAS: 1 | ||
CONFLUENT_METRICS_ENABLE: 'true' | ||
CONFLUENT_SUPPORT_CUSTOMER_ID: 'anonymous' | ||
|
||
schema-registry: | ||
image: confluentinc/cp-schema-registry:5.3.0 | ||
container_name: schema-registry | ||
ports: | ||
- 8081:8081 | ||
depends_on: | ||
- zookeeper | ||
- kafka | ||
environment: | ||
SCHEMA_REGISTRY_HOST_NAME: schema-registry | ||
SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: zookeeper:2181 | ||
|
||
ksql-server: | ||
image: confluentinc/cp-ksql-server:5.3.0 | ||
container_name: ksql-server | ||
ports: | ||
- 8088:8088 | ||
depends_on: | ||
- kafka | ||
environment: | ||
KSQL_BOOTSTRAP_SERVERS: kafka:29092 | ||
KSQL_LISTENERS: http://0.0.0.0:8088 | ||
KSQL_KSQL_SERVICE_ID: confluent_rmoff_01 | ||
KSQL_CUB_KAFKA_TIMEOUT: 300 | ||
KSQL_KSQL_SCHEMA_REGISTRY_URL: http://schema-registry:8081 | ||
# -v-v-v-v-v-v-v-v-v-v-v-v-v-v-v-v-v-v-v-v-v-v-v-v-v-v-v-v-v-v-v-v-v-v-v | ||
# Useful settings for development/laptop use - modify as needed for Prod | ||
KSQL_KSQL_COMMIT_INTERVAL_MS: 2000 | ||
KSQL_KSQL_SINK_PARTITIONS: 1 | ||
KSQL_KSQL_CACHE_MAX_BYTES_BUFFERING: 10000000 | ||
KSQL_KSQL_STREAMS_AUTO_OFFSET_RESET: earliest | ||
# -v-v-v-v-v-v-v-v-v-v-v-v-v-v-v-v-v-v-v-v-v-v-v-v-v-v-v-v-v-v-v-v-v-v-v | ||
# Producer Confluent Monitoring Interceptors for Control Center streams monitoring | ||
KSQL_PRODUCER_INTERCEPTOR_CLASSES: "io.confluent.monitoring.clients.interceptor.MonitoringProducerInterceptor" | ||
KSQL_CONSUMER_INTERCEPTOR_CLASSES: "io.confluent.monitoring.clients.interceptor.MonitoringConsumerInterceptor" | ||
|
||
ksql-cli: | ||
image: confluentinc/cp-ksql-cli:5.3.0 | ||
container_name: ksql-cli | ||
depends_on: | ||
- ksql-server | ||
entrypoint: /bin/sh | ||
tty: true | ||
|
||
kafka-connect-01: | ||
image: cp-connect-base-local:latest | ||
# image: confluentinc/cp-kafka-connect:5.3.0 | ||
container_name: kafka-connect-01 | ||
depends_on: | ||
- zookeeper | ||
- kafka | ||
- schema-registry | ||
ports: | ||
- 8083:8083 | ||
environment: | ||
CONNECT_BOOTSTRAP_SERVERS: "kafka:29092" | ||
CONNECT_REST_PORT: 8083 | ||
CONNECT_GROUP_ID: compose-connect-group | ||
CONNECT_CONFIG_STORAGE_TOPIC: _connect-configs | ||
CONNECT_OFFSET_STORAGE_TOPIC: _connect-offsets | ||
CONNECT_STATUS_STORAGE_TOPIC: _connect-status | ||
CONNECT_CONNECTOR_CLIENT_CONFIG_OVERRIDE_POLICY: 'All' | ||
CONNECT_KEY_CONVERTER: io.confluent.connect.avro.AvroConverter | ||
CONNECT_KEY_CONVERTER_SCHEMA_REGISTRY_URL: 'http://schema-registry:8081' | ||
CONNECT_VALUE_CONVERTER: io.confluent.connect.avro.AvroConverter | ||
CONNECT_VALUE_CONVERTER_SCHEMA_REGISTRY_URL: 'http://schema-registry:8081' | ||
CONNECT_INTERNAL_KEY_CONVERTER: "org.apache.kafka.connect.json.JsonConverter" | ||
CONNECT_INTERNAL_VALUE_CONVERTER: "org.apache.kafka.connect.json.JsonConverter" | ||
CONNECT_REST_ADVERTISED_HOST_NAME: "kafka-connect-01" | ||
CONNECT_LOG4J_ROOT_LOGLEVEL: "INFO" | ||
CONNECT_LOG4J_LOGGERS: "org.apache.kafka.connect.runtime.rest=WARN,org.reflections=ERROR" | ||
CONNECT_LOG4J_APPENDER_STDOUT_LAYOUT_CONVERSIONPATTERN: "[%d] %p %X{connector.context}%m (%c:%L)%n" | ||
CONNECT_CONFIG_STORAGE_REPLICATION_FACTOR: "1" | ||
CONNECT_OFFSET_STORAGE_REPLICATION_FACTOR: "1" | ||
CONNECT_STATUS_STORAGE_REPLICATION_FACTOR: "1" | ||
CONNECT_PLUGIN_PATH: '/usr/share/java,/usr/share/confluent-hub-components/' | ||
# Interceptor config | ||
CONNECT_PRODUCER_INTERCEPTOR_CLASSES: "io.confluent.monitoring.clients.interceptor.MonitoringProducerInterceptor" | ||
CONNECT_CONSUMER_INTERCEPTOR_CLASSES: "io.confluent.monitoring.clients.interceptor.MonitoringConsumerInterceptor" | ||
CLASSPATH: /usr/share/java/monitoring-interceptors/monitoring-interceptors-5.3.0.jar | ||
volumes: | ||
- ${PWD}:/data | ||
# In the command section, $ are replaced with $$ to avoid the error 'Invalid interpolation format for "command" option' | ||
command: | ||
- bash | ||
- -c | ||
- | | ||
echo "Installing datagen connector" | ||
confluent-hub install --no-prompt confluentinc/kafka-connect-datagen:0.1.2 | ||
# | ||
if [[ $$? -ne 0 ]]; then | ||
echo -e '\n\n\t--> Connector installation failed. Aborting. <--\n\n' | ||
exit 1 | ||
fi | ||
# | ||
echo "Launching Kafka Connect worker" | ||
/etc/confluent/docker/run & | ||
sleep infinity | ||
kafka-connect-v2.2: | ||
image: confluentinc/cp-kafka-connect:5.2.2 | ||
container_name: kafka-connect-v2.2 | ||
depends_on: | ||
- zookeeper | ||
- kafka | ||
- schema-registry | ||
ports: | ||
- 18083:18083 | ||
environment: | ||
CONNECT_BOOTSTRAP_SERVERS: "kafka:29092" | ||
CONNECT_REST_PORT: 18083 | ||
CONNECT_GROUP_ID: compose-connect-group-v2.2 | ||
CONNECT_CONFIG_STORAGE_TOPIC: _connect-v2.2-configs | ||
CONNECT_OFFSET_STORAGE_TOPIC: _connect-v2.2-offsets | ||
CONNECT_STATUS_STORAGE_TOPIC: _connect-v2.2-status | ||
CONNECT_KEY_CONVERTER: io.confluent.connect.avro.AvroConverter | ||
CONNECT_KEY_CONVERTER_SCHEMA_REGISTRY_URL: 'http://schema-registry:8081' | ||
CONNECT_VALUE_CONVERTER: io.confluent.connect.avro.AvroConverter | ||
CONNECT_VALUE_CONVERTER_SCHEMA_REGISTRY_URL: 'http://schema-registry:8081' | ||
CONNECT_INTERNAL_KEY_CONVERTER: "org.apache.kafka.connect.json.JsonConverter" | ||
CONNECT_INTERNAL_VALUE_CONVERTER: "org.apache.kafka.connect.json.JsonConverter" | ||
CONNECT_REST_ADVERTISED_HOST_NAME: "kafka-connect-v2.2" | ||
CONNECT_LOG4J_ROOT_LOGLEVEL: "INFO" | ||
CONNECT_LOG4J_LOGGERS: "org.apache.kafka.connect.runtime.rest=WARN,org.reflections=ERROR" | ||
CONNECT_CONFIG_STORAGE_REPLICATION_FACTOR: "1" | ||
CONNECT_OFFSET_STORAGE_REPLICATION_FACTOR: "1" | ||
CONNECT_STATUS_STORAGE_REPLICATION_FACTOR: "1" | ||
CONNECT_PLUGIN_PATH: '/usr/share/java,/usr/share/confluent-hub-components/' | ||
# Interceptor config | ||
CONNECT_PRODUCER_INTERCEPTOR_CLASSES: "io.confluent.monitoring.clients.interceptor.MonitoringProducerInterceptor" | ||
CONNECT_CONSUMER_INTERCEPTOR_CLASSES: "io.confluent.monitoring.clients.interceptor.MonitoringConsumerInterceptor" | ||
CLASSPATH: /usr/share/java/monitoring-interceptors/monitoring-interceptors-5.2.2.jar | ||
volumes: | ||
- ${PWD}:/data | ||
# In the command section, $ are replaced with $$ to avoid the error 'Invalid interpolation format for "command" option' | ||
command: | ||
- bash | ||
- -c | ||
- | | ||
echo "Installing datagen connector" | ||
confluent-hub install --no-prompt confluentinc/kafka-connect-datagen:0.1.2 | ||
# | ||
if [[ $$? -ne 0 ]]; then | ||
echo -e '\n\n\t--> Connector installation failed. Aborting. <--\n\n' | ||
exit 1 | ||
fi | ||
# | ||
echo "Launching Kafka Connect worker" | ||
/etc/confluent/docker/run & | ||
sleep infinity | ||
elasticsearch: | ||
image: docker.elastic.co/elasticsearch/elasticsearch:6.7.0 | ||
container_name: elasticsearch | ||
ports: | ||
- 9200:9200 | ||
environment: | ||
xpack.security.enabled: "false" | ||
ES_JAVA_OPTS: "-Xms1g -Xmx1g" | ||
discovery.type: "single-node" | ||
command: | ||
- bash | ||
- -c | ||
- | | ||
/usr/local/bin/docker-entrypoint.sh & | ||
echo "Waiting for Elasticsearch to start ⏳" | ||
while [ $$(curl -s -o /dev/null -w %{http_code} http://localhost:9200/) -ne 200 ] ; do | ||
echo -e $$(date) " Elasticsearch listener HTTP state: " $$(curl -s -o /dev/null -w %{http_code} http://localhost:9200/) " (waiting for 200)" | ||
sleep 5 | ||
done | ||
echo -e "\n--\n+> Creating Elasticsearch dynamic mapping" | ||
curl -XPUT "http://localhost:9200/_template/kafkaconnect/" -H 'Content-Type: application/json' -d' { "index_patterns": "*", "settings": { "number_of_shards": 1, "number_of_replicas": 0 }, "mappings": { "_default_": { "dynamic_templates": [ { "dates": { "match": "*TIMESTAMP", "mapping": { "type": "date" } } }, { "non_analysed_string_template": { "match": "*", "match_mapping_type": "string", "mapping": { "type": "keyword" } } } ] } } }' | ||
sleep infinity | ||
kibana: | ||
image: docker.elastic.co/kibana/kibana:6.7.0 | ||
container_name: kibana | ||
depends_on: | ||
- elasticsearch | ||
ports: | ||
- 5601:5601 | ||
environment: | ||
xpack.security.enabled: "false" | ||
discovery.type: "single-node" | ||
|
||
postgres: | ||
# *-----------------------------* | ||
# To connect to the DB: | ||
# docker-compose exec postgres bash -c 'psql -U $POSTGRES_USER $POSTGRES_DB' | ||
# *-----------------------------* | ||
image: postgres:11 | ||
environment: | ||
- POSTGRES_USER=postgres | ||
- POSTGRES_PASSWORD=postgres | ||
volumes: | ||
- ./data/postgres:/docker-entrypoint-initdb.d/ |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Oops, something went wrong.