DEVX-2719: update Confluent CLI name and syntax for CP 7.1.0 (#1077)
ybyzek authored Feb 24, 2022
1 parent aff29f7 commit 936bedb
Showing 15 changed files with 232 additions and 239 deletions.
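The rename is mechanical: every invocation of the transitional confluent-v1 binary becomes confluent, with subcommands and flags unchanged. A representative before/after, taken from the diffs below:

    confluent-v1 local services start   # before: transitional binary name
    confluent local services start      # after: unified CLI name for CP 7.1.0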
12 changes: 6 additions & 6 deletions cloud-etl/read-data.sh
@@ -46,16 +46,16 @@ if check_confluent_binary; then
check_running_cp ${CONFLUENT} || exit

echo -e "\nData from Kafka topic $KAFKA_TOPIC_NAME_IN:"
echo -e "confluent-v1 local services kafka consume $KAFKA_TOPIC_NAME_IN --cloud --config $CONFIG_FILE --from-beginning --property print.key=true --max-messages 10"
export KAFKA_LOG4J_OPTS="-Dlog4j.rootLogger=DEBUG,stdout -Dlog4j.logger.kafka=DEBUG,stdout" && timeout 10 confluent-v1 local services kafka consume $KAFKA_TOPIC_NAME_IN --cloud --config $CONFIG_FILE --from-beginning --property print.key=true --max-messages 10 2>/dev/null
echo -e "confluent local services kafka consume $KAFKA_TOPIC_NAME_IN --cloud --config $CONFIG_FILE --from-beginning --property print.key=true --max-messages 10"
export KAFKA_LOG4J_OPTS="-Dlog4j.rootLogger=DEBUG,stdout -Dlog4j.logger.kafka=DEBUG,stdout" && timeout 10 confluent local services kafka consume $KAFKA_TOPIC_NAME_IN --cloud --config $CONFIG_FILE --from-beginning --property print.key=true --max-messages 10 2>/dev/null

echo -e "\nData from Kafka topic $KAFKA_TOPIC_NAME_OUT2:"
echo -e "confluent-v1 local services kafka consume $KAFKA_TOPIC_NAME_OUT2 --cloud --config $CONFIG_FILE --from-beginning --property print.key=true --max-messages 10"
export KAFKA_LOG4J_OPTS="-Dlog4j.rootLogger=DEBUG,stdout -Dlog4j.logger.kafka=DEBUG,stdout" && timeout 10 confluent-v1 local services kafka consume $KAFKA_TOPIC_NAME_OUT2 --cloud --config $CONFIG_FILE --from-beginning --property print.key=true --max-messages 10 2>/dev/null
echo -e "confluent local services kafka consume $KAFKA_TOPIC_NAME_OUT2 --cloud --config $CONFIG_FILE --from-beginning --property print.key=true --max-messages 10"
export KAFKA_LOG4J_OPTS="-Dlog4j.rootLogger=DEBUG,stdout -Dlog4j.logger.kafka=DEBUG,stdout" && timeout 10 confluent local services kafka consume $KAFKA_TOPIC_NAME_OUT2 --cloud --config $CONFIG_FILE --from-beginning --property print.key=true --max-messages 10 2>/dev/null

echo -e "\nData from Kafka topic $KAFKA_TOPIC_NAME_OUT1:"
echo -e "confluent-v1 local services kafka consume $KAFKA_TOPIC_NAME_OUT1 --cloud --config $CONFIG_FILE --from-beginning --property print.key=true --value-format avro --property basic.auth.credentials.source=${BASIC_AUTH_CREDENTIALS_SOURCE} --property schema.registry.basic.auth.user.info=${SCHEMA_REGISTRY_BASIC_AUTH_USER_INFO} --property schema.registry.url=${SCHEMA_REGISTRY_URL} --property key.deserializer=org.apache.kafka.common.serialization.StringDeserializer --max-messages 10"
export KAFKA_LOG4J_OPTS="-Dlog4j.rootLogger=DEBUG,stdout -Dlog4j.logger.kafka=DEBUG,stdout" && timeout 10 confluent-v1 local services kafka consume $KAFKA_TOPIC_NAME_OUT1 --cloud --config $CONFIG_FILE --from-beginning --property print.key=true --value-format avro --property basic.auth.credentials.source=${BASIC_AUTH_CREDENTIALS_SOURCE} --property schema.registry.basic.auth.user.info=${SCHEMA_REGISTRY_BASIC_AUTH_USER_INFO} --property schema.registry.url=${SCHEMA_REGISTRY_URL} --property key.deserializer=org.apache.kafka.common.serialization.StringDeserializer --max-messages 10 2>/dev/null
echo -e "confluent local services kafka consume $KAFKA_TOPIC_NAME_OUT1 --cloud --config $CONFIG_FILE --from-beginning --property print.key=true --value-format avro --property basic.auth.credentials.source=${BASIC_AUTH_CREDENTIALS_SOURCE} --property schema.registry.basic.auth.user.info=${SCHEMA_REGISTRY_BASIC_AUTH_USER_INFO} --property schema.registry.url=${SCHEMA_REGISTRY_URL} --property key.deserializer=org.apache.kafka.common.serialization.StringDeserializer --max-messages 10"
export KAFKA_LOG4J_OPTS="-Dlog4j.rootLogger=DEBUG,stdout -Dlog4j.logger.kafka=DEBUG,stdout" && timeout 10 confluent local services kafka consume $KAFKA_TOPIC_NAME_OUT1 --cloud --config $CONFIG_FILE --from-beginning --property print.key=true --value-format avro --property basic.auth.credentials.source=${BASIC_AUTH_CREDENTIALS_SOURCE} --property schema.registry.basic.auth.user.info=${SCHEMA_REGISTRY_BASIC_AUTH_USER_INFO} --property schema.registry.url=${SCHEMA_REGISTRY_URL} --property key.deserializer=org.apache.kafka.common.serialization.StringDeserializer --max-messages 10 2>/dev/null

else

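The three consume invocations above share one pattern: KAFKA_LOG4J_OPTS routes the client's log4j output to stdout at DEBUG level, stderr is discarded, and the run is bounded by both timeout 10 and --max-messages 10 so the script cannot hang on an idle topic. A minimal sketch of the same pattern under the new CLI name, with my-topic as a hypothetical placeholder and $CONFIG_FILE assumed to point at a Confluent Cloud config:

    export KAFKA_LOG4J_OPTS="-Dlog4j.rootLogger=DEBUG,stdout -Dlog4j.logger.kafka=DEBUG,stdout" && \
      timeout 10 confluent local services kafka consume my-topic \
        --cloud --config "$CONFIG_FILE" \
        --from-beginning \
        --property print.key=true \
        --max-messages 10 2>/dev/null   # my-topic is hypothetical; flags as in read-data.sh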
2 changes: 1 addition & 1 deletion connect-streams-pipeline/docs/index.rst
@@ -130,7 +130,7 @@ Run example
Example 1: Kafka console producer -> Key:String and Value:String
----------------------------------------------------------------

-- Command line ``confluent-v1 local services kafka produce`` produces ``String`` keys and ``String`` values to a Kafka topic.
+- Command line ``confluent local services kafka produce`` produces ``String`` keys and ``String`` values to a Kafka topic.
- :devx-examples:`Client application|connect-streams-pipeline/src/main/java/io/confluent/examples/connectandstreams/consoleproducer/StreamsIngest.java` reads from the Kafka topic using ``Serdes.String()`` for both key and value.

.. figure:: images/example_1.jpg
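For Example 1, the produce step pairs the renamed CLI with key parsing so each input line becomes a String key and String value; a minimal sketch mirroring start.sh later in this commit, with $TOPIC and the input file assumed to exist:

    cat /usr/local/lib/table.locations | \
      confluent local services kafka produce $TOPIC \
        --property parse.key=true \
        --property key.separator='|'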
32 changes: 16 additions & 16 deletions connect-streams-pipeline/start.sh
@@ -15,7 +15,7 @@ mvn clean compile

echo "auto.offset.reset=earliest" >> $CONFLUENT_HOME/etc/ksqldb/ksql-server.properties
confluent-hub install --no-prompt confluentinc/kafka-connect-jdbc:$KAFKA_CONNECT_JDBC_VERSION
-confluent-v1 local services start
+confluent local services start

# Create the SQL table
TABLE_LOCATIONS=/usr/local/lib/table.locations
@@ -30,12 +30,12 @@ sleep 2

# Write the contents of the file TABLE_LOCATIONS to a Topic, where the id is the message key and the name and sale are the message value.
cat $TABLE_LOCATIONS | \
-confluent-v1 local services kafka produce $TOPIC \
+confluent local services kafka produce $TOPIC \
--property parse.key=true \
--property key.separator='|' &>/dev/null

# Run the Consumer to print the key as well as the value from the Topic
-confluent-v1 local services kafka consume $TOPIC \
+confluent local services kafka consume $TOPIC \
--from-beginning \
--property print.key=true \
--max-messages 10
@@ -51,11 +51,11 @@ echo -e "\n========== $PACKAGE: Example 2: JDBC source connector with Single Mes
sleep 2

# Run source connector
-confluent-v1 local services connect connector unload $PACKAGE &>/dev/null
-confluent-v1 local services connect connector config $PACKAGE --config ./$PACKAGE-connector.properties &>/dev/null
+confluent local services connect connector unload $PACKAGE &>/dev/null
+confluent local services connect connector config $PACKAGE --config ./$PACKAGE-connector.properties &>/dev/null

# Run the Consumer to print the key as well as the value from the Topic
-confluent-v1 local services kafka consume $TOPIC \
+confluent local services kafka consume $TOPIC \
--from-beginning \
--property print.key=true \
--key-deserializer org.apache.kafka.common.serialization.LongDeserializer \
@@ -72,11 +72,11 @@ echo -e "\n========== $PACKAGE: Example 3: JDBC source connector with SpecificAv
sleep 2

# Run source connector
-confluent-v1 local services connect connector unload $PACKAGE &>/dev/null
-confluent-v1 local services connect connector config $PACKAGE --config ./$PACKAGE-connector.properties &>/dev/null
+confluent local services connect connector unload $PACKAGE &>/dev/null
+confluent local services connect connector config $PACKAGE --config ./$PACKAGE-connector.properties &>/dev/null

# Run the Consumer to print the key as well as the value from the Topic
-confluent-v1 local services kafka consume $TOPIC \
+confluent local services kafka consume $TOPIC \
--value-format avro \
--from-beginning \
--property print.key=true \
@@ -93,11 +93,11 @@ echo -e "\n========== $PACKAGE: Example 4: JDBC source connector with GenericAvr
sleep 2

# Run source connector
-confluent-v1 local services connect connector unload $PACKAGE &>/dev/null
-confluent-v1 local services connect connector config $PACKAGE --config ./$PACKAGE-connector.properties &>/dev/null
+confluent local services connect connector unload $PACKAGE &>/dev/null
+confluent local services connect connector config $PACKAGE --config ./$PACKAGE-connector.properties &>/dev/null

# Run the Consumer to print the key as well as the value from the Topic
-confluent-v1 local services kafka consume $TOPIC \
+confluent local services kafka consume $TOPIC \
--value-format avro \
--from-beginning \
--property print.key=true \
@@ -119,7 +119,7 @@ timeout 20s mvn -q exec:java -Dexec.mainClass=io.confluent.examples.connectandst
curl -X GET http://localhost:8081/subjects/$TOPIC-value/versions/1

# Run the Consumer to print the key as well as the value from the Topic
-confluent-v1 local services kafka consume $TOPIC \
+confluent local services kafka consume $TOPIC \
--value-format avro \
--key-deserializer org.apache.kafka.common.serialization.LongDeserializer \
--from-beginning \
@@ -137,11 +137,11 @@ echo -e "\n========== $PACKAGE: Example 6: JDBC source connector with Avro to KS
sleep 2

# Run source connector
-confluent-v1 local services connect connector unload $PACKAGE &>/dev/null
-confluent-v1 local services connect connector config $PACKAGE --config ./$PACKAGE-connector.properties &>/dev/null
+confluent local services connect connector unload $PACKAGE &>/dev/null
+confluent local services connect connector config $PACKAGE --config ./$PACKAGE-connector.properties &>/dev/null

# Run the Consumer to print the key as well as the value from the Topic
-confluent-v1 local services kafka consume $TOPIC \
+confluent local services kafka consume $TOPIC \
--value-format avro \
--from-beginning \
--property print.key=true \
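Every example in this script loads its connector the same way: unload any prior instance, then register the configuration from a properties file, silencing output in both steps. A condensed sketch of that step under the new CLI name, with $PACKAGE standing in for the example name as in the script:

    confluent local services connect connector unload "$PACKAGE" &>/dev/null
    confluent local services connect connector config "$PACKAGE" \
      --config "./$PACKAGE-connector.properties" &>/dev/null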
2 changes: 1 addition & 1 deletion connect-streams-pipeline/stop.sh
@@ -6,6 +6,6 @@ source ../utils/helper.sh
check_env || exit 1

jps | grep Launcher | awk '{print $1;}' | xargs kill -9
-confluent-v1 local destroy
+confluent local destroy

rm -fr /tmp/kafka-streams
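Both stop scripts in this commit share the same teardown shape: kill the demo's JVM processes by name, then wipe local Confluent Platform state. A condensed sketch, assuming jps is on the PATH:

    # find the demo JVMs by name and force-kill them
    jps | grep Launcher | awk '{print $1;}' | xargs kill -9
    # tear down all locally running CP services and their data
    confluent local destroy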
12 changes: 6 additions & 6 deletions cp-quickstart/start.sh
@@ -18,19 +18,19 @@ sleep 1
./stop.sh

confluent-hub install --no-prompt confluentinc/kafka-connect-datagen:$KAFKA_CONNECT_DATAGEN_VERSION
-confluent-v1 local services start
+confluent local services start
sleep 10

if check_cp; then
-confluent-v1 local services connect connector config datagen-pageviews --config connectors/datagen_pageviews.config
-confluent-v1 local services connect connector config datagen-users --config connectors/datagen_users.config
+confluent local services connect connector config datagen-pageviews --config connectors/datagen_pageviews.config
+confluent local services connect connector config datagen-users --config connectors/datagen_users.config
else
-confluent-v1 local services connect connector config datagen-pageviews --config connectors/datagen_pageviews_oss.config
-confluent-v1 local services connect connector config datagen-users --config connectors/datagen_users_oss.config
+confluent local services connect connector config datagen-pageviews --config connectors/datagen_pageviews_oss.config
+confluent local services connect connector config datagen-users --config connectors/datagen_users_oss.config
fi
sleep 20

-confluent-v1 local services connect connector status
+confluent local services connect connector status

ksql http://localhost:8088 <<EOF
run script 'statements.sql';
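The quickstart finishes by piping statements to the ksqlDB CLI through a here-document; a minimal sketch of that step, assuming statements.sql is in the working directory and ksqlDB Server is listening on its default port:

    ksql http://localhost:8088 <<EOF
    run script 'statements.sql';
    EOF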
2 changes: 1 addition & 1 deletion cp-quickstart/stop.sh
@@ -6,4 +6,4 @@ source ../utils/helper.sh
check_env || exit 1

jps | grep DataGen | awk '{print $1;}' | xargs kill -9
-confluent-v1 local destroy
+confluent local destroy