From 6dac7a7f7fd72aba903ac339f9f6bfcd44897d39 Mon Sep 17 00:00:00 2001
From: dianfu
Date: Wed, 31 Jul 2019 15:05:51 +0800
Subject: [PATCH] [hotfix] [travis] Fix the python travis failure (#9286)

---
 docs/ops/cli.md                                  | 18 ++++++++----------
 docs/ops/cli.zh.md                               | 18 ++++++++----------
 .../flink-bin/bin/pyflink-gateway-server.sh      |  5 ++---
 tools/travis_controller.sh                       |  3 ++-
 4 files changed, 20 insertions(+), 24 deletions(-)

diff --git a/docs/ops/cli.md b/docs/ops/cli.md
index 6d24f2911406b..4e84267b8f4b4 100644
--- a/docs/ops/cli.md
+++ b/docs/ops/cli.md
@@ -100,40 +100,38 @@ These examples about how to submit a job in CLI.
 
 -   Run Python Table program:
 
-        ./bin/flink run -py examples/python/table/batch/word_count.py -j
+        ./bin/flink run -py examples/python/table/batch/word_count.py
 
 -   Run Python Table program with pyFiles:
 
-        ./bin/flink run -py examples/python/table/batch/word_count.py -j \
+        ./bin/flink run -py examples/python/table/batch/word_count.py \
                         -pyfs file:///user.txt,hdfs:///$namenode_address/username.txt
 
 -   Run Python Table program with pyFiles and pyModule:
 
-        ./bin/flink run -pym batch.word_count -pyfs examples/python/table/batch -j
+        ./bin/flink run -pym batch.word_count -pyfs examples/python/table/batch
 
 -   Run Python Table program with parallelism 16:
 
-        ./bin/flink run -p 16 -py examples/python/table/batch/word_count.py -j
+        ./bin/flink run -p 16 -py examples/python/table/batch/word_count.py
 
 -   Run Python Table program with flink log output disabled:
 
-        ./bin/flink run -q -py examples/python/table/batch/word_count.py -j
+        ./bin/flink run -q -py examples/python/table/batch/word_count.py
 
 -   Run Python Table program in detached mode:
 
-        ./bin/flink run -d -py examples/python/table/batch/word_count.py -j
+        ./bin/flink run -d -py examples/python/table/batch/word_count.py
 
 -   Run Python Table program on a specific JobManager:
 
         ./bin/flink run -m myJMHost:8081 \
-                        -py examples/python/table/batch/word_count.py \
-                        -j
+                        -py examples/python/table/batch/word_count.py
 
 -   Run Python Table program using a [per-job YARN cluster]({{site.baseurl}}/ops/deployment/yarn_setup.html#run-a-single-flink-job-on-hadoop-yarn) with 2 TaskManagers:
 
         ./bin/flink run -m yarn-cluster -yn 2 \
-                        -py examples/python/table/batch/word_count.py \
-                        -j
+                        -py examples/python/table/batch/word_count.py
 
 ### Job Management Examples
 
diff --git a/docs/ops/cli.zh.md b/docs/ops/cli.zh.md
index 8370fd8aee215..b8cc94aad9692 100644
--- a/docs/ops/cli.zh.md
+++ b/docs/ops/cli.zh.md
@@ -100,40 +100,38 @@ available.
 
 -   提交一个Python Table的作业:
 
-        ./bin/flink run -py WordCount.py -j
+        ./bin/flink run -py WordCount.py
 
 -   提交一个有多个依赖的Python Table的作业:
 
-        ./bin/flink run -py examples/python/table/batch/word_count.py -j \
+        ./bin/flink run -py examples/python/table/batch/word_count.py \
                         -pyfs file:///user.txt,hdfs:///$namenode_address/username.txt
 
 -   提交一个有多个依赖的Python Table的作业,Python作业的主入口通过pym选项指定:
 
-        ./bin/flink run -pym batch.word_count -pyfs examples/python/table/batch -j
+        ./bin/flink run -pym batch.word_count -pyfs examples/python/table/batch
 
 -   提交一个指定并发度为16的Python Table的作业:
 
-        ./bin/flink run -p 16 -py examples/python/table/batch/word_count.py -j
+        ./bin/flink run -p 16 -py examples/python/table/batch/word_count.py
 
 -   提交一个关闭flink日志输出的Python Table的作业:
 
-        ./bin/flink run -q -py examples/python/table/batch/word_count.py -j
+        ./bin/flink run -q -py examples/python/table/batch/word_count.py
 
 -   提交一个运行在detached模式下的Python Table的作业:
 
-        ./bin/flink run -d -py examples/python/table/batch/word_count.py -j
+        ./bin/flink run -d -py examples/python/table/batch/word_count.py
 
 -   提交一个运行在指定JobManager上的Python Table的作业:
 
         ./bin/flink run -m myJMHost:8081 \
-                        -py examples/python/table/batch/word_count.py \
-                        -j
+                        -py examples/python/table/batch/word_count.py
 
 -   提交一个运行在有两个TaskManager的[per-job YARN cluster]({{site.baseurl}}/ops/deployment/yarn_setup.html#run-a-single-flink-job-on-hadoop-yarn)的Python Table的作业:
 
         ./bin/flink run -m yarn-cluster -yn 2 \
-                        -py examples/python/table/batch/word_count.py \
-                        -j
+                        -py examples/python/table/batch/word_count.py
 
diff --git a/flink-dist/src/main/flink-bin/bin/pyflink-gateway-server.sh b/flink-dist/src/main/flink-bin/bin/pyflink-gateway-server.sh
index 16fe6b3f95b39..4cd642cea3427 100644
--- a/flink-dist/src/main/flink-bin/bin/pyflink-gateway-server.sh
+++ b/flink-dist/src/main/flink-bin/bin/pyflink-gateway-server.sh
@@ -50,7 +50,6 @@ done
 log=$FLINK_LOG_DIR/flink-$FLINK_IDENT_STRING-python-$HOSTNAME.log
 log_setting=(-Dlog.file="$log" -Dlog4j.configuration=file:"$FLINK_CONF_DIR"/log4j-cli.properties -Dlogback.configurationFile=file:"$FLINK_CONF_DIR"/logback.xml)
 
-TABLE_JAR_PATH=`echo "$FLINK_HOME"/lib/flink-table*.jar`
 PYTHON_JAR_PATH=`echo "$FLINK_HOME"/opt/flink-python*.jar`
 
 FLINK_TEST_CLASSPATH=""
@@ -94,8 +93,8 @@ fi
 ARGS_COUNT=${#ARGS[@]}
 if [[ ${ARGS[0]} == "local" ]]; then
     ARGS=("${ARGS[@]:1:$ARGS_COUNT}")
-    exec $JAVA_RUN $JVM_ARGS "${log_setting[@]}" -cp ${FLINK_CLASSPATH}:${TABLE_JAR_PATH}:${PYTHON_JAR_PATH}:${FLINK_TEST_CLASSPATH} ${DRIVER} ${ARGS[@]}
+    exec $JAVA_RUN $JVM_ARGS "${log_setting[@]}" -cp ${FLINK_CLASSPATH}:${PYTHON_JAR_PATH}:${FLINK_TEST_CLASSPATH} ${DRIVER} ${ARGS[@]}
 else
     ARGS=("${ARGS[@]:1:$ARGS_COUNT}")
-    exec "$FLINK_BIN_DIR"/flink run ${ARGS[@]} -c ${DRIVER} -j ${TABLE_JAR_PATH}
+    exec "$FLINK_BIN_DIR"/flink run ${ARGS[@]} -c ${DRIVER}
 fi
diff --git a/tools/travis_controller.sh b/tools/travis_controller.sh
index 3f3f5d86c68fb..256235b5a2985 100755
--- a/tools/travis_controller.sh
+++ b/tools/travis_controller.sh
@@ -146,7 +146,8 @@ if [ $STAGE == "$STAGE_COMPILE" ]; then
         ! -path "$CACHE_FLINK_DIR/flink-runtime/target/flink-runtime*tests.jar" \
         ! -path "$CACHE_FLINK_DIR/flink-streaming-java/target/flink-streaming-java*tests.jar" \
         ! -path "$CACHE_FLINK_DIR/flink-dist/target/flink-*-bin/flink-*/lib/flink-dist*.jar" \
-        ! -path "$CACHE_FLINK_DIR/flink-dist/target/flink-*-bin/flink-*/lib/flink-table*.jar" \
+        ! -path "$CACHE_FLINK_DIR/flink-dist/target/flink-*-bin/flink-*/lib/flink-table_*.jar" \
-path "$CACHE_FLINK_DIR/flink-dist/target/flink-*-bin/flink-*/lib/flink-table-blink*.jar" \ ! -path "$CACHE_FLINK_DIR/flink-dist/target/flink-*-bin/flink-*/opt/flink-python*.jar" \ ! -path "$CACHE_FLINK_DIR/flink-connectors/flink-connector-elasticsearch-base/target/flink-*.jar" \ ! -path "$CACHE_FLINK_DIR/flink-connectors/flink-connector-kafka-base/target/flink-*.jar" \