Skip to content

Commit

Permalink
[FLINK-17375] Refactor travis_watchdog.sh into separate ci/ and azure…
Browse files Browse the repository at this point in the history
…-pipelines/ scripts.

The guiding principle in this refactoring was to put everything generic (independent
of the concrete CI system, such as Travis or Azure) into tools/ci/*,
and the scripts specific to a CI system (currently Azure) into tools/azure-pipelines/*.
  • Loading branch information
rmetzger committed May 26, 2020
1 parent 75cfab4 commit a22b130
Show file tree
Hide file tree
Showing 21 changed files with 523 additions and 641 deletions.
2 changes: 1 addition & 1 deletion .github/PULL_REQUEST_TEMPLATE.md
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@
- Fill out the template below to describe the changes contributed by the pull request. That will give reviewers the context they need to do the review.
- Make sure that the change passes the automated tests, i.e., `mvn clean verify` passes. You can set up Travis CI to do that following [this guide](https://flink.apache.org/contributing/contribute-code.html#open-a-pull-request).
- Make sure that the change passes the automated tests, i.e., `mvn clean verify` passes. You can set up Azure Pipelines CI to do that following [this guide](https://cwiki.apache.org/confluence/display/FLINK/Azure+Pipelines#AzurePipelines-Tutorial:SettingupAzurePipelinesforaforkoftheFlinkrepository).
- Each pull request should address only one issue, not mix up code from multiple issues.
Expand Down
3 changes: 2 additions & 1 deletion azure-pipelines.yml
Original file line number Diff line number Diff line change
Expand Up @@ -48,10 +48,11 @@ resources:
# to understand why the secrets are handled like this
variables:
MAVEN_CACHE_FOLDER: $(Pipeline.Workspace)/.m2/repository
E2E_CACHE_FOLDER: $(Pipeline.Workspace)/e2e_cache
MAVEN_OPTS: '-Dmaven.repo.local=$(MAVEN_CACHE_FOLDER)'
CACHE_KEY: maven | $(Agent.OS) | **/pom.xml, !**/target/**
CACHE_FALLBACK_KEY: maven | $(Agent.OS)
CACHE_FLINK_DIR: $(Pipeline.Workspace)/flink_cache
FLINK_ARTIFACT_DIR: $(Pipeline.Workspace)/flink_artifact
SECRET_S3_BUCKET: $[variables.IT_CASE_S3_BUCKET]
SECRET_S3_ACCESS_KEY: $[variables.IT_CASE_S3_ACCESS_KEY]
SECRET_S3_SECRET_KEY: $[variables.IT_CASE_S3_SECRET_KEY]
Expand Down
6 changes: 3 additions & 3 deletions flink-end-to-end-tests/run-nightly-tests.sh
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,7 @@ if [ ! -z "$TF_BUILD" ] ; then
echo "COMPRESSING build artifacts."
COMPRESSED_ARCHIVE=${BUILD_BUILDNUMBER}.tgz
mkdir compressed-archive-dir
tar -zcvf compressed-archive-dir/${COMPRESSED_ARCHIVE} $ARTIFACTS_DIR
tar -zcvf compressed-archive-dir/${COMPRESSED_ARCHIVE} -C $ARTIFACTS_DIR .
echo "##vso[task.setvariable variable=ARTIFACT_DIR]$(pwd)/compressed-archive-dir"
}
on_exit compress_logs
Expand Down Expand Up @@ -235,15 +235,15 @@ printf "Running Java end-to-end tests\n"
printf "==============================================================================\n"


LOG4J_PROPERTIES=${END_TO_END_DIR}/../tools/ci/log4j-ci.properties
LOG4J_PROPERTIES=${END_TO_END_DIR}/../tools/ci/log4j.properties

MVN_LOGGING_OPTIONS="-Dlog.dir=${ARTIFACTS_DIR} -DlogBackupDir=${ARTIFACTS_DIR} -Dlog4j.configurationFile=file://$LOG4J_PROPERTIES"
MVN_COMMON_OPTIONS="-Dflink.forkCount=2 -Dflink.forkCountTestPackage=2 -Dfast -Pskip-webui-build"
e2e_modules=$(find flink-end-to-end-tests -mindepth 2 -maxdepth 5 -name 'pom.xml' -printf '%h\n' | sort -u | tr '\n' ',')
e2e_modules="${e2e_modules},$(find flink-walkthroughs -mindepth 2 -maxdepth 2 -name 'pom.xml' -printf '%h\n' | sort -u | tr '\n' ',')"

PROFILE="$PROFILE -Pe2e-travis1 -Pe2e-travis2 -Pe2e-travis3 -Pe2e-travis4 -Pe2e-travis5 -Pe2e-travis6"
run_mvn ${MVN_COMMON_OPTIONS} ${MVN_LOGGING_OPTIONS} ${PROFILE} verify -pl ${e2e_modules} -DdistDir=$(readlink -e build-target)
run_mvn ${MVN_COMMON_OPTIONS} ${MVN_LOGGING_OPTIONS} ${PROFILE} verify -pl ${e2e_modules} -DdistDir=$(readlink -e build-target) -Dcache-dir=$E2E_CACHE_FOLDER -Dcache-ttl=P1M

EXIT_CODE=$?

Expand Down
64 changes: 0 additions & 64 deletions flink-end-to-end-tests/run-pre-commit-tests.sh

This file was deleted.

Original file line number Diff line number Diff line change
Expand Up @@ -449,7 +449,7 @@ public boolean accept(File dir, String name) {
}

if (!whitelistedFound) {
// logging in FATAL to see the actual message in TRAVIS tests.
// logging in FATAL to see the actual message in CI tests.
Marker fatal = MarkerFactory.getMarker("FATAL");
LOG.error(fatal, "Prohibited String '{}' in '{}:{}'", aProhibited, f.getAbsolutePath(), lineFromFile);

Expand Down Expand Up @@ -1048,10 +1048,10 @@ public static void teardown() throws Exception {
hdfsSiteXML.delete();
}

// When we are on travis, we copy the temp files of JUnit (containing the MiniYARNCluster log files)
// When we are on CI, we copy the temp files of JUnit (containing the MiniYARNCluster log files)
// to <flinkRoot>/target/flink-yarn-tests-*.
// The files from there are picked up by the tools/ci/* scripts to upload them.
if (isOnTravis()) {
if (isOnCI()) {
File target = new File("../target" + YARN_CONFIGURATION.get(TEST_CLUSTER_NAME_KEY));
if (!target.mkdirs()) {
LOG.warn("Error creating dirs to {}", target);
Expand All @@ -1067,8 +1067,8 @@ public static void teardown() throws Exception {

}

public static boolean isOnTravis() {
return System.getenv("TRAVIS") != null && System.getenv("TRAVIS").equals("true");
public static boolean isOnCI() {
return System.getenv("IS_CI") != null && System.getenv("IS_CI").equals("true");
}

protected void waitApplicationFinishedElseKillIt(
Expand Down
198 changes: 0 additions & 198 deletions tools/azure-pipelines/azure_controller.sh

This file was deleted.

3 changes: 2 additions & 1 deletion tools/azure-pipelines/build-apache-repo.yml
Original file line number Diff line number Diff line change
Expand Up @@ -42,10 +42,11 @@ resources:

variables:
MAVEN_CACHE_FOLDER: $(Pipeline.Workspace)/.m2/repository
E2E_CACHE_FOLDER: $(Pipeline.Workspace)/e2e_cache
MAVEN_OPTS: '-Dmaven.repo.local=$(MAVEN_CACHE_FOLDER)'
CACHE_KEY: maven | $(Agent.OS) | **/pom.xml, !**/target/**
CACHE_FALLBACK_KEY: maven | $(Agent.OS)
CACHE_FLINK_DIR: $(Pipeline.Workspace)/flink_cache
FLINK_ARTIFACT_DIR: $(Pipeline.Workspace)/flink_artifact
SECRET_S3_BUCKET: $[variables.IT_CASE_S3_BUCKET]
SECRET_S3_ACCESS_KEY: $[variables.IT_CASE_S3_ACCESS_KEY]
SECRET_S3_SECRET_KEY: $[variables.IT_CASE_S3_SECRET_KEY]
Expand Down
12 changes: 7 additions & 5 deletions tools/azure-pipelines/build-python-wheels.yml
Original file line number Diff line number Diff line change
Expand Up @@ -22,8 +22,10 @@ jobs:
clean: all
steps:
# Compile
- script: STAGE=compile ${{parameters.environment}} ./tools/azure-pipelines/azure_controller.sh compile
displayName: Build
- script: |
${{parameters.environment}} ./tools/ci/compile.sh
./tools/azure-pipelines/create_build_artifact.sh
displayName: Compile
- script: |
VERSION=$(mvn --file pom.xml org.apache.maven.plugins:maven-help-plugin:3.1.0:evaluate -Dexpression=project.version -q -DforceStdout)
Expand All @@ -38,8 +40,8 @@ jobs:
# upload artifacts for building wheels
- task: PublishPipelineArtifact@1
inputs:
targetPath: $(Pipeline.Workspace)/flink.tar.gz
artifactName: FlinkCompileCacheDir-${{parameters.stage_name}}
path: $(FLINK_ARTIFACT_DIR)
artifact: FlinkCompileArtifact-${{parameters.stage_name}}

- job: build_wheels
dependsOn: compile_${{parameters.stage_name}}
Expand All @@ -58,7 +60,7 @@ jobs:
- task: DownloadPipelineArtifact@2
inputs:
path: $(Pipeline.Workspace)
artifact: FlinkCompileCacheDir-${{parameters.stage_name}}
artifact: FlinkCompileArtifact-${{parameters.stage_name}}
- script: |
tar zxf $(Pipeline.Workspace)/flink.tar.gz -C $(Pipeline.Workspace)
mkdir -p flink-dist/target/flink-$(VERSION)-bin
Expand Down
Loading

0 comments on commit a22b130

Please sign in to comment.