[FLINK-22856][Azure] Upgrade to ubuntu-20.04
rmetzger committed Jun 8, 2021
1 parent a1dbcc9 commit ed02b5d
Showing 8 changed files with 41 additions and 37 deletions.
6 changes: 3 additions & 3 deletions azure-pipelines.yml
@@ -71,16 +71,16 @@ stages:
parameters: # see template file for a definition of the parameters.
stage_name: ci_build
test_pool_definition:
- vmImage: 'ubuntu-16.04'
+ vmImage: 'ubuntu-20.04'
e2e_pool_definition:
- vmImage: 'ubuntu-16.04'
+ vmImage: 'ubuntu-20.04'
environment: PROFILE="-Dhadoop.version=2.8.3 -Dinclude_hadoop_aws -Dscala-2.11"
run_end_to_end: false
container: flink-build-container
jdk: jdk8
- job: docs_404_check # run on a MSFT provided machine
pool:
- vmImage: 'ubuntu-16.04'
+ vmImage: 'ubuntu-20.04'
steps:
- script: ./tools/ci/docs.sh
# CI / Special stage for release, e.g. building PyFlink wheel packages, etc:
2 changes: 1 addition & 1 deletion flink-end-to-end-tests/run-nightly-tests.sh
@@ -37,7 +37,7 @@ if [ -z "$FLINK_LOG_DIR" ] ; then
fi

# On Azure CI, use artifacts dir
- if [ -z "$DEBUG_FILES_OUTPUT_DIR"] ; then
+ if [ -z "$DEBUG_FILES_OUTPUT_DIR" ] ; then
export DEBUG_FILES_OUTPUT_DIR="$FLINK_LOG_DIR"
fi

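The one-character fix above matters because, without the space, the closing bracket is glued to the variable expansion and the test command never sees a well-formed expression when the variable is set. A minimal bash illustration (not part of the commit):

    DEBUG_FILES_OUTPUT_DIR=/tmp/debug-logs
    # Broken form: "$DEBUG_FILES_OUTPUT_DIR"] expands to a single word, so the
    # test command reports a missing "]" instead of evaluating -z.
    if [ -z "$DEBUG_FILES_OUTPUT_DIR"] ; then echo "would overwrite"; fi
    # Fixed form: a valid test that is correctly skipped when the variable is set.
    if [ -z "$DEBUG_FILES_OUTPUT_DIR" ] ; then echo "would overwrite"; fi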
8 changes: 4 additions & 4 deletions flink-end-to-end-tests/test-scripts/common_docker.sh
@@ -58,15 +58,15 @@ function build_image() {
}

function start_file_server() {
- command -v python >/dev/null 2>&1
+ command -v python3 >/dev/null 2>&1
if [[ $? -eq 0 ]]; then
- python ${TEST_INFRA_DIR}/python2_fileserver.py &
+ python3 ${TEST_INFRA_DIR}/python3_fileserver.py &
return
fi

- command -v python3 >/dev/null 2>&1
+ command -v python >/dev/null 2>&1
if [[ $? -eq 0 ]]; then
- python3 ${TEST_INFRA_DIR}/python3_fileserver.py &
+ python ${TEST_INFRA_DIR}/python2_fileserver.py &
return
fi

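The swapped blocks above flip the interpreter preference so python3 is tried first; ubuntu-20.04 images ship python3 by default but not necessarily a plain "python" binary. Reconstructed from the hunk (a sketch only; the error handling after the second check lies outside the hunk and is omitted), the function after this change reads roughly:

    function start_file_server() {
        # Prefer python3, which is present on ubuntu-20.04 images.
        command -v python3 >/dev/null 2>&1
        if [[ $? -eq 0 ]]; then
            python3 ${TEST_INFRA_DIR}/python3_fileserver.py &
            return
        fi

        # Fall back to the legacy python 2 server only if python3 is unavailable.
        command -v python >/dev/null 2>&1
        if [[ $? -eq 0 ]]; then
            python ${TEST_INFRA_DIR}/python2_fileserver.py &
            return
        fi
    }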
7 changes: 4 additions & 3 deletions flink-end-to-end-tests/test-scripts/common_kubernetes.sh
@@ -50,6 +50,8 @@ function setup_kubernetes_for_linux {
fi
# conntrack is required for minikube 1.9 and later
sudo apt-get install conntrack
+ # required to resolve HOST_JUJU_LOCK_PERMISSION error of "minikube start --vm-driver=none"
+ sudo sysctl fs.protected_regular=0
}

function check_kubernetes_status {
@@ -76,7 +78,7 @@ function start_kubernetes_if_not_running {
# here.
# Similarly, the kubelets are marking themself as "low disk space",
# causing Flink to avoid this node (again, failing the test)
- sudo CHANGE_MINIKUBE_NONE_USER=true minikube start --vm-driver=none \
+ CHANGE_MINIKUBE_NONE_USER=true sudo -E minikube start --vm-driver=none \
--extra-config=kubelet.image-gc-high-threshold=99 \
--extra-config=kubelet.image-gc-low-threshold=98 \
--extra-config=kubelet.minimum-container-ttl-duration=120m \
@@ -108,7 +110,6 @@ function start_kubernetes {
exit 1
fi
fi
- eval $(minikube docker-env)
}

function stop_kubernetes {
@@ -118,7 +119,7 @@ function stop_kubernetes {
kill $minikube_mount_pid 2> /dev/null
else
echo "Stopping minikube ..."
stop_command="sudo minikube stop"
stop_command="minikube stop"
if ! retry_times ${MINIKUBE_START_RETRIES} ${MINIKUBE_START_BACKOFF} "${stop_command}"; then
echo "Could not stop minikube. Aborting..."
exit 1
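Two changes ride along with the image bump here: setting fs.protected_regular=0 works around the HOST_JUJU_LOCK_PERMISSION failure that minikube's none driver hits on newer kernels (as the in-diff comment notes), and the start command now exports the variable in the caller's shell and preserves the environment through "sudo -E" instead of naming the variable on the sudo command line. A side-by-side reading of that invocation change (an interpretation only, with the other flags elided):

    # Before: only the variable named on the sudo command line reaches minikube.
    sudo CHANGE_MINIKUBE_NONE_USER=true minikube start --vm-driver=none

    # After: -E carries the caller's exported environment (CHANGE_MINIKUBE_NONE_USER
    # plus any other exported CI variables) into the root session running minikube.
    CHANGE_MINIKUBE_NONE_USER=true sudo -E minikube start --vm-driver=none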
20 changes: 10 additions & 10 deletions tools/azure-pipelines/build-apache-repo.yml
@@ -66,14 +66,14 @@ stages:
test_pool_definition:
name: Default
e2e_pool_definition:
- vmImage: 'ubuntu-16.04'
+ vmImage: 'ubuntu-20.04'
environment: PROFILE="-Dhadoop.version=2.8.3 -Dinclude_hadoop_aws -Dscala-2.11"
run_end_to_end: false
container: flink-build-container
jdk: jdk8
- job: docs_404_check # run on a MSFT provided machine
pool:
- vmImage: 'ubuntu-16.04'
+ vmImage: 'ubuntu-20.04'
steps:
# Skip docs check if this is a pull request that doesn't contain a documentation change
- bash: |
@@ -109,9 +109,9 @@ stages:
parameters:
stage_name: cron_azure
test_pool_definition:
- vmImage: 'ubuntu-16.04'
+ vmImage: 'ubuntu-20.04'
e2e_pool_definition:
- vmImage: 'ubuntu-16.04'
+ vmImage: 'ubuntu-20.04'
environment: PROFILE="-Dhadoop.version=2.8.3 -Dinclude_hadoop_aws -Dscala-2.11"
run_end_to_end: true
container: flink-build-container
@@ -122,7 +122,7 @@
test_pool_definition:
name: Default
e2e_pool_definition:
- vmImage: 'ubuntu-16.04'
+ vmImage: 'ubuntu-20.04'
environment: PROFILE="-Dhadoop.version=2.4.1 -Pskip-hive-tests"
run_end_to_end: true
container: flink-build-container
@@ -133,7 +133,7 @@
test_pool_definition:
name: Default
e2e_pool_definition:
- vmImage: 'ubuntu-16.04'
+ vmImage: 'ubuntu-20.04'
environment: PROFILE="-Dinclude_hadoop_aws -Dhadoop.version=3.1.3 -Phadoop3-tests"
run_end_to_end: true
container: flink-build-container
@@ -144,7 +144,7 @@
test_pool_definition:
name: Default
e2e_pool_definition:
- vmImage: 'ubuntu-16.04'
+ vmImage: 'ubuntu-20.04'
environment: PROFILE="-Dhadoop.version=2.8.3 -Dinclude_hadoop_aws -Dscala-2.12 -Phive-1.2.1"
run_end_to_end: true
container: flink-build-container
@@ -155,7 +155,7 @@
test_pool_definition:
name: Default
e2e_pool_definition:
- vmImage: 'ubuntu-16.04'
+ vmImage: 'ubuntu-20.04'
environment: PROFILE="-Dhadoop.version=2.8.3 -Dinclude_hadoop_aws -Dscala-2.11 -Djdk11"
run_end_to_end: true
container: flink-build-container
@@ -166,14 +166,14 @@
test_pool_definition:
name: Default
e2e_pool_definition:
- vmImage: 'ubuntu-16.04'
+ vmImage: 'ubuntu-20.04'
environment: PROFILE="-Dhadoop.version=2.8.3 -Dinclude_hadoop_aws -Dscala-2.11 -Penable-adaptive-scheduler"
run_end_to_end: true
container: flink-build-container
jdk: jdk8
- job: docs_404_check # run on a MSFT provided machine
pool:
- vmImage: 'ubuntu-16.04'
+ vmImage: 'ubuntu-20.04'
steps:
- task: UseRubyVersion@0
inputs:
4 changes: 2 additions & 2 deletions tools/azure-pipelines/build-nightly-dist.yml
@@ -16,7 +16,7 @@
jobs:
- job: ${{parameters.stage_name}}_binary
pool:
- vmImage: 'ubuntu-16.04'
+ vmImage: 'ubuntu-20.04'
container: flink-build-container
workspace:
clean: all
@@ -63,7 +63,7 @@ jobs:
# artifact: nightly-release
- job: ${{parameters.stage_name}}_maven
pool:
- vmImage: 'ubuntu-16.04'
+ vmImage: 'ubuntu-20.04'
container: flink-build-container
timeoutInMinutes: 100 # 40 minutes per scala version + 20 buffer
workspace:
2 changes: 1 addition & 1 deletion tools/azure-pipelines/build-python-wheels.yml
@@ -18,7 +18,7 @@ jobs:
strategy:
matrix:
linux:
- vm-label: 'ubuntu-16.04'
+ vm-label: 'ubuntu-20.04'
mac:
vm-label: 'macOS-10.15'
pool:
29 changes: 16 additions & 13 deletions tools/azure-pipelines/jobs-template.yml
@@ -60,10 +60,10 @@ jobs:
condition: not(eq('${{parameters.test_pool_definition.name}}', 'Default'))
displayName: Cache Maven local repo
- script: |
echo "##vso[task.setvariable variable=JAVA_HOME]$JAVA_HOME_11_X64"
echo "##vso[task.setvariable variable=PATH]$JAVA_HOME_11_X64/bin:$PATH"
displayName: "Set to jdk11"
condition: eq('${{parameters.jdk}}', 'jdk11')
echo "##vso[task.setvariable variable=JAVA_HOME]$JAVA_HOME_8_X64"
echo "##vso[task.setvariable variable=PATH]$JAVA_HOME_8_X64/bin:$PATH"
displayName: "Set to jdk8"
condition: eq('${{parameters.jdk}}', 'jdk8')
# Compile
- script: |
${{parameters.environment}} ./tools/ci/compile.sh || exit $?
@@ -133,10 +133,10 @@
displayName: Cache Maven local repo

- script: |
echo "##vso[task.setvariable variable=JAVA_HOME]$JAVA_HOME_11_X64"
echo "##vso[task.setvariable variable=PATH]$JAVA_HOME_11_X64/bin:$PATH"
displayName: "Set to jdk11"
condition: eq('${{parameters.jdk}}', 'jdk11')
echo "##vso[task.setvariable variable=JAVA_HOME]$JAVA_HOME_8_X64"
echo "##vso[task.setvariable variable=PATH]$JAVA_HOME_8_X64/bin:$PATH"
displayName: "Set to jdk8"
condition: eq('${{parameters.jdk}}', 'jdk8')
- script: sudo sysctl -w kernel.core_pattern=core.%p
displayName: Set coredump pattern
@@ -210,10 +210,10 @@
continueOnError: true
condition: not(eq(variables['SKIP'], '1'))
- script: |
echo "##vso[task.setvariable variable=JAVA_HOME]$JAVA_HOME_11_X64"
echo "##vso[task.setvariable variable=PATH]$JAVA_HOME_11_X64/bin:$PATH"
displayName: "Set to jdk11"
condition: eq('${{parameters.jdk}}', 'jdk11')
echo "##vso[task.setvariable variable=JAVA_HOME]$JAVA_HOME_8_X64"
echo "##vso[task.setvariable variable=PATH]$JAVA_HOME_8_X64/bin:$PATH"
displayName: "Set to jdk8"
condition: eq('${{parameters.jdk}}', 'jdk8')
- script: |
echo "Setting up Maven"
source ./tools/ci/maven-utils.sh
Expand All @@ -223,7 +223,10 @@ jobs:
./tools/azure-pipelines/free_disk_space.sh
echo "Installing required software"
- sudo apt-get install -y bc
+ sudo apt-get install -y bc libapr1
+ # install libssl1.0.0 for netty tcnative
+ wget http://security.ubuntu.com/ubuntu/pool/main/o/openssl1.0/libssl1.0.0_1.0.2n-1ubuntu5.6_amd64.deb
+ sudo apt install ./libssl1.0.0_1.0.2n-1ubuntu5.6_amd64.deb
displayName: Prepare E2E run
condition: not(eq(variables['SKIP'], '1'))
- script: ${{parameters.environment}} ./tools/ci/compile.sh
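Two themes close out this file: the three "Set to jdk11" hunks become "Set to jdk8", presumably because the default JDK flips between the 16.04 and 20.04 images so JDK 8 is now the variant that must be selected explicitly, and the E2E preparation step pins libssl1.0.0 from the Ubuntu archive because, per the in-diff comment, netty tcnative still needs OpenSSL 1.0, which ubuntu-20.04 no longer packages. A hypothetical post-install sanity check for the latter (not part of the commit) could be:

    # Confirm the legacy OpenSSL 1.0 runtime is visible to the dynamic linker
    # after installing the pinned .deb; fail loudly if it is not.
    ldconfig -p | grep -F 'libssl.so.1.0.0' \
        || { echo "libssl1.0.0 missing - netty tcnative E2E tests will fail" >&2; exit 1; }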
