Skip to content

Commit

Permalink
Arm64 CI setup with TravisCI (scikit-learn#17996)
Browse files Browse the repository at this point in the history
  • Loading branch information
rth authored Jul 31, 2020
1 parent 793f974 commit 90dc61f
Show file tree
Hide file tree
Showing 8 changed files with 86 additions and 38 deletions.
20 changes: 15 additions & 5 deletions .travis.yml
Original file line number Diff line number Diff line change
Expand Up @@ -23,21 +23,31 @@ matrix:
# installed from their CI wheels in a virtualenv with the Python
# interpreter provided by travis.
- python: 3.7
env: CHECK_WARNINGS="true"
env:
- CHECK_WARNINGS="true"
- CI_CPU_COUNT="3"
if: type = cron OR commit_message =~ /\[scipy-dev\]/

# As above but build scikit-learn with Intel C compiler (ICC).
- python: 3.7
env:
- CHECK_WARNINGS="true"
- BUILD_WITH_ICC="true"
- CI_CPU_COUNT="3"
if: type = cron OR commit_message =~ /\[icc-build\]/

- python: 3.7
env:
- CI_CPU_COUNT="8"
os: linux
arch: arm64
if: type = cron OR commit_message =~ /\[arm64\]/

install: source build_tools/travis/install.sh
script:
- bash build_tools/travis/test_script.sh
- bash build_tools/travis/test_docs.sh
- bash build_tools/travis/test_pytest_soft_dependency.sh
- bash build_tools/travis/test_script.sh || travis_terminate 1
- bash build_tools/travis/test_docs.sh || travis_terminate 1
- bash build_tools/travis/test_pytest_soft_dependency.sh || travis_terminate 1
after_success: source build_tools/travis/after_success.sh
notifications:
webhooks:
Expand Down
46 changes: 32 additions & 14 deletions build_tools/travis/install.sh
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,12 @@
set -e

# Fail fast
echo "CPU Arch: ${TRAVIS_CPU_ARCH}"

# jq is used in travis_fastfail.sh; it's already pre-installed in non-arm64
# environments
sudo apt-get install jq

build_tools/travis/travis_fastfail.sh

# Imports get_dep
Expand All @@ -35,28 +41,40 @@ ccache --max-size 100M --show-stats
# If Travis has language=generic, deactivate does not exist. `|| :` will pass.
deactivate || :


# Install miniconda
fname=Miniconda3-latest-Linux-x86_64.sh
wget https://repo.continuum.io/miniconda/$fname -O miniconda.sh
if [[ "$TRAVIS_CPU_ARCH" == "arm64" ]]; then
wget https://github.com/conda-forge/miniforge/releases/latest/download/Miniforge3-Linux-aarch64.sh -O miniconda.sh
else
fname=Miniconda3-latest-Linux-x86_64.sh
wget https://repo.continuum.io/miniconda/$fname -O miniconda.sh
fi
MINICONDA_PATH=$HOME/miniconda
chmod +x miniconda.sh && ./miniconda.sh -b -p $MINICONDA_PATH
export PATH=$MINICONDA_PATH/bin:$PATH
conda update --yes conda

# Create environment and install dependencies
conda create -n testenv --yes python=3.7

source activate testenv

pip install --upgrade pip setuptools
echo "Installing numpy and scipy master wheels"
dev_anaconda_url=https://pypi.anaconda.org/scipy-wheels-nightly/simple
pip install --pre --upgrade --timeout=60 --extra-index $dev_anaconda_url numpy scipy pandas
pip install --pre cython
echo "Installing joblib master"
pip install https://github.com/joblib/joblib/archive/master.zip
echo "Installing pillow master"
pip install https://github.com/python-pillow/Pillow/archive/master.zip
pip install $(get_dep pytest $PYTEST_VERSION) pytest-cov
if [[ "$TRAVIS_CPU_ARCH" == "amd64" ]]; then
pip install --upgrade pip setuptools
echo "Installing numpy and scipy master wheels"
dev_anaconda_url=https://pypi.anaconda.org/scipy-wheels-nightly/simple
pip install --pre --upgrade --timeout=60 --extra-index $dev_anaconda_url numpy scipy pandas
pip install --pre cython
echo "Installing joblib master"
pip install https://github.com/joblib/joblib/archive/master.zip
echo "Installing pillow master"
pip install https://github.com/python-pillow/Pillow/archive/master.zip
else
conda install -y scipy numpy pandas cython
pip install joblib threadpoolctl
fi

pip install $(get_dep pytest $PYTEST_VERSION) pytest-cov pytest-xdist

# Build scikit-learn in the install.sh script to collapse the verbose
# build output in the travis output when it succeeds.
Expand All @@ -76,11 +94,11 @@ if [[ "$BUILD_WITH_ICC" == "true" ]]; then
# The build_clib command is implicitly used to build libsvm-skl. To compile
# with a different compiler we also need to specify the compiler for this
# command.
python setup.py build_ext --compiler=intelem -i -j 3 build_clib --compiler=intelem
python setup.py build_ext --compiler=intelem -i -j "${CI_CPU_COUNT}" build_clib --compiler=intelem
else
# Use setup.py instead of `pip install -e .` to be able to pass the -j flag
# to speed-up the building multicore CI machines.
python setup.py build_ext --inplace -j 3
python setup.py build_ext --inplace -j "${CI_CPU_COUNT}"
fi

python setup.py develop
Expand Down
2 changes: 1 addition & 1 deletion build_tools/travis/test_docs.sh
Original file line number Diff line number Diff line change
Expand Up @@ -9,4 +9,4 @@ if [[ "$BUILD_WITH_ICC" == "true" ]]; then
source /opt/intel/inteloneapi/setvars.sh
fi

make test-doc
PYTEST="pytest -n $CI_CPU_COUNT" make test-doc
17 changes: 13 additions & 4 deletions build_tools/travis/test_script.sh
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,8 @@ try:
except ImportError:
pass
"
python -c "import multiprocessing as mp; print('%d CPUs' % mp.cpu_count())"
python -c "import joblib; print(joblib.cpu_count(), 'CPUs')"
python -c "import platform; print(platform.machine())"

if [[ "$BUILD_WITH_ICC" == "true" ]]; then
# the tools in the oneAPI toolkits are configured via environment variables
Expand All @@ -36,9 +37,17 @@ run_tests() {
cp setup.cfg $TEST_DIR
cd $TEST_DIR

# Tests that require large downloads over the networks are skipped in CI.
# Here we make sure, that they are still run on a regular basis.
export SKLEARN_SKIP_NETWORK_TESTS=0
if [[ "$TRAVIS_CPU_ARCH" == "arm64" ]]; then
# use pytest-xdist for faster tests
TEST_CMD="$TEST_CMD -n $CI_CPU_COUNT"
else
# Tests that require large downloads over the network are skipped in CI.
# Here we make sure that they are still run on a regular basis.
#
# Note that using pytest-xdist is currently not compatible
# with fetching datasets in tests due to dataset cache corruption issues.
export SKLEARN_SKIP_NETWORK_TESTS=0
fi

if [[ "$COVERAGE" == "true" ]]; then
TEST_CMD="$TEST_CMD --cov sklearn"
Expand Down
28 changes: 19 additions & 9 deletions conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,15 +29,25 @@ def pytest_addoption(parser):


def pytest_collection_modifyitems(config, items):

# FeatureHasher is not compatible with PyPy
if platform.python_implementation() == 'PyPy':
skip_marker = pytest.mark.skip(
reason='FeatureHasher is not compatible with PyPy')
for item in items:
if item.name.endswith(('_hash.FeatureHasher',
'text.HashingVectorizer')):
item.add_marker(skip_marker)
for item in items:
# FeatureHasher is not compatible with PyPy
if (item.name.endswith(('_hash.FeatureHasher',
'text.HashingVectorizer'))
and platform.python_implementation() == 'PyPy'):
marker = pytest.mark.skip(
reason='FeatureHasher is not compatible with PyPy')
item.add_marker(marker)
# Known failure with GradientBoostingClassifier on ARM64
elif (item.name.endswith('GradientBoostingClassifier')
and platform.machine() == 'aarch64'):

marker = pytest.mark.xfail(
reason=(
'known failure. See '
'https://github.com/scikit-learn/scikit-learn/issues/17797' # noqa
)
)
item.add_marker(marker)

# Skip tests which require internet if the flag is provided
if config.getoption("--skip-network"):
Expand Down
3 changes: 2 additions & 1 deletion doc/developers/contributing.rst
Original file line number Diff line number Diff line change
Expand Up @@ -477,9 +477,10 @@ message, the following actions are taken.
====================== ===================
Commit Message Marker Action Taken by CI
---------------------- -------------------
[scipy-dev] Add a Travis build with our dependencies (numpy, scipy, etc ...) development builds
[ci skip] CI is skipped completely
[lint skip] Azure pipeline skips linting
[scipy-dev] Add a Travis build with our dependencies (numpy, scipy, etc ...) development builds
[arm64] Add a Travis build for the ARM64 / aarch64 little endian architecture
[doc skip] Docs are not built
[doc quick] Docs built, but excludes example gallery plots
[doc build] Docs built including example gallery plots
Expand Down
2 changes: 1 addition & 1 deletion sklearn/cluster/tests/test_k_means.py
Original file line number Diff line number Diff line change
Expand Up @@ -333,7 +333,7 @@ def test_k_means_fit_predict(algo, dtype, constructor, seed, max_iter, tol):
# using more than one thread, the absolute values of the labels can be
# different between the 2 strategies but they should correspond to the same
# clustering.
assert v_measure_score(labels_1, labels_2) == 1
assert v_measure_score(labels_1, labels_2) == pytest.approx(1, abs=1e-15)


def test_minibatch_kmeans_verbose():
Expand Down
6 changes: 3 additions & 3 deletions sklearn/neural_network/tests/test_mlp.py
Original file line number Diff line number Diff line change
Expand Up @@ -488,7 +488,7 @@ def test_predict_proba_binary():

assert y_proba.shape == (n_samples, n_classes)
assert_array_equal(proba_max, proba_log_max)
assert_array_equal(y_log_proba, np.log(y_proba))
assert_allclose(y_log_proba, np.log(y_proba))

assert roc_auc_score(y, y_proba[:, 1]) == 1.0

Expand All @@ -511,7 +511,7 @@ def test_predict_proba_multiclass():

assert y_proba.shape == (n_samples, n_classes)
assert_array_equal(proba_max, proba_log_max)
assert_array_equal(y_log_proba, np.log(y_proba))
assert_allclose(y_log_proba, np.log(y_proba))


def test_predict_proba_multilabel():
Expand All @@ -535,7 +535,7 @@ def test_predict_proba_multilabel():

assert (y_proba.sum(1) - 1).dot(y_proba.sum(1) - 1) > 1e-10
assert_array_equal(proba_max, proba_log_max)
assert_array_equal(y_log_proba, np.log(y_proba))
assert_allclose(y_log_proba, np.log(y_proba))


def test_shuffle():
Expand Down

0 comments on commit 90dc61f

Please sign in to comment.