Skip to content

Commit

Permalink
CI: run tests with pypy3 on circleci (scipy#8783)
Browse files Browse the repository at this point in the history
* MAINT: make parallelism in tools/cythonize.py adjustable

* CI: run tests with pypy3 on circleci
  • Loading branch information
pv authored and tylerjereddy committed May 4, 2018
1 parent f8d31d8 commit bdfce64
Show file tree
Hide file tree
Showing 2 changed files with 68 additions and 9 deletions.
59 changes: 54 additions & 5 deletions .circleci/config.yml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
version: 2
jobs:
build:
build_docs:
docker:
# CircleCI maintains a library of pre-built images
# documented at https://circleci.com/docs/2.0/circleci-images/
Expand Down Expand Up @@ -33,7 +33,7 @@ jobs:
pip install nose mpmath argparse Pillow codecov matplotlib Sphinx==1.7.2
- run:
name: test
name: build docs
command: |
. venv/bin/activate
export SHELL=$(which bash)
Expand Down Expand Up @@ -92,15 +92,64 @@ jobs:
git push --set-upstream origin gh-pages --force
# Run test suite on pypy3
pypy3:
docker:
- image: pypy:3-6.0.0

steps:
- restore_cache:
keys:
- pypy3-ccache-{{ .Branch }}
- pypy3-ccache
- checkout
- run:
name: setup
command: |
apt-get -yq update
apt-get -yq install libatlas-dev libatlas-base-dev liblapack-dev gfortran ccache
ccache -M 512M
export CCACHE_COMPRESS=1
export NPY_NUM_BUILD_JOBS=`pypy3 -c 'import multiprocessing as mp; print(mp.cpu_count())'`
export PATH=/usr/lib/ccache:$PATH
# XXX: use "numpy>=1.15.0" when it's released
pypy3 -mpip install --upgrade pip setuptools wheel
pypy3 -mpip install --no-build-isolation --extra-index https://antocuni.github.io/pypy-wheels/ubuntu pytest pytest-xdist Tempita "Cython>=0.28.2" mpmath
pypy3 -mpip install --no-build-isolation git+https://github.com/numpy/numpy.git@db552b5b6b37f2ff085b304751d7a2ebed26adc9
- run:
name: build
command: |
export CCACHE_COMPRESS=1
export PATH=/usr/lib/ccache:$PATH
# Limit parallelism for Cythonization to 4 processes, to
# avoid exceeding CircleCI memory limits
export SCIPY_NUM_CYTHONIZE_JOBS=4
export NPY_NUM_BUILD_JOBS=`pypy3 -c 'import multiprocessing as mp; print(mp.cpu_count())'`
# Less aggressive optimization flags for faster compilation
OPT="-O1" FOPT="-O1" pypy3 setup.py build
- save_cache:
key: pypy3-ccache-{{ .Branch }}-{{ .BuildNum }}
paths:
- ~/.ccache
- ~/.cache/pip
- run:
name: test
command: |
# CircleCI has 4G memory limit, play it safe
export SCIPY_AVAILABLE_MEM=1G
pypy3 runtests.py -- -rfEX -n 3 --durations=30
workflows:
version: 2
build_and_deploy:
default:
jobs:
- build
- build_docs
- deploy:
requires:
- build
- build_docs

filters:
branches:
only: master
- pypy3
18 changes: 14 additions & 4 deletions tools/cythonize.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,16 @@
#!/usr/bin/env python
""" cythonize
"""cythonize
Cythonize pyx files into C files as needed.
Usage: cythonize [root_dir]
Default [root_dir] is 'scipy'.
The number of parallel Cython processes is controlled by the
environment variable SCIPY_NUM_CYTHONIZE_JOBS. If it is not set, the
number of processes is determined from the number of CPUs.
Checks pyx files to see if they have been changed relative to their
corresponding C files. If they have, then runs cython on these files to
recreate the C files.
Expand All @@ -28,6 +32,7 @@
Note: this script does not check any of the dependent C libraries; it only
operates on the Cython .pyx files.
"""

from __future__ import division, print_function, absolute_import
Expand Down Expand Up @@ -240,7 +245,12 @@ def process_generate_pyx(path, lock):

def find_process_files(root_dir):
lock = Lock()
pool = Pool()

try:
num_proc = int(os.environ.get('SCIPY_NUM_CYTHONIZE_JOBS', ''))
pool = Pool(processes=num_proc)
except ValueError:
pool = Pool()

hash_db = load_hashes(HASH_FILE)
# Keep changed pxi/pxd hashes in a separate dict until the end
Expand All @@ -255,7 +265,7 @@ def find_process_files(root_dir):
if os.path.exists(generate_pyx):
jobs.append(generate_pyx)

for result in pool.imap(lambda fn: process_generate_pyx(fn, lock), jobs):
for result in pool.imap_unordered(lambda fn: process_generate_pyx(fn, lock), jobs):
pass

# Process pyx files
Expand All @@ -278,7 +288,7 @@ def find_process_files(root_dir):
jobs.append((cur_dir, fromfile, tofile, function,
hash_db, dep_hashes, lock))

for result in pool.imap(lambda args: process(*args), jobs):
for result in pool.imap_unordered(lambda args: process(*args), jobs):
pass

hash_db.update(dep_hashes)
Expand Down

0 comments on commit bdfce64

Please sign in to comment.