diff --git a/.circleci/config.yml b/.circleci/config.yml
index 98c16e233731..f0a67c51c383 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -1,6 +1,6 @@
 version: 2
 jobs:
-  build:
+  build_docs:
     docker:
       # CircleCI maintains a library of pre-built images
       # documented at https://circleci.com/docs/2.0/circleci-images/
@@ -33,7 +33,7 @@ jobs:
             pip install nose mpmath argparse Pillow codecov matplotlib Sphinx==1.7.2
 
       - run:
-          name: test
+          name: build docs
           command: |
             . venv/bin/activate
             export SHELL=$(which bash)
@@ -92,15 +92,64 @@ jobs:
             git push --set-upstream origin gh-pages --force
 
 
+  # Run test suite on pypy3
+  pypy3:
+    docker:
+      - image: pypy:3-6.0.0
+
+    steps:
+      - restore_cache:
+          keys:
+            - pypy3-ccache-{{ .Branch }}
+            - pypy3-ccache
+      - checkout
+      - run:
+          name: setup
+          command: |
+            apt-get -yq update
+            apt-get -yq install libatlas-dev libatlas-base-dev liblapack-dev gfortran ccache
+            ccache -M 512M
+            export CCACHE_COMPRESS=1
+            export NPY_NUM_BUILD_JOBS=`pypy3 -c 'import multiprocessing as mp; print(mp.cpu_count())'`
+            export PATH=/usr/lib/ccache:$PATH
+            # XXX: use "numpy>=1.15.0" when it's released
+            pypy3 -mpip install --upgrade pip setuptools wheel
+            pypy3 -mpip install --no-build-isolation --extra-index https://antocuni.github.io/pypy-wheels/ubuntu pytest pytest-xdist Tempita "Cython>=0.28.2" mpmath
+            pypy3 -mpip install --no-build-isolation git+https://github.com/numpy/numpy.git@db552b5b6b37f2ff085b304751d7a2ebed26adc9
+      - run:
+          name: build
+          command: |
+            export CCACHE_COMPRESS=1
+            export PATH=/usr/lib/ccache:$PATH
+            # Limit parallelism for Cythonization to 4 processes, to
+            # avoid exceeding CircleCI memory limits
+            export SCIPY_NUM_CYTHONIZE_JOBS=4
+            export NPY_NUM_BUILD_JOBS=`pypy3 -c 'import multiprocessing as mp; print(mp.cpu_count())'`
+            # Less aggressive optimization flags for faster compilation
+            OPT="-O1" FOPT="-O1" pypy3 setup.py build
+      - save_cache:
+          key: pypy3-ccache-{{ .Branch }}-{{ .BuildNum }}
+          paths:
+            - ~/.ccache
+            - ~/.cache/pip
+      - run:
+          name: test
+          command: |
+            # CircleCI has 4G memory limit, play it safe
+            export SCIPY_AVAILABLE_MEM=1G
+            pypy3 runtests.py -- -rfEX -n 3 --durations=30
+
+
 workflows:
   version: 2
-  build_and_deploy:
+  default:
     jobs:
-      - build
+      - build_docs
       - deploy:
           requires:
-            - build
+            - build_docs
           filters:
             branches:
               only: master
+      - pypy3
 
diff --git a/tools/cythonize.py b/tools/cythonize.py
index 949422ed2843..a7767687770b 100755
--- a/tools/cythonize.py
+++ b/tools/cythonize.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-""" cythonize
+"""cythonize
 
 Cythonize pyx files into C files as needed.
 
@@ -7,6 +7,10 @@
 
 Default [root_dir] is 'scipy'.
 
+The number of parallel Cython processes is controlled by the
+environment variable SCIPY_NUM_CYTHONIZE_JOBS. If not set, determined
+from the number of CPUs.
+
 Checks pyx files to see if they have been changed relative to their
 corresponding C files. If they have, then runs cython on these files to
 recreate the C files.
@@ -28,6 +32,7 @@
 
 Note: this script does not check any of the dependent C libraries; it
 only operates on the Cython .pyx files.
+
 """
 
 from __future__ import division, print_function, absolute_import
@@ -240,7 +245,12 @@ def process_generate_pyx(path, lock):
 
 def find_process_files(root_dir):
     lock = Lock()
-    pool = Pool()
+
+    try:
+        num_proc = int(os.environ.get('SCIPY_NUM_CYTHONIZE_JOBS', ''))
+        pool = Pool(processes=num_proc)
+    except ValueError:
+        pool = Pool()
 
     hash_db = load_hashes(HASH_FILE)
     # Keep changed pxi/pxd hashes in a separate dict until the end
@@ -255,7 +265,7 @@ def find_process_files(root_dir):
         if os.path.exists(generate_pyx):
             jobs.append(generate_pyx)
 
-    for result in pool.imap(lambda fn: process_generate_pyx(fn, lock), jobs):
+    for result in pool.imap_unordered(lambda fn: process_generate_pyx(fn, lock), jobs):
         pass
 
     # Process pyx files
@@ -278,7 +288,7 @@ def find_process_files(root_dir):
                 jobs.append((cur_dir, fromfile, tofile,
                              function, hash_db, dep_hashes, lock))
 
-    for result in pool.imap(lambda args: process(*args), jobs):
+    for result in pool.imap_unordered(lambda args: process(*args), jobs):
         pass
 
     hash_db.update(dep_hashes)