MAINT: Mayavi / NumPy dep (mne-tools#7412)
* MAINT: Mayavi / NumPy dep

* FIX: Restore tests for pre

* MAINT: Bump NumPy

* FIX: Bump reqs to ones we can test

* FIX: Bump again

* DOC: Correct README [ci skip]

* FIX: Nest import

* FIX: Def

* FIX: Missed one
larsoner authored Mar 9, 2020
1 parent 1cf95cc commit 7ce4bb7
Showing 28 changed files with 79 additions and 294 deletions.
4 changes: 1 addition & 3 deletions .travis.yml
@@ -33,7 +33,7 @@ matrix:
# Old dependencies
- os: linux
env: PYTHON_VERSION=3.5
CONDA_DEPENDENCIES="numpy=1.12 scipy=0.18 matplotlib=2.0 pandas=0.19 scikit-learn=0.18"
CONDA_DEPENDENCIES="numpy=1.13 scipy=1.0 matplotlib=2.1 pandas=0.21 scikit-learn=0.19"
CONDA_CHANNELS="conda-forge"

# Minimal (runs with and without testing data)
@@ -116,8 +116,6 @@ script:
# need the --pre wheels to skip slow ones.
- if [ "${TRAVIS_OS_NAME}" == "osx" ]; then
CONDITION='not slowtest';
- elif [ -z "$CONDA_ENV" ] && [ -z "$CONDA_DEPENDENCIES" ]; then
-     CONDITION='not slowtest';
else
CONDITION='not ultraslowtest';
fi;
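For context, `CONDITION` above is a pytest `-m` marker expression; a hedged sketch of how such markers are typically attached to tests (the `slowtest`/`ultraslowtest` names follow MNE's own marker convention and are an assumption here):

    import pytest

    @pytest.mark.slowtest          # deselected when CONDITION='not slowtest'
    def test_expensive_path():
        assert sum(range(10)) == 45

    @pytest.mark.ultraslowtest     # deselected by either CONDITION above
    def test_very_expensive_path():
        assert 2 ** 10 == 1024

    # Invoked roughly as: pytest -m "$CONDITION" mne/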
12 changes: 6 additions & 6 deletions README.rst
@@ -81,22 +81,22 @@ Dependencies
The minimum required dependencies to run MNE-Python are:

- Python >= 3.5
- - NumPy >= 1.12.1
- - SciPy >= 0.18.1
+ - NumPy >= 1.13.3
+ - SciPy >= 1.0.0

For full functionality, some functions require:

- - Matplotlib >= 2.0.2
+ - Matplotlib >= 2.1
- Mayavi >= 4.6
- PySurfer >= 0.8
- - Scikit-learn >= 0.18.2
+ - Scikit-learn >= 0.19.1
- Numba >= 0.40
- NiBabel >= 2.1.0
- - Pandas >= 0.19.2
+ - Pandas >= 0.21
- Picard >= 0.3
- CuPy >= 4.0 (for NVIDIA CUDA acceleration)
- DIPY >= 0.10.1
- - PyVista >= 0.20.1
+ - PyVista >= 0.23.1

Contributing to MNE-Python
^^^^^^^^^^^^^^^^^^^^^^^^^^
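A quick way to confirm a local environment meets the bumped minimums (a sketch; the names are the import names, and scikit-learn/pandas are optional, so missing is not necessarily an error):

    import importlib

    # (import name, minimum version) pairs taken from the README diff above
    mins = [('numpy', '1.13.3'), ('scipy', '1.0.0'), ('matplotlib', '2.1'),
            ('sklearn', '0.19.1'), ('pandas', '0.21')]
    for name, minimum in mins:
        try:
            mod = importlib.import_module(name)
            print('%s %s (need >= %s)' % (name, mod.__version__, minimum))
        except ImportError:
            print('%s not installed' % (name,))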
8 changes: 8 additions & 0 deletions doc/changes/latest.inc
@@ -282,6 +282,14 @@ Bug
API
~~~

+ - Bumped minimum requirements to fall 2017 versions by `Eric Larson`_:
+
+   - NumPy 1.13.3
+   - SciPy 1.0.0
+   - matplotlib 2.1
+   - scikit-learn 0.19.1 (optional requirement)
+   - pandas 0.21 (optional requirement)
+
- :meth:`mne.Epochs.plot` now accepts an ``event_id`` parameter (useful in tandem with ``event_colors`` for specifying event colors by name) by `Daniel McCloy`_.

- New time conversion options for methods :meth:`mne.io.Raw.to_data_frame`, :meth:`mne.Epochs.to_data_frame`, :meth:`mne.Evoked.to_data_frame`, and :meth:`mne.SourceEstimate.to_data_frame`, by `Daniel McCloy`_.
1 change: 1 addition & 0 deletions mne/conftest.py
@@ -82,6 +82,7 @@ def pytest_configure(config):
ignore:.*TraitTuple.*trait.*handler.*deprecated.*:DeprecationWarning
ignore:.*rich_compare.*metadata.*deprecated.*:DeprecationWarning
ignore:.*In future, it will be an error for 'np.bool_'.*:DeprecationWarning
+ ignore:.*Converting `np\.character` to a dtype is deprecated.*:DeprecationWarning
always:.*get_data.* is deprecated in favor of.*:DeprecationWarning
""" # noqa: E501
for warning_line in warning_lines.split('\n'):
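For reference, each entry in `warning_lines` above is registered as a pytest `filterwarnings` rule; a minimal sketch of that registration pattern, mirroring the surrounding `pytest_configure` with only the new rule shown:

    def pytest_configure(config):
        # Each non-empty, non-comment line becomes the equivalent of a
        # "filterwarnings" entry in setup.cfg / pytest.ini.
        warning_lines = r"""
        ignore:.*Converting `np\.character` to a dtype is deprecated.*:DeprecationWarning
        """
        for warning_line in warning_lines.split('\n'):
            warning_line = warning_line.strip()
            if warning_line and not warning_line.startswith('#'):
                config.addinivalue_line('filterwarnings', warning_line)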
2 changes: 1 addition & 1 deletion mne/cov.py
@@ -964,7 +964,7 @@ def _compute_covariance_auto(data, method, info, method_params, cv,
estimator_cov_info = list()
msg = 'Estimating covariance using %s'

- ok_sklearn = check_version('sklearn', '0.15')
+ ok_sklearn = check_version('sklearn')
if not ok_sklearn and (len(method) != 1 or method[0] != 'empirical'):
raise ValueError('scikit-learn is not installed, `method` must be '
'`empirical`, got %s' % (method,))
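With the version argument dropped, `check_version` only needs to answer "is scikit-learn importable at all?"; a hedged sketch of the semantics assumed here (the real helper lives in `mne.utils` and may differ in detail):

    import importlib
    from distutils.version import LooseVersion

    def check_version(library, min_version='0.0'):
        """Return True if `library` imports and its version >= min_version (sketch)."""
        try:
            lib = importlib.import_module(library)
        except ImportError:
            return False
        return LooseVersion(lib.__version__) >= LooseVersion(min_version)

    # With the default minimum this reduces to an importability check.
    ok_sklearn = check_version('sklearn')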
40 changes: 11 additions & 29 deletions mne/decoding/base.py
@@ -225,35 +225,17 @@ def _set_cv(cv, estimator=None, X=None, y=None):
else:
est_is_classifier = is_classifier(estimator)
# Setup CV
- if check_version('sklearn', '0.18'):
-     from sklearn import model_selection as models
-     from sklearn.model_selection import (check_cv, StratifiedKFold, KFold)
-     if isinstance(cv, (int, np.int)):
-         XFold = StratifiedKFold if est_is_classifier else KFold
-         cv = XFold(n_splits=cv)
-     elif isinstance(cv, str):
-         if not hasattr(models, cv):
-             raise ValueError('Unknown cross-validation')
-         cv = getattr(models, cv)
-         cv = cv()
-     cv = check_cv(cv=cv, y=y, classifier=est_is_classifier)
- else:
-     from sklearn import cross_validation as models
-     from sklearn.cross_validation import (check_cv, StratifiedKFold, KFold)
-     if isinstance(cv, (int, np.int)):
-         if est_is_classifier:
-             cv = StratifiedKFold(y=y, n_folds=cv)
-         else:
-             cv = KFold(n=len(y), n_folds=cv)
-     elif isinstance(cv, str):
-         if not hasattr(models, cv):
-             raise ValueError('Unknown cross-validation')
-         cv = getattr(models, cv)
-         if cv.__name__ not in ['KFold', 'LeaveOneOut']:
-             raise NotImplementedError('CV cannot be defined with str for'
-                                       ' sklearn < .017.')
-         cv = cv(len(y))
-     cv = check_cv(cv=cv, X=X, y=y, classifier=est_is_classifier)
+ from sklearn import model_selection as models
+ from sklearn.model_selection import (check_cv, StratifiedKFold, KFold)
+ if isinstance(cv, (int, np.int)):
+     XFold = StratifiedKFold if est_is_classifier else KFold
+     cv = XFold(n_splits=cv)
+ elif isinstance(cv, str):
+     if not hasattr(models, cv):
+         raise ValueError('Unknown cross-validation')
+     cv = getattr(models, cv)
+     cv = cv()
+ cv = check_cv(cv=cv, y=y, classifier=est_is_classifier)

# Extract train and test set to retrieve them at predict time
if hasattr(cv, 'split'):
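As a quick illustration of the retained branch, `sklearn.model_selection.check_cv` turns an integer into a concrete splitter (stratified for classifiers) and passes ready-made splitters through; a small usage sketch:

    import numpy as np
    from sklearn.model_selection import check_cv, StratifiedKFold

    y = np.array([0, 1] * 10)                      # toy binary labels
    X = np.zeros((len(y), 3))                      # placeholder features

    cv = check_cv(cv=5, y=y, classifier=True)      # int -> StratifiedKFold(5)
    print(type(cv).__name__)

    cv = check_cv(cv=StratifiedKFold(n_splits=3), y=y, classifier=True)
    for train_idx, test_idx in cv.split(X, y):     # indices for each fold
        assert len(np.intersect1d(train_idx, test_idx)) == 0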
8 changes: 4 additions & 4 deletions mne/decoding/tests/test_base.py
@@ -9,7 +9,7 @@
import pytest

from mne.fixes import is_regressor, is_classifier
- from mne.utils import requires_version, check_version
+ from mne.utils import requires_sklearn, check_version
from mne.decoding.base import (_get_inverse_funcs, LinearModel, get_coef,
cross_val_multiscore)
from mne.decoding.search_light import SlidingEstimator
@@ -55,7 +55,7 @@ def _make_data(n_samples=1000, n_features=5, n_targets=3):
return X, Y, A


- @requires_version('sklearn', '0.17')
+ @requires_sklearn
def test_get_coef():
"""Test getting linear coefficients (filters/patterns) from estimators."""
from sklearn.base import TransformerMixin, BaseEstimator
@@ -206,7 +206,7 @@ def inverse_transform(self, X):
lm.fit(X, Y, sample_weight=np.ones(len(Y)))


- @requires_version('sklearn', '0.15')
+ @requires_sklearn
def test_linearmodel():
"""Test LinearModel class for computing filters and patterns."""
# check categorical target fit in standard linear model
@@ -260,7 +260,7 @@ def test_linearmodel():
clf.fit(X, wrong_y)


- @requires_version('sklearn', '0.18')
+ @requires_sklearn
def test_cross_val_multiscore():
"""Test cross_val_multiscore for computing scores on decoding over time."""
from sklearn.model_selection import KFold, StratifiedKFold, cross_val_score
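The decorator swap assumes any installed scikit-learn is new enough, so a plain skip-if-missing marker suffices; a hedged sketch of such a decorator (MNE's real `requires_sklearn` in `mne.utils` may be implemented differently):

    import pytest

    try:
        import sklearn  # noqa: F401
        _have_sklearn = True
    except ImportError:
        _have_sklearn = False

    # Skip the decorated test entirely when scikit-learn is absent; no version pin.
    requires_sklearn = pytest.mark.skipif(not _have_sklearn,
                                          reason='scikit-learn is required')

    @requires_sklearn
    def test_needs_sklearn():
        from sklearn.linear_model import LogisticRegression
        assert LogisticRegression is not None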
12 changes: 4 additions & 8 deletions mne/decoding/tests/test_ems.py
@@ -8,7 +8,7 @@
import pytest

from mne import io, Epochs, read_events, pick_types
- from mne.utils import requires_version, check_version, run_tests_if_main
+ from mne.utils import requires_sklearn, run_tests_if_main
from mne.decoding import compute_ems, EMS

data_dir = op.join(op.dirname(__file__), '..', '..', 'io', 'tests', 'data')
@@ -21,9 +21,10 @@
event_id = dict(aud_l=1, vis_l=3)


- @requires_version('sklearn', '0.15')
+ @requires_sklearn
def test_ems():
"""Test event-matched spatial filters."""
+ from sklearn.model_selection import StratifiedKFold
raw = io.read_raw_fif(raw_fname, preload=False)

# create unequal number of events
@@ -58,12 +59,7 @@ def test_ems():
# test compute_ems cv
epochs = epochs['aud_r', 'vis_l']
epochs.equalize_event_counts(epochs.event_id)
- if check_version('sklearn', '0.18'):
-     from sklearn.model_selection import StratifiedKFold
-     cv = StratifiedKFold(n_splits=3)
- else:
-     from sklearn.cross_validation import StratifiedKFold
-     cv = StratifiedKFold(epochs.events[:, 2])
+ cv = StratifiedKFold(n_splits=3)
compute_ems(epochs, cv=cv)
compute_ems(epochs, cv=2)
pytest.raises(ValueError, compute_ems, epochs, cv='foo')
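The deleted branch used the legacy `sklearn.cross_validation` API, where the splitter was built from the labels themselves; the retained API takes only `n_splits` and receives the labels at `split()` time, e.g.:

    import numpy as np
    from sklearn.model_selection import StratifiedKFold

    y = np.array([1, 1, 1, 3, 3, 3])     # e.g. event codes like epochs.events[:, 2]
    X = np.zeros((len(y), 4))            # placeholder feature matrix

    cv = StratifiedKFold(n_splits=3)     # no labels needed at construction time
    for train_idx, test_idx in cv.split(X, y):
        # each fold preserves the class proportions of y
        assert set(train_idx).isdisjoint(test_idx)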
3 changes: 1 addition & 2 deletions mne/decoding/tests/test_receptive_field.py
@@ -10,7 +10,7 @@

from mne import io, pick_types
from mne.fixes import einsum, rfft, irfft
- from mne.utils import requires_version, requires_sklearn, run_tests_if_main
+ from mne.utils import requires_sklearn, run_tests_if_main
from mne.decoding import ReceptiveField, TimeDelayingRidge
from mne.decoding.receptive_field import (_delay_time_series, _SCORERS,
_times_to_delays, _delays_to_slice)
@@ -537,7 +537,6 @@ def test_inverse_coef():


@requires_sklearn
- @requires_version('scipy', '1.0')
def test_linalg_warning():
"""Test that warnings are issued when no regularization is applied."""
from sklearn.linear_model import Ridge
8 changes: 4 additions & 4 deletions mne/decoding/tests/test_search_light.py
@@ -6,7 +6,7 @@
from numpy.testing import assert_array_equal, assert_equal
import pytest

- from mne.utils import requires_version
+ from mne.utils import requires_sklearn
from mne.fixes import _get_args
from mne.decoding.search_light import SlidingEstimator, GeneralizingEstimator
from mne.decoding.transformer import Vectorizer
@@ -24,7 +24,7 @@ def make_data():
return X, y


- @requires_version('sklearn', '0.17')
+ @requires_sklearn
def test_search_light():
"""Test SlidingEstimator."""
from sklearn.linear_model import Ridge, LogisticRegression
@@ -171,7 +171,7 @@ def transform(self, X):
assert (isinstance(pipe.estimators_[0], BaggingClassifier))


- @requires_version('sklearn', '0.17')
+ @requires_sklearn
def test_generalization_light():
"""Test GeneralizingEstimator."""
from sklearn.pipeline import make_pipeline
@@ -258,7 +258,7 @@ def test_generalization_light():
assert_array_equal(y_preds[0], y_preds[1])


- @requires_version('sklearn', '0.19')  # 0.18 does not raise when it should
+ @requires_sklearn
def test_cross_val_predict():
"""Test cross_val_predict with predict_proba."""
from sklearn.linear_model import LinearRegression
12 changes: 5 additions & 7 deletions mne/decoding/tests/test_transformer.py
@@ -14,7 +14,7 @@
from mne.decoding import (Scaler, FilterEstimator, PSDEstimator, Vectorizer,
UnsupervisedSpatialFilter, TemporalFilter)
from mne.defaults import DEFAULTS
- from mne.utils import requires_version, run_tests_if_main, check_version
+ from mne.utils import requires_sklearn, run_tests_if_main, check_version

tmin, tmax = -0.2, 0.5
event_id = dict(aud_l=1, vis_l=3)
@@ -42,11 +42,9 @@ def test_scaler():
infos = (epochs.info, epochs.info, None, None)
epochs_data_t = epochs_data.transpose([1, 0, 2])
for method, info in zip(methods, infos):
- if method == 'median' and not check_version('sklearn', '0.17'):
-     pytest.raises(ValueError, Scaler, info, method)
-     continue
- if method == 'mean' and not check_version('sklearn', ''):
-     pytest.raises(ImportError, Scaler, info, method)
+ if method in ('mean', 'median') and not check_version('sklearn'):
+     with pytest.raises(ImportError, match='No module'):
+         Scaler(info, method)
continue
scaler = Scaler(info, method)
X = scaler.fit_transform(epochs_data, y)
@@ -172,7 +170,7 @@ def test_vectorizer():
np.random.rand(102, 12, 12))


- @requires_version('sklearn', '0.16')
+ @requires_sklearn
def test_unsupervised_spatial_filter():
"""Test unsupervised spatial filter."""
from sklearn.decomposition import PCA
5 changes: 1 addition & 4 deletions mne/decoding/transformer.py
@@ -13,7 +13,7 @@
from .. import pick_types
from ..filter import filter_data, _triage_filter_params
from ..time_frequency.psd import psd_array_multitaper
- from ..utils import check_version, fill_doc, _check_option
+ from ..utils import fill_doc, _check_option
from ..io.pick import (pick_info, _pick_data_channels, _picks_by_type,
_picks_to_idx)
from ..cov import _check_scalings_user
@@ -125,9 +125,6 @@ def __init__(self, info=None, scalings=None, with_mean=True,
from sklearn.preprocessing import StandardScaler
self._scaler = StandardScaler(self.with_mean, self.with_std)
else: # scalings == 'median':
- if not check_version('sklearn', '0.17'):
-     raise ValueError("median requires version 0.17 of "
-                      "sklearn library")
from sklearn.preprocessing import RobustScaler
self._scaler = RobustScaler(self.with_mean, self.with_std)

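For orientation, the branch above now simply picks a scikit-learn scaler from the `scalings` value, since both classes are guaranteed to exist; a standalone sketch of that selection (the real `Scaler` also supports MNE channel-type scalings, omitted here):

    import numpy as np
    from sklearn.preprocessing import StandardScaler, RobustScaler

    def make_scaler(scalings, with_mean=True, with_std=True):
        # 'mean' -> z-scoring; 'median' -> median/IQR scaling, robust to outliers
        if scalings == 'mean':
            return StandardScaler(with_mean=with_mean, with_std=with_std)
        elif scalings == 'median':
            return RobustScaler(with_centering=with_mean, with_scaling=with_std)
        raise ValueError('scalings must be "mean" or "median", got %s' % (scalings,))

    X = np.random.RandomState(0).randn(20, 5)
    print(make_scaler('median').fit_transform(X).shape)   # (20, 5)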
11 changes: 5 additions & 6 deletions mne/filter.py
@@ -9,7 +9,7 @@
from .io.pick import _picks_to_idx
from .cuda import (_setup_cuda_fft_multiply_repeated, _fft_multiply_repeated,
_setup_cuda_fft_resample, _fft_resample, _smart_pad)
- from .fixes import minimum_phase, _sosfreqz, irfft, ifftshift, fftfreq
+ from .fixes import irfft, ifftshift, fftfreq
from .parallel import parallel_func, check_n_jobs
from .time_frequency.multitaper import _mt_spectra, _compute_mt_params
from .utils import (logger, verbose, sum_squared, check_version, warn, _pl,
@@ -363,6 +363,7 @@ def _construct_fir_filter(sfreq, freq, gain, filter_length, phase, fir_window,
else:
assert fir_design == 'firwin'
fir_design = partial(_firwin_design, sfreq=sfreq)
+ from scipy.signal import minimum_phase

# issue a warning if attenuation is less than this
min_att_db = 12 if phase == 'minimum' else 20
@@ -624,7 +625,7 @@ def construct_iir_filter(iir_params, f_pass=None, f_stop=None, sfreq=None,
For more information, see the tutorials
:ref:`disc-filtering` and :ref:`tut-filter-resample`.
""" # noqa: E501
- from scipy.signal import iirfilter, iirdesign, freqz
+ from scipy.signal import iirfilter, iirdesign, freqz, sosfreqz
known_filters = ('bessel', 'butter', 'butterworth', 'cauer', 'cheby1',
'cheby2', 'chebyshev1', 'chebyshev2', 'chebyshevi',
'chebyshevii', 'ellip', 'elliptic')
@@ -691,7 +692,7 @@ def construct_iir_filter(iir_params, f_pass=None, f_stop=None, sfreq=None,
# get the gains at the cutoff frequencies
if Wp is not None:
if output == 'sos':
- cutoffs = _sosfreqz(system, worN=Wp * np.pi)[1]
+ cutoffs = sosfreqz(system, worN=Wp * np.pi)[1]
else:
cutoffs = freqz(system[0], system[1], worN=Wp * np.pi)[1]
# 2 * 20 here because we do forward-backward filtering
@@ -1821,15 +1822,13 @@ def savgol_filter(self, h_freq, verbose=None):
>>> evoked.savgol_filter(10.) # low-pass at around 10 Hz # doctest:+SKIP
>>> evoked.plot() # doctest:+SKIP
""" # noqa: E501
+ from scipy.signal import savgol_filter
_check_preload(self, 'inst.savgol_filter')
h_freq = float(h_freq)
if h_freq >= self.info['sfreq'] / 2.:
raise ValueError('h_freq must be less than half the sample rate')

# savitzky-golay filtering
- if not check_version('scipy', '0.14'):
-     raise RuntimeError('scipy >= 0.14 must be installed for savgol')
- from scipy.signal import savgol_filter
window_length = (int(np.round(self.info['sfreq'] /
h_freq)) // 2) * 2 + 1
logger.info('Using savgol length %d' % window_length)
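These hunks rely on SciPy >= 1.0 providing `sosfreqz`, `minimum_phase`, and `savgol_filter` directly, so the backports in `mne.fixes` and the runtime version check can go; a small sanity-check sketch of the now-direct SciPy calls:

    import numpy as np
    from scipy.signal import iirfilter, sosfreqz, savgol_filter

    # Second-order-sections Butterworth low-pass and its gain at the passband edge
    sos = iirfilter(N=4, Wn=0.2, btype='lowpass', ftype='butter', output='sos')
    w, h = sosfreqz(sos, worN=np.array([0.2]) * np.pi)
    print(np.abs(h))

    # Savitzky-Golay smoothing with an odd window length, mirroring the code above
    x = np.random.RandomState(0).randn(1000)
    window_length = (int(np.round(1000. / 10.)) // 2) * 2 + 1   # sfreq=1000, h_freq=10
    print(savgol_filter(x, window_length=window_length, polyorder=5).shape)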
