diff --git a/.github/workflows/spec_zero.yml b/.github/workflows/spec_zero.yml
index aafac8ff1da..e3cd7a0b1c7 100644
--- a/.github/workflows/spec_zero.yml
+++ b/.github/workflows/spec_zero.yml
@@ -64,8 +64,9 @@ jobs:
git config --global user.email "50266005+mne-bot@users.noreply.github.com"
git config --global user.name "mne[bot]"
git checkout -b spec_zero
+ git add doc/changes/dev/dependency.rst # one new file, others changed
git commit -am "mne[bot]: Update dependency specifiers"
git push origin spec_zero
- PR_NUM=$(gh pr create --base main --head spec_zero --title "MAINT: Update dependency specifiers" --body "Created by spec_zero [GitHub action](https://github.com/mne-tools/mne-python/actions/runs/${{ github.run_id }}).
*Adjustments may need to be made to shims in \`mne/fixes.py\` in this or another PR. \`git grep TODO VERSION\` is a good starting point for finding potential updates.*" --label "no-changelog-entry-needed")
+ PR_NUM=$(gh pr create --base main --head spec_zero --title "MAINT: Update dependency specifiers" --body "Created by spec_zero [GitHub action](https://github.com/mne-tools/mne-python/actions/runs/${{ github.run_id }}).
*It is very likely that \`tools/environment_old.yml\` needs to be updated.*
*Adjustments may need to be made to shims in \`mne/fixes.py\` and elsewhere in this or another PR. \`git grep TODO VERSION\` is a good starting point for finding potential updates.*")
echo "Opened https://github.com/mne-tools/mne-python/pull/${PR_NUM}" >> $GITHUB_STEP_SUMMARY
if: steps.status.outputs.dirty == 'true'
diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
index 49bc3ed44c9..14c79460e2f 100644
--- a/.github/workflows/tests.yml
+++ b/.github/workflows/tests.yml
@@ -154,6 +154,8 @@ jobs:
if: ${{ !startswith(matrix.kind, 'pip') }}
timeout-minutes: 20
- run: bash ./tools/github_actions_dependencies.sh
+ - run: python ./tools/github_actions_check_old.py
+ if: matrix.kind == 'old'
# Minimal commands on Linux (macOS stalls)
- run: bash ./tools/get_minimal_commands.sh
if: startswith(matrix.os, 'ubuntu') && matrix.kind != 'minimal' && matrix.kind != 'old'
diff --git a/doc/changes/dev/13611.dependency.rst b/doc/changes/dev/13611.dependency.rst
new file mode 100644
index 00000000000..61fa8132670
--- /dev/null
+++ b/doc/changes/dev/13611.dependency.rst
@@ -0,0 +1,8 @@
+Updated minimum for:
+
+- Core dependency ``scipy >= 1.12``
+- Optional dependency ``pandas >= 2.2``
+- Optional dependency ``pyobjc-framework-Cocoa >= 5.2.0; platform_system == "Darwin"``
+- Optional dependency ``scikit-learn >= 1.4``
+
+Changes implemented via CI action created by `Thomas Binns`_.
\ No newline at end of file
diff --git a/environment.yml b/environment.yml
index e8e3d01f688..6caa1234d67 100644
--- a/environment.yml
+++ b/environment.yml
@@ -36,7 +36,7 @@ dependencies:
- numpy >=1.26,<3
- openmeeg >=2.5.7
- packaging
- - pandas >=2.1
+ - pandas >=2.2
- pillow
- pip
- pooch >=1.5
@@ -50,8 +50,8 @@ dependencies:
- pyvistaqt >=0.11
- qdarkstyle !=3.2.2
- qtpy
- - scikit-learn >=1.3
- - scipy >=1.11
+ - scikit-learn >=1.4
+ - scipy >=1.12
- sip
- snirf
- statsmodels
diff --git a/mne/decoding/_fixes.py b/mne/decoding/_fixes.py
index f0f7689bc75..0a71721279e 100644
--- a/mne/decoding/_fixes.py
+++ b/mne/decoding/_fixes.py
@@ -3,6 +3,7 @@
# Copyright the MNE-Python contributors.
try:
+ # TODO VERSION remove once we require sklearn 1.6+
from sklearn.utils.validation import validate_data
except ImportError:
from sklearn.utils.validation import check_array, check_X_y
diff --git a/mne/decoding/tests/test_csp.py b/mne/decoding/tests/test_csp.py
index 4411267b407..f3f5e16cf98 100644
--- a/mne/decoding/tests/test_csp.py
+++ b/mne/decoding/tests/test_csp.py
@@ -495,5 +495,5 @@ def test_csp_component_ordering():
@parametrize_with_checks([CSP(), SPoC()])
def test_sklearn_compliance(estimator, check):
"""Test compliance with sklearn."""
- pytest.importorskip("sklearn", minversion="1.4") # TODO VERSION remove on 1.4+
+ pytest.importorskip("sklearn", minversion="1.5") # TODO VERSION remove on 1.5+
check(estimator)
diff --git a/mne/decoding/tests/test_ems.py b/mne/decoding/tests/test_ems.py
index c713e1bce17..6dadf5094c3 100644
--- a/mne/decoding/tests/test_ems.py
+++ b/mne/decoding/tests/test_ems.py
@@ -97,5 +97,5 @@ def test_ems():
@parametrize_with_checks([EMS()])
def test_sklearn_compliance(estimator, check):
"""Test compliance with sklearn."""
- pytest.importorskip("sklearn", minversion="1.4") # TODO VERSION remove on 1.4+
+ pytest.importorskip("sklearn", minversion="1.6") # TODO VERSION remove on 1.6+
check(estimator)
diff --git a/mne/decoding/tests/test_receptive_field.py b/mne/decoding/tests/test_receptive_field.py
index b9bf9693bd8..e1d6de8166b 100644
--- a/mne/decoding/tests/test_receptive_field.py
+++ b/mne/decoding/tests/test_receptive_field.py
@@ -590,7 +590,7 @@ def test_linalg_warning():
@parametrize_with_checks([TimeDelayingRidge(0, 10, 1.0, 0.1, "laplacian", n_jobs=1)])
def test_tdr_sklearn_compliance(estimator, check):
"""Test sklearn estimator compliance."""
- pytest.importorskip("sklearn", minversion="1.4") # TODO VERSION remove on 1.4+
+ pytest.importorskip("sklearn", minversion="1.6") # TODO VERSION remove on 1.6+
ignores = (
# TDR convolves and thus its output cannot be invariant when
# shuffled or subsampled.
@@ -606,7 +606,7 @@ def test_tdr_sklearn_compliance(estimator, check):
@parametrize_with_checks([ReceptiveField(-1, 2, 1.0, estimator=Ridge(), patterns=True)])
def test_rf_sklearn_compliance(estimator, check):
"""Test sklearn RF compliance."""
- pytest.importorskip("sklearn", minversion="1.4") # TODO VERSION remove on 1.4+
+ pytest.importorskip("sklearn", minversion="1.6") # TODO VERSION remove on 1.6+
ignores = (
# RF does time-lagging, so its output cannot be invariant when
# shuffled or subsampled.
diff --git a/mne/decoding/tests/test_ssd.py b/mne/decoding/tests/test_ssd.py
index 236e65b82fd..086413b043f 100644
--- a/mne/decoding/tests/test_ssd.py
+++ b/mne/decoding/tests/test_ssd.py
@@ -621,7 +621,7 @@ def test_get_spectral_ratio():
)
def test_sklearn_compliance(estimator, check):
"""Test LinearModel compliance with sklearn."""
- pytest.importorskip("sklearn", minversion="1.4") # TODO VERSION remove on 1.4+
+ pytest.importorskip("sklearn", minversion="1.6") # TODO VERSION remove on 1.6+
ignores = (
# Checks below fail because what sklearn passes as (n_samples, n_features)
# is considered (n_channels, n_times) by SSD and creates problems
diff --git a/mne/decoding/tests/test_time_frequency.py b/mne/decoding/tests/test_time_frequency.py
index 1ac6bba5dcb..9765c85533e 100644
--- a/mne/decoding/tests/test_time_frequency.py
+++ b/mne/decoding/tests/test_time_frequency.py
@@ -57,5 +57,5 @@ def test_timefrequency_basic():
@parametrize_with_checks([TimeFrequency([300, 400], 1000.0, n_cycles=0.25)])
def test_sklearn_compliance(estimator, check):
"""Test LinearModel compliance with sklearn."""
- pytest.importorskip("sklearn", minversion="1.4") # TODO VERSION remove on 1.4+
+ pytest.importorskip("sklearn", minversion="1.6") # TODO VERSION remove on 1.6+
check(estimator)
diff --git a/mne/decoding/tests/test_transformer.py b/mne/decoding/tests/test_transformer.py
index 1911aa650e5..0660b358c3d 100644
--- a/mne/decoding/tests/test_transformer.py
+++ b/mne/decoding/tests/test_transformer.py
@@ -339,7 +339,7 @@ def test_bad_triage():
)
def test_sklearn_compliance(estimator, check):
"""Test LinearModel compliance with sklearn."""
- pytest.importorskip("sklearn", minversion="1.4") # TODO VERSION remove on 1.4+
+ pytest.importorskip("sklearn", minversion="1.6") # TODO VERSION remove on 1.6+
ignores = []
if estimator.__class__.__name__ == "FilterEstimator":
ignores += [
diff --git a/mne/decoding/transformer.py b/mne/decoding/transformer.py
index c5fd14d9568..c5b3a5ca704 100644
--- a/mne/decoding/transformer.py
+++ b/mne/decoding/transformer.py
@@ -20,7 +20,7 @@
from ..fixes import _reshape_view
from ..time_frequency import psd_array_multitaper
from ..utils import _check_option, _validate_type, check_version, fill_doc
-from ._fixes import validate_data # TODO VERSION remove with sklearn 1.4+
+from ._fixes import validate_data
class MNETransformerMixin(TransformerMixin):
@@ -44,7 +44,7 @@ def _check_data(
if isinstance(epochs_data, BaseEpochs):
epochs_data = epochs_data.get_data(copy=False)
kwargs = dict(dtype=np.float64, allow_nd=True, order="C")
- if check_version("sklearn", "1.4"): # TODO VERSION sklearn 1.4+
+ if check_version("sklearn", "1.5"): # TODO VERSION sklearn 1.5+
kwargs["force_writeable"] = True
if hasattr(self, "n_features_in_") and check_n_features:
if y is None:
diff --git a/mne/preprocessing/nirs/tests/test_beer_lambert_law.py b/mne/preprocessing/nirs/tests/test_beer_lambert_law.py
index c889237bae9..4d949331f29 100644
--- a/mne/preprocessing/nirs/tests/test_beer_lambert_law.py
+++ b/mne/preprocessing/nirs/tests/test_beer_lambert_law.py
@@ -43,6 +43,8 @@
)
def test_beer_lambert(fname, fmt, tmp_path):
"""Test converting raw CW amplitude files."""
+ if fname.suffix == ".snirf":
+ pytest.importorskip("h5py")
match fmt:
case "nirx":
raw_volt = read_raw_nirx(fname)
diff --git a/mne/preprocessing/nirs/tests/test_nirs.py b/mne/preprocessing/nirs/tests/test_nirs.py
index 069657e2501..49578a58fd0 100644
--- a/mne/preprocessing/nirs/tests/test_nirs.py
+++ b/mne/preprocessing/nirs/tests/test_nirs.py
@@ -40,12 +40,18 @@
)
+def read_raw_snirf_safe(fname):
+ """Wrap to read_raw_snirf, skipping if h5py is not installed."""
+ pytest.importorskip("h5py")
+ return read_raw_snirf(fname)
+
+
@testing.requires_testing_data
@pytest.mark.parametrize(
"fname, readerfn",
[
(fname_nirx_15_0, read_raw_nirx),
- (fname_labnirs_multi_wavelength, read_raw_snirf),
+ (fname_labnirs_multi_wavelength, read_raw_snirf_safe),
],
)
def test_fnirs_picks(fname, readerfn):
@@ -121,7 +127,7 @@ def _fnirs_check_bads(info):
(fname_nirx_15_0, read_raw_nirx),
(fname_nirx_15_2_short, read_raw_nirx),
(fname_nirx_15_2, read_raw_nirx),
- (fname_labnirs_multi_wavelength, read_raw_snirf),
+ (fname_labnirs_multi_wavelength, read_raw_snirf_safe),
],
)
def test_fnirs_check_bads(fname, readerfn):
@@ -166,7 +172,7 @@ def test_fnirs_check_bads(fname, readerfn):
(fname_nirx_15_0, read_raw_nirx),
(fname_nirx_15_2_short, read_raw_nirx),
(fname_nirx_15_2, read_raw_nirx),
- (fname_labnirs_multi_wavelength, read_raw_snirf),
+ (fname_labnirs_multi_wavelength, read_raw_snirf_safe),
],
)
def test_fnirs_spread_bads(fname, readerfn):
@@ -210,7 +216,7 @@ def test_fnirs_spread_bads(fname, readerfn):
(fname_nirx_15_0, read_raw_nirx),
(fname_nirx_15_2_short, read_raw_nirx),
(fname_nirx_15_2, read_raw_nirx),
- (fname_labnirs_multi_wavelength, read_raw_snirf),
+ (fname_labnirs_multi_wavelength, read_raw_snirf_safe),
],
)
def test_fnirs_channel_frequency_ordering(fname, readerfn):
@@ -598,7 +604,7 @@ def test_order_agnostic(nirx_snirf):
(fname_nirx_15_0, read_raw_nirx),
(fname_nirx_15_2_short, read_raw_nirx),
(fname_nirx_15_2, read_raw_nirx),
- (fname_labnirs_multi_wavelength, read_raw_snirf),
+ (fname_labnirs_multi_wavelength, read_raw_snirf_safe),
],
)
def test_nirs_channel_grouping(fname, readerfn):
diff --git a/mne/preprocessing/nirs/tests/test_optical_density.py b/mne/preprocessing/nirs/tests/test_optical_density.py
index 53b66f46238..cbb79f97b72 100644
--- a/mne/preprocessing/nirs/tests/test_optical_density.py
+++ b/mne/preprocessing/nirs/tests/test_optical_density.py
@@ -27,6 +27,8 @@
)
def test_optical_density(fname, readerfn):
"""Test return type for optical density."""
+ if fname.suffix == ".snirf":
+ pytest.importorskip("h5py")
raw_volt = readerfn(fname, preload=False)
_validate_type(raw_volt, BaseRaw, "raw")
diff --git a/mne/preprocessing/nirs/tests/test_scalp_coupling_index.py b/mne/preprocessing/nirs/tests/test_scalp_coupling_index.py
index 832a1158486..a5089623477 100644
--- a/mne/preprocessing/nirs/tests/test_scalp_coupling_index.py
+++ b/mne/preprocessing/nirs/tests/test_scalp_coupling_index.py
@@ -85,6 +85,7 @@ def test_scalp_coupling_index_multi_wavelength():
Similar to test in test_scalp_coupling_index, considers cases
specific to multi-wavelength data.
"""
+ pytest.importorskip("h5py")
raw = optical_density(read_raw_snirf(fname_labnirs_multi_wavelength))
times = np.arange(raw.n_times) / raw.info["sfreq"]
signal = np.sin(2 * np.pi * 1.0 * times) + 1
diff --git a/mne/tests/test_annotations.py b/mne/tests/test_annotations.py
index 596e37d5ce3..b67c8774cf0 100644
--- a/mne/tests/test_annotations.py
+++ b/mne/tests/test_annotations.py
@@ -1051,6 +1051,8 @@ def dummy_annotation_file(tmp_path_factory, ch_names, fmt, with_extras):
@pytest.mark.parametrize("with_extras", [True, False])
def test_io_annotation(dummy_annotation_file, tmp_path, fmt, ch_names, with_extras):
"""Test CSV, TXT, and FIF input/output (which support ch_names)."""
+ if with_extras:
+ pytest.importorskip("pandas")
annot = read_annotations(dummy_annotation_file)
assert annot.orig_time == _ORIG_TIME
kwargs = dict(orig_time=_ORIG_TIME)
diff --git a/mne/tests/test_chpi.py b/mne/tests/test_chpi.py
index ec0d9c3c70f..dbebeeb2eca 100644
--- a/mne/tests/test_chpi.py
+++ b/mne/tests/test_chpi.py
@@ -51,6 +51,7 @@
_record_warnings,
assert_meg_snr,
catch_logging,
+ check_version,
object_diff,
verbose,
)
@@ -884,6 +885,9 @@ def assert_slopes_correlated(actual_meas, desired_meas, *, lim=(0.99, 1.0)):
@testing.requires_testing_data
def test_refit_hpi_locs_basic():
"""Test that HPI locations can be refit."""
+ if not check_version("scipy", "1.13"):
+ # TODO VERSION remove when scipy >= 1.13 is required
+ pytest.xfail("SciPy 1.12 has an lwork bug affecting this test")
raw = read_raw_fif(chpi_fif_fname, allow_maxshield="yes").crop(0, 2).load_data()
# These should be similar (and both should work)
locs = compute_chpi_amplitudes(raw, t_step_min=2, t_window=1)
diff --git a/pyproject.toml b/pyproject.toml
index 08d85f5cea4..f4498eadee8 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -97,7 +97,7 @@ dependencies = [
"numpy >= 1.26, < 3", # released 2023-09-16, will become 2.0 on 2026-06-16
"packaging",
"pooch >= 1.5",
- "scipy >= 1.11", # released 2023-06-28, will become 1.12 on 2026-01-19
+ "scipy >= 1.12", # released 2024-01-20, will become 1.13 on 2026-04-02
"tqdm",
]
description = "MNE-Python project for MEG and EEG data analysis."
@@ -155,7 +155,7 @@ full-no-qt = [
"nilearn",
"numba",
"openmeeg >= 2.5.7",
- "pandas >= 2.1", # released 2023-08-30, will become 2.2 on 2026-01-19
+ "pandas >= 2.2", # released 2024-01-20, will become 2.3 on 2027-06-05
"pillow", # for `Brain.save_image` and `mne.Report`
"pyarrow", # only needed to avoid a deprecation warning in pandas
"pybv",
@@ -165,7 +165,7 @@ full-no-qt = [
"pyvistaqt >= 0.11", # released 2023-06-30, no newer version available
"qdarkstyle != 3.2.2",
"qtpy",
- "scikit-learn >= 1.3", # released 2023-06-30, will become 1.4 on 2026-01-17
+ "scikit-learn >= 1.4", # released 2024-01-18, will become 1.5 on 2026-05-21
"sip",
"snirf",
"statsmodels",
diff --git a/tools/dev/spec_zero_update_versions.py b/tools/dev/spec_zero_update_versions.py
index 4f2945e8a4e..cd925dd3531 100644
--- a/tools/dev/spec_zero_update_versions.py
+++ b/tools/dev/spec_zero_update_versions.py
@@ -9,6 +9,7 @@
adopted.
MNE-Python's policy differs from SPEC0 in the following ways:
+
- Python versions are supported for at least 3 years after release, but possibly longer
at the discretion of the MNE-Python maintainers based on, e.g., maintainability,
features.
@@ -23,6 +24,7 @@
https://github.com/mne-tools/mne-python/pull/13451#discussion_r2445337934
For example, in October 2025:
+
- The latest version of NumPy available 2 years prior was 1.26.1 (released October
2023), making the latest minor release 1.26, which would be pinned. Support for 1.26
would be dropped in June 2026 in favour of 2.0, which was released in June 2024.
@@ -39,6 +41,8 @@
import collections
import datetime
import re
+from copy import deepcopy
+from pathlib import Path
import requests
from packaging.requirements import Requirement
@@ -60,6 +64,8 @@
SUPPORT_TIME = datetime.timedelta(days=365 * 2)
CURRENT_DATE = datetime.datetime.now()
+project_root = Path(__file__).parent.parent.parent
+
def get_release_and_drop_dates(package):
"""Get release and drop dates for a given package from pypi.org."""
@@ -70,7 +76,7 @@ def get_release_and_drop_dates(package):
headers={"Accept": "application/vnd.pypi.simple.v1+json"},
timeout=10,
).json()
- print("OK")
+ print("OK", flush=True)
file_date = collections.defaultdict(list)
for f in response["files"]:
if f["filename"].endswith(".tar.gz") or f["filename"].endswith(".zip"):
@@ -99,7 +105,7 @@ def get_release_and_drop_dates(package):
def update_specifiers(dependencies, releases):
- """Update dependency version specifiers."""
+ """Update dependency version specifiers inplace."""
for idx, dep in enumerate(dependencies):
req = Requirement(dep)
pkg_name = req.name
@@ -153,7 +159,6 @@ def update_specifiers(dependencies, releases):
dependencies._value[idx], min_ver_release, next_ver, next_ver_release
)
dependencies[idx] = _prettify_requirement(req)
- return dependencies
def _as_minor_version(ver):
@@ -239,19 +244,47 @@ def _find_specifier_order(specifiers):
}
# Get dependencies from pyproject.toml
-pyproject = TOMLFile("pyproject.toml")
+pyproject = TOMLFile(project_root / "pyproject.toml")
pyproject_data = pyproject.read()
-project_info = pyproject_data.get("project")
+project_info = pyproject_data["project"]
core_dependencies = project_info["dependencies"]
-opt_dependencies = project_info.get("optional-dependencies", {})
+opt_dependencies = project_info["optional-dependencies"]
# Update version specifiers
-core_dependencies = update_specifiers(core_dependencies, package_releases)
+changed = []
+old_deps = deepcopy(core_dependencies)
+update_specifiers(core_dependencies, package_releases)
+changed.extend(
+ [
+ f"Core dependency ``{new}``"
+ for new, old in zip(core_dependencies, old_deps)
+ if new != old
+ ]
+)
for key in opt_dependencies:
- opt_dependencies[key] = update_specifiers(opt_dependencies[key], package_releases)
-pyproject_data["project"]["dependencies"] = core_dependencies
-if opt_dependencies:
- pyproject_data["project"]["optional-dependencies"] = opt_dependencies
+ old_deps = deepcopy(opt_dependencies[key])
+ update_specifiers(opt_dependencies[key], package_releases)
+ changed.extend(
+ [
+ f"Optional dependency ``{new}``"
+ for new, old in zip(opt_dependencies[key], old_deps)
+ if new != old
+ ]
+ )
+
+# Need to write a changelog entry if versions were updated
+if changed:
+ changelog_text = "Updated minimum for:\n\n"
+ changelog_text += "\n".join(f"- {change}" for change in changed)
+ print(changelog_text, flush=True)
+ # no reason to print this but it should go in the changelog
+ changelog_text += (
+        "\n\nChanges implemented via CI action created by `Thomas Binns`_.\n"
+ )
+ changelog_path = project_root / "doc" / "changes" / "dev" / "dependency.rst"
+ changelog_path.write_text(changelog_text, encoding="utf-8")
+else:
+ print("No dependency versions needed updating.", flush=True)
# Save updated pyproject.toml (replace ugly \" with ' first)
pyproject_data = parse(pyproject_data.as_string().replace('\\"', "'"))
diff --git a/tools/environment_old.yml b/tools/environment_old.yml
index 3b99b93afde..6a9430496cb 100644
--- a/tools/environment_old.yml
+++ b/tools/environment_old.yml
@@ -4,11 +4,11 @@ channels:
- conda-forge
dependencies:
- python =3.10
- - numpy =1.25
- - scipy =1.11
- - matplotlib =3.7
- - pandas =2.0
- - scikit-learn =1.3.0
+ - numpy =1.26
+ - scipy =1.12
+ - matplotlib =3.8
+ - pandas =2.2
+ - scikit-learn =1.4
- nibabel
- tqdm
- pooch =1.5
diff --git a/tools/github_actions_check_old.py b/tools/github_actions_check_old.py
new file mode 100644
index 00000000000..d1dc5550beb
--- /dev/null
+++ b/tools/github_actions_check_old.py
@@ -0,0 +1,35 @@
+#!/usr/bin/env python
+
+# Authors: The MNE-Python contributors.
+# License: BSD-3-Clause
+# Copyright the MNE-Python contributors.
+
+import importlib
+import re
+import sys
+from pathlib import Path
+
+want_parts = 7 # should be updated when we add more pins!
+regex = re.compile(r"^ - ([a-zA-Z\-]+) =([0-9.]+)$", re.MULTILINE)
+this_root = Path(__file__).parent
+env_old_text = (this_root / "environment_old.yml").read_text("utf-8")
+parts = regex.findall(env_old_text)
+assert len(parts) == want_parts, f"{len(parts)=} != {want_parts=}"
+bad = list()
+mod_name_map = {
+ "scikit-learn": "sklearn",
+}
+for mod_name, want_ver in parts:
+ if mod_name == "python":
+ got_ver = ".".join(map(str, sys.version_info[:2]))
+ else:
+ try:
+ mod = importlib.import_module(mod_name_map.get(mod_name, mod_name))
+ except Exception as exc:
+ bad.append(f"{mod_name}: not importable ({type(exc).__name__}: {exc})")
+ continue
+ got_ver = mod.__version__.lstrip("v") # pooch prepends v
+ if ".".join(got_ver.split(".")[:2]) != want_ver:
+ bad.append(f"{mod_name}: {got_ver} != {want_ver}")
+if bad:
+ raise RuntimeError("At least one module is the wrong version:\n" + "\n".join(bad))
diff --git a/tools/github_actions_dependencies.sh b/tools/github_actions_dependencies.sh
index ffe75afca44..1b80a6dab2e 100755
--- a/tools/github_actions_dependencies.sh
+++ b/tools/github_actions_dependencies.sh
@@ -16,10 +16,8 @@ if [ ! -z "$CONDA_ENV" ]; then
if [[ "${RUNNER_OS}" != "Windows" ]] && [[ "${CONDA_ENV}" != "environment_"* ]]; then
INSTALL_ARGS=""
fi
- # TODO: Until a PyVista release supports VTK 9.5+
- STD_ARGS="$STD_ARGS https://github.com/pyvista/pyvista/archive/refs/heads/main.zip"
# If on minimal or old, just install testing deps
- if [[ "${CONDA_ENV}" == "environment_"* ]]; then
+ if [[ "${CONDA_ENV}" == *'environment_'* ]]; then
GROUP="test"
EXTRAS=""
STD_ARGS="--progress-bar off"
@@ -39,7 +37,9 @@ else
fi
echo ""
# until quantities releases...
-STD_ARGS="$STD_ARGS git+https://github.com/python-quantities/python-quantities"
+if [[ "${MNE_CI_KIND}" != "old" ]]; then
+ STD_ARGS="$STD_ARGS git+https://github.com/python-quantities/python-quantities"
+fi
echo "::group::Installing test dependencies using pip"
set -x