Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion .github/workflows/spec_zero.yml
Original file line number Diff line number Diff line change
Expand Up @@ -64,8 +64,9 @@ jobs:
git config --global user.email "50266005+mne-bot@users.noreply.github.com"
git config --global user.name "mne[bot]"
git checkout -b spec_zero
git add doc/changes/dev/dependency.rst # one new file, others changed
git commit -am "mne[bot]: Update dependency specifiers"
git push origin spec_zero
PR_NUM=$(gh pr create --base main --head spec_zero --title "MAINT: Update dependency specifiers" --body "Created by spec_zero [GitHub action](https://github.com/mne-tools/mne-python/actions/runs/${{ github.run_id }}). <br> <br> *Adjustments may need to be made to shims in \`mne/fixes.py\` in this or another PR. \`git grep TODO VERSION\` is a good starting point for finding potential updates.*" --label "no-changelog-entry-needed")
PR_NUM=$(gh pr create --base main --head spec_zero --title "MAINT: Update dependency specifiers" --body "Created by spec_zero [GitHub action](https://github.com/mne-tools/mne-python/actions/runs/${{ github.run_id }}). <br> <br> *It is very likely that \`tools/environment_old.yml\` needs to be updated.* <br> <br> *Adjustments may need to be made to shims in \`mne/fixes.py\` and elsewhere in this or another PR. \`git grep TODO VERSION\` is a good starting point for finding potential updates.*")
echo "Opened https://github.com/mne-tools/mne-python/pull/${PR_NUM}" >> $GITHUB_STEP_SUMMARY
if: steps.status.outputs.dirty == 'true'
2 changes: 2 additions & 0 deletions .github/workflows/tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -154,6 +154,8 @@ jobs:
if: ${{ !startswith(matrix.kind, 'pip') }}
timeout-minutes: 20
- run: bash ./tools/github_actions_dependencies.sh
- run: python ./tools/github_actions_check_old.py
if: matrix.kind == 'old'
# Minimal commands on Linux (macOS stalls)
- run: bash ./tools/get_minimal_commands.sh
if: startswith(matrix.os, 'ubuntu') && matrix.kind != 'minimal' && matrix.kind != 'old'
Expand Down
8 changes: 8 additions & 0 deletions doc/changes/dev/13611.dependency.rst
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
Updated minimum for:

- Core dependency ``scipy >= 1.12``
- Optional dependency ``pandas >= 2.2``
- Optional dependency ``pyobjc-framework-Cocoa >= 5.2.0; platform_system == "Darwin"``
- Optional dependency ``scikit-learn >= 1.4``

Changes implemented via CI action created by `Thomas Binns`_.
6 changes: 3 additions & 3 deletions environment.yml
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@ dependencies:
- numpy >=1.26,<3
- openmeeg >=2.5.7
- packaging
- pandas >=2.1
- pandas >=2.2
- pillow
- pip
- pooch >=1.5
Expand All @@ -50,8 +50,8 @@ dependencies:
- pyvistaqt >=0.11
- qdarkstyle !=3.2.2
- qtpy
- scikit-learn >=1.3
- scipy >=1.11
- scikit-learn >=1.4
- scipy >=1.12
- sip
- snirf
- statsmodels
Expand Down
1 change: 1 addition & 0 deletions mne/decoding/_fixes.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
# Copyright the MNE-Python contributors.

try:
# TODO VERSION remove once we require sklearn 1.6+
from sklearn.utils.validation import validate_data
except ImportError:
from sklearn.utils.validation import check_array, check_X_y
Expand Down
2 changes: 1 addition & 1 deletion mne/decoding/tests/test_csp.py
Original file line number Diff line number Diff line change
Expand Up @@ -495,5 +495,5 @@ def test_csp_component_ordering():
@parametrize_with_checks([CSP(), SPoC()])
def test_sklearn_compliance(estimator, check):
"""Test compliance with sklearn."""
pytest.importorskip("sklearn", minversion="1.4") # TODO VERSION remove on 1.4+
pytest.importorskip("sklearn", minversion="1.5") # TODO VERSION remove on 1.5+
check(estimator)
2 changes: 1 addition & 1 deletion mne/decoding/tests/test_ems.py
Original file line number Diff line number Diff line change
Expand Up @@ -97,5 +97,5 @@ def test_ems():
@parametrize_with_checks([EMS()])
def test_sklearn_compliance(estimator, check):
"""Test compliance with sklearn."""
pytest.importorskip("sklearn", minversion="1.4") # TODO VERSION remove on 1.4+
pytest.importorskip("sklearn", minversion="1.6") # TODO VERSION remove on 1.6+
check(estimator)
4 changes: 2 additions & 2 deletions mne/decoding/tests/test_receptive_field.py
Original file line number Diff line number Diff line change
Expand Up @@ -590,7 +590,7 @@ def test_linalg_warning():
@parametrize_with_checks([TimeDelayingRidge(0, 10, 1.0, 0.1, "laplacian", n_jobs=1)])
def test_tdr_sklearn_compliance(estimator, check):
"""Test sklearn estimator compliance."""
pytest.importorskip("sklearn", minversion="1.4") # TODO VERSION remove on 1.4+
pytest.importorskip("sklearn", minversion="1.6") # TODO VERSION remove on 1.6+
ignores = (
# TDR convolves and thus its output cannot be invariant when
# shuffled or subsampled.
Expand All @@ -606,7 +606,7 @@ def test_tdr_sklearn_compliance(estimator, check):
@parametrize_with_checks([ReceptiveField(-1, 2, 1.0, estimator=Ridge(), patterns=True)])
def test_rf_sklearn_compliance(estimator, check):
"""Test sklearn RF compliance."""
pytest.importorskip("sklearn", minversion="1.4") # TODO VERSION remove on 1.4+
pytest.importorskip("sklearn", minversion="1.6") # TODO VERSION remove on 1.6+
ignores = (
# RF does time-lagging, so its output cannot be invariant when
# shuffled or subsampled.
Expand Down
2 changes: 1 addition & 1 deletion mne/decoding/tests/test_ssd.py
Original file line number Diff line number Diff line change
Expand Up @@ -621,7 +621,7 @@ def test_get_spectral_ratio():
)
def test_sklearn_compliance(estimator, check):
"""Test LinearModel compliance with sklearn."""
pytest.importorskip("sklearn", minversion="1.4") # TODO VERSION remove on 1.4+
pytest.importorskip("sklearn", minversion="1.6") # TODO VERSION remove on 1.6+
ignores = (
# Checks below fail because what sklearn passes as (n_samples, n_features)
# is considered (n_channels, n_times) by SSD and creates problems
Expand Down
2 changes: 1 addition & 1 deletion mne/decoding/tests/test_time_frequency.py
Original file line number Diff line number Diff line change
Expand Up @@ -57,5 +57,5 @@ def test_timefrequency_basic():
@parametrize_with_checks([TimeFrequency([300, 400], 1000.0, n_cycles=0.25)])
def test_sklearn_compliance(estimator, check):
"""Test LinearModel compliance with sklearn."""
pytest.importorskip("sklearn", minversion="1.4") # TODO VERSION remove on 1.4+
pytest.importorskip("sklearn", minversion="1.6") # TODO VERSION remove on 1.6+
check(estimator)
2 changes: 1 addition & 1 deletion mne/decoding/tests/test_transformer.py
Original file line number Diff line number Diff line change
Expand Up @@ -339,7 +339,7 @@ def test_bad_triage():
)
def test_sklearn_compliance(estimator, check):
"""Test LinearModel compliance with sklearn."""
pytest.importorskip("sklearn", minversion="1.4") # TODO VERSION remove on 1.4+
pytest.importorskip("sklearn", minversion="1.6") # TODO VERSION remove on 1.6+
ignores = []
if estimator.__class__.__name__ == "FilterEstimator":
ignores += [
Expand Down
4 changes: 2 additions & 2 deletions mne/decoding/transformer.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@
from ..fixes import _reshape_view
from ..time_frequency import psd_array_multitaper
from ..utils import _check_option, _validate_type, check_version, fill_doc
from ._fixes import validate_data # TODO VERSION remove with sklearn 1.4+
from ._fixes import validate_data


class MNETransformerMixin(TransformerMixin):
Expand All @@ -44,7 +44,7 @@ def _check_data(
if isinstance(epochs_data, BaseEpochs):
epochs_data = epochs_data.get_data(copy=False)
kwargs = dict(dtype=np.float64, allow_nd=True, order="C")
if check_version("sklearn", "1.4"): # TODO VERSION sklearn 1.4+
if check_version("sklearn", "1.5"): # TODO VERSION sklearn 1.5+
kwargs["force_writeable"] = True
if hasattr(self, "n_features_in_") and check_n_features:
if y is None:
Expand Down
2 changes: 2 additions & 0 deletions mne/preprocessing/nirs/tests/test_beer_lambert_law.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,6 +43,8 @@
)
def test_beer_lambert(fname, fmt, tmp_path):
"""Test converting raw CW amplitude files."""
if fname.suffix == ".snirf":
pytest.importorskip("h5py")
match fmt:
case "nirx":
raw_volt = read_raw_nirx(fname)
Expand Down
16 changes: 11 additions & 5 deletions mne/preprocessing/nirs/tests/test_nirs.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,12 +40,18 @@
)


def read_raw_snirf_safe(fname):
    """Wrap to read_raw_snirf, skipping if h5py is not installed."""
    # Must run before the read: if h5py is missing this skips the test
    # cleanly instead of letting read_raw_snirf raise ImportError.
    pytest.importorskip("h5py")
    return read_raw_snirf(fname)


@testing.requires_testing_data
@pytest.mark.parametrize(
"fname, readerfn",
[
(fname_nirx_15_0, read_raw_nirx),
(fname_labnirs_multi_wavelength, read_raw_snirf),
(fname_labnirs_multi_wavelength, read_raw_snirf_safe),
],
)
def test_fnirs_picks(fname, readerfn):
Expand Down Expand Up @@ -121,7 +127,7 @@ def _fnirs_check_bads(info):
(fname_nirx_15_0, read_raw_nirx),
(fname_nirx_15_2_short, read_raw_nirx),
(fname_nirx_15_2, read_raw_nirx),
(fname_labnirs_multi_wavelength, read_raw_snirf),
(fname_labnirs_multi_wavelength, read_raw_snirf_safe),
],
)
def test_fnirs_check_bads(fname, readerfn):
Expand Down Expand Up @@ -166,7 +172,7 @@ def test_fnirs_check_bads(fname, readerfn):
(fname_nirx_15_0, read_raw_nirx),
(fname_nirx_15_2_short, read_raw_nirx),
(fname_nirx_15_2, read_raw_nirx),
(fname_labnirs_multi_wavelength, read_raw_snirf),
(fname_labnirs_multi_wavelength, read_raw_snirf_safe),
],
)
def test_fnirs_spread_bads(fname, readerfn):
Expand Down Expand Up @@ -210,7 +216,7 @@ def test_fnirs_spread_bads(fname, readerfn):
(fname_nirx_15_0, read_raw_nirx),
(fname_nirx_15_2_short, read_raw_nirx),
(fname_nirx_15_2, read_raw_nirx),
(fname_labnirs_multi_wavelength, read_raw_snirf),
(fname_labnirs_multi_wavelength, read_raw_snirf_safe),
],
)
def test_fnirs_channel_frequency_ordering(fname, readerfn):
Expand Down Expand Up @@ -598,7 +604,7 @@ def test_order_agnostic(nirx_snirf):
(fname_nirx_15_0, read_raw_nirx),
(fname_nirx_15_2_short, read_raw_nirx),
(fname_nirx_15_2, read_raw_nirx),
(fname_labnirs_multi_wavelength, read_raw_snirf),
(fname_labnirs_multi_wavelength, read_raw_snirf_safe),
],
)
def test_nirs_channel_grouping(fname, readerfn):
Expand Down
2 changes: 2 additions & 0 deletions mne/preprocessing/nirs/tests/test_optical_density.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,8 @@
)
def test_optical_density(fname, readerfn):
"""Test return type for optical density."""
if fname.suffix == ".snirf":
pytest.importorskip("h5py")
raw_volt = readerfn(fname, preload=False)
_validate_type(raw_volt, BaseRaw, "raw")

Expand Down
1 change: 1 addition & 0 deletions mne/preprocessing/nirs/tests/test_scalp_coupling_index.py
Original file line number Diff line number Diff line change
Expand Up @@ -85,6 +85,7 @@ def test_scalp_coupling_index_multi_wavelength():
Similar to test in test_scalp_coupling_index, considers cases
specific to multi-wavelength data.
"""
pytest.importorskip("h5py")
raw = optical_density(read_raw_snirf(fname_labnirs_multi_wavelength))
times = np.arange(raw.n_times) / raw.info["sfreq"]
signal = np.sin(2 * np.pi * 1.0 * times) + 1
Expand Down
2 changes: 2 additions & 0 deletions mne/tests/test_annotations.py
Original file line number Diff line number Diff line change
Expand Up @@ -1051,6 +1051,8 @@ def dummy_annotation_file(tmp_path_factory, ch_names, fmt, with_extras):
@pytest.mark.parametrize("with_extras", [True, False])
def test_io_annotation(dummy_annotation_file, tmp_path, fmt, ch_names, with_extras):
"""Test CSV, TXT, and FIF input/output (which support ch_names)."""
if with_extras:
pytest.importorskip("pandas")
annot = read_annotations(dummy_annotation_file)
assert annot.orig_time == _ORIG_TIME
kwargs = dict(orig_time=_ORIG_TIME)
Expand Down
4 changes: 4 additions & 0 deletions mne/tests/test_chpi.py
Original file line number Diff line number Diff line change
Expand Up @@ -51,6 +51,7 @@
_record_warnings,
assert_meg_snr,
catch_logging,
check_version,
object_diff,
verbose,
)
Expand Down Expand Up @@ -884,6 +885,9 @@ def assert_slopes_correlated(actual_meas, desired_meas, *, lim=(0.99, 1.0)):
@testing.requires_testing_data
def test_refit_hpi_locs_basic():
"""Test that HPI locations can be refit."""
if not check_version("scipy", "1.13"):
# TODO VERSION remove when scipy >= 1.13 is required
pytest.xfail("SciPy 1.12 has an lwork bug affecting this test")
raw = read_raw_fif(chpi_fif_fname, allow_maxshield="yes").crop(0, 2).load_data()
# These should be similar (and both should work)
locs = compute_chpi_amplitudes(raw, t_step_min=2, t_window=1)
Expand Down
6 changes: 3 additions & 3 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -97,7 +97,7 @@ dependencies = [
"numpy >= 1.26, < 3", # released 2023-09-16, will become 2.0 on 2026-06-16
"packaging",
"pooch >= 1.5",
"scipy >= 1.11", # released 2023-06-28, will become 1.12 on 2026-01-19
"scipy >= 1.12", # released 2024-01-20, will become 1.13 on 2026-04-02
"tqdm",
]
description = "MNE-Python project for MEG and EEG data analysis."
Expand Down Expand Up @@ -155,7 +155,7 @@ full-no-qt = [
"nilearn",
"numba",
"openmeeg >= 2.5.7",
"pandas >= 2.1", # released 2023-08-30, will become 2.2 on 2026-01-19
"pandas >= 2.2", # released 2024-01-20, will become 2.3 on 2027-06-05
"pillow", # for `Brain.save_image` and `mne.Report`
"pyarrow", # only needed to avoid a deprecation warning in pandas
"pybv",
Expand All @@ -165,7 +165,7 @@ full-no-qt = [
"pyvistaqt >= 0.11", # released 2023-06-30, no newer version available
"qdarkstyle != 3.2.2",
"qtpy",
"scikit-learn >= 1.3", # released 2023-06-30, will become 1.4 on 2026-01-17
"scikit-learn >= 1.4", # released 2024-01-18, will become 1.5 on 2026-05-21
"sip",
"snirf",
"statsmodels",
Expand Down
55 changes: 44 additions & 11 deletions tools/dev/spec_zero_update_versions.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@
adopted.

MNE-Python's policy differs from SPEC0 in the following ways:

- Python versions are supported for at least 3 years after release, but possibly longer
at the discretion of the MNE-Python maintainers based on, e.g., maintainability,
features.
Expand All @@ -23,6 +24,7 @@
https://github.com/mne-tools/mne-python/pull/13451#discussion_r2445337934

For example, in October 2025:

- The latest version of NumPy available 2 years prior was 1.26.1 (released October
2023), making the latest minor release 1.26, which would be pinned. Support for 1.26
would be dropped in June 2026 in favour of 2.0, which was released in June 2024.
Expand All @@ -39,6 +41,8 @@
import collections
import datetime
import re
from copy import deepcopy
from pathlib import Path

import requests
from packaging.requirements import Requirement
Expand All @@ -60,6 +64,8 @@
SUPPORT_TIME = datetime.timedelta(days=365 * 2)
CURRENT_DATE = datetime.datetime.now()

project_root = Path(__file__).parent.parent.parent


def get_release_and_drop_dates(package):
"""Get release and drop dates for a given package from pypi.org."""
Expand All @@ -70,7 +76,7 @@ def get_release_and_drop_dates(package):
headers={"Accept": "application/vnd.pypi.simple.v1+json"},
timeout=10,
).json()
print("OK")
print("OK", flush=True)
file_date = collections.defaultdict(list)
for f in response["files"]:
if f["filename"].endswith(".tar.gz") or f["filename"].endswith(".zip"):
Expand Down Expand Up @@ -99,7 +105,7 @@ def get_release_and_drop_dates(package):


def update_specifiers(dependencies, releases):
"""Update dependency version specifiers."""
"""Update dependency version specifiers inplace."""
for idx, dep in enumerate(dependencies):
req = Requirement(dep)
pkg_name = req.name
Expand Down Expand Up @@ -153,7 +159,6 @@ def update_specifiers(dependencies, releases):
dependencies._value[idx], min_ver_release, next_ver, next_ver_release
)
dependencies[idx] = _prettify_requirement(req)
return dependencies


def _as_minor_version(ver):
Expand Down Expand Up @@ -239,19 +244,47 @@ def _find_specifier_order(specifiers):
}

# Get dependencies from pyproject.toml
pyproject = TOMLFile("pyproject.toml")
pyproject = TOMLFile(project_root / "pyproject.toml")
pyproject_data = pyproject.read()
project_info = pyproject_data.get("project")
project_info = pyproject_data["project"]
core_dependencies = project_info["dependencies"]
opt_dependencies = project_info.get("optional-dependencies", {})
opt_dependencies = project_info["optional-dependencies"]

# Update version specifiers
core_dependencies = update_specifiers(core_dependencies, package_releases)
changed = []
old_deps = deepcopy(core_dependencies)
update_specifiers(core_dependencies, package_releases)
changed.extend(
[
f"Core dependency ``{new}``"
for new, old in zip(core_dependencies, old_deps)
if new != old
]
)
for key in opt_dependencies:
opt_dependencies[key] = update_specifiers(opt_dependencies[key], package_releases)
pyproject_data["project"]["dependencies"] = core_dependencies
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@tsbinns the old code both updated inplace and set vars, which is unnecessary (and led to some confusion where I assumed things were not updated inplace because the vars were set). I've changed it just to operate inplace where possible.

if opt_dependencies:
pyproject_data["project"]["optional-dependencies"] = opt_dependencies
old_deps = deepcopy(opt_dependencies[key])
update_specifiers(opt_dependencies[key], package_releases)
changed.extend(
[
f"Optional dependency ``{new}``"
for new, old in zip(opt_dependencies[key], old_deps)
if new != old
]
)

# Need to write a changelog entry if versions were updated.
if changed:
    # Human-readable summary of every bumped specifier, one bullet per change.
    changelog_text = "Updated minimum for:\n\n"
    changelog_text += "\n".join(f"- {change}" for change in changed)
    print(changelog_text, flush=True)
    # no reason to print this but it should go in the changelog
    # (grammar fix: "Changes implemented", not "Changed implemented")
    changelog_text += (
        "\n\nChanges implemented via CI action created by `Thomas Binns`_.\n"
    )
    changelog_path = project_root / "doc" / "changes" / "dev" / "dependency.rst"
    changelog_path.write_text(changelog_text, encoding="utf-8")
else:
    print("No dependency versions needed updating.", flush=True)

# Save updated pyproject.toml (replace ugly \" with ' first)
pyproject_data = parse(pyproject_data.as_string().replace('\\"', "'"))
Expand Down
Loading