Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
36 changes: 23 additions & 13 deletions .github/workflows/python-package.yml
Original file line number Diff line number Diff line change
@@ -1,6 +1,3 @@
# This workflow will install Python dependencies, run tests and lint with a variety of Python versions
# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions

name: Python package

on:
Expand All @@ -11,34 +8,47 @@ on:

jobs:
build:

runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
python-version: ["3.8","3.9","3.10"]
# 3.13 is now available and worth testing for NumPy 2.0+
python-version: ["3.10", "3.11", "3.12", "3.13"]

steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4

- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v3
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
cache: 'pip' # Cache pip's download cache between runs (keyed on requirements files)

- name: Install dependencies
run: |
python -m pip install --upgrade pip
python -m pip install flake8 pytest pytest-cov

# Install requirements first
if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
pip install pyyaml TextGrid
pip install --user -U mne-bids

# Install remaining packages without --user
pip install pyyaml TextGrid mne-bids

# Final sanity check to ensure NumPy 2.0 didn't get downgraded
python -c "import numpy; print(f'Using NumPy {numpy.__version__}')"

- name: Lint with flake8
run: |
# stop the build if there are Python syntax errors or undefined names
flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
# exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics

- name: Test with pytest and get coverage
run: |
pytest tests/ --cov=./naplib/ --cov-report=xml
# Using -v helps identify exactly which test crashes if collection fails
pytest -v tests/ --cov=./naplib/ --cov-report=xml

- name: Upload coverage reports to Codecov
uses: codecov/codecov-action@v3
uses: codecov/codecov-action@v4
with:
token: ${{ secrets.CODECOV_TOKEN }} # required by v4 for private repos; recommended for public repos to avoid rate limits
2 changes: 1 addition & 1 deletion naplib/features/aligner.py
Original file line number Diff line number Diff line change
Expand Up @@ -307,7 +307,7 @@ def align_files(self, audio_dir, text_dir, names=None):
if old_fs == 16000:
write_wavfile(join(self.tmp_dir, wavfile_), 16000, wavdata)
else:
wavdata = scipy_resample(wavdata, int(16000. / old_fs))
wavdata = scipy_resample(wavdata, int(len(wavdata) * 16000. / old_fs))
write_wavfile(join(self.tmp_dir, wavfile_), 16000, wavdata)


Expand Down
12 changes: 5 additions & 7 deletions naplib/utils/surfdist.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
"""

import gdist
import matplotlib as mpl
import matplotlib.pyplot as plt
from matplotlib.colors import LightSource
import numpy as np
Expand All @@ -24,7 +25,7 @@ def load_freesurfer_label(annot_input, label_name):
labels, _, names = read_annot(annot_input)
names = [i.decode("utf-8") for i in names]
label_value = names.index(label_name)
label_nodes = np.array(np.where(np.in1d(labels, label_value)), dtype=np.int32)
label_nodes = np.array(np.where(np.isin(labels, label_value)), dtype=np.int32)

return label_nodes

Expand Down Expand Up @@ -81,7 +82,7 @@ def triangles_keep_cortex(triangles, cortex):
# for or each face/triangle keep only those that only contain nodes within the list of cortex nodes
input_shape = triangles.shape
triangle_is_in_cortex = np.all(
np.reshape(np.in1d(triangles.ravel(), cortex), input_shape), axis=1
np.reshape(np.isin(triangles.ravel(), cortex), input_shape), axis=1
)

cortex_triangles_old = np.array(triangles[triangle_is_in_cortex], dtype=np.int32)
Expand All @@ -100,7 +101,7 @@ def translate_src(src, cortex):
"""
Convert source nodes to new surface (without medial wall).
"""
src_new = np.array(np.where(np.in1d(cortex, src))[0], dtype=np.int32)
src_new = np.array(np.where(np.isin(cortex, src))[0], dtype=np.int32)

return src_new

Expand Down Expand Up @@ -190,10 +191,7 @@ def surfdist_viz(

# if cmap is given as string, translate to matplotlib cmap
if isinstance(cmap, str):
try:
cmap = plt.cm.get_cmap(cmap)
except AttributeError:
cmap = plt.get_cmap(cmap)
cmap = mpl.colormaps[cmap]

if ax is None:
premade_ax = False
Expand Down
45 changes: 27 additions & 18 deletions requirements.txt
Original file line number Diff line number Diff line change
@@ -1,20 +1,29 @@
matplotlib>=3.1.0
numpy>=1.15.0,<2.0 # numpy>=2.0 is incompatible with gdist, a dependency of surfdist
scipy>=1.5.0
pandas>=1.0.0
statsmodels>=0.13.0
hdf5storage>=0.1.1
seaborn>=0.12.0
# Core Scientific Stack (Updated for NumPy 2.0 ABI/API compatibility)
numpy>=2.0.0
scipy>=1.13.0
pandas>=2.2.2
matplotlib>=3.9.0
statsmodels>=0.14.2
scikit-learn>=1.5.0

# Data Storage & IO
h5py>=3.11.0
hdf5storage>=0.2.0
pyyaml
TextGrid
scikit-learn
joblib
mne
h5py
patsy
nibabel>=5.2.1

# Visualization
seaborn>=0.13.0
plotly>=5.22.0

# Domain Specific & Specialized
tvb-gdist>=2.9.2
mne>=1.7.1
scikit-spatial>=7.1.0
tdt>=0.5.0
packaging
plotly>=5.0.0
nibabel>=5.0.0
scikit-spatial>=5.0.0
gdist>=2.0.0
TextGrid

# Utilities
joblib>=1.4.2
patsy>=0.5.6
packaging>=24.0
2 changes: 1 addition & 1 deletion tests/preprocessing/test_filter.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ def bandpower(x, fs, fmin, fmax):
f, Pxx = scipy.signal.periodogram(x, fs=fs)
ind_min = np.argmax(f > fmin) - 1
ind_max = np.argmax(f > fmax) - 1
return np.trapz(Pxx[ind_min: ind_max], f[ind_min: ind_max])
return np.trapezoid(Pxx[ind_min: ind_max], f[ind_min: ind_max])

@pytest.fixture(scope='module')
def data():
Expand Down
Loading