Commit

Adjust to run with newer dependencies
ximion committed Oct 14, 2022
1 parent f64c456 commit a8835b0
Showing 13 changed files with 214 additions and 190 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/pyformat.yml
@@ -13,7 +13,7 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: [3.8]
python-version: ['3.10']

steps:
# Checks out a copy of your repository on the ubuntu-latest machine
@@ -24,7 +24,7 @@ jobs:
python-version: ${{ matrix.python-version }}
- name: Format with black
run: |
$CONDA/bin/conda install -c conda-forge black==20.8b1 click==8.0.4
$CONDA/bin/conda install -c conda-forge black~=22.8 click~=8.0.4
# stop the build if there are Python syntax errors or undefined names
$CONDA/bin/black minian
- name: Check for modified files
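A note on the quoted version in the matrix above: YAML parses an unquoted 3.10 as the number 3.1, so actions/setup-python would be asked for Python 3.1; quoting the string avoids that. A minimal check with PyYAML (illustration only — PyYAML is not used by these workflows):

    # Unquoted 3.10 collapses to the float 3.1; the quoted form stays a string.
    import yaml

    print(yaml.safe_load("python-version: [3.10]"))    # {'python-version': [3.1]}
    print(yaml.safe_load("python-version: ['3.10']"))  # {'python-version': ['3.10']}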
8 changes: 4 additions & 4 deletions .github/workflows/testandcov.yml
@@ -20,7 +20,7 @@ jobs:
max-parallel: 5
matrix:
os: ["ubuntu-latest", "macos-latest", "windows-latest"]
python-version: ["3.8"]
python-version: ['3.10']
defaults:
run:
shell: bash -l {0}
@@ -39,7 +39,7 @@
environment-file: environment.yml
- name: Install test dependencies
run: |
conda install -c conda-forge black==20.8b1 click==8.0.4 pytest==7.0.1 pytest-cov pytest-rerunfailures coverage==6.3.3
conda install -c conda-forge black~=22.8 click~=8.0.4 pytest~=7.1.2 pytest-cov pytest-rerunfailures coverage~=6.3.3
- name: Lint with black
run: |
black --check minian
@@ -48,8 +48,8 @@
id: n_workers
with:
cond: ${{ runner.os == 'macOS' }}
if_true: 3
if_false: 2
if_true: 2
if_false: 1
- name: Run doctest
run: |
pip install -r requirements/requirements-doc.txt
2 changes: 1 addition & 1 deletion docs/ext/normalize_html_id.py
@@ -41,4 +41,4 @@ def normalize_id(app, exception):


def setup(app):
app.connect("build-finished", normalize_id)
app.connect("build-finished", normalize_id)
62 changes: 32 additions & 30 deletions environment.yml
@@ -4,41 +4,43 @@ channels:
- conda-forge
- defaults
dependencies:
- bokeh=1.4.0
- cvxpy>=1.1.11
- dask=2021.2.0
- datashader=0.12.1
- distributed=2021.2.0
- ecos>=2.0.7
- ffmpeg
- ffmpeg-python>=0.2.0
- python=3.10
- pip
- bokeh~=2.4.3
- dask=2022.9.2
- datashader=0.14.2
- distributed=2022.9.2
- ecos>=2.0.10
- ffmpeg>=4.4
- fftw
- holoviews=1.12.7
- holoviews=1.15.1
- jupyter
- matplotlib-base=3.2
- matplotlib-base~=3.6.0
- natsort
- netcdf4
- networkx=2.4
- numba=0.52.0
- numpy=1.20.2
- opencv=4.2.0
- pandas=1.2.3
- panel=0.8.0
- param=1.9
- pyfftw=0.12.0
- python=3.8
- scikit-image=0.18.1
- scikit-learn=0.22.1
- scipy>=1.4.1
- networkx~=2.8.7
- numba~=0.56.2
- numpy~=1.23.3
- opencv>=4.6
- pandas~=1.5.0
- panel~=0.14.0
- param~=1.12.2
- pyfftw~=0.12.0
- scikit-image~=0.19.3
- scikit-learn~=1.1.2
- scipy~=1.9.1
- scs
- simpleitk=2.0.2
- simpleitk~=2.1.1
- sk-video
- statsmodels>=0.11.1
- statsmodels>=0.13.2
- tifffile
- xarray=0.16.2
- zarr
- sparse=0.11.2
- pymetis=2020.1
- rechunker=0.3.3
- xarray~=2022.9.0
- zarr>=2.12
- sparse=0.13.0
- pymetis=2022.1
- rechunker~=0.5.0
- medpy=0.4.0
- jinja2=2.11.3
- jinja2~=3.1.2
- pip:
- cvxpy>=1.2.1
- ffmpeg-python>=0.2.0
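Most of the exact pins above (package=x.y.z) become compatible-release pins (package~=x.y.z), and cvxpy and ffmpeg-python move under a pip: section. A compatible-release spec accepts later patch releases of the pinned version but rejects the next minor release; a quick illustration using the packaging library (demonstration only — packaging is not a dependency of this environment, and conda's ~= is meant to follow the same PEP 440 rule):

    # ~=1.5.0 is shorthand for ">=1.5.0, ==1.5.*"
    from packaging.specifiers import SpecifierSet

    spec = SpecifierSet("~=1.5.0")
    print("1.5.3" in spec)   # True  - a later patch release still matches
    print("1.6.0" in spec)   # False - a minor bump does not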
31 changes: 22 additions & 9 deletions minian/cnmf.py
@@ -24,6 +24,8 @@
from scipy.sparse import dia_matrix
from skimage import morphology as moph
from sklearn.linear_model import LassoLars
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import StandardScaler
from statsmodels.tsa.stattools import acovf

from .utilities import (
@@ -386,6 +388,8 @@ def update_spatial(
C_store=C_store,
f=f_in,
)
if not isinstance(cur_blk, sparse.SparseArray):
cur_blk = cur_blk.map_blocks(sparse.COO)
else:
cur_blk = darr.array(sparse.zeros((cur_sub.shape)))
A_new[hblk, wblk, 0] = cur_blk
@@ -512,8 +516,11 @@ def update_spatial_perpx(
if (f is not None) and sub[-1]:
C = np.concatenate([C, f.reshape((-1, 1))], axis=1)
idx = np.concatenate([idx, np.array(len(sub) - 1).reshape(-1)])
clf = LassoLars(alpha=alpha, positive=True)
coef = clf.fit(C, y).coef_
model = make_pipeline(
StandardScaler(with_mean=False),
LassoLars(alpha=alpha, positive=True, normalize=False),
)
coef = model.fit(C, y).named_steps["lassolars"].coef_
mask = coef > 0
coef = coef[mask]
idx = idx[mask]
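The pipeline introduced above reflects scikit-learn deprecating (and, in 1.2, removing) the normalize keyword of LassoLars; feature scaling now happens in an explicit StandardScaler(with_mean=False) step. A self-contained sketch of the same pattern on made-up data (not MiniAn's actual matrices):

    # Replacing LassoLars(normalize=...) with an explicit scaling step in a Pipeline.
    import numpy as np
    from sklearn.linear_model import LassoLars
    from sklearn.pipeline import make_pipeline
    from sklearn.preprocessing import StandardScaler

    rng = np.random.default_rng(0)
    C = rng.random((200, 4))                               # toy design matrix
    y = C @ np.array([1.0, 0.0, 2.0, 0.5]) + 0.01 * rng.standard_normal(200)

    model = make_pipeline(
        StandardScaler(with_mean=False),                   # scale without centering
        LassoLars(alpha=0.01, positive=True),              # normalize=False is only needed on scikit-learn < 1.2
    )
    coef = model.fit(C, y).named_steps["lassolars"].coef_  # step name is the lowercased class name
    print(coef)                                            # sparse, non-negative coefficients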
Expand Down Expand Up @@ -611,7 +618,7 @@ def compute_trace(
"""
fms = Y.coords["frame"]
uid = A.coords["unit_id"]
Y = Y.data
Y = Y.data.map_blocks(sparse.COO)
A = darr.from_array(A.data.map_blocks(sparse.COO).compute(), chunks=-1)
C = C.data.map_blocks(sparse.COO).T
b = (
@@ -620,10 +627,10 @@
.reshape((1, Y.shape[1], Y.shape[2]))
.compute()
)
f = f.fillna(0).data.reshape((-1, 1))
f = f.fillna(0).data.reshape((-1, 1)).map_blocks(sparse.COO)
AtA = darr.tensordot(A, A, axes=[(1, 2), (1, 2)]).compute()
A_norm = (
(1 / (A ** 2).sum(axis=(1, 2)))
(1 / (A**2).sum(axis=(1, 2)))
.map_blocks(
lambda a: sparse.diagonalize(sparse.COO(a)), chunks=(A.shape[0], A.shape[0])
)
@@ -648,7 +655,9 @@
dims=["frame", "unit_id"],
coords={"frame": fms, "unit_id": uid},
)
return YrA.transpose("unit_id", "frame")
YrA = YrA.transpose("unit_id", "frame")
YrA.data = YrA.data.map_blocks(lambda x: x.todense())
return YrA


def update_temporal(
Expand Down Expand Up @@ -833,7 +842,7 @@ def update_temporal(
\mathbf{b_0} \\right \\rVert ^2 + \\alpha \\left \\lvert \mathbf{G}
\mathbf{c} \\right \\rvert \\\\
& \\text{subject to}
& & \mathbf{c} \geq 0, \; \mathbf{G} \mathbf{c} \geq 0
& & \mathbf{c} \geq 0, \; \mathbf{G} \mathbf{c} \geq 0
\\end{aligned}
Where :math:`\mathbf{y}` is the estimated residule trace (`YrA`) for the
@@ -855,10 +864,12 @@
if YrA is None:
YrA = compute_trace(Y, A, b, C, f).persist()
Ymask = (YrA > 0).any("frame").compute()
if hasattr(Ymask.data, "todense"):
Ymask.data = Ymask.data.todense()
A, C, YrA = A.sel(unit_id=Ymask), C.sel(unit_id=Ymask), YrA.sel(unit_id=Ymask)
print("grouping overlaping units")
A_sps = (A.data.map_blocks(sparse.COO) > 0).compute().astype(np.float32)
A_inter = sparse.tensordot(A_sps, A_sps, axes=[(1, 2), (1, 2)])
A_inter = sparse.tensordot(A_sps, A_sps, axes=[(1, 2), (1, 2)]).todense()
A_usum = np.tile(A_sps.sum(axis=(1, 2)).todense(), (A_sps.shape[0], 1))
A_usum = A_usum + A_usum.T
jac = scipy.sparse.csc_matrix(A_inter / (A_usum - A_inter) > jac_thres)
@@ -1066,7 +1077,7 @@ def get_ar_coef(
else:
max_lag = p + add_lag
cov = acovf(y, fft=True)
C_mat = toeplitz(cov[:max_lag], cov[:p]) - sn ** 2 * np.eye(max_lag, p)
C_mat = toeplitz(cov[:max_lag], cov[:p]) - sn**2 * np.eye(max_lag, p)
g = lstsq(C_mat, cov[1 : max_lag + 1])[0]
if pad:
res = np.zeros(pad)
@@ -1165,6 +1176,8 @@ def update_temporal_block(
excluded=["pad", "add_lag"],
signature="(f),(),()->(l)",
)
if hasattr(YrA, "todense"):
YrA = YrA.todense()
if normalize:
amean = YrA.sum(axis=1).mean()
norm_factor = YrA.shape[1] / amean
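Several of the cnmf.py hunks above convert the blocks of dask arrays to pydata/sparse COO arrays before the tensor contractions and densify the result only at the end (compute_trace, update_temporal). A minimal stand-alone sketch of that block-wise round trip, using a toy identity matrix rather than MiniAn's data:

    # dense blocks -> sparse.COO blocks -> contraction -> densify at the end
    import dask.array as darr
    import numpy as np
    import sparse

    x = darr.from_array(np.eye(6, dtype=np.float32), chunks=3)  # dense 3x3 blocks
    xs = x.map_blocks(sparse.COO)                                # each block becomes a sparse.COO
    prod = darr.tensordot(xs, xs, axes=[(1,), (0,)])             # blocks stay sparse through the contraction
    dense = prod.map_blocks(lambda b: b.todense()).compute()     # back to dense numpy
    print(np.allclose(dense, np.eye(6)))                         # True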
2 changes: 1 addition & 1 deletion minian/motion_correction.py
@@ -617,7 +617,7 @@ def check_temp(fm: np.ndarray, max_sh: int) -> float:
if perimeter <= 0:
return 0
area = cv2.contourArea(cont)
circularity = 4 * np.pi * (area / (perimeter ** 2))
circularity = 4 * np.pi * (area / (perimeter**2))
return circularity


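The only change in motion_correction.py is black 22.x spacing around **, but the line it touches is the contour circularity score, 4π·area/perimeter², which is 1 for a perfect circle and falls toward 0 as the shape elongates. A plain-numpy sanity check of those two cases (toy numbers, no OpenCV contour involved):

    # Circularity as computed in check_temp: 1.0 for a circle, lower for stretched shapes.
    import numpy as np

    def circularity(area: float, perimeter: float) -> float:
        return 4 * np.pi * area / perimeter**2

    r = 5.0
    print(circularity(np.pi * r**2, 2 * np.pi * r))    # 1.0 for a circle of radius r
    print(circularity(10.0 * 1.0, 2 * (10.0 + 1.0)))   # ~0.26 for a 10x1 rectangle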
12 changes: 6 additions & 6 deletions minian/test/test_cross_reg.py
@@ -23,11 +23,11 @@ def test_cross_reg_notebook():
cents = pd.read_pickle("./demo_data/cents.pkl")
mappings = pd.read_pickle("./demo_data/mappings.pkl")
assert len(cents) == 508
assert int(cents["height"].sum()) == 99091
assert int(cents["width"].sum()) == 213627
assert len(mappings) == 431
assert int(cents["height"].sum()) == 99096
assert int(cents["width"].sum()) == 213628
assert len(mappings) == 430
assert mappings[("group", "group")].value_counts().to_dict() == {
("session2",): 182,
("session1",): 172,
("session1", "session2"): 77,
("session2",): 181,
("session1",): 171,
("session1", "session2"): 78,
}
5 changes: 3 additions & 2 deletions minian/utilities.py
@@ -837,10 +837,10 @@ def update_graph(self, scheduler, client, tasks, **kwargs):
for tk in tasks.keys():
for pattern, annt in self.annt_dict.items():
if re.search(pattern, tk):
ts = parent._tasks.get(tk)
ts = parent.tasks.get(tk)
res = annt.get("resources", None)
if res:
ts._resource_restrictions = res
ts.resource_restrictions = res
pri = annt.get("priority", None)
if pri:
pri_org = list(ts._priority)
@@ -856,6 +856,7 @@ def custom_arr_optimize(
rename_dict: Optional[dict] = None,
rewrite_dict: Optional[dict] = None,
keep_patterns=[],
flush=True,
) -> dict:
"""
Customized implementation of array optimization function.