
add in slices kwarg to pass into suite2p params #908

Merged (9 commits) on Feb 17, 2025
brainbox/behavior/training.py (2 additions, 2 deletions)

@@ -378,7 +378,7 @@ def get_training_status(trials, task_protocol, ephys_sess_dates, n_delay):
ephys_sess_dates])
n_ephys_trials = np.array([compute_n_trials(trials[k]) for k in ephys_sess_dates])

- pass_criteria, criteria = criterion_delay(n_ephys, n_ephys_trials, perf_ephys_easy)
+ pass_criteria, criteria = criterion_delay(n_ephys_trials, perf_ephys_easy, n_ephys=n_ephys)

if pass_criteria:
status = 'ready4delay'
@@ -997,7 +997,7 @@ def criterion_ephys(psych_20, psych_80, n_trials, perf_easy, rt):
return passing, criteria


- def criterion_delay(n_ephys, n_trials, perf_easy):
+ def criterion_delay(n_trials, perf_easy, n_ephys=1):
"""
Returns bool indicating whether criteria for 'ready4delay' is met.

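The signature change makes `n_ephys` an optional keyword with a default of 1, so callers with a single ephys session can omit it; like `criterion_ephys`, the function returns a `(passing, criteria)` tuple, which the `behavior_tasks.py` change below now unpacks explicitly. A minimal sketch of the updated call, with illustrative values:

```python
from brainbox.behavior import training

# Illustrative values; perf_easy is the fraction correct on easy trials
passing, criteria = training.criterion_delay(n_trials=700, perf_easy=0.92)
if passing:
    print('subject is ready4delay')
```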
ibllib/oneibl/data_handlers.py (1 addition, 1 deletion)

@@ -201,7 +201,7 @@ def filter(self, session_datasets, **kwargs):
Parameters
----------
session_datasets : pandas.DataFrame
- An data frame of session datasets.
+ A data frame of session datasets.
kwargs
Extra arguments for `one.util.filter_datasets`, namely revision_last_before, qc, and
ignore_qc_not_set.
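For context, these kwargs pass straight through to `one.util.filter_datasets`; a hypothetical call with illustrative argument values, assuming `handler` is an instance of the class this method belongs to:

```python
# Hypothetical: keep only datasets whose QC is PASS, dropping those with
# unset QC (argument names from the docstring above; values illustrative)
filtered = handler.filter(session_datasets, qc='PASS', ignore_qc_not_set=True)
```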
ibllib/pipes/behavior_tasks.py (2 additions, 2 deletions)

@@ -423,15 +423,15 @@ def _behaviour_criterion(self, update=True, truncate_to_pass=True):
from brainbox.behavior import training

trials = alfio.load_object(self.session_path.joinpath(self.output_collection), 'trials').to_df()
- good_enough = training.criterion_delay(
+ good_enough, _ = training.criterion_delay(
n_trials=trials.shape[0],
perf_easy=training.compute_performance_easy(trials),
)
if truncate_to_pass and not good_enough:
n_trials = trials.shape[0]
while not good_enough and n_trials > 400:
n_trials -= 1
- good_enough = training.criterion_delay(
+ good_enough, _ = training.criterion_delay(
n_trials=n_trials,
perf_easy=training.compute_performance_easy(trials[:n_trials]),
)
ibllib/pipes/mesoscope_tasks.py (26 additions, 14 deletions)

@@ -129,14 +129,20 @@ def _run(self, remove_uncompressed=False, verify_output=True, overwrite=False, *
Path to compressed tar file.
"""
outfiles = [] # should be one per raw_imaging_data folder
- assert not any(x.operator for x in self.input_files), 'input datasets should not be nested'
_, all_tifs, _ = zip(*(x.find_files(self.session_path) for x in self.input_files))
+ if self.input_files[0].operator:  # multiple device collections
+     output_identifiers = self.output_files[0].identifiers
+     # Check that the number of input collections and output files match
+     assert len(self.input_files[0].identifiers) == len(output_identifiers)
+ else:
+     output_identifiers = [self.output_files[0].identifiers]
+     assert self.output_files[0].operator is None, 'only one output file expected'

# A list of tifs, grouped by raw imaging data collection
input_files = groupby(chain.from_iterable(all_tifs), key=lambda x: x.parent)
- *_, outfile_name = self.output_files[0].identifiers
- for in_dir, infiles in input_files:
+ for (in_dir, infiles), out_id in zip(input_files, output_identifiers):
infiles = list(infiles)
-     outfile = in_dir / outfile_name
+     outfile = self.session_path.joinpath(*filter(None, out_id))
if outfile.exists() and not overwrite:
_logger.info('%s already exists; skipping...', outfile.relative_to(self.session_path))
continue
@@ -277,9 +283,9 @@ def signature(self):
('mpciROIs.cellClassifier.npy', 'alf/FOV*', True),
('mpciROIs.uuids.csv', 'alf/FOV*', True),
('mpciROITypes.names.tsv', 'alf/FOV*', True),
- ('mpciROIs.masks.npy', 'alf/FOV*', True),
- ('mpciROIs.neuropilMasks.npy', 'alf/FOV*', True),
- ('_suite2p_ROIData.raw.zip', self.device_collection, False)]
+ ('mpciROIs.masks.sparse_npz', 'alf/FOV*', True),
+ ('mpciROIs.neuropilMasks.sparse_npz', 'alf/FOV*', True),
+ ('_suite2p_ROIData.raw.zip', 'alf/FOV*', False)]
}
if not self.overwrite: # If not forcing re-registration, check whether bin files already exist on disk
# Including the data.bin in the expected signature ensures raw data files are not needlessly re-downloaded
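The ROI masks move from dense `.npy` arrays to a sparse serialization with a `sparse_npz` extension. A sketch of how such files can be written and read with SciPy, assuming the mask stack is flattened to 2-D for storage (the actual layout ibllib uses may differ):

```python
import numpy as np
from scipy import sparse

# Assume a stack of ROI masks, one (Ly, Lx) image per ROI, mostly zeros
masks = np.zeros((10, 512, 512), dtype=np.float32)
masks[0, 100:110, 200:210] = 1.0

# SciPy sparse matrices are 2-D, so flatten each mask to a row first
flat = sparse.csr_matrix(masks.reshape(masks.shape[0], -1))

# Pass an open file handle so save_npz does not append a '.npz' suffix
with open('mpciROIs.masks.sparse_npz', 'wb') as f:
    sparse.save_npz(f, flat)

with open('mpciROIs.masks.sparse_npz', 'rb') as f:
    restored = sparse.load_npz(f).toarray().reshape(masks.shape)
```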
@@ -352,6 +358,7 @@ def _rename_outputs(self, suite2p_dir, frameQC_names, frameQC, rename_dict=None):
'spks.npy': 'mpci.ROIActivityDeconvolved.npy',
'Fneu.npy': 'mpci.ROINeuropilActivityF.npy'
}
+ fov_dsets = [d[0] for d in self.signature['output_files'] if d[1].startswith('alf/FOV')]
for plane_dir in self._get_plane_paths(suite2p_dir):
# Move bin file(s) out of the way
bin_files = list(plane_dir.glob('data*.bin')) # e.g. data.bin, data_raw.bin, data_chan2_raw.bin
@@ -367,8 +374,12 @@ def _rename_outputs(self, suite2p_dir, frameQC_names, frameQC, rename_dict=None):
n = int(plane_dir.name.split('plane')[1])
fov_dir = self.session_path.joinpath('alf', f'FOV_{n:02}')
if fov_dir.exists():
-     _logger.debug('Removing old folder %s', fov_dir.relative_to(self.session_path))
-     shutil.rmtree(str(fov_dir), ignore_errors=False, onerror=None)
+     for f in filter(Path.exists, map(fov_dir.joinpath, fov_dsets)):
+         _logger.debug('Removing old file %s', f.relative_to(self.session_path))
+         f.unlink()
+     if not any(fov_dir.iterdir()):
+         _logger.debug('Removing old folder %s', fov_dir.relative_to(self.session_path))
+         fov_dir.rmdir()
prev_level = _logger.level
_logger.setLevel(logging.WARNING)
shutil.make_archive(str(fov_dir / '_suite2p_ROIData.raw'), 'zip', plane_dir, logger=_logger)
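The cleanup above now removes only the datasets this task owns instead of deleting the whole FOV folder, which matters now that the sync task below writes its outputs into the same `alf/FOV*` collections. A condensed sketch of the idiom, with illustrative paths and dataset names:

```python
from pathlib import Path

fov_dir = Path('alf/FOV_00')                          # illustrative path
fov_dsets = ['mpci.times.npy', 'mpciROIs.uuids.csv']  # illustrative subset

# Delete only the datasets this task owns...
for f in filter(Path.exists, map(fov_dir.joinpath, fov_dsets)):
    f.unlink()
# ...and remove the folder only if nothing else is left in it
if fov_dir.exists() and not any(fov_dir.iterdir()):
    fov_dir.rmdir()
```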
@@ -404,7 +415,8 @@ def _rename_outputs(self, suite2p_dir, frameQC_names, frameQC, rename_dict=None):
# Remove old suite2p files
shutil.rmtree(str(suite2p_dir), ignore_errors=False, onerror=None)
# Collect all files in those directories
- return sorted(self.session_path.joinpath('alf').rglob('FOV_??/*.*.*'))
+ datasets = self.session_path.joinpath('alf').rglob('FOV_??/*.*.*')
+ return sorted(x for x in datasets if x.name in fov_dsets)

def load_meta_files(self):
"""Load the extracted imaging metadata files.
@@ -664,7 +676,7 @@ def bin_per_plane(self, metadata, **kwargs):

options = ('nplanes', 'data_path', 'save_path0', 'save_folder', 'fast_disk', 'batch_size',
'nchannels', 'keep_movie_raw', 'look_one_level_down', 'lines', 'dx', 'dy', 'force_sktiff',
-     'do_registration')
+     'do_registration', 'slices')
ops = self._meta2ops(metadata)
ops['force_sktiff'] = False
ops['do_registration'] = True
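This hunk is the change the PR title describes: `'slices'` joins the whitelist of option names that `bin_per_plane` forwards into the suite2p ops. A hypothetical call, assuming `task` is an instance of the preprocessing task in this module and that matching kwargs are merged into `ops` by surrounding code not shown in the diff:

```python
# Hypothetical usage: 'slices' now passes through the kwarg whitelist like
# 'nplanes' or 'batch_size'. The value shown is an illustrative assumption;
# suite2p defines what it actually accepts.
task.bin_per_plane(metadata, slices=[0, 1])
```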
@@ -761,7 +773,7 @@ def _run(self, rename_files=True, use_badframes=True, **kwargs):
# Load and consolidate the image metadata from JSON files
metadata, all_meta = self.load_meta_files()

- # Create suite2p output folder in raw imaging data folder
+ # Create suite2p output folder in root session path
raw_image_collections = sorted(self.session_path.glob(f'{self.device_collection}'))
save_path = self.session_path.joinpath(save_folder := 'suite2p')

@@ -851,8 +863,8 @@ def signature(self):
('_ibl_rawImagingData.meta.json', self.device_collection, True),
('rawImagingData.times_scanImage.npy', self.device_collection, True, True), # register raw
(f'_{self.sync_namespace}_softwareEvents.log.htsv', self.sync_collection, False), ],
- 'output_files': [('mpci.times.npy', 'alf/mesoscope/FOV*', True),
-                  ('mpciStack.timeshift.npy', 'alf/mesoscope/FOV*', True),]
+ 'output_files': [('mpci.times.npy', 'alf/FOV*', True),
+                  ('mpciStack.timeshift.npy', 'alf/FOV*', True),]
}
return signature

ibllib/pipes/training_status.py (5 additions, 2 deletions)

@@ -783,7 +783,7 @@ def _array_to_string(vals):
str_vals += f'{v}, '
return str_vals[:-2]

- pos = np.arange(len(criteria))[::-1] * 0.1
+ pos = np.arange(len(info))[::-1] * 0.1
for i, (k, v) in enumerate(info.items()):
str_v = _array_to_string(v)
text = axs[0].text(0, pos[i], k.capitalize(), color='k', weight='bold', fontsize=8, transform=axs[0].transAxes)
@@ -813,7 +813,10 @@ def _array_to_string(vals):
def plot_fit_params(df, subject):
fig, axs = plt.subplots(2, 3, figsize=(12, 6), gridspec_kw={'width_ratios': [2, 2, 1]})

- display_info(df, axs=[axs[0, 2], axs[1, 2]])
+ try:
+     display_info(df, axs=[axs[0, 2], axs[1, 2]])
+ except ValueError:
+     print('Could not evaluate detailed training status information')

df = df.drop_duplicates('date').reset_index(drop=True)

requirements.txt (1 addition, 1 deletion)

@@ -27,7 +27,7 @@ ibl-neuropixel>=1.6.2
iblutil>=1.13.0
iblqt>=0.4.2
mtscomp>=1.0.1
- ONE-api==3.0b4
+ ONE-api==3.0b5
phylib>=2.6.0
psychofit
slidingRP>=1.1.1 # steinmetz lab refractory period metrics