
Commit 19c5ea5
Merge pull request #901 from int-brain-lab/aws
Aws
oliche authored Jan 6, 2025
2 parents 7295c07 + f3f44ad commit 19c5ea5
Showing 11 changed files with 61 additions and 43 deletions.
20 changes: 10 additions & 10 deletions brainbox/behavior/training.py
@@ -430,24 +430,24 @@ def display_status(subj, sess_dates, status, perf_easy=None, n_trials=None, psyc
               f"{sess_dates[2]}]")
     elif psych_20 is None:
         print(f"\n{subj} : {status} \nSession dates={[x for x in sess_dates]}, "
-              f"Perf easy={[np.around(pe,2) for pe in perf_easy]}, "
+              f"Perf easy={[np.around(pe, 2) for pe in perf_easy]}, "
               f"N trials={[nt for nt in n_trials]} "
               f"\nPsych fit over last 3 sessions: "
-              f"bias={np.around(psych[0],2)}, thres={np.around(psych[1],2)}, "
-              f"lapse_low={np.around(psych[2],2)}, lapse_high={np.around(psych[3],2)} "
+              f"bias={np.around(psych[0], 2)}, thres={np.around(psych[1], 2)}, "
+              f"lapse_low={np.around(psych[2], 2)}, lapse_high={np.around(psych[3], 2)} "
               f"\nMedian reaction time at 0 contrast over last 3 sessions = "
-              f"{np.around(rt,2)}")
+              f"{np.around(rt, 2)}")
 
     else:
         print(f"\n{subj} : {status} \nSession dates={[x for x in sess_dates]}, "
-              f"Perf easy={[np.around(pe,2) for pe in perf_easy]}, "
+              f"Perf easy={[np.around(pe, 2) for pe in perf_easy]}, "
              f"N trials={[nt for nt in n_trials]} "
               f"\nPsych fit over last 3 sessions (20): "
-              f"bias={np.around(psych_20[0],2)}, thres={np.around(psych_20[1],2)}, "
-              f"lapse_low={np.around(psych_20[2],2)}, lapse_high={np.around(psych_20[3],2)} "
-              f"\nPsych fit over last 3 sessions (80): bias={np.around(psych_80[0],2)}, "
-              f"thres={np.around(psych_80[1],2)}, lapse_low={np.around(psych_80[2],2)}, "
-              f"lapse_high={np.around(psych_80[3],2)} "
+              f"bias={np.around(psych_20[0], 2)}, thres={np.around(psych_20[1], 2)}, "
+              f"lapse_low={np.around(psych_20[2], 2)}, lapse_high={np.around(psych_20[3], 2)} "
+              f"\nPsych fit over last 3 sessions (80): bias={np.around(psych_80[0], 2)}, "
+              f"thres={np.around(psych_80[1], 2)}, lapse_low={np.around(psych_80[2], 2)}, "
+              f"lapse_high={np.around(psych_80[3], 2)} "
               f"\nMedian reaction time at 0 contrast over last 3 sessions = "
               f"{np.around(rt, 2)}")
26 changes: 14 additions & 12 deletions brainbox/ephys_plots.py
@@ -439,20 +439,22 @@ def plot_brain_regions(channel_ids, channel_depths=None, brain_regions=None, dis
             bar_kwargs.update(**kwargs)
             color = col / 255
             ax.bar(x=0.5, height=height, color=color, bottom=reg[0], **kwargs)
-        if label == 'right':
-            ax.yaxis.tick_right()
-        ax.set_yticks(region_labels[:, 0].astype(int))
-        ax.yaxis.set_tick_params(labelsize=8)
-        ax.set_ylim(np.nanmin(channel_depths), np.nanmax(channel_depths))
-        ax.get_xaxis().set_visible(False)
-        ax.set_yticklabels(region_labels[:, 1])
-        if label == 'right':
-            ax.yaxis.tick_right()
-            ax.spines['left'].set_visible(False)
-        else:
-            ax.spines['right'].set_visible(False)
         ax.spines['top'].set_visible(False)
         ax.spines['bottom'].set_visible(False)
+        if label is not None:
+            if label == 'right':
+                ax.yaxis.tick_right()
+            ax.set_yticks(region_labels[:, 0].astype(int))
+            ax.yaxis.set_tick_params(labelsize=8)
+            ax.set_ylim(np.nanmin(channel_depths), np.nanmax(channel_depths))
+            ax.get_xaxis().set_visible(False)
+            ax.set_yticklabels(region_labels[:, 1])
+            if label == 'right':
+                ax.yaxis.tick_right()
+                ax.spines['left'].set_visible(False)
+            else:
+                ax.spines['right'].set_visible(False)
 
         if title:
             ax.set_title(title)
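The rewrapped block gates every y-axis labelling call behind `label is not None`, so callers can pass `label=None` to draw the region bars with no tick text at all. A minimal sketch of the same guard, using a hypothetical helper rather than the library function:

    import matplotlib.pyplot as plt
    import numpy as np

    def style_depth_axis(ax, label, depths):
        # label in {'left', 'right', None}; None now skips all tick styling
        ax.set_ylim(np.nanmin(depths), np.nanmax(depths))
        ax.get_xaxis().set_visible(False)
        if label is not None:
            if label == 'right':
                ax.yaxis.tick_right()
            ax.set_yticks([depths.min(), depths.max()])

    fig, ax = plt.subplots()
    style_depth_axis(ax, label=None, depths=np.array([0., 3840.]))  # no ticks drawn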
6 changes: 4 additions & 2 deletions brainbox/io/one.py
@@ -899,12 +899,13 @@ def load_spike_sorting_object(self, obj, *args, **kwargs):
         self.download_spike_sorting_object(obj, *args, **kwargs)
         return self._load_object(self.files[obj])
 
-    def get_version(self, spike_sorter='pykilosort'):
+    def get_version(self, spike_sorter=None):
+        spike_sorter = (spike_sorter or self.spike_sorter) or 'iblsorter'
         collection = self._get_spike_sorting_collection(spike_sorter=spike_sorter)
         dset = self.one.alyx.rest('datasets', 'list', session=self.eid, collection=collection, name='spikes.times.npy')
         return dset[0]['version'] if len(dset) else 'unknown'
 
-    def download_spike_sorting_object(self, obj, spike_sorter='pykilosort', dataset_types=None, collection=None,
+    def download_spike_sorting_object(self, obj, spike_sorter=None, dataset_types=None, collection=None,
                                       attribute=None, missing='raise', **kwargs):
         """
         Downloads an ALF object
@@ -917,6 +918,7 @@ def download_spike_sorting_object(self, obj, spike_sorter='pykilosort', dataset_
         :param missing: 'raise' (default) or 'ignore'
         :return:
         """
+        spike_sorter = (spike_sorter or self.spike_sorter) or 'iblsorter'
         if len(self.collections) == 0:
             return {}, {}, {}
         self.collection = self._get_spike_sorting_collection(spike_sorter=spike_sorter)
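Both methods now resolve the sorter with the same fallback chain: an explicit argument wins, then the loader's `spike_sorter` attribute, then the `'iblsorter'` default. A sketch of the resolution order (standalone function for illustration, not the library API):

    def resolve_spike_sorter(argument, instance_attribute):
        # mirrors (spike_sorter or self.spike_sorter) or 'iblsorter'
        return (argument or instance_attribute) or 'iblsorter'

    assert resolve_spike_sorter(None, None) == 'iblsorter'
    assert resolve_spike_sorter(None, 'pykilosort') == 'pykilosort'
    assert resolve_spike_sorter('kilosort2.5', 'pykilosort') == 'kilosort2.5'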
2 changes: 1 addition & 1 deletion ibllib/oneibl/data_handlers.py
@@ -766,7 +766,7 @@ def setUp(self, **_):
         :return:
         """
         df = super().getData()
-        self.one._check_filesystem(df)
+        self.one._check_filesystem(df, check_hash=False)
 
     def uploadData(self, outputs, version, **kwargs):
         """
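Passing `check_hash=False` lets setUp accept files already present on disk without verifying them against their registered checksums. A hypothetical sketch of what such a flag would toggle (`one._check_filesystem` is private; this is an assumption, not its actual implementation):

    import hashlib
    from pathlib import Path

    def file_ok(path, expected_md5, check_hash=True):
        if not Path(path).exists():
            return False
        if not check_hash or expected_md5 is None:
            return True  # presence on disk is enough; skip re-hashing large files
        return hashlib.md5(Path(path).read_bytes()).hexdigest() == expected_md5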
5 changes: 4 additions & 1 deletion ibllib/oneibl/patcher.py
@@ -189,7 +189,10 @@ def patch_dataset(self, file_list, dry=False, ftp=False, **kwargs):
             return
         # from the dataset info, set flatIron flag to exists=True
         for p, d in zip(file_list, response):
-            self._patch_dataset(p, dset_id=d['id'], revision=d['revision'], dry=dry, ftp=ftp)
+            try:
+                self._patch_dataset(p, dset_id=d['id'], revision=d['revision'], dry=dry, ftp=ftp)
+            except Exception as e:
+                raise Exception(f'Error registering file {p}') from e
         return response
 
     def patch_datasets(self, file_list, **kwargs):
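Wrapping the per-file call and re-raising with `from e` names the offending file while keeping the original error as `__cause__`, so the full traceback survives. A self-contained sketch of the chaining behaviour (the inner function stands in for `self._patch_dataset`):

    def _patch_one(path):  # hypothetical stand-in that always fails
        raise ValueError('dataset not found on Alyx')

    try:
        try:
            _patch_one('spikes.times.npy')
        except Exception as e:
            raise Exception('Error registering file spikes.times.npy') from e
    except Exception as wrapped:
        print(wrapped)            # Error registering file spikes.times.npy
        print(wrapped.__cause__)  # dataset not found on Alyx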
34 changes: 21 additions & 13 deletions ibllib/pipes/ephys_tasks.py
@@ -1,9 +1,9 @@
 import logging
-import traceback
 from pathlib import Path
-import subprocess
 import re
 import shutil
+import subprocess
+import traceback
 
 import packaging.version
 import numpy as np
@@ -13,6 +13,7 @@
 from ibldsp.utils import rms
 from ibldsp.waveform_extraction import extract_wfs_cbin
 import one.alf.io as alfio
+import iblutil.util
 
 from ibllib.misc import check_nvidia_driver
 from ibllib.pipes import base_tasks
@@ -602,14 +603,14 @@ def signature(self):
                 ('*sync.npy', f'{self.device_collection}/{self.pname}', True)
             ],
             'output_files': [
-                # ./raw_ephys_data/probe00/
+                # ./raw_ephys_data/{self.pname}/
                 ('_iblqc_ephysTimeRmsAP.rms.npy', f'{self.device_collection}/{self.pname}/', True),
                 ('_iblqc_ephysTimeRmsAP.timestamps.npy', f'{self.device_collection}/{self.pname}/', True),
+                ('_iblqc_ephysSaturation.samples.npy', f'{self.device_collection}/{self.pname}/', True),
-                # ./spike_sorters/iblsorter/probe00
-                ('spike_sorting_iblsorter.log', f'spike_sorters/{self._sortername}/{self.pname}', True),
+                # ./spike_sorters/iblsorter/{self.pname}
                 ('_kilosort_raw.output.tar', f'spike_sorters/{self._sortername}/{self.pname}/', True),
-                # ./alf/probe00/iblsorter
+                # ./alf/{self.pname}/iblsorter
                 (f'_ibl_log.info_{self.SPIKE_SORTER_NAME}.log', f'alf/{self.pname}/{self._sortername}', True),
                 ('_kilosort_whitening.matrix.npy', f'alf/{self.pname}/{self._sortername}/', True),
                 ('_phy_spikes_subset.channels.npy', f'alf/{self.pname}/{self._sortername}/', True),
                 ('_phy_spikes_subset.spikes.npy', f'alf/{self.pname}/{self._sortername}/', True),
@@ -638,6 +639,10 @@ def signature(self):
                 ('templates.amps.npy', f'alf/{self.pname}/{self._sortername}/', True),
                 ('templates.waveforms.npy', f'alf/{self.pname}/{self._sortername}/', True),
                 ('templates.waveformsChannels.npy', f'alf/{self.pname}/{self._sortername}/', True),
+                ('waveforms.channels.npz', f'alf/{self.pname}/{self._sortername}/', True),
+                ('waveforms.table.pqt', f'alf/{self.pname}/{self._sortername}/', True),
+                ('waveforms.templates.npy', f'alf/{self.pname}/{self._sortername}/', True),
+                ('waveforms.traces.npy', f'alf/{self.pname}/{self._sortername}/', True),
             ],
         }
         return signature
@@ -710,19 +715,19 @@ def _run_iblsort(self, ap_file):
         (discontinued support for old spike sortings in the probe folder <1.5.5)
         :return: path of the folder containing ks2 spike sorting output
         """
+        iblutil.util.setup_logger('iblsorter', level='INFO')
         sorter_dir = self.session_path.joinpath("spike_sorters", self.SPIKE_SORTER_NAME, self.pname)
         self.FORCE_RERUN = False
         if not self.FORCE_RERUN:
-            log_file = sorter_dir.joinpath(f"spike_sorting_{self.SPIKE_SORTER_NAME}.log")
+            log_file = sorter_dir.joinpath(f"_ibl_log.info_{self.SPIKE_SORTER_NAME}.log")
             if log_file.exists():
                 run_version = self._fetch_iblsorter_run_version(log_file)
                 if packaging.version.parse(run_version) >= packaging.version.parse('1.7.0'):
-                    _logger.info(f"Already ran: spike_sorting_{self.SPIKE_SORTER_NAME}.log"
+                    _logger.info(f"Already ran: {log_file}"
                                  f" found in {sorter_dir}, skipping.")
                     return sorter_dir
                 else:
                     self.FORCE_RERUN = True
-        _logger.info(f"job progress command: tail -f {self.scratch_folder_run} *.log")
         self.scratch_folder_run.mkdir(parents=True, exist_ok=True)
         check_nvidia_driver()
         try:
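The rerun gate above compares the version recorded in the log against a minimum using packaging, so multi-digit and pre-release versions compare correctly, unlike plain string comparison. A sketch, assuming the log yields a PEP 440 version string:

    import packaging.version

    def needs_rerun(run_version, minimum='1.7.0'):
        return packaging.version.parse(run_version) < packaging.version.parse(minimum)

    assert needs_rerun('1.6.3') is True
    assert needs_rerun('1.7.0') is False
    assert needs_rerun('1.10.0') is False  # '1.10.0' < '1.7.0' as strings, but not as versions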
@@ -781,7 +786,7 @@ def _run(self):
                 bin_file=ap_file,
                 ampfactor=self._sample2v(ap_file),
             )
-        logfile = sorter_dir.joinpath(f"spike_sorting_{self.SPIKE_SORTER_NAME}.log")
+        logfile = sorter_dir.joinpath(f"_ibl_log.info_{self.SPIKE_SORTER_NAME}.log")
         if logfile.exists():
             shutil.copyfile(logfile, probe_out_path.joinpath(f"_ibl_log.info_{self.SPIKE_SORTER_NAME}.log"))
         # recover the QC files from the spike sorting output and copy them
@@ -802,11 +807,13 @@ def _run(self):
             out = ibllib.ephys.spikes.ks2_to_tar(sorter_dir, tar_dir, force=self.FORCE_RERUN)
             out_files.extend(out)
         # run waveform extraction
+        _logger.info(f"Cleaning up temporary folder {self.scratch_folder_run}")
+        shutil.rmtree(self.scratch_folder_run, ignore_errors=True)
         _logger.info("Running waveform extraction")
         spikes = alfio.load_object(probe_out_path, 'spikes', attribute=['samples', 'clusters'])
         clusters = alfio.load_object(probe_out_path, 'clusters', attribute=['channels'])
         channels = alfio.load_object(probe_out_path, 'channels')
-        extract_wfs_cbin(
+        _output_waveform_files = extract_wfs_cbin(
             bin_file=ap_file,
             output_dir=probe_out_path,
             spike_samples=spikes['samples'],
@@ -822,6 +829,7 @@ def _run(self):
             preprocess_steps=["phase_shift", "bad_channel_interpolation", "butterworth", "car"],
             scratch_dir=self.scratch_folder_run,
         )
+        out_files.extend(_output_waveform_files)
         _logger.info(f"Cleaning up temporary folder {self.scratch_folder_run}")
         shutil.rmtree(self.scratch_folder_run, ignore_errors=True)
         if self.one:
@@ -845,5 +853,5 @@ def _run(self):
             chns = np.load(probe_out_path.joinpath('channels.localCoordinates.npy'))
             out = get_aligned_channels(ins[0], chns, one=self.one, save_dir=probe_out_path)
             out_files.extend(out)
-
-        return out_files
+        self.assert_expected_outputs()
+        return sorted(list(set(out_files)))
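The return value changes from a bare list to `sorted(list(set(out_files)))`: the waveform files now extend the list alongside the QC and tar outputs, so duplicates are possible, and the set/sorted pass makes registration idempotent and deterministic. A minimal illustration:

    from pathlib import Path

    out_files = [
        Path('alf/probe00/iblsorter/spikes.times.npy'),
        Path('alf/probe00/iblsorter/spikes.times.npy'),  # listed by two steps
        Path('alf/probe00/iblsorter/waveforms.table.pqt'),
    ]
    assert sorted(list(set(out_files))) == [
        Path('alf/probe00/iblsorter/spikes.times.npy'),
        Path('alf/probe00/iblsorter/waveforms.table.pqt'),
    ]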
2 changes: 1 addition & 1 deletion ibllib/pipes/scan_fix_passive_files.py
@@ -68,7 +68,7 @@ def move_rename_pairs(from_to_pairs):
     for i, (src, dst) in enumerate(from_to_pairs):
         src = Path(src)
         dst = Path(dst)
-        log.info(f"Moving {i+1} of {len(from_to_pairs)}: \n{src}\n--> {dst}")
+        log.info(f"Moving {i + 1} of {len(from_to_pairs)}: \n{src}\n--> {dst}")
         try:
             shutil.move(str(src / "raw_behavior_data"), str(dst / "raw_passive_data"))
             ffile = src.joinpath("passive_data_for_ephys.flag")
2 changes: 2 additions & 0 deletions ibllib/pipes/tasks.py
@@ -255,6 +255,8 @@ def run(self, **kwargs):
                 self.log = new_log if self.clobber else self.log + new_log
                 _logger.removeHandler(ch)
                 ch.close()
+                if self.on_error == 'raise':
+                    raise FileExistsError(f'Job {self.__class__} exited as a lock was found at {self._lock_file_path()}')
                 return self.status
             outputs = self._run(**kwargs)
             _logger.info(f'Job {self.__class__} complete')
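With `on_error='raise'`, a job that finds a leftover lock file now fails loudly instead of returning a status code the caller may ignore. A sketch of the behaviour, assuming a Task exposes an `on_error` attribute and a lock-file path (names are illustrative):

    from pathlib import Path

    def acquire_or_fail(lock_file, on_error='continue'):
        if Path(lock_file).exists():
            if on_error == 'raise':
                raise FileExistsError(f'Job exited as a lock was found at {lock_file}')
            return -1  # previous behaviour: flag the status and return
        return 0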
2 changes: 1 addition & 1 deletion ibllib/plots/figures.py
@@ -644,7 +644,7 @@ def raw_destripe(raw, fs, t0, i_plt, n_plt,
         fig, axs = plt.subplots(nrows=1, ncols=n_plt, figsize=(14, 5), gridspec_kw={'width_ratios': 4 * n_plt})
 
     if i_plt > len(axs) - 1:  # Error
-        raise ValueError(f'The given increment of subplot ({i_plt+1}) '
+        raise ValueError(f'The given increment of subplot ({i_plt + 1}) '
                          f'is larger than the total number of subplots ({len(axs)})')
 
     [nc, ns] = raw.shape
3 changes: 2 additions & 1 deletion ibllib/tests/test_ephys.py
@@ -8,6 +8,7 @@
 from one.api import ONE
 import neuropixel
 from ibldsp import voltage
+import ibldsp.utils
 
 from ibllib.ephys import ephysqc, spikes
 from ibllib.tests import TEST_DB
@@ -59,7 +60,7 @@ def synthetic_with_bad_channels():
     st = st[st < ns].astype(np.int32)
     stripes = np.zeros(ns)
     stripes[st] = 1
-    stripes = scipy.signal.convolve(stripes, scipy.signal.ricker(1200, 40), 'same') * 1e-6 * 2500
+    stripes = scipy.signal.convolve(stripes, ibldsp.utils.ricker(1200, 40), 'same') * 1e-6 * 2500
 
     data = data + stripes[:, np.newaxis]
     noise = np.random.randn(*data.shape) * 1e-6 * 10
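The test swaps `scipy.signal.ricker` for `ibldsp.utils.ricker` because the SciPy function was deprecated and has since been removed from recent SciPy releases. For reference, a Ricker (Mexican hat) wavelet with the old `(points, a)` signature is a few lines of NumPy (a sketch, not the ibldsp implementation):

    import numpy as np

    def ricker(points, a):
        # second derivative of a Gaussian, normalised as in the old SciPy function
        amplitude = 2 / (np.sqrt(3 * a) * np.pi ** 0.25)
        t = np.arange(points) - (points - 1.0) / 2
        return amplitude * (1 - (t / a) ** 2) * np.exp(-t ** 2 / (2 * a ** 2))

    wavelet = ricker(1200, 40)  # same parameters as in the test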
2 changes: 1 addition & 1 deletion requirements.txt
@@ -23,7 +23,7 @@ seaborn>=0.9.0
 tqdm>=4.32.1
 # ibl libraries
 iblatlas>=0.5.3
-ibl-neuropixel>=1.5.0
+ibl-neuropixel>=1.6.2
 iblutil>=1.13.0
 iblqt>=0.3.2
 mtscomp>=1.0.1
