def make(self, key):
    """Compute the SHA-1 of the raw NWB file via kachery and insert the row.

    Stores the file in kachery (hard-linked to avoid a copy), records its
    hash under `nwb_file_sha1`, then inserts the completed key.
    """
    print('Computing SHA-1 and storing in kachery...')
    nwb_path = Nwbfile.get_abs_path(key['nwb_file_name'])
    with ka.config(use_hard_links=True):
        stored = ka.store_file(nwb_path)
        key['nwb_file_sha1'] = ka.get_file_hash(stored)
    self.insert1(key)
def test_sort(sorter_name, min_avg_accuracy, recording_path, sorting_true_path, num_jobs=1, job_handler=None, container='default'):
    """Run a spike sorter on a recording and assert its accuracy vs. ground truth.

    Launches `num_jobs` identical sorting jobs, compares the first result with
    the true sorting, and fails if the average accuracy falls below
    `min_avg_accuracy`.
    """
    from spikeforest2 import sorters
    from spikeforest2 import processing
    import hither_sf as hither
    import kachery as ka

    # Original note: "for now, in this test, don't use gpu for irc".
    # NOTE(review): 'ironclust' nevertheless appears in the GPU list below —
    # confirm whether it should be excluded.
    gpu = sorter_name in ['kilosort2', 'kilosort', 'tridesclous', 'ironclust']

    results = []
    with ka.config(fr='default_readonly'):
        with hither.config(container=container, gpu=gpu, job_handler=job_handler), hither.job_queue():
            sorter_fn = getattr(sorters, sorter_name)
            for _ in range(num_jobs):
                r = sorter_fn.run(recording_path=recording_path, sorting_out=hither.File())
                results.append(r)
    sorting_result = results[-1]
    assert sorting_result.success
    sorting_result = results[0]

    with ka.config(fr='default_readonly'):
        with hither.config(container='default', gpu=False):
            compare_result = processing.compare_with_truth.run(
                sorting_path=sorting_result.outputs.sorting_out,
                sorting_true_path=sorting_true_path,
                json_out=hither.File())
    assert compare_result.success

    obj = ka.load_object(compare_result.outputs.json_out._path)
    aa = _average_accuracy(obj)
    print(f'AVERAGE-ACCURACY: {aa}')
    assert aa >= min_avg_accuracy, f"Average accuracy is lower than expected {aa} < {min_avg_accuracy}"
    print('Passed.')
def register_groundtruth(*, recdir, output_fname, label, to):
    """Store a ground-truth sorting in kachery and write its manifest to disk.

    The stored object gains a 'self_reference' URI (an address of the object
    itself, minus that field) before being dumped as JSON to `output_fname`.
    """
    with ka.config(to=to):
        # NOTE(review): the file stored under the 'firings' key is 'raw.mda',
        # whereas register_recording stores the same file under 'raw' —
        # confirm that the ground-truth directory layout really names the
        # firings file 'raw.mda'.
        stored_path = ka.store_file(recdir + '/raw.mda')
        obj = {'firings': stored_path}
        obj['self_reference'] = ka.store_object(obj, basename=f'{label}.json')
        with open(output_fname, 'w') as f:
            json.dump(obj, f, indent=4)
def from_memory(recording: se.RecordingExtractor, serialize=False, serialize_dtype=None):
    """Wrap a spikeextractors RecordingExtractor as a LabboxEphysRecordingExtractor.

    Parameters
    ----------
    recording : se.RecordingExtractor
        The recording to wrap.
    serialize : bool
        If True, write the traces to a .mda file, store it in kachery, and
        return a 'bin1'-format extractor referencing the stored URI.
        If False, register the in-memory object directly.
    serialize_dtype
        Required dtype for the serialized traces when serialize=True.

    Raises
    ------
    ValueError
        If serialize is True and serialize_dtype was not given.
    """
    if serialize:
        if serialize_dtype is None:
            raise ValueError(
                'You must specify the serialize_dtype when serializing recording extractor in from_memory()'
            )
        with hi.TemporaryDirectory() as tmpdir:
            fname = tmpdir + '/' + _random_string(10) + '_recording.mda'
            se.BinDatRecordingExtractor.write_recording(
                recording=recording, save_path=fname, time_axis=0, dtype=serialize_dtype)
            with ka.config(use_hard_links=True):
                uri = ka.store_file(fname, basename='raw.mda')
            num_channels = recording.get_num_channels()
            channel_ids = [int(a) for a in recording.get_channel_ids()]
            # Fetch each channel's location exactly once (the original code
            # queried get_channel_property twice per channel in two loops).
            locations = [recording.get_channel_property(ch, 'location') for ch in channel_ids]
            recording = LabboxEphysRecordingExtractor({
                'recording_format': 'bin1',
                'data': {
                    'raw': uri,
                    'raw_num_channels': num_channels,
                    'num_frames': int(recording.get_num_frames()),
                    'samplerate': float(recording.get_sampling_frequency()),
                    'channel_ids': channel_ids,
                    # channel index within the serialized file, keyed by id
                    'channel_map': {str(c): int(i) for i, c in enumerate(channel_ids)},
                    # 2-D position per channel, keyed by id
                    'channel_positions': {
                        str(c): [float(loc[0]), float(loc[1])]
                        for c, loc in zip(channel_ids, locations)
                    }
                }
            })
            return recording
    obj = {
        'recording_format': 'in_memory',
        'data': register_in_memory_object(recording)
    }
    return LabboxEphysRecordingExtractor(obj)
def make(self, key):
    """Compute the SHA-1 of the analysis NWB file via kachery and insert the row.

    Stores the file in kachery (hard-linked to avoid a copy), records its
    hash under `analysis_file_sha1`, then inserts the completed key.
    """
    print('Computing SHA-1 and storing in kachery...')
    analysis_path = AnalysisNwbfile().get_abs_path(key['analysis_file_name'])
    with ka.config(use_hard_links=True):
        stored = ka.store_file(analysis_path)
        key['analysis_file_sha1'] = ka.get_file_hash(stored)
    self.insert1(key)
    # TODO: load from kachery and fetch_nwb
def register_recording(*, recdir, output_fname, label, to):
    """Store a recording (raw traces, params, geometry) in kachery and write its manifest.

    Builds an object with the stored raw.mda URI, the parsed params.json, and
    the geom.csv coordinates, adds a 'self_reference' URI, and dumps the
    result as JSON to `output_fname`.
    """
    with ka.config(to=to):
        geom_file = ka.load_file(recdir + '/geom.csv')
        obj = {
            'raw': ka.store_file(recdir + '/raw.mda'),
            'params': ka.load_object(recdir + '/params.json'),
            'geom': np.genfromtxt(geom_file, delimiter=',').tolist(),
        }
        obj['self_reference'] = ka.store_object(obj, basename=f'{label}.json')
        with open(output_fname, 'w') as f:
            json.dump(obj, f, indent=4)
def main():
    """Replace the PAIRED_ENGLISH study set in the studysets index and re-store it.

    Loads the current studysets object, drops any existing PAIRED_ENGLISH
    entry, appends the local PAIRED_ENGLISH.json, stores the updated object
    in kachery, and writes the new address back to the studysets file.
    """
    here = os.path.dirname(os.path.realpath(__file__))
    index_fname = here + '/../../recordings/studysets'
    studysets_obj_path = ka.load_text(index_fname)
    with ka.config(fr='default_readonly'):
        studysets_obj = ka.load_object(path=studysets_obj_path)
    # Keep every study set except the one we are replacing.
    kept = [ss for ss in studysets_obj['StudySets'] if ss['name'] != 'PAIRED_ENGLISH']
    studyset_obj_path = here + '/../../recordings/PAIRED_ENGLISH/PAIRED_ENGLISH.json'
    studyset_obj = ka.load_object(studyset_obj_path)
    assert studyset_obj is not None, f'Missing file: {studyset_obj_path}'
    kept.append(studyset_obj)
    studysets_obj['StudySets'] = kept
    with ka.config(fr='default_readwrite'):
        studysets_obj_path = ka.store_object(studysets_obj, basename='studysets.json')
    with open(index_fname, 'w') as f:
        f.write(studysets_obj_path)
def test_1(tmp_path, datajoint_server):
    """End-to-end check: download a test NWB file, insert it, verify DB contents."""
    from nwb_datajoint.data_import import insert_sessions
    from nwb_datajoint.common import Session, Device, Probe

    base = str(tmp_path)
    os.environ['NWB_DATAJOINT_BASE_DIR'] = base + '/nwb-data'
    os.environ['KACHERY_STORAGE_DIR'] = base + '/nwb-data/kachery-storage'
    os.mkdir(os.environ['NWB_DATAJOINT_BASE_DIR'])
    os.mkdir(os.environ['KACHERY_STORAGE_DIR'])

    nwb_fname = os.environ['NWB_DATAJOINT_BASE_DIR'] + '/test.nwb'
    with ka.config(fr='default_readonly'):
        ka.load_file(
            'sha1://8ed68285c327b3766402ee75730d87994ac87e87/beans20190718_no_eseries_no_behavior.nwb',
            dest=nwb_fname)
    # Sanity-check that the downloaded file is readable.
    with pynwb.NWBHDF5IO(path=nwb_fname, mode='r') as io:
        nwbf = io.read()

    insert_sessions(['test.nwb'])

    x = (Session() & {'nwb_file_name': 'test.nwb'}).fetch1()
    assert x['nwb_file_name'] == 'test.nwb'
    assert x['subject_id'] == 'Beans'
    assert x['institution_name'] == 'University of California, San Francisco'
    assert x['lab_name'] == 'Loren Frank'
    assert x['session_id'] == 'beans_01'
    assert x['session_description'] == 'Reinforcement leaarning'
    assert x['session_start_time'] == datetime(2019, 7, 18, 15, 29, 47)
    assert x['timestamps_reference_time'] == datetime(1970, 1, 1, 0, 0)
    assert x['experiment_description'] == 'Reinforcement learning'

    x = Device().fetch()
    # No devices?
    assert len(x) == 0

    x = Probe().fetch()
    assert len(x) == 1
    assert x[0]['probe_type'] == '128c-4s8mm6cm-20um-40um-sl'
    assert x[0]['probe_description'] == '128 channel polyimide probe'
    assert x[0]['num_shanks'] == 4
    assert x[0]['contact_side_numbering'] == 'True'
def from_memory(sorting: se.SortingExtractor, serialize=False):
    """Wrap a spikeextractors SortingExtractor as a LabboxEphysSortingExtractor.

    When `serialize` is True, the sorting is written to a firings.mda file,
    stored in kachery, and returned as an 'mda'-format extractor; otherwise
    the live object is registered in memory.
    """
    if serialize:
        with hi.TemporaryDirectory() as work_dir:
            mda_fname = work_dir + '/' + _random_string(10) + '_firings.mda'
            MdaSortingExtractor.write_sorting(sorting=sorting, save_path=mda_fname)
            with ka.config(use_hard_links=True):
                firings_uri = ka.store_file(mda_fname, basename='firings.mda')
            serialized = LabboxEphysSortingExtractor({
                'sorting_format': 'mda',
                'data': {
                    'firings': firings_uri,
                    'samplerate': sorting.get_sampling_frequency()
                }
            })
            return serialized
    return LabboxEphysSortingExtractor({
        'sorting_format': 'in_memory',
        'data': register_in_memory_object(sorting)
    })
#!/usr/bin/env python
# Run the tridesclous sorter (in its default container) on a kachery-hosted
# synthetic tetrode recording and print the address of the sorting output.
from spikeforest2 import sorters
import hither_sf as hither
import kachery as ka

recording_path = 'sha1://961f4a641af64dded4821610189f808f0192de4d/SYNTH_MEAREC_TETRODE/synth_mearec_tetrode_noise10_K10_C4/002_synth.json'

with ka.config(fr='default_readonly'):
    # with hither.config(cache='default_readwrite'):
    with hither.config(container='default'):
        result = sorters.tridesclous.run(
            recording_path=recording_path,
            sorting_out=hither.File())

print(result.outputs.sorting_out)
def test_1(tmp_path, datajoint_server):
    """End-to-end check with datajoint stores configured: insert a session, verify DB contents."""
    from nwb_datajoint.common import Session, DataAcquisitionDevice, CameraDevice, Probe
    from nwb_datajoint.data_import import insert_sessions

    base_dir = tmp_path / 'nwb-data'
    os.environ['NWB_DATAJOINT_BASE_DIR'] = str(base_dir)
    os.environ['KACHERY_STORAGE_DIR'] = str(base_dir / 'kachery-storage')
    os.mkdir(os.environ['NWB_DATAJOINT_BASE_DIR'])
    os.mkdir(os.environ['KACHERY_STORAGE_DIR'])

    raw_dir = base_dir / 'raw'
    analysis_dir = base_dir / 'analysis'
    os.mkdir(raw_dir)
    os.mkdir(analysis_dir)

    dj.config['stores'] = {
        'raw': {
            'protocol': 'file',
            'location': str(raw_dir),
            'stage': str(raw_dir)
        },
        'analysis': {
            'protocol': 'file',
            'location': str(analysis_dir),
            'stage': str(analysis_dir)
        }
    }

    nwb_fname = str(raw_dir / 'test.nwb')
    with ka.config(fr='default_readonly'):
        ka.load_file('sha1://8ed68285c327b3766402ee75730d87994ac87e87/beans20190718_no_eseries_no_behavior.nwb', dest=nwb_fname)

    # Test that the file can be read; the handle is not used otherwise.
    with pynwb.NWBHDF5IO(path=nwb_fname, mode='r', load_namespaces=True) as io:
        nwbfile = io.read()
        assert nwbfile is not None

    insert_sessions(['test.nwb'])

    x = (Session() & {'nwb_file_name': 'test_.nwb'}).fetch1()
    assert x['nwb_file_name'] == 'test_.nwb'
    assert x['subject_id'] == 'Beans'
    assert x['institution_name'] == 'University of California, San Francisco'
    assert x['lab_name'] == 'Loren Frank'
    assert x['session_id'] == 'beans_01'
    assert x['session_description'] == 'Reinforcement leaarning'
    assert x['session_start_time'] == datetime(2019, 7, 18, 15, 29, 47)
    assert x['timestamps_reference_time'] == datetime(1970, 1, 1, 0, 0)
    assert x['experiment_description'] == 'Reinforcement learning'

    x = DataAcquisitionDevice().fetch()
    # TODO No data acquisition devices?
    assert len(x) == 0

    x = CameraDevice().fetch()
    assert len(x) == 2
    # TODO check camera devices

    x = Probe().fetch()
    assert len(x) == 1
    assert x[0]['probe_type'] == '128c-4s8mm6cm-20um-40um-sl'
    assert x[0]['probe_description'] == '128 channel polyimide probe'
    assert x[0]['num_shanks'] == 4
    assert x[0]['contact_side_numbering'] == 'True'