def __init__(self, *, workspace_uri: str) -> None:
    """Open a field-slicer workspace from a workspace:// URI.

    A bare workspace name (no 'workspace://' prefix) is resolved against
    the 'field-slicer-default' feed.
    """
    # Normalize a bare workspace name into a full workspace:// URI.
    if not workspace_uri.startswith('workspace://'):
        fallback_feed = kp.load_feed('field-slicer-default', create=True)
        workspace_uri = f'workspace://{fallback_feed.get_feed_id()}/{workspace_uri}'
    self._workspace_uri = workspace_uri
    fid, ws_name = parse_workspace_uri(self._workspace_uri)
    self._feed = kp.load_feed(f'feed://{fid}')
    self._workspace_name = ws_name
    ws_subfeed = self._feed.get_subfeed(dict(workspaceName=self._workspace_name))
    self._field_models = _get_field_models_from_subfeed(ws_subfeed)
def post(self):
    """Append messages to a subfeed.

    Falls back to the feed named by LABBOX_DEFAULT_FEED_NAME when the
    request carries an empty feedUri.
    """
    body = json.loads(self.request.body)
    uri = body['feedUri']
    # An empty/None feedUri selects the environment-configured default feed.
    if uri:
        feed = kp.load_feed(uri)
    else:
        feed = kp.load_feed(os.environ['LABBOX_DEFAULT_FEED_NAME'], create=True)
    feed.get_subfeed(body['subfeedName']).append_messages(body['messages'])
    self.finish(json.dumps({'success': True}))
def post(self):
    """Return the messages of a subfeed starting at the requested position.

    Falls back to the feed named by LABBOX_DEFAULT_FEED_NAME when the
    request carries an empty feedUri.
    """
    body = json.loads(self.request.body)
    uri = body['feedUri']
    if uri:
        feed = kp.load_feed(uri)
    else:
        feed = kp.load_feed(os.environ['LABBOX_DEFAULT_FEED_NAME'], create=True)
    sf = feed.get_subfeed(body['subfeedName'])
    sf.set_position(body['position'])
    self.finish(json.dumps(sf.get_next_messages()))
def _global_job_cache():
    """Return the process-wide JobCache, creating and memoizing it on first use."""
    cached = _global['job_cache']
    if cached is not None:
        return cached
    # First call: back the cache with the 'labbox-job-cache' feed.
    cache_feed = kp.load_feed('labbox-job-cache', create=True)
    cached = hi2.JobCache(feed_uri=cache_feed.get_uri())
    _global['job_cache'] = cached
    return cached
def __init__(self, *, feed: Union[kp.Feed, None], workspace_name: str) -> None:
    """Open a labbox-ephys workspace backed by the given feed.

    Passing feed=None falls back to the 'labbox-ephys-default' feed.
    """
    self._feed = feed if feed is not None else kp.load_feed('labbox-ephys-default')
    self._workspace_name = workspace_name
    ws_subfeed = self._feed.get_subfeed(dict(workspaceName=self._workspace_name))
    self._recordings = _get_recordings_from_subfeed(ws_subfeed)
    self._sortings = _get_sortings_from_subfeed(ws_subfeed)
def _get_joined_channels_config() -> dict:
    """Read the most recent joined-channels config from the local kachery-p2p config feed.

    Returns:
        The last message on the 'joined-channels' subfeed, or the default
        ``{'joinedChannels': []}`` when the subfeed is empty or the read
        times out.
    """
    f = kp.load_feed('_kachery_p2p_config', create=True)
    sf = f.get_subfeed('joined-channels')
    num_messages = sf.get_num_messages()
    joined_channels_config = None
    if num_messages > 0:
        # Only the latest message matters; seek directly to it.
        sf.set_position(num_messages - 1)
        joined_channels_config = sf.get_next_message(wait_msec=100)
    if joined_channels_config is None:
        # Bug fix: a read timeout (get_next_message returning None) used to
        # leak None to callers despite the declared -> dict return type.
        joined_channels_config = {
            'joinedChannels': []
        }
    return joined_channels_config
def post(self):
    """Append messages to a subfeed of the given feed; a feedUri is required."""
    payload = json.loads(self.request.body)
    uri = payload['feedUri']
    # Unlike the default-feed variant of this handler, a feed URI is mandatory here.
    if not uri:
        raise Exception('No feed_uri')
    target = kp.load_feed(uri)
    target.get_subfeed(payload['subfeedName']).append_messages(payload['messages'])
    self.finish(json.dumps({'success': True}))
def post(self):
    """Return subfeed messages from the requested position; a feedUri is required."""
    payload = json.loads(self.request.body)
    uri = payload['feedUri']
    if not uri:
        raise Exception('No feed_uri')
    sf = kp.load_feed(uri).get_subfeed(payload['subfeedName'])
    sf.set_position(payload['position'])
    self.finish(json.dumps(sf.get_next_messages()))
def main():
    """Exercise feed messaging: store large arrays, then post a message whose URI
    carries a deliberately invalid manifest."""
    test1()
    feed = kp.load_feed('feed://' + os.environ['FEED_ID'])
    n_cols, n_rows = 10000, 1000
    uri_a = kp.store_npy(np.meshgrid(np.arange(n_cols), np.arange(n_rows))[0])
    subfeed = feed.get_subfeed('sf1')
    subfeed.append_message({'a': uri_a, 'N1': n_cols, 'N2': n_rows})
    # Build a URI whose ?manifest= query points at a non-manifest object so
    # that downstream manifest loading should fail.
    uri_b = kp.store_npy(np.meshgrid(np.arange(n_cols + 1), np.arange(n_rows))[0])
    bogus_manifest = kp.store_object({'invalid': True})
    bad_uri = uri_b.split('?')[0] + '?manifest=' + ka.get_file_hash(bogus_manifest)
    subfeed.append_message({'b_invalid_manifest': bad_uri})
def _handle_message(self, widget, msg, buffers):
    """Dispatch a front-end widget message to the worker session."""
    kind = msg['type']
    if kind == 'iterate':
        self._worker_session.iterate()
        return
    if kind == 'appendSubfeedMessage':
        subfeed = kp.load_feed(msg['feedUri']).get_subfeed(msg['subfeedName'])
        subfeed.append_message(msg['message'])
        self._worker_session.iterate()
        return
    if kind == 'addSubfeedWatch':
        self._worker_session.add_subfeed_watch(
            watch_name=msg['watchName'],
            feed_uri=msg['feedUri'],
            subfeed_name=msg['subfeedName']
        )
        return
    # Unrecognized message types are forwarded to the worker session.
    self._worker_session.handle_message(msg)
def test1():
    """Smoke-test feed creation, subfeed messaging, access rules, and the
    expected failure when loading an unavailable file."""
    f = kp.create_feed('f1')
    f2 = kp.load_feed('f1')
    # Loading by name must resolve to the feed just created.
    assert (f.get_uri() == f2.get_uri())
    sf = f.get_subfeed('sf1')
    sf.append_message({'m': 1})
    assert (sf.get_num_messages() == 1)
    kp.store_text('abc')  # result unused; exercises the store path
    sf.set_access_rules({'rules': []})
    sf.get_access_rules()  # result unused; exercises the read path
    # Loading a file that is not available on the network should raise
    # LoadFileError; anything else (including success) is a test failure.
    try:
        kp.load_file(
            'sha1://e25f95079381fe07651aa7d37c2f4e8bda19727c/file.txt')
        raise Exception('Did not get expected error')
    except LoadFileError:
        pass  # expected
    except Exception:
        # Bug fix: bare re-raise preserves the original traceback
        # ('raise err' used to re-anchor it here).
        raise
"label": "SF/PAIRED_KAMPFF/paired_kampff/2014_11_25_Pair_3_0", "recording_uri": "sha1://a205f87cef8b7f86df7a09cddbc79a1fbe5df60f/SF/PAIRED_KAMPFF/paired_kampff/2014_11_25_Pair_3_0.json", "sorting_true_uri": "sha1://1cd517687aeca7ecdfaa9695680038d142a75031/firings_true.mda" } # To find more examples, see: https://github.com/flatironinstitute/spikeforest_recordings # However: note that some processing needs to be done to the files in this repo (to add the manifests to the raw data). This is WIP # Adjust these values ########################### X = X1 # Select example from above feed_name = 'labbox-ephys-default' workspace_name = 'default' ################################################# recording_label = X['label'] recording_uri = X['recording_uri'] sorting_true_uri = X['sorting_true_uri'] recording = le.LabboxEphysRecordingExtractor(recording_uri, download=True) sorting_true = le.LabboxEphysSortingExtractor(sorting_true_uri, samplerate=30000) sorting_label = 'true' feed = kp.load_feed(feed_name, create=True) workspace = le.load_workspace(workspace_name=workspace_name, feed=feed) print(f'Feed URI: {feed.get_uri()}') R_id = workspace.add_recording(recording=recording, label=recording_label) S_id = workspace.add_sorting(sorting=sorting_true, recording_id=R_id, label=sorting_label)
# Adjust these values recording_label = 'despy_tet3' sorting_label = 'sorting' recording_nwb_path = '<path or URI of nwb recording>' sorting_nwb_path = '<path or URI of nwb sorting>' feed_uri = '{feedUri}' workspace_name = '{workspaceName}' recording_uri = ka.store_object({ 'recording_format': 'nwb', 'data': { 'path': recording_nwb_path } }) sorting_uri = ka.store_object({ 'sorting_format': 'nwb', 'data': { 'path': sorting_nwb_path } }) sorting = le.LabboxEphysSortingExtractor(sorting_uri, samplerate=30000) recording = le.LabboxEphysRecordingExtractor(recording_uri, download=True) feed = kp.load_feed(feed_uri) workspace = le.load_workspace(workspace_name=workspace_name, feed=feed) print(f'Feed URI: {feed.get_uri()}') R_id = workspace.add_recording(recording=recording, label=recording_label) S_id = workspace.add_sorting(sorting=sorting, recording_id=R_id, label=sorting_label)
#!/usr/bin/env python
"""Append a random message to the 'testfeed2' feed and verify it round-trips."""
import random

import kachery_p2p as kp

feed = kp.load_feed('testfeed2', create=True)
print(feed.get_uri())
new_message = {'rand': random.random()}
feed.append_messages([new_message])
all_messages = feed.get_messages()
print(len(all_messages))
print(all_messages[-1])
# The message just appended must be the most recent one on the feed.
assert all_messages[-1]['rand'] == new_message['rand']
def handle_message(self, msg):
    """Dispatch a client message: initialize from client info, append document
    actions, or create/cancel hither jobs.

    NOTE(review): reconstructed from a whitespace-mangled source; statement
    grouping around the `with hi.Config(...)` block is inferred — confirm.
    """
    type0 = msg.get('type')
    if type0 == 'reportClientInfo':
        # Client reports its feed/document identity; load the feed and replay
        # any existing actions back to the client.
        print('reported client info:', msg)
        self._feed_uri = msg['clientInfo']['feedUri']
        self._document_id = msg['clientInfo']['documentId']
        self._readonly = msg['clientInfo']['readOnly']
        if not self._feed_uri:
            # No feed specified: fall back to the server's default feed.
            self._feed_uri = 'feed://' + self._default_feed_id
            # self._feed_uri = kp.create_feed(feed_name='labbox-ephys-default').get_uri()
        # assert self._feed_uri.startswith('sha1://'), 'For now, feedUri must start with sha1://'
        self._feed = kp.load_feed(self._feed_uri)
        for key in ['recordings', 'sortings']:
            self._subfeed_positions[key] = 0
            subfeed_name = dict(key=key, documentId=self._document_id)
            subfeed = self._feed.get_subfeed(subfeed_name)
            # Replay each stored action to the client, tracking the position
            # so later reads resume where this left off.
            for m in subfeed.get_next_messages(wait_msec=10):
                self._send_message({
                    'type': 'action',
                    'action': m['action']
                })
                self._subfeed_positions[key] = self._subfeed_positions[key] + 1
        self._send_message({'type': 'reportInitialLoadComplete'})
        if self._feed:
            # Flush actions that were queued before the feed was ready.
            qm = self._queued_document_action_messages
            self._queued_document_action_messages = []
            for m in qm:
                self.handle_message(m)
    elif type0 == 'appendDocumentAction':
        if self._readonly:
            print('Cannot append document action. This is a readonly feed.')
            return
        if self._feed is None:
            # Feed not loaded yet; queue until reportClientInfo arrives.
            self._queued_document_action_messages.append(msg)
        else:
            subfeed_name = dict(key=msg['key'], documentId=self._document_id)
            subfeed = self._feed.get_subfeed(subfeed_name)
            subfeed.append_message({'action': msg['action']})
    elif type0 == 'hitherCreateJob':
        functionName = msg['functionName']
        kwargs = msg['kwargs']
        opts = msg['opts']
        client_job_id = msg['clientJobId']
        if opts.get('newHitherJobMethod', False):
            # New method: run the outer job synchronously; its result is the
            # actual job to track.
            try:
                job = hi.run(functionName, **kwargs, labbox=self._labbox_context).wait()
            except Exception as err:
                self._send_message({
                    'type': 'hitherJobCreationError',
                    'client_job_id': client_job_id,
                    'error': str(err) + ' (new method)'
                })
                return
            setattr(job, '_client_job_id', client_job_id)
            job_id = job._job_id
            self._jobs_by_id[job_id] = job
            print(f'======== Created hither job (2): {job_id} {functionName}')
            self._send_message({
                'type': 'hitherJobCreated',
                'job_id': job_id,
                'client_job_id': client_job_id
            })
        else:
            # Old method: build a hither config from the client-supplied opts.
            hither_config = opts.get('hither_config', {})
            job_handler_name = opts.get('job_handler_name', 'default')
            required_files = opts.get('required_files', {})
            jh = self._get_job_handler_from_name(job_handler_name)
            hither_config['job_handler'] = jh
            hither_config['required_files'] = required_files
            if hither_config['job_handler'].is_remote:
                # Remote handlers require containerized execution.
                hither_config['container'] = True
            if 'use_job_cache' in hither_config:
                # Translate the boolean flag into an actual cache object;
                # the flag itself is not a valid hi.Config key.
                if hither_config['use_job_cache']:
                    hither_config['job_cache'] = self._default_job_cache
                del hither_config['use_job_cache']
            with hi.Config(**hither_config):
                try:
                    job = hi.run(functionName, **kwargs)
                except Exception as err:
                    self._send_message({
                        'type': 'hitherJobCreationError',
                        'client_job_id': client_job_id,
                        'error': str(err) + ' (old method)'
                    })
                    return
            setattr(job, '_client_job_id', client_job_id)
            job_id = job._job_id
            self._jobs_by_id[job_id] = job
            print(f'======== Created hither job: {job_id} {functionName} ({job_handler_name})')
            self._send_message({
                'type': 'hitherJobCreated',
                'job_id': job_id,
                'client_job_id': client_job_id
            })
    elif type0 == 'hitherCancelJob':
        job_id = msg['job_id']
        assert job_id, 'Missing job_id'
        assert job_id in self._jobs_by_id, f'No job with id: {job_id}'
        job = self._jobs_by_id[job_id]
        job.cancel()
def _set_joined_channels_config(joined_channels_config: dict):
    """Persist the joined-channels config by appending it to the local
    kachery-p2p config feed (the newest message is the effective config)."""
    config_feed = kp.load_feed('_kachery_p2p_config', create=True)
    config_feed.get_subfeed('joined-channels').append_message(joined_channels_config)
def handle_message(self, msg):
    """Dispatch a client message: initialize from client info, append document
    actions, or create/cancel hither jobs (workspace variant).

    Bug fix: the 'hitherJobFinished' message for an immediately-completed job
    was built but never sent to the client; it is now sent (and no longer
    shadows the `msg` parameter).
    """
    type0 = msg.get('type')
    if type0 == 'reportClientInfo':
        # Client reports its feed/workspace identity; load the feed and replay
        # any existing actions back to the client.
        print('reported client info:', msg)
        self._feed_uri = msg['clientInfo']['feedUri']
        self._workspace_name = msg['clientInfo']['workspaceName']
        self._readonly = msg['clientInfo']['readOnly']
        if not self._feed_uri:
            # No feed specified: fall back to the server's default feed.
            self._feed_uri = 'feed://' + self._default_feed_id
            # self._feed_uri = kp.create_feed(feed_name='labbox-ephys-default').get_uri()
        # assert self._feed_uri.startswith('sha1://'), 'For now, feedUri must start with sha1://'
        self._feed = kp.load_feed(self._feed_uri)
        for key in ['recordings', 'sortings']:
            self._subfeed_positions[key] = 0
            subfeed_name = dict(key=key, workspaceName=self._workspace_name)
            subfeed = self._feed.get_subfeed(subfeed_name)
            messages = subfeed.get_next_messages(wait_msec=10)
            for m in messages:
                if 'action' in m:
                    self._send_message({
                        'type': 'action',
                        'action': m['action']
                    })
                else:
                    print(f'WARNING: No action in message for {key}')
            # Advance past everything read, including malformed messages.
            self._subfeed_positions[key] = self._subfeed_positions[key] + len(messages)
        self._send_message({'type': 'reportInitialLoadComplete'})
        if self._feed:
            # Flush actions that were queued before the feed was ready.
            qm = self._queued_document_action_messages
            self._queued_document_action_messages = []
            for m in qm:
                self.handle_message(m)
    elif type0 == 'appendDocumentAction':
        if self._readonly:
            print('Cannot append document action. This is a readonly feed.')
            return
        if self._feed is None:
            # Feed not loaded yet; queue until reportClientInfo arrives.
            self._queued_document_action_messages.append(msg)
        else:
            subfeed_name = dict(key=msg['key'], workspaceName=self._workspace_name)
            subfeed = self._feed.get_subfeed(subfeed_name)
            subfeed.append_message({'action': msg['action']})
    elif type0 == 'hitherCreateJob':
        functionName = msg['functionName']
        kwargs = msg['kwargs']
        client_job_id = msg['clientJobId']
        # Stage 1: create the outer job.
        try:
            outer_job = hi.run(functionName, **kwargs, labbox=self._labbox_context)
        except Exception as err:
            self._send_message({
                'type': 'hitherJobError',
                'job_id': client_job_id,
                'client_job_id': client_job_id,
                'error_message': f'Error creating outer job: {str(err)}',
                'runtime_info': None
            })
            return
        # Stage 2: wait for the outer job, which yields either an inner job
        # to track or an immediate result.
        try:
            job_or_result = outer_job.wait()
        except Exception as err:
            self._send_message({
                'type': 'hitherJobError',
                'job_id': outer_job._job_id,
                'client_job_id': client_job_id,
                'error_message': str(err),
                'runtime_info': outer_job.get_runtime_info()
            })
            return
        if hasattr(job_or_result, '_job_id'):
            # Inner job: register it for later status/cancel handling.
            job = job_or_result
            setattr(job, '_client_job_id', client_job_id)
            job_id = job._job_id
            self._jobs_by_id[job_id] = job
            print(f'======== Created hither job (2): {job_id} {functionName}')
            self._send_message({
                'type': 'hitherJobCreated',
                'job_id': job_id,
                'client_job_id': client_job_id
            })
        else:
            # Immediate result: store it and report completion to the client.
            result = job_or_result
            finished_msg = {
                'type': 'hitherJobFinished',
                'client_job_id': client_job_id,
                'job_id': client_job_id,
                # 'result': _make_json_safe(result),
                'result_sha1': ka.get_file_hash(ka.store_object(_make_json_safe(result))),
                'runtime_info': outer_job.get_runtime_info()
            }
            # Bug fix: this message was previously constructed but never sent.
            self._send_message(finished_msg)
    elif type0 == 'hitherCancelJob':
        job_id = msg['job_id']
        assert job_id, 'Missing job_id'
        assert job_id in self._jobs_by_id, f'No job with id: {job_id}'
        job = self._jobs_by_id[job_id]
        job.cancel()