def importValue(self, import_value: ImportValue):
    logging.info(f"Importing value {import_value.eContainer().eContainer().getPath()}")
    nwb_obj = ImportValueMapper.import_values[import_value]
    var_to_extract = import_value.eContainer().eContainer().id

    if isinstance(nwb_obj, TimeSeries):
        time_series = nwb_obj
        if var_to_extract in ['time', 'timestamps']:
            timestamps = NWBReader.get_timeseries_timestamps(time_series)
            timestamps_unit = guess_units(time_series.timestamps_unit) if (
                hasattr(time_series, 'timestamps_unit') and time_series.timestamps_unit) else 's'
            return GeppettoModelFactory.create_time_series(timestamps, timestamps_unit)
        else:
            plottable_timeseries = NWBReader.get_plottable_timeseries(time_series)
            unit = guess_units(time_series.unit)
            return GeppettoModelFactory.create_time_series(plottable_timeseries[0], unit)
    else:
        # TODO handle other possible ImportValue(s)
        pass
def importValue(self, import_value: ImportValue):
    logging.info(f"Importing value {import_value.eContainer().eContainer().getPath()}")
    nwb_obj = ImportValueMapper.import_values[import_value]
    var_to_extract = import_value.eContainer().eContainer().id

    if isinstance(nwb_obj, TimeSeries):
        time_series = nwb_obj
        if var_to_extract in ['time', 'timestamps']:
            timestamps = NWBReader.get_timeseries_timestamps(time_series)
            if time_series.rate is not None:
                # No explicit timestamps stored: rebuild the time axis from the
                # sampling rate and starting time (t_i = starting_time + i / rate).
                for index, item in enumerate(timestamps):
                    timestamps[index] = item / time_series.rate + time_series.starting_time
            timestamps_unit = guess_units(time_series.timestamps_unit) if (
                hasattr(time_series, 'timestamps_unit') and time_series.timestamps_unit) else 's'
            return GeppettoModelFactory.create_time_series(timestamps, timestamps_unit)
        else:
            plottable_timeseries = NWBReader.get_plottable_timeseries(time_series)
            unit = guess_units(time_series.unit)
            time_series_value = GeppettoModelFactory.create_time_series(
                plottable_timeseries[0] if plottable_timeseries else [], unit)
            if time_series.conversion is not None:
                # Apply the NWB conversion factor so values are in the declared unit
                for index, item in enumerate(time_series_value.value):
                    time_series_value.value[index] = item * time_series.conversion
            return time_series_value

    elif isinstance(nwb_obj, dict):
        plottable_timeseries = NWBReader.get_mono_dimensional_timeseries_aux(nwb_obj['data'])
        unit = guess_units(nwb_obj['unit'])
        time_series_value = GeppettoModelFactory.create_time_series(
            plottable_timeseries[0] if plottable_timeseries else [], unit)
        if nwb_obj['conversion'] is not None:
            for index, item in enumerate(time_series_value.value):
                time_series_value.value[index] = item * nwb_obj['conversion']
        return time_series_value

    else:
        # TODO handle other possible ImportValue(s)
        pass
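For context, a minimal standalone sketch (not part of the interpreter; a recent pynwb API is assumed): an NWB TimeSeries carries its time axis either as an explicit timestamps array or implicitly as starting_time plus a sampling rate, which is why the importer above reconstructs the timestamps when only a rate is present.

from pynwb import TimeSeries

# A series with an explicit time axis.
ts_explicit = TimeSeries(name='explicit', data=[1.0, 2.0, 3.0], unit='V',
                         timestamps=[0.0, 0.1, 0.2])

# A series with an implicit time axis (starting_time + rate, in Hz).
ts_implicit = TimeSeries(name='implicit', data=[1.0, 2.0, 3.0], unit='V',
                         starting_time=0.5, rate=10.0)

# When only a rate is stored, the i-th timestamp is t_i = starting_time + i / rate.
reconstructed = [ts_implicit.starting_time + i / ts_implicit.rate
                 for i in range(len(ts_implicit.data))]
print(reconstructed)  # [0.5, 0.6, 0.7]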
def getGeppettoModel(self, netpyne_model):
    logging.debug('Creating a Geppetto Model')

    # Create a GeppettoModel instance, give it a name and attach the NetPyNE library
    geppetto_model = GeppettoModelFactory.createGeppettoModel('NetPyNEModel')
    self.factory = GeppettoModelFactory(geppetto_model)

    netpyne_geppetto_library = pygeppetto.GeppettoLibrary(name='netpynelib')
    geppetto_model.libraries.append(netpyne_geppetto_library)

    self.extractPopulations(netpyne_model, netpyne_geppetto_library, geppetto_model)
    self.extractInstances(netpyne_model, netpyne_geppetto_library, geppetto_model)

    return geppetto_model
def importType(self, url, type_name, library, geppetto_model_access: GeppettoModelAccess):
    logging.info(f"Importing type {type_name}, url: {url}")
    model_factory = GeppettoModelFactory(geppetto_model_access.geppetto_common_library)
    mapper = GenericCompositeMapper(model_factory, library)

    # Build CompositeTypes for the pynwb objects
    root_type = mapper.create_type(self.get_nwbfile(), type_name=type_name, type_id=type_name)

    if isinstance(self.nwb_file_name, str) and type_name == 'nwbfile':
        root_type.variables.append(
            model_factory.create_url_variable(
                id='source file',
                url=self.source_url if 'http' in self.source_url else 'file://' + self.source_url))

    return root_type
def importType(self, url, type_name, library, geppetto_model_access: GeppettoModelAccess):
    logging.info(f"Importing type {type_name}, url: {url}")
    model_factory = GeppettoModelFactory(geppetto_model_access.geppetto_common_library)
    mapper = GenericCompositeMapper(model_factory, library)

    # Build CompositeTypes for the pynwb objects
    root_type = mapper.create_type(self.get_nwbfile(), type_name=type_name, type_id=type_name)

    if isinstance(self.nwb_file_name, str) and type_name == 'nwbfile':
        from nwb_explorer.nwb_data_manager import CACHE_DIRNAME
        root_type.variables.append(
            model_factory.create_url_variable(
                id='source file',
                url=f"{settings.home_page}/{CACHE_DIRNAME}/{os.path.basename(self.nwb_file_name)}"))

    return root_type
class PyNWBAnotherTestCase(TestCase):
    factory = GeppettoModelFactory()

    def setUp(self):
        start_time = datetime(2017, 4, 3, 11, 0, 0)
        create_date = datetime(2017, 4, 15, 12, 0, 0)

        # create your NWBFile object
        nwbfile = NWBFile('PyNWB Sample File', 'A simple NWB file', 'NWB_test', start_time,
                          file_create_date=create_date)

        # create acquisition metadata
        optical_channel = OpticalChannel('test_optical_channel', 'optical channel source',
                                         'optical channel description', 3.14)
        imaging_plane = nwbfile.create_imaging_plane('test_imaging_plane',
                                                     'ophys integration tests',
                                                     optical_channel,
                                                     'imaging plane description',
                                                     'imaging_device_1',
                                                     6.28, '2.718', 'GFP',
                                                     'somewhere in the brain',
                                                     (1, 2, 1, 2, 3), 4.0,
                                                     'manifold unit', 'A frame to refer to')

        # create acquisition data
        image_series = TwoPhotonSeries(name='test_iS', source='a hypothetical source',
                                       dimension=[2],
                                       external_file=['images.tiff'],
                                       imaging_plane=imaging_plane,
                                       starting_frame=[1, 2, 3],
                                       format='tiff',
                                       timestamps=list())
        nwbfile.add_acquisition(image_series)

        mod = nwbfile.create_processing_module('img_seg_example', 'ophys demo',
                                               'an example of writing Ca2+ imaging data')
        img_seg = ImageSegmentation('a toy image segmentation container')
        mod.add_data_interface(img_seg)

        ps = img_seg.create_plane_segmentation('integration test PlaneSegmentation',
                                               'plane segmentation description',
                                               imaging_plane, 'test_plane_seg_name',
                                               image_series)

        # add two ROIs
        # - first argument is the pixel mask i.e. a list of pixels and their weights
        # - second argument is the image mask
        w, h = 3, 3
        pix_mask1 = [(0, 0, 1.1), (1, 1, 1.2), (2, 2, 1.3)]
        img_mask1 = [[0.0 for x in range(w)] for y in range(h)]
        img_mask1[0][0] = 1.1
        img_mask1[1][1] = 1.2
        img_mask1[2][2] = 1.3
        ps.add_roi('1234', pix_mask1, img_mask1)

        pix_mask2 = [(0, 0, 2.1), (1, 1, 2.2)]
        img_mask2 = [[0.0 for x in range(w)] for y in range(h)]
        img_mask2[0][0] = 2.1
        img_mask2[1][1] = 2.2
        ps.add_roi('5678', pix_mask2, img_mask2)

        # add a Fluorescence container
        fl = Fluorescence('a toy fluorescence container')
        mod.add_data_interface(fl)

        # get an ROI table region i.e. a subset of ROIs to create a RoiResponseSeries from
        rt_region = ps.create_roi_table_region('the first of two ROIs', region=[0])

        # make some fake timeseries data
        data = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
        timestamps = [0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9]
        rrs = fl.create_roi_response_series('test_roi_response_series',
                                            'RoiResponseSeries integration test',
                                            data, 'lumens', rt_region,
                                            timestamps=timestamps)

        # write data
        nwb_path = './test_data/nwb_test_file.nwb'
        with NWBHDF5IO(nwb_path, 'w') as io:
            io.write(nwbfile)

    def test_open_generated_NWB_file_retrieve_time_series(self):
        nwb_path = './test_data/nwb_test_file.nwb'

        # read data back in
        io = NWBHDF5IO(nwb_path, 'r')
        nwbfile = io.read()

        # get the processing module
        mod = nwbfile.get_processing_module('img_seg_example')

        # get the RoiResponseSeries from the Fluorescence data interface
        rrs = mod['Fluorescence'].get_roi_response_series()

        # get the data...
        rrs_data = rrs.data
        rrs_timestamps = rrs.timestamps

        self.assertTrue(np.array_equal(rrs_data, [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]))
        self.assertTrue(np.array_equal(rrs_timestamps,
                                       [0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9]))
        io.close()

    def test_open_static_NWB_file_and_fish_time_series_data(self):
        file_path = './test_data/brain_observatory.nwb'

        # read data back in
        io = NWBHDF5IO(file_path, 'r')
        nwbfile = io.read()

        # get the processing module
        mod = nwbfile.get_processing_module('ophys_module')

        stimulus = nwbfile.get_stimulus('locally_sparse_noise_4deg')
        stimulus_data = stimulus.data
        stimulus_timestamps = stimulus.timestamps

        # get the RoiResponseSeries from the Fluorescence data interface
        # get the data...
        rrs = mod['dff_interface'].get_roi_response_series()
        rrs_data = rrs.data
        rrs_timestamps = rrs.timestamps

        print(stimulus_data[()])
        print(stimulus_timestamps[()])
        io.close()

    # def test_open_big_static_NWB_file(self):
    #     file_path = './test_data/mem_potential_real.nwb'
    #     # read data back in
    #     io = NWBHDF5IO(file_path, 'r')
    #     nwbfile = io.read()
    #
    #     mod = nwbfile.get_processing_module('name')
    #     rrs_data = mod['va_table'].data
    #     time_data = np.arange(len(rrs_data))
    #
    #     nwbType = pygeppetto.CompositeType(id=str('nwb'), name=str('nwb'), abstract=False)
    #     va_table_val1 = self.factory.createTimeSeries('myTimeSeriesValue', rrs_data[()][:, 0].tolist(), 'V')
    #     nwbType.variables.append(self.factory.createStateVariable('DfOverF_1', va_table_val1))
    #     time = self.factory.createTimeSeries('myTimeSeriesValue', time_data.tolist(), 's')
    #     geppetto_model.variables.append(self.factory.createStateVariable('time', time))
    #     io.close()
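A side note relevant to the commented-out large-file test above, as a hedged standalone sketch (it assumes the file written in setUp exists at the same path): when a file is opened with NWBHDF5IO, fields such as rrs.data are typically exposed as on-disk (h5py-backed) datasets, so data is only read when it is sliced, and indexing with [()] pulls the full array into memory.

import numpy as np
from pynwb import NWBHDF5IO

with NWBHDF5IO('./test_data/nwb_test_file.nwb', 'r') as io:
    nwbfile = io.read()
    mod = nwbfile.get_processing_module('img_seg_example')
    rrs = mod['Fluorescence'].get_roi_response_series()

    # rrs.data is backed by the on-disk dataset; slicing triggers the actual I/O.
    first_five = rrs.data[:5]   # reads only the first five samples
    everything = rrs.data[()]   # reads the whole dataset into memory
    print(np.asarray(first_five), np.asarray(everything).shape)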
def __init__(self):
    self.factory = GeppettoModelFactory()
class NetPyNEModelInterpreter():

    def __init__(self):
        self.factory = GeppettoModelFactory()

    def getGeppettoModel(self, netpyne_model):
        logging.debug('Creating a Geppetto Model')

        # Create a GeppettoModel instance, give it a name and attach the NetPyNE library
        geppetto_model = self.factory.createGeppettoModel('NetPyNEModel')
        netpyne_geppetto_library = pygeppetto.GeppettoLibrary(name='netpynelib')
        geppetto_model.libraries.append(netpyne_geppetto_library)

        self.extractPopulations(netpyne_model, netpyne_geppetto_library, geppetto_model)
        self.extractInstances(netpyne_model, netpyne_geppetto_library, geppetto_model)

        return geppetto_model

    def extractPopulations(self, netpyne_model, netpyne_geppetto_library, geppetto_model):
        # Initialise network
        network = pygeppetto.CompositeType(id='network_netpyne', name='network_netpyne')
        netpyne_geppetto_library.types.append(network)

        # Create intermediate population structure for easy access (by key)
        populations = {}
        for index, cell in enumerate(netpyne_model.net.allCells):
            # This block only runs the first time each population is seen
            if cell['tags']['pop'] not in populations:
                # Create CellType, VisualType, ArrayType, ArrayVariable and append to netpyne library
                if 'cellType' in cell['tags']:
                    composite_id = cell['tags']['cellType']
                else:
                    composite_id = cell['tags']['pop'] + "_cell"
                cellType = pygeppetto.CompositeType(id=str(composite_id),
                                                    name=str(composite_id),
                                                    abstract=False)
                visualType = pygeppetto.CompositeVisualType(id='cellMorphology',
                                                            name='cellMorphology')
                cellType.visualType = visualType

                defaultValue = ArrayValue(elements=[])
                arrayType = pygeppetto.ArrayType(size=0,
                                                 arrayType=cellType,
                                                 id=str(cell['tags']['pop']),
                                                 name=str(cell['tags']['pop']),
                                                 defaultValue=defaultValue)
                arrayVariable = pygeppetto.Variable(id=str(cell['tags']['pop']))
                arrayVariable.types.append(arrayType)
                network.variables.append(arrayVariable)

                netpyne_geppetto_library.types.append(cellType)
                netpyne_geppetto_library.types.append(visualType)
                netpyne_geppetto_library.types.append(arrayType)

                # Save in intermediate structure
                populations[cell['tags']['pop']] = arrayType

                # Note: no need to check for pt3d since this is already done via
                # netpyne sim.net.defineCellShapes() in instantiateNetPyNEModel
                secs = cell['secs']

                # Iterate sections creating spheres and cylinders
                if hasattr(secs, 'items'):
                    for sec_name, sec in list(secs.items()):
                        if 'pt3d' in sec['geom']:
                            points = sec['geom']['pt3d']
                            for i in range(len(points) - 1):
                                # draw soma as a cylinder, not as a sphere
                                # (more accurate representation of 3d pts)
                                visualType.variables.append(
                                    self.factory.createCylinder(
                                        str(sec_name),
                                        bottomRadius=float(points[i][3] / 2),
                                        topRadius=float(points[i + 1][3] / 2),
                                        position=Point(x=float(points[i][0]),
                                                       y=float(points[i][1]),
                                                       z=float(points[i][2])),
                                        distal=Point(x=float(points[i + 1][0]),
                                                     y=float(points[i + 1][1]),
                                                     z=float(points[i + 1][2]))))

            # Save the cell position and update elements in defaultValue and size
            populations[cell['tags']['pop']].size = populations[cell['tags']['pop']].size + 1
            populations[cell['tags']['pop']].defaultValue.elements.append(
                ArrayElement(index=len(populations[cell['tags']['pop']].defaultValue.elements),
                             position=Point(x=float(cell['tags']['x']),
                                            y=-float(cell['tags']['y']),
                                            z=float(cell['tags']['z']))))

    def extractInstances(self, netpyne_model, netpyne_geppetto_library, geppetto_model):
        instance = pygeppetto.Variable(id='network')
        instance.types.append(netpyne_geppetto_library.types[0])
        geppetto_model.variables.append(instance)
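For clarity, a plain-Python sketch of the geometry mapping used in extractPopulations above (no pygeppetto types; the helper name is made up): NetPyNE stores each section's 3D geometry as pt3d tuples of (x, y, z, diam), and every consecutive pair of points becomes a cylinder whose end radii are half the diameters.

def pt3d_to_cylinders(points):
    """points: list of (x, y, z, diam) tuples, as found in sec['geom']['pt3d']."""
    cylinders = []
    for (x0, y0, z0, d0), (x1, y1, z1, d1) in zip(points, points[1:]):
        cylinders.append({
            'bottomRadius': d0 / 2.0,
            'topRadius': d1 / 2.0,
            'position': (x0, y0, z0),
            'distal': (x1, y1, z1),
        })
    return cylinders

# Example: a two-point section 10 um long with a constant 1 um diameter.
print(pt3d_to_cylinders([(0.0, 0.0, 0.0, 1.0), (0.0, 10.0, 0.0, 1.0)]))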
class NWBModelInterpreter(ModelInterpreter):

    def __init__(self):
        self.factory = GeppettoModelFactory()

    def importType(self, url, typeName, library, commonLibraryAccess):
        logging.debug('Creating a Geppetto Model')
        geppetto_model = self.factory.createGeppettoModel('GeppettoModel')
        nwb_geppetto_library = pygeppetto.GeppettoLibrary(name='nwblib', id='nwblib')
        geppetto_model.libraries.append(nwb_geppetto_library)

        # read data
        io = NWBHDF5IO(url, 'r')
        nwbfile = io.read()

        # get the processing module
        mod = nwbfile.get_processing_module('ophys_module')

        # get the RoiResponseSeries from the Fluorescence data interface
        # get the data...
        rrs = mod['dff_interface'].get_roi_response_series()
        rrs_data = rrs.data
        rrs_timestamps = rrs.timestamps

        stimulus = nwbfile.get_stimulus('locally_sparse_noise_4deg')
        stimulus_data = [float(i) for i in stimulus.data]
        stimulus_timestamps = stimulus.timestamps[()]

        nwbType = pygeppetto.CompositeType(id=str('nwb'), name=str('nwb'), abstract=False)

        dff_val1 = self.factory.createTimeSeries('myTimeSeriesValue', rrs_data[()][0].tolist(), 'V')
        nwbType.variables.append(self.factory.createStateVariable('DfOverF_1', dff_val1))

        dff_val2 = self.factory.createTimeSeries('myTimeSeriesValue', rrs_data[()][1].tolist(), 'V')
        nwbType.variables.append(self.factory.createStateVariable('DfOverF_2', dff_val2))

        time = self.factory.createTimeSeries('myTimeSeriesValue', rrs_timestamps[()].tolist(), 's')
        geppetto_model.variables.append(self.factory.createStateVariable('time', time))

        stimulus_value = self.factory.createTimeSeries('myTimeSeriesValue', stimulus_data, 'V')
        nwbType.variables.append(self.factory.createStateVariable('Stimulus', stimulus_value))

        stimulus_time = self.factory.createTimeSeries('myTimeSeriesValue', stimulus_timestamps.tolist(), 's')
        geppetto_model.variables.append(self.factory.createStateVariable('stimulus_time', stimulus_time))

        # add type to nwb library
        nwb_geppetto_library.types.append(nwbType)

        # add top level variables
        nwb_variable = Variable(id='nwb')
        nwb_variable.types.append(nwbType)
        geppetto_model.variables.append(nwb_variable)

        return geppetto_model

    def importValue(self, importValue):
        pass

    def getName(self):
        return "NWB Model Interpreter"

    def getDependentModels(self):
        return []