def test_lab_meta(self):
    """Round-trip a LabMetaData extension using a hand-written, manually registered container class."""
    # Build a namespace defining one LabMetaData subtype with a single float attribute.
    ns_builder = NWBNamespaceBuilder('Extension for use in my Lab', self.prefix, version='0.1.0')
    test_meta_ext = NWBGroupSpec(
        neurodata_type_def='MyTestMetaData',
        neurodata_type_inc='LabMetaData',
        doc='my test meta data',
        attributes=[
            NWBAttributeSpec(name='test_attr', dtype='float', doc='test_dtype')])
    ns_builder.add_spec(self.ext_source, test_meta_ext)
    ns_builder.export(self.ns_path, outdir=self.tempdir)
    ns_abs_path = os.path.join(self.tempdir, self.ns_path)
    load_namespaces(ns_abs_path)

    # Hand-written container class mapped onto the new neurodata type.
    @register_class('MyTestMetaData', self.prefix)
    class MyTestMetaData(LabMetaData):
        __nwbfields__ = ('test_attr',)

        @docval({'name': 'name', 'type': str, 'doc': 'name'},
                {'name': 'test_attr', 'type': float, 'doc': 'test attribute'})
        def __init__(self, **kwargs):
            # Pull our extension attribute out before delegating to LabMetaData.
            test_attr = popargs('test_attr', kwargs)
            super(MyTestMetaData, self).__init__(**kwargs)
            self.test_attr = test_attr

    # Adding an instance to a file exercises the registered mapping end to end.
    nwbfile = NWBFile("a file with header data", "NB123A",
                      datetime(2017, 5, 1, 12, 0, 0, tzinfo=tzlocal()))
    nwbfile.add_lab_meta_data(MyTestMetaData(name='test_name', test_attr=5.))
def create_LabMetaData_extension_from_schemas(schema_list, prefix):
    """Build and export a LabMetaData extension (spec + namespace YAML) for every schema in *schema_list*."""
    # Output files are written next to this module.
    target_dir = os.path.abspath(os.path.dirname(__file__))
    spec_file = '{}_extension.yaml'.format(prefix)
    namespace_file = '{}_namespace.yaml'.format(prefix)

    # The namespace doc enumerates every neurodata type being defined.
    type_names = str([schema.neurodata_type for schema in schema_list])
    extension_doc = 'LabMetaData extensions: {neurodata_type_list_as_str} ({prefix})'.format(
        neurodata_type_list_as_str=type_names, prefix=prefix)

    builder = NWBNamespaceBuilder(extension_doc, prefix)
    for schema in schema_list:
        _, attributes, _ = extract_from_schema(schema)
        # One group spec per schema, each inheriting from LabMetaData.
        builder.add_spec(
            spec_file,
            NWBGroupSpec(neurodata_type_def=schema.neurodata_type,
                         neurodata_type_inc='LabMetaData',
                         doc=extension_doc,
                         attributes=attributes))

    builder.export(namespace_file, outdir=target_dir)
def test_lab_meta_auto(self):
    """Round-trip a LabMetaData extension using a get_class auto-generated container class."""
    builder = NWBNamespaceBuilder('Extension for use in my Lab', self.prefix, version='0.1.0')
    meta_spec = NWBGroupSpec(
        neurodata_type_def='MyTestMetaData',
        neurodata_type_inc='LabMetaData',
        doc='my test meta data',
        attributes=[NWBAttributeSpec(name='test_attr', dtype='float', doc='test_dtype')])
    builder.add_spec(self.ext_source, meta_spec)
    builder.export(self.ns_path, outdir=self.tempdir)

    ns_abs_path = os.path.join(self.tempdir, self.ns_path)
    load_namespaces(ns_abs_path)

    # Unlike the manual variant, get_class synthesizes the container class from the spec.
    MyTestMetaData = get_class('MyTestMetaData', self.prefix)
    nwbfile = NWBFile("a file with header data", "NB123A",
                      datetime(2017, 5, 1, 12, 0, 0, tzinfo=tzlocal()))
    nwbfile.add_lab_meta_data(MyTestMetaData(name='test_name', test_attr=5.))
def test_catch_dup_name(self):
    """Loading a second namespace whose name is already registered should warn and be ignored.

    Bug fix: the original test built and loaded both namespaces but asserted
    nothing, so it could never fail. Assert the duplicate-namespace warning,
    mirroring the sibling version of this test in the suite.
    """
    ns_builder1 = NWBNamespaceBuilder('Extension for us in my Lab',
                                      "pynwb_test_extension1", version='0.1.0')
    ext1 = NWBGroupSpec('A custom ElectricalSeries for my lab',
                        attributes=[NWBAttributeSpec(name='trode_id',
                                                     doc='the tetrode id',
                                                     dtype='int')],
                        neurodata_type_inc='ElectricalSeries',
                        neurodata_type_def='TetrodeSeries')
    ns_builder1.add_spec(self.ext_source1, ext1)
    ns_builder1.export(self.ns_path1, outdir=self.tempdir)

    # Second namespace deliberately reuses the same namespace name.
    ns_builder2 = NWBNamespaceBuilder('Extension for us in my Lab',
                                      "pynwb_test_extension1", version='0.1.0')
    ext2 = NWBGroupSpec('A custom ElectricalSeries for my lab',
                        attributes=[NWBAttributeSpec(name='trode_id',
                                                     doc='the tetrode id',
                                                     dtype='int')],
                        neurodata_type_inc='ElectricalSeries',
                        neurodata_type_def='TetrodeSeries')
    ns_builder2.add_spec(self.ext_source2, ext2)
    ns_builder2.export(self.ns_path2, outdir=self.tempdir)

    type_map = get_type_map(
        extensions=os.path.join(self.tempdir, self.ns_path1))
    with self.assertWarnsRegex(
            UserWarning,
            r"ignoring namespace '\S+' because it already exists"):
        type_map.load_namespaces(os.path.join(self.tempdir, self.ns_path2))
def test_catch_dup_name(self):
    """Loading a namespace whose name is already registered warns and is skipped."""
    def build_tetrode_spec():
        # Both namespaces define an identical TetrodeSeries type.
        return NWBGroupSpec('A custom ElectricalSeries for my lab',
                            attributes=[NWBAttributeSpec(name='trode_id',
                                                         doc='the tetrode id',
                                                         dtype='int')],
                            neurodata_type_inc='ElectricalSeries',
                            neurodata_type_def='TetrodeSeries')

    # Export two namespaces with the same name to two different files.
    for source, path in ((self.ext_source1, self.ns_path1),
                         (self.ext_source2, self.ns_path2)):
        builder = NWBNamespaceBuilder('Extension for us in my Lab',
                                      "pynwb_test_extension1")
        builder.add_spec(source, build_tetrode_spec())
        builder.export(path, outdir=self.tempdir)

    type_map = get_type_map(
        extensions=os.path.join(self.tempdir, self.ns_path1))
    # The second load must be ignored with a warning, not silently accepted.
    with self.assertWarnsRegex(
            UserWarning,
            r"ignoring namespace '\S+' because it already exists"):
        type_map.load_namespaces(os.path.join(self.tempdir, self.ns_path2))
def create_pynwb_extension_from_schemas(schema_list, prefix: str):
    """Create and export the Allen Institute pynwb extension YAMLs for *schema_list*."""
    # Output YAML files are written alongside this module.
    output_dir = os.path.abspath(os.path.dirname(__file__))
    spec_path = f'{prefix}.extension.yaml'
    namespace_path = f'{prefix}.namespace.yaml'

    extension_doc = ("Allen Institute behavior and optical "
                     "physiology extensions")
    builder = NWBNamespaceBuilder(
        doc=extension_doc,
        name=prefix,
        version="0.2.0",
        author="Allen Institute for Brain Science",
        contact="*****@*****.**")

    # One custom group spec per schema found in
    # allensdk.brain_observatory.behavior.schemas.
    for schema in schema_list:
        _, attributes, _ = extract_from_schema(schema)
        builder.add_spec(
            spec_path,
            NWBGroupSpec(neurodata_type_def=schema.neurodata_type,
                         neurodata_type_inc=schema.neurodata_type_inc,
                         doc=schema.neurodata_doc,
                         attributes=attributes))

    builder.export(namespace_path, outdir=output_dir)
def test_load_namespace_with_reftype_attribute_check_autoclass_const(self):
    """The auto-generated class exposes an object-reference attribute as a TimeSeries docval arg."""
    builder = NWBNamespaceBuilder('Extension for use in my Lab', self.prefix)
    dataset_spec = NWBDatasetSpec(
        doc='test dataset to add an attr',
        name='test_data',
        shape=(None, ),
        attributes=[NWBAttributeSpec(name='target_ds',
                                     doc='the target the dataset applies to',
                                     dtype=RefSpec('TimeSeries', 'object'))],
        neurodata_type_def='my_new_type')
    builder.add_spec(self.ext_source, dataset_spec)
    builder.export(self.ns_path, outdir=self.tempdir)

    type_map = get_type_map(
        extensions=os.path.join(self.tempdir, self.ns_path))
    generated_cls = type_map.get_container_cls(self.prefix, 'my_new_type')

    # Locate the docval entry for the reference attribute on the generated __init__.
    target_arg = next(
        (arg for arg in get_docval(generated_cls.__init__)
         if arg['name'] == 'target_ds'),
        None)
    self.assertIsNotNone(target_arg)
    # The object reference must resolve to the TimeSeries container class.
    self.assertEqual(target_arg['type'], TimeSeries)
def test_export(self):
    """Exporting a namespace containing a single group spec writes YAML without error."""
    trode_attr = NWBAttributeSpec(name='trode_id',
                                  doc='the tetrode id',
                                  dtype='int')
    tetrode_spec = NWBGroupSpec('A custom ElectricalSeries for my lab',
                                attributes=[trode_attr],
                                neurodata_type_inc='ElectricalSeries',
                                neurodata_type_def='TetrodeSeries')
    builder = NWBNamespaceBuilder('Extension for use in my Lab',
                                  self.prefix, version='0.1.0')
    builder.add_spec(self.ext_source, tetrode_spec)
    builder.export(self.ns_path, outdir=self.tempdir)
def test_export(self):
    """Export a TetrodeSeries extension spec and namespace to disk.

    Bug fix: NWBAttributeSpec was called positionally as
    ('trode_id', 'int', 'the tetrode id'), which matches the legacy
    (name, dtype, doc) order but under the current (name, doc, dtype)
    signature would set doc='int' and dtype='the tetrode id'. Keyword
    arguments are unambiguous and valid under both signatures.
    """
    ns_builder = NWBNamespaceBuilder('Extension for us in my Lab',
                                     "pynwb_test_extension")
    ext1 = NWBGroupSpec('A custom ElectricalSeries for my lab',
                        attributes=[NWBAttributeSpec(name='trode_id',
                                                     doc='the tetrode id',
                                                     dtype='int')],
                        neurodata_type_inc='ElectricalSeries',
                        neurodata_type_def='TetrodeSeries')
    ns_builder.add_spec(self.ext_source, ext1)
    ns_builder.export(self.ns_path, outdir=self.tempdir)
def build_settings(settings_dict=None):
    """WIP: build, export, register, and return an 'NHP' LabMetaData extension class.

    ``settings_dict`` is currently unused (the metadata layout is hard-coded
    below); it is kept for backward compatibility and renamed from ``dict``,
    which shadowed the builtin. Does not support dicts deeper than 1 ATM.

    Returns the registered ``MetaData`` container class.
    """
    # Settings:
    neurodata_type = 'MetaData'
    prefix = 'NHP'
    outdir = './'
    extension_doc = 'lab metadata extension'

    metadata_ext_group_spec = NWBGroupSpec(
        neurodata_type_def=neurodata_type,
        neurodata_type_inc='LabMetaData',
        doc=extension_doc,
        attributes=[
            NWBAttributeSpec(name='experiment_id', dtype='int', doc='HW'),
            NWBAttributeSpec(name='test', dtype='text', doc='HW')
        ])

    # Export spec:
    ext_source = '%s_extension.yaml' % prefix
    ns_path = '%s_namespace.yaml' % prefix
    ns_builder = NWBNamespaceBuilder(extension_doc, prefix, version=str(1))
    ns_builder.add_spec(ext_source, metadata_ext_group_spec)
    ns_builder.export(ns_path, outdir=outdir)

    # Read spec and load namespace:
    ns_abs_path = os.path.join(outdir, ns_path)
    load_namespaces(ns_abs_path)

    class MetaData(LabMetaData):
        __nwbfields__ = ('experiment_id', 'test')

        @docval({'name': 'name', 'type': str, 'doc': 'name'},
                {'name': 'experiment_id', 'type': int, 'doc': 'HW'},
                {'name': 'test', 'type': str, 'doc': 'HW'})
        def __init__(self, **kwargs):
            # Bug fixes vs. original: popargs results were bound to a stray
            # 'ophys_experiment_id' name while the code then read an undefined
            # 'experiment_id' (NameError), and super() referenced an undefined
            # 'OphysBehaviorMetaData' class — both copy-paste leftovers.
            name, experiment_id, test = popargs('name', 'experiment_id',
                                                'test', kwargs)
            super(MetaData, self).__init__(name=name)
            self.experiment_id = experiment_id
            self.test = test

    register_class('MetaData', prefix, MetaData)
    return MetaData
def test_load_namespace_with_reftype_attribute(self):
    """A namespace whose dataset carries an object-reference attribute loads cleanly."""
    builder = NWBNamespaceBuilder('Extension for use in my Lab',
                                  self.prefix, version='0.1.0')
    ref_attr = NWBAttributeSpec(name='target_ds',
                                doc='the target the dataset applies to',
                                dtype=RefSpec('TimeSeries', 'object'))
    dataset_spec = NWBDatasetSpec(doc='test dataset to add an attr',
                                  name='test_data',
                                  shape=(None,),
                                  attributes=[ref_attr],
                                  neurodata_type_def='my_new_type')
    builder.add_spec(self.ext_source, dataset_spec)
    builder.export(self.ns_path, outdir=self.tempdir)
    # Loading the exported extension should not raise.
    get_type_map(extensions=os.path.join(self.tempdir, self.ns_path))
def test_catch_duplicate_spec(self):
    """Adding a spec whose sub-groups duplicate an already-added type must raise ValueError."""
    inner_spec = NWBGroupSpec("This is my new group 1", "Group1",
                              neurodata_type_inc="NWBDataInterface",
                              neurodata_type_def="Group1")
    outer_spec = NWBGroupSpec("This is my new group 2", "Group2",
                              groups=[inner_spec],
                              neurodata_type_inc="NWBDataInterface",
                              neurodata_type_def="Group2")
    builder = NWBNamespaceBuilder("Example namespace", "pynwb_test_ext")
    builder.add_spec(self.ext_source, inner_spec)
    # outer_spec contains inner_spec, so Group1 would be registered twice.
    with self.assertRaises(ValueError):
        builder.add_spec(self.ext_source, outer_spec)
def test_catch_dup_name(self):
    """Loading a second namespace that reuses an already-loaded name raises KeyError.

    Bug fix: NWBAttributeSpec was called positionally as
    ('trode_id', 'int', 'the tetrode id'), matching the legacy
    (name, dtype, doc) order; under the current (name, doc, dtype) signature
    this swaps doc and dtype. Keyword arguments are valid in both versions.
    """
    ns_builder1 = NWBNamespaceBuilder('Extension for us in my Lab',
                                      "pynwb_test_extension1")
    ext1 = NWBGroupSpec('A custom ElectricalSeries for my lab',
                        attributes=[NWBAttributeSpec(name='trode_id',
                                                     doc='the tetrode id',
                                                     dtype='int')],
                        neurodata_type_inc='ElectricalSeries',
                        neurodata_type_def='TetrodeSeries')
    ns_builder1.add_spec(self.ext_source1, ext1)
    ns_builder1.export(self.ns_path1, outdir=self.tempdir)

    # Second namespace deliberately reuses the same namespace name.
    ns_builder2 = NWBNamespaceBuilder('Extension for us in my Lab',
                                      "pynwb_test_extension1")
    ext2 = NWBGroupSpec('A custom ElectricalSeries for my lab',
                        attributes=[NWBAttributeSpec(name='trode_id',
                                                     doc='the tetrode id',
                                                     dtype='int')],
                        neurodata_type_inc='ElectricalSeries',
                        neurodata_type_def='TetrodeSeries')
    ns_builder2.add_spec(self.ext_source2, ext2)
    ns_builder2.export(self.ns_path2, outdir=self.tempdir)

    load_namespaces(os.path.join(self.tempdir, self.ns_path1))
    # The second load must fail: the namespace name is already registered.
    with self.assertRaises(KeyError):
        load_namespaces(os.path.join(self.tempdir, self.ns_path2))
name='vertices', dtype='float', dims=('vertex_number', 'xyz'))], attributes=[ NWBAttributeSpec( name='help', dtype='text', doc='help', value='This holds Surface objects')]) surfaces = NWBGroupSpec( neurodata_type_def='CorticalSurfaces', neurodata_type_inc='NWBDataInterface', name='cortical_surfaces', doc='triverts for cortical surfaces', quantity='?', groups=[surface], attributes=[NWBAttributeSpec( name='help', dtype='text', doc='help', value='This holds the vertices and faces for the cortical surface ' 'meshes')]) ecog_subject = NWBGroupSpec( neurodata_type_def='ECoGSubject', neurodata_type_inc='Subject', name='subject', doc='extension of subject that holds cortical surface data', groups=[surfaces] ) ns_builder = NWBNamespaceBuilder(doc=namespace + ' extensions', name=namespace, version='1.1', author='Ben Dichter', contact='*****@*****.**') ns_builder.add_spec(ext_source, ecog_subject) ns_builder.export(ns_path)
dtype='int'), NWBDatasetSpec('scanimage auto-generated notes', name='scanimage_notes', dtype='text') ] metadata_attr = [ NWBAttributeSpec('help', 'Metadata from Bernardo-Sabatini ScanImage', 'text', value='Metadata from Bernardo-Sabatini ScanImage') ] metadata_spec = NWBGroupSpec('ScanImage-specific metadata', name='scanimage_metadata', datasets=metadata_datasets, attributes=metadata_attr, neurodata_type_inc='LabMetaData', neurodata_type_def='ScanImageMetaData') # Export namespace ext_source = 'sb_scanimage.specs.yaml' ns_builder = NWBNamespaceBuilder( 'Extension for use with Bernardo-Sabatini ScanImage', 'sb_scanimage', version='0.1', author='Lawrence Niu', contact='*****@*****.**') ns_builder.add_spec(ext_source, metadata_spec) ns_path = 'sb_scanimage.namespace.yaml' ns_builder.export(ns_path)
def generate_extended_schema():
    """Build the Silver lab NWB extension schema and export it as YAML.

    Writes three spec files (optophysiology, metadata, ROI) plus
    'silverlab.namespace.yaml' into the current working directory.
    """
    # set up silverlab namespace
    ns_builder = NWBNamespaceBuilder(
        'Extensions for acousto-optic lens data',
        'silverlab_extended_schema',
        'Silver lab data extension to NWB format for acousto-optic lens experiments',
        version='0.4')
    ns_builder.include_type('LabMetaData', namespace='core')
    ns_builder.include_type('TwoPhotonSeries', namespace='core')

    # define attributes Silver lab extension
    cycle_time_attr = NWBAttributeSpec(
        name='cycle_time',
        doc='time in seconds for the microscope to acquire all ROIs once '
            'and return to its starting position',
        dtype='float')
    cycles_per_trial_attr = NWBAttributeSpec(
        name='cycles_per_trial',
        doc='how many microscope cycles occur in each experimental trial',
        dtype="int")
    imaging_mode_attr = NWBAttributeSpec(
        name='imaging_mode',
        doc='the acquisition mode for the experiment; '
            'pointing = single-voxel ROIs, '
            'miniscan = 2d rectangular ROIs, '
            'volume = 3d cuboid ROIs',
        dtype='text')
    frame_size_attr = NWBAttributeSpec(
        name='frame_size',
        doc='the 2d imaging frame size in voxels',
        shape=(2, ),
        dtype='int')
    silverlab_api_version_attr = NWBAttributeSpec(
        name='silverlab_api_version',
        doc='For potential future backwards compatibility, '
            'store the \'version\' of this API that created the file.',
        dtype='text')
    labview_version_attr = NWBAttributeSpec(
        name='labview_version',
        doc='The version of LabVIEW the data came from',
        dtype='text',
        required=False)
    pockels_column_names_attr = NWBAttributeSpec(
        name='columns',
        doc='column names for the zplane pockels dataset',
        shape=(4, ),
        dtype='text')

    # define datasets for Silver lab extensions
    zplane_pockels_ds = NWBDatasetSpec(
        doc='pockels data set, recording calibration data '
            'for focusing at different z-planes in four columns: '
            'Z offset from focal plane (micrometres), '
            'normalised Z, '
            '\'Pockels\' i.e. laser power in %, '
            'and z offset for drive motors',
        name='pockels',
        shape=(None, 4),
        attributes=[pockels_column_names_attr],
        neurodata_type_def='ZplanePockelsDataset')

    # define groups for Silver lab extensions
    silverlab_optophys_specs = NWBGroupSpec(
        doc='A place to store Silver lab specific optophysiology data',
        attributes=[
            cycle_time_attr, cycles_per_trial_attr, frame_size_attr,
            imaging_mode_attr
        ],
        datasets=[zplane_pockels_ds],
        neurodata_type_def='SilverLabOptophysiology',
        neurodata_type_inc='LabMetaData')
    silverlab_metadata_specs = NWBGroupSpec(
        doc='A place to store Silver lab specific metadata',
        attributes=[silverlab_api_version_attr, labview_version_attr],
        neurodata_type_def='SilverLabMetaData',
        neurodata_type_inc='LabMetaData',
    )

    # dimensions ordered as t, x, y [, z], like the TimeSeries data itself
    # NOTE(review): the first two fragments of this doc string concatenate
    # without a space ("...pixels. Thetime offsets...") — likely unintended;
    # confirm before changing the stored spec text.
    silverlab_pixel_time_offset_data = NWBDatasetSpec(
        doc='A datastructure to hold time offsets for pixels. The'
            'time offsets are the acquisition time of each pixel '
            'relative to a starting time. The starting time is the '
            'start of the cycle for pre-2018 LabView versions, '
            'and the start of the trial for new versions.',
        name='pixel_time_offsets',
        shape=[(None, None), (None, None, None), (None, None, None, None)],
        neurodata_type_def='PixelTimeOffsets')
    silverlab_roi_image_specs = NWBGroupSpec(
        doc='An extension to PyNWB\'s TwoPhotonSeries class, designed to hold '
            'pixels from an ROI as well as the PixelTimeOffsets for them.',
        datasets=[silverlab_pixel_time_offset_data],
        neurodata_type_def='ROISeriesWithPixelTimeOffsets',
        neurodata_type_inc='TwoPhotonSeries')

    # export as schema extension — each spec goes to its own source file,
    # all referenced from one namespace file.
    ext_source = 'silverlab.ophys.yaml'
    ns_builder.add_spec(ext_source, silverlab_optophys_specs)
    ext_source = 'silverlab.metadata.yaml'
    ns_builder.add_spec(ext_source, silverlab_metadata_specs)
    ext_source = 'silverlab.roi.yaml'
    ns_builder.add_spec(ext_source, silverlab_roi_image_specs)
    ns_builder.export('silverlab.namespace.yaml')
def main():
    ''' Create the specification using PyNWB helpers.

    Now we will create the specification ('draw up the blueprints') for the
    Frank Lab extension. The main entries in a spec file are Groups,
    Attributes and Datasets; PyNWB provides helpers that allow us to generate
    our spec files using these components. We primarily use the
    [NWBGroupSpec](https://pynwb.readthedocs.io/en/stable/pynwb.spec.html#pynwb.spec.NWBGroupSpec) # noqa
    class to help us create valid NWB groups. An NWB group is basically just a
    container that can have things like a name, attributes, datasets, and even
    nested groups. In fl_extension.py (where we implement this extension in
    Python), each of these groups will get its own Python class.

    We add items within a group using [NWBAttributeSpec]() and
    [NWBDatasetSpec](). An NWB attribute is just what it sounds like: a short
    piece of metadata defining some attribute, such as a "help" text. An NWB
    dataset is also pretty self-explanatory: it's just some data (numbers,
    text, etc.).

    As an example, the cell below describes the representation of a behavioral
    task, and will generate the following lines in the franklab.extensions.yaml
    file:

    ```
    - neurodata_type_def: Task
      neurodata_type_inc: NWBDataInterface
      doc: a behavioral task
      attributes:
      - name: name
        dtype: text
        doc: the name of this task
      - name: description
        dtype: text
        doc: description of this task
      - name: help
        dtype: text
        doc: help doc
        value: help value
    ```

    ---------------------------
    Task (i.e. free exploration, W-alternation, sleep)
    ------
    A Task consists simply of two attributes:
    - a name (i.e. W-alternation, sleep, free exploration)
    - a description of the task

    Note that Task inherits from something called NWBDataInterface.
    NWBDataInterface is a group in PyNWB representing basically any kind of
    data, and which we can store in the NWB file in a ProcessingModule. See
    create_franklab_nwbfile.ipynb for a discussion of Processing Modules.
    ---------------------------
    '''
    # NOTE(review): 'namespace', 'extension_filename', 'yaml_dir' and
    # 'namespace_path' are module-level names defined outside this function.
    task = NWBGroupSpec(neurodata_type_def='Task',
                        neurodata_type_inc='NWBDataInterface',
                        doc='a behavioral task',
                        attributes=[
                            NWBAttributeSpec(
                                name='name',
                                doc='the name of this task',
                                dtype='text'),
                            NWBAttributeSpec(
                                name='description',
                                doc='description of this task',
                                dtype='text'),
                            NWBAttributeSpec(
                                name='help',
                                doc='help doc',
                                dtype='text',
                                value='Behavioral Task')])

    # ---------------------------
    # Apparatus (i.e. tracks, mazes, sleep boxes, arenas)
    # --------
    # We represent the topology of apparatuses using a graph representation
    # (i.e. nodes and edges)
    # - Nodes represent a component of an apparatus that you'd like the ability
    #   to refer (e.g. W-track arms, reward wells, novel object, open field
    #   components)
    # - Edges represent the topological connectivity of the nodes
    #   (i.e. there should be an edge between the left track arm and the left
    #   reward well)
    # In addition, all nodes will contain x/y coordinates that allow us to
    # reconstruct not just the topology, but also the spatial geometry, of the
    # apparatuses.
    #
    # Below, we will first define the nodes and edges. Finally we will define
    # the Apparatus itself as a container that holds the nodes and edges as
    # sub-groups.
    # ---------------------------

    # Node
    # -----
    # Abstract represention for any kind of node in the topological graph
    # We won't actually implement abstract nodes. Rather this is a parent group
    # from which our more specific types of nodes will inherit. Note that NWB
    # specifications have inheritance.
    # The quantity '*' means that we can have any number (0 or more) nodes.
    node = NWBGroupSpec(
        neurodata_type_def='Node',
        neurodata_type_inc='NWBDataInterface',
        doc='nodes in the graph',
        quantity='*',
        attributes=[NWBAttributeSpec(name='name',
                                     doc='the name of this node',
                                     dtype='text'),
                    NWBAttributeSpec(name='help',
                                     doc='help doc',
                                     dtype='text',
                                     value='Apparatus Node')])

    # Edge
    # -------
    # Edges between any two nodes in the graph.
    # An edge's only dataset is the name (string) of the two nodes that the
    # edge connects
    # Note that we don't actually include the nodes themselves, just their
    # names, in an edge.
    edge = NWBGroupSpec(
        neurodata_type_def='Edge',
        neurodata_type_inc='NWBDataInterface',
        doc='edges in the graph',
        quantity='*',
        datasets=[
            NWBDatasetSpec(
                doc='names of the nodes this edge connects',
                name='edge_nodes',
                dtype='text',
                dims=['first_node_name|second_node_name'],
                shape=[2])],
        attributes=[
            NWBAttributeSpec(
                name='help',
                doc='help doc',
                dtype='text',
                value='Apparatus Edge')])

    # Point Node
    # -----------
    # A node that represents a single 2D point in space (e.g. reward well,
    # novel object location)
    point_node = NWBGroupSpec(
        neurodata_type_def='PointNode',
        neurodata_type_inc='Node',
        doc='node representing a point in 2D space',
        quantity='*',
        datasets=[NWBDatasetSpec(doc='x/y coordinate of this 2D point',
                                 name='coords',
                                 dtype='float',
                                 dims=['num_coords', 'x_vals|y_vals'],
                                 shape=[1, 2])],
        attributes=[NWBAttributeSpec(name='help',
                                     doc='help doc',
                                     dtype='text',
                                     value='Apparatus Point')])

    # Segment Node
    # -------------
    # A node that represents a linear segement in 2D space, defined by its
    # start and end points (e.g. a single arm of W-track maze)
    segment_node = NWBGroupSpec(
        neurodata_type_def='SegmentNode',
        neurodata_type_inc='Node',
        doc=('node representing a 2D linear segment defined by its start and'
             'end points'),
        quantity='*',
        datasets=[
            NWBDatasetSpec(doc=('x/y coordinates of the start and end points '
                                'of this segment'),
                           name='coords',
                           dtype='float',
                           dims=['num_coords', 'x_vals|y_vals'],
                           shape=[2, 2])],
        attributes=[
            NWBAttributeSpec(name='help',
                             doc='help doc',
                             dtype='text',
                             value='Apparatus Segment')])

    # Polygon Node
    # -------------
    # A node that represents a polygon area (e.g. open field, sleep box)
    # A polygon is defined by its external vertices and, optionally, by
    # any interior points of interest (e.g. interior wells, objects)
    polygon_node = NWBGroupSpec(
        neurodata_type_def='PolygonNode',
        neurodata_type_inc='Node',
        doc='node representing a 2D polygon area',
        quantity='*',
        datasets=[
            NWBDatasetSpec(
                doc='x/y coordinates of the exterior points of this polygon',
                name='coords',
                dtype='float',
                dims=['num_coords', 'x_vals|y_vals'],
                shape=['null', 2]),
            NWBDatasetSpec(
                doc='x/y coordinates of interior points inside this polygon',
                name='interior_coords',
                dtype='float',
                quantity='?',
                dims=['num_coords', 'x_vals|y_vals'],
                shape=['null', 2])],
        attributes=[
            NWBAttributeSpec(name='help',
                             doc='help doc',
                             dtype='text',
                             value='Apparatus Polygon')])

    # Apparatus
    # -------------
    # Finally, we define the apparatus itself.
    # It is has two sub-groups: nodes and edges.
    apparatus = NWBGroupSpec(
        neurodata_type_def='Apparatus',
        neurodata_type_inc='NWBDataInterface',
        doc='a graph of nodes and edges',
        quantity='*',
        groups=[node, edge],
        attributes=[
            NWBAttributeSpec(name='name',
                             doc='the name of this apparatus',
                             dtype='text'),
            NWBAttributeSpec(name='help',
                             doc='help doc',
                             dtype='text',
                             value='Behavioral Apparatus')])

    # ### Save the extension specification
    # The specification consists of two YAML (.yaml) files: one for the actual
    # blueprint and one for the namespace. In a world with many blueprints, we
    # need namespaces to effectively categorize/store them, like the drawers of
    # an architect's filing cabinet.
    namespace_builder = NWBNamespaceBuilder(
        f'{namespace} extensions', namespace, version='0.1.0')
    namespace_builder.add_spec(extension_filename, apparatus)
    namespace_builder.add_spec(extension_filename, task)
    namespace_builder.add_spec(extension_filename, point_node)
    namespace_builder.add_spec(extension_filename, segment_node)
    namespace_builder.add_spec(extension_filename, polygon_node)

    # Bug: NamespaceBuilder.add_spec creates the .extensions.yaml file in the
    # current directory (it errors if you pass in a file path containing '/'
    # to add_spec, above.)
    # Work around it by temporarily changing into the target directory.
    old_cwd = os.getcwd()
    os.chdir(yaml_dir)
    namespace_builder.export(namespace_path)
    os.chdir(old_cwd)
# NOTE(review): 'LabMetaData_ext' and 'ns_builder' are defined earlier in this
# script, outside this excerpt.
LabMetaData_ext.add_attribute(
    name='auditory_stim_association',
    doc='Descriptor of auditory stimulus-lick port association.',
    dtype='text',
    shape=None,
)
LabMetaData_ext.add_attribute(
    name='visual_stim_association',
    # Bug fix: this doc previously said 'auditory' — a copy-paste error from
    # the attribute above.
    doc='Descriptor of visual stimulus-lick port association.',
    dtype='text',
    shape=None,
)
LabMetaData_ext.add_attribute(
    name='behavior_version',
    doc='Version of schema used to translate LabView data to NWB.',
    dtype='text',
    shape=None,
)

# Add the extension
ext_source = 'uobrainflex.specs.yaml'
ns_builder.include_type('LabMetaData', namespace='core')
ns_builder.add_spec(ext_source, LabMetaData_ext)

# Save the namespace and extensions
ns_path = 'uobrainflex.namespace.yaml'
ns_builder.export(ns_path)
# to this namespace. Finally, # it calls :py:meth:`~pynwb.form.spec.write.NamespaceBuilder.export` to save the extensions to disk for downstream use. from pynwb.spec import NWBNamespaceBuilder, NWBGroupSpec, NWBAttributeSpec ns_path = "mylab.namespace.yaml" ext_source = "mylab.extensions.yaml" ns_builder = NWBNamespaceBuilder('Extension for use in my Lab', "mylab") ext = NWBGroupSpec( 'A custom ElectricalSeries for my lab', attributes=[NWBAttributeSpec('trode_id', 'the tetrode id', 'int')], neurodata_type_inc='ElectricalSeries', neurodata_type_def='TetrodeSeries') ns_builder.add_spec(ext_source, ext) ns_builder.export(ns_path) #################### # Running this block will produce two YAML files. # # The first file contains the specification of the namespace. # # .. code-block:: yaml # # # mylab.namespace.yaml # namespaces: # - doc: Extension for use in my Lab # name: mylab # schema: # - namespace: core
dtype='float', required=False), NWBAttributeSpec( name='microdrive', doc='whether a microdrive was used (0: not used, 1: used)', dtype='int'), NWBAttributeSpec(name='microdrive_lead', doc='um/turn', dtype='float', required=False), NWBAttributeSpec(name='microdrive_id', doc='id of microdrive', dtype='int', required=False), NWBAttributeSpec(name='help', doc='help', dtype='text', value='Information about optical fiber') ]) ns_builder = NWBNamespaceBuilder(doc=namespace + ' extensions', name=namespace, version='1.0', author='Ben Dichter', contact='*****@*****.**') specs = (subject, optical_fiber) for spec in specs: ns_builder.add_spec(ext_source, spec) ns_builder.export(ns_path)
quantity='?'), NWBDatasetSpec(name='label_index', neurodata_type_inc='VectorIndex', doc='indexes label', quantity='?') ]) CompartmentsSeries = NWBGroupSpec( neurodata_type_def='CompartmentSeries', neurodata_type_inc='TimeSeries', doc='Stores continuous data from cell compartments', links=[ NWBLinkSpec( name='compartments', target_type='Compartments', doc='meta-data about compartments in this CompartmentSeries', quantity='?') ]) # Export doc = 'NWB:N extension for storing large-scale simulation output ' \ 'with multi-cell multi-compartment recordings' ns_builder = NWBNamespaceBuilder(doc=doc, name=name, version='0.2.0', author=['Ben Dichter', 'Kael Dai'], contact='*****@*****.**') for neurodata_type in [Compartments, CompartmentsSeries]: ns_builder.add_spec(ext_source, neurodata_type) ns_builder.export(ns_path)
# NOTE(review): 'unit_attr', 'csd_timeseries_group', the other *_ext specs and
# 'ns_builder' are defined earlier in this script, outside this excerpt.

# Virtual electrode positions from which the CSD was computed.
csd_virtual_electrode_vertical_positions = NWBDatasetSpec(name="virtual_electrode_y_positions",
                                                          doc="Virtual vertical positions of electrodes from which CSD was calculated",
                                                          attributes=[unit_attr],
                                                          dtype='float32',
                                                          shape=(None,))
csd_virtual_electrode_horizontal_positions = NWBDatasetSpec(name="virtual_electrode_x_positions",
                                                            doc="Virtual horizontal positions of electrodes from which CSD was calculated",
                                                            attributes=[unit_attr],
                                                            dtype='float32',
                                                            shape=(None,))

ecephys_csd_ext = NWBGroupSpec(
    doc="A group containing current source density (CSD) data and virtual electrode locations",
    groups=[csd_timeseries_group],
    datasets=[csd_virtual_electrode_horizontal_positions,
              csd_virtual_electrode_vertical_positions],
    neurodata_type_def="EcephysCSD",
    neurodata_type_inc="NWBDataInterface"
)

# All specs share a single extension source YAML file.
ext_source = "ndx-aibs-ecephys.extension.yaml"
ns_builder.add_spec(ext_source, ecephys_probe_ext)
ns_builder.add_spec(ext_source, ecephys_egroup_ext)
ns_builder.add_spec(ext_source, ecephys_specimen_ext)
ns_builder.add_spec(ext_source, ecephys_eye_tracking_rig_metadata_ext)
ns_builder.add_spec(ext_source, ecephys_csd_ext)

namespace_path = "ndx-aibs-ecephys.namespace.yaml"
ns_builder.export(namespace_path)
# The following block of code demonstrates how to create a new namespace, and then add a new `neurodata_type`
# to this namespace. Finally,
# it calls :py:meth:`~pynwb.form.spec.write.NamespaceBuilder.export` to save the extensions to disk for downstream use.

from pynwb.spec import NWBNamespaceBuilder, NWBGroupSpec, NWBAttributeSpec

# Output file names: one for the namespace, one for the extension specs.
ns_path = "mylab.namespace.yaml"
ext_source = "mylab.extensions.yaml"

ns_builder = NWBNamespaceBuilder('Extension for use in my Lab', "mylab")
# Bug fix: use keyword arguments for NWBAttributeSpec. The positional call
# ('trode_id', 'int', 'the tetrode id') matches the legacy (name, dtype, doc)
# order; under the current (name, doc, dtype) signature it would set
# doc='int' and dtype='the tetrode id'. Keywords are valid in both versions
# and agree with the parallel tutorial snippet in this file.
ext = NWBGroupSpec('A custom ElectricalSeries for my lab',
                   attributes=[NWBAttributeSpec(name='trode_id',
                                                doc='the tetrode id',
                                                dtype='int')],
                   neurodata_type_inc='ElectricalSeries',
                   neurodata_type_def='TetrodeSeries')
ns_builder.add_spec(ext_source, ext)
ns_builder.export(ns_path)

####################
# Running this block will produce two YAML files.
#
# The first file contains the specification of the namespace.
#
# .. code-block:: yaml
#
#     # mylab.namespace.yaml
#     namespaces:
#     - doc: Extension for use in my Lab
#       name: mylab
#       schema:
#       - namespace: core
# {'name': 'vertices', 'type': ('array_data', 'data'),'doc': 'faces for this surface', 'default': None}) # def __init__(self, **kwargs): # call_docval_func(super((BlenderSurface, self).__init__, kwargs)) # self.faces = getargs('faces', kwargs) # self.vertices = getargs('vertices', kwargs) #ns_builder.add_spec(ext_source, blender_surface) blender_plane_segmentation = NWBGroupSpec( 'A plane to store data from blender', neurodata_type_inc='PlaneSegmentation', neurodata_type_def='BlenderPlaneSegmentation', groups=[blender_surface]) ns_builder.add_spec(ext_source, blender_plane_segmentation) #Writes YAML files ns_builder.export(ns_path) load_namespaces('blenderbits.namespace.yaml') BlenderSurface = get_class('BlenderSurface', 'TanLab') BlenderPlaneSegmentation = get_class('BlenderPlaneSegmentation', 'TanLab') #Read in OBJ os.chdir( 'C:/Users/Mrika/OneDrive/TanLab/NWBHackathonFiles/HackthonFiles/ObjectModels' ) soma_triangles = o3d.io.read_triangle_mesh("soma.obj") soma_triangles = np.asarray(soma_triangles.triangles)