def test_datatype_extension(self):
    """Extending a dataset type should place the new attribute ahead of the inherited ones."""
    base_spec = DatasetSpec('my first dataset', 'int', name='dataset1',
                            dimension=(None, None), attributes=self.attributes,
                            linkable=False, namespace='core', data_type_def='EphysData')
    extra_attrs = [AttributeSpec('attribute3', 'my first extending attribute', 'float')]
    extension = DatasetSpec('my first dataset extension', 'int', name='dataset1',
                            dimension=(None, None), attributes=extra_attrs,
                            linkable=False, namespace='core',
                            data_type_inc=base_spec, data_type_def='SpikeData')
    # The extension's own attribute comes first, followed by the two inherited ones.
    expected_order = [extra_attrs[0], self.attributes[0], self.attributes[1]]
    for idx, expected_attr in enumerate(expected_order):
        self.assertDictEqual(extension['attributes'][idx], expected_attr)
    # Every resolved attribute must be re-parented onto the extension spec.
    for resolved_attr in extension.attributes:
        self.assertIs(extension, resolved_attr.parent)
def test_name_with_compatible_quantity(self):
    """A fixed name is accepted with quantities that allow at most one instance."""
    # Neither construction should raise: 'zero_or_one' and exactly-1 are both
    # compatible with a fixed dataset name.
    for quantity in ('zero_or_one', 1):
        DatasetSpec(doc='my first dataset', dtype='int', name='ds1', quantity=quantity)
def test_name_with_incompatible_quantity(self):
    """A fixed name must be rejected when the quantity permits multiple instances."""
    for quantity in ('zero_or_many', 'one_or_many'):
        with self.assertRaises(ValueError):
            DatasetSpec(doc='my first dataset', dtype='int', name='ds1', quantity=quantity)
def test_datatype_table_extension_diff_format(self):
    """Redefining a compound column with an incompatible dtype must fail."""
    col1 = DtypeSpec('column1', 'the first column', 'int')
    col2 = DtypeSpec('column2', 'the second column', 'float64')
    base = DatasetSpec('my first table', [col1, col2],
                       attributes=self.attributes, data_type_def='SimpleTable')
    self.assertEqual(base['dtype'], [col1, col2])
    self.assertEqual(base['doc'], 'my first table')
    # float64 -> int32 is not a valid refinement, so extending must raise.
    incompatible_col = DtypeSpec('column2', 'the second column, with greater precision', 'int32')
    with self.assertRaisesRegex(ValueError, 'Cannot extend float64 to int32'):
        DatasetSpec('my first table extension', [incompatible_col],
                    data_type_inc=base, data_type_def='ExtendedTable')
def test_datatype_table_extension(self):
    """An extending table appends its new column after the inherited ones."""
    col1 = DtypeSpec('column1', 'the first column', 'int')
    col2 = DtypeSpec('column2', 'the second column', 'float')
    base = DatasetSpec('my first table', [col1, col2],
                       attributes=self.attributes, namespace='core',
                       data_type_def='SimpleTable')
    self.assertEqual(base['dtype'], [col1, col2])
    self.assertEqual(base['doc'], 'my first table')
    col3 = DtypeSpec('column3', 'the third column', 'str')
    extension = DatasetSpec('my first table extension', [col3], namespace='core',
                            data_type_inc=base, data_type_def='ExtendedTable')
    # Inherited columns are kept in order with the new column last.
    self.assertEqual(extension['dtype'], [col1, col2, col3])
    self.assertEqual(extension['doc'], 'my first table extension')
def setUp(self):
    """Build a 'Bar' group spec, register it in a core namespace, and wire up
    a type map, build manager, and object mapper for it."""
    int_attr = AttributeSpec('attr2', 'an example integer attribute', 'int')
    data_spec = DatasetSpec('an example dataset', 'int', name='data',
                            attributes=[int_attr])
    str_attr = AttributeSpec('attr1', 'an example string attribute', 'str')
    self.bar_spec = GroupSpec('A test group specification with a data type',
                              data_type_def='Bar',
                              datasets=[data_spec],
                              attributes=[str_attr])
    # Register the spec so the namespace can resolve the 'Bar' type.
    self.spec_catalog = SpecCatalog()
    self.spec_catalog.register_spec(self.bar_spec, 'test.yaml')
    self.namespace = SpecNamespace('a test namespace', CORE_NAMESPACE,
                                   [{'source': 'test.yaml'}],
                                   catalog=self.spec_catalog)
    self.namespace_catalog = NamespaceCatalog()
    self.namespace_catalog.add_namespace(CORE_NAMESPACE, self.namespace)
    # Map the spec type to the Bar container class via the default ObjectMapper.
    self.type_map = TypeMap(self.namespace_catalog)
    self.type_map.register_container_type(CORE_NAMESPACE, 'Bar', Bar)
    self.type_map.register_map(Bar, ObjectMapper)
    self.manager = BuildManager(self.type_map)
    self.mapper = ObjectMapper(self.bar_spec)
def test_datatype_table_extension_higher_precision(self):
    """Refining a compound column to a higher-precision dtype is accepted."""
    col1 = DtypeSpec('column1', 'the first column', 'int')
    col2 = DtypeSpec('column2', 'the second column', 'float32')
    base = DatasetSpec('my first table', [col1, col2],
                       attributes=self.attributes, data_type_def='SimpleTable')
    self.assertEqual(base['dtype'], [col1, col2])
    self.assertEqual(base['doc'], 'my first table')
    refined_col = DtypeSpec('column2', 'the second column, with greater precision', 'float64')
    extension = DatasetSpec('my first table extension', [refined_col],
                            data_type_inc=base, data_type_def='ExtendedTable')
    # The refined float64 column replaces the inherited float32 definition.
    self.assertEqual(extension['dtype'], [col1, refined_col])
    self.assertEqual(extension['doc'], 'my first table extension')
def getSpecs(self):
    """Return the 'Bar' and 'Foo' group specs used by these tests."""
    data_attr = AttributeSpec('attr2', 'an example integer attribute', 'int')
    bar_dataset = DatasetSpec('an example dataset', 'int', name='data',
                              attributes=[data_attr])
    bar = GroupSpec('A test group specification with a data type',
                    data_type_def='Bar',
                    datasets=[bar_dataset],
                    attributes=[AttributeSpec('attr1', 'an example string attribute', 'str')])
    # Foo contains a Bar by inclusion rather than definition.
    inner_bar = GroupSpec('A Bar group for Foos', name='my_bar', data_type_inc='Bar')
    foo_attr = AttributeSpec('foo_attr', 'a string attribute specified as text', 'text',
                             required=False)
    foo = GroupSpec('A test group that contains a data type',
                    data_type_def='Foo',
                    groups=[inner_bar],
                    attributes=[foo_attr])
    return (bar, foo)
def getSpecs(self):
    """Return a one-element tuple holding the 'Bar' group spec."""
    int_attr = AttributeSpec('attr2', 'an example integer attribute', 'int')
    text_attr = AttributeSpec('attr1', 'an example string attribute', 'text')
    data_spec = DatasetSpec('an example dataset', 'int', name='data',
                            attributes=[int_attr])
    bar_spec = GroupSpec('A test group specification with a data type',
                         data_type_def='Bar',
                         datasets=[data_spec],
                         attributes=[text_attr])
    return (bar_spec,)
def test_constructor_invalid_table(self):
    """A non-DtypeSpec entry in a compound dtype list must raise ValueError."""
    bad_columns = [
        DtypeSpec('column1', 'the first column', 'int'),
        {},  # not a DtypeSpec -- the constructor must reject this
    ]
    with self.assertRaises(ValueError):
        DatasetSpec('my first table', bad_columns, name='table1',
                    attributes=self.attributes)
def setUp(self):
    """Create the attribute, dataset, and subgroup specs shared by these tests."""
    self.attributes = [
        AttributeSpec(name, doc, 'text')
        for name, doc in (('attribute1', 'my first attribute'),
                          ('attribute2', 'my second attribute'))
    ]
    self.dset1_attributes = [
        AttributeSpec(name, doc, 'text')
        for name, doc in (('attribute3', 'my third attribute'),
                          ('attribute4', 'my fourth attribute'))
    ]
    self.dset2_attributes = [
        AttributeSpec(name, doc, 'text')
        for name, doc in (('attribute5', 'my fifth attribute'),
                          ('attribute6', 'my sixth attribute'))
    ]
    self.datasets = [
        DatasetSpec('my first dataset', 'int', name='dataset1',
                    attributes=self.dset1_attributes, linkable=True),
        # The second dataset also defines its own data type.
        DatasetSpec('my second dataset', 'int', name='dataset2',
                    dimension=(None, None), attributes=self.dset2_attributes,
                    linkable=True, namespace='core', data_type_def='VoltageArray'),
    ]
    self.subgroups = [
        GroupSpec('A test subgroup', name='subgroup1', linkable=False),
        GroupSpec('A test subgroup', name='subgroup2', linkable=False),
    ]
    # Reference specs for the auto-generated data_type/namespace attributes.
    self.ndt_attr_spec = AttributeSpec('data_type', 'the data type of this object',
                                       'text', value='EphysData')
    self.ns_attr_spec = AttributeSpec('namespace',
                                      'the namespace for the data type of this object',
                                      'text', required=False)
def test_constructor_invalidate_dtype(self):
    """An unrecognized dtype string must be rejected at construction time."""
    with self.assertRaises(ValueError):
        DatasetSpec(doc='my first dataset',
                    dtype='my bad dtype',  # <-- Expect ValueError due to bad type
                    name='dataset1',
                    dims=(None, None),
                    attributes=self.attributes,
                    linkable=False,
                    data_type_def='EphysData')
def test_constructor_shape(self):
    """The shape argument is stored and exposed both as a key and a property."""
    expected_shape = [None, 2]
    spec = DatasetSpec('my first dataset', 'int', name='dataset1',
                       shape=expected_shape, attributes=self.attributes)
    # Dict-style access and the property must agree.
    self.assertEqual(spec['shape'], expected_shape)
    self.assertEqual(spec.shape, expected_shape)
def setUpBarSpec(self):
    """Create the 'Bar' group spec with one dataset and two attributes."""
    example_dataset = DatasetSpec('an example dataset', 'int', name='data')
    example_attrs = [
        AttributeSpec('attr1', 'an example string attribute', 'str'),
        AttributeSpec('attr2', 'an example integer attribute', 'int'),
    ]
    self.bar_spec = GroupSpec('A test group specification with a data type',
                              data_type_def='Bar',
                              datasets=[example_dataset],
                              attributes=example_attrs)
def test_datatype_extension_groupspec(self):
    """DatasetSpec must raise TypeError when a GroupSpec is passed as data_type_inc."""
    group_base = GroupSpec('a fake grop', data_type_def='EphysData')
    # A group cannot serve as the base type of a dataset extension.
    with self.assertRaises(TypeError):
        DatasetSpec('my first dataset extension', 'int', name='dataset1',
                    data_type_inc=group_base, data_type_def='SpikeData')
def test_constructor_ref_spec(self):
    """A RefSpec dtype should be stored unchanged on the dataset spec."""
    ref_dtype = RefSpec('TimeSeries', 'object')
    spec = DatasetSpec(doc='my first dataset', dtype=ref_dtype, name='dataset1',
                       dims=(None, None), attributes=self.attributes,
                       linkable=False, data_type_def='EphysData')
    self.assertDictEqual(spec['dtype'], ref_dtype)
def setUpBazSpec(self):
    """Create the 'Baz' dataset spec with a single string attribute."""
    baz_attr = AttributeSpec('baz_attr', 'an example string attribute', 'text')
    self.baz_spec = DatasetSpec('an Baz type', 'int',
                                name='MyBaz',
                                data_type_def='Baz',
                                attributes=[baz_attr])
def test_hierarchy(self):
    """The catalog should report the full inheritance chain for each registered type."""
    spikes_spec = DatasetSpec('my extending dataset', 'int', namespace='core',
                              data_type_inc='EphysData', data_type_def='SpikeData')
    lfp_spec = DatasetSpec('my second extending dataset', 'int', namespace='core',
                           data_type_inc='EphysData', data_type_def='LFPData')
    for spec in (self.spec, spikes_spec, lfp_spec):
        self.catalog.register_spec(spec, 'test.yaml')
    # Each hierarchy starts at the type itself and walks up through its ancestors.
    expected_hierarchies = {
        'SpikeData': ('SpikeData', 'EphysData'),
        'LFPData': ('LFPData', 'EphysData'),
        'EphysData': ('EphysData',),
    }
    for data_type, hierarchy in expected_hierarchies.items():
        self.assertTupleEqual(self.catalog.get_hierarchy(data_type), hierarchy)
def setUp(self):
    """Create a fresh spec catalog plus the 'EphysData' dataset spec used by the tests."""
    self.catalog = SpecCatalog()
    self.attributes = [
        AttributeSpec('attribute1', 'my first attribute', 'text'),
        AttributeSpec('attribute2', 'my second attribute', 'text'),
    ]
    self.spec = DatasetSpec('my first dataset', 'int',
                            name='dataset1',
                            dims=(None, None),
                            attributes=self.attributes,
                            linkable=False,
                            data_type_def='EphysData')
def test_constructor(self):
    """A plain dataset spec stores its core fields and re-parents its attributes."""
    spec = DatasetSpec('my first dataset', 'int', name='dataset1',
                       attributes=self.attributes)
    self.assertEqual(spec['dtype'], 'int')
    self.assertEqual(spec['name'], 'dataset1')
    self.assertEqual(spec['doc'], 'my first dataset')
    # Options that were not supplied must be absent, not defaulted.
    for absent_key in ('linkable', 'data_type_def'):
        self.assertNotIn(absent_key, spec)
    self.assertListEqual(spec['attributes'], self.attributes)
    for attribute in self.attributes:
        self.assertIs(spec, attribute.parent)
    # The spec must be JSON-serializable.
    json.dumps(spec)
def test_constructor_nwbtype(self):
    """Constructing with data_type_def and linkable=False stores both settings."""
    spec = DatasetSpec('my first dataset', 'int', name='dataset1',
                       attributes=self.attributes, linkable=False,
                       data_type_def='EphysData')
    expected_fields = {
        'dtype': 'int',
        'name': 'dataset1',
        'doc': 'my first dataset',
        'data_type_def': 'EphysData',
    }
    for key, value in expected_fields.items():
        self.assertEqual(spec[key], value)
    self.assertFalse(spec['linkable'])
    self.assertListEqual(spec['attributes'], self.attributes)
    # Attributes are re-parented onto the new spec.
    for attribute in self.attributes:
        self.assertIs(spec, attribute.parent)
def test_constructor_table(self):
    """A compound (table) dtype keeps its column specs in order."""
    col1 = DtypeSpec('column1', 'the first column', 'int')
    col2 = DtypeSpec('column2', 'the second column', 'float')
    spec = DatasetSpec('my first table', [col1, col2], name='table1',
                       attributes=self.attributes)
    self.assertEqual(spec['dtype'], [col1, col2])
    self.assertEqual(spec['name'], 'table1')
    self.assertEqual(spec['doc'], 'my first table')
    # Unsupplied options must be absent, not defaulted.
    for absent_key in ('linkable', 'data_type_def'):
        self.assertNotIn(absent_key, spec)
    self.assertListEqual(spec['attributes'], self.attributes)
    for attribute in self.attributes:
        self.assertIs(spec, attribute.parent)
    # The spec must be JSON-serializable.
    json.dumps(spec)
def test_type_extension(self):
    """Extend an 'EphysData' group into 'SpikeData' and verify merged datasets,
    attributes, and inherited-spec tracking."""
    spec = GroupSpec('A test group',
                     name='parent_type',
                     datasets=self.datasets,
                     attributes=self.attributes,
                     linkable=False,
                     data_type_def='EphysData')
    dset1_attributes_ext = [
        AttributeSpec('dset1_extra_attribute', 'an extra attribute for the first dataset', 'text')
    ]
    # The extension overrides 'dataset1' (adding one attribute, linkable=True).
    ext_datasets = [
        DatasetSpec('my first dataset extension',
                    'int',
                    name='dataset1',
                    attributes=dset1_attributes_ext,
                    linkable=True),
    ]
    ext_attributes = [
        AttributeSpec('ext_extra_attribute', 'an extra attribute for the group', 'text'),
    ]
    ext = GroupSpec('A test group extension',
                    name='child_type',
                    datasets=ext_datasets,
                    attributes=ext_attributes,
                    linkable=False,
                    data_type_inc=spec,
                    data_type_def='SpikeData')
    ext_dset1 = ext.get_dataset('dataset1')
    ext_dset1_attrs = ext_dset1.attributes
    # New attribute from the extension comes first, then the inherited ones.
    self.assertDictEqual(ext_dset1_attrs[0], dset1_attributes_ext[0])
    self.assertDictEqual(ext_dset1_attrs[1], self.dset1_attributes[0])
    self.assertDictEqual(ext_dset1_attrs[2], self.dset1_attributes[1])
    self.assertEqual(ext.data_type_def, 'SpikeData')
    self.assertEqual(ext.data_type_inc, 'EphysData')
    # 'dataset2' was not overridden, so it should match the parent's definition.
    ext_dset2 = ext.get_dataset('dataset2')
    self.maxDiff = None
    # this will suffice for now,  assertDictEqual doesn't do deep equality checks
    self.assertEqual(str(ext_dset2), str(self.datasets[1]))
    self.assertAttributesEqual(ext_dset2, self.datasets[1])

    # self.ns_attr_spec
    # NOTE(review): assigned but unused (noqa F841) -- presumably kept as a
    # reference to the auto-generated data_type attribute; confirm before removing.
    ndt_attr_spec = AttributeSpec('data_type', 'the data type of this object',  # noqa: F841
                                  'text', value='SpikeData')

    # Group-level attributes: extension's own attribute first, then inherited.
    res_attrs = ext.attributes
    self.assertDictEqual(res_attrs[0], ext_attributes[0])
    self.assertDictEqual(res_attrs[1], self.attributes[0])
    self.assertDictEqual(res_attrs[2], self.attributes[1])

    # test that inherited specs are tracked appropriate
    for d in self.datasets:
        with self.subTest(dataset=d.name):
            self.assertTrue(ext.is_inherited_spec(d))
            self.assertFalse(spec.is_inherited_spec(d))
    # The parent spec must be JSON-serializable.
    json.dumps(spec)
def test_constructor_default_value(self):
    """The default_value argument should be exposed via the property."""
    spec = DatasetSpec(doc='test',
                       default_value=5,
                       dtype='int',
                       data_type_def='test')
    self.assertEqual(spec.default_value, 5)
def setUp(self):
    """Build base ('EphysData') and extension ('SpikeData') specs, then dump them
    to spec and namespace YAML files for the namespace-loading tests."""
    self.attributes = [
        AttributeSpec('attribute1', 'my first attribute', 'text'),
        AttributeSpec('attribute2', 'my second attribute', 'text')
    ]
    self.dset1_attributes = [
        AttributeSpec('attribute3', 'my third attribute', 'text'),
        AttributeSpec('attribute4', 'my fourth attribute', 'text')
    ]
    self.dset2_attributes = [
        AttributeSpec('attribute5', 'my fifth attribute', 'text'),
        AttributeSpec('attribute6', 'my sixth attribute', 'text')
    ]
    self.datasets = [
        DatasetSpec('my first dataset',  # noqa: F405
                    'int',
                    name='dataset1',
                    attributes=self.dset1_attributes,
                    linkable=True),
        DatasetSpec('my second dataset',  # noqa: F405
                    'int',
                    name='dataset2',
                    dimension=(None, None),
                    attributes=self.dset2_attributes,
                    linkable=True,
                    data_type_def='VoltageArray')
    ]
    self.spec = GroupSpec('A test group',  # noqa: F405
                          name='root_constructor_nwbtype',
                          datasets=self.datasets,
                          attributes=self.attributes,
                          linkable=False,
                          data_type_def='EphysData')
    dset1_attributes_ext = [
        AttributeSpec('dset1_extra_attribute', 'an extra attribute for the first dataset', 'text')
    ]
    self.ext_datasets = [
        DatasetSpec('my first dataset extension',  # noqa: F405
                    'int',
                    name='dataset1',
                    attributes=dset1_attributes_ext,
                    linkable=True),
    ]
    self.ext_attributes = [
        AttributeSpec('ext_extra_attribute', 'an extra attribute for the group', 'text'),
    ]
    self.ext_spec = GroupSpec('A test group extension',  # noqa: F405
                              name='root_constructor_nwbtype',
                              datasets=self.ext_datasets,
                              attributes=self.ext_attributes,
                              linkable=False,
                              data_type_inc='EphysData',
                              data_type_def='SpikeData')
    # Round-trip through JSON so the specs serialize as plain dicts for YAML.
    to_dump = {'groups': [self.spec, self.ext_spec]}
    self.specs_path = 'test_load_namespace.specs.yaml'
    self.namespace_path = 'test_load_namespace.namespace.yaml'
    with open(self.specs_path, 'w') as tmp:
        yaml.safe_dump(json.loads(json.dumps(to_dump)), tmp, default_flow_style=False)
    ns_dict = {
        'doc': 'a test namespace',
        'name': self.NS_NAME,
        'schema': [{
            'source': self.specs_path
        }]
    }
    self.namespace = SpecNamespace.build_namespace(**ns_dict)  # noqa: F405
    # Write the namespace file that points at the spec file above.
    to_dump = {'namespaces': [self.namespace]}
    with open(self.namespace_path, 'w') as tmp:
        yaml.safe_dump(json.loads(json.dumps(to_dump)), tmp, default_flow_style=False)
    self.ns_catalog = NamespaceCatalog()  # noqa: F405