def create_specs(self, quantity):
    """Build a BasicBucket spec whose untyped members all use the given quantity.

    Type BasicBucket contains:
    - [quantity] untyped group
    - [quantity] untyped dataset
    - [quantity] untyped array dataset
    quantity can be only '?' or 1
    """
    plain_group = GroupSpec(
        name='untyped_group',
        doc='A test group specification with no data type',
        quantity=quantity,
    )
    plain_dataset = DatasetSpec(
        name='untyped_dataset',
        doc='A test dataset specification with no data type',
        dtype='int',
        quantity=quantity,
    )
    array_dataset = DatasetSpec(
        name='untyped_array_dataset',
        doc='A test dataset specification with no data type',
        dtype='int',
        dims=[None],
        shape=[None],
        quantity=quantity,
    )
    bucket = GroupSpec(
        name='test_bucket',
        doc='A test group specification for a data type containing data type',
        data_type_def='BasicBucket',
        groups=[plain_group],
        datasets=[plain_dataset, array_dataset],
    )
    return [bucket]
def create_specs(self, quantity):
    """Build a SimpleBucket spec whose typed members all use the given quantity.

    Type SimpleBucket contains:
    - [quantity] groups of data_type_inc SimpleFoo and [quantity] of NotSimpleFoo
    - [quantity] datasets of data_type_inc SimpleQux and [quantity] of NotSimpleQux
    - [quantity] links of target_type SimpleFoo and [quantity] of NotSimpleFoo
    """
    # standalone type definitions referenced by the inc/link specs below
    foo_spec = GroupSpec(doc='A test group specification with a data type',
                         data_type_def='SimpleFoo')
    not_foo_spec = GroupSpec(doc='A test group specification with a data type',
                             data_type_def='NotSimpleFoo')
    qux_spec = DatasetSpec(doc='A test group specification with a data type',
                           data_type_def='SimpleQux')
    not_qux_spec = DatasetSpec(doc='A test group specification with a data type',
                               data_type_def='NotSimpleQux')
    # inclusion/link specs carried inside the bucket
    foo_inc_spec = GroupSpec(doc='the SimpleFoos in this bucket',
                             data_type_inc='SimpleFoo', quantity=quantity)
    not_foo_inc_spec = GroupSpec(doc='the SimpleFoos in this bucket',
                                 data_type_inc='NotSimpleFoo', quantity=quantity)
    qux_inc_spec = DatasetSpec(doc='the SimpleQuxs in this bucket',
                               data_type_inc='SimpleQux', quantity=quantity)
    not_qux_inc_spec = DatasetSpec(doc='the SimpleQuxs in this bucket',
                                   data_type_inc='NotSimpleQux', quantity=quantity)
    foo_link_spec = LinkSpec(doc='the links in this bucket',
                             target_type='SimpleFoo', quantity=quantity)
    not_foo_link_spec = LinkSpec(doc='the links in this bucket',
                                 target_type='NotSimpleFoo', quantity=quantity)
    bucket_spec = GroupSpec(
        doc='A test group specification for a data type containing data type',
        name="test_bucket",
        data_type_def='SimpleBucket',
        groups=[foo_inc_spec, not_foo_inc_spec],
        datasets=[qux_inc_spec, not_qux_inc_spec],
        links=[foo_link_spec, not_foo_link_spec],
    )
    return [foo_spec, not_foo_spec, qux_spec, not_qux_spec, bucket_spec]
def setUp(self):
    """Create reusable attribute, dataset, and subgroup specs for the tests."""
    # attribute specs for the two datasets
    self.dset1_attributes = [
        AttributeSpec('attribute3', 'my third attribute', 'text'),
        AttributeSpec('attribute4', 'my fourth attribute', 'text'),
    ]
    self.dset2_attributes = [
        AttributeSpec('attribute5', 'my fifth attribute', 'text'),
        AttributeSpec('attribute6', 'my sixth attribute', 'text'),
    ]
    # group-level attribute specs
    self.attributes = [
        AttributeSpec('attribute1', 'my first attribute', 'text'),
        AttributeSpec('attribute2', 'my second attribute', 'text'),
    ]
    self.datasets = [
        DatasetSpec('my first dataset', 'int', name='dataset1',
                    attributes=self.dset1_attributes, linkable=True),
        DatasetSpec('my second dataset', 'int', name='dataset2',
                    attributes=self.dset2_attributes, linkable=True,
                    data_type_def='VoltageArray'),
    ]
    self.subgroups = [
        GroupSpec('A test subgroup', name='subgroup1', linkable=False),
        GroupSpec('A test subgroup', name='subgroup2', linkable=False),
    ]
def test_datatype_extension(self):
    """Extending a typed dataset places new attributes before inherited ones."""
    base = DatasetSpec('my first dataset', 'int', name='dataset1',
                       attributes=self.attributes, linkable=False,
                       data_type_def='EphysData')
    new_attributes = [
        AttributeSpec('attribute3', 'my first extending attribute', 'float')
    ]
    ext = DatasetSpec('my first dataset extension', 'int', name='dataset1',
                      attributes=new_attributes, linkable=False,
                      data_type_inc=base, data_type_def='SpikeData')
    # the new attribute comes first, followed by the inherited ones in order
    self.assertDictEqual(ext['attributes'][0], new_attributes[0])
    self.assertDictEqual(ext['attributes'][1], self.attributes[0])
    self.assertDictEqual(ext['attributes'][2], self.attributes[1])
    # all attributes (new and inherited) are reparented to the extension
    ext_attrs = ext.attributes
    for idx in range(3):
        self.assertIs(ext, ext_attrs[idx].parent)
def _build_separate_namespaces(self):
    # Build two separate extension namespaces: 'ndx-qux' (Qux, Spam) and
    # 'ndx-test' (Baz), where Baz includes types from both the core namespace
    # and ndx-qux. Resolution order below is deliberate.
    # create an empty extension to test ClassGenerator._get_container_type resolution
    # the Bar class has not been mapped yet to the bar spec
    qux_spec = DatasetSpec(doc='A test extension', data_type_def='Qux')
    spam_spec = DatasetSpec(doc='A test extension', data_type_def='Spam')
    create_load_namespace_yaml(
        namespace_name='ndx-qux',
        specs=[qux_spec, spam_spec],
        output_dir=self.test_dir,
        incl_types={},
        type_map=self.type_map
    )
    # resolve Spam first so that ndx-qux is resolved first
    self.type_map.get_dt_container_cls('Spam', 'ndx-qux')
    baz_spec = GroupSpec(
        doc='A test extension',
        data_type_def='Baz',
        data_type_inc='Bar',
        groups=[
            GroupSpec(data_type_inc='Qux', doc='a qux', quantity='?'),
            GroupSpec(data_type_inc='Bar', doc='a bar', quantity='?')
        ]
    )
    create_load_namespace_yaml(
        namespace_name='ndx-test',
        specs=[baz_spec],
        output_dir=self.test_dir,
        incl_types={
            CORE_NAMESPACE: ['Bar'],
            'ndx-qux': ['Qux']
        },
        type_map=self.type_map
    )
def getSpecs(self):
    """Return a one-tuple holding the Bar group spec used by these tests."""
    data_dset = DatasetSpec(
        'an example dataset', 'int', name='data',
        attributes=[AttributeSpec('attr2', 'an example integer attribute', 'int')])
    time_dset = DatasetSpec('an example time dataset', 'isodatetime', name='time')
    time_array_dset = DatasetSpec('an array of times', 'isodatetime', name='time_array',
                                  dims=('num_times', ), shape=(None, ))
    bar_spec = GroupSpec(
        'A test group specification with a data type',
        data_type_def='Bar',
        datasets=[data_dset, time_dset, time_array_dset],
        attributes=[AttributeSpec('attr1', 'an example string attribute', 'text')])
    return (bar_spec, )
def create_specs(self, quantity):
    """Build a SimpleBucket spec whose inner type *definitions* use the given quantity.

    Type SimpleBucket contains:
    - [quantity] groups of data_type_def SimpleFoo
    - [quantity] datasets of data_type_def SimpleQux
    NOTE: links do not have data_type_def, so leave them out of these tests
    NOTE: nested type definitions are strongly discouraged now
    """
    foo_spec = GroupSpec(doc='the SimpleFoos in this bucket',
                         data_type_def='SimpleFoo', quantity=quantity)
    qux_spec = DatasetSpec(doc='the SimpleQuxs in this bucket',
                           data_type_def='SimpleQux', quantity=quantity)
    # unrelated types that must not be picked up by the bucket
    not_foo_spec = GroupSpec(doc='A test group specification with a data type',
                             data_type_def='NotSimpleFoo')
    not_qux_spec = DatasetSpec(doc='A test group specification with a data type',
                               data_type_def='NotSimpleQux')
    bucket_spec = GroupSpec(
        doc='A test group specification for a data type containing data type',
        name="test_bucket",
        data_type_def='SimpleBucket',
        groups=[foo_spec],
        datasets=[qux_spec],
    )
    return [foo_spec, not_foo_spec, qux_spec, not_qux_spec, bucket_spec]
def create_specs(self, quantity):
    """Build a SimpleBucket spec whose typed members live in untyped holder groups.

    Type SimpleBucket contains:
    - an untyped group "foo_holder" with [quantity] groups of data_type_inc SimpleFoo
    - an untyped group "qux_holder" with [quantity] datasets of data_type_inc SimpleQux
    - an untyped group "link_holder" with [quantity] links of target_type SimpleFoo
    """
    # standalone type definitions
    foo_spec = GroupSpec(doc='A test group specification with a data type',
                         data_type_def='SimpleFoo')
    not_foo_spec = GroupSpec(doc='A test group specification with a data type',
                             data_type_def='NotSimpleFoo')
    qux_spec = DatasetSpec(doc='A test group specification with a data type',
                           data_type_def='SimpleQux')
    not_qux_spec = DatasetSpec(doc='A test group specification with a data type',
                               data_type_def='NotSimpleQux')
    # untyped holder groups, each wrapping one typed inclusion
    foo_holder_spec = GroupSpec(
        doc='An untyped subgroup for SimpleFoos',
        name='foo_holder',
        groups=[GroupSpec(doc='the SimpleFoos in this bucket',
                          data_type_inc='SimpleFoo', quantity=quantity)],
    )
    qux_holder_spec = GroupSpec(
        doc='An untyped subgroup for SimpleQuxs',
        name='qux_holder',
        datasets=[DatasetSpec(doc='the SimpleQuxs in this bucket',
                              data_type_inc='SimpleQux', quantity=quantity)],
    )
    link_holder_spec = GroupSpec(
        doc='An untyped subgroup for links',
        name='link_holder',
        links=[LinkSpec(doc='the links in this bucket',
                        target_type='SimpleFoo', quantity=quantity)],
    )
    bucket_spec = GroupSpec(
        doc='A test group specification for a data type containing data type',
        name="test_bucket",
        data_type_def='SimpleBucket',
        groups=[foo_holder_spec, qux_holder_spec, link_holder_spec],
    )
    return [foo_spec, not_foo_spec, qux_spec, not_qux_spec, bucket_spec]
def test_name_with_compatible_quantity(self):
    """Quantities allowing at most one instance must be accepted with a fixed name."""
    # Make sure compatible quantity flags pass when name is fixed
    for ok_quantity in ('zero_or_one', 1):
        DatasetSpec(doc='my first dataset', dtype='int', name='ds1', quantity=ok_quantity)
def __translate_kwargs(kwargs):
    """Remap override def/inc keys to DatasetSpec keys and split out positional args.

    Returns a (args, kwargs) pair where args are the values for the required
    (no-default) parameters of DatasetSpec.__init__, popped from kwargs in
    docval order.
    """
    kwargs[DatasetSpec.def_key()] = kwargs.pop(BaseStorageOverride.def_key())
    kwargs[DatasetSpec.inc_key()] = kwargs.pop(BaseStorageOverride.inc_key())
    args = [
        kwargs.pop(arg_spec['name'])
        for arg_spec in get_docval(DatasetSpec.__init__)
        if 'default' not in arg_spec
    ]
    return args, kwargs
def test_two_unnamed_datasets_same_type(self):
    """Test creating a group contains multiple unnamed datasets with type X."""
    first = DatasetSpec(doc='Group 0', data_type_inc='Type0')
    second = DatasetSpec(doc='Group 1', data_type_inc='Type0')
    msg = "Cannot have multiple groups/datasets with the same data type without specifying name"
    with self.assertRaisesWith(ValueError, msg):
        GroupSpec(doc='A test group', name='parent',
                  datasets=[first, second], data_type_def='ParentType')
def test_unnamed_named_dataset_same_type(self):
    """Test get_data_type when a group contains both an unnamed and named dataset with type X."""
    unnamed = DatasetSpec(doc='Group 0', data_type_inc='Type0')
    named = DatasetSpec(doc='Group 1', data_type_inc='Type0', name='type1')
    parent = GroupSpec(doc='A test group', name='parent',
                       datasets=[unnamed, named], data_type_def='ParentType')
    # the unnamed spec is returned when both an unnamed and a named spec share the type
    self.assertIs(parent.get_data_type('Type0'), unnamed)
def test_name_with_incompatible_quantity(self):
    """Quantities allowing more than one instance must be rejected with a fixed name."""
    # Check that we raise an error when the quantity allows more than one instance
    for bad_quantity in ('zero_or_many', 'one_or_many'):
        with self.assertRaises(ValueError):
            DatasetSpec(doc='my first dataset', dtype='int', name='ds1', quantity=bad_quantity)
def test_two_named_datasets_same_type(self):
    """Test get_data_type when a group contains multiple named datasets with type X."""
    first = DatasetSpec(doc='Group 0', data_type_inc='Type0', name='group0')
    second = DatasetSpec(doc='Group 1', data_type_inc='Type0', name='group1')
    parent = GroupSpec(doc='A test group', name='parent',
                       datasets=[first, second], data_type_def='ParentType')
    # with multiple named matches, a list of all of them is returned
    self.assertEqual(parent.get_data_type('Type0'), [first, second])
def test_datatype_table_extension(self):
    """Extending a compound-dtype dataset appends new columns after inherited ones."""
    col1 = DtypeSpec('column1', 'the first column', 'int')
    col2 = DtypeSpec('column2', 'the second column', 'float')
    base = DatasetSpec('my first table', [col1, col2],
                       attributes=self.attributes, data_type_def='SimpleTable')
    self.assertEqual(base['dtype'], [col1, col2])
    self.assertEqual(base['doc'], 'my first table')
    col3 = DtypeSpec('column3', 'the third column', 'text')
    ext = DatasetSpec('my first table extension', [col3],
                      data_type_inc=base, data_type_def='ExtendedTable')
    # inherited columns come first, then the new column
    self.assertEqual(ext['dtype'], [col1, col2, col3])
    self.assertEqual(ext['doc'], 'my first table extension')
def test_dci_input(self):
    """convert_dtype on a DataChunkIterator warns on narrowing but not widening."""
    int64_spec = DatasetSpec('an example dataset', 'int64', name='data')
    dci = DataChunkIterator(np.array([1, 2, 3], dtype=np.int32))
    msg = "Spec 'data': Value with data type int32 is being converted to data type int64 as specified."
    with self.assertWarnsWith(UserWarning, msg):
        ret, ret_dtype = ObjectMapper.convert_dtype(int64_spec, dci)  # no conversion
    self.assertIs(ret, dci)
    self.assertEqual(ret_dtype, np.int64)
    int16_spec = DatasetSpec('an example dataset', 'int16', name='data')
    dci = DataChunkIterator(np.array([1, 2, 3], dtype=np.int32))
    ret, ret_dtype = ObjectMapper.convert_dtype(int16_spec, dci)  # no conversion
    self.assertIs(ret, dci)
    self.assertEqual(ret_dtype, np.int32)  # increase precision
def test_datatype_table_extension_diff_format(self):
    """Redefining an inherited column with an incompatible dtype must fail."""
    col1 = DtypeSpec('column1', 'the first column', 'int')
    col2 = DtypeSpec('column2', 'the second column', 'float64')
    base = DatasetSpec('my first table', [col1, col2],
                       attributes=self.attributes, data_type_def='SimpleTable')
    self.assertEqual(base['dtype'], [col1, col2])
    self.assertEqual(base['doc'], 'my first table')
    # same column name as col2 but an incompatible (non-float) dtype
    conflicting = DtypeSpec('column2', 'the second column, with greater precision', 'int32')
    with self.assertRaisesWith(ValueError, 'Cannot extend float64 to int32'):
        DatasetSpec('my first table extension', [conflicting],
                    data_type_inc=base, data_type_def='ExtendedTable')
def getSpecs(self):
    """Return the Bar and Foo group specs used by these tests."""
    bar = GroupSpec(
        'A test group specification with a data type',
        data_type_def='Bar',
        datasets=[
            DatasetSpec('an example dataset', 'int', name='data',
                        attributes=[AttributeSpec('attr2',
                                                  'an example integer attribute',
                                                  'int')])
        ],
        attributes=[AttributeSpec('attr1', text('an example string attribute'), 'text')],
    )
    foo = GroupSpec(
        'A test group that contains a data type',
        data_type_def='Foo',
        groups=[GroupSpec('A Bar group for Foos', name='my_bar', data_type_inc='Bar')],
        attributes=[AttributeSpec('foo_attr', 'a string attribute specified as text',
                                  'text', required=False)],
    )
    return (bar, foo)
def test_update_docval_dset_shape(self):
    """Test that update_docval_args for a dataset with shape sets the type and shape keys."""
    spec = GroupSpec(
        doc='A test group specification with a data type',
        data_type_def='Baz',
        datasets=[DatasetSpec(name='dset1', doc='a string dataset',
                              dtype='text', shape=[None])],
    )
    not_inherited_fields = {'dset1': spec.get_dataset('dset1')}
    docval_args = []
    CustomClassGenerator.process_field_spec(
        classdict={},
        docval_args=docval_args,
        parent_cls=EmptyBar,  # <-- arbitrary class
        attr_name='dset1',
        not_inherited_fields=not_inherited_fields,
        type_map=TypeMap(),
        spec=spec,
    )
    expected = [{'name': 'dset1',
                 'type': ('array_data', 'data'),
                 'doc': 'a string dataset',
                 'shape': [None]}]
    self.assertListEqual(docval_args, expected)
def get_refined_bar_data_spec(self):
    """Return a refinement of BarData that allows any number of instances."""
    return DatasetSpec(doc='A BarData', data_type_inc='BarData', quantity='*')
def setUp(self):
    """Build the Bar spec and type map, then remap attr2 onto the dataset's attribute."""
    self.attr1 = AttributeSpec(name='attr1', doc='a string attribute', dtype='text')
    self.attr2 = AttributeSpec(name='attr2', doc='an integer attribute', dtype='int')
    self.attr3 = AttributeSpec(name='attr3', doc='an integer attribute', dtype='int')
    data_spec = DatasetSpec(doc='a dataset', dtype='int', name='data',
                            attributes=[self.attr2])
    self.bar_spec = GroupSpec(
        doc='A test group specification with a data type',
        data_type_def='Bar',
        datasets=[data_spec],
        attributes=[self.attr1],
    )
    self.type_map = create_test_type_map([self.bar_spec], {'Bar': Bar})
    self.spec_catalog = self.type_map.namespace_catalog.get_namespace(CORE_NAMESPACE).catalog
    self.cls = self.type_map.get_dt_container_cls(self.bar_spec.data_type)
    self.bar = self.cls(name='bar', data=[1], attr1='attr1', attr2=1)
    # remap attr2 so the mapper resolves it against the dataset's attribute spec
    mapper = self.type_map.get_map(self.bar)
    mapper.map_spec('attr2', spec=self.attr2)
def setUp(self):
    """Register the Foo spec and mapper in a fresh namespace, type map, and build manager."""
    self.foo_spec = GroupSpec(
        doc='A test group specification with a data type',
        data_type_def='Foo',
        datasets=[
            DatasetSpec(doc='an example dataset', dtype='int', name='my_data',
                        attributes=[AttributeSpec(name='attr2',
                                                  doc='an example integer attribute',
                                                  dtype='int')])
        ],
        attributes=[AttributeSpec('attr1', 'an example string attribute', 'text')],
    )
    self.spec_catalog = SpecCatalog()
    self.spec_catalog.register_spec(self.foo_spec, 'test.yaml')
    self.namespace = SpecNamespace(
        'a test namespace', CORE_NAMESPACE,
        [{'source': 'test.yaml'}],
        version='0.1.0',
        catalog=self.spec_catalog,
    )
    self.namespace_catalog = NamespaceCatalog()
    self.namespace_catalog.add_namespace(CORE_NAMESPACE, self.namespace)
    self.type_map = TypeMap(self.namespace_catalog)
    self.type_map.register_container_type(CORE_NAMESPACE, 'Foo', Foo)
    self.type_map.register_map(Foo, FooMapper)
    self.manager = BuildManager(self.type_map)
def test_build_empty_data(self):
    """Test building of a Data object with empty data."""
    baz_inc_spec = DatasetSpec(doc='doc', data_type_inc='Baz', quantity=ZERO_OR_MANY)
    baz_holder_spec = GroupSpec(doc='doc', data_type_def='BazHolder',
                                datasets=[baz_inc_spec])
    self.spec_catalog.register_spec(baz_holder_spec, 'test.yaml')
    self.type_map.register_container_type(CORE_NAMESPACE, 'BazHolder', BazHolder)
    self.holder_mapper = ObjectMapper(baz_holder_spec)
    baz = Baz('MyBaz', [], 'abcdefghijklmnopqrstuvwxyz')
    holder = BazHolder('holder', [baz])
    builder = self.holder_mapper.build(holder, self.manager)
    expected_attrs = {
        'baz_attr': 'abcdefghijklmnopqrstuvwxyz',
        'data_type': 'Baz',
        'namespace': 'test_core',
        'object_id': baz.object_id,
    }
    expected = GroupBuilder(
        name='holder',
        datasets=[DatasetBuilder(name='MyBaz', data=[], attributes=expected_attrs)],
    )
    self.assertBuilderEqual(builder, expected)
def setUp(self):
    """Register Foo and Bar specs (Bar holds an object reference to Foo) and build mappers."""
    self.foo_spec = GroupSpec('A test group specification with data type Foo',
                              data_type_def='Foo')
    self.bar_spec = GroupSpec(
        'A test group specification with a data type Bar',
        data_type_def='Bar',
        datasets=[DatasetSpec('an example dataset', 'int', name='data')],
        attributes=[
            AttributeSpec('attr1', 'an example string attribute', 'text'),
            AttributeSpec('attr2', 'an example integer attribute', 'int'),
            AttributeSpec('foo', 'a referenced foo', RefSpec('Foo', 'object'),
                          required=False),
        ],
    )
    self.spec_catalog = SpecCatalog()
    for spec in (self.foo_spec, self.bar_spec):
        self.spec_catalog.register_spec(spec, 'test.yaml')
    self.namespace = SpecNamespace('a test namespace', CORE_NAMESPACE,
                                   [{'source': 'test.yaml'}],
                                   version='0.1.0',
                                   catalog=self.spec_catalog)
    self.namespace_catalog = NamespaceCatalog()
    self.namespace_catalog.add_namespace(CORE_NAMESPACE, self.namespace)
    self.type_map = TypeMap(self.namespace_catalog)
    self.type_map.register_container_type(CORE_NAMESPACE, 'Foo', Foo)
    self.type_map.register_container_type(CORE_NAMESPACE, 'Bar', Bar)
    self.manager = BuildManager(self.type_map)
    self.foo_mapper = ObjectMapper(self.foo_spec)
    self.bar_mapper = ObjectMapper(self.bar_spec)
def setUp(self):
    """Register the Bar spec in an unversioned namespace and build its mapper."""
    self.bar_spec = GroupSpec(
        'A test group specification with a data type',
        data_type_def='Bar',
        datasets=[
            DatasetSpec('an example dataset', 'int', name='data',
                        attributes=[AttributeSpec('attr2',
                                                  'an example integer attribute',
                                                  'int')])
        ],
        attributes=[AttributeSpec('attr1', 'an example string attribute', 'text')],
    )
    self.spec_catalog = SpecCatalog()
    self.spec_catalog.register_spec(self.bar_spec, 'test.yaml')
    # NOTE: unlike most fixtures, no version is passed to SpecNamespace here
    self.namespace = SpecNamespace('a test namespace', CORE_NAMESPACE,
                                   [{'source': 'test.yaml'}],
                                   catalog=self.spec_catalog)
    self.namespace_catalog = NamespaceCatalog()
    self.namespace_catalog.add_namespace(CORE_NAMESPACE, self.namespace)
    self.type_map = TypeMap(self.namespace_catalog)
    self.type_map.register_container_type(CORE_NAMESPACE, 'Bar', Bar)
    self.type_map.register_map(Bar, ObjectMapper)
    self.manager = BuildManager(self.type_map)
    self.mapper = ObjectMapper(self.bar_spec)
def test_numeric_spec(self):
    """convert_dtype with a 'numeric' spec keeps numeric values and rejects non-numeric ones."""
    spec = DatasetSpec('an example dataset', 'numeric', name='data')
    # a numpy scalar passes through unchanged
    value = np.uint64(4)
    ret, ret_dtype = ObjectMapper.convert_dtype(spec, value)
    self.assertEqual(ret, value)
    self.assertIs(type(ret), np.uint64)
    self.assertEqual(ret_dtype, np.uint64)
    # a DataChunkIterator of ints passes through unchanged
    value = DataChunkIterator(data=[1, 2, 3])
    ret, ret_dtype = ObjectMapper.convert_dtype(spec, value)
    self.assertEqual(ret, value)
    self.assertIs(ret.dtype.type, np.dtype(int).type)
    self.assertIs(type(ret.data[0]), int)
    self.assertEqual(ret_dtype, np.dtype(int).type)
    # string values are rejected
    value = ['a', 'b']
    msg = "Cannot convert from <class 'str'> to 'numeric' specification dtype."
    with self.assertRaisesWith(ValueError, msg):
        ObjectMapper.convert_dtype(spec, value)
    value = np.array(['a', 'b'])
    msg = "Cannot convert from <class 'numpy.str_'> to 'numeric' specification dtype."
    with self.assertRaisesWith(ValueError, msg):
        ObjectMapper.convert_dtype(spec, value)
    # empty untyped containers cannot be converted
    value = []
    msg = "Cannot infer dtype of empty list or tuple. Please use numpy array with specified dtype."
    with self.assertRaisesWith(ValueError, msg):
        ObjectMapper.convert_dtype(spec, value)
def test_compound_type(self):
    """Test that convert_dtype passes through arguments if spec dtype is a list without any validation."""
    compound_dtype = [DtypeSpec('an int field', 'f1', 'int'),
                      DtypeSpec('a float field', 'f2', 'float')]
    spec = DatasetSpec('an example dataset', compound_dtype, name='data')
    value = ['a', 1, 2.2]
    res, res_dtype = ObjectMapper.convert_dtype(spec, value)
    self.assertListEqual(res, value)
    self.assertListEqual(res_dtype, compound_dtype)
def test_three_named_datasets_same_type(self):
    """Test get_target_type when a group contains three named links with type X."""
    children = [
        DatasetSpec(doc='Group 0', data_type_inc='Type0', name='group0'),
        DatasetSpec(doc='Group 1', data_type_inc='Type0', name='group1'),
        DatasetSpec(doc='Group 2', data_type_inc='Type0', name='group2'),
    ]
    parent = GroupSpec(doc='A test group', name='parent',
                       datasets=children, data_type_def='ParentType')
    # all three named matches are returned, in declaration order
    self.assertEqual(parent.get_data_type('Type0'), children)
def test_constructor_invalid_table(self):
    """A compound dtype entry that is not a DtypeSpec must raise a ValueError."""
    bad_dtype = [
        DtypeSpec('column1', 'the first column', 'int'),
        {},  # <--- Bad compound type spec must raise an error
    ]
    with self.assertRaises(ValueError):
        DatasetSpec('my first table', bad_dtype, name='table1',
                    attributes=self.attributes)
def test_type_extension(self):
    """Extend a group type and verify attribute merging, inherited-spec tracking,
    and JSON serializability of the base spec."""
    spec = GroupSpec('A test group', name='parent_type',
                     datasets=self.datasets,
                     attributes=self.attributes,
                     linkable=False,
                     data_type_def='EphysData')
    dset1_attributes_ext = [
        AttributeSpec('dset1_extra_attribute',
                      'an extra attribute for the first dataset', 'text')
    ]
    ext_datasets = [
        DatasetSpec('my first dataset extension', 'int', name='dataset1',
                    attributes=dset1_attributes_ext, linkable=True),
    ]
    ext_attributes = [
        AttributeSpec('ext_extra_attribute',
                      'an extra attribute for the group', 'text'),
    ]
    ext = GroupSpec('A test group extension', name='child_type',
                    datasets=ext_datasets,
                    attributes=ext_attributes,
                    linkable=False,
                    data_type_inc=spec,
                    data_type_def='SpikeData')
    # the extended dataset's new attribute precedes the inherited ones
    ext_dset1 = ext.get_dataset('dataset1')
    ext_dset1_attrs = ext_dset1.attributes
    self.assertDictEqual(ext_dset1_attrs[0], dset1_attributes_ext[0])
    self.assertDictEqual(ext_dset1_attrs[1], self.dset1_attributes[0])
    self.assertDictEqual(ext_dset1_attrs[2], self.dset1_attributes[1])
    self.assertEqual(ext.data_type_def, 'SpikeData')
    self.assertEqual(ext.data_type_inc, 'EphysData')
    # dataset2 was not overridden, so it should match the base dataset spec
    ext_dset2 = ext.get_dataset('dataset2')
    self.maxDiff = None
    # this will suffice for now, assertDictEqual doesn't do deep equality checks
    self.assertEqual(str(ext_dset2), str(self.datasets[1]))
    self.assertAttributesEqual(ext_dset2, self.datasets[1])
    # self.ns_attr_spec
    ndt_attr_spec = AttributeSpec('data_type', 'the data type of this object',  # noqa: F841
                                  'text', value='SpikeData')
    # group-level attributes: new first, then inherited
    res_attrs = ext.attributes
    self.assertDictEqual(res_attrs[0], ext_attributes[0])
    self.assertDictEqual(res_attrs[1], self.attributes[0])
    self.assertDictEqual(res_attrs[2], self.attributes[1])
    # test that inherited specs are tracked appropriate
    for d in self.datasets:
        with self.subTest(dataset=d.name):
            self.assertTrue(ext.is_inherited_spec(d))
            self.assertFalse(spec.is_inherited_spec(d))
    json.dumps(spec)