def setUpManager(self, specs):
    """Register *specs* under the test namespace and build ``self.manager``
    with the Simple*/NotSimple* container types and the bucket mapper registered.
    """
    catalog = SpecCatalog()
    source = 'test.yaml'
    for spec in specs:
        catalog.register_spec(spec, source)
    namespace = SpecNamespace(doc='a test namespace', name=CORE_NAMESPACE,
                              schema=[{'source': source}],
                              version='0.1.0',
                              catalog=catalog)
    ns_catalog = NamespaceCatalog()
    ns_catalog.add_namespace(CORE_NAMESPACE, namespace)
    type_map = TypeMap(ns_catalog)
    # register every container class used by these tests under its type name
    container_types = (
        ('SimpleFoo', SimpleFoo),
        ('NotSimpleFoo', NotSimpleFoo),
        ('SimpleQux', SimpleQux),
        ('NotSimpleQux', NotSimpleQux),
        ('SimpleBucket', SimpleBucket),
    )
    for type_name, container_cls in container_types:
        type_map.register_container_type(CORE_NAMESPACE, type_name, container_cls)
    type_map.register_map(SimpleBucket, self.setUpBucketMapper())
    self.manager = BuildManager(type_map)
class TestBase(unittest.TestCase):

    def setUp(self):
        """Build a 'Foo' group spec, register it under the test namespace, and
        create a BuildManager for it.
        """
        attr2 = AttributeSpec('attr2', 'an example integer attribute', 'int')
        my_data = DatasetSpec('an example dataset', 'int', name='my_data',
                              attributes=[attr2])
        attr1 = AttributeSpec('attr1', 'an example string attribute', 'text')
        self.foo_spec = GroupSpec('A test group specification with a data type',
                                  data_type_def='Foo',
                                  datasets=[my_data],
                                  attributes=[attr1])
        self.spec_catalog = SpecCatalog()
        self.spec_catalog.register_spec(self.foo_spec, 'test.yaml')
        self.namespace = SpecNamespace('a test namespace', CORE_NAMESPACE,
                                       [{'source': 'test.yaml'}],
                                       catalog=self.spec_catalog)
        self.namespace_catalog = NamespaceCatalog()
        self.namespace_catalog.add_namespace(CORE_NAMESPACE, self.namespace)
        self.type_map = TypeMap(self.namespace_catalog)
        self.type_map.register_container_type(CORE_NAMESPACE, 'Foo', Foo)
        self.type_map.register_map(Foo, ObjectMapper)
        self.manager = BuildManager(self.type_map)
class ObjectMapperMixin(metaclass=ABCMeta):
    """Shared setup for ObjectMapper tests; subclasses supply the Bar spec."""

    def setUp(self):
        self.setUpBarSpec()
        self.spec_catalog = SpecCatalog()
        self.spec_catalog.register_spec(self.bar_spec, 'test.yaml')
        self.namespace = SpecNamespace('a test namespace', CORE_NAMESPACE,
                                       [{'source': 'test.yaml'}],
                                       version='0.1.0',
                                       catalog=self.spec_catalog)
        self.namespace_catalog = NamespaceCatalog()
        self.namespace_catalog.add_namespace(CORE_NAMESPACE, self.namespace)
        self.type_map = TypeMap(self.namespace_catalog)
        self.type_map.register_container_type(CORE_NAMESPACE, 'Bar', Bar)
        self.manager = BuildManager(self.type_map)
        self.mapper = ObjectMapper(self.bar_spec)

    @abstractmethod
    def setUpBarSpec(self):
        raise NotImplementedError('Cannot run test unless setUpBarSpec is implemented')

    def test_default_mapping(self):
        """Each attribute name should map to the same spec for both the
        attribute lookup and the constructor-argument lookup.
        """
        attr_map = self.mapper.get_attr_names(self.bar_spec)
        for key in set(attr_map.keys()):
            with self.subTest(key=key):
                self.assertIs(attr_map[key], self.mapper.get_attr_spec(key))
                self.assertIs(attr_map[key], self.mapper.get_carg_spec(key))
def create_test_type_map(specs, container_classes, mappers=None):
    """ Create a TypeMap with the specs registered under a test namespace, and classes and mappers
    registered to type names.

    :param specs: list of specs
    :param container_classes: dict of type name to container class
    :param mappers: (optional) dict of type name to mapper class
    :return: the constructed TypeMap
    """
    source = 'test.yaml'
    catalog = SpecCatalog()
    for spec in specs:
        catalog.register_spec(spec, source)
    namespace = SpecNamespace(doc='a test namespace', name=CORE_NAMESPACE,
                              schema=[{'source': source}],
                              version='0.1.0',
                              catalog=catalog)
    ns_catalog = NamespaceCatalog()
    ns_catalog.add_namespace(CORE_NAMESPACE, namespace)
    type_map = TypeMap(ns_catalog)
    for type_name, container_cls in container_classes.items():
        type_map.register_container_type(CORE_NAMESPACE, type_name, container_cls)
    # mappers are keyed by type name; resolve each back to its container class
    for type_name, mapper_cls in (mappers or {}).items():
        type_map.register_map(container_classes[type_name], mapper_cls)
    return type_map
class TestObjectMapper(unittest.TestCase, metaclass=ABCMeta):
    """Base class for ObjectMapper tests; subclasses implement setUpBarSpec.

    NOTE: modernized from six-style ``with_metaclass(ABCMeta, unittest.TestCase)``
    to the native Python 3 ``metaclass`` keyword, matching the declaration style
    used elsewhere in this file; the constructed class is equivalent.
    """

    def setUp(self):
        # build a catalog/namespace/type map around the subclass-provided Bar spec
        self.setUpBarSpec()
        self.spec_catalog = SpecCatalog()
        self.spec_catalog.register_spec(self.bar_spec, 'test.yaml')
        self.namespace = SpecNamespace('a test namespace', CORE_NAMESPACE,
                                       [{'source': 'test.yaml'}],
                                       catalog=self.spec_catalog)
        self.namespace_catalog = NamespaceCatalog()
        self.namespace_catalog.add_namespace(CORE_NAMESPACE, self.namespace)
        self.type_map = TypeMap(self.namespace_catalog)
        self.type_map.register_container_type(CORE_NAMESPACE, 'Bar', Bar)
        self.type_map.register_map(Bar, ObjectMapper)
        self.manager = BuildManager(self.type_map)
        self.mapper = ObjectMapper(self.bar_spec)

    def setUpBarSpec(self):
        # subclasses override; skip (rather than fail) if run on the base class
        raise unittest.SkipTest('setUpBarSpec not implemented')

    def test_default_mapping(self):
        """Each attribute name maps to the same spec for both the attribute
        lookup and the constructor-argument lookup.
        """
        attr_map = self.mapper.get_attr_names(self.bar_spec)
        keys = set(attr_map.keys())
        for key in keys:
            with self.subTest(key=key):
                self.assertIs(attr_map[key], self.mapper.get_attr_spec(key))
                self.assertIs(attr_map[key], self.mapper.get_carg_spec(key))
def setUp(self):
    """Register every spec returned by ``getSpecs()`` and build ``self.vmap``."""
    catalog = SpecCatalog()
    for spec in self.getSpecs():
        catalog.register_spec(spec, 'test.yaml')
    self.namespace = SpecNamespace('a test namespace', CORE_NAMESPACE,
                                   [{'source': 'test.yaml'}],
                                   catalog=catalog)
    self.vmap = ValidatorMap(self.namespace)
class TestObjectMapperBadValue(TestCase):

    def test_bad_value(self):
        """Test that an error is raised if the container attribute value for a spec
        with a data type is not a container or collection of containers.
        """
        # Qux's spec declares a Foo subgroup, but the container stores a plain int
        # in its 'foo' field — building it should raise ContainerConfigurationError.
        class Qux(Container):
            @docval({'name': 'name', 'type': str, 'doc': 'the name of this Qux'},
                    {'name': 'foo', 'type': int, 'doc': 'a group'})
            def __init__(self, **kwargs):
                name, foo = getargs('name', 'foo', kwargs)
                super().__init__(name=name)
                self.__foo = foo
                if isinstance(foo, Foo):
                    self.__foo.parent = self

            @property
            def foo(self):
                return self.__foo

        self.qux_spec = GroupSpec(
            doc='A test group specification with data type Qux',
            data_type_def='Qux',
            groups=[GroupSpec('an example dataset', data_type_inc='Foo')]
        )
        self.foo_spec = GroupSpec('A test group specification with data type Foo',
                                  data_type_def='Foo')
        self.spec_catalog = SpecCatalog()
        self.spec_catalog.register_spec(self.qux_spec, 'test.yaml')
        self.spec_catalog.register_spec(self.foo_spec, 'test.yaml')
        self.namespace = SpecNamespace('a test namespace', CORE_NAMESPACE,
                                       [{'source': 'test.yaml'}],
                                       version='0.1.0',
                                       catalog=self.spec_catalog)
        self.namespace_catalog = NamespaceCatalog()
        self.namespace_catalog.add_namespace(CORE_NAMESPACE, self.namespace)
        self.type_map = TypeMap(self.namespace_catalog)
        self.type_map.register_container_type(CORE_NAMESPACE, 'Qux', Qux)
        self.type_map.register_container_type(CORE_NAMESPACE, 'Foo', Foo)
        self.manager = BuildManager(self.type_map)
        self.mapper = ObjectMapper(self.qux_spec)

        container = Qux('my_qux', foo=1)  # foo is an int, not a Foo container
        msg = "Qux 'my_qux' attribute 'foo' has unexpected type."
        with self.assertRaisesWith(ContainerConfigurationError, msg):
            self.mapper.build(container, self.manager)
class TestDataIOEdgeCases(TestCase):

    def setUp(self):
        # build a catalog/namespace/type map around the Baz dataset spec below
        self.setUpBazSpec()
        self.spec_catalog = SpecCatalog()
        self.spec_catalog.register_spec(self.baz_spec, 'test.yaml')
        self.namespace = SpecNamespace('a test namespace', CORE_NAMESPACE,
                                       [{'source': 'test.yaml'}],
                                       version='0.1.0',
                                       catalog=self.spec_catalog)
        self.namespace_catalog = NamespaceCatalog()
        self.namespace_catalog.add_namespace(CORE_NAMESPACE, self.namespace)
        self.type_map = TypeMap(self.namespace_catalog)
        self.type_map.register_container_type(CORE_NAMESPACE, 'Baz', Baz)
        self.type_map.register_map(Baz, ObjectMapper)
        self.manager = BuildManager(self.type_map)
        self.mapper = ObjectMapper(self.baz_spec)

    def setUpBazSpec(self):
        # dtype=None: the tests below check that the data wrapper (H5DataIO /
        # DataChunkIterator) is preserved in the resulting builder
        self.baz_spec = DatasetSpec(
            doc='an Baz type',
            dtype=None,
            name='MyBaz',
            data_type_def='Baz',
            shape=[None],
            attributes=[AttributeSpec('baz_attr', 'an example string attribute', 'text')]
        )

    def test_build_dataio(self):
        """Test building of a dataset with data_type and no dtype with value DataIO."""
        container = Baz('my_baz', H5DataIO(['a', 'b', 'c', 'd'], chunks=True), 'value1')
        builder = self.type_map.build(container)
        self.assertIsInstance(builder.get('data'), H5DataIO)

    def test_build_datachunkiterator(self):
        """Test building of a dataset with data_type and no dtype with value DataChunkIterator."""
        container = Baz('my_baz', DataChunkIterator(['a', 'b', 'c', 'd']), 'value1')
        builder = self.type_map.build(container)
        self.assertIsInstance(builder.get('data'), DataChunkIterator)

    def test_build_dataio_datachunkiterator(self):  # hdmf#512
        """Test building of a dataset with no dtype and no data_type with value DataIO wrapping a DCI."""
        container = Baz('my_baz',
                        H5DataIO(DataChunkIterator(['a', 'b', 'c', 'd']), chunks=True),
                        'value1')
        builder = self.type_map.build(container)
        # both the outer DataIO wrapper and the inner iterator must survive the build
        self.assertIsInstance(builder.get('data'), H5DataIO)
        self.assertIsInstance(builder.get('data').data, DataChunkIterator)
class TestTypeMap(unittest.TestCase):

    def setUp(self):
        # two minimal group specs (Bar, Foo) registered under the test namespace
        self.bar_spec = GroupSpec('A test group specification with a data type',
                                  data_type_def='Bar')
        self.foo_spec = GroupSpec('A test group specification with data type Foo',
                                  data_type_def='Foo')
        self.spec_catalog = SpecCatalog()
        self.spec_catalog.register_spec(self.bar_spec, 'test.yaml')
        self.spec_catalog.register_spec(self.foo_spec, 'test.yaml')
        self.namespace = SpecNamespace('a test namespace', CORE_NAMESPACE,
                                       [{'source': 'test.yaml'}],
                                       catalog=self.spec_catalog)
        self.namespace_catalog = NamespaceCatalog()
        self.namespace_catalog.add_namespace(CORE_NAMESPACE, self.namespace)
        self.type_map = TypeMap(self.namespace_catalog)
        self.type_map.register_container_type(CORE_NAMESPACE, 'Bar', Bar)
        self.type_map.register_container_type(CORE_NAMESPACE, 'Foo', Foo)
        # self.build_manager = BuildManager(self.type_map)

    def test_get_map_unique_mappers(self):
        # registering the same mapper class for two types must still yield
        # distinct mapper instances per type
        self.type_map.register_map(Bar, ObjectMapper)
        self.type_map.register_map(Foo, ObjectMapper)
        bar_inst = Bar('my_bar', list(range(10)), 'value1', 10)
        foo_inst = Foo(name='my_foo')
        bar_mapper = self.type_map.get_map(bar_inst)
        foo_mapper = self.type_map.get_map(foo_inst)
        self.assertIsNot(bar_mapper, foo_mapper)

    def test_get_map(self):
        # get_map returns a mapper bound to the registered spec, and repeated
        # calls for the same container return the identical instance
        self.type_map.register_map(Bar, ObjectMapper)
        container_inst = Bar('my_bar', list(range(10)), 'value1', 10)
        mapper = self.type_map.get_map(container_inst)
        self.assertIsInstance(mapper, ObjectMapper)
        self.assertIs(mapper.spec, self.bar_spec)
        mapper2 = self.type_map.get_map(container_inst)
        self.assertIs(mapper, mapper2)

    def test_get_map_register(self):
        # a custom ObjectMapper subclass registered for a type is what get_map returns
        class MyMap(ObjectMapper):
            pass
        self.type_map.register_map(Bar, MyMap)

        container_inst = Bar('my_bar', list(range(10)), 'value1', 10)
        mapper = self.type_map.get_map(container_inst)
        self.assertIs(mapper.spec, self.bar_spec)
        self.assertIsInstance(mapper, MyMap)
def customSetUp(self, bar_spec):
    """Build and return a TypeMap with *bar_spec* registered as the 'Bar' type
    under the test namespace.
    """
    catalog = SpecCatalog()
    catalog.register_spec(bar_spec, 'test.yaml')
    namespace = SpecNamespace('a test namespace', CORE_NAMESPACE,
                              [{'source': 'test.yaml'}],
                              version='0.1.0',
                              catalog=catalog)
    ns_catalog = NamespaceCatalog()
    ns_catalog.add_namespace(CORE_NAMESPACE, namespace)
    type_map = TypeMap(ns_catalog)
    type_map.register_container_type(CORE_NAMESPACE, 'Bar', Bar)
    return type_map
def setUp(self):
    """Register a minimal 'Bar' group spec and set up ``self.type_map``."""
    self.bar_spec = GroupSpec('A test group specification with a data type',
                              data_type_def='Bar')
    catalog = SpecCatalog()
    catalog.register_spec(self.bar_spec, 'test.yaml')
    namespace = SpecNamespace('a test namespace', CORE_NAMESPACE,
                              [{'source': 'test.yaml'}],
                              catalog=catalog)
    ns_catalog = NamespaceCatalog()
    ns_catalog.add_namespace(CORE_NAMESPACE, namespace)
    self.type_map = TypeMap(ns_catalog)
    self.type_map.register_container_type(CORE_NAMESPACE, 'Bar', Bar)
def set_up_spec(self, dtype):
    """Register a 'Bar' group spec whose dataset and attribute both use *dtype*,
    then build ``self.vmap`` for validation tests.
    """
    data_spec = DatasetSpec('an example dataset', dtype, name='data')
    attr_spec = AttributeSpec('attr1', 'an example attribute', dtype)
    spec = GroupSpec('A test group specification with a data type',
                     data_type_def='Bar',
                     datasets=[data_spec],
                     attributes=[attr_spec])
    catalog = SpecCatalog()
    catalog.register_spec(spec, 'test.yaml')
    self.namespace = SpecNamespace('a test namespace', CORE_NAMESPACE,
                                   [{'source': 'test.yaml'}],
                                   version='0.1.0',
                                   catalog=catalog)
    self.vmap = ValidatorMap(self.namespace)
class TestDataMapScalar(TestCase):

    def setUp(self):
        """Register the BazScalar spec and create the manager and mapper."""
        self.setUpBazSpec()
        self.spec_catalog = SpecCatalog()
        self.spec_catalog.register_spec(self.baz_spec, 'test.yaml')
        self.namespace = SpecNamespace('a test namespace', CORE_NAMESPACE,
                                       [{'source': 'test.yaml'}],
                                       version='0.1.0',
                                       catalog=self.spec_catalog)
        self.namespace_catalog = NamespaceCatalog()
        self.namespace_catalog.add_namespace(CORE_NAMESPACE, self.namespace)
        self.type_map = TypeMap(self.namespace_catalog)
        self.type_map.register_container_type(CORE_NAMESPACE, 'BazScalar', BazScalar)
        self.type_map.register_map(BazScalar, ObjectMapper)
        self.manager = BuildManager(self.type_map)
        self.mapper = ObjectMapper(self.baz_spec)

    def setUpBazSpec(self):
        self.baz_spec = DatasetSpec(doc='a BazScalar type',
                                    dtype='int',
                                    name='MyBaz',
                                    data_type_def='BazScalar')

    def test_construct_scalar_dataset(self):
        """Test constructing a Data object with an h5py.Dataset with shape (1, ) for scalar spec."""
        # FIX: write the HDF5 file into a temporary directory instead of the CWD.
        # Previously 'test.h5' was removed only after the assertions, so a failing
        # assertion leaked the file and parallel test runs could collide on it.
        with tempfile.TemporaryDirectory() as tmpdir:
            path = os.path.join(tmpdir, 'test.h5')
            with h5py.File(path, 'w') as file:
                test_ds = file.create_dataset('test_ds', data=[1])
                expected = BazScalar(
                    name='MyBaz',
                    data=1,
                )
                builder = DatasetBuilder(
                    name='MyBaz',
                    data=test_ds,
                    attributes={
                        'data_type': 'BazScalar',
                        'namespace': CORE_NAMESPACE,
                        'object_id': expected.object_id
                    },
                )
                container = self.mapper.construct(builder, self.manager)
                # the constructed value must be a plain integer,
                # as opposed to h5py.Dataset
                self.assertTrue(np.issubdtype(type(container.data), np.integer))
                self.assertContainerEqual(container, expected)
def test_register_generator(self):
    """Test TypeMap.register_generator and ClassGenerator.register_generator."""

    # generator that claims every field and records which attr names it processed
    class MyClassGenerator(CustomClassGenerator):

        @classmethod
        def apply_generator_to_field(cls, field_spec, bases, type_map):
            # always True so this generator handles every field spec
            return True

        @classmethod
        def process_field_spec(cls, classdict, docval_args, parent_cls, attr_name,
                               not_inherited_fields, type_map, spec):
            # append attr_name to classdict['__custom_fields__'] list
            classdict.setdefault('process_field_spec', list()).append(attr_name)

        @classmethod
        def post_process(cls, classdict, bases, docval_args, spec):
            classdict['post_process'] = True

    spec = GroupSpec(doc='A test group specification with a data type',
                     data_type_def='Baz',
                     attributes=[AttributeSpec(name='attr1',
                                               doc='a string attribute',
                                               dtype='text')])
    spec_catalog = SpecCatalog()
    spec_catalog.register_spec(spec, 'test.yaml')
    namespace = SpecNamespace(doc='a test namespace', name=CORE_NAMESPACE,
                              schema=[{'source': 'test.yaml'}],
                              version='0.1.0',
                              catalog=spec_catalog)
    namespace_catalog = NamespaceCatalog()
    namespace_catalog.add_namespace(CORE_NAMESPACE, namespace)
    type_map = TypeMap(namespace_catalog)
    type_map.register_generator(MyClassGenerator)
    cls = type_map.get_dt_container_cls('Baz', CORE_NAMESPACE)

    # the generated class should carry the marks left by the custom generator
    self.assertEqual(cls.process_field_spec, ['attr1'])
    self.assertTrue(cls.post_process)
def setUp(self):
    """Register the BarData/BarDataHolder specs and build ``self.manager``,
    mapping BarData through ExtBarDataMapper.
    """
    self.set_up_specs()
    catalog = SpecCatalog()
    for spec in (self.bar_data_spec, self.bar_data_holder_spec):
        catalog.register_spec(spec, 'test.yaml')
    namespace = SpecNamespace(doc='a test namespace', name=CORE_NAMESPACE,
                              schema=[{'source': 'test.yaml'}],
                              version='0.1.0',
                              catalog=catalog)
    ns_catalog = NamespaceCatalog()
    ns_catalog.add_namespace(CORE_NAMESPACE, namespace)
    type_map = TypeMap(ns_catalog)
    type_map.register_container_type(CORE_NAMESPACE, 'BarData', BarData)
    type_map.register_container_type(CORE_NAMESPACE, 'BarDataHolder', BarDataHolder)
    type_map.register_map(BarData, ExtBarDataMapper)
    type_map.register_map(BarDataHolder, ObjectMapper)
    self.manager = BuildManager(type_map)
class BazSpecMixin:
    """Common setup for Baz mapper tests; subclasses must provide setUpBazSpec."""

    def setUp(self):
        self.setUpBazSpec()
        self.spec_catalog = SpecCatalog()
        self.spec_catalog.register_spec(self.baz_spec, 'test.yaml')
        self.namespace = SpecNamespace('a test namespace', CORE_NAMESPACE,
                                       [{'source': 'test.yaml'}],
                                       version='0.1.0',
                                       catalog=self.spec_catalog)
        self.namespace_catalog = NamespaceCatalog()
        self.namespace_catalog.add_namespace(CORE_NAMESPACE, self.namespace)
        self.type_map = TypeMap(self.namespace_catalog)
        self.type_map.register_container_type(CORE_NAMESPACE, 'Baz', Baz)
        self.type_map.register_map(Baz, ObjectMapper)
        self.manager = BuildManager(self.type_map)
        self.mapper = ObjectMapper(self.baz_spec)

    def setUpBazSpec(self):
        raise NotImplementedError('Test must implement this method.')
class TestDataMap(unittest.TestCase):

    def setUp(self):
        """Register the Baz dataset spec and create the manager and mapper."""
        self.setUpBazSpec()
        self.spec_catalog = SpecCatalog()
        self.spec_catalog.register_spec(self.baz_spec, 'test.yaml')
        self.namespace = SpecNamespace('a test namespace', CORE_NAMESPACE,
                                       [{'source': 'test.yaml'}],
                                       catalog=self.spec_catalog)
        self.namespace_catalog = NamespaceCatalog()
        self.namespace_catalog.add_namespace(CORE_NAMESPACE, self.namespace)
        self.type_map = TypeMap(self.namespace_catalog)
        self.type_map.register_container_type(CORE_NAMESPACE, 'Baz', Baz)
        self.type_map.register_map(Baz, ObjectMapper)
        self.manager = BuildManager(self.type_map)
        self.mapper = ObjectMapper(self.baz_spec)

    def setUpBazSpec(self):
        baz_attr = AttributeSpec('baz_attr', 'an example string attribute', 'text')
        self.baz_spec = DatasetSpec('an Baz type', 'int',
                                    name='MyBaz',
                                    data_type_def='Baz',
                                    attributes=[baz_attr])

    def test_build(self):
        ''' Test default mapping functionality when no attributes are nested '''
        container = Baz('my_baz', list(range(10)), 'abcdefghijklmnopqrstuvwxyz')
        builder = self.mapper.build(container, self.manager)
        expected = DatasetBuilder('my_baz', list(range(10)),
                                  attributes={'baz_attr': 'abcdefghijklmnopqrstuvwxyz'})
        self.assertDictEqual(builder, expected)
class BuildDatasetOfReferencesMixin:

    def setUp(self):
        # subclass provides self.baz_spec via setUpBazSpec; Foo serves as the
        # reference target type registered alongside it
        self.setUpBazSpec()
        self.foo_spec = GroupSpec(doc='A test group specification with a data type',
                                  data_type_def='Foo',
                                  datasets=[DatasetSpec(name='my_data',
                                                        doc='an example dataset',
                                                        dtype='int')],
                                  attributes=[AttributeSpec(name='attr1',
                                                            doc='an example string attribute',
                                                            dtype='text'),
                                              AttributeSpec(name='attr2',
                                                            doc='an example int attribute',
                                                            dtype='int'),
                                              AttributeSpec(name='attr3',
                                                            doc='an example float attribute',
                                                            dtype='float')])
        self.spec_catalog = SpecCatalog()
        self.spec_catalog.register_spec(self.baz_spec, 'test.yaml')
        self.spec_catalog.register_spec(self.foo_spec, 'test.yaml')
        self.namespace = SpecNamespace('a test namespace', CORE_NAMESPACE,
                                       [{'source': 'test.yaml'}],
                                       version='0.1.0',
                                       catalog=self.spec_catalog)
        self.namespace_catalog = NamespaceCatalog()
        self.namespace_catalog.add_namespace(CORE_NAMESPACE, self.namespace)
        self.type_map = TypeMap(self.namespace_catalog)
        self.type_map.register_container_type(CORE_NAMESPACE, 'Baz', Baz)
        self.type_map.register_container_type(CORE_NAMESPACE, 'Foo', Foo)
        self.type_map.register_map(Baz, ObjectMapper)
        self.type_map.register_map(Foo, ObjectMapper)
        self.manager = BuildManager(self.type_map)
class TestDynamicContainer(unittest.TestCase):

    def setUp(self):
        # base 'Bar' spec with one int dataset ('data' w/ int attr2) and a text attr1;
        # the tests extend it with a dynamically generated 'Baz' class
        self.bar_spec = GroupSpec(
            'A test group specification with a data type',
            data_type_def='Bar',
            datasets=[DatasetSpec('an example dataset', 'int', name='data',
                                  attributes=[AttributeSpec('attr2',
                                                            'an example integer attribute',
                                                            'int')])],
            attributes=[AttributeSpec('attr1', 'an example string attribute', 'text')])
        self.spec_catalog = SpecCatalog()
        self.spec_catalog.register_spec(self.bar_spec, 'test.yaml')
        self.namespace = SpecNamespace('a test namespace', CORE_NAMESPACE,
                                       [{'source': 'test.yaml'}],
                                       catalog=self.spec_catalog)
        self.namespace_catalog = NamespaceCatalog()
        self.namespace_catalog.add_namespace(CORE_NAMESPACE, self.namespace)
        self.type_map = TypeMap(self.namespace_catalog)
        self.type_map.register_container_type(CORE_NAMESPACE, 'Bar', Bar)
        self.type_map.register_map(Bar, ObjectMapper)
        self.manager = BuildManager(self.type_map)
        self.mapper = ObjectMapper(self.bar_spec)

    def test_dynamic_container_creation(self):
        # a spec extending Bar with no Container class yields a generated subclass
        # whose __init__ docval covers inherited and new args, none with defaults
        baz_spec = GroupSpec('A test extension with no Container class',
                             data_type_def='Baz', data_type_inc=self.bar_spec,
                             attributes=[AttributeSpec('attr3', 'an example float attribute',
                                                       'float'),
                                         AttributeSpec('attr4', 'another example float attribute',
                                                       'float')])
        self.spec_catalog.register_spec(baz_spec, 'extension.yaml')
        cls = self.type_map.get_container_cls(CORE_NAMESPACE, 'Baz')
        expected_args = {'name', 'data', 'attr1', 'attr2', 'attr3', 'attr4'}
        received_args = set()
        for x in get_docval(cls.__init__):
            received_args.add(x['name'])
            with self.subTest(name=x['name']):
                self.assertNotIn('default', x)
        self.assertSetEqual(expected_args, received_args)
        self.assertEqual(cls.__name__, 'Baz')
        self.assertTrue(issubclass(cls, Bar))

    def test_dynamic_container_creation_defaults(self):
        baz_spec = GroupSpec('A test extension with no Container class',
                             data_type_def='Baz', data_type_inc=self.bar_spec,
                             attributes=[AttributeSpec('attr3', 'an example float attribute',
                                                       'float'),
                                         AttributeSpec('attr4', 'another example float attribute',
                                                       'float')])
        self.spec_catalog.register_spec(baz_spec, 'extension.yaml')
        cls = self.type_map.get_container_cls(CORE_NAMESPACE, 'Baz')
        expected_args = {'name', 'data', 'attr1', 'attr2', 'attr3', 'attr4'}
        received_args = set(map(lambda x: x['name'], get_docval(cls.__init__)))
        self.assertSetEqual(expected_args, received_args)
        self.assertEqual(cls.__name__, 'Baz')
        self.assertTrue(issubclass(cls, Bar))

    def test_dynamic_container_constructor(self):
        baz_spec = GroupSpec('A test extension with no Container class',
                             data_type_def='Baz', data_type_inc=self.bar_spec,
                             attributes=[AttributeSpec('attr3', 'an example float attribute',
                                                       'float'),
                                         AttributeSpec('attr4', 'another example float attribute',
                                                       'float')])
        self.spec_catalog.register_spec(baz_spec, 'extension.yaml')
        cls = self.type_map.get_container_cls(CORE_NAMESPACE, 'Baz')
        # TODO: test that constructor works!
        inst = cls('My Baz', [1, 2, 3, 4], 'string attribute', 1000, attr3=98.6, attr4=1.0)
        self.assertEqual(inst.name, 'My Baz')
        self.assertEqual(inst.data, [1, 2, 3, 4])
        self.assertEqual(inst.attr1, 'string attribute')
        self.assertEqual(inst.attr2, 1000)
        self.assertEqual(inst.attr3, 98.6)
        self.assertEqual(inst.attr4, 1.0)

    def test_dynamic_container_constructor_name(self):
        # name is specified in spec and cannot be changed
        baz_spec = GroupSpec('A test extension with no Container class',
                             data_type_def='Baz', data_type_inc=self.bar_spec,
                             name='A fixed name',
                             attributes=[AttributeSpec('attr3', 'an example float attribute',
                                                       'float'),
                                         AttributeSpec('attr4', 'another example float attribute',
                                                       'float')])
        self.spec_catalog.register_spec(baz_spec, 'extension.yaml')
        cls = self.type_map.get_container_cls(CORE_NAMESPACE, 'Baz')

        # passing a name when the spec fixes it is a TypeError
        with self.assertRaises(TypeError):
            inst = cls('My Baz', [1, 2, 3, 4], 'string attribute', 1000, attr3=98.6, attr4=1.0)

        inst = cls([1, 2, 3, 4], 'string attribute', 1000, attr3=98.6, attr4=1.0)
        self.assertEqual(inst.name, 'A fixed name')
        self.assertEqual(inst.data, [1, 2, 3, 4])
        self.assertEqual(inst.attr1, 'string attribute')
        self.assertEqual(inst.attr2, 1000)
        self.assertEqual(inst.attr3, 98.6)
        self.assertEqual(inst.attr4, 1.0)

    def test_dynamic_container_constructor_name_default_name(self):
        # if both name and default_name are specified, name should be used
        with self.assertWarns(Warning):
            baz_spec = GroupSpec('A test extension with no Container class',
                                 data_type_def='Baz', data_type_inc=self.bar_spec,
                                 name='A fixed name',
                                 default_name='A default name',
                                 attributes=[AttributeSpec('attr3', 'an example float attribute',
                                                           'float'),
                                             AttributeSpec('attr4', 'another example float attribute',
                                                           'float')])
            self.spec_catalog.register_spec(baz_spec, 'extension.yaml')
            cls = self.type_map.get_container_cls(CORE_NAMESPACE, 'Baz')

            inst = cls([1, 2, 3, 4], 'string attribute', 1000, attr3=98.6, attr4=1.0)
            self.assertEqual(inst.name, 'A fixed name')
class TestDynamicDynamicTable(TestCase):

    def setUp(self):
        # 'TestTable': a DynamicTable extension with required, indexed, and
        # optional VectorData columns
        self.dt_spec = GroupSpec(
            'A test extension that contains a dynamic table',
            data_type_def='TestTable',
            data_type_inc='DynamicTable',
            datasets=[
                DatasetSpec(
                    data_type_inc='VectorData',
                    name='my_col',
                    doc='a test column',
                    dtype='float'
                ),
                DatasetSpec(
                    data_type_inc='VectorData',
                    name='indexed_col',
                    doc='a test column',
                    dtype='float'
                ),
                DatasetSpec(
                    data_type_inc='VectorIndex',
                    name='indexed_col_index',
                    doc='a test column',
                ),
                DatasetSpec(
                    data_type_inc='VectorData',
                    name='optional_col1',
                    doc='a test column',
                    dtype='float',
                    quantity='?',
                ),
                DatasetSpec(
                    data_type_inc='VectorData',
                    name='optional_col2',
                    doc='a test column',
                    dtype='float',
                    quantity='?',
                )
            ]
        )

        # 'TestDTRTable': a DynamicTable extension with required and optional
        # DynamicTableRegion columns (some indexed) referencing another table
        self.dt_spec2 = GroupSpec(
            'A test extension that contains a dynamic table',
            data_type_def='TestDTRTable',
            data_type_inc='DynamicTable',
            datasets=[
                DatasetSpec(
                    data_type_inc='DynamicTableRegion',
                    name='ref_col',
                    doc='a test column',
                ),
                DatasetSpec(
                    data_type_inc='DynamicTableRegion',
                    name='indexed_ref_col',
                    doc='a test column',
                ),
                DatasetSpec(
                    data_type_inc='VectorIndex',
                    name='indexed_ref_col_index',
                    doc='a test column',
                ),
                DatasetSpec(
                    data_type_inc='DynamicTableRegion',
                    name='optional_ref_col',
                    doc='a test column',
                    quantity='?'
                ),
                DatasetSpec(
                    data_type_inc='DynamicTableRegion',
                    name='optional_indexed_ref_col',
                    doc='a test column',
                    quantity='?'
                ),
                DatasetSpec(
                    data_type_inc='VectorIndex',
                    name='optional_indexed_ref_col_index',
                    doc='a test column',
                    quantity='?'
                ),
                DatasetSpec(
                    data_type_inc='VectorData',
                    name='optional_col3',
                    doc='a test column',
                    dtype='float',
                    quantity='?',
                )
            ]
        )

        from hdmf.spec.write import YAMLSpecWriter
        writer = YAMLSpecWriter(outdir='.')

        self.spec_catalog = SpecCatalog()
        self.spec_catalog.register_spec(self.dt_spec, 'test.yaml')
        self.spec_catalog.register_spec(self.dt_spec2, 'test.yaml')
        # namespace includes hdmf-common so DynamicTable et al. resolve
        self.namespace = SpecNamespace(
            'a test namespace', CORE_NAMESPACE,
            [
                dict(
                    namespace='hdmf-common',
                ),
                dict(source='test.yaml'),
            ],
            version='0.1.0',
            catalog=self.spec_catalog
        )

        # write the specs/namespace to disk and load them back through a TypeMap
        # merged with the stock hdmf-common type map
        self.test_dir = tempfile.mkdtemp()
        spec_fpath = os.path.join(self.test_dir, 'test.yaml')
        namespace_fpath = os.path.join(self.test_dir, 'test-namespace.yaml')
        writer.write_spec(dict(groups=[self.dt_spec, self.dt_spec2]), spec_fpath)
        writer.write_namespace(self.namespace, namespace_fpath)
        self.namespace_catalog = NamespaceCatalog()
        hdmf_typemap = get_type_map()
        self.type_map = TypeMap(self.namespace_catalog)
        self.type_map.merge(hdmf_typemap, ns_catalog=True)
        self.type_map.load_namespaces(namespace_fpath)
        self.manager = BuildManager(self.type_map)

        # dynamically generated container classes under test
        self.TestTable = self.type_map.get_dt_container_cls('TestTable', CORE_NAMESPACE)
        self.TestDTRTable = self.type_map.get_dt_container_cls('TestDTRTable', CORE_NAMESPACE)

    def tearDown(self) -> None:
        shutil.rmtree(self.test_dir)

    def test_dynamic_table(self):
        assert issubclass(self.TestTable, DynamicTable)

        assert self.TestTable.__columns__[0] == dict(
            name='my_col',
            description='a test column',
            required=True
        )

    def test_forbids_incorrect_col(self):
        test_table = self.TestTable(name='test_table', description='my test table')

        with self.assertRaises(ValueError):
            test_table.add_row(my_col=3.0, indexed_col=[1.0, 3.0], incorrect_col=5)

    def test_dynamic_column(self):
        test_table = self.TestTable(name='test_table', description='my test table')
        test_table.add_column('dynamic_column', 'this is a dynamic column')
        test_table.add_row(
            my_col=3.0, indexed_col=[1.0, 3.0], dynamic_column=4, optional_col2=.5,
        )
        test_table.add_row(
            my_col=4.0, indexed_col=[2.0, 4.0], dynamic_column=4, optional_col2=.5,
        )
        # indexed column data is stored flattened in the target VectorData
        np.testing.assert_array_equal(test_table['indexed_col'].target.data, [1., 3., 2., 4.])
        np.testing.assert_array_equal(test_table['dynamic_column'].data, [4, 4])

    def test_optional_col(self):
        test_table = self.TestTable(name='test_table', description='my test table')
        test_table.add_row(my_col=3.0, indexed_col=[1.0, 3.0], optional_col2=.5)
        test_table.add_row(my_col=4.0, indexed_col=[2.0, 4.0], optional_col2=.5)

    def test_dynamic_table_region(self):
        test_table = self.TestTable(name='test_table', description='my test table')
        test_table.add_row(my_col=3.0, indexed_col=[1.0, 3.0], optional_col2=.5)
        test_table.add_row(my_col=4.0, indexed_col=[2.0, 4.0], optional_col2=.5)

        # target_tables wires each DynamicTableRegion column to its target table
        test_dtr_table = self.TestDTRTable(name='test_dtr_table', description='my table',
                                           target_tables={'ref_col': test_table,
                                                          'indexed_ref_col': test_table})
        self.assertIs(test_dtr_table['ref_col'].table, test_table)
        self.assertIs(test_dtr_table['indexed_ref_col'].target.table, test_table)

        test_dtr_table.add_row(ref_col=0, indexed_ref_col=[0, 1])
        test_dtr_table.add_row(ref_col=0, indexed_ref_col=[0, 1])
        np.testing.assert_array_equal(test_dtr_table['indexed_ref_col'].target.data, [0, 1, 0, 1])
        np.testing.assert_array_equal(test_dtr_table['ref_col'].data, [0, 0])

    def test_dynamic_table_region_optional(self):
        test_table = self.TestTable(name='test_table', description='my test table')
        test_table.add_row(my_col=3.0, indexed_col=[1.0, 3.0], optional_col2=.5)
        test_table.add_row(my_col=4.0, indexed_col=[2.0, 4.0], optional_col2=.5)

        test_dtr_table = self.TestDTRTable(name='test_dtr_table', description='my table',
                                           target_tables={'optional_ref_col': test_table,
                                                          'optional_indexed_ref_col': test_table})
        self.assertIs(test_dtr_table['optional_ref_col'].table, test_table)
        self.assertIs(test_dtr_table['optional_indexed_ref_col'].target.table, test_table)

        test_dtr_table.add_row(ref_col=0, indexed_ref_col=[0, 1],
                               optional_ref_col=0, optional_indexed_ref_col=[0, 1])
        test_dtr_table.add_row(ref_col=0, indexed_ref_col=[0, 1],
                               optional_ref_col=0, optional_indexed_ref_col=[0, 1])
        np.testing.assert_array_equal(test_dtr_table['optional_indexed_ref_col'].target.data,
                                      [0, 1, 0, 1])
        np.testing.assert_array_equal(test_dtr_table['optional_ref_col'].data, [0, 0])

    def test_dynamic_table_region_bad_target_col(self):
        test_table = self.TestTable(name='test_table', description='my test table')
        test_table.add_row(my_col=3.0, indexed_col=[1.0, 3.0], optional_col2=.5)
        test_table.add_row(my_col=4.0, indexed_col=[2.0, 4.0], optional_col2=.5)

        # target_tables keys must name predefined columns
        msg = r"^'bad' is not the name of a predefined column of table .*"
        with self.assertRaisesRegex(ValueError, msg):
            self.TestDTRTable(name='test_dtr_table', description='my table',
                              target_tables={'bad': test_table})

    def test_dynamic_table_region_non_dtr_target(self):
        test_table = self.TestTable(name='test_table', description='my test table')
        test_table.add_row(my_col=3.0, indexed_col=[1.0, 3.0], optional_col2=.5)
        test_table.add_row(my_col=4.0, indexed_col=[2.0, 4.0], optional_col2=.5)

        # a target table may only be set on a DynamicTableRegion column
        msg = "Column 'optional_col3' must be a DynamicTableRegion to have a target table."
        with self.assertRaisesWith(ValueError, msg):
            self.TestDTRTable(name='test_dtr_table', description='my table',
                              target_tables={'optional_col3': test_table})

    def test_roundtrip(self):
        # NOTE this does not use H5RoundTripMixin because this requires custom validation
        test_table = self.TestTable(name='test_table', description='my test table')
        test_table.add_column('dynamic_column', 'this is a dynamic column')
        test_table.add_row(
            my_col=3.0, indexed_col=[1.0, 3.0], dynamic_column=4, optional_col2=.5,
        )

        self.filename = os.path.join(self.test_dir, 'test_TestTable.h5')

        with HDF5IO(self.filename, manager=self.manager, mode='w') as write_io:
            write_io.write(test_table, cache_spec=True)

        self.reader = HDF5IO(self.filename, manager=self.manager, mode='r')
        read_container = self.reader.read()

        self.assertIsNotNone(str(test_table))  # added as a test to make sure printing works
        self.assertIsNotNone(str(read_container))
        # make sure we get a completely new object
        self.assertNotEqual(id(test_table), id(read_container))
        # the name of the root container of a file is always 'root' (see h5tools.py ROOT_NAME)
        # thus, ignore the name of the container when comparing original container vs read container
        self.assertContainerEqual(read_container, test_table, ignore_name=True)

        builder = self.reader.read_builder()
        # TODO fix ValueError: No specification for 'Container' in namespace 'test_core'
        validator = ValidatorMap(self.manager.namespace_catalog.get_namespace(name=CORE_NAMESPACE))
        errors = validator.validate(builder)
        if errors:
            for err in errors:
                raise Exception(err)
        self.reader.close()
class TestDataMap(BazSpecMixin, TestCase):
    """Default ObjectMapper build/append/extend behavior for a fixed-name Baz dataset spec."""

    def setUp(self):
        # Build the fixture in dependency order: spec -> catalog -> namespace ->
        # namespace catalog -> type map -> build manager -> mapper.
        self.setUpBazSpec()
        self.spec_catalog = SpecCatalog()
        self.spec_catalog.register_spec(self.baz_spec, 'test.yaml')
        self.namespace = SpecNamespace('a test namespace', CORE_NAMESPACE, [{'source': 'test.yaml'}],
                                       version='0.1.0', catalog=self.spec_catalog)
        self.namespace_catalog = NamespaceCatalog()
        self.namespace_catalog.add_namespace(CORE_NAMESPACE, self.namespace)
        self.type_map = TypeMap(self.namespace_catalog)
        self.type_map.register_container_type(CORE_NAMESPACE, 'Baz', Baz)
        self.type_map.register_map(Baz, ObjectMapper)
        self.manager = BuildManager(self.type_map)
        self.mapper = ObjectMapper(self.baz_spec)

    def setUpBazSpec(self):
        # A 1-D int dataset with the fixed name 'MyBaz' and one text attribute.
        self.baz_spec = DatasetSpec(
            doc='an Baz type',
            dtype='int',
            name='MyBaz',
            data_type_def='Baz',
            shape=[None],
            attributes=[AttributeSpec('baz_attr', 'an example string attribute', 'text')])

    def test_build(self):
        ''' Test default mapping functionality when no attributes are nested '''
        container = Baz('MyBaz', list(range(10)), 'abcdefghijklmnopqrstuvwxyz')
        builder = self.mapper.build(container, self.manager)
        expected = DatasetBuilder('MyBaz', list(range(10)),
                                  attributes={'baz_attr': 'abcdefghijklmnopqrstuvwxyz'})
        self.assertBuilderEqual(builder, expected)

    def test_build_empty_data(self):
        """Test building of a Data object with empty data."""
        # Extend the fixture with a holder group that may contain any number of Baz datasets.
        baz_inc_spec = DatasetSpec(doc='doc', data_type_inc='Baz', quantity=ZERO_OR_MANY)
        baz_holder_spec = GroupSpec(doc='doc', data_type_def='BazHolder', datasets=[baz_inc_spec])
        self.spec_catalog.register_spec(baz_holder_spec, 'test.yaml')
        self.type_map.register_container_type(CORE_NAMESPACE, 'BazHolder', BazHolder)
        self.holder_mapper = ObjectMapper(baz_holder_spec)

        baz = Baz('MyBaz', [], 'abcdefghijklmnopqrstuvwxyz')
        holder = BazHolder('holder', [baz])

        builder = self.holder_mapper.build(holder, self.manager)
        # The nested (typed) dataset builder carries type/namespace/object_id attributes.
        expected = GroupBuilder(
            name='holder',
            datasets=[DatasetBuilder(name='MyBaz',
                                     data=[],
                                     attributes={'baz_attr': 'abcdefghijklmnopqrstuvwxyz',
                                                 'data_type': 'Baz',
                                                 'namespace': 'test_core',
                                                 'object_id': baz.object_id})])
        self.assertBuilderEqual(builder, expected)

    def test_append(self):
        # Appending to a Baz backed by a resizable HDF5 dataset grows the dataset in place.
        with h5py.File('test.h5', 'w') as file:
            test_ds = file.create_dataset('test_ds', data=[1, 2, 3], chunks=True, maxshape=(None, ))
            container = Baz('MyBaz', test_ds, 'abcdefghijklmnopqrstuvwxyz')
            container.append(4)
            np.testing.assert_array_equal(container[:], [1, 2, 3, 4])
        os.remove('test.h5')

    def test_extend(self):
        # Same as test_append but with a multi-element extend.
        with h5py.File('test.h5', 'w') as file:
            test_ds = file.create_dataset('test_ds', data=[1, 2, 3], chunks=True, maxshape=(None, ))
            container = Baz('MyBaz', test_ds, 'abcdefghijklmnopqrstuvwxyz')
            container.extend([4, 5])
            np.testing.assert_array_equal(container[:], [1, 2, 3, 4, 5])
        os.remove('test.h5')
class SpecCatalogTest(TestCase):
    """Tests for SpecCatalog registration, hierarchy/subtype queries, copying, and duplicate handling."""
    # NOTE(review): a class with this same name (subclassing unittest.TestCase) appears
    # later in this file; if both live in one module, the later one shadows this one
    # at import time and these tests will not run — confirm and rename one of them.

    def setUp(self):
        self.catalog = SpecCatalog()
        self.attributes = [
            AttributeSpec('attribute1', 'my first attribute', 'text'),
            AttributeSpec('attribute2', 'my second attribute', 'text')
        ]
        self.spec = DatasetSpec('my first dataset', 'int', name='dataset1',
                                dims=(None, None), attributes=self.attributes,
                                linkable=False, data_type_def='EphysData')

    def test_register_spec(self):
        # get_spec returns the identical spec object that was registered.
        self.catalog.register_spec(self.spec, 'test.yaml')
        result = self.catalog.get_spec('EphysData')
        self.assertIs(result, self.spec)

    def test_hierarchy(self):
        # get_hierarchy lists a type followed by its ancestors, root type last.
        spikes_spec = DatasetSpec('my extending dataset', 'int',
                                  data_type_inc='EphysData', data_type_def='SpikeData')
        lfp_spec = DatasetSpec('my second extending dataset', 'int',
                               data_type_inc='EphysData', data_type_def='LFPData')
        self.catalog.register_spec(self.spec, 'test.yaml')
        self.catalog.register_spec(spikes_spec, 'test.yaml')
        self.catalog.register_spec(lfp_spec, 'test.yaml')
        spike_hierarchy = self.catalog.get_hierarchy('SpikeData')
        lfp_hierarchy = self.catalog.get_hierarchy('LFPData')
        ephys_hierarchy = self.catalog.get_hierarchy('EphysData')
        self.assertTupleEqual(spike_hierarchy, ('SpikeData', 'EphysData'))
        self.assertTupleEqual(lfp_hierarchy, ('LFPData', 'EphysData'))
        self.assertTupleEqual(ephys_hierarchy, ('EphysData', ))

    def test_subtypes(self):
        """
        -BaseContainer--+-->AContainer--->ADContainer
                        |
                        +-->BContainer
        """
        base_spec = GroupSpec(doc='Base container', data_type_def='BaseContainer')
        acontainer = GroupSpec(doc='AContainer', data_type_inc='BaseContainer', data_type_def='AContainer')
        adcontainer = GroupSpec(doc='ADContainer', data_type_inc='AContainer', data_type_def='ADContainer')
        bcontainer = GroupSpec(doc='BContainer', data_type_inc='BaseContainer', data_type_def='BContainer')
        self.catalog.register_spec(base_spec, 'test.yaml')
        self.catalog.register_spec(acontainer, 'test.yaml')
        self.catalog.register_spec(adcontainer, 'test.yaml')
        self.catalog.register_spec(bcontainer, 'test.yaml')
        base_spec_subtypes = self.catalog.get_subtypes('BaseContainer')
        base_spec_subtypes = tuple(
            sorted(base_spec_subtypes))  # Sort so we have a guaranteed order for comparison
        acontainer_subtypes = self.catalog.get_subtypes('AContainer')
        bcontainer_substypes = self.catalog.get_subtypes('BContainer')
        adcontainer_subtypes = self.catalog.get_subtypes('ADContainer')
        self.assertTupleEqual(adcontainer_subtypes, ())
        self.assertTupleEqual(bcontainer_substypes, ())
        self.assertTupleEqual(acontainer_subtypes, ('ADContainer', ))
        # Default (recursive) lookup includes grandchildren such as ADContainer.
        self.assertTupleEqual(base_spec_subtypes, ('AContainer', 'ADContainer', 'BContainer'))

    def test_subtypes_norecursion(self):
        """
        -BaseContainer--+-->AContainer--->ADContainer
                        |
                        +-->BContainer
        """
        base_spec = GroupSpec(doc='Base container', data_type_def='BaseContainer')
        acontainer = GroupSpec(doc='AContainer', data_type_inc='BaseContainer', data_type_def='AContainer')
        adcontainer = GroupSpec(doc='ADContainer', data_type_inc='AContainer', data_type_def='ADContainer')
        bcontainer = GroupSpec(doc='BContainer', data_type_inc='BaseContainer', data_type_def='BContainer')
        self.catalog.register_spec(base_spec, 'test.yaml')
        self.catalog.register_spec(acontainer, 'test.yaml')
        self.catalog.register_spec(adcontainer, 'test.yaml')
        self.catalog.register_spec(bcontainer, 'test.yaml')
        base_spec_subtypes = self.catalog.get_subtypes('BaseContainer', recursive=False)
        base_spec_subtypes = tuple(
            sorted(base_spec_subtypes))  # Sort so we have a guaranteed order for comparison
        acontainer_subtypes = self.catalog.get_subtypes('AContainer', recursive=False)
        bcontainer_substypes = self.catalog.get_subtypes('BContainer', recursive=False)
        adcontainer_subtypes = self.catalog.get_subtypes('ADContainer', recursive=False)
        self.assertTupleEqual(adcontainer_subtypes, ())
        self.assertTupleEqual(bcontainer_substypes, ())
        self.assertTupleEqual(acontainer_subtypes, ('ADContainer', ))
        # Non-recursive lookup excludes the grandchild ADContainer.
        self.assertTupleEqual(base_spec_subtypes, ('AContainer', 'BContainer'))

    def test_subtypes_unknown_type(self):
        # Querying a type never registered returns an empty tuple rather than raising.
        subtypes_of_bad_type = self.catalog.get_subtypes('UnknownType')
        self.assertTupleEqual(subtypes_of_bad_type, ())

    def test_get_spec_source_file(self):
        # auto_register records the file path; get_spec_source_file retrieves it by type name.
        spikes_spec = GroupSpec('test group', data_type_def='SpikeData')
        source_file_path = '/test/myt/test.yaml'
        self.catalog.auto_register(spikes_spec, source_file_path)
        recorded_source_file_path = self.catalog.get_spec_source_file('SpikeData')
        self.assertEqual(recorded_source_file_path, source_file_path)

    def test_get_full_hierarchy(self):
        """
        BaseContainer--+-->AContainer--->ADContainer
                       |
                       +-->BContainer

        Expected output:
        >> print(json.dumps(full_hierarchy, indent=4))
        >> {
        >>     "BaseContainer": {
        >>         "AContainer": {
        >>             "ADContainer": {}
        >>         },
        >>         "BContainer": {}
        >>     }
        """
        base_spec = GroupSpec(doc='Base container', data_type_def='BaseContainer')
        acontainer = GroupSpec(doc='AContainer', data_type_inc='BaseContainer', data_type_def='AContainer')
        adcontainer = GroupSpec(doc='ADContainer', data_type_inc='AContainer', data_type_def='ADContainer')
        bcontainer = GroupSpec(doc='BContainer', data_type_inc='BaseContainer', data_type_def='BContainer')
        self.catalog.register_spec(base_spec, 'test.yaml')
        self.catalog.register_spec(acontainer, 'test.yaml')
        self.catalog.register_spec(adcontainer, 'test.yaml')
        self.catalog.register_spec(bcontainer, 'test.yaml')
        full_hierarchy = self.catalog.get_full_hierarchy()
        expected_hierarchy = {
            "BaseContainer": {
                "AContainer": {
                    "ADContainer": {}
                },
                "BContainer": {}
            }
        }
        self.assertDictEqual(full_hierarchy, expected_hierarchy)

    def test_copy_spec_catalog(self):
        # Register the spec first
        self.catalog.register_spec(self.spec, 'test.yaml')
        result = self.catalog.get_spec('EphysData')
        self.assertIs(result, self.spec)
        # Now test the copy
        # (local name 're' shadows the stdlib module within this method body)
        re = copy.copy(self.catalog)
        self.assertTupleEqual(self.catalog.get_registered_types(), re.get_registered_types())

    def test_deepcopy_spec_catalog(self):
        # Register the spec first
        self.catalog.register_spec(self.spec, 'test.yaml')
        result = self.catalog.get_spec('EphysData')
        self.assertIs(result, self.spec)
        # Now test the copy
        re = copy.deepcopy(self.catalog)
        self.assertTupleEqual(self.catalog.get_registered_types(), re.get_registered_types())

    def test_catch_duplicate_spec_nested(self):
        # Re-registering the identical spec object (here via nesting) is allowed.
        spec1 = GroupSpec(
            data_type_def='Group1',
            doc='This is my new group 1',
        )
        spec2 = GroupSpec(
            data_type_def='Group2',
            doc='This is my new group 2',
            groups=[spec1],  # nested definition
        )
        source = 'test_extension.yaml'
        self.catalog.register_spec(spec1, source)
        self.catalog.register_spec(spec2, source)  # this is OK because Group1 is the same spec
        ret = self.catalog.get_spec('Group1')
        self.assertIs(ret, spec1)

    def test_catch_duplicate_spec_different(self):
        # Registering a different spec under an existing type name raises.
        spec1 = GroupSpec(
            data_type_def='Group1',
            doc='This is my new group 1',
        )
        spec2 = GroupSpec(
            data_type_def='Group1',
            doc='This is my other group 1',
        )
        source = 'test_extension.yaml'
        self.catalog.register_spec(spec1, source)
        msg = "'Group1' - cannot overwrite existing specification"
        with self.assertRaisesWith(ValueError, msg):
            self.catalog.register_spec(spec2, source)

    def test_catch_duplicate_spec_different_source(self):
        # Even an equal-looking spec from a different source file may not overwrite.
        spec1 = GroupSpec(
            data_type_def='Group1',
            doc='This is my new group 1',
        )
        spec2 = GroupSpec(
            data_type_def='Group1',
            doc='This is my new group 1',
        )
        source1 = 'test_extension1.yaml'
        source2 = 'test_extension2.yaml'
        self.catalog.register_spec(spec1, source1)
        msg = "'Group1' - cannot overwrite existing specification"
        with self.assertRaisesWith(ValueError, msg):
            self.catalog.register_spec(spec2, source2)
class TestDataMapScalarCompound(TestCase):
    """Construct behavior for a scalar dataset spec with a compound dtype."""

    def setUp(self):
        # The spec must exist before the catalog/namespace/type-map chain is assembled.
        self.setUpBazSpec()
        self.spec_catalog = SpecCatalog()
        self.spec_catalog.register_spec(self.baz_spec, 'test.yaml')
        self.namespace = SpecNamespace(
            'a test namespace', CORE_NAMESPACE, [{'source': 'test.yaml'}],
            version='0.1.0', catalog=self.spec_catalog)
        self.namespace_catalog = NamespaceCatalog()
        self.namespace_catalog.add_namespace(CORE_NAMESPACE, self.namespace)
        self.type_map = TypeMap(self.namespace_catalog)
        self.type_map.register_container_type(CORE_NAMESPACE, 'BazScalarCompound', BazScalarCompound)
        self.type_map.register_map(BazScalarCompound, ObjectMapper)
        self.manager = BuildManager(self.type_map)
        self.mapper = ObjectMapper(self.baz_spec)

    def setUpBazSpec(self):
        # Compound dtype with two fields: a uint64 'id' and a text 'attr1'.
        compound_fields = [
            DtypeSpec(name='id', dtype='uint64', doc='The unique identifier in this table.'),
            DtypeSpec(name='attr1', dtype='text', doc='A text attribute.'),
        ]
        self.baz_spec = DatasetSpec(
            doc='a BazScalarCompound type',
            dtype=compound_fields,
            name='MyBaz',
            data_type_def='BazScalarCompound',
        )

    def test_construct_scalar_compound_dataset(self):
        """Test construct on a compound h5py.Dataset with shape (1, ) for scalar spec does not resolve the data."""
        with h5py.File('test.h5', 'w') as f:
            compound_dtype = np.dtype([('id', np.uint64), ('attr1', h5py.special_dtype(vlen=str))])
            dset = f.create_dataset(name='test_ds',
                                    data=np.array((1, 'text'), dtype=compound_dtype),
                                    shape=(1, ),
                                    dtype=compound_dtype)
            expected = BazScalarCompound(
                name='MyBaz',
                data=(1, 'text'),
            )
            builder = DatasetBuilder(
                name='MyBaz',
                data=dset,
                attributes={
                    'data_type': 'BazScalarCompound',
                    'namespace': CORE_NAMESPACE,
                    'object_id': expected.object_id,
                },
            )
            constructed = self.mapper.construct(builder, self.manager)
            # The data attribute must still be the raw h5py.Dataset, not an unpacked tuple.
            self.assertEqual(type(constructed.data), h5py.Dataset)
            self.assertContainerEqual(constructed, expected)
        os.remove('test.h5')
class TestBaseProcessFieldSpec(TestCase):
    """Tests for CustomClassGenerator.process_field_spec and post_process docval generation."""

    def setUp(self):
        # Minimal namespace containing only the EmptyBar group type.
        self.bar_spec = GroupSpec(doc='A test group specification with a data type',
                                  data_type_def='EmptyBar')
        self.spec_catalog = SpecCatalog()
        self.spec_catalog.register_spec(self.bar_spec, 'test.yaml')
        self.namespace = SpecNamespace('a test namespace', CORE_NAMESPACE, [{'source': 'test.yaml'}],
                                       version='0.1.0', catalog=self.spec_catalog)
        self.namespace_catalog = NamespaceCatalog()
        self.namespace_catalog.add_namespace(CORE_NAMESPACE, self.namespace)
        self.type_map = TypeMap(self.namespace_catalog)
        self.type_map.register_container_type(CORE_NAMESPACE, 'EmptyBar', EmptyBar)

    def test_update_docval(self):
        """Test update_docval_args for a variety of data types and mapping configurations."""
        spec = GroupSpec(
            doc="A test group specification with a data type",
            data_type_def="Baz",
            groups=[GroupSpec(doc="a group", data_type_inc="EmptyBar", quantity="?")],
            datasets=[
                DatasetSpec(
                    doc="a dataset",
                    dtype="int",
                    name="data",
                    attributes=[AttributeSpec(name="attr2", doc="an integer attribute", dtype="int")],
                )
            ],
            attributes=[
                AttributeSpec(name="attr1", doc="a string attribute", dtype="text"),
                AttributeSpec(name="attr3", doc="a numeric attribute", dtype="numeric"),
                AttributeSpec(name="attr4", doc="a float attribute", dtype="float"),
            ],
        )

        # Expected docval entries in processing order; spec dtypes map to tuples of
        # accepted Python/NumPy types, and the optional typed group maps to the
        # registered container class with default None.
        expected = [
            {'name': 'data', 'type': (int, np.int32, np.int64), 'doc': 'a dataset'},
            {'name': 'attr1', 'type': str, 'doc': 'a string attribute'},
            {'name': 'attr2', 'type': (int, np.int32, np.int64), 'doc': 'an integer attribute'},
            {'name': 'attr3', 'doc': 'a numeric attribute',
             'type': (float, np.float32, np.float64, np.int8, np.int16, np.int32, np.int64,
                      int, np.uint8, np.uint16, np.uint32, np.uint64)},
            {'name': 'attr4', 'doc': 'a float attribute', 'type': (float, np.float32, np.float64)},
            {'name': 'bar', 'type': EmptyBar, 'doc': 'a group', 'default': None},
        ]

        not_inherited_fields = {
            'data': spec.get_dataset('data'),
            'attr1': spec.get_attribute('attr1'),
            'attr2': spec.get_dataset('data').get_attribute('attr2'),
            'attr3': spec.get_attribute('attr3'),
            'attr4': spec.get_attribute('attr4'),
            'bar': spec.groups[0]
        }

        docval_args = list()
        for i, attr_name in enumerate(not_inherited_fields):
            with self.subTest(attr_name=attr_name):
                CustomClassGenerator.process_field_spec(
                    classdict={},
                    docval_args=docval_args,
                    parent_cls=EmptyBar,  # <-- arbitrary class
                    attr_name=attr_name,
                    not_inherited_fields=not_inherited_fields,
                    type_map=self.type_map,
                    spec=spec)
                # compare with the first i elements of expected
                self.assertListEqual(docval_args, expected[:(i + 1)])

    def test_update_docval_attr_shape(self):
        """Test that update_docval_args for an attribute with shape sets the type and shape keys."""
        spec = GroupSpec(doc='A test group specification with a data type',
                         data_type_def='Baz',
                         attributes=[
                             AttributeSpec(name='attr1', doc='a string attribute',
                                           dtype='text', shape=[None])
                         ])
        not_inherited_fields = {'attr1': spec.get_attribute('attr1')}
        docval_args = list()
        CustomClassGenerator.process_field_spec(
            classdict={},
            docval_args=docval_args,
            parent_cls=EmptyBar,  # <-- arbitrary class
            attr_name='attr1',
            not_inherited_fields=not_inherited_fields,
            type_map=TypeMap(),
            spec=spec)
        # A shaped text attribute becomes an array-like docval arg, not str.
        expected = [{'name': 'attr1',
                     'type': ('array_data', 'data'),
                     'doc': 'a string attribute',
                     'shape': [None]}]
        self.assertListEqual(docval_args, expected)

    def test_update_docval_dset_shape(self):
        """Test that update_docval_args for a dataset with shape sets the type and shape keys."""
        spec = GroupSpec(doc='A test group specification with a data type',
                         data_type_def='Baz',
                         datasets=[
                             DatasetSpec(name='dset1', doc='a string dataset',
                                         dtype='text', shape=[None])
                         ])
        not_inherited_fields = {'dset1': spec.get_dataset('dset1')}
        docval_args = list()
        CustomClassGenerator.process_field_spec(
            classdict={},
            docval_args=docval_args,
            parent_cls=EmptyBar,  # <-- arbitrary class
            attr_name='dset1',
            not_inherited_fields=not_inherited_fields,
            type_map=TypeMap(),
            spec=spec)
        expected = [{'name': 'dset1',
                     'type': ('array_data', 'data'),
                     'doc': 'a string dataset',
                     'shape': [None]}]
        self.assertListEqual(docval_args, expected)

    def test_update_docval_default_value(self):
        """Test that update_docval_args for an optional field with default value sets the default key."""
        spec = GroupSpec(doc='A test group specification with a data type',
                         data_type_def='Baz',
                         attributes=[
                             AttributeSpec(name='attr1', doc='a string attribute', dtype='text',
                                           required=False, default_value='value')
                         ])
        not_inherited_fields = {'attr1': spec.get_attribute('attr1')}
        docval_args = list()
        CustomClassGenerator.process_field_spec(
            classdict={},
            docval_args=docval_args,
            parent_cls=EmptyBar,  # <-- arbitrary class
            attr_name='attr1',
            not_inherited_fields=not_inherited_fields,
            type_map=TypeMap(),
            spec=spec)
        expected = [{'name': 'attr1',
                     'type': str,
                     'doc': 'a string attribute',
                     'default': 'value'}]
        self.assertListEqual(docval_args, expected)

    def test_update_docval_default_value_none(self):
        """Test that update_docval_args for an optional field sets default: None."""
        spec = GroupSpec(doc='A test group specification with a data type',
                         data_type_def='Baz',
                         attributes=[
                             AttributeSpec(name='attr1', doc='a string attribute', dtype='text',
                                           required=False)
                         ])
        not_inherited_fields = {'attr1': spec.get_attribute('attr1')}
        docval_args = list()
        CustomClassGenerator.process_field_spec(
            classdict={},
            docval_args=docval_args,
            parent_cls=EmptyBar,  # <-- arbitrary class
            attr_name='attr1',
            not_inherited_fields=not_inherited_fields,
            type_map=TypeMap(),
            spec=spec)
        expected = [{'name': 'attr1',
                     'type': str,
                     'doc': 'a string attribute',
                     'default': None}]
        self.assertListEqual(docval_args, expected)

    def test_update_docval_default_value_none_required_parent(self):
        """Test that update_docval_args for an optional field with a required parent sets default: None."""
        spec = GroupSpec(doc='A test group specification with a data type',
                         data_type_def='Baz',
                         groups=[
                             GroupSpec(name='group1',
                                       doc='required untyped group',
                                       attributes=[
                                           AttributeSpec(name='attr1',
                                                         doc='a string attribute',
                                                         dtype='text',
                                                         required=False)
                                       ])
                         ])
        not_inherited_fields = {'attr1': spec.get_group('group1').get_attribute('attr1')}
        docval_args = list()
        CustomClassGenerator.process_field_spec(
            classdict={},
            docval_args=docval_args,
            parent_cls=EmptyBar,  # <-- arbitrary class
            attr_name='attr1',
            not_inherited_fields=not_inherited_fields,
            type_map=TypeMap(),
            spec=spec)
        expected = [{'name': 'attr1',
                     'type': str,
                     'doc': 'a string attribute',
                     'default': None}]
        self.assertListEqual(docval_args, expected)

    def test_update_docval_required_field_optional_parent(self):
        """Test that update_docval_args for a required field with an optional parent sets default: None."""
        spec = GroupSpec(doc='A test group specification with a data type',
                         data_type_def='Baz',
                         groups=[
                             GroupSpec(name='group1',
                                       doc='required untyped group',
                                       attributes=[
                                           AttributeSpec(name='attr1',
                                                         doc='a string attribute',
                                                         dtype='text')
                                       ],
                                       quantity='?')
                         ])
        not_inherited_fields = {'attr1': spec.get_group('group1').get_attribute('attr1')}
        docval_args = list()
        CustomClassGenerator.process_field_spec(
            classdict={},
            docval_args=docval_args,
            parent_cls=EmptyBar,  # <-- arbitrary class
            attr_name='attr1',
            not_inherited_fields=not_inherited_fields,
            type_map=TypeMap(),
            spec=spec)
        expected = [{'name': 'attr1',
                     'type': str,
                     'doc': 'a string attribute',
                     'default': None}]
        self.assertListEqual(docval_args, expected)

    def test_process_field_spec_overwrite(self):
        """Test that docval generation overwrites previous docval args."""
        spec = GroupSpec(doc='A test group specification with a data type',
                         data_type_def='Baz',
                         attributes=[
                             AttributeSpec(name='attr1', doc='a string attribute',
                                           dtype='text', shape=[None])
                         ])
        not_inherited_fields = {'attr1': spec.get_attribute('attr1')}
        docval_args = [
            {'name': 'attr1',
             'type': ('array_data', 'data'),
             'doc': 'a string attribute',
             'shape': [[None], [None, None]]},  # this dict will be overwritten below
            {'name': 'attr2',
             'type': ('array_data', 'data'),
             'doc': 'a string attribute',
             'shape': [[None], [None, None]]}
        ]
        CustomClassGenerator.process_field_spec(
            classdict={},
            docval_args=docval_args,
            parent_cls=EmptyBar,  # <-- arbitrary class
            attr_name='attr1',
            not_inherited_fields=not_inherited_fields,
            type_map=TypeMap(),
            spec=spec)
        # attr1's entry is replaced in place; attr2's entry is untouched.
        expected = [{'name': 'attr1',
                     'type': ('array_data', 'data'),
                     'doc': 'a string attribute',
                     'shape': [None]},
                    {'name': 'attr2',
                     'type': ('array_data', 'data'),
                     'doc': 'a string attribute',
                     'shape': [[None], [None, None]]}]
        self.assertListEqual(docval_args, expected)

    def test_process_field_spec_link(self):
        """Test that processing a link spec does not set child=True in __fields__."""
        classdict = {}
        not_inherited_fields = {'attr3': LinkSpec(name='attr3', target_type='EmptyBar', doc='a link')}
        CustomClassGenerator.process_field_spec(
            classdict=classdict,
            docval_args=[],
            parent_cls=EmptyBar,  # <-- arbitrary class
            attr_name='attr3',
            not_inherited_fields=not_inherited_fields,
            type_map=self.type_map,
            spec=GroupSpec('dummy', 'doc'))
        expected = {'__fields__': [{'name': 'attr3', 'doc': 'a link'}]}
        self.assertDictEqual(classdict, expected)

    def test_post_process_fixed_name(self):
        """Test that docval generation for a class with a fixed name does not contain a docval arg for name."""
        spec = GroupSpec(
            doc='A test group specification with a data type',
            data_type_def='Baz',
            name='MyBaz',  # <-- fixed name
            attributes=[
                AttributeSpec(name='attr1', doc='a string attribute', dtype='text', shape=[None])
            ])
        classdict = {}
        bases = [Container]
        docval_args = [{'name': 'name', 'type': str, 'doc': 'name'},
                       {'name': 'attr1',
                        'type': ('array_data', 'data'),
                        'doc': 'a string attribute',
                        'shape': [None]}]
        CustomClassGenerator.post_process(classdict, bases, docval_args, spec)
        # The 'name' entry is removed because the spec fixes the name.
        expected = [{'name': 'attr1',
                     'type': ('array_data', 'data'),
                     'doc': 'a string attribute',
                     'shape': [None]}]
        self.assertListEqual(docval_args, expected)

    def test_post_process_default_name(self):
        """Test that docval generation for a class with a default name has the default value for name set."""
        spec = GroupSpec(
            doc='A test group specification with a data type',
            data_type_def='Baz',
            default_name='MyBaz',  # <-- default name
            attributes=[
                AttributeSpec(name='attr1', doc='a string attribute', dtype='text', shape=[None])
            ])
        classdict = {}
        bases = [Container]
        docval_args = [{'name': 'name', 'type': str, 'doc': 'name'},
                       {'name': 'attr1',
                        'type': ('array_data', 'data'),
                        'doc': 'a string attribute',
                        'shape': [None]}]
        CustomClassGenerator.post_process(classdict, bases, docval_args, spec)
        # The 'name' entry is kept and gains the spec's default_name as its default.
        expected = [{'name': 'name', 'type': str, 'doc': 'name', 'default': 'MyBaz'},
                    {'name': 'attr1',
                     'type': ('array_data', 'data'),
                     'doc': 'a string attribute',
                     'shape': [None]}]
        self.assertListEqual(docval_args, expected)
class TestDataMap(TestCase):
    """Build/append/extend behavior of the default ObjectMapper for a simple Baz dataset spec."""
    # NOTE(review): another class named TestDataMap is defined earlier in this file;
    # if both live in one module, this later definition shadows the earlier one and
    # its tests never run — confirm whether one of the two should be renamed.

    def setUp(self):
        # Assemble the fixture in dependency order: spec, catalog, namespace,
        # namespace catalog, type map, build manager, mapper.
        self.setUpBazSpec()
        self.spec_catalog = SpecCatalog()
        self.spec_catalog.register_spec(self.baz_spec, 'test.yaml')
        self.namespace = SpecNamespace(
            'a test namespace', CORE_NAMESPACE, [{'source': 'test.yaml'}],
            version='0.1.0', catalog=self.spec_catalog)
        self.namespace_catalog = NamespaceCatalog()
        self.namespace_catalog.add_namespace(CORE_NAMESPACE, self.namespace)
        self.type_map = TypeMap(self.namespace_catalog)
        self.type_map.register_container_type(CORE_NAMESPACE, 'Baz', Baz)
        self.type_map.register_map(Baz, ObjectMapper)
        self.manager = BuildManager(self.type_map)
        self.mapper = ObjectMapper(self.baz_spec)

    def setUpBazSpec(self):
        # 1-D int dataset named 'MyBaz' with a single text attribute.
        baz_attr = AttributeSpec('baz_attr', 'an example string attribute', 'text')
        self.baz_spec = DatasetSpec('an Baz type', 'int', name='MyBaz',
                                    data_type_def='Baz', shape=[None],
                                    attributes=[baz_attr])

    def test_build(self):
        """Default mapping with no nested attributes produces the expected DatasetBuilder."""
        baz = Baz('my_baz', list(range(10)), 'abcdefghijklmnopqrstuvwxyz')
        result = self.mapper.build(baz, self.manager)
        expected_builder = DatasetBuilder('my_baz', list(range(10)),
                                          attributes={'baz_attr': 'abcdefghijklmnopqrstuvwxyz'})
        # NOTE(review): dict-level comparison; the analogous test elsewhere in this
        # file uses assertBuilderEqual — confirm which comparison is intended.
        self.assertDictEqual(result, expected_builder)

    def test_append(self):
        """Appending to a Baz backed by a resizable HDF5 dataset grows it in place."""
        with h5py.File('test.h5', 'w') as f:
            dset = f.create_dataset('test_ds', data=[1, 2, 3], chunks=True, maxshape=(None, ))
            baz = Baz('my_baz', dset, 'abcdefghijklmnopqrstuvwxyz')
            baz.append(4)
            np.testing.assert_array_equal(baz[:], [1, 2, 3, 4])
        os.remove('test.h5')

    def test_extend(self):
        """Extending a Baz backed by a resizable HDF5 dataset grows it in place."""
        with h5py.File('test.h5', 'w') as f:
            dset = f.create_dataset('test_ds', data=[1, 2, 3], chunks=True, maxshape=(None, ))
            baz = Baz('my_baz', dset, 'abcdefghijklmnopqrstuvwxyz')
            baz.extend([4, 5])
            np.testing.assert_array_equal(baz[:], [1, 2, 3, 4, 5])
        os.remove('test.h5')
class TestReference(TestCase):
    """Tests building object references stored in attributes (Bar's optional 'foo' RefSpec)."""

    def setUp(self):
        # Foo is a bare typed group; Bar adds a dataset, two scalar attributes,
        # and an optional object-reference attribute targeting a Foo.
        self.foo_spec = GroupSpec('A test group specification with data type Foo',
                                  data_type_def='Foo')
        self.bar_spec = GroupSpec('A test group specification with a data type Bar',
                                  data_type_def='Bar',
                                  datasets=[DatasetSpec('an example dataset', 'int', name='data')],
                                  attributes=[
                                      AttributeSpec('attr1', 'an example string attribute', 'text'),
                                      AttributeSpec('attr2', 'an example integer attribute', 'int'),
                                      AttributeSpec('foo', 'a referenced foo',
                                                    RefSpec('Foo', 'object'),
                                                    required=False)
                                  ])
        self.spec_catalog = SpecCatalog()
        self.spec_catalog.register_spec(self.foo_spec, 'test.yaml')
        self.spec_catalog.register_spec(self.bar_spec, 'test.yaml')
        self.namespace = SpecNamespace('a test namespace', CORE_NAMESPACE, [{'source': 'test.yaml'}],
                                       version='0.1.0', catalog=self.spec_catalog)
        self.namespace_catalog = NamespaceCatalog()
        self.namespace_catalog.add_namespace(CORE_NAMESPACE, self.namespace)
        self.type_map = TypeMap(self.namespace_catalog)
        self.type_map.register_container_type(CORE_NAMESPACE, 'Foo', Foo)
        self.type_map.register_container_type(CORE_NAMESPACE, 'Bar', Bar)
        self.manager = BuildManager(self.type_map)
        self.foo_mapper = ObjectMapper(self.foo_spec)
        self.bar_mapper = ObjectMapper(self.bar_spec)

    def test_build_attr_ref(self):
        ''' Test default mapping functionality when one container contains an attribute reference to another
        container. '''
        foo_inst = Foo('my_foo')
        bar_inst1 = Bar('my_bar1', list(range(10)), 'value1', 10, foo=foo_inst)
        bar_inst2 = Bar('my_bar2', list(range(10)), 'value1', 10)

        foo_builder = self.manager.build(foo_inst, root=True)
        bar1_builder = self.manager.build(bar_inst1, root=True)  # adds refs
        bar2_builder = self.manager.build(bar_inst2, root=True)

        foo_expected = GroupBuilder('my_foo',
                                    attributes={'data_type': 'Foo',
                                                'namespace': CORE_NAMESPACE,
                                                'object_id': foo_inst.object_id})
        # bar1 has its optional 'foo' attribute set, so its builder carries a
        # ReferenceBuilder to the foo builder; bar2 omits the attribute entirely.
        bar1_expected = GroupBuilder(
            'n/a',  # name doesn't matter
            datasets={'data': DatasetBuilder('data', list(range(10)))},
            attributes={'attr1': 'value1',
                        'attr2': 10,
                        'foo': ReferenceBuilder(foo_expected),
                        'data_type': 'Bar',
                        'namespace': CORE_NAMESPACE,
                        'object_id': bar_inst1.object_id})
        bar2_expected = GroupBuilder(
            'n/a',  # name doesn't matter
            datasets={'data': DatasetBuilder('data', list(range(10)))},
            attributes={'attr1': 'value1',
                        'attr2': 10,
                        'data_type': 'Bar',
                        'namespace': CORE_NAMESPACE,
                        'object_id': bar_inst2.object_id})
        # NOTE(review): dict-level comparison looks deliberate here — it ignores
        # builder names (hence the 'n/a' placeholders above); verify before
        # swapping in assertBuilderEqual, which would also compare names.
        self.assertDictEqual(foo_builder, foo_expected)
        self.assertDictEqual(bar1_builder, bar1_expected)
        self.assertDictEqual(bar2_builder, bar2_expected)

    def test_build_attr_ref_invalid(self):
        ''' Test default mapping functionality when one container contains an attribute reference to another
        container. '''
        bar_inst1 = Bar('my_bar1', list(range(10)), 'value1', 10)
        bar_inst1._Bar__foo = object()  # make foo object a non-container type
        msg = "invalid type for reference 'foo' (<class 'object'>) - must be AbstractContainer"
        with self.assertRaisesWith(ValueError, msg):
            self.bar_mapper.build(bar_inst1, self.manager)
class TestLinkedContainer(TestCase):
    """Tests building containers that share a child: first build owns the group, later builds link."""

    def setUp(self):
        # Bar's spec nests the Foo spec as a subgroup, plus a dataset and attributes.
        self.foo_spec = GroupSpec('A test group specification with data type Foo',
                                  data_type_def='Foo')
        self.bar_spec = GroupSpec('A test group specification with a data type Bar',
                                  data_type_def='Bar',
                                  groups=[self.foo_spec],
                                  datasets=[DatasetSpec('an example dataset', 'int', name='data')],
                                  attributes=[
                                      AttributeSpec('attr1', 'an example string attribute', 'text'),
                                      AttributeSpec('attr2', 'an example integer attribute', 'int')
                                  ])
        self.spec_catalog = SpecCatalog()
        self.spec_catalog.register_spec(self.foo_spec, 'test.yaml')
        self.spec_catalog.register_spec(self.bar_spec, 'test.yaml')
        self.namespace = SpecNamespace('a test namespace', CORE_NAMESPACE, [{'source': 'test.yaml'}],
                                       version='0.1.0', catalog=self.spec_catalog)
        self.namespace_catalog = NamespaceCatalog()
        self.namespace_catalog.add_namespace(CORE_NAMESPACE, self.namespace)
        self.type_map = TypeMap(self.namespace_catalog)
        self.type_map.register_container_type(CORE_NAMESPACE, 'Foo', Foo)
        self.type_map.register_container_type(CORE_NAMESPACE, 'Bar', Bar)
        self.manager = BuildManager(self.type_map)
        self.foo_mapper = ObjectMapper(self.foo_spec)
        self.bar_mapper = ObjectMapper(self.bar_spec)

    def test_build_child_link(self):
        ''' Test default mapping functionality when one container contains a child link to another container '''
        foo_inst = Foo('my_foo')
        bar_inst1 = Bar('my_bar1', list(range(10)), 'value1', 10, foo=foo_inst)
        # bar_inst2.foo should link to bar_inst1.foo
        bar_inst2 = Bar('my_bar2', list(range(10)), 'value1', 10, foo=foo_inst)

        foo_builder = self.foo_mapper.build(foo_inst, self.manager)
        bar1_builder = self.bar_mapper.build(bar_inst1, self.manager)
        bar2_builder = self.bar_mapper.build(bar_inst2, self.manager)

        foo_expected = GroupBuilder('my_foo')
        inner_foo_builder = GroupBuilder('my_foo',
                                         attributes={'data_type': 'Foo',
                                                     'namespace': CORE_NAMESPACE,
                                                     'object_id': foo_inst.object_id})
        # The first Bar owns foo as a subgroup; the second gets a LinkBuilder to it.
        bar1_expected = GroupBuilder('my_bar1',
                                     datasets={'data': DatasetBuilder('data', list(range(10)))},
                                     groups={'foo': inner_foo_builder},
                                     attributes={'attr1': 'value1',
                                                 'attr2': 10})
        link_foo_builder = LinkBuilder(builder=inner_foo_builder)
        bar2_expected = GroupBuilder('my_bar2',
                                     datasets={'data': DatasetBuilder('data', list(range(10)))},
                                     links={'foo': link_foo_builder},
                                     attributes={'attr1': 'value1',
                                                 'attr2': 10})
        self.assertBuilderEqual(foo_builder, foo_expected)
        self.assertBuilderEqual(bar1_builder, bar1_expected)
        self.assertBuilderEqual(bar2_builder, bar2_expected)

    @unittest.expectedFailure
    def test_build_broken_link_parent(self):
        ''' Test that building a container with a broken link that has a parent raises an error. '''
        foo_inst = Foo('my_foo')
        Bar('my_bar1', list(range(10)), 'value1', 10, foo=foo_inst)  # foo_inst.parent is this bar
        # bar_inst2.foo should link to bar_inst1.foo
        bar_inst2 = Bar('my_bar2', list(range(10)), 'value1', 10, foo=foo_inst)

        # TODO bar_inst.foo.parent exists but is never built - this is a tricky edge case that should raise an error
        with self.assertRaises(OrphanContainerBuildError):
            self.bar_mapper.build(bar_inst2, self.manager)

    def test_build_broken_link_no_parent(self):
        ''' Test that building a container with a broken link that has no parent raises an error. '''
        foo_inst = Foo('my_foo')
        bar_inst1 = Bar('my_bar1', list(range(10)), 'value1', 10, foo=foo_inst)  # foo_inst.parent is this bar
        # bar_inst2.foo should link to bar_inst1.foo
        bar_inst2 = Bar('my_bar2', list(range(10)), 'value1', 10, foo=foo_inst)
        # Detach foo from bar_inst1 so the link target is never built anywhere.
        bar_inst1.remove_foo()

        msg = (
            "my_bar2 (my_bar2): Linked Foo 'my_foo' has no parent. Remove the link or ensure the linked container "
            "is added properly.")
        with self.assertRaisesWith(OrphanContainerBuildError, msg):
            self.bar_mapper.build(bar_inst2, self.manager)
class SpecCatalogTest(unittest.TestCase):
    """Unit tests for SpecCatalog: spec registration, type hierarchies, subtype
    queries, source-file lookup, and shallow/deep copying."""

    def setUp(self):
        self.catalog = SpecCatalog()
        self.attributes = [
            AttributeSpec('attribute1', 'my first attribute', 'text'),
            AttributeSpec('attribute2', 'my second attribute', 'text'),
        ]
        self.spec = DatasetSpec('my first dataset', 'int',
                                name='dataset1',
                                dims=(None, None),
                                attributes=self.attributes,
                                linkable=False,
                                data_type_def='EphysData')

    def _register_container_tree(self):
        """Register the type tree BaseContainer -> (AContainer -> ADContainer, BContainer)."""
        tree = (
            GroupSpec(doc='Base container', data_type_def='BaseContainer'),
            GroupSpec(doc='AContainer', data_type_inc='BaseContainer', data_type_def='AContainer'),
            GroupSpec(doc='ADContainer', data_type_inc='AContainer', data_type_def='ADContainer'),
            GroupSpec(doc='BContainer', data_type_inc='BaseContainer', data_type_def='BContainer'),
        )
        for spec in tree:
            self.catalog.register_spec(spec, 'test.yaml')

    def test_register_spec(self):
        """A registered spec is retrievable by its data type name."""
        self.catalog.register_spec(self.spec, 'test.yaml')
        self.assertIs(self.catalog.get_spec('EphysData'), self.spec)

    def test_hierarchy(self):
        """get_hierarchy walks from a type up through its ancestors."""
        spike_spec = DatasetSpec('my extending dataset', 'int',
                                 data_type_inc='EphysData',
                                 data_type_def='SpikeData')
        lfp_spec = DatasetSpec('my second extending dataset', 'int',
                               data_type_inc='EphysData',
                               data_type_def='LFPData')
        for spec in (self.spec, spike_spec, lfp_spec):
            self.catalog.register_spec(spec, 'test.yaml')
        self.assertTupleEqual(self.catalog.get_hierarchy('SpikeData'), ('SpikeData', 'EphysData'))
        self.assertTupleEqual(self.catalog.get_hierarchy('LFPData'), ('LFPData', 'EphysData'))
        self.assertTupleEqual(self.catalog.get_hierarchy('EphysData'), ('EphysData', ))

    def test_subtypes(self):
        """
        -BaseContainer--+-->AContainer--->ADContainer
                        |
                        +-->BContainer
        """
        self._register_container_tree()
        # Sort so we have a guaranteed order for comparison.
        base_subtypes = tuple(sorted(self.catalog.get_subtypes('BaseContainer')))
        self.assertTupleEqual(self.catalog.get_subtypes('ADContainer'), ())
        self.assertTupleEqual(self.catalog.get_subtypes('BContainer'), ())
        self.assertTupleEqual(self.catalog.get_subtypes('AContainer'), ('ADContainer', ))
        # Recursive (default) query includes the grandchild ADContainer.
        self.assertTupleEqual(base_subtypes, ('AContainer', 'ADContainer', 'BContainer'))

    def test_subtypes_norecursion(self):
        """
        -BaseContainer--+-->AContainer--->ADContainer
                        |
                        +-->BContainer
        """
        self._register_container_tree()
        # Sort so we have a guaranteed order for comparison.
        base_subtypes = tuple(sorted(self.catalog.get_subtypes('BaseContainer', recursive=False)))
        self.assertTupleEqual(self.catalog.get_subtypes('ADContainer', recursive=False), ())
        self.assertTupleEqual(self.catalog.get_subtypes('BContainer', recursive=False), ())
        self.assertTupleEqual(self.catalog.get_subtypes('AContainer', recursive=False), ('ADContainer', ))
        # Non-recursive query stops at direct children, so ADContainer is excluded.
        self.assertTupleEqual(base_subtypes, ('AContainer', 'BContainer'))

    def test_subtypes_unknown_type(self):
        """Querying subtypes of an unregistered type yields an empty tuple."""
        self.assertTupleEqual(self.catalog.get_subtypes('UnknownType'), ())

    def test_get_spec_source_file(self):
        """auto_register records the source file a spec came from."""
        spike_spec = GroupSpec('test group', data_type_def='SpikeData')
        source_file_path = '/test/myt/test.yaml'
        self.catalog.auto_register(spike_spec, source_file_path)
        self.assertEqual(self.catalog.get_spec_source_file('SpikeData'), source_file_path)

    def test_get_full_hierarchy(self):
        """
        BaseContainer--+-->AContainer--->ADContainer
                       |
                       +-->BContainer

        Expected output:
        >> print(json.dumps(full_hierarchy, indent=4))
        >> {
        >>    "BaseContainer": {
        >>        "AContainer": {
        >>            "ADContainer": {}
        >>        },
        >>        "BContainer": {}
        >> }
        """
        self._register_container_tree()
        expected_hierarchy = {
            "BaseContainer": {
                "AContainer": {
                    "ADContainer": {}
                },
                "BContainer": {}
            }
        }
        self.assertDictEqual(self.catalog.get_full_hierarchy(), expected_hierarchy)

    def test_copy_spec_catalog(self):
        # Register a spec so the copy has something to carry over.
        self.catalog.register_spec(self.spec, 'test.yaml')
        self.assertIs(self.catalog.get_spec('EphysData'), self.spec)
        # A shallow copy preserves the set of registered types.
        duplicate = copy.copy(self.catalog)
        self.assertTupleEqual(self.catalog.get_registered_types(),
                              duplicate.get_registered_types())

    def test_deepcopy_spec_catalog(self):
        # Register a spec so the copy has something to carry over.
        self.catalog.register_spec(self.spec, 'test.yaml')
        self.assertIs(self.catalog.get_spec('EphysData'), self.spec)
        # A deep copy also preserves the set of registered types.
        duplicate = copy.deepcopy(self.catalog)
        self.assertTupleEqual(self.catalog.get_registered_types(),
                              duplicate.get_registered_types())