class TestNestedBase(TestBase):
    """Abstract base for round-trip (build/construct) tests of a FooBucket
    that nests Foo sub-containers.

    Concrete subclasses must override ``setUpBucketBuilder`` and
    ``setUpBucketSpec`` to provide ``self.bucket_builder`` and
    ``self.bucket_spec`` respectively.
    """

    def setUp(self):
        # Exact-type identity check (not isinstance): only the abstract base
        # itself is skipped; concrete subclasses must run. `is` is the
        # correct comparison for type identity.
        if type(self) is TestNestedBase:
            raise unittest.SkipTest('Abstract Base Class')
        super(TestNestedBase, self).setUp()
        # Container under test: a bucket holding two Foo containers.
        self.foo_bucket = FooBucket('test_foo_bucket', [
            Foo('my_foo1', list(range(10)), 'value1', 10),
            Foo('my_foo2', list(range(10, 20)), 'value2', 20)
        ])
        # Expected builder for each nested Foo, keyed by container name.
        self.foo_builders = {
            'my_foo1': GroupBuilder(
                'my_foo1',
                datasets={'my_data': DatasetBuilder('my_data', list(range(10)),
                                                    attributes={'attr2': 10})},
                attributes={'attr1': 'value1', 'namespace': CORE_NAMESPACE, 'data_type': 'Foo'}),
            'my_foo2': GroupBuilder(
                'my_foo2',
                datasets={'my_data': DatasetBuilder('my_data', list(range(10, 20)),
                                                    attributes={'attr2': 20})},
                attributes={'attr1': 'value2', 'namespace': CORE_NAMESPACE, 'data_type': 'Foo'})
        }
        # Subclass hooks supply self.bucket_builder / self.bucket_spec.
        self.setUpBucketBuilder()
        self.setUpBucketSpec()
        self.spec_catalog.register_spec(self.bucket_spec, 'test.yaml')
        self.type_map.register_container_type(CORE_NAMESPACE, 'FooBucket', FooBucket)
        self.type_map.register_map(FooBucket, ObjectMapper)
        self.manager = BuildManager(self.type_map)

    def setUpBucketBuilder(self):
        # Hook: subclasses set self.bucket_builder here.
        pass

    def setUpBucketSpec(self):
        # Hook: subclasses set self.bucket_spec here.
        pass

    def test_build(self):
        ''' Test default mapping for a Container that has a Container as an attribute value '''
        builder = self.manager.build(self.foo_bucket)
        self.assertDictEqual(builder, self.bucket_builder)

    def test_construct(self):
        # Inverse direction: builder -> container must equal the original bucket.
        container = self.manager.construct(self.bucket_builder)
        self.assertEqual(container, self.foo_bucket)
def setUp(self):
    """Create the FooBucket fixture and the expected per-Foo builders,
    then wire the type map and build manager for the bucket type."""
    super(TestNestedBase, self).setUp()
    self.foo_bucket = FooBucket('test_foo_bucket', [
        Foo('my_foo1', list(range(10)), 'value1', 10),
        Foo('my_foo2', list(range(10, 20)), 'value2', 20)])
    # Build the expected GroupBuilder for each Foo from a parameter table
    # instead of spelling out two near-identical literals.
    self.foo_builders = {}
    for foo_name, foo_data, str_attr, int_attr in (
            ('my_foo1', list(range(10)), 'value1', 10),
            ('my_foo2', list(range(10, 20)), 'value2', 20)):
        self.foo_builders[foo_name] = GroupBuilder(
            foo_name,
            datasets={'my_data': DatasetBuilder('my_data', foo_data,
                                                attributes={'attr2': int_attr})},
            attributes={'attr1': str_attr,
                        'namespace': CORE_NAMESPACE,
                        'data_type': 'Foo'})
    # Subclass hooks provide self.bucket_builder / self.bucket_spec.
    self.setUpBucketBuilder()
    self.setUpBucketSpec()
    self.spec_catalog.register_spec(self.bucket_spec, 'test.yaml')
    self.type_map.register_container_type(CORE_NAMESPACE, 'FooBucket', FooBucket)
    self.type_map.register_map(FooBucket, ObjectMapper)
    self.manager = BuildManager(self.type_map)
def setUp(self):
    """Define the Bar group spec and assemble the catalog/namespace/type-map
    chain plus the mapper used by the tests."""
    # Dataset 'data' with one integer attribute; the group itself carries
    # one string attribute.
    data_attr = AttributeSpec('attr2', 'an example integer attribute', 'int')
    data_spec = DatasetSpec('an example dataset', 'int', name='data',
                            attributes=[data_attr])
    group_attr = AttributeSpec('attr1', 'an example string attribute', 'str')
    self.bar_spec = GroupSpec('A test group specification with a data type',
                              data_type_def='Bar',
                              datasets=[data_spec],
                              attributes=[group_attr])
    # Register the spec under a source file name inside a fresh catalog.
    self.spec_catalog = SpecCatalog()
    self.spec_catalog.register_spec(self.bar_spec, 'test.yaml')
    self.namespace = SpecNamespace('a test namespace', CORE_NAMESPACE,
                                   [{'source': 'test.yaml'}],
                                   catalog=self.spec_catalog)
    self.namespace_catalog = NamespaceCatalog()
    self.namespace_catalog.add_namespace(CORE_NAMESPACE, self.namespace)
    # Map the Bar container class onto the spec via the default ObjectMapper.
    self.type_map = TypeMap(self.namespace_catalog)
    self.type_map.register_container_type(CORE_NAMESPACE, 'Bar', Bar)
    self.type_map.register_map(Bar, ObjectMapper)
    self.manager = BuildManager(self.type_map)
    self.mapper = ObjectMapper(self.bar_spec)
def setUp(self):
    """Create a TimeSeries container, its pre-built builder, and the full
    expected NWB file builder tree used by the HDF5 I/O tests."""
    type_map = get_type_map()
    self.manager = BuildManager(type_map)
    # Output path; presumably removed by a tearDown elsewhere — TODO confirm.
    self.path = "test_pynwb_io_hdf5.h5"
    self.start_time = datetime(1970, 1, 1, 12, 0, 0)
    self.create_date = datetime(2017, 4, 15, 12, 0, 0)
    # Hand-built builder for the TimeSeries, mirroring what the mapper
    # would produce.
    self.ts_builder = GroupBuilder('test_timeseries',
                                   attributes={'ancestry': 'TimeSeries',
                                               'source': 'example_source',
                                               'neurodata_type': 'TimeSeries',
                                               'help': 'General purpose TimeSeries'},
                                   datasets={'data': DatasetBuilder('data', list(range(100, 200, 10)),
                                                                   attributes={'unit': 'SIunit',
                                                                               'conversion': 1.0,
                                                                               'resolution': 0.1}),
                                             'timestamps': DatasetBuilder(
                                                 'timestamps', list(range(10)),
                                                 attributes={'unit': 'Seconds', 'interval': 1})})
    self.ts = TimeSeries('test_timeseries', 'example_source', list(range(100, 200, 10)),
                         unit='SIunit', resolution=0.1, timestamps=list(range(10)))
    # Register the container/builder pair so the manager reuses this builder
    # instead of building the TimeSeries itself.
    self.manager.prebuilt(self.ts, self.ts_builder)
    # Expected root builder: the standard NWB file layout with the
    # timeseries builder nested under /acquisition/timeseries.
    self.builder = GroupBuilder(
        'root',
        groups={'acquisition': GroupBuilder('acquisition',
                                            groups={'timeseries': GroupBuilder('timeseries',
                                                                               groups={'test_timeseries': self.ts_builder}),
                                                    'images': GroupBuilder('images')}),
                'analysis': GroupBuilder('analysis'),
                'epochs': GroupBuilder('epochs'),
                'general': GroupBuilder('general'),
                'processing': GroupBuilder('processing'),
                'stimulus': GroupBuilder(
                    'stimulus',
                    groups={'presentation': GroupBuilder('presentation'),
                            'templates': GroupBuilder('templates')})},
        datasets={'file_create_date': DatasetBuilder('file_create_date', [str(self.create_date)]),
                  'identifier': DatasetBuilder('identifier', 'TEST123'),
                  'session_description': DatasetBuilder('session_description', 'a test NWB File'),
                  'nwb_version': DatasetBuilder('nwb_version', '1.0.6'),
                  'session_start_time': DatasetBuilder('session_start_time', str(self.start_time))},
        attributes={'neurodata_type': 'NWBFile'})
def setUp(self):
    """Assemble the spec catalog / namespace / type-map chain for the Baz
    type and create the manager and mapper used by the tests."""
    # Subclass hook defines self.baz_spec before anything else needs it.
    self.setUpBazSpec()
    catalog = SpecCatalog()
    catalog.register_spec(self.baz_spec, 'test.yaml')
    namespace = SpecNamespace('a test namespace', CORE_NAMESPACE,
                              [{'source': 'test.yaml'}], catalog=catalog)
    ns_catalog = NamespaceCatalog()
    ns_catalog.add_namespace(CORE_NAMESPACE, namespace)
    type_map = TypeMap(ns_catalog)
    type_map.register_container_type(CORE_NAMESPACE, 'Baz', Baz)
    type_map.register_map(Baz, ObjectMapper)
    # Expose the pieces on self exactly as the tests expect them.
    self.spec_catalog = catalog
    self.namespace = namespace
    self.namespace_catalog = ns_catalog
    self.type_map = type_map
    self.manager = BuildManager(type_map)
    self.mapper = ObjectMapper(self.baz_spec)
class TestHDF5Writer(GroupBuilderTestCase):
    """Round-trip tests for HDF5IO: write a hand-built NWB builder tree to
    an HDF5 file and verify its layout and reference attributes."""

    def setUp(self):
        # Fixture: a TimeSeries container, its pre-built builder, and the
        # expected root builder for the whole file.
        type_map = get_type_map()
        self.manager = BuildManager(type_map)
        self.path = "test_pynwb_io_hdf5.h5"
        self.start_time = datetime(1970, 1, 1, 12, 0, 0)
        self.create_date = datetime(2017, 4, 15, 12, 0, 0)
        self.ts_builder = GroupBuilder('test_timeseries',
                                       attributes={'ancestry': 'TimeSeries',
                                                   'source': 'example_source',
                                                   'neurodata_type': 'TimeSeries',
                                                   'help': 'General purpose TimeSeries'},
                                       datasets={'data': DatasetBuilder('data', list(range(100, 200, 10)),
                                                                       attributes={'unit': 'SIunit',
                                                                                   'conversion': 1.0,
                                                                                   'resolution': 0.1}),
                                                 'timestamps': DatasetBuilder(
                                                     'timestamps', list(range(10)),
                                                     attributes={'unit': 'Seconds', 'interval': 1})})
        self.ts = TimeSeries('test_timeseries', 'example_source', list(range(100, 200, 10)),
                             unit='SIunit', resolution=0.1, timestamps=list(range(10)))
        # Register the pair so the manager maps self.ts to self.ts_builder
        # without rebuilding it.
        self.manager.prebuilt(self.ts, self.ts_builder)
        self.builder = GroupBuilder(
            'root',
            groups={'acquisition': GroupBuilder('acquisition',
                                                groups={'timeseries': GroupBuilder('timeseries',
                                                                                   groups={'test_timeseries': self.ts_builder}),
                                                        'images': GroupBuilder('images')}),
                    'analysis': GroupBuilder('analysis'),
                    'epochs': GroupBuilder('epochs'),
                    'general': GroupBuilder('general'),
                    'processing': GroupBuilder('processing'),
                    'stimulus': GroupBuilder(
                        'stimulus',
                        groups={'presentation': GroupBuilder('presentation'),
                                'templates': GroupBuilder('templates')})},
            datasets={'file_create_date': DatasetBuilder('file_create_date', [str(self.create_date)]),
                      'identifier': DatasetBuilder('identifier', 'TEST123'),
                      'session_description': DatasetBuilder('session_description', 'a test NWB File'),
                      'nwb_version': DatasetBuilder('nwb_version', '1.0.6'),
                      'session_start_time': DatasetBuilder('session_start_time', str(self.start_time))},
            attributes={'neurodata_type': 'NWBFile'})

    def tearDown(self):
        # Remove the HDF5 file written by the test, if any.
        if os.path.exists(self.path):
            os.remove(self.path)

    def check_fields(self):
        """Open the written file and assert the expected group/dataset layout.

        Returns the open h5py File so callers can inspect it further.
        NOTE(review): the returned handle is never closed by any caller in
        this class — consider closing it in tearDown or via `with`.
        """
        f = File(self.path)
        self.assertIn('acquisition', f)
        self.assertIn('analysis', f)
        self.assertIn('epochs', f)
        self.assertIn('general', f)
        self.assertIn('processing', f)
        self.assertIn('file_create_date', f)
        self.assertIn('identifier', f)
        self.assertIn('session_description', f)
        self.assertIn('nwb_version', f)
        self.assertIn('session_start_time', f)
        acq = f.get('acquisition')
        self.assertIn('images', acq)
        self.assertIn('timeseries', acq)
        ts = acq.get('timeseries')
        self.assertIn('test_timeseries', ts)
        return f

    def test_write_builder(self):
        # Plain write of the root builder, then verify layout on disk.
        writer = HDF5IO(self.path, self.manager)
        writer.write_builder(self.builder)
        writer.close()
        self.check_fields()

    def test_write_attribute_reference_container(self):
        # Setting a Container as an attribute value must be written as an
        # HDF5 object reference to the group built for that container.
        writer = HDF5IO(self.path, self.manager)
        self.builder.set_attribute('ref_attribute', self.ts)
        writer.write_builder(self.builder)
        writer.close()
        f = self.check_fields()
        self.assertIsInstance(f.attrs['ref_attribute'], Reference)
        self.assertEqual(f['acquisition/timeseries/test_timeseries'],
                         f[f.attrs['ref_attribute']])

    def test_write_attribute_reference_builder(self):
        # Same as above, but the attribute value is the Builder itself.
        writer = HDF5IO(self.path, self.manager)
        self.builder.set_attribute('ref_attribute', self.ts_builder)
        writer.write_builder(self.builder)
        writer.close()
        f = self.check_fields()
        self.assertIsInstance(f.attrs['ref_attribute'], Reference)
        self.assertEqual(f['acquisition/timeseries/test_timeseries'],
                         f[f.attrs['ref_attribute']])

    def test_write_context_manager(self):
        # HDF5IO used as a context manager must close/flush on exit.
        with HDF5IO(self.path, self.manager) as writer:
            writer.write_builder(self.builder)
        self.check_fields()

    def test_read_builder(self):
        # Write then read back; the round-tripped builder tree must match.
        self.maxDiff = None
        io = HDF5IO(self.path, self.manager)
        io.write_builder(self.builder)
        builder = io.read_builder()
        self.assertBuilderEqual(builder, self.builder)
        io.close()