Example 1
 def setUpBuilder(self):
     table_builder = self.get_table_builder(self)
     data = list(zip(range(10), range(10, 20)))
     timestamps = list(map(lambda x: x/10, range(10)))
     return GroupBuilder('test_eS',
                         attributes={'source': 'a hypothetical source',
                                     'namespace': base.CORE_NAMESPACE,
                                     'comments': 'no comments',
                                     'description': 'no description',
                                     'neurodata_type': 'ElectricalSeries',
                                     'help': 'Stores acquired voltage data from extracellular recordings'},
                         datasets={'data': DatasetBuilder('data',
                                                          data,
                                                          attributes={'unit': 'volt',
                                                                      'conversion': 1.0,
                                                                      'resolution': 0.0}),
                                   'timestamps': DatasetBuilder('timestamps',
                                                                timestamps,
                                                                attributes={'unit': 'Seconds', 'interval': 1}),
                                   'electrodes': DatasetBuilder('electrodes', data=[0, 2],
                                                               attributes={
                                                                   'neurodata_type': 'DynamicTableRegion',
                                                                   'namespace': 'core',
                                                                   'table': ReferenceBuilder(table_builder),
                                                                   'description': 'the first and third electrodes',
                                                                   'help': 'a subset (i.e. slice or region) of a DynamicTable'})})  # noqa: E501
Example 2
 def setUpBuilder(self):
     TestPlaneSegmentation.get_plane_segmentation_builder(self)
     return GroupBuilder(
         'test_roi_response_series',
         attributes={
             'source': 'RoiResponseSeries integration test',
             'namespace': base.CORE_NAMESPACE,
             'comments': 'no comments',
             'description': 'no description',
             'neurodata_type': 'RoiResponseSeries',
             'help': ('ROI responses over an imaging plane. Each element on the second dimension of data[] '
                      'should correspond to the signal from one ROI')},
         datasets={
             'data': DatasetBuilder(
                 'data', [0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
                 attributes={
                     'unit': 'lumens',
                     'conversion': 1.0,
                     'resolution': 0.0}
             ),
             'timestamps': DatasetBuilder('timestamps', [0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9],
                                          attributes={'unit': 'Seconds', 'interval': 1}),
             'rois': DatasetBuilder('rois', RegionBuilder([0], self.rois_builder),
                                    attributes={'help': 'A region reference to an ROITable',
                                                'description': 'the first of two ROIs',
                                                'namespace': 'core',
                                                'neurodata_type': 'ROITableRegion'}),
         })
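Examples 1 and 2 use the two reference helpers side by side: ReferenceBuilder wraps a whole builder as an object reference (the 'table' attribute in Example 1), while RegionBuilder selects rows within a builder as a region reference (the 'rois' dataset in Example 2). A minimal sketch of that contrast, reusing the target builders named above:

    # Object reference to an entire table builder (Example 1, 'table' attribute).
    whole_table = ReferenceBuilder(table_builder)
    # Region reference to a subset of rows of a builder (Example 2, 'rois' data).
    first_roi = RegionBuilder([0], self.rois_builder)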
Example 3
    def setUp(self):
        super(TestNestedBase, self).setUp()
        self.foo_bucket = FooBucket('test_foo_bucket', [
                            Foo('my_foo1', list(range(10)), 'value1', 10),
                            Foo('my_foo2', list(range(10, 20)), 'value2', 20)])
        self.foo_builders = {
            'my_foo1': GroupBuilder('my_foo1',
                                    datasets={'my_data': DatasetBuilder(
                                        'my_data',
                                        list(range(10)),
                                        attributes={'attr2': 10})},
                                    attributes={'attr1': 'value1', 'namespace': CORE_NAMESPACE, 'data_type': 'Foo'}),
            'my_foo2': GroupBuilder('my_foo2', datasets={'my_data':
                                                         DatasetBuilder(
                                                             'my_data',
                                                             list(range(10, 20)),
                                                             attributes={'attr2': 20})},
                                    attributes={'attr1': 'value2', 'namespace': CORE_NAMESPACE, 'data_type': 'Foo'})
        }
        self.setUpBucketBuilder()
        self.setUpBucketSpec()

        self.spec_catalog.register_spec(self.bucket_spec, 'test.yaml')
        self.type_map.register_container_type(CORE_NAMESPACE, 'FooBucket', FooBucket)
        self.type_map.register_map(FooBucket, ObjectMapper)
        self.manager = BuildManager(self.type_map)
Example 4
 def setUpBuilder(self):
     return GroupBuilder('test_timeseries',
                         attributes={
                             'source': 'example_source',
                             'namespace': base.CORE_NAMESPACE,
                             'neurodata_type': 'TimeSeries',
                             'description': 'no description',
                             'comments': 'no comments',
                             'help': 'General time series object'
                         },
                         datasets={
                             'data':
                             DatasetBuilder('data',
                                            list(range(100, 200, 10)),
                                            attributes={
                                                'unit': 'SIunit',
                                                'conversion': 1.0,
                                                'resolution': 0.1
                                            }),
                             'timestamps':
                             DatasetBuilder('timestamps',
                                            list(range(10)),
                                            attributes={
                                                'unit': 'Seconds',
                                                'interval': 1
                                            })
                         })
Example 5
 def test_link_h5py_dataset_h5dataio_input(self):
     self.io.write_dataset(self.f, DatasetBuilder('test_dataset', np.arange(10), attributes={}))
     self.io.write_dataset(self.f, DatasetBuilder('test_softlink',
                                                  H5DataIO(data=self.f['test_dataset'],
                                                           link_data=True),
                                                  attributes={}))
     self.assertTrue(isinstance(self.f.get('test_softlink', getlink=True), SoftLink))
Example 6
 def test_copy_h5py_dataset_input(self):
     self.io.write_dataset(self.f, DatasetBuilder('test_dataset', np.arange(10), attributes={}))
     self.io.write_dataset(self.f,
                           DatasetBuilder('test_copy', self.f['test_dataset'], attributes={}),
                           link_data=False)
     self.assertTrue(isinstance(self.f.get('test_copy', getlink=True), HardLink))
     self.assertListEqual(self.f['test_dataset'][:].tolist(),
                          self.f['test_copy'][:].tolist())
Example 7
 def test_intersecting_datasets(self):
     gb1 = GroupBuilder(
         'gb1',
         datasets={'dataset2': DatasetBuilder('dataset2', [1, 2, 3])})
     gb2 = GroupBuilder(
         'gb2',
         datasets={'dataset2': DatasetBuilder('dataset2', [4, 5, 6])})
     gb1.deep_update(gb2)
     self.assertIn('dataset2', gb1)
     self.assertListEqual(gb1['dataset2'].data, gb2['dataset2'].data)
Example 8
 def test_mutually_exclusive_datasets(self):
     gb1 = GroupBuilder(
         'gb1',
         datasets={'dataset1': DatasetBuilder('dataset1', [1, 2, 3])})
     gb2 = GroupBuilder(
         'gb2',
         datasets={'dataset2': DatasetBuilder('dataset2', [4, 5, 6])})
     gb1.deep_update(gb2)
     self.assertIn('dataset2', gb1)
     # self.assertIs(gb1['dataset2'], gb2['dataset2'])
     self.assertListEqual(gb1['dataset2'].data, gb2['dataset2'].data)
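Examples 7 and 8 together pin down GroupBuilder.deep_update(): a dataset present in both builders ends up with the incoming builder's data, and a dataset missing from the target is added. A minimal sketch of both cases, with hypothetical names:

    target = GroupBuilder('target', datasets={'shared': DatasetBuilder('shared', [1, 2, 3])})
    incoming = GroupBuilder('incoming', datasets={'shared': DatasetBuilder('shared', [7, 8, 9]),
                                                  'extra': DatasetBuilder('extra', [0])})
    target.deep_update(incoming)
    assert target['shared'].data == [7, 8, 9]  # intersecting dataset takes the incoming data (Example 7)
    assert 'extra' in target                   # mutually exclusive dataset is added (Example 8)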
Example 9
 def test_copy_h5py_dataset_h5dataio_input(self):
     self.io.write_dataset(self.f, DatasetBuilder('test_dataset', np.arange(10), attributes={}))
     self.io.write_dataset(self.f,
                           DatasetBuilder('test_copy',
                                          H5DataIO(data=self.f['test_dataset'],
                                                   link_data=False),  # Force dataset copy
                                          attributes={}),
                           link_data=True)  # Make sure the default behavior is set to link the data
     self.assertTrue(isinstance(self.f.get('test_copy', getlink=True), HardLink))
     self.assertListEqual(self.f['test_dataset'][:].tolist(),
                          self.f['test_copy'][:].tolist())
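Examples 5, 6, and 9 cover the two places where linking versus copying an existing h5py dataset is decided: the per-call write_dataset(..., link_data=...) flag and the per-dataset H5DataIO(..., link_data=...) flag, with the H5DataIO setting winning in Example 9. A condensed sketch of the three cases, with hypothetical target names:

    # Soft link: wrap the already-written dataset and ask for a link (Example 5).
    self.io.write_dataset(self.f, DatasetBuilder('as_link',
                                                 H5DataIO(data=self.f['test_dataset'], link_data=True),
                                                 attributes={}))
    # Copy: pass the raw h5py dataset and disable linking at the call level (Example 6).
    self.io.write_dataset(self.f,
                          DatasetBuilder('as_copy', self.f['test_dataset'], attributes={}),
                          link_data=False)
    # Copy again: the wrapper's link_data=False overrides the call-level link_data=True (Example 9).
    self.io.write_dataset(self.f,
                          DatasetBuilder('as_copy_override',
                                         H5DataIO(data=self.f['test_dataset'], link_data=False),
                                         attributes={}),
                          link_data=True)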
Example 10
 def setUpBuilder(self):
     return GroupBuilder('Clustering',
                         attributes={
                             'help': 'Clustered spike data, whether from automatic clustering tools (eg, klustakwik) or as a result of manual sorting',  # noqa: E501
                             'source': "an example source for Clustering",
                             'neurodata_type': 'Clustering',
                             'namespace': base.CORE_NAMESPACE},
                         datasets={
                             'num': DatasetBuilder('num', [0, 1, 2, 0, 1, 2]),
                             'times': DatasetBuilder('times', list(range(10, 61, 10))),
                             'peak_over_rms': DatasetBuilder('peak_over_rms', [100, 101, 102]),
                             'description': DatasetBuilder('description', "A fake Clustering interface")})
Example 11
 def setUpBuilder(self):
     optchan_builder = GroupBuilder(
         'optchan1',
         attributes={
             'neurodata_type': 'OpticalChannel',
             'namespace': 'core',
             'help': 'Metadata about an optical channel used to record from an imaging plane',
             'source': 'unit test TestImagingPlaneIO'},
         datasets={
             'description': DatasetBuilder('description', 'a fake OpticalChannel'),
             'emission_lambda': DatasetBuilder('emission_lambda', '3.14')},
     )
     return GroupBuilder(
         'imgpln1',
         attributes={
             'neurodata_type': 'ImagingPlane',
             'namespace': 'core',
             'source': 'unit test TestImagingPlaneIO',
             'help': 'Metadata about an imaging plane'},
         datasets={
             'description': DatasetBuilder('description', 'a fake ImagingPlane'),
             'device': DatasetBuilder('device', 'imaging_device_1'),
             'excitation_lambda': DatasetBuilder('excitation_lambda', '6.28'),
             'imaging_rate': DatasetBuilder('imaging_rate', '2.718'),
             'indicator': DatasetBuilder('indicator', 'GFP'),
             'location': DatasetBuilder('location', 'somewhere in the brain')},
         groups={
             'optchan1': optchan_builder
         }
     )
Example 12
 def test_write_dataset_list_fillvalue(self):
     a = H5DataIO(np.arange(20).reshape(5, 4), fillvalue=-1)
     self.io.write_dataset(self.f,
                           DatasetBuilder('test_dataset', a, attributes={}))
     dset = self.f['test_dataset']
     self.assertTrue(np.all(dset[:] == a.data))
     self.assertEqual(dset.fillvalue, -1)
Example 13
 def test_write_dataset_iterable_multidimensional_array(self):
     a = np.arange(30).reshape(5, 2, 3)
     aiter = iter(a)
     daiter = DataChunkIterator.from_iterable(aiter, buffer_size=2)
     self.io.write_dataset(self.f, DatasetBuilder('test_dataset', daiter, attributes={}))
     dset = self.f['test_dataset']
     self.assertListEqual(dset[:].tolist(), a.tolist())
Example 14
 def test_write_dataset_scalar(self):
     a = 10
     self.io.write_dataset(self.f,
                           DatasetBuilder('test_dataset', a, attributes={}))
     dset = self.f['test_dataset']
     self.assertTupleEqual(dset.shape, ())
     self.assertEqual(dset[()], a)
Example 15
 def test_write_dataset_list_chunked(self):
     a = H5DataIO(np.arange(30).reshape(5, 2, 3),
                  chunks=(1, 1, 3))
     self.io.write_dataset(self.f, DatasetBuilder('test_dataset', a, attributes={}))
     dset = self.f['test_dataset']
     self.assertTrue(np.all(dset[:] == a.data))
     self.assertEqual(dset.chunks, (1, 1, 3))
Example 16
 def test_write_dataset_list_compress(self):
     a = H5DataIO(np.arange(30).reshape(5, 2, 3), compress=True)
     self.io.write_dataset(self.f,
                           DatasetBuilder('test_dataset', a, attributes={}))
     dset = self.f['test_dataset']
     self.assertTrue(np.all(dset[:] == a.data))
     self.assertEqual(dset.compression, 'gzip')
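Examples 12, 15, and 16 each exercise a single H5DataIO storage option (fillvalue, chunks, compress) and read the resulting setting back from the h5py dataset. A hedged sketch combining them, assuming the options can be passed together in one H5DataIO call:

    wrapped = H5DataIO(np.arange(30).reshape(5, 2, 3),
                       chunks=(1, 1, 3),  # chunk shape, checked via dset.chunks in Example 15
                       compress=True,     # gzip compression, checked via dset.compression in Example 16
                       fillvalue=-1)      # HDF5 fill value, checked via dset.fillvalue in Example 12
    self.io.write_dataset(self.f, DatasetBuilder('wrapped_dataset', wrapped, attributes={}))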
Example 17
 def test_write_table_nested(self):
     b_cmpd_dt = np.dtype([('c', np.int32), ('d', np.float64)])
     cmpd_dt = np.dtype([('a', np.int32), ('b', b_cmpd_dt)])
     data = np.zeros(10, dtype=cmpd_dt)
     data['a'][1] = 101
     data['b']['c'] = 202
     data['b']['d'] = 10.1
     b_dt = [{
         'name': 'c',
         'dtype': 'int32',
         'doc': 'c column'
     }, {
         'name': 'd',
         'dtype': 'float64',
         'doc': 'd column'
     }]
     dt = [{
         'name': 'a',
         'dtype': 'int32',
         'doc': 'a column'
     }, {
         'name': 'b',
         'dtype': b_dt,
         'doc': 'b column'
     }]
     self.io.write_dataset(
         self.f,
         DatasetBuilder('test_dataset', data, attributes={}, dtype=dt))
     dset = self.f['test_dataset']
     self.assertEqual(dset['a'].tolist(), data['a'].tolist())
     self.assertEqual(dset['b'].tolist(), data['b'].tolist())
Example 18
 def test_valid(self):
     builder = GroupBuilder('my_bar',
                            attributes={'data_type': 'Bar', 'attr1': text('a string attribute')},
                            datasets=[DatasetBuilder('data', 100, attributes={'attr2': 10})])
     validator = self.vmap.get_validator('Bar')
     result = validator.validate(builder)
     self.assertEqual(len(result), 0)
Example 19
 def test_write_dataset_string(self):
     a = 'test string'
     self.io.write_dataset(self.f, DatasetBuilder('test_dataset', a, attributes={}))
     dset = self.f['test_dataset']
     self.assertTupleEqual(dset.shape, ())
     # self.assertEqual(dset[()].decode('utf-8'), a)
     self.assertEqual(dset[()], a)
Example 20
 def setUpBuilder(self):
     return GroupBuilder('subject',
                         attributes={
                             'source': 'Subject integration test',
                             'namespace': base.CORE_NAMESPACE,
                             'neurodata_type': 'Subject',
                             'help': 'Information about the subject'
                         },
                         datasets={
                             'age':
                             DatasetBuilder('age', '12 mo'),
                             'description':
                             DatasetBuilder('description',
                                            'An unfortunate rat'),
                             'genotype':
                             DatasetBuilder('genotype', 'WT'),
                             'sex':
                             DatasetBuilder('sex', 'M'),
                             'species':
                             DatasetBuilder('species', 'Rattus norvegicus'),
                             'subject_id':
                             DatasetBuilder('subject_id', 'RAT123'),
                             'weight':
                             DatasetBuilder('weight', '2 lbs')
                         })
Example 21
 def setUpBuilder(self):
     optchan_builder = GroupBuilder(
         'optchan1',
         attributes={
             'neurodata_type': 'OpticalChannel',
             'namespace': 'core',
             'help': 'Metadata about an optical channel used to record from an imaging plane'},
         datasets={
             'description': DatasetBuilder('description', 'a fake OpticalChannel'),
             'emission_lambda': DatasetBuilder('emission_lambda', 500.)},
     )
     device_builder = GroupBuilder('dev1',
                                   attributes={'neurodata_type': 'Device',
                                               'namespace': 'core',
                                               'help': 'A recording device e.g. amplifier'})
     return GroupBuilder(
         'imgpln1',
         attributes={
             'neurodata_type': 'ImagingPlane',
             'namespace': 'core',
             'help': 'Metadata about an imaging plane'},
         datasets={
             'description': DatasetBuilder('description', 'a fake ImagingPlane'),
             'excitation_lambda': DatasetBuilder('excitation_lambda', 600.),
             'imaging_rate': DatasetBuilder('imaging_rate', 300.),
             'indicator': DatasetBuilder('indicator', 'GFP'),
             'location': DatasetBuilder('location', 'somewhere in the brain')},
         groups={
             'optchan1': optchan_builder
         },
         links={
             'device': LinkBuilder(device_builder, 'device')
         }
     )
Example 22
 def setUpBuilder(self):
     device = GroupBuilder('device_name',
                           attributes={
                               'help': 'A recording device e.g. amplifier',
                               'namespace': 'core',
                               'neurodata_type': 'Device'
                           })
     datasets = [
         DatasetBuilder('slice', data=u'tissue slice'),
         DatasetBuilder('resistance', data=u'something measured in ohms'),
         DatasetBuilder('seal', data=u'sealing method'),
         DatasetBuilder('description', data=u'a fake electrode object'),
         DatasetBuilder('location', data=u'Springfield Elementary School'),
         DatasetBuilder('filtering',
                        data=u'a meaningless free-form text field'),
         DatasetBuilder('initial_access_resistance',
                        data=u'I guess this changes'),
     ]
     elec = GroupBuilder('elec0',
                         attributes={
                             'help':
                             'Metadata about an intracellular electrode',
                             'namespace': 'core',
                             'neurodata_type': 'IntracellularElectrode',
                         },
                         datasets={d.name: d
                                   for d in datasets},
                         links={'device': LinkBuilder(device, 'device')})
     return elec
Example 23
    def test_valid_wo_opt_attr(self):
        bar_builder = GroupBuilder('my_bar',
                                   attributes={'data_type': 'Bar', 'attr1': text('a string attribute')},
                                   datasets=[DatasetBuilder('data', 100, attributes={'attr2': 10})])
        foo_builder = GroupBuilder('my_foo',
                                   attributes={'data_type': 'Foo'},
                                   groups=[bar_builder])

        results = self.vmap.validate(foo_builder)
        self.assertEqual(len(results), 0)
Example 24
 def test_is_empty_false_group_dataset(self):
     """Test is_empty() when group has a subgroup with a dataset"""
     gb = GroupBuilder(
         'gb', {
             'my_subgroup':
             GroupBuilder(
                 'my_subgroup',
                 datasets={'my_dataset': DatasetBuilder('my_dataset')})
         })
     self.assertEqual(gb.is_empty(), False)
Example 25
 def test_build(self):
     ''' Test default mapping functionality when no attributes are nested '''
     container = Baz('my_baz', list(range(10)),
                     'abcdefghijklmnopqrstuvwxyz')
     builder = self.mapper.build(container, self.manager)
     expected = DatasetBuilder(
         'my_baz',
         list(range(10)),
         attributes={'baz_attr': 'abcdefghijklmnopqrstuvwxyz'})
     self.assertDictEqual(builder, expected)
Example 26
 def test_construct_memoization(self):
     builder = GroupBuilder(
         'my_foo', datasets={'my_data': DatasetBuilder(
             'my_data',
             list(range(10)),
             attributes={'attr2': 10})},
         attributes={'attr1': 'value1', 'namespace': CORE_NAMESPACE, 'data_type': 'Foo'})
     expected = Foo('my_foo', list(range(10)), 'value1', 10)  # noqa: F841
     container1 = self.manager.construct(builder)
     container2 = self.manager.construct(builder)
     self.assertIs(container1, container2)
Example 27
    def test_valid(self):
        bar_builder = GroupBuilder('my_bar',
                                   attributes={'data_type': 'Bar', 'attr1': 'a string attribute'},
                                   datasets=[DatasetBuilder('data', 100, attributes={'attr2': 10})])

        foo_builder = GroupBuilder('my_foo',
                                   attributes={'data_type': 'Foo', 'foo_attr': 'example Foo object'},
                                   groups=[bar_builder])

        results = self.vmap.validate(foo_builder)
        self.assertEqual(len(results), 0)
Example 28
 def test_build(self):
     ''' Test default mapping functionality when no attributes are nested '''
     container = Bar('my_bar', list(range(10)), 'value1', 10)
     builder = self.mapper.build(container, self.manager)
     expected = GroupBuilder(
         'my_bar',
         datasets={'data': DatasetBuilder('data', list(range(10)))},
         attributes={
             'attr1': 'value1',
             'attr2': 10
         })
     self.assertDictEqual(builder, expected)
Example 29
 def test_warning_on_setting_io_options_on_h5dataset_input(self):
     self.io.write_dataset(self.f, DatasetBuilder('test_dataset', np.arange(10), attributes={}))
     with warnings.catch_warnings(record=True) as w:
         H5DataIO(self.f['test_dataset'],
                  compression='gzip',
                  compression_opts=4,
                  fletcher32=True,
                  shuffle=True,
                  maxshape=(10, 20),
                  chunks=(10,),
                  fillvalue=100)
         self.assertEqual(len(w), 7)
Example 30
 def test_construct(self):
     builder = GroupBuilder(
         'my_bar',
         datasets={'data': DatasetBuilder('data', list(range(10)))},
         attributes={
             'attr1': 'value1',
             'attr2': 10,
             'data_type': 'Bar',
             'namespace': CORE_NAMESPACE
         })
     expected = Bar('my_bar', list(range(10)), 'value1', 10)
     container = self.mapper.construct(builder, self.manager)
     self.assertEqual(container, expected)