Example #1
 def setUp(self):
     self.bar_spec = GroupSpec(
         'A test group specification with a data type',
         data_type_def='Bar',
         datasets=[
             DatasetSpec('an example dataset',
                         'int',
                         name='data',
                         attributes=[
                             AttributeSpec('attr2',
                                           'an example integer attribute',
                                           'int')
                         ])
         ],
         attributes=[
             AttributeSpec('attr1', 'an example string attribute', 'text')
         ])
     self.spec_catalog = SpecCatalog()
     self.spec_catalog.register_spec(self.bar_spec, 'test.yaml')
     self.namespace = SpecNamespace('a test namespace',
                                    CORE_NAMESPACE, [{
                                        'source': 'test.yaml'
                                    }],
                                    catalog=self.spec_catalog)
     self.namespace_catalog = NamespaceCatalog()
     self.namespace_catalog.add_namespace(CORE_NAMESPACE, self.namespace)
     self.type_map = TypeMap(self.namespace_catalog)
     self.type_map.register_container_type(CORE_NAMESPACE, 'Bar', Bar)
     self.type_map.register_map(Bar, ObjectMapper)
     self.manager = BuildManager(self.type_map)
     self.mapper = ObjectMapper(self.bar_spec)
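
A hedged sketch of how a fixture like this is typically exercised in a test body: the mapper converts a container into a builder via the build manager, as in the later examples. The Bar constructor arguments below are illustrative assumptions, not taken from the example above.

 def test_build(self):
     # build a Bar container into a Builder using the registered mapper and manager
     container = Bar('my_bar', list(range(10)), 'value1', 10)  # hypothetical constructor arguments
     builder = self.mapper.build(container, self.manager)
     self.assertIsNotNone(builder)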
Example #2
    def test_load_namespace_none_version(self):
        """Test that reading a namespace file without a version works but raises a warning."""
        # create namespace with version key (remove it later)
        ns_dict = {
            'doc': 'a test namespace',
            'name': 'test_ns',
            'schema': [{
                'source': self.specs_path
            }],
            'version': '0.0.1'
        }
        namespace = SpecNamespace.build_namespace(**ns_dict)
        namespace['version'] = None  # work around lack of setter to remove version key

        # write the namespace to file without version key
        to_dump = {'namespaces': [namespace]}
        with open(self.namespace_path, 'w') as tmp:
            yaml_obj = yaml.YAML(typ='safe', pure=True)
            yaml_obj.default_flow_style = False
            yaml_obj.dump(json.loads(json.dumps(to_dump)), tmp)

        # load the namespace from file
        ns_catalog = NamespaceCatalog()
        msg = (
            "Loaded namespace 'test_ns' is missing the required key 'version'. Version will be set to "
            "'%s'. Please notify the extension author." %
            SpecNamespace.UNVERSIONED)
        with self.assertWarnsWith(UserWarning, msg):
            ns_catalog.load_namespaces(self.namespace_path)

        self.assertEqual(
            ns_catalog.get_namespace('test_ns').version,
            SpecNamespace.UNVERSIONED)
Example #3
def create_test_type_map(specs, container_classes, mappers=None):
    """
    Create a TypeMap with the specs registered under a test namespace, and classes and mappers registered to type names.
    :param specs: list of specs
    :param container_classes: dict of type name to container class
    :param mappers: (optional) dict of type name to mapper class
    :return: the constructed TypeMap
    """
    spec_catalog = SpecCatalog()
    schema_file = 'test.yaml'
    for s in specs:
        spec_catalog.register_spec(s, schema_file)
    namespace = SpecNamespace(doc='a test namespace',
                              name=CORE_NAMESPACE,
                              schema=[{
                                  'source': schema_file
                              }],
                              version='0.1.0',
                              catalog=spec_catalog)
    namespace_catalog = NamespaceCatalog()
    namespace_catalog.add_namespace(CORE_NAMESPACE, namespace)
    type_map = TypeMap(namespace_catalog)
    for type_name, container_cls in container_classes.items():
        type_map.register_container_type(CORE_NAMESPACE, type_name,
                                         container_cls)
    if mappers:
        for type_name, mapper_cls in mappers.items():
            container_cls = container_classes[type_name]
            type_map.register_map(container_cls, mapper_cls)
    return type_map
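
A brief usage sketch for this helper, assuming a Bar spec and container class like those used in the surrounding examples (the names here are assumptions):

bar_spec = GroupSpec(doc='A test group specification', data_type_def='Bar')
type_map = create_test_type_map([bar_spec], {'Bar': Bar})
manager = BuildManager(type_map)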
Example #4
 def setUpManager(self, specs):
     spec_catalog = SpecCatalog()
     schema_file = 'test.yaml'
     for s in specs:
         spec_catalog.register_spec(s, schema_file)
     namespace = SpecNamespace(doc='a test namespace',
                               name=CORE_NAMESPACE,
                               schema=[{
                                   'source': schema_file
                               }],
                               version='0.1.0',
                               catalog=spec_catalog)
     namespace_catalog = NamespaceCatalog()
     namespace_catalog.add_namespace(CORE_NAMESPACE, namespace)
     type_map = TypeMap(namespace_catalog)
     type_map.register_container_type(CORE_NAMESPACE, 'SimpleFoo',
                                      SimpleFoo)
     type_map.register_container_type(CORE_NAMESPACE, 'NotSimpleFoo',
                                      NotSimpleFoo)
     type_map.register_container_type(CORE_NAMESPACE, 'SimpleQux',
                                      SimpleQux)
     type_map.register_container_type(CORE_NAMESPACE, 'NotSimpleQux',
                                      NotSimpleQux)
     type_map.register_container_type(CORE_NAMESPACE, 'SimpleBucket',
                                      SimpleBucket)
     type_map.register_map(SimpleBucket, self.setUpBucketMapper())
     self.manager = BuildManager(type_map)
Example #5
    def setUp(self):
        self.foo_spec = GroupSpec(
            doc='A test group specification with a data type',
            data_type_def='Foo',
            datasets=[
                DatasetSpec(doc='an example dataset',
                            dtype='int',
                            name='my_data',
                            attributes=[
                                AttributeSpec(
                                    name='attr2',
                                    doc='an example integer attribute',
                                    dtype='int')
                            ])
            ],
            attributes=[
                AttributeSpec('attr1', 'an example string attribute', 'text')
            ])

        self.spec_catalog = SpecCatalog()
        self.spec_catalog.register_spec(self.foo_spec, 'test.yaml')
        self.namespace = SpecNamespace('a test namespace',
                                       CORE_NAMESPACE, [{
                                           'source': 'test.yaml'
                                       }],
                                       version='0.1.0',
                                       catalog=self.spec_catalog)
        self.namespace_catalog = NamespaceCatalog()
        self.namespace_catalog.add_namespace(CORE_NAMESPACE, self.namespace)
        self.type_map = TypeMap(self.namespace_catalog)
        self.type_map.register_container_type(CORE_NAMESPACE, 'Foo', Foo)
        self.type_map.register_map(Foo, FooMapper)
        self.manager = BuildManager(self.type_map)
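
A hedged sketch of the round trip a manager configured this way supports: BuildManager.build converts a container to a builder, and BuildManager.construct rebuilds the container. The Foo constructor arguments are illustrative assumptions.

    def test_roundtrip(self):
        foo = Foo('my_foo', list(range(10)), 'value1', 10)  # hypothetical constructor arguments
        builder = self.manager.build(foo)              # container -> builder
        constructed = self.manager.construct(builder)  # builder -> container
        self.assertEqual(constructed.name, 'my_foo')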
Example #6
    def test_write_cache_spec(self):
        '''
        Round-trip test for writing spec and reading it back in
        '''

        with File(self.path, 'a') as fil:
            with HDF5IO(self.path, manager=self.manager, file=fil,
                        mode='a') as io:
                io.write(self.container)
        with File(self.path, 'r') as f:
            self.assertIn('specifications', f)

        ns_catalog = NamespaceCatalog(NWBGroupSpec, NWBDatasetSpec,
                                      NWBNamespace)
        HDF5IO.load_namespaces(ns_catalog, self.path, namespaces=['core'])
        original_ns = self.manager.namespace_catalog.get_namespace('core')
        cached_ns = ns_catalog.get_namespace('core')
        self.maxDiff = None
        for key in ('author', 'contact', 'doc', 'full_name', 'name'):
            with self.subTest(namespace_field=key):
                self.assertEqual(original_ns[key], cached_ns[key])
        for dt in original_ns.get_registered_types():
            with self.subTest(neurodata_type=dt):
                original_spec = original_ns.get_spec(dt)
                cached_spec = cached_ns.get_spec(dt)
                with self.subTest(test='data_type spec read back in'):
                    self.assertIsNotNone(cached_spec)
                with self.subTest(test='cached spec preserved original spec'):
                    self.assertDictEqual(original_spec, cached_spec)
Example #7
    def test_load_namespace_unversioned_version(self):
        """Test that reading a namespace file with version=unversioned string works but raises a warning."""
        # create namespace with version key (remove it later)
        ns_dict = {
            'doc': 'a test namespace',
            'name': 'test_ns',
            'schema': [
                {'source': self.specs_path}
            ],
            'version': '0.0.1'
        }
        namespace = SpecNamespace.build_namespace(**ns_dict)
        namespace['version'] = str(SpecNamespace.UNVERSIONED)  # work around lack of setter to remove version key

        # write the namespace to file without version key
        to_dump = {'namespaces': [namespace]}
        with open(self.namespace_path, 'w') as tmp:
            yaml.safe_dump(json.loads(json.dumps(to_dump)), tmp, default_flow_style=False)

        # load the namespace from file
        ns_catalog = NamespaceCatalog()
        msg = "Loaded namespace 'test_ns' is unversioned. Please notify the extension author."
        with self.assertWarnsWith(UserWarning, msg):
            ns_catalog.load_namespaces(self.namespace_path)

        self.assertEqual(ns_catalog.get_namespace('test_ns').version, SpecNamespace.UNVERSIONED)
Example #8
    def setUp(self):
        self.foo_spec = GroupSpec(
            'A test group specification with data type Foo',
            data_type_def='Foo')
        self.bar_spec = GroupSpec(
            'A test group specification with a data type Bar',
            data_type_def='Bar',
            datasets=[DatasetSpec('an example dataset', 'int', name='data')],
            attributes=[
                AttributeSpec('attr1', 'an example string attribute', 'text'),
                AttributeSpec('attr2', 'an example integer attribute', 'int'),
                AttributeSpec('foo',
                              'a referenced foo',
                              RefSpec('Foo', 'object'),
                              required=False)
            ])

        self.spec_catalog = SpecCatalog()
        self.spec_catalog.register_spec(self.foo_spec, 'test.yaml')
        self.spec_catalog.register_spec(self.bar_spec, 'test.yaml')
        self.namespace = SpecNamespace('a test namespace',
                                       CORE_NAMESPACE, [{
                                           'source': 'test.yaml'
                                       }],
                                       version='0.1.0',
                                       catalog=self.spec_catalog)
        self.namespace_catalog = NamespaceCatalog()
        self.namespace_catalog.add_namespace(CORE_NAMESPACE, self.namespace)
        self.type_map = TypeMap(self.namespace_catalog)
        self.type_map.register_container_type(CORE_NAMESPACE, 'Foo', Foo)
        self.type_map.register_container_type(CORE_NAMESPACE, 'Bar', Bar)
        self.manager = BuildManager(self.type_map)
        self.foo_mapper = ObjectMapper(self.foo_spec)
        self.bar_mapper = ObjectMapper(self.bar_spec)
Example #9
    def test_bad_value(self):
        """Test that an error is raised if the container attribute value for a spec with a data type is not a container
        or collection of containers.
        """
        class Qux(Container):
            @docval(
                {
                    'name': 'name',
                    'type': str,
                    'doc': 'the name of this Qux'
                }, {
                    'name': 'foo',
                    'type': int,
                    'doc': 'a group'
                })
            def __init__(self, **kwargs):
                name, foo = getargs('name', 'foo', kwargs)
                super().__init__(name=name)
                self.__foo = foo
                if isinstance(foo, Foo):
                    self.__foo.parent = self

            @property
            def foo(self):
                return self.__foo

        self.qux_spec = GroupSpec(
            doc='A test group specification with data type Qux',
            data_type_def='Qux',
            groups=[GroupSpec('an example group', data_type_inc='Foo')])
        self.foo_spec = GroupSpec(
            'A test group specification with data type Foo',
            data_type_def='Foo')
        self.spec_catalog = SpecCatalog()
        self.spec_catalog.register_spec(self.qux_spec, 'test.yaml')
        self.spec_catalog.register_spec(self.foo_spec, 'test.yaml')
        self.namespace = SpecNamespace('a test namespace',
                                       CORE_NAMESPACE, [{
                                           'source': 'test.yaml'
                                       }],
                                       version='0.1.0',
                                       catalog=self.spec_catalog)
        self.namespace_catalog = NamespaceCatalog()
        self.namespace_catalog.add_namespace(CORE_NAMESPACE, self.namespace)
        self.type_map = TypeMap(self.namespace_catalog)
        self.type_map.register_container_type(CORE_NAMESPACE, 'Qux', Qux)
        self.type_map.register_container_type(CORE_NAMESPACE, 'Foo', Foo)
        self.manager = BuildManager(self.type_map)
        self.mapper = ObjectMapper(self.qux_spec)

        container = Qux('my_qux', foo=1)
        msg = "Qux 'my_qux' attribute 'foo' has unexpected type."
        with self.assertRaisesWith(ContainerConfigurationError, msg):
            self.mapper.build(container, self.manager)
Example #10
 def customSetUp(self, bar_spec):
     spec_catalog = SpecCatalog()
     spec_catalog.register_spec(bar_spec, 'test.yaml')
     namespace = SpecNamespace('a test namespace',
                               CORE_NAMESPACE, [{
                                   'source': 'test.yaml'
                               }],
                               version='0.1.0',
                               catalog=spec_catalog)
     namespace_catalog = NamespaceCatalog()
     namespace_catalog.add_namespace(CORE_NAMESPACE, namespace)
     type_map = TypeMap(namespace_catalog)
     type_map.register_container_type(CORE_NAMESPACE, 'Bar', Bar)
     return type_map
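
A short sketch of how a helper like customSetUp is typically used in a test (the spec below is illustrative):

 def test_with_custom_spec(self):
     bar_spec = GroupSpec('A test group specification with a data type', data_type_def='Bar')
     type_map = self.customSetUp(bar_spec)
     manager = BuildManager(type_map)
     self.assertIsNotNone(manager)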
Example #11
 def setUp(self):
     self.bar_spec = GroupSpec(
         'A test group specification with a data type', data_type_def='Bar')
     spec_catalog = SpecCatalog()
     spec_catalog.register_spec(self.bar_spec, 'test.yaml')
     namespace = SpecNamespace('a test namespace',
                               CORE_NAMESPACE, [{
                                   'source': 'test.yaml'
                               }],
                               catalog=spec_catalog)
     namespace_catalog = NamespaceCatalog()
     namespace_catalog.add_namespace(CORE_NAMESPACE, namespace)
     self.type_map = TypeMap(namespace_catalog)
     self.type_map.register_container_type(CORE_NAMESPACE, 'Bar', Bar)
Example #12
 def setUp(self):
     self.setUpBazSpec()
     self.spec_catalog = SpecCatalog()
     self.spec_catalog.register_spec(self.baz_spec, 'test.yaml')
     self.namespace = SpecNamespace('a test namespace',
                                    CORE_NAMESPACE, [{
                                        'source': 'test.yaml'
                                    }],
                                    catalog=self.spec_catalog)
     self.namespace_catalog = NamespaceCatalog()
     self.namespace_catalog.add_namespace(CORE_NAMESPACE, self.namespace)
     self.type_map = TypeMap(self.namespace_catalog)
     self.type_map.register_container_type(CORE_NAMESPACE, 'Baz', Baz)
     self.type_map.register_map(Baz, ObjectMapper)
     self.manager = BuildManager(self.type_map)
     self.mapper = ObjectMapper(self.baz_spec)
Example #13
 def setUp(self):
     self.bar_spec = GroupSpec(
         doc='A test group specification with a data type',
         data_type_def='EmptyBar')
     self.spec_catalog = SpecCatalog()
     self.spec_catalog.register_spec(self.bar_spec, 'test.yaml')
     self.namespace = SpecNamespace('a test namespace',
                                    CORE_NAMESPACE, [{
                                        'source': 'test.yaml'
                                    }],
                                    version='0.1.0',
                                    catalog=self.spec_catalog)
     self.namespace_catalog = NamespaceCatalog()
     self.namespace_catalog.add_namespace(CORE_NAMESPACE, self.namespace)
     self.type_map = TypeMap(self.namespace_catalog)
     self.type_map.register_container_type(CORE_NAMESPACE, 'EmptyBar',
                                           EmptyBar)
Example #14
    def test_register_generator(self):
        """Test TypeMap.register_generator and ClassGenerator.register_generator."""
        class MyClassGenerator(CustomClassGenerator):
            @classmethod
            def apply_generator_to_field(cls, field_spec, bases, type_map):
                return True

            @classmethod
            def process_field_spec(cls, classdict, docval_args, parent_cls,
                                   attr_name, not_inherited_fields, type_map,
                                   spec):
                # record each processed attribute name on the class dict
                classdict.setdefault('process_field_spec',
                                     list()).append(attr_name)

            @classmethod
            def post_process(cls, classdict, bases, docval_args, spec):
                classdict['post_process'] = True

        spec = GroupSpec(doc='A test group specification with a data type',
                         data_type_def='Baz',
                         attributes=[
                             AttributeSpec(name='attr1',
                                           doc='a string attribute',
                                           dtype='text')
                         ])

        spec_catalog = SpecCatalog()
        spec_catalog.register_spec(spec, 'test.yaml')
        namespace = SpecNamespace(doc='a test namespace',
                                  name=CORE_NAMESPACE,
                                  schema=[{
                                      'source': 'test.yaml'
                                  }],
                                  version='0.1.0',
                                  catalog=spec_catalog)
        namespace_catalog = NamespaceCatalog()
        namespace_catalog.add_namespace(CORE_NAMESPACE, namespace)
        type_map = TypeMap(namespace_catalog)
        type_map.register_generator(MyClassGenerator)
        cls = type_map.get_dt_container_cls('Baz', CORE_NAMESPACE)

        self.assertEqual(cls.process_field_spec, ['attr1'])
        self.assertTrue(cls.post_process)
Example #15
    def test_catch_dup_name(self):
        ns_builder1 = NamespaceBuilder('Extension doc',
                                       "test_ext",
                                       version='0.1.0')
        ns_builder1.add_spec(self.ext_source1,
                             GroupSpec('doc', data_type_def='MyType'))
        ns_builder1.export(self.ns_path1, outdir=self.tempdir)
        ns_builder2 = NamespaceBuilder('Extension doc',
                                       "test_ext",
                                       version='0.2.0')
        ns_builder2.add_spec(self.ext_source2,
                             GroupSpec('doc', data_type_def='MyType'))
        ns_builder2.export(self.ns_path2, outdir=self.tempdir)

        ns_catalog = NamespaceCatalog()
        ns_catalog.load_namespaces(os.path.join(self.tempdir, self.ns_path1))

        msg = "Ignoring cached namespace 'test_ext' version 0.2.0 because version 0.1.0 is already loaded."
        with self.assertWarnsRegex(UserWarning, msg):
            ns_catalog.load_namespaces(
                os.path.join(self.tempdir, self.ns_path2))
Example #16
 def _check_spec(self):
     ns_catalog = NamespaceCatalog()
     HDF5IO.load_namespaces(ns_catalog, self.path)
     self.maxDiff = None
     for namespace in self.manager.namespace_catalog.namespaces:
         with self.subTest(namespace=namespace):
             original_ns = self.manager.namespace_catalog.get_namespace(namespace)
             cached_ns = ns_catalog.get_namespace(namespace)
             ns_fields_to_check = list(original_ns.keys())
             ns_fields_to_check.remove('schema')  # schema fields will not match, so reset
             for ns_field in ns_fields_to_check:
                 with self.subTest(namespace_field=ns_field):
                     self.assertEqual(original_ns[ns_field], cached_ns[ns_field])
             for dt in original_ns.get_registered_types():
                 with self.subTest(data_type=dt):
                     original_spec = original_ns.get_spec(dt)
                     cached_spec = cached_ns.get_spec(dt)
                     with self.subTest('Data type spec is read back in'):
                         self.assertIsNotNone(cached_spec)
                     with self.subTest('Cached spec matches original spec'):
                         self.assertDictEqual(original_spec, cached_spec)
Example #17
 def setUp(self):
     self.set_up_specs()
     spec_catalog = SpecCatalog()
     spec_catalog.register_spec(self.bar_data_spec, 'test.yaml')
     spec_catalog.register_spec(self.bar_data_holder_spec, 'test.yaml')
     namespace = SpecNamespace(doc='a test namespace',
                               name=CORE_NAMESPACE,
                               schema=[{
                                   'source': 'test.yaml'
                               }],
                               version='0.1.0',
                               catalog=spec_catalog)
     namespace_catalog = NamespaceCatalog()
     namespace_catalog.add_namespace(CORE_NAMESPACE, namespace)
     type_map = TypeMap(namespace_catalog)
     type_map.register_container_type(CORE_NAMESPACE, 'BarData', BarData)
     type_map.register_container_type(CORE_NAMESPACE, 'BarDataHolder',
                                      BarDataHolder)
     type_map.register_map(BarData, ExtBarDataMapper)
     type_map.register_map(BarDataHolder, ObjectMapper)
     self.manager = BuildManager(type_map)
Example #18
    def test_catch_dup_name_same_version(self):
        ns_builder1 = NamespaceBuilder('Extension doc',
                                       "test_ext",
                                       version='0.1.0')
        ns_builder1.add_spec(self.ext_source1,
                             GroupSpec('doc', data_type_def='MyType'))
        ns_builder1.export(self.ns_path1, outdir=self.tempdir)
        ns_builder2 = NamespaceBuilder('Extension doc',
                                       "test_ext",
                                       version='0.1.0')
        ns_builder2.add_spec(self.ext_source2,
                             GroupSpec('doc', data_type_def='MyType'))
        ns_builder2.export(self.ns_path2, outdir=self.tempdir)

        ns_catalog = NamespaceCatalog()
        ns_catalog.load_namespaces(os.path.join(self.tempdir, self.ns_path1))

        # no warning should be raised (but don't just check for 0 warnings -- warnings can come from other sources)
        msg = "Ignoring cached namespace 'test_ext' version 0.1.0 because version 0.1.0 is already loaded."
        with warnings.catch_warnings(record=True) as ws:
            ns_catalog.load_namespaces(
                os.path.join(self.tempdir, self.ns_path2))
        for w in ws:
            self.assertTrue(str(w.message) != msg)
Example #19
    def __init__(self, **kwargs):
        path, mode, manager, extensions, load_namespaces, file_obj, comm =\
            popargs('path', 'mode', 'manager', 'extensions', 'load_namespaces', 'file', 'comm', kwargs)

        # root group
        self.__rgroup = file_obj
        chunk_store = getattr(file_obj, 'chunk_store', None)
        filename = None  # resolved below; falls back to the store class name
        if chunk_store is not None:
            try:
                filename = getattr(chunk_store.source, 'path', None)
                if filename is None:
                    filename = chunk_store.source.name
            except Exception:
                filename = None
        if filename is None:
            filename = f'{type(file_obj.store).__name__}'
        self.__rgroup.filename = filename

        file_obj = self.__set_rgroup(file_obj)

        self.__built = dict()       # keep track of each builder for each dataset/group/link for each file
        self.__read = dict()        # keep track of which files have been read. Key is the filename value is the builder
        self.__file = file_obj

        if load_namespaces:
            if manager is not None:
                warn("loading namespaces from file - ignoring 'manager'")
            if extensions is not None:
                warn("loading namespaces from file - ignoring 'extensions' argument")
            # namespaces are not loaded when creating an NWBZARRHDF5IO object in write mode
            if 'w' in mode or mode == 'x':
                raise ValueError("cannot load namespaces from file when writing to it")

            tm = get_type_map()
            self.load_namespaces(tm, path, file=file_obj)
            manager = BuildManager(tm)

            # XXX: Leaving this here in case we want to revert to this strategy for
            #      loading cached namespaces
            # ns_catalog = NamespaceCatalog(NWBGroupSpec, NWBDatasetSpec, NWBNamespace)
            # super(NWBZARRHDF5IO, self).load_namespaces(ns_catalog, path)
            # tm = TypeMap(ns_catalog)
            # tm.copy_mappers(get_type_map())
        else:
            if manager is not None and extensions is not None:
                raise ValueError("'manager' and 'extensions' cannot be specified together")
            elif extensions is not None:
                manager = get_manager(extensions=extensions)
            elif manager is None:
                manager = get_manager()

        self.logger = logging.getLogger('%s.%s' % (self.__class__.__module__, self.__class__.__qualname__))

        if file_obj is not None:
            if path is None:
                path = file_obj.filename
            elif os.path.abspath(file_obj.filename) != os.path.abspath(path):
                msg = 'You argued %s as this object\'s path, ' % path
                msg += 'but supplied a file with filename: %s' % file_obj.filename
                raise ValueError(msg)
        elif path is None:
            raise TypeError("Must supply either 'path' or 'file' arg to HDF5IO.")

        if file_obj is None and not os.path.exists(path) and (mode == 'r' or mode == 'r+'):
            msg = "Unable to open file %s in '%s' mode. File does not exist." % (path, mode)
            raise UnsupportedOperation(msg)

        if file_obj is None and os.path.exists(path) and (mode == 'w-' or mode == 'x'):
            msg = "Unable to open file %s in '%s' mode. File already exists." % (path, mode)
            raise UnsupportedOperation(msg)

        if manager is None:
            manager = BuildManager(TypeMap(NamespaceCatalog()))
        elif isinstance(manager, TypeMap):
            manager = BuildManager(manager)

        # TO DO #
        self._HDF5IO__comm = comm
        self._HDF5IO__mode = mode
        self._HDF5IO__path = path
        self._HDF5IO__file = file_obj
        super(_HDF5IO, self).__init__(manager, source=path)
        self._HDF5IO__ref_queue = deque()  # a queue of the references that need to be added
        self._HDF5IO__dci_queue = deque()  # a queue of DataChunkIterators that need to be exhausted
Example #20
def __get_resources():
    ret = dict()
    ret['namespace_path'] = os.path.join(
        resource_filename(__name__, 'nwb-schema/core'), __core_ns_file_name)
    return ret


def _get_resources():
    # LEGACY: Needed to support legacy implementation.
    return __get_resources()


# a global namespace catalog
global __NS_CATALOG
global __TYPE_MAP

__NS_CATALOG = NamespaceCatalog(NWBGroupSpec, NWBDatasetSpec, NWBNamespace)

hdmf_typemap = hdmf.common.get_type_map()
__NS_CATALOG.merge(hdmf_typemap.namespace_catalog)

__TYPE_MAP = TypeMap(__NS_CATALOG)
__TYPE_MAP.merge(hdmf_typemap)


@docval(
    {
        'name': 'extensions',
        'type': (str, TypeMap, list),
        'doc':
        'a path to a namespace, a TypeMap, or a list consisting of paths to namespaces and TypeMaps',
        'default': None
Example #21
def main():

    ep = """
    use --nspath to validate against an extension. If --ns is not specified,
    validate against all namespaces in namespace file.
    """

    parser = ArgumentParser(description="Validate an NWB file", epilog=ep)
    parser.add_argument("paths", type=str, nargs='+', help="NWB file paths")
    parser.add_argument('-p', '--nspath', type=str, help="the path to the namespace YAML file")
    parser.add_argument("-n", "--ns", type=str, help="the namespace to validate against")
    parser.add_argument("-lns", "--list-namespaces", dest="list_namespaces",
                        action='store_true', help="List the available namespaces and exit.")

    feature_parser = parser.add_mutually_exclusive_group(required=False)
    feature_parser.add_argument("--cached-namespace", dest="cached_namespace", action='store_true',
                                help="Use the cached namespace (default).")
    feature_parser.add_argument('--no-cached-namespace', dest="cached_namespace", action='store_false',
                                help="Don't use the cached namespace.")
    parser.set_defaults(cached_namespace=True)

    args = parser.parse_args()
    ret = 0

    if args.nspath:
        if not os.path.isfile(args.nspath):
            print("The namespace file {} is not a valid file.".format(args.nspath), file=sys.stderr)
            sys.exit(1)

        if args.cached_namespace:
            print("Turning off validation against cached namespace information "
                  "as --nspath was passed.", file=sys.stderr)
            args.cached_namespace = False

    for path in args.paths:

        if not os.path.isfile(path):
            print("The file {} does not exist.".format(path), file=sys.stderr)
            ret = 1
            continue

        if args.cached_namespace:
            catalog = NamespaceCatalog(NWBGroupSpec, NWBDatasetSpec, NWBNamespace)
            ns_deps = NWBHDF5IO.load_namespaces(catalog, path)
            s = set(ns_deps.keys())       # determine which namespaces are the most
            for k in ns_deps:             # specific (i.e. extensions) and validate
                s -= ns_deps[k].keys()    # against those
            # TODO remove this workaround for issue https://github.com/NeurodataWithoutBorders/pynwb/issues/1357
            if 'hdmf-experimental' in s:
                s.remove('hdmf-experimental')  # remove validation of hdmf-experimental for now
            namespaces = list(sorted(s))
            if len(namespaces) > 0:
                tm = TypeMap(catalog)
                manager = BuildManager(tm)
                specloc = "cached namespace information"
            else:
                manager = None
                namespaces = [CORE_NAMESPACE]
                specloc = "pynwb namespace information"
                print("The file {} has no cached namespace information. "
                      "Falling back to {}.".format(path, specloc), file=sys.stderr)
        elif args.nspath:
            catalog = NamespaceCatalog(NWBGroupSpec, NWBDatasetSpec, NWBNamespace)
            namespaces = catalog.load_namespaces(args.nspath)

            if len(namespaces) == 0:
                print("Could not load namespaces from file {}.".format(args.nspath), file=sys.stderr)
                sys.exit(1)

            tm = TypeMap(catalog)
            manager = BuildManager(tm)
            specloc = "--nspath namespace information"
        else:
            manager = None
            namespaces = [CORE_NAMESPACE]
            specloc = "pynwb namespace information"

        if args.list_namespaces:
            print("\n".join(namespaces))
            ret = 0
            continue

        if args.ns:
            if args.ns in namespaces:
                namespaces = [args.ns]
            else:
                print("The namespace {} could not be found in {} as only {} is present.".format(
                      args.ns, specloc, namespaces), file=sys.stderr)
                ret = 1
                continue

        with NWBHDF5IO(path, mode='r', manager=manager) as io:
            for ns in namespaces:
                print("Validating {} against {} using namespace {}.".format(path, specloc, ns))
                ret = ret or _validate_helper(io=io, namespace=ns)

    sys.exit(ret)
Example #22
    def setUp(self):
        self.dt_spec = GroupSpec(
            'A test extension that contains a dynamic table',
            data_type_def='TestTable',
            data_type_inc='DynamicTable',
            datasets=[
                DatasetSpec(
                    data_type_inc='VectorData',
                    name='my_col',
                    doc='a test column',
                    dtype='float'
                ),
                DatasetSpec(
                    data_type_inc='VectorData',
                    name='indexed_col',
                    doc='a test column',
                    dtype='float'
                ),
                DatasetSpec(
                    data_type_inc='VectorIndex',
                    name='indexed_col_index',
                    doc='a test column',
                ),
                DatasetSpec(
                    data_type_inc='VectorData',
                    name='optional_col1',
                    doc='a test column',
                    dtype='float',
                    quantity='?',
                ),
                DatasetSpec(
                    data_type_inc='VectorData',
                    name='optional_col2',
                    doc='a test column',
                    dtype='float',
                    quantity='?',
                )
            ]
        )

        self.dt_spec2 = GroupSpec(
            'A test extension that contains a dynamic table',
            data_type_def='TestDTRTable',
            data_type_inc='DynamicTable',
            datasets=[
                DatasetSpec(
                    data_type_inc='DynamicTableRegion',
                    name='ref_col',
                    doc='a test column',
                ),
                DatasetSpec(
                    data_type_inc='DynamicTableRegion',
                    name='indexed_ref_col',
                    doc='a test column',
                ),
                DatasetSpec(
                    data_type_inc='VectorIndex',
                    name='indexed_ref_col_index',
                    doc='a test column',
                ),
                DatasetSpec(
                    data_type_inc='DynamicTableRegion',
                    name='optional_ref_col',
                    doc='a test column',
                    quantity='?'
                ),
                DatasetSpec(
                    data_type_inc='DynamicTableRegion',
                    name='optional_indexed_ref_col',
                    doc='a test column',
                    quantity='?'
                ),
                DatasetSpec(
                    data_type_inc='VectorIndex',
                    name='optional_indexed_ref_col_index',
                    doc='a test column',
                    quantity='?'
                ),
                DatasetSpec(
                    data_type_inc='VectorData',
                    name='optional_col3',
                    doc='a test column',
                    dtype='float',
                    quantity='?',
                )
            ]
        )

        from hdmf.spec.write import YAMLSpecWriter
        writer = YAMLSpecWriter(outdir='.')

        self.spec_catalog = SpecCatalog()
        self.spec_catalog.register_spec(self.dt_spec, 'test.yaml')
        self.spec_catalog.register_spec(self.dt_spec2, 'test.yaml')
        self.namespace = SpecNamespace(
            'a test namespace', CORE_NAMESPACE,
            [
                dict(
                    namespace='hdmf-common',
                ),
                dict(source='test.yaml'),
            ],
            version='0.1.0',
            catalog=self.spec_catalog
        )

        self.test_dir = tempfile.mkdtemp()
        spec_fpath = os.path.join(self.test_dir, 'test.yaml')
        namespace_fpath = os.path.join(self.test_dir, 'test-namespace.yaml')
        writer.write_spec(dict(groups=[self.dt_spec, self.dt_spec2]), spec_fpath)
        writer.write_namespace(self.namespace, namespace_fpath)
        self.namespace_catalog = NamespaceCatalog()
        hdmf_typemap = get_type_map()
        self.type_map = TypeMap(self.namespace_catalog)
        self.type_map.merge(hdmf_typemap, ns_catalog=True)
        self.type_map.load_namespaces(namespace_fpath)
        self.manager = BuildManager(self.type_map)

        self.TestTable = self.type_map.get_dt_container_cls('TestTable', CORE_NAMESPACE)
        self.TestDTRTable = self.type_map.get_dt_container_cls('TestDTRTable', CORE_NAMESPACE)
Example #23
def main():

    ep = """
    use --nspath to validate against an extension. If --ns is not specified,
    validate against all namespaces in namespace file.
    """

    parser = ArgumentParser(description="Validate an NWB file", epilog=ep)
    parser.add_argument("paths", type=str, nargs='+', help="NWB file paths")
    parser.add_argument('-p',
                        '--nspath',
                        type=str,
                        help="the path to the namespace YAML file")
    parser.add_argument("-n",
                        "--ns",
                        type=str,
                        help="the namespace to validate against")

    feature_parser = parser.add_mutually_exclusive_group(required=False)
    feature_parser.add_argument("--cached-namespace",
                                dest="cached_namespace",
                                action='store_true',
                                help="Use the cached namespace (default).")
    feature_parser.add_argument('--no-cached-namespace',
                                dest="cached_namespace",
                                action='store_false',
                                help="Don't use the cached namespace.")
    parser.set_defaults(cached_namespace=True)

    args = parser.parse_args()
    ret = 0

    if args.nspath:
        if not os.path.isfile(args.nspath):
            print("The namespace file {} is not a valid file.".format(
                args.nspath),
                  file=sys.stderr)
            sys.exit(1)

        if args.cached_namespace:
            print(
                "Turning off validation against cached namespace information"
                "as --nspath was passed.",
                file=sys.stderr)
            args.cached_namespace = False

    for path in args.paths:

        if not os.path.isfile(path):
            print("The file {} does not exist.".format(path), file=sys.stderr)
            ret = 1
            continue

        if args.cached_namespace:
            catalog = NamespaceCatalog(NWBGroupSpec, NWBDatasetSpec,
                                       NWBNamespace)
            namespaces = HDF5IO.load_namespaces(catalog, path).keys()
            if len(namespaces) > 0:
                tm = TypeMap(catalog)
                manager = BuildManager(tm)
                specloc = "cached namespace information"
            else:
                manager = None
                namespaces = available_namespaces()
                specloc = "pynwb namespace information"
                print("The file {} has no cached namespace information. "
                      "Falling back to {}.".format(path, specloc),
                      file=sys.stderr)
        elif args.nspath:
            catalog = NamespaceCatalog(NWBGroupSpec, NWBDatasetSpec,
                                       NWBNamespace)
            namespaces = catalog.load_namespaces(args.nspath)

            if len(namespaces) == 0:
                print("Could not load namespaces from file {}.".format(
                    args.nspath),
                      file=sys.stderr)
                sys.exit(1)

            tm = TypeMap(catalog)
            manager = BuildManager(tm)
            specloc = "--nspath namespace information"
        else:
            manager = None
            namespaces = available_namespaces()
            specloc = "pynwb namespace information"

        if args.ns:
            if args.ns in namespaces:
                namespaces = [args.ns]
            else:
                print("The namespace {} could not be found in {}.".format(
                    args.ns, specloc),
                      file=sys.stderr)
                ret = 1
                continue

        with NWBHDF5IO(path, mode='r', manager=manager) as io:
            for ns in namespaces:
                print("Validating {} against {} using namespace {}.".format(
                    path, specloc, ns))
                ret = ret or _validate_helper(io=io, namespace=ns)

    sys.exit(ret)
Example #24
 def setUp(self):
     self.attributes = [
         AttributeSpec('attribute1', 'my first attribute', 'text'),
         AttributeSpec('attribute2', 'my second attribute', 'text')
     ]
     self.dset1_attributes = [
         AttributeSpec('attribute3', 'my third attribute', 'text'),
         AttributeSpec('attribute4', 'my fourth attribute', 'text')
     ]
     self.dset2_attributes = [
         AttributeSpec('attribute5', 'my fifth attribute', 'text'),
         AttributeSpec('attribute6', 'my sixth attribute', 'text')
     ]
     self.datasets = [
         DatasetSpec('my first dataset',  # noqa: F405
                     'int',
                     name='dataset1',
                     attributes=self.dset1_attributes,
                     linkable=True),
         DatasetSpec('my second dataset',  # noqa: F405
                     'int',
                     name='dataset2',
                     dims=(None, None),
                     attributes=self.dset2_attributes,
                     linkable=True,
                     data_type_def='VoltageArray')
     ]
     self.spec = GroupSpec('A test group',  # noqa: F405
                           name='root_constructor_datatype',
                           datasets=self.datasets,
                           attributes=self.attributes,
                           linkable=False,
                           data_type_def='EphysData')
     dset1_attributes_ext = [
         AttributeSpec('dset1_extra_attribute', 'an extra attribute for the first dataset', 'text')
     ]
     self.ext_datasets = [
         DatasetSpec('my first dataset extension',  # noqa: F405
                     'int',
                     name='dataset1',
                     attributes=dset1_attributes_ext,
                     linkable=True),
     ]
     self.ext_attributes = [
         AttributeSpec('ext_extra_attribute', 'an extra attribute for the group', 'text'),
     ]
     self.ext_spec = GroupSpec('A test group extension',  # noqa: F405
                                name='root_constructor_datatype',
                                datasets=self.ext_datasets,
                                attributes=self.ext_attributes,
                                linkable=False,
                                data_type_inc='EphysData',
                                data_type_def='SpikeData')
     to_dump = {'groups': [self.spec, self.ext_spec]}
     self.specs_path = 'test_load_namespace.specs.yaml'
     self.namespace_path = 'test_load_namespace.namespace.yaml'
     with open(self.specs_path, 'w') as tmp:
         yaml.safe_dump(json.loads(json.dumps(to_dump)), tmp, default_flow_style=False)
     ns_dict = {
         'doc': 'a test namespace',
         'name': self.NS_NAME,
         'schema': [
             {'source': self.specs_path}
         ]
     }
     self.namespace = SpecNamespace.build_namespace(**ns_dict)  # noqa: F405
     to_dump = {'namespaces': [self.namespace]}
     with open(self.namespace_path, 'w') as tmp:
         yaml.safe_dump(json.loads(json.dumps(to_dump)), tmp, default_flow_style=False)
     self.ns_catalog = NamespaceCatalog()  # noqa: F405
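
A hedged sketch of the kind of check a test built on this fixture might perform, loading the files written above back through the catalog (load_namespaces and get_namespace are used the same way in the earlier examples):

 def test_load_written_namespace(self):
     self.ns_catalog.load_namespaces(self.namespace_path)
     loaded = self.ns_catalog.get_namespace(self.NS_NAME)
     self.assertEqual(loaded['doc'], 'a test namespace')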
Example #25
 def setUp(self):  # noqa: C901
     self.ns_catalog = NamespaceCatalog(CustomGroupSpec, CustomDatasetSpec,
                                        CustomSpecNamespace)
     hdmf_typemap = get_type_map()
     self.ns_catalog.merge(hdmf_typemap.namespace_catalog)