Example 1
    def setUp(self):
        super().setUp()
        self.foo_bucket = FooBucket('test_foo_bucket', [
                            Foo('my_foo1', list(range(10)), 'value1', 10),
                            Foo('my_foo2', list(range(10, 20)), 'value2', 20)])
        self.foo_builders = {
            'my_foo1': GroupBuilder('my_foo1',
                                    datasets={'my_data': DatasetBuilder(
                                        'my_data',
                                        list(range(10)),
                                        attributes={'attr2': 10})},
                                    attributes={'attr1': 'value1', 'namespace': CORE_NAMESPACE, 'data_type': 'Foo',
                                                'object_id': self.foo_bucket.foos['my_foo1'].object_id}),
            'my_foo2': GroupBuilder('my_foo2', datasets={'my_data':
                                                         DatasetBuilder(
                                                             'my_data',
                                                             list(range(10, 20)),
                                                             attributes={'attr2': 20})},
                                    attributes={'attr1': 'value2', 'namespace': CORE_NAMESPACE, 'data_type': 'Foo',
                                                'object_id': self.foo_bucket.foos['my_foo2'].object_id})
        }
        self.setUpBucketBuilder()
        self.setUpBucketSpec()

        self.spec_catalog.register_spec(self.bucket_spec, 'test.yaml')
        self.type_map.register_container_type(CORE_NAMESPACE, 'FooBucket', FooBucket)
        self.type_map.register_map(FooBucket, self.setUpBucketMapper())
        self.manager = BuildManager(self.type_map)
Example 2
    def setUp(self):
        self.foo_spec = GroupSpec(
            'A test group specification with data type Foo',
            data_type_def='Foo')
        self.bar_spec = GroupSpec(
            'A test group specification with a data type Bar',
            data_type_def='Bar',
            datasets=[DatasetSpec('an example dataset', 'int', name='data')],
            attributes=[
                AttributeSpec('attr1', 'an example string attribute', 'text'),
                AttributeSpec('attr2', 'an example integer attribute', 'int'),
                AttributeSpec('foo',
                              'a referenced foo',
                              RefSpec('Foo', 'object'),
                              required=False)
            ])

        self.spec_catalog = SpecCatalog()
        self.spec_catalog.register_spec(self.foo_spec, 'test.yaml')
        self.spec_catalog.register_spec(self.bar_spec, 'test.yaml')
        self.namespace = SpecNamespace('a test namespace',
                                       CORE_NAMESPACE, [{
                                           'source': 'test.yaml'
                                       }],
                                       version='0.1.0',
                                       catalog=self.spec_catalog)
        self.namespace_catalog = NamespaceCatalog()
        self.namespace_catalog.add_namespace(CORE_NAMESPACE, self.namespace)
        self.type_map = TypeMap(self.namespace_catalog)
        self.type_map.register_container_type(CORE_NAMESPACE, 'Foo', Foo)
        self.type_map.register_container_type(CORE_NAMESPACE, 'Bar', Bar)
        self.manager = BuildManager(self.type_map)
        self.foo_mapper = ObjectMapper(self.foo_spec)
        self.bar_mapper = ObjectMapper(self.bar_spec)
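
A minimal sketch of how a fixture like this is typically exercised; it mirrors the tests in Example 16, which reuse the same Foo/Bar specs, and the Bar constructor arguments follow that example:

        bar = Bar('my_bar1', list(range(10)), 'value1', 10)
        # build through the mapper directly, passing the manager that resolves nested types ...
        builder = self.bar_mapper.build(bar, self.manager)
        # ... or let the BuildManager look up the registered mapper itself
        builder = self.manager.build(bar)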
Example 3
class TestNestedBase(with_metaclass(ABCMeta, TestBase)):
    def setUp(self):
        super(TestNestedBase, self).setUp()
        self.foo_bucket = FooBucket('test_foo_bucket', [
            Foo('my_foo1', list(range(10)), 'value1', 10),
            Foo('my_foo2', list(range(10, 20)), 'value2', 20)
        ])
        self.foo_builders = {
            'my_foo1':
            GroupBuilder('my_foo1',
                         datasets={
                             'my_data':
                             DatasetBuilder('my_data',
                                            list(range(10)),
                                            attributes={'attr2': 10})
                         },
                         attributes={
                             'attr1': 'value1',
                             'namespace': CORE_NAMESPACE,
                             'data_type': 'Foo'
                         }),
            'my_foo2':
            GroupBuilder('my_foo2',
                         datasets={
                             'my_data':
                             DatasetBuilder('my_data',
                                            list(range(10, 20)),
                                            attributes={'attr2': 20})
                         },
                         attributes={
                             'attr1': 'value2',
                             'namespace': CORE_NAMESPACE,
                             'data_type': 'Foo'
                         })
        }
        self.setUpBucketBuilder()
        self.setUpBucketSpec()

        self.spec_catalog.register_spec(self.bucket_spec, 'test.yaml')
        self.type_map.register_container_type(CORE_NAMESPACE, 'FooBucket',
                                              FooBucket)
        self.type_map.register_map(FooBucket, ObjectMapper)
        self.manager = BuildManager(self.type_map)

    def setUpBucketBuilder(self):
        raise unittest.SkipTest('Abstract Base Class')

    def setUpBucketSpec(self):
        raise unittest.SkipTest('Abstract Base Class')

    def test_build(self):
        ''' Test default mapping for a Container that has a Container as an attribute value '''
        builder = self.manager.build(self.foo_bucket)
        self.assertDictEqual(builder, self.bucket_builder)

    def test_construct(self):
        container = self.manager.construct(self.bucket_builder)
        self.assertEqual(container, self.foo_bucket)
Example 4
class NestedBaseMixin(metaclass=ABCMeta):

    def setUp(self):
        super().setUp()
        self.foo_bucket = FooBucket('test_foo_bucket', [
                            Foo('my_foo1', list(range(10)), 'value1', 10),
                            Foo('my_foo2', list(range(10, 20)), 'value2', 20)])
        self.foo_builders = {
            'my_foo1': GroupBuilder('my_foo1',
                                    datasets={'my_data': DatasetBuilder(
                                        'my_data',
                                        list(range(10)),
                                        attributes={'attr2': 10})},
                                    attributes={'attr1': 'value1', 'namespace': CORE_NAMESPACE, 'data_type': 'Foo',
                                                'object_id': self.foo_bucket.foos['my_foo1'].object_id}),
            'my_foo2': GroupBuilder('my_foo2', datasets={'my_data':
                                                         DatasetBuilder(
                                                             'my_data',
                                                             list(range(10, 20)),
                                                             attributes={'attr2': 20})},
                                    attributes={'attr1': 'value2', 'namespace': CORE_NAMESPACE, 'data_type': 'Foo',
                                                'object_id': self.foo_bucket.foos['my_foo2'].object_id})
        }
        self.setUpBucketBuilder()
        self.setUpBucketSpec()

        self.spec_catalog.register_spec(self.bucket_spec, 'test.yaml')
        self.type_map.register_container_type(CORE_NAMESPACE, 'FooBucket', FooBucket)
        self.type_map.register_map(FooBucket, self.setUpBucketMapper())
        self.manager = BuildManager(self.type_map)

    @abstractmethod
    def setUpBucketBuilder(self):
        raise NotImplementedError('Cannot run test unless setUpBucketBuilder is implemented')

    @abstractmethod
    def setUpBucketSpec(self):
        raise NotImplementedError('Cannot run test unless setUpBucketSpec is implemented')

    @abstractmethod
    def setUpBucketMapper(self):
        raise NotImplementedError('Cannot run test unless setUpBucketMapper is implemented')

    def test_build(self):
        ''' Test default mapping for a Container that has a Container as an attribute value '''
        builder = self.manager.build(self.foo_bucket)
        self.assertDictEqual(builder, self.bucket_builder)

    def test_construct(self):
        container = self.manager.construct(self.bucket_builder)
        self.assertEqual(container, self.foo_bucket)
Example 5
    def setUp(self):
        self.foo_spec = GroupSpec(
            doc='A test group specification with a data type',
            data_type_def='Foo',
            datasets=[
                DatasetSpec(doc='an example dataset',
                            dtype='int',
                            name='my_data',
                            attributes=[
                                AttributeSpec(
                                    name='attr2',
                                    doc='an example integer attribute',
                                    dtype='int')
                            ])
            ],
            attributes=[
                AttributeSpec('attr1', 'an example string attribute', 'text')
            ])

        self.spec_catalog = SpecCatalog()
        self.spec_catalog.register_spec(self.foo_spec, 'test.yaml')
        self.namespace = SpecNamespace('a test namespace',
                                       CORE_NAMESPACE, [{
                                           'source': 'test.yaml'
                                       }],
                                       version='0.1.0',
                                       catalog=self.spec_catalog)
        self.namespace_catalog = NamespaceCatalog()
        self.namespace_catalog.add_namespace(CORE_NAMESPACE, self.namespace)
        self.type_map = TypeMap(self.namespace_catalog)
        self.type_map.register_container_type(CORE_NAMESPACE, 'Foo', Foo)
        self.type_map.register_map(Foo, FooMapper)
        self.manager = BuildManager(self.type_map)
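
As a rough illustration (not part of the original fixture), building a Foo through this manager should yield a builder shaped like the hand-written ones in Example 1, with FooMapper placing attr2 on the 'my_data' dataset:

        foo = Foo('my_foo1', list(range(10)), 'value1', 10)
        builder = self.manager.build(foo)
        # expected layout, cf. Example 1:
        # GroupBuilder('my_foo1',
        #              datasets={'my_data': DatasetBuilder('my_data', list(range(10)),
        #                                                  attributes={'attr2': 10})},
        #              attributes={'attr1': 'value1', 'data_type': 'Foo',
        #                          'namespace': CORE_NAMESPACE, 'object_id': foo.object_id})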
Example 6
def get_manager(**kwargs):
    '''
    Get a BuildManager to use for I/O using the given extensions. If no extensions are provided,
    return a BuildManager that uses the core namespace
    '''
    type_map = call_docval_func(get_type_map, kwargs)
    return BuildManager(type_map)
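
Hypothetical usage of the returned manager, following the pattern of the NWBHDF5IO examples further below ('example.nwb' is a placeholder path):

manager = get_manager()
with NWBHDF5IO('example.nwb', mode='r', manager=manager) as io:
    nwbfile = io.read()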
Example 7
 def setUpManager(self, specs):
     spec_catalog = SpecCatalog()
     schema_file = 'test.yaml'
     for s in specs:
         spec_catalog.register_spec(s, schema_file)
     namespace = SpecNamespace(doc='a test namespace',
                               name=CORE_NAMESPACE,
                               schema=[{
                                   'source': schema_file
                               }],
                               version='0.1.0',
                               catalog=spec_catalog)
     namespace_catalog = NamespaceCatalog()
     namespace_catalog.add_namespace(CORE_NAMESPACE, namespace)
     type_map = TypeMap(namespace_catalog)
     type_map.register_container_type(CORE_NAMESPACE, 'SimpleFoo',
                                      SimpleFoo)
     type_map.register_container_type(CORE_NAMESPACE, 'NotSimpleFoo',
                                      NotSimpleFoo)
     type_map.register_container_type(CORE_NAMESPACE, 'SimpleQux',
                                      SimpleQux)
     type_map.register_container_type(CORE_NAMESPACE, 'NotSimpleQux',
                                      NotSimpleQux)
     type_map.register_container_type(CORE_NAMESPACE, 'SimpleBucket',
                                      SimpleBucket)
     type_map.register_map(SimpleBucket, self.setUpBucketMapper())
     self.manager = BuildManager(type_map)
Example 8
 def setUp(self):
     self.bar_spec = GroupSpec(
         'A test group specification with a data type',
         data_type_def='Bar',
         datasets=[
             DatasetSpec('an example dataset',
                         'int',
                         name='data',
                         attributes=[
                             AttributeSpec('attr2',
                                           'an example integer attribute',
                                           'int')
                         ])
         ],
         attributes=[
             AttributeSpec('attr1', 'an example string attribute', 'text')
         ])
     self.spec_catalog = SpecCatalog()
     self.spec_catalog.register_spec(self.bar_spec, 'test.yaml')
     self.namespace = SpecNamespace('a test namespace',
                                    CORE_NAMESPACE, [{
                                        'source': 'test.yaml'
                                    }],
                                    catalog=self.spec_catalog)
     self.namespace_catalog = NamespaceCatalog()
     self.namespace_catalog.add_namespace(CORE_NAMESPACE, self.namespace)
     self.type_map = TypeMap(self.namespace_catalog)
     self.type_map.register_container_type(CORE_NAMESPACE, 'Bar', Bar)
     self.type_map.register_map(Bar, ObjectMapper)
     self.manager = BuildManager(self.type_map)
     self.mapper = ObjectMapper(self.bar_spec)
Example 9
    def test_bad_value(self):
        """Test that an error is raised if the container attribute value for a spec with a data type is not a container
        or collection of containers.
        """
        class Qux(Container):
            @docval(
                {
                    'name': 'name',
                    'type': str,
                    'doc': 'the name of this Qux'
                }, {
                    'name': 'foo',
                    'type': int,
                    'doc': 'a group'
                })
            def __init__(self, **kwargs):
                name, foo = getargs('name', 'foo', kwargs)
                super().__init__(name=name)
                self.__foo = foo
                if isinstance(foo, Foo):
                    self.__foo.parent = self

            @property
            def foo(self):
                return self.__foo

        self.qux_spec = GroupSpec(
            doc='A test group specification with data type Qux',
            data_type_def='Qux',
            groups=[GroupSpec('an example group', data_type_inc='Foo')])
        self.foo_spec = GroupSpec(
            'A test group specification with data type Foo',
            data_type_def='Foo')
        self.spec_catalog = SpecCatalog()
        self.spec_catalog.register_spec(self.qux_spec, 'test.yaml')
        self.spec_catalog.register_spec(self.foo_spec, 'test.yaml')
        self.namespace = SpecNamespace('a test namespace',
                                       CORE_NAMESPACE, [{
                                           'source': 'test.yaml'
                                       }],
                                       version='0.1.0',
                                       catalog=self.spec_catalog)
        self.namespace_catalog = NamespaceCatalog()
        self.namespace_catalog.add_namespace(CORE_NAMESPACE, self.namespace)
        self.type_map = TypeMap(self.namespace_catalog)
        self.type_map.register_container_type(CORE_NAMESPACE, 'Qux', Qux)
        self.type_map.register_container_type(CORE_NAMESPACE, 'Foo', Foo)
        self.manager = BuildManager(self.type_map)
        self.mapper = ObjectMapper(self.qux_spec)

        container = Qux('my_qux', foo=1)
        msg = "Qux 'my_qux' attribute 'foo' has unexpected type."
        with self.assertRaisesWith(ContainerConfigurationError, msg):
            self.mapper.build(container, self.manager)
Example 10
 def setUp(self):
     self.setUpBazSpec()
     self.spec_catalog = SpecCatalog()
     self.spec_catalog.register_spec(self.baz_spec, 'test.yaml')
     self.namespace = SpecNamespace('a test namespace',
                                    CORE_NAMESPACE, [{
                                        'source': 'test.yaml'
                                    }],
                                    catalog=self.spec_catalog)
     self.namespace_catalog = NamespaceCatalog()
     self.namespace_catalog.add_namespace(CORE_NAMESPACE, self.namespace)
     self.type_map = TypeMap(self.namespace_catalog)
     self.type_map.register_container_type(CORE_NAMESPACE, 'Baz', Baz)
     self.type_map.register_map(Baz, ObjectMapper)
     self.manager = BuildManager(self.type_map)
     self.mapper = ObjectMapper(self.baz_spec)
Example 11
    def __init__(self, **kwargs):
        path, mode, manager, extensions, load_namespaces, file_obj, comm, driver =\
            popargs('path', 'mode', 'manager', 'extensions', 'load_namespaces', 'file', 'comm', 'driver', kwargs)
        if load_namespaces:
            if manager is not None:
                warn("loading namespaces from file - ignoring 'manager'")
            if extensions is not None:
                warn(
                    "loading namespaces from file - ignoring 'extensions' argument"
                )
            # namespaces are not loaded when creating an NWBHDF5IO object in write mode
            if 'w' in mode or mode == 'x':
                raise ValueError(
                    "cannot load namespaces from file when writing to it")

            tm = get_type_map()
            super(NWBHDF5IO, self).load_namespaces(tm,
                                                   path,
                                                   file=file_obj,
                                                   driver=driver)
            manager = BuildManager(tm)

            # XXX: Leaving this here in case we want to revert to this strategy for
            #      loading cached namespaces
            # ns_catalog = NamespaceCatalog(NWBGroupSpec, NWBDatasetSpec, NWBNamespace)
            # super(NWBHDF5IO, self).load_namespaces(ns_catalog, path)
            # tm = TypeMap(ns_catalog)
            # tm.copy_mappers(get_type_map())
        else:
            if manager is not None and extensions is not None:
                raise ValueError(
                    "'manager' and 'extensions' cannot be specified together")
            elif extensions is not None:
                manager = get_manager(extensions=extensions)
            elif manager is None:
                manager = get_manager()
        super(NWBHDF5IO, self).__init__(path,
                                        manager=manager,
                                        mode=mode,
                                        file=file_obj,
                                        comm=comm,
                                        driver=driver)
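
A hedged usage sketch of the constructor above: with load_namespaces=True and no manager, the IO builds its own BuildManager from the namespaces cached in the file ('data.nwb' is a placeholder path):

with NWBHDF5IO('data.nwb', mode='r', load_namespaces=True) as io:
    nwbfile = io.read()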
Example 12
 def setUp(self):
     self.set_up_specs()
     spec_catalog = SpecCatalog()
     spec_catalog.register_spec(self.bar_data_spec, 'test.yaml')
     spec_catalog.register_spec(self.bar_data_holder_spec, 'test.yaml')
     namespace = SpecNamespace(doc='a test namespace',
                               name=CORE_NAMESPACE,
                               schema=[{
                                   'source': 'test.yaml'
                               }],
                               version='0.1.0',
                               catalog=spec_catalog)
     namespace_catalog = NamespaceCatalog()
     namespace_catalog.add_namespace(CORE_NAMESPACE, namespace)
     type_map = TypeMap(namespace_catalog)
     type_map.register_container_type(CORE_NAMESPACE, 'BarData', BarData)
     type_map.register_container_type(CORE_NAMESPACE, 'BarDataHolder',
                                      BarDataHolder)
     type_map.register_map(BarData, ExtBarDataMapper)
     type_map.register_map(BarDataHolder, ObjectMapper)
     self.manager = BuildManager(type_map)
Example 13
    def __init__(self, **kwargs):
        path, mode, manager, extensions, load_namespaces, file_obj, comm =\
            popargs('path', 'mode', 'manager', 'extensions', 'load_namespaces', 'file', 'comm', kwargs)

        # root group
        self.__rgroup = file_obj
        chunk_store = getattr(file_obj, 'chunk_store', None)
        filename = None  # default if no chunk store path can be determined
        if chunk_store is not None:
            try:
                filename = getattr(chunk_store.source, 'path', None)
                if filename is None:
                    filename = chunk_store.source.name
            except Exception:
                filename = None
        if filename is None:
            filename = f'{type(file_obj.store).__name__}'
        self.__rgroup.filename = filename

        file_obj = self.__set_rgroup(file_obj)

        self.__built = dict()       # keep track of each builder for each dataset/group/link for each file
        self.__read = dict()        # keep track of which files have been read. Key is the filename value is the builder
        self.__file = file_obj

        if load_namespaces:
            if manager is not None:
                warn("loading namespaces from file - ignoring 'manager'")
            if extensions is not None:
                warn("loading namespaces from file - ignoring 'extensions' argument")
            # namespaces are not loaded when creating an NWBZARRHDF5IO object in write mode
            if 'w' in mode or mode == 'x':
                raise ValueError("cannot load namespaces from file when writing to it")

            tm = get_type_map()
            self.load_namespaces(tm, path, file=file_obj)
            manager = BuildManager(tm)

            # XXX: Leaving this here in case we want to revert to this strategy for
            #      loading cached namespaces
            # ns_catalog = NamespaceCatalog(NWBGroupSpec, NWBDatasetSpec, NWBNamespace)
            # super(NWBZARRHDF5IO, self).load_namespaces(ns_catalog, path)
            # tm = TypeMap(ns_catalog)
            # tm.copy_mappers(get_type_map())
        else:
            if manager is not None and extensions is not None:
                raise ValueError("'manager' and 'extensions' cannot be specified together")
            elif extensions is not None:
                manager = get_manager(extensions=extensions)
            elif manager is None:
                manager = get_manager()

        self.logger = logging.getLogger('%s.%s' % (self.__class__.__module__, self.__class__.__qualname__))

        if file_obj is not None:
            if path is None:
                path = file_obj.filename
            elif os.path.abspath(file_obj.filename) != os.path.abspath(path):
                msg = 'You argued %s as this object\'s path, ' % path
                msg += 'but supplied a file with filename: %s' % file_obj.filename
                raise ValueError(msg)
        elif path is None:
            raise TypeError("Must supply either 'path' or 'file' arg to HDF5IO.")

        if file_obj is None and not os.path.exists(path) and (mode == 'r' or mode == 'r+'):
            msg = "Unable to open file %s in '%s' mode. File does not exist." % (path, mode)
            raise UnsupportedOperation(msg)

        if file_obj is None and os.path.exists(path) and (mode == 'w-' or mode == 'x'):
            msg = "Unable to open file %s in '%s' mode. File already exists." % (path, mode)
            raise UnsupportedOperation(msg)

        if manager is None:
            manager = BuildManager(TypeMap(NamespaceCatalog()))
        elif isinstance(manager, TypeMap):
            manager = BuildManager(manager)

        # TO DO #
        self._HDF5IO__comm = comm
        self._HDF5IO__mode = mode
        self._HDF5IO__path = path
        self._HDF5IO__file = file_obj
        super(_HDF5IO, self).__init__(manager, source=path)
        self._HDF5IO__ref_queue = deque()  # a queue of the references that need to be added
        self._HDF5IO__dci_queue = deque()  # a queue of DataChunkIterators that need to be exhausted
Example 14
def _get_manager():

    foo_spec = GroupSpec('A test group specification with a data type',
                         data_type_def='Foo',
                         datasets=[DatasetSpec('an example dataset',
                                               'int',
                                               name='my_data',
                                               attributes=[AttributeSpec('attr2',
                                                                         'an example integer attribute',
                                                                         'int')])],
                         attributes=[AttributeSpec('attr1', 'an example string attribute', 'text')])

    tmp_spec = GroupSpec('A subgroup for Foos',
                         name='foo_holder',
                         groups=[GroupSpec('the Foos in this bucket', data_type_inc='Foo', quantity=ZERO_OR_MANY)])

    bucket_spec = GroupSpec('A test group specification for a data type containing data type',
                            data_type_def='FooBucket',
                            groups=[tmp_spec])

    class BucketMapper(ObjectMapper):
        def __init__(self, spec):
            super(BucketMapper, self).__init__(spec)
            foo_spec = spec.get_group('foo_holder').get_data_type('Foo')
            self.map_spec('foos', foo_spec)

    file_spec = GroupSpec("A file of Foos contained in FooBuckets",
                          name='root',
                          data_type_def='FooFile',
                          groups=[GroupSpec('Holds the FooBuckets',
                                            name='buckets',
                                            groups=[GroupSpec("One or more FooBuckets",
                                                              data_type_inc='FooBucket',
                                                              quantity=ONE_OR_MANY)])])

    class FileMapper(ObjectMapper):
        def __init__(self, spec):
            super(FileMapper, self).__init__(spec)
            bucket_spec = spec.get_group('buckets').get_data_type('FooBucket')
            self.map_spec('buckets', bucket_spec)

    spec_catalog = SpecCatalog()
    spec_catalog.register_spec(foo_spec, 'test.yaml')
    spec_catalog.register_spec(bucket_spec, 'test.yaml')
    spec_catalog.register_spec(file_spec, 'test.yaml')
    namespace = SpecNamespace(
        'a test namespace',
        CORE_NAMESPACE,
        [{'source': 'test.yaml'}],
        catalog=spec_catalog)
    namespace_catalog = NamespaceCatalog()
    namespace_catalog.add_namespace(CORE_NAMESPACE, namespace)
    type_map = TypeMap(namespace_catalog)

    type_map.register_container_type(CORE_NAMESPACE, 'Foo', Foo)
    type_map.register_container_type(CORE_NAMESPACE, 'FooBucket', FooBucket)
    type_map.register_container_type(CORE_NAMESPACE, 'FooFile', FooFile)

    type_map.register_map(FooBucket, BucketMapper)
    type_map.register_map(FooFile, FileMapper)

    manager = BuildManager(type_map)
    return manager
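
A minimal round-trip sketch using the manager above (Foo and FooBucket constructor arguments follow Examples 1 and 3; the exact builder contents are not asserted here):

manager = _get_manager()
bucket = FooBucket('bucket1', [Foo('foo1', list(range(5)), 'a', 17)])
builder = manager.build(bucket)          # BucketMapper nests the Foos under the 'foo_holder' group
container = manager.construct(builder)   # reconstructs an equivalent FooBucket from the builder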
Example 15
def main():

    ep = """
    use --nspath to validate against an extension. If --ns is not specified,
    validate against all namespaces in namespace file.
    """

    parser = ArgumentParser(description="Validate an NWB file", epilog=ep)
    parser.add_argument("paths", type=str, nargs='+', help="NWB file paths")
    parser.add_argument('-p',
                        '--nspath',
                        type=str,
                        help="the path to the namespace YAML file")
    parser.add_argument("-n",
                        "--ns",
                        type=str,
                        help="the namespace to validate against")

    feature_parser = parser.add_mutually_exclusive_group(required=False)
    feature_parser.add_argument("--cached-namespace",
                                dest="cached_namespace",
                                action='store_true',
                                help="Use the cached namespace (default).")
    feature_parser.add_argument('--no-cached-namespace',
                                dest="cached_namespace",
                                action='store_false',
                                help="Don't use the cached namespace.")
    parser.set_defaults(cached_namespace=True)

    args = parser.parse_args()
    ret = 0

    if args.nspath:
        if not os.path.isfile(args.nspath):
            print("The namespace file {} is not a valid file.".format(
                args.nspath),
                  file=sys.stderr)
            sys.exit(1)

        if args.cached_namespace:
            print(
                "Turning off validation against cached namespace information "
                "as --nspath was passed.",
                file=sys.stderr)
            args.cached_namespace = False

    for path in args.paths:

        if not os.path.isfile(path):
            print("The file {} does not exist.".format(path), file=sys.stderr)
            ret = 1
            continue

        if args.cached_namespace:
            catalog = NamespaceCatalog(NWBGroupSpec, NWBDatasetSpec,
                                       NWBNamespace)
            namespaces = HDF5IO.load_namespaces(catalog, path).keys()
            if len(namespaces) > 0:
                tm = TypeMap(catalog)
                manager = BuildManager(tm)
                specloc = "cached namespace information"
            else:
                manager = None
                namespaces = available_namespaces()
                specloc = "pynwb namespace information"
                print("The file {} has no cached namespace information. "
                      "Falling back to {}.".format(path, specloc),
                      file=sys.stderr)
        elif args.nspath:
            catalog = NamespaceCatalog(NWBGroupSpec, NWBDatasetSpec,
                                       NWBNamespace)
            namespaces = catalog.load_namespaces(args.nspath)

            if len(namespaces) == 0:
                print("Could not load namespaces from file {}.".format(
                    args.nspath),
                      file=sys.stderr)
                sys.exit(1)

            tm = TypeMap(catalog)
            manager = BuildManager(tm)
            specloc = "--nspath namespace information"
        else:
            manager = None
            namespaces = available_namespaces()
            specloc = "pynwb namespace information"

        if args.ns:
            if args.ns in namespaces:
                namespaces = [args.ns]
            else:
                print("The namespace {} could not be found in {}.".format(
                    args.ns, specloc),
                      file=sys.stderr)
                ret = 1
                continue

        with NWBHDF5IO(path, mode='r', manager=manager) as io:
            for ns in namespaces:
                print("Validating {} against {} using namespace {}.".format(
                    path, specloc, ns))
                ret = ret or _validate_helper(io=io, namespace=ns)

    sys.exit(ret)
Example 16
class TestReference(TestCase):
    def setUp(self):
        self.foo_spec = GroupSpec(
            'A test group specification with data type Foo',
            data_type_def='Foo')
        self.bar_spec = GroupSpec(
            'A test group specification with a data type Bar',
            data_type_def='Bar',
            datasets=[DatasetSpec('an example dataset', 'int', name='data')],
            attributes=[
                AttributeSpec('attr1', 'an example string attribute', 'text'),
                AttributeSpec('attr2', 'an example integer attribute', 'int'),
                AttributeSpec('foo',
                              'a referenced foo',
                              RefSpec('Foo', 'object'),
                              required=False)
            ])

        self.spec_catalog = SpecCatalog()
        self.spec_catalog.register_spec(self.foo_spec, 'test.yaml')
        self.spec_catalog.register_spec(self.bar_spec, 'test.yaml')
        self.namespace = SpecNamespace('a test namespace',
                                       CORE_NAMESPACE, [{
                                           'source': 'test.yaml'
                                       }],
                                       version='0.1.0',
                                       catalog=self.spec_catalog)
        self.namespace_catalog = NamespaceCatalog()
        self.namespace_catalog.add_namespace(CORE_NAMESPACE, self.namespace)
        self.type_map = TypeMap(self.namespace_catalog)
        self.type_map.register_container_type(CORE_NAMESPACE, 'Foo', Foo)
        self.type_map.register_container_type(CORE_NAMESPACE, 'Bar', Bar)
        self.manager = BuildManager(self.type_map)
        self.foo_mapper = ObjectMapper(self.foo_spec)
        self.bar_mapper = ObjectMapper(self.bar_spec)

    def test_build_attr_ref(self):
        ''' Test default mapping functionality when one container contains an attribute reference to another container.
        '''
        foo_inst = Foo('my_foo')
        bar_inst1 = Bar('my_bar1', list(range(10)), 'value1', 10, foo=foo_inst)
        bar_inst2 = Bar('my_bar2', list(range(10)), 'value1', 10)

        foo_builder = self.manager.build(foo_inst, root=True)
        bar1_builder = self.manager.build(bar_inst1, root=True)  # adds refs
        bar2_builder = self.manager.build(bar_inst2, root=True)

        foo_expected = GroupBuilder('my_foo',
                                    attributes={
                                        'data_type': 'Foo',
                                        'namespace': CORE_NAMESPACE,
                                        'object_id': foo_inst.object_id
                                    })
        bar1_expected = GroupBuilder(
            'n/a',  # name doesn't matter
            datasets={'data': DatasetBuilder('data', list(range(10)))},
            attributes={
                'attr1': 'value1',
                'attr2': 10,
                'foo': ReferenceBuilder(foo_expected),
                'data_type': 'Bar',
                'namespace': CORE_NAMESPACE,
                'object_id': bar_inst1.object_id
            })
        bar2_expected = GroupBuilder(
            'n/a',  # name doesn't matter
            datasets={'data': DatasetBuilder('data', list(range(10)))},
            attributes={
                'attr1': 'value1',
                'attr2': 10,
                'data_type': 'Bar',
                'namespace': CORE_NAMESPACE,
                'object_id': bar_inst2.object_id
            })
        self.assertDictEqual(foo_builder, foo_expected)
        self.assertDictEqual(bar1_builder, bar1_expected)
        self.assertDictEqual(bar2_builder, bar2_expected)

    def test_build_attr_ref_invalid(self):
        ''' Test that building raises an error when the attribute reference value is not an AbstractContainer.
        '''
        bar_inst1 = Bar('my_bar1', list(range(10)), 'value1', 10)
        bar_inst1._Bar__foo = object()  # make foo object a non-container type

        msg = "invalid type for reference 'foo' (<class 'object'>) - must be AbstractContainer"
        with self.assertRaisesWith(ValueError, msg):
            self.bar_mapper.build(bar_inst1, self.manager)
Example 17
def main():

    ep = """
    use --nspath to validate against an extension. If --ns is not specified,
    validate against all namespaces in namespace file.
    """

    parser = ArgumentParser(description="Validate an NWB file", epilog=ep)
    parser.add_argument("paths", type=str, nargs='+', help="NWB file paths")
    parser.add_argument('-p', '--nspath', type=str, help="the path to the namespace YAML file")
    parser.add_argument("-n", "--ns", type=str, help="the namespace to validate against")
    parser.add_argument("-lns", "--list-namespaces", dest="list_namespaces",
                        action='store_true', help="List the available namespaces and exit.")

    feature_parser = parser.add_mutually_exclusive_group(required=False)
    feature_parser.add_argument("--cached-namespace", dest="cached_namespace", action='store_true',
                                help="Use the cached namespace (default).")
    feature_parser.add_argument('--no-cached-namespace', dest="cached_namespace", action='store_false',
                                help="Don't use the cached namespace.")
    parser.set_defaults(cached_namespace=True)

    args = parser.parse_args()
    ret = 0

    if args.nspath:
        if not os.path.isfile(args.nspath):
            print("The namespace file {} is not a valid file.".format(args.nspath), file=sys.stderr)
            sys.exit(1)

        if args.cached_namespace:
            print("Turning off validation against cached namespace information "
                  "as --nspath was passed.", file=sys.stderr)
            args.cached_namespace = False

    for path in args.paths:

        if not os.path.isfile(path):
            print("The file {} does not exist.".format(path), file=sys.stderr)
            ret = 1
            continue

        if args.cached_namespace:
            catalog = NamespaceCatalog(NWBGroupSpec, NWBDatasetSpec, NWBNamespace)
            ns_deps = NWBHDF5IO.load_namespaces(catalog, path)
            s = set(ns_deps.keys())       # determine which namespaces are the most
            for k in ns_deps:             # specific (i.e. extensions) and validate
                s -= ns_deps[k].keys()    # against those
            # TODO remove this workaround for issue https://github.com/NeurodataWithoutBorders/pynwb/issues/1357
            if 'hdmf-experimental' in s:
                s.remove('hdmf-experimental')  # remove validation of hdmf-experimental for now
            namespaces = list(sorted(s))
            if len(namespaces) > 0:
                tm = TypeMap(catalog)
                manager = BuildManager(tm)
                specloc = "cached namespace information"
            else:
                manager = None
                namespaces = [CORE_NAMESPACE]
                specloc = "pynwb namespace information"
                print("The file {} has no cached namespace information. "
                      "Falling back to {}.".format(path, specloc), file=sys.stderr)
        elif args.nspath:
            catalog = NamespaceCatalog(NWBGroupSpec, NWBDatasetSpec, NWBNamespace)
            namespaces = catalog.load_namespaces(args.nspath)

            if len(namespaces) == 0:
                print("Could not load namespaces from file {}.".format(args.nspath), file=sys.stderr)
                sys.exit(1)

            tm = TypeMap(catalog)
            manager = BuildManager(tm)
            specloc = "--nspath namespace information"
        else:
            manager = None
            namespaces = [CORE_NAMESPACE]
            specloc = "pynwb namespace information"

        if args.list_namespaces:
            print("\n".join(namespaces))
            ret = 0
            continue

        if args.ns:
            if args.ns in namespaces:
                namespaces = [args.ns]
            else:
                print("The namespace {} could not be found in {} as only {} is present.".format(
                      args.ns, specloc, namespaces), file=sys.stderr)
                ret = 1
                continue

        with NWBHDF5IO(path, mode='r', manager=manager) as io:
            for ns in namespaces:
                print("Validating {} against {} using namespace {}.".format(path, specloc, ns))
                ret = ret or _validate_helper(io=io, namespace=ns)

    sys.exit(ret)
Example 18
    def setUp(self):
        self.dt_spec = GroupSpec(
            'A test extension that contains a dynamic table',
            data_type_def='TestTable',
            data_type_inc='DynamicTable',
            datasets=[
                DatasetSpec(
                    data_type_inc='VectorData',
                    name='my_col',
                    doc='a test column',
                    dtype='float'
                ),
                DatasetSpec(
                    data_type_inc='VectorData',
                    name='indexed_col',
                    doc='a test column',
                    dtype='float'
                ),
                DatasetSpec(
                    data_type_inc='VectorIndex',
                    name='indexed_col_index',
                    doc='a test column',
                ),
                DatasetSpec(
                    data_type_inc='VectorData',
                    name='optional_col1',
                    doc='a test column',
                    dtype='float',
                    quantity='?',
                ),
                DatasetSpec(
                    data_type_inc='VectorData',
                    name='optional_col2',
                    doc='a test column',
                    dtype='float',
                    quantity='?',
                )
            ]
        )

        self.dt_spec2 = GroupSpec(
            'A test extension that contains a dynamic table',
            data_type_def='TestDTRTable',
            data_type_inc='DynamicTable',
            datasets=[
                DatasetSpec(
                    data_type_inc='DynamicTableRegion',
                    name='ref_col',
                    doc='a test column',
                ),
                DatasetSpec(
                    data_type_inc='DynamicTableRegion',
                    name='indexed_ref_col',
                    doc='a test column',
                ),
                DatasetSpec(
                    data_type_inc='VectorIndex',
                    name='indexed_ref_col_index',
                    doc='a test column',
                ),
                DatasetSpec(
                    data_type_inc='DynamicTableRegion',
                    name='optional_ref_col',
                    doc='a test column',
                    quantity='?'
                ),
                DatasetSpec(
                    data_type_inc='DynamicTableRegion',
                    name='optional_indexed_ref_col',
                    doc='a test column',
                    quantity='?'
                ),
                DatasetSpec(
                    data_type_inc='VectorIndex',
                    name='optional_indexed_ref_col_index',
                    doc='a test column',
                    quantity='?'
                ),
                DatasetSpec(
                    data_type_inc='VectorData',
                    name='optional_col3',
                    doc='a test column',
                    dtype='float',
                    quantity='?',
                )
            ]
        )

        from hdmf.spec.write import YAMLSpecWriter
        writer = YAMLSpecWriter(outdir='.')

        self.spec_catalog = SpecCatalog()
        self.spec_catalog.register_spec(self.dt_spec, 'test.yaml')
        self.spec_catalog.register_spec(self.dt_spec2, 'test.yaml')
        self.namespace = SpecNamespace(
            'a test namespace', CORE_NAMESPACE,
            [
                dict(
                    namespace='hdmf-common',
                ),
                dict(source='test.yaml'),
            ],
            version='0.1.0',
            catalog=self.spec_catalog
        )

        self.test_dir = tempfile.mkdtemp()
        spec_fpath = os.path.join(self.test_dir, 'test.yaml')
        namespace_fpath = os.path.join(self.test_dir, 'test-namespace.yaml')
        writer.write_spec(dict(groups=[self.dt_spec, self.dt_spec2]), spec_fpath)
        writer.write_namespace(self.namespace, namespace_fpath)
        self.namespace_catalog = NamespaceCatalog()
        hdmf_typemap = get_type_map()
        self.type_map = TypeMap(self.namespace_catalog)
        self.type_map.merge(hdmf_typemap, ns_catalog=True)
        self.type_map.load_namespaces(namespace_fpath)
        self.manager = BuildManager(self.type_map)

        self.TestTable = self.type_map.get_dt_container_cls('TestTable', CORE_NAMESPACE)
        self.TestDTRTable = self.type_map.get_dt_container_cls('TestDTRTable', CORE_NAMESPACE)