Example #1
    def nodes(self):
        nodes = OrderedDict()
        linemap = {}
        for line in self.section.nodes:
            try:
                nodename, node = find_obj(line, True)
            except (AttributeError, ModuleNotFoundError):
                raise ConfigError(
                    f'Collection {self.name!r} cannot find node {line!r}')

            node = Node.from_dag_node(node)

            if node in linemap:
                raise ConfigError(
                    f'Collection {self.name!r} node {line!r} already listed as {linemap[node]!r}'
                )
            linemap[node] = line
            if nodename in nodes:
                raise ConfigError(
                    f'Collection {self.name!r} duplicate name {nodename!r}')
            if not node.schema:
                raise ConfigError(
                    f'Collection {self.name!r} node {line!r} does not define schema'
                )
            nodes[nodename] = node
        return nodes
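All of these examples rely on find_obj from marv_api.utils. Judging from the exceptions caught above and the two-argument call find_obj(line, True), it resolves a textual reference to a Python object by importing a module and looking up an attribute on it, optionally returning the attribute name as well. A minimal sketch of such a resolver, assuming references of the form 'package.module.attr' or 'package.module:attr' (the real find_obj may differ in details):

    from importlib import import_module

    def resolve_obj(objpath, get_name=False):
        # Hypothetical helper mirroring the assumed behaviour of find_obj.
        if ':' in objpath:
            modpath, objname = objpath.split(':', 1)
        else:
            modpath, objname = objpath.rsplit('.', 1)
        # import_module raises ModuleNotFoundError and getattr raises
        # AttributeError, the two exceptions handled in Example #1.
        obj = getattr(import_module(modpath), objname)
        return (objname, obj) if get_name else obj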
Example #2
    @classmethod
    def from_dag_node(cls, func):
        if hasattr(func, '__marv_node__'):
            dnode = func.__marv_node__
        else:
            dnode = func
            func = None

        node = NODE_CACHE.get(dnode)
        if node is not None:
            return node

        if func is None:
            func = find_obj(dnode.function)
        namespace, name = dnode.function.rsplit('.', 1)
        schema = find_obj(
            dnode.message_schema) if dnode.message_schema is not None else None
        inputs = dnode.inputs
        specs = OrderedDict()
        for input_name in inputs.__fields__:
            value = getattr(inputs, input_name)
            if isinstance(value, dag.Stream):
                # Stream inputs reference another DAG node; convert it recursively.
                value = StreamSpec(node=Node.from_dag_node(value.node),
                                   name=value.name)
            specs[input_name] = InputSpec(name=input_name,
                                          value=value,
                                          foreach=input_name == dnode.foreach)
        node = cls(func,
                   schema=schema,
                   version=dnode.version,
                   name=name,
                   namespace=namespace,
                   specs=specs,
                   group=dnode.group,
                   dag_node=dnode)
        NODE_CACHE[dnode] = node
        return node
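Converted nodes are memoized in NODE_CACHE, so converting the same decorated function twice returns a single shared Node instance. A short usage sketch with a hypothetical node function named images:

    first = Node.from_dag_node(images)    # resolved via images.__marv_node__
    second = Node.from_dag_node(images)
    assert first is second                # served from NODE_CACHE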
Example #3
    def __init__(self, site, app_root='', middlewares=None):
        self.aioapp = web.Application(middlewares=middlewares or [])
        self.aioapp['api_endpoints'] = {}
        self.aioapp['app_root'] = app_root.rstrip('/')
        self.aioapp['config'] = {
            'SECRET_KEY': site.config.marv.sessionkey_file.read_text(),
        }
        self.aioapp['debug'] = False
        self.aioapp['route_acl'] = find_obj(site.config.marv.acl)()
        self.aioapp['site'] = site
        self.aioapp.route = self.route

        for func in self.STARTUP_FNS:
            self.aioapp.on_startup.append(func)

        for func in self.SHUTDOWN_FNS:
            self.aioapp.on_shutdown.append(func)

        self.initialize_routes()
Example #4
    @classmethod
    def from_segments(cls, meta, *segments):
        from marv_api.utils import find_obj  # pylint: disable=import-outside-toplevel

        schema = find_obj(meta.pop('protoname'))
        struct_reader = schema.from_segments(segments)
        return cls(struct_reader, **meta)
Example #5
    async def run(self,
                  setid,
                  selected_nodes=None,
                  deps=None,
                  force=None,
                  keep=None,
                  force_dependent=None,
                  update_detail=None,
                  update_listing=None,
                  excluded_nodes=None,
                  cachesize=None):
        # pylint: disable=too-many-arguments,too-many-locals,too-many-branches

        assert not force_dependent or selected_nodes

        excluded_nodes = set(excluded_nodes or [])
        async with scoped_session(self.db) as txn:
            dataset = await Dataset.get(setid=setid)\
                                   .prefetch_related('collection', 'files')\
                                   .using_db(txn)
        collection = self.collections[dataset.collection.name]
        selected_nodes = set(selected_nodes or [])
        if not (selected_nodes or update_listing or update_detail):
            selected_nodes.update(collection.listing_deps)
            selected_nodes.update(collection.detail_deps)
        persistent = collection.nodes
        try:
            nodes = {
                persistent[name] if ':' not in name else Node.from_dag_node(
                    find_obj(name))
                for name in selected_nodes if name not in excluded_nodes
                if name != 'dataset'
            }
        except KeyError as exc:
            raise ConfigError(
                f'Collection {collection.name!r} has no node {exc}')

        if force_dependent:
            nodes.update(x for name in selected_nodes
                         for x in persistent[name].dependent)
        nodes = sorted(nodes)

        storedir = self.config.marv.storedir
        store = Store(storedir, persistent)

        changed = False
        try:
            if nodes:
                changed = await run_nodes(dataset,
                                          nodes,
                                          store,
                                          force=force,
                                          persistent=persistent,
                                          deps=deps,
                                          cachesize=cachesize,
                                          site=self)
        finally:
            if not keep:
                # Close stream files left open by the run, then clean up the
                # temporary directories of pending streams and release their locks.
                for stream in store.pending:
                    if stream.streamfile:
                        stream.streamfile.close()
                for stream in store.readstreams:
                    if stream.streamfile:
                        stream.streamfile.close()
                for tmpdir, tmpdir_fd in store.pending.values():
                    store.logdebug('Cleaning up %r', tmpdir)
                    shutil.rmtree(tmpdir)
                    fcntl.flock(tmpdir_fd, fcntl.LOCK_UN)
                    os.close(tmpdir_fd)
                store.pending.clear()

        if changed or update_detail:
            collection.render_detail(dataset)
            log.verbose('%s detail rendered', setid)
        if changed or update_listing:
            await collection.update_listings([dataset])
            log.verbose('%s listing rendered', setid)

        return changed
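run() is a coroutine that returns whether any selected node produced new output, which in turn drives the detail and listing re-rendering above. A hypothetical invocation sketch (the site instance, setid value, and node name are assumptions):

    changed = await site.run(
        setid,                        # SetID of the dataset to process
        selected_nodes=['images'],    # persistent node names or references resolved via find_obj
        force=True,                   # re-run the selected nodes even if output exists
    )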
Example #6
    def convert(self, value, param, ctx):
        return find_obj(value)
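The convert(self, value, param, ctx) signature matches click's ParamType interface, so this method presumably belongs to a custom parameter type that resolves an object reference while the command line is parsed. A sketch under that assumption, with a hypothetical class name ObjType:

    import click
    from marv_api.utils import find_obj

    class ObjType(click.ParamType):
        name = 'object_reference'

        def convert(self, value, param, ctx):
            return find_obj(value)

    @click.command()
    @click.option('--node', type=ObjType())
    def cli(node):
        click.echo(repr(node))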
Example #7
    def scanner(self):
        return find_obj(self.section.scanner)
Example #8
    def compare(self):
        return find_obj(self.section.compare) if self.section.compare else None