async def marvcli_comment_add(user, message, datasets):
    """Add comment as user for one or more datasets."""
    # Site owns the DB connection; the context manager guarantees teardown.
    async with create_site() as site:
        try:
            await site.db.comment_by_setids(datasets, user, message)
        except DBError as exc:
            # err() terminates the CLI with the given exit code.
            err(f'ERROR: {exc}', exit=1)
async def marvcli_scan(dry_run):
    """Scan for new and changed files."""
    # create_site() tears the site down even if the scan raises.
    async with create_site() as site:
        try:
            await site.scan(dry_run)
        except ConfigError as exc:
            # Configuration problems are user errors, not crashes.
            err(f'ERROR: {exc}', exit=1)
def detail_to_dict(obj):
    """Serialize a detail object to a dict, fixing up all contained widgets.

    Collects the summary widgets (if any) plus every section widget and runs
    fixup_widget over each; on failure the full dict and offending widget are
    dumped for debugging before the exception propagates.
    """
    dct = obj.to_dict(verbose=True, which=True)
    # Copy the summary widgets so extending does not mutate dct['summary'].
    all_widgets = list(dct.get('summary', {'widgets': []})['widgets'])
    for section in dct['sections']:
        all_widgets.extend(section['widgets'])
    for wdg in all_widgets:
        try:
            fixup_widget(wdg)
        except Exception:
            from pprint import pformat  # pylint: disable=import-outside-toplevel
            # Dump context to aid debugging, then re-raise unchanged.
            err(pformat(dct))
            err(pformat(wdg))
            raise
    return dct
async def marvcli_tag(ctx, add, remove, strict, datasets):
    """Add or remove tags to datasets."""
    # Without at least one tag operation and one dataset there is nothing
    # to do; show help and bail out.
    if not (add or remove) or not datasets:
        click.echo(ctx.get_help())
        ctx.exit(1)
    async with create_site() as site:
        try:
            await site.db.update_tags_by_setids(datasets, add, remove, idempotent=not strict)
        except DBPermissionError:
            if strict:
                err('ERROR: --strict prevented add of existing or remove of non-existing tag(s).',
                    exit=1)
            # Non-strict mode should never hit this; surface it as a bug.
            raise
async def marvcli_dump(dump_file):
    """Dump database to json file.

    Use '-' for stdout.
    """
    to_stdout = str(dump_file) == '-'
    # Refuse to clobber an existing dump file.
    if not to_stdout and dump_file.exists():
        err('ERROR: Dump file must not exist already!', exit=1)
    siteconf = make_config(get_site_config())
    try:
        dump = await Site.Database.dump_database(siteconf.marv.dburi)
    except (DBNotInitialized, DBVersionError) as exc:
        err(f'ERROR: {exc}', exit=1)
    if to_stdout:
        json.dump(dump, sys.stdout, sort_keys=True, indent=2)
    else:
        with dump_file.open('w') as stream:
            json.dump(dump, stream, sort_keys=True, indent=2)
def load_dataset(setdir, dataset):  # pylint: disable=redefined-outer-name
    """Wrap a database dataset row into a capnp Dataset wrapper.

    Builds a plain dict from the row (times converted to microseconds, files
    sorted by their index) and validates it against the Dataset schema.

    Raises:
        KjException: If the assembled dict violates the Dataset schema; the
            offending data is printed before re-raising.
    """
    setid = SetID(dataset.setid)
    files = [
        {
            'path': x.path,
            'missing': bool(x.missing),
            'mtime': x.mtime * 10**6,  # seconds -> microseconds
            'size': x.size,
        }
        for x in sorted(dataset.files, key=lambda x: x.idx)
    ]
    dct = {
        'id': setid,
        'name': dataset.name,
        'files': files,
        'time_added': dataset.time_added * 10**6,
        'timestamp': dataset.timestamp * 10**6,
    }
    try:
        wrapper = Wrapper.from_dict(Dataset, dct, setdir=setdir)
    except KjException:
        from pprint import pformat  # pylint: disable=import-outside-toplevel
        # f-string message consistent with Keyed.msg's schema-violation report.
        name = Dataset.schema.node.displayName
        err(f'Schema violation for {name} with data:\n{pformat(dct)}\nschema: {name}')
        # Bare raise preserves the original traceback without an extra frame.
        raise
    return [wrapper]
async def app_factory():
    """App factory used inside of worker.

    Note: Terminates execution via sys.exit(4) to prevent master from
    restarting worker.
    """
    try:
        site = await Site.create(config)
        try:
            aioapp = App(site, app_root=approot).aioapp
        except Exception:  # pylint: disable=broad-except
            # App construction failed after the site came up: release the
            # site before letting the outer handlers classify the error.
            await site.destroy()
            raise
    except (sqlite3.OperationalError, DBNotInitialized) as exc:
        err(f'{exc!r}\nDid you run marv init?', exit=4)
    except DBVersionError as exc:
        err(
            f'{exc!r}\n'
            'Existing database is not compatible with this version of MARV. '
            'Check the migration instructions.',
            exit=4,
        )
    except (PermissionError, SiteError) as exc:
        err(f'ERROR: {exc}', exit=4)
    except Exception:  # pylint: disable=broad-except
        # Unknown failure: show the traceback, still exit 4 so the master
        # does not respawn a broken worker.
        traceback.print_exc()
        sys.exit(4)
    return aioapp
def msg(self, __msg=None, _schema=NOTSET, **kw):
    """Create the next Msg for this stream from a message object or keyword fields.

    Exactly one of ``__msg`` or keyword fields must be given.  Plain dicts are
    validated and wrapped via the node schema unless the data is already a
    Wrapper/Handle/TheEnd.  ``_schema=NOTSET`` (sentinel) means "look up the
    current node's schema"; an explicit ``None`` skips wrapping entirely.
    """
    from .io import TheEnd
    # XOR: either a positional message or keyword fields, never both/neither.
    assert (__msg is not None) ^ bool(kw), (__msg, kw)
    data = kw if __msg is None else __msg
    if self.group:
        # Group streams only ever carry stream handles or end-of-stream markers.
        assert isinstance(data, (Handle, TheEnd)), (self, data)
    elif not isinstance(data, (Wrapper, Handle, TheEnd)):
        if _schema is NOTSET:
            # No schema passed explicitly: resolve from the running node's context.
            from marv_api.ioctrl import NODE_SCHEMA  # pylint: disable=import-outside-toplevel
            schema = NODE_SCHEMA.get()
        else:
            schema = _schema
        if schema is not None:
            try:
                data = Wrapper.from_dict(schema, data)
            except KjException:
                from pprint import pformat  # pylint: disable=import-outside-toplevel
                _node = schema.schema.node
                err(f'Schema violation for {_node.displayName} with data:\n'
                    f'{pformat(data)}\nschema: {_node.displayName}')
                raise
    # Messages are numbered per-stream via the instance counter.
    return Msg(next(self._counter), self, data)
async def create_site(init=None):
    """Yield a ready Site instance, destroying it on exit.

    Used as an async context manager by the CLI commands; translates the
    known startup failures into user-facing error messages (unless PDB
    debugging is active, in which case the original exception propagates).
    """
    site_config = get_site_config()
    try:
        site = await Site.create(site_config, init=init)
    except (sqlite3.OperationalError, DBNotInitialized) as exc:
        if PDB:
            raise
        err(f'{exc!r}\n\nDid you run marv init?\n', exit=1)
    except DBVersionError as exc:
        err(
            f'{exc!r}\n\n'
            'Existing database is not compatible with this version of MARV. '
            'Check the migration instructions.',
            exit=1,
        )
    except (ConfigError, SiteError) as exc:
        err(f'ERROR: {exc}', exit=1)
    try:
        yield site
    finally:
        # Always release site resources, even if the body raised.
        await site.destroy()
async def marvcli_run(  # noqa: C901
    ctx, datasets, deps, excluded_nodes, force, force_dependent, force_deps, keep,
    keep_going, list_nodes, list_dependent, selected_nodes, update_detail,
    update_listing, cachesize, collections,
):
    """Run nodes for selected datasets.

    Datasets are specified by a list of set ids, or --collection <name>, use
    --collection=* to run for all collections. --node in conjunction with
    --collection=* will pick those collections for which the selected nodes are
    configured.

    Set ids may be abbreviated to any uniquely identifying prefix.
    """
    # pylint: disable=too-many-arguments,too-many-locals,too-many-branches,too-many-statements
    # --- CLI argument validation ---------------------------------------
    if collections and datasets:
        ctx.fail('--collection and DATASETS are mutually exclusive')
    if list_dependent and not selected_nodes:
        ctx.fail('--list-dependent needs at least one selected --node')
    if force_dependent and not selected_nodes:
        ctx.fail('--force-dependent needs at least one selected --node')
    if not any([datasets, collections, list_nodes]):
        click.echo(ctx.get_help())
        ctx.exit(1)
    # --force-deps implies forcing dependencies and the nodes themselves.
    deps = 'force' if force_deps else deps
    force = force_deps or force
    async with create_site() as site:
        # --- resolve --collection arguments ----------------------------
        if '*' in collections:
            if selected_nodes:
                # Wildcard + --node: keep only collections configured with
                # all of the selected nodes.
                collections = [
                    k for k, v in site.collections.items()
                    if set(v.nodes).issuperset(selected_nodes)
                ]
                if not collections:
                    ctx.fail('No collections have all selected nodes')
            else:
                # Plain wildcard: None means "all collections" downstream.
                collections = None
        else:
            for col in collections:
                if col not in site.collections:
                    ctx.fail(f'Unknown collection: {col}')
        # --- informational listings (no node runs) ---------------------
        if list_nodes:
            for col in (collections or sorted(site.collections.keys())):
                click.echo(f'{col}:')
                for name in sorted(site.collections[col].nodes):
                    if name == 'dataset':
                        continue
                    click.echo(f' {name}')
            return
        if list_dependent:
            for col in (collections or sorted(site.collections.keys())):
                click.echo(f'{col}:')
                dependent = {
                    x for name in selected_nodes
                    for x in site.collections[col].nodes[name].dependent
                }
                for name in sorted(x.name for x in dependent):
                    click.echo(f' {name}')
            return
        errors = []
        if datasets:
            setids = datasets
        else:
            setids = await site.db.get_datasets_for_collections(collections)
        if not PDB:
            # TODO: Move signal handling into runner
            def handle_abort(_1, _2):
                marv_node.run.setabort()
            signal.signal(signal.SIGINT, handle_abort)
            signal.signal(signal.SIGTERM, handle_abort)
        # --- run nodes per dataset -------------------------------------
        for setid in setids:
            if PDB:
                # Debug mode: let any exception propagate to the debugger.
                await site.run(setid, selected_nodes, deps, force, keep, force_dependent,
                               update_detail, update_listing, excluded_nodes,
                               cachesize=cachesize)
            else:
                try:
                    await site.run(setid, selected_nodes, deps, force, keep, force_dependent,
                                   update_detail, update_listing, excluded_nodes,
                                   cachesize=cachesize)
                except ConfigError as exc:
                    err(f'ERROR: {exc}', exit=1)
                except DoesNotExist:
                    click.echo(f'ERROR: unknown {setid!r}', err=True)
                    if not keep_going:
                        raise
                except RequestedMessageTooOld as e:
                    _req = e.args[0]._requestor.node.name  # pylint: disable=no-member,protected-access
                    _handle = e.args[0].handle.node.name  # pylint: disable=no-member
                    click.echo(f"""
ERROR: {_req} pulled {_handle} message {e.args[1]} not being in memory anymore.
See https://ternaris.com/marv-robotics/docs/patterns.html#reduce-separately
""", err=True)
                    ctx.abort()
                except marv_node.run.Aborted:
                    ctx.abort()
                except ReaderError as e:
                    errors.append(setid)
                    log.error('Reader error for dataset %s: %s', setid, e)
                except Exception as e:  # pylint: disable=broad-except
                    errors.append(setid)
                    if isinstance(e, DirectoryAlreadyExists):
                        click.echo(f"""
ERROR: Directory for node run already exists: {e.args[0]!r}
In case no other node run is in progress, this is a bug which you are kindly
asked to report, providing information regarding any previous, failed node
runs.
""", err=True)
                        if not keep_going:
                            ctx.abort()
                    else:
                        log.error('Exception occured for dataset %s:', setid, exc_info=True)
                        log.error('Error occured for dataset %s: %s', setid, e)
                        if not keep_going:
                            ctx.exit(1)
        if errors:
            log.error('There were errors for %r', errors)
def run(self):
    """Run the arbiter, reporting startup failures as a CLI error."""
    try:
        NoSigintArbiter(self).run()
    except RuntimeError as exc:
        # Typical cause: gunicorn fails to bind/boot; exit with code 1.
        err(f'\nERROR: {exc}', exit=1)