# Example #1
def import_last_stop_dataset(instance_name, wait=True):
    """
    Reimport the last datasets of all instances with import_stops_in_mimir = true.

    The most recent 'pt' and 'poi' datasets of the instance are each sent to
    mimir through tasks.import_in_mimir.

    :param instance_name: name of the instance whose datasets are reimported
    :param wait: set wait=False if you don't want to wait for the result
    :raises Exception: if no existing instance matches instance_name
    """
    instance = models.Instance.query_existing().filter_by(
        name=instance_name).first()

    if not instance:
        raise Exception("cannot find instance {}".format(instance_name))

    # collect the latest public-transport dataset and the latest POI dataset
    files = [
        d.name for d in instance.last_datasets(nb_dataset=1, family_type='pt')
    ]
    files.extend([
        d.name for d in instance.last_datasets(nb_dataset=1, family_type='poi')
    ])
    logger = logging.getLogger(__name__)
    logger.info('we reimport to mimir the last dataset of %s, composed of: %s',
                instance.name, files)
    if files:
        for _file in files:
            future = tasks.import_in_mimir(_file, instance)
            if wait and future:
                wait_or_raise(future)
        # log completion once, after every file has been imported
        # (was previously logged inside the loop, once per file, which
        # announced "finished" before the whole job was done)
        logger.info('last datasets reimport finished for %s',
                    instance.name)
    else:
        logger.info('No file to reimport to mimir the last dataset of %s',
                    instance.name)
def import_last_autocomplete_dataset(instance_name, wait=True):
    """
    Reimport the last datasets of an autocomplete instance.

    :param instance_name: name of the autocomplete instance to reimport
    :param wait: set wait=False if you don't want to wait for the result
    :raises Exception: if no autocomplete instance matches instance_name
    """
    autocomplete = models.AutocompleteParameter.query.filter_by(name=instance_name).first()

    if autocomplete is None:
        raise Exception("cannot find autocomplete instance {}".format(instance_name))

    dataset_names = [dataset.name for dataset in autocomplete.last_datasets(1)]
    log = logging.getLogger(__name__)
    log.info('we reimport the last dataset of autocomplete %s, composed of: %s', instance_name, dataset_names)
    future, _ = tasks.import_autocomplete(dataset_names, autocomplete, backup_file=False)
    if wait and future:
        wait_or_raise(future)

    log.info('last datasets reimport finished for %s', instance_name)
# Example #3
def import_last_dataset(
    instance_name,
    background=False,
    reload_kraken=False,
    custom_output_dir=None,
    nowait=False,
    allow_mimir=False,
    skip_2ed=False,
):
    """
    Reimport the last dataset of an instance.

    By default the kraken is not reloaded; the '-r' switch can activate it.

    custom_output_dir is a subdirectory for the nav file created.  When not
    given, the instance default one is taken.
    By default the job is not run on the workers: pass --background to use
    them, in which case you can also pass --nowait to skip waiting for the
    end of the job.

    :raises Exception: if no existing instance matches instance_name
    """
    instance = models.Instance.query_existing().filter_by(name=instance_name).first()

    if not instance:
        raise Exception("cannot find instance {}".format(instance_name))

    # command-line trick: -a enables the mimir import, while import_data
    # expects the inverse flag, so reverse it here
    skip_mimir = not allow_mimir

    dataset_files = [dataset.name for dataset in instance.last_datasets(1)]
    logger = get_instance_logger(instance)
    logger.info('we reimport the last dataset of %s, composed of: %s', instance.name, dataset_files)
    job = tasks.import_data(
        dataset_files,
        instance,
        backup_file=False,
        asynchronous=background,
        reload=reload_kraken,
        custom_output_dir=custom_output_dir,
        skip_mimir=skip_mimir,
        skip_2ed=skip_2ed,
    )
    if job and not nowait:
        wait_or_raise(job)