Exemple #1
0
def import_in_mimir(_file, instance, asynchronous=True):
    """
    Import pt (stops) or poi data into mimir for autocomplete.

    :param _file: path to the data file to import
    :param instance: instance owning the data; its ``import_ntfs_in_mimir`` /
        ``import_stops_in_mimir`` flags select which task is built
    :param asynchronous: when True, schedule the celery task and return
        immediately; when False, run it in-process and block until done
    :return: the celery result of the scheduled task, or None when no import
        applies to this file
    """
    datatype, _ = utils.type_of_data(_file)
    family_type = utils.family_of_data(datatype)

    current_app.logger.debug("Import {} data to mimir".format(family_type))

    action = None

    if family_type == 'pt':
        if instance.import_ntfs_in_mimir:
            action = ntfs2mimir.si(instance.name, _file)
        # Deprecated: https://github.com/CanalTP/mimirsbrunn/blob/4430eed1d81247fffa7cf32ba675a9c5ad8b1cbe/documentation/components.md#stops2mimir
        if instance.import_stops_in_mimir and not instance.import_ntfs_in_mimir:
            action = stops2mimir.si(instance.name, _file)
    elif family_type == 'poi':
        action = poi2mimir.si(instance.name, _file)
    else:
        current_app.logger.warning("Unsupported family_type {}".format(family_type))

    # Bug fix: an unsupported family type, or a 'pt' file with both import
    # flags disabled, used to leave action as None and crash on action.delay()
    if action is None:
        return None

    if asynchronous:
        return action.delay()
    else:
        # all job are run in sequence and import_in_mimir will only return when all the jobs are finish
        return action.apply()
Exemple #2
0
def import_in_mimir(_file, instance, asynchronous=True):
    """
    Import pt (stops) or poi data into mimir for autocomplete.

    :param _file: path to the data file to import
    :param instance: instance owning the data; its mimir import flags decide
        which celery task is created
    :param asynchronous: schedule with celery when True, run synchronously
        (blocking) when False
    :return: celery result of the task, or None when nothing is to be imported
    """
    datatype, _ = utils.type_of_data(_file)
    family_type = utils.family_of_data(datatype)

    current_app.logger.debug("Import {} data to mimir".format(family_type))

    action = None

    if family_type == 'pt':
        if instance.import_ntfs_in_mimir:
            action = ntfs2mimir.si(instance.name, _file)
        # stops are only imported when the ntfs import is disabled
        if instance.import_stops_in_mimir and not instance.import_ntfs_in_mimir:
            action = stops2mimir.si(instance.name, _file)
    elif family_type == 'poi':
        action = poi2mimir.si(instance.name, _file)
    else:
        current_app.logger.warning("Unsupported family_type {}".format(family_type))

    # Bug fix: previously action could still be None here (unsupported family,
    # or 'pt' with both flags off) and action.delay() raised AttributeError
    if action is None:
        return None

    if asynchronous:
        return action.delay()
    else:
        # all job are run in sequence and import_in_mimir will only return when all the jobs are finish
        return action.apply()
Exemple #3
0
def import_in_mimir(_file, instance, asynchronous=True):
    """
    Import pt data stops to autocomplete.

    Builds one task per activated mimir version (2 and/or 7) and runs them as
    a single celery chain, either scheduled (asynchronous) or in-process.
    """
    datatype, _ = utils.type_of_data(_file)
    family_type = utils.family_of_data(datatype)
    current_app.logger.debug("Import {} data to mimir".format(family_type))

    tasks = []

    for mimir_version in (2, 7):
        if not is_activate_autocomplete_version(mimir_version):
            logging.getLogger(__name__).info(
                "Disable import mimir version {}".format(mimir_version))
            continue
        if family_type == 'poi':
            tasks.append(poi2mimir.si(instance.name, _file, mimir_version))
        elif family_type == 'pt':
            if instance.import_ntfs_in_mimir:
                tasks.append(ntfs2mimir.si(instance.name, _file, mimir_version))
            # Deprecated: https://github.com/hove-io/mimirsbrunn/blob/4430eed1d81247fffa7cf32ba675a9c5ad8b1cbe/documentation/components.md#stops2mimir
            if instance.import_stops_in_mimir and not instance.import_ntfs_in_mimir:
                tasks.append(stops2mimir.si(instance.name, _file, mimir_version))
        else:
            current_app.logger.warning(
                "Unsupported family_type {}".format(family_type))

    workflow = chain(*tasks)
    if asynchronous:
        return workflow.delay()
    # all job are run in sequence and import_in_mimir will only return when all the jobs are finish
    return workflow.apply()
Exemple #4
0
def send_to_mimir(instance, filename, family_type):
    """
    Build the list of mimir injection tasks for a dataset.

    :param instance: instance to receive the data
    :param filename: file to inject towards mimir
    :param family_type: dataset's family type ('pt' or 'poi')

    - creates a job with a data_set
    - prepares data injection towards mimir (stops2mimir, ntfs2mimir, poi2mimir)

    returns action list
    """
    # if mimir isn't setup do not try to import data for the autocompletion
    if not current_app.config.get('MIMIR_URL'):
        return []

    # Bail out if the family type is not one that mimir deals with.
    if family_type not in ('pt', 'poi'):
        return []

    # Avoid creating a job when no mimir action is configured for the instance.
    # NOTE(review): this also skips 'poi' files when neither pt flag is set —
    # confirm that is intended.
    if not (instance.import_ntfs_in_mimir or instance.import_stops_in_mimir):
        return []

    job = models.Job()
    job.instance = instance
    job.state = 'running'

    dataset = models.DataSet()
    dataset.family_type = 'mimir'
    dataset.type = 'fusio'
    # currently the name of a dataset is the path to it
    dataset.name = filename

    job.data_sets.append(dataset)
    models.db.session.add(dataset)
    models.db.session.add(job)
    # the job must be committed so that job.id is available for the tasks below
    models.db.session.commit()

    actions = []
    if family_type == 'pt':
        # Import ntfs in Mimir
        if instance.import_ntfs_in_mimir:
            actions.append(ntfs2mimir.si(instance.name, filename, job.id, dataset_uid=dataset.uid))
        # Import stops in Mimir, only when the ntfs import is disabled.
        # This action is deprecated: https://github.com/CanalTP/mimirsbrunn/blob/4430eed1d81247fffa7cf32ba675a9c5ad8b1cbe/documentation/components.md#stops2mimir
        if instance.import_stops_in_mimir and not instance.import_ntfs_in_mimir:
            actions.append(stops2mimir.si(instance.name, filename, job.id, dataset_uid=dataset.uid))
    else:  # assume family_type == 'poi':
        actions.append(poi2mimir.si(instance.name, filename, job.id, dataset_uid=dataset.uid))

    actions.append(finish_job.si(job.id))
    return actions
Exemple #5
0
def send_to_mimir(instance, filename):
    """
    Build the list of mimir injection tasks for a dataset.

    :param instance: instance to receive the data
    :param filename: file to inject towards mimir

    - creates a job with a data_set
    - prepares data injection towards mimir (stops2mimir, ntfs2mimir)

    returns action list
    """
    # This test is to avoid creating a new job if there is no action on mimir.
    if not (instance.import_ntfs_in_mimir or instance.import_stops_in_mimir):
        return []

    instance_config = load_instance_config(instance.name)

    job = models.Job()
    job.instance = instance
    job.state = 'running'

    dataset = models.DataSet()
    dataset.family_type = 'mimir'
    dataset.type = 'fusio'
    # currently the name of a dataset is the path to it
    dataset.name = filename

    job.data_sets.append(dataset)
    models.db.session.add(dataset)
    models.db.session.add(job)
    # commit so that job.id / dataset.uid exist before building the signatures
    models.db.session.commit()

    actions = []

    # Import ntfs in Mimir
    if instance.import_ntfs_in_mimir:
        actions.append(
            ntfs2mimir.si(instance_config,
                          filename,
                          job.id,
                          dataset_uid=dataset.uid))

    # Import stops in Mimir, only when the ntfs import is disabled.
    # if we are loading pt data we might want to load the stops to autocomplete
    if instance.import_stops_in_mimir and not instance.import_ntfs_in_mimir:
        actions.append(
            stops2mimir.si(instance_config,
                           filename,
                           job.id,
                           dataset_uid=dataset.uid))

    actions.append(finish_job.si(job.id))
    return actions
Exemple #6
0
def import_in_mimir(_file, instance, asynchronous=True):
    """
    Import pt (stops) or poi data into mimir for autocomplete.

    :param _file: path to the data file to import
    :param instance: instance owning the data; its mimir import flags decide
        which celery task is created
    :param asynchronous: schedule with celery when True, run synchronously
        (blocking) when False
    :return: celery result of the task, or None when nothing is to be imported

    Bug fix: the parameter was named ``async``, which is a reserved keyword
    since Python 3.7 and made this module a SyntaxError; renamed to
    ``asynchronous`` to match the other variants of this function.
    """
    datatype, _ = utils.type_of_data(_file)
    family_type = utils.family_of_data(datatype)

    current_app.logger.debug("Import {} data to mimir".format(family_type))

    action = None

    if family_type == 'pt':
        if instance.import_ntfs_in_mimir:
            action = ntfs2mimir.si(instance.name, _file)
        # stops are only imported when the ntfs import is disabled
        if instance.import_stops_in_mimir and not instance.import_ntfs_in_mimir:
            action = stops2mimir.si(instance.name, _file)
    elif family_type == 'poi':
        action = poi2mimir.si(instance.name, _file)
    else:
        current_app.logger.warning(
            "Unsupported family_type {}".format(family_type))

    # Bug fix: action could still be None here and crashed on action.delay()
    if action is None:
        return None

    if asynchronous:
        return action.delay()
    else:
        # all job are run in sequence and import_in_mimir will only return when all the jobs are finish
        return action.apply()


@celery.task()
def update_autocomplete():
Exemple #7
0
    else:
        # all job are run in sequence and import_data will only return when all the jobs are finish
        return chain(*actions).apply(), job


@celery.task()
def import_in_mimir(_file, instance, asynchronous=True):
    """
    Import pt data stops to autocomplete.

    :param _file: path to the stops/ntfs data file
    :param instance: instance owning the data; its mimir import flags decide
        which celery task is created
    :param asynchronous: schedule with celery when True, run synchronously
        (blocking) when False
    :return: celery result of the task, or None when nothing is to be imported

    Bug fixes: the parameter was named ``async`` (reserved keyword since
    Python 3.7, a SyntaxError); and ``action`` was referenced while unbound
    (NameError) when neither import flag was set.
    """
    current_app.logger.debug("Import pt data to mimir")
    instance_config = load_instance_config(instance.name)
    action = None
    if instance.import_ntfs_in_mimir:
        action = ntfs2mimir.si(instance_config, _file)
    # stops are only imported when the ntfs import is disabled
    if instance.import_stops_in_mimir and not instance.import_ntfs_in_mimir:
        action = stops2mimir.si(instance_config, _file)
    if action is None:
        return None
    if asynchronous:
        return action.delay()
    else:
        # all job are run in sequence and import_in_mimir will only return when all the jobs are finish
        return action.apply()


@celery.task()
def update_autocomplete():
    """Scan every autocomplete instance's source dir and import pending files."""
    current_app.logger.debug("Update autocomplete data")
    autocomplete_dir = current_app.config['TYR_AUTOCOMPLETE_DIR']
    for parameter in models.AutocompleteParameter.query.all():
        pending_files = glob.glob(parameter.source_dir(autocomplete_dir) + "/*")
        if pending_files:
            import_autocomplete(pending_files, parameter, backup_file=True)
Exemple #8
0
def send_to_mimir(instance, filename, family_type):
    """
    Build the list of mimir injection tasks for a dataset.

    :param instance: instance to receive the data
    :param filename: file to inject towards mimir
    :param family_type: dataset's family type ('pt' or 'poi')

    - create a job with a data_set per activated mimir version (ES2 / ES7)
    - data injection towards mimir(stops2mimir, ntfs2mimir, poi2mimir)

    returns action list
    """

    # if mimir isn't setup do not try to import data for the autocompletion.
    # Fix: was `any([a or b])` — a one-element list wrapping the whole `or`,
    # equivalent to the plain boolean test but misleading to read.
    if not (is_activate_autocomplete_version(2)
            or is_activate_autocomplete_version(7)):
        return []

    # Bail out if the family type is not one that mimir deals with.
    if family_type not in ['pt', 'poi']:
        return []

    # This test is to avoid creating a new job if there is no action on mimir.
    # NOTE(review): this also skips 'poi' files when neither pt flag is set —
    # confirm that is intended.
    if not (instance.import_ntfs_in_mimir or instance.import_stops_in_mimir):
        return []

    actions = []
    job = models.Job()
    job.instance = instance
    job.state = 'running'

    if is_activate_autocomplete_version(7):
        dataset_es7 = create_and_get_dataset(ds_type="fusio",
                                             family_type="mimir7",
                                             filename=filename)
        models.db.session.add(dataset_es7)
        job.data_sets.append(dataset_es7)

    if is_activate_autocomplete_version(2):
        dataset_es2 = create_and_get_dataset(ds_type="fusio",
                                             family_type="mimir",
                                             filename=filename)
        models.db.session.add(dataset_es2)
        job.data_sets.append(dataset_es2)

    # commit so that job.id and the dataset uids exist for the signatures below
    models.db.session.add(job)
    models.db.session.commit()

    for version in (2, 7):
        if not is_activate_autocomplete_version(version):
            logging.getLogger(__name__).info(
                "Disable import mimir version {}".format(version))
            continue
        # safe: the matching dataset was necessarily created above for an
        # activated version
        ds = dataset_es7 if version == 7 else dataset_es2
        if family_type == 'pt':
            # Import ntfs in Mimir
            if instance.import_ntfs_in_mimir:
                actions.append(
                    ntfs2mimir.si(instance.name,
                                  filename,
                                  version,
                                  job.id,
                                  dataset_uid=ds.uid))
            # Import stops in Mimir.
            # if we are loading pt data we might want to load the stops to autocomplete
            # This action is deprecated: https://github.com/hove-io/mimirsbrunn/blob/4430eed1d81247fffa7cf32ba675a9c5ad8b1cbe/documentation/components.md#stops2mimir
            if instance.import_stops_in_mimir and not instance.import_ntfs_in_mimir:
                actions.append(
                    stops2mimir.si(instance.name,
                                   filename,
                                   version,
                                   job.id,
                                   dataset_uid=ds.uid))
        else:  # assume family_type == 'poi':
            actions.append(
                poi2mimir.si(instance.name,
                             filename,
                             version,
                             job.id,
                             dataset_uid=ds.uid))

    actions.append(finish_job.si(job.id))
    return actions
Exemple #9
0
        models.db.session.add(dataset)
        job.data_sets.append(dataset)

    if actions:
        models.db.session.add(job)
        models.db.session.commit()
        for action in actions:
            action.kwargs['job_id'] = job.id
        #We pass the job id to each tasks, but job need to be commited for having an id
        binarisation = [ed2nav.si(instance_config, job.id, custom_output_dir)]
        actions.append(chain(*binarisation))
        if dataset.family_type == 'pt' and instance.import_stops_in_mimir:
            # if we are loading pt data we might want to load the stops to autocomplete
            actions.append(
                stops2mimir.si(instance_config,
                               filename,
                               job.id,
                               dataset_uid=dataset.uid))
        if reload:
            actions.append(reload_data.si(instance_config, job.id))
        actions.append(finish_job.si(job.id))
        if async:
            return chain(*actions).delay()
        else:
            # all job are run in sequence and import_data will only return when all the jobs are finish
            return chain(*actions).apply()


@celery.task()
def update_data():
    for instance in models.Instance.query_existing().all():
        current_app.logger.debug("Update data of : {}".format(instance.name))
Exemple #10
0
        #currently the name of a dataset is the path to it
        dataset.name = filename
        models.db.session.add(dataset)
        job.data_sets.append(dataset)

    if actions:
        models.db.session.add(job)
        models.db.session.commit()
        for action in actions:
            action.kwargs['job_id'] = job.id
        #We pass the job id to each tasks, but job need to be commited for having an id
        binarisation = [ed2nav.si(instance_config, job.id, custom_output_dir)]
        actions.append(chain(*binarisation))
        if dataset.family_type == 'pt' and instance.import_stops_in_mimir:
            # if we are loading pt data we might want to load the stops to autocomplete
            actions.append(stops2mimir.si(instance_config, filename, job.id, dataset_uid=dataset.uid))
        if reload:
            actions.append(reload_data.si(instance_config, job.id))
        actions.append(finish_job.si(job.id))
        if async:
            return chain(*actions).delay()
        else:
            # all job are run in sequence and import_data will only return when all the jobs are finish
            return chain(*actions).apply()


@celery.task()
def update_data():
    for instance in models.Instance.query_existing().all():
        current_app.logger.debug("Update data of : {}".format(instance.name))
        instance_config = load_instance_config(instance.name)