Example #1
def ntfs2mimir(self, instance_name, input, autocomplete_version, job_id=None, dataset_uid=None):
    """
    launch ntfs2mimir
    """
    # We don't have a job_id when reimporting all instances with import_stops_in_mimir = true
    if job_id:
        job = models.Job.query.get(job_id)
        instance = job.instance
        logger = get_instance_logger(instance, task_id=job_id)
    else:
        logger = get_task_logger(logging.getLogger("autocomplete"))
    executable = "ntfs2mimir" if autocomplete_version == 2 else "ntfs2mimir7"
    logger.debug('running {} for Elasticsearch {}'.format(executable, autocomplete_version))

    working_directory = unzip_if_needed(input)
    cosmogony_file = models.DataSet.get_cosmogony_file_path()
    argv = get_ntfs2mimir_params(instance_name, working_directory, autocomplete_version, cosmogony_file)
    try:
        res = launch_exec(executable, argv, logger)
        if res != 0:
            # Do not raise an error because it would break the celery task chain.
            # ntfs2mimir has to be non-blocking.
            # @TODO: find a way to raise an error without breaking the celery task chain
            logger.error('{} failed'.format(executable))
            if job_id:
                job.state = 'failed'
                models.db.session.commit()
    except Exception:
        logger.exception('{} failed'.format(executable))
        if job_id:
            job.state = 'failed'
            models.db.session.commit()

        raise
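
A hypothetical reconstruction of get_ntfs2mimir_params, whose definition is not part of this example: judging from the flat argv in example #2 below, it presumably builds the same three flags. Everything version-7-specific is an assumption, so no flag is invented for the cosmogony file.

def get_ntfs2mimir_params(instance_name, working_directory, autocomplete_version, cosmogony_file):
    # Hypothetical sketch: example #2 shows these three flags for the
    # Elasticsearch 2 binary.
    argv = ['--input', working_directory,
            '--connection-string', current_app.config['MIMIR_URL'],
            '--dataset', instance_name]
    if autocomplete_version != 2 and cosmogony_file:
        # ntfs2mimir7 presumably consumes the cosmogony file; the exact flag
        # is not visible in these examples, so none is invented here.
        pass
    return argv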
Example #2
def ntfs2mimir(self, instance_name, input, job_id=None, dataset_uid=None):
    """
    launch ntfs2mimir
    """
    # We don't have a job_id when reimporting all instances with import_stops_in_mimir = true
    if job_id:
        job = models.Job.query.get(job_id)
        instance = job.instance
        logger = get_instance_logger(instance, task_id=job_id)
    else:
        logger = get_task_logger(logging.getLogger("autocomplete"))
    cnx_string = current_app.config['MIMIR_URL']

    working_directory = unzip_if_needed(input)

    argv = ['--input', working_directory, '--connection-string', cnx_string, '--dataset', instance_name]
    try:
        res = launch_exec('ntfs2mimir', argv, logger)
        if res != 0:
            # Do not raise an error because it would break the celery task chain.
            # ntfs2mimir has to be non-blocking.
            # @TODO: find a way to raise an error without breaking the celery task chain
            logger.error('ntfs2mimir failed')
            if job_id:
                job.state = 'failed'
                models.db.session.commit()
    except Exception:
        logger.exception('ntfs2mimir failed')
        if job_id:
            job.state = 'failed'
            models.db.session.commit()

        raise
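
launch_exec itself is not shown in these examples; the code above only relies on it taking an executable name, an argv list, and a logger, and returning the process exit code. A minimal sketch under those assumptions (not the real Navitia helper):

import subprocess

def launch_exec(exe, argv, logger):
    # Sketch: run the binary, stream its output to the logger, and return
    # the exit code that the callers above compare to 0.
    logger.info('launching %s %s', exe, ' '.join(argv))
    proc = subprocess.Popen([exe] + argv, stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT, universal_newlines=True)
    for line in proc.stdout:
        logger.debug(line.rstrip())
    return proc.wait()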
Example #3
def poi2mimir(self, instance_name, input, job_id=None, dataset_uid=None):
    """ launch poi2mimir """
    # Prefix the dataset name to prevent collisions with other datasets.
    dataset_name = 'priv.{}'.format(instance_name)

    job = None
    # We don't have a job_id when reimporting all instances with import_stops_in_mimir = true
    if job_id:
        job = models.Job.query.get(job_id)
        instance = job.instance
        logger = get_instance_logger(instance, task_id=job_id)
    else:
        logger = get_task_logger(logging.getLogger("autocomplete"))
        instance = models.Instance.query_existing().filter_by(
            name=instance_name).first()

    cnx_string = current_app.config['MIMIR_URL']

    poi_file = input

    # Note: for the moment the dataset is derived from the instance name; this will need to change once we aggregate
    argv = [
        '--input', poi_file, '--connection-string', cnx_string, '--dataset',
        dataset_name, '--private'
    ]

    try:
        if job:
            with collect_metric('poi2mimir', job, dataset_uid):
                res = launch_exec('poi2mimir', argv, logger)
        else:
            res = launch_exec('poi2mimir', argv, logger)

        if res != 0:
            # Do not raise an error because it would break the celery task chain.
            logger.error('poi2mimir failed')
            if job_id:
                job.state = 'failed'
                models.db.session.commit()
        else:
            instance.poi_dataset = dataset_name
            models.db.session.commit()
    except Exception:
        logger.exception('poi2mimir failed')
        if job_id:
            job.state = 'failed'
            models.db.session.commit()

        raise
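
collect_metric appears above as a context manager wrapping the launch when a job is known. Its implementation is not shown; a hypothetical stand-in that matches only the observed call signature:

import logging
import time
from contextlib import contextmanager

@contextmanager
def collect_metric(action, job, dataset_uid):
    # Hypothetical stand-in: the real helper presumably persists a metric
    # for (job, dataset_uid); only the signature comes from the example.
    start = time.time()
    try:
        yield
    finally:
        duration = time.time() - start
        logging.getLogger('autocomplete').info('%s took %.2fs', action, duration)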
Example #4
def stops2mimir(self, instance_name, input, job_id=None, dataset_uid=None):
    """
    launch stops2mimir

    Note: this is temporary; it will be handled by tartare once tartare is available
    """
    # We don't have a job_id when reimporting all instances with import_stops_in_mimir = true
    if job_id:
        job = models.Job.query.get(job_id)
        instance = job.instance
        logger = get_instance_logger(instance, task_id=job_id)
    else:
        logger = get_task_logger(logging.getLogger("autocomplete"))
    cnx_string = current_app.config['MIMIR_URL']

    working_directory = os.path.dirname(input)

    stops_file = os.path.join(working_directory, 'stops.txt')

    # Note: for the moment the dataset is derived from the instance name; this will need to change once we aggregate
    argv = [
        '--input', stops_file, '--connection-string', cnx_string, '--dataset',
        instance_name
    ]

    try:
        res = launch_exec('stops2mimir', argv, logger)
        if res != 0:
            # Do not raise an error because it would break the celery task chain.
            # stops2mimir has to be non-blocking.
            # @TODO: find a way to raise an error without breaking the celery task chain
            logger.error('stops2mimir failed')
            if job_id:
                job.state = 'failed'
                models.db.session.commit()
    except Exception:
        logger.exception('stops2mimir failed')
        if job_id:
            job.state = 'failed'
            models.db.session.commit()

        raise
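
Note that stops2mimir, unlike ntfs2mimir, does not call unzip_if_needed: it expects stops.txt to already sit next to the input file. For reference, a sketch of what an unzip_if_needed-style helper could look like (the real implementation is not shown in these examples):

import os
import zipfile

def unzip_if_needed(path):
    # Sketch (assumption): extract an archive next to itself and return the
    # directory to work in; pass a plain file through as its directory.
    if zipfile.is_zipfile(path):
        target = os.path.splitext(path)[0]
        with zipfile.ZipFile(path) as zf:
            zf.extractall(target)
        return target
    return os.path.dirname(path) if os.path.isfile(path) else path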
Example #5
def poi2mimir(self, instance_name, input, autocomplete_version, job_id=None, dataset_uid=None):
    """launch poi2mimir"""
    # Prefix the dataset name to prevent collisions with other datasets.
    dataset_name = 'priv.{}'.format(instance_name)
    job = None
    # We don't have a job_id when reimporting all instances with import_stops_in_mimir = true
    if job_id:
        job = models.Job.query.get(job_id)
        instance = job.instance
        logger = get_instance_logger(instance, task_id=job_id)
    else:
        logger = get_task_logger(logging.getLogger("autocomplete"))
        instance = models.Instance.query_existing().filter_by(name=instance_name).first()
    executable = "poi2mimir" if autocomplete_version == 2 else "poi2mimir7"
    logger.debug('running {} for autocomplete version {}'.format(executable, autocomplete_version))
    cosmogony_file = models.DataSet.get_cosmogony_file_path()
    argv = get_poi2mimir_params(input, dataset_name, autocomplete_version, cosmogony_file)
    try:
        if job:
            with collect_metric(executable, job, dataset_uid):
                res = launch_exec(executable, argv, logger)
        else:
            res = launch_exec(executable, argv, logger)

        if res != 0:
            # Do not raise an error because it would break the celery task chain.
            logger.error('{} failed'.format(executable))
            if job_id:
                job.state = 'failed'
                models.db.session.commit()
        else:
            instance.poi_dataset = dataset_name
            models.db.session.commit()
    except Exception:
        logger.exception('{} failed'.format(executable))
        if job_id:
            job.state = 'failed'
            models.db.session.commit()

        raise
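
All five tasks swallow a non-zero exit code instead of raising because, as the comments note, an exception would abort the rest of the celery chain. Assuming the functions are registered as bound celery tasks (the decorators are not shown here), a chain might be wired as follows; the instance name, paths, and job id are placeholders:

from celery import chain

# Placeholder arguments throughout; the tasks are assumed to be registered
# with @celery.task(bind=True), which the excerpts above do not show.
chain(
    ntfs2mimir.si('fr-idf', '/srv/ed/fr-idf/ntfs.zip', 2, job_id=42),
    poi2mimir.si('fr-idf', '/srv/ed/fr-idf/poi.txt', 2, job_id=42),
).apply_async()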