Example #1
def binarize(ed_db_params, output, ed_component_path, cities_db_params):
    # type: (DbParams, str, str, DbParams) -> None
    """
    Binarize the data from the database to a file.

    :param ed_db_params: the parameters of the database
    :param output: the name of the output file (usually with extension ".nav.lz4")
    :param ed_component_path: the path to the "ed2nav" binary
    :param cities_db_params: the parameters for the cities of the database
    """
    logger.info('creating data.nav')
    ed2nav = 'ed2nav'
    if ed_component_path:
        ed2nav = os.path.join(ed_component_path, ed2nav)
    launch_exec.launch_exec(
        ed2nav,
        [
            "-o",
            output,
            "--connection-string",
            ed_db_params.old_school_cnx_string(),
            "--cities-connection-string",
            cities_db_params.old_school_cnx_string(),
        ],
        logger,
    )
    logger.info("data.nav is created successfully: {}".format(output))
Example #2
def poi2mimir(self, instance_name, input, job_id=None, dataset_uid=None):
    """ launch poi2mimir """
    # We give the dataset a prefix to prevent collisions with other datasets.
    dataset_name = 'priv.{}'.format(instance_name)

    job = None
    # We don't have job_id while doing a reimport of all instances with import_stops_in_mimir = true
    if job_id:
        job = models.Job.query.get(job_id)
        instance = job.instance
        logger = get_instance_logger(instance, task_id=job_id)
    else:
        logger = get_task_logger(logging.getLogger("autocomplete"))
        instance = models.Instance.query_existing().filter_by(
            name=instance_name).first()

    cnx_string = current_app.config['MIMIR_URL']

    poi_file = input

    # Note: for the moment the dataset is the instance name; we'll need to change this when we aggregate
    argv = [
        '--input', poi_file, '--connection-string', cnx_string, '--dataset',
        dataset_name, '--private'
    ]

    try:
        if job:
            with collect_metric('poi2mimir', job, dataset_uid):
                res = launch_exec('poi2mimir', argv, logger)
        else:
            res = launch_exec('poi2mimir', argv, logger)

        if res != 0:
            # Do not raise an error because it would break the celery task chain.
            logger.error('poi2mimir failed')
            if job_id:
                job.state = 'failed'
                models.db.session.commit()
        else:
            instance.poi_dataset = dataset_name
            models.db.session.commit()
    except:
        logger.exception('')
        if job_id:
            job.state = 'failed'
            models.db.session.commit()

        raise
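Several of these tasks wrap the call in a collect_metric context manager tied to the job and dataset. Its definition is not part of this listing; here is a hypothetical sketch that merely times the step, assuming that is roughly what the real helper records:

import contextlib
import logging
import time

@contextlib.contextmanager
def collect_metric(action, job, dataset_uid):
    # Hypothetical stand-in: time one import step and log the duration.
    start = time.time()
    try:
        yield
    finally:
        elapsed = time.time() - start
        logging.getLogger('tasks').info(
            '%s for job %s (dataset %s) took %.2fs', action, job.id, dataset_uid, elapsed)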
Example #3
def osm2mimir(self, autocomplete_instance, filename, job_id, dataset_uid, autocomplete_version):
    """launch osm2mimir"""
    executable = "osm2mimir" if autocomplete_version == 2 else "osm2mimir7"
    autocomplete_instance = models.db.session.merge(autocomplete_instance)  # reattach the object
    logger = get_autocomplete_instance_logger(autocomplete_instance, task_id=job_id)
    logger.debug('running {} for {}'.format(executable, job_id))
    job = models.Job.query.get(job_id)
    data_filename = unzip_if_needed(filename)
    custom_config = "custom_config"
    working_directory = os.path.dirname(data_filename)
    custom_config_config_toml = os.path.join(working_directory, '{}.toml'.format(custom_config))
    data = autocomplete_instance.config_toml.encode("utf-8")
    cosmogony_file = models.DataSet.get_cosmogony_file_path()
    with open(custom_config_config_toml, 'wb') as f:  # binary mode: config_toml was encoded to bytes above
        f.write(data)
    params = get_osm2mimir_params(
        autocomplete_instance,
        data_filename,
        working_directory,
        custom_config,
        autocomplete_version,
        cosmogony_file,
    )
    try:
        res = launch_exec(executable, params, logger)
        if res != 0:
            # @TODO: exception
            raise ValueError('{} failed'.format(executable))
    except:
        logger.exception('')
        job.state = 'failed'
        models.db.session.commit()
        raise
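unzip_if_needed appears in most of these tasks but is not defined on this page. A minimal sketch, assuming it extracts zip archives next to the file and otherwise returns the path untouched:

import os
import zipfile

def unzip_if_needed(filename):
    # If the file is a zip archive, extract it alongside and return the directory;
    # otherwise return the path unchanged (assumed behaviour, not shown in the source).
    if zipfile.is_zipfile(filename):
        directory = os.path.dirname(filename)
        with zipfile.ZipFile(filename) as zip_file:
            zip_file.extractall(path=directory)
        return directory
    return filename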
Example #4
def openaddresses2mimir(self, autocomplete_instance, filename, job_id, dataset_uid, autocomplete_version):
    """launch openaddresses2mimir"""
    executable = "openaddresses2mimir" if autocomplete_version == 2 else "openaddresses2mimir7"
    autocomplete_instance = models.db.session.merge(autocomplete_instance)  # reattach the object
    logger = get_autocomplete_instance_logger(autocomplete_instance, task_id=job_id)
    if autocomplete_instance.address != 'OA':
        logger.warning(
            'no open addresses data will be loaded for instance {} because addresses are read from {}'.format(
                autocomplete_instance.name, autocomplete_instance.address
            )
        )
        return

    job = models.Job.query.get(job_id)
    working_directory = unzip_if_needed(filename)
    cosmogony_file = models.DataSet.get_cosmogony_file_path()
    params = get_openaddresses2mimir_params(
        autocomplete_instance, working_directory, autocomplete_version, cosmogony_file
    )
    try:
        res = launch_exec(executable, params, logger)
        if res != 0:
            # @TODO: exception
            raise ValueError('{} failed'.format(executable))
    except:
        logger.exception('')
        job.state = 'failed'
        models.db.session.commit()
        raise
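The self parameter on these functions shows they are bound celery tasks; the decorators were stripped when the examples were extracted. A sketch of how such a task is presumably declared (the app name is an assumption):

from celery import Celery

celery = Celery('tyr')  # hypothetical app instance

@celery.task(bind=True)
def openaddresses2mimir(self, autocomplete_instance, filename, job_id, dataset_uid, autocomplete_version):
    # bind=True makes celery pass the task instance as `self`, as in the examples above.
    ...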
Example #5
def ntfs2mimir(self, instance_name, input, autocomplete_version, job_id=None, dataset_uid=None):
    """
    launch ntfs2mimir
    """
    # We don't have job_id while doing a reimport of all instances with import_stops_in_mimir = true
    if job_id:
        job = models.Job.query.get(job_id)
        instance = job.instance
        logger = get_instance_logger(instance, task_id=job_id)
    else:
        logger = get_task_logger(logging.getLogger("autocomplete"))
    executable = "ntfs2mimir" if autocomplete_version == 2 else "ntfs2mimir7"
    logger.debug('running {} for Es{}'.format(executable, autocomplete_version))

    working_directory = unzip_if_needed(input)
    cosmogony_file = models.DataSet.get_cosmogony_file_path()
    argv = get_ntfs2mimir_params(instance_name, working_directory, autocomplete_version, cosmogony_file)
    try:
        res = launch_exec(executable, argv, logger)
        if res != 0:
            # Do not raise an error because it would break the celery task chain;
            # ntfs2mimir has to be non-blocking.
            # @TODO: find a way to raise an error without breaking the celery task chain
            logger.error('{} failed'.format(executable))
            if job_id:
                job.state = 'failed'
                models.db.session.commit()
    except:
        logger.exception('')
        if job_id:
            job.state = 'failed'
            models.db.session.commit()

        raise
Example #6
def fare2ed(self, instance_config, filename, job_id):
    """ launch fare2ed """

    job = models.Job.query.get(job_id)
    instance = job.instance

    logger = get_instance_logger(instance)
    try:
        working_directory = os.path.dirname(filename)

        zip_file = zipfile.ZipFile(filename)
        zip_file.extractall(path=working_directory)

        res = launch_exec("fare2ed", [
            '-f', working_directory, '--connection-string',
            make_connection_string(instance_config)
        ], logger)
        if res != 0:
            # @TODO: exception
            raise ValueError('fare2ed failed')
    except:
        logger.exception('')
        job.state = 'failed'
        models.db.session.commit()
        raise
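make_connection_string(instance_config) builds the connection string handed to the *2ed binaries. Its body is not shown on this page; a hedged sketch assuming the config exposes the usual PostgreSQL fields (all attribute names here are guesses):

def make_connection_string(instance_config):
    # libpq-style key/value connection string; field names are assumed, not from the source
    return 'host={} user={} dbname={} password={}'.format(
        instance_config.pg_host,
        instance_config.pg_username,
        instance_config.pg_dbname,
        instance_config.pg_password,
    )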
Example #7
def bano2mimir(self, autocomplete_instance, filename, job_id, dataset_uid):
    """ launch bano2mimir """
    autocomplete_instance = models.db.session.merge(autocomplete_instance)  # reattach the object
    logger = get_autocomplete_instance_logger(autocomplete_instance,
                                              task_id=job_id)
    job = models.Job.query.get(job_id)
    cnx_string = current_app.config['MIMIR_URL']
    working_directory = unzip_if_needed(filename)

    if autocomplete_instance.address != 'BANO':
        logger.warning(
            'no bano data will be loaded for instance {} because addresses are read from {}'
            .format(autocomplete_instance.name, autocomplete_instance.address))
        return

    try:
        res = launch_exec("bano2mimir", [
            '-i', working_directory, '--connection-string', cnx_string,
            '--dataset', autocomplete_instance.name
        ], logger)
        if res != 0:
            # @TODO: exception
            raise ValueError('bano2mimir failed')
    except:
        logger.exception('')
        job.state = 'failed'
        models.db.session.commit()
        raise
Example #8
def fare2ed(self, instance_config, filename, job_id, dataset_uid):
    """ launch fare2ed """

    job = models.Job.query.get(job_id)
    instance = job.instance

    logger = get_instance_logger(instance, task_id=job_id)
    try:

        working_directory = unzip_if_needed(filename)

        params = ["-f", working_directory]
        params.append("--connection-string")
        params.append(make_connection_string(instance_config))
        params.append("--local_syslog")
        params.append("--log_comment")
        params.append(instance_config.name)
        res = launch_exec("fare2ed", params, logger)
        if res != 0:
            # @TODO: exception
            raise ValueError('fare2ed failed')
    except:
        logger.exception('')
        job.state = 'failed'
        models.db.session.commit()
        raise
Example #9
def stops2mimir(self, instance_config, input, job_id, dataset_uid):
    """
    launch stops2mimir

    Note: this is temporary; this will be done by tartare once tartare is available
    """
    job = models.Job.query.get(job_id)
    instance = job.instance

    logger = get_instance_logger(instance)
    cnx_string = current_app.config['MIMIR_URL']

    working_directory = os.path.dirname(input)

    stops_file = os.path.join(working_directory, 'stops.txt')

    # Note: for the moment the dataset is the instance name; we'll need to change this when we aggregate
    argv = [
        '--input', stops_file, '--connection-string', cnx_string, '--dataset',
        instance_config.name
    ]

    try:
        res = launch_exec('stops2mimir', argv, logger)
        if res != 0:
            raise ValueError('stops2mimir failed')
    except:
        logger.exception('')
        job.state = 'failed'
        models.db.session.commit()
        raise
Example #10
def cosmogony2mimir(self, autocomplete_instance, filename, job_id,
                    dataset_uid):
    """ launch cosmogony2mimir """
    autocomplete_instance = models.db.session.merge(
        autocomplete_instance)  # reattach the object
    logger = get_autocomplete_instance_logger(autocomplete_instance,
                                              task_id=job_id)
    logger.debug('running cosmogony2mimir for {}'.format(job_id))
    job = models.Job.query.get(job_id)
    cnx_string = current_app.config['MIMIR_URL']
    cosmo_file = unzip_if_needed(filename)

    params = [
        '--input',
        cosmo_file,
        '--connection-string',
        cnx_string,
        '--dataset',
        autocomplete_instance.name,
        '--french-id-retrocompatibility',
    ]
    try:
        res = launch_exec("cosmogony2mimir", params, logger)
        if res != 0:
            # @TODO: exception
            raise ValueError('cosmogony2mimir failed')
    except:
        logger.exception('')
        job.state = 'failed'
        models.db.session.commit()
        raise
Example #11
def import_data(data_dir, db_params, ed_component_path):
    # type: (str, DbParams, str) -> None
    """
    Call the right binary for the data (one of the "*2ed" tools) to convert it, then load it into the database.

    :param data_dir: the directory containing the data for "*2ed"
    :param db_params: the parameters of the database
    :param ed_component_path: the path of the directory containing the binary "*2ed"
    """
    files = glob.glob(data_dir + "/*")  # type: List[str]
    data_type, file_to_load = utils.type_of_data(files)  # type: str, str
    if not data_type:
        logger.info('unknown data type for dir {}, skipping'.format(data_dir))
        return

    # we consider that we only have to load one kind of data per directory
    import_component = data_type + '2ed'  # type: str
    if ed_component_path:
        import_component = os.path.join(ed_component_path, import_component)

    if file_to_load.endswith('.zip') or file_to_load.endswith('.geopal'):
        # TODO: handle geopal as non-zip; if it's a zip, we unzip it
        zip_file = zipfile.ZipFile(file_to_load)  # type: zipfile.ZipFile
        zip_file.extractall(path=data_dir)
        file_to_load = data_dir

    if launch_exec.launch_exec(import_component, [
            "-i", file_to_load, "--connection-string",
            db_params.old_school_cnx_string()
    ], logger):
        raise Exception(
            'Error: problem with running {}, stopping'.format(import_component))
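Examples #1, #11 and #26 take a DbParams object whose old_school_cnx_string() presumably yields the same key/value format (as opposed to a postgresql:// URI, hence "old school"). A sketch with every field name assumed:

class DbParams(object):
    def __init__(self, host, dbname, user, password):
        self.host = host
        self.dbname = dbname
        self.user = user
        self.password = password

    def old_school_cnx_string(self):
        # key/value libpq format rather than a postgresql:// URI (assumed)
        return 'host={} user={} dbname={} password={}'.format(
            self.host, self.user, self.dbname, self.password)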
Example #12
def osm2mimir(self, autocomplete_instance, filename, job_id, dataset_uid):
    """ launch osm2mimir """
    logger = logging.getLogger("autocomplete")
    logger.debug('running osm2mimir for {}'.format(job_id))
    job = models.Job.query.get(job_id)
    cnx_string = current_app.config['MIMIR_URL'] + '/' + MIMIR_INDEX
    working_directory = unzip_if_needed(filename)
    autocomplete_instance = models.db.session.merge(autocomplete_instance)  # reattach the object
    try:
        params = ['-i', working_directory, '--connection-string', cnx_string]
        for lvl in autocomplete_instance.admin_level:
            params.append('--level')
            params.append(str(lvl))
        if autocomplete_instance.admin in utils.admin_source_types:
            params.append('--import-admin')
        if autocomplete_instance.street in utils.street_source_types:
            params.append('--import-way')
        res = launch_exec("osm2mimir",
                          params,
                          logger)
        if res != 0:
            # @TODO: exception
            raise ValueError('osm2mimir failed')
    except:
        logger.exception('')
        job.state = 'failed'
        models.db.session.commit()
        raise
Example #13
def synonym2ed(self, instance_config, filename, job_id, dataset_uid):
    """ launch synonym2ed """

    job = models.Job.query.get(job_id)
    instance = job.instance

    logger = get_instance_logger(instance, task_id=job_id)
    try:
        connection_string = make_connection_string(instance_config)
        res = None
        params = ["-i", filename]
        params.append("--connection-string")
        params.append(connection_string)
        params.append("--local_syslog")
        params.append("--log_comment")
        params.append(instance_config.name)
        with collect_metric('synonym2ed', job, dataset_uid):
            res = launch_exec('synonym2ed', params, logger)
        if res != 0:
            # @TODO: exception
            raise ValueError('synonym2ed failed')
    except:
        logger.exception('')
        job.state = 'failed'
        models.db.session.commit()
        raise
Example #14
def osm2ed(self, instance_config, osm_filename, job_id, dataset_uid):
    """ launch osm2ed """

    job = models.Job.query.get(job_id)
    instance = job.instance

    if os.path.isdir(osm_filename):
        osm_filename = glob.glob('{}/*.pbf'.format(osm_filename))[0]

    logger = get_instance_logger(instance)
    try:
        connection_string = make_connection_string(instance_config)
        res = None
        args = ["-i", osm_filename, "--connection-string", connection_string]
        for poi_type in instance.poi_types:
            args.append('-p')
            if poi_type.name:
                args.append(u'{}={}'.format(poi_type.uri, poi_type.name))
            else:
                args.append(poi_type.uri)

        with collect_metric('osm2ed', job, dataset_uid):
            res = launch_exec('osm2ed', args, logger)
        if res != 0:
            # @TODO: exception
            raise ValueError('osm2ed failed')
    except:
        logger.exception('')
        job.state = 'failed'
        models.db.session.commit()
        raise
Example #15
def ed2nav(self, instance_config, job_id, custom_output_dir):
    """ Launch ed2nav"""
    job = models.Job.query.get(job_id)
    instance = job.instance

    logger = get_instance_logger(instance)
    try:
        output_file = instance_config.target_file

        if custom_output_dir:
            # we change the target_filename to store it in a subdir
            target_path = os.path.join(os.path.dirname(output_file),
                                       custom_output_dir)
            output_file = os.path.join(target_path,
                                       os.path.basename(output_file))
            if not os.path.exists(target_path):
                os.makedirs(target_path)

        connection_string = make_connection_string(instance_config)
        argv = ["-o", output_file, "--connection-string", connection_string]
        if current_app.config.get('CITIES_DATABASE_URI'):
            argv.extend([
                "--cities-connection-string",
                current_app.config['CITIES_DATABASE_URI']
            ])

        res = launch_exec('ed2nav', argv, logger)
        if res != 0:
            raise ValueError('ed2nav failed')
    except:
        logger.exception('')
        job.state = 'failed'
        models.db.session.commit()
        raise
Example #16
File: tasks.py Project: kinnou02/navitia
def cities(file_path, job_id, exe):
    """ Launch 'cities' or 'cosmogony2cities' """
    job = models.Job.query.get(job_id)
    res = -1
    try:
        res = launch_exec(
            "{}".format(exe),
            [
                '-i', file_path, '--connection-string',
                current_app.config['CITIES_DATABASE_URI']
            ],
            logging,
        )
        if res != 0:
            job.state = 'failed'
            logging.error('{} failed'.format(exe))
        else:
            job.state = 'done'

    except Exception as e:
        logging.exception('{} exception: {}'.format(exe, e))
        job.state = 'failed'
        models.db.session.commit()
        raise

    models.db.session.commit()
    logging.info('Import of {} finished'.format(exe))
    return res
Example #17
def gtfs2ed(self, instance_config, gtfs_filename, job_id):
    """ Unzip gtfs file launch gtfs2ed """

    job = models.Job.query.get(job_id)
    instance = job.instance

    logger = get_instance_logger(instance)
    try:
        working_directory = os.path.dirname(gtfs_filename)

        zip_file = zipfile.ZipFile(gtfs_filename)
        zip_file.extractall(path=working_directory)

        params = ["-i", working_directory]
        if instance_config.aliases_file:
            params.append("-a")
            params.append(instance_config.aliases_file)

        if instance_config.synonyms_file:
            params.append("-s")
            params.append(instance_config.synonyms_file)

        connection_string = make_connection_string(instance_config)
        params.append("--connection-string")
        params.append(connection_string)
        res = launch_exec("gtfs2ed", params, logger)
        if res != 0:
            raise ValueError('gtfs2ed failed')
    except:
        logger.exception('')
        job.state = 'failed'
        models.db.session.commit()
        raise
Example #18
def ntfs2mimir(self, instance_name, input, job_id=None, dataset_uid=None):
    """
    launch ntfs2mimir
    """
    # We don't have job_id while doing a reimport of all instances with import_stops_in_mimir = true
    if job_id:
        job = models.Job.query.get(job_id)
        instance = job.instance
        logger = get_instance_logger(instance, task_id=job_id)
    else:
        logger = get_task_logger(logging.getLogger("autocomplete"))
    cnx_string = current_app.config['MIMIR_URL']

    working_directory = unzip_if_needed(input)

    argv = ['--input', working_directory, '--connection-string', cnx_string, '--dataset', instance_name]
    try:
        res = launch_exec('ntfs2mimir', argv, logger)
        if res != 0:
            # Do not raise an error because it would break the celery task chain;
            # ntfs2mimir has to be non-blocking.
            # @TODO: find a way to raise an error without breaking the celery task chain
            logger.error('ntfs2mimir failed')
            if job_id:
                job.state = 'failed'
                models.db.session.commit()
    except:
        logger.exception('')
        if job_id:
            job.state = 'failed'
            models.db.session.commit()

        raise
Example #19
def poi2ed(self, instance_config, filename, job_id, dataset_uid):
    """ launch poi2ed """

    job = models.Job.query.get(job_id)
    instance = job.instance
    logger = get_instance_logger(instance)
    try:
        working_directory = os.path.dirname(filename)

        zip_file = zipfile.ZipFile(filename)
        zip_file.extractall(path=working_directory)

        connection_string = make_connection_string(instance_config)
        res = None
        with collect_metric('poi2ed', job, dataset_uid):
            res = launch_exec('poi2ed', [
                "-i", working_directory, "--connection-string",
                connection_string
            ], logger)
        if res != 0:
            # @TODO: exception
            raise ValueError('poi2ed failed')
    except:
        logger.exception('')
        job.state = 'failed'
        models.db.session.commit()
        raise
Example #20
def gtfs2ed(self, instance_config, gtfs_filename, job_id, dataset_uid):
    """ Unzip gtfs file launch gtfs2ed """

    job = models.Job.query.get(job_id)
    instance = job.instance

    logger = get_instance_logger(instance)
    try:
        working_directory = unzip_if_needed(gtfs_filename)

        params = ["-i", working_directory]
        if instance_config.aliases_file:
            params.append("-a")
            params.append(instance_config.aliases_file)

        if instance_config.synonyms_file:
            params.append("-s")
            params.append(instance_config.synonyms_file)

        connection_string = make_connection_string(instance_config)
        params.append("--connection-string")
        params.append(connection_string)
        res = None
        with collect_metric('gtfs2ed', job, dataset_uid):
            res = launch_exec("gtfs2ed", params, logger)
        if res != 0:
            raise ValueError('gtfs2ed failed')
    except:
        logger.exception('')
        job.state = 'failed'
        models.db.session.commit()
        raise
Example #21
def poi2ed(self, instance_config, filename, job_id, dataset_uid):
    """ launch poi2ed """

    job = models.Job.query.get(job_id)
    dataset = _retrieve_dataset_and_set_state("poi", job.id)
    instance = job.instance
    logger = get_instance_logger(instance, task_id=job_id)
    try:
        working_directory = unzip_if_needed(filename)

        connection_string = make_connection_string(instance_config)
        res = None
        params = ["-i", working_directory]
        params.append("--connection-string")
        params.append(connection_string)
        params.append("--local_syslog")
        params.append("--log_comment")
        params.append(instance_config.name)
        with collect_metric("poi2ed", job, dataset_uid):
            res = launch_exec("poi2ed", params, logger)
        if res != 0:
            # @TODO: exception
            raise ValueError("poi2ed failed")
        dataset.state = "done"
    except:
        logger.exception("")
        job.state = "failed"
        dataset.state = "failed"
        raise
    finally:
        models.db.session.commit()
Example #22
def poi2mimir(self, instance_name, input, autocomplete_version, job_id=None, dataset_uid=None):
    """launch poi2mimir"""
    # We give the dataset a prefix to prevent collisions with other datasets.
    dataset_name = 'priv.{}'.format(instance_name)
    job = None
    # We don't have job_id while doing a reimport of all instances with import_stops_in_mimir = true
    if job_id:
        job = models.Job.query.get(job_id)
        instance = job.instance
        logger = get_instance_logger(instance, task_id=job_id)
    else:
        logger = get_task_logger(logging.getLogger("autocomplete"))
        instance = models.Instance.query_existing().filter_by(name=instance_name).first()
    executable = "poi2mimir" if autocomplete_version == 2 else "poi2mimir7"
    logger.debug('running {} (autocomplete version {})'.format(executable, autocomplete_version))
    cosmogony_file = models.DataSet.get_cosmogony_file_path()
    argv = get_poi2mimir_params(input, dataset_name, autocomplete_version, cosmogony_file)
    try:
        if job:
            with collect_metric(executable, job, dataset_uid):
                res = launch_exec(executable, argv, logger)
        else:
            res = launch_exec(executable, argv, logger)

        if res != 0:
            # Do not raise an error because it would break the celery task chain.
            logger.error('{} failed'.format(executable))
            if job_id:
                job.state = 'failed'
                models.db.session.commit()
        else:
            instance.poi_dataset = dataset_name
            models.db.session.commit()
    except:
        logger.exception('')
        if job_id:
            job.state = 'failed'
            models.db.session.commit()

        raise
Example #23
def cities(osm_path):
    """ launch cities """
    res = -1
    try:
        res = launch_exec("cities", ['-i', osm_path,
                                      '--connection-string',
                                      current_app.config['CITIES_DATABASE_URI']],
                          logging)
        if res!=0:
            logging.error('cities failed')
    except:
        logging.exception('')
    logging.info('Import of cities finished')
    return res
Example #24
def cosmogony2cities(cosmogony_path):
    """ launch cosmogony2cities """
    res = -1
    try:
        res = launch_exec(
            "cosmogony2cities",
            ['--input', cosmogony_path, '--connection-string', current_app.config['CITIES_DATABASE_URI']],
            logging,
        )
        if res != 0:
            logging.error('cosmogony2cities failed')
    except Exception as e:
        logging.exception('cosmogony2cities exception: {}'.format(e))

    logging.info('Import of cosmogony2cities finished')
    return res
Example #25
def osm2ed(self, instance_config, osm_filename, job_id, dataset_uid):
    """ launch osm2ed """
    job = models.Job.query.get(job_id)
    dataset = _retrieve_dataset_and_set_state("osm", job.id)
    instance = job.instance
    poi_types_json = None
    if instance.poi_type_json:
        poi_types_json = instance.poi_type_json.poi_types_json

    osm_filename = unzip_if_needed(osm_filename)
    if os.path.isdir(osm_filename):
        osm_filename = glob.glob('{}/*.pbf'.format(osm_filename))[0]

    logger = get_instance_logger(instance, task_id=job_id)
    try:
        connection_string = make_connection_string(instance_config)
        res = None
        args = ["-i", osm_filename, "--connection-string", connection_string]
        if poi_types_json:
            args.append('-p')
            args.append(u'{}'.format(poi_types_json))

        args.append("--local_syslog")
        args.append("--log_comment")
        args.append(instance_config.name)

        if instance.admins_from_cities_db:
            cities_db = current_app.config.get('CITIES_DATABASE_URI')
            if not cities_db:
                raise ValueError(
                    "impossible to use osm2ed with cities db since no cities database configuration has been set"
                )
            args.extend(["--cities-connection-string", cities_db])
        with collect_metric("osm2ed", job, dataset_uid):
            res = launch_exec("osm2ed", args, logger)
        if res != 0:
            # @TODO: exception
            raise ValueError("osm2ed failed")
        dataset.state = "done"
    except:
        logger.exception("")
        job.state = "failed"
        dataset.state = "failed"
        raise
    finally:
        models.db.session.commit()
Example #26
def load_cities(cities_file, cities_db_params, cities_exec_path):
    # type: (str, DbParams, str) -> None
    """
    Load cities in the database.

    :param cities_file: the path to the data file for the "cities" binary
    :param cities_db_params: the parameters of the database
    :param cities_exec_path: the path of the directory containing the "cities" binary
    """
    cities_exec = os.path.join(cities_exec_path, 'cities')  # type: str

    if launch_exec.launch_exec(cities_exec, [
            "-i", cities_file, "--connection-string",
            cities_db_params.old_school_cnx_string()
    ], logger):
        raise Exception(
            'Error: problem with running {}, stopping'.format(cities_exec))
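For illustration only, load_cities could then be driven like this, reusing the hypothetical DbParams sketched after example #11 (paths and credentials are made up):

params = DbParams(host='localhost', dbname='cities', user='navitia', password='navitia')
load_cities('/data/cities/france-latest.osm.pbf', params, '/usr/local/bin')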
Example #27
def cosmogony2mimir(self, autocomplete_instance, filename, job_id, dataset_uid, autocomplete_version):
    executable = "cosmogony2mimir" if autocomplete_version == 2 else "cosmogony2mimir7"
    autocomplete_instance = models.db.session.merge(autocomplete_instance)  # reattach the object
    logger = get_autocomplete_instance_logger(autocomplete_instance, task_id=job_id)
    logger.debug('running {} for {} (autocomplete version {})'.format(executable, job_id, autocomplete_version))
    job = models.Job.query.get(job_id)
    cosmo_file = unzip_if_needed(filename)
    params = get_cosmogony2mimir_params(cosmo_file, autocomplete_instance, autocomplete_version)
    try:
        res = launch_exec(executable, params, logger)
        if res != 0:
            # @TODO: exception
            raise ValueError('{} failed'.format(executable))
    except:
        logger.exception('')
        job.state = 'failed'
        models.db.session.commit()
        raise
Example #28
def bano2mimir(self, autocomplete_instance, filename, job_id, dataset_uid):
    """ launch bano2mimir """
    logger = logging.getLogger("autocomplete")
    job = models.Job.query.get(job_id)
    cnx_string = current_app.config['MIMIR_URL'] + '/' + MIMIR_INDEX
    working_directory = unzip_if_needed(filename)
    try:
        res = launch_exec("bano2mimir",
                          ['-i', working_directory, '--connection-string', cnx_string],
                          logger)
        if res != 0:
            # @TODO: exception
            raise ValueError('bano2mimir failed')
    except:
        logger.exception('')
        job.state = 'failed'
        models.db.session.commit()
        raise
Example #29
def ed2nav(self, instance_config, job_id, custom_output_dir):
    """ Launch ed2nav"""
    job = models.Job.query.get(job_id)
    instance = job.instance

    logger = get_instance_logger(instance, task_id=job_id)
    try:
        output_file = instance_config.target_file

        if custom_output_dir:
            # we change the target_filename to store it in a subdir
            target_path = os.path.join(os.path.dirname(output_file),
                                       custom_output_dir)
            output_file = os.path.join(target_path,
                                       os.path.basename(output_file))
            if not os.path.exists(target_path):
                os.makedirs(target_path)

        connection_string = make_connection_string(instance_config)
        argv = ["-o", output_file, "--connection-string", connection_string]
        if current_app.config.get('CITIES_DATABASE_URI'):
            argv.extend([
                "--cities-connection-string",
                current_app.config['CITIES_DATABASE_URI']
            ])
        if instance.full_sn_geometries:
            argv.extend(['--full_street_network_geometries'])

        argv.extend(['--local_syslog'])
        argv.extend(["--log_comment", instance_config.name])

        res = None
        with collect_metric('ed2nav', job, None):
            res = launch_exec('ed2nav', argv, logger)
            os.system('sync')  # we sync to be safe
        if res != 0:
            raise ValueError('ed2nav failed')
    except:
        logger.exception('')
        job.state = 'failed'
        models.db.session.commit()
        raise
Example #30
def stops2mimir(self, instance_name, input, job_id=None, dataset_uid=None):
    """
    launch stops2mimir

    Note: this is temporary; this will be done by tartare once tartare is available
    """
    # We don't have job_id while doing a reimport of all instances with import_stops_in_mimir = true
    if job_id:
        job = models.Job.query.get(job_id)
        instance = job.instance
        logger = get_instance_logger(instance, task_id=job_id)
    else:
        logger = get_task_logger(logging.getLogger("autocomplete"))
    cnx_string = current_app.config['MIMIR_URL']

    working_directory = os.path.dirname(input)

    stops_file = os.path.join(working_directory, 'stops.txt')

    # Note: for the moment the dataset is the instance name; we'll need to change this when we aggregate
    argv = [
        '--input', stops_file, '--connection-string', cnx_string, '--dataset',
        instance_name
    ]

    try:
        res = launch_exec('stops2mimir', argv, logger)
        if res != 0:
            # Do not raise an error because it would break the celery task chain;
            # stops2mimir has to be non-blocking.
            # @TODO: find a way to raise an error without breaking the celery task chain
            logger.error('stops2mimir failed')
            if job_id:
                job.state = 'failed'
                models.db.session.commit()
    except:
        logger.exception('')
        if job_id:
            job.state = 'failed'
            models.db.session.commit()

        raise