Example #1
 def _save_config(self, conn, config):
     """ Persist the configuration values that must remain stable for
         this database by writing them as database properties.
     """
     for prop, value in ((DBCFG_NORMALIZATION, self.normalization),
                         (DBCFG_MAXWORDFREQ, config.MAX_WORD_FREQUENCY)):
         properties.set_property(conn, prop, value)
Example #2
def test_set_property_new(prop_table, temp_db_conn, temp_db_cursor, prefill):
    """ set_property() must create the row when it is missing and
        overwrite any pre-existing value for the same key.
    """
    if prefill:
        # Simulate a previously stored value for the same property.
        temp_db_cursor.execute(
            "INSERT INTO nominatim_properties VALUES('something', 'bar')")

    properties.set_property(temp_db_conn, 'something', 'else')

    stored_value = temp_db_cursor.scalar("""SELECT value FROM nominatim_properties
                                    WHERE property = 'something'""")
    assert stored_value == 'else'
    assert properties.get_property(temp_db_conn, 'something') == 'else'
Example #3
def test_set_property_new(property_factory, temp_db_conn, temp_db_cursor,
                          prefill):
    """ Writing a property must either insert a fresh row or replace
        the value of an already existing one.
    """
    if prefill:
        property_factory('something', 'bar')

    properties.set_property(temp_db_conn, 'something', 'else')

    # Verify through the module API as well as directly in the table.
    assert properties.get_property(temp_db_conn, 'something') == 'else'
    assert temp_db_cursor.scalar("""SELECT value FROM nominatim_properties
                                    WHERE property = 'something'""") == 'else'
Example #4
    def _set_database_date(dsn):
        """ Compute the data date of the database, record it in the status
            table and store the software version as a database property.

            Failure to determine the date is logged but not fatal.
        """
        with connect(dsn) as conn:
            try:
                db_date = status.compute_database_date(conn)
                status.set_status(conn, db_date)
                LOG.info('Database is at %s.', db_date)
            except Exception as err: # pylint: disable=broad-except
                LOG.error('Cannot determine date of database: %s', err)

            version_str = '{0[0]}.{0[1]}.{0[2]}-{0[3]}'.format(NOMINATIM_VERSION)
            properties.set_property(conn, 'database_version', version_str)
Example #5
def migrate(config, paths):
    """ Check the database against the current software version and run
        all outstanding migration functions, if necessary.

        Returns 0 on success. The version reached is recorded in the
        'database_version' property afterwards.
    """
    with connect(config.get_libpq_dsn()) as conn:
        # Databases created before the property table existed carry no
        # version information at all.
        if conn.table_exists('nominatim_properties'):
            db_version_str = properties.get_property(conn, 'database_version')
        else:
            db_version_str = None

        if db_version_str is not None:
            # Version strings have the form '<major>.<minor>.<patch>-<db>'.
            parts = db_version_str.split('.')
            db_version = tuple(int(x) for x in parts[:2] + parts[2].split('-'))

            if db_version == NOMINATIM_VERSION:
                LOG.warning("Database already at latest version (%s)",
                            db_version_str)
                return 0

            LOG.info("Detected database version: %s", db_version_str)
        else:
            db_version = _guess_version(conn)

        # Apply every migration targeted at a version at or above the
        # database's current one, committing after each step.
        has_run_migration = False
        for version, func in _MIGRATION_FUNCTIONS:
            if db_version <= version:
                LOG.warning("Running: %s (%s)",
                            func.__doc__.split('\n', 1)[0],
                            '{0[0]}.{0[1]}.{0[2]}-{0[3]}'.format(version))
                func(conn=conn, config=config, paths=paths)
                conn.commit()
                has_run_migration = True

        if has_run_migration:
            # SQL functions must be recreated against the migrated schema.
            LOG.warning('Updating SQL functions.')
            refresh.create_functions(conn, config)
            tokenizer = tokenizer_factory.get_tokenizer_for_db(config)
            tokenizer.update_sql_functions(config)

        properties.set_property(
            conn, 'database_version',
            '{0[0]}.{0[1]}.{0[2]}-{0[3]}'.format(NOMINATIM_VERSION))

        conn.commit()

    return 0
Example #6
def tokenizer_mock(monkeypatch, property_table, temp_db_conn, tmp_path):
    """ Configure the environment so that the tokenizer factory loads the
        dummy test tokenizer. Returns a factory function that creates new
        dummy tokenizer instances on demand.
    """
    monkeypatch.setenv('NOMINATIM_TOKENIZER', 'dummy')

    def _fake_import(module, *args, **kwargs):
        # Any module import request is answered with the dummy tokenizer.
        return dummy_tokenizer

    monkeypatch.setattr(importlib, "import_module", _fake_import)
    properties.set_property(temp_db_conn, 'tokenizer', 'dummy')

    def _make_tokenizer():
        return dummy_tokenizer.DummyTokenizer(None, None)

    return _make_tokenizer
 def _save_config(self, config):
     """ Persist the configuration values that must stay fixed for the
         lifetime of the database as database properties.
     """
     with connect(self.dsn) as conn:
         for prop, value in ((DBCFG_NORMALIZATION, self.normalization),
                             (DBCFG_MAXWORDFREQ, config.MAX_WORD_FREQUENCY),
                             (DBCFG_TRANSLITERATION, self.transliteration),
                             (DBCFG_ABBREVIATIONS,
                              json.dumps(self.abbreviations))):
             set_property(conn, prop, value)
Example #8
 def save_config_to_db(self, conn):
     """ Persist the immutable part of the tokenizer configuration as
         database properties.
     """
     for prop, value in ((DBCFG_IMPORT_NORM_RULES, self.normalization_rules),
                         (DBCFG_IMPORT_TRANS_RULES,
                          self.transliteration_rules),
                         (DBCFG_IMPORT_ANALYSIS_RULES,
                          json.dumps(self.analysis_rules))):
         set_property(conn, prop, value)
Example #9
    def run(args):  # pylint: disable=too-many-statements
        """ Execute the full one-time import of an OSM data file.

            The import proceeds in strictly ordered stages: database
            setup, OSM data import, table/function creation, data load,
            postcode computation, indexing and post-processing.
            args.continue_at allows resuming at the 'load-data' or
            'indexing' stage of a previously interrupted run.

            Returns 0 on success; raises UsageError when the OSM file
            given on the command line does not exist.
        """
        # Imports deferred so the CLI starts up fast for other commands.
        from ..tools import database_import
        from ..tools import refresh
        from ..indexer.indexer import Indexer
        from ..tools import postcodes

        if args.osm_file and not Path(args.osm_file).is_file():
            LOG.fatal("OSM file '%s' does not exist.", args.osm_file)
            raise UsageError('Cannot access file.')

        # Stage 1: full setup and OSM import (only on a fresh run).
        if args.continue_at is None:
            database_import.setup_database_skeleton(
                args.config.get_libpq_dsn(),
                args.data_dir,
                args.no_partitions,
                rouser=args.config.DATABASE_WEBUSER)

            LOG.warning('Installing database module')
            with connect(args.config.get_libpq_dsn()) as conn:
                database_import.install_module(
                    args.module_dir,
                    args.project_dir,
                    args.config.DATABASE_MODULE_PATH,
                    conn=conn)

            LOG.warning('Importing OSM data file')
            database_import.import_osm_data(Path(args.osm_file),
                                            args.osm2pgsql_options(0, 1),
                                            drop=args.no_updates,
                                            ignore_errors=args.ignore_errors)

            # Functions are created in multiple passes because later
            # passes depend on tables/triggers created in between.
            with connect(args.config.get_libpq_dsn()) as conn:
                LOG.warning('Create functions (1st pass)')
                refresh.create_functions(conn, args.config, args.sqllib_dir,
                                         False, False)
                LOG.warning('Create tables')
                database_import.create_tables(conn,
                                              args.config,
                                              args.sqllib_dir,
                                              reverse_only=args.reverse_only)
                refresh.load_address_levels_from_file(
                    conn, Path(args.config.ADDRESS_LEVEL_CONFIG))
                LOG.warning('Create functions (2nd pass)')
                refresh.create_functions(conn, args.config, args.sqllib_dir,
                                         False, False)
                LOG.warning('Create table triggers')
                database_import.create_table_triggers(conn, args.config,
                                                      args.sqllib_dir)
                LOG.warning('Create partition tables')
                database_import.create_partition_tables(
                    conn, args.config, args.sqllib_dir)
                LOG.warning('Create functions (3rd pass)')
                refresh.create_functions(conn, args.config, args.sqllib_dir,
                                         False, False)

            LOG.warning('Importing wikipedia importance data')
            data_path = Path(args.config.WIKIPEDIA_DATA_PATH
                             or args.project_dir)
            if refresh.import_wikipedia_articles(args.config.get_libpq_dsn(),
                                                 data_path) > 0:
                # A missing dump is non-fatal: defaults are used instead.
                LOG.error('Wikipedia importance dump file not found. '
                          'Will be using default importances.')

        # Stage 2: load placex data and compute postcodes.
        if args.continue_at is None or args.continue_at == 'load-data':
            LOG.warning('Initialise tables')
            with connect(args.config.get_libpq_dsn()) as conn:
                database_import.truncate_data_tables(
                    conn, args.config.MAX_WORD_FREQUENCY)

            LOG.warning('Load data into placex table')
            database_import.load_data(args.config.get_libpq_dsn(),
                                      args.data_dir, args.threads
                                      or psutil.cpu_count() or 1)

            LOG.warning('Calculate postcodes')
            postcodes.import_postcodes(args.config.get_libpq_dsn(),
                                       args.project_dir)

        # Stage 3: indexing.
        if args.continue_at is None or args.continue_at in ('load-data',
                                                            'indexing'):
            # When resuming directly at 'indexing', the pending index may
            # not have been created yet.
            if args.continue_at is not None and args.continue_at != 'load-data':
                with connect(args.config.get_libpq_dsn()) as conn:
                    SetupAll._create_pending_index(
                        conn, args.config.TABLESPACE_ADDRESS_INDEX)
            LOG.warning('Indexing places')
            indexer = Indexer(args.config.get_libpq_dsn(), args.threads
                              or psutil.cpu_count() or 1)
            indexer.index_full(analyse=not args.index_noanalyse)

        # Stage 4: post-processing, website setup and status bookkeeping.
        LOG.warning('Post-process tables')
        with connect(args.config.get_libpq_dsn()) as conn:
            database_import.create_search_indices(conn,
                                                  args.config,
                                                  args.sqllib_dir,
                                                  drop=args.no_updates)
            LOG.warning('Create search index for default country names.')
            database_import.create_country_names(conn, args.config)

        webdir = args.project_dir / 'website'
        LOG.warning('Setup website at %s', webdir)
        refresh.setup_website(webdir, args.phplib_dir, args.config)

        with connect(args.config.get_libpq_dsn()) as conn:
            try:
                dbdate = status.compute_database_date(conn)
                status.set_status(conn, dbdate)
                LOG.info('Database is at %s.', dbdate)
            except Exception as exc:  # pylint: disable=broad-except
                # A missing date only affects later update runs; continue.
                LOG.error('Cannot determine date of database: %s', exc)

            properties.set_property(
                conn, 'database_version',
                '{0[0]}.{0[1]}.{0[2]}-{0[3]}'.format(NOMINATIM_VERSION))

        return 0
Example #10
File: mocks.py — Project: lonvia/Nominatim
 def set(self, name, value):
     """ Set a property in the table to the given value.

         Thin wrapper around properties.set_property() using the
         connection held by this mock.
     """
     properties.set_property(self.conn, name, value)