def test_add_tiger_data_bad_tarfile(def_config, tiger_table, tokenizer_mock, tmp_path):
    """ A file with a .tar.gz name but non-tar content must raise UsageError.
    """
    # Use a distinct local name: the original bound this path to 'tarfile',
    # shadowing the stdlib tarfile module that sibling tests rely on.
    bad_tar = tmp_path / 'sample.tar.gz'
    bad_tar.write_text("""Random text""")

    with pytest.raises(UsageError):
        tiger_data.add_tiger_data(str(bad_tar), def_config, 1, tokenizer_mock())
def test_add_tiger_data_bad_file(def_config, tiger_table, tokenizer_mock, tmp_path):
    """ A CSV file with unparsable content must not import any rows.
    """
    broken_csv = tmp_path / '1010.csv'
    broken_csv.write_text("""Random text""")

    tiger_data.add_tiger_data(str(tmp_path), def_config, 1, tokenizer_mock())

    assert tiger_table.count() == 0
def test_add_tiger_data_bad_file(def_config, tmp_path, sql_preprocessor, temp_db_cursor, threads, temp_db_with_extensions):
    """ Files without the expected extension are ignored by the import.
    """
    temp_db_cursor.execute('CREATE TABLE place (id INT)')

    ignored_file = tmp_path / '1010.txt'
    ignored_file.write_text("""Random text""")

    tiger_data.add_tiger_data(str(tmp_path), def_config, threads)

    assert temp_db_cursor.table_rows('place') == 0
def test_add_tiger_data(def_config, tmp_path, sql_preprocessor, temp_db_cursor, threads, temp_db_with_extensions):
    """ Valid statements from a SQL file are applied; a failing statement
        does not abort the whole import.
    """
    temp_db_cursor.execute('CREATE TABLE place (id INT)')

    script = tmp_path / '1010.sql'
    script.write_text("""INSERT INTO place values (1); INSERT INTO non_existant_table values (1);""")

    tiger_data.add_tiger_data(str(tmp_path), def_config, threads)

    assert temp_db_cursor.table_rows('place') == 1
def test_add_tiger_data_hnr_nan(def_config, tiger_table, tokenizer_mock, csv_factory, tmp_path):
    """ Rows with non-numeric house numbers are dropped; the numeric one
        survives with its start number intact.
    """
    csv_factory('file1', hnr_from=99)      # valid numeric house number
    csv_factory('file2', hnr_from='L12')   # alphanumeric -> rejected
    csv_factory('file3', hnr_to='12.4')    # fractional -> rejected

    tiger_data.add_tiger_data(str(tmp_path), def_config, 1, tokenizer_mock())

    assert tiger_table.count() == 1
    assert tiger_table.row()['start'] == 99
def test_add_tiger_data_empty_tarfile(def_config, tiger_table, tokenizer_mock, tmp_path):
    """ A tarball without any usable data file yields an empty tiger table.
    """
    archive = tmp_path / 'sample.tar.gz'
    with tarfile.open(str(archive), "w:gz") as tar:
        # pack this very test file - not a data file the importer accepts
        tar.add(__file__)

    tiger_data.add_tiger_data(str(archive), def_config, 1, tokenizer_mock())

    assert tiger_table.count() == 0
def test_add_tiger_data_tarfile(def_config, tiger_table, tokenizer_mock, tmp_path, src_dir, threads):
    """ CSV data packed inside a tarball is extracted and fully imported.
    """
    archive = tmp_path / 'sample.tar.gz'
    with tarfile.open(str(archive), "w:gz") as tar:
        tar.add(str(src_dir / 'test' / 'testdb' / 'tiger' / '01001.csv'))

    tiger_data.add_tiger_data(str(archive), def_config, threads, tokenizer_mock())

    assert tiger_table.count() == 6213
def test_add_tiger_data(dsn, src_dir, def_config, tmp_path, sql_preprocessor, temp_db_cursor, threads, temp_db):
    """ A SQL file in the data directory gets executed against the database.
    """
    # set up the minimal schema the import expects
    for statement in ('CREATE EXTENSION hstore',
                      'CREATE EXTENSION postgis',
                      'CREATE TABLE place (id INT)'):
        temp_db_cursor.execute(statement)

    script = tmp_path / '1010.sql'
    script.write_text("""INSERT INTO place values (1)""")

    tiger_data.add_tiger_data(dsn, str(tmp_path), threads, def_config, src_dir / 'lib-sql')

    assert temp_db_cursor.table_rows('place') == 1
def test_add_tiger_data_bad_tarfile(def_config, tmp_path, temp_db_cursor, threads, temp_db_with_extensions, sql_preprocessor):
    """ A tarball that only contains non-SQL files imports nothing.
    """
    temp_db_cursor.execute('CREATE TABLE place (id INT)')

    bogus_file = tmp_path / '1010.txt'
    bogus_file.write_text("""Random text""")

    archive = tmp_path / 'sample.tar.gz'
    with tarfile.open(str(archive), "w:gz") as tar:
        tar.add(bogus_file)

    tiger_data.add_tiger_data(str(archive), def_config, threads)

    assert temp_db_cursor.table_rows('place') == 0
def test_add_tiger_data_tarfile(def_config, tmp_path, temp_db_cursor, threads, temp_db_with_extensions, sql_preprocessor):
    """ SQL packed into a tarball is unpacked and executed; a failing
        statement inside the file does not abort the import.
    """
    temp_db_cursor.execute('CREATE TABLE place (id INT)')

    script = tmp_path / '1010.sql'
    script.write_text("""INSERT INTO place values (1); INSERT INTO non_existant_table values (1);""")

    archive = tmp_path / 'sample.tar.gz'
    with tarfile.open(str(archive), "w:gz") as tar:
        tar.add(script)

    tiger_data.add_tiger_data(str(archive), def_config, threads)

    assert temp_db_cursor.table_rows('place') == 1
def test_add_tiger_data_tarfile(dsn, src_dir, def_config, tmp_path, temp_db_cursor, threads, temp_db, sql_preprocessor):
    """ A SQL file packed into a tarball is unpacked and executed.
    """
    temp_db_cursor.execute('CREATE EXTENSION hstore')
    temp_db_cursor.execute('CREATE EXTENSION postgis')
    temp_db_cursor.execute('CREATE TABLE place (id INT)')
    sqlfile = tmp_path / '1010.sql'
    sqlfile.write_text("""INSERT INTO place values (1)""")
    # Create the archive inside tmp_path and read it back from the same
    # location. The original opened "sample.tar.gz" relative to the current
    # working directory but then read it from src_dir, which only worked
    # when cwd happened to be the source directory and left a stray archive
    # in the checkout.
    tar = tarfile.open(str(tmp_path / 'sample.tar.gz'), "w:gz")
    tar.add(sqlfile)
    tar.close()

    tiger_data.add_tiger_data(dsn, str(tmp_path / 'sample.tar.gz'), threads, def_config, src_dir / 'lib-sql')

    assert temp_db_cursor.table_rows('place') == 1
def run(args):
    """ Dispatch the add-data subcommand: tiger data import, file/diff
        import, or single OSM object import. Returns the tool's exit code.
    """
    from nominatim.tokenizer import factory as tokenizer_factory
    from nominatim.tools import tiger_data, add_osm_data

    if args.tiger_data:
        tokenizer = tokenizer_factory.get_tokenizer_for_db(args.config)
        return tiger_data.add_tiger_data(args.tiger_data,
                                         args.config,
                                         args.threads or psutil.cpu_count() or 1,
                                         tokenizer)

    osm2pgsql_params = args.osm2pgsql_options(default_cache=1000, default_threads=1)

    if args.file or args.diff:
        return add_osm_data.add_data_from_file(args.file or args.diff,
                                               osm2pgsql_params)

    # Single-object import: the first OSM type with an ID given wins,
    # matching the original if-cascade over node/way/relation.
    for osm_type in ('node', 'way', 'relation'):
        osm_id = getattr(args, osm_type)
        if osm_id:
            return add_osm_data.add_osm_object(osm_type, osm_id,
                                               args.use_main_api,
                                               osm2pgsql_params)

    return 0
def run(args):
    """ Dispatch the add-data subcommand: tiger data import or forwarding
        to the legacy update.php script.
    """
    if args.tiger_data:
        return tiger_data.add_tiger_data(args.tiger_data,
                                         args.config, args.threads or 1)

    params = ['update.php']
    # Exactly one import source is honoured - first match wins, mirroring
    # the original if/elif cascade.
    sources = (('--import-file', args.file),
               ('--import-diff', args.diff),
               ('--import-node', args.node),
               ('--import-way', args.way),
               ('--import-relation', args.relation))
    for option, value in sources:
        if value:
            params.extend((option, value))
            break

    if args.use_main_api:
        params.append('--use-main-api')

    return run_legacy_script(*params, nominatim_env=args)
def test_add_tiger_data_no_files(def_config, tiger_table, tokenizer_mock, tmp_path):
    """ An empty data directory leaves the tiger table untouched.
    """
    tiger_data.add_tiger_data(str(tmp_path), def_config, 1, tokenizer_mock())

    assert tiger_table.count() == 0
def test_add_tiger_data(def_config, src_dir, tiger_table, tokenizer_mock, threads):
    """ The full testdb tiger fixture imports all of its rows.
    """
    fixture_dir = src_dir / 'test' / 'testdb' / 'tiger'

    tiger_data.add_tiger_data(str(fixture_dir), def_config, threads, tokenizer_mock())

    assert tiger_table.count() == 6213