def run_task(run_uid, run, stage_dir, download_dir):
    LOG.debug('Running ExportRun with id: {0}'.format(run_uid))
    job = run.job
    valid_name = get_valid_filename(job.name)
    geom = load_geometry(job.simplified_geom.json)
    export_formats = job.export_formats
    mapping = Mapping(job.feature_selection)

    def start_task(name):
        task = ExportTask.objects.get(run__uid=run_uid, name=name)
        task.status = 'RUNNING'
        task.started_at = timezone.now()
        task.save()

    def finish_task(name, created_files, planet_file=False):
        LOG.debug('Task Finish: {0} for run: {1}'.format(name, run_uid))
        task = ExportTask.objects.get(run__uid=run_uid, name=name)
        task.status = 'SUCCESS'
        task.finished_at = timezone.now()
        # assumes each file only has one part (all are zips or PBFs)
        task.filenames = [basename(file.parts[0]) for file in created_files]
        if planet_file is False:
            total_bytes = 0
            for file in created_files:
                total_bytes += file.size()
            task.filesize_bytes = total_bytes
        task.save()

    is_hdx_export = HDXExportRegion.objects.filter(job_id=run.job_id).exists()
    is_partner_export = PartnerExportRegion.objects.filter(
        job_id=run.job_id).exists()

    planet_file = False
    polygon_centroid = False

    if is_hdx_export:
        planet_file = HDXExportRegion.objects.get(
            job_id=run.job_id).planet_file

    if is_partner_export:
        export_region = PartnerExportRegion.objects.get(job_id=run.job_id)
        planet_file = export_region.planet_file
        polygon_centroid = export_region.polygon_centroid

        # Run PDC special task.
        if export_region.group.name == "PDC" and planet_file is True and polygon_centroid is True:
            params = {
                "PLANET_FILE": settings.PLANET_FILE,
                "MAPPING": mapping,
                "STAGE_DIR": stage_dir,
                "DOWNLOAD_DIR": download_dir,
                "VALID_NAME": valid_name
            }
            if "geopackage" not in export_formats:
                raise ValueError("geopackage must be the export format")
            paths = run_pdc_task(params)

            start_task("geopackage")
            target = join(download_dir, "{}.gpkg".format(valid_name))
            shutil.move(paths["geopackage"], target)
            os.chmod(target, 0o644)
            finish_task("geopackage",
                        [osm_export_tool.File("gpkg", [target], '')],
                        planet_file)

            send_completion_notification(run)

            run.status = 'COMPLETED'
            run.finished_at = timezone.now()
            run.save()
            LOG.debug('Finished ExportRun with id: {0}'.format(run_uid))
            return

    if is_hdx_export:
        geopackage = None
        shp = None
        kml = None

        tabular_outputs = []
        if 'geopackage' in export_formats:
            geopackage = tabular.MultiGeopackage(join(stage_dir, valid_name),
                                                 mapping)
            tabular_outputs.append(geopackage)
            start_task('geopackage')

        if 'shp' in export_formats:
            shp = tabular.Shapefile(join(stage_dir, valid_name), mapping)
            tabular_outputs.append(shp)
            start_task('shp')

        if 'kml' in export_formats:
            kml = tabular.Kml(join(stage_dir, valid_name), mapping)
            tabular_outputs.append(kml)
            start_task('kml')

        if planet_file:
            h = tabular.Handler(tabular_outputs,
                                mapping,
                                polygon_centroid=polygon_centroid)
            source = OsmiumTool('osmium',
                                settings.PLANET_FILE,
                                geom,
                                join(stage_dir, 'extract.osm.pbf'),
                                tempdir=stage_dir)
        else:
            h = tabular.Handler(tabular_outputs,
                                mapping,
                                clipping_geom=geom,
                                polygon_centroid=polygon_centroid)
            mapping_filter = mapping
            if job.unfiltered:
                mapping_filter = None
            source = Overpass(settings.OVERPASS_API_URL,
                              geom,
                              join(stage_dir, 'overpass.osm.pbf'),
                              tempdir=stage_dir,
                              use_curl=True,
                              mapping=mapping_filter)

        LOG.debug('Source start for run: {0}'.format(run_uid))
        source_path = source.path()
        LOG.debug('Source end for run: {0}'.format(run_uid))
        h.apply_file(source_path, locations=True, idx='sparse_file_array')

        all_zips = []

        def add_metadata(z, theme):
            columns = []
            for key in theme.keys:
                columns.append(
                    '{0} http://wiki.openstreetmap.org/wiki/Key:{0}'.format(
                        key))
            columns = '\n'.join(columns)
            readme = ZIP_README.format(criteria=theme.matcher.to_sql(),
                                       columns=columns)
            z.writestr("README.txt", readme)

        if geopackage:
            geopackage.finalize()
            zips = []
            for theme in mapping.themes:
                destination = join(
                    download_dir,
                    valid_name + '_' + slugify(theme.name) + '_gpkg.zip')
                matching_files = [
                    f for f in geopackage.files
                    if 'theme' in f.extra and f.extra['theme'] == theme.name
                ]
                with zipfile.ZipFile(destination, 'w', zipfile.ZIP_DEFLATED,
                                     True) as z:
                    add_metadata(z, theme)
                    for file in matching_files:
                        for part in file.parts:
                            z.write(part, os.path.basename(part))
                zips.append(
                    osm_export_tool.File('geopackage', [destination],
                                         {'theme': theme.name}))
            finish_task('geopackage', zips)
            all_zips += zips

        if shp:
            shp.finalize()
            zips = []
            for file in shp.files:
                # for HDX geopreview to work
                # each file (_polygons, _lines) is a separate zip resource
                # the zipfile must end with only .zip (not .shp.zip)
                destination = join(
                    download_dir,
                    os.path.basename(file.parts[0]).replace('.', '_') + '.zip')
                with zipfile.ZipFile(destination, 'w', zipfile.ZIP_DEFLATED,
                                     True) as z:
                    theme = [
                        t for t in mapping.themes
                        if t.name == file.extra['theme']
                    ][0]
                    add_metadata(z, theme)
                    for part in file.parts:
                        z.write(part, os.path.basename(part))
                zips.append(
                    osm_export_tool.File('shp', [destination],
                                         {'theme': file.extra['theme']}))
            finish_task('shp', zips)
            all_zips += zips

        if kml:
            kml.finalize()
            zips = []
            for file in kml.files:
                destination = join(
                    download_dir,
                    os.path.basename(file.parts[0]).replace('.', '_') + '.zip')
                with zipfile.ZipFile(destination, 'w', zipfile.ZIP_DEFLATED,
                                     True) as z:
                    theme = [
                        t for t in mapping.themes
                        if t.name == file.extra['theme']
                    ][0]
                    add_metadata(z, theme)
                    for part in file.parts:
                        z.write(part, os.path.basename(part))
                zips.append(
                    osm_export_tool.File('kml', [destination],
                                         {'theme': file.extra['theme']}))
            finish_task('kml', zips)
            all_zips += zips

        if 'garmin_img' in export_formats:
            start_task('garmin_img')
            garmin_files = nontabular.garmin(source_path,
                                             settings.GARMIN_SPLITTER,
                                             settings.GARMIN_MKGMAP,
                                             tempdir=stage_dir)
            zipped = create_package(join(download_dir,
                                         valid_name + '_gmapsupp_img.zip'),
                                    garmin_files,
                                    boundary_geom=geom,
                                    output_name='garmin_img')
            all_zips.append(zipped)
            finish_task('garmin_img', [zipped])

        if settings.SYNC_TO_HDX:
            print("Syncing to HDX")
            region = HDXExportRegion.objects.get(job_id=run.job_id)
            public_dir = settings.HOSTNAME + join(settings.EXPORT_MEDIA_ROOT,
                                                  run_uid)
            sync_region(region, all_zips, public_dir)
        send_hdx_completion_notification(run,
                                         run.job.hdx_export_region_set.first())
    else:
        geopackage = None
        shp = None
        kml = None

        tabular_outputs = []
        if 'geopackage' in export_formats:
            geopackage = tabular.Geopackage(join(stage_dir, valid_name),
                                            mapping)
            tabular_outputs.append(geopackage)
            start_task('geopackage')

        if 'shp' in export_formats:
            shp = tabular.Shapefile(join(stage_dir, valid_name), mapping)
            tabular_outputs.append(shp)
            start_task('shp')

        if 'kml' in export_formats:
            kml = tabular.Kml(join(stage_dir, valid_name), mapping)
            tabular_outputs.append(kml)
            start_task('kml')

        if planet_file:
            h = tabular.Handler(tabular_outputs,
                                mapping,
                                polygon_centroid=polygon_centroid)
            source = OsmiumTool('osmium',
                                settings.PLANET_FILE,
                                geom,
                                join(stage_dir, 'extract.osm.pbf'),
                                tempdir=stage_dir,
                                mapping=mapping)
        else:
            h = tabular.Handler(tabular_outputs,
                                mapping,
                                clipping_geom=geom,
                                polygon_centroid=polygon_centroid)
            mapping_filter = mapping
            if job.unfiltered:
                mapping_filter = None
            source = Overpass(settings.OVERPASS_API_URL,
                              geom,
                              join(stage_dir, 'overpass.osm.pbf'),
                              tempdir=stage_dir,
                              use_curl=True,
                              mapping=mapping_filter)

        LOG.debug('Source start for run: {0}'.format(run_uid))
        source_path = source.path()
        LOG.debug('Source end for run: {0}'.format(run_uid))
        h.apply_file(source_path, locations=True, idx='sparse_file_array')

        bundle_files = []

        if geopackage:
            geopackage.finalize()
            zipped = create_package(join(download_dir, valid_name + '_gpkg.zip'),
                                    geopackage.files,
                                    boundary_geom=geom)
            bundle_files += geopackage.files
            finish_task('geopackage', [zipped])

        if shp:
            shp.finalize()
            zipped = create_package(join(download_dir, valid_name + '_shp.zip'),
                                    shp.files,
                                    boundary_geom=geom)
            bundle_files += shp.files
            finish_task('shp', [zipped])

        if kml:
            kml.finalize()
            zipped = create_package(join(download_dir, valid_name + '_kml.zip'),
                                    kml.files,
                                    boundary_geom=geom)
            bundle_files += kml.files
            finish_task('kml', [zipped])

        if 'garmin_img' in export_formats:
            start_task('garmin_img')
            garmin_files = nontabular.garmin(source_path,
                                             settings.GARMIN_SPLITTER,
                                             settings.GARMIN_MKGMAP,
                                             tempdir=stage_dir)
            bundle_files += garmin_files
            zipped = create_package(join(download_dir,
                                         valid_name + '_gmapsupp_img.zip'),
                                    garmin_files,
                                    boundary_geom=geom)
            finish_task('garmin_img', [zipped])

        if 'mwm' in export_formats:
            start_task('mwm')
            mwm_dir = join(stage_dir, 'mwm')
            if not exists(mwm_dir):
                os.makedirs(mwm_dir)
            mwm_files = nontabular.mwm(source_path, mwm_dir,
                                       settings.GENERATE_MWM,
                                       settings.GENERATOR_TOOL)
            bundle_files += mwm_files
            zipped = create_package(join(download_dir, valid_name + '_mwm.zip'),
                                    mwm_files,
                                    boundary_geom=geom)
            finish_task('mwm', [zipped])

        if 'osmand_obf' in export_formats:
            start_task('osmand_obf')
            osmand_files = nontabular.osmand(source_path,
                                             settings.OSMAND_MAP_CREATOR_DIR,
                                             tempdir=stage_dir)
            bundle_files += osmand_files
            zipped = create_package(join(download_dir,
                                         valid_name + '_Osmand2_obf.zip'),
                                    osmand_files,
                                    boundary_geom=geom)
            finish_task('osmand_obf', [zipped])

        if 'mbtiles' in export_formats:
            start_task('mbtiles')
            mbtiles_files = nontabular.mbtiles(
                geom, join(stage_dir, valid_name + '.mbtiles'),
                job.mbtiles_source, job.mbtiles_minzoom, job.mbtiles_maxzoom)
            bundle_files += mbtiles_files
            zipped = create_package(join(download_dir,
                                         valid_name + '_mbtiles.zip'),
                                    mbtiles_files,
                                    boundary_geom=geom)
            finish_task('mbtiles', [zipped])

        if 'osm_pbf' in export_formats:
            bundle_files += [
                osm_export_tool.File('osm_pbf', [source_path], '')
            ]

        if 'bundle' in export_formats:
            start_task('bundle')
            zipped = create_posm_bundle(
                join(download_dir, valid_name + '-bundle.tar.gz'),
                bundle_files, job.name, valid_name, job.description, geom)
            finish_task('bundle', [zipped])

        # do this last so we can do a mv instead of a copy
        if 'osm_pbf' in export_formats:
            start_task('osm_pbf')
            target = join(download_dir, valid_name + '.osm.pbf')
            shutil.move(source_path, target)
            os.chmod(target, 0o644)
            finish_task('osm_pbf',
                        [osm_export_tool.File('pbf', [target], '')],
                        planet_file)

        send_completion_notification(run)

    run.status = 'COMPLETED'
    run.finished_at = timezone.now()
    run.save()
    LOG.debug('Finished ExportRun with id: {0}'.format(run_uid))
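
# --- Invocation sketch (illustrative; not part of this module) ---
# run_task is normally driven by the export worker once an ExportRun and its
# ExportTask rows already exist. The names below (ExportRun.objects, run.uid,
# the staging/download paths) are assumptions inferred from the ORM calls
# above, shown only to illustrate the expected arguments:
#
#   run = ExportRun.objects.get(uid=some_run_uid)
#   run_task(run.uid, run, stage_dir='/tmp/stage/', download_dir='/tmp/download/')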

# --- Standalone example: running the osm_export_tool pipeline directly ---
# Assumes Mapping, Overpass, tabular, nontabular and join are imported from
# osm_export_tool / os.path as in run_task above, and that `geom` holds the
# area-of-interest geometry (e.g. from load_geometry).
tempdir = 'tmp'

with open('../osm_export_tool/mappings/default.yml', 'r') as f:
    mapping_txt = f.read()
mapping = Mapping(mapping_txt)

source = Overpass('http://overpass.hotosm.org',
                  geom,
                  join(tempdir, 'extract.osm.pbf'),
                  tempdir=tempdir,
                  mapping=mapping,
                  use_existing=False)

shp = tabular.Shapefile("tmp/example", mapping)
gpkg = tabular.Geopackage("tmp/example", mapping)
kml = tabular.Kml("tmp/example", mapping)
tabular_outputs = [shp, gpkg, kml]

h = tabular.Handler(tabular_outputs, mapping)
h.apply_file(source.path(), locations=True, idx='sparse_file_array')

for output in tabular_outputs:
    output.finalize()

osmand_files = nontabular.osmand(source.path(),
                                 'tools/OsmAndMapCreator-main',
                                 tempdir=tempdir)
garmin_files = nontabular.garmin(source.path(),
                                 'tools/splitter-r583/splitter.jar',
                                 'tools/mkgmap-r3890/mkgmap.jar',
                                 tempdir=tempdir)  # tempdir kwarg assumed, following the garmin() calls above
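
# --- Packaging sketch (not in the original example) ---
# A minimal continuation showing how the tabular outputs above could be
# zipped for distribution, mirroring the stdlib `zipfile` usage in the HDX
# branch of run_task. The zip paths under `tempdir` are illustrative
# assumptions; each output's .files holds File objects whose .parts list the
# files written to disk.
import os
import zipfile

for output, suffix in [(shp, 'shp'), (gpkg, 'gpkg'), (kml, 'kml')]:
    destination = join(tempdir, 'example_{0}.zip'.format(suffix))
    with zipfile.ZipFile(destination, 'w', zipfile.ZIP_DEFLATED, True) as z:
        for f in output.files:
            for part in f.parts:
                z.write(part, os.path.basename(part))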