def ingest(request):
    """ View to ingest a browse report delivered via HTTP-POST. The XML file
        is expected to be included within the POST data.
    """
    try:
        status = get_status()
        if not status.running:
            raise IngestionException("Ingest is not possible if the state of "
                                     "the server is not 'RUNNING'.",
                                     "InvalidState")

        if request.method != "POST":
            raise IngestionException("Method '%s' is not allowed, use 'POST' "
                                     "only." % request.method.upper(),
                                     "MethodNotAllowed")

        try:
            document = etree.parse(request)
        except etree.XMLSyntaxError, e:
            raise IngestionException("Could not parse request XML. Error was: "
                                     "'%s'." % str(e), "InvalidRequest")

        try:
            parsed_browse_report = decode_browse_report(document.getroot())
            results = ingest_browse_report(parsed_browse_report)
        # unify exception code for some exception types
        except (XMLDecodeError, DecodingException), e:
            raise IngestionException(str(e), "InvalidRequest")
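
# A minimal usage sketch, not part of the original module: exercising the
# `ingest` view with Django's test client. The URL path "/ingest/" and the
# sample file name are assumptions for illustration only.
from django.test.client import Client

def post_browse_report(path="browse_report.xml"):
    # Read the browse report XML and send it as the raw POST body; the view
    # above parses the request object directly via etree.parse(request).
    with open(path) as f:
        return Client().post("/ingest/", f.read(), content_type="text/xml")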
def _handle_file(self, filename, create_result, config):
    logger.info("Processing input file '%s'." % filename)

    # parse the xml file and obtain its data structures as a
    # parsed browse report.
    logger.info("Parsing XML file '%s'." % filename)
    document = etree.parse(filename)
    parsed_browse_report = decode_browse_report(document.getroot())

    # ingest the parsed browse report
    logger.info("Ingesting browse report with %d browse%s."
                % (len(parsed_browse_report),
                   "s" if len(parsed_browse_report) > 1 else ""))

    results = ingest_browse_report(parsed_browse_report, config=config)

    if create_result:
        # print ingest result
        print(render_to_string("control/ingest_response.xml",
                               {"results": results}))

    logger.info("%d browse%s handled, %d successfully replaced "
                "and %d successfully inserted."
                % (results.to_be_replaced,
                   "s have been" if results.to_be_replaced > 1 else " has been",
                   results.actually_replaced, results.actually_inserted))
def _handle_file(self, filename, create_result, config):
    logger.info("Processing input file '%s'." % filename)

    # parse the xml file and obtain its data structures as a
    # parsed browse report.
    self.print_msg("Parsing XML file '%s'." % filename, 1)
    document = etree.parse(filename)
    parsed_browse_report = decode_browse_report(document.getroot())

    # ingest the parsed browse report
    self.print_msg("Ingesting browse report with %d browse%s."
                   % (len(parsed_browse_report),
                      "s" if len(parsed_browse_report) > 1 else ""))
    results = ingest_browse_report(parsed_browse_report, config=config)

    if create_result:
        # print ingest result
        print(render_to_string("control/ingest_response.xml",
                               {"results": results}))

    self.print_msg("%d browses have been handled, of which %d have been "
                   "successfully replaced and %d successfully inserted."
                   % (results.to_be_replaced, results.actually_replaced,
                      results.actually_inserted))
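
# A hypothetical driver sketch showing how a management command's handle()
# might dispatch each input file to _handle_file() above. The option name
# "create_result" and the get_ngeo_config() accessor are assumptions; the
# original command may wire this up differently.
def handle(self, *filenames, **options):
    config = get_ngeo_config()
    create_result = options.get("create_result", False)

    for filename in filenames:
        try:
            self._handle_file(filename, create_result, config)
        except Exception, e:
            logger.error("Ingest of file '%s' failed: %s" % (filename, str(e)))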
def import_browse_report(p, browse_report_file, browse_layer_model, crs,
                         seed_cache_levels, import_cache_levels, config):
    """ Import a single browse report from the package `p`: ingest each
        contained browse, re-import the packaged cache tiles and re-seed the
        configured cache levels.
    """
    seed_areas = []

    report_result = IngestBrowseReportResult()

    browse_report = decode_browse_report(etree.parse(browse_report_file))
    browse_report_model = create_browse_report(browse_report,
                                               browse_layer_model)

    for browse in browse_report:
        with transaction.commit_manually():
            with transaction.commit_manually(using="mapcache"):
                try:
                    result = import_browse(p, browse, browse_report_model,
                                           browse_layer_model, crs, seed_areas,
                                           config)
                    report_result.add(result)
                    transaction.commit()
                    transaction.commit(using="mapcache")

                except Exception, e:
                    logger.error("Failure during import of browse '%s'."
                                 % browse.browse_identifier)
                    logger.debug(traceback.format_exc() + "\n")
                    transaction.rollback()
                    transaction.rollback(using="mapcache")

                    report_result.add(IngestBrowseFailureResult(
                        browse.browse_identifier, type(e).__name__, str(e)))

                    continue

        tileset_name = browse_layer_model.id
        dim = isotime(browse.start_time) + "/" + isotime(browse.end_time)

        ts = tileset.open(get_tileset_path(browse_layer_model.browse_type,
                                           config), mode="w")

        grid = URN_TO_GRID[browse_layer_model.grid]
        tile_num = 0

        # import cache
        for minzoom, maxzoom in import_cache_levels:
            logger.info("Importing cached tiles from zoom level %d to %d."
                        % (minzoom, maxzoom))

            for x, y, z, f in p.get_cache_files(tileset_name, grid, dim):
                if z < minzoom or z > maxzoom:
                    continue

                ts.add_tile(tileset_name, grid, dim, x, y, z, f)
                tile_num += 1

        logger.info("Imported %d cached tiles." % tile_num)

        # seed cache
        for minzoom, maxzoom in seed_cache_levels:
            logger.info("Re-seeding tile cache from zoom level %d to %d."
                        % (minzoom, maxzoom))

            seed_mapcache(tileset=browse_layer_model.id,
                          grid=browse_layer_model.grid,
                          minx=result.extent[0], miny=result.extent[1],
                          maxx=result.extent[2], maxy=result.extent[3],
                          minzoom=minzoom, maxzoom=maxzoom,
                          start_time=result.time_interval[0],
                          end_time=result.time_interval[1],
                          delete=False,
                          **get_mapcache_seed_config(config))

            logger.info("Successfully finished seeding.")
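
# A hypothetical call-site sketch: importing every browse report contained in
# a migration package. The p.get_browse_reports() iterator and the zoom-level
# ranges are assumptions for illustration, not taken from the original code.
def import_all_reports(p, browse_layer_model, crs, config):
    results = []
    for browse_report_file in p.get_browse_reports():
        results.append(import_browse_report(
            p, browse_report_file, browse_layer_model, crs,
            seed_cache_levels=[(0, 5)], import_cache_levels=[(6, 10)],
            config=config))
    return results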