def notify(summary, message, urgency=None, ip_address=None, config=None):
    """Send a notification XML document to the CTRL server (best effort).

    When no ``ip_address`` is given, fall back first to the configured
    ``control.notification_url`` and then to the controller server's
    registered address. Errors while sending are logged, never raised.
    """
    config = config or get_ngeo_config()
    urgency = urgency or "INFO"

    if urgency not in ("INFO", "CRITICAL", "BLOCK"):
        raise ValueError("Invalid urgency value '%s'." % urgency)

    try:
        if not ip_address:
            # get the value for "notification_url" and fall back to "address"
            ip_address = safe_get(config, "control", "notification_url")
            if not ip_address:
                ctrl_config = get_controller_config(
                    get_controller_config_path(config))
                logger.debug(
                    "No 'notification_url' present. Trying to fall back to "
                    "registered IP address.")
                ip_address = safe_get(
                    ctrl_config, CONTROLLER_SERVER_SECTION, "address")
    except (IOError, NoSectionError):
        # probably no config file present, so IP cannot be determined.
        pass

    if not ip_address:
        # cannot log this error as we would run into an endless loop
        logger.info("Cannot send notification to CTRL.")
        return

    tree = E(
        "notifyControllerServer",
        E("header",
          E("timestamp", isotime(now())),
          E("instance", get_instance_id(config)),
          E("subsystem", "BROW"),
          E("urgency", urgency)),
        E("body",
          E("summary", summary),
          E("message", message)))

    # normalize the target into a full ".../notify" URL
    if not ip_address.startswith(("http://", "https://")):
        ip_address = "http://%s" % ip_address
    if not ip_address.endswith("/notify"):
        ip_address += "/notify"

    logger.info("Sending notification to CTRL at IP '%s'." % ip_address)

    req = urllib2.Request(
        url=ip_address,
        data=etree.tostring(tree, pretty_print=True),
        headers={'Content-Type': 'application/xml'})
    try:
        urllib2.urlopen(req, timeout=1)
    except (urllib2.HTTPError, urllib2.URLError) as e:
        logger.info("Error sending notification: %s" % e)
        logger.debug(traceback.format_exc() + "\n")
def get_ingest_config(config=None):
    """Return the ingest-related settings as a dictionary.

    ``strategy`` defaults to "merge" and ``merge_threshold`` is parsed
    from a time-delta string (default "5h").
    """
    config = config or get_ngeo_config()
    strategy = safe_get(config, INGEST_SECTION, "strategy", "merge")
    merge_threshold = parse_time_delta(
        safe_get(config, INGEST_SECTION, "merge_threshold", "5h"))
    return {
        "strategy": strategy,
        "merge_threshold": merge_threshold,
    }
def get_optimization_config(config=None):
    """ Returns a dictionary with all optimization specific config settings.

    Options that are missing from (or invalid in) the configuration are
    simply left out of the result; callers are expected to handle absent
    keys.
    """
    values = {}
    config = config or get_ngeo_config()

    values["bandmode"] = RGB

    # FIX: the original used bare ``except:`` clauses which also swallow
    # SystemExit/KeyboardInterrupt. Narrowed to ``except Exception`` while
    # keeping the best-effort "skip missing/invalid option" semantics.
    try:
        values["overviews"] = config.getboolean(INGEST_SECTION, "overviews")
    except Exception:
        pass

    values["overview_levels"] = safe_get(
        config, INGEST_SECTION, "overview_levels")
    if values["overview_levels"]:
        values["overview_levels"] = map(
            int, values["overview_levels"].split(","))

    try:
        values["overview_minsize"] = config.getint(
            INGEST_SECTION, "overview_minsize")
    except Exception:
        pass

    values["overview_resampling"] = safe_get(
        config, INGEST_SECTION, "overview_resampling")

    try:
        values["color_index"] = config.getboolean(
            INGEST_SECTION, "color_index")
    except Exception:
        pass

    try:
        values["footprint_alpha"] = config.getboolean(
            INGEST_SECTION, "footprint_alpha")
        # an alpha channel implies four bands
        if values["footprint_alpha"]:
            values["bandmode"] = RGBA
    except Exception:
        pass

    try:
        values["simplification_factor"] = config.getfloat(
            INGEST_SECTION, "simplification_factor")
    except Exception:
        pass

    in_memory = False
    try:
        in_memory = config.getboolean(INGEST_SECTION, "in_memory")
    except Exception:
        pass

    # use GDAL's in-memory filesystem when requested
    values["temporary_directory"] = "/vsimem/" if in_memory else None

    return values
def get_mapcache_seed_config(config=None):
    """ Returns a dictionary with all mapcache related config settings. """
    config = config or get_ngeo_config()
    return {
        "seed_command": safe_get(
            config, SEED_SECTION, "seed_command", "mapcache_seed"),
        # note: "config_file" is mandatory and read unguarded
        "config_file": config.get(SEED_SECTION, "config_file"),
        "threads": int(safe_get(config, SEED_SECTION, "threads", 1)),
    }
def get_ingest_config(config=None):
    """Return the ingest-related settings as a dictionary.

    ``strategy`` defaults to "replace"; ``merge_threshold`` is parsed from
    a time-delta string; ``regular_grid_clipping`` is a boolean parsed from
    common truthy string spellings.
    """
    config = config or get_ngeo_config()
    clipping_raw = safe_get(
        config, INGEST_SECTION, "regular_grid_clipping", "false")
    return {
        "strategy": safe_get(config, INGEST_SECTION, "strategy", "replace"),
        "merge_threshold": parse_time_delta(
            safe_get(config, INGEST_SECTION, "merge_threshold", "5h")),
        "regular_grid_clipping":
            clipping_raw.lower() in ("true", "1", "on", "yes"),
    }
def send_report(ip_address=None, begin=None, end=None, access_logfile=None,
                report_logfile=None, config=None):
    """Build a report XML document and POST it to the controller server.

    Raises an Exception when no target IP address can be determined and
    re-raises any HTTP/URL error after logging it.
    """
    config = config or get_ngeo_config()
    try:
        if not ip_address:
            ctrl_config = get_controller_config(
                get_controller_config_path(config))
            ip_address = safe_get(
                ctrl_config, CONTROLLER_SERVER_SECTION, "address")
    except IOError:
        # probably no config file present, so IP cannot be determined.
        pass

    if not ip_address:
        raise Exception("IP address could not be determined")

    # NOTE(review): ``types`` is not defined in this function — presumably a
    # module-level default for the report types; confirm it exists at module
    # scope, otherwise this raises NameError.
    tree = get_report_xml(begin, end, types, access_logfile, report_logfile)

    req = urllib2.Request(
        url="http://%s/notify" % ip_address,
        data=etree.tostring(tree, pretty_print=True),
        headers={'Content-Type': 'text/xml'}
    )
    # FIX: replaced leftover debug ``print req.data`` (raw stdout) with a
    # debug-level log entry.
    logger.debug(req.data)
    try:
        urllib2.urlopen(req, timeout=10)
    except (urllib2.HTTPError, urllib2.URLError) as e:
        logger.error(
            "Could not send report (%s): '%s'" % (type(e).__name__, str(e))
        )
        raise
def send_report(ip_address=None, begin=None, end=None, access_logfile=None,
                report_logfile=None, config=None):
    """Build a report XML document and POST it to the controller server.

    Raises an Exception when no target IP address can be determined and
    re-raises any HTTP/URL error after logging it.
    """
    config = config or get_ngeo_config()
    try:
        if not ip_address:
            ctrl_config = get_controller_config(
                get_controller_config_path(config))
            ip_address = safe_get(ctrl_config, CONTROLLER_SERVER_SECTION,
                                  "address")
    except IOError:
        # probably no config file present, so IP cannot be determined.
        pass

    if not ip_address:
        raise Exception("IP address could not be determined")

    # NOTE(review): ``types`` is not defined in this function — presumably a
    # module-level default for the report types; confirm it exists at module
    # scope, otherwise this raises NameError.
    tree = get_report_xml(begin, end, types, access_logfile, report_logfile)

    req = urllib2.Request(url="http://%s/notify" % ip_address,
                          data=etree.tostring(tree, pretty_print=True),
                          headers={'Content-Type': 'text/xml'})
    # FIX: replaced leftover debug ``print req.data`` (raw stdout) with a
    # debug-level log entry.
    logger.debug(req.data)
    try:
        urllib2.urlopen(req, timeout=10)
    except (urllib2.HTTPError, urllib2.URLError) as e:
        logger.error("Could not send report (%s): '%s'"
                     % (type(e).__name__, str(e)))
        raise
def get_configured_log_file_patterns(config=None):
    """Return the configured report log file patterns as project-relative
    paths, or an empty list when none are configured.

    FIX: ``config`` now defaults to ``None``; the body already handled a
    falsy config via ``config or get_ngeo_config()`` and every sibling
    helper in this module takes ``config=None``, but the parameter had no
    default, forcing callers to pass it explicitly. Backward-compatible.
    """
    config = config or get_ngeo_config()
    items = safe_get(config, CTRL_SECTION, "report_log_files")
    if items is None:
        return []
    # comma-separated list of path patterns, resolved relative to the project
    return map(get_project_relative_path, items.split(","))
def get_failure_dir(config=None):
    """ Returns the configured failure directory. """
    config = config or get_ngeo_config()
    failure_dir = safe_get(config, "control.ingest", "failure_dir")
    return get_project_relative_path(failure_dir)
def get_format_config(config=None):
    """ Returns a dictionary with all preprocessing format specific
    configuration settings.
    """
    values = {}
    config = config or get_ngeo_config()

    values["compression"] = safe_get(config, INGEST_SECTION, "compression")

    # compression-specific quality settings
    if values["compression"] == "JPEG":
        value = safe_get(config, INGEST_SECTION, "jpeg_quality")
        values["jpeg_quality"] = int(value) if value is not None else None
    elif values["compression"] == "DEFLATE":
        value = safe_get(config, INGEST_SECTION, "zlevel")
        values["zlevel"] = int(value) if value is not None else None

    # FIX: narrowed the bare ``except:`` (which also swallows
    # SystemExit/KeyboardInterrupt) to ``except Exception``; the
    # best-effort "leave key out when missing/invalid" behavior is kept.
    try:
        values["tiling"] = config.getboolean(INGEST_SECTION, "tiling")
    except Exception:
        pass

    return values
def _handle_file(self, filename, config):
    """Apply a browse-layer configuration file.

    Parses the file, adds/updates the layers listed under
    ``addConfiguration`` and deletes those under ``removeConfiguration``
    inside nested transactions, then stores the new revision.
    """
    root = etree.parse(filename)
    # NOTE(review): start_revision is parsed but never used here
    start_revision = root.findtext(ns_cfg("startRevision"))
    end_revision = root.findtext(ns_cfg("endRevision"))

    cfg_ns = {"cfg": ns_cfg.uri}
    remove_layers_elems = root.xpath(
        "cfg:removeConfiguration/cfg:browseLayers", namespaces=cfg_ns)
    add_layers_elems = root.xpath(
        "cfg:addConfiguration/cfg:browseLayers", namespaces=cfg_ns)

    add_layers = []
    for layers_elem in add_layers_elems:
        add_layers.extend(decode_browse_layers(layers_elem))
    remove_layers = []
    for layers_elem in remove_layers_elems:
        remove_layers.extend(decode_browse_layers(layers_elem))

    # get the mapcache config xml file path to make it transaction safe
    mapcache_config = get_mapcache_seed_config(config)
    mapcache_xml_filename = mapcache_config["config_file"]

    # transaction safety here
    with FileTransaction((mapcache_xml_filename, ), copy=True):
        with transaction.commit_on_success():
            with transaction.commit_on_success(using="mapcache"):
                for browse_layer in add_layers:
                    if models.BrowseLayer.objects.filter(
                            id=browse_layer.id).exists():
                        update_browse_layer(browse_layer, config)
                    else:
                        add_browse_layer(browse_layer, config)
                for browse_layer in remove_layers:
                    delete_browse_layer(browse_layer, config=config)

    # set the new revision
    config = config or get_ngeo_config()
    if not config.has_section("config"):
        config.add_section("config")
    # NOTE(review): the previous revision is read but currently unused
    revision = int(safe_get(config, "config", "revision", 0))
    config.set("config", "revision", int(end_revision))
    write_ngeo_config()
def _handle_file(self, filename, config):
    """Apply a browse-layer configuration file.

    Parses the file, adds/updates the layers listed under
    ``addConfiguration`` and deletes those under ``removeConfiguration``
    inside nested transactions, then stores the new revision.
    """
    root = etree.parse(filename)
    # NOTE(review): start_revision is parsed but never used here
    start_revision = root.findtext(ns_cfg("startRevision"))
    end_revision = root.findtext(ns_cfg("endRevision"))

    namespaces = {"cfg": ns_cfg.uri}
    remove_layers_elems = root.xpath(
        "cfg:removeConfiguration/cfg:browseLayers", namespaces=namespaces)
    add_layers_elems = root.xpath(
        "cfg:addConfiguration/cfg:browseLayers", namespaces=namespaces)

    add_layers = []
    for layers_elem in add_layers_elems:
        add_layers.extend(decode_browse_layers(layers_elem))
    remove_layers = []
    for layers_elem in remove_layers_elems:
        remove_layers.extend(decode_browse_layers(layers_elem))

    # get the mapcache config xml file path to make it transaction safe
    mapcache_config = get_mapcache_seed_config(config)
    mapcache_xml_filename = mapcache_config["config_file"]

    # transaction safety here
    with FileTransaction((mapcache_xml_filename,), copy=True):
        with transaction.commit_on_success():
            with transaction.commit_on_success(using="mapcache"):
                for browse_layer in add_layers:
                    exists = models.BrowseLayer.objects.filter(
                        id=browse_layer.id).exists()
                    if exists:
                        update_browse_layer(browse_layer, config)
                    else:
                        add_browse_layer(browse_layer, config)
                for browse_layer in remove_layers:
                    delete_browse_layer(browse_layer, config=config)

    # set the new revision
    config = config or get_ngeo_config()
    if not config.has_section("config"):
        config.add_section("config")
    # NOTE(review): the previous revision is read but currently unused
    revision = int(safe_get(config, "config", "revision", 0))
    config.set("config", "revision", int(end_revision))
    write_ngeo_config()
def handle(self, begin=None, end=None, url=None, filename=None,
           access_logfile=None, report_logfile=None, **kwargs):
    """Generate a report and either save it to ``filename`` or send it to
    ``url`` (exactly one of the two must be given).

    Raises CommandError when both or neither target is specified.
    """
    # parse command arguments
    self.verbosity = int(kwargs.get("verbosity", 1))
    traceback = kwargs.get("traceback", False)
    self.set_up_logging(["ngeo_browse_server"], self.verbosity, traceback)

    conf = get_ngeo_config()
    report_store_dir = safe_get(conf, "control", "report_store_dir",
                                "/var/www/ngeo/store/reports/")
    # FIX: only resolve the target path when a filename was actually given;
    # previously ``basename(None)`` raised a TypeError whenever the command
    # was invoked with a URL only.
    if filename:
        filename = join(report_store_dir, basename(filename))

    logger.info("Starting report generation from command line.")

    if begin:
        begin = getDateTime(begin)
    if end:
        end = getDateTime(end)

    if filename and url:
        logger.error("Both Filename and URL specified.")
        raise CommandError("Both Filename and URL specified.")

    if filename:
        logger.info("Save report to file '%s'." % filename)
        save_report(filename, begin, end, access_logfile, report_logfile)
    elif url:
        logger.info("Send report to URL '%s'." % url)
        send_report(url, begin, end, access_logfile, report_logfile)
    else:
        logger.error("Neither Filename nor URL specified.")
        raise CommandError("Neither Filename nor URL specified.")

    logger.info("Successfully finished report generation.")
def notify(summary, message, urgency=None, ip_address=None, config=None):
    """Best-effort notification to the controller server.

    Silently returns when no target address can be determined and swallows
    any send error — notification failures must never break the caller.
    """
    config = config or get_ngeo_config()
    urgency = urgency or "INFO"

    if urgency not in ("INFO", "CRITICAL", "BLOCK"):
        raise ValueError("Invalid urgency value '%s'." % urgency)

    try:
        if not ip_address:
            ctrl_config = get_controller_config(
                get_controller_config_path(config))
            ip_address = safe_get(
                ctrl_config, CONTROLLER_SERVER_SECTION, "address")
    except IOError:
        # probably no config file present, so IP cannot be determined.
        pass

    if not ip_address:
        return

    tree = E("notifyControllerServer",
             E("header",
               E("timestamp", isotime(now())),
               E("instance", get_instance_id(config)),
               E("subsystem", "BROW"),
               E("urgency", urgency)),
             E("body",
               E("summary", summary),
               E("message", message)))

    req = urllib2.Request(
        url="http://%s/notify" % ip_address,
        data=etree.tostring(tree, pretty_print=True),
        headers={'Content-Type': 'text/xml'})
    try:
        urllib2.urlopen(req, timeout=1)
    except (urllib2.HTTPError, urllib2.URLError):
        # could not send notification. Out of options
        pass
def handle(self, begin=None, end=None, url=None, filename=None,
           access_logfile=None, report_logfile=None, **kwargs):
    """Generate a report and either save it to ``filename`` or send it to
    ``url`` (exactly one of the two must be given).

    Raises CommandError when both or neither target is specified.
    """
    # parse command arguments
    self.verbosity = int(kwargs.get("verbosity", 1))
    traceback = kwargs.get("traceback", False)
    self.set_up_logging(["ngeo_browse_server"], self.verbosity, traceback)

    conf = get_ngeo_config()
    report_store_dir = safe_get(
        conf, "control", "report_store_dir", "/var/www/ngeo/store/reports/"
    )
    # FIX: only resolve the target path when a filename was actually given;
    # previously ``basename(None)`` raised a TypeError whenever the command
    # was invoked with a URL only.
    if filename:
        filename = join(report_store_dir, basename(filename))

    logger.info("Starting report generation from command line.")

    if begin:
        begin = getDateTime(begin)
    if end:
        end = getDateTime(end)

    if filename and url:
        logger.error("Both Filename and URL specified.")
        raise CommandError("Both Filename and URL specified.")

    if filename:
        logger.info("Save report to file '%s'." % filename)
        save_report(filename, begin, end, access_logfile, report_logfile)
    elif url:
        logger.info("Send report to URL '%s'." % url)
        send_report(url, begin, end, access_logfile, report_logfile)
    else:
        logger.error("Neither Filename nor URL specified.")
        raise CommandError("Neither Filename nor URL specified.")

    logger.info("Successfully finished report generation.")
def get_optimized_path(file_name, directory=None, config=None):
    """ Returns an absolute path to a filename within the storage directory
    for optimized raster files. Uses the
    'control.ingest.optimized_files_dir' setting from the ngEO
    configuration. Also tries to get the postfix for optimized files from
    the 'control.ingest.optimized_files_postfix' setting from the ngEO
    configuration. All relative paths are treated relative to the
    PROJECT_DIR directory setting.
    """
    config = config or get_ngeo_config()

    # strip any directory component of the input; optionally nest the file
    # under the given sub-directory
    file_name = basename(file_name)
    if directory:
        file_name = join(directory, file_name)

    optimized_dir = get_project_relative_path(
        config.get(INGEST_SECTION, "optimized_files_dir"))
    postfix = safe_get(config, INGEST_SECTION, "optimized_files_postfix", "")

    root, ext = splitext(file_name)
    return join(optimized_dir, "%s%s%s" % (root, postfix, ext))
def notify(summary, message, urgency=None, ip_address=None, config=None):
    """Send a notification XML document to the CTRL server (best effort).

    Resolution order for the target: the explicit ``ip_address`` argument,
    the configured ``control.notification_url``, and finally the controller
    server's registered address. Send errors are logged, never raised.
    """
    config = config or get_ngeo_config()
    urgency = urgency or "INFO"

    if urgency not in ("INFO", "CRITICAL", "BLOCK"):
        raise ValueError("Invalid urgency value '%s'." % urgency)

    try:
        if not ip_address:
            # get the value for "notification_url" and fall back to "address"
            ip_address = safe_get(config, "control", "notification_url")
        if not ip_address:
            ctrl_config = get_controller_config(
                get_controller_config_path(config))
            logger.debug(
                "No 'notification_url' present. Trying to fall back to "
                "registered IP address.")
            ip_address = safe_get(
                ctrl_config, CONTROLLER_SERVER_SECTION, "address")
    except (IOError, NoSectionError):
        # probably no config file present, so IP cannot be determined.
        pass

    if not ip_address:
        # cannot log this error as we would run into an endless loop
        logger.info("Cannot send notification to CTRL.")
        return

    header = E("header",
               E("timestamp", isotime(now())),
               E("instance", get_instance_id(config)),
               E("subsystem", "BROW"),
               E("urgency", urgency))
    body = E("body",
             E("summary", summary),
             E("message", message))
    tree = E("notifyControllerServer", header, body)

    # normalize the target into a full ".../notify" URL
    if not ip_address.startswith(("http://", "https://")):
        ip_address = "http://%s" % ip_address
    if not ip_address.endswith("/notify"):
        ip_address += "/notify"

    logger.info("Sending notification to CTRL at IP '%s'." % ip_address)

    req = urllib2.Request(
        url=ip_address,
        data=etree.tostring(tree, pretty_print=True),
        headers={'Content-Type': 'application/xml'})
    try:
        urllib2.urlopen(req, timeout=1)
    except (urllib2.HTTPError, urllib2.URLError) as e:
        logger.info("Error sending notification: %s" % e)
        logger.debug(traceback.format_exc() + "\n")
def config(request):
    """Handle a CTRL configuration synchronization request (PUT or POST).

    Decodes the add/remove browse-layer configuration from the request
    body, applies it inside nested transactions, stores the new revision
    and returns it; any failure yields a 400 fault response.
    """
    try:
        status = get_status()  # NOTE(review): currently unused
        config = get_ngeo_config()

        if request.method not in ("PUT", "POST"):
            raise Exception("Invalid request method '%s'." % request.method)

        if request.method == "POST":
            # "setting" new configuration, which means removing the previous one.
            action = "set"  # NOTE(review): 'action' is currently unused
        else:
            action = "update"

        root = etree.parse(request)
        # NOTE(review): start_revision is parsed but never checked
        start_revision = root.findtext(ns_cfg("startRevision"))
        end_revision = root.findtext(ns_cfg("endRevision"))

        # TODO: check current and last revision
        remove_layers_elems = root.xpath(
            "cfg:removeConfiguration/cfg:browseLayers",
            namespaces={"cfg": ns_cfg.uri})
        add_layers_elems = root.xpath(
            "cfg:addConfiguration/cfg:browseLayers",
            namespaces={"cfg": ns_cfg.uri})

        add_layers = []
        for layers_elem in add_layers_elems:
            add_layers.extend(decode_browse_layers(layers_elem))
        remove_layers = []
        for layers_elem in remove_layers_elems:
            remove_layers.extend(decode_browse_layers(layers_elem))

        # get the mapcache config xml file path to make it transaction safe
        mapcache_config = get_mapcache_seed_config(config)
        mapcache_xml_filename = mapcache_config["config_file"]

        # transaction safety here
        with FileTransaction((mapcache_xml_filename,), copy=True):
            with transaction.commit_on_success():
                with transaction.commit_on_success(using="mapcache"):
                    for browse_layer in add_layers:
                        if models.BrowseLayer.objects.filter(
                                id=browse_layer.id).exists():
                            update_browse_layer(browse_layer, config)
                        else:
                            add_browse_layer(browse_layer, config)
                    for browse_layer in remove_layers:
                        delete_browse_layer(browse_layer, config)

        # set the new revision
        config = get_ngeo_config()
        if not config.has_section("config"):
            config.add_section("config")
        revision = int(safe_get(config, "config", "revision", 0))
        # FIX: store the revision as an int, as the sibling implementation
        # of this handler does; previously the raw XML string was stored.
        config.set("config", "revision", int(end_revision))
        write_ngeo_config()

        # return with the new revision
        return HttpResponse(
            '<?xml version="1.0"?>\n'
            '<synchronizeConfigurationResponse>%s</synchronizeConfigurationResponse>'
            % end_revision
        )

    except Exception as e:
        logger.error("%s: %s" % (type(e).__name__, str(e)))
        logger.debug(traceback.format_exc())
        return HttpResponse(
            '<faultcode>ConfigurationError</faultcode>\n'
            '<faultstring>%s</faultstring>' % str(e),
            status=400
        )
def decode_browse_layers(browse_layers_elem, config=None):
    """Decode a ``browseLayers`` XML element into a list of BrowseLayer
    objects, applying mapcache-wide defaults for optional settings.
    """
    logger.info("Start decoding browse layer.")
    config = config or get_ngeo_config()

    # defaults for optional per-layer settings
    timedimension_default = safe_get(
        config, "mapcache", "timedimension_default", "2014")
    tile_query_limit_default = safe_get(
        config, "mapcache", "tile_query_limit_default", "100")

    browse_layers = []
    for browse_layer_elem in browse_layers_elem.findall(ns_cfg("browseLayer")):
        opt = {"strategy": "inherit"}

        description_elem = browse_layer_elem.find(ns_cfg("description"))
        if description_elem is not None:
            opt["description"] = description_elem.text or ""

        related_dataset_ids_elem = browse_layer_elem.find(
            ns_cfg("relatedDatasetIds"))
        related_dataset_ids = [
            elem.text for elem in related_dataset_ids_elem]

        rgb_bands_elem = browse_layer_elem.find(ns_cfg("rgbBands"))
        if rgb_bands_elem is not None:
            opt["r_band"], opt["g_band"], opt["b_band"] = map(
                int, rgb_bands_elem.text.split(","))

        radiometric_interval_elem = browse_layer_elem.find(
            ns_cfg("radiometricInterval"))
        if radiometric_interval_elem is not None:
            opt["radiometric_interval_min"] = int(
                radiometric_interval_elem.find(ns_cfg("min")).text)
            opt["radiometric_interval_max"] = int(
                radiometric_interval_elem.find(ns_cfg("max")).text)

        strategy_elem = browse_layer_elem.find(ns_cfg("strategy"))
        if strategy_elem is not None:
            opt["strategy"] = strategy_elem.text

        opt["timedimension_default"] = browse_layer_elem.findtext(
            ns_cfg("timeDimensionDefault")) or timedimension_default
        opt["tile_query_limit"] = int(
            browse_layer_elem.findtext(ns_cfg("tileQueryLimit"))
            or tile_query_limit_default)
        opt["contains_volumes"] = browse_layer_elem.findtext(
            ns_cfg("contains_volumes")) == "true"

        browse_layers.append(BrowseLayer(
            browse_layer_elem.get("browseLayerId"),
            browse_layer_elem.find(ns_cfg("browseType")).text,
            browse_layer_elem.find(ns_cfg("title")).text,
            browse_layer_elem.find(ns_cfg("grid")).text,
            browse_layer_elem.find(ns_cfg("browseAccessPolicy")).text,
            browse_layer_elem.find(
                ns_cfg("containsVerticalCurtains")).text == "true",
            int(browse_layer_elem.find(ns_cfg("highestMapLevel")).text),
            int(browse_layer_elem.find(ns_cfg("lowestMapLevel")).text),
            browse_layer_elem.find(ns_cfg("hostingBrowseServerName")).text,
            related_dataset_ids,
            **opt))

    return browse_layers
def seed_mapcache(seed_command, config_file, tileset, grid, minx, miny, maxx,
                  maxy, minzoom, maxzoom, start_time, end_time, threads,
                  delete, force=True):
    """Run the external mapcache seeder for the given tileset and extent.

    Handles extents crossing the dateline by seeding in two passes. Raises
    SeedException on an invalid grid/extent, a non-zero seeder return code,
    or a lock timeout.
    """
    # translate grid URN to mapcache grid name
    try:
        grid = URN_TO_GRID[grid]
    except KeyError:
        raise SeedException("Invalid grid '%s'." % grid)

    bounds = CRS_BOUNDS[GRID_TO_SRID[grid]]
    full = float(abs(bounds[0]) + abs(bounds[2]))

    dateline_crossed = False
    if maxx > bounds[2]:
        dateline_crossed = True
    # extent is always within [bounds[0],bounds[2]]
    # where maxx can be >bounds[2] but <=full
    if minx < bounds[0] or minx > bounds[2] or maxx < bounds[0] or maxx > full:
        raise SeedException("Invalid extent '%s,%s,%s,%s'."
                            % (minx, miny, maxx, maxy))

    if minzoom is None:
        minzoom = 0
    if maxzoom is None:
        maxzoom = 6

    # start- and end-time are expected to be UTC Zulu
    start_time = start_time.replace(tzinfo=None)
    end_time = end_time.replace(tzinfo=None)

    logger.info(
        "Starting mapcache seed with parameters: command='%s', "
        "config_file='%s', tileset='%s', grid='%s', "
        "extent='%s,%s,%s,%s', zoom='%s,%s', nthreads='%s', "
        "mode='%s', dimension='TIME=%sZ/%sZ'."
        % (seed_command, config_file, tileset, grid,
           minx, miny, maxx, maxy, minzoom, maxzoom, threads,
           "seed" if not delete else "delete",
           start_time.isoformat(), end_time.isoformat()))

    seed_args = [
        seed_command,
        "-c", config_file,
        "-t", tileset,
        "-g", grid,
        # clamp the first pass at the CRS bound when crossing the dateline
        "-e", "%f,%f,%f,%f" % (minx, miny,
                               bounds[2] if dateline_crossed else maxx, maxy),
        "-n", str(threads),
        "-z", "%d,%d" % (minzoom, maxzoom),
        "-D", "TIME=%sZ/%sZ" % (start_time.isoformat(), end_time.isoformat()),
        "-m", "seed" if not delete else "delete",
        "-q",
        "-M", "1,1",
    ]
    if not delete and force:
        seed_args.append("-f")

    try:
        config = get_ngeo_config()
        timeout = safe_get(config, "mapcache.seed", "timeout")
        timeout = float(timeout) if timeout is not None else 60.0
    # FIX: narrowed the original bare ``except:`` (which also swallows
    # SystemExit/KeyboardInterrupt); any config/parsing problem still
    # falls back to the default timeout.
    except Exception:
        timeout = 60.0

    try:
        lock = FileLock(get_project_relative_path("mapcache_seed.lck"),
                        timeout=timeout)
        with lock:
            logger.debug("mapcache seeding command: '%s'. raw: '%s'."
                         % (" ".join(seed_args), seed_args))
            process = subprocess.Popen(seed_args, stdout=subprocess.PIPE,
                                       stderr=subprocess.PIPE)
            out, err = process.communicate()
            for string in (out, err):
                for line in string.split("\n"):
                    if line != '':
                        logger.info("MapCache output: %s" % line)
            if process.returncode != 0:
                raise SeedException("'%s' failed. Returncode '%d'."
                                    % (seed_command, process.returncode))

        # seed second extent if dateline is crossed
        if dateline_crossed:
            with lock:
                index = seed_args.index(
                    "%f,%f,%f,%f" % (minx, miny, bounds[2], maxy))
                seed_args[index] = "%f,%f,%f,%f" % (bounds[0], miny,
                                                    maxx - full, maxy)
                logger.debug("mapcache seeding command: '%s'. raw: '%s'."
                             % (" ".join(seed_args), seed_args))
                process = subprocess.Popen(seed_args, stdout=subprocess.PIPE,
                                           stderr=subprocess.PIPE)
                out, err = process.communicate()
                for string in (out, err):
                    for line in string.split("\n"):
                        if line != '':
                            logger.info("MapCache output: %s" % line)
                if process.returncode != 0:
                    raise SeedException("'%s' failed. Returncode '%d'."
                                        % (seed_command, process.returncode))
    except LockException as e:
        raise SeedException("Seeding failed: %s" % str(e))
def seed_mapcache(seed_command, config_file, tileset, grid, minx, miny, maxx,
                  maxy, minzoom, maxzoom, start_time, end_time, threads,
                  delete, force=True):
    """Run the external mapcache seeder for the given tileset and extent.

    Handles extents crossing the dateline by seeding in two passes. Raises
    SeedException on an invalid grid/extent, a non-zero seeder return code,
    or a lock timeout.
    """
    # translate grid URN to mapcache grid name
    try:
        grid = URN_TO_GRID[grid]
    except KeyError:
        raise SeedException("Invalid grid '%s'." % grid)

    bounds = CRS_BOUNDS[GRID_TO_SRID[grid]]
    full = float(abs(bounds[0]) + abs(bounds[2]))

    dateline_crossed = False
    if maxx > bounds[2]:
        dateline_crossed = True
    # extent is always within [bounds[0],bounds[2]]
    # where maxx can be >bounds[2] but <=full
    if minx < bounds[0] or minx > bounds[2] or maxx < bounds[0] or maxx > full:
        raise SeedException("Invalid extent '%s,%s,%s,%s'."
                            % (minx, miny, maxx, maxy))

    if minzoom is None:
        minzoom = 0
    if maxzoom is None:
        maxzoom = 6

    # start- and end-time are expected to be UTC Zulu
    start_time = start_time.replace(tzinfo=None)
    end_time = end_time.replace(tzinfo=None)

    logger.info("Starting mapcache seed with parameters: command='%s', "
                "config_file='%s', tileset='%s', grid='%s', "
                "extent='%s,%s,%s,%s', zoom='%s,%s', nthreads='%s', "
                "mode='%s', dimension='TIME=%sZ/%sZ'."
                % (seed_command, config_file, tileset, grid,
                   minx, miny, maxx, maxy, minzoom, maxzoom, threads,
                   "seed" if not delete else "delete",
                   start_time.isoformat(), end_time.isoformat()))

    seed_args = [
        seed_command,
        "-c", config_file,
        "-t", tileset,
        "-g", grid,
        # clamp the first pass at the CRS bound when crossing the dateline
        "-e", "%f,%f,%f,%f" % (minx, miny,
                               bounds[2] if dateline_crossed else maxx, maxy),
        "-n", str(threads),
        "-z", "%d,%d" % (minzoom, maxzoom),
        "-D", "TIME=%sZ/%sZ" % (start_time.isoformat(), end_time.isoformat()),
        "-m", "seed" if not delete else "delete",
        "-q",
        "-M", "1,1",
    ]
    if not delete and force:
        seed_args.append("-f")

    try:
        config = get_ngeo_config()
        timeout = safe_get(config, "mapcache.seed", "timeout")
        timeout = float(timeout) if timeout is not None else 60.0
    # FIX: narrowed the original bare ``except:`` (which also swallows
    # SystemExit/KeyboardInterrupt); any config/parsing problem still
    # falls back to the default timeout.
    except Exception:
        timeout = 60.0

    try:
        lock = FileLock(
            get_project_relative_path("mapcache_seed.lck"), timeout=timeout
        )
        with lock:
            logger.debug("mapcache seeding command: '%s'. raw: '%s'."
                         % (" ".join(seed_args), seed_args))
            process = subprocess.Popen(seed_args, stdout=subprocess.PIPE,
                                       stderr=subprocess.PIPE)
            out, err = process.communicate()
            for string in (out, err):
                for line in string.split("\n"):
                    if line != '':
                        logger.info("MapCache output: %s" % line)
            if process.returncode != 0:
                raise SeedException("'%s' failed. Returncode '%d'."
                                    % (seed_command, process.returncode))

        # seed second extent if dateline is crossed
        if dateline_crossed:
            with lock:
                index = seed_args.index(
                    "%f,%f,%f,%f" % (minx, miny, bounds[2], maxy))
                seed_args[index] = "%f,%f,%f,%f" % (bounds[0], miny,
                                                    maxx - full, maxy)
                logger.debug("mapcache seeding command: '%s'. raw: '%s'."
                             % (" ".join(seed_args), seed_args))
                process = subprocess.Popen(seed_args, stdout=subprocess.PIPE,
                                           stderr=subprocess.PIPE)
                out, err = process.communicate()
                for string in (out, err):
                    for line in string.split("\n"):
                        if line != '':
                            logger.info("MapCache output: %s" % line)
                if process.returncode != 0:
                    raise SeedException("'%s' failed. Returncode '%d'."
                                        % (seed_command, process.returncode))
    except LockException as e:
        raise SeedException("Seeding failed: %s" % str(e))
def decode_browse_layers(browse_layers_elem, config=None):
    """Decode a ``browseLayers`` XML element into a list of BrowseLayer
    objects, applying mapcache-wide defaults for optional settings.
    """
    logger.info("Start decoding browse layer.")
    config = config or get_ngeo_config()

    # defaults for optional per-layer settings
    timedimension_default = safe_get(
        config, "mapcache", "timedimension_default", "2014")
    tile_query_limit_default = safe_get(
        config, "mapcache", "tile_query_limit_default", "100")

    browse_layers = []
    for browse_layer_elem in browse_layers_elem.findall(ns_cfg("browseLayer")):
        opt = {"strategy": "inherit"}

        description_elem = browse_layer_elem.find(ns_cfg("description"))
        if description_elem is not None:
            opt["description"] = description_elem.text or ""

        related_dataset_ids_elem = browse_layer_elem.find(
            ns_cfg("relatedDatasetIds"))
        related_dataset_ids = [
            elem.text for elem in related_dataset_ids_elem]

        rgb_bands_elem = browse_layer_elem.find(ns_cfg("rgbBands"))
        if rgb_bands_elem is not None:
            r, g, b = map(int, rgb_bands_elem.text.split(","))
            opt["r_band"] = r
            opt["g_band"] = g
            opt["b_band"] = b

        radiometric_interval_elem = browse_layer_elem.find(
            ns_cfg("radiometricInterval"))
        if radiometric_interval_elem is not None:
            opt["radiometric_interval_min"] = int(
                radiometric_interval_elem.find(ns_cfg("min")).text)
            opt["radiometric_interval_max"] = int(
                radiometric_interval_elem.find(ns_cfg("max")).text)

        strategy_elem = browse_layer_elem.find(ns_cfg("strategy"))
        if strategy_elem is not None:
            opt["strategy"] = strategy_elem.text

        harvesting_source_elem = browse_layer_elem.find(
            ns_cfg("harvestingSource"))
        if harvesting_source_elem is not None:
            opt["harvesting_source"] = harvesting_source_elem.text

        opt["timedimension_default"] = browse_layer_elem.findtext(
            ns_cfg("timeDimensionDefault")) or timedimension_default
        opt["tile_query_limit"] = int(
            browse_layer_elem.findtext(ns_cfg("tileQueryLimit"))
            or tile_query_limit_default)

        browse_layers.append(BrowseLayer(
            browse_layer_elem.get("browseLayerId"),
            browse_layer_elem.find(ns_cfg("browseType")).text,
            browse_layer_elem.find(ns_cfg("title")).text,
            browse_layer_elem.find(ns_cfg("grid")).text,
            browse_layer_elem.find(ns_cfg("browseAccessPolicy")).text,
            browse_layer_elem.find(
                ns_cfg("containsVerticalCurtains")).text == "true",
            int(browse_layer_elem.find(ns_cfg("highestMapLevel")).text),
            int(browse_layer_elem.find(ns_cfg("lowestMapLevel")).text),
            browse_layer_elem.find(ns_cfg("hostingBrowseServerName")).text,
            related_dataset_ids,
            **opt))

    return browse_layers
def config(request):
    """Handle a CTRL configuration synchronization request (PUT or POST).

    Decodes the add/remove browse-layer configuration from the request
    body, applies it inside nested transactions, stores the new revision
    and returns it; any failure yields a 400 fault response.
    """
    try:
        status = get_status()  # NOTE(review): currently unused
        config = get_ngeo_config()

        if request.method not in ("PUT", "POST"):
            raise Exception("Invalid request method '%s'." % request.method)

        if request.method == "POST":
            # "setting" new configuration, which means removing the previous one.
            action = "set"  # NOTE(review): 'action' is currently unused
        else:
            action = "update"

        root = etree.parse(request)
        # NOTE(review): start_revision is parsed but never checked
        start_revision = root.findtext(ns_cfg("startRevision"))
        end_revision = root.findtext(ns_cfg("endRevision"))

        # TODO: check current and last revision
        namespaces = {"cfg": ns_cfg.uri}
        remove_layers_elems = root.xpath(
            "cfg:removeConfiguration/cfg:browseLayers", namespaces=namespaces)
        add_layers_elems = root.xpath(
            "cfg:addConfiguration/cfg:browseLayers", namespaces=namespaces)

        add_layers = []
        for layers_elem in add_layers_elems:
            add_layers.extend(decode_browse_layers(layers_elem))
        remove_layers = []
        for layers_elem in remove_layers_elems:
            remove_layers.extend(decode_browse_layers(layers_elem))

        # get the mapcache config xml file path to make it transaction safe
        mapcache_config = get_mapcache_seed_config(config)
        mapcache_xml_filename = mapcache_config["config_file"]

        # transaction safety here
        with FileTransaction((mapcache_xml_filename, ), copy=True):
            with transaction.commit_on_success():
                with transaction.commit_on_success(using="mapcache"):
                    for browse_layer in add_layers:
                        exists = models.BrowseLayer.objects.filter(
                            id=browse_layer.id).exists()
                        if exists:
                            update_browse_layer(browse_layer, config)
                        else:
                            add_browse_layer(browse_layer, config)
                    for browse_layer in remove_layers:
                        delete_browse_layer(browse_layer, config=config)

        # set the new revision
        config = get_ngeo_config()
        if not config.has_section("config"):
            config.add_section("config")
        # NOTE(review): the previous revision is read but currently unused
        revision = int(safe_get(config, "config", "revision", 0))
        config.set("config", "revision", int(end_revision))
        write_ngeo_config()

        # return with the new revision
        return HttpResponse(
            '<?xml version="1.0"?>\n'
            '<synchronizeConfigurationResponse>%s</synchronizeConfigurationResponse>'
            % end_revision)

    except Exception as e:
        logger.error("%s: %s" % (type(e).__name__, str(e)))
        logger.debug(traceback.format_exc())
        return HttpResponse('<faultcode>ConfigurationError</faultcode>\n'
                            '<faultstring>%s</faultstring>' % str(e),
                            status=400)
def get_config_revision():
    """Return the stored configuration revision as an XML response element."""
    config = get_ngeo_config()
    revision = str(safe_get(config, "config", "revision", 0))
    return E("getConfigurationRevisionResponse", E("revision", revision))
def get_config_revision():
    """Return the stored configuration revision as an XML response element."""
    config = get_ngeo_config()
    revision_elem = E(
        "revision", str(safe_get(config, "config", "revision", 0)))
    return E("getConfigurationRevisionResponse", revision_elem)
def get_optimization_config(config=None):
    """ Returns a dictionary with all optimization specific config settings.

    Options that are missing from (or invalid in) the configuration are
    simply left out of the result; callers are expected to handle absent
    keys.
    """
    values = {}
    config = config or get_ngeo_config()

    values["bandmode"] = RGB

    # FIX: the original used bare ``except:`` clauses which also swallow
    # SystemExit/KeyboardInterrupt. Narrowed to ``except Exception`` while
    # keeping the best-effort "skip missing/invalid option" semantics.
    try:
        values["overviews"] = config.getboolean(INGEST_SECTION, "overviews")
    except Exception:
        pass

    values["overview_levels"] = safe_get(
        config, INGEST_SECTION, "overview_levels")
    if values["overview_levels"]:
        values["overview_levels"] = map(
            int, values["overview_levels"].split(","))

    try:
        values["overview_minsize"] = config.getint(
            INGEST_SECTION, "overview_minsize")
    except Exception:
        pass

    values["overview_resampling"] = safe_get(
        config, INGEST_SECTION, "overview_resampling")

    try:
        values["color_index"] = config.getboolean(
            INGEST_SECTION, "color_index")
    except Exception:
        pass

    try:
        values["footprint_alpha"] = config.getboolean(
            INGEST_SECTION, "footprint_alpha")
        # an alpha channel implies four bands
        if values["footprint_alpha"]:
            values["bandmode"] = RGBA
    except Exception:
        pass

    try:
        values["sieve_max_threshold"] = config.getint(
            INGEST_SECTION, "sieve_max_threshold")
    except Exception:
        pass

    try:
        values["simplification_factor"] = config.getfloat(
            INGEST_SECTION, "simplification_factor")
    except Exception:
        pass

    in_memory = False
    try:
        in_memory = config.getboolean(INGEST_SECTION, "in_memory")
    except Exception:
        pass

    # use GDAL's in-memory filesystem when requested
    values["temporary_directory"] = "/vsimem/" if in_memory else None

    return values