def send_report(ip_address=None, begin=None, end=None, access_logfile=None, report_logfile=None, config=None): config = config or get_ngeo_config() try: if not ip_address: ctrl_config = get_controller_config( get_controller_config_path(config)) ip_address = safe_get(ctrl_config, CONTROLLER_SERVER_SECTION, "address") except IOError: # probably no config file present, so IP cannot be determined. pass if not ip_address: raise Exception("IP address could not be determined") tree = get_report_xml(begin, end, types, access_logfile, report_logfile) req = urllib2.Request(url="http://%s/notify" % ip_address, data=etree.tostring(tree, pretty_print=True), headers={'Content-Type': 'text/xml'}) print req.data try: urllib2.urlopen(req, timeout=10) except (urllib2.HTTPError, urllib2.URLError), e: logger.error("Could not send report (%s): '%s'" % (type(e).__name__, str(e))) raise
def handle(self, *filenames, **kwargs): # parse command arguments self.verbosity = int(kwargs.get("verbosity", 1)) traceback = kwargs.get("traceback", False) self.set_up_logging(["ngeo_browse_server"], self.verbosity, traceback) mode = kwargs["mode"] on_error = kwargs["on_error"] config = get_ngeo_config() if not filenames: raise CommandError("No input files provided.") # handle each file separately for filename in filenames: try: # handle each browse report self._handle_file(filename, mode, config) except Exception, e: # handle exceptions if on_error == "continue": # just print the traceback and continue self.print_msg("%s: %s" % (type(e).__name__, str(e)), 1, error=True) continue elif on_error == "stop": # re-raise the exception to stop the execution raise
def send_report(ip_address=None, begin=None, end=None, access_logfile=None, report_logfile=None, config=None): config = config or get_ngeo_config() try: if not ip_address: ctrl_config = get_controller_config(get_controller_config_path(config)) ip_address = safe_get(ctrl_config, CONTROLLER_SERVER_SECTION, "address") except IOError: # probably no config file present, so IP cannot be determined. pass if not ip_address: raise Exception("IP address could not be determined") tree = get_report_xml(begin, end, types, access_logfile, report_logfile) req = urllib2.Request( url="http://%s/notify" % ip_address, data=etree.tostring(tree, pretty_print=True), headers={'Content-Type': 'text/xml'} ) print req.data try: urllib2.urlopen(req, timeout=10) except (urllib2.HTTPError, urllib2.URLError), e: logger.error( "Could not send report (%s): '%s'" % (type(e).__name__, str(e)) ) raise
def setUp_files(self):
    """ Create the temporary directories and configuration files needed for
        a single ingestion test run and point the ngEO configuration at
        them.
    """
    # create a temporary storage directory, copy the reference test data
    # into it, and point the control.ingest.storage_dir to this location
    self.temp_storage_dir = tempfile.mktemp()  # only reserves a name
    config = get_ngeo_config()
    section = "control.ingest"

    # throw-away config file; exported so spawned processes pick it up
    self.config_filename = tempfile.NamedTemporaryFile(delete=False).name
    environ["NGEO_CONFIG_FILE"] = self.config_filename

    # NOTE: copytree() requires the destination to not exist yet and
    # creates it itself, hence mktemp() above instead of mkdtemp().
    shutil.copytree(join(settings.PROJECT_DIR, self.storage_dir),
                    self.temp_storage_dir)
    config.set(section, "storage_dir", self.temp_storage_dir)

    # create a temporary optimized files directory, empty. point the
    # control.ingest.optimized_files_dir to it
    self.temp_optimized_files_dir = tempfile.mkdtemp()
    config.set(section, "optimized_files_dir", self.temp_optimized_files_dir)

    # fresh success/failure directories for this run
    self.temp_success_dir = tempfile.mkdtemp()
    config.set(section, "success_dir", self.temp_success_dir)
    self.temp_failure_dir = tempfile.mkdtemp()
    config.set(section, "failure_dir", self.temp_failure_dir)

    # copy files to optimized dir
    for filename_src, filename_dst in self.copy_to_optimized:
        filename_src = join(settings.PROJECT_DIR, "data", filename_src)
        filename_dst = join(self.temp_optimized_files_dir, filename_dst)
        safe_makedirs(dirname(filename_dst))
        shutil.copy(filename_src, filename_dst)

    # setup mapcache config/files as retrieved from template
    self.temp_mapcache_dir = tempfile.mkdtemp() + "/"
    db_file = settings.DATABASES["mapcache"]["TEST_NAME"]
    mapcache_config_file = join(self.temp_mapcache_dir, "mapcache.xml")
    self.mapcache_config_file = mapcache_config_file

    # render the mapcache XML from the test template
    with open(mapcache_config_file, "w+") as f:
        f.write(render_to_string("test_control/mapcache.xml",
                {"mapcache_dir": self.temp_mapcache_dir,
                 "mapcache_test_db": db_file,
                 "browse_layers": models.BrowseLayer.objects.all(),
                 "base_url": getattr(self, "live_server_url",
                                     "http://localhost/browse")}))

    config.set(SEED_SECTION, "config_file", mapcache_config_file)
    config.set("mapcache", "tileset_root", self.temp_mapcache_dir)

    # setup mapcache dummy seed command: a shell script that always
    # succeeds, so tests do not invoke the real seeder
    seed_command_file = tempfile.NamedTemporaryFile(delete=False)
    seed_command_file.write("#!/bin/sh\nexit 0")
    self.seed_command = seed_command_file.name
    seed_command_file.close()
    st = stat(self.seed_command)
    # make the dummy script executable for the owner
    chmod(self.seed_command, st.st_mode | S_IEXEC)

    config.set(SEED_SECTION, "seed_command", self.seed_command)

    # path of the status configuration file used by the tests
    self.temp_status_config = join(tempfile.gettempdir(), "status.conf")
def unregister(instance_id, cs_id, cs_ip, config=None):
    """ Remove the stored controller server configuration, thereby
        unregistering this Browse Server instance from the given controller
        server.
    """
    config = config or get_ngeo_config()
    assert_instance_id(instance_id, config)

    lockfile_path = get_controller_config_lockfile_path(config)
    try:
        with FileLock(lockfile_path):
            controller_config_path = get_controller_config_path(config)
            if not exists(controller_config_path):
                # nothing to unregister from
                raise ControllerAssertionError(
                    "This Browse Server instance was not yet registered.",
                    reason="UNBOUND"
                )

            # TODO: controller server ID was removed?
            controller_config = get_controller_config(controller_config_path)
            assert_controller_id(cs_id, controller_config, "CONTROLLER_OTHER")
            assert_controller_ip(cs_ip, controller_config)

            # remove the controller configuration to complete unregistration
            os.remove(controller_config_path)
    except LockException:
        raise ControllerAssertionError(
            "There is currently another registration in progress.",
            reason="CONTROLLER_OTHER"
        )
def get_status(config=None):
    """ Convenience function to return a `Status` object with the global
    configuration. """
    if config is None:
        config = get_ngeo_config()
    return Status(config)
def handle(self, *filenames, **kwargs): System.init() # parse command arguments self.verbosity = int(kwargs.get("verbosity", 1)) traceback = kwargs.get("traceback", False) self.set_up_logging(["ngeo_browse_server"], self.verbosity, traceback) logger.info("Starting browse layer configuration from command line.") if not filenames: raise CommandError("No input files provided.") on_error = kwargs["on_error"] config = get_ngeo_config() no_files_handled_success = 0 no_files_handled_error = 0 # handle each file separately for filename in filenames: try: # handle each browse layer xml self._handle_file(filename, config) no_files_handled_success += 1 except Exception, e: # handle exceptions no_files_handled_error += 1 logger.error("%s: %s" % (type(e).__name__, str(e))) if on_error == "continue": # continue the execution with the next file continue elif on_error == "stop": # re-raise the exception to stop the execution raise CommandError(e)
def register(instance_id, instance_type, cs_id, cs_ip, config=None):
    """ Register this Browse Server instance on a controller server, or
        report how an existing registration conflicts.
    """
    config = config or get_ngeo_config()

    assert_instance_id(instance_id, config)
    assert_instance_type(instance_type)

    try:
        with FileLock(get_controller_config_lockfile_path(config)):
            controller_config_path = get_controller_config_path(config)
            if exists(controller_config_path):
                controller_config = get_controller_config(
                    controller_config_path)
                # mismatching ID/IP raise their own assertion errors
                assert_controller_id(cs_id, controller_config, "ALREADY_OTHER")
                assert_controller_ip(cs_ip, controller_config)

                # IP address and ID are the same, so raise the "ALREADY_SAME"
                # error.
                raise ControllerAssertionError(
                    "This browse server is already registered on this "
                    "controller server.", reason="ALREADY_SAME")
            else:
                # first registration: persist the controller configuration
                create_controller_config(controller_config_path, cs_id, cs_ip)
    except LockException:
        raise ControllerAssertionError(
            "There is currently another registration in progress.",
            reason="ALREADY_OTHER")
def get_status(config=None):
    """ Convenience function to return a `Status` object with the global
    configuration. """
    return Status(config or get_ngeo_config())
def register(instance_id, instance_type, cs_id, cs_ip, config=None):
    """ Register this Browse Server instance on a controller server; an
        existing registration is verified and reported instead.
    """
    config = config or get_ngeo_config()

    assert_instance_id(instance_id, config)
    assert_instance_type(instance_type)

    lockfile_path = get_controller_config_lockfile_path(config)
    try:
        with FileLock(lockfile_path):
            controller_config_path = get_controller_config_path(config)
            if not exists(controller_config_path):
                # first registration: persist the controller configuration
                create_controller_config(controller_config_path, cs_id, cs_ip)
            else:
                controller_config = get_controller_config(
                    controller_config_path
                )
                # a different ID or IP raises its own assertion error
                assert_controller_id(cs_id, controller_config, "ALREADY_OTHER")
                assert_controller_ip(cs_ip, controller_config)

                # IP address and ID are the same, so raise the "ALREADY_SAME"
                # error.
                raise ControllerAssertionError(
                    "This browse server is already registered on this "
                    "controller server.", reason="ALREADY_SAME"
                )
    except LockException:
        raise ControllerAssertionError(
            "There is currently another registration in progress.",
            reason="ALREADY_OTHER"
        )
def handle(self, *filenames, **kwargs): System.init() # parse command arguments self.verbosity = int(kwargs.get("verbosity", 1)) traceback = kwargs.get("traceback", False) self.set_up_logging(["ngeo_browse_server"], self.verbosity, traceback) logger.info("Starting browse layer configuration from command line.") if not filenames: raise CommandError("No input files provided.") on_error = kwargs["on_error"] config = get_ngeo_config() no_files_handled_success = 0 no_files_handled_error = 0 # handle each file separately for filename in filenames: try: # handle each browse layer xml self._handle_file(filename, config) no_files_handled_success += 1 except Exception, e: # handle exceptions no_files_handled_error += 1 logger.error("%s: %s" % (type(e).__name__, str(e))) if on_error == "continue": # continue the execution with the next file continue elif on_error == "stop": # re-raise the exception to stop the execution raise CommandError(e)
def ingest_browse(parsed_browse, browse_report, browse_layer, preprocessor, crs, success_dir, failure_dir, seed_areas, config=None): """ Ingests a single browse report, performs the preprocessing of the data file and adds the generated browse model to the browse report model. Returns a boolean value, indicating whether or not the browse has been inserted or replaced a previous browse entry. """ # TODO: if curtain: check that layer allows curtains # TODO: same for volumes logger.info("Ingesting browse '%s'." % (parsed_browse.browse_identifier or "<<no ID>>")) replaced = False replaced_extent = None replaced_filename = None merge_with = None merge_footprint = None config = config or get_ngeo_config() coverage_id = parsed_browse.browse_identifier if not coverage_id: # no identifier given, generate a new one coverage_id = _generate_coverage_id(parsed_browse, browse_layer) logger.info("No browse identifier given, generating coverage ID '%s'." % coverage_id) else: try: models.NCNameValidator(coverage_id) except ValidationError: # given ID is not valid, generate a new identifier old_id = coverage_id coverage_id = _generate_coverage_id(parsed_browse, browse_layer) logger.info("Browse ID '%s' is not a valid coverage ID. Using " "generated ID '%s'." % (old_id, coverage_id)) # get the `leave_original` setting leave_original = False try: leave_original = config.getboolean("control.ingest", "leave_original") except: pass # get the input and output filenames storage_path = get_storage_path() input_filename = abspath(get_storage_path(parsed_browse.file_name, config=config)) # check that the input filename is valid -> somewhere under the storage dir if commonprefix((input_filename, storage_path)) != storage_path: raise IngestionException("Input path '%s' points to an invalid " "location." % parsed_browse.file_name) try: models.FileNameValidator(input_filename) except ValidationError, e: raise IngestionException("%s" % str(e), "ValidationError")
def get_failure_dir(config=None):
    """ Returns the configured failure directory. """
    if config is None:
        config = get_ngeo_config()
    failure_dir = safe_get(config, "control.ingest", "failure_dir")
    return get_project_relative_path(failure_dir)
def get_configured_log_file_patterns(config=None):
    """ Return the configured report log file patterns as a list of
        project-relative paths.

        :param config: ngEO configuration; now defaults to the global one —
            the parameter used to be required even though the body already
            handled a None value (backward compatible change).
        :returns: list of paths; empty when the option is not configured.
    """
    config = config or get_ngeo_config()
    items = safe_get(config, CTRL_SECTION, "report_log_files")
    if items is None:
        return []
    return map(get_project_relative_path, items.split(","))
def get_status_config_path(config=None):
    """ Return the configured path of the status configuration file,
        falling back to "config/status" when not configured.

        (The previous docstring claiming "failure directory" was a
        copy-paste mistake.)
    """
    config = config or get_ngeo_config()
    # BUGFIX: ConfigParser.get() takes no default — its third positional
    # argument is `raw`, so "config/status" never acted as a fallback and a
    # missing option raised NoOptionError. Use safe_get() with an explicit
    # default like the other accessors in this project.
    return get_project_relative_path(
        safe_get(config, CTRL_SECTION, "status_config_path", "config/status")
    )
def get_failure_dir(config=None):
    """ Returns the configured failure directory. """
    config = config or get_ngeo_config()
    return get_project_relative_path(safe_get(config, "control.ingest",
                                              "failure_dir"))
def get_report_xml(begin, end, access_logfile=None, report_logfile=None,
                   config=None):
    """ Build the "DWH_DATA" report XML tree covering the given time window.

        :param begin: start of the reporting window.
        :param end: end of the reporting window.
        :param access_logfile: optional browse access log to include.
        :param report_logfile: optional browse report log to include.
        :param config: ngEO configuration; defaults to the global one.
        :returns: the root element of the generated document.
    """
    # remember when the extraction started
    start = datetime.utcnow().isoformat("T") + "Z"
    config = config or get_ngeo_config()
    # NOTE(review): component_name is read but never used below
    component_name = config.get("control", "instance_id")
    reports = []
    if access_logfile:
        reports.append(BrowseAccessReport(begin, end, access_logfile))
    if report_logfile:
        reports.append(BrowseReportReport(begin, end, report_logfile))

    root = E("DWH_DATA")
    header = E("HEADER", E("CONTENT_ID", "NGEO_BROW"))
    root.append(header)

    # window boundaries; "" means "no record seen yet"
    window_start_date = ""
    window_end_date = ""

    rowset = E("ROWSET")
    for report in reports:
        for record in report.get_records():
            report_data = report.get_data(record)
            rowset.append(
                E("ROW",
                  *[E(key, report_data[key])
                    for key in report.get_fields()]))
            # timestamps may or may not carry fractional seconds
            try:
                date = datetime.strptime(report_data["TIME"],
                                         "%Y-%m-%dT%H:%M:%S.%fZ")
            except ValueError:
                date = datetime.strptime(report_data["TIME"],
                                         "%Y-%m-%dT%H:%M:%SZ")
            # widen the window to include this record's timestamp
            window_start_date = date if (
                window_start_date == "" or
                window_start_date > date) else window_start_date
            window_end_date = date if (
                window_end_date == "" or
                window_end_date < date) else window_end_date

    root.append(rowset)
    header.append(E("EXTRACTION_START_DATE", start))
    header.append(
        E("EXTRACTION_END_DATE", datetime.utcnow().isoformat("T") + "Z"))
    header.append(
        E(
            "WINDOW_START_DATE",
            "" if window_start_date == "" else
            window_start_date.replace(tzinfo=None).isoformat("T") + "Z"))
    # NOTE(review): the guard below checks window_start_date while
    # formatting window_end_date — presumably both are always set
    # together; confirm before changing.
    header.append(
        E(
            "WINDOW_END_DATE",
            "" if window_start_date == "" else
            window_end_date.replace(tzinfo=None).isoformat("T") + "Z"))
    return root
def handle(self, *filenames, **kwargs): # parse command arguments self.verbosity = int(kwargs.get("verbosity", 1)) traceback = kwargs.get("traceback", False) self.set_up_logging(["ngeo_browse_server"], self.verbosity, traceback) logger.info("Starting browse ingestion from command line.") on_error = kwargs["on_error"] delete_on_success = kwargs["delete_on_success"] storage_dir = kwargs.get("storage_dir") optimized_dir = kwargs.get("optimized_dir") create_result = kwargs["create_result"] leave_original = kwargs["leave_original"] # check consistency if not len(filenames): logger.error("No input files given.") raise CommandError("No input files given.") # set config values section = "control.ingest" config = get_ngeo_config() # all paths are relative to the current working directory if they are # not yet absolute themselves if storage_dir is not None: storage_dir = os.path.abspath(storage_dir) config.set(section, "storage_dir", storage_dir) logger.info("Using storage directory '%s'." % storage_dir) if optimized_dir is not None: optimized_dir = os.path.abspath(optimized_dir) config.set(section, "optimized_files_dir", optimized_dir) logger.info("Using optimized files directory '%s'." % optimized_dir) config.set(section, "delete_on_success", delete_on_success) config.set(section, "leave_original", leave_original) no_reports_handled_success = 0 no_reports_handled_error = 0 # handle each file separately for filename in filenames: try: # handle each browse report self._handle_file(filename, create_result, config) no_reports_handled_success += 1 except Exception, e: # handle exceptions no_reports_handled_error += 1 logger.error("%s: %s" % (type(e).__name__, str(e))) if on_error == "continue": # continue the execution with the next file continue elif on_error == "stop": # re-raise the exception to stop the execution raise
def get_values(self):
    """ Read the mapcache XML and extract the configured base URL from the
        first auth_method template, defaulting to an empty string.
    """
    config = get_ngeo_config()
    root = read_mapcache_xml(config)
    try:
        text_nodes = root.xpath("auth_method[1]/template/text()")
        match = self.cmd_line_re.match(text_nodes[0])
        baseurl = match.group("url")
    except (IndexError, AttributeError):
        # no template present, or it does not contain a URL
        baseurl = ""
    return {"baseurl": baseurl}
def get_tileset_path(browse_type, config=None):
    """ Returns the path to a tileset SQLite file in the `tileset_root` dir.

        :param browse_type: the browse type name; a ".sqlite" suffix is
                            appended when not already present.
        :param config: ngEO configuration; defaults to the global one.
    """
    config = config or get_ngeo_config()
    tileset_root = config.get(MAPCACHE_SECTION, "tileset_root")
    # BUGFIX: when `browse_type` already ended with ".sqlite" the previous
    # conditional expression yielded "" as the filename, so the function
    # returned the tileset root directory instead of the tileset file.
    if browse_type.endswith(".sqlite"):
        tileset = browse_type
    else:
        tileset = browse_type + ".sqlite"
    return join(get_project_relative_path(tileset_root), tileset)
def get_values(self):
    """ Return the base URL configured in the first mapcache auth_method
        template; an empty string when absent.
    """
    mapcache_root = read_mapcache_xml(get_ngeo_config())
    baseurl = ""
    try:
        template_texts = mapcache_root.xpath("auth_method[1]/template/text()")
        baseurl = self.cmd_line_re.match(template_texts[0]).group("url")
    except (IndexError, AttributeError):
        # missing template or no URL inside it: keep the empty default
        pass
    return {"baseurl": baseurl}
def get_ingest_config(config=None):
    """ Return the ingestion related settings as a dictionary. """
    config = config or get_ngeo_config()
    strategy = safe_get(config, INGEST_SECTION, "strategy", "merge")
    threshold = parse_time_delta(
        safe_get(config, INGEST_SECTION, "merge_threshold", "5h")
    )
    return {"strategy": strategy, "merge_threshold": threshold}
def notify(summary, message, urgency=None, ip_address=None, config=None):
    """ Send a "notifyControllerServer" XML document to the controller
        server.

        :param summary: short summary text of the notification.
        :param message: the full notification message.
        :param urgency: one of "INFO" (default), "CRITICAL" or "BLOCK".
        :param ip_address: target address or URL; when not given it is
            taken from the "notification_url" setting, falling back to the
            registered controller server address.
        :param config: ngEO configuration; defaults to the global one.
        :raises ValueError: on an invalid urgency value.
    """
    config = config or get_ngeo_config()

    urgency = urgency or "INFO"
    if urgency not in ("INFO", "CRITICAL", "BLOCK"):
        raise ValueError("Invalid urgency value '%s'." % urgency)

    try:
        if not ip_address:
            # get the value for "notification_url" and fall back to
            # "address"
            ip_address = safe_get(config, "control", "notification_url")
            if not ip_address:
                ctrl_config = get_controller_config(
                    get_controller_config_path(config))
                logger.debug(
                    "No 'notification_url' present. Trying to fall back to "
                    "registered IP address.")
                ip_address = safe_get(ctrl_config, CONTROLLER_SERVER_SECTION,
                                      "address")
    except (IOError, NoSectionError):
        # probably no config file present, so IP cannot be determined.
        pass

    if not ip_address:
        # cannot log this error as we would run into an endless loop
        logger.info("Cannot send notification to CTRL.")
        return

    # assemble the notification document
    tree = E(
        "notifyControllerServer",
        E("header",
          E("timestamp", isotime(now())),
          E("instance", get_instance_id(config)),
          E("subsystem", "BROW"),
          E("urgency", urgency)),
        E("body",
          E("summary", summary),
          E("message", message)))

    # normalize the address into a full ".../notify" URL
    if ip_address.startswith("http://") or ip_address.startswith("https://"):
        pass
    else:
        ip_address = "http://%s" % ip_address
    if not ip_address.endswith("/notify"):
        ip_address += "/notify"

    logger.info("Sending notification to CTRL at IP '%s'." % ip_address)

    req = urllib2.Request(url=ip_address,
                          data=etree.tostring(tree, pretty_print=True),
                          headers={'Content-Type': 'application/xml'})
    try:
        # best-effort delivery with a short timeout; failures are only
        # logged, never raised
        urllib2.urlopen(req, timeout=1)
    except (urllib2.HTTPError, urllib2.URLError), e:
        logger.info("Error sending notification: %s" % e)
        logger.debug(traceback.format_exc() + "\n")
def add_mapcache_layer_xml(browse_layer, config=None):
    """ Insert the cache, source and tileset elements for the given browse
        layer into the mapcache configuration XML.

        :param browse_layer: the browse layer model to add.
        :param config: ngEO configuration; defaults to the global one.
        :raises Exception: when a layer of that name is already configured.
    """
    name = browse_layer.id

    config = config or get_ngeo_config()

    root = read_mapcache_xml(config)

    # refuse to insert a second layer with the same name
    if len(root.xpath("cache[@name='%s']|source[@name='%s']|tileset[@name='%s']"
                      % (name, name, name))):
        raise Exception(
            "Cannot add browse layer to mapcache config, because a layer with "
            "the name '%s' is already inserted." % name
        )

    tileset_path = get_tileset_path(browse_layer.browse_type)

    root.extend([
        # sqlite cache holding the tiles themselves
        E("cache",
            E("dbfile", tileset_path),
            E("detect_blank", "true"),
            name=name, type="sqlite3"
        ),
        # WMS source the tiles are rendered from
        E("source",
            E("getmap",
                E("params",
                    E("LAYERS", name),
                    E("TRANSPARENT", "true")
                )
            ),
            E("http",
                E("url", "http://localhost/browse/ows?")
            ),
            name=name, type="wms"
        ),
        # tileset tying cache and source together
        E("tileset",
            E("source", name),
            E("cache", name),
            E("grid",
                URN_TO_GRID[browse_layer.grid], **{
                    "max-cached-zoom": str(browse_layer.highest_map_level),
                    "out-of-zoom-strategy": "reassemble"
                }
            ),
            E("format", "mixed"),
            E("metatile", "8 8"),
            E("expires", "3600"),
            E("read-only", "true"),
            # time dimension backed by the mapcache time database
            E("timedimension",
                E("dbfile", settings.DATABASES["mapcache"]["NAME"]),
                E("query", "select strftime('%Y-%m-%dT%H:%M:%SZ',start_time)||'/'||strftime('%Y-%m-%dT%H:%M:%SZ',end_time) from time where source_id=:tileset and start_time<=datetime(:end_timestamp,'unixepoch') and end_time>=datetime(:start_timestamp,'unixepoch') and maxx>=:minx and maxy>=:miny and minx<=:maxx and miny<=:maxy order by end_time desc limit 100"),
                type="sqlite",
                default="2010"  # TODO: default year into layer definition
            ),
            name=name
        )
    ])
    write_mapcache_xml(root, config)
def get_optimization_config(config=None):
    """ Returns a dictionary with all optimization specific config settings.

        Every option is read best-effort: the bare excepts deliberately keep
        the previously assigned default when an option is missing or
        malformed.
    """
    values = {}
    config = config or get_ngeo_config()

    # default band mode; switched to RGBA below when footprint_alpha is on
    values["bandmode"] = RGB

    try:
        values["overviews"] = config.getboolean(INGEST_SECTION, "overviews")
    except:
        pass

    values["overview_levels"] = safe_get(
        config, INGEST_SECTION, "overview_levels")
    if values["overview_levels"]:
        # comma separated list of integer levels
        values["overview_levels"] = map(
            int, values["overview_levels"].split(","))

    try:
        values["overview_minsize"] = config.getint(
            INGEST_SECTION, "overview_minsize")
    except:
        pass

    values["overview_resampling"] = safe_get(
        config, INGEST_SECTION, "overview_resampling")

    try:
        values["color_index"] = config.getboolean(INGEST_SECTION,
                                                  "color_index")
    except:
        pass

    try:
        values["footprint_alpha"] = config.getboolean(
            INGEST_SECTION, "footprint_alpha")
        # an alpha channel requires the RGBA band mode
        if values["footprint_alpha"]:
            values["bandmode"] = RGBA
    except:
        pass

    try:
        values["simplification_factor"] = config.getfloat(
            INGEST_SECTION, "simplification_factor")
    except:
        pass

    # optionally run the optimization in the "/vsimem/" virtual filesystem
    # (presumably GDAL's in-memory filesystem -- confirm against the
    # preprocessor)
    in_memory = False
    try:
        in_memory = config.getboolean(INGEST_SECTION, "in_memory")
    except:
        pass

    values["temporary_directory"] = "/vsimem/" if in_memory else None

    return values
def save_report(filename, begin=None, end=None, access_logfile=None,
                report_logfile=None, config=None):
    """ Generate the report XML for the given window and write it to
        `filename`.
    """
    config = config or get_ngeo_config()
    tree = get_report_xml(begin, end, access_logfile, report_logfile, config)
    serialized = etree.tostring(tree, pretty_print=True)
    with open(filename, "w+") as f:
        f.write(serialized)
def get_mapcache_seed_config(config=None):
    """ Return a dictionary with all mapcache seeding related settings. """
    config = config or get_ngeo_config()
    return {
        "seed_command": safe_get(config, SEED_SECTION, "seed_command",
                                 "mapcache_seed"),
        "config_file": config.get(SEED_SECTION, "config_file"),
        "threads": int(safe_get(config, SEED_SECTION, "threads", 1)),
    }
def get_ingest_config(config=None):
    """ Return the ingestion related settings as a dictionary. """
    config = config or get_ngeo_config()
    clipping = safe_get(
        config, INGEST_SECTION, "regular_grid_clipping", "false"
    ).lower() in ("true", "1", "on", "yes")
    return {
        "strategy": safe_get(config, INGEST_SECTION, "strategy", "replace"),
        "merge_threshold": parse_time_delta(
            safe_get(config, INGEST_SECTION, "merge_threshold", "5h")
        ),
        "regular_grid_clipping": clipping,
    }
def get_ingest_config(config=None):
    """ Collect the ingestion settings into a dictionary. """
    config = config or get_ngeo_config()
    strategy = safe_get(config, INGEST_SECTION, "strategy", "replace")
    threshold_raw = safe_get(config, INGEST_SECTION, "merge_threshold", "5h")
    clipping_raw = safe_get(config, INGEST_SECTION, "regular_grid_clipping",
                            "false")
    return {
        "strategy": strategy,
        "merge_threshold": parse_time_delta(threshold_raw),
        "regular_grid_clipping":
            clipping_raw.lower() in ("true", "1", "on", "yes"),
    }
def remove_mapcache_layer_xml(browse_layer, config=None):
    """ Remove the cache, source and tileset entries of the given browse
        layer from the mapcache configuration XML.
    """
    config = config or get_ngeo_config()
    layer_name = browse_layer.id

    root = read_mapcache_xml(config)

    # drop each of the three elements belonging to this layer
    root.remove(root.xpath("cache[@name='%s']" % layer_name)[0])
    root.remove(root.xpath("source[@name='%s']" % layer_name)[0])
    root.remove(root.xpath("tileset[@name='%s']" % layer_name)[0])

    write_mapcache_xml(root, config)
def handle(self, *filenames, **kwargs): # parse command arguments self.verbosity = int(kwargs.get("verbosity", 1)) traceback = kwargs.get("traceback", False) self.set_up_logging(["ngeo_browse_server"], self.verbosity, traceback) on_error = kwargs["on_error"] delete_on_success = kwargs["delete_on_success"] storage_dir = kwargs.get("storage_dir") optimized_dir = kwargs.get("optimized_dir") create_result = kwargs["create_result"] leave_original = kwargs["leave_original"] # check consistency if not len(filenames): raise CommandError("No input files given.") # set config values section = "control.ingest" config = get_ngeo_config() # all paths are relative to the current working directory if they are # not yet absolute themselves if storage_dir is not None: storage_dir = os.path.abspath(storage_dir) config.set(section, "storage_dir", storage_dir) self.print_msg("Using storage directory '%s'." % storage_dir, 2) if optimized_dir is not None: optimized_dir = os.path.abspath(optimized_dir) config.set(section, "optimized_files_dir", optimized_dir) self.print_msg("Using optimized files directory '%s'." % optimized_dir, 2) config.set(section, "delete_on_success", delete_on_success) config.set(section, "leave_original", leave_original) # handle each file separately for filename in filenames: try: # handle each browse report self._handle_file(filename, create_result, config) except Exception, e: # handle exceptions if on_error == "continue": # just print the traceback and continue self.print_msg("%s: %s" % (type(e).__name__, str(e)), 1, error=True) continue elif on_error == "stop": # re-raise the exception to stop the execution raise
def get_storage_path(file_name=None, storage_dir=None, config=None):
    """ Returns an absolute path to a filename within the intermediary
        storage directory for uploaded but unprocessed files.
    """
    config = config or get_ngeo_config()

    if not storage_dir:
        storage_dir = config.get(INGEST_SECTION, "storage_dir")

    if file_name:
        return get_project_relative_path(join(storage_dir, file_name))
    return get_project_relative_path(storage_dir)
def get_storage_path(file_name=None, storage_dir=None, config=None):
    """ Return an absolute path inside the intermediary storage directory
        for uploaded but unprocessed files; without a file name the
        directory itself is returned.
    """
    config = config or get_ngeo_config()
    base = storage_dir or config.get(INGEST_SECTION, "storage_dir")
    if not file_name:
        return get_project_relative_path(base)
    return get_project_relative_path(join(base, file_name))
def _handle_file(self, filename, config):
    """ Apply one browse layer configuration file: add or update the layers
        of its addConfiguration section, remove those of its
        removeConfiguration section, and record the new revision.
    """
    root = etree.parse(filename)
    # revision markers of the configuration document
    start_revision = root.findtext(ns_cfg("startRevision"))
    end_revision = root.findtext(ns_cfg("endRevision"))

    remove_layers_elems = root.xpath(
        "cfg:removeConfiguration/cfg:browseLayers",
        namespaces={"cfg": ns_cfg.uri})
    add_layers_elems = root.xpath("cfg:addConfiguration/cfg:browseLayers",
                                  namespaces={"cfg": ns_cfg.uri})

    # decode the layer definitions from both sections
    add_layers = []
    for layers_elem in add_layers_elems:
        add_layers.extend(decode_browse_layers(layers_elem))
    remove_layers = []
    for layers_elem in remove_layers_elems:
        remove_layers.extend(decode_browse_layers(layers_elem))

    # get the mapcache config xml file path to make it transaction safe
    mapcache_config = get_mapcache_seed_config(config)
    mapcache_xml_filename = mapcache_config["config_file"]

    # transaction safety here: roll back the mapcache XML and both
    # databases together on failure
    with FileTransaction((mapcache_xml_filename, ), copy=True):
        with transaction.commit_on_success():
            with transaction.commit_on_success(using="mapcache"):
                for browse_layer in add_layers:
                    # update existing layers, add new ones
                    if models.BrowseLayer.objects.filter(
                            id=browse_layer.id).exists():
                        update_browse_layer(browse_layer, config)
                    else:
                        add_browse_layer(browse_layer, config)
                for browse_layer in remove_layers:
                    delete_browse_layer(browse_layer, config=config)

    # set the new revision
    config = config or get_ngeo_config()
    if not config.has_section("config"):
        config.add_section("config")
    # NOTE(review): `revision` and `start_revision` are read but never
    # used -- presumably intended for a revision consistency check.
    revision = int(safe_get(config, "config", "revision", 0))
    config.set("config", "revision", int(end_revision))
    write_ngeo_config()
def handle(self, *args, **kwargs):
    """ Command entry point: import every package given on the command
        line.
    """
    # parse command arguments
    self.verbosity = int(kwargs.get("verbosity", 1))
    traceback = kwargs.get("traceback", False)
    self.set_up_logging(["ngeo_browse_server"], self.verbosity, traceback)

    package_paths = args
    if not len(package_paths):
        raise CommandError("No packages given.")

    ignore_cache = kwargs["ignore_cache"]
    config = get_ngeo_config()

    # import one package after the other
    for path in package_paths:
        result = import_package(path, ignore_cache, config)
def update_browse_layer(browse_layer, config=None):
    """ Update an existing browse layer model from the given decoded browse
        layer, rejecting changes to immutable properties and refreshing the
        mapcache configuration when relevant options changed.

        :param browse_layer: decoded browse layer carrying the new values.
        :param config: ngEO configuration; defaults to the global one.
        :raises Exception: when the layer does not exist or an immutable
            property differs.
    """
    config = config or get_ngeo_config()

    try:
        browse_layer_model = models.BrowseLayer.objects.get(id=browse_layer.id)
    except models.BrowseLayer.DoesNotExist:
        raise Exception("Could not update the previous browse layer")

    # these properties may never change once the layer exists
    immutable_values = (
        "id", "browse_type", "contains_vertical_curtains", "r_band", "g_band",
        "b_band", "radiometric_interval_min", "radiometric_interval_max",
        "grid", "lowest_map_level", "highest_map_level", "strategy"
    )
    for key in immutable_values:
        if getattr(browse_layer_model, key) != getattr(browse_layer, key):
            raise Exception("Cannot change immutable property '%s'." % key)

    # these properties can be freely updated
    mutable_values = [
        "title", "description", "browse_access_policy",
        "timedimension_default", "tile_query_limit"
    ]

    refresh_mapcache_xml = False
    for key in mutable_values:
        setattr(browse_layer_model, key, getattr(browse_layer, key))
        # these options are mirrored in the mapcache XML and require a
        # refresh there
        if key in ("timedimension_default", "tile_query_limit"):
            refresh_mapcache_xml = True

    # ensure all referenced related datasets exist
    for related_dataset_id in browse_layer.related_dataset_ids:
        models.RelatedDataset.objects.get_or_create(
            dataset_id=related_dataset_id, browse_layer=browse_layer_model
        )

    # remove all related datasets that are not referenced anymore
    models.RelatedDataset.objects.filter(
        browse_layer=browse_layer_model
    ).exclude(
        dataset_id__in=browse_layer.related_dataset_ids
    ).delete()

    browse_layer_model.full_clean()
    browse_layer_model.save()

    # re-create the mapcache layer entry when needed; volume and vertical
    # curtain layers have no mapcache representation
    if not browse_layer.contains_volumes and not browse_layer.contains_vertical_curtains:
        if refresh_mapcache_xml:
            remove_mapcache_layer_xml(browse_layer, config)
            add_mapcache_layer_xml(browse_layer, config)
def setUp_config(self):
    """ Apply the default and test-specific configuration overrides and
        optionally write the status configuration file.
    """
    # set up default config and specific config
    config = get_ngeo_config()
    config.set(CTRL_SECTION, "status_config_path", self.temp_status_config)

    for configuration in (self.default_configuration, self.configuration):
        for (section, option), value in configuration.items():
            if not config.has_section(section):
                config.add_section(section)
            # a None value means "remove this option"
            if value is None:
                config.remove_option(section, option)
            else:
                config.set(section, option, value)

    if self.status_config is not None:
        with open(self.temp_status_config, "w+") as f:
            f.write(self.status_config)
def set_values(self, baseurl):
    """ Store `baseurl` in the mapcache auth_method template, replacing the
        existing URL or appending a --baseurl argument.

        :param baseurl: the new base URL to store.
    """
    config = get_ngeo_config()
    root = read_mapcache_xml(config)
    try:
        template_elem = root.xpath("auth_method[1]/template")[0]
        template = template_elem.text
    except IndexError:
        # BUGFIX: no template element present. The previous code only
        # `pass`ed here and then crashed with a NameError on `template`
        # below; bail out instead since there is nothing to update.
        return

    match = self.cmd_line_re.match(template)
    if match:
        # splice the new URL into the matched URL span
        template = "".join((match.string[:match.start("url")], baseurl,
                            match.string[match.end("url"):]))
    else:
        # no URL present yet: append it as an option
        template += " --baseurl %s" % baseurl

    template_elem.text = template
    write_mapcache_xml(root, config)
def remove_mapcache_layer_xml(browse_layer, config=None):
    """ Remove the cache, source and tileset entries of the given browse
        layer from the mapcache configuration, logging a warning when the
        layer is not present.
    """
    config = config or get_ngeo_config()
    layer_name = browse_layer.id

    root = read_mapcache_xml(config)

    logger.info("Removing cache, source, and tileset for '%s' from mapcache "
                "config." % layer_name)
    try:
        root.remove(root.xpath("cache[@name='%s']" % layer_name)[0])
        root.remove(root.xpath("source[@name='%s']" % layer_name)[0])
        root.remove(root.xpath("tileset[@name='%s']" % layer_name)[0])
    except IndexError:
        # the layer was never (fully) inserted; not fatal
        logger.warning(
            "Failed to remove browse layer from mapcache config, because a "
            "layer with the name '%s' could not be found." % layer_name)

    write_mapcache_xml(root, config)
def _handle_file(self, filename, config):
    """ Apply one browse layer configuration file: add/update the layers in
        its addConfiguration section, remove those in its
        removeConfiguration section, and store the new revision.
    """
    root = etree.parse(filename)
    # revision markers carried by the configuration document
    start_revision = root.findtext(ns_cfg("startRevision"))
    end_revision = root.findtext(ns_cfg("endRevision"))

    remove_layers_elems = root.xpath("cfg:removeConfiguration/cfg:browseLayers",
                                     namespaces={"cfg": ns_cfg.uri})
    add_layers_elems = root.xpath("cfg:addConfiguration/cfg:browseLayers",
                                  namespaces={"cfg": ns_cfg.uri})

    # decode the browse layers of both sections
    add_layers = []
    for layers_elem in add_layers_elems:
        add_layers.extend(decode_browse_layers(layers_elem))
    remove_layers = []
    for layers_elem in remove_layers_elems:
        remove_layers.extend(decode_browse_layers(layers_elem))

    # get the mapcache config xml file path to make it transaction safe
    mapcache_config = get_mapcache_seed_config(config)
    mapcache_xml_filename = mapcache_config["config_file"]

    # transaction safety here: the mapcache XML and both databases are
    # rolled back together on failure
    with FileTransaction((mapcache_xml_filename,), copy=True):
        with transaction.commit_on_success():
            with transaction.commit_on_success(using="mapcache"):
                for browse_layer in add_layers:
                    # update existing layers, add new ones
                    if models.BrowseLayer.objects.filter(id=browse_layer.id).exists():
                        update_browse_layer(browse_layer, config)
                    else:
                        add_browse_layer(browse_layer, config)
                for browse_layer in remove_layers:
                    delete_browse_layer(browse_layer, config=config)

    # set the new revision
    config = config or get_ngeo_config()
    if not config.has_section("config"):
        config.add_section("config")
    # NOTE(review): `revision` and `start_revision` are read but never
    # used -- presumably intended for a revision consistency check.
    revision = int(safe_get(config, "config", "revision", 0))
    config.set("config", "revision", int(end_revision))
    write_ngeo_config()
def remove_mapcache_layer_xml(browse_layer, config=None):
    """ Remove the cache, source and tileset entries of the given browse
        layer from the mapcache configuration XML and write it back.
        Missing entries are logged as a warning, not raised.
    """
    config = config or get_ngeo_config()
    name = browse_layer.id
    root = read_mapcache_xml(config)

    logger.info("Removing cache, source, and tileset for '%s' from mapcache "
                "config." % name)

    try:
        # each xpath returns a list; an empty result raises IndexError below
        root.remove(root.xpath("cache[@name='%s']" % name)[0])
        root.remove(root.xpath("source[@name='%s']" % name)[0])
        root.remove(root.xpath("tileset[@name='%s']" % name)[0])
    except IndexError:
        logger.warning(
            "Failed to remove browse layer from mapcache config, because a "
            "layer with the name '%s' could not be found." % name
        )

    write_mapcache_xml(root, config)
def handle(self, begin=None, end=None, url=None, filename=None,
           access_logfile=None, report_logfile=None, **kwargs):
    """ Generate a report for the given time window and either save it to
        `filename` (relative to the configured report store directory) or
        send it to `url`. Exactly one of the two must be given; raises
        CommandError otherwise.
    """
    # parse command arguments
    self.verbosity = int(kwargs.get("verbosity", 1))
    traceback = kwargs.get("traceback", False)
    self.set_up_logging(["ngeo_browse_server"], self.verbosity, traceback)

    conf = get_ngeo_config()
    report_store_dir = safe_get(conf, "control", "report_store_dir",
                                "/var/www/ngeo/store/reports/")
    # fix: only resolve the target path when a filename was actually given.
    # Previously basename(None) raised before the argument checks below,
    # so calling with only a URL crashed.
    if filename:
        filename = join(report_store_dir, basename(filename))

    logger.info("Starting report generation from command line.")

    if begin:
        begin = getDateTime(begin)
    if end:
        end = getDateTime(end)

    if filename and url:
        logger.error("Both Filename and URL specified.")
        raise CommandError("Both Filename and URL specified.")
    if filename:
        logger.info("Save report to file '%s'." % filename)
        save_report(filename, begin, end, access_logfile, report_logfile)
    elif url:
        logger.info("Send report to URL '%s'." % url)
        send_report(url, begin, end, access_logfile, report_logfile)
    else:
        logger.error("Neither Filename nor URL specified.")
        raise CommandError("Neither Filename nor URL specified.")

    logger.info("Successfully finished report generation.")
def get_report_xml(begin, end, access_logfile=None, report_logfile=None,
                   config=None):
    """ Build the DWH_DATA report XML tree from the access and/or browse
        report logfiles, restricted to the [begin, end] time window.
        Returns the root element.
    """
    start = datetime.utcnow().isoformat("T") + "Z"
    config = config or get_ngeo_config()
    # NOTE(review): read but unused; config.get raises if the option is
    # missing, so removing it would change behavior — confirm intent.
    component_name = config.get("control", "instance_id")

    reports = []
    if access_logfile:
        reports.append(BrowseAccessReport(begin, end, access_logfile))
    if report_logfile:
        reports.append(BrowseReportReport(begin, end, report_logfile))

    root = E("DWH_DATA")
    header = E("HEADER", E("CONTENT_ID", "NGEO_BROW"))
    root.append(header)

    # earliest/latest record timestamps seen; "" means "none yet"
    window_start_date = ""
    window_end_date = ""

    rowset = E("ROWSET")
    for report in reports:
        for record in report.get_records():
            report_data = report.get_data(record)
            rowset.append(
                E("ROW", *[
                    E(key, report_data[key]) for key in report.get_fields()
                ])
            )
            # record timestamps may or may not carry fractional seconds
            try:
                date = datetime.strptime(report_data["TIME"],
                                         "%Y-%m-%dT%H:%M:%S.%fZ")
            except ValueError:
                date = datetime.strptime(report_data["TIME"],
                                         "%Y-%m-%dT%H:%M:%SZ")
            window_start_date = date if (window_start_date == "" or window_start_date > date) else window_start_date
            window_end_date = date if (window_end_date == "" or window_end_date < date) else window_end_date
    root.append(rowset)

    header.append(E("EXTRACTION_START_DATE", start))
    header.append(E("EXTRACTION_END_DATE",
                    datetime.utcnow().isoformat("T") + "Z"))
    header.append(E("WINDOW_START_DATE",
                    "" if window_start_date == "" else
                    window_start_date.replace(tzinfo=None).isoformat("T") + "Z"))
    # fix: this guard previously tested window_start_date; if only the end
    # date were unset it would call .replace() on the empty string.
    header.append(E("WINDOW_END_DATE",
                    "" if window_end_date == "" else
                    window_end_date.replace(tzinfo=None).isoformat("T") + "Z"))
    return root
def set_values(self, baseurl):
    """ Update the base URL embedded in the mapcache auth_method command
        line template, replacing an existing --baseurl or appending one.
    """
    config = get_ngeo_config()
    root = read_mapcache_xml(config)

    try:
        template_elem = root.xpath("auth_method[1]/template")[0]
        template = template_elem.text
    except IndexError:
        # no template given, so there is nothing to update. The previous
        # `pass` fell through and raised a NameError on `template` below.
        return

    match = self.cmd_line_re.match(template)
    if match:
        # splice the new base URL over the matched 'url' group
        template = "".join((
            match.string[:match.start("url")],
            baseurl,
            match.string[match.end("url"):]
        ))
    else:
        template += " --baseurl %s" % baseurl

    template_elem.text = template
    write_mapcache_xml(root, config)
def notify(summary, message, urgency=None, ip_address=None, config=None):
    """ Send a notification XML document to the controller server, if its
        address can be determined. Delivery is best effort: send failures
        are swallowed.

        Raises ValueError for an invalid urgency value.
    """
    config = config or get_ngeo_config()
    urgency = urgency or "INFO"
    if urgency not in ("INFO", "CRITICAL", "BLOCK"):
        raise ValueError("Invalid urgency value '%s'." % urgency)

    if not ip_address:
        try:
            ctrl_config = get_controller_config(
                get_controller_config_path(config))
            ip_address = safe_get(
                ctrl_config, CONTROLLER_SERVER_SECTION, "address")
        except IOError:
            # probably no config file present, so IP cannot be determined.
            pass

    if not ip_address:
        return

    # assemble the notification document
    header = E("header",
        E("timestamp", isotime(now())),
        E("instance", get_instance_id(config)),
        E("subsystem", "BROW"),
        E("urgency", urgency)
    )
    body = E("body",
        E("summary", summary),
        E("message", message)
    )
    tree = E("notifyControllerServer", header, body)

    request = urllib2.Request(
        url="http://%s/notify" % ip_address,
        data=etree.tostring(tree, pretty_print=True),
        headers={'Content-Type': 'text/xml'}
    )
    try:
        urllib2.urlopen(request, timeout=1)
    except (urllib2.HTTPError, urllib2.URLError):
        # could not send notification. Out of options
        pass
def add_browse_layer(browse_layer, config=None):
    """ Add a browse layer to the ngEO Browse Server system. This includes the
        database models, cache configuration and filesystem paths.
    """
    config = config or get_ngeo_config()

    # create a new browse layer model
    # (a surrounding `try: ... except Exception: raise` was a no-op and
    # has been removed)
    browse_layer_model = models.BrowseLayer(
        **browse_layer.get_kwargs()
    )
    browse_layer_model.full_clean()
    browse_layer_model.save()

    for related_dataset_id in browse_layer.related_dataset_ids:
        models.RelatedDataset.objects.get_or_create(
            dataset_id=related_dataset_id, browse_layer=browse_layer_model
        )

    # create EOxServer dataset series
    eoxs_models.DatasetSeries.objects.create(identifier=browse_layer.id)

    # create a source in the mapcache sqlite, unless the layer contains
    # volumes or vertical curtains (the previous comment said "remove",
    # which contradicted the code)
    if not browse_layer.contains_volumes and not browse_layer.contains_vertical_curtains:
        mapcache_models.Source.objects.create(name=browse_layer.id)

    # add an XML section to the mapcache config xml
    add_mapcache_layer_xml(browse_layer, config)

    # create a base directory for optimized files
    directory = get_project_relative_path(join(
        config.get(INGEST_SECTION, "optimized_files_dir"), browse_layer.id
    ))
    if not os.path.exists(directory):
        os.makedirs(directory)
def handle(self, *args, **kwargs):
    """ Import one or more browse packages given as positional command line
        arguments. Raises CommandError when no packages are given.
    """
    System.init()
    # parse command arguments
    self.verbosity = int(kwargs.get("verbosity", 1))
    traceback = kwargs.get("traceback", False)
    self.set_up_logging(["ngeo_browse_server"], self.verbosity, traceback)

    logger.info("Starting browse import from command line.")

    package_paths = args
    # idiomatic emptiness check instead of `not len(...)`
    if not package_paths:
        logger.error("No packages given.")
        raise CommandError("No packages given.")

    ignore_cache = kwargs["ignore_cache"]
    config = get_ngeo_config()

    for package_path in package_paths:
        # the return value was previously bound to an unused local
        import_package(package_path, ignore_cache, config)

    logger.info("Successfully finished browse import from command line.")
def unregister(instance_id, cs_id, cs_ip, config=None):
    """ Unregister this Browse Server instance from a controller server by
        deleting the stored controller configuration file.

        Raises ControllerAssertionError when the instance is not registered,
        the controller does not match, or another registration holds the lock.
    """
    config = config or get_ngeo_config()
    assert_instance_id(instance_id, config)

    lockfile = get_controller_config_lockfile_path(config)
    try:
        with FileLock(lockfile):
            cfg_path = get_controller_config_path(config)
            if not exists(cfg_path):
                raise ControllerAssertionError(
                    "This Browse Server instance was not yet registered.",
                    reason="UNBOUND")

            ctrl_cfg = get_controller_config(cfg_path)
            assert_controller_id(cs_id, ctrl_cfg, "CONTROLLER_OTHER")
            assert_controller_ip(cs_ip, ctrl_cfg)

            # remove the controller configuration to complete unregistration
            os.remove(cfg_path)
    except LockException:
        # another (un)registration currently holds the lock file
        raise ControllerAssertionError(
            "There is currently another registration in progress.",
            reason="CONTROLLER_OTHER")
def controller_server(request):
    """ View handling controller server registration requests: POST
        registers, DELETE unregisters. Any failure is returned as a JSON
        fault document with HTTP status 400.
    """
    config = get_ngeo_config()
    try:
        status = get_status()
        if not status.running:
            raise Exception("Server is currently not running.")

        if request.method not in ("POST", "DELETE"):
            raise Exception("Invalid request method '%s'." % request.method)

        values = json.load(request)

        if request.method == "POST":
            # POST means "register"
            register(values["instanceId"], values["instanceType"],
                     values["controllerServerId"], get_client_ip(request),
                     config)
        else:
            # DELETE means "unregister"
            unregister(values["instanceId"], values["controllerServerId"],
                       get_client_ip(request), config)
    except Exception as e:
        logger.debug(traceback.format_exc())
        logger.warning(str(e))
        fault = {
            "faultString": str(e),
            "instanceId": get_instance_id(config),
            "reason": getattr(e, "reason", "NO_CODE")
        }
        if settings.DEBUG:
            fault["traceback"] = traceback.format_exc()
        return JsonResponse(fault, status=400)

    return JsonResponse({"result": "SUCCESS"})
def get_format_config(config=None):
    """ Returns a dictionary with all preprocessing format specific
        configuration settings: "compression", optionally "jpeg_quality"
        or "zlevel" depending on the compression, and "tiling" when set.
    """
    values = {}

    config = config or get_ngeo_config()

    values["compression"] = safe_get(config, INGEST_SECTION, "compression")

    if values["compression"] == "JPEG":
        value = safe_get(config, INGEST_SECTION, "jpeg_quality")
        values["jpeg_quality"] = int(value) if value is not None else None
    elif values["compression"] == "DEFLATE":
        value = safe_get(config, INGEST_SECTION, "zlevel")
        values["zlevel"] = int(value) if value is not None else None

    try:
        values["tiling"] = config.getboolean(INGEST_SECTION, "tiling")
    except Exception:
        # option missing or malformed: leave "tiling" unset. The previous
        # bare `except:` also swallowed SystemExit/KeyboardInterrupt.
        pass

    return values
def get_optimized_path(file_name, directory=None, config=None):
    """ Returns an absolute path to a filename within the storage directory
        for optimized raster files. Uses the 'control.ingest.optimized_files_dir'
        setting from the ngEO configuration.

        Also tries to get the postfix for optimized files from the
        'control.ingest.optimized_files_postfix' setting from the ngEO
        configuration.

        All relative paths are treated relative to the PROJECT_DIR directory
        setting.
    """
    config = config or get_ngeo_config()

    # strip any directory components from the input, then nest it in the
    # requested sub-directory if one was given
    relative_name = basename(file_name)
    if directory:
        relative_name = join(directory, relative_name)

    optimized_dir = get_project_relative_path(
        config.get(INGEST_SECTION, "optimized_files_dir")
    )
    postfix = safe_get(config, INGEST_SECTION, "optimized_files_postfix", "")

    stem, extension = splitext(relative_name)
    return join(optimized_dir, stem + postfix + extension)
def get_config_revision():
    """ Build the getConfigurationRevisionResponse XML element carrying the
        currently stored configuration revision (0 when unset).
    """
    config = get_ngeo_config()
    revision = safe_get(config, "config", "revision", 0)
    return E("getConfigurationRevisionResponse",
             E("revision", str(revision)))
def wrapper(*args, **kwargs):
    # Serialize access to the mapcache XML configuration via a file lock
    # before delegating to the wrapped function.
    config = get_ngeo_config()
    # NOTE(review): the result is unused; the call may exist only for its
    # config validation side effect — confirm before removing.
    mapcache_config = get_mapcache_seed_config(config)
    with FileLock(get_project_relative_path("mapcache.xml.lck")):
        return func(*args, **kwargs)
def set_values(self, **kwargs):
    """ Write the given key/value pairs into this component's configuration
        section (in-memory only; nothing is persisted here).
    """
    config = get_ngeo_config()
    # use the local alias; previously it was assigned but never used
    section = self.section
    for key, value in kwargs.items():
        config.set(section, key, value)
def seed_mapcache(seed_command, config_file, tileset, grid, minx, miny, maxx,
                  maxy, minzoom, maxzoom, start_time, end_time, threads,
                  delete, force=True):
    """ Run the external mapcache seeding tool for the given tileset, extent,
        zoom range and time window; with delete=True tiles are removed
        instead of seeded. Extents crossing the dateline are seeded in two
        passes. Raises SeedException on invalid input or a failing seeder.
    """
    # translate grid URN to mapcache grid name
    try:
        grid = URN_TO_GRID[grid]
    except KeyError:
        raise SeedException("Invalid grid '%s'." % grid)

    bounds = CRS_BOUNDS[GRID_TO_SRID[grid]]
    full = float(abs(bounds[0]) + abs(bounds[2]))

    dateline_crossed = False
    if maxx > bounds[2]:
        dateline_crossed = True
    # extent is always within [bounds[0],bounds[2]]
    # where maxx can be >bounds[2] but <=full
    if minx < bounds[0] or minx > bounds[2] or maxx < bounds[0] or maxx > full:
        raise SeedException("Invalid extent '%s,%s,%s,%s'."
                            % (minx, miny, maxx, maxy))

    if minzoom is None:
        minzoom = 0
    if maxzoom is None:
        maxzoom = 6

    # start- and end-time are expected to be UTC Zulu
    start_time = start_time.replace(tzinfo=None)
    end_time = end_time.replace(tzinfo=None)

    logger.info(
        "Starting mapcache seed with parameters: command='%s', "
        "config_file='%s', tileset='%s', grid='%s', "
        "extent='%s,%s,%s,%s', zoom='%s,%s', nthreads='%s', "
        "mode='%s', dimension='TIME=%sZ/%sZ'." %
        (seed_command, config_file, tileset, grid, minx, miny, maxx, maxy,
         minzoom, maxzoom, threads, "seed" if not delete else "delete",
         start_time.isoformat(), end_time.isoformat()))

    # first pass is clamped to the grid's eastern bound when the dateline is
    # crossed; the second pass below covers the wrapped remainder
    seed_args = [
        seed_command,
        "-c", config_file,
        "-t", tileset,
        "-g", grid,
        "-e", "%f,%f,%f,%f" % (minx, miny,
                               bounds[2] if dateline_crossed else maxx, maxy),
        "-n", str(threads),
        "-z", "%d,%d" % (minzoom, maxzoom),
        "-D", "TIME=%sZ/%sZ" % (start_time.isoformat(), end_time.isoformat()),
        "-m", "seed" if not delete else "delete",
        "-q",
        "-M", "1,1",
    ]
    if not delete and force:
        seed_args.append("-f")

    # read the seeding lock timeout from the configuration; fall back to
    # 60 seconds on any failure
    try:
        config = get_ngeo_config()
        timeout = safe_get(config, "mapcache.seed", "timeout")
        timeout = float(timeout) if timeout is not None else 60.0
    except:
        timeout = 60.0

    try:
        lock = FileLock(get_project_relative_path("mapcache_seed.lck"),
                        timeout=timeout)
        with lock:
            logger.debug("mapcache seeding command: '%s'. raw: '%s'."
                         % (" ".join(seed_args), seed_args))
            process = subprocess.Popen(seed_args, stdout=subprocess.PIPE,
                                       stderr=subprocess.PIPE)
            out, err = process.communicate()
            # forward the seeder's stdout/stderr line by line to our log
            for string in (out, err):
                for line in string.split("\n"):
                    if line != '':
                        logger.info("MapCache output: %s" % line)
            if process.returncode != 0:
                raise SeedException("'%s' failed. Returncode '%d'."
                                    % (seed_command, process.returncode))

        # seed second extent if dateline is crossed
        if dateline_crossed:
            with lock:
                # rewrite the extent argument in-place for the wrapped part
                index = seed_args.index("%f,%f,%f,%f"
                                        % (minx, miny, bounds[2], maxy))
                seed_args[index] = "%f,%f,%f,%f" % (bounds[0], miny,
                                                    maxx - full, maxy)
                logger.debug("mapcache seeding command: '%s'. raw: '%s'."
                             % (" ".join(seed_args), seed_args))
                process = subprocess.Popen(seed_args, stdout=subprocess.PIPE,
                                           stderr=subprocess.PIPE)
                out, err = process.communicate()
                for string in (out, err):
                    for line in string.split("\n"):
                        if line != '':
                            logger.info("MapCache output: %s" % line)
                if process.returncode != 0:
                    raise SeedException("'%s' failed. Returncode '%d'."
                                        % (seed_command, process.returncode))
    except LockException, e:
        raise SeedException("Seeding failed: %s" % str(e))
def add_mapcache_layer_xml(browse_layer, config=None):
    """ Insert cache, source and tileset entries for the given browse layer
        into the mapcache configuration XML and write it back.
        Raises LayerException when an entry with the same name exists.
    """
    name = browse_layer.id

    config = config or get_ngeo_config()

    root = read_mapcache_xml(config)

    # refuse to add a second configuration for the same layer name
    if len(
            root.xpath(
                "cache[@name='%s']|source[@name='%s']|tileset[@name='%s']"
                % (name, name, name))):
        raise LayerException(
            "Cannot add browse layer to mapcache config, because a layer with "
            "the name '%s' is already inserted." % name)

    tileset_path = get_tileset_path(browse_layer.browse_type)

    # grid bounds and full (wrap-around) width, interpolated into the
    # timedimension SQL query below to handle dateline-crossing extents
    bounds = CRS_BOUNDS[GRID_TO_SRID[URN_TO_GRID[browse_layer.grid]]]
    full = float(abs(bounds[0]) + abs(bounds[2]))

    root.extend([
        # sqlite tile cache backed by the layer's tileset file
        E("cache",
            E("dbfile", tileset_path),
            E("detect_blank", "true"),
            E("pragma", "2147483646", name="max_page_count"),
            E("pragma", "2048", name="page_size"),
            name=name, type="sqlite3"),
        # WMS source pointing at the local browse server OWS endpoint
        E("source",
            E("getmap",
                E("params",
                    E("LAYERS", name),
                    E("TRANSPARENT", "true"))),
            E("http",
                E("url", "http://localhost/browse/ows?")),
            name=name, type="wms"),
        E("tileset",
            E("metadata",
                E("title", str(browse_layer.title)),
                # abstract only when a description is set
                *([E("abstract", str(browse_layer.description))]
                  if browse_layer.description else [])),
            E("source", name),
            E("cache", name),
            E("grid", URN_TO_GRID[browse_layer.grid],
              **{
                  "max-cached-zoom": str(browse_layer.highest_map_level),
                  "out-of-zoom-strategy": "reassemble"
              }),
            E("format", "mixed"),
            E("metatile", "8 8"),
            E("expires", "3600"),
            E("read-only", "true"),
            # time dimension resolved via the mapcache sqlite database;
            # the query selects the time intervals intersecting the
            # requested time/extent, including dateline-wrapped entries
            E("timedimension",
                E("dbfile", settings.DATABASES["mapcache"]["NAME"]),
                E("query", "select * from (select strftime('%Y-%m-%dT%H:%M:%SZ',start_time)||'/'||strftime('%Y-%m-%dT%H:%M:%SZ',end_time) as interval from time where source_id=:tileset and (start_time<datetime(:end_timestamp,'unixepoch') and (end_time>datetime(:start_timestamp,'unixepoch')) or (start_time=end_time and start_time<datetime(:end_timestamp,'unixepoch') and end_time>=datetime(:start_timestamp,'unixepoch'))) and ((maxx>=:minx and minx<=:maxx) or (maxx>" + str(bounds[2]) + " and (maxx-" + str(full) + ")>=:minx and (minx-" + str(full) + ")<=:maxx)) and maxy>=:miny and miny<=:maxy order by end_time desc limit " + str(browse_layer.tile_query_limit) + ") order by interval asc"),
                type="sqlite",
                default=str(browse_layer.timedimension_default)),
            # restricted/private layers get command line authorization
            *([E("auth_method", "cmdlineauth")]
              if browse_layer.browse_access_policy in ("RESTRICTED", "PRIVATE")
              else []),
            name=name)
    ])

    logger.info("Adding cache, source, and tileset for '%s' to mapcache "
                "config." % name)

    write_mapcache_xml(root, config)