def handle(self, *args, **kwargs):
    System.init()

    # parse command arguments
    self.verbosity = int(kwargs.get("verbosity", 1))
    traceback = kwargs.get("traceback", False)
    self.set_up_logging(["ngeo_browse_server"], self.verbosity, traceback)

    logger.info("Starting browse deletion from command line.")

    browse_layer_id = kwargs.get("browse_layer_id")
    browse_type = kwargs.get("browse_type")
    if not browse_layer_id and not browse_type:
        logger.error("No browse layer or browse type was specified.")
        raise CommandError("No browse layer or browse type was specified.")
    elif browse_layer_id and browse_type:
        logger.error("Both browse layer and browse type were specified.")
        raise CommandError(
            "Both browse layer and browse type were specified."
        )

    start = kwargs.get("start")
    end = kwargs.get("end")

    # parse start/end if given
    if start:
        start = getDateTime(start)
    if end:
        end = getDateTime(end)

    self._handle(start, end, browse_layer_id, browse_type)

    logger.info("Successfully finished browse deletion from command line.")
def handle(self, *args, **kwargs):
    # parse command arguments
    self.verbosity = int(kwargs.get("verbosity", 1))
    traceback = kwargs.get("traceback", False)
    self.set_up_logging(["ngeo_browse_server"], self.verbosity, traceback)

    browse_layer_id = kwargs.get("browse_layer_id")
    browse_type = kwargs.get("browse_type")
    if not browse_layer_id and not browse_type:
        raise CommandError("No browse layer or browse type was specified.")
    elif browse_layer_id and browse_type:
        raise CommandError("Both browse layer and browse type were specified.")

    start = kwargs.get("start")
    end = kwargs.get("end")

    # parse start/end if given
    if start:
        start = getDateTime(start)
    if end:
        end = getDateTime(end)

    self._handle(start, end, browse_layer_id, browse_type)
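# A minimal sketch of driving such a deletion handler programmatically via
# Django's management API. NOTE: the command name "ngeo_delete" and the
# option/browse-type values below are assumptions for illustration only;
# they are not confirmed by the handler code above.
from django.core.management import call_command

call_command(
    "ngeo_delete",                      # hypothetical command name
    browse_type="SAR",                  # hypothetical browse type
    start="2013-05-01T00:00:00Z",
    end="2013-06-01T00:00:00Z",
)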
def _parse_datetime(input_str):
    """ Helper callback function to check if a given datetime is correct. """
    try:
        getDateTime(input_str)
    except:
        raise argparse.ArgumentTypeError("Wrong datetime format. Use ISO 8601.")
    return input_str
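# A minimal sketch of wiring the callback into an argparse parser as a
# `type=` validator; the option names "--start"/"--end" are illustrative
# and not taken from the code above.
import argparse

parser = argparse.ArgumentParser()
parser.add_argument("--start", type=_parse_datetime)
parser.add_argument("--end", type=_parse_datetime)
args = parser.parse_args(["--start", "2013-05-01T00:00:00Z"])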
def get_records(self):
    with open(self.filename) as f:
        for line in f:
            items = line[:-1].split("/\\/\\")
            date = getDateTime(items[0])
            if self.end and self.end < date:
                continue
            elif self.begin and self.begin > date:
                continue
            yield BrowseReportRecord(*items)
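# Illustrative only: a report log line is assumed to carry the record
# timestamp first, followed by further fields. Only the "/\/\" delimiter
# and the timestamp position are taken from get_records() above; the
# remaining field values are made up.
line = '2013-05-01T12:00:00Z/\\/\\SAR/\\/\\browse_1.tif\n'
items = line[:-1].split("/\\/\\")
# items == ['2013-05-01T12:00:00Z', 'SAR', 'browse_1.tif']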
def handle(self, begin=None, end=None, url=None, filename=None,
           access_logfile=None, report_logfile=None, **kwargs):
    # parse command arguments
    self.verbosity = int(kwargs.get("verbosity", 1))
    traceback = kwargs.get("traceback", False)
    self.set_up_logging(["ngeo_browse_server"], self.verbosity, traceback)

    conf = get_ngeo_config()
    report_store_dir = safe_get(conf, "control", "report_store_dir",
                                "/var/www/ngeo/store/reports/")
    # only resolve the filename against the report store dir if one was given
    if filename:
        filename = join(report_store_dir, basename(filename))

    logger.info("Starting report generation from command line.")

    if begin:
        begin = getDateTime(begin)
    if end:
        end = getDateTime(end)

    if filename and url:
        logger.error("Both Filename and URL specified.")
        raise CommandError("Both Filename and URL specified.")

    if filename:
        logger.info("Save report to file '%s'." % filename)
        save_report(filename, begin, end, access_logfile, report_logfile)
    elif url:
        logger.info("Send report to URL '%s'." % url)
        send_report(url, begin, end, access_logfile, report_logfile)
    else:
        logger.error("Neither Filename nor URL specified.")
        raise CommandError("Neither Filename nor URL specified.")

    logger.info("Successfully finished report generation.")
def normalize(self, dimension, value):
    if value is None or len(value) == 0:
        return None
    elif dimension in ("phenomenonTime", "time", "t"):
        if value[0] == '"' and value[-1] == '"':
            token = value.lstrip('"').rstrip('"')
            # raises an UnknownParameterFormatException if the datetime
            # format is not recognized
            return getDateTime(token)
        else:
            raise InvalidSubsettingException(
                "Date/Time tokens have to be enclosed in quotation marks (\")"
            )
    else:
        try:
            return float(value)
        except:
            raise InvalidSubsettingException(
                "'%s' not recognized as a number" % value
            )
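# A sketch of the expected behaviour, assuming `decoder` is an instance of
# the class that defines normalize() above (the variable name is made up):
decoder.normalize("t", '"2013-05-01T12:00:00Z"')  # quoted time token -> datetime
decoder.normalize("x", "12.5")                    # non-time dimension -> 12.5
decoder.normalize("x", "")                        # empty value -> None
decoder.normalize("t", "2013-05-01T12:00:00Z")    # unquoted time -> InvalidSubsettingException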
def handle(self, *args, **kwargs):
    System.init()

    # parse command arguments
    self.verbosity = int(kwargs.get("verbosity", 1))
    traceback = kwargs.get("traceback", False)
    self.set_up_logging(["ngeo_browse_server"], self.verbosity, traceback)

    logger.info("Starting browse export from command line.")

    browse_layer_id = kwargs.get("browse_layer_id")
    browse_type = kwargs.get("browse_type")
    if not browse_layer_id and not browse_type:
        logger.error("No browse layer or browse type was specified.")
        raise CommandError("No browse layer or browse type was specified.")
    elif browse_layer_id and browse_type:
        logger.error("Both browse layer and browse type were specified.")
        raise CommandError("Both browse layer and browse type were specified.")

    start = kwargs.get("start")
    end = kwargs.get("end")
    compression = kwargs.get("compression")
    export_cache = kwargs["export_cache"]
    output_path = kwargs.get("output_path")

    # parse start/end if given
    if start:
        start = getDateTime(start)
    if end:
        end = getDateTime(end)

    if not output_path:
        output_path = package.generate_filename(compression)

    with package.create(output_path, compression) as p:
        # query the browse layer
        if browse_layer_id:
            try:
                browse_layer_model = BrowseLayer.objects.get(id=browse_layer_id)
            except BrowseLayer.DoesNotExist:
                logger.error("Browse layer '%s' does not exist"
                             % browse_layer_id)
                raise CommandError("Browse layer '%s' does not exist"
                                   % browse_layer_id)
        else:
            try:
                browse_layer_model = BrowseLayer.objects.get(browse_type=browse_type)
            except BrowseLayer.DoesNotExist:
                logger.error("Browse layer with browse type '%s' does "
                             "not exist" % browse_type)
                raise CommandError("Browse layer with browse type '%s' does "
                                   "not exist" % browse_type)

        browse_layer = browselayer_data.BrowseLayer.from_model(browse_layer_model)
        p.set_browse_layer(
            serialize_browse_layers((browse_layer,), pretty_print=True)
        )

        # query browse reports; optionally filter for start/end time
        browse_reports_qs = BrowseReport.objects.all()

        # apply start/end filter
        if start and not end:
            browse_reports_qs = browse_reports_qs.filter(browses__start_time__gte=start)
        elif end and not start:
            browse_reports_qs = browse_reports_qs.filter(browses__end_time__lte=end)
        elif start and end:
            browse_reports_qs = browse_reports_qs.filter(browses__start_time__gte=start,
                                                         browses__end_time__lte=end)

        # use count annotation to exclude all browse reports with no browses
        browse_reports_qs = browse_reports_qs.annotate(
            browse_count=Count('browses')
        ).filter(browse_layer=browse_layer_model, browse_count__gt=0)

        # iterate over all browse reports
        for browse_report_model in browse_reports_qs:
            browses_qs = Browse.objects.filter(
                browse_report=browse_report_model
            )
            if start:
                browses_qs = browses_qs.filter(start_time__gte=start)
            if end:
                browses_qs = browses_qs.filter(end_time__lte=end)

            browse_report = browsereport_data.BrowseReport.from_model(
                browse_report_model, browses_qs
            )

            # iterate over all browses in the query
            for browse, browse_model in izip(browse_report, browses_qs):
                coverage_wrapper = System.getRegistry().getFromFactory(
                    "resources.coverages.wrappers.EOCoverageFactory",
                    {"obj_id": browse_model.coverage_id}
                )

                # set the filenames used within the package
                base_filename = browse_model.coverage_id
                data_filename = base_filename + ".tif"
                md_filename = base_filename + ".xml"
                footprint_filename = base_filename + ".wkb"

                browse._file_name = data_filename

                # add optimized browse image to package
                data_package = coverage_wrapper.getData()
                data_package.prepareAccess()
                browse_file_path = data_package.getGDALDatasetIdentifier()
                with open(browse_file_path) as f:
                    p.add_browse(f, data_filename)

                wkb = coverage_wrapper.getFootprint().wkb
                p.add_footprint(footprint_filename, wkb)

                if export_cache:
                    time_model = mapcache_models.Time.objects.get(
                        start_time__lte=browse_model.start_time,
                        end_time__gte=browse_model.end_time,
                        source__name=browse_layer_model.id
                    )

                    # get "dim" parameter
                    dim = (isotime(time_model.start_time) + "/" +
                           isotime(time_model.end_time))

                    # exit if a merged browse is found
                    if dim != (isotime(browse_model.start_time) + "/" +
                               isotime(browse_model.end_time)):
                        logger.error("Browse layer '%s' contains "
                                     "merged browses and exporting "
                                     "of cache is requested. Try "
                                     "without exporting the cache."
                                     % browse_layer_model.id)
                        raise CommandError("Browse layer '%s' contains "
                                           "merged browses and exporting "
                                           "of cache is requested. Try "
                                           "without exporting the cache."
                                           % browse_layer_model.id)

                    # get path to sqlite tileset and open it
                    ts = tileset.open(
                        get_tileset_path(browse_layer.browse_type)
                    )

                    for tile_desc in ts.get_tiles(
                        browse_layer.id, URN_TO_GRID[browse_layer.grid],
                        dim=dim, minzoom=browse_layer.highest_map_level,
                        maxzoom=browse_layer.lowest_map_level
                    ):
                        p.add_cache_file(*tile_desc)

            # save browse report xml and add it to the package
            p.add_browse_report(
                serialize_browse_report(browse_report, pretty_print=True),
                name="%s_%s_%s_%s.xml" % (
                    browse_report.browse_type,
                    browse_report.responsible_org_name,
                    browse_report.date_time.strftime("%Y%m%d%H%M%S%f"),
                    uuid.uuid4().hex
                )
            )

    logger.info("Successfully finished browse export from command line.")
def handle(self, *browse_layer_id, **kwargs):
    # parse command arguments
    self.verbosity = int(kwargs.get("verbosity", 1))
    traceback = kwargs.get("traceback", False)
    self.set_up_logging(["ngeo_browse_server"], self.verbosity, traceback)

    # check consistency
    if not len(browse_layer_id):
        logger.error("No browse layer given.")
        raise CommandError("No browse layer given.")
    elif len(browse_layer_id) > 1:
        logger.error("Too many browse layers given.")
        raise CommandError("Too many browse layers given.")
    else:
        browse_layer_id = browse_layer_id[0]

    try:
        # get the according browse layer
        browse_layer = models.BrowseLayer.objects.get(id=browse_layer_id)
    except models.BrowseLayer.DoesNotExist:
        logger.error("Browse layer '%s' does not exist." % browse_layer_id)
        raise CommandError("Browse layer '%s' does not exist." % browse_layer_id)

    start = kwargs.get("start")
    end = kwargs.get("end")
    histogram = kwargs.get("histogram")
    num_browses_only = kwargs.get("num_browses_only")

    # parse start/end if given
    if start:
        start = getDateTime(start)
    if end:
        end = getDateTime(end)

    if histogram and histogram not in ['year', 'month', 'day']:
        raise CommandError("Wrong value '%s' for histogram given. "
                           "Allowed values are 'year', 'month', and "
                           "'day'." % histogram)
    elif not histogram:
        histogram = 'month'

    # get all browses of browse layer
    browses_qs = models.Browse.objects.filter(browse_layer=browse_layer)
    times_qs = mapcache_models.Time.objects.filter(source=browse_layer.id)

    # apply start/end filter
    if start and not end:
        browses_qs = browses_qs.filter(start_time__gte=start)
        times_qs = times_qs.filter(start_time__gte=start)
    elif end and not start:
        browses_qs = browses_qs.filter(end_time__lte=end)
        times_qs = times_qs.filter(end_time__lte=end)
    elif start and end:
        browses_qs = browses_qs.filter(start_time__gte=start, end_time__lte=end)
        times_qs = times_qs.filter(start_time__gte=start, end_time__lte=end)

    num_browses = len(browses_qs)
    num_browses_cache = len(times_qs)

    if num_browses_only:
        logger.info("-----------------------------------------------------")
        logger.info("Browse image statistics for browse layer '%s':"
                    % browse_layer.id)
        logger.info("-----------------------------------------------------")
        logger.info("Number of browses: %d" % num_browses)
        logger.info("Number in cache: %d" % num_browses_cache)
        logger.info("-----------------------------------------------------")
    #TODO: Add further statistics switches
    #elif:
    else:
        logger.info("-----------------------------------------------------")
        logger.info("Full statistics for browse layer '%s':" % browse_layer.id)
        logger.info("-----------------------------------------------------")
        logger.info("Number of browses: %d" % num_browses)
        logger.info("Number in cache: %d" % num_browses_cache)
        logger.info("-----------------------------------------------------")
        logger.info("Time histogram: ")

        truncate_date = connection.ops.date_trunc_sql(histogram, 'start_time')
        browses_qs_hist = browses_qs.extra(
            {'date': truncate_date}
        ).values('date').annotate(
            no_entries=Count('browse_identifier')
        ).order_by('date')

        for hist_entry in browses_qs_hist:
            logger.info("%s: %d" % (
                hist_entry["date"].strftime(
                    "%Y-%m-%d" if histogram == "day"
                    else "%Y" if histogram == "year"
                    else "%Y-%m"
                ),
                hist_entry["no_entries"]
            ))
        logger.info("-----------------------------------------------------")
def handle(self, *browse_layer_id, **kwargs):
    # parse command arguments
    self.verbosity = int(kwargs.get("verbosity", 1))
    traceback = kwargs.get("traceback", False)
    self.set_up_logging(["ngeo_browse_server"], self.verbosity, traceback)

    # check consistency
    if not len(browse_layer_id):
        logger.error("No browse layer given.")
        raise CommandError("No browse layer given.")
    elif len(browse_layer_id) > 1:
        logger.error("Too many browse layers given.")
        raise CommandError("Too many browse layers given.")
    else:
        browse_layer_id = browse_layer_id[0]

    try:
        # get the according browse layer
        browse_layer = models.BrowseLayer.objects.get(id=browse_layer_id)
    except models.BrowseLayer.DoesNotExist:
        logger.error("Browse layer '%s' does not exist." % browse_layer_id)
        raise CommandError("Browse layer '%s' does not exist." % browse_layer_id)

    start = kwargs.get("start")
    end = kwargs.get("end")
    dry_run = kwargs.get("dry_run")
    force = kwargs.get("force")

    # parse start/end if given
    if start:
        start = getDateTime(start)
    if end:
        end = getDateTime(end)

    if force:
        logger.info("Starting reseeding browse layer '%s'." % browse_layer_id)
    else:
        logger.info("Starting seeding browse layer '%s'." % browse_layer_id)

    times_qs = mapcache_models.Time.objects.filter(source=browse_layer.id)

    # apply start/end filter
    if start and not end:
        times_qs = times_qs.filter(start_time__gte=start)
    elif end and not start:
        times_qs = times_qs.filter(end_time__lte=end)
    elif start and end:
        times_qs = times_qs.filter(start_time__gte=start, end_time__lte=end)

    for time_model in times_qs:
        if dry_run:
            logger.info("Time span to (re)seed is %s/%s."
                        % (isotime(time_model.start_time),
                           isotime(time_model.end_time)))
        else:
            try:
                logger.info("(Re)seeding time span %s/%s."
                            % (isotime(time_model.start_time),
                               isotime(time_model.end_time)))

                seed_mapcache(tileset=browse_layer.id,
                              grid=browse_layer.grid,
                              minx=time_model.minx, miny=time_model.miny,
                              maxx=time_model.maxx, maxy=time_model.maxy,
                              minzoom=browse_layer.lowest_map_level,
                              maxzoom=browse_layer.highest_map_level,
                              start_time=time_model.start_time,
                              end_time=time_model.end_time,
                              delete=False, force=force,
                              **get_mapcache_seed_config())

                logger.info("Successfully finished (re)seeding time span.")
            except Exception as e:
                logger.warn("(Re)seeding failed: %s" % str(e))
def handle(self, *args, **kwargs):
    # parse command arguments
    self.verbosity = int(kwargs.get("verbosity", 1))
    traceback = kwargs.get("traceback", False)
    self.set_up_logging(["ngeo_browse_server"], self.verbosity, traceback)

    browse_layer_id = kwargs.get("browse_layer_id")
    browse_type = kwargs.get("browse_type")
    if not browse_layer_id and not browse_type:
        raise CommandError("No browse layer or browse type was specified.")
    elif browse_layer_id and browse_type:
        raise CommandError("Both browse layer and browse type were specified.")

    start = kwargs.get("start")
    end = kwargs.get("end")
    compression = kwargs.get("compression")
    export_cache = kwargs["export_cache"]
    output_path = kwargs.get("output_path")

    # parse start/end if given
    if start:
        start = getDateTime(start)
    if end:
        end = getDateTime(end)

    if not output_path:
        output_path = package.generate_filename(compression)

    with package.create(output_path, compression) as p:
        # query the browse layer
        if browse_layer_id:
            try:
                browse_layer_model = BrowseLayer.objects.get(id=browse_layer_id)
            except BrowseLayer.DoesNotExist:
                raise CommandError("Browse layer '%s' does not exist"
                                   % browse_layer_id)
        else:
            try:
                browse_layer_model = BrowseLayer.objects.get(browse_type=browse_type)
            except BrowseLayer.DoesNotExist:
                raise CommandError("Browse layer with browse type '%s' does "
                                   "not exist" % browse_type)

        browse_layer = browselayer_data.BrowseLayer.from_model(browse_layer_model)
        p.set_browse_layer(
            serialize_browse_layers((browse_layer,), pretty_print=True)
        )

        # query browse reports; optionally filter for start/end time
        browse_reports_qs = BrowseReport.objects.all()

        # apply start/end filter
        if start and not end:
            browse_reports_qs = browse_reports_qs.filter(browses__start_time__gte=start)
        elif end and not start:
            browse_reports_qs = browse_reports_qs.filter(browses__end_time__lte=end)
        elif start and end:
            browse_reports_qs = browse_reports_qs.filter(browses__start_time__gte=start,
                                                         browses__end_time__lte=end)

        # use count annotation to exclude all browse reports with no browses
        browse_reports_qs = browse_reports_qs.annotate(
            browse_count=Count('browses')
        ).filter(browse_layer=browse_layer_model, browse_count__gt=0)

        # iterate over all browse reports
        for browse_report_model in browse_reports_qs:
            browses_qs = Browse.objects.filter(
                browse_report=browse_report_model
            )
            if start:
                browses_qs = browses_qs.filter(start_time__gte=start)
            if end:
                browses_qs = browses_qs.filter(end_time__lte=end)

            browse_report = browsereport_data.BrowseReport.from_model(
                browse_report_model, browses_qs
            )

            # iterate over all browses in the query
            for browse, browse_model in izip(browse_report, browses_qs):
                coverage = eoxs_models.RectifiedDataset.objects.get(
                    identifier=browse_model.coverage_id
                )

                # set the filenames used within the package
                base_filename = browse_model.coverage_id
                data_filename = base_filename + ".tif"
                md_filename = base_filename + ".xml"
                footprint_filename = base_filename + ".wkb"

                browse._file_name = data_filename

                # add optimized browse image to package
                data_item = coverage.data_items.get(
                    semantic__startswith="bands"
                )
                browse_file_path = data_item.location
                with open(browse_file_path) as f:
                    p.add_browse(f, data_filename)

                wkb = coverage.footprint.wkb
                p.add_footprint(footprint_filename, wkb)

                if export_cache:
                    time_model = mapcache_models.Time.objects.get(
                        start_time__lte=browse_model.start_time,
                        end_time__gte=browse_model.end_time,
                        source__name=browse_layer_model.id
                    )

                    # get "dim" parameter
                    dim = (isoformat(time_model.start_time) + "/" +
                           isoformat(time_model.end_time))

                    # exit if a merged browse is found
                    if dim != (isoformat(browse_model.start_time) + "/" +
                               isoformat(browse_model.end_time)):
                        raise CommandError("Browse layer '%s' contains "
                                           "merged browses and exporting "
                                           "of cache is requested. Try "
                                           "without exporting the cache."
                                           % browse_layer_model.id)

                    # get path to sqlite tileset and open it
                    ts = tileset.open(
                        get_tileset_path(browse_layer.id)
                    )

                    for tile_desc in ts.get_tiles(
                        browse_layer.id, URN_TO_GRID[browse_layer.grid],
                        dim=dim, minzoom=browse_layer.highest_map_level,
                        maxzoom=browse_layer.lowest_map_level
                    ):
                        p.add_cache_file(*tile_desc)

            # save browse report xml and add it to the package
            p.add_browse_report(
                serialize_browse_report(browse_report, pretty_print=True),
                name="%s_%s_%s_%s.xml" % (
                    browse_report.browse_type,
                    browse_report.responsible_org_name,
                    browse_report.date_time.strftime("%Y%m%d%H%M%S%f"),
                    uuid.uuid4().hex
                )
            )