def animation_datetimes(self):
    """For every date/time in the last 24 hours, we check if the data
    is available. We need at least the "full" geotiff, and if the user
    is logged in, then possibly a geotiff for the user's region as
    well.

    Returned JSON is set as a variable in Javascript
    (wms_neerslagradar.html), and used in lizard_neerslagradar.js to
    load the whole animation."""
    data = []
    for dt in animation_datetimes(utc_now()):
        p = netcdf.time_2_path(dt)
        p = reproject.cache_path(
            p, "EPSG:3857", TIFF_BBOX.split(", "), 525, 497)
        logger.debug("Checking path: {0}".format(p))
        if os.path.exists(p):
            data.append({
                # Translate the UTC datetime to the timezone
                # in settings
                "datetime": (
                    dt.astimezone(pytz.timezone(settings.TIME_ZONE))
                    .strftime("%Y-%m-%dT%H:%M"))
            })
    logger.debug("Data: {0}".format(data))
    return json.dumps(data)
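For reference, the JSON handed to lizard_neerslagradar.js is a flat list of objects with a single "datetime" key, formatted with "%Y-%m-%dT%H:%M" in the site's timezone. The timestamps below are purely illustrative:

# Illustrative shape of the returned value (example timestamps only);
# each entry is one available 5-minute radar frame.
[
    {"datetime": "2011-01-07T00:00"},
    {"datetime": "2011-01-07T00:05"},
    {"datetime": "2011-01-07T00:10"},
]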
def handle(self, *args, **options):
    parser = OptionParser(option_list=self.option_list)
    (options, args) = parser.parse_args()
    logger.warning("IGNORED from=%s", options.from_)
    logger.warning("IGNORED skip_existing=%s", options.skip_existing)

    time_from = dateutil.parser.parse('2011-01-07T00:00:00.000Z')
    time_to = dateutil.parser.parse('2011-01-08T00:00:00.000Z')

    times_list = [time_from]
    if time_to:
        interval = datetime.timedelta(minutes=5)
        time = time_from
        while time < time_to:
            time += interval
            times_list.append(time)

    nc = netCDF4.Dataset(settings.RADAR_NC_PATH, 'r')
    with contextlib.closing(nc):
        for time in times_list:
            try:
                path = netcdf.time_2_path(time)
                netcdf.mk_geotiff(nc, time, path)
                logger.info('Created geotiff for {}'.format(time))
            except Exception:
                logger.exception(
                    'While creating geotiff for {}'.format(time))
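The time range handling above can be read in isolation as a small helper. This is only a sketch restating the loop in the command, not code from the project itself:

import datetime


def five_minute_steps(time_from, time_to):
    """Sketch of the loop above: return time_from plus every 5-minute
    step after it, up to and including the first step at or past
    time_to."""
    times_list = [time_from]
    if time_to:
        interval = datetime.timedelta(minutes=5)
        time = time_from
        while time < time_to:
            time += interval
            times_list.append(time)
    return times_list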
def get(self, request):
    # WMS standard parameters
    width = int(request.GET.get('WIDTH', '512'))
    height = int(request.GET.get('HEIGHT', '512'))
    opacity = float(request.GET.get('OPACITY', '0.6'))
    bbox = request.GET.get(
        'BBOX', '151345.64262053, 6358643.0784661, '
        '981757.51779509, 7136466.2781877')
    bbox = tuple([float(i.strip()) for i in bbox.split(',')])
    srs = request.GET.get('SRS', 'EPSG:3857')

    # Either a time span, or a single time can be passed
    times = request.GET.get(
        'TIME', '2011-01-07T00:00:00.000Z/2011-01-08T00:00:00.000Z')
    times = times.split('/')
    if len(times) == 1:
        time_from = dateutil.parser.parse(times[0])
        # time_to = None
    elif len(times) == 2:
        time_from = dateutil.parser.parse(times[0])
        # time_to = dateutil.parser.parse(times[1])
    else:
        raise Exception('No time provided')

    path = netcdf.time_2_path(time_from)
    return self.serve_geotiff(path, width, height, bbox, srs, opacity)
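As a usage illustration, a request against this view could look like the following. The URL path is a placeholder; only the query parameters and their defaults are taken from the code above:

# Hypothetical request (path is an assumption, parameters from the view):
# GET /wms/?WIDTH=512&HEIGHT=512&OPACITY=0.6&SRS=EPSG:3857
#     &BBOX=151345.64262053,6358643.0784661,981757.51779509,7136466.2781877
#     &TIME=2011-01-07T00:00:00.000Z/2011-01-08T00:00:00.000Z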
def create_projected_geotiffs(stdout, verbose=False):
    """Creating reprojected images is too slow to do on the fly, so
    they can be generated using this script.

    The images generated are all 512x512, and all Google projections.
    Images are generated for each 5-minute step in the last 24 hours,
    both for the entire radar extent and for all users' extents. This
    results in a lot of files. Another script should run to clean up
    the files afterwards.
    """
    to_srs = 'EPSG:3857'

    bbox_whole = tuple(float(c) for c in views.TIFF_BBOX.split(", "))
    extents = set((bbox_whole,))

    for user in User.objects.all():
        user_extent = models.Region.extent_for_user(user)
        if user_extent is not None:
            if verbose:
                stdout.write("Adding extent for {0}.\n".format(user))
            extents.add((user_extent['left'], user_extent['bottom'],
                         user_extent['right'], user_extent['top']))

    files_to_keep = set()

    for current in views.animation_datetimes(
            views.LOGGED_IN_ANIMATION_HOURS):
        if verbose:
            stdout.write(str(current) + "\n")
        path = netcdf.time_2_path(current)
        if not os.path.exists(path):
            if verbose:
                stdout.write("Skipping {0} because it doesn't exist.\n"
                             .format(path))
            continue

        for extent in extents:
            result = reproject.reprojected_image(
                path, to_srs, extent, 525, 497)
            if result is not None:
                files_to_keep.add(result)
                if verbose:
                    stdout.write("Created {0}.\n".format(result))

    # Remove files we don't need to keep anymore
    if hasattr(settings, 'GEOTIFF_ANIMATION_CACHE_DIR'):
        top_dir = settings.GEOTIFF_ANIMATION_CACHE_DIR
        clean_dir(top_dir, files_to_keep, stdout, verbose)
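clean_dir is not shown in this excerpt. A minimal sketch of what it is assumed to do, based only on how it is called above (walk the cache directory and delete any file not in files_to_keep), could look like this; the name, signature and behaviour are assumptions, not the project's actual implementation:

import os


def clean_dir(top_dir, files_to_keep, stdout, verbose=False):
    """Sketch only: remove every file under top_dir that is not in
    files_to_keep (assumed behaviour, not the real implementation)."""
    for dirpath, dirnames, filenames in os.walk(top_dir):
        for filename in filenames:
            path = os.path.join(dirpath, filename)
            if path not in files_to_keep:
                os.remove(path)
                if verbose:
                    stdout.write("Removed {0}.\n".format(path))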
def get(self, request):
    # WMS standard parameters
    width = int(request.GET.get('WIDTH', '512'))
    height = int(request.GET.get('HEIGHT', '512'))
    opacity = float(request.GET.get('OPACITY', '0.6'))
    bbox = request.GET.get('BBOX', TIFF_BBOX)
    bbox = tuple([float(i.strip()) for i in bbox.split(',')])
    srs = request.GET.get('SRS', 'EPSG:3857')

    # Either a time span, or a single time can be passed
    times = request.GET.get('TIME', '')
    times = times.split('/')
    if len(times) == 1 and times[0]:
        time_from = dateutil.parser.parse(times[0])
    elif len(times) == 2:
        time_from = dateutil.parser.parse(times[0])
    else:
        raise Exception('No time provided')

    # Translate from the site's timezone to UTC.
    # time_from is a UTC datetime, but that's incorrect: it's actually
    # in the site's timezone. So we have to turn it into a naive
    # datetime first.
    time_from = datetime.datetime(
        year=time_from.year, month=time_from.month, day=time_from.day,
        hour=time_from.hour, minute=time_from.minute)

    # Get the site timezone
    tz = pytz.timezone(settings.TIME_ZONE)
    # Translate to UTC
    time_from = tz.localize(time_from).astimezone(pytz.UTC)

    # Get the path for this datetime
    path = netcdf.time_2_path(time_from)

    return self.serve_geotiff(path, width, height, bbox, srs, opacity)
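To make the timezone translation concrete, here is a small standalone example of the same naive-localize-convert pattern. 'Europe/Amsterdam' is only an assumed value for settings.TIME_ZONE, not taken from the code above:

import datetime

import pytz

# Assumption: the site runs with TIME_ZONE = 'Europe/Amsterdam'.
tz = pytz.timezone('Europe/Amsterdam')

# The client sends "2011-01-07T12:00:00.000Z"; dateutil parses that as
# 12:00 UTC, but the animation actually means 12:00 local time, so we
# rebuild it as a naive datetime first.
naive = datetime.datetime(2011, 1, 7, 12, 0)

# Localize to the site's timezone, then convert to UTC: in winter
# (CET, UTC+1) this yields 2011-01-07 11:00 UTC.
utc_time = tz.localize(naive).astimezone(pytz.UTC)
print(utc_time)  # 2011-01-07 11:00:00+00:00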