def test_returned_datetimes(self):
    """animation_datetimes() yields one timestamp per 5-minute step.

    For a 24-hour window relative to ``start_date`` (2013-01-02 15:17)
    the test expects 24 * 12 steps, the first at 2013-01-01 15:20 and
    the last at 2013-01-02 15:15 — i.e. aligned to the 5-minute grid
    just inside the window.
    """
    start_date = datetime.datetime(
        year=2013, month=1, day=2, hour=15, minute=17)
    datetimes = list(views.animation_datetimes(start_date))
    # assertEquals is a deprecated alias; assertEqual is the
    # canonical unittest method.
    self.assertEqual(len(datetimes), 24 * 12)
    self.assertEqual(
        datetimes[0],
        datetime.datetime(year=2013, month=1, day=1, hour=15, minute=20))
    self.assertEqual(
        datetimes[-1],
        datetime.datetime(year=2013, month=1, day=2, hour=15, minute=15))
def create_projected_geotiffs(stdout, verbose=False):
    """Pre-generate reprojected radar images.

    Creating reprojected images is too slow to do on the fly, so this
    script generates them ahead of time: one Web Mercator (EPSG:3857)
    image per 5-minute step of the logged-in animation window, both for
    the entire radar extent and for every user's own extent. That
    produces a lot of files; stale ones are removed at the end via
    ``clean_dir``.

    :param stdout: writable stream used for progress messages.
    :param verbose: when True, report each extent, timestep and
        created file on ``stdout``.
    """
    target_srs = 'EPSG:3857'

    # Collect the extents to render: the full radar bounding box plus
    # one bounding box per user with a configured region.
    whole_bbox = tuple(float(coord) for coord in views.TIFF_BBOX.split(", "))
    extents = {whole_bbox}
    for user in User.objects.all():
        region_extent = models.Region.extent_for_user(user)
        if region_extent is None:
            continue
        if verbose:
            stdout.write("Adding extent for {0}.\n".format(user))
        extents.add((region_extent['left'], region_extent['bottom'],
                     region_extent['right'], region_extent['top']))

    files_to_keep = set()
    for timestamp in views.animation_datetimes(
            views.LOGGED_IN_ANIMATION_HOURS):
        if verbose:
            stdout.write(str(timestamp) + "\n")
        source_path = netcdf.time_2_path(timestamp)
        # Some timesteps have no source data on disk; skip those.
        if not os.path.exists(source_path):
            if verbose:
                stdout.write("Skipping {0} because it doesn't exist.\n"
                             .format(source_path))
            continue
        for extent in extents:
            created = reproject.reprojected_image(
                source_path, target_srs, extent, 525, 497)
            # reprojected_image may return None (presumably on
            # failure); only track files that were actually created.
            if created is None:
                continue
            files_to_keep.add(created)
            if verbose:
                stdout.write("Created {0}.\n".format(created))

    # Remove files we don't need to keep anymore.
    if hasattr(settings, 'GEOTIFF_ANIMATION_CACHE_DIR'):
        clean_dir(settings.GEOTIFF_ANIMATION_CACHE_DIR,
                  files_to_keep, stdout, verbose)
def create_projected_geotiffs(stdout, verbose=False):
    """Creating reprojected images is too slow to do on the fly, so
    they can be generated using this script.

    Images are reprojected to EPSG:3857 (Google Web Mercator), one per
    5-minute step of the logged-in animation window, both for the
    entire radar extent and for all users' extents. This results in a
    lot of files; files no longer needed are cleaned up at the end via
    ``clean_dir``.

    :param stdout: writable stream used for progress messages.
    :param verbose: when True, report each extent, timestep and
        created file on ``stdout``.

    NOTE(review): an earlier version of this docstring claimed the
    generated images are all 512x512, but ``reprojected_image`` is
    called with 525x497 below — confirm which is intended.
    """
    # Target spatial reference system: Google Web Mercator.
    to_srs = 'EPSG:3857'
    # Full radar extent, parsed from the comma-separated bbox string.
    bbox_whole = tuple(float(c) for c in views.TIFF_BBOX.split(", "))
    extents = set((bbox_whole, ))
    # Add one extent per user that has a region configured.
    for user in User.objects.all():
        user_extent = models.Region.extent_for_user(user)
        if user_extent is not None:
            if verbose:
                stdout.write("Adding extent for {0}.\n".format(user))
            extents.add((user_extent['left'], user_extent['bottom'],
                         user_extent['right'], user_extent['top']))
    files_to_keep = set()
    for current in views.animation_datetimes(views.LOGGED_IN_ANIMATION_HOURS):
        if verbose:
            stdout.write(str(current) + "\n")
        path = netcdf.time_2_path(current)
        # Some timesteps have no source data on disk; skip those.
        if not os.path.exists(path):
            if verbose:
                stdout.write(
                    "Skipping {0} because it doesn't exist.\n".format(path))
            continue
        for extent in extents:
            result = reproject.reprojected_image(path, to_srs, extent,
                                                 525, 497)
            # reprojected_image may return None (presumably on
            # failure); only remember files that were created.
            if result is not None:
                files_to_keep.add(result)
                if verbose:
                    stdout.write("Created {0}.\n".format(result))
    # Remove files we don't need to keep anymore
    if hasattr(settings, 'GEOTIFF_ANIMATION_CACHE_DIR'):
        top_dir = settings.GEOTIFF_ANIMATION_CACHE_DIR
        clean_dir(top_dir, files_to_keep, stdout, verbose)
def test_returned_datetimes(self):
    """animation_datetimes(24) yields one timestamp per 5-minute step.

    Expects 24 * 12 steps for a 24-hour window. The hard-coded absolute
    expected datetimes suggest that "now" is fixed elsewhere in the
    test case (e.g. mocked to around 2013-01-02 15:17) — confirm
    against the surrounding setUp/mocking.
    """
    datetimes = list(views.animation_datetimes(24))
    # assertEquals is a deprecated alias; assertEqual is the
    # canonical unittest method.
    self.assertEqual(len(datetimes), 24 * 12)
    self.assertEqual(
        datetimes[0],
        datetime.datetime(year=2013, month=1, day=1, hour=15, minute=20))
    self.assertEqual(
        datetimes[-1],
        datetime.datetime(year=2013, month=1, day=2, hour=15, minute=15))