def create_geotiff(dt_aggregate, code='5min'):
    pathhelper = utils.PathHelper(
        basedir=config.AGGREGATE_DIR,
        code=code,
        template='{code}_{timestamp}.h5',
    )

    rasterlayerkwargs = utils.rain_kwargs(
        name='jet', max_rain=2, threshold=0.008,
    )

    aggregatepath = pathhelper.path(dt_aggregate)

    tifpath_rd = os.path.join(
        config.IMG_DIR, 'geotiff', 'rd',
        dt_aggregate.strftime('%Y-%m-%d-%H-%M.tiff'),
    )

    with h5py.File(aggregatepath, 'r') as h5:
        array = h5['precipitation']
        mask = np.equal(array, h5.attrs['fill_value'])
        masked_array = np.ma.array(array, mask=mask)

        # Create the rd tiff.
        utils.makedir(os.path.dirname(tifpath_rd))
        gridtools.RasterLayer(
            array=masked_array,
            extent=h5.attrs['grid_extent'],
            projection=h5.attrs['grid_projection'],
            **rasterlayerkwargs
        ).save(tifpath_rd, rgba=True)
    logging.info('saved {}.'.format(os.path.basename(tifpath_rd)))
    logging.debug('saved {}.'.format(tifpath_rd))
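# Hypothetical usage sketch (not part of the module): render the GeoTIFF for
# one 5-minute aggregate, assuming the matching h5 file already exists under
# config.AGGREGATE_DIR. The timestamp value is illustrative only.
#
#     from datetime import datetime
#     create_geotiff(datetime(2013, 1, 1, 12, 0), code='5min')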
def create_png(products, **kwargs):
    """ Create image for products. This is a kind of sandbox version. """
    utils.makedir(config.IMG_DIR)

    # Load some images
    img_shape = shape_image()
    img_blue = plain_image(color=(0, 0, 127))
    img_shape_filled = shape_image_filled()

    # Get dutch time label
    tz_amsterdam = pytz.timezone('Europe/Amsterdam')
    tz_utc = pytz.timezone('UTC')

    # Loop products
    for product in products:
        utc = tz_utc.localize(product.datetime)
        amsterdam = utc.astimezone(tz_amsterdam)
        label = amsterdam.strftime('%Y-%m-%d %H:%M')
        label_from_kwargs = kwargs.get('label', '')
        if label_from_kwargs:
            label += ' ' + label_from_kwargs
        offset = 0.1, 0.9

        # Get data image
        try:
            with product.get() as h5:
                array = h5['precipitation'][...] / h5.attrs['composite_count']
                mask = np.equal(array, h5.attrs['fill_value'])
                img_radars = radars_image(h5=h5, label=label, offset=offset)
        except IOError:
            logging.debug('Does not exist: {}'.format(product.path))
            continue
        masked_array = np.ma.array(array, mask=mask)
        img_rain = data_image(masked_array, max_rain=2, threshold=0.008)

        timestamp = utils.datetime2timestamp(product.datetime)
        filename = '{}{}.{}'.format(
            timestamp,
            kwargs.get('postfix', ''),
            kwargs.get('format', 'png'),
        )

        # Merge and save
        path = os.path.join(config.IMG_DIR, filename)
        utils.merge([
            img_radars,
            img_rain,
            img_shape,
            img_shape_filled,
            img_blue,
        ]).save(path)
        logging.info('saved {}.'.format(os.path.basename(path)))
        logging.debug('saved {}.'.format(path))
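# Hypothetical usage sketch (not part of the module): create a labelled png
# per product, assuming `products` is an iterable of product objects exposing
# `datetime`, `path` and a `get()` context manager as used above. The kwarg
# values are illustrative only.
#
#     create_png(products, label='realtime', postfix='_rt', format='png')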
def create_png_for_animated_gif(products, **kwargs):
    """ Create image for products. This is the tweaked version that creates
    the pngs for use in the animated gif. """
    utils.makedir(config.IMG_DIR)

    # Load some images
    img_mapbox = mapbox_image()
    img_osm = osm_image()
    img_shape = shape_image()
    img_blue = plain_image(color=(0, 0, 127))
    img_shape_filled = shape_image_filled()

    # Get dutch time label
    tz_amsterdam = pytz.timezone('Europe/Amsterdam')
    tz_utc = pytz.timezone('UTC')

    # Loop products
    for product in products:
        utc = tz_utc.localize(product.datetime)
        amsterdam = utc.astimezone(tz_amsterdam)
        # label = amsterdam.strftime('%Y-%m-%d %H:%M')
        label = amsterdam.strftime('%H:%M')
        offset = 0.25, 0.82

        # Get data image
        with product.get() as h5:
            array = h5['precipitation'][...] / h5.attrs['composite_count']
            mask = np.equal(array, h5.attrs['fill_value'])
            img_radars = radars_image(h5=h5, label=label, offset=offset)
        masked_array = np.ma.array(array, mask=mask)
        img_rain = data_image(masked_array, max_rain=2, threshold=0.008)

        timestamp = utils.datetime2timestamp(product.datetime)
        filename = '{}{}.{}'.format(
            timestamp,
            kwargs.get('postfix', ''),
            kwargs.get('format', 'png'),
        )

        # Merge and save
        path = os.path.join(config.IMG_DIR, filename)
        utils.merge([
            img_radars,
            img_rain,
            img_mapbox,
            img_shape,
            img_shape_filled,
            img_blue,
        ]).save(path)
        logging.info('saved {}.'.format(os.path.basename(path)))
        logging.debug('saved {}.'.format(path))
def __init__(self, datetime, max_age=86400):
    """
    Set datetime and empty connection dictionary.

    Max age is in seconds, measured from datetime.
    """
    utils.makedir(config.SOURCE_DIR)
    self.datetime = datetime
    self.max_age = max_age
    self.connections = {}
    # Check what is already there.
    self.arrived = []
def create_tif(products, image_dir=None, **kwargs):
    """ Save a tif with metadata """
    if image_dir:
        target_dir = image_dir
    else:
        target_dir = config.IMG_DIR
    utils.makedir(target_dir)

    # Loop products
    for product in products:
        # Get data
        try:
            with h5py.File(product.path, 'r') as h5:
                data = np.ma.masked_equal(h5['precipitation'],
                                          h5.attrs['fill_value'])
                attrs = dict(h5.attrs)
        except IOError:
            logging.info('Not found: {}, skipping.'.format(product))
            continue

        # make a dataset
        raster_layer = gridtools.RasterLayer(
            array=data,
            extent=attrs['grid_extent'],
            projection=attrs['grid_projection'],
        )
        mem = raster_layer.create_dataset(datatype=6)
        mem.SetProjection(PROJECTION_RD_WKT)
        band = mem.GetRasterBand(1)
        band.WriteArray(data.filled(band.GetNoDataValue()))
        for k, v in attrs.items():
            if hasattr(v, 'tolist'):
                v = json.dumps(v.tolist())
            band.SetMetadataItem(str(k), str(v))

        timestamp = utils.datetime2timestamp(product.datetime)
        filename = '{}{}.{}'.format(
            timestamp,
            kwargs.get('postfix', ''),
            kwargs.get('format', 'png'),
        )
        path = os.path.join(target_dir, filename)
        driver = gdal.GetDriverByName(b'gtiff')
        driver.CreateCopy(path, mem)
        logging.info('saved {}.'.format(os.path.basename(path)))
        logging.debug('saved {}.'.format(path))
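# Hypothetical usage sketch (not part of the module): write GeoTIFFs with the
# h5 attributes copied into band metadata. Note that the file extension comes
# from kwargs.get('format', 'png'), so pass format='tif' explicitly if a .tif
# suffix is wanted; the output directory is illustrative only.
#
#     create_tif(products, image_dir='/tmp/tifs', format='tif')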
def get(self):
    """
    Return a readonly hdf5 dataset for requested scans.

    If the dataset is not available, it is created.

    If it is available, but lacks some scan requested, it is appended.
    """
    utils.makedir(os.path.dirname(self.path))
    try:
        # Improperly closed h5 files cannot be opened.
        dataset = h5py.File(self.path, 'a')
    except IOError:
        dataset = h5py.File(self.path, 'w')

    if len(dataset):
        logging.debug(
            'Multiscan file already has {}.'.format(', '.join(dataset)),
        )
    else:
        logging.debug('Starting with empty multiscan file.')

    for scancode in self.scancodes:
        scan = ScanSignature(
            scandatetime=self.multiscandatetime, scancode=scancode,
        ).get_scan(self.grid)
        if scan is None or scancode in dataset:
            continue
        if scan.is_readable():
            self._add(dataset=dataset, scan=scan)
        else:
            # Remove it.
            scanpath = scan.signature.get_scanpath()
            try:
                shutil.move(scanpath, config.RADAR_DIR)
                logging.warn('Removed corrupt scanfile {}'.format(
                    os.path.basename(scanpath),
                ))
            except (OSError, IOError):
                pass  # No permission.

    dataset.close()
    return h5py.File(self.path, 'r')
def publish_local(self, cascade=False):
    """ Publish to target dirs as configured in config. """
    # Prepare dirs
    logging.debug(
        'Preparing {} dirs.'.format(len(config.COPY_TARGET_DIRS)),
    )
    for target_dir in config.COPY_TARGET_DIRS:
        for path in [path
                     for d in config.PRODUCT_CODE.values()
                     for path in d.values()]:
            utils.makedir(os.path.join(target_dir, path))

    # Do the copying
    logging.debug('Copying publications.')
    for publication in self.publications(cascade=cascade):
        proddict = config.PRODUCT_CODE[publication.timeframe]
        for target_dir in config.COPY_TARGET_DIRS:
            target_subdir = os.path.join(
                target_dir,
                proddict[publication.prodcode],
            )
            shutil.copy(publication.path, target_subdir)
    logging.info('Local target dir copying complete.')
def create(self):
    """ Return newly created threddsfile. """
    utils.makedir(os.path.dirname(self.path))
    h5 = h5py.File(self.path)

    # East
    east = scans.BASEGRID.get_grid()[0][0]
    dataset = h5.create_dataset(
        'east', east.shape, east.dtype,
        compression='gzip', shuffle=True,
    )
    dataset[...] = east

    # North
    north = scans.BASEGRID.get_grid()[1][:, 0]
    dataset = h5.create_dataset(
        'north', north.shape, north.dtype,
        compression='gzip', shuffle=True,
    )
    dataset[...] = north

    # Time
    time = h5.create_dataset(
        'time', [self.timesteps], np.uint32,
        compression='gzip', shuffle=True,
    )
    time.attrs['standard_name'] = b'time'
    time.attrs['long_name'] = b'time'
    time.attrs['calendar'] = b'gregorian'
    time.attrs['units'] = self.datetime.strftime('seconds since %Y-%m-%d')
    time[...] = self._time()

    # Precipitation
    shape = scans.BASEGRID.get_shape() + tuple([self.timesteps])
    dataset = h5.create_dataset(
        'precipitation', shape, np.float32,
        fillvalue=config.NODATAVALUE,
        compression='gzip', shuffle=True, chunks=(20, 20, 24),
    )

    # Availability
    dataset = h5.create_dataset(
        'available', [self.timesteps], np.uint8, fillvalue=0,
        compression='gzip', shuffle=True,
    )
    dataset[...] = 0

    # Dimensions
    h5['precipitation'].dims.create_scale(h5['north'])
    h5['precipitation'].dims.create_scale(h5['east'])
    h5['precipitation'].dims.create_scale(h5['time'])
    h5['precipitation'].dims[0].attach_scale(h5['north'])
    h5['precipitation'].dims[1].attach_scale(h5['east'])
    h5['precipitation'].dims[2].attach_scale(h5['time'])

    h5['available'].dims.create_scale(h5['time'])
    h5['available'].dims[0].attach_scale(h5['time'])

    logging.info(
        'Created ThreddsFile {}'.format(os.path.basename(self.path)),
    )
    logging.debug(self.path)
    return h5