def create_geotiff(dt_aggregate, code='5min'):
    """ Write an rd-projected geotiff for the aggregate at dt_aggregate. """
    # Resolve the source aggregate (h5) and target (tiff) locations.
    source_path = utils.PathHelper(
        basedir=config.AGGREGATE_DIR,
        code=code,
        template='{code}_{timestamp}.h5',
    ).path(dt_aggregate)
    target_path = os.path.join(
        config.IMG_DIR,
        'geotiff',
        'rd',
        dt_aggregate.strftime('%Y-%m-%d-%H-%M.tiff'),
    )
    color_kwargs = utils.rain_kwargs(name='jet', max_rain=2, threshold=0.008)
    with h5py.File(source_path, 'r') as h5:
        data = h5['precipitation']
        # Mask out cells equal to the declared fill value.
        no_data = np.equal(data, h5.attrs['fill_value'])
        masked = np.ma.array(data, mask=no_data)
        # Create the rd tiff.
        utils.makedir(os.path.dirname(target_path))
        layer = gridtools.RasterLayer(
            array=masked,
            extent=h5.attrs['grid_extent'],
            projection=h5.attrs['grid_projection'],
            **color_kwargs
        )
        layer.save(target_path, rgba=True)
        logging.info('saved {}.'.format(os.path.basename(target_path)))
        logging.debug('saved {}.'.format(target_path))
def __init__(self, prodcode, timeframe, datetime, radars=None,
             declutter=None):
    """ Store product identity and derive calibrate / ftp paths. """
    # Attributes
    self.datetime = datetime
    self.prodcode = prodcode
    self.timeframe = timeframe
    # Derived attributes
    if radars is None:
        self.radars = config.ALL_RADARS
    else:
        self.radars = radars
    if declutter is None:
        declutter = dict(size=config.DECLUTTER_SIZE,
                         history=config.DECLUTTER_HISTORY)
    self.declutter = declutter
    # determine product paths
    product_code = config.PRODUCT_CODE[self.timeframe][self.prodcode]
    helper = utils.PathHelper(basedir=config.CALIBRATE_DIR,
                              code=product_code,
                              template=config.PRODUCT_TEMPLATE)
    self.path = helper.path(datetime)
    self.ftp_path = os.path.join(product_code, os.path.basename(self.path))
def __init__(self, datetime=None, timeframe=None, prodcode='r', merge=True):
    """
    Return a threddsfile object configured with an url attribute
    that is suitable for use with opendap.
    """
    if datetime is None or timeframe is None:
        # Create a bare thredds_file object,
        # so get_for_product() does not break.
        return
    self.timeframe = timeframe
    self.datetime = self._datetime(datetime)
    self.timedelta = config.TIMEFRAME_DELTA[timeframe]
    self.timesteps = self._timesteps()
    self.prodcode = prodcode
    self.merge = merge
    full_code = config.PRODUCT_CODE[timeframe][prodcode]
    # Merged files drop the product-specific suffix from the code.
    code = full_code.split('_')[0] if merge else full_code
    self.url = utils.PathHelper(
        basedir=config.OPENDAP_ROOT,
        code=code,
        template=config.PRODUCT_TEMPLATE,
    ).path(self.datetime)
def get_for_product(cls, product, merge=False):
    """
    Return ThreddsFile instance to which product belongs.

    If merge == True, threddsfiles from all products are merged in
    one threddsfile per timeframe. The available variable will
    contain a flag that refers to the product that was stored at a
    particular time coordinate. The paths will be the same
    regardless of the productcode, and the data will only be
    overwritten if the new flag is equal or higher than the
    already existing flag. The flags are:
        2: Realtime
        3: Near-realtime
        4: Afterwards
    """
    tf = cls()
    tf.timeframe = product.timeframe
    tf.datetime = tf._datetime(product.datetime)
    tf.timedelta = config.TIMEFRAME_DELTA[product.timeframe]
    tf.timesteps = tf._timesteps()
    basecode = config.PRODUCT_CODE[product.timeframe][product.prodcode]
    if merge:
        # Merged files share one code per timeframe; the flag records
        # which product wrote each time coordinate.
        code = basecode.split('_')[0]
        tf.flag = cls.FLAGS[product.prodcode]
    else:
        code = basecode
        tf.flag = 1
    tf.path = utils.PathHelper(
        basedir=config.THREDDS_DIR,
        code=code,
        template=config.PRODUCT_TEMPLATE,
    ).path(tf.datetime)
    return tf
def get_path_helper(timeframe, prodcode):
    """ Return pathhelper for combination. """
    # Consistent products live in a different base directory.
    if utils.consistent_product_expected(prodcode=prodcode,
                                         timeframe=timeframe):
        basedir = config.CONSISTENT_DIR
    else:
        basedir = config.CALIBRATE_DIR
    return utils.PathHelper(
        basedir=basedir,
        code=config.PRODUCT_CODE[timeframe][prodcode],
        template=config.PRODUCT_TEMPLATE,
    )
def __init__(self, multiscandatetime, scancodes, grid, basedir):
    """Store multiscan identity and the path of its h5 file."""
    self.multiscandatetime = multiscandatetime
    self.scancodes = scancodes
    self.grid = grid
    # NOTE(review): the chained assignment makes both self.path and
    # self.pathhelper hold the resulting path *string* - not the
    # PathHelper instance the name suggests. Looks unintended; confirm
    # no caller relies on self.pathhelper before renaming or fixing.
    self.path = self.pathhelper = utils.PathHelper(
        basedir=basedir,
        code=config.MULTISCAN_CODE,
        template='{code}_{timestamp}.h5').path(multiscandatetime)
def command(target_path, range_text):
    """ Newstyle clutter gathering. """
    daterange = utils.DateRange(range_text)
    pathhelper = utils.PathHelper(basedir=config.MULTISCAN_DIR,
                                  code=config.MULTISCAN_CODE,
                                  template='{code}_{timestamp}.h5')
    result = dict()
    count = dict()
    # collect; the with-statement guarantees the logfile is closed even
    # when reading one of the multiscan files raises (the original
    # leaked the open handle in that case).
    logfile_path = os.path.join(os.path.dirname(target_path), 'clutter.log')
    with open(logfile_path, 'w') as logfile:
        for dt in daterange.iterdatetimes():
            path = pathhelper.path(dt)
            logger.info('Processing {}'.format(path))
            with h5py.File(path, 'r') as h5:
                for k, v in h5.iteritems():
                    logger.debug('Radar: {}'.format(k))
                    d = v['rain']
                    a = d[:]
                    # -9999 marks nodata; treat it as zero rain.
                    r = np.where(a == -9999, 0, a)
                    s = r.sum()
                    logfile.write('{}, {}, {}\n'.format(dt, k, s))
                    logger.debug('Sum: {}'.format(s))
                    if s > get_threshold(k):
                        # Too much rain to be clutter; skip this scan.
                        logger.debug('Skipping.')
                        continue
                    if k in result:
                        result[k] += r
                        count[k] += 1
                        logger.debug('Adding.')
                        continue
                    result[k] = r
                    count[k] = 1
                    logger.debug('Creating.')
        logger.info('Counts: {}'.format(count))
    # save mean clutter per radar plus bookkeeping attributes
    logger.info('Saving {}'.format(target_path))
    with h5py.File(target_path, 'w') as h5:
        for k in result:
            # Write to result
            d = h5.create_dataset(
                k,
                data=result[k] / count[k],
                dtype='f4',
                compression='lzf',
                shuffle=True,
            )
            d.attrs['cluttercount'] = count[k]
            d.attrs['threshold'] = get_threshold(k)
        h5.attrs['cluttercount'] = int(sum(count.values()) / len(count))
        h5.attrs['range'] = b'{} - {}'.format(daterange.start, daterange.stop)
    # use the module logger for consistency with the rest of this command
    # (the original called logging.info here).
    logger.info('Done summing clutter.')
def __init__(self, datetime):
    """ Derive the nowcast calibrate path and ftp path for datetime. """
    self.datetime = datetime
    # determine product paths
    code = config.NOWCAST_PRODUCT_CODE
    helper = utils.PathHelper(
        basedir=config.NOWCAST_CALIBRATE_DIR,
        code=code,
        template=config.PRODUCT_TEMPLATE,
    )
    self.path = helper.path(datetime)
    self.ftp_path = os.path.join(code, os.path.basename(self.path))
def __init__(self, datetime, prodcode, timeframe):
    """ Store identity plus backwards-compatible aliases; derive paths. """
    self.datetime = datetime
    self.date = datetime  # Backwards compatible
    self.prodcode = prodcode
    self.product = prodcode  # Backwards compatible
    self.timeframe = timeframe
    # determine product paths
    code = config.PRODUCT_CODE[self.timeframe][self.prodcode]
    helper = utils.PathHelper(basedir=config.CONSISTENT_DIR,
                              code=code,
                              template=config.PRODUCT_TEMPLATE)
    self.path = helper.path(datetime)
    self.ftp_path = os.path.join(code, os.path.basename(self.path))
def command(text):
    """ Check existence of realtime files. """
    # prepare
    period = periods.Period(text)
    recently = Datetime.utcnow() - TOLERANCE
    helper = utils.PathHelper(basedir=config.CALIBRATE_DIR,
                              code='TF0005_R',
                              template=config.PRODUCT_TEMPLATE)
    # the check
    for datetime in period:
        # too recent to be expected on disk yet
        if datetime > recently:
            continue
        if os.path.exists(helper.path(datetime)):
            continue
        # missing: log the command that would regenerate it
        timestamp = utils.datetime2timestamp(datetime)
        logger.debug('bin/master -r {}'.format(timestamp))
def make(self):
    """ Copy aggregate. """
    source_path = utils.PathHelper(
        basedir=config.NOWCAST_AGGREGATE_DIR,
        code='5min',
        template='{code}_{timestamp}.h5',
    ).path(self.datetime)
    # Nothing to copy if the aggregate does not exist (yet).
    if not os.path.exists(source_path):
        return
    # Best-effort directory creation: only swallow OSError (typically
    # "directory exists"). The original bare except also silenced
    # KeyboardInterrupt and genuine failures.
    try:
        os.makedirs(os.path.dirname(self.path))
    except OSError:
        pass
    shutil.copy(source_path, self.path)
    logging.info('Create CopiedProduct {}'.format(
        os.path.basename(self.path)))
    logging.debug(self.path)
def command(target_path, range_text):
    """ Rain histogram gathering. """
    # collect per-radar rain histograms over the requested range
    daterange = utils.DateRange(range_text)
    pathhelper = utils.PathHelper(
        basedir=config.MULTISCAN_DIR,
        code=config.MULTISCAN_CODE,
        template='{code}_{timestamp}.h5'
    )
    result = dict()
    for dt in daterange.iterdatetimes():
        path = pathhelper.path(dt)
        logger.info('Processing {}'.format(path))
        with h5py.File(path, 'r') as h5:
            for k, v in h5.iteritems():
                logger.debug('Radar: {}'.format(k))
                r = v['rain'][:]
                # -9999 marks nodata in the multiscan; treat as dry.
                r[r == -9999] = 0
                if k not in result:
                    result[k] = Histogram(edges=EDGES, shape=r.shape)
                result[k].add(r)
    # save histogram arrays plus bookkeeping attributes
    logger.info('Saving: {}'.format(target_path))
    with h5py.File(target_path, 'w') as h5:
        for k, v in result.items():
            d = h5.create_dataset(
                k, data=v.array, dtype='f4', compression='lzf', shuffle=True,
            )
            d.attrs['count'] = v.count
            d.attrs['edges'] = v.edges
        h5.attrs['range'] = b'{} - {}'.format(
            daterange.start, daterange.stop
        )
    # use the module logger for consistency with the rest of this command
    # (the original called logging.info here).
    logger.info('Done creating histogram.')
def _aggregate_radar(self, aggregatedatetime, method):
    """
    Return a gdal dataset with the 24h composite sum ending just
    before aggregatedatetime for the given method, writing tif / png /
    count-tif side products to the aggregate cache directory.
    """
    template = 'aggregate.m{method}_%Y%m%d%H%M%S.{extension}'
    path = os.path.join(
        config.AGGREGATE_DIR,
        aggregatedatetime.strftime(template).format(
            method=method,
            extension='tif',
        ),
    )
    # Serve from the on-disk cache when the tif was already produced.
    if os.path.exists(path):
        logging.debug('We have this one in cache: {}'.format(
            os.path.basename(path),
        ))
        return gdal.Open(path)
    logging.info('doing {}'.format(aggregatedatetime))
    phkwargs = dict(
        basedir=config.COMPOSITE_DIR,
        template='{code}_{timestamp}.tif',
    )
    ph = utils.PathHelper(code=self.CODES[method], **phkwargs)
    # Range of composites: the 24 hours ending 5 minutes before the
    # aggregate datetime (inclusive bounds via DateRange text).
    datetime_start = aggregatedatetime - datetime.timedelta(days=1)
    datetime_stop = aggregatedatetime - datetime.timedelta(minutes=5)
    text = '{}-{}'.format(
        datetime_start.strftime('%Y%m%d%H%M'),
        datetime_stop.strftime('%Y%m%d%H%M'),
    )
    scandatetimes = utils.DateRange(text).iterdatetimes()
    # In-memory copy of a composite to inherit geometry / projection;
    # NOTE(review): 2011-01-01 appears to be a fixed date whose
    # composite is assumed to exist - confirm, this breaks otherwise.
    dataset = gdal.GetDriverByName(b'mem').CreateCopy(
        b'', gdal.Open(ph.path(datetime.datetime(2011, 1, 1), )),
    )
    rain = np.ma.zeros(gridtools.BaseGrid(dataset).get_shape())
    count = np.zeros(gridtools.BaseGrid(dataset).get_shape())
    for scandatetime in scandatetimes:
        logging.debug('adding {}'.format(scandatetime))
        composite = gdal.Open(ph.path(scandatetime))
        # Missing composites are tolerated; they simply reduce count.
        if composite is None:
            logging.warn('No composite found for method {} at {}'.format(
                method,
                scandatetime,
            ))
            continue
        ma = gridtools.ds2ma(composite)
        count += ~ma.mask  # Where no mask, we count rain
        rain += ma.filled(0)
    rain /= 12  # Composite unit is mm/hr, but we add every 5 minutes.
    # Cells never seen in any composite become nodata.
    rain.mask = np.less(count, 1)
    dataset.GetRasterBand(1).WriteArray(rain.filled(config.NODATAVALUE))
    # Persist the aggregate tif (the cache checked above) and a png.
    gdal.GetDriverByName(b'GTiff').CreateCopy(path, dataset, 0,
                                              ['COMPRESS=DEFLATE'])
    gridtools.RasterLayer(dataset, **utils.rain_kwargs(name='jet')).save(
        path.replace('.tif', '.png'),
    )
    # Adding the counts as tif
    # NOTE(review): count_dataset is never explicitly flushed or
    # closed; presumably GDAL writes it on object destruction - verify
    # the file actually lands on disk.
    count_dataset = gdal.GetDriverByName(b'gtiff').Create(
        path.replace('.tif', '_count.tif'),
        dataset.RasterXSize,
        dataset.RasterYSize,
        1,
        gdalconst.GDT_UInt16,
    )
    count_dataset.GetRasterBand(1).WriteArray(count)
    return dataset
def get_path(self):
    """ Return the file path where this aggregate should be stored. """
    return utils.PathHelper(
        basedir=self.basedir,
        code=self.code,
        template='{code}_{timestamp}.h5',
    ).path(self.datetime)