def downscale_wrapper( arr, affine, crs, baseline, output_filename, downscaling_operation, post_downscale_function ):
    '''
    Shift a 0-360 longitude grid to Greenwich-centering if needed, reproject /
    resample the anomalies array onto the baseline raster grid, then run the
    downscaling operation and write the result.

    ARGUMENTS:
        arr = (numpy.ndarray) anomalies array to downscale
        affine = (affine.Affine) source transform of `arr`
        crs = source crs -- NOTE(review): currently unused; 'epsg:4326' is hardwired below
        baseline = (rasterio dataset) open baseline raster to resample against
        output_filename = (str) path of the output GeoTiff
        downscaling_operation = (str) operation passed through to utils.downscale
        post_downscale_function = (callable or None) applied after downscaling

    RETURNS:
        whatever utils.downscale returns (presumably output_filename)
    '''
    # FIXME(review): `self` is referenced below but this is a free function with
    # no `self` parameter -- the body looks lifted from a method.  The rotate
    # branch will raise NameError if taken; confirm the intended data source
    # (probably `arr` and its lon coords) and repair.
    # rotate to Greenwich-centering when lons run 0-360
    if ( self.data.ds.lon > 200.0 ).any():
        dat, lons = utils.shiftgrid( 180., self.data.anomalies, self.historical.ds.lon, start=False )
        a, b, c, d, e, f, g, h, i = affine
        # BUGFIX: `affine` here is the parameter (an Affine instance) which
        # shadows the `affine` module, so `affine.Affine(...)` would raise
        # AttributeError -- build the flipped transform from the instance's class.
        src_transform = type( affine )( a, b, -180.0, d, e, 180.0 )
    else:
        dat, lons = ( self.data.ds, self.historical.ds.lon )
        src_transform = affine

    # reproject / resample
    src_crs = {'init':'epsg:4326'}
    src_nodata = None # DangerTown™

    baseline_meta = baseline.meta
    baseline_meta.update( compress='lzw' )
    # BUGFIX: was `baeline.read( 1 )` -- NameError typo for `baseline`
    output_arr = np.empty_like( baseline.read( 1 ) )

    # TODO: make this function available for manipulation if used for different needs
    reproject( arr, output_arr, src_transform=src_transform, src_crs=src_crs, src_nodata=src_nodata,
                dst_transform=baseline_meta['affine'], dst_crs=baseline_meta['crs'],
                dst_nodata=None, resampling=RESAMPLING.cubic_spline, SOURCE_EXTRA=1000 )
    # downscale
    return utils.downscale( arr, output_arr, output_filename, downscaling_operation,
                baseline_meta, post_downscale_function, mask=None, mask_value=0 )
def downscale( self, output_dir, prefix=None, ncpus=32 ):
    '''
    Downscale every anomaly timestep against the cycling 12-month baseline
    rasters and write one GeoTiff per timestep into `output_dir`.

    ARGUMENTS:
        output_dir = (str) directory to write the output GeoTiffs into
        prefix = (str or None) optional filename prefix; when None the name is
            built from variable / units / metric / model
        ncpus = (int) number of pathos worker processes

    RETURNS:
        output_dir (str)
    '''
    import affine
    import itertools
    from functools import partial
    from pathos import multiprocessing

    # build one output filename per month/year in the anomalies time series
    time = self.anomalies.time.to_pandas()
    time_suffix = [ '_'.join([ str(t.month), str(t.year) ]) for t in time ]
    if prefix:
        output_filenames = [ os.path.join( output_dir, '_'.join([ prefix, ts ]) + '.tif' ) for ts in time_suffix ]
    else:
        if not self.historical.units:
            units = 'units'
        else:
            units = self.historical.units
        output_filenames = [ os.path.join( output_dir, '_'.join([ self.historical.variable, units,
                self.metric, self.historical.model, ts ]) + '.tif' ) for ts in time_suffix ]

    # rotate to Greenwich-centering when lons run 0-360
    if ( self.anomalies.lon > 200.0 ).any():
        dat, lons = utils.shiftgrid( 180., self.anomalies, self.anomalies.lon, start=False )
        a, b, c, d, e, f, g, h, i = self.affine
        # flip it to the greenwich-centering
        src_transform = affine.Affine( a, b, -180.0, d, e, 180.0 )
    else:
        dat, lons = ( self.anomalies, self.anomalies.lon )
        src_transform = self.affine

    # open the 12 monthly baseline rasters and cycle them over all years;
    # list multiplication repeats the *same* dataset references, which is
    # what the itertools.repeat + flatten in the original also produced.
    # BUGFIX: use integer division -- `/` yields a float in Python 3 and a
    # float repeat-count raises TypeError.
    rstlist = [ rasterio.open( fn ) for fn in self.baseline.filelist ]
    rstlist = rstlist * ( self.anomalies.shape[0] // 12 )

    args = zip( self.anomalies, rstlist, output_filenames )
    args = [ {'anom':i.data, 'base':j, 'output_filename':k} for i,j,k in args ]

    downscaling_operation_switch = { 'absolute':'add', 'relative':'mult' }
    downscaling_operation = downscaling_operation_switch[ self.ds_type ]

    # anom_arr, baseline_arr, output_filename, downscaling_operation,
    # meta, post_downscale_function, mask=None, mask_value=0
    f = partial( self.interp_ds, src_transform=src_transform, downscaling_operation=downscaling_operation,
                    post_downscale_function=None, mask=None, mask_value=0 )

    pool = multiprocessing.Pool( ncpus )
    # TODO(review): `args[:11]` processes only the first 11 timesteps -- this
    # looks like a debugging leftover; confirm and drop the slice.
    out = pool.map( lambda x: f( **x ), args[:11] )
    # BUGFIX: close() must be called before join(); the original order
    # (join-then-close) is invalid for multiprocessing-style pools.
    pool.close()
    pool.join()
    return output_dir