def _resample_temporal(self, region, shapefile=None):
    """Helper method that calls temporal resampling routines.

    Reads the intermediate (spatially resampled) netCDF file for the
    region, averages the daily layers into the destination temporal
    resolution and writes the result out, then removes the temp file.

    Parameters:
    region : str
        Identifier of the region in the shapefile. If the default
        shapefile is used, this would be the FIPS country code.
    shapefile : str, optional
        Path to shape file, uses "world country admin boundary
        shapefile" by default.
    """
    # Intermediate file produced by the spatial resampling step.
    src_file = self._get_tmp_filepath('spatial', region)
    if not os.path.exists(src_file):
        print '[Info] No data available for this period'
        return False
    # NOTE(review): this assignment is redundant — `data` is rebound to
    # a fresh dict at the start of every loop iteration below.
    data = {}
    variables, _, period = nc.get_properties(src_file)
    # One timestamp per destination interval (e.g. one per dekad).
    dtindex = dt.get_dtindex(self.dest_temp_res, period[0], period[1])
    for date in dtindex:
        # skip if data for period is not complete
        # if date > period[1]:
        #    continue
        if self.dest_temp_res == 'dekad':
            # Dekads end on day 10, 20 or month-end: the first two span
            # 10 days ending on `date`; the third starts on the 21st.
            if date.day < 21:
                begin = datetime(date.year, date.month,
                                 date.day - 10 + 1)
            else:
                begin = datetime(date.year, date.month, 21)
            end = date
        else:
            # Other resolutions: average everything from the start of
            # the available period up to the current timestamp.
            begin = period[0]
            end = date
        data = {}
        metadata = {}
        for var in variables:
            img, _, _, meta = \
                nc.read_variable(src_file, var, begin, end)
            metadata[var] = meta
            # Collapse the stack of layers into one averaged image,
            # ignoring destination NaN values.
            data[var] = average_layers(img, self.dest_nan_value)
        # NOTE(review): destination comes from the self.src_file
        # mapping — looks intentional (per-region output path) but
        # confirm against the class that populates self.src_file.
        dest_file = self.src_file[region]
        nc.save_image(data, date, region, metadata, dest_file,
                      self.dest_start_date, self.dest_sp_res,
                      self.dest_nan_value, shapefile,
                      self.dest_temp_res)
    # delete intermediate netCDF file
    print ''
    os.unlink(src_file)
def test_save_image(self):
    """Save the fixture image and verify stored data, mask and attributes."""
    save_image(self.image, self.timestamp, self.region, self.metadata,
               self.testfilename, self.start_date, self.sp_res,
               temp_res=self.temp_res)
    expected_mask = np.array(self.mask, dtype=bool)
    with Dataset(self.testfilename) as nc_data:
        variable = nc_data.variables[self.variable]
        first_layer = variable[0]
        nptest.assert_array_equal(self.data, first_layer)
        nptest.assert_array_equal(expected_mask, first_layer.mask)
        assert variable.getncattr('_FillValue') == self.fill_value
        expected_attr = self.metadata[self.variable]['Attribute1']
        assert variable.getncattr('Attribute1') == expected_attr
def test_read_variable(self):
    """Round-trip: save an image, read it back and compare contents."""
    save_image(self.image, self.timestamp, self.region, self.metadata,
               self.testfilename, self.start_date, self.sp_res,
               temp_res=self.temp_res)
    img, lons, lats, meta = read_variable(self.testfilename,
                                          self.variable,
                                          self.timestamp)
    nptest.assert_array_equal(img, self.data)
    nptest.assert_array_equal(lons, self.lon)
    nptest.assert_array_equal(lats, self.lat)
    assert meta == self.metadata[self.variable]
def test_get_properties(self):
    """Check variables, dimensions and period of a freshly saved file."""
    save_image(self.image, self.timestamp, self.region, self.metadata,
               self.testfilename, self.start_date, self.sp_res,
               temp_res=self.temp_res)
    variables, dimensions, period = get_properties(self.testfilename)
    start, end = period
    assert variables[0] == self.variable
    assert dimensions == ['lat', 'lon', 'time']
    # Both period bounds must fall on the saved timestamp's day.
    assert (self.timestamp - start).days == 0
    assert (self.timestamp - end).days == 0
def test_get_properties(self):
    """Verify get_properties reports the saved variable and period start."""
    save_image(self.image, self.timestamp, self.region, self.metadata,
               self.testfilename, self.start_date, self.sp_res,
               temp_res=self.temp_res)
    variables, dimensions, period = get_properties(self.testfilename)
    delta = self.timestamp - period[0]
    assert self.variable in variables
    assert dimensions == ["lat", "lon", "time"]
    assert delta.days == 0
def _resample_spatial(self, region, begin, end, delete_rawdata,
                      shapefile=None):
    """Helper method that calls spatial resampling routines.

    Iterates over the raw-data directory, resamples each file that
    falls inside [begin, end] onto the region grid and writes it to a
    netCDF file (directly to the destination file when the temporal
    resolution already matches, otherwise to a temp file).

    Parameters:
    region : str
        FIPS country code
        (https://en.wikipedia.org/wiki/FIPS_country_code)
    begin : datetime
        Start date of resampling
    end : datetime
        End date of resampling
    delete_rawdata : bool
        True if original downloaded files should be deleted after
        resampling
    """
    dest_file = self._get_tmp_filepath('spatial', region)
    dirList = os.listdir(self.rawdata_path)
    dirList.sort()
    # Global data uses a regular lat/lon grid; otherwise build a grid
    # clipped to the region's shape.
    if region == 'global':
        grid = gr.RegularGrid(sp_res=self.dest_sp_res)
    else:
        grid = gr.ShapeGrid(region, self.dest_sp_res, shapefile)
    for item in dirList:
        src_file = os.path.join(self.rawdata_path, item)
        # Date is parsed from the file name.
        fdate = get_file_date(item, self.filedate)
        # Skip files outside the requested date range.
        if begin is not None:
            if fdate < begin:
                continue
        if end is not None:
            if fdate > end:
                continue
        # Unpack archives and resample the file found inside.
        if check_compressed(src_file):
            dirname = os.path.splitext(item)[0]
            dirpath = os.path.join(self.rawdata_path, dirname)
            unpack(src_file)
            src_file = select_file(os.listdir(dirpath))
            src_file = os.path.join(dirpath, src_file)
        # NOTE(review): these two range checks repeat the filters above
        # and are unreachable — out-of-range files already hit
        # `continue` before unpacking. Presumably leftover code; their
        # cleanup of unpacked directories can never run here.
        if begin is not None:
            if fdate < begin:
                if check_compressed(item):
                    shutil.rmtree(os.path.join(
                        self.rawdata_path, os.path.splitext(item)[0]))
                continue
        if end is not None:
            if fdate > end:
                if check_compressed(item):
                    shutil.rmtree(os.path.join(
                        self.rawdata_path, os.path.splitext(item)[0]))
                continue
        # Progress indicator, one dot per processed file (Python 2
        # trailing comma suppresses the newline).
        print '.',
        image, _, _, _, timestamp, metadata = \
            resample_to_shape(src_file, region, self.dest_sp_res,
                              grid, self.name, self.nan_value,
                              self.dest_nan_value, self.variables,
                              shapefile)
        # Fall back to the file-name date when the source carries no
        # timestamp of its own.
        if timestamp is None:
            timestamp = get_file_date(item, self.filedate)
        if self.temp_res == self.dest_temp_res:
            # Temporal resolution already matches: write straight into
            # the final per-region data file.
            filename = (region + '_' + str(self.dest_sp_res) + '_'
                        + str(self.dest_temp_res) + '.nc')
            dfile = os.path.join(self.data_path, filename)
            nc.save_image(image, timestamp, region, metadata, dfile,
                          self.dest_start_date, self.dest_sp_res,
                          self.dest_nan_value, shapefile,
                          self.dest_temp_res)
        else:
            # Otherwise buffer into a temp file for the later temporal
            # resampling step.
            nc.write_tmp_file(image, timestamp, region, metadata,
                              dest_file, self.dest_start_date,
                              self.dest_sp_res, self.dest_nan_value,
                              shapefile)
        # deletes unpacked files if existing
        if check_compressed(item):
            shutil.rmtree(os.path.join(self.rawdata_path,
                                       os.path.splitext(item)[0]))
    print ''
def setUp(self):
    """Create fixtures and NetCDF/HDF5/png test files on disk.

    Builds a small masked 3x6 test image with matching metadata, then
    writes it out as a NetCDF file, an HDF5 file and a png so the
    individual format readers can be tested against known content.
    """
    self.sp_res = 60
    self.region = 'UG'
    self.timestamp = datetime.today()
    self.start_date = datetime.today()
    self.temp_res = 'day'
    self.fill_value = -99
    self.variable = 'data'

    # create image: checkerboard-masked array of ones
    self.shape = (3, 6)
    self.mask = np.array([[1, 0, 1, 0, 1, 0],
                          [0, 1, 0, 1, 0, 1],
                          [1, 0, 1, 0, 1, 0]])
    self.image = {}
    self.data = np.ma.array(np.ones(self.shape), mask=self.mask,
                            fill_value=self.fill_value)
    # Write the fill value into the underlying buffer too, so the raw
    # (unmasked) data matches what ends up stored on disk.
    self.data.data[np.where(self.mask == 1)] = self.fill_value
    self.image['data'] = self.data
    self.image['data2'] = self.data * 2

    # create metadata; 'data3' has metadata but no image — presumably
    # exercises handling of metadata without a matching variable.
    self.metadata = {'data': {'Attribute1': 'Value1'},
                     'data2': {'Attribut2': 'Value2'},
                     'data3': {'Attribut3': 'Value3'}}

    if not os.path.exists(os.path.join(curpath(), 'data')):
        os.mkdir(os.path.join(curpath(), 'data'))

    self.grid = gr.ShapeGrid(self.region, self.sp_res)
    self.globalgrid = gr.RegularGrid(sp_res=self.sp_res)

    # Build NetCDF testfile
    self.ncfile = os.path.join(curpath(), 'data', 'test_nc.nc')
    if os.path.exists(self.ncfile):
        os.remove(self.ncfile)
    save_image(self.image, self.timestamp, 'global', self.metadata,
               self.ncfile, self.start_date, self.sp_res,
               temp_res=self.temp_res)

    # Build HDF5 testfile
    self.h5file = os.path.join(curpath(), 'data', 'tests_hdf5.h5')
    if os.path.exists(self.h5file):
        os.remove(self.h5file)
    with h5py.File(self.h5file, 'w') as hdf5_file:
        group = hdf5_file.create_group('group')
        for dataset_name in self.image.keys():
            attributes = self.metadata[dataset_name]
            write_data = self.image[dataset_name]
            dataset = group.create_dataset(dataset_name,
                                           write_data.shape,
                                           write_data.dtype,
                                           write_data)
            for key in attributes:
                dataset.attrs[key] = attributes[key]

    # Build png Testfile
    self.pngfile = os.path.join(curpath(), 'data', 'test_png.png')
    if os.path.exists(self.pngfile):
        os.remove(self.pngfile)
    # Upscale each pixel to an n x n block so the png has a usable size.
    n = 60
    pngimg = np.kron(np.copy(self.data), np.ones((n, n)))
    # np.nan instead of np.NAN: the NAN alias was removed in NumPy 2.0.
    pngimg[pngimg == self.fill_value] = np.nan
    plt.imsave(self.pngfile, pngimg)
def setUp(self):
    """Create fixtures and NetCDF/HDF5/png test files on disk.

    Builds a small masked 3x6 test image with matching metadata, then
    writes it out as a NetCDF file, an HDF5 file and a png so the
    individual format readers can be tested against known content.
    """
    self.sp_res = 60
    self.region = 'UG'
    self.timestamp = datetime.today()
    self.start_date = datetime.today()
    self.temp_res = 'day'
    self.fill_value = -99
    self.variable = 'data'

    # create image: checkerboard-masked array of ones
    self.shape = (3, 6)
    self.mask = np.array([[1, 0, 1, 0, 1, 0],
                          [0, 1, 0, 1, 0, 1],
                          [1, 0, 1, 0, 1, 0]])
    self.image = {}
    self.data = np.ma.array(np.ones(self.shape), mask=self.mask,
                            fill_value=self.fill_value)
    # Write the fill value into the underlying buffer too, so the raw
    # (unmasked) data matches what ends up stored on disk.
    self.data.data[np.where(self.mask == 1)] = self.fill_value
    self.image['data'] = self.data
    self.image['data2'] = self.data * 2

    # create metadata; 'data3' has metadata but no image — presumably
    # exercises handling of metadata without a matching variable.
    self.metadata = {'data': {'Attribute1': 'Value1'},
                     'data2': {'Attribut2': 'Value2'},
                     'data3': {'Attribut3': 'Value3'}}

    if not os.path.exists(os.path.join(curpath(), 'data')):
        os.mkdir(os.path.join(curpath(), 'data'))

    self.grid = gr.ShapeGrid(self.region, self.sp_res)
    self.globalgrid = gr.RegularGrid(sp_res=self.sp_res)

    # Build NetCDF testfile
    self.ncfile = os.path.join(curpath(), 'data', 'test_nc.nc')
    if os.path.exists(self.ncfile):
        os.remove(self.ncfile)
    save_image(self.image, self.timestamp, 'global', self.metadata,
               self.ncfile, self.start_date, self.sp_res,
               temp_res=self.temp_res)

    # Build HDF5 testfile
    self.h5file = os.path.join(curpath(), 'data', 'tests_hdf5.h5')
    if os.path.exists(self.h5file):
        os.remove(self.h5file)
    with h5py.File(self.h5file, 'w') as hdf5_file:
        group = hdf5_file.create_group('group')
        for dataset_name in self.image.keys():
            attributes = self.metadata[dataset_name]
            write_data = self.image[dataset_name]
            dataset = group.create_dataset(dataset_name,
                                           write_data.shape,
                                           write_data.dtype,
                                           write_data)
            for key in attributes:
                dataset.attrs[key] = attributes[key]

    # Build png Testfile
    self.pngfile = os.path.join(curpath(), 'data', 'test_png.png')
    if os.path.exists(self.pngfile):
        os.remove(self.pngfile)
    # Upscale each pixel to an n x n block so the png has a usable size.
    n = 60
    pngimg = np.kron(np.copy(self.data), np.ones((n, n)))
    # np.nan instead of np.NAN: the NAN alias was removed in NumPy 2.0.
    pngimg[pngimg == self.fill_value] = np.nan
    plt.imsave(self.pngfile, pngimg)
def _resample_spatial(self, region, begin, end, delete_rawdata,
                      shapefile=None):
    """Helper method that calls spatial resampling routines.

    Iterates over the raw-data directory, resamples each file that
    falls inside [begin, end] onto the region grid and writes it to a
    netCDF file (directly to the destination file when the temporal
    resolution already matches, otherwise to a temp file).

    Parameters:
    region : str
        FIPS country code
        (https://en.wikipedia.org/wiki/FIPS_country_code)
    begin : datetime
        Start date of resampling
    end : datetime
        End date of resampling
    delete_rawdata : bool
        True if original downloaded files should be deleted after
        resampling
    """
    dest_file = self._get_tmp_filepath('spatial', region)
    dirList = os.listdir(self.rawdata_path)
    dirList.sort()
    # Global data uses a regular lat/lon grid; otherwise build a grid
    # clipped to the region's shape.
    if region == 'global':
        grid = gr.RegularGrid(sp_res=self.dest_sp_res)
    else:
        grid = gr.ShapeGrid(region, self.dest_sp_res, shapefile)
    for item in dirList:
        src_file = os.path.join(self.rawdata_path, item)
        # Date is parsed from the file name.
        fdate = get_file_date(item, self.filedate)
        # Skip files outside the requested date range.
        if begin is not None:
            if fdate < begin:
                continue
        if end is not None:
            if fdate > end:
                continue
        # Unpack archives and resample the file found inside.
        if check_compressed(src_file):
            dirname = os.path.splitext(item)[0]
            dirpath = os.path.join(self.rawdata_path, dirname)
            unpack(src_file)
            src_file = select_file(os.listdir(dirpath))
            src_file = os.path.join(dirpath, src_file)
        # NOTE(review): these two range checks repeat the filters above
        # and are unreachable — out-of-range files already hit
        # `continue` before unpacking. Presumably leftover code.
        if begin is not None:
            if fdate < begin:
                if check_compressed(item):
                    shutil.rmtree(
                        os.path.join(self.rawdata_path,
                                     os.path.splitext(item)[0]))
                continue
        if end is not None:
            if fdate > end:
                if check_compressed(item):
                    shutil.rmtree(
                        os.path.join(self.rawdata_path,
                                     os.path.splitext(item)[0]))
                continue
        # Progress indicator, one dot per processed file (Python 2
        # trailing comma suppresses the newline).
        print '.',
        try:
            image, _, _, _, timestamp, metadata = \
                resample_to_shape(src_file, region, self.dest_sp_res,
                                  grid, self.name, self.nan_value,
                                  self.dest_nan_value, self.variables,
                                  shapefile)
        except ValueError:
            # NOTE(review): this returns a message string and aborts
            # the whole loop, skipping all remaining raw files and any
            # unpacked-directory cleanup. Plausibly intentional if the
            # failure is region-wide — confirm; otherwise `continue`
            # may be meant here.
            print "[INFO] no data available for that region."
            return "[INFO] no data available for that region."
        # Fall back to the file-name date when the source carries no
        # timestamp of its own.
        if timestamp is None:
            timestamp = get_file_date(item, self.filedate)
        if self.temp_res == self.dest_temp_res:
            # Temporal resolution already matches: write straight into
            # the final per-region data file.
            filename = (region + '_' + str(self.dest_sp_res) + '_'
                        + str(self.dest_temp_res) + '.nc')
            dfile = os.path.join(self.data_path, filename)
            nc.save_image(image, timestamp, region, metadata, dfile,
                          self.dest_start_date, self.dest_sp_res,
                          self.dest_nan_value, shapefile,
                          self.dest_temp_res)
        else:
            # Otherwise buffer into a temp file for the later temporal
            # resampling step.
            nc.write_tmp_file(image, timestamp, region, metadata,
                              dest_file, self.dest_start_date,
                              self.dest_sp_res, self.dest_nan_value,
                              shapefile)
        # deletes unpacked files if existing
        if check_compressed(item):
            shutil.rmtree(
                os.path.join(self.rawdata_path,
                             os.path.splitext(item)[0]))
    print ''