def CCI_genIO(valid_gpis, start_date, end_date, plot=False):
    # convert the requested time span to julian dates
    start_jd = julian.julday(start_date.month, start_date.day, start_date.year,
                             start_date.hour, start_date.minute,
                             start_date.second)
    end_jd = julian.julday(end_date.month, end_date.day, end_date.year,
                           end_date.hour, end_date.minute, end_date.second)

    # find the ESA CCI grid points closest to the requested lon/lat positions
    # and the cells they belong to
    parent_grid = cci_grid.ESA_CCI_SM_grid_v4_1_indl()
    nearest_gpis = parent_grid.find_nearest_gpi(valid_gpis['lon'],
                                                valid_gpis['lat'])
    nearest_gpis = np.unique(nearest_gpis[0])
    cells = parent_grid.gpi2cell(nearest_gpis)

    header = 'jd,sm,sm_noise,sensor,freqband,nobs,year,month,day'
    descr = [('year', np.uint), ('month', np.uint), ('day', np.uint)]

    for cell in sorted(np.unique(cells)):
        # build a single-cell grid so the reader only iterates over this cell
        gpis, lons, lats = parent_grid.grid_points_for_cell(cell)
        grid = CellGrid(lons, lats, np.ones_like(lons, dtype=np.int16) * cell,
                        gpis=gpis)

        cfg_path = ('/home/ipfeil/GitRepos/rs-data-readers/rsdata/' +
                    'ESA_CCI_SM/datasets/')
        version = 'ESA_CCI_SM_v02.3'
        param = 'esa_cci_sm_monthly'
        cci_io = ESA_CCI_SM(version=version, parameter=param, grid=grid,
                            cfg_path=cfg_path)

        for ts, gp in cci_io.iter_ts():
            if gp not in nearest_gpis:
                continue

            # restrict the time series to the requested period
            valid_date_idx = np.where((ts['jd'] >= start_jd) &
                                      (ts['jd'] <= end_jd))[0]
            ts_valid_dates = ts[valid_date_idx]

            # add year/month/day columns derived from the julian dates
            ts_dates = add_field(ts_valid_dates, descr)
            dates = julian.julian2datetime(ts_dates['jd'])
            ts_dates['year'] = [date.year for date in dates]
            ts_dates['month'] = [date.month for date in dates]
            ts_dates['day'] = [date.day for date in dates]

            np.savetxt('/media/sf_D/CCI_csv/' + str(gp) + '.csv', ts_dates,
                       delimiter=',', header=header)

            if plot:
                # plot only valid observations (-999999 marks missing values)
                valid_ind = np.where(ts_valid_dates['sm'] != -999999)
                dates = julian.julian2datetime(ts_valid_dates['jd'][valid_ind])
                plt.plot(dates, ts_valid_dates['sm'][valid_ind])
                plt.title('ESA CCI SM combined monthly average, gpi: ' +
                          str(gp))
                plt.xlabel('date')
                plt.ylabel('soil moisture [%]')
                plt.show()
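# Hypothetical usage sketch for CCI_genIO (added, not from the original
# source): the lon/lat values are placeholders, and the hard-coded
# rs-data-readers dataset path inside the function must exist for the call to
# actually run.
def example_cci_genio():
    from datetime import datetime
    import numpy as np
    gpis_of_interest = {'lon': np.array([16.37, 15.44]),
                        'lat': np.array([48.21, 47.07])}
    CCI_genIO(gpis_of_interest,
              start_date=datetime(2010, 1, 1),
              end_date=datetime(2012, 12, 31),
              plot=True)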
def moving_average(Ser, window_size=1):
    '''
    Applies a moving average (box) filter on an input time series

    Parameters
    ----------
    Ser : pandas.Series
        (index must be a DateTimeIndex or julian date)
    window_size : float, optional
        The size of the moving_average window [days] that will be applied on
        the input Series
        Default: 1

    Returns
    -------
    Ser : pandas.Series
        moving-average filtered time series
    '''
    # if index is datetimeindex then convert it to julian date
    if type(Ser.index) == pd.DatetimeIndex:
        jd_index = julday(Ser.index.month, Ser.index.day, Ser.index.year,
                          Ser.index.hour, Ser.index.minute, Ser.index.second)
    else:
        jd_index = Ser.index.values

    filtered = boxcar_filter(np.squeeze(Ser.values.astype(np.double)),
                             jd_index.astype(np.double), window=window_size)
    result = pd.Series(filtered, index=Ser.index)
    return result
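# A minimal usage sketch (added, not from the original source): smooth a daily
# series with a 3-day box window. Assumes the julday and boxcar_filter helpers
# used by moving_average above are importable in this scope.
def example_moving_average():
    import numpy as np
    import pandas as pd
    ser = pd.Series(np.arange(10, dtype=np.double),
                    index=pd.date_range('2016-05-01', periods=10, freq='D'))
    smoothed = moving_average(ser, window_size=3)
    return smoothed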
def test_julday_single_arrays():
    jds = julday(np.array([5]), np.array([25]), np.array([2016]),
                 np.array([10]), np.array([20]), np.array([11]))
    jds_should = np.array([2457533.93068287])
    nptest.assert_almost_equal(jds, jds_should)
def test_julday_arrays():
    jds = julday(np.array([5, 5]), np.array([25, 25]),
                 np.array([2016, 2016]), np.array([10, 10]),
                 np.array([20, 20]), np.array([11, 11]))
    jds_should = np.array([2457533.93068287, 2457533.93068287])
    nptest.assert_almost_equal(jds, jds_should)
def moving_average(Ser, window_size=1, fillna=False, min_obs=1):
    '''
    Applies a moving average (box) filter on an input time series

    Parameters
    ----------
    Ser : pandas.Series
        (index must be a DateTimeIndex or julian date)
    window_size : float, optional
        The size of the moving_average window [days] that will be applied on
        the input Series
        Default: 1
    fillna : bool, optional
        Fill nan values at the center window value
    min_obs : int
        The minimum amount of observations necessary for a valid moving
        average

    Returns
    -------
    Ser : pandas.Series
        moving-average filtered time series
    '''
    # if index is datetimeindex then convert it to julian date
    if type(Ser.index) == pd.DatetimeIndex:
        jd_index = julday(np.asarray(Ser.index.month),
                          np.asarray(Ser.index.day),
                          np.asarray(Ser.index.year),
                          np.asarray(Ser.index.hour),
                          np.asarray(Ser.index.minute),
                          np.asarray(Ser.index.second))
    else:
        jd_index = Ser.index.values

    filtered = boxcar_filter(
        np.atleast_1d(np.squeeze(Ser.values.astype(np.double))),
        jd_index.astype(np.double), window=window_size,
        fillna=fillna, min_obs=min_obs)
    result = pd.Series(filtered, index=Ser.index)
    return result
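# Sketch of the extended variant above (added, hypothetical example data):
# with fillna=True, NaN gaps are filled from the surrounding window as long as
# at least min_obs observations fall inside it.
def example_moving_average_fillna():
    import numpy as np
    import pandas as pd
    ser = pd.Series([0.20, np.nan, 0.30, np.nan, 0.25],
                    index=pd.date_range('2016-05-01', periods=5, freq='D'))
    return moving_average(ser, window_size=3, fillna=True, min_obs=2)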
def moving_average(Ser, window_size=1):
    '''
    Applies a moving average (box) filter on an input time series

    Parameters
    ----------
    Ser : pandas.Series
        (index must be a DateTimeIndex or julian date)
    window_size : float, optional
        The size of the moving_average window [days] that will be applied on
        the input Series
        Default: 1

    Returns
    -------
    Ser : pandas.Series
        moving-average filtered time series
    '''
    # if index is datetimeindex then convert it to julian date
    if type(Ser.index) == pd.DatetimeIndex:
        jd_index = julday(np.asarray(Ser.index.month),
                          np.asarray(Ser.index.day),
                          np.asarray(Ser.index.year),
                          np.asarray(Ser.index.hour),
                          np.asarray(Ser.index.minute),
                          np.asarray(Ser.index.second))
    else:
        jd_index = Ser.index.values

    filtered = boxcar_filter(
        np.atleast_1d(np.squeeze(Ser.values.astype(np.double))),
        jd_index.astype(np.double), window=window_size)
    result = pd.Series(filtered, index=Ser.index)
    return result
def test_julday():
    jd = julday(5, 25, 2016, 10, 20, 11)
    jd_should = 2457533.9306828701
    nptest.assert_almost_equal(jd, jd_should)
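# Added note: the argument order of julday is month, day, year, hour, minute,
# second, so the test value above corresponds to 2016-05-25 10:20:11 UTC.
# Assuming the julian2datetime helper from the same julian module is imported
# alongside julday, the conversion round-trips to within the precision of the
# Julian date:
def example_julday_roundtrip():
    from datetime import datetime
    dt = julian2datetime(julday(5, 25, 2016, 10, 20, 11))
    assert abs((dt - datetime(2016, 5, 25, 10, 20, 11)).total_seconds()) < 1
    return dt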
def _read_spec_file(self, filename, timestamp=None):
    """
    Read specific image for given datetime timestamp.

    Parameters
    ----------
    filename : string
        filename
    timestamp : datetime.datetime
        exact observation timestamp of the image that should be read

    Returns
    -------
    data : dict
        dictionary of numpy arrays that hold the image data for each variable
        of the dataset
    metadata : dict
        dictionary of numpy arrays that hold the metadata
    timestamp : datetime.datetime
        exact timestamp of the image
    lon : numpy.array or None
        array of longitudes, if None self.grid will be assumed
    lat : numpy.array or None
        array of latitudes, if None self.grid will be assumed
    time_var : string or None
        variable name of observation times in the data dict, if None all
        observations have the same timestamp
    """
    latitude = []
    longitude = []
    ssm = []
    dates = []
    orbit_number = []
    direction_of_motion = []
    ssm_sens = []
    frozen_lsf = []
    snow_cover = []
    topo_complex = []
    ssm_noise = []
    ssm_mean = []
    beam_ident = []
    azimuth = []
    incidence = []
    sig0 = []
    sigma40 = []
    sigma40_noise = []

    with bufr_reader.BUFRReader(filename) as bufr:
        for message in bufr.messages():
            # geolocation, orbit and surface-state information
            latitude.append(message[:, 12])
            longitude.append(message[:, 13])
            ssm.append(message[:, 64])
            orbit_number.append(message[:, 15])
            direction_of_motion.append(message[:, 5])
            ssm_sens.append(message[:, 70])
            frozen_lsf.append(message[:, 79])
            snow_cover.append(message[:, 78])
            topo_complex.append(message[:, 81])
            ssm_noise.append(message[:, 65])
            ssm_mean.append(message[:, 73])
            sigma40.append(message[:, 66])
            sigma40_noise.append(message[:, 67])
            # per-beam backscatter information (three antenna beams)
            beam_ident.append([message[:, 20], message[:, 34],
                               message[:, 48]])
            incidence.append([message[:, 21], message[:, 35],
                              message[:, 49]])
            azimuth.append([message[:, 22], message[:, 36], message[:, 50]])
            sig0.append([message[:, 23], message[:, 37], message[:, 51]])

            # observation times as julian dates
            years = message[:, 6].astype(int)
            months = message[:, 7].astype(int)
            days = message[:, 8].astype(int)
            hours = message[:, 9].astype(int)
            minutes = message[:, 10].astype(int)
            seconds = message[:, 11].astype(int)
            dates.append(julian.julday(months, days, years, hours, minutes,
                                       seconds))

    # flatten the per-message lists into single arrays
    ssm = np.concatenate(ssm)
    latitude = np.concatenate(latitude)
    longitude = np.concatenate(longitude)
    orbit_number = np.concatenate(orbit_number)
    direction_of_motion = np.concatenate(direction_of_motion)
    ssm_sens = np.concatenate(ssm_sens)
    frozen_lsf = np.concatenate(frozen_lsf)
    snow_cover = np.concatenate(snow_cover)
    topo_complex = np.concatenate(topo_complex)
    ssm_noise = np.concatenate(ssm_noise)
    ssm_mean = np.concatenate(ssm_mean)
    dates = np.concatenate(dates)
    sigma40 = np.concatenate(sigma40)
    sigma40_noise = np.concatenate(sigma40_noise)

    data = {'ssm': ssm,
            'ssm_noise': ssm_noise,
            'snow_cover': snow_cover,
            'frozen_prob': frozen_lsf,
            'topo_complex': topo_complex,
            'jd': dates}

    return data, {}, timestamp, longitude, latitude, 'jd'
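# Minimal sketch (added, synthetic data, no BUFR dependency) of the
# accumulate-then-concatenate pattern used by _read_spec_file above:
# per-message column slices are collected in lists and flattened once at the
# end. The column indices mirror the latitude/longitude/ssm columns used
# above; the message arrays here are purely illustrative.
def example_accumulate_messages():
    import numpy as np
    messages = [np.random.rand(4, 90), np.random.rand(3, 90)]
    lat, lon, ssm = [], [], []
    for message in messages:
        lat.append(message[:, 12])
        lon.append(message[:, 13])
        ssm.append(message[:, 64])
    return np.concatenate(ssm), np.concatenate(lon), np.concatenate(lat)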