def l1qc_process(cf, ds1):
    """
    Apply the standard L1 processing steps to a freshly read data structure.

    Sets the netCDF attributes from the control file, tidies the datetime
    series (rounds to the nearest second, fixes time step gaps, recalculates
    the Excel datetime and the Year/Month/Day... series), stamps the "L1"
    processing level and the start/end dates into the global attributes and
    then creates the derived series (variances/standard deviations, user
    defined functions and synthetic Fsd).

    Args:
        cf: control file object; [options] may contain FixTimeStepMethod.
        ds1: data structure to be processed, modified in place.

    Returns:
        None; ds1 is modified in place.
    """
    # get the netCDF attributes from the control file
    qcts.do_attributes(cf, ds1)
    # round the Python datetime to the nearest second
    qcutils.round_datetime(ds1, mode="nearest_second")
    # check for gaps in the Python datetime series and fix if present
    fixtimestepmethod = qcutils.get_keyvaluefromcf(cf, ["options"], "FixTimeStepMethod", default="round")
    if qcutils.CheckTimeStep(ds1):
        qcutils.FixTimeStep(ds1, fixtimestepmethod=fixtimestepmethod)
    # recalculate the Excel datetime
    qcutils.get_xldatefromdatetime(ds1)
    # get the Year, Month, Day etc from the Python datetime
    qcutils.get_ymdhmsfromdatetime(ds1)
    # write the processing level to a global attribute
    ds1.globalattributes['nc_level'] = "L1"
    # get the start and end date from the datetime series unless they were
    # given in the control file
    if 'start_date' not in ds1.globalattributes:
        ds1.globalattributes['start_date'] = str(ds1.series['DateTime']['Data'][0])
    if 'end_date' not in ds1.globalattributes:
        ds1.globalattributes['end_date'] = str(ds1.series['DateTime']['Data'][-1])
    # calculate variances from standard deviations and vice versa
    qcts.CalculateStandardDeviations(cf, ds1)
    # create new variables using user defined functions
    qcts.DoFunctions(cf, ds1)
    # create a series of synthetic downwelling shortwave radiation
    qcts.get_synthetic_fsd(ds1)
def interpolate_to_30minutes(ds_60minutes):
    """
    Interpolate a 60 minute data structure onto a 30 minute time step.

    A new data structure is created with 30 minute local and UTC datetime
    series spanning the same period as the input, all non-datetime series are
    linearly interpolated onto the 30 minute time step and any 30 minute
    point that is contaminated by missing 60 minute data is set to the
    missing value and flagged.

    Args:
        ds_60minutes: data structure with a 60 minute time step; must contain
                      "DateTime" and "DateTime_UTC" series.

    Returns:
        ds_30minutes: new data structure with a 30 minute time step.
    """
    ds_30minutes = qcio.DataStructure()
    # copy the global attributes
    for this_attr in list(ds_60minutes.globalattributes.keys()):
        ds_30minutes.globalattributes[this_attr] = ds_60minutes.globalattributes[this_attr]
    # update the global attribute "time_step"
    ds_30minutes.globalattributes["time_step"] = 30
    # generate the 30 minute datetime series
    # NOTE(review): perdelta is defined elsewhere; presumably it yields
    # datetimes from start to end at the given step - confirm end point handling
    dt_loc_60minutes = ds_60minutes.series["DateTime"]["Data"]
    dt_loc_30minutes = [x for x in perdelta(dt_loc_60minutes[0],dt_loc_60minutes[-1],datetime.timedelta(minutes=30))]
    nRecs_30minutes = len(dt_loc_30minutes)
    dt_utc_60minutes = ds_60minutes.series["DateTime_UTC"]["Data"]
    dt_utc_30minutes = [x for x in perdelta(dt_utc_60minutes[0],dt_utc_60minutes[-1],datetime.timedelta(minutes=30))]
    # update the global attribute "nc_nrecs"
    ds_30minutes.globalattributes['nc_nrecs'] = nRecs_30minutes
    # put the local and UTC datetime series into the new data structure,
    # each with an all-zero (good data) QC flag
    ds_30minutes.series["DateTime"] = {}
    ds_30minutes.series["DateTime"]["Data"] = dt_loc_30minutes
    flag = numpy.zeros(len(dt_loc_30minutes),dtype=numpy.int32)
    ds_30minutes.series["DateTime"]["Flag"] = flag
    ds_30minutes.series["DateTime_UTC"] = {}
    ds_30minutes.series["DateTime_UTC"]["Data"] = dt_utc_30minutes
    flag = numpy.zeros(len(dt_utc_30minutes),dtype=numpy.int32)
    ds_30minutes.series["DateTime_UTC"]["Flag"] = flag
    # get the year, month etc from the datetime
    qcutils.get_xldatefromdatetime(ds_30minutes)
    qcutils.get_ymdhmsfromdatetime(ds_30minutes)
    # interpolate to 30 minutes
    # x_60minutes are the integer indices of the 60 minute records,
    # x_30minutes places a new point half way between each pair
    nRecs_60 = len(ds_60minutes.series["DateTime"]["Data"])
    nRecs_30 = len(ds_30minutes.series["DateTime"]["Data"])
    x_60minutes = numpy.arange(0,nRecs_60,1)
    x_30minutes = numpy.arange(0,nRecs_60-0.5,0.5)
    varlist_60 = list(ds_60minutes.series.keys())
    # strip out the date and time variables already done
    for item in ["DateTime","DateTime_UTC","xlDateTime","Year","Month","Day","Hour","Minute","Second","Hdh","Hr_UTC"]:
        if item in varlist_60:
            varlist_60.remove(item)
    # now do the interpolation (its OK to interpolate accumulated precipitation)
    for label in varlist_60:
        series_60minutes,flag,attr = qcutils.GetSeries(ds_60minutes,label)
        # ci_60minutes is a missing-data indicator: 1 where the 60 minute
        # data equals the missing value, 0 elsewhere
        ci_60minutes = numpy.zeros(len(series_60minutes))
        idx = numpy.where(abs(series_60minutes-float(c.missing_value))<c.eps)[0]
        ci_60minutes[idx] = float(1)
        # interpolate the data itself ...
        int_fn = interp1d(x_60minutes,series_60minutes)
        series_30minutes = int_fn(x_30minutes)
        # ... and the missing-data indicator
        int_fn = interp1d(x_60minutes,ci_60minutes)
        ci_30minutes = int_fn(x_30minutes)
        # any 30 minute point whose interpolated indicator is non-zero was
        # influenced by missing 60 minute data; set it missing and flag it
        idx = numpy.where(abs(ci_30minutes-float(0))>c.eps)[0]
        series_30minutes[idx] = numpy.float64(c.missing_value)
        flag_30minutes = numpy.zeros(nRecs_30, dtype=numpy.int32)
        flag_30minutes[idx] = numpy.int32(1)
        qcutils.CreateSeries(ds_30minutes,label,series_30minutes,Flag=flag_30minutes,Attr=attr)
    # get the UTC hour as a decimal (e.g. 13.5 for 13:30)
    hr_utc = [float(x.hour)+float(x.minute)/60 for x in dt_utc_30minutes]
    attr = qcutils.MakeAttributeDictionary(long_name='UTC hour')
    flag_30minutes = numpy.zeros(nRecs_30, dtype=numpy.int32)
    qcutils.CreateSeries(ds_30minutes,'Hr_UTC',hr_utc,Flag=flag_30minutes,Attr=attr)
    return ds_30minutes
def l1qc(cf):
    """
    Read the L1 input file named in the control file, apply the standard L1
    processing steps and return the resulting data structure.

    The reader is chosen from the input file extension (CSV or Excel).  On
    any failure (missing file, reader error) the returned data structure has
    a non-zero ds1.returncodes["value"] and a "message" entry.

    Args:
        cf: control file object.

    Returns:
        ds1: data structure containing the L1 data.
    """
    # get the name of the input file from the control file
    in_filename = qcio.get_infilenamefromcf(cf)
    if not qcutils.file_exists(in_filename, mode="quiet"):
        msg = " Input file " + in_filename + " not found ..."
        logger.error(msg)
        ds1 = qcio.DataStructure()
        ds1.returncodes = {"value": 1, "message": msg}
        return ds1
    # only the extension is needed to choose the reader
    file_extension = os.path.splitext(in_filename)[1]
    if "csv" in file_extension.lower():
        ds1 = qcio.csv_read_series(cf)
        if ds1.returncodes["value"] != 0:
            return ds1
        # get a series of Excel datetime from the Python datetime objects
        qcutils.get_xldatefromdatetime(ds1)
    else:
        ds1 = qcio.xl_read_series(cf)
        if ds1.returncodes["value"] != 0:
            return ds1
        # get a series of Python datetime objects from the Excel datetime
        qcutils.get_datetimefromxldate(ds1)
    # apply the standard L1 processing steps: attributes from the control
    # file, datetime tidying, time step checks and derived series
    l1qc_process(cf, ds1)
    # check missing data and QC flags are consistent
    qcutils.CheckQCFlags(ds1)
    return ds1
def interpolate_to_30minutes(ds_60minutes):
    """
    Interpolate a 60 minute data structure onto a 30 minute time step.

    A new data structure is created with 30 minute local and UTC datetime
    series spanning the same period as the input, all non-datetime series are
    linearly interpolated onto the 30 minute time step and any 30 minute
    point that is contaminated by missing 60 minute data is set to the
    missing value and flagged.

    Args:
        ds_60minutes: data structure with a 60 minute time step; must contain
                      "DateTime" and "DateTime_UTC" series.

    Returns:
        ds_30minutes: new data structure with a 30 minute time step.
    """
    ds_30minutes = qcio.DataStructure()
    # copy the global attributes
    # NOTE: list() so we iterate a copy, not a Python 3 dict view
    for this_attr in list(ds_60minutes.globalattributes.keys()):
        ds_30minutes.globalattributes[this_attr] = ds_60minutes.globalattributes[this_attr]
    # update the global attribute "time_step"
    ds_30minutes.globalattributes["time_step"] = 30
    # generate the 30 minute local and UTC datetime series
    dt_loc_60minutes = ds_60minutes.series["DateTime"]["Data"]
    dt_loc_30minutes = [x for x in perdelta(dt_loc_60minutes[0],dt_loc_60minutes[-1],datetime.timedelta(minutes=30))]
    nRecs_30minutes = len(dt_loc_30minutes)
    dt_utc_60minutes = ds_60minutes.series["DateTime_UTC"]["Data"]
    dt_utc_30minutes = [x for x in perdelta(dt_utc_60minutes[0],dt_utc_60minutes[-1],datetime.timedelta(minutes=30))]
    # update the global attribute "nc_nrecs"
    ds_30minutes.globalattributes['nc_nrecs'] = nRecs_30minutes
    # put the datetime series into the data structure, each with an
    # all-zero (good data) int32 QC flag
    ds_30minutes.series["DateTime"] = {}
    ds_30minutes.series["DateTime"]["Data"] = dt_loc_30minutes
    ds_30minutes.series["DateTime"]["Flag"] = numpy.zeros(len(dt_loc_30minutes),dtype=numpy.int32)
    ds_30minutes.series["DateTime_UTC"] = {}
    ds_30minutes.series["DateTime_UTC"]["Data"] = dt_utc_30minutes
    ds_30minutes.series["DateTime_UTC"]["Flag"] = numpy.zeros(len(dt_utc_30minutes),dtype=numpy.int32)
    # get the year, month etc from the datetime
    qcutils.get_xldatefromdatetime(ds_30minutes)
    qcutils.get_ymdhmsfromdatetime(ds_30minutes)
    # interpolate to 30 minutes
    # x_60minutes are the integer indices of the 60 minute records,
    # x_30minutes places a new point half way between each pair
    nRecs_60 = len(ds_60minutes.series["DateTime"]["Data"])
    nRecs_30 = len(ds_30minutes.series["DateTime"]["Data"])
    x_60minutes = numpy.arange(0,nRecs_60,1)
    x_30minutes = numpy.arange(0,nRecs_60-0.5,0.5)
    # list() because dict.keys() is a view in Python 3 and has no remove()
    varlist_60 = list(ds_60minutes.series.keys())
    # strip out the date and time variables already done
    for item in ["DateTime","DateTime_UTC","xlDateTime","Year","Month","Day","Hour","Minute","Second","Hdh","Hr_UTC"]:
        if item in varlist_60:
            varlist_60.remove(item)
    # now do the interpolation (its OK to interpolate accumulated precipitation)
    for label in varlist_60:
        series_60minutes,flag,attr = qcutils.GetSeries(ds_60minutes,label)
        # ci_60minutes is a missing-data indicator: 1 where the 60 minute
        # data equals the missing value, 0 elsewhere
        ci_60minutes = numpy.zeros(len(series_60minutes))
        idx = numpy.where(abs(series_60minutes-float(c.missing_value))<c.eps)[0]
        ci_60minutes[idx] = float(1)
        # interpolate the data itself ...
        int_fn = interp1d(x_60minutes,series_60minutes)
        series_30minutes = int_fn(x_30minutes)
        # ... and the missing-data indicator
        int_fn = interp1d(x_60minutes,ci_60minutes)
        ci_30minutes = int_fn(x_30minutes)
        # any 30 minute point whose interpolated indicator is non-zero was
        # influenced by missing 60 minute data; set it missing and flag it
        idx = numpy.where(abs(ci_30minutes-float(0))>c.eps)[0]
        series_30minutes[idx] = numpy.float64(c.missing_value)
        # a fresh flag series for each variable, 1 where the data is missing
        flag_30minutes = numpy.zeros(nRecs_30, dtype=numpy.int32)
        flag_30minutes[idx] = numpy.int32(1)
        qcutils.CreateSeries(ds_30minutes,label,series_30minutes,Flag=flag_30minutes,Attr=attr)
    # get the UTC hour as a decimal (e.g. 13.5 for 13:30)
    hr_utc = [float(x.hour)+float(x.minute)/60 for x in dt_utc_30minutes]
    attr = qcutils.MakeAttributeDictionary(long_name='UTC hour')
    flag_30minutes = numpy.zeros(nRecs_30, dtype=numpy.int32)
    qcutils.CreateSeries(ds_30minutes,'Hr_UTC',hr_utc,Flag=flag_30minutes,Attr=attr)
    return ds_30minutes
def l1qc(cf):
    """
    Read the L1 input file named in the control file, apply the L1 processing
    steps and return the resulting data structure.

    The reader is chosen from the input file extension (CSV or Excel).  On
    any failure (missing file, reader error) the returned data structure has
    a non-zero ds1.returncodes["value"] and a "message" entry.

    Args:
        cf: control file object; [options] may contain FixTimeStepMethod.

    Returns:
        ds1: data structure containing the L1 data.
    """
    # get the name of the input file from the control file
    in_filename = qcio.get_infilenamefromcf(cf)
    if not qcutils.file_exists(in_filename,mode="quiet"):
        msg = " Input file "+in_filename+" not found ..."
        log.error(msg)
        ds1 = qcio.DataStructure()
        ds1.returncodes = {"value":1,"message":msg}
        return ds1
    # only the extension is needed to choose the reader
    file_extension = os.path.splitext(in_filename)[1]
    if "csv" in file_extension.lower():
        ds1 = qcio.csv_read_series(cf)
        if ds1.returncodes["value"] != 0:
            return ds1
        # get a series of Excel datetime from the Python datetime objects
        qcutils.get_xldatefromdatetime(ds1)
    else:
        ds1 = qcio.xl_read_series(cf)
        if ds1.returncodes["value"] != 0:
            return ds1
        # get a series of Python datetime objects from the Excel datetime
        qcutils.get_datetimefromxldate(ds1)
    # get the netCDF attributes from the control file
    qcts.do_attributes(cf,ds1)
    # round the Python datetime to the nearest second
    qcutils.round_datetime(ds1,mode="nearest_second")
    # check for gaps in the Python datetime series and fix if present
    fixtimestepmethod = qcutils.get_keyvaluefromcf(cf,["options"],"FixTimeStepMethod",default="round")
    if qcutils.CheckTimeStep(ds1):
        qcutils.FixTimeStep(ds1,fixtimestepmethod=fixtimestepmethod)
    # recalculate the Excel datetime
    qcutils.get_xldatefromdatetime(ds1)
    # get the Year, Month, Day etc from the Python datetime
    qcutils.get_ymdhmsfromdatetime(ds1)
    # write the processing level to a global attribute
    ds1.globalattributes['nc_level'] = "L1"
    # get the start and end date from the datetime series unless they were
    # given in the control file
    if 'start_date' not in ds1.globalattributes:
        ds1.globalattributes['start_date'] = str(ds1.series['DateTime']['Data'][0])
    if 'end_date' not in ds1.globalattributes:
        ds1.globalattributes['end_date'] = str(ds1.series['DateTime']['Data'][-1])
    # calculate variances from standard deviations and vice versa
    qcts.CalculateStandardDeviations(cf,ds1)
    # create new variables using user defined functions
    qcts.DoFunctions(cf,ds1)
    # create a series of synthetic downwelling shortwave radiation
    qcts.get_synthetic_fsd(ds1)
    return ds1
def l1qc_read_files(cf):
    """
    Read the L1 input file named in the control file into a data structure.

    The reader is chosen from the input file extension (CSV or Excel) and the
    complementary datetime series (Excel serial date or Python datetime) is
    generated after reading.

    Args:
        cf: control file object.

    Returns:
        ds: data structure; ds.returncodes["value"] is non-zero on failure
            and ds.returncodes["message"] describes the problem.
    """
    # name of the input file, taken from the control file
    in_filename = qcio.get_infilenamefromcf(cf)
    # guard clause: no input file means an empty data structure with an
    # error code for the caller
    if not qcutils.file_exists(in_filename, mode="quiet"):
        msg = " Input file " + in_filename + " not found ..."
        log.error(msg)
        ds = qcio.DataStructure()
        ds.returncodes = {"value": 1, "message": msg}
        return ds
    # choose the reader from the file extension
    extension = os.path.splitext(in_filename)[1].lower()
    if "csv" in extension:
        ds = qcio.csv_read_series(cf)
        if ds.returncodes["value"] != 0:
            return ds
        # CSV gives Python datetimes; derive the Excel datetime from them
        qcutils.get_xldatefromdatetime(ds)
    else:
        ds = qcio.xl_read_series(cf)
        if ds.returncodes["value"] != 0:
            return ds
        # Excel gives serial dates; derive the Python datetimes from them
        qcutils.get_datetimefromxldate(ds)
    return ds
site_tz = pytz.timezone(site_timezone) # put the time zone (UTC) into the datetime dt_utc = [x.replace(tzinfo=pytz.utc) for x in dt_UTC] # convert from UTC to local time dt_loc = [x.astimezone(site_tz) for x in dt_utc] # remove any daylight saving adjustments (towers run on standard time) dt_loc = [x - x.dst() for x in dt_loc] # strip the time zone from the local datetime series dt_loc = [x.replace(tzinfo=None) for x in dt_loc] ds.series["DateTime"]["Data"] = dt_loc # update global attributes ds.globalattributes["nc_nrecs"] = len(dt_loc) ds.globalattributes["start_datetime"] = str(dt_loc[0]) ds.globalattributes["end_datetime"] = str(dt_loc[-1]) # get the Excel datetime qcutils.get_xldatefromdatetime(ds) # get the year, month, day, hour, minute and second qcutils.get_ymdhmsfromdatetime(ds) # put the QC'd, smoothed and interpolated EVI into the data structure flag = numpy.zeros(len(dt_loc), dtype=numpy.int32) attr = qcutils.MakeAttributeDictionary( long_name="MODIS EVI, smoothed and interpolated", units="none", horiz_resolution="250m", cutout_size=str(site_cutout), evi_quality_threshold=str(evi_quality_threshold), evi_sd_threshold=str(evi_sd_threshold), evi_interpolate=str(evi_interpolate), evi_smooth_filter=str(evi_smooth_filter), sg_num_points=str(sg_num_points), sg_order=str(sg_num_points))
# UTC netCDF time series at tower time step for interpolation tmp = [x.replace(tzinfo=None) for x in dt_erai_utc_tts] erai_time_tts = netCDF4.date2num(tmp, time_units) # local datetime series at tower time step dt_erai_loc_tts = [x.astimezone(site_tz) for x in dt_erai_utc_tts] # NOTE: will have to disable daylight saving at some stage, towers stay on Standard Time # PRI hopes that the following line will do this ... dt_erai_loc_tts = [x - x.dst() for x in dt_erai_loc_tts] # make the datetime series timezone naive and put it in data structure dt_erai_loc_tts = [x.replace(tzinfo=None) for x in dt_erai_loc_tts] ds_erai.series["DateTime"]["Data"] = dt_erai_loc_tts ds_erai.globalattributes["nc_nrecs"] = len(dt_erai_loc_tts) ds_erai.globalattributes["start_datetime"] = str(dt_erai_loc_tts[0]) ds_erai.globalattributes["end_datetime"] = str(dt_erai_loc_tts[-1]) # get the Excel datetime qcutils.get_xldatefromdatetime(ds_erai) # get the year, month, day, hour, minute and second qcutils.get_ymdhmsfromdatetime(ds_erai) # get the solar altitude, we will use this later to interpolate the ERA Interim solar # data from the ERA-I 3 hour time step to the tower time step. # NOTE: alt_solar is in degrees alt_solar_3hr = numpy.array([ pysolar.GetAltitude(erai_latitude, erai_longitude, dt) for dt in dt_erai_utc_cor ]) # get the solar altitude at the tower time step alt_solar_tts = numpy.array([ pysolar.GetAltitude(erai_latitude, erai_longitude, dt) for dt in dt_erai_utc_tts ]) idx = numpy.where(alt_solar_tts <= 0)[0]
dt_utc_60minutes=[x.replace(tzinfo=pytz.utc) for x in dt_utc_60minutes] # get local time from UTC dt_loc_60minutes=[x.astimezone(site_tz) for x in dt_utc_60minutes] # NOTE: will have to disable daylight saving at some stage, towers stay on Standard Time # PRI hopes that the following line will do this ... dt_loc_60minutes=[x-x.dst() for x in dt_loc_60minutes] # make local time timezone naive to match datetimes in OzFluxQC dt_loc_60minutes=[x.replace(tzinfo=None) for x in dt_loc_60minutes] ds_60minutes.series["DateTime"] = {} ds_60minutes.series["DateTime"]["Data"] = dt_loc_60minutes ds_60minutes.series["DateTime_UTC"] = {} ds_60minutes.series["DateTime_UTC"]["Data"] = dt_utc_60minutes # get the year, month etc from the datetime flag_60minutes = numpy.zeros(nRecs,dtype=numpy.int32) ds_60minutes.series["DateTime"]["Flag"] = flag_60minutes qcutils.get_xldatefromdatetime(ds_60minutes) qcutils.get_ymdhmsfromdatetime(ds_60minutes) # get derived quantities and adjust units # air temperature from K to C attr = qcutils.GetAttributeDictionary(ds_60minutes,"Ta_00") if attr["units"] == "K": for i in range(0,3): for j in range(0,3): label = "Ta_"+str(i)+str(j) Ta,f,a = qcutils.GetSeriesasMA(ds_60minutes,label) Ta = Ta - c.C2K attr["units"] = "C" qcutils.CreateSeries(ds_60minutes,label,Ta,Flag=flag_60minutes,Attr=attr) # soil temperature from K to C attr = qcutils.GetAttributeDictionary(ds_60minutes,"Ts_00") if attr["units"] == "K":
# read the netcdf files logging.info('Reading the netCDF files for '+info["site_name"]) f = access_read_mfiles2(file_list,var_list=var_list) # get the data from the netCDF files and write it to the 60 minute data structure logging.info('Getting the ACCESS data') get_accessdata(cf,ds_60minutes,f,info) # set some global attributes logging.info('Setting global attributes') set_globalattributes(ds_60minutes,info) # check for time gaps in the file logging.info("Checking for time gaps") if qcutils.CheckTimeStep(ds_60minutes): qcutils.FixTimeStep(ds_60minutes) # get the datetime in some different formats logging.info('Getting xlDateTime and YMDHMS') qcutils.get_xldatefromdatetime(ds_60minutes) qcutils.get_ymdhmsfromdatetime(ds_60minutes) #f.close() # get derived quantities and adjust units logging.info("Changing units and getting derived quantities") # air temperature from K to C changeunits_airtemperature(ds_60minutes) # soil temperature from K to C changeunits_soiltemperature(ds_60minutes) # pressure from Pa to kPa changeunits_pressure(ds_60minutes) # wind speed from components get_windspeedanddirection(ds_60minutes) # relative humidity from temperature, specific humidity and pressure get_relativehumidity(ds_60minutes) # absolute humidity from temperature and relative humidity
ds_all.globalattributes["elevation"] = site_elevation ts = int(ds_all.globalattributes["time_step"]) ldt_all = [result for result in qcutils.perdelta(start_date,end_date,datetime.timedelta(minutes=ts))] nRecs = len(ldt_all) ds_all.globalattributes["nc_nrecs"] = nRecs ds_all.series["DateTime"] = {} ds_all.series["DateTime"]["Data"] = ldt_all flag = numpy.zeros(nRecs,dtype=numpy.int32) ds_all.series["DateTime"]["Flag"] = flag ds_all.series["DateTime"]["Attr"] = {} ds_all.series['DateTime']["Attr"]["long_name"] = "Date-time object" ds_all.series['DateTime']["Attr"]["units"] = "None" # get the year, month, day, hour, minute and seconds from the Python datetime qcutils.get_ymdhmsfromdatetime(ds_all) # get the xlDateTime from the xlDateTime = qcutils.get_xldatefromdatetime(ds_all) attr = qcutils.MakeAttributeDictionary(long_name="Date/time in Excel format",units="days since 1899-12-31 00:00:00") qcutils.CreateSeries(ds_all,"xlDateTime",xlDateTime,Flag=flag,Attr=attr) # loop over the stations for idx,bom_id in enumerate(ds_dict.keys()): log.info("Merging BoM site: "+str(bom_id)) ds = ds_dict[bom_id] ldt = ds.series["DateTime"]["Data"] index = qcutils.FindIndicesOfBInA(ldt,ldt_all) # loop over the variables for label in ["Precip","Ta","Td","RH","Ws","Wd","Wg","ps"]: data_all = numpy.ma.ones(nRecs,dtype=numpy.float64)*float(c.missing_value) flag_all = numpy.zeros(nRecs,dtype=numpy.int32) data,flag,attr = qcutils.GetSeriesasMA(ds,label) data_all[index] = data flag_all[index] = flag
ds_30.globalattributes["xl_datemode"] = str(0) ds_30.globalattributes["site_name"] = cf["Sites"][site]["site_name"] time_units = getattr(bios_ncfile.variables["time"],"units") qcutils.get_datetimefromnctime(ds_30,time,time_units) qcutils.round_datetime(ds_30,mode="nearest_timestep") if qcutils.CheckTimeStep(ds_30): qcutils.FixTimeStep(ds_30) ldt_30 = ds_30.series["DateTime"]["Data"] si = qcutils.GetDateIndex(ldt_30,start_date,default=0,ts=ts,match="startnexthour") ei = qcutils.GetDateIndex(ldt_30,end_date,default=len(ldt_30),ts=ts,match="endprevioushour") ds_30.series["DateTime"]["Data"] = ds_30.series["DateTime"]["Data"][si:ei+1] ds_30.series["DateTime"]["Flag"] = ds_30.series["DateTime"]["Flag"][si:ei+1] ldt_30 = ds_30.series["DateTime"]["Data"] nRecs = ds_30.globalattributes["nc_nrecs"] = len(ldt_30) flag = numpy.zeros(nRecs) qcutils.get_ymdhmsfromdatetime(ds_30) xl_date_loc = qcutils.get_xldatefromdatetime(ds_30) attr = qcutils.MakeAttributeDictionary(long_name="Date/time (local) in Excel format",units="days since 1899-12-31 00:00:00") qcutils.CreateSeries(ds_30,"xlDateTime",xl_date_loc,flag,attr) # get the data for label in var_list: bios_name = cf["Variables"][label]["bios_name"] if len(bios_ncfile.variables[bios_name].shape)==1: #print label+" has 1 dimension" data = bios_ncfile.variables[bios_name][:][si:ei+1] elif len(bios_ncfile.variables[bios_name].shape)==2: #print label+" has 2 dimensions" data = bios_ncfile.variables[bios_name][:,0][si:ei+1] elif len(bios_ncfile.variables[bios_name].shape)==3: #print label+" has 3 dimensions" data = bios_ncfile.variables[bios_name][:,0,0][si:ei+1] attr = {}
site_tz = pytz.timezone(site_timezone) # put the time zone (UTC) into the datetime dt_utc = [x.replace(tzinfo=pytz.utc) for x in dt_UTC] # convert from UTC to local time dt_loc = [x.astimezone(site_tz) for x in dt_utc] # remove any daylight saving adjustments (towers run on standard time) dt_loc = [x-x.dst() for x in dt_loc] # strip the time zone from the local datetime series dt_loc = [x.replace(tzinfo=None) for x in dt_loc] ds.series["DateTime"]["Data"] = dt_loc # update global attributes ds.globalattributes["nc_nrecs"] = len(dt_loc) ds.globalattributes["start_datetime"] = str(dt_loc[0]) ds.globalattributes["end_datetime"] = str(dt_loc[-1]) # get the Excel datetime qcutils.get_xldatefromdatetime(ds) # get the year, month, day, hour, minute and second qcutils.get_ymdhmsfromdatetime(ds) # put the QC'd, smoothed and interpolated EVI into the data structure flag = numpy.zeros(len(dt_loc),dtype=numpy.int32) attr = qcutils.MakeAttributeDictionary(long_name="MODIS EVI, smoothed and interpolated",units="none", horiz_resolution="250m", cutout_size=str(site_cutout), evi_quality_threshold=str(evi_quality_threshold), evi_sd_threshold=str(evi_sd_threshold), evi_interpolate=str(evi_interpolate), evi_smooth_filter=str(evi_smooth_filter), sg_num_points=str(sg_num_points), sg_order=str(sg_num_points)) qcutils.CreateSeries(ds,"EVI",evi_interp2_smooth,Flag=flag,Attr=attr)
ds_30.globalattributes["xl_datemode"] = str(0) ds_30.globalattributes["site_name"] = cf["Sites"][site]["site_name"] time_units = getattr(bios_ncfile.variables["time"],"units") qcutils.get_datetimefromnctime(ds_30,time,time_units) qcutils.round_datetime(ds_30,mode="nearest_timestep") if qcutils.CheckTimeStep(ds_30): qcutils.FixTimeStep(ds_30) ldt_30 = ds_30.series["DateTime"]["Data"] si = qcutils.GetDateIndex(ldt_30,start_date,default=0,ts=ts,match="startnexthour") ei = qcutils.GetDateIndex(ldt_30,end_date,default=len(ldt_30),ts=ts,match="endprevioushour") ds_30.series["DateTime"]["Data"] = ds_30.series["DateTime"]["Data"][si:ei+1] ds_30.series["DateTime"]["Flag"] = ds_30.series["DateTime"]["Flag"][si:ei+1] ldt_30 = ds_30.series["DateTime"]["Data"] nRecs = ds_30.globalattributes["nc_nrecs"] = len(ldt_30) flag = numpy.zeros(nRecs) qcutils.get_ymdhmsfromdatetime(ds_30) xl_date_loc = qcutils.get_xldatefromdatetime(ds_30) attr = qcutils.MakeAttributeDictionary(long_name="Date/time (local) in Excel format",units="days since 1899-12-31 00:00:00") qcutils.CreateSeries(ds_30,"xlDateTime",xl_date_loc,Flag=flag,Attr=attr) # get the data for label in var_list: bios_name = cf["Variables"][label]["bios_name"] if len(bios_ncfile.variables[bios_name].shape)==1: #print label+" has 1 dimension" data = bios_ncfile.variables[bios_name][:][si:ei+1] elif len(bios_ncfile.variables[bios_name].shape)==2: #print label+" has 2 dimensions" data = bios_ncfile.variables[bios_name][:,0][si:ei+1] elif len(bios_ncfile.variables[bios_name].shape)==3: #print label+" has 3 dimensions" data = bios_ncfile.variables[bios_name][:,0,0][si:ei+1] attr = {}
# get the 60 minute data structure ds_aws_60minute = qcio.DataStructure() # get the global attributes for item in ds_aws_30minute.globalattributes.keys(): ds_aws_60minute.globalattributes[item] = ds_aws_30minute.globalattributes[item] # overwrite with 60 minute values as appropriate ds_aws_60minute.globalattributes["nc_nrecs"] = str(nRecs_60minute) ds_aws_60minute.globalattributes["time_step"] = str(60) # put the Python datetime into the data structure ds_aws_60minute.series["DateTime"] = {} ds_aws_60minute.series["DateTime"]["Data"] = dt_aws_60minute ds_aws_60minute.series["DateTime"]["Flag"] = numpy.zeros(nRecs_60minute,dtype=numpy.int32) ds_aws_60minute.series["DateTime"]["Attr"] = qcutils.MakeAttributeDictionary(long_name="DateTime in local time zone",units="None") # add the Excel datetime, year, month etc qcutils.get_xldatefromdatetime(ds_aws_60minute) qcutils.get_ymdhmsfromdatetime(ds_aws_60minute) # loop over the series and take the average (every thing but Precip) or sum (Precip) for item in series_list: if "Precip" in item: data_30minute,flag_30minute,attr = qcutils.GetSeriesasMA(ds_aws_30minute,item,si=si_wholehour,ei=ei_wholehour) data_2d = numpy.reshape(data_30minute,(nRecs_30minute/2,2)) flag_2d = numpy.reshape(flag_30minute,(nRecs_30minute/2,2)) data_60minute = numpy.ma.sum(data_2d,axis=1) flag_60minute = numpy.ma.max(flag_2d,axis=1) qcutils.CreateSeries(ds_aws_60minute,item,data_60minute,Flag=flag_60minute,Attr=attr) elif "Wd" in item: Ws_30minute,flag_30minute,attr = qcutils.GetSeriesasMA(ds_aws_30minute,item,si=si_wholehour,ei=ei_wholehour) Wd_30minute,flag_30minute,attr = qcutils.GetSeriesasMA(ds_aws_30minute,item,si=si_wholehour,ei=ei_wholehour) U_30minute,V_30minute = qcutils.convert_WsWdtoUV(Ws_30minute,Wd_30minute) U_2d = numpy.reshape(U_30minute,(nRecs_30minute/2,2))