# NOTE(review): script fragment collapsed onto one physical line by extraction; code left
# byte-identical. It reshapes the interpolated 10-min precipitation into (hours, 6) blocks,
# sums each block to hourly values, builds the matching hourly datelist (date = end of the
# accumulation hour), then loads the processed CombiPrecip file and extracts one station column.
# NOTE(review): np.load of a pickled dict requires allow_pickle=True on NumPy >= 1.16.3 — verify.
precip['precip_interpolated_10minres_reshaped'] = np.reshape(precip['precip_interpolated_10minres'][starting_ind:],\ (int(interpolated_precip_shape/6),6)) precip['date_interpolated_10minres_reshaped'] = np.reshape(precip['date_meteoswiss'][starting_ind:],\ (int(interpolated_precip_shape/6),6)) # Calculate hourly sums precip['precip_interpolated_hourly'] = [] for i in range(0,int(interpolated_precip_shape/6)): precip['precip_interpolated_hourly'].append(sum(precip['precip_interpolated_10minres_reshaped'][i,:])) precip['precip_interpolated_hourly'] = np.array(precip['precip_interpolated_hourly']) # Create hourly datelist # Note: Date always refers to precip from the last hour!!! precip['date_hourly_meteoswiss'] = make_datelist(precip['date_interpolated_10minres_reshaped'][0,-1],\ precip['date_interpolated_10minres_reshaped'][-1,-1],1) precip['date_hourly_meteoswiss'] = np.array(precip['date_hourly_meteoswiss']) #--------------------------------------------------------- # Import processed combiprecip data #--------------------------------------------------------- print('import processed combiprecip data') combiprecip_data = np.load(combiprecippath+'\combiprecip_processed.npy') #--------------------------------------------------------- # Extract location from combiprecip dataset #--------------------------------------------------------- station_index = np.where(combiprecip_data[()]['combiprecip'][0,:] == station_id)[0][0] precip['combiprecip'] = combiprecip_data[()]['combiprecip'][1:,station_index] precip['date_combiprecip'] = np.array(combiprecip_data[()]['date_combiprecip'])
# NOTE(review): fragment starts mid-statement (tail of a stationnames list whose opening
# bracket lies outside this chunk) — left byte-identical. The visible code builds a 10-min
# control datelist for 2011-01-01 00:00 .. 2019-05-21 23:50 and calls import_meteoswiss_data
# with process='no' (load pre-processed data); the commented loop is a shape sanity check.
,'Bachtel','Beatenberg','Birmensdorf','Davos','Grosswangen','Jussy' ,'Laegeren_FF','Laegeren_Hut','Lausanne','Muri_Beech','Muri_Spruce' ,'Muri_Meteo','Neunkirch_SE','Neunkirch_N','Neunkirch_SW','Novaggio' ,'Pfynwald-Illgraben_NW','Pfynwald-Illgraben_N','Riehen_Forest' ,'Riehen_Meteo','Sagno_SW','Sagno_SE','Sagno_Meteo','Saillon_1' ,'Saillon_2','Saillon_3','Salgesch','Schaenis','Schmitten' ,'Sempach','Surava_S','Surava_N','Visp','Vordemwald','Zürich','Wangen_SW'] #--------------------------------------------------------- # Create control datelist for time period considered #--------------------------------------------------------- startdate = datetime.datetime(2011,1,1,0) enddate = datetime.datetime(2019,5,21,23,50) hstep = 1/6 datelist_control = make_datelist(startdate,enddate,hstep) #--------------------------------------------------------- # Choose options: # Path 1: Load and process station data # Path 2: Load the processed data #--------------------------------------------------------- data_new,stationnames_new = import_meteoswiss_data(stationnames=stationnames,variable='temp',path=temppath,process='no',\ datelist_control=datelist_control) # Check for shape and amount of values in the arrays #for i in range(0,len(stationnames_new)): # print(data_new[()][stationnames_new[i]].shape) # print(np.where(~np.isnan(data_new[()][stationnames_new[i]][1,:].astype(float)))[0].shape) #---------------------------------------------------------
# NOTE(review): collapsed plotting fragment — left byte-identical. Plots TreeNet global
# radiation against the horizontally interpolated series, computes the RMSE between the two
# slices, adds it as a table under the axes, formats weekly x-ticks, saves the monthly PNG
# and closes all figures. Assumes wsl_ind_* and interp_ind_* slices are equally long — TODO confirm.
# Plot plt.plot(globalrad['treenetdate'][wsl_ind_0:wsl_ind_1+1],\ globalrad['treenetglobalrad'][wsl_ind_0:wsl_ind_1+1],ls='-',color='orange',label='treenet') plt.plot(globalrad['date_interpolation'][interp_ind_0:interp_ind_1+1],\ globalrad['globalrad_interpolated_novertical'][interp_ind_0:interp_ind_1+1],ls='-',color='blue',label='interpolation horizontal only') plt.ylabel(r'Global radiation [$W/m^2$]',fontsize=20) plt.title(treenetstation+' WSL',fontsize=25,loc='left') plt.title(nowdate.strftime('%b %Y'),fontsize=25,loc='right') plt.legend(loc=2) # Calculate RMSEs RMSE_novertical = np.sqrt(np.nanmean((globalrad['treenetglobalrad'][wsl_ind_0:wsl_ind_1+1] - \ globalrad['globalrad_interpolated_novertical'][interp_ind_0:interp_ind_1+1])**2)) # Add table with values below labels = [r'$RMSE_{novertical}$'] plt.table(cellText=[['%.2f' % RMSE_novertical+r' $W/m^2$']],\ bbox = [0.0,-0.12, 1.0, 0.07],cellLoc='center',rowLoc='center',colLabels=labels,fontsize=20) # Format x-axis and grid ax = plt.axes() datelist = make_datelist(globalrad['treenetdate'][wsl_ind_0],globalrad['treenetdate'][wsl_ind_1],1/6) ax.xaxis_date() plt.xticks(datelist[0::6*24*7]) ax.xaxis.set_major_formatter(dt.DateFormatter('%d-%m-%Y %H')) ax.xaxis.grid(True, which='major', color='0.5',alpha=0.7, linestyle='--',lw=1.5) saveas='\\globalrad_comparison_'+treenetstation+'_' plt.savefig(figpath+saveas+nowdate.strftime('%Y_%m')+'.png',bbox_inches='tight',dpi=400) plt.close('all') gc.collect()
# NOTE(review): fragment starts mid-expression (tail of the RMSE_novertical computation whose
# opening lies outside this chunk) — left byte-identical. Computes RMSEs for the standard- and
# empirical-gradient temperature interpolations, renders all three RMSEs as a table under the
# plot, formats weekly x-ticks and saves the monthly comparison figure.
temp['temp_interpolated_novertical'][interp_ind_0:interp_ind_1+1])**2)) RMSE_standardgradient = np.sqrt(np.nanmean((temp['treenettemp'][wsl_ind_0:wsl_ind_1+1] - \ temp['temp_interpolated_standardgradient'][interp_ind_0:interp_ind_1+1])**2)) RMSE_empiricalgradient = np.sqrt(np.nanmean((temp['treenettemp'][wsl_ind_0:wsl_ind_1+1] - \ temp['temp_interpolated_empiricalgradient'][interp_ind_0:interp_ind_1+1])**2)) # Add table with values below labels = [ r'$RMSE_{novertical}$', r'$RMSE_{standard}$', r'$RMSE_{empirical}$' ] plt.table(cellText=[['%.2f' % RMSE_novertical+' °C','%.2f' % RMSE_standardgradient+' °C','%.2f' % RMSE_empiricalgradient+' °C']],\ bbox = [0.0,-0.12, 1.0, 0.07],cellLoc='center',rowLoc='center',colLabels=labels,fontsize=20) # Format x-axis and grid ax = plt.axes() datelist = make_datelist(temp['treenetdate'][wsl_ind_0], temp['treenetdate'][wsl_ind_1], 1 / 6) ax.xaxis_date() plt.xticks(datelist[0::6 * 24 * 7]) ax.xaxis.set_major_formatter(dt.DateFormatter('%d-%m-%Y %H')) ax.xaxis.grid(True, which='major', color='0.5', alpha=0.7, linestyle='--', lw=1.5) saveas = '\\temp_comparison_' + treenetstation + '_' plt.savefig(figpath + saveas + nowdate.strftime('%Y_%m') + '.png', bbox_inches='tight', dpi=400) plt.close('all')
# NOTE(review): fragment starts mid-list and ends mid-'if' suite (the body of the >50 mm
# filter lies outside this chunk) — left byte-identical. Imports 10-min precipitation for all
# stations, derives the datelist from the first station's date columns (rows 1-5), copies row 6
# of each station into a (stations x time) array, and begins NaN-filtering values above 50 mm.
,'Sempach','Surava_S','Surava_N','Visp','Vordemwald','Zürich','Wangen_SW'] #--------------------------------------------------------- # Import data from all meteoswiss stations #--------------------------------------------------------- data = import_precipitation_data(stationnames=stationnames,resolution='10min',path=precippath) #--------------------------------------------------------- # Create datelist for time period considered #--------------------------------------------------------- station = stationnames[0] # in order to have non-nan list startdate = datetime.datetime(int(data[station][1,0]),int(data[station][2,0]),int(data[station][3,0]),int(data[station][4,0]),int(data[station][5,0])) enddate = datetime.datetime(int(data[station][1,-1]),int(data[station][2,-1]),int(data[station][3,-1]),int(data[station][4,-1]),int(data[station][5,-1])) hstep = 1/6 datelist = make_datelist(startdate,enddate,hstep) #for i in range(0,len(stationnames)): # print(data[stationnames[i]].shape) #--------------------------------------------------------- # shift precip data into separate container #--------------------------------------------------------- precip = np.zeros((len(stationnames),len(datelist))) for i in range(0,len(stationnames)): station = stationnames[i] precip[i,:] = data[station][6,:] # Filtering: Convert unrealistic high values into nans for j in range(0,len(datelist)): if precip[i,j] > 50:
# NOTE(review): fragment starts mid-call (tail of an np.reshape whose opening lies outside
# this chunk) — left byte-identical. Near-duplicate of the earlier aggregation fragment:
# reshapes 10-min precip to (hours, 6), sums to hourly values, builds the hourly datelist,
# then obtains CombiPrecip via import_combiprecip(processing='no') and extracts one station.
(int(interpolated_precip_shape/6),6)) precip['date_interpolated_10minres_reshaped'] = np.reshape(precip['date_meteoswiss'][starting_ind:],\ (int(interpolated_precip_shape/6),6)) # Calculate hourly sums precip['precip_interpolated_hourly'] = [] for i in range(0, int(interpolated_precip_shape / 6)): precip['precip_interpolated_hourly'].append( sum(precip['precip_interpolated_10minres_reshaped'][i, :])) precip['precip_interpolated_hourly'] = np.array( precip['precip_interpolated_hourly']) # Create hourly datelist # Note: Date always refers to precip from the last hour!!! precip['date_hourly_meteoswiss'] = make_datelist(precip['date_interpolated_10minres_reshaped'][0,-1],\ precip['date_interpolated_10minres_reshaped'][-1,-1],1) precip['date_hourly_meteoswiss'] = np.array(precip['date_hourly_meteoswiss']) #--------------------------------------------------------- # Import processed combiprecip data #--------------------------------------------------------- combiprecip_data = import_combiprecip(combiprecippath=combiprecippath,processing_combiprecip='no',\ save_combiprecip='yes') #--------------------------------------------------------- # Extract location from combiprecip dataset #--------------------------------------------------------- station_index = np.where( combiprecip_data[()]['combiprecip'][0, :] == station_id)[0][0] precip['combiprecip'] = combiprecip_data[()]['combiprecip'][1:, station_index] precip['date_combiprecip'] = np.array(combiprecip_data[()]['date_combiprecip'])
# NOTE(review): fragment starts mid-call (tail of a bar-plot call whose opening lies outside
# this chunk) — left byte-identical. Finishes the third subplot (CombiPrecip, NaNs marked as
# red dots at y=0), harmonises y-limits across the three panels, formats weekly x-ticks on
# each axis and saves the monthly comparison figure.
precip['combiprecip'][combi_ind_0:combi_ind_1+1],color='blue',width=0.02,align='center') axarr[2].scatter(precip['date_combiprecip'][combi_ind_0:combi_ind_1+1][combiprecip_nans],\ np.zeros((len(combiprecip_nans))),color='red',s=20) axarr[2].set_ylabel('Precipitation [mm/h]', fontsize=20) axarr[2].set_title('Combi Precip', fontsize=25) ymin3, ymax3 = axarr[2].get_ylim() axarr[0].set_ylim([0, np.max([ymax1, ymax2, ymax3])]) axarr[1].set_ylim([0, np.max([ymax1, ymax2, ymax3])]) axarr[2].set_ylim([0, np.max([ymax1, ymax2, ymax3])]) plt.suptitle(precip['date_hourly_UTC'][wsl_ind_0].strftime('%b %Y'), fontsize=40) # Format x-axis and grid datelist = make_datelist(precip['date_hourly_UTC'][wsl_ind_0], precip['date_hourly_UTC'][wsl_ind_1], 1) for axx in axarr: axx.xaxis_date() axx.xaxis.set_ticks(datelist[0::24 * 7], minor=False) axx.xaxis.set_major_formatter(dt.DateFormatter('%d-%m-%Y %H')) axx.xaxis.grid(True, which='major', color='0.5', alpha=0.7, linestyle='--', lw=1.5) saveas = '\precip_comparison_' + treenetstation + '_' plt.savefig(figpath + saveas + nowdate.strftime('%Y_%m') + '.png', bbox_inches='tight')
def import_combiprecip(combiprecippath, processing_combiprecip, save_combiprecip):
    """Import (and optionally process) hourly CombiPrecip data.

    Parameters
    ----------
    combiprecippath : str
        Directory holding the yearly 'T_lor500_<year>_o' raw folders and/or
        the processed 'combiprecip_processed.npy' file.
    processing_combiprecip : str
        'yes' -> parse the raw yearly text files, fill temporal gaps with
        NaNs and return the result; 'no' -> load the previously processed
        .npy file instead.
    save_combiprecip : str
        'yes' -> store the processed result as 'combiprecip_processed.npy'.

    Returns
    -------
    dict
        Keys 'date_combiprecip' (list of datetime) and 'combiprecip'
        (2-D array; row 0 holds the station ids, following rows the
        hourly precipitation values).
    """
    if processing_combiprecip == 'yes':
        # ---- Load the raw yearly files ---------------------------------
        print('import combiprecip data')
        combiprecip_data = []
        for year in range(2005, 2020):
            print(year)
            # context manager so the handle is closed (original leaked it);
            # raw strings keep the Windows path bytes identical
            with open(combiprecippath + r'\T_lor500_' + str(year) +
                      r'_o\prad_lor500.s' + str(year), 'r') as f:
                for line in f:
                    combiprecip_data.append(line.strip().split())

        # ---- Drop the repeated header lines (keep the id line, row 1, once)
        header_rows = (combiprecip_data[0], combiprecip_data[1],
                       combiprecip_data[2])
        # set of indices -> O(1) membership below (original used a list)
        trash_ind = {i for i, val in enumerate(combiprecip_data)
                     if val in header_rows}
        combiprecip_list = [row for i, row in enumerate(combiprecip_data)
                            if i == 1 or i not in trash_ind]
        combiprecip_intermediate = np.array(combiprecip_list)

        # ---- Convert strings to numbers (row 0 = station ids) ----------
        # np.int / np.float were removed from NumPy (1.20 deprecation,
        # 1.24 removal); the builtins behave identically here.
        combiprecip = np.zeros(np.shape(combiprecip_intermediate))
        for i in range(combiprecip.shape[0]):
            if i == 0:
                # id line: the first 4 columns carry no station id
                for j in range(4, combiprecip_intermediate.shape[1]):
                    combiprecip[i, j] = int(combiprecip_intermediate[i, j])
            else:
                for j in range(0, combiprecip_intermediate.shape[1]):
                    combiprecip[i, j] = float(combiprecip_intermediate[i, j])

        # ---- Identify gaps and fill them with NaNs ---------------------
        print('process combiprecip data')
        # control datelist containing all hours from start to end
        startdate = datetime.datetime(int(combiprecip[1, 0]), int(combiprecip[1, 1]),
                                      int(combiprecip[1, 2]), int(combiprecip[1, 3]))
        enddate = datetime.datetime(int(combiprecip[-1, 0]), int(combiprecip[-1, 1]),
                                    int(combiprecip[-1, 2]), int(combiprecip[-1, 3]))
        datelist_control = make_datelist(startdate, enddate, 1)

        # Extract the datelist from the files.
        # Note: hour 24 UTC is mapped to 00 UTC of the next day.
        # Start at row 1 because row 0 is the id line.
        datelist_test = []
        for i in range(1, combiprecip.shape[0]):
            if int(combiprecip[i, 3]) == 24:
                # i-2 is the last entry appended so far (previous hour)
                datelist_test.append(datelist_test[i - 2] + datetime.timedelta(hours=1))
            else:
                datelist_test.append(datetime.datetime(int(combiprecip[i, 0]), int(combiprecip[i, 1]),
                                                       int(combiprecip[i, 2]), int(combiprecip[i, 3])))

        # Map each date to its FIRST row index: O(1) lookups instead of the
        # original O(n) 'in' + list.index per hour (quadratic overall).
        date_to_row = {}
        for ind, date in enumerate(datelist_test):
            date_to_row.setdefault(date, ind)

        # new container; first entry keeps the station ids
        combiprecip_new = {}
        combiprecip_new['date_combiprecip'] = []
        combiprecip_new['combiprecip'] = [combiprecip[0, 4:]]

        # walk the control datelist and append data or NaNs per hour
        one_hour = timedelta(hours=1)
        test_date = datelist_control[0]
        while test_date <= datelist_control[-1]:
            now_ind = date_to_row.get(test_date)
            if now_ind is None:
                # date missing in the raw data -> fill the hour with NaNs
                print(test_date)
                combiprecip_new['date_combiprecip'].append(test_date)
                combiprecip_new['combiprecip'].append(
                    np.full(combiprecip.shape[1] - 4, np.nan))
            else:
                combiprecip_new['date_combiprecip'].append(datelist_test[now_ind])
                # +1 offset: combiprecip row 0 is the id line
                combiprecip_new['combiprecip'].append(combiprecip[now_ind + 1, 4:])
            test_date += one_hour

        combiprecip_new['combiprecip'] = np.array(combiprecip_new['combiprecip'])

        if save_combiprecip == 'yes':
            np.save(combiprecippath + r'\combiprecip_processed.npy',
                    combiprecip_new)
        return combiprecip_new

    #---------------------------------------------------------
    # Import processed combiprecip data
    #---------------------------------------------------------
    if processing_combiprecip == 'no':
        print('import processed combiprecip data')
        # the .npy holds a pickled dict -> allow_pickle=True is required
        # (NumPy >= 1.16.3 defaults to False and would raise ValueError)
        combiprecip_data = np.load(combiprecippath + r'\combiprecip_processed.npy',
                                   allow_pickle=True)
        return combiprecip_data
def import_lwf_data(treenetstation, path, variable, process_treenet_data):
    """Import 10-min resolution LWF/TreeNet data for one station.

    Parameters
    ----------
    treenetstation : str
        Station name; mapped onto the file-name convention below.
    path : str
        Directory holding the raw ';'-separated txt exports and the
        processed '<station>_processed.npy' files.
    variable : str
        Variable name used both in the file name and the output key.
    process_treenet_data : str
        'yes' -> parse the raw txt file, fill gaps in the 10-min datelist
        with NaNs and save '<station>_processed.npy'; 'no' -> load that file.

    Returns
    -------
    dict
        'treenet<variable>' (float array) and 'treenetdate' (datetime array).
    """
    from functions import make_datelist

    # Load data from csv file
    print('import wsl data of ' + treenetstation)

    # Convert the string of the stationname (file-name convention)
    if treenetstation == 'LWF-Lens3':
        treenetstationname_new = 'lens'
    elif 'Neunkirch' in treenetstation:
        treenetstationname_new = 'neunkirch'
    elif treenetstation == 'Schaenis':
        treenetstationname_new = 'schänis'
    else:
        treenetstationname_new = treenetstation.lower()

    if process_treenet_data == 'yes':
        # Import the data (col 0: timestamp YYYYMMDDhhmm, col 1: value)
        treenet_data = {}
        treenet_data[treenetstation] = np.genfromtxt(
            path + '\\' + variable + '_data_2011010100_2019052123_' +
            treenetstationname_new + '.txt',
            unpack=True, missing_values='-', skip_header=3,
            usecols=[1, 2], delimiter=';')

        # Create control datelist
        # (earlier none of the files have data, checked manually)
        startdate = datetime.datetime(2011, 1, 1, 0, 0)
        enddate = datetime.datetime(2019, 5, 21, 23, 50)
        hstep = 1 / 6
        datelist_control = make_datelist(startdate, enddate, hstep)

        # Identify and fill gaps if there are any
        treenet_data_new = {}
        treenet_data_new[treenetstation] = [[], []]
        if treenet_data[treenetstation].shape[1] != len(datelist_control):
            print('process data of ' + treenetstation)
            # Read out datelist of the station; str() once per row instead
            # of five times (timestamps are floats like 201101010000.0)
            datelist_test = []
            for i in range(0, treenet_data[treenetstation].shape[1]):
                stamp = str(treenet_data[treenetstation][0, i])
                datelist_test.append(datetime.datetime(
                    int(stamp[0:4]), int(stamp[4:6]), int(stamp[6:8]),
                    int(stamp[8:10]), int(stamp[10:12])))

            # Map each date to its FIRST column index: O(1) lookups instead
            # of the original set-membership + list.index (O(n) per step)
            date_to_col = {}
            for ind, date in enumerate(datelist_test):
                date_to_col.setdefault(date, ind)

            # Compare test datelist to control datelist
            ten_minutes = datetime.timedelta(minutes=10)
            test_date = datelist_control[0]
            while test_date <= datelist_control[-1]:
                now_ind = date_to_col.get(test_date)
                if now_ind is None:
                    # date missing in the station data -> NaN
                    treenet_data_new[treenetstation][0].append(test_date)
                    treenet_data_new[treenetstation][1].append(np.nan)
                else:
                    treenet_data_new[treenetstation][0].append(
                        datelist_test[now_ind])
                    treenet_data_new[treenetstation][1].append(
                        treenet_data[treenetstation][1, now_ind])
                test_date += ten_minutes
        else:
            # Case where there are no missing data points
            treenet_data_new[treenetstation][0] = datelist_control
            treenet_data_new[treenetstation][1] = treenet_data[treenetstation][1, :]

        # Final call to convert list structure to numpy arrays
        treenet_final = {}
        treenet_final['treenet' + variable] = np.array(
            treenet_data_new[treenetstation])[1, :].astype(float)
        treenet_final['treenetdate'] = np.array(
            treenet_data_new[treenetstation])[0, :]

        # Save data to numpy file
        np.save(path + '\\' + treenetstation + '_processed.npy', treenet_final)

    if process_treenet_data == 'no':
        # the .npy holds a pickled dict -> allow_pickle=True is required
        # (NumPy >= 1.16.3 defaults to False and would raise ValueError)
        treenet_final = np.load(path + '\\' + treenetstation + '_processed.npy',
                                allow_pickle=True)[()]

    return treenet_final
# NOTE(review): this function was collapsed onto a few physical lines by an extraction step;
# the original indentation cannot be recovered with certainty (e.g. whether the >15 mm
# correction block applies only to the process_schänis == 'no' branch), so the code is left
# byte-identical rather than risking a behavior change in a rewrite.
# Purpose (from the visible code): import TreeNet/LWF precipitation for one station — either
# from per-station CSV exports (fixed station list, UTC+1 timestamps, hourly sums derived from
# 10-min values, then shifted to UTC) or from LWF .dat/.txt files (hourly or 10-min resolution,
# 32767 fill values converted to NaN, gaps filled from a control datelist) — and return a dict
# of date/precip arrays.
# NOTE(review): treenet_data[i, 3].astype(np.float) relies on np.float, which was removed in
# NumPy 1.24 — should become float / np.float64.
# NOTE(review): np.load of the pickled Schänis dict needs allow_pickle=True on NumPy >= 1.16.3
# — verify the installed version.
# NOTE(review): precip['precip_10minres'][i+1] in the >15 mm correction can raise IndexError
# when the last element exceeds 15 — a bounds check is needed.
def import_lwf_precipitation_data(treenetstation, treenetprecip_res, path, process_schänis): #--------------------------------------------------------- # Import modules and functions #--------------------------------------------------------- import csv from functions import make_datelist #--------------------------------------------------------- # Import precipitation data of TreeNet station #--------------------------------------------------------- # Load data from csv or txt file print('import wsl data of ' + treenetstation) # Different handling for the csv files than for txt files if treenetstation in [ 'Pfynwald', 'Bachtel', 'Muri_Beech', 'Muri_Spruce', 'Muri_Meteo', 'Riehen_Forest', 'Riehen_Meteo', 'Grosswangen' ]: treenet_data = [] f = open(path + '\\' + treenetstation + '_precipitation.csv', 'r') reader = csv.reader(f) for row in reader: treenet_data.append(row) treenet_data = np.array(treenet_data) # Extract precipitation and date precip = {} precip['date_UTC+1'] = [] precip['treenetprecip'] = [] for i in range(1, treenet_data.shape[0]): # Extract date from strings and convert to datetime format precip['date_UTC+1'].append(datetime.datetime(int(treenet_data[i,2][0:4]),int(treenet_data[i,2][5:7]),\ int(treenet_data[i,2][8:10]),int(treenet_data[i,2][11:13]),int(treenet_data[i,2][14:16]))) # Extract precipitation if treenet_data[i, 3] != 'NA': precip['treenetprecip'].append(treenet_data[i, 3].astype( np.float)) elif treenet_data[i, 3] == 'NA': precip['treenetprecip'].append(np.nan) else: # Only to check for strange cases print('strange') precip['treenetprecip'] = np.array(precip['treenetprecip']) #--------------------------------------------------------- # Calculate hourly sums and convert time if data sourc is csv #--------------------------------------------------------- # First check if first and last entry are start and finish of an hour # identify the indices to read out correct data for i in range(0, 6): if precip['date_UTC+1'][i].minute == 10: 
starting_ind = i break else: continue for i in range(-1, -7, -1): if precip['date_UTC+1'][i].minute == 0: ending_ind = i break else: continue # Reshape the list into hourly blocks in order to calculate sums # Save shape beforehand if ending_ind != -1: treenetprecip_shape = len( precip['treenetprecip'][starting_ind:ending_ind + 1]) for key in precip.keys(): precip[key] = np.reshape(precip[key][starting_ind:ending_ind+1],\ (int(treenetprecip_shape/6),6)) else: for key in precip.keys(): treenetprecip_shape = len( precip['treenetprecip'][starting_ind:]) precip[key] = np.reshape(precip[key][starting_ind:],\ (int(treenetprecip_shape/6),6)) # Calculate hourly sums precip['precip_hourly'] = [] for i in range(0, int(treenetprecip_shape / 6)): precip['precip_hourly'].append(sum(precip['treenetprecip'][i, :])) precip['precip_hourly'] = np.array(precip['precip_hourly']) # Create hourly datelist # Note: Date always refers to precip from the last hour!!! precip['date_hourly_UTC+1'] = make_datelist(precip['date_UTC+1'][0,-1],\ precip['date_UTC+1'][-1,-1],1) # Convert to UTC precip['date_hourly_UTC'] = [] for i in range(0, len(precip['date_hourly_UTC+1'])): precip['date_hourly_UTC'].append(precip['date_hourly_UTC+1'][i] - datetime.timedelta(hours=1)) precip['date_hourly_UTC'] = np.array(precip['date_hourly_UTC']) #--------------------------------------------------------- # Import WSL LWF data # Handling for the LWF data (in textfiles) #--------------------------------------------------------- else: # Convert the string of the stationname for special cases if treenetstation == 'LWF-Lens3': treenetstationname_new = 'lens' elif 'Neunkirch' in treenetstation: treenetstationname_new = 'neunkirch' elif treenetstation == 'Schaenis': treenetstationname_new = 'schänis' else: treenetstationname_new = treenetstation.lower() #--------------------------------------------------------- # Possibility to import 10minres data and convert them to hourly 
#--------------------------------------------------------- # case where hourly data is used if treenetprecip_res == 'hourly': # Import the data treenet_data = {} treenet_data[treenetstation] = np.loadtxt(path+'\precip_data_2011010100_2018123123_'+treenetstationname_new+'.dat',\ unpack=True,skiprows=9) # NaN handling: Convert the fill numbers (32767) into nans treenet_data[treenetstation][treenet_data[treenetstation] == 32767] = np.nan # Create datelist startdate = datetime.datetime(int(treenet_data[treenetstation][1,0]),int(treenet_data[treenetstation][2,0]),\ int(treenet_data[treenetstation][3,0]),int(treenet_data[treenetstation][4,0])) enddate = datetime.datetime(int(treenet_data[treenetstation][1,-1]),int(treenet_data[treenetstation][2,-1]),\ int(treenet_data[treenetstation][3,-1]),int(treenet_data[treenetstation][4,-1])) hstep = 1 # Save precipitation to numpy array precip = {} precip['date_hourly_UTC'] = make_datelist(startdate, enddate, hstep) precip['date_hourly_UTC'] = np.array(precip['date_hourly_UTC']) precip['precip_hourly'] = np.array( treenet_data[treenetstation][6, :]) # case where 10minres data is used elif treenetprecip_res == '10minres': # create exception for Schaenis (different data format) if treenetstation == 'Schaenis': if process_schänis == 'yes': # Import the data treenet_data = {} treenet_data[treenetstation] = np.genfromtxt(path+'\\precip_data_2011010100_2019052123_'+treenetstationname_new+'_freiland.txt',\ unpack=True,missing_values='-',skip_header=3,usecols=[1,2],delimiter=';') # Create control datelist startdate = datetime.datetime(2013, 1, 1, 0, 0) enddate = datetime.datetime(2019, 5, 21, 23, 50) hstep = 1 / 6 datelist_control = make_datelist(startdate, enddate, hstep) print('process data of ' + treenetstation) # Read out datelist of the station datelist_test = [] for i in range(0, treenet_data[treenetstation].shape[1]): datelist_test.append(datetime.datetime(int(str(treenet_data[treenetstation][0,i])[0:4]),\ 
int(str(treenet_data[treenetstation][0,i])[4:6]),\ int(str(treenet_data[treenetstation][0,i])[6:8]),\ int(str(treenet_data[treenetstation][0,i])[8:10]),\ int(str(treenet_data[treenetstation][0,i])[10:12]))) # Identify and fill gaps treenet_data_new = {} treenet_data_new[treenetstation] = [[], []] # Convert the test datelist to a set datelist_test_unordered = set(datelist_test) # Compare test datelist to control datelist ten_minutes = datetime.timedelta(minutes=10) test_date = datelist_control[0] while test_date <= datelist_control[-1]: # In case the respective date is missing in the station data if test_date not in datelist_test_unordered: treenet_data_new[treenetstation][0].append( test_date) treenet_data_new[treenetstation][1].append(np.nan) # In case the respecitve date is in the station data else: now_ind = datelist_test.index(test_date) treenet_data_new[treenetstation][0].append( datelist_test[now_ind]) treenet_data_new[treenetstation][1].append( treenet_data[treenetstation][1, now_ind]) test_date += ten_minutes # Save precipitation to numpy array precip = {} precip['date_10minres_UTC'] = np.array(datelist_control) precip['precip_10minres'] = np.array( treenet_data_new[treenetstation])[1, :].astype(float) # Save processed Schänis data np.save(path + '\Schänis_processed.npy', precip) if process_schänis == 'no': precip = np.load(path + '\Schänis_processed.npy') precip = precip[()] # Correct for the unrealistic values count = 0 for i in range(0, precip['precip_10minres'].shape[0]): if precip['precip_10minres'][i] > 15: if precip['precip_10minres'][i-1] > 15 and \ precip['precip_10minres'][i+1] > 15: precip['precip_10minres'][i] = np.nan count += 1 if precip['date_10minres_UTC'][i].year > 2017: precip['precip_10minres'][i] = np.nan if precip['date_10minres_UTC'][i].year < 2014: precip['precip_10minres'][i] = np.nan # Here starts the "normal" procedure else: # Import the data treenet_data = {} treenet_data[treenetstation] = 
np.loadtxt(path+'\precip_data_2011010100_2013123123_'+treenetstationname_new+'.dat',\ unpack=True,skiprows=9) treenet_data[treenetstation] = np.append(treenet_data[treenetstation],np.loadtxt(path+'\precip_data_2014010100_2016123123_'+treenetstationname_new+'.dat',\ unpack=True,skiprows=9),axis=1) treenet_data[treenetstation] = np.append(treenet_data[treenetstation],np.loadtxt(path+'\precip_data_2017010100_2019052123_'+treenetstationname_new+'.dat',\ unpack=True,skiprows=9),axis=1) # NaN handling: Convert the fill numbers (32767) into nans treenet_data[treenetstation][treenet_data[treenetstation] == 32767] = np.nan # Create datelist startdate = datetime.datetime(int(treenet_data[treenetstation][1,0]),int(treenet_data[treenetstation][2,0]),\ int(treenet_data[treenetstation][3,0]),int(treenet_data[treenetstation][4,0]),int(treenet_data[treenetstation][5,0])) enddate = datetime.datetime(int(treenet_data[treenetstation][1,-1]),int(treenet_data[treenetstation][2,-1]),\ int(treenet_data[treenetstation][3,-1]),int(treenet_data[treenetstation][4,-1]),int(treenet_data[treenetstation][5,-1])) hstep = 1 / 6 # Save precipitation to numpy array precip = {} precip['date_10minres_UTC'] = make_datelist( startdate, enddate, hstep) precip['date_10minres_UTC'] = np.array( precip['date_10minres_UTC']) precip['precip_10minres'] = np.array( treenet_data[treenetstation][6, :]) # Calculate hourly values for i in range(0, 6): if precip['date_10minres_UTC'][i].minute == 10: starting_ind = i break else: continue for i in range(-1, -7, -1): if precip['date_10minres_UTC'][i].minute == 0: ending_ind = i break else: continue if ending_ind != -1: precip_shape = precip['precip_10minres'][ starting_ind:ending_ind + 1].shape[0] precip['precip_10minres_reshaped'] = np.reshape(precip['precip_10minres'][starting_ind:ending_ind+1],\ (int(precip_shape/6),6)) precip['date_10minres_UTC_reshaped'] = np.reshape(precip['date_10minres_UTC'][starting_ind:ending_ind+1],\ (int(precip_shape/6),6)) else: 
precip_shape = precip['precip_10minres'][starting_ind:].shape[ 0] precip['precip_10minres_reshaped'] = np.reshape(precip['precip_10minres'][starting_ind:],\ (int(precip_shape/6),6)) precip['date_10minres_UTC_reshaped'] = np.reshape(precip['date_10minres_UTC'][starting_ind:],\ (int(precip_shape/6),6)) # Calculate hourly sums precip['precip_hourly'] = [] for i in range(0, int(precip_shape / 6)): precip['precip_hourly'].append( sum(precip['precip_10minres_reshaped'][i, :])) precip['precip_hourly'] = np.array(precip['precip_hourly']) # Create hourly datelist # Note: Date always refers to precip from the last hour!!! precip['date_hourly_UTC'] = make_datelist(precip['date_10minres_UTC_reshaped'][0,-1],\ precip['date_10minres_UTC_reshaped'][-1,-1],1) precip['date_hourly_UTC'] = np.array(precip['date_hourly_UTC']) return precip