            mask = np.ma.masked_where(modis_proj_array[1] == 0, modis_bin_array)  # remember to use the proj tif 2nd layer as the mask for binary tif creation
            cloud_cover_pixel = (mask == -999).sum()
            if cloud_cover_pixel == 0:
                break

            if cloud_cover_pixel > 0:
                valid_date.drop(time)
                print 'drop bin file at {}'.format(modis_bin_path)
            else:
                print modis_bin_path
                print os.path.isfile(modis_bin_path)
                os.remove(modis_bin_path)
                new_bin_array = np.where(modis_proj_array[1] != 0, modis_bin_array, -999)
                array_to_raster(output_path=modis_bin_path,
                                source_path=modis_proj_path,
                                array_data=new_bin_array)
                print 'cloud cover interpolation for {}'.format(modis_bin_path)
        else:
            valid_date[bin_col_name].ix[time] = modis_bin_path
            valid_date.to_csv(valid_date_path)
    else:
        print 'failed to create modis binary file'

print 'snow_cover_3: swe and modis binary file is done'
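# --- Hedged sketch (not from the original script) -----------------------------
# array_to_raster() is called above but its definition sits outside this
# excerpt.  A minimal version with the GDAL Python bindings could look like the
# helper below, assuming a single-band GeoTIFF output whose size, geotransform
# and projection are copied from source_path; the actual helper may differ.
from osgeo import gdal


def array_to_raster_sketch(output_path, source_path, array_data, no_data=-999):
    source = gdal.Open(source_path)                   # template raster for size/CRS
    driver = gdal.GetDriverByName('GTiff')
    out = driver.Create(output_path, source.RasterXSize, source.RasterYSize,
                        1, gdal.GDT_Float32)
    out.SetGeoTransform(source.GetGeoTransform())     # copy georeferencing from the template
    out.SetProjection(source.GetProjection())
    band = out.GetRasterBand(1)
    band.WriteArray(array_data)
    band.SetNoDataValue(no_data)
    band.FlushCache()
    out = None                                        # close the dataset to flush to disk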
    if calculate_total_sublimation:
        var_array_sum_dict['total_sub'] = np.where(
            mask,
            var_array_sum_dict['ueb_Ec'] + var_array_sum_dict['ueb_Es'],
            -99999.0)
        var_array_sum_dict['water_loss'] = np.where(
            mask,
            100 * var_array_sum_dict['total_sub'] / var_array_sum_dict['uebPrec'],
            -99999.0)

    # export array sum as tif
    for var in var_array_sum_dict.keys():
        sum_tif_path = os.path.join(stats_folder, 'sum_{}_{}.tif'.format(var, year))
        if not os.path.isfile(sum_tif_path):
            array_to_raster(output_path=sum_tif_path,
                            source_path=tif_path,
                            array_data=var_array_sum_dict[var],
                            no_data=-99999.0)

# step2 get mean from array sum tif ##############################################
# make sure to add the total_sub and water_loss variable
if calculate_total_sublimation:
    var_list.extend(['total_sub', 'water_loss'])

# get domain average for annual sum
domain_ave_df = pd.DataFrame(index=range(start_year, end_year), columns=var_list)

for year in domain_ave_df.index:
    for var in domain_ave_df.columns:
        sum_tif_path = os.path.join(stats_folder, 'sum_{}_{}.tif'.format(var, year))
        if os.path.isfile(sum_tif_path):
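# --- Hedged sketch (not from the original script) -----------------------------
# The body of the loop above is cut off in this excerpt.  One way the domain
# average could be computed from each annual sum tif is shown below, assuming
# GDAL reads the raster and -99999.0 marks nodata; the helper name and the
# DataFrame assignment are illustrative only, e.g. inside the loop:
#     domain_ave_df.at[year, var] = domain_average_from_tif(sum_tif_path)
from osgeo import gdal


def domain_average_from_tif(sum_tif_path, no_data=-99999.0):
    array = gdal.Open(sum_tif_path).GetRasterBand(1).ReadAsArray().astype('float64')
    valid = array != no_data              # keep only in-domain (non-nodata) cells
    return array[valid].mean()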
    var_array_annual_mean_dict['total_sub'] = np.where(
        mask,
        (var_array_sum_dict['ueb_Ec'] + var_array_sum_dict['ueb_Es']) / yr,
        -99999.0)
    var_array_sum_dict['water_loss'] = np.where(
        mask,
        100 * var_array_sum_dict['total_sub'] / var_array_sum_dict['uebPrec'],
        -99999.0)
    var_array_annual_mean_dict['water_loss'] = np.where(
        mask,
        100 * var_array_annual_mean_dict['total_sub'] / var_array_annual_mean_dict['uebPrec'],
        -99999.0)

# step 2 export array sum and annual mean as tif #############################################################
for var in var_array_sum_dict.keys():
    array_to_raster(output_path=os.path.join(stats_folder, 'sum_{}.tif'.format(var)),
                    source_path=tif_path,
                    array_data=var_array_sum_dict[var],
                    no_data=-99999.0)
    array_to_raster(output_path=os.path.join(stats_folder, 'annual_mean_{}.tif'.format(var)),
                    source_path=tif_path,
                    array_data=var_array_annual_mean_dict[var],
                    no_data=-99999.0)

# step3 calculate the domain average #######################################################
if calculate_total_sublimation:
    var_list.extend(['total_sub', 'water_loss'])

# calculate the domain average of the sum and annual mean for each variable
domain_ave_df = pd.DataFrame(index=var_list, columns=['sum', 'annual_mean'])
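# --- Hedged sketch (not from the original script) -----------------------------
# The excerpt stops right after domain_ave_df is created.  One way the table
# could be filled is sketched below, assuming in-domain cells carry real values
# and out-of-domain cells carry the -99999.0 nodata fill used above; the output
# file name is illustrative only.
for var in var_list:
    sum_array = var_array_sum_dict[var]
    mean_array = var_array_annual_mean_dict[var]
    domain_ave_df.at[var, 'sum'] = sum_array[sum_array != -99999.0].mean()
    domain_ave_df.at[var, 'annual_mean'] = mean_array[mean_array != -99999.0].mean()
domain_ave_df.to_csv(os.path.join(stats_folder, 'domain_ave_stats.csv'))  # illustrative name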
        np.save(file_path, stack_array)

        stack_sum = np.where(mask == False, np.nansum(stack_array, axis=0), -999)
        np.save(file_path + '_sum', stack_sum)
        stack_sum_dict[data_type] = stack_sum

        ma = np.ma.masked_equal(stack_sum, -999, copy=False)
        plt.imshow(ma, interpolation='nearest')
        plt.colorbar()
        plt.title('plot of sum of {} type pixels'.format(data_type))
        plt.savefig(file_path + '.png')
        plt.clf()

        # export result as raster data
        array_to_raster(output_path=file_path + '.tif',
                        source_path=swe_proj_path,
                        array_data=stack_sum,
                        no_data=-1)
    except Exception as e:
        continue

# calculate accurate data and evaluation #########################################################
print 'step3: calculate error stats '
snow = np.where(mask == False, (stack_sum_dict['B'] + stack_sum_dict['D']), -999.0)
dry = np.where(mask == False, (stack_sum_dict['A'] + stack_sum_dict['C']), -999.0)
snow_error = np.where(mask == False, stack_sum_dict['B'] * 1.0 / snow, -999.0)
dry_error = np.where(mask == False, stack_sum_dict['C'] * 1.0 / dry, -999.0)

for data_type, stack in zip(['snow', 'dry', 'snow error', 'dry error'],
                            [snow, dry, snow_error, dry_error]):
    try:
        file_path = os.path.join(stats_folder, '{}_{}'.format(data_type, model))
        np.save(file_path, stack)
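# --- Hedged sketch (not from the original script) -----------------------------
# The A/B/C/D categories summed above are defined earlier in the script and are
# not shown in this excerpt.  A convention that is consistent with the error
# formulas (B and C are the error counts, snow = B + D, dry = A + C) is a
# per-pixel contingency between model and MODIS snow-cover binaries; the
# function below is illustrative only.
def classify_pixels_sketch(model_bin, modis_bin):
    a = (model_bin == 0) & (modis_bin == 0)   # both dry
    b = (model_bin == 0) & (modis_bin == 1)   # model dry, MODIS snow
    c = (model_bin == 1) & (modis_bin == 0)   # model snow, MODIS dry
    d = (model_bin == 1) & (modis_bin == 1)   # both snow
    return {'A': a, 'B': b, 'C': c, 'D': d}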
    days_of_snow = np.where(nan_mask == False, np.nansum(all_snow_stack, axis=0), np.nan)
    file_path = os.path.join(stats_folder, 'days_of_snow_{}_{}'.format(name, swe_bin_col))
    np.save(file_path, days_of_snow)

    fig = plt.imshow(days_of_snow, interpolation='nearest')
    plt.colorbar()
    plt.title('plot of {}'.format('days of snow from {}'.format(name)))
    plt.savefig(file_path + '.png')
    plt.clf()

    # export result as raster data
    array_to_raster(output_path=file_path + '.tif',
                    source_path=swe_bin_path,
                    array_data=days_of_snow)

# stack layers and calculate oa values
nan_mask = np.isnan(model)
all_stack = np.stack(layer_stack)
missing_data = np.where(nan_mask == False, np.isnan(all_stack).sum(axis=0), np.nan)
mismatch_data = np.where(nan_mask == False, np.nansum(abs(all_stack), axis=0), np.nan)
oa_data = 1 - mismatch_data.astype('float32') / (all_stack.shape[0] - missing_data)

result = {
    'oa': oa_data,
    'missing': missing_data,
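# --- Hedged sketch (not from the original script) -----------------------------
# layer_stack is built earlier in the script and the excerpt is truncated above.
# The OA formula (1 - mismatch / (number of layers - missing)) suggests each
# layer stores a per-date model-minus-observation difference, so |diff| = 1
# marks a mismatch and NaN marks missing data.  An illustrative per-date layer:
import numpy as np


def comparison_layer_sketch(model_bin, obs_bin):
    layer = model_bin.astype('float32') - obs_bin.astype('float32')   # 0 = agree, +-1 = disagree
    layer[np.isnan(model_bin) | np.isnan(obs_bin)] = np.nan           # propagate missing pixels
    return layer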