def compute_internal_validation(ui):
    """Run the complete internal-validation pipeline (M1/M2 grids, time series).

    Reads all settings from the Qt ``ui`` widgets, publishes them through the
    module-level globals (``data_path``, ``min_degree``, ``max_degree``,
    ``gauss``, ``groops_bin``, ``grid_file``, ``buffer_grid_file``) consumed by
    the helper functions, then triggers the individual computation steps.
    The push button ``pb_id003`` doubles as a progress indicator.
    """
    logging.info('Internal validation computations started.')
    # signal "running" on the trigger button
    ui.pb_id003.setText('In progress...')
    ui.pb_id003.setStyleSheet(
        "background-color: rgb(255, 250, 205); color: rgb(255,0,0);")
    # set data path
    global data_path
    data_path = ui.le_id223.text()
    # spherical-harmonic degree range and Gauss filter radius
    global min_degree
    global max_degree
    global gauss
    min_degree = ui.sb_id018.value()
    max_degree = ui.sb_id019.value()
    gauss = ui.dsb_id014.value()
    # groops binary (alternative binary when cb_id079 is checked)
    global groops_bin
    groops_bin = ui.le_id218.text()
    if ui.cb_id079.isChecked():
        groops_bin = ui.le_id220.text()
    # target grid: derived from a polygon (rb_id057) or taken from a user file (rb_id100)
    global grid_file
    global buffer_grid_file
    temp_dir = tempfile.mkdtemp()
    grid_file = os.path.join(temp_dir, 'tmp.grid')
    buffer_grid_file = os.path.join(temp_dir, 'tmp_buffer.grid')
    try:
        if ui.rb_id057.isChecked():
            polygon = compute_grace._region_settings(ui)
            lon, lat, area = dfr.rgi_grid_in_polygon(
                polygon, ignore_zeros=ui.cb_id075.isChecked())
            grid_file_global = pkg_resources.resource_filename(
                'picasso.data', 'grids/geographical_30-30.grid')
            lon0, lat0, h0, area0 = np.genfromtxt(grid_file_global,
                                                  skip_header=2, unpack=True)
            # nearest-neighbour lookup of the global grid's cell areas
            area_i = griddata((lon0, lat0), (area0), (lon, lat),
                              method='nearest')
            groops_interface.make_grid_file(groops_bin, grid_file,
                                            lon, lat, 0, area_i)
        if ui.rb_id100.isChecked():
            grid_file = ui.le_id236.text()
        # make sure the project directory tree exists and is clean
        project_data_functions.create_project_directories(ui)
        project_data_functions.clean_up_internal_validation_directories(ui)
        # M1, M2 computations and the resulting time series
        _compute_M1_grids(ui)
        _compute_M2_grids(ui)
        _compute_M1_syn_grids(ui)
        _time_series(ui)
    finally:
        # always remove the scratch directory and restore the button, even if
        # one of the computation steps raises (the original leaked both)
        shutil.rmtree(temp_dir)
        ui.pb_id003.setText('Compute Internal Validation')
        ui.pb_id003.setStyleSheet(
            "background-color: rgb(239, 240, 241); color: rgb(0,0,0);")
    logging.info('Internal validation computations finished.')
def _compute_M2_grids(ui):
    """Compute the monthly M2 point-mass grids from filtered GRACE fields.

    For every month in the configured date range, a buffered grid is
    synthesized via GROOPS (``_compute_gfc2grid``), converted from TWS [m]
    to point masses [kg] with a constant sea-water density, and written to
    ``<project>/validation/internal/grids/m2_<YYYY-MM>-pointmass.txt``.
    Relies on the module globals set by ``compute_internal_validation``.
    """
    logging.info('Computation of M2 grids started.')
    # paths
    project_name = str(ui.le_id001.text())
    project_dir = str(ui.le_id002.text())
    grace_path = os.path.join(project_dir, project_name, 'grace',
                              'single_solution')
    grids_path = os.path.join(grace_path, 'grids')
    internal_validation_path = os.path.join(project_dir, project_name,
                                            'validation', 'internal')
    # constant water density [kg/m^3]
    density = 1025.0
    # fixed target grid (polygon or user file): buffer grid is time-invariant
    if (ui.rb_id057.isChecked() or ui.rb_id100.isChecked()):
        lon, lat, _, _ = np.genfromtxt(grid_file, skip_header=2, unpack=True)
        _compute_buffer_grid(ui, lon, lat)
    # loop over all dates
    mjd_start, mjd_end = compute_grace._date_settings(ui)
    mjd = mjd_start
    while mjd <= mjd_end:
        mjd1, mjd2 = date_functions.mjd2mjdmrange(mjd)
        mjd = (mjd2 + 1)
        ymstr = date_functions.mjd2ymstring(mjd1, '%Y-%m')[0].decode("utf-8")
        grid_file_pm = os.path.join(grids_path, '%s-grid.txt' % (ymstr))
        pointmass_file = os.path.join(internal_validation_path, 'grids',
                                      'm2_%s-pointmass.txt' % (ymstr))
        temp_dir = tempfile.mkdtemp()
        tmp_grid = os.path.join(temp_dir, 'tmp.grid')
        try:
            _compute_gfc2grid(ui, groops_bin, buffer_grid_file, tmp_grid,
                              mjd1, min_degree, max_degree, gauss)
            try:
                if ui.rb_id058.isChecked():
                    # monthly point-mass positions: rebuild the buffer grid
                    # for this month and synthesize again
                    lon, lat, _, _ = np.genfromtxt(grid_file_pm,
                                                   skip_header=2,
                                                   unpack=True)
                    _compute_buffer_grid(ui, lon, lat)
                    _compute_gfc2grid(ui, groops_bin, buffer_grid_file,
                                      tmp_grid, mjd1, min_degree,
                                      max_degree, gauss)
            except Exception:
                # best effort: skip months with missing/broken grid files
                logging.exception('Skipping M2 grid for %s', ymstr)
                continue
            if not os.path.isfile(tmp_grid):
                continue
            lon, lat, h, area_i, tws = np.genfromtxt(tmp_grid, skip_header=2,
                                                     unpack=True)
            # unit-sphere areas -> km^2
            area = (area_i * defaults.EARTH_EQUATORIAL_RADIUS() *
                    defaults.EARTH_EQUATORIAL_RADIUS()) * 1e-6
            # TWS [m] * area [m^2] * density [kg/m^3] -> mass [kg]
            kg = tws * (area * 1e6) * density
            groops_interface.make_grid_file(groops_bin, pointmass_file,
                                            lon, lat, h, area,
                                            *(kg, np.array(0)))
            logging.info('Computing grid: %s', pointmass_file)
        finally:
            # remove the per-month scratch dir even when a month is skipped
            # via continue (the original leaked it on every skip)
            shutil.rmtree(temp_dir)
    logging.info('Computation of M2 grids finished.')
def _compute_buffer_grid(ui, lon0, lat0):
    """Write a buffered subset of the global 30'x30' grid to buffer_grid_file.

    Selects every point of the global geographical grid that lies within
    ``dsb_id015`` km (UI spin box) of any input point (``lon0``/``lat0``),
    measuring distances in an azimuthal-equidistant projection centred at
    (0, 0), and writes the selection via GROOPS to the module-global
    ``buffer_grid_file``.
    """
    buffer_size_km = ui.dsb_id015.value()
    grid_file_global = pkg_resources.resource_filename(
        'picasso.data', 'grids/geographical_30-30.grid')
    lon, lat, h, area = np.genfromtxt(grid_file_global, skip_header=2,
                                      unpack=True)
    central_point = (0, 0)
    u, v = map_projections.azimuthal_equidistant(
        lon, lat, central_point=central_point)
    ix = np.zeros(len(lon), dtype=bool)
    for jx in range(len(lon0)):
        point = points.Point2D(lon0[jx], lat0[jx])
        # NOTE: the original bound this pair to the names `ui, vi`,
        # shadowing the Qt `ui` parameter; renamed to avoid the hazard.
        u_i, v_i = map_projections.azimuthal_equidistant(
            point.x, point.y, central_point=central_point)
        d_test = np.sqrt((u - u_i)**2.0 + (v - v_i)**2.0)
        dx = d_test <= (buffer_size_km * 1e3)
        ix[dx] = True
    groops_interface.make_grid_file(groops_bin, buffer_grid_file,
                                    lon[ix], lat[ix], h[ix], area[ix])
def _compute_M1_syn_grids(ui):
    """Compute synthetic M1 grids and the forward-adjusted point masses.

    Per month: the M1 point masses are rescaled to the M2 total (IAF),
    forward-modelled through the spherical-harmonic filter chain
    (grid -> gfc -> filtered grid), and the signal lost in that round trip
    determines the final adjustment factor (FAF) applied to the M1 masses.
    Writes m1_star_/m2_syn_/m1_syn_<YYYY-MM>-pointmass.txt under
    ``<project>/validation/internal/grids``.
    """
    logging.info('Computation of M1_syn grids started.')
    # paths
    project_name = str(ui.le_id001.text())
    project_dir = str(ui.le_id002.text())
    internal_validation_path = os.path.join(project_dir, project_name,
                                            'validation', 'internal')
    # global 30'x30' geographical grid (lon, lat, h, area columns)
    grid_file_global = pkg_resources.resource_filename(
        'picasso.data', 'grids/geographical_30-30.grid')
    grid0 = np.array(np.genfromtxt(grid_file_global, skip_header=2), ndmin=2)
    # indices of the target-grid points within the global grid
    if (ui.rb_id057.isChecked() or ui.rb_id100.isChecked()):
        lon, lat, _, _ = np.genfromtxt(grid_file, skip_header=2, unpack=True)
        ix = [_return_index_closest_point(np.array([lon[jx], lat[jx]]),
                                          grid0[:, 0:2])
              for jx in range(np.size(lon))]
    # constant water density [kg/m^3]
    density = 1025.0
    # loop over all dates
    mjd_start, mjd_end = compute_grace._date_settings(ui)
    mjd = mjd_start
    while mjd <= mjd_end:
        mjd1, mjd2 = date_functions.mjd2mjdmrange(mjd)
        mjd = (mjd2 + 1)
        ymstr = date_functions.mjd2ymstring(mjd1, '%Y-%m')[0].decode("utf-8")
        m1_file = os.path.join(internal_validation_path, 'grids',
                               'm1_%s-pointmass.txt' % (ymstr))
        m2_file = os.path.join(internal_validation_path, 'grids',
                               'm2_%s-pointmass.txt' % (ymstr))
        m1_star_file = os.path.join(internal_validation_path, 'grids',
                                    'm1_star_%s-pointmass.txt' % (ymstr))
        m2_syn_file = os.path.join(internal_validation_path, 'grids',
                                   'm2_syn_%s-pointmass.txt' % (ymstr))
        m1_syn_file = os.path.join(internal_validation_path, 'grids',
                                   'm1_syn_%s-pointmass.txt' % (ymstr))
        try:
            lon1, lat1, h1, area1, x1, sigma_x1 = np.genfromtxt(
                m1_file, skip_header=2, unpack=True)
            lon2, lat2, h2, area2, x2, sigma_x2 = np.genfromtxt(
                m2_file, skip_header=2, unpack=True)
        except Exception:
            # skip months for which M1/M2 grids were not produced
            continue
        kg1 = np.sum(x1)
        kg2 = np.sum(x2)
        # initial adjustment factor: scale M1 masses to the M2 total
        IAF = kg2 / kg1
        groops_interface.make_grid_file(groops_bin, m1_star_file,
                                        lon1, lat1, h1, area1,
                                        *(x1 * IAF, np.array(0)))
        # distribute the adjusted masses (as TWS) onto the global grid
        if (ui.rb_id057.isChecked() or ui.rb_id100.isChecked()):
            m1_star_i = np.zeros(grid0.shape[0])
            m1_star_i[ix] = x1 * IAF / ((area1 * 1e6) * density)
        else:
            m1_star_i = griddata((lon1, lat1),
                                 x1 * IAF / ((area1 * 1e6) * density),
                                 (grid0[:, 0], grid0[:, 1]), method='cubic')
        temp_dir = tempfile.mkdtemp()
        try:
            tmp_grid1 = os.path.join(temp_dir, 'tmp1.grid')
            tmp_grid2 = os.path.join(temp_dir, 'tmp2.grid')
            groops_interface.make_grid_file(groops_bin, tmp_grid1,
                                            grid0[:, 0], grid0[:, 1],
                                            grid0[:, 2], grid0[:, 3],
                                            *(m1_star_i, np.array(0)))
            # forward model: grid -> spherical harmonics -> filtered grid
            groops_interface.compute_grid_to_gfc_to_grid(
                groops_bin, tmp_grid1, tmp_grid2, buffer_grid_file,
                gauss=gauss)
            if not os.path.isfile(tmp_grid2):
                continue
            lon, lat, h, area_i, tws = np.genfromtxt(tmp_grid2, skip_header=2,
                                                     unpack=True)
            area = (area_i * defaults.EARTH_EQUATORIAL_RADIUS() *
                    defaults.EARTH_EQUATORIAL_RADIUS()) * 1e-6
            kg = tws * (area * 1e6) * density
            kg2_syn = np.sum(kg)
            groops_interface.make_grid_file(groops_bin, m2_syn_file,
                                            lon, lat, h, area,
                                            *(kg, np.array(0)))
            # compensate the signal lost in the filter round trip in the
            # final adjustment factor applied to the M1 masses
            kg2_syn_star = kg2 * (1.0 + (1.0 - kg2_syn / kg2))
            FAF = kg2_syn_star / kg1
            groops_interface.make_grid_file(groops_bin, m1_syn_file,
                                            lon1, lat1, h1, area1,
                                            *(x1 * FAF, np.array(0)))
            logging.info('Computing grid: %s', m1_syn_file)
        finally:
            # remove the scratch dir even when the month is skipped via
            # continue (the original leaked it in that case)
            shutil.rmtree(temp_dir)
    logging.info('Computation of M1_syn grids finished.')
def _update_LS(parameters):
    """Build and solve the least-squares system for one GA candidate.

    Updates the module-global ``LS`` for the genetic-algorithm parameter
    vector ``parameters``.  Previously solved candidates are looked up in
    ``cg.ga_solutions`` and reused.  Depending on which quantities are GA
    parameters, the GROOPS normal equations are either reused from the
    single-solution run or rebuilt (locally, or via a PBS/qsub batch job on
    the LEO cluster).  The solved system is appended to ``cg.ga_solutions``.
    """
    # global vars
    global LS
    mjd1, mjd2 = date_functions.mjd2mjdmrange(cg.current_mjd)
    # return early if an identical parameter vector was already solved
    LS_check = False
    for ls_dict_i in cg.ga_solutions:
        if len(ls_dict_i['parameters']) == len(parameters):
            LS_check = np.all([
                parameters[ix].value == ls_dict_i['parameters'][ix].value
                for ix in range(len(parameters))
            ])
        if LS_check:
            LS = ls_dict_i['LS']
            return
    # ls dictionary caching this candidate's inputs and solution
    ls_dict = {}
    ls_dict['parameters'] = parameters
    # unique id for this candidate's normals/grid files
    ls_dict['ga_id'] = str(uuid.uuid4())
    normals_path = cg.current_normals_path + ls_dict['ga_id']
    grid_file = cg.current_normals_path.replace('grace_', '').replace(
        'normals', 'grids')
    grid_file += ls_dict['ga_id'] + '-grid.txt'
    # possible optimization candidates (filled from the parameter labels)
    pm_count = None
    pm_lon = []
    pm_lat = []
    pm_depth = []
    pm_magnitude = []
    pm_grid_resolution = None
    pm_grid_type = None
    regularization = 0
    # loop over parameters and dispatch on their labels
    for p in parameters:
        if p.label == 'pm_count':
            pm_count = np.round(p.value).astype(int)
        elif p.label == 'pm_lon':
            pm_lon.append(p.value)
        elif p.label == 'pm_lat':
            pm_lat.append(p.value)
        elif p.label == 'pm_depth':
            pm_depth.append(p.value)
        elif p.label == 'pm_magnitude':
            pm_magnitude.append(p.value)
        elif p.label == 'regularization':
            # parameter is the exponent of the regularization weight
            regularization = (10.0**p.value)
        elif p.label == 'pm_grid_resolution':
            pm_grid_resolution = p.value
        elif p.label == 'pm_grid_type':
            # clamp the (floored) grid type to the supported maximum of 6
            pm_grid_type = np.floor(p.value).astype(int)
            pm_grid_type = 6 if pm_grid_type > 6 else pm_grid_type
    # reassign defaults for quantities that are not GA parameters
    # NOTE(review): `not pm_grid_type` also treats a legitimate grid type 0
    # as "unset" — confirm 0 is not a valid type
    if not pm_grid_type:
        pm_grid_type = cg.current_pm_grid_type
    if pm_grid_resolution is not None:
        # derive point positions from a generated grid inside the polygon
        resolution = _map_grid_resolution(pm_grid_type, pm_grid_resolution)
        temp_dir = tempfile.mkdtemp()
        tmp_grid = os.path.join(temp_dir, 'tmp.grid')
        groops_interface.make_specific_grid_in_polygon_file(
            cg.current_groops_bin, tmp_grid, cg.current_polygon_file,
            pm_grid_type, resolution)
        loni, lati, _, _ = np.genfromtxt(tmp_grid, skip_header=2, unpack=True)
        pm_lon, pm_lat = loni.tolist(), lati.tolist()
        pm_count = np.size(pm_lon)
        shutil.rmtree(temp_dir)
    if not pm_count:
        pm_count = cg.current_pm_count
    if not pm_lon:
        pm_lon = cg.current_pm_lon
    if not pm_lat:
        pm_lat = cg.current_pm_lat
    if not pm_depth:
        pm_depth = cg.current_pm_depths
    # check if (re-)computation of normals is necessary; if no geometry
    # quantity is optimized, reuse the single-solution normals
    if (not cg.pm_count_is_parameter) and (
            not cg.pm_positions_are_parameters) and (
                not cg.pm_depths_are_parameters) and (
                    not cg.pm_grid_type_is_parameter) and (
                        not cg.pm_grid_resolution_is_parameter):
        normals_path = normals_path.replace('genetic_algorithm',
                                            'single_solution').replace(
                                                '-' + ls_dict['ga_id'], '')
        ls_dict['ga_id'] = None
        LS = groops_files.read_groops_normals(normals_path)
    else:
        # depths are given in km (positive down); GROOPS expects metres
        groops_interface.make_grid_file(cg.current_groops_bin, grid_file,
                                        pm_lon, pm_lat,
                                        (-np.array(pm_depth) * 1e3),
                                        cg.current_pm_area,
                                        **{'point_count': pm_count})
        if not cg.picasso_on_leo:
            # local run: build the normals synchronously
            groops_interface.build_pointmass_normals_ga(
                cg.current_groops_bin,
                mjd1,
                mjd2,
                grid_file,
                normals_path,
                love_enabled=cg.pm_magnitudes_are_parameters)
        else:
            # cluster run: get the groops command, submit it as a PBS job
            # and poll qstat until the queue is empty
            ret = groops_interface.build_pointmass_normals_ga(
                cg.current_groops_bin,
                mjd1,
                mjd2,
                grid_file,
                normals_path,
                leo=True,
                love_enabled=cg.pm_magnitudes_are_parameters)
            # NOTE(review): core count 40 and queue/walltime are hard-coded
            # for the LEO cluster setup
            groops_command = 'mpiexec -n %d %s ' % (
                40, ret.replace('bin/groops', 'bin/pgroops'))
            temp_dir = tempfile.mkdtemp()
            pbs_file = os.path.join(temp_dir, 'tmp.pbs')
            f = open(pbs_file, "w")
            f.write('#!/bin/bash\n')
            f.write('#PBS -N picassoga\n')
            f.write('#PBS -q normal\n')
            f.write('#PBS -l walltime=23:59:00\n')
            f.write('#PBS -d /scratch/sreimond/myrun\n')
            f.write('#PBS -l nodes="1:ppn=40"\n')
            f.write('module load mpich\n')
            f.write(groops_command)
            f.close()
            os.system('/usr/bin/qsub %s' % pbs_file)
            qstat = 'busy'
            while qstat:
                # poll the batch queue once a minute; a failing qstat call
                # (empty queue) ends the wait loop
                try:
                    qstat = subprocess.check_output("/usr/bin/qstat")
                except subprocess.CalledProcessError as grepexc:
                    print("error code", grepexc.returncode, grepexc.output)
                    qstat = False
                time.sleep(60)
            shutil.rmtree(temp_dir)
        groops_interface.eliminate_solve_pointmass_normals_ga(
            cg.current_groops_bin, mjd1, mjd2, grid_file, normals_path)
        LS = groops_files.read_groops_normals(normals_path)
    # solve the (regularized) normal equations
    R = matrices.IdentityMatrix(LS.col_count)
    R.elements *= regularization
    LS.define_regularization_matrix(R)
    LS.regularized_normal_equation = matrices.MatrixEquation()
    N_regularized = LS.normal_equation.A.add(LS.R)
    LS.regularized_normal_equation.define_coefficient_matrix(N_regularized)
    LS.regularized_normal_equation.define_right_hand_side(LS.normal_equation.b)
    LS.solve()
    # special regularization codes: VCE, L-curve, GCV
    if cg.current_regularization == -200:
        LS.determine_vce_regularization(sig1=1.0, sigu=1e2)
    elif cg.current_regularization == -300:
        LS = rg.regularization_matlab(normals_path, method='lcurve')
    elif cg.current_regularization == -400:
        LS = rg.regularization_matlab(normals_path, method='gcv')
    # if magnitudes are optimized directly, overwrite the solution vector
    # and recompute the residual/solution square sums
    if pm_magnitude:
        LS.x = matrices.return_matrix_from_numpy_array(np.array(pm_magnitude))
        xNx = LS.x.return_transpose().multiply(LS.normal_equation.A).multiply(
            LS.x).elements
        nx2 = 2.0 * LS.normal_equation.b.return_transpose().multiply(
            LS.x).elements
        LS.rss = xNx - nx2 + LS.bb
        xRx = LS.x.return_transpose().multiply(LS.x).elements
        LS.xss = xRx
    ls_dict['LS'] = LS
    cg.ga_solutions += (ls_dict, )
def _compute_hydrology_grids(ui):
    """Convert monthly hydrological-model TWS grids into point-mass grids.

    For each month and each model (GLDAS, LSDM, WGHM, optionally GOCO), the
    model's TWS grid is subset (rb_id050) or interpolated onto the GRACE
    grid (rb_id051), scaled by area and density, and written to
    ``<project>/corrections/<model>/grids/<model>_<YYYY-MM>-pointmass.txt``.
    Uses the module globals ``data_path`` and ``groops_bin``.
    """
    logging.info('Computing hydrological grids...')
    # paths
    project_name = str(ui.le_id001.text())
    project_dir = str(ui.le_id002.text())
    grace_path = os.path.join(project_dir, project_name, 'grace',
                              'single_solution')
    grids_path = os.path.join(grace_path, 'grids')
    corrections_path = os.path.join(project_dir, project_name, 'corrections')
    # model names
    models = ('gldas', 'lsdm', 'wghm')
    if ui.rb_id109.isChecked():
        models += ('goco', )
    # loop over all dates
    mjd_start, mjd_end = compute_grace._date_settings(ui)
    mjd = mjd_start
    while mjd <= mjd_end:
        mjd1, mjd2 = date_functions.mjd2mjdmrange(mjd)
        mjd = (mjd2 + 1)
        ymstr = date_functions.mjd2ymstring(mjd1, '%Y-%m')[0].decode("utf-8")
        grid_file = os.path.join(grids_path, '%s-grid.txt' % (ymstr))
        for name in models:
            hydro_grid = os.path.join(data_path, name.upper(),
                                      '%s_TWS_%s.txt' % (name, ymstr))
            # BUG FIX: original used `name is 'goco'` — identity comparison
            # with a string literal is implementation-dependent; use ==
            if name == 'goco':
                hydro_grid = os.path.join(data_path, 'GOCO', 'grids',
                                          'GOCO-%s.txt' % (ymstr))
            pointmass_file = os.path.join(
                corrections_path, name, 'grids',
                '%s_%s-pointmass.txt' % (name, ymstr))
            # positions: model positions subset by polygon, or interpolation
            # onto the GRACE grid positions
            if ui.rb_id050.isChecked():
                try:
                    grid = np.array(np.genfromtxt(hydro_grid, skip_header=2),
                                    ndmin=2)
                    # pip_ix: point-in-polygon indices — presumably a module
                    # global computed elsewhere; TODO confirm
                    grid = grid[pip_ix, :]
                    lon, lat, h, area_i, tws_i = (grid[:, 0], grid[:, 1],
                                                  grid[:, 2], grid[:, 3],
                                                  grid[:, 4])
                except Exception:
                    continue
            elif ui.rb_id051.isChecked():
                try:
                    lon0, lat0, h0, area0, tws0 = np.genfromtxt(
                        hydro_grid, skip_header=2, unpack=True)
                    lon, lat, h, area = np.genfromtxt(
                        grid_file, skip_header=2, unpack=True)
                    tws_i = griddata((lon0, lat0), (tws0), (lon, lat),
                                     method=ui.cob_id003.currentText())
                    area_i = griddata((lon0, lat0), (area0), (lon, lat),
                                      method=ui.cob_id003.currentText())
                except Exception:
                    continue
            point_count = np.size(lon)
            # area: fixed total, per-cell, or user total
            if ui.rb_id052.isChecked():
                area = ui.dsb_id001.value() / point_count
            elif ui.rb_id053.isChecked():
                area = (area_i * defaults.EARTH_EQUATORIAL_RADIUS() *
                        defaults.EARTH_EQUATORIAL_RADIUS()) * 1e-6
            elif ui.rb_id054.isChecked():
                area = ui.dsb_id012.value() / point_count
            # density [kg/m^3], optionally user-defined
            density = 1025.0
            if ui.rb_id056.isChecked():
                density = ui.dsb_id013.value()
            # conversion TWS [m] -> point mass [kg]
            kg_i = tws_i * (area * 1e6) * density
            # write file
            groops_interface.make_grid_file(groops_bin, pointmass_file,
                                            lon, lat, h, area,
                                            *(kg_i, np.array(0)))
            logging.info('Current file: %s', pointmass_file)
    logging.info('Computing hydrological grids finished.')
def _compute_goco_grids_spline(ui):
    """Compute monthly GOCO point-mass grids via GROOPS spline synthesis.

    For each month, the GRACE grid positions are fed to GROOPS
    (``compute_goco_grid``) to synthesize GOCO TWS values, the cell areas
    are interpolated from the monthly GOCO grid, and the result is written
    as ``goco_<YYYY-MM>-pointmass.txt`` under
    ``<project>/corrections/goco/grids``.
    """
    logging.info('Computing GOCO grids (splines)...')
    # paths
    project_name = str(ui.le_id001.text())
    project_dir = str(ui.le_id002.text())
    grace_path = os.path.join(project_dir, project_name, 'grace',
                              'single_solution')
    grids_path = os.path.join(grace_path, 'grids')
    corrections_path = os.path.join(project_dir, project_name, 'corrections',
                                    'goco')
    # loop over all dates
    mjd_start, mjd_end = compute_grace._date_settings(ui)
    mjd = mjd_start
    while mjd <= mjd_end:
        mjd1, mjd2 = date_functions.mjd2mjdmrange(mjd)
        mjd = (mjd2 + 1)
        ymstr = date_functions.mjd2ymstring(mjd1, '%Y-%m')[0].decode("utf-8")
        grid_file = os.path.join(grids_path, '%s-grid.txt' % (ymstr))
        goco_grid = os.path.join(data_path, 'GOCO', 'grids',
                                 'GOCO-%s.txt' % (ymstr))
        pointmass_file = os.path.join(corrections_path, 'grids',
                                      'goco_%s-pointmass.txt' % (ymstr))
        # positions: synthesize GOCO TWS at the GRACE grid positions
        temp_dir = tempfile.mkdtemp()
        try:
            tmp_input_grid_file = os.path.join(temp_dir, 'grid_in.txt')
            tmp_output_grid_file = os.path.join(temp_dir, 'grid_out.txt')
            lon, lat, h, area = np.genfromtxt(grid_file, skip_header=2,
                                              unpack=True)
            groops_interface.make_grid_file(groops_bin, tmp_input_grid_file,
                                            lon, lat, h, area)
            groops_interface.compute_goco_grid(groops_bin,
                                               tmp_input_grid_file,
                                               tmp_output_grid_file, mjd1)
            lon, lat, h, area_i, tws_i = np.genfromtxt(tmp_output_grid_file,
                                                       skip_header=2,
                                                       unpack=True)
            # replace synthesized areas with the monthly GOCO grid's areas
            lon0, lat0, h0, area0, tws0 = np.genfromtxt(goco_grid,
                                                        skip_header=2,
                                                        unpack=True)
            area_i = griddata((lon0, lat0), (area0), (lon, lat),
                              method=ui.cob_id003.currentText())
        except Exception:
            # best effort: skip months with missing/broken input files
            continue
        finally:
            # the original leaked temp_dir whenever the try block failed;
            # finally also runs on the continue above
            shutil.rmtree(temp_dir)
        point_count = np.size(lon)
        # area: fixed total, per-cell, or user total
        if ui.rb_id052.isChecked():
            area = ui.dsb_id001.value() / point_count
        elif ui.rb_id053.isChecked():
            area = (area_i * defaults.EARTH_EQUATORIAL_RADIUS() *
                    defaults.EARTH_EQUATORIAL_RADIUS()) * 1e-6
        elif ui.rb_id054.isChecked():
            area = ui.dsb_id012.value() / point_count
        # density [kg/m^3], optionally user-defined
        density = 1025.0
        if ui.rb_id056.isChecked():
            density = ui.dsb_id013.value()
        # conversion TWS [m] -> point mass [kg]
        kg_i = tws_i * (area * 1e6) * density
        # write file
        groops_interface.make_grid_file(groops_bin, pointmass_file,
                                        lon, lat, h, area,
                                        *(kg_i, np.array(0)))
        logging.info('Current file: %s', pointmass_file)
    logging.info('Computing GOCO grids (splines) done.')
def _compute_danubia_grid(ui):
    """Convert monthly PROMET/DANUBIA NetCDF fields to point-mass grids.

    Reads the Danubia NetCDF dataset, evaluates the user-supplied TWS
    expression (le_id225) over the hydrological component fields, and writes
    monthly ``promet_<YYYY-MM>-pointmass.txt`` files under
    ``<project>/corrections/promet/grids``.
    """
    logging.info('Computing PROMET/DANUBIA grids...')
    # paths
    project_name = str(ui.le_id001.text())
    project_dir = str(ui.le_id002.text())
    grace_path = os.path.join(project_dir, project_name, 'grace',
                              'single_solution')
    grids_path = os.path.join(grace_path, 'grids')
    corrections_path = os.path.join(project_dir, project_name, 'corrections')
    fh = Dataset(os.path.join(data_path, 'DANUBIA', 'Data-Danubia_update',
                              'Danubia.nc'), mode='r')
    fh.set_auto_mask(False)
    lon0 = fh.variables['longitude'][:]
    lat0 = fh.variables['latitude'][:]
    LON0, LAT0 = np.meshgrid(lon0, lat0)
    mjd0 = fh.variables['time'][:]
    # loop over all dates
    mjd_start, mjd_end = compute_grace._date_settings(ui)
    mjd = mjd_start
    while mjd <= mjd_end:
        mjd1, mjd2 = date_functions.mjd2mjdmrange(mjd)
        mjd = (mjd2 + 1)
        ymstr = date_functions.mjd2ymstring(mjd1, '%Y-%m')[0].decode("utf-8")
        grid_file = os.path.join(grids_path, '%s-grid.txt' % (ymstr))
        pointmass_file = os.path.join(corrections_path, 'promet', 'grids',
                                      'promet_%s-pointmass.txt' % (ymstr))
        # NOTE(review): this matches against the already-advanced mjd
        # (= mjd2 + 1), not mjd1 — confirm that is intended
        ix = np.argwhere(mjd0 == mjd)
        if not np.any(ix):
            continue
        # np.asscalar was removed in NumPy 1.23; .item() is the replacement
        ix = int(ix.item())
        cr = fh.variables['channel_runoff'][ix, :]
        et = fh.variables['evapotranspiration'][ix, :]
        gs = fh.variables['glacier_snow'][ix, :]
        im = fh.variables['ice_melt'][ix, :]
        pc = fh.variables['precipitation'][ix, :]
        sm = fh.variables['soil_moisture'][ix, :]
        tr = fh.variables['total_runoff'][ix, :]
        ts = fh.variables['total_snow'][ix, :]
        # SECURITY: eval() of free-form UI text executes arbitrary code;
        # acceptable only because this is a local desktop tool — consider a
        # restricted expression parser. Components above (cr, et, ...) are
        # the names available to the expression; result mm -> m.
        tws0 = eval(ui.le_id225.text()) * 1e-3
        # make grid (lon, lat, h, area, tws columns; h and area stay 0 here)
        grid = np.zeros((np.size(tws0), 5))
        grid[:, 0] = LON0.flatten()
        grid[:, 1] = LAT0.flatten()
        grid[:, 4] = tws0.flatten()
        # positions
        if ui.rb_id050.isChecked():
            try:
                # NOTE(review): `pip` is assigned below, which makes it local
                # to this function — the `pip is not None` test then raises
                # UnboundLocalError unless `polygon`/`pip` are module globals
                # initialized elsewhere; the except silently skips the month.
                # Confirm intent before relying on this branch.
                if (polygon is not None) and (pip is not None):
                    grid = grid[pip, :]
                elif (polygon is not None) and (pip is None):
                    ix = list(range(grid.shape[0]))
                    p = Pool(None)
                    pip = p.map(partial(compute_grace._in_polygon_aux,
                                        polygon=polygon, grid=grid),
                                ix, chunksize=25)
                    pip = np.nonzero(pip)[0]
                    grid = grid[pip, :]
                lon, lat, h, area_i, tws_i = (grid[:, 0], grid[:, 1],
                                              grid[:, 2], grid[:, 3],
                                              grid[:, 4])
            except Exception:
                continue
        elif ui.rb_id051.isChecked():
            try:
                lon0, lat0, h0, area0, tws0 = (grid[:, 0], grid[:, 1],
                                               grid[:, 2], grid[:, 3],
                                               grid[:, 4])
                lon, lat, h, area = np.genfromtxt(grid_file, skip_header=2,
                                                  unpack=True)
                tws_i = griddata((lon0, lat0), (tws0), (lon, lat),
                                 method=ui.cob_id003.currentText())
                area_i = griddata((lon0, lat0), (area0), (lon, lat),
                                  method=ui.cob_id003.currentText())
            except Exception:
                continue
        point_count = np.size(lon)
        # area
        if ui.rb_id052.isChecked():
            area = ui.dsb_id001.value() / point_count
        elif ui.rb_id053.isChecked():
            area = 1.0  # 1 km^2 per pixel
        elif ui.rb_id054.isChecked():
            area = ui.dsb_id012.value() / point_count
        # density [kg/m^3], optionally user-defined
        density = 1025.0
        if ui.rb_id056.isChecked():
            density = ui.dsb_id013.value()
        # conversion TWS [m] -> point mass [kg]
        kg_i = tws_i * (area * 1e6) * density
        # write file
        groops_interface.make_grid_file(groops_bin, pointmass_file,
                                        lon, lat, h, area,
                                        *(kg_i, np.array(0)))
        logging.info('Current file: %s', pointmass_file)
    fh.close()
    logging.info('Computing PROMET/DANUBIA grids finished.')