def main(inargs): """ Runs the main program Parameters ---------- inargs : argparse object Argparse object with all input arguments """ # Check if pre-processed file exists if (pp_exists(inargs) is False) or (inargs.recompute is True): print('Compute preprocessed file: ' + get_pp_fn(inargs)) # Call preprocessing routine with arguments compute_variance(inargs) else: print('Found pre-processed file: ' + get_pp_fn(inargs)) # Plotting if inargs.plot_type in [ 'r_v', 'alpha', 'beta', 'r_v_alpha', 'r_v_beta', 'r_v_alpha_beta', 'corr_m_N', 'mean_m' ]: plot_diurnal(inargs) elif inargs.plot_type == 'std_vs_mean': plot_std_vs_mean(inargs) elif inargs.plot_type == 'correlation': plot_correlation(inargs) elif inargs.plot_type == 'CC06b_fig9': plot_CC06b_fig9(inargs) else: print('No or wrong plot_type. Nothing plotted.')
def main(inargs): """ Runs the main program Parameters ---------- inargs : argparse object Argparse object with all input arguments """ # Check if pre-processed file exists if (pp_exists(inargs) is False) or (inargs.recompute is True): print('Compute preprocessed file: ' + get_pp_fn(inargs)) # Call preprocessing routine with arguments domain_mean_weather_ts(inargs) else: print('Found pre-processed file: ' + get_pp_fn(inargs)) # Call analyzing and plotting routine if 'prec_ind' in inargs.plot_type: plot_domain_mean_timeseries_individual(inargs, plot_var='precipitation') if 'prec_comp' in inargs.plot_type: plot_domain_mean_timeseries_composite(inargs, plot_var='precipitation') if 'cape_tauc_ind' in inargs.plot_type: plot_domain_mean_timeseries_individual(inargs, plot_var='cape_tauc') if 'cape_tauc_comp' in inargs.plot_type: plot_domain_mean_timeseries_composite(inargs, plot_var='cape_tauc') if 'prec_cape_comp' in inargs.plot_type: plot_domain_mean_timeseries_composite(inargs, plot_var='prec_cape')
def main(inargs): """ Runs the main program Parameters ---------- inargs : argparse object Argparse object with all input arguments """ # Check if pre-processed file exists if (pp_exists(inargs) is False) or (inargs.recompute is True): print('Compute preprocessed file: ' + get_pp_fn(inargs)) # Call preprocessing routine with arguments cloud_stats(inargs) else: print('Found pre-processed file: ' + get_pp_fn(inargs)) # Plotting if 'freq_hist' in inargs.plot_type: plot_prec_freq_hist(inargs) if 'size_hist' in inargs.plot_type: plot_cloud_size_hist(inargs) if 'rdf_individual' in inargs.plot_type: plot_rdf_individual(inargs) if 'rdf_composite' in inargs.plot_type: plot_rdf_composite(inargs) if 'm_evolution' in inargs.plot_type: plot_m_evolution(inargs)
def create_netcdf(inargs, groups, dimensions, variables, ensemble_dim=False):
    """
    Creates a NetCDF object to store data.

    Parameters
    ----------
    inargs : argparse object
      Argparse object with all input arguments
    groups : list
      List of groups
    dimensions : dict
      Dictionary with dimension name as key and dimension values as value
      (values must be numpy arrays).
      Attention: Type i8 is hardcoded for all dimensions.
    variables : dict
      Dictionary with variable name as key and list of its dimension names
      as value
    ensemble_dim : bool
      If True, group variables get an additional dimension ens_no with size
      inargs.nens for group 'ens' and 1 for all other groups

    Returns
    -------
    rootgroup : NetCDF object

    """
    pp_fn = get_pp_fn(inargs)

    # Create NetCDF file
    rootgroup = Dataset(pp_fn, 'w', format='NETCDF4')
    rootgroup.log = create_log_str(inargs, 'Preprocessing')

    # Create root dimensions and variables
    for dim_name, dim_val in dimensions.items():
        rootgroup.createDimension(dim_name, dim_val.shape[0])
        tmp_var = rootgroup.createVariable(dim_name, 'i8', dim_name)
        tmp_var[:] = dim_val

    # Create group dimensions and variables
    if ensemble_dim:
        # Append the ensemble dimension to every group variable; its size is
        # adjusted per group inside the loop below
        for var_dims in variables.values():
            var_dims.append('ens_no')
        dimensions['ens_no'] = 1

    for g in groups:
        rootgroup.createGroup(g)
        if g == 'ens' and ensemble_dim:
            dimensions['ens_no'] = inargs.nens

        # Create dimensions
        for dim_name, dim_len in dimensions.items():
            if not isinstance(dim_len, int):
                dim_len = dim_len.shape[0]
            rootgroup.groups[g].createDimension(dim_name, dim_len)

        # Create variables
        for var_name, var_dims in variables.items():
            rootgroup.groups[g].createVariable(var_name, 'f8', var_dims)

    return rootgroup
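# A minimal usage sketch for create_netcdf() above. The dimension and
# variable names are illustrative only; the actual preprocessing routines
# build them from the analysis settings in inargs.
#
#   groups = ['obs', 'det', 'ens']
#   dimensions = {'time': np.arange(24)}        # values must be numpy arrays
#   variables = {'prec_mean': ['time']}         # dimension names per variable
#   rootgroup = create_netcdf(inargs, groups, dimensions, variables,
#                             ensemble_dim=True)
#   # each group now has ens_no: inargs.nens for 'ens', 1 for all others
#   rootgroup.groups['ens'].variables['prec_mean'][:, 0] = 0.
#   rootgroup.close()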
def main(inargs): """ Runs the main program Parameters ---------- inargs : argparse object Argparse object with all input arguments """ # Check arguments assert inargs.plot in ['weather_ts', 'prec_stamps', 'prec_hist'], \ 'Plot not supported.' # Check if pre-processed file exists if (pp_exists(inargs) is False) or (inargs.recompute is True): print('Compute preprocessed file: ' + get_pp_fn(inargs)) # Call preprocessing routine with arguments preprocess(inargs) else: print('Found pre-processed file:' + get_pp_fn(inargs)) # Call analyzing and plotting routine plotting(inargs)
def create_netcdf(inargs): """ Parameters ---------- inargs : argparse object Argparse object with all input arguments Returns ------- rootgroup : NetCDF dataset object """ dimensions = { 'date': np.array(make_datelist(inargs, out_format='netcdf')), 'time': np.arange(inargs.time_start, inargs.time_end + inargs.time_inc, inargs.time_inc), 'n': np.array([256, 128, 64, 32, 16, 8, 4]), 'x': np.arange(get_config(inargs, 'domain', 'ana_irange')), 'y': np.arange(get_config(inargs, 'domain', 'ana_jrange')), 'cond_bins_mean_m': np.linspace(0, 2e8, 10)[1:], # TODO: Softcode this stuff 'cond_bins_m': np.linspace(0, 1e9, 50)[1:], } variables = { 'var_m': ['date', 'time', 'n', 'x', 'y'], 'var_M': ['date', 'time', 'n', 'x', 'y'], 'var_N': ['date', 'time', 'n', 'x', 'y'], 'mean_m': ['date', 'time', 'n', 'x', 'y'], 'mean_M': ['date', 'time', 'n', 'x', 'y'], 'mean_N': ['date', 'time', 'n', 'x', 'y'], 'corr_m_N': ['date', 'time', 'n', 'x', 'y'], 'cond_m_hist': ['n', 'cond_bins_mean_m', 'cond_bins_m'], } if inargs.var is 'm': variables.update({ 'var_TTENS': ['date', 'time', 'n', 'x', 'y'], 'mean_TTENS': ['date', 'time', 'n', 'x', 'y'] }) pp_fn = get_pp_fn(inargs) # Create NetCDF file rootgroup = Dataset(pp_fn, 'w', format='NETCDF4') rootgroup.log = create_log_str(inargs, 'Preprocessing') # Create root dimensions and variables for dim_name, dim_val in dimensions.items(): rootgroup.createDimension(dim_name, dim_val.shape[0]) tmp_var = rootgroup.createVariable(dim_name, 'f8', dim_name) tmp_var[:] = dim_val # Create variables for var_name, var_dims in variables.items(): tmp_var = rootgroup.createVariable(var_name, 'f8', var_dims) # Set all variables to nan by default to save time later tmp_var[:] = np.nan return rootgroup
def create_netcdf(inargs): """ Creates a NetCDF object to store data. Parameters ---------- inargs : argparse object Argparse object with all input arguments Returns ------- rootgroup : NetCDF object """ prec_freq_binedges, cld_size_binedges, cld_sum_binedges, \ cld_size_sep_binedges, cld_sum_sep_binedges = create_bin_edges(inargs) datearray = np.array(make_datelist(inargs, out_format='netcdf')) timearray = np.arange(inargs.time_start, inargs.time_end + inargs.time_inc, inargs.time_inc) rdf_radius = np.arange(0., inargs.rdf_r_max + inargs.rdf_dr, inargs.rdf_dr) rdf_radius = (rdf_radius[:-1] + rdf_radius[1:]) / 2. dimensions = { 'time': timearray, 'date': datearray, 'cld_size_bins': np.array(cld_size_binedges[1:]), 'cld_sum_bins': np.array(cld_sum_binedges[1:]), 'cld_size_sep_bins': np.array(cld_size_sep_binedges[1:]), 'cld_sum_sep_bins': np.array(cld_sum_sep_binedges[1:]), 'rdf_radius': rdf_radius } variables = { 'cld_size': ['date', 'time', 'cld_size_bins'], 'cld_sum': ['date', 'time', 'cld_sum_bins'], 'cld_size_sep': ['date', 'time', 'cld_size_sep_bins'], 'cld_sum_sep': ['date', 'time', 'cld_sum_sep_bins'], 'cld_size_mean': ['date', 'time'], 'cld_sum_mean': ['date', 'time'], 'cld_size_sep_mean': ['date', 'time'], 'cld_sum_sep_mean': ['date', 'time'], 'rdf': ['date', 'time', 'rdf_radius'], 'rdf_sep': ['date', 'time', 'rdf_radius'], } if inargs.var == 'PREC_ACCUM': groups = ['obs', 'det', 'ens'] dimensions.update({'prec_freq_bins': np.array(prec_freq_binedges[1:])}) variables.update({'prec_freq': ['date', 'time', 'prec_freq_bins']}) elif inargs.var == 'm': groups = ['det', 'ens'] else: raise Exception('Wrong variable.') pp_fn = get_pp_fn(inargs) # Create NetCDF file rootgroup = Dataset(pp_fn, 'w', format='NETCDF4') rootgroup.log = create_log_str(inargs, 'Preprocessing') # Create root dimensions and variables for dim_name, dim_val in dimensions.items(): rootgroup.createDimension(dim_name, dim_val.shape[0]) tmp_var = rootgroup.createVariable(dim_name, 'f8', dim_name) tmp_var[:] = dim_val # Create group dimensions and variables [b.append('ens_no') for a, b in variables.items()] dimensions['ens_no'] = 1 for g in groups: rootgroup.createGroup(g) if g == 'ens': dimensions['ens_no'] = inargs.nens # Create dimensions for dim_name, dim_len in dimensions.items(): if type(dim_len) is not int: dim_len = dim_len.shape[0] rootgroup.groups[g].createDimension(dim_name, dim_len) # Create variables for var_name, var_dims in variables.items(): rootgroup.groups[g].createVariable(var_name, 'f8', var_dims) return rootgroup