    # Store the land-masked transport as a new variable on the dataset
    dataset['masked_land_transport'] = (('months_ax', 'lat', 'lon'), masked_land_transport)


if __name__ == "__main__":

    import nc_file_io_xarray as io
    import set_and_get_params as sagp

    # Input and output locations
    input_dir = '/scratch/rg419/GFDL_model/GFDLmoistModel/'
    base_dir = '/scratch/rg419/Data_moist/'
    land_file = 'input/land.nc'
    base_exp_name = 'amip_10m/'  # Experiment folder used to locate the land file
    exp_name = 'amip_10m/'       # Experiment folder containing the data to calculate q-fluxes from
    output_file_name = 'qflux_output'  # Placeholder: name of the q-flux file to write (not specified in the original)
    ice_file_name = 'ice_mask.nc'      # Placeholder: ice-mask file passed to qflux_calc (not specified in the original)

    # Range of input files to use, and land settings
    start_file = 121     # Number of the first input file to include
    end_file = 480       # Number of the last input file to include
    land_present = True  # Whether a land mask is present in the input files
    use_interpolated_pressure_level_data = False  # Expect data on sigma levels (if False) or on pressure levels (if True).
                                                  # The script should be insensitive to this choice if both sets of files exist.

    # Time increment of the input files (e.g. 'monthly' for atmos_monthly files).
    avg_or_daily = 'monthly'

    # Time frequency of the output data. Valid options are 'months', 'all_time' or 'dayofyear'.
    time_divisions_of_qflux_to_be_calculated = 'months'

    model_params = sagp.model_params_set(input_dir, delta_t=720., ml_depth=20., res=42)

    dataset, time_arr, size_list = io.read_data(
        base_dir, exp_name, start_file, end_file, avg_or_daily,
        use_interpolated_pressure_level_data)

    land_array, topo_array = io.read_land(
        input_dir, base_exp_name, land_present,
        use_interpolated_pressure_level_data, size_list, land_file)
    dataset['land'] = (('lat', 'lon'), land_array)

    check_surface_flux_dims(dataset)

    qflux_calc(dataset, model_params, output_file_name, ice_file_name,
               groupby_name=time_divisions_of_qflux_to_be_calculated)
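

# The grouping options above ('months', 'all_time', 'dayofyear') set the time
# resolution of the calculated q-flux climatology. The sketch below is
# illustrative only and is never called by this script: it shows how such
# options are commonly expressed as xarray time-grouping operations. The helper
# name and its use of Dataset.groupby are assumptions, not the actual
# implementation inside qflux_calc.
def _example_time_grouping(ds, groupby_name='months'):
    """Illustrative only: reduce a dataset over 'time' at the requested output frequency."""
    if groupby_name == 'months':
        # One climatological field per calendar month
        return ds.groupby('time.month').mean('time')
    elif groupby_name == 'dayofyear':
        # One climatological field per day of the year
        return ds.groupby('time.dayofyear').mean('time')
    elif groupby_name == 'all_time':
        # A single time-mean field
        return ds.mean('time')
    else:
        raise ValueError('Unrecognised groupby_name: ' + groupby_name)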