def gen_task_ctrl():
    task_ctrl = TaskControl(__file__)
    task_ctrl.add(
        Task(plot_dem, [],
             [PATHS['figsdir'] / 'orog_precip' / 'dem_asia_with_regions.png']))
    return task_ctrl

def gen_task_ctrl():
    tc = TaskControl(__file__)
    datadir = PATHS['datadir'] / 'aphrodite_data/025deg'
    for year in ALL_YEARS:
        output_path = datadir / FILE_TPL.format(year=year)
        tc.add(
            Task(download_year, [], [output_path],
                 func_args=(year, datadir), atomic_write=False))
    return tc

def gen_task_ctrl():
    tc = TaskControl(__file__)
    datadir = PATHS['datadir'] / 'aphrodite_data/025deg'
    filenames = [datadir / FILE_TPL.format(year=year) for year in ALL_YEARS]
    tc.add(
        Task(combine_years, filenames,
             [datadir / 'aphrodite_combined_all.nc',
              datadir / 'aphrodite_combined_jja.nc']))
    return tc

def gen_task_ctrl():
    tc = TaskControl(__file__)
    # pp_files = list((PATHS['datadir'] / 'u-al508' / 'ap8.pp' / 'lowlevel_wind_200901').glob('al508a.p8200901??.pp'))
    # for pp_file in pp_files:
    #     tc.add(Task(convert_wrapper, [pp_file], [pp_file.with_suffix('.nc')]))
    pp_files = list(
        (PATHS['datadir'] / 'u-al508' / 'ap9.pp').glob('surface_wind_2006??/al508a.p9????????.pp'))
    pp_files += list(
        (PATHS['datadir'] / 'u-ak543' / 'ap9.pp').glob('surface_wind_2006??/ak543a.p9????????.pp'))
    for pp_file in pp_files:
        tc.add(Task(convert_wrapper, [pp_file], [pp_file.with_suffix('.nc')]))
    return tc

def gen_task_ctrl():
    task_ctrl = TaskControl(__file__)
    hb_names = [f'S{i}' for i in range(11)]
    output_datadir = PATHS['output_datadir']

    inputs = {f'basin_vector_{hb_name}': output_datadir / 'basin_weighted_analysis' / hb_name / f'hb_{hb_name}.shp'
              for hb_name in hb_names}
    task_ctrl.add(Task(gen_vector_basin_stats, inputs,
                       [PATHS['figsdir'] / 'basin_stats' / 'basin_vector_stats.csv'],
                       func_args=(hb_names,)))

    resolutions = ['N1280', 'N512', 'N216', 'N96']
    inputs = {f'basin_weights_{res}_{hb_name}': (output_datadir / 'basin_weighted_analysis' / hb_name /
                                                 f'weights_{res}_{hb_name}.nc')
              for res in resolutions
              for hb_name in hb_names}
    task_ctrl.add(Task(gen_weights_basin_stats, inputs,
                       [PATHS['figsdir'] / 'basin_stats' / 'basin_weights_stats.csv'],
                       func_args=(resolutions, hb_names)))
    return task_ctrl

def gen_task_ctrl():
    years = [2019]
    months = range(1, 13)
    task_ctrl = TaskControl(__file__)
    for year, month in itertools.product(years, months):
        filename = (BASEDIR / 'raw' / f'precip_{year}{month:02}' /
                    f'CMORPH_V1.0_ADJ_8km-30min_{year}{month:02}.tar')
        task_ctrl.add(Task(download_year, [], [filename], func_args=(year, month)))

        raw_filename = (BASEDIR / 'raw' / f'precip_{year}{month:02}' /
                        f'CMORPH_V1.0_ADJ_8km-30min_{year}{month:02}.tar')
        nc_filenames = {day: (BASEDIR / f'precip_{year}{month:02}' /
                              f'cmorph_ppt_{year}{month:02}{day:02}.nc')
                        for day in range(1, calendar.monthrange(year, month)[1] + 1)}
        task_ctrl.add(Task(convert_year_month, [raw_filename], nc_filenames, func_args=(year, month)))

        nc_asia_filename = (BASEDIR / f'precip_{year}{month:02}' /
                            f'cmorph_ppt_{year}{month:02}.asia.nc')
        task_ctrl.add(Task(extract_year_month, nc_filenames, [nc_asia_filename], func_args=(year, month)))

        regrid_inputs = {}
        regrid_inputs['target'] = Path('/gws/nopw/j04/cosmic/mmuetz/data/u-al508/ap9.pp/precip_200501/'
                                       'al508a.p9200501.asia_precip.nc')
        regrid_inputs['cmorph'] = BASEDIR / f'precip_{year}{month:02}/cmorph_ppt_{year}{month:02}.asia.nc'
        regrid_output = regrid_inputs['cmorph'].parent / (regrid_inputs['cmorph'].stem + '.N1280.nc')
        task_ctrl.add(Task(regrid_asia, regrid_inputs, [regrid_output]))
    return task_ctrl

def gen_task_ctrl():
    tc = TaskControl(__file__)
    models = ['al508', 'ak543']
    # models = ['ak543']
    dist_threshs = [100]
    dotprod_threshs = [0.05]
    months = [6, 7, 8]
    # dist_threshs = [20, 100]
    # dotprod_threshs = [0.05, 0.1]
    year = 2006

    for model, dotprod_thresh, dist_thresh in product(models, dotprod_threshs, dist_threshs):
        # 1 model at a time.
        orog_precip_paths = [fmtp(orog_precip_path_tpl, model=model, year=year, month=month,
                                  dotprod_thresh=dotprod_thresh, dist_thresh=dist_thresh)
                             for month in months]
        orog_precip_mean_fields = [fmtp(orog_precip_mean_fields_tpl, model=model, year=year, season='jja',
                                        dotprod_thresh=dotprod_thresh, dist_thresh=dist_thresh)]
        tc.add(Task(extract_precip_mean_fields, orog_precip_paths, orog_precip_mean_fields))

    for model, dotprod_thresh, dist_thresh in product(models, dotprod_threshs, dist_threshs):
        orog_precip_mean_fields = [fmtp(orog_precip_mean_fields_tpl, model=model, year=year, season='jja',
                                        dotprod_thresh=dotprod_thresh, dist_thresh=dist_thresh)]
        orog_precip_figs = [fmtp(orog_precip_fig_tpl, model=model, year=year, season='jja',
                                 dotprod_thresh=dotprod_thresh, dist_thresh=dist_thresh,
                                 precip_type=precip_type)
                            for precip_type in ['orog', 'non_orog', 'ocean', 'orog_frac']]
        tc.add(Task(plot_mean_orog_precip, orog_precip_mean_fields, orog_precip_figs))
    return tc

    # N.B. the early return above disables the model-comparison tasks below.
    for dotprod_thresh, dist_thresh in product(dotprod_threshs, dist_threshs):
        # Compare 2 models.
        orog_precip_paths = {(model, month): fmtp(orog_precip_path_tpl, model=model, year=year, month=month,
                                                  dotprod_thresh=dotprod_thresh, dist_thresh=dist_thresh)
                             for model in models
                             for month in months}
        orog_precip_figs = [fmtp(orog_precip_fig_tpl, model='-'.join(models), year=year, season='jja',
                                 dotprod_thresh=dotprod_thresh, dist_thresh=dist_thresh,
                                 precip_type=precip_type)
                            for precip_type in ['orog', 'non_orog', 'ocean']]
        tc.add(Task(plot_compare_mean_orog_precip, orog_precip_paths, orog_precip_figs,
                    func_args=(models, months)))
    return tc

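# The tasks above and below build their input/output paths with fmtp(...) applied to
# templates such as orog_precip_path_tpl. A minimal sketch of that idea follows;
# fmtp_sketch and the example template are hypothetical stand-ins and may differ
# from the project's real fmtp helper.
from pathlib import Path


def fmtp_sketch(path_tpl, **kwargs):
    """Fill a path template (str or Path) with keyword values and return a Path."""
    return Path(str(path_tpl).format(**kwargs))


# Example with a made-up template:
# fmtp_sketch('orog_precip_{model}_{year}{month:02}.nc', model='al508', year=2006, month=6)
# -> PosixPath('orog_precip_al508_200606.nc')
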
def gen_task_ctrl():
    task_ctrl = TaskControl(__file__)
    aphrodite_dir = Path('aphrodite_data/025deg')
    inputs = {'daily precipitation analysis interpolated onto 0.25deg grids':
              (PATHS['datadir'] / aphrodite_dir / 'APHRO_MA_025deg_V1901.2009.nc')}

    outputs = {
        'asia': PATHS['figsdir'] / 'aphrodite' / 'asia_aphrodite_2009_jja.png',
        'china': PATHS['figsdir'] / 'aphrodite' / 'china_aphrodite_2009_jja.png',
    }
    task_ctrl.add(Task(plot_aphrodite_seasonal_analysis, inputs, outputs, func_args=(True,)))

    outputs = {
        'asia': PATHS['figsdir'] / 'aphrodite' / 'asia_aphrodite_2009_jja.lognorm.png',
        'china': PATHS['figsdir'] / 'aphrodite' / 'china_aphrodite_2009_jja.lognorm.png',
    }
    task_ctrl.add(Task(plot_aphrodite_seasonal_analysis, inputs, outputs, func_args=(False,)))
    return task_ctrl

def gen_task_ctrl():
    task_ctrl = TaskControl(__file__)
    # Can only do 1 or 3 runids ATM.
    all_runids = [['cmorph', 'u-al508', 'u-ak543'],
                  ['cmorph'],
                  ['cmorph', 'u-am754', 'u-ak543'],
                  ['u-al508', 'u-am754', 'u-ak543'],
                  ['cmorph', 'u-al508', 'u-am754', 'u-ak543'],
                  ['u-al508', 'u-aj399', 'u-az035']]
    season = 'jja'
    durations = ['long']
    for start_year in range(1998, 2016):
        durations.append(f'{start_year}06-{start_year + 3}08')
    precip_threshes = [0.1]
    methods = ['peak', 'harmonic']
    # regions = ['china', 'asia', 'europe']
    regions = ['china', 'asia']

    # Run all durations for first runid.
    task_data = list(itertools.product([all_runids[0]], durations, precip_threshes, regions))
    # Run first duration for all other runids.
    task_data.extend(itertools.product(all_runids[1:], [durations[0]], precip_threshes, regions))

    for runids, duration, precip_thresh, region in task_data:
        task = AfiTask(fig_afi_mean, runids, PATHS['datadir'], PATHS['figsdir'],
                       duration, precip_thresh, season, region)
        task_ctrl.add(task)
        for method in methods:
            task = AfiTask(fig_afi_diurnal_cycle, runids, PATHS['datadir'], PATHS['figsdir'],
                           duration, precip_thresh, season, region, method)
            task_ctrl.add(task)
    return task_ctrl

def gen_task_ctrl():
    task_ctrl = TaskControl(__file__)
    for basin_scales in ['small_medium_large', 'sliding']:
        hb_raster_cubes_fn = (PATHS['output_datadir'] /
                              f'basin_weighted_analysis/hb_N1280_raster_{basin_scales}.nc')
        if basin_scales == 'small_medium_large':
            hb_names = HB_NAMES
        else:
            hb_names = [f'S{i}' for i in range(11)]
        shp_path_tpl = 'basin_weighted_analysis/{hb_name}/hb_{hb_name}.{ext}'

        for hb_name in hb_names:
            shp_inputs = {ext: PATHS['output_datadir'] / shp_path_tpl.format(hb_name=hb_name, ext=ext)
                          for ext in ['shp', 'dbf', 'prj', 'cpg', 'shx']}
            task_ctrl.add(Task(plot_hydrobasins_files,
                               shp_inputs,
                               [PATHS['figsdir'] / 'basin_weighted_analysis' / 'map' /
                                'hydrobasins_size' / f'map_{hb_name}.png'],
                               func_args=[hb_name]))

        weighted_mean_precip_tpl = ('basin_weighted_analysis/{hb_name}/'
                                    '{dataset}.{hb_name}.area_weighted.mean_precip.hdf')
        weighted_mean_precip_filenames = defaultdict(list)
        for dataset, hb_name in itertools.product(DATASETS + ['aphrodite'], hb_names):
            fmt_kwargs = {'dataset': dataset, 'hb_name': hb_name}
            max_min_path = (PATHS['output_datadir'] /
                            f'basin_weighted_analysis/{hb_name}/mean_precip_max_min.pkl')
            weighted_mean_precip_filename = (PATHS['output_datadir'] /
                                             weighted_mean_precip_tpl.format(**fmt_kwargs))
            weighted_mean_precip_filenames[hb_name].append(weighted_mean_precip_filename)

            task_ctrl.add(Task(plot_mean_precip,
                               {'weighted': weighted_mean_precip_filename,
                                'raster_cubes': hb_raster_cubes_fn,
                                'mean_precip_max_min': max_min_path},
                               [PATHS['figsdir'] / 'basin_weighted_analysis' / 'map' / 'mean_precip' /
                                f'map_{dataset}.{hb_name}.area_weighted.png'],
                               func_args=[dataset, hb_name]))

            if dataset != 'cmorph':
                fmt_kwargs = {'dataset': 'cmorph', 'hb_name': hb_name}
                cmorph_weighted_mean_precip_filename = (PATHS['output_datadir'] /
                                                        weighted_mean_precip_tpl.format(**fmt_kwargs))
                task_ctrl.add(Task(plot_obs_mean_precip_diff,
                                   {'dataset_weighted': weighted_mean_precip_filename,
                                    'obs_weighted': cmorph_weighted_mean_precip_filename,
                                    'raster_cubes': hb_raster_cubes_fn,
                                    'mean_precip_max_min': max_min_path},
                                   [PATHS['figsdir'] / 'basin_weighted_analysis' / 'map' /
                                    'cmorph_mean_precip_diff' /
                                    f'map_{dataset}.{hb_name}.area_weighted.png'],
                                   func_args=[dataset, hb_name]))

            if dataset not in ['cmorph', 'aphrodite']:
                fmt_kwargs = {'dataset': 'aphrodite', 'hb_name': hb_name}
                obs_weighted_mean_precip_filename = (PATHS['output_datadir'] /
                                                     weighted_mean_precip_tpl.format(**fmt_kwargs))
                task_ctrl.add(Task(plot_obs_mean_precip_diff,
                                   {'dataset_weighted': weighted_mean_precip_filename,
                                    'obs_weighted': obs_weighted_mean_precip_filename,
                                    'raster_cubes': hb_raster_cubes_fn,
                                    'mean_precip_max_min': max_min_path},
                                   [PATHS['figsdir'] / 'basin_weighted_analysis' / 'map' /
                                    'cmorph_mean_precip_diff' /
                                    f'map_aphrodite_vs_{dataset}.{hb_name}.area_weighted.png'],
                                   func_args=[dataset, hb_name]))

        for obs in ['cmorph', 'aphrodite', 'u-al508', 'u-ak543']:
            mean_precip_rmse_data_filename = (
                PATHS['output_datadir'] /
                f'basin_weighted_analysis/{obs}.mean_precip_all_rmses.{basin_scales}.pkl')
            task_ctrl.add(Task(plot_obs_vs_all_datasets_mean_precip,
                               inputs=[mean_precip_rmse_data_filename],
                               outputs=[PATHS['figsdir'] / 'basin_weighted_analysis' / 'cmorph_vs' /
                                        'mean_precip' /
                                        f'{obs}_vs_all_datasets.all_{f}.{basin_scales}.pdf'
                                        for f in ['rmse', 'corr']]))

        if basin_scales == 'small_medium_large':
            input_paths = {'raster_cubes': hb_raster_cubes_fn}
            for name, datasets in zip(['', 'full_'],
                                      (['cmorph', 'u-al508', 'u-ak543'],
                                       ['cmorph', 'u-al508', 'u-am754', 'u-ak543'])):
                paths = {f'weighted_{hb_name}_{dataset}':
                         (PATHS['output_datadir'] /
                          weighted_mean_precip_tpl.format(hb_name=hb_name, dataset=dataset))
                         for hb_name, dataset in itertools.product(hb_names, datasets)}
                input_paths.update(paths)
                task_ctrl.add(Task(plot_mean_precip_asia_combined,
                                   input_paths,
                                   [PATHS['figsdir'] / 'basin_weighted_analysis' / 'map' /
                                    'mean_precip_asia_combined' /
                                    f'{name}asia_combined_basin_scales.pdf'],
                                   func_args=[datasets, hb_names]))

        weighted_phase_mag_tpl = ('basin_weighted_analysis/{hb_name}/'
                                  '{dataset}.{hb_name}.{mode}.area_weighted.phase_mag.hdf')
        for dataset, hb_name, mode in itertools.product(DATASETS, hb_names, PRECIP_MODES):
            fmt_kwargs = {'dataset': dataset, 'hb_name': hb_name, 'mode': mode}
            weighted_phase_mag_filename = (PATHS['output_datadir'] /
                                           weighted_phase_mag_tpl.format(**fmt_kwargs))
            task_ctrl.add(Task(plot_phase_mag,
                               {'weighted': weighted_phase_mag_filename,
                                'raster_cubes': hb_raster_cubes_fn},
                               [PATHS['figsdir'] / 'basin_weighted_analysis' / 'map' / mode /
                                f'map_{dataset}.{hb_name}.{mode}.area_weighted.{v}.png'
                                for v in ['phase', 'alpha_phase', 'mag']],
                               func_args=[dataset, hb_name, mode]))

        if basin_scales == 'small_medium_large':
            for name, datasets in zip(['', 'full_'],
                                      (['cmorph', 'u-al508', 'u-ak543'],
                                       ['cmorph', 'u-al508', 'u-am754', 'u-ak543'])):
                for mode in PRECIP_MODES:
                    input_paths = {f'weighted_{hb_name}_{dataset}':
                                   (PATHS['output_datadir'] /
                                    weighted_phase_mag_tpl.format(hb_name=hb_name, dataset=dataset,
                                                                  mode=mode))
                                   for hb_name, dataset in itertools.product(hb_names, datasets)}
                    input_paths.update({'raster_cubes': hb_raster_cubes_fn})
                    task_ctrl.add(Task(plot_phase_alpha_combined,
                                       input_paths,
                                       [PATHS['figsdir'] / 'basin_weighted_analysis' / 'map' /
                                        'phase_alpha_combined' /
                                        f'{name}{mode}_phase_alpha_combined_asia.pdf'],
                                       func_args=(datasets, hb_names, mode)))

        for area_weighted in [True, False]:
            weighted = 'area_weighted' if area_weighted else 'not_area_weighted'
            vrmse_data_filename = (PATHS['output_datadir'] /
                                   f'basin_weighted_analysis/all_rmses.{weighted}.{basin_scales}.pkl')
            task_ctrl.add(Task(plot_cmorph_vs_all_datasets_phase_mag,
                               [vrmse_data_filename],
                               [PATHS['figsdir'] / 'basin_weighted_analysis' / 'cmorph_vs' / 'phase_mag' /
                                f'cmorph_vs_all_datasets.all_rmse.{weighted}.{basin_scales}.pdf']))
    return task_ctrl

def gen_task_ctrl():
    tc = TaskControl(__file__)
    years = [2006]
    # years = [2005, 2006, 2007, 2008]
    models = ['al508', 'ak543']
    dist_threshs = [50, 100]
    dotprod_threshs = [0.05]
    months = [6, 7, 8]
    # dist_threshs = [20, 100]
    # dotprod_threshs = [0.05, 0.1]

    for dist_thresh in dist_threshs:
        cache_key = fmtp(cache_key_tpl, dist_thresh=dist_thresh)
        tc.add(Task(gen_dist_cache, {'orog': orog_path}, [cache_key], func_args=(dist_thresh,)))

    for year, model, dotprod_thresh, dist_thresh in product(years, models, dotprod_threshs, dist_threshs):
        cache_key = fmtp(cache_key_tpl, dist_thresh=dist_thresh)
        for month in months:
            surf_wind_path = fmtp(surf_wind_path_tpl, model=model, year=year, month=month)
            orog_mask_path = fmtp(orog_mask_path_tpl, model=model, year=year, month=month,
                                  dotprod_thresh=dotprod_thresh, dist_thresh=dist_thresh)
            inputs = {'orog': orog_path, 'cache_key': cache_key, 'surf_wind': surf_wind_path}
            tc.add(Task(gen_orog_mask, inputs, [orog_mask_path],
                        func_args=(dotprod_thresh, dist_thresh)))

            precip_path = fmtp(precip_path_tpl, model=model, year=year, month=month)
            orog_precip_path = fmtp(orog_precip_path_tpl, model=model, year=year, month=month,
                                    dotprod_thresh=dotprod_thresh, dist_thresh=dist_thresh)
            orog_precip_inputs = {'orog_mask': orog_mask_path,
                                  'land_sea_mask': land_sea_mask,
                                  'precip': precip_path}
            tc.add(Task(calc_orog_precip, orog_precip_inputs, [orog_precip_path]))

            orog_precip_frac_inputs = {'orog_mask': orog_mask_path,
                                       'land_sea_mask': land_sea_mask,
                                       'orog_precip': orog_precip_path}
            orog_precip_frac_path = fmtp(orog_precip_frac_path_tpl, model=model, year=year, month=month,
                                         dotprod_thresh=dotprod_thresh, dist_thresh=dist_thresh)
            tc.add(Task(calc_orog_precip_fracs, orog_precip_frac_inputs, [orog_precip_frac_path]))

    variables = list(product(models, dotprod_threshs, dist_threshs, months))
    columns = ['model', 'dotprod_thresh', 'dist_thresh', 'month']
    combine_inputs = [fmtp(orog_precip_frac_path_tpl, model=model, year=year, month=month,
                           dotprod_thresh=dotprod_thresh, dist_thresh=dist_thresh)
                      for model, dotprod_thresh, dist_thresh, month in variables]
    combine_fracs_output = [combine_frac_path]
    tc.add(Task(combine_orog_precip_fracs, combine_inputs, combine_fracs_output,
                func_args=(variables, columns)))
    return tc

def gen_task_ctrl():
    task_ctrl = TaskControl(__file__)
    precip_thresh = 0.1
    season = 'jja'
    runids = UmN1280SpaTask.RUNIDS
    # runids = ['aj399']
    # regions = ['asia', 'europe']
    regions = ['asia']

    for region in regions:
        start_year_month = (1998, 1)
        end_year_month = (2018, 12)
        task_ctrl.add(CmorphSpaTask(start_year_month, end_year_month, precip_thresh, season, region))

        for start_year in range(1998, 2016):
            start_year_month = (start_year, 6)
            end_year_month = (start_year + 3, 8)
            task_ctrl.add(CmorphSpaTask(start_year_month, end_year_month, precip_thresh, season, region))

        start_year_month = (2005, 6)
        end_year_month = (2008, 8)
        for runid in runids:
            task_ctrl.add(UmN1280SpaTask(start_year_month, end_year_month, precip_thresh,
                                         season, region, runid))
        # Disabled for now.
        # for model in UmHadgemSpaTask.MODELS:
        #     task_ctrl.add(UmHadgemSpaTask(start_year_month, end_year_month, precip_thresh,
        #                                   season, region, model))
    return task_ctrl

    # N.B. the early return above disables the per-season analysis below.
    # Individual analysis for each JJA season.
    for year in range(1998, 2019):
        start_year_month = (year, 6)
        end_year_month = (year, 8)
        task_ctrl.add(CmorphSpaTask(start_year_month, end_year_month, precip_thresh, season))

    for year in range(2005, 2009):
        start_year_month = (year, 6)
        end_year_month = (year, 8)
        for runid in runids:
            task_ctrl.add(UmN1280SpaTask(start_year_month, end_year_month, precip_thresh, season, runid))
        for model in UmHadgemSpaTask.MODELS:
            task_ctrl.add(UmHadgemSpaTask(start_year_month, end_year_month, precip_thresh, season, model))
    return task_ctrl

def gen_task_ctrl():
    task_ctrl = TaskControl(__file__)
    for basin_scales in ['small_medium_large', 'sliding']:
        hb_raster_cubes_fn = (PATHS['output_datadir'] /
                              f'basin_weighted_analysis/hb_N1280_raster_{basin_scales}.nc')
        cmorph_path = get_dataset_path('cmorph')
        # cmorph_path = (PATHS['datadir'] /
        #                'cmorph_data/8km-30min/cmorph_ppt_jja.199801-201812.asia_precip.ppt_thresh_0p1.N1280.nc')
        task_ctrl.add(Task(gen_hydrobasins_raster_cubes, [cmorph_path], [hb_raster_cubes_fn],
                           func_args=[SLIDING_SCALES if basin_scales == 'sliding' else SCALES]))

        if basin_scales == 'small_medium_large':
            hb_names = HB_NAMES
        else:
            hb_names = [f'S{i}' for i in range(11)]
        shp_path_tpl = 'basin_weighted_analysis/{hb_name}/hb_{hb_name}.{ext}'

        for hb_name in hb_names:
            # Creates a few different files with different extensions - need to have them all in outputs
            # so that they are moved to the right place after run by Task.atomic_write.
            task_ctrl.add(Task(gen_hydrobasins_files,
                               [],
                               {ext: PATHS['output_datadir'] / shp_path_tpl.format(hb_name=hb_name, ext=ext)
                                for ext in ['shp', 'dbf', 'prj', 'cpg', 'shx']},
                               func_args=[hb_name]))

        # N.B. Need to do this once for one dataset at each resolution.
        # I.e. only need one N1280 res dataset -- u-ak543.
        for dataset, hb_name in itertools.product(DATASETS[:4] + ['aphrodite'], hb_names):
            if dataset == 'u-ak543':
                dataset_cube_path = (PATHS['datadir'] /
                                     'u-ak543/ap9.pp/precip_200601/ak543a.p9200601.asia_precip.nc')
            elif dataset[:7] == 'HadGEM3':
                dataset_cube_path = HADGEM_FILENAMES[dataset]
            elif dataset == 'aphrodite':
                dataset_cube_path = (PATHS['datadir'] /
                                     'aphrodite_data/025deg/aphrodite_combined_all.nc')
            input_filenames = {dataset: dataset_cube_path,
                               hb_name: PATHS['output_datadir'] / shp_path_tpl.format(hb_name=hb_name,
                                                                                      ext='shp')}
            resolution = DATASET_RESOLUTION[dataset]
            weights_filename = (PATHS['output_datadir'] /
                                f'basin_weighted_analysis/{hb_name}/weights_{resolution}_{hb_name}.nc')
            task_ctrl.add(Task(gen_weights_cube, input_filenames, [weights_filename]))

        weighted_mean_precip_tpl = ('basin_weighted_analysis/{hb_name}/'
                                    '{dataset}.{hb_name}.area_weighted.mean_precip.hdf')
        weighted_mean_precip_filenames = defaultdict(list)
        for dataset, hb_name in itertools.product(DATASETS + ['aphrodite'], hb_names):
            fmt_kwargs = {'dataset': dataset, 'hb_name': hb_name}
            dataset_path = get_dataset_path(dataset)
            resolution = DATASET_RESOLUTION[dataset]
            weights_filename = (PATHS['output_datadir'] /
                                f'basin_weighted_analysis/{hb_name}/weights_{resolution}_{hb_name}.nc')
            weighted_mean_precip_filename = (PATHS['output_datadir'] /
                                             weighted_mean_precip_tpl.format(**fmt_kwargs))
            weighted_mean_precip_filenames[hb_name].append(weighted_mean_precip_filename)

            task_ctrl.add(Task(native_weighted_basin_mean_precip_analysis,
                               {'dataset_path': dataset_path, 'weights': weights_filename},
                               [weighted_mean_precip_filename]))

        for obs in ['cmorph', 'aphrodite', 'u-al508', 'u-ak543']:
            mean_precip_rmse_data_filename = (
                PATHS['output_datadir'] /
                f'basin_weighted_analysis/{obs}.mean_precip_all_rmses.{basin_scales}.pkl')
            gen_mean_precip_rmses_inputs = {
                (ds, hb_name): PATHS['output_datadir'] / weighted_mean_precip_tpl.format(dataset=ds,
                                                                                         hb_name=hb_name)
                for ds, hb_name in itertools.product(DATASETS + ['aphrodite'], hb_names)
            }
            task_ctrl.add(Task(gen_mean_precip_rmses_corrs,
                               inputs=gen_mean_precip_rmses_inputs,
                               outputs=[mean_precip_rmse_data_filename],
                               func_kwargs={'hb_names': hb_names, 'obs': obs}))

            mean_precip_bias_data_filename = (
                PATHS['output_datadir'] /
                f'basin_weighted_analysis/{obs}.mean_precip_all_bias.{basin_scales}.csv')
            task_ctrl.add(Task(gen_mean_precip_highest_percentage_bias,
                               inputs=gen_mean_precip_rmses_inputs,
                               outputs=[mean_precip_bias_data_filename],
                               func_kwargs={'hb_names': hb_names, 'obs': obs}))

        for hb_name in hb_names:
            # N.B. out of order.
            max_min_path = (PATHS['output_datadir'] /
                            f'basin_weighted_analysis/{hb_name}/mean_precip_max_min.pkl')
            task_ctrl.add(Task(calc_mean_precip_max_min,
                               weighted_mean_precip_filenames[hb_name],
                               [max_min_path]))

        weighted_phase_mag_tpl = ('basin_weighted_analysis/{hb_name}/'
                                  '{dataset}.{hb_name}.{mode}.area_weighted.phase_mag.hdf')
        for dataset, hb_name, mode in itertools.product(DATASETS, hb_names, PRECIP_MODES):
            fmt_kwargs = {'dataset': dataset, 'hb_name': hb_name, 'mode': mode}
            if dataset[:7] == 'HadGEM3':
                cube_name = f'{mode}_of_precip_JJA'
            else:
                cube_name = f'{mode}_of_precip_jja'
            dataset_path = get_dataset_path(dataset)
            resolution = DATASET_RESOLUTION[dataset]
            weights_filename = (PATHS['output_datadir'] /
                                f'basin_weighted_analysis/{hb_name}/weights_{resolution}_{hb_name}.nc')
            weighted_phase_mag_filename = (PATHS['output_datadir'] /
                                           weighted_phase_mag_tpl.format(**fmt_kwargs))
            task_ctrl.add(Task(native_weighted_basin_diurnal_cycle_analysis,
                               {'diurnal_cycle': dataset_path, 'weights': weights_filename},
                               [weighted_phase_mag_filename],
                               func_args=[cube_name]))

        for area_weighted in [True, False]:
            weighted = 'area_weighted' if area_weighted else 'not_area_weighted'
            vrmse_data_filename = (PATHS['output_datadir'] /
                                   f'basin_weighted_analysis/all_rmses.{weighted}.{basin_scales}.pkl')
            gen_rmses_inputs = {
                (ds, mode, hb_name): PATHS['output_datadir'] / weighted_phase_mag_tpl.format(dataset=ds,
                                                                                             hb_name=hb_name,
                                                                                             mode=mode)
                for ds, mode, hb_name in itertools.product(DATASETS, PRECIP_MODES, hb_names)
            }
            gen_rmses_inputs['raster_cubes'] = hb_raster_cubes_fn
            task_ctrl.add(Task(gen_phase_mag_rmses,
                               inputs=gen_rmses_inputs,
                               outputs=[vrmse_data_filename],
                               func_kwargs={'area_weighted': area_weighted, 'hb_names': hb_names}))
    return task_ctrl
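# A minimal sketch of how one of the gen_task_ctrl() functions above might be driven
# directly. The finalize()/run() method names on TaskControl are an assumption (not
# shown in this code); the project may instead execute tasks through its own
# command-line entry point.
if __name__ == '__main__':
    task_ctrl = gen_task_ctrl()
    task_ctrl.finalize()  # assumed API: resolve the input/output dependency graph
    task_ctrl.run()       # assumed API: run tasks whose outputs are missing or stale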