def main():

    model = 'icon-eu-eps'

    date = calc_latest_run_time(model)
    #date = dict(year = 2021, month = 9, day = 9, hour = 0)

    var_list = [
        't_2m', 'wind_mean_10m', 'vmax_10m', 'mslp', 'clct', 'direct_rad',
        't_850hPa'
    ]

    grid = 'icosahedral'

    #point = dict(lat = 49.014, lon =  8.404)
    point = dict(name='Karlsruhe')

    path = dict(
        base='/',
        points=
        'data/model_data/icon-eu-eps/point-forecasts/kit-weather-ensemble-point-forecast-karlsruhe/',
        credentials='data/additional_data/')

    for var in var_list:
        data = read_forecast_data(model, grid, date, var, point=point)
        save_values(data, path, date, point, var, model)

    commit_and_push_files(path)

    return
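Every example below starts from calc_latest_run_time, which is not part of this listing. A minimal sketch of what it presumably does, assuming 6-hourly runs and a fixed availability delay (the delay value is a placeholder):

import datetime

def calc_latest_run_time(model, delay_hours=4):
    # Sketch only: assume runs at 00/06/12/18 UTC that become available
    # roughly delay_hours after initialization; the real helper is project code.
    now = datetime.datetime.now(datetime.timezone.utc) \
          - datetime.timedelta(hours=delay_hours)
    return dict(year=now.year, month=now.month, day=now.day,
                hour=(now.hour // 6) * 6)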
def main():

    model = 'icon-global-det'

    run = calc_latest_run_time(model)
    #run = dict(year = 2021, month = 2, day = 28, hour = 0)

    domains = []
    domains.append(get_domain('central_europe'))
    domains.append(get_domain('europe_and_north_atlantic'))
    domains.append(get_domain('mediterranean'))
    domains.append(get_domain('west_africa'))
    domains.append(get_domain('east_africa'))
    domains.append(get_domain('southern_africa'))
    domains.append(get_domain('north_america'))
    domains.append(get_domain('southern_south_america'))
    domains.append(get_domain('eastern_asia'))
    domains.append(get_domain('north_pole'))
    domains.append(get_domain('south_pole'))

    variable1 = dict(name='vmax_10m',
                     unit='km/h',
                     grid='icosahedral',
                     load_global_field=True)
    variable2 = dict(name='')  # empty name: no second (overlay) field is plotted

    det_contourplot(domains, variable1, variable2, model, run)

    return
def main():

    model = 'icon-global-det'

    run = calc_latest_run_time(model)
    run = dict(year=2021, month=2, day=21, hour=12)  # this example pins an explicit run, overriding the latest one

    domains = []
    #domains.append(get_domain('europe'))
    #domains.append(get_domain('europe_and_north_atlantic'))
    #domains.append(get_domain('mediterranean'))
    #domains.append(get_domain('north_america'))
    #domains.append(get_domain('southern_south_america'))
    domains.append(get_domain('eastern_asia'))
    #domains.append(get_domain('north_pole'))
    #domains.append(get_domain('south_pole'))

    variable1 = dict(name='prec_6h',
                     unit='mm',
                     grid='icosahedral',
                     load_global_field=True)
    variable2 = dict(name='mslp',
                     unit='hPa',
                     grid='latlon_0.1',
                     load_global_field=True)

    det_contourplot(domains, variable1, variable2, model, run)

    return
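get_domain is also project code. Judging from how its result is used further below (domain['method'], ['radius'], ['deltalat'], ['deltalon'], ['lat'], ['lon'], ['name']), it returns a plotting-region dict; a minimal sketch with made-up values for a single region:

def get_domain(name):
    # Sketch only: keys inferred from the plotting calls in the later examples;
    # the values are illustrative, not the project's actual definitions.
    domains = {
        'central_europe': dict(name='central_europe', method='deltalatlon',
                               lat=50.0, lon=10.0, deltalat=8.0, deltalon=12.0,
                               radius=0.0),
    }
    return domains[name]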
Example #4
def main():

    model = 'icon-eu-eps'

    run = calc_latest_run_time(model)
    if run['hour'] == 6 or run['hour'] == 18:
        run['hour'] -= 6  # fall back from a 06/18 UTC run to the preceding 00/12 UTC run
    #run = dict(year = 2020, month = 10, day = 15, hour = 0)

    domains = []
    domains.append(get_domain('EU-Nest'))

    variable1 = dict(name='mslp', unit='hPa', grid='icosahedral')
    variable2 = dict(name='mslp', unit='hPa', grid='latlon_0.2')

    ens_spread_contourplot(domains, variable1, variable2, model, run)

    return
def main():

    model = 'icon-eu-det'

    run = calc_latest_run_time(model)
    if run['hour'] == 6 or run['hour'] == 18:
        run['hour'] -= 6
    #run = dict(year = 2020, month = 9, day = 16, hour = 0)

    domains = []
    domains.append(get_domain('central_europe'))
    domains.append(get_domain('europe_and_north_atlantic'))
    domains.append(get_domain('mediterranean'))

    variable1 = dict(name='synth_bt_ir10.8',
                     unit=' ~S~o~N~C',  # NCL text-function codes: superscript 'o' + 'C', i.e. °C
                     grid='latlon_0.0625',
                     load_global_field=True)
    variable2 = dict(name='')

    det_contourplot(domains, variable1, variable2, model, run)

    return
Example #6
def main():

    model = 'icon-eu-det'

    run = calc_latest_run_time(model)
    if run['hour'] == 6 or run['hour'] == 18:
        run['hour'] -= 6
    #run = dict(year = 2020, month = 8, day = 19, hour = 0)

    domains = []
    domains.append(get_domain('central_europe'))
    domains.append(get_domain('mediterranean'))

    variable1 = dict(name='vmax_10m',
                     unit='km/h',
                     grid='latlon_0.0625',
                     load_global_field=True)
    variable2 = dict(name='')

    det_contourplot(domains, variable1, variable2, model, run)

    return
def main():

    model = 'icon-global-det'

    run = calc_latest_run_time(model)
    #run = dict(year = 2020, month = 9, day = 12, hour = 0)

    domains = []
    domains.append(get_domain('europe_and_north_atlantic'))
    domains.append(get_domain('mediterranean'))
    domains.append(get_domain('usa'))

    variable1 = dict(name='shear_200-850hPa',
                     unit='kt',
                     grid='icosahedral',
                     load_global_field=True)
    variable2 = dict(name='mslp',
                     unit='hPa',
                     grid='latlon_0.1',
                     load_global_field=True)

    det_contourplot(domains, variable1, variable2, model, run)

    return
Example #8
def plot_prob_of_exc_2x2_timespan(variable, thresholds, domains, model,\
                                  timestep_mode, title_pos, only_0utc_12utc_runs):

    # set basic paths #

    if model == 'icon-eu-eps':
        model_path_deprec = 'icon-eu-eps'
    elif model == 'icon-global-eps':
        model_path_deprec = 'icon-eps'

    path = dict(
        base='/lsdfos/kit/imk-tro/projects/MOD/Gruppe_Knippertz/nw5893/',
        scripts='scripts/operational/w2w_ensembleplots/cronscripts/',
        data='forecast_archive/{}/raw_grib/'.format(model_path_deprec),
        obs='plots/observations/argentina/',
        plots_general='plots/operational/prob_of_exc/forecast/{}/'.format(
            variable['name']),
        plots_leadtime='plots/operational/prob_of_exc/leadtime/{}/'.format(
            variable['name']))
    scriptname = 'callfile_probofexc_2x2_timespan.py'
    #colormap_name = '_greenblackblue'
    colormap_name = ''

    # automatic time settings #

    run = calc_latest_run_time(model)
    timespan = int(variable['name'][-3:-1])  # e.g. 'tot_prec_24h' -> 24

    if timestep_mode == 'forecast':
        if model == 'icon-eu-eps':
            max_hour = 120
        elif model == 'icon-global-eps':
            if variable['name'] == 'tot_prec_24h'\
             or variable['name'] == 'tot_prec_48h':
                max_hour = 180
            elif variable['name'] == 'tot_prec_06h':
                max_hour = 120
            elif variable['name'] == 'tot_prec_03h':
                max_hour = 72
            elif variable['name'] == 'tot_prec_01h':
                max_hour = 48

        if variable['name'] == 'tot_prec_24h'\
         or variable['name'] == 'tot_prec_48h':
            if run['hour'] == 0:
                hours = list(range(timespan, max_hour + 1, 24))
            else:
                hours = list(
                    range(timespan + 24 - run['hour'], max_hour + 1, 24))
        else:
            hours = list(range(timespan, max_hour + 1, timespan))
        runs = [run for hour in hours]

    elif timestep_mode == 'leadtime':
        if model == 'icon-eu-eps':
            max_hour = 120
        elif model == 'icon-global-eps':
            max_hour = 180

        hours = list(range(24, max_hour + 1, 12))
        shifted_run = run  # note: this aliases run; run is re-assigned from runs[0] below
        runs = []
        for hour in hours:
            if hour % 24 == 0:
                shifted_run['hour'] = 12
                shifted_run['year'], shifted_run['month'], shifted_run['day'] \
                 = go_back_one_day(shifted_run['year'], shifted_run['month'], shifted_run['day'])
            else:
                shifted_run['hour'] = 0
            runs.append(
                dict(year=shifted_run['year'],
                     month=shifted_run['month'],
                     day=shifted_run['day'],
                     hour=shifted_run['hour']))
        run = runs[0]

    # explicit time settings #

    #run = dict(year = 2020, month = 1, day = 9, hour = 0)

    #hours = list(range(24, 120+1, 24))       # 00Z run
    #hours = list(range(24+18, 120+1, 24))    # 06Z run
    #hours = list(range(24+12, 120+1, 24))    # 12Z run
    #hours = list(range(24+6, 120+1, 24))     # 18Z run
    #hours = list(range(30, 120+1, 6))
    #hours = [12*15]

    stat_processing_list = []
    stat_processing_list.append(
        dict(method='prob_of_exc', threshold=thresholds[0]))
    stat_processing_list.append(
        dict(method='prob_of_exc', threshold=thresholds[1]))
    stat_processing_list.append(
        dict(method='prob_of_exc', threshold=thresholds[2]))
    stat_processing_list.append(
        dict(method='prob_of_exc', threshold=thresholds[3]))
    label_text_value = thresholds[4]

    # make label bar plots #

    domain = domains[0]
    stat_processing = dict(method='prob_of_exc', threshold=label_text_value)
    variable['hour_start'] = 0
    variable['hour_end'] = timespan

    print('plot labelBar1')
    plot_type = 'labelBar1'
    command = 'python {}{}{} '.format(path['base'], path['scripts'],
                                      scriptname)
    arguments = '{} {} {:d} {:d} {:d} {:d} {:d} {:d} {} {:.2f} {:.2f} {:.2f} {:.2f} {:.2f} {} {} {} {:.2f} {}'.format(
        variable['name'], variable['unit'], variable['hour_start'],
        variable['hour_end'], run['year'], run['month'], run['day'],
        run['hour'], domain['method'], domain['radius'], domain['deltalat'],
        domain['deltalon'], domain['lat'], domain['lon'], domain['name'],
        model, stat_processing['method'], stat_processing['threshold'],
        plot_type)
    os.system(command + arguments)

    print('plot labelBar2')
    plot_type = 'labelBar2'
    command = 'python {}{}{} '.format(path['base'], path['scripts'],
                                      scriptname)
    arguments = '{} {} {:d} {:d} {:d} {:d} {:d} {:d} {} {:.2f} {:.2f} {:.2f} {:.2f} {:.2f} {} {} {} {:.2f} {}'.format(
        variable['name'], variable['unit'], variable['hour_start'],
        variable['hour_end'], run['year'], run['month'], run['day'],
        run['hour'], domain['method'], domain['radius'], domain['deltalat'],
        domain['deltalon'], domain['lat'], domain['lon'], domain['name'],
        model, stat_processing['method'], stat_processing['threshold'],
        plot_type)
    os.system(command + arguments)
    print('---------------------------------------------------')

    # make small map plots #

    for i, hour in enumerate(hours):
        run = runs[i]
        print('run_{:02d}-{:02d}-{:02d}-{:02d}h...'.format(
            run['year'], run['month'], run['day'], run['hour']))

        subfolder = 'run_{:4d}{:02d}{:02d}{:02d}/tot_prec/'.format(
            run['year'], run['month'], run['day'], run['hour'])
        if model == 'icon-eu-eps':
            filename_beginning = 'icon-eu-eps_europe_icosahedral_single-level'
            model_max_hour = 120
        elif model == 'icon-global-eps':
            filename_beginning = 'icon-eps_global_icosahedral_single-level'
            model_max_hour = 180
        test_filename = '{}_{:4d}{:02d}{:02d}{:02d}_{:03d}_tot_prec.grib2'.format(
            filename_beginning, run['year'], run['month'], run['day'],
            run['hour'], model_max_hour)
        if not os.path.isfile(path['base'] + path['data'] + subfolder +
                              test_filename):
            print('no data!')
            print('---------------------------------------------------')
            continue

        variable['hour_start'] = hour - timespan
        variable['hour_end'] = hour
        for stat_processing in stat_processing_list:
            for domain in domains:
                print('plot {}, {:2d}-{:2d}h, prob_of_exc, {}{}, {}'.format(
                    variable['name'], variable['hour_start'],
                    variable['hour_end'], stat_processing['threshold'],
                    variable['unit'], domain['name']))

                plot_type = 'small_map_only'
                command = 'python {}{}{} '.format(path['base'],
                                                  path['scripts'], scriptname)
                arguments_1 = '{} {} {:d} {:d} {:d} {:d} {:d} {:d} '.format(
                    variable['name'], variable['unit'], variable['hour_start'],
                    variable['hour_end'], run['year'], run['month'],
                    run['day'], run['hour'])
                arguments_2 = '{} {:.2f} {:.2f} {:.2f} {:.2f} {:.2f} {} {} {} {:.2f} {}'.format(
                    domain['method'], domain['radius'], domain['deltalat'],
                    domain['deltalon'], domain['lat'], domain['lon'],
                    domain['name'], model, stat_processing['method'],
                    stat_processing['threshold'], plot_type)
                os.system(command + arguments_1 + arguments_2)

        # make plots for time-dependent texts #

        print('plot text, {:2d}h'.format(hour))
        domain = domains[0]
        stat_processing = dict(method='prob_of_exc',
                               threshold=label_text_value)

        plot_type = 'text'
        command = 'python {}{}{} '.format(path['base'], path['scripts'],
                                          scriptname)
        arguments_1 = '{} {} {:d} {:d} {:d} {:d} {:d} {:d} '.format(
            variable['name'], variable['unit'], variable['hour_start'],
            variable['hour_end'], run['year'], run['month'], run['day'],
            run['hour'])
        arguments_2 = '{} {:.2f} {:.2f} {:.2f} {:.2f} {:.2f} {} {} {} {:.2f} {}'.format(
            domain['method'], domain['radius'], domain['deltalat'],
            domain['deltalon'], domain['lat'], domain['lon'], domain['name'],
            model, stat_processing['method'], stat_processing['threshold'],
            plot_type)
        os.system(command + arguments_1 + arguments_2)

        # put all plot parts together #

        if model == 'icon-eu-eps':
            model_acr = 'iconeueps'
        elif model == 'icon-global-eps':
            model_acr = 'iconglobaleps'

        for domain in domains:
            print('paste together {}, {:2d}-{:2d}h, prob_of_exc, {}'.format(
                variable['name'], variable['hour_start'], variable['hour_end'],
                domain['name']))

            path['plots_maps'] = 'run_{:4d}{:02d}{:02d}{:02d}/{}/'.format(
                run['year'], run['month'], run['day'], run['hour'],
                domain['name'])
            path['plots_labelBar'] = 'run_{:4d}{:02d}{:02d}{:02d}/{}/'.format(
                runs[0]['year'], runs[0]['month'], runs[0]['day'],
                runs[0]['hour'], domains[0]['name'])
            path['plots_text'] = 'run_{:4d}{:02d}{:02d}{:02d}/{}/'.format(
                run['year'], run['month'], run['day'], run['hour'],
                domains[0]['name'])
            filenames = dict()
            for i, stat_processing in enumerate(stat_processing_list):
                if stat_processing['threshold'] >= 1.0:
                    threshold_str = '{:03d}'.format(
                        int(stat_processing['threshold']))
                else:
                    threshold_str = '{:.1f}'.format(
                        stat_processing['threshold'])
                file_str = 'small_map_plot_{:d}'.format(i + 1)
                filenames[
                    file_str] = '{}_prob_of_exc_{}_{}{}_{:03d}-{:03d}h_{}_small_map_only.png'.format(
                        model_acr, variable['name'], threshold_str,
                        variable['unit'], variable['hour_start'],
                        variable['hour_end'], domain['name'])
            filenames[
                'labelBar1'] = '{}_prob_of_exc_{}_{:.1f}{}_000-{:03d}h_{}_labelBar1.png'.format(
                    model_acr, variable['name'], label_text_value,
                    variable['unit'], timespan, domains[0]['name'])
            filenames[
                'labelBar2'] = '{}_prob_of_exc_{}_{:.1f}{}_000-{:03d}h_{}_labelBar2.png'.format(
                    model_acr, variable['name'], label_text_value,
                    variable['unit'], timespan, domains[0]['name'])
            filenames[
                'text'] = '{}_prob_of_exc_{}_{:.1f}{}_{:03d}-{:03d}h_{}_text.png'.format(
                    model_acr, variable['name'], label_text_value,
                    variable['unit'], variable['hour_start'],
                    variable['hour_end'], domains[0]['name'])
            filenames[
                'finalplot'] = '{}_prob_of_exc_2x2_{}_{:03d}-{:03d}h_{}{}.png'.format(
                    model_acr, variable['name'], variable['hour_start'],
                    variable['hour_end'], domain['name'], colormap_name)

            img_combined = Image.new('RGB', (750, 950), (255, 255, 255))

            image_smallmap1 = Image.open(path['base'] + path['plots_general'] + path['plots_maps']\
                                         + filenames['small_map_plot_1'])
            img_combined.paste(image_smallmap1.crop((0, 0, 360, 360)),
                               (10, 95))
            image_smallmap2 = Image.open(path['base'] + path['plots_general'] + path['plots_maps']\
                                         + filenames['small_map_plot_2'])
            img_combined.paste(image_smallmap2.crop((0, 0, 360, 360)),
                               (10 + 360 + 10, 95))
            image_smallmap3 = Image.open(path['base'] + path['plots_general'] + path['plots_maps']\
                                         + filenames['small_map_plot_3'])
            img_combined.paste(image_smallmap3.crop((0, 0, 360, 360)),
                               (10, 95 + 360 - 15))
            image_smallmap4 = Image.open(path['base'] + path['plots_general'] + path['plots_maps']\
                                         + filenames['small_map_plot_4'])
            img_combined.paste(image_smallmap4.crop((0, 0, 360, 360)),
                               (10 + 360 + 10, 95 + 360 - 15))


            image_labelbar1a = Image.open(path['base'] + path['plots_general'] + path['plots_labelBar']\
                                         + filenames['labelBar1'])
            img_combined.paste(image_labelbar1a.crop((25, 610, 775, 630)),
                               (3, 95 + 360 - 15 + 360 + 13))
            image_labelbar1b = Image.open(path['base'] + path['plots_general'] + path['plots_labelBar']\
                                         + filenames['labelBar1'])
            img_combined.paste(image_labelbar1b.crop((25, 630, 775, 650)),
                               (3, 95 + 360 - 15 + 360 - 12))
            image_labelbar2 = Image.open(path['base'] + path['plots_general'] + path['plots_labelBar']\
                                         + filenames['labelBar2'])
            img_combined.paste(image_labelbar2.crop((25, 575, 775, 650)),
                               (3, 95 + 360 - 15 + 360 + 29))

            image_title = Image.open(path['base'] + path['plots_general'] +
                                     path['plots_text'] + filenames['text'])
            img_combined.paste(image_title.crop((50, 0, 750, 30)),
                               (title_pos, 0))
            image_initial_time = Image.open(path['base'] +
                                            path['plots_general'] +
                                            path['plots_text'] +
                                            filenames['text'])
            img_combined.paste(image_initial_time.crop((5, 40, 300, 60)),
                               (3, 50))
            image_valid_time_start = Image.open(path['base'] +
                                                path['plots_general'] +
                                                path['plots_text'] +
                                                filenames['text'])
            img_combined.paste(image_valid_time_start.crop((450, 40, 740, 60)),
                               (445, 45))
            image_valid_time_end = Image.open(path['base'] +
                                              path['plots_general'] +
                                              path['plots_text'] +
                                              filenames['text'])
            img_combined.paste(image_valid_time_end.crop((560, 630, 775, 650)),
                               (525, 65))
            image_model = Image.open(path['base'] + path['plots_general'] +
                                     path['plots_text'] + filenames['text'])
            img_combined.paste(image_model.crop((5, 630, 550, 650)),
                               (3, 95 + 360 - 15 + 360 - 20 + 75 + 70))

            img_combined.save(path['base'] + path['plots_general'] + path['plots_maps']\
                              + filenames['finalplot'],'png')
            print('---------------------------------------------------')

            for i in range(4):
                os.remove(path['base'] + path['plots_general'] + path['plots_maps']\
                          + filenames['small_map_plot_{:d}'.format(i+1)])
        os.remove(path['base'] + path['plots_general'] + path['plots_text']\
                  + filenames['text'])
    os.remove(path['base'] + path['plots_general'] + path['plots_labelBar']\
              + filenames['labelBar1'])
    os.remove(path['base'] + path['plots_general'] + path['plots_labelBar']\
              + filenames['labelBar2'])

    if timestep_mode == 'leadtime':

        # download latest SMN observations

        obs_filename = 'smn_observations_prec24h_{:4d}{:02d}{:02d}{:02d}.gif'.format(
            runs[0]['year'], runs[0]['month'],
            runs[0]['day'] + 1,  # note: day + 1 does not roll over at month ends
            runs[0]['hour'])
        url = 'https://estaticos.smn.gob.ar/dpd/cartas/precipgr.gif'
        r = requests.get(url, timeout=3)
        with open(path['base'] + path['obs'] + obs_filename, 'wb') as file:
            file.write(r.content)

        # create images

        if model == 'icon-eu-eps':
            model_acr = 'iconeueps'
        elif model == 'icon-global-eps':
            model_acr = 'iconglobaleps'

        images = []
        for i, hour in enumerate(hours[::-1]):
            run = runs[-1 - i]
            fcst_path = 'run_{:4d}{:02d}{:02d}{:02d}/{}/'.format(
                run['year'], run['month'], run['day'], run['hour'],
                domain['name'])
            fcst_filename = '{}_prob_of_exc_2x2_tot_prec_{:02d}h_{:03d}-{:03d}h_central_argentina.png'.format(
                model_acr, timespan, hour - timespan, hour)
            if os.path.isfile(path['base'] + path['plots_general'] +
                              fcst_path + fcst_filename):
                fcst_image = Image.open(path['base'] + path['plots_general'] +
                                        fcst_path + fcst_filename)
            # note: if this file is missing, the fcst_image of the previous
            # iteration is reused; if the very first file is missing,
            # fcst_image is undefined
            obs_image = Image.open(path['base'] + path['obs'] + obs_filename)
            mag = 1.33
            obs_image = obs_image.resize(
                (int(obs_image.width * mag), int(obs_image.height * mag)))

            comb_image = Image.new('RGB', (1250, 950), (255, 255, 255))
            comb_image.paste(fcst_image.crop((0, 0, 750, 950)), (500, 0))
            comb_image.paste(
                obs_image.crop((0, 0, 366 * mag - 1, 564 * mag - 1)), (0, 50))
            images.append(comb_image)

        # create gif

        gif_timestep = 800  # ms
        run = calc_latest_run_time(model)
        subfolder = 'run_{:4d}{:02d}{:02d}12/'.format(run['year'],
                                                      run['month'], run['day'])
        if not os.path.isdir(path['base'] + path['plots_leadtime'] +
                             subfolder):
            os.mkdir(path['base'] + path['plots_leadtime'] + subfolder)

        gifname = '{}_prob_of_exc_2x2_leadtime_tot_prec_{:02d}h_{:03d}-{:03d}h_central_argentina.gif'.format(
            model_acr, timespan, max_hour, 0)
        images[0].save(path['base'] + path['plots_leadtime'] + subfolder +
                       gifname,
                       save_all=True,
                       append_images=images[1:],
                       duration=gif_timestep,
                       loop=0)

    return
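The callfile started via os.system is not shown here; it presumably reads the same positional arguments back from sys.argv. A minimal sketch of that parsing, assuming the argument order built in plot_prob_of_exc_2x2_timespan above (the pointintime variant passes a single hour instead of hour_start/hour_end):

import sys

def parse_callfile_args(argv=None):
    # Sketch only: mirrors the 19 positional arguments assembled above.
    a = argv if argv is not None else sys.argv[1:]
    variable = dict(name=a[0], unit=a[1],
                    hour_start=int(a[2]), hour_end=int(a[3]))
    run = dict(year=int(a[4]), month=int(a[5]), day=int(a[6]), hour=int(a[7]))
    domain = dict(method=a[8], radius=float(a[9]), deltalat=float(a[10]),
                  deltalon=float(a[11]), lat=float(a[12]), lon=float(a[13]),
                  name=a[14])
    model = a[15]
    stat_processing = dict(method=a[16], threshold=float(a[17]))
    plot_type = a[18]
    return variable, run, domain, model, stat_processing, plot_type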
Example #9
def plot_prob_of_exc_2x2_pointintime(variable, thresholds, domains, model,
                                     title_pos, only_0utc_12utc_runs):

    # set basic paths #

    path = dict(
        base='/lsdfos/kit/imk-tro/projects/MOD/Gruppe_Knippertz/nw5893/',
        scripts='scripts/operational/w2w_ensembleplots/cronscripts/',
        plots_general='plots/operational/prob_of_exc/forecast/{}/'.format(
            variable['name']))
    scriptname = 'callfile_probofexc_2x2_pointintime.py'
    #colormap_name = '_greenblackblue'
    colormap_name = ''

    if variable['name'] == 'acc_prec':
        if thresholds[0] < 30.0:
            colormap_name = '_low'
        else:
            colormap_name = '_high'

    # automatic time settings #

    run = calc_latest_run_time('icon-eu-eps')
    if only_0utc_12utc_runs:
        if run['hour'] == 6 or run['hour'] == 18:
            run['hour'] -= 6

    if model == 'icon-eu-eps':
        #hours = list(range(0, 120+1, 12))
        hours = list(range(0, 72, 3)) + list(range(72, 120 + 1, 6))
    elif model == 'icon-global-eps':
        #hours = list(range(0,72,3)) + list(range(72,120,6)) + list(range(120,180+1,12))
        if variable['name'] == 'acc_prec':
            hours = list(range(12, 180 + 1, 12))
        else:
            hours = list(range(0, 180 + 1, 12))

    # explicit time settings #

    #run = dict(year = 2019, month = 12, day = 25, hour = 0)

    #hours = list(range(24, 120+1, 24))       # 00Z run
    #hours = list(range(24+18, 120+1, 24))    # 06Z run
    #hours = list(range(24+12, 120+1, 24))    # 12Z run
    #hours = list(range(24+6, 120+1, 24))     # 18Z run
    #hours = [180]

    stat_processing_list = []
    stat_processing_list.append(
        dict(method='prob_of_exc', threshold=thresholds[0]))
    stat_processing_list.append(
        dict(method='prob_of_exc', threshold=thresholds[1]))
    stat_processing_list.append(
        dict(method='prob_of_exc', threshold=thresholds[2]))
    stat_processing_list.append(
        dict(method='prob_of_exc', threshold=thresholds[3]))
    label_text_value = thresholds[4]

    # make label bar plots #

    domain = domains[0]
    stat_processing = dict(method='prob_of_exc', threshold=label_text_value)
    variable['hour'] = 0

    print('plot labelBar1')
    plot_type = 'labelBar1'
    command = 'python {}{}{} '.format(path['base'], path['scripts'],
                                      scriptname)
    arguments = '{} {} {:d} {:d} {:d} {:d} {:d} {} {:.2f} {:.2f} {:.2f} {:.2f} {:.2f} {} {} {} {:.2f} {}'.format(
        variable['name'], variable['unit'], variable['hour'], run['year'],
        run['month'], run['day'], run['hour'], domain['method'],
        domain['radius'], domain['deltalat'], domain['deltalon'],
        domain['lat'], domain['lon'], domain['name'], model,
        stat_processing['method'], stat_processing['threshold'], plot_type)
    os.system(command + arguments)

    print('plot labelBar2')
    plot_type = 'labelBar2'
    command = 'python {}{}{} '.format(path['base'], path['scripts'],
                                      scriptname)
    arguments = '{} {} {:d} {:d} {:d} {:d} {:d} {} {:.2f} {:.2f} {:.2f} {:.2f} {:.2f} {} {} {} {:.2f} {}'.format(
        variable['name'], variable['unit'], variable['hour'], run['year'],
        run['month'], run['day'], run['hour'], domain['method'],
        domain['radius'], domain['deltalat'], domain['deltalon'],
        domain['lat'], domain['lon'], domain['name'], model,
        stat_processing['method'], stat_processing['threshold'], plot_type)
    os.system(command + arguments)
    print('---------------------------------------------------')

    # make small map plots #

    for hour in hours:
        variable['hour'] = hour
        for stat_processing in stat_processing_list:
            for domain in domains:
                print('plot {}, {:d}h, prob_of_exc, {}{}, {}'.format(
                    variable['name'], hour, stat_processing['threshold'],
                    variable['unit'], domain['name']))

                plot_type = 'small_map_only'
                command = 'python {}{}{} '.format(path['base'],
                                                  path['scripts'], scriptname)
                arguments = '{} {} {:d} {:d} {:d} {:d} {:d} {} {:.2f} {:.2f} {:.2f} {:.2f} {:.2f} {} {} {} {:.2f} {}'\
                            .format(variable['name'], variable['unit'], variable['hour'],
                            run['year'], run['month'], run['day'], run['hour'],
                            domain['method'], domain['radius'], domain['deltalat'], domain['deltalon'],
                            domain['lat'], domain['lon'], domain['name'], model,
                            stat_processing['method'], stat_processing['threshold'], plot_type)
                os.system(command + arguments)

        # make plots for time-dependent texts #

        print('plot text, {:d}h'.format(hour))
        domain = domains[0]
        stat_processing = dict(method='prob_of_exc',
                               threshold=label_text_value)

        plot_type = 'text'
        command = 'python {}{}{} '.format(path['base'], path['scripts'],
                                          scriptname)
        arguments = '{} {} {:d} {:d} {:d} {:d} {:d} {} {:.2f} {:.2f} {:.2f} {:.2f} {:.2f} {} {} {} {:.2f} {}'.format(
            variable['name'], variable['unit'], variable['hour'], run['year'],
            run['month'], run['day'], run['hour'], domain['method'],
            domain['radius'], domain['deltalat'], domain['deltalon'],
            domain['lat'], domain['lon'], domain['name'], model,
            stat_processing['method'], stat_processing['threshold'], plot_type)
        os.system(command + arguments)

        # put all plot parts together #

        if model == 'icon-eu-eps':
            model_acr = 'iconeueps'
        elif model == 'icon-global-eps':
            model_acr = 'iconglobaleps'

        for domain in domains:
            print('paste together {}, {:d}h, prob_of_exc, {}'.format(
                variable['name'], variable['hour'], domain['name']))

            path['plots_maps'] = 'run_{:4d}{:02d}{:02d}{:02d}/{}/'.format(
                run['year'], run['month'], run['day'], run['hour'],
                domain['name'])
            path[
                'plots_labelBar_text'] = 'run_{:4d}{:02d}{:02d}{:02d}/{}/'.format(
                    run['year'], run['month'], run['day'], run['hour'],
                    domains[0]['name'])

            filenames = dict()
            for i, stat_processing in enumerate(stat_processing_list):
                file_str = 'small_map_plot_{:d}'.format(i + 1)
                filenames[
                    file_str] = '{}_prob_of_exc_{}_{:.0f}{}_{:03d}h_{}_small_map_only.png'.format(
                        model_acr, variable['name'],
                        stat_processing['threshold'], variable['unit'],
                        variable['hour'], domain['name'])
            filenames[
                'labelBar1'] = '{}_prob_of_exc_{}_{:.0f}{}_000h_{}_labelBar1.png'.format(
                    model_acr, variable['name'], label_text_value,
                    variable['unit'], domain['name'])
            filenames[
                'labelBar2'] = '{}_prob_of_exc_{}_{:.0f}{}_000h_{}_labelBar2.png'.format(
                    model_acr, variable['name'], label_text_value,
                    variable['unit'], domain['name'])
            filenames[
                'text'] = '{}_prob_of_exc_{}_{:.0f}{}_{:03d}h_{}_text.png'.format(
                    model_acr, variable['name'], label_text_value,
                    variable['unit'], variable['hour'], domain['name'])
            filenames[
                'finalplot'] = '{}_prob_of_exc_2x2_{}_{:03d}h_{}{}.png'.format(
                    model_acr, variable['name'], variable['hour'],
                    domain['name'], colormap_name)

            img_combined = Image.new('RGB', (750, 935), (255, 255, 255))

            image_smallmap1 = Image.open(path['base'] + path['plots_general'] + path['plots_maps']\
                                         + filenames['small_map_plot_1'])
            img_combined.paste(image_smallmap1.crop((0, 0, 360, 360)),
                               (10, 80))
            image_smallmap2 = Image.open(path['base'] + path['plots_general'] + path['plots_maps']\
                                         + filenames['small_map_plot_2'])
            img_combined.paste(image_smallmap2.crop((0, 0, 360, 360)),
                               (10 + 360 + 10, 80))
            image_smallmap3 = Image.open(path['base'] + path['plots_general'] + path['plots_maps']\
                                         + filenames['small_map_plot_3'])
            img_combined.paste(image_smallmap3.crop((0, 0, 360, 360)),
                               (10, 80 + 360 - 15))
            image_smallmap4 = Image.open(path['base'] + path['plots_general'] + path['plots_maps']\
                                         + filenames['small_map_plot_4'])
            img_combined.paste(image_smallmap4.crop((0, 0, 360, 360)),
                               (10 + 360 + 10, 80 + 360 - 15))


            image_labelbar1a = Image.open(path['base'] + path['plots_general'] + path['plots_labelBar_text']\
                                         + filenames['labelBar1'])
            img_combined.paste(image_labelbar1a.crop((25, 610, 775, 630)),
                               (3, 80 + 360 - 15 + 360 + 13))
            image_labelbar1b = Image.open(path['base'] + path['plots_general'] + path['plots_labelBar_text']\
                                         + filenames['labelBar1'])
            img_combined.paste(image_labelbar1b.crop((25, 630, 775, 650)),
                               (3, 80 + 360 - 15 + 360 - 12))
            image_labelbar2 = Image.open(path['base'] + path['plots_general'] + path['plots_labelBar_text']\
                                         + filenames['labelBar2'])
            img_combined.paste(image_labelbar2.crop((25, 575, 775, 650)),
                               (3, 80 + 360 - 15 + 360 + 29))

            image_title = Image.open(path['base'] + path['plots_general'] +
                                     path['plots_labelBar_text'] +
                                     filenames['text'])
            img_combined.paste(image_title.crop((50, 0, 750, 30)),
                               (title_pos, 0))
            image_initial_time = Image.open(path['base'] +
                                            path['plots_general'] +
                                            path['plots_labelBar_text'] +
                                            filenames['text'])
            img_combined.paste(image_initial_time.crop((5, 40, 300, 60)),
                               (3, 45))
            image_valid_time = Image.open(path['base'] +
                                          path['plots_general'] +
                                          path['plots_labelBar_text'] +
                                          filenames['text'])
            img_combined.paste(image_valid_time.crop((450, 40, 730, 60)),
                               (460, 45))
            image_model = Image.open(path['base'] + path['plots_general'] +
                                     path['plots_labelBar_text'] +
                                     filenames['text'])
            img_combined.paste(image_model.crop((5, 630, 550, 650)),
                               (3, 80 + 360 - 15 + 360 - 20 + 75 + 70))

            img_combined.save(path['base'] + path['plots_general'] + path['plots_maps']\
                              + filenames['finalplot'],'png')
            print('---------------------------------------------------')

            for i in range(4):
                os.remove(path['base'] + path['plots_general'] + path['plots_maps']\
                          + filenames['small_map_plot_{:d}'.format(i+1)])
        os.remove(path['base'] + path['plots_general'] + path['plots_labelBar_text']\
                  + filenames['text'])
    os.remove(path['base'] + path['plots_general'] + path['plots_labelBar_text']\
              + filenames['labelBar1'])
    os.remove(path['base'] + path['plots_general'] + path['plots_labelBar_text']\
              + filenames['labelBar2'])

    return
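Both plotting drivers shell out with os.system and a single formatted command string, so every value passes through the shell. As a hedged alternative, subprocess.run with an argument list sidesteps shell quoting (the function and parameter names below are placeholders):

import subprocess

def run_callfile(script_path, args):
    # Sketch only: hand the arguments over as a list instead of one shell string.
    subprocess.run(['python', script_path] + [str(a) for a in args], check=True)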
Example #10
def main():

    # list of forecast hours #

    fcst_hours_list_6h = list(range(0, 120 + 1, 6))
    fcst_hours_list_1h3h = list(range(0, 48, 1)) + list(range(
        48, 72, 3)) + list(range(72, 120 + 1, 6))

    # get latest run time #

    date = calc_latest_run_time('icon-eu-eps')

    # explicit download options #

    ###########################################################
    #date = dict(year = 2021, month = 5, day = 14, hour = 12)
    ###########################################################

    print('download run_{}{:02}{:02}{:02}'.format(date['year'], date['month'],
                                                  date['day'], date['hour']))

    # list of dwd variable names and level types, sl: single level #

    if date['hour'] == 0 or date['hour'] == 12:
        var_list = [['tot_prec', 'sl'], ['snow_con', 'sl'], ['snow_gsp', 'sl'],
                    ['t_2m', 'sl'], ['u_10m', 'sl'], ['v_10m', 'sl'],
                    ['ps', 'sl'], ['clct', 'sl'], ['aswdir_s', 'sl'],
                    ['aswdifd_s', 'sl'], ['vmax_10m', 'sl'], ['tqv', 'sl'],
                    ['cape_ml', 'sl'], ['fi', '500hPa'], ['t', '500hPa'],
                    ['u', '500hPa'], ['v', '500hPa'], ['fi', '850hPa'],
                    ['t', '850hPa'], ['u', '850hPa'], ['v', '850hPa'],
                    ['fi', '300hPa'], ['u', '300hPa'], ['v', '300hPa']]
        vars_to_interpolate = [['t_2m', 'sl'], ['ps', 'sl'], ['t', '850hPa'],
                               ['fi', '500hPa']]
        #var_list = [['snow_con','sl'],['snow_gsp','sl']]
        #vars_to_interpolate = []

    elif date['hour'] == 6 or date['hour'] == 18:
        var_list = [['tot_prec', 'sl'], ['t_2m', 'sl'], ['u_10m', 'sl'],
                    ['v_10m', 'sl'], ['ps', 'sl'], ['clct', 'sl'],
                    ['aswdir_s', 'sl'], ['aswdifd_s', 'sl'],
                    ['vmax_10m', 'sl']]

    # create paths if necessary #

    path = dict(base='/')
    path['grid'] = 'data/model_data/icon-eu-eps/grid/'
    path[
        'data'] = 'data/model_data/icon-eu-eps/forecasts/run_{}{:02}{:02}{:02}'.format(
            date['year'], date['month'], date['day'], date['hour'])
    if not os.path.isdir(path['base'] + path['data']):
        os.mkdir(path['base'] + path['data'])
    path['data'] = path['data'] + '/'

    for var in var_list:
        if var[1] == 'sl':  # sl = single-level
            temp_subdir = path['data'] + var[0]
        else:
            temp_subdir = path['data'] + var[0] + '_' + var[1]

        if not os.path.isdir(path['base'] + temp_subdir):
            os.mkdir(path['base'] + temp_subdir)
        path['subdir'] = temp_subdir + '/'

        # download all grib files from website #

        for fcst_hour in fcst_hours_list_1h3h:
            if var[0] == 'vmax_10m' and fcst_hour == 0:
                continue  # wind gusts (vmax_10m) have no 0 h file
            if var[1] == 'sl':
                filename = 'icon-eu-eps_europe_icosahedral_single-level_{}{:02}{:02}{:02}_{:03}_{}.grib2.bz2'.format(
                    date['year'], date['month'], date['day'], date['hour'],
                    fcst_hour, var[0])
            else:
                level = var[1][:3]
                filename = 'icon-eu-eps_europe_icosahedral_pressure-level_{}{:02}{:02}{:02}_{:03}_{}_{}.grib2.bz2'\
                           .format( date['year'], date['month'], date['day'], date['hour'], fcst_hour, level, var[0])

            url = 'https://opendata.dwd.de/weather/nwp/icon-eu-eps/grib/{:02}/{}/'.format(
                date['hour'], var[0])

            if download(url, filename, path):
                filename = unzip(path, filename)

            if (date['hour'] == 0 or date['hour'] == 12)\
             and var in vars_to_interpolate\
             and fcst_hour in fcst_hours_list_6h:
                if var[1] == 'sl':
                    latlon_filename = 'icon-eu-eps_latlon_0.2_single-level_{}{:02}{:02}{:02}_{:03}h_{}.nc'.format(
                        date['year'], date['month'], date['day'], date['hour'],
                        fcst_hour, var[0])
                else:
                    level = var[1][:3]
                    latlon_filename = 'icon-eu-eps_latlon_0.2_pressure-level_{}{:02}{:02}{:02}_{:03}h_{}_{}.nc'.format(
                        date['year'], date['month'], date['day'], date['hour'],
                        fcst_hour, level, var[0])
                interpolate_icon_grib_to_latlon(path, filename,
                                                latlon_filename, 'icon-eu-eps')

        # read in all grib files of variable and save as one combined netcdf file #

        if var[1] == 'sl':
            grib_filename = 'icon-eu-eps_europe_icosahedral_single-level_{}{:02}{:02}{:02}_*_{}.grib2'.format(
                date['year'], date['month'], date['day'], date['hour'], var[0])
            netcdf_filename = 'icon-eu-eps_icosahedral_single-level_{}{:02}{:02}{:02}_{}.nc'.format(
                date['year'], date['month'], date['day'], date['hour'], var[0])
        else:
            level = var[1][:3]
            grib_filename = 'icon-eu-eps_europe_icosahedral_pressure-level_{}{:02}{:02}{:02}_*_{}_{}.grib2'.format(
                date['year'], date['month'], date['day'], date['hour'], level,
                var[0])
            netcdf_filename = 'icon-eu-eps_icosahedral_pressure-level_{}{:02}{:02}{:02}_{}_{}.nc'.format(
                date['year'], date['month'], date['day'], date['hour'], level,
                var[0])
        convert_gribfiles_to_one_netcdf(path, grib_filename, netcdf_filename,
                                        'icon-eu-eps')

    return
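download and unzip are helpers from the surrounding project. A minimal sketch of what they appear to do here (fetch one file from opendata.dwd.de into path['subdir'] and decompress the .bz2), with their signatures and return values assumed from the call sites:

import bz2
import os
import requests

def download(url, filename, path):
    # Sketch only: return True if the file could be fetched.
    r = requests.get(url + filename, timeout=30)
    if r.status_code != 200:
        return False
    with open(path['base'] + path['subdir'] + filename, 'wb') as f:
        f.write(r.content)
    return True

def unzip(path, filename):
    # Sketch only: decompress *.grib2.bz2 to *.grib2 and return the new name.
    full = path['base'] + path['subdir'] + filename
    with bz2.open(full, 'rb') as src, open(full[:-4], 'wb') as dst:
        dst.write(src.read())
    os.remove(full)
    return filename[:-4]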
Example #11
def main():

    # make lists of forecast hours and variables #

    fcst_hours_list_prec_rate = list(range(0, 78, 1)) + list(
        range(78, 180 + 1, 3))
    fcst_hours_list_6h = list(range(0, 180 + 1, 6))
    #fcst_hours_list_1h = list(range(0,78+1,1))

    # get latest run time #

    date = calc_latest_run_time('icon-global-det')

    # explicit download options #

    ###########################################################
    #date = dict(year = 2020, month = 9, day = 25, hour = 0)
    ###########################################################

    print('download run_{}{:02}{:02}{:02}'.format(date['year'], date['month'],
                                                  date['day'], date['hour']))

    # list of dwd variable names #

    var_list = [['tot_prec', 'sl'], ['pmsl', 'sl'], ['relhum', '850hPa'],
                ['t', '850hPa'], ['fi', '500hPa'], ['fi', '300hPa'],
                ['u', '300hPa'], ['v', '300hPa'], ['cape_ml', 'sl'],
                ['vmax_10m', 'sl'], ['u', '500hPa'], ['v', '500hPa'],
                ['u_10m', 'sl'], ['v_10m', 'sl']]
    vars_to_interpolate = [['pmsl', 'sl'], ['fi', '500hPa'], ['fi', '300hPa'],
                           ['u_10m', 'sl'], ['v_10m', 'sl'], ['u', '500hPa'],
                           ['v', '500hPa']]

    # create paths if necessary #

    path = dict(base='/')
    path['grid'] = 'data/model_data/icon-global-det/grid/'
    path[
        'data'] = 'data/model_data/icon-global-det/forecasts/run_{}{:02}{:02}{:02}'.format(
            date['year'], date['month'], date['day'], date['hour'])
    if not os.path.isdir(path['base'] + path['data']):
        os.mkdir(path['base'] + path['data'])
    path['data'] = path['data'] + '/'

    for var in var_list:
        if var[1] == 'sl':  # sl = single-level
            temp_subdir = path['data'] + var[0]
        else:
            temp_subdir = path['data'] + var[0] + '_' + var[1]

        if not os.path.isdir(path['base'] + temp_subdir):
            os.mkdir(path['base'] + temp_subdir)
        path['subdir'] = temp_subdir + '/'

        # download all grib files from website #

        if var[0] == 'tot_prec':
            fcst_hours_list = fcst_hours_list_prec_rate
        else:
            fcst_hours_list = fcst_hours_list_6h
        #fcst_hours_list = fcst_hours_list_1h

        for fcst_hour in fcst_hours_list:
            if var[0] == 'vmax_10m' and fcst_hour == 0:
                fcst_hour = 1

            if var[1] == 'sl':
                filename = 'icon_global_icosahedral_single-level_{}{:02}{:02}{:02}_{:03}_{}.grib2.bz2'.format(
                    date['year'], date['month'], date['day'], date['hour'],
                    fcst_hour, var[0].upper())
            else:
                level = var[1][:3]
                filename = 'icon_global_icosahedral_pressure-level_{}{:02}{:02}{:02}_{:03}_{}_{}.grib2.bz2'.format(
                    date['year'], date['month'], date['day'], date['hour'],
                    fcst_hour, level, var[0].upper())

            url = 'https://opendata.dwd.de/weather/nwp/icon/grib/{:02}/{}/'.format(
                date['hour'], var[0])

            if download(url, filename, path):
                filename = unzip(path, filename)

            if var in vars_to_interpolate:
                if var[1] == 'sl':
                    latlon_filename = 'icon-global-det_latlon_0.1_single-level_{}{:02}{:02}{:02}_{:03}h_{}.nc'.format(
                        date['year'], date['month'], date['day'], date['hour'],
                        fcst_hour, var[0])
                else:
                    level = var[1][:3]
                    latlon_filename = 'icon-global-det_latlon_0.1_pressure-level_{}{:02}{:02}{:02}_{:03}h_{}_{}.nc'.format(
                        date['year'], date['month'], date['day'], date['hour'],
                        fcst_hour, level, var[0])
                interpolate_icon_grib_to_latlon(path, filename,
                                                latlon_filename,
                                                'icon-global-det')

        # read in all grib files of variable and save as one combined netcdf file #

        if var[1] == 'sl':
            grib_filename = 'icon_global_icosahedral_single-level_{}{:02}{:02}{:02}_*_{}.grib2'.format(
                date['year'], date['month'], date['day'], date['hour'],
                var[0].upper())
            netcdf_filename = 'icon-global-det_icosahedral_single-level_{}{:02}{:02}{:02}_{}.nc'.format(
                date['year'], date['month'], date['day'], date['hour'], var[0])
        else:
            level = var[1][:3]
            grib_filename = 'icon_global_icosahedral_pressure-level_{}{:02}{:02}{:02}_*_{}_{}.grib2'.format(
                date['year'], date['month'], date['day'], date['hour'], level,
                var[0].upper())
            netcdf_filename = 'icon-global-det_icosahedral_pressure-level_{}{:02}{:02}{:02}_{}_{}.nc'.format(
                date['year'], date['month'], date['day'], date['hour'], level,
                var[0])
        convert_gribfiles_to_one_netcdf(path, grib_filename, netcdf_filename,
                                        'icon-global-det')

    return
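interpolate_icon_grib_to_latlon regrids the icosahedral GRIB fields to the regular lat-lon grids referenced elsewhere (latlon_0.1, latlon_0.2, latlon_0.0625). One plausible implementation uses CDO; a sketch under the assumption that a grid description file lives in path['grid'] (its file name here is made up):

import subprocess

def interpolate_icon_grib_to_latlon(path, grib_filename, latlon_filename, model):
    # Sketch only: nearest-neighbour remap of an ICON icosahedral GRIB file
    # onto a regular lat-lon grid, written out as netCDF.
    griddes = path['base'] + path['grid'] + 'latlon_griddes.txt'  # placeholder name
    infile = path['base'] + path['subdir'] + grib_filename
    outfile = path['base'] + path['subdir'] + latlon_filename
    subprocess.run(['cdo', '-f', 'nc', 'remapnn,' + griddes, infile, outfile],
                   check=True)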
def main():

    ###########################################################
    models = 'both-eps'  # plot icon-eu-eps only if global not available
    date = 'latest'
    #date = dict(year = 2021, month = 12, day = 14, hour = 6)
    var = 'all_available'
    pointnames_raw =   ['Karlsruhe','Mainz','Munich','Berlin','Hamburg','Offenbach',\
                        'Amsterdam','Athens','Bologna','Brussels','Copenhagen','Dublin',\
                        'Madrid','Leeds','Lisbon','London','Paris','Rome',\
                        'Toulouse','Valencia','Vienna','Warsaw','Zurich']
    #pointnames_raw = ['Karlsruhe']
    pointnames_pp = ['Karlsruhe', 'Mainz', 'Munich', 'Berlin', 'Hamburg']
    point_type = 'operational_city'
    verbose = True

    ###########################################################

    path = dict(
        base='/',
        plots=
        'data/plots/operational/meteogram_boxplot/forecast/operational_cities/',
        webserver=
        '/home/iconeps/Data3/plots/icon/meteogram_boxplot/forecast/operational_cities',
        rscripts='/home/benedikt/R_scripts/')

    # plot raw model output meteograms and save the German cities data to textfiles #

    for pointname in pointnames_raw:
        if verbose:
            print('--- next meteogram (raw) point is {} ---'.format(pointname))

        if pointname in [
                'Karlsruhe', 'Mainz', 'Munich', 'Offenbach', 'Berlin',
                'Hamburg'
        ]:
            save_point_data = True
        else:
            save_point_data = False

        y_axis_limits = 'raw'
        boxplot_forecast_raw(models, date, var, dict(name=pointname),
                             point_type, save_point_data, y_axis_limits,
                             verbose)

    # run post-processing #

    os.system('Rscript ' + path['rscripts'] + 'pp_init.R')
    if verbose:
        print('--------------------------------------')
        print('--------------------------------------')
        print('-------- pp calculation done ---------')
        print('--------------------------------------')
        print('--------------------------------------')

    # plot post-processed meteograms and replot the corresponding raw meteograms with adjusted y-axis limits #

    for pointname in pointnames_pp:
        if verbose:
            print('--- next meteogram (pp) point is {} ---'.format(pointname))
        boxplot_forecast_pp(date, var, dict(name=pointname), verbose)

        if verbose:
            print('--------------------------------------')
            print('--------------------------------------')
            print('--- next meteogram (raw replot) point is {} ---'.format(
                pointname))
        save_point_data = False
        y_axis_limits = 'raw_and_pp'
        boxplot_forecast_raw(models, date, var, dict(name=pointname),
                             point_type, save_point_data, y_axis_limits,
                             verbose)

    # create latest_run file #

    if date == 'latest':
        date = calc_latest_run_time('icon-eu-eps')

    filename_latest_run = 'latest_run_boxplot_meteograms.txt'
    with open(path['base'] + path['plots'] + filename_latest_run, 'w') as file:
        file.write('{:4d}{:02d}{:02d}{:02d}'.format(date['year'],
                                                    date['month'], date['day'],
                                                    date['hour']))

    # copy all meteograms and latest_run file to webserver #

    subfolder = 'run_{}{:02}{:02}{:02}'.format(date['year'], date['month'],
                                               date['day'], date['hour'])
    os.system('scp -q -r ' + path['base'] + path['plots'] + subfolder + ' '\
              + '[email protected]:' + path['webserver'])
    os.system('scp -q ' + path['base'] + path['plots'] + filename_latest_run + ' '\
              + '[email protected]:' + path['webserver'])

    return
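The latest_run text file written above lets downstream consumers see which run the meteograms belong to. A small sketch of reading it back, with the stamp format taken from the write call ('YYYYMMDDHH'):

def read_latest_run(path, filename='latest_run_boxplot_meteograms.txt'):
    # Sketch only: parse the run timestamp back into the dict form used elsewhere.
    with open(path['base'] + path['plots'] + filename) as f:
        stamp = f.read().strip()
    return dict(year=int(stamp[0:4]), month=int(stamp[4:6]),
                day=int(stamp[6:8]), hour=int(stamp[8:10]))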
Example #13
def main():

    # list of forecast hours #

    fcst_hours_list = list(range(0, 48, 1)) + list(range(48, 72, 3)) + list(
        range(72, 120, 6)) + list(range(120, 180 + 1, 12))

    # get latest run time #

    date = calc_latest_run_time('icon-global-eps')

    # explicit download options #

    ###########################################################
    #date = dict(year = 2019, month = 5, day = 1, hour = 12)
    ###########################################################

    print('download run_{}{:02}{:02}{:02}'.format(date['year'], date['month'],
                                                  date['day'], date['hour']))

    # list of dwd variable names #

    var_list = ['tot_prec', 't_2m', 'u_10m', 'v_10m', 'clct']

    # create paths if necessary #

    path = dict(base='/')
    path[
        'data'] = 'data/model_data/icon-global-eps/forecasts/run_{}{:02}{:02}{:02}'.format(
            date['year'], date['month'], date['day'], date['hour'])
    if not os.path.isdir(path['base'] + path['data']):
        os.mkdir(path['base'] + path['data'])
    path['data'] = path['data'] + '/'

    for var in var_list:
        temp_subdir = path['data'] + var
        if not os.path.isdir(path['base'] + temp_subdir):
            os.mkdir(path['base'] + temp_subdir)
        path['subdir'] = temp_subdir + '/'

        # download all grib files from website #

        for fcst_hour in fcst_hours_list:
            filename = 'icon-eps_global_icosahedral_single-level_{}{:02}{:02}{:02}_{:03}_{}.grib2.bz2'.format(
                date['year'], date['month'], date['day'], date['hour'],
                fcst_hour, var)
            url = 'https://opendata.dwd.de/weather/nwp/icon-eps/grib/{:02}/{}/'.format(
                date['hour'], var)

            if download(url, filename, path):
                filename = unzip(path, filename)

        # read in all grib files of variable and save as one combined netcdf file #

        grib_filename = 'icon-eps_global_icosahedral_single-level_{}{:02}{:02}{:02}_*_{}.grib2'.format(
            date['year'], date['month'], date['day'], date['hour'], var)
        netcdf_filename = 'icon-global-eps_icosahedral_single-level_{}{:02}{:02}{:02}_{}.nc'.format(
            date['year'], date['month'], date['day'], date['hour'], var)
        convert_gribfiles_to_one_netcdf(path, grib_filename, netcdf_filename,
                                        'icon-global-eps')

    return
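convert_gribfiles_to_one_netcdf is again project code. One way to get the same effect is xarray with the cfgrib engine; a minimal sketch under that assumption (the concatenation dimension depends on how cfgrib decodes these particular files):

import glob
import xarray as xr

def convert_gribfiles_to_one_netcdf(path, grib_filename, netcdf_filename, model):
    # Sketch only: open all per-lead-time GRIB files matching the pattern
    # and write them out as one combined netCDF file.
    files = sorted(glob.glob(path['base'] + path['subdir'] + grib_filename))
    datasets = [xr.open_dataset(f, engine='cfgrib') for f in files]
    combined = xr.concat(datasets, dim='step')
    combined.to_netcdf(path['base'] + path['subdir'] + netcdf_filename)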
def boxplot_leadtime(pointnames, date_user, verbose):

    ##### variables #####

    var_list = [
        't_2m', 'wind_10m', 'mslp', 'clct', 'prec_rate', 'direct_rad',
        'diffuse_rad'
    ]
    #var_list = []

    date_fcst = calc_latest_run_time('icon-eu-eps')

    if date_user is not None:
        date_fcst = date_user


    print('-- Forecast time: {}{:02}{:02}-{:02}UTC --'.format(\
          date_fcst['year'], date_fcst['month'], date_fcst['day'], date_fcst['hour']))

    path = dict(
        base='/lsdfos/kit/imk-tro/projects/MOD/Gruppe_Knippertz/nw5893/',
        points_eu_eps='',
        points_eu_det='',
        plots='')

    ##### main loop #####

    for var in var_list:
        if var == 't_2m':
            meta = dict(var='2-m temperature', units='°C')
            var_str = var
        elif var == 'prec_rate':
            meta = dict(var='Precipitation rate', units='mm/h')
            var_str = var
        elif var == 'wind_10m':
            meta = dict(var='10-m wind speed', units='km/h')
            var_str = var
        elif var == 'mslp':
            meta = dict(var='Mean sea level pressure', units='hPa')
            var_str = var
        elif var == 'clct':
            meta = dict(var='Total cloud cover', units='%')
            var_str = var
        elif var == 'direct_rad':
            meta = dict(var='Direct downward shortwave radiation', units='W/m²')
            var_str = 'aswdir_s'
        elif var == 'diffuse_rad':
            meta = dict(var='Diffuse downward shortwave radiation',
                        units='W/m²')
            var_str = 'aswdifd_s'

        ##### make list of lead times #####

        if var == 't_2m' or var == 'wind_10m' or var == 'mslp' or var == 'clct':
            max_lead_time = 120
            fcst_hours_list_eu_eps = list(range(0, 48, 1)) + list(
                range(48, 72, 3)) + list(range(72, 120 + 1, 6))
            fcst_hours_list_eu_det = list(range(0, 78, 1)) + list(
                range(78, 120 + 1, 3))

            date = date_fcst

        elif var == 'prec_rate' or var == 'direct_rad' or var == 'diffuse_rad':
            max_lead_time = 120 - 6
            fcst_hours_list_eu_eps = list(range(1, 48, 1)) + list(
                range(48, 72, 3)) + list(range(72, 120 + 1, 6))
            fcst_hours_list_eu_det = list(range(1, 78, 1)) + list(
                range(78, 120 + 1, 3))

            date_holder = datetime.datetime(date_fcst['year'],
                                            date_fcst['month'],
                                            date_fcst['day'],
                                            date_fcst['hour'])
            date_holder -= datetime.timedelta(0, 3600 * 6)
            date = dict(year=date_holder.year,
                        month=date_holder.month,
                        day=date_holder.day,
                        hour=date_holder.hour)

        lead_times = list(range(max_lead_time, 0 - 1, -6))

        for pointname in pointnames:
            if verbose:
                print('----- next point is {} -----'.format(pointname))
                print('----- next variable is {} -----'.format(var))

            path_old = dict(base=path['base'],
                            points_eu_eps=path['points_eu_eps'],
                            points_eu_det=path['points_eu_det'],
                            plots=path['plots'])

            point_values_eu_eps = np.zeros((len(lead_times), 40),
                                           dtype='float32')
            point_values_eu_det = np.zeros((len(lead_times)), dtype='float32')
            i = 0
            for lead_time in lead_times:
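                # initialisation time of the run whose +lead_time h forecast is
                # valid at the common verification time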
                time = datetime.datetime(date['year'], date['month'],
                                         date['day'], date['hour'])
                time -= datetime.timedelta(0, 3600 * lead_time)
                old_run_date = dict(year=time.year,
                                    month=time.month,
                                    day=time.day,
                                    hour=time.hour)

                ##### get data from icon-eu-eps #####

                path_old['points_eu_eps'] = 'forecast_archive/icon-eu-eps/extracted_points/run_{}{:02}{:02}{:02}/{}/'.format(\
                                    old_run_date['year'], old_run_date['month'], old_run_date['day'], old_run_date['hour'], var_str)
                filename = 'icon-eu-eps_{}{:02}{:02}{:02}_{}_{}.txt'.format(\
                    old_run_date['year'], old_run_date['month'], old_run_date['day'], old_run_date['hour'], var_str, pointname)

                try:
                    if not os.path.isdir(path_old['base'] +
                                         path_old['points_eu_eps']):
                        print('----- extracting data from eps run {}{:02}{:02}-{:02}UTC --'.format(\
                            old_run_date['year'], old_run_date['month'], old_run_date['day'], old_run_date['hour']))

                        point_savetofile_iconeueps(None, old_run_date, var_str,
                                                   pointname, True, True)

                    elif not os.path.isfile(path_old['base'] +
                                            path_old['points_eu_eps'] +
                                            filename):
                        print('----- extracting data from eps run {}{:02}{:02}-{:02}UTC --'.format(\
                            old_run_date['year'], old_run_date['month'], old_run_date['day'], old_run_date['hour']))

                        point_savetofile_iconeueps(None, old_run_date, var_str,
                                                   pointname, True, True)

                    else:
                        print('----- reading data from eps run {}{:02}{:02}-{:02}UTC -----'.format(\
                              old_run_date['year'], old_run_date['month'], old_run_date['day'], old_run_date['hour']))

                    point_values_eu_eps_one_run = read_data(
                        path_old, old_run_date, var_str, pointname,
                        'icon-eu-eps')
                    if var == 't_2m' or var == 'wind_10m' or var == 'mslp' or var == 'clct':
                        point_values_eu_eps[
                            i, :] = point_values_eu_eps_one_run[
                                fcst_hours_list_eu_eps.index(lead_time), :]
                    elif var == 'prec_rate' or var == 'direct_rad' or var == 'diffuse_rad':
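                        # average the k archive steps of the 6-h window ending
                        # at lead_time + 6 h (step width 1 h up to +48 h,
                        # 3 h up to +72 h, 6 h beyond)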
                        n = fcst_hours_list_eu_eps.index(lead_time + 6)
                        if lead_time + 6 > 72:
                            timestep = 6
                        elif lead_time + 6 > 48:
                            timestep = 3
                        else:
                            timestep = 1
                        k = int(6 / timestep)
                        point_values_eu_eps[i, :] = np.nanmean(
                            point_values_eu_eps_one_run[n - k + 1:n + 1, :], 0)

                except (FileNotFoundError, AssertionError,
                        eccodes.CodesInternalError):
                    print(
                        '----- -> missing eps raw grib file --------------------'
                    )
                    point_values_eu_eps[i, :] = np.ones(
                        (40)) * -100  # missing-data marker, ends up outside the plot

                ##### get data from icon-eu #####

                path_old['points_eu_det'] = 'forecast_archive/icon-eu/extracted_points/run_{}{:02}{:02}{:02}/{}/'.format(\
                                    old_run_date['year'], old_run_date['month'], old_run_date['day'], old_run_date['hour'], var_str)
                filename = 'icon-eu-det_{}{:02}{:02}{:02}_{}_{}.txt'.format(\
                    old_run_date['year'], old_run_date['month'], old_run_date['day'], old_run_date['hour'], var_str, pointname)

                try:
                    if not os.path.isdir(path_old['base'] +
                                         path_old['points_eu_det']):
                        print('----- extracting data from det run {}{:02}{:02}-{:02}UTC --'.format(\
                            old_run_date['year'], old_run_date['month'], old_run_date['day'], old_run_date['hour']))

                        point_savetofile_iconeudet(None, old_run_date, var_str,
                                                   pointname, True, True)

                    elif not os.path.isfile(path_old['base'] +
                                            path_old['points_eu_det'] +
                                            filename):
                        print('----- extracting data from det run {}{:02}{:02}-{:02}UTC --'.format(\
                            old_run_date['year'], old_run_date['month'], old_run_date['day'], old_run_date['hour']))

                        point_savetofile_iconeudet(None, old_run_date, var_str,
                                                   pointname, True, True)

                    else:
                        print('----- reading data from det run {}{:02}{:02}-{:02}UTC -----'.format(\
                              old_run_date['year'], old_run_date['month'], old_run_date['day'], old_run_date['hour']))

                    point_values_eu_det_one_run = read_data(
                        path_old, old_run_date, var_str, pointname,
                        'icon-eu-det')
                    if var == 't_2m' or var == 'wind_10m' or var == 'mslp' or var == 'clct':
                        point_values_eu_det[i] = point_values_eu_det_one_run[
                            fcst_hours_list_eu_det.index(lead_time)]
                    elif var == 'prec_rate' or var == 'direct_rad' or var == 'diffuse_rad':
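                        # same 6-h averaging for the deterministic run
                        # (step width 1 h up to +78 h, 3 h beyond)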
                        n = fcst_hours_list_eu_det.index(lead_time + 6)
                        if lead_time + 6 > 78:
                            timestep = 3
                        else:
                            timestep = 1
                        k = int(6 / timestep)
                        point_values_eu_det[i] = np.nanmean(
                            point_values_eu_det_one_run[n - k + 1:n + 1])

                except (FileNotFoundError, AssertionError,
                        eccodes.CodesInternalError):
                    print(
                        '----- -> missing det raw grib file --------------------'
                    )
                    point_values_eu_det[i] = -100  # missing-data marker, ends up outside the plot

                i += 1

            ##### calculate total value range #####
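            # missing runs were marked with -100: treat them as NaN while
            # determining the value range, then restore the marker so that
            # those values fall outside the plotted axis range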

            point_values_eu_eps[np.where(point_values_eu_eps == -100)] = np.nan
            point_values_eu_det[np.where(point_values_eu_det == -100)] = np.nan

            y_axis_range = dict(min=np.nanmin(point_values_eu_eps[:]),
                                max=np.nanmax(point_values_eu_eps[:]))
            if np.nanmin(point_values_eu_det[:]) < y_axis_range['min']:
                y_axis_range['min'] = np.nanmin(point_values_eu_det[:])
            if np.nanmax(point_values_eu_det[:]) > y_axis_range['max']:
                y_axis_range['max'] = np.nanmax(point_values_eu_det[:])

            point_values_eu_eps[np.isnan(point_values_eu_eps)] = -100
            point_values_eu_det[np.isnan(point_values_eu_det)] = -100

            ##### set y-axis limits, tick interval and ref value #####
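            # the deterministic value at lead time 0 h (last entry) serves as
            # the analysis reference value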

            y_axis_range['analysis'] = point_values_eu_det[-1]

            if var == 't_2m':
                mean = (y_axis_range['max'] + y_axis_range['min']) / 2
                if y_axis_range['min'] < mean - 9.0:
                    y_axis_range['min'] -= 1.0
                else:
                    y_axis_range['min'] = mean - 10.0
                if y_axis_range['max'] > mean + 8.0:
                    y_axis_range['max'] += 2.0
                else:
                    y_axis_range['max'] = mean + 10.0
                y_axis_range['interval'] = 5.0
                y_axis_range['ref'] = 0.0

            elif var == 'prec_rate':
                y_axis_range['min'] = -0.2
                if y_axis_range['max'] < 2.75:
                    y_axis_range['max'] = 3.0
                    y_axis_range['interval'] = 0.5
                elif y_axis_range['max'] < 6.0:
                    y_axis_range['max'] += 0.1 * y_axis_range['max']
                    y_axis_range['interval'] = 1.0
                elif y_axis_range['max'] < 12.0:
                    y_axis_range['max'] += 0.1 * y_axis_range['max']
                    y_axis_range['interval'] = 2.0
                else:
                    y_axis_range['max'] += 0.1 * y_axis_range['max']
                    y_axis_range['interval'] = 3.0
                y_axis_range['ref'] = 0.0

            elif var == 'wind_10m':
                y_axis_range['min'] = 0.0
                if y_axis_range['max'] < 27.5:
                    y_axis_range['max'] = 30.0
                    y_axis_range['interval'] = 5.0
                else:
                    y_axis_range['max'] += 0.1 * y_axis_range['max']
                    y_axis_range['interval'] = 10.0
                y_axis_range['ref'] = 0.0

            elif var == 'mslp':
                if y_axis_range['min'] < 990.0:
                    y_axis_range['min'] -= 5.0
                else:
                    y_axis_range['min'] = 990.0
                if y_axis_range['max'] > 1037.0:
                    y_axis_range['max'] += 5.0
                else:
                    y_axis_range['max'] = 1040.0
                y_axis_range['interval'] = 10.0
                y_axis_range['ref'] = 0.0

            elif var == 'clct':
                y_axis_range['min'] = -6.0
                y_axis_range['max'] = 110.0
                y_axis_range['interval'] = 20.0
                y_axis_range['ref'] = 0.0

            elif var == 'direct_rad':
                y_axis_range['min'] = -20.0
                if y_axis_range['max'] < 280.0:
                    y_axis_range['max'] = 300.0
                else:
                    y_axis_range['max'] += 0.1 * y_axis_range['max']
                y_axis_range['interval'] = 100.0
                y_axis_range['ref'] = 0.0

            elif var == 'diffuse_rad':
                y_axis_range['min'] = -20.0
                if y_axis_range['max'] < 280.0:
                    y_axis_range['max'] = 300.0
                else:
                    y_axis_range['max'] += 0.1 * y_axis_range['max']
                y_axis_range['interval'] = 100.0
                y_axis_range['ref'] = 0.0

            ##### make path #####

            temp_subdir = 'plots/operational/boxplots_leadtime/run_{}{:02}{:02}{:02}'.format(\
                            date_fcst['year'], date_fcst['month'], date_fcst['day'], date_fcst['hour'])

            if not os.path.isdir(path['base'] +
                                 temp_subdir):  #and pointname == 'Karlsruhe':
                os.mkdir(path['base'] + temp_subdir)

            temp_subdir = temp_subdir + '/' + pointname
            if not os.path.isdir(path['base'] + temp_subdir):
                os.mkdir(path['base'] + temp_subdir)
            path['plots'] = temp_subdir + '/'

            filename = 'boxplot_leadtime_{}{:02}{:02}{:02}_{}_{}'.format(\
                        date_fcst['year'], date_fcst['month'], date_fcst['day'], date_fcst['hour'], var, pointname)

            ##### calculate percentiles #####

            # data_percentiles_eu_eps: one row per lead time x 7 percentiles (0/10/25/50/75/90/100)
            data_percentiles_eu_eps = np.percentile(
                point_values_eu_eps, [0, 10, 25, 50, 75, 90, 100], axis=1).T

            ##### plotting #####

            plot_in_magics_boxplot(path, date, pointname, var, meta, y_axis_range, filename, lead_times,\
                                   data_percentiles_eu_eps, point_values_eu_det)

            print('------------------------------------------------')

    return
Example #15
def main():
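    # Download the latest icon-eu-det grib2 fields for selected variables from
    # opendata.dwd.de, unzip them and merge each variable into one netCDF file.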

    # make lists of forecast hours and variables #
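    # fcst_hours_list_3h: every 3 h up to +120 h;
    # fcst_hours_list_1h3h (used for tot_prec): hourly up to +78 h, then 3-hourly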

    fcst_hours_list_3h = list(range(0, 120 + 1, 3))
    fcst_hours_list_1h3h = list(range(0, 78, 1)) + list(range(78, 120 + 1, 3))

    # get latest run time #

    date = calc_latest_run_time('icon-eu-det')
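    # the 06 and 18 UTC runs are mapped to the preceding 00/12 UTC main run;
    # only those runs get a non-empty variable list below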
    if date['hour'] == 6 or date['hour'] == 18:
        date['hour'] -= 6

    # explicit download options #

###########################################################
#date = dict(year = 2019, month = 4, day = 10, hour = 12)
###########################################################

    print('download run_{}{:02}{:02}{:02}'.format(date['year'], date['month'],
                                                  date['day'], date['hour']))

    # list of dwd variable names #

    if date['hour'] == 0 or date['hour'] == 12:
        var_list = ['synmsg_bt_cl_ir10.8', 'tot_prec', 'vmax_10m']
        #var_list = ['synmsg_bt_cl_ir10.8','tot_prec','vmax_10m','t_2m','u_10m','v_10m','pmsl','clct',
        #            'aswdir_s','aswdifd_s']
    else:
        var_list = []
        #var_list = ['tot_prec','t_2m','u_10m','v_10m','pmsl','clct','aswdir_s','aswdifd_s','vmax_10m']

    # create paths if necessary

    path = dict(base='/')
    path['data'] = 'data/model_data/icon-eu-det/forecasts/run_{}{:02}{:02}{:02}'.format(
        date['year'], date['month'], date['day'], date['hour'])
    if not os.path.isdir(path['base'] + path['data']):
        os.mkdir(path['base'] + path['data'])
    path['data'] = path['data'] + '/'

    for var in var_list:
        temp_subdir = path['data'] + var
        if not os.path.isdir(path['base'] + temp_subdir):
            os.mkdir(path['base'] + temp_subdir)
        path['subdir'] = temp_subdir + '/'

        # download all grib files from website

        if var == 'tot_prec':
            fcst_hours_list = fcst_hours_list_1h3h
        else:
            fcst_hours_list = fcst_hours_list_3h

        for fcst_hour in fcst_hours_list:
            filename = 'icon-eu_europe_regular-lat-lon_single-level_{}{:02}{:02}{:02}_{:03}_{}.grib2.bz2'.format(
                date['year'], date['month'], date['day'], date['hour'],
                fcst_hour, var.upper())
            url = 'https://opendata.dwd.de/weather/nwp/icon-eu/grib/{:02}/{}/'.format(
                date['hour'], var)

            if download(url, filename, path):
                filename = unzip(path, filename)

        # read in all grib files of variable and save as one combined netcdf file #

        grib_filename = 'icon-eu_europe_regular-lat-lon_single-level_{}{:02}{:02}{:02}_*_{}.grib2'.format(
            date['year'], date['month'], date['day'], date['hour'],
            var.upper())
        netcdf_filename = 'icon-eu-det_latlon_0.0625_single-level_{}{:02}{:02}{:02}_{}.nc'.format(
            date['year'], date['month'], date['day'], date['hour'], var)
        convert_gribfiles_to_one_netcdf(path, grib_filename, netcdf_filename,
                                        'icon-eu-det')

    return
def plot_t2m_uncertainty_shades(pointnames, date_user, mode, colorpalette_name, verbose):
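    # Render 2-m temperature ensemble forecasts for single points as
    # 'uncertainty shades': for every output time a kernel density estimate of
    # the member distribution is drawn as a vertical colour stripe.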

    ##### make lists of forecast hours and variables #####

    fcst_hours_list_eu = np.concatenate((np.arange(0,48,1),\
                                        np.arange(48,72,3),\
                                        np.arange(72,120+1,6)))
    fcst_hours_list_global = np.arange(132,180+1,12)
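    # mode: '120h_raw' uses all icon-eu-eps output steps, '48h_interpolated'
    # interpolates the first 48 h linearly in time, '180h_raw' additionally
    # reads icon-global-eps points (12-hourly, +132 h to +180 h); only the
    # first two modes are rendered in the plotting section below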

    if mode == '180h_raw':
        model = 'icon-global-eps'
    else:
        model = 'icon-eu-eps'

    run = calc_latest_run_time(model)
    if date_user is not None:
        run = date_user

    print('-- Forecast time: {}{:02}{:02}-{:02}UTC --'.format(\
          run['year'], run['month'], run['day'], run['hour']))


    path = dict(base = '/lsdfos/kit/imk-tro/projects/MOD/Gruppe_Knippertz/nw5893/',
                points_eu_eps = '',
                points_global_eps = '',
                plots = 'plots/experimental/uncertainty_shades/',
                cityfolder = '',
                colorpalette = 'additional_data/colorpalettes/')

    subfolder = 'run_{:4d}{:02d}{:02d}{:02d}'.format(run['year'], run['month'], run['day'], run['hour'])
    if not os.path.isdir(path['base'] + path['plots'] + subfolder):
        os.mkdir(path['base'] + path['plots'] + subfolder)
    path['plots'] += subfolder + '/'

    for pointname in pointnames:
        if verbose:
            print('----- next point is {} -----'.format(pointname))

        #subfolder = pointname
        #if not os.path.isdir(path['base'] + path['plots'] + subfolder):
        #    os.mkdir(path['base'] + path['plots'] + subfolder)
        #path['cityfolder'] = subfolder + '/'


        ##### get data from icon-eu-eps #####

        try:
            pathstr = 'forecast_archive/icon-eu-eps/extracted_points/'
            path['points_eu_eps'] = '{}run_{}{:02}{:02}{:02}/t_2m/'.format(\
                                    pathstr, run['year'], run['month'], run['day'], run['hour'])
            point_values_eu_eps = read_data(path, run, 't_2m', pointname, 'icon-eu-eps')

        except (FileNotFoundError, AssertionError):
            print('no icon-eu-eps data')
            point_values_eu_eps = np.ones((65, 40)) * -100   # will be out of plot

        if mode == '48h_interpolated':
            point_values_eu_eps = point_values_eu_eps[:49]
            fcst_hours_list_eu = fcst_hours_list_eu[:49]

        ##### get data from icon-eps #####

        if mode == '180h_raw':
            try:
                pathstr = 'forecast_archive/icon-eps/extracted_points/'
                path['points_global_eps'] = '{}run_{}{:02}{:02}{:02}/t_2m/'.format(\
                                            pathstr, run['year'], run['month'], run['day'], run['hour'])
                point_values_global_eps = read_data(path, run, 't_2m', pointname, 'icon-global-eps')

            except (FileNotFoundError, AssertionError):
                print('no icon-global-eps data')
                point_values_global_eps = np.ones((5, 40)) * -100   # will be out of plot


        y_axis_range = dict()
        if mode == '180h_raw':
            y_axis_range['min'] = min(point_values_eu_eps.min(), point_values_global_eps.min())
            y_axis_range['max'] = max(point_values_eu_eps.max(), point_values_global_eps.max())
        else:
            y_axis_range['min'] = point_values_eu_eps.min()
            y_axis_range['max'] = point_values_eu_eps.max()

        mean = (y_axis_range['max'] + y_axis_range['min']) / 2

        if y_axis_range['min'] < mean - 9.0:
            y_axis_range['min'] -= 1.0
        else:
            y_axis_range['min'] = mean - 10.0

        if y_axis_range['max'] > mean + 9.0:
            y_axis_range['max'] += 1.0
        else:
            y_axis_range['max'] = mean + 10.0

        y_axis_range['min'] = np.around(y_axis_range['min'])
        y_axis_range['max'] = np.around(y_axis_range['max'])

########################################################################

        if mode == '120h_raw':
            data_values = point_values_eu_eps
        elif mode == '48h_interpolated':
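            # linear interpolation in time: num_intersteps sub-steps between
            # consecutive hourly ensemble values give a smoother shading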
            num_intersteps = 30
            data_values = np.zeros((48*num_intersteps+1, 40), dtype='float32')
            for member in range(40):
                for timestep in range(0, 48):
                    for n in range(num_intersteps):
                        data_values[timestep*num_intersteps+n, member] =\
                            (1-n/num_intersteps) * point_values_eu_eps[timestep, member]\
                            + (n/num_intersteps) * point_values_eu_eps[timestep+1, member]
                data_values[48*num_intersteps, member] = point_values_eu_eps[48, member]

########################################################################

        if colorpalette_name == 'palettable_orange':
            from palettable.colorbrewer.sequential import Oranges_9
            colorpalette = Oranges_9.mpl_colormap

        elif colorpalette_name == 'custom':
            colorpalette_source = 'tristenca'

            if colorpalette_source == 'tristenca':
                filename = 'colorscale_tristenca_t2m_monohue_blues_1.txt'
                with open(path['base'] + path['colorpalette'] + filename, 'r') as f:
                    line = f.read()

                num_colors = int(len(line) / 8)
                inverse_colorpalette = True


                hex_colors = []
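                # the palette file stores one colour per 8 characters
                # (e.g. '#RRGGBB' plus a delimiter); extract the six hex digits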
                for i in range(num_colors):
                    start = i * 8 + 1
                    end = start + 6
                    hex_colors.append(line[start:end])

                if not inverse_colorpalette:
                    custom_palette_list = [[255, 255, 255]]
                    for hex_color in hex_colors[:]:
                        rgb_color = [int(hex_str, 16) for hex_str in [hex_color[:2], hex_color[2:4], hex_color[4:]]]
                        custom_palette_list.append(rgb_color)
                else:
                    custom_palette_list = []
                    for hex_color in hex_colors[:]:
                        rgb_color = [int(hex_str, 16) for hex_str in [hex_color[:2], hex_color[2:4], hex_color[4:]]]
                        custom_palette_list.append(rgb_color)
                    custom_palette_list.append([255, 255, 255])
                    custom_palette_list = custom_palette_list[::-1]


            elif colorpalette_source == 'hclwizard':
                filename = 'colorscale_hclwizard_t2m_prob_plasma.txt'
                with open(path['base'] + path['colorpalette'] + filename, 'r') as f:
                    lines = f.readlines()

                hex_colors = []
                for line in lines:
                    hex_colors.append(line[2:8])

                custom_palette_list = [[255, 255, 255]]           # extra white color
                for hex_color in hex_colors[:]:
                    rgb_color = [int(hex_str, 16) for hex_str in [hex_color[:2], hex_color[2:4], hex_color[4:]]]
                    custom_palette_list.append(rgb_color)
                #custom_palette_list.append(custom_palette_list[-1])     # extra color for correct LabelBar view

            colorpalette = mpl.colors.ListedColormap(np.array(custom_palette_list) / 255)


########################################################################

        image_height = 539

        if mode == '120h_raw':
            image_width = 12
            num_times = 65
        elif mode == '48h_interpolated':
            image_width = 1
            num_times = 48*num_intersteps+1

        kernel = 'epanechnikov'
        kde_width = 0.8
        vmax = 0.6
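        # For every output time, fit a kernel density estimate (Epanechnikov,
        # bandwidth kde_width) to the 40 ensemble members and evaluate it on
        # image_height evenly spaced temperature levels; each column of
        # ens_density becomes one vertical stripe of the shading.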

        y_vec = np.linspace(y_axis_range['min'], y_axis_range['max'], image_height)
        ens_density = np.empty((image_height, num_times))
        for i in range(num_times):
            kde = KernelDensity(kernel=kernel, bandwidth=kde_width).fit(data_values[i, :][:, None])
            ens_density[:, i] = np.exp(kde.score_samples(y_vec[:, None]))

            #plt.plot(y_vec, ens_density[:, i], 'o')
            #plt.show()


        image = np.empty((image_height, image_width, num_times))
        for i in range(num_times):
            image[:, :, i] = np.tile(ens_density[:, i],(image_width,1)).T
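        # Background image used in the '120h_raw' plot; the single non-zero
        # pixel presumably fixes the colour normalisation so that zeros map to
        # the lowest palette colour.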
        background = np.zeros((image_height, 1426))
        background[10, 10] = 1

        fig, ax = plt.subplots(figsize = [1500 / 100, 600 / 100])
        fig.tight_layout()
        #figManager = plt.get_current_fig_manager()
        #figManager.window.showMaximized()
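        # first y tick: the smallest multiple of 5 at or above the (rounded) axis minimum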

        y_axis_tick_min = ((y_axis_range['min'] + 204) // 5 - 40) * 5

        if mode == '120h_raw':
            fig.figimage(background, xo=48, yo=39,
                         cmap=colorpalette, origin='lower')
            for i in range(num_times):
                fig.figimage(image[:, :, i], xo=(81 + 272 / 24 * fcst_hours_list_eu[i] - image_width / 2),\
                             yo=39, vmax=vmax, cmap=colorpalette, origin='lower')

            ax.set(xlim=(-3, 120+3), ylim=(y_axis_range['min'], y_axis_range['max']),\
                   yticks=np.arange(y_axis_tick_min, y_axis_range['max'] + 0.1, 5))
            #ax.set(xticks=np.arange(0, 121, 24), xticklabels=['day {}'.format(i) for i in range(1, 6)])
            ax.set(xticks=np.arange(0, 121, 24))

        elif mode == '48h_interpolated':
            for i in range(num_times):
                fig.figimage(image[:, :, i], xo=(81 + 272 / 9.6 / num_intersteps * i - image_width / 2),\
                             yo=39, vmax=vmax, cmap=colorpalette, origin='lower')

            ax.set(xlim=(-1.2, 48+1.2), ylim=(y_axis_range['min'], y_axis_range['max']),\
                   yticks=np.arange(y_axis_tick_min, y_axis_range['max'] + 0.1, 5))
            ax.set(xticks=np.arange(0, 49, 6))

        ax.set_xlabel('forecast time', labelpad=10)
        ax.set_ylabel(r'$^\circ$C', labelpad=20, rotation='horizontal')
        plt.title('t2m forecast for {}, icon-eu-eps run from {:02d}.{:02d}.{:4d}, {:02d}UTC'.format(\
                    pointname, run['day'], run['month'], run['year'], run['hour']))


        #ax.plot(np.arange(0.5, 5, 1), point['measurements'], 'kx', zorder=15)

        filename = 'uncertainty_shades_{}_t2m_run_{:4d}{:02d}{:02d}{:02d}_{}.png'.format(\
                    mode, run['year'], run['month'], run['day'], run['hour'], pointname)
        plt.savefig(path['base'] + path['plots'] + filename)

        del fig, ax
        plt.close()

    return