Code Example #1
def domain_mean_weather_ts(inargs):
    """
    Calculate hourly time-series for domain mean variables:
    
    - hourly precipitation
    - CAPE
    - convective adjustment timescale
    - boundary layer height
    Precipitation is analyzed for ensemble, deterministic and observations.
    All other values are calculated for the ensemble mean and deterministic.

    Parameters
    ----------
    inargs : argparse object
      Argparse object with all input arguments

    """

    # Define NetCDF parameters and create rootgroup
    groups = ['obs', 'det', 'ens']
    datearray = np.array(make_datelist(inargs, out_format='netcdf'))
    timearray = np.arange(inargs.time_start, inargs.time_end + inargs.time_inc,
                          inargs.time_inc)
    dimensions = {
        'time': timearray,
        'date': datearray,
    }
    variables = {
        'PREC_ACCUM': ['date', 'time'],
        'CAPE_ML': ['date', 'time'],
        'TAU_C': ['date', 'time'],
        'HPBL': ['date', 'time'],
    }
    rootgroup = create_netcdf(inargs,
                              groups,
                              dimensions,
                              variables,
                              ensemble_dim=True)

    radar_mask = get_radar_mask(inargs)
    print('Number of masked grid points: ' + str(np.sum(radar_mask)) +
          ' out of ' + str(radar_mask.size) + ' total grid points')

    # Load analysis data and store in NetCDF
    for idate, date in enumerate(make_datelist(inargs)):
        print('Computing time series for: ' + date)
        for group in rootgroup.groups:
            for ie in range(rootgroup.groups[group].dimensions['ens_no'].size):
                for var in rootgroup.groups[group].variables:

                    compute_ts_mean(inargs, idate, date, group, ie, var,
                                    rootgroup, radar_mask)

    # Close NetCDF file
    rootgroup.close()
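As a usage illustration, a routine like this would typically be driven from an argparse-based entry point. The sketch below is hypothetical: the argument names time_start, time_end, time_inc and nens appear in the functions on this page, but the date arguments and defaults are guesses.

# Hypothetical driver, not part of the original module; argument names
# other than time_start/time_end/time_inc/nens are assumptions.
import argparse

if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('--date_start', help='First date (assumed YYYYMMDDHH)')
    parser.add_argument('--date_end', help='Last date (assumed YYYYMMDDHH)')
    parser.add_argument('--time_start', type=int, default=1,
                        help='First forecast hour')
    parser.add_argument('--time_end', type=int, default=24,
                        help='Last forecast hour')
    parser.add_argument('--time_inc', type=float, default=1.,
                        help='Forecast output increment in hours')
    parser.add_argument('--nens', type=int, default=20,
                        help='Number of ensemble members')
    inargs = parser.parse_args()
    domain_mean_weather_ts(inargs)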
Code Example #2
def prec_hist(inargs):
    """
    Compute precipitation histograms for obs, det and ens and store them
    in a NetCDF file.

    Parameters
    ----------
    inargs : argparse object
      Argparse object with all input arguments

    """

    # Define bins TODO: Read from config!
    histbinedges = [0, 0.1, 0.2, 0.5, 1, 2, 5, 10, 1000]

    # Make netCDF file
    datearray = np.array(make_datelist(inargs, out_format='netcdf'))
    timearray = np.arange(inargs.time_start, inargs.time_end + inargs.time_inc,
                          inargs.time_inc)
    groups = ['obs', 'det', 'ens']
    dimensions = {
        'time': timearray,
        'date': datearray,
        'bins': np.array(histbinedges[1:]),
    }
    variables = {
        'prec_hist': ['date', 'time', 'bins'],
    }
    rootgroup = create_netcdf(inargs,
                              groups,
                              dimensions,
                              variables,
                              ensemble_dim=True)

    # TODO: This is somewhat the same as domain_mean_weather_ts
    radar_mask = get_radar_mask(inargs)
    print('Number of masked grid points: ' + str(np.sum(radar_mask)) +
          ' out of ' + str(radar_mask.size) + ' total grid points')

    # Load analysis data and store in NetCDF
    for idate, date in enumerate(make_datelist(inargs)):
        print('Computing prec_hist for: ' + date)
        for group in rootgroup.groups:
            for ie in range(rootgroup.groups[group].dimensions['ens_no'].size):
                if group in ['det', 'ens']:
                    if group == 'det':
                        ens_no = 'det'
                    else:
                        ens_no = ie + 1
                    datalist = get_datalist_model(inargs, date, ens_no,
                                                  'PREC_ACCUM', radar_mask)
                elif group == 'obs':
                    datalist = get_datalist_radar(inargs, date, radar_mask)
                else:
                    raise Exception('Wrong group.')

                # Now do the actual calculation
                for it, data in enumerate(datalist):

                    rootgroup.groups[group].variables['prec_hist']\
                        [idate, it, :, ie] = np.histogram(data,
                                                          histbinedges)[0]

    # Close NetCDF file
    rootgroup.close()
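As a quick standalone check of what lands in prec_hist: np.histogram with explicit bin edges returns one count per bin, with the trailing 1000 mm edge acting as a catch-all for extreme hourly accumulations. The data below are made up purely to show the shape of the result.

# Standalone demonstration of the binning used above; the values are
# invented, only np.histogram's behavior is being illustrated.
import numpy as np

histbinedges = [0, 0.1, 0.2, 0.5, 1, 2, 5, 10, 1000]
data = np.array([0.0, 0.05, 0.3, 0.7, 1.5, 3.0, 8.0, 25.0])
counts, edges = np.histogram(data, histbinedges)
print(counts)  # [2 0 1 1 1 1 1 1], one count per bin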
Code Example #3
def plot_prec_stamps(inargs):
    """
    Plots precipitation stamps of obs, det and ensemble members every hour
    for each date and time specified.

    Parameters
    ----------
    inargs : argparse object
      Argparse object with all input arguments

    """

    # Loop over dates
    for idate, date in enumerate(make_datelist(inargs)):
        print('Date ' + date)
        # Loop over times
        for t in make_timelist(timedelta(hours=inargs.time_start),
                               timedelta(hours=inargs.time_end),
                               timedelta(hours=1)):
            print('Time ' + str(t))
            # Load all CU objects
            fobjlist = []
            titlelist = []

            # 1st: Radar
            radarpref = (get_config(inargs, 'paths', 'radar_data') +
                         get_config(inargs, 'paths', 'radar_prefx'))
            radarsufx = get_config(inargs, 'paths', 'radar_sufix')
            dtradar = timedelta(minutes=10)

            radartime = yymmddhhmm(yyyymmddhh_strtotime(date) + t - dtradar)
            radarfn = radarpref + radartime + radarsufx
            radar_fobj = getfobj_ncdf(radarfn, fieldn='pr', dwdradar=True)
            # Crop data
            l11, l12, l21, l22, l11_rad, l12_rad, l21_rad, l22_rad = \
                get_domain_limits(inargs)
            l11_diff = l11_rad - l11
            l12_diff = l12_rad - l12
            l21_diff = l21_rad - l21
            l22_diff = l22_rad - l22
            radar_fobj.data = radar_fobj.data[l11_diff:l12_diff,
                                              l21_diff:l22_diff]
            radar_fobj.lats = radar_fobj.lats[l11_diff:l12_diff,
                                              l21_diff:l22_diff]
            radar_fobj.lons = radar_fobj.lons[l11_diff:l12_diff,
                                              l21_diff:l22_diff]
            fobjlist.append(radar_fobj)
            titlelist.append('Radar')

            # 2nd: det
            ncdffn_pref = (get_config(inargs, 'paths', 'raw_data') + date +
                           '/deout_ceu_pspens/' + 'det' + '/OUTPUT/lfff')
            fobjlist.append(
                getfobj_ncdf(ncdffn_pref + ddhhmmss(t) + '.nc_30m_surf',
                             'PREC_ACCUM'))
            titlelist.append('Det')

            # 3rd: ens
            ncdffn = 'lfff' + ddhhmmss(t) + '.nc_30m_surf'
            date_dir = (get_config(inargs, 'paths', 'raw_data') + date +
                        '/deout_ceu_pspens/')
            fobjlist.extend(
                getfobj_ncdf_ens(date_dir,
                                 'sub',
                                 inargs.nens,
                                 ncdffn,
                                 dir_suffix='/OUTPUT/',
                                 fieldn='PREC_ACCUM',
                                 nfill=1))
            titlelist.extend(
                ['Member ' + str(i + 1) for i in range(inargs.nens)])

            # Now plot
            n_panels = len(fobjlist)
            n_cols = 4
            n_rows = int(np.ceil(float(n_panels) / n_cols))

            pw = get_config(inargs, 'plotting', 'page_width')
            fig, axmat = plt.subplots(n_rows,
                                      n_cols,
                                      figsize=(pw, 3.0 * n_rows))
            axflat = np.ravel(axmat)

            for i in range(len(fobjlist)):
                plt.sca(axflat[i])
                cf = plot_stamp(inargs, fobjlist[i], cmPrec, levelsPrec,
                                axflat[i], 'PREC_ACCUM')
                axflat[i].set_title(titlelist[i])
            cb = fig.colorbar(cf,
                              cax=fig.add_axes([0.4, 0.15, 0.2, 0.02]),
                              orientation='horizontal')
            cb.set_label('Accumulation [mm/h]')
            titlestr = ((yyyymmddhh_strtotime(date) +
                         t).strftime('%d %b - %H UTC'))
            fig.suptitle(titlestr)
            plt.subplots_adjust(wspace=0.02, left=0.02, right=0.98)

            # Save figure and log
            save_fig_and_log(fig,
                             None,
                             inargs,
                             'prec_stamps',
                             datestr=((yyyymmddhh_strtotime(date) +
                                       t).strftime('%Y%m%d_%H')))
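One detail worth noting: with a fixed 4-column grid, the last row can contain unused axes (e.g. radar + det + 20 members = 22 panels in a 6x4 grid). A possible refinement, not in the original code, is to blank the leftovers after creating the grid; a self-contained sketch:

# Sketch of hiding unused axes in a stamp grid; the panel count is
# illustrative (radar + det + 20 members).
import numpy as np
import matplotlib.pyplot as plt

n_panels = 22
n_cols = 4
n_rows = int(np.ceil(float(n_panels) / n_cols))
fig, axmat = plt.subplots(n_rows, n_cols)
axflat = np.ravel(axmat)
for ax in axflat[n_panels:]:
    ax.set_visible(False)  # blank the leftover axes in the last row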
Code Example #4
def create_netcdf(inargs):
    """
    
    Parameters
    ----------
    inargs : argparse object
      Argparse object with all input arguments

    Returns
    -------
    rootgroup : NetCDF dataset object

    """

    dimensions = {
        'date': np.array(make_datelist(inargs, out_format='netcdf')),
        'time': np.arange(inargs.time_start, inargs.time_end + inargs.time_inc,
                          inargs.time_inc),
        'n': np.array([256, 128, 64, 32, 16, 8, 4]),
        'x': np.arange(get_config(inargs, 'domain', 'ana_irange')),
        'y': np.arange(get_config(inargs, 'domain', 'ana_jrange')),
        # TODO: Softcode this stuff
        'cond_bins_mean_m': np.linspace(0, 2e8, 10)[1:],
        'cond_bins_m': np.linspace(0, 1e9, 50)[1:],
    }

    variables = {
        'var_m': ['date', 'time', 'n', 'x', 'y'],
        'var_M': ['date', 'time', 'n', 'x', 'y'],
        'var_N': ['date', 'time', 'n', 'x', 'y'],
        'mean_m': ['date', 'time', 'n', 'x', 'y'],
        'mean_M': ['date', 'time', 'n', 'x', 'y'],
        'mean_N': ['date', 'time', 'n', 'x', 'y'],
        'corr_m_N': ['date', 'time', 'n', 'x', 'y'],
        'cond_m_hist': ['n', 'cond_bins_mean_m', 'cond_bins_m'],
    }
    if inargs.var == 'm':
        variables.update({
            'var_TTENS': ['date', 'time', 'n', 'x', 'y'],
            'mean_TTENS': ['date', 'time', 'n', 'x', 'y']
        })

    pp_fn = get_pp_fn(inargs)

    # Create NetCDF file
    rootgroup = Dataset(pp_fn, 'w', format='NETCDF4')
    rootgroup.log = create_log_str(inargs, 'Preprocessing')

    # Create root dimensions and variables
    for dim_name, dim_val in dimensions.items():
        rootgroup.createDimension(dim_name, dim_val.shape[0])
        tmp_var = rootgroup.createVariable(dim_name, 'f8', dim_name)
        tmp_var[:] = dim_val

    # Create variables
    for var_name, var_dims in variables.items():
        tmp_var = rootgroup.createVariable(var_name, 'f8', var_dims)
        # Set all variables to nan by default to save time later
        tmp_var[:] = np.nan
    return rootgroup
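The dimension-then-variable pattern above is the standard netCDF4 idiom; a minimal self-contained sketch of the same steps (file name and sizes are invented for illustration):

# Standalone sketch of the netCDF4 pattern used above: create a
# dimension, its coordinate variable, and a NaN-prefilled data
# variable. File name and sizes are illustrative only.
import numpy as np
from netCDF4 import Dataset

rootgroup = Dataset('example.nc', 'w', format='NETCDF4')
time_vals = np.arange(0., 24., 1.)
rootgroup.createDimension('time', time_vals.shape[0])
time_var = rootgroup.createVariable('time', 'f8', 'time')
time_var[:] = time_vals
data_var = rootgroup.createVariable('my_field', 'f8', ('time',))
data_var[:] = np.nan  # pre-fill so untouched entries stay missing
rootgroup.close()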
Code Example #5
def compute_variance(inargs):
    """
    Main analysis routine to coarse grain fields and compute variances.
    
    Parameters
    ----------
    inargs : argparse object
      Argparse object with all input arguments


    """

    # Some preliminaries
    dx = float(get_config(inargs, 'domain', 'dx'))

    # Make the pp NetCDF file
    rootgroup = create_netcdf(inargs)
    rootgroup.variables['cond_m_hist'][:] = 0

    # Load the raw_data
    if inargs.var == 'm':  # Load data for mass flux calculation
        raw_data = load_raw_data(inargs,
                                 ['W', 'QC', 'QI', 'QS', 'RHO', 'TTENS_MPHY'],
                                 'ens',
                                 lvl=inargs.lvl)
    elif inargs.var == 'prec':  # Load data for precipitation calculation
        raw_data = load_raw_data(inargs, ['PREC_ACCUM'], 'ens')
    else:
        raise Exception('Wrong var! ' + inargs.var)

    # Loop over each time
    for idate, date in enumerate(make_datelist(inargs)):
        print('Computing variance for ' + date)
        for it in range(rootgroup.dimensions['time'].size):
            # Loop over ensemble members
            # Temporarily save the centers of mass and sums
            com_ens_list = []
            sum_ens_list = []
            for ie in range(raw_data.dimensions['ens_no'].size):
                # Identify the clouds
                if inargs.var == 'm':
                    field = raw_data.variables['W'][idate, it, ie]
                    opt_field = (raw_data.variables['QC'][idate, it, ie] +
                                 raw_data.variables['QI'][idate, it, ie] +
                                 raw_data.variables['QS'][idate, it, ie])
                    rho = raw_data.variables['RHO'][idate, it, ie]
                    opt_thresh = 0.
                else:
                    field = raw_data.variables['PREC_ACCUM'][idate, it, ie]
                    opt_field = None
                    rho = None
                    opt_thresh = None

                labels, size_list, sum_list, com_list = \
                    identify_clouds(field, inargs.thresh, opt_field=opt_field,
                                    water=inargs.sep, rho=rho,
                                    dx=dx, neighborhood=inargs.footprint,
                                    return_com=True, opt_thresh=opt_thresh)

                if com_list.shape[0] == 0:  # Account for empty arrays
                    com_list = np.empty((0, 2))

                if inargs.var == 'm':
                    sum_list = sum_list * dx * dx  # to convert to mass flux
                com_ens_list.append(com_list)
                sum_ens_list.append(sum_list)

            # Compute the variances and means
            comp_var_mean(inargs, idate, it, rootgroup, com_ens_list,
                          sum_ens_list, raw_data)
    rootgroup.close()
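comp_var_mean itself is not shown in these excerpts. Conceptually, once every member has contributed its per-cloud sums, ensemble statistics are taken across members; the sketch below illustrates only that last step on invented numbers, while the real routine also handles coarse-graining boxes and histograms.

# Hypothetical illustration of the final step: one summary value per
# member (here the domain-total mass flux), then ensemble mean and
# variance across members. Numbers are invented.
import numpy as np

sum_ens_list = [np.array([1.2e8, 3.4e8]), np.array([2.0e8]),
                np.array([0.9e8, 1.1e8, 2.2e8])]
totals = np.array([s.sum() for s in sum_ens_list])
ens_mean = totals.mean()
ens_var = totals.var(ddof=1)  # unbiased variance across members
print(ens_mean, ens_var)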
Code Example #6
def plot_spectra(inargs):
    """
    For now this loads previously computed files.

    Returns
    -------

    """
    savesuf = '_ana-spectra_wat-True_height-3000_nens-50_tstart-3_tend-24_tinc-180_minmem-5_dr-2.nc'
    # Load data
    dke_spec_list = []
    bgke_spec_list = []
    dprec_spec_list = []
    bgprec_spec_list = []
    for d in make_datelist(inargs):
        dataset = Dataset(inargs.preproc_dir + d + savesuf, 'r')
        dke_spec_list.append(dataset.variables['dkespec'][:])
        bgke_spec_list.append(dataset.variables['bgkespec'][:])
        dprec_spec_list.append(dataset.variables['dprecspec'][:])
        bgprec_spec_list.append(dataset.variables['bgprecspec'][:])
    dke_spec = np.nanmean(dke_spec_list, axis=0)
    bgke_spec = np.nanmean(bgke_spec_list, axis=0)
    dprec_spec = np.nanmean(dprec_spec_list, axis=0)
    bgprec_spec = np.nanmean(bgprec_spec_list, axis=0)
    # time and speclam are read from the last file; they are assumed to be
    # identical across all dates
    timelist = [timedelta(seconds=ts) for ts in dataset.variables['time']]
    timelist_plot = [(dt.total_seconds() / 3600) for dt in timelist]

    # Define colors
    cyc = ("#E7A7FF", "#FF84DB", "#EF8974", "#AF9300", "#529324", "#008768",
           "#006C88", "#2D3184")
    speclam = dataset.variables['speclam'][:]

    # Set up figures
    for diff, bg, name in zip([dke_spec, dprec_spec],
                              [bgke_spec, bgprec_spec],
                              ['Kinetic energy', 'Precipitation']):
        pw = get_config(inargs, 'plotting', 'page_width')
        width_fraction = 3. / 9.
        ratio = 1.
        fig, ax = plt.subplots(1, 1, figsize=(pw * width_fraction,
                                              pw * width_fraction * ratio))

        ############# Time loop ##############
        for it, t in enumerate(timelist):
            print('time: ' + str(t))
            # Get ratio
            ratio = diff[it] / bg[it] / 2.
            ax.plot(speclam / 1000., ratio, c=cyc[it],
                    label=str(int(timelist_plot[it])).zfill(2),
                    linewidth=1.5)

        ax.legend(loc=3, ncol=2, fontsize=8, title='Time [UTC]')
        ax.plot([5, 1000.], [1, 1], c='gray', alpha=0.5)
        ax.set_xlabel('Wavelength [km]')
        # ax.set_ylabel('Saturation ratio')
        # ax.set_title("Saturation of KE spectrum")
        ax.set_ylim(1e-2, 1.1)
        ax.set_xlim(5, 1000.)
        ax.set_yscale('log')
        ax.set_xscale('log')
        ax.spines['top'].set_visible(False)
        ax.spines['right'].set_visible(False)
        ax.spines['bottom'].set_position(('outward', 3))
        ax.spines['left'].set_position(('outward', 3))
        plt.yticks(rotation=90)

        plt.subplots_adjust(left=0.15, right=0.95, bottom=0.3, top=0.85)

        save_fig_and_log(fig, None, inargs, name[:4], tight=False)
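A note on the factor of 2 in ratio = diff[it] / bg[it] / 2.: for two fields with identical spectra that have fully decorrelated, the spectrum of their difference is twice the background spectrum, so the plotted ratio saturates at 1. That is presumably why the gray reference line is drawn at y = 1.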
Code Example #7
def create_netcdf(inargs):
    """
    Creates a NetCDF object to store data.

    Parameters
    ----------
    inargs : argparse object
      Argparse object with all input arguments

    Returns
    -------
    rootgroup : NetCDF object

    """

    prec_freq_binedges, cld_size_binedges, cld_sum_binedges, \
        cld_size_sep_binedges, cld_sum_sep_binedges = create_bin_edges(inargs)

    datearray = np.array(make_datelist(inargs, out_format='netcdf'))
    timearray = np.arange(inargs.time_start, inargs.time_end + inargs.time_inc,
                          inargs.time_inc)
    rdf_radius = np.arange(0., inargs.rdf_r_max + inargs.rdf_dr, inargs.rdf_dr)
    rdf_radius = (rdf_radius[:-1] + rdf_radius[1:]) / 2.

    dimensions = {
        'time': timearray,
        'date': datearray,
        'cld_size_bins': np.array(cld_size_binedges[1:]),
        'cld_sum_bins': np.array(cld_sum_binedges[1:]),
        'cld_size_sep_bins': np.array(cld_size_sep_binedges[1:]),
        'cld_sum_sep_bins': np.array(cld_sum_sep_binedges[1:]),
        'rdf_radius': rdf_radius
    }
    variables = {
        'cld_size': ['date', 'time', 'cld_size_bins'],
        'cld_sum': ['date', 'time', 'cld_sum_bins'],
        'cld_size_sep': ['date', 'time', 'cld_size_sep_bins'],
        'cld_sum_sep': ['date', 'time', 'cld_sum_sep_bins'],
        'cld_size_mean': ['date', 'time'],
        'cld_sum_mean': ['date', 'time'],
        'cld_size_sep_mean': ['date', 'time'],
        'cld_sum_sep_mean': ['date', 'time'],
        'rdf': ['date', 'time', 'rdf_radius'],
        'rdf_sep': ['date', 'time', 'rdf_radius'],
    }
    if inargs.var == 'PREC_ACCUM':
        groups = ['obs', 'det', 'ens']
        dimensions.update({'prec_freq_bins': np.array(prec_freq_binedges[1:])})
        variables.update({'prec_freq': ['date', 'time', 'prec_freq_bins']})
    elif inargs.var == 'm':
        groups = ['det', 'ens']
    else:
        raise Exception('Wrong variable.')

    pp_fn = get_pp_fn(inargs)

    # Create NetCDF file
    rootgroup = Dataset(pp_fn, 'w', format='NETCDF4')
    rootgroup.log = create_log_str(inargs, 'Preprocessing')

    # Create root dimensions and variables
    for dim_name, dim_val in dimensions.items():
        rootgroup.createDimension(dim_name, dim_val.shape[0])
        tmp_var = rootgroup.createVariable(dim_name, 'f8', dim_name)
        tmp_var[:] = dim_val

    # Create group dimensions and variables
    for var_dims in variables.values():
        var_dims.append('ens_no')
    dimensions['ens_no'] = 1

    for g in groups:
        rootgroup.createGroup(g)
        if g == 'ens':
            dimensions['ens_no'] = inargs.nens

        # Create dimensions
        for dim_name, dim_len in dimensions.items():
            if not isinstance(dim_len, int):
                dim_len = dim_len.shape[0]
            rootgroup.groups[g].createDimension(dim_name, dim_len)

        # Create variables
        for var_name, var_dims in variables.items():
            rootgroup.groups[g].createVariable(var_name, 'f8', var_dims)

    return rootgroup
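A minimal self-contained sketch of the per-group ensemble-dimension idiom above (file name and member count are invented for illustration):

# Standalone sketch of the group idiom used above: every group gets
# its own 'ens_no' dimension, size 1 except for the 'ens' group.
from netCDF4 import Dataset

rootgroup = Dataset('groups_example.nc', 'w', format='NETCDF4')
nens = 20
for g in ['obs', 'det', 'ens']:
    grp = rootgroup.createGroup(g)
    grp.createDimension('ens_no', nens if g == 'ens' else 1)
    grp.createVariable('prec_freq', 'f8', ('ens_no',))
rootgroup.close()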
Code Example #8
def cloud_stats(inargs):
    """
    Compute and save precipitation amount, cloud size and cloud
    precipitation histograms, and the radial distribution function.

    Parameters
    ----------
    inargs : argparse object
      Argparse object with all input arguments


    """

    # TODO: This function is also called in create_ncdf, could do better!
    prec_freq_binedges, cld_size_binedges, cld_sum_binedges, \
        cld_size_sep_binedges, cld_sum_sep_binedges = create_bin_edges(inargs)

    # Make netCDF file
    rootgroup = create_netcdf(inargs)

    for group in rootgroup.groups:
        if inargs.var == 'PREC_ACCUM':
            raw_data = load_raw_data(inargs, 'PREC_ACCUM', group,
                                     radar_mask_type=inargs.radar_mask)
        else:
            raw_data = load_raw_data(inargs, ['W', 'QC', 'QI', 'QS', 'RHO'],
                                     group, radar_mask_type=False,
                                     lvl=inargs.lvl)

        for idate, date in enumerate(make_datelist(inargs)):
            for ie in range(rootgroup.groups[group].dimensions['ens_no'].size):
                # Now do the actual calculation
                for it in range(
                        rootgroup.groups[group].dimensions['time'].size):

                    if inargs.var == 'PREC_ACCUM':
                        # 1st: calculate total precipitation histogram
                        data = raw_data.variables['PREC_ACCUM'][idate, it, ie]
                        rootgroup.groups[group].variables['prec_freq']\
                            [idate, it, :, ie] = np.histogram(data,
                                                        prec_freq_binedges)[0]
                    else:
                        data = raw_data.variables['W'][idate, it, ie]

                    # 2nd: compute cloud size and precipitation histograms
                    tmp = compute_cloud_histograms(inargs, raw_data, rootgroup,
                                                   group, idate, it, ie,
                                                   cld_size_binedges,
                                                   cld_sum_binedges,
                                                   cld_size_sep_binedges,
                                                   cld_sum_sep_binedges)
                    labels, labels_sep = tmp

                    # 3rd: Compute radial distribution function
                    if inargs.radar_mask in ['total', 'day']:
                        raise Exception('radar_mask type no longer supported '
                                        'for RDF')
                    if (inargs.radar_mask == 'hour' and
                            inargs.var == 'PREC_ACCUM'):
                        compute_rdfs(inargs, labels, labels_sep, data,
                                     raw_data.variables[
                                         'mask'][idate, it].astype(int),
                                     rootgroup, group, idate, it, ie)
                    else:
                        compute_rdfs(inargs, labels, labels_sep, data,
                                     None, rootgroup, group, idate, it, ie)
        raw_data.close()

    # Close NetCDF file
    rootgroup.close()
Code Example #9
def plot_prec_stamps(inargs):
    """
    Plots precipitation stamps of obs, det and ensemble members every hour
    for each date and time specified.
    
    Parameters
    ----------
    inargs : argparse object
      Argparse object with all input arguments

    """

    # TODO: Update colors
    cmPrec = ((1, 1, 1), (0, 0.627, 1), (0.137, 0.235, 0.98),
              (0.392, 0, 0.627), (0.784, 0, 0.627))
    # (0.1  , 0.1   , 0.784),
    levelsPrec = [0, 1, 3, 10, 30, 100.]

    # Loop over dates
    for idate, date in enumerate(make_datelist(inargs)):

        # Loop over times
        for t in make_timelist(timedelta(hours=inargs.time_start),
                               timedelta(hours=inargs.time_end),
                               timedelta(hours=1)):

            # Load all CU objects
            fobjlist = []
            titlelist = []

            # 1st: Radar
            fobjlist.append(get_and_crop_radar_fobj(inargs, date, t))
            titlelist.append('Radar')

            # 2nd: det
            ncdffn_pref = (get_config(inargs, 'paths', 'raw_data') + date +
                           '/deout_ceu_pspens/' + 'det' + '/OUTPUT/lfff')
            fobjlist.append(
                getfobj_ncdf(ncdffn_pref + ddhhmmss(t) + '.nc_30m_surf',
                             'PREC_ACCUM'))
            titlelist.append('Det')

            # 3rd: ens
            ncdffn = 'lfff' + ddhhmmss(t) + '.nc_30m_surf'
            date_dir = (get_config(inargs, 'paths', 'raw_data') + date +
                        '/deout_ceu_pspens/')
            fobjlist.extend(
                getfobj_ncdf_ens(date_dir,
                                 'sub',
                                 inargs.nens,
                                 ncdffn,
                                 dir_suffix='/OUTPUT/',
                                 fieldn='PREC_ACCUM',
                                 nfill=1))
            titlelist.extend(['Mem ' + str(i + 1) for i in range(inargs.nens)])

            # Now plot
            n_panels = len(fobjlist)
            n_cols = 4
            n_rows = int(np.ceil(float(n_panels) / n_cols))
            fig, axmat = plt.subplots(n_rows,
                                      n_cols,
                                      figsize=(10, 3.5 * n_rows))
            axflat = np.ravel(axmat)

            for i in range(len(fobjlist)):
                plt.sca(axflat[i])
                cf, tmp = ax_contourf(axflat[i],
                                      fobjlist[i],
                                      colors=cmPrec,
                                      pllevels=levelsPrec,
                                      ji0=(50 + inargs.zoom_lat1,
                                           50 + inargs.zoom_lon1),
                                      ji1=(357 - 51 + inargs.zoom_lat2,
                                           357 - 51 + inargs.zoom_lon2),
                                      sp_title=titlelist[i],
                                      Basemap_drawrivers=False,
                                      npars=0,
                                      nmers=0)
            cb = fig.colorbar(cf,
                              cax=fig.add_axes([0.4, 0.1, 0.2, 0.02]),
                              orientation='horizontal')
            cb.set_label('Accumulation [mm/h]')
            titlestr = (yyyymmddhh_strtotime(date).strftime(
                get_config(inargs, 'plotting', 'date_fmt')) + ' ' +
                        str(t.seconds // 3600).zfill(2) + 'UTC')
            fig.suptitle(titlestr)
            plt.tight_layout(rect=[0, 0.1, 1, 0.93])

            # Save figure and log
            save_fig_and_log(fig,
                             None,
                             inargs,
                             'prec_stamps',
                             date=date,
                             time=str(t.seconds // 3600).zfill(2))
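make_timelist itself is not shown in these excerpts; judging from its call sites it yields timedelta steps from start to end. A plausible reconstruction, offered only as a guess at the helper's behavior:

# Hypothetical reconstruction of make_timelist based on its call
# sites above; the real helper may differ (e.g. in end-inclusiveness).
from datetime import timedelta

def make_timelist(t_start, t_end, t_inc):
    timelist = []
    t = t_start
    while t <= t_end:
        timelist.append(t)
        t += t_inc
    return timelist

# Example: hourly steps from forecast hour 1 to 24; each element is a
# timedelta, formatted via ddhhmmss(t) in the plotting code above.
for t in make_timelist(timedelta(hours=1), timedelta(hours=24),
                       timedelta(hours=1)):
    pass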