Example #1
def get_time_slicer(ts, f, time_interval):
    """get time slicer from the time_interval
    Following options are available

    1. time_interval with [ts_begin, ts_end]
    2. only one timestamp is selected and the found
        right one would be beyond the ts range -> argnearest instead searchsorted
    3. only one is timestamp
    """

    # select the first timestamp to the right of begin (unlike argnearest, which could pick a nearer one to the left)
    #print(f'start time {h.ts_to_dt(ts[0])}')
    it_b = 0 if ts.shape[0] == 1 else np.searchsorted(
        ts, h.dt_to_ts(time_interval[0]), side='right')
    if len(time_interval) == 2:
        it_e = h.argnearest(ts, h.dt_to_ts(time_interval[1]))

        if it_b == ts.shape[0]: it_b = it_b - 1
        valid_step = 3 * np.median(np.diff(ts))
        if ts[it_e] < h.dt_to_ts(
                time_interval[0]) - valid_step or ts[it_b] < h.dt_to_ts(
                    time_interval[0]):
            # second condition is to ensure that no timestamp before
            # the selected interval is chosen
            # (problem with limrad after change of sampling frequency)
            msg = 'found last profile of file {}\n at ts[it_e] {} too far ({}s) from {}\n'.format(
                    f, h.ts_to_dt(ts[it_e]), valid_step, time_interval[0]) \
                 + 'or begin too early {} < {}\n returning None'.format(h.ts_to_dt(ts[it_b]), time_interval[0])
            logger.warning(msg)
            return None

        it_e = it_e + 1 if not it_e == ts.shape[0] - 1 else None
        slicer = [slice(it_b, it_e)]
    elif it_b == ts.shape[0]:
        # only one timestamp is selected
        # and the found right one would be beyond the ts range
        it_b = h.argnearest(ts, h.dt_to_ts(time_interval[0]))
        slicer = [slice(it_b, it_b + 1)]
    else:
        slicer = [slice(it_b, it_b + 1)]
    return slicer
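A minimal usage sketch with hypothetical inputs; it assumes h.dt_to_ts converts a datetime to a unix timestamp and that h, np and logger are in scope, as in the function above:

import datetime
import numpy as np

# hypothetical 30 s timestamp axis and a 10 min interval inside it
ts = h.dt_to_ts(datetime.datetime(2021, 1, 1)) + np.arange(0, 3600, 30, dtype=float)
time_interval = [datetime.datetime(2021, 1, 1, 0, 10),
                 datetime.datetime(2021, 1, 1, 0, 20)]
slicer = get_time_slicer(ts, 'hypothetical_file.nc', time_interval)
if slicer is not None:
    ts_sel = ts[slicer[0]]  # profiles that fall into the requested interval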
Example #2
def random_choice(xr_ds, rg_int, N=4, iclass=4, var='voodoo_classification'):
    nts, nrg = xr_ds.ZSpec.ts.size, xr_ds.ZSpec.rg.size

    icnt = 0
    indices = np.zeros((N, 2), dtype=int)  # np.int is removed in recent NumPy versions
    nnearest = h.argnearest(xr_ds.ZSpec.rg.values, rg_int)

    while icnt < N:
        while True:
            idxts = int(np.random.randint(0, high=nts, size=1))
            idxrg = int(np.random.randint(0, high=nnearest, size=1))
            if ~xr_ds.mask[idxts, idxrg] and xr_ds[var].values[
                    idxts, idxrg] == iclass:
                indices[icnt, :] = [idxts, idxrg]
                icnt += 1
                break
    return indices
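A hedged usage sketch with a toy dataset that only contains the fields random_choice accesses; h is assumed to be the same helpers module used throughout these examples:

import numpy as np
import xarray as xr

# hypothetical dataset: 20 time steps, 10 range gates, nothing masked, all class 4
nts, nrg = 20, 10
xr_ds = xr.Dataset(
    {
        'ZSpec': (('ts', 'rg'), np.random.rand(nts, nrg)),
        'mask': (('ts', 'rg'), np.zeros((nts, nrg), dtype=bool)),
        'voodoo_classification': (('ts', 'rg'), np.full((nts, nrg), 4)),
    },
    coords={'ts': np.arange(nts, dtype=float), 'rg': np.arange(nrg) * 100.0},
)
# draw N = 4 random unmasked (time, range) pixels of class 4 below 500 m
indices = random_choice(xr_ds, rg_int=500.0, N=4, iclass=4)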
Example #3
def get_1st_cloud_base_idx(cb_first_ts, range_list):
    """Extract the indices of the first cloud base.

    Args:
        cb_first_ts (list) : list or np.array of fist cloud base occurrence, fill_value=nan
        range_list (list) : range bins

    Return:
        idx_1st_cloud_base (np.array) : sliced container
    """
    idx_1st_cloud_base = []
    for i_cb_ts in cb_first_ts:
        if np.isnan(i_cb_ts):
            idx_1st_cloud_base.append(np.nan)
        else:
            idx_1st_cloud_base.append(h.argnearest(range_list, i_cb_ts))

    return np.array(idx_1st_cloud_base)
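A minimal usage sketch with hypothetical inputs:

import numpy as np

cb_first_ts = [450.0, np.nan, 1230.0]        # hypothetical first cloud base heights [m]
range_list = np.arange(0.0, 12000.0, 30.0)   # hypothetical 30 m range grid
idx_cb = get_1st_cloud_base_idx(cb_first_ts, range_list)
# -> array([15., nan, 41.]); float dtype because of the nan entries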
Example #4
    def t_r(f, time_interval, *further_intervals):
        """function that converts the trace netCDF to the data container
        """
        logger.debug("filename at reader {}".format(f))
        with netCDF4.Dataset(f, 'r') as ncD:

            times = ncD.variables[paraminfo['time_variable']][:].astype(
                np.float64)

            timeconverter, _ = h.get_converter_array(
                paraminfo['time_conversion'], ncD=ncD)
            ts = timeconverter(times)

            #print('timestamps ', ts[:5])
            # setup slice to load base on time_interval
            it_b = h.argnearest(ts, h.dt_to_ts(time_interval[0]))
            if len(time_interval) == 2:
                it_e = h.argnearest(ts, h.dt_to_ts(time_interval[1]))
                if ts[it_e] < h.dt_to_ts(
                        time_interval[0]) - 3 * np.median(np.diff(ts)):
                    logger.warning(
                        'last profile of file {}\n at {} too far from {}'.
                        format(f, h.ts_to_dt(ts[it_e]), time_interval[0]))
                    return None

                it_e = it_e + 1 if not it_e == ts.shape[0] - 1 else None
                slicer = [slice(it_b, it_e)]
            else:
                slicer = [slice(it_b, it_b + 1)]
            logger.debug('slicer {}'.format(slicer))

            range_interval = further_intervals[0]
            ranges = ncD.variables[paraminfo['range_variable']]
            logger.debug('loader range conversion {}'.format(
                paraminfo['range_conversion']))
            rangeconverter, _ = h.get_converter_array(
                paraminfo['range_conversion'], altitude=paraminfo['altitude'])
            ir_b = h.argnearest(rangeconverter(ranges[:]), range_interval[0])
            if len(range_interval) == 2:
                if not range_interval[1] == 'max':
                    ir_e = h.argnearest(rangeconverter(ranges[:]),
                                        range_interval[1])
                    ir_e = ir_e + 1 if not ir_e == ranges.shape[0] - 1 else None
                else:
                    ir_e = None
                slicer.append(slice(ir_b, ir_e))
            else:
                slicer.append(slice(ir_b, ir_b + 1))

            varconverter, maskconverter = h.get_converter_array(
                paraminfo['var_conversion'])

            its = np.arange(ts.shape[0])[tuple(slicer)[0]]
            irs = np.arange(ranges.shape[0])[tuple(slicer)[1]]
            var = np.empty((its.shape[0], irs.shape[0]))
            mask = np.empty((its.shape[0], irs.shape[0]))
            mask[:] = False

            var = ncD.variables[paraminfo['variable_name']][
                tuple(slicer)[0], tuple(slicer)[1], :]

            data = {}
            data['dimlabel'] = ['time', 'range', 'cat']

            data["filename"] = f
            data["paraminfo"] = paraminfo
            data['ts'] = ts[tuple(slicer)[0]]

            data['system'] = paraminfo['system']
            data['name'] = paraminfo['paramkey']
            data['colormap'] = paraminfo['colormap']

            if 'meta' in paraminfo:
                data['meta'] = NcReader.get_meta_from_nc(
                    ncD, paraminfo['meta'], paraminfo['variable_name'])

            variable = ncD.variables[paraminfo['variable_name']]
            var_definition = ast.literal_eval(
                variable.getncattr(paraminfo['identifier_var_def']))
            if var_definition[1] == "forrest":
                var_definition[1] = "forest"

            data['var_definition'] = var_definition

            data['rg'] = rangeconverter(ranges[tuple(slicer)[1]])
            data['rg_unit'] = NcReader.get_var_attr_from_nc(
                "identifier_rg_unit", paraminfo, ranges)
            logger.debug('shapes {} {} {}'.format(ts.shape, ranges.shape,
                                                  var.shape))

            data['var_unit'] = NcReader.get_var_attr_from_nc(
                "identifier_var_unit", paraminfo, var)
            data['var_lims'] = [float(e) for e in \
                                NcReader.get_var_attr_from_nc("identifier_var_lims",
                                                    paraminfo, var)]

            data['var'] = varconverter(var)
            data['mask'] = maskconverter(mask)

            return data
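The reader above is a closure over paraminfo; as a hedged sketch, these are the keys it actually reads (all values below are placeholders and not verified against any real configuration; the optional keys 'meta', 'identifier_rg_unit', 'identifier_var_unit' and 'identifier_var_lims' are also consulted):

paraminfo = {
    'time_variable': 'time',             # name of the time variable in the file
    'time_conversion': 'unix',           # placeholder converter name
    'range_variable': 'range',
    'range_conversion': 'none',          # placeholder converter name
    'altitude': 0.0,
    'variable_name': 'classification',   # placeholder variable name
    'var_conversion': 'none',
    'identifier_var_def': 'definition',  # attribute holding the class definitions
    'system': 'SYSTEM', 'paramkey': 'PARAM', 'colormap': 'viridis',
}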
Example #5
    def pt_ret(f, time_interval, *further_intervals):
        """function that converts the peakTree netCDF to the data container
        """
        logger.debug("filename at reader {}".format(f))
        with netCDF4.Dataset(f, 'r') as ncD:

            times = ncD.variables[paraminfo['time_variable']][:].astype(np.float64)
            if 'time_millisec_variable' in paraminfo.keys() and \
                    paraminfo['time_millisec_variable'] in ncD.variables:
                subsec = ncD.variables[paraminfo['time_millisec_variable']][:]/1.0e3
                times += subsec
            if 'time_microsec_variable' in paraminfo.keys() and \
                    paraminfo['time_microsec_variable'] in ncD.variables:
                subsec = ncD.variables[paraminfo['time_microsec_variable']][:]/1.0e6
                times += subsec

            timeconverter, _ = h.get_converter_array(
                paraminfo['time_conversion'], ncD=ncD)
            ts = timeconverter(times)

            #print('timestamps ', ts[:5])
            # setup slice to load base on time_interval
            it_b = h.argnearest(ts, h.dt_to_ts(time_interval[0]))
            if len(time_interval) == 2:
                it_e = h.argnearest(ts, h.dt_to_ts(time_interval[1]))
                if ts[it_e] < h.dt_to_ts(time_interval[0])-3*np.median(np.diff(ts)):
                    logger.warning(
                            'last profile of file {}\n at {} too far from {}'.format(
                                f, h.ts_to_dt(ts[it_e]), time_interval[0]))
                    return None

                it_e = it_e+1 if not it_e == ts.shape[0]-1 else None
                slicer = [slice(it_b, it_e)]
            else:
                slicer = [slice(it_b, it_b+1)]
            logger.debug('slicer {}'.format(slicer))

            if paraminfo['ncreader'] == 'peakTree':
                range_tg = True

                range_interval = further_intervals[0]
                ranges = ncD.variables[paraminfo['range_variable']]
                logger.debug('loader range conversion {}'.format(paraminfo['range_conversion']))
                rangeconverter, _ = h.get_converter_array(
                    paraminfo['range_conversion'],
                    altitude=paraminfo['altitude'])
                ir_b = h.argnearest(rangeconverter(ranges[:]), range_interval[0])
                if len(range_interval) == 2:
                    if not range_interval[1] == 'max':
                        ir_e = h.argnearest(rangeconverter(ranges[:]), range_interval[1])
                        ir_e = ir_e+1 if not ir_e == ranges.shape[0]-1 else None
                    else:
                        ir_e = None
                    slicer.append(slice(ir_b, ir_e))
                else:
                    slicer.append(slice(ir_b, ir_b+1))

            varconverter, maskconverter = h.get_converter_array(
                paraminfo['var_conversion'])

            its = np.arange(ts.shape[0])[tuple(slicer)[0]]
            irs = np.arange(ranges.shape[0])[tuple(slicer)[1]]
            var = np.empty((its.shape[0], irs.shape[0]), dtype=object)
            mask = np.empty((its.shape[0], irs.shape[0]), dtype=bool)
            mask[:] = True

            param_list = [
                ncD.variables['parent'][tuple(slicer)[0],tuple(slicer)[1],:], #0
                ncD.variables['Z'][tuple(slicer)[0],tuple(slicer)[1],:],      #1
                ncD.variables['v'][tuple(slicer)[0],tuple(slicer)[1],:],      #2
                ncD.variables['width'][tuple(slicer)[0],tuple(slicer)[1],:],  #3
                ncD.variables['skew'][tuple(slicer)[0],tuple(slicer)[1],:],   #4
                ncD.variables['threshold'][tuple(slicer)[0],tuple(slicer)[1],:], #5
                ncD.variables['prominence'][tuple(slicer)[0],tuple(slicer)[1],:], #6
                ncD.variables['bound_l'][tuple(slicer)[0],tuple(slicer)[1],:],    #7
                ncD.variables['bound_r'][tuple(slicer)[0],tuple(slicer)[1],:]     #8
            ]
            if 'LDR' in ncD.variables.keys():
                ldr_avail = True
                param_list.append(ncD.variables['LDR'][tuple(slicer)[0],tuple(slicer)[1],:])  #9
                param_list.append(ncD.variables['ldrmax'][tuple(slicer)[0],tuple(slicer)[1],:]) #10
            else:
                ldr_avail = False
            data = np.stack(tuple(param_list), axis=3)
            logger.debug('data shape {}'.format(data.shape))
            if fastbuilder:
                var, mask = peakTree_fastbuilder.array_to_tree_c(data.astype(float), ldr_avail)
            else:
                var, mask = array_to_tree_py(data, ldr_avail)

            data = {}
            data['dimlabel'] = ['time', 'range', 'dict']

            data["filename"] = f
            data["paraminfo"] = paraminfo
            data['ts'] = ts[tuple(slicer)[0]]

            data['system'] = paraminfo['system']
            data['name'] = paraminfo['paramkey']
            data['colormap'] = paraminfo['colormap']

            data['rg'] = rangeconverter(ranges[tuple(slicer)[1]])
            data['rg_unit'] = NcReader.get_var_attr_from_nc("identifier_rg_unit", 
                                                paraminfo, ranges)
            logger.debug('shapes {} {} {}'.format(ts.shape, ranges.shape, var.shape))

            logger.debug('shapes {} {}'.format(ts.shape, var.shape))
            data['var_unit'] = NcReader.get_var_attr_from_nc("identifier_var_unit", 
                                                    paraminfo, var)
            data['var_lims'] = [float(e) for e in \
                                NcReader.get_var_attr_from_nc("identifier_var_lims", 
                                                    paraminfo, var)]

            data['var'] = varconverter(var)
            data['mask'] = maskconverter(mask)

            return data
Example #6
    def retfunc(f, time_interval, range_interval):
        """function that converts the netCDF to the larda-data-format
        """
        logger.debug("filename at reader {}".format(f))

        with netCDF4.Dataset(f, 'r') as ncD:
            ranges = ncD.variables[paraminfo['range_variable']]
            times = ncD.variables[paraminfo['time_variable']][:].astype(
                np.float64)
            locator_mask = ncD.variables[paraminfo['mask_var']][:].astype(
                int)  # np.int is removed in recent NumPy versions
            if 'time_millisec_variable' in paraminfo.keys() and \
                    paraminfo['time_millisec_variable'] in ncD.variables:
                subsec = ncD.variables[
                    paraminfo['time_millisec_variable']][:] / 1.0e3
                times += subsec
            if 'time_microsec_variable' in paraminfo.keys() and \
                    paraminfo['time_microsec_variable'] in ncD.variables:
                subsec = ncD.variables[
                    paraminfo['time_microsec_variable']][:] / 1.0e6
                times += subsec
            if 'base_time_variable' in paraminfo.keys() and \
                    paraminfo['base_time_variable'] in ncD.variables:
                basetime = ncD.variables[
                    paraminfo['base_time_variable']][:].astype(np.float64)
                times += basetime
            timeconverter, _ = h.get_converter_array(
                paraminfo['time_conversion'], ncD=ncD)
            ts = timeconverter(times)

            it_b = np.searchsorted(ts,
                                   h.dt_to_ts(time_interval[0]),
                                   side='right')
            if len(time_interval) == 2:
                it_e = h.argnearest(ts, h.dt_to_ts(time_interval[1]))
                if it_b == ts.shape[0]: it_b = it_b - 1
                if ts[it_e] < h.dt_to_ts(time_interval[0]) - 3 * np.median(np.diff(ts)) \
                        or ts[it_b] < h.dt_to_ts(time_interval[0]):
                    # second condition is to ensure that no timestamp before
                    # the selected interval is chosen
                    # (problem with limrad after change of sampling frequency)
                    logger.warning(
                        'last profile of file {}\n at {} too far from {}'.
                        format(f, h.ts_to_dt(ts[it_e]), time_interval[0]))
                    return None
                it_e = it_e + 1 if not it_e == ts.shape[0] - 1 else None
                slicer = [slice(it_b, it_e)]
            elif it_b == ts.shape[0]:
                # only one timestamp is selected
                # and the found right one would be beyond the ts range
                it_b = h.argnearest(ts, h.dt_to_ts(time_interval[0]))
                slicer = [slice(it_b, it_b + 1)]
            else:
                slicer = [slice(it_b, it_b + 1)]

            rangeconverter, _ = h.get_converter_array(
                paraminfo['range_conversion'])

            varconverter, _ = h.get_converter_array(
                paraminfo['var_conversion'])

            ir_b = h.argnearest(rangeconverter(ranges[:]), range_interval[0])
            if len(range_interval) == 2:
                if not range_interval[1] == 'max':
                    ir_e = h.argnearest(rangeconverter(ranges[:]),
                                        range_interval[1])
                    ir_e = ir_e + 1 if not ir_e == ranges.shape[0] - 1 else None
                else:
                    ir_e = None
                slicer.append(slice(ir_b, ir_e))
            else:
                slicer.append(slice(ir_b, ir_b + 1))

            range_out = rangeconverter(ranges[tuple(slicer)[1]])
            cal = getattr(ncD, paraminfo['cal_const'])
            var = ncD.variables[paraminfo['variable_name']][:].astype(
                np.float64)
            var = var[locator_mask]
            vel = ncD.variables[paraminfo['vel_variable']][:].astype(
                np.float64)
            # print('var dict ',ch1var.__dict__)
            # print('shapes ', ts.shape, ch1range.shape, ch1var.shape)
            # print("time indices ", it_b, it_e)

            data = {}
            data['dimlabel'] = ['time', 'range', 'vel']
            data["filename"] = f
            data["paraminfo"] = paraminfo
            data['ts'] = ts[tuple(slicer)[0]]
            data['rg'] = range_out

            data['system'] = paraminfo['system']
            data['name'] = paraminfo['paramkey']
            data['colormap'] = paraminfo['colormap']

            # also experimental: vis_varconverter
            if 'plot_varconverter' in paraminfo and paraminfo[
                    'plot_varconverter'] != 'none':
                data['plot_varconverter'] = paraminfo['plot_varconverter']
            else:
                data['plot_varconverter'] = ''

            data['rg_unit'] = get_var_attr_from_nc("identifier_rg_unit",
                                                   paraminfo, ranges)
            #data['var_unit'] = get_var_attr_from_nc("identifier_var_unit",
            #                                        paraminfo, var)
            data['var_unit'] = 'dBZ m-1 s'
            data['var_lims'] = [float(e) for e in \
                                get_var_attr_from_nc("identifier_var_lims",
                                                     paraminfo, var)]
            data['vel'] = vel

            if "identifier_fill_value" in paraminfo.keys(
            ) and not "fill_value" in paraminfo.keys():
                fill_value = var.getncattr(paraminfo['identifier_fill_value'])
                data['mask'] = (var[tuple(slicer)].data == fill_value)
            elif "fill_value" in paraminfo.keys():
                fill_value = paraminfo["fill_value"]
                data['mask'] = np.isclose(var[tuple(slicer)], fill_value)
            elif "mask_var" in paraminfo.keys():
                # combine locator mask and mask of infinite values
                mask = locator_mask.mask[tuple(slicer)]
                data["mask"] = np.logical_or(
                    ~np.isfinite(var[tuple(slicer)].data),
                    np.repeat(mask[:, :, np.newaxis], len(data['vel']),
                              axis=2))
            else:
                data['mask'] = ~np.isfinite(var[tuple(slicer)].data)
            if isinstance(times, np.ma.MaskedArray):
                var = varconverter(var[tuple(slicer)].data)
            else:
                var = varconverter(var[tuple(slicer)])

            var2 = h.z2lin(var) * h.z2lin(float(
                cal[:-3])) * (range_out**2)[np.newaxis, :, np.newaxis]
            data['var'] = var2

            return data
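The last step of the reader above converts the spectra and the radar constant from dB to linear units and applies a range correction; a standalone sketch of that arithmetic, assuming h.z2lin is the usual dB-to-linear conversion 10**(x/10):

import numpy as np

def z2lin(x):
    # assumed behaviour of h.z2lin: decibel -> linear
    return 10.0 ** (x / 10.0)

var_db = np.full((2, 3, 256), -40.0)          # hypothetical spectra [dB]
cal_const_db = -155.0                         # hypothetical radar constant [dB]
range_out = np.array([150.0, 300.0, 450.0])   # hypothetical range gates [m]

var_lin = z2lin(var_db) * z2lin(cal_const_db) * (range_out ** 2)[np.newaxis, :, np.newaxis]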
Example #7
    def retfunc(f, time_interval, *further_intervals):
        """function that converts the netCDF to the larda-data-format
        """
        logger.debug("filename at reader {}".format(f))
        with netCDF4.Dataset(f, 'r') as ncD:

            if 'auto_mask_scale' in paraminfo and paraminfo[
                    'auto_mask_scale'] is False:
                ncD.set_auto_mask(False)

            varconv_args = {}
            times = ncD.variables[paraminfo['time_variable']][:].astype(
                np.float64)
            if 'time_millisec_variable' in paraminfo.keys() and \
                    paraminfo['time_millisec_variable'] in ncD.variables:
                subsec = ncD.variables[
                    paraminfo['time_millisec_variable']][:] / 1.0e3
                times += subsec
            if 'time_microsec_variable' in paraminfo.keys() and \
                    paraminfo['time_microsec_variable'] in ncD.variables:
                subsec = ncD.variables[
                    paraminfo['time_microsec_variable']][:] / 1.0e6
                times += subsec
            if 'base_time_variable' in paraminfo.keys() and \
                    paraminfo['base_time_variable'] in ncD.variables:
                basetime = ncD.variables[
                    paraminfo['base_time_variable']][:].astype(np.float64)
                times += basetime

            timeconverter, _ = h.get_converter_array(
                paraminfo['time_conversion'], ncD=ncD)
            if isinstance(times, np.ma.MaskedArray):
                ts = timeconverter(times.data)
            else:
                ts = timeconverter(times)
            # get the time slicer from time_interval
            slicer = get_time_slicer(ts, f, time_interval)
            if slicer is None and paraminfo['ncreader'] != 'pollynet_profile':
                logger.critical(f'No time slice found!\nfile :: {f}\n')
                return None

            if paraminfo['ncreader'] == "pollynet_profile":
                slicer = [slice(None)]

            if paraminfo['ncreader'] in [
                    'timeheight', 'spec', 'mira_noise', 'pollynet_profile'
            ]:
                range_tg = True

                try:
                    range_interval = further_intervals[0]
                except IndexError:
                    logger.error('No range interval was given.')

                ranges = ncD.variables[paraminfo['range_variable']]
                logger.debug('loader range conversion {}'.format(
                    paraminfo['range_conversion']))
                rangeconverter, _ = h.get_converter_array(
                    paraminfo['range_conversion'],
                    altitude=paraminfo['altitude'])
                ir_b = h.argnearest(rangeconverter(ranges[:]),
                                    range_interval[0])
                if len(range_interval) == 2:
                    if not range_interval[1] == 'max':
                        ir_e = h.argnearest(rangeconverter(ranges[:]),
                                            range_interval[1])
                        ir_e = ir_e + 1 if not ir_e == ranges.shape[
                            0] - 1 else None
                    else:
                        ir_e = None
                    slicer.append(slice(ir_b, ir_e))
                else:
                    slicer.append(slice(ir_b, ir_b + 1))

            if paraminfo['ncreader'] == 'spec':
                if 'compute_velbins' in paraminfo:
                    if paraminfo['compute_velbins'] == "mrrpro":
                        wl = 1.238 * 10**(-2)  # wavelength (fixed) - 24 GHz
                        varconv_args.update({"wl": wl})
                vel_tg = True
                slicer.append(slice(None))
            varconverter, maskconverter = h.get_converter_array(
                paraminfo['var_conversion'], **varconv_args)
            if 'vel_conversion' in paraminfo:
                velconverter, _ = h.get_converter_array(
                    paraminfo['vel_conversion'])

            var = ncD.variables[paraminfo['variable_name']]
            # print('var dict ',ncD.variables[paraminfo['variable_name']].__dict__)
            # print("time indices ", it_b, it_e)
            data = {}
            if paraminfo['ncreader'] == 'timeheight':
                data['dimlabel'] = ['time', 'range']
            elif paraminfo['ncreader'] == 'time':
                data['dimlabel'] = ['time']
            elif paraminfo['ncreader'] == 'spec':
                data['dimlabel'] = ['time', 'range', 'vel']
            elif paraminfo['ncreader'] == 'mira_noise':
                data['dimlabel'] = ['time', 'range']
            elif paraminfo['ncreader'] == "pollynet_profile":
                data['dimlabel'] = ['time', 'range']

            data["filename"] = f
            data["paraminfo"] = paraminfo
            data['ts'] = ts[tuple(slicer)[0]]

            data['system'] = paraminfo['system']
            data['name'] = paraminfo['paramkey']
            data['colormap'] = paraminfo['colormap']

            # experimental: put history into data container
            if 'identifier_history' in paraminfo and paraminfo[
                    'identifier_history'] != 'none':
                data['file_history'] = [
                    ncD.getncattr(paraminfo['identifier_history'])
                ]

            # also experimental: vis_varconverter
            if 'plot_varconverter' in paraminfo and paraminfo[
                    'plot_varconverter'] != 'none':
                data['plot_varconverter'] = paraminfo['plot_varconverter']
            else:
                data['plot_varconverter'] = ''

            if paraminfo['ncreader'] in [
                    'timeheight', 'spec', 'mira_noise', 'pollynet_profile'
            ]:
                if isinstance(times, np.ma.MaskedArray):
                    data['rg'] = rangeconverter(ranges[tuple(slicer)[1]].data)
                else:
                    data['rg'] = rangeconverter(ranges[tuple(slicer)[1]])

                data['rg_unit'] = get_var_attr_from_nc("identifier_rg_unit",
                                                       paraminfo, ranges)
                logger.debug('shapes {} {} {}'.format(ts.shape, ranges.shape,
                                                      var.shape))
            if paraminfo['ncreader'] == 'spec':
                if 'vel_ext_variable' in paraminfo:
                    # this special field is needed to load limrad spectra
                    vel_ext = ncD.variables[paraminfo['vel_ext_variable'][0]][
                        int(paraminfo['vel_ext_variable'][1])]
                    vel_res = 2 * vel_ext / float(var[:].shape[2])
                    data['vel'] = np.linspace(-vel_ext + (0.5 * vel_res),
                                              +vel_ext - (0.5 * vel_res),
                                              var[:].shape[2])
                elif 'compute_velbins' in paraminfo:
                    if paraminfo['compute_velbins'] == 'mrrpro':
                        # this is used to read in MRR-PRO spectra
                        fs = 500000  # sampling rate of MRR-Pro (fixed)
                        vel_ext = fs / 4 / ncD.dimensions['range'].size * wl
                        vel_res = vel_ext / float(var[:].shape[2])
                        data['vel'] = np.linspace(0 - (0.5 * vel_res),
                                                  -vel_ext + (0.5 * vel_res),
                                                  var[:].shape[2])
                else:
                    data['vel'] = ncD.variables[paraminfo['vel_variable']][:]
                if 'vel_conversion' in paraminfo:
                    data['vel'] = velconverter(data['vel'])

            logger.debug('shapes {} {}'.format(ts.shape, var.shape))
            data['var_unit'] = get_var_attr_from_nc("identifier_var_unit",
                                                    paraminfo, var)
            data['var_lims'] = [float(e) for e in \
                                get_var_attr_from_nc("identifier_var_lims",
                                                     paraminfo, var)]

            # by default assume dimensions of (time, range, ...)
            # or define a custom order in the param toml file
            if 'dimorder' in paraminfo:
                slicer = [slicer[i] for i in paraminfo['dimorder']]

            if paraminfo['ncreader'] == "pollynet_profile":
                del slicer[0]

            # read in the variable definition dictionary
            #
            if "identifier_var_def" in paraminfo.keys(
            ) and not "var_def" in paraminfo.keys():
                data['var_definition'] = h.guess_str_to_dict(
                    var.getncattr(paraminfo['identifier_var_def']))
            elif "var_def" in paraminfo.keys():
                data['var_definition'] = paraminfo['var_def']

            if paraminfo['ncreader'] == 'mira_noise':
                r_c = ncD.variables[paraminfo['radar_const']][:]
                snr_c = ncD.variables[paraminfo['SNR_corr']][:]
                npw = ncD.variables[paraminfo['noise_pow']][:]
                calibrated_noise = r_c[slicer[0], np.newaxis] * var[tuple(slicer)].data * snr_c[tuple(slicer)].data / \
                                   npw[slicer[0], np.newaxis] * (data['rg'][np.newaxis, :] / 5000.) ** 2
                data['var'] = calibrated_noise
            else:
                data['var'] = varconverter(var[:])[tuple(slicer)]

                #if paraminfo['compute_velbins'] == "mrrpro":
                #    data['var'] = data['var'] * wl** 4 / (np.pi** 5) / 0.93 * 10**6

            if "identifier_fill_value" in paraminfo.keys(
            ) and not "fill_value" in paraminfo.keys():
                fill_value = var.getncattr(paraminfo['identifier_fill_value'])
                mask = (data['var'] == fill_value)
            elif "fill_value" in paraminfo.keys():
                fill_value = paraminfo['fill_value']
                mask = np.isclose(data['var'], fill_value)
            else:
                mask = ~np.isfinite(data['var'])

            data['mask'] = mask

            if paraminfo['ncreader'] == "pollynet_profile":
                data['var'] = data['var'][np.newaxis, :]
                data['mask'] = data['mask'][np.newaxis, :]

            return data
Example #8
    def retfunc(f, time_interval, *further_intervals):
        """function that converts the netCDF to the larda-data-format
        """
        logger.debug("filename at reader {}".format(f))
        with netCDF4.Dataset(f, 'r') as ncD:

            times = ncD.variables[paraminfo['time_variable']][:].astype(
                np.float64)
            if 'time_millisec_variable' in paraminfo.keys() and \
                    paraminfo['time_millisec_variable'] in ncD.variables:
                subsec = ncD.variables[
                    paraminfo['time_millisec_variable']][:] / 1.0e3
                times += subsec
            if 'time_microsec_variable' in paraminfo.keys() and \
                    paraminfo['time_microsec_variable'] in ncD.variables:
                subsec = ncD.variables[
                    paraminfo['time_microsec_variable']][:] / 1.0e6
                times += subsec

            timeconverter, _ = h.get_converter_array(
                paraminfo['time_conversion'], ncD=ncD)
            if isinstance(times, np.ma.MaskedArray):
                ts = timeconverter(times.data)
            else:
                ts = timeconverter(times)

            # load the whole time-range from the file
            slicer = [slice(None)]

            if paraminfo['ncreader'] == 'scan_timeheight':
                range_tg = True

                range_interval = further_intervals[0]
                ranges = ncD.variables[paraminfo['range_variable']]
                logger.debug('loader range conversion {}'.format(
                    paraminfo['range_conversion']))
                rangeconverter, _ = h.get_converter_array(
                    paraminfo['range_conversion'],
                    altitude=paraminfo['altitude'])
                ir_b = h.argnearest(rangeconverter(ranges[:]),
                                    range_interval[0])
                if len(range_interval) == 2:
                    if not range_interval[1] == 'max':
                        ir_e = h.argnearest(rangeconverter(ranges[:]),
                                            range_interval[1])
                        ir_e = ir_e + 1 if not ir_e == ranges.shape[
                            0] - 1 else None
                    else:
                        ir_e = None
                    slicer.append(slice(ir_b, ir_e))
                else:
                    slicer.append(slice(ir_b, ir_b + 1))

            varconverter, maskconverter = h.get_converter_array(
                paraminfo['var_conversion'],
                mira_azi_zero=paraminfo['mira_azi_zero'])

            var = ncD.variables[paraminfo['variable_name']]
            # print('var dict ',ncD.variables[paraminfo['variable_name']].__dict__)
            # print("time indices ", it_b, it_e)
            data = {}
            if paraminfo['ncreader'] == 'scan_timeheight':
                data['dimlabel'] = ['time', 'range']
            elif paraminfo['ncreader'] == 'scan_time':
                data['dimlabel'] = ['time']
            # elif paraminfo['ncreader'] == 'spec':
            #    data['dimlabel'] = ['time', 'range', 'vel']

            data["filename"] = f
            data["paraminfo"] = paraminfo
            data['ts'] = ts[tuple(slicer)[0]]

            data['system'] = paraminfo['system']
            data['name'] = paraminfo['paramkey']
            data['colormap'] = paraminfo['colormap']

            # also experimental: vis_varconverter
            if 'plot_varconverter' in paraminfo and paraminfo[
                    'plot_varconverter'] != 'none':
                data['plot_varconverter'] = paraminfo['plot_varconverter']
            else:
                data['plot_varconverter'] = ''

            if paraminfo['ncreader'] == 'scan_timeheight':
                if isinstance(times, np.ma.MaskedArray):
                    data['rg'] = rangeconverter(ranges[tuple(slicer)[1]].data)
                else:
                    data['rg'] = rangeconverter(ranges[tuple(slicer)[1]])

                data['rg_unit'] = get_var_attr_from_nc("identifier_rg_unit",
                                                       paraminfo, ranges)
                logger.debug('shapes {} {} {}'.format(ts.shape, ranges.shape,
                                                      var.shape))
            logger.debug('shapes {} {}'.format(ts.shape, var.shape))
            data['var_unit'] = get_var_attr_from_nc("identifier_var_unit",
                                                    paraminfo, var)
            data['var_lims'] = [float(e) for e in \
                                get_var_attr_from_nc("identifier_var_lims",
                                                     paraminfo, var)]

            # by default assume dimensions of (time, range, ...)
            # or define a custom order in the param toml file
            if 'dimorder' in paraminfo:
                slicer = [slicer[i] for i in paraminfo['dimorder']]

            if "identifier_fill_value" in paraminfo.keys(
            ) and not "fill_value" in paraminfo.keys():
                fill_value = var.getncattr(paraminfo['identifier_fill_value'])
                mask = (var[tuple(slicer)].data == fill_value)
            elif "fill_value" in paraminfo.keys():
                fill_value = paraminfo['fill_value']
                mask = np.isclose(var[tuple(slicer)].data, fill_value)
            else:
                mask = ~np.isfinite(var[tuple(slicer)].data)

            data['var'] = varconverter(var[tuple(slicer)].data)
            data['mask'] = maskconverter(mask)

            return data
Example #9
    def retfunc(f, time_interval, range_interval):
        """function that converts the netCDF to the larda-data-format
        """
        logger.debug("filename at reader {}".format(f))

        with netCDF4.Dataset(f, 'r') as ncD:

            times = ncD.variables[paraminfo['time_variable']][:].astype(
                np.float64)
            if 'time_millisec_variable' in paraminfo.keys() and \
                    paraminfo['time_millisec_variable'] in ncD.variables:
                subsec = ncD.variables[
                    paraminfo['time_millisec_variable']][:] / 1.0e3
                times += subsec
            if 'time_microsec_variable' in paraminfo.keys() and \
                    paraminfo['time_microsec_variable'] in ncD.variables:
                subsec = ncD.variables[
                    paraminfo['time_microsec_variable']][:] / 1.0e6
                times += subsec
            timeconverter, _ = h.get_converter_array(
                paraminfo['time_conversion'], ncD=ncD)
            ts = timeconverter(times)

            no_chirps = ncD.dimensions['Chirp'].size

            ranges_per_chirp = [
                ncD.variables['C{}Range'.format(i + 1)]
                for i in range(no_chirps)
            ]
            ch1range = ranges_per_chirp[0]

            ranges = np.hstack([rg[:] for rg in ranges_per_chirp])

            # get the time slicer from time_interval
            slicer = get_time_slicer(ts, f, time_interval)
            if slicer is None:
                return None

            rangeconverter, _ = h.get_converter_array(
                paraminfo['range_conversion'])

            varconverter, _ = h.get_converter_array(
                paraminfo['var_conversion'])

            ir_b = h.argnearest(rangeconverter(ranges[:]), range_interval[0])
            if len(range_interval) == 2:
                if not range_interval[1] == 'max':
                    ir_e = h.argnearest(rangeconverter(ranges[:]),
                                        range_interval[1])
                    ir_e = ir_e + 1 if not ir_e == ranges.shape[0] - 1 else None
                else:
                    ir_e = None
                slicer.append(slice(ir_b, ir_e))
            else:
                slicer.append(slice(ir_b, ir_b + 1))

            vars_per_chirp = [
                ncD.variables['C{}{}'.format(i + 1,
                                             paraminfo['variable_name'])]
                for i in range(no_chirps)
            ]
            ch1var = vars_per_chirp[0]
            # print('var dict ',ch1var.__dict__)
            # print('shapes ', ts.shape, ch1range.shape, ch1var.shape)
            # print("time indices ", it_b, it_e)

            data = {}
            data['dimlabel'] = ['time', 'range', 'vel']
            data["filename"] = f
            data["paraminfo"] = paraminfo
            data['ts'] = ts[tuple(slicer)[0]]
            data['rg'] = rangeconverter(ranges[tuple(slicer)[1]])

            data['system'] = paraminfo['system']
            data['name'] = paraminfo['paramkey']
            data['colormap'] = paraminfo['colormap']

            # also experimental: vis_varconverter
            if 'plot_varconverter' in paraminfo and paraminfo[
                    'plot_varconverter'] != 'none':
                data['plot_varconverter'] = paraminfo['plot_varconverter']
            else:
                data['plot_varconverter'] = ''

            data['rg_unit'] = get_var_attr_from_nc("identifier_rg_unit",
                                                   paraminfo, ch1range)
            data['var_unit'] = get_var_attr_from_nc("identifier_var_unit",
                                                    paraminfo, ch1var)
            data['var_lims'] = [float(e) for e in \
                                get_var_attr_from_nc("identifier_var_lims",
                                                     paraminfo, ch1var)]
            if 'vel_ext_variable' in paraminfo:
                # define the function
                get_vel_ext = lambda i: ncD.variables[paraminfo[
                    'vel_ext_variable'][0]][:][i]
                # apply it to every chirp
                vel_ext_per_chirp = [get_vel_ext(i) for i in range(no_chirps)]

                vel_dim_per_chirp = [v.shape[2] for v in vars_per_chirp]
                calc_vel_res = lambda v_e, v_dim: 2.0 * v_e / float(v_dim)
                vel_res_per_chirp = [calc_vel_res(v_e, v_dim) for v_e, v_dim \
                                     in zip(vel_ext_per_chirp, vel_dim_per_chirp)]

                # for some very obscure reason lambda is not able to unpack 3 values
                def calc_vel(vel_ext, vel_res, v_dim):
                    return np.linspace(-vel_ext + (0.5 * vel_res),
                                       +vel_ext - (0.5 * vel_res), v_dim)

                vel_per_chirp = [calc_vel(v_e, v_res, v_dim) for v_e, v_res, v_dim \
                                 in zip(vel_ext_per_chirp, vel_res_per_chirp, vel_dim_per_chirp)]
            else:
                raise NotImplementedError(
                    "other means of getting the var dimension are not implemented yet"
                )
            data['vel'] = vel_per_chirp[0]

            # interpolate the variables here
            if 'var_conversion' in paraminfo and paraminfo[
                    'var_conversion'] == 'keepNyquist':
                # the interpolation is only done for the number of spectral lines, not the velocity itself
                quot = [
                    i / vel_dim_per_chirp[0] for i in vel_dim_per_chirp[1:]
                ]
                vars_interp = [vars_per_chirp[0]]
                ich = 1
                for var, vel in zip(vars_per_chirp[1:], vel_per_chirp[1:]):
                    data['vel_ch{}'.format(ich + 1)] = vel_per_chirp[ich]
                    new_vel = np.linspace(vel[0], vel[-1],
                                          vel_dim_per_chirp[0])
                    vars_interp.append(
                        interp_only_3rd_dim(var[:] * quot[ich - 1],
                                            vel,
                                            new_vel,
                                            kind='nearest'))
                    ich += 1
            else:
                vars_interp = [vars_per_chirp[0]] + \
                              [interp_only_3rd_dim(var, vel, vel_per_chirp[0]) \
                               for var, vel in zip(vars_per_chirp[1:], vel_per_chirp[1:])]

            var = np.hstack([v[:] for v in vars_interp])
            logger.debug('interpolated spectra from\n{}\n{} to\n{}'.format(
                [v[:].shape for v in vars_per_chirp],
                ['{:5.3f}'.format(vel[0])
                 for vel in vel_per_chirp], [v[:].shape for v in vars_interp]))
            logger.info('var.shape interpolated spectra {}'.format(var.shape))

            if "identifier_fill_value" in paraminfo.keys(
            ) and not "fill_value" in paraminfo.keys():
                fill_value = var.getncattr(paraminfo['identifier_fill_value'])
                data['mask'] = (var[tuple(slicer)].data == fill_value)
            elif "fill_value" in paraminfo.keys():
                fill_value = paraminfo["fill_value"]
                data['mask'] = np.isclose(var[tuple(slicer)], fill_value)
            else:
                data['mask'] = ~np.isfinite(var[tuple(slicer)].data)
            if isinstance(times, np.ma.MaskedArray):
                data['var'] = varconverter(var[tuple(slicer)].data)
            else:
                data['var'] = varconverter(var[tuple(slicer)])

            return data
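The Doppler velocity axis of each chirp above is derived from its Nyquist velocity; a small sketch of that formula with hypothetical numbers:

import numpy as np

vel_ext = 10.24   # hypothetical Nyquist velocity of one chirp [m s-1]
n_bins = 512      # hypothetical number of spectral bins of that chirp

vel_res = 2.0 * vel_ext / float(n_bins)
vel = np.linspace(-vel_ext + 0.5 * vel_res, +vel_ext - 0.5 * vel_res, n_bins)
assert np.isclose(vel[1] - vel[0], vel_res)  # bin centres spaced by vel_res, symmetric around zero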
Example #10
    def retfunc(f, time_interval, range_interval):
        """function that converts the netCDF to the larda-data-format
        """
        logger.debug("filename at reader {}".format(f))
        with netCDF4.Dataset(f, 'r') as ncD:

            no_chirps = ncD.dimensions['Chirp'].size

            ranges_per_chirp = [
                ncD.variables['C{}Range'.format(i + 1)]
                for i in range(no_chirps)
            ]
            ch1range = ranges_per_chirp[0]

            ranges = np.hstack([rg[:] for rg in ranges_per_chirp])

            times = ncD.variables[paraminfo['time_variable']][:].astype(
                np.float64)
            if 'time_millisec_variable' in paraminfo.keys() and \
                    paraminfo['time_millisec_variable'] in ncD.variables:
                subsec = ncD.variables[
                    paraminfo['time_millisec_variable']][:] / 1.0e3
                times += subsec
            if 'time_microsec_variable' in paraminfo.keys() and \
                    paraminfo['time_microsec_variable'] in ncD.variables:
                subsec = ncD.variables[
                    paraminfo['time_microsec_variable']][:] / 1.0e6
                times += subsec
            timeconverter, _ = h.get_converter_array(
                paraminfo['time_conversion'], ncD=ncD)
            ts = timeconverter(times)

            # get the time slicer from time_interval
            slicer = get_time_slicer(ts, f, time_interval)
            if slicer is None:
                return None

            rangeconverter, _ = h.get_converter_array(
                paraminfo['range_conversion'])

            varconverter, _ = h.get_converter_array(
                paraminfo['var_conversion'])

            ir_b = h.argnearest(rangeconverter(ranges[:]), range_interval[0])
            if len(range_interval) == 2:
                if not range_interval[1] == 'max':
                    ir_e = h.argnearest(rangeconverter(ranges[:]),
                                        range_interval[1])
                    ir_e = ir_e + 1 if not ir_e == ranges.shape[0] - 1 else None
                else:
                    ir_e = None
                slicer.append(slice(ir_b, ir_e))
            else:
                slicer.append(slice(ir_b, ir_b + 1))

            no_chirps = ncD.dimensions['Chirp'].size

            var_per_chirp = [
                ncD.variables['C{}'.format(i + 1) + paraminfo['variable_name']]
                for i in range(no_chirps)
            ]
            ch1var = var_per_chirp[0]

            # ch1var = ncD.variables['C1'+paraminfo['variable_name']]
            # ch2var = ncD.variables['C2'+paraminfo['variable_name']]
            # ch3var = ncD.variables['C3'+paraminfo['variable_name']]

            # print('var dict ',ch1var.__dict__)
            # print('shapes ', ts.shape, ch1range.shape, ch1var.shape)
            # print("time indices ", it_b, it_e)
            data = {}
            data['dimlabel'] = ['time', 'range']
            data["filename"] = f
            data["paraminfo"] = paraminfo
            data['ts'] = ts[tuple(slicer)[0]]
            data['rg'] = rangeconverter(ranges[tuple(slicer)[1]])

            data['system'] = paraminfo['system']
            data['name'] = paraminfo['paramkey']
            data['colormap'] = paraminfo['colormap']

            # also experimental: vis_varconverter
            if 'plot_varconverter' in paraminfo and paraminfo[
                    'plot_varconverter'] != 'none':
                data['plot_varconverter'] = paraminfo['plot_varconverter']
            else:
                data['plot_varconverter'] = ''

            data['rg_unit'] = get_var_attr_from_nc("identifier_rg_unit",
                                                   paraminfo, ch1range)
            data['var_unit'] = get_var_attr_from_nc("identifier_var_unit",
                                                    paraminfo, ch1var)
            data['var_lims'] = [float(e) for e in \
                                get_var_attr_from_nc("identifier_var_lims",
                                                     paraminfo, ch1var)]
            var = np.hstack([v[:] for v in var_per_chirp])
            # var = np.hstack([ch1var[:], ch2var[:], ch3var[:]])

            if "identifier_fill_value" in paraminfo.keys(
            ) and not "fill_value" in paraminfo.keys():
                fill_value = var.getncattr(paraminfo['identifier_fill_value'])
                data['mask'] = (var[tuple(slicer)].data == fill_value)
            elif "fill_value" in paraminfo.keys():
                fill_value = paraminfo["fill_value"]
                data['mask'] = np.isclose(var[tuple(slicer)], fill_value)
            else:
                data['mask'] = ~np.isfinite(var[tuple(slicer)].data)
            data['var'] = varconverter(var[tuple(slicer)].data)

            return data
Example #11
    cloudnet_dn    = classification_tot['ts_class_time'].reshape((n_tot_ts_class,))
    cloudnet_dt    = np.array([datenum2datetime(ts) for ts in cloudnet_dn])
    cloudnet_rg    = classification_tot['h_class'].reshape((n_tot_rg_class,))

    for case in case_list[cases:cases+1]:
        # if case['notes'] == 'ex': continue  # exclude this case and check the next one

        begin_dt, end_dt = case['begin_dt'], case['end_dt']

        # create directory for plots
        h.change_dir(f'{PLOTS_PATH}case_study_{begin_dt:%Y%m%d%H%M%S}-{end_dt:%Y%m%d%H%M%S}/')
        logging.basicConfig(filename=f'case_study_{begin_dt:%Y%m%d%H%M%S}-{end_dt:%Y%m%d%H%M%S}.log', level=logging.INFO)

        # find indices for slicing
        rg0val, rgNval = case['plot_range']
        rg0idx, rgNidx = h.argnearest(cloudnet_rg, rg0val), h.argnearest(cloudnet_rg, rgNval)
        rgN = rgNidx - rg0idx
        rg_case        = cloudnet_rg[rg0idx:rgNidx]

        ts0val, tsNval = datetime2datenum(begin_dt), datetime2datenum(end_dt)
        ts0idx, tsNidx = h.argnearest(cloudnet_dn, ts0val), h.argnearest(cloudnet_dn, tsNval)
        tsN = tsNidx - ts0idx
        dt_case        = cloudnet_dt[ts0idx:tsNidx]

        assert tsN * rgN > 0, 'Number of time steps or range bins invalid, check rgN and tsN!'

        ts_smth = '_smoothed' if span_smoo_MWRlwp > 0 else ''

        logger.info(f'******************BEGIN CASE STUDY******************\n')
        logger.info(f'Slicing time from {begin_dt:%Y-%m-%d %H:%M:%S} (UTC) to {end_dt:%Y-%m-%d %H:%M:%S} (UTC)')
        logger.info(f'Slicing range from {rg0val:.2f} (km) to {rgNval:.2f} (km)\n')
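All of the examples above rely on h.argnearest to map a physical value (timestamp, height) to an array index; a hedged sketch of what such a helper typically does (the actual h.argnearest implementation may handle edge cases differently):

import numpy as np

def argnearest(array, value):
    # index of the element closest to value (assumed behaviour of h.argnearest)
    return int(np.abs(np.asarray(array) - value).argmin())

rg = np.arange(0.0, 12000.0, 30.0)
argnearest(rg, 454.0)   # -> 15, i.e. the 450 m range gate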