def read(*fields):
    """Read one binary record of the given (name, dtype) fields into `header`.

    Closure variables: `file` (open binary file), `header` (dict being
    filled) and `_get_dtype` (dtype chooser for array-valued fields).
    """
    record = np.fromfile(file, np.dtype(list(fields)), 1)
    for field_name in record.dtype.names:
        value = record[field_name][0]
        header[field_name] = (
            value
            if utils.isscalar(value)
            else np.array(value, dtype=_get_dtype(value))
        )
def init_data(self, keymap: dict) -> None:
    """Reads selected fields and fixes the names."""
    for key, name in keymap.items():
        values = self.getvar(key)
        if utils.isscalar(values):
            values = np.array(values)
        # Mask out non-finite entries (NaN / inf) before storing.
        values[~np.isfinite(values)] = ma.masked
        self.append_data(values, name)
def _get_sequence_indices(self) -> list:
    """Mira has only one sequence and one folding velocity. RPG has
    several sequences with different folding velocities."""
    indices = np.arange(len(self.height))
    if utils.isscalar(self.folding_velocity):
        # Single sequence covering all range gates.
        return [indices]
    chirp_starts = self.getvar('chirp_start_indices')
    # Split at each chirp start (skip the leading 0).
    return np.split(indices, chirp_starts[1:])
def _get_folding_velocity_full(self):
    """Expand folding velocity to one value per range gate and store it
    as 'nyquist_velocity'."""
    if utils.isscalar(self.folding_velocity):
        full_velocity = np.repeat(self.folding_velocity,
                                  len(self.sequence_indices[0]))
    else:
        # One folding velocity per chirp sequence.
        chunks = [
            np.repeat(velocity, len(indices))
            for indices, velocity in zip(self.sequence_indices,
                                         self.folding_velocity)
        ]
        full_velocity = np.hstack(chunks)
    self.append_data(full_velocity, "nyquist_velocity")
def test_screen_rain(class_objects, result):
    """After rain screening, the last profile of every non-scalar drizzle
    variable must be all zeros.

    The previous check used `not np.any(x) == np.any(np.array([0, 0, 0]))`,
    whose right-hand side is the constant False and whose `not` binds to the
    whole comparison — correct only by accident and unreadable. It also
    folded all keys into one boolean, hiding which variable failed.
    Asserting per key keeps the same pass/fail semantics with a useful
    failure message.
    """
    from cloudnetpy.products.drizzle import _screen_rain
    d_source, d_class, s_width = class_objects
    result = _screen_rain(result, d_class)
    for key in result.keys():
        if not utils.isscalar(result[key]):
            assert not np.any(result[key][-1]), key
def _get_dimensions(nc: netCDF4.Dataset, data: np.ndarray) -> tuple:
    """Finds correct dimensions for a variable."""
    if utils.isscalar(data):
        return ()
    file_dims = nc.dimensions
    # For each axis, take the first file dimension whose size matches the
    # axis length (IndexError when nothing matches, as before).
    return tuple(
        [name for name in file_dims.keys() if file_dims[name].size == length][0]
        for length in data.shape
    )
def _append_data(ceilo: Union[ClCeilo, Ct25k, LufftCeilo], beta_variants: tuple):
    """Adds data / metadata as CloudnetArrays to ceilo.data."""
    for variant, name in zip(beta_variants, ('beta_raw', 'beta', 'beta_smooth')):
        ceilo.data[name] = CloudnetArray(variant, name)
    for attr in ('range', 'time', 'wavelength', 'calibration_factor'):
        ceilo.data[attr] = CloudnetArray(np.array(getattr(ceilo, attr)), attr)
    for field, values in ceilo.metadata.items():
        sample = values if utils.isscalar(values) else values[0]
        if isinstance(sample, str):
            continue  # String array writing not yet supported
        ceilo.data[field] = CloudnetArray(np.array(values, dtype=float), field)
def _get_dimensions(array):
    """Finds correct dimensions for a variable.

    Closure variable: `rootgrp` (open netCDF4 dataset).
    """
    if utils.isscalar(array):
        return ()
    file_dims = rootgrp.dimensions
    # First file dimension whose size matches each axis length
    # (IndexError when nothing matches, as before).
    return tuple(
        [name for name in file_dims.keys() if file_dims[name].size == length][0]
        for length in array.shape
    )
def _append_data(ceilo, beta_variants):
    """Add data and metadata as CloudnetArray's to ceilo.data attribute."""
    for variant, name in zip(beta_variants, ('beta_raw', 'beta', 'beta_smooth')):
        ceilo.data[name] = CloudnetArray(variant, name)
    for attr in ('range', 'time'):
        ceilo.data[attr] = CloudnetArray(getattr(ceilo, attr), attr)
    for field, values in ceilo.metadata.items():
        sample = values if utils.isscalar(values) else values[0]
        if isinstance(sample, str):
            continue  # String array writing not yet supported
        ceilo.data[field] = CloudnetArray(np.array(values, dtype=float), field)
    if hasattr(ceilo, 'wavelength'):
        ceilo.data['wavelength'] = CloudnetArray(ceilo.wavelength, 'wavelength', 'nm')
def _get_dim(self, array: np.ndarray) -> tuple:
    """Returns tuple of dimension names, e.g., ('time', 'range') that match
    the array size."""
    if utils.isscalar(array):
        return ()
    dims = ()
    file_dims = self.concatenated_file.dimensions
    for length in array.shape:
        matches = [name for name in file_dims.keys()
                   if file_dims[name].size == length]
        # Fall back to the concatenation dimension when no size matches.
        dims += (matches[0] if matches else self.concat_dimension,)
    return dims
def _screen_bad_time_indices(valid_indices: list) -> None:
    """Trim time-dependent arrays to the given valid time indices.

    Closure variables: `time` (full time vector) and `data` (mapping of
    instrument containers with `.data` / `.data_dense` dicts) — defined in
    the enclosing scope.
    """
    n_time_full = len(time)
    data["radar"].time = time[valid_indices]
    for var in ("radar", "lidar", "mwr", "model"):
        for key, item in data[var].data.items():
            # Scalars carry no time dimension; leave them untouched.
            if utils.isscalar(item.data):
                continue
            array = item[:]
            # Only slice arrays whose leading axis is the full time axis.
            if array.shape[0] == n_time_full:
                if array.ndim == 1:
                    array = array[valid_indices]
                elif array.ndim == 2:
                    array = array[valid_indices, :]
                else:
                    # NOTE(review): >2-D arrays are skipped entirely (no
                    # write-back) — presumably none exist; confirm.
                    continue
            data[var].data[key].data = array
    # Dense model fields are (time, height): slice rows directly.
    for key, item in data["model"].data_dense.items():
        data["model"].data_dense[key] = item[valid_indices, :]
def get_drizzle_error(categorize: DrizzleSource, drizzle_parameters: DrizzleSolver) -> dict:
    """Estimates error and bias for drizzle classification.

    Args:
        categorize: The :class:`DrizzleSource` instance.
        drizzle_parameters: The :class:`DrizzleSolver` instance.

    Returns:
        dict: Dictionary containing information of estimated error and bias for drizzle.
    """
    parameters = drizzle_parameters.params
    drizzle_indices = _get_drizzle_indices(parameters["Do"])
    error_input = _read_input_uncertainty(categorize, "error")
    # PEP 8: truth-test directly instead of `... is True`.
    if utils.isscalar(error_input[0]):  # Constant Z error: broadcast over field
        z_error, bias_error = error_input
        error_input = np.full(categorize.z.shape, z_error), bias_error
    bias_input = _read_input_uncertainty(categorize, "bias")
    return _calc_errors(drizzle_indices, error_input, bias_input)
def _screen_rain(results: dict, classification: DrizzleClassification):
    """Removes rainy profiles from drizzle variables."""
    for key, values in results.items():
        if not utils.isscalar(values):
            # Zero every rainy profile in place.
            values[classification.is_rain, :] = 0
    return results
def test_isscalar(input, output):
    # Parameter names must match the pytest.mark.parametrize argnames
    # (declared elsewhere), so the builtin-shadowing `input` is kept.
    assert utils.isscalar(input) == output
def _getvar(self, *args):
    """Reads data of variable (array or scalar) from netcdf-file.

    Args:
        *args: Candidate variable names; the first one found is used.

    Returns:
        The first element for scalar variables, the full array otherwise,
        or ``None`` when no candidate name exists in the file.
    """
    for arg in args:
        if arg in self.variables:
            var = self.variables[arg]
            return var[0] if utils.isscalar(var) else var[:]
    # Explicit return (previously an implicit fall-through), matching the
    # sibling _getvar variant that returns None explicitly.
    return None
def _getvar(self, *args):
    """Return the first matching variable as a scalar or full array.

    Raises:
        ValueError: If none of the candidate names is present.
    """
    for name in args:
        if name not in self.dataset.variables:
            continue
        variable = self.dataset.variables[name]
        return variable[0] if utils.isscalar(variable) else variable[:]
    raise ValueError("Unknown variable")
def _getvar(self, *args) -> Union[np.ndarray, float, None]:
    """Return the first matching variable (scalar or array), or None
    when no candidate name is present."""
    for name in args:
        if name not in self.variables:
            continue
        variable = self.variables[name]
        return variable[0] if utils.isscalar(variable) else variable[:]
    return None