    def test_unknown_method(self):
        """Test functionality with an unexpected method."""
        expected = None
        method = 'not_a_valid_method'
        result = get_method_prerequisites(method, self.data_directory)
        self.assertArrayEqual(expected, result)

    def test_known_method(self):
        """Test functionality with an expected method."""
        expected = self.additional_data
        method = 'model_level_temperature_lapse_rate'
        result = get_method_prerequisites(method, self.data_directory)
        self.assertArrayEqual(expected.keys(), result.keys())
        for diagnostic in expected.keys():
            self.assertArrayEqual(expected[diagnostic][0].data,
                                  result[diagnostic][0].data)
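# Rough sketch of the behaviour the tests above exercise (the data directory
# below is a hypothetical placeholder, not a path used by these tests):
#
#   prereqs = get_method_prerequisites('model_level_temperature_lapse_rate',
#                                      '/path/to/diagnostic/data')
#   # -> dict mapping each prerequisite diagnostic name to its loaded cubes
#
#   get_method_prerequisites('not_a_valid_method', '/path/to/diagnostic/data')
#   # -> None, so callers can simply skip the additional-data step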
def process_diagnostic(diagnostic, neighbours, sites, forecast_times,
                       data_path, ancillary_data, output_path=None):
    """
    Extract data and write output for a given diagnostic.

    Args:
    -----
    diagnostic : dict
        Dictionary describing the diagnostic to be processed (its
        'filepath', 'diagnostic_name', 'neighbour_finding',
        'interpolation_method' and 'extrema' entries are used below).

    neighbours : numpy.array
        Array of neighbouring grid points that are associated with sites
        in the SortedDictionary of sites.

    sites : dict
        A dictionary containing the properties of spotdata sites.

    forecast_times : list[datetime.datetime objects]
        A list of datetimes representing forecast times for which data is
        required.

    data_path : string
        Path to diagnostic data files.

    ancillary_data : dict
        A dictionary containing additional model data that is needed.
        e.g. {'orography': <cube of orography>}

    output_path : string
        Path to which output file containing processed diagnostic should be
        written.

    Returns:
    --------
    None

    Raises:
    -------
    IOError : If no relevant data cubes are found at given path.
    Exception : No spotdata returned.

    """
    # Search directory structure for all files relevant to current
    # diagnostic.
    files_to_read = [
        os.path.join(dirpath, filename)
        for dirpath, _, files in os.walk(data_path)
        for filename in files if diagnostic['filepath'] in filename]
    if not files_to_read:
        raise IOError('No relevant data files found in {}.'.format(
            data_path))

    # Load cubes into an iris.cube.CubeList.
    cubes = Load('multi_file').process(files_to_read,
                                       diagnostic['diagnostic_name'])

    # Grab the relevant set of grid point neighbours for the neighbour
    # finding method being used by this diagnostic.
    neighbour_hash = construct_neighbour_hash(diagnostic['neighbour_finding'])
    neighbour_list = neighbours[neighbour_hash]

    # Check if additional diagnostics are needed (e.g. multi-level data).
    # If required, load into the additional_diagnostics dictionary.
    additional_diagnostics = get_method_prerequisites(
        diagnostic['interpolation_method'], data_path)

    # Create empty iris.cube.CubeList to hold extracted data cubes.
    resulting_cubes = CubeList()

    # Get optional kwargs that may be set to override defaults.
    optionals = ['upper_level', 'lower_level', 'no_neighbours',
                 'dz_tolerance', 'dthetadz_threshold', 'dz_max_adjustment']
    kwargs = {}
    if ancillary_data.get('config_constants') is not None:
        for optional in optionals:
            constant = ancillary_data.get('config_constants').get(optional)
            if constant is not None:
                kwargs[optional] = constant

    # Loop over forecast times.
    for a_time in forecast_times:
        # Extract Cube from CubeList at current time.
        time_extract = datetime_constraint(a_time)
        cube = extract_cube_at_time(cubes, a_time, time_extract)
        if cube is None:
            # If no cube is available at given time, try the next time.
            continue

        ad = {}
        if additional_diagnostics is not None:
            # Extract additional diagnostics at current time.
            ad = extract_ad_at_time(additional_diagnostics, a_time,
                                    time_extract)

        args = (cube, sites, neighbour_list, ancillary_data, ad)

        # Extract diagnostic data using defined method.
        resulting_cubes.append(
            ExtractData(diagnostic['interpolation_method']).process(
                *args, **kwargs))

    # Concatenate CubeList into Cube, creating a time DimCoord, and write
    # out.
    if resulting_cubes:
        cube_out, = resulting_cubes.concatenate()
        WriteOutput('as_netcdf', dir_path=output_path).process(cube_out)
    else:
        raise Exception('No data available at given forecast times.')

    # If set in the configuration, extract the diagnostic maxima and minima
    # values.
    if diagnostic['extrema']:
        extrema_cubes = ExtractExtrema(24, start_hour=9).process(cube_out)
        extrema_cubes = extrema_cubes.merge()
        for extrema_cube in extrema_cubes:
            WriteOutput('as_netcdf',
                        dir_path=output_path).process(extrema_cube)
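# A minimal usage sketch for process_diagnostic (illustrative only: the
# diagnostic definition, neighbour/site structures and paths below are
# hypothetical placeholders rather than values shipped with this module):
#
#   diagnostic = {
#       'filepath': 'temperature_at_screen_level',
#       'diagnostic_name': 'air_temperature',
#       'neighbour_finding': 'fast_nearest_neighbour',
#       'interpolation_method': 'use_nearest',
#       'extrema': True,
#   }
#   process_diagnostic(diagnostic, neighbours, sites, forecast_times,
#                      '/path/to/diagnostic/data', ancillary_data,
#                      output_path='/path/to/output')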