def test_product_definition(self, mock_set):
    """Check that the product definition template number 8 is coded."""
    cube = self.cube
    # Attach a statistical cell method so that template 8 applies.
    cube.add_cell_method(CellMethod(method='sum', coords=['time']))
    product_definition_template_8(cube, mock.sentinel.grib)
    mock_set.assert_any_call(mock.sentinel.grib,
                             "productDefinitionTemplateNumber", 8)
def test_stats_type_max(self, mock_set_long):
    """
    Check that a 'maximum' cell method is saved as statistical
    processing type 2 (GRIB code table 4.10: 2 = Maximum).
    """
    grib = None
    cube = iris.cube.Cube(np.array([1.0]))
    time_unit = iris.unit.Unit('hours since 1970-01-01 00:00:00')
    time_coord = iris.coords.DimCoord([0.0], bounds=[0.0, 1],
                                      standard_name='time',
                                      units=time_unit)
    cube.add_aux_coord(time_coord, ())
    # BUG FIX: this "max" test previously attached a 'minimum' cell
    # method and asserted code 3 (the minimum code) — swapped with the
    # companion "min" test. GRIB table 4.10: maximum -> 2, minimum -> 3.
    cube.add_cell_method(iris.coords.CellMethod('maximum', time_coord))
    grib_save_rules.type_of_statistical_processing(cube, grib, time_coord)
    mock_set_long.assert_any_call(grib, "typeOfStatisticalProcessing", 2)
def test_stats_type_min(self, mock_set):
    """
    Check that a 'minimum' cell method is saved as statistical
    processing type 3 (GRIB code table 4.10: 3 = Minimum).
    """
    grib = None
    cube = iris.cube.Cube(np.array([1.0]))
    time_unit = cf_units.Unit('hours since 1970-01-01 00:00:00')
    time_coord = iris.coords.DimCoord([0.0], bounds=[0.0, 1],
                                      standard_name='time',
                                      units=time_unit)
    cube.add_aux_coord(time_coord, ())
    # BUG FIX: this "min" test previously attached a 'maximum' cell
    # method and asserted code 2 (the maximum code) — swapped with the
    # companion "max" test. GRIB table 4.10: maximum -> 2, minimum -> 3.
    cube.add_cell_method(iris.coords.CellMethod('minimum', time_coord))
    grib_save_rules.product_definition_template_8(cube, grib)
    mock_set.assert_any_call(grib, "typeOfStatisticalProcessing", 3)
def test_stats_type_max(self, mock_set):
    """
    Check that a 'maximum' cell method is saved as statistical
    processing type 2 (GRIB code table 4.10: 2 = Maximum).
    """
    grib = None
    cube = iris.cube.Cube(np.array([1.0]))
    time_unit = Unit("hours since 1970-01-01 00:00:00")
    time_coord = iris.coords.DimCoord([0.0], bounds=[0.0, 1],
                                      standard_name="time",
                                      units=time_unit)
    cube.add_aux_coord(time_coord, ())
    # BUG FIX: this "max" test previously attached a 'minimum' cell
    # method and asserted code 3 (the minimum code).
    # GRIB table 4.10: maximum -> 2, minimum -> 3.
    cube.add_cell_method(iris.coords.CellMethod("maximum", time_coord))
    product_definition_template_8(cube, grib)
    mock_set.assert_any_call(grib, "typeOfStatisticalProcessing", 2)
def _process_action_result(self, obj, cube):
    """
    Apply the result of a rule action to *cube*.

    Dispatches on the type of *obj* and returns the factory produced
    by the action, if any, otherwise None.
    """
    # NB. The names such as 'CoordAndDims' and 'CellMethod' are defined by
    # the "deferred import" performed by Rule.run_actions() above.
    factory = None
    if isinstance(obj, CoordAndDims):
        obj.add_coord(cube)
    elif isinstance(obj, CellMethod):
        # Cell methods - not yet implemented.
        cube.add_cell_method(obj)
    elif isinstance(obj, CMAttribute):
        # Temporary code to deal with invalid standard names from the
        # translation table.
        # TODO: when name is "standard_name" force the value to be a
        # real standard name.
        if obj.name == 'standard_name' and obj.value is not None:
            cube.rename(obj.value)
        elif obj.name == 'units':
            # Graceful loading of units: fall back to an unknown unit,
            # stashing the offending string in the cube attributes.
            try:
                cube.units = obj.value
            except ValueError:
                warnings.warn(
                    'Ignoring PP invalid units {!r}'.format(obj.value))
                cube.attributes['invalid_units'] = obj.value
                cube.units = cf_units._UNKNOWN_UNIT_STRING
        else:
            setattr(cube, obj.name, obj.value)
    elif isinstance(obj, CMCustomAttribute):
        cube.attributes[obj.name] = obj.value
    elif isinstance(obj, Factory):
        factory = obj
    elif isinstance(obj, DebugString):
        print(obj)
    elif obj is None:
        # The function returned nothing, like the pp save actions,
        # "lbft = 3"
        pass
    else:
        raise Exception(
            "Object could not be added to cube. Unknown type: "
            + obj.__class__.__name__)
    return factory
def _process_action_result(self, obj, cube):
    """
    Process the result of an action.

    Dispatches on the type of *obj* (the value returned by a rule
    action) and applies it to *cube*.  Returns the factory produced by
    the action, if any, otherwise None.
    """
    factory = None
    # NB. The names such as 'CoordAndDims' and 'CellMethod' are defined by
    # the "deferred import" performed by Rule.run_actions() above.
    if isinstance(obj, CoordAndDims):
        obj.add_coord(cube)
    # cell methods - not yet implemented
    elif isinstance(obj, CellMethod):
        cube.add_cell_method(obj)
    elif isinstance(obj, CMAttribute):
        # Temporary code to deal with invalid standard names from the
        # translation table.
        # TODO: when name is "standard_name" force the value to be a real
        # standard name
        if obj.name == 'standard_name' and obj.value is not None:
            cube.rename(obj.value)
        elif obj.name == 'units':
            # Graceful loading of units: on an invalid unit string, warn,
            # keep the original string in the cube attributes, and fall
            # back to the unknown-unit placeholder.
            try:
                setattr(cube, obj.name, obj.value)
            except ValueError:
                msg = 'Ignoring PP invalid units {!r}'.format(obj.value)
                warnings.warn(msg)
                cube.attributes['invalid_units'] = obj.value
                cube.units = iris.unit._UNKNOWN_UNIT_STRING
        else:
            setattr(cube, obj.name, obj.value)
    elif isinstance(obj, CMCustomAttribute):
        cube.attributes[obj.name] = obj.value
    elif isinstance(obj, Factory):
        # Factories are returned to the caller rather than applied here.
        factory = obj
    elif isinstance(obj, DebugString):
        print obj
    # The function returned nothing, like the pp save actions, "lbft = 3"
    elif obj is None:
        pass
    else:
        raise Exception("Object could not be added to cube. Unknown type: " +
                        obj.__class__.__name__)
    return factory
def test_cube_summary_cell_methods(self):
    """Check the string summary of a cube carrying various cell methods."""
    cube = self.cube_2d.copy()
    # Each entry supplies (method, coords, intervals, comments) for one
    # cell method to attach.
    test_values = ((("mean",), (u'longitude', 'latitude'),
                    (u'6 minutes', '12 minutes'),
                    (u'This is a test comment',)),
                   (("average",), (u'longitude', 'latitude'),
                    (u'6 minutes', '15 minutes'),
                    (u'This is another test comment',
                     'This is another comment')),
                   (("average",), (u'longitude', 'latitude'), (), ()),
                   (("percentile",), (u'longitude',), (u'6 minutes',),
                    (u'This is another test comment',)))
    for method, coord_names, intervals, comments in test_values:
        cube.add_cell_method(
            iris.coords.CellMethod(method=method[0], coords=coord_names,
                                   intervals=intervals, comments=comments))
    self.assertString(str(cube),
                      ('cdm', 'str_repr', 'cell_methods.__str__.txt'))
def _process_action_result(self, obj, cube):
    """
    Process the result of an action.

    Dispatches on the type of *obj* (the value returned by a rule
    action) and applies it to *cube*.  Returns the factory produced by
    the action, if any, otherwise None.
    """
    factory = None
    # NB. The names such as 'Coord' and 'CellMethod' are defined by
    # the "deferred import" performed by Rule.run_actions() above.
    if isinstance(obj, Coord):
        cube.add_coord(obj)
    elif isinstance(obj, CoordAndDims):
        obj.add_coord(cube)
    elif isinstance(obj, Factory):
        # Factories are returned to the caller rather than applied here.
        factory = obj
    # cell methods - not yet implemented
    elif isinstance(obj, CellMethod):
        cube.add_cell_method(obj)
    elif isinstance(obj, DebugString):
        print obj
    elif isinstance(obj, CMAttribute):
        # Temporary code to deal with invalid standard names from the
        # translation table.
        # TODO: when name is "standard_name" force the value to be a real
        # standard name
        if obj.name == 'standard_name' and obj.value is not None:
            cube.rename(obj.value)
        else:
            setattr(cube, obj.name, obj.value)
    elif isinstance(obj, CMCustomAttribute):
        cube.attributes[obj.name] = obj.value
    # The function returned nothing, like the pp save actions, "lbft = 3"
    elif obj is None:
        pass
    else:
        raise Exception("Object could not be added to cube. Unknown type: " +
                        obj.__class__.__name__)
    return factory
def test_cube_summary_cell_methods(self):
    """Check the string summary of a cube carrying various cell methods."""
    cube = self.cube_2d.copy()
    # Each entry supplies (method, coords, intervals, comments) for one
    # cell method to attach.
    cell_method_args = [
        (("mean",), (u"longitude", "latitude"),
         (u"6 minutes", "12 minutes"), (u"This is a test comment",)),
        (("average",), (u"longitude", "latitude"),
         (u"6 minutes", "15 minutes"),
         (u"This is another test comment", "This is another comment")),
        (("average",), (u"longitude", "latitude"), (), ()),
        (("percentile",), (u"longitude",), (u"6 minutes",),
         (u"This is another test comment",)),
    ]
    for method, coord_names, intervals, comments in cell_method_args:
        cm = iris.coords.CellMethod(method=method[0], coords=coord_names,
                                    intervals=intervals, comments=comments)
        cube.add_cell_method(cm)
    self.assertString(str(cube),
                      ("cdm", "str_repr", "cell_methods.__str__.txt"))
def _generate_cubes(header, column_headings, coords, data_arrays,
                    cell_methods=None):
    """
    Yield :class:`iris.cube.Cube` instances given
    the headers, column headings, coords and data_arrays extracted
    from a NAME file.

    One cube is yielded per entry of *data_arrays*; *cell_methods*,
    when given, supplies one cell method per data array.

    """
    for i, data_array in enumerate(data_arrays):
        # Turn the dictionary of column headings with a list of header
        # information for each field into a dictionary of headings for
        # just this field.
        field_headings = {k: v[i] for k, v in
                          six.iteritems(column_headings)}

        # Make a cube.
        cube = iris.cube.Cube(data_array)

        # Determine the name and units.
        name = '{} {}'.format(field_headings['Species'],
                              field_headings['Quantity'])
        name = name.upper().replace(' ', '_')
        cube.rename(name)

        # Some units are not in SI units, are missing spaces or typed
        # in the wrong case. _parse_units returns units that are
        # recognised by Iris.
        cube.units = _parse_units(field_headings['Unit'])

        # Define and add the singular coordinates of the field (flight
        # level, time etc.)
        z_coord = _cf_height_from_name(field_headings['Z'])
        cube.add_aux_coord(z_coord)

        # Define the time unit and use it to serialise the datetime for
        # the time coordinate.
        time_unit = cf_units.Unit(
            'hours since epoch', calendar=cf_units.CALENDAR_GREGORIAN)

        # Build time, latitude and longitude coordinates.
        for coord in coords:
            pts = coord.values
            coord_sys = None
            if coord.name == 'latitude' or coord.name == 'longitude':
                coord_units = 'degrees'
                coord_sys = iris.coord_systems.GeogCS(EARTH_RADIUS)
            if coord.name == 'time':
                coord_units = time_unit
                pts = time_unit.date2num(coord.values)

            if coord.dimension is not None:
                # Coordinate spans a cube dimension -> DimCoord.
                if coord.name == 'longitude':
                    circular = iris.util._is_circular(pts, 360.0)
                else:
                    circular = False
                icoord = DimCoord(points=pts,
                                  standard_name=coord.name,
                                  units=coord_units,
                                  coord_system=coord_sys,
                                  circular=circular)
                if coord.name == 'time' and 'Av or Int period' in field_headings:
                    # Time bounds run from (time - period) to time.
                    dt = coord.values - field_headings['Av or Int period']
                    bnds = time_unit.date2num(
                        np.vstack((dt, coord.values)).T)
                    icoord.bounds = bnds
                else:
                    icoord.guess_bounds()
                cube.add_dim_coord(icoord, coord.dimension)
            else:
                # Scalar coordinate for this field -> AuxCoord, taking
                # the i'th point (and bounds row) for the i'th cube.
                icoord = AuxCoord(points=pts[i],
                                  standard_name=coord.name,
                                  coord_system=coord_sys,
                                  units=coord_units)
                if coord.name == 'time' and 'Av or Int period' in field_headings:
                    dt = coord.values - field_headings['Av or Int period']
                    bnds = time_unit.date2num(
                        np.vstack((dt, coord.values)).T)
                    icoord.bounds = bnds[i, :]
                cube.add_aux_coord(icoord)

        # Headings/column headings which are encoded elsewhere.
        headings = ['X', 'Y', 'Z', 'Time', 'Unit', 'Av or Int period',
                    'X grid origin', 'Y grid origin',
                    'X grid size', 'Y grid size',
                    'X grid resolution', 'Y grid resolution', ]

        # Add the Main Headings as attributes.
        for key, value in six.iteritems(header):
            if value is not None and value != '' and key not in headings:
                cube.attributes[key] = value

        # Add the Column Headings as attributes
        for key, value in six.iteritems(field_headings):
            if value is not None and value != '' and key not in headings:
                cube.attributes[key] = value

        if cell_methods is not None:
            cube.add_cell_method(cell_methods[i])

        yield cube
def _generate_cubes(header, column_headings, coords, data_arrays,
                    cell_methods=None):
    """
    Yield :class:`iris.cube.Cube` instances given the headers, column
    headings, coords and data_arrays extracted from a NAME file.

    One cube is yielded per entry of *data_arrays*; *cell_methods*,
    when given, supplies one cell method per data array.

    """
    for i, data_array in enumerate(data_arrays):
        # Turn the dictionary of column headings with a list of header
        # information for each field into a dictionary of headings for
        # just this field.
        field_headings = {k: v[i] for k, v in column_headings.items()}

        # Make a cube.
        cube = iris.cube.Cube(data_array)

        # Determine the name and units.
        name = "{} {}".format(field_headings["Species"],
                              field_headings["Quantity"])
        name = name.upper().replace(" ", "_")
        cube.rename(name)

        # Some units are not in SI units, are missing spaces or typed
        # in the wrong case. _parse_units returns units that are
        # recognised by Iris.
        cube.units = _parse_units(field_headings["Units"])

        # Define and add the singular coordinates of the field (flight
        # level, time etc.)
        if "Z" in field_headings:
            # Optional vertical bounds: missing bound headings become None.
            (upper_bound, ) = [
                field_headings["... to [Z]"]
                if "... to [Z]" in field_headings
                else None
            ]
            (lower_bound, ) = [
                field_headings["... from [Z]"]
                if "... from [Z]" in field_headings
                else None
            ]
            z_coord = _cf_height_from_name(
                field_headings["Z"],
                upper_bound=upper_bound,
                lower_bound=lower_bound,
            )
            cube.add_aux_coord(z_coord)

        # Define the time unit and use it to serialise the datetime for
        # the time coordinate.
        time_unit = cf_units.Unit("hours since epoch",
                                  calendar=cf_units.CALENDAR_GREGORIAN)

        # Build time, height, latitude and longitude coordinates.
        for coord in coords:
            pts = coord.values
            coord_sys = None
            if coord.name == "latitude" or coord.name == "longitude":
                coord_units = "degrees"
                coord_sys = iris.coord_systems.GeogCS(EARTH_RADIUS)
            if (coord.name == "projection_x_coordinate"
                    or coord.name == "projection_y_coordinate"):
                coord_units = "m"
                coord_sys = iris.coord_systems.OSGB()
            if coord.name == "height":
                coord_units = "m"
                long_name = "height above ground level"
                pts = coord.values
            if coord.name == "altitude":
                coord_units = "m"
                long_name = "altitude above sea level"
                pts = coord.values
            if coord.name == "air_pressure":
                coord_units = "Pa"
                pts = coord.values
            if coord.name == "flight_level":
                pts = coord.values
                long_name = "flight_level"
                coord_units = _parse_units("FL")
            if coord.name == "time":
                coord_units = time_unit
                pts = time_unit.date2num(coord.values).astype(float)

            if coord.dimension is not None:
                # Coordinate spans a cube dimension -> DimCoord.
                if coord.name == "longitude":
                    circular = iris.util._is_circular(pts, 360.0)
                else:
                    circular = False
                if coord.name == "flight_level":
                    # Flight level is not a CF standard name.
                    icoord = DimCoord(points=pts, units=coord_units,
                                      long_name=long_name)
                else:
                    icoord = DimCoord(
                        points=pts,
                        standard_name=coord.name,
                        units=coord_units,
                        coord_system=coord_sys,
                        circular=circular,
                    )
                if coord.name == "height" or coord.name == "altitude":
                    icoord.long_name = long_name
                if (coord.name == "time"
                        and "Av or Int period" in field_headings):
                    # Time bounds run from (time - period) to time.
                    dt = coord.values - field_headings["Av or Int period"]
                    bnds = time_unit.date2num(np.vstack((dt,
                                                         coord.values)).T)
                    icoord.bounds = bnds.astype(float)
                else:
                    icoord.guess_bounds()
                cube.add_dim_coord(icoord, coord.dimension)
            else:
                # Scalar coordinate for this field -> AuxCoord, taking
                # the i'th point (and bounds row) for the i'th cube.
                icoord = AuxCoord(
                    points=pts[i],
                    standard_name=coord.name,
                    coord_system=coord_sys,
                    units=coord_units,
                )
                if (coord.name == "time"
                        and "Av or Int period" in field_headings):
                    dt = coord.values - field_headings["Av or Int period"]
                    bnds = time_unit.date2num(np.vstack((dt,
                                                         coord.values)).T)
                    icoord.bounds = bnds[i, :].astype(float)
                cube.add_aux_coord(icoord)

        # Headings/column headings which are encoded elsewhere.
        headings = [
            "X",
            "Y",
            "Z",
            "Time",
            "T",
            "Units",
            "Av or Int period",
            "... from [Z]",
            "... to [Z]",
            "X grid origin",
            "Y grid origin",
            "X grid size",
            "Y grid size",
            "X grid resolution",
            "Y grid resolution",
            "Number of field cols",
            "Number of preliminary cols",
            "Number of fields",
            "Number of series",
            "Output format",
        ]

        # Add the Main Headings as attributes.
        for key, value in header.items():
            if value is not None and value != "" and key not in headings:
                cube.attributes[key] = value

        # Add the Column Headings as attributes
        for key, value in field_headings.items():
            if value is not None and value != "" and key not in headings:
                cube.attributes[key] = value

        if cell_methods is not None:
            cube.add_cell_method(cell_methods[i])

        yield cube
def test_handmade(self):
    """
    Test xml output of a handmade cube.

    Builds cubes for every combination of lat/lon dtype, rotated pole,
    and forecast vs time-mean metadata, then compares against a CML
    reference file.
    """
    data = numpy.array([[1, 2, 3, 4, 5],
                        [2, 3, 4, 5, 6],
                        [3, 4, 5, 6, 7],
                        [4, 5, 6, 7, 8],
                        [5, 6, 7, 8, 9]], dtype=numpy.int32)
    cubes = []

    # Different types of test
    for ll_dtype in [numpy.float32, numpy.int32]:
        for rotated in [False, True]:
            for forecast_or_time_mean in ["forecast", "time_mean"]:
                for TEST_COMPAT_i in xrange(2):
                    # TODO: remove with TEST_COMPAT purge -
                    # adds two copies of each cube to cube list
                    # in line with redundant data first option
                    cube = iris.cube.Cube(data)
                    cube.attributes['my_attribute'] = 'foobar'

                    # Coordinate system: standard or rotated pole.
                    if rotated == False:
                        pole_pos = coord_systems.GeoPosition(90, 0)
                    else:
                        pole_pos = coord_systems.GeoPosition(30, 150)
                    lonlat_cs = coord_systems.LatLonCS(
                        "datum?", "prime_meridian?", pole_pos,
                        "reference_longitude?")
                    cube.add_dim_coord(
                        coords.DimCoord(
                            numpy.array([-180, -90, 0, 90, 180],
                                        dtype=ll_dtype),
                            'longitude', units='degrees',
                            coord_system=lonlat_cs), 1)
                    cube.add_dim_coord(
                        coords.DimCoord(
                            numpy.array([-90, -45, 0, 45, 90],
                                        dtype=ll_dtype),
                            'latitude', units='degrees',
                            coord_system=lonlat_cs), 0)

                    # height
                    cube.add_aux_coord(
                        coords.AuxCoord(
                            numpy.array([1000], dtype=numpy.int32),
                            long_name='pressure', units='Pa'))

                    # phenom
                    cube.rename("temperature")
                    cube.units = "K"

                    # source
                    cube.add_aux_coord(
                        coords.AuxCoord(points=["itbb"],
                                        long_name='source',
                                        units="no_unit"))

                    # forecast dates
                    if forecast_or_time_mean == "forecast":
                        unit = iris.unit.Unit(
                            'hours since epoch',
                            calendar=iris.unit.CALENDAR_GREGORIAN)
                        dt = datetime.datetime(2010, 12, 31, 12, 0)
                        cube.add_aux_coord(
                            coords.AuxCoord(
                                numpy.array([6], dtype=numpy.int32),
                                standard_name='forecast_period',
                                units='hours'))
                        cube.add_aux_coord(
                            coords.AuxCoord(
                                numpy.array([unit.date2num(dt)],
                                            dtype=numpy.float64),
                                standard_name='time', units=unit))

                    # time mean dates
                    if forecast_or_time_mean == "time_mean":
                        unit = iris.unit.Unit(
                            'hours since epoch',
                            calendar=iris.unit.CALENDAR_GREGORIAN)
                        # Time cell: midpoint with 6-hour bounds.
                        dt1 = datetime.datetime(2010, 12, 31, 6, 0)
                        dt2 = datetime.datetime(2010, 12, 31, 12, 0)
                        dt_mid = datetime.datetime(2010, 12, 31, 9, 0)
                        cube.add_aux_coord(
                            coords.AuxCoord(
                                numpy.array([6], dtype=numpy.int32),
                                standard_name='forecast_period',
                                units='hours'))
                        cube.add_aux_coord(
                            coords.AuxCoord(
                                numpy.array(unit.date2num(dt_mid),
                                            dtype=numpy.float64),
                                standard_name='time', units=unit,
                                bounds=numpy.array(
                                    [unit.date2num(dt1),
                                     unit.date2num(dt2)],
                                    dtype=numpy.float64)))
                        cube.add_cell_method(
                            coords.CellMethod(
                                'mean', cube.coord('forecast_period')))

                    cubes.append(cube)

    # Now we've made all sorts of cube, check the xml...
    self.assertCML(cubes, ('xml', 'handmade.cml'))
def _generate_cubes(header, column_headings, coords, data_arrays,
                    cell_methods=None):
    """
    Yield :class:`iris.cube.Cube` instances given
    the headers, column headings, coords and data_arrays extracted
    from a NAME file.

    One cube is yielded per entry of *data_arrays*; *cell_methods*,
    when given, supplies one cell method per data array.

    """
    for i, data_array in enumerate(data_arrays):
        # Turn the dictionary of column headings with a list of header
        # information for each field into a dictionary of headings for
        # just this field.
        field_headings = {k: v[i] for k, v in column_headings.iteritems()}

        # Make a cube.
        cube = iris.cube.Cube(data_array)

        # Determine the name and units.
        name = '{} {}'.format(field_headings['Species'],
                              field_headings['Quantity'])
        name = name.upper().replace(' ', '_')
        cube.rename(name)

        # Some units are not in SI units, are missing spaces or typed
        # in the wrong case. _parse_units returns units that are
        # recognised by Iris.
        cube.units = _parse_units(field_headings['Unit'])

        # Define and add the singular coordinates of the field (flight
        # level, time etc.)
        z_coord = _cf_height_from_name(field_headings['Z'])
        cube.add_aux_coord(z_coord)

        # Define the time unit and use it to serialise the datetime for
        # the time coordinate.
        time_unit = iris.unit.Unit('hours since epoch',
                                   calendar=iris.unit.CALENDAR_GREGORIAN)

        # Build time, latitude and longitude coordinates.
        for coord in coords:
            pts = coord.values
            coord_sys = None
            if coord.name == 'latitude' or coord.name == 'longitude':
                coord_units = 'degrees'
                coord_sys = iris.coord_systems.GeogCS(EARTH_RADIUS)
            if coord.name == 'time':
                coord_units = time_unit
                pts = time_unit.date2num(coord.values)

            if coord.dimension is not None:
                # Coordinate spans a cube dimension -> DimCoord.
                icoord = DimCoord(points=pts,
                                  standard_name=coord.name,
                                  units=coord_units,
                                  coord_system=coord_sys)
                if coord.name == 'time' and 'Av or Int period' in field_headings:
                    # Time bounds run from (time - period) to time.
                    dt = coord.values - field_headings['Av or Int period']
                    bnds = time_unit.date2num(np.vstack((dt,
                                                         coord.values)).T)
                    icoord.bounds = bnds
                else:
                    icoord.guess_bounds()
                cube.add_dim_coord(icoord, coord.dimension)
            else:
                # Scalar coordinate for this field -> AuxCoord, taking
                # the i'th point (and bounds row) for the i'th cube.
                icoord = AuxCoord(points=pts[i],
                                  standard_name=coord.name,
                                  coord_system=coord_sys,
                                  units=coord_units)
                if coord.name == 'time' and 'Av or Int period' in field_headings:
                    dt = coord.values - field_headings['Av or Int period']
                    bnds = time_unit.date2num(np.vstack((dt,
                                                         coord.values)).T)
                    icoord.bounds = bnds[i, :]
                cube.add_aux_coord(icoord)

        # Headings/column headings which are encoded elsewhere.
        headings = [
            'X', 'Y', 'Z', 'Time', 'Unit', 'Av or Int period',
            'X grid origin', 'Y grid origin',
            'X grid size', 'Y grid size',
            'X grid resolution', 'Y grid resolution',
        ]

        # Add the Main Headings as attributes.
        for key, value in header.iteritems():
            if value is not None and value != '' and key not in headings:
                cube.attributes[key] = value

        # Add the Column Headings as attributes
        for key, value in field_headings.iteritems():
            if value is not None and value != '' and key not in headings:
                cube.attributes[key] = value

        if cell_methods is not None:
            cube.add_cell_method(cell_methods[i])

        yield cube
def _generate_cubes(header, column_headings, coords, data_arrays,
                    cell_methods=None):
    """
    Yield :class:`iris.cube.Cube` instances given
    the headers, column headings, coords and data_arrays extracted
    from a NAME file.

    One cube is yielded per entry of *data_arrays*; *cell_methods*,
    when given, supplies one cell method per data array.

    """
    for i, data_array in enumerate(data_arrays):
        # Turn the dictionary of column headings with a list of header
        # information for each field into a dictionary of headings for
        # just this field.
        field_headings = {k: v[i] for k, v in six.iteritems(column_headings)}

        # Make a cube.
        cube = iris.cube.Cube(data_array)

        # Determine the name and units.
        name = "{} {}".format(field_headings["Species"],
                              field_headings["Quantity"])
        name = name.upper().replace(" ", "_")
        cube.rename(name)

        # Some units are not in SI units, are missing spaces or typed
        # in the wrong case. _parse_units returns units that are
        # recognised by Iris.
        cube.units = _parse_units(field_headings["Units"])

        # Define and add the singular coordinates of the field (flight
        # level, time etc.)
        if "Z" in field_headings:
            # Optional vertical bounds: missing bound headings become None.
            upper_bound, = [field_headings["... to [Z]"]
                            if "... to [Z]" in field_headings else None]
            lower_bound, = [field_headings["... from [Z]"]
                            if "... from [Z]" in field_headings else None]
            z_coord = _cf_height_from_name(field_headings["Z"],
                                           upper_bound=upper_bound,
                                           lower_bound=lower_bound)
            cube.add_aux_coord(z_coord)

        # Define the time unit and use it to serialise the datetime for
        # the time coordinate.
        time_unit = cf_units.Unit("hours since epoch",
                                  calendar=cf_units.CALENDAR_GREGORIAN)

        # Build time, height, latitude and longitude coordinates.
        for coord in coords:
            pts = coord.values
            coord_sys = None
            if coord.name == "latitude" or coord.name == "longitude":
                coord_units = "degrees"
                coord_sys = iris.coord_systems.GeogCS(EARTH_RADIUS)
            if coord.name == "projection_x_coordinate" or coord.name == "projection_y_coordinate":
                coord_units = "m"
                coord_sys = iris.coord_systems.OSGB()
            if coord.name == "height":
                coord_units = "m"
                long_name = "height above ground level"
                pts = coord.values
            if coord.name == "altitude":
                coord_units = "m"
                long_name = "altitude above sea level"
                pts = coord.values
            if coord.name == "air_pressure":
                coord_units = "Pa"
                pts = coord.values
            if coord.name == "flight_level":
                pts = coord.values
                long_name = "flight_level"
                coord_units = _parse_units("FL")
            if coord.name == "time":
                coord_units = time_unit
                pts = time_unit.date2num(coord.values)

            if coord.dimension is not None:
                # Coordinate spans a cube dimension -> DimCoord.
                if coord.name == "longitude":
                    circular = iris.util._is_circular(pts, 360.0)
                else:
                    circular = False
                if coord.name == "flight_level":
                    # Flight level is not a CF standard name.
                    icoord = DimCoord(points=pts, units=coord_units,
                                      long_name=long_name)
                else:
                    icoord = DimCoord(
                        points=pts,
                        standard_name=coord.name,
                        units=coord_units,
                        coord_system=coord_sys,
                        circular=circular,
                    )
                if coord.name == "height" or coord.name == "altitude":
                    icoord.long_name = long_name
                if coord.name == "time" and "Av or Int period" in field_headings:
                    # Time bounds run from (time - period) to time.
                    dt = coord.values - field_headings["Av or Int period"]
                    bnds = time_unit.date2num(np.vstack((dt,
                                                         coord.values)).T)
                    icoord.bounds = bnds
                else:
                    icoord.guess_bounds()
                cube.add_dim_coord(icoord, coord.dimension)
            else:
                # Scalar coordinate for this field -> AuxCoord, taking
                # the i'th point (and bounds row) for the i'th cube.
                icoord = AuxCoord(points=pts[i],
                                  standard_name=coord.name,
                                  coord_system=coord_sys,
                                  units=coord_units)
                if coord.name == "time" and "Av or Int period" in field_headings:
                    dt = coord.values - field_headings["Av or Int period"]
                    bnds = time_unit.date2num(np.vstack((dt,
                                                         coord.values)).T)
                    icoord.bounds = bnds[i, :]
                cube.add_aux_coord(icoord)

        # Headings/column headings which are encoded elsewhere.
        headings = [
            "X",
            "Y",
            "Z",
            "Time",
            "T",
            "Units",
            "Av or Int period",
            "... from [Z]",
            "... to [Z]",
            "X grid origin",
            "Y grid origin",
            "X grid size",
            "Y grid size",
            "X grid resolution",
            "Y grid resolution",
            "Number of field cols",
            "Number of preliminary cols",
            "Number of fields",
            "Number of series",
            "Output format",
        ]

        # Add the Main Headings as attributes.
        for key, value in six.iteritems(header):
            if value is not None and value != "" and key not in headings:
                cube.attributes[key] = value

        # Add the Column Headings as attributes
        for key, value in six.iteritems(field_headings):
            if value is not None and value != "" and key not in headings:
                cube.attributes[key] = value

        if cell_methods is not None:
            cube.add_cell_method(cell_methods[i])

        yield cube