# --- Example 1 ---
    def run(self, context):
        """Create a simulation-average (climatology) map for an ocean variable.

        Loads the input cube(s), collapses the time dimension with proper
        time weights, marks the result as climatological and saves it.
        """
        src = self.getarg('src', context)
        dst = self.getarg('dst', context)
        varname = self.getarg('varname', context)
        self.log_info(f"Create map for ocean variable {varname} at {dst}.")
        self.log_debug(f"Source file(s): {src}")

        self.check_file_extension(dst)

        cube = helpers.load_input_cube(src, varname)

        # The auxiliary time coordinate must go before the cube is collapsed.
        cube.remove_coord(cube.coord('time', dim_coords=False))

        # Weighted temporal mean over the whole simulation leg.
        climatology = cube.collapsed(
            'time',
            iris.analysis.MEAN,
            weights=helpers.compute_time_weights(cube, cube.shape),
        )

        climatology.coord('time').climatological = True
        climatology = self.set_cell_methods(climatology)

        climatology = helpers.set_metadata(
            climatology,
            title=f'{climatology.long_name.title()} (Annual Mean Climatology)',
            comment=f"Simulation Average of **{varname}**.",
            map_type='global ocean',
        )

        self.save(climatology, dst)
    def run(self, context):
        """Create a simulation-average (climatology) map for an atmosphere
        variable identified by its GRIB code.
        """
        src = self.getarg('src', context)
        dst = self.getarg('dst', context)
        grib_code = self.getarg('grib_code', context)
        self.log_info(
            f"Create map for atmosphere variable {grib_code} at {dst}.")
        # Drop files ending in '000000' from the input list
        # (presumably initial-state output — TODO confirm).
        src = [path for path in src if not path.endswith('000000')]
        self.log_debug(f"Source file(s): {src}")

        self.check_file_extension(dst)

        # Translate the GRIB1 code to a CF phenomenon so the cube can be
        # selected by standard name; fail loudly if the mapping is unknown.
        update_grib_mappings()
        cf_phenomenon = iris_grib.grib_phenom_translation.grib1_phenom_to_cf_info(
            128,  # table
            98,  # institution: ECMWF
            grib_code)
        if not cf_phenomenon:
            msg = f"CF Phenomenon for {grib_code} not found. Update local table?"
            self.log_error(msg)
            raise ScriptEngineTaskArgumentInvalidError()
        self.log_debug(f"Getting variable {cf_phenomenon.standard_name}")
        leg_cube = helpers.load_input_cube(src, cf_phenomenon.standard_name)

        # Reconstruct time bounds from the spacing of the first two time
        # points; assumes a regular output interval and at least two points.
        time_coord = leg_cube.coord('time')
        step = time_coord.points[1] - time_coord.points[0]
        time_coord.bounds = np.array([[point - step, point]
                                      for point in time_coord.points])

        leg_mean = leg_cube.collapsed(
            'time',
            iris.analysis.MEAN,
        )
        leg_mean.coord('time').climatological = True
        # Replace iris' auto-generated cell methods with explicit
        # climatological ones.
        leg_mean.cell_methods = ()
        leg_mean.add_cell_method(
            # NOTE(review): 'step * 3600' assumes time points are in hours —
            # confirm against the model output's time units.
            iris.coords.CellMethod('mean within years',
                                   coords='time',
                                   intervals=f'{step * 3600} seconds'))
        leg_mean.add_cell_method(
            iris.coords.CellMethod('mean over years', coords='time'))
        leg_mean.add_cell_method(
            iris.coords.CellMethod('point', coords=['latitude', 'longitude']))

        # Make the long name human-readable for the plot title.
        leg_mean.long_name = leg_mean.long_name.replace("_", " ")

        leg_mean = helpers.set_metadata(
            leg_mean,
            title=f'{leg_mean.long_name.title()} (Annual Mean Climatology)',
            comment=f"Simulation Average of **{grib_code}**.",
            map_type='global atmosphere',
        )
        # Report temperatures in °C rather than K.
        if leg_mean.units.name == 'kelvin':
            leg_mean.convert_units('degC')

        self.save(leg_mean, dst)
 def time_operation(self, varname, leg_cube):
     """Attach monthly-mean metadata and cell methods to the leg cube."""
     self.log_debug("Creating monthly means.")
     cube = helpers.set_metadata(
         leg_cube,
         title=f'{leg_cube.long_name.title()} (Monthly Mean Map)',
         comment=f"Monthly Mean of **{varname}**.",
         map_type='global ocean',
     )
     cube.add_cell_method(
         iris.coords.CellMethod('point', coords=['latitude', 'longitude']))
     return cube
 def adjust_metadata(self, temporalmap_cube, varname: str):
     """Finalise temporal-map cube metadata (title, comment, map type)
     before saving.
     """
     cube = helpers.set_metadata(
         temporalmap_cube,
         title=f"{temporalmap_cube.long_name.title()} (Annual Mean Map)",
         comment=f"Annual Mean of **{varname}**.",
         map_type="global atmosphere",
     )
     # Report temperatures in °C rather than K.
     if cube.units.name == "kelvin":
         cube.convert_units("degC")
     return cube
# --- Example 5 ---
    def run(self, context):
        """Create a polar (hemispheric) climatology map for a sea-ice
        variable, masking out the opposite hemisphere.
        """
        src = self.getarg('src', context)
        dst = self.getarg('dst', context)
        hemisphere = self.getarg('hemisphere', context)
        varname = self.getarg('varname', context)
        self.log_info(
            f"Create {varname} map for {hemisphere}ern hemisphere at {dst}.")
        self.log_debug(f"Source file(s): {src}")

        # Validate arguments before doing any work.
        if varname not in meta_dict:
            msg = (
                f"'varname' must be one of the following: {meta_dict.keys()} "
                f"Diagnostic will not be treated, returning now.")
            self.log_error(msg)
            raise ScriptEngineTaskArgumentInvalidError()
        if not hemisphere in ('north', 'south'):
            msg = (
                f"'hemisphere' must be 'north' or 'south' but is '{hemisphere}'."
                f"Diagnostic will not be treated, returning now.")
            self.log_error(msg)
            raise ScriptEngineTaskArgumentInvalidError()
        self.check_file_extension(dst)

        month_cube = helpers.load_input_cube(src, varname)
        # Remove auxiliary time coordinate
        month_cube.remove_coord(month_cube.coord('time', dim_coords=False))
        # Keep only the first time slice (presumably the first month of the
        # leg — TODO confirm against the monitoring configuration).
        month_cube = month_cube[0]
        time_coord = month_cube.coord('time')
        time_coord.bounds = self.get_time_bounds(time_coord)
        # Broadcast the 1-D/2-D latitude points to the cube shape so the
        # opposite hemisphere can be masked element-wise.
        latitudes = np.broadcast_to(
            month_cube.coord('latitude').points, month_cube.shape)
        if hemisphere == "north":
            month_cube.data = np.ma.masked_where(latitudes < 0,
                                                 month_cube.data)
        else:
            month_cube.data = np.ma.masked_where(latitudes > 0,
                                                 month_cube.data)

        month_cube.long_name = f"{meta_dict[varname]} {hemisphere} {self.get_month(time_coord)}"
        # Mask exact zeros (ice-free cells) so they do not dominate the plot.
        month_cube.data = np.ma.masked_equal(month_cube.data, 0)

        # Cast to float64 to avoid precision/concatenation issues downstream.
        month_cube.data = month_cube.data.astype('float64')
        comment = f"Simulation Average of {meta_dict[varname]} / **{varname}** on {hemisphere}ern hemisphere."
        month_cube = helpers.set_metadata(
            month_cube,
            title=f'{month_cube.long_name} (Climatology)',
            comment=comment,
            map_type='polar ice sheet',
        )
        time_coord.climatological = True
        month_cube = self.set_cell_methods(month_cube, hemisphere)

        self.save(month_cube, dst)
    def run(self, context):
        """Write a single (coordinate, value) data point as a time-series cube.

        The coordinate value may be a ``datetime``/``date`` (converted to
        seconds since 1900-01-01) or any plain number with an explicit unit.
        """
        # load input parameters
        title = self.getarg('title', context)
        dst = self.getarg('dst', context)
        self.log_info(f"Time series {title} at {dst}.")

        # Convert coordinate value to number and get unit. Check datetime
        # before date because datetime.datetime subclasses datetime.date;
        # isinstance also accepts subclasses, which the previous
        # type()-membership test rejected.
        coord_value = self.getarg("coord_value", context, default=None)
        if isinstance(coord_value, datetime.datetime):
            coord_value = (
                coord_value - datetime.datetime(1900, 1, 1)).total_seconds()
            coord_unit = "second since 1900-01-01 00:00:00"
        elif isinstance(coord_value, datetime.date):
            coord_value = (
                coord_value - datetime.date(1900, 1, 1)).total_seconds()
            coord_unit = "second since 1900-01-01 00:00:00"
        else:
            coord_unit = self.getarg("coord_unit", context, default="1")

        data_value = self.getarg("data_value", context)
        data_unit = self.getarg("data_unit", context, default="1")

        coord_name = self.getarg('coord_name', context, default='time')
        data_name = self.getarg('data_name', context, default=title)
        comment = self.getarg('comment', context, default=".")

        self.log_debug(f"Value: {data_value} at time: {coord_value}, title: {title}")

        self.check_file_extension(dst)

        # create the one-point dimension coordinate
        coord = iris.coords.DimCoord(
            points=np.array([coord_value]),
            long_name=coord_name,
            var_name=coord_name.replace(" ", "_"),
            units=coord_unit,
        )

        # create a one-element cube holding the data point
        data_cube = iris.cube.Cube(
            data=np.array([data_value]),
            long_name=data_name,
            var_name=data_name.replace(" ", "_"),
            units=data_unit,
            dim_coords_and_dims=[(coord, 0)],
        )

        # set metadata
        data_cube = helpers.set_metadata(
            data_cube,
            title=title,
            comment=comment,
        )
        self.save(data_cube, dst)
    def run(self, context):
        """Write the per-leg global average of an ocean variable as a
        one-point annual-mean time series.
        """
        src = self.getarg('src', context)
        dst = self.getarg('dst', context)
        domain = self.getarg('domain', context)
        varname = self.getarg('varname', context)
        comment = (f"Global average time series of **{varname}**. "
                   f"Each data point represents the (spatial and temporal) "
                   f"average over one leg.")
        self.log_info(f"Create time series for ocean variable {varname} at {dst}.")
        self.log_debug(f"Domain: {domain}, Source file(s): {src}")

        self.check_file_extension(dst)

        cube = hlp.load_input_cube(src, varname)

        grid = self.getarg('grid', context, default='T')
        # Area-weighted spatial mean; insufficient-metadata warnings from
        # iris are expected and suppressed here.
        with warnings.catch_warnings():
            warnings.filterwarnings(
                'ignore',
                "Collapsing a multi-dimensional coordinate.",
                UserWarning,
                )
            spatial_mean = cube.collapsed(
                ['latitude', 'longitude'],
                iris.analysis.MEAN,
                weights=hlp.compute_spatial_weights(domain, cube.shape, grid=grid),
                )

        # The auxiliary time coordinate must go before collapsing over time.
        spatial_mean.remove_coord(spatial_mean.coord('time', dim_coords=False))
        annual_mean = spatial_mean.collapsed(
            'time',
            iris.analysis.MEAN,
            weights=hlp.compute_time_weights(spatial_mean),
        )
        # Promote time from scalar to dimension coordinate
        annual_mean = iris.util.new_axis(annual_mean, 'time')

        annual_mean = hlp.set_metadata(
            annual_mean,
            title=f'{annual_mean.long_name} (Annual Mean)',
            comment=comment,
            )

        # Replace auto-generated cell methods with explicit ones.
        annual_mean.cell_methods = ()
        annual_mean.add_cell_method(
            iris.coords.CellMethod('mean', coords='time', intervals='1 month')
            )
        annual_mean.add_cell_method(iris.coords.CellMethod('mean', coords='area'))

        self.save(annual_mean, dst)
    def run(self, context):
        """Write the leg mean of an atmosphere variable (by GRIB code) as a
        2-D time map.
        """
        src = self.getarg('src', context)
        dst = self.getarg('dst', context)
        grib_code = self.getarg('grib_code', context)
        # Drop files ending in '000000' from the input list.
        src = [path for path in src if not path.endswith('000000')]
        self.log_info(
            f"Create time map for atmosphere variable {grib_code} at {dst}.")
        self.log_debug(f"Source file(s): {src}")

        self.check_file_extension(dst)

        # Translate the GRIB1 code to a CF phenomenon; fail if unknown.
        update_grib_mappings()
        phenomenon = iris_grib.grib_phenom_translation.grib1_phenom_to_cf_info(
            128,  # table
            98,  # institution: ECMWF
            grib_code)
        if not phenomenon:
            msg = f"CF Phenomenon for {grib_code} not found. Update local table?"
            self.log_error(msg)
            raise ScriptEngineTaskArgumentInvalidError()
        self.log_debug(f"Getting variable {phenomenon.standard_name}")
        cube = helpers.load_input_cube(src, phenomenon.standard_name)

        cube.long_name = cube.long_name.replace("_", " ")

        # Report temperatures in °C rather than K.
        if cube.units.name == 'kelvin':
            cube.convert_units('degC')

        # Reconstruct time bounds from the spacing of the first two points.
        time_coord = cube.coord('time')
        step = time_coord.points[1] - time_coord.points[0]
        time_coord.bounds = np.array([[point - step, point]
                                      for point in time_coord.points])

        mean_cube = cube.collapsed(
            'time',
            iris.analysis.MEAN,
        )
        # Promote time from scalar to dimension coordinate
        mean_cube = iris.util.new_axis(mean_cube, 'time')

        mean_cube = self.set_cell_methods(mean_cube, step)

        mean_cube = helpers.set_metadata(
            mean_cube,
            title=f'{mean_cube.long_name.title()} (Annual Mean Map)',
            comment=f"Leg Mean of **{grib_code}**.",
            map_type="global atmosphere",
        )

        self.save(mean_cube, dst)
# --- Example 9 ---
 def adjust_metadata(self, timeseries_cube, varname: str):
     """Finalise time-series cube metadata (title, comment) before saving."""
     # Add File Metadata
     comment = (f"Global average time series of **{varname}**. "
                f"Each data point represents the (spatial and temporal) "
                f"average over one year.")
     cube = helpers.set_metadata(
         timeseries_cube,
         title=f"{timeseries_cube.long_name} (Annual Mean)",
         comment=comment,
     )
     # Report temperatures in °C rather than K.
     if cube.units.name == "kelvin":
         cube.convert_units("degC")
     return cube
# --- Example 10 ---
def test_set_metadata():
    """set_metadata must drop XIOS bookkeeping attributes, add the fixed
    source/Conventions attributes, and merge any extra metadata passed in.
    """
    cube = iris.cube.Cube([1])
    # Attributes XIOS writes that set_metadata is expected to remove.
    cube.attributes = {
        key: None
        for key in ('description', 'interval_operation', 'interval_write',
                    'name', 'online_operation')
    }
    updated_cube = file_handling.set_metadata(cube)
    assert updated_cube.attributes == {
        'source': 'EC-Earth 4',
        'Conventions': 'CF-1.8'
        }
    new_metadata = {
        'title': 'Title',
        'comment': 'Comment',
        'diagnostic_type': 'Type',
        'source': 'EC-Earth 4',
        'Conventions': 'CF-1.8',
        'custom': 'Custom',
    }
    updated_cube = file_handling.set_metadata(updated_cube, **new_metadata)
    assert updated_cube.attributes == new_metadata
 def adjust_metadata(self, map_cube, varname: str):
     """Finalise map cube metadata before saving."""
     # Cast to float64 so cubes from different legs concatenate cleanly.
     map_cube.data = map_cube.data.astype("float64")
     # Add File Metadata
     cube = helpers.set_metadata(
         map_cube,
         title=f"{map_cube.long_name.title()} (Annual Mean Climatology)",
         comment=f"Simulation Average of **{varname}**.",
         map_type="global atmosphere",
     )
     # Report temperatures in °C rather than K.
     if cube.units.name == "kelvin":
         cube.convert_units("degC")
     return cube
 def time_operation(self, varname, leg_cube):
     """Collapse one leg to its month-weighted annual-mean map."""
     self.log_debug("Creating an annual mean.")
     weights = helpers.compute_time_weights(leg_cube, leg_cube.shape)
     mean_cube = leg_cube.collapsed('time',
                                    iris.analysis.MEAN,
                                    weights=weights)
     # Promote time from scalar to dimension coordinate
     mean_cube = iris.util.new_axis(mean_cube, 'time')
     mean_cube = helpers.set_metadata(
         mean_cube,
         title=f'{mean_cube.long_name.title()} (Annual Mean Map)',
         comment=f"Leg Mean of **{varname}**.",
         map_type="global ocean",
     )
     # Replace auto-generated cell methods with explicit ones.
     mean_cube.cell_methods = ()
     mean_cube.add_cell_method(
         iris.coords.CellMethod('mean', coords='time', intervals='1 month'))
     mean_cube.add_cell_method(
         iris.coords.CellMethod('point', coords=['latitude', 'longitude']))
     return mean_cube
    def run(self, context):
        """Write the per-leg global (spatial and temporal) average of an
        atmosphere variable, identified by GRIB code, as a time series.
        """
        src = self.getarg('src', context)
        dst = self.getarg('dst', context)
        grib_code = self.getarg('grib_code', context)
        # Drop files ending in '000000' from the input list
        # (presumably initial-state output — TODO confirm).
        src = [path for path in src if not path.endswith('000000')]
        self.log_info(
            f"Create time series for atmosphere variable {grib_code} at {dst}."
        )
        self.log_debug(f"Source file(s): {src}")

        self.check_file_extension(dst)

        # Translate the GRIB1 code to a CF phenomenon so the cube can be
        # selected by standard name; fail loudly if the mapping is unknown.
        update_grib_mappings()
        cf_phenomenon = iris_grib.grib_phenom_translation.grib1_phenom_to_cf_info(
            128,  # table
            98,  # institution: ECMWF
            grib_code)
        if not cf_phenomenon:
            msg = f"CF Phenomenon for {grib_code} not found. Update local table?"
            self.log_error(msg)
            raise ScriptEngineTaskArgumentInvalidError()
        self.log_debug(f"Getting variable {cf_phenomenon.standard_name}")
        leg_cube = helpers.load_input_cube(src, cf_phenomenon.standard_name)

        # Reconstruct time bounds from the spacing of the first two time
        # points; assumes a regular output interval and at least two points.
        time_coord = leg_cube.coord('time')
        step = time_coord.points[1] - time_coord.points[0]
        time_coord.bounds = np.array([[point - step, point]
                                      for point in time_coord.points])

        self.log_debug("Averaging over the leg.")
        leg_mean = leg_cube.collapsed(
            'time',
            iris.analysis.MEAN,
        )

        # Area-weighted spatial mean; metadata warnings are expected here.
        area_weights = self.get_area_weights(leg_mean)
        self.log_debug("Averaging over space.")
        with warnings.catch_warnings():
            # Suppress warning about insufficient metadata.
            warnings.filterwarnings(
                'ignore',
                "Collapsing a non-contiguous coordinate.",
                UserWarning,
            )
            spatial_mean = leg_mean.collapsed(
                ['latitude', 'longitude'],
                iris.analysis.MEAN,
                weights=area_weights,
            )

        # Replace auto-generated cell methods with explicit ones.
        spatial_mean.cell_methods = ()
        spatial_mean.add_cell_method(
            # NOTE(review): 'step * 3600' assumes time points are in hours —
            # confirm against the model output's time units.
            iris.coords.CellMethod('mean',
                                   coords='time',
                                   intervals=f'{step * 3600} seconds'))
        spatial_mean.add_cell_method(
            iris.coords.CellMethod('mean', coords='area'))

        # Promote time from scalar to dimension coordinate
        spatial_mean = iris.util.new_axis(spatial_mean, 'time')

        # Make the long name human-readable for the plot title.
        spatial_mean.long_name = spatial_mean.long_name.replace("_", " ")

        comment = (f"Global average time series of **{grib_code}**. "
                   f"Each data point represents the (spatial and temporal) "
                   f"average over one leg.")
        spatial_mean = helpers.set_metadata(
            spatial_mean,
            title=f'{spatial_mean.long_name} (Annual Mean)',
            comment=comment,
        )

        # Report temperatures in °C rather than K.
        if spatial_mean.units.name == 'kelvin':
            spatial_mean.convert_units('degC')
        self.save(spatial_mean, dst)
    def run(self, context):
        """Write the hemispheric sum of a sea-ice variable as a time series.

        Unknown variable names or hemispheres are logged and skipped (the
        task returns without failing the run).
        """
        dst = self.getarg('dst', context)
        varname = self.getarg('varname', context)
        hemisphere = self.getarg('hemisphere', context)

        self.log_info(
            f"Create {varname} time series for {hemisphere}ern hemisphere at {dst}."
        )

        if varname not in meta_dict:
            self.log_warning(
                (f"'varname' must be one of the following: {meta_dict.keys()} "
                 f"Diagnostic will not be treated, returning now."))
            return
        long_name = meta_dict[varname]['long_name']

        src = self.getarg('src', context)
        domain = self.getarg('domain', context)

        self.log_debug(f"Domain: {domain}, Source file(s): {src}")

        # 'not x in y' replaced with the idiomatic 'x not in y'.
        if hemisphere not in ('north', 'south'):
            self.log_warning((
                f"'hemisphere' must be 'north' or 'south' but is '{hemisphere}'."
                f"Diagnostic will not be treated, returning now."))
            return
        self.check_file_extension(dst)

        leg_cube = helpers.load_input_cube(src, varname)
        # Pass the grid as a keyword for consistency with the other tasks.
        cell_weights = helpers.compute_spatial_weights(domain, leg_cube.shape,
                                                       grid='T')
        # Mask out the opposite hemisphere before summing.
        latitudes = np.broadcast_to(
            leg_cube.coord('latitude').points, leg_cube.shape)
        if hemisphere == "north":
            leg_cube.data = np.ma.masked_where(latitudes < 0, leg_cube.data)
        else:
            leg_cube.data = np.ma.masked_where(latitudes > 0, leg_cube.data)
        with warnings.catch_warnings():
            # Suppress warning about insufficient metadata.
            warnings.filterwarnings(
                'ignore',
                "Collapsing a multi-dimensional coordinate.",
                UserWarning,
            )
            hemispheric_sum = leg_cube.collapsed(
                ['latitude', 'longitude'],
                iris.analysis.SUM,
                weights=cell_weights,
            )

        # Remove auxiliary time coordinate
        hemispheric_sum.remove_coord(
            hemispheric_sum.coord('time', dim_coords=False))
        # Rename and convert units according to the per-variable metadata table.
        hemispheric_sum.standard_name = meta_dict[varname]['standard_name']
        hemispheric_sum.units = cf_units.Unit(meta_dict[varname]['old_unit'])
        hemispheric_sum.convert_units(meta_dict[varname]['new_unit'])
        hemispheric_sum.long_name = f"{long_name} {hemisphere.capitalize()}"
        # Suffix the var_name with 'n'/'s' for the hemisphere.
        hemispheric_sum.var_name = meta_dict[varname]['var_name'] + hemisphere[
            0]

        metadata = {
            'comment':
            (f"Sum of {long_name} / **{varname}** on {hemisphere}ern hemisphere."
             ),
            'title':
            f"{long_name} (Seasonal Cycle)",
        }
        hemispheric_sum = helpers.set_metadata(hemispheric_sum, **metadata)
        hemispheric_sum = self.set_cell_methods(hemispheric_sum, hemisphere)
        self.save(hemispheric_sum, dst)