Example #1
0
    def run(self, context):
        """Create a global-ocean climatology map of the given variable at dst."""
        source = self.getarg('src', context)
        target = self.getarg('dst', context)
        variable = self.getarg('varname', context)
        self.log_info(f"Create map for ocean variable {variable} at {target}.")
        self.log_debug(f"Source file(s): {source}")

        self.check_file_extension(target)

        cube = helpers.load_input_cube(source, variable)

        # Drop the auxiliary time coordinate; the cube must carry a single
        # time coordinate before it is collapsed.
        cube.remove_coord(cube.coord('time', dim_coords=False))

        weights = helpers.compute_time_weights(cube, cube.shape)
        averaged = cube.collapsed('time', iris.analysis.MEAN, weights=weights)

        # Flag the collapsed time coordinate as climatological and record
        # the applied cell methods.
        averaged.coord('time').climatological = True
        averaged = self.set_cell_methods(averaged)

        averaged = helpers.set_metadata(
            averaged,
            title=f'{averaged.long_name.title()} (Annual Mean Climatology)',
            comment=f"Simulation Average of **{variable}**.",
            map_type='global ocean',
        )

        self.save(averaged, target)
    def run(self, context):
        """Create a global-atmosphere climatology map for a GRIB-coded variable.

        Translates the GRIB code to its CF phenomenon, averages the input
        over the leg, and saves the result to dst.

        Raises ScriptEngineTaskArgumentInvalidError if the GRIB code cannot
        be translated.
        """
        src = self.getarg('src', context)
        dst = self.getarg('dst', context)
        grib_code = self.getarg('grib_code', context)
        self.log_info(
            f"Create map for atmosphere variable {grib_code} at {dst}.")
        # Drop files whose name ends in '000000' -- presumably the initial
        # output file that should not enter the average; TODO confirm.
        src = [path for path in src if not path.endswith('000000')]
        self.log_debug(f"Source file(s): {src}")

        self.check_file_extension(dst)

        # Translate the GRIB-1 code to CF metadata (ECMWF, table 128).
        update_grib_mappings()
        cf_phenomenon = iris_grib.grib_phenom_translation.grib1_phenom_to_cf_info(
            128,  # table
            98,  # institution: ECMWF
            grib_code)
        if not cf_phenomenon:
            msg = f"CF Phenomenon for {grib_code} not found. Update local table?"
            self.log_error(msg)
            raise ScriptEngineTaskArgumentInvalidError()
        self.log_debug(f"Getting variable {cf_phenomenon.standard_name}")
        leg_cube = helpers.load_input_cube(src, cf_phenomenon.standard_name)

        # Build contiguous time bounds from the spacing of the first two
        # points; assumes at least two, regularly spaced, time points --
        # TODO confirm.
        time_coord = leg_cube.coord('time')
        step = time_coord.points[1] - time_coord.points[0]
        time_coord.bounds = np.array([[point - step, point]
                                      for point in time_coord.points])

        leg_cube = leg_cube.collapsed(
            'time',
            iris.analysis.MEAN,
        ) if False else leg_cube  # (no-op guard removed below)
        leg_mean = leg_cube.collapsed(
            'time',
            iris.analysis.MEAN,
        )
        # Mark as climatological and record CF cell methods; 'step' is
        # presumably in hours (hence the *3600 to seconds) -- TODO confirm.
        leg_mean.coord('time').climatological = True
        leg_mean.cell_methods = ()
        leg_mean.add_cell_method(
            iris.coords.CellMethod('mean within years',
                                   coords='time',
                                   intervals=f'{step * 3600} seconds'))
        leg_mean.add_cell_method(
            iris.coords.CellMethod('mean over years', coords='time'))
        leg_mean.add_cell_method(
            iris.coords.CellMethod('point', coords=['latitude', 'longitude']))

        # Readable long name for the plot title.
        leg_mean.long_name = leg_mean.long_name.replace("_", " ")

        leg_mean = helpers.set_metadata(
            leg_mean,
            title=f'{leg_mean.long_name.title()} (Annual Mean Climatology)',
            comment=f"Simulation Average of **{grib_code}**.",
            map_type='global atmosphere',
        )
        # Present temperatures in degrees Celsius rather than Kelvin.
        if leg_mean.units.name == 'kelvin':
            leg_mean.convert_units('degC')

        self.save(leg_mean, dst)
Example #3
0
    def run(self, context):
        """Create a hemispheric map of a sea-ice variable at dst.

        The first month of the input is selected, the opposite hemisphere
        is masked out, and the result is saved with climatology metadata.

        Raises ScriptEngineTaskArgumentInvalidError if 'varname' is not a
        known diagnostic or 'hemisphere' is not 'north'/'south'.
        """
        src = self.getarg('src', context)
        dst = self.getarg('dst', context)
        hemisphere = self.getarg('hemisphere', context)
        varname = self.getarg('varname', context)
        self.log_info(
            f"Create {varname} map for {hemisphere}ern hemisphere at {dst}.")
        self.log_debug(f"Source file(s): {src}")

        if varname not in meta_dict:
            msg = (
                f"'varname' must be one of the following: {meta_dict.keys()} "
                f"Diagnostic will not be treated, returning now.")
            self.log_error(msg)
            raise ScriptEngineTaskArgumentInvalidError()
        # Fix: idiomatic 'x not in y' (PEP 8, E713) instead of 'not x in y'.
        if hemisphere not in ('north', 'south'):
            msg = (
                f"'hemisphere' must be 'north' or 'south' but is '{hemisphere}'."
                f"Diagnostic will not be treated, returning now.")
            self.log_error(msg)
            raise ScriptEngineTaskArgumentInvalidError()
        self.check_file_extension(dst)

        month_cube = helpers.load_input_cube(src, varname)
        # Remove auxiliary time coordinate before further processing.
        month_cube.remove_coord(month_cube.coord('time', dim_coords=False))
        # Keep only the first time step.
        month_cube = month_cube[0]
        time_coord = month_cube.coord('time')
        time_coord.bounds = self.get_time_bounds(time_coord)
        # Mask the opposite hemisphere via broadcast latitude comparison.
        latitudes = np.broadcast_to(
            month_cube.coord('latitude').points, month_cube.shape)
        if hemisphere == "north":
            month_cube.data = np.ma.masked_where(latitudes < 0,
                                                 month_cube.data)
        else:
            month_cube.data = np.ma.masked_where(latitudes > 0,
                                                 month_cube.data)

        month_cube.long_name = f"{meta_dict[varname]} {hemisphere} {self.get_month(time_coord)}"
        # Mask exact zeros (presumably ice-free cells) -- TODO confirm.
        month_cube.data = np.ma.masked_equal(month_cube.data, 0)

        month_cube.data = month_cube.data.astype('float64')
        comment = f"Simulation Average of {meta_dict[varname]} / **{varname}** on {hemisphere}ern hemisphere."
        month_cube = helpers.set_metadata(
            month_cube,
            title=f'{month_cube.long_name} (Climatology)',
            comment=comment,
            map_type='polar ice sheet',
        )
        time_coord.climatological = True
        month_cube = self.set_cell_methods(month_cube, hemisphere)

        self.save(month_cube, dst)
    def run(self, context):
        """Create an annual-mean global time series for an ocean variable."""
        source = self.getarg('src', context)
        target = self.getarg('dst', context)
        domain_file = self.getarg('domain', context)
        variable = self.getarg('varname', context)
        note = (f"Global average time series of **{variable}**. "
                f"Each data point represents the (spatial and temporal) "
                f"average over one leg.")
        self.log_info(f"Create time series for ocean variable {variable} at {target}.")
        self.log_debug(f"Domain: {domain_file}, Source file(s): {source}")

        self.check_file_extension(target)

        cube = hlp.load_input_cube(source, variable)

        grid_type = self.getarg('grid', context, default='T')
        with warnings.catch_warnings():
            # The horizontal coordinates carry insufficient metadata for
            # iris; silence the resulting warning.
            warnings.filterwarnings(
                'ignore',
                "Collapsing a multi-dimensional coordinate.",
                UserWarning,
                )
            spatial_avg = cube.collapsed(
                ['latitude', 'longitude'],
                iris.analysis.MEAN,
                weights=hlp.compute_spatial_weights(
                    domain_file, cube.shape, grid=grid_type),
                )
        # The auxiliary time coordinate must go before collapsing over time.
        spatial_avg.remove_coord(spatial_avg.coord('time', dim_coords=False))
        annual_avg = spatial_avg.collapsed(
            'time',
            iris.analysis.MEAN,
            weights=hlp.compute_time_weights(spatial_avg),
        )
        # Promote time from scalar back to a dimension coordinate so the
        # per-leg results can be concatenated later.
        annual_avg = iris.util.new_axis(annual_avg, 'time')

        annual_avg = hlp.set_metadata(
            annual_avg,
            title=f'{annual_avg.long_name} (Annual Mean)',
            comment=note,
            )

        # Replace the cell methods with ones describing this diagnostic.
        annual_avg.cell_methods = ()
        annual_avg.add_cell_method(
            iris.coords.CellMethod('mean', coords='time', intervals='1 month')
            )
        annual_avg.add_cell_method(iris.coords.CellMethod('mean', coords='area'))

        self.save(annual_avg, target)
    def run(self, context):
        """Create an annual-mean temporal map for a GRIB-coded atmosphere variable.

        Raises ScriptEngineTaskArgumentInvalidError if the GRIB code cannot
        be translated to a CF phenomenon.
        """
        src = self.getarg('src', context)
        dst = self.getarg('dst', context)
        grib_code = self.getarg('grib_code', context)
        # Drop files whose name ends in '000000' -- presumably the initial
        # output file that should not enter the average; TODO confirm.
        src = [path for path in src if not path.endswith('000000')]
        self.log_info(
            f"Create time map for atmosphere variable {grib_code} at {dst}.")
        self.log_debug(f"Source file(s): {src}")

        self.check_file_extension(dst)

        # Translate the GRIB-1 code to CF metadata (ECMWF, table 128).
        update_grib_mappings()
        cf_phenomenon = iris_grib.grib_phenom_translation.grib1_phenom_to_cf_info(
            128,  # table
            98,  # institution: ECMWF
            grib_code)
        if not cf_phenomenon:
            msg = f"CF Phenomenon for {grib_code} not found. Update local table?"
            self.log_error(msg)
            raise ScriptEngineTaskArgumentInvalidError()
        self.log_debug(f"Getting variable {cf_phenomenon.standard_name}")
        leg_cube = helpers.load_input_cube(src, cf_phenomenon.standard_name)

        # Readable long name for the plot title.
        leg_cube.long_name = leg_cube.long_name.replace("_", " ")

        # Present temperatures in degrees Celsius rather than Kelvin.
        if leg_cube.units.name == 'kelvin':
            leg_cube.convert_units('degC')

        # Build contiguous time bounds from the spacing of the first two
        # points; assumes at least two, regularly spaced, time points --
        # TODO confirm.
        time_coord = leg_cube.coord('time')
        step = time_coord.points[1] - time_coord.points[0]
        time_coord.bounds = np.array([[point - step, point]
                                      for point in time_coord.points])

        leg_mean = leg_cube.collapsed(
            'time',
            iris.analysis.MEAN,
        )
        # Promote time from scalar to dimension coordinate
        leg_mean = iris.util.new_axis(leg_mean, 'time')

        leg_mean = self.set_cell_methods(leg_mean, step)

        leg_mean = helpers.set_metadata(
            leg_mean,
            title=f'{leg_mean.long_name.title()} (Annual Mean Map)',
            comment=f"Leg Mean of **{grib_code}**.",
            map_type="global atmosphere",
        )

        self.save(leg_mean, dst)
    def run(self, context):
        """Create a temporal map for an atmosphere (OpenIFS) variable at dst."""
        source = self.getarg("src", context)
        target = self.getarg("dst", context)
        variable = self.getarg("varname", context)
        self.log_info(f"Create time map for atmosphere variable {variable} at {target}.")
        self.log_debug(f"Source file: {source}")

        self.check_file_extension(target)

        cube = helpers.load_input_cube(source, variable)

        # Average over time, then attach cell methods and metadata.
        mean_cube = self.compute_time_mean(cube)
        mean_cube = self.set_cell_methods(mean_cube)
        mean_cube = self.adjust_metadata(mean_cube, variable)
        self.save(mean_cube, target)
    def run(self, context):
        """Create a temporal map for an ocean variable at dst."""
        source = self.getarg('src', context)
        target = self.getarg('dst', context)
        variable = self.getarg('varname', context)
        self.log_info(
            f"Create temporal map for ocean variable {variable} at {target}.")
        self.log_debug(f"Source file(s): {source}")

        self.check_file_extension(target)

        cube = helpers.load_input_cube(source, variable)

        # Drop the auxiliary time coordinate; the time operation expects a
        # single time coordinate on the cube.
        cube.remove_coord(cube.coord('time', dim_coords=False))

        result = self.time_operation(variable, cube)
        self.save(result, target)
Example #8
0
    def run(self, context):
        """Create a global-mean time series for an atmosphere variable."""
        source = self.getarg("src", context)
        target = self.getarg("dst", context)
        variable = self.getarg("varname", context)
        self.log_info(
            f"Create time series for atmosphere variable {variable} at {target}.")
        self.log_debug(f"Source file(s): {source}")

        self.check_file_extension(target)

        cube = helpers.load_input_cube(source, variable)

        # Average over the leg first, then over the area-weighted globe.
        mean_cube = self.compute_time_mean(cube)
        weights = self.compute_area_weights(mean_cube)
        series_cube = self.compute_spatial_mean(mean_cube, weights)

        self.set_cell_methods(series_cube)
        series_cube = self.adjust_metadata(series_cube, variable)
        self.save(series_cube, target)
    def run(self, context):
        """Create an annual-mean global time series for a GRIB-coded variable.

        Averages the input over the leg, then over the area-weighted globe,
        and appends the result to dst.

        Raises ScriptEngineTaskArgumentInvalidError if the GRIB code cannot
        be translated to a CF phenomenon.
        """
        src = self.getarg('src', context)
        dst = self.getarg('dst', context)
        grib_code = self.getarg('grib_code', context)
        # Drop files whose name ends in '000000' -- presumably the initial
        # output file that should not enter the average; TODO confirm.
        src = [path for path in src if not path.endswith('000000')]
        self.log_info(
            f"Create time series for atmosphere variable {grib_code} at {dst}."
        )
        self.log_debug(f"Source file(s): {src}")

        self.check_file_extension(dst)

        # Translate the GRIB-1 code to CF metadata (ECMWF, table 128).
        update_grib_mappings()
        cf_phenomenon = iris_grib.grib_phenom_translation.grib1_phenom_to_cf_info(
            128,  # table
            98,  # institution: ECMWF
            grib_code)
        if not cf_phenomenon:
            msg = f"CF Phenomenon for {grib_code} not found. Update local table?"
            self.log_error(msg)
            raise ScriptEngineTaskArgumentInvalidError()
        self.log_debug(f"Getting variable {cf_phenomenon.standard_name}")
        leg_cube = helpers.load_input_cube(src, cf_phenomenon.standard_name)

        # Build contiguous time bounds from the spacing of the first two
        # points; assumes at least two, regularly spaced, time points --
        # TODO confirm.
        time_coord = leg_cube.coord('time')
        step = time_coord.points[1] - time_coord.points[0]
        time_coord.bounds = np.array([[point - step, point]
                                      for point in time_coord.points])

        self.log_debug("Averaging over the leg.")
        leg_mean = leg_cube.collapsed(
            'time',
            iris.analysis.MEAN,
        )

        area_weights = self.get_area_weights(leg_mean)
        self.log_debug("Averaging over space.")
        with warnings.catch_warnings():
            # Suppress warning about insufficient metadata.
            warnings.filterwarnings(
                'ignore',
                "Collapsing a non-contiguous coordinate.",
                UserWarning,
            )
            spatial_mean = leg_mean.collapsed(
                ['latitude', 'longitude'],
                iris.analysis.MEAN,
                weights=area_weights,
            )

        # Replace cell methods with ones describing this diagnostic;
        # 'step' is presumably in hours (hence *3600 to seconds) --
        # TODO confirm.
        spatial_mean.cell_methods = ()
        spatial_mean.add_cell_method(
            iris.coords.CellMethod('mean',
                                   coords='time',
                                   intervals=f'{step * 3600} seconds'))
        spatial_mean.add_cell_method(
            iris.coords.CellMethod('mean', coords='area'))

        # Promote time from scalar to dimension coordinate
        spatial_mean = iris.util.new_axis(spatial_mean, 'time')

        # Readable long name for the plot title.
        spatial_mean.long_name = spatial_mean.long_name.replace("_", " ")

        comment = (f"Global average time series of **{grib_code}**. "
                   f"Each data point represents the (spatial and temporal) "
                   f"average over one leg.")
        spatial_mean = helpers.set_metadata(
            spatial_mean,
            title=f'{spatial_mean.long_name} (Annual Mean)',
            comment=comment,
        )

        # Present temperatures in degrees Celsius rather than Kelvin.
        if spatial_mean.units.name == 'kelvin':
            spatial_mean.convert_units('degC')
        self.save(spatial_mean, dst)
Example #10
0
def test_load_input_cube():
    """Check that load_input_cube returns an iris Cube for a known test file."""
    src = "./tests/testdata/tos_nemo_all_mean_map.nc"
    varname = "tos"
    assert isinstance(file_handling.load_input_cube(src, varname), iris.cube.Cube)
    def run(self, context):
        """Create a hemispheric-sum time series for a sea-ice variable.

        Masks out the opposite hemisphere, sums over the remaining cells,
        and saves the result with seasonal-cycle metadata. Logs a warning
        and returns without output if 'varname' or 'hemisphere' is invalid.
        """
        dst = self.getarg('dst', context)
        varname = self.getarg('varname', context)
        hemisphere = self.getarg('hemisphere', context)

        self.log_info(
            f"Create {varname} time series for {hemisphere}ern hemisphere at {dst}."
        )

        if varname not in meta_dict:
            self.log_warning(
                (f"'varname' must be one of the following: {meta_dict.keys()} "
                 f"Diagnostic will not be treated, returning now."))
            return
        long_name = meta_dict[varname]['long_name']

        src = self.getarg('src', context)
        domain = self.getarg('domain', context)

        self.log_debug(f"Domain: {domain}, Source file(s): {src}")

        # Fix: idiomatic 'x not in y' (PEP 8, E713) instead of 'not x in y'.
        if hemisphere not in ('north', 'south'):
            self.log_warning((
                f"'hemisphere' must be 'north' or 'south' but is '{hemisphere}'."
                f"Diagnostic will not be treated, returning now."))
            return
        self.check_file_extension(dst)

        leg_cube = helpers.load_input_cube(src, varname)
        cell_weights = helpers.compute_spatial_weights(domain, leg_cube.shape,
                                                       'T')
        # Mask the opposite hemisphere via broadcast latitude comparison.
        latitudes = np.broadcast_to(
            leg_cube.coord('latitude').points, leg_cube.shape)
        if hemisphere == "north":
            leg_cube.data = np.ma.masked_where(latitudes < 0, leg_cube.data)
        else:
            leg_cube.data = np.ma.masked_where(latitudes > 0, leg_cube.data)
        with warnings.catch_warnings():
            # Suppress warning about insufficient metadata.
            warnings.filterwarnings(
                'ignore',
                "Collapsing a multi-dimensional coordinate.",
                UserWarning,
            )
            hemispheric_sum = leg_cube.collapsed(
                ['latitude', 'longitude'],
                iris.analysis.SUM,
                weights=cell_weights,
            )

        # Remove auxiliary time coordinate
        hemispheric_sum.remove_coord(
            hemispheric_sum.coord('time', dim_coords=False))
        # Rename and convert units as configured in meta_dict for this
        # diagnostic; var_name gets an 'n'/'s' hemisphere suffix.
        hemispheric_sum.standard_name = meta_dict[varname]['standard_name']
        hemispheric_sum.units = cf_units.Unit(meta_dict[varname]['old_unit'])
        hemispheric_sum.convert_units(meta_dict[varname]['new_unit'])
        hemispheric_sum.long_name = f"{long_name} {hemisphere.capitalize()}"
        hemispheric_sum.var_name = meta_dict[varname]['var_name'] + hemisphere[0]

        metadata = {
            'comment':
            (f"Sum of {long_name} / **{varname}** on {hemisphere}ern hemisphere."
             ),
            'title':
            f"{long_name} (Seasonal Cycle)",
        }
        hemispheric_sum = helpers.set_metadata(hemispheric_sum, **metadata)
        hemispheric_sum = self.set_cell_methods(hemispheric_sum, hemisphere)
        self.save(hemispheric_sum, dst)