Example #1
0
    def run(self, context):
        """Create a polar sea-ice map for one hemisphere and save it to dst.

        Context arguments:
            src: input file(s) with monthly data
            dst: output netCDF file (must end in '.nc')
            hemisphere: 'north' or 'south'
            varname: must be a key of meta_dict

        Raises ScriptEngineTaskArgumentInvalidError for invalid arguments.
        """
        src = self.getarg('src', context)
        dst = self.getarg('dst', context)
        hemisphere = self.getarg('hemisphere', context)
        varname = self.getarg('varname', context)
        self.log_info(
            f"Create {varname} map for {hemisphere}ern hemisphere at {dst}.")
        self.log_debug(f"Source file(s): {src}")

        if varname not in meta_dict:
            msg = (
                f"'varname' must be one of the following: {meta_dict.keys()} "
                f"Diagnostic will not be treated, returning now.")
            self.log_error(msg)
            # Pass the message on so the raised exception is informative
            # (consistent with the other tasks in this module).
            raise ScriptEngineTaskArgumentInvalidError(msg)
        if hemisphere not in ('north', 'south'):
            msg = (
                f"'hemisphere' must be 'north' or 'south' but is '{hemisphere}'."
                f"Diagnostic will not be treated, returning now.")
            self.log_error(msg)
            raise ScriptEngineTaskArgumentInvalidError(msg)
        self.check_file_extension(dst)

        month_cube = helpers.load_input_cube(src, varname)
        # Remove auxiliary time coordinate
        month_cube.remove_coord(month_cube.coord('time', dim_coords=False))
        month_cube = month_cube[0]
        time_coord = month_cube.coord('time')
        time_coord.bounds = self.get_time_bounds(time_coord)
        # Broadcast the 1D/2D latitude points over the cube's shape so the
        # opposite hemisphere can be masked out element-wise.
        latitudes = np.broadcast_to(
            month_cube.coord('latitude').points, month_cube.shape)
        if hemisphere == "north":
            month_cube.data = np.ma.masked_where(latitudes < 0,
                                                 month_cube.data)
        else:
            month_cube.data = np.ma.masked_where(latitudes > 0,
                                                 month_cube.data)

        month_cube.long_name = f"{meta_dict[varname]} {hemisphere} {self.get_month(time_coord)}"
        # Mask exact-zero values as missing data.
        month_cube.data = np.ma.masked_equal(month_cube.data, 0)

        month_cube.data = month_cube.data.astype('float64')
        comment = f"Simulation Average of {meta_dict[varname]} / **{varname}** on {hemisphere}ern hemisphere."
        month_cube = helpers.set_metadata(
            month_cube,
            title=f'{month_cube.long_name} (Climatology)',
            comment=comment,
            map_type='polar ice sheet',
        )
        time_coord.climatological = True
        month_cube = self.set_cell_methods(month_cube, hemisphere)

        self.save(month_cube, dst)
 def check_file_extension(self, dst):
     """Check that *dst* has a valid netCDF ('.nc') file extension.

     Returns None on success; logs and raises
     ScriptEngineTaskArgumentInvalidError otherwise.
     """
     if not dst.endswith(".nc"):
         msg = (
             f"{dst} does not end in valid netCDF file extension. "
             f"Diagnostic will not be treated, returning now."
         )
         self.log_error(msg)
         # Include the message so callers see why the argument is invalid.
         raise ScriptEngineTaskArgumentInvalidError(msg)
    def run(self, context):
        """Create an annual-mean climatology map for one atmosphere variable.

        Context arguments: 'src' (input files), 'dst' (output netCDF file)
        and 'grib_code' (ECMWF GRIB1 parameter code).
        """
        src = self.getarg('src', context)
        dst = self.getarg('dst', context)
        grib_code = self.getarg('grib_code', context)
        self.log_info(
            f"Create map for atmosphere variable {grib_code} at {dst}.")
        # Skip files whose names end in '000000' (initial-state output).
        src = [path for path in src if not path.endswith('000000')]
        self.log_debug(f"Source file(s): {src}")

        self.check_file_extension(dst)

        update_grib_mappings()
        # Translate the GRIB1 code (table 128, ECMWF) to a CF phenomenon.
        cf_phenomenon = iris_grib.grib_phenom_translation.grib1_phenom_to_cf_info(
            128,  # table
            98,  # institution: ECMWF
            grib_code)
        if not cf_phenomenon:
            msg = f"CF Phenomenon for {grib_code} not found. Update local table?"
            self.log_error(msg)
            # Forward the message so the exception itself is informative.
            raise ScriptEngineTaskArgumentInvalidError(msg)
        self.log_debug(f"Getting variable {cf_phenomenon.standard_name}")
        leg_cube = helpers.load_input_cube(src, cf_phenomenon.standard_name)

        # Each time point closes an interval of 'step' units (presumably
        # hours, given the '* 3600' below — TODO confirm); attach bounds.
        time_coord = leg_cube.coord('time')
        step = time_coord.points[1] - time_coord.points[0]
        time_coord.bounds = np.array([[point - step, point]
                                      for point in time_coord.points])

        leg_mean = leg_cube.collapsed(
            'time',
            iris.analysis.MEAN,
        )
        leg_mean.coord('time').climatological = True
        # Replace the automatic cell methods with explicit climatology ones.
        leg_mean.cell_methods = ()
        leg_mean.add_cell_method(
            iris.coords.CellMethod('mean within years',
                                   coords='time',
                                   intervals=f'{step * 3600} seconds'))
        leg_mean.add_cell_method(
            iris.coords.CellMethod('mean over years', coords='time'))
        leg_mean.add_cell_method(
            iris.coords.CellMethod('point', coords=['latitude', 'longitude']))

        leg_mean.long_name = leg_mean.long_name.replace("_", " ")

        leg_mean = helpers.set_metadata(
            leg_mean,
            title=f'{leg_mean.long_name.title()} (Annual Mean Climatology)',
            comment=f"Simulation Average of **{grib_code}**.",
            map_type='global atmosphere',
        )
        if leg_mean.units.name == 'kelvin':
            leg_mean.convert_units('degC')

        self.save(leg_mean, dst)
Example #4
0
    def run(self, context):
        """Find files or directories under 'path' that match 'pattern'.

        Context arguments: 'path' (search root), 'pattern' (glob, default
        '*'), 'type' ('file' or 'dir', default 'file'), 'depth' (maximum
        recursion depth, default -1 for unlimited) and 'set' (context key
        for the result list, default 'result').
        """
        path = os.path.normpath(self.getarg('path', context))
        path_depth = path.count(os.path.sep)

        pattern = self.getarg('pattern', context, default='*')

        find_type = self.getarg('type', context, default='file')
        if find_type not in ('file', 'dir'):
            msg = (f'Invalid "type" argument '
                   f'(must be either "file" or "dir"): {find_type}')
            self.log_error(msg)
            raise ScriptEngineTaskArgumentInvalidError(msg)

        max_depth = self.getarg('depth', context, default=-1)
        try:
            max_depth = max(-1, int(max_depth))
        except (ValueError, TypeError):
            msg = f'Invalid "depth" argument (not an integer): {max_depth}'
            self.log_error(msg)
            raise ScriptEngineTaskArgumentInvalidError(msg)

        self.log_info(f'Find {find_type} with pattern "{pattern}" '
                      f'in {path} (with max depth={max_depth})')
        result = []
        for root, dirs, files in os.walk(path):
            candidates = files if find_type == 'file' else dirs
            result.extend(
                os.path.normpath(os.path.join(root, name))
                for name in candidates
                if fnmatch.fnmatch(name, pattern))
            # Prune the walk in place once the depth limit is reached.
            if (max_depth >= 0
                    and root.count(os.path.sep) >= path_depth + max_depth):
                del dirs[:]
        if result:
            self.log_debug(f'Found: {result}')
        else:
            self.log_debug('Nothing found')
        result_key = self.getarg('set', context, default='result')
        self.log_debug(f'Store result under context key "{result_key}"')
        context[result_key] = result
 def save(self, dst, **kwargs):
     """Save a scalar diagnostic in a YAML file.

     Writes all non-None keyword arguments, plus a 'diagnostic_type'
     marker, to *dst*. Raises ScriptEngineTaskArgumentInvalidError if
     *dst* does not have a YAML file extension.
     """
     self.log_debug(f"Saving scalar diagnostic to {dst}")
     # Drop entries without a value; they carry no information.
     filtered_dict = {k: v for k, v in kwargs.items() if v is not None}
     filtered_dict['diagnostic_type'] = 'scalar'
     if dst.endswith((".yml", ".yaml")):
         with open(dst, 'w') as outfile:
             # Preserve insertion order of the diagnostic entries.
             yaml.dump(filtered_dict, outfile, sort_keys=False)
     else:
         msg = (f"{dst} does not end in valid YAML file extension. "
                f"Diagnostic will not be saved.")
         self.log_error(msg)
         # Include the message so callers see why the argument is invalid.
         raise ScriptEngineTaskArgumentInvalidError(msg)
    def run(self, context):
        """Create an annual-mean time map for one atmosphere variable.

        Context arguments: 'src' (input files), 'dst' (output netCDF file)
        and 'grib_code' (ECMWF GRIB1 parameter code). The result keeps a
        length-one time dimension so maps of successive legs concatenate.
        """
        src = self.getarg('src', context)
        dst = self.getarg('dst', context)
        grib_code = self.getarg('grib_code', context)
        # Skip files whose names end in '000000' (initial-state output).
        src = [path for path in src if not path.endswith('000000')]
        self.log_info(
            f"Create time map for atmosphere variable {grib_code} at {dst}.")
        self.log_debug(f"Source file(s): {src}")

        self.check_file_extension(dst)

        update_grib_mappings()
        # Translate the GRIB1 code (table 128, ECMWF) to a CF phenomenon.
        cf_phenomenon = iris_grib.grib_phenom_translation.grib1_phenom_to_cf_info(
            128,  # table
            98,  # institution: ECMWF
            grib_code)
        if not cf_phenomenon:
            msg = f"CF Phenomenon for {grib_code} not found. Update local table?"
            self.log_error(msg)
            # Forward the message so the exception itself is informative.
            raise ScriptEngineTaskArgumentInvalidError(msg)
        self.log_debug(f"Getting variable {cf_phenomenon.standard_name}")
        leg_cube = helpers.load_input_cube(src, cf_phenomenon.standard_name)

        leg_cube.long_name = leg_cube.long_name.replace("_", " ")

        if leg_cube.units.name == 'kelvin':
            leg_cube.convert_units('degC')

        # Each time point closes an interval of 'step' units; attach bounds.
        time_coord = leg_cube.coord('time')
        step = time_coord.points[1] - time_coord.points[0]
        time_coord.bounds = np.array([[point - step, point]
                                      for point in time_coord.points])

        leg_mean = leg_cube.collapsed(
            'time',
            iris.analysis.MEAN,
        )
        # Promote time from scalar to dimension coordinate
        leg_mean = iris.util.new_axis(leg_mean, 'time')

        leg_mean = self.set_cell_methods(leg_mean, step)

        leg_mean = helpers.set_metadata(
            leg_mean,
            title=f'{leg_mean.long_name.title()} (Annual Mean Map)',
            comment=f"Leg Mean of **{grib_code}**.",
            map_type="global atmosphere",
        )

        self.save(leg_mean, dst)
Example #7
0
    def __init__(self, arguments=None):
        """Create a task, optionally setting attributes from *arguments*.

        Each item of *arguments* becomes an instance attribute, unless the
        name is already taken (reserved), in which case
        ScriptEngineTaskArgumentInvalidError is raised.
        """
        # Unique id for this task instance.
        self._identifier = uuid.uuid4()

        if arguments is not None:
            try:
                Task.check_arguments(arguments)
            except ScriptEngineTaskArgumentError as e:
                self.log_error(e)
                raise
            for name, value in arguments.items():
                # Refuse to shadow existing attributes or methods.
                if hasattr(self, name):
                    msg = f'Invalid task argument (reserved): {name}'
                    self.log_error(msg)
                    raise ScriptEngineTaskArgumentInvalidError(msg)
                setattr(self, name, value)
        self.log_debug(f'Created task: {self}')
Example #8
0
 def check_arguments(cls, arguments):
     """Validate *arguments* against the class' argument declarations.

     Raises ScriptEngineTaskArgumentInvalidError if any argument appears
     in 'cls._invalid_arguments', and ScriptEngineTaskArgumentMissingError
     if any name listed in 'cls._required_arguments' is absent. Returns
     None when all checks pass.
     """
     logger = logging.getLogger('se.task')
     invalid = getattr(cls, '_invalid_arguments', ())
     required = getattr(cls, '_required_arguments', ())
     for name in arguments:
         if name in invalid:
             logmsg = (f'Invalid argument "{name}" found while '
                       f'trying to create {cls.__name__} task')
             logger.error(logmsg, extra={'id': None})
             raise ScriptEngineTaskArgumentInvalidError(logmsg)
     for name in required:
         if name not in arguments:
             logmsg = (f'Missing required argument "{name}" while '
                       f'trying to create {cls.__name__} task')
             logger.error(logmsg, extra={'id': None})
             raise ScriptEngineTaskArgumentMissingError(logmsg)
Example #9
0
def load_input_cube(src, varname):
    """Load input file(s) into one cube."""
    with warnings.catch_warnings():
        # Suppress psu warning
        warnings.filterwarnings(
            action='ignore',
            message="Ignoring netCDF variable",
            category=UserWarning,
        )
        month_cubes = iris.load(src, varname)
    if not month_cubes:
        raise ScriptEngineTaskArgumentInvalidError(
            f"varname {varname} not found in {src}")
    if len(month_cubes) == 1:
        # Single cube: strip the per-file attributes and return it as-is.
        return remove_unique_attributes(month_cubes[0])
    # Several cubes: harmonize attributes, then merge into one cube.
    # 'timeStamp' and 'uuid' would cause ConcatenateError
    equalise_attributes(month_cubes)
    return month_cubes.concatenate_cube()
    def run(self, context):
        """Create a global-average time series for one atmosphere variable.

        Context arguments: 'src' (input files), 'dst' (output netCDF file)
        and 'grib_code' (ECMWF GRIB1 parameter code). Averages first over
        the leg, then over space with area weights, and appends one data
        point to the time series at 'dst'.
        """
        src = self.getarg('src', context)
        dst = self.getarg('dst', context)
        grib_code = self.getarg('grib_code', context)
        # Skip files whose names end in '000000' (initial-state output).
        src = [path for path in src if not path.endswith('000000')]
        self.log_info(
            f"Create time series for atmosphere variable {grib_code} at {dst}."
        )
        self.log_debug(f"Source file(s): {src}")

        self.check_file_extension(dst)

        update_grib_mappings()
        # Translate the GRIB1 code (table 128, ECMWF) to a CF phenomenon.
        cf_phenomenon = iris_grib.grib_phenom_translation.grib1_phenom_to_cf_info(
            128,  # table
            98,  # institution: ECMWF
            grib_code)
        if not cf_phenomenon:
            msg = f"CF Phenomenon for {grib_code} not found. Update local table?"
            self.log_error(msg)
            # Forward the message so the exception itself is informative.
            raise ScriptEngineTaskArgumentInvalidError(msg)
        self.log_debug(f"Getting variable {cf_phenomenon.standard_name}")
        leg_cube = helpers.load_input_cube(src, cf_phenomenon.standard_name)

        # Each time point closes an interval of 'step' units; attach bounds.
        time_coord = leg_cube.coord('time')
        step = time_coord.points[1] - time_coord.points[0]
        time_coord.bounds = np.array([[point - step, point]
                                      for point in time_coord.points])

        self.log_debug("Averaging over the leg.")
        leg_mean = leg_cube.collapsed(
            'time',
            iris.analysis.MEAN,
        )

        area_weights = self.get_area_weights(leg_mean)
        self.log_debug("Averaging over space.")
        with warnings.catch_warnings():
            # Suppress warning about insufficient metadata.
            warnings.filterwarnings(
                'ignore',
                "Collapsing a non-contiguous coordinate.",
                UserWarning,
            )
            spatial_mean = leg_mean.collapsed(
                ['latitude', 'longitude'],
                iris.analysis.MEAN,
                weights=area_weights,
            )

        # Replace the automatic cell methods with explicit ones.
        spatial_mean.cell_methods = ()
        spatial_mean.add_cell_method(
            iris.coords.CellMethod('mean',
                                   coords='time',
                                   intervals=f'{step * 3600} seconds'))
        spatial_mean.add_cell_method(
            iris.coords.CellMethod('mean', coords='area'))

        # Promote time from scalar to dimension coordinate
        spatial_mean = iris.util.new_axis(spatial_mean, 'time')

        spatial_mean.long_name = spatial_mean.long_name.replace("_", " ")

        comment = (f"Global average time series of **{grib_code}**. "
                   f"Each data point represents the (spatial and temporal) "
                   f"average over one leg.")
        spatial_mean = helpers.set_metadata(
            spatial_mean,
            title=f'{spatial_mean.long_name} (Annual Mean)',
            comment=comment,
        )

        if spatial_mean.units.name == 'kelvin':
            spatial_mean.convert_units('degC')
        self.save(spatial_mean, dst)