def validate(self):
    """
    Validate the recipe options for the regular time-series recipe.

    Mandatory: a non-empty 'time_crs', a 'time_start' and a 'time_step'.
    Optional settings ('tiling', 'band_names', 'wms_import', 'scale_levels')
    are filled in with defaults when absent.
    :raises RecipeValidationException: when a mandatory option is missing/empty
    """
    super(Recipe, self).validate()

    if "time_crs" not in self.options or self.options['time_crs'] == "":
        raise RecipeValidationException("No valid time crs provided")
    if 'time_start' not in self.options:
        raise RecipeValidationException(
            "No valid time start parameter provided")
    if 'time_step' not in self.options:
        raise RecipeValidationException(
            "You have to provide a valid time step indicating both the value and the unit of time"
        )

    # Defaults for the optional settings.
    self.options.setdefault('tiling', None)
    self.options.setdefault('band_names', None)
    if 'wms_import' in self.options:
        # Normalize whatever the ingredient file supplied to a real bool.
        self.options['wms_import'] = bool(self.options['wms_import'])
    else:
        self.options['wms_import'] = False
    self.options.setdefault('scale_levels', None)
def validate(self):
    """
    Validate the recipe options: a non-empty time CRS plus the 'time_tag'
    and 'time_format' parameters are all mandatory.
    :raises RecipeValidationException: when one of them is missing/empty
    """
    super(Recipe, self).validate()

    if "time_crs" not in self.options or self.options['time_crs'] == "":
        raise RecipeValidationException("No valid time crs provided")

    # Both remaining options are simple presence checks.
    required = (
        ('time_tag', "No valid time tag parameter provided"),
        ('time_format', "You have to provide a valid time format"),
    )
    for option_name, error_message in required:
        if option_name not in self.options:
            raise RecipeValidationException(error_message)
def validate(self):
    """
    Validate that at least one resolution was requested and that every
    requested resolution/level is one of the known subdatasets/levels.
    :raises RecipeValidationException: on an empty or unknown selection
    """
    super(Recipe, self).validate()

    if not self.resolutions:
        raise RecipeValidationException("No resolutions to import provided.")

    for resolution in self.resolutions:
        if resolution not in self.SUBDATASETS:
            raise RecipeValidationException(
                "Invalid resolution '{}' provided, expected a subset of {}".format(
                    resolution, self.SUBDATASETS))

    for level in self.levels:
        if level not in self.LEVELS:
            raise RecipeValidationException(
                "Invalid level '{}' provided, expected a subset of {}".format(
                    level, self.LEVELS))
def validate_base(self, ignore_no_files=False):
    """
    Validates the configuration and the input files
    :param bool ignore_no_files: if the extending recipe does not work with files,
    set this to true to skip the validation check for no files (used in wcs_extract recipe).
    :raises RecipeValidationException: on invalid endpoint/resolver/coverage id/tmp dir
    """
    session = self.session
    if session.get_wcs_service() is None or session.get_wcs_service() == "":
        raise RecipeValidationException("No valid wcs endpoint provided")
    if session.get_crs_resolver() is None or session.get_crs_resolver() == "":
        raise RecipeValidationException("No valid crs resolver provided")
    if session.get_coverage_id() is None or session.get_coverage_id() == "":
        raise RecipeValidationException("No valid coverage id provided")
    if not FileUtil.check_dir_writable(ConfigManager.tmp_directory):
        raise RecipeValidationException(
            "Cannot write to tmp directory '{}'".format(ConfigManager.tmp_directory))

    # Keep only files whose paths validate.
    checked_files = [f for f in session.get_files()
                     if FileUtil.validate_file_path(f.get_filepath())]
    if not ignore_no_files:
        # If no input file is available, exit wcst_import.
        FileUtil.validate_input_file_paths(checked_files)
    session.files = checked_files

    # Defaults for the optional settings.
    if 'wms_import' in self.options:
        self.options['wms_import'] = bool(self.options['wms_import'])
    else:
        self.options['wms_import'] = False
    self.options.setdefault('tiling', None)
    self.options.setdefault('scale_levels', None)

    if "import_order" not in self.options:
        self.options['import_order'] = None
    else:
        allowed = (AbstractToCoverageConverter.IMPORT_ORDER_ASCENDING,
                   AbstractToCoverageConverter.IMPORT_ORDER_DESCENDING)
        if self.options['import_order'] not in allowed:
            raise RecipeValidationException(
                "'import_order' option must be '{}' or '{}', given '{}'.".format(
                    AbstractToCoverageConverter.IMPORT_ORDER_ASCENDING,
                    AbstractToCoverageConverter.IMPORT_ORDER_DESCENDING,
                    self.options['import_order']))
def __init__(self, identifier, name, description, definition, nilReason="", nilValues=None, uomCode=None):
    """
    Definition of a band as provided by a user in an ingredient file.
    :param str identifier: the identifier of this band in the data provider
        (e.g. the gdal band id or the netcdf variable name); mandatory
    :param str name: the name of the band as shown in DescribeCoverage
        (normally band name = band identifier)
    :param str description: a description for the band
    :param str definition: the definition of a band
    :param str nilReason: the reason for which the value is a nil
    :param list[str] | None nilValues: a list of nil values
    :param str uomCode: the unit of measure
    :raises RecipeValidationException: when no identifier was specified
    """
    # NOTE: band identifier must be defined in ingredient file
    self.identifier = identifier
    if identifier is None or identifier == "":
        raise RecipeValidationException(
            "Band identifier of band name %s has not been specified." % name)
    self.name = name
    self.description = description
    self.definition = definition
    self.nilReason = nilReason
    self.nilValues = nilValues
    self.uomCode = uomCode
def run_recipe(self, session):
    """
    Look up the recipe named by the session, validate it and execute the
    import while a background thread reports progress.
    :param Session session: the session of the import
    :rtype BaseRecipe
    :raises RecipeValidationException: if the recipe name is not registered
    """
    recipe_name = session.get_recipe()['name']
    if recipe_name not in self.registry:
        raise RecipeValidationException("Recipe '" + session.get_recipe()['name'] + "' not found; "
                                        "if it's a custom recipe, please put it in the "
                                        "'$RMANHOME/share/rasdaman/wcst_import/recipes_custom' folder.")
    else:
        recipe = self.registry[session.get_recipe()['name']](session)
        log.title("Initialization")
        # FIX: str(map(...)) prints "<map object ...>" on Python 3 instead of
        # the file names; build a real list first (matches the newer
        # run_recipe variant in this file).
        log.info("Collected files: " + str([str(f) for f in session.get_files()[:10]]) + "...")
        log.title("\nValidation")
        recipe.validate()
        recipe.describe()
        if not session.is_automated():
            # NOTE(review): raw_input is Python 2 only — this file appears to
            # mix Python 2 and 3 era code; confirm target interpreter.
            raw_input("Press Enter to Continue...: ")
        # Progress reporting runs as a daemon thread alongside the import.
        t = Thread(target=run_status, args=(recipe,))
        t.daemon = True
        log.title("\nRunning")
        t.start()
        recipe.run()
        t.join()
        log.success("Recipe executed successfully")
def __raise_exception(self, missing_option):
    """
    Signal that a required option is absent from the ingredient file.
    :param str missing_option: name of the missing option
    :raises RecipeValidationException: always
    """
    raise RecipeValidationException("%s option is required" % missing_option)
def _generate_timeseries_tuples(self, limit=None):
    """
    Generate the timeseries tuples from the original files based on the recipe.
    And sort the files in order of time.
    :param int | None limit: maximum number of tuples to build; defaults to all files
    :rtype: list[TimeFileTuple]
    :raises RecipeValidationException: when neither metadata_tag nor filename
        is configured under 'time_parameter'
    """
    ret = []
    if limit is None:
        limit = len(self.session.get_files())

    time_format = None
    if 'datetime_format' in self.options['time_parameter']:
        time_format = self.options['time_parameter']['datetime_format']

    if 'metadata_tag' in self.options['time_parameter']:
        # Time is read from a metadata tag inside each file (via GDAL).
        mtag = self.options['time_parameter']['metadata_tag']['tag_name']
        for tfile in self.session.get_files():
            if len(ret) == limit:
                break
            valid_file = True
            try:
                gdal_file = GDALGmlUtil(tfile.get_filepath())
            except Exception as ex:
                # Unreadable file: optionally skip it instead of aborting.
                FileUtil.ignore_coverage_slice_from_file_if_possible(
                    tfile.get_filepath(), ex)
                valid_file = False
            if valid_file:
                dtutil = DateTimeUtil(gdal_file.get_datetime(mtag),
                                      time_format, self.options['time_crs'])
                ret.append(TimeFileTuple(dtutil, tfile))
    elif 'filename' in self.options['time_parameter'] and len(ret) < limit:
        # Time is extracted from the file name with a regex capture group.
        regex = self.options['time_parameter']['filename']['regex']
        group = int(self.options['time_parameter']['filename']['group'])
        for tfile in self.session.get_files():
            if len(ret) == limit:
                break
            dtutil = DateTimeUtil(
                re.search(regex, tfile.filepath).group(group),
                time_format, self.options['time_crs'])
            ret.append(TimeFileTuple(dtutil, tfile))
    else:
        raise RecipeValidationException(
            "No method to get the time parameter, you should either choose "
            "metadata_tag or filename.")

    # Currently, only sort by datetime to import coverage slices (default is ascending), option: to sort descending
    # NOTE: assumes 'import_order' was defaulted earlier (e.g. in validate_base) — TODO confirm
    if self.options[
            "import_order"] == AbstractToCoverageConverter.IMPORT_ORDER_DESCENDING:
        return sorted(ret, reverse=True)
    return sorted(ret)
def validate_pyramid_bases(self):
    """
    Check if pyramid bases coverage ids exist
    :raises RecipeValidationException: when a listed base coverage is missing
    """
    if 'pyramid_bases' not in self.options:
        return
    for base_coverage_id in self.options['pyramid_bases']:
        if not CoverageUtil(base_coverage_id).exists():
            raise RecipeValidationException(
                "Pyramid base coverage '" + base_coverage_id + "' does not exist locally'")
def _init_slicer_options(self, covopts):
    """
    Normalize the 'slicer' section of the coverage options: apply defaults
    for 'type' and 'pixelIsPoint', then merge any user-specified per-axis
    settings over the default axis options.
    :param dict covopts: coverage options containing a 'slicer' dict
    :raises RecipeValidationException: on an unknown axis name
    """
    slicer_opts = covopts['slicer']
    self._set_option(slicer_opts, 'type', 'gdal')
    self._set_option(slicer_opts, 'pixelIsPoint', False)

    merged_axes = self._init_axes_options()
    if 'axes' in slicer_opts:
        for axis_name, axis_conf in slicer_opts['axes'].items():
            if axis_name not in merged_axes:
                raise RecipeValidationException(
                    "Invalid axis '" + axis_name + "', expected one of ansi/E/N.")
            # User settings override the defaults key by key.
            for key, value in axis_conf.items():
                merged_axes[axis_name][key] = value
    slicer_opts['axes'] = merged_axes
def validate(self):
    """
    Validate the time-related options of this recipe: a time CRS and a
    'time_parameter' containing either a metadata_tag or a filename pattern,
    plus a datetime_format.
    :raises RecipeValidationException: when a mandatory time option is missing
    """
    super(Recipe, self).validate()

    if "time_crs" not in self.options or self.options['time_crs'] == "":
        raise RecipeValidationException("No valid time crs provided")
    if 'time_parameter' not in self.options:
        raise RecipeValidationException("No valid time parameter provided")

    time_parameter = self.options['time_parameter']
    has_metadata_tag = 'metadata_tag' in time_parameter
    has_filename = 'filename' in time_parameter

    if not (has_metadata_tag or has_filename):
        raise RecipeValidationException(
            "You have to provide either a metadata_tag or a filename pattern for the time parameter")
    if 'datetime_format' not in time_parameter:
        raise RecipeValidationException("No valid datetime_format provided")
    if has_metadata_tag and "tag_name" not in time_parameter['metadata_tag']:
        raise RecipeValidationException("No metadata tag to extract time from gdal was provided")
    if has_filename \
            and time_parameter['filename']['regex'] == "" \
            and time_parameter['filename']['group'] == "":
        raise RecipeValidationException("No filename regex and group to extract time from gdal was provided")

    if 'band_names' not in self.options:
        self.options['band_names'] = None
def validate_base(self, ignore_no_files=False):
    """
    Validates the configuration and the input files
    :param bool ignore_no_files: if the extending recipe does not work with files,
    set this to true to skip the validation check for no files
    :raises RecipeValidationException: on invalid configuration or unreadable files
    """
    session = self.session
    if session.get_wcs_service() is None or session.get_wcs_service() == "":
        raise RecipeValidationException("No valid wcs endpoint provided")
    if session.get_crs_resolver() is None or session.get_crs_resolver() == "":
        raise RecipeValidationException("No valid crs resolver provided")
    if session.get_coverage_id() is None or session.get_coverage_id() == "":
        raise RecipeValidationException("No valid coverage id provided")
    if ConfigManager.tmp_directory is None or (not os.access(ConfigManager.tmp_directory, os.W_OK)):
        raise RecipeValidationException("No valid tmp directory provided")
    if len(session.get_files()) == 0 and not ignore_no_files:
        raise RecipeValidationException(
            "No files provided. Check that the paths you provided are correct."
        )
    # Every input file must at least be readable.
    for input_file in session.get_files():
        if not os.access(input_file.get_filepath(), os.R_OK):
            raise RecipeValidationException(
                "File on path " + input_file.get_filepath() + " is not accessible")
def _read_axes(self, crs):
    """
    Returns a list of user axes extracted from the ingredients file
    :param str crs: the crs of the coverage
    :rtype: list[UserAxis]
    :raises RecipeValidationException: when a CRS axis has no definition in the
        ingredient's axes parameter
    """
    axes = self.options['coverage']['slicer']['axes']
    user_axes = []
    crs_axes = CRSUtil(crs).get_axes()
    default_order = 0
    for crs_axis in crs_axes:
        if crs_axis.label not in axes:
            raise RecipeValidationException(
                "Could not find a definition for axis " + crs_axis.label +
                " in the axes parameter.")
        axis = axes[crs_axis.label]
        # NOTE: 'max' and 'type' shadow Python builtins here; kept as-is.
        max = axis["max"] if "max" in axis else None
        if "type" in axis:
            type = axis["type"]
        elif crs_axis.is_date():
            type = UserAxisType.DATE
        else:
            type = UserAxisType.NUMBER
        # Grid order falls back to the axis' position in the CRS axis list.
        order = axis["gridOrder"] if "gridOrder" in axis else default_order
        irregular = axis["irregular"] if "irregular" in axis else False
        data_bound = axis["dataBound"] if "dataBound" in axis else True
        # for irregular axes we consider the resolution 1 / -1 as gmlcov requires resolution for all axis types,
        # even irregular
        if "resolution" in axis:
            resolution = axis["resolution"]
        else:
            resolution = 1
        default_order += 1
        # 'statements' may be a single string or a list; normalize to a list.
        if "statements" in axis:
            if isinstance(axis["statements"], list):
                statements = axis["statements"]
            else:
                statements = [axis["statements"]]
        else:
            statements = []
        if not irregular:
            user_axes.append(
                RegularUserAxis(crs_axis.label, resolution, order, axis["min"],
                                max, type, data_bound, statements=statements))
        else:
            user_axes.append(
                IrregularUserAxis(crs_axis.label, resolution, order, axis["min"],
                                  axis["directPositions"], max, type, data_bound,
                                  statements=statements))
    return user_axes
def __add_color_palette_table_to_global_metadata(self, metadata_dict, file_path):
    """
    If colorPaletteTable is added in ingredient file, then add it to coverage's global metadata
    :param dict metadata_dict: global metadata dict, updated in place
    :param str file_path: path of a data file used for automatic extraction
    """
    # "auto" extraction of the color table requires the gdal slicer.
    supported_recipe = (self.options['coverage']['slicer']['type'] == "gdal")
    color_palette_table = None
    if "metadata" in self.options['coverage']:
        if "colorPaletteTable" in self.options['coverage']['metadata']:
            value = self.options['coverage']['metadata']['colorPaletteTable']
            if value.strip() != "":
                if value == "auto" and not supported_recipe:
                    raise RecipeValidationException("colorPaletteTable auto is only supported"
                                                    " in general recipe with slicer's type: gdal.")
                elif value == "auto":
                    # Get colorPaletteTable automatically from first file
                    gdal_dataset = GDALGmlUtil(file_path)
                    color_palette_table = gdal_dataset.get_color_table()
                else:
                    # file_path can be relative path or full path
                    file_paths = FileUtil.get_file_paths_by_regex(self.session.get_ingredients_dir_path(), value)
                    if len(file_paths) == 0:
                        raise RecipeValidationException(
                            "Color palette table file does not exist, given: '" + value + "'.")
                    else:
                        # NOTE: the 'file_path' parameter is rebound here to the resolved table file.
                        file_path = file_paths[0]
                        # Add the content of colorPaletteTable to coverage's metadata
                        with open(file_path, 'r') as file_reader:
                            color_palette_table = file_reader.read()
        elif supported_recipe:
            # If colorPaletteTable is not mentioned in the ingredient, automatically fetch it
            gdal_dataset = GDALGmlUtil(file_path)
            color_palette_table = gdal_dataset.get_color_table()
    if color_palette_table is not None:
        metadata_dict["colorPaletteTable"] = color_palette_table
def run_recipe(self, session):
    """
    Resolve the recipe registered under the session's recipe name, validate
    and describe it, then run the import in blocking or non-blocking mode.
    :param Session session: the session of the import
    :rtype BaseRecipe
    :raises RecipeValidationException: if the recipe name is not registered
    """
    recipe_name = session.get_recipe()['name']
    if recipe_name not in self.registry:
        raise RecipeValidationException(
            "Recipe '" + session.get_recipe()['name'] + "' not found; "
            "if it's a custom recipe, please put it in the "
            "'$RMANHOME/share/rasdaman/wcst_import/recipes_custom' folder."
        )
    recipe = self.registry[session.get_recipe()['name']](session)
    log.title("Initialization")
    if recipe_name != virtual_coverage_recipe.RECIPE_NAME:
        # Only preview up to the first 10 collected files.
        shown = len(session.get_files())
        if shown > 10:
            shown = 10
        log.info("Collected first " + str(shown) + " files: "
                 + str([str(f) for f in session.get_files()[:10]]) + "...")
    log.title("\nValidation")
    recipe.validate()
    # Show what recipe and coverage are imported only once
    super(recipe.__class__, recipe).describe()
    if session.blocking is True:
        # Default blocking import mode (analyze all files -> import)
        self.__run_recipe(session, recipe)
    else:
        # Non-blocking mode: analyze then import one file at a time.
        for single_file in list(session.get_files()):
            session.files = [single_file]
            self.__run_recipe(session, recipe)
    log.success("Recipe executed successfully")
def validate_base(self, ignore_no_files=False):
    """
    Validates the configuration and the input files
    :param bool ignore_no_files: if the extending recipe does not work with files,
    set this to true to skip the validation check for no files (used in wcs_extract recipe).
    :raises RecipeValidationException: on invalid configuration
    """
    if self.session.get_wcs_service() is None or self.session.get_wcs_service() == "":
        raise RecipeValidationException("No valid wcs endpoint provided")
    if self.session.get_crs_resolver() is None or self.session.get_crs_resolver() == "":
        raise RecipeValidationException("No valid crs resolver provided")
    if self.session.get_coverage_id() is None or self.session.get_coverage_id() == "":
        raise RecipeValidationException("No valid coverage id provided")

    # Local import, presumably to avoid a circular module dependency — TODO confirm.
    import recipes.virtual_coverage.recipe as super_coverage
    if self.session.get_recipe_name() == super_coverage.Recipe.RECIPE_NAME:
        # NOTE: virtual_coverage recipe does not require any input files
        return

    if not FileUtil.check_dir_writable(ConfigManager.tmp_directory):
        raise RecipeValidationException(
            "Cannot write to tmp directory '{}'".format(ConfigManager.tmp_directory))

    # Keep only files whose paths validate.
    checked_files = []
    for file in self.session.get_files():
        if FileUtil.validate_file_path(file.get_filepath()):
            checked_files.append(file)
    if not ignore_no_files:
        # If no input file is available, exit wcst_import.
        FileUtil.validate_input_file_paths(checked_files)
    self.session.files = checked_files

    # Defaults for optional options.
    if 'wms_import' not in self.options:
        self.options['wms_import'] = False
    else:
        self.options['wms_import'] = bool(self.options['wms_import'])
    if 'tiling' not in self.options:
        self.options['tiling'] = None
    if 'scale_levels' not in self.options:
        self.options['scale_levels'] = None
    if 'scale_factors' not in self.options:
        self.options['scale_factors'] = None

    # The two downscaling mechanisms are mutually exclusive.
    if self.options['scale_levels'] is not None \
            and self.options['scale_factors'] is not None:
        raise RecipeValidationException(
            "Only one of 'scale_levels' or 'scale_factors' "
            "setting can exist in the ingredients file.")

    if self.options['scale_factors'] is not None:
        # as scale_factors and scale_levels are only valid when initializing a new coverage
        cov = CoverageUtil(self.session.get_coverage_id())
        if not cov.exists():
            for obj in self.options['scale_factors']:
                if 'coverage_id' not in obj or 'factors' not in obj:
                    raise RecipeValidationException(
                        "All elements of 'scale_factors' list must contain "
                        "'coverage_id' and 'factors' properties")
                coverage_id = obj['coverage_id']
                cov = CoverageUtil(coverage_id)
                if cov.exists():
                    raise RecipeValidationException(
                        "Downscaled level coverage '" + coverage_id + "' already exists, "
                        "please use a different 'coverage_id' in 'scale_factors' list"
                    )

    self.validate_pyramid_members()
    self.validate_pyramid_bases()

    if "import_order" in self.options:
        if self.options['import_order'] != AbstractToCoverageConverter.IMPORT_ORDER_ASCENDING \
                and self.options['import_order'] != AbstractToCoverageConverter.IMPORT_ORDER_DESCENDING:
            error_message = "'import_order' option must be '{}' or '{}', given '{}'.".\
                format(AbstractToCoverageConverter.IMPORT_ORDER_ASCENDING,
                       AbstractToCoverageConverter.IMPORT_ORDER_DESCENDING,
                       self.options['import_order'])
            raise RecipeValidationException(error_message)
    else:
        self.options['import_order'] = None
def validate(self):
    """
    Implementation of the base recipe validate method
    :raises RecipeValidationException: on any invalid coverage/slicer/axes/metadata option
    """
    super(Recipe, self).validate()

    if 'coverage' not in self.options:
        raise RecipeValidationException("No coverage parameter supplied in the recipe parameters.")
    else:
        # NOTE: only general coverage support this grid coverage type
        if 'grid_coverage' not in self.options['coverage']:
            self.options['coverage']['grid_coverage'] = False
        else:
            self.options['coverage']['grid_coverage'] = bool(self.options['coverage']['grid_coverage'])

    if 'crs' not in self.options['coverage']:
        raise RecipeValidationException("No crs parameter in the coverage parameter of the recipe parameters.")
    if 'slicer' not in self.options['coverage']:
        raise RecipeValidationException("No slicer parameter in the coverage parameter of the recipe parameters")
    if 'type' not in self.options['coverage']['slicer']:
        raise RecipeValidationException("No type parameter in the slicer parameter of the recipe parameters")
    # netCDF and GRIB slicers cannot auto-detect bands, so 'bands' is mandatory for them.
    if 'bands' not in self.options['coverage']['slicer'] \
            and (self.options['coverage']['slicer']['type'] == GRIBToCoverageConverter.RECIPE_TYPE
                 or self.options['coverage']['slicer']['type'] == NetcdfToCoverageConverter.RECIPE_TYPE):
        raise RecipeValidationException(
            "The netcdf/grib slicer requires the existence of a band parameter inside the slicer parameter.")
    if 'axes' not in self.options['coverage']['slicer']:
        raise RecipeValidationException("No axes parameter in the slicer parameter of the recipe parameters")

    for name, axis in self.options['coverage']['slicer']['axes'].items():
        if "min" not in axis:
            raise RecipeValidationException("No min value given for axis " + name)
        if "type" in axis and axis["type"] == "ansidate":
            """backwards compatibility, support axis type 'ansidate' after moving to 'date'"""
            axis["type"] = UserAxisType.DATE
        if "type" in axis and not UserAxisType.valid_type(axis["type"]):
            raise RecipeValidationException("Invalid axis type \"" + axis[
                "type"] + "\" for axis " + name + ". Only \"" + UserAxisType.DATE +
                "\" and \"" + UserAxisType.NUMBER + "\" are supported.")
        if "resolution" not in axis and "irregular" in axis and not axis["irregular"]:
            raise RecipeValidationException("No resolution value given for regular axis " + name)
        if "directPositions" not in axis and "irregular" in axis and axis["irregular"]:
            log.warning("No direct positions found for irregular axis, assuming slice.")
            # NOTE: if directPositions was not specified, it means the file does not contains the irregular axis
            # so the irregular axis must be fetched from file name and considered as slice with coefficient is [0]
            # However, [0] could be miscalculated with arrow so set it to [None] and return [0] later
            axis["directPositions"] = AbstractToCoverageConverter.DIRECT_POSITIONS_SLICING

    if "metadata" in self.options['coverage'] and "type" not in self.options['coverage']['metadata']:
        raise RecipeValidationException("No type given for the metadata parameter.")
    if "metadata" in self.options['coverage'] and "type" in self.options['coverage']['metadata']:
        if not ExtraMetadataSerializerFactory.is_encoding_type_valid(self.options['coverage']['metadata']['type']):
            raise RecipeValidationException(
                "No valid type given for the metadata parameter, accepted values are xml and json")

    if "metadata" in self.options['coverage']:
        supported_recipe = (self.options['coverage']['slicer']['type'] == "netcdf"
                            or self.options['coverage']['slicer']['type'] == "gdal")
        if not supported_recipe:
            # global metadata auto is supported for netCDF/GDAL recipe
            if "global" in self.options['coverage']['metadata']:
                # NOTE: if global is not specified in netCDF ingredient file, it is considered auto
                # which means extract all the global attributes of netcdf file to create global metadata
                if self.options['coverage']['metadata']['global'] == "auto":
                    raise RecipeValidationException(
                        "Global auto metadata only supported in general recipe with slicer's type: netcdf/gdal.")
            # bands metadata auto is supported for netCDF recipe
            # NOTE(review): this bands-auto check appears nested under the netcdf/gdal
            # supported_recipe guard, so a gdal slicer with bands=="auto" would not be
            # rejected here even though the message says only netcdf supports it —
            # TODO confirm intended nesting against upstream.
            if "bands" in self.options['coverage']['metadata']:
                bands_metadata = self.options['coverage']['metadata']['bands']
                if bands_metadata == "auto":
                    raise RecipeValidationException(
                        "Bands auto metadata only supported in general recipe with slicer's type: netcdf.")
                elif type(bands_metadata) is dict:
                    # Check if one band of bands specified with "auto"
                    for key, value in bands_metadata.items():
                        if value == "auto":
                            raise RecipeValidationException(
                                "Band auto metadata only supported in general recipe with slicer's type: netcdf, "
                                "violated for band '" + key + "'.")
def _read_axes(self, crs):
    """
    Returns a list of user axes extracted from the ingredients file
    :param str crs: the crs of the coverage
    :rtype: list[UserAxis]
    :raises RecipeValidationException: when a CRS axis has no matching configuration
    :raises RuntimeException: on invalid sliceGroupSize/resolution settings or axis count mismatch
    """
    axes_configurations = self.options['coverage']['slicer']['axes']
    user_axes = []
    crs_axes = CRSUtil(crs).get_axes(self.session.coverage_id, axes_configurations)
    default_order = 0
    for index, crs_axis in enumerate(crs_axes):
        exist = False
        for axis_label, axis_configuration_dicts in axes_configurations.items():
            # If axis label configured in ingredient file does not exist in CRS definition,
            # then "crsOrder" configuration must match with the crs axis order.
            if crs_axis.label == axis_label \
                    or ("crsOrder" in axis_configuration_dicts
                        and int(axis_configuration_dicts["crsOrder"]) == index):
                # Rename the CRS axis to the user's configured label.
                crs_axes[index].label = axis_label
                exist = True
                break
        if not exist:
            raise RecipeValidationException(
                "Could not find a definition for axis '" + crs_axis.label +
                "' in the axes parameter.")
        axis = axes_configurations[crs_axis.label]
        # NOTE: 'max' and 'type' shadow Python builtins here; kept as-is.
        max = axis["max"] if "max" in axis else None
        if "type" in axis:
            type = axis["type"]
        elif crs_axis.is_date_axis():
            type = UserAxisType.DATE
        else:
            type = UserAxisType.NUMBER
        order = axis["gridOrder"] if "gridOrder" in axis else default_order
        irregular = axis["irregular"] if "irregular" in axis else False
        data_bound = axis["dataBound"] if "dataBound" in axis else True
        # for irregular axes we consider the resolution 1 / -1 as gmlcov requires resolution for all axis types,
        # even irregular
        if "resolution" in axis:
            resolution = axis["resolution"]
        else:
            resolution = 1
        default_order += 1
        # 'statements' may be a single string or a list; normalize to a list.
        if "statements" in axis:
            if isinstance(axis["statements"], list):
                statements = axis["statements"]
            else:
                statements = [axis["statements"]]
        else:
            statements = []
        slice_group_size = None
        if "sliceGroupSize" in axis:
            if not irregular:
                raise RuntimeException("Cannot set 'sliceGroupSize' for regular axis '{}' in ingredient file.".format(crs_axis.label))
            else:
                # Irregular axis with dataBound:false only can use sliceGroupSize (!)
                value_str = axis["sliceGroupSize"]
                if "dataBound" not in axis or axis["dataBound"] is True:
                    raise RuntimeException("Option 'sliceGroupSize' can be set only for irregular axis '{}'"
                                           " with \"dataBound\": false in ingredient file.".format(crs_axis.label))
                try:
                    slice_group_size = float(value_str)
                    if slice_group_size <= 0:
                        raise ValueError
                except ValueError:
                    raise RuntimeException("Option 'sliceGroupSize' for irregular axis '{}'"
                                           " in ingredient file must be positive number. Given '{}'.".format(crs_axis.label, value_str))
        if not irregular:
            user_axes.append(
                RegularUserAxis(crs_axis.label, resolution, order, axis["min"],
                                max, type, data_bound, statements))
        else:
            # NOTE: irregular axis cannot set any resolution != 1
            if int(resolution) != IrregularUserAxis.DEFAULT_RESOLUTION:
                raise RuntimeException("Cannot set 'resolution' value for irregular axis '{}' in ingredient file."
                                       " Given '{}'.".format(crs_axis.label, resolution))
            user_axes.append(
                IrregularUserAxis(crs_axis.label, resolution, order, axis["min"],
                                  axis["directPositions"], max, type, data_bound,
                                  statements, slice_group_size))
    # Every CRS axis must have exactly one configuration and vice versa.
    number_of_specified_axes = len(axes_configurations.items())
    number_of_crs_axes = len(crs_axes)
    if number_of_specified_axes != number_of_crs_axes:
        raise RuntimeException("Number of axes in the coverage CRS ({}) does not match "
                               "the number of axes specified in the ingredients file ({}).".format(number_of_crs_axes, number_of_specified_axes))
    return user_axes
def _read_bands(self):
    """
    Returns a user band extracted from the ingredients if specified (required for netCDF/GRIB)
    :rtype: list[UserBand]
    :raises RuntimeError: for multi-band grib input or missing 'bands' on netCDF/GRIB
    :raises RecipeValidationException: for an invalid band name
    """
    if "bands" in self.options['coverage']['slicer']:
        bands = self.options['coverage']['slicer']['bands']
        number_of_bands = len(bands)
        # NOTE: rasdaman supports 1 band grib only to import
        recipe_type = self.options['coverage']['slicer']['type']
        if recipe_type == GRIBToCoverageConverter.RECIPE_TYPE and number_of_bands > 1:
            raise RuntimeError("Only single band grib files are currently supported. "
                               "Given " + str(number_of_bands) + " bands in ingredient file.")
        ret_bands = []
        i = 0
        for band in bands:
            identifier = self._read_or_empty_string(band, "identifier")
            if recipe_type == GdalToCoverageConverter.RECIPE_TYPE:
                # NOTE: for old ingredients with wrong defined "identifier" with band name instead of index 0-based
                if not identifier.isdigit():
                    identifier = str(i)
            band_name = self._read_or_empty_string(band, "name")
            if band_name != "":
                if not is_band_name_valid(band_name):
                    raise RecipeValidationException("Specified band name is not valid. "
                                                    "Given: '" + band_name + "'. "
                                                    "Hint: it must match this pattern '" + BAND_NAME_PATTERN + "'.")
            ret_bands.append(UserBand(
                identifier,
                self._read_or_empty_string(band, "name"),
                self._read_or_empty_string(band, "description"),
                self._read_or_empty_string(band, "definition"),
                self._read_or_empty_string(band, "nilReason"),
                # nilValue is a comma-separated string in the ingredient file.
                self._read_or_empty_string(band, "nilValue").split(","),
                self._read_or_empty_string(band, "uomCode")
            ))
            i += 1
        return ret_bands
    else:
        if self.options['coverage']['slicer']['type'] == GdalToCoverageConverter.RECIPE_TYPE:
            # If gdal does not specify bands in ingredient file, just fetch all bands from first file
            for file in self.session.get_files():
                try:
                    # NOTE(review): sibling code passes tfile.get_filepath() to GDALGmlUtil;
                    # here the file object itself is passed — confirm GDALGmlUtil accepts both.
                    gdal_util = GDALGmlUtil(file)
                    gdal_fields = gdal_util.get_fields_range_type()
                    ret_bands = []
                    for field in gdal_fields:
                        # uomCode is omitted here; presumably UserBand defaults it — TODO confirm.
                        ret_bands.append(UserBand(
                            field.field_name,
                            field.field_name,
                            None,
                            None,
                            None,
                            field.nill_values
                        ))
                    # Only the first readable file is inspected.
                    break
                except Exception as e:
                    if ConfigManager.skip == True:
                        # NOTE(review): if every file fails while skip is on, 'ret_bands'
                        # is never bound and the return below raises NameError — TODO confirm.
                        pass
                    else:
                        raise e
            return ret_bands
        else:
            raise RuntimeError("'bands' must be specified in ingredient file for netCDF/GRIB recipes.")