def _get_update_crs(self, slice, crs):
    """
    Returns the crs corresponding to the axes that are data bound
    :param slice: the slice for which the gml should be created
    :param crs: the crs of the coverage
    :return: String
    """
    crs_axes = []
    for axis_subset in slice.axis_subsets:
        if axis_subset.coverage_axis.data_bound:
            crs_axes.append(axis_subset.coverage_axis.axis.crs_axis)
    crs_util = CRSUtil(crs)
    return crs_util.get_crs_for_axes(crs_axes)

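# A minimal, hypothetical sketch of the data-bound filtering idea above, using
# plain namedtuples as stand-ins for the real wcst_import classes (AxisSubset,
# CoverageAxis, Axis are simplified here and are not the actual API):
from collections import namedtuple

CrsAxis = namedtuple("CrsAxis", "uri label")
Axis = namedtuple("Axis", "crs_axis")
CoverageAxis = namedtuple("CoverageAxis", "axis data_bound")
AxisSubset = namedtuple("AxisSubset", "coverage_axis")

def data_bound_crs_axes(axis_subsets):
    # keep only the CRS axes whose coverage axis actually bounds data
    return [s.coverage_axis.axis.crs_axis
            for s in axis_subsets if s.coverage_axis.data_bound]
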
def _get_slices(self, crs):
    # Let's first extract all the axes from our crs
    crs_axes = CRSUtil(crs).get_axes(self.session.coverage_id)
    # Prepare a list container for our slices
    slices = []
    # Iterate over the files and create a slice for each one
    for infile in self.session.get_files():
        # We need to compute the exact position in time and space in which to place this slice.
        # For the space coordinates we can use the GDAL helper to extract them for us.
        # The helper will return a list of subsets based on the crs axes that we extracted
        # and will fill the coordinates for the ones that it can (the easting and northing axes).
        subsets = GdalAxisFiller(crs_axes, GDALGmlUtil(infile.get_filepath())).fill()
        # Now we must fill the time axis as well and indicate the position in time
        for subset in subsets:
            # Find the time axis
            if subset.coverage_axis.axis.crs_axis.is_time_axis():
                # Set the time position for it. Our recipe extracts it from a GDAL tag provided by the user.
                # The datetime value needs to be quoted (e.g. "2015-01")
                subset.interval.low = '"' + GDALGmlUtil(infile.get_filepath()).get_datetime(self.options["time_tag"]) + '"'
        slices.append(Slice(subsets, FileDataProvider(infile)))
    return slices

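# A hedged illustration of where such a time tag can come from: GDAL exposes
# dataset metadata items, e.g. the standard TIFFTAG_DATETIME tag of a GeoTIFF.
# The tag name and file path below are only examples, not fixed by this recipe.
from osgeo import gdal

def read_time_tag(file_path, tag="TIFFTAG_DATETIME"):
    dataset = gdal.Open(file_path)
    if dataset is None:
        raise IOError("GDAL could not open " + file_path)
    # returns None if the tag is absent from the dataset metadata
    return dataset.GetMetadataItem(tag)
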
def _get_slices(self, crs):
    """
    Returns the slices for the collection of files given
    """
    files = self.session.get_files()
    crs_axes = CRSUtil(crs).get_axes(self.session.coverage_id)
    slices = []
    count = 1
    for file in files:
        # NOTE: don't process any file already imported according to *.resume.json, as that is just wasted time
        if not self.resumer.is_file_imported(file.filepath):
            timer = Timer()
            # print which file is being analyzed
            FileUtil.print_feedback(count, len(files), file.filepath)
            if not FileUtil.validate_file_path(file.filepath):
                continue
            valid_coverage_slice = True
            try:
                subsets = GdalAxisFiller(crs_axes, GDALGmlUtil(file.get_filepath())).fill()
            except Exception as ex:
                # If skip: true then just ignore this file, else raise the exception
                FileUtil.ignore_coverage_slice_from_file_if_possible(file.get_filepath(), ex)
                valid_coverage_slice = False
            if valid_coverage_slice:
                slices.append(Slice(subsets, FileDataProvider(file)))
            timer.print_elapsed_time()
            count += 1
    return slices

def _get_coverage_slices(self, crs, gdal_coverage_converter):
    """
    Returns the slices for the collection of files given
    """
    crs_axes = CRSUtil(crs).get_axes(self.session.coverage_id)
    slices_dict = self.create_dict_of_slices(self.session.import_overviews)
    timeseries = self._generate_timeseries_tuples()
    count = 1
    for tpair in timeseries:
        file = tpair.file
        file_path = tpair.file.get_filepath()
        timer = Timer()
        # print which file is being analyzed
        FileUtil.print_feedback(count, len(timeseries), file_path)
        if not FileUtil.validate_file_path(file_path):
            continue
        valid_coverage_slice = True
        gdal_file = GDALGmlUtil(file.get_filepath())
        try:
            subsets = GdalAxisFiller(crs_axes, gdal_file).fill(True)
            subsets = self._fill_time_axis(tpair, subsets)
        except Exception as ex:
            # If skip: true then just ignore this file, else raise the exception
            FileUtil.ignore_coverage_slice_from_file_if_possible(file_path, ex)
            valid_coverage_slice = False
        if valid_coverage_slice:
            # Generate the local metadata string for the current coverage slice
            self.evaluator_slice = EvaluatorSliceFactory.get_evaluator_slice(self.recipe_type, tpair.file)
            local_metadata = gdal_coverage_converter._generate_local_metadata(subsets, self.evaluator_slice)
            if self.session.import_overviews_only is False:
                slices_dict["base"].append(Slice(subsets, FileDataProvider(tpair.file), local_metadata))
            # Then, create slices for the overviews selected by the user
            for overview_index in self.session.import_overviews:
                subsets_overview = self.create_subsets_for_overview(subsets, overview_index, gdal_file)
                slices_dict[str(overview_index)].append(
                    Slice(subsets_overview, FileDataProvider(file), local_metadata))
        timer.print_elapsed_time()
        count += 1
    return slices_dict

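# For context, a hedged sketch of how GDAL exposes the overview (pyramid)
# levels that overview_index above refers to; deriving the decimation factor
# from the raster sizes is an assumption about the data, not part of this recipe:
from osgeo import gdal

def list_overview_factors(file_path):
    dataset = gdal.Open(file_path)
    band = dataset.GetRasterBand(1)
    factors = []
    for i in range(band.GetOverviewCount()):
        overview = band.GetOverview(i)
        factors.append(round(band.XSize / float(overview.XSize)))
    return factors  # e.g. [2, 4, 8] for a typical pyramid
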
def __init__(self, datetime, dt_format=None, time_crs=None):
    """
    :param str datetime: the datetime value
    :param str dt_format: the datetime format; if none is given we'll try to guess it
    """
    try:
        if dt_format is None:
            self.datetime = arrow.get(datetime)
        else:
            self.datetime = arrow.get(datetime, dt_format)
        if time_crs is None:
            self.time_crs_code = self.CRS_CODE_ANSI_DATE
        else:
            tmp_crs = CRSUtil(time_crs)
            self.time_crs_code = tmp_crs.get_crs_code()
    except ParserError as pe:
        dt_format_err = "auto" if dt_format is None else dt_format
        raise RuntimeException("Failed to parse the date " + datetime +
                               " using format " + dt_format_err + ": " + str(pe))

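# A brief, hedged illustration of the arrow parsing used above (arrow is the
# third-party library this module relies on; the input values are examples only):
import arrow

t1 = arrow.get("2015-01-08T10:00:00Z")   # format guessed from ISO 8601 input
t2 = arrow.get("2015-01", "YYYY-MM")     # explicit format token string
print(t1.isoformat(), t2.year)
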
def _get_coverages(self):
    """
    Returns the list of coverages to be used for the importer
    """
    gdal_dataset = GDALGmlUtil.open_gdal_dataset_from_any_file(self.session.get_files())
    crs = CRSUtil.get_compound_crs([self.options['time_crs'], gdal_dataset.get_crs()])

    general_recipe = GeneralRecipe(self.session)
    global_metadata_fields = general_recipe._global_metadata_fields()
    local_metadata_fields = general_recipe._local_metadata_fields()
    sentence_evaluator = SentenceEvaluator(ExpressionEvaluatorFactory())

    gdal_coverage_converter = GdalToCoverageConverter(self.resumer, self.session.get_default_null_values(),
                                                      self.recipe_type, sentence_evaluator,
                                                      self.session.get_coverage_id(),
                                                      None, self.session.get_files(), crs, None, None,
                                                      global_metadata_fields, local_metadata_fields,
                                                      None, None, general_recipe._metadata_type(),
                                                      None, None, self.session)

    coverage_slices_dict = self._get_coverage_slices(crs, gdal_coverage_converter)
    fields = GdalRangeFieldsGenerator(gdal_dataset, self.options['band_names']).get_range_fields()

    global_metadata = None
    if len(coverage_slices_dict["base"]) > 0:
        global_metadata = gdal_coverage_converter._generate_global_metadata(coverage_slices_dict["base"][0],
                                                                            self.evaluator_slice)

    results = []
    base_coverage_id = self.session.get_coverage_id()
    for key, value in coverage_slices_dict.items():
        if key == "base":
            # base coverage
            coverage = Coverage(base_coverage_id, coverage_slices_dict[key], fields, crs,
                                gdal_dataset.get_band_gdal_type(), self.options['tiling'], global_metadata)
        else:
            # overview coverage (key = overview_index)
            coverage_id = create_coverage_id_for_overview(base_coverage_id, key)
            coverage = Coverage(coverage_id, coverage_slices_dict[key], fields, crs,
                                gdal_dataset.get_band_gdal_type(), self.options['tiling'], global_metadata,
                                base_coverage_id, key)
        results.append(coverage)
    return results

def __init__(self, datetime, dt_format=None, time_crs=None):
    """
    :param str datetime: the datetime value
    :param str dt_format: the datetime format; if none is given we'll try to guess it
    """
    self.init_cache()
    try:
        if dt_format is None:
            self.datetime = arrow.get(datetime)
        else:
            self.datetime = arrow.get(datetime, dt_format)
        if time_crs is None:
            self.time_crs_code = self.CRS_CODE_ANSI_DATE
        else:
            tmp_crs = CRSUtil(time_crs)
            self.time_crs_code = tmp_crs.get_crs_code()
    except ParserError as pe:
        dt_format_err = "auto" if dt_format is None else dt_format
        raise RuntimeException("Failed to parse the date " + datetime +
                               " using format " + dt_format_err + ": " + str(pe))

def _get_coverage(self):
    # Get the crs of one of the images using a GDAL helper class; we assume all images share the same CRS
    gdal_dataset = GDALGmlUtil(self.session.get_files()[0].get_filepath())
    # Get the crs of the coverage by compounding the two CRSs
    crs = CRSUtil.get_compound_crs([gdal_dataset.get_crs(), self.options['time_crs']])
    fields = GdalRangeFieldsGenerator(gdal_dataset).get_range_fields()
    pixel_type = gdal_dataset.get_band_gdal_type()
    coverage_id = self.session.get_coverage_id()
    slices = self._get_slices(crs)
    return Coverage(coverage_id, slices, fields, crs, pixel_type)

def _get_coverage(self):
    """
    Returns the coverage to be used for the importer
    """
    gdal_dataset = GDALGmlUtil(self.session.get_files()[0].get_filepath())
    crs = CRSUtil.get_compound_crs([gdal_dataset.get_crs(), self.options['time_crs']])
    slices = self._get_slices(crs)
    fields = GdalRangeFieldsGenerator(gdal_dataset, self.options['band_names']).get_range_fields()
    coverage = Coverage(self.session.get_coverage_id(), slices, fields, crs,
                        gdal_dataset.get_band_gdal_type(), self.options['tiling'])
    return coverage

def _resolve_crs(self, crs):
    """
    Resolves a crs string to a URL
    :param str crs: the crs string
    :rtype: str
    """
    crs_resolver = self.session.get_crs_resolver() + "crs/"
    if not crs.startswith("http"):
        crs_parts = [crs_resolver + part for part in crs.split("@")]
        return CRSUtil.get_compound_crs(crs_parts)
    return crs

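# A hedged, standalone sketch of the resolution above: shorthand axes separated
# by "@" are expanded against a SECORE resolver and combined into a crs-compound
# URL. The resolver base and the crs-compound?1=...&2=... format follow the
# SECORE convention; the inputs below are examples only.
def resolve_crs_sketch(crs, resolver="http://localhost:8080/def/crs/"):
    if crs.startswith("http"):
        return crs
    parts = [resolver + part for part in crs.split("@")]
    if len(parts) == 1:
        return parts[0]
    query = "&".join("{}={}".format(i + 1, p) for i, p in enumerate(parts))
    return resolver.replace("crs/", "crs-compound?") + query

# e.g. resolve_crs_sketch("EPSG/0/4326@OGC/0/AnsiDate")
# -> http://localhost:8080/def/crs-compound?1=.../EPSG/0/4326&2=.../OGC/0/AnsiDate
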
def __get_epsg_xy_axes_labels(self):
    """
    Returns a tuple of axis labels for the X and Y axes
    """
    axes_labels = CRSUtil.get_axis_labels_from_single_crs(self.epsg_xy_crs)
    axis_type1 = CRSAxis.get_axis_type_by_name(axes_labels[0])
    if axis_type1 == CRSAxis.AXIS_TYPE_X:
        # XY order (e.g. EPSG:3857)
        return axes_labels[0], axes_labels[1]
    else:
        # YX order (e.g. EPSG:4326): the order needs to be swapped
        return axes_labels[1], axes_labels[0]

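# A hedged cross-check of the XY/YX distinction using pyproj (not a dependency
# of this module; shown only to illustrate why EPSG:4326 needs the swap):
from pyproj import CRS

for code in (3857, 4326):
    axes = CRS.from_epsg(code).axis_info
    print(code, [a.direction for a in axes])
# 3857 -> ['east', 'north'] (XY); 4326 -> ['north', 'east'] (YX)
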
def _get_crs_axis_by_user_axis_name(self, user_axis_name):
    """
    Returns the crs axis from the list by user_axis_name
    :param str user_axis_name: the label of the user axis to look up
    :return: crs_axis (None if no axis with that label exists)
    """
    crs_axes = CRSUtil(self.crs).get_axes(self.coverage_id)
    for crs_axis in crs_axes:
        if crs_axis.label == user_axis_name:
            return crs_axis
    return None

def _read_axes(self, crs):
    """
    Returns a list of user axes extracted from the ingredients file
    :param str crs: the crs of the coverage
    :rtype: list[UserAxis]
    """
    axes = self.options['coverage']['slicer']['axes']
    user_axes = []
    crs_axes = CRSUtil(crs).get_axes()
    default_order = 0
    for crs_axis in crs_axes:
        if crs_axis.label not in axes:
            raise RecipeValidationException(
                "Could not find a definition for axis " + crs_axis.label + " in the axes parameter.")
        axis = axes[crs_axis.label]
        max = axis["max"] if "max" in axis else None
        if "type" in axis:
            type = axis["type"]
        elif crs_axis.is_date():
            type = UserAxisType.DATE
        else:
            type = UserAxisType.NUMBER
        order = axis["gridOrder"] if "gridOrder" in axis else default_order
        irregular = axis["irregular"] if "irregular" in axis else False
        data_bound = axis["dataBound"] if "dataBound" in axis else True
        # for irregular axes we consider the resolution 1 / -1, as gmlcov requires a resolution
        # for all axis types, even irregular ones
        if "resolution" in axis:
            resolution = axis["resolution"]
        else:
            resolution = 1
        default_order += 1
        if "statements" in axis:
            if isinstance(axis["statements"], list):
                statements = axis["statements"]
            else:
                statements = [axis["statements"]]
        else:
            statements = []
        if not irregular:
            user_axes.append(RegularUserAxis(crs_axis.label, resolution, order, axis["min"], max, type,
                                             data_bound, statements=statements))
        else:
            user_axes.append(IrregularUserAxis(crs_axis.label, resolution, order, axis["min"],
                                               axis["directPositions"], max, type, data_bound,
                                               statements=statements))
    return user_axes

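# For orientation, a hedged example of what the 'axes' option parsed above might
# look like once the ingredients JSON is loaded into Python. The labels and
# ${...} expressions are illustrative, not taken from a real ingredients file:
EXAMPLE_AXES_OPTION = {
    "Long": {"min": "${gdal:minX}", "max": "${gdal:maxX}",
             "gridOrder": 1, "resolution": "${gdal:resolutionX}"},
    "Lat":  {"min": "${gdal:minY}", "max": "${gdal:maxY}",
             "gridOrder": 0, "resolution": "${gdal:resolutionY}"},
    "ansi": {"min": "${netcdf:variable:time:min}", "gridOrder": 2,
             "type": "date", "irregular": True,
             "directPositions": "${netcdf:variable:time}"},
}
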
def __filter_invalid_geo_bounds(self, slices_dict):
    """
    Filter out any coverage slices (scenes) which have invalid lat and long bounds in EPSG:4326
    """
    results = OrderedDict()
    for key, slices in slices_dict.items():
        results[key] = []
        for slice in slices:
            input_file = slice.data_provider.file.filepath
            axis_subsets = slice.axis_subsets
            is_valid = True
            for axis_subset in axis_subsets:
                axis = axis_subset.coverage_axis.axis
                geo_lower_bound = axis.low
                geo_upper_bound = axis.high
                axis_label = axis.label
                if axis.crs_axis.uri.endswith(self.EPSG_4326):
                    if CRSUtil.is_latitude_axis(axis_label):
                        is_valid = geo_lower_bound >= -90 and geo_upper_bound <= 90
                    elif CRSUtil.is_longitude_axis(axis_label):
                        is_valid = geo_lower_bound >= -180 and geo_upper_bound <= 180
                if not is_valid:
                    log.warn("File '" + input_file + "' has invalid lat or long axis geo bounds"
                             " in EPSG:4326 CRS; ignored for further processing.")
                    break
            if is_valid:
                results[key].append(slice)
    return results

def _get_convertors(self):
    """
    Returns a map of coverage id -> GdalToCoverageConverter
    """
    convertors = {}
    band_data_type = self.DEFAULT_BAND_DATA_TYPE
    if self.product == self.SLC_PRODUCT:
        band_data_type = self.SLC_BAND_DATA_TYPE
    for file in self.session.get_files():
        # Check that this file still exists when preparing to import
        if not FileUtil.validate_file_path(file.get_filepath()):
            continue
        # Check which coverage id this file belongs to
        modebeam, polarisation = self._get_modebeam_polarisation(file.filepath)
        cov_id = self._get_coverage_id(self.coverage_id, modebeam, polarisation)
        conv = self._get_convertor(convertors, cov_id)
        file_pair = FilePair(file.filepath, file.filepath)
        conv.files = [file_pair]
        crs_axes = CRSUtil(conv.crs).get_axes(self.coverage_id)
        # Each file encodes a different datetime in its name
        evaluator_slice = EvaluatorSliceFactory.get_evaluator_slice(GdalToCoverageConverter.RECIPE_TYPE, file)
        conv.data_type = band_data_type
        slices_dict = conv._create_coverage_slices(crs_axes, evaluator_slice)
        slices_dict = self.__filter_invalid_geo_bounds(slices_dict)
        if conv.coverage_slices == {}:
            conv.coverage_slices = slices_dict
        else:
            for key, val in slices_dict.items():
                conv.coverage_slices[key] += slices_dict[key]
        if len(conv.coverage_slices) > 0 and len(conv.coverage_slices["base"]) > 0:
            first_slice = conv.coverage_slices["base"][0]
            # This needs one available file to extract metadata from later
            conv.files = [first_slice.data_provider.file]
    return convertors

def _get_coverage(self):
    """
    Returns the coverage to be used for the importer
    """
    gdal_dataset = GDALGmlUtil(self.session.get_files()[0].get_filepath())
    crs = CRSUtil.get_compound_crs([gdal_dataset.get_crs(), self.options['time_crs']])
    slices = self._get_slices(crs)
    fields = GdalRangeFieldsGenerator(gdal_dataset, self.options['band_names']).get_range_fields()
    coverage = Coverage(self.session.get_coverage_id(), slices, fields, crs,
                        gdal_dataset.get_band_gdal_type(), self.options['tiling'])
    return coverage

def get_crs(self):
    """
    Returns the CRS associated with this dataset. If none is found, the default for the session is returned
    :rtype: str
    """
    import osgeo.osr as osr
    wkt = self.gdal_dataset.GetProjection()
    spatial_ref = osr.SpatialReference()
    spatial_ref.ImportFromWkt(wkt)
    crs = ConfigManager.default_crs
    if spatial_ref.GetAuthorityName(None) is not None:
        crs = CRSUtil.get_crs_url(spatial_ref.GetAuthorityName(None),
                                  spatial_ref.GetAuthorityCode(None))
    return crs

def to_coverage(self):
    """
    Returns a Coverage from all the importing files (gdal|grib|netcdf)
    :rtype: Coverage
    """
    crs_axes = CRSUtil(self.crs).get_axes()
    slices = self._slices(crs_axes)
    # generate coverage extra_metadata from the ingredients file
    metadata = self._metadata(slices)
    # Evaluate all the SWE bands' metadata; each file should have the same SWE band metadata, so the first file is enough
    self._evaluate_swe_bands_metadata(self.files[0], self.bands)
    coverage = Coverage(self.coverage_id, slices, self._range_fields(), self.crs,
                        self._data_type(), self.tiling, metadata)
    return coverage

def get_crs(self):
    """
    Returns the CRS associated with this dataset. If none is found, the default for the session is returned
    :rtype: str
    """
    wkt = self.gdal_dataset.GetProjection()
    spatial_ref = self._get_spatial_ref(wkt)
    crs = ConfigManager.default_crs
    if spatial_ref.GetAuthorityName(None) is not None:
        crs = CRSUtil.get_crs_url(spatial_ref.GetAuthorityName(None),
                                  spatial_ref.GetAuthorityCode(None))
    if crs is None:
        raise RuntimeException("Cannot implicitly detect an EPSG code from the WKT of the input file. "
                               "Please explicitly specify the CRS in the ingredients file (option \"default_crs\").")
    return crs

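# A hedged, standalone version of the detection above using the GDAL/OSR Python
# bindings directly (the file path is an example; detect_epsg is a hypothetical helper):
from osgeo import gdal, osr

def detect_epsg(file_path):
    dataset = gdal.Open(file_path)
    spatial_ref = osr.SpatialReference()
    spatial_ref.ImportFromWkt(dataset.GetProjection())
    authority = spatial_ref.GetAuthorityName(None)   # e.g. "EPSG", or None
    code = spatial_ref.GetAuthorityCode(None)        # e.g. "4326", or None
    return "{}:{}".format(authority, code) if authority else None
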
def _get_coverage_slices(self, crs, gdal_coverage_converter):
    """
    Returns the slices for the collection of files given
    """
    crs_axes = CRSUtil(crs).get_axes(self.session.coverage_id)
    slices = []
    timeseries = self._generate_timeseries_tuples()
    count = 1
    for tpair in timeseries:
        file_path = tpair.file.get_filepath()
        # NOTE: don't process any file already imported according to *.resume.json, as that is just wasted time
        if not self.resumer.is_file_imported(file_path):
            timer = Timer()
            # print which file is being analyzed
            FileUtil.print_feedback(count, len(timeseries), file_path)
            if not FileUtil.validate_file_path(file_path):
                continue
            valid_coverage_slice = True
            try:
                subsets = GdalAxisFiller(crs_axes, GDALGmlUtil(file_path)).fill(True)
                subsets = self._fill_time_axis(tpair, subsets)
            except Exception as ex:
                # If skip: true then just ignore this file, else raise the exception
                FileUtil.ignore_coverage_slice_from_file_if_possible(file_path, ex)
                valid_coverage_slice = False
            if valid_coverage_slice:
                # Generate the local metadata string for the current coverage slice
                self.evaluator_slice = EvaluatorSliceFactory.get_evaluator_slice(self.recipe_type, tpair.file)
                local_metadata = gdal_coverage_converter._generate_local_metadata(subsets, self.evaluator_slice)
                slices.append(Slice(subsets, FileDataProvider(tpair.file), local_metadata))
            timer.print_elapsed_time()
            count += 1
    return slices

def to_coverages(self, coverage_slices_dict=None):
    """
    Returns a list of coverages from all the importing files (gdal|grib|netcdf)
    :rtype: Array[Coverage]
    """
    crs_axes = CRSUtil(self.crs).get_axes(self.coverage_id)
    if coverage_slices_dict is None:
        # Build the dict of coverage slices from the input files
        coverage_slices_dict = self._create_coverage_slices(crs_axes)
    global_metadata = None
    first_coverage_slice = None
    for coverage_level, slices in coverage_slices_dict.items():
        if len(slices) > 0:
            first_coverage_slice = slices[0]
            break
    if first_coverage_slice is not None:
        # generate coverage extra_metadata from the ingredients file, based on the first input file
        # of the first coverage slice
        global_metadata = self._generate_global_metadata(first_coverage_slice)
    # Evaluate all the SWE bands' metadata; each file should have the same SWE band metadata, so the first file is enough
    self._evaluate_swe_bands_metadata(self.files[0], self.bands)
    results = []
    base_coverage_id = self.coverage_id
    for key, value in coverage_slices_dict.items():
        slices = coverage_slices_dict[key]
        if key == "base":
            coverage = Coverage(base_coverage_id, slices, self._range_fields(), self.crs,
                                self._data_type(), self.tiling, global_metadata)
        else:
            # overview coverage (key = overview_index)
            coverage_id = create_coverage_id_for_overview(self.coverage_id, key)
            coverage = Coverage(coverage_id, slices, self._range_fields(), self.crs,
                                self._data_type(), self.tiling, global_metadata,
                                base_coverage_id, key)
        results.append(coverage)
    return results

def _get_slices(self, gdal_dataset):
    """
    Returns the slices for the collection of files given
    """
    files = self.session.get_files()
    crs = gdal_dataset.get_crs()
    crs_axes = CRSUtil(crs).get_axes()
    slices = []
    count = 1
    for file in files:
        # print which file is being analyzed
        FileUtil.print_feedback(count, len(files), file.filepath)
        subsets = GdalAxisFiller(crs_axes, GDALGmlUtil(file.get_filepath())).fill()
        slices.append(Slice(subsets, FileDataProvider(file)))
        count += 1
    return slices

def _get_slices(self, crs):
    """
    Returns the slices for the collection of files given
    """
    crs_axes = CRSUtil(crs).get_axes()
    slices = []
    timeseries = self._generate_timeseries_tuples()
    count = 1
    for tpair in timeseries:
        # print which file is being analyzed
        FileUtil.print_feedback(count, len(timeseries), tpair.file.filepath)
        subsets = GdalAxisFiller(crs_axes, GDALGmlUtil(tpair.file.get_filepath())).fill()
        subsets = self._fill_time_axis(tpair, subsets)
        slices.append(Slice(subsets, FileDataProvider(tpair.file)))
        count += 1
    return slices

def _get_coverage(self):
    # Get the crs of one of the images using a GDAL helper class; we assume all images share the same CRS.
    # With skip: true, keep trying files until one can be opened.
    gdal_dataset = None
    for file in self.session.get_files():
        try:
            file_path = file.get_filepath()
            gdal_dataset = GDALGmlUtil(file_path)
            break
        except Exception as e:
            if ConfigManager.skip:
                pass
            else:
                raise e
    if gdal_dataset is None:
        raise RuntimeException("None of the input files could be opened with GDAL.")
    # Get the crs of the coverage by compounding the two CRSs
    crs = CRSUtil.get_compound_crs([gdal_dataset.get_crs(), self.options['time_crs']])
    fields = GdalRangeFieldsGenerator(gdal_dataset).get_range_fields()
    pixel_type = gdal_dataset.get_band_gdal_type()
    coverage_id = self.session.get_coverage_id()
    slices = self._get_slices(crs)
    return Coverage(coverage_id, slices, fields, crs, pixel_type)

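# A hedged sketch of the same "first readable file" pattern with plain GDAL,
# where gdal.Open returns None instead of raising (the helper is hypothetical):
from osgeo import gdal

def open_first_readable(paths):
    for path in paths:
        dataset = gdal.Open(path)
        if dataset is not None:
            return dataset
    raise IOError("none of the given files could be opened with GDAL")
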
def _get_convertors(self):
    """
    Returns a map of coverage id -> GdalToCoverageConverter
    """
    convertors = {}
    band_data_type = self.DEFAULT_BAND_DATA_TYPE
    if self.product == self.SLC_PRODUCT:
        band_data_type = self.SLC_BAND_DATA_TYPE
    for file in self.session.get_files():
        # Check that this file still exists when preparing to import
        if not FileUtil.validate_file_path(file.get_filepath()):
            continue
        # Check which coverage id this file belongs to
        modebeam, polarisation = self._get_modebeam_polarisation(file.filepath)
        cov_id = self._get_coverage_id(self.coverage_id, modebeam, polarisation)
        # Skip the file if it was already imported according to coverage_id.resume.json
        self.resumer = Resumer(cov_id)
        if self.resumer.is_file_imported(file.filepath):
            continue
        conv = self._get_convertor(convertors, cov_id)
        file_pair = FilePair(file.filepath, file.filepath)
        conv.files = [file_pair]
        crs_axes = CRSUtil(conv.crs).get_axes(self.coverage_id)
        # Each file encodes a different datetime in its name
        evaluator_slice = EvaluatorSliceFactory.get_evaluator_slice(GdalToCoverageConverter.RECIPE_TYPE, file)
        conv.data_type = band_data_type
        slices = conv._create_coverage_slices(crs_axes, evaluator_slice)
        conv.coverage_slices += slices
    return convertors

def _read(self):
    """
    Reads the metadata from the DescribeCoverage response and creates the virtual coverage object
    :rtype: Coverage
    """
    generated_file_path = None
    try:
        xmlstr = self.description()
        root = etree.fromstring(xmlstr)
        crs = self._get_crs(root)
        crs_axes = CRSUtil(crs).get_axes(self.coverage_id)
        range_type = self._get_range_types(root)
        raster_coords = self._get_raster_coords(root)
        geo_coords = self._get_geo_coords(root)
        coverage_id = self._get_coverage_id(root)
        resolutions = self._get_resolutions(root, crs_axes)
        origin = self._get_origin(root)
        coverage_axes = self._get_coverage_axes(geo_coords, raster_coords, origin, crs_axes, resolutions)
        intervals = self._get_intervals(coverage_axes, self.partitioning_scheme)
        slices = self._get_slices(coverage_axes, intervals)
        pixel_data_type, generated_file_path = self._get_data_type(slices[0])
        coverage = Coverage(coverage_id, slices, range_type, crs, pixel_data_type)
        self.coverage = coverage
    except IOError as e:
        if generated_file_path is not None:
            os.remove(generated_file_path)
        raise RuntimeException("Could not read the coverage description for url: " +
                               self._get_description_url() +
                               ". Check that the url is accessible and try again. More details: " + str(e))
    except XMLSyntaxError as e:
        if generated_file_path is not None:
            os.remove(generated_file_path)
        raise RuntimeException("Could not decode the xml description for url " +
                               self._get_description_url() +
                               ". Check that the url is correct and try again. More details: " + str(e))

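# A hedged, minimal illustration of parsing a WCS DescribeCoverage response with
# lxml as done above. The URL handling and the namespace map are assumptions
# matching common WCS 2.0 deployments, not taken from this class:
import urllib.request
from lxml import etree

def describe_coverage_ids(url):
    xmlstr = urllib.request.urlopen(url).read()
    root = etree.fromstring(xmlstr)
    ns = {"wcs": "http://www.opengis.net/wcs/2.0"}
    return [el.text for el in root.findall(".//wcs:CoverageId", ns)]
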
def _get_coverage(self):
    """
    Returns the coverage to be used for the importer
    """
    gdal_dataset = GDALGmlUtil.open_gdal_dataset_from_any_file(self.session.get_files())
    crs = CRSUtil.get_compound_crs([self.options['time_crs'], gdal_dataset.get_crs()])

    general_recipe = GeneralRecipe(self.session)
    global_metadata_fields = general_recipe._global_metadata_fields()
    local_metadata_fields = general_recipe._local_metadata_fields()
    sentence_evaluator = SentenceEvaluator(ExpressionEvaluatorFactory())

    gdal_coverage_converter = GdalToCoverageConverter(self.resumer, self.session.get_default_null_values(),
                                                      self.recipe_type, sentence_evaluator,
                                                      self.session.get_coverage_id(),
                                                      None, self.session.get_files(), crs, None, None,
                                                      global_metadata_fields, local_metadata_fields,
                                                      None, None, general_recipe._metadata_type(),
                                                      None, None)

    coverage_slices = self._get_coverage_slices(crs, gdal_coverage_converter)
    fields = GdalRangeFieldsGenerator(gdal_dataset, self.options['band_names']).get_range_fields()

    global_metadata = None
    if len(coverage_slices) > 0:
        global_metadata = gdal_coverage_converter._generate_global_metadata(coverage_slices[0],
                                                                            self.evaluator_slice)

    coverage = Coverage(self.session.get_coverage_id(), coverage_slices, fields, crs,
                        gdal_dataset.get_band_gdal_type(), self.options['tiling'], global_metadata)
    return coverage

def to_coverage(self, coverage_slices=None):
    """
    Returns a Coverage from all the importing files (gdal|grib|netcdf)
    :rtype: Coverage
    """
    crs_axes = CRSUtil(self.crs).get_axes(self.coverage_id)
    if coverage_slices is None:
        # Build the list of coverage slices from the input files
        coverage_slices = self._create_coverage_slices(crs_axes)
    global_metadata = None
    if len(coverage_slices) > 0:
        first_coverage_slice = coverage_slices[0]
        # generate coverage extra_metadata from the ingredients file, based on the first input file
        # of the first coverage slice
        global_metadata = self._generate_global_metadata(first_coverage_slice)
    # Evaluate all the SWE bands' metadata; each file should have the same SWE band metadata, so the first file is enough
    self._evaluate_swe_bands_metadata(self.files[0], self.bands)
    coverage = Coverage(self.coverage_id, coverage_slices, self._range_fields(), self.crs,
                        self._data_type(), self.tiling, global_metadata)
    return coverage

def get_running_crs_resolver(self, crs_resolvers, embedded_petascope_port):
    """
    From the list of SECORE resolvers configured in petascope.properties, find the first running one
    to be used by wcst_import
    :param string[] crs_resolvers: list of SECORE URLs
    :return: string (the running SECORE URL)
    """
    i = 0
    crs_resolvers_tmp = []
    for url_prefix in crs_resolvers:
        url_prefix = url_prefix.strip()
        # NOTE: if secore_urls=internal in petascope.properties, wcst_import uses the internal SECORE
        # inside petascope with the sub-endpoint /rasdaman/def
        if url_prefix == INTERNAL_SECORE or url_prefix == DEFAULT_SECORE_URL:
            url_prefix = INTERNAL_SECORE_URL
            crs_resolvers_tmp.append(url_prefix)
            # Also cover the case where petascope runs on a port other than 8080
            if embedded_petascope_port != "8080":
                url_prefix = "http://localhost:" + embedded_petascope_port + INTERNAL_SECORE_URL_CONTEXT_PATH
                crs_resolvers_tmp.append(url_prefix)
        else:
            crs_resolvers_tmp.append(url_prefix)
    for url_prefix in crs_resolvers_tmp:
        try:
            url_prefix = url_prefix.strip()
            test_url = url_prefix + "/crs/EPSG/0/4326"
            from util.crs_util import CRSUtil
            Session.RUNNING_SECORE_URL = url_prefix
            CRSUtil.get_axis_labels_from_single_crs(test_url)
            # e.g: http://localhost:8080/def
            return url_prefix
        except Exception as ex:
            log.warn("CRS resolver '" + url_prefix + "' is not working.")
            if i < len(crs_resolvers) - 1:
                log.warn("Trying with another fallback resolver...")
            i += 1
    # none of the resolvers work, so assume there is a SECORE embedded in petascope
    internal_secore_url = self.wcs_service.replace("/rasdaman/ows", "/rasdaman/def").strip()
    try:
        test_url = internal_secore_url + "/crs/EPSG/0/4326"
        from util.crs_util import CRSUtil
        Session.RUNNING_SECORE_URL = internal_secore_url
        CRSUtil.get_axis_labels_from_single_crs(test_url)
        log.warn("None of the configured secore_urls in petascope.properties respond to requests. "
                 "wcst_import will use the internal CRS resolver at endpoint '" + internal_secore_url + "'. "
                 "Hint: set secore_urls=internal in petascope.properties to suppress this warning.")
        return internal_secore_url
    except Exception as ex:
        raise RuntimeException("No configured CRS resolvers in petascope.properties work. Given: " +
                               ",".join(crs_resolvers))

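# A hedged sketch of the same liveness probe using the 'requests' library
# (not used by this module; the timeout value and helper name are assumptions):
import requests

def is_resolver_alive(url_prefix, timeout=5):
    try:
        response = requests.get(url_prefix + "/crs/EPSG/0/4326", timeout=timeout)
        return response.status_code == 200
    except requests.RequestException:
        return False
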
def _read_axes(self, crs):
    """
    Returns a list of user axes extracted from the ingredients file
    :param str crs: the crs of the coverage
    :rtype: list[UserAxis]
    """
    axes_configurations = self.options['coverage']['slicer']['axes']
    user_axes = []
    crs_axes = CRSUtil(crs).get_axes(self.session.coverage_id, axes_configurations)
    default_order = 0
    for index, crs_axis in enumerate(crs_axes):
        exist = False
        for axis_label, axis_configuration_dicts in axes_configurations.items():
            # If an axis label configured in the ingredients file does not exist in the CRS definition,
            # then the "crsOrder" configuration must match the crs axis order.
            if crs_axis.label == axis_label \
                    or ("crsOrder" in axis_configuration_dicts
                        and int(axis_configuration_dicts["crsOrder"]) == index):
                crs_axes[index].label = axis_label
                exist = True
                break
        if not exist:
            raise RecipeValidationException(
                "Could not find a definition for axis '" + crs_axis.label + "' in the axes parameter.")
        axis = axes_configurations[crs_axis.label]
        max = axis["max"] if "max" in axis else None
        if "type" in axis:
            type = axis["type"]
        elif crs_axis.is_date_axis():
            type = UserAxisType.DATE
        else:
            type = UserAxisType.NUMBER
        order = axis["gridOrder"] if "gridOrder" in axis else default_order
        irregular = axis["irregular"] if "irregular" in axis else False
        data_bound = axis["dataBound"] if "dataBound" in axis else True
        # for irregular axes we consider the resolution 1 / -1, as gmlcov requires a resolution
        # for all axis types, even irregular ones
        if "resolution" in axis:
            resolution = axis["resolution"]
        else:
            resolution = 1
        default_order += 1
        if "statements" in axis:
            if isinstance(axis["statements"], list):
                statements = axis["statements"]
            else:
                statements = [axis["statements"]]
        else:
            statements = []
        slice_group_size = None
        if "sliceGroupSize" in axis:
            if not irregular:
                raise RuntimeException("Cannot set 'sliceGroupSize' for regular axis '{}'"
                                       " in the ingredients file.".format(crs_axis.label))
            else:
                # Only an irregular axis with "dataBound": false can use sliceGroupSize
                value_str = axis["sliceGroupSize"]
                if "dataBound" not in axis or axis["dataBound"] is True:
                    raise RuntimeException("Option 'sliceGroupSize' can only be set for an irregular axis '{}'"
                                           " with \"dataBound\": false in the ingredients file.".format(crs_axis.label))
                try:
                    slice_group_size = float(value_str)
                    if slice_group_size <= 0:
                        raise ValueError
                except ValueError:
                    raise RuntimeException("Option 'sliceGroupSize' for irregular axis '{}' in the ingredients file"
                                           " must be a positive number. Given '{}'.".format(crs_axis.label, value_str))
        if not irregular:
            user_axes.append(RegularUserAxis(crs_axis.label, resolution, order, axis["min"], max, type,
                                             data_bound, statements))
        else:
            # NOTE: an irregular axis cannot have a resolution != 1
            if int(resolution) != IrregularUserAxis.DEFAULT_RESOLUTION:
                raise RuntimeException("Cannot set a 'resolution' value for irregular axis '{}'"
                                       " in the ingredients file. Given '{}'.".format(crs_axis.label, resolution))
            user_axes.append(IrregularUserAxis(crs_axis.label, resolution, order, axis["min"],
                                               axis["directPositions"], max, type, data_bound,
                                               statements, slice_group_size))
    number_of_specified_axes = len(axes_configurations)
    number_of_crs_axes = len(crs_axes)
    if number_of_specified_axes != number_of_crs_axes:
        raise RuntimeException("Number of axes in the coverage CRS ({}) does not match "
                               "the number of axes specified in the ingredients file ({}).".format(
                                   number_of_crs_axes, number_of_specified_axes))
    return user_axes

def _get_convertors(self):
    """
    Returns a map of coverage id -> GdalToCoverageConverter
    """
    convertors = {}
    for f in self.session.get_files():
        # This one does not contain any information for geo bounds
        if not FileUtil.validate_file_path(f.get_filepath()):
            continue
        gdal_ds = GDALGmlUtil(f.get_filepath())
        subdatasets = self._get_subdatasets(gdal_ds, f)
        gdal_ds.close()
        level = self._get_level(f.get_filepath())
        if len(self.levels) > 0 and level not in self.levels:
            # skip the file, as its level is not in the list of levels provided in the ingredients file
            log.debug("Skipping " + level + " data")
            continue
        crs_code = ""
        evaluator_slice = None
        for res in self.resolutions:
            subds_file = self._get_subdataset_file(subdatasets, res)
            crs_code = self._get_crs_code(subds_file.get_filepath(), crs_code)
            if len(self.crss) > 0 and crs_code not in self.crss:
                # skip this CRS, as it is not in the list of CRSs provided in the ingredients file
                log.debug("Skipping data with CRS " + crs_code)
                continue
            cov_id = self._get_coverage_id(self.coverage_id, crs_code, level, res)
            conv = self._get_convertor(convertors, cov_id, crs_code, level, res)
            file_pair = FilePair(subds_file.filepath, f.filepath)
            conv.files = [file_pair]
            crs_axes = CRSUtil(conv.crs).get_axes(self.coverage_id)
            if evaluator_slice is None:
                # This one contains the information for geo bounds
                evaluator_slice = EvaluatorSliceFactory.get_evaluator_slice(GdalToCoverageConverter.RECIPE_TYPE,
                                                                            subds_file)
            # The 10m, 20m and 60m resolutions share one data type (UInt16), while TCI uses another (Byte)
            if res == self.RES_TCI:
                conv.data_type = "Byte"
            else:
                conv.data_type = "UInt16"
            # Fixed resolution values for the 3 axes of a Sentinel 2 coverage
            axis_resolutions = self.RES_DICT[res]
            slices_dict = conv._create_coverage_slices(crs_axes, evaluator_slice, axis_resolutions)
            if conv.coverage_slices == {}:
                conv.coverage_slices = slices_dict
            else:
                for key, val in slices_dict.items():
                    conv.coverage_slices[key] += slices_dict[key]
        if len(conv.coverage_slices) != 0:
            first_slice = conv.coverage_slices["base"][0]
            # This needs one available file to extract metadata from later
            conv.files = [first_slice.data_provider.file]
    return convertors

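# For context, a hedged sketch of how GDAL lists the subdatasets of a container
# file (e.g. a Sentinel-2 SAFE/ZIP or a netCDF); the helper name is hypothetical:
from osgeo import gdal

def list_subdatasets(file_path):
    dataset = gdal.Open(file_path)
    # returns a list of (subdataset_name, human_readable_description) tuples
    return dataset.GetSubDatasets()
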