def _get_coverage_slices(self, crs, gdal_coverage_converter):
    """
    Returns the coverage slices for the collection of input files
    """
    crs_axes = CRSUtil(crs).get_axes(self.session.coverage_id)
    slices_dict = self.create_dict_of_slices(self.session.import_overviews)

    timeseries = self._generate_timeseries_tuples()
    count = 1
    for tpair in timeseries:
        file = tpair.file
        file_path = tpair.file.get_filepath()

        timer = Timer()

        # print which file is being analyzed
        FileUtil.print_feedback(count, len(timeseries), file_path)

        if not FileUtil.validate_file_path(file_path):
            continue

        valid_coverage_slice = True
        gdal_file = GDALGmlUtil(file.get_filepath())
        try:
            subsets = GdalAxisFiller(crs_axes, gdal_file).fill(True)
            subsets = self._fill_time_axis(tpair, subsets)
        except Exception as ex:
            # If skip: true, just ignore this file and continue importing; otherwise raise the exception
            FileUtil.ignore_coverage_slice_from_file_if_possible(file_path, ex)
            valid_coverage_slice = False

        if valid_coverage_slice:
            # Generate the local metadata string for the current coverage slice
            self.evaluator_slice = EvaluatorSliceFactory.get_evaluator_slice(self.recipe_type, tpair.file)
            local_metadata = gdal_coverage_converter._generate_local_metadata(subsets, self.evaluator_slice)

            if self.session.import_overviews_only is False:
                slices_dict["base"].append(Slice(subsets, FileDataProvider(tpair.file), local_metadata))

            # Then, create slices for the overview levels selected by the user
            for overview_index in self.session.import_overviews:
                subsets_overview = self.create_subsets_for_overview(subsets, overview_index, gdal_file)
                slices_dict[str(overview_index)].append(Slice(subsets_overview, FileDataProvider(file), local_metadata))

        timer.print_elapsed_time()
        count += 1

    return slices_dict
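# A minimal sketch, assuming create_dict_of_slices simply prepares one bucket per
# target level: "base" for the full-resolution slices plus one key per requested
# overview index. This helper body is an assumption for illustration, not the
# project's actual implementation.
def create_dict_of_slices(import_overviews):
    slices_dict = {"base": []}
    for overview_index in import_overviews:
        # overview keys are stored as strings, matching slices_dict[str(overview_index)] above
        slices_dict[str(overview_index)] = []
    return slices_dict

# e.g. create_dict_of_slices([2, 4]) -> {"base": [], "2": [], "4": []}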
def _get_slices(self, crs):
    # Let's first extract all the axes from our crs
    crs_axes = CRSUtil(crs).get_axes(self.session.coverage_id)
    # Prepare a list container for our slices
    slices = []
    # Iterate over the files and create a slice for each one
    for infile in self.session.get_files():
        # We need to create the exact position in time and space in which to place this slice.
        # For the space coordinates we can use the GDAL helper to extract them for us:
        # the helper will return a list of subsets based on the crs axes that we extracted
        # and will fill the coordinates for the ones that it can (the easting and northing axes)
        subsets = GdalAxisFiller(crs_axes, GDALGmlUtil(infile.get_filepath())).fill()

        # Now we must fill the time axis as well and indicate the position in time
        for subset in subsets:
            # Find the time axis
            if subset.coverage_axis.axis.crs_axis.is_time_axis():
                # Set the time position for it. Our recipe extracts it from a GDAL tag provided by the user.
                # The datetime value needs to be quoted (e.g.: "2015-01")
                subset.interval.low = '"' + GDALGmlUtil(infile.get_filepath()).get_datetime(self.options["time_tag"]) + '"'

        slices.append(Slice(subsets, FileDataProvider(infile)))

    return slices
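# Hedged sketch of the quoting step above: per the comment in the recipe, the
# datetime bound must be enclosed in double quotes, so a raw tag value such as
# 2015-01 becomes "2015-01" before it is assigned to subset.interval.low
# (the helper name here is hypothetical).
def quote_datetime(raw_value):
    # wrap the raw datetime string in double quotes
    return '"' + raw_value + '"'

assert quote_datetime("2015-01") == '"2015-01"'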
def _create_coverage_slice(self, grib_file, crs_axes, evaluator_slice=None, axis_resolutions=None):
    """
    Returns a slice for a grib file
    :param File grib_file: the grib file to import
    :param list[CRSAxis] crs_axes: the crs axes for the coverage
    :param FileEvaluatorSlice evaluator_slice: the evaluator for the current file
    :param list[number] axis_resolutions: the known axis resolutions, if any
    :rtype: Slice
    """
    evaluated_messages = self._evaluated_messages(grib_file)
    axis_subsets = []

    # Build the slice for the grib file, covering all the axes
    # (i.e.: min, max, origin and resolution of the geo bounds, plus the grid bounds)
    for i in range(0, len(crs_axes)):
        crs_axis = crs_axes[i]
        axis_subset = self._axis_subset(grib_file, evaluated_messages, crs_axis)
        axis_subsets.append(axis_subset)

    return Slice(axis_subsets,
                 FileDataProvider(grib_file,
                                  self._evaluated_messages_to_dict(evaluated_messages),
                                  self.MIMETYPE))
def _create_coverage_slice(self, file, crs_axes, evaluator_slice, axis_resolutions=None):
    """
    Returns a coverage slice for a file
    :param File file: the file to import
    :param list[CRSAxis] crs_axes: the crs axes for the coverage
    :param FileEvaluatorSlice evaluator_slice: the evaluator matching the kind of recipe (gdal/netcdf/grib)
    :param list[number] axis_resolutions: axis resolutions, when they are known in advance
           (e.g.: 10m, 20m, 60m in the Sentinel 2 recipes); they are used to calculate the grid bounds
    :rtype: Slice
    """
    axis_subsets = []
    file_structure = self._file_structure()

    for i in range(0, len(crs_axes)):
        resolution = None
        if axis_resolutions is not None:
            resolution = axis_resolutions[i]

        axis_subset = self._axis_subset(crs_axes[i], evaluator_slice, resolution)
        axis_subsets.append(axis_subset)

    # Generate the local metadata string for the current coverage slice
    local_metadata = self._generate_local_metadata(axis_subsets, evaluator_slice)

    return Slice(axis_subsets, FileDataProvider(file, file_structure), local_metadata)
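# Hedged sketch of why a known resolution helps: with a fixed geo resolution
# (e.g. Sentinel 2's 10 m bands) the grid extent of an axis can be derived
# directly from the geo bounds instead of being read from each file. The
# helper name and rounding are assumptions for illustration.
def grid_points(geo_low, geo_high, resolution):
    # number of grid pixels covering the geo interval at the given resolution
    return int(round((geo_high - geo_low) / resolution))

# e.g. a 109800 m wide Sentinel 2 tile at 10 m resolution -> 10980 grid pixels
assert grid_points(300000, 409800, 10) == 10980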
def _get_slices(self, crs):
    """
    Returns the slices for the collection of input files
    """
    files = self.session.get_files()
    crs_axes = CRSUtil(crs).get_axes(self.session.coverage_id)
    slices = []
    count = 1
    for file in files:
        # NOTE: don't re-process any file already imported according to *.resume.json, as that is just wasted time
        if not self.resumer.is_file_imported(file.filepath):
            timer = Timer()

            # print which file is being analyzed
            FileUtil.print_feedback(count, len(files), file.filepath)

            if not FileUtil.validate_file_path(file.filepath):
                continue

            valid_coverage_slice = True
            try:
                subsets = GdalAxisFiller(crs_axes, GDALGmlUtil(file.get_filepath())).fill()
            except Exception as ex:
                # If skip: true, just ignore this file and continue importing; otherwise raise the exception
                FileUtil.ignore_coverage_slice_from_file_if_possible(file.get_filepath(), ex)
                valid_coverage_slice = False

            if valid_coverage_slice:
                slices.append(Slice(subsets, FileDataProvider(file)))

            timer.print_elapsed_time()
            count += 1

    return slices
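# A minimal sketch of the resumer check used above, assuming the *.resume.json
# file holds a list of already-imported file paths so that re-runs skip them.
# This class is hypothetical; the project's actual Resumer may differ.
import json
import os

class SimpleResumer:
    def __init__(self, resume_json_path):
        self.imported = set()
        if os.path.exists(resume_json_path):
            with open(resume_json_path) as f:
                # the resume file is assumed to be a JSON array of file paths
                self.imported = set(json.load(f))

    def is_file_imported(self, file_path):
        return file_path in self.imported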
def _slice(self, file, crs_axes):
    """
    Returns a slice for a file
    :param File file: the file to import
    :param list[CRSAxis] crs_axes: the crs axes for the coverage
    :rtype: Slice
    """
    axis_subsets = []
    file_structure = self._file_structure()
    for i in range(0, len(crs_axes)):
        axis_subsets.append(self._axis_subset(crs_axes[i], file))

    return Slice(axis_subsets, FileDataProvider(file, file_structure))
def _get_coverage_slices(self, crs, gdal_coverage_converter):
    """
    Returns the coverage slices for the collection of input files
    """
    crs_axes = CRSUtil(crs).get_axes(self.session.coverage_id)
    slices = []

    timeseries = self._generate_timeseries_tuples()
    count = 1
    for tpair in timeseries:
        file_path = tpair.file.get_filepath()

        # NOTE: don't re-process any file already imported according to *.resume.json, as that is just wasted time
        if not self.resumer.is_file_imported(file_path):
            timer = Timer()

            # print which file is being analyzed
            FileUtil.print_feedback(count, len(timeseries), file_path)

            if not FileUtil.validate_file_path(file_path):
                continue

            valid_coverage_slice = True
            try:
                subsets = GdalAxisFiller(crs_axes, GDALGmlUtil(file_path)).fill(True)
                subsets = self._fill_time_axis(tpair, subsets)
            except Exception as ex:
                # If skip: true, just ignore this file and continue importing; otherwise raise the exception
                FileUtil.ignore_coverage_slice_from_file_if_possible(file_path, ex)
                valid_coverage_slice = False

            if valid_coverage_slice:
                # Generate the local metadata string for the current coverage slice
                self.evaluator_slice = EvaluatorSliceFactory.get_evaluator_slice(self.recipe_type, tpair.file)
                local_metadata = gdal_coverage_converter._generate_local_metadata(subsets, self.evaluator_slice)
                slices.append(Slice(subsets, FileDataProvider(tpair.file), local_metadata))

            timer.print_elapsed_time()
            count += 1

    return slices
def _get_slices(self, gdal_dataset):
    """
    Returns the slices for the collection of input files
    """
    files = self.session.get_files()
    crs = gdal_dataset.get_crs()
    crs_axes = CRSUtil(crs).get_axes()
    slices = []
    count = 1
    for file in files:
        # print which file is being analyzed
        FileUtil.print_feedback(count, len(files), file.filepath)
        subsets = GdalAxisFiller(crs_axes, GDALGmlUtil(file.get_filepath())).fill()
        slices.append(Slice(subsets, FileDataProvider(file)))
        count += 1

    return slices
def _get_slices(self, crs):
    """
    Returns the slices for the collection of input files
    """
    crs_axes = CRSUtil(crs).get_axes()
    slices = []
    timeseries = self._generate_timeseries_tuples()
    count = 1
    for tpair in timeseries:
        # print which file is being analyzed
        FileUtil.print_feedback(count, len(timeseries), tpair.file.filepath)
        subsets = GdalAxisFiller(crs_axes, GDALGmlUtil(tpair.file.get_filepath())).fill()
        subsets = self._fill_time_axis(tpair, subsets)
        slices.append(Slice(subsets, FileDataProvider(tpair.file)))
        count += 1

    return slices
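# Hedged sketch of the pair shape that _generate_timeseries_tuples yields:
# each entry couples one input file with its position on the time axis.
# The namedtuple stand-in is an assumption; the real class may differ.
from collections import namedtuple

TimeFileTuple = namedtuple("TimeFileTuple", ["time", "file"])

# e.g. TimeFileTuple(time='"2015-01"', file=some_file) pairs a quoted datetime with its file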
def _get_slices(self, coverage_axes, intervals):
    """
    Returns the slices
    :param list[CoverageAxis] coverage_axes: the coverage axes
    :param list[list[Interval]] intervals: all the possible intervals defining the coverage space
    :rtype: list[Slice]
    """
    slices = []
    for interval_list in intervals:
        subsets = []
        for index in range(0, len(coverage_axes)):
            subsets.append(AxisSubset(coverage_axes[index], interval_list[index]))

        if len(coverage_axes) == 1:
            # For 1D coverages we have to parse the gml and create a tuple list data provider
            data_provider = TupleListDataProvider(self._get_coverage_data_as_array(self._get_coverage_url(subsets)))
        else:
            data_provider = UrlDataProvider(self._get_coverage_url(subsets))

        slices.append(Slice(subsets, data_provider))

    return slices
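# A minimal sketch of the provider choice above, with stand-in classes: for a
# 1D coverage the data is materialized as a tuple list up front, while higher
# dimensions pass the request URL through so the data is fetched later. Both
# class bodies and the fetch_as_array callable are assumptions for illustration.
class TupleListDataProvider:
    def __init__(self, values):
        self.values = values  # data already parsed into an in-memory list

class UrlDataProvider:
    def __init__(self, url):
        self.url = url  # data is fetched from this URL when needed

def pick_data_provider(coverage_axes, coverage_url, fetch_as_array):
    if len(coverage_axes) == 1:
        return TupleListDataProvider(fetch_as_array(coverage_url))
    return UrlDataProvider(coverage_url)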