Example #1
    def _get_coverage_slices(self, crs, gdal_coverage_converter):
        """
        Returns the slices for the collection of files given
        """
        crs_axes = CRSUtil(crs).get_axes(self.session.coverage_id)

        slices_dict = self.create_dict_of_slices(self.session.import_overviews)

        timeseries = self._generate_timeseries_tuples()
        count = 1
        for tpair in timeseries:
            file = tpair.file
            file_path = file.get_filepath()

            timer = Timer()

            # print which file is being analyzed
            FileUtil.print_feedback(count, len(timeseries), file_path)
            if not FileUtil.validate_file_path(file_path):
                continue

            valid_coverage_slice = True

            try:
                gdal_file = GDALGmlUtil(file_path)
                subsets = GdalAxisFiller(crs_axes, gdal_file).fill(True)
                subsets = self._fill_time_axis(tpair, subsets)
            except Exception as ex:
                # If skip: true then just ignore this file and continue importing, else raise the exception
                FileUtil.ignore_coverage_slice_from_file_if_possible(
                    file_path, ex)
                valid_coverage_slice = False

            if valid_coverage_slice:
                # Generate local metadata string for current coverage slice
                self.evaluator_slice = EvaluatorSliceFactory.get_evaluator_slice(
                    self.recipe_type, tpair.file)
                local_metadata = gdal_coverage_converter._generate_local_metadata(
                    subsets, self.evaluator_slice)
                if self.session.import_overviews_only is False:
                    slices_dict["base"].append(
                        Slice(subsets, FileDataProvider(tpair.file),
                              local_metadata))

                # Then, create slices for the overview levels selected by the user
                for overview_index in self.session.import_overviews:
                    subsets_overview = self.create_subsets_for_overview(
                        subsets, overview_index, gdal_file)

                    slices_dict[str(overview_index)].append(
                        Slice(subsets_overview, FileDataProvider(file),
                              local_metadata))

            timer.print_elapsed_time()
            count += 1

        return slices_dict
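For reference, the slices_dict built above is keyed by "base" plus the string form of each requested overview index. The following is a minimal sketch of the layout that create_dict_of_slices is assumed to produce; it illustrates the data structure only and is not the actual wcst_import helper:

def create_dict_of_slices(import_overviews):
    # Illustrative only: one empty bucket for the base coverage level and one
    # per requested overview index (keys are strings, matching the
    # str(overview_index) lookups used in the example above).
    slices_dict = {"base": []}
    for overview_index in import_overviews:
        slices_dict[str(overview_index)] = []
    return slices_dict

# e.g. create_dict_of_slices([2, 4]) -> {"base": [], "2": [], "4": []}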
Example #2
    def _get_coverage_slices(self, crs, gdal_coverage_converter):
        """
        Returns the slices for the collection of files given
        """
        crs_axes = CRSUtil(crs).get_axes(self.session.coverage_id)

        slices = []
        timeseries = self._generate_timeseries_tuples()
        count = 1
        for tpair in timeseries:
            file_path = tpair.file.get_filepath()

            # NOTE: don't re-process any file already recorded in *.resume.json, as that would just be wasted time
            if not self.resumer.is_file_imported(file_path):
                timer = Timer()

                # print which file is being analyzed
                FileUtil.print_feedback(count, len(timeseries), file_path)

                if not FileUtil.validate_file_path(file_path):
                    continue

                valid_coverage_slice = True
                try:
                    subsets = GdalAxisFiller(crs_axes,
                                             GDALGmlUtil(file_path)).fill(True)
                    subsets = self._fill_time_axis(tpair, subsets)
                except Exception as ex:
                    # If skip: true then just ignore this file and continue importing, else raise the exception
                    FileUtil.ignore_coverage_slice_from_file_if_possible(
                        file_path, ex)
                    valid_coverage_slice = False

                if valid_coverage_slice:
                    # Generate local metadata string for current coverage slice
                    self.evaluator_slice = EvaluatorSliceFactory.get_evaluator_slice(
                        self.recipe_type, tpair.file)
                    local_metadata = gdal_coverage_converter._generate_local_metadata(
                        subsets, self.evaluator_slice)
                    slices.append(
                        Slice(subsets, FileDataProvider(tpair.file),
                              local_metadata))

                timer.print_elapsed_time()
                count += 1

        return slices
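The resumer check above skips files that were already imported in a previous run. Below is a minimal sketch of such a check, assuming the *.resume.json file holds a JSON list of imported file paths; the class name and layout are illustrative, not the actual Resumer implementation:

import json
import os

class SimpleResumer:
    # Illustrative stand-in for the resumer used above; assumes the resume
    # file is a JSON list of file paths that were already imported.
    def __init__(self, resume_file_path):
        self.imported_files = set()
        if os.path.exists(resume_file_path):
            with open(resume_file_path) as f:
                self.imported_files = set(json.load(f))

    def is_file_imported(self, file_path):
        return file_path in self.imported_files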
Example #3
    def _create_coverage_slices(self, crs_axes, calculated_evaluator_slice=None, axis_resolutions=None):
        """
        Returns all the coverage slices for this coverage
        :param crs_axes: the axes of the coverage's CRS
        :rtype: list[Slice]
        """
        slices = []
        count = 1
        for file in self.files:
            # NOTE: don't process any previously imported file (recorded in *.resume.json)
            if not self.resumer.is_file_imported(file.filepath):
                timer = Timer()

                # print which file is being analyzed
                FileUtil.print_feedback(count, len(self.files), file.filepath)

                if not FileUtil.validate_file_path(file.filepath):
                    continue

                valid_coverage_slice = True

                try:
                    if calculated_evaluator_slice is None:
                        # get the evaluator for the current recipe_type (each recipe type has a different evaluator)
                        self.evaluator_slice = EvaluatorSliceFactory.get_evaluator_slice(self.recipe_type, file)
                    else:
                        self.evaluator_slice = calculated_evaluator_slice

                    if self.data_type is None:
                        self.data_type = self.evaluator_slice.get_data_type(self)

                    coverage_slice = self._create_coverage_slice(file, crs_axes, self.evaluator_slice, axis_resolutions)
                except Exception as ex:
                    # If skip: true then just ignore this file and continue importing, else raise the exception
                    FileUtil.ignore_coverage_slice_from_file_if_possible(file.get_filepath(), ex)
                    valid_coverage_slice = False

                if valid_coverage_slice:
                    slices.append(coverage_slice)

                timer.print_elapsed_time()
                count += 1

        # Currently, coverage slices are only sorted by datetime before import (ascending by default)
        reverse = (self.import_order == self.IMPORT_ORDER_DESCENDING)
        return sort_slices_by_datetime(slices, reverse)
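The function above ends by sorting the collected slices with sort_slices_by_datetime. Below is a minimal sketch of such a sort, assuming each slice exposes its axis subsets and that the time axis subset carries a comparable lower bound; the is_time_axis flag and interval attribute names are assumptions, not the actual implementation:

def sort_slices_by_datetime(slices, reverse=False):
    # Illustrative sort: order coverage slices by the lower bound of their
    # time axis subset, ascending by default, descending when reverse=True.
    def time_key(coverage_slice):
        for axis_subset in coverage_slice.axis_subsets:
            # Assumed attributes: an is_time_axis flag and an interval with a
            # comparable lower bound (e.g. an ISO 8601 datetime string).
            if getattr(axis_subset, "is_time_axis", False):
                return (0, axis_subset.interval.low)
        return (1, "")  # slices without a time axis keep their relative order

    return sorted(slices, key=time_key, reverse=reverse)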
Example #4
    def _create_coverage_slices(self,
                                crs_axes,
                                calculated_evaluator_slice=None,
                                axis_resolutions=None):
        """
        Returns the slices for the collection of files given
        :param crs_axes:
        :rtype: list[Slice]
        """
        from master.recipe.base_recipe import BaseRecipe
        slices_dict = BaseRecipe.create_dict_of_slices(
            self.session.import_overviews)

        count = 1
        for file in self.files:
            timer = Timer()

            # print which file is being analyzed
            FileUtil.print_feedback(count, len(self.files), file.filepath)

            if not FileUtil.validate_file_path(file.filepath):
                continue

            valid_coverage_slice = True

            try:
                if calculated_evaluator_slice is None:
                    # get the evaluator for the current recipe_type (each recipe type has a different evaluator)
                    self.evaluator_slice = EvaluatorSliceFactory.get_evaluator_slice(
                        self.recipe_type, file)
                else:
                    self.evaluator_slice = calculated_evaluator_slice

                if self.data_type is None:
                    self.data_type = self.evaluator_slice.get_data_type(self)

                coverage_slice = self._create_coverage_slice(
                    file, crs_axes, self.evaluator_slice, axis_resolutions)
            except Exception as ex:
                # If skip: true then just ignore this file and continue importing, else raise the exception
                FileUtil.ignore_coverage_slice_from_file_if_possible(
                    file.get_filepath(), ex)
                valid_coverage_slice = False

            if valid_coverage_slice:
                if self.session.import_overviews_only is False:
                    slices_dict["base"].append(coverage_slice)

                if self.session.recipe["options"]["coverage"]["slicer"][
                        "type"] == "gdal":
                    gdal_file = GDALGmlUtil(file.get_filepath())

                    # Then, create slices for the overview levels selected by the user
                    for overview_index in self.session.import_overviews:
                        axis_subsets_overview = BaseRecipe.create_subsets_for_overview(
                            coverage_slice.axis_subsets, overview_index,
                            gdal_file)

                        coverage_slice_overview = copy.deepcopy(coverage_slice)
                        coverage_slice_overview.axis_subsets = axis_subsets_overview

                        slices_dict[str(overview_index)].append(
                            coverage_slice_overview)

            timer.print_elapsed_time()
            count += 1

        # Currently, coverage slices are only sorted by datetime before import (ascending by default)
        reverse = (self.import_order == self.IMPORT_ORDER_DESCENDING)
        for key, value in slices_dict.items():
            slices_dict[key] = sort_slices_by_datetime(value, reverse)

        return slices_dict
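A possible consumer of the dictionary returned above, importing the base-level slices first and then each overview level; the import_slices callback is a placeholder for whatever actually registers the slices and is not part of the real API:

def import_all_levels(slices_dict, import_slices):
    # Illustrative only: process the base level first (its list may be empty
    # when import_overviews_only is set), then every overview level built.
    if slices_dict.get("base"):
        import_slices("base", slices_dict["base"])

    for level, overview_slices in slices_dict.items():
        if level != "base":
            import_slices(level, overview_slices)

# e.g. import_all_levels(slices_dict, lambda level, slices: print(level, len(slices)))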