def _execute(self):

        # Setup the user credentials and logger
        self._setup()

        if len(self.required_bands) != len(self.strds_ids):
            raise AsyncProcessError(
                "The number of bands and the number of strds must be equal")

        # Band and strds names must be unique
        for band in self.required_bands:
            if self.required_bands.count(band) > 1:
                raise AsyncProcessError("The band names must be unique")

        for strds in self.strds_ids:
            if self.strds_ids.count(strds) > 1:
                raise AsyncProcessError("The strds names must be unique")

        # Check and lock the target and temp mapsets
        self._check_lock_target_mapset()

        if self.target_mapset_exists is True:
            raise AsyncProcessError(
                "Sentinel time series can only be created in a new mapset. "
                "Mapset <%s> already exists." % self.target_mapset_name)

        # Init the GRASS environment and create a temporary mapset with the
        # same name as the target mapset. This is required to register the
        # raster maps in the temporary directory, while using them from the
        # persistent directory.

        # Create the temp database and link the
        # required mapsets into it
        self._create_temp_database(self.required_mapsets)

        # Initialize the GRASS environment and switch into PERMANENT
        # mapset, which is always linked
        self._create_grass_environment(
            grass_data_base=self.temp_grass_data_base, mapset_name="PERMANENT")

        # Create the temporary mapset and switch into it
        self._create_temporary_mapset(temp_mapset_name=self.target_mapset_name)

        # Setup the download cache and query the Google BigQuery database
        # for the product ids
        self._prepare_sentinel2_download()

        # Check if all product ids were found
        missing_product_ids = [product_id for product_id in self.product_ids
                               if product_id not in self.query_result]

        # Abort if any scene is missing
        if missing_product_ids:
            raise AsyncProcessError("Unable to find product ids <%s> in the "
                                    "Google BigQuery database" %
                                    str(missing_product_ids))

        self._import_sentinel2_scenes()

        # Copy local mapset to original location
        self._copy_merge_tmp_mapset_to_target_mapset()
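
# A minimal sketch of the uniqueness checks at the top of _execute() above:
# comparing the list length against the set length detects duplicates in a
# single pass instead of calling list.count() per element. AsyncProcessError
# is the exception class already used throughout these examples; the helper
# name is hypothetical.
def assert_unique(names, what):
    """Raise if the name list contains duplicates; 'what' labels the error."""
    if len(names) != len(set(names)):
        raise AsyncProcessError("The %s names must be unique" % what)

# Usage sketch:
# assert_unique(self.required_bands, "band")
# assert_unique(self.strds_ids, "strds")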
    def _prepare_download(self):
        """Check the download cache if the file already exists, to avoid redundant downloads.
        The downloaded files will be stored in a temporary directory. After the download of all files
        completes, the downloaded files will be moved to the download cache. This avoids broken
        files in case a download was interrupted or stopped by termination.

        """
        # Create the download cache directory if it does not exist
        if not os.path.exists(self.config.DOWNLOAD_CACHE):
            os.mkdir(self.config.DOWNLOAD_CACHE)

        # Create the user-specific download cache directory for the
        # downloaded files
        if not os.path.exists(self.user_download_cache_path):
            os.mkdir(self.user_download_cache_path)

        # Switch into the tempfile directory
        os.chdir(self.temp_file_path)

        # We have to set the home directory to create the grass location
        os.putenv("HOME", "/tmp")

        sensor_ids = set()
        for scene_id in self.scene_ids:

            # Check if the list of scenes contains scenes from
            # different satellites
            self.sensor_id = extract_sensor_id_from_scene_id(scene_id=scene_id)
            sensor_ids.add(self.sensor_id)

            if len(sensor_ids) > 1:
                raise AsyncProcessError(
                    "Different satellites are in the list of scenes to be "
                    "imported. Only scenes from a single satellite can be "
                    "imported at once.")

        # All bands are imported, except the MTL file
        self.required_bands = SCENE_BANDS[self.sensor_id][0:-1]

        self._send_resource_update("Sending Google BigQuery request.")

        try:
            self.query_result = self.query_interface.get_landsat_urls(
                self.scene_ids, self.required_bands)
        except Exception as e:
            raise AsyncProcessError("Error in querying Landsat product <%s> "
                                    "in Google BigQuery Landsat database. "
                                    "Error: %s" % (self.scene_ids, str(e)))

        if not self.query_result:
            raise AsyncProcessError("Unable to find Landsat product <%s> "
                                    "in Google BigQuery Landsat database" %
                                    self.scene_ids)
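
# A minimal sketch of the caching strategy the docstring above describes:
# fetch into a temporary directory first and move the finished file into the
# cache only afterwards, so an interrupted download can never leave a
# truncated file in the cache. The URL handling, paths, and use of
# urllib/shutil are illustrative assumptions, not the actual implementation.
import os
import shutil
import tempfile
import urllib.request

def download_to_cache(url, file_name, cache_dir):
    """Download url into cache_dir/file_name via a temporary file."""
    cached_path = os.path.join(cache_dir, file_name)
    if os.path.exists(cached_path):
        # Already cached, skip the redundant download
        return cached_path
    with tempfile.TemporaryDirectory() as tmp_dir:
        tmp_path = os.path.join(tmp_dir, file_name)
        urllib.request.urlretrieve(url, tmp_path)
        # Move only the fully downloaded file into the cache
        shutil.move(tmp_path, cached_path)
    return cached_path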
    def _create_temp_database(self, mapsets=None):
        """Create a temporary GIS database and location with a PERMANENT
        mapset for processing.

        Raises:
            This function raises AsyncProcessError in case of an error.

        """
        # Avoid the mutable default argument anti-pattern
        mapsets = mapsets if mapsets is not None else []
        if not self.sentinel2_band_file_list:
            raise AsyncProcessError(
                "Unable to create a temporary GIS database, no data is available"
            )

        try:
            geofile = self.sentinel2_band_file_list[self.required_bands[0]][0]
            self._send_resource_update(geofile)
            # We have to set the home directory to create the grass location
            os.putenv("HOME", "/tmp")

            # Switch into the GRASS temporary database directory
            os.chdir(self.temp_grass_data_base)

            executable_params = [
                self.config.GRASS_GIS_START_SCRIPT,
                "-e",
                "-c",
                geofile,
                os.path.join(self.temp_grass_data_base, self.location_name),
            ]

            self.message_logger.info(
                "%s %s" %
                (self.config.GRASS_GIS_START_SCRIPT, executable_params))

            self._update_num_of_steps(1)

            p = Process(exec_type="exec",
                        executable="python2",
                        executable_params=executable_params)

            # Create the GRASS location, this will create the location and mapset paths
            self._run_process(p)
        except Exception as e:
            raise AsyncProcessError(
                "Unable to create a temporary GIS database and location at <%s>"
                ", Exception: %s" %
                (os.path.join(self.temp_grass_data_base, self.location_name,
                              "PERMANENT"), str(e)))
    def _prepare_sentinel2_download(self):
        """Check the download cache if the file already exists, to avoid redundant downloads.
        The downloaded files will be stored in a temporary directory. After the download of all files
        completes, the downloaded files will be moved to the download cache. This avoids broken
        files in case a download was interrupted or stopped by termination.

        """
        # Create the download cache directory if it does not exist
        if not os.path.exists(self.config.DOWNLOAD_CACHE):
            os.mkdir(self.config.DOWNLOAD_CACHE)

        # Create the user-specific download cache directory for the
        # downloaded files
        if not os.path.exists(self.user_download_cache_path):
            os.mkdir(self.user_download_cache_path)

        # Switch into the tempfile directory
        os.chdir(self.temp_file_path)

        # We have to set the home directory to create the grass location
        os.putenv("HOME", "/tmp")

        self._send_resource_update("Sending Google BigQuery request.")

        try:
            self.query_result = self.query_interface.get_sentinel_urls(
                self.product_ids, self.required_bands)
        except Exception as e:
            raise AsyncProcessError(
                "Error in querying Sentinel 2A product <%s> "
                "in Google BigQuery Sentinel 2A database. "
                "Error: %s" % (self.product_ids, str(e)))

        if not self.query_result:
            raise AsyncProcessError("Unable to find Sentinel 2A product <%s> "
                                    "in Google BigQuery Sentinel 2A database" %
                                    self.product_ids)
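
# The structure of self.query_result is not shown in these excerpts; the
# membership test in _execute() ("product_id not in self.query_result")
# implies a mapping keyed by product id. The nested layout sketched below
# (band name -> download URL) is an assumption for illustration only.
#
# query_result = {
#     "S2A_MSIL1C_...": {
#         "B04": "gs://gcp-public-data-sentinel-2/.../B04.jp2",
#         "B08": "gs://gcp-public-data-sentinel-2/.../B08.jp2",
#     },
# }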
    def _execute(self):

        self._setup()
        where = None

        # Points and where statement are stored in self.request_data
        strds_name = self.map_name
        points = self.request_data["points"]
        if "where" in self.request_data:
            where = self.request_data["where"]

        if not points:
            raise AsyncProcessError("Empty coordinate list")

        point_file = tempfile.NamedTemporaryFile(dir=self.temp_file_path,
                                                 delete=True)
        result_file = tempfile.NamedTemporaryFile(dir=self.temp_file_path,
                                                  delete=True)

        # Avoid shadowing the builtins "tuple" and "id"
        for point in points:
            if len(point) != 3:
                raise AsyncProcessError("Wrong number of coordinate entries")

            point_id, x, y = point
            row = "%s|%s|%s\n" % (point_id, x, y)
            point_file.write(row.encode())

        point_file.flush()

        pc = dict()
        pc["1"] = {
            "module": "v.in.ascii",
            "inputs": {
                "input": point_file.name,
                "format": "point",
                "column": "id text, x double precision, y double precision",
                "x": 2,
                "y": 3
            },
            "outputs": {
                "output": {
                    "name": "input_points"
                }
            }
        }

        pc["2"] = {
            "module": "t.rast.sample",
            "inputs": {
                "strds": "%s@%s" % (strds_name, self.mapset_name),
                "points": "input_points",
                "column": "id"
            },
            "outputs": {
                "output": {
                    "name": result_file.name
                }
            },
            "flags": "rn",
            "overwrite": True,
            "verbose": True
        }

        if where is not None:
            pc["2"]["inputs"]["where"] = where

        self.request_data = pc

        # Run the process chain
        EphemeralProcessing._execute(self, skip_permission_check=True)

        with open(result_file.name, "r") as output:
            result = output.readlines()

        output_list = []
        for line in result:
            output_list.append(line.strip().split("|"))

        self.module_results = output_list

        point_file.close()
        result_file.close()
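
# The input contract of the method above, reconstructed from how
# self.request_data is read: a list of (id, x, y) coordinate triples plus an
# optional temporal "where" filter. The concrete values are hypothetical.
#
# request_data = {
#     "points": [["p1", 638684.0, 220210.0],
#                ["p2", 635676.0, 226371.0]],
#     "where": "start_time >= '2016-01-01'",  # optional
# }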
    def _execute(self):

        self._setup()

        strds_name = self.map_name
        timestamp = self.rdc.user_data

        self.required_mapsets.append(self.mapset_name)
        gml_file = tempfile.NamedTemporaryFile(dir=self.temp_file_path,
                                               delete=True)

        with open(gml_file.name, "w") as tmp_file:
            if isinstance(self.request_data, str):
                tmp_file.write(str(self.request_data).strip())
            else:
                tmp_file.write(dumps(self.request_data))

        pc = {}
        # v.in.ogr
        pc["1"] = {
            "module": "v.import",
            "inputs": {
                "input": gml_file.name
            },
            "outputs": {
                "output": {
                    "name": "polygon"
                }
            },
            "superquiet": True
        }
        # t.create
        pc["2"] = {
            "module": "t.create",
            "inputs": {
                "type": "stvds",
                "temporaltype": "absolute",
                "semantictype": "mean",
                "title": "Polygon",
                "description": "Polygon"
            },
            "outputs": {
                "output": {
                    "name": "polygon_stvds"
                }
            },
            "superquiet": True
        }
        # t.register
        pc["3"] = {
            "module": "t.register",
            "inputs": {
                "type": "vector",
                "input": "polygon_stvds",
                "maps": "polygon",
                "start": timestamp,
                "increment": "1 second"
            },
            "flags": "i",
            "superquiet": False
        }
        # t.sample
        pc["4"] = {
            "module": "t.sample",
            "inputs": {
                "sample": "polygon_stvds",
                "inputs": strds_name + "@" + self.mapset_name,
                "samtype": "stvds",
                "intype": "strds"
            },
            "superquiet": False
        }

        # Setup the grass environment, check the process chain and run the modules
        self.skip_region_check = True
        process_list = self._create_temporary_grass_environment_and_process_list(
            process_chain=pc, skip_permission_check=True)
        self._execute_process_list(process_list)

        gml_file.close()

        # Extract raster name
        map_list = self.module_output_log[3]["stdout"]
        self.message_logger.info("Maplist: " + str(map_list))
        # Check if a map was found
        try:
            raster_name = map_list.split("|")[1]
            # Select the first raster name from a list of names
            if "," in raster_name:
                raster_name = raster_name.split(",")[0]
        except (AttributeError, IndexError):
            raise AsyncProcessError("No raster maps found for timestamp: " +
                                    timestamp)

        if raster_name == "None":
            raise AsyncProcessError("No raster maps found for timestamp: " +
                                    timestamp)

        result_file = tempfile.NamedTemporaryFile(dir=self.temp_file_path,
                                                  delete=True)

        pc = {}
        # g.region
        pc["5"] = {"module": "g.region", "inputs": {"vector": "polygon"}}
        # v.rast.stats
        pc["6"] = {
            "module": "v.rast.stats",
            "inputs": {
                "map": "polygon",
                "method":
                "number,minimum,maximum,range,average,median,stddev,sum,variance,coeff_var",
                "raster": raster_name,
                "column_prefix": "raster"
            },
            "superquiet": True
        }
        # v.db.select
        pc["7"] = {
            "module": "v.db.select",
            "inputs": {
                "map": "polygon"
            },
            "outputs": {
                "file": {
                    "name": result_file.name
                }
            }
        }

        # Check the process chain and run the modules
        self.skip_region_check = False
        process_list = self._validate_process_chain(process_chain=pc,
                                                    skip_permission_check=True)
        self._execute_process_list(process_list)

        with open(result_file.name, "r") as output:
            result = output.readlines()

        # A result line looks like:
        # cat|fid|raster_number|raster_minimum|raster_maximum|raster_range|raster_average|raster_median|raster_stddev|raster_sum|raster_variance|raster_coeff_var
        # 1|swwake_10m.0|2025000|1|6|5|4.27381481481481|5|1.54778017556735|8654475|2.39562347187929|36.2154244540989
        #
        # An empty result looks like:
        # cat|fid|raster_number|raster_minimum|raster_maximum|raster_range|raster_average|raster_median|raster_stddev|raster_sum|raster_variance|raster_coeff_var
        # 1|tile||||||||||
        output_list = []
        keys = []
        for line_number, line in enumerate(result):
            values = line.strip().split("|")

            # The first line contains the column names
            if line_number == 0:
                keys = values
                continue

            stats = {}
            for i, key in enumerate(keys):
                if key in ["cat", "fid"]:
                    stats[key] = values[i]
                else:
                    # Store only valid numbers, skip empty entries
                    try:
                        stats[key] = float(values[i])
                    except ValueError:
                        pass
            output_list.append(AreaUnivarResultModel(**stats))

        self.module_results = output_list

        result_file.close()
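
# The raster-name extraction in the method above depends on the layout of
# the t.sample stdout: pipe-separated columns whose second field holds the
# matched raster map name(s). The sample line below is an illustrative
# assumption of that layout.
#
# polygon_stvds@user1|ndvi_2016_01@landsat|2016-01-01 00:00:00|2016-01-01 00:00:01
# map_list.split("|")[1]  ->  "ndvi_2016_01@landsat"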
    def _execute(self):

        self._setup()

        strds_name = self.map_name
        timestamp = self.rdc.user_data

        self.required_mapsets.append(self.mapset_name)
        gml_file = tempfile.NamedTemporaryFile(dir=self.temp_file_path,
                                               delete=True)

        with open(gml_file.name, "w") as tmp_file:
            tmp_file.write(dumps(self.request_data))

        pc = {}
        # v.in.ogr
        pc["1"] = {
            "module": "v.import",
            "inputs": {
                "input": gml_file.name
            },
            "outputs": {
                "output": {
                    "name": "polygon"
                }
            },
            "superquiet": True
        }
        # t.create
        pc["2"] = {
            "module": "t.create",
            "inputs": {
                "type": "stvds",
                "temporaltype": "absolute",
                "semantictype": "mean",
                "title": "Polygon",
                "description": "Polygon"
            },
            "outputs": {
                "output": {
                    "name": "polygon_stvds"
                }
            },
            "superquiet": True
        }
        # t.register
        pc["3"] = {
            "module": "t.register",
            "inputs": {
                "type": "vector",
                "input": "polygon_stvds",
                "maps": "polygon",
                "start": timestamp,
                "increment": "1 second"
            },
            "flags": "i",
            "superquiet": False
        }
        # t.sample
        pc["4"] = {
            "module": "t.sample",
            "inputs": {
                "sample": "polygon_stvds",
                "inputs": strds_name + "@" + self.mapset_name,
                "samtype": "stvds",
                "intype": "strds"
            },
            "superquiet": False
        }

        # Setup the grass environment, check the process chain and run the modules
        self.skip_region_check = True
        process_list = self._create_temporary_grass_environment_and_process_list(
            process_chain=pc, skip_permission_check=True)
        self._execute_process_list(process_list)

        gml_file.close()

        # Extract raster name
        map_list = self.module_output_log[3]["stdout"]

        self.message_logger.debug("Maplist: " + str(map_list))
        # Check if a map was found
        try:
            raster_name = map_list.split("|")[1]
            # Select the first raster name from a list of names
            if "," in raster_name:
                raster_name = raster_name.split(",")[0]
        except (AttributeError, IndexError):
            raise AsyncProcessError("No raster maps found for timestamp: " +
                                    timestamp)

        if raster_name == "None":
            raise AsyncProcessError("No raster maps found for timestamp: " +
                                    timestamp)

        result_file = tempfile.NamedTemporaryFile(dir=self.temp_file_path,
                                                  delete=True)

        pc = {}
        # g.region
        pc["5"] = {
            "module": "g.region",
            "inputs": {
                "vector": "polygon",
                "align": raster_name
            }
        }
        # r.mask
        pc["6"] = {
            "module": "r.mask",
            "inputs": {
                "vector": "polygon"
            },
            "superquiet": True
        }
        # r.stats
        pc["7"] = {
            "module": "r.stats",
            "inputs": {
                "input": raster_name,
                "separator": "|"
            },
            "outputs": {
                "output": {
                    "name": result_file.name
                }
            },
            "flags": "acpl",
            "superquiet": True
        }

        # Check the process chain and run the modules
        self.skip_region_check = False
        process_list = self._validate_process_chain(process_chain=pc,
                                                    skip_permission_check=True)
        self._execute_process_list(process_list)

        with open(result_file.name, "r") as output:
            result = output.readlines()

        output_list = []
        for line in result:
            stat_list = line.strip().split("|")

            output_list.append(
                CategoricalStatisticsResultModel(
                    cat=stat_list[0],
                    name=stat_list[1],
                    area=float(stat_list[2]),
                    cell_count=int(stat_list[3]),
                    percent=float(stat_list[4].split("%")[0])))

        self.module_results = output_list

        result_file.close()
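
# The "acpl" flags passed to r.stats above request area sizes (a), cell
# counts (c), percentages (p), and category labels (l), producing one
# pipe-separated line per raster category. The sample line below is an
# illustrative assumption of that layout, matching the parsing order
# cat, name, area, cell_count, percent:
#
# 1|forest|2025000.000000|2025|36.21%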