def _execute_process_list(self, process_list):
        """Extend the mapset lock and execute the provided process list

        Args:
            process_list: The process list to execute

        Raises:
            This method will raise an AsyncProcessError or AsyncProcessTermination
        """
        for process in process_list:
            # Extend the lock for each process by max processing time * 2 so
            # the lock can not expire while the process is still running
            if self.target_mapset_lock_set is True:
                ret = self.lock_interface.extend(resource_id=self.target_mapset_lock_id,
                                                 expiration=self.process_time_limit * 2)
                if ret == 0:
                    raise AsyncProcessError(
                        "Unable to extend lock for mapset <%s>"
                        % self.target_mapset_name)

            if self.temp_mapset_lock_set is True:
                # Extend the lock of the temporary mapset the same way
                ret = self.lock_interface.extend(resource_id=self.temp_mapset_lock_id,
                                                 expiration=self.process_time_limit * 2)
                if ret == 0:
                    raise AsyncProcessError(
                        "Unable to extend lock for "
                        "temporary mapset <%s>" % self.temp_mapset_name)

            if process.exec_type == "grass":
                self._run_module(process)
            elif process.exec_type == "exec":
                self._run_process(process)
            elif process.exec_type == "python":
                # SECURITY NOTE: eval() executes arbitrary Python code;
                # process.executable must come from a trusted, already
                # validated process chain only
                eval(process.executable)
# Example #2 (scraped snippet separator)
    def _check_urls(self):
        """Check the urls for access and supported mimetypes.

        If all files are already in the download cache, then
        nothing needs to be downloaded and checked.

        Raises:
            AsyncProcessError if a URL is not accessible or its mimetype
            is not in SUPPORTED_MIMETYPES
        """
        for url in self.url_list:
            # Send a resource update
            if self.send_resource_update is not None:
                self.send_resource_update(message="Checking access to URL: %s" % url)

            # Check if the URL exists by investigating the HTTP header
            # NOTE(review): no timeout is set — a stalled server blocks
            # this request indefinitely; consider requests.head(url, timeout=...)
            resp = requests.head(url)
            if self.message_logger:
                self.message_logger.info("%i %s %s" % (resp.status_code,
                                                       resp.text, resp.headers))

            if resp.status_code != 200:
                raise AsyncProcessError("The URL <%s> can not be accessed." % url)

            # Download 256 bytes from the url and check its mimetype.
            # Use a context manager so the connection is always closed
            # (the original leaked the response object).
            with urlopen(url) as response:
                mime_type = magic.from_buffer(response.read(256),
                                              mime=True).lower()
            if self.message_logger:
                self.message_logger.info(mime_type)

            if mime_type not in SUPPORTED_MIMETYPES:
                raise AsyncProcessError("Mimetype <%s> of url <%s> is not supported. "
                                        "Supported mimetypes are: %s" % (
                                            mime_type, url,
                                            ",".join(SUPPORTED_MIMETYPES)))

            self.detected_mime_types.append(mime_type)
# Example #3 (scraped snippet separator)
    def _check_lock_mapset(self, mapset_name):
        """Check if the mapset exists and lock it

        If the mapset is a global mapset and Error will be raised.

        The duration of the lock is process_time_limit * process_num_limit
        and should be extended if needed.

        Only mapsets of the user database are locked.

        Unlock the mapset after the processing finished.
        """
        # check if the resource is accessible
        mapset_exists = self._check_mapset(mapset_name)

        if mapset_exists is False:
            raise AsyncProcessError(
                "Mapset <%s> does not exist and can not be locked." % mapset_name)

        # Finally lock the mapset for the time that the user can allocate at maximum
        lock_id = "%s/%s/%s" % (self.user_group, self.location_name, mapset_name)
        ret = self.lock_interface.lock(
            resource_id=lock_id,
            expiration=self.process_time_limit * self.process_num_limit)

        if ret == 0:
            raise AsyncProcessError(
                "Unable to lock mapset <%s>, resource is already locked" % mapset_name)
        self.message_logger.info("Mapset <%s> locked" % mapset_name)

        # if we manage to come here, the lock was correctly set, hence store
        # the lock id for later unlocking
        self.lock_ids[lock_id] = mapset_name
# Example #4 (scraped snippet separator)
    def perform_file_validation(self, filepath, mimetype=None):
        """Perform a file validation check of mimetypes and zip bombs.

        This function checks zip files and returns the file names of the
        extracted file(s).
        If mimetype is None all supported mimetypes will be checked.

        Args:
            filepath (str): The path to a file that should be checked against
                      supported mimetypes and zip-bomb security.
            mimetype (str): A specific mimetype that should be checked

        Returns:
            (list)
            file_list A list of files that will be in the current working directory

        Raises:
            AsyncProcessError if the file does not exist, has an
            unsupported mimetype or suffix, or looks like a zip bomb
        """
        file_name = os.path.basename(filepath)
        file_list = [file_name]

        if not os.path.isfile(filepath):
            raise AsyncProcessError("File <%s> does not exist." % filepath)

        mime_type = magic.from_file(filepath, mime=True)
        if self.message_logger:
            self.message_logger.info(mime_type)

        if mime_type not in SUPPORTED_MIMETYPES:
            raise AsyncProcessError("Mimetype of url <%s> is not supported. "
                                    "Supported mimetypes are: %s" % (
                                        filepath, ",".join(SUPPORTED_MIMETYPES)))

        if mime_type.lower() == "application/zip":
            # Use a context manager so the archive is closed even if one
            # of the checks below raises (the original leaked the handle
            # on every error path and left a debug print() in place).
            with zipfile.ZipFile(filepath) as z:
                entries = z.infolist()
                total_sum = sum(e.file_size for e in entries)
                compressed_sum = sum(e.compress_size for e in entries)
                # Guard against ZeroDivisionError for empty archives or
                # archives whose entries have zero compressed size
                compression_ratio = (total_sum / compressed_sum
                                     if compressed_sum else 0)

                if compression_ratio > 10000:
                    raise AsyncProcessError("Compression ratio is larger than 10000.")

                if total_sum > 2 ** 32:
                    raise AsyncProcessError(
                        "Files larger than 4GB are not supported in zip files.")

                for name in z.namelist():
                    file_name, suffix = os.path.splitext(name)
                    file_list.append(file_name)
                    if suffix not in SUPPORTED_SUFFIXES:
                        raise AsyncProcessError(
                            "Suffix %s of zipped file <%s> is not "
                            "supported. Supported suffixes in zip "
                            "files are: %s" % (
                                suffix, name,
                                ",".join(SUPPORTED_SUFFIXES)))

        return file_list
    def _execute(self):
        """Rename a list of map layers with g.rename in the target mapset.

        Raises:
            AsyncProcessError if one of the maps to rename was not found
        """
        self._setup()

        # Only the layer type is needed here; the rename pairs come from
        # self.request_data (the original bound an unused local "args")
        _, layer_type = self.data
        self.required_mapsets.append(self.target_mapset_name)

        # g.rename expects comma separated "old,new" pairs:
        # [(a, a_new),(b, b_new),(c, c_new), ...]
        name_list = list()
        for old_name, new_name in self.request_data:
            name_list.append("%s,%s" % (old_name, new_name))
        name_string = ",".join(name_list)

        pc = {"1": {"module": "g.rename", "inputs": {layer_type: name_string}}}

        self.skip_region_check = True
        process_list = self._validate_process_chain(skip_permission_check=True,
                                                    process_chain=pc)
        self._create_temp_database(self.required_mapsets)
        self._check_lock_target_mapset()
        self._create_grass_environment(
            grass_data_base=self.temp_grass_data_base,
            mapset_name=self.target_mapset_name)

        self._execute_process_list(process_list)

        # g.rename reports missing maps only as warnings on stderr
        stderr_output = "\n".join(self.module_output_log[0]["stderr"])
        if "WARNING: " in stderr_output and "not found" in stderr_output:
            raise AsyncProcessError("Error while renaming map layers")

        self.finish_message = "Successfully renamed %s layers." % layer_type
    def _execute(self):
        """Create a new mapset in the user location if it does not exist yet."""
        self._setup()
        # A temporary database is required to list the existing mapsets
        self._create_temp_database()

        pc = {"1": {"module": "g.mapsets",
                    "flags": "l"}}

        process_list = self._validate_process_chain(process_chain=pc,
                                                    skip_permission_check=True)
        self._create_grass_environment(grass_data_base=self.temp_grass_data_base,
                                       mapset_name="PERMANENT")

        self._execute_process_list(process_list)

        # Collect the mapset names reported by g.mapsets -l
        mapset_list = [entry.strip()
                       for entry in self.module_output_log[0]["stdout"].split()]

        if self.target_mapset_name in mapset_list:
            raise AsyncProcessError("Mapset <%s> exists." % self.target_mapset_name)

        # Create the new temporary mapset and merge it into the user
        # database location
        self._check_lock_target_mapset()
        self.required_mapsets = ["PERMANENT"]
        self._create_temporary_mapset(temp_mapset_name=self.temp_mapset_name)
        self._copy_merge_tmp_mapset_to_target_mapset()

        self.finish_message = \
            "Mapset <%s> successfully created." % self.target_mapset_name
# Example #7 (scraped snippet separator)
    def _execute(self):
        """Compute the download cache size with /usr/bin/du and store a
        StorageModel in self.module_results.

        Raises:
            AsyncProcessError if the download cache directory is missing
        """
        self._setup()

        # Guard clause: fail early if the cache directory is missing
        if (not os.path.exists(self.user_download_cache_path)
                or not os.path.isdir(self.user_download_cache_path)):
            raise AsyncProcessError(
                "Download cache directory <%s> does not exist." %
                self.user_download_cache_path)

        executable = "/usr/bin/du"
        args = ["-sb", self.user_download_cache_path]

        self._run_process(
            Process(exec_type="exec",
                    executable=executable,
                    id="compute_download_cache_size",
                    executable_params=args))
        # du -sb prints "<bytes>\t<path>"; removed leftover debug print()
        dc_size = int(self.module_output_log[0]["stdout"].split("\t")[0])
        quota_size = int(self.config.DOWNLOAD_CACHE_QUOTA * 1024 * 1024 * 1024)

        model = StorageModel(
            used=dc_size,
            free=quota_size - dc_size,
            quota=quota_size,
            free_percent=int(100 * (quota_size - dc_size) / quota_size))
        self.module_results = model

        self.finish_message = "Download cache size successfully computed"
    def _execute(self):
        """Remove map layers of a given type with g.remove -f."""
        self._setup()

        args, layer_type = self.data
        self.required_mapsets.append(self.target_mapset_name)

        # Translate the g.list style request arguments into module inputs
        inputs = dict(extract_glist_parameters(args))
        inputs["type"] = layer_type

        pc = {"1": {"module": "g.remove", "inputs": inputs, "flags": "f"}}

        self.skip_region_check = True
        process_list = self._validate_process_chain(skip_permission_check=True,
                                                    process_chain=pc)
        self._create_temp_database(self.required_mapsets)
        self._check_lock_target_mapset()
        self._create_grass_environment(
            grass_data_base=self.temp_grass_data_base,
            mapset_name=self.target_mapset_name)

        self._execute_process_list(process_list)

        # g.remove only warns about missing maps, so inspect stderr
        stderr_output = "\n".join(self.module_output_log[0]["stderr"])
        if "WARNING: No data base element files found" in stderr_output:
            raise AsyncProcessError("<%s> layer not found" % layer_type)

        self.finish_message = "Successfully removed %s layers." % layer_type
    def _execute(self):
        """Delete a specific raster layer

        Use the original mapset for processing
        """
        self._setup()

        raster_name = self.map_name
        self.required_mapsets.append(self.target_mapset_name)

        pc = {
            "1": {
                "module": "g.remove",
                "inputs": {"type": "raster", "name": raster_name},
                "flags": "f",
            }
        }

        self.skip_region_check = True
        process_list = self._validate_process_chain(process_chain=pc,
                                                    skip_permission_check=True)
        self._check_lock_target_mapset()
        self._create_temp_database(self.required_mapsets)
        self._create_grass_environment(grass_data_base=self.temp_grass_data_base,
                                       mapset_name=self.target_mapset_name)

        self._execute_process_list(process_list)

        # g.remove only warns if the raster does not exist, so check stderr
        stderr_output = "\n".join(self.module_output_log[0]["stderr"])
        if "WARNING: No data base element files found" in stderr_output:
            raise AsyncProcessError("Raster layer <%s> not found" % raster_name)

        self.finish_message = "Raster layer <%s> successfully removed." % raster_name
    def _execute(self):
        """Lock the target mapset.

        Raises:
            AsyncProcessError if the target mapset does not exist
        """
        self._setup()
        self._check_lock_target_mapset()
        # self.target_mapset_exists is set by _check_lock_target_mapset()
        if self.target_mapset_exists is False:
            # Fixed typo in the original message ("Mapset doesn not exists.")
            raise AsyncProcessError(
                "Unable to lock mapset <%s>. Mapset does not exist."
                % self.target_mapset_name)

        self.finish_message = \
            "Mapset <%s> successfully locked" % self.target_mapset_name
# Example #11 (scraped snippet separator)
    def _stac_import(self, stac_collection_id=None, semantic_label=None,
                     interval=None, bbox=None, filter=None):
        """Build the r.in.gdal import processes for a filtered STAC search."""
        # The STAC plugin is required for searching and filtering
        if not has_plugin:
            raise AsyncProcessError("Actinia STAC plugin is not installed")

        # The collection id is expected to be dot separated with the
        # collection name at index 3
        try:
            stac_name = stac_collection_id.split(".")[3]
        except Exception:
            raise AsyncProcessError("The source has not the right structure")

        stac_root = self._get_search_root(stac_collection_id)
        stac_filtered = self._apply_filter(stac_root, stac_name,
                                           interval, bbox, filter)
        stac_result = self._get_filtered_bands(stac_filtered, semantic_label)

        # One import process per band asset URL
        stac_processes = []
        for band_key, assets in stac_result.items():
            for asset_id, asset_url in assets.items():
                output_name = stac_name + "_" + band_key + "_" + asset_id

                # Import the remote asset directly through GDAL's
                # virtual curl filesystem; -o overrides the projection
                # check
                exec_params = ["input=%s" % "/vsicurl/" + asset_url,
                               "output=%s" % output_name,
                               "-o"]

                stac_processes.append(Process(
                    exec_type="grass",
                    executable="r.in.gdal",
                    executable_params=exec_params,
                    id=f"r_gdal_{os.path.basename(output_name)}",
                    skip_permission_check=True
                ))

        return stac_processes
 def _execute(self):
     """Unlock the target mapset.

     Raises:
         AsyncProcessError if the mapset does not exist in the location
     """
     self._setup()
     self._check_target_mapset_exists()
     # self.target_mapset_exists is set by _check_target_mapset_exists()
     if self.target_mapset_exists is False:
         raise AsyncProcessError(
             ("Unable to unlock mapset <%s> in location <%s>:"
              " Mapset does not exist")
             % (self.mapset_name, self.location_name))
     else:
         self.lock_interface.unlock(self.target_mapset_lock_id)
         self.finish_message = \
             "Mapset <%s> successfully unlocked" % self.target_mapset_name
# Example #13 (scraped snippet separator)
    def _execute(self):
        """Create a new empty STRDS with t.create after checking that it
        does not exist yet."""
        self._setup()
        self.required_mapsets.append(self.target_mapset_name)

        # First chain: check whether the STRDS already exists
        list_chain = {
            "1": {
                "module": "t.list",
                "inputs": {
                    "type": "strds",
                    "where": "id = \'%s@%s\'" % (self.map_name,
                                                 self.target_mapset_name),
                },
            }
        }
        list_chain = self._validate_process_chain(skip_permission_check=True,
                                                  process_chain=list_chain)

        # Second chain: create the STRDS with the user supplied metadata
        create_inputs = {"type": "strds", "output": self.map_name}
        if self.request_data:
            for key in self.request_data:
                create_inputs[key] = self.request_data[key]
        create_chain = {"1": {"module": "t.create", "inputs": create_inputs}}
        create_chain = self._validate_process_chain(skip_permission_check=True,
                                                    process_chain=create_chain)

        self._create_temp_database()
        self._check_lock_target_mapset()

        self._create_grass_environment(
            grass_data_base=self.temp_grass_data_base,
            mapset_name=self.target_mapset_name)

        self._execute_process_list(list_chain)

        # t.list prints one id per line; any output means the STRDS exists
        if len(self.module_output_log[0]["stdout"].split("\n")[0]) > 0:
            raise AsyncProcessError("STRDS <%s> exists." % self.map_name)

        self._execute_process_list(create_chain)

        self.finish_message = "STRDS <%s> successfully created" % self.map_name
 def _execute(self):
     """Report the lock status of the target mapset.

     The lock state is stored in self.module_results.

     Raises:
         AsyncProcessError if the mapset does not exist in the location
     """
     self._setup()
     self._check_target_mapset_exists()
     # self.target_mapset_exists is set by _check_target_mapset_exists()
     if self.target_mapset_exists is False:
         raise AsyncProcessError(
             ("Unable to get lock status of mapset <%s> in location <%s>:"
              " Mapset does not exist")
             % (self.mapset_name, self.location_name))
     else:
         self.module_results = self.lock_interface.get(
             self.target_mapset_lock_id)
         self.finish_message = "Mapset lock state: %s" % str(
             self.module_results)
    def _execute(self):
        """Delete the target mapset and remove its lock.

        The PERMANENT mapset is protected and can not be deleted.

        Raises:
            AsyncProcessError if PERMANENT is targeted or the mapset is
            missing
        """
        self._setup()

        if "PERMANENT" == self.target_mapset_name:
            raise AsyncProcessError("The PERMANENT mapset can not be deleted. "
                                    "You must remove the location to get rid of it.")

        # Delete existing mapset
        self._check_lock_target_mapset()
        # The variable self.orig_mapset_path is set by _check_lock_target_mapset()
        if self.target_mapset_exists is True:
            shutil.rmtree(self.orig_mapset_path)
            self.lock_interface.unlock(self.target_mapset_lock_id)
            self.finish_message = \
                "Mapset <%s> successfully removed." % self.target_mapset_name
        else:
            # Fixed typo in the original message ("does not exits")
            raise AsyncProcessError("Mapset <%s> does not exist" %
                                    self.target_mapset_name)
# Example #16 (scraped snippet separator)
    def _merge_mapsets(self):
        """Merge mapsets in a target mapset

            - Check the target mapset and lock it for the maximum time
              a user can consume -> process_num_limit*process_time_limit
            - Check and lock all source mapsets with the same scheme
            - Copy each source mapset into the target mapset
            - Extend the locks each copy run
            - Cleanup and unlock the mapsets

        """
        # Lock the target mapset
        self._check_lock_mapset(self.target_mapset_name)
        # Lock the source mapsets
        self._check_lock_source_mapsets(self.request_data)

        step = 1
        steps = len(self.request_data)

        # Copy each mapset into the target
        for lock_id in self.lock_ids:
            # Check for termination requests
            if self.resource_logger.get_termination(
                    self.user_id, self.resource_id) is True:
                # Fixed typo in the original message ("setp")
                raise AsyncProcessTermination(
                    "Mapset merging was terminated "
                    "by user request at step %i of %i" % (step, steps))

            mapset_name = self.lock_ids[lock_id]

            # Extend every held lock by max processing time * 2 for each
            # copy run. BUGFIX: the original inner loop reused the name
            # "lock_id" and shadowed the outer loop variable.
            for extend_lock_id in self.lock_ids:
                ret = self.lock_interface.extend(
                    resource_id=extend_lock_id,
                    expiration=self.process_time_limit * 2)
                if ret == 0:
                    raise AsyncProcessError(
                        "Unable to extend lock for mapset <%s>" % mapset_name)

            message = "Step %i of %i: Copy content from source " \
                      "mapset <%s> into target mapset <%s>" % (step, steps, mapset_name,
                                                               self.target_mapset_name)
            self._send_resource_update(message)
            self.message_logger.info(message)

            # Copy the source mapset into the target mapset; removed the
            # write-only "mapsets_to_merge" list (never read)
            if mapset_name != self.target_mapset_name:
                step += 1
                self._merge_mapset_into_target(mapset_name, self.target_mapset_name)
    def _merge_mapset_into_target(self, source_mapset, target_mapset):
        """Link the source mapset content into the target mapset

        Attention: Not all directories and files in the mapset are copied.
            See list directories.

        Raises:
            AsyncProcessError if the hardlink copy fails
        """
        self.message_logger.info(
            "Copy source mapset <%s> content "
            "into the target mapset <%s>" % (source_mapset, target_mapset))

        # Raster, vector, group and space time data set directories/files
        directories = ["cell", "misc", "fcell",
                       "cats", "cellhd",
                       "cell_misc", "colr", "colr2",
                       "hist", "vector", "group", "tgis", "VAR"]

        for directory in directories:
            source_path = os.path.join(
                self.user_location_path, source_mapset, directory)
            target_path = os.path.join(self.user_location_path, target_mapset)

            # Skip directories that do not exist in the source mapset
            # (the original tested os.path.exists twice for each entry)
            if os.path.exists(source_path) is not True:
                continue

            if directory == "group":
                # Group REF files reference maps by mapset name
                self._change_mapsetname_in_group(
                    source_path, source_mapset, target_mapset)
            if directory == "tgis":
                # The temporal database stores mapset names as well
                target_tgis_db = None
                if os.path.isdir(os.path.join(target_path, 'tgis')):
                    target_tgis_db = os.path.join(target_path, 'tgis', 'sqlite.db')
                self._change_mapsetname_in_tgis(
                    source_path, source_mapset, target_mapset,
                    target_tgis_db)

            # Hardlink the sources into the target
            p = subprocess.Popen(["/bin/cp", "-flr",
                                  "%s" % source_path,
                                  "%s/." % target_path],
                                 stdout=subprocess.PIPE,
                                 stderr=subprocess.PIPE)
            (stdout_buff, stderr_buff) = p.communicate()
            if p.returncode != 0:
                raise AsyncProcessError(
                    "Unable to merge mapsets. Error in linking:"
                    " stdout: %s stderr: %s" % (stdout_buff, stderr_buff))
# Example #18 (scraped snippet separator)
    def get_stac_import_download_commands(self,
                                          stac_entry,
                                          config=None,
                                          temp_file_path=None,
                                          message_logger=None,
                                          send_resource_update=None):
        """Helper method to get the stac import and download commands.

        Args:
            stac_entry (dict): stac_entry of the import description list

        Returns:
            stac_commands: The stac download and import commands, or
            None when the entry carries no "extent" definition (original
            behavior preserved)

        Raises:
            AsyncProcessError if the extent lacks the spatial or
            temporal definition
        """
        # Check for band information
        # TODO check config, temp_file_path, message_logger, send_resource_update
        stac_entry_source = stac_entry["import_descr"]["source"]

        # Default all optional parameters so missing keys can not cause
        # an UnboundLocalError in the _stac_import() call below
        stac_semantic_label = None
        stac_extent = None
        stac_interval = None
        stac_filter = None

        if "semantic_label" in stac_entry["import_descr"]:
            stac_semantic_label = stac_entry["import_descr"]["semantic_label"]

        if "extent" in stac_entry["import_descr"]:
            extent = stac_entry["import_descr"]["extent"]
            # BUGFIX: the original condition
            #   if "spatial" and "temporal" not in extent:
            # parsed as ("spatial") and ("temporal" not in extent), so the
            # presence of "spatial" was never actually checked
            if "spatial" not in extent or "temporal" not in extent:
                raise AsyncProcessError("Unknown spatial or/and temporal parameters"
                                        "in the process chain definition")

            if "bbox" in extent["spatial"]:
                stac_extent = extent["spatial"]["bbox"][0]

            if "interval" in extent["temporal"]:
                stac_interval = extent["temporal"]["interval"][0]

            if "filter" in stac_entry["import_descr"]:
                stac_filter = stac_entry["import_descr"]["filter"]

            stac_command = \
                self._stac_import(
                    stac_collection_id=stac_entry_source,
                    semantic_label=stac_semantic_label,
                    interval=stac_interval,
                    bbox=stac_extent,
                    filter=stac_filter)
            return stac_command
    def _lock_temp_mapset(self):
        """Lock the temporary mapset

        This method sets in case of success:
        self.temp_mapset_lock_set = True

        Raises:
            AsyncProcessError if the temporary mapset is already locked
        """
        # Lock the temporary mapset for the time that the user can allocate at maximum
        ret = self.lock_interface.lock(
            resource_id=self.temp_mapset_lock_id,
            expiration=self.process_time_limit * self.process_num_limit)

        if ret == 0:
            # NOTE(review): both messages below report
            # self.target_mapset_name although the temporary mapset is
            # being locked here — confirm whether self.temp_mapset_name
            # was intended
            raise AsyncProcessError(
                "Unable to lock temporary mapset <%s>, "
                "resource is already locked" % self.target_mapset_name)
        self.message_logger.info("Mapset <%s> locked" % self.target_mapset_name)

        # if we manage to come here, the lock was correctly set
        self.temp_mapset_lock_set = True
# Example #20 (scraped snippet separator)
    def _check_lock_source_mapsets(self, source_mapsets):
        """Check and lock the source mapsets from the merging list

        Args:
            source_mapsets: A list of source mapsets that should be checked
                            and locked

        Raises:
            This method will raise an AsyncProcessError

        """
        # Expect a list of mapset names
        if len(source_mapsets) == 0:
            raise AsyncProcessError("Empty source mapset list.")

        # Check and lock the mapsets
        for mapset in source_mapsets:
            self._check_lock_mapset(mapset)
# Example #21 (scraped snippet separator)
    def _execute(self):
        """Remove and recreate the user download cache directory."""
        self._setup()

        cache_path = self.user_download_cache_path
        # Guard clause: fail early if the cache directory is missing
        if not (os.path.exists(cache_path) and os.path.isdir(cache_path)):
            raise AsyncProcessError(
                "Download cache directory <%s> does not exist." %
                cache_path)

        # Delete the whole cache directory with rm -rf ...
        self._run_process(
            Process(exec_type="exec",
                    executable="/bin/rm",
                    id="delete_download_cache_directory",
                    executable_params=["-rf", cache_path]))

        # ... and recreate it empty
        os.mkdir(cache_path)
        self.finish_message = "Download cache successfully removed."
    def _change_mapsetname_in_group(self, group_path, source_mapset, target_mapset):
        """Replaces the mapset name in the group file

        Args:
            group_path(str): path of the group folder in the source mapset
            source_mapset(str): name of source mapset
            target_mapset(str): name of target mapset

        Raises:
            This method will raise an AsyncProcessError if a group has no REF file
        """
        group_dirs = os.listdir(group_path)
        for group_dir in group_dirs:
            group_file = os.path.join(group_path, group_dir, "REF")
            if os.path.isfile(group_file):
                for line in fileinput.input(group_file, inplace=True):
                    print(line.replace(
                        source_mapset, target_mapset), end='')
            else:
                raise AsyncProcessError("group %s has no REF file"
                                        % (group_dir))
# Example #23 (scraped snippet separator)
    def _execute(self):
        """Create a new location with g.proj in a temporary database and
        move it into the user database."""
        new_location = self.location_name

        # Processing runs inside the default location; the new location
        # is created from there
        self.location_name = self.config.GRASS_DEFAULT_LOCATION

        self._setup()

        epsg_code = self.request_data["epsg"]

        self._create_temp_database()

        pc = {"1": {"module": "g.proj",
                    "inputs": {"epsg": epsg_code,
                               "location": new_location},
                    "flags": "t"}}

        process_list = self._validate_process_chain(process_chain=pc,
                                                    skip_permission_check=True)

        self._create_grass_environment(
            grass_data_base=self.temp_grass_data_base, mapset_name="PERMANENT")

        self._execute_process_list(process_list)

        # Move the freshly created location into the user database
        new_location_path = os.path.join(self.temp_grass_data_base, new_location)
        if not os.path.isdir(new_location_path):
            raise AsyncProcessError("Unable to create location <%s>" %
                                    new_location)
        shutil.move(new_location_path, self.grass_user_data_base)

        self.finish_message = "Location <%s> successfully created" % new_location
# Example #24 (scraped snippet separator)
    def _apply_filter(stac_root_search, stac_name, interval, bbox, filter):
        """POST a search request to a STAC endpoint and return the result.

        NOTE(review): there is no "self" parameter — this looks like it
        should carry a @staticmethod decorator (callers invoke it as
        self._apply_filter(...)); confirm against the original class.

        Args:
            stac_root_search: URL of the STAC search endpoint
            stac_name: collection id to search in
            interval: temporal interval for the search body
            bbox: bounding box for the search body
            filter: query filter for the search body (shadows the
                builtin "filter"; kept for interface compatibility)

        Raises:
            AsyncProcessError if the response contains no "features"
        """
        search_body = {
            "collections": [stac_name],
        }
        search_body["query"] = filter

        search_body["bbox"] = bbox

        search_body["interval"] = interval

        # NOTE(review): no timeout is set on this request; a stalled
        # STAC server will block indefinitely
        stac_search = requests.post(
            stac_root_search,
            json=search_body
        )

        full_filtered_result = stac_search.json()

        if "features" in full_filtered_result:
            return full_filtered_result
        else:
            raise AsyncProcessError(full_filtered_result)
# Example #25 (scraped snippet separator)
    def _execute(self, skip_permission_check=False):
        """Overwrite this function in subclasses

        Setup the user credentials, check the executable access and run the process
        """
        # Setup the user credentials and logger; no GRASS session needed
        self._setup(init_grass=False)

        # Check if the user has access to the required process executable
        resp = check_location_mapset_module_access(self.user_credentials,
                                                   self.config,
                                                   module_name=self.executable)
        if resp is not None:
            raise AsyncProcessError("Executable <%s> is not supported" %
                                    self.executable)

        # Build and run the exec process
        self._run_process(
            Process(exec_type="exec",
                    executable=self.executable,
                    executable_params=self.executable_params,
                    id=f"exec_{self.executable}",
                    stdin_source=None))
    def _lock_target_mapset(self):
        """Lock the target mapset

        Raises:
            AsyncProcessError

        """

        # Lock the mapset for the time that the user can allocate at maximum
        ret = self.lock_interface.lock(
            resource_id=self.target_mapset_lock_id,
            expiration=self.process_time_limit * self.process_num_limit)

        if ret == 0:
            raise AsyncProcessError(
                "Unable to lock location/mapset <%s/%s>, "
                "resource is already locked" % (self.location_name,
                                                self.target_mapset_name))
        self.message_logger.info(
            "location/mapset <%s/%s> locked" % (self.location_name,
                                                self.target_mapset_name))

        # if we manage to come here, the lock was correctly set
        self.target_mapset_lock_set = True
Beispiel #27
0
    def _execute(self):
        """Sample a space-time raster dataset (STRDS) at point coordinates.

        Reads the point list (and an optional "where" clause) from
        self.request_data, writes the points into a temporary file, builds
        a process chain that imports the points with v.in.ascii and samples
        the STRDS with t.rast.sample, executes the chain and stores the
        parsed sampling output in self.module_results.

        Raises:
            AsyncProcessError: if the coordinate list is empty or an entry
                does not consist of exactly (id, x, y)
        """
        self._setup()
        where = None

        # Points and where statement are stored in self.request_data
        strds_name = self.map_name
        points = self.request_data["points"]
        if "where" in self.request_data:
            where = self.request_data["where"]

        # An empty or missing coordinate list is a client error
        if not points:
            raise AsyncProcessError("Empty coordinate list")

        # delete=True: the files vanish when closed at the end of sampling
        point_file = tempfile.NamedTemporaryFile(dir=self.temp_file_path,
                                                 delete=True)
        result_file = tempfile.NamedTemporaryFile(dir=self.temp_file_path,
                                                  delete=True)

        # Write the points as "id|x|y" rows for v.in.ascii
        for coords in points:
            if len(coords) != 3:
                raise AsyncProcessError("Wrong number of coordinate entries")

            point_id, x, y = coords
            row = "%s|%s|%s\n" % (point_id, x, y)
            point_file.write(row.encode())

        point_file.flush()

        pc = dict()
        pc["1"] = {
            "module": "v.in.ascii",
            "inputs": {
                "input": point_file.name,
                "format": "point",
                "column": "id text, x double precision, y double precision",
                "x": 2,
                "y": 3
            },
            "outputs": {
                "output": {
                    "name": "input_points"
                }
            }
        }

        pc["2"] = {
            "module": "t.rast.sample",
            "inputs": {
                "strds": "%s@%s" % (strds_name, self.mapset_name),
                "points": "input_points",
                "column": "id"
            },
            "outputs": {
                "output": {
                    "name": result_file.name
                }
            },
            "flags": "rn",
            "overwrite": True,
            "verbose": True
        }

        if where is not None:
            pc["2"]["inputs"]["where"] = where

        self.request_data = pc

        # Run the process chain
        EphemeralProcessing._execute(self, skip_permission_check=True)

        # Parse the sampling result; close the file handle properly
        with open(result_file.name, "r") as result:
            output_list = [line.strip().split("|") for line in result]

        self.module_results = output_list

        point_file.close()
        result_file.close()
Beispiel #28
0
    def _execute(self):
        """Create a specific vector layer

        This approach is complex, since the vector generation is performed in a local
        temporary mapset that is later merged into the target mapset. Workflow:

        1. Check the process chain
        2. Lock the temp and target mapsets
        3. Setup GRASS and create the temporary mapset
        4. Execute g.list of the first process chain to check if the target
           vector exists
        5. If the target vector does not exist then run v.import
        6. Copy the local temporary mapset to the storage and merge it into the
           target mapset
        """
        self._setup()

        vector_name = self.map_name
        self.required_mapsets.append(self.target_mapset_name)

        # First chain: list vectors matching the requested name in the target
        list_chain = {
            "1": {
                "module": "g.list",
                "inputs": {
                    "type": "vector",
                    "pattern": vector_name,
                    "mapset": self.target_mapset_name
                }
            }
        }
        self.skip_region_check = True
        list_chain = self._validate_process_chain(skip_permission_check=True,
                                                  process_chain=list_chain)

        # Second chain: import the uploaded file as the new vector layer
        import_chain = {
            "1": {
                "module": "v.import",
                "inputs": {
                    "input": self.rdc.request_data
                },
                "outputs": {
                    "output": {
                        "name": vector_name
                    }
                }
            }
        }
        self.skip_region_check = True
        import_chain = self._validate_process_chain(skip_permission_check=True,
                                                    process_chain=import_chain)

        self._check_lock_target_mapset()
        self._lock_temp_mapset()
        self._create_temporary_grass_environment(
            source_mapset_name=self.target_mapset_name)
        self._execute_process_list(list_chain)

        # Abort if a vector with the requested name already exists
        existing_vectors = self.module_output_log[0]["stdout"].split("\n")
        if len(existing_vectors[0]) > 0:
            raise AsyncProcessError("Vector layer <%s> exists." % vector_name)

        self._execute_process_list(import_chain)
        self._copy_merge_tmp_mapset_to_target_mapset()

        # Delete the imported file; failing to remove it is only a warning
        msg = ""
        try:
            if self.rdc.request_data.endswith('.shp'):
                rmtree(os.path.dirname(self.rdc.request_data),
                       ignore_errors=True)
            else:
                os.remove(self.rdc.request_data)
        except Exception:
            msg = " WARNING: Uploaded file cannot be removed."

        self.finish_message = (f"Vector layer <{vector_name}> successfully "
                               f"imported.{msg}")
    def _copy_merge_tmp_mapset_to_target_mapset(self):
        """Copy the temporary mapset into the original location

        In case the mapset does not exists, then use the target mapset name,
        otherwise use the temporary mapset name for copying which is later on
        merged into the target mapset and then removed

        Raises:
            AsyncProcessError: if a lock cannot be extended or the copy fails
        """

        # Extend the mapset lock for an hour, since copying can take long
        if self.target_mapset_lock_set is True:
            ret = self.lock_interface.extend(resource_id=self.target_mapset_lock_id,
                                             expiration=3600)
            if ret == 0:
                raise AsyncProcessError("Unable to extend lock for mapset "
                                        "<%s>" % self.target_mapset_name)

        if self.temp_mapset_lock_set is True:
            ret = self.lock_interface.extend(resource_id=self.temp_mapset_lock_id,
                                             expiration=3600)
            if ret == 0:
                raise AsyncProcessError("Unable to extend lock for "
                                        "temporary mapset <%s>" % self.temp_mapset_name)

        self.message_logger.info(
            "Copy temporary mapset from %s to %s" % (
                self.temp_mapset_path, os.path.join(
                    self.user_location_path, self.target_mapset_name)))

        source_path = self.temp_mapset_path

        # In case the mapset does not exists, then use the target mapset name,
        # otherwise use the temporary mapset name for copying which is later
        # on merged into the target mapset and then removed
        if self.target_mapset_exists is True:
            target_path = self.user_location_path + "/."
            message = "Copy temporary mapset <%s> to target location " \
                      "<%s>" % (self.temp_mapset_name, self.location_name)
        else:
            target_path = os.path.join(self.user_location_path, self.target_mapset_name)
            message = "Copy temporary mapset <%s> to target location " \
                      "<%s>" % (self.target_mapset_name, self.location_name)

        self._send_resource_update(message)

        try:
            p = subprocess.Popen(["/bin/cp", "-fr",
                                  "%s" % source_path,
                                  "%s" % target_path],
                                 stdout=subprocess.PIPE,
                                 stderr=subprocess.PIPE)
            (stdout_buff, stderr_buff) = p.communicate()
        except Exception as e:
            raise AsyncProcessError("Unable to copy temporary mapset to "
                                    "original location. Exception %s"
                                    % str(e)) from e

        # BUGFIX: the returncode check used to sit inside the try block, so
        # its AsyncProcessError was caught by the generic except clause and
        # re-wrapped with a misleading "Exception ..." message. Checking here
        # keeps the detailed copy error intact.
        if p.returncode != 0:
            raise AsyncProcessError(
                "Unable to copy temporary mapset to "
                "original location. Copy error "
                "stdout: %s stderr: %s returncode: %i" % (stdout_buff,
                                                          stderr_buff,
                                                          p.returncode))

        # Merge the temp mapset into the target mapset in case the target already exists
        if self.target_mapset_exists is True:
            self._merge_mapset_into_target(
                self.temp_mapset_name, self.target_mapset_name)
            shutil.rmtree(os.path.join(self.user_location_path, self.temp_mapset_name))
            # remove interim results
            if self.interim_result.saving_interim_results is True:
                interim_dir = os.path.join(
                    self.interim_result.user_resource_interim_storage_path,
                    self.resource_id)
                self.message_logger.info(
                    "Remove interim results %s" % interim_dir)
                if os.path.isdir(interim_dir):
                    shutil.rmtree(interim_dir)
    def _check_mapset(self, mapset):
        """Check if the target mapset exists

        This method will check if the target mapset exists in the global and
        user group locations.
        If the mapset is in the global database, then an AsyncProcessError
        will be raised, since global location/mapsets can not be modified.

        This method sets in case of success:

            self.target_mapset_lock_set = True
            self.tmp_mapset_lock_set = True

        Raises:
            AsyncProcessError

        """
        mapset_exists = False

        # Check if the global location is accessible and that the target mapset
        # does not exist
        if self.is_global_database is True:
            # Break if the target mapset exists in the global database
            if os.path.exists(self.global_location_path) and \
                    os.path.isdir(self.global_location_path) and \
                    os.access(
                        self.global_location_path,
                        os.R_OK | os.X_OK | os.W_OK) is True:
                self.orig_mapset_path = os.path.join(self.global_location_path, mapset)

                if os.path.exists(self.orig_mapset_path) is True:
                    if os.access(
                            self.orig_mapset_path, os.R_OK | os.X_OK | os.W_OK) is True:
                        raise AsyncProcessError(
                            "Mapset <%s> exists in the global "
                            "dataset and can not be modified." % mapset)
            else:
                raise AsyncProcessError(
                    "Unable to access global location <%s>" % self.location_name)

        # Always check if the target mapset already exists and set the flag accordingly
        if (os.path.exists(self.user_location_path)
                and os.path.isdir(self.user_location_path)
                and os.access(
                    self.user_location_path, os.R_OK | os.X_OK | os.W_OK) is True):

            self.orig_mapset_path = os.path.join(self.user_location_path, mapset)

            if os.path.exists(self.orig_mapset_path) is True:
                if os.access(
                        self.orig_mapset_path, os.R_OK | os.X_OK | os.W_OK) is True:
                    mapset_exists = True
                    # Add the existing mapset to the required ones for mapset
                    # search path settings
                    self.required_mapsets.append(mapset)
                else:
                    raise AsyncProcessError("Unable to access mapset <%s> "
                                            "path %s" % (mapset,
                                                         self.orig_mapset_path))
            else:
                mapset_exists = False
        else:
            raise AsyncProcessError(
                "Unable to access user location <%s>" % self.location_name)

        return mapset_exists