def _execute(self, location_name, mapset_name, strds_name, timestamp):
        """Prepare and enqueue the raster area statistics

        Raises:
            InvalidUsage: In case the timestamp is wrong or the XML content is missing

        """
        # Check the time stamp
        try:
            datetime.strptime(timestamp, "%Y-%m-%dT%H:%M:%S")
        except ValueError as e:
            msg = "Wrong timestamp format. Required format is: " \
                  "YYYY-MM-DDTHH:MM:SS for example 2001-03-16T12:30:15"
            self.create_error_response(message=msg)
            return False

        rdc = self.preprocess(has_json=True,
                              has_xml=False,
                              location_name=location_name,
                              mapset_name=mapset_name,
                              map_name=strds_name)
        if rdc:
            rdc.set_user_data(timestamp)
            enqueue_job(self.job_timeout, start_job, rdc)
            return True

        return False
# Example 2
def preprocess_build_pc_and_enqueue(self, preprocess_kwargs, start_job):
    """Translate the incoming process chain and enqueue it.

    This method looks up the lists of GRASS GIS and actinia modules to
    parse the incoming process chain. If an actinia-module is found, it
    is translated to a process chain via the stored template. The
    process chain is then passed to actinia-core.

    On any invalid module the error is written to the resource logger
    and the method returns early without enqueueing.
    """
    # Get GRASS and actinia module lists; sets give O(1) membership
    # tests inside the per-module loop below.
    module_list = createModuleList(self)
    pc_list = createProcessChainTemplateList()
    grass_module_names = {module['id'] for module in module_list}
    actinia_module_names = {module['id'] for module in pc_list}

    # run preprocess again after createModuleList
    rdc = self.preprocess(**preprocess_kwargs)
    if not rdc:
        return

    rdc.set_storage_model_to_file()

    new_pc = []
    for module in rdc.request_data['list']:
        if "module" not in module:
            new_pc.append(module)
            continue
        name = module["module"]
        if name in ("importer", "exporter"):
            new_pc.append(module)
        elif name in grass_module_names:
            new_pc.append(module)
        elif name in actinia_module_names:
            module_pc = fillTemplateFromProcessChain(module)
            if isinstance(module_pc, str):
                # A string return value is the name of a missing
                # attribute in the template.
                msg = ("Required parameter '%s' missing in actinia-module "
                       "'%s'." % (module_pc, name))
                log_error_to_resource_logger(self, msg, rdc)
                return
            elif module_pc is None:
                msg = "Invalid request for %s" % (name)
                log_error_to_resource_logger(self, msg, rdc)
                return
            else:
                new_pc.extend(module_pc)
        else:
            msg = ("Module %s is not of type importer, exporter, "
                   "grass-module or an actinia-module." % name)
            log_error_to_resource_logger(self, msg, rdc)
            return

    rdc.request_data['list'] = new_pc

    enqueue_job(self.job_timeout, start_job, rdc)
    def _execute(self, location_name, mapset_name, raster_name):
        """Prepare the raster request and enqueue the asynchronous job.

        Returns the resource data container produced by preprocess; the
        job is only enqueued when preprocessing succeeded.
        """
        resource_container = self.preprocess(
            has_json=True,
            has_xml=False,
            location_name=location_name,
            mapset_name=mapset_name,
            map_name=raster_name)

        if resource_container:
            enqueue_job(self.job_timeout, start_job, resource_container)

        return resource_container
    def post(self, product_id):
        """Compute the NDVI of an arbitrary Sentinel 2A scene.

        The results are stored in the Google Cloud Storage.
        """
        rdc = self.preprocess(has_json=False, location_name="sentinel2")
        rdc.set_user_data(product_id)
        rdc.set_storage_model_to_gcs()
        enqueue_job(self.job_timeout, start_job, rdc)

        # The worker stored (status code, response model) as a pickle.
        status_code, model = pickle.loads(self.response_data)
        return make_response(jsonify(model), status_code)
# Example 5
    def post(self, location_name):
        """Execute a user defined process chain in an ephemeral
        location/mapset and store the processing results for download.

        actinia-modules found in the chain are expanded via their stored
        templates before the chain is enqueued. Invalid or unknown
        modules yield an HTTP 409 error response.
        """
        # get grass and actinia module lists
        module_list = createModuleList(self)
        pc_list = createProcessChainTemplateList()
        # sets give O(1) membership tests in the loop below
        grass_module_list = {module['id'] for module in module_list}
        actinia_module_list = {module['id'] for module in pc_list}

        rdc = self.preprocess(has_json=True, location_name=location_name)

        if rdc:
            rdc.set_storage_model_to_file()

            new_pc = []
            for module in rdc.request_data['list']:
                if "module" in module:
                    name = module["module"]
                    if name in ("importer", "exporter"):
                        new_pc.append(module)
                    elif name in grass_module_list:
                        new_pc.append(module)
                    elif name in actinia_module_list:
                        module_pc = fillTemplateFromProcessChain(module)
                        if isinstance(module_pc, str):
                            # a string return value names a missing
                            # attribute in the template
                            msg = ("Required parameter '%s' missing in "
                                   "actinia-module '%s'."
                                   % (module_pc, name))
                            return make_response(
                                jsonify(
                                    SimpleResponseModel(status="error",
                                                        message=msg)), 409)
                        elif module_pc is None:
                            msg = "Invalid request for %s" % (name)
                            return make_response(
                                jsonify(
                                    SimpleResponseModel(status="error",
                                                        message=msg)), 409)
                        else:
                            new_pc.extend(module_pc)
                    else:
                        msg = "Module %s is not of type importer, exporter, grass-module or an actinia-module." % name
                        return make_response(
                            jsonify(
                                SimpleResponseModel(status="error",
                                                    message=msg)), 409)
                else:
                    new_pc.append(module)

            rdc.request_data['list'] = new_pc

            enqueue_job(self.job_timeout, start_job, rdc)

        html_code, response_model = pickle.loads(self.response_data)
        return make_response(jsonify(response_model), html_code)
# Example 6
    def post(self, location_name, mapset_name):
        """Import Landsat scenes into a new mapset.

        Downloads the requested Landsat scenes, imports them into a new
        mapset and creates a space time dataset for each imported band.

        Args:
            location_name (str): The name of the location
            mapset_name (str): The name of the mapset that should be created

        The process arguments must be provided as JSON document in the
        POST request::

              {"strds":"Landsat_4_1983_09_01_01_30_00",
               "atcor_method": "TOAR",
               "scene_ids":["LM41130251983244HAJ00",
                            "LM41130271983244FFF03",
                            "LM41130261983244FFF03",
                            "LM41130241983244HAJ00"]}

        Returns:
            flask.Response:
            The HTTP status and a JSON document that includes the status
            URL of the task that must be polled for updates, e.g.::

                {
                  "HTTP code": 200,
                  "Messages": "Resource accepted",
                  "Resource id": "resource_id-985164c9-1db9-49cf-b2c4-3e8e48500e31",
                  "Status": "accepted",
                  "URLs": {
                    "Resources": [],
                    "Status": "http://104.155.60.87/status/soeren/resource_id-985164c9-1db9-49cf-b2c4-3e8e48500e31"
                  },
                  "User id": "soeren"
                }
        """
        # Preprocess the post call
        resource_container = self.preprocess(has_json=True,
                                             location_name=location_name,
                                             mapset_name=mapset_name)

        # Hand the job over to the Redis-backed worker queue.
        enqueue_job(self.job_timeout, start_job, resource_container)

        status_code, model = pickle.loads(self.response_data)
        return make_response(jsonify(model), status_code)
    def post(self, location_name, mapset_name):
        """Import Sentinel2A scenes into a new mapset.

        Downloads the requested Sentinel2A scenes, imports them into a
        new mapset and creates a space-time raster dataset for each
        imported band.

        Args:
            location_name (str): The name of the location
            mapset_name (str): The name of the mapset that should be created

        The process arguments must be provided as JSON document in the
        POST request::

              {"bands":["B04","B08"],
               "strds":["Sentinel_B04", "Sentinel_b08"],
               "product_ids":["S2A_MSIL1C_20170212T104141_N0204_R008_T31TGJ_20170212T104138",
                              "S2A_MSIL1C_20170227T095021_N0204_R079_T34TBM_20170227T095613",
                              "S2A_MSIL1C_20170202T104241_N0204_R008_T32UNE_20170202T104236"]}

        Returns:
            flask.Response:
            The HTTP status and a JSON document that includes the status
            URL of the task that must be polled for updates, e.g.::

                {
                  "HTTP code": 200,
                  "Messages": "Resource accepted",
                  "Resource id": "resource_id-985164c9-1db9-49cf-b2c4-3e8e48500e31",
                  "Status": "accepted",
                  "URLs": {
                    "Resources": [],
                    "Status": "http://104.155.60.87/status/soeren/resource_id-985164c9-1db9-49cf-b2c4-3e8e48500e31"
                  },
                  "User id": "soeren"
                }
        """
        # Preprocess the post call
        resource_container = self.preprocess(has_json=True,
                                             location_name=location_name,
                                             mapset_name=mapset_name)

        # Hand the job over to the Redis-backed worker queue.
        enqueue_job(self.job_timeout, start_job, resource_container)

        status_code, model = pickle.loads(self.response_data)
        return make_response(jsonify(model), status_code)
# Example 8
    def post(self, landsat_id, atcor_method, processing_method):
        """Compute a vegetation index from an atmospherically corrected
        Landsat scene.

        A single Landsat scene is downloaded with all bands and imported
        into a temporary GRASS location. A TOAR or DOS4/1 atmospheric
        correction is applied, depending on the user's choice. The
        imported scenes are then processed via i.vi; the result is
        analyzed with r.univar and rendered via d.rast and d.legend.
        The preview image and the resulting index raster map are stored
        in the download location, which may be:
            - local node storage
            - NFS/GlusterFS storage
            - Amazon S3 storage
            - Google Cloud Storage
        """
        supported_sensors = ("LT04", "LT05", "LE07", "LC08")
        supported_atcor = ("TOAR", "DOS1", "DOS4")
        supported_methods = ("NDVI", "ARVI", "DVI", "EVI", "EVI2", "GVI",
                             "GARI", "GEMI", "IPVI", "PVI", "SR", "VARI",
                             "WDVI")

        # Validate all three path parameters before doing any work.
        sensor_id = extract_sensor_id_from_scene_id(landsat_id)
        if sensor_id not in supported_sensors:
            msg = ("Wrong scene name. Available sensors are: %s"
                   % ",".join(supported_sensors))
            return self.get_error_response(message=msg)

        if atcor_method not in supported_atcor:
            msg = ("Wrong atmospheric correction name. Available "
                   "atmospheric corrections are: %s"
                   % ",".join(supported_atcor))
            return self.get_error_response(message=msg)

        if processing_method not in supported_methods:
            msg = ("Wrong processing method name. Available methods are: %s"
                   % ",".join(supported_methods))
            return self.get_error_response(message=msg)

        # Preprocess the post call and attach the job parameters.
        rdc = self.preprocess(has_json=False, location_name="Landsat")
        rdc.set_user_data((landsat_id, atcor_method, processing_method))

        # Hand the job over to the Redis-backed worker queue.
        enqueue_job(self.job_timeout, start_job, rdc)

        status_code, model = pickle.loads(self.response_data)
        return make_response(jsonify(model), status_code)