Ejemplo n.º 1
0
def setup_environment():
    """Prepare the test configuration and, if needed, spawn a redis server.

    Configures the test redis connection and the GRASS GIS installation
    paths on the global config. Outside of server tests, either a
    throw-away redis server is started (no custom config given) or the
    supplied custom actinia configuration file is read instead.
    """
    global redis_pid

    # Local test redis server settings
    global_config.REDIS_SERVER_SERVER = "localhost"
    global_config.REDIS_SERVER_PORT = 7000
    # Set the path to redis WORKER_LOGFILE
    # global_config.WORKER_LOGFILE = "/var/log/redis/redis"

    # GRASS GIS test environment paths
    global_config.GRASS_GIS_BASE = "/usr/local/grass78/"
    global_config.GRASS_GIS_START_SCRIPT = "/usr/local/bin/grass78"
    # global_config.GRASS_DATABASE= "/usr/local/grass_test_db"
    global_config.GRASS_TMP_DATABASE = "/tmp"

    if server_test is False:
        if custom_actinia_cfg is False:
            # Start the redis server used for user and logging management
            redis_pid = os.spawnl(
                os.P_NOWAIT, "/usr/bin/redis-server",
                "common/redis.conf",
                "--port %i" % global_config.REDIS_SERVER_PORT)
            time.sleep(1)
        else:
            # A custom config file overrides the in-place settings above
            global_config.read(custom_actinia_cfg)
Ejemplo n.º 2
0
def setup_environment():
    """Prepare the test configuration and start a local redis server.

    When a custom actinia configuration file is supplied (docker setup),
    only that file is read. Otherwise the test settings are assembled
    in-place and a throw-away redis server is spawned.
    """
    # Docker setup: the config file carries all required settings
    if custom_actinia_cfg is not False:
        global_config.read(custom_actinia_cfg)
        return

    global redis_pid

    # Local test redis server settings
    global_config.REDIS_SERVER_SERVER = "localhost"
    global_config.REDIS_SERVER_PORT = 7000

    # GRASS GIS test environment paths
    global_config.GRASS_GIS_BASE = "/usr/local/grass78/"
    global_config.GRASS_GIS_START_SCRIPT = "/usr/local/bin/grass78"
    # global_config.GRASS_DATABASE= "/usr/local/grass_test_db"

    # Start the redis server used for user and logging management
    redis_pid = os.spawnl(
        os.P_NOWAIT, "/usr/bin/redis-server",
        "common/redis.conf",
        "--port %i" % global_config.REDIS_SERVER_PORT)
    time.sleep(1)
Ejemplo n.º 3
0
    def tearDownClass(cls):
        """Restore the configuration that was active before the tests."""
        interim_was_enabled = cls.save_interim_results_value
        if interim_was_enabled is False:
            # The test run switched interim saving on via a temporary
            # config file: write 'False' back and drop the temp file.
            cls.save_config(cls.tmp_cfg_file, cls.cfg_file, 'False')
            os.remove(cls.tmp_cfg_file)
        # Re-read the original configuration file
        global_config.read(cls.cfg_file)

        super(JobResumptionProcessingTestCase, cls).tearDownClass()
Ejemplo n.º 4
0
    def setUpClass(cls):
        """Configure the redis connection and connect the redis interface.

        Reads the custom actinia configuration file when one is supplied
        (its content is printed for debugging); otherwise the local test
        redis server settings are used. The redis password is appended to
        the connection arguments only when one is configured.
        """
        if custom_actinia_cfg is not False:
            global_config.read(custom_actinia_cfg)
            print(global_config)
        else:
            global_config.REDIS_SERVER_URL = "localhost"
            global_config.REDIS_SERVER_PORT = 7000

        args = (global_config.REDIS_SERVER_URL,
                global_config.REDIS_SERVER_PORT)
        # Append the password only when set. The former check
        # "PW and PW is not None" was redundant: a truthy value is
        # never None, so the truthiness test alone is sufficient.
        if global_config.REDIS_SERVER_PW:
            args = (*args, global_config.REDIS_SERVER_PW)

        redis_interface.connect(*args)
Ejemplo n.º 5
0
 def __init__(self, user_id, resource_id, iteration):
     """Init method for InterimResult class

     Reads the default configuration and prepares the per-user storage
     path for interim results.

     Args:
         user_id (str): The unique user name/id
         resource_id (str): The id of the resource
         iteration (int): The iteration of the resource; falls back to 1
                          when None is given
     """
     # NOTE(review): re-reads the config on every instantiation —
     # presumably to pick up SAVE_INTERIM_RESULTS changes; confirm.
     global_config.read(DEFAULT_CONFIG_PATH)
     self.logger = MessageLogger()
     # Interim results live under <GRASS_RESOURCE_DIR>/<user_id>/interim
     self.user_resource_interim_storage_path = os.path.join(
         global_config.GRASS_RESOURCE_DIR, user_id, "interim")
     self.saving_interim_results = global_config.SAVE_INTERIM_RESULTS
     self.resource_id = resource_id
     self.iteration = iteration if iteration is not None else 1
     # Step count of the previous iteration; set later by the caller
     self.old_pc_step = None
Ejemplo n.º 6
0
Logging interface
"""

import logging
import logging.handlers
import sys
import platform
from datetime import datetime

from pythonjsonlogger import jsonlogger

from actinia_core.core.common.config import global_config

# unfortunately, the config is read twice because of the read call here but it
# is needed to load the correct interface and log level at this time
global_config.read()

# Import the colored formatter only when the stdout or the file log format
# requests it, so colorlog stays an optional dependency.
if "colored" in [
        global_config.LOG_STDOUT_FORMAT, global_config.LOG_FILE_FORMAT
]:
    from colorlog import ColoredFormatter

# Likewise, the fluent handler is only needed for the fluentd log interface.
if global_config.LOG_INTERFACE == "fluentd":
    from fluent import handler

__license__ = "GPLv3"
__author__ = "Sören Gebbert, Carmen Tawalika"
__copyright__ = "Copyright 2016-present, Sören Gebbert and mundialis GmbH & Co. KG"


class BasicLogger(object):
Ejemplo n.º 7
0
from actinia_core.core.common.app import flask_app
from actinia_core.core.common.config import global_config, DEFAULT_CONFIG_PATH
from actinia_core.core.common.redis_interface import connect, create_job_queues
from actinia_core.core.common.process_queue import create_process_queue

__license__ = "GPLv3"
__author__ = "Sören Gebbert"
__copyright__ = "Copyright 2016-2018, Sören Gebbert and mundialis GmbH & Co. KG"
__maintainer__ = "Sören Gebbert"
__email__ = "*****@*****.**"

# if os.environ.get('DEFAULT_CONFIG_PATH'):
#     DEFAULT_CONFIG_PATH = os.environ['DEFAULT_CONFIG_PATH']
# Read the actinia configuration file only if it actually exists;
# otherwise the built-in defaults of global_config remain in effect.
if os.path.exists(DEFAULT_CONFIG_PATH) is True and os.path.isfile(
        DEFAULT_CONFIG_PATH):
    global_config.read(DEFAULT_CONFIG_PATH)

# Create the endpoints based on the global config
create_endpoints()
init_versions()

# TODO: Implement a better error handler
# @flask_app.errorhandler(InvalidUsage)
# def handle_invalid_usage(error):
#    response = error.to_json()
#    response.status_code = error.status_code
#    return response

# Connect the redis interfaces
redis_args = (global_config.REDIS_SERVER_URL, global_config.REDIS_SERVER_PORT)
if global_config.REDIS_SERVER_PW and global_config.REDIS_SERVER_PW is not None:
Ejemplo n.º 8
0
    def put(self, user_id, resource_id):
        """Updates/Resumes the status of a resource.

        Resumption requires interim results to be enabled in the
        configuration. The successfully finished process-chain steps of
        all previous iterations are summed up, a new iteration is started
        and the job is enqueued.

        Args:
            user_id (str): The unique user name/id
            resource_id (str): The id of the resource

        Returns:
            flask.Response: JSON response carrying either the resumption
            status or an error message
        """
        global_config.read(DEFAULT_CONFIG_PATH)
        if global_config.SAVE_INTERIM_RESULTS is not True:
            return make_response(
                jsonify(
                    SimpleResponseModel(
                        status="error",
                        # fixed typo: "configureation" -> "configuration"
                        message=
                        "Interim results are not set in the configuration")),
                404)

        ret = self.check_permissions(user_id=user_id)
        if ret:
            return ret

        # check if latest iteration is found
        old_iteration, response_data = self.resource_logger.get_latest_iteration(
            user_id, resource_id)
        old_iteration = 1 if old_iteration is None else old_iteration
        if response_data is None:
            return make_response(
                jsonify(
                    SimpleResponseModel(status="error",
                                        message="Resource does not exist")),
                400)

        # check if a new iteration is possible
        _, response_model = pickle.loads(response_data)
        err_msg = self._check_possibility_of_new_iteration(
            response_model, user_id, resource_id)
        if err_msg is not None:
            return make_response(
                jsonify(SimpleResponseModel(status="error", message=err_msg)),
                404)

        # Sum up the finished steps of all previous iterations.
        # (Loop variable renamed from "iter", which shadowed the builtin.)
        pc_step = response_model['progress']['step'] - 1
        for prev_iter in range(old_iteration - 1, 0, -1):
            if prev_iter == 1:
                old_response_data = self.resource_logger.get(
                    user_id, resource_id)
            else:
                old_response_data = self.resource_logger.get(
                    user_id, resource_id, prev_iter)
            if old_response_data is None:
                # NOTE(review): returning None from a flask view produces a
                # server error; presumably unreachable here — confirm.
                return None
            _, old_response_model = pickle.loads(old_response_data)
            pc_step += old_response_model['progress']['step'] - 1

        # start new iteration
        iteration = old_iteration + 1

        # use post_url if iteration > 1
        if old_iteration and old_iteration == 1:
            post_url = response_model['api_info']['request_url']
        elif old_iteration and old_iteration > 1:
            post_url = response_model['api_info']['post_url']
        else:
            post_url = None

        rdc, processing_resource, start_job = \
            self._create_ResourceDataContainer_for_resumption(
                post_url, pc_step, user_id, resource_id, iteration)

        # enqueue job
        if rdc:
            enqueue_job(processing_resource.job_timeout, start_job, rdc)
        html_code, response_model = pickle.loads(
            processing_resource.response_data)
        return make_response(jsonify(response_model), html_code)
Ejemplo n.º 9
0
    def setUpClass(cls):
        """Set up the redis interface, process queue and test users.

        Outside of server tests, either in-place test settings are applied
        (no custom config file) or the custom actinia configuration file is
        read. Afterwards the redis interface is connected, the process
        queue is created, and four users (guest, user, admin, superadmin)
        are created with access to the test datasets.
        """
        if cls.server_test is False and cls.custom_actinia_cfg is False:
            global_config.REDIS_SERVER_SERVER = "localhost"
            global_config.REDIS_SERVER_PORT = 7000
            global_config.GRASS_RESOURCE_DIR = "/tmp"
            global_config.DOWNLOAD_CACHE = "/tmp/download_cache"
            global_config.REDIS_QUEUE_SERVER_URL = "localhost"
            global_config.REDIS_QUEUE_SERVER_PORT = 6379
            global_config.NUMBER_OF_WORKERS = 3

        # If the custom_actinia_cfg variable is set, then the actinia config
        # file will be read to configure Redis queue
        if cls.server_test is False and cls.custom_actinia_cfg is not False:
            global_config.read(cls.custom_actinia_cfg)

        # Start the redis interface; append the password only when one is
        # configured. (The former "PW and PW is not None" check was
        # redundant: a truthy value is never None.)
        redis_args = (global_config.REDIS_SERVER_URL,
                      global_config.REDIS_SERVER_PORT)
        if global_config.REDIS_SERVER_PW:
            redis_args = (*redis_args, global_config.REDIS_SERVER_PW)

        redis_interface.connect(*redis_args)

        # Process queue
        create_process_queue(config=global_config)

        # Datasets/mapsets the test users may access
        accessible_datasets = {
            "nc_spm_08":
            ["PERMANENT", "user1", "landsat", "modis_lst", "test_mapset"],
            "ECAD": ["PERMANENT"],
            "latlong_wgs84": ["PERMANENT"]
        }

        # Create one user for each role: guest, user, admin, superadmin
        cls.guest_id, cls.guest_group, cls.guest_auth_header = cls.create_user(
            name="guest",
            role="guest",
            process_num_limit=3,
            process_time_limit=2,
            accessible_datasets=accessible_datasets)
        cls.user_id, cls.user_group, cls.user_auth_header = cls.create_user(
            name="user",
            role="user",
            process_num_limit=3,
            process_time_limit=4,
            accessible_datasets=accessible_datasets)
        cls.admin_id, cls.admin_group, cls.admin_auth_header = cls.create_user(
            name="admin",
            role="admin",
            accessible_datasets=accessible_datasets)
        cls.root_id, cls.root_group, cls.root_auth_header = cls.create_user(
            name="superadmin",
            role="superadmin",
            accessible_datasets=accessible_datasets)