Example #1
    @with_tempdir
    def test_create_host_pipe_dir_with_real_dir(self, temp_dir):
        runtime_paths = RunTimePaths('account', {'host_root': temp_dir})
        runtime_paths.create_host_pipe_dir()
        path = runtime_paths.host_pipe_dir
        self.assertTrue(os.path.exists(path))
        self.assertTrue(os.path.isdir(path))
        permission = oct(os.stat(path)[ST_MODE])[-3:]
        # TODO(kota_): make sure if this is really acceptable
        self.assertEqual('777', permission)
Example #2
    @with_tempdir
    def test_create_host_pipe_prefix_with_real_dir(self, temp_dir):
        runtime_paths = RunTimePaths('account', {'host_root': temp_dir})
        runtime_paths.create_host_pipe_prefix()
        path = runtime_paths.host_pipe_prefix()
        self.assertTrue(os.path.exists(path))
        self.assertTrue(os.path.isdir(path))
        permission = oct(os.stat(path)[ST_MODE])[-3:]
        # TODO(kota_): make sure if this is really acceptable
        self.assertEqual('777', permission)
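
The @with_tempdir decorator used by the two tests above (and by Examples #6 and #9) is not defined on this page, and the excerpts also assume imports such as os and from stat import ST_MODE. A minimal sketch of such a decorator, assuming it only needs to create a temporary directory, pass it to the test as the last positional argument, and clean it up afterwards:

import functools
import shutil
import tempfile


def with_tempdir(func):
    # Sketch only: hand the wrapped test a fresh temporary directory and
    # remove it afterwards, whether the test passes or fails.
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        temp_dir = tempfile.mkdtemp()
        try:
            return func(*args, temp_dir, **kwargs)
        finally:
            shutil.rmtree(temp_dir, ignore_errors=True)
    return wrapper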
Example #3
    def __init__(self, conf, logger, scope):
        """
        :param conf: a dict for gateway conf
        :param logger: a logger instance
        :param scope: scope name to identify the container
        """
        super(StorletGatewayDocker, self).__init__(conf, logger, scope)
        self.storlet_timeout = int(self.conf.get('storlet_timeout', 40))
        self.paths = RunTimePaths(scope, conf)
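
For orientation, a hedged usage sketch of this constructor; the logger setup and the conf values are illustrative, not taken from a real deployment:

import logging

logger = logging.getLogger('storlet-gateway')
conf = {'storlet_timeout': '60'}  # the constructor falls back to 40 if absent
gateway = StorletGatewayDocker(conf, logger, 'account')
# storlet_timeout is coerced to int; paths are derived from (scope, conf)
assert gateway.storlet_timeout == 60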
Example #4
    def _initialize(self):
        # TODO(takashi): take these values from config file
        base_dir = '/home/docker_device'
        self.script_dir = os.path.join(base_dir, 'scripts')
        self.pipes_dir = os.path.join(base_dir, 'pipes', 'scopes')
        self.storlets_dir = os.path.join(base_dir, 'storlets', 'scopes')
        self.log_dir = os.path.join(base_dir, 'logs', 'scopes')
        self.cache_dir = os.path.join(base_dir, 'cache', 'scopes')

        self.conf = {}
        self.storlet_id = 'org.openstack.storlet.mystorlet'
        self.paths = RunTimePaths(self.scope, self.conf)
Example #5
    def test_runtime_paths_default(self):
        # CHECK: docs say we need 4 dirs for communication
        # ====================================================================
        # |1| host_factory_pipe_path    | <pipes_dir>/<scope>/factory_pipe   |
        # ====================================================================
        # |2| host_storlet_pipe_path    | <pipes_dir>/<scope>/<storlet_id>   |
        # ====================================================================
        # |3| sandbox_factory_pipe_path | /mnt/channels/factory_pipe         |
        # ====================================================================
        # |4| sandbox_storlet_pipe_path | /mnt/channels/<storlet_id>         |
        # ====================================================================
        #
        # In this test, the scope value is "account" and the storlet_id is
        # "Storlet-1.0.jar" (app name?)
        # OK, let's check these values

        runtime_paths = RunTimePaths('account', {})
        storlet_id = 'Storlet-1.0.jar'

        # For pipe
        self.assertEqual('/home/docker_device/pipes/scopes/account',
                         runtime_paths.host_pipe_prefix())

        # 1. host_factory_pipe_path <pipes_dir>/<scope>/factory_pipe
        self.assertEqual(
            '/home/docker_device/pipes/scopes/account/factory_pipe',
            runtime_paths.host_factory_pipe())
        # 2. host_storlet_pipe_path <pipes_dir>/<scope>/<storlet_id>
        self.assertEqual(
            '/home/docker_device/pipes/scopes/account/Storlet-1.0.jar',
            runtime_paths.host_storlet_pipe(storlet_id))
        # 3. Yes, right now, we don't have the path for #3 in Python
        # 4. sandbox_storlet_pipe_path | /mnt/channels/<storlet_id>
        self.assertEqual('/mnt/channels/Storlet-1.0.jar',
                         runtime_paths.sbox_storlet_pipe(storlet_id))

        # This looks like it is for the jar load?
        self.assertEqual('/home/docker_device/storlets/scopes/account',
                         runtime_paths.host_storlet_prefix())
        self.assertEqual(
            '/home/docker_device/storlets/scopes/account/Storlet-1.0.jar',
            runtime_paths.host_storlet(storlet_id))
        # And this one is a mount point in the sandbox?
        self.assertEqual('/home/swift/Storlet-1.0.jar',
                         runtime_paths.sbox_storlet_exec(storlet_id))
Example #6
class TestRuntimePaths(unittest.TestCase):

    def setUp(self):
        self.scope = '0123456789abc'
        self._initialize()

    def _initialize(self):
        # TODO(takashi): take these values from config file
        base_dir = '/home/docker_device'
        self.script_dir = os.path.join(base_dir, 'scripts')
        self.pipes_dir = os.path.join(base_dir, 'pipes', 'scopes')
        self.storlets_dir = os.path.join(base_dir, 'storlets', 'scopes')
        self.log_dir = os.path.join(base_dir, 'logs', 'scopes')
        self.cache_dir = os.path.join(base_dir, 'cache', 'scopes')

        self.conf = {}
        self.storlet_id = 'org.openstack.storlet.mystorlet'
        self.paths = RunTimePaths(self.scope, self.conf)

    def tearDown(self):
        pass

    def test_host_pipe_dir(self):
        self.assertEqual(
            os.path.join(self.pipes_dir, self.scope),
            self.paths.host_pipe_dir)

    def test_create_host_pipe_dir(self):
        pipedir = self.paths.host_pipe_dir

        # When the directory exists
        with mock.patch('os.path.exists', return_value=True), \
                mock.patch('os.makedirs') as m, \
                mock.patch('os.chmod') as c:
            self.assertEqual(os.path.join(self.pipes_dir, self.scope),
                             self.paths.create_host_pipe_dir())
            self.assertEqual(0, m.call_count)
            cargs, ckwargs = c.call_args
            # Make sure about the target directory
            self.assertEqual(cargs[0], pipedir)

        # When the directory does not exist
        with mock.patch('os.path.exists', return_value=False), \
                mock.patch('os.makedirs') as m, \
                mock.patch('os.chmod') as c:
            self.assertEqual(os.path.join(self.pipes_dir, self.scope),
                             self.paths.create_host_pipe_dir())
            self.assertEqual(1, m.call_count)
            # Make sure about the target directory
            margs, mkwargs = m.call_args
            self.assertEqual(margs[0], pipedir)
            cargs, ckwargs = c.call_args
            self.assertEqual(cargs[0], pipedir)

    def test_host_factory_pipe(self):
        self.assertEqual(
            self.paths.host_factory_pipe,
            os.path.join(self.pipes_dir, self.scope, 'factory_pipe'))

    def test_get_host_storlet_pipe(self):
        self.assertEqual(
            os.path.join(self.pipes_dir, self.scope, self.storlet_id),
            self.paths.get_host_storlet_pipe(self.storlet_id))

    def test_get_sbox_storlet_pipe(self):
        self.assertEqual(
            os.path.join('/mnt/channels', self.storlet_id),
            self.paths.get_sbox_storlet_pipe(self.storlet_id))

    def test_get_sbox_storlet_dir(self):
        self.assertEqual(
            os.path.join('/home/swift', self.storlet_id),
            self.paths.get_sbox_storlet_dir(self.storlet_id))

    def test_host_storlet_base_dir(self):
        self.assertEqual(
            self.paths.host_storlet_base_dir,
            os.path.join(self.storlets_dir, self.scope))

    def test_get_host_storlet_dir(self):
        self.assertEqual(
            os.path.join(self.storlets_dir, self.scope, self.storlet_id),
            self.paths.get_host_storlet_dir(self.storlet_id))

    def test_get_host_slog_path(self):
        self.assertEqual(
            os.path.join(self.log_dir, self.scope, self.storlet_id,
                         'storlet_invoke.log'),
            self.paths.get_host_slog_path(self.storlet_id))

    def test_host_storlet_cache_dir(self):
        self.assertEqual(
            os.path.join(self.cache_dir, self.scope, 'storlet'),
            self.paths.host_storlet_cache_dir)

    def test_host_dependency_cache_dir(self):
        self.assertEqual(
            os.path.join(self.cache_dir, self.scope, 'dependency'),
            self.paths.host_dependency_cache_dir)

    def test_runtime_paths_default(self):
        # CHECK: docs say we need 4 dirs for communication
        # ====================================================================
        # |1| host_factory_pipe_path    | <pipes_dir>/<scope>/factory_pipe   |
        # ====================================================================
        # |2| host_storlet_pipe_path    | <pipes_dir>/<scope>/<storlet_id>   |
        # ====================================================================
        # |3| sandbox_factory_pipe_path | /mnt/channels/factory_pipe         |
        # ====================================================================
        # |4| sandbox_storlet_pipe_path | /mnt/channels/<storlet_id>         |
        # ====================================================================
        #
        # In this test, the scope value is "account" and the storlet_id is
        # "Storlet-1.0.jar" (app name?)
        # OK, let's check these values

        runtime_paths = RunTimePaths('account', {})
        storlet_id = 'Storlet-1.0.jar'

        # For pipe
        self.assertEqual('/home/docker_device/pipes/scopes/account',
                         runtime_paths.host_pipe_dir)

        # 1. host_factory_pipe_path <pipes_dir>/<scope>/factory_pipe
        self.assertEqual(
            '/home/docker_device/pipes/scopes/account/factory_pipe',
            runtime_paths.host_factory_pipe)
        # 2. host_storlet_pipe_path <pipes_dir>/<scope>/<storlet_id>
        self.assertEqual(
            '/home/docker_device/pipes/scopes/account/Storlet-1.0.jar',
            runtime_paths.get_host_storlet_pipe(storlet_id))
        # 3. Yes, right now, we don't have the path for #3 in Python
        # 4. sandbox_storlet_pipe_path | /mnt/channels/<storlet_id>
        self.assertEqual('/mnt/channels/Storlet-1.0.jar',
                         runtime_paths.get_sbox_storlet_pipe(storlet_id))

        # This looks like it is for the jar load?
        self.assertEqual('/home/docker_device/storlets/scopes/account',
                         runtime_paths.host_storlet_base_dir)
        self.assertEqual(
            '/home/docker_device/storlets/scopes/account/Storlet-1.0.jar',
            runtime_paths.get_host_storlet_dir(storlet_id))
        # And this one is a mount point in the sandbox?
        self.assertEqual('/home/swift/Storlet-1.0.jar',
                         runtime_paths.get_sbox_storlet_dir(storlet_id))

    @with_tempdir
    def test_create_host_pipe_dir_with_real_dir(self, temp_dir):
        runtime_paths = RunTimePaths('account', {'host_root': temp_dir})
        runtime_paths.create_host_pipe_dir()
        path = runtime_paths.host_pipe_dir
        self.assertTrue(os.path.exists(path))
        self.assertTrue(os.path.isdir(path))
        permission = oct(os.stat(path)[ST_MODE])[-3:]
        # TODO(kota_): make sure if this is really acceptable
        self.assertEqual('777', permission)
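
The RunTimePaths class itself is not shown on this page. Purely for orientation, here is a minimal sketch that would satisfy the assertions in the test class above; the real storlets implementation differs in detail (error handling, extra helpers), and the 0o777 mode simply mirrors the TODO-flagged expectation in the tests:

import os


class RunTimePaths(object):
    # Sketch only: derives host-side and sandbox-side paths for one scope.

    def __init__(self, scope, conf):
        self.scope = scope
        self.host_root = conf.get('host_root', '/home/docker_device')
        self.sandbox_pipe_dir = '/mnt/channels'
        self.sandbox_storlet_dir = '/home/swift'

    @property
    def host_pipe_dir(self):
        return os.path.join(self.host_root, 'pipes', 'scopes', self.scope)

    def create_host_pipe_dir(self):
        path = self.host_pipe_dir
        if not os.path.exists(path):
            os.makedirs(path)
        # Both the host and the sandbox user must be able to use the pipes.
        os.chmod(path, 0o777)
        return path

    @property
    def host_factory_pipe(self):
        return os.path.join(self.host_pipe_dir, 'factory_pipe')

    def get_host_storlet_pipe(self, storlet_id):
        return os.path.join(self.host_pipe_dir, storlet_id)

    def get_sbox_storlet_pipe(self, storlet_id):
        return os.path.join(self.sandbox_pipe_dir, storlet_id)

    def get_sbox_storlet_dir(self, storlet_id):
        return os.path.join(self.sandbox_storlet_dir, storlet_id)

    @property
    def host_storlet_base_dir(self):
        return os.path.join(self.host_root, 'storlets', 'scopes', self.scope)

    def get_host_storlet_dir(self, storlet_id):
        return os.path.join(self.host_storlet_base_dir, storlet_id)

    def get_host_slog_path(self, storlet_id):
        return os.path.join(self.host_root, 'logs', 'scopes', self.scope,
                            storlet_id, 'storlet_invoke.log')

    @property
    def host_storlet_cache_dir(self):
        return os.path.join(self.host_root, 'cache', 'scopes', self.scope,
                            'storlet')

    @property
    def host_dependency_cache_dir(self):
        return os.path.join(self.host_root, 'cache', 'scopes', self.scope,
                            'dependency')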
Example #7
class StorletGatewayDocker(StorletGatewayBase):

    request_class = DockerStorletRequest

    def __init__(self, conf, logger, scope):
        """
        :param conf: a dict for gateway conf
        :param logger: a logger instance
        :param scope: scope name to identify the container
        """
        super(StorletGatewayDocker, self).__init__(conf, logger, scope)
        self.storlet_timeout = int(self.conf.get('storlet_timeout', 40))
        self.paths = RunTimePaths(scope, conf)

    @classmethod
    def validate_storlet_registration(cls, params, name):
        """
        Validate required parameters for storlet file

        :param params: parameters related to the storlet file
        :param name: name of the storlet file
        :raises ValueError: if some of the required parameters are missing,
                            or some of the parameters are invalid
        """
        mandatory = [
            'Language', 'Interface-Version', 'Object-Metadata', 'Main'
        ]
        cls._check_mandatory_params(params, mandatory)

        if params['Language'].lower() == 'java':
            if '-' not in name or '.' not in name:
                raise ValueError('Storlet name is incorrect')
        elif params['Language'].lower() == 'python':
            try:
                version = int(float(params.get('Language-Version', 3)))
            except ValueError:
                raise ValueError('Language-Version is invalid')

            # TODO(takashi): Drop Py2 support
            if version not in [2, DEFAULT_PY2, 3, DEFAULT_PY3]:
                # TODO(kota_): more strict version check should be nice.
                raise ValueError('Not supported version specified')

            if name.endswith('.py'):
                cls_name = params['Main']
                if not cls_name.startswith(name[:-3] + '.'):
                    raise ValueError('Main class should be included in '
                                     'storlet file')

                if len(cls_name.split('.')) != 2:
                    raise ValueError('Submodule is currently not supported')
            # TODO(takashi): Add support for sdist tar.gz
            else:
                raise ValueError('Storlet name is incorrect')
        else:
            raise ValueError('Unsupported Language')

        dep = params.get('Dependency')
        if dep:
            deps = dep.split(',')
            if name in deps:
                raise ValueError('Using the same name for storlet and '
                                 'dependency is not allowed')
            if len(deps) != len(set(deps)):
                raise ValueError('Duplicated name in dependencies')

    @classmethod
    def validate_dependency_registration(cls, params, name):
        """
        Validate required parameters for dependency file

        :param params: parameters related to the dependency file
        :param name: name of the dependency file
        :raises ValueError: if some of the required parameters are missing,
                            or some of the parameters are invalid
        """
        mandatory = ['Dependency-Version']
        cls._check_mandatory_params(params, mandatory)

        perm = params.get('Dependency-Permissions')
        if perm is not None:
            try:
                perm_int = int(perm, 8)
            except ValueError:
                raise ValueError('Dependency permission is incorrect')
            if (perm_int & int('600', 8)) != int('600', 8):
                raise ValueError('The owner should have rw permission')

    @classmethod
    def _check_mandatory_params(cls, params, mandatory):
        """
        Ensure that we have all mandatory parameters in the given parameters

        :param params: file parameters
        :param mandatory: required parameters
        :raises ValueError: if some of the required parameters are missing
        """
        for md in mandatory:
            if md not in params:
                raise ValueError('Mandatory parameter is missing'
                                 ': {0}'.format(md))

    def invocation_flow(self, sreq, extra_sources=None):
        """
        Invoke the backend protocol via the gateway

        :param sreq: StorletRequest instance
        :param extra_sources (WIP): A list of StorletRequest instances to
                                    gather as extra resources to feed to the
                                    storlet container as data sources
        :return: StorletResponse instance
        """
        run_time_sbox = RunTimeSandbox(self.scope, self.conf, self.logger)
        docker_updated = self.update_docker_container_from_cache(sreq)
        run_time_sbox.activate_storlet_daemon(sreq, docker_updated)
        self._add_system_params(sreq)

        slog_path = self.paths.get_host_slog_path(sreq.storlet_main)
        storlet_pipe_path = \
            self.paths.get_host_storlet_pipe(sreq.storlet_main)

        sprotocol = StorletInvocationProtocol(sreq,
                                              storlet_pipe_path,
                                              slog_path,
                                              self.storlet_timeout,
                                              self.logger,
                                              extra_sources=extra_sources)

        sresp = sprotocol.communicate()

        self._upload_storlet_logs(slog_path, sreq)

        return sresp

    def _add_system_params(self, sreq):
        """
        Adds Storlet engine specific parameters to the invocation

        Currently, this consists only of the execution path of the
        Storlet within the Docker container.

        :param sreq: DockerStorletRequest instance
        """
        sreq.params['storlet_execution_path'] = self. \
            paths.get_sbox_storlet_dir(sreq.storlet_main)

    def _upload_storlet_logs(self, slog_path, sreq):
        """
        Upload storlet execution log as a swift object

        :param slog_path: target path
        :param sreq: DockerStorletRequest instance
        """
        if sreq.generate_log:
            with open(slog_path, 'rb') as logfile:
                storlet_name = sreq.storlet_id.split('-')[0]
                log_obj_name = '%s.log' % storlet_name
                sreq.file_manager.put_log(log_obj_name, logfile)

    def bring_from_cache(self, obj_name, sreq, is_storlet):
        """
        Auxiliary function that:

        (1) Brings obj_name from Swift, whether it is a storlet or a
            storlet dependency.
        (2) Copies it from the local cache into the Docker container.
        If this is a Storlet, it also validates that the cache holds the
        most recent copy of the Storlet compared to the copy residing in
        Swift.

        :param obj_name: name of the object
        :param sreq: DockerStorletRequest instance
        :param is_storlet: True if the object is a storlet object,
                           False if the object is a dependency object
        :returns: Whether the Docker container was updated with obj_name
        """
        # Determine the cache we are to work with
        # e.g. dependency or storlet
        if is_storlet:
            cache_dir = self.paths.host_storlet_cache_dir
            get_func = sreq.file_manager.get_storlet
        else:
            cache_dir = self.paths.host_dependency_cache_dir
            get_func = sreq.file_manager.get_dependency

        if not os.path.exists(cache_dir):
            os.makedirs(cache_dir, 0o755)

        # cache_target_path is the actual object we need to deal with
        # e.g. a concrete storlet or dependency we need to bring/update
        cache_target_path = os.path.join(cache_dir, obj_name)

        # Determine if we need to update the cache for cache_target_path
        # We default for no
        update_cache = False

        # If it does not exist in cache, we obviously need to bring
        if not os.path.isfile(cache_target_path):
            update_cache = True
        elif is_storlet:
            # The cache_target_path exists, we test if it is up-to-date
            # with the metadata we got.
            # We mention that this is currently applicable for storlets
            # only, and not for dependencies.
            # This will change when we handle dependencies as well
            fstat = os.stat(cache_target_path)
            storlet_or_size = int(
                sreq.options['storlet_content_length'].rstrip("L"))
            storlet_or_time = float(sreq.options['storlet_x_timestamp'])
            b_storlet_size_changed = fstat.st_size != storlet_or_size
            b_storlet_file_updated = float(fstat.st_mtime) < storlet_or_time
            if b_storlet_size_changed or b_storlet_file_updated:
                update_cache = True

        if update_cache:
            # If the cache needs to be updated, then get on with it
            # bring the object from storage
            data_iter, perm = get_func(obj_name)

            # TODO(takashi): Do not directly write to target path
            with open(cache_target_path, 'wb') as fn:
                for data in data_iter:
                    fn.write(data)

            if not is_storlet:
                if not perm:
                    perm = '0600'
                os.chmod(cache_target_path, int(perm, 8))

        # The node's local cache is now updated.
        # We now verify if we need to update the
        # Docker container itself.
        # The Docker container needs to be updated if:
        # 1. The Docker container does not hold a copy of the object
        # 2. The Docker container holds an older version of the object
        update_docker = False
        docker_storlet_path = \
            self.paths.get_host_storlet_dir(sreq.storlet_main)
        docker_target_path = os.path.join(docker_storlet_path, obj_name)

        if not os.path.exists(docker_storlet_path):
            os.makedirs(docker_storlet_path, 0o755)
            update_docker = True
        elif not os.path.isfile(docker_target_path):
            update_docker = True
        else:
            fstat_cached_object = os.stat(cache_target_path)
            fstat_docker_object = os.stat(docker_target_path)
            b_size_changed = fstat_cached_object.st_size \
                != fstat_docker_object.st_size
            b_time_changed = float(fstat_cached_object.st_mtime) < \
                float(fstat_docker_object.st_mtime)
            if (b_size_changed or b_time_changed):
                update_docker = True

        if update_docker:
            # need to copy from cache to docker
            # copy2 also copies the permissions
            shutil.copy2(cache_target_path, docker_target_path)

        return update_docker

    def update_docker_container_from_cache(self, sreq):
        """
        Iterates over the storlet name and its dependencies appearing

        in the invocation data and makes sure they are brought to the
        local cache, and from there to the Docker container.
        Uses the bring_from_cache auxiliary function.

        :param sreq: DockerStorletRequest instance
        :returns: True if the Docker container was updated
        """
        # where, at the host side, the storlet containers reside
        storlet_path = self.paths.host_storlet_base_dir
        if not os.path.exists(storlet_path):
            os.makedirs(storlet_path, 0o755)

        # Iterate over the storlet and its dependencies, and make sure
        # they are updated within the Docker container.
        # Return True if any of them was actually
        # updated within the Docker container
        docker_updated = False

        updated = self.bring_from_cache(sreq.storlet_id, sreq, True)
        docker_updated = docker_updated or updated

        for dep in sreq.dependencies:
            updated = self.bring_from_cache(dep, sreq, False)
            docker_updated = docker_updated or updated

        return docker_updated
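
A short, hedged illustration of how the two registration validators above behave; the dict keys follow the mandatory lists in the code, while the concrete values are invented for the example:

params = {
    'Language': 'Python',
    'Language-Version': '3',
    'Interface-Version': '1.0',
    'Object-Metadata': 'no',
    'Main': 'mystorlet.MyStorlet',
}
# Passes: a .py storlet whose Main class lives in the module named after the file
StorletGatewayDocker.validate_storlet_registration(params, 'mystorlet.py')

try:
    # Fails: a storlet may not list itself as one of its own dependencies
    bad = dict(params, Dependency='mystorlet.py,util.py')
    StorletGatewayDocker.validate_storlet_registration(bad, 'mystorlet.py')
except ValueError as exc:
    print(exc)  # Using the same name for storlet and dependency is not allowed

# Passes: 0755 keeps read/write for the owner, as the check requires
StorletGatewayDocker.validate_dependency_registration(
    {'Dependency-Version': '1', 'Dependency-Permissions': '0755'}, 'util.py')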
Example #8
class StorletGatewayDocker(StorletGatewayBase):

    request_class = DockerStorletRequest

    def __init__(self, conf, logger, scope):
        """
        :param conf: a dict for gateway conf
        :param logger: a logger instance
        :param scope: scope name to identify the container
        """
        super(StorletGatewayDocker, self).__init__(conf, logger, scope)
        self.storlet_timeout = int(self.conf.get('storlet_timeout', 40))
        self.paths = RunTimePaths(scope, conf)

    @classmethod
    def validate_storlet_registration(cls, params, name):
        """
        Validate required parameters for storlet file

        :param params: parameters related to the storlet file
        :param name: name of the storlet file
        :raises ValueError: if some of the required parameters are missing,
                            or some of the parameters are invalid
        """
        mandatory = ['Language', 'Interface-Version', 'Object-Metadata',
                     'Main']
        cls._check_mandatory_params(params, mandatory)

        if params['Language'].lower() == 'java':
            if '-' not in name or '.' not in name:
                raise ValueError('Storlet name is incorrect')
        elif params['Language'].lower() == 'python':
            if name.endswith('.py'):
                cls_name = params['Main']
                if not cls_name.startswith(name[:-3] + '.'):
                    raise ValueError('Main class should be included in '
                                     'storlet file')

                if len(cls_name.split('.')) != 2:
                    raise ValueError('Submodule is currently not supported')
            # TODO(takashi): Add support for sdist tar.gz
            else:
                raise ValueError('Storlet name is incorrect')
        else:
            raise ValueError('Unsupported Language')

        dep = params.get('Dependency')
        if dep:
            deps = dep.split(',')
            if name in deps:
                raise ValueError('Using the same name for storlet and '
                                 'dependency is not allowed')
            if len(deps) != len(set(deps)):
                raise ValueError('Duplicated name in dependencies')

    @classmethod
    def validate_dependency_registration(cls, params, name):
        """
        Validate required parameters for dependency file

        :param params: parameters related to the dependency file
        :param name: name of the dependency file
        :raises ValueError: if some of the required parameters are missing,
                            or some of the parameters are invalid
        """
        mandatory = ['Dependency-Version']
        cls._check_mandatory_params(params, mandatory)

        perm = params.get('Dependency-Permissions')
        if perm is not None:
            try:
                perm_int = int(perm, 8)
            except ValueError:
                raise ValueError('Dependency permission is incorrect')
            if (perm_int & int('600', 8)) != int('600', 8):
                raise ValueError('The owner should have rw permission')

    @classmethod
    def _check_mandatory_params(cls, params, mandatory):
        """
        Ensure that we have all mandatory parameters in the given parameters

        :param params: file parameters
        :param mandatory: required parameters
        :raises ValueError: if some of the required parameters are missing
        """
        for md in mandatory:
            if md not in params:
                raise ValueError('Mandatory parameter is missing'
                                 ': {0}'.format(md))

    def invocation_flow(self, sreq, extra_sources=None):
        """
        Invoke the backend protocol via the gateway

        :param sreq: StorletRequest instance
        :param extra_sources (WIP): A list of StorletRequest instances to
                                    gather as extra resources to feed to the
                                    storlet container as data sources
        :return: StorletResponse instance
        """
        run_time_sbox = RunTimeSandbox(self.scope, self.conf, self.logger)
        docker_updated = self.update_docker_container_from_cache(sreq)
        run_time_sbox.activate_storlet_daemon(sreq, docker_updated)
        self._add_system_params(sreq)

        slog_path = self.paths.slog_path(sreq.storlet_main)
        storlet_pipe_path = self.paths.host_storlet_pipe(sreq.storlet_main)

        sprotocol = StorletInvocationProtocol(sreq,
                                              storlet_pipe_path,
                                              slog_path,
                                              self.storlet_timeout,
                                              self.logger,
                                              extra_sources=extra_sources)

        sresp = sprotocol.communicate()

        self._upload_storlet_logs(slog_path, sreq)

        return sresp

    def _add_system_params(self, sreq):
        """
        Adds Storlet engine specific parameters to the invocation

        Currently, this consists only of the execution path of the
        Storlet within the Docker container.

        :param sreq: DockerStorletRequest instance
        """
        sreq.params['storlet_execution_path'] = self. \
            paths.sbox_storlet_exec(sreq.storlet_main)

    def _upload_storlet_logs(self, slog_path, sreq):
        """
        Upload storlet execution log as a swift object

        :param slog_path: target path
        :param sreq: DockerStorletRequest instance
        """
        if sreq.generate_log:
            with open(slog_path, 'r') as logfile:
                storlet_name = sreq.storlet_id.split('-')[0]
                log_obj_name = '%s.log' % storlet_name
                sreq.file_manager.put_log(log_obj_name, logfile)

    def bring_from_cache(self, obj_name, sreq, is_storlet):
        """
        Auxiliary function that:

        (1) Brings obj_name from Swift, whether it is a storlet or a
            storlet dependency.
        (2) Copies it from the local cache into the Docker container.
        If this is a Storlet, it also validates that the cache holds the
        most recent copy of the Storlet compared to the copy residing in
        Swift.

        :param obj_name: name of the object
        :param sreq: DockerStorletRequest instance
        :param is_storlet: True if the object is a storlet object,
                           False if the object is a dependency object
        :returns: Whether the Docker container was updated with obj_name
        """
        # Determine the cache we are to work with
        # e.g. dependency or storlet
        if is_storlet:
            cache_dir = self.paths.get_host_storlet_cache_dir()
            get_func = sreq.file_manager.get_storlet
        else:
            cache_dir = self.paths.get_host_dependency_cache_dir()
            get_func = sreq.file_manager.get_dependency

        if not os.path.exists(cache_dir):
            os.makedirs(cache_dir, 0o755)

        # cache_target_path is the actual object we need to deal with
        # e.g. a concrete storlet or dependency we need to bring/update
        cache_target_path = os.path.join(cache_dir, obj_name)

        # Determine if we need to update the cache for cache_target_path
        # We default for no
        update_cache = False

        # If it does not exist in cache, we obviously need to bring
        if not os.path.isfile(cache_target_path):
            update_cache = True
        elif is_storlet:
            # The cache_target_path exists, we test if it is up-to-date
            # with the metadata we got.
            # We mention that this is currently applicable for storlets
            # only, and not for dependencies.
            # This will change when we handle dependencies as well
            fstat = os.stat(cache_target_path)
            storlet_or_size = long(sreq.options['storlet_content_length'])
            storlet_or_time = float(sreq.options['storlet_x_timestamp'])
            b_storlet_size_changed = fstat.st_size != storlet_or_size
            b_storlet_file_updated = float(fstat.st_mtime) < storlet_or_time
            if b_storlet_size_changed or b_storlet_file_updated:
                update_cache = True

        if update_cache:
            # If the cache needs to be updated, then get on with it
            # bring the object from storage
            data_iter, perm = get_func(obj_name)

            # TODO(takashi): Do not directly write to target path
            with open(cache_target_path, 'w') as fn:
                for data in data_iter:
                    fn.write(data)

            if not is_storlet:
                if not perm:
                    perm = '0600'
                os.chmod(cache_target_path, int(perm, 8))

        # The node's local cache is now updated.
        # We now verify if we need to update the
        # Docker container itself.
        # The Docker container needs to be updated if:
        # 1. The Docker container does not hold a copy of the object
        # 2. The Docker container holds an older version of the object
        update_docker = False
        docker_storlet_path = self.paths.host_storlet(sreq.storlet_main)
        docker_target_path = os.path.join(docker_storlet_path, obj_name)

        if not os.path.exists(docker_storlet_path):
            os.makedirs(docker_storlet_path, 0o755)
            update_docker = True
        elif not os.path.isfile(docker_target_path):
            update_docker = True
        else:
            fstat_cached_object = os.stat(cache_target_path)
            fstat_docker_object = os.stat(docker_target_path)
            b_size_changed = fstat_cached_object.st_size \
                != fstat_docker_object.st_size
            b_time_changed = float(fstat_cached_object.st_mtime) < \
                float(fstat_docker_object.st_mtime)
            if (b_size_changed or b_time_changed):
                update_docker = True

        if update_docker:
            # need to copy from cache to docker
            # copy2 also copies the permissions
            shutil.copy2(cache_target_path, docker_target_path)

        return update_docker

    def update_docker_container_from_cache(self, sreq):
        """
        Iterates over the storlet name and its dependencies appearing

        in the invocation data and makes sure they are brought to the
        local cache, and from there to the Docker container.
        Uses the bring_from_cache auxiliary function.

        :param sreq: DockerStorletRequest instance
        :returns: True if the Docker container was updated
        """
        # where, at the host side, the storlet containers reside
        storlet_path = self.paths.host_storlet_prefix()
        if not os.path.exists(storlet_path):
            os.makedirs(storlet_path, 0o755)

        # Iterate over the storlet and its dependencies, and make sure
        # they are updated within the Docker container.
        # Return True if any of them was actually
        # updated within the Docker container
        docker_updated = False

        updated = self.bring_from_cache(sreq.storlet_id, sreq, True)
        docker_updated = docker_updated or updated

        for dep in sreq.dependencies:
            updated = self.bring_from_cache(dep, sreq, False)
            docker_updated = docker_updated or updated

        return docker_updated
Example #9
class TestRuntimePaths(unittest.TestCase):

    def setUp(self):
        self.scope = '0123456789abc'
        self._initialize()

    def _initialize(self):
        # TODO(takashi): take these values from config file
        base_dir = '/home/docker_device'
        self.script_dir = os.path.join(base_dir, 'scripts')
        self.pipes_dir = os.path.join(base_dir, 'pipes', 'scopes')
        self.storlets_dir = os.path.join(base_dir, 'storlets', 'scopes')
        self.log_dir = os.path.join(base_dir, 'logs', 'scopes')
        self.cache_dir = os.path.join(base_dir, 'cache', 'scopes')

        self.conf = {}
        self.storlet_id = 'org.openstack.storlet.mystorlet'
        self.paths = RunTimePaths(self.scope, self.conf)

    def tearDown(self):
        pass

    def test_host_pipe_prefix(self):
        self.assertEqual(
            self.paths.host_pipe_prefix(),
            os.path.join(self.pipes_dir, self.scope))

    def test_create_host_pipe_prefix(self):
        pipedir = self.paths.host_pipe_prefix()

        # When the directory exists
        with mock.patch('os.path.exists', return_value=True), \
                mock.patch('os.makedirs') as m, \
                mock.patch('os.chmod') as c:
            self.paths.create_host_pipe_prefix()
            self.assertEqual(m.call_count, 0)
            cargs, ckwargs = c.call_args
            # Make sure about the target directory
            self.assertEqual(cargs[0], pipedir)

        # When the directory does not exist
        with mock.patch('os.path.exists', return_value=False), \
                mock.patch('os.makedirs') as m, \
                mock.patch('os.chmod') as c:
            self.paths.create_host_pipe_prefix()
            self.assertEqual(m.call_count, 1)
            # Make sure about the target directory
            margs, mkwargs = m.call_args
            self.assertEqual(margs[0], pipedir)
            cargs, ckwargs = c.call_args
            self.assertEqual(cargs[0], pipedir)

    def test_host_factory_pipe(self):
        self.assertEqual(
            self.paths.host_factory_pipe(),
            os.path.join(self.pipes_dir, self.scope, 'factory_pipe'))

    def test_host_storlet_pipe(self):
        self.assertEqual(
            self.paths.host_storlet_pipe(self.storlet_id),
            os.path.join(self.pipes_dir, self.scope, self.storlet_id))

    def test_sbox_storlet_pipe(self):
        self.assertEqual(
            self.paths.sbox_storlet_pipe(self.storlet_id),
            os.path.join('/mnt/channels', self.storlet_id))

    def test_sbox_storlet_exec(self):
        self.assertEqual(
            self.paths.sbox_storlet_exec(self.storlet_id),
            os.path.join('/home/swift', self.storlet_id))

    def test_host_storlet_prefix(self):
        self.assertEqual(
            self.paths.host_storlet_prefix(),
            os.path.join(self.storlets_dir, self.scope))

    def test_host_storlet(self):
        self.assertEqual(
            self.paths.host_storlet(self.storlet_id),
            os.path.join(self.storlets_dir, self.scope,
                         self.storlet_id))

    def test_slog_path(self):
        with mock.patch('os.path.exists', return_value=True), \
            mock.patch('os.makedirs') as m:
            self.assertEqual(
                self.paths.slog_path(self.storlet_id),
                os.path.join(self.log_dir, self.scope,
                             self.storlet_id))
            self.assertEqual(m.call_count, 0)

        with mock.patch('os.path.exists', return_value=False), \
            mock.patch('os.makedirs') as m:
            self.assertEqual(
                self.paths.slog_path(self.storlet_id),
                os.path.join(self.log_dir, self.scope,
                             self.storlet_id))
            self.assertEqual(m.call_count, 1)

    def test_get_host_storlet_cache_dir(self):
        self.assertEqual(
            self.paths.get_host_storlet_cache_dir(),
            os.path.join(self.cache_dir, self.scope, 'storlet'))

    def test_get_host_dependency_cache_dir(self):
        self.assertEqual(
            self.paths.get_host_dependency_cache_dir(),
            os.path.join(self.cache_dir, self.scope, 'dependency'))

    def test_runtime_paths_default(self):
        # CHECK: docs say we need 4 dirs for communication
        # ====================================================================
        # |1| host_factory_pipe_path    | <pipes_dir>/<scope>/factory_pipe   |
        # ====================================================================
        # |2| host_storlet_pipe_path    | <pipes_dir>/<scope>/<storlet_id>   |
        # ====================================================================
        # |3| sandbox_factory_pipe_path | /mnt/channels/factory_pipe         |
        # ====================================================================
        # |4| sandbox_storlet_pipe_path | /mnt/channels/<storlet_id>         |
        # ====================================================================
        #
        # In this test, the scope value is "account" and the storlet_id is
        # "Storlet-1.0.jar" (app name?)
        # OK, let's check these values

        runtime_paths = RunTimePaths('account', {})
        storlet_id = 'Storlet-1.0.jar'

        # For pipe
        self.assertEqual('/home/docker_device/pipes/scopes/account',
                         runtime_paths.host_pipe_prefix())

        # 1. host_factory_pipe_path <pipes_dir>/<scope>/factory_pipe
        self.assertEqual(
            '/home/docker_device/pipes/scopes/account/factory_pipe',
            runtime_paths.host_factory_pipe())
        # 2. host_storlet_pipe_path <pipes_dir>/<scope>/<storlet_id>
        self.assertEqual(
            '/home/docker_device/pipes/scopes/account/Storlet-1.0.jar',
            runtime_paths.host_storlet_pipe(storlet_id))
        # 3. Yes, right now, we don't have the path for #3 in Python
        # 4. sandbox_storlet_pipe_path | /mnt/channels/<storlet_id>
        self.assertEqual('/mnt/channels/Storlet-1.0.jar',
                         runtime_paths.sbox_storlet_pipe(storlet_id))

        # This looks like it is for the jar load?
        self.assertEqual('/home/docker_device/storlets/scopes/account',
                         runtime_paths.host_storlet_prefix())
        self.assertEqual(
            '/home/docker_device/storlets/scopes/account/Storlet-1.0.jar',
            runtime_paths.host_storlet(storlet_id))
        # And this one is a mount point in the sandbox?
        self.assertEqual('/home/swift/Storlet-1.0.jar',
                         runtime_paths.sbox_storlet_exec(storlet_id))

    @with_tempdir
    def test_create_host_pipe_prefix_with_real_dir(self, temp_dir):
        runtime_paths = RunTimePaths('account', {'host_root': temp_dir})
        runtime_paths.create_host_pipe_prefix()
        path = runtime_paths.host_pipe_prefix()
        self.assertTrue(os.path.exists(path))
        self.assertTrue(os.path.isdir(path))
        permission = oct(os.stat(path)[ST_MODE])[-3:]
        # TODO(kota_): make sure if this is really acceptable
        self.assertEqual('777', permission)