Example #1
    def get_host_requirements(self, host_type, accelerator):
        """
        Gets accelerator requirements to use with the host.

        Args:
            host_type (str): Host type.
            accelerator (str): Name of the accelerator

        Returns:
            dict: AcceleratorClient requirements for host.
        """
        # Get host_type configuration
        try:
            provider_config = self.get_host_configurations()[host_type]
        except KeyError:
            raise _exc.ClientConfigurationException(
                "Host '%s' is not supported. Available hosts are: %s" %
                (host_type, ', '.join(self.get_host_configurations().keys())))

        # Get accelerator configuration
        try:
            accelerator_config = provider_config[accelerator]
        except KeyError:
            raise _exc.ClientConfigurationException(
                "AcceleratorClient '%s' is not supported on '%s'." %
                (accelerator, host_type))

        accelerator_config['accelerator'] = accelerator
        return accelerator_config
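A small self-contained sketch of the same two-level lookup pattern; the configuration content and the plain ValueError standing in for ClientConfigurationException are invented for illustration:

    configurations = {'some_host': {'some_accelerator': {'image': 'img-0'}}}

    def get_requirements(host_type, accelerator):
        # Two-level lookup with an explicit error at each level
        try:
            provider_config = configurations[host_type]
        except KeyError:
            raise ValueError("Host '%s' is not supported." % host_type)
        try:
            accelerator_config = provider_config[accelerator]
        except KeyError:
            raise ValueError("Accelerator '%s' is not supported on '%s'."
                             % (accelerator, host_type))
        accelerator_config['accelerator'] = accelerator
        return accelerator_config

    print(get_requirements('some_host', 'some_accelerator'))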
Example #2
    def _process(self, src, dst, parameters):
        """
        Client-specific process implementation.

        Args:
            src (file-like object): Input data.
            dst (file-like object): Output data.
            parameters (dict): Parameters dict.

        Returns:
            dict: response dict.
        """
        # Check if configuration was done
        if self._configuration_url is None:
            raise _exc.ClientConfigurationException(
                "AcceleratorClient has not been configured. "
                "Use 'start' function.")

        # Post processing request
        fields = {
            'parameters': _json.dumps(parameters),
            'configuration': self._configuration_url
        }
        if src:
            fields['datafile'] = 'src', src, 'application/octet-stream'
        multipart = _MultipartEncoder(fields=fields)

        response = self._session.post(
            self._endpoints['process'],
            data=multipart,
            headers={'Content-Type': multipart.content_type})

        # Check response and append process ID to process URL
        process_url = self._endpoints['process'] + str(
            self._raise_for_error(response)['id'])

        # Get result
        try:
            # Wait for processing to complete
            while True:
                response_dict = self._raise_for_error(
                    self._session.get(process_url))
                if response_dict['processed']:
                    break

            # Gets result file
            if dst:
                response = self._session.get(response_dict['datafileresult'],
                                             stream=True)
                _shutil.copyfileobj(response.raw, dst)

            # Gets result dict
            return response_dict['parametersresult']

        finally:
            # Deletes process result on server
            self._session.delete(process_url)
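The method above combines a multipart upload with a polling loop. A self-contained sketch of that pattern using `requests_toolbelt` (presumably what `_MultipartEncoder` aliases); the endpoint, field names and response keys are placeholders rather than the documented REST API, and `session` is assumed to be a `requests.Session`:

    import json
    import shutil
    import time

    from requests_toolbelt.multipart.encoder import MultipartEncoder

    def process(session, endpoint, src, dst, parameters):
        # Post the processing request as multipart form data
        multipart = MultipartEncoder(fields={
            'parameters': json.dumps(parameters),
            'datafile': ('src', src, 'application/octet-stream')})
        response = session.post(
            endpoint, data=multipart,
            headers={'Content-Type': multipart.content_type})
        response.raise_for_status()
        task_url = endpoint + str(response.json()['id'])

        try:
            # Poll until the server reports the task as processed
            while True:
                status = session.get(task_url).json()
                if status['processed']:
                    break
                time.sleep(1)

            # Stream the result file to the output object
            result = session.get(status['datafileresult'], stream=True)
            shutil.copyfileobj(result.raw, dst)
            return status['parametersresult']
        finally:
            # Delete the server-side result in all cases
            session.delete(task_url)

    # Hypothetical call: process(requests.Session(), 'https://host/process/',
    #                            open('in.bin', 'rb'), open('out.bin', 'wb'), {})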
Example #3
    def url(self, url):
        """
        URL of the accelerator host.

        Args:
            url (str): URL.
        """
        # Check URL
        if not url:
            raise _exc.ClientConfigurationException("Host URL is not valid.")
        self._url = url = _utl.format_url(
            url, force_secure=bool(self._ssl_cert_crt))

        # Updates endpoints
        for route in self._REST_API:
            self._endpoints[route] = url + self._REST_API[route]
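A short self-contained sketch of how the endpoint table gets rebuilt from a route map whenever the URL changes; the route names and paths are invented for illustration:

    REST_API = {'start': '/v1.0/configuration/', 'process': '/v1.0/process/'}

    def build_endpoints(url):
        # Prefix every route path with the host URL
        return {route: url + path for route, path in REST_API.items()}

    print(build_endpoints('https://accelerator.example.com'))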
Example #4
    def _configuration_url(self):
        """Last configuration URL"""
        # Get last configuration, if any
        try:
            endpoint = self._endpoints['start']
        except KeyError:
            raise _exc.ClientConfigurationException(
                'Unknown host URL, please run accelerator "start" method.')

        response = self._session.get(endpoint)

        try:
            last_config = response.json()['results'][0]
        except (KeyError, IndexError, ValueError):
            return

        # The last configuration URL is kept so that it does not have to be
        # requested from the user again.
        if last_config['used'] != 0:
            return last_config['url']
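A self-contained sketch of the "reuse the last configuration" logic, operating on an already-decoded response body whose shape mirrors the example (the values are invented):

    def last_configuration_url(payload):
        # Return the most recent configuration URL, or None if unusable
        try:
            last_config = payload['results'][0]
        except (KeyError, IndexError):
            return None
        if last_config['used'] != 0:
            return last_config['url']
        return None

    payload = {'results': [{'url': 'https://host/v1.0/configuration/1/',
                            'used': 1}]}
    print(last_configuration_url(payload))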
Example #5
    def __init__(self,
                 accelerator=None,
                 host_ip=None,
                 ssl_cert_crt=None,
                 *args,
                 **kwargs):
        # Initialize client
        _Client.__init__(self, accelerator=accelerator, *args, **kwargs)

        # Initializes HTTP client
        self._ssl_cert_crt = ssl_cert_crt
        self._endpoints = {}

        # Mandatory parameters
        if not accelerator:
            raise _exc.ClientConfigurationException(
                "'accelerator' argument is mandatory.")

        # Pass host URL if already defined.
        if host_ip:
            self.url = host_ip
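A self-contained sketch of the construction flow: cooperative base initialization, a mandatory-argument check, then an optional host URL (the class names here are invented stand-ins, not the real apyfal classes):

    class BaseClient(object):
        def __init__(self, accelerator=None, **kwargs):
            self._accelerator = accelerator

    class HttpClient(BaseClient):
        def __init__(self, accelerator=None, host_ip=None,
                     ssl_cert_crt=None, **kwargs):
            BaseClient.__init__(self, accelerator=accelerator, **kwargs)
            self._ssl_cert_crt = ssl_cert_crt
            self._endpoints = {}

            # Reject construction without the mandatory argument
            if not accelerator:
                raise ValueError("'accelerator' argument is mandatory.")

            # The real class routes this through the 'url' property setter
            # shown in Example #3; here it is just a plain attribute
            if host_ip:
                self.url = host_ip

    client = HttpClient(accelerator='my_accelerator',
                        host_ip='https://127.0.0.1')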
Example #6
    def _checks_apyfal_version(self, config_env):
        """
        Checks if client Apyfal version is compatible.

        Args:
            config_env (dict): environment.

        Raises:
            apyfal.exceptions.ClientConfigurationException:
                Apyfal version is not compatible.
        """
        try:
            if _LooseVersion(config_env['apyfal_version']) < _LooseVersion(
                    self.APYFAL_MINIMUM_VERSION):
                raise _exc.ClientConfigurationException(
                    'Apyfal version needs to be at least %s. Please upgrade it.'
                    % self.APYFAL_MINIMUM_VERSION)

        # Version not available: Return, can come from REST API directly.
        except KeyError:
            return
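A self-contained sketch of the same version gate; `packaging.version.Version` is used here as a modern stand-in for the `LooseVersion` class the example presumably aliases as `_LooseVersion`, and the minimum version value is a placeholder:

    from packaging.version import Version

    APYFAL_MINIMUM_VERSION = '1.2.0'  # placeholder value

    def check_apyfal_version(config_env):
        # Missing version information is tolerated (e.g. direct REST calls)
        if 'apyfal_version' not in config_env:
            return
        if Version(config_env['apyfal_version']) < Version(
                APYFAL_MINIMUM_VERSION):
            raise ValueError('Apyfal version needs to be at least %s.'
                             % APYFAL_MINIMUM_VERSION)

    check_apyfal_version({'apyfal_version': '1.2.1'})  # passes silently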
Example #7
    def process_map(self,
                    srcs=None,
                    dsts=None,
                    timeout=None,
                    info_list=None,
                    **parameters):
        """
        Map process execution on multiple files.

        Args:
            srcs (iterable of path-like object or file-like object):
                Iterable of input data to process.
                Must be an iterable of "src" parameters of the "process" method.
                Path-like object can be path, URL or cloud object URL.
            dsts (iterable of path-like object or file-like object):
                Iterable of output data.
                Must be an iterable of "dst" parameters of the "process" method.
                Path-like object can be path, URL or cloud object URL.
            timeout (float): The maximum number of seconds to wait. If None,
                then there is no limit on the wait time.
            parameters (path-like object, str or dict): Accelerator process
                specific parameters.
                Can also be a full process parameters dictionary
                (or its JSON equivalent as a str literal). The parameters
                dictionary overrides default configuration values, and
                individual specific parameters override parameters
                dictionary values. See the accelerator documentation for
                more information on possible parameters.
                Path-like object can be path, URL or cloud object URL.
            info_list (list): If a list is passed, it is updated
                with "info_dict" extra information dicts for each process
                operation.

        Returns:
            generator: Results.

        Raises:
            concurrent.futures.TimeoutError: "timeout" reached on at least one
                task.
        """
        # Based on "concurrent.futures.Executor.map"

        # Initializes timeout
        if timeout is not None:
            end_time = timeout + time()

        # Get file count
        src = dst = None
        if srcs is not None:
            size_src = len(srcs)
        else:
            size_src = 0

        if dsts is not None:
            size_dst = len(dsts)
        else:
            size_dst = 0

        if size_src and size_dst and size_src != size_dst:
            raise _exc.ClientConfigurationException(
                '"srcs" and "dsts" must contain the same number of files.')

        # Submit process
        futures = []
        for index in range(size_src or size_dst):
            if size_src:
                src = srcs[index]
            if size_dst:
                dst = dsts[index]

            futures.append(
                self.process_submit(src=src,
                                    dst=dst,
                                    info_dict=self._get_info_dict(info_list),
                                    **parameters))

        def result_iterator():
            """
            Yield must be hidden in closure so that the futures are submitted
            before the first iterator value is required.
            """
            try:
                # Reverse so that pop() returns futures in submission order
                futures.reverse()
                while futures:
                    # Careful not to keep a reference to the popped future
                    if timeout is None:
                        yield futures.pop().result()
                    else:
                        yield futures.pop().result(end_time - time())
            finally:
                for future in futures:
                    future.cancel()

        return result_iterator()
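The generator at the end mirrors `concurrent.futures.Executor.map`. A self-contained sketch of the same submit-then-lazily-collect pattern with a thread pool (the worker function is invented):

    import time
    from concurrent.futures import ThreadPoolExecutor

    def worker(item):
        time.sleep(0.1)
        return item * 2

    with ThreadPoolExecutor(max_workers=4) as executor:
        futures = [executor.submit(worker, item) for item in range(5)]

        def result_iterator():
            # Yielding is hidden in a closure so that every future is already
            # submitted before the first result is requested
            try:
                futures.reverse()  # pop() then yields results in submit order
                while futures:
                    # Pop so no reference to a finished future is kept
                    yield futures.pop().result()
            finally:
                for future in futures:
                    future.cancel()

        print(list(result_iterator()))  # [0, 2, 4, 6, 8]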
Example #8
    def _data_file(self, url, parameters, parameter_name, mode):
        """Get files with apyfal.storage.

        Args:
            url (str or file-like object): Input URL.
            parameters (dict): Parameters dict.
            parameter_name (str or tuple of str): Parameter name for input URL.
            mode (str): Access mode. 'r' or 'w'.

        Returns:
            str or file-like object or None:
                Local version of input path.
        """
        # Apyfal 1.1.0 Compatibility
        if isinstance(parameter_name, tuple):
            parameter_name, ap110_name = parameter_name
        else:
            ap110_name = parameter_name

        # No URL
        if url is None:
            # Get URL from parameters if not provided directly
            try:
                url = parameters['app']['specific'].pop(parameter_name)

            # Still no URL: yield None directly
            except KeyError:

                # Apyfal 1.1.0 Compatibility
                url = parameters['app']['specific'].pop(ap110_name, None)
                if url is None:

                    yield None
                    return

        # Gets scheme and path from URL
        scheme, path = _srg.parse_url(url, not self.REMOTE)

        # File scheme: Check paths
        if scheme == 'file':
            path = _os_path.abspath(path)

            # Only authorises files in whitelisted directories on host
            if not self.REMOTE and url.startswith('host://'):
                # Accept the path if it is under any whitelisted directory
                for authorized in self._authorized_host_dirs:
                    if path.startswith(authorized):
                        break
                else:
                    raise _exc.ClientSecurityException(
                        "Unauthorized path: '%s'" % path)

            # Checks input file exists
            if 'r' in mode and not _os_path.isfile(path):
                raise _exc.ClientConfigurationException(
                    gen_msg=('not_found_named', parameter_name, path))

            # Ensures output parent directory exists
            elif 'w' in mode:
                _utl.makedirs(_os_path.dirname(path), exist_ok=True)

        # Client side:
        # Sends URL to host side as parameters and
        # yields None to client
        if self.REMOTE and scheme not in ('stream', 'file'):
            parameters['app']['specific'][parameter_name] = url

            # Apyfal 1.1.0 Compatibility
            if ap110_name != parameter_name:
                parameters['app']['specific'][ap110_name] = url

            yield None

        # Other case, yields file in expected format (file or stream)
        else:
            # As file
            if self._PARAMETER_IO_FORMAT.get(parameter_name,
                                             'stream') == 'file':

                # Already a file
                if scheme == 'file':
                    yield path

                # Use temporary file
                else:
                    with self.as_tmp_file(url, mode) as file:
                        yield file
            # As stream
            else:
                with _srg.open(url, mode) as stream:
                    yield stream
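The method above is a generator that yields exactly once, which is the shape expected by `contextlib.contextmanager`. How the real class wraps it is not shown here, but a minimal self-contained sketch of that consumption pattern, with the URL handling reduced to a local file, looks like this:

    import contextlib

    @contextlib.contextmanager
    def data_file(url, mode):
        # Yield a local file object for the URL and clean up afterwards
        stream = open(url, mode)
        try:
            yield stream
        finally:
            stream.close()

    with data_file('output.bin', 'wb') as stream:
        stream.write(b'payload')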