Example #1
class ObjectCache(object):
    """Artifact cache handling that encapsulates artifacts handling on worker side

    >>> epv_cache = ObjectCache.get(ecosystem='npm', name='serve-static', version='1.7.1')
    >>> extracted_tarball_path = epv_cache.get_extracted_source_tarball()
    """
    # Registry of live EPVCache instances keyed by (ecosystem, name, version).
    _cache = {}
    # Root directory under which each EPV gets its own cache subdirectory.
    _base_cache_dir = get_configuration().worker_data_dir

    def __init__(self):
        # Purely a class-level registry; creating instances makes no sense.
        raise NotImplementedError()

    @classmethod
    def wipe(cls):
        """Wipe all files that are stored in the current cache"""
        for cached_item in cls._cache.values():
            cached_item.remove_files()
        cls._cache = {}

    @classmethod
    def _cache_dir(cls, ecosystem, name, version):
        """Compute the on-disk cache directory for the given EPV."""
        return os.path.join(cls._base_cache_dir, ecosystem, name, version)

    @classmethod
    def get(cls, ecosystem, name, version):
        """Return the EPVCache for the given EPV, creating it on first use."""
        # This code just stores info about downloaded objects; once we want to
        # optimize the number of retrievals and do some caching, remove the
        # wipe() call in the base task and implement caching logic here.
        key = (ecosystem, name, version)
        cached = cls._cache.get(key)
        if cached is None:
            # Artifacts bucket used for caching can be expanded based on env variables
            cached = EPVCache(ecosystem, name, version,
                              cls._cache_dir(ecosystem, name, version))
            cls._cache[key] = cached
        return cached

    @classmethod
    def get_from_dict(cls, dictionary):
        """Sugar for cls.get() that takes its arguments from a dict."""
        return cls.get(dictionary['ecosystem'],
                       dictionary['name'],
                       dictionary['version'])
import os
import logging
from urllib.parse import quote
from cucoslib.conf import get_configuration, get_postgres_connection_string

# Module-level logger, named after this module per the stdlib logging convention.
_logger = logging.getLogger(__name__)
# Shared configuration object; get_configuration() returns the same instance on
# every call (see test_configuration_is_a_singleton below).
configuration = get_configuration()


def _use_sqs():
    """
    :return: True if worker should use Amazon SQS
    """
    key_id = len(os.environ.get('AWS_SQS_ACCESS_KEY_ID', '')) > 0
    access_key = len(os.environ.get('AWS_SQS_SECRET_ACCESS_KEY', '')) > 0

    res = int(key_id) + int(access_key)

    if res == 1:
        raise RuntimeError(
            "In order to use AWS SQS you have to provide both 'AWS_SQS_ACCESS_KEY_ID' and "
            "'AWS_SQS_SECRET_ACCESS_KEY' environment variables")

    # Make sure we do not pass these env variables - according to Celery docs they can be used only with 'sqs://'
    if "AWS_ACCESS_KEY_ID" in os.environ:
        raise RuntimeError(
            "Do not use AWS_ACCESS_KEY_ID in order to access SQS, use 'AWS_SQS_ACCESS_KEY_ID'"
        )

    if "AWS_SECRET_ACCESS_KEY" in os.environ:
        raise RuntimeError(
 def __init__(self, *args, **kwargs):
     """Run the base initializer, then attach a per-task logger and the worker configuration."""
     super().__init__(*args, **kwargs)
     # The two attribute assignments are independent of each other.
     self.configuration = get_configuration()
     self.log = get_task_logger(self.__class__.__name__)
def test_configuration_is_a_singleton():
    """get_configuration() must return the very same object on every call."""
    # Use `is` rather than comparing id() of two temporaries: with id(), the
    # first result may be garbage-collected before the second call, and the new
    # object can be allocated at the same address — a false positive. `is`
    # keeps both operands alive during the comparison, so the check is sound.
    assert get_configuration() is get_configuration()