def _create_bucket(self, tagged=True):
    """Create the configured S3 bucket, with optional versioning and tagging.

    :param tagged: when True (and a deployment prefix is configured),
        tag the bucket with an ENV tag carrying the deployment prefix
    """
    # us-east-1 is special: boto3 rejects a LocationConstraint for it.
    # See https://github.com/boto/boto3/issues/125
    create_kwargs = {'Bucket': self.bucket_name}
    if self.region_name != 'us-east-1':
        create_kwargs['CreateBucketConfiguration'] = {
            'LocationConstraint': self.region_name
        }
    self._s3.create_bucket(**create_kwargs)

    # Do not enable versioning when running locally.
    # Our S3 alternatives are not capable to handle it.
    if self.versioned and not configuration.is_local_deployment():
        self._s3.BucketVersioning(self.bucket_name).enable()

    bucket_tag = configuration.DEPLOYMENT_PREFIX
    if tagged and bucket_tag and not configuration.is_local_deployment():
        tag_set = [{'Key': 'ENV', 'Value': bucket_tag}]
        self._s3.BucketTagging(self.bucket_name).put(
            Tagging={'TagSet': tag_set})
def retrieve_latest_version_id(self, object_key):
    """Retrieve latest version identifier for the given object.

    :param object_key: key under which the object is stored
    :return: version identifier
    :raises AttributeError: when the bucket is not configured as versioned
    """
    if not self.versioned:
        message = ("Cannot retrieve version of object '{}': "
                   "bucket '{}' is not configured to be versioned"
                   .format(object_key, self.bucket_name))
        raise AttributeError(message)
    if configuration.is_local_deployment():
        # Local S3 substitutes lack versioning; hand back a stub id.
        return self._get_fake_version_id()
    return self._s3.Object(self.bucket_name, object_key).version_id
def __init__(self, aws_access_key_id=None, aws_secret_access_key=None,
             bucket_name=None, region_name=None, endpoint_url=None,
             use_ssl=False, encryption=None, versioned=None):
    """Initialize object, setup connection to the AWS S3.

    Priority for configuration options:

    1. environment variables (exposed through ``configuration``)
    2. arguments passed to this constructor
    3. defaults as listed in ``self._DEFAULT_*``

    :param aws_access_key_id: AWS access key id (fallback when not in env)
    :param aws_secret_access_key: AWS secret key (fallback when not in env)
    :param bucket_name: bucket to use; may contain ``{ENV_VAR}`` placeholders
        expanded from ``os.environ``
    :param endpoint_url: S3 endpoint, honored only for local deployments
    :param use_ssl: whether to use SSL, honored only for local deployments
    :param encryption: server-side encryption; ``False`` means don't encrypt
    :param versioned: whether the bucket should be versioned
    :raises ValueError: when AWS credentials cannot be resolved
    """
    super().__init__()
    self._s3 = None
    self.region_name = configuration.AWS_S3_REGION or region_name or self._DEFAULT_REGION_NAME
    # Bucket name may reference environment variables, e.g. "{USER}-bucket".
    self.bucket_name = (bucket_name or self._DEFAULT_BUCKET_NAME).format(**os.environ)
    self._init_credentials(aws_access_key_id, aws_secret_access_key)

    # let boto3 decide if we don't have local development proper values
    self._endpoint_url = None
    self._use_ssl = True
    # 'encryption' (argument) might be False - means don't encrypt
    self.encryption = self._DEFAULT_ENCRYPTION if encryption is None else encryption
    self.versioned = self._DEFAULT_VERSIONED if versioned is None else versioned

    # if we run locally, make connection properties configurable
    if configuration.is_local_deployment():
        self._apply_local_overrides(endpoint_url, use_ssl)

    if self._aws_access_key_id is None or self._aws_secret_access_key is None:
        raise ValueError("AWS configuration not provided correctly, "
                         "both key id and key is needed")

def _init_credentials(self, aws_access_key_id, aws_secret_access_key):
    """Resolve AWS credentials, preferring environment configuration."""
    self._aws_access_key_id = configuration.AWS_S3_ACCESS_KEY_ID or aws_access_key_id
    self._aws_secret_access_key = \
        configuration.AWS_S3_SECRET_ACCESS_KEY or aws_secret_access_key

def _apply_local_overrides(self, endpoint_url, use_ssl):
    """Apply connection settings meaningful only for local deployments."""
    self._endpoint_url = configuration.S3_ENDPOINT_URL or endpoint_url or \
        self._DEFAULT_LOCAL_ENDPOINT
    self._use_ssl = use_ssl
    # Local S3 alternatives do not support server-side encryption.
    self.encryption = False
def store_blob(self, blob, object_key):
    """Store blob onto S3.

    :param blob: bytes or stream to be stored
    :param object_key: object key under which the blob should be stored
    :return: object version or None if versioning is off
    """
    self._create_bucket_if_needed()
    s3_object = self._s3.Object(self.bucket_name, object_key)
    if self.encryption:
        response = s3_object.put(Body=blob,
                                 ServerSideEncryption=self.encryption)
    else:
        response = s3_object.put(Body=blob)
    if ('VersionId' not in response
            and configuration.is_local_deployment()
            and self.versioned):
        # If we run local deployment, our local S3 alternative does not
        # support versioning. Return a fake one.
        return self._get_fake_version_id()
    return response.get('VersionId')
def configure_logging(**kwargs):
    """Set up logging for worker.

    All handlers stream to stderr. The Selinon trace handler uses a bare
    ``%(message)s`` format so emitted JSON stays parseable when aggregated.

    :param kwargs: accepted for call-site compatibility; currently unused
    """
    # Be more verbose when running locally.
    level = 'DEBUG' if configuration.is_local_deployment() else 'INFO'
    handlers = {
        'default': {
            'level': 'INFO',
            'formatter': 'default',
            'class': 'logging.StreamHandler',
        },
        'selinon_trace': {
            'level': level,
            'formatter': 'selinon_trace_formatter',
            'class': 'logging.StreamHandler',
        },
        'verbose': {
            'level': level,
            'formatter': 'default',
            'class': 'logging.StreamHandler',
        },
    }
    # If you would like to track some library, place its handler here with
    # an appropriate entry - see celery as an example
    # Shared settings for all verbose loggers; copied per entry so dictConfig
    # mutations cannot leak between loggers.
    verbose_logger = {
        'handlers': ['verbose'],
        'level': 'DEBUG',
        'propagate': False
    }
    loggers = {
        '': {
            'handlers': ['default'],
            'level': 'INFO',
        },
        # BUG FIX: 'selinon' was listed twice with identical settings;
        # duplicate dict keys silently overwrite, so keep it once.
        'selinon': dict(verbose_logger),
        'f8a_worker.dispatcher.trace': {
            'handlers': ['selinon_trace'],
            'level': 'DEBUG',
            'propagate': False
        },
        'f8a_worker': dict(verbose_logger),
        'kombu': dict(verbose_logger),
        'celery': dict(verbose_logger),
    }
    logging.config.dictConfig({
        'version': 1,
        'loggers': loggers,
        'formatters': {
            'default': {
                'format': '%(asctime)s [%(levelname)s] %(name)s: %(message)s'
            },
            'selinon_trace_formatter': {
                # no prefixes to parse JSON when aggregating
                'format': '%(message)s'
            }
        },
        'handlers': handlers
    })