    def get_url(self, fieldname, transcoded=False, expiry_seconds=60):
        """
        Get URL of fieldname's content from S3

        :param fieldname: Name of the field on the adapted object
        :type fieldname: str
        :param transcoded: Whether or not to attempt to get a transcoded
                           version of a video
        :type transcoded: bool
        :param expiry_seconds: Number of seconds the signed URL stays valid
        :type expiry_seconds: int
        :return: Signed URL on S3 of the given field's content, or None if
                 the content is not available in the cloud
        :rtype: str
        """
        storage = self._getStorage()
        if fieldname not in storage['cloud_available']:
            logger.warning('%s on %s not available in the cloud', fieldname,
                           self.context.absolute_url())
            return None
        fieldinfo = self.field_info(fieldname)
        if not fieldinfo:
            logger.error('Field info for %s missing from context %s', fieldname,
                         self.context.absolute_url())
            return None
        bucket_name = 'netsight-cloudstorage-%s' % get_value_from_registry(
            'bucket_name'
        )
        s3 = self._get_s3_connection()
        response_headers = {}
        key = None
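        # Prefer the transcoded copy when one was requested; fall back to
        # the original upload bucket below if it is missing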
        if transcoded:
            transcoded_bucket_name = '%s-transcoded' % bucket_name
            bucket = s3.lookup(transcoded_bucket_name)
            if bucket is not None:
                # Check if it is available in the transcoded bucket
                key = bucket.get_key(
                    '%s-%s' % (fieldname, self.context.UID())
                )
            else:
                logger.warning('Transcoded bucket %s does not exist',
                               transcoded_bucket_name)
        if key is None:
            bucket = s3.lookup(bucket_name)
            if bucket is not None:
                key = bucket.get_key(
                    '%s-%s' % (fieldname, self.context.UID())
                )
                if key is None:
                    logger.error('File not found: %s on %s', fieldname,
                                 self.context.absolute_url())
                    return None
            else:
                logger.error('Bucket %s does not exist', bucket_name)
                return None
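            # Originals are served as attachments with their stored filename
            # and mimetype; transcoded copies keep the key's default headers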
            if not transcoded:
                response_headers = {
                    'response-content-disposition':
                        'attachment; filename="%s"' % fieldinfo['filename'],
                    'response-content-type': fieldinfo['mimetype']
                }
        return key.generate_url(expiry_seconds,
                                response_headers=response_headers)

    def _get_bucket(self, postfix=None):
        """Look up the bucket whose name is set in the registry"""
        s3 = self._get_s3_connection()
        bucket_name = 'netsight-cloudstorage-{reg_value}'.format(
            reg_value=get_value_from_registry('bucket_name')
        )
        if postfix:
            bucket_name += postfix
        bucket = s3.lookup(bucket_name)
        if bucket is None:
            logger.warning('Bucket %s does not exist', bucket_name)
        return bucket

    def _get_s3_connection(self):
        """Set up an S3 connection using the registry settings"""
        aws_key = get_value_from_registry('aws_access_key')
        aws_secret_key = get_value_from_registry('aws_secret_access_key')
        return S3Connection(aws_key, aws_secret_key)

    def enqueue(self, enforce_file_size=True):
        """
        Dispatch any relevant file fields off to Celery

        :param enforce_file_size: Skip fields smaller than the configured
                                  minimum size; pass False to allow manually
                                  uploading files below that size
        :type enforce_file_size: bool
        """
        logger.info('Enqueue called for %s', self.context.absolute_url())
        storage = self._getStorage()
        in_progress = storage['in_progress']
        cloud_available = storage['cloud_available']

        for field in self._getFields():
            if field['size'] <= 0:
                # Ignore empty fields
                continue

            # Remove existing cloud info, assuming file data has changed
            if field['name'] in cloud_available:
                del cloud_available[field['name']]

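            # min_file_size is configured in megabytes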
            min_size = get_value_from_registry('min_file_size')
            if field['size'] < min_size * 1024 * 1024 and enforce_file_size:
                logger.info('Field %s on %s is too small (< %sMB)',
                            field['name'],
                            self.context.absolute_url(),
                            min_size)
                continue

            # unique token for this job
            security_token = uuid4().hex
            in_progress[field['name']] = security_token
            # make sure storage token is stored before
            # job goes on queue
            transaction.commit()

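            # Absolute URL of this object as the Celery worker will see it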
            path = '/'.join(self.context.getPhysicalPath())
            plone_url = get_value_from_config('plone_url')
            root_url = '%s/%s' % (plone_url, path)

            logger.info('Queuing field %s on %s to be uploaded', field['name'],
                        self.context.absolute_url())
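            # Browser views the worker calls to fetch the file data and to
            # report success or failure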
            source_url = '%s/@@cloudstorage-retrieve' % root_url
            callback_url = '%s/@@cloudstorage-callback' % root_url
            errorback_url = '%s/@@cloudstorage-error' % root_url
            bucket_name = 'netsight-cloudstorage-%s' % get_value_from_registry(
                'bucket_name'
            )
            aws_key = get_value_from_registry('aws_access_key')
            aws_secret_key = get_value_from_registry('aws_secret_access_key')
            pipeline_name = get_value_from_registry('pipeline_name')
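            # Build the Celery signature for the upload; its callbacks are
            # linked and the whole chain dispatched below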
            upload_task = upload_to_s3.s(
                bucket_name=bucket_name,
                source_url=source_url,
                callback_url=callback_url,
                errorback_url=errorback_url,
                field=field,
                security_token=security_token,
                aws_key=aws_key,
                aws_secret_key=aws_secret_key,
                pipeline_name=pipeline_name,
            )
            logger.info('File mimetype: %s', field['mimetype'])
            transcoding_enabled = get_value_from_registry(
                'transcoding_enabled'
            )
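            # Videos also go through the transcoding pipeline after upload;
            # everything else just triggers the upload callback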
            if transcoding_enabled and field['mimetype'].startswith('video'):
                links = group(upload_callback.s(),
                              transcode_video.s(),
                              transcode_callback.s())
            else:
                links = group(upload_callback.s())
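            # Attach the callback group and dispatch the upload asynchronously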
            upload_task.link(links)
            upload_task.apply_async()