Example #1
    def __init__(self, accesskey, sharedkey):
        self._accesskey = accesskey
        self._connection = GSConnection(accesskey, sharedkey)

        self._buckets = None
        self._bucketbackups = {}
        self._backups = None
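Example #1 shows only the constructor of a backup helper. For orientation, a minimal sketch (not from the project above; placeholder HMAC credentials) of what a bare GSConnection provides once constructed:

from boto.gs.connection import GSConnection

conn = GSConnection('ACCESS_KEY', 'SECRET_KEY')  # placeholder interoperability keys
for bucket in conn.get_all_buckets():            # same bucket-level API as boto's S3
    print(bucket.name)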
Example #2
    def delete(self, *args, **kwargs):
        try:
            logger.info("Attempting to delete files from the GCS bucket")
            logger.info("For the Email QuerySet")
            conn = GSConnection(settings.GS_ACCESS_KEY_ID,
                                settings.GS_SECRET_ACCESS_KEY)
            bucket = conn.get_bucket(settings.GS_BUCKET_NAME)

            for obj in self:
                if obj.xml.name != '':
                    logger.info("deleting xml")
                    bucket.delete_key(obj.xml)
                if obj.pdf.name != '':
                    logger.info("deleting pdf")
                    bucket.delete_key(obj.pdf)
                if obj.adjunto1.name != '':
                    logger.info("deleting adjunto1")
                    bucket.delete_key(obj.adjunto1)
                logger.info("Files deleted")
        except Exception as e:
            logger.error("Error while trying to delete a file from GCS")
            logger.error(e)
        finally:
            logger.info("Deleting QuerySet")
            super(FileQuerySet, self).delete(*args, **kwargs)
            logger.info("QuerySet deleted")
Example #3
    def __init__(self, bucket=STORAGE_BUCKET_NAME, access_key=None,
                 secret_key=None, bucket_acl=BUCKET_ACL, acl=DEFAULT_ACL, headers=HEADERS,
                 gzip=IS_GZIPPED, gzip_content_types=GZIP_CONTENT_TYPES,
                 querystring_auth=QUERYSTRING_AUTH, querystring_expire=QUERYSTRING_EXPIRE,
                 reduced_redundancy=REDUCED_REDUNDANCY,
                 custom_domain=CUSTOM_DOMAIN, secure_urls=SECURE_URLS,
                 location=LOCATION, file_name_charset=FILE_NAME_CHARSET,
                 preload_metadata=PRELOAD_METADATA, calling_format=CALLING_FORMAT):

        self.bucket_acl = bucket_acl
        self.bucket_name = bucket
        self.acl = acl
        self.headers = headers
        self.preload_metadata = preload_metadata
        self.gzip = gzip
        self.gzip_content_types = gzip_content_types
        self.querystring_auth = querystring_auth
        self.querystring_expire = querystring_expire
        self.reduced_redundancy = reduced_redundancy
        self.custom_domain = custom_domain
        self.secure_urls = secure_urls
        self.location = location or ''
        self.location = self.location.lstrip('/')
        self.file_name_charset = file_name_charset

        if not access_key and not secret_key:
            access_key, secret_key = self._get_access_keys()

        self.connection = GSConnection(access_key, secret_key)
        
        self._entries = {}
Example #4
  def get(self):
    """Handle GET requests.

    For the time being, we just provide an information page.  In the future
    there will be a web UI here.
    """
    path = os.path.join(os.path.dirname(__file__), 'index.html')
    user = users.get_current_user()

    connection = GSConnection(config.gs_access_key, config.gs_secret_key)
    bucket = connection.get_bucket(config.gs_bucket)

    # Find all of the batches.
    batches = []
    logging.info('Loading batches')
    rs = bucket.list(prefix=user.email() + '/', delimiter='/')
    for r in rs:
      logging.info(r.name)
      batch_name = r.name.split('/')[1]
      batches.append(self.LoadBatchInfo(user.email(), batch_name, bucket))
    batches.sort(key=lambda i: i.get('update_time', 0), reverse=True)
    self.response.out.write(
        template.render(path, {
            'url': self.request.url,
            'user_id': user.user_id(),
            'email': user.email(),
            'batches': batches,
            }))
Example #5
def upload_to_gs(bucket_name, client_id, client_secret, file, key, acl='public-read'):
    conn = GSConnection(client_id, client_secret, calling_format=OrdinaryCallingFormat())
    bucket = conn.get_bucket(bucket_name)
    k = Key(bucket)
    # generate key
    filename = secure_filename(file.filename)
    key_dir = key + '/' + generate_hash(key) + '/'
    k.key = key_dir + filename
    # delete old data
    for item in bucket.list(prefix='/' + key_dir):
        item.delete()
    # set object settings

    file_data = file.read()
    file_mime = magic.from_buffer(file_data, mime=True)
    size = len(file_data)
    sent = k.set_contents_from_string(
        file_data,
        headers={
            'Content-Disposition': 'attachment; filename=%s' % filename,
            'Content-Type': '%s' % file_mime
        }
    )
    k.set_acl(acl)
    gs_url = 'https://storage.googleapis.com/%s/' % bucket_name
    if sent == size:
        return gs_url + k.key
    return False
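A hedged usage sketch for upload_to_gs as defined above: the file argument only needs .filename and .read() (e.g. a Werkzeug FileStorage), and generate_hash/secure_filename are assumed to come from the example's own module. The stand-in class and credentials below are hypothetical:

import io

class FakeUpload(io.BytesIO):
    # Stand-in for a Werkzeug FileStorage; upload_to_gs uses .filename and .read().
    filename = 'photo.png'

url = upload_to_gs('my-bucket',          # placeholder bucket name
                   'GOOG...ACCESS_KEY',  # placeholder HMAC access key
                   'SECRET',             # placeholder HMAC secret
                   FakeUpload(b'fake image bytes'),
                   'avatars')
print(url or 'upload failed')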
Example #6
    def get(self):
        """Handle GET requests.

    For the time being, we just provide an information page.  In the future
    there will be a web UI here.
    """
        path = os.path.join(os.path.dirname(__file__), 'index.html')
        user = users.get_current_user()

        connection = GSConnection(config.gs_access_key, config.gs_secret_key)
        bucket = connection.get_bucket(config.gs_bucket)

        # Find all of the batches.
        batches = []
        logging.info('Loading batches')
        rs = bucket.list(prefix=user.email() + '/', delimiter='/')
        for r in rs:
            logging.info(r.name)
            batch_name = r.name.split('/')[1]
            batches.append(self.LoadBatchInfo(user.email(), batch_name,
                                              bucket))
        batches.sort(key=lambda i: i.get('update_time', 0), reverse=True)
        self.response.out.write(
            template.render(
                path, {
                    'url': self.request.url,
                    'user_id': user.user_id(),
                    'email': user.email(),
                    'batches': batches,
                }))
Example #7
def upload_to_gs(bucket_name,
                 client_id,
                 client_secret,
                 file,
                 key,
                 acl='public-read'):
    conn = GSConnection(client_id,
                        client_secret,
                        calling_format=OrdinaryCallingFormat())
    bucket = conn.get_bucket(bucket_name)
    k = Key(bucket)
    # generate key
    filename = secure_filename(file.filename)
    key_dir = key + '/' + generate_hash(key) + '/'
    k.key = key_dir + filename
    # delete old data
    for item in bucket.list(prefix='/' + key_dir):
        item.delete()
    # set object settings

    file_data = file.read()
    file_mime = magic.from_buffer(file_data, mime=True)
    size = len(file_data)
    sent = k.set_contents_from_string(file_data,
                                      headers={
                                          'Content-Disposition':
                                          'attachment; filename=%s' % filename,
                                          'Content-Type':
                                          '%s' % file_mime
                                      })
    k.set_acl(acl)
    gs_url = 'https://storage.googleapis.com/%s/' % bucket_name
    if sent == size:
        return gs_url + k.key
    return False
Example #8
def save_image_to_gs(key_name, bf, mime="image/png"):
    try:
        conn = GSConnection(gs_access_key_id=settings.gs_access_key_id,
                            gs_secret_access_key=settings.gs_secret_access_key)
        bucket = conn.get_bucket(settings.bucket_name)
        gs_file = bucket.new_key(key_name)
        gs_file.set_contents_from_string(bf, policy="public-read",
                                         headers={"Content-Type": mime})
    except Exception:
        return False
    return True
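A hypothetical call for save_image_to_gs above, assuming a settings module that carries gs_access_key_id, gs_secret_access_key and bucket_name as the function expects:

with open('chart.png', 'rb') as f:
    ok = save_image_to_gs('charts/chart.png', f.read())
print('uploaded' if ok else 'upload failed')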
Example #9
    def test_4_cors_xml(self):
        """test setting and getting of CORS XML documents"""
        # canned empty and populated CORS XML documents
        cors_empty = "<CorsConfig></CorsConfig>"
        cors_doc = (
            "<CorsConfig><Cors><Origins><Origin>origin1.example.com"
            "</Origin><Origin>origin2.example.com</Origin></Origins>"
            "<Methods><Method>GET</Method><Method>PUT</Method>"
            "<Method>POST</Method></Methods><ResponseHeaders>"
            "<ResponseHeader>foo</ResponseHeader>"
            "<ResponseHeader>bar</ResponseHeader></ResponseHeaders>"
            "</Cors></CorsConfig>"
        )
        c = GSConnection()
        # create a new bucket
        bucket_name = "test-%d" % int(time.time())
        bucket = c.create_bucket(bucket_name)
        # now call get_bucket to see if it's really there
        bucket = c.get_bucket(bucket_name)
        # get new bucket cors and make sure it's empty
        cors = re.sub(r"\s", "", bucket.get_cors().to_xml())
        assert cors == cors_empty
        # set cors document on new bucket
        bucket.set_cors(cors_doc)
        cors = re.sub(r"\s", "", bucket.get_cors().to_xml())
        assert cors == cors_doc
        # delete bucket
        c.delete_bucket(bucket)

        # repeat cors tests using boto's storage_uri interface
        # create a new bucket
        bucket_name = "test-%d" % int(time.time())
        uri = storage_uri("gs://" + bucket_name)
        uri.create_bucket()
        # get new bucket cors and make sure it's empty
        cors = re.sub(r"\s", "", uri.get_cors().to_xml())
        assert cors == cors_empty
        # set cors document on new bucket
        cors_obj = Cors()
        h = handler.XmlHandler(cors_obj, None)
        xml.sax.parseString(cors_doc, h)
        uri.set_cors(cors_obj)
        cors = re.sub(r"\s", "", uri.get_cors().to_xml())
        assert cors == cors_doc
        # delete bucket
        uri.delete_bucket()

        print "--- tests completed ---"
Example #10
    def connect(self, access_key_id=None, secret_access_key=None, **kwargs):
        """
        Opens a connection to appropriate provider, depending on provider
        portion of URI. Requires Credentials defined in boto config file (see
        boto/pyami/config.py).
        @type storage_uri: StorageUri
        @param storage_uri: StorageUri specifying a bucket or a bucket+object
        @rtype: L{AWSAuthConnection<boto.gs.connection.AWSAuthConnection>}
        @return: A connection to storage service provider of the given URI.
        """

        if not self.connection:
            if self.scheme == 's3':
                from boto.s3.connection import S3Connection
                self.connection = S3Connection(access_key_id,
                                               secret_access_key, **kwargs)
            elif self.scheme == 'gs':
                from boto.gs.connection import GSConnection
                self.connection = GSConnection(access_key_id,
                                               secret_access_key, **kwargs)
            elif self.scheme == 'file':
                from boto.file.connection import FileConnection
                self.connection = FileConnection(self)
            else:
                raise InvalidUriError('Unrecognized scheme "%s"' % self.scheme)
        self.connection.debug = self.debug
        return self.connection
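In boto this connect() method is usually reached through the storage_uri helper rather than called directly; a short sketch with a placeholder bucket name:

import boto

uri = boto.storage_uri('my-bucket/some/key.txt', 'gs')
conn = uri.connect()  # dispatches to GSConnection for the 'gs' scheme
print(conn.__class__.__name__)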
Example #11
class GcsCache(S3Cache):
    '''A cache that transfers files to and from GCS.'''

    def __init__(self, bucket=None, prefix=None, account=None, upstream=None,**kwargs):
        '''Init a new GcsCache.'''
        from boto.gs.connection import GSConnection

        super(S3Cache, self).__init__(upstream=upstream) # Skip parent __init__

        self.is_remote = False
        self.access_key = account['access']
        self.secret = account['secret']
        self.project = account['project']
        self.bucket_name = bucket
        self.prefix = prefix

        self.conn = GSConnection(self.access_key, self.secret, is_secure=False)
        self.bucket = self.conn.get_bucket(self.bucket_name)

    def __repr__(self):
        return "GcsCache: bucket={} prefix={} access={} upstream={}".format(
            self.bucket, self.prefix, self.access_key, self.upstream)
Example #12
def gs_upload_file(file_name):
    with open(file_name, 'r') as training_file:
        con = GSConnection(settings.GS_ACCESS_KEY, settings.GS_SECRET)
        try:
            bucket = con.create_bucket(GOOGLE_BUCKET_NAME)
        except Exception:
            # Bucket exists already, so just bind to it
            bucket = Bucket(connection=con, name=GOOGLE_BUCKET_NAME)

        key = Key(bucket)
        key.key = file_name
        key.set_contents_from_file(training_file)
        key.make_public()

    # Remove the file from disk
    os.remove(file_name)
Example #13
class GSTestCase(unittest.TestCase):
    gs = True

    def setUp(self):
        self._conn = GSConnection()
        self._buckets = []
        self._tempdirs = []

    # Retry with an exponential backoff if a server error is received. This
    # ensures that we try *really* hard to clean up after ourselves.
    @retry(GSResponseError)
    def tearDown(self):
        while len(self._tempdirs):
            tmpdir = self._tempdirs.pop()
            shutil.rmtree(tmpdir, ignore_errors=True)

        while(len(self._buckets)):
            b = self._buckets[-1]
            try:
                bucket = self._conn.get_bucket(b)
                while len(list(bucket.list_versions())) > 0:
                    for k in bucket.list_versions():
                        try:
                            bucket.delete_key(k.name, generation=k.generation)
                        except GSResponseError as e:
                            if e.status != 404:
                                raise
                bucket.delete()
            except GSResponseError as e:
                if e.status != 404:
                    raise
            self._buckets.pop()
Example #14
    def __init__(self, bucket=STORAGE_BUCKET_NAME, access_key=None,
                 secret_key=None, bucket_acl=BUCKET_ACL, acl=DEFAULT_ACL, headers=HEADERS,
                 gzip=IS_GZIPPED, gzip_content_types=GZIP_CONTENT_TYPES,
                 querystring_auth=QUERYSTRING_AUTH, querystring_expire=QUERYSTRING_EXPIRE,
                 reduced_redundancy=REDUCED_REDUNDANCY,
                 custom_domain=CUSTOM_DOMAIN, secure_urls=SECURE_URLS,
                 location=LOCATION, file_name_charset=FILE_NAME_CHARSET,
                 preload_metadata=PRELOAD_METADATA, calling_format=CALLING_FORMAT):

        self.bucket_acl = bucket_acl
        self.bucket_name = bucket
        self.acl = acl
        self.headers = headers
        self.preload_metadata = preload_metadata
        self.gzip = gzip
        self.gzip_content_types = gzip_content_types
        self.querystring_auth = querystring_auth
        self.querystring_expire = querystring_expire
        self.reduced_redundancy = reduced_redundancy
        self.custom_domain = custom_domain
        self.secure_urls = secure_urls
        self.location = location or ''
        self.location = self.location.lstrip('/')
        self.file_name_charset = file_name_charset

        if not access_key and not secret_key:
            access_key, secret_key = self._get_access_keys()

        self.connection = GSConnection(access_key, secret_key)

        self._entries = {}
Example #15
 def _create_connection(self):
     """Returns a GSConnection object we can use to access Google Storage."""
     if self._gs_access_key_id:
         return GSConnection(
             gs_access_key_id=self._gs_access_key_id,
             gs_secret_access_key=self._gs_secret_access_key)
     else:
         return AnonymousGSConnection()
Example #16
    def connect(self, access_key_id=None, secret_access_key=None, **kwargs):
        """
        Opens a connection to appropriate provider, depending on provider
        portion of URI. Requires Credentials defined in boto config file (see
        boto/pyami/config.py).
        @type storage_uri: StorageUri
        @param storage_uri: StorageUri specifying a bucket or a bucket+object
        @rtype: L{AWSAuthConnection<boto.gs.connection.AWSAuthConnection>}
        @return: A connection to storage service provider of the given URI.
        """
        connection_args = dict(self.connection_args or ())

        if (hasattr(self, 'suppress_consec_slashes') and
                'suppress_consec_slashes' not in connection_args):
            connection_args['suppress_consec_slashes'] = (
                self.suppress_consec_slashes)
        connection_args.update(kwargs)
        if not self.connection:
            if self.scheme in self.provider_pool:
                self.connection = self.provider_pool[self.scheme]
            elif self.scheme == 's3':
                from boto.s3.connection import S3Connection
                self.connection = S3Connection(access_key_id,
                                               secret_access_key,
                                               **connection_args)
                self.provider_pool[self.scheme] = self.connection
            elif self.scheme == 'gs':
                from boto.gs.connection import GSConnection
                # Use OrdinaryCallingFormat instead of boto-default
                # SubdomainCallingFormat because the latter changes the hostname
                # that's checked during cert validation for HTTPS connections,
                # which will fail cert validation (when cert validation is
                # enabled).
                #
                # The same is not true for S3's HTTPS certificates. In fact,
                # we don't want to do this for S3 because S3 requires the
                # subdomain to match the location of the bucket. If the proper
                # subdomain is not used, the server will return a 301 redirect
                # with no Location header.
                #
                # Note: the following import can't be moved up to the
                # start of this file else it causes a config import failure when
                # run from the resumable upload/download tests.
                from boto.s3.connection import OrdinaryCallingFormat
                connection_args['calling_format'] = OrdinaryCallingFormat()
                self.connection = GSConnection(access_key_id,
                                               secret_access_key,
                                               **connection_args)
                self.provider_pool[self.scheme] = self.connection
            elif self.scheme == 'file':
                from boto.file.connection import FileConnection
                self.connection = FileConnection(self)
            else:
                raise InvalidUriError('Unrecognized scheme "%s"' %
                                      self.scheme)
        self.connection.debug = self.debug
        return self.connection
Example #17
def connect_gs(gs_access_key_id=None, gs_secret_access_key=None, **kwargs):
    """
    @type gs_access_key_id: string
    @param gs_access_key_id: Your Google Cloud Storage Access Key ID

    @type gs_secret_access_key: string
    @param gs_secret_access_key: Your Google Cloud Storage Secret Access Key

    @rtype: L{GSConnection<boto.gs.connection.GSConnection>}
    @return: A connection to Google's Storage service
    """
    from boto.gs.connection import GSConnection
    return GSConnection(gs_access_key_id, gs_secret_access_key, **kwargs)
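connect_gs is also re-exported at the top level of the boto package, so the usual call site looks like this (placeholder keys):

import boto

conn = boto.connect_gs('GOOG...ACCESS_KEY', 'SECRET')
for b in conn.get_all_buckets():
    print(b.name)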
Example #18
 def test_2_copy_key(self):
     """test copying a key from one bucket to another"""
     c = GSConnection()
     # create two new, empty buckets
     bucket_name_1 = "test1-%d" % int(time.time())
     bucket_name_2 = "test2-%d" % int(time.time())
     bucket1 = c.create_bucket(bucket_name_1)
     bucket2 = c.create_bucket(bucket_name_2)
     # verify buckets got created
     bucket1 = c.get_bucket(bucket_name_1)
     bucket2 = c.get_bucket(bucket_name_2)
     # create a key in bucket1 and give it some content
     key_name = "foobar"
     k1 = bucket1.new_key(key_name)
     assert isinstance(k1, bucket1.key_class)
     k1.name = key_name
     s = "This is a test."
     k1.set_contents_from_string(s)
     # copy the new key from bucket1 to bucket2
     k1.copy(bucket_name_2, key_name)
     # now copy the contents from bucket2 to a local file
     k2 = bucket2.lookup(key_name)
     assert isinstance(k2, bucket2.key_class)
     fp = open("foobar", "wb")
     k2.get_contents_to_file(fp)
     fp.close()
     fp = open("foobar")
     # check to make sure content read is identical to original
     assert s == fp.read(), "move test failed!"
     fp.close()
     # delete keys
     bucket1.delete_key(k1)
     bucket2.delete_key(k2)
     # delete test buckets
     c.delete_bucket(bucket1)
     c.delete_bucket(bucket2)
     # delete temp file
     os.unlink("foobar")
Example #19
    def __init__(self, bucket=None, prefix=None, account=None, upstream=None,**kwargs):
        '''Init a new GcsCache.'''
        from boto.gs.connection import GSConnection

        super(S3Cache, self).__init__(upstream=upstream) # Skip parent __init__

        self.is_remote = False
        self.access_key = account['access']
        self.secret = account['secret']
        self.project = account['project']
        self.bucket_name = bucket
        self.prefix = prefix

        self.conn = GSConnection(self.access_key, self.secret, is_secure=False)
        self.bucket = self.conn.get_bucket(self.bucket_name)
Example #20
    def connect(self, access_key_id=None, secret_access_key=None, **kwargs):
        """
        Opens a connection to appropriate provider, depending on provider
        portion of URI. Requires Credentials defined in boto config file (see
        boto/pyami/config.py).
        @type storage_uri: StorageUri
        @param storage_uri: StorageUri specifying a bucket or a bucket+object
        @rtype: L{AWSAuthConnection<boto.gs.connection.AWSAuthConnection>}
        @return: A connection to storage service provider of the given URI.
        """

        connection_args = dict(self.connection_args or ())
        # Use OrdinaryCallingFormat instead of boto-default
        # SubdomainCallingFormat because the latter changes the hostname
        # that's checked during cert validation for HTTPS connections,
        # which will fail cert validation (when cert validation is enabled).
        # Note: the following import can't be moved up to the start of
        # this file else it causes a config import failure when run from
        # the resumable upload/download tests.
        from boto.s3.connection import OrdinaryCallingFormat
        connection_args['calling_format'] = OrdinaryCallingFormat()
        connection_args.update(kwargs)
        if not self.connection:
            if self.scheme == 's3':
                from boto.s3.connection import S3Connection
                self.connection = S3Connection(access_key_id,
                                               secret_access_key,
                                               **connection_args)
            elif self.scheme == 'gs':
                from boto.gs.connection import GSConnection
                self.connection = GSConnection(access_key_id,
                                               secret_access_key,
                                               **connection_args)
            elif self.scheme == 'file':
                from boto.file.connection import FileConnection
                self.connection = FileConnection(self)
            else:
                raise InvalidUriError('Unrecognized scheme "%s"' %
                                      self.scheme)
        self.connection.debug = self.debug
        return self.connection
Example #21
    def test_4_cors_xml(self):
        """test setting and getting of CORS XML documents"""
        # canned empty and populated CORS XML documents
        cors_empty = '<CorsConfig></CorsConfig>'
        cors_doc = ('<CorsConfig><Cors><Origins><Origin>origin1.example.com'
                    '</Origin><Origin>origin2.example.com</Origin></Origins>'
                    '<Methods><Method>GET</Method><Method>PUT</Method>'
                    '<Method>POST</Method></Methods><ResponseHeaders>'
                    '<ResponseHeader>foo</ResponseHeader>'
                    '<ResponseHeader>bar</ResponseHeader></ResponseHeaders>'
                    '</Cors></CorsConfig>')
        c = GSConnection()
        # create a new bucket
        bucket_name = 'test-%d' % int(time.time())
        bucket = c.create_bucket(bucket_name)
        # now call get_bucket to see if it's really there
        bucket = c.get_bucket(bucket_name)
        # get new bucket cors and make sure it's empty
        cors = re.sub(r'\s', '', bucket.get_cors().to_xml())
        assert cors == cors_empty
        # set cors document on new bucket
        bucket.set_cors(cors_doc)
        cors = re.sub(r'\s', '', bucket.get_cors().to_xml())
        assert cors == cors_doc
        # delete bucket
        c.delete_bucket(bucket)

        # repeat cors tests using boto's storage_uri interface
        # create a new bucket
        bucket_name = 'test-%d' % int(time.time())
        uri = storage_uri('gs://' + bucket_name)
        uri.create_bucket()
        # get new bucket cors and make sure it's empty
        cors = re.sub(r'\s', '', uri.get_cors().to_xml())
        assert cors == cors_empty
        # set cors document on new bucket
        cors_obj = Cors()
        h = handler.XmlHandler(cors_obj, None)
        xml.sax.parseString(cors_doc, h)
        uri.set_cors(cors_obj)
        cors = re.sub(r'\s', '', uri.get_cors().to_xml())
        assert cors == cors_doc
        # delete bucket
        uri.delete_bucket()

        print('--- tests completed ---')
Example #22
 def test_2_copy_key(self):
     """test copying a key from one bucket to another"""
     c = GSConnection()
     # create two new, empty buckets
     bucket_name_1 = 'test1-%d' % int(time.time())
     bucket_name_2 = 'test2-%d' % int(time.time())
     bucket1 = c.create_bucket(bucket_name_1)
     bucket2 = c.create_bucket(bucket_name_2)
     # verify buckets got created
     bucket1 = c.get_bucket(bucket_name_1)
     bucket2 = c.get_bucket(bucket_name_2)
     # create a key in bucket1 and give it some content
     key_name = 'foobar'
     k1 = bucket1.new_key(key_name)
     assert isinstance(k1, bucket1.key_class)
     k1.name = key_name
     s = 'This is a test.'
     k1.set_contents_from_string(s)
     # copy the new key from bucket1 to bucket2
     k1.copy(bucket_name_2, key_name)
     # now copy the contents from bucket2 to a local file
     k2 = bucket2.lookup(key_name)
     assert isinstance(k2, bucket2.key_class)
     fp = open('foobar', 'wb')
     k2.get_contents_to_file(fp)
     fp.close()
     fp = open('foobar')
     # check to make sure content read is identical to original
     assert s == fp.read(), 'copy test failed!'
     fp.close()
     # delete keys
     bucket1.delete_key(k1)
     bucket2.delete_key(k2)
     # delete test buckets
     c.delete_bucket(bucket1)
     c.delete_bucket(bucket2)
     # delete temp file
     os.unlink('foobar')
Example #23
from datetime import datetime
from time import sleep

captured_path = '/home/pi/photobooth/captured'
uploaded_path = '/home/pi/photobooth/uploaded'
py_path = '/home/pi/photobooth/py'

config = ConfigParser()
config.read(os.path.join(py_path, 'config.secret'))

gs_project_id = config.get('Google', 'ProjectId')  # my project
gs_bucket_name = gs_project_id + '.appspot.com'
gs_bucket_destination_prefix = 'photobooth'

conn = GSConnection()
bucket = conn.get_bucket(gs_bucket_name)

firebase_secret = config.get('Firebase', 'Secret')
firebase_destination_prefix = 'images'

auth = firebase.FirebaseAuthentication(firebase_secret,
                                       config.get('Firebase',
                                                  'Email'))  # my email
user = auth.get_user()
app = firebase.FirebaseApplication(config.get('Firebase', 'App'),
                                   authentication=None)
app.authentication = auth

# Find the maximum key in Firebase right now; assume the format is [0-9]{5}
all_things = app.get('/images', None)
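The trailing comment promises a max-key scan; a hedged sketch of what that next step might look like, assuming the keys under /images really are five-digit strings:

# Hypothetical continuation: derive the next zero-padded key.
keys = [int(k) for k in (all_things or {})]
next_key = '%05d' % ((max(keys) + 1) if keys else 0)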
Example #24
    def test_1_basic(self):
        """basic regression test for Google Cloud Storage"""
        print "--- running GSConnection tests ---"
        c = GSConnection()
        # create a new, empty bucket
        bucket_name = "test-%d" % int(time.time())
        bucket = c.create_bucket(bucket_name)
        # now try a get_bucket call and see if it's really there
        bucket = c.get_bucket(bucket_name)
        key_name = "foobar"
        k = bucket.new_key(key_name)
        s1 = "This is a test of file upload and download"
        s2 = "This is a second string to test file upload and download"
        k.set_contents_from_string(s1)
        fp = open(key_name, "wb")
        # now get the contents from s3 to a local file
        k.get_contents_to_file(fp)
        fp.close()
        fp = open(key_name)
        # check to make sure content read from s3 is identical to original
        assert s1 == fp.read(), "corrupted file"
        fp.close()
        bucket.delete_key(k)
        # test a few variations on get_all_keys - first load some data
        # for the first one, let's override the content type
        phony_mimetype = "application/x-boto-test"
        headers = {"Content-Type": phony_mimetype}
        k.name = "foo/bar"
        k.set_contents_from_string(s1, headers)
        k.name = "foo/bas"
        k.set_contents_from_filename("foobar")
        k.name = "foo/bat"
        k.set_contents_from_string(s1)
        k.name = "fie/bar"
        k.set_contents_from_string(s1)
        k.name = "fie/bas"
        k.set_contents_from_string(s1)
        k.name = "fie/bat"
        k.set_contents_from_string(s1)
        # try resetting the contents to another value
        md5 = k.md5
        k.set_contents_from_string(s2)
        assert k.md5 != md5
        # Test for stream API
        fp2 = open("foobar", "rb")
        k.md5 = None
        k.base64md5 = None
        k.set_contents_from_stream(fp2, headers=headers)
        fp = open("foobar1", "wb")
        k.get_contents_to_file(fp)
        fp.close()
        fp2.seek(0, 0)
        fp = open("foobar1", "rb")
        assert fp2.read() == fp.read(), "Chunked Transfer corrupted the Data"
        fp.close()
        fp2.close()
        os.unlink("foobar1")
        os.unlink("foobar")
        all = bucket.get_all_keys()
        assert len(all) == 6
        rs = bucket.get_all_keys(prefix="foo")
        assert len(rs) == 3
        rs = bucket.get_all_keys(prefix="", delimiter="/")
        assert len(rs) == 2
        rs = bucket.get_all_keys(maxkeys=5)
        assert len(rs) == 5
        # test the lookup method
        k = bucket.lookup("foo/bar")
        assert isinstance(k, bucket.key_class)
        assert k.content_type == phony_mimetype
        k = bucket.lookup("notthere")
        assert k == None
        # try some metadata stuff
        key_name = "has_metadata"
        k = bucket.new_key(key_name)
        mdkey1 = "meta1"
        mdval1 = "This is the first metadata value"
        k.set_metadata(mdkey1, mdval1)
        mdkey2 = "meta2"
        mdval2 = "This is the second metadata value"
        k.set_metadata(mdkey2, mdval2)
        # try a unicode metadata value

        mdval3 = u"föö"
        mdkey3 = "meta3"
        k.set_metadata(mdkey3, mdval3)
        k.set_contents_from_string(s1)

        k = bucket.lookup(key_name)
        assert k.get_metadata(mdkey1) == mdval1
        assert k.get_metadata(mdkey2) == mdval2
        assert k.get_metadata(mdkey3) == mdval3
        k = bucket.new_key(key_name)
        k.get_contents_as_string()
        assert k.get_metadata(mdkey1) == mdval1
        assert k.get_metadata(mdkey2) == mdval2
        assert k.get_metadata(mdkey3) == mdval3
        bucket.delete_key(k)
        # test list and iterator
        rs1 = bucket.list()
        num_iter = 0
        for r in rs1:
            num_iter = num_iter + 1
        rs = bucket.get_all_keys()
        num_keys = len(rs)
        assert num_iter == num_keys
        # try some acl stuff
        bucket.set_acl("public-read")
        acl = bucket.get_acl()
        assert len(acl.entries.entry_list) == 2
        bucket.set_acl("private")
        acl = bucket.get_acl()
        assert len(acl.entries.entry_list) == 1
        k = bucket.lookup("foo/bar")
        k.set_acl("public-read")
        acl = k.get_acl()
        assert len(acl.entries.entry_list) == 2
        k.set_acl("private")
        acl = k.get_acl()
        assert len(acl.entries.entry_list) == 1
        #
        # Test case-insensitivity of XML ACL parsing.
        acl_xml = (
            "<ACCESSControlList><EntrIes><Entry>"
            + '<Scope type="AllUsers"></Scope><Permission>READ</Permission>'
            + "</Entry></EntrIes></ACCESSControlList>"
        )
        acl = boto.gs.acl.ACL()
        h = handler.XmlHandler(acl, bucket)
        xml.sax.parseString(acl_xml, h)
        bucket.set_acl(acl)
        assert len(acl.entries.entry_list) == 1
        #
        # try set/get raw logging subresource
        empty_logging_str = "<?xml version='1.0' encoding='UTF-8'?><Logging/>"
        logging_str = (
            "<?xml version='1.0' encoding='UTF-8'?><Logging>"
            "<LogBucket>log-bucket</LogBucket>" + "<LogObjectPrefix>example</LogObjectPrefix>" + "</Logging>"
        )
        bucket.set_subresource("logging", logging_str)
        assert bucket.get_subresource("logging") == logging_str
        # try disable/enable logging
        bucket.disable_logging()
        assert bucket.get_subresource("logging") == empty_logging_str
        bucket.enable_logging("log-bucket", "example")
        assert bucket.get_subresource("logging") == logging_str
        # now delete all keys in bucket
        for k in bucket:
            bucket.delete_key(k)
        # now delete bucket
        time.sleep(5)
        c.delete_bucket(bucket)
Example #25
class GSTestCase(unittest.TestCase):
    gs = True

    def setUp(self):
        self._conn = GSConnection()
        self._buckets = []
        self._tempdirs = []

    # Retry with an exponential backoff if a server error is received. This
    # ensures that we try *really* hard to clean up after ourselves.
    @retry(GSResponseError)
    def tearDown(self):
        while len(self._tempdirs):
            tmpdir = self._tempdirs.pop()
            shutil.rmtree(tmpdir, ignore_errors=True)

        while(len(self._buckets)):
            b = self._buckets[-1]
            try:
                bucket = self._conn.get_bucket(b)
                while len(list(bucket.list_versions())) > 0:
                    for k in bucket.list_versions():
                        try:
                            bucket.delete_key(k.name, generation=k.generation)
                        except GSResponseError as e:
                            if e.status != 404:
                                raise
                bucket.delete()
            except GSResponseError as e:
                if e.status != 404:
                    raise
            self._buckets.pop()

    def _GetConnection(self):
        """Returns the GSConnection object used to connect to GCS."""
        return self._conn

    def _MakeTempName(self):
        """Creates and returns a temporary name for testing that is likely to be
        unique."""
        return "boto-gs-test-%s" % repr(time.time()).replace(".", "-")

    def _MakeBucketName(self):
        """Creates and returns a temporary bucket name for testing that is
        likely to be unique."""
        b = self._MakeTempName()
        self._buckets.append(b)
        return b

    def _MakeBucket(self):
        """Creates and returns temporary bucket for testing. After the test, the
        contents of the bucket and the bucket itself will be deleted."""
        b = self._conn.create_bucket(self._MakeBucketName())
        return b

    def _MakeKey(self, data='', bucket=None, set_contents=True):
        """Creates and returns a Key with provided data. If no bucket is given,
        a temporary bucket is created."""
        if data and not set_contents:
            # The data and set_contents parameters are mutually exclusive. 
            raise ValueError('MakeKey called with a non-empty data parameter '
                             'but set_contents was set to False.')
        if not bucket:
            bucket = self._MakeBucket()
        key_name = self._MakeTempName()
        k = bucket.new_key(key_name)
        if set_contents:
            k.set_contents_from_string(data)
        return k

    def _MakeVersionedBucket(self):
        """Creates and returns temporary versioned bucket for testing. After the
        test, the contents of the bucket and the bucket itself will be
        deleted."""
        b = self._MakeBucket()
        b.configure_versioning(True)
        return b

    def _MakeTempDir(self):
        """Creates and returns a temporary directory on disk. After the test,
        the contents of the directory and the directory itself will be
        deleted."""
        tmpdir = tempfile.mkdtemp(prefix=self._MakeTempName())
        self._tempdirs.append(tmpdir)
        return tmpdir
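A minimal test built on the fixture above, for illustration only: _MakeKey provisions a throwaway bucket and key, and tearDown cleans both up.

class RoundTripTest(GSTestCase):
    def test_round_trip(self):
        k = self._MakeKey(data='hello')
        self.assertEqual(k.get_contents_as_string(), 'hello')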
Example #26
 def test_3_default_object_acls(self):
     """test default object acls"""
     # regexp for matching project-private default object ACL
     project_private_re = (
         r"\s*<AccessControlList>\s*<Entries>\s*<Entry>"
         r'\s*<Scope type="GroupById"><ID>[0-9a-fA-F]+</ID></Scope>'
         r"\s*<Permission>FULL_CONTROL</Permission>\s*</Entry>\s*<Entry>"
         r'\s*<Scope type="GroupById"><ID>[0-9a-fA-F]+</ID></Scope>'
         r"\s*<Permission>FULL_CONTROL</Permission>\s*</Entry>\s*<Entry>"
         r'\s*<Scope type="GroupById"><ID>[0-9a-fA-F]+</ID></Scope>'
         r"\s*<Permission>READ</Permission></Entry>\s*</Entries>"
         r"\s*</AccessControlList>\s*"
     )
     c = GSConnection()
     # create a new bucket
     bucket_name = "test-%d" % int(time.time())
     bucket = c.create_bucket(bucket_name)
     # now call get_bucket to see if it's really there
     bucket = c.get_bucket(bucket_name)
     # get default acl and make sure it's project-private
     acl = bucket.get_def_acl()
     assert re.search(project_private_re, acl.to_xml())
     # set default acl to a canned acl and verify it gets set
     bucket.set_def_acl("public-read")
     acl = bucket.get_def_acl()
     # save public-read acl for later test
     public_read_acl = acl
     assert acl.to_xml() == (
         "<AccessControlList><Entries><Entry>"
         + '<Scope type="AllUsers"></Scope><Permission>READ</Permission>'
         + "</Entry></Entries></AccessControlList>"
     )
     # back to private acl
     bucket.set_def_acl("private")
     acl = bucket.get_def_acl()
     assert acl.to_xml() == "<AccessControlList></AccessControlList>"
     # set default acl to an xml acl and verify it gets set
     bucket.set_def_acl(public_read_acl)
     acl = bucket.get_def_acl()
     assert acl.to_xml() == (
         "<AccessControlList><Entries><Entry>"
         + '<Scope type="AllUsers"></Scope><Permission>READ</Permission>'
         + "</Entry></Entries></AccessControlList>"
     )
     # back to private acl
     bucket.set_def_acl("private")
     acl = bucket.get_def_acl()
     assert acl.to_xml() == "<AccessControlList></AccessControlList>"
     # delete bucket
     c.delete_bucket(bucket)
     # repeat default acl tests using boto's storage_uri interface
     # create a new bucket
     bucket_name = "test-%d" % int(time.time())
     uri = storage_uri("gs://" + bucket_name)
     uri.create_bucket()
     # get default acl and make sure it's project-private
     acl = uri.get_def_acl()
     assert re.search(project_private_re, acl.to_xml())
     # set default acl to a canned acl and verify it gets set
     uri.set_def_acl("public-read")
     acl = uri.get_def_acl()
     # save public-read acl for later test
     public_read_acl = acl
     assert acl.to_xml() == (
         "<AccessControlList><Entries><Entry>"
         + '<Scope type="AllUsers"></Scope><Permission>READ</Permission>'
         + "</Entry></Entries></AccessControlList>"
     )
     # back to private acl
     uri.set_def_acl("private")
     acl = uri.get_def_acl()
     assert acl.to_xml() == "<AccessControlList></AccessControlList>"
     # set default acl to an xml acl and verify it gets set
     uri.set_def_acl(public_read_acl)
     acl = uri.get_def_acl()
     assert acl.to_xml() == (
         "<AccessControlList><Entries><Entry>"
         + '<Scope type="AllUsers"></Scope><Permission>READ</Permission>'
         + "</Entry></Entries></AccessControlList>"
     )
     # back to private acl
     uri.set_def_acl("private")
     acl = uri.get_def_acl()
     assert acl.to_xml() == "<AccessControlList></AccessControlList>"
     # delete bucket
     uri.delete_bucket()
Example #27
    def test_1_basic(self):
        """basic regression test for Google Cloud Storage"""
        print('--- running GSConnection tests ---')
        c = GSConnection()
        # create a new, empty bucket
        bucket_name = 'test-%d' % int(time.time())
        bucket = c.create_bucket(bucket_name)
        # now try a get_bucket call and see if it's really there
        bucket = c.get_bucket(bucket_name)
        k = bucket.new_key()
        k.name = 'foobar'
        s1 = 'This is a test of file upload and download'
        s2 = 'This is a second string to test file upload and download'
        k.set_contents_from_string(s1)
        fp = open('foobar', 'wb')
        # now get the contents from s3 to a local file
        k.get_contents_to_file(fp)
        fp.close()
        fp = open('foobar')
        # check to make sure content read from s3 is identical to original
        assert s1 == fp.read(), 'corrupted file'
        fp.close()
        bucket.delete_key(k)
        # test a few variations on get_all_keys - first load some data
        # for the first one, let's override the content type
        phony_mimetype = 'application/x-boto-test'
        headers = {'Content-Type': phony_mimetype}
        k.name = 'foo/bar'
        k.set_contents_from_string(s1, headers)
        k.name = 'foo/bas'
        k.set_contents_from_filename('foobar')
        k.name = 'foo/bat'
        k.set_contents_from_string(s1)
        k.name = 'fie/bar'
        k.set_contents_from_string(s1)
        k.name = 'fie/bas'
        k.set_contents_from_string(s1)
        k.name = 'fie/bat'
        k.set_contents_from_string(s1)
        # try resetting the contents to another value
        md5 = k.md5
        k.set_contents_from_string(s2)
        assert k.md5 != md5
        # Test for stream API
        fp2 = open('foobar', 'rb')
        k.md5 = None
        k.base64md5 = None
        k.set_contents_from_stream(fp2, headers=headers)
        fp = open('foobar1', 'wb')
        k.get_contents_to_file(fp)
        fp.close()
        fp2.seek(0,0)
        fp = open('foobar1', 'rb')
        assert (fp2.read() == fp.read()), 'Chunked Transfer corrupted the Data'
        fp.close()
        fp2.close()
        os.unlink('foobar1')
        os.unlink('foobar')
        all = bucket.get_all_keys()
        assert len(all) == 6
        rs = bucket.get_all_keys(prefix='foo')
        assert len(rs) == 3
        rs = bucket.get_all_keys(prefix='', delimiter='/')
        assert len(rs) == 2
        rs = bucket.get_all_keys(maxkeys=5)
        assert len(rs) == 5
        # test the lookup method
        k = bucket.lookup('foo/bar')
        assert isinstance(k, bucket.key_class)
        assert k.content_type == phony_mimetype
        k = bucket.lookup('notthere')
        assert k == None
        # try some metadata stuff
        k = bucket.new_key()
        k.name = 'has_metadata'
        mdkey1 = 'meta1'
        mdval1 = 'This is the first metadata value'
        k.set_metadata(mdkey1, mdval1)
        mdkey2 = 'meta2'
        mdval2 = 'This is the second metadata value'
        k.set_metadata(mdkey2, mdval2)
        # try a unicode metadata value

        mdval3 = u'föö'
        mdkey3 = 'meta3'
        k.set_metadata(mdkey3, mdval3)
        k.set_contents_from_string(s1)

        k = bucket.lookup('has_metadata')
        assert k.get_metadata(mdkey1) == mdval1
        assert k.get_metadata(mdkey2) == mdval2
        assert k.get_metadata(mdkey3) == mdval3
        k = bucket.new_key()
        k.name = 'has_metadata'
        k.get_contents_as_string()
        assert k.get_metadata(mdkey1) == mdval1
        assert k.get_metadata(mdkey2) == mdval2
        assert k.get_metadata(mdkey3) == mdval3
        bucket.delete_key(k)
        # test list and iterator
        rs1 = bucket.list()
        num_iter = 0
        for r in rs1:
            num_iter = num_iter + 1
        rs = bucket.get_all_keys()
        num_keys = len(rs)
        assert num_iter == num_keys
        # try some acl stuff
        bucket.set_acl('public-read')
        acl = bucket.get_acl()
        assert len(acl.entries.entry_list) == 2
        bucket.set_acl('private')
        acl = bucket.get_acl()
        assert len(acl.entries.entry_list) == 1
        k = bucket.lookup('foo/bar')
        k.set_acl('public-read')
        acl = k.get_acl()
        assert len(acl.entries.entry_list) == 2
        k.set_acl('private')
        acl = k.get_acl()
        assert len(acl.entries.entry_list) == 1
        # try set/get raw logging subresource
        empty_logging_str = "<?xml version='1.0' encoding='UTF-8'?><Logging/>"
        logging_str = (
            "<?xml version='1.0' encoding='UTF-8'?><Logging>"
            "<LogBucket>log-bucket</LogBucket>"
            "<LogObjectPrefix>example</LogObjectPrefix>"
            "<PredefinedAcl>bucket-owner-full-control</PredefinedAcl>"
            "</Logging>")
        bucket.set_subresource('logging', logging_str)
        assert bucket.get_subresource('logging') == logging_str
        # try disable/enable logging
        bucket.disable_logging()
        assert bucket.get_subresource('logging') == empty_logging_str
        bucket.enable_logging('log-bucket', 'example',
                              canned_acl='bucket-owner-full-control')
        assert bucket.get_subresource('logging') == logging_str
        # now delete all keys in bucket
        for k in bucket:
            bucket.delete_key(k)
        # now delete bucket
        time.sleep(5)
        c.delete_bucket(bucket)
Example #28
class GoogleStorage(Storage):
    def __init__(self,
                 bucket=STORAGE_BUCKET_NAME,
                 access_key=None,
                 secret_key=None,
                 bucket_acl=BUCKET_ACL,
                 acl=DEFAULT_ACL,
                 headers=HEADERS,
                 gzip=IS_GZIPPED,
                 gzip_content_types=GZIP_CONTENT_TYPES,
                 querystring_auth=QUERYSTRING_AUTH,
                 querystring_expire=QUERYSTRING_EXPIRE,
                 reduced_redundancy=REDUCED_REDUNDANCY,
                 custom_domain=CUSTOM_DOMAIN,
                 secure_urls=SECURE_URLS,
                 location=LOCATION,
                 file_name_charset=FILE_NAME_CHARSET,
                 preload_metadata=PRELOAD_METADATA,
                 calling_format=CALLING_FORMAT):

        self.bucket_acl = bucket_acl
        self.bucket_name = bucket
        self.acl = acl
        self.headers = headers
        self.preload_metadata = preload_metadata
        self.gzip = gzip
        self.gzip_content_types = gzip_content_types
        self.querystring_auth = querystring_auth
        self.querystring_expire = querystring_expire
        self.reduced_redundancy = reduced_redundancy
        self.custom_domain = custom_domain
        self.secure_urls = secure_urls
        self.location = location or ''
        self.location = self.location.lstrip('/')
        self.file_name_charset = file_name_charset

        if not access_key and not secret_key:
            access_key, secret_key = self._get_access_keys()

        self.connection = GSConnection(access_key, secret_key)

        self._entries = {}

    @property
    def bucket(self):
        if not hasattr(self, '_bucket'):
            self._bucket = self._get_or_create_bucket(self.bucket_name)
        return self._bucket

    @property
    def entries(self):
        if self.preload_metadata and not self._entries:
            self._entries = dict((self._decode_name(entry.key), entry)
                                 for entry in self.bucket.list())
        return self._entries

    def _get_access_keys(self):
        access_key = ACCESS_KEY_ID
        secret_key = SECRET_ACCESS_KEY
        if (access_key or secret_key) and (not access_key or not secret_key):
            access_key = os.environ.get(ACCESS_KEY_NAME)
            secret_key = os.environ.get(SECRET_KEY_NAME)

        if access_key and secret_key:
            # Both were provided, so use them
            return access_key, secret_key

        return None, None

    def _get_or_create_bucket(self, name):
        """Retrieves a bucket if it exists, otherwise creates it."""
        try:
            return self.connection.get_bucket(name,
                                              validate=AUTO_CREATE_BUCKET)
        except Exception as e:
            if AUTO_CREATE_BUCKET:
                bucket = self.connection.create_bucket(name)
                bucket.set_acl(self.bucket_acl)
                return bucket
            raise ImproperlyConfigured("%s" % str(e))

    def _clean_name(self, name):
        # Useful for windows' paths
        return os.path.normpath(name).replace('\\', '/')

    def _normalize_name(self, name):
        try:
            return safe_join(self.location, name).lstrip('/')
        except ValueError:
            raise SuspiciousOperation("Attempted access to '%s' denied." %
                                      name)

    def _encode_name(self, name):
        return smart_bytes(name, encoding=self.file_name_charset)

    def _decode_name(self, name):
        return force_text(name, encoding=self.file_name_charset)

    def _open(self, name, mode='rb'):
        name = self._normalize_name(self._clean_name(name))
        f = GSBotoStorageFile(name, mode, self)
        if not f.key:
            raise IOError('File does not exist: %s' % name)
        return f

    def _save(self, name, content):
        cleaned_name = self._clean_name(name)
        name = self._normalize_name(cleaned_name)
        headers = self.headers.copy()
        content_type = getattr(
            content, 'content_type',
            mimetypes.guess_type(name)[0] or Key.DefaultContentType)

        content.name = cleaned_name
        k = self.bucket.get_key(self._encode_name(name))
        if not k:
            k = self.bucket.new_key(self._encode_name(name))

        k.set_metadata('Content-Type', content_type)
        k.set_contents_from_file(content,
                                 headers=headers,
                                 policy=self.acl,
                                 rewind=True)
        # reduced_redundancy=self.reduced_redundancy)
        return cleaned_name

    def delete(self, name):
        name = self._normalize_name(self._clean_name(name))
        self.bucket.delete_key(self._encode_name(name))

    def exists(self, name):
        name = self._normalize_name(self._clean_name(name))
        if self.entries:
            return name in self.entries
        k = self.bucket.new_key(self._encode_name(name))
        return k.exists()

    def listdir(self, name):
        name = self._normalize_name(self._clean_name(name))
        dirlist = self.bucket.list(self._encode_name(name))
        files = []
        dirs = set()
        base_parts = name.split("/") if name else []
        for item in dirlist:
            parts = item.name.split("/")
            parts = parts[len(base_parts):]
            if len(parts) == 1:
                # File
                files.append(parts[0])
            elif len(parts) > 1:
                # Directory
                dirs.add(parts[0])
        return list(dirs), files

    def size(self, name):
        name = self._normalize_name(self._clean_name(name))
        if self.entries:
            entry = self.entries.get(name)
            if entry:
                return entry.size
            return 0
        return self.bucket.get_key(self._encode_name(name)).size

    def modified_time(self, name):
        try:
            from dateutil import parser, tz
        except ImportError:
            raise NotImplementedError()
        name = self._normalize_name(self._clean_name(name))
        entry = self.entries.get(name)
        # only call self.bucket.get_key() if the key is not found
        # in the preloaded metadata.
        if entry is None:
            entry = self.bucket.get_key(self._encode_name(name))
        # convert to string to date
        last_modified_date = parser.parse(entry.last_modified)
        # if the date has no timezone, assume UTC
        if last_modified_date.tzinfo is None:
            last_modified_date = last_modified_date.replace(tzinfo=tz.tzutc())
        # convert date to local time w/o timezone
        return last_modified_date.astimezone(tz.tzlocal()).replace(tzinfo=None)

    def url(self, name):
        name = self._normalize_name(self._clean_name(name))
        if self.custom_domain:
            return "%s://%s/%s" % ('https' if self.secure_urls else 'http',
                                   self.custom_domain, name)
        else:
            return self.connection.generate_url(self.querystring_expire,
                                                method='GET',
                                                bucket=self.bucket.name,
                                                key=self._encode_name(name),
                                                query_auth=self.querystring_auth,
                                                force_http=not self.secure_urls)

    def get_available_name(self, name, max_length=None):
        """ Overwrite existing file with the same name. """
        if FILE_OVERWRITE:
            name = self._clean_name(name)
            return name
        return super(GoogleStorage, self).get_available_name(name, max_length)
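A hedged sketch of driving the GoogleStorage backend above directly, assuming the module-level defaults (STORAGE_BUCKET_NAME, credentials, ACLs) are already configured:

from django.core.files.base import ContentFile

storage = GoogleStorage()  # falls back to the module-level defaults
name = storage.save('docs/hello.txt', ContentFile(b'hello'))
print(storage.url(name), storage.size(name))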
Example #29
class GoogleStorage(Storage):
    def __init__(self, bucket=STORAGE_BUCKET_NAME, access_key=None,
                 secret_key=None, bucket_acl=BUCKET_ACL, acl=DEFAULT_ACL, headers=HEADERS,
                 gzip=IS_GZIPPED, gzip_content_types=GZIP_CONTENT_TYPES,
                 querystring_auth=QUERYSTRING_AUTH, querystring_expire=QUERYSTRING_EXPIRE,
                 reduced_redundancy=REDUCED_REDUNDANCY,
                 custom_domain=CUSTOM_DOMAIN, secure_urls=SECURE_URLS,
                 location=LOCATION, file_name_charset=FILE_NAME_CHARSET,
                 preload_metadata=PRELOAD_METADATA, calling_format=CALLING_FORMAT):

        self.bucket_acl = bucket_acl
        self.bucket_name = bucket
        self.acl = acl
        self.headers = headers
        self.preload_metadata = preload_metadata
        self.gzip = gzip
        self.gzip_content_types = gzip_content_types
        self.querystring_auth = querystring_auth
        self.querystring_expire = querystring_expire
        self.reduced_redundancy = reduced_redundancy
        self.custom_domain = custom_domain
        self.secure_urls = secure_urls
        self.location = location or ''
        self.location = self.location.lstrip('/')
        self.file_name_charset = file_name_charset

        if not access_key and not secret_key:
            access_key, secret_key = self._get_access_keys()

        self.connection = GSConnection(access_key, secret_key)

        self._entries = {}

    @property
    def bucket(self):
        if not hasattr(self, '_bucket'):
            self._bucket = self._get_or_create_bucket(self.bucket_name)
        return self._bucket

    @property
    def entries(self):
        if self.preload_metadata and not self._entries:
            self._entries = dict((self._decode_name(entry.key), entry)
                                 for entry in self.bucket.list())
        return self._entries

    def _get_access_keys(self):
        access_key = ACCESS_KEY_ID
        secret_key = SECRET_ACCESS_KEY
        if (access_key or secret_key) and (not access_key or not secret_key):
            access_key = os.environ.get(ACCESS_KEY_NAME)
            secret_key = os.environ.get(SECRET_KEY_NAME)

        if access_key and secret_key:
            # Both were provided, so use them
            return access_key, secret_key

        return None, None

    def _get_or_create_bucket(self, name):
        """Retrieves a bucket if it exists, otherwise creates it."""
        try:
            return self.connection.get_bucket(name, validate=AUTO_CREATE_BUCKET)
        except Exception as e:
            if AUTO_CREATE_BUCKET:
                bucket = self.connection.create_bucket(name)
                bucket.set_acl(self.bucket_acl)
                return bucket
            raise ImproperlyConfigured("%s" % str(e))
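Note that the bucket property above is lazy: nothing talks to GCS until its first access, and _get_or_create_bucket only creates the bucket when AUTO_CREATE_BUCKET is enabled. A hedged sketch of that flow, assuming the settings constants the class relies on:

# Sketch only: assumes the module-level settings used by GoogleStorage.
storage = GoogleStorage()          # no GCS traffic yet
bucket = storage.bucket            # first access: get_bucket(), or create_bucket()
print bucket.name
for name, entry in storage.entries.items():   # non-empty only with PRELOAD_METADATA
    print name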
Example #30
 def test_3_default_object_acls(self):
     """test default object acls"""
     c = GSConnection()
     # create a new bucket
     bucket_name = 'test-%d' % int(time.time())
     bucket = c.create_bucket(bucket_name)
     # now call get_bucket to see if it's really there
     bucket = c.get_bucket(bucket_name)
     # get default acl and make sure it's empty
     acl = bucket.get_def_acl()
     assert acl.to_xml() == '<AccessControlList></AccessControlList>'
     # set default acl to a canned acl and verify it gets set
     bucket.set_def_acl('public-read')
     acl = bucket.get_def_acl()
     # save public-read acl for later test
     public_read_acl = acl
     assert acl.to_xml() == ('<AccessControlList><Entries><Entry>'    +
       '<Scope type="AllUsers"></Scope><Permission>READ</Permission>' +
       '</Entry></Entries></AccessControlList>')
     # back to private acl
     bucket.set_def_acl('private')
     acl = bucket.get_def_acl()
     assert acl.to_xml() == '<AccessControlList></AccessControlList>'
     # set default acl to an xml acl and verify it gets set
     bucket.set_def_acl(public_read_acl)
     acl = bucket.get_def_acl()
     assert acl.to_xml() == ('<AccessControlList><Entries><Entry>'    +
       '<Scope type="AllUsers"></Scope><Permission>READ</Permission>' +
       '</Entry></Entries></AccessControlList>')
     # back to private acl
     bucket.set_def_acl('private')
     acl = bucket.get_def_acl()
     assert acl.to_xml() == '<AccessControlList></AccessControlList>'
     # delete bucket
     c.delete_bucket(bucket)
     # repeat default acl tests using boto's storage_uri interface
     # create a new bucket
     bucket_name = 'test-%d' % int(time.time())
     uri = storage_uri('gs://' + bucket_name)
     uri.create_bucket()
     # get default acl and make sure it's empty
     acl = uri.get_def_acl()
     assert acl.to_xml() == '<AccessControlList></AccessControlList>'
     # set default acl to a canned acl and verify it gets set
     uri.set_def_acl('public-read')
     acl = uri.get_def_acl()
     # save public-read acl for later test
     public_read_acl = acl
     assert acl.to_xml() == ('<AccessControlList><Entries><Entry>'    +
       '<Scope type="AllUsers"></Scope><Permission>READ</Permission>' +
       '</Entry></Entries></AccessControlList>')
     # back to private acl
     uri.set_def_acl('private')
     acl = uri.get_def_acl()
     assert acl.to_xml() == '<AccessControlList></AccessControlList>'
     # set default acl to an xml acl and verify it gets set
     uri.set_def_acl(public_read_acl)
     acl = uri.get_def_acl()
     assert acl.to_xml() == ('<AccessControlList><Entries><Entry>'    +
       '<Scope type="AllUsers"></Scope><Permission>READ</Permission>' +
       '</Entry></Entries></AccessControlList>')
     # back to private acl
     uri.set_def_acl('private')
     acl = uri.get_def_acl()
     assert acl.to_xml() == '<AccessControlList></AccessControlList>'
     # delete bucket
     uri.delete_bucket()
     
     print '--- tests completed ---'
Example #32
 def test_3_default_object_acls(self):
     """test default object acls"""
     c = GSConnection()
     # create a new bucket
     bucket_name = 'test-%d' % int(time.time())
     bucket = c.create_bucket(bucket_name)
     # now call get_bucket to see if it's really there
     bucket = c.get_bucket(bucket_name)
     bucket.set_def_acl('public-read')
     acl = bucket.get_def_acl()
     # save public-read acl for later test
     public_read_acl = acl
     assert acl.to_xml() == ('<AccessControlList><Entries><Entry>'    +
       '<Scope type="AllUsers"></Scope><Permission>READ</Permission>' +
       '</Entry></Entries></AccessControlList>')
     # back to private acl
     bucket.set_def_acl('private')
     acl = bucket.get_def_acl()
     assert acl.to_xml() == '<AccessControlList></AccessControlList>'
     # set default acl to an xml acl and verify it gets set
     bucket.set_def_acl(public_read_acl)
     acl = bucket.get_def_acl()
     assert acl.to_xml() == ('<AccessControlList><Entries><Entry>'    +
       '<Scope type="AllUsers"></Scope><Permission>READ</Permission>' +
       '</Entry></Entries></AccessControlList>')
     # back to private acl
     bucket.set_def_acl('private')
     acl = bucket.get_def_acl()
     assert acl.to_xml() == '<AccessControlList></AccessControlList>'
     # delete bucket
     c.delete_bucket(bucket)
     # repeat default acl tests using boto's storage_uri interface
     # create a new bucket
     bucket_name = 'test-%d' % int(time.time())
     uri = storage_uri('gs://' + bucket_name)
     uri.create_bucket()
     uri.set_def_acl('public-read')
     acl = uri.get_def_acl()
     # save public-read acl for later test
     public_read_acl = acl
     assert acl.to_xml() == ('<AccessControlList><Entries><Entry>'    +
       '<Scope type="AllUsers"></Scope><Permission>READ</Permission>' +
       '</Entry></Entries></AccessControlList>')
     # back to private acl
     uri.set_def_acl('private')
     acl = uri.get_def_acl()
     assert acl.to_xml() == '<AccessControlList></AccessControlList>'
     # set default acl to an xml acl and verify it gets set
     uri.set_def_acl(public_read_acl)
     acl = uri.get_def_acl()
     assert acl.to_xml() == ('<AccessControlList><Entries><Entry>'    +
       '<Scope type="AllUsers"></Scope><Permission>READ</Permission>' +
       '</Entry></Entries></AccessControlList>')
     # back to private acl
     uri.set_def_acl('private')
     acl = uri.get_def_acl()
     assert acl.to_xml() == '<AccessControlList></AccessControlList>'
     # delete bucket
     uri.delete_bucket()
     
     print '--- tests completed ---'
Example #33
    def test_1_basic(self):
        """basic regression test for Google Cloud Storage"""
        print '--- running GSConnection tests ---'
        c = GSConnection()
        # create a new, empty bucket
        bucket_name = 'test-%d' % int(time.time())
        bucket = c.create_bucket(bucket_name)
        # now try a get_bucket call and see if it's really there
        bucket = c.get_bucket(bucket_name)
        k = bucket.new_key()
        k.name = 'foobar'
        s1 = 'This is a test of file upload and download'
        s2 = 'This is a second string to test file upload and download'
        k.set_contents_from_string(s1)
        fp = open('foobar', 'wb')
        # now get the contents from s3 to a local file
        k.get_contents_to_file(fp)
        fp.close()
        fp = open('foobar')
        # check to make sure content read from s3 is identical to original
        assert s1 == fp.read(), 'corrupted file'
        fp.close()
        bucket.delete_key(k)
        # test a few variations on get_all_keys - first load some data
        # for the first one, let's override the content type
        phony_mimetype = 'application/x-boto-test'
        headers = {'Content-Type': phony_mimetype}
        k.name = 'foo/bar'
        k.set_contents_from_string(s1, headers)
        k.name = 'foo/bas'
        k.set_contents_from_filename('foobar')
        k.name = 'foo/bat'
        k.set_contents_from_string(s1)
        k.name = 'fie/bar'
        k.set_contents_from_string(s1)
        k.name = 'fie/bas'
        k.set_contents_from_string(s1)
        k.name = 'fie/bat'
        k.set_contents_from_string(s1)
        # try resetting the contents to another value
        md5 = k.md5
        k.set_contents_from_string(s2)
        assert k.md5 != md5
        # Test for stream API
        fp2 = open('foobar', 'rb')
        k.md5 = None
        k.base64md5 = None
        k.set_contents_from_stream(fp2, headers=headers)
        fp = open('foobar1', 'wb')
        k.get_contents_to_file(fp)
        fp.close()
        fp2.seek(0,0)
        fp = open('foobar1', 'rb')
        assert (fp2.read() == fp.read()), 'Chunked Transfer corrupted the Data'
        fp.close()
        fp2.close()
        os.unlink('foobar1')
        os.unlink('foobar')
        all_keys = bucket.get_all_keys()
        assert len(all_keys) == 6
        rs = bucket.get_all_keys(prefix='foo')
        assert len(rs) == 3
        rs = bucket.get_all_keys(prefix='', delimiter='/')
        assert len(rs) == 2
        rs = bucket.get_all_keys(maxkeys=5)
        assert len(rs) == 5
        # test the lookup method
        k = bucket.lookup('foo/bar')
        assert isinstance(k, bucket.key_class)
        assert k.content_type == phony_mimetype
        k = bucket.lookup('notthere')
        assert k is None
        # try some metadata stuff
        k = bucket.new_key()
        k.name = 'has_metadata'
        mdkey1 = 'meta1'
        mdval1 = 'This is the first metadata value'
        k.set_metadata(mdkey1, mdval1)
        mdkey2 = 'meta2'
        mdval2 = 'This is the second metadata value'
        k.set_metadata(mdkey2, mdval2)
        # try a unicode metadata value

        mdval3 = u'föö'
        mdkey3 = 'meta3'
        k.set_metadata(mdkey3, mdval3)
        k.set_contents_from_string(s1)

        k = bucket.lookup('has_metadata')
        assert k.get_metadata(mdkey1) == mdval1
        assert k.get_metadata(mdkey2) == mdval2
        assert k.get_metadata(mdkey3) == mdval3
        k = bucket.new_key()
        k.name = 'has_metadata'
        k.get_contents_as_string()
        assert k.get_metadata(mdkey1) == mdval1
        assert k.get_metadata(mdkey2) == mdval2
        assert k.get_metadata(mdkey3) == mdval3
        bucket.delete_key(k)
        # test list and iterator
        rs1 = bucket.list()
        num_iter = 0
        for r in rs1:
            num_iter = num_iter + 1
        rs = bucket.get_all_keys()
        num_keys = len(rs)
        assert num_iter == num_keys
        # try some acl stuff
        bucket.set_acl('public-read')
        acl = bucket.get_acl()
        assert len(acl.entries.entry_list) == 2
        bucket.set_acl('private')
        acl = bucket.get_acl()
        assert len(acl.entries.entry_list) == 1
        k = bucket.lookup('foo/bar')
        k.set_acl('public-read')
        acl = k.get_acl()
        assert len(acl.entries.entry_list) == 2
        k.set_acl('private')
        acl = k.get_acl()
        assert len(acl.entries.entry_list) == 1
        # try set/get raw logging subresource
        empty_logging_str = "<?xml version='1.0' encoding='UTF-8'?><Logging/>"
        logging_str = (
            "<?xml version='1.0' encoding='UTF-8'?><Logging>"
            "<LogBucket>log-bucket</LogBucket>"
            "<LogObjectPrefix>example</LogObjectPrefix>"
            "</Logging>")
        bucket.set_subresource('logging', logging_str)
        assert bucket.get_subresource('logging') == logging_str
        # try disable/enable logging
        bucket.disable_logging()
        assert bucket.get_subresource('logging') == empty_logging_str
        bucket.enable_logging('log-bucket', 'example')
        assert bucket.get_subresource('logging') == logging_str
        # now delete all keys in bucket
        for k in bucket:
            bucket.delete_key(k)
        # now delete bucket
        time.sleep(5)
        c.delete_bucket(bucket)
Example #34
 def test_3_default_object_acls(self):
     """test default object acls"""
     # regexp for matching project-private default object ACL
     project_private_re = (
       r'\s*<AccessControlList>\s*<Entries>\s*<Entry>'
       r'\s*<Scope type="GroupById"><ID>[0-9a-fA-F]+</ID></Scope>'
       r'\s*<Permission>FULL_CONTROL</Permission>\s*</Entry>\s*<Entry>'
       r'\s*<Scope type="GroupById"><ID>[0-9a-fA-F]+</ID></Scope>'
       r'\s*<Permission>FULL_CONTROL</Permission>\s*</Entry>\s*<Entry>'
       r'\s*<Scope type="GroupById"><ID>[0-9a-fA-F]+</ID></Scope>'
       r'\s*<Permission>READ</Permission></Entry>\s*</Entries>'
       r'\s*</AccessControlList>\s*')
     c = GSConnection()
     # create a new bucket
     bucket_name = 'test-%d' % int(time.time())
     bucket = c.create_bucket(bucket_name)
     # now call get_bucket to see if it's really there
     bucket = c.get_bucket(bucket_name)
     # get default acl and make sure it's project-private
     acl = bucket.get_def_acl()
     assert re.search(project_private_re, acl.to_xml())
     # set default acl to a canned acl and verify it gets set
     bucket.set_def_acl('public-read')
     acl = bucket.get_def_acl()
     # save public-read acl for later test
     public_read_acl = acl
     assert acl.to_xml() == ('<AccessControlList><Entries><Entry>'    +
       '<Scope type="AllUsers"></Scope><Permission>READ</Permission>' +
       '</Entry></Entries></AccessControlList>')
     # back to private acl
     bucket.set_def_acl('private')
     acl = bucket.get_def_acl()
     assert acl.to_xml() == '<AccessControlList></AccessControlList>'
     # set default acl to an xml acl and verify it gets set
     bucket.set_def_acl(public_read_acl)
     acl = bucket.get_def_acl()
     assert acl.to_xml() == ('<AccessControlList><Entries><Entry>'    +
       '<Scope type="AllUsers"></Scope><Permission>READ</Permission>' +
       '</Entry></Entries></AccessControlList>')
     # back to private acl
     bucket.set_def_acl('private')
     acl = bucket.get_def_acl()
     assert acl.to_xml() == '<AccessControlList></AccessControlList>'
     # delete bucket
     c.delete_bucket(bucket)
     # repeat default acl tests using boto's storage_uri interface
     # create a new bucket
     bucket_name = 'test-%d' % int(time.time())
     uri = storage_uri('gs://' + bucket_name)
     uri.create_bucket()
     # get default acl and make sure it's project-private
     acl = uri.get_def_acl()
     assert re.search(project_private_re, acl.to_xml())
     # set default acl to a canned acl and verify it gets set
     uri.set_def_acl('public-read')
     acl = uri.get_def_acl()
     # save public-read acl for later test
     public_read_acl = acl
     assert acl.to_xml() == ('<AccessControlList><Entries><Entry>'    +
       '<Scope type="AllUsers"></Scope><Permission>READ</Permission>' +
       '</Entry></Entries></AccessControlList>')
     # back to private acl
     uri.set_def_acl('private')
     acl = uri.get_def_acl()
     assert acl.to_xml() == '<AccessControlList></AccessControlList>'
     # set default acl to an xml acl and verify it gets set
     uri.set_def_acl(public_read_acl)
     acl = uri.get_def_acl()
     assert acl.to_xml() == ('<AccessControlList><Entries><Entry>'    +
       '<Scope type="AllUsers"></Scope><Permission>READ</Permission>' +
       '</Entry></Entries></AccessControlList>')
     # back to private acl
     uri.set_def_acl('private')
     acl = uri.get_def_acl()
     assert acl.to_xml() == '<AccessControlList></AccessControlList>'
     # delete bucket
     uri.delete_bucket()
     
     print '--- tests completed ---'
Example #35
 def setUp(self):
     self.conn = GSConnection()
     self.buckets = []
Example #36
class GSVersioningTest(unittest.TestCase):
    gs = True

    def setUp(self):
        self.conn = GSConnection()
        self.buckets = []

    def tearDown(self):
        for b in self.buckets:
            bucket = self.conn.get_bucket(b)
            while len(list(bucket.list_versions())) > 0:
                for k in bucket.list_versions():
                    bucket.delete_key(k.name, generation=k.generation)
            bucket.delete()

    def _MakeBucketName(self):
        b = "boto-gs-test-%s" % repr(time.time()).replace(".", "-")
        self.buckets.append(b)
        return b

    def _MakeVersionedBucket(self):
        b = self.conn.create_bucket(self._MakeBucketName())
        b.configure_versioning(True)
        return b

    def testVersioningToggle(self):
        b = self.conn.create_bucket(self._MakeBucketName())
        self.assertFalse(b.get_versioning_status())
        b.configure_versioning(True)
        self.assertTrue(b.get_versioning_status())
        b.configure_versioning(False)
        self.assertFalse(b.get_versioning_status())

    def testDeleteVersionedKey(self):
        b = self._MakeVersionedBucket()
        k = b.new_key("foo")
        s1 = "test1"
        k.set_contents_from_string(s1)

        k = b.get_key("foo")
        g1 = k.generation

        s2 = "test2"
        k.set_contents_from_string(s2)
        k = b.get_key("foo")
        g2 = k.generation

        versions = list(b.list_versions())
        self.assertEqual(len(versions), 2)
        self.assertEqual(versions[0].name, "foo")
        self.assertEqual(versions[1].name, "foo")
        generations = [k.generation for k in versions]
        self.assertIn(g1, generations)
        self.assertIn(g2, generations)

        # Delete "current" version and make sure that version is no longer
        # visible from a basic GET call.
        k = b.get_key("foo")
        k.delete()
        self.assertIsNone(b.get_key("foo"))

        # Both old versions should still be there when listed using the versions
        # query parameter.
        versions = list(b.list_versions())
        self.assertEqual(len(versions), 2)
        self.assertEqual(versions[0].name, "foo")
        self.assertEqual(versions[1].name, "foo")
        generations = [k.generation for k in versions]
        self.assertIn(g1, generations)
        self.assertIn(g2, generations)

        # Delete generation 2 and make sure it's gone.
        b.delete_key("foo", generation=g2)
        versions = list(b.list_versions())
        self.assertEqual(len(versions), 1)
        self.assertEqual(versions[0].name, "foo")
        self.assertEqual(versions[0].generation, g1)

        # Delete generation 1 and make sure it's gone.
        b.delete_key("foo", generation=g1)
        versions = list(b.list_versions())
        self.assertEqual(len(versions), 0)

    def testGetVersionedKey(self):
        b = self._MakeVersionedBucket()
        k = b.new_key("foo")
        s1 = "test1"
        k.set_contents_from_string(s1)

        k = b.get_key("foo")
        g1 = k.generation
        o1 = k.get_contents_as_string()
        self.assertEqual(o1, s1)

        s2 = "test2"
        k.set_contents_from_string(s2)
        k = b.get_key("foo")
        g2 = k.generation
        self.assertNotEqual(g2, g1)
        o2 = k.get_contents_as_string()
        self.assertEqual(o2, s2)

        k = b.get_key("foo", generation=g1)
        self.assertEqual(k.get_contents_as_string(), s1)
        k = b.get_key("foo", generation=g2)
        self.assertEqual(k.get_contents_as_string(), s2)

    def testVersionedAcl(self):
        b = self._MakeVersionedBucket()
        k = b.new_key("foo")
        s1 = "test1"
        k.set_contents_from_string(s1)

        k = b.get_key("foo")
        g1 = k.generation

        s2 = "test2"
        k.set_contents_from_string(s2)
        k = b.get_key("foo")
        g2 = k.generation

        acl1g1 = b.get_acl("foo", generation=g1)
        acl1g2 = b.get_acl("foo", generation=g2)
        owner1g1 = acl1g1.owner.id
        owner1g2 = acl1g2.owner.id
        self.assertEqual(owner1g1, owner1g2)
        entries1g1 = acl1g1.entries.entry_list
        entries1g2 = acl1g2.entries.entry_list
        self.assertEqual(len(entries1g1), len(entries1g2))

        b.set_acl("public-read", key_name="foo", generation=g1)

        acl2g1 = b.get_acl("foo", generation=g1)
        acl2g2 = b.get_acl("foo", generation=g2)
        entries2g1 = acl2g1.entries.entry_list
        entries2g2 = acl2g2.entries.entry_list
        self.assertEqual(len(entries2g2), len(entries1g2))
        public_read_entries1 = [e for e in entries2g1 if e.permission == "READ"
                                and e.scope.type == acl.ALL_USERS]
        public_read_entries2 = [e for e in entries2g2 if e.permission == "READ"
                                and e.scope.type == acl.ALL_USERS]
        self.assertEqual(len(public_read_entries1), 1)
        self.assertEqual(len(public_read_entries2), 0)

    def testCopyVersionedKey(self):
        b = self._MakeVersionedBucket()
        k = b.new_key("foo")
        s1 = "test1"
        k.set_contents_from_string(s1)

        k = b.get_key("foo")
        g1 = k.generation

        s2 = "test2"
        k.set_contents_from_string(s2)

        b2 = self._MakeVersionedBucket()
        b2.copy_key("foo2", b.name, "foo", src_generation=g1)

        k2 = b2.get_key("foo2")
        s3 = k2.get_contents_as_string()
        self.assertEqual(s3, s1)
Example #37
 def setUp(self):
     self._conn = GSConnection()
     self._buckets = []
     self._tempdirs = []
Example #38
 def test_1_basic(self):
     print '--- running GSConnection tests ---'
     c = GSConnection()
     # create a new, empty bucket
     bucket_name = 'test-%d' % int(time.time())
     bucket = c.create_bucket(bucket_name)
     # now try a get_bucket call and see if it's really there
     bucket = c.get_bucket(bucket_name)
     k = bucket.new_key()
     k.name = 'foobar'
     s1 = 'This is a test of file upload and download'
     s2 = 'This is a second string to test file upload and download'
     k.set_contents_from_string(s1)
     fp = open('foobar', 'wb')
     # now get the contents from s3 to a local file
     k.get_contents_to_file(fp)
     fp.close()
     fp = open('foobar')
     # check to make sure content read from s3 is identical to original
     assert s1 == fp.read(), 'corrupted file'
     fp.close()
     bucket.delete_key(k)
     # test a few variations on get_all_keys - first load some data
     # for the first one, let's override the content type
     phony_mimetype = 'application/x-boto-test'
     headers = {'Content-Type': phony_mimetype}
     k.name = 'foo/bar'
     k.set_contents_from_string(s1, headers)
     k.name = 'foo/bas'
     k.set_contents_from_filename('foobar')
     k.name = 'foo/bat'
     k.set_contents_from_string(s1)
     k.name = 'fie/bar'
     k.set_contents_from_string(s1)
     k.name = 'fie/bas'
     k.set_contents_from_string(s1)
     k.name = 'fie/bat'
     k.set_contents_from_string(s1)
     # try resetting the contents to another value
     md5 = k.md5
     k.set_contents_from_string(s2)
     assert k.md5 != md5
     os.unlink('foobar')
     all_keys = bucket.get_all_keys()
     assert len(all_keys) == 6
     rs = bucket.get_all_keys(prefix='foo')
     assert len(rs) == 3
     rs = bucket.get_all_keys(prefix='', delimiter='/')
     assert len(rs) == 2
     rs = bucket.get_all_keys(maxkeys=5)
     assert len(rs) == 5
     # test the lookup method
     k = bucket.lookup('foo/bar')
     assert isinstance(k, bucket.key_class)
     assert k.content_type == phony_mimetype
     k = bucket.lookup('notthere')
     assert k is None
     # try some metadata stuff
     k = bucket.new_key()
     k.name = 'has_metadata'
     mdkey1 = 'meta1'
     mdval1 = 'This is the first metadata value'
     k.set_metadata(mdkey1, mdval1)
     mdkey2 = 'meta2'
     mdval2 = 'This is the second metadata value'
     k.set_metadata(mdkey2, mdval2)
     # try a unicode metadata value
     
     mdval3 = u'föö'
     mdkey3 = 'meta3'
     k.set_metadata(mdkey3, mdval3)
     k.set_contents_from_string(s1)
     
     k = bucket.lookup('has_metadata')
     assert k.get_metadata(mdkey1) == mdval1
     assert k.get_metadata(mdkey2) == mdval2
     assert k.get_metadata(mdkey3) == mdval3
     k = bucket.new_key()
     k.name = 'has_metadata'
     k.get_contents_as_string()
     assert k.get_metadata(mdkey1) == mdval1
     assert k.get_metadata(mdkey2) == mdval2
     assert k.get_metadata(mdkey3) == mdval3
     bucket.delete_key(k)
     # test list and iterator
     rs1 = bucket.list()
     num_iter = 0
     for r in rs1:
         num_iter = num_iter + 1
     rs = bucket.get_all_keys()
     num_keys = len(rs)
     assert num_iter == num_keys
     # try a key with a funny character
     k = bucket.new_key()
     k.name = 'testnewline\n'
     k.set_contents_from_string('This is a test')
     rs = bucket.get_all_keys()
     assert len(rs) == num_keys + 1
     bucket.delete_key(k)
     rs = bucket.get_all_keys()
     assert len(rs) == num_keys
     # try some acl stuff
     bucket.set_acl('public-read')
     acl = bucket.get_acl()
     assert len(acl.entries.entry_list) == 2
     bucket.set_acl('private')
     acl = bucket.get_acl()
     assert len(acl.entries.entry_list) == 1
     k = bucket.lookup('foo/bar')
     k.set_acl('public-read')
     acl = k.get_acl()
     assert len(acl.entries.entry_list) == 2
     k.set_acl('private')
     acl = k.get_acl()
     assert len(acl.entries.entry_list) == 1
     # now delete all keys in bucket
     for k in bucket:
         bucket.delete_key(k)
     # now delete bucket
     time.sleep(5)
     c.delete_bucket(bucket)
     print '--- tests completed ---'
Example #40
class GSTestCase(unittest.TestCase):
    gs = True

    def setUp(self):
        self._conn = GSConnection()
        self._buckets = []
        self._tempdirs = []

    # Retry with an exponential backoff if a server error is received. This
    # ensures that we try *really* hard to clean up after ourselves.
    @retry(GSResponseError)
    def tearDown(self):
        while len(self._tempdirs):
            tmpdir = self._tempdirs.pop()
            shutil.rmtree(tmpdir, ignore_errors=True)

        while len(self._buckets):
            b = self._buckets[-1]
            bucket = self._conn.get_bucket(b)
            while len(list(bucket.list_versions())) > 0:
                for k in bucket.list_versions():
                    bucket.delete_key(k.name, generation=k.generation)
            bucket.delete()
            self._buckets.pop()

    def _GetConnection(self):
        """Returns the GSConnection object used to connect to GCS."""
        return self._conn

    def _MakeTempName(self):
        """Creates and returns a temporary name for testing that is likely to be
        unique."""
        return "boto-gs-test-%s" % repr(time.time()).replace(".", "-")

    def _MakeBucketName(self):
        """Creates and returns a temporary bucket name for testing that is
        likely to be unique."""
        b = self._MakeTempName()
        self._buckets.append(b)
        return b

    def _MakeBucket(self):
        """Creates and returns temporary bucket for testing. After the test, the
        contents of the bucket and the bucket itself will be deleted."""
        b = self._conn.create_bucket(self._MakeBucketName())
        return b

    def _MakeKey(self, data='', bucket=None, set_contents=True):
        """Creates and returns a Key with provided data. If no bucket is given,
        a temporary bucket is created."""
        if data and not set_contents:
            # Providing data while set_contents is False would silently
            # discard the data, so treat it as a usage error.
            raise ValueError('MakeKey called with a non-empty data parameter '
                             'but set_contents was set to False.')
        if not bucket:
            bucket = self._MakeBucket()
        key_name = self._MakeTempName()
        k = bucket.new_key(key_name)
        if set_contents:
            k.set_contents_from_string(data)
        return k

    def _MakeVersionedBucket(self):
        """Creates and returns temporary versioned bucket for testing. After the
        test, the contents of the bucket and the bucket itself will be
        deleted."""
        b = self._MakeBucket()
        b.configure_versioning(True)
        return b

    def _MakeTempDir(self):
        """Creates and returns a temporary directory on disk. After the test,
        the contents of the directory and the directory itself will be
        deleted."""
        tmpdir = tempfile.mkdtemp(prefix=self._MakeTempName())
        self._tempdirs.append(tmpdir)
        return tmpdir
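A hedged sketch of how a test might build on these helpers (the test itself is illustrative, not part of the original suite, and assumes the same imports as the class above plus os):

class GSKeyRoundTripTest(GSTestCase):
    """Illustrative only -- not from the original suite."""

    def testRoundTrip(self):
        # _MakeKey creates a temporary bucket and key; tearDown cleans both up.
        k = self._MakeKey(data='hello')
        self.assertEqual(k.get_contents_as_string(), 'hello')
        # _MakeTempDir is likewise removed automatically in tearDown.
        tmpdir = self._MakeTempDir()
        path = os.path.join(tmpdir, 'out.txt')
        k.get_contents_to_filename(path)
        with open(path) as fp:
            self.assertEqual(fp.read(), 'hello')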
Example #41
class BackupManager:

    def __init__(self, accesskey, sharedkey):
        self._accesskey = accesskey
        self._connection = GSConnection(accesskey, sharedkey)

        self._buckets = None
        self._bucketbackups = {}
        self._backups = None

    def _generate_backup_buckets(self):
        bucket_prefix = 'bkup-'
        bucket_suffix = '-' + self._accesskey.lower()
        buckets = self._connection.get_all_buckets()
        self._buckets = []

        for bucket in buckets:
            if bucket.name.startswith(bucket_prefix) and bucket.name.endswith(bucket_suffix):
                self._buckets.append(bucket)

    @property
    def backup_buckets(self):
        if self._buckets is None:
            self._generate_backup_buckets()
        return self._buckets

    def _list_backups(self, bucket):
        """Returns a dict of backups in a bucket, with dicts of:
        {hostname (str):
            {Backup number (int):
                {'date': Timestamp of backup (int),
                 'keys': A list of keys comprising the backup,
                 'hostname': Hostname (str),
                 'backupnum': Backup number (int),
                 'finalized': 0, or the timestamp the backup was finalized
                }
            }
        }
        """

        backups = {}

        for key in bucket.list():
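            # Key names appear to follow the pattern
            #   <hostname>.<backupnum>.tar[.<NN>][.gpg][.COMPLETE]
            # (inferred from the suffix-stripping below): optional suffixes
            # are popped right-to-left until the backup number is reached.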
            keyparts = key.key.split('.')
            final = False

            if keyparts[-1] == 'COMPLETE':
                final = True
                keyparts.pop() # back to tar
                keyparts.pop() # back to backup number
            else:
                if keyparts[-1] == 'gpg':
                    keyparts.pop()

                if keyparts[-1] != 'tar' and len(keyparts[-1]) == 2:
                    keyparts.pop()

                if keyparts[-1] == 'tar':
                    keyparts.pop()

            nextpart = keyparts.pop()
            if nextpart == 'COMPLETE':
                print("Stray file: %s" % key.key)
                continue
            backupnum = int(nextpart)
            hostname = '.'.join(keyparts)

            lastmod = time.strptime(key.last_modified,
                                    '%Y-%m-%dT%H:%M:%S.%fZ')

            if hostname in backups:
                if backupnum not in backups[hostname]:
                    backups[hostname][backupnum] = {
                        'date': lastmod,
                        'hostname': hostname,
                        'backupnum': backupnum,
                        'finalized': 0,
                        'keys': [],
                        'finalkey': None,
                        'finalized_age': -1,
                    }
            else:
                backups[hostname] = {
                    backupnum: {
                        'date': lastmod,
                        'hostname': hostname,
                        'backupnum': backupnum,
                        'finalized': 0,
                        'keys': [],
                        'finalkey': None,
                        'finalized_age': -1,
                    }
                }
            if final:
                backups[hostname][backupnum]['finalized'] = lastmod
                backups[hostname][backupnum]['finalkey'] = key
                timestamp = time.mktime(lastmod)
                delta = int(time.time() - timestamp + time.timezone)
                backups[hostname][backupnum]['finalized_age'] = delta
            else:
                if lastmod < backups[hostname][backupnum]['date']:
                    backups[hostname][backupnum]['date'] = lastmod
                backups[hostname][backupnum]['keys'].append(key)
        return backups

    def get_backups_by_bucket(self, bucket):
        if bucket.name not in self._bucketbackups:
            self._bucketbackups[bucket.name] = self._list_backups(bucket)

        return self._bucketbackups[bucket.name]

    @property
    def all_backups(self):
        if self._backups is None:
            sys.stderr.write("Enumerating backups")
            self._backups = {}
            for bucket in self.backup_buckets:
                backups_dict = self.get_backups_by_bucket(bucket)
                for hostname, backups in backups_dict.items():
                    sys.stderr.write('.')
                    sys.stderr.flush()
                    if hostname not in self._backups:
                        self._backups[hostname] = {}
                    self._backups[hostname].update(backups)
            sys.stderr.write("\n")
        return self._backups

    def invalidate_host_cache(self, hostname):
        nuke = []
        for bucket in self._bucketbackups:
            if hostname in self._bucketbackups[bucket]:
                nuke.append(bucket)

        for bucket in nuke:
            if bucket in self._bucketbackups:
                del self._bucketbackups[bucket]
                self._backups = None

    @property
    def backups_by_age(self):
        "Returns a dict of {hostname: [(backupnum, age), ...]}"
        results = defaultdict(list)
        for hostname, backups in self.all_backups.items():
            for backupnum, statusdict in backups.items():
                results[hostname].append((backupnum,
                                          statusdict['finalized_age']))
        return results
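A hedged usage sketch of BackupManager (credentials are placeholders, and the buckets must match the 'bkup-*-<accesskey>' naming that _generate_backup_buckets scans for):

# Sketch only: illustrative credentials, Python 2 print syntax as elsewhere here.
manager = BackupManager('GOOGACCESSKEY', 'sharedsecret')
for hostname, backups in manager.backups_by_age.items():
    for backupnum, age in backups:
        # age is -1 when the backup was never finalized
        print '%s #%d: finalized %d seconds ago' % (hostname, backupnum, age)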