Example #1
    def connect(self, access_key_id=None, secret_access_key=None, **kwargs):
        """
        Opens a connection to appropriate provider, depending on provider
        portion of URI. Requires Credentials defined in boto config file (see
        boto/pyami/config.py).
        @type storage_uri: StorageUri
        @param storage_uri: StorageUri specifying a bucket or a bucket+object
        @rtype: L{AWSAuthConnection<boto.gs.connection.AWSAuthConnection>}
        @return: A connection to storage service provider of the given URI.
        """

        if not self.connection:
            if self.scheme == 's3':
                from boto.s3.connection import S3Connection
                self.connection = S3Connection(access_key_id,
                                               secret_access_key, **kwargs)
            elif self.scheme == 'gs':
                from boto.gs.connection import GSConnection
                self.connection = GSConnection(access_key_id,
                                               secret_access_key, **kwargs)
            elif self.scheme == 'file':
                from boto.file.connection import FileConnection
                self.connection = FileConnection(self)
            else:
                raise InvalidUriError('Unrecognized scheme "%s"' % self.scheme)
        self.connection.debug = self.debug
        return self.connection
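
connect() is usually reached through boto's storage_uri helper rather than called directly. A minimal usage sketch, assuming credentials are configured in the boto config file ('my-bucket' is a placeholder):

import boto

uri = boto.storage_uri('gs://my-bucket')   # parse the scheme and bucket
conn = uri.connect()                       # builds and caches a GSConnection
print(conn.get_bucket('my-bucket').name)   # later calls reuse self.connection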
Example #2
    def __init__(self, accesskey, sharedkey):
        self._accesskey = accesskey
        self._connection = GSConnection(accesskey, sharedkey)

        self._buckets = None
        self._bucketbackups = {}
        self._backups = None
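
A hedged sketch of how the lazy caches above might be consumed; buckets() is a hypothetical accessor, not part of the original class:

    def buckets(self):
        # Hypothetical: fill the cache on first access; get_all_buckets()
        # is a network round trip, so the result is worth memoizing.
        if self._buckets is None:
            self._buckets = self._connection.get_all_buckets()
        return self._buckets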
Example #3
    def delete(self, *args, **kwargs):
        try:
            logger.info("Intentado eliminar archivos del bucket GCS")
            logger.info("Para el QuerySet Email")
            conn = GSConnection(settings.GS_ACCESS_KEY_ID,
                                settings.GS_SECRET_ACCESS_KEY)
            bucket = conn.get_bucket(settings.GS_BUCKET_NAME)

            for obj in self:
                if obj.xml.name != '':
                    logger.info("deleting xml")
                    bucket.delete_key(obj.xml.name)
                if obj.pdf.name != '':
                    logger.info("deleting pdf")
                    bucket.delete_key(obj.pdf.name)
                if obj.adjunto1.name != '':
                    logger.info("deleting adjunto1")
                    bucket.delete_key(obj.adjunto1.name)
                logger.info("Files deleted")
        except Exception as e:
            logger.error("Error al intentar eliminar archivo del GCS")
            logger.error(e)
        finally:
            logger.info("Borrando QuerySet")
            super(FileQuerySet, self).delete(*args, **kwargs)
            logger.info("QuerySet eliminado")
Example #4
def upload_to_gs(bucket_name,
                 client_id,
                 client_secret,
                 file,
                 key,
                 acl='public-read'):
    conn = GSConnection(client_id,
                        client_secret,
                        calling_format=OrdinaryCallingFormat())
    bucket = conn.get_bucket(bucket_name)
    k = Key(bucket)
    # generate key
    filename = secure_filename(file.filename)
    key_dir = key + '/' + generate_hash(key) + '/'
    k.key = key_dir + filename
    # delete old data (keys are stored without a leading slash, so the
    # prefix must not start with '/')
    for item in bucket.list(prefix=key_dir):
        item.delete()
    # set object settings

    file_data = file.read()
    file_mime = magic.from_buffer(file_data, mime=True)
    size = len(file_data)
    sent = k.set_contents_from_string(file_data,
                                      headers={
                                          'Content-Disposition':
                                          'attachment; filename=%s' % filename,
                                          'Content-Type':
                                          '%s' % file_mime
                                      })
    k.set_acl(acl)
    gs_url = 'https://storage.googleapis.com/%s/' % bucket_name
    if sent == size:
        return gs_url + k.key
    return False
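
The same upload path in isolation, as a minimal sketch; bucket name, credentials, key, and payload are placeholders:

from boto.gs.connection import GSConnection
from boto.s3.connection import OrdinaryCallingFormat
from boto.s3.key import Key

conn = GSConnection('GS_ACCESS_KEY_ID', 'GS_SECRET_ACCESS_KEY',
                    calling_format=OrdinaryCallingFormat())
bucket = conn.get_bucket('my-bucket')
k = Key(bucket)
k.key = 'reports/2014/summary.pdf'
k.set_contents_from_string('%PDF-1.4 dummy payload',
                           headers={'Content-Type': 'application/pdf',
                                    'Content-Disposition':
                                    'attachment; filename=summary.pdf'})
k.set_acl('public-read')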
Example #5
    def __init__(self, bucket=STORAGE_BUCKET_NAME, access_key=None,
                       secret_key=None, bucket_acl=BUCKET_ACL, acl=DEFAULT_ACL, headers=HEADERS,
                       gzip=IS_GZIPPED, gzip_content_types=GZIP_CONTENT_TYPES,
                       querystring_auth=QUERYSTRING_AUTH, querystring_expire=QUERYSTRING_EXPIRE,
                       reduced_redundancy=REDUCED_REDUNDANCY,
                       custom_domain=CUSTOM_DOMAIN, secure_urls=SECURE_URLS,
                       location=LOCATION, file_name_charset=FILE_NAME_CHARSET,
                       preload_metadata=PRELOAD_METADATA, calling_format=CALLING_FORMAT):

        self.bucket_acl = bucket_acl
        self.bucket_name = bucket
        self.acl = acl
        self.headers = headers
        self.preload_metadata = preload_metadata
        self.gzip = gzip
        self.gzip_content_types = gzip_content_types
        self.querystring_auth = querystring_auth
        self.querystring_expire = querystring_expire
        self.reduced_redundancy = reduced_redundancy
        self.custom_domain = custom_domain
        self.secure_urls = secure_urls
        self.location = location or ''
        self.location = self.location.lstrip('/')
        self.file_name_charset = file_name_charset

        if not access_key and not secret_key:
            access_key, secret_key = self._get_access_keys()

        self.connection = GSConnection(access_key, secret_key)
        
        self._entries = {}
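
A hedged instantiation sketch for the storage backend above; GSStorage is an assumed class name, and the credentials are placeholders:

storage = GSStorage(bucket='my-bucket',
                    access_key='GS_ACCESS_KEY_ID',
                    secret_key='GS_SECRET_ACCESS_KEY')
print(storage.connection.get_bucket(storage.bucket_name).name)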
Example #6
    def get(self):
        """Handle GET requests.

        For the time being, we just provide an information page. In the
        future there will be a web UI here.
        """
        path = os.path.join(os.path.dirname(__file__), 'index.html')
        user = users.get_current_user()

        connection = GSConnection(config.gs_access_key, config.gs_secret_key)
        bucket = connection.get_bucket(config.gs_bucket)

        # Find all of the batches.
        batches = []
        logging.info('Loading batches')
        rs = bucket.list(prefix=user.email() + '/', delimiter='/')
        for r in rs:
            logging.info(r.name)
            batch_name = r.name.split('/')[1]
            batches.append(self.LoadBatchInfo(user.email(), batch_name,
                                              bucket))
        batches.sort(key=lambda i: i.get('update_time', 0), reverse=True)
        self.response.out.write(
            template.render(
                path, {
                    'url': self.request.url,
                    'user_id': user.user_id(),
                    'email': user.email(),
                    'batches': batches,
                }))
Example #7
 def _create_connection(self):
     """Returns a GSConnection object we can use to access Google Storage."""
     if self._gs_access_key_id:
         return GSConnection(
             gs_access_key_id=self._gs_access_key_id,
             gs_secret_access_key=self._gs_secret_access_key)
     else:
         return AnonymousGSConnection()
Example #8
    def connect(self, access_key_id=None, secret_access_key=None, **kwargs):
        """
        Opens a connection to appropriate provider, depending on provider
        portion of URI. Requires Credentials defined in boto config file (see
        boto/pyami/config.py).
        @type storage_uri: StorageUri
        @param storage_uri: StorageUri specifying a bucket or a bucket+object
        @rtype: L{AWSAuthConnection<boto.gs.connection.AWSAuthConnection>}
        @return: A connection to storage service provider of the given URI.
        """
        connection_args = dict(self.connection_args or ())

        if (hasattr(self, 'suppress_consec_slashes') and
                'suppress_consec_slashes' not in connection_args):
            connection_args['suppress_consec_slashes'] = (
                self.suppress_consec_slashes)
        connection_args.update(kwargs)
        if not self.connection:
            if self.scheme in self.provider_pool:
                self.connection = self.provider_pool[self.scheme]
            elif self.scheme == 's3':
                from boto.s3.connection import S3Connection
                self.connection = S3Connection(access_key_id,
                                               secret_access_key,
                                               **connection_args)
                self.provider_pool[self.scheme] = self.connection
            elif self.scheme == 'gs':
                from boto.gs.connection import GSConnection
                # Use OrdinaryCallingFormat instead of boto-default
                # SubdomainCallingFormat because the latter changes the hostname
                # that's checked during cert validation for HTTPS connections,
                # which will fail cert validation (when cert validation is
                # enabled).
                #
                # The same is not true for S3's HTTPS certificates. In fact,
                # we don't want to do this for S3 because S3 requires the
                # subdomain to match the location of the bucket. If the proper
                # subdomain is not used, the server will return a 301 redirect
                # with no Location header.
                #
                # Note: the following import can't be moved up to the
                # start of this file else it causes a config import failure when
                # run from the resumable upload/download tests.
                from boto.s3.connection import OrdinaryCallingFormat
                connection_args['calling_format'] = OrdinaryCallingFormat()
                self.connection = GSConnection(access_key_id,
                                               secret_access_key,
                                               **connection_args)
                self.provider_pool[self.scheme] = self.connection
            elif self.scheme == 'file':
                from boto.file.connection import FileConnection
                self.connection = FileConnection(self)
            else:
                raise InvalidUriError('Unrecognized scheme "%s"' %
                                      self.scheme)
        self.connection.debug = self.debug
        return self.connection
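
Because connections are cached in provider_pool, the first connect() for a scheme fixes its settings, and later URIs of the same scheme reuse the pooled object. A minimal sketch, assuming provider_pool is shared across URI instances as in boto's BucketStorageUri:

import boto

conn_a = boto.storage_uri('gs://bucket-a').connect()  # creates and pools
conn_b = boto.storage_uri('gs://bucket-b').connect()  # served from the pool
# conn_a and conn_b should be the same GSConnection object here, so any
# kwargs passed to the second connect() call are silently ignored.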
Example #9
def connect_gs(gs_access_key_id=None, gs_secret_access_key=None, **kwargs):
    """
    @type gs_access_key_id: string
    @param gs_access_key_id: Your Google Cloud Storage Access Key ID

    @type gs_secret_access_key: string
    @param gs_secret_access_key: Your Google Cloud Storage Secret Access Key

    @rtype: L{GSConnection<boto.gs.connection.GSConnection>}
    @return: A connection to Google's Storage service
    """
    from boto.gs.connection import GSConnection
    return GSConnection(gs_access_key_id, gs_secret_access_key, **kwargs)
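
A minimal usage sketch; called with no arguments, boto falls back to credentials from the boto config file or environment:

import boto

conn = boto.connect_gs()  # or boto.connect_gs(key_id, secret_key)
for bucket in conn.get_all_buckets():
    print(bucket.name)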
Example #10
    def test_4_cors_xml(self):
        """test setting and getting of CORS XML documents"""
        # empty CORS doc and a populated CORS doc for the round-trip tests below
        cors_empty = '<CorsConfig></CorsConfig>'
        cors_doc = ('<CorsConfig><Cors><Origins><Origin>origin1.example.com'
                    '</Origin><Origin>origin2.example.com</Origin></Origins>'
                    '<Methods><Method>GET</Method><Method>PUT</Method>'
                    '<Method>POST</Method></Methods><ResponseHeaders>'
                    '<ResponseHeader>foo</ResponseHeader>'
                    '<ResponseHeader>bar</ResponseHeader></ResponseHeaders>'
                    '</Cors></CorsConfig>')
        c = GSConnection()
        # create a new bucket
        bucket_name = 'test-%d' % int(time.time())
        bucket = c.create_bucket(bucket_name)
        # now call get_bucket to see if it's really there
        bucket = c.get_bucket(bucket_name)
        # get new bucket cors and make sure it's empty
        cors = re.sub(r'\s', '', bucket.get_cors().to_xml())
        assert cors == cors_empty
        # set cors document on new bucket
        bucket.set_cors(cors_doc)
        cors = re.sub(r'\s', '', bucket.get_cors().to_xml())
        assert cors == cors_doc
        # delete bucket
        c.delete_bucket(bucket)

        # repeat cors tests using boto's storage_uri interface
        # create a new bucket
        bucket_name = 'test-%d' % int(time.time())
        uri = storage_uri('gs://' + bucket_name)
        uri.create_bucket()
        # get new bucket cors and make sure it's empty
        cors = re.sub(r'\s', '', uri.get_cors().to_xml())
        assert cors == cors_empty
        # set cors document on new bucket
        cors_obj = Cors()
        h = handler.XmlHandler(cors_obj, None)
        xml.sax.parseString(cors_doc, h)
        uri.set_cors(cors_obj)
        cors = re.sub(r'\s', '', uri.get_cors().to_xml())
        assert cors == cors_doc
        # delete bucket
        uri.delete_bucket()

        print '--- tests completed ---'
Example #11
    def connect(self, access_key_id=None, secret_access_key=None, **kwargs):
        """
        Opens a connection to appropriate provider, depending on provider
        portion of URI. Requires Credentials defined in boto config file (see
        boto/pyami/config.py).
        @type storage_uri: StorageUri
        @param storage_uri: StorageUri specifying a bucket or a bucket+object
        @rtype: L{AWSAuthConnection<boto.gs.connection.AWSAuthConnection>}
        @return: A connection to storage service provider of the given URI.
        """

        connection_args = dict(self.connection_args or ())
        # Use OrdinaryCallingFormat instead of boto-default
        # SubdomainCallingFormat because the latter changes the hostname
        # that's checked during cert validation for HTTPS connections,
        # which will fail cert validation (when cert validation is enabled).
        # Note: the following import can't be moved up to the start of
        # this file else it causes a config import failure when run from
        # the resumable upload/download tests.
        from boto.s3.connection import OrdinaryCallingFormat
        connection_args['calling_format'] = OrdinaryCallingFormat()
        connection_args.update(kwargs)
        if not self.connection:
            if self.scheme == 's3':
                from boto.s3.connection import S3Connection
                self.connection = S3Connection(access_key_id,
                                               secret_access_key,
                                               **connection_args)
            elif self.scheme == 'gs':
                from boto.gs.connection import GSConnection
                self.connection = GSConnection(access_key_id,
                                               secret_access_key,
                                               **connection_args)
            elif self.scheme == 'file':
                from boto.file.connection import FileConnection
                self.connection = FileConnection(self)
            else:
                raise InvalidUriError('Unrecognized scheme "%s"' %
                                      self.scheme)
        self.connection.debug = self.debug
        return self.connection
Example #12
 def test_2_copy_key(self):
     """test copying a key from one bucket to another"""
     c = GSConnection()
     # create two new, empty buckets
     bucket_name_1 = 'test1-%d' % int(time.time())
     bucket_name_2 = 'test2-%d' % int(time.time())
     bucket1 = c.create_bucket(bucket_name_1)
     bucket2 = c.create_bucket(bucket_name_2)
     # verify buckets got created
     bucket1 = c.get_bucket(bucket_name_1)
     bucket2 = c.get_bucket(bucket_name_2)
     # create a key in bucket1 and give it some content
     key_name = 'foobar'
     k1 = bucket1.new_key(key_name)
     assert isinstance(k1, bucket1.key_class)
     k1.name = key_name
     s = 'This is a test.'
     k1.set_contents_from_string(s)
     # copy the new key from bucket1 to bucket2
     k1.copy(bucket_name_2, key_name)
     # now copy the contents from bucket2 to a local file
     k2 = bucket2.lookup(key_name)
     assert isinstance(k2, bucket2.key_class)
     fp = open('foobar', 'wb')
     k2.get_contents_to_file(fp)
     fp.close()
     fp = open('foobar')
     # check to make sure content read is identical to original
     assert s == fp.read(), 'move test failed!'
     fp.close()
     # delete keys
     bucket1.delete_key(k1)
     bucket2.delete_key(k2)
     # delete test buckets
     c.delete_bucket(bucket1)
     c.delete_bucket(bucket2)
     # delete temp file
     os.unlink('foobar')
Example #13
 def test_1_basic(self):
     print '--- running GSConnection tests ---'
     c = GSConnection()
     # create a new, empty bucket
     bucket_name = 'test-%d' % int(time.time())
     bucket = c.create_bucket(bucket_name)
     # now try a get_bucket call and see if it's really there
     bucket = c.get_bucket(bucket_name)
     k = bucket.new_key()
     k.name = 'foobar'
     s1 = 'This is a test of file upload and download'
     s2 = 'This is a second string to test file upload and download'
     k.set_contents_from_string(s1)
     fp = open('foobar', 'wb')
     # now get the contents from s3 to a local file
     k.get_contents_to_file(fp)
     fp.close()
     fp = open('foobar')
     # check to make sure content read from s3 is identical to original
     assert s1 == fp.read(), 'corrupted file'
     fp.close()
     bucket.delete_key(k)
     # test a few variations on get_all_keys - first load some data
     # for the first one, let's override the content type
     phony_mimetype = 'application/x-boto-test'
     headers = {'Content-Type': phony_mimetype}
     k.name = 'foo/bar'
     k.set_contents_from_string(s1, headers)
     k.name = 'foo/bas'
     k.set_contents_from_filename('foobar')
     k.name = 'foo/bat'
     k.set_contents_from_string(s1)
     k.name = 'fie/bar'
     k.set_contents_from_string(s1)
     k.name = 'fie/bas'
     k.set_contents_from_string(s1)
     k.name = 'fie/bat'
     k.set_contents_from_string(s1)
     # try resetting the contents to another value
     md5 = k.md5
     k.set_contents_from_string(s2)
     assert k.md5 != md5
     os.unlink('foobar')
     all = bucket.get_all_keys()
     assert len(all) == 6
     rs = bucket.get_all_keys(prefix='foo')
     assert len(rs) == 3
     rs = bucket.get_all_keys(prefix='', delimiter='/')
     assert len(rs) == 2
     rs = bucket.get_all_keys(maxkeys=5)
     assert len(rs) == 5
     # test the lookup method
     k = bucket.lookup('foo/bar')
     assert isinstance(k, bucket.key_class)
     assert k.content_type == phony_mimetype
     k = bucket.lookup('notthere')
      assert k is None
     # try some metadata stuff
     k = bucket.new_key()
     k.name = 'has_metadata'
     mdkey1 = 'meta1'
     mdval1 = 'This is the first metadata value'
     k.set_metadata(mdkey1, mdval1)
     mdkey2 = 'meta2'
     mdval2 = 'This is the second metadata value'
     k.set_metadata(mdkey2, mdval2)
     # try a unicode metadata value
     
     mdval3 = u'föö'
     mdkey3 = 'meta3'
     k.set_metadata(mdkey3, mdval3)
     k.set_contents_from_string(s1)
     
     k = bucket.lookup('has_metadata')
     assert k.get_metadata(mdkey1) == mdval1
     assert k.get_metadata(mdkey2) == mdval2
     assert k.get_metadata(mdkey3) == mdval3
     k = bucket.new_key()
     k.name = 'has_metadata'
     k.get_contents_as_string()
     assert k.get_metadata(mdkey1) == mdval1
     assert k.get_metadata(mdkey2) == mdval2
     assert k.get_metadata(mdkey3) == mdval3
     bucket.delete_key(k)
     # test list and iterator
     rs1 = bucket.list()
     num_iter = 0
     for r in rs1:
         num_iter = num_iter + 1
     rs = bucket.get_all_keys()
     num_keys = len(rs)
     assert num_iter == num_keys
     # try a key with a funny character
     k = bucket.new_key()
     k.name = 'testnewline\n'
     k.set_contents_from_string('This is a test')
     rs = bucket.get_all_keys()
     assert len(rs) == num_keys + 1
     bucket.delete_key(k)
     rs = bucket.get_all_keys()
     assert len(rs) == num_keys
     # try some acl stuff
     bucket.set_acl('public-read')
     acl = bucket.get_acl()
     assert len(acl.entries.entry_list) == 2
     bucket.set_acl('private')
     acl = bucket.get_acl()
     assert len(acl.entries.entry_list) == 1
     k = bucket.lookup('foo/bar')
     k.set_acl('public-read')
     acl = k.get_acl()
     assert len(acl.entries.entry_list) == 2
     k.set_acl('private')
     acl = k.get_acl()
     assert len(acl.entries.entry_list) == 1
     # now delete all keys in bucket
     for k in bucket:
         bucket.delete_key(k)
     # now delete bucket
     time.sleep(5)
     c.delete_bucket(bucket)
     print '--- tests completed ---'
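
The list/iterator check above relies on bucket.list() paging through every key while get_all_keys() returns a single result set (up to 1000 keys by default). A short sketch of that equivalence for a small bucket:

names_iter = [k.name for k in bucket.list()]          # lazily pages through all keys
names_page = [k.name for k in bucket.get_all_keys()]  # one page of results
assert names_iter == names_page  # holds while the bucket fits in one page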
Example #14
    def test_1_basic(self):
        """basic regression test for Google Cloud Storage"""
        print '--- running GSConnection tests ---'
        c = GSConnection()
        # create a new, empty bucket
        bucket_name = 'test-%d' % int(time.time())
        bucket = c.create_bucket(bucket_name)
        # now try a get_bucket call and see if it's really there
        bucket = c.get_bucket(bucket_name)
        k = bucket.new_key()
        k.name = 'foobar'
        s1 = 'This is a test of file upload and download'
        s2 = 'This is a second string to test file upload and download'
        k.set_contents_from_string(s1)
        fp = open('foobar', 'wb')
        # now get the contents from s3 to a local file
        k.get_contents_to_file(fp)
        fp.close()
        fp = open('foobar')
        # check to make sure content read from s3 is identical to original
        assert s1 == fp.read(), 'corrupted file'
        fp.close()
        bucket.delete_key(k)
        # test a few variations on get_all_keys - first load some data
        # for the first one, let's override the content type
        phony_mimetype = 'application/x-boto-test'
        headers = {'Content-Type': phony_mimetype}
        k.name = 'foo/bar'
        k.set_contents_from_string(s1, headers)
        k.name = 'foo/bas'
        k.set_contents_from_filename('foobar')
        k.name = 'foo/bat'
        k.set_contents_from_string(s1)
        k.name = 'fie/bar'
        k.set_contents_from_string(s1)
        k.name = 'fie/bas'
        k.set_contents_from_string(s1)
        k.name = 'fie/bat'
        k.set_contents_from_string(s1)
        # try resetting the contents to another value
        md5 = k.md5
        k.set_contents_from_string(s2)
        assert k.md5 != md5
        # Test for stream API
        fp2 = open('foobar', 'rb')
        k.md5 = None
        k.base64md5 = None
        k.set_contents_from_stream(fp2, headers=headers)
        fp = open('foobar1', 'wb')
        k.get_contents_to_file(fp)
        fp.close()
        fp2.seek(0,0)
        fp = open('foobar1', 'rb')
        assert (fp2.read() == fp.read()), 'Chunked Transfer corrupted the Data'
        fp.close()
        fp2.close()
        os.unlink('foobar1')
        os.unlink('foobar')
        all = bucket.get_all_keys()
        assert len(all) == 6
        rs = bucket.get_all_keys(prefix='foo')
        assert len(rs) == 3
        rs = bucket.get_all_keys(prefix='', delimiter='/')
        assert len(rs) == 2
        rs = bucket.get_all_keys(maxkeys=5)
        assert len(rs) == 5
        # test the lookup method
        k = bucket.lookup('foo/bar')
        assert isinstance(k, bucket.key_class)
        assert k.content_type == phony_mimetype
        k = bucket.lookup('notthere')
        assert k is None
        # try some metadata stuff
        k = bucket.new_key()
        k.name = 'has_metadata'
        mdkey1 = 'meta1'
        mdval1 = 'This is the first metadata value'
        k.set_metadata(mdkey1, mdval1)
        mdkey2 = 'meta2'
        mdval2 = 'This is the second metadata value'
        k.set_metadata(mdkey2, mdval2)
        # try a unicode metadata value

        mdval3 = u'föö'
        mdkey3 = 'meta3'
        k.set_metadata(mdkey3, mdval3)
        k.set_contents_from_string(s1)

        k = bucket.lookup('has_metadata')
        assert k.get_metadata(mdkey1) == mdval1
        assert k.get_metadata(mdkey2) == mdval2
        assert k.get_metadata(mdkey3) == mdval3
        k = bucket.new_key()
        k.name = 'has_metadata'
        k.get_contents_as_string()
        assert k.get_metadata(mdkey1) == mdval1
        assert k.get_metadata(mdkey2) == mdval2
        assert k.get_metadata(mdkey3) == mdval3
        bucket.delete_key(k)
        # test list and iterator
        rs1 = bucket.list()
        num_iter = 0
        for r in rs1:
            num_iter = num_iter + 1
        rs = bucket.get_all_keys()
        num_keys = len(rs)
        assert num_iter == num_keys
        # try some acl stuff
        bucket.set_acl('public-read')
        acl = bucket.get_acl()
        assert len(acl.entries.entry_list) == 2
        bucket.set_acl('private')
        acl = bucket.get_acl()
        assert len(acl.entries.entry_list) == 1
        k = bucket.lookup('foo/bar')
        k.set_acl('public-read')
        acl = k.get_acl()
        assert len(acl.entries.entry_list) == 2
        k.set_acl('private')
        acl = k.get_acl()
        assert len(acl.entries.entry_list) == 1
        # try set/get raw logging subresource
        empty_logging_str="<?xml version='1.0' encoding='UTF-8'?><Logging/>"
        logging_str = (
            "<?xml version='1.0' encoding='UTF-8'?><Logging>"
            "<LogBucket>log-bucket</LogBucket>" +
            "<LogObjectPrefix>example</LogObjectPrefix>" +
            "<PredefinedAcl>bucket-owner-full-control</PredefinedAcl>" +
            "</Logging>")
        bucket.set_subresource('logging', logging_str)
        assert bucket.get_subresource('logging') == logging_str
        # try disable/enable logging
        bucket.disable_logging()
        assert bucket.get_subresource('logging') == empty_logging_str
        bucket.enable_logging('log-bucket', 'example',
                             canned_acl='bucket-owner-full-control')
        assert bucket.get_subresource('logging') == logging_str
        # now delete all keys in bucket
        for k in bucket:
            bucket.delete_key(k)
        # now delete bucket
        time.sleep(5)
        c.delete_bucket(bucket)
Example #15
 def test_3_default_object_acls(self):
     """test default object acls"""
     c = GSConnection()
     # create a new bucket
     bucket_name = 'test-%d' % int(time.time())
     bucket = c.create_bucket(bucket_name)
     # now call get_bucket to see if it's really there
     bucket = c.get_bucket(bucket_name)
     # get default acl and make sure it's empty
     acl = bucket.get_def_acl()
     assert acl.to_xml() == '<AccessControlList></AccessControlList>'
     # set default acl to a canned acl and verify it gets set
     bucket.set_def_acl('public-read')
     acl = bucket.get_def_acl()
     # save public-read acl for later test
     public_read_acl = acl
     assert acl.to_xml() == ('<AccessControlList><Entries><Entry>'    +
       '<Scope type="AllUsers"></Scope><Permission>READ</Permission>' +
       '</Entry></Entries></AccessControlList>')
     # back to private acl
     bucket.set_def_acl('private')
     acl = bucket.get_def_acl()
     assert acl.to_xml() == '<AccessControlList></AccessControlList>'
     # set default acl to an xml acl and verify it gets set
     bucket.set_def_acl(public_read_acl)
     acl = bucket.get_def_acl()
     assert acl.to_xml() == ('<AccessControlList><Entries><Entry>'    +
       '<Scope type="AllUsers"></Scope><Permission>READ</Permission>' +
       '</Entry></Entries></AccessControlList>')
     # back to private acl
     bucket.set_def_acl('private')
     acl = bucket.get_def_acl()
     assert acl.to_xml() == '<AccessControlList></AccessControlList>'
     # delete bucket
     c.delete_bucket(bucket)
     # repeat default acl tests using boto's storage_uri interface
     # create a new bucket
     bucket_name = 'test-%d' % int(time.time())
     uri = storage_uri('gs://' + bucket_name)
     uri.create_bucket()
     # get default acl and make sure it's empty
     acl = uri.get_def_acl()
     assert acl.to_xml() == '<AccessControlList></AccessControlList>'
     # set default acl to a canned acl and verify it gets set
     uri.set_def_acl('public-read')
     acl = uri.get_def_acl()
     # save public-read acl for later test
     public_read_acl = acl
     assert acl.to_xml() == ('<AccessControlList><Entries><Entry>'    +
       '<Scope type="AllUsers"></Scope><Permission>READ</Permission>' +
       '</Entry></Entries></AccessControlList>')
     # back to private acl
     uri.set_def_acl('private')
     acl = uri.get_def_acl()
     assert acl.to_xml() == '<AccessControlList></AccessControlList>'
     # set default acl to an xml acl and verify it gets set
     uri.set_def_acl(public_read_acl)
     acl = uri.get_def_acl()
     assert acl.to_xml() == ('<AccessControlList><Entries><Entry>'    +
       '<Scope type="AllUsers"></Scope><Permission>READ</Permission>' +
       '</Entry></Entries></AccessControlList>')
     # back to private acl
     uri.set_def_acl('private')
     acl = uri.get_def_acl()
     assert acl.to_xml() == '<AccessControlList></AccessControlList>'
     # delete bucket
     uri.delete_bucket()
     
     print '--- tests completed ---'
Example #16
from datetime import datetime
from time import sleep

captured_path = '/home/pi/photobooth/captured'
uploaded_path = '/home/pi/photobooth/uploaded'
py_path = '/home/pi/photobooth/py'

config = ConfigParser()
config.read(os.path.join(py_path, 'config.secret'))

gs_project_id = config.get('Google', 'ProjectId')  # my project
gs_bucket_name = gs_project_id + '.appspot.com'
gs_bucket_destination_prefix = 'photobooth'

conn = GSConnection()
bucket = conn.get_bucket(gs_bucket_name)

firebase_secret = config.get('Firebase', 'Secret')
firebase_destination_prefix = 'images'

auth = firebase.FirebaseAuthentication(firebase_secret,
                                       config.get('Firebase',
                                                  'Email'))  # my email
user = auth.get_user()
app = firebase.FirebaseApplication(config.get('Firebase', 'App'),
                                   authentication=None)
app.authentication = auth

# Find the maximum key in Firebase right now; assume the format is [0-9]{5}.
all_things = app.get('/images', None)
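
A hedged continuation sketch implementing the comment's stated intent; the key format and empty-result handling are assumptions, not from the original:

# Assumed: existing entries are keyed '00000'..'99999'; pick the next index.
if all_things:
    next_index = max(int(name) for name in all_things) + 1
else:
    next_index = 0
next_key = '%05d' % next_index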
Example #17
 def test_3_default_object_acls(self):
     """test default object acls"""
     # regexp for matching project-private default object ACL
     project_private_re = (
         r'\s*<AccessControlList>\s*<Entries>\s*<Entry>'
         r'\s*<Scope type="GroupById"><ID>[0-9a-fA-F]+</ID></Scope>'
         r'\s*<Permission>FULL_CONTROL</Permission>\s*</Entry>\s*<Entry>'
         r'\s*<Scope type="GroupById"><ID>[0-9a-fA-F]+</ID></Scope>'
         r'\s*<Permission>FULL_CONTROL</Permission>\s*</Entry>\s*<Entry>'
         r'\s*<Scope type="GroupById"><ID>[0-9a-fA-F]+</ID></Scope>'
         r'\s*<Permission>READ</Permission></Entry>\s*</Entries>'
         r'\s*</AccessControlList>\s*')
     c = GSConnection()
     # create a new bucket
     bucket_name = 'test-%d' % int(time.time())
     bucket = c.create_bucket(bucket_name)
     # now call get_bucket to see if it's really there
     bucket = c.get_bucket(bucket_name)
     # get default acl and make sure it's project-private
     acl = bucket.get_def_acl()
     assert re.search(project_private_re, acl.to_xml())
     # set default acl to a canned acl and verify it gets set
     bucket.set_def_acl('public-read')
     acl = bucket.get_def_acl()
     # save public-read acl for later test
     public_read_acl = acl
     assert acl.to_xml() == ('<AccessControlList><Entries><Entry>'    +
       '<Scope type="AllUsers"></Scope><Permission>READ</Permission>' +
       '</Entry></Entries></AccessControlList>')
     # back to private acl
     bucket.set_def_acl('private')
     acl = bucket.get_def_acl()
     assert acl.to_xml() == '<AccessControlList></AccessControlList>'
     # set default acl to an xml acl and verify it gets set
     bucket.set_def_acl(public_read_acl)
     acl = bucket.get_def_acl()
     assert acl.to_xml() == ('<AccessControlList><Entries><Entry>'    +
       '<Scope type="AllUsers"></Scope><Permission>READ</Permission>' +
       '</Entry></Entries></AccessControlList>')
     # back to private acl
     bucket.set_def_acl('private')
     acl = bucket.get_def_acl()
     assert acl.to_xml() == '<AccessControlList></AccessControlList>'
     # delete bucket
     c.delete_bucket(bucket)
     # repeat default acl tests using boto's storage_uri interface
     # create a new bucket
     bucket_name = 'test-%d' % int(time.time())
     uri = storage_uri('gs://' + bucket_name)
     uri.create_bucket()
     # get default acl and make sure it's project-private
     acl = uri.get_def_acl()
     assert re.search(project_private_re, acl.to_xml())
     # set default acl to a canned acl and verify it gets set
     uri.set_def_acl('public-read')
     acl = uri.get_def_acl()
     # save public-read acl for later test
     public_read_acl = acl
     assert acl.to_xml() == ('<AccessControlList><Entries><Entry>'    +
       '<Scope type="AllUsers"></Scope><Permission>READ</Permission>' +
       '</Entry></Entries></AccessControlList>')
     # back to private acl
     uri.set_def_acl('private')
     acl = uri.get_def_acl()
     assert acl.to_xml() == '<AccessControlList></AccessControlList>'
     # set default acl to an xml acl and verify it gets set
     uri.set_def_acl(public_read_acl)
     acl = uri.get_def_acl()
     assert acl.to_xml() == ('<AccessControlList><Entries><Entry>'    +
       '<Scope type="AllUsers"></Scope><Permission>READ</Permission>' +
       '</Entry></Entries></AccessControlList>')
     # back to private acl
     uri.set_def_acl('private')
     acl = uri.get_def_acl()
     assert acl.to_xml() == '<AccessControlList></AccessControlList>'
     # delete bucket
     uri.delete_bucket()
     
     print '--- tests completed ---'
Example #18
 def setUp(self):
     self._conn = GSConnection()
     self._buckets = []
     self._tempdirs = []
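
A hedged sketch of the matching tearDown implied by the lists above; the cleanup order and helpers are assumptions, not part of the original:

 def tearDown(self):
     import shutil
     # Assumed cleanup: empty and delete every bucket the test created,
     # then remove any temporary directories.
     for bucket in self._buckets:
         for key in bucket:
             key.delete()
         self._conn.delete_bucket(bucket)
     for path in self._tempdirs:
         shutil.rmtree(path, ignore_errors=True)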