# -*- coding: utf-8 -*-
"""Regression tests for boto's Google Cloud Storage (GS) support.

These tests run against the live service and assume Google Cloud Storage
credentials are available to boto (e.g. in ~/.boto).
"""

import os
import re
import time
import unittest
import xml.sax

import boto.gs.acl
from boto import handler
from boto import storage_uri
from boto.gs.connection import GSConnection
from boto.gs.cors import Cors


# A unittest.TestCase harness is assumed here (the methods below all take
# self), matching the layout of boto's own test suite.
class GSConnectionTest(unittest.TestCase):

    def test_4_cors_xml(self):
        """test setting and getting of CORS XML documents"""
        cors_empty = '<CorsConfig></CorsConfig>'
        cors_doc = ('<CorsConfig><Cors><Origins><Origin>origin1.example.com'
                    '</Origin><Origin>origin2.example.com</Origin></Origins>'
                    '<Methods><Method>GET</Method><Method>PUT</Method>'
                    '<Method>POST</Method></Methods><ResponseHeaders>'
                    '<ResponseHeader>foo</ResponseHeader>'
                    '<ResponseHeader>bar</ResponseHeader></ResponseHeaders>'
                    '</Cors></CorsConfig>')
        c = GSConnection()
        # create a new bucket
        bucket_name = 'test-%d' % int(time.time())
        bucket = c.create_bucket(bucket_name)
        # now call get_bucket to see if it's really there
        bucket = c.get_bucket(bucket_name)
        # get new bucket cors and make sure it's empty
        cors = re.sub(r'\s', '', bucket.get_cors().to_xml())
        assert cors == cors_empty
        # set cors document on new bucket
        bucket.set_cors(cors_doc)
        cors = re.sub(r'\s', '', bucket.get_cors().to_xml())
        assert cors == cors_doc
        # delete bucket
        c.delete_bucket(bucket)

        # repeat cors tests using boto's storage_uri interface
        # create a new bucket
        bucket_name = 'test-%d' % int(time.time())
        uri = storage_uri('gs://' + bucket_name)
        uri.create_bucket()
        # get new bucket cors and make sure it's empty
        cors = re.sub(r'\s', '', uri.get_cors().to_xml())
        assert cors == cors_empty
        # set cors document on new bucket
        cors_obj = Cors()
        h = handler.XmlHandler(cors_obj, None)
        xml.sax.parseString(cors_doc, h)
        uri.set_cors(cors_obj)
        cors = re.sub(r'\s', '', uri.get_cors().to_xml())
        assert cors == cors_doc
        # delete bucket
        uri.delete_bucket()
        print '--- tests completed ---'
    def test_2_copy_key(self):
        """test copying a key from one bucket to another"""
        c = GSConnection()
        # create two new, empty buckets
        bucket_name_1 = 'test1-%d' % int(time.time())
        bucket_name_2 = 'test2-%d' % int(time.time())
        bucket1 = c.create_bucket(bucket_name_1)
        bucket2 = c.create_bucket(bucket_name_2)
        # verify buckets got created
        bucket1 = c.get_bucket(bucket_name_1)
        bucket2 = c.get_bucket(bucket_name_2)
        # create a key in bucket1 and give it some content
        key_name = 'foobar'
        k1 = bucket1.new_key(key_name)
        assert isinstance(k1, bucket1.key_class)
        k1.name = key_name
        s = 'This is a test.'
        k1.set_contents_from_string(s)
        # copy the new key from bucket1 to bucket2
        k1.copy(bucket_name_2, key_name)
        # now copy the contents from bucket2 to a local file
        k2 = bucket2.lookup(key_name)
        assert isinstance(k2, bucket2.key_class)
        fp = open('foobar', 'wb')
        k2.get_contents_to_file(fp)
        fp.close()
        fp = open('foobar')
        # check to make sure content read is identical to original
        assert s == fp.read(), 'move test failed!'
        fp.close()
        # delete keys
        bucket1.delete_key(k1)
        bucket2.delete_key(k2)
        # delete test buckets
        c.delete_bucket(bucket1)
        c.delete_bucket(bucket2)
        # delete temp file
        os.unlink('foobar')
    def test_1_basic(self):
        """basic regression test for Google Cloud Storage"""
        print '--- running GSConnection tests ---'
        c = GSConnection()
        # create a new, empty bucket
        bucket_name = 'test-%d' % int(time.time())
        bucket = c.create_bucket(bucket_name)
        # now try a get_bucket call and see if it's really there
        bucket = c.get_bucket(bucket_name)
        key_name = 'foobar'
        k = bucket.new_key(key_name)
        s1 = 'This is a test of file upload and download'
        s2 = 'This is a second string to test file upload and download'
        k.set_contents_from_string(s1)
        fp = open(key_name, 'wb')
        # now get the contents from gs to a local file
        k.get_contents_to_file(fp)
        fp.close()
        fp = open(key_name)
        # check to make sure content read from gs is identical to original
        assert s1 == fp.read(), 'corrupted file'
        fp.close()
        bucket.delete_key(k)
        # test a few variations on get_all_keys - first load some data
        # for the first one, let's override the content type
        phony_mimetype = 'application/x-boto-test'
        headers = {'Content-Type': phony_mimetype}
        k.name = 'foo/bar'
        k.set_contents_from_string(s1, headers)
        k.name = 'foo/bas'
        k.set_contents_from_filename('foobar')
        k.name = 'foo/bat'
        k.set_contents_from_string(s1)
        k.name = 'fie/bar'
        k.set_contents_from_string(s1)
        k.name = 'fie/bas'
        k.set_contents_from_string(s1)
        k.name = 'fie/bat'
        k.set_contents_from_string(s1)
        # try resetting the contents to another value
        md5 = k.md5
        k.set_contents_from_string(s2)
        assert k.md5 != md5
        # test the stream API
        fp2 = open('foobar', 'rb')
        k.md5 = None
        k.base64md5 = None
        k.set_contents_from_stream(fp2, headers=headers)
        fp = open('foobar1', 'wb')
        k.get_contents_to_file(fp)
        fp.close()
        fp2.seek(0, 0)
        fp = open('foobar1', 'rb')
        assert fp2.read() == fp.read(), 'Chunked Transfer corrupted the Data'
        fp.close()
        fp2.close()
        os.unlink('foobar1')
        os.unlink('foobar')
        all = bucket.get_all_keys()
        assert len(all) == 6
        rs = bucket.get_all_keys(prefix='foo')
        assert len(rs) == 3
        rs = bucket.get_all_keys(prefix='', delimiter='/')
        assert len(rs) == 2
        rs = bucket.get_all_keys(maxkeys=5)
        assert len(rs) == 5
        # test the lookup method
        k = bucket.lookup('foo/bar')
        assert isinstance(k, bucket.key_class)
        assert k.content_type == phony_mimetype
        k = bucket.lookup('notthere')
        assert k == None
        # try some metadata stuff
        key_name = 'has_metadata'
        k = bucket.new_key(key_name)
        mdkey1 = 'meta1'
        mdval1 = 'This is the first metadata value'
        k.set_metadata(mdkey1, mdval1)
        mdkey2 = 'meta2'
        mdval2 = 'This is the second metadata value'
        k.set_metadata(mdkey2, mdval2)
        # try a unicode metadata value
        mdval3 = u'föö'
        mdkey3 = 'meta3'
        k.set_metadata(mdkey3, mdval3)
        k.set_contents_from_string(s1)
        k = bucket.lookup(key_name)
        assert k.get_metadata(mdkey1) == mdval1
        assert k.get_metadata(mdkey2) == mdval2
        assert k.get_metadata(mdkey3) == mdval3
        k = bucket.new_key(key_name)
        k.get_contents_as_string()
        assert k.get_metadata(mdkey1) == mdval1
        assert k.get_metadata(mdkey2) == mdval2
        assert k.get_metadata(mdkey3) == mdval3
        bucket.delete_key(k)
        # test list and iterator
        rs1 = bucket.list()
        num_iter = 0
        for r in rs1:
            num_iter = num_iter + 1
        rs = bucket.get_all_keys()
        num_keys = len(rs)
        assert num_iter == num_keys
        # try a key with a funny character
        k = bucket.new_key()
        k.name = 'testnewline\n'
        k.set_contents_from_string('This is a test')
        rs = bucket.get_all_keys()
        assert len(rs) == num_keys + 1
        bucket.delete_key(k)
        rs = bucket.get_all_keys()
        assert len(rs) == num_keys
        # try some acl stuff
        bucket.set_acl('public-read')
        acl = bucket.get_acl()
        assert len(acl.entries.entry_list) == 2
        bucket.set_acl('private')
        acl = bucket.get_acl()
        assert len(acl.entries.entry_list) == 1
        k = bucket.lookup('foo/bar')
        k.set_acl('public-read')
        acl = k.get_acl()
        assert len(acl.entries.entry_list) == 2
        k.set_acl('private')
        acl = k.get_acl()
        assert len(acl.entries.entry_list) == 1
        # test case-insensitivity of XML ACL parsing
        acl_xml = (
            '<ACCESSControlList><EntrIes><Entry>'
            '<Scope type="AllUsers"></Scope><Permission>READ</Permission>'
            '</Entry></EntrIes></ACCESSControlList>')
        acl = boto.gs.acl.ACL()
        h = handler.XmlHandler(acl, bucket)
        xml.sax.parseString(acl_xml, h)
        bucket.set_acl(acl)
        assert len(acl.entries.entry_list) == 1
        # try set/get raw logging subresource
        empty_logging_str = "<?xml version='1.0' encoding='UTF-8'?><Logging/>"
        logging_str = (
            "<?xml version='1.0' encoding='UTF-8'?><Logging>"
            '<LogBucket>log-bucket</LogBucket>'
            '<LogObjectPrefix>example</LogObjectPrefix>'
            '<PredefinedAcl>bucket-owner-full-control</PredefinedAcl>'
            '</Logging>')
        bucket.set_subresource('logging', logging_str)
        assert bucket.get_subresource('logging') == logging_str
        # try disable/enable logging
        bucket.disable_logging()
        assert bucket.get_subresource('logging') == empty_logging_str
        bucket.enable_logging('log-bucket', 'example',
                              canned_acl='bucket-owner-full-control')
        assert bucket.get_subresource('logging') == logging_str
        # now delete all keys in bucket
        for k in bucket:
            bucket.delete_key(k)
        # now delete bucket
        time.sleep(5)
        c.delete_bucket(bucket)
    def test_3_default_object_acls(self):
        """test default object acls"""
        # regexp for matching project-private default object ACL
        project_private_re = (
            r'\s*<AccessControlList>\s*<Entries>\s*<Entry>'
            r'\s*<Scope type="GroupById"><ID>[0-9a-fA-F]+</ID></Scope>'
            r'\s*<Permission>FULL_CONTROL</Permission>\s*</Entry>\s*<Entry>'
            r'\s*<Scope type="GroupById"><ID>[0-9a-fA-F]+</ID></Scope>'
            r'\s*<Permission>FULL_CONTROL</Permission>\s*</Entry>\s*<Entry>'
            r'\s*<Scope type="GroupById"><ID>[0-9a-fA-F]+</ID></Scope>'
            r'\s*<Permission>READ</Permission></Entry>\s*</Entries>'
            r'\s*</AccessControlList>\s*')
        c = GSConnection()
        # create a new bucket
        bucket_name = 'test-%d' % int(time.time())
        bucket = c.create_bucket(bucket_name)
        # now call get_bucket to see if it's really there
        bucket = c.get_bucket(bucket_name)
        # get default acl and make sure it's project-private
        acl = bucket.get_def_acl()
        assert re.search(project_private_re, acl.to_xml())
        # set default acl to a canned acl and verify it gets set
        bucket.set_def_acl('public-read')
        acl = bucket.get_def_acl()
        # save public-read acl for later test
        public_read_acl = acl
        assert acl.to_xml() == ('<AccessControlList><Entries><Entry>'
            '<Scope type="AllUsers"></Scope><Permission>READ</Permission>'
            '</Entry></Entries></AccessControlList>')
        # back to private acl
        bucket.set_def_acl('private')
        acl = bucket.get_def_acl()
        assert acl.to_xml() == '<AccessControlList></AccessControlList>'
        # set default acl to an xml acl and verify it gets set
        bucket.set_def_acl(public_read_acl)
        acl = bucket.get_def_acl()
        assert acl.to_xml() == ('<AccessControlList><Entries><Entry>'
            '<Scope type="AllUsers"></Scope><Permission>READ</Permission>'
            '</Entry></Entries></AccessControlList>')
        # back to private acl
        bucket.set_def_acl('private')
        acl = bucket.get_def_acl()
        assert acl.to_xml() == '<AccessControlList></AccessControlList>'
        # delete bucket
        c.delete_bucket(bucket)

        # repeat default acl tests using boto's storage_uri interface
        # create a new bucket
        bucket_name = 'test-%d' % int(time.time())
        uri = storage_uri('gs://' + bucket_name)
        uri.create_bucket()
        # get default acl and make sure it's project-private
        acl = uri.get_def_acl()
        assert re.search(project_private_re, acl.to_xml())
        # set default acl to a canned acl and verify it gets set
        uri.set_def_acl('public-read')
        acl = uri.get_def_acl()
        # save public-read acl for later test
        public_read_acl = acl
        assert acl.to_xml() == ('<AccessControlList><Entries><Entry>'
            '<Scope type="AllUsers"></Scope><Permission>READ</Permission>'
            '</Entry></Entries></AccessControlList>')
        # back to private acl
        uri.set_def_acl('private')
        acl = uri.get_def_acl()
        assert acl.to_xml() == '<AccessControlList></AccessControlList>'
        # set default acl to an xml acl and verify it gets set
        uri.set_def_acl(public_read_acl)
        acl = uri.get_def_acl()
        assert acl.to_xml() == ('<AccessControlList><Entries><Entry>'
            '<Scope type="AllUsers"></Scope><Permission>READ</Permission>'
            '</Entry></Entries></AccessControlList>')
        # back to private acl
        uri.set_def_acl('private')
        acl = uri.get_def_acl()
        assert acl.to_xml() == '<AccessControlList></AccessControlList>'
        # delete bucket
        uri.delete_bucket()
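
# Convenience entry point so the module can be run directly. This guard is
# an addition for standalone use; the suite can equally be driven by an
# external unittest runner.
if __name__ == '__main__':
    unittest.main()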