def uplodeFile(fileName):
    """Upload *fileName* to S3-compatible storage under a date-derived key
    prefix, then remove the local file.

    NOTE(review): the [:4]/[:7]/[:10] slices suggest fileName begins with
    an ISO date ('YYYY-MM-DD...'), giving a YYYY/YYYY-MM/YYYY-MM-DD/<name>
    key layout -- confirm against callers.  Relies on module-level
    aws_access_key_id, aws_secret_access_key, meituanHost, bucketName,
    printlog, S3Connection, Key and os.
    """
    printlog("start uplodeFile process.")
    conn = S3Connection(
        aws_access_key_id=aws_access_key_id,
        aws_secret_access_key=aws_secret_access_key,
        host=meituanHost,
    )
    try:
        # Create the bucket only when it does not already exist.
        if conn.lookup(bucketName) is None:
            conn.create_bucket(bucketName)
        bucket = conn.get_bucket(bucketName)
        key_name = "/".join(
            (fileName[:4], fileName[:7], fileName[:10], fileName))
        k1 = Key(bucket, key_name)
        headers = {"Content-Type": "application/x-gzip"}
        # Binary mode: the payload is gzip data, so text mode ('r+') could
        # corrupt it on some platforms; 'with' also fixes the original's
        # leaked file handle.
        with open(fileName, 'rb') as fp:
            k1.set_contents_from_file(fp, headers)
    finally:
        # Always release the connection, even if the upload fails.
        conn.close()
    os.remove(fileName)
    printlog("end uplodeFile process.")
import mssapi
from mssapi.s3.key import Key

# NOTE(review): test_util is used below but not imported in this span --
# presumably imported elsewhere in the file; confirm.


def create_file(fn, cont):
    """Write *cont* to file *fn*, truncating any existing content."""
    with open(fn, 'w+') as fp:
        fp.write(cont)


def read_file(fn):
    """Return the full contents of file *fn*."""
    with open(fn, 'r') as fp:
        return fp.read()


conn = test_util.get_conn()
b = conn.create_bucket('test_bucket_0')

# test set_contents_from_string
k = Key(b, 'key_0')
k.set_contents_from_string('key0 cont0')
cont = k.get_contents_as_string()
# Fixed label: this asserts set_contents_from_string behaviour, not
# set_metadata (the original label was a copy-paste of the test below).
test_util.assert_eq(cont, 'key0 cont0', 'test set_contents_from_string')

# Overwriting the same key replaces its content.
k.set_contents_from_string('key0 cont1')
cont = k.get_contents_as_string()
test_util.assert_eq(
    cont, 'key0 cont1', 'test set_contents_from_string replace')

# test set_metadata -- metadata must be set before the upload that
# carries it to the server.
k.set_metadata('name', 'chen')
k.set_contents_from_string('key0 cont1')
val = k.get_metadata('name')
test_util.assert_eq(val, 'chen', 'test set_metadata')
# NOTE(review): this fragment relies on conn, b0 and b1 being defined
# earlier in the file (not visible here).  Python 2 print statements
# throughout.
conn.delete_bucket(b1)
conn.head_bucket('tmpbucket0')
# Membership test: True when the connection can see 'tmpbucket0'
# (result is discarded here -- looks like an API smoke test).
'tmpbucket0' in conn
# List every key in bucket b0.
keys = b0.get_all_keys()
for k in keys:
    print k.name
bucket = conn.get_bucket('tmpbucket0')
# Upload via bucket.new_key, from an in-memory string ...
k0 = bucket.new_key('key0')
k0.set_contents_from_string('hello key0')
# ... and via an explicit Key object, from a local file.
k1 = Key(bucket, 'key1')
k1.set_contents_from_filename('./tmp/file_w1')
# Download: whole object as a string ...
k0 = bucket.get_key('key0')
cont = k0.get_contents_as_string()
print cont
# ... and straight into a local file.
k1 = Key(bucket, 'key1')
k1.get_contents_to_filename('./tmp/file_r1')
# Delete the key, then look it up again (lookup's return value --
# presumably None after deletion -- is discarded).
bucket.delete_key('key0')
bucket.lookup('key0')
# Pre-signed URL for key1, valid for 300 seconds.
print k1.generate_url(expires_in = 300)