def uplodeFile(fileName):
    """Upload *fileName* to the configured S3-compatible bucket, then delete it locally.

    The object key is date-partitioned as YYYY/YYYY-MM/YYYY-MM-DD/<fileName>
    (assumes fileName starts with an ISO date prefix, e.g. "2016-01-02..." --
    TODO confirm against the caller). The local file is removed only after a
    successful upload.

    Fixes over the original: `is None` instead of `== None`; the upload file
    handle is closed (it previously leaked); the gzip payload is opened in
    binary mode ('rb', not 'r+'); the connection is closed even on failure;
    a dead `bucket = conn.create_bucket(...)` assignment is removed.
    """
    printlog("start uplodeFile process.")
    conn = S3Connection(
        aws_access_key_id=aws_access_key_id,
        aws_secret_access_key=aws_secret_access_key,
        host=meituanHost,
    )
    try:
        # Create the bucket on first use; get_bucket() below re-fetches and
        # validates it regardless of which branch ran.
        if conn.lookup(bucketName) is None:
            conn.create_bucket(bucketName)
        bucket = conn.get_bucket(bucketName)
        k1 = Key(
            bucket,
            fileName[:4] + "/" + fileName[:7] + "/" + fileName[:10] + "/" + fileName)
        headers = {"Content-Type": "application/x-gzip"}
        # Binary mode: the payload is gzip data; text/update mode ('r+') can
        # corrupt it on some platforms and grants write access needlessly.
        with open(fileName, 'rb') as fp:
            k1.set_contents_from_file(fp, headers)
    finally:
        conn.close()
    os.remove(fileName)
    printlog("end uplodeFile process.")
# Exercise Key read/delete and the content-setter APIs against bucket `b`.
# NOTE(review): `k` and `b` are bound earlier, outside this chunk; locals
# `cont`, `res`, `md5`, and the final `k` keep their original names/values
# because later code may rely on them.

# test read: stream the object and compare against the expected payload
k.open()
cont = k.read()
k.close()
test_util.assert_eq(cont, 'key0 cont2', 'test read')

# test delete: the key must no longer exist afterwards
k.delete()
res = k.exists()
test_util.assert_false(res, 'test delete')

# test set_contents_from_file: upload 1 KiB from an open handle (rewind first)
k = Key(b, 'key_1')
create_file('file_w1', '1' * 1024)
with open('file_w1', 'r') as fp:
    k.set_contents_from_file(fp, rewind=True)
cont = k.get_contents_as_string()
test_util.assert_eq('1' * 1024, cont, 'test set_contents_from_file')

# test md5: the key's md5 attribute must match a locally computed digest
md5 = hashlib.md5('1' * 1024).hexdigest()
test_util.assert_eq(md5, k.md5, 'test k.md5')
os.remove("file_w1")
k.delete()

# test set_contents_from_filename: upload straight from a path
k = Key(b, 'key_1')
create_file('file_w2', '2' * 1024)
k.set_contents_from_filename('file_w2')