def get_contents_to_buf(self):
    #download the file identified by this key and return its decrypted contents as a buffer
    #flow : download from each cloud storage -> combine -> decrypt -> return complete buffer -> flush tmp dir
    combiner = file_mng(self.tmp_dir)
    tmp_dir = combiner.tmp_dir

    #download from each cloud storage
    #download from s3
    print "download from s3"
    self.s3_key.get_contents_to_filename(tmp_dir + "tmp1")
    #download from ssg
    print "download from ssg"
    self.ssg_key.get_contents_to_filename(tmp_dir + "tmp2")

    #combine the downloaded pieces into one encrypted buffer
    enc_buf = combiner.combine_tmp_files()

    #decrypt the combined buffer
    dec = Encryption(self.tree.connection._cre.meta_pw)
    dec_buf = dec.decrypt(enc_buf)

    #flush tmp dir
    combiner.flush_tmp_dir()
    #TODO: on error, print an error message and always flush the tmp dir in a finally block

    return dec_buf
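# Usage sketch for the download path (a minimal example; the `tree.get_key` lookup
# helper shown here is an assumption, not part of this module):
#
#     key = tree.get_key('photo.jpg')          # hypothetical lookup on the tree object
#     plaintext = key.get_contents_to_buf()    # fetches both halves, combines, decrypts
#     with open('photo.jpg', 'wb') as out:
#         out.write(plaintext)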
def set_contents_from_file(self, fp, headers=None, replace=True, cb=None, num_cb=10,
                           policy=None, md5=None, reduced_redundancy=False, encrypt_key=False):
    #upload the contents of an open file object
    #flow : encrypt file -> split file -> upload to each cloud -> flush tmp dir
    self.ssg_key = self.tree.ssg_bucket.new_key(self.name)
    enc_file_path = self.tmp_dir + 'enc_file'

    #encrypt file
    enc = Encryption(self.tree.connection._cre.meta_pw)
    enc_buf = enc.encrypt(fp.read())
    with open(enc_file_path, 'wb') as f:
        f.write(enc_buf)

    #split the encrypted file into the s3 and ssg pieces
    splitter = file_mng(self.tmp_dir)
    s3_tmp, ssg_tmp = splitter.split(enc_file_path)

    #upload request
    print "upload request to api server"
    self.ssg_key.set_contents_from_filename(ssg_tmp, path.getsize(s3_tmp))
    real_name = self.ssg_key.real_name
    self.s3_key = self.tree.s3_bucket.new_key(real_name)
    # if self.s3_key.exists(real_name):
    #     raise ValueError("Key already exists. Input another Key.")

    #upload to each cloud
    #upload to s3
    print "upload to S3"
    self.s3_key.set_contents_from_filename(s3_tmp, headers, replace, cb, num_cb,
                                           policy, md5, reduced_redundancy, encrypt_key)
    #upload to ssg
    #TODO: exception handling needed here
    print "upload to ssg"
    self.ssg_key.send_file()

    #flush tmp dir
    splitter.flush_tmp_dir()
    #TODO: on error, remove the key from both clouds
    print "upload success!"
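# Usage sketch for the upload path (a minimal example; constructing the key via
# `tree.new_key` is an assumption about the surrounding API, not something this
# module defines):
#
#     key = tree.new_key('photo.jpg')          # hypothetical key constructor
#     with open('photo.jpg', 'rb') as fp:
#         key.set_contents_from_file(fp)       # encrypt -> split -> upload to S3 and SSG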