Example #1
 def backup(self):
     if debug_fds: fds_open = fds_open_now()
     # Review note: the SourceDir.incremental_backup() method is confusing.
     # If self.target_object does not exist (i.e. there is no old target
     # version of the current directory), initial_backup() should be called
     # on the sub-object inside the directory (created via
     # SourceObject.create(next_path, self.target, None)), not on self
     # (i.e. the current directory). An incremental_backup() written this way
     # behaves like initial_backup() whenever target_object is missing, so
     # two separate methods are unnecessary (the same fix is needed for
     # incremental_backup() in SourceFile and SourceLnk).
     main_dict = {}
     for F in os.listdir(self.source_path):
         if debug_fds: fds_open_in_loop = fds_open_now()
         next_path = os.path.join(self.source_path, F)
         if self.target_object is not None:
             oldF = self.target_object.get_object(F)
         else:
             oldF = None
         new_object = SourceObject.create(next_path, self.store, oldF)
         if new_object is not None:
             side_dict = new_object.backup()
             main_dict[F] = side_dict
         if debug_fds: check_fds(fds_open_in_loop, F)
     # print(main_dict)
     dir_hash = self.pickling(main_dict)
     if debug_fds: check_fds(fds_open)
     return self.make_side_dict(dir_hash)
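The review note at the top of this example argues for one unified method. A minimal sketch of that shape, assuming SourceObject.create(), get_object(), pickling() and make_side_dict() behave as in the example above (the class skeleton itself is hypothetical, not the project's actual code):

    import os

    class SourceDir(SourceObject):  # hypothetical skeleton for illustration
        def backup(self):
            main_dict = {}
            for name in os.listdir(self.source_path):
                next_path = os.path.join(self.source_path, name)
                # With no old directory version, oldF is None, so every child
                # is created as a fresh backup; one method covers both cases.
                oldF = self.target_object.get_object(name) if self.target_object else None
                new_object = SourceObject.create(next_path, self.store, oldF)
                if new_object is not None:
                    main_dict[name] = new_object.backup()
            return self.make_side_dict(self.pickling(main_dict))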
Example #2
 def backup(self):
     if debug_fds: fds_open = fds_open_now()
     if self.target_object is not None:
         if not self.compare_stat(self.lstat, self.target_object.lstat): # lstat results differ
             if (self.lstat.st_mtime == self.target_object.lstat.st_mtime
                 and self.lstat.st_size == self.target_object.lstat.st_size):
                 if verbose: print("Lnk mtime unchanged. Returning new side_dict(old_hash)!")
                 # same mtime:
                 # build the side dict from the old hash + the current lstat
                 # incIndex here???
                 if debug_fds: check_fds(fds_open)
                 return self.make_side_dict(self.target_object.side_dict['hash']) # old hash
             else:
                 # different mtime
                 link_target = os.readlink(self.source_path)
                 new_hash = hashlib.sha1(link_target).hexdigest() # compute the hash and compare
                 if (new_hash == self.target_object.side_dict['hash']
                     or os.path.exists(self.store.get_object_path(new_hash))):
                     if verbose: print("Lnk mtime changed. Returning new side_dict(new_hash)!")
                     # incIndex here???
                     if debug_fds: check_fds(fds_open)
                     return self.make_side_dict(new_hash)
                 else:
                     if verbose: print("Lnk: new backup object!")
                     if debug_fds: check_fds(fds_open)
                     return self.make_side_dict(self.make_lnk())
         else:
             if verbose: print("Lnk unchanged. Same metadata.")
             # incIndex here???
             if debug_fds: check_fds(fds_open)
             return self.target_object.side_dict # stats are equal
     else:
         if verbose: print("Lnk: new backup object.")
         if debug_fds: check_fds(fds_open)
         return self.make_side_dict(self.make_lnk())
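A note on the hashing here: the symlink itself is never opened; its identity is the target string returned by os.readlink(), and only that string is hashed. A standalone sketch of the idea (the function name is illustrative):

    import hashlib
    import os

    def symlink_hash(path):
        # A symlink's "content" is where it points, so hash the target string.
        target = os.readlink(path)
        return hashlib.sha1(target.encode()).hexdigest()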
Example #3
 def backup(self):
     if debug_fds: fds_open = fds_open_now()
     # if mtime changed, it is worth looking at the file contents;
     # otherwise only interesting metadata may have changed
     if self.target_object is not None:
         if not self.compare_stat(self.lstat, self.target_object.lstat): # lstat results differ
             if (self.lstat.st_mtime == self.target_object.lstat.st_mtime
                 and self.lstat.st_size == self.target_object.lstat.st_size):
                 if verbose: print("File mtime unchanged. Returning new side_dict(old_hash)!")
                 # same mtime:
                 # build the side dict from the old hash + the current lstat
                 if debug_fds: check_fds(fds_open)
                 return self.make_side_dict(self.target_object.side_dict['hash']) # old hash
             else:
                 # different mtime
                 new_hash = self.store.get_hash(self.source_path) # compute the hash and compare
                 # if it is a delta, should the final file be reconstructed and queried instead?
                 if (new_hash == self.target_object.side_dict['hash']
                     or os.path.exists(self.store.get_object_path(new_hash))):
                     if verbose: print("File mtime changed. Returning new side_dict(new_hash)!")
                     if debug_fds: check_fds(fds_open)
                     return self.make_side_dict(new_hash)
                 else:
                     if verbose: print("File: new backup object.")
                     file_hash = self.save_file(self.target_object.side_dict['hash'])
                     if debug_fds: check_fds(fds_open)
                     return self.make_side_dict(file_hash)
         else:
             if verbose: print("File unchanged. Same metadata.")
             # incIndex here???
             if debug_fds: check_fds(fds_open)
             return self.target_object.side_dict # stats are equal
     else:
         if verbose: print("File: new backup object.")
         file_hash = self.save_file()
         if debug_fds: check_fds(fds_open)
         return self.make_side_dict(file_hash)
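This example leans on self.store.get_hash() to decide whether the contents changed. That helper is not shown here; assuming it is a block-wise SHA-1 over the file contents, it could look like this sketch (not the project's actual implementation):

    import hashlib

    def get_hash(path, block_size=65536):
        # Stream the file in blocks so large files never sit fully in memory.
        h = hashlib.sha1()
        with open(path, "rb") as f:
            while True:
                block = f.read(block_size)
                if not block:
                    break
                h.update(block)
        return h.hexdigest()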
Example #4
 def save_file(self, source_path, name, previous_hash=None, block_size=constants.CONST_BLOCK_SIZE):
     if debug_fds: fds_open = fds_open_now()
     file_hash = hashlib.sha1()
     target_file = self.get_journal_object_path(name)
     target_file_header = self.get_journal_object_header_path(name)
     if previous_hash is not None:
         previous_type = self.get_object_type(previous_hash)
         if previous_type == "gz\n" or previous_type == "delta\n":
             # read the stored signature of the previous version from its header
             previous_file = self.get_object_file_header(previous_hash, "rb")
             previous_file.readline()  # skip the type line
             previous_file.readline()  # skip the "signature" label
             sig_size = previous_file.readline()
             sig_data = previous_file.read(int(sig_size))
             previous_file.close()
             # pipe the old signature into rdiff to get a delta against the current file
             deltaProcess = subprocess.Popen(['rdiff', 'delta', '-', source_path], stdout=subprocess.PIPE, stdin=subprocess.PIPE)
             deltaProcess.stdin.write(sig_data)
             deltaProcess.stdin.close()
             with open(target_file, "wb") as TF: # was gzip
                 while True:
                     # read the delta stream in small chunks, hashing as we go
                     deltaData = deltaProcess.stdout.read(16)
                     if deltaData:
                         file_hash.update(deltaData)
                         TF.write(deltaData)
                     else:
                         # delta complete: write the header with the object type,
                         # the signature of the current file, and a pointer to
                         # the previous version's hash
                         with open(target_file_header, "wb") as THF:
                             THF.write("delta\n")
                             THF.write("signature\n")
                             sigProcess = subprocess.Popen(['rdiff', 'signature', source_path], stdout=subprocess.PIPE)
                             signature, signatureErr = sigProcess.communicate()
                             if signatureErr is None:
                                 THF.write(str(len(signature)))
                                 THF.write("\n")
                                 THF.write(signature)
                             else:
                                 THF.write(str(0))
                             THF.write("\n")
                             THF.write("previous\n")
                             THF.write(previous_hash)
                         self.file_rename(target_file, file_hash.hexdigest() + ".data")
                         self.file_rename(target_file_header, file_hash.hexdigest() + ".meta")
                         break
                 self.write_to_journal("move " + self.get_journal_object_path(file_hash.hexdigest()) + " " + os.path.join(self.store_path, "objects", file_hash.hexdigest() + ".data"))
                 self.write_to_journal("move " + self.get_journal_object_header_path(file_hash.hexdigest()) + " " + os.path.join(self.store_path, "objects", file_hash.hexdigest() + ".meta"))
             if debug_fds: check_fds(fds_open)
             return file_hash.hexdigest()
         # elif self.get_object_type(previous_hash) == "delta\n":
         #     # the file would need to be reconstructed, its signature
         #     # computed from it, and a delta stored against the newest version
         #     return
     else:
         # no previous version: store the full file contents
         with open(source_path, "rb") as SF:
             with open(target_file, "wb") as TF: # was gzip
                 while True:
                     block = SF.read(block_size)
                     file_hash.update(block)
                     TF.write(block)
                     if not block:
                         # EOF: rename the data file to its content hash and
                         # write the header with the rdiff signature
                         self.file_rename(target_file, file_hash.hexdigest() + ".data")
                         with open(target_file_header, "wb") as THF:
                             THF.write("gz\n")
                             THF.write("signature\n")
                             sigProcess = subprocess.Popen(['rdiff', 'signature', source_path], stdout=subprocess.PIPE)
                             signature, signatureErr = sigProcess.communicate()
                             if signatureErr is None:
                                 THF.write(str(len(signature)))
                                 THF.write("\n")
                                 THF.write(signature)
                             else:
                                 THF.write(str(0))
                         self.file_rename(target_file_header, file_hash.hexdigest() + ".meta")
                         break
                 self.write_to_journal("move " + self.get_journal_object_path(file_hash.hexdigest()) + " " + os.path.join(self.store_path, "objects", file_hash.hexdigest() + ".data"))
                 self.write_to_journal("move " + self.get_journal_object_header_path(file_hash.hexdigest()) + " " + os.path.join(self.store_path, "objects", file_hash.hexdigest() + ".meta"))
         if debug_fds: check_fds(fds_open)
         return file_hash.hexdigest()
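save_file() stores either a full copy plus an rdiff signature, or a delta computed from the previous version's stored signature. The rdiff (librsync) round trip behind it can be exercised on its own; a short usage sketch with illustrative file names, where the final patch step is what restoring a delta object would need (the commented-out branch above hints at this):

    import subprocess

    # signature of the old version (what save_file() keeps in the .meta header)
    subprocess.check_call(['rdiff', 'signature', 'old.dat', 'old.sig'])
    # delta from that signature to the new version (what the delta branch stores)
    subprocess.check_call(['rdiff', 'delta', 'old.sig', 'new.dat', 'new.delta'])
    # patching the old file with the delta reconstructs the new version
    subprocess.check_call(['rdiff', 'patch', 'old.dat', 'new.delta', 'restored.dat'])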