Example #1
def down(self):
    # Download the archive from the server, log the transfer, then unzip it.
    PySFTP().sftp_down(self.server_file, self.compressed_files)
    data = {
        'local': READ_DB,
        'source': self.compressed_files,
        'destination': self.server_file,
        'handle': 'download'
    }
    Mongodb_server(mongo_host, mongo_port).insert('log', now_time_s(), data)
    archive.unzip(self.compressed_files, READ_DB)
    showinfo('Notice', 'Download successful')
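
The `Mongodb_server` wrapper used for logging here is defined elsewhere in the repo. A minimal sketch of what the call shape implies, assuming pymongo underneath (the constructor and `insert` signature come from the examples; the body is an assumption):

from pymongo import MongoClient

class Mongodb_server:
    # Hypothetical reconstruction; only the call shape is taken from the examples.
    def __init__(self, host, port):
        self.client = MongoClient(host, port)

    def insert(self, db_name, collection, document):
        # Store one log document in db_name.collection.
        self.client[db_name][collection].insert_one(document)
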
Example #2
def _up_down(self):
    # Upload and download, treated as one synchronization pass.
    archive(self.zip_temp, READ_DB).seven_zip(self.folder_file)
    sftp_up = PySFTP()
    Mongodb_server(mongo_host, mongo_port).insert("log", "automation", {'time': now_time_s(), 'handle': 'upload'})
    sftp_up.sftp_up(self.compressed_files, self.server_file)
    sftp_up.sftp_close()
    sftp_down = PySFTP()
    sftp_down.sftp_down(self.server_file, self.compressed_files)
    sftp_down.sftp_close()
    Mongodb_server(mongo_host, mongo_port).insert("log", "automation", {'time': now_time_s(), 'handle': 'download'})
    archive.unzip(self.compressed_files, READ_DB)
    Transmit().clean_temp()
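
Both the `seven_zip` pack call and the `unzip` extract call go through an `archive` helper that is not shown. A minimal sketch, assuming the 7-Zip command-line tool is on PATH (the pack command near the end of Example #8 shows the repo shells out to `7z`; these exact bodies are assumptions):

import subprocess

class archive:
    # Hypothetical reconstruction of the helper the examples rely on.
    def __init__(self, zip_temp, read_db):
        self.zip_temp = zip_temp  # destination .7z path
        self.read_db = read_db

    def seven_zip(self, folder_file):
        # Pack folder_file into a .7z archive.
        subprocess.run(['7z', 'a', '-t7z', self.zip_temp, folder_file], check=True)

    @staticmethod
    def unzip(compressed, destination):
        # Extract into destination, answering yes to overwrite prompts.
        subprocess.run(['7z', 'x', compressed, f'-o{destination}', '-y'], check=True)
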
Example #3
def ok(self):
    # Empty out the source folder before restoring.
    for root, dirs, files in os.walk(READ_DB):
        for f in files:
            os.remove(os.path.join(root, f))

    t, d = recovery_win.handle(self)
    # Restore the full backup first, then apply the differential archive
    # for the selected date on top of it.
    archive.unzip(os.path.join(BACKUP_FILE, get_file()[-2:][0]), READ_DB)
    differ_archive = os.path.join(BACKUP_FILE, f"{d}.7z").replace('/', '\\')
    archive.unzip(differ_archive, READ_DB)
Example #4
def ok(self):
    # Empty out the source folder before restoring.
    for root, dirs, files in os.walk(READ_DB):
        for f in files:
            os.remove(os.path.join(root, f))

    t, d = RecoveryIncremental.handle(self)
    differ_archive = f"{d}.7z"
    # Restore the full backup once, then apply each incremental backup in
    # order up to and including the selected date.
    archive.unzip(os.path.join(BACKUP_FILE, get_file()[-2:][0]), READ_DB)
    incrementals = get_file()[:-2]
    for archive_7z in incrementals[:incrementals.index(differ_archive) + 1]:
        archive.unzip(
            os.path.join(BACKUP_FILE, archive_7z).replace('/', '\\'),
            READ_DB)
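
The slice in Example #4 is easier to see with concrete data. A standalone sketch, assuming `get_file()` returns archive names oldest-first with the two full-backup entries last (the file names here are made up):

# Hypothetical file list: incrementals oldest-first, two full-backup entries last.
archives = ['day1.7z', 'day2.7z', 'day3.7z', 'full.7z', 'full.db']
incrementals = archives[:-2]          # drop the two full-backup entries
target = 'day2.7z'                    # restore point chosen by the user
# Apply every incremental up to and including the target date.
print(incrementals[:incrementals.index(target) + 1])  # ['day1.7z', 'day2.7z']
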
Example #5
def upgrade(file):
    # Unzip an upgrade package into the current working directory.
    archive.unzip(file, r"./")
Example #6
def arg_down(server, clint, source):
    # Download with explicit arguments, then unzip locally.
    PySFTP().sftp_down(server, clint)
    archive.unzip(clint, source)
Example #7
def unzip_backup():
    # Unzip the base package into the temp folder so it can be compared
    # against the contents of the existing folder.
    archive.unzip(original_file()[0], TEMP_PATH)
Example #8
def differential_backup():
    # if original_file()[0] not in os.listdir(fr'.\backups\TimeBackup\{os.path.basename(READ_DB)}'):
    # If the full backup file does not exist yet, copy it over first.
    #    copy_ready()

    # unzip_backup()
    time = now_time()  # timestamp string used to name this backup run
    basename_folder = os.path.basename(READ_DB)  # folder name only, without its path
    new_backup = r'.\backups\TimeBackup\{}\{}'.format(basename_folder,
                                                      time)  # folder named after the date
    if not os.path.exists(new_backup):
        os.makedirs(new_backup)

    read_full_db = Create_db(db_table, db_mode, db_data, original_file()[3])
    old_file_hash = [
        list_to_str(i) for i in read_full_db.search_sql('file_hash')
    ]  # file hashes recorded for the original full backup

    # Collect every file and folder of the unzipped full backup in Temp.
    for root, dirs, filename in os.walk(TEMP_PATH):
        for file in filename:
            old_file_list.append(os.path.join(root, file))
        for dir_name in dirs:
            old_folder_list.append(os.path.join(root, dir_name))

    # The comparison runs against the live folder, but the full backup was
    # unzipped into the temp folder, so the same file carries two different
    # paths and would be treated as two distinct files. As an extra step,
    # rewrite the temp-folder prefix to the source folder's path.
    old_file = [file.replace(r'.\Temp', READ_DB) for file in old_file_list]
    old_folder = [
        folder.replace(r'.\Temp', READ_DB) for folder in old_folder_list
    ]

    # Collect every file and folder of the live source folder.
    for root, dirs, filename in os.walk(READ_DB):
        for file in filename:
            new_file_list.append(os.path.join(root, file))
        for dir_name in dirs:
            new_folder_list.append(os.path.join(root, dir_name))

    # Map each old backup path to its recorded hash.
    for _old_file, _old_hash in zip(old_file_list, old_file_hash):
        old_dict[_old_file] = _old_hash

    for file in new_file_list:
        if file in old_file and Hash(file).md5() not in old_file_hash:
            # Same name but a different hash: the file was modified.
            add_backup.append(file)

        elif file not in old_file and Hash(file).md5() not in old_file_hash:
            # Neither the name nor the hash is in the old backup: a new file.
            # Known issue: if the full backup contains an empty file and a new
            # empty file is added, the identical hashes make it undetectable.
            add_backup.append(file)

        elif file not in old_file and Hash(file).md5() in old_file_hash:
            # Unknown name but a known hash: the file was renamed.
            add_backup.append(file)

    for folder in new_folder_list:
        if folder not in old_folder:
            # A folder present in the new data but not in the backup is new.
            add_backup.append(folder)

    # Staging area under the system temp directory.
    backup_sys_temp = os.path.join(SYSTEM_TEMP, "OpenBackup")
    if not os.path.exists(backup_sys_temp):
        os.mkdir(backup_sys_temp)

    # Copy every changed file or folder into the staging area.
    for data in add_backup:
        if os.path.isdir(data):
            shutil.copytree(
                data, os.path.join(backup_sys_temp, os.path.basename(data)))
        elif os.path.isfile(data):
            shutil.copy(data,
                        os.path.join(backup_sys_temp, os.path.basename(data)))

    # Also unzip the earlier differential archives into the staging area.
    time_backup = fr'.\backups\TimeBackup\{os.path.basename(READ_DB)}'
    for archive_7z in os.listdir(time_backup)[0:-3]:
        archive.unzip(os.path.join(time_backup, archive_7z), backup_sys_temp)

    # Pack the staging area with 7z (-sdel deletes the staged files), then
    # remove the placeholder folder created earlier.
    os.system(r'7z -mx5 -t7z a {} {}\* -mmt -sdel'.format(new_backup, backup_sys_temp))
    os.system(f'rd {new_backup}')
    showinfo('Notice', 'Backup successful')
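
The hash comparison in Example #8 is the heart of the differential backup. A self-contained sketch of the same three-way test (modified / new / renamed), assuming plain hashlib in place of the repo's Hash wrapper and using relative paths instead of the string replace on `.\Temp` (function names here are illustrative, not from the repo):

import hashlib
import os

def md5_of(path):
    # Hash a file's contents in chunks to avoid loading it whole.
    h = hashlib.md5()
    with open(path, 'rb') as fh:
        for chunk in iter(lambda: fh.read(8192), b''):
            h.update(chunk)
    return h.hexdigest()

def changed_files(old_root, new_root):
    # Map each relative path in a tree to its content hash.
    def snapshot(root):
        table = {}
        for base, _dirs, files in os.walk(root):
            for name in files:
                full = os.path.join(base, name)
                # Relative paths make the two trees comparable, which is
                # what the path rewriting in differential_backup achieves.
                table[os.path.relpath(full, root)] = md5_of(full)
        return table

    old, new = snapshot(old_root), snapshot(new_root)
    to_backup = []
    for rel, digest in new.items():
        if rel in old and digest != old[rel]:
            to_backup.append(rel)          # same name, new hash: modified
        elif rel not in old and digest not in old.values():
            to_backup.append(rel)          # unknown name and hash: new file
        elif rel not in old and digest in old.values():
            to_backup.append(rel)          # new name, known hash: renamed
    # Caveat noted in the original comments: two distinct empty files share
    # a hash, so a newly added empty file can be missed by the hash test.
    return to_backup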