Exemple #1
0
def vague_search(name):
    """Look up a singer's homepage ID by exact name match.

    Returns the singer_id for *name*, or None when no row matches.
    """
    database = Create_db(db_table, path=db_name)
    rows = database.search_sql("singer_name, singer_id")
    hits = (row[1] for row in rows if row[0] == name)
    return next(hits, None)
Exemple #2
0
class r_db_file(object):
    """Read-only helper over the file database: path lookup and name listing."""

    def __init__(self):
        self.db = Create_db(db_table, db_mode, db_data, path=None)
        self.db_li_path = self.db.search_sql('file_path, file_name')

    def get_file(self, find_str):
        """Return the full path of the file named *find_str*, or None."""
        matches = (os.path.join(row[0], row[1])
                   for row in self.db_li_path
                   if row[1] == find_str)
        return next(matches, None)

    def r_list(self):
        """Return every file name stored in the database as a list."""
        return [list_to_str(row) for row in self.db.search_sql('file_name')]
Exemple #3
0
def table_from_db(value):
    """Return True if *value* is the name of a table in the cache database.

    Short-circuits over the table list instead of materializing it and
    avoids the redundant ``if cond: return True else: return False`` form.
    """
    tables = Create_db(path=cache_db).search_table()
    return any(list_to_str(entry) == value for entry in tables)
Exemple #4
0
def get_song(singer_name, song_name):
    """Return the cached song ID of *song_name* in *singer_name*'s table.

    Returns None when the song is not found, and False when the table
    cannot be queried (OperationalError).
    """
    try:
        rows = Create_db(table=singer_name,
                         path=cache_db).search_sql("song_name, song_id")
        return next((row[1] for row in rows if row[0] == song_name), None)
    except OperationalError:
        return False
Exemple #5
0
def down_all_index_song(singer_name, rule=None):
    """Download every indexed song of *singer_name*.

    Fetches and caches the singer's index first when it is missing.
    When rule == '名称' the per-singer download folder is created
    before downloading.
    """
    if not table_from_db(singer_name):
        get_singer_song(singer_name)

    target_dir = f'./song/{singer_name}'
    if rule == '名称' and not os.path.exists(target_dir):
        os.makedirs(target_dir)

    songs = Create_db(table=singer_name,
                      path=cache_db).search_sql("song_name, song_id")
    for row in songs:
        down_song(row[1], row[0], target_dir)
Exemple #6
0
class quick(object):
    """Convenience wrapper around Create_db for building and filling a
    file-index database."""

    def __init__(self, table, mode, data, d_path):
        self.table = table  # table name
        self.mode = mode  # column-definition SQL
        self.data = data  # row-insertion SQL statement
        self.db = Create_db(self.table, self.mode, self.data, d_path)

    def new_db(self):
        """Create the database table."""
        self.db.new_sql()

    def new_index(self, s_path):
        """Walk *s_path*, insert one row per file found, then commit.

        Prints the elapsed time.  OSError aborts the walk silently:
        file-permission problems make an exhaustive scan impossible,
        so that part is deliberately ignored.
        """
        # perf_counter is monotonic — correct for elapsed-time measurement,
        # unlike wall-clock time.time() which can jump.
        start = time.perf_counter()
        try:
            for root, _dirs, filenames in os.walk(s_path):
                if filenames and root:
                    for name in filenames:
                        self.db.add_sql(_control(root, name))
            self.db.com_clone()
        except OSError:
            # Permission errors etc. — skip what cannot be scanned.
            pass
        print(time.perf_counter() - start)

    @staticmethod
    def search():
        """Return the list of indexed file names."""
        return r_db_file().r_list()
Exemple #7
0
def get_data(url):
    """Scrape singer names and IDs from *url* and store them in the database.

    Table creation is best-effort: if the table already exists, the
    creation step is skipped rather than aborting the import.
    """
    try:
        Create_db(db_table, db_mode, db_sql, db_name).new_sql()
    except Exception:
        # Table most likely already exists — proceed with the insert.
        # (Narrowed from a bare except, which also swallowed SystemExit
        # and KeyboardInterrupt.)
        pass

    db = Create_db(db_table, db_mode, db_sql, db_name)
    scraper = GETNameID(url)  # build once — avoids fetching the page twice
    for singer_index, index_id in zip(scraper.song_name(), scraper.song_id()):
        db.add_sql({"singer_name": singer_index, "singer_id": index_id})
    db.com_clone()
Exemple #8
0
def get_singer_song(name):
    """Fetch the full song index for the singer *name* and cache it.

    NOTE(review): *name* is interpolated directly into the SQL text, so a
    value containing SQL metacharacters would break (or inject into) the
    statement.  Acceptable only while names come from the local database —
    confirm before exposing this to external input.
    """
    db_mode_singer = f"""
        create table {name} (
            [id] integer PRIMARY KEY AUTOINCREMENT,
            song_name text,
            song_id text
        )
    """
    db_sql_singer = f"""
        insert into {name} (song_name, song_id) 
        values (:song_name, :song_id)
    """
    try:
        Create_db(name, db_mode_singer, db_sql_singer, cache_db).new_sql()
    except Exception:
        # Best-effort creation: the table may already exist.  (Narrowed
        # from a bare except.)
        pass
    cache = Create_db(name, db_mode_singer, db_sql_singer, cache_db)
    singer_page_id = vague_search(name)
    # Build the index scraper once instead of once per zip() argument.
    index = GETIndex(f'{song_index}{singer_page_id}')
    for song_id, song in zip(index.song_id(), index.song_name()):
        # local renamed from `id`, which shadowed the builtin
        cache.add_sql({"song_name": song, "song_id": song_id})
    cache.com_clone()
Exemple #9
0
 def __init__(self):
     # Open the database and cache the (file_path, file_name) rows once.
     # Presumably db_table/db_mode/db_data are module-level SQL config —
     # defined outside this view; confirm against the module header.
     self.db = Create_db(db_table, db_mode, db_data, path=None)
     self.db_li_path = self.db.search_sql('file_path, file_name')
Exemple #10
0
 def __init__(self, table, mode, data, d_path):
     """Store the table/SQL configuration and open the database."""
     self.table = table  # table name
     self.mode = mode  # column-definition SQL
     self.data = data  # row-insertion SQL statement
     self.db = Create_db(self.table, self.mode, self.data, d_path)
Exemple #11
0
def incremental_backup():
    """Create an incremental backup of READ_DB.

    Compares the unpacked previous backup (in TEMP_PATH) against the live
    READ_DB tree, copies added/modified/renamed entries into the temp
    area, removes deleted ones, then re-archives the temp area into a
    timestamp-named folder and shows a success dialog.

    NOTE(review): mutates module-level state (old_file_list,
    new_file_list, add_backup, del_backup, old_dict, ...) — a second call
    in the same process would see stale entries from the first run.
    """
    if original_file()[0] not in os.listdir(
            fr'.\backups\incremental\{os.path.basename(READ_DB)}'):
        # Copy the full backup over if it does not exist yet.
        copy_ready()

    unzip_backup()
    # Timestamp used as the backup folder name.  The original built this
    # through a throwaway class whose __repr__ called now_time(); that is
    # equivalent to str(now_time()), and the hack shadowed the `time`
    # module name.
    timestamp = str(now_time())
    basename_folder = os.path.basename(READ_DB)  # folder name only, no path
    new_backup = r'.\backups\incremental\{}\{}'.format(basename_folder,
                                                       timestamp)
    if not os.path.exists(new_backup):
        os.makedirs(new_backup)

    read_full_db = Create_db(db_table, db_mode, db_data, original_file()[3])
    old_file_hash = [
        list_to_str(i) for i in read_full_db.search_sql('file_hash')
    ]  # file hashes recorded by the original full backup

    for root, dirs, filename in os.walk(TEMP_PATH):
        for file in filename:
            old_file_list.append(os.path.join(root, file))
        for dir_name in dirs:
            old_folder_list.append(os.path.join(root, dir_name))

    # The full backup was unpacked into the temp folder, so its paths do
    # not match the live tree and identical files would look different.
    # Rewrite the temp prefix to the live backup root before comparing.
    old_file = [file.replace(r'.\Temp', READ_DB) for file in old_file_list]
    old_folder = [
        folder.replace(r'.\Temp', READ_DB) for folder in old_folder_list
    ]

    for root, dirs, filename in os.walk(READ_DB):
        for file in filename:
            new_file_list.append(os.path.join(root, file))
        for dir_name in dirs:
            new_folder_list.append(os.path.join(root, dir_name))

    # NOTE(review): pairs walk order with DB row order — only correct if
    # the full backup recorded hashes in the same traversal order.
    for _old_file, _old_hash in zip(old_file_list, old_file_hash):
        old_dict[_old_file] = _old_hash

    for file in new_file_list:
        if file in old_file and Hash(file).md5() not in old_file_hash:
            # Same name, different hash: the file was modified.
            add_backup.add(file)

        elif file not in old_file and Hash(file).md5() not in old_file_hash:
            # Unknown name and unknown hash: a brand-new file.
            # Known limitation: empty files all share one hash, so an
            # added empty file can be misclassified when the backup
            # already contains an empty file.
            add_backup.add(file)

        elif file not in old_file and Hash(file).md5() in old_file_hash:
            # Unknown name but known hash: the file was renamed.
            add_backup.add(file)

    for folder in new_folder_list:
        if folder not in old_folder:
            # Folder only exists in the live tree: newly created.
            add_backup.add(folder)

    for file in old_file:
        if file not in new_file_list:
            # File only exists in the old backup: it was deleted.
            del_backup.add(file)

    for folder in old_folder:
        if folder not in new_folder_list:
            # Folder only exists in the old backup: it was deleted.
            del_backup.add(folder)

    for data in add_backup:
        if os.path.isdir(data):
            shutil.copytree(data,
                            os.path.join(TEMP_PATH, os.path.basename(data)))
        elif os.path.isfile(data):
            shutil.copy(data, os.path.join(TEMP_PATH, os.path.basename(data)))

    for data in del_backup:
        # A bare path such as D:\text could be either an empty folder or a
        # file without an extension, so force the folder removal first and
        # then the file removal.
        del_folder(os.path.join(TEMP_PATH, find_child_folder(data, READ_DB)))
        del_file(os.path.join(TEMP_PATH, find_child_folder(data, READ_DB)))

    # NOTE(review): paths are not quoted — this breaks if any path
    # contains spaces; consider subprocess.run with an argument list.
    os.system(r'{} -mx5 -t7z a {} {}\* -mmt -sdel'.format(
        '7z', '{}'.format(new_backup), TEMP_PATH))
    os.system(f'rd {new_backup}')
    showinfo('提示', '备份成功')
def differential_backup():
    """Create a differential backup of READ_DB under backups/TimeBackup.

    Like the incremental variant, but only accumulates added/modified/
    renamed entries (no deletions), merges all but the three newest
    archives into the working set, then re-archives into a timestamp-
    named folder and shows a success dialog.

    NOTE(review): mutates the same module-level state (old_file_list,
    new_file_list, add_backup, ...); here add_backup is used as a list
    (.append) rather than a set.
    """
    # NOTE(review): the full-backup preparation steps are disabled here:
    # if original_file()[0] not in os.listdir(fr'.\backups\TimeBackup\{os.path.basename(READ_DB)}'):
    #    copy_ready()  # copy the full backup over if it does not exist
    # unzip_backup()

    # Timestamp used as the backup folder name.  Equivalent to the
    # original's throwaway-class __repr__ trick, without shadowing the
    # `time` module.
    timestamp = str(now_time())
    basename_folder = os.path.basename(READ_DB)  # folder name only, no path
    new_backup = r'.\backups\TimeBackup\{}\{}'.format(basename_folder,
                                                      timestamp)
    if not os.path.exists(new_backup):
        os.makedirs(new_backup)

    read_full_db = Create_db(db_table, db_mode, db_data, original_file()[3])
    old_file_hash = [
        list_to_str(i) for i in read_full_db.search_sql('file_hash')
    ]  # file hashes recorded by the original full backup

    for root, dirs, filename in os.walk(TEMP_PATH):
        for file in filename:
            old_file_list.append(os.path.join(root, file))
        for dir_name in dirs:
            old_folder_list.append(os.path.join(root, dir_name))

    # The full backup was unpacked into the temp folder, so its paths do
    # not match the live tree; rewrite the temp prefix to the live backup
    # root before comparing.
    old_file = [file.replace(r'.\Temp', READ_DB) for file in old_file_list]
    old_folder = [
        folder.replace(r'.\Temp', READ_DB) for folder in old_folder_list
    ]

    for root, dirs, filename in os.walk(READ_DB):
        for file in filename:
            new_file_list.append(os.path.join(root, file))
        for dir_name in dirs:
            new_folder_list.append(os.path.join(root, dir_name))

    # NOTE(review): pairs walk order with DB row order — only correct if
    # the full backup recorded hashes in the same traversal order.
    for _old_file, _old_hash in zip(old_file_list, old_file_hash):
        old_dict[_old_file] = _old_hash

    for file in new_file_list:
        if file in old_file and Hash(file).md5() not in old_file_hash:
            # Same name, different hash: the file was modified.
            add_backup.append(file)

        elif file not in old_file and Hash(file).md5() not in old_file_hash:
            # Unknown name and unknown hash: a brand-new file.
            # Known limitation: empty files all share one hash, so an
            # added empty file can be misclassified.
            add_backup.append(file)

        elif file not in old_file and Hash(file).md5() in old_file_hash:
            # Unknown name but known hash: the file was renamed.
            add_backup.append(file)

    for folder in new_folder_list:
        if folder not in old_folder:
            # Folder only exists in the live tree: newly created.
            add_backup.append(folder)

    backup_sys_temp = os.path.join(SYSTEM_TEMP, "OpenBackup")
    if not os.path.exists(backup_sys_temp):
        os.mkdir(backup_sys_temp)

    for data in add_backup:
        if os.path.isdir(data):
            shutil.copytree(
                data, os.path.join(backup_sys_temp, os.path.basename(data)))
        elif os.path.isfile(data):
            shutil.copy(data,
                        os.path.join(backup_sys_temp, os.path.basename(data)))

    # Merge every existing archive except the three newest into the
    # working set before re-archiving.
    time_backup = fr'.\backups\TimeBackup\{os.path.basename(READ_DB)}'
    for archive_7z in os.listdir(time_backup)[0:-3]:
        archive.unzip(os.path.join(time_backup, archive_7z), backup_sys_temp)

    # NOTE(review): paths are not quoted — this breaks if any path
    # contains spaces; consider subprocess.run with an argument list.
    os.system(r'{} -mx5 -t7z a {} {}\* -mmt -sdel'.format(
        '7z', '{}'.format(new_backup), backup_sys_temp))
    os.system(f'rd {new_backup}')
    showinfo('提示', '备份成功')