def __init__(self):
    self.config = read_config()
    self.task = None
    self.file_obj = None
    self.file_obj_list = []  # {name: file name, size: file size, type: 'local' or 'oss' or 'qinu'}
    self.oss_conf = self.config.oss
    if self.oss_conf:
        self.oss = OssHelper(self.oss_conf.accessKey, self.oss_conf.secretKey, self.oss_conf.url, self.oss_conf.bucket)
    else:
        self.oss = None
    self.db_helper = None
def remote_save(localFilePath, config, taskName):
    ossConf = config.oss
    if ossConf:
        oss = OssHelper(ossConf.accessKey, ossConf.secretKey, ossConf.url,
                        ossConf.bucket)
        ossPath = ossConf.prefix + taskName + \
            "/" + os.path.basename(localFilePath)
        starttime = datetime.datetime.now()
        logger.info(f'start upload file to oss:{ossPath}')
        oss.upload(ossPath, localFilePath)
        endtime = datetime.datetime.now()
        logger.info(
            f'end upload file to oss, took {(endtime - starttime).total_seconds():.1f} seconds')
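A minimal usage sketch of remote_save, assuming a config whose oss.prefix is 'backup/'; the local path and task name below are illustrative only.

# Hypothetical invocation after a backup archive has been written locally.
config = read_config()
remote_save('/data/backup/mysql_daily/20230101.zip', config, 'mysql_daily')
# With config.oss.prefix == 'backup/', the uploaded object key would be
# 'backup/mysql_daily/20230101.zip'.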
Example #3
def clear_old_backup(config, db_file, task_name):
    if 'oss' in config:
        # Clean up old OSS files
        ossConf = config.oss
        oss = OssHelper(ossConf.accessKey, ossConf.secretKey, ossConf.url,
                        ossConf.bucket)
        file_list = oss.get_file_list(os.path.join(ossConf.prefix, task_name))
        file_dict = dict(
            zip(
                map(lambda x: os.path.basename(x['name']).split(".")[0],
                    file_list), file_list))
        ret_file_dict = clean_rule(file_dict, config.oss)
        for key, value in ret_file_dict.items():
            if value: continue
            delete_file = ossConf.prefix + task_name + "/" + file_dict[key][
                'name']
            logger.info(f'delete remote file  {delete_file}')
            oss.delete(delete_file)

    if 'local' in config:
        local_path = os.path.dirname(db_file)
        zip_files = os.listdir(local_path)
        file_dict = dict(
            zip(map(lambda x: x.split(".")[0], zip_files), zip_files))
        ret_file_dict = clean_rule(file_dict, config.local)
        for key, value in ret_file_dict.items():
            if value: continue
            delete_file = os.path.join(local_path, file_dict[key])
            logger.info(f'delete local file  {delete_file}')
            os.remove(delete_file)  # delete files whose value is falsy
        # else:
        #     # Clean up local files; the default retention period is 365 days
        #     expireDays = config.expireDays if config.expireDays else 365
        #     for zip_file in zip_files:
        #         # file creation time
        #         create_time = datetime.datetime.fromtimestamp(os.path.getctime(zip_file))
        #         # days elapsed since the file was created
        #         diff_days = (datetime.datetime.now() - create_time).days
        #         if diff_days > expireDays:
        #             os.remove(zip_file)
    else:  # 'local' is not configured, so no local archive is kept
        os.remove(db_file)
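clean_rule is not defined in this snippet; a minimal sketch of the contract the caller above appears to rely on, with made-up filenames: it takes the {basename-without-extension: file} mapping plus the retention config and returns a dict whose falsy values mark files to delete.

# Illustrative only: assumed shape of clean_rule's input and output.
file_dict = {'20230101': '20230101.zip', '20220101': '20220101.zip'}
ret_file_dict = clean_rule(file_dict, config.local)  # e.g. {'20230101': True, '20220101': None}
expired = [file_dict[k] for k, v in ret_file_dict.items() if not v]  # entries to delete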
Example #4
def clear_old_backup():
    if 'ftp' in REMOTE_SAVE_TYPE:
        # Clean up old backup files on the remote FTP server
        for option in FTP_OPTIONS:
            ftp = FtpHelper(option['host'], option['username'],
                            option['password'], option['port'], option['pasv'])

            for filename in ftp.get_files(option['site_save_path']):
                if is_oldfile(filename):
                    ftp.delete_file(option['site_save_path'], filename)

            for filename in ftp.get_files(option['db_save_path']):
                if is_oldfile(filename):
                    ftp.delete_file(option['db_save_path'], filename)
            ftp.quit()

    # Clean up old local site backups
    for root, dirs, files in os.walk(LOCAL_SAVE_PATH['sites']):
        for filename in files:
            if is_oldfile(filename):
                FileHelper.delete(os.path.join(root, filename))

    # Clean up old local database backups
    for root, dirs, files in os.walk(LOCAL_SAVE_PATH['databases']):
        for filename in files:
            if is_oldfile(filename):
                FileHelper.delete(os.path.join(root, filename))
    if 'oss' in REMOTE_SAVE_TYPE:
        # Clean up old OSS files
        for option in OSS_OPTIONS:
            oss = OssHelper(option['accesskeyid'], option['accesskeysecret'],
                            option['url'], option['bucket'])
            for file in oss.get_file_list(
                    option['sitedir'].rstrip('/') + '/') + oss.get_file_list(
                        option['databasedir'].rstrip('/') + '/'):
                if is_oldfile(os.path.basename(file)):
                    oss.delete(file)
    if 'cos' in REMOTE_SAVE_TYPE:
        # Clean up old COS files
        for option in COS_OPTIONS:
            cos = CosHelper(option['accesskeyid'], option['accesskeysecret'],
                            option['region'], option['bucket'])
            for file in cos.get_file_list(
                    option['sitedir'].rstrip('/') + '/') + cos.get_file_list(
                        option['databasedir'].rstrip('/') + '/'):
                if is_oldfile(os.path.basename(file)):
                    cos.delete(file)
    if 'onedrive' in REMOTE_SAVE_TYPE:
        # Clean up old OneDrive files
        for option in ONE_DRIVE_OPTION:
            od = OneDriveHelper(option['name'])
            for file in od.get_file_list(option['sitedir'].rstrip('/') + '/'):
                if is_oldfile(os.path.basename(file['name'])):
                    od.delete(os.path.join(option['sitedir'], file['name']))
            for file in od.get_file_list(option['databasedir'].rstrip('/') +
                                         '/'):
                if is_oldfile(os.path.basename(file['name'])):
                    od.delete(os.path.join(option['databasedir'],
                                           file['name']))
    log('finished cleaning up old backup files')
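is_oldfile is also external to these snippets; a hypothetical sketch of the age check it presumably performs, assuming backup filenames begin with a YYYYmmdd timestamp and a fixed retention window:

import datetime
import os
import re

EXPIRE_DAYS = 30  # assumed retention window


def is_oldfile(filename):
    # Hypothetical: parse a leading YYYYmmdd timestamp from the filename and
    # treat anything older than EXPIRE_DAYS as expired.
    match = re.match(r'(\d{8})', os.path.basename(filename))
    if not match:
        return False
    created = datetime.datetime.strptime(match.group(1), '%Y%m%d')
    return (datetime.datetime.now() - created).days > EXPIRE_DAYS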
Example #5
def remote_save_oss(site_files, db_files):
    log('start uploading to oss')
    for option in OSS_OPTIONS:
        oss = OssHelper(option['accesskeyid'], option['accesskeysecret'],
                        option['url'], option['bucket'])
        for file in site_files:
            if not file:
                continue
            filename = os.path.basename(file)
            oss.upload(option['sitedir'].rstrip('/') + '/' + filename, file)
        for file in db_files:
            if not file:
                continue
            filename = os.path.basename(file)
            oss.upload(option['databasedir'].rstrip('/') + '/' + filename,
                       file)
    log('finished remote save to oss')
class RestoreHelper(object):
    def __init__(self):
        self.config = read_config()
        self.task = None
        self.file_obj = None
        self.file_obj_list = []  # {name: file name, size: file size, type: 'local' or 'oss' or 'qinu'}
        self.oss_conf = self.config.oss
        if self.oss_conf:
            self.oss = OssHelper(self.oss_conf.accessKey, self.oss_conf.secretKey, self.oss_conf.url, self.oss_conf.bucket)
        else:
            self.oss = None
        self.db_helper = None

    def start(self):
        print('******************* welcome to the database restore program ****************')
        return 'choice_task'

    def choice_task(self):
        print('please choose a task to restore')
        for i, task in enumerate(self.config.tasks):
            print(f'{i}) {task.name}')
        print('-1) return last step')

        task_idx = input('(choose task)->').strip()
        task_idx = int(task_idx)
        if task_idx < 0:
            return 'wait_uri'
        elif task_idx >= len(self.config.tasks):
            print('the index does not exist, please input the number again!')
            return 'choice_task'
        else:
            self.task = self.config.tasks[task_idx]
            db_type = self.task['type']
            if db_type == 'mongodb':
                self.db_helper = MongodbHelper()
            elif db_type == 'mysql':
                self.db_helper = MysqlHelper()
            else:
                raise Exception(f"unsupported db_type [{db_type}]")

            return 'get_file_list'


    def get_file_list(self):
        # First, get the list of local archive files
        archivePath = pydash.get(self.config, 'archivePath')
        if not archivePath:
            raise Exception("config is missing archivePath")
        local_dir = archivePath + "/" + self.task.name
        if os.path.exists(local_dir):
            for _dir in os.listdir(local_dir):
                self.file_obj_list.append(AttrDict({
                    "name":_dir,
                    "size":os.path.getsize(os.path.join(local_dir, _dir)),
                    "type":"local",
                    "path":os.path.join(local_dir, _dir)
                }))
        if self.oss:
            fileList = self.oss.get_file_list(os.path.join(self.config.oss.prefix, self.task.name))
            self.file_obj_list.extend(fileList)
        print('please choose one of the following files to restore')
        if not len(self.file_obj_list):
            print(f'task:{self.task.name} has no data to restore, please select another task')
            return 'choice_task'

        self.file_obj_list = sorted(self.file_obj_list, key=lambda x: x['name'])
        for i, file_obj in enumerate(self.file_obj_list):
            # print(
            #     f' {i}) {file_obj["name"]} {int(file_obj["size"]/1024/1024)}MB ({_local_or_remote})')
            print(
                f' {i}) {file_obj["name"]} {FileHelper.get_size(file_obj["size"])}  ({file_obj["type"]})')
        print('-1) return last step')
        return 'choice_file'

    def choice_file(self):
        file_idx = input('(choose file)->').strip()
        file_idx = int(file_idx)
        if file_idx < 0:
            return 'choice_task'
        elif file_idx >= len(self.file_obj_list):
            print('the index does not exist, please input again!')
            return 'choice_file'
        else:
            self.file_obj= self.file_obj_list[file_idx]
            return 'wait_uri'

    def wait_uri(self):
        # uri format  [Uniform Resource Identifier (URI): Generic Syntax](https://tools.ietf.org/html/rfc3986)
        print('please input the destination db uri, format is [scheme://][user[:[password]]@]host[:port][/schema][?attribute1=value1&attribute2=value2]')
        print(f'(such as {self.db_helper.sample()})')
        self.uri = input('(uri)->').strip()
        if len(self.uri) == 0:
            return 'wait_uri'
        return 'check_uri'

    def check_uri(self):
        # Check database connectivity and permissions
        print("now checking the uri ....")
        u = urlparse(self.uri)
        if u.scheme and u.hostname and u.path:
            return 'download_unzip'
        else:
            print('the uri format is invalid, please input again!')
            return 'wait_uri'



    def download_unzip(self):
        # Create a temporary directory; it is removed after the restore completes
        _temp_dir = ''.join(random.sample(
            string.ascii_letters + string.digits, 8))
        db_filepath = os.path.join(self.config.tmpPath.replace('./', ''), _temp_dir)
        if self.file_obj['type'] == "local":
            zip_file = self.file_obj.path
        else:
            # download from oss
            oss_path = f"{self.oss_conf.prefix}{self.task.name}/{self.file_obj['name']}"
            zip_file = os.path.join(db_filepath.replace('./', ''), self.file_obj['name'])
            print(f'download file from oss:{oss_path}')
            self.oss.download(oss_path, zip_file)

        print(f'unzip file:{zip_file}')
        self.db_file = self.db_helper.extract(zip_file, db_filepath)
        return 'exec_restore'

    def exec_restore(self):

        self.db_helper.restore(self.db_file, self.uri)
        # delete the extracted directory
        shutil.rmtree(os.path.dirname(self.db_file))
        return 'exit'

    def exit(self):
        print('exited successfully!')
        exit()


    def _get_local_file(self,_path):
        print(_path)
        _local_files = [f for f in os.listdir(_path) if f.endswith('.zip')]
        ret = []
        for f in _local_files:
            file_path = os.path.join(_path, f)
            ret.append(AttrDict({
                "name": f,
                "size": FileHelper.sizeof_fmt(os.path.getsize(file_path)),
                "type": 'local',
                "path": file_path
            }))
        return ret
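Each RestoreHelper method returns the name of the next step as a string, which implies a driver loop outside this snippet; a minimal sketch of one, assuming the process only ends when exit() is reached:

# Hypothetical driver: keep dispatching to the handler named by the previous
# handler's return value until exit() terminates the process.
if __name__ == '__main__':
    helper = RestoreHelper()
    state = 'start'
    while True:
        state = getattr(helper, state)()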