def remote_save(site_files, db_files):
    """Upload backup archives to every configured remote target, then move
    the local copies into the permanent local save directories.

    site_files / db_files: lists of archive file paths produced by the
    backup steps.  Relies on module-level config (REMOTE_SAVE_TYPE,
    ERROR_COUNT, LOCAL_SAVE_PATH), log() and the remote_save_* helpers.
    """
    # Exact-match dispatch table.  The original tested membership with
    # `type not in 'ftp,email,cos,oss,onedrive'`, a SUBSTRING check that
    # wrongly accepted fragments such as 'os' or 'ftp,e'.
    handlers = {
        'ftp': remote_save_ftp,
        'email': remote_save_email,
        'oss': remote_save_oss,
        'cos': remote_save_cos,
        'onedrive': remote_save_onedrive,
    }
    for save_type in REMOTE_SAVE_TYPE:
        if save_type not in handlers:
            log('远程保存配置类型"' + str(save_type) + '"错误,应该为ftp,email,cos,oss,onedrive')
            continue
        # BUGFIX: the retry counter is now per type.  The original shared a
        # single errcount across the whole loop, so once one type used up its
        # ERROR_COUNT retries every later type was skipped entirely.
        errcount = 0
        while errcount < ERROR_COUNT:
            try:
                handlers[save_type](site_files, db_files)
                break
            except Exception as e:
                log(str(e))
                print(str(e))
                errcount = errcount + 1
                print('备份' + save_type + '方式第' + str(errcount) + '次出错')
    FileHelper.move_bulk(site_files, LOCAL_SAVE_PATH['sites'])
    FileHelper.move_bulk(db_files, LOCAL_SAVE_PATH['databases'])
def scan(self, param):
    """Probe one ip:port with a TCP connect and record it when open.

    param: dict with 'ip' and 'port' keys.  An open port is appended to
    self.savepath and inserted via SqlHelper, tagged with self.data_flag.
    On a timeout the port is probed once more best-effort (result ignored).
    Progress is written in place to stdout.
    """
    ip = param['ip']
    port = param['port']
    try:
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.connect((ip, port))
        ipinfo = '{0}:{1}\n'.format(ip, port)
        # BUGFIX: release the lock in a finally block.  The original called
        # lock.release() only on the success path, so an exception raised by
        # the file append or the DB insert left the lock held forever and
        # deadlocked every other scanning thread.
        lock.acquire()
        try:
            FileHelper.append(self.savepath, ipinfo)
            model = dict(ip=ip,
                         port=int(port),
                         flag=self.data_flag,
                         createdatetime=datetime.datetime.now())
            row_affect = SqlHelper.add(model)
        finally:
            lock.release()
        s.close()
    except socket.timeout as e:
        s.close()
        # Single best-effort retry after a timeout; any outcome is ignored.
        try:
            s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            s.connect((ip, port))
            s.close()
        except Exception as e:
            s.close()
    except Exception as e:
        s.close()
    self.scancount = self.scancount + 1
    sys.stdout.write('\r' + '已扫描:{0},剩余{1}'.format(
        self.scancount, self.taskcount - self.scancount))
    sys.stdout.flush()
def backup_db_mysql(db):
    """Dump a MySQL database with mysqldump and compress the dump file.

    db: config dict with keys database_name, host, port, username,
    password, mysqldump_path, archive_type, archive_password.
    Returns the archive path on success, or None on any failure
    (invalid archive type, dump error, or compression error).
    """
    db_filename = db['database_name'] + '_' + get_datestr() + '.sql'
    db_filepath = TEMP_SAVE_PATH + os.path.sep + db_filename
    archive_type = db['archive_type']
    # BUGFIX: exact match against the supported formats.  The original used
    # `archive_type not in 'zip,tar,gztar'`, a substring check that accepted
    # invalid fragments such as 'ar' or 'zip,t'.
    if archive_type not in ('zip', 'tar', 'gztar'):
        log('archive_type存档类型"' + archive_type + '"错误,应该为zip,tar,gztar')
        return None
    host = '' if not db['host'] else '-h ' + db['host']
    mysqldump = 'mysqldump' if not db['mysqldump_path'] else db[
        'mysqldump_path']
    # NOTE(review): the password rides on the command line and the command
    # runs through a shell string — acceptable only for trusted local config.
    cmd = '{0} {1} -P{2} -u{3} -p{4} --databases {5} > {6}'.format(
        mysqldump, host, db['port'], db['username'], db['password'],
        db['database_name'], db_filepath)
    status, result = subprocess.getstatusoutput(cmd)
    if status != 0:
        log('备份数据库{0}出错,返回值为{1},执行的命令为{2}'.format(db['database_name'],
                                                   result, cmd))
        return None
    flag, msg = FileHelper.compress(archive_type, db_filepath,
                                    TEMP_SAVE_PATH, db_filename,
                                    db['archive_password'])
    if flag:
        # msg holds the archive path on success; remove the raw .sql dump.
        FileHelper.delete(db_filepath)
        return msg
    log('打包数据库文件出错,' + msg)
    return None
def backup_db_mssql(db):
    """Back up a SQL Server database via sqlcmd and compress the .bak file.

    db: config dict with keys database_name, host, port, username,
    password, sqlcmd_path, archive_type, archive_password.
    Returns the archive path on success, or None on any failure.
    """
    db_filename = db['database_name'] + '_' + get_datestr() + '.bak'
    db_filepath = TEMP_SAVE_PATH + os.path.sep + db_filename
    archive_type = db['archive_type']
    # BUGFIX: exact match instead of the original substring check
    # (`archive_type not in 'zip,tar,gztar'`), which accepted e.g. 'ar'.
    if archive_type not in ('zip', 'tar', 'gztar'):
        log('archive_type存档类型"' + archive_type + '"错误,应该为zip,tar,gztar')
        return None
    sqlcmd = 'sqlcmd' if not db['sqlcmd_path'] else db['sqlcmd_path']
    # BUGFIX: the original built `-Q "BACKUP DATABASE x to disk="path""`,
    # nesting double quotes inside the double-quoted -Q argument, which
    # breaks shell parsing.  T-SQL wants a single-quoted string literal for
    # the disk path, and the closing double quote was also missing.
    cmd = '{0} -S {1},{2} -U {3} -P {4} -Q "BACKUP DATABASE {5} to disk=\'{6}\'"'.format(
        sqlcmd, db['host'], db['port'], db['username'], db['password'],
        db['database_name'], db_filepath)
    status, result = subprocess.getstatusoutput(cmd)
    if status != 0:
        log('备份数据库{0}出错,返回值为{1},执行的命令为{2}'.format(db['database_name'],
                                                   result, cmd))
        return None
    flag, msg = FileHelper.compress(archive_type, db_filepath,
                                    TEMP_SAVE_PATH, db_filename,
                                    db['archive_password'])
    if flag:
        # msg holds the archive path on success; remove the raw .bak file.
        FileHelper.delete(db_filepath)
        return msg
    log('打包数据库文件出错,' + msg)
    return None
def scan(self, param):
    """TCP-connect scan of one ip:port; open ports are appended to RESULT_PATH.

    On a first timeout the port is probed once more and, if open on the
    retry, recorded in self.open_ports.  Progress is printed in place.
    """
    ip = param['ip']
    port = param['port']
    try:
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.connect((ip, port))
        # Renamed from `str`, which shadowed the builtin.
        ipinfo = '{0}:{1}\n'.format(ip, port)
        FileHelper.append(RESULT_PATH, ipinfo)
        s.close()
    except socket.timeout as e:
        s.close()
        try:
            s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            s.connect((ip, port))
            # BUGFIX: the original added a dict to the set — dicts are
            # unhashable, so set.add raised TypeError, the except below
            # swallowed it, and no port was ever recorded.  Store a
            # hashable (ip, port) tuple instead.
            self.open_ports.add((ip, port))
            s.close()
        except Exception as e:
            s.close()
    except Exception as e:
        s.close()
    self.scancount = self.scancount + 1
    sys.stdout.write('\r' + '已扫描:{0},剩余{1}'.format(
        self.scancount, self.taskcount - self.scancount))
    sys.stdout.flush()
def clear_old_backup():
    """Delete expired backup files from every configured location.

    Covers remote FTP servers, the local site/database save directories,
    OSS, COS and OneDrive.  Age is decided by is_oldfile() on the file
    name.  Relies on module-level config: REMOTE_SAVE_TYPE, FTP_OPTIONS,
    LOCAL_SAVE_PATH, OSS_OPTIONS, COS_OPTIONS, ONE_DRIVE_OPTION.
    """
    if 'ftp' in REMOTE_SAVE_TYPE:
        # Remove stale backups from each remote FTP server.
        for option in FTP_OPTIONS:
            ftp = FtpHelper(option['host'], option['username'],
                            option['password'], option['port'],
                            option['pasv'])
            for filename in ftp.get_files(option['site_save_path']):
                if is_oldfile(filename):
                    ftp.delete_file(option['site_save_path'], filename)
            for filename in ftp.get_files(option['db_save_path']):
                if is_oldfile(filename):
                    ftp.delete_file(option['db_save_path'], filename)
            ftp.quit()
    # Remove stale local site backups.
    for root, dirs, files in os.walk(LOCAL_SAVE_PATH['sites']):
        for filename in files:
            if is_oldfile(filename):
                FileHelper.delete(os.path.join(root, filename))
    # Remove stale local database backups.
    for root, dirs, files in os.walk(LOCAL_SAVE_PATH['databases']):
        for filename in files:
            if is_oldfile(filename):
                FileHelper.delete(os.path.join(root, filename))
    if 'oss' in REMOTE_SAVE_TYPE:
        # Remove stale OSS objects.
        for option in OSS_OPTIONS:
            oss = OssHelper(option['accesskeyid'], option['accesskeysecret'],
                            option['url'], option['bucket'])
            for file in oss.get_file_list(
                    option['sitedir'].rstrip('/') + '/') + oss.get_file_list(
                        option['databasedir'].rstrip('/') + '/'):
                if is_oldfile(os.path.basename(file)):
                    oss.delete(file)
    # BUGFIX: the original tested `if 'css' in REMOTE_SAVE_TYPE` (typo), so
    # COS cleanup never executed for a correctly configured 'cos' entry.
    if 'cos' in REMOTE_SAVE_TYPE:
        # Remove stale COS objects.
        for option in COS_OPTIONS:
            cos = CosHelper(option['accesskeyid'], option['accesskeysecret'],
                            option['region'], option['bucket'])
            for file in cos.get_file_list(
                    option['sitedir'].rstrip('/') + '/') + cos.get_file_list(
                        option['databasedir'].rstrip('/') + '/'):
                if is_oldfile(os.path.basename(file)):
                    cos.delete(file)
    if 'onedrive' in REMOTE_SAVE_TYPE:
        # Remove stale OneDrive files (entries are dicts with a 'name' key).
        for option in ONE_DRIVE_OPTION:
            od = OneDriveHelper(option['name'])
            for file in od.get_file_list(option['sitedir'].rstrip('/') + '/'):
                if is_oldfile(os.path.basename(file['name'])):
                    od.delete(os.path.join(option['sitedir'], file['name']))
            for file in od.get_file_list(option['databasedir'].rstrip('/') +
                                         '/'):
                if is_oldfile(os.path.basename(file['name'])):
                    od.delete(
                        os.path.join(option['databasedir'], file['name']))
    log('清除旧备份文件 完成')
def backup_site():
    """Archive every configured site directory into TEMP_SAVE_PATH.

    For 'ftp' sites the remote directory is downloaded first and the
    local copy is removed after archiving.  Returns the list of archive
    paths that were created; sites with bad config are logged and skipped.
    """
    site_files = []
    log('开始备份站点')
    for site in SITES:
        if not site:
            continue
        site_path = site['path']
        if site['type'] == 'ftp':
            ftp = FtpHelper(site['host'], site['username'], site['password'],
                            site['port'])
            log('开始下载FTP远程目录:' + site['path'])
            ftp.download_dir(
                os.path.join(TEMP_SAVE_PATH, os.path.basename(site['path'])),
                site['path'])
            log('下载FTP远程目录结束')
            site_path = os.path.join(TEMP_SAVE_PATH,
                                     os.path.basename(site['path']))
        archive_type = site['archive_type']
        if not os.path.exists(site_path):
            log('站点路径%s不存在' % site_path)
            continue
        # BUGFIX: exact match instead of the original substring check
        # (`archive_type not in 'zip,tar,gztar'`), which accepted e.g. 'ar'.
        if archive_type not in ('zip', 'tar', 'gztar'):
            log('archive_type存档类型"' + archive_type + '"错误,应该为zip,tar,gztar')
            continue
        dirname = os.path.basename(site_path)
        site_filename = dirname + '_' + get_datestr()
        flag, msg = FileHelper.compress(archive_type, site_path,
                                        TEMP_SAVE_PATH, site_filename,
                                        site['archive_password'])
        if site['type'] == 'ftp':
            # The downloaded FTP copy is only needed for archiving.
            FileHelper.delete(site_path)
        if not flag:
            log('创建' + site_path + '存档出错:' + msg)
            continue
        site_files.append(msg)  # msg is the archive path on success
    log('备份站点结束')
    return site_files
def start():
    """Run one full backup pass and log the elapsed wall-clock seconds."""
    begin = datetime.datetime.now()
    log('开始备份')
    try:
        backup()
    except Exception as e:
        # Best-effort error report; the timing line below still runs.
        FileHelper.error('哎呀 出错了:' + str(e))
    elapsed = datetime.datetime.now() - begin
    log('本次备份完成,耗时{0}秒'.format(elapsed.seconds))
def check_proxy_and_save(self, ip, port):
    """Validate one proxy candidate and persist it when usable.

    A valid ip:port is added to self.proxies (formatted via
    config.PROXY_DATA_TEMPLATE) and appended to self.savepath under the
    shared lock; the verdict is printed either way.
    """
    if self.proxy_valid(ip, port):
        self.proxies.add(
            config.PROXY_DATA_TEMPLATE.format(ip=ip, port=port))
        # BUGFIX: release the lock in a finally block.  The original
        # released it only on success, so a failed file append left the
        # lock held forever and blocked every other checker thread.
        lock.acquire()
        try:
            FileHelper.append(self.savepath, '{0}:{1}\n'.format(ip, port))
        finally:
            lock.release()
        print('{0}:{1} 有效√'.format(ip, port))
    else:
        print('{0}:{1} 无效×'.format(ip, port))
def remote_save(site_files, db_files):
    """Send backup archives to each configured remote target, then move the
    local copies into the permanent local save directories.

    site_files / db_files: lists of archive file paths.  Uses module-level
    REMOTE_SAVE_TYPE, LOCAL_SAVE_PATH, log() and the remote_save_* helpers.
    """
    # Exact-match dispatch table.  The original tested membership with
    # `type not in 'ftp,email,cos,oss'`, a SUBSTRING check that wrongly
    # accepted fragments such as 'os'; it also shadowed the builtin `type`.
    handlers = {
        'ftp': remote_save_ftp,
        'email': remote_save_email,
        'oss': remote_save_oss,
        'cos': remote_save_cos,
    }
    for save_type in REMOTE_SAVE_TYPE:
        if save_type not in handlers:
            # BUGFIX: the original message listed only ftp,email even though
            # cos and oss are also valid.
            log('远程保存配置类型"' + str(save_type) + '"错误,应该为ftp,email,cos,oss')
            continue
        handlers[save_type](site_files, db_files)
    FileHelper.move_bulk(site_files, LOCAL_SAVE_PATH['sites'])
    FileHelper.move_bulk(db_files, LOCAL_SAVE_PATH['databases'])
def prn(self, pkt):
    """Scapy packet callback: record each new ip:port hit on a watched port.

    Unseen pairs are added to self.result, appended to self.savepath and
    inserted into the database tagged with self.data_flag.
    """
    src_ip = pkt.sprintf('%IP.src%')
    src_port = pkt.sprintf('%IP.sport%')
    info = '{0}:{1}'.format(src_ip, src_port)
    line = '{0}:{1}\n'.format(src_ip, src_port)
    # sprintf yields strings, so the port must be cast to int before the
    # membership test against self.portlist.
    if int(src_port) in self.portlist and info not in self.result:
        self.result.add(info)
        FileHelper.append(self.savepath, line)
        SqlHelper.add(
            dict(ip=src_ip,
                 port=int(src_port),
                 flag=self.data_flag,
                 createdatetime=datetime.datetime.now()))
def __init__(self, scannerparam):
    """Store the scan parameters and prepare the output path and run tag."""
    self.scannerparam = scannerparam
    self.scancount = 0
    # Timestamp tag grouping all rows written by this run.
    self.data_flag = datetime.datetime.now().strftime('%Y%m%d%H%M%S')
    # Default save location; an explicit scannerparam.save overrides it.
    self.savepath = FileHelper.get_save_path()
    if scannerparam.save:
        self.savepath = scannerparam.save
def __init__(self, scannerparam):
    """Store the scan parameters and set up counters, result set and paths."""
    #self.ifacestr = "Intel(R) Dual Band Wireless-AC 3160"
    self.scannerparam = scannerparam
    self.sendcount = 0
    self.result = set()
    # Timestamp tag grouping all rows written by this run.
    self.data_flag = datetime.datetime.now().strftime('%Y%m%d%H%M%S')
    # Default save location; an explicit scannerparam.save overrides it.
    self.savepath = FileHelper.get_save_path()
    if scannerparam.save:
        self.savepath = scannerparam.save
def auth(self):
    """Run the interactive OneDrive OAuth flow and cache the tokens on disk.

    Prints the authorization URL, asks the user to paste back the `code`
    query parameter, exchanges it for an access/refresh token, resolves
    the per-account OneDrive API URL via the discovery service, and writes
    everything into the JSON file at self.token_filepath keyed by
    self.name.
    """
    print(
        auth_url.format(client_id=self.client_id,
                        redirect_uri=redirect_uri))
    # Instruction (zh): open the link in a browser, log in, then copy the
    # `code` value from the redirected URL (not the &session_state part).
    print(
        '请用浏览器打开上面链接,登陆等待跳转后,复制URL地址中code参数的值,注意不要多复制后面的&session_state参数')
    code = input('请输入code:')
    # Form-encoded body for the authorization-code token exchange.
    auth_param = 'client_id={client_id}&client_secret={client_secret}&code={code}&grant_type=authorization_code&redirect_uri={redirect_uri}&resource=https://api.office.com/discovery/'
    auth_param = auth_param.format(client_id=self.client_id,
                                   code=code,
                                   client_secret=self.client_secret,
                                   redirect_uri=redirect_uri)
    headers = {'content-type': 'application/x-www-form-urlencoded'}
    response = requests.post(api_auth_url, data=auth_param, headers=headers)
    data = json.loads(response.text)
    access_token = data['access_token']
    # Trade the refresh token for a discovery-service token, look up this
    # account's OneDrive API base URL/id, then get a token scoped to it.
    discovery_token = self.refresh_token(data.get('refresh_token'),
                                         api_discovery_id)
    url, urlid = self.get_onedrive_api_urls(
        discovery_token.get('access_token'))
    token = self.refresh_token(discovery_token.get('refresh_token'), urlid)
    if os.path.exists(self.token_filepath):
        # Merge into the existing token file so other accounts are kept.
        data = FileHelper.open_json(self.token_filepath)
        try:
            data[self.name] = token
            data[self.name]['api_url'] = url
            data[self.name]['api_url_id'] = urlid
        except:
            # NOTE(review): fallback when the loaded JSON is not a
            # subscriptable dict — the file is rebuilt from scratch.
            token['api_url'] = url
            token['api_url_id'] = urlid
            data = {self.name: token}
    else:
        token['api_url'] = url
        token['api_url_id'] = urlid
        data = {self.name: token}
    FileHelper.write_json(self.token_filepath, data)
    #print(data[self.name])
    print('保存token完成')
def _get_local_file(self, _path):
    """List the .zip files directly under *_path* as AttrDict records.

    Each record carries name, human-readable size, type 'local' and the
    absolute file path.
    """
    print(_path)
    entries = []
    for filename in os.listdir(_path):
        if not filename.endswith('.zip'):
            continue
        full_path = os.path.join(_path, filename)
        entries.append(
            AttrDict({
                "name": filename,
                "size": FileHelper.sizeof_fmt(os.path.getsize(full_path)),
                "type": 'local',
                "path": full_path
            }))
    return entries
def get_token(self):
    """Return a valid OneDrive access token for this account, refreshing it
    when the cached one has expired.

    Reads the token entry keyed by self.name from the JSON file at
    self.token_filepath.  Returns the access token string, or implicitly
    None when the token file is missing or the refresh yields no
    access_token.
    """
    if os.path.exists(self.token_filepath):
        token = FileHelper.open_json(self.token_filepath).get(self.name)
        # expires_on is stored as a string epoch timestamp.
        if time.time() > float(token.get('expires_on')):
            print('token已过期 重新获取')
            # Re-run the refresh chain: refresh token -> discovery token ->
            # API URL lookup -> token scoped to that API URL.
            refresh_token = token.get('refresh_token')
            discovery_token = self.refresh_token(refresh_token,
                                                 api_discovery_id)
            url, urlid = self.get_onedrive_api_urls(
                discovery_token.get('access_token'))
            token = self.refresh_token(
                discovery_token.get('refresh_token'), urlid)
            if token.get('access_token'):
                # Persist the fresh token (plus API URL info) back to disk.
                data = FileHelper.open_json(self.token_filepath)
                data[self.name] = token
                data[self.name]['api_url'] = url
                data[self.name]['api_url_id'] = urlid
                FileHelper.write_json(self.token_filepath, data)
                return token.get('access_token')
            else:
                # Refresh failed: report and fall through (returns None).
                print('获取失败,没有access_token:')
                print(token)
        else:
            # Cached token is still valid.
            return token.get('access_token')
def get_api_url(self):
    """Return the OneDrive API base URL cached in the token file for this account."""
    token_data = FileHelper.open_json(self.token_filepath)
    return token_data.get(self.name).get('api_url')
def remote_save_email(site_files, db_files):
    """Email every backup archive to the configured receivers.

    When option['partSize'] is set, each archive is first split into
    size-limited parts via FileHelper.compress (one mail per part);
    otherwise the archive is attached whole.  Uses module-level
    EMAIL_OPTIONS_SENDERS, EMAIL_OPTIONS_RECEIVERS, TEMP_SAVE_PATH, log().
    """
    log('开始发送到Email')
    for option in EMAIL_OPTIONS_SENDERS:
        email = EmailHelper(option['host'], option['username'],
                            option['password'], option['port'],
                            option['is_ssl'])
        for file in site_files:
            if not file:
                continue
            if option['partSize']:
                part_file_path = os.path.join(TEMP_SAVE_PATH, 'EmailPart')
                if not os.path.exists(part_file_path):
                    os.makedirs(part_file_path)
                flag, msg = FileHelper.compress(option['archive_type'], file,
                                                part_file_path,
                                                os.path.basename(file), None,
                                                None, None, None,
                                                option['partSize'])
                part_files = FileHelper.get_file_list(part_file_path)
                for part_file in part_files:
                    flag, msg = email.send(
                        '新的站点备份', '站点备份:' + os.path.basename(part_file),
                        '站点备份:' + os.path.basename(part_file),
                        EMAIL_OPTIONS_RECEIVERS, [part_file])
                FileHelper.delete(part_file_path)
            else:
                flag, msg = email.send('新的站点备份',
                                       '站点备份:' + os.path.basename(file),
                                       '站点备份:' + os.path.basename(file),
                                       EMAIL_OPTIONS_RECEIVERS, [file])
            if flag:
                log('使用 {0} 发送邮件 {1} 成功'.format(option['username'], file))
            else:
                # BUGFIX: the format string was missing the {2} placeholder,
                # so the failure reason (msg) was silently dropped.
                log('使用 {0} 发送邮件 {1} 失败,原因:{2}'.format(
                    option['username'], file, msg))
        for file in db_files:
            if not file:
                continue
            if option['partSize']:
                # BUGFIX: the original checked os.path.exists(part_file_path)
                # BEFORE assigning part_file_path, which raised NameError when
                # no site file had been processed first (and otherwise checked
                # a stale path).  Assign first, mirroring the site branch.
                part_file_path = os.path.join(TEMP_SAVE_PATH, 'EmailPart')
                if not os.path.exists(part_file_path):
                    os.makedirs(part_file_path)
                flag, msg = FileHelper.compress(option['archive_type'], file,
                                                part_file_path,
                                                os.path.basename(file), None,
                                                None, None, None,
                                                option['partSize'])
                part_files = FileHelper.get_file_list(part_file_path)
                for part_file in part_files:
                    flag, msg = email.send(
                        '新的数据库备份', '数据库备份:' + os.path.basename(part_file),
                        '数据库备份:' + os.path.basename(part_file),
                        EMAIL_OPTIONS_RECEIVERS, [part_file])
                FileHelper.delete(part_file_path)
            else:
                flag, msg = email.send('新的数据库备份',
                                       '数据库备份:' + os.path.basename(file),
                                       '数据库备份:' + os.path.basename(file),
                                       EMAIL_OPTIONS_RECEIVERS, [file])
            if flag:
                log('使用 {0} 发送邮件 {1} 成功'.format(option['username'], file))
            else:
                # BUGFIX: same missing-{2} defect as above.
                log('使用 {0} 发送邮件 {1} 失败,原因:{2}'.format(
                    option['username'], file, msg))
        email.quit()
    log('发送到Email 完成')
def __init__(self, scannerparam):
    """Store the scan parameters and choose where results are saved."""
    self.scannerparam = scannerparam
    self.scancount = 0
    # Default save location; an explicit scannerparam.save overrides it.
    self.savepath = FileHelper.get_save_path()
    if scannerparam.save:
        self.savepath = scannerparam.save