def backupConfig(self):
    """
    Create a backup of the encfs config file inside the local config
    folder, so the user can restore it if the original config file
    gets deleted or corrupted.
    """
    cfg = self.configFile()
    if not os.path.isfile(cfg):
        logger.warning('No encfs config in %s. Skip backup of config file.' % cfg, self)
        return

    backup_folder = self.config.encfsconfigBackupFolder(self.profile_id)
    tools.makeDirs(backup_folder)

    # newest backup first (names embed a sortable YYYYmmddHHMM stamp)
    previous = sorted(os.listdir(backup_folder), reverse=True)
    if previous:
        newest = os.path.join(backup_folder, previous[0])
        # nothing to do when the config is identical to the last backup
        if tools.md5sum(cfg) == tools.md5sum(newest):
            logger.debug('Encfs config did not change. Skip backup', self)
            return

    stamp = datetime.now().strftime('%Y%m%d%H%M')
    target = os.path.join(backup_folder, '.'.join((os.path.basename(cfg), stamp)))
    logger.debug('Create backup of encfs config %s to %s' % (cfg, target), self)
    shutil.copy2(cfg, target)
def backupConfig(self):
    """
    Back up the encfs config file into the local config folder so it
    can be restored after accidental deletion or corruption.
    """
    cfg = self.configFile()
    if not os.path.isfile(cfg):
        logger.warning('No encfs config in %s. Skip backup of config file.' % cfg, self)
        return

    backup_folder = self.config.encfsconfigBackupFolder(self.profile_id)
    tools.makeDirs(backup_folder)

    existing = os.listdir(backup_folder)
    existing.sort(reverse=True)
    if existing:
        last_backup = os.path.join(backup_folder, existing[0])
        # skip the copy entirely when the config hasn't changed
        if tools.md5sum(cfg) == tools.md5sum(last_backup):
            logger.debug('Encfs config did not change. Skip backup', self)
            return

    suffix = datetime.now().strftime('%Y%m%d%H%M')
    new_backup = os.path.join(backup_folder,
                              '.'.join((os.path.basename(cfg), suffix)))
    logger.debug('Create backup of encfs config %s to %s' % (cfg, new_backup), self)
    shutil.copy2(cfg, new_backup)
def test_md5sum(self):
    # md5 of the bytes b'foo' is a well-known constant
    with NamedTemporaryFile() as tmp:
        tmp.write(b'foo')
        tmp.flush()
        self.assertEqual(tools.md5sum(tmp.name),
                         'acbd18db4cc2f85cedef654fccc4a4d8')
def get_file(url): file_info = {} if mutex.acquire(1): time_stamp = time.time() mutex.release() file_name = str(long(time_stamp * 1000000)) + ".apk" file_path = os.path.join(TMP_FOLDER, file_name) try: os.chdir(TMP_FOLDER) """" res_code = subprocess.call("wget --tries=%s -O %s %s" %(RETRY_TIME, file_name, url)) """ res_code = subprocess.call(["wget", "--tries={0}".format(RETRY_TIME), "-nv", "-O", file_name, url]) if res_code: del_tmpfile(file_path) logger.error("wget res code: {0}".format(res_code)) return False # in case wget have not create file because of the function of buffer while not os.path.isfile(file_path): time.sleep(0.1) ft = get_file_type(file_path) if not ft: del_tmpfile(file_path) logger.error("fail to get file type") return False if not "Java" in ft and not "Zip" in ft: del_tmpfile(file_path) logger.error("file type not match:%s -> %s" %(file_path, ft)) return False file_info["name"] = file_path md5 = tools.md5sum(file_path) if not md5: del_tmpfile(file_path) logger.error("fail to get file md5") return False if check_dup(md5): del_tmpfile(file_path) logger.error("file dup: %s -- %s" %(file_path, md5)) return False file_info["md5"] = md5 file_info["url"] = url file_info["date"] = time.strftime( "%Y%m%d-%H:%M:%S", time.localtime()) file_info["type_tag"] = "none" file_info["scan_status"] = "none" file_info["archive_flag"] = "none" except Exception, e: logger.error(str(e)) return False
def main():
    """Delete every file under TMP_FOLDER whose md5 is not in the db."""
    file_list = walk_dir(TMP_FOLDER)
    count_all = len(file_list)
    count_notfound = 0
    for path in file_list:
        digest = md5sum(path)
        if check_db(digest):
            continue
        count_notfound += 1
        logger.info("not found {0} in db".format(path))
        del_file(path)
    logger.info("ALL: {0}, NOTFOUND: {1}".format(count_all, count_notfound, ))
def main():
    """Report how many files under TMP_FOLDER have their md5 in the db."""
    file_list = walk_dir(TMP_FOLDER)
    count_all = len(file_list)
    count_found = 0
    count_notfound = 0
    for path in file_list:
        digest = md5sum(path)
        if not check_db(digest):
            count_notfound += 1
        else:
            count_found += 1
            logger.info("found {0} in db".format(digest))
    logger.info("ALL: {0}, FOUND: {1} NOTFOUND: {2}".format(count_all, count_found, count_notfound))
def check_cons():
    """Check db records against local files: existence and md5 consistency.

    Logs a summary line with totals for missing and inconsistent files.
    """
    count_all = 0
    count_notfound = 0
    count_uncons = 0
    file_list = get_file_list()
    count_all = len(file_list)
    for i in file_list:
        # skip malformed records that lack the expected keys
        if not "md5" in i.keys() or not "name" in i.keys():
            logger.warning("unexpected file: {0}".format(i))
            continue
        md5_db = i["md5"]
        file_path = i["name"]
        # FIX: check existence BEFORE hashing (the original called
        # md5sum on a possibly-missing file) and skip the consistency
        # compare for missing files (the original also counted a
        # missing file as inconsistent).
        if not os.path.isfile(file_path):
            logger.error("{0} not exist.".format(file_path))
            count_notfound += 1
            continue
        md5_local = md5sum(file_path)
        if md5_db != md5_local:
            logger.error("{0} unconsistent.".format(file_path))
            count_uncons += 1
            continue
    logger.info("ALL: {0}, NOTFOUND: {1}, UNCONS: {2}".format(count_all, count_notfound, count_uncons))
def uploadFile(sock, FileSrc, FileDst):
    """Send FileSrc over sock (header, chunked body, EOF marker).

    Args:
        sock: connected socket-like object with a send() method
        FileSrc: path of the local file to upload
        FileDst: destination path embedded in the file header
    Returns:
        cfg.CMD_SUCC on success, cfg.CMD_FAIL on any exception.
    """
    try:
        file_md5 = md5sum(FileSrc)
        filename = os.path.basename(FileSrc)
        # FIX: the original leaked the file handle (open() without
        # close()); the context manager closes it on every path.
        with open(FileSrc, 'rb') as f:
            sock.send(genFileHead(filename, file_md5, FileDst))
            bytes_sent = 0  # renamed: original `bytes` shadowed the builtin
            while 1:
                chunk = f.read(cfg.DEFAULT_RECV)
                if not chunk:
                    break
                bytes_sent = bytes_sent + len(chunk)
                sock.send(chunk)
        # end-of-file marker tells the receiver the transfer is complete
        sock.send(cfg.TIP_INFO_EOF)
        rtn = cfg.CMD_SUCC
    except Exception as e:
        rtn = cfg.CMD_FAIL
        print(traceback.format_exc())
    finally:
        # NOTE: return-in-finally is kept from the original; it guarantees
        # rtn is returned even if an exception escaped the except clause.
        return rtn
def test_backupConfig(self):
    # after backupConfig, an exact copy of the config must exist
    self.sn.backupConfig(self.sid)
    backup = self.sid.path('config')
    self.assertIsFile(backup)
    self.assertEqual(tools.md5sum(backup), tools.md5sum(self.cfgFile))
def test_backupConfig(self):
    # backupConfig must produce a byte-identical copy of the config file
    self.sn.backupConfig(self.sid)
    backup = self.sid.path('config')
    self.assertTrue(os.path.isfile(backup))
    self.assertEqual(tools.md5sum(backup), tools.md5sum(self.cfgFile))
def dispatchObject(object, version):
    """
    Publish a release version.

    Zips every service under .../Latest/<object>/<os>/ and moves the
    archives (plus an .md5 sidecar file for each) into
    .../Release/<object>/<version>/. An existing release directory is
    backed up first and restored if anything fails.

    Args:
        object: the release target to publish (e.g. 'server')
        version: the version string to publish (e.g. '1.0.0.0')

    Returns:
        True: at least one archive was published
        False: there was nothing to publish

    NOTE(review): the parameter name `object` shadows the Python builtin;
    kept as-is because callers may pass it by keyword.
    """
    try:
        ## like: .../Release/server/1.0.0.0
        dest_path = cfg.DEPLOY_PATH_RELEASE + cfg.SEP + object + cfg.SEP + version
        back_flag = False
        ## backup: move an existing release dir aside (suffix includes the
        ## PID so concurrent runs don't collide) before recreating it
        if os.path.isdir(dest_path):
            back_path = dest_path + '_bak.' + cfg.PID
            shutil.move(dest_path, back_path)
            back_flag = True
        os.mkdir(dest_path)
        count = 0  # number of archives actually published
        ## like: .../Latest/server
        object_path = cfg.DEPLOY_PATH_LATEST + cfg.SEP + object
        if os.path.isdir(object_path):
            os_list = os.listdir(object_path)
            for os_name in os_list:
                ## like: .../Latest/server/linux64
                os_path = object_path + cfg.SEP + os_name
                if os.path.isdir(os_path):
                    srv_list = os.listdir(os_path)
                    for srv_name in srv_list:
                        ## like: .../Latest/server/linux64/sysagent
                        srv_path = os_path + cfg.SEP + srv_name
                        # archive name encodes both service and OS,
                        # e.g. sysagent.linux64.zip
                        zip_filename = srv_name + '.' + os_name + cfg.FILE_SUF_ZIP
                        zip_filepathSrc = os_path + cfg.SEP + zip_filename
                        zip_dir([(srv_path, cfg.FLAG_NULL)], zip_filepathSrc)
                        zip_filepathDst = dest_path + cfg.SEP + zip_filename
                        if os.path.isfile(zip_filepathDst):
                            os.remove(zip_filepathDst)
                        shutil.move(zip_filepathSrc, zip_filepathDst)
                        count += 1
                        # write the archive's md5 into a sidecar file so
                        # downloads can be verified
                        md5_filepath = zip_filepathDst + cfg.FILE_SUF_MD5
                        if os.path.isfile(md5_filepath):
                            os.remove(md5_filepath)
                        with open(md5_filepath, 'w') as f:
                            f.write(md5sum(zip_filepathDst))
    except Exception as e:
        ## restore: put the previous release back before aborting
        if back_flag:
            if os.path.isdir(dest_path):
                shutil.rmtree(dest_path)
            shutil.move(back_path, dest_path)
        print(traceback.format_exc())
        info = "Dispatch Error"
        doExit(1, info)
    else:
        if count > 0:
            ## drop backup: publish succeeded, old release no longer needed
            if back_flag:
                shutil.rmtree(back_path)
            return True
        else:
            ## restore ?? not yet
            # NOTE(review): when nothing was published the backup (if any)
            # is neither restored nor dropped — looks like a known gap.
            print("Dispatch nothing !!")
            return False