def restore_vault_data(did):
    """Restore the vault data of *did* from its recorded backup location.

    Reads the backup-info document, marks the vault as RESTORE-in-progress,
    pulls the files from Google Drive or a peer hive node, then re-imports
    the exported mongo data.

    Returns the backup-info document on success, ``None`` when no backup
    info exists or the backup type is unsupported.
    """
    info = get_vault_backup_info(did)
    if not info:
        return None
    update_vault_backup_state(did, VAULT_BACKUP_STATE_RESTORE, VAULT_BACKUP_MSG_SUCCESS)

    did_folder = HiveBackup.get_did_vault_path(did)
    if not did_folder.exists():
        create_full_path_dir(did_folder)

    if info[VAULT_BACKUP_INFO_TYPE] == VAULT_BACKUP_INFO_TYPE_GOOGLE_DRIVE:
        HiveBackup.__restore_google_drive(did_folder, info[VAULT_BACKUP_INFO_DRIVE])
    elif info[VAULT_BACKUP_INFO_TYPE] == VAULT_BACKUP_INFO_TYPE_HIVE_NODE:
        HiveBackup.__restore_hive_node(did_folder, info[VAULT_BACKUP_INFO_FTP],
                                       did, info[VAULT_BACKUP_INFO_DRIVE],
                                       info[VAULT_BACKUP_INFO_TOKEN])
    else:
        # BUG FIX: previously this branch only logged and then fell through,
        # importing whatever stale mongo export was on disk and reporting
        # SUCCESS. Stop here instead (matches the newer restore flow).
        logger.error("restore_vault_data not support backup type:" +
                     str(info[VAULT_BACKUP_INFO_TYPE]))
        update_vault_backup_state(did, VAULT_BACKUP_STATE_STOP, VAULT_BACKUP_MSG_FAILED)
        return None

    HiveBackup.import_did_mongodb_data(did)
    update_vault_backup_state(did, VAULT_BACKUP_STATE_STOP, VAULT_BACKUP_MSG_SUCCESS)
    HiveBackup.delete_did_mongodb_export_data(did)
    return info
def init_app(self, app, mode):
    """Bind the Flask app, ensure the backup root exists and, outside of
    test mode, start the backup FTP server on a background thread."""
    backup_path = Path(hive_setting.BACKUP_VAULTS_BASE_DIR)
    # BUG FIX: the original tested `backup_path.exists` without calling it.
    # A bound method object is always truthy, so `not backup_path.exists`
    # was always False and the directory was never created.
    if not backup_path.exists():
        create_full_path_dir(backup_path)
    self.app = app
    HiveBackup.mode = mode
    if mode != HIVE_MODE_TEST:
        print("hive_setting.BACKUP_VAULTS_BASE_DIR:" + hive_setting.BACKUP_VAULTS_BASE_DIR)
        self.backup_ftp = FtpServer(hive_setting.BACKUP_VAULTS_BASE_DIR,
                                    hive_setting.BACKUP_FTP_PORT)
        self.backup_ftp.max_cons = 256
        self.backup_ftp.max_cons_per_ip = 10
        # Run the FTP server off the main thread so init_app returns.
        _thread.start_new_thread(self.backup_ftp.run, ())
def put_file(self):
    """Receive a file body from the request stream and store it under the
    DID's backup directory (atomically, via a temp file + move)."""
    did, content, response = did_get_param_pre_proc(self.response, "file",
                                                   access_backup=BACKUP_ACCESS)
    if response is not None:
        return response

    # Strip any leading root so the client cannot escape the backup dir.
    file_name = filter_path_root(content["file"])
    backup_path = get_vault_backup_path(did)
    full_path_name = (backup_path / file_name).resolve()
    if not full_path_name.parent.exists():
        if not create_full_path_dir(full_path_name.parent):
            return self.response.response_err(
                SERVER_MKDIR_ERROR,
                "internal put_file error to create dir:" + full_path_name.parent.as_posix())

    # Stream into a temp file first so a failed upload never clobbers an
    # existing backup file.
    temp_file = gene_temp_file_name()
    try:
        with open(temp_file, "bw") as f:
            chunk_size = CHUNK_SIZE
            while True:
                chunk = request.stream.read(chunk_size)
                if len(chunk) == 0:
                    break
                f.write(chunk)
    except Exception as e:
        logger.error(f"exception of put_file error is {str(e)}")
        # BUG FIX: remove the partial temp file; it was previously leaked
        # on every failed upload.
        if temp_file.exists():
            temp_file.unlink()
        return self.response.response_err(SERVER_SAVE_FILE_ERROR, f"Exception: {str(e)}")

    if full_path_name.exists():
        full_path_name.unlink()
    shutil.move(temp_file.as_posix(), full_path_name.as_posix())
    return self.response.response_ok()
def get_backup_service(self):
    """Return the caller's backup-service record, creating the on-disk
    backup directory for the DID if it is missing."""
    did, content, err = did_get_param_pre_proc(self.response)
    if err:
        return self.response.response_err(
            UNAUTHORIZED, "Backup internal backup_communication_start auth failed")

    # A backup service must already be provisioned for this DID.
    info = get_vault_backup_service(did)
    if not info:
        return self.response.response_err(
            BAD_REQUEST, "There is no backup service of " + did)

    backup_path = get_vault_backup_path(did)
    if not backup_path.exists():
        create_full_path_dir(backup_path)

    # The mongo-internal id is not part of the API payload.
    del info["_id"]
    return self.response.response_ok({"backup_service": info})
def export_mongo_db(did, app_id):
    """Dump the mongo database for (did, app_id) into its export directory.

    Returns True when the dump completes with exit status 0, False when the
    export directory cannot be created or mongodump fails.
    """
    save_path = get_save_mongo_db_path(did, app_id)
    if not save_path.exists():
        if not create_full_path_dir(save_path):
            return False
    db_name = gene_mongo_db_name(did, app_id)
    # Use an argv list (shell=False) so host/port/db values are never
    # interpreted by a shell, and check the exit status instead of
    # unconditionally reporting success as the old shell-string call did.
    cmd = ["mongodump",
           "-h", str(hive_setting.MONGO_HOST),
           "--port", str(hive_setting.MONGO_PORT),
           "-d", db_name,
           "-o", str(save_path)]
    return subprocess.call(cmd) == 0
def move(self, is_copy):
    """Move or copy a file/directory inside the caller's vault file area.

    ``is_copy`` True copies (and charges the copied size against the vault
    storage quota); False moves (no quota change — same vault).
    """
    did, app_id, content, response = post_json_param_pre_proc(
        self.response, "src_path", "dst_path", access_vault=VAULT_ACCESS_WR)
    if response is not None:
        return response

    # Strip any leading root so the paths stay inside the vault directory.
    src_name = content.get('src_path')
    src_name = filter_path_root(src_name)
    dst_name = content.get('dst_path')
    dst_name = filter_path_root(dst_name)

    path = get_save_files_path(did, app_id)
    src_full_path_name = (path / src_name).resolve()
    dst_full_path_name = (path / dst_name).resolve()

    if not src_full_path_name.exists():
        return self.response.response_err(NOT_FOUND, "src_name not exists")
    # Refuse to overwrite an existing destination file (a directory dst is
    # allowed — shutil.move would move into it).
    if dst_full_path_name.exists() and dst_full_path_name.is_file():
        return self.response.response_err(METHOD_NOT_ALLOWED, "dst_name file exists")

    dst_parent_folder = dst_full_path_name.parent
    if not dst_parent_folder.exists():
        if not create_full_path_dir(dst_parent_folder):
            return self.response.response_err(
                INTERNAL_SERVER_ERROR, "make dst parent path dir error")
    try:
        if is_copy:
            if src_full_path_name.is_file():
                shutil.copy2(src_full_path_name.as_posix(),
                             dst_full_path_name.as_posix())
                file_size = os.path.getsize(dst_full_path_name.as_posix())
                inc_vault_file_use_storage_byte(did, file_size)
            else:
                shutil.copytree(src_full_path_name.as_posix(),
                                dst_full_path_name.as_posix())
                dir_size = 0.0
                # NOTE(review): floats are immutable, so get_dir_size cannot
                # update `dir_size` through this argument — 0 bytes is always
                # charged for directory copies. Confirm get_dir_size's
                # contract (it likely returns the size and the result should
                # be assigned).
                get_dir_size(dst_full_path_name.as_posix(), dir_size)
                inc_vault_file_use_storage_byte(did, dir_size)
        else:
            shutil.move(src_full_path_name.as_posix(),
                        dst_full_path_name.as_posix())
    except Exception as e:
        return self.response.response_err(INTERNAL_SERVER_ERROR, "Exception:" + str(e))

    return self.response.response_ok()
def restore_vault_data(did):
    """Restore the vault of *did* from its recorded backup source.

    Flow: mark the vault RESTORE-in-progress, fetch the files (Google Drive
    or a peer hive node), then — only if the transfer succeeded — re-import
    the exported mongo data and delete the export. The backup state is
    always returned to STOP, with the final success/failure message.

    Returns the backup-info document, or ``None`` when no backup info
    exists or the backup type is unsupported.
    """
    info = get_vault_backup_info(did)
    if not info:
        return None
    update_vault_backup_state(did, VAULT_BACKUP_STATE_RESTORE, VAULT_BACKUP_MSG_SUCCESS)

    vault_folder = get_vault_path(did)
    if not vault_folder.exists():
        create_full_path_dir(vault_folder)

    vault_backup_msg = VAULT_BACKUP_MSG_SUCCESS
    if info[VAULT_BACKUP_INFO_TYPE] == VAULT_BACKUP_INFO_TYPE_GOOGLE_DRIVE:
        HiveBackup.__restore_google_drive(vault_folder, info[VAULT_BACKUP_INFO_DRIVE])
    elif info[VAULT_BACKUP_INFO_TYPE] == VAULT_BACKUP_INFO_TYPE_HIVE_NODE:
        HiveBackup.restore_from_hive_node_start(
            vault_folder, did, info[VAULT_BACKUP_INFO_DRIVE],
            info[VAULT_BACKUP_INFO_TOKEN])
        # Tell the remote node the restore transfer is complete; a failed
        # handshake downgrades the whole restore to FAILED.
        ret = HiveBackup.restore_backup_finish(
            did, info[VAULT_BACKUP_INFO_DRIVE] + INTER_BACKUP_RESTORE_FINISH_URL,
            info[VAULT_BACKUP_INFO_TOKEN])
        if not ret:
            vault_backup_msg = VAULT_BACKUP_MSG_FAILED
    else:
        logging.getLogger("HiveBackup").error(
            "restore_vault_data not support backup type:" + info[VAULT_BACKUP_INFO_TYPE])
        info = None

    if vault_backup_msg == VAULT_BACKUP_MSG_SUCCESS:
        # Only replace the live mongo data once the file transfer succeeded.
        import_mongo_db(did)
        update_vault_backup_state(did, VAULT_BACKUP_STATE_STOP, vault_backup_msg)
        delete_mongo_db_export(did)
    else:
        update_vault_backup_state(did, VAULT_BACKUP_STATE_STOP, vault_backup_msg)
    return info
def inter_backup_ftp_start(self):
    """Provision FTP access for a node-to-node backup session and return
    the connection token plus the (sanitised) backup-service record."""
    did, content, err = did_post_json_param_pre_proc(self.response)
    if err:
        return self.response.response_err(
            UNAUTHORIZED, "Backup internal backup_communication_start auth failed")

    # A backup service must already be provisioned for this DID.
    info = get_vault_backup_service(did)
    if not info:
        return self.response.response_err(
            BAD_REQUEST, "There is no backup service of " + did)

    backup_path = get_vault_backup_path(did)
    if not backup_path.exists():
        create_full_path_dir(backup_path)

    # Register throwaway credentials with the backup FTP server (skipped
    # under test mode, where no FTP server is running).
    user, passwd = gene_vault_backup_ftp_record(did)
    if self.mode != HIVE_MODE_TEST:
        self.backup_ftp.add_user(user, passwd, backup_path, 'elradfmwMT')

    # Strip internal fields before handing the record to the peer.
    del info["_id"]
    for internal_key in (VAULT_BACKUP_SERVICE_FTP, VAULT_BACKUP_SERVICE_DATA):
        if internal_key in info:
            del info[internal_key]

    token = HiveBackup.__data_to_node_backup_token(
        hive_setting.BACKUP_FTP_PORT, user, passwd)
    return self.response.response_ok({"token": token, "backup_service": info})
def get_files(file_get_list, host, token):
    """Download each (src, dst) pair in *file_get_list* from the backup host.

    Each file is streamed to a temp file and moved into place only on
    success. Failures are logged and that entry skipped; nothing raises.
    """
    if not file_get_list:
        return
    for info in file_get_list:
        src_file = info[0]
        # BUG FIX: Path.resolve() returns a NEW path; the original called it
        # and discarded the result, leaving dst_file unresolved.
        dst_file = Path(info[1]).resolve()
        if not dst_file.parent.exists():
            if not create_full_path_dir(dst_file.parent):
                logging.getLogger("HiveBackup").error(
                    f"__get_files error mkdir :{dst_file.parent.as_posix()}, host:{host}"
                )
                continue
        temp_file = gene_temp_file_name()
        try:
            r = requests.get(host + INTER_BACKUP_FILE_URL + "?file=" + src_file,
                             stream=True,
                             headers={"Authorization": "token " + token})
            with open(temp_file, 'bw') as f:
                f.seek(0)
                for chunk in r.iter_content(chunk_size=CHUNK_SIZE):
                    if chunk:
                        f.write(chunk)
        except Exception as e:
            logging.getLogger("HiveBackup").error(
                f"__get_files exception:{str(e)}, host:{host}")
            # Guard the unlink: if requests.get itself failed, the temp file
            # was never created and an unconditional unlink would raise.
            if temp_file.exists():
                temp_file.unlink()
            continue
        if r.status_code != SUCCESS:
            logging.getLogger("HiveBackup").error(
                f"__get_files err code:{r.status_code}, host:{host}")
            temp_file.unlink()
            continue
        # Replace the destination atomically-ish: drop the old file, then
        # move the fully-downloaded temp file into place.
        if dst_file.exists():
            dst_file.unlink()
        shutil.move(temp_file.as_posix(), dst_file.as_posix())
def query_upload_get_filepath(did, app_id, file_name):
    """
    Return: full file path
    """
    err = {}
    base_path = get_save_files_path(did, app_id)
    full_path_name = (base_path / file_name).resolve()

    # Make sure the containing directory exists before touching the file.
    if not create_full_path_dir(full_path_name.parent):
        err["status_code"] = INTERNAL_SERVER_ERROR
        err["description"] = "make path dir error"
        return full_path_name, err

    if not full_path_name.exists():
        full_path_name.touch(exist_ok=True)

    # A directory at the target path cannot be used as an upload file.
    if full_path_name.is_dir():
        err["status_code"] = NOT_FOUND
        err["description"] = "file name is a directory"

    return full_path_name, err
def move_file(self, is_copy):
    """Move (or copy, when ``is_copy``) one file inside the caller's backup
    directory, overwriting any existing destination file."""
    did, content, response = did_post_json_param_pre_proc(
        self.response, "src_file", "dst_file", access_backup=BACKUP_ACCESS)
    if response is not None:
        return response

    # Strip any leading root so the paths stay inside the backup directory.
    src_name = filter_path_root(content.get('src_file'))
    dst_name = filter_path_root(content.get('dst_file'))

    backup_path = get_vault_backup_path(did)
    src_full_path_name = (backup_path / src_name).resolve()
    dst_full_path_name = (backup_path / dst_name).resolve()

    if not src_full_path_name.exists():
        return self.response.response_err(NOT_FOUND, "src_name not exists")
    # Overwrite semantics: an existing destination is removed first.
    if dst_full_path_name.exists():
        dst_full_path_name.unlink()

    dst_parent_folder = dst_full_path_name.parent
    if not dst_parent_folder.exists():
        if not create_full_path_dir(dst_parent_folder):
            return self.response.response_err(
                SERVER_MKDIR_ERROR, "move_file make dst parent path dir error")

    try:
        op = shutil.copy2 if is_copy else shutil.move
        op(src_full_path_name.as_posix(), dst_full_path_name.as_posix())
    except Exception as e:
        logger.error(f"exception of move_file error is {str(e)}")
        return self.response.response_err(SERVER_MOVE_FILE_ERROR, "Exception:" + str(e))

    return self.response.response_ok()
def init_app(self, app, mode):
    """Bind the Flask app and ensure the backup root directory exists."""
    backup_path = Path(hive_setting.BACKUP_VAULTS_BASE_DIR)
    # BUG FIX: the original tested `backup_path.exists` without calling it.
    # A bound method object is always truthy, so `not backup_path.exists`
    # was always False and the directory was never created.
    if not backup_path.exists():
        create_full_path_dir(backup_path)
    self.app = app
    HiveInternal.mode = mode