def execute(work_item, session, background=False):
    # type: (WorkItem, Session, bool) -> Dict[str, Any]
    """Submit a work item to Batfish.

    :param work_item: work to submit
    :type work_item: :py:class:`~pybatfish.client.WorkItem`
    :param session: Batfish session to use.
    :type session: :py:class:`~pybatfish.client.session.Session`
    :param background: Whether to background the job. If `True`,
        this function only returns the result of submitting the job.
    :type background: bool

    :return: If `background=True`, a dict containing a single key 'result'
        with a string description of the result. If `background=False`, a dict
        containing a single key 'status' with a string describing work status.
    """
    # Every work item must name the snapshot it targets.
    if work_item.requestParams.get(BfConsts.ARG_TESTRIG) is None:
        raise ValueError(
            'Work item {} does not include a snapshot name'.format(
                work_item.to_json()))

    request_body = {
        CoordConsts.SVC_KEY_WORKITEM: work_item.to_json(),
        CoordConsts.SVC_KEY_API_KEY: session.apiKey,
    }

    # Queue the work item with the coordinator.
    queue_response = resthelper.get_json_response(
        session, CoordConsts.SVC_RSC_QUEUE_WORK, request_body)

    if background:
        # TODO: this is ugly and messes with return types: design and write
        # async replacement after we drop 2.7 support
        return {"result": str(queue_response["result"])}

    def _poll():
        # Fetch the current (status, task detail) pair for this work item.
        reply = get_work_status(work_item.id, session)
        return (WorkStatusCode(reply[CoordConsts.SVC_KEY_WORKSTATUS]),
                reply[CoordConsts.SVC_KEY_TASKSTATUS])

    try:
        status, task_details = _poll()
        # Poll once per second until the work reaches a terminal state.
        while not WorkStatusCode.is_terminated(status):
            print_work_status(session, status, task_details)
            time.sleep(1)
            status, task_details = _poll()
        print_work_status(session, status, task_details)

        if status == WorkStatusCode.ASSIGNMENTERROR:
            raise BatfishException(
                "Work finished with status {}\n{}".format(
                    status, work_item.to_json()))
        return {"status": status}
    except KeyboardInterrupt:
        # Best-effort cancellation of the in-flight work before propagating.
        kill_response = kill_work(session, work_item.id)
        raise KeyboardInterrupt(
            "Killed ongoing work: {}. Server response: {}".format(
                work_item.id, json.dumps(kill_response)))
def _bf_init_snapshot(upload, name, overwrite, background):
    """Upload and parse a snapshot, updating the session's base snapshot.

    Generates a name when none is given, optionally deletes a pre-existing
    snapshot of the same name, uploads the file or zipped directory, and
    kicks off parsing via :py:func:`workhelper.execute`.
    """
    if bf_session.network is None:
        bf_set_network()

    if name is None:
        name = Options.default_snapshot_prefix + get_uuid()
    validate_name(name)

    if name in bf_list_snapshots():
        if not overwrite:
            raise ValueError(
                'A snapshot named {} already exists in network {}'.format(
                    name, bf_session.network))
        bf_delete_snapshot(name)

    # Directories are zipped into a temporary archive before upload.  Keep
    # the NamedTemporaryFile object referenced until the upload completes,
    # since the backing file is removed when the object is finalized.
    archive = None
    file_to_send = upload
    if os.path.isdir(upload):
        archive = tempfile.NamedTemporaryFile()
        zip_dir(upload, archive)
        file_to_send = archive.name

    upload_payload = workhelper.get_data_upload_snapshot(bf_session, name,
                                                         file_to_send)
    resthelper.get_json_response(bf_session,
                                 CoordConsts.SVC_RSC_UPLOAD_SNAPSHOT,
                                 upload_payload)
    bf_session.baseSnapshot = name

    # Parse the uploaded snapshot.
    work_item = workhelper.get_workitem_parse(bf_session, name)
    parse_result = workhelper.execute(work_item, bf_session,
                                      background=background)
    if not background:
        status = parse_result["status"]
        if WorkStatusCode(status) != WorkStatusCode.TERMINATEDNORMALLY:
            # Parsing failed: leave the session without a base snapshot.
            bf_session.baseSnapshot = None
            bf_logger.info("Default snapshot is now unset")
        else:
            bf_logger.info("Default snapshot is now set to %s",
                           bf_session.baseSnapshot)
    return parse_result
def _parse_snapshot(self, name, background, extra_args):
    # type: (str, bool, Optional[Dict[str, Any]]) -> Union[str, Dict[str, str]]
    """
    Parse specified snapshot.

    :param name: name of the snapshot to initialize
    :type name: str
    :param background: whether or not to run the task in the background
    :type background: bool
    :param extra_args: extra arguments to be passed to the parse command.
    :type extra_args: dict
    :return: name of initialized snapshot, or JSON dictionary of task status
        if background=True
    :rtype: Union[str, Dict]
    """
    parse_item = workhelper.get_workitem_parse(self, name)
    result = workhelper.execute(
        parse_item, self, background=background, extra_args=extra_args
    )
    if background:
        self.snapshot = name
        return result

    work_status = WorkStatusCode(result["status"])
    if work_status != WorkStatusCode.TERMINATEDNORMALLY:
        # Surface the parse log so the failure is actionable.
        init_log = restv2helper.get_work_log(self, name, parse_item.id)
        raise BatfishException(
            "Initializing snapshot {ss} failed with status {status}\n{log}".format(
                ss=name, status=work_status, log=init_log
            )
        )

    self.snapshot = name
    logging.getLogger(__name__).info(
        "Default snapshot is now set to %s", self.snapshot
    )
    if self.enable_diagnostics:
        warn_on_snapshot_failure(self)
    return self.snapshot
def execute(work_item, session, background=False, extra_args=None):
    # type: (WorkItem, Session, bool, Optional[Dict[str, Any]]) -> Dict[str, Any]
    """Submit a work item to Batfish.

    :param work_item: work to submit
    :type work_item: :py:class:`~pybatfish.client.WorkItem`
    :param session: Batfish session to use.
    :type session: :py:class:`~pybatfish.client.session.Session`
    :param background: Whether to background the job. If `True`,
        this function only returns the result of submitting the job.
    :type background: bool
    :param extra_args: extra arguments to be passed to Batfish.
    :type extra_args: dict

    :return: If `background=True`, a dict containing a single key 'result'
        with a string description of the result. If `background=False`, a dict
        containing a single key 'status' with a string describing work status.
    """
    if extra_args is not None:
        work_item.requestParams.update(extra_args)

    snapshot = work_item.requestParams.get(BfConsts.ARG_TESTRIG)
    if snapshot is None:
        raise ValueError(
            "Work item {} does not include a snapshot name".format(
                work_item.to_json()))

    json_data = {
        CoordConsts.SVC_KEY_WORKITEM: work_item.to_json(),
        CoordConsts.SVC_KEY_API_KEY: session.api_key,
    }

    # Submit the work item
    response = resthelper.get_json_response(session,
                                            CoordConsts.SVC_RSC_QUEUE_WORK,
                                            json_data)

    if background:
        # TODO: this is ugly and messes with return types: design and write
        # async replacement after we drop 2.7 support
        return {"result": str(response["result"])}

    answer = get_work_status(work_item.id, session)
    status = WorkStatusCode(answer[CoordConsts.SVC_KEY_WORKSTATUS])
    task_details = answer[CoordConsts.SVC_KEY_TASKSTATUS]

    # Poll with exponential backoff (capped at one second) until the work
    # reaches a terminal state.
    cur_sleep = 0.1  # seconds
    while not WorkStatusCode.is_terminated(status):
        _print_work_status(session, status, task_details)
        time.sleep(cur_sleep)
        cur_sleep = min(1.0, cur_sleep * 1.5)
        answer = get_work_status(work_item.id, session)
        status = WorkStatusCode(answer[CoordConsts.SVC_KEY_WORKSTATUS])
        task_details = answer[CoordConsts.SVC_KEY_TASKSTATUS]

    _print_work_status(session, status, task_details)

    # Handle fail conditions not producing logs
    if status in [
        WorkStatusCode.ASSIGNMENTERROR, WorkStatusCode.REQUEUEFAILURE
    ]:
        raise BatfishException(
            "Work finished with status {}\nwork_item: {}\ntask_details: {}".
            format(status, work_item.to_json(), json.loads(task_details)))

    # Handle fail condition with logs
    if status == WorkStatusCode.TERMINATEDABNORMALLY:
        log = restv2helper.get_work_log(session, snapshot, work_item.id)
        log_file_msg = ""
        if len(log) > MAX_LOG_LENGTH:
            # BUGFIX: previously this took NamedTemporaryFile().name and then
            # re-opened that path after the file object had been finalized --
            # with the default delete=True the file is removed before the
            # reopen (racy on POSIX, fails outright on Windows, and the
            # uniqueness guarantee of the name is lost).  Create the file with
            # delete=False and write through the handle instead.
            with tempfile.NamedTemporaryFile(
                    mode="w", delete=False) as log_file_handle:
                log_file_handle.write(str(log))
                log_file = log_file_handle.name
            log_file_msg = "Full log written to {}\n".format(log_file)
        raise BatfishException(
            "Work terminated abnormally\nwork_item: {item}\n\n{msg}log: {prefix}{log}"
            .format(
                item=work_item.to_json(),
                msg=log_file_msg,
                log=log[-MAX_LOG_LENGTH:],
                prefix="..." if log_file_msg else "",
            ))

    return {"status": status}
def execute(work_item, session, background=False):
    # type: (WorkItem, Session, bool) -> Dict[str, str]
    """Submit a work item to Batfish.

    :param work_item: work to submit
    :type work_item: :py:class:`~pybatfish.client.WorkItem`
    :param session: Batfish session to use.
    :type session: :py:class:`~pybatfish.client.session.Session`
    :param background: Whether to background the job. If `True`,
        this function only returns the result of submitting the job.
    :type background: bool

    :return: If `background=True`, a dict containing a single key 'result'
        with a string description of the result. If `background=False`, a dict
        containing "status" and "answer" keys, both strings.
    """
    payload = {
        CoordConsts.SVC_KEY_WORKITEM: work_item.to_json(),
        CoordConsts.SVC_KEY_API_KEY: session.apiKey
    }

    # Hand the work item to the coordinator's queue.
    queue_reply = resthelper.get_json_response(session,
                                               CoordConsts.SVC_RSC_QUEUE_WORK,
                                               payload)

    if background:
        return {"result": str(queue_reply["result"])}

    try:
        # Poll once per second, reporting progress, until the work terminates.
        while True:
            poll = get_work_status(work_item.id, session)
            status = WorkStatusCode(poll[CoordConsts.SVC_KEY_WORKSTATUS])
            task_details = poll[CoordConsts.SVC_KEY_TASKSTATUS]
            if WorkStatusCode.is_terminated(status):
                break
            print_work_status(session, status, task_details)
            time.sleep(1)
        print_work_status(session, status, task_details)

        if status == WorkStatusCode.ASSIGNMENTERROR:
            raise BatfishException("Work finished with status {}\n{}".format(
                status, work_item.to_json()))

        # Retrieve the answer object produced by the completed work.
        answer_file_name = _compute_batfish_answer_file_name(work_item)
        answer_bytes = resthelper.get_object(session, answer_file_name)

        # In Python 3.x, answer needs to be decoded before it can be used
        # for things like json.loads (<= 3.6).
        answer_string = (answer_bytes.decode(encoding="utf-8")
                         if six.PY3 else answer_bytes)
        return {"status": status, "answer": answer_string}
    except KeyboardInterrupt:
        # Cancel the in-flight work, then propagate the interrupt.
        kill_reply = kill_work(session, work_item.id)
        raise KeyboardInterrupt(
            "Killed ongoing work: {}. Server response: {}".format(
                work_item.id, json.dumps(kill_reply)))
def bf_init_snapshot(upload, name=None, overwrite=False, background=False):
    # type: (str, Optional[str], bool, bool) -> Union[str, Dict[str, str]]
    """Initialize a new snapshot.

    :param upload: snapshot to upload
    :type upload: zip file or directory
    :param name: name of the snapshot to initialize
    :type name: string
    :param overwrite: whether or not to overwrite an existing snapshot with
        the same name
    :type overwrite: bool
    :param background: whether or not to run the task in the background
    :type background: bool
    :return: name of initialized snapshot, or JSON dictionary of task status
        if background=True
    :rtype: Union[str, Dict]
    """
    if bf_session.network is None:
        bf_set_network()

    if name is None:
        name = Options.default_snapshot_prefix + get_uuid()
    validate_name(name)

    if name in bf_list_snapshots():
        if not overwrite:
            raise ValueError(
                'A snapshot named {} already exists in network {}'.format(
                    name, bf_session.network))
        bf_delete_snapshot(name)

    # Zip directory uploads; keep the temp-file object referenced until the
    # upload below completes so the backing file is not cleaned up early.
    temp_zip_file = None
    file_to_send = upload
    if os.path.isdir(upload):
        temp_zip_file = tempfile.NamedTemporaryFile()
        zip_dir(upload, temp_zip_file)
        file_to_send = temp_zip_file.name

    upload_payload = workhelper.get_data_upload_snapshot(bf_session, name,
                                                         file_to_send)
    resthelper.get_json_response(bf_session,
                                 CoordConsts.SVC_RSC_UPLOAD_SNAPSHOT,
                                 upload_payload)

    # Parse the uploaded snapshot.
    parse_item = workhelper.get_workitem_parse(bf_session, name)
    parse_answer = workhelper.execute(parse_item, bf_session,
                                      background=background)
    if background:
        bf_session.baseSnapshot = name
        return parse_answer

    final_status = WorkStatusCode(parse_answer["status"])
    if final_status != WorkStatusCode.TERMINATEDNORMALLY:
        raise BatfishException(
            'Initializing snapshot {ss} failed with status {status}'.format(
                ss=name, status=final_status))

    bf_session.baseSnapshot = name
    bf_logger.info("Default snapshot is now set to %s",
                   bf_session.baseSnapshot)
    return bf_session.baseSnapshot
def bf_fork_snapshot(base_name, name=None, overwrite=False, background=False,
                     deactivate_interfaces=None, deactivate_links=None,
                     deactivate_nodes=None, restore_interfaces=None,
                     restore_links=None, restore_nodes=None, add_files=None):
    # type: (str, Optional[str], bool, bool, Optional[List[Interface]], Optional[List[Edge]], Optional[List[str]], Optional[List[Interface]], Optional[List[Edge]], Optional[List[str]], Optional[str]) -> Union[str, Dict, None]
    """Copy an existing snapshot and deactivate or reactivate specified
    interfaces, nodes, and links on the copy.

    :param base_name: name of the snapshot to copy
    :type base_name: string
    :param name: name of the snapshot to initialize
    :type name: string
    :param overwrite: whether or not to overwrite an existing snapshot with
        the same name
    :type overwrite: bool
    :param background: whether or not to run the task in the background
    :type background: bool
    :param deactivate_interfaces: list of interfaces to deactivate in new
        snapshot
    :type deactivate_interfaces: list[Interface]
    :param deactivate_links: list of links to deactivate in new snapshot
    :type deactivate_links: list[Edge]
    :param deactivate_nodes: list of names of nodes to deactivate in new
        snapshot
    :type deactivate_nodes: list[str]
    :param restore_interfaces: list of interfaces to reactivate
    :type restore_interfaces: list[Interface]
    :param restore_links: list of links to reactivate
    :type restore_links: list[Edge]
    :param restore_nodes: list of names of nodes to reactivate
    :type restore_nodes: list[str]
    :param add_files: path to zip file or directory containing files to add
    :type add_files: str
    :return: name of initialized snapshot, JSON dictionary of task status if
        background=True, or None if the call fails
    :rtype: Union[str, Dict, None]
    """
    if bf_session.network is None:
        raise ValueError('Network must be set to fork a snapshot.')

    if name is None:
        name = Options.default_snapshot_prefix + get_uuid()
    validate_name(name)

    if name in bf_list_snapshots():
        if not overwrite:
            raise ValueError(
                'A snapshot named {} already exists in network {}'.format(
                    name, bf_session.network))
        bf_delete_snapshot(name)

    # Optionally attach extra files, base64-encoded for the JSON request body.
    encoded_file = None
    if add_files is not None:
        file_to_send = add_files
        if os.path.isdir(add_files):
            temp_zip_file = tempfile.NamedTemporaryFile()
            zip_dir(add_files, temp_zip_file)
            file_to_send = temp_zip_file.name
        if os.path.isfile(file_to_send):
            with open(file_to_send, "rb") as f:
                encoded_file = base64.b64encode(f.read()).decode('ascii')

    fork_request = {
        "snapshotBase": base_name,
        "snapshotNew": name,
        "deactivateInterfaces": deactivate_interfaces,
        "deactivateLinks": deactivate_links,
        "deactivateNodes": deactivate_nodes,
        "restoreInterfaces": restore_interfaces,
        "restoreLinks": restore_links,
        "restoreNodes": restore_nodes,
        "zipFile": encoded_file
    }
    restv2helper.fork_snapshot(bf_session, fork_request)

    # Parse the newly forked snapshot.
    parse_item = workhelper.get_workitem_parse(bf_session, name)
    parse_answer = workhelper.execute(parse_item, bf_session,
                                      background=background)
    if background:
        bf_session.baseSnapshot = name
        return parse_answer

    final_status = WorkStatusCode(parse_answer['status'])
    if final_status != WorkStatusCode.TERMINATEDNORMALLY:
        raise BatfishException(
            'Forking snapshot {ss} from {base} failed with status {status}'.
            format(ss=name, base=base_name, status=final_status))

    bf_session.baseSnapshot = name
    bf_logger.info("Default snapshot is now set to %s",
                   bf_session.baseSnapshot)
    return bf_session.baseSnapshot