Ejemplo n.º 1
0
def bf_extract_answer_summary(answer_dict):
    """Extract the summary from the answer of a previously asked question.

    :param answer_dict: raw answer dictionary returned by Batfish
    :raises BatfishException: if the answer status is not SUCCESS, or if
        no summary is present in the answer
    :return: the value stored under the ``summary`` key
    """
    if answer_dict.get("status") != "SUCCESS":
        raise BatfishException("Question was not answered successfully")
    if "summary" in answer_dict:
        return answer_dict["summary"]
    raise BatfishException("Summary not found in the answer")
Ejemplo n.º 2
0
def _get_snapshot_parse_status():
    # type: () -> Dict[str, str]
    """
    Get parsing and conversion status for files and nodes in the current snapshot.

    Failures are logged as warnings and an empty mapping is returned, so
    callers never see an exception from this helper.

    :return: dictionary of files and nodes to parse/convert status
    :rtype: dict
    """
    status_map = {}  # type: Dict[str, str]
    try:
        ans = _INIT_INFO_QUESTION.answer()
        if not isinstance(ans, Answer):
            raise BatfishException(
                "question.answer() did not return an Answer: {}".format(ans))
        if 'answerElements' not in ans:
            raise BatfishException('Invalid answer format for init info')
        elements = ans['answerElements']
        if not len(elements):
            raise BatfishException('Invalid answer format for init info')
        # The first answer element carries both parse and conversion status.
        status_map = elements[0].get('parseStatus', {})
    except BatfishException as err:
        bf_logger.warning("Failed to check snapshot init info: %s", err)

    return status_map
Ejemplo n.º 3
0
def get_snapshot_parse_status(session):
    # type: (Session) -> Dict[str, str]
    """
    Get parsing and conversion status for files and nodes in the current snapshot.

    Failures are logged as warnings and an empty mapping is returned, so
    callers never see an exception from this helper.

    :param session: Batfish session to use for getting snapshot parse status
    :type session: :class:`~pybatfish.client.session.Session`
    :return: dictionary of files and nodes to parse/convert status
    :rtype: dict
    """
    status_map = {}  # type: Dict[str, str]
    try:
        ans = QuestionBase(_INIT_INFO_QUESTION, session).answer()
        if not isinstance(ans, Answer):
            raise BatfishException(
                "question.answer() did not return an Answer: {}".format(ans))
        if 'answerElements' not in ans:
            raise BatfishException('Invalid answer format for init info')
        elements = ans['answerElements']
        if not len(elements):
            raise BatfishException('Invalid answer format for init info')
        # The first answer element carries both parse and conversion status.
        status_map = elements[0].get('parseStatus', {})
    except BatfishException as err:
        logging.getLogger(__name__).warning(
            "Failed to check snapshot init info: %s", err)

    return status_map
Ejemplo n.º 4
0
def execute(work_item, session, background=False):
    # type: (WorkItem, Session, bool) -> Dict[str, Any]
    """Submit a work item to Batfish.

    :param work_item: work to submit
    :type work_item: :py:class:`~pybatfish.client.WorkItem`
    :param session: Batfish session to use.
    :type session: :py:class:`~pybatfish.client.session.Session`
    :param background: Whether to background the job. If `True`,
        this function only returns the result of submitting the job.
    :type background: bool

    :return: If `background=True`, a dict containing a single key 'result' with
    a string description of the result. If `background=False`, a dict containing
    a single key 'status' with a string describing work status.
    """
    # Every work item must name the snapshot it operates on.
    snapshot = work_item.requestParams.get(BfConsts.ARG_TESTRIG)
    if snapshot is None:
        raise ValueError('Work item {} does not include a snapshot name'.format(
            work_item.to_json()))

    json_data = {
        CoordConsts.SVC_KEY_WORKITEM: work_item.to_json(),
        CoordConsts.SVC_KEY_API_KEY: session.apiKey,
    }

    # Submit the work item
    response = resthelper.get_json_response(
        session, CoordConsts.SVC_RSC_QUEUE_WORK, json_data)

    if background:
        # TODO: this is ugly and messes with return types: design and write async replacement
        # After we drop 2.7 support
        return {"result": str(response["result"])}

    try:
        # Poll the coordinator once per second until the work reaches a
        # terminal state, echoing progress to the user on each iteration.
        answer = get_work_status(work_item.id, session)
        status = WorkStatusCode(answer[CoordConsts.SVC_KEY_WORKSTATUS])
        task_details = answer[CoordConsts.SVC_KEY_TASKSTATUS]

        while not WorkStatusCode.is_terminated(status):
            print_work_status(session, status, task_details)
            time.sleep(1)
            answer = get_work_status(work_item.id, session)
            status = WorkStatusCode(answer[CoordConsts.SVC_KEY_WORKSTATUS])
            task_details = answer[CoordConsts.SVC_KEY_TASKSTATUS]

        # Print the final (terminal) status as well.
        print_work_status(session, status, task_details)

        if status == WorkStatusCode.ASSIGNMENTERROR:
            raise BatfishException(
                "Work finished with status {}\n{}".format(status,
                                                          work_item.to_json()))
        return {"status": status}

    except KeyboardInterrupt:
        # Ctrl-C while waiting: ask the server to kill the job, then re-raise
        # so the caller knows the work was aborted.
        response = kill_work(session, work_item.id)
        raise KeyboardInterrupt(
            "Killed ongoing work: {}. Server response: {}".format(
                work_item.id, json.dumps(response)))
Ejemplo n.º 5
0
def bf_run_analysis(name, snapshot, reference_snapshot=None):
    # type: (str, str, Optional[str]) -> Any
    """Run the named analysis on a snapshot and return its answers.

    :param name: name of the analysis to run
    :param snapshot: name of the snapshot to analyze
    :param reference_snapshot: optional reference snapshot for differential analyses
    :raises BatfishException: if the analysis work does not terminate normally
    """
    item = workhelper.get_workitem_run_analysis(
        bf_session, name, snapshot, reference_snapshot)
    result = workhelper.execute(item, bf_session)
    if result["status"] != WorkStatusCode.TERMINATEDNORMALLY:
        raise BatfishException("Failed to run analysis")

    return bf_get_analysis_answers(name, snapshot, reference_snapshot)
Ejemplo n.º 6
0
    def set_network(
        self, name: Optional[str] = None, prefix: str = Options.default_network_prefix
    ) -> str:
        """
        Configure the network used for analysis.

        :param name: name of the network to set. If `None`, a name will be generated
        :type name: str
        :param prefix: prefix to prepend to auto-generated network names if name is empty
        :type prefix: str

        :return: name of the configured network
        :rtype: str
        :raises BatfishException: if configuration fails
        """
        if name is None:
            name = prefix + get_uuid()
        validate_name(name, "network")

        # If the network already exists, simply make it the session's current one.
        try:
            net = restv2helper.get_network(self, name)
            self.network = str(net["name"])
            return self.network
        except HTTPError as e:
            # 404 means the network does not exist yet; anything else is fatal.
            if e.response.status_code != 404:
                raise BatfishException("Unknown error accessing network", e)

        # Network not found: ask the coordinator to initialize it.
        json_data = workhelper.get_data_init_network(self, name)
        json_response = resthelper.get_json_response(
            self, CoordConsts.SVC_RSC_INIT_NETWORK, json_data
        )

        network_name = json_response.get(CoordConsts.SVC_KEY_NETWORK_NAME)
        if network_name is None:
            raise BatfishException(
                "Network initialization failed. Server response: {}".format(
                    json_response
                )
            )

        self.network = str(network_name)
        return self.network
Ejemplo n.º 7
0
def bf_set_network(name=None, prefix=Options.default_network_prefix):
    # type: (Optional[str], str) -> str
    """
    Configure the network used for analysis.

    :param name: name of the network to set. If `None`, a name will be generated using prefix.
    :type name: string
    :param prefix: prefix to prepend to auto-generated network names if name is empty
    :type prefix: string

    :return: The name of the configured network, if configured successfully.
    :rtype: string
    :raises BatfishException: if configuration fails
    """
    if name is None:
        name = prefix + get_uuid()
    validate_name(name, "network")

    # If the network already exists, simply make it the session's current one.
    try:
        net = restv2helper.get_network(bf_session, name)
        bf_session.network = str(net['name'])
        return bf_session.network
    except HTTPError as e:
        # 404 means the network does not exist yet; anything else is fatal.
        if e.response.status_code != 404:
            raise BatfishException('Unknown error accessing network', e)

    # Network not found: ask the coordinator to initialize it.
    json_data = workhelper.get_data_init_network(bf_session, name)
    json_response = resthelper.get_json_response(
        bf_session, CoordConsts.SVC_RSC_INIT_NETWORK, json_data)

    network_name = json_response.get(CoordConsts.SVC_KEY_NETWORK_NAME)
    if network_name is None:
        raise BatfishException(
            "Network initialization failed. Server response: {}".format(
                json_response))

    bf_session.network = str(network_name)
    return bf_session.network
Ejemplo n.º 8
0
    def _make_query(self, nodes):
        """
        Run the Batfish ``nodeProperties`` question for a single node and
        record the resulting dataframe.

        :param nodes: node name to query (Batfish's own parameter is called
            ``nodes`` even though only one node is passed here)
        :raises BatfishException: re-raised with a wrapping message if the
            underlying Batfish query fails
        """
        # nodes is actually a single node here, not sure why batfish have named it "nodes"?
        try:
            query = self.b_fish.bfq.nodeProperties(nodes=nodes)
            result = query.answer().frame()
            # Append each nodes query result to the results_dict list
            # NOTE(review): assumes self.results_dict[nodes] is already a list — confirm
            self.results_dict[nodes].append(result)

        except BatfishException as e:
            print(e)
            raise BatfishException(f"Batfish Query failure :  {e}")
Ejemplo n.º 9
0
    def _make_query(self, flow, nodes):
        """
        Run the Batfish ``testFilters`` question for a flow against a single
        node and record the resulting dataframe.

        :param flow: headerConstraint object built from the 5-tuple args
            (source/destination IP, protocol, etc.)
        :param nodes: node name to query (Batfish's own parameter is called
            ``nodes`` even though only one node is passed here)
        :raises BatfishException: re-raised with a wrapping message if the
            underlying Batfish query fails
        """
        # nodes is actually a single node here, not sure why batfish have named it "nodes"?
        # flow is a headerConstraint object which was built from passing in args relating to the source/dst ip/proto (5 tuple etc)
        try:
            query = self.b_fish.bfq.testFilters(headers=flow, nodes=nodes)
            result = query.answer().frame()
            # Append each nodes query result to the results_dict list
            # NOTE(review): assumes self.results_dict[nodes] is already a list — confirm
            self.results_dict[nodes].append(result)

        except BatfishException as e:
            print(e)
            raise BatfishException(f"Batfish Query failure :  {e}")
Ejemplo n.º 10
0
def get_work_status(w_item_id, session):
    """Fetch the current status of a queued work item from the coordinator.

    :param w_item_id: id of the work item to check
    :param session: Batfish session to use
    :raises BatfishException: if the response lacks the work-status key
    :return: the raw status-check response dictionary
    """
    request = {
        CoordConsts.SVC_KEY_API_KEY: session.apiKey,
        CoordConsts.SVC_KEY_WORKID: w_item_id,
    }
    result = resthelper.get_json_response(
        session, CoordConsts.SVC_RSC_GET_WORKSTATUS, request)

    if CoordConsts.SVC_KEY_WORKSTATUS not in result:
        raise BatfishException(
            "Expected key (%s) not found in status check response: %s" %
            (CoordConsts.SVC_KEY_WORKSTATUS,
             result))
    return result
Ejemplo n.º 11
0
def _get_question_object(session, name):
    # type: (Optional[Session], str) -> Any
    """
    Get the question object corresponding to the specified question name.

    First searches the specified session, but falls back to bfq if it contains
    the question and specified session does not.
    """
    # If no session was specified or it doesn't have the specified question
    # (e.g. questions were loaded with load_questions()), use bfq for reverse
    # compatibility
    if session and hasattr(session.q, name):
        return session.q
    elif hasattr(bfq, name):
        return bfq
    else:
        raise BatfishException('{} question was not found'.format(name))
Ejemplo n.º 12
0
def get_json_response(session, resource, jsonData=None, useHttpGet=False):
    # type: (Session, str, Optional[Dict], bool) -> Dict[str, Any]
    """Send a request (POST or GET) to Batfish.

    :param session: :py:class:`~pybatfish.client.session.Session` object to use
    :param resource: the API endpoint to call on the Batfish server, string
    :param jsonData: any HTTP POST data to send, as a dictionary
    :param useHttpGet: boolean, whether HTTP GET request should be sent
    :raises BatfishException: if the coordinator reports a failure
    :return: the payload portion of the coordinator's response, as a dict
    """
    if useHttpGet:
        raw = _get_data(session, resource)
    else:
        raw = _post_data(session, resource, jsonData)

    # Coordinator responses are [status, body]; anything but SUCCESS is an error.
    payload = raw.json()
    status, body = payload[0], payload[1]
    if status != CoordConsts.SVC_KEY_SUCCESS:
        raise BatfishException("Coordinator returned failure: {}".format(body))
    return dict(body)
Ejemplo n.º 13
0
    def _parse_snapshot(self, name, background, extra_args):
        # type: (str, bool, Optional[Dict[str, Any]]) -> Union[str, Dict[str, str]]
        """
        Parse specified snapshot.

        :param name: name of the snapshot to initialize
        :type name: str
        :param background: whether or not to run the task in the background
        :type background: bool
        :param extra_args: extra arguments to be passed to the parse command.
        :type extra_args: dict

        :return: name of initialized snapshot, or JSON dictionary of task status if background=True
        :rtype: Union[str, Dict]
        """
        parse_item = workhelper.get_workitem_parse(self, name)
        result = workhelper.execute(
            parse_item, self, background=background, extra_args=extra_args
        )
        if background:
            self.snapshot = name
            return result

        parse_status = WorkStatusCode(result["status"])
        if parse_status != WorkStatusCode.TERMINATEDNORMALLY:
            # Surface the parse log so the user can see why it failed.
            init_log = restv2helper.get_work_log(self, name, parse_item.id)
            raise BatfishException(
                "Initializing snapshot {ss} failed with status {status}\n{log}".format(
                    ss=name, status=parse_status, log=init_log
                )
            )

        self.snapshot = name
        logging.getLogger(__name__).info(
            "Default snapshot is now set to %s", self.snapshot
        )
        if self.enable_diagnostics:
            warn_on_snapshot_failure(self)

        return self.snapshot
Ejemplo n.º 14
0
def bf_auto_complete(completion_type, query, max_suggestions=None):
    # type: (VariableType, str, Optional[int]) -> List[AutoCompleteSuggestion]
    """
    Get a list of autocomplete suggestions that match the provided query based on the variable type.

    If completion is not supported for the provided variable type a BatfishException will be raised.

    Usage Example::

        >>> from pybatfish.client.commands import bf_auto_complete, bf_set_network
        >>> from pybatfish.datamodel.primitives import AutoCompleteSuggestion, VariableType
        >>> name = bf_set_network()
        >>> bf_auto_complete(VariableType.ROUTING_PROTOCOL_SPEC, "b") # doctest: +SKIP
        [AutoCompleteSuggestion(description=None, insertion_index=0, is_partial=False, rank=2147483647, text='bgp'),
            AutoCompleteSuggestion(description=None, insertion_index=0, is_partial=False, rank=2147483647, text='ebgp'),
            AutoCompleteSuggestion(description=None, insertion_index=0, is_partial=False, rank=2147483647, text='ibgp')]

    :param completion_type: The type of parameter to suggest autocompletions for
    :type completion_type: :class:`~pybatfish.datamodel.primitives.VariableType`
    :param query: The partial string to match suggestions on
    :type query: str
    :param max_suggestions: Optional max number of suggestions to be returned
    :type max_suggestions: int
    """
    request_data = workhelper.get_data_auto_complete(
        bf_session, completion_type, query, max_suggestions)
    response = resthelper.get_json_response(
        bf_session, CoordConsts.SVC_RSC_AUTO_COMPLETE, request_data)
    if CoordConsts.SVC_KEY_SUGGESTIONS not in response:
        raise BatfishException("Unexpected response: {}.".format(response))
    return [
        AutoCompleteSuggestion.from_dict(json.loads(raw))
        for raw in response[CoordConsts.SVC_KEY_SUGGESTIONS]
    ]
Ejemplo n.º 15
0
def _upload_diagnostics(bucket=_S3_BUCKET, region=_S3_REGION, dry_run=True,
                        netconan_config=None, questions=_INIT_INFO_QUESTIONS,
                        resource_prefix=''):
    # type: (str, str, bool, Optional[str], Iterable[QuestionBase], str) -> str
    """
    Fetch, anonymize, and optionally upload snapshot initialization information.

    :param bucket: name of the AWS S3 bucket to upload to
    :type bucket: string
    :param region: name of the region containing the bucket
    :type region: string
    :param dry_run: whether or not to skip upload; if False, anonymized files will be stored locally, otherwise anonymized files will be uploaded to the specified S3 bucket
    :type dry_run: bool
    :param netconan_config: path to Netconan configuration file
    :type netconan_config: string
    :param questions: list of questions to run and upload
    :type questions: list[QuestionBase]
    :param resource_prefix: prefix to append to any uploaded resources
    :type resource_prefix: str
    :return: location of anonymized files (local directory if doing dry run, otherwise upload ID)
    :rtype: string
    """
    # Answer each diagnostic question and write the raw results into a
    # temporary directory, one file per question instance.
    tmp_dir = tempfile.mkdtemp()
    try:
        for q in questions:
            instance_name = q.get_name()
            try:
                ans = q.answer()
                if not isinstance(ans, Answer):
                    raise BatfishException(
                        "question.answer() did not return an Answer: {}".format(
                            ans))
                content = json.dumps(ans.dict(), indent=4, sort_keys=True)
            except BatfishException as e:
                # Best-effort: record the failure message in place of the answer.
                content = "Failed to answer {}: {}".format(instance_name, e)
                bf_logger.warning(content)

            with open(os.path.join(tmp_dir, instance_name), 'w') as f:
                f.write(content)

        # Anonymize the raw answers into a second temporary directory.
        tmp_dir_anon = tempfile.mkdtemp()
        _anonymize_dir(tmp_dir, tmp_dir_anon, netconan_config)
    finally:
        # Always remove the un-anonymized data.
        shutil.rmtree(tmp_dir)

    if dry_run:
        bf_logger.info(
            'See anonymized files produced by dry-run here: {}'.format(
                tmp_dir_anon))
        return tmp_dir_anon

    try:
        if bucket is None:
            raise ValueError('Bucket must be set to upload init info.')
        if region is None:
            raise ValueError('Region must be set to upload init info.')

        # Generate anonymous S3 subdirectory name
        anon_dir = '{}{}'.format(resource_prefix, uuid.uuid4().hex)
        upload_dest = 'https://{bucket}.s3-{region}.amazonaws.com/{resource}'.format(
            bucket=bucket, region=region, resource=anon_dir)

        _upload_dir_to_url(upload_dest, tmp_dir_anon,
                           headers={'x-amz-acl': 'bucket-owner-full-control'})
        bf_logger.debug('Uploaded files to: {}'.format(upload_dest))
    finally:
        # The anonymized copy is only kept locally for dry runs.
        shutil.rmtree(tmp_dir_anon)

    return anon_dir
Ejemplo n.º 16
0
def upload_diagnostics(
    session: "Session",
    metadata: Dict[str, Any],
    bucket: str = _S3_BUCKET,
    region: str = _S3_REGION,
    dry_run: bool = True,
    netconan_config: Optional[str] = None,
    questions: Iterable[Dict[str, Any]] = _INIT_INFO_QUESTIONS,
    resource_prefix: str = "",
    proxy: Optional[str] = None,
) -> str:
    """
    Fetch, anonymize, and optionally upload snapshot initialization information.

    :param session: Batfish session to use for running diagnostics questions
    :type session: :class:`~pybatfish.client.session.Session`
    :param metadata: additional metadata to upload with the diagnostics
    :type metadata: dict[str, Any]
    :param bucket: name of the AWS S3 bucket to upload to
    :type bucket: string
    :param region: name of the region containing the bucket
    :type region: string
    :param dry_run: if True, upload is skipped and the anonymized files will be stored locally for review. If False, anonymized files will be uploaded to the specified S3 bucket
    :type dry_run: bool
    :param netconan_config: path to Netconan configuration file
    :type netconan_config: string
    :param questions: list of question templates to run and upload
    :type questions: list[dict]
    :param resource_prefix: prefix to append to any uploaded resources
    :type resource_prefix: str
    :param proxy: proxy URL to use when uploading data.
    :return: location of anonymized files (local directory if doing dry run, otherwise upload ID)
    :rtype: string
    """
    logger = logging.getLogger(__name__)
    # Answer each diagnostic question template and write the raw results into
    # a temporary directory, one file per question instance.
    tmp_dir = tempfile.mkdtemp()
    try:
        for template in questions:
            q = QuestionBase(template, session)
            instance_name = q.get_name()
            try:
                ans = q.answer()
                if not isinstance(ans, Answer):
                    raise BatfishException(
                        "question.answer() did not return an Answer: {}".
                        format(ans))
                content = json.dumps(ans.dict(), indent=4, sort_keys=True)
            except BatfishException as e:
                # Best-effort: record the failure message in place of the answer.
                content = "Failed to answer {}: {}".format(instance_name, e)
                logger.warning(content)

            with open(os.path.join(tmp_dir, instance_name), "w") as f:
                f.write(content)

        # Anonymize the raw answers into a second temporary directory.
        tmp_dir_anon = tempfile.mkdtemp()
        if questions:
            _anonymize_dir(tmp_dir, tmp_dir_anon, netconan_config)
    finally:
        # Always remove the un-anonymized data.
        shutil.rmtree(tmp_dir)

    # Metadata is written as-is (it is not passed through anonymization).
    with open(os.path.join(tmp_dir_anon, METADATA_FILENAME), "w") as f:
        f.write(json.dumps(metadata))

    if dry_run:
        logger.info("See anonymized files produced by dry-run here: {}".format(
            tmp_dir_anon))
        return tmp_dir_anon

    try:
        if bucket is None:
            raise ValueError("Bucket must be set to upload init info.")
        if region is None:
            raise ValueError("Region must be set to upload init info.")

        # Generate anonymous S3 subdirectory name
        anon_dir = "{}{}".format(resource_prefix, uuid.uuid4().hex)
        upload_dest = "https://{bucket}.s3-{region}.amazonaws.com/{resource}".format(
            bucket=bucket, region=region, resource=anon_dir)

        _upload_dir_to_url(
            upload_dest,
            tmp_dir_anon,
            headers={"x-amz-acl": "bucket-owner-full-control"},
            proxies={"https": proxy} if proxy is not None else None,
        )
        logger.debug("Uploaded files to: {}".format(upload_dest))
    finally:
        # The anonymized copy is only kept locally for dry runs.
        shutil.rmtree(tmp_dir_anon)

    return anon_dir
Ejemplo n.º 17
0
def _check_network():
    """Raise if no network is currently configured on the global session.

    :raises BatfishException: if the session has no network set
    """
    network = bf_session.network
    if network is None:
        raise BatfishException("Network is not set")
Ejemplo n.º 18
0
def execute(work_item, session, background=False):
    # type: (WorkItem, Session, bool) -> Dict[str, str]
    """Submit a work item to Batfish.

    :param work_item: work to submit
    :type work_item: :py:class:`~pybatfish.client.WorkItem`
    :param session: Batfish session to use.
    :type session: :py:class:`~pybatfish.client.session.Session`
    :param background: Whether to background the job. If `True`,
        this function only returns the result of submitting the job.
    :type background: bool

    :return: If `background=True`, a dict containing a single key 'result' with
    a string description of the result. If `background=False`, a dict containing
    "status" and "answer" keys, both strings.
    """
    json_data = {
        CoordConsts.SVC_KEY_WORKITEM: work_item.to_json(),
        CoordConsts.SVC_KEY_API_KEY: session.apiKey
    }

    # Submit the work item
    response = resthelper.get_json_response(session,
                                            CoordConsts.SVC_RSC_QUEUE_WORK,
                                            json_data)

    if background:
        # Fire-and-forget: only report the submission result.
        return {"result": str(response["result"])}

    try:
        # Poll the coordinator once per second until the work reaches a
        # terminal state, echoing progress to the user on each iteration.
        answer = get_work_status(work_item.id, session)
        status = WorkStatusCode(answer[CoordConsts.SVC_KEY_WORKSTATUS])
        task_details = answer[CoordConsts.SVC_KEY_TASKSTATUS]

        while not WorkStatusCode.is_terminated(status):
            print_work_status(session, status, task_details)
            time.sleep(1)
            answer = get_work_status(work_item.id, session)
            status = WorkStatusCode(answer[CoordConsts.SVC_KEY_WORKSTATUS])
            task_details = answer[CoordConsts.SVC_KEY_TASKSTATUS]

        # Print the final (terminal) status as well.
        print_work_status(session, status, task_details)

        if status == WorkStatusCode.ASSIGNMENTERROR:
            raise BatfishException("Work finished with status {}\n{}".format(
                status, work_item.to_json()))

        # get the answer
        answer_file_name = _compute_batfish_answer_file_name(work_item)
        answer_bytes = resthelper.get_object(session, answer_file_name)

        # In Python 3.x, answer needs to be decoded before it can be used
        # for things like json.loads (<= 3.6).
        if six.PY3:
            answer_string = answer_bytes.decode(encoding="utf-8")
        else:
            answer_string = answer_bytes
        return {"status": status, "answer": answer_string}

    except KeyboardInterrupt:
        # Ctrl-C while waiting: ask the server to kill the job, then re-raise
        # so the caller knows the work was aborted.
        response = kill_work(session, work_item.id)
        raise KeyboardInterrupt(
            "Killed ongoing work: {}. Server response: {}".format(
                work_item.id, json.dumps(response)))
Ejemplo n.º 19
0
def bf_init_snapshot(upload, name=None, overwrite=False, background=False):
    # type: (str, Optional[str], bool, bool) -> Union[str, Dict[str, str]]
    """Initialize a new snapshot.

    :param upload: snapshot to upload
    :type upload: zip file or directory
    :param name: name of the snapshot to initialize
    :type name: string
    :param overwrite: whether or not to overwrite an existing snapshot with the
       same name
    :type overwrite: bool
    :param background: whether or not to run the task in the background
    :type background: bool
    :return: name of initialized snapshot, or JSON dictionary of task status if background=True
    :rtype: Union[str, Dict]
    :raises ValueError: if a snapshot with this name exists and overwrite is False
    :raises BatfishException: if snapshot parsing does not terminate normally
    """
    # A network is required before any snapshot work; create one on demand.
    if bf_session.network is None:
        bf_set_network()

    if name is None:
        name = Options.default_snapshot_prefix + get_uuid()
    validate_name(name)

    if name in bf_list_snapshots():
        if overwrite:
            bf_delete_snapshot(name)
        else:
            raise ValueError('A snapshot named '
                             '{}'
                             ' already exists in network '
                             '{}'
                             ''.format(name, bf_session.network))

    # Directories are zipped into a temporary file before upload.
    file_to_send = upload
    if os.path.isdir(upload):
        temp_zip_file = tempfile.NamedTemporaryFile()
        zip_dir(upload, temp_zip_file)
        file_to_send = temp_zip_file.name

    json_data = workhelper.get_data_upload_snapshot(bf_session, name,
                                                    file_to_send)
    resthelper.get_json_response(bf_session,
                                 CoordConsts.SVC_RSC_UPLOAD_SNAPSHOT,
                                 json_data)

    # Kick off parsing of the uploaded snapshot.
    work_item = workhelper.get_workitem_parse(bf_session, name)
    answer_dict = workhelper.execute(work_item,
                                     bf_session,
                                     background=background)
    if background:
        bf_session.baseSnapshot = name
        return answer_dict

    status = WorkStatusCode(answer_dict["status"])
    if status != WorkStatusCode.TERMINATEDNORMALLY:
        raise BatfishException(
            'Initializing snapshot {ss} failed with status {status}'.format(
                ss=name, status=status))
    else:
        bf_session.baseSnapshot = name
        bf_logger.info("Default snapshot is now set to %s",
                       bf_session.baseSnapshot)
        return bf_session.baseSnapshot
Ejemplo n.º 20
0
def execute(work_item, session, background=False, extra_args=None):
    # type: (WorkItem, Session, bool, Optional[Dict[str, Any]]) -> Dict[str, Any]
    """Submit a work item to Batfish.

    :param work_item: work to submit
    :type work_item: :py:class:`~pybatfish.client.WorkItem`
    :param session: Batfish session to use.
    :type session: :py:class:`~pybatfish.client.session.Session`
    :param background: Whether to background the job. If `True`,
        this function only returns the result of submitting the job.
    :type background: bool
    :param extra_args: extra arguments to be passed to Batfish.
    :type extra_args: dict

    :return: If `background=True`, a dict containing a single key 'result' with
    a string description of the result. If `background=False`, a dict containing
    a single key 'status' with a string describing work status.
    """
    # Merge any caller-supplied parameters into the work item before submission.
    if extra_args is not None:
        work_item.requestParams.update(extra_args)

    # Every work item must name the snapshot it operates on.
    snapshot = work_item.requestParams.get(BfConsts.ARG_TESTRIG)
    if snapshot is None:
        raise ValueError(
            "Work item {} does not include a snapshot name".format(
                work_item.to_json()))

    json_data = {
        CoordConsts.SVC_KEY_WORKITEM: work_item.to_json(),
        CoordConsts.SVC_KEY_API_KEY: session.api_key,
    }

    # Submit the work item
    response = resthelper.get_json_response(session,
                                            CoordConsts.SVC_RSC_QUEUE_WORK,
                                            json_data)

    if background:
        # TODO: this is ugly and messes with return types: design and write async replacement
        # After we drop 2.7 support
        return {"result": str(response["result"])}

    answer = get_work_status(work_item.id, session)
    status = WorkStatusCode(answer[CoordConsts.SVC_KEY_WORKSTATUS])
    task_details = answer[CoordConsts.SVC_KEY_TASKSTATUS]

    # Poll with backoff (starting at 0.1s, growing 1.5x per iteration and
    # capped at 1s) until the work reaches a terminal state.
    cur_sleep = 0.1  # seconds
    while not WorkStatusCode.is_terminated(status):
        _print_work_status(session, status, task_details)
        time.sleep(cur_sleep)
        cur_sleep = min(1.0, cur_sleep * 1.5)
        answer = get_work_status(work_item.id, session)
        status = WorkStatusCode(answer[CoordConsts.SVC_KEY_WORKSTATUS])
        task_details = answer[CoordConsts.SVC_KEY_TASKSTATUS]

    _print_work_status(session, status, task_details)

    # Handle fail conditions not producing logs
    if status in [
            WorkStatusCode.ASSIGNMENTERROR, WorkStatusCode.REQUEUEFAILURE
    ]:
        raise BatfishException(
            "Work finished with status {}\nwork_item: {}\ntask_details: {}".
            format(status, work_item.to_json(), json.loads(task_details)))

    # Handle fail condition with logs
    if status == WorkStatusCode.TERMINATEDABNORMALLY:
        log = restv2helper.get_work_log(session, snapshot, work_item.id)
        log_file_msg = ""
        # Very long logs are written to a temp file and truncated in the message.
        if len(log) > MAX_LOG_LENGTH:
            log_file = tempfile.NamedTemporaryFile().name
            with open(log_file, "w") as log_file_handle:
                log_file_handle.write(str(log))
            log_file_msg = "Full log written to {}\n".format(log_file)
        raise BatfishException(
            "Work terminated abnormally\nwork_item: {item}\n\n{msg}log: {prefix}{log}"
            .format(
                item=work_item.to_json(),
                msg=log_file_msg,
                log=log[-MAX_LOG_LENGTH:],
                prefix="..." if log_file_msg else "",
            ))

    return {"status": status}
Ejemplo n.º 21
0
def bf_fork_snapshot(base_name,
                     name=None,
                     overwrite=False,
                     background=False,
                     deactivate_interfaces=None,
                     deactivate_links=None,
                     deactivate_nodes=None,
                     restore_interfaces=None,
                     restore_links=None,
                     restore_nodes=None,
                     add_files=None):
    # type: (str, Optional[str], bool, bool, Optional[List[Interface]], Optional[List[Edge]], Optional[List[str]], Optional[List[Interface]], Optional[List[Edge]], Optional[List[str]], Optional[str]) -> Union[str, Dict, None]
    """Copy an existing snapshot and deactivate or reactivate specified interfaces, nodes, and links on the copy.

    :param base_name: name of the snapshot to copy
    :type base_name: string
    :param name: name of the snapshot to initialize
    :type name: string
    :param overwrite: whether or not to overwrite an existing snapshot with the
        same name
    :type overwrite: bool
    :param background: whether or not to run the task in the background
    :type background: bool
    :param deactivate_interfaces: list of interfaces to deactivate in new snapshot
    :type deactivate_interfaces: list[Interface]
    :param deactivate_links: list of links to deactivate in new snapshot
    :type deactivate_links: list[Edge]
    :param deactivate_nodes: list of names of nodes to deactivate in new snapshot
    :type deactivate_nodes: list[str]
    :param restore_interfaces: list of interfaces to reactivate
    :type restore_interfaces: list[Interface]
    :param restore_links: list of links to reactivate
    :type restore_links: list[Edge]
    :param restore_nodes: list of names of nodes to reactivate
    :type restore_nodes: list[str]
    :param add_files: path to zip file or directory containing files to add
    :type add_files: str
    :return: name of initialized snapshot, JSON dictionary of task status if
        background=True, or None if the call fails
    :rtype: Union[str, Dict, None]
    :raises ValueError: if no network is set, or if a snapshot with this name
        exists and overwrite is False
    :raises BatfishException: if parsing the forked snapshot fails
    """
    if bf_session.network is None:
        raise ValueError('Network must be set to fork a snapshot.')

    if name is None:
        name = Options.default_snapshot_prefix + get_uuid()
    validate_name(name)

    if name in bf_list_snapshots():
        if overwrite:
            bf_delete_snapshot(name)
        else:
            raise ValueError('A snapshot named '
                             '{}'
                             ' already exists in network '
                             '{}'
                             ''.format(name, bf_session.network))

    # Additional files (a zip, or a directory which is zipped here) are sent
    # base64-encoded inside the JSON request body.
    encoded_file = None
    if add_files is not None:
        file_to_send = add_files
        if os.path.isdir(add_files):
            temp_zip_file = tempfile.NamedTemporaryFile()
            zip_dir(add_files, temp_zip_file)
            file_to_send = temp_zip_file.name

        if os.path.isfile(file_to_send):
            with open(file_to_send, "rb") as f:
                encoded_file = base64.b64encode(f.read()).decode('ascii')

    json_data = {
        "snapshotBase": base_name,
        "snapshotNew": name,
        "deactivateInterfaces": deactivate_interfaces,
        "deactivateLinks": deactivate_links,
        "deactivateNodes": deactivate_nodes,
        "restoreInterfaces": restore_interfaces,
        "restoreLinks": restore_links,
        "restoreNodes": restore_nodes,
        "zipFile": encoded_file
    }
    restv2helper.fork_snapshot(bf_session, json_data)

    # Parse the forked snapshot just like a freshly initialized one.
    work_item = workhelper.get_workitem_parse(bf_session, name)
    answer_dict = workhelper.execute(work_item,
                                     bf_session,
                                     background=background)
    if background:
        bf_session.baseSnapshot = name
        return answer_dict

    status = WorkStatusCode(answer_dict['status'])
    if status != WorkStatusCode.TERMINATEDNORMALLY:
        raise BatfishException(
            'Forking snapshot {ss} from {base} failed with status {status}'.
            format(ss=name, base=base_name, status=status))
    else:
        bf_session.baseSnapshot = name
        bf_logger.info("Default snapshot is now set to %s",
                       bf_session.baseSnapshot)
        return bf_session.baseSnapshot