Example #1
def test_upload_diagnostics(network, example_snapshot):
    """Upload initialization information for example snapshot."""
    # This call raises an exception if any file upload results in HTTP status != 200
    resource = upload_diagnostics(session=bf_session,
                                  metadata={},
                                  dry_run=False,
                                  resource_prefix='test/')
    base_url = 'https://{bucket}.s3-{region}.amazonaws.com'.format(
        bucket=_S3_BUCKET, region=_S3_REGION)

    # Confirm none of the uploaded questions is publicly accessible
    # (anonymous GET on the bucket objects should be denied)
    for template in _INIT_INFO_QUESTIONS:
        q = QuestionBase(template, bf_session)
        r = requests.get('{}/{}/{}'.format(base_url, resource, q.get_name()))
        assert r.status_code == 403
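The 403 assertions work because the diagnostics bucket denies anonymous reads: uploaded objects grant the bucket owner full control, so an unauthenticated GET is rejected rather than served. A standalone sketch of the same check, assuming only the requests library (the URL is a placeholder, not a real upload):

import requests

def is_publicly_readable(url):
    """Return True only if an anonymous GET on the URL succeeds."""
    return requests.get(url).status_code == 200

# Expected to print False: S3 answers anonymous reads on these objects
# with 403 Forbidden.
print(is_publicly_readable(
    'https://example-bucket.s3-us-west-2.amazonaws.com/test/__initInfo'))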
Example #2
def get_snapshot_parse_status(session):
    # type: (Session) -> Dict[str, str]
    """
    Get parsing and conversion status for files and nodes in the current snapshot.

    :param session: Batfish session to use for getting snapshot parse status
    :type session: :class:`~pybatfish.client.session.Session`
    :return: dictionary of files and nodes to parse/convert status
    :rtype: dict
    """
    parse_status = {}  # type: Dict[str, str]
    try:
        answer = QuestionBase(_INIT_INFO_QUESTION, session).answer()
        if not isinstance(answer, Answer):
            raise BatfishException(
                "question.answer() did not return an Answer: {}".format(
                    answer))

        if 'answerElements' not in answer:
            raise BatfishException('Invalid answer format for init info')
        answer_elements = answer['answerElements']
        if not answer_elements:
            raise BatfishException('Invalid answer format for init info')
        # These statuses contain parse and conversion status
        parse_status = answer_elements[0].get('parseStatus', {})
    except BatfishException as e:
        logging.getLogger(__name__).warning(
            "Failed to check snapshot init info: %s", e)

    return parse_status
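A minimal usage sketch of this helper, assuming bf_session (pybatfish's global session object) already points at an initialized snapshot; the 'PASSED' comparison is illustrative, since Batfish reports several status strings (e.g. FAILED, PARTIALLY_UNRECOGNIZED):

from pybatfish.client.commands import bf_session

# Flag any file or node that did not parse/convert cleanly.
for name, status in get_snapshot_parse_status(bf_session).items():
    if status != 'PASSED':
        print('{}: {}'.format(name, status))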
Example #3
import tempfile
import uuid
from typing import Dict, Iterable, Optional  # noqa: F401

import requests
from netconan import netconan
from requests import HTTPError

from pybatfish.datamodel.answer import Answer  # noqa: F401
from pybatfish.exception import BatfishException
from pybatfish.question.question import QuestionBase

_FILE_PARSE_STATUS_QUESTION = QuestionBase({
    "class": "org.batfish.question.initialization.FileParseStatusQuestion",
    "differential": False,
    "instance": {
        "instanceName": "__fileParseStatus",
    }
})
_INIT_INFO_QUESTION = QuestionBase({
    "class": "org.batfish.question.InitInfoQuestionPlugin$InitInfoQuestion",
    "differential": False,
    "instance": {
        "instanceName": "__initInfo"
    },
})
_INIT_ISSUES_QUESTION = QuestionBase({
    "class": "org.batfish.question.initialization.InitIssuesQuestion",
    "differential": False,
    "instance": {
        "instanceName": "__initIssues"
    },
})
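These templates are the raw question JSON that the other examples wrap in QuestionBase. A small sketch of inspecting one, following this example's single-argument constructor (other examples on this page pass a session as a second argument; the signature differs across pybatfish versions):

# Instance names double as the file names used when uploading answers.
q = QuestionBase({
    "class": "org.batfish.question.initialization.FileParseStatusQuestion",
    "differential": False,
    "instance": {"instanceName": "__fileParseStatus"},
})
print(q.get_name())  # expected: '__fileParseStatus'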
Example #4
def test_questions(network, example_snapshot):
    """Run diagnostic questions on example snapshot."""
    for template in _INIT_INFO_QUESTIONS:
        # Goal here is to run question successfully, i.e. not crash
        QuestionBase(template, bf_session).answer()
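A variant sketch of the same smoke test using pytest's parametrize, so each template passes or fails independently (assumes the same network/example_snapshot fixtures and bf_session as above):

import pytest

@pytest.mark.parametrize('template', _INIT_INFO_QUESTIONS)
def test_question_answers(network, example_snapshot, template):
    """Each diagnostic question should answer without raising."""
    QuestionBase(template, bf_session).answer()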
Example #5
def upload_diagnostics(session,
                       metadata,
                       bucket=_S3_BUCKET,
                       region=_S3_REGION,
                       dry_run=True,
                       netconan_config=None,
                       questions=_INIT_INFO_QUESTIONS,
                       resource_prefix=''):
    # type: (Session, Dict[str, Any], str, str, bool, Optional[str], Iterable[Dict[str, object]], str) -> str
    """
    Fetch, anonymize, and optionally upload snapshot initialization information.

    :param session: Batfish session to use for running diagnostics questions
    :type session: :class:`~pybatfish.client.session.Session`
    :param metadata: additional metadata to upload with the diagnostics
    :type metadata: dict[str, Any]
    :param bucket: name of the AWS S3 bucket to upload to
    :type bucket: string
    :param region: name of the region containing the bucket
    :type region: string
    :param dry_run: if True, upload is skipped and the anonymized files will be stored locally for review. If False, anonymized files will be uploaded to the specified S3 bucket
    :type dry_run: bool
    :param netconan_config: path to Netconan configuration file
    :type netconan_config: string
    :param questions: list of question templates to run and upload
    :type questions: list[dict]
    :param resource_prefix: prefix to prepend to the name of any uploaded resource
    :type resource_prefix: str
    :return: location of anonymized files (local directory if doing dry run, otherwise upload ID)
    :rtype: string
    """
    logger = logging.getLogger(__name__)
    tmp_dir = tempfile.mkdtemp()
    try:
        for template in questions:
            q = QuestionBase(template, session)
            instance_name = q.get_name()
            try:
                ans = q.answer()
                if not isinstance(ans, Answer):
                    raise BatfishException(
                        "question.answer() did not return an Answer: {}".
                        format(ans))
                content = json.dumps(ans.dict(), indent=4, sort_keys=True)
            except BatfishException as e:
                content = "Failed to answer {}: {}".format(instance_name, e)
                logger.warning(content)

            with open(os.path.join(tmp_dir, instance_name), 'w') as f:
                f.write(content)

        tmp_dir_anon = tempfile.mkdtemp()
        if questions:
            _anonymize_dir(tmp_dir, tmp_dir_anon, netconan_config)
    finally:
        shutil.rmtree(tmp_dir)

    with open(os.path.join(tmp_dir_anon, METADATA_FILENAME), 'w') as f:
        f.write(json.dumps(metadata))

    if dry_run:
        logger.info('See anonymized files produced by dry-run here: {}'.format(
            tmp_dir_anon))
        return tmp_dir_anon

    try:
        if bucket is None:
            raise ValueError('Bucket must be set to upload init info.')
        if region is None:
            raise ValueError('Region must be set to upload init info.')

        # Generate anonymous S3 subdirectory name
        anon_dir = '{}{}'.format(resource_prefix, uuid.uuid4().hex)
        upload_dest = 'https://{bucket}.s3-{region}.amazonaws.com/{resource}'.format(
            bucket=bucket, region=region, resource=anon_dir)

        _upload_dir_to_url(upload_dest,
                           tmp_dir_anon,
                           headers={'x-amz-acl': 'bucket-owner-full-control'})
        logger.debug('Uploaded files to: {}'.format(upload_dest))
    finally:
        shutil.rmtree(tmp_dir_anon)

    return anon_dir
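A minimal usage sketch: run a dry run first so the anonymized output can be reviewed locally before anything leaves the machine (bf_session as above; the metadata contents are illustrative):

from pybatfish.client.commands import bf_session

# Dry run: files are anonymized and kept locally; nothing is uploaded.
local_dir = upload_diagnostics(session=bf_session,
                               metadata={'note': 'illustration only'},
                               dry_run=True)
print('Review anonymized files in: {}'.format(local_dir))

# After review, dropping dry_run performs the upload and returns the
# generated resource ID instead of a local path.
# upload_id = upload_diagnostics(session=bf_session, metadata={},
#                                dry_run=False)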