Example #1
def _config_required(config_path: Path) -> dict:
    try:
        with config_path.open("r") as cf:
            config = yaml.safe_load(cf)
            log.debug("Unvalidated data from file '{f}': {c}",
                      f=str(config_path),
                      c=config)
    except (yaml.YAMLError, yaml.MarkedYAMLError) as yaml_error:
        raise ConfigError(str(yaml_error))
    return config
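A minimal usage sketch of the loader above (the path is hypothetical; the real location depends on the deployment):

from pathlib import Path

# Hypothetical config location; ConfigError is raised if the YAML is invalid.
config = _config_required(Path("/etc/hyperglass/hyperglass.yaml"))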
Example #2
async def check_redis_instance():
    """Ensure Redis is running before starting server.

    Returns:
        {bool} -- True if Redis is running.
    """
    await check_redis(db=params.cache.database, config=REDIS_CONFIG)

    log.debug(
        f"Redis is running at: {REDIS_CONFIG['host']}:{REDIS_CONFIG['port']}")
    return True
Example #3
    async def send(self, query):
        """Send an incoming webhook to http endpoint."""

        payload = Webhook(**query)

        log.debug("Sending query data to {}:\n{}", self.config.host.host, payload)

        return await self._apost(
            endpoint=self.config.host.path,
            headers=self.config.headers,
            params=self.config.params,
            data=payload.export_dict(),
        )
Example #4
    def serialize(self):
        """Convert the Arista-formatted fields to standard parsed data model."""
        routes = []
        count = 0
        for prefix, entries in self.bgp_route_entries.items():

            count += entries.total_paths

            for route in entries.bgp_route_paths:

                as_path = self._get_as_path(route.as_path_entry.as_path)
                rpki_state = RPKI_STATE_MAP.get(
                    route.route_type.origin_validity, 3)

                # BGP AS Path and BGP Community queries do not include the routeDetail
                # block. Therefore, we must verify it exists before including its data.
                communities = []
                if route.route_detail is not None:
                    communities = route.route_detail.community_list

                # iBGP paths contain an empty AS_PATH array. If the AS_PATH is empty, we
                # set the source_as to the router's local-as.
                source_as = self.asn
                if len(as_path) != 0:
                    source_as = as_path[0]

                routes.append({
                    "prefix": prefix,
                    "active": route.route_type.active,
                    "age": self._get_route_age(route.timestamp),
                    "weight": route.weight,
                    "med": route.med,
                    "local_preference": route.local_preference,
                    "as_path": as_path,
                    "communities": communities,
                    "next_hop": route.next_hop,
                    "source_as": source_as,
                    "source_rid": route.peer_entry.peer_router_id,
                    "peer_rid": route.peer_entry.peer_router_id,
                    "rpki_state": rpki_state,
                })

        serialized = ParsedRoutes(
            vrf=self.vrf,
            count=count,
            routes=routes,
            winning_weight=WINNING_WEIGHT,
        )

        log.debug("Serialized Arista response: {}", serialized)
        return serialized
Example #5
def generate_opengraph(
    image_path: Path,
    max_width: int,
    max_height: int,
    target_path: Path,
    background_color: str,
):
    """Generate an OpenGraph compliant image."""
    from PIL import Image

    def center_point(background: Image, foreground: Image):
        """Generate a tuple of center points for PIL."""
        bg_x, bg_y = background.size[0:2]
        fg_x, fg_y = foreground.size[0:2]
        x1 = math.floor((bg_x / 2) - (fg_x / 2))
        y1 = math.floor((bg_y / 2) - (fg_y / 2))
        x2 = math.floor((bg_x / 2) + (fg_x / 2))
        y2 = math.floor((bg_y / 2) + (fg_y / 2))
        return (x1, y1, x2, y2)

    # Convert image to JPEG format with static name "opengraph.jpg"
    dst_path = target_path / "opengraph.jpg"

    # Copy the original image to the target path
    copied = shutil.copy2(image_path, target_path)
    log.debug("Copied {} to {}", str(image_path), str(target_path))

    with Image.open(copied) as src:

        # Only resize the image if it needs to be resized
        if src.size[0] != max_width or src.size[1] != max_height:

            # Resize image while maintaining aspect ratio
            log.debug("Opengraph image is not 1200x630, resizing...")
            src.thumbnail((max_width, max_height))

        # Only impose a background image if the original image has
        # alpha/transparency channels
        if src.mode in ("RGBA", "LA"):
            log.debug("Opengraph image has transparency, converting...")
            background = Image.new("RGB", (max_width, max_height),
                                   background_color)
            background.paste(src, box=center_point(background, src))
            dst = background
        else:
            dst = src

        # Save new image to derived target path
        dst.save(dst_path)

        # Delete the copied image
        Path(copied).unlink()

        if not dst_path.exists():
            raise RuntimeError(
                f"Unable to save resized image to {str(dst_path)}")

        log.debug("Opengraph image ready at {}", str(dst_path))

    return True
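A hedged call sketch for the function above; the 1200x630 dimensions come from its own comments, while the paths and background color are illustrative:

from pathlib import Path

generate_opengraph(
    image_path=Path("static/images/logo.png"),  # hypothetical source image
    max_width=1200,
    max_height=630,
    target_path=Path("static/images"),  # "opengraph.jpg" is written here
    background_color="#ffffff",
)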
Example #6
    async def parsed_response(  # noqa: C901 ("too complex")
            self, output: Iterable) -> str:
        """Send output through common parsers."""

        log.debug("Pre-parsed responses:\n{}", output)
        parsed = ()
        response = None

        structured_nos = structured_parsers.keys()
        structured_query_types = structured_parsers.get(self.device.nos,
                                                        {}).keys()

        scrape_nos = scrape_parsers.keys()
        scrape_query_types = scrape_parsers.get(self.device.nos, {}).keys()

        if not self.device.structured_output:
            _parsed = ()
            for func in parsers:
                for response in output:
                    _output = func(commands=self.query, output=response)
                    _parsed += (_output, )
            if self.device.nos in scrape_nos and self.query_type in scrape_query_types:
                func = scrape_parsers[self.device.nos][self.query_type]
                for response in _parsed:
                    _output = func(response)
                    parsed += (_output, )
            else:
                parsed += _parsed

            response = "\n\n".join(parsed)
        elif (self.device.structured_output
              and self.device.nos in structured_nos
              and self.query_type not in structured_query_types):
            for func in parsers:
                for response in output:
                    _output = func(commands=self.query, output=response)
                    parsed += (_output, )
            response = "\n\n".join(parsed)
        elif (self.device.structured_output
              and self.device.nos in structured_nos
              and self.query_type in structured_query_types):
            func = structured_parsers[self.device.nos][self.query_type]
            response = func(output)

        if response is None:
            response = "\n\n".join(output)

        log.debug("Post-parsed responses:\n{}", response)
        return response
Example #7
    def msteams(self):
        """Format the webhook data as a Microsoft Teams card."""

        def code(value):
            """Wrap argument in backticks for markdown inline code formatting."""
            return f"`{str(value)}`"

        header_data = [
            {"name": k, "value": code(v)}
            for k, v in self.headers.dict(by_alias=True).items()
        ]
        time_fmt = self.timestamp.strftime("%Y %m %d %H:%M:%S")
        payload = {
            "@type": "MessageCard",
            "@context": "http://schema.org/extensions",
            "themeColor": "118ab2",
            "summary": _WEBHOOK_TITLE,
            "sections": [
                {
                    "activityTitle": _WEBHOOK_TITLE,
                    "activitySubtitle": f"{time_fmt} UTC",
                    "activityImage": _ICON_URL,
                    "facts": [
                        {"name": "Query Location", "value": self.query_location},
                        {"name": "Query Target", "value": code(self.query_target)},
                        {"name": "Query Type", "value": self.query_type},
                        {"name": "Query VRF", "value": self.query_vrf},
                    ],
                },
                {"markdown": True, "text": "**Source Information**"},
                {"markdown": True, "text": "---"},
                {
                    "markdown": True,
                    "facts": [
                        {"name": "IP", "value": code(self.source)},
                        {"name": "Prefix", "value": code(self.network.prefix)},
                        {"name": "ASN", "value": code(self.network.asn)},
                        {"name": "Country", "value": self.network.country},
                        {"name": "Organization", "value": self.network.org},
                    ],
                },
                {"markdown": True, "text": "**Request Headers**"},
                {"markdown": True, "text": "---"},
                {"markdown": True, "facts": header_data},
            ],
        }
        log.debug("Created MS Teams webhook: {}", str(payload))

        return payload
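For context, a sketch of how such a MessageCard payload might be delivered to a Microsoft Teams incoming webhook. The URL and the use of httpx are assumptions, not part of the method above:

import httpx

async def post_teams_card(payload: dict) -> int:
    # Hypothetical incoming-webhook URL provisioned in Teams.
    url = "https://example.webhook.office.com/webhookb2/..."
    async with httpx.AsyncClient() as client:
        response = await client.post(url, json=payload)
    return response.status_code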
Example #8
def _config_optional(config_path: Path) -> dict:
    if config_path is None:
        config = {}
    else:
        try:
            with config_path.open("r") as cf:
                config = yaml.safe_load(cf) or {}
                log.debug(
                    "Unvalidated data from file '{f}': {c}",
                    f=str(config_path),
                    c=config,
                )
        except (yaml.YAMLError, yaml.MarkedYAMLError) as yaml_error:
            raise ConfigError(error_msg=str(yaml_error))
    return config
Example #9
def parse_juniper(output: Sequence) -> Dict:  # noqa: C901
    """Parse a Juniper BGP XML response."""
    data = {}

    for i, response in enumerate(output):
        cleaned = clean_xml_output(response)

        try:
            parsed = xmltodict.parse(
                cleaned, force_list=("rt", "rt-entry", "community")
            )

            log.debug("Initially Parsed Response: \n{}", parsed)

            if "rpc-reply" in parsed.keys():
                parsed_base = parsed["rpc-reply"]["route-information"]
            elif "route-information" in parsed.keys():
                parsed_base = parsed["route-information"]

            if "route-table" not in parsed_base:
                return data

            if "rt" not in parsed_base["route-table"]:
                return data

            parsed = parsed_base["route-table"]

            validated = JuniperRoute(**parsed)
            serialized = validated.serialize().export_dict()

            if i == 0:
                data.update(serialized)
            else:
                data["routes"].extend(serialized["routes"])

        except xmltodict.expat.ExpatError as err:
            log.critical(str(err))
            raise ParsingError("Error parsing response data") from err

        except KeyError as err:
            log.critical("{} was not found in the response", str(err))
            raise ParsingError("Error parsing response data")

        except ValidationError as err:
            log.critical(str(err))
            raise ParsingError(err.errors())

    return data
Example #10
async def _check_redis():
    """Ensure Redis is running before starting server.

    Raises:
        HyperglassError: Raised if Redis is not running.

    Returns:
        {bool} -- True if Redis is running.
    """
    try:
        await check_redis(db=params.cache.database, config=REDIS_CONFIG)
    except RuntimeError as e:
        raise HyperglassError(str(e), level="danger") from None

    log.debug(f"Redis is running at: {REDIS_CONFIG['host']}:{REDIS_CONFIG['port']}")
    return True
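The check_redis helper is not shown here; a minimal sketch of what such a liveness check might look like using the redis library, as an assumption rather than the project's implementation:

import redis

def check_redis_sync(db: int, config: dict) -> bool:
    # PING raises a connection error if the server is unreachable.
    client = redis.Redis(host=config["host"], port=config["port"], db=db)
    return bool(client.ping())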
Example #11
async def read_package_json() -> Dict:
    """Import package.json as a python dict."""

    package_json_file = Path(__file__).parent.parent / "ui" / "package.json"

    try:

        with package_json_file.open("r") as file:
            package_json = json.load(file)

    except Exception as e:
        raise RuntimeError(f"Error reading package.json: {str(e)}")

    log.debug("package.json:\n{p}", p=package_json)

    return package_json
Example #12
    def queries(self):
        """Return queries for each enabled AFI.

        Returns:
            {list} -- List of queries to run
        """
        query = []

        for afi in self.afis:
            if self.transport == "rest":
                query.append(self.json(afi=afi))
            else:
                query.append(self.scrape(afi=afi))

        log.debug(f"Constructed query: {query}")
        return query
Example #13
    def json(self, afi):
        """Return JSON version of validated query for REST devices.

        Arguments:
            afi {object} -- AFI object

        Returns:
            {str} -- JSON query string
        """
        log.debug("Building JSON query for {q}", q=repr(self.query_data))
        return _json.dumps({
            "query_type": self.query_data.query_type,
            "vrf": self.query_data.query_vrf.name,
            "afi": afi.protocol,
            "source": str(afi.source_address),
            "target": str(self.target),
        })
Example #14
def _member_of(target, network):
    """Check if IP address belongs to network.

    Arguments:
        target {object} -- Target IPv4/IPv6 address
        network {object} -- ACL network

    Returns:
        {bool} -- True if target is a member of network, False if not
    """
    log.debug(f"Checking membership of {target} for {network}")

    membership = False
    if (network.network_address <= target.network_address and
            network.broadcast_address >= target.broadcast_address  # NOQA: W503
        ):
        log.debug(f"{target} is a member of {network}")
        membership = True
    return membership
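A short usage sketch with standard-library ipaddress networks; the addresses are from the documentation range and purely illustrative:

from ipaddress import ip_network

target = ip_network("192.0.2.128/25")
acl_net = ip_network("192.0.2.0/24")
assert _member_of(target, acl_net) is True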
Example #15
def resolve_hostname(hostname: str) -> Generator:
    """Resolve a hostname via DNS/hostfile."""
    from socket import getaddrinfo, gaierror

    log.debug("Ensuring '{}' is resolvable...", hostname)

    ip4 = None
    ip6 = None
    try:
        res = getaddrinfo(hostname, None)
        for sock in res:
            if sock[0].value == 2 and ip4 is None:
                ip4 = ip_address(sock[4][0])
            elif sock[0].value == 30 and ip6 is None:
                ip6 = ip_address(sock[4][0])
    except (gaierror, ValueError, IndexError):
        pass

    yield ip4
    yield ip6
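Because the generator yields exactly two values, a caller can unpack it directly; a hedged consumption sketch with an illustrative hostname:

ip4, ip6 = resolve_hostname("example.com")
if ip4 is None and ip6 is None:
    raise ValueError("'example.com' is not resolvable")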
Example #16
def network_info_sync(*targets: str) -> Dict[str, Dict[str, str]]:
    """Get ASN, Containing Prefix, and other info about an internet resource."""

    targets = [str(t) for t in targets]
    cache = SyncCache(db=params.cache.database, **REDIS_CONFIG)

    # Set default data structure.
    data = {t: {k: "" for k in DEFAULT_KEYS} for t in targets}

    # Get all cached bgp.tools data.
    cached = cache.get_dict(CACHE_KEY)

    # Try to use cached data for each of the items in the list of
    # resources.
    for t in targets:

        if t in cached:
            # Reassign the cached network info to the matching resource.
            data[t] = cached[t]
            log.debug("Using cached network info for {}", t)

    # Remove cached items from the resource list so they're not queried.
    targets = [t for t in targets if t not in cached]

    try:
        if targets:
            whoisdata = run_whois_sync(targets)

            if whoisdata:
                # If the response is not empty, parse it.
                data.update(parse_whois(whoisdata, targets))

                # Cache the response
                for t in targets:
                    cache.set_dict(CACHE_KEY, t, data[t])
                    log.debug("Cached network info for {}", t)

    except Exception as err:
        log.error(str(err))

    return data
Example #17
    async def response(self):
        """Initiate query validation and execution."""
        device = getattr(devices, self.query_location)

        log.debug(f"Received query for {self.query_data}")
        log.debug(f"Matched device config: {device}")

        supported, transport = validate_nos(device.nos)

        output = params.messages.general
        connect = Connect(device, self.query_data, transport)

        if supported and transport == "rest":
            output = await connect.rest()

        elif supported and transport == "scrape":
            if device.proxy:
                output = await connect.scrape_proxied()
            else:
                output = await connect.scrape_direct()
        else:
            raise ConfigError('"{nos}" is not supported.', nos=device.nos)

        if output == "" or output == "\n":
            raise ResponseEmpty(params.messages.no_output,
                                device_name=device.display_name)

        log.debug(
            f"Output for query: {self.query_data.json()}:\n{repr(output)}")

        return output
Example #18
    async def network_info(self, resource, serialize=False):
        """Get network info via RIPE's Network Info API endpoint.

        See: https://stat.ripe.net/docs/data_api#network-info
        """
        try:
            valid_ip = ip_address(resource)

            if not valid_ip.is_global:
                log.debug("IP {ip} is not a global address", ip=str(valid_ip))
                return {"prefix": None, "asn": None}

        except ValueError:
            log.debug("'{resource}' is not a valid IP address", resource=resource)
            return {"prefix": None, "asn": None}

        raw = await self._aget(endpoint="network-info", params={"resource": valid_ip})

        data = {
            "asns": raw["data"]["asns"],
            "prefix": ip_network(raw["data"]["prefix"]),
        }

        if serialize:
            data["prefix"] = str(data["prefix"])
            data["asns"] = data["asns"][0]

        log.debug("Collected network info from RIPEState: {i}", i=str(data))
        return data
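A hedged usage sketch, assuming an instance of the enclosing client class is available as client inside an async context; the address matches the bgp.tools sample used elsewhere on this page:

info = await client.network_info("1.1.1.1", serialize=True)
# e.g. {"asns": "13335", "prefix": "1.1.1.0/24"} -- actual values depend on the RIPEstat response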
Example #19
    def serialize(self):
        """Convert the Juniper-specific fields to standard parsed data model."""
        vrf_parts = self.table_name.split(".")
        if len(vrf_parts) == 2:
            vrf = "default"
        else:
            vrf = vrf_parts[0]

        routes = []
        count = 0
        for table in self.rt:
            count += table.rt_entry_count
            prefix = "/".join(
                str(i) for i in (table.rt_destination, table.rt_prefix_length))
            for route in table.rt_entry:
                routes.append({
                    "prefix": prefix,
                    "active": route.active_tag,
                    "age": route.age,
                    "weight": route.preference,
                    "med": route.metric,
                    "local_preference": route.local_preference,
                    "as_path": route.as_path,
                    "communities": route.communities,
                    "next_hop": route.next_hop,
                    "source_as": route.source_as,
                    "source_rid": route.source_rid,
                    "peer_rid": route.peer_rid,
                    "rpki_state": route.validation_state,
                })

        serialized = ParsedRoutes(
            vrf=vrf,
            count=count,
            routes=routes,
            winning_weight="low",
        )

        log.debug("Serialized Juniper response: {}", serialized)
        return serialized
Example #20
def resolve_hostname(hostname: str) -> Generator:
    """Resolve a hostname via DNS/hostfile."""
    # Standard Library
    from socket import gaierror, getaddrinfo

    log.debug("Ensuring '{}' is resolvable...", hostname)

    ip4 = None
    ip6 = None
    try:
        res = getaddrinfo(hostname, None)
        for sock in res:
            if sock[0].value == 2 and ip4 is None:
                ip4 = ip_address(sock[4][0])
            elif sock[0].value in (10, 30) and ip6 is None:
                ip6 = ip_address(sock[4][0])
    except (gaierror, ValueError, IndexError) as err:
        log.debug(str(err))
        pass

    yield ip4
    yield ip6
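The raw integers compared above are socket address families: 2 is AF_INET, while 10 and 30 are AF_INET6 on Linux and macOS respectively. An equivalent sketch using the named constants (the function name is hypothetical):

from socket import AF_INET, AF_INET6, getaddrinfo

def resolve_hostname_families(hostname: str):
    # Yield (label, address) pairs instead of relying on platform-specific integers.
    for family, _type, _proto, _canon, sockaddr in getaddrinfo(hostname, None):
        if family == AF_INET:
            yield ("ipv4", sockaddr[0])
        elif family == AF_INET6:
            yield ("ipv6", sockaddr[0])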
Example #21
async def import_certificate(encoded_request: EncodedRequest):
    """Import a certificate from hyperglass-agent."""

    # Try to match the requested device name with configured devices
    log.debug("Attempting certificate import for device '{}'",
              devices[encoded_request.device])
    try:
        matched_device = devices[encoded_request.device]
    except AttributeError:
        raise HTTPException(
            detail=f"Device {str(encoded_request.device)} not found",
            status_code=404)

    try:
        # Decode JSON Web Token
        decoded_request = await jwt_decode(
            payload=encoded_request.encoded,
            secret=matched_device.credential.password.get_secret_value(),
        )
    except HyperglassError as decode_error:
        raise HTTPException(detail=str(decode_error), status_code=400)

    try:
        # Write certificate to file
        import_public_key(
            app_path=APP_PATH,
            device_name=matched_device._id,
            keystring=decoded_request,
        )
    except RuntimeError as err:
        raise HyperglassError(str(err), level="danger")

    log.info("Added public key for {}", encoded_request.device)
    return {
        "output": f"Added public key for {encoded_request.device}",
        "level": "success",
        "keywords": [encoded_request.device],
    }
Example #22
def run_whois_sync(resource: str):
    """Open raw socket to bgp.tools and execute query."""

    # Open the socket to bgp.tools
    log.debug("Opening connection to bgp.tools")
    sock = socket.socket()
    sock.connect(("bgp.tools", 43))
    sock.send(f"{resource}\n".encode())

    # Read the response
    response = b""
    while True:
        data = sock.recv(128)
        if data:
            response += data

        else:
            log.debug("Closing connection to bgp.tools")
            sock.shutdown(1)
            sock.close()
            break

    return response.decode()
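A hedged end-to-end sketch pairing this with the single-target parser shown in the next example; the address is the one from that parser's sample output:

raw = run_whois_sync("1.1.1.1")
info = parse_whois(raw)  # single-target variant, shown below
log.debug("bgp.tools whois result: {}", info)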
Example #23
def parse_whois(output: str):
    """Parse raw whois output from bgp.tools.

    Sample output:
    AS      | IP               | BGP Prefix          | CC | Registry | Allocated  | AS Name # noqa: E501
    13335   | 1.1.1.1          | 1.1.1.0/24          | US | ARIN     | 2010-07-14 | Cloudflare, Inc.
    """

    # Each new line is a row.
    rawlines = output.split("\n")

    lines = ()
    for rawline in rawlines:

        # Split each row into fields, separated by a pipe.
        line = ()
        rawfields = rawline.split("|")

        for rawfield in rawfields:

            # Remove newline and leading/trailing whitespaces.
            field = re.sub(r"(\n|\r)", "", rawfield).strip(" ")
            line += (field, )

        lines += (line, )

    headers = lines[0]
    row = lines[1]
    data = {}

    for i, header in enumerate(headers):
        # Try to replace bgp.tools key names with easier to parse key names
        key = REPLACE_KEYS.get(header, header)
        data.update({key: row[i]})

    log.debug("Parsed bgp.tools data: {}", data)
    return data
Example #24
def parse_whois(output: str, targets: List[str]) -> Dict[str, str]:
    """Parse raw whois output from bgp.tools.

    Sample output:
    AS    | IP      | BGP Prefix | CC | Registry | Allocated  | AS Name
    13335 | 1.1.1.1 | 1.1.1.0/24 | US | ARIN     | 2010-07-14 | Cloudflare, Inc.
    """
    def lines(raw):
        """Generate clean string values for each column."""
        for r in (r for r in raw.split("\n") if r):
            fields = (re.sub(r"(\n|\r)", "", field).strip(" ")
                      for field in r.split("|"))
            yield fields

    data = {}

    for line in lines(output):

        # Unpack each line's parsed values.
        asn, ip, prefix, country, rir, allocated, org = line

        # Match the line to the item in the list of resources to query.
        if ip in targets:
            i = targets.index(ip)
            data[targets[i]] = {
                "asn": asn,
                "ip": ip,
                "prefix": prefix,
                "country": country,
                "rir": rir,
                "allocated": allocated,
                "org": org,
            }

    log.debug("Parsed bgp.tools data: {}", data)
    return data
Example #25
def rpki_state(prefix, asn):
    """Get RPKI state and map to expected integer."""
    log.debug("Validating RPKI State for {p} via AS{a}", p=prefix, a=asn)

    state = 3
    ro = f"{prefix}@{asn}"

    cached = cache.get_dict(CACHE_KEY, ro)

    if cached is not None:
        state = cached
    else:

        ql = 'query GetValidation {{ validation(prefix: "{}", asn: {}) {{ state }} }}'
        query = ql.format(prefix, asn)

        try:
            with BaseExternal(
                    base_url="https://rpki.cloudflare.com") as client:
                response = client._post("/api/graphql", data={"query": query})
            validation_state = (response.get("data",
                                             {}).get("validation", {}).get(
                                                 "state", "DEFAULT"))
            state = RPKI_STATE_MAP[validation_state]
            cache.set_dict(CACHE_KEY, ro, state)
        except Exception as err:
            log.error(str(err))
            state = 3

    msg = "RPKI Validation State for {} via AS{} is {}".format(
        prefix, asn, RPKI_NAME_MAP[state])
    if cached is not None:
        msg += " [CACHED]"

    log.debug(msg)
    return state
Example #26
async def execute(query: Query) -> Union[str, Sequence[Dict]]:
    """Initiate query validation and execution."""

    output = params.messages.general

    log.debug("Received query for {}", query.json())
    log.debug("Matched device config: {}", query.device)

    mapped_driver = map_driver(query.device.driver)
    driver = mapped_driver(query.device, query)

    timeout_args = {
        "unformatted_msg": params.messages.connection_error,
        "device_name": query.device.name,
        "error": params.messages.request_timeout,
    }

    if query.device.proxy:
        timeout_args["proxy"] = query.device.proxy.name

    signal.signal(signal.SIGALRM, handle_timeout(**timeout_args))
    signal.alarm(params.request_timeout - 1)

    if query.device.proxy:
        proxy = driver.setup_proxy()
        with proxy() as tunnel:
            response = await driver.collect(
                tunnel.local_bind_host, tunnel.local_bind_port
            )
    else:
        response = await driver.collect()

    output = await driver.parsed_response(response)

    if isinstance(output, str):
        # If the output is a string (not structured) and is empty,
        # produce an error.
        if output == "" or output == "\n":
            raise ResponseEmpty(
                params.messages.no_output, device_name=query.device.name
            )
    elif isinstance(output, Dict):
        # If the output is an empty dict, none of the responses contained
        # data; produce an error.
        if not output:
            raise ResponseEmpty(
                params.messages.no_output, device_name=query.device.name
            )

    log.debug("Output for query: {}:\n{}", query.json(), repr(output))
    signal.alarm(0)

    return output
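handle_timeout is not shown above; it evidently returns a callable suitable for signal.signal. A minimal sketch of such a factory, offered as an assumption rather than the project's implementation:

def handle_timeout(**kwargs):
    """Return a SIGALRM handler that raises when the alarm fires."""
    def handler(signum, frame):
        # kwargs would normally carry the device name and message templates.
        raise TimeoutError(kwargs.get("error", "Request timed out"))
    return handler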
Example #27
async def execute(query: Query) -> Union[str, Sequence[Dict]]:
    """Initiate query validation and execution."""

    output = params.messages.general

    log.debug("Received query for {}", query.json())
    log.debug("Matched device config: {}", query.device)

    supported, driver_name = validate_nos(query.device.nos)

    mapped_driver = DRIVER_MAP.get(driver_name, NetmikoConnection)
    driver = mapped_driver(query.device, query)

    timeout_args = {
        "unformatted_msg": params.messages.connection_error,
        "device_name": query.device.name,
        "error": params.messages.request_timeout,
    }

    if query.device.proxy:
        timeout_args["proxy"] = query.device.proxy.name

    signal.signal(signal.SIGALRM, handle_timeout(**timeout_args))
    signal.alarm(params.request_timeout - 1)

    if query.device.proxy:
        proxy = driver.setup_proxy()
        with proxy() as tunnel:
            response = await driver.collect(
                tunnel.local_bind_host, tunnel.local_bind_port
            )
    else:
        response = await driver.collect()

    output = await driver.parsed_response(response)

    if output == "" or output == "\n":
        raise ResponseEmpty(params.messages.no_output, device_name=query.device.name)

    log.debug("Output for query: {}:\n{}", query.json(), repr(output))
    signal.alarm(0)

    return output
Example #28
def parse_arista(output: Sequence[str]) -> Dict:  # noqa: C901
    """Parse a Arista BGP JSON response."""
    data = {}

    for i, response in enumerate(output):

        try:
            # Parse into a separate name so the accumulated `data` dict is not overwritten.
            raw: Dict = json.loads(response)

            log.debug("Pre-parsed data: {}", raw)

            vrf = list(raw["vrfs"].keys())[0]
            routes = raw["vrfs"][vrf]

            log.debug("Pre-validated data: {}", routes)

            validated = AristaRoute(**routes)
            serialized = validated.serialize().export_dict()

            if i == 0:
                data.update(serialized)
            else:
                data["routes"].extend(serialized["routes"])

        except json.JSONDecodeError as err:
            log.critical("Error decoding JSON: {}", str(err))
            raise ParsingError("Error parsing response data")

        except KeyError as err:
            log.critical("'{}' was not found in the response", str(err))
            raise ParsingError("Error parsing response data")

        except IndexError as err:
            log.critical(str(err))
            raise ParsingError("Error parsing response data")

        except ValidationError as err:
            log.critical(str(err))
            raise ParsingError(err.errors())

    log.debug("Serialzed: {}", data)
    return data
Example #29
    logger=log,
    log_directory=params.logging.directory,
    log_format=params.logging.format,
    log_max_size=params.logging.max_size,
)

# Set up syslog logging if enabled.
if params.logging.syslog is not None and params.logging.syslog.enable:
    enable_syslog_logging(
        logger=log,
        syslog_host=params.logging.syslog.host,
        syslog_port=params.logging.syslog.port,
    )

if params.logging.http is not None and params.logging.http.enable:
    log.debug("HTTP logging is enabled")

# Perform post-config initialization string formatting or other
# functions that require access to other config levels. E.g.,
# something in 'params.web.text' needs to be formatted with a value
# from params.
try:
    params.web.text.subtitle = params.web.text.subtitle.format(**params.dict(
        exclude={"web", "queries", "messages"}))

    # If keywords are unmodified (default), add the org name &
    # site_title.
    if _params.Params().site_keywords == params.site_keywords:
        params.site_keywords = sorted(
            {*params.site_keywords, params.org_name, params.site_title})
Example #30
    async def __aexit__(self, exc_type=None, exc_value=None, traceback=None):
        """Close connection on exit."""
        log.debug("Closing session with {}", self.base_url)

        await self._asession.aclose()
        return True
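A hedged usage sketch of the async context-manager pattern this __aexit__ completes, inside an async context; the class name and target address are placeholders:

async with SomeHttpClient(base_url="https://stat.ripe.net") as client:
    data = await client.network_info("1.1.1.1")
# Leaving the block closes the underlying HTTP session via __aexit__.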