Example #1
0
def pdns_query(
        pdns_baseurl: str,
        apikey: str,
        query: str,
        timeout: int,
        proxy_string: Optional[Text] = None,
        batch_size: int = 1000) -> Generator[Dict[str, Any], None, None]:
    """Query the passivedns result of an address.

    pdns_baseurl - the url to the passivedns api (https://api.mnemonic.no)
    apikey - PassiveDNS API key with the passivedns role (minimum)
    query - string fqdn or ipv4/6
    timeout - request timeout towards the API, in seconds
    proxy_string - optional proxy string for the request
    batch_size - number of results to fetch per batch (default 1000)

    Yields result entries (dicts) from the batched API query. On a
    timeout, logs a warning and stops yielding.
    """

    try:
        # rstrip, not strip: only trailing slashes on the base url are
        # extraneous (consistent with event_case_query).
        pdns_url = "{0}/pdns/v3/{1}".format(pdns_baseurl.rstrip("/"), query)

        # Authenticate only when a key is supplied; the API also allows
        # anonymous queries.
        headers = {"Argus-API-Key": apikey} if apikey else {}

        yield from mnemonic.batch_query("GET",
                                        pdns_url,
                                        headers=headers,
                                        timeout=timeout,
                                        proxy_string=proxy_string,
                                        batch_size=batch_size)

    except (urllib3.exceptions.ReadTimeoutError,
            requests.exceptions.ReadTimeout, socket.timeout) as err:
        warning("Timeout ({0.__class__.__name__}), query: {1}".format(
            err, query))
Example #2
0
def event_case_query(
    argus_baseurl: Text,
    apikey: Text,
    last_update: int,
    timeout: int,
    proxy_string: Optional[Text] = None,
) -> Generator[Dict[str, Any], None, None]:
    """Query the argus for events associated to cases.

    argus_baseurl - the url to the ARGUS api (https://api.mnemonic.no)
    apikey - Argus API key
    last_update - lastUpdated timestamp (ms) to search from
    timeout - timeout towards API
    proxy_string - proxy string for the request

    Yields aggregated event dicts from the batched search. Timeouts are
    logged (warning/error) and terminate the generator.
    """

    # Events can be delayed into Argus, so their startTimestamp may be
    # "old" relative to lastUpdatedTimestamp (the time the event was
    # added to Argus). Open the search window one week before last_update.
    one_week_ms = 7 * 24 * 3600 * 1000
    window_start = last_update - one_week_ms

    search_url = f"{argus_baseurl.rstrip('/')}/events/v1/aggregated/search"

    try:
        # Batched POST search; results are yielded as they arrive.
        yield from mnemonic.batch_query(
            "POST",
            search_url,
            headers={"Argus-API-Key": apikey},
            timeout=timeout,
            json_params={
                "lastUpdatedTimestamp": last_update,
                "startTimestamp": window_start,
                "sortBy": ["lastUpdated"],
                "includeFlags": ["NOTIFIED"],
                # Batch size per request.
                "limit": 2000,
                # Events already tied to incidents by a streaming filter
                # are excluded.
                "excludeFlags": ["ASSOCIATED_TO_CASE_BY_FILTER"],
            },
            proxy_string=proxy_string,
        )

    except worker.ServiceTimeout as err:
        warning(f"Service timeout: {err}")

    except (
        urllib3.exceptions.ReadTimeoutError,
        requests.exceptions.ReadTimeout,
        socket.timeout,
    ) as err:
        error(f"Timeout ({type(err).__name__})")