Example #1
async def reverse_all_ips_in_range(
        iprange: str,
        callback: Callable,
        nameservers: Optional[List[str]] = None) -> None:
    """
    Reverse-resolve every IP address in a network range.
    The lookups are awaited one at a time, in order.

    Parameters
    ----------
    iprange: str.
        An IPv4 range formatted as 'x.x.x.x/y'.
        The host portion of the address is ignored.
    callback: Callable.
        Arbitrary postprocessing function.
    nameservers: List[str].
        Optional list of DNS servers.

    Returns
    -------
    out: None.
    """
    __resolver = DNSResolver(timeout=4, nameservers=nameservers)
    for __ip in list_ips_in_network_range(iprange):
        log_query(__ip)
        __host = await reverse_single_ip(ip=__ip, resolver=__resolver)
        callback(__host)
        log_result(__host)
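The loop above awaits each reverse lookup before starting the next one, so the addresses are resolved one at a time. A minimal sketch of a concurrent variant using asyncio.gather, reusing the helpers the original snippet assumes (list_ips_in_network_range, reverse_single_ip, log_query, log_result):

import asyncio

async def reverse_all_ips_in_range_concurrently(iprange, callback, nameservers=None):
    # Same setup as above, but every lookup runs as its own task.
    resolver = DNSResolver(timeout=4, nameservers=nameservers)

    async def process(ip):
        log_query(ip)
        host = await reverse_single_ip(ip=ip, resolver=resolver)
        callback(host)
        log_result(host)

    # Schedule one reverse lookup per address and wait for the whole batch.
    await asyncio.gather(*(process(ip) for ip in list_ips_in_network_range(iprange)))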
Example #2
async def get_ip_reverse(ip):
    from aiodns import DNSResolver
    from aiodns.error import DNSError
    resolver = DNSResolver()
    try:
        res = await wait_for(resolver.gethostbyaddr(ip), dns_timeout)
        return res.name
    except CancelledError:
        raise
    except TimeoutError:
        raise ReverseLookupError(
            f'gethostbyaddr({ip!r}) timed out after {dns_timeout} s')
    except DNSError as e:
        raise ReverseLookupError(str(e))
    except Exception as e:
        raise ReverseLookupError(f'gethostbyaddr({ip!r}) failed: {e!r}')
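The snippet above leans on names defined elsewhere in its module: wait_for, CancelledError and TimeoutError (presumably imported from asyncio), a dns_timeout value, and a ReverseLookupError exception. A hypothetical set of definitions that makes it runnable on its own, with a usage example:

from asyncio import CancelledError, TimeoutError, wait_for, run

dns_timeout = 5.0  # seconds; placeholder, the original module defines its own value

class ReverseLookupError(Exception):
    """Raised when a reverse DNS lookup fails or times out."""

# Usage:
#   print(run(get_ip_reverse('8.8.8.8')))  # e.g. 'dns.google'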
Example #3
File: dns.py Project: thavel/nyuki
    def __init__(self, nyuki, loop=None):
        self._nyuki = nyuki
        self._entry = None
        self._period = None
        self._future = None
        self._callbacks = []
        self._resolver = DNSResolver(loop=loop or asyncio.get_event_loop())

        self._nyuki.register_schema(self.CONF_SCHEMA)
Example #4
    def __init__(self, maxsize=128, nameservers=None):
        """Create an async DNS resolver with an LRU result cache.

        maxsize: maximum number of cached results
        nameservers: optional list of custom DNS nameservers"""

        self._cache = LRUCache(maxsize=maxsize)
        self._hits = 0
        self._miss = 0
        self.resolver = DNSResolver(nameservers=nameservers)
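The constructor above only wires up the cache and the hit/miss counters. A minimal sketch (the method name is hypothetical, not from the original project) of how they might be used, assuming a cachetools-style LRUCache and the aiodns resolver created above:

    async def gethostbyname_cached(self, hostname):
        """Resolve a hostname to IPv4 addresses, serving repeats from the LRU cache."""
        import socket
        if hostname in self._cache:
            self._hits += 1
            return self._cache[hostname]
        self._miss += 1
        result = await self.resolver.gethostbyname(hostname, socket.AF_INET)
        self._cache[hostname] = result.addresses
        return result.addresses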
Example #5
async def get_url_ip_addresses(url):
    from aiodns import DNSResolver
    from socket import AF_INET
    from urllib.parse import urlparse
    hostname = urlparse(url).hostname
    resolver = DNSResolver()
    try:
        r = await wait_for(resolver.gethostbyname(hostname, AF_INET),
                           dns_timeout)
    except CancelledError:
        raise
    except TimeoutError:
        raise Exception(
            f'gethostbyname({hostname!r}, AF_INET) timed out after {dns_timeout} s'
        )
    except Exception as e:
        raise Exception(f'gethostbyname({hostname!r}, AF_INET) failed: {e!r}')
    logger.debug('get_url_ip_addresses: %r -> %r -> %r', url, hostname,
                 r.addresses)
    return hostname, r.addresses
Example #6
    def __init__(
        self,
        workers: int = 10,
        timeout: float = 3.0,
        banner_buffer: Optional[int] = None,
        loop: Optional[asyncio.AbstractEventLoop] = None,
        resolver: Optional[DNSResolver] = None,
    ):
        MxLoop.__init__(self, loop)
        MxWorkPool.__init__(self, workers)
        self._timeout = timeout
        self._banner_buffer = banner_buffer
        self._resolver = resolver or DNSResolver(self._NS, loop=self.loop)
Example #7
    async def _check_server(self, server: str):
        """Check a DNS server and report issues."""
        # Accept either a bare IP address or a "dns://<ip>" URL.
        ip_addr = server[6:] if server.startswith("dns://") else server
        resolver = DNSResolver(nameservers=[ip_addr])
        await resolver.query(DNS_CHECK_HOST, "A")
Example #8
#!/usr/bin/env python3

import asyncio
import sys

from aiodns import DNSResolver
from aiodns.error import DNSError

loop = asyncio.get_event_loop()
resolver = DNSResolver(loop=loop, nameservers=["1.1.1.1", "8.8.8.8"])


async def main():
    for entry in sys.stdin:
        name = entry.strip()
        # Query both IPv4 (A) and IPv6 (AAAA) records; skip names that fail to resolve.
        for qtype in ("A", "AAAA"):
            try:
                ips = await resolver.query(name, qtype)
            except DNSError:
                continue
            for ip in ips:
                print(ip.host)


if __name__ == "__main__":
    loop.run_until_complete(main())
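Each hostname above is resolved one after another. A hedged sketch of a concurrent variant that reads all of stdin first and caps the number of in-flight queries with a semaphore (the helper name and the limit are arbitrary):

async def resolve_all(names, limit=50):
    sem = asyncio.Semaphore(limit)

    async def resolve(name):
        async with sem:  # at most `limit` queries in flight at once
            for qtype in ("A", "AAAA"):
                try:
                    for ip in await resolver.query(name, qtype):
                        print(ip.host)
                except DNSError:
                    pass

    await asyncio.gather(*(resolve(n) for n in names))

# loop.run_until_complete(resolve_all([line.strip() for line in sys.stdin]))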
Example #9
import os
import asyncio

from aiodns import DNSResolver
from aiodns.error import DNSError

loop = asyncio.get_event_loop()
resolver = DNSResolver(loop=loop)


def get_entries(file):
    with open(file, "r") as entries:
        return entries.readlines()


def sort_entries(file):
    """
    Sort the blacklist alphabetically. Keeps things nice and ordered.
    """

    entries = get_entries(file)
    entries.sort()

    with open(file, "w") as new:
        new.writelines(entries)


async def remove_dead_entries(entries):
    """
    Remove any domains which don't have A (IPv4) or AAAA (IPv6) records.
    Could be written better...
    """
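The example is truncated at this point in the source. A minimal sketch (not the project's actual code) of how remove_dead_entries might be implemented, reusing the module-level resolver and the DNSError import from the top of the example:

async def remove_dead_entries(entries):
    """Keep only the entries that still resolve (sketch)."""
    alive = []
    for entry in entries:
        domain = entry.strip()
        for qtype in ("A", "AAAA"):
            try:
                await resolver.query(domain, qtype)
            except DNSError:
                continue
            alive.append(entry)  # at least one record type resolved
            break
    return alive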
Example #10
async def download_file(url: str,
                        fh: typing.Union[io.BufferedIOBase, typing.BinaryIO],
                        loop: typing.Optional[
                            asyncio.AbstractEventLoop] = None,
                        ssrf_protection: bool = True,
                        max_size: typing.Optional[int] = None,
                        https_only: bool = False,
                        keep_files_open: bool = False,
                        **session_args) -> io.BufferedIOBase:
    """
    Asynchronously downloads a file using the supplied BufferedIOBase
    Args:
        url: The file URL
        fh: Any valid BufferedIO instance. Generally either a writable file opened in binary mode, or a BytesIO instance
        loop: An active event loop to use. If not provided, the current event loop is used.
        ssrf_protection: Server Side Request Forgery Protection: When enabled, the hostname is resolved prior to making
            an HTTP request to ensure that the provided hostname does not resolve to a private IP address space.
        max_size: The maximum filesize allowed in bytes. If the server returns a content size greater than this limit,
            or tries to send us more content than the server advertises, the download will be rejected.
        https_only: When true, non-secure download requests will be rejected.
        keep_files_open: Default behavior is to seek to the beginning and return an open file-handler for BytesIO
            instances while returning a closed file-handler for everything else. Set this to True if you always want
            an open file handler returned regardless of the fh type.
        **session_args: Any additional kwargs are passed along to aiohttp.ClientSession()

    Returns:
        io.BufferedIOBase
    """
    # Make sure we have a valid event loop (we are already inside a coroutine,
    # so creating a brand-new loop here would conflict with the running one)
    loop = loop or asyncio.get_event_loop()

    # Make sure we have a valid scheme
    if not url.lower().startswith(('https://', 'http://')):
        if https_only:
            url = 'https://' + url
        else:
            url = 'http://' + url

    # Make sure the domain is secure if https_only is set
    if https_only and not url.lower().startswith('https://'):
        raise BadUrlError(ERROR_MESSAGE_INSECURE_URL)

    # Parse the URL into components
    parsed_url = urlparse(url)
    if not parsed_url.hostname:
        raise BadUrlError(ERROR_MESSAGE_NO_HOSTNAME)

    # Pre-resolve the hostname if necessary
    if ssrf_protection:
        resolver = DNSResolver(loop=loop)

        # We perform DNS resolutions via the system's hosts file first, if available
        try:
            res = await resolver.gethostbyname(parsed_url.hostname,
                                               socket.AF_INET)
        except DNSError:
            raise BadUrlError(ERROR_MESSAGE_DNS_FAILURE)

        for ip in res.addresses:
            ip = ipaddress.ip_address(ip)  # type: typing.Union[ipaddress.IPv4Address, ipaddress.IPv6Address]
            if ip.is_private:
                raise SecurityError(ip)

    # If we're still here, everything should be good. Let's ready the download request.
    async with ClientSession(loop=loop, raise_for_status=True,
                             **session_args) as session:  # type: ClientSession
        async with session.get(url, read_until_eof=max_size is None) as response:  # type: ClientResponse
            if (response.content_length
                    and max_size) and response.content_length > max_size:
                fh.close()
                if hasattr(fh, 'name'):
                    os.remove(fh.name)
                raise FilesizeError(response.content_length)

            bytes_read = 0
            async for data in response.content.iter_chunked(1024):
                # Filesize exceeded. Nuke the file and abort.
                if max_size and bytes_read > max_size:
                    fh.close()
                    if hasattr(fh, 'name'):
                        os.remove(fh.name)

                    raise FilesizeError(bytes_read)

                fh.write(data)
                bytes_read += len(data)  # count actual bytes; the last chunk may be short

    if isinstance(fh, io.BytesIO):
        fh.seek(0)
        return fh

    if not keep_files_open:
        fh.close()

    return fh
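A short usage sketch for download_file; the URL is a placeholder and the helper name is hypothetical:

import asyncio
import io

async def fetch_into_memory():
    buf = io.BytesIO()
    # Download into an in-memory buffer, rejecting anything larger than 1 MiB.
    await download_file('https://example.com/logo.png', buf, max_size=1024 * 1024)
    return buf.getvalue()

data = asyncio.run(fetch_into_memory())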