Example #1
def process_domain(registered_domain, updated_domains, now=None):
    """Update the statistics for all fuzz results for this domain."""
    if now is None:
        now = datetime.datetime.now()

    updated_domains = set(updated_domains)

    delta_report = repository.get_delta_report(registered_domain)
    if delta_report is None:
        return updated_domains

    for domain in delta_reports.extract_domains(delta_report):

        if domain in updated_domains:
            continue

        updated = statistics_repository.noise_stat_last_updated(domain)
        if updated is not None and (now - updated) < FREQUENCY:
            continue

        stat = statistics_repository.get_noise_stat(domain)
        if stat is None:
            stat = NoiseStatistic(domain, deltas=1)
        else:
            stat.increment()
            stat.update_window()

        statistics_repository.set_noise_stat(stat)
        statistics_repository.mark_noise_stat_as_updated(domain)
        updated_domains.add(domain)

    return updated_domains
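
The NoiseStatistic class used above is not shown in the example. A minimal sketch of what such a helper might look like, assuming a simple rolling window (the attribute names and the 30-day window length are assumptions, not the project's actual code):

import datetime


class NoiseStatistic(object):
    """Hypothetical sketch of the statistic object used in Example #1."""

    # Assumed rolling-window length; the real value is not shown above.
    WINDOW = datetime.timedelta(days=30)

    def __init__(self, domain, deltas=0, window_start=None):
        self.domain = domain
        self.deltas = deltas
        self.window_start = window_start or datetime.datetime.now()

    def increment(self):
        """Record one more delta observed for this domain."""
        self.deltas += 1

    def update_window(self):
        """Start a new window (and reset the count) once the old one expires."""
        if datetime.datetime.now() - self.window_start > self.WINDOW:
            self.deltas = 1
            self.window_start = datetime.datetime.now()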
Example #2
def process_domain(registered_domain, now=None):
    """Update the statistics for all fuzz results for this domain."""
    if now is None:
        now = datetime.datetime.now()

    delta_report = repository.get_delta_report(registered_domain)
    if delta_report is None:
        return

    for domain in delta_reports.extract_domains(delta_report):

        updated = statistics_repository.noise_stat_last_updated(domain)
        if updated is not None and (now - updated) < FREQUENCY:
            continue

        stat = statistics_repository.get_noise_stat(domain)
        if stat is None:
            stat = NoiseStatistic(domain, deltas=1)
        else:
            stat.increment()
            stat.update_window()

        statistics_repository.set_noise_stat(stat)
        statistics_repository.mark_noise_stat_as_updated(domain)
Example #3
def view(hexdomain):
    """Return new atom items for changes in resolved domains."""
    # Parse out the requested domain
    domain = dnstwister.tools.parse_domain(hexdomain)

    # Redirect old base64 requests to the new format.
    if domain is None:
        redirect_url = _base64_redirect(hexdomain)
        if redirect_url is not None:
            return flask.redirect(redirect_url, code=302)
        flask.abort(
            400,
            'Malformed domain or domain not represented in hexadecimal format.'
        )

    # Prepare a feed
    feed = werkzeug.contrib.atom.AtomFeed(
        title='dnstwister report for {}'.format(domain),
        feed_url='{}atom/{}'.format(flask.request.url_root, hexdomain),
        url='{}search/{}'.format(flask.request.url_root, hexdomain),
    )

    # The publish/update date for the placeholder is locked to 00:00:00.000
    # (midnight UTC) on the current day.
    today = datetime.datetime.now().replace(hour=0,
                                            minute=0,
                                            second=0,
                                            microsecond=0)

    # Ensure the domain is registered.
    if not repository.is_domain_registered(domain):
        repository.register_domain(domain)

    # Retrieve the delta report
    delta_report = repository.get_delta_report(domain)

    # If we don't have a delta report yet, show the placeholder.
    if delta_report is None:
        feed.add(
            title='No report yet for {}'.format(domain),
            title_type='text',
            content=flask.render_template('syndication/atom/placeholder.html',
                                          domain=domain),
            content_type='html',
            author='dnstwister',
            updated=today,
            published=today,
            id='waiting:{}'.format(domain),
            url=feed.url,
        )

    else:

        # If there is a delta report, generate the feed and return it. We use
        # the actual date of generation here.
        updated = repository.delta_report_updated(domain)
        if updated is None:
            updated = today

        # Setting the ID to epoch seconds, floored per 24 hours, ensures the
        # updates happen at most once every 24 hours.
        id_24hr = (updated - datetime.datetime(1970, 1, 1)).total_seconds()

        common_kwargs = {
            'title_type': 'text',
            'content_type': 'html',
            'author': 'dnstwister',
            'updated': updated,
            'published': updated,
            'url': feed.url,
        }

        for (dom, ip) in delta_report['new']:
            feed.add(title='NEW: {}'.format(dom),
                     content=flask.render_template(
                         'syndication/atom/new.html',
                         ip=ip,
                         hexdomain=binascii.hexlify(dom)),
                     id='new:{}:{}:{}'.format(dom, ip, id_24hr),
                     **common_kwargs)

        for (dom, old_ip, new_ip) in delta_report['updated']:
            feed.add(title='UPDATED: {}'.format(dom),
                     content=flask.render_template(
                         'syndication/atom/updated.html',
                         new_ip=new_ip,
                         old_ip=old_ip,
                         hexdomain=binascii.hexlify(dom),
                     ),
                     id='updated:{}:{}:{}:{}'.format(dom, old_ip, new_ip,
                                                     id_24hr),
                     **common_kwargs)

        for dom in delta_report['deleted']:
            feed.add(title='DELETED: {}'.format(dom),
                     content=flask.render_template(
                         'syndication/atom/deleted.html'),
                     id='deleted:{}:{}'.format(dom, id_24hr),
                     **common_kwargs)

    feed_response = feed.get_response()

    repository.mark_delta_report_as_read(domain)

    return feed_response
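
The _base64_redirect helper called in Example #3 is not defined there. A hedged sketch, assuming it maps a legacy base64-encoded domain onto the newer hex-encoded /atom URL (the validation rules are an assumption):

import base64
import binascii

import flask


def _base64_redirect(encoded_domain):
    """Return the hex-format /atom URL for a legacy base64 request, or None.

    Sketch only: the real helper's error handling is not shown in Example #3.
    """
    try:
        domain = base64.b64decode(encoded_domain)
    except (TypeError, binascii.Error):
        return None
    if not domain:
        return None
    return '{}atom/{}'.format(flask.request.url_root, binascii.hexlify(domain))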
Example #4
def process_sub(sub_id, detail):
    """Process a subscription."""
    domain = detail['domain']
    email_address = detail['email_address']

    hide_noisy = False
    try:
        hide_noisy = bool(detail['hide_noisy'])
    except KeyError:
        pass

    sub_log = sub_id[:10]

    # Ensure the domain is registered for reporting, register if not.
    repository.register_domain(domain)

    # Mark delta report as "read" so it's not unsubscribed.
    repository.mark_delta_report_as_read(domain)

    # Don't send more than once every 24 hours
    last_sent = repository.email_last_send_for_sub(sub_id)
    if last_sent is not None:
        age_last_sent = datetime.datetime.now() - last_sent
        if age_last_sent < datetime.timedelta(seconds=PERIOD):
            print '<24h: {}'.format(sub_log)
            return

    # Grab the delta
    delta = repository.get_delta_report(domain)
    if delta is None:
        print 'No delta: {}'.format(sub_log)
        return

    # Grab the delta report update time.
    delta_updated = repository.delta_report_updated(domain)

    # If the delta report was updated > 23 hours ago, we're too close to the
    # next delta report. This means we should hold off so we don't send the
    # same delta report twice.
    if delta_updated is not None:
        age_delta_updated = datetime.datetime.now() - delta_updated
        if age_delta_updated > datetime.timedelta(hours=23):
            print '>23h: {}'.format(sub_log)
            return

    # Filter out noisy domains if that's the user's preference.
    if hide_noisy and feature_flags.enable_noisy_domains():
        delta = remove_noisy(delta)

    # Don't email if no changes
    new = delta['new'] if len(delta['new']) > 0 else None
    updated = delta['updated'] if len(delta['updated']) > 0 else None
    deleted = delta['deleted'] if len(delta['deleted']) > 0 else None

    if new is updated is deleted is None:
        print 'Empty: {}'.format(sub_log)
        return

    # Add analysis links
    if new is not None:
        new = [(dom, ip, ANALYSIS_ROOT.format(tools.encode_domain(dom)))
               for (dom, ip) in new]

    if updated is not None:
        updated = [(dom, old_ip, new_ip,
                    ANALYSIS_ROOT.format(tools.encode_domain(dom)))
                   for (dom, old_ip, new_ip) in updated]

    # Email
    noisy_link = None
    if hide_noisy and feature_flags.enable_noisy_domains():
        noisy_link = 'https://dnstwister.report/email/{}/noisy'.format(sub_id)

    body = email_tools.render_email(
        'report.html',
        domain=domain,
        new=new,
        updated=updated,
        deleted=deleted,
        unsubscribe_link='https://dnstwister.report/email/unsubscribe/{}'.format(sub_id),
        noisy_link=noisy_link)

    # Mark as emailed to ensure we don't flood if there's an error after the
    # actual email has been sent.
    repository.update_last_email_sub_sent_date(sub_id)

    emailer.send(
        email_address, u'dnstwister report for {}'.format(
            template_tools.domain_renderer(domain)), body)
    print 'Sent: {}'.format(sub_log)
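
Example #4 filters the delta through a remove_noisy helper that is not shown. A sketch of the filtering step, written here against an explicit is_noisy predicate (the real helper presumably consults the statistics repository from Examples #1 and #2, so the extra parameter is an assumption for illustration):

def remove_noisy(delta, is_noisy):
    """Return a copy of the delta report with noisy domains stripped out."""
    return {
        'new': [(dom, ip)
                for (dom, ip) in delta['new']
                if not is_noisy(dom)],
        'updated': [(dom, old_ip, new_ip)
                    for (dom, old_ip, new_ip) in delta['updated']
                    if not is_noisy(dom)],
        'deleted': [dom
                    for dom in delta['deleted']
                    if not is_noisy(dom)],
    }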
Example #5
def process_sub(sub_id, detail):
    """Process a subscription."""

    domain = detail['domain']
    email_address = detail['email_address']

    # Ensure the domain is registered for reporting, register if not.
    repository.register_domain(domain)

    # Mark delta report as "read" so it's not unsubscribed.
    repository.mark_delta_report_as_read(domain)

    # Don't send more than once every 24 hours
    last_sent = repository.email_last_send_for_sub(sub_id)
    if last_sent is not None:
        age_last_sent = datetime.datetime.now() - last_sent
        if age_last_sent < datetime.timedelta(seconds=PERIOD):
            print 'Skipping {} + {}, < 24h'.format(
                email_address, domain
            )
            return

    # Grab the delta
    delta = repository.get_delta_report(domain)
    if delta is None:
        print 'Skipping {} + {}, no delta report yet'.format(
            email_address, domain
        )
        return

    # Grab the delta report update time.
    delta_updated = repository.delta_report_updated(domain)

    # If the delta report was updated > 23 hours ago, we're too close to the
    # next delta report. This means we should hold off so we don't send the
    # same delta report twice.
    if delta_updated is not None:
        age_delta_updated = datetime.datetime.now() - delta_updated
        if age_delta_updated > datetime.timedelta(hours=23):
            print 'Skipping {} + {}, delta > 23h old'.format(
                email_address, domain
            )
            return

    # Don't email if no changes
    new = delta['new'] if len(delta['new']) > 0 else None
    updated = delta['updated'] if len(delta['updated']) > 0 else None
    deleted = delta['deleted'] if len(delta['deleted']) > 0 else None

    if new is updated is deleted is None:
        print 'Skipping {} + {}, no changes'.format(
            email_address, domain
        )
        return

    # Add analysis links
    if new is not None:
        new = [(dom, ip, ANALYSIS_ROOT.format(binascii.hexlify(dom)))
               for (dom, ip)
               in new]

    if updated is not None:
        updated = [(dom, old_ip, new_ip, ANALYSIS_ROOT.format(binascii.hexlify(dom)))
                   for (dom, old_ip, new_ip)
                   in updated]

    # Email
    body = email_tools.render_email(
        'report.html',
        domain=domain,
        new=new,
        updated=updated,
        deleted=deleted,
        unsubscribe_link='https://dnstwister.report/email/unsubscribe/{}'.format(sub_id)
    )

    # Mark as emailed to ensure we don't flood if there's an error after the
    # actual email has been sent.
    repository.update_last_email_sub_sent_date(sub_id)

    emailer.send(
        email_address, 'dnstwister report for {}'.format(domain), body
    )
    print 'Emailed delta for {} to {}'.format(domain, email_address)
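
The ANALYSIS_ROOT and PERIOD constants used in Examples #4 and #5 are defined elsewhere; their likely shape, inferred from how they are used (both values are assumptions):

# Inferred from usage only; the project's actual values are not shown above.
ANALYSIS_ROOT = 'https://dnstwister.report/analyse/{}'  # assumed format string
PERIOD = 60 * 60 * 24  # "don't send more than once every 24 hours", in seconds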
Example #6
def view(hexdomain):
    """Return new atom items for changes in resolved domains."""
    # Parse out the requested domain
    domain = dnstwister.tools.parse_domain(hexdomain)
    if domain is None:
        flask.abort(400, 'Malformed domain or domain not represented in hexadecimal format.')

    # Prepare a feed
    feed = werkzeug.contrib.atom.AtomFeed(
        title='dnstwister report for {}'.format(domain),
        feed_url='{}atom/{}'.format(flask.request.url_root, hexdomain),
        url='{}search/{}'.format(flask.request.url_root, hexdomain),
    )

    # The publish/update date for the placeholder is locked to 00:00:00.000
    # (midnight UTC) on the current day.
    today = datetime.datetime.now().replace(
        hour=0, minute=0, second=0, microsecond=0
    )

    # Ensure the domain is registered.
    if not repository.is_domain_registered(domain):
        repository.register_domain(domain)

    # Retrieve the delta report
    delta_report = repository.get_delta_report(domain)

    # If we don't have a delta report yet, show the placeholder.
    if delta_report is None:
        feed.add(
            title='No report yet for {}'.format(domain),
            title_type='text',
            content=flask.render_template(
                'syndication/atom/placeholder.html', domain=domain
            ),
            content_type='html',
            author='dnstwister',
            updated=today,
            published=today,
            id='waiting:{}'.format(domain),
            url=feed.url,
        )

    else:

        # If there is a delta report, generate the feed and return it. We use
        # the actual date of generation here.
        updated = repository.delta_report_updated(domain)
        if updated is None:
            updated = today

        # Setting the ID to epoch seconds, floored per 24 hours, ensures the
        # updates happen at most once every 24 hours.
        id_24hr = (updated - datetime.datetime(1970, 1, 1)).total_seconds()

        common_kwargs = {
            'title_type': 'text',
            'content_type': 'html',
            'author': 'dnstwister',
            'updated': updated,
            'published': updated,
            'url': feed.url,
        }

        for (dom, ip) in delta_report['new']:
            feed.add(
                title='NEW: {}'.format(dom),
                content=flask.render_template(
                    'syndication/atom/new.html',
                    ip=ip, hexdomain=binascii.hexlify(dom)
                ),
                id='new:{}:{}:{}'.format(dom, ip, id_24hr),
                **common_kwargs
            )

        for (dom, old_ip, new_ip) in delta_report['updated']:
            feed.add(
                title='UPDATED: {}'.format(dom),
                content=flask.render_template(
                    'syndication/atom/updated.html',
                    new_ip=new_ip, old_ip=old_ip,
                    hexdomain=binascii.hexlify(dom),
                ),
                id='updated:{}:{}:{}:{}'.format(dom, old_ip, new_ip, id_24hr),
                **common_kwargs
            )

        for dom in delta_report['deleted']:
            feed.add(
                title='DELETED: {}'.format(dom),
                content=flask.render_template(
                    'syndication/atom/deleted.html',
                ),
                id='deleted:{}:{}'.format(dom, id_24hr),
                **common_kwargs
            )

    feed_response = feed.get_response()

    repository.mark_delta_report_as_read(domain)

    return feed_response
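
Examples #3 and #6 are Flask view functions; a minimal sketch of how such a view could be wired to a route (the URL rule and application object are assumptions, and the project may well use a blueprint instead):

import flask

app = flask.Flask(__name__)

# Assumed URL rule: the hex-encoded domain is passed straight to view().
app.add_url_rule('/atom/<hexdomain>', view_func=view)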
Example #7
def process_sub(sub_id, detail):
    """Process a subscription."""

    domain = detail['domain']
    email_address = detail['email_address']

    # Ensure the domain is registered for reporting, register if not.
    repository.register_domain(domain)

    # Mark delta report as "read" so it's not unsubscribed.
    repository.mark_delta_report_as_read(domain)

    # Don't send more than once every 24 hours
    last_sent = repository.email_last_send_for_sub(sub_id)
    if last_sent is not None:
        age_last_sent = datetime.datetime.now() - last_sent
        if age_last_sent < datetime.timedelta(seconds=PERIOD):
            print 'Skipping {} + {}, < 24h'.format(email_address, domain)
            return

    delta_report = repository.get_delta_report(domain)
    if delta_report is None:
        print 'Skipping {} + {}, no delta report yet'.format(
            email_address, domain)
        return

    delta_updated = repository.delta_report_updated(domain)

    # If the delta report was updated > 23 hours ago, we're too close to the
    # next delta report. This means we should hold off so we don't send the
    # same delta report twice.
    if delta_updated is not None:
        age_delta_updated = datetime.datetime.now() - delta_updated
        if age_delta_updated > datetime.timedelta(hours=23):
            print 'Skipping {} + {}, delta > 23h old'.format(
                email_address, domain)
            return

    delta_domains = delta_reports.extract_domains(delta_report)
    noisy_domains = get_noisy_domains(delta_domains)

    report = EmailReport(delta_report,
                         noisy_domains,
                         include_noisy_domains=include_noisy_domains())

    if not report.has_results():
        print 'Skipping {} + {}, no results to put in email'.format(
            email_address, domain)
        return

    try:
        send_email(domain, email_address, sub_id, report)

    except Exception:
        print 'Failed to send email for {}:\n {}'.format(
            domain, traceback.format_exc())

    finally:
        # Mark as emailed to ensure we don't flood if there's an error after
        # the actual email has been sent.
        repository.update_last_email_sub_sent_date(sub_id)
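
Example #7 moves rendering and sending into a send_email helper that is not shown. A sketch reconstructed from Examples #4 and #5, reusing the same email_tools and emailer modules (the report keyword passed to the template is an assumption):

def send_email(domain, email_address, sub_id, report):
    """Render the report email and send it (hypothetical reconstruction)."""
    body = email_tools.render_email(
        'report.html',
        domain=domain,
        report=report,
        unsubscribe_link='https://dnstwister.report/email/unsubscribe/{}'.format(sub_id),
    )
    emailer.send(
        email_address,
        u'dnstwister report for {}'.format(domain),
        body,
    )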