Example No. 1
def read_answers_lmdb(dnsreplies_factory: DNSRepliesFactory,
                      qid: QID) -> Mapping[ResolverID, DNSReply]:
    assert lmdb is not None, "LMDB wasn't initialized!"
    adb = lmdb.get_db(LMDB.ANSWERS)
    with lmdb.env.begin(adb) as txn:
        replies_blob = txn.get(qid)
    assert replies_blob
    return dnsreplies_factory.parse(replies_blob)
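
The helper above relies on a module-level `lmdb` handle (Example No. 7 below sets one via `global lmdb`) and on a factory built from the configured server names. A rough sketch of the same read-and-parse pattern outside the tests, assuming the classes are importable from `respdiff.database` (the import path, the environment directory and the example QID are assumptions, not taken from the source):

# Sketch only: import path, envdir and QID value are illustrative assumptions.
from respdiff.database import LMDB, DNSRepliesFactory, qid2key

servers = ['kresd', 'bind']
dnsreplies_factory = DNSRepliesFactory(servers)

with LMDB('/tmp/respdiff-envdir', readonly=True) as lmdb:
    adb = lmdb.open_db(LMDB.ANSWERS)
    with lmdb.env.begin(adb) as txn:
        replies_blob = txn.get(qid2key(1))      # answers stored under query no. 1
    replies = dnsreplies_factory.parse(replies_blob)
    for resolver, reply in replies.items():
        print(resolver, len(reply.wire), reply.time)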
Example No. 2
def test_dns_replies_factory():
    with pytest.raises(ValueError):
        DNSRepliesFactory([])

    rf = DNSRepliesFactory(['a'])
    replies = rf.parse(DR_TIMEOUT_BIN)
    assert replies['a'] == DR_TIMEOUT

    rf2 = DNSRepliesFactory(['a', 'b'])
    bin_data = DR_A_0_BIN + DR_ABCD_1_BIN
    replies = rf2.parse(bin_data)
    assert replies['a'] == DR_A_0
    assert replies['b'] == DR_ABCD_1

    with pytest.raises(ValueError):
        rf2.parse(DR_A_0_BIN + b'a')

    assert rf2.serialize(replies) == bin_data
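
The constants used above (DR_A_0, DR_ABCD_1, and their _BIN counterparts) are fixtures from the same test suite. A minimal round-trip sketch of the factory itself, assuming the `respdiff.database` import path and that `DNSReply` takes a wire blob plus a response time, as the tests suggest; the wire bytes and times below are made up:

# Sketch only: the import path is an assumption; wire bytes and times are invented.
from respdiff.database import DNSReply, DNSRepliesFactory

factory = DNSRepliesFactory(['kresd', 'bind'])
replies = {
    'kresd': DNSReply(b'\x12\x34', 0.015),   # some wire data, answered in 15 ms
    'bind': DNSReply(b'', 2.0),              # empty wire, e.g. no usable answer
}

blob = factory.serialize(replies)            # one binary blob, ordered by server list
parsed = factory.parse(blob)                 # back to a per-server mapping
assert parsed['kresd'].wire == b'\x12\x34'
assert parsed['bind'].wire == b''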
Example No. 3
def test_lmdb_answers_single_server():
    envdir = os.path.join(LMDB_DIR, 'answers_single_server')
    with LMDB(envdir) as lmdb:
        adb = lmdb.open_db(LMDB.ANSWERS)
        meta = MetaDatabase(lmdb, ['kresd'])
        assert meta.read_start_time() == INT_3M
        assert meta.read_end_time() == INT_3M

        servers = meta.read_servers()
        assert len(servers) == 1
        assert servers[0] == 'kresd'

        with lmdb.env.begin(adb) as txn:
            data = txn.get(qid2key(INT_3M))
        df = DNSRepliesFactory(servers)
        replies = df.parse(data)
        assert len(replies) == 1
        assert replies[servers[0]] == DNSReply(b'a', TIME_3M)
Example No. 4
def test_lmdb_answers_multiple_servers():
    envdir = os.path.join(LMDB_DIR, 'answers_multiple_servers')
    with LMDB(envdir) as lmdb:
        adb = lmdb.open_db(LMDB.ANSWERS)
        meta = MetaDatabase(lmdb, ['kresd', 'bind', 'unbound'])
        assert meta.read_start_time() is None
        assert meta.read_end_time() is None

        servers = meta.read_servers()
        assert len(servers) == 3
        assert servers[0] == 'kresd'
        assert servers[1] == 'bind'
        assert servers[2] == 'unbound'

        df = DNSRepliesFactory(servers)

        with lmdb.env.begin(adb) as txn:
            data = txn.get(qid2key(INT_3M))

        replies = df.parse(data)
        assert len(replies) == 3
        assert replies[servers[0]] == DNSReply(b'', TIME_3M)
        assert replies[servers[1]] == DNSReply(b'ab', TIME_3M)
        assert replies[servers[2]] == DNSReply(b'a', TIME_3M)
Example No. 5
def load_data(
    txn: lmdb.Transaction, dnsreplies_factory: DNSRepliesFactory
) -> Dict[ResolverID, List[Tuple[float, Optional[int]]]]:
    data = {}  # type: Dict[ResolverID, List[Tuple[float, Optional[int]]]]
    cursor = txn.cursor()
    for value in cursor.iternext(keys=False, values=True):
        replies = dnsreplies_factory.parse(value)
        for resolver, reply in replies.items():
            if len(reply.wire) < 12:
                # 12 is chosen to be consistent with dnspython's ShortHeader exception
                rcode = None
            else:
                (flags, ) = struct.unpack('!H', reply.wire[2:4])
                rcode = flags & 0x000f
            data.setdefault(resolver, []).append((reply.time, rcode))
    return data
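
The header handling in load_data() comes straight from the DNS wire format: a message shorter than the 12-byte header cannot be parsed at all, otherwise bytes 2-3 hold the FLAGS word and the RCODE is its lowest 4 bits. A standalone illustration with a hand-built header (the sample message below is made up for this sketch):

import struct

# 12-byte DNS header: ID=0x1234, FLAGS=0x8003 (QR=1, RCODE=3/NXDOMAIN), zero counts.
wire = struct.pack('!HHHHHH', 0x1234, 0x8003, 0, 0, 0, 0)

if len(wire) < 12:
    rcode = None                              # too short, same as load_data() above
else:
    (flags, ) = struct.unpack('!H', wire[2:4])
    rcode = flags & 0x000f                    # RCODE = low 4 bits of FLAGS

assert rcode == 3                             # NXDOMAIN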
Example No. 6
def main():
    cli.setup_logging()
    parser = argparse.ArgumentParser(
        description='attempt to reproduce original diffs from JSON report')
    cli.add_arg_envdir(parser)
    cli.add_arg_config(parser)
    cli.add_arg_datafile(parser)
    parser.add_argument('--sequential', action='store_true', default=False,
                        help='send one query at a time (slower, but more reliable)')

    args = parser.parse_args()
    sendrecv.module_init(args)
    datafile = cli.get_datafile(args)
    report = DiffReport.from_json(datafile)
    restart_scripts = repro.get_restart_scripts(args.cfg)
    servers = args.cfg['servers']['names']
    dnsreplies_factory = DNSRepliesFactory(servers)

    if args.sequential:
        nproc = 1
    else:
        nproc = args.cfg['sendrecv']['jobs']

    if report.reprodata is None:
        report.reprodata = ReproData()

    with LMDB(args.envdir, readonly=True) as lmdb:
        lmdb.open_db(LMDB.QUERIES)
        cli.check_metadb_servers_version(lmdb, servers)

        dstream = repro.query_stream_from_disagreements(lmdb, report)
        try:
            repro.reproduce_queries(
                dstream, report, dnsreplies_factory, args.cfg['diff']['criteria'],
                args.cfg['diff']['target'], restart_scripts, nproc)
        finally:
            # make sure data is saved in case of interrupt
            report.export_json(datafile)
Example No. 7
def main():
    global lmdb

    cli.setup_logging()
    parser = argparse.ArgumentParser(
        description='compute diff from answers stored in LMDB and write diffs to LMDB')
    cli.add_arg_envdir(parser)
    cli.add_arg_config(parser)
    cli.add_arg_datafile(parser)

    args = parser.parse_args()
    datafile = cli.get_datafile(args, check_exists=False)
    criteria = args.cfg['diff']['criteria']
    target = args.cfg['diff']['target']
    servers = args.cfg['servers']['names']

    with LMDB(args.envdir) as lmdb_:
        # NOTE: To avoid an lmdb.BadRslotError, probably caused by weird
        # interaction when using multiple transactions / processes, open a
        # separate environment. Also, any dbs have to be opened before
        # using MetaDatabase().
        report = prepare_report(lmdb_, servers)
        cli.check_metadb_servers_version(lmdb_, servers)

    with LMDB(args.envdir, fast=True) as lmdb_:
        lmdb = lmdb_
        lmdb.open_db(LMDB.ANSWERS)
        lmdb.open_db(LMDB.DIFFS, create=True, drop=True)
        qid_stream = lmdb.key_stream(LMDB.ANSWERS)

        dnsreplies_factory = DNSRepliesFactory(servers)
        compare_func = partial(compare_lmdb_wrapper, criteria, target,
                               dnsreplies_factory)
        with pool.Pool() as p:
            for _ in p.imap_unordered(compare_func, qid_stream, chunksize=10):
                pass
        export_json(datafile, report)
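
The partial() call above binds the fixed arguments (criteria, target, factory) so each pool worker only receives the streamed QID, and imap_unordered() yields results as soon as any worker finishes. A generic, self-contained sketch of that pattern; the worker function and data here are made up for illustration:

from functools import partial
from multiprocessing import Pool

def work(prefix: str, item: int) -> str:
    # stand-in for compare_lmdb_wrapper(): fixed arguments first, streamed item last
    return prefix + str(item)

if __name__ == '__main__':
    bound = partial(work, 'qid-')             # bind the fixed arguments up front
    with Pool() as p:
        # results arrive as workers finish, not necessarily in input order
        for result in p.imap_unordered(bound, range(5), chunksize=2):
            print(result)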
Example No. 8
def main():
    cli.setup_logging()
    parser = argparse.ArgumentParser(
        description='Plot query response time histogram from answers stored '
        'in LMDB')
    parser.add_argument(
        '-o',
        '--output',
        type=str,
        default='histogram',
        help='output directory for image files (default: histogram)')
    parser.add_argument('-f',
                        '--format',
                        type=str,
                        default='png',
                        help='output image format (default: png)')
    parser.add_argument('-c',
                        '--config',
                        default='respdiff.cfg',
                        dest='cfgpath',
                        help='config file (default: respdiff.cfg)')
    parser.add_argument('envdir',
                        type=str,
                        help='LMDB environment to read answers from')
    args = parser.parse_args()
    config = cfg.read_cfg(args.cfgpath)
    servers = config['servers']['names']
    dnsreplies_factory = DNSRepliesFactory(servers)

    with LMDB(args.envdir, readonly=True) as lmdb_:
        adb = lmdb_.open_db(LMDB.ANSWERS)

        try:
            MetaDatabase(lmdb_, servers,
                         create=False)  # check version and servers
        except NotImplementedError as exc:
            logging.critical(exc)
            sys.exit(1)

        with lmdb_.env.begin(adb) as txn:
            data = load_data(txn, dnsreplies_factory)

    def get_filepath(filename) -> str:
        return os.path.join(args.output, filename + '.' + args.format)

    if not os.path.exists(args.output):
        os.makedirs(args.output)
    create_histogram({k: [tup[0] for tup in d]
                      for (k, d) in data.items()}, get_filepath('all'), 'all',
                     config)

    # rcode-specific queries
    with pool.Pool() as p:
        fargs = []
        for rcode in range(HISTOGRAM_RCODE_MAX + 1):
            rcode_text = dns.rcode.to_text(rcode)
            filepath = get_filepath(rcode_text)
            fargs.append((data, filepath, rcode_text, config, rcode))
        p.starmap(histogram_by_rcode, fargs)
    filepath = get_filepath('unparsed')
    histogram_by_rcode(data, filepath, 'unparsed queries', config, None)