Example #1
def corrupt_share(options):
    import random
    from allmydata.storage.mutable import MutableShareFile
    from allmydata.storage.immutable import ShareFile
    from allmydata.mutable.layout import unpack_header
    from allmydata.immutable.layout import ReadBucketProxy
    out = options.stdout
    fn = options['filename']
    assert options["offset"] == "block-random", "other offsets not implemented"

    # first, what kind of share is it?

    def flip_bit(start, end):
        # Pick a random byte in [start, end) and a random bit within it.
        offset = random.randrange(start, end)
        bit = random.randrange(0, 8)
        print("[%d..%d):  %d.b%d" % (start, end, offset, bit), file=out)
        with open(fn, "rb+") as f:
            f.seek(offset)
            d = f.read(1)
            d = bytes([d[0] ^ (0x01 << bit)])  # flip the chosen bit
            f.seek(offset)
            f.write(d)

    with open(fn, "rb") as f:
        prefix = f.read(32)

    if MutableShareFile.is_valid_header(prefix):
        # mutable
        m = MutableShareFile(fn)
        with open(fn, "rb") as f:
            f.seek(m.DATA_OFFSET)
            # Read enough data to get a mutable header to unpack.
            data = f.read(2000)
        # make sure this slot contains an SDMF share
        assert data[0:1] == b"\x00", "non-SDMF mutable shares not supported"

        (version, ig_seqnum, ig_roothash, ig_IV, ig_k, ig_N, ig_segsize,
         ig_datalen, offsets) = unpack_header(data)

        assert version == 0, "we only handle v0 SDMF files"
        start = m.DATA_OFFSET + offsets["share_data"]
        end = m.DATA_OFFSET + offsets["enc_privkey"]
        flip_bit(start, end)
    else:
        # otherwise assume it's immutable
        f = ShareFile(fn)
        bp = ReadBucketProxy(None, None, '')
        offsets = bp._parse_offsets(f.read_share_data(0, 0x24))
        start = f._data_offset + offsets["data"]
        end = f._data_offset + offsets["plaintext_hash_tree"]
        flip_bit(start, end)
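
A minimal sketch of driving corrupt_share() by hand, outside the `tahoe debug corrupt-share` command that normally builds its options argument. FakeOptions, the share path, and the attribute layout here are illustrative assumptions; the function only needs options.stdout plus dict-style access to "filename" and "offset".

import sys

class FakeOptions(dict):
    stdout = sys.stdout

opts = FakeOptions()
opts["filename"] = "/path/to/storage/shares/xx/si/0"  # hypothetical share path
opts["offset"] = "block-random"   # the only mode corrupt_share implements
corrupt_share(opts)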
Example #2
def corrupt_share(options):
    import random
    from allmydata.storage.mutable import MutableShareFile
    from allmydata.storage.immutable import ShareFile
    from allmydata.mutable.layout import unpack_header
    from allmydata.immutable.layout import ReadBucketProxy
    out = options.stdout
    fn = options['filename']
    assert options["offset"] == "block-random", "other offsets not implemented"
    # first, what kind of share is it?

    def flip_bit(start, end):
        # Pick a random byte in [start, end) and a random bit within it.
        offset = random.randrange(start, end)
        bit = random.randrange(0, 8)
        print >>out, "[%d..%d):  %d.b%d" % (start, end, offset, bit)
        f = open(fn, "rb+")
        f.seek(offset)
        d = f.read(1)
        d = chr(ord(d) ^ (0x01 << bit))  # flip the chosen bit
        f.seek(offset)
        f.write(d)
        f.close()

    f = open(fn, "rb")
    prefix = f.read(32)
    f.close()
    if prefix == MutableShareFile.MAGIC:
        # mutable
        m = MutableShareFile(fn)
        f = open(fn, "rb")
        f.seek(m.DATA_OFFSET)
        data = f.read(2000)
        # make sure this slot contains an SDMF share
        assert data[0] == "\x00", "non-SDMF mutable shares not supported"
        f.close()

        (version, ig_seqnum, ig_roothash, ig_IV, ig_k, ig_N, ig_segsize,
         ig_datalen, offsets) = unpack_header(data)

        assert version == 0, "we only handle v0 SDMF files"
        start = m.DATA_OFFSET + offsets["share_data"]
        end = m.DATA_OFFSET + offsets["enc_privkey"]
        flip_bit(start, end)
    else:
        # otherwise assume it's immutable
        f = ShareFile(fn)
        bp = ReadBucketProxy(None, '', '')
        offsets = bp._parse_offsets(f.read_share_data(0, 0x24))
        start = f._data_offset + offsets["data"]
        end = f._data_offset + offsets["plaintext_hash_tree"]
        flip_bit(start, end)
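
For context, both renditions decide mutability from the first 32 bytes of the file. A standalone sketch of that check (the path is a hypothetical placeholder):

from allmydata.storage.mutable import MutableShareFile

with open("/path/to/share/0", "rb") as f:
    prefix = f.read(32)
if prefix == MutableShareFile.MAGIC:
    print("mutable share container")
else:
    print("assumed immutable share")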
Example #3
def dump_immutable_chk_share(f, out, options):
    import os
    import struct
    from allmydata import uri
    from allmydata.util import base32
    from allmydata.immutable.layout import ReadBucketProxy
    from allmydata.util.encodingutil import quote_output, to_bytes

    # use a ReadBucketProxy to parse the bucket and find the uri extension
    bp = ReadBucketProxy(None, None, '')
    offsets = bp._parse_offsets(f.read_share_data(0, 0x44))
    print("%20s: %d" % ("version", bp._version), file=out)
    seek = offsets['uri_extension']
    length = struct.unpack(bp._fieldstruct,
                           f.read_share_data(seek, bp._fieldsize))[0]
    seek += bp._fieldsize
    UEB_data = f.read_share_data(seek, length)

    unpacked = uri.unpack_extension_readable(UEB_data)
    keys1 = ("size", "num_segments", "segment_size", "needed_shares",
             "total_shares")
    keys2 = ("codec_name", "codec_params", "tail_codec_params")
    keys3 = ("plaintext_hash", "plaintext_root_hash", "crypttext_hash",
             "crypttext_root_hash", "share_root_hash", "UEB_hash")
    display_keys = {"size": "file_size"}

    def to_string(v):
        # Bytes values are UTF-8; everything else renders via str().
        if isinstance(v, bytes):
            return v.decode("utf-8")
        else:
            return str(v)

    for k in keys1:
        if k in unpacked:
            dk = display_keys.get(k, k)
            print("%20s: %s" % (dk, to_string(unpacked[k])), file=out)
    print(file=out)
    for k in keys2:
        if k in unpacked:
            dk = display_keys.get(k, k)
            print("%20s: %s" % (dk, to_string(unpacked[k])), file=out)
    print(file=out)
    for k in keys3:
        if k in unpacked:
            dk = display_keys.get(k, k)
            print("%20s: %s" % (dk, to_string(unpacked[k])), file=out)

    leftover = set(unpacked.keys()) - set(keys1 + keys2 + keys3)
    if leftover:
        print(file=out)
        print("LEFTOVER:", file=out)
        for k in sorted(leftover):
            print("%20s: %s" % (k, to_string(unpacked[k])), file=out)

    # the storage index isn't stored in the share itself, so we depend upon
    # knowing the parent directory name to get it
    pieces = options['filename'].split(os.sep)
    if len(pieces) >= 2:
        piece = to_bytes(pieces[-2])
        if base32.could_be_base32_encoded(piece):
            storage_index = base32.a2b(piece)
            uri_extension_hash = base32.a2b(unpacked["UEB_hash"])
            u = uri.CHKFileVerifierURI(storage_index, uri_extension_hash,
                                       unpacked["needed_shares"],
                                       unpacked["total_shares"],
                                       unpacked["size"])
            verify_cap = u.to_string()
            print("%20s: %s" %
                  ("verify-cap", quote_output(verify_cap, quotemarks=False)),
                  file=out)

    sizes = {}
    sizes['data'] = (offsets['plaintext_hash_tree'] - offsets['data'])
    sizes['validation'] = (offsets['uri_extension'] -
                           offsets['plaintext_hash_tree'])
    sizes['uri-extension'] = len(UEB_data)
    print(file=out)
    print(" Size of data within the share:", file=out)
    for k in sorted(sizes):
        print("%20s: %s" % (k, sizes[k]), file=out)

    if options['offsets']:
        print(file=out)
        print(" Section Offsets:", file=out)
        print("%20s: %s" % ("share data", f._data_offset), file=out)
        for k in [
                "data", "plaintext_hash_tree", "crypttext_hash_tree",
                "block_hashes", "share_hashes", "uri_extension"
        ]:
            name = {"data": "block data"}.get(k, k)
            offset = f._data_offset + offsets[k]
            print("  %20s: %s   (0x%x)" % (name, offset, offset), file=out)
        print("%20s: %s" % ("leases", f._lease_offset), file=out)
Example #4
def dump_immutable_chk_share(f, out, options):
    import os
    import struct
    from allmydata import uri
    from allmydata.util import base32
    from allmydata.immutable.layout import ReadBucketProxy
    from allmydata.util.encodingutil import quote_output, to_str

    # use a ReadBucketProxy to parse the bucket and find the uri extension
    bp = ReadBucketProxy(None, '', '')
    offsets = bp._parse_offsets(f.read_share_data(0, 0x44))
    print >>out, "%20s: %d" % ("version", bp._version)
    seek = offsets['uri_extension']
    length = struct.unpack(bp._fieldstruct,
                           f.read_share_data(seek, bp._fieldsize))[0]
    seek += bp._fieldsize
    UEB_data = f.read_share_data(seek, length)

    unpacked = uri.unpack_extension_readable(UEB_data)
    keys1 = ("size", "num_segments", "segment_size",
             "needed_shares", "total_shares")
    keys2 = ("codec_name", "codec_params", "tail_codec_params")
    keys3 = ("plaintext_hash", "plaintext_root_hash",
             "crypttext_hash", "crypttext_root_hash",
             "share_root_hash", "UEB_hash")
    display_keys = {"size": "file_size"}
    for k in keys1:
        if k in unpacked:
            dk = display_keys.get(k, k)
            print >>out, "%20s: %s" % (dk, unpacked[k])
    print >>out
    for k in keys2:
        if k in unpacked:
            dk = display_keys.get(k, k)
            print >>out, "%20s: %s" % (dk, unpacked[k])
    print >>out
    for k in keys3:
        if k in unpacked:
            dk = display_keys.get(k, k)
            print >>out, "%20s: %s" % (dk, unpacked[k])

    leftover = set(unpacked.keys()) - set(keys1 + keys2 + keys3)
    if leftover:
        print >>out
        print >>out, "LEFTOVER:"
        for k in sorted(leftover):
            print >>out, "%20s: %s" % (k, unpacked[k])

    # the storage index isn't stored in the share itself, so we depend upon
    # knowing the parent directory name to get it
    pieces = options['filename'].split(os.sep)
    if len(pieces) >= 2:
        piece = to_str(pieces[-2])
        if base32.could_be_base32_encoded(piece):
            storage_index = base32.a2b(piece)
            uri_extension_hash = base32.a2b(unpacked["UEB_hash"])
            u = uri.CHKFileVerifierURI(storage_index, uri_extension_hash,
                                       unpacked["needed_shares"],
                                       unpacked["total_shares"],
                                       unpacked["size"])
            verify_cap = u.to_string()
            print >>out, "%20s: %s" % ("verify-cap",
                                       quote_output(verify_cap, quotemarks=False))

    sizes = {}
    sizes['data'] = (offsets['plaintext_hash_tree'] -
                     offsets['data'])
    sizes['validation'] = (offsets['uri_extension'] -
                           offsets['plaintext_hash_tree'])
    sizes['uri-extension'] = len(UEB_data)
    print >>out
    print >>out, " Size of data within the share:"
    for k in sorted(sizes):
        print >>out, "%20s: %s" % (k, sizes[k])

    if options['offsets']:
        print >>out
        print >>out, " Section Offsets:"
        print >>out, "%20s: %s" % ("share data", f._data_offset)
        for k in ["data", "plaintext_hash_tree", "crypttext_hash_tree",
                  "block_hashes", "share_hashes", "uri_extension"]:
            name = {"data": "block data"}.get(k,k)
            offset = f._data_offset + offsets[k]
            print >>out, "  %20s: %s   (0x%x)" % (name, offset, offset)
        print >>out, "%20s: %s" % ("leases", f._lease_offset)