Code example #1
File: uri.py Project: mk-fg/tahoe-lafs
 def init_from_human_encoding(cls, uri):
     mo = cls.HUMAN_RE.search(uri)
     if not mo:
         raise BadURIError("'%s' doesn't look like a %s cap" % (uri, cls))
     return cls(
         base32.a2b(mo.group(1)), base32.a2b(mo.group(2)), int(mo.group(3)), int(mo.group(4)), int(mo.group(5))
     )
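Throughout these examples, base32.a2b is allmydata.util.base32.a2b, which decodes tahoe-lafs's lower-case, unpadded base32 strings to bytes. A minimal stdlib approximation (a sketch, not tahoe-lafs code; it assumes the standard RFC 4648 alphabet in lower case with the "=" padding stripped, which is consistent with the b32decode(options['nodeid'].upper()) call in Code example #3 below):

    import base64

    def a2b_approx(s):
        # upper-case and restore the stripped "=" padding for the stdlib decoder
        s = s.upper()
        s += "=" * ((8 - len(s) % 8) % 8)
        return base64.b32decode(s)

    assert a2b_approx("ci2a") == b"\x12\x34"   # matches test_a2b in Code example #24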
Code example #2
File: storage_client.py Project: WinLAFS/tahoe-lafs
    def __init__(self, key_s, ann):
        self.key_s = key_s
        self.announcement = ann

        assert "anonymous-storage-FURL" in ann, ann
        furl = str(ann["anonymous-storage-FURL"])
        m = re.match(r'pb://(\w+)@', furl)
        assert m, furl
        tubid_s = m.group(1).lower()
        self._tubid = base32.a2b(tubid_s)
        assert "permutation-seed-base32" in ann, ann
        ps = base32.a2b(str(ann["permutation-seed-base32"]))
        self._permutation_seed = ps

        if key_s:
            self._long_description = key_s
            if key_s.startswith("v0-"):
                # remove v0- prefix from abbreviated name
                self._short_description = key_s[3:3+8]
            else:
                self._short_description = key_s[:8]
        else:
            self._long_description = tubid_s
            self._short_description = tubid_s[:6]

        self.announcement_time = time.time()
        self.last_connect_time = None
        self.last_loss_time = None
        self.remote_host = None
        self.rref = None
        self._is_connected = False
        self._reconnector = None
        self._trigger_cb = None
Code example #3
File: debug.py Project: drewp/tahoe-lafs
def dump_cap(options):
    from allmydata import uri
    from allmydata.util import base32
    from base64 import b32decode
    import urlparse, urllib

    out = options.stdout
    cap = options.cap
    nodeid = None
    if options['nodeid']:
        nodeid = b32decode(options['nodeid'].upper())
    secret = None
    if options['client-secret']:
        secret = base32.a2b(options['client-secret'])
    elif options['client-dir']:
        secretfile = os.path.join(options['client-dir'], "private", "secret")
        try:
            secret = base32.a2b(open(secretfile, "r").read().strip())
        except EnvironmentError:
            pass

    if cap.startswith("http"):
        scheme, netloc, path, params, query, fragment = urlparse.urlparse(cap)
        assert path.startswith("/uri/")
        cap = urllib.unquote(path[len("/uri/"):])

    u = uri.from_string(cap)

    print >>out
    dump_uri_instance(u, nodeid, secret, out)
Code example #4
File: client.py Project: jsgf/tahoe-lafs
 def init_secrets(self):
     lease_s = self.get_or_create_private_config("secret", _make_secret)
     lease_secret = base32.a2b(lease_s)
     convergence_s = self.get_or_create_private_config('convergence',
                                                       _make_secret)
     self.convergence = base32.a2b(convergence_s)
     self._secret_holder = SecretHolder(lease_secret, self.convergence)
Code example #5
File: status.py Project: shermes641/tahoe-lafs
    def child_event_json(self, ctx):
        inevow.IRequest(ctx).setHeader("content-type", "text/plain")
        data = { } # this will be returned to the GET
        ds = self.download_status

        data["read"] = self._find_overlap(ds.read_events,
                                          "start_time", "finish_time")
        data["segment"] = self._find_overlap(ds.segment_events,
                                             "start_time", "finish_time")
        data["dyhb"] = self._find_overlap(ds.dyhb_requests,
                                          "start_time", "finish_time")
        data["block"],data["block_rownums"] = self._find_overlap_requests(ds.block_requests)

        servernums = {}
        serverid_strings = {}
        for d_ev in data["dyhb"]:
            if d_ev["serverid"] not in servernums:
                servernum = len(servernums)
                servernums[d_ev["serverid"]] = servernum
                #title= "%s: %s" % ( ",".join([str(shnum) for shnum in shnums]))
                serverid_strings[servernum] = d_ev["serverid"][:4]
        data["server_info"] = dict([(serverid, {"num": servernums[serverid],
                                                "color": self.color(base32.a2b(serverid)),
                                                "short": serverid_strings[servernums[serverid]],
                                                })
                                   for serverid in servernums.keys()])
        data["num_serverids"] = len(serverid_strings)
        # we'd prefer the keys of serverids[] to be ints, but this is JSON,
        # so they get converted to strings. Stupid javascript.
        data["serverids"] = serverid_strings
        data["bounds"] = {"min": ds.first_timestamp, "max": ds.last_timestamp}
        return simplejson.dumps(data, indent=1) + "\n"
Code example #6
    def test_static_servers(self):
        broker = StorageFarmBroker(True, lambda h: Mock())

        key_s = 'v0-1234-1'
        servers_yaml = """\
storage:
  v0-1234-1:
    ann:
      anonymous-storage-FURL: pb://ge@nowhere/fake
      permutation-seed-base32: aaaaaaaaaaaaaaaaaaaaaaaa
"""
        servers = yamlutil.safe_load(servers_yaml)
        permseed = base32.a2b("aaaaaaaaaaaaaaaaaaaaaaaa")
        broker.set_static_servers(servers["storage"])
        self.failUnlessEqual(len(broker._static_server_ids), 1)
        s = broker.servers[key_s]
        self.failUnlessEqual(s.announcement,
                             servers["storage"]["v0-1234-1"]["ann"])
        self.failUnlessEqual(s.get_serverid(), key_s)
        self.assertEqual(s.get_permutation_seed(), permseed)

        # if the Introducer announces the same thing, we're supposed to
        # ignore it

        ann2 = {
            "service-name": "storage",
            "anonymous-storage-FURL": "pb://{}@nowhere/fake2".format(base32.b2a(str(1))),
            "permutation-seed-base32": "bbbbbbbbbbbbbbbbbbbbbbbb",
        }
        broker._got_announcement(key_s, ann2)
        s2 = broker.servers[key_s]
        self.assertIdentical(s2, s)
        self.assertEqual(s2.get_permutation_seed(), permseed)
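A side note on this fixture (my own cross-check, not part of the test): the tubid "ge" in the static FURL is the base32 encoding of the one-byte string "1", which is exactly what the second announcement constructs with base32.b2a(str(1)):

    assert base32.b2a(b"1") == b"ge"   # so the parsed tubid decodes back to b"1"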
Code example #7
    def __init__(self, server_id, ann, tub_maker, handler_overrides):
        service.MultiService.__init__(self)
        assert isinstance(server_id, str)
        self._server_id = server_id
        self.announcement = ann
        self._tub_maker = tub_maker
        self._handler_overrides = handler_overrides

        assert "anonymous-storage-FURL" in ann, ann
        furl = str(ann["anonymous-storage-FURL"])
        m = re.match(r'pb://(\w+)@', furl)
        assert m, furl
        tubid_s = m.group(1).lower()
        self._tubid = base32.a2b(tubid_s)
        if "permutation-seed-base32" in ann:
            ps = base32.a2b(str(ann["permutation-seed-base32"]))
        elif re.search(r'^v0-[0-9a-zA-Z]{52}$', server_id):
            ps = base32.a2b(server_id[3:])
        else:
            log.msg("unable to parse serverid '%(server_id)s as pubkey, "
                    "hashing it to get permutation-seed, "
                    "may not converge with other clients",
                    server_id=server_id,
                    facility="tahoe.storage_broker",
                    level=log.UNUSUAL, umid="qu86tw")
            ps = hashlib.sha256(server_id).digest()
        self._permutation_seed = ps

        assert server_id
        self._long_description = server_id
        if server_id.startswith("v0-"):
            # remove v0- prefix from abbreviated name
            self._short_description = server_id[3:3+8]
        else:
            self._short_description = server_id[:8]

        self.last_connect_time = None
        self.last_loss_time = None
        self.remote_host = None
        self.rref = None
        self._is_connected = False
        self._reconnector = None
        self._trigger_cb = None
        self._on_status_changed = ObserverList()
Code example #8
 def test_static_permutation_seed_pubkey(self):
     broker = StorageFarmBroker(True, lambda h: Mock())
     server_id = "v0-4uazse3xb6uu5qpkb7tel2bm6bpea4jhuigdhqcuvvse7hugtsia"
     k = "4uazse3xb6uu5qpkb7tel2bm6bpea4jhuigdhqcuvvse7hugtsia"
     ann = {
         "anonymous-storage-FURL": "pb://abcde@nowhere/fake",
     }
     broker.set_static_servers({server_id.decode("ascii"): {"ann": ann}})
     s = broker.servers[server_id]
     self.assertEqual(s.get_permutation_seed(), base32.a2b(k))
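This test exercises the elif branch of Code example #7: when no permutation-seed-base32 is announced and the server_id looks like "v0-" plus 52 base32 characters, the seed is the decoded pubkey portion of the server_id itself. A condensed sketch of that path:

    import re
    server_id = "v0-4uazse3xb6uu5qpkb7tel2bm6bpea4jhuigdhqcuvvse7hugtsia"
    assert re.search(r'^v0-[0-9a-zA-Z]{52}$', server_id)
    seed = base32.a2b(server_id[3:])   # what get_permutation_seed() returns here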
Code example #9
 def test_static_permutation_seed_explicit(self):
     broker = StorageFarmBroker(True, lambda h: Mock())
     server_id = "v0-4uazse3xb6uu5qpkb7tel2bm6bpea4jhuigdhqcuvvse7hugtsia"
     k = "w5gl5igiexhwmftwzhai5jy2jixn7yx7"
     ann = {
         "anonymous-storage-FURL": "pb://abcde@nowhere/fake",
         "permutation-seed-base32": k,
     }
     broker.set_static_servers({server_id.decode("ascii"): {"ann": ann}})
     s = broker.servers[server_id]
     self.assertEqual(s.get_permutation_seed(), base32.a2b(k))
Code example #10
 def childFactory(self, ctx, name):
     if not name:
         return self
     # /operation/$OPHANDLE/$STORAGEINDEX provides detailed information
     # about a specific file or directory that was checked
     si = base32.a2b(name)
     s = self.monitor.get_status()
     try:
         results = s.get_results_for_storage_index(si)
         return CheckAndRepairResultsRenderer(self.client, results)
     except KeyError:
         raise WebError("No detailed results for SI %s" % html.escape(name),
                        http.NOT_FOUND)
Code example #11
File: common.py Project: LeastAuthority/tahoe-lafs
def unsign_from_foolscap(ann_t):
    (msg, sig_vs, claimed_key_vs) = ann_t
    if not sig_vs or not claimed_key_vs:
        raise UnknownKeyError("only signed announcements recognized")
    if not sig_vs.startswith("v0-"):
        raise UnknownKeyError("only v0- signatures recognized")
    if not claimed_key_vs.startswith("v0-"):
        raise UnknownKeyError("only v0- keys recognized")
    claimed_key = keyutil.parse_pubkey("pub-"+claimed_key_vs)
    sig_bytes = base32.a2b(keyutil.remove_prefix(sig_vs, "v0-"))
    claimed_key.verify(sig_bytes, msg)
    key_vs = claimed_key_vs
    ann = json.loads(msg.decode("utf-8"))
    return (ann, key_vs)
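For orientation, constructing such a tuple would look roughly like this (a hypothetical sketch: sk stands for a signing key with a sign() method, the counterpart of the verify() call above, and vk_bytes for its verifying-key bytes; neither appears in the original excerpt):

    msg = json.dumps(ann).encode("utf-8")
    ann_t = (msg,
             "v0-" + base32.b2a(sk.sign(msg)),   # sig_vs
             "v0-" + base32.b2a(vk_bytes))       # claimed_key_vs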
Code example #12
File: blacklist.py Project: ArtRichards/tahoe-lafs
 def read_blacklist(self):
     try:
         current_mtime = os.stat(self.blacklist_fn).st_mtime
     except EnvironmentError:
         # unreadable blacklist file means no blacklist
         self.entries.clear()
         return
     try:
         if self.last_mtime is None or current_mtime > self.last_mtime:
             self.entries.clear()
             for line in open(self.blacklist_fn, "r").readlines():
                 line = line.strip()
                 if not line or line.startswith("#"):
                     continue
                 si_s, reason = line.split(None, 1)
                 si = base32.a2b(si_s) # must be valid base32
                 self.entries[si] = reason
             self.last_mtime = current_mtime
     except Exception, e:
         twisted_log.err(e, "unparseable blacklist file")
         raise
Code example #13
def get_serverid_from_furl(furl):
    m = re.match(r'pb://(\w+)@', furl)
    assert m, furl
    id = m.group(1).lower()
    return base32.a2b(id)
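A quick usage sketch, reusing the fixture FURL from the tests above:

    serverid = get_serverid_from_furl("pb://ge@nowhere/fake")
    assert serverid == b"1"   # "ge" is base32 for the one-byte string "1"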
Code example #14
File: test_hashutil.py Project: sajith/tahoe-lafs
    def test_known_answers(self):
        """
        Verify backwards compatibility by comparing hash outputs for some
        well-known (to us) inputs.
        """
        self._testknown(
            hashutil.block_hash,
            b"msjr5bh4evuh7fa3zw7uovixfbvlnstr5b65mrerwfnvjxig2jvq", b"")
        self._testknown(
            hashutil.uri_extension_hash,
            b"wthsu45q7zewac2mnivoaa4ulh5xvbzdmsbuyztq2a5fzxdrnkka", b"")
        self._testknown(
            hashutil.plaintext_hash,
            b"5lz5hwz3qj3af7n6e3arblw7xzutvnd3p3fjsngqjcb7utf3x3da", b"")
        self._testknown(
            hashutil.crypttext_hash,
            b"itdj6e4njtkoiavlrmxkvpreosscssklunhwtvxn6ggho4rkqwga", b"")
        self._testknown(
            hashutil.crypttext_segment_hash,
            b"aovy5aa7jej6ym5ikgwyoi4pxawnoj3wtaludjz7e2nb5xijb7aa", b"")
        self._testknown(
            hashutil.plaintext_segment_hash,
            b"4fdgf6qruaisyukhqcmoth4t3li6bkolbxvjy4awwcpprdtva7za", b"")
        self._testknown(hashutil.convergence_hash,
                        b"3mo6ni7xweplycin6nowynw2we", 3, 10, 100, b"",
                        b"converge")
        self._testknown(
            hashutil.my_renewal_secret_hash,
            b"ujhr5k5f7ypkp67jkpx6jl4p47pyta7hu5m527cpcgvkafsefm6q", b"")
        self._testknown(
            hashutil.my_cancel_secret_hash,
            b"rjwzmafe2duixvqy6h47f5wfrokdziry6zhx4smew4cj6iocsfaa", b"")
        self._testknown(
            hashutil.file_renewal_secret_hash,
            b"hzshk2kf33gzbd5n3a6eszkf6q6o6kixmnag25pniusyaulqjnia", b"",
            b"si")
        self._testknown(
            hashutil.file_cancel_secret_hash,
            b"bfciwvr6w7wcavsngxzxsxxaszj72dej54n4tu2idzp6b74g255q", b"",
            b"si")
        self._testknown(
            hashutil.bucket_renewal_secret_hash,
            b"e7imrzgzaoashsncacvy3oysdd2m5yvtooo4gmj4mjlopsazmvuq", b"",
            b"\x00" * 20)
        self._testknown(
            hashutil.bucket_cancel_secret_hash,
            b"dvdujeyxeirj6uux6g7xcf4lvesk632aulwkzjar7srildvtqwma", b"",
            b"\x00" * 20)
        self._testknown(
            hashutil.hmac,
            b"c54ypfi6pevb3nvo6ba42jtglpkry2kbdopqsi7dgrm4r7tw5sra", b"tag",
            b"")
        self._testknown(hashutil.mutable_rwcap_key_hash,
                        b"6rvn2iqrghii5n4jbbwwqqsnqu", b"iv", b"wk")
        self._testknown(hashutil.ssk_writekey_hash,
                        b"ykpgmdbpgbb6yqz5oluw2q26ye", b"")
        self._testknown(
            hashutil.ssk_write_enabler_master_hash,
            b"izbfbfkoait4dummruol3gy2bnixrrrslgye6ycmkuyujnenzpia", b"")
        self._testknown(
            hashutil.ssk_write_enabler_hash,
            b"fuu2dvx7g6gqu5x22vfhtyed7p4pd47y5hgxbqzgrlyvxoev62tq", b"wk",
            b"\x00" * 20)
        self._testknown(
            hashutil.ssk_pubkey_fingerprint_hash,
            b"3opzw4hhm2sgncjx224qmt5ipqgagn7h5zivnfzqycvgqgmgz35q", b"")
        self._testknown(hashutil.ssk_readkey_hash,
                        b"vugid4as6qbqgeq2xczvvcedai", b"")
        self._testknown(hashutil.ssk_readkey_data_hash,
                        b"73wsaldnvdzqaf7v4pzbr2ae5a", b"iv", b"rk")
        self._testknown(hashutil.ssk_storage_index_hash,
                        b"j7icz6kigb6hxrej3tv4z7ayym", b"")

        self._testknown(
            hashutil.permute_server_hash,
            b"kb4354zeeurpo3ze5e275wzbynm6hlap",  # b32(expected)
            b"SI",  # peer selection index == storage_index
            base32.a2b(b"u33m4y7klhz3bypswqkozwetvabelhxt"),  # seed
        )
Code example #15
File: debug.py Project: ClashTheBunny/tahoe-lafs
def dump_MDMF_share(m, length, options):
    from allmydata.mutable.layout import MDMFSlotReadProxy
    from allmydata.util import base32, hashutil
    from allmydata.uri import MDMFVerifierURI
    from allmydata.util.encodingutil import quote_output, to_str

    offset = m.DATA_OFFSET
    out = options.stdout

    f = open(options['filename'], "rb")
    storage_index = None; shnum = 0

    class ShareDumper(MDMFSlotReadProxy):
        def _read(self, readvs, force_remote=False, queue=False):
            data = []
            for (where,length) in readvs:
                f.seek(offset+where)
                data.append(f.read(length))
            return defer.succeed({shnum: data})

    p = ShareDumper(None, storage_index, shnum)
    def extract(func):
        stash = []
        # these methods return Deferreds, but we happen to know that they run
        # synchronously when not actually talking to a remote server
        d = func()
        d.addCallback(stash.append)
        return stash[0]

    verinfo = extract(p.get_verinfo)
    encprivkey = extract(p.get_encprivkey)
    signature = extract(p.get_signature)
    pubkey = extract(p.get_verification_key)
    block_hash_tree = extract(p.get_blockhashes)
    share_hash_chain = extract(p.get_sharehashes)
    f.close()

    (seqnum, root_hash, salt_to_use, segsize, datalen, k, N, prefix,
     offsets) = verinfo

    print >>out, " MDMF contents:"
    print >>out, "  seqnum: %d" % seqnum
    print >>out, "  root_hash: %s" % base32.b2a(root_hash)
    #print >>out, "  IV: %s" % base32.b2a(IV)
    print >>out, "  required_shares: %d" % k
    print >>out, "  total_shares: %d" % N
    print >>out, "  segsize: %d" % segsize
    print >>out, "  datalen: %d" % datalen
    print >>out, "  enc_privkey: %d bytes" % len(encprivkey)
    print >>out, "  pubkey: %d bytes" % len(pubkey)
    print >>out, "  signature: %d bytes" % len(signature)
    share_hash_ids = ",".join([str(hid)
                               for hid in sorted(share_hash_chain.keys())])
    print >>out, "  share_hash_chain: %s" % share_hash_ids
    print >>out, "  block_hash_tree: %d nodes" % len(block_hash_tree)

    # the storage index isn't stored in the share itself, so we depend upon
    # knowing the parent directory name to get it
    pieces = options['filename'].split(os.sep)
    if len(pieces) >= 2:
        piece = to_str(pieces[-2])
        if base32.could_be_base32_encoded(piece):
            storage_index = base32.a2b(piece)
            fingerprint = hashutil.ssk_pubkey_fingerprint_hash(pubkey)
            u = MDMFVerifierURI(storage_index, fingerprint)
            verify_cap = u.to_string()
            print >>out, "  verify-cap:", quote_output(verify_cap, quotemarks=False)

    if options['offsets']:
        # NOTE: this offset-calculation code is fragile, and needs to be
        # merged with MutableShareFile's internals.

        print >>out
        print >>out, " Section Offsets:"
        def printoffset(name, value, shift=0):
            print >>out, "%s%.20s: %s   (0x%x)" % (" "*shift, name, value, value)
        printoffset("first lease", m.HEADER_SIZE, 2)
        printoffset("share data", m.DATA_OFFSET, 2)
        o_seqnum = m.DATA_OFFSET + struct.calcsize(">B")
        printoffset("seqnum", o_seqnum, 4)
        o_root_hash = m.DATA_OFFSET + struct.calcsize(">BQ")
        printoffset("root_hash", o_root_hash, 4)
        for k in ["enc_privkey", "share_hash_chain", "signature",
                  "verification_key", "verification_key_end",
                  "share_data", "block_hash_tree", "EOF"]:
            name = {"share_data": "block data",
                    "verification_key": "pubkey",
                    "verification_key_end": "end of pubkey",
                    "EOF": "end of share data"}.get(k,k)
            offset = m.DATA_OFFSET + offsets[k]
            printoffset(name, offset, 4)
        f = open(options['filename'], "rb")
        printoffset("extra leases", m._read_extra_lease_offset(f) + 4, 2)
        f.close()

    print >>out
Code example #16
File: keyutil.py Project: ArtRichards/tahoe-lafs
def parse_privkey(privkey_vs):
    sk_bytes = a2b(remove_prefix(privkey_vs, "priv-v0-"))
    sk = ed25519.SigningKey(sk_bytes)
    vk_bytes = sk.get_verifying_key_bytes()
    return (sk, "pub-v0-"+b2a(vk_bytes))
Code example #17
File: make-canary-files.py Project: drewp/tahoe-lafs
opts.parseOptions()

verbose = bool(opts["verbose"])

nodes = {}
for line in open(opts["nodeids"], "r").readlines():
    line = line.strip()
    if not line or line.startswith("#"):
        continue
    pieces = line.split(None, 1)
    if len(pieces) == 2:
        nodeid_s, nickname = pieces
    else:
        nodeid_s = pieces[0]
        nickname = None
    nodeid = base32.a2b(nodeid_s)
    nodes[nodeid] = nickname

if opts["k"] != 3 or opts["N"] != 10:
    print "note: using non-default k/N requires patching the Tahoe code"
    print "src/allmydata/client.py line 55, DEFAULT_ENCODING_PARAMETERS"

convergence_file = os.path.expanduser(opts["convergence"])
convergence_s = open(convergence_file, "rb").read().strip()
convergence = base32.a2b(convergence_s)

def get_permuted_peers(key):
    results = []
    for nodeid in nodes:
        permuted = sha.new(key + nodeid).digest()
        results.append((permuted, nodeid))
Code example #18
File: common.py Project: ArtRichards/tahoe-lafs
def si_a2b(ascii_storageindex):
    return base32.a2b(ascii_storageindex)
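Storage indexes are 16 bytes, so their ASCII form is 26 base32 characters; a usage sketch borrowing the ssk_storage_index_hash known answer from Code example #14:

    si = si_a2b("j7icz6kigb6hxrej3tv4z7ayym")
    assert len(si) == 16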
Code example #19
File: debug.py Project: tahoe-lafs/tahoe-lafs
def dump_SDMF_share(m, length, options):
    from allmydata.mutable.layout import unpack_share, unpack_header
    from allmydata.mutable.common import NeedMoreDataError
    from allmydata.util import base32, hashutil
    from allmydata.uri import SSKVerifierURI
    from allmydata.util.encodingutil import quote_output, to_str

    offset = m.DATA_OFFSET

    out = options.stdout

    f = open(options['filename'], "rb")
    f.seek(offset)
    data = f.read(min(length, 2000))
    f.close()

    try:
        pieces = unpack_share(data)
    except NeedMoreDataError as e:
        # retry once with the larger size
        size = e.needed_bytes
        f = open(options['filename'], "rb")
        f.seek(offset)
        data = f.read(min(length, size))
        f.close()
        pieces = unpack_share(data)

    (seqnum, root_hash, IV, k, N, segsize, datalen,
     pubkey, signature, share_hash_chain, block_hash_tree,
     share_data, enc_privkey) = pieces
    (ig_version, ig_seqnum, ig_roothash, ig_IV, ig_k, ig_N, ig_segsize,
     ig_datalen, offsets) = unpack_header(data)

    print(" SDMF contents:", file=out)
    print("  seqnum: %d" % seqnum, file=out)
    print("  root_hash: %s" % base32.b2a(root_hash), file=out)
    print("  IV: %s" % base32.b2a(IV), file=out)
    print("  required_shares: %d" % k, file=out)
    print("  total_shares: %d" % N, file=out)
    print("  segsize: %d" % segsize, file=out)
    print("  datalen: %d" % datalen, file=out)
    print("  enc_privkey: %d bytes" % len(enc_privkey), file=out)
    print("  pubkey: %d bytes" % len(pubkey), file=out)
    print("  signature: %d bytes" % len(signature), file=out)
    share_hash_ids = ",".join(sorted([str(hid)
                                      for hid in share_hash_chain.keys()]))
    print("  share_hash_chain: %s" % share_hash_ids, file=out)
    print("  block_hash_tree: %d nodes" % len(block_hash_tree), file=out)

    # the storage index isn't stored in the share itself, so we depend upon
    # knowing the parent directory name to get it
    pieces = options['filename'].split(os.sep)
    if len(pieces) >= 2:
        piece = to_str(pieces[-2])
        if base32.could_be_base32_encoded(piece):
            storage_index = base32.a2b(piece)
            fingerprint = hashutil.ssk_pubkey_fingerprint_hash(pubkey)
            u = SSKVerifierURI(storage_index, fingerprint)
            verify_cap = u.to_string()
            print("  verify-cap:", quote_output(verify_cap, quotemarks=False), file=out)

    if options['offsets']:
        # NOTE: this offset-calculation code is fragile, and needs to be
        # merged with MutableShareFile's internals.
        print(file=out)
        print(" Section Offsets:", file=out)
        def printoffset(name, value, shift=0):
            print("%s%20s: %s   (0x%x)" % (" "*shift, name, value, value), file=out)
        printoffset("first lease", m.HEADER_SIZE)
        printoffset("share data", m.DATA_OFFSET)
        o_seqnum = m.DATA_OFFSET + struct.calcsize(">B")
        printoffset("seqnum", o_seqnum, 2)
        o_root_hash = m.DATA_OFFSET + struct.calcsize(">BQ")
        printoffset("root_hash", o_root_hash, 2)
        for k in ["signature", "share_hash_chain", "block_hash_tree",
                  "share_data",
                  "enc_privkey", "EOF"]:
            name = {"share_data": "block data",
                    "EOF": "end of share data"}.get(k,k)
            offset = m.DATA_OFFSET + offsets[k]
            printoffset(name, offset, 2)
        f = open(options['filename'], "rb")
        printoffset("extra leases", m._read_extra_lease_offset(f) + 4)
        f.close()

    print(file=out)
Code example #20
File: keyutil.py Project: ArtRichards/tahoe-lafs
def parse_pubkey(pubkey_vs):
    vk_bytes = a2b(remove_prefix(pubkey_vs, "pub-v0-"))
    return ed25519.VerifyingKey(vk_bytes)
Code example #21
 def __attrs_post_init__(self):
     if self.convergence is None:
         convergence_s = self.config.get_private_config('convergence')
         self.convergence = base32.a2b(convergence_s)
Code example #22
File: keyutil.py Project: raystyle/CloudMalwareAlarm
def parse_privkey(privkey_vs):
    sk_bytes = a2b(remove_prefix(privkey_vs, "priv-v0-"))
    sk = ed25519.SigningKey(sk_bytes)
    vk_bytes = sk.get_verifying_key_bytes()
    return (sk, "pub-v0-" + b2a(vk_bytes))
Code example #23
File: debug.py Project: ArdaXi/tahoe-lafs
def dump_SDMF_share(m, length, options):
    from allmydata.mutable.layout import unpack_share, unpack_header
    from allmydata.mutable.common import NeedMoreDataError
    from allmydata.util import base32, hashutil
    from allmydata.uri import SSKVerifierURI
    from allmydata.util.encodingutil import quote_output, to_str

    offset = m.DATA_OFFSET

    out = options.stdout

    f = open(options['filename'], "rb")
    f.seek(offset)
    data = f.read(min(length, 2000))
    f.close()

    try:
        pieces = unpack_share(data)
    except NeedMoreDataError as e:
        # retry once with the larger size
        size = e.needed_bytes
        f = open(options['filename'], "rb")
        f.seek(offset)
        data = f.read(min(length, size))
        f.close()
        pieces = unpack_share(data)

    (seqnum, root_hash, IV, k, N, segsize, datalen, pubkey, signature,
     share_hash_chain, block_hash_tree, share_data, enc_privkey) = pieces
    (ig_version, ig_seqnum, ig_roothash, ig_IV, ig_k, ig_N, ig_segsize,
     ig_datalen, offsets) = unpack_header(data)

    print(" SDMF contents:", file=out)
    print("  seqnum: %d" % seqnum, file=out)
    print("  root_hash: %s" % base32.b2a(root_hash), file=out)
    print("  IV: %s" % base32.b2a(IV), file=out)
    print("  required_shares: %d" % k, file=out)
    print("  total_shares: %d" % N, file=out)
    print("  segsize: %d" % segsize, file=out)
    print("  datalen: %d" % datalen, file=out)
    print("  enc_privkey: %d bytes" % len(enc_privkey), file=out)
    print("  pubkey: %d bytes" % len(pubkey), file=out)
    print("  signature: %d bytes" % len(signature), file=out)
    share_hash_ids = ",".join(
        sorted([str(hid) for hid in share_hash_chain.keys()]))
    print("  share_hash_chain: %s" % share_hash_ids, file=out)
    print("  block_hash_tree: %d nodes" % len(block_hash_tree), file=out)

    # the storage index isn't stored in the share itself, so we depend upon
    # knowing the parent directory name to get it
    pieces = options['filename'].split(os.sep)
    if len(pieces) >= 2:
        piece = to_str(pieces[-2])
        if base32.could_be_base32_encoded(piece):
            storage_index = base32.a2b(piece)
            fingerprint = hashutil.ssk_pubkey_fingerprint_hash(pubkey)
            u = SSKVerifierURI(storage_index, fingerprint)
            verify_cap = u.to_string()
            print("  verify-cap:",
                  quote_output(verify_cap, quotemarks=False),
                  file=out)

    if options['offsets']:
        # NOTE: this offset-calculation code is fragile, and needs to be
        # merged with MutableShareFile's internals.
        print(file=out)
        print(" Section Offsets:", file=out)

        def printoffset(name, value, shift=0):
            print("%s%20s: %s   (0x%x)" % (" " * shift, name, value, value),
                  file=out)

        printoffset("first lease", m.HEADER_SIZE)
        printoffset("share data", m.DATA_OFFSET)
        o_seqnum = m.DATA_OFFSET + struct.calcsize(">B")
        printoffset("seqnum", o_seqnum, 2)
        o_root_hash = m.DATA_OFFSET + struct.calcsize(">BQ")
        printoffset("root_hash", o_root_hash, 2)
        for k in [
                "signature", "share_hash_chain", "block_hash_tree",
                "share_data", "enc_privkey", "EOF"
        ]:
            name = {
                "share_data": "block data",
                "EOF": "end of share data"
            }.get(k, k)
            offset = m.DATA_OFFSET + offsets[k]
            printoffset(name, offset, 2)
        f = open(options['filename'], "rb")
        printoffset("extra leases", m._read_extra_lease_offset(f) + 4)
        f.close()

    print(file=out)
Code example #24
File: test_base32.py Project: seedon198/tahoe-lafs
 def test_a2b(self):
     self.failUnlessEqual(base32.a2b(b"ci2a"), b"\x12\x34")
     self.failUnlessRaises(AssertionError, base32.a2b, b"b0gus")
     self.assertFalse(base32.could_be_base32_encoded(b"b0gus"))
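As a cross-check with the standard library (same assumption as the note after Code example #1): the vector decodes with base64.b32decode once upper-cased and re-padded, and b"b0gus" is rejected because "0" is not in the base32 alphabet.

    import base64
    assert base64.b32decode(b"CI2A====") == b"\x12\x34"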
Code example #25
File: debug.py Project: sloanyang/tahoe-lafs
    print >> out, "  datalen: %d" % datalen
    print >> out, "  enc_privkey: %d bytes" % len(enc_privkey)
    print >> out, "  pubkey: %d bytes" % len(pubkey)
    print >> out, "  signature: %d bytes" % len(signature)
    share_hash_ids = ",".join(
        sorted([str(hid) for hid in share_hash_chain.keys()]))
    print >> out, "  share_hash_chain: %s" % share_hash_ids
    print >> out, "  block_hash_tree: %d nodes" % len(block_hash_tree)

    # the storage index isn't stored in the share itself, so we depend upon
    # knowing the parent directory name to get it
    pieces = options['filename'].split(os.sep)
    if len(pieces) >= 2:
        piece = to_str(pieces[-2])
        if base32.could_be_base32_encoded(piece):
            storage_index = base32.a2b(piece)
            fingerprint = hashutil.ssk_pubkey_fingerprint_hash(pubkey)
            u = SSKVerifierURI(storage_index, fingerprint)
            verify_cap = u.to_string()
            print >> out, "  verify-cap:", quote_output(verify_cap,
                                                        quotemarks=False)

    if options['offsets']:
        # NOTE: this offset-calculation code is fragile, and needs to be
        # merged with MutableShareFile's internals.
        print >> out
        print >> out, " Section Offsets:"

        def printoffset(name, value, shift=0):
            print >> out, "%s%20s: %s   (0x%x)" % (" " * shift, name, value,
                                                   value)
Code example #26
File: keyutil.py Project: raystyle/CloudMalwareAlarm
def parse_pubkey(pubkey_vs):
    vk_bytes = a2b(remove_prefix(pubkey_vs, "pub-v0-"))
    return ed25519.VerifyingKey(vk_bytes)
Code example #27
 def init_from_string(cls, uri):
     mo = cls.STRING_RE.search(uri)
     if not mo:
         raise BadURIError("'%s' doesn't look like a %s cap" % (uri, cls))
     return cls(base32.a2b(mo.group(1)), base32.a2b(mo.group(2)),
                int(mo.group(3)), int(mo.group(4)), int(mo.group(5)))
Code example #28
File: uri.py Project: ArtRichards/tahoe-lafs
 def init_from_string(cls, uri):
     mo = cls.STRING_RE.search(uri)
     if not mo:
         raise BadURIError("'%s' doesn't look like a %s cap" % (uri, cls))
     return cls(si_a2b(mo.group(1)), base32.a2b(mo.group(2)))
Code example #29
File: common.py Project: viktoriiasavchuk/tahoe-lafs
def si_a2b(ascii_storageindex):
    return base32.a2b(ascii_storageindex)
Code example #30
File: debug.py Project: drewp/tahoe-lafs
    print >>out, "  datalen: %d" % datalen
    print >>out, "  enc_privkey: %d bytes" % len(enc_privkey)
    print >>out, "  pubkey: %d bytes" % len(pubkey)
    print >>out, "  signature: %d bytes" % len(signature)
    share_hash_ids = ",".join(sorted([str(hid)
                                      for hid in share_hash_chain.keys()]))
    print >>out, "  share_hash_chain: %s" % share_hash_ids
    print >>out, "  block_hash_tree: %d nodes" % len(block_hash_tree)

    # the storage index isn't stored in the share itself, so we depend upon
    # knowing the parent directory name to get it
    pieces = options['filename'].split(os.sep)
    if len(pieces) >= 2:
        piece = to_str(pieces[-2])
        if base32.could_be_base32_encoded(piece):
            storage_index = base32.a2b(piece)
            fingerprint = hashutil.ssk_pubkey_fingerprint_hash(pubkey)
            u = SSKVerifierURI(storage_index, fingerprint)
            verify_cap = u.to_string()
            print >>out, "  verify-cap:", quote_output(verify_cap, quotemarks=False)

    if options['offsets']:
        # NOTE: this offset-calculation code is fragile, and needs to be
        # merged with MutableShareFile's internals.
        print >>out
        print >>out, " Section Offsets:"
        def printoffset(name, value, shift=0):
            print >>out, "%s%20s: %s   (0x%x)" % (" "*shift, name, value, value)
        printoffset("first lease", m.HEADER_SIZE)
        printoffset("share data", m.DATA_OFFSET)
        o_seqnum = m.DATA_OFFSET + struct.calcsize(">B")
Code example #31
opts.parseOptions()

verbose = bool(opts["verbose"])

nodes = {}
for line in open(opts["nodeids"], "r").readlines():
    line = line.strip()
    if not line or line.startswith("#"):
        continue
    pieces = line.split(None, 1)
    if len(pieces) == 2:
        nodeid_s, nickname = pieces
    else:
        nodeid_s = pieces[0]
        nickname = None
    nodeid = base32.a2b(nodeid_s)
    nodes[nodeid] = nickname

if opts["k"] != 3 or opts["N"] != 10:
    print("note: using non-default k/N requires patching the Tahoe code")
    print("src/allmydata/client.py line 55, DEFAULT_ENCODING_PARAMETERS")

convergence_file = os.path.expanduser(opts["convergence"])
convergence_s = open(convergence_file, "rb").read().strip()
convergence = base32.a2b(convergence_s)


def get_permuted_peers(key):
    results = []
    for nodeid in nodes:
        permuted = hashlib.sha1(key + nodeid).digest()
        results.append((permuted, nodeid))
Code example #32
File: debug.py Project: drewp/tahoe-lafs
def dump_immutable_chk_share(f, out, options):
    from allmydata import uri
    from allmydata.util import base32
    from allmydata.immutable.layout import ReadBucketProxy
    from allmydata.util.encodingutil import quote_output, to_str

    # use a ReadBucketProxy to parse the bucket and find the uri extension
    bp = ReadBucketProxy(None, '', '')
    offsets = bp._parse_offsets(f.read_share_data(0, 0x44))
    print >>out, "%20s: %d" % ("version", bp._version)
    seek = offsets['uri_extension']
    length = struct.unpack(bp._fieldstruct,
                           f.read_share_data(seek, bp._fieldsize))[0]
    seek += bp._fieldsize
    UEB_data = f.read_share_data(seek, length)

    unpacked = uri.unpack_extension_readable(UEB_data)
    keys1 = ("size", "num_segments", "segment_size",
             "needed_shares", "total_shares")
    keys2 = ("codec_name", "codec_params", "tail_codec_params")
    keys3 = ("plaintext_hash", "plaintext_root_hash",
             "crypttext_hash", "crypttext_root_hash",
             "share_root_hash", "UEB_hash")
    display_keys = {"size": "file_size"}
    for k in keys1:
        if k in unpacked:
            dk = display_keys.get(k, k)
            print >>out, "%20s: %s" % (dk, unpacked[k])
    print >>out
    for k in keys2:
        if k in unpacked:
            dk = display_keys.get(k, k)
            print >>out, "%20s: %s" % (dk, unpacked[k])
    print >>out
    for k in keys3:
        if k in unpacked:
            dk = display_keys.get(k, k)
            print >>out, "%20s: %s" % (dk, unpacked[k])

    leftover = set(unpacked.keys()) - set(keys1 + keys2 + keys3)
    if leftover:
        print >>out
        print >>out, "LEFTOVER:"
        for k in sorted(leftover):
            print >>out, "%20s: %s" % (k, unpacked[k])

    # the storage index isn't stored in the share itself, so we depend upon
    # knowing the parent directory name to get it
    pieces = options['filename'].split(os.sep)
    if len(pieces) >= 2:
        piece = to_str(pieces[-2])
        if base32.could_be_base32_encoded(piece):
            storage_index = base32.a2b(piece)
            uri_extension_hash = base32.a2b(unpacked["UEB_hash"])
            u = uri.CHKFileVerifierURI(storage_index, uri_extension_hash,
                                      unpacked["needed_shares"],
                                      unpacked["total_shares"], unpacked["size"])
            verify_cap = u.to_string()
            print >>out, "%20s: %s" % ("verify-cap", quote_output(verify_cap, quotemarks=False))

    sizes = {}
    sizes['data'] = (offsets['plaintext_hash_tree'] -
                           offsets['data'])
    sizes['validation'] = (offsets['uri_extension'] -
                           offsets['plaintext_hash_tree'])
    sizes['uri-extension'] = len(UEB_data)
    print >>out
    print >>out, " Size of data within the share:"
    for k in sorted(sizes):
        print >>out, "%20s: %s" % (k, sizes[k])

    if options['offsets']:
        print >>out
        print >>out, " Section Offsets:"
        print >>out, "%20s: %s" % ("share data", f._data_offset)
        for k in ["data", "plaintext_hash_tree", "crypttext_hash_tree",
                  "block_hashes", "share_hashes", "uri_extension"]:
            name = {"data": "block data"}.get(k,k)
            offset = f._data_offset + offsets[k]
            print >>out, "  %20s: %s   (0x%x)" % (name, offset, offset)
        print >>out, "%20s: %s" % ("leases", f._lease_offset)
Code example #33
File: debug.py Project: trel/tahoe-lafs
def dump_MDMF_share(m, length, options):
    from allmydata.mutable.layout import MDMFSlotReadProxy
    from allmydata.util import base32, hashutil
    from allmydata.uri import MDMFVerifierURI
    from allmydata.util.encodingutil import quote_output, to_str

    offset = m.DATA_OFFSET
    out = options.stdout

    f = open(options['filename'], "rb")
    storage_index = None
    shnum = 0

    class ShareDumper(MDMFSlotReadProxy):
        def _read(self, readvs, force_remote=False, queue=False):
            data = []
            for (where, length) in readvs:
                f.seek(offset + where)
                data.append(f.read(length))
            return defer.succeed({shnum: data})

    p = ShareDumper(None, storage_index, shnum)

    def extract(func):
        stash = []
        # these methods return Deferreds, but we happen to know that they run
        # synchronously when not actually talking to a remote server
        d = func()
        d.addCallback(stash.append)
        return stash[0]

    verinfo = extract(p.get_verinfo)
    encprivkey = extract(p.get_encprivkey)
    signature = extract(p.get_signature)
    pubkey = extract(p.get_verification_key)
    block_hash_tree = extract(p.get_blockhashes)
    share_hash_chain = extract(p.get_sharehashes)
    f.close()

    (seqnum, root_hash, salt_to_use, segsize, datalen, k, N, prefix,
     offsets) = verinfo

    print >> out, " MDMF contents:"
    print >> out, "  seqnum: %d" % seqnum
    print >> out, "  root_hash: %s" % base32.b2a(root_hash)
    #print >>out, "  IV: %s" % base32.b2a(IV)
    print >> out, "  required_shares: %d" % k
    print >> out, "  total_shares: %d" % N
    print >> out, "  segsize: %d" % segsize
    print >> out, "  datalen: %d" % datalen
    print >> out, "  enc_privkey: %d bytes" % len(encprivkey)
    print >> out, "  pubkey: %d bytes" % len(pubkey)
    print >> out, "  signature: %d bytes" % len(signature)
    share_hash_ids = ",".join(
        [str(hid) for hid in sorted(share_hash_chain.keys())])
    print >> out, "  share_hash_chain: %s" % share_hash_ids
    print >> out, "  block_hash_tree: %d nodes" % len(block_hash_tree)

    # the storage index isn't stored in the share itself, so we depend upon
    # knowing the parent directory name to get it
    pieces = options['filename'].split(os.sep)
    if len(pieces) >= 2:
        piece = to_str(pieces[-2])
        if base32.could_be_base32_encoded(piece):
            storage_index = base32.a2b(piece)
            fingerprint = hashutil.ssk_pubkey_fingerprint_hash(pubkey)
            u = MDMFVerifierURI(storage_index, fingerprint)
            verify_cap = u.to_string()
            print >> out, "  verify-cap:", quote_output(verify_cap,
                                                        quotemarks=False)

    if options['offsets']:
        # NOTE: this offset-calculation code is fragile, and needs to be
        # merged with MutableShareFile's internals.

        print >> out
        print >> out, " Section Offsets:"

        def printoffset(name, value, shift=0):
            print >> out, "%s%.20s: %s   (0x%x)" % (" " * shift, name, value,
                                                    value)

        printoffset("first lease", m.HEADER_SIZE, 2)
        printoffset("share data", m.DATA_OFFSET, 2)
        o_seqnum = m.DATA_OFFSET + struct.calcsize(">B")
        printoffset("seqnum", o_seqnum, 4)
        o_root_hash = m.DATA_OFFSET + struct.calcsize(">BQ")
        printoffset("root_hash", o_root_hash, 4)
        for k in [
                "enc_privkey", "share_hash_chain", "signature",
                "verification_key", "verification_key_end", "share_data",
                "block_hash_tree", "EOF"
        ]:
            name = {
                "share_data": "block data",
                "verification_key": "pubkey",
                "verification_key_end": "end of pubkey",
                "EOF": "end of share data"
            }.get(k, k)
            offset = m.DATA_OFFSET + offsets[k]
            printoffset(name, offset, 4)
        f = open(options['filename'], "rb")
        printoffset("extra leases", m._read_extra_lease_offset(f) + 4, 2)
        f.close()

    print >> out
Code example #34
File: debug.py Project: trel/tahoe-lafs
def dump_immutable_chk_share(f, out, options):
    from allmydata import uri
    from allmydata.util import base32
    from allmydata.immutable.layout import ReadBucketProxy
    from allmydata.util.encodingutil import quote_output, to_str

    # use a ReadBucketProxy to parse the bucket and find the uri extension
    bp = ReadBucketProxy(None, None, '')
    offsets = bp._parse_offsets(f.read_share_data(0, 0x44))
    print >> out, "%20s: %d" % ("version", bp._version)
    seek = offsets['uri_extension']
    length = struct.unpack(bp._fieldstruct,
                           f.read_share_data(seek, bp._fieldsize))[0]
    seek += bp._fieldsize
    UEB_data = f.read_share_data(seek, length)

    unpacked = uri.unpack_extension_readable(UEB_data)
    keys1 = ("size", "num_segments", "segment_size", "needed_shares",
             "total_shares")
    keys2 = ("codec_name", "codec_params", "tail_codec_params")
    keys3 = ("plaintext_hash", "plaintext_root_hash", "crypttext_hash",
             "crypttext_root_hash", "share_root_hash", "UEB_hash")
    display_keys = {"size": "file_size"}
    for k in keys1:
        if k in unpacked:
            dk = display_keys.get(k, k)
            print >> out, "%20s: %s" % (dk, unpacked[k])
    print >> out
    for k in keys2:
        if k in unpacked:
            dk = display_keys.get(k, k)
            print >> out, "%20s: %s" % (dk, unpacked[k])
    print >> out
    for k in keys3:
        if k in unpacked:
            dk = display_keys.get(k, k)
            print >> out, "%20s: %s" % (dk, unpacked[k])

    leftover = set(unpacked.keys()) - set(keys1 + keys2 + keys3)
    if leftover:
        print >> out
        print >> out, "LEFTOVER:"
        for k in sorted(leftover):
            print >> out, "%20s: %s" % (k, unpacked[k])

    # the storage index isn't stored in the share itself, so we depend upon
    # knowing the parent directory name to get it
    pieces = options['filename'].split(os.sep)
    if len(pieces) >= 2:
        piece = to_str(pieces[-2])
        if base32.could_be_base32_encoded(piece):
            storage_index = base32.a2b(piece)
            uri_extension_hash = base32.a2b(unpacked["UEB_hash"])
            u = uri.CHKFileVerifierURI(storage_index, uri_extension_hash,
                                       unpacked["needed_shares"],
                                       unpacked["total_shares"],
                                       unpacked["size"])
            verify_cap = u.to_string()
            print >> out, "%20s: %s" % (
                "verify-cap", quote_output(verify_cap, quotemarks=False))

    sizes = {}
    sizes['data'] = (offsets['plaintext_hash_tree'] - offsets['data'])
    sizes['validation'] = (offsets['uri_extension'] -
                           offsets['plaintext_hash_tree'])
    sizes['uri-extension'] = len(UEB_data)
    print >> out
    print >> out, " Size of data within the share:"
    for k in sorted(sizes):
        print >> out, "%20s: %s" % (k, sizes[k])

    if options['offsets']:
        print >> out
        print >> out, " Section Offsets:"
        print >> out, "%20s: %s" % ("share data", f._data_offset)
        for k in [
                "data", "plaintext_hash_tree", "crypttext_hash_tree",
                "block_hashes", "share_hashes", "uri_extension"
        ]:
            name = {"data": "block data"}.get(k, k)
            offset = f._data_offset + offsets[k]
            print >> out, "  %20s: %s   (0x%x)" % (name, offset, offset)
        print >> out, "%20s: %s" % ("leases", f._lease_offset)
Code example #35
File: uri.py Project: trel/tahoe-lafs
 def init_from_human_encoding(cls, uri):
     mo = cls.HUMAN_RE.search(uri)
     if not mo:
         raise BadURIError("'%s' doesn't look like a %s cap" % (uri, cls))
     return cls(si_a2b(mo.group(1)), base32.a2b(mo.group(2)))