Example #1
 def init_from_parsed(self, parsed):
     nodetype, d = parsed
     self.writecap = to_bytes(d.get("rw_uri"))
     self.readcap = to_bytes(d.get("ro_uri"))
     self.mutable = d.get("mutable", False)  # older nodes don't provide it
     self.children_d = dict([(str(name), value)
                             for (name, value) in d["children"].items()])
     self.children = None
Example #2
 def test_to_bytes(self):
     self.failUnlessReallyEqual(to_bytes(b"foo"), b"foo")
     self.failUnlessReallyEqual(to_bytes(b"lumi\xc3\xa8re"),
                                b"lumi\xc3\xa8re")
     self.failUnlessReallyEqual(
         to_bytes(b"\xFF"),
         b"\xFF")  # passes through invalid UTF-8 -- is this what we want?
     self.failUnlessReallyEqual(to_bytes(u"lumi\u00E8re"),
                                b"lumi\xc3\xa8re")
     self.failUnlessReallyEqual(to_bytes(None), None)
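
Judging by these assertions, to_bytes passes bytes through unchanged (even when they are not valid UTF-8), encodes text as UTF-8, and maps None to None. A minimal sketch consistent with the test, not the actual encodingutil implementation:

    def to_bytes_sketch(s):
        # None and bytes (even invalid UTF-8) pass through unchanged;
        # text is encoded as UTF-8.
        if s is None or isinstance(s, bytes):
            return s
        return s.encode("utf-8")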
Example #3
        def _get_test_txt_uris(args):
            (rc, out, err) = args
            self.failUnlessEqual(rc, 0)
            filetype, data = json.loads(out)

            self.failUnlessEqual(filetype, "filenode")
            self.failUnless(data['mutable'])

            self.failUnlessIn("rw_uri", data)
            self.rw_uri = to_bytes(data["rw_uri"])
            self.failUnlessIn("ro_uri", data)
            self.ro_uri = to_bytes(data["ro_uri"])
Example #4
        def _check_json(args):
            (rc, out, err) = args
            self.failUnlessEqual(rc, 0)
            filetype, data = json.loads(out)

            self.failUnlessEqual(filetype, "filenode")
            self.failUnless(data['mutable'])

            self.failUnlessIn("ro_uri", data)
            self.failUnlessEqual(to_bytes(data["ro_uri"]), self.ro_uri)
            self.failUnlessIn("rw_uri", data)
            self.failUnlessEqual(to_bytes(data["rw_uri"]), self.rw_uri)
Example #5
        def _process_file_json(args, fn):
            (rc, out, err) = args
            self.failUnlessEqual(rc, 0)
            filetype, data = json.loads(out)
            self.failUnlessEqual(filetype, "filenode")

            if "mutable" in fn:
                self.failUnless(data['mutable'])
                self.failUnlessIn("rw_uri", data)
                self.failUnlessEqual(to_bytes(data["rw_uri"]), self.childuris[fn])
            else:
                self.failIf(data['mutable'])
                self.failUnlessIn("ro_uri", data)
                self.failIfEqual(to_bytes(data["ro_uri"]), self.childuris[fn])
Example #6
        def _process_directory_json(args):
            (rc, out, err) = args
            self.failUnlessEqual(rc, 0)

            filetype, data = json.loads(out)
            self.failUnlessEqual(filetype, "dirnode")
            self.failUnless(data['mutable'])
            self.failUnlessIn("children", data)
            children = data['children']

            # Store the URIs for later use.
            self.childuris = {}
            for k in ["mutable1", "mutable2", "imm1", "imm2"]:
                self.failUnlessIn(k, children)
                childtype, childdata = children[k]
                self.failUnlessEqual(childtype, "filenode")
                if "mutable" in k:
                    self.failUnless(childdata['mutable'])
                    self.failUnlessIn("rw_uri", childdata)
                    uri_key = "rw_uri"
                else:
                    self.failIf(childdata['mutable'])
                    self.failUnlessIn("ro_uri", childdata)
                    uri_key = "ro_uri"
                self.childuris[k] = to_bytes(childdata[uri_key])
Example #7
 def _check2(args):
     (rc, out, err) = args
     self.failUnlessReallyEqual(err, "")
     self.failUnlessReallyEqual(rc, 0)
     data = json.loads(out)
     self.failUnlessReallyEqual(to_bytes(data["summary"]), "Healthy")
     self.failUnlessReallyEqual(data["results"]["healthy"], True)
Example #8
 def _process_imm2_json(args):
     (rc, out, err) = args
     self.failUnlessEqual(rc, 0)
     filetype, data = json.loads(out)
     self.failUnlessEqual(filetype, "filenode")
     self.failIf(data['mutable'])
     self.failUnlessIn("ro_uri", data)
     self.failUnlessEqual(to_bytes(data["ro_uri"]), self.childuris["imm2"])
Example #9
def convert_children_json(nodemaker, children_json):
    """I convert the JSON output of GET?t=json into the dict-of-nodes input
    to both dirnode.create_subdirectory() and
    client.create_directory(initial_children=). This is used by
    t=mkdir-with-children and t=mkdir-immutable"""
    children = {}
    if children_json:
        data = json.loads(children_json)
        for (namex, (ctype, propdict)) in data.iteritems():
            namex = unicode(namex)
            writecap = to_bytes(propdict.get("rw_uri"))
            readcap = to_bytes(propdict.get("ro_uri"))
            metadata = propdict.get("metadata", {})
            # name= argument is just for error reporting
            childnode = nodemaker.create_from_cap(writecap, readcap, name=namex)
            children[namex] = (childnode, metadata)
    return children
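
For orientation, a sketch of the kind of JSON this function consumes, assuming the GET?t=json shape its docstring refers to; the capability strings below are placeholders, not real caps:

    import json

    # Each child maps to a (type, propdict) pair; rw_uri, ro_uri, and
    # metadata are the keys read above. The URI values are placeholders.
    children_json = json.dumps({
        "file1.txt": ["filenode", {"ro_uri": "URI:CHK:placeholder",
                                   "metadata": {"ctime": 1234567890}}],
        "subdir": ["dirnode", {"rw_uri": "URI:DIR2:placeholder",
                               "ro_uri": "URI:DIR2-RO:placeholder"}],
    })
    # children = convert_children_json(nodemaker, children_json)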
Example #10
        def _process_test_json(args):
            (rc, out, err) = args
            self.failUnlessEqual(rc, 0)
            filetype, data = json.loads(out)

            self.failUnlessEqual(filetype, "filenode")
            self.failUnless(data['mutable'])
            self.failUnlessIn("ro_uri", data)
            self._test_read_uri = to_bytes(data["ro_uri"])
Example #11
    def get_target_info(self, destination_spec):
        precondition(isinstance(destination_spec, str), destination_spec)
        rootcap, path_utf8 = get_alias(self.aliases, destination_spec, None)
        path = path_utf8.decode("utf-8")
        if rootcap == DefaultAliasMarker:
            # no alias, so this is a local file
            pathname = abspath_expanduser_unicode(path)
            if not os.path.exists(pathname):
                t = LocalMissingTarget(pathname)
            elif os.path.isdir(pathname):
                t = LocalDirectoryTarget(self.progress, pathname)
            else:
                # TODO: should this be _assert? what happens if the target is
                # a special file?
                assert os.path.isfile(pathname), pathname
                t = LocalFileTarget(pathname)  # non-empty
        else:
            # this is a tahoe object
            url = self.nodeurl + "uri/%s" % url_quote(rootcap)
            if path:
                url += "/" + escape_path(path)

            resp = do_http("GET", url + "?t=json")
            if resp.status == 404:
                # doesn't exist yet
                t = TahoeMissingTarget(url)
            elif resp.status == 200:
                parsed = json.loads(resp.read())
                nodetype, d = parsed
                if nodetype == "dirnode":
                    t = TahoeDirectoryTarget(self.nodeurl, self.cache,
                                             self.progress)
                    t.init_from_parsed(parsed)
                else:
                    writecap = to_bytes(d.get("rw_uri"))
                    readcap = to_bytes(d.get("ro_uri"))
                    mutable = d.get("mutable", False)
                    t = TahoeFileTarget(self.nodeurl, mutable, writecap,
                                        readcap, url)
            else:
                raise HTTPError(
                    "Error examining target %s" %
                    quote_output(destination_spec), resp)
        return t
Example #12
 def populate(self, recurse):
     if self.children is not None:
         return
     self.children = {}
     for i, (name, data) in enumerate(self.children_d.items()):
         self.progressfunc("examining %d of %d" %
                           (i + 1, len(self.children_d)))
         if data[0] == "filenode":
             mutable = data[1].get("mutable", False)
             writecap = to_bytes(data[1].get("rw_uri"))
             readcap = to_bytes(data[1].get("ro_uri"))
             url = None
             if self.writecap:
                 url = self.nodeurl + "/".join([
                     "uri",
                     url_quote(self.writecap),
                     url_quote(unicode_to_url(name))
                 ])
             self.children[name] = TahoeFileTarget(self.nodeurl, mutable,
                                                   writecap, readcap, url)
         elif data[0] == "dirnode":
             writecap = to_bytes(data[1].get("rw_uri"))
             readcap = to_bytes(data[1].get("ro_uri"))
             if writecap and writecap in self.cache:
                 child = self.cache[writecap]
             elif readcap and readcap in self.cache:
                 child = self.cache[readcap]
             else:
                 child = TahoeDirectoryTarget(self.nodeurl, self.cache,
                                              self.progressfunc)
                 child.init_from_grid(writecap, readcap)
                 if writecap:
                     self.cache[writecap] = child
                 if readcap:
                     self.cache[readcap] = child
                 if recurse:
                     child.populate(recurse=True)
             self.children[name] = child
         else:
             # TODO: there should be an option to skip unknown nodes.
             raise TahoeError("Cannot copy unknown nodes (ticket #839). "
                              "You probably need to use a later version of "
                              "Tahoe-LAFS to copy this directory.")
Example #13
    def check_directory(self, contents):
        """I will tell you if a new directory needs to be created for a given
        set of directory contents, or if I know of an existing (immutable)
        directory that can be used instead.

        'contents' should be a dictionary that maps from child name (a single
        unicode string) to immutable childcap (filecap or dircap).

        I return a DirectoryResult object, synchronously. If r.was_created()
        returns False, you should create the directory (with
        t=mkdir-immutable). When you are finished, call r.did_create(dircap)
        so I can update my database.

        If was_created() returns a dircap, you might be able to avoid the
        mkdir. Call r.should_check(), and if it says False, you can skip the
        mkdir and use the dircap returned by was_created().

        If should_check() returns True, you should perform a check operation
        on the dircap returned by was_created(). If the check indicates the
        directory is healthy, please call
        r.did_check_healthy(checker_results) so I can update the database,
        using the de-JSONized response from the webapi t=check call for
        'checker_results'. If the check indicates the directory is not
        healthy, please repair or re-create the directory and call
        r.did_create(dircap) when you're done.
        """

        now = time.time()
        entries = []
        for name in contents:
            entries.append([name.encode("utf-8"), contents[name]])
        entries.sort()
        data = b"".join([
            netstring(name_utf8) + netstring(cap)
            for (name_utf8, cap) in entries
        ])
        dirhash = backupdb_dirhash(data)
        dirhash_s = base32.b2a(dirhash)
        c = self.cursor
        c.execute(
            "SELECT dircap, last_checked"
            " FROM directories WHERE dirhash=?", (dirhash_s, ))
        row = c.fetchone()
        if not row:
            return DirectoryResult(self, dirhash_s, None, False)
        (dircap, last_checked) = row
        age = now - last_checked

        probability = ((age - self.NO_CHECK_BEFORE) /
                       (self.ALWAYS_CHECK_AFTER - self.NO_CHECK_BEFORE))
        probability = min(max(probability, 0.0), 1.0)
        should_check = bool(random.random() < probability)

        return DirectoryResult(self, dirhash_s, to_bytes(dircap), should_check)
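
Read alongside the docstring, the intended calling protocol might look like the following sketch; backupdb, mkdir_immutable, and webapi_check are assumed names for the caller's database handle and its t=mkdir-immutable and t=check helpers:

    r = backupdb.check_directory(contents)
    dircap = r.was_created()
    if not dircap:
        dircap = mkdir_immutable(contents)      # hypothetical t=mkdir-immutable helper
        r.did_create(dircap)
    elif r.should_check():
        results = webapi_check(dircap)          # hypothetical t=check helper
        if results["results"]["healthy"]:
            r.did_check_healthy(results)
        else:
            dircap = mkdir_immutable(contents)  # repair or re-create
            r.did_create(dircap)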
Example #14
def mkdir(contents, options):
    kids = dict([(childname, (contents[childname][0], {
        "ro_uri": contents[childname][1],
        "metadata": contents[childname][2],
    })) for childname in contents])
    body = json.dumps(kids).encode("utf-8")
    url = options['node-url'] + "uri?t=mkdir-immutable"
    resp = do_http("POST", url, body)
    if resp.status < 200 or resp.status >= 300:
        raise HTTPError("Error during mkdir", resp)

    dircap = to_bytes(resp.read().strip())
    return dircap
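
From the [0]/[1]/[2] indexing above, contents evidently maps each child name to a (child type, read-only cap, metadata) triple. A hedged usage sketch; the cap is a placeholder and options is assumed to carry 'node-url':

    contents = {
        "file1.txt": ("filenode", "URI:CHK:placeholder", {"ctime": 1234567890}),
    }
    # dircap = mkdir(contents, options)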
Example #15
    def parseArgs(self, invite_code, local_dir):
        super(JoinOptions, self).parseArgs()

        try:
            if int(self['poll-interval']) <= 0:
                raise ValueError("should be positive")
        except ValueError:
            raise usage.UsageError(
                "--poll-interval must be a positive integer")
        self.local_dir = FilePath(local_dir)
        if not self.local_dir.exists():
            raise usage.UsageError("'{}' doesn't exist".format(local_dir))
        if not self.local_dir.isdir():
            raise usage.UsageError("'{}' isn't a directory".format(local_dir))
        self.invite_code = to_bytes(argv_to_unicode(invite_code))
Example #16
        def _process_tahoe_json(args):
            (rc, out, err) = args
            self.failUnlessEqual(rc, 0)

            filetype, data = json.loads(out)
            self.failUnlessEqual(filetype, "dirnode")
            self.failUnlessIn("children", data)
            kiddata = data['children']

            self.failUnlessIn("test_file.txt", kiddata)
            testtype, testdata = kiddata['test_file.txt']
            self.failUnlessEqual(testtype, "filenode")
            self.failUnless(testdata['mutable'])
            self.failUnlessIn("ro_uri", testdata)
            self.failUnlessEqual(to_bytes(testdata["ro_uri"]), self._test_read_uri)
            self.failIfIn("rw_uri", testdata)
Example #17
        def _got_testdir_json(args):
            (rc, out, err) = args
            self.failUnlessEqual(rc, 0)

            filetype, data = json.loads(out)
            self.failUnlessEqual(filetype, "dirnode")

            self.failUnlessIn("children", data)
            childdata = data['children']

            self.failUnlessIn("file2.txt", childdata)
            file2type, file2data = childdata['file2.txt']
            self.failUnlessEqual(file2type, "filenode")
            self.failUnless(file2data['mutable'])
            self.failUnlessIn("ro_uri", file2data)
            self.failUnlessEqual(to_bytes(file2data["ro_uri"]), self._test_read_uri)
            self.failIfIn("rw_uri", file2data)
Example #18
def list(options):
    nodeurl = options['node-url']
    aliases = options.aliases
    where = options.where
    stdout = options.stdout
    stderr = options.stderr

    if not nodeurl.endswith("/"):
        nodeurl += "/"
    if where.endswith("/"):
        where = where[:-1]
    try:
        rootcap, path = get_alias(aliases, where, DEFAULT_ALIAS)
    except UnknownAliasError as e:
        e.display(stderr)
        return 1
    url = nodeurl + "uri/%s" % url_quote(rootcap)
    if path:
        # move where.endswith check here?
        url += "/" + escape_path(path)
    assert not url.endswith("/")
    url += "?t=json"
    resp = do_http("GET", url)
    if resp.status == 404:
        print("No such file or directory", file=stderr)
        return 2
    if resp.status != 200:
        print(format_http_error("Error during GET", resp), file=stderr)
        if resp.status == 0:
            return 3
        else:
            return resp.status

    data = resp.read()

    if options['json']:
        # The webapi server should always output printable ASCII.
        if is_printable_ascii(data):
            print(data, file=stdout)
            return 0
        else:
            print("The JSON response contained unprintable characters:",
                  file=stderr)
            print(quote_output(data, quotemarks=False), file=stderr)
            return 1

    try:
        parsed = json.loads(data)
    except Exception as e:
        print("error: %s" % quote_output(e.args[0], quotemarks=False),
              file=stderr)
        print("Could not parse JSON response:", file=stderr)
        print(quote_output(data, quotemarks=False), file=stderr)
        return 1

    nodetype, d = parsed
    children = {}
    if nodetype == "dirnode":
        children = d['children']
    else:
        # paths returned from get_alias are always valid UTF-8
        childname = path.split("/")[-1].decode('utf-8')
        children = {childname: (nodetype, d)}
        if "metadata" not in d:
            d["metadata"] = {}
    childnames = sorted(children.keys())
    now = time.time()

    # we build up a series of rows, then we loop through them to compute a
    # maxwidth so we can format them tightly. Size, filename, and URI are the
    # variable-width ones.
    rows = []
    has_unknowns = False

    for name in childnames:
        child = children[name]
        name = unicode(name)
        childtype = child[0]

        # See webapi.txt for a discussion of the meanings of unix local
        # filesystem mtime and ctime, Tahoe mtime and ctime, and Tahoe
        # linkmotime and linkcrtime.
        ctime = child[1].get("metadata", {}).get('tahoe', {}).get("linkcrtime")
        if not ctime:
            ctime = child[1]["metadata"].get("ctime")

        mtime = child[1].get("metadata", {}).get('tahoe', {}).get("linkmotime")
        if not mtime:
            mtime = child[1]["metadata"].get("mtime")
        rw_uri = to_bytes(child[1].get("rw_uri"))
        ro_uri = to_bytes(child[1].get("ro_uri"))
        if ctime:
            # match for formatting that GNU 'ls' does
            if (now - ctime) > 6 * 30 * 24 * 60 * 60:
                # old files
                fmt = "%b %d  %Y"
            else:
                fmt = "%b %d %H:%M"
            ctime_s = time.strftime(fmt, time.localtime(ctime))
        else:
            ctime_s = "-"
        if childtype == "dirnode":
            t0 = "d"
            size = "-"
            classify = "/"
        elif childtype == "filenode":
            t0 = "-"
            size = str(child[1].get("size", "?"))
            classify = ""
            if rw_uri:
                classify = "*"
        else:
            has_unknowns = True
            t0 = "?"
            size = "?"
            classify = "?"
        t1 = "-"
        if ro_uri:
            t1 = "r"
        t2 = "-"
        if rw_uri:
            t2 = "w"
        t3 = "-"
        if childtype == "dirnode":
            t3 = "x"

        uri = rw_uri or ro_uri

        line = []
        if options["long"]:
            line.append(t0 + t1 + t2 + t3)
            line.append(size)
            line.append(ctime_s)
        if not options["classify"]:
            classify = ""

        encoding_error = False
        try:
            line.append(unicode_to_output(name) + classify)
        except UnicodeEncodeError:
            encoding_error = True
            line.append(quote_output(name) + classify)

        if options["uri"]:
            line.append(ensure_str(uri))
        if options["readonly-uri"]:
            line.append(
                quote_output(ensure_str(ro_uri) or "-", quotemarks=False))

        rows.append((encoding_error, line))

    max_widths = []
    left_justifys = []
    for (encoding_error, row) in rows:
        for i, cell in enumerate(row):
            while len(max_widths) <= i:
                max_widths.append(0)
            while len(left_justifys) <= i:
                left_justifys.append(False)
            max_widths[i] = max(max_widths[i], len(cell))
            if ensure_text(cell).startswith("URI"):
                left_justifys[i] = True
    if len(left_justifys) == 1:
        left_justifys[0] = True
    fmt_pieces = []
    for i in range(len(max_widths)):
        piece = "%"
        if left_justifys[i]:
            piece += "-"
        piece += str(max_widths[i])
        piece += "s"
        fmt_pieces.append(piece)
    fmt = " ".join(fmt_pieces)

    rc = 0
    for (encoding_error, row) in rows:
        if encoding_error:
            print((fmt % tuple(row)).rstrip(), file=stderr)
            rc = 1
        else:
            print((fmt % tuple(row)).rstrip(), file=stdout)

    if rc == 1:
        print("\nThis listing included files whose names could not be converted to the terminal" \
                        "\noutput encoding. Their names are shown using backslash escapes and in quotes.", file=stderr)
    if has_unknowns:
        print("\nThis listing included unknown objects. Using a webapi server that supports" \
                        "\na later version of Tahoe may help.", file=stderr)

    return rc
Example #19
    def check_file(self, path, use_timestamps=True):
        """I will tell you if a given local file needs to be uploaded or not,
        by looking in a database and seeing if I have a record of this file
        having been uploaded earlier.

        I return a FileResults object, synchronously. If r.was_uploaded()
        returns False, you should upload the file. When you are finished
        uploading it, call r.did_upload(filecap), so I can update my
        database.

        If was_uploaded() returns a filecap, you might be able to avoid an
        upload. Call r.should_check(), and if it says False, you can skip the
        upload and use the filecap returned by was_uploaded().

        If should_check() returns True, you should perform a filecheck on the
        filecap returned by was_uploaded(). If the check indicates the file
        is healthy, please call r.did_check_healthy(checker_results) so I can
        update the database, using the de-JSONized response from the webapi
        t=check call for 'checker_results'. If the check indicates the file
        is not healthy, please upload the file and call r.did_upload(filecap)
        when you're done.

        If use_timestamps=True (the default), I will compare ctime and mtime
        of the local file against an entry in my database, and consider the
        file to be unchanged if ctime, mtime, and filesize are all the same
        as the earlier version. If use_timestamps=False, I will not trust the
        timestamps, so more files (perhaps all) will be marked as needing
        upload. A future version of this database may hash the file to make
        equality decisions, in which case use_timestamps=False will not
        always imply r.must_upload()==True.

        'path' points to a local file on disk, possibly relative to the
        current working directory. The database stores absolute pathnames.
        """

        path = abspath_expanduser_unicode(path)

        # TODO: consider using get_pathinfo.
        s = os.stat(path)
        size = s[stat.ST_SIZE]
        ctime = s[stat.ST_CTIME]
        mtime = s[stat.ST_MTIME]

        now = time.time()
        c = self.cursor

        c.execute(
            "SELECT size,mtime,ctime,fileid"
            " FROM local_files"
            " WHERE path=?", (path, ))
        row = self.cursor.fetchone()
        if not row:
            return FileResult(self, None, False, path, mtime, ctime, size)
        (last_size, last_mtime, last_ctime, last_fileid) = row

        c.execute(
            "SELECT caps.filecap, last_upload.last_checked"
            " FROM caps,last_upload"
            " WHERE caps.fileid=? AND last_upload.fileid=?",
            (last_fileid, last_fileid))
        row2 = c.fetchone()

        if ((last_size != size or not use_timestamps or last_mtime != mtime
             or last_ctime != ctime)  # the file has been changed
                or
            (not row2)  # we somehow forgot where we put the file last time
            ):
            c.execute("DELETE FROM local_files WHERE path=?", (path, ))
            self.connection.commit()
            return FileResult(self, None, False, path, mtime, ctime, size)

        # at this point, we're allowed to assume the file hasn't been changed
        (filecap, last_checked) = row2
        age = now - last_checked

        probability = ((age - self.NO_CHECK_BEFORE) /
                       (self.ALWAYS_CHECK_AFTER - self.NO_CHECK_BEFORE))
        probability = min(max(probability, 0.0), 1.0)
        should_check = bool(random.random() < probability)

        return FileResult(self, to_bytes(filecap), should_check, path, mtime,
                          ctime, size)
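
As with check_directory above, the docstring implies a calling protocol along these lines; backupdb, upload, and webapi_check are assumed names:

    r = backupdb.check_file(path)
    filecap = r.was_uploaded()
    if not filecap:
        filecap = upload(path)              # hypothetical upload helper
        r.did_upload(filecap)
    elif r.should_check():
        results = webapi_check(filecap)     # hypothetical t=check helper
        if results["results"]["healthy"]:
            r.did_check_healthy(results)
        else:
            filecap = upload(path)
            r.did_upload(filecap)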
Example #20
def dump_MDMF_share(m, length, options):
    from allmydata.mutable.layout import MDMFSlotReadProxy
    from allmydata.util import base32, hashutil
    from allmydata.uri import MDMFVerifierURI
    from allmydata.util.encodingutil import quote_output, to_bytes

    offset = m.DATA_OFFSET
    out = options.stdout

    f = open(options['filename'], "rb")
    storage_index = None
    shnum = 0

    class ShareDumper(MDMFSlotReadProxy):
        def _read(self, readvs, force_remote=False, queue=False):
            data = []
            for (where, length) in readvs:
                f.seek(offset + where)
                data.append(f.read(length))
            return defer.succeed({shnum: data})

    p = ShareDumper(None, storage_index, shnum)

    def extract(func):
        stash = []
        # these methods return Deferreds, but we happen to know that they run
        # synchronously when not actually talking to a remote server
        d = func()
        d.addCallback(stash.append)
        return stash[0]

    verinfo = extract(p.get_verinfo)
    encprivkey = extract(p.get_encprivkey)
    signature = extract(p.get_signature)
    pubkey = extract(p.get_verification_key)
    block_hash_tree = extract(p.get_blockhashes)
    share_hash_chain = extract(p.get_sharehashes)
    f.close()

    (seqnum, root_hash, salt_to_use, segsize, datalen, k, N, prefix,
     offsets) = verinfo

    print(" MDMF contents:", file=out)
    print("  seqnum: %d" % seqnum, file=out)
    print("  root_hash: %s" % unicode(base32.b2a(root_hash), "utf-8"),
          file=out)
    #print("  IV: %s" % base32.b2a(IV), file=out)
    print("  required_shares: %d" % k, file=out)
    print("  total_shares: %d" % N, file=out)
    print("  segsize: %d" % segsize, file=out)
    print("  datalen: %d" % datalen, file=out)
    print("  enc_privkey: %d bytes" % len(encprivkey), file=out)
    print("  pubkey: %d bytes" % len(pubkey), file=out)
    print("  signature: %d bytes" % len(signature), file=out)
    share_hash_ids = ",".join(
        [str(hid) for hid in sorted(share_hash_chain.keys())])
    print("  share_hash_chain: %s" % share_hash_ids, file=out)
    print("  block_hash_tree: %d nodes" % len(block_hash_tree), file=out)

    # the storage index isn't stored in the share itself, so we depend upon
    # knowing the parent directory name to get it
    pieces = options['filename'].split(os.sep)
    if len(pieces) >= 2:
        piece = to_bytes(pieces[-2])
        if base32.could_be_base32_encoded(piece):
            storage_index = base32.a2b(piece)
            fingerprint = hashutil.ssk_pubkey_fingerprint_hash(pubkey)
            u = MDMFVerifierURI(storage_index, fingerprint)
            verify_cap = u.to_string()
            print("  verify-cap:",
                  quote_output(verify_cap, quotemarks=False),
                  file=out)

    if options['offsets']:
        # NOTE: this offset-calculation code is fragile, and needs to be
        # merged with MutableShareFile's internals.

        print(file=out)
        print(" Section Offsets:", file=out)

        def printoffset(name, value, shift=0):
            print("%s%.20s: %s   (0x%x)" % (" " * shift, name, value, value),
                  file=out)

        printoffset("first lease", m.HEADER_SIZE, 2)
        printoffset("share data", m.DATA_OFFSET, 2)
        o_seqnum = m.DATA_OFFSET + struct.calcsize(">B")
        printoffset("seqnum", o_seqnum, 4)
        o_root_hash = m.DATA_OFFSET + struct.calcsize(">BQ")
        printoffset("root_hash", o_root_hash, 4)
        for k in [
                "enc_privkey", "share_hash_chain", "signature",
                "verification_key", "verification_key_end", "share_data",
                "block_hash_tree", "EOF"
        ]:
            name = {
                "share_data": "block data",
                "verification_key": "pubkey",
                "verification_key_end": "end of pubkey",
                "EOF": "end of share data"
            }.get(k, k)
            offset = m.DATA_OFFSET + offsets[k]
            printoffset(name, offset, 4)
        f = open(options['filename'], "rb")
        printoffset("extra leases", m._read_extra_lease_offset(f) + 4, 2)
        f.close()

    print(file=out)
Example #21
def dump_SDMF_share(m, length, options):
    from allmydata.mutable.layout import unpack_share, unpack_header
    from allmydata.mutable.common import NeedMoreDataError
    from allmydata.util import base32, hashutil
    from allmydata.uri import SSKVerifierURI
    from allmydata.util.encodingutil import quote_output, to_bytes

    offset = m.DATA_OFFSET

    out = options.stdout

    f = open(options['filename'], "rb")
    f.seek(offset)
    data = f.read(min(length, 2000))
    f.close()

    try:
        pieces = unpack_share(data)
    except NeedMoreDataError as e:
        # retry once with the larger size
        size = e.needed_bytes
        f = open(options['filename'], "rb")
        f.seek(offset)
        data = f.read(min(length, size))
        f.close()
        pieces = unpack_share(data)

    (seqnum, root_hash, IV, k, N, segsize, datalen, pubkey, signature,
     share_hash_chain, block_hash_tree, share_data, enc_privkey) = pieces
    (ig_version, ig_seqnum, ig_roothash, ig_IV, ig_k, ig_N, ig_segsize,
     ig_datalen, offsets) = unpack_header(data)

    print(" SDMF contents:", file=out)
    print("  seqnum: %d" % seqnum, file=out)
    print("  root_hash: %s" % unicode(base32.b2a(root_hash), "utf-8"),
          file=out)
    print("  IV: %s" % unicode(base32.b2a(IV), "utf-8"), file=out)
    print("  required_shares: %d" % k, file=out)
    print("  total_shares: %d" % N, file=out)
    print("  segsize: %d" % segsize, file=out)
    print("  datalen: %d" % datalen, file=out)
    print("  enc_privkey: %d bytes" % len(enc_privkey), file=out)
    print("  pubkey: %d bytes" % len(pubkey), file=out)
    print("  signature: %d bytes" % len(signature), file=out)
    share_hash_ids = ",".join(
        sorted([str(hid) for hid in share_hash_chain.keys()]))
    print("  share_hash_chain: %s" % share_hash_ids, file=out)
    print("  block_hash_tree: %d nodes" % len(block_hash_tree), file=out)

    # the storage index isn't stored in the share itself, so we depend upon
    # knowing the parent directory name to get it
    pieces = options['filename'].split(os.sep)
    if len(pieces) >= 2:
        piece = to_bytes(pieces[-2])
        if base32.could_be_base32_encoded(piece):
            storage_index = base32.a2b(piece)
            fingerprint = hashutil.ssk_pubkey_fingerprint_hash(pubkey)
            u = SSKVerifierURI(storage_index, fingerprint)
            verify_cap = u.to_string()
            print("  verify-cap:",
                  quote_output(verify_cap, quotemarks=False),
                  file=out)

    if options['offsets']:
        # NOTE: this offset-calculation code is fragile, and needs to be
        # merged with MutableShareFile's internals.
        print(file=out)
        print(" Section Offsets:", file=out)

        def printoffset(name, value, shift=0):
            print("%s%20s: %s   (0x%x)" % (" " * shift, name, value, value),
                  file=out)

        printoffset("first lease", m.HEADER_SIZE)
        printoffset("share data", m.DATA_OFFSET)
        o_seqnum = m.DATA_OFFSET + struct.calcsize(">B")
        printoffset("seqnum", o_seqnum, 2)
        o_root_hash = m.DATA_OFFSET + struct.calcsize(">BQ")
        printoffset("root_hash", o_root_hash, 2)
        for k in [
                "signature", "share_hash_chain", "block_hash_tree",
                "share_data", "enc_privkey", "EOF"
        ]:
            name = {
                "share_data": "block data",
                "EOF": "end of share data"
            }.get(k, k)
            offset = m.DATA_OFFSET + offsets[k]
            printoffset(name, offset, 2)
        f = open(options['filename'], "rb")
        printoffset("extra leases", m._read_extra_lease_offset(f) + 4)
        f.close()

    print(file=out)
Example #22
    def get_source_info(self, source_spec):
        """
        This turns an argv string into a (Local|Tahoe)(File|Directory)Source.
        """
        precondition(isinstance(source_spec, str), source_spec)
        rootcap, path_utf8 = get_alias(self.aliases, source_spec, None)
        path = path_utf8.decode("utf-8")
        # any trailing slash is removed in abspath_expanduser_unicode(), so
        # make a note of it here, to throw an error later
        had_trailing_slash = path.endswith("/")
        if rootcap == DefaultAliasMarker:
            # no alias, so this is a local file
            pathname = abspath_expanduser_unicode(path)
            name = os.path.basename(pathname)
            if not os.path.exists(pathname):
                raise MissingSourceError(source_spec,
                                         quotefn=quote_local_unicode_path)
            if os.path.isdir(pathname):
                t = LocalDirectorySource(self.progress, pathname, name)
            else:
                if had_trailing_slash:
                    raise FilenameWithTrailingSlashError(
                        source_spec, quotefn=quote_local_unicode_path)
                if not os.path.isfile(pathname):
                    raise WeirdSourceError(pathname)
                t = LocalFileSource(pathname, name)  # non-empty
        else:
            # this is a tahoe object
            url = self.nodeurl + "uri/%s" % url_quote(rootcap)
            name = None
            if path:
                if path.endswith("/"):
                    path = path[:-1]
                url += "/" + escape_path(path)
                last_slash = path.rfind(u"/")
                name = path
                if last_slash != -1:
                    name = path[last_slash + 1:]

            resp = do_http("GET", url + "?t=json")
            if resp.status == 404:
                raise MissingSourceError(source_spec)
            elif resp.status != 200:
                raise HTTPError(
                    "Error examining source %s" % quote_output(source_spec),
                    resp)
            parsed = json.loads(resp.read())
            nodetype, d = parsed
            if nodetype == "dirnode":
                t = TahoeDirectorySource(self.nodeurl, self.cache,
                                         self.progress, name)
                t.init_from_parsed(parsed)
            else:
                if had_trailing_slash:
                    raise FilenameWithTrailingSlashError(source_spec)
                writecap = to_bytes(d.get("rw_uri"))
                readcap = to_bytes(d.get("ro_uri"))
                mutable = d.get("mutable",
                                False)  # older nodes don't provide it
                t = TahoeFileSource(self.nodeurl, mutable, writecap, readcap,
                                    name)
        return t
Example #23
def dump_immutable_chk_share(f, out, options):
    from allmydata import uri
    from allmydata.util import base32
    from allmydata.immutable.layout import ReadBucketProxy
    from allmydata.util.encodingutil import quote_output, to_bytes

    # use a ReadBucketProxy to parse the bucket and find the uri extension
    bp = ReadBucketProxy(None, None, '')
    offsets = bp._parse_offsets(f.read_share_data(0, 0x44))
    print("%20s: %d" % ("version", bp._version), file=out)
    seek = offsets['uri_extension']
    length = struct.unpack(bp._fieldstruct,
                           f.read_share_data(seek, bp._fieldsize))[0]
    seek += bp._fieldsize
    UEB_data = f.read_share_data(seek, length)

    unpacked = uri.unpack_extension_readable(UEB_data)
    keys1 = ("size", "num_segments", "segment_size", "needed_shares",
             "total_shares")
    keys2 = ("codec_name", "codec_params", "tail_codec_params")
    keys3 = ("plaintext_hash", "plaintext_root_hash", "crypttext_hash",
             "crypttext_root_hash", "share_root_hash", "UEB_hash")
    display_keys = {"size": "file_size"}

    def to_string(v):
        if isinstance(v, bytes):
            return unicode(v, "utf-8")
        else:
            return str(v)

    for k in keys1:
        if k in unpacked:
            dk = display_keys.get(k, k)
            print("%20s: %s" % (dk, to_string(unpacked[k])), file=out)
    print(file=out)
    for k in keys2:
        if k in unpacked:
            dk = display_keys.get(k, k)
            print("%20s: %s" % (dk, to_string(unpacked[k])), file=out)
    print(file=out)
    for k in keys3:
        if k in unpacked:
            dk = display_keys.get(k, k)
            print("%20s: %s" % (dk, to_string(unpacked[k])), file=out)

    leftover = set(unpacked.keys()) - set(keys1 + keys2 + keys3)
    if leftover:
        print(file=out)
        print("LEFTOVER:", file=out)
        for k in sorted(leftover):
            print("%20s: %s" % (k, to_string(unpacked[k])), file=out)

    # the storage index isn't stored in the share itself, so we depend upon
    # knowing the parent directory name to get it
    pieces = options['filename'].split(os.sep)
    if len(pieces) >= 2:
        piece = to_bytes(pieces[-2])
        if base32.could_be_base32_encoded(piece):
            storage_index = base32.a2b(piece)
            uri_extension_hash = base32.a2b(unpacked["UEB_hash"])
            u = uri.CHKFileVerifierURI(storage_index, uri_extension_hash,
                                       unpacked["needed_shares"],
                                       unpacked["total_shares"],
                                       unpacked["size"])
            verify_cap = u.to_string()
            print("%20s: %s" %
                  ("verify-cap", quote_output(verify_cap, quotemarks=False)),
                  file=out)

    sizes = {}
    sizes['data'] = (offsets['plaintext_hash_tree'] - offsets['data'])
    sizes['validation'] = (offsets['uri_extension'] -
                           offsets['plaintext_hash_tree'])
    sizes['uri-extension'] = len(UEB_data)
    print(file=out)
    print(" Size of data within the share:", file=out)
    for k in sorted(sizes):
        print("%20s: %s" % (k, sizes[k]), file=out)

    if options['offsets']:
        print(file=out)
        print(" Section Offsets:", file=out)
        print("%20s: %s" % ("share data", f._data_offset), file=out)
        for k in [
                "data", "plaintext_hash_tree", "crypttext_hash_tree",
                "block_hashes", "share_hashes", "uri_extension"
        ]:
            name = {"data": "block data"}.get(k, k)
            offset = f._data_offset + offsets[k]
            print("  %20s: %s   (0x%x)" % (name, offset, offset), file=out)
        print("%20s: %s" % ("leases", f._lease_offset), file=out)
Example #24
def mv(options, mode="move"):
    nodeurl = options['node-url']
    aliases = options.aliases
    from_file = options.from_file
    to_file = options.to_file
    stdout = options.stdout
    stderr = options.stderr

    if nodeurl[-1] != "/":
        nodeurl += "/"
    try:
        rootcap, from_path = get_alias(aliases, from_file, DEFAULT_ALIAS)
    except UnknownAliasError as e:
        e.display(stderr)
        return 1
    from_path = str(from_path, "utf-8")
    from_url = nodeurl + "uri/%s" % url_quote(rootcap)
    if from_path:
        from_url += "/" + escape_path(from_path)
    # figure out the source cap
    resp = do_http("GET", from_url + "?t=json")
    if not re.search(r'^2\d\d$', str(resp.status)):
        print(format_http_error("Error", resp), file=stderr)
        return 1
    data = resp.read()
    nodetype, attrs = json.loads(data)
    cap = to_bytes(attrs.get("rw_uri") or attrs["ro_uri"])

    # now get the target
    try:
        rootcap, path = get_alias(aliases, to_file, DEFAULT_ALIAS)
    except UnknownAliasError as e:
        e.display(stderr)
        return 1
    to_url = nodeurl + "uri/%s" % url_quote(rootcap)
    path = str(path, "utf-8")
    if path:
        to_url += "/" + escape_path(path)

    if to_url.endswith("/"):
        # "mv foo.txt bar/" == "mv foo.txt bar/foo.txt"
        to_url += escape_path(from_path[from_path.rfind("/") + 1:])

    to_url += "?t=uri&replace=only-files"

    resp = do_http("PUT", to_url, cap)
    status = resp.status
    if not re.search(r'^2\d\d$', str(status)):
        if status == 409:
            print("Error: You can't overwrite a directory with a file",
                  file=stderr)
        else:
            print(format_http_error("Error", resp), file=stderr)
            if mode == "move":
                print("NOT removing the original", file=stderr)
        return 1

    if mode == "move":
        # now remove the original
        resp = do_http("DELETE", from_url)
        if not re.search(r'^2\d\d$', str(resp.status)):
            print(format_http_error("Error deleting original after move",
                                    resp),
                  file=stderr)
            return 2

    print("OK", file=stdout)
    return 0