Example #1
 def populate(self, recurse):
     if self.children is not None:
         return
     self.children = {}
     for i, (name, data) in enumerate(self.children_d.items()):
         self.progressfunc("examining %d of %d" % (i+1, len(self.children_d)))
         if data[0] == "filenode":
             mutable = data[1].get("mutable", False)
             writecap = to_str(data[1].get("rw_uri"))
             readcap = to_str(data[1].get("ro_uri"))
             self.children[name] = TahoeFileSource(self.nodeurl, mutable,
                                                   writecap, readcap)
         elif data[0] == "dirnode":
             writecap = to_str(data[1].get("rw_uri"))
             readcap = to_str(data[1].get("ro_uri"))
             if writecap and writecap in self.cache:
                 child = self.cache[writecap]
             elif readcap and readcap in self.cache:
                 child = self.cache[readcap]
             else:
                 child = TahoeDirectorySource(self.nodeurl, self.cache,
                                              self.progressfunc)
                 child.init_from_grid(writecap, readcap)
                 if writecap:
                     self.cache[writecap] = child
                 if readcap:
                     self.cache[readcap] = child
                 if recurse:
                     child.populate(True)
             self.children[name] = child
         else:
             # TODO: there should be an option to skip unknown nodes.
             raise TahoeError("Cannot copy unknown nodes (ticket #839). "
                              "You probably need to use a later version of "
                              "Tahoe-LAFS to copy this directory.")
Example #2
 def init_from_parsed(self, parsed):
     nodetype, d = parsed
     self.writecap = to_str(d.get("rw_uri"))
     self.readcap = to_str(d.get("ro_uri"))
     self.mutable = d.get("mutable", False)  # older nodes don't provide it
     self.children_d = dict([(unicode(name), value) for (name, value) in d["children"].iteritems()])
     self.children = None
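For orientation, parsed here is the decoded response of a directory GET ?t=json: a two-element [nodetype, d] list whose "children" map becomes self.children_d and later drives populate(). A representative shape, consistent with the directory-JSON tests later in this listing (the cap strings are placeholders, not real URIs):

    parsed = ["dirnode", {
        "rw_uri": "URI:DIR2:...",     # placeholder writecap
        "ro_uri": "URI:DIR2-RO:...",  # placeholder readcap
        "mutable": True,
        "children": {
            "file1.txt": ["filenode", {"ro_uri": "URI:CHK:...",
                                       "metadata": {}}],
        },
    }]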
Example #3
 def populate(self, recurse):
     if self.children is not None:
         return
     self.children = {}
     for i, (name, data) in enumerate(self.children_d.items()):
         self.progressfunc("examining %d of %d" % (i+1, len(self.children_d)))
         if data[0] == "filenode":
             mutable = data[1].get("mutable", False)
             writecap = to_str(data[1].get("rw_uri"))
             readcap = to_str(data[1].get("ro_uri"))
             self.children[name] = TahoeFileSource(self.nodeurl, mutable,
                                                   writecap, readcap, name)
         elif data[0] == "dirnode":
             writecap = to_str(data[1].get("rw_uri"))
             readcap = to_str(data[1].get("ro_uri"))
             if writecap and writecap in self.cache:
                 child = self.cache[writecap]
             elif readcap and readcap in self.cache:
                 child = self.cache[readcap]
             else:
                 child = TahoeDirectorySource(self.nodeurl, self.cache,
                                              self.progressfunc, name)
                 child.init_from_grid(writecap, readcap)
                 if writecap:
                     self.cache[writecap] = child
                 if readcap:
                     self.cache[readcap] = child
                 if recurse:
                     child.populate(recurse=True)
             self.children[name] = child
         else:
             # TODO: there should be an option to skip unknown nodes.
             raise TahoeError("Cannot copy unknown nodes (ticket #839). "
                              "You probably need to use a later version of "
                              "Tahoe-LAFS to copy this directory.")
Example #4
 def test_to_str(self):
     self.failUnlessReallyEqual(to_str("foo"), "foo")
     self.failUnlessReallyEqual(to_str("lumi\xc3\xa8re"), "lumi\xc3\xa8re")
     self.failUnlessReallyEqual(
         to_str("\xFF"),
         "\xFF")  # passes through invalid UTF-8 -- is this what we want?
     self.failUnlessReallyEqual(to_str(u"lumi\u00E8re"), "lumi\xc3\xa8re")
     self.failUnlessReallyEqual(to_str(None), None)
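The assertions above pin down the contract of to_str: byte strings (even invalid UTF-8) and None pass through unchanged, while unicode strings are encoded to UTF-8. A minimal Python 2 sketch consistent with that contract (inferred from the test, not the verbatim allmydata.util.encodingutil source):

    def to_str(s):
        # Byte strings and None pass through untouched; unicode is
        # encoded to UTF-8. Inferred from the assertions above.
        if s is None or isinstance(s, str):
            return s
        return s.encode("utf-8")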
Example #5
    def get_source_info(self, source_spec):
        """
        This turns an argv string into a (Local|Tahoe)(File|Directory)Source.
        """
        precondition(isinstance(source_spec, unicode), source_spec)
        rootcap, path_utf8 = get_alias(self.aliases, source_spec, None)
        path = path_utf8.decode("utf-8")
        if rootcap == DefaultAliasMarker:
            # no alias, so this is a local file
            pathname = abspath_expanduser_unicode(path)
            name = os.path.basename(pathname)
            if not os.path.exists(pathname):
                raise MissingSourceError(source_spec,
                                         quotefn=quote_local_unicode_path)
            if os.path.isdir(pathname):
                t = LocalDirectorySource(self.progress, pathname, name)
            else:
                assert os.path.isfile(pathname)
                t = LocalFileSource(pathname, name)  # non-empty
        else:
            # this is a tahoe object
            url = self.nodeurl + "uri/%s" % urllib.quote(rootcap)
            name = None
            if path:
                url += "/" + escape_path(path)
                last_slash = path.rfind(u"/")
                name = path
                if last_slash != -1:
                    name = path[last_slash + 1:]

            resp = do_http("GET", url + "?t=json")
            if resp.status == 404:
                raise MissingSourceError(source_spec)
            elif resp.status != 200:
                raise HTTPError(
                    "Error examining source %s" % quote_output(source_spec),
                    resp)
            parsed = simplejson.loads(resp.read())
            nodetype, d = parsed
            if nodetype == "dirnode":
                t = TahoeDirectorySource(self.nodeurl, self.cache,
                                         self.progress, name)
                t.init_from_parsed(parsed)
            else:
                writecap = to_str(d.get("rw_uri"))
                readcap = to_str(d.get("ro_uri"))
                mutable = d.get("mutable",
                                False)  # older nodes don't provide it

                last_slash = source_spec.rfind(u"/")
                if last_slash != -1:
                    # TODO: this looks funny and redundant with the 'name'
                    # assignment above. cf #2329
                    name = source_spec[last_slash + 1:]

                t = TahoeFileSource(self.nodeurl, mutable, writecap, readcap,
                                    name)
        return t
Example #6
    def get_source_info(self, source_spec):
        """
        This turns an argv string into a (Local|Tahoe)(File|Directory)Source.
        """
        precondition(isinstance(source_spec, unicode), source_spec)
        rootcap, path_utf8 = get_alias(self.aliases, source_spec, None)
        path = path_utf8.decode("utf-8")
        # any trailing slash is removed in abspath_expanduser_unicode(), so
        # make a note of it here, to throw an error later
        had_trailing_slash = path.endswith("/")
        if rootcap == DefaultAliasMarker:
            # no alias, so this is a local file
            pathname = abspath_expanduser_unicode(path)
            name = os.path.basename(pathname)
            if not os.path.exists(pathname):
                raise MissingSourceError(source_spec, quotefn=quote_local_unicode_path)
            if os.path.isdir(pathname):
                t = LocalDirectorySource(self.progress, pathname, name)
            else:
                if had_trailing_slash:
                    raise FilenameWithTrailingSlashError(source_spec,
                                                         quotefn=quote_local_unicode_path)
                if not os.path.isfile(pathname):
                    raise WeirdSourceError(pathname)
                t = LocalFileSource(pathname, name) # non-empty
        else:
            # this is a tahoe object
            url = self.nodeurl + "uri/%s" % urllib.quote(rootcap)
            name = None
            if path:
                if path.endswith("/"):
                    path = path[:-1]
                url += "/" + escape_path(path)
                last_slash = path.rfind(u"/")
                name = path
                if last_slash != -1:
                    name = path[last_slash+1:]

            resp = do_http("GET", url + "?t=json")
            if resp.status == 404:
                raise MissingSourceError(source_spec)
            elif resp.status != 200:
                raise HTTPError("Error examining source %s" % quote_output(source_spec),
                                resp)
            parsed = json.loads(resp.read())
            nodetype, d = parsed
            if nodetype == "dirnode":
                t = TahoeDirectorySource(self.nodeurl, self.cache,
                                         self.progress, name)
                t.init_from_parsed(parsed)
            else:
                if had_trailing_slash:
                    raise FilenameWithTrailingSlashError(source_spec)
                writecap = to_str(d.get("rw_uri"))
                readcap = to_str(d.get("ro_uri"))
                mutable = d.get("mutable", False) # older nodes don't provide it
                t = TahoeFileSource(self.nodeurl, mutable, writecap, readcap, name)
        return t
Example #7
    def get_source_info(self, source_spec):
        """
        This turns an argv string into a (Local|Tahoe)(File|Directory)Source.
        """
        precondition(isinstance(source_spec, unicode), source_spec)
        rootcap, path_utf8 = get_alias(self.aliases, source_spec, None)
        path = path_utf8.decode("utf-8")
        # any trailing slash is removed in abspath_expanduser_unicode(), so
        # make a note of it here, to throw an error later
        had_trailing_slash = path.endswith("/")
        if rootcap == DefaultAliasMarker:
            # no alias, so this is a local file
            pathname = abspath_expanduser_unicode(path)
            name = os.path.basename(pathname)
            if not os.path.exists(pathname):
                raise MissingSourceError(source_spec, quotefn=quote_local_unicode_path)
            if os.path.isdir(pathname):
                t = LocalDirectorySource(self.progress, pathname, name)
            else:
                if had_trailing_slash:
                    raise FilenameWithTrailingSlashError(source_spec,
                                                         quotefn=quote_local_unicode_path)
                if not os.path.isfile(pathname):
                    raise WeirdSourceError(pathname)
                t = LocalFileSource(pathname, name) # non-empty
        else:
            # this is a tahoe object
            url = self.nodeurl + "uri/%s" % urllib.quote(rootcap)
            name = None
            if path:
                if path.endswith("/"):
                    path = path[:-1]
                url += "/" + escape_path(path)
                last_slash = path.rfind(u"/")
                name = path
                if last_slash != -1:
                    name = path[last_slash+1:]

            resp = do_http("GET", url + "?t=json")
            if resp.status == 404:
                raise MissingSourceError(source_spec)
            elif resp.status != 200:
                raise HTTPError("Error examining source %s" % quote_output(source_spec),
                                resp)
            parsed = simplejson.loads(resp.read())
            nodetype, d = parsed
            if nodetype == "dirnode":
                t = TahoeDirectorySource(self.nodeurl, self.cache,
                                         self.progress, name)
                t.init_from_parsed(parsed)
            else:
                if had_trailing_slash:
                    raise FilenameWithTrailingSlashError(source_spec)
                writecap = to_str(d.get("rw_uri"))
                readcap = to_str(d.get("ro_uri"))
                mutable = d.get("mutable", False) # older nodes don't provide it
                t = TahoeFileSource(self.nodeurl, mutable, writecap, readcap, name)
        return t
Example #8
 def init_from_parsed(self, parsed):
     nodetype, d = parsed
     self.writecap = to_str(d.get("rw_uri"))
     self.readcap = to_str(d.get("ro_uri"))
     self.mutable = d.get("mutable", False)  # older nodes don't provide it
     self.children_d = dict([(unicode(name), value)
                             for (name, value) in d["children"].iteritems()
                             ])
     self.children = None
Example #9
    def get_source_info(self, source_spec):
        """
        This turns an argv string into a (Local|Tahoe)(File|Directory)Source.
        """
        precondition(isinstance(source_spec, unicode), source_spec)
        rootcap, path_utf8 = get_alias(self.aliases, source_spec, None)
        path = path_utf8.decode("utf-8")
        if rootcap == DefaultAliasMarker:
            # no alias, so this is a local file
            pathname = abspath_expanduser_unicode(path)
            name = os.path.basename(pathname)
            if not os.path.exists(pathname):
                raise MissingSourceError(source_spec, quotefn=quote_local_unicode_path)
            if os.path.isdir(pathname):
                t = LocalDirectorySource(self.progress, pathname, name)
            else:
                assert os.path.isfile(pathname)
                t = LocalFileSource(pathname, name) # non-empty
        else:
            # this is a tahoe object
            url = self.nodeurl + "uri/%s" % urllib.quote(rootcap)
            name = None
            if path:
                url += "/" + escape_path(path)
                last_slash = path.rfind(u"/")
                name = path
                if last_slash != -1:
                    name = path[last_slash+1:]

            resp = do_http("GET", url + "?t=json")
            if resp.status == 404:
                raise MissingSourceError(source_spec)
            elif resp.status != 200:
                raise HTTPError("Error examining source %s" % quote_output(source_spec),
                                resp)
            parsed = simplejson.loads(resp.read())
            nodetype, d = parsed
            if nodetype == "dirnode":
                t = TahoeDirectorySource(self.nodeurl, self.cache,
                                         self.progress, name)
                t.init_from_parsed(parsed)
            else:
                writecap = to_str(d.get("rw_uri"))
                readcap = to_str(d.get("ro_uri"))
                mutable = d.get("mutable", False) # older nodes don't provide it

                last_slash = source_spec.rfind(u"/")
                if last_slash != -1:
                    # TODO: this looks funny and redundant with the 'name'
                    # assignment above. cf #2329
                    name = source_spec[last_slash+1:]

                t = TahoeFileSource(self.nodeurl, mutable, writecap, readcap, name)
        return t
Example #10
        def _get_test_txt_uris((rc, out, err)):
            self.failUnlessEqual(rc, 0)
            filetype, data = json.loads(out)

            self.failUnlessEqual(filetype, "filenode")
            self.failUnless(data['mutable'])

            self.failUnlessIn("rw_uri", data)
            self.rw_uri = to_str(data["rw_uri"])
            self.failUnlessIn("ro_uri", data)
            self.ro_uri = to_str(data["ro_uri"])
Example #11
        def _check_json((rc, out, err)):
            self.failUnlessEqual(rc, 0)
            filetype, data = simplejson.loads(out)

            self.failUnlessEqual(filetype, "filenode")
            self.failUnless(data['mutable'])

            self.failUnlessIn("ro_uri", data)
            self.failUnlessEqual(to_str(data["ro_uri"]), self.ro_uri)
            self.failUnlessIn("rw_uri", data)
            self.failUnlessEqual(to_str(data["rw_uri"]), self.rw_uri)
Example #12
        def _get_test_txt_uris((rc, out, err)):
            self.failUnlessEqual(rc, 0)
            filetype, data = simplejson.loads(out)

            self.failUnlessEqual(filetype, "filenode")
            self.failUnless(data['mutable'])

            self.failUnlessIn("rw_uri", data)
            self.rw_uri = to_str(data["rw_uri"])
            self.failUnlessIn("ro_uri", data)
            self.ro_uri = to_str(data["ro_uri"])
Example #13
        def _check_json((rc, out, err)):
            self.failUnlessEqual(rc, 0)
            filetype, data = json.loads(out)

            self.failUnlessEqual(filetype, "filenode")
            self.failUnless(data['mutable'])

            self.failUnlessIn("ro_uri", data)
            self.failUnlessEqual(to_str(data["ro_uri"]), self.ro_uri)
            self.failUnlessIn("rw_uri", data)
            self.failUnlessEqual(to_str(data["rw_uri"]), self.rw_uri)
Example #14
        def _process_file_json(args, fn):
            (rc, out, err) = args
            self.failUnlessEqual(rc, 0)
            filetype, data = json.loads(out)
            self.failUnlessEqual(filetype, "filenode")

            if "mutable" in fn:
                self.failUnless(data['mutable'])
                self.failUnlessIn("rw_uri", data)
                self.failUnlessEqual(to_str(data["rw_uri"]), self.childuris[fn])
            else:
                self.failIf(data['mutable'])
                self.failUnlessIn("ro_uri", data)
                self.failIfEqual(to_str(data["ro_uri"]), self.childuris[fn])
Example #15
 def _check2(args):
     (rc, out, err) = args
     self.failUnlessReallyEqual(err, "")
     self.failUnlessReallyEqual(rc, 0)
     data = json.loads(out)
     self.failUnlessReallyEqual(to_str(data["summary"]), "Healthy")
     self.failUnlessReallyEqual(data["results"]["healthy"], True)
Example #16
        def _process_directory_json(args):
            (rc, out, err) = args
            self.failUnlessEqual(rc, 0)

            filetype, data = json.loads(out)
            self.failUnlessEqual(filetype, "dirnode")
            self.failUnless(data['mutable'])
            self.failUnlessIn("children", data)
            children = data['children']

            # Store the URIs for later use.
            self.childuris = {}
            for k in ["mutable1", "mutable2", "imm1", "imm2"]:
                self.failUnlessIn(k, children)
                childtype, childdata = children[k]
                self.failUnlessEqual(childtype, "filenode")
                if "mutable" in k:
                    self.failUnless(childdata['mutable'])
                    self.failUnlessIn("rw_uri", childdata)
                    uri_key = "rw_uri"
                else:
                    self.failIf(childdata['mutable'])
                    self.failUnlessIn("ro_uri", childdata)
                    uri_key = "ro_uri"
                self.childuris[k] = to_str(childdata[uri_key])
Example #17
 def _process_imm2_json((rc, out, err)):
     self.failUnlessEqual(rc, 0)
     filetype, data = simplejson.loads(out)
     self.failUnlessEqual(filetype, "filenode")
     self.failIf(data['mutable'])
     self.failUnlessIn("ro_uri", data)
     self.failUnlessEqual(to_str(data["ro_uri"]), self.childuris["imm2"])
Example #18
 def _process_imm2_json((rc, out, err)):
     self.failUnlessEqual(rc, 0)
     filetype, data = json.loads(out)
     self.failUnlessEqual(filetype, "filenode")
     self.failIf(data['mutable'])
     self.failUnlessIn("ro_uri", data)
     self.failUnlessEqual(to_str(data["ro_uri"]), self.childuris["imm2"])
Example #19
    def get_source_info(self, source_spec):
        rootcap, path = get_alias(self.aliases, source_spec, None)
        if rootcap == DefaultAliasMarker:
            # no alias, so this is a local file
            pathname = abspath_expanduser_unicode(path.decode('utf-8'))
            name = os.path.basename(pathname)
            if not os.path.exists(pathname):
                raise MissingSourceError(source_spec)
            if os.path.isdir(pathname):
                t = LocalDirectorySource(self.progress, pathname)
            else:
                assert os.path.isfile(pathname)
                t = LocalFileSource(pathname)  # non-empty
        else:
            # this is a tahoe object
            url = self.nodeurl + "uri/%s" % urllib.quote(rootcap)
            name = None
            if path:
                url += "/" + escape_path(path)
                last_slash = path.rfind("/")
                name = path
                if last_slash != -1:  # rfind returns -1 when there is no slash
                    name = path[last_slash + 1:]

            resp = do_http("GET", url + "?t=json")
            if resp.status == 404:
                raise MissingSourceError(source_spec)
            elif resp.status != 200:
                raise HTTPError(
                    "Error examining source %s" % quote_output(source_spec),
                    resp)
            parsed = simplejson.loads(resp.read())
            nodetype, d = parsed
            if nodetype == "dirnode":
                t = TahoeDirectorySource(self.nodeurl, self.cache,
                                         self.progress)
                t.init_from_parsed(parsed)
            else:
                writecap = to_str(d.get("rw_uri"))
                readcap = to_str(d.get("ro_uri"))
                mutable = d.get("mutable",
                                False)  # older nodes don't provide it
                if source_spec.rfind('/') != -1:
                    name = source_spec[source_spec.rfind('/') + 1:]
                t = TahoeFileSource(self.nodeurl, mutable, writecap, readcap)
        return name, t
Example #20
        def _process_test_json((rc, out, err)):
            self.failUnlessEqual(rc, 0)
            filetype, data = json.loads(out)

            self.failUnlessEqual(filetype, "filenode")
            self.failUnless(data['mutable'])
            self.failUnlessIn("ro_uri", data)
            self._test_read_uri = to_str(data["ro_uri"])
Example #21
def convert_children_json(nodemaker, children_json):
    """I convert the JSON output of GET?t=json into the dict-of-nodes input
    to both dirnode.create_subdirectory() and
    client.create_directory(initial_children=). This is used by
    t=mkdir-with-children and t=mkdir-immutable"""
    children = {}
    if children_json:
        data = json.loads(children_json)
        for (namex, (ctype, propdict)) in data.iteritems():
            namex = unicode(namex)
            writecap = to_str(propdict.get("rw_uri"))
            readcap = to_str(propdict.get("ro_uri"))
            metadata = propdict.get("metadata", {})
            # name= argument is just for error reporting
            childnode = nodemaker.create_from_cap(writecap, readcap, name=namex)
            children[namex] = (childnode, metadata)
    return children
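The loop above unpacks each entry of the decoded JSON as a (type, propdict) pair, so children_json mirrors the "children" map of a directory's GET ?t=json response. A hypothetical call showing that shape (the cap strings are placeholders):

    children_json = '''{
        "file1.txt": ["filenode", {"ro_uri": "URI:CHK:...", "metadata": {}}],
        "subdir":    ["dirnode",  {"rw_uri": "URI:DIR2:...",
                                   "ro_uri": "URI:DIR2-RO:..."}]
    }'''
    children = convert_children_json(nodemaker, children_json)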
Example #22
        def _process_test_json((rc, out, err)):
            self.failUnlessEqual(rc, 0)
            filetype, data = simplejson.loads(out)

            self.failUnlessEqual(filetype, "filenode")
            self.failUnless(data['mutable'])
            self.failUnlessIn("ro_uri", data)
            self._test_read_uri = to_str(data["ro_uri"])
Example #23
def convert_children_json(nodemaker, children_json):
    """I convert the JSON output of GET?t=json into the dict-of-nodes input
    to both dirnode.create_subdirectory() and
    client.create_directory(initial_children=). This is used by
    t=mkdir-with-children and t=mkdir-immutable"""
    children = {}
    if children_json:
        data = simplejson.loads(children_json)
        for (namex, (ctype, propdict)) in data.iteritems():
            namex = unicode(namex)
            writecap = to_str(propdict.get("rw_uri"))
            readcap = to_str(propdict.get("ro_uri"))
            metadata = propdict.get("metadata", {})
            # name= argument is just for error reporting
            childnode = nodemaker.create_from_cap(writecap, readcap, name=namex)
            children[namex] = (childnode, metadata)
    return children
Example #24
    def get_source_info(self, source_spec):
        rootcap, path = get_alias(self.aliases, source_spec, None)
        if rootcap == DefaultAliasMarker:
            # no alias, so this is a local file
            pathname = abspath_expanduser_unicode(path.decode('utf-8'))
            name = os.path.basename(pathname)
            if not os.path.exists(pathname):
                raise MissingSourceError(source_spec)
            if os.path.isdir(pathname):
                t = LocalDirectorySource(self.progress, pathname)
            else:
                assert os.path.isfile(pathname)
                t = LocalFileSource(pathname) # non-empty
        else:
            # this is a tahoe object
            url = self.nodeurl + "uri/%s" % urllib.quote(rootcap)
            name = None
            if path:
                url += "/" + escape_path(path)
                last_slash = path.rfind("/")
                name = path
                if last_slash != -1:  # rfind returns -1 when there is no slash
                    name = path[last_slash+1:]

            resp = do_http("GET", url + "?t=json")
            if resp.status == 404:
                raise MissingSourceError(source_spec)
            elif resp.status != 200:
                raise HTTPError("Error examining source %s" % quote_output(source_spec),
                                resp)
            parsed = simplejson.loads(resp.read())
            nodetype, d = parsed
            if nodetype == "dirnode":
                t = TahoeDirectorySource(self.nodeurl, self.cache,
                                         self.progress)
                t.init_from_parsed(parsed)
            else:
                writecap = to_str(d.get("rw_uri"))
                readcap = to_str(d.get("ro_uri"))
                mutable = d.get("mutable", False) # older nodes don't provide it
                if source_spec.rfind('/') != -1:
                    name = source_spec[source_spec.rfind('/')+1:]
                t = TahoeFileSource(self.nodeurl, mutable, writecap, readcap)
        return name, t
Example #25
    def get_target_info(self, destination_spec):
        precondition(isinstance(destination_spec, unicode), destination_spec)
        rootcap, path_utf8 = get_alias(self.aliases, destination_spec, None)
        path = path_utf8.decode("utf-8")
        if rootcap == DefaultAliasMarker:
            # no alias, so this is a local file
            pathname = abspath_expanduser_unicode(path)
            if not os.path.exists(pathname):
                t = LocalMissingTarget(pathname)
            elif os.path.isdir(pathname):
                t = LocalDirectoryTarget(self.progress, pathname)
            else:
                # TODO: should this be _assert? what happens if the target is
                # a special file?
                assert os.path.isfile(pathname), pathname
                t = LocalFileTarget(pathname)  # non-empty
        else:
            # this is a tahoe object
            url = self.nodeurl + "uri/%s" % urllib.quote(rootcap)
            if path:
                url += "/" + escape_path(path)

            resp = do_http("GET", url + "?t=json")
            if resp.status == 404:
                # doesn't exist yet
                t = TahoeMissingTarget(url)
            elif resp.status == 200:
                parsed = simplejson.loads(resp.read())
                nodetype, d = parsed
                if nodetype == "dirnode":
                    t = TahoeDirectoryTarget(self.nodeurl, self.cache,
                                             self.progress)
                    t.init_from_parsed(parsed)
                else:
                    writecap = to_str(d.get("rw_uri"))
                    readcap = to_str(d.get("ro_uri"))
                    mutable = d.get("mutable", False)
                    t = TahoeFileTarget(self.nodeurl, mutable, writecap,
                                        readcap, url)
            else:
                raise HTTPError(
                    "Error examining target %s" %
                    quote_output(destination_spec), resp)
        return t
Example #26
    def get_target_info(self, destination_spec):
        precondition(isinstance(destination_spec, unicode), destination_spec)
        rootcap, path_utf8 = get_alias(self.aliases, destination_spec, None)
        path = path_utf8.decode("utf-8")
        if rootcap == DefaultAliasMarker:
            # no alias, so this is a local file
            pathname = abspath_expanduser_unicode(path)
            if not os.path.exists(pathname):
                t = LocalMissingTarget(pathname)
            elif os.path.isdir(pathname):
                t = LocalDirectoryTarget(self.progress, pathname)
            else:
                # TODO: should this be _assert? what happens if the target is
                # a special file?
                assert os.path.isfile(pathname), pathname
                t = LocalFileTarget(pathname) # non-empty
        else:
            # this is a tahoe object
            url = self.nodeurl + "uri/%s" % urllib.quote(rootcap)
            if path:
                url += "/" + escape_path(path)

            resp = do_http("GET", url + "?t=json")
            if resp.status == 404:
                # doesn't exist yet
                t = TahoeMissingTarget(url)
            elif resp.status == 200:
                parsed = json.loads(resp.read())
                nodetype, d = parsed
                if nodetype == "dirnode":
                    t = TahoeDirectoryTarget(self.nodeurl, self.cache,
                                             self.progress)
                    t.init_from_parsed(parsed)
                else:
                    writecap = to_str(d.get("rw_uri"))
                    readcap = to_str(d.get("ro_uri"))
                    mutable = d.get("mutable", False)
                    t = TahoeFileTarget(self.nodeurl, mutable,
                                        writecap, readcap, url)
            else:
                raise HTTPError("Error examining target %s"
                                 % quote_output(destination_spec), resp)
        return t
Example #27
    def check_directory(self, contents):
        """I will tell you if a new directory needs to be created for a given
        set of directory contents, or if I know of an existing (immutable)
        directory that can be used instead.

        'contents' should be a dictionary that maps from child name (a single
        unicode string) to immutable childcap (filecap or dircap).

        I return a DirectoryResult object, synchronously. If r.was_created()
        returns False, you should create the directory (with
        t=mkdir-immutable). When you are finished, call r.did_create(dircap)
        so I can update my database.

        If was_created() returns a dircap, you might be able to avoid the
        mkdir. Call r.should_check(), and if it says False, you can skip the
        mkdir and use the dircap returned by was_created().

        If should_check() returns True, you should perform a check operation
        on the dircap returned by was_created(). If the check indicates the
        directory is healthy, please call
        r.did_check_healthy(checker_results) so I can update the database,
        using the de-JSONized response from the webapi t=check call for
        'checker_results'. If the check indicates the directory is not
        healthy, please repair or re-create the directory and call
        r.did_create(dircap) when you're done.
        """

        now = time.time()
        entries = []
        for name in contents:
            entries.append([name.encode("utf-8"), contents[name]])
        entries.sort()
        data = "".join([
            netstring(name_utf8) + netstring(cap)
            for (name_utf8, cap) in entries
        ])
        dirhash = backupdb_dirhash(data)
        dirhash_s = base32.b2a(dirhash)
        c = self.cursor
        c.execute(
            "SELECT dircap, last_checked"
            " FROM directories WHERE dirhash=?", (dirhash_s, ))
        row = c.fetchone()
        if not row:
            return DirectoryResult(self, dirhash_s, None, False)
        (dircap, last_checked) = row
        age = now - last_checked

        probability = ((age - self.NO_CHECK_BEFORE) /
                       (self.ALWAYS_CHECK_AFTER - self.NO_CHECK_BEFORE))
        probability = min(max(probability, 0.0), 1.0)
        should_check = bool(random.random() < probability)

        return DirectoryResult(self, dirhash_s, to_str(dircap), should_check)
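The docstring above describes a small caller-side protocol around the returned DirectoryResult. A hedged sketch of how a backup loop might drive it, using only the methods the docstring names (make_immutable_dir and run_check are hypothetical wrappers for the t=mkdir-immutable and t=check webapi calls):

    r = backupdb.check_directory(contents)
    dircap = r.was_created()
    if not dircap:
        # Contents unknown: create the directory and record the new cap.
        dircap = make_immutable_dir(contents)      # hypothetical helper
        r.did_create(dircap)
    elif r.should_check():
        results = run_check(dircap)                # hypothetical t=check wrapper
        if results["results"]["healthy"]:
            r.did_check_healthy(results)
        else:
            # Unhealthy: repair by re-creating, then record it.
            dircap = make_immutable_dir(contents)
            r.did_create(dircap)
    # else: reuse dircap as-is, no check needed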
Example #28
    def _POST_relink(self, req):
        charset = get_arg(req, "_charset", "utf-8")
        replace = parse_replace_arg(get_arg(req, "replace", "true"))

        from_name = get_arg(req, "from_name")
        if from_name is not None:
            from_name = from_name.strip()
            from_name = from_name.decode(charset)
            assert isinstance(from_name, unicode)
        else:
            raise WebError("from_name= is required")

        to_name = get_arg(req, "to_name")
        if to_name is not None:
            to_name = to_name.strip()
            to_name = to_name.decode(charset)
            assert isinstance(to_name, unicode)
        else:
            to_name = from_name

        # Disallow slashes in both from_name and to_name, that would only
        # cause confusion.
        if "/" in from_name:
            raise WebError("from_name= may not contain a slash",
                           http.BAD_REQUEST)
        if "/" in to_name:
            raise WebError("to_name= may not contain a slash",
                           http.BAD_REQUEST)

        to_dir = get_arg(req, "to_dir")
        if to_dir is not None and to_dir != self.node.get_write_uri():
            to_dir = to_dir.strip()
            to_dir = to_dir.decode(charset)
            assert isinstance(to_dir, unicode)
            to_path = to_dir.split(u"/")
            to_root = self.client.nodemaker.create_from_cap(to_str(to_path[0]))
            if not IDirectoryNode.providedBy(to_root):
                raise WebError("to_dir is not a directory", http.BAD_REQUEST)
            d = to_root.get_child_at_path(to_path[1:])
        else:
            d = defer.succeed(self.node)

        def _got_new_parent(new_parent):
            if not IDirectoryNode.providedBy(new_parent):
                raise WebError("to_dir is not a directory", http.BAD_REQUEST)

            return self.node.move_child_to(from_name, new_parent, to_name,
                                           replace)

        d.addCallback(_got_new_parent)
        d.addCallback(lambda res: "thing moved")
        return d
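Every parameter above is read with get_arg, so a client can pass them in the query string of the POST. A hypothetical invocation matching what the handler reads (the method name suggests it is dispatched for t=relink; the caps are placeholders):

    url = (nodeurl + "uri/%s" % urllib.quote(dircap)      # placeholder dircap
           + "?t=relink"
           + "&from_name=" + urllib.quote("old.txt")      # required; no slashes
           + "&to_name=" + urllib.quote("new.txt")        # optional; defaults to from_name
           + "&to_dir=" + urllib.quote(to_dircap_path))   # optional; cap plus /subpath
    resp = do_http("POST", url, "")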
Example #29
def mkdir(contents, options):
    kids = dict([(childname, (contents[childname][0], {
        "ro_uri": contents[childname][1],
        "metadata": contents[childname][2],
    })) for childname in contents])
    body = json.dumps(kids).encode("utf-8")
    url = options['node-url'] + "uri?t=mkdir-immutable"
    resp = do_http("POST", url, body)
    if resp.status < 200 or resp.status >= 300:
        raise HTTPError("Error during mkdir", resp)

    dircap = to_str(resp.read().strip())
    return dircap
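The contents mapping consumed above is keyed by child name, each value a (nodetype, ro_uri, metadata) triple, matching the indexing in the dict comprehension. A hypothetical call (the cap strings are placeholders):

    contents = {
        u"file1.txt": ("filenode", "URI:CHK:...", {}),
        u"subdir":    ("dirnode",  "URI:DIR2-RO:...", {}),
    }
    dircap = mkdir(contents, options)  # returns the new immutable dircap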
Example #30
    def parseArgs(self, invite_code, local_dir):
        BasedirOptions.parseArgs(self)

        try:
            if int(self['poll-interval']) <= 0:
                raise ValueError("should be positive")
        except ValueError:
            raise usage.UsageError(
                "--poll-interval must be a positive integer"
            )
        # Expand the path relative to the current directory of the CLI command, not the node.
        self.local_dir = None if local_dir is None else argv_to_abspath(local_dir, long_path=False)
        self.invite_code = to_str(argv_to_unicode(invite_code))
Example #31
    def _POST_relink(self, req):
        charset = get_arg(req, "_charset", "utf-8")
        replace = parse_replace_arg(get_arg(req, "replace", "true"))

        from_name = get_arg(req, "from_name")
        if from_name is not None:
            from_name = from_name.strip()
            from_name = from_name.decode(charset)
            assert isinstance(from_name, unicode)
        else:
            raise WebError("from_name= is required")

        to_name = get_arg(req, "to_name")
        if to_name is not None:
            to_name = to_name.strip()
            to_name = to_name.decode(charset)
            assert isinstance(to_name, unicode)
        else:
            to_name = from_name

        # Disallow slashes in both from_name and to_name, that would only
        # cause confusion.
        if "/" in from_name:
            raise WebError("from_name= may not contain a slash",
                           http.BAD_REQUEST)
        if "/" in to_name:
            raise WebError("to_name= may not contain a slash",
                           http.BAD_REQUEST)

        to_dir = get_arg(req, "to_dir")
        if to_dir is not None and to_dir != self.node.get_write_uri():
            to_dir = to_dir.strip()
            to_dir = to_dir.decode(charset)
            assert isinstance(to_dir, unicode)
            to_path = to_dir.split(u"/")
            to_root = self.client.nodemaker.create_from_cap(to_str(to_path[0]))
            if not IDirectoryNode.providedBy(to_root):
                raise WebError("to_dir is not a directory", http.BAD_REQUEST)
            d = to_root.get_child_at_path(to_path[1:])
        else:
            d = defer.succeed(self.node)

        def _got_new_parent(new_parent):
            if not IDirectoryNode.providedBy(new_parent):
                raise WebError("to_dir is not a directory", http.BAD_REQUEST)

            return self.node.move_child_to(from_name, new_parent,
                                           to_name, replace)
        d.addCallback(_got_new_parent)
        d.addCallback(lambda res: "thing moved")
        return d
Example #32
    def check_directory(self, contents):
        """I will tell you if a new directory needs to be created for a given
        set of directory contents, or if I know of an existing (immutable)
        directory that can be used instead.

        'contents' should be a dictionary that maps from child name (a single
        unicode string) to immutable childcap (filecap or dircap).

        I return a DirectoryResult object, synchronously. If r.was_created()
        returns False, you should create the directory (with
        t=mkdir-immutable). When you are finished, call r.did_create(dircap)
        so I can update my database.

        If was_created() returns a dircap, you might be able to avoid the
        mkdir. Call r.should_check(), and if it says False, you can skip the
        mkdir and use the dircap returned by was_created().

        If should_check() returns True, you should perform a check operation
        on the dircap returned by was_created(). If the check indicates the
        directory is healthy, please call
        r.did_check_healthy(checker_results) so I can update the database,
        using the de-JSONized response from the webapi t=check call for
        'checker_results'. If the check indicates the directory is not
        healthy, please repair or re-create the directory and call
        r.did_create(dircap) when you're done.
        """

        now = time.time()
        entries = []
        for name in contents:
            entries.append( [name.encode("utf-8"), contents[name]] )
        entries.sort()
        data = "".join([netstring(name_utf8)+netstring(cap)
                        for (name_utf8,cap) in entries])
        dirhash = backupdb_dirhash(data)
        dirhash_s = base32.b2a(dirhash)
        c = self.cursor
        c.execute("SELECT dircap, last_checked"
                  " FROM directories WHERE dirhash=?", (dirhash_s,))
        row = c.fetchone()
        if not row:
            return DirectoryResult(self, dirhash_s, None, False)
        (dircap, last_checked) = row
        age = now - last_checked

        probability = ((age - self.NO_CHECK_BEFORE) /
                       (self.ALWAYS_CHECK_AFTER - self.NO_CHECK_BEFORE))
        probability = min(max(probability, 0.0), 1.0)
        should_check = bool(random.random() < probability)

        return DirectoryResult(self, dirhash_s, to_str(dircap), should_check)
Example #33
        def _process_tahoe_json((rc, out, err)):
            self.failUnlessEqual(rc, 0)

            filetype, data = json.loads(out)
            self.failUnlessEqual(filetype, "dirnode")
            self.failUnlessIn("children", data)
            kiddata = data['children']

            self.failUnlessIn("test_file.txt", kiddata)
            testtype, testdata = kiddata['test_file.txt']
            self.failUnlessEqual(testtype, "filenode")
            self.failUnless(testdata['mutable'])
            self.failUnlessIn("ro_uri", testdata)
            self.failUnlessEqual(to_str(testdata["ro_uri"]), self._test_read_uri)
            self.failIfIn("rw_uri", testdata)
Example #34
        def _process_tahoe_json((rc, out, err)):
            self.failUnlessEqual(rc, 0)

            filetype, data = simplejson.loads(out)
            self.failUnlessEqual(filetype, "dirnode")
            self.failUnlessIn("children", data)
            kiddata = data['children']

            self.failUnlessIn("test_file.txt", kiddata)
            testtype, testdata = kiddata['test_file.txt']
            self.failUnlessEqual(testtype, "filenode")
            self.failUnless(testdata['mutable'])
            self.failUnlessIn("ro_uri", testdata)
            self.failUnlessEqual(to_str(testdata["ro_uri"]), self._test_read_uri)
            self.failIfIn("rw_uri", testdata)
Example #35
def mkdir(contents, options):
    kids = dict([ (childname, (contents[childname][0],
                               {"ro_uri": contents[childname][1],
                                "metadata": contents[childname][2],
                                }))
                  for childname in contents
                  ])
    body = simplejson.dumps(kids).encode("utf-8")
    url = options['node-url'] + "uri?t=mkdir-immutable"
    resp = do_http("POST", url, body)
    if resp.status < 200 or resp.status >= 300:
        raise HTTPError("Error during mkdir", resp)

    dircap = to_str(resp.read().strip())
    return dircap
Example #36
        def _got_testdir_json((rc, out, err)):
            self.failUnlessEqual(rc, 0)

            filetype, data = simplejson.loads(out)
            self.failUnlessEqual(filetype, "dirnode")

            self.failUnlessIn("children", data)
            childdata = data['children']

            self.failUnlessIn("file2.txt", childdata)
            file2type, file2data = childdata['file2.txt']
            self.failUnlessEqual(file2type, "filenode")
            self.failUnless(file2data['mutable'])
            self.failUnlessIn("ro_uri", file2data)
            self.failUnlessEqual(to_str(file2data["ro_uri"]), self._test_read_uri)
            self.failIfIn("rw_uri", file2data)
Example #37
        def _got_testdir_json((rc, out, err)):
            self.failUnlessEqual(rc, 0)

            filetype, data = json.loads(out)
            self.failUnlessEqual(filetype, "dirnode")

            self.failUnlessIn("children", data)
            childdata = data['children']

            self.failUnlessIn("file2.txt", childdata)
            file2type, file2data = childdata['file2.txt']
            self.failUnlessEqual(file2type, "filenode")
            self.failUnless(file2data['mutable'])
            self.failUnlessIn("ro_uri", file2data)
            self.failUnlessEqual(to_str(file2data["ro_uri"]), self._test_read_uri)
            self.failIfIn("rw_uri", file2data)
Example #38
    def parseArgs(self, invite_code, local_dir):
        super(JoinOptions, self).parseArgs()

        try:
            if int(self['poll-interval']) <= 0:
                raise ValueError("should be positive")
        except ValueError:
            raise usage.UsageError(
                "--poll-interval must be a positive integer"
            )
        self.local_dir = FilePath(local_dir)
        if not self.local_dir.exists():
            raise usage.UsageError(
                "'{}' doesn't exist".format(local_dir)
            )
        if not self.local_dir.isdir():
            raise usage.UsageError(
                "'{}' isn't a directory".format(local_dir)
            )
        self.invite_code = to_str(argv_to_unicode(invite_code))
Example #39
    def parseArgs(self, invite_code, local_dir):
        super(JoinOptions, self).parseArgs()

        try:
            if int(self['poll-interval']) <= 0:
                raise ValueError("should be positive")
        except ValueError:
            raise usage.UsageError(
                "--poll-interval must be a positive integer")
        # Expand the path relative to the current directory of the CLI command, not the node.
        self.local_dir = None if local_dir is None else argv_to_abspath(
            local_dir, long_path=False)
        self.invite_code = to_str(argv_to_unicode(invite_code))
        if self['author'] is None:
            self['author'] = os.environ.get('USERNAME',
                                            os.environ.get('USER', None))
            if self['author'] is None:
                raise usage.UsageError(
                    "--author not provided and no USERNAME environment-variable"
                )
Example #40
 def test_to_str(self):
     self.failUnlessReallyEqual(to_str("foo"), "foo")
     self.failUnlessReallyEqual(to_str("lumi\xc3\xa8re"), "lumi\xc3\xa8re")
     self.failUnlessReallyEqual(to_str("\xFF"), "\xFF")  # passes through invalid UTF-8 -- is this what we want?
     self.failUnlessReallyEqual(to_str(u"lumi\u00E8re"), "lumi\xc3\xa8re")
     self.failUnlessReallyEqual(to_str(None), None)
Example #41
def dump_MDMF_share(m, length, options):
    from allmydata.mutable.layout import MDMFSlotReadProxy
    from allmydata.util import base32, hashutil
    from allmydata.uri import MDMFVerifierURI
    from allmydata.util.encodingutil import quote_output, to_str

    offset = m.DATA_OFFSET
    out = options.stdout

    f = open(options['filename'], "rb")
    storage_index = None
    shnum = 0

    class ShareDumper(MDMFSlotReadProxy):
        def _read(self, readvs, force_remote=False, queue=False):
            data = []
            for (where, length) in readvs:
                f.seek(offset + where)
                data.append(f.read(length))
            return defer.succeed({shnum: data})

    p = ShareDumper(None, storage_index, shnum)

    def extract(func):
        stash = []
        # these methods return Deferreds, but we happen to know that they run
        # synchronously when not actually talking to a remote server
        d = func()
        d.addCallback(stash.append)
        return stash[0]

    verinfo = extract(p.get_verinfo)
    encprivkey = extract(p.get_encprivkey)
    signature = extract(p.get_signature)
    pubkey = extract(p.get_verification_key)
    block_hash_tree = extract(p.get_blockhashes)
    share_hash_chain = extract(p.get_sharehashes)
    f.close()

    (seqnum, root_hash, salt_to_use, segsize, datalen, k, N, prefix,
     offsets) = verinfo

    print(" MDMF contents:", file=out)
    print("  seqnum: %d" % seqnum, file=out)
    print("  root_hash: %s" % base32.b2a(root_hash), file=out)
    #print >>out, "  IV: %s" % base32.b2a(IV)
    print("  required_shares: %d" % k, file=out)
    print("  total_shares: %d" % N, file=out)
    print("  segsize: %d" % segsize, file=out)
    print("  datalen: %d" % datalen, file=out)
    print("  enc_privkey: %d bytes" % len(encprivkey), file=out)
    print("  pubkey: %d bytes" % len(pubkey), file=out)
    print("  signature: %d bytes" % len(signature), file=out)
    share_hash_ids = ",".join(
        [str(hid) for hid in sorted(share_hash_chain.keys())])
    print("  share_hash_chain: %s" % share_hash_ids, file=out)
    print("  block_hash_tree: %d nodes" % len(block_hash_tree), file=out)

    # the storage index isn't stored in the share itself, so we depend upon
    # knowing the parent directory name to get it
    pieces = options['filename'].split(os.sep)
    if len(pieces) >= 2:
        piece = to_str(pieces[-2])
        if base32.could_be_base32_encoded(piece):
            storage_index = base32.a2b(piece)
            fingerprint = hashutil.ssk_pubkey_fingerprint_hash(pubkey)
            u = MDMFVerifierURI(storage_index, fingerprint)
            verify_cap = u.to_string()
            print("  verify-cap:",
                  quote_output(verify_cap, quotemarks=False),
                  file=out)

    if options['offsets']:
        # NOTE: this offset-calculation code is fragile, and needs to be
        # merged with MutableShareFile's internals.

        print(file=out)
        print(" Section Offsets:", file=out)

        def printoffset(name, value, shift=0):
            print("%s%.20s: %s   (0x%x)" % (" " * shift, name, value, value),
                  file=out)

        printoffset("first lease", m.HEADER_SIZE, 2)
        printoffset("share data", m.DATA_OFFSET, 2)
        o_seqnum = m.DATA_OFFSET + struct.calcsize(">B")
        printoffset("seqnum", o_seqnum, 4)
        o_root_hash = m.DATA_OFFSET + struct.calcsize(">BQ")
        printoffset("root_hash", o_root_hash, 4)
        for k in [
                "enc_privkey", "share_hash_chain", "signature",
                "verification_key", "verification_key_end", "share_data",
                "block_hash_tree", "EOF"
        ]:
            name = {
                "share_data": "block data",
                "verification_key": "pubkey",
                "verification_key_end": "end of pubkey",
                "EOF": "end of share data"
            }.get(k, k)
            offset = m.DATA_OFFSET + offsets[k]
            printoffset(name, offset, 4)
        f = open(options['filename'], "rb")
        printoffset("extra leases", m._read_extra_lease_offset(f) + 4, 2)
        f.close()

    print(file=out)
Example #42
def list(options):
    nodeurl = options['node-url']
    aliases = options.aliases
    where = options.where
    stdout = options.stdout
    stderr = options.stderr

    if not nodeurl.endswith("/"):
        nodeurl += "/"
    if where.endswith("/"):
        where = where[:-1]
    try:
        rootcap, path = get_alias(aliases, where, DEFAULT_ALIAS)
    except UnknownAliasError as e:
        e.display(stderr)
        return 1
    url = nodeurl + "uri/%s" % urllib.quote(rootcap)
    if path:
        # move where.endswith check here?
        url += "/" + escape_path(path)
    assert not url.endswith("/")
    url += "?t=json"
    resp = do_http("GET", url)
    if resp.status == 404:
        print("No such file or directory", file=stderr)
        return 2
    if resp.status != 200:
        print(format_http_error("Error during GET", resp), file=stderr)
        if resp.status == 0:
            return 3
        else:
            return resp.status

    data = resp.read()

    if options['json']:
        # The webapi server should always output printable ASCII.
        if is_printable_ascii(data):
            print(data, file=stdout)
            return 0
        else:
            print("The JSON response contained unprintable characters:", file=stderr)
            print(quote_output(data, quotemarks=False), file=stderr)
            return 1

    try:
        parsed = json.loads(data)
    except Exception as e:
        print("error: %s" % quote_output(e.args[0], quotemarks=False), file=stderr)
        print("Could not parse JSON response:", file=stderr)
        print(quote_output(data, quotemarks=False), file=stderr)
        return 1

    nodetype, d = parsed
    children = {}
    if nodetype == "dirnode":
        children = d['children']
    else:
        # paths returned from get_alias are always valid UTF-8
        childname = path.split("/")[-1].decode('utf-8')
        children = {childname: (nodetype, d)}
        if "metadata" not in d:
            d["metadata"] = {}
    childnames = sorted(children.keys())
    now = time.time()

    # we build up a series of rows, then we loop through them to compute a
    # maxwidth so we can format them tightly. Size, filename, and URI are the
    # variable-width ones.
    rows = []
    has_unknowns = False

    for name in childnames:
        child = children[name]
        name = unicode(name)
        childtype = child[0]

        # See webapi.txt for a discussion of the meanings of unix local
        # filesystem mtime and ctime, Tahoe mtime and ctime, and Tahoe
        # linkmotime and linkcrtime.
        ctime = child[1].get("metadata", {}).get('tahoe', {}).get("linkcrtime")
        if not ctime:
            ctime = child[1]["metadata"].get("ctime")

        mtime = child[1].get("metadata", {}).get('tahoe', {}).get("linkmotime")
        if not mtime:
            mtime = child[1]["metadata"].get("mtime")
        rw_uri = to_str(child[1].get("rw_uri"))
        ro_uri = to_str(child[1].get("ro_uri"))
        if ctime:
            # match for formatting that GNU 'ls' does
            if (now - ctime) > 6*30*24*60*60:
                # old files
                fmt = "%b %d  %Y"
            else:
                fmt = "%b %d %H:%M"
            ctime_s = time.strftime(fmt, time.localtime(ctime))
        else:
            ctime_s = "-"
        if childtype == "dirnode":
            t0 = "d"
            size = "-"
            classify = "/"
        elif childtype == "filenode":
            t0 = "-"
            size = str(child[1].get("size", "?"))
            classify = ""
            if rw_uri:
                classify = "*"
        else:
            has_unknowns = True
            t0 = "?"
            size = "?"
            classify = "?"
        t1 = "-"
        if ro_uri:
            t1 = "r"
        t2 = "-"
        if rw_uri:
            t2 = "w"
        t3 = "-"
        if childtype == "dirnode":
            t3 = "x"

        uri = rw_uri or ro_uri

        line = []
        if options["long"]:
            line.append(t0+t1+t2+t3)
            line.append(size)
            line.append(ctime_s)
        if not options["classify"]:
            classify = ""

        encoding_error = False
        try:
            line.append(unicode_to_output(name) + classify)
        except UnicodeEncodeError:
            encoding_error = True
            line.append(quote_output(name) + classify)

        if options["uri"]:
            line.append(uri)
        if options["readonly-uri"]:
            line.append(quote_output(ro_uri or "-", quotemarks=False))

        rows.append((encoding_error, line))

    max_widths = []
    left_justifys = []
    for (encoding_error, row) in rows:
        for i,cell in enumerate(row):
            while len(max_widths) <= i:
                max_widths.append(0)
            while len(left_justifys) <= i:
                left_justifys.append(False)
            max_widths[i] = max(max_widths[i], len(cell))
            if cell.startswith("URI"):
                left_justifys[i] = True
    if len(left_justifys) == 1:
        left_justifys[0] = True
    fmt_pieces = []
    for i in range(len(max_widths)):
        piece = "%"
        if left_justifys[i]:
            piece += "-"
        piece += str(max_widths[i])
        piece += "s"
        fmt_pieces.append(piece)
    fmt = " ".join(fmt_pieces)

    rc = 0
    for (encoding_error, row) in rows:
        if encoding_error:
            print((fmt % tuple(row)).rstrip(), file=stderr)
            rc = 1
        else:
            print((fmt % tuple(row)).rstrip(), file=stdout)

    if rc == 1:
        print("\nThis listing included files whose names could not be converted to the terminal" \
                        "\noutput encoding. Their names are shown using backslash escapes and in quotes.", file=stderr)
    if has_unknowns:
        print("\nThis listing included unknown objects. Using a webapi server that supports" \
                        "\na later version of Tahoe may help.", file=stderr)

    return rc
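
The listing code above aligns its columns in two passes: it first records the width of every cell across all rows, then builds a printf-style format string from the per-column maxima. A minimal, self-contained sketch of the same idea (the sample rows are made up):

rows = [["drwx", "-", "Mar 01  2011", "docs/"],
        ["-r--", "1024", "Mar 01 14:02", "notes.txt"]]

max_widths = []
for row in rows:
    for i, cell in enumerate(row):
        while len(max_widths) <= i:
            max_widths.append(0)
        max_widths[i] = max(max_widths[i], len(cell))

# "%4s %5s ..." right-justifies every column, like the listing's default.
fmt = " ".join("%%%ds" % w for w in max_widths)
for row in rows:
    print((fmt % tuple(row)).rstrip())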
Ejemplo n.º 45
0
    def test_cp_replaces_mutable_file_contents(self):
        self.basedir = "cli/Cp/cp_replaces_mutable_file_contents"
        self.set_up_grid(oneshare=True)

        # Write a test file, which we'll copy to the grid.
        test_txt_path = os.path.join(self.basedir, "test.txt")
        test_txt_contents = "foo bar baz"
        f = open(test_txt_path, "w")
        f.write(test_txt_contents)
        f.close()

        d = self.do_cli("create-alias", "tahoe")
        d.addCallback(lambda ignored:
            self.do_cli("mkdir", "tahoe:test"))
        # We have to use 'tahoe put' here because 'tahoe cp' doesn't
        # know how to make mutable files at the destination.
        d.addCallback(lambda ignored:
            self.do_cli("put", "--mutable", test_txt_path, "tahoe:test/test.txt"))
        d.addCallback(lambda ignored:
            self.do_cli("get", "tahoe:test/test.txt"))
        def _check((rc, out, err)):
            self.failUnlessEqual(rc, 0)
            self.failUnlessEqual(out, test_txt_contents)
        d.addCallback(_check)

        # We'll do ls --json to get the read uri and write uri for the
        # file we've just uploaded.
        d.addCallback(lambda ignored:
            self.do_cli("ls", "--json", "tahoe:test/test.txt"))
        def _get_test_txt_uris((rc, out, err)):
            self.failUnlessEqual(rc, 0)
            filetype, data = json.loads(out)

            self.failUnlessEqual(filetype, "filenode")
            self.failUnless(data['mutable'])

            self.failUnlessIn("rw_uri", data)
            self.rw_uri = to_str(data["rw_uri"])
            self.failUnlessIn("ro_uri", data)
            self.ro_uri = to_str(data["ro_uri"])
        d.addCallback(_get_test_txt_uris)

        # Now make a new file to copy in place of test.txt.
        new_txt_path = os.path.join(self.basedir, "new.txt")
        new_txt_contents = "baz bar foo" * 100000
        f = open(new_txt_path, "w")
        f.write(new_txt_contents)
        f.close()

        # Copy the new file on top of the old file.
        d.addCallback(lambda ignored:
            self.do_cli("cp", new_txt_path, "tahoe:test/test.txt"))

        # If we get test.txt now, we should see the new data.
        d.addCallback(lambda ignored:
            self.do_cli("get", "tahoe:test/test.txt"))
        d.addCallback(lambda (rc, out, err):
            self.failUnlessEqual(out, new_txt_contents))
        # If we get the json of the new file, we should see that the old
        # uri is there
        d.addCallback(lambda ignored:
            self.do_cli("ls", "--json", "tahoe:test/test.txt"))
        def _check_json((rc, out, err)):
            self.failUnlessEqual(rc, 0)
            filetype, data = json.loads(out)

            self.failUnlessEqual(filetype, "filenode")
            self.failUnless(data['mutable'])

            self.failUnlessIn("ro_uri", data)
            self.failUnlessEqual(to_str(data["ro_uri"]), self.ro_uri)
            self.failUnlessIn("rw_uri", data)
            self.failUnlessEqual(to_str(data["rw_uri"]), self.rw_uri)
        d.addCallback(_check_json)

        # and, finally, doing a GET directly on one of the old uris
        # should give us the new contents.
        d.addCallback(lambda ignored:
            self.do_cli("get", self.rw_uri))
        d.addCallback(lambda (rc, out, err):
            self.failUnlessEqual(out, new_txt_contents))
        # Now copy the old test.txt without an explicit destination
        # file. tahoe cp will match it to the existing file and
        # overwrite it appropriately.
        d.addCallback(lambda ignored:
            self.do_cli("cp", test_txt_path, "tahoe:test"))
        d.addCallback(lambda ignored:
            self.do_cli("get", "tahoe:test/test.txt"))
        d.addCallback(lambda (rc, out, err):
            self.failUnlessEqual(out, test_txt_contents))
        d.addCallback(lambda ignored:
            self.do_cli("ls", "--json", "tahoe:test/test.txt"))
        d.addCallback(_check_json)
        d.addCallback(lambda ignored:
            self.do_cli("get", self.rw_uri))
        d.addCallback(lambda (rc, out, err):
            self.failUnlessEqual(out, test_txt_contents))

        # Now we'll make a more complicated directory structure.
        # test2/
        # test2/mutable1
        # test2/mutable2
        # test2/imm1
        # test2/imm2
        imm_test_txt_path = os.path.join(self.basedir, "imm_test.txt")
        imm_test_txt_contents = test_txt_contents * 10000
        fileutil.write(imm_test_txt_path, imm_test_txt_contents)
        d.addCallback(lambda ignored:
            self.do_cli("mkdir", "tahoe:test2"))
        d.addCallback(lambda ignored:
            self.do_cli("put", "--mutable", new_txt_path,
                        "tahoe:test2/mutable1"))
        d.addCallback(lambda ignored:
            self.do_cli("put", "--mutable", new_txt_path,
                        "tahoe:test2/mutable2"))
        d.addCallback(lambda ignored:
            self.do_cli('put', new_txt_path, "tahoe:test2/imm1"))
        d.addCallback(lambda ignored:
            self.do_cli("put", imm_test_txt_path, "tahoe:test2/imm2"))
        d.addCallback(lambda ignored:
            self.do_cli("ls", "--json", "tahoe:test2"))
        def _process_directory_json((rc, out, err)):
            self.failUnlessEqual(rc, 0)

            filetype, data = json.loads(out)
            self.failUnlessEqual(filetype, "dirnode")
            self.failUnless(data['mutable'])
            self.failUnlessIn("children", data)
            children = data['children']

            # Store the URIs for later use.
            self.childuris = {}
            for k in ["mutable1", "mutable2", "imm1", "imm2"]:
                self.failUnlessIn(k, children)
                childtype, childdata = children[k]
                self.failUnlessEqual(childtype, "filenode")
                if "mutable" in k:
                    self.failUnless(childdata['mutable'])
                    self.failUnlessIn("rw_uri", childdata)
                    uri_key = "rw_uri"
                else:
                    self.failIf(childdata['mutable'])
                    self.failUnlessIn("ro_uri", childdata)
                    uri_key = "ro_uri"
                self.childuris[k] = to_str(childdata[uri_key])
        d.addCallback(_process_directory_json)
        # Now build a local directory to copy into place, like the following:
        # test2/
        # test2/mutable1
        # test2/mutable2
        # test2/imm1
        # test2/imm3
        def _build_local_directory(ignored):
            test2_path = os.path.join(self.basedir, "test2")
            fileutil.make_dirs(test2_path)
            for fn in ("mutable1", "mutable2", "imm1", "imm3"):
                fileutil.write(os.path.join(test2_path, fn), fn * 1000)
            self.test2_path = test2_path
        d.addCallback(_build_local_directory)
        d.addCallback(lambda ignored:
            self.do_cli("cp", "-r", self.test2_path, "tahoe:"))

        # We expect that mutable1 and mutable2 are overwritten in-place,
        # so they'll retain their URIs but have different content.
        def _process_file_json((rc, out, err), fn):
            self.failUnlessEqual(rc, 0)
            filetype, data = json.loads(out)
            self.failUnlessEqual(filetype, "filenode")

            if "mutable" in fn:
                self.failUnless(data['mutable'])
                self.failUnlessIn("rw_uri", data)
                self.failUnlessEqual(to_str(data["rw_uri"]), self.childuris[fn])
            else:
                self.failIf(data['mutable'])
                self.failUnlessIn("ro_uri", data)
                self.failIfEqual(to_str(data["ro_uri"]), self.childuris[fn])
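
Every _check_json-style callback in this test decodes the same two-element JSON structure that "tahoe ls --json" prints for a file node. An illustrative sample of the shape being relied on (the capability strings are placeholders, not real caps):

import json

sample = '''["filenode", {"mutable": true,
                          "rw_uri": "URI:SSK:placeholder",
                          "ro_uri": "URI:SSK-RO:placeholder"}]'''
filetype, data = json.loads(sample)
assert filetype == "filenode"
assert data["mutable"] and "rw_uri" in data and "ro_uri" in data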
Ejemplo n.º 46
0
def dump_immutable_chk_share(f, out, options):
    from allmydata import uri
    from allmydata.util import base32
    from allmydata.immutable.layout import ReadBucketProxy
    from allmydata.util.encodingutil import quote_output, to_str

    # use a ReadBucketProxy to parse the bucket and find the uri extension
    bp = ReadBucketProxy(None, None, '')
    offsets = bp._parse_offsets(f.read_share_data(0, 0x44))
    print("%20s: %d" % ("version", bp._version), file=out)
    seek = offsets['uri_extension']
    length = struct.unpack(bp._fieldstruct,
                           f.read_share_data(seek, bp._fieldsize))[0]
    seek += bp._fieldsize
    UEB_data = f.read_share_data(seek, length)

    unpacked = uri.unpack_extension_readable(UEB_data)
    keys1 = ("size", "num_segments", "segment_size", "needed_shares",
             "total_shares")
    keys2 = ("codec_name", "codec_params", "tail_codec_params")
    keys3 = ("plaintext_hash", "plaintext_root_hash", "crypttext_hash",
             "crypttext_root_hash", "share_root_hash", "UEB_hash")
    display_keys = {"size": "file_size"}
    for k in keys1:
        if k in unpacked:
            dk = display_keys.get(k, k)
            print("%20s: %s" % (dk, unpacked[k]), file=out)
    print(file=out)
    for k in keys2:
        if k in unpacked:
            dk = display_keys.get(k, k)
            print("%20s: %s" % (dk, unpacked[k]), file=out)
    print(file=out)
    for k in keys3:
        if k in unpacked:
            dk = display_keys.get(k, k)
            print("%20s: %s" % (dk, unpacked[k]), file=out)

    leftover = set(unpacked.keys()) - set(keys1 + keys2 + keys3)
    if leftover:
        print(file=out)
        print("LEFTOVER:", file=out)
        for k in sorted(leftover):
            print("%20s: %s" % (k, unpacked[k]), file=out)

    # the storage index isn't stored in the share itself, so we depend upon
    # knowing the parent directory name to get it
    pieces = options['filename'].split(os.sep)
    if len(pieces) >= 2:
        piece = to_str(pieces[-2])
        if base32.could_be_base32_encoded(piece):
            storage_index = base32.a2b(piece)
            uri_extension_hash = base32.a2b(unpacked["UEB_hash"])
            u = uri.CHKFileVerifierURI(storage_index, uri_extension_hash,
                                       unpacked["needed_shares"],
                                       unpacked["total_shares"],
                                       unpacked["size"])
            verify_cap = u.to_string()
            print("%20s: %s" %
                  ("verify-cap", quote_output(verify_cap, quotemarks=False)),
                  file=out)

    sizes = {}
    sizes['data'] = (offsets['plaintext_hash_tree'] - offsets['data'])
    sizes['validation'] = (offsets['uri_extension'] -
                           offsets['plaintext_hash_tree'])
    sizes['uri-extension'] = len(UEB_data)
    print(file=out)
    print(" Size of data within the share:", file=out)
    for k in sorted(sizes):
        print("%20s: %s" % (k, sizes[k]), file=out)

    if options['offsets']:
        print(file=out)
        print(" Section Offsets:", file=out)
        print("%20s: %s" % ("share data", f._data_offset), file=out)
        for k in [
                "data", "plaintext_hash_tree", "crypttext_hash_tree",
                "block_hashes", "share_hashes", "uri_extension"
        ]:
            name = {"data": "block data"}.get(k, k)
            offset = f._data_offset + offsets[k]
            print("  %20s: %s   (0x%x)" % (name, offset, offset), file=out)
        print("%20s: %s" % ("leases", f._lease_offset), file=out)
Ejemplo n.º 47
0
    print >>out, "  total_shares: %d" % N
    print >>out, "  segsize: %d" % segsize
    print >>out, "  datalen: %d" % datalen
    print >>out, "  enc_privkey: %d bytes" % len(enc_privkey)
    print >>out, "  pubkey: %d bytes" % len(pubkey)
    print >>out, "  signature: %d bytes" % len(signature)
    share_hash_ids = ",".join(sorted([str(hid)
                                      for hid in share_hash_chain.keys()]))
    print >>out, "  share_hash_chain: %s" % share_hash_ids
    print >>out, "  block_hash_tree: %d nodes" % len(block_hash_tree)

    # the storage index isn't stored in the share itself, so we depend upon
    # knowing the parent directory name to get it
    pieces = options['filename'].split(os.sep)
    if len(pieces) >= 2:
        piece = to_str(pieces[-2])
        if base32.could_be_base32_encoded(piece):
            storage_index = base32.a2b(piece)
            fingerprint = hashutil.ssk_pubkey_fingerprint_hash(pubkey)
            u = SSKVerifierURI(storage_index, fingerprint)
            verify_cap = u.to_string()
            print >>out, "  verify-cap:", quote_output(verify_cap, quotemarks=False)

    if options['offsets']:
        # NOTE: this offset-calculation code is fragile, and needs to be
        # merged with MutableShareFile's internals.
        print >>out
        print >>out, " Section Offsets:"
        def printoffset(name, value, shift=0):
            print >>out, "%s%20s: %s   (0x%x)" % (" "*shift, name, value, value)
        printoffset("first lease", m.HEADER_SIZE)
Ejemplo n.º 48
0
def dump_immutable_chk_share(f, out, options):
    from allmydata import uri
    from allmydata.util import base32
    from allmydata.immutable.layout import ReadBucketProxy
    from allmydata.util.encodingutil import quote_output, to_str

    # use a ReadBucketProxy to parse the bucket and find the uri extension
    bp = ReadBucketProxy(None, '', '')
    offsets = bp._parse_offsets(f.read_share_data(0, 0x44))
    print >>out, "%20s: %d" % ("version", bp._version)
    seek = offsets['uri_extension']
    length = struct.unpack(bp._fieldstruct,
                           f.read_share_data(seek, bp._fieldsize))[0]
    seek += bp._fieldsize
    UEB_data = f.read_share_data(seek, length)

    unpacked = uri.unpack_extension_readable(UEB_data)
    keys1 = ("size", "num_segments", "segment_size",
             "needed_shares", "total_shares")
    keys2 = ("codec_name", "codec_params", "tail_codec_params")
    keys3 = ("plaintext_hash", "plaintext_root_hash",
             "crypttext_hash", "crypttext_root_hash",
             "share_root_hash", "UEB_hash")
    display_keys = {"size": "file_size"}
    for k in keys1:
        if k in unpacked:
            dk = display_keys.get(k, k)
            print >>out, "%20s: %s" % (dk, unpacked[k])
    print >>out
    for k in keys2:
        if k in unpacked:
            dk = display_keys.get(k, k)
            print >>out, "%20s: %s" % (dk, unpacked[k])
    print >>out
    for k in keys3:
        if k in unpacked:
            dk = display_keys.get(k, k)
            print >>out, "%20s: %s" % (dk, unpacked[k])

    leftover = set(unpacked.keys()) - set(keys1 + keys2 + keys3)
    if leftover:
        print >>out
        print >>out, "LEFTOVER:"
        for k in sorted(leftover):
            print >>out, "%20s: %s" % (k, unpacked[k])

    # the storage index isn't stored in the share itself, so we depend upon
    # knowing the parent directory name to get it
    pieces = options['filename'].split(os.sep)
    if len(pieces) >= 2:
        piece = to_str(pieces[-2])
        if base32.could_be_base32_encoded(piece):
            storage_index = base32.a2b(piece)
            uri_extension_hash = base32.a2b(unpacked["UEB_hash"])
            u = uri.CHKFileVerifierURI(storage_index, uri_extension_hash,
                                      unpacked["needed_shares"],
                                      unpacked["total_shares"], unpacked["size"])
            verify_cap = u.to_string()
            print >>out, "%20s: %s" % ("verify-cap", quote_output(verify_cap, quotemarks=False))

    sizes = {}
    sizes['data'] = (offsets['plaintext_hash_tree'] -
                     offsets['data'])
    sizes['validation'] = (offsets['uri_extension'] -
                           offsets['plaintext_hash_tree'])
    sizes['uri-extension'] = len(UEB_data)
    print >>out
    print >>out, " Size of data within the share:"
    for k in sorted(sizes):
        print >>out, "%20s: %s" % (k, sizes[k])

    if options['offsets']:
        print >>out
        print >>out, " Section Offsets:"
        print >>out, "%20s: %s" % ("share data", f._data_offset)
        for k in ["data", "plaintext_hash_tree", "crypttext_hash_tree",
                  "block_hashes", "share_hashes", "uri_extension"]:
            name = {"data": "block data"}.get(k,k)
            offset = f._data_offset + offsets[k]
            print >>out, "  %20s: %s   (0x%x)" % (name, offset, offset)
        print >>out, "%20s: %s" % ("leases", f._lease_offset)
Ejemplo n.º 49
0
def dump_MDMF_share(m, length, options):
    from allmydata.mutable.layout import MDMFSlotReadProxy
    from allmydata.util import base32, hashutil
    from allmydata.uri import MDMFVerifierURI
    from allmydata.util.encodingutil import quote_output, to_str

    offset = m.DATA_OFFSET
    out = options.stdout

    f = open(options['filename'], "rb")
    storage_index = None; shnum = 0

    class ShareDumper(MDMFSlotReadProxy):
        def _read(self, readvs, force_remote=False, queue=False):
            data = []
            for (where,length) in readvs:
                f.seek(offset+where)
                data.append(f.read(length))
            return defer.succeed({shnum: data})

    p = ShareDumper(None, storage_index, shnum)
    def extract(func):
        stash = []
        # these methods return Deferreds, but we happen to know that they run
        # synchronously when not actually talking to a remote server
        d = func()
        d.addCallback(stash.append)
        return stash[0]

    verinfo = extract(p.get_verinfo)
    encprivkey = extract(p.get_encprivkey)
    signature = extract(p.get_signature)
    pubkey = extract(p.get_verification_key)
    block_hash_tree = extract(p.get_blockhashes)
    share_hash_chain = extract(p.get_sharehashes)
    f.close()

    (seqnum, root_hash, salt_to_use, segsize, datalen, k, N, prefix,
     offsets) = verinfo

    print >>out, " MDMF contents:"
    print >>out, "  seqnum: %d" % seqnum
    print >>out, "  root_hash: %s" % base32.b2a(root_hash)
    #print >>out, "  IV: %s" % base32.b2a(IV)
    print >>out, "  required_shares: %d" % k
    print >>out, "  total_shares: %d" % N
    print >>out, "  segsize: %d" % segsize
    print >>out, "  datalen: %d" % datalen
    print >>out, "  enc_privkey: %d bytes" % len(encprivkey)
    print >>out, "  pubkey: %d bytes" % len(pubkey)
    print >>out, "  signature: %d bytes" % len(signature)
    share_hash_ids = ",".join([str(hid)
                               for hid in sorted(share_hash_chain.keys())])
    print >>out, "  share_hash_chain: %s" % share_hash_ids
    print >>out, "  block_hash_tree: %d nodes" % len(block_hash_tree)

    # the storage index isn't stored in the share itself, so we depend upon
    # knowing the parent directory name to get it
    pieces = options['filename'].split(os.sep)
    if len(pieces) >= 2:
        piece = to_str(pieces[-2])
        if base32.could_be_base32_encoded(piece):
            storage_index = base32.a2b(piece)
            fingerprint = hashutil.ssk_pubkey_fingerprint_hash(pubkey)
            u = MDMFVerifierURI(storage_index, fingerprint)
            verify_cap = u.to_string()
            print >>out, "  verify-cap:", quote_output(verify_cap, quotemarks=False)

    if options['offsets']:
        # NOTE: this offset-calculation code is fragile, and needs to be
        # merged with MutableShareFile's internals.

        print >>out
        print >>out, " Section Offsets:"
        def printoffset(name, value, shift=0):
            print >>out, "%s%.20s: %s   (0x%x)" % (" "*shift, name, value, value)
        printoffset("first lease", m.HEADER_SIZE, 2)
        printoffset("share data", m.DATA_OFFSET, 2)
        o_seqnum = m.DATA_OFFSET + struct.calcsize(">B")
        printoffset("seqnum", o_seqnum, 4)
        o_root_hash = m.DATA_OFFSET + struct.calcsize(">BQ")
        printoffset("root_hash", o_root_hash, 4)
        for k in ["enc_privkey", "share_hash_chain", "signature",
                  "verification_key", "verification_key_end",
                  "share_data", "block_hash_tree", "EOF"]:
            name = {"share_data": "block data",
                    "verification_key": "pubkey",
                    "verification_key_end": "end of pubkey",
                    "EOF": "end of share data"}.get(k,k)
            offset = m.DATA_OFFSET + offsets[k]
            printoffset(name, offset, 4)
        f = open(options['filename'], "rb")
        printoffset("extra leases", m._read_extra_lease_offset(f) + 4, 2)
        f.close()

    print >>out
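
The extract() helper above works only because MDMFSlotReadProxy's Deferreds fire synchronously when no remote server is involved, so the stashed result is available as soon as addCallback returns. A minimal sketch of that pattern:

from twisted.internet import defer

def extract(func):
    stash = []
    d = func()
    d.addCallback(stash.append)
    return stash[0]  # an IndexError here would mean the Deferred hadn't fired

assert extract(lambda: defer.succeed(42)) == 42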
Ejemplo n.º 50
0
    def parseArgs(self, invite_code, local_dir):
        BasedirOptions.parseArgs(self)

        # Expand the path relative to the current directory of the CLI command, not the node.
        self.local_dir = None if local_dir is None else argv_to_abspath(local_dir, long_path=False)
        self.invite_code = to_str(argv_to_unicode(invite_code))
Ejemplo n.º 51
0
    stderr = options.stderr

    if nodeurl[-1] != "/":
        nodeurl += "/"
    try:
        rootcap, from_path = get_alias(aliases, from_file, DEFAULT_ALIAS)
    except UnknownAliasError, e:
        e.display(stderr)
        return 1
    from_url = nodeurl + "uri/%s" % urllib.quote(rootcap)
    if from_path:
        from_url += "/" + escape_path(from_path)
    # figure out the source cap
    data = urllib.urlopen(from_url + "?t=json").read()
    nodetype, attrs = simplejson.loads(data)
    cap = to_str(attrs.get("rw_uri") or attrs["ro_uri"])

    # now get the target
    try:
        rootcap, path = get_alias(aliases, to_file, DEFAULT_ALIAS)
    except UnknownAliasError, e:
        e.display(stderr)
        return 1
    to_url = nodeurl + "uri/%s" % urllib.quote(rootcap)
    if path:
        to_url += "/" + escape_path(path)

    if to_url.endswith("/"):
        # "mv foo.txt bar/" == "mv foo.txt bar/foo.txt"
        to_url += escape_path(from_path[from_path.rfind("/")+1:])
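
The trailing-slash rule in the last lines above mirrors the shell convention that "mv foo.txt bar/" means "mv foo.txt bar/foo.txt": a destination ending in "/" gets the source's basename appended. A sketch of just that rule (escape_path is omitted for brevity, and the URL is a placeholder):

from_path = "docs/foo.txt"
to_url = "http://127.0.0.1:3456/uri/URI:DIR2:placeholder/bar/"
if to_url.endswith("/"):
    to_url += from_path[from_path.rfind("/") + 1:]
assert to_url.endswith("/bar/foo.txt")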
Ejemplo n.º 52
0
    try:
        rootcap, from_path = get_alias(aliases, from_file, DEFAULT_ALIAS)
    except UnknownAliasError, e:
        e.display(stderr)
        return 1
    from_url = nodeurl + "uri/%s" % urllib.quote(rootcap)
    if from_path:
        from_url += "/" + escape_path(from_path)
    # figure out the source cap
    resp = do_http("GET", from_url + "?t=json")
    if not re.search(r'^2\d\d$', str(resp.status)):
        print >> stderr, format_http_error("Error", resp)
        return 1
    data = resp.read()
    nodetype, attrs = simplejson.loads(data)
    cap = to_str(attrs.get("rw_uri") or attrs["ro_uri"])

    # now get the target
    try:
        rootcap, path = get_alias(aliases, to_file, DEFAULT_ALIAS)
    except UnknownAliasError, e:
        e.display(stderr)
        return 1
    to_url = nodeurl + "uri/%s" % urllib.quote(rootcap)
    if path:
        to_url += "/" + escape_path(path)

    if to_url.endswith("/"):
        # "mv foo.txt bar/" == "mv foo.txt bar/foo.txt"
        to_url += escape_path(from_path[from_path.rfind("/") + 1:])
Ejemplo n.º 53
0
    def check_file(self, path, use_timestamps=True):
        """I will tell you if a given local file needs to be uploaded or not,
        by looking in a database and seeing if I have a record of this file
        having been uploaded earlier.

        I return a FileResults object, synchronously. If r.was_uploaded()
        returns False, you should upload the file. When you are finished
        uploading it, call r.did_upload(filecap), so I can update my
        database.

        If was_uploaded() returns a filecap, you might be able to avoid an
        upload. Call r.should_check(), and if it says False, you can skip the
        upload and use the filecap returned by was_uploaded().

        If should_check() returns True, you should perform a filecheck on the
        filecap returned by was_uploaded(). If the check indicates the file
        is healthy, please call r.did_check_healthy(checker_results) so I can
        update the database, using the de-JSONized response from the webapi
        t=check call for 'checker_results'. If the check indicates the file
        is not healthy, please upload the file and call r.did_upload(filecap)
        when you're done.

        If use_timestamps=True (the default), I will compare ctime and mtime
        of the local file against an entry in my database, and consider the
        file to be unchanged if ctime, mtime, and filesize are all the same
        as the earlier version. If use_timestamps=False, I will not trust the
        timestamps, so more files (perhaps all) will be marked as needing
        upload. A future version of this database may hash the file to make
        equality decisions, in which case use_timestamps=False will not
        always imply r.must_upload()==True.

        'path' points to a local file on disk, possibly relative to the
        current working directory. The database stores absolute pathnames.
        """

        path = abspath_expanduser_unicode(path)
        s = os.stat(path)
        size = s[stat.ST_SIZE]
        ctime = s[stat.ST_CTIME]
        mtime = s[stat.ST_MTIME]

        now = time.time()
        c = self.cursor

        c.execute("SELECT size,mtime,ctime,fileid"
                  " FROM local_files"
                  " WHERE path=?",
                  (path,))
        row = self.cursor.fetchone()
        if not row:
            return FileResult(self, None, False, path, mtime, ctime, size)
        (last_size,last_mtime,last_ctime,last_fileid) = row

        c.execute("SELECT caps.filecap, last_upload.last_checked"
                  " FROM caps,last_upload"
                  " WHERE caps.fileid=? AND last_upload.fileid=?",
                  (last_fileid, last_fileid))
        row2 = c.fetchone()

        if ((last_size != size
             or not use_timestamps
             or last_mtime != mtime
             or last_ctime != ctime) # the file has been changed
            or (not row2) # we somehow forgot where we put the file last time
            ):
            c.execute("DELETE FROM local_files WHERE path=?", (path,))
            self.connection.commit()
            return FileResult(self, None, False, path, mtime, ctime, size)

        # at this point, we're allowed to assume the file hasn't been changed
        (filecap, last_checked) = row2
        age = now - last_checked

        probability = ((age - self.NO_CHECK_BEFORE) /
                       (self.ALWAYS_CHECK_AFTER - self.NO_CHECK_BEFORE))
        probability = min(max(probability, 0.0), 1.0)
        should_check = bool(random.random() < probability)

        return FileResult(self, to_str(filecap), should_check,
                          path, mtime, ctime, size)
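
The last few lines above turn the age of the most recent check into a re-check probability that ramps linearly from 0 to 1 between the two thresholds. A worked sketch of that ramp (the threshold values here are illustrative; the real ones are class attributes):

NO_CHECK_BEFORE = 30 * 24 * 60 * 60       # illustrative: one month
ALWAYS_CHECK_AFTER = 365 * 24 * 60 * 60   # illustrative: one year

def check_probability(age):
    p = (age - NO_CHECK_BEFORE) / float(ALWAYS_CHECK_AFTER - NO_CHECK_BEFORE)
    return min(max(p, 0.0), 1.0)

assert check_probability(0) == 0.0                        # too fresh to re-check
assert check_probability(2 * 365 * 24 * 60 * 60) == 1.0   # old enough to always check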
Ejemplo n.º 54
0
    for name in childnames:
        child = children[name]
        name = unicode(name)
        childtype = child[0]

        # See webapi.txt for a discussion of the meanings of unix local
        # filesystem mtime and ctime, Tahoe mtime and ctime, and Tahoe
        # linkmotime and linkcrtime.
        ctime = child[1].get("metadata", {}).get('tahoe', {}).get("linkcrtime")
        if not ctime:
            ctime = child[1]["metadata"].get("ctime")

        mtime = child[1].get("metadata", {}).get('tahoe', {}).get("linkmotime")
        if not mtime:
            mtime = child[1]["metadata"].get("mtime")
        rw_uri = to_str(child[1].get("rw_uri"))
        ro_uri = to_str(child[1].get("ro_uri"))
        if ctime:
            # match the formatting that GNU 'ls' does
            if (now - ctime) > 6*30*24*60*60:
                # old files
                fmt = "%b %d  %Y"
            else:
                fmt = "%b %d %H:%M"
            ctime_s = time.strftime(fmt, time.localtime(ctime))
        else:
            ctime_s = "-"
        if childtype == "dirnode":
            t0 = "d"
            size = "-"
            classify = "/"
Ejemplo n.º 55
0
    def check_file(self, path, use_timestamps=True):
        """I will tell you if a given local file needs to be uploaded or not,
        by looking in a database and seeing if I have a record of this file
        having been uploaded earlier.

        I return a FileResults object, synchronously. If r.was_uploaded()
        returns False, you should upload the file. When you are finished
        uploading it, call r.did_upload(filecap), so I can update my
        database.

        If was_uploaded() returns a filecap, you might be able to avoid an
        upload. Call r.should_check(), and if it says False, you can skip the
        upload and use the filecap returned by was_uploaded().

        If should_check() returns True, you should perform a filecheck on the
        filecap returned by was_uploaded(). If the check indicates the file
        is healthy, please call r.did_check_healthy(checker_results) so I can
        update the database, using the de-JSONized response from the webapi
        t=check call for 'checker_results'. If the check indicates the file
        is not healthy, please upload the file and call r.did_upload(filecap)
        when you're done.

        If use_timestamps=True (the default), I will compare ctime and mtime
        of the local file against an entry in my database, and consider the
        file to be unchanged if ctime, mtime, and filesize are all the same
        as the earlier version. If use_timestamps=False, I will not trust the
        timestamps, so more files (perhaps all) will be marked as needing
        upload. A future version of this database may hash the file to make
        equality decisions, in which case use_timestamps=False will not
        always imply r.must_upload()==True.

        'path' points to a local file on disk, possibly relative to the
        current working directory. The database stores absolute pathnames.
        """

        path = abspath_expanduser_unicode(path)
        s = os.stat(path)
        size = s[stat.ST_SIZE]
        ctime = s[stat.ST_CTIME]
        mtime = s[stat.ST_MTIME]

        now = time.time()
        c = self.cursor

        c.execute(
            "SELECT size,mtime,ctime,fileid"
            " FROM local_files"
            " WHERE path=?", (path, ))
        row = self.cursor.fetchone()
        if not row:
            return FileResult(self, None, False, path, mtime, ctime, size)
        (last_size, last_mtime, last_ctime, last_fileid) = row

        c.execute(
            "SELECT caps.filecap, last_upload.last_checked"
            " FROM caps,last_upload"
            " WHERE caps.fileid=? AND last_upload.fileid=?",
            (last_fileid, last_fileid))
        row2 = c.fetchone()

        if ((last_size != size or not use_timestamps or last_mtime != mtime
             or last_ctime != ctime)  # the file has been changed
                or
            (not row2)  # we somehow forgot where we put the file last time
            ):
            c.execute("DELETE FROM local_files WHERE path=?", (path, ))
            self.connection.commit()
            return FileResult(self, None, False, path, mtime, ctime, size)

        # at this point, we're allowed to assume the file hasn't been changed
        (filecap, last_checked) = row2
        age = now - last_checked

        probability = ((age - self.NO_CHECK_BEFORE) /
                       (self.ALWAYS_CHECK_AFTER - self.NO_CHECK_BEFORE))
        probability = min(max(probability, 0.0), 1.0)
        should_check = bool(random.random() < probability)

        return FileResult(self, to_str(filecap), should_check, path, mtime,
                          ctime, size)
Ejemplo n.º 56
0
def tahoe_mv(nodeurl, aliases, from_file, to_file, treq):
    """
    :param DecodedURL nodeurl: The web end point of the Tahoe-LAFS node associated
        with the magic-folder client.
    :param [unicode] aliases: XXX

    :param unicode from_file: cap of the source.

    :param unicode to_file: cap of the destination.

    :param HTTPClient treq: An ``HTTPClient`` or similar object to use to make
        the queries.

    :return integer: Returns 0 for successful execution. In the case of a failure,
        an exception is raised.
    """
    try:
        rootcap, from_path = get_alias(aliases, from_file, DEFAULT_ALIAS)
    except Exception as e:
        raise e

    from_url = nodeurl.child(
        u"uri",
        u"{}".format(rootcap),
    )

    if from_path:
        from_url = from_url.child(u"{}".format(escape_path(from_path)))

    from_url = from_url.add(
        u"t",
        u"json",
    )

    get_url = from_url.to_uri().to_text().encode("ascii")
    response = yield treq.get(get_url)
    if response.code != OK:
        returnValue((yield bad_response(get_url, response)))
    result = yield readBody(response)

    nodetype, attrs = json.loads(result)
    cap = to_str(attrs.get("rw_uri") or attrs["ro_uri"])

    # now get the target
    try:
        rootcap, path = get_alias(aliases, to_file, DEFAULT_ALIAS)
    except Exception as e:
        raise e

    to_url = nodeurl.child(u"uri", u"{}".format(rootcap))

    if path:
        to_url = to_url.child(u"{}".format(escape_path(path)))

    if path.endswith("/"):
        # "mv foo.txt bar/" == "mv foo.txt bar/foo.txt"
        to_url = to_url.child(u"{}".format(
            escape_path(from_path[from_path.rfind("/") + 1:])))

    put_url = to_url.add(
        u"t",
        u"uri",
    ).add(
        u"replace",
        u"only-files",
    )

    response = yield treq.put(put_url.to_text(), cap)
    if not re.search(r'^2\d\d$', str(response.code)):
        if response.code == CONFLICT:
            raise Exception("You cannot overwrite a directory with a file")
        else:
            raise Exception(format_http_error("Error", response))

    returnValue(0)
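
The URL handling above uses hyperlink's DecodedURL, where .child() appends path segments and .add() appends query parameters, each call returning a new immutable URL. A minimal sketch of how the PUT target is assembled (the node URL and cap are placeholders):

from hyperlink import DecodedURL

nodeurl = DecodedURL.from_text(u"http://127.0.0.1:3456/")
to_url = nodeurl.child(u"uri", u"URI:DIR2:placeholder", u"newname")
put_url = to_url.add(u"t", u"uri").add(u"replace", u"only-files")
print(put_url.to_text())
# -> http://127.0.0.1:3456/uri/URI:DIR2:placeholder/newname?t=uri&replace=only-files
#    (exact percent-escaping of the cap may vary)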
Ejemplo n.º 57
0
    def test_cp_replaces_mutable_file_contents(self):
        self.basedir = "cli/Cp/cp_replaces_mutable_file_contents"
        self.set_up_grid()

        # Write a test file, which we'll copy to the grid.
        test_txt_path = os.path.join(self.basedir, "test.txt")
        test_txt_contents = "foo bar baz"
        f = open(test_txt_path, "w")
        f.write(test_txt_contents)
        f.close()

        d = self.do_cli("create-alias", "tahoe")
        d.addCallback(lambda ignored:
            self.do_cli("mkdir", "tahoe:test"))
        # We have to use 'tahoe put' here because 'tahoe cp' doesn't
        # know how to make mutable files at the destination.
        d.addCallback(lambda ignored:
            self.do_cli("put", "--mutable", test_txt_path, "tahoe:test/test.txt"))
        d.addCallback(lambda ignored:
            self.do_cli("get", "tahoe:test/test.txt"))
        def _check((rc, out, err)):
            self.failUnlessEqual(rc, 0)
            self.failUnlessEqual(out, test_txt_contents)
        d.addCallback(_check)

        # We'll do ls --json to get the read uri and write uri for the
        # file we've just uploaded.
        d.addCallback(lambda ignored:
            self.do_cli("ls", "--json", "tahoe:test/test.txt"))
        def _get_test_txt_uris((rc, out, err)):
            self.failUnlessEqual(rc, 0)
            filetype, data = simplejson.loads(out)

            self.failUnlessEqual(filetype, "filenode")
            self.failUnless(data['mutable'])

            self.failUnlessIn("rw_uri", data)
            self.rw_uri = to_str(data["rw_uri"])
            self.failUnlessIn("ro_uri", data)
            self.ro_uri = to_str(data["ro_uri"])
        d.addCallback(_get_test_txt_uris)

        # Now make a new file to copy in place of test.txt.
        new_txt_path = os.path.join(self.basedir, "new.txt")
        new_txt_contents = "baz bar foo" * 100000
        f = open(new_txt_path, "w")
        f.write(new_txt_contents)
        f.close()

        # Copy the new file on top of the old file.
        d.addCallback(lambda ignored:
            self.do_cli("cp", new_txt_path, "tahoe:test/test.txt"))

        # If we get test.txt now, we should see the new data.
        d.addCallback(lambda ignored:
            self.do_cli("get", "tahoe:test/test.txt"))
        d.addCallback(lambda (rc, out, err):
            self.failUnlessEqual(out, new_txt_contents))
        # If we get the json of the new file, we should see that the old
        # uri is there
        d.addCallback(lambda ignored:
            self.do_cli("ls", "--json", "tahoe:test/test.txt"))
        def _check_json((rc, out, err)):
            self.failUnlessEqual(rc, 0)
            filetype, data = simplejson.loads(out)

            self.failUnlessEqual(filetype, "filenode")
            self.failUnless(data['mutable'])

            self.failUnlessIn("ro_uri", data)
            self.failUnlessEqual(to_str(data["ro_uri"]), self.ro_uri)
            self.failUnlessIn("rw_uri", data)
            self.failUnlessEqual(to_str(data["rw_uri"]), self.rw_uri)
        d.addCallback(_check_json)

        # and, finally, doing a GET directly on one of the old uris
        # should give us the new contents.
        d.addCallback(lambda ignored:
            self.do_cli("get", self.rw_uri))
        d.addCallback(lambda (rc, out, err):
            self.failUnlessEqual(out, new_txt_contents))
        # Now copy the old test.txt without an explicit destination
        # file. tahoe cp will match it to the existing file and
        # overwrite it appropriately.
        d.addCallback(lambda ignored:
            self.do_cli("cp", test_txt_path, "tahoe:test"))
        d.addCallback(lambda ignored:
            self.do_cli("get", "tahoe:test/test.txt"))
        d.addCallback(lambda (rc, out, err):
            self.failUnlessEqual(out, test_txt_contents))
        d.addCallback(lambda ignored:
            self.do_cli("ls", "--json", "tahoe:test/test.txt"))
        d.addCallback(_check_json)
        d.addCallback(lambda ignored:
            self.do_cli("get", self.rw_uri))
        d.addCallback(lambda (rc, out, err):
            self.failUnlessEqual(out, test_txt_contents))

        # Now we'll make a more complicated directory structure.
        # test2/
        # test2/mutable1
        # test2/mutable2
        # test2/imm1
        # test2/imm2
        imm_test_txt_path = os.path.join(self.basedir, "imm_test.txt")
        imm_test_txt_contents = test_txt_contents * 10000
        fileutil.write(imm_test_txt_path, imm_test_txt_contents)
        d.addCallback(lambda ignored:
            self.do_cli("mkdir", "tahoe:test2"))
        d.addCallback(lambda ignored:
            self.do_cli("put", "--mutable", new_txt_path,
                        "tahoe:test2/mutable1"))
        d.addCallback(lambda ignored:
            self.do_cli("put", "--mutable", new_txt_path,
                        "tahoe:test2/mutable2"))
        d.addCallback(lambda ignored:
            self.do_cli('put', new_txt_path, "tahoe:test2/imm1"))
        d.addCallback(lambda ignored:
            self.do_cli("put", imm_test_txt_path, "tahoe:test2/imm2"))
        d.addCallback(lambda ignored:
            self.do_cli("ls", "--json", "tahoe:test2"))
        def _process_directory_json((rc, out, err)):
            self.failUnlessEqual(rc, 0)

            filetype, data = simplejson.loads(out)
            self.failUnlessEqual(filetype, "dirnode")
            self.failUnless(data['mutable'])
            self.failUnlessIn("children", data)
            children = data['children']

            # Store the URIs for later use.
            self.childuris = {}
            for k in ["mutable1", "mutable2", "imm1", "imm2"]:
                self.failUnlessIn(k, children)
                childtype, childdata = children[k]
                self.failUnlessEqual(childtype, "filenode")
                if "mutable" in k:
                    self.failUnless(childdata['mutable'])
                    self.failUnlessIn("rw_uri", childdata)
                    uri_key = "rw_uri"
                else:
                    self.failIf(childdata['mutable'])
                    self.failUnlessIn("ro_uri", childdata)
                    uri_key = "ro_uri"
                self.childuris[k] = to_str(childdata[uri_key])
        d.addCallback(_process_directory_json)
        # Now build a local directory to copy into place, like the following:
        # test2/
        # test2/mutable1
        # test2/mutable2
        # test2/imm1
        # test2/imm3
        def _build_local_directory(ignored):
            test2_path = os.path.join(self.basedir, "test2")
            fileutil.make_dirs(test2_path)
            for fn in ("mutable1", "mutable2", "imm1", "imm3"):
                fileutil.write(os.path.join(test2_path, fn), fn * 1000)
            self.test2_path = test2_path
        d.addCallback(_build_local_directory)
        d.addCallback(lambda ignored:
            self.do_cli("cp", "-r", self.test2_path, "tahoe:"))

        # We expect that mutable1 and mutable2 are overwritten in-place,
        # so they'll retain their URIs but have different content.
        def _process_file_json((rc, out, err), fn):
            self.failUnlessEqual(rc, 0)
            filetype, data = simplejson.loads(out)
            self.failUnlessEqual(filetype, "filenode")

            if "mutable" in fn:
                self.failUnless(data['mutable'])
                self.failUnlessIn("rw_uri", data)
                self.failUnlessEqual(to_str(data["rw_uri"]), self.childuris[fn])
            else:
                self.failIf(data['mutable'])
                self.failUnlessIn("ro_uri", data)
                self.failIfEqual(to_str(data["ro_uri"]), self.childuris[fn])