    def run(self, options):
        stderr = options.stderr
        self.options = options
        self.ophandle = ophandle = base32.b2a(os.urandom(16))
        nodeurl = options['node-url']
        if not nodeurl.endswith("/"):
            nodeurl += "/"
        self.nodeurl = nodeurl
        where = options.where
        try:
            rootcap, path = get_alias(options.aliases, where, DEFAULT_ALIAS)
        except UnknownAliasError as e:
            e.display(stderr)
            return 1
        if path == '/':
            path = ''
        url = nodeurl + "uri/%s" % urllib.quote(rootcap)
        if path:
            url += "/" + escape_path(path)
        # todo: should it end with a slash?
        url = self.make_url(url, ophandle)
        resp = do_http("POST", url)
        if resp.status not in (200, 302):
            print(format_http_error("ERROR", resp), file=stderr)
            return 1
        # now we poll for results. We nominally poll at t=1, 5, 10, 30, 60,
        # 90, k*120 seconds, but if the poll takes non-zero time, that will
        # be slightly longer. I'm not worried about trying to make up for
        # that time.

        return self.wait_for_results()
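A rough illustration of the nominal polling schedule described in the comment above; the generator below is a hypothetical helper, not part of the Tahoe code.

import itertools

def nominal_poll_times():
    # Yield the nominal absolute poll times, in seconds after the POST:
    # 1, 5, 10, 30, 60, 90, then every 120 seconds (120, 240, 360, ...).
    for t in (1, 5, 10, 30, 60, 90):
        yield t
    for k in itertools.count(1):
        yield k * 120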
Example #2
    def run(self):
        options = self.options
        nodeurl = options['node-url']
        self.verbosity = 1
        if options['quiet']:
            self.verbosity = 0
        if options['verbose']:
            self.verbosity = 2
        stdout = options.stdout
        stderr = options.stderr

        start_timestamp = datetime.datetime.now()
        self.backupdb = None
        bdbfile = os.path.join(options["node-directory"],
                               "private", "backupdb.sqlite")
        bdbfile = abspath_expanduser_unicode(bdbfile)
        self.backupdb = backupdb.get_backupdb(bdbfile, stderr)
        if not self.backupdb:
            print("ERROR: Unable to load backup db.", file=stderr)
            return 1

        try:
            rootcap, path = get_alias(options.aliases, options.to_dir, DEFAULT_ALIAS)
        except UnknownAliasError as e:
            e.display(stderr)
            return 1
def webopen(options, opener=None):
    nodeurl = options['node-url']
    stderr = options.stderr
    if not nodeurl.endswith("/"):
        nodeurl += "/"
    where = options.where
    if where:
        try:
            rootcap, path = get_alias(options.aliases, where, DEFAULT_ALIAS)
        except UnknownAliasError as e:
            e.display(stderr)
            return 1
        if path == '/':
            path = ''
        url = nodeurl + "uri/%s" % urllib.quote(rootcap)
        if path:
            url += "/" + escape_path(path)
    else:
        url = nodeurl
    if options['info']:
        url += "?t=info"
    if not opener:
        import webbrowser
        opener = webbrowser.open
    opener(url)
    return 0
def unlink(options, command="unlink"):
    """
    @return: a Deferred which eventually fires with the exit code
    """
    nodeurl = options['node-url']
    aliases = options.aliases
    where = options.where
    stdout = options.stdout
    stderr = options.stderr

    if nodeurl[-1] != "/":
        nodeurl += "/"
    try:
        rootcap, path = get_alias(aliases, where, DEFAULT_ALIAS)
    except UnknownAliasError as e:
        e.display(stderr)
        return 1
    if not path:
        print("""
'tahoe %s' can only unlink directory entries, so a path must be given.""" % (command,), file=stderr)
        return 1

    url = nodeurl + "uri/%s" % urllib.quote(rootcap)
    url += "/" + escape_path(path)

    resp = do_http("DELETE", url)

    if resp.status in (200,):
        print(format_http_success(resp), file=stdout)
        return 0

    print(format_http_error("ERROR", resp), file=stderr)
    return 1
Example #5
    def get_source_info(self, source_spec):
        """
        This turns an argv string into a (Local|Tahoe)(File|Directory)Source.
        """
        precondition(isinstance(source_spec, unicode), source_spec)
        rootcap, path_utf8 = get_alias(self.aliases, source_spec, None)
        path = path_utf8.decode("utf-8")
        # any trailing slash is removed in abspath_expanduser_unicode(), so
        # make a note of it here, to throw an error later
        had_trailing_slash = path.endswith("/")
        if rootcap == DefaultAliasMarker:
            # no alias, so this is a local file
            pathname = abspath_expanduser_unicode(path)
            name = os.path.basename(pathname)
            if not os.path.exists(pathname):
                raise MissingSourceError(source_spec, quotefn=quote_local_unicode_path)
            if os.path.isdir(pathname):
                t = LocalDirectorySource(self.progress, pathname, name)
            else:
                if had_trailing_slash:
                    raise FilenameWithTrailingSlashError(source_spec,
                                                         quotefn=quote_local_unicode_path)
                if not os.path.isfile(pathname):
                    raise WeirdSourceError(pathname)
                t = LocalFileSource(pathname, name) # non-empty
        else:
            # this is a tahoe object
            url = self.nodeurl + "uri/%s" % urllib.quote(rootcap)
            name = None
            if path:
                if path.endswith("/"):
                    path = path[:-1]
                url += "/" + escape_path(path)
                last_slash = path.rfind(u"/")
                name = path
                if last_slash != -1:
                    name = path[last_slash+1:]

            resp = do_http("GET", url + "?t=json")
            if resp.status == 404:
                raise MissingSourceError(source_spec)
            elif resp.status != 200:
                raise HTTPError("Error examining source %s" % quote_output(source_spec),
                                resp)
            parsed = json.loads(resp.read())
            nodetype, d = parsed
            if nodetype == "dirnode":
                t = TahoeDirectorySource(self.nodeurl, self.cache,
                                         self.progress, name)
                t.init_from_parsed(parsed)
            else:
                if had_trailing_slash:
                    raise FilenameWithTrailingSlashError(source_spec)
                writecap = to_str(d.get("rw_uri"))
                readcap = to_str(d.get("ro_uri"))
                mutable = d.get("mutable", False) # older nodes don't provide it
                t = TahoeFileSource(self.nodeurl, mutable, writecap, readcap, name)
        return t
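For illustration, the name-derivation step above (strip one trailing slash, then take everything after the last '/') can be isolated as a small helper; the function name is hypothetical and only sketches the logic.

def derive_display_name(path):
    # Strip one trailing slash, then take the last path component;
    # if there is no slash, the whole path is the name.
    if path.endswith(u"/"):
        path = path[:-1]
    last_slash = path.rfind(u"/")
    return path if last_slash == -1 else path[last_slash + 1:]

assert derive_display_name(u"subdir/file.txt") == u"file.txt"
assert derive_display_name(u"file.txt") == u"file.txt"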
Example #6
    def get_source_info(self, source_spec):
        """
        This turns an argv string into a (Local|Tahoe)(File|Directory)Source.
        """
        precondition(isinstance(source_spec, unicode), source_spec)
        rootcap, path_utf8 = get_alias(self.aliases, source_spec, None)
        path = path_utf8.decode("utf-8")
        if rootcap == DefaultAliasMarker:
            # no alias, so this is a local file
            pathname = abspath_expanduser_unicode(path)
            name = os.path.basename(pathname)
            if not os.path.exists(pathname):
                raise MissingSourceError(source_spec, quotefn=quote_local_unicode_path)
            if os.path.isdir(pathname):
                t = LocalDirectorySource(self.progress, pathname, name)
            else:
                assert os.path.isfile(pathname)
                t = LocalFileSource(pathname, name) # non-empty
        else:
            # this is a tahoe object
            url = self.nodeurl + "uri/%s" % urllib.quote(rootcap)
            name = None
            if path:
                url += "/" + escape_path(path)
                last_slash = path.rfind(u"/")
                name = path
                if last_slash != -1:
                    name = path[last_slash+1:]

            resp = do_http("GET", url + "?t=json")
            if resp.status == 404:
                raise MissingSourceError(source_spec)
            elif resp.status != 200:
                raise HTTPError("Error examining source %s" % quote_output(source_spec),
                                resp)
            parsed = simplejson.loads(resp.read())
            nodetype, d = parsed
            if nodetype == "dirnode":
                t = TahoeDirectorySource(self.nodeurl, self.cache,
                                         self.progress, name)
                t.init_from_parsed(parsed)
            else:
                writecap = to_str(d.get("rw_uri"))
                readcap = to_str(d.get("ro_uri"))
                mutable = d.get("mutable", False) # older nodes don't provide it

                last_slash = source_spec.rfind(u"/")
                if last_slash != -1:
                    # TODO: this looks funny and redundant with the 'name'
                    # assignment above. cf #2329
                    name = source_spec[last_slash+1:]

                t = TahoeFileSource(self.nodeurl, mutable, writecap, readcap, name)
        return t
def check_location(options, where):
    stdout = options.stdout
    stderr = options.stderr
    nodeurl = options['node-url']
    if not nodeurl.endswith("/"):
        nodeurl += "/"
    try:
        rootcap, path = get_alias(options.aliases, where, DEFAULT_ALIAS)
    except UnknownAliasError as e:
        e.display(stderr)
        return 1
Example #8
def put(options):
    """
    @param verbosity: 0, 1, or 2, meaning quiet, verbose, or very verbose

    @return: a Deferred which eventually fires with the exit code
    """
    nodeurl = options['node-url']
    aliases = options.aliases
    from_file = options.from_file
    to_file = options.to_file
    mutable = options['mutable']
    format = options['format']
    if options['quiet']:
        verbosity = 0
    else:
        verbosity = 2
    stdin = options.stdin
    stdout = options.stdout
    stderr = options.stderr

    if nodeurl[-1] != "/":
        nodeurl += "/"
    if to_file:
        # several possibilities for the TO_FILE argument.
        #  <none> : unlinked upload
        #  foo : TAHOE_ALIAS/foo
        #  subdir/foo : TAHOE_ALIAS/subdir/foo
        #  /oops/subdir/foo : DISALLOWED
        #  ALIAS:foo  : aliases[ALIAS]/foo
        #  ALIAS:subdir/foo  : aliases[ALIAS]/subdir/foo

        #  ALIAS:/oops/subdir/foo : DISALLOWED
        #  DIRCAP:./foo        : DIRCAP/foo
        #  DIRCAP:./subdir/foo : DIRCAP/subdir/foo
        #  MUTABLE-FILE-WRITECAP : filecap

        # FIXME: don't hardcode cap format.
        if to_file.startswith("URI:MDMF:") or to_file.startswith("URI:SSK:"):
            url = nodeurl + "uri/%s" % urllib.quote(to_file)
        else:
            try:
                rootcap, path = get_alias(aliases, to_file, DEFAULT_ALIAS)
            except UnknownAliasError as e:
                e.display(stderr)
                return 1
            if path.startswith("/"):
                suggestion = to_file.replace(u"/", u"", 1)
                print("Error: The remote filename must not start with a slash", file=stderr)
                print("Please try again, perhaps with %s" % quote_output(suggestion), file=stderr)
                return 1
            url = nodeurl + "uri/%s/" % urllib.quote(rootcap)
            if path:
                url += escape_path(path)
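A hedged sketch of the destination URL built in the alias branch above; the helper name is illustrative, and escape_path() from the real code is omitted to keep the sketch self-contained.

from urllib import quote  # Python 2, as in the snippets above

def destination_url(nodeurl, rootcap, path):
    # Reject a leading slash, then build <nodeurl>uri/<rootcap>/<path>;
    # the real code runs `path` through escape_path() first.
    if path.startswith("/"):
        raise ValueError("the remote filename must not start with a slash")
    url = nodeurl + "uri/%s/" % quote(rootcap)
    if path:
        url += path
    return url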
Example #9
def mkdir(options):
    nodeurl = options['node-url']
    aliases = options.aliases
    where = options.where
    stdout = options.stdout
    stderr = options.stderr
    if not nodeurl.endswith("/"):
        nodeurl += "/"
    if where:
        try:
            rootcap, path = get_alias(aliases, where, DEFAULT_ALIAS)
        except UnknownAliasError as e:
            e.display(stderr)
            return 1
    def run(self, options):
        stderr = options.stderr
        self.options = options
        self.ophandle = ophandle = base32.b2a(os.urandom(16))
        nodeurl = options['node-url']
        if not nodeurl.endswith("/"):
            nodeurl += "/"
        self.nodeurl = nodeurl
        where = options.where
        try:
            rootcap, path = get_alias(options.aliases, where, DEFAULT_ALIAS)
        except UnknownAliasError as e:
            e.display(stderr)
            return 1
def get(options):
    nodeurl = options['node-url']
    aliases = options.aliases
    from_file = options.from_file
    to_file = options.to_file
    stdout = options.stdout
    stderr = options.stderr

    if nodeurl[-1] != "/":
        nodeurl += "/"
    try:
        rootcap, path = get_alias(aliases, from_file, DEFAULT_ALIAS)
    except UnknownAliasError as e:
        e.display(stderr)
        return 1
    def run(self, options):
        stdout = options.stdout
        stderr = options.stderr
        self.rc = 0
        self.options = options
        nodeurl = options['node-url']
        if not nodeurl.endswith("/"):
            nodeurl += "/"
        self.nodeurl = nodeurl
        where = options.where
        try:
            rootcap, path = get_alias(options.aliases, where, DEFAULT_ALIAS)
        except UnknownAliasError as e:
            e.display(stderr)
            return 1
Example #13
    def deepcheck_location(self, options, where):
        stdout = options.stdout
        stderr = options.stderr
        self.rc = 0
        self.options = options
        nodeurl = options['node-url']
        if not nodeurl.endswith("/"):
            nodeurl += "/"
        self.nodeurl = nodeurl

        try:
            rootcap, path = get_alias(options.aliases, where, DEFAULT_ALIAS)
        except UnknownAliasError as e:
            e.display(stderr)
            return 1
        if path == '/':
            path = ''
        url = nodeurl + "uri/%s" % urllib.quote(rootcap)
        if path:
            url += "/" + escape_path(path)
        # todo: should it end with a slash?
        url += "?t=stream-deep-check"
        if options["verify"]:
            url += "&verify=true"
        if options["repair"]:
            url += "&repair=true"
            output = DeepCheckAndRepairOutput(self, options)
        else:
            output = DeepCheckOutput(self, options)
        if options["add-lease"]:
            url += "&add-lease=true"
        resp = do_http("POST", url)
        if resp.status not in (200, 302):
            print(format_http_error("ERROR", resp), file=stderr)
            return 1

        # use Twisted to split this into lines
        while True:
            chunk = resp.read(100)
            if not chunk:
                break
            if self.options["raw"]:
                stdout.write(chunk)
            else:
                output.dataReceived(chunk)
        if not self.options["raw"]:
            output.done()
        return 0
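The loop above hands 100-byte chunks to an output object that reassembles them into lines. A minimal sketch of that line-splitting step; the class name and callback are assumptions, not the Tahoe implementation.

class LineSplitter(object):
    # Buffer raw chunks and invoke a callback once per complete line,
    # mimicking the dataReceived()/done() pattern used above.
    def __init__(self, on_line):
        self.on_line = on_line
        self._buf = ""

    def dataReceived(self, chunk):
        self._buf += chunk
        while "\n" in self._buf:
            line, self._buf = self._buf.split("\n", 1)
            self.on_line(line)

    def done(self):
        if self._buf:
            self.on_line(self._buf)
            self._buf = ""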
Example #14
def rm(options):
    """
    @return: a Deferred which eventually fires with the exit code
    """
    nodeurl = options['node-url']
    aliases = options.aliases
    where = options.where
    stdout = options.stdout
    stderr = options.stderr

    if nodeurl[-1] != "/":
        nodeurl += "/"
    try:
        rootcap, path = get_alias(aliases, where, DEFAULT_ALIAS)
    except UnknownAliasError as e:
        e.display(stderr)
        return 1
def webopen(options, opener=None):
    nodeurl = options['node-url']
    stderr = options.stderr
    if not nodeurl.endswith("/"):
        nodeurl += "/"
    where = options.where
    if where:
        try:
            rootcap, path = get_alias(options.aliases, where, DEFAULT_ALIAS)
        except UnknownAliasError as e:
            e.display(stderr)
            return 1
        if path == '/':
            path = ''
        url = nodeurl + "uri/%s" % urllib.quote(rootcap)
        if path:
            url += "/" + escape_path(path)
Example #16
    def get_source_info(self, source_spec):
        rootcap, path = get_alias(self.aliases, source_spec, None)
        if rootcap == DefaultAliasMarker:
            # no alias, so this is a local file
            pathname = abspath_expanduser_unicode(path.decode('utf-8'))
            name = os.path.basename(pathname)
            if not os.path.exists(pathname):
                raise MissingSourceError(source_spec)
            if os.path.isdir(pathname):
                t = LocalDirectorySource(self.progress, pathname)
            else:
                assert os.path.isfile(pathname)
                t = LocalFileSource(pathname) # non-empty
        else:
            # this is a tahoe object
            url = self.nodeurl + "uri/%s" % urllib.quote(rootcap)
            name = None
            if path:
                url += "/" + escape_path(path)
                last_slash = path.rfind("/")
                name = path
                if last_slash != -1:
                    name = path[last_slash+1:]

            resp = do_http("GET", url + "?t=json")
            if resp.status == 404:
                raise MissingSourceError(source_spec)
            elif resp.status != 200:
                raise HTTPError("Error examining source %s" % quote_output(source_spec),
                                resp)
            parsed = simplejson.loads(resp.read())
            nodetype, d = parsed
            if nodetype == "dirnode":
                t = TahoeDirectorySource(self.nodeurl, self.cache,
                                         self.progress)
                t.init_from_parsed(parsed)
            else:
                writecap = to_str(d.get("rw_uri"))
                readcap = to_str(d.get("ro_uri"))
                mutable = d.get("mutable", False) # older nodes don't provide it
                if source_spec.rfind('/') != -1:
                    name = source_spec[source_spec.rfind('/')+1:]
                t = TahoeFileSource(self.nodeurl, mutable, writecap, readcap)
        return name, t
Example #17
    def get_target_info(self, destination_spec):
        precondition(isinstance(destination_spec, unicode), destination_spec)
        rootcap, path_utf8 = get_alias(self.aliases, destination_spec, None)
        path = path_utf8.decode("utf-8")
        if rootcap == DefaultAliasMarker:
            # no alias, so this is a local file
            pathname = abspath_expanduser_unicode(path)
            if not os.path.exists(pathname):
                t = LocalMissingTarget(pathname)
            elif os.path.isdir(pathname):
                t = LocalDirectoryTarget(self.progress, pathname)
            else:
                # TODO: should this be _assert? what happens if the target is
                # a special file?
                assert os.path.isfile(pathname), pathname
                t = LocalFileTarget(pathname) # non-empty
        else:
            # this is a tahoe object
            url = self.nodeurl + "uri/%s" % urllib.quote(rootcap)
            if path:
                url += "/" + escape_path(path)

            resp = do_http("GET", url + "?t=json")
            if resp.status == 404:
                # doesn't exist yet
                t = TahoeMissingTarget(url)
            elif resp.status == 200:
                parsed = json.loads(resp.read())
                nodetype, d = parsed
                if nodetype == "dirnode":
                    t = TahoeDirectoryTarget(self.nodeurl, self.cache,
                                             self.progress)
                    t.init_from_parsed(parsed)
                else:
                    writecap = to_str(d.get("rw_uri"))
                    readcap = to_str(d.get("ro_uri"))
                    mutable = d.get("mutable", False)
                    t = TahoeFileTarget(self.nodeurl, mutable,
                                        writecap, readcap, url)
            else:
                raise HTTPError("Error examining target %s"
                                 % quote_output(destination_spec), resp)
        return t
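A condensed sketch of the target classification performed above: a 404 on the ?t=json probe means the target does not exist yet, a 200 "dirnode" is a directory, and any other 200 response is treated as a file. The helper name is illustrative.

def classify_target(status, nodetype=None):
    # Mirror the branching above without the Target classes.
    if status == 404:
        return "missing"
    if status == 200:
        return "directory" if nodetype == "dirnode" else "file"
    raise ValueError("unexpected HTTP status: %r" % (status,))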
Example #18
def mkdir(options):
    nodeurl = options['node-url']
    aliases = options.aliases
    where = options.where
    stdout = options.stdout
    stderr = options.stderr
    if not nodeurl.endswith("/"):
        nodeurl += "/"
    if where:
        try:
            rootcap, path = get_alias(aliases, where, DEFAULT_ALIAS)
        except UnknownAliasError as e:
            e.display(stderr)
            return 1

    if not where or not path:
        # create a new unlinked directory
        url = nodeurl + "uri?t=mkdir"
        if options["format"]:
            url += "&format=%s" % urllib.quote(options['format'])
        resp = do_http("POST", url)
        rc = check_http_error(resp, stderr)
        if rc:
            return rc
        new_uri = resp.read().strip()
        # emit its write-cap
        print(quote_output(new_uri, quotemarks=False), file=stdout)
        return 0

    # create a new directory at the given location
    if path.endswith("/"):
        path = path[:-1]
    # path must be "/".join([s.encode("utf-8") for s in segments])
    url = nodeurl + "uri/%s/%s?t=mkdir" % (urllib.quote(rootcap),
                                           urllib.quote(path))
    if options['format']:
        url += "&format=%s" % urllib.quote(options['format'])

    resp = do_http("POST", url)
    check_http_error(resp, stderr)
    new_uri = resp.read().strip()
    print(quote_output(new_uri, quotemarks=False), file=stdout)
    return 0
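As the comment notes, the path portion must be a '/'-joined sequence of UTF-8 segments. A hedged sketch of the resulting mkdir URL; the helper name is illustrative, and the real code quotes the path with urllib.quote exactly as shown above.

from urllib import quote  # Python 2, as in the snippets above

def mkdir_url(nodeurl, rootcap, segments, fmt=None):
    # Build <nodeurl>uri/<rootcap>/<path>?t=mkdir[&format=...] where
    # <path> is the '/'-joined, UTF-8 encoded segment list.
    path = "/".join([s.encode("utf-8") for s in segments])
    url = nodeurl + "uri/%s/%s?t=mkdir" % (quote(rootcap), quote(path))
    if fmt:
        url += "&format=%s" % quote(fmt)
    return url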
Example #19
def get(options):
    nodeurl = options['node-url']
    aliases = options.aliases
    from_file = options.from_file
    to_file = options.to_file
    stdout = options.stdout
    stderr = options.stderr

    if nodeurl[-1] != "/":
        nodeurl += "/"
    try:
        rootcap, path = get_alias(aliases, from_file, DEFAULT_ALIAS)
    except UnknownAliasError as e:
        e.display(stderr)
        return 1
    url = nodeurl + "uri/%s" % urllib.quote(rootcap)
    if path:
        url += "/" + escape_path(path)

    resp = do_http("GET", url)
    if resp.status in (200, 201,):
        if to_file:
            outf = open(to_file, "wb")
        else:
            outf = stdout
        while True:
            data = resp.read(4096)
            if not data:
                break
            outf.write(data)
        if to_file:
            outf.close()
        rc = 0
    else:
        print(format_http_error("Error during GET", resp), file=stderr)
        rc = 1

    return rc
    def run(self, options):
        self.rc = 0
        stdout = options.stdout
        stderr = options.stderr
        self.options = options
        nodeurl = options['node-url']
        if not nodeurl.endswith("/"):
            nodeurl += "/"
        self.nodeurl = nodeurl
        where = options.where
        try:
            rootcap, path = get_alias(options.aliases, where, DEFAULT_ALIAS)
        except UnknownAliasError as e:
            e.display(stderr)
            return 1
        if path == '/':
            path = ''
        url = nodeurl + "uri/%s" % urllib.quote(rootcap)
        if path:
            url += "/" + escape_path(path)
        # todo: should it end with a slash?
        url += "?t=stream-manifest"
        resp = do_http("POST", url)
        if resp.status not in (200, 302):
            print(format_http_error("ERROR", resp), file=stderr)
            return 1
        #print "RESP", dir(resp)
        # use Twisted to split this into lines
        self.in_error = False
        while True:
            chunk = resp.read(100)
            if not chunk:
                break
            if self.options["raw"]:
                stdout.write(chunk)
            else:
                self.dataReceived(chunk)
        return self.rc
Example #21
def put(options):
    """
    @param verbosity: 0, 1, or 2, meaning quiet, verbose, or very verbose

    @return: a Deferred which eventually fires with the exit code
    """
    nodeurl = options['node-url']
    aliases = options.aliases
    from_file = options.from_file
    to_file = options.to_file
    mutable = options['mutable']
    format = options['format']
    if options['quiet']:
        verbosity = 0
    else:
        verbosity = 2
    stdin = options.stdin
    stdout = options.stdout
    stderr = options.stderr

    if nodeurl[-1] != "/":
        nodeurl += "/"
    if to_file:
        # several possibilities for the TO_FILE argument.
        #  <none> : unlinked upload
        #  foo : TAHOE_ALIAS/foo
        #  subdir/foo : TAHOE_ALIAS/subdir/foo
        #  /oops/subdir/foo : DISALLOWED
        #  ALIAS:foo  : aliases[ALIAS]/foo
        #  ALIAS:subdir/foo  : aliases[ALIAS]/subdir/foo

        #  ALIAS:/oops/subdir/foo : DISALLOWED
        #  DIRCAP:./foo        : DIRCAP/foo
        #  DIRCAP:./subdir/foo : DIRCAP/subdir/foo
        #  MUTABLE-FILE-WRITECAP : filecap

        # FIXME: don't hardcode cap format.
        if to_file.startswith("URI:MDMF:") or to_file.startswith("URI:SSK:"):
            url = nodeurl + "uri/%s" % urllib.quote(to_file)
        else:
            try:
                rootcap, path = get_alias(aliases, to_file, DEFAULT_ALIAS)
            except UnknownAliasError as e:
                e.display(stderr)
                return 1
            if path.startswith("/"):
                suggestion = to_file.replace(u"/", u"", 1)
                print("Error: The remote filename must not start with a slash",
                      file=stderr)
                print("Please try again, perhaps with %s" %
                      quote_output(suggestion),
                      file=stderr)
                return 1
            url = nodeurl + "uri/%s/" % urllib.quote(rootcap)
            if path:
                url += escape_path(path)
    else:
        # unlinked upload
        url = nodeurl + "uri"

    queryargs = []
    if mutable:
        queryargs.append("mutable=true")
    if format:
        queryargs.append("format=%s" % format)
    if queryargs:
        url += "?" + "&".join(queryargs)

    if from_file:
        infileobj = open(from_file, "rb")
    else:
        # do_http() can't use stdin directly: for one thing, we need a
        # Content-Length field. So we currently must copy it.
        if verbosity > 0:
            print("waiting for file data on stdin..", file=stderr)
        data = stdin.read()
        infileobj = StringIO(data)

    resp = do_http("PUT", url, infileobj)

    if resp.status in (
            200,
            201,
    ):
        print(format_http_success(resp), file=stderr)
        print(quote_output(resp.read(), quotemarks=False), file=stdout)
        return 0

    print(format_http_error("Error", resp), file=stderr)
    return 1
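As the comment explains, stdin cannot be streamed directly because do_http() needs a Content-Length header. A minimal sketch of that buffering step; the helper name is illustrative.

from io import BytesIO

def buffer_body(stdin):
    # Read the entire body into memory so its length is known up front;
    # the length becomes the Content-Length of the PUT request.
    data = stdin.read()
    return BytesIO(data), len(data)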
Example #22
    def run(self):
        options = self.options
        nodeurl = options['node-url']
        self.verbosity = 1
        if options['quiet']:
            self.verbosity = 0
        if options['verbose']:
            self.verbosity = 2
        stdout = options.stdout
        stderr = options.stderr

        start_timestamp = datetime.datetime.now()
        bdbfile = os.path.join(options["node-directory"],
                               "private", "backupdb.sqlite")
        bdbfile = abspath_expanduser_unicode(bdbfile)
        self.backupdb = backupdb.get_backupdb(bdbfile, stderr)
        if not self.backupdb:
            print("ERROR: Unable to load backup db.", file=stderr)
            return 1

        try:
            rootcap, path = get_alias(options.aliases, options.to_dir, DEFAULT_ALIAS)
        except UnknownAliasError as e:
            e.display(stderr)
            return 1
        to_url = nodeurl + "uri/%s/" % urllib.quote(rootcap)
        if path:
            to_url += escape_path(path)
        if not to_url.endswith("/"):
            to_url += "/"

        archives_url = to_url + "Archives/"

        # first step: make sure the target directory exists, as well as the
        # Archives/ subdirectory.
        resp = do_http("GET", archives_url + "?t=json")
        if resp.status == 404:
            resp = do_http("POST", archives_url + "?t=mkdir")
            if resp.status != 200:
                print(format_http_error("Unable to create target directory", resp), file=stderr)
                return 1

        # second step: process the tree
        targets = list(collect_backup_targets(
            options.from_dir,
            listdir_unicode,
            self.options.filter_listdir,
        ))
        completed = run_backup(
            warn=self.warn,
            upload_file=self.upload,
            upload_directory=self.upload_directory,
            targets=targets,
            start_timestamp=start_timestamp,
            stdout=stdout,
        )
        new_backup_dircap = completed.dircap

        # third: attach the new backup to the list
        now = time_format.iso_utc(int(time.time()), sep="_") + "Z"

        put_child(archives_url, now, new_backup_dircap)
        put_child(to_url, "Latest", new_backup_dircap)
        print(completed.report(
            self.verbosity,
            self._files_checked,
            self._directories_checked,
        ), file=stdout)

        # The command exits with code 2 if files or directories were skipped
        if completed.any_skips():
            return 2

        # done!
        return 0
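The snapshot is linked under Archives/ with a UTC timestamp name built by time_format.iso_utc(..., sep="_") plus a trailing "Z". A rough standard-library approximation; treat the exact separator and field layout as an assumption.

import datetime

def archive_child_name(now=None):
    # Approximate the Archives/ child name: a UTC timestamp with '_'
    # between date and time and a trailing 'Z'.
    now = now or datetime.datetime.utcnow()
    return now.strftime("%Y-%m-%d_%H:%M:%S") + "Z"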
Example #23
def put(options):
    """
    @param verbosity: 0, 1, or 2, meaning quiet, verbose, or very verbose

    @return: a Deferred which eventually fires with the exit code
    """
    nodeurl = options['node-url']
    aliases = options.aliases
    from_file = options.from_file
    to_file = options.to_file
    mutable = options['mutable']
    format = options['format']
    if options['quiet']:
        verbosity = 0
    else:
        verbosity = 2
    stdin = options.stdin
    stdout = options.stdout
    stderr = options.stderr

    if nodeurl[-1] != "/":
        nodeurl += "/"
    if to_file:
        # several possibilities for the TO_FILE argument.
        #  <none> : unlinked upload
        #  foo : TAHOE_ALIAS/foo
        #  subdir/foo : TAHOE_ALIAS/subdir/foo
        #  /oops/subdir/foo : DISALLOWED
        #  ALIAS:foo  : aliases[ALIAS]/foo
        #  ALIAS:subdir/foo  : aliases[ALIAS]/subdir/foo

        #  ALIAS:/oops/subdir/foo : DISALLOWED
        #  DIRCAP:./foo        : DIRCAP/foo
        #  DIRCAP:./subdir/foo : DIRCAP/subdir/foo
        #  MUTABLE-FILE-WRITECAP : filecap

        # FIXME: don't hardcode cap format.
        if to_file.startswith("URI:MDMF:") or to_file.startswith("URI:SSK:"):
            url = nodeurl + "uri/%s" % urllib.quote(to_file)
        else:
            try:
                rootcap, path = get_alias(aliases, to_file, DEFAULT_ALIAS)
            except UnknownAliasError as e:
                e.display(stderr)
                return 1
            if path.startswith("/"):
                suggestion = to_file.replace(u"/", u"", 1)
                print("Error: The remote filename must not start with a slash", file=stderr)
                print("Please try again, perhaps with %s" % quote_output(suggestion), file=stderr)
                return 1
            url = nodeurl + "uri/%s/" % urllib.quote(rootcap)
            if path:
                url += escape_path(path)
    else:
        # unlinked upload
        url = nodeurl + "uri"

    queryargs = []
    if mutable:
        queryargs.append("mutable=true")
    if format:
        queryargs.append("format=%s" % format)
    if queryargs:
        url += "?" + "&".join(queryargs)

    if from_file:
        infileobj = open(from_file, "rb")
    else:
        # do_http() can't use stdin directly: for one thing, we need a
        # Content-Length field. So we currently must copy it.
        if verbosity > 0:
            print("waiting for file data on stdin..", file=stderr)
        data = stdin.read()
        infileobj = StringIO(data)

    resp = do_http("PUT", url, infileobj)

    if resp.status in (200, 201,):
        print(format_http_success(resp), file=stderr)
        print(quote_output(resp.read(), quotemarks=False), file=stdout)
        return 0

    print(format_http_error("Error", resp), file=stderr)
    return 1
Example #24
def list(options):
    nodeurl = options['node-url']
    aliases = options.aliases
    where = options.where
    stdout = options.stdout
    stderr = options.stderr

    if not nodeurl.endswith("/"):
        nodeurl += "/"
    if where.endswith("/"):
        where = where[:-1]
    try:
        rootcap, path = get_alias(aliases, where, DEFAULT_ALIAS)
    except UnknownAliasError as e:
        e.display(stderr)
        return 1
    url = nodeurl + "uri/%s" % urllib.quote(rootcap)
    if path:
        # move where.endswith check here?
        url += "/" + escape_path(path)
    assert not url.endswith("/")
    url += "?t=json"
    resp = do_http("GET", url)
    if resp.status == 404:
        print("No such file or directory", file=stderr)
        return 2
    if resp.status != 200:
        print(format_http_error("Error during GET", resp), file=stderr)
        if resp.status == 0:
            return 3
        else:
            return resp.status

    data = resp.read()

    if options['json']:
        # The webapi server should always output printable ASCII.
        if is_printable_ascii(data):
            print(data, file=stdout)
            return 0
        else:
            print("The JSON response contained unprintable characters:", file=stderr)
            print(quote_output(data, quotemarks=False), file=stderr)
            return 1

    try:
        parsed = json.loads(data)
    except Exception as e:
        print("error: %s" % quote_output(e.args[0], quotemarks=False), file=stderr)
        print("Could not parse JSON response:", file=stderr)
        print(quote_output(data, quotemarks=False), file=stderr)
        return 1

    nodetype, d = parsed
    children = {}
    if nodetype == "dirnode":
        children = d['children']
    else:
        # paths returned from get_alias are always valid UTF-8
        childname = path.split("/")[-1].decode('utf-8')
        children = {childname: (nodetype, d)}
        if "metadata" not in d:
            d["metadata"] = {}
    childnames = sorted(children.keys())
    now = time.time()

    # we build up a series of rows, then we loop through them to compute a
    # maxwidth so we can format them tightly. Size, filename, and URI are the
    # variable-width ones.
    rows = []
    has_unknowns = False

    for name in childnames:
        child = children[name]
        name = unicode(name)
        childtype = child[0]

        # See webapi.txt for a discussion of the meanings of unix local
        # filesystem mtime and ctime, Tahoe mtime and ctime, and Tahoe
        # linkmotime and linkcrtime.
        ctime = child[1].get("metadata", {}).get('tahoe', {}).get("linkcrtime")
        if not ctime:
            ctime = child[1]["metadata"].get("ctime")

        mtime = child[1].get("metadata", {}).get('tahoe', {}).get("linkmotime")
        if not mtime:
            mtime = child[1]["metadata"].get("mtime")
        rw_uri = to_str(child[1].get("rw_uri"))
        ro_uri = to_str(child[1].get("ro_uri"))
        if ctime:
            # match for formatting that GNU 'ls' does
            if (now - ctime) > 6*30*24*60*60:
                # old files
                fmt = "%b %d  %Y"
            else:
                fmt = "%b %d %H:%M"
            ctime_s = time.strftime(fmt, time.localtime(ctime))
        else:
            ctime_s = "-"
        if childtype == "dirnode":
            t0 = "d"
            size = "-"
            classify = "/"
        elif childtype == "filenode":
            t0 = "-"
            size = str(child[1].get("size", "?"))
            classify = ""
            if rw_uri:
                classify = "*"
        else:
            has_unknowns = True
            t0 = "?"
            size = "?"
            classify = "?"
        t1 = "-"
        if ro_uri:
            t1 = "r"
        t2 = "-"
        if rw_uri:
            t2 = "w"
        t3 = "-"
        if childtype == "dirnode":
            t3 = "x"

        uri = rw_uri or ro_uri

        line = []
        if options["long"]:
            line.append(t0+t1+t2+t3)
            line.append(size)
            line.append(ctime_s)
        if not options["classify"]:
            classify = ""

        encoding_error = False
        try:
            line.append(unicode_to_output(name) + classify)
        except UnicodeEncodeError:
            encoding_error = True
            line.append(quote_output(name) + classify)

        if options["uri"]:
            line.append(uri)
        if options["readonly-uri"]:
            line.append(quote_output(ro_uri or "-", quotemarks=False))

        rows.append((encoding_error, line))

    max_widths = []
    left_justifys = []
    for (encoding_error, row) in rows:
        for i,cell in enumerate(row):
            while len(max_widths) <= i:
                max_widths.append(0)
            while len(left_justifys) <= i:
                left_justifys.append(False)
            max_widths[i] = max(max_widths[i], len(cell))
            if cell.startswith("URI"):
                left_justifys[i] = True
    if len(left_justifys) == 1:
        left_justifys[0] = True
    fmt_pieces = []
    for i in range(len(max_widths)):
        piece = "%"
        if left_justifys[i]:
            piece += "-"
        piece += str(max_widths[i])
        piece += "s"
        fmt_pieces.append(piece)
    fmt = " ".join(fmt_pieces)

    rc = 0
    for (encoding_error, row) in rows:
        if encoding_error:
            print((fmt % tuple(row)).rstrip(), file=stderr)
            rc = 1
        else:
            print((fmt % tuple(row)).rstrip(), file=stdout)

    if rc == 1:
        print("\nThis listing included files whose names could not be converted to the terminal" \
                        "\noutput encoding. Their names are shown using backslash escapes and in quotes.", file=stderr)
    if has_unknowns:
        print("\nThis listing included unknown objects. Using a webapi server that supports" \
                        "\na later version of Tahoe may help.", file=stderr)

    return rc
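The listing code above first collects rows and then computes per-column maximum widths to build a printf-style format string. A compact sketch of that idea; unlike the more careful logic above, it left-justifies every column and assumes all rows have the same number of cells.

def tabulate(rows):
    # Compute the widest cell in each column, then format every row
    # with '%-<width>s' fields joined by single spaces.
    widths = []
    for row in rows:
        for i, cell in enumerate(row):
            if i >= len(widths):
                widths.append(0)
            widths[i] = max(widths[i], len(cell))
    fmt = " ".join("%%-%ds" % w for w in widths)
    return [(fmt % tuple(row)).rstrip() for row in rows]

# e.g. tabulate([["drwx", "-", "subdir/"], ["-rw-", "1234", "file.txt"]])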
Example #25
        return 1
    from_url = nodeurl + "uri/%s" % urllib.quote(rootcap)
    if from_path:
        from_url += "/" + escape_path(from_path)
    # figure out the source cap
    resp = do_http("GET", from_url + "?t=json")
    if not re.search(r'^2\d\d$', str(resp.status)):
        print(format_http_error("Error", resp), file=stderr)
        return 1
    data = resp.read()
    nodetype, attrs = json.loads(data)
    cap = to_str(attrs.get("rw_uri") or attrs["ro_uri"])

    # now get the target
    try:
        rootcap, path = get_alias(aliases, to_file, DEFAULT_ALIAS)
    except UnknownAliasError as e:
        e.display(stderr)
        return 1
    to_url = nodeurl + "uri/%s" % urllib.quote(rootcap)
    if path:
        to_url += "/" + escape_path(path)

    if to_url.endswith("/"):
        # "mv foo.txt bar/" == "mv foo.txt bar/foo.txt"
        to_url += escape_path(from_path[from_path.rfind("/") + 1:])

    to_url += "?t=uri&replace=only-files"

    resp = do_http("PUT", to_url, cap)
    status = resp.status
Example #26
    try:
        rootcap, from_path = get_alias(aliases, from_file, DEFAULT_ALIAS)
    except UnknownAliasError as e:
        e.display(stderr)
        return 1
    from_url = nodeurl + "uri/%s" % urllib.quote(rootcap)
    if from_path:
        from_url += "/" + escape_path(from_path)
    # figure out the source cap
    data = urllib.urlopen(from_url + "?t=json").read()
    nodetype, attrs = simplejson.loads(data)
    cap = to_str(attrs.get("rw_uri") or attrs["ro_uri"])

    # now get the target
    try:
        rootcap, path = get_alias(aliases, to_file, DEFAULT_ALIAS)
    except UnknownAliasError as e:
        e.display(stderr)
        return 1
    to_url = nodeurl + "uri/%s" % urllib.quote(rootcap)
    if path:
        to_url += "/" + escape_path(path)

    if to_url.endswith("/"):
        # "mv foo.txt bar/" == "mv foo.txt bar/foo.txt"
        to_url += escape_path(from_path[from_path.rfind("/")+1:])

    to_url += "?t=uri&replace=only-files"

    resp = do_http("PUT", to_url, cap)
    status = resp.status
Example #27
    def get_source_info(self, source_spec):
        """
        This turns an argv string into a (Local|Tahoe)(File|Directory)Source.
        """
        precondition(isinstance(source_spec, unicode), source_spec)
        rootcap, path_utf8 = get_alias(self.aliases, source_spec, None)
        path = path_utf8.decode("utf-8")
        # any trailing slash is removed in abspath_expanduser_unicode(), so
        # make a note of it here, to throw an error later
        had_trailing_slash = path.endswith("/")
        if rootcap == DefaultAliasMarker:
            # no alias, so this is a local file
            pathname = abspath_expanduser_unicode(path)
            name = os.path.basename(pathname)
            if not os.path.exists(pathname):
                raise MissingSourceError(source_spec,
                                         quotefn=quote_local_unicode_path)
            if os.path.isdir(pathname):
                t = LocalDirectorySource(self.progress, pathname, name)
            else:
                if had_trailing_slash:
                    raise FilenameWithTrailingSlashError(
                        source_spec, quotefn=quote_local_unicode_path)
                if not os.path.isfile(pathname):
                    raise WeirdSourceError(pathname)
                t = LocalFileSource(pathname, name)  # non-empty
        else:
            # this is a tahoe object
            url = self.nodeurl + "uri/%s" % urllib.quote(rootcap)
            name = None
            if path:
                if path.endswith("/"):
                    path = path[:-1]
                url += "/" + escape_path(path)
                last_slash = path.rfind(u"/")
                name = path
                if last_slash != -1:
                    name = path[last_slash + 1:]

            resp = do_http("GET", url + "?t=json")
            if resp.status == 404:
                raise MissingSourceError(source_spec)
            elif resp.status != 200:
                raise HTTPError(
                    "Error examining source %s" % quote_output(source_spec),
                    resp)
            parsed = json.loads(resp.read())
            nodetype, d = parsed
            if nodetype == "dirnode":
                t = TahoeDirectorySource(self.nodeurl, self.cache,
                                         self.progress, name)
                t.init_from_parsed(parsed)
            else:
                if had_trailing_slash:
                    raise FilenameWithTrailingSlashError(source_spec)
                writecap = to_str(d.get("rw_uri"))
                readcap = to_str(d.get("ro_uri"))
                mutable = d.get("mutable",
                                False)  # older nodes don't provide it
                t = TahoeFileSource(self.nodeurl, mutable, writecap, readcap,
                                    name)
        return t
Example #28
def mv(options, mode="move"):
    nodeurl = options['node-url']
    aliases = options.aliases
    from_file = options.from_file
    to_file = options.to_file
    stdout = options.stdout
    stderr = options.stderr

    if nodeurl[-1] != "/":
        nodeurl += "/"
    try:
        rootcap, from_path = get_alias(aliases, from_file, DEFAULT_ALIAS)
    except UnknownAliasError as e:
        e.display(stderr)
        return 1
    from_url = nodeurl + "uri/%s" % urllib.quote(rootcap)
    if from_path:
        from_url += "/" + escape_path(from_path)
    # figure out the source cap
    resp = do_http("GET", from_url + "?t=json")
    if not re.search(r'^2\d\d$', str(resp.status)):
        print(format_http_error("Error", resp), file=stderr)
        return 1
    data = resp.read()
    nodetype, attrs = json.loads(data)
    cap = to_str(attrs.get("rw_uri") or attrs["ro_uri"])

    # now get the target
    try:
        rootcap, path = get_alias(aliases, to_file, DEFAULT_ALIAS)
    except UnknownAliasError as e:
        e.display(stderr)
        return 1
    to_url = nodeurl + "uri/%s" % urllib.quote(rootcap)
    if path:
        to_url += "/" + escape_path(path)

    if to_url.endswith("/"):
        # "mv foo.txt bar/" == "mv foo.txt bar/foo.txt"
        to_url += escape_path(from_path[from_path.rfind("/")+1:])

    to_url += "?t=uri&replace=only-files"

    resp = do_http("PUT", to_url, cap)
    status = resp.status
    if not re.search(r'^2\d\d$', str(status)):
        if status == 409:
            print("Error: You can't overwrite a directory with a file", file=stderr)
        else:
            print(format_http_error("Error", resp), file=stderr)
            if mode == "move":
                print("NOT removing the original", file=stderr)
        return 1

    if mode == "move":
        # now remove the original
        resp = do_http("DELETE", from_url)
        if not re.search(r'^2\d\d$', str(resp.status)):
            print(format_http_error("Error deleting original after move", resp), file=stderr)
            return 2

    print("OK", file=stdout)
    return 0
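The trailing-slash rule above ("mv foo.txt bar/" behaves like "mv foo.txt bar/foo.txt") boils down to appending the source's basename; a tiny self-contained illustration, with a hypothetical helper name.

def implied_target_name(from_path):
    # Everything after the last '/' in the source path (or the whole
    # path when there is no slash) becomes the child name under the
    # slash-terminated destination.
    return from_path[from_path.rfind("/") + 1:]

assert implied_target_name("dir/foo.txt") == "foo.txt"
assert implied_target_name("foo.txt") == "foo.txt"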
Example #29
def check_location(options, where):
    stdout = options.stdout
    stderr = options.stderr
    nodeurl = options['node-url']
    if not nodeurl.endswith("/"):
        nodeurl += "/"
    try:
        rootcap, path = get_alias(options.aliases, where, DEFAULT_ALIAS)
    except UnknownAliasError as e:
        e.display(stderr)
        return 1
    if path == '/':
        path = ''
    url = nodeurl + "uri/%s" % urllib.quote(rootcap)
    if path:
        url += "/" + escape_path(path)
    # todo: should it end with a slash?
    url += "?t=check&output=JSON"
    if options["verify"]:
        url += "&verify=true"
    if options["repair"]:
        url += "&repair=true"
    if options["add-lease"]:
        url += "&add-lease=true"

    resp = do_http("POST", url)
    if resp.status != 200:
        print(format_http_error("ERROR", resp), file=stderr)
        return 1
    jdata = resp.read()
    if options.get("raw"):
        stdout.write(jdata)
        stdout.write("\n")
        return 0
    data = json.loads(jdata)

    if options["repair"]:
        # show repair status
        if data["pre-repair-results"]["results"]["healthy"]:
            summary = "healthy"
        else:
            summary = "not healthy"
        stdout.write("Summary: %s\n" % summary)
        cr = data["pre-repair-results"]["results"]
        stdout.write(" storage index: %s\n" % quote_output(data["storage-index"], quotemarks=False))
        stdout.write(" good-shares: %r (encoding is %r-of-%r)\n"
                     % (cr["count-shares-good"],
                        cr["count-shares-needed"],
                        cr["count-shares-expected"]))
        stdout.write(" wrong-shares: %r\n" % cr["count-wrong-shares"])
        corrupt = cr["list-corrupt-shares"]
        if corrupt:
            stdout.write(" corrupt shares:\n")
            for (serverid, storage_index, sharenum) in corrupt:
                stdout.write("  %s\n" % _quote_serverid_index_share(serverid, storage_index, sharenum))
        if data["repair-attempted"]:
            if data["repair-successful"]:
                stdout.write(" repair successful\n")
            else:
                stdout.write(" repair failed\n")
    else:
        # LIT files and directories do not have a "summary" field.
        summary = data.get("summary", "Healthy (LIT)")
        stdout.write("Summary: %s\n" % quote_output(summary, quotemarks=False))
        cr = data["results"]
        stdout.write(" storage index: %s\n" % quote_output(data["storage-index"], quotemarks=False))

        if all([field in cr for field in ("count-shares-good", "count-shares-needed",
                                          "count-shares-expected", "count-wrong-shares")]):
            stdout.write(" good-shares: %r (encoding is %r-of-%r)\n"
                         % (cr["count-shares-good"],
                            cr["count-shares-needed"],
                            cr["count-shares-expected"]))
            stdout.write(" wrong-shares: %r\n" % cr["count-wrong-shares"])

        corrupt = cr.get("list-corrupt-shares", [])
        if corrupt:
            stdout.write(" corrupt shares:\n")
            for (serverid, storage_index, sharenum) in corrupt:
                stdout.write("  %s\n" % _quote_serverid_index_share(serverid, storage_index, sharenum))

    return 0
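For reference, the check operation above is driven entirely by query parameters. A hedged sketch of the URL assembly; the helper is illustrative, and escape_path() from the real code is omitted to keep it self-contained.

from urllib import quote  # Python 2, as in the snippets above

def check_url(nodeurl, rootcap, path, verify=False, repair=False, add_lease=False):
    # t=check&output=JSON selects a JSON check report; verify, repair and
    # add-lease are opted into with boolean query flags.
    url = nodeurl + "uri/%s" % quote(rootcap)
    if path:
        url += "/" + path
    url += "?t=check&output=JSON"
    if verify:
        url += "&verify=true"
    if repair:
        url += "&repair=true"
    if add_lease:
        url += "&add-lease=true"
    return url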
Example #30
    def run(self):
        options = self.options
        nodeurl = options['node-url']
        self.verbosity = 1
        if options['quiet']:
            self.verbosity = 0
        if options['verbose']:
            self.verbosity = 2
        stdout = options.stdout
        stderr = options.stderr

        start_timestamp = datetime.datetime.now()
        bdbfile = os.path.join(options["node-directory"], "private",
                               "backupdb.sqlite")
        bdbfile = abspath_expanduser_unicode(bdbfile)
        self.backupdb = backupdb.get_backupdb(bdbfile, stderr)
        if not self.backupdb:
            print("ERROR: Unable to load backup db.", file=stderr)
            return 1

        try:
            rootcap, path = get_alias(options.aliases, options.to_dir,
                                      DEFAULT_ALIAS)
        except UnknownAliasError as e:
            e.display(stderr)
            return 1
        to_url = nodeurl + "uri/%s/" % urllib.quote(rootcap)
        if path:
            to_url += escape_path(path)
        if not to_url.endswith("/"):
            to_url += "/"

        archives_url = to_url + "Archives/"

        # first step: make sure the target directory exists, as well as the
        # Archives/ subdirectory.
        resp = do_http("GET", archives_url + "?t=json")
        if resp.status == 404:
            resp = do_http("POST", archives_url + "?t=mkdir")
            if resp.status != 200:
                print(format_http_error(
                    "Unable to create target directory", resp), file=stderr)
                return 1

        # second step: process the tree
        targets = list(
            collect_backup_targets(
                options.from_dir,
                listdir_unicode,
                self.options.filter_listdir,
            ))
        completed = run_backup(
            warn=self.warn,
            upload_file=self.upload,
            upload_directory=self.upload_directory,
            targets=targets,
            start_timestamp=start_timestamp,
            stdout=stdout,
        )
        new_backup_dircap = completed.dircap

        # third: attach the new backup to the list
        now = time_format.iso_utc(int(time.time()), sep="_") + "Z"

        put_child(archives_url, now, new_backup_dircap)
        put_child(to_url, "Latest", new_backup_dircap)
        print(completed.report(
            self.verbosity,
            self._files_checked,
            self._directories_checked,
        ), file=stdout)

        # The command exits with code 2 if files or directories were skipped
        if completed.any_skips():
            return 2

        # done!
        return 0