Example #1
def start(opts, out=sys.stdout, err=sys.stderr):
    basedir = opts['basedir']
    print >>out, "STARTING", quote_output(basedir)
    if not os.path.isdir(basedir):
        print >>err, "%s does not look like a directory at all" % quote_output(basedir)
        return 1
    for fn in listdir_unicode(basedir):
        if fn.endswith(u".tac"):
            tac = str(fn)
            break
    else:
        print >>err, "%s does not look like a node directory (no .tac file)" % quote_output(basedir)
        return 1
    if "client" in tac:
        nodetype = "client"
    elif "introducer" in tac:
        nodetype = "introducer"
    else:
        nodetype = "unknown (%s)" % tac

    args = ["twistd", "-y", tac]
    if opts["syslog"]:
        args.append("--syslog")
    elif nodetype in ("client", "introducer"):
        fileutil.make_dirs(os.path.join(basedir, "logs"))
        args.extend(["--logfile", os.path.join("logs", "twistd.log")])
    if opts["profile"]:
        args.extend(["--profile=profiling_results.prof", "--savestats",])
    # now we're committed
    os.chdir(basedir)
    from twisted.scripts import twistd
    sys.argv = args
    twistd.run()
Example #2
def start(config, out=sys.stdout, err=sys.stderr):
    basedir = config['basedir']
    print >>out, "STARTING", quote_output(basedir)
    if not os.path.isdir(basedir):
        print >>err, "%s does not look like a directory at all" % quote_output(basedir)
        return 1
    nodetype = identify_node_type(basedir)
    if not nodetype:
        print >>err, "%s is not a recognizable node directory" % quote_output(basedir)
        return 1
    # Now prepare to turn into a twistd process. This os.chdir is the point
    # of no return.
    os.chdir(basedir)
    twistd_args = []
    if (nodetype in ("client", "introducer")
        and "--nodaemon" not in config.twistd_args
        and "--syslog" not in config.twistd_args
        and "--logfile" not in config.twistd_args):
        fileutil.make_dirs(os.path.join(basedir, "logs"))
        twistd_args.extend(["--logfile", os.path.join("logs", "twistd.log")])
    twistd_args.extend(config.twistd_args)
    twistd_args.append("StartTahoeNode") # point at our StartTahoeNodePlugin

    twistd_config = MyTwistdConfig()
    try:
        twistd_config.parseOptions(twistd_args)
    except usage.error, ue:
        # these arguments were unsuitable for 'twistd'
        print >>err, config
        print >>err, "tahoe %s: usage error from twistd: %s\n" % (config.subcommand_name, ue)
        return 1
Example #3
def format_http_error(msg, resp):
    return "%s: %s %s\n%s" % (
        msg,
        resp.status,
        quote_output(resp.reason, quotemarks=False),
        quote_output(resp.read(), quotemarks=False),
    )
Example #4
    def modify(self, old_contents, servermap, first_time):
        children = self.node._unpack_contents(old_contents)
        now = time.time()
        for (namex, (child, new_metadata)) in self.entries.iteritems():
            name = normalize(namex)
            precondition(IFilesystemNode.providedBy(child), child)

            # Strictly speaking this is redundant because we would raise the
            # error again in _pack_normalized_children.
            child.raise_error()

            metadata = None
            if name in children:
                if not self.overwrite:
                    raise ExistingChildError("child %s already exists" % quote_output(name, encoding='utf-8'))

                if self.overwrite == "only-files" and IDirectoryNode.providedBy(children[name][0]):
                    raise ExistingChildError("child %s already exists" % quote_output(name, encoding='utf-8'))
                metadata = children[name][1].copy()

            metadata = update_metadata(metadata, new_metadata, now)
            if self.create_readonly_node and metadata.get('no-write', False):
                child = self.create_readonly_node(child, name)

            children[name] = (child, metadata)
        new_contents = self.node._pack_contents(children)
        return new_contents
Example #5
    def lineReceived(self, line):
        if self.in_error:
            print >>self.stderr, quote_output(line, quotemarks=False)
            return
        if line.startswith("ERROR:"):
            self.in_error = True
            self.streamer.rc = 1
            print >>self.stderr, quote_output(line, quotemarks=False)
            return

        d = simplejson.loads(line)
        stdout = self.stdout
        if d["type"] not in ("file", "directory"):
            return
        self.num_objects += 1
        # non-verbose means print a progress marker every 100 files
        if self.num_objects % 100 == 0:
            print >>stdout, "%d objects checked.." % self.num_objects
        cr = d["check-results"]
        if cr["results"]["healthy"]:
            self.files_healthy += 1
        else:
            self.files_unhealthy += 1
        if self.verbose:
            # verbose means also print one line per file
            path = d["path"]
            if not path:
                path = ["<root>"]
            summary = cr.get("summary", "Healthy (LIT)")
            print >>stdout, "%s: %s" % (quote_path(path), quote_output(summary, quotemarks=False))

        # always print out corrupt shares
        for shareloc in cr["results"].get("list-corrupt-shares", []):
            (serverid, storage_index, sharenum) = shareloc
            print >>stdout, " corrupt: %s" % _quote_serverid_index_share(serverid, storage_index, sharenum)
Example #6
def add_alias(options):
    nodedir = options['node-directory']
    alias = options.alias
    precondition(isinstance(alias, unicode), alias=alias)
    cap = options.cap
    stdout = options.stdout
    stderr = options.stderr
    if u":" in alias:
        # a single trailing colon will already have been stripped if present
        print >>stderr, "Alias names cannot contain colons."
        return 1
    if u" " in alias:
        print >>stderr, "Alias names cannot contain spaces."
        return 1

    old_aliases = get_aliases(nodedir)
    if alias in old_aliases:
        print >>stderr, "Alias %s already exists!" % quote_output(alias)
        return 1
    aliasfile = os.path.join(nodedir, "private", "aliases")
    cap = uri.from_string_dirnode(cap).to_string()

    add_line_to_aliasfile(aliasfile, alias, cap)

    print >>stdout, "Alias %s added" % quote_output(alias)
    return 0
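For reference, a minimal sketch of the add_line_to_aliasfile helper assumed above, based only on how it is called here and on the one-alias-per-line "name: cap" format that get_aliases reads back; the exact body is an assumption, not the project's code:

def add_line_to_aliasfile(aliasfile, alias, cap):
    # append one "<name>: <cap>" line; the alias name is unicode, the cap ASCII
    line = u"%s: %s\n" % (alias, cap)
    f = open(aliasfile, "ab")
    f.write(line.encode("utf-8"))
    f.close()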
Example #7
    def upload(self, childpath):
        precondition(isinstance(childpath, unicode), childpath)

        #self.verboseprint("uploading %s.." % quote_output(childpath))
        metadata = get_local_metadata(childpath)

        # we can use the backupdb here
        must_upload, bdb_results = self.check_backupdb_file(childpath)

        if must_upload:
            self.verboseprint("uploading %s.." % quote_output(childpath))
            infileobj = open(childpath, "rb")
            url = self.options['node-url'] + "uri"
            resp = do_http("PUT", url, infileobj)
            if resp.status not in (200, 201):
                raise HTTPError("Error during file PUT", resp)

            filecap = resp.read().strip()
            self.verboseprint(" %s -> %s" % (quote_output(childpath, quotemarks=False),
                                             quote_output(filecap, quotemarks=False)))
            #self.verboseprint(" metadata: %s" % (quote_output(metadata, quotemarks=False),))

            if bdb_results:
                bdb_results.did_upload(filecap)

            self.files_uploaded += 1
            return filecap, metadata

        else:
            self.verboseprint("skipping %s.." % quote_output(childpath))
            self.files_reused += 1
            return bdb_results.was_uploaded(), metadata
Example #8
def create_alias(options):
    # mkdir+add_alias
    nodedir = options['node-directory']
    alias = options.alias
    stdout = options.stdout
    stderr = options.stderr
    assert ":" not in alias
    assert " " not in alias

    old_aliases = get_aliases(nodedir)
    if alias in old_aliases:
        print >>stderr, "Alias %s already exists!" % quote_output(alias)
        return 1

    aliasfile = os.path.join(nodedir, "private", "aliases")

    nodeurl = options['node-url']
    if not nodeurl.endswith("/"):
        nodeurl += "/"
    url = nodeurl + "uri?t=mkdir"
    resp = do_http("POST", url)
    rc = check_http_error(resp, stderr)
    if rc:
        return rc
    new_uri = resp.read().strip()

    # probably check for others..

    add_line_to_aliasfile(aliasfile, alias, new_uri)

    print >>stdout, "Alias %s created" % (quote_output(alias),)
    return 0
Example #9
def list_aliases(options):
    nodedir = options['node-directory']
    stdout = options.stdout
    stderr = options.stderr

    data = _get_alias_details(nodedir)

    max_width = max([len(quote_output(name)) for name in data.keys()] + [0])
    fmt = "%" + str(max_width) + "s: %s"
    rc = 0

    if options['json']:
        try:
            # XXX why are we presuming utf-8 output?
            print >>stdout, json.dumps(data, indent=4).decode('utf-8')
        except (UnicodeEncodeError, UnicodeDecodeError):
            print >>stderr, json.dumps(data, indent=4)
            rc = 1
    else:
        for name, details in data.items():
            dircap = details['readonly'] if options['readonly-uri'] else details['readwrite']
            try:
                print >>stdout, fmt % (unicode_to_output(name), unicode_to_output(dircap.decode('utf-8')))
            except (UnicodeEncodeError, UnicodeDecodeError):
                print >>stderr, fmt % (quote_output(name), quote_output(dircap))
                rc = 1

    if rc == 1:
        print >>stderr, "\nThis listing included aliases or caps that could not be converted to the terminal" \
                        "\noutput encoding. These are shown using backslash escapes and in quotes."
    return rc
Example #10
    def _unpack_contents(self, data):
        # the directory is serialized as a list of netstrings, one per child.
        # Each child is serialized as a list of four netstrings: (name, ro_uri,
        # rwcapdata, metadata), in which the name, ro_uri, metadata are in
        # cleartext. The 'name' is UTF-8 encoded, and should be normalized to NFC.
        # The rwcapdata is formatted as:
        # pack("16ss32s", iv, AES(H(writekey+iv), plaintext_rw_uri), mac)
        assert isinstance(data, str), (repr(data), type(data))
        # an empty directory is serialized as an empty string
        if data == "":
            return AuxValueDict()
        writeable = not self.is_readonly()
        mutable = self.is_mutable()
        children = AuxValueDict()
        position = 0
        while position < len(data):
            entries, position = split_netstring(data, 1, position)
            entry = entries[0]
            (namex_utf8, ro_uri, rwcapdata, metadata_s), subpos = split_netstring(entry, 4)
            if not mutable and len(rwcapdata) > 0:
                raise ValueError("the rwcapdata field of a dirnode in an immutable directory was not empty")

            # A name containing characters that are unassigned in one version of Unicode might
            # not be normalized wrt a later version. See the note in section 'Normalization Stability'
            # at <http://unicode.org/policies/stability_policy.html>.
            # Therefore we normalize names going both in and out of directories.
            name = normalize(namex_utf8.decode("utf-8"))

            rw_uri = ""
            if writeable:
                rw_uri = self._decrypt_rwcapdata(rwcapdata)

            # Since the encryption uses CTR mode, it currently leaks the length of the
            # plaintext rw_uri -- and therefore whether it is present, i.e. whether the
            # dirnode is writeable (ticket #925). By stripping trailing spaces in
            # Tahoe >= 1.6.0, we may make it easier for future versions to plug this leak.
            # ro_uri is treated in the same way for consistency.
            # rw_uri and ro_uri will be either None or a non-empty string.

            rw_uri = rw_uri.rstrip(' ') or None
            ro_uri = ro_uri.rstrip(' ') or None

            try:
                child = self._create_and_validate_node(rw_uri, ro_uri, name)
                if mutable or child.is_allowed_in_immutable_directory():
                    metadata = simplejson.loads(metadata_s)
                    assert isinstance(metadata, dict)
                    children[name] = (child, metadata)
                    children.set_with_aux(name, (child, metadata), auxilliary=entry)
                else:
                    log.msg(format="mutable cap for child %(name)s unpacked from an immutable directory",
                                   name=quote_output(name, encoding='utf-8'),
                                   facility="tahoe.webish", level=log.UNUSUAL)
            except CapConstraintError, e:
                log.msg(format="unmet constraint on cap for child %(name)s unpacked from a directory:\n"
                               "%(message)s", message=e.args[0], name=quote_output(name, encoding='utf-8'),
                               facility="tahoe.webish", level=log.UNUSUAL)
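For reference, a minimal sketch of the netstring framing that _unpack_contents relies on. The real helpers live in allmydata.util.netstring; these re-implementations are assumptions inferred from the calls above (the real split_netstring accepts additional arguments):

def netstring(s):
    # frame a byte string as "<decimal length>:<data>,"
    return "%d:%s," % (len(s), s)

def split_netstring(data, numstrings, position=0):
    # parse `numstrings` consecutive netstrings from `data` starting at
    # `position`; return (tuple_of_strings, position_after_last_netstring)
    elements = []
    for _ in range(numstrings):
        colon = data.index(":", position)
        length = int(data[position:colon])
        start = colon + 1
        if data[start + length] != ",":
            raise ValueError("malformed netstring")
        elements.append(data[start:start + length])
        position = start + length + 1
    return tuple(elements), position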
Example #11
 def childFactory(self, ctx, name):
     req = IRequest(ctx)
     if isinstance(self.node, ProhibitedNode):
         raise FileProhibited(self.node.reason)
     if should_create_intermediate_directories(req):
         raise WebError("Cannot create directory %s, because its "
                        "parent is a file, not a directory" % quote_output(name, encoding='utf-8'))
     raise WebError("Files have no children, certainly not named %s"
                    % quote_output(name, encoding='utf-8'))
Example #12
def get_alias(aliases, path_unicode, default):
    """
    Transform u"work:path/filename" into (aliases[u"work"], u"path/filename".encode('utf-8')).
    If default=None, then an empty alias is indicated by returning
    DefaultAliasMarker. We special-case strings with a recognized cap URI
    prefix, to make it easy to access specific files/directories by their
    caps.
    If the transformed alias is either not found in aliases, or is blank
    and default is not found in aliases, an UnknownAliasError is
    raised.
    """
    precondition(isinstance(path_unicode, unicode), path_unicode)

    from allmydata import uri
    path = path_unicode.encode('utf-8').strip(" ")
    if uri.has_uri_prefix(path):
        # We used to require "URI:blah:./foo" in order to get a subpath,
        # stripping out the ":./" sequence. We still allow that for compatibility,
        # but now also allow just "URI:blah/foo".
        sep = path.find(":./")
        if sep != -1:
            return path[:sep], path[sep+3:]
        sep = path.find("/")
        if sep != -1:
            return path[:sep], path[sep+1:]
        return path, ""
    colon = path.find(":")
    if colon == -1:
        # no alias
        if default == None:
            return DefaultAliasMarker, path
        if default not in aliases:
            raise UnknownAliasError("No alias specified, and the default %s alias doesn't exist. "
                                    "To create it, use 'tahoe create-alias %s'."
                                    % (quote_output(default), quote_output(default, quotemarks=False)))
        return uri.from_string_dirnode(aliases[default]).to_string(), path
    if colon == 1 and default is None and platform_uses_lettercolon_drivename():
        # treat C:\why\must\windows\be\so\weird as a local path, not a tahoe
        # file in the "C:" alias
        return DefaultAliasMarker, path

    # decoding must succeed because path is valid UTF-8 and colon & space are ASCII
    alias = path[:colon].decode('utf-8')
    if u"/" in alias:
        # no alias, but there's a colon in a dirname/filename, like
        # "foo/bar:7"
        if default == None:
            return DefaultAliasMarker, path
        if default not in aliases:
            raise UnknownAliasError("No alias specified, and the default %s alias doesn't exist. "
                                    "To create it, use 'tahoe create-alias %s'."
                                    % (quote_output(default), quote_output(default, quotemarks=False)))
        return uri.from_string_dirnode(aliases[default]).to_string(), path
    if alias not in aliases:
        raise UnknownAliasError("Unknown alias %s, please create it with 'tahoe add-alias' or 'tahoe create-alias'." %
                                quote_output(alias))
    return uri.from_string_dirnode(aliases[alias]).to_string(), path[colon+1:]
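A few worked inputs for get_alias, assuming aliases = {u"work": ...} with placeholder caps; note that an alias hit is round-tripped through uri.from_string_dirnode, so the returned cap is the normalized directory URI rather than the raw aliases value:

# alias present: split at the first colon
#   get_alias(aliases, u"work:subdir/file.txt", u"tahoe")
#     -> (<normalized cap for aliases[u"work"]>, "subdir/file.txt")
# recognized cap prefix: no alias lookup, split at ":./" or the first "/"
#   get_alias(aliases, u"URI:DIR2:xyz/file.txt", u"tahoe")
#     -> ("URI:DIR2:xyz", "file.txt")
# no colon and default=None: treated as a local path
#   get_alias(aliases, u"plain/file.txt", None)
#     -> (DefaultAliasMarker, "plain/file.txt")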
Example #13
def create_node(config, out=sys.stdout, err=sys.stderr):
    basedir = config['basedir']
    # This should always be called with an absolute Unicode basedir.
    precondition(isinstance(basedir, unicode), basedir)

    if os.path.exists(basedir):
        if listdir_unicode(basedir):
            print >>err, "The base directory %s is not empty." % quote_output(basedir)
            print >>err, "To avoid clobbering anything, I am going to quit now."
            print >>err, "Please use a different directory, or empty this one."
            return -1
        # we're willing to use an empty directory
    else:
        os.mkdir(basedir)
    f = open(os.path.join(basedir, "tahoe-client.tac"), "w")
    f.write(client_tac)
    f.close()

    c = open(os.path.join(basedir, "tahoe.cfg"), "w")

    write_node_config(c, config)

    c.write("[client]\n")
    c.write("introducer.furl = %s\n" % config.get("introducer", ""))
    c.write("helper.furl =\n")
    c.write("#key_generator.furl =\n")
    c.write("#stats_gatherer.furl =\n")
    c.write("#shares.needed = 3\n")
    c.write("#shares.happy = 7\n")
    c.write("#shares.total = 10\n")
    c.write("\n")

    boolstr = {True:"true", False:"false"}
    c.write("[storage]\n")
    storage_enabled = not config.get("no-storage", None)
    c.write("enabled = %s\n" % boolstr[storage_enabled])
    c.write("#readonly =\n")
    c.write("#reserved_space =\n")
    c.write("#expire.enabled =\n")
    c.write("#expire.mode =\n")
    c.write("\n")

    c.write("[helper]\n")
    c.write("enabled = false\n")
    c.write("\n")

    c.close()

    from allmydata.util import fileutil
    fileutil.make_dirs(os.path.join(basedir, "private"), 0700)
    print >>out, "Node created in %s" % quote_output(basedir)
    if not config.get("introducer", ""):
        print >>out, " Please set [client]introducer.furl= in tahoe.cfg!"
        print >>out, " The node cannot connect to a grid without it."
    if not config.get("nickname", ""):
        print >>out, " Please set [node]nickname= in tahoe.cfg"
    return 0
Example #14
def describe_share(abs_sharefile, si_s, shnum_s, now, out):
    from allmydata import uri
    from allmydata.storage.mutable import MutableShareFile
    from allmydata.storage.immutable import ShareFile
    from allmydata.mutable.layout import unpack_share
    from allmydata.mutable.common import NeedMoreDataError
    from allmydata.immutable.layout import ReadBucketProxy
    from allmydata.util import base32
    from allmydata.util.encodingutil import quote_output
    import struct

    f = open(abs_sharefile, "rb")
    prefix = f.read(32)

    if prefix == MutableShareFile.MAGIC:
        # mutable share
        m = MutableShareFile(abs_sharefile)
        WE, nodeid = m._read_write_enabler_and_nodeid(f)
        data_length = m._read_data_length(f)
        expiration_time = min( [lease.expiration_time
                                for (i,lease) in m._enumerate_leases(f)] )
        expiration = max(0, expiration_time - now)

        share_type = "unknown"
        f.seek(m.DATA_OFFSET)
        if f.read(1) == "\x00":
            # this slot contains an SDMF share
            share_type = "SDMF"

        if share_type == "SDMF":
            f.seek(m.DATA_OFFSET)
            data = f.read(min(data_length, 2000))

            try:
                pieces = unpack_share(data)
            except NeedMoreDataError, e:
                # retry once with the larger size
                size = e.needed_bytes
                f.seek(m.DATA_OFFSET)
                data = f.read(min(data_length, size))
                pieces = unpack_share(data)
            (seqnum, root_hash, IV, k, N, segsize, datalen,
             pubkey, signature, share_hash_chain, block_hash_tree,
             share_data, enc_privkey) = pieces

            print >>out, "SDMF %s %d/%d %d #%d:%s %d %s" % \
                  (si_s, k, N, datalen,
                   seqnum, base32.b2a(root_hash),
                   expiration, quote_output(abs_sharefile))
        else:
            print >>out, "UNKNOWN mutable %s" % quote_output(abs_sharefile)
Example #15
    def lineReceived(self, line):
        stdout = self.options.stdout
        stderr = self.options.stderr
        if self.in_error:
            print >>stderr, quote_output(line, quotemarks=False)
            return
        if line.startswith("ERROR:"):
            self.in_error = True
            self.rc = 1
            print >>stderr, quote_output(line, quotemarks=False)
            return

        try:
            d = json.loads(line.decode('utf-8'))
        except Exception, e:
            print >>stderr, "ERROR could not decode/parse %s\nERROR  %r" % (quote_output(line), e)
Example #16
def catalog_shares(options):
    from allmydata.util.encodingutil import listdir_unicode, quote_output

    out = options.stdout
    err = options.stderr
    now = time.time()
    for d in options.nodedirs:
        d = os.path.join(d, "storage/shares")
        try:
            abbrevs = listdir_unicode(d)
        except EnvironmentError:
            # ignore nodes that have storage turned off altogether
            pass
        else:
            for abbrevdir in sorted(abbrevs):
                if abbrevdir == "incoming":
                    continue
                abbrevdir = os.path.join(d, abbrevdir)
                # this tool may get run against bad disks, so we can't assume
                # that listdir_unicode will always succeed. Try to catalog as much
                # as possible.
                try:
                    sharedirs = listdir_unicode(abbrevdir)
                    for si_s in sorted(sharedirs):
                        si_dir = os.path.join(abbrevdir, si_s)
                        catalog_shares_one_abbrevdir(si_s, si_dir, now, out,
                                                     err)
                except:
                    print >> err, "Error processing %s" % quote_output(
                        abbrevdir)
                    failure.Failure().printTraceback(err)

    return 0
Example #17
    def get_or_create_private_config(self, name, default=_None):
        """Try to get the (string) contents of a private config file (which
        is a config file that resides within the subdirectory named
        'private'), and return it. Any leading or trailing whitespace will be
        stripped from the data.

        If the file does not exist, and default is not given, report an error.
        If the file does not exist and a default is specified, try to create
        it using that default, and then return the value that was written.
        If 'default' is a string, use it as a default value. If not, treat it
        as a zero-argument callable that is expected to return a string.
        """
        privname = os.path.join(self._basedir, "private", name)
        try:
            value = fileutil.read(privname, mode="r")
        except EnvironmentError as e:
            if e.errno != errno.ENOENT:
                raise  # we only care about "file doesn't exist"
            if default is _None:
                raise MissingConfigEntry(
                    "The required configuration file %s is missing." %
                    (quote_output(privname), ))
            if isinstance(default, bytes):
                default = str(default, "utf-8")
            if isinstance(default, str):
                value = default
            else:
                value = default()
            fileutil.write(privname, value)
        return value.strip()
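A usage sketch under assumed names (the node object, config names, and generate_token helper are hypothetical, for illustration only):

# a callable default is invoked and its result written on first access
token = node.get_or_create_private_config("api_token", default=generate_token)
# a plain string default is written verbatim
mode = node.get_or_create_private_config("mode", default="normal")
# no default: raises MissingConfigEntry if private/api_token is absent
token = node.get_or_create_private_config("api_token")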
Example #18
def catalog_shares_one_abbrevdir(si_s, si_dir, now, out, err):
    from allmydata.util.encodingutil import listdir_unicode, quote_output

    try:
        for shnum_s in sorted(listdir_unicode(si_dir), key=_as_number):
            abs_sharefile = os.path.join(si_dir, shnum_s)
            assert os.path.isfile(abs_sharefile)
            try:
                describe_share(abs_sharefile, si_s, shnum_s, now, out)
            except:
                print >> err, "Error processing %s" % quote_output(
                    abs_sharefile)
                failure.Failure().printTraceback(err)
    except:
        print >> err, "Error processing %s" % quote_output(si_dir)
        failure.Failure().printTraceback(err)
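A plausible sketch of the _as_number sort key used above; the exact helper is an assumption inferred from its use, where it makes share numbers sort numerically:

def _as_number(s):
    # "10" should sort after "9"; fall back to the raw string for
    # unexpected names (Python 2 tolerates mixed-type comparisons)
    try:
        return int(s)
    except ValueError:
        return s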
Example #19
    def modify(self, old_contents, servermap, first_time):
        children = self.node._unpack_contents(old_contents)
        now = time.time()
        for (namex, (child, new_metadata)) in self.entries.iteritems():
            name = normalize(namex)
            precondition(IFilesystemNode.providedBy(child), child)

            # Strictly speaking this is redundant because we would raise the
            # error again in _pack_normalized_children.
            child.raise_error()

            metadata = None
            if name in children:
                if not self.overwrite:
                    raise ExistingChildError("child %s already exists" % quote_output(name, encoding='utf-8'))

                if self.overwrite == "only-files" and IDirectoryNode.providedBy(children[name][0]):
                    raise ExistingChildError("child %s already exists as a directory" % quote_output(name, encoding='utf-8'))
                metadata = children[name][1].copy()

            metadata = update_metadata(metadata, new_metadata, now)
            if self.create_readonly_node and metadata.get('no-write', False):
                child = self.create_readonly_node(child, name)

            children[name] = (child, metadata)
        new_contents = self.node._pack_contents(children)
        return new_contents
Example #20
def runner(argv,
           run_by_human=True,
           stdin=None, stdout=None, stderr=None,
           install_node_control=True, additional_commands=None):

    stdin  = stdin  or sys.stdin
    stdout = stdout or sys.stdout
    stderr = stderr or sys.stderr

    config = Options()
    if install_node_control:
        config.subCommands.extend(startstop_node.subCommands)

    ac_dispatch = {}
    if additional_commands:
        for ac in additional_commands:
            config.subCommands.extend(ac.subCommands)
            ac_dispatch.update(ac.dispatch)

    try:
        config.parseOptions(argv)
    except usage.error, e:
        if not run_by_human:
            raise
        c = config
        while hasattr(c, 'subOptions'):
            c = c.subOptions
        print >>stdout, str(c)
        try:
            msg = e.args[0].decode(get_argv_encoding())
        except Exception:
            msg = repr(e)
        print >>stdout, "%s:  %s\n" % (sys.argv[0], quote_output(msg, quotemarks=False))
        return 1
Example #21
    def check_backupdb_file(self, childpath):
        if not self.backupdb:
            return True, None
        use_timestamps = not self.options["ignore-timestamps"]
        r = self.backupdb.check_file(childpath, use_timestamps)

        if not r.was_uploaded():
            return True, r

        if not r.should_check():
            # the file was uploaded or checked recently, so we can just use
            # it
            return False, r

        # we must check the file before using the results
        filecap = r.was_uploaded()
        self.verboseprint("checking %s" % quote_output(filecap))
        nodeurl = self.options['node-url']
        checkurl = nodeurl + "uri/%s?t=check&output=JSON" % urllib.quote(filecap)
        self.files_checked += 1
        resp = do_http("POST", checkurl)
        if resp.status != 200:
            # can't check, so we must assume it's bad
            return True, r

        cr = simplejson.loads(resp.read())
        healthy = cr["results"]["healthy"]
        if not healthy:
            # must upload
            return True, r
        # file is healthy, no need to upload
        r.did_check_healthy(cr)
        return False, r
Example #22
def create_stats_gatherer(config, out=sys.stdout, err=sys.stderr):
    basedir = config['basedir']
    # This should always be called with an absolute Unicode basedir.
    precondition(isinstance(basedir, unicode), basedir)

    if os.path.exists(basedir):
        if listdir_unicode(basedir):
            print >>err, "The base directory %s is not empty." % quote_output(basedir)
            print >>err, "To avoid clobbering anything, I am going to quit now."
            print >>err, "Please use a different directory, or empty this one."
            return -1
        # we're willing to use an empty directory
    else:
        os.mkdir(basedir)
    write_tac(basedir, "stats-gatherer")
    if config["hostname"]:
        portnum = iputil.allocate_tcp_port()
        location = "tcp:%s:%d" % (config["hostname"], portnum)
        port = "tcp:%d" % portnum
    else:
        location = config["location"]
        port = config["port"]
    fileutil.write(os.path.join(basedir, "location"), location+"\n")
    fileutil.write(os.path.join(basedir, "port"), port+"\n")
    return 0
Example #23
    def check_backupdb_file(self, childpath):
        if not self.backupdb:
            return True, None
        use_timestamps = not self.options["ignore-timestamps"]
        r = self.backupdb.check_file(childpath, use_timestamps)

        if not r.was_uploaded():
            return True, r

        if not r.should_check():
            # the file was uploaded or checked recently, so we can just use
            # it
            return False, r

        # we must check the file before using the results
        filecap = r.was_uploaded()
        self.verboseprint("checking %s" % quote_output(filecap))
        nodeurl = self.options['node-url']
        checkurl = nodeurl + "uri/%s?t=check&output=JSON" % url_quote(filecap)
        self._files_checked += 1
        resp = do_http("POST", checkurl)
        if resp.status != 200:
            # can't check, so we must assume it's bad
            return True, r

        cr = json.loads(resp.read())
        healthy = cr["results"]["healthy"]
        if not healthy:
            # must upload
            return True, r
        # file is healthy, no need to upload
        r.did_check_healthy(cr)
        return False, r
Example #24
def catalog_shares_one_abbrevdir(si_s, si_dir, now, out, err):
    from allmydata.util.encodingutil import listdir_unicode, quote_output

    try:
        for shnum_s in sorted(listdir_unicode(si_dir), key=_as_number):
            abs_sharefile = os.path.join(si_dir, shnum_s)
            assert os.path.isfile(abs_sharefile)
            try:
                describe_share(abs_sharefile, si_s, shnum_s, now,
                               out)
            except:
                print >>err, "Error processing %s" % quote_output(abs_sharefile)
                failure.Failure().printTraceback(err)
    except:
        print >>err, "Error processing %s" % quote_output(si_dir)
        failure.Failure().printTraceback(err)
Example #25
    def check_backupdb_directory(self, compare_contents):
        if not self.backupdb:
            return True, None
        r = self.backupdb.check_directory(compare_contents)

        if not r.was_created():
            return True, r

        if not r.should_check():
            # the file was uploaded or checked recently, so we can just use
            # it
            return False, r

        # we must check the directory before re-using it
        dircap = r.was_created()
        self.verboseprint("checking %s" % quote_output(dircap))
        nodeurl = self.options['node-url']
        checkurl = nodeurl + "uri/%s?t=check&output=JSON" % url_quote(dircap)
        self._directories_checked += 1
        resp = do_http("POST", checkurl)
        if resp.status != 200:
            # can't check, so we must assume it's bad
            return True, r

        cr = json.loads(resp.read())
        healthy = cr["results"]["healthy"]
        if not healthy:
            # must create
            return True, r
        # directory is healthy, no need to upload
        r.did_check_healthy(cr)
        return False, r
Example #26
    def upload(self, childpath):
        precondition_abspath(childpath)

        #self.verboseprint("uploading %s.." % quote_local_unicode_path(childpath))
        metadata = get_local_metadata(childpath)

        # we can use the backupdb here
        must_upload, bdb_results = self.check_backupdb_file(childpath)

        if must_upload:
            self.verboseprint("uploading %s.." %
                              quote_local_unicode_path(childpath))
            infileobj = open(childpath, "rb")
            url = self.options['node-url'] + "uri"
            resp = do_http("PUT", url, infileobj)
            if resp.status not in (200, 201):
                raise HTTPError("Error during file PUT", resp)

            filecap = resp.read().strip()
            self.verboseprint(
                " %s -> %s" %
                (quote_local_unicode_path(childpath, quotemarks=False),
                 quote_output(filecap, quotemarks=False)))
            #self.verboseprint(" metadata: %s" % (quote_output(metadata, quotemarks=False),))

            if bdb_results:
                bdb_results.did_upload(filecap)

            return True, filecap, metadata

        else:
            self.verboseprint("skipping %s.." %
                              quote_local_unicode_path(childpath))
            return False, bdb_results.was_uploaded(), metadata
Example #27
        def _check_create_unicode((rc, out, err)):
            self.failUnlessReallyEqual(rc, 0)
            self.failUnlessReallyEqual(err, "")
            self.failUnlessIn("Alias %s created" % quote_output(u"\u00E9tudes"), out)

            aliases = get_aliases(self.get_clientdir())
            self.failUnless(aliases[u"\u00E9tudes"].startswith("URI:DIR2:"))
Example #28
    def lineReceived(self, line):
        stdout = self.options.stdout
        stderr = self.options.stderr
        if self.in_error:
            print >>stderr, quote_output(line, quotemarks=False)
            return
        if line.startswith("ERROR:"):
            self.in_error = True
            self.rc = 1
            print >>stderr, quote_output(line, quotemarks=False)
            return

        try:
            d = simplejson.loads(line.decode('utf-8'))
        except Exception, e:
            print >>stderr, "ERROR could not decode/parse %s\nERROR  %r" % (quote_output(line), e)
Example #29
def runner(argv,
           run_by_human=True,
           stdin=None, stdout=None, stderr=None,
           install_node_control=True, additional_commands=None):

    stdin  = stdin  or sys.stdin
    stdout = stdout or sys.stdout
    stderr = stderr or sys.stderr

    config = Options()
    if install_node_control:
        config.subCommands.extend(startstop_node.subCommands)

    ac_dispatch = {}
    if additional_commands:
        for ac in additional_commands:
            config.subCommands.extend(ac.subCommands)
            ac_dispatch.update(ac.dispatch)

    try:
        config.parseOptions(argv)
    except usage.error, e:
        if not run_by_human:
            raise
        c = config
        while hasattr(c, 'subOptions'):
            c = c.subOptions
        print >>stdout, str(c)
        try:
            msg = e.args[0].decode(get_io_encoding())
        except Exception:
            msg = repr(e)
        print >>stdout, "%s:  %s\n" % (sys.argv[0], quote_output(msg, quotemarks=False))
        return 1
Example #30
def stop(config, out=sys.stdout, err=sys.stderr):
    basedir = config['basedir']
    print >>out, "STOPPING", quote_output(basedir)
    pidfile = os.path.join(basedir, "twistd.pid")
    if not os.path.exists(pidfile):
        print >>err, "%s does not look like a running node directory (no twistd.pid)" % quote_output(basedir)
        # we define rc=2 to mean "nothing is running, but it wasn't me who
        # stopped it"
        return 2
    pid = open(pidfile, "r").read()
    pid = int(pid)

    # kill it hard (SIGKILL), delete the twistd.pid file, then wait for the
    # process itself to go away. If it hasn't gone away after 20 seconds, warn
    # the user but keep waiting until they give up.
    try:
        os.kill(pid, signal.SIGKILL)
    except OSError, oserr:
        if oserr.errno == 3:
            print oserr.strerror
            # the process didn't exist, so wipe the pid file
            os.remove(pidfile)
            return 2
        else:
            raise
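The comment above promises to delete the pid file and keep waiting after the SIGKILL, which this excerpt does not show; a minimal sketch of such a continuation, with timings and messages that are assumptions rather than the project's code:

    # poll until the killed process disappears; warn after ~20 seconds
    os.remove(pidfile)
    start = time.time()
    while True:
        try:
            os.kill(pid, 0)   # signal 0 only probes whether the pid exists
        except OSError:
            return 0          # process is gone
        if time.time() - start > 20:
            print >>err, "pid %d still running, waiting..." % pid
            start = time.time()   # repeat the warning every 20 seconds
        time.sleep(0.5)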
Example #31
def catalog_shares(options):
    from allmydata.util.encodingutil import listdir_unicode, quote_output

    out = options.stdout
    err = options.stderr
    now = time.time()
    for d in options.nodedirs:
        d = os.path.join(d, "storage/shares")
        try:
            abbrevs = listdir_unicode(d)
        except EnvironmentError:
            # ignore nodes that have storage turned off altogether
            pass
        else:
            for abbrevdir in sorted(abbrevs):
                if abbrevdir == "incoming":
                    continue
                abbrevdir = os.path.join(d, abbrevdir)
                # this tool may get run against bad disks, so we can't assume
                # that listdir_unicode will always succeed. Try to catalog as much
                # as possible.
                try:
                    sharedirs = listdir_unicode(abbrevdir)
                    for si_s in sorted(sharedirs):
                        si_dir = os.path.join(abbrevdir, si_s)
                        catalog_shares_one_abbrevdir(si_s, si_dir, now, out,err)
                except:
                    print >>err, "Error processing %s" % quote_output(abbrevdir)
                    failure.Failure().printTraceback(err)

    return 0
Example #32
    def check_backupdb_directory(self, compare_contents):
        if not self.backupdb:
            return True, None
        r = self.backupdb.check_directory(compare_contents)

        if not r.was_created():
            return True, r

        if not r.should_check():
            # the file was uploaded or checked recently, so we can just use
            # it
            return False, r

        # we must check the directory before re-using it
        dircap = r.was_created()
        self.verboseprint("checking %s" % quote_output(dircap))
        nodeurl = self.options['node-url']
        checkurl = nodeurl + "uri/%s?t=check&output=JSON" % urllib.quote(dircap)
        self.directories_checked += 1
        resp = do_http("POST", checkurl)
        if resp.status != 200:
            # can't check, so we must assume it's bad
            return True, r

        cr = simplejson.loads(resp.read())
        healthy = cr["results"]["healthy"]
        if not healthy:
            # must create
            return True, r
        # directory is healthy, no need to upload
        r.did_check_healthy(cr)
        return False, r
Example #33
def create_stats_gatherer(config):
    err = config.stderr
    basedir = config['basedir']
    # This should always be called with an absolute Unicode basedir.
    precondition(isinstance(basedir, unicode), basedir)

    if os.path.exists(basedir):
        if listdir_unicode(basedir):
            print("The base directory %s is not empty." % quote_output(basedir), file=err)
            print("To avoid clobbering anything, I am going to quit now.", file=err)
            print("Please use a different directory, or empty this one.", file=err)
            return -1
        # we're willing to use an empty directory
    else:
        os.mkdir(basedir)
    write_tac(basedir, "stats-gatherer")
    if config["hostname"]:
        portnum = iputil.allocate_tcp_port()
        location = "tcp:%s:%d" % (config["hostname"], portnum)
        port = "tcp:%d" % portnum
    else:
        location = config["location"]
        port = config["port"]
    fileutil.write(os.path.join(basedir, "location"), location+"\n")
    fileutil.write(os.path.join(basedir, "port"), port+"\n")
    return 0
Example #34
def stop(config, out=sys.stdout, err=sys.stderr):
    basedir = config['basedir']
    print >>out, "STOPPING", quote_output(basedir)
    pidfile = os.path.join(basedir, "twistd.pid")
    if not os.path.exists(pidfile):
        print >>err, "%s does not look like a running node directory (no twistd.pid)" % quote_output(basedir)
        # we define rc=2 to mean "nothing is running, but it wasn't me who
        # stopped it"
        return 2
    pid = open(pidfile, "r").read()
    pid = int(pid)

    # kill it hard (SIGKILL), delete the twistd.pid file, then wait for the
    # process itself to go away. If it hasn't gone away after 20 seconds, warn
    # the user but keep waiting until they give up.
    try:
        os.kill(pid, signal.SIGKILL)
    except OSError, oserr:
        if oserr.errno == 3:
            print oserr.strerror
            # the process didn't exist, so wipe the pid file
            os.remove(pidfile)
            return 2
        else:
            raise
Example #35
    def get_or_create_private_config(self, name, default=_None):
        """Try to get the (string) contents of a private config file (which
        is a config file that resides within the subdirectory named
        'private'), and return it. Any leading or trailing whitespace will be
        stripped from the data.

        If the file does not exist, and default is not given, report an error.
        If the file does not exist and a default is specified, try to create
        it using that default, and then return the value that was written.
        If 'default' is a string, use it as a default value. If not, treat it
        as a zero-argument callable that is expected to return a string.
        """
        privname = os.path.join(self.basedir, "private", name)
        try:
            value = fileutil.read(privname)
        except EnvironmentError:
            if os.path.exists(privname):
                raise
            if default is _None:
                raise MissingConfigEntry("The required configuration file %s is missing."
                                         % (quote_output(privname),))
            if isinstance(default, basestring):
                value = default
            else:
                value = default()
            fileutil.write(privname, value)
        return value.strip()
Example #36
def _pack_normalized_children(children, writekey, deep_immutable=False):
    """Take a dict that maps:
         children[unicode_nfc_name] = (IFilesystemNode, metadata_dict)
    and pack it into a single string, for use as the contents of the backing
    file. This is the same format as is returned by _unpack_contents. I also
    accept an AuxValueDict, in which case I'll use the auxiliary cached data
    as the pre-packed entry, which is faster than re-packing everything each
    time.

    If writekey is provided then I will superencrypt the child's writecap with
    writekey.

    If deep_immutable is True, I will require that all my children are deeply
    immutable, and will raise a MustBeDeepImmutableError if not.
    """
    precondition((writekey is None) or isinstance(writekey, str), writekey)

    has_aux = isinstance(children, AuxValueDict)
    entries = []
    for name in sorted(children.keys()):
        assert isinstance(name, unicode)
        entry = None
        (child, metadata) = children[name]
        child.raise_error()
        if deep_immutable and not child.is_allowed_in_immutable_directory():
            raise MustBeDeepImmutableError(
                "child %s is not allowed in an immutable directory" % quote_output(name, encoding="utf-8"), name
            )
        if has_aux:
            entry = children.get_aux(name)
        if not entry:
            assert IFilesystemNode.providedBy(child), (name, child)
            assert isinstance(metadata, dict)
            rw_uri = child.get_write_uri()
            if rw_uri is None:
                rw_uri = ""
            assert isinstance(rw_uri, str), rw_uri

            # should be prevented by MustBeDeepImmutableError check above
            assert not (rw_uri and deep_immutable)

            ro_uri = child.get_readonly_uri()
            if ro_uri is None:
                ro_uri = ""
            assert isinstance(ro_uri, str), ro_uri
            if writekey is not None:
                writecap = netstring(_encrypt_rw_uri(writekey, rw_uri))
            else:
                writecap = ZERO_LEN_NETSTR
            entry = "".join(
                [
                    netstring(name.encode("utf-8")),
                    netstring(strip_prefix_for_ro(ro_uri, deep_immutable)),
                    writecap,
                    netstring(simplejson.dumps(metadata)),
                ]
            )
        entries.append(netstring(entry))
    return "".join(entries)
Example #37
    def try_copy(self):
        source_specs = self.options.sources
        destination_spec = self.options.destination
        recursive = self.options["recursive"]

        target = self.get_target_info(destination_spec)

        sources = []  # list of (name, source object)
        for ss in source_specs:
            name, source = self.get_source_info(ss)
            sources.append((name, source))

        have_source_dirs = bool([
            s for (name, s) in sources
            if isinstance(s, (LocalDirectorySource, TahoeDirectorySource))
        ])

        if have_source_dirs and not recursive:
            self.to_stderr("cannot copy directories without --recursive")
            return 1

        if isinstance(target, (LocalFileTarget, TahoeFileTarget)):
            # cp STUFF foo.txt, where foo.txt already exists. This limits the
            # possibilities considerably.
            if len(sources) > 1:
                self.to_stderr("target %s is not a directory" %
                               quote_output(destination_spec))
                return 1
            if have_source_dirs:
                self.to_stderr("cannot copy directory into a file")
                return 1
            name, source = sources[0]
            return self.copy_file(source, target)

        if isinstance(target, (LocalMissingTarget, TahoeMissingTarget)):
            if recursive:
                return self.copy_to_directory(sources, target)
            if len(sources) > 1:
                # if we have -r, we'll auto-create the target directory. Without
                # it, we'll only create a file.
                self.to_stderr(
                    "cannot copy multiple files into a file without -r")
                return 1
            # cp file1 newfile
            name, source = sources[0]
            return self.copy_file(source, target)

        if isinstance(target, (LocalDirectoryTarget, TahoeDirectoryTarget)):
            # We're copying to an existing directory -- make sure that we
            # have target names for everything
            for (name, source) in sources:
                if name is None and isinstance(source, TahoeFileSource):
                    self.to_stderr(
                        "error: you must specify a destination filename")
                    return 1
            return self.copy_to_directory(sources, target)

        self.to_stderr("unknown target")
        return 1
Example #38
        def _check_create_unicode((rc, out, err)):
            self.failUnlessReallyEqual(rc, 0)
            self.failUnlessReallyEqual(err, "")
            self.failUnlessIn(
                "Alias %s created" % quote_output(u"\u00E9tudes"), out)

            aliases = get_aliases(self.get_clientdir())
            self.failUnless(aliases[u"\u00E9tudes"].startswith("URI:DIR2:"))
Example #39
    def get_source_info(self, source_spec):
        """
        This turns an argv string into a (Local|Tahoe)(File|Directory)Source.
        """
        precondition(isinstance(source_spec, unicode), source_spec)
        rootcap, path_utf8 = get_alias(self.aliases, source_spec, None)
        path = path_utf8.decode("utf-8")
        if rootcap == DefaultAliasMarker:
            # no alias, so this is a local file
            pathname = abspath_expanduser_unicode(path)
            name = os.path.basename(pathname)
            if not os.path.exists(pathname):
                raise MissingSourceError(source_spec,
                                         quotefn=quote_local_unicode_path)
            if os.path.isdir(pathname):
                t = LocalDirectorySource(self.progress, pathname, name)
            else:
                assert os.path.isfile(pathname)
                t = LocalFileSource(pathname, name)  # non-empty
        else:
            # this is a tahoe object
            url = self.nodeurl + "uri/%s" % urllib.quote(rootcap)
            name = None
            if path:
                url += "/" + escape_path(path)
                last_slash = path.rfind(u"/")
                name = path
                if last_slash != -1:
                    name = path[last_slash + 1:]

            resp = do_http("GET", url + "?t=json")
            if resp.status == 404:
                raise MissingSourceError(source_spec)
            elif resp.status != 200:
                raise HTTPError(
                    "Error examining source %s" % quote_output(source_spec),
                    resp)
            parsed = simplejson.loads(resp.read())
            nodetype, d = parsed
            if nodetype == "dirnode":
                t = TahoeDirectorySource(self.nodeurl, self.cache,
                                         self.progress, name)
                t.init_from_parsed(parsed)
            else:
                writecap = to_str(d.get("rw_uri"))
                readcap = to_str(d.get("ro_uri"))
                mutable = d.get("mutable",
                                False)  # older nodes don't provide it

                last_slash = source_spec.rfind(u"/")
                if last_slash != -1:
                    # TODO: this looks funny and redundant with the 'name'
                    # assignment above. cf #2329
                    name = source_spec[last_slash + 1:]

                t = TahoeFileSource(self.nodeurl, mutable, writecap, readcap,
                                    name)
        return t
Example #40
    def get_source_info(self, source_spec):
        """
        This turns an argv string into a (Local|Tahoe)(File|Directory)Source.
        """
        precondition(isinstance(source_spec, unicode), source_spec)
        rootcap, path_utf8 = get_alias(self.aliases, source_spec, None)
        path = path_utf8.decode("utf-8")
        # any trailing slash is removed in abspath_expanduser_unicode(), so
        # make a note of it here, to throw an error later
        had_trailing_slash = path.endswith("/")
        if rootcap == DefaultAliasMarker:
            # no alias, so this is a local file
            pathname = abspath_expanduser_unicode(path)
            name = os.path.basename(pathname)
            if not os.path.exists(pathname):
                raise MissingSourceError(source_spec, quotefn=quote_local_unicode_path)
            if os.path.isdir(pathname):
                t = LocalDirectorySource(self.progress, pathname, name)
            else:
                if had_trailing_slash:
                    raise FilenameWithTrailingSlashError(source_spec,
                                                         quotefn=quote_local_unicode_path)
                if not os.path.isfile(pathname):
                    raise WeirdSourceError(pathname)
                t = LocalFileSource(pathname, name) # non-empty
        else:
            # this is a tahoe object
            url = self.nodeurl + "uri/%s" % urllib.quote(rootcap)
            name = None
            if path:
                if path.endswith("/"):
                    path = path[:-1]
                url += "/" + escape_path(path)
                last_slash = path.rfind(u"/")
                name = path
                if last_slash != -1:
                    name = path[last_slash+1:]

            resp = do_http("GET", url + "?t=json")
            if resp.status == 404:
                raise MissingSourceError(source_spec)
            elif resp.status != 200:
                raise HTTPError("Error examining source %s" % quote_output(source_spec),
                                resp)
            parsed = json.loads(resp.read())
            nodetype, d = parsed
            if nodetype == "dirnode":
                t = TahoeDirectorySource(self.nodeurl, self.cache,
                                         self.progress, name)
                t.init_from_parsed(parsed)
            else:
                if had_trailing_slash:
                    raise FilenameWithTrailingSlashError(source_spec)
                writecap = to_str(d.get("rw_uri"))
                readcap = to_str(d.get("ro_uri"))
                mutable = d.get("mutable", False) # older nodes don't provide it
                t = TahoeFileSource(self.nodeurl, mutable, writecap, readcap, name)
        return t
Example #41
    def get_source_info(self, source_spec):
        """
        This turns an argv string into a (Local|Tahoe)(File|Directory)Source.
        """
        precondition(isinstance(source_spec, unicode), source_spec)
        rootcap, path_utf8 = get_alias(self.aliases, source_spec, None)
        path = path_utf8.decode("utf-8")
        # any trailing slash is removed in abspath_expanduser_unicode(), so
        # make a note of it here, to throw an error later
        had_trailing_slash = path.endswith("/")
        if rootcap == DefaultAliasMarker:
            # no alias, so this is a local file
            pathname = abspath_expanduser_unicode(path)
            name = os.path.basename(pathname)
            if not os.path.exists(pathname):
                raise MissingSourceError(source_spec, quotefn=quote_local_unicode_path)
            if os.path.isdir(pathname):
                t = LocalDirectorySource(self.progress, pathname, name)
            else:
                if had_trailing_slash:
                    raise FilenameWithTrailingSlashError(source_spec,
                                                         quotefn=quote_local_unicode_path)
                if not os.path.isfile(pathname):
                    raise WeirdSourceError(pathname)
                t = LocalFileSource(pathname, name) # non-empty
        else:
            # this is a tahoe object
            url = self.nodeurl + "uri/%s" % urllib.quote(rootcap)
            name = None
            if path:
                if path.endswith("/"):
                    path = path[:-1]
                url += "/" + escape_path(path)
                last_slash = path.rfind(u"/")
                name = path
                if last_slash != -1:
                    name = path[last_slash+1:]

            resp = do_http("GET", url + "?t=json")
            if resp.status == 404:
                raise MissingSourceError(source_spec)
            elif resp.status != 200:
                raise HTTPError("Error examining source %s" % quote_output(source_spec),
                                resp)
            parsed = simplejson.loads(resp.read())
            nodetype, d = parsed
            if nodetype == "dirnode":
                t = TahoeDirectorySource(self.nodeurl, self.cache,
                                         self.progress, name)
                t.init_from_parsed(parsed)
            else:
                if had_trailing_slash:
                    raise FilenameWithTrailingSlashError(source_spec)
                writecap = to_str(d.get("rw_uri"))
                readcap = to_str(d.get("ro_uri"))
                mutable = d.get("mutable", False) # older nodes don't provide it
                t = TahoeFileSource(self.nodeurl, mutable, writecap, readcap, name)
        return t
Example #42
    def lineReceived(self, line):
        stdout = self.options.stdout
        stderr = self.options.stderr
        if self.in_error:
            print(quote_output(line, quotemarks=False), file=stderr)
            return
        if line.startswith("ERROR:"):
            self.in_error = True
            self.rc = 1
            print(quote_output(line, quotemarks=False), file=stderr)
            return

        try:
            d = json.loads(line.decode('utf-8'))
        except Exception as e:
            print("ERROR could not decode/parse %s\nERROR  %r" % (quote_output(line), e), file=stderr)
        else:
            if d["type"] in ("file", "directory"):
                if self.options["storage-index"]:
                    si = d.get("storage-index", None)
                    if si:
                        print(quote_output(si, quotemarks=False), file=stdout)
                elif self.options["verify-cap"]:
                    vc = d.get("verifycap", None)
                    if vc:
                        print(quote_output(vc, quotemarks=False), file=stdout)
                elif self.options["repair-cap"]:
                    vc = d.get("repaircap", None)
                    if vc:
                        print(quote_output(vc, quotemarks=False), file=stdout)
                else:
                    print("%s %s" % (quote_output(d["cap"], quotemarks=False),
                                               quote_path(d["path"], quotemarks=False)), file=stdout)
Example #43
0
def _pack_normalized_children(children, writekey, deep_immutable=False):
    """Take a dict that maps:
         children[unicode_nfc_name] = (IFilesystemNode, metadata_dict)
    and pack it into a single string, for use as the contents of the backing
    file. This is the same format as is returned by _unpack_contents. I also
    accept an AuxValueDict, in which case I'll use the auxiliary cached data
    as the pre-packed entry, which is faster than re-packing everything each
    time.

    If writekey is provided then I will superencrypt the child's writecap with
    writekey.

    If deep_immutable is True, I will require that all my children are deeply
    immutable, and will raise a MustBeDeepImmutableError if not.
    """
    precondition((writekey is None) or isinstance(writekey, str), writekey)

    has_aux = isinstance(children, AuxValueDict)
    entries = []
    for name in sorted(children.keys()):
        assert isinstance(name, unicode)
        entry = None
        (child, metadata) = children[name]
        child.raise_error()
        if deep_immutable and not child.is_allowed_in_immutable_directory():
            raise MustBeDeepImmutableError(
                "child %s is not allowed in an immutable directory" %
                quote_output(name, encoding='utf-8'), name)
        if has_aux:
            entry = children.get_aux(name)
        if not entry:
            assert IFilesystemNode.providedBy(child), (name, child)
            assert isinstance(metadata, dict)
            rw_uri = child.get_write_uri()
            if rw_uri is None:
                rw_uri = ""
            assert isinstance(rw_uri, str), rw_uri

            # should be prevented by MustBeDeepImmutableError check above
            assert not (rw_uri and deep_immutable)

            ro_uri = child.get_readonly_uri()
            if ro_uri is None:
                ro_uri = ""
            assert isinstance(ro_uri, str), ro_uri
            if writekey is not None:
                writecap = netstring(_encrypt_rw_uri(writekey, rw_uri))
            else:
                writecap = ZERO_LEN_NETSTR
            entry = "".join([
                netstring(name.encode("utf-8")),
                netstring(strip_prefix_for_ro(ro_uri, deep_immutable)),
                writecap,
                netstring(json.dumps(metadata))
            ])
        entries.append(netstring(entry))
    return "".join(entries)
Example #44
0
    def try_copy(self):
        source_specs = self.options.sources
        destination_spec = self.options.destination
        recursive = self.options["recursive"]

        target = self.get_target_info(destination_spec)

        sources = [] # list of (name, source object)
        for ss in source_specs:
            name, source = self.get_source_info(ss)
            sources.append( (name, source) )

        del name  # guard against accidental reuse of the loop variable below
        have_source_dirs = bool([s for (name,s) in sources
                                 if isinstance(s, (LocalDirectorySource,
                                                   TahoeDirectorySource))])

        if have_source_dirs and not recursive:
            self.to_stderr("cannot copy directories without --recursive")
            return 1

        if isinstance(target, (LocalFileTarget, TahoeFileTarget)):
            # cp STUFF foo.txt, where foo.txt already exists. This limits the
            # possibilities considerably.
            if len(sources) > 1:
                self.to_stderr("target %s is not a directory" % quote_output(destination_spec))
                return 1
            if have_source_dirs:
                self.to_stderr("cannot copy directory into a file")
                return 1
            name, source = sources[0]
            return self.copy_file(source, target)

        if isinstance(target, (LocalMissingTarget, TahoeMissingTarget)):
            # if we have -r, we'll auto-create the target directory. Without
            # it, we'll only create a file.
            if recursive:
                return self.copy_to_directory(sources, target)
            if len(sources) > 1:
                self.to_stderr("cannot copy multiple files into a file without -r")
                return 1
            # cp file1 newfile
            name, source = sources[0]
            return self.copy_file(source, target)

        if isinstance(target, (LocalDirectoryTarget, TahoeDirectoryTarget)):
            # We're copying to an existing directory -- make sure that we
            # have target names for everything
            for (name, source) in sources:
                if name is None and isinstance(source, TahoeFileSource):
                    self.to_stderr(
                        "error: you must specify a destination filename")
                    return 1
            return self.copy_to_directory(sources, target)

        self.to_stderr("unknown target")
        return 1
Example #45
0
    def process(self, localpath):
        precondition(isinstance(localpath, unicode), localpath)
        # returns newdircap

        self.verboseprint("processing %s" % quote_output(localpath))
        create_contents = {} # childname -> (type, rocap, metadata)
        compare_contents = {} # childname -> rocap

        try:
            children = listdir_unicode(localpath)
        except EnvironmentError:
            self.directories_skipped += 1
            self.warn("WARNING: permission denied on directory %s" % quote_output(localpath))
            children = []
        except FilenameEncodingError:
            self.directories_skipped += 1
            self.warn("WARNING: could not list directory %s due to a filename encoding error" % quote_output(localpath))
            children = []

        for child in self.options.filter_listdir(children):
            assert isinstance(child, unicode), child
            childpath = os.path.join(localpath, child)
            # note: symlinks to directories are both islink() and isdir()
            if os.path.isdir(childpath) and not os.path.islink(childpath):
                metadata = get_local_metadata(childpath)
                # recurse on the child directory
                childcap = self.process(childpath)
                assert isinstance(childcap, str)
                create_contents[child] = ("dirnode", childcap, metadata)
                compare_contents[child] = childcap
            elif os.path.isfile(childpath) and not os.path.islink(childpath):
                try:
                    childcap, metadata = self.upload(childpath)
                    assert isinstance(childcap, str)
                    create_contents[child] = ("filenode", childcap, metadata)
                    compare_contents[child] = childcap
                except EnvironmentError:
                    self.files_skipped += 1
                    self.warn("WARNING: permission denied on file %s" % quote_output(childpath))
            else:
                self.files_skipped += 1
                if os.path.islink(childpath):
                    self.warn("WARNING: cannot backup symlink %s" % quote_output(childpath))
                else:
                    self.warn("WARNING: cannot backup special file %s" % quote_output(childpath))

        must_create, r = self.check_backupdb_directory(compare_contents)
        if must_create:
            self.verboseprint(" creating directory for %s" % quote_output(localpath))
            newdircap = mkdir(create_contents, self.options)
            assert isinstance(newdircap, str)
            if r:
                r.did_create(newdircap)
            self.directories_created += 1
            return newdircap
        else:
            self.verboseprint(" re-using old directory for %s" % quote_output(localpath))
            self.directories_reused += 1
            return r.was_created()
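Note: the per-child metadata recorded above comes from get_local_metadata(); a minimal sketch, assuming it captures stat()-derived timestamps (the real helper may record additional fields):

import os

def get_local_metadata(path):
    # sketch: stat()-derived timestamps for the backupdb entry; the
    # assumed field set is minimal, the real helper may capture more
    s = os.stat(path)
    return {"ctime": s.st_ctime, "mtime": s.st_mtime}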
Example #46
0
def mkdir(options):
    nodeurl = options['node-url']
    aliases = options.aliases
    where = options.where
    stdout = options.stdout
    stderr = options.stderr
    if not nodeurl.endswith("/"):
        nodeurl += "/"
    if where:
        try:
            rootcap, path = get_alias(aliases, where, DEFAULT_ALIAS)
        except UnknownAliasError as e:
            e.display(stderr)
            return 1

    if not where or not path:
        # create a new unlinked directory
        url = nodeurl + "uri?t=mkdir"
        if options["format"]:
            url += "&format=%s" % urllib.quote(options['format'])
        resp = do_http("POST", url)
        rc = check_http_error(resp, stderr)
        if rc:
            return rc
        new_uri = resp.read().strip()
        # emit its write-cap
        print(quote_output(new_uri, quotemarks=False), file=stdout)
        return 0

    # create a new directory at the given location
    if path.endswith("/"):
        path = path[:-1]
    # path must be "/".join([s.encode("utf-8") for s in segments])
    url = nodeurl + "uri/%s/%s?t=mkdir" % (urllib.quote(rootcap),
                                           urllib.quote(path))
    if options['format']:
        url += "&format=%s" % urllib.quote(options['format'])

    resp = do_http("POST", url)
    rc = check_http_error(resp, stderr)
    if rc:
        return rc
    new_uri = resp.read().strip()
    print(quote_output(new_uri, quotemarks=False), file=stdout)
    return 0
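Note: for illustration, the URL built for the aliased case looks like this (placeholder cap and node URL; urllib.quote leaves the path's internal slashes intact because "/" is in its default safe set):

import urllib

nodeurl = "http://127.0.0.1:3456/"  # assumed local node URL
rootcap = "URI:DIR2:..."            # placeholder, not a real cap
path = "docs/new subdir"
url = nodeurl + "uri/%s/%s?t=mkdir" % (urllib.quote(rootcap),
                                       urllib.quote(path))
# POST to this URL; the response body is the new directory's write-cap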
Example #47
0
def describe_share(abs_sharefile, si_s, shnum_s, now, out):
    with open(abs_sharefile, "rb") as f:
        prefix = f.read(32)
        if MutableShareFile.is_valid_header(prefix):
            _describe_mutable_share(abs_sharefile, f, now, si_s, out)
        elif ShareFile.is_valid_header(prefix):
            _describe_immutable_share(abs_sharefile, now, si_s, out)
        else:
            print("UNKNOWN really-unknown %s" % quote_output(abs_sharefile),
                  file=out)
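Note: the dispatch above assumes each container class can recognize its own on-disk header from a fixed-size prefix; a minimal sketch of such a classmethod (the magic constant here is hypothetical):

class MutableShareFile(object):
    # hypothetical magic value; the real container defines its own
    MAGIC = b"Tahoe mutable container v1\n"

    @classmethod
    def is_valid_header(cls, header):
        # the 32-byte prefix read by the caller is enough to classify the file
        return header.startswith(cls.MAGIC)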
Example #48
0
 def _check1((rc,out,err)):
     if good_out is None:
         self.failUnlessReallyEqual(rc, 1)
         self.failUnlessIn("files whose names could not be converted", err)
         self.failUnlessIn(quote_output(u"g\u00F6\u00F6d"), err)
         self.failUnlessReallyEqual(sorted(out.splitlines()), sorted(["0share", "1share"]))
     else:
         self.failUnlessReallyEqual(rc, 0)
         self.failUnlessReallyEqual(err, "")
         self.failUnlessReallyEqual(sorted(out.splitlines()), sorted(["0share", "1share", good_out]))
Example #49
0
    def lineReceived(self, line):
        if self.in_error:
            print(quote_output(line, quotemarks=False), file=self.stderr)
            return
        if line.startswith("ERROR:"):
            self.in_error = True
            self.streamer.rc = 1
            print(quote_output(line, quotemarks=False), file=self.stderr)
            return

        d = json.loads(line)
        stdout = self.stdout
        if d["type"] not in ("file", "directory"):
            return
        self.num_objects += 1
        # non-verbose means print a progress marker every 100 files
        if self.num_objects % 100 == 0:
            print("%d objects checked.." % self.num_objects, file=stdout)
        cr = d["check-results"]
        if cr["results"]["healthy"]:
            self.files_healthy += 1
        else:
            self.files_unhealthy += 1
        if self.verbose:
            # verbose means also print one line per file
            path = d["path"]
            if not path:
                path = ["<root>"]

            # LIT files and directories do not have a "summary" field.
            summary = cr.get("summary", "Healthy (LIT)")
            print("%s: %s" %
                  (quote_path(path), quote_output(summary, quotemarks=False)),
                  file=stdout)

        # always print out corrupt shares
        for shareloc in cr["results"].get("list-corrupt-shares", []):
            (serverid, storage_index, sharenum) = shareloc
            print(
                " corrupt: %s" %
                _quote_serverid_index_share(serverid, storage_index, sharenum),
                file=stdout)
Example #50
0
 def get_config(self, section, option, default=_None, boolean=False):
     try:
         if boolean:
             return self.config.getboolean(section, option)
         return self.config.get(section, option)
     except (ConfigParser.NoOptionError, ConfigParser.NoSectionError):
         if default is _None:
             fn = os.path.join(self.basedir, u"tahoe.cfg")
             raise MissingConfigEntry("%s is missing the [%s]%s entry" %
                                      (quote_output(fn), section, option))
         return default
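Note: a usage sketch for get_config (node is a hypothetical instance; the option names are invented for illustration): with no default, a missing entry raises MissingConfigEntry, and boolean=True routes the lookup through ConfigParser.getboolean:

readonly = node.get_config("storage", "readonly", default=False, boolean=True)
nickname = node.get_config("node", "nickname", default="")
node.get_config("node", "no-such-option")  # raises MissingConfigEntry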
Example #51
0
def put(options):
    """
    Upload a file (options.from_file, or stdin) to the grid, optionally
    linking it at options.to_file. Verbosity is derived from options['quiet'].

    @return: the exit code
    """
    nodeurl = options['node-url']
    aliases = options.aliases
    from_file = options.from_file
    to_file = options.to_file
    mutable = options['mutable']
    format = options['format']
    if options['quiet']:
        verbosity = 0
    else:
        verbosity = 2
    stdin = options.stdin
    stdout = options.stdout
    stderr = options.stderr

    if nodeurl[-1] != "/":
        nodeurl += "/"
    if to_file:
        # several possibilities for the TO_FILE argument.
        #  <none> : unlinked upload
        #  foo : TAHOE_ALIAS/foo
        #  subdir/foo : TAHOE_ALIAS/subdir/foo
        #  /oops/subdir/foo : DISALLOWED
        #  ALIAS:foo  : aliases[ALIAS]/foo
        #  ALIAS:subdir/foo  : aliases[ALIAS]/subdir/foo

        #  ALIAS:/oops/subdir/foo : DISALLOWED
        #  DIRCAP:./foo        : DIRCAP/foo
        #  DIRCAP:./subdir/foo : DIRCAP/subdir/foo
        #  MUTABLE-FILE-WRITECAP : filecap

        # FIXME: don't hardcode cap format.
        if to_file.startswith("URI:MDMF:") or to_file.startswith("URI:SSK:"):
            url = nodeurl + "uri/%s" % urllib.quote(to_file)
        else:
            try:
                rootcap, path = get_alias(aliases, to_file, DEFAULT_ALIAS)
            except UnknownAliasError, e:
                e.display(stderr)
                return 1
            if path.startswith("/"):
                suggestion = to_file.replace(u"/", u"", 1)
                print >> stderr, "Error: The remote filename must not start with a slash"
                print >> stderr, "Please try again, perhaps with %s" % quote_output(
                    suggestion)
                return 1
            url = nodeurl + "uri/%s/" % urllib.quote(rootcap)
            if path:
                url += escape_path(path)
Example #52
0
def run(config, stdout, stderr):
    from twisted.internet import reactor
    from twisted.python import log, logfile
    from allmydata import client

    basedir = config['basedir']
    precondition(isinstance(basedir, unicode), basedir)

    if not os.path.isdir(basedir):
        print >> stderr, "%s does not look like a directory at all" % quote_output(
            basedir)
        return 1
    for fn in listdir_unicode(basedir):
        if fn.endswith(u".tac"):
            tac = str(fn)
            break
    else:
        print >> stderr, "%s does not look like a node directory (no .tac file)" % quote_output(
            basedir)
        return 1
    if "client" not in tac:
        print >> stderr, ("%s looks like it contains a non-client node (%s).\n"
                          "Use 'tahoe start' instead of 'tahoe run'." %
                          (quote_output(basedir), tac))
        return 1

    os.chdir(basedir)

    # set up twisted logging. this will become part of the node rsn.
    logdir = os.path.join(basedir, 'logs')
    if not os.path.exists(logdir):
        os.makedirs(logdir)
    lf = logfile.LogFile('tahoesvc.log', logdir)
    log.startLogging(lf)

    # run the node itself
    c = client.Client(basedir)
    reactor.callLater(0, c.startService)  # after reactor startup
    reactor.run()

    return 0
Example #53
0
 def _check((rc, out, err)):
     try:
         unicode_to_output(u"\u00C4rtonwall")
     except UnicodeEncodeError:
         self.failUnlessReallyEqual(rc, 1)
         self.failUnlessReallyEqual(out, "Metallica\n")
         self.failUnlessIn(quote_output(u"\u00C4rtonwall"), err)
         self.failUnlessIn("files whose names could not be converted", err)
     else:
         self.failUnlessReallyEqual(rc, 0)
         self.failUnlessReallyEqual(out.decode(get_io_encoding()), u"Metallica\n\u00C4rtonwall\n")
         self.failUnlessReallyEqual(err, "")
Example #54
0
 def _check(self, inp, out, enc, optional_quotes, quote_newlines):
     if PY3 and isinstance(out, bytes):
         out = out.decode(enc or encodingutil.io_encoding)
     out2 = out
     if optional_quotes:
         out2 = out2[1:-1]
     self.failUnlessReallyEqual(quote_output(inp, encoding=enc, quote_newlines=quote_newlines), out)
     self.failUnlessReallyEqual(quote_output(inp, encoding=enc, quotemarks=False, quote_newlines=quote_newlines), out2)
     if out[0:2] == 'b"':
         pass
     elif isinstance(inp, bytes):
         try:
             unicode_inp = inp.decode("utf-8")
         except UnicodeDecodeError:
             # Some things decode on Python 2, but not Python 3...
             return
         self.failUnlessReallyEqual(quote_output(unicode_inp, encoding=enc, quote_newlines=quote_newlines), out)
         self.failUnlessReallyEqual(quote_output(unicode_inp, encoding=enc, quotemarks=False, quote_newlines=quote_newlines), out2)
     else:
         try:
             bytes_inp = inp.encode('utf-8')
         except UnicodeEncodeError:
             # Some things encode on Python 2, but not Python 3, e.g.
             # surrogates like u"\uDC00\uD800"...
             return
         self.failUnlessReallyEqual(quote_output(bytes_inp, encoding=enc, quote_newlines=quote_newlines), out)
         self.failUnlessReallyEqual(quote_output(bytes_inp, encoding=enc, quotemarks=False, quote_newlines=quote_newlines), out2)
Example #55
0
 def _check(self, inp, out, enc, optional_quotes, quote_newlines):
     out2 = out
     if optional_quotes:
         out2 = out2[1:-1]
     self.failUnlessReallyEqual(
         quote_output(inp, encoding=enc, quote_newlines=quote_newlines),
         out)
     self.failUnlessReallyEqual(
         quote_output(inp,
                      encoding=enc,
                      quotemarks=False,
                      quote_newlines=quote_newlines), out2)
     if out[0:2] == 'b"':
         pass
     elif isinstance(inp, str):
         self.failUnlessReallyEqual(
             quote_output(unicode(inp),
                          encoding=enc,
                          quote_newlines=quote_newlines), out)
         self.failUnlessReallyEqual(
             quote_output(unicode(inp),
                          encoding=enc,
                          quotemarks=False,
                          quote_newlines=quote_newlines), out2)
     else:
         self.failUnlessReallyEqual(
             quote_output(inp.encode('utf-8'),
                          encoding=enc,
                          quote_newlines=quote_newlines), out)
         self.failUnlessReallyEqual(
             quote_output(inp.encode('utf-8'),
                          encoding=enc,
                          quotemarks=False,
                          quote_newlines=quote_newlines), out2)
Example #56
0
 def opt_exclude_from(self, filepath):
     """Ignore file matching glob patterns listed in file, one per
     line. The file is assumed to be in the argv encoding."""
     abs_filepath = argv_to_abspath(filepath)
     try:
         exclude_file = file(abs_filepath)
     except EnvironmentError:
         raise BackupConfigurationError('Error opening exclude file %s.' % quote_output(abs_filepath))
     try:
         for line in exclude_file:
             self.opt_exclude(line)
     finally:
         exclude_file.close()
Example #57
0
 def _check4((rc, out, err)):
     if good_out is None:
         self.failUnlessReallyEqual(rc, 1)
         self.failUnlessIn("files whose names could not be converted", err)
         self.failUnlessIn(quote_output(u"g\u00F6\u00F6d"), err)
         self.failUnlessReallyEqual(out, "")
     else:
         # listing a file (as dir/filename) should have the edge metadata,
         # including the filename
         self.failUnlessReallyEqual(rc, 0)
         self.failUnlessIn(good_out, out)
         self.failIfIn("-r-- %d -" % len(small), out,
                       "trailing hyphen means unknown date")
Example #58
0
def create_alias(options):
    # mkdir+add_alias
    nodedir = options['node-directory']
    alias = options.alias
    precondition(isinstance(alias, unicode), alias=alias)
    stdout = options.stdout
    stderr = options.stderr
    if u":" in alias:
        # a single trailing colon will already have been stripped if present
        print >>stderr, "Alias names cannot contain colons."
        return 1
    if u" " in alias:
        print >>stderr, "Alias names cannot contain spaces."
        return 1

    old_aliases = get_aliases(nodedir)
    if alias in old_aliases:
        print >>stderr, "Alias %s already exists!" % quote_output(alias)
        return 1

    aliasfile = os.path.join(nodedir, "private", "aliases")

    nodeurl = options['node-url']
    if not nodeurl.endswith("/"):
        nodeurl += "/"
    url = nodeurl + "uri?t=mkdir"
    resp = do_http("POST", url)
    rc = check_http_error(resp, stderr)
    if rc:
        return rc
    new_uri = resp.read().strip()

    # probably check for others..

    add_line_to_aliasfile(aliasfile, alias, new_uri)

    print >>stdout, "Alias %s created" % (quote_output(alias),)
    return 0
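Note: a sketch of the aliasfile append performed by add_line_to_aliasfile, assuming the private/aliases format is one "name: cap" pair per line (that format is an assumption here):

import codecs

def add_line_to_aliasfile(aliasfile, alias, cap):
    # sketch only: append one "name: cap" line in UTF-8
    with codecs.open(aliasfile, "a", "utf-8") as f:
        f.write(u"%s: %s\n" % (alias, cap))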