Example #1
    def test_replace_locations(self):
        # exercise fencepost conditions
        suspects = list(range(SEGSIZE - 3, SEGSIZE + 1)) + list(
            range(2 * SEGSIZE - 3, 2 * SEGSIZE + 1))
        letters = iter("ABCDEFGHIJKLMNOPQRSTUVWXYZ")
        d0 = self.do_upload_mdmf()

        def _run(ign):
            expected = self.data
            d = defer.succeed(None)
            for offset in suspects:
                new_data = next(letters).encode(
                    "ascii") * 2  # "AA", then "BB", etc
                expected = expected[:offset] + new_data + expected[offset + 2:]
                d.addCallback(
                    lambda ign: self.mdmf_node.get_best_mutable_version())

                def _modify(mv, offset=offset, new_data=new_data):
                    # close over 'offset','new_data'
                    md = MutableData(new_data)
                    return mv.update(md, offset)

                d.addCallback(_modify)
                d.addCallback(
                    lambda ignored: self.mdmf_node.download_best_version())
                d.addCallback(self._check_differences, expected)
            return d

        d0.addCallback(_run)
        return d0
Example #2
def create_tub(tub_options,
               default_connection_handlers,
               foolscap_connection_handlers,
               handler_overrides=None,
               **kwargs):
    """
    Create a Tub with the right options and handlers. It will be
    ephemeral unless the caller provides certFile= in kwargs.

    :param handler_overrides: anything in this will override anything
        in `default_connection_handlers` for just this call.

    :param dict tub_options: every key-value pair in here will be set in
        the new Tub via `Tub.setOption`
    """
    # Use a None sentinel rather than a shared mutable {} default argument.
    if handler_overrides is None:
        handler_overrides = {}
    tub = Tub(**kwargs)
    for (name, value) in list(tub_options.items()):
        tub.setOption(name, value)
    handlers = default_connection_handlers.copy()
    handlers.update(handler_overrides)
    tub.removeAllConnectionHintHandlers()
    for hint_type, handler_name in list(handlers.items()):
        handler = foolscap_connection_handlers.get(handler_name)
        if handler:
            tub.addConnectionHintHandler(hint_type, handler)
    return tub
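A minimal usage sketch (hedged: the option and handler names below are illustrative placeholders, not Tahoe's actual defaults; it assumes foolscap is installed):

from foolscap.connections import tcp

tub = create_tub(
    tub_options={"logLocalFailures": True},      # applied via Tub.setOption
    default_connection_handlers={"tcp": "tcp"},  # hint type -> handler name
    foolscap_connection_handlers={"tcp": tcp.default()},
    certFile="node.pem",                         # without this the Tub is ephemeral
)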
Example #3
 def _check_connections(self):
     for i, c in enumerate(self.clients):
         if not c.connected_to_introducer():
             log.msg("%s not connected to introducer yet" % (i, ))
             return False
         sb = c.get_storage_broker()
         connected_servers = sb.get_connected_servers()
         connected_names = sorted(
             list(connected.get_nickname()
                  for connected in sb.get_known_servers()
                  if connected.is_connected()))
         if len(connected_servers) != self.numclients:
             wanted = sorted(
                 list(client.nickname for client in self.clients))
             log.msg(
                 "client %s storage broker connected to %s, missing %s" % (
                     i,
                     connected_names,
                     set(wanted) - set(connected_names),
                 ))
             return False
         log.msg("client %s storage broker connected to %s, happy" % (
             i,
             connected_names,
         ))
         up = c.getServiceNamed("uploader")
         if up._helper_furl and not up._helper:
             log.msg("Helper fURL but no helper")
             return False
     return True
Example #4
    def test_simple(self):
        recent_items = active_items = [
            UploadStatus(),
            DownloadStatus(b"abcd", 12345),
            PublishStatus(),
            RetrieveStatus(),
            UpdateStatus(),
            FakeStatus(),
        ]
        values = [
            BytesIO(
                json.dumps({
                    "active":
                    list(marshal_json(item) for item in active_items),
                    "recent":
                    list(marshal_json(item) for item in recent_items),
                }).encode("utf-8")),
            BytesIO(
                json.dumps({
                    "counters": {
                        "bytes_downloaded": 0,
                    },
                    "stats": {
                        "node.uptime": 0,
                    }
                }).encode("utf-8")),
        ]

        def do_http(*args, **kw):
            return values.pop(0)

        do_status(self.options, do_http)
Example #5
 def test_check_not_enough_shares(self):
     for shares in list(self._storage._peers.values()):
         for shnum in list(shares.keys()):
             if shnum > 0:
                 del shares[shnum]
     d = self._fn.check(Monitor())
     d.addCallback(self.check_bad, "test_check_not_enough_shares")
     return d
Example #6
 def send_crypttext_hash_tree_to_all_shareholders(self):
     self.log("sending crypttext hash tree", level=log.NOISY)
     self.set_status("Sending Crypttext Hash Tree")
     self.set_encode_and_push_progress(extra=0.3)
     t = HashTree(self._crypttext_hashes)
     all_hashes = list(t)
     self.uri_extension_data["crypttext_root_hash"] = t[0]
     dl = []
     for shareid in list(self.landlords):
         dl.append(self.send_crypttext_hash_tree(shareid, all_hashes))
     return self._gather_responses(dl)
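HashTree here is allmydata's Merkle tree over the crypttext hashes: list(t) serializes every node and t[0] is the root. As an illustration of the underlying idea only (not allmydata's API, which also handles padding differently), a minimal Merkle-root construction:

import hashlib

def merkle_root(leaf_hashes):
    """Hash pairs level by level until a single root remains."""
    level = list(leaf_hashes)
    while len(level) > 1:
        if len(level) % 2:  # duplicate the last hash on odd-sized levels
            level.append(level[-1])
        level = [hashlib.sha256(level[i] + level[i + 1]).digest()
                 for i in range(0, len(level), 2)]
    return level[0]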
Example #7
 def consume_directory(self, dirpath):
     return self, {
         os.path.basename(create_path): create_value
         for (create_path,
              create_value) in list(self._create_contents.items())
         if os.path.dirname(create_path) == dirpath
     }, {
         os.path.basename(compare_path): compare_value
         for (compare_path,
              compare_value) in list(self._compare_contents.items())
         if os.path.dirname(compare_path) == dirpath
     }
Example #8
    def test_exclude_options(self):
        root_listdir = (u'lib.a', u'_darcs', u'subdir', u'nice_doc.lyx')
        subdir_listdir = (u'another_doc.lyx', u'run_snake_run.py', u'CVS',
                          u'.svn', u'_darcs')
        basedir = "cli/Backup/exclude_options"
        fileutil.make_dirs(basedir)
        nodeurl_path = os.path.join(basedir, 'node.url')
        fileutil.write(nodeurl_path, 'http://example.net:2357/')

        def parse(args):
            return parse_options(basedir, "backup", args)

        # test simple exclude
        backup_options = parse(['--exclude', '*lyx', 'from', 'to'])
        filtered = list(backup_options.filter_listdir(root_listdir))
        self._check_filtering(filtered, root_listdir,
                              (u'lib.a', u'_darcs', u'subdir'),
                              (u'nice_doc.lyx', ))
        # multiple exclude
        backup_options = parse(
            ['--exclude', '*lyx', '--exclude', 'lib.?', 'from', 'to'])
        filtered = list(backup_options.filter_listdir(root_listdir))
        self._check_filtering(filtered, root_listdir, (u'_darcs', u'subdir'),
                              (u'nice_doc.lyx', u'lib.a'))
        # vcs metadata exclusion
        backup_options = parse(['--exclude-vcs', 'from', 'to'])
        filtered = list(backup_options.filter_listdir(subdir_listdir))
        self._check_filtering(filtered, subdir_listdir, (
            u'another_doc.lyx',
            u'run_snake_run.py',
        ), (u'CVS', u'.svn', u'_darcs'))
        # read exclude patterns from file
        exclusion_string = "_darcs\n*py\n.svn"
        excl_filepath = os.path.join(basedir, 'exclusion')
        fileutil.write(excl_filepath, exclusion_string)
        backup_options = parse(['--exclude-from', excl_filepath, 'from', 'to'])
        filtered = list(backup_options.filter_listdir(subdir_listdir))
        self._check_filtering(filtered, subdir_listdir,
                              (u'another_doc.lyx', u'CVS'),
                              (u'.svn', u'_darcs', u'run_snake_run.py'))
        # test BackupConfigurationError
        self.failUnlessRaises(
            cli.BackupConfigurationError, parse,
            ['--exclude-from', excl_filepath + '.no', 'from', 'to'])

        # test that an iterator works too
        backup_options = parse(['--exclude', '*lyx', 'from', 'to'])
        filtered = list(backup_options.filter_listdir(iter(root_listdir)))
        self._check_filtering(filtered, root_listdir,
                              (u'lib.a', u'_darcs', u'subdir'),
                              (u'nice_doc.lyx', ))
Example #9
 def failUnlessOneRecoverable(self, sm, num_shares):
     self.assertThat(sm.recoverable_versions(), HasLength(1))
     self.assertThat(sm.unrecoverable_versions(), HasLength(0))
     best = sm.best_recoverable_version()
     self.assertThat(best, NotEquals(None))
     self.assertThat(sm.recoverable_versions(), Equals(set([best])))
     self.assertThat(sm.shares_available(), HasLength(1))
     self.assertThat(sm.shares_available()[best], Equals(
         (num_shares, 3, 10)))
     shnum, servers = list(sm.make_sharemap().items())[0]
     server = list(servers)[0]
     self.assertThat(sm.version_on_server(server, shnum), Equals(best))
     self.assertThat(sm.version_on_server(server, 666), Equals(None))
     return sm
Example #10
def run_cli_unicode(verb, argv, nodeargs=None, stdin=None, encoding=None):
    """
    Run a Tahoe-LAFS CLI command.

    :param unicode verb: The command to run.  For example, ``u"create-node"``.

    :param [unicode] argv: The arguments to pass to the command.  For example,
        ``[u"--hostname=localhost"]``.

    :param [unicode] nodeargs: Extra arguments to pass to the Tahoe executable
        before ``verb``.

    :param unicode stdin: Text to pass to the command via stdin.

    :param NoneType|str encoding: The name of an encoding to use for all
        bytes/unicode conversions necessary *and* the encoding to cause stdio
        to declare with its ``encoding`` attribute.  ``None`` means ASCII will
        be used and no declaration will be made at all.
    """
    if nodeargs is None:
        nodeargs = []
    precondition(
        all(isinstance(arg, future_str) for arg in [verb] + nodeargs + argv),
        "arguments to run_cli_unicode must be unicode",
        verb=verb,
        nodeargs=nodeargs,
        argv=argv,
    )
    codec = encoding or "ascii"
    if PY2:
        encode = lambda t: None if t is None else t.encode(codec)
    else:
        # On Python 3 command-line parsing expects Unicode!
        encode = lambda t: t
    d = run_cli_native(encode(verb),
                       nodeargs=list(encode(arg) for arg in nodeargs),
                       stdin=encode(stdin),
                       encoding=encoding,
                       *list(encode(arg) for arg in argv))

    def maybe_decode(result):
        code, stdout, stderr = result
        if isinstance(stdout, bytes):
            stdout = stdout.decode(codec)
        if isinstance(stderr, bytes):
            stderr = stderr.decode(codec)
        return code, stdout, stderr

    d.addCallback(maybe_decode)
    return d
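A usage sketch from a trial-style test (the subcommand and node directory are illustrative):

d = run_cli_unicode(
    u"list-aliases",
    argv=[u"--json"],
    nodeargs=[u"--node-directory", u"/tmp/testnode"],  # hypothetical path
)
# The Deferred fires with (returncode, stdout, stderr), decoded per maybe_decode.
d.addCallback(lambda res: self.assertEqual(res[0], 0))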
Example #11
    def test_multipart_set_boundary_does_not_change_header_type(self):
        """
        Tests that Message.set_boundary() does not cause Python2 errors.
        
        In particular, tests that set_boundary does not cause the type of the
        message headers list to be changed from the future built-in list.
        """
        multipart_message = email.mime.multipart.MIMEMultipart()
        headers_type = type(multipart_message._headers)
        self.assertEqual(headers_type, type(list()))

        boundary = '===============6387699881409002085=='
        multipart_message.set_boundary(boundary)
        headers_type = type(multipart_message._headers)
        self.assertEqual(headers_type, type(list()))
Example #12
    def test_auxdict(self):
        d = dictutil.AuxValueDict()
        # we put the serialized form in the auxdata
        d.set_with_aux("key", ("filecap", "metadata"), "serialized")

        self.failUnlessEqual(list(d.keys()), ["key"])
        self.failUnlessEqual(d["key"], ("filecap", "metadata"))
        self.failUnlessEqual(d.get_aux("key"), "serialized")

        def _get_missing(key):
            return d[key]

        self.failUnlessRaises(KeyError, _get_missing, "nonkey")
        self.failUnlessEqual(d.get("nonkey"), None)
        self.failUnlessEqual(d.get("nonkey", "nonvalue"), "nonvalue")
        self.failUnlessEqual(d.get_aux("nonkey"), None)
        self.failUnlessEqual(d.get_aux("nonkey", "nonvalue"), "nonvalue")

        d["key"] = ("filecap2", "metadata2")
        self.failUnlessEqual(d["key"], ("filecap2", "metadata2"))
        self.failUnlessEqual(d.get_aux("key"), None)

        d.set_with_aux("key2", "value2", "aux2")
        self.failUnlessEqual(sorted(d.keys()), ["key", "key2"])
        del d["key2"]
        self.failUnlessEqual(list(d.keys()), ["key"])
        self.failIf("key2" in d)
        self.failUnlessRaises(KeyError, _get_missing, "key2")
        self.failUnlessEqual(d.get("key2"), None)
        self.failUnlessEqual(d.get_aux("key2"), None)
        d["key2"] = "newvalue2"
        self.failUnlessEqual(d.get("key2"), "newvalue2")
        self.failUnlessEqual(d.get_aux("key2"), None)

        d = dictutil.AuxValueDict({1: 2, 3: 4})
        self.failUnlessEqual(sorted(d.keys()), [1, 3])
        self.failUnlessEqual(d[1], 2)
        self.failUnlessEqual(d.get_aux(1), None)

        d = dictutil.AuxValueDict([(1, 2), (3, 4)])
        self.failUnlessEqual(sorted(d.keys()), [1, 3])
        self.failUnlessEqual(d[1], 2)
        self.failUnlessEqual(d.get_aux(1), None)

        d = dictutil.AuxValueDict(one=1, two=2)
        self.failUnlessEqual(sorted(d.keys()), ["one", "two"])
        self.failUnlessEqual(d["one"], 1)
        self.failUnlessEqual(d.get_aux("one"), None)
Example #13
 def draw(self, thing):
     # Dispatch on the concrete shapely type being drawn.
     if type(thing) is Point:
         pos = list(thing.coords)
         self.scatter.setPoints(pos=pos)
     elif type(thing) is LineString:
         points = list(thing.coords)
         path = QPainterPath(QPointF(*points[0]))
         for i in np.arange(1, len(points)):
             path.lineTo(QPointF(*points[i]))
         self.pathitem.setPath(path)
     elif type(thing) is Polygon:
         points = list(thing.exterior.coords)
         path = QPainterPath(QPointF(*points[0]))
         for i in np.arange(1, len(points)):
             path.lineTo(QPointF(*points[i]))
         self.pathitem.setPath(path)
Example #14
File: driver.py Project: p3trus/slave
def _typelist(x):
    """Helper function converting all items of x to instances."""
    # Sequence/Iterable live in collections.abc (the bare collections
    # aliases were removed in Python 3.10).
    if isinstance(x, collections.abc.Sequence):
        return list(map(_to_instance, x))
    elif isinstance(x, collections.abc.Iterable):
        return x
    return None if x is None else [_to_instance(x)]
Example #15
def plot_eventresult(result, v0=None, fname=None, title=None,
                     quantities=QUANTITIES_EVENT,
                     seismic_moment_method=None, seismic_moment_options=None,
                     figsize=None):
    # Use a None sentinel rather than a shared mutable {} default argument.
    seismic_moment_options = seismic_moment_options or {}
    v0 = v0 or result.get('v0') or result.get('config', {}).get('v0')
    freq = np.array(result['freq'])
    res = copy(result)
    _values_view = res.pop('events').values()
    res.update((list(_values_view))[0])
    N = len(quantities)
    n = int(np.ceil(np.sqrt(N)))
    fig = plt.figure(figsize=figsize)
    gs = gridspec.GridSpec(n, n)
    share = None
    for i, q in enumerate(quantities):
        ax = plt.subplot(gs[i // n, i % n], sharex=share)
        if q == 'sds':
            plot_sds(freq, res['omM'], M0=res.get('M0'),
                     fc=res.get('fc'), ax=ax,
                     seismic_moment_method=seismic_moment_method,
                     seismic_moment_options=seismic_moment_options)
        else:
            vals = calc_dependent(q, res[DEPMAP[q]], freq, v0)
            ax.loglog(freq, vals, 'o-k')
        ax.annotate(QLABELS[q], (1, 1), (-5, -5), 'axes fraction',
                    'offset points', ha='right', va='top')
        _set_gridlabels(ax, i, n, n, N)
        if share is None:
            share = ax
    ax.set_xlim(freq[0], freq[-1])
    _savefig(fig, fname=fname, title=title)
Example #16
def _storage_from_foolscap_plugin(node_config, config, announcement, get_rref):
    """
    Construct an ``IStorageServer`` from the most locally-preferred plugin
    that is offered in the given announcement.

    :param allmydata.node._Config node_config: The node configuration to
        pass to the plugin.
    """
    plugins = {
        plugin.name: plugin
        for plugin in getPlugins(IFoolscapStoragePlugin)
    }
    storage_options = announcement.get(u"storage-options", [])
    for plugin_name, plugin_config in list(config.storage_plugins.items()):
        try:
            plugin = plugins[plugin_name]
        except KeyError:
            raise ValueError("{} not installed".format(plugin_name))
        for option in storage_options:
            if plugin_name == option[u"name"]:
                furl = option[u"storage-server-FURL"]
                return furl, plugin.get_storage_client(
                    node_config,
                    option,
                    get_rref,
                )
    raise AnnouncementNotMatched()
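Reconstructed from the lookups above, the announcement this function matches against has roughly this shape (the plugin name and FURL are placeholders):

announcement = {
    u"storage-options": [
        {
            # matched against the keys of config.storage_plugins
            u"name": u"example-storage-plugin-v1",
            u"storage-server-FURL": u"pb://tubid@host:port/swissnum",
        },
    ],
}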
Example #17
    @defer.inlineCallbacks  # required: the body below drives Deferreds with 'yield'
    def test_concurrent(self):
        """
        The same data can be uploaded by more than one ``Uploader`` at a time.
        """
        self.basedir = "helper/AssistedUpload/test_concurrent"
        self.setUpHelper(self.basedir)
        u1 = make_uploader(self.helper_furl, self.s, "u1")
        u2 = make_uploader(self.helper_furl, self.s, "u2")

        yield wait_a_few_turns()

        for u in [u1, u2]:
            self.assertTrue(
                u._helper,
                "Expected uploader to have a helper reference, had {} instead."
                .format(u._helper, ),
            )

        uploads = list(
            upload_data(u, DATA, convergence=b"some convergence string")
            for u in [u1, u2])

        result1, result2 = yield defer.gatherResults(uploads)

        self.assertEqual(
            result1.get_uri(),
            result2.get_uri(),
        )
Example #18
    def cancel_lease(self, cancel_secret):
        """Remove a lease with the given cancel_secret. If the last lease is
        cancelled, the file will be removed. Return the number of bytes that
        were freed (by truncating the list of leases, and possibly by
        deleting the file). Raise IndexError if there was no lease with the
        given cancel_secret.
        """

        leases = list(self.get_leases())
        num_leases_removed = 0
        for i, lease in enumerate(leases):
            if timing_safe_compare(lease.cancel_secret, cancel_secret):
                leases[i] = None
                num_leases_removed += 1
        if not num_leases_removed:
            raise IndexError("unable to find matching lease to cancel")
        if num_leases_removed:
            # pack and write out the remaining leases. We write these out in
            # the same order as they were added, so that if we crash while
            # doing this, we won't lose any non-cancelled leases.
            leases = [l for l in leases if l]  # remove the cancelled leases
            with open(self.home, 'rb+') as f:
                for i, lease in enumerate(leases):
                    self._write_lease_record(f, i, lease)
                self._write_num_leases(f, len(leases))
                self._truncate_leases(f, len(leases))
        space_freed = self.LEASE_SIZE * num_leases_removed
        if not len(leases):
            space_freed += os.stat(self.home)[stat.ST_SIZE]
            self.unlink()
        return space_freed
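timing_safe_compare is used instead of == so that a wrong cancel_secret takes the same time to reject no matter how many leading bytes were guessed correctly. The standard library offers the same guarantee; a sketch:

import hmac

def timing_safe_compare_sketch(a, b):
    # hmac.compare_digest runs in time independent of where the inputs differ.
    return hmac.compare_digest(a, b)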
Example #19
def run_bintahoe(extra_argv, python_options=None):
    """
    Run the main Tahoe entrypoint in a child process with the given additional
    arguments.

    :param [unicode] extra_argv: More arguments for the child process argv.

    :param [unicode] python_options: Options for the Python interpreter
        itself, inserted before the module entrypoint (or ``None`` for none).

    :return: A three-tuple of stdout (unicode), stderr (unicode), and the
        child process "returncode" (int).
    """
    executable = ensure_text(sys.executable)
    argv = [executable]
    if python_options is not None:
        argv.extend(python_options)
    argv.extend([u"-b", u"-m", u"allmydata.scripts.runner"])
    argv.extend(extra_argv)
    argv = list(unicode_to_argv(arg) for arg in argv)
    p = Popen(argv, stdout=PIPE, stderr=PIPE)
    if PY2:
        encoding = "utf-8"
    else:
        encoding = locale.getpreferredencoding(False)
    out = p.stdout.read().decode(encoding)
    err = p.stderr.read().decode(encoding)
    returncode = p.wait()
    return (out, err, returncode)
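A usage sketch (assumes a test environment where the child interpreter can import allmydata):

out, err, returncode = run_bintahoe([u"--version"])
assert returncode == 0, err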
Example #20
def list_aliases(options):
    """
    Show aliases that exist.
    """
    data = _get_alias_details(options['node-directory'])

    if options['json']:
        dumped = json.dumps(data, indent=4)
        if isinstance(dumped, bytes):
            dumped = dumped.decode("utf-8")
        output = _escape_format(dumped)
    else:

        def dircap(details):
            return (details['readonly'] if options['readonly-uri'] else
                    details['readwrite']).decode("utf-8")

        def format_dircap(name, details):
            return fmt % (name, dircap(details))

        max_width = max([len(quote_output(name))
                         for name in data.keys()] + [0])
        fmt = "%" + str(max_width) + "s: %s"
        output = "\n".join(
            list(
                format_dircap(name, details)
                for name, details in data.items()))

    if output:
        # Show whatever we computed.  Skip this if there is no output to avoid
        # a spurious blank line.
        show_output(options.stdout, output)

    return 0
Example #21
    def modify(self, old_contents, servermap, first_time):
        children = self.node._unpack_contents(old_contents)
        now = time.time()
        for (namex, (child, new_metadata)) in list(self.entries.items()):
            name = normalize(namex)
            precondition(IFilesystemNode.providedBy(child), child)

            # Strictly speaking this is redundant because we would raise the
            # error again in _pack_normalized_children.
            child.raise_error()

            metadata = None
            if name in children:
                if not self.overwrite:
                    raise ExistingChildError("child %s already exists" % quote_output(name, encoding='utf-8'))

                if self.overwrite == ONLY_FILES and IDirectoryNode.providedBy(children[name][0]):
                    raise ExistingChildError("child %s already exists as a directory" % quote_output(name, encoding='utf-8'))
                metadata = children[name][1].copy()

            metadata = update_metadata(metadata, new_metadata, now)
            if self.create_readonly_node and metadata.get('no-write', False):
                child = self.create_readonly_node(child, name)

            children[name] = (child, metadata)
        new_contents = self.node._pack_contents(children)
        return new_contents
Example #22
    def init_storage(self, announceable_storage_servers):
        # should we run a storage server (and publish it for others to use)?
        if not storage_enabled(self.config):
            return
        if not self._is_tub_listening():
            raise ValueError("config error: storage is enabled, but tub "
                             "is not listening ('tub.port=' is empty)")

        ss = self.get_anonymous_storage_server()
        announcement = {
            "permutation-seed-base32": self._init_permutation_seed(ss),
        }

        if anonymous_storage_enabled(self.config):
            furl_file = self.config.get_private_path("storage.furl").encode(get_filesystem_encoding())
            furl = self.tub.registerReference(ss, furlFile=furl_file)
            announcement["anonymous-storage-FURL"] = furl

        enabled_storage_servers = self._enable_storage_servers(
            announceable_storage_servers,
        )
        storage_options = list(
            storage_server.announcement
            for storage_server
            in enabled_storage_servers
        )
        plugins_announcement = {}
        if storage_options:
            # Only add the new key if there are any plugins enabled.
            plugins_announcement[u"storage-options"] = storage_options

        announcement.update(plugins_announcement)

        for ic in self.introducer_clients:
            ic.publish("storage", announcement, self._node_private_key)
Example #23
def cli(node, *argv):
    """
    Run a tahoe CLI subcommand for a given node in a blocking manner, returning
    the output.
    """
    arguments = ["tahoe", '--node-directory', node.node_dir]
    return check_output(arguments + list(argv))
Example #24
def _render_section_values(values):
    """
    Convert a ``dict`` of ``unicode`` to the body of an ini-format section as
    ``unicode``.
    """
    return u"\n".join(
        list(u"{} = {}".format(k, v) for (k, v) in sorted(values.items())))
Example #25
 def abbrev_verinfo_dict(self, verinfo_d):
     output = {}
     for verinfo,value in list(verinfo_d.items()):
         (seqnum, root_hash, IV, segsize, datalength, k, N, prefix,
          offsets_tuple) = verinfo
         output["%d-%s" % (seqnum, base32.b2a(root_hash)[:4])] = value
     return output
Example #26
 def get_announcements(self):
     """Return a list of AnnouncementDescriptor for all announcements"""
     announcements = []
     for (index, (_, canary, ann, when)) in list(self._announcements.items()):
         ad = AnnouncementDescriptor(when, index, canary, ann)
         announcements.append(ad)
     return announcements
Example #27
    def test_verify_mdmf_all_bad_sharedata(self):
        d = self.publish_mdmf()
        # On 8 of the shares, corrupt the beginning of the share data.
        # The signature check during the servermap update won't catch this.
        d.addCallback(lambda ignored: corrupt(None, self._storage,
                                              "share_data", list(range(8))))
        # On 2 of the shares, corrupt the end of the share data.
        # The signature check during the servermap update won't catch
        # this either, and the retrieval process will have to process
        # all of the segments before it notices.
        d.addCallback(
            lambda ignored:
            # the block hash tree comes right after the share data, so if we
            # corrupt a little before the block hash tree, we'll corrupt in the
            # last block of each share.
            corrupt(None, self._storage, "block_hash_tree", [8, 9], -5))
        d.addCallback(lambda ignored: self._fn.check(Monitor(), verify=True))
        # The verifier should flag the file as unhealthy, and should
        # list all 10 shares as bad.
        d.addCallback(self.check_bad, "test_verify_mdmf_all_bad_sharedata")

        def _check_num_bad(r):
            self.failIf(r.is_recoverable())
            smap = r.get_servermap()
            self.failUnlessEqual(len(smap.get_bad_shares()), 10)

        d.addCallback(_check_num_bad)
        return d
Example #28
    def test_basic_pubkey_at_end(self):
        # we corrupt the pubkey in all but the last 'k' shares, allowing the
        # download to succeed but forcing a bunch of retries first. Note that
        # this is rather pessimistic: our Retrieve process will throw away
        # the whole share if the pubkey is bad, even though the rest of the
        # share might be good.

        self._fn._pubkey = None
        k = self._fn.get_required_shares()
        N = self._fn.get_total_shares()
        d = defer.succeed(None)
        d.addCallback(corrupt, self._storage, "pubkey",
                      shnums_to_corrupt=list(range(0, N-k)))
        d.addCallback(lambda res: self.make_servermap())
        def _do_retrieve(servermap):
            self.failUnless(servermap.get_problems())
            self.failUnless("pubkey doesn't match fingerprint"
                            in str(servermap.get_problems()[0]))
            ver = servermap.best_recoverable_version()
            r = Retrieve(self._fn, self._storage_broker, servermap, ver)
            c = consumer.MemoryConsumer()
            return r.download(c)
        d.addCallback(_do_retrieve)
        d.addCallback(lambda mc: b"".join(mc.chunks))
        d.addCallback(lambda new_contents:
                      self.failUnlessEqual(new_contents, self.CONTENTS))
        return d
Example #29
    def _mark_bad_share(self, server, shnum, reader, f):
        """
        I mark the given (server, shnum) as a bad share, which means that it
        will not be used anywhere else.

        There are several reasons to want to mark something as a bad
        share. These include:

            - A connection error to the server.
            - A mismatched prefix (that is, a prefix that does not match
              our local conception of the version information string).
            - A failing block hash, salt hash, share hash, or other
              integrity check.

        This method will ensure that readers that we wish to mark bad
        (for these reasons or other reasons) are not used for the rest
        of the download. Additionally, it will attempt to tell the
        remote server (with no guarantee of success) that its share is
        corrupt.
        """
        self.log("marking share %d on server %r as bad" % \
                 (shnum, server.get_name()))
        prefix = self.verinfo[-2]
        self.servermap.mark_bad_share(server, shnum, prefix)
        self._bad_shares.add((server, shnum, f))
        self._status.add_problem(server, f)
        self._last_failure = f

        # Remove the reader from _active_readers
        self._active_readers.remove(reader)
        # Use a fresh loop variable: reusing 'shnum' would clobber the
        # parameter that the corruption report below still needs.
        for remaining_shnum in list(self.remaining_sharemap.keys()):
            self.remaining_sharemap.discard(remaining_shnum, reader.server)

        if f.check(BadShareError):
            self.notify_server_corruption(server, shnum, str(f.value))
Example #30
def create_introducer_clients(config, main_tub, _introducer_factory=None):
    """
    Read, validate and parse any 'introducers.yaml' configuration.

    :param _introducer_factory: for testing; the class to instantiate instead
        of IntroducerClient

    :returns: a list of IntroducerClient instances
    """
    if _introducer_factory is None:
        _introducer_factory = IntroducerClient

    # we return this list
    introducer_clients = []

    introducers = config.get_introducer_configuration()

    for petname, (furl, cache_path) in list(introducers.items()):
        ic = _introducer_factory(
            main_tub,
            furl.encode("ascii"),
            config.nickname,
            str(allmydata.__full_version__),
            str(_Client.OLDEST_SUPPORTED_VERSION),
            partial(_sequencer, config),
            cache_path,
        )
        introducer_clients.append(ic)
    return introducer_clients
Example #31
 def remote_slot_readv(self, storage_index, shares, readv):
     start = time.time()
     self.count("readv")
     si_s = si_b2a(storage_index)
     lp = log.msg("storage: slot_readv %s %s" % (si_s, shares),
                  facility="tahoe.storage",
                  level=log.OPERATIONAL)
     si_dir = storage_index_to_dir(storage_index)
     # shares exist if there is a file for them
     bucketdir = os.path.join(self.sharedir, si_dir)
     if not os.path.isdir(bucketdir):
         self.add_latency("readv", time.time() - start)
         return {}
     datavs = {}
     for sharenum_s in os.listdir(bucketdir):
         try:
             sharenum = int(sharenum_s)
         except ValueError:
             continue
         if sharenum in shares or not shares:
             filename = os.path.join(bucketdir, sharenum_s)
             msf = MutableShareFile(filename, self)
             datavs[sharenum] = msf.readv(readv)
     log.msg("returning shares %s" % (list(datavs.keys()), ),
             facility="tahoe.storage",
             level=log.NOISY,
             parent=lp)
     self.add_latency("readv", time.time() - start)
     return datavs
Example #32
        def _got_buckets(result):
            bucketdict, success = result

            shareverds = []
            for (sharenum, bucket) in list(bucketdict.items()):
                d = self._download_and_verify(s, sharenum, bucket)
                shareverds.append(d)

            dl = deferredutil.gatherResults(shareverds)

            def collect(results):
                verified = set()
                corrupt = set()
                incompatible = set()
                for succ, sharenum, whynot in results:
                    if succ:
                        verified.add(sharenum)
                    else:
                        if whynot == 'corrupt':
                            corrupt.add(sharenum)
                        elif whynot == 'incompatible':
                            incompatible.add(sharenum)
                return (verified, s, corrupt, incompatible, success)

            dl.addCallback(collect)
            return dl
Example #33
    def get_all_blockhashes(self):
        """Retrieve and validate all the block-hash-tree nodes that are
        included in this share. Each share contains a full Merkle tree, but
        we usually only fetch the minimal subset necessary for any particular
        block. This function fetches everything at once. The Verifier uses
        this function to validate the block hash tree.

        Call this (and wait for the Deferred it returns to fire) after
        calling get_all_sharehashes() and before calling get_block() for the
        first time: this lets us check that the share contains all block
        hashes and avoids downloading them multiple times.

        I return a Deferred which errbacks upon failure, probably with
        BadOrMissingHash.
        """

        # get_block_hashes(anything) currently always returns everything
        needed = list(range(len(self.block_hash_tree)))
        d = self.bucket.get_block_hashes(needed)
        def _got_block_hashes(blockhashes):
            if len(blockhashes) < len(self.block_hash_tree):
                raise BadOrMissingHash()
            bh = dict(enumerate(blockhashes))

            try:
                self.block_hash_tree.set_hashes(bh)
            except IndexError as le:
                raise BadOrMissingHash(le)
            except (hashtree.BadHashError, hashtree.NotEnoughHashesError) as le:
                raise BadOrMissingHash(le)
        d.addCallback(_got_block_hashes)
        return d
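Validation then runs in the other direction: a leaf hash combined with its sibling hashes up the tree must reproduce the already-trusted root. A generic sketch of that check (not allmydata's hashtree API):

import hashlib

def verify_leaf(leaf_hash, audit_path, root):
    """audit_path is a list of (sibling_hash, sibling_is_left) pairs."""
    h = leaf_hash
    for sibling, sibling_is_left in audit_path:
        pair = sibling + h if sibling_is_left else h + sibling
        h = hashlib.sha256(pair).digest()
    return h == root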
Example #34
    def set_boundary(self, boundary):
        """Set the boundary parameter in Content-Type to 'boundary'.

        This is subtly different than deleting the Content-Type header and
        adding a new one with a new boundary parameter via add_header().  The
        main difference is that using the set_boundary() method preserves the
        order of the Content-Type header in the original message.

        HeaderParseError is raised if the message has no Content-Type header.
        """
        missing = object()
        params = self._get_params_preserve(missing, 'content-type')
        if params is missing:
            # There was no Content-Type header, and we don't know what type
            # to set it to, so raise an exception.
            raise errors.HeaderParseError('No Content-Type header found')
        newparams = list()
        foundp = False
        for pk, pv in params:
            if pk.lower() == 'boundary':
                newparams.append(('boundary', '"%s"' % boundary))
                foundp = True
            else:
                newparams.append((pk, pv))
        if not foundp:
            # The original Content-Type header had no boundary attribute.
            # Tack one on the end.  BAW: should we raise an exception
            # instead???
            newparams.append(('boundary', '"%s"' % boundary))
        # Replace the existing Content-Type header with the new value
        newheaders = list()
        for h, v in self._headers:
            if h.lower() == 'content-type':
                parts = list()
                # Use distinct names so the parameter tuple does not shadow
                # the header value 'v' from the outer loop.
                for pk, pv in newparams:
                    if pv == '':
                        parts.append(pk)
                    else:
                        parts.append('%s=%s' % (pk, pv))
                val = SEMISPACE.join(parts)
                newheaders.append(self.policy.header_store_parse(h, val))

            else:
                newheaders.append((h, v))
        self._headers = newheaders
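A quick standard-library demonstration that the boundary round-trips while the Content-Type header stays in place:

from email.mime.multipart import MIMEMultipart

msg = MIMEMultipart()
msg.set_boundary("BOUNDARY")
assert msg.get_boundary() == "BOUNDARY"
assert msg["Content-Type"].startswith("multipart/mixed")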
Example #35
def crop_line(line, pts_array):
    line_start, line_end = list(line.coords)
    line_start = Point(line_start)
    line_end = Point(line_end)
    # Unit vector along the line's axis.
    x = (line_start.x - line_end.x) / line.length
    y = (line_start.y - line_end.y) / line.length
    center = line.centroid
    # Project every point onto that axis, measured from the line's center.
    pts = pts_array - np.array([center.x, center.y])
    pts = pts[:, 0] * x + pts[:, 1] * y
    # Rebuild the line so it spans exactly the extreme projections.
    return pts2line([
        translate(center, xoff=np.min(pts) * x, yoff=np.min(pts) * y),
        translate(center, xoff=np.max(pts) * x, yoff=np.max(pts) * y),
    ])
Example #36
def getFirstNosePoint(image,line):
    pts=np.where(image)
    pts=MultiPoint([(pts[0][i],pts[1][i]) for i in np.arange(len(pts[0]))])
    line_pts=list(line.coords)
    start_line=LineString([line_pts[0],line_pts[1]])
    end_line=LineString([line_pts[-1],line_pts[-2]])
    if getMeanDistance(start_line,pts)<getMeanDistance(end_line,pts):
        nose=Point(line_pts[0])
    else:
        nose=Point(line_pts[-1])
    return nose
Example #37
File: message.py Project: Daniel75/repo
    def __delitem__(self, name):
        """Delete all occurrences of a header, if present.

        Does not raise an exception if the header is missing.
        """
        name = name.lower()
        newheaders = list()
        for k, v in self._headers:
            if k.lower() != name:
                newheaders.append((k, v))
        self._headers = newheaders
Example #38
File: message.py Project: Daniel75/repo
 def __init__(self, policy=compat32):
     self.policy = policy
     self._headers = list()
     self._unixfrom = None
     self._payload = None
     self._charset = None
     # Defaults for multipart messages
     self.preamble = self.epilogue = None
     self.defects = []
     # Default content type
     self._default_type = 'text/plain'
Example #39
 def test_cmdline(self):
     parallel = os.environ.get('PARALLEL', 'true') == 'true'
     script = run_cmdline
     msg = ('Only %d plot files (%s) are created.\n\n'
            'Created files are:\n%s\n\n'
            '%s')
     with tempdir(delete=True):
         script(['--create-config', '--tutorial'])
         args = [] if parallel else ['--no-parallel']
         script(args)
         # check if pictures were created
         if os.path.exists('example.log'):
             with open('example.log') as flog:
                 log = 'Content of log file:\n' + flog.read()
         else:
             log = 'Log file does not exist.'
         files = list(glob('plots/*.png'))
         msg2 = msg % (len(files), 'png', files, log)
         self.assertEqual(len(files), 85, msg=msg2)
         files = list(glob('plots/*.pdf'))
         msg2 = msg % (len(files), 'pdf', files, log)
         self.assertEqual(len(files), 4, msg=msg2)
Example #40
def test_get_providers(mock_provider_list):
    """
    Should get the list of available providers
    """
    # Given: Existing config provider list

    mock_provider_list.__iter__.return_value = ['provider1', 'provider2']

    # When: I fetch provider list
    providers = service.get_providers()

    # Then: Expected provider list is returned
    eq_(list(providers), ['provider1', 'effective'])
Example #41
def _get_effective_provider():
    """
    Gets the effective config provider.

    :return: Effective Config provider.
    :rtype: orchestrator.cluster_config.effective.MergedConfigProvider
    """
    providers = list()
    for provider_type in get_providers():
        if provider_type != 'effective':
            provider = get_provider(provider_type)
            if provider:
                providers.append(provider)
    return MergedConfigProvider(*providers)
Example #42
def evaluate_config(config, default_variables=None, var_key='variables'):
    """
    Performs rendering of all template values defined in config. Also takes
    user defined variables and default variables for substitution in the
    config.

    :param config:
    :param default_variables:
    :param var_key:
    :return: Evaluated config
    :rtype: dict
    """
    # Use a None sentinel rather than a shared mutable {} default argument.
    default_variables = default_variables or {}
    updated_config = copy.deepcopy(config)
    updated_config.setdefault(var_key, {})
    updated_config.setdefault('deployers', {})

    if 'defaults' in updated_config:
        # We do not want to do any processing in the defaults section.
        # It is only used for YAML substitution, which at this point is
        # already done.
        del updated_config['defaults']

    for deployer_name, deployer in \
            list(updated_config.get('deployers').items()):
        updated_config['deployers'][deployer_name].setdefault(
            'variables', {})
        updated_config['deployers'][deployer_name]['variables']\
            .setdefault('deployer', deployer_name)

    updated_config = transform_string_values(
        evaluate_value(updated_config, default_variables))

    # Remove all disabled deployers
    for deployer_name, deployer in \
            list(updated_config.get('deployers').items()):
        if not deployer.get('enabled', True):
            del updated_config['deployers'][deployer_name]
    return updated_config
Example #43
File: config.py Project: totem/config
def _get_effective_provider():
    """
    Gets the effective config provider.

    :return: Effective Config provider.
    :rtype: orchestrator.cluster_config.effective.MergedConfigProvider
    """
    providers = list()
    for provider_type in get_provider_types():
        if provider_type not in ('effective', 'default'):
            provider = get_provider(provider_type)
            if provider:
                providers.append(provider)

    if CONFIG_PROVIDERS['effective']['cache']['enabled']:
        cache_provider = _get_etcd_provider(
            ttl=CONFIG_PROVIDERS['effective']['cache']['ttl'])
    else:
        cache_provider = None
    return MergedConfigProvider(*providers, cache_provider=cache_provider)
Example #44
def getLine(image,angle=None):
    """
    This function takes a boolean image of a rodent, performs several steps, and returns a line running down the middle of the rodent from nose to tail.
    The steps are:
    1) Find the point at the center of mass.
    2) Draw a horizontal line through this point.
    3) Find the mean distance from the line to every point in the rodent. Rotate the line and find the angle which gives the minimum such distance. Keep the line at this angle. This line gives a good approximation for position.
    4) Draw 7 points on the line: one at the midpoint, two at the ends, two 3/4 away from the midpoint, and two halfway between the ends and the midpoint.
    5) Construct 7 lines which run through the 7 points at an angle perpendicular to the main line.
    6) Draw 6 boxes between these 7 lines.  Find the center of mass for each box along the axes parallel to these lines.  Move each of the 7 points (except the midpoint) along their lines to the center of mass of their respective region.
    7) Draw a box around the end segment of the line.  The bounds for the box lie parallel and perpendicular to this segment.  Fixing the inner point and allowing the end point to vary, fit the segment so that the average distance from the segment to the points in the box are a minimum. This step is done for both ends of the line, because we don't as yet know which end is the head and which is the tail.
    
    """
    pts=np.where(image)
    pts_array=np.column_stack((pts[0],pts[1]))
    pts=MultiPoint([(pts[0][i],pts[1][i]) for i in np.arange(len(pts[0]))])
    # STEP 1
    x0,y0=center_of_mass(image)
    center=Point(x0,y0)
    bounds=pts.bounds
    b_len=((bounds[2]-bounds[0])**2+(bounds[3]-bounds[1])**2)**(1/2) #this is the length of the diagonal, the maximum possible length of an object inside a box
    # STEP 2
    line=pts2line([translate(center,b_len),translate(center,-b_len)])
    # STEP 3
    if angle is None:
        angles=np.arange(-90,90,10)
    else:
        angles=np.arange(angle-20,angle+20,10) #this assumes the angle changes at most 10 degrees between frames
    distances=np.zeros(angles.shape,dtype=float)  # np.float was removed in NumPy 1.24
    for i in np.arange(len(angles)):
        distances[i]=getMeanDistance(rotate(line,angles[i]),pts)
    angle=angles[np.argmin(distances)]
    line=rotate(line,angle)
    line=crop_line(line,pts_array)
    
    # STEP 4 & 5
    # now that we have the approximate line down the main axis, let's divide it in half and allow the endpoints and midpoint to be translated along the perpendicular axis
    start,end=list(line.coords)
    start=np.array(start); end=np.array(end)
    mid=(start+end)/2
    mid_axis=rotate(line,90)
    mid_axis=crop_line(mid_axis,pts_array)
    axis1=translate(mid_axis,xoff=start[0]-mid[0],yoff=start[1]-mid[1])
    axis2=translate(mid_axis,xoff=(start[0]-mid[0])/(4./3),yoff=(start[1]-mid[1])/(4./3.))
    axis3=translate(mid_axis,xoff=(start[0]-mid[0])/2,yoff=(start[1]-mid[1])/2.)
    axis4=translate(mid_axis,xoff=(end[0]-mid[0])/2,yoff=(end[1]-mid[1])/2)
    axis5=translate(mid_axis,xoff=(end[0]-mid[0])/(4./3.),yoff=(end[1]-mid[1])/(4./3.))
    axis6=translate(mid_axis,xoff=end[0]-mid[0],yoff=end[1]-mid[1])
    
    # STEP 6
    pt1=getMeanPoint(axis1,axis2,pts_array)
    pt2=getMeanPoint(axis2,axis3,pts_array)
    pt3=getMeanPoint(axis3,mid_axis,pts_array)
    pt4=getMeanPoint(axis4,mid_axis,pts_array)
    pt5=getMeanPoint(axis5,axis4,pts_array)
    pt6=getMeanPoint(axis6,axis5,pts_array)
    
    # STEP 7
    start=pt1.coords[0]
    end=pt2.coords[0]
    headLine=LineString([start,end])
    headLine=scale(headLine,2,2)
    start=np.array(start); end=np.array(end)
    mid=(start+end)/2
    mid_axis=rotate(headLine,90)
    axis1=translate(mid_axis,xoff=(start[0]-mid[0])*2,yoff=(start[1]-mid[1])*2)
    axis2=translate(mid_axis,xoff=end[0]-mid[0],yoff=end[1]-mid[1])
    #poly=Polygon([p for p in axis1.coords]+[p for p in reversed(axis2.coords)]) #draws a box around one half of the mouse
    #pts_inside=MultiPoint([pt for pt in pts if poly.contains(pt)])
    poly=np.array([p for p in axis1.coords]+[p for p in reversed(axis2.coords)])
    rr, cc = polygon(poly[:, 0], poly[:, 1])
    box_array=np.column_stack((rr,cc))
    pts_inside=multidim_intersect(pts_array,box_array)
    if len(pts_inside)==0: #this only happens when the object lies completely outside of the box at the end of the line
        pts_inside=pts_array 
    pts_inside=MultiPoint([tuple(p) for p in pts_inside])
    
    coor=np.array([np.array(s) for s in axis1.coords])
    pt=end
    p0=(.5,)
    bounds=[(0.0,1.0)]
    p, cov_x, infodic, mesg, ier = leastsqbound(err, p0,args=(coor,pts_inside,pt),bounds = bounds,ftol=.1, full_output=True)     
    headLine=LineString([coor[0]+p[0]*(coor[1]-coor[0]),pt])
    headLine=crop_line(headLine,np.array([np.array([p.x,p.y]) for p in pts_inside]))
    pt1=Point(headLine.coords[1])
    
    start=pt6.coords[0]
    end=pt5.coords[0]
    headLine=LineString([start,end])
    headLine=scale(headLine,2,2)
    start=np.array(start); end=np.array(end)
    mid=(start+end)/2
    mid_axis=rotate(headLine,90)
    axis1=translate(mid_axis,xoff=(start[0]-mid[0])*2,yoff=(start[1]-mid[1])*2)
    axis2=translate(mid_axis,xoff=end[0]-mid[0],yoff=end[1]-mid[1])
    
    #poly=Polygon([p for p in axis1.coords]+[p for p in reversed(axis2.coords)]) #draws a box around one half of the mouse
    #pts_inside=MultiPoint([pt for pt in pts if poly.contains(pt)])
    
    poly=np.array([p for p in axis1.coords]+[p for p in reversed(axis2.coords)])
    rr, cc = polygon(poly[:, 0], poly[:, 1])
    box_array=np.column_stack((rr,cc))
    pts_inside=multidim_intersect(pts_array,box_array)
    if len(pts_inside)==0: #this only happens when the object lies completely outside of the box at the end of the line
        pts_inside=pts_array 
    pts_inside=MultiPoint([tuple(p) for p in pts_inside])
    
    
    
    coor=np.array([np.array(s) for s in axis1.coords])
    pt=end
    p0=(.5,)
    bounds=[(0.0,1.0)]
    p, cov_x, infodic, mesg, ier = leastsqbound(err, p0,args=(coor,pts_inside,pt),bounds = bounds,ftol=.1, full_output=True)     
    headLine=LineString([coor[0]+p[0]*(coor[1]-coor[0]),pt])
    headLine=crop_line(headLine,np.array([np.array([p.x,p.y]) for p in pts_inside]))
    pt6=Point(headLine.coords[1])
    
    line=pts2line([pt1,pt2,pt3,pt4,pt5,pt6])
    return line, angle, center
Example #45

if __name__ == '__main__':
    HOST, PORT = "localhost", 5028
    server = ThreadedTCPServer((HOST, PORT), ThreadedTCPRequestHandler)
    ip, port = server.server_address

    # Start a thread with the server -- that thread will then start one
    # more thread for each request
    server_thread = threading.Thread(target=server.serve_forever)
    # Exit the server thread when the main thread terminates
    server_thread.daemon = True
    server_thread.start()
    print("Server loop running in thread:", server_thread.name)

    print('Available serial ports:', list(SerialPort.available_ports()))

    dev0 = Device(timeout=3.0)

    # the ``channel`` method
    try:
        with channel(dev0, TcpSocket(HOST, PORT)) as dev:
            print(dev.stdio.closed)
            dev.send('Hello World!')
            dev.state.first_received = dev.receive(32).strip()
            #print(dev.receive().strip())
            print(dev.state.first_received)
    except DeviceTimeoutError as e:
        print(e)
    except IOTimeoutError as e:
        print(e)
Example #46
def line2vector(line):
    start, end = list(line.coords)
    start = np.array(start)
    end = np.array(end)
    # Normalize the start->end displacement to a unit direction vector.
    v = end - start
    v /= np.sqrt(np.sum(np.square(v)))
    return v
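For example, a line from (0, 0) to (3, 4) has length 5, so the unit vector is [0.6, 0.8]:

from shapely.geometry import LineString
print(line2vector(LineString([(0, 0), (3, 4)])))  # [0.6 0.8]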
Example #47
# ('excel' is a win32com Excel.Application COM object created earlier in
# the original script; this fragment starts mid-file.)
workbook = excel.Workbooks.Open(filename)
sheet = workbook.Worksheets('Puff Data') #workbook.Worksheets(1) #workbook.Sheets('Sheet1').Select(); sheet = xlApp.ActiveSheet
header=np.array(sheet.Rows(1).Value[0])
nCols=np.max(np.argwhere(header.astype(bool)))+1  # np.bool was removed in NumPy 1.24
nPuffs=np.max(np.argwhere(np.array(sheet.Columns(1).Value).astype(bool)))
header=header[:nCols]
puff_info=[]
for row in np.arange(nPuffs)+2:
    puff=np.array(sheet.Rows(int(row)).Value[0][:nCols])
    puff_info.append(dict(zip(header,puff)))
    
puff_info=groupSites(puff_info,radius)

sheet=workbook.Worksheets.Add()
sheet.Name="Puff Data radius {}".format(radius)
header=list(header)
header.append('Amplitude Normalized by Site Mean')
header.append('Amplitude Normalized by Fitted Line')
for j in np.arange(len(header)):
    sheet.Cells(1,j+1).Value=header[j]
for i in np.arange(nPuffs):
    for j in np.arange(len(header)):
        sheet.Cells(int(i)+2,int(j)+1).Value=puff_info[i][header[j]]
    



workbook.Save()
workbook.Close()
excel.Quit()
Example #48
def pts2line(pts):
    line=list(pts[0].coords)
    for i in np.arange(1,len(pts)):
        line.extend(pts[i].coords)
    return LineString(line)
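A usage sketch, joining three shapely Points into a single LineString:

from shapely.geometry import Point
line = pts2line([Point(0, 0), Point(1, 1), Point(2, 0)])
print(list(line.coords))  # [(0.0, 0.0), (1.0, 1.0), (2.0, 0.0)]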