Example #1
0
    def _encode(self, k, n, data, version=SDMF_VERSION):
        """
        Publish *data* with k-of-n encoding parameters and return a
        Deferred that fires with a peerid->shares dict.

        NOTE(review): *version* is accepted but never used in this body —
        confirm whether SDMF/MDMF selection happens elsewhere.
        """
        original = self._fn
        # Defeat the node cache: these tests explicitly need several
        # distinct node objects that all point at the same file.
        self._nodemaker._node_cache = DevNullDictionary()
        clone = self._nodemaker.create_from_cap(original.get_uri())
        # Copy over the key material that would normally be recovered
        # from existing shares...
        clone._pubkey = original._pubkey
        clone._privkey = original._privkey
        clone._encprivkey = original._encprivkey
        # ...then force completely different encoding parameters.
        clone._required_shares = k
        clone._total_shares = n

        storage = self._storage
        storage._peers = {}  # wipe whatever was stored before
        publisher = Publish(clone, self._storage_broker, None)
        d = publisher.publish(MutableData(data))

        def _grab_shares(ignored):
            # Hand back the freshly-written shares and reset storage.
            shares, storage._peers = storage._peers, {}
            return shares

        d.addCallback(_grab_shares)
        return d
    def _encode(self, k, n, data, version=SDMF_VERSION):
        """
        Encode *data* into a dict mapping peerid to shares using a
        k-out-of-n encoding; the returned Deferred fires with that dict.
        """
        # The node cache would hand back the very same node object, but
        # these tests need independent nodes for one file — swap in a
        # cache that remembers nothing.
        self._nodemaker._node_cache = DevNullDictionary()
        fresh = self._nodemaker.create_from_cap(self._fn.get_uri())

        # Seed the new node with the fields normally fetched from
        # existing shares.
        for attr in ("_pubkey", "_privkey", "_encprivkey"):
            setattr(fresh, attr, getattr(self._fn, attr))
        # Then impose completely different encoding parameters.
        fresh._required_shares = k
        fresh._total_shares = n

        self._storage._peers = {}  # clear out existing storage first
        d = Publish(fresh, self._storage_broker, None).publish(
            MutableData(data))

        def _collect(ignored):
            result = self._storage._peers
            self._storage._peers = {}
            return result

        d.addCallback(_collect)
        return d
Example #3
0
 def _upload(self, new_contents, servermap):
     """Publish *new_contents* according to *servermap*; return a Deferred."""
     # No pubkey means the servermap was never brought up to date.
     assert self._pubkey, "update_servermap must be called before publish"
     publisher = Publish(self, self._storage_broker, servermap)
     if self._history:
         self._history.notify_publish(publisher.get_status(),
                                      len(new_contents))
     deferred = publisher.publish(new_contents)
     deferred.addCallback(self._did_upload, len(new_contents))
     return deferred
Example #4
0
 def _upload(self, new_contents, servermap):
     """
     Push *new_contents* out to the grid using *servermap*; the returned
     Deferred fires when the publish has completed.
     """
     assert self._pubkey, "update_servermap must be called before publish"
     p = Publish(self, self._storage_broker, servermap)
     history = self._history
     if history:
         history.notify_publish(p.get_status(), len(new_contents))
     # Twisted's addCallback returns the same Deferred, so chain directly.
     return p.publish(new_contents).addCallback(
         self._did_upload, len(new_contents))
Example #5
0
 def _build_uploadable_and_finish(self, segments_and_bht, data, offset):
     """
     Wrap the plaintext segments in the TransformingUploadable that the
     publisher will re-upload to the grid, invoke the publisher with it,
     and return a Deferred that fires once the update has completed
     without issue.
     """
     uploadable = TransformingUploadable(
         data, offset, self._version[3],
         segments_and_bht[0], segments_and_bht[1])
     publisher = Publish(self._node, self._storage_broker, self._servermap)
     return publisher.update(
         uploadable, offset, segments_and_bht[2], self._version)
Example #6
0
 def _build_uploadable_and_finish(self, segments_and_bht, data, offset):
     """
     Given the plaintext segments, build the TransformingUploadable the
     publisher will eventually re-upload to the grid, then kick off the
     publisher and return a Deferred for the completed update.
     """
     first, second, third = (segments_and_bht[0],
                             segments_and_bht[1],
                             segments_and_bht[2])
     u = TransformingUploadable(data, offset, self._version[3],
                                first, second)
     return Publish(
         self._node, self._storage_broker, self._servermap,
     ).update(u, offset, third, self._version)
Example #7
0
    def _upload(self, new_contents, servermap):
        """
        Perform the initial publish of this MutableFileNode.

        After this first publish, all publishing, updating, and
        modifying happens through MutableFileVersions; this method only
        exists to get the node onto the grid in the first place.
        """
        assert self._pubkey, "update_servermap must be called before publish"

        # TODO: define IPublishInvoker with a set_downloader_hints method,
        # and have the publisher call it when publishing finishes?
        publisher = Publish(self, self._storage_broker, servermap)
        if self._history:
            self._history.notify_publish(publisher.get_status(),
                                         new_contents.get_size())
        deferred = publisher.publish(new_contents)
        deferred.addCallback(self._did_upload, new_contents.get_size())
        return deferred
Example #8
0
    def _upload(self, new_contents, servermap):
        """
        Initial publish for a MutableFileNode. Everything after this
        first publish — publishing, updating, modifying — goes through
        MutableFileVersions instead.
        """
        assert self._pubkey, "update_servermap must be called before publish"

        # Open question: should there be an IPublishInvoker with a
        # set_downloader_hints method the publisher calls when done?
        p = Publish(self, self._storage_broker, servermap)
        if self._history:
            self._history.notify_publish(
                p.get_status(), new_contents.get_size())
        # addCallback returns the same Deferred, so the chain can be
        # returned directly.
        return p.publish(new_contents).addCallback(
            self._did_upload, new_contents.get_size())