    def test_getcontent_file(self, check_file_content=True):
        """Get the content from a file."""
        data = os.urandom(300000)
        deflated_data = zlib.compress(data)
        hash_object = content_hash_factory()
        hash_object.update(data)
        hash_value = hash_object.content_hash()
        crc32_value = crc32(data)
        size = len(data)
        deflated_size = len(deflated_data)

        def check_file(req):
            if req.data != deflated_data:
                raise Exception("data does not match")

        def auth(client):
            d = client.dummy_authenticate("open sesame")
            d.addCallbacks(lambda _: client.get_root(), client.test_fail)
            d.addCallbacks(
                lambda root: client.make_file(request.ROOT, root, "hola"),
                client.test_fail)
            d.addCallback(self.save_req, 'req')
            d.addCallbacks(
                lambda mkfile_req: client.put_content(
                    request.ROOT,
                    mkfile_req.new_id, NO_CONTENT_HASH, hash_value,
                    crc32_value, size, deflated_size, StringIO(deflated_data)),
                client.test_fail)
            d.addCallback(lambda _: client.get_content(
                          request.ROOT, self._state.req.new_id, hash_value))
            if check_file_content:
                d.addCallback(check_file)
            d.addCallbacks(client.test_done, client.test_fail)

        return self.callback_test(auth, timeout=1.5)
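Most of the examples below repeat the same preamble: build a payload, deflate it, and compute the content hash, crc32 and both sizes. A minimal helper sketch of that preamble, not part of the original suite (the function name and the import path are assumptions for illustration only):

import os
import zlib
from StringIO import StringIO

# Import path assumed; use wherever content_hash_factory / crc32 live in this tree.
from ubuntuone.storageprotocol.content_hash import content_hash_factory, crc32


def make_payload(data=None, data_len=300000):
    """Return (deflated fd, hash_value, crc32_value, size, deflated_size)."""
    if data is None:
        data = os.urandom(data_len)
    deflated_data = zlib.compress(data)
    hash_object = content_hash_factory()
    hash_object.update(data)
    return (StringIO(deflated_data), hash_object.content_hash(),
            crc32(data), len(data), len(deflated_data))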
Example 2
    def test_putcontent_unlinked(self):
        """Try to put content in an unlinked file."""
        empty_hash = content_hash_factory().content_hash()
        data = "*"
        size = 1
        hash_object = content_hash_factory()
        hash_object.update(data)
        hash_value = hash_object.content_hash()
        crc32_value = crc32(data)

        def auth(client):
            # setup
            d = client.dummy_authenticate("open sesame")
            d.addCallback(lambda r: client.get_root())

            # create file and remove it
            d.addCallback(lambda r: client.make_file(request.ROOT, r, "hola"))
            d.addCallback(lambda req: self._save_state("file", req.new_id))
            d.addCallback(
                lambda _: client.unlink(request.ROOT, self._state.file))

            # try to put content
            d.addCallback(lambda _: client.put_content(
                request.ROOT, self._state.file, empty_hash, hash_value,
                crc32_value, size, StringIO(data)))
            d.addCallbacks(client.test_fail, lambda x: client.test_done("ok"))

        return self.callback_test(auth)
Example 3
    def test_put_content(self):
        """Write a file."""
        data = "*" * 10000
        deflated_data = zlib.compress(data)
        hash_object = content_hash_factory()
        hash_object.update(data)
        hash_value = hash_object.content_hash()
        crc32_value = crc32(data)
        size = len(data)
        deflated_size = len(deflated_data)

        def auth(client):
            """Authenticate and test."""
            d = client.dummy_authenticate("open sesame")
            d.addCallback(lambda _: client.create_udf(u"~", u"myudf"))
            d.addCallback(self.save_req, "udf")

            # create a file with content
            d.addCallback(lambda r: client.make_file(self._state.udf.volume_id,
                                                     self._state.udf.node_id,
                                                     "foo"))
            # put content
            d.addCallback(lambda req: client.put_content(
                          self._state.udf.volume_id, req.new_id,
                          NO_CONTENT_HASH, hash_value, crc32_value, size,
                          deflated_size, StringIO(deflated_data)))

            d.addCallbacks(client.test_done, client.test_fail)
        return self.callback_test(auth)
Example 4
    @defer.inlineCallbacks
    def test_getcontent_file_slow(self):
        """Get content from a file with very low BW and fail with timeout."""
        data = os.urandom(300000)
        deflated_data = zlib.compress(data)
        hash_object = content_hash_factory()
        hash_object.update(data)
        hash_value = hash_object.content_hash()
        crc32_value = crc32(data)
        size = len(data)
        deflated_size = len(deflated_data)

        @defer.inlineCallbacks
        def auth(client):
            """Test."""
            yield client.dummy_authenticate("open sesame")
            root = yield client.get_root()

            # make a file and put content in it
            mkfile_req = yield client.make_file(request.ROOT, root, "hola")
            yield client.put_content(request.ROOT, mkfile_req.new_id,
                                     NO_CONTENT_HASH, hash_value, crc32_value,
                                     size, deflated_size,
                                     StringIO(deflated_data))

            # set the read limit, and get content
            client.factory.factory.readLimit = 1000
            yield client.get_content(request.ROOT, mkfile_req.new_id,
                                     hash_value)

        d = self.callback_test(auth, add_default_callbacks=True, timeout=0.1)
        err = yield self.assertFailure(d, Exception)
        self.assertEqual(str(err), "timeout")
Example 5
    @defer.inlineCallbacks
    def test_proxy_producer_streaming(self):
        """Test ProxyHashingProducer."""
        data = os.urandom(1024 * 10)
        message = zlib.compress(data)
        ds = diskstorage.DiskStorage(os.path.join(self.tmpdir, "testfile"))
        consumer = ds.put("somenode")
        producer = upload.ProxyHashingProducer(consumer, True)

        chunk_sz = 10
        for part in xrange(0, len(message), chunk_sz):
            yield producer.dataReceived(message[part:part + chunk_sz])
        producer.stopProducing()
        yield producer.flush_decompressor()

        with open(consumer.filepath, "rb") as fh:
            self.assertEqual(fh.read(), message)
        hasher = content_hash_factory()
        hasher.update(data)
        self.assertEqual(producer.hash_object.content_hash(),
                         hasher.content_hash())
        magic_hasher = magic_hash_factory()
        magic_hasher.update(data)
        self.assertEqual(producer.magic_hash_object.content_hash()._magic_hash,
                         magic_hasher.content_hash()._magic_hash)
        self.assertEqual(producer.inflated_size, len(data))
        self.assertEqual(producer.crc32, crc32(data))
    def test_get_content_on_share(self):
        """read a file on a share."""
        data = ""
        deflated_data = zlib.compress(data)
        hash_object = content_hash_factory()
        hash_object.update(data)
        hash_value = hash_object.content_hash()
        crc32_value = crc32(data)
        size = len(data)
        deflated_size = len(deflated_data)

        def auth(client):
            """auth"""
            d = client.dummy_authenticate("open sesame")
            # need to put data to be able to retrieve it!
            d.addCallback(
                lambda r: client.put_content(
                    self.share_modify, self.filerw, NO_CONTENT_HASH,
                    hash_value, crc32_value, size, deflated_size,
                    StringIO(deflated_data)))
            d.addCallback(
                lambda r: client.get_content(
                    self.share_modify, self.filerw, EMPTY_HASH))
            d.addCallbacks(client.test_done, client.test_fail)

        return self.callback_test(auth)
Example 7
    def test_getcontent_file(self, check_file_content=True):
        """Get the content from a file."""
        data = os.urandom(300000)
        deflated_data = zlib.compress(data)
        hash_object = content_hash_factory()
        hash_object.update(data)
        hash_value = hash_object.content_hash()
        crc32_value = crc32(data)
        size = len(data)
        deflated_size = len(deflated_data)

        def check_file(req):
            if req.data != deflated_data:
                raise Exception("data does not match")

        def auth(client):
            d = client.dummy_authenticate("open sesame")
            d.addCallbacks(lambda _: client.get_root(), client.test_fail)
            d.addCallbacks(
                lambda root: client.make_file(request.ROOT, root, "hola"),
                client.test_fail)
            d.addCallback(self.save_req, 'req')
            d.addCallbacks(
                lambda mkfile_req: client.put_content(
                    request.ROOT, mkfile_req.new_id, NO_CONTENT_HASH,
                    hash_value, crc32_value, size, deflated_size,
                    StringIO(deflated_data)),
                client.test_fail)
            d.addCallback(lambda _: client.get_content(
                request.ROOT, self._state.req.new_id, hash_value))
            if check_file_content:
                d.addCallback(check_file)
            d.addCallbacks(client.test_done, client.test_fail)

        return self.callback_test(auth, timeout=1.5)
Example 8
    def test_putcontent_unlinked(self):
        """Try to put content in an unlinked file."""
        empty_hash = content_hash_factory().content_hash()
        data = "*"
        size = 1
        hash_object = content_hash_factory()
        hash_object.update(data)
        hash_value = hash_object.content_hash()
        crc32_value = crc32(data)

        def auth(client):
            # setup
            d = client.dummy_authenticate("open sesame")
            d.addCallback(lambda r: client.get_root())

            # create file and remove it
            d.addCallback(lambda r: client.make_file(request.ROOT, r, "hola"))
            d.addCallback(lambda req: self._save_state("file", req.new_id))
            d.addCallback(lambda _: client.unlink(request.ROOT,
                                                  self._state.file))

            # try to put content
            d.addCallback(lambda _: client.put_content(
                request.ROOT, self._state.file, empty_hash, hash_value,
                crc32_value, size, StringIO(data)))
            d.addCallbacks(client.test_fail, lambda x: client.test_done("ok"))
        return self.callback_test(auth)
    @defer.inlineCallbacks
    def test_getcontent_file_slow(self):
        """Get content from a file with very low BW and fail with timeout."""
        data = os.urandom(300000)
        deflated_data = zlib.compress(data)
        hash_object = content_hash_factory()
        hash_object.update(data)
        hash_value = hash_object.content_hash()
        crc32_value = crc32(data)
        size = len(data)
        deflated_size = len(deflated_data)

        @defer.inlineCallbacks
        def auth(client):
            """Test."""
            yield client.dummy_authenticate("open sesame")
            root = yield client.get_root()

            # make a file and put content in it
            mkfile_req = yield client.make_file(request.ROOT, root, "hola")
            yield client.put_content(request.ROOT, mkfile_req.new_id,
                                     NO_CONTENT_HASH, hash_value, crc32_value,
                                     size, deflated_size,
                                     StringIO(deflated_data))

            # set the read limit, and get content
            client.factory.factory.readLimit = 1000
            yield client.get_content(request.ROOT, mkfile_req.new_id,
                                     hash_value)

        d = self.callback_test(auth, add_default_callbacks=True,
                               timeout=0.1)
        err = yield self.assertFailure(d, Exception)
        self.assertEqual(str(err), "timeout")
Example 10
    def _hash(self, path):
        """Actually hashes a file."""
        hasher = content_hash_factory()
        crc = 0
        size = 0
        try:
            initial_stat = stat_path(path)
            with open_file(path, 'rb') as fh:
                while True:
                    # stop hashing if path_to_cancel = path or _stopped is True
                    with self.mutex:
                        path_to_cancel = self._should_cancel
                    if path_to_cancel == path or self._stopped:
                        raise StopHashing('hashing of %r was cancelled' % path)
                    cont = fh.read(self.chunk_size)
                    if not cont:
                        break
                    hasher.update(cont)
                    crc = crc32(cont, crc)
                    size += len(cont)
        finally:
            with self.mutex:
                self._should_cancel = None

        return hasher.content_hash(), crc, size, initial_stat
    @defer.inlineCallbacks
    def test_upload(self):
        """Hiccup the network in the middle of an upload."""
        data = os.urandom(1000)
        hash_object = content_hash_factory()
        hash_object.update(data)
        hash_value = hash_object.content_hash()
        crc32_value = crc32(data)
        size = len(data)
        self.patch(self.main.fs, 'open_file', lambda mdid: StringIO(data))
        mdid, node_id = yield self._mkfile('hola')

        def worker():
            """Async worker."""
            self.aq.upload('', node_id, NO_CONTENT_HASH, hash_value,
                           crc32_value, size, mdid)
            return self.hiccup()
        d = self.wait_for_nirvana()
        d.addCallback(lambda _: self.nuke_client_method(
            'put_content_request', worker, lambda: self.connlost_deferred))

        self.assertInQ(d, lambda: ('AQ_UPLOAD_FINISHED',
                                   {'share_id': '',
                                    'hash': hash_value,
                                    'node_id': anUUID,
                                    'new_generation': 2L}))
        yield d
    def _get_data(self, data_len=1000):
        """Get the hash, crc and size of a chunk of data."""
        data = os.urandom(data_len)  # not terribly compressible
        hash_object = content_hash_factory()
        hash_object.update(data)
        hash_value = hash_object.content_hash()
        crc32_value = crc32(data)
        size = len(data)
        return NoCloseStringIO(data), data, hash_value, crc32_value, size
Example 14
    def test_unique(self):
        """The hasher should return in order."""
        # calculate what we should receive
        should_be = []
        for i in range(10):
            hasher = content_hash_factory()
            text = "supercalifragilistico"+str(i)
            hasher.hash_object.update(text)
            tfile = os.path.join(self.test_dir, "tfile"+str(i))
            with open_file(tfile, "wb") as fh:
                fh.write("supercalifragilistico"+str(i))
            d = dict(path=tfile, hash=hasher.content_hash(),
                     crc32=crc32(text), size=len(text), stat=stat_path(tfile))
            should_be.append(("HQ_HASH_NEW", d))

        d = defer.Deferred()
        class Helper(object):
            """Helper class."""
            # class-closure, cannot use self, pylint: disable-msg=E0213
            def __init__(innerself):
                innerself.store = []
            def push(innerself, event, **kwargs):
                """Callback."""
                innerself.store.append((event, kwargs))
                if len(innerself.store) == 10:
                    if innerself.store == should_be:
                        d.callback(True)
                    else:
                        d.errback(Exception("are different!"))

        receiver = Helper()

        hq = hash_queue.HashQueue(receiver)
        self.addCleanup(hq.shutdown)
        # stop the hasher so we can test the unique items in the queue
        hq.hasher.stop()
        self.log.debug('Hasher stopped (forced)')
        # allow the hasher to fully stop
        time.sleep(0.1)
        # create a new hasher just like the HashQueue creates it
        hq.hasher = hash_queue._Hasher(hq._queue, hq._end_mark, receiver)
        hq.hasher.setDaemon(True)

        # send to hash twice
        for i in range(10):
            tfile = os.path.join(self.test_dir, "tfile"+str(i))
            hq.insert(tfile, "mdid")
            hq.insert(tfile, "mdid")
        # start the hasher
        self.log.debug('Hasher started (forced)')
        hq.hasher.start()
        # insert the last item to check the uniqueness in the queue while
        # the hasher is running
        for i in range(9, 10):
            tfile = os.path.join(self.test_dir, "tfile"+str(i))
            hq.insert(tfile, "mdid")
        return d
    def test_putcontent_slow(self, num_files=1):
        """Test putting content to a file with very low bandwidth and fail
        with timeout.
        """
        data = os.urandom(30000)
        deflated_data = zlib.compress(data)
        hash_object = content_hash_factory()
        hash_object.update(data)
        hash_value = hash_object.content_hash()
        crc32_value = crc32(data)
        size = len(data)
        deflated_size = len(deflated_data)

        def auth(client):
            def check_file(result):
                def _check_file():
                    filesync_tm.begin()
                    try:
                        store = get_filesync_store()
                        content_blob = store.get(ContentBlob, hash_value)
                        if not content_blob:
                            raise ValueError("content blob is not there")
                    finally:
                        filesync_tm.abort()

                d = threads.deferToThread(_check_file)
                return d

            d = client.dummy_authenticate("open sesame")
            filename = "hola_1"
            d.addCallbacks(lambda _: client.get_root(), client.test_fail)
            d.addCallbacks(
                lambda root: client.make_file(request.ROOT, root, filename),
                client.test_fail,
            )

            def set_write_limit(r):
                client.factory.factory.writeLimit = 100
                return r

            d.addCallback(set_write_limit)
            d.addCallbacks(
                lambda mkfile_req: client.put_content(
                    request.ROOT,
                    mkfile_req.new_id,
                    NO_CONTENT_HASH,
                    hash_value,
                    crc32_value,
                    size,
                    deflated_size,
                    StringIO(deflated_data),
                ),
                client.test_fail,
            )
            return d

        d1 = defer.Deferred()
        test_d = self.callback_test(auth, timeout=1)
        test_d.addCallbacks(d1.errback, lambda r: d1.callback(None))
        return d1
Example 16
    def test_putcontent_slow(self, num_files=1):
        """Test putting content to a file with very low bandwidth and fail
        with timeout.
        """
        data = os.urandom(30000)
        deflated_data = zlib.compress(data)
        hash_object = content_hash_factory()
        hash_object.update(data)
        hash_value = hash_object.content_hash()
        crc32_value = crc32(data)
        size = len(data)
        deflated_size = len(deflated_data)

        def auth(client):
            def check_file(result):
                def _check_file():
                    storage_tm.begin()
                    try:
                        store = get_storage_store()
                        content_blob = store.get(model.ContentBlob, hash_value)
                        if not content_blob:
                            raise ValueError("content blob is not there")
                    finally:
                        storage_tm.abort()

                d = threads.deferToThread(_check_file)
                return d

            d = client.dummy_authenticate("open sesame")
            filename = 'hola_1'
            d.addCallbacks(lambda _: client.get_root(), client.test_fail)
            d.addCallbacks(
                lambda root: client.make_file(request.ROOT, root, filename),
                client.test_fail)

            def set_write_limit(r):
                client.factory.factory.writeLimit = 100
                return r

            d.addCallback(set_write_limit)
            d.addCallbacks(
                lambda mkfile_req: client.put_content(
                    request.ROOT, mkfile_req.new_id, NO_CONTENT_HASH,
                    hash_value, crc32_value, size, deflated_size,
                    StringIO(deflated_data)),
                client.test_fail)
            return d

        d1 = defer.Deferred()
        test_d = self.callback_test(auth, timeout=1)
        test_d.addCallbacks(d1.errback, lambda r: d1.callback(None))
        return d1
    def test_putcontent(self, num_files=1):
        """Test putting content to a file."""
        data = os.urandom(300000)
        deflated_data = zlib.compress(data)
        hash_object = content_hash_factory()
        hash_object.update(data)
        hash_value = hash_object.content_hash()
        crc32_value = crc32(data)
        size = len(data)
        deflated_size = len(deflated_data)

        def auth(client):
            def check_file(result):
                def _check_file():
                    filesync_tm.begin()
                    try:
                        store = get_filesync_store()
                        content_blob = store.get(ContentBlob, hash_value)
                        if not content_blob:
                            raise ValueError("content blob is not there")
                    finally:
                        filesync_tm.abort()

                d = threads.deferToThread(_check_file)
                return d

            d = client.dummy_authenticate("open sesame")
            filenames = iter("hola_%d" % i for i in xrange(num_files))
            for i in range(num_files):
                d.addCallbacks(lambda _: client.get_root(), client.test_fail)
                d.addCallbacks(
                    lambda root: client.make_file(
                        request.ROOT, root, filenames.next()
                    ),
                    client.test_fail,
                )
                d.addCallbacks(
                    lambda mkfile_req: client.put_content(
                        request.ROOT,
                        mkfile_req.new_id,
                        NO_CONTENT_HASH,
                        hash_value,
                        crc32_value,
                        size,
                        deflated_size,
                        StringIO(deflated_data),
                    ),
                    client.test_fail,
                )
            d.addCallback(check_file)
            d.addCallbacks(client.test_done, client.test_fail)
            return d

        return self.callback_test(auth, timeout=1)
Example 18
    def test_order(self):
        """The hasher should return in order."""
        # calculate what we should receive
        should_be = []
        for i in range(10):
            hasher = content_hash_factory()
            text = "supercalifragilistico"+str(i)
            hasher.hash_object.update(text)
            tfile = os.path.join(self.test_dir, "tfile"+str(i))
            with open_file(tfile, "wb") as fh:
                fh.write("supercalifragilistico"+str(i))
            d = dict(path=tfile, hash=hasher.content_hash(),
                     crc32=crc32(text), size=len(text), stat=stat_path(tfile))
            should_be.append(("HQ_HASH_NEW", d))

        # create the hasher
        mark = object()
        queue = hash_queue.UniqueQueue()
        d = defer.Deferred()

        class Helper(object):
            """Helper class."""
            # class-closure, cannot use self, pylint: disable-msg=E0213
            def __init__(innerself):
                innerself.store = []
            def push(innerself, event, **kwargs):
                """Callback."""
                innerself.store.append((event, kwargs))
                if len(innerself.store) == 10:
                    hasher.stop()
                    hasher.join(timeout=5)
                    if innerself.store == should_be:
                        d.callback(True)
                    else:
                        d.errback(Exception("are different!"))
        receiver = Helper()

        hasher = hash_queue._Hasher(queue, mark, receiver)

        # send what to hash
        for i in range(10):
            tfile = os.path.join(self.test_dir, "tfile"+str(i))
            item = ((tfile, "mdid"), FAKE_TIMESTAMP)
            queue.put(item)

        # start the hasher after putting the work items
        hasher.start()

        return d
Example 19
    def put(self, local, remote):
        """Put local file into remote file."""
        try:
            node_id = self.get_id_from_filename(remote)
        except ValueError:
            parent_id = self.get_cwd_id()
            r = self.defer_from_thread(
                self.factory.current_protocol.make_file, self.volume, parent_id, remote.split("/")[-1]
            )
            node_id = r.new_id

        old_hash = self.get_hash(node_id)

        ho = content_hash_factory()
        zipper = zlib.compressobj()
        crc32_value = 0
        size = 0
        deflated_size = 0
        temp_file_name = None
        with open(local) as fh:
            with tempfile.NamedTemporaryFile(mode="w", prefix="cmd_client-", delete=False) as dest:
                temp_file_name = dest.name
                while True:
                    cont = fh.read(1024 ** 2)
                    if not cont:
                        dest.write(zipper.flush())
                        deflated_size = dest.tell()
                        break
                    ho.update(cont)
                    crc32_value = crc32(cont, crc32_value)
                    size += len(cont)
                    dest.write(zipper.compress(cont))
        hash_value = ho.content_hash()
        try:
            self.defer_from_thread(
                self.factory.current_protocol.put_content,
                self.volume,
                node_id,
                old_hash,
                hash_value,
                crc32_value,
                size,
                deflated_size,
                open(temp_file_name, "r"),
            )
        finally:
            if os.path.exists(temp_file_name):
                os.unlink(temp_file_name)
Example 20
    def test_putcontent(self, num_files=1):
        """Test putting content to a file."""
        data = os.urandom(300000)
        deflated_data = zlib.compress(data)
        hash_object = content_hash_factory()
        hash_object.update(data)
        hash_value = hash_object.content_hash()
        crc32_value = crc32(data)
        size = len(data)
        deflated_size = len(deflated_data)

        def auth(client):
            def check_file(result):
                def _check_file():
                    storage_tm.begin()
                    try:
                        store = get_storage_store()
                        content_blob = store.get(model.ContentBlob, hash_value)
                        if not content_blob:
                            raise ValueError("content blob is not there")
                    finally:
                        storage_tm.abort()

                d = threads.deferToThread(_check_file)
                return d

            d = client.dummy_authenticate("open sesame")
            filenames = iter('hola_%d' % i for i in xrange(num_files))
            for i in range(num_files):
                d.addCallbacks(lambda _: client.get_root(), client.test_fail)
                d.addCallbacks(
                    lambda root: client.make_file(request.ROOT, root,
                                                  filenames.next()),
                    client.test_fail)
                d.addCallbacks(
                    lambda mkfile_req: client.put_content(
                        request.ROOT, mkfile_req.new_id, NO_CONTENT_HASH,
                        hash_value, crc32_value, size, deflated_size,
                        StringIO(deflated_data)), client.test_fail)
            d.addCallback(check_file)
            d.addCallbacks(client.test_done, client.test_fail)
            return d

        return self.callback_test(auth, timeout=1)
    def test_put_content_on_share_ro(self):
        """Write a file on a share thats read only."""
        data = "*" * 100000
        hash_object = content_hash_factory()
        hash_object.update(data)
        hash_value = hash_object.content_hash()
        crc32_value = crc32(data)
        size = len(data)

        def auth(client):
            """auth"""
            d = client.dummy_authenticate("open sesame")
            d.addCallback(
                lambda r: client.put_content(
                    self.share, self.filero, NO_CONTENT_HASH, hash_value,
                    crc32_value, size, StringIO(data)))
            d.addCallbacks(client.test_fail, lambda x: client.test_done())

        return self.callback_test(auth)
Example 22
    def put(self, local, remote):
        """Put local file into remote file."""
        try:
            node_id = self.get_id_from_filename(remote)
        except ValueError:
            parent_id = self.get_cwd_id()
            r = self.defer_from_thread(
                self.factory.current_protocol.make_file,
                self.volume, parent_id, remote.split("/")[-1])
            node_id = r.new_id

        old_hash = self.get_hash(node_id)

        ho = content_hash_factory()
        zipper = zlib.compressobj()
        crc32_value = 0
        size = 0
        deflated_size = 0
        temp_file_name = None
        with open(local) as fh:
            with tempfile.NamedTemporaryFile(mode='w', prefix='cmd_client-',
                                             delete=False) as dest:
                temp_file_name = dest.name
                while True:
                    cont = fh.read(1024 ** 2)
                    if not cont:
                        dest.write(zipper.flush())
                        deflated_size = dest.tell()
                        break
                    ho.update(cont)
                    crc32_value = crc32(cont, crc32_value)
                    size += len(cont)
                    dest.write(zipper.compress(cont))
        hash_value = ho.content_hash()
        try:
            self.defer_from_thread(
                self.factory.current_protocol.put_content,
                self.volume, node_id, old_hash, hash_value,
                crc32_value, size, deflated_size, open(temp_file_name, 'r'))
        finally:
            if os.path.exists(temp_file_name):
                os.unlink(temp_file_name)
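The streaming loop in put() above compresses the local file into a temporary copy while accumulating the content hash, crc32 and both sizes in a single pass, so the metadata handed to put_content always matches the deflated body. A standalone sketch of just that loop, with an illustrative name and an assumed import path:

import zlib
import tempfile

# Import path assumed; use wherever content_hash_factory / crc32 live in this tree.
from ubuntuone.storageprotocol.content_hash import content_hash_factory, crc32


def deflate_and_hash(path, chunk_size=1024 ** 2):
    """Return (temp_path, hash_value, crc32_value, size, deflated_size)."""
    ho = content_hash_factory()
    zipper = zlib.compressobj()
    crc32_value = 0
    size = 0
    with open(path, 'rb') as src:
        with tempfile.NamedTemporaryFile(prefix='cmd_client-',
                                         delete=False) as dest:
            while True:
                cont = src.read(chunk_size)
                if not cont:
                    # flush the compressor and record the deflated size
                    dest.write(zipper.flush())
                    deflated_size = dest.tell()
                    break
                ho.update(cont)                         # hash the plain bytes
                crc32_value = crc32(cont, crc32_value)  # running crc32
                size += len(cont)
                dest.write(zipper.compress(cont))       # write deflated bytes only
    return dest.name, ho.content_hash(), crc32_value, size, deflated_size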
Example 23
    def test_putcontent(self):
        """Test putting content to a file."""
        data = "*" * 100000
        deflated_data = zlib.compress(data)
        hash_object = content_hash_factory()
        hash_object.update(data)
        hash_value = hash_object.content_hash()
        crc32_value = crc32(data)
        size = len(data)
        deflated_size = len(deflated_data)

        def auth(client):
            def check_file(result):
                def _check_file():
                    try:
                        content_blob = self.usr0.volume().get_content(
                            hash_value)
                    except errors.DoesNotExist:
                        raise ValueError("content blob is not there")
                    assert self.s4_site.resource.buckets["test"] \
                        .bucket_children[str(content_blob.storage_key)] \
                        .contents == deflated_data

                d = threads.deferToThread(_check_file)
                return d

            d = client.dummy_authenticate("open sesame")
            d.addCallbacks(lambda _: client.get_root(), client.test_fail)
            d.addCallbacks(
                lambda root: client.make_file(request.ROOT, root, "hola"),
                client.test_fail)
            d.addCallbacks(
                lambda mkfile_req: client.put_content(
                    request.ROOT, mkfile_req.new_id, NO_CONTENT_HASH,
                    hash_value, crc32_value, size, deflated_size,
                    StringIO(deflated_data)),
                client.test_fail)
            d.addCallback(check_file)
            d.addCallbacks(client.test_done, client.test_fail)
        return self.callback_test(auth)
Example 24
    def test_order(self):
        """The hasher should return in order."""
        # calculate what we should receive
        should_be = []
        for i in range(10):
            hasher = content_hash_factory()
            text = "supercalifragilistico"+str(i)
            hasher.hash_object.update(text)
            tfile = os.path.join(self.test_dir, "tfile"+str(i))
            with open_file(tfile, "wb") as fh:
                fh.write("supercalifragilistico"+str(i))
            d = dict(path=tfile, hash=hasher.content_hash(),
                     crc32=crc32(text), size=len(text), stat=stat_path(tfile))
            should_be.append(("HQ_HASH_NEW", d))

        d = defer.Deferred()
        class Helper(object):
            """Helper class."""
            # class-closure, cannot use self, pylint: disable-msg=E0213
            def __init__(innerself):
                innerself.store = []
            def push(innerself, event, **kwargs):
                """Callback."""
                innerself.store.append((event, kwargs))
                if len(innerself.store) == 10:
                    if innerself.store[:-1] == should_be[:-1]:
                        d.callback(True)
                    else:
                        d.errback(Exception("are different! "))
        receiver = Helper()

        hq = hash_queue.HashQueue(receiver)
        self.addCleanup(hq.shutdown)

        # send what to hash
        for i in range(10):
            tfile = os.path.join(self.test_dir, "tfile"+str(i))
            hq.insert(tfile, "mdid")
        return d
    @defer.inlineCallbacks
    def do_create_lots_of_files(self, suffix=''):
        """A helper that creates N files."""
        # data for putcontent
        ho = content_hash_factory()
        hash_value = ho.content_hash()
        crc32_value = crc32("")
        deflated_content = zlib.compress("")
        deflated_size = len(deflated_content)

        mk = yield self.client.make_file(request.ROOT, self.root_id,
                                         "test_first" + suffix)
        yield self.client.put_content(
            request.ROOT, mk.new_id, NO_CONTENT_HASH, hash_value,
            crc32_value, 0, deflated_size, StringIO(deflated_content))

        for i in xrange(self.N):
            mk = yield self.client.make_file(request.ROOT, self.root_id,
                                             "test_%03x%s" % (i, suffix))
            yield self.client.put_content(request.ROOT, mk.new_id,
                                          NO_CONTENT_HASH, hash_value,
                                          crc32_value, 0, deflated_size,
                                          StringIO(deflated_content))
    @defer.inlineCallbacks
    def do_create_lots_of_files(self, suffix=''):
        """A helper that creates N files."""
        # data for putcontent
        ho = content_hash_factory()
        hash_value = ho.content_hash()
        crc32_value = crc32("")
        deflated_content = zlib.compress("")
        deflated_size = len(deflated_content)

        mk = yield self.client.make_file(request.ROOT, self.root_id,
                                         "test_first" + suffix)
        yield self.client.put_content(request.ROOT, mk.new_id, NO_CONTENT_HASH,
                                      hash_value, crc32_value,
                                      0, deflated_size,
                                      StringIO(deflated_content))

        for i in xrange(self.N):
            mk = yield self.client.make_file(request.ROOT, self.root_id,
                                             "test_%03x%s" % (i, suffix))
            yield self.client.put_content(request.ROOT, mk.new_id,
                                          NO_CONTENT_HASH, hash_value,
                                          crc32_value, 0, deflated_size,
                                          StringIO(deflated_content))
Example 27
    def test_mkfile_already_exists_content(self):
        """Create a file on a file that already exists and have content."""
        data = "*" * 100
        deflated_data = zlib.compress(data)
        hash_object = content_hash_factory()
        hash_object.update(data)
        hash_value = hash_object.content_hash()
        crc32_value = crc32(data)
        size = len(data)
        deflated_size = len(deflated_data)

        def auth(client):
            d = client.dummy_authenticate("open sesame")
            d.addCallback(lambda r: client.get_root())
            d.addCallback(self.save_req, "root")
            d.addCallback(lambda r: client.make_file(request.ROOT, r, "hola"))
            d.addCallback(lambda req: client.put_content(request.ROOT,
                          req.new_id, NO_CONTENT_HASH, hash_value, crc32_value,
                          size, deflated_size, StringIO(deflated_data)))
            d.addCallback(lambda r: client.make_file(request.ROOT,
                                                     self._state.root, "hola"))
            d.addCallbacks(
                lambda x: client.test_done("ok"), client.test_fail)
        return self.callback_test(auth)
Example 28
    def test_mkfile_already_exists_content(self):
        """Create a file on a file that already exists and have content."""
        data = "*" * 100
        deflated_data = zlib.compress(data)
        hash_object = content_hash_factory()
        hash_object.update(data)
        hash_value = hash_object.content_hash()
        crc32_value = crc32(data)
        size = len(data)
        deflated_size = len(deflated_data)

        def auth(client):
            d = client.dummy_authenticate("open sesame")
            d.addCallback(lambda r: client.get_root())
            d.addCallback(self.save_req, "root")
            d.addCallback(lambda r: client.make_file(request.ROOT, r, "hola"))
            d.addCallback(lambda req: client.put_content(
                request.ROOT, req.new_id, NO_CONTENT_HASH, hash_value,
                crc32_value, size, deflated_size, StringIO(deflated_data)))
            d.addCallback(lambda r: client.make_file(request.ROOT,
                                                     self._state.root, "hola"))
            d.addCallbacks(lambda x: client.test_done("ok"), client.test_fail)

        return self.callback_test(auth)
Example 29
    def add_inflated_data(self, data):
        """Process inflated data to make sure checksums match."""
        self.hash_object.update(data)
        self.magic_hash_object.update(data)
        self.crc32 = crc32(data, self.crc32)
        self.inflated_size += len(data)
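add_inflated_data accumulates its checksums over the inflated stream, so in practice it is fed by a zlib decompressor as deflated chunks arrive (the ProxyHashingProducer test above exercises exactly that wiring). A minimal self-contained sketch of the idea, not the original class, with an assumed import path:

import zlib

# Import path assumed; use wherever these factories live in this tree.
from ubuntuone.storageprotocol.content_hash import (
    content_hash_factory, crc32, magic_hash_factory)


class InflatedChecksums(object):
    """Accumulate content hash, magic hash, crc32 and inflated size."""

    def __init__(self):
        self.hash_object = content_hash_factory()
        self.magic_hash_object = magic_hash_factory()
        self.crc32 = 0
        self.inflated_size = 0
        self._decompressor = zlib.decompressobj()

    def add_deflated_data(self, data):
        """Inflate a deflated chunk and checksum the result."""
        if data:
            self.add_inflated_data(self._decompressor.decompress(data))

    def add_inflated_data(self, data):
        """Process inflated data to make sure checksums match."""
        self.hash_object.update(data)
        self.magic_hash_object.update(data)
        self.crc32 = crc32(data, self.crc32)
        self.inflated_size += len(data)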