Example #1
class TestTransfer(unittest.TestCase):
    @defer.inlineCallbacks
    def setUp(self):
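        # build a local download stack (fake wallet and peer finder, free-only
        # payments, SQLite-backed blob and encrypted-file managers) plus an
        # uploader that seeds the test file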
        mocks.mock_conf_settings(self)
        self.db_dir, self.blob_dir = mk_db_and_blob_dir()
        self.wallet = FakeWallet()
        self.peer_manager = PeerManager()
        self.peer_finder = FakePeerFinder(5553, self.peer_manager, 1)
        self.rate_limiter = RateLimiter()
        self.prm = OnlyFreePaymentsManager()
        self.storage = SQLiteStorage(self.db_dir)
        self.blob_manager = DiskBlobManager(self.blob_dir, self.storage)
        self.sd_identifier = StreamDescriptorIdentifier()
        self.lbry_file_manager = EncryptedFileManager(
            self.peer_finder, self.rate_limiter, self.blob_manager,
            self.wallet, self.prm, self.storage, self.sd_identifier)

        self.uploader = LbryUploader(5209343)
        self.sd_hash = yield self.uploader.setup()
        yield self.storage.setup()
        yield self.blob_manager.setup()
        yield self.lbry_file_manager.setup()
        yield add_lbry_file_to_sd_identifier(self.sd_identifier)

    @defer.inlineCallbacks
    def tearDown(self):
        yield self.uploader.stop()
        lbry_files = self.lbry_file_manager.lbry_files
        for lbry_file in lbry_files:
            yield self.lbry_file_manager.delete_lbry_file(lbry_file)
        yield self.lbry_file_manager.stop()
        yield self.blob_manager.stop()
        yield self.storage.stop()
        rm_db_and_blob_dir(self.db_dir, self.blob_dir)
        if os.path.exists("test_file"):
            os.remove("test_file")

    @defer.inlineCallbacks
    def test_lbry_transfer(self):
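        # download the stream descriptor published by the uploader, build a
        # downloader from its metadata, and verify the md5 of the result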
        sd_blob = yield download_sd_blob(self.sd_hash, self.blob_manager,
                                         self.peer_finder, self.rate_limiter,
                                         self.prm, self.wallet)
        metadata = yield self.sd_identifier.get_metadata_for_sd_blob(sd_blob)
        downloader = yield metadata.factories[0].make_downloader(
            metadata,
            self.prm.min_blob_data_payment_rate,
            self.prm,
            self.db_dir,
            download_mirrors=None)
        yield downloader.start()
        with open(os.path.join(self.db_dir, 'test_file'), 'rb') as f:
            hashsum = md5()
            hashsum.update(f.read())
        self.assertEqual(hashsum.hexdigest(),
                         "4ca2aafb4101c1e42235aad24fbb83be")
Example #2
class FileManagerComponent(Component):
    component_name = FILE_MANAGER_COMPONENT
    depends_on = [
        DHT_COMPONENT, RATE_LIMITER_COMPONENT, BLOB_COMPONENT,
        DATABASE_COMPONENT, WALLET_COMPONENT, STREAM_IDENTIFIER_COMPONENT,
        PAYMENT_RATE_COMPONENT
    ]

    def __init__(self, component_manager):
        Component.__init__(self, component_manager)
        self.file_manager = None

    @property
    def component(self):
        return self.file_manager

    def get_status(self):
        if not self.file_manager:
            return
        return {'managed_files': len(self.file_manager.lbry_files)}

    @defer.inlineCallbacks
    def start(self):
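        # look up the components this one depends on, then build and set up
        # the EncryptedFileManager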
        dht_node = self.component_manager.get_component(DHT_COMPONENT)
        rate_limiter = self.component_manager.get_component(
            RATE_LIMITER_COMPONENT)
        blob_manager = self.component_manager.get_component(BLOB_COMPONENT)
        storage = self.component_manager.get_component(DATABASE_COMPONENT)
        wallet = self.component_manager.get_component(WALLET_COMPONENT)
        sd_identifier = self.component_manager.get_component(
            STREAM_IDENTIFIER_COMPONENT)
        payment_rate_manager = self.component_manager.get_component(
            PAYMENT_RATE_COMPONENT)
        log.info('Starting the file manager')
        self.file_manager = EncryptedFileManager(dht_node.peer_finder,
                                                 rate_limiter, blob_manager,
                                                 wallet, payment_rate_manager,
                                                 storage, sd_identifier)
        yield self.file_manager.setup()
        log.info('Done setting up file manager')

    @defer.inlineCallbacks
    def stop(self):
        yield self.file_manager.stop()
Example #3
class LbryUploader(object):
    def __init__(self,
                 sd_hash_queue,
                 kill_event,
                 dead_event,
                 file_size,
                 ul_rate_limit=None,
                 is_generous=False):
        self.sd_hash_queue = sd_hash_queue
        self.kill_event = kill_event
        self.dead_event = dead_event
        self.file_size = file_size
        self.ul_rate_limit = ul_rate_limit
        self.is_generous = is_generous
        # these attributes get defined in `start`
        self.reactor = None
        self.sd_identifier = None
        self.session = None
        self.lbry_file_manager = None
        self.server_port = None
        self.kill_check = None

    def start(self):
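        # bring up a fake session and file manager for the uploader, then
        # start the reactor if it is not already running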
        use_epoll_on_linux()
        from twisted.internet import reactor
        self.reactor = reactor
        logging.debug("Starting the uploader")
        Random.atfork()
        r = random.Random()
        r.seed("start_lbry_uploader")
        wallet = FakeWallet()
        peer_manager = PeerManager()
        peer_finder = FakePeerFinder(5553, peer_manager, 1)
        hash_announcer = FakeAnnouncer()
        rate_limiter = RateLimiter()
        self.sd_identifier = StreamDescriptorIdentifier()
        db_dir = "server"
        os.mkdir(db_dir)
        self.session = Session(conf.ADJUSTABLE_SETTINGS['data_rate'][1],
                               db_dir=db_dir,
                               lbryid="abcd",
                               peer_finder=peer_finder,
                               hash_announcer=hash_announcer,
                               peer_port=5553,
                               use_upnp=False,
                               rate_limiter=rate_limiter,
                               wallet=wallet,
                               blob_tracker_class=DummyBlobAvailabilityTracker,
                               dht_node_class=Node,
                               is_generous=self.is_generous)
        stream_info_manager = TempEncryptedFileMetadataManager()
        self.lbry_file_manager = EncryptedFileManager(self.session,
                                                      stream_info_manager,
                                                      self.sd_identifier)
        if self.ul_rate_limit is not None:
            self.session.rate_limiter.set_ul_limit(self.ul_rate_limit)
        reactor.callLater(1, self.start_all)
        if not reactor.running:
            reactor.run()

    def start_all(self):
        d = self.session.setup()
        d.addCallback(
            lambda _: add_lbry_file_to_sd_identifier(self.sd_identifier))
        d.addCallback(lambda _: self.lbry_file_manager.setup())
        d.addCallback(lambda _: self.start_server())
        d.addCallback(lambda _: self.create_stream())
        d.addCallback(self.create_stream_descriptor)
        d.addCallback(self.put_sd_hash_on_queue)

        def print_error(err):
            logging.critical("Server error: %s", err.getErrorMessage())

        d.addErrback(print_error)
        return d

    def start_server(self):
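        # serve blob availability, blob request and wallet info queries on
        # TCP port 5553, and poll for the kill event once a second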
        session = self.session
        query_handler_factories = {
            1: BlobAvailabilityHandlerFactory(session.blob_manager),
            2: BlobRequestHandlerFactory(session.blob_manager, session.wallet,
                                         session.payment_rate_manager, None),
            3: session.wallet.get_wallet_info_query_handler_factory(),
        }
        server_factory = ServerProtocolFactory(session.rate_limiter,
                                               query_handler_factories,
                                               session.peer_manager)
        self.server_port = self.reactor.listenTCP(5553, server_factory)
        logging.debug("Started listening")
        self.kill_check = task.LoopingCall(self.check_for_kill)
        self.kill_check.start(1.0)
        return True

    def kill_server(self):
        session = self.session
        ds = []
        ds.append(session.shut_down())
        ds.append(self.lbry_file_manager.stop())
        if self.server_port:
            ds.append(self.server_port.stopListening())
        self.kill_check.stop()
        self.dead_event.set()
        dl = defer.DeferredList(ds)
        dl.addCallback(lambda _: self.reactor.stop())
        return dl

    def check_for_kill(self):
        if self.kill_event.is_set():
            self.kill_server()

    def create_stream(self):
        test_file = GenFile(self.file_size,
                            b''.join([chr(i) for i in xrange(0, 64, 6)]))
        d = create_lbry_file(self.session, self.lbry_file_manager, "test_file",
                             test_file)
        return d

    def create_stream_descriptor(self, stream_hash):
        descriptor_writer = BlobStreamDescriptorWriter(
            self.session.blob_manager)
        d = get_sd_info(self.lbry_file_manager.stream_info_manager,
                        stream_hash, True)
        d.addCallback(descriptor_writer.create_descriptor)
        return d

    def put_sd_hash_on_queue(self, sd_hash):
        self.sd_hash_queue.put(sd_hash)
Example #4
class CreateEncryptedFileTest(unittest.TestCase):
    timeout = 5

    def setUp(self):
        mocks.mock_conf_settings(self)
        self.tmp_db_dir, self.tmp_blob_dir = mk_db_and_blob_dir()
        self.wallet = FakeWallet()
        self.peer_manager = PeerManager()
        self.peer_finder = FakePeerFinder(5553, self.peer_manager, 2)
        self.rate_limiter = DummyRateLimiter()
        self.sd_identifier = StreamDescriptorIdentifier()
        self.storage = SQLiteStorage(self.tmp_db_dir)
        self.blob_manager = DiskBlobManager(self.tmp_blob_dir, self.storage)
        self.prm = OnlyFreePaymentsManager()
        self.lbry_file_manager = EncryptedFileManager(
            self.peer_finder, self.rate_limiter, self.blob_manager,
            self.wallet, self.prm, self.storage, self.sd_identifier)
        d = self.storage.setup()
        d.addCallback(lambda _: self.lbry_file_manager.setup())
        return d

    @defer.inlineCallbacks
    def tearDown(self):
        yield self.lbry_file_manager.stop()
        yield self.blob_manager.stop()
        yield self.storage.stop()
        rm_db_and_blob_dir(self.tmp_db_dir, self.tmp_blob_dir)

    @defer.inlineCallbacks
    def create_file(self, filename):
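        # generate a 3 MB file of '1' bytes and create an encrypted lbry file
        # from it with a fixed key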
        handle = mocks.GenFile(3 * MB, '1')
        key = '2' * (AES.block_size / 8)
        out = yield EncryptedFileCreator.create_lbry_file(
            self.blob_manager, self.storage, self.prm, self.lbry_file_manager,
            filename, handle, key, iv_generator())
        defer.returnValue(out)

    @defer.inlineCallbacks
    def test_can_create_file(self):
        expected_stream_hash = "41e6b247d923d191b154fb6f1b8529d6ddd6a73d65c35" \
                               "7b1acb742dd83151fb66393a7709e9f346260a4f4db6de10c25"
        expected_sd_hash = "db043b44384c149126685990f6bb6563aa565ae331303d522" \
                           "c8728fe0534dd06fbcacae92b0891787ad9b68ffc8d20c1"
        filename = 'test.file'
        lbry_file = yield self.create_file(filename)
        sd_hash = yield self.storage.get_sd_blob_hash_for_stream(
            lbry_file.stream_hash)

        # read the sd blob file
        sd_blob = self.blob_manager.blobs[sd_hash]
        sd_reader = BlobStreamDescriptorReader(sd_blob)
        sd_file_info = yield sd_reader.get_info()

        # this comes from the database, the blobs returned are sorted
        sd_info = yield get_sd_info(self.storage,
                                    lbry_file.stream_hash,
                                    include_blobs=True)
        self.assertDictEqual(sd_info, sd_file_info)
        self.assertListEqual(sd_info['blobs'], sd_file_info['blobs'])
        self.assertEqual(sd_info['stream_hash'], expected_stream_hash)
        self.assertEqual(len(sd_info['blobs']), 3)
        self.assertNotEqual(sd_info['blobs'][0]['length'], 0)
        self.assertNotEqual(sd_info['blobs'][1]['length'], 0)
        self.assertEqual(sd_info['blobs'][2]['length'], 0)
        self.assertEqual(expected_stream_hash, lbry_file.stream_hash)
        self.assertEqual(sd_hash, lbry_file.sd_hash)
        self.assertEqual(sd_hash, expected_sd_hash)
        blobs = yield self.blob_manager.get_all_verified_blobs()
        self.assertEqual(3, len(blobs))
        num_should_announce_blobs = yield (
            self.blob_manager.count_should_announce_blobs())
        self.assertEqual(2, num_should_announce_blobs)

    @defer.inlineCallbacks
    def test_can_create_file_with_unicode_filename(self):
        expected_stream_hash = (
            'd1da4258f3ce12edb91d7e8e160d091d3ab1432c2e55a6352dce0'
            '2fd5adb86fe144e93e110075b5865fff8617776c6c0')
        filename = u'☃.file'
        lbry_file = yield self.create_file(filename)
        self.assertEqual(expected_stream_hash, lbry_file.stream_hash)
Example #5
class CreateEncryptedFileTest(unittest.TestCase):
    timeout = 5

    def setUp(self):
        mocks.mock_conf_settings(self)
        self.tmp_db_dir, self.tmp_blob_dir = mk_db_and_blob_dir()
        self.wallet = FakeWallet()
        self.peer_manager = PeerManager()
        self.peer_finder = FakePeerFinder(5553, self.peer_manager, 2)
        self.rate_limiter = DummyRateLimiter()
        self.sd_identifier = StreamDescriptorIdentifier()
        self.storage = SQLiteStorage(self.tmp_db_dir)
        self.blob_manager = DiskBlobManager(self.tmp_blob_dir, self.storage)
        self.prm = OnlyFreePaymentsManager()
        self.lbry_file_manager = EncryptedFileManager(
            self.peer_finder, self.rate_limiter, self.blob_manager,
            self.wallet, self.prm, self.storage, self.sd_identifier)
        d = self.storage.setup()
        d.addCallback(lambda _: self.lbry_file_manager.setup())
        return d

    @defer.inlineCallbacks
    def tearDown(self):
        yield self.lbry_file_manager.stop()
        yield self.blob_manager.stop()
        yield self.storage.stop()
        rm_db_and_blob_dir(self.tmp_db_dir, self.tmp_blob_dir)

    @defer.inlineCallbacks
    def create_file(self, filename):
        handle = mocks.GenFile(3 * MB, b'1')
        key = b'2' * (AES.block_size // 8)
        out = yield EncryptedFileCreator.create_lbry_file(
            self.blob_manager, self.storage, self.prm, self.lbry_file_manager,
            filename, handle, key, iv_generator())
        defer.returnValue(out)

    @defer.inlineCallbacks
    def test_can_create_file(self):
        expected_stream_hash = "41e6b247d923d191b154fb6f1b8529d6ddd6a73d65c35" \
                               "7b1acb742dd83151fb66393a7709e9f346260a4f4db6de10c25"
        expected_sd_hash = "40c485432daec586c1a2d247e6c08d137640a5af6e81f3f652" \
                           "3e62e81a2e8945b0db7c94f1852e70e371d917b994352c"
        filename = 'test.file'
        lbry_file = yield self.create_file(filename)
        sd_hash = yield self.storage.get_sd_blob_hash_for_stream(
            lbry_file.stream_hash)

        # read the sd blob file
        sd_blob = self.blob_manager.blobs[sd_hash]
        sd_reader = BlobStreamDescriptorReader(sd_blob)
        sd_file_info = yield sd_reader.get_info()

        # this comes from the database, the blobs returned are sorted
        sd_info = yield get_sd_info(self.storage,
                                    lbry_file.stream_hash,
                                    include_blobs=True)
        self.maxDiff = None
        unicode_sd_info = json.loads(
            json.dumps(sd_info, sort_keys=True, cls=JSONBytesEncoder))
        self.assertDictEqual(unicode_sd_info, sd_file_info)
        self.assertEqual(sd_info['stream_hash'], expected_stream_hash)
        self.assertEqual(len(sd_info['blobs']), 3)
        self.assertNotEqual(sd_info['blobs'][0]['length'], 0)
        self.assertNotEqual(sd_info['blobs'][1]['length'], 0)
        self.assertEqual(sd_info['blobs'][2]['length'], 0)
        self.assertEqual(expected_stream_hash, lbry_file.stream_hash)
        self.assertEqual(sd_hash, lbry_file.sd_hash)
        self.assertEqual(sd_hash, expected_sd_hash)
        blobs = yield self.blob_manager.get_all_verified_blobs()
        self.assertEqual(3, len(blobs))
        num_should_announce_blobs = yield (
            self.blob_manager.count_should_announce_blobs())
        self.assertEqual(2, num_should_announce_blobs)

    @defer.inlineCallbacks
    def test_can_create_file_with_unicode_filename(self):
        expected_stream_hash = (
            'd1da4258f3ce12edb91d7e8e160d091d3ab1432c2e55a6352dce0'
            '2fd5adb86fe144e93e110075b5865fff8617776c6c0')
        filename = '☃.file'
        lbry_file = yield self.create_file(filename)
        self.assertEqual(expected_stream_hash, lbry_file.stream_hash)
Example #6
class TestStreamify(TestCase):
    maxDiff = 5000

    def setUp(self):
        mocks.mock_conf_settings(self)
        self.session = None
        self.lbry_file_manager = None
        self.is_generous = True
        self.db_dir = tempfile.mkdtemp()
        self.blob_dir = os.path.join(self.db_dir, "blobfiles")
        os.mkdir(self.blob_dir)
        self.dht_node = FakeNode()
        self.wallet = FakeWallet()
        self.peer_manager = PeerManager()
        self.peer_finder = FakePeerFinder(5553, self.peer_manager, 2)
        self.rate_limiter = DummyRateLimiter()
        self.sd_identifier = StreamDescriptorIdentifier()
        self.storage = SQLiteStorage(self.db_dir)
        self.blob_manager = DiskBlobManager(self.blob_dir, self.storage,
                                            self.dht_node._dataStore)
        self.prm = OnlyFreePaymentsManager()
        self.lbry_file_manager = EncryptedFileManager(
            self.peer_finder, self.rate_limiter, self.blob_manager,
            self.wallet, self.prm, self.storage, self.sd_identifier)
        d = self.storage.setup()
        d.addCallback(lambda _: self.lbry_file_manager.setup())
        return d

    @defer.inlineCallbacks
    def tearDown(self):
        lbry_files = self.lbry_file_manager.lbry_files
        for lbry_file in lbry_files:
            yield self.lbry_file_manager.delete_lbry_file(lbry_file)
        yield self.lbry_file_manager.stop()
        yield self.storage.stop()
        yield threads.deferToThread(shutil.rmtree, self.db_dir)
        if os.path.exists("test_file"):
            os.remove("test_file")

    def test_create_stream(self):
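        # create a stream from deterministic data and check that its stream
        # descriptor matches test_create_stream_sd_file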
        def verify_equal(sd_info):
            self.assertEqual(sd_info, test_create_stream_sd_file)

        def verify_stream_descriptor_file(stream_hash):
            d = get_sd_info(self.storage, stream_hash, True)
            d.addCallback(verify_equal)
            return d

        def iv_generator():
            iv = 0
            while 1:
                iv += 1
                yield b"%016d" % iv

        def create_stream():
            test_file = GenFile(5209343, bytes(
                (i + 3) for i in range(0, 64, 6)))
            d = create_lbry_file(self.blob_manager,
                                 self.storage,
                                 self.prm,
                                 self.lbry_file_manager,
                                 "test_file",
                                 test_file,
                                 key=b'0123456701234567',
                                 iv_generator=iv_generator())
            d.addCallback(lambda lbry_file: lbry_file.stream_hash)
            return d

        d = create_stream()
        d.addCallback(verify_stream_descriptor_file)
        return d

    @defer.inlineCallbacks
    def test_create_and_combine_stream(self):
        test_file = GenFile(53209343, bytes((i + 5) for i in range(0, 64, 6)))
        lbry_file = yield create_lbry_file(self.blob_manager, self.storage,
                                           self.prm, self.lbry_file_manager,
                                           "test_file", test_file)
        sd_hash = yield self.storage.get_sd_blob_hash_for_stream(
            lbry_file.stream_hash)
        self.assertEqual(lbry_file.sd_hash, sd_hash)
        yield lbry_file.start()
        with open('test_file', 'rb') as f:
            hashsum = md5()
            hashsum.update(f.read())
        self.assertEqual(hashsum.hexdigest(),
                         "68959747edc73df45e45db6379dd7b3b")
Example #7
class TestReflector(unittest.TestCase):
    def setUp(self):
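        # build independent server and client stacks, create a test stream on
        # the client side, and start a reflector server for the tests to send to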
        self.reflector_port = None
        self.port = None
        mocks.mock_conf_settings(self)
        self.server_db_dir, self.server_blob_dir = mk_db_and_blob_dir()
        self.client_db_dir, self.client_blob_dir = mk_db_and_blob_dir()
        prm = OnlyFreePaymentsManager()
        wallet = mocks.Wallet()
        peer_manager = PeerManager.PeerManager()
        peer_finder = mocks.PeerFinder(5553, peer_manager, 2)
        self.server_storage = SQLiteStorage(self.server_db_dir)
        self.server_blob_manager = BlobManager.DiskBlobManager(
            self.server_blob_dir, self.server_storage)
        self.client_storage = SQLiteStorage(self.client_db_dir)
        self.client_blob_manager = BlobManager.DiskBlobManager(
            self.client_blob_dir, self.client_storage)
        self.server_lbry_file_manager = EncryptedFileManager(
            peer_finder, DummyRateLimiter(), self.server_blob_manager, wallet,
            prm, self.server_storage,
            StreamDescriptor.StreamDescriptorIdentifier())
        self.client_lbry_file_manager = EncryptedFileManager(
            peer_finder, DummyRateLimiter(), self.client_blob_manager, wallet,
            prm, self.client_storage,
            StreamDescriptor.StreamDescriptorIdentifier())

        self.expected_blobs = [
            ('dc4708f76a5e7af0f1cae0ee96b824e2ed9250c9346c093b'
             '441f0a20d3607c17948b6fcfb4bc62020fe5286693d08586', 2097152),
            ('f4067522c1b49432a2a679512e3917144317caa1abba0c04'
             '1e0cd2cf9f635d4cf127ce1824fa04189b63916174951f70', 2097152),
            ('305486c434260484fcb2968ce0e963b72f81ba56c11b08b1'
             'af0789b55b44d78422600f9a38e3cf4f2e9569897e5646a9', 1015056),
        ]

        d = self.server_storage.setup()
        d.addCallback(lambda _: self.server_blob_manager.setup())
        d.addCallback(lambda _: self.server_lbry_file_manager.setup())
        d.addCallback(lambda _: self.client_storage.setup())
        d.addCallback(lambda _: self.client_blob_manager.setup())
        d.addCallback(lambda _: self.client_lbry_file_manager.setup())

        @defer.inlineCallbacks
        def verify_equal(sd_info, stream_hash):
            self.assertDictEqual(mocks.create_stream_sd_file, sd_info)
            sd_hash = yield self.client_storage.get_sd_blob_hash_for_stream(
                stream_hash)
            defer.returnValue(sd_hash)

        def save_sd_blob_hash(sd_hash):
            self.sd_hash = sd_hash
            self.expected_blobs.append((sd_hash, 923))

        def verify_stream_descriptor_file(stream_hash):
            self.stream_hash = stream_hash
            d = get_sd_info(self.client_storage, stream_hash, True)
            d.addCallback(verify_equal, stream_hash)
            d.addCallback(save_sd_blob_hash)
            return d

        def create_stream():
            test_file = mocks.GenFile(5209343,
                                      bytes((i + 3) for i in range(0, 64, 6)))
            d = EncryptedFileCreator.create_lbry_file(
                self.client_blob_manager,
                self.client_storage,
                prm,
                self.client_lbry_file_manager,
                "test_file",
                test_file,
                key=b"0123456701234567",
                iv_generator=iv_generator())
            d.addCallback(lambda lbry_file: lbry_file.stream_hash)
            return d

        def start_server():
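            # bind the reflector server to the first free port at or above 8943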
            server_factory = reflector.ServerFactory(
                peer_manager, self.server_blob_manager,
                self.server_lbry_file_manager)
            from twisted.internet import reactor
            port = 8943
            while self.reflector_port is None:
                try:
                    self.reflector_port = reactor.listenTCP(
                        port, server_factory)
                    self.port = port
                except error.CannotListenError:
                    port += 1

        d.addCallback(lambda _: create_stream())
        d.addCallback(verify_stream_descriptor_file)
        d.addCallback(lambda _: start_server())
        return d

    @defer.inlineCallbacks
    def tearDown(self):
        lbry_files = self.client_lbry_file_manager.lbry_files
        for lbry_file in lbry_files:
            yield self.client_lbry_file_manager.delete_lbry_file(lbry_file)
        yield self.client_lbry_file_manager.stop()
        yield self.client_blob_manager.stop()
        yield self.client_storage.stop()
        self.reflector_port.stopListening()
        lbry_files = self.server_lbry_file_manager.lbry_files
        for lbry_file in lbry_files:
            yield self.server_lbry_file_manager.delete_lbry_file(lbry_file)
        yield self.server_lbry_file_manager.stop()
        yield self.server_blob_manager.stop()
        yield self.server_storage.stop()
        try:
            rm_db_and_blob_dir(self.client_db_dir, self.client_blob_dir)
        except Exception as err:
            raise unittest.SkipTest("TODO: fix this for windows")
        try:
            rm_db_and_blob_dir(self.server_db_dir, self.server_blob_dir)
        except Exception as err:
            raise unittest.SkipTest("TODO: fix this for windows")
        if os.path.exists("test_file"):
            os.remove("test_file")

    def test_stream_reflector(self):
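        # reflect the whole stream to the server, then verify that the blobs
        # and the stream metadata arrived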
        def verify_blob_on_reflector():
            check_blob_ds = []
            for blob_hash, blob_size in self.expected_blobs:
                check_blob_ds.append(verify_have_blob(blob_hash, blob_size))
            return defer.DeferredList(check_blob_ds)

        @defer.inlineCallbacks
        def verify_stream_on_reflector():
            # check stream_info_manager has all the right information
            streams = yield self.server_storage.get_all_streams()
            self.assertEqual(1, len(streams))
            self.assertEqual(self.stream_hash, streams[0])

            blobs = yield self.server_storage.get_blobs_for_stream(
                self.stream_hash)
            blob_hashes = [
                b.blob_hash for b in blobs if b.blob_hash is not None
            ]
            expected_blob_hashes = [
                b[0] for b in self.expected_blobs[:-1] if b[0] is not None
            ]
            self.assertEqual(expected_blob_hashes, blob_hashes)
            sd_hash = yield self.server_storage.get_sd_blob_hash_for_stream(
                streams[0])
            self.assertEqual(self.sd_hash, sd_hash)

            # the reflector server should not have added the stream to its
            # lbry file manager
            files = self.server_lbry_file_manager.lbry_files
            self.assertEqual(0, len(files))

            streams = yield self.server_storage.get_all_streams()
            self.assertEqual(1, len(streams))
            stream_info = yield self.server_storage.get_stream_info(
                self.stream_hash)
            self.assertEqual(self.sd_hash, stream_info[3])
            self.assertEqual(hexlify(b'test_file').decode(), stream_info[0])

            # check should_announce blobs on blob_manager
            blob_hashes = yield (
                self.server_storage.get_all_should_announce_blobs())
            self.assertSetEqual({self.sd_hash, expected_blob_hashes[0]},
                                set(blob_hashes))

        def verify_have_blob(blob_hash, blob_size):
            d = self.server_blob_manager.get_blob(blob_hash)
            d.addCallback(lambda blob: verify_blob_completed(blob, blob_size))
            return d

        def send_to_server():
            factory = reflector.ClientFactory(self.client_blob_manager,
                                              self.stream_hash, self.sd_hash)

            from twisted.internet import reactor
            reactor.connectTCP('localhost', self.port, factory)
            return factory.finished_deferred

        def verify_blob_completed(blob, blob_size):
            self.assertTrue(blob.get_is_verified())
            self.assertEqual(blob_size, blob.length)
            return

        d = send_to_server()
        d.addCallback(lambda _: verify_blob_on_reflector())
        d.addCallback(lambda _: verify_stream_on_reflector())
        return d

    def test_blob_reflector(self):
        def verify_data_on_reflector():
            check_blob_ds = []
            for blob_hash, blob_size in self.expected_blobs:
                check_blob_ds.append(verify_have_blob(blob_hash, blob_size))
            return defer.DeferredList(check_blob_ds)

        def verify_have_blob(blob_hash, blob_size):
            d = self.server_blob_manager.get_blob(blob_hash)
            d.addCallback(lambda blob: verify_blob_completed(blob, blob_size))
            return d

        def send_to_server(blob_hashes_to_send):
            factory = reflector.BlobClientFactory(self.client_blob_manager,
                                                  blob_hashes_to_send)

            from twisted.internet import reactor
            reactor.connectTCP('localhost', self.port, factory)
            return factory.finished_deferred

        def verify_blob_completed(blob, blob_size):
            self.assertTrue(blob.get_is_verified())
            self.assertEqual(blob_size, blob.length)

        d = send_to_server([x[0] for x in self.expected_blobs])
        d.addCallback(lambda _: verify_data_on_reflector())
        return d

    def test_blob_reflector_v1(self):
        @defer.inlineCallbacks
        def verify_stream_on_reflector():
            # this protocol should not have any impact on stream info manager
            streams = yield self.server_storage.get_all_streams()
            self.assertEqual(0, len(streams))
            # there should be no should announce blobs here
            blob_hashes = yield (
                self.server_storage.get_all_should_announce_blobs())
            self.assertEqual(0, len(blob_hashes))

        def verify_data_on_reflector():
            check_blob_ds = []
            for blob_hash, blob_size in self.expected_blobs:
                check_blob_ds.append(verify_have_blob(blob_hash, blob_size))
            return defer.DeferredList(check_blob_ds)

        def verify_have_blob(blob_hash, blob_size):
            d = self.server_blob_manager.get_blob(blob_hash)
            d.addCallback(lambda blob: verify_blob_completed(blob, blob_size))
            return d

        def send_to_server(blob_hashes_to_send):
            factory = reflector.BlobClientFactory(self.client_blob_manager,
                                                  blob_hashes_to_send)
            factory.protocol_version = 0

            from twisted.internet import reactor
            reactor.connectTCP('localhost', self.port, factory)
            return factory.finished_deferred

        def verify_blob_completed(blob, blob_size):
            self.assertTrue(blob.get_is_verified())
            self.assertEqual(blob_size, blob.length)

        d = send_to_server([x[0] for x in self.expected_blobs])
        d.addCallback(lambda _: verify_data_on_reflector())
        return d

    # test case where we reflect a blob, and then that same blob
    # is reflected as a stream
    def test_blob_reflect_and_stream(self):
        def verify_blob_on_reflector():
            check_blob_ds = []
            for blob_hash, blob_size in self.expected_blobs:
                check_blob_ds.append(verify_have_blob(blob_hash, blob_size))
            return defer.DeferredList(check_blob_ds)

        @defer.inlineCallbacks
        def verify_stream_on_reflector():
            # check stream_info_manager has all the right information

            streams = yield self.server_storage.get_all_streams()
            self.assertEqual(1, len(streams))
            self.assertEqual(self.stream_hash, streams[0])

            blobs = yield self.server_storage.get_blobs_for_stream(
                self.stream_hash)
            blob_hashes = [
                b.blob_hash for b in blobs if b.blob_hash is not None
            ]
            expected_blob_hashes = [
                b[0] for b in self.expected_blobs[:-1] if b[0] is not None
            ]
            self.assertEqual(expected_blob_hashes, blob_hashes)
            sd_hash = yield self.server_storage.get_sd_blob_hash_for_stream(
                self.stream_hash)
            self.assertEqual(self.sd_hash, sd_hash)

            # check should_announce blobs on blob_manager
            to_announce = yield (
                self.server_storage.get_all_should_announce_blobs())
            self.assertSetEqual(set(to_announce),
                                {self.sd_hash, expected_blob_hashes[0]})

        def verify_have_blob(blob_hash, blob_size):
            d = self.server_blob_manager.get_blob(blob_hash)
            d.addCallback(lambda blob: verify_blob_completed(blob, blob_size))
            return d

        def send_to_server_as_blobs(blob_hashes_to_send):
            factory = reflector.BlobClientFactory(self.client_blob_manager,
                                                  blob_hashes_to_send)
            factory.protocol_version = 0

            from twisted.internet import reactor
            reactor.connectTCP('localhost', self.port, factory)
            return factory.finished_deferred

        def send_to_server_as_stream(result):
            factory = reflector.ClientFactory(self.client_blob_manager,
                                              self.stream_hash, self.sd_hash)

            from twisted.internet import reactor
            reactor.connectTCP('localhost', self.port, factory)
            return factory.finished_deferred

        def verify_blob_completed(blob, blob_size):
            self.assertTrue(blob.get_is_verified())
            self.assertEqual(blob_size, blob.length)

        # Modify this to change which blobs to send
        blobs_to_send = self.expected_blobs

        d = send_to_server_as_blobs([x[0] for x in blobs_to_send])
        d.addCallback(send_to_server_as_stream)
        d.addCallback(lambda _: verify_blob_on_reflector())
        d.addCallback(lambda _: verify_stream_on_reflector())
        return d
Example #8
class LbryUploader(object):
    def __init__(self, file_size, ul_rate_limit=None):
        self.file_size = file_size
        self.ul_rate_limit = ul_rate_limit
        self.kill_check = None
        # these attributes get defined in `start`
        self.db_dir = None
        self.blob_dir = None
        self.wallet = None
        self.peer_manager = None
        self.rate_limiter = None
        self.prm = None
        self.storage = None
        self.blob_manager = None
        self.lbry_file_manager = None
        self.server_port = None

    @defer.inlineCallbacks
    def setup(self):
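        # set up blob and file managers backed by temp dirs, start a peer
        # server on localhost:5553, publish a test file and return its sd hash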
        init_conf_windows()

        self.db_dir, self.blob_dir = mk_db_and_blob_dir()
        self.wallet = FakeWallet()
        self.peer_manager = PeerManager()
        self.rate_limiter = RateLimiter()
        if self.ul_rate_limit is not None:
            self.rate_limiter.set_ul_limit(self.ul_rate_limit)
        self.prm = OnlyFreePaymentsManager()
        self.storage = SQLiteStorage(self.db_dir)
        self.blob_manager = DiskBlobManager(self.blob_dir, self.storage)
        self.lbry_file_manager = EncryptedFileManager(
            FakePeerFinder(5553, self.peer_manager, 1), self.rate_limiter,
            self.blob_manager, self.wallet, self.prm, self.storage,
            StreamDescriptorIdentifier())

        yield self.storage.setup()
        yield self.blob_manager.setup()
        yield self.lbry_file_manager.setup()

        query_handler_factories = {
            1: BlobAvailabilityHandlerFactory(self.blob_manager),
            2: BlobRequestHandlerFactory(self.blob_manager, self.wallet,
                                         self.prm, None),
            3: self.wallet.get_wallet_info_query_handler_factory(),
        }
        server_factory = ServerProtocolFactory(self.rate_limiter,
                                               query_handler_factories,
                                               self.peer_manager)
        self.server_port = reactor.listenTCP(5553,
                                             server_factory,
                                             interface="localhost")
        test_file = GenFile(self.file_size, bytes(i for i in range(0, 64, 6)))
        lbry_file = yield create_lbry_file(self.blob_manager, self.storage,
                                           self.prm, self.lbry_file_manager,
                                           "test_file", test_file)
        defer.returnValue(lbry_file.sd_hash)

    @defer.inlineCallbacks
    def stop(self):
        lbry_files = self.lbry_file_manager.lbry_files
        for lbry_file in lbry_files:
            yield self.lbry_file_manager.delete_lbry_file(lbry_file)
        yield self.lbry_file_manager.stop()
        yield self.blob_manager.stop()
        yield self.storage.stop()
        self.server_port.stopListening()
        rm_db_and_blob_dir(self.db_dir, self.blob_dir)
        if os.path.exists("test_file"):
            os.remove("test_file")
Example #9
class LbryUploader(object):
    def __init__(self, sd_hash_queue, kill_event, dead_event,
                 file_size, ul_rate_limit=None, is_generous=False):
        self.sd_hash_queue = sd_hash_queue
        self.kill_event = kill_event
        self.dead_event = dead_event
        self.file_size = file_size
        self.ul_rate_limit = ul_rate_limit
        self.is_generous = is_generous
        # these attributes get defined in `start`
        self.reactor = None
        self.sd_identifier = None
        self.session = None
        self.lbry_file_manager = None
        self.server_port = None
        self.kill_check = None

    def start(self):
        use_epoll_on_linux()
        init_conf_windows()

        from twisted.internet import reactor
        self.reactor = reactor
        logging.debug("Starting the uploader")
        Random.atfork()
        r = random.Random()
        r.seed("start_lbry_uploader")
        wallet = FakeWallet()
        peer_manager = PeerManager()
        peer_finder = FakePeerFinder(5553, peer_manager, 1)
        hash_announcer = FakeAnnouncer()
        rate_limiter = RateLimiter()
        self.sd_identifier = StreamDescriptorIdentifier()
        self.db_dir, self.blob_dir = mk_db_and_blob_dir()

        self.session = Session(
            conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=self.db_dir, blob_dir=self.blob_dir,
            node_id="abcd", peer_finder=peer_finder, hash_announcer=hash_announcer,
            peer_port=5553, use_upnp=False, rate_limiter=rate_limiter, wallet=wallet,
            blob_tracker_class=DummyBlobAvailabilityTracker,
            dht_node_class=Node, is_generous=self.is_generous, external_ip="127.0.0.1")
        stream_info_manager = DBEncryptedFileMetadataManager(self.db_dir)
        self.lbry_file_manager = EncryptedFileManager(
            self.session, stream_info_manager, self.sd_identifier)
        if self.ul_rate_limit is not None:
            self.session.rate_limiter.set_ul_limit(self.ul_rate_limit)
        reactor.callLater(1, self.start_all)
        if not reactor.running:
            reactor.run()

    def start_all(self):
        d = self.session.setup()
        d.addCallback(lambda _: add_lbry_file_to_sd_identifier(self.sd_identifier))
        d.addCallback(lambda _: self.lbry_file_manager.setup())
        d.addCallback(lambda _: self.start_server())
        d.addCallback(lambda _: self.create_stream())
        d.addCallback(self.create_stream_descriptor)
        d.addCallback(self.put_sd_hash_on_queue)

        def print_error(err):
            logging.critical("Server error: %s", err.getErrorMessage())

        d.addErrback(print_error)
        return d

    def start_server(self):
        session = self.session
        query_handler_factories = {
            1: BlobAvailabilityHandlerFactory(session.blob_manager),
            2: BlobRequestHandlerFactory(
                session.blob_manager, session.wallet,
                session.payment_rate_manager,
                None),
            3: session.wallet.get_wallet_info_query_handler_factory(),
        }
        server_factory = ServerProtocolFactory(session.rate_limiter,
                                               query_handler_factories,
                                               session.peer_manager)
        self.server_port = self.reactor.listenTCP(5553, server_factory)
        logging.debug("Started listening")
        self.kill_check = task.LoopingCall(self.check_for_kill)
        self.kill_check.start(1.0)
        return True

    def kill_server(self):
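        # shut down the session, file manager and server port, signal the
        # parent via dead_event, clean up the temp dirs and stop the reactor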
        session = self.session
        ds = []
        ds.append(session.shut_down())
        ds.append(self.lbry_file_manager.stop())
        if self.server_port:
            ds.append(self.server_port.stopListening())
        self.kill_check.stop()
        self.dead_event.set()
        dl = defer.DeferredList(ds)
        dl.addCallback(lambda _: rm_db_and_blob_dir(self.db_dir, self.blob_dir))
        dl.addCallback(lambda _: self.reactor.stop())
        return dl

    def check_for_kill(self):
        if self.kill_event.is_set():
            self.kill_server()

    def create_stream(self):
        test_file = GenFile(self.file_size, b''.join([chr(i) for i in xrange(0, 64, 6)]))
        d = create_lbry_file(self.session, self.lbry_file_manager, "test_file", test_file)
        return d

    def create_stream_descriptor(self, stream_hash):
        descriptor_writer = BlobStreamDescriptorWriter(self.session.blob_manager)
        d = get_sd_info(self.lbry_file_manager.stream_info_manager, stream_hash, True)
        d.addCallback(descriptor_writer.create_descriptor)
        return d

    def put_sd_hash_on_queue(self, sd_hash):
        self.sd_hash_queue.put(sd_hash)