Example no. 1
    @defer.inlineCallbacks
    def test_store_stream(self, stream_hash=None):
        stream_hash = stream_hash or random_lbry_hash()
        sd_hash = random_lbry_hash()
        blob1 = random_lbry_hash()
        blob2 = random_lbry_hash()

        yield self.store_fake_blob(sd_hash)
        yield self.store_fake_blob(blob1)
        yield self.store_fake_blob(blob2)

        yield self.store_fake_stream(stream_hash, sd_hash)
        yield self.store_fake_stream_blob(stream_hash, blob1, 1)
        yield self.store_fake_stream_blob(stream_hash, blob2, 2)

        stream_blobs = yield f2d(self.storage.get_blobs_for_stream(stream_hash))
        stream_blob_hashes = [b.blob_hash for b in stream_blobs]
        self.assertListEqual(stream_blob_hashes, [blob1, blob2])

        blob_hashes = yield f2d(self.storage.get_all_blob_hashes())
        self.assertSetEqual(set(blob_hashes), {sd_hash, blob1, blob2})

        stream_blobs = yield f2d(self.storage.get_blobs_for_stream(stream_hash))
        stream_blob_hashes = [b.blob_hash for b in stream_blobs]
        self.assertListEqual(stream_blob_hashes, [blob1, blob2])

        yield f2d(self.storage.set_should_announce(sd_hash, 1, 1))
        yield f2d(self.storage.set_should_announce(blob1, 1, 1))

        should_announce_count = yield f2d(self.storage.count_should_announce_blobs())
        self.assertEqual(should_announce_count, 2)
        should_announce_hashes = yield f2d(self.storage.get_blobs_to_announce())
        self.assertSetEqual(set(should_announce_hashes), {sd_hash, blob1})

        stream_hashes = yield f2d(self.storage.get_all_streams())
        self.assertListEqual(stream_hashes, [stream_hash])
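
The examples in this section lean on a few test helpers that the excerpts do not show. As a rough sketch of what they are assumed to provide (the exact definitions live in the test suite's utilities and are not shown, so the bodies below are assumptions): random_lbry_hash() produces a random hex string to stand in for blob and stream hashes, and f2d() bridges an asyncio coroutine into a Twisted Deferred so it can be yielded from inlineCallbacks-style tests.

import asyncio
import os

from twisted.internet import defer


def random_lbry_hash():
    # assumed sketch: a random hex string with the length of a sha384 digest,
    # used as a stand-in for real blob/stream hashes
    return os.urandom(48).hex()


def f2d(coro):
    # assumed sketch: wrap an asyncio coroutine in a Twisted Deferred so that
    # yield-based (inlineCallbacks) tests can wait on the asyncio storage API
    return defer.Deferred.fromFuture(asyncio.ensure_future(coro))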
Example no. 2
    async def test_store_file(self):
        download_directory = self.db_dir
        out = await self.storage.get_all_lbry_files()
        self.assertEqual(len(out), 0)

        stream_hash = random_lbry_hash()
        sd_hash = random_lbry_hash()
        blob1 = random_lbry_hash()
        blob2 = random_lbry_hash()

        await self.store_fake_blob(sd_hash)
        await self.store_fake_blob(blob1)
        await self.store_fake_blob(blob2)

        await self.store_fake_stream(stream_hash, sd_hash)
        await self.store_fake_stream_blob(stream_hash, blob1, 1)
        await self.store_fake_stream_blob(stream_hash, blob2, 2)

        blob_data_rate = 0
        file_name = "test file"
        await self.storage.save_published_file(stream_hash, file_name,
                                               download_directory,
                                               blob_data_rate)

        files = await self.storage.get_all_lbry_files()
        self.assertEqual(1, len(files))
Example no. 3
    async def make_and_store_fake_stream(self, blob_count=2, stream_hash=None):
        stream_hash = stream_hash or random_lbry_hash()
        blobs = [
            BlobInfo(i + 1, 100, "DEADBEEF", random_lbry_hash())
            for i in range(blob_count)
        ]
        await self.store_fake_stream(stream_hash, blobs)
Example no. 4
    async def test_store_content_claim(self):
        download_directory = self.db_dir
        out = await self.storage.get_all_lbry_files()
        self.assertEqual(len(out), 0)

        stream_hash = random_lbry_hash()
        sd_hash = fake_claim_info['value']['stream']['source']['source']

        # test that we can associate a content claim with a file
        # use the generated sd hash in the fake claim
        fake_outpoint = "%s:%i" % (fake_claim_info['txid'], fake_claim_info['nout'])

        await self.make_and_store_fake_stream(blob_count=2, stream_hash=stream_hash, sd_hash=sd_hash)
        blob_data_rate = 0
        file_name = "test file"
        await self.storage.save_published_file(
            stream_hash, file_name, download_directory, blob_data_rate
        )
        await self.storage.save_claims([fake_claim_info])
        await self.storage.save_content_claim(stream_hash, fake_outpoint)
        stored_content_claim = await self.storage.get_content_claim(stream_hash)
        self.assertDictEqual(stored_content_claim, fake_claim_info)

        stream_hashes = await self.storage.get_old_stream_hashes_for_claim_id(fake_claim_info['claim_id'],
                                                                              stream_hash)
        self.assertListEqual(stream_hashes, [])

        # test that a claim update pointing to a different stream can't be associated with the file
        second_stream_hash, second_sd_hash = random_lbry_hash(), random_lbry_hash()
        await self.make_and_store_fake_stream(blob_count=2, stream_hash=second_stream_hash, sd_hash=second_sd_hash)
        with self.assertRaisesRegex(Exception, "stream mismatch"):
            await self.storage.save_content_claim(second_stream_hash, fake_outpoint)

        # test that a new claim update containing the same stream can be associated with the file
        update_info = deepcopy(fake_claim_info)
        update_info['txid'] = "beef0000" * 12
        update_info['nout'] = 0
        second_outpoint = "%s:%i" % (update_info['txid'], update_info['nout'])
        await self.storage.save_claims([update_info])
        await self.storage.save_content_claim(stream_hash, second_outpoint)
        update_info_result = await self.storage.get_content_claim(stream_hash)
        self.assertDictEqual(update_info_result, update_info)

        # test that we can't associate an update with a mismatching claim id
        invalid_update_info = deepcopy(fake_claim_info)
        invalid_update_info['txid'] = "beef0001" * 12
        invalid_update_info['nout'] = 0
        invalid_update_info['claim_id'] = "beef0002" * 5
        invalid_update_outpoint = "%s:%i" % (invalid_update_info['txid'], invalid_update_info['nout'])
        with self.assertRaisesRegex(Exception, "mismatching claim ids when updating stream "
                                               "deadbeefdeadbeefdeadbeefdeadbeefdeadbeef "
                                               "vs beef0002beef0002beef0002beef0002beef0002"):
            await self.storage.save_claims([invalid_update_info])
            await self.storage.save_content_claim(stream_hash, invalid_update_outpoint)
        current_claim_info = await self.storage.get_content_claim(stream_hash)
        # this should still be the previous update
        self.assertDictEqual(current_claim_info, update_info)
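
The test above reads from a fake_claim_info fixture that is not shown in these excerpts. A minimal sketch limited to the fields the test actually touches follows; the placeholder values (and any extra claim metadata that storage.save_claims may require) are assumptions, except for the claim id, which is quoted verbatim in the expected error message.

fake_claim_info = {
    'txid': 'deadbeef' * 8,      # placeholder transaction id (value assumed)
    'nout': 0,                   # placeholder output index (value assumed)
    'claim_id': 'deadbeef' * 5,  # matches the claim id in the error message above
    'value': {
        'stream': {
            'source': {
                'source': random_lbry_hash(),  # read by the test as the stream's sd hash
            },
        },
    },
}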
Example no. 5
    @defer.inlineCallbacks
    def make_and_store_fake_stream(self, blob_count=2, stream_hash=None, sd_hash=None):
        stream_hash = stream_hash or random_lbry_hash()
        sd_hash = sd_hash or random_lbry_hash()
        blobs = {
            i + 1: random_lbry_hash() for i in range(blob_count)
        }

        yield self.store_fake_blob(sd_hash)

        for blob in blobs.values():
            yield self.store_fake_blob(blob)

        yield self.store_fake_stream(stream_hash, sd_hash)

        for pos, blob in sorted(blobs.items(), key=lambda x: x[0]):
            yield self.store_fake_stream_blob(stream_hash, blob, pos)
Example no. 6
    @defer.inlineCallbacks
    def test_delete_blob(self):
        blob_hash = random_lbry_hash()
        yield self.store_fake_blob(blob_hash)
        blob_hashes = yield f2d(self.storage.get_all_blob_hashes())
        self.assertEqual(blob_hashes, [blob_hash])
        yield f2d(self.storage.delete_blobs_from_db(blob_hashes))
        blob_hashes = yield f2d(self.storage.get_all_blob_hashes())
        self.assertEqual(blob_hashes, [])
Example no. 7
    async def test_delete_blob(self):
        blob_hash = random_lbry_hash()
        await self.store_fake_blob(blob_hash)
        blob_hashes = await self.storage.get_all_blob_hashes()
        self.assertEqual(blob_hashes, [blob_hash])
        await self.storage.delete_blobs_from_db(blob_hashes)
        blob_hashes = await self.storage.get_all_blob_hashes()
        self.assertEqual(blob_hashes, [])
Example no. 8
    async def store_fake_stream(self, stream_hash, blobs=None, file_name="fake_file", key="DEADBEEF"):
        blobs = blobs or [BlobInfo(1, 100, "DEADBEEF", random_lbry_hash())]
        descriptor = StreamDescriptor(
            asyncio.get_event_loop(), self.blob_dir, file_name, key, file_name, blobs, stream_hash
        )
        sd_blob = await descriptor.make_sd_blob()
        await self.storage.store_stream(sd_blob, descriptor)
        return descriptor
Example no. 9
    @defer.inlineCallbacks
    def test_too_much_write(self):
        # writing too much data should result in failure
        expected_length = 16
        content = b'0' * 32
        blob_hash = random_lbry_hash()
        blob_file = BlobFile(self.blob_dir, blob_hash, expected_length)
        writer, finished_d = blob_file.open_for_writing(peer=1)
        writer.write(content)
        yield self.assertFailure(finished_d, InvalidDataError)
Example no. 10
    @defer.inlineCallbacks
    def test_bad_hash(self):
        # test a write that should fail because the content's hash
        # does not equal the blob_hash
        length = 64
        content = b'0' * length
        blob_hash = random_lbry_hash()
        blob_file = BlobFile(self.blob_dir, blob_hash, length)
        writer, finished_d = blob_file.open_for_writing(peer=1)
        writer.write(content)
        yield self.assertFailure(finished_d, InvalidDataError)
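
For contrast with the two failure cases above, a success path would write exactly the announced number of bytes whose hash matches the blob hash. LBRY blob hashes are sha384 hex digests of the blob contents, so a hedged sketch using the same BlobFile API (the test name and the hashlib usage are additions, not part of the original suite) could look like:

    @defer.inlineCallbacks
    def test_good_write(self):
        # hedged sketch: write exactly the expected number of bytes whose
        # sha384 hex digest equals blob_hash; finished_d firing without
        # InvalidDataError is the success condition
        content = b'0' * 64
        blob_hash = hashlib.sha384(content).hexdigest()
        blob_file = BlobFile(self.blob_dir, blob_hash, len(content))
        writer, finished_d = blob_file.open_for_writing(peer=1)
        writer.write(content)
        yield finished_d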
Example no. 11
    @defer.inlineCallbacks
    def test_supports_storage(self):
        claim_ids = [random_lbry_hash() for _ in range(10)]
        random_supports = [{
            "txid": random_lbry_hash(),
            "nout": i,
            "address": f"addr{i}",
            "amount": f"{i}.0"
        } for i in range(20)]
        expected_supports = {}
        for idx, claim_id in enumerate(claim_ids):
            yield f2d(self.storage.save_supports(claim_id, random_supports[idx*2:idx*2+2]))
            for random_support in random_supports[idx*2:idx*2+2]:
                random_support['claim_id'] = claim_id
                expected_supports.setdefault(claim_id, []).append(random_support)
        supports = yield f2d(self.storage.get_supports(claim_ids[0]))
        self.assertEqual(supports, expected_supports[claim_ids[0]])
        all_supports = yield f2d(self.storage.get_supports(*claim_ids))
        for support in all_supports:
            self.assertIn(support, expected_supports[support['claim_id']])
Example no. 12
    @defer.inlineCallbacks
    def test_delete_stream(self):
        stream_hash = random_lbry_hash()
        yield self.test_store_stream(stream_hash)
        yield self.storage.delete_stream(stream_hash)
        stream_hashes = yield self.storage.get_all_streams()
        self.assertListEqual(stream_hashes, [])

        stream_blobs = yield self.storage.get_blobs_for_stream(stream_hash)
        self.assertListEqual(stream_blobs, [])
        blob_hashes = yield self.storage.get_all_blob_hashes()
        self.assertListEqual(blob_hashes, [])
Example no. 13
    def setUp(self):
        mock_conf_settings(self)
        self.num_blobs = 10
        self.blobs_to_announce = []
        for i in range(0, self.num_blobs):
            self.blobs_to_announce.append(random_lbry_hash())
        self.dht_node = MocDHTNode()
        self.clock = self.dht_node.clock
        utils.call_later = self.clock.callLater
        self.storage = MocStorage(self.blobs_to_announce)
        self.announcer = DHTHashAnnouncer(self.dht_node, self.storage)
Example no. 14
    async def test_store_and_delete_stream(self):
        stream_hash = random_lbry_hash()
        descriptor = await self.store_fake_stream(stream_hash)
        files = await self.storage.get_all_lbry_files()
        self.assertListEqual(files, [])
        stream_hashes = await self.storage.get_all_stream_hashes()
        self.assertListEqual(stream_hashes, [stream_hash])
        await self.storage.delete_stream(descriptor)
        files = await self.storage.get_all_lbry_files()
        self.assertListEqual(files, [])
        stream_hashes = await self.storage.get_all_stream_hashes()
        self.assertListEqual(stream_hashes, [])
Example no. 15
    @defer.inlineCallbacks
    def test_store_file(self):
        download_directory = self.db_dir
        out = yield self.storage.get_all_lbry_files()
        self.assertEqual(len(out), 0)

        stream_hash = random_lbry_hash()
        sd_hash = random_lbry_hash()
        blob1 = random_lbry_hash()
        blob2 = random_lbry_hash()

        yield self.store_fake_blob(sd_hash)
        yield self.store_fake_blob(blob1)
        yield self.store_fake_blob(blob2)

        yield self.store_fake_stream(stream_hash, sd_hash)
        yield self.store_fake_stream_blob(stream_hash, blob1, 1)
        yield self.store_fake_stream_blob(stream_hash, blob2, 2)

        blob_data_rate = 0
        file_name = "test file"
        out = yield self.storage.save_published_file(stream_hash, file_name,
                                                     download_directory,
                                                     blob_data_rate)
        rowid = yield self.storage.get_rowid_for_stream_hash(stream_hash)
        self.assertEqual(out, rowid)

        files = yield self.storage.get_all_lbry_files()
        self.assertEqual(1, len(files))

        status = yield self.storage.get_lbry_file_status(rowid)
        self.assertEqual(status, ManagedEncryptedFileDownloader.STATUS_STOPPED)

        running = ManagedEncryptedFileDownloader.STATUS_RUNNING
        yield self.storage.change_file_status(rowid, running)
        status = yield self.storage.get_lbry_file_status(rowid)
        self.assertEqual(status, ManagedEncryptedFileDownloader.STATUS_RUNNING)
Example no. 16
    @defer.inlineCallbacks
    def test_delete_blob(self):
        # create blob
        blob_hash = yield self._create_and_add_blob()
        blobs = yield self.bm.get_all_verified_blobs()
        self.assertEqual(len(blobs), 1)

        # delete blob
        yield self.bm.delete_blobs([blob_hash])
        self.assertFalse(os.path.isfile(os.path.join(self.blob_dir, blob_hash)))
        blobs = yield self.bm.get_all_verified_blobs()
        self.assertEqual(len(blobs), 0)
        blobs = yield f2d(self.bm.storage.get_all_blob_hashes())
        self.assertEqual(len(blobs), 0)
        self.assertNotIn(blob_hash, self.bm.blobs)

        # delete blob that was already deleted once
        yield self.bm.delete_blobs([blob_hash])

        # deleting a blob that does not exist should not raise
        blob_hash = random_lbry_hash()
        yield self.bm.delete_blobs([blob_hash])
Example no. 17
    @defer.inlineCallbacks
    def test_store_blob(self):
        blob_hash = random_lbry_hash()
        yield self.store_fake_blob(blob_hash)
        blob_hashes = yield self.storage.get_all_blob_hashes()
        self.assertEqual(blob_hashes, [blob_hash])