Code example #1 (score: 0)
File: test_SQLiteStorage.py — Project: xuanduoc84/lbry
 async def make_and_store_fake_stream(self, blob_count=2, stream_hash=None):
     """Build a fake stream with *blob_count* dummy blobs and persist it.

     A random stream hash is generated when none is supplied; every blob
     gets a fixed length of 100, the placeholder iv "DEADBEEF", and a
     random blob hash.
     """
     stream_hash = stream_hash or random_lbry_hash()
     fake_blobs = []
     for blob_num in range(1, blob_count + 1):
         fake_blobs.append(BlobInfo(blob_num, 100, "DEADBEEF", random_lbry_hash()))
     await self.store_fake_stream(stream_hash, fake_blobs)
Code example #2 (score: 0)
File: migrate8to9.py — Project: xzc1017964340/lbry
def do_migration(conf):
    """Migrate lbrynet.sqlite from schema version 8 to 9.

    Re-derives each stream's descriptor hash from its blob rows and removes
    any stream whose stored ``sd_hash`` no longer matches, cleaning up the
    associated files under ``blob_dir``.

    :param conf: configuration object providing ``data_dir``
    """
    db_path = os.path.join(conf.data_dir, "lbrynet.sqlite")
    blob_dir = os.path.join(conf.data_dir, "blobfiles")
    connection = sqlite3.connect(db_path)
    try:
        cursor = connection.cursor()

        query = "select stream_name, stream_key, suggested_filename, sd_hash, stream_hash from stream"
        streams = cursor.execute(query).fetchall()

        # Group blob rows per stream. blob_length may be NULL (left outer
        # join: blob row missing), so coerce it to 0.
        blobs = cursor.execute(
            "select s.stream_hash, s.position, s.iv, b.blob_hash, b.blob_length from stream_blob s "
            "left outer join blob b ON b.blob_hash=s.blob_hash order by s.position"
        ).fetchall()
        blobs_by_stream = {}
        for stream_hash, position, iv, blob_hash, blob_length in blobs:
            blobs_by_stream.setdefault(stream_hash, []).append(
                BlobInfo(position, blob_length or 0, iv, blob_hash))

        for stream_name, stream_key, suggested_filename, sd_hash, stream_hash in streams:
            # Use .get() so a stream row with no stream_blob rows doesn't
            # abort the whole migration with a KeyError.
            stream_blobs = blobs_by_stream.get(stream_hash, [])
            sd = StreamDescriptor(asyncio.get_event_loop(), blob_dir, stream_name,
                                  stream_key, suggested_filename,
                                  stream_blobs, stream_hash, sd_hash)
            if sd_hash != sd.calculate_sd_hash():
                log.warning("Stream for descriptor %s is invalid, cleaning it up",
                            sd_hash)
                blob_hashes = [blob.blob_hash for blob in stream_blobs]
                delete_stream(cursor, stream_hash, sd_hash, blob_hashes, blob_dir)

        connection.commit()
    finally:
        # Close the database even if the migration raises part-way through.
        connection.close()
Code example #3 (score: 0)
        def _get_blobs_for_stream(transaction):
            """Return the stream's BlobInfo list ordered by blob number.

            Lengths come from the blob table; a blob with no matching
            length row (or, when ``only_completed`` is set, one whose
            status is not 'finished') is reported with length 0.
            """
            position_rows = transaction.execute(
                "select blob_hash, position, iv from stream_blob where stream_hash=?",
                (stream_hash, )).fetchall()
            if only_completed:
                length_rows = transaction.execute(
                    "select b.blob_hash, b.blob_length from blob b "
                    "inner join stream_blob s ON b.blob_hash=s.blob_hash and b.status='finished' and s.stream_hash=?",
                    (stream_hash, )).fetchall()
            else:
                length_rows = transaction.execute(
                    "select b.blob_hash, b.blob_length from blob b "
                    "inner join stream_blob s ON b.blob_hash=s.blob_hash and s.stream_hash=?",
                    (stream_hash, )).fetchall()

            lengths_by_hash = {row_hash: row_length for row_hash, row_length in length_rows}

            return sorted(
                (BlobInfo(position, lengths_by_hash.get(row_hash, 0), iv, row_hash)
                 for row_hash, position, iv in position_rows),
                key=lambda info: info.blob_num)
Code example #4 (score: 0)
 def _from_stream_descriptor_blob(cls, loop: asyncio.BaseEventLoop, blob_dir: str,
                                  blob: BlobFile) -> 'StreamDescriptor':
     """Deserialize and validate a stream descriptor read from an sd blob file.

     :param loop: event loop passed through to the descriptor
     :param blob_dir: directory containing the blob files
     :param blob: verified sd blob whose file holds the descriptor JSON
     :raises InvalidStreamDescriptorError: if the JSON is malformed or any
         structural invariant of the descriptor is violated
     """
     assert os.path.isfile(blob.file_path)
     with open(blob.file_path, 'rb') as f:
         json_bytes = f.read()
     try:
         decoded = json.loads(json_bytes.decode())
     except json.JSONDecodeError:
         # Surface malformed JSON as a descriptor error instead of leaking
         # a raw JSONDecodeError to callers.
         raise InvalidStreamDescriptorError("Does not decode as valid JSON")
     if decoded['blobs'][-1]['length'] != 0:
         raise InvalidStreamDescriptorError("Does not end with a zero-length blob.")
     if any(blob_info['length'] == 0 for blob_info in decoded['blobs'][:-1]):
         raise InvalidStreamDescriptorError("Contains zero-length data blob")
     if 'blob_hash' in decoded['blobs'][-1]:
         raise InvalidStreamDescriptorError("Stream terminator blob should not have a hash")
     # Blob numbers must be 0..n-1 with no gaps or reordering (same check as
     # the newer descriptor parser in this codebase).
     if any(i != blob_info['blob_num'] for i, blob_info in enumerate(decoded['blobs'])):
         raise InvalidStreamDescriptorError("Stream contains out of order or skipped blobs")
     descriptor = cls(
         loop, blob_dir,
         binascii.unhexlify(decoded['stream_name']).decode(),
         decoded['key'],
         binascii.unhexlify(decoded['suggested_file_name']).decode(),
         [BlobInfo(info['blob_num'], info['length'], info['iv'], info.get('blob_hash'))
          for info in decoded['blobs']],
         decoded['stream_hash'],
         blob.blob_hash
     )
     if descriptor.get_stream_hash() != decoded['stream_hash']:
         raise InvalidStreamDescriptorError("Stream hash does not match stream metadata")
     return descriptor
Code example #5 (score: 0)
    async def create_stream(cls, loop: asyncio.BaseEventLoop, blob_dir: str,
                            file_path: str, key: typing.Optional[bytes] = None,
                            iv_generator: typing.Optional[typing.Generator[bytes, None, None]] = None
                            ) -> 'StreamDescriptor':
        """Encrypt *file_path* into blobs and return the resulting descriptor.

        A random AES key and iv generator are created when none are given.
        The blob list always ends with a zero-length terminator blob.
        """
        iv_generator = iv_generator or random_iv_generator()
        key = key or os.urandom(AES.block_size // 8)

        blobs: typing.List[BlobInfo] = []
        for blob_num, blob_bytes in enumerate(file_reader(file_path)):
            blob_info = await BlobFile.create_from_unencrypted(
                loop, blob_dir, key, next(iv_generator), blob_bytes, blob_num)
            blobs.append(blob_info)

        # add the stream terminator
        terminator_iv = binascii.hexlify(next(iv_generator)).decode()
        blobs.append(BlobInfo(len(blobs), 0, terminator_iv))

        file_name = os.path.basename(file_path)
        descriptor = cls(
            loop, blob_dir, file_name, binascii.hexlify(key).decode(), file_name, blobs
        )
        sd_blob = await descriptor.make_sd_blob()
        descriptor.sd_hash = sd_blob.blob_hash
        return descriptor
Code example #6 (score: 0)
 def _from_stream_descriptor_blob(cls, loop: asyncio.BaseEventLoop, blob_dir: str,
                                  blob: AbstractBlob) -> 'StreamDescriptor':
     """Parse an sd blob into a validated StreamDescriptor.

     The blob is deleted if its contents do not even decode as JSON; any
     structural violation raises InvalidStreamDescriptorError.
     """
     with blob.reader_context() as descriptor_reader:
         raw_bytes = descriptor_reader.read()
     try:
         metadata = json.loads(raw_bytes.decode())
     except json.JSONDecodeError:
         blob.delete()
         raise InvalidStreamDescriptorError("Does not decode as valid JSON")

     blob_entries = metadata['blobs']
     terminator = blob_entries[-1]
     if terminator['length'] != 0:
         raise InvalidStreamDescriptorError("Does not end with a zero-length blob.")
     if any(entry['length'] == 0 for entry in blob_entries[:-1]):
         raise InvalidStreamDescriptorError("Contains zero-length data blob")
     if 'blob_hash' in terminator:
         raise InvalidStreamDescriptorError("Stream terminator blob should not have a hash")
     if any(pos != entry['blob_num'] for pos, entry in enumerate(blob_entries)):
         raise InvalidStreamDescriptorError("Stream contains out of order or skipped blobs")

     descriptor = cls(
         loop, blob_dir,
         binascii.unhexlify(metadata['stream_name']).decode(),
         metadata['key'],
         binascii.unhexlify(metadata['suggested_file_name']).decode(),
         [BlobInfo(entry['blob_num'], entry['length'], entry['iv'], entry.get('blob_hash'))
          for entry in blob_entries],
         metadata['stream_hash'],
         blob.blob_hash
     )
     if descriptor.get_stream_hash() != metadata['stream_hash']:
         raise InvalidStreamDescriptorError("Stream hash does not match stream metadata")
     return descriptor
Code example #7 (score: 0)
File: test_SQLiteStorage.py — Project: xuanduoc84/lbry
 async def store_fake_stream(self,
                             stream_hash,
                             blobs=None,
                             file_name="fake_file",
                             key="DEADBEEF"):
     """Persist a minimal stream for tests and return its descriptor.

     When *blobs* is empty or None, a single placeholder blob is used.
     """
     if not blobs:
         blobs = [BlobInfo(1, 100, "DEADBEEF", random_lbry_hash())]
     loop = asyncio.get_event_loop()
     descriptor = StreamDescriptor(loop, self.blob_dir, file_name, key,
                                   file_name, blobs, stream_hash)
     sd_blob = await descriptor.make_sd_blob()
     await self.storage.store_stream(sd_blob, descriptor)
     return descriptor
Code example #8 (score: 0)
    async def create_from_unencrypted(cls, loop: asyncio.BaseEventLoop, blob_dir: str, key: bytes,
                                      iv: bytes, unencrypted: bytes, blob_num: int) -> BlobInfo:
        """
        Create an encrypted BlobFile from plaintext bytes
        """
        # Encrypt first so we know the blob's hash and on-disk length.
        encrypted_bytes, blob_hash = encrypt_blob_bytes(key, iv, unencrypted)
        blob_length = len(encrypted_bytes)
        new_blob = cls(loop, blob_dir, blob_hash, blob_length)
        writer = new_blob.open_for_writing()
        writer.write(encrypted_bytes)
        # Don't return until the written blob has been verified.
        await new_blob.verified.wait()
        iv_hex = binascii.hexlify(iv).decode()
        return BlobInfo(blob_num, blob_length, iv_hex, blob_hash)
Code example #9 (score: 0)
File: blob_file.py — Project: EnigmaCurry/lbry
    async def create_from_unencrypted(
        cls,
        loop: asyncio.BaseEventLoop,
        blob_dir: typing.Optional[str],
        key: bytes,
        iv: bytes,
        unencrypted: bytes,
        blob_num: int,
        blob_completed_callback: typing.Optional[typing.Callable[
            ['AbstractBlob'], None]] = None
    ) -> BlobInfo:
        """
        Create an encrypted BlobFile from plaintext bytes
        """
        # Encrypt up front so the blob's hash and length are known.
        ciphertext, blob_hash = encrypt_blob_bytes(key, iv, unencrypted)
        blob_length = len(ciphertext)
        new_blob = cls(loop, blob_hash, blob_length, blob_completed_callback, blob_dir)
        writer = new_blob.get_blob_writer()
        writer.write(ciphertext)
        # Block until the written bytes verify against blob_hash.
        await new_blob.verified.wait()
        iv_hex = binascii.hexlify(iv).decode()
        return BlobInfo(blob_num, blob_length, iv_hex, blob_hash)