Example #1
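Building a StreamDescriptor from a plaintext file: each chunk returned by file_reader is encrypted into a blob, a zero-length terminator BlobInfo is appended, and the descriptor's sd_hash is taken from the freshly written SD blob.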
    @classmethod
    async def create_stream(
            cls, loop: asyncio.AbstractEventLoop, blob_dir: str, file_path: str, key: typing.Optional[bytes] = None,
            iv_generator: typing.Optional[typing.Generator[bytes, None, None]] = None,
            old_sort: bool = False,
            blob_completed_callback: typing.Optional[typing.Callable[['AbstractBlob'],
                                                                     asyncio.Task]] = None) -> 'StreamDescriptor':
        blobs: typing.List[BlobInfo] = []

        iv_generator = iv_generator or random_iv_generator()
        key = key or os.urandom(AES.block_size // 8)  # default to a fresh random AES key
        blob_num = -1  # incremented before first use, so blob numbering starts at 0
        for blob_bytes in file_reader(file_path):
            blob_num += 1
            blob_info = await BlobFile.create_from_unencrypted(
                loop, blob_dir, key, next(iv_generator), blob_bytes, blob_num, blob_completed_callback
            )
            blobs.append(blob_info)
        blobs.append(
            BlobInfo(len(blobs), 0, binascii.hexlify(next(iv_generator)).decode()))  # add the stream terminator
        descriptor = cls(
            loop, blob_dir, os.path.basename(file_path), binascii.hexlify(key).decode(),
            os.path.basename(file_path),  # the file name also serves as the suggested file name
            blobs
        )
        sd_blob = await descriptor.make_sd_blob(old_sort=old_sort, blob_completed_callback=blob_completed_callback)
        descriptor.sd_hash = sd_blob.blob_hash
        return descriptor
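
A minimal call-site sketch, assuming the method above is the @classmethod StreamDescriptor.create_stream and that the blob directory already exists (both paths below are hypothetical):

import asyncio

async def main():
    loop = asyncio.get_running_loop()
    # encrypts /tmp/example.bin chunk by chunk into /tmp/blobs and writes the SD blob
    descriptor = await StreamDescriptor.create_stream(loop, "/tmp/blobs", "/tmp/example.bin")
    print(descriptor.sd_hash)

asyncio.run(main())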
Example #2
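Parsing a stream descriptor blob back into a StreamDescriptor: the JSON payload is decoded, the blob list is validated (zero-length terminator, no zero-length data blobs, no hash on the terminator, sequential blob numbers), and the recomputed stream hash must match the stored metadata.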
 @classmethod
 def _from_stream_descriptor_blob(cls, loop: asyncio.AbstractEventLoop, blob_dir: str,
                                  blob: AbstractBlob) -> 'StreamDescriptor':
     with blob.reader_context() as blob_reader:
         json_bytes = blob_reader.read()
     try:
         decoded = json.loads(json_bytes.decode())
     except json.JSONDecodeError:
         blob.delete()
         raise InvalidStreamDescriptorError("Does not decode as valid JSON")
     if decoded['blobs'][-1]['length'] != 0:
         raise InvalidStreamDescriptorError("Does not end with a zero-length blob.")
     if any(blob_info['length'] == 0 for blob_info in decoded['blobs'][:-1]):
         raise InvalidStreamDescriptorError("Contains zero-length data blob")
     if 'blob_hash' in decoded['blobs'][-1]:
         raise InvalidStreamDescriptorError("Stream terminator blob should not have a hash")
     if any(i != blob_info['blob_num'] for i, blob_info in enumerate(decoded['blobs'])):
         raise InvalidStreamDescriptorError("Stream contains out of order or skipped blobs")
     descriptor = cls(
         loop, blob_dir,
         binascii.unhexlify(decoded['stream_name']).decode(),
         decoded['key'],
         binascii.unhexlify(decoded['suggested_file_name']).decode(),
         [BlobInfo(info['blob_num'], info['length'], info['iv'], info.get('blob_hash'))
          for info in decoded['blobs']],
         decoded['stream_hash'],
         blob.blob_hash
     )
     if descriptor.get_stream_hash() != decoded['stream_hash']:
         raise InvalidStreamDescriptorError("Stream hash does not match stream metadata")
     return descriptor
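
For reference, a decoded descriptor that passes these checks has roughly the following shape (hex values abbreviated or hypothetical; note that stream_name and suggested_file_name are hex-encoded strings):

{
    "stream_name": "6578616d706c652e62696e",
    "key": "deadbeef00000000deadbeef00000000",
    "suggested_file_name": "6578616d706c652e62696e",
    "stream_hash": "...",
    "blobs": [
        {"blob_num": 0, "length": 1048576, "iv": "...", "blob_hash": "..."},
        {"blob_num": 1, "length": 0, "iv": "..."}
    ]
}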
Example #3
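Loading the BlobInfo list for a stream from SQLite: stream_blob rows supply position and IV, the blob table supplies lengths (restricted to status 'finished' when only_completed is set), and a row without a blob hash marks the stream terminator.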
        def _get_blobs_for_stream(transaction):
            crypt_blob_infos = []
            stream_blobs = transaction.execute(
                "select blob_hash, position, iv from stream_blob where stream_hash=? "
                "order by position asc", (stream_hash, )
            ).fetchall()
            if only_completed:
                lengths = transaction.execute(
                    "select b.blob_hash, b.blob_length from blob b "
                    "inner join stream_blob s ON b.blob_hash=s.blob_hash and b.status='finished' and s.stream_hash=?",
                    (stream_hash, )
                ).fetchall()
            else:
                lengths = transaction.execute(
                    "select b.blob_hash, b.blob_length from blob b "
                    "inner join stream_blob s ON b.blob_hash=s.blob_hash and s.stream_hash=?",
                    (stream_hash, )
                ).fetchall()

            blob_length_dict = dict(lengths)  # blob_hash -> blob_length

            for blob_hash, position, iv in stream_blobs:
                blob_length = blob_length_dict.get(blob_hash, 0)
                crypt_blob_infos.append(BlobInfo(position, blob_length, iv, blob_hash))
                if not blob_hash:  # the terminator row has no blob hash; stop after appending it
                    break
            return crypt_blob_infos
Example #4
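A test helper that fabricates a stream of blob_count BlobInfo entries with a placeholder IV and random hashes, then persists it via store_fake_stream (Example #6).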
 async def make_and_store_fake_stream(self, blob_count=2, stream_hash=None):
     stream_hash = stream_hash or random_lbry_hash()
     blobs = [
         BlobInfo(i + 1, 100, "DEADBEEF", random_lbry_hash())
         for i in range(blob_count)
     ]
     await self.store_fake_stream(stream_hash, blobs)
Example #5
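A database migration that rebuilds each stream's BlobInfo list from SQLite, recomputes the SD hash, and cleans up any stream whose stored sd_hash no longer matches.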
def do_migration(conf):
    db_path = os.path.join(conf.data_dir, "lbrynet.sqlite")
    blob_dir = os.path.join(conf.data_dir, "blobfiles")
    connection = sqlite3.connect(db_path)
    cursor = connection.cursor()

    query = "select stream_name, stream_key, suggested_filename, sd_hash, stream_hash from stream"
    streams = cursor.execute(query).fetchall()

    blobs = cursor.execute("select s.stream_hash, s.position, s.iv, b.blob_hash, b.blob_length from stream_blob s "
                           "left outer join blob b ON b.blob_hash=s.blob_hash order by s.position").fetchall()
    blobs_by_stream = {}
    for stream_hash, position, iv, blob_hash, blob_length in blobs:
        # the terminator row has no matching blob, so its length comes back NULL from the outer join
        blobs_by_stream.setdefault(stream_hash, []).append(BlobInfo(position, blob_length or 0, iv, blob_hash))

    for stream_name, stream_key, suggested_filename, sd_hash, stream_hash in streams:
        sd = StreamDescriptor(None, blob_dir, stream_name, stream_key, suggested_filename,
                              blobs_by_stream[stream_hash], stream_hash, sd_hash)
        if sd_hash != sd.calculate_sd_hash():
            log.info("Stream for descriptor %s is invalid, cleaning it up", sd_hash)
            blob_hashes = [blob.blob_hash for blob in blobs_by_stream[stream_hash]]
            delete_stream(cursor, stream_hash, sd_hash, blob_hashes, blob_dir)

    connection.commit()
    connection.close()
Example #6
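A test helper that builds a StreamDescriptor from the given BlobInfo list, writes its SD blob, and records the stream in storage.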
 async def store_fake_stream(self, stream_hash, blobs=None, file_name="fake_file", key="DEADBEEF"):
     blobs = blobs or [BlobInfo(1, 100, "DEADBEEF", random_lbry_hash())]
     descriptor = StreamDescriptor(
         asyncio.get_event_loop(), self.blob_dir, file_name, key, file_name, blobs, stream_hash
     )
     sd_blob = await descriptor.make_sd_blob()
     await self.storage.store_stream(sd_blob, descriptor)
     return descriptor
Example #7
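Encrypting a single plaintext chunk into a blob: encrypt_blob_bytes returns the ciphertext and its hash, the blob is written and awaited until verified, and a BlobInfo carrying the blob number, length, hex-encoded IV, and hash is returned.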
    @classmethod
    async def create_from_unencrypted(
            cls, loop: asyncio.AbstractEventLoop, blob_dir: typing.Optional[str], key: bytes, iv: bytes,
            unencrypted: bytes, blob_num: int,
            blob_completed_callback: typing.Optional[typing.Callable[['AbstractBlob'], None]] = None) -> BlobInfo:
        """
        Create an encrypted BlobFile from plaintext bytes
        """

        blob_bytes, blob_hash = encrypt_blob_bytes(key, iv, unencrypted)
        length = len(blob_bytes)
        blob = cls(loop, blob_hash, length, blob_completed_callback, blob_dir)
        writer = blob.get_blob_writer()
        writer.write(blob_bytes)
        await blob.verified.wait()
        return BlobInfo(blob_num, length, binascii.hexlify(iv).decode(), blob_hash)
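
A minimal usage sketch for the helper above, assuming it is exposed as the @classmethod BlobFile.create_from_unencrypted (key and IV are 16 random bytes, matching AES.block_size // 8 from Example #1; the blob directory is hypothetical):

import asyncio
import os

async def demo():
    loop = asyncio.get_running_loop()
    key, iv = os.urandom(16), os.urandom(16)
    # blob_num 0: the first data blob of a stream
    info = await BlobFile.create_from_unencrypted(loop, "/tmp/blobs", key, iv, b"hello world", 0)
    print(info.blob_num, info.length, info.blob_hash)

asyncio.run(demo())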