def _checksig(self, value, address):
    try:
        claim_dict = Claim.from_bytes(value)
        cert_id = claim_dict.signing_channel_hash
        if not self.should_validate_signatures:
            return cert_id
        if cert_id:
            cert_claim = self.db.get_claim_info(cert_id)
            if cert_claim:
                certificate = Claim.from_bytes(cert_claim.value)
                claim_dict.validate_signature(address, certificate)
                return cert_id
    except Exception:
        # undecodable claim or failed signature check: fall through and return None
        pass
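
# A minimal usage sketch (not from the original module) of the decode-and-verify
# flow that _checksig wraps. It assumes validate_signature() raises on an invalid
# signature, which the try/except above suggests; raw_claim_bytes, raw_cert_bytes,
# and holding_address are hypothetical inputs.
def verify_signed_claim(raw_claim_bytes: bytes, raw_cert_bytes: bytes, holding_address) -> bool:
    claim = Claim.from_bytes(raw_claim_bytes)
    if not claim.signing_channel_hash:
        return False  # unsigned claim: nothing to verify
    certificate = Claim.from_bytes(raw_cert_bytes)
    try:
        claim.validate_signature(holding_address, certificate)
        return True
    except Exception:
        return False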
def __init__(self, stream_hash: str, outpoint: opt_str = None, claim_id: opt_str = None,
             name: opt_str = None, amount: opt_int = None, height: opt_int = None,
             serialized: opt_str = None, channel_claim_id: opt_str = None,
             address: opt_str = None, claim_sequence: opt_int = None,
             channel_name: opt_str = None):
    self.stream_hash = stream_hash
    self.claim_id = claim_id
    self.outpoint = outpoint
    self.claim_name = name
    self.amount = amount
    self.height = height
    self.claim: typing.Optional[Claim] = None if not serialized else Claim.from_bytes(
        binascii.unhexlify(serialized))
    self.claim_address = address
    self.claim_sequence = claim_sequence
    self.channel_claim_id = channel_claim_id
    self.channel_name = channel_name
def claim(self) -> Claim:
    if self.is_claim:
        if not isinstance(self.script.values['claim'], Claim):
            self.script.values['claim'] = Claim.from_bytes(self.script.values['claim'])
        return self.script.values['claim']
    raise ValueError('Only claim name and claim update have the claim payload.')
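
# Usage sketch for the lazy decode above (assuming the method is exposed as a
# property; a decorator may have been dropped in extraction). The first access
# replaces the raw bytes cached in script.values['claim'] with a decoded Claim,
# so the protobuf parse happens at most once. `claim_output` is hypothetical.
first = claim_output.claim   # decodes the raw bytes and caches the Claim
second = claim_output.claim  # returns the cached instance
assert first is second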
def test_stream_claim(self):
    stream = Stream()
    claim = stream.claim
    self.assertEqual(claim.claim_type, Claim.STREAM)
    claim = Claim.from_bytes(claim.to_bytes())
    self.assertEqual(claim.claim_type, Claim.STREAM)
    self.assertIsNotNone(claim.stream)
    with self.assertRaisesRegex(ValueError, 'Claim is not a channel.'):
        print(claim.channel)
async def parse_and_validate_claim_result(self, claim_result, certificate=None, raw=False):
    if not claim_result or 'value' not in claim_result:
        return claim_result

    claim_result['decoded_claim'] = False
    decoded = None

    if not raw:
        claim_value = claim_result['value']
        try:
            decoded = claim_result['value'] = Claim.from_bytes(unhexlify(claim_value))
            claim_result['decoded_claim'] = True
        except DecodeError:
            pass

    if decoded:
        claim_result['has_signature'] = False
        if decoded.is_signed:
            claim_tx = await self.network.get_transaction(claim_result['txid'])
            if certificate is None:
                log.info("fetching certificate to check claim signature")
                certificate = await self.network.get_claims_by_ids(decoded.signing_channel_id)
                if not certificate:
                    log.warning('Certificate %s not found', decoded.signing_channel_id)
            cert_tx = await self.network.get_transaction(certificate['txid']) if certificate else None
            claim_result['has_signature'] = True
            claim_result['signature_is_valid'] = False
            validated, channel_name = validate_claim_signature_and_get_channel_name(
                claim_result, certificate, self.ledger, claim_tx=claim_tx, cert_tx=cert_tx)
            claim_result['channel_name'] = channel_name
            if validated:
                claim_result['signature_is_valid'] = True

    if 'height' in claim_result and claim_result['height'] is None:
        claim_result['height'] = -1

    if 'amount' in claim_result:
        claim_result = format_amount_value(claim_result)

    claim_result['permanent_url'] = _get_permanent_url(
        claim_result, decoded.signing_channel_id if decoded else None)

    return claim_result
def test_stream_claim(self):
    stream = Stream()
    claim = stream.claim
    self.assertTrue(claim.is_stream)
    self.assertFalse(claim.is_channel)
    claim = Claim.from_bytes(claim.to_bytes())
    self.assertTrue(claim.is_stream)
    self.assertFalse(claim.is_channel)
    self.assertIsNotNone(claim.stream)
    with self.assertRaisesRegex(ValueError, 'Claim is not a channel.'):
        print(claim.channel)
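
# Round-trip sketch distilled from the two tests above: a freshly constructed
# Stream exposes a Claim whose type survives to_bytes()/from_bytes(), and
# accessing the wrong payload (claim.channel on a stream claim) raises ValueError.
stream_claim = Stream().claim
assert stream_claim.is_stream and not stream_claim.is_channel
decoded = Claim.from_bytes(stream_claim.to_bytes())
assert decoded.is_stream and decoded.stream is not None
try:
    decoded.channel
except ValueError as e:
    assert 'Claim is not a channel.' in str(e)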
async def get_certificate_and_validate_result(self, claim_result):
    if not claim_result or 'value' not in claim_result:
        return claim_result
    certificate = None
    certificate_id = Claim.from_bytes(unhexlify(claim_result['value'])).signing_channel_id
    if certificate_id:
        certificate = await self.network.get_claims_by_ids(certificate_id)
        certificate = certificate.pop(certificate_id) if certificate else None
    return await self.parse_and_validate_claim_result(claim_result, certificate=certificate)
def _decode_claim_result(claim):
    if 'decoded_claim' in claim:
        return claim
    if 'value' not in claim:
        log.warning('Got an invalid claim while parsing, please report: %s', claim)
        claim['protobuf'] = None
        claim['value'] = None
        backend_message = ' SDK message: ' + claim.get('error', '')
        claim['error'] = "Failed to parse: missing value." + backend_message
        return claim
    try:
        if not isinstance(claim['value'], Claim):
            claim['value'] = Claim.from_bytes(unhexlify(claim['value']))
        claim['protobuf'] = hexlify(claim['value'].to_bytes())
        claim['decoded_claim'] = True
    except DecodeError:
        claim['decoded_claim'] = False
        claim['protobuf'] = claim['value']
        claim['value'] = None
    return claim
def _save_content_claim(transaction, claim_outpoint, stream_hash):
    # get the claim id and serialized metadata
    claim_info = transaction.execute(
        "select claim_id, serialized_metadata from claim where claim_outpoint=?",
        (claim_outpoint, )).fetchone()
    if not claim_info:
        raise Exception("claim not found")
    new_claim_id, claim = claim_info[0], Claim.from_bytes(binascii.unhexlify(claim_info[1]))

    # certificate claims should not be in the content_claim table
    if not claim.is_stream:
        raise Exception("claim does not contain a stream")

    # get the known sd hash for this stream
    known_sd_hash = transaction.execute(
        "select sd_hash from stream where stream_hash=?", (stream_hash, )).fetchone()
    if not known_sd_hash:
        raise Exception("stream not found")
    # check the claim contains the same sd hash
    if known_sd_hash[0] != claim.stream.source.sd_hash:
        raise Exception("stream mismatch")

    # if there is a current claim associated to the file, check that the new claim is an update to it
    current_associated_content = transaction.execute(
        "select claim_outpoint from content_claim where stream_hash=?", (stream_hash, )).fetchone()
    if current_associated_content:
        current_associated_claim_id = transaction.execute(
            "select claim_id from claim where claim_outpoint=?", current_associated_content
        ).fetchone()[0]
        if current_associated_claim_id != new_claim_id:
            raise Exception(
                f"mismatching claim ids when updating stream {current_associated_claim_id} vs {new_claim_id}"
            )

    # update the claim associated to the file
    transaction.execute(
        "insert or replace into content_claim values (?, ?)", (stream_hash, claim_outpoint))
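
# The consistency guard from _save_content_claim, isolated as a helper sketch
# (hypothetical; not in the original module): a content claim must be a stream
# claim whose source sd_hash matches the stream already stored locally.
def claim_matches_stream(serialized_metadata: str, known_sd_hash: str) -> bool:
    claim = Claim.from_bytes(binascii.unhexlify(serialized_metadata))
    return claim.is_stream and claim.stream.source.sd_hash == known_sd_hash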
def _decode_claim_result(claim):
    if 'has_signature' in claim and claim['has_signature']:
        if not claim['signature_is_valid']:
            log.warning("lbry://%s#%s has an invalid signature",
                        claim['name'], claim['claim_id'])
    if 'value' not in claim:
        log.warning('Got an invalid claim while parsing, please report: %s', claim)
        claim['hex'] = None
        claim['value'] = None
        backend_message = (' SDK message: ' + claim['error']) if 'error' in claim else ''
        claim['error'] = "Failed to parse: missing value." + backend_message
        return claim
    try:
        if not isinstance(claim['value'], Claim):
            claim['value'] = Claim.from_bytes(claim['value'])
        claim['hex'] = hexlify(claim['value'].to_bytes())
    except DecodeError:
        claim['hex'] = claim['value']
        claim['value'] = None
        claim['error'] = "Failed to decode value"
    return claim
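
# Hex round-trip sketch mirroring the two _decode_claim_result variants above
# (hypothetical helper): a resolve result carries the claim value as hex, which
# decodes via Claim.from_bytes(unhexlify(...)) and re-serializes via
# hexlify(claim.to_bytes()); DecodeError marks undecodable payloads.
def decode_claim_value(hex_value):
    try:
        claim = Claim.from_bytes(unhexlify(hex_value))
    except DecodeError:
        return None, None  # undecodable payload; the callers above record the failure instead
    return claim, hexlify(claim.to_bytes())  # decoded Claim and its re-serialized hex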
async def _download_stream_from_uri(self, uri, timeout: float,
                                    exchange_rate_manager: 'ExchangeRateManager',
                                    file_name: typing.Optional[str] = None) -> ManagedStream:
    start_time = self.loop.time()
    parsed_uri = parse_lbry_uri(uri)
    if parsed_uri.is_channel:
        raise ResolveError("cannot download a channel claim, specify a /path")

    # resolve the claim
    resolved = (await self.wallet.ledger.resolve(0, 10, uri)).get(uri, {})
    resolved = resolved if 'value' in resolved else resolved.get('claim')
    if not resolved:
        raise ResolveError(f"Failed to resolve stream at '{uri}'")
    if 'error' in resolved:
        raise ResolveError(f"error resolving stream: {resolved['error']}")

    claim = Claim.from_bytes(binascii.unhexlify(resolved['protobuf']))
    outpoint = f"{resolved['txid']}:{resolved['nout']}"
    resolved_time = self.loop.time() - start_time

    # resume or update an existing stream, if the stream changed download it and delete the old one after
    updated_stream, to_replace = await self._check_update_or_replace(outpoint, resolved['claim_id'], claim)
    if updated_stream:
        return updated_stream

    # check that the fee is payable
    fee_amount, fee_address = None, None
    if claim.stream.has_fee:
        fee_amount = round(exchange_rate_manager.convert_currency(
            claim.stream.fee.currency, "LBC", claim.stream.fee.amount), 5)
        max_fee_amount = round(exchange_rate_manager.convert_currency(
            self.config.max_key_fee['currency'], "LBC", Decimal(self.config.max_key_fee['amount'])), 5)
        if fee_amount > max_fee_amount:
            msg = f"fee of {fee_amount} exceeds max configured to allow of {max_fee_amount}"
            log.warning(msg)
            raise KeyFeeAboveMaxAllowed(msg)
        balance = await self.wallet.default_account.get_balance()
        if lbc_to_dewies(str(fee_amount)) > balance:
            msg = f"fee of {fee_amount} exceeds max available balance"
            log.warning(msg)
            raise InsufficientFundsError(msg)
        fee_address = claim.stream.fee.address

    # download the stream
    download_id = binascii.hexlify(generate_id()).decode()
    downloader = StreamDownloader(self.loop, self.config, self.blob_manager,
                                  claim.stream.source.sd_hash, self.config.download_dir, file_name)

    stream = None
    descriptor_time_fut = self.loop.create_future()
    start_download_time = self.loop.time()
    time_to_descriptor = None
    time_to_first_bytes = None
    error = None
    try:
        stream = await asyncio.wait_for(
            asyncio.ensure_future(
                self.start_downloader(descriptor_time_fut, downloader, download_id,
                                      outpoint, claim, resolved, file_name)),
            timeout)
        time_to_descriptor = await descriptor_time_fut
        time_to_first_bytes = self.loop.time() - start_download_time - time_to_descriptor
        self.wait_for_stream_finished(stream)
        if fee_address and fee_amount and not to_replace:
            stream.tx = await self.wallet.send_amount_to_address(
                lbc_to_dewies(str(fee_amount)), fee_address.encode('latin1'))
        elif to_replace:  # delete old stream now that the replacement has started downloading
            await self.delete_stream(to_replace)
    except asyncio.TimeoutError:
        if descriptor_time_fut.done():
            time_to_descriptor = descriptor_time_fut.result()
            error = DownloadDataTimeout(downloader.sd_hash)
            self.blob_manager.delete_blob(downloader.sd_hash)
            await self.storage.delete_stream(downloader.descriptor)
        else:
            descriptor_time_fut.cancel()
            error = DownloadSDTimeout(downloader.sd_hash)
        if stream:
            await self.stop_stream(stream)
        else:
            downloader.stop()
    if error:
        log.warning(error)
    if self.analytics_manager:
        self.loop.create_task(
            self.analytics_manager.send_time_to_first_bytes(
                resolved_time, self.loop.time() - start_time, download_id,
                parse_lbry_uri(uri).name, outpoint,
                None if not stream else len(stream.downloader.blob_downloader.active_connections),
                None if not stream else len(stream.downloader.blob_downloader.scores),
                False if not downloader else downloader.added_fixed_peers,
                self.config.fixed_peer_delay if not downloader else downloader.fixed_peers_delay,
                claim.stream.source.sd_hash, time_to_descriptor,
                None if not (stream and stream.descriptor) else stream.descriptor.blobs[0].blob_hash,
                None if not (stream and stream.descriptor) else stream.descriptor.blobs[0].length,
                time_to_first_bytes,
                None if not error else error.__class__.__name__))
    if error:
        raise error
    return stream
async def download_stream_from_uri(self, uri, exchange_rate_manager: 'ExchangeRateManager',
                                   timeout: typing.Optional[float] = None,
                                   file_name: typing.Optional[str] = None,
                                   download_directory: typing.Optional[str] = None,
                                   save_file: typing.Optional[bool] = None,
                                   resolve_timeout: float = 3.0) -> ManagedStream:
    timeout = timeout or self.config.download_timeout
    start_time = self.loop.time()
    resolved_time = None
    stream = None
    error = None
    outpoint = None
    if save_file is None:
        save_file = self.config.save_files
    if file_name and not save_file:
        save_file = True
    if save_file:
        download_directory = download_directory or self.config.download_dir
    else:
        download_directory = None
    try:
        # resolve the claim
        if not URL.parse(uri).has_stream:
            raise ResolveError("cannot download a channel claim, specify a /path")
        try:
            resolved_result = self._convert_to_old_resolve_output(
                await asyncio.wait_for(self.wallet.ledger.resolve([uri]), resolve_timeout))
        except asyncio.TimeoutError:
            raise ResolveTimeout(uri)
        await self.storage.save_claims_for_resolve(
            [value for value in resolved_result.values() if 'error' not in value])
        resolved = resolved_result.get(uri, {})
        resolved = resolved if 'value' in resolved else resolved.get('claim')
        if not resolved:
            raise ResolveError(f"Failed to resolve stream at '{uri}'")
        if 'error' in resolved:
            raise ResolveError(f"error resolving stream: {resolved['error']}")

        claim = Claim.from_bytes(binascii.unhexlify(resolved['protobuf']))
        outpoint = f"{resolved['txid']}:{resolved['nout']}"
        resolved_time = self.loop.time() - start_time

        # resume or update an existing stream, if the stream changed download it and delete the old one after
        updated_stream, to_replace = await self._check_update_or_replace(
            outpoint, resolved['claim_id'], claim)
        if updated_stream:
            log.info("already have stream for %s", uri)
            if save_file and updated_stream.output_file_exists:
                save_file = False
            await updated_stream.start(node=self.node, timeout=timeout, save_now=save_file)
            if not updated_stream.output_file_exists and (save_file or file_name or download_directory):
                await updated_stream.save_file(
                    file_name=file_name, download_directory=download_directory, node=self.node)
            return updated_stream

        content_fee = None
        fee_amount, fee_address = None, None

        # check that the fee is payable
        if not to_replace and claim.stream.has_fee:
            fee_amount = round(exchange_rate_manager.convert_currency(
                claim.stream.fee.currency, "LBC", claim.stream.fee.amount), 5)
            max_fee_amount = round(exchange_rate_manager.convert_currency(
                self.config.max_key_fee['currency'], "LBC", Decimal(self.config.max_key_fee['amount'])), 5)
            if fee_amount > max_fee_amount:
                msg = f"fee of {fee_amount} exceeds max configured to allow of {max_fee_amount}"
                log.warning(msg)
                raise KeyFeeAboveMaxAllowed(msg)
            balance = await self.wallet.default_account.get_balance()
            if lbc_to_dewies(str(fee_amount)) > balance:
                msg = f"fee of {fee_amount} exceeds max available balance"
                log.warning(msg)
                raise InsufficientFundsError(msg)
            fee_address = claim.stream.fee.address

        stream = ManagedStream(self.loop, self.config, self.blob_manager,
                               claim.stream.source.sd_hash, download_directory, file_name,
                               ManagedStream.STATUS_RUNNING, content_fee=content_fee,
                               analytics_manager=self.analytics_manager)
        log.info("starting download for %s", uri)
        before_download = self.loop.time()
        await stream.start(self.node, timeout)
        stream.set_claim(resolved, claim)
        if to_replace:  # delete old stream now that the replacement has started downloading
            await self.delete_stream(to_replace)
        elif fee_address:
            stream.content_fee = await self.wallet.send_amount_to_address(
                lbc_to_dewies(str(fee_amount)), fee_address.encode('latin1'))
            log.info("paid fee of %s for %s", fee_amount, uri)
            await self.storage.save_content_fee(stream.stream_hash, stream.content_fee)

        self.streams[stream.sd_hash] = stream
        self.storage.content_claim_callbacks[stream.stream_hash] = lambda: self._update_content_claim(stream)
        await self.storage.save_content_claim(stream.stream_hash, outpoint)
        if save_file:
            await asyncio.wait_for(stream.save_file(node=self.node),
                                   timeout - (self.loop.time() - before_download),
                                   loop=self.loop)
        return stream
    except asyncio.TimeoutError:
        error = DownloadDataTimeout(stream.sd_hash)
        raise error
    except Exception as err:  # forgive data timeout, don't delete stream
        error = err
        raise
    finally:
        if self.analytics_manager and (error or (stream and (stream.downloader.time_to_descriptor or
                                                             stream.downloader.time_to_first_bytes))):
            self.loop.create_task(
                self.analytics_manager.send_time_to_first_bytes(
                    resolved_time, self.loop.time() - start_time,
                    None if not stream else stream.download_id, uri, outpoint,
                    None if not stream else len(stream.downloader.blob_downloader.active_connections),
                    None if not stream else len(stream.downloader.blob_downloader.scores),
                    False if not stream else stream.downloader.added_fixed_peers,
                    self.config.fixed_peer_delay if not stream else stream.downloader.fixed_peers_delay,
                    None if not stream else stream.sd_hash,
                    None if not stream else stream.downloader.time_to_descriptor,
                    None if not (stream and stream.descriptor) else stream.descriptor.blobs[0].blob_hash,
                    None if not (stream and stream.descriptor) else stream.descriptor.blobs[0].length,
                    None if not stream else stream.downloader.time_to_first_bytes,
                    None if not error else error.__class__.__name__))
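
# Fee-check sketch factored out of the two download paths above (hypothetical
# helper; it assumes the same exchange_rate_manager.convert_currency() interface
# and the error classes used there). Returns (fee_amount, fee_address), or
# (None, None) for free streams.
def check_fee_payable(claim, exchange_rate_manager, max_key_fee: dict, balance: int):
    if not claim.stream.has_fee:
        return None, None
    fee_amount = round(exchange_rate_manager.convert_currency(
        claim.stream.fee.currency, "LBC", claim.stream.fee.amount), 5)
    max_fee_amount = round(exchange_rate_manager.convert_currency(
        max_key_fee['currency'], "LBC", Decimal(max_key_fee['amount'])), 5)
    if fee_amount > max_fee_amount:
        raise KeyFeeAboveMaxAllowed(
            f"fee of {fee_amount} exceeds max configured to allow of {max_fee_amount}")
    if lbc_to_dewies(str(fee_amount)) > balance:
        raise InsufficientFundsError(f"fee of {fee_amount} exceeds max available balance")
    return fee_amount, claim.stream.fee.address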
def _make_db(new_db):
    # create the new tables
    new_db.executescript(CREATE_TABLES_QUERY)

    # first migrate the blobs
    blobs = blobs_db_cursor.execute("select * from blobs").fetchall()
    _populate_blobs(blobs)  # pylint: disable=no-value-for-parameter
    log.info("migrated %i blobs", new_db.execute("select count(*) from blob").fetchone()[0])

    # used to store the query arguments if we need to try re-importing the lbry file later
    file_args = {}  # <sd_hash>: args tuple
    file_outpoints = {}  # <outpoint tuple>: sd_hash

    # get the file and stream queries ready
    for (rowid, sd_hash, stream_hash, key, stream_name, suggested_file_name, data_rate, status) in \
        lbryfile_db.execute(
            "select distinct lbry_files.rowid, d.sd_blob_hash, lbry_files.*, o.blob_data_rate, o.status "
            "from lbry_files "
            "inner join lbry_file_descriptors d on lbry_files.stream_hash=d.stream_hash "
            "inner join lbry_file_options o on lbry_files.stream_hash=o.stream_hash"):

        # this is used to try to link the file to a content claim after we've imported all the files
        if rowid in old_rowid_to_outpoint:
            file_outpoints[old_rowid_to_outpoint[rowid]] = sd_hash
        elif sd_hash in old_sd_hash_to_outpoint:
            file_outpoints[old_sd_hash_to_outpoint[sd_hash]] = sd_hash

        sd_hash_to_stream_hash[sd_hash] = stream_hash
        if stream_hash in stream_hash_to_stream_blobs:
            file_args[sd_hash] = (
                sd_hash, stream_hash, key, stream_name, suggested_file_name, data_rate or 0.0,
                status, stream_hash_to_stream_blobs.pop(stream_hash)
            )

    # used to store the query arguments if we need to try re-importing the claim
    claim_queries = {}  # <sd_hash>: claim query tuple

    # get the claim queries ready, only keep those with associated files
    for outpoint, sd_hash in file_outpoints.items():
        if outpoint in claim_outpoint_queries:
            claim_queries[sd_hash] = claim_outpoint_queries[outpoint]

    # insert the claims
    new_db.executemany(
        "insert or ignore into claim values (?, ?, ?, ?, ?, ?, ?, ?, ?)",
        [
            ("%s:%i" % (claim_arg_tup[0], claim_arg_tup[1]), claim_arg_tup[2], claim_arg_tup[3],
             claim_arg_tup[7], claim_arg_tup[6], claim_arg_tup[8],
             Claim.from_bytes(claim_arg_tup[8]).signing_channel_id, claim_arg_tup[5], claim_arg_tup[4])
            for sd_hash, claim_arg_tup in claim_queries.items() if claim_arg_tup
        ]  # sd_hash, (txid, nout, claim_id, name, sequence, address, height, amount, serialized)
    )
    log.info("migrated %i claims", new_db.execute("select count(*) from claim").fetchone()[0])

    damaged_stream_sds = []
    # import the files and get sd hashes of streams to attempt recovering
    for sd_hash, file_query in file_args.items():
        failed_sd = _import_file(*file_query)
        if failed_sd:
            damaged_stream_sds.append(failed_sd)

    # recover damaged streams
    if damaged_stream_sds:
        blob_dir = os.path.join(conf.data_dir, "blobfiles")
        damaged_sds_on_disk = [] if not os.path.isdir(blob_dir) else list(
            {p for p in os.listdir(blob_dir) if p in damaged_stream_sds})
        for damaged_sd in damaged_sds_on_disk:
            try:
                decoded, sd_length = verify_sd_blob(damaged_sd, blob_dir)
                blobs = decoded['blobs']
                _add_recovered_blobs(blobs, damaged_sd, sd_length)  # pylint: disable=no-value-for-parameter
                _import_file(*file_args[damaged_sd])
                damaged_stream_sds.remove(damaged_sd)
            except (OSError, ValueError, TypeError, IOError, AssertionError, sqlite3.IntegrityError):
                continue

    log.info("migrated %i files", new_db.execute("select count(*) from file").fetchone()[0])

    # associate the content claims to their respective files
    for claim_arg_tup in claim_queries.values():
        if claim_arg_tup and (claim_arg_tup[0], claim_arg_tup[1]) in file_outpoints \
                and file_outpoints[(claim_arg_tup[0], claim_arg_tup[1])] in sd_hash_to_stream_hash:
            try:
                new_db.execute(
                    "insert or ignore into content_claim values (?, ?)",
                    (sd_hash_to_stream_hash.get(file_outpoints.get((claim_arg_tup[0], claim_arg_tup[1]))),
                     "%s:%i" % (claim_arg_tup[0], claim_arg_tup[1])))
            except sqlite3.IntegrityError:
                continue
    log.info("migrated %i content claims",
             new_db.execute("select count(*) from content_claim").fetchone()[0])