def resolve(self, urls) -> List:
    """Resolve each raw URL to its controlling claim row.

    Per-URL failures are collected into the result list as exception
    instances (``ValueError`` for unparsable URLs, ``LookupError`` when
    the channel or stream cannot be found) rather than raised, so one bad
    URL does not abort the batch.
    """
    resolved = []
    for raw_url in urls:
        try:
            parsed = URL.parse(raw_url)
        except ValueError as parse_error:
            resolved.append(parse_error)
            continue
        channel_row = None
        if parsed.has_channel:
            found = self._search(is_controlling=True, **parsed.channel.to_dict())
            if not found:
                resolved.append(LookupError(f'Could not find channel in "{raw_url}".'))
                continue
            channel_row = found[0]
        if not parsed.has_stream:
            # channel-only URL: the channel row (or None) is the answer
            resolved.append(channel_row)
            continue
        stream_query = parsed.stream.to_dict()
        if channel_row is not None:
            # restrict the stream search to claims signed by the channel
            stream_query['channel_hash'] = channel_row['claim_hash']
        found = self._search(is_controlling=True, **stream_query)
        if found:
            resolved.append(found[0])
        else:
            resolved.append(LookupError(f'Could not find stream in "{raw_url}".'))
    return resolved
async def resolve(self, page, page_size, *uris):
    """Resolve one or more LBRY URIs against the current claim trie.

    Validates every claim id appearing in the URIs before touching the
    network, then fetches the raw resolutions and dispatches to batch or
    single-URI handling.  Errors are returned as ``{'error': message}``
    dicts instead of propagating to the caller.
    """
    unique_uris = set(uris)
    try:
        # fail fast on malformed claim ids before any network round trip
        for candidate in unique_uris:
            for segment in URL.parse(candidate).parts:
                if segment.claim_id:
                    validate_claim_id(segment.claim_id)
        trie_root = self.ledger.headers.claim_trie_root
        header_hash = self.ledger.headers.hash().decode()
        responses = await self.network.get_values_for_uris(header_hash, *unique_uris)
        handler = self._batch_handle if len(unique_uris) > 1 else self._handle_resolutions
        return await handler(responses, unique_uris, page, page_size, trie_root)
    except ValueError as err:
        return {'error': err.args[0]}
    except Exception as unexpected:
        log.exception(unexpected)
        return {'error': str(unexpected)}
def _assert_url(self, url_string, **kwargs):
    """Parse ``url_string`` and assert its round-trip form and segment fields.

    Keyword arguments of the form ``<segment>_<field>`` declare expected
    values; any segment with no matching keyword must parse to ``None``.
    """
    parsed = URL.parse(url_string)
    # the string form always normalizes to a lbry:// prefix
    expected = url_string if url_string.startswith('lbry://') else f'lbry://{url_string}'
    self.assertEqual(expected, str(parsed))
    # a segment is "present" when any kwarg name starts with its name
    present = {
        name for name in self.segments
        if any(key.startswith(name) for key in kwargs)
    }
    for name in self.segments:
        segment = getattr(parsed, name)
        if name in present:
            for field in self.fields:
                self.assertEqual(
                    getattr(segment, field),
                    kwargs.get(f'{name}_{field}', None))
        else:
            self.assertIsNone(segment)
def _resolve_one(self, raw_url):
    """Resolve a single raw URL to its claim row.

    Returns the matching claim dict on success, or an exception instance
    (``ValueError`` for a bad URL, ``LookupError`` for a missing channel
    or stream) instead of raising.
    """
    try:
        parsed = URL.parse(raw_url)
    except ValueError as parse_error:
        return parse_error
    channel_row = None
    if parsed.has_channel:
        channel_query = parsed.channel.to_dict()
        if set(channel_query) == {'name'}:
            # bare channel name: resolve to the controlling claim
            channel_query['is_controlling'] = True
        else:
            channel_query['order_by'] = ['^height']
        rows = self._search(**channel_query, limit=1)
        if not rows:
            return LookupError(f'Could not find channel in "{raw_url}".')
        channel_row = rows[0]
    if not parsed.has_stream:
        return channel_row
    stream_query = parsed.stream.to_dict()
    if channel_row is not None:
        if set(stream_query) == {'name'}:
            # temporarily emulate is_controlling for claims in channel
            stream_query['order_by'] = ['effective_amount']
        else:
            stream_query['order_by'] = ['^channel_join']
        stream_query['channel_hash'] = channel_row['claim_hash']
        stream_query['is_channel_signature_valid'] = 1
    elif set(stream_query) == {'name'}:
        stream_query['is_controlling'] = 1
    rows = self._search(**stream_query, limit=1)
    if rows:
        return rows[0]
    return LookupError(f'Could not find stream in "{raw_url}".')
async def _handle_resolve_uri_response(self, uri, resolution, claim_trie_root, page=0, page_size=10):
    """Turn a raw resolution payload for ``uri`` into a verified result dict.

    "winning" responses are checked against ``claim_trie_root`` via
    ``_verify_proof``; certificates and claims are then parsed/validated,
    and claims-in-channel listings are paginated.

    Returns a dict that may contain ``certificate``, ``claim``,
    ``claims_in_channel`` and ``total_claims``, or an error dict when
    nothing resolved or a channel-signed stream claim fails its
    signature check.
    """
    result = {}
    parsed_uri = URL.parse(uri)
    certificate_response = None
    # parse an included certificate
    if 'certificate' in resolution:
        certificate_response = resolution['certificate']['result']
        certificate_resolution_type = resolution['certificate']['resolution_type']
        if certificate_resolution_type == "winning" and certificate_response:
            # only winning results carry a merkle proof; 'height' marks a proven result
            if 'height' in certificate_response:
                certificate_response = _verify_proof(parsed_uri.stream.name,
                                                     claim_trie_root,
                                                     certificate_response,
                                                     ledger=self.ledger)
        elif certificate_resolution_type not in ['winning', 'claim_id', 'sequence']:
            raise Exception(f"unknown response type: {certificate_resolution_type}")
        result['certificate'] = await self.parse_and_validate_claim_result(certificate_response)
        # NOTE: may be overwritten below with the actual page of claims
        result['claims_in_channel'] = len(resolution.get('unverified_claims_in_channel', []))
    # if this was a resolution for a name, parse the result
    if 'claim' in resolution:
        claim_response = resolution['claim']['result']
        claim_resolution_type = resolution['claim']['resolution_type']
        if claim_resolution_type == "winning" and claim_response:
            if 'height' in claim_response:
                claim_response = _verify_proof(parsed_uri.stream.name,
                                               claim_trie_root,
                                               claim_response,
                                               ledger=self.ledger)
        elif claim_resolution_type not in ["sequence", "winning", "claim_id"]:
            raise Exception(f"unknown response type: {claim_resolution_type}")
        result['claim'] = await self.parse_and_validate_claim_result(claim_response,
                                                                     certificate_response)
    # if this was a resolution for a name in a channel make sure there is only one valid
    # match
    elif 'unverified_claims_for_name' in resolution and 'certificate' in result:
        unverified_claims_for_name = resolution['unverified_claims_for_name']
        channel_info = await self.get_channel_claims_page(unverified_claims_for_name,
                                                          result['certificate'], page=1)
        claims_in_channel, upper_bound = channel_info
        if not claims_in_channel:
            log.error("No valid claims for this name for this channel")
        elif len(claims_in_channel) > 1:
            # multiple signed claims share the name; pick a deterministic winner
            log.warning("Multiple signed claims for the same name.")
            winner = pick_winner_from_channel_path_collision(claims_in_channel)
            if winner:
                result['claim'] = winner
            else:
                log.error("No valid claims for this name for this channel")
        else:
            result['claim'] = claims_in_channel[0]
    # parse and validate claims in a channel iteratively into pages of results
    elif 'unverified_claims_in_channel' in resolution and 'certificate' in result:
        ids_to_check = resolution['unverified_claims_in_channel']
        channel_info = await self.get_channel_claims_page(ids_to_check, result['certificate'],
                                                          page=page, page_size=page_size)
        claims_in_channel, upper_bound = channel_info
        if claims_in_channel:
            result['total_claims'] = upper_bound
            result['claims_in_channel'] = claims_in_channel
    elif 'error' not in result:
        return {'error': 'claim not found', 'success': False, 'uri': str(parsed_uri)}
    # invalid signatures can only return outside a channel
    if result.get('claim', {}).get('has_signature', False):
        if parsed_uri.has_stream and not result['claim']['signature_is_valid']:
            return {'error': 'claim not found', 'success': False, 'uri': str(parsed_uri)}
    return result
def _fail_url(self, url):
    """Assert that parsing ``url`` raises the invalid-URL ``ValueError``."""
    # callable form of assertRaisesRegex — equivalent to the context-manager idiom
    self.assertRaisesRegex(ValueError, 'Invalid LBRY URL', URL.parse, url)
async def download_stream_from_uri(
        self, uri, exchange_rate_manager: 'ExchangeRateManager',
        timeout: typing.Optional[float] = None,
        file_name: typing.Optional[str] = None,
        download_directory: typing.Optional[str] = None,
        save_file: typing.Optional[bool] = None,
        resolve_timeout: float = 3.0) -> ManagedStream:
    """Resolve ``uri``, pay any key fee, and start downloading the stream.

    Either resumes/updates an existing stream for the same claim or starts
    a fresh ``ManagedStream``.  Always reports time-to-first-bytes
    analytics from the ``finally`` block when an analytics manager is set.

    :param uri: LBRY URI; must resolve to a stream claim, not a channel
    :param exchange_rate_manager: used to convert the claim fee to LBC
    :param timeout: download timeout; defaults to config.download_timeout
    :param file_name: forces save_file=True when given without save_file
    :param save_file: whether to write the stream to disk (config default)
    :param resolve_timeout: seconds allowed for the resolve call
    :raises ResolveError: channel claim, failed resolve, or resolve error
    :raises ResolveTimeout: resolve exceeded ``resolve_timeout``
    :raises KeyFeeAboveMaxAllowed: fee exceeds the configured maximum
    :raises InsufficientFundsError: wallet balance cannot cover the fee
    :raises DownloadDataTimeout: download exceeded ``timeout``
    """
    timeout = timeout or self.config.download_timeout
    start_time = self.loop.time()
    resolved_time = None
    stream = None
    error = None
    outpoint = None
    if save_file is None:
        save_file = self.config.save_files
    if file_name and not save_file:
        # an explicit file name implies the caller wants the file on disk
        save_file = True
    if save_file:
        download_directory = download_directory or self.config.download_dir
    else:
        download_directory = None
    try:
        # resolve the claim
        if not URL.parse(uri).has_stream:
            raise ResolveError(
                "cannot download a channel claim, specify a /path")
        try:
            resolved_result = self._convert_to_old_resolve_output(
                await asyncio.wait_for(self.wallet.ledger.resolve([uri]), resolve_timeout))
        except asyncio.TimeoutError:
            raise ResolveTimeout(uri)
        # persist every successful resolution, not just the requested uri
        await self.storage.save_claims_for_resolve([
            value for value in resolved_result.values() if 'error' not in value
        ])
        resolved = resolved_result.get(uri, {})
        # handle both the flat ('value') and nested ('claim') result shapes
        resolved = resolved if 'value' in resolved else resolved.get(
            'claim')
        if not resolved:
            raise ResolveError(f"Failed to resolve stream at '{uri}'")
        if 'error' in resolved:
            raise ResolveError(
                f"error resolving stream: {resolved['error']}")
        claim = Claim.from_bytes(binascii.unhexlify(resolved['protobuf']))
        outpoint = f"{resolved['txid']}:{resolved['nout']}"
        resolved_time = self.loop.time() - start_time
        # resume or update an existing stream, if the stream changed download it and delete the old one after
        updated_stream, to_replace = await self._check_update_or_replace(
            outpoint, resolved['claim_id'], claim)
        if updated_stream:
            log.info("already have stream for %s", uri)
            if save_file and updated_stream.output_file_exists:
                # output already on disk; don't save it again
                save_file = False
            await updated_stream.start(node=self.node, timeout=timeout,
                                       save_now=save_file)
            if not updated_stream.output_file_exists and (
                    save_file or file_name or download_directory):
                await updated_stream.save_file(
                    file_name=file_name,
                    download_directory=download_directory,
                    node=self.node)
            return updated_stream
        content_fee = None
        fee_amount, fee_address = None, None
        # check that the fee is payable
        if not to_replace and claim.stream.has_fee:
            fee_amount = round(
                exchange_rate_manager.convert_currency(
                    claim.stream.fee.currency, "LBC",
                    claim.stream.fee.amount), 5)
            max_fee_amount = round(
                exchange_rate_manager.convert_currency(
                    self.config.max_key_fee['currency'], "LBC",
                    Decimal(self.config.max_key_fee['amount'])), 5)
            if fee_amount > max_fee_amount:
                msg = f"fee of {fee_amount} exceeds max configured to allow of {max_fee_amount}"
                log.warning(msg)
                raise KeyFeeAboveMaxAllowed(msg)
            balance = await self.wallet.default_account.get_balance()
            if lbc_to_dewies(str(fee_amount)) > balance:
                msg = f"fee of {fee_amount} exceeds max available balance"
                log.warning(msg)
                raise InsufficientFundsError(msg)
            fee_address = claim.stream.fee.address
        stream = ManagedStream(self.loop, self.config, self.blob_manager,
                               claim.stream.source.sd_hash,
                               download_directory, file_name,
                               ManagedStream.STATUS_RUNNING,
                               content_fee=content_fee,
                               analytics_manager=self.analytics_manager)
        log.info("starting download for %s", uri)
        before_download = self.loop.time()
        await stream.start(self.node, timeout)
        stream.set_claim(resolved, claim)
        if to_replace:  # delete old stream now that the replacement has started downloading
            await self.delete_stream(to_replace)
        elif fee_address:
            # pay the key fee only after the download has started
            stream.content_fee = await self.wallet.send_amount_to_address(
                lbc_to_dewies(str(fee_amount)), fee_address.encode('latin1'))
            log.info("paid fee of %s for %s", fee_amount, uri)
            await self.storage.save_content_fee(stream.stream_hash,
                                                stream.content_fee)
        self.streams[stream.sd_hash] = stream
        self.storage.content_claim_callbacks[
            stream.stream_hash] = lambda: self._update_content_claim(stream)
        await self.storage.save_content_claim(stream.stream_hash, outpoint)
        if save_file:
            # NOTE(review): the `loop=` kwarg was removed from asyncio.wait_for
            # in Python 3.10 — confirm the supported runtime version
            await asyncio.wait_for(stream.save_file(node=self.node),
                                   timeout - (self.loop.time() - before_download),
                                   loop=self.loop)
        return stream
    except asyncio.TimeoutError:
        error = DownloadDataTimeout(stream.sd_hash)
        raise error
    except Exception as err:  # forgive data timeout, dont delete stream
        error = err
        raise
    finally:
        # report analytics whenever we errored or actually fetched data
        if self.analytics_manager and (
                error or (stream and (stream.downloader.time_to_descriptor
                                      or stream.downloader.time_to_first_bytes))):
            self.loop.create_task(
                self.analytics_manager.send_time_to_first_bytes(
                    resolved_time, self.loop.time() - start_time,
                    None if not stream else stream.download_id,
                    uri, outpoint,
                    None if not stream else len(
                        stream.downloader.blob_downloader.active_connections),
                    None if not stream else len(
                        stream.downloader.blob_downloader.scores),
                    False if not stream else stream.downloader.added_fixed_peers,
                    self.config.fixed_peer_delay if not stream
                    else stream.downloader.fixed_peers_delay,
                    None if not stream else stream.sd_hash,
                    None if not stream else stream.downloader.time_to_descriptor,
                    None if not (stream and stream.descriptor)
                    else stream.descriptor.blobs[0].blob_hash,
                    None if not (stream and stream.descriptor)
                    else stream.descriptor.blobs[0].length,
                    None if not stream else stream.downloader.time_to_first_bytes,
                    None if not error else error.__class__.__name__))
async def claimtrie_getvalueforuri(self, block_hash, uri, known_certificates=None):
    """Resolve ``uri`` against the claim trie at ``block_hash``.

    For channel URIs, returns the channel certificate plus an unverified
    index of the claims it signed (all of them, or only those matching the
    URI path).  For plain claim URIs, returns the claim (by partial/full
    claim id, sequence, or winning name) and, when the claim is signed,
    its channel certificate.

    Fix: removed the no-op self-assignments ``uri = uri`` and
    ``block_hash = block_hash`` present in the original — they had no
    effect on behavior.

    :param block_hash: block at which to evaluate the winning claim/proof
    :param uri: LBRY URI to resolve
    :param known_certificates: unused here; kept for interface compatibility
    :returns: dict with ``certificate``/``claim``/``unverified_*`` keys,
        or ``{'error': ...}`` for an unparsable URI
    """
    # TODO: this thing is huge, refactor
    CLAIM_ID = "claim_id"
    WINNING = "winning"
    SEQUENCE = "sequence"
    try:
        parsed_uri = URL.parse(uri)
    except ValueError as err:
        return {'error': err.args[0]}
    result = {}
    if parsed_uri.has_channel:
        certificate = None
        # TODO: this is also done on the else, refactor
        if parsed_uri.channel.claim_id:
            # short ids (< 40 hex chars) go through prefix matching
            if len(parsed_uri.channel.claim_id) < 40:
                certificate_info = self.claimtrie_getpartialmatch(
                    parsed_uri.channel.name, parsed_uri.channel.claim_id)
            else:
                certificate_info = await self.claimtrie_getclaimbyid(parsed_uri.channel.claim_id)
            if certificate_info and self.claim_matches_name(certificate_info, parsed_uri.channel.name):
                certificate = {'resolution_type': CLAIM_ID, 'result': certificate_info}
        elif parsed_uri.claim_sequence:
            certificate_info = await self.claimtrie_getnthclaimforname(
                parsed_uri.name, parsed_uri.claim_sequence)
            if certificate_info:
                certificate = {'resolution_type': SEQUENCE, 'result': certificate_info}
        else:
            certificate_info = await self.claimtrie_getvalue(parsed_uri.name, block_hash)
            if certificate_info:
                certificate = {'resolution_type': WINNING, 'result': certificate_info}
        if certificate and 'claim_id' not in certificate['result']:
            # a result without a claim_id cannot be used as a certificate
            return result
        if certificate:
            result['certificate'] = certificate
            channel_id = certificate['result']['claim_id']
            claims_in_channel = self.claimtrie_getclaimssignedbyidminimal(channel_id)
            if not parsed_uri.path:
                result['unverified_claims_in_channel'] = {
                    claim['claim_id']: (claim['name'], claim['height'])
                    for claim in claims_in_channel}
            else:
                # making an assumption that there aren't case conflicts on an existing channel
                norm_path = self.normalize_name(parsed_uri.path)
                result['unverified_claims_for_name'] = {
                    claim['claim_id']: (claim['name'], claim['height'])
                    for claim in claims_in_channel
                    if self.normalize_name(claim['name']) == norm_path}
    else:
        claim = None
        if parsed_uri.claim_id:
            # short ids (< 40 hex chars) go through prefix matching
            if len(parsed_uri.claim_id) < 40:
                claim_info = self.claimtrie_getpartialmatch(parsed_uri.name, parsed_uri.claim_id)
            else:
                claim_info = await self.claimtrie_getclaimbyid(parsed_uri.claim_id)
            if claim_info and self.claim_matches_name(claim_info, parsed_uri.name):
                claim = {'resolution_type': CLAIM_ID, 'result': claim_info}
        elif parsed_uri.claim_sequence:
            claim_info = await self.claimtrie_getnthclaimforname(
                parsed_uri.name, parsed_uri.claim_sequence)
            if claim_info:
                claim = {'resolution_type': SEQUENCE, 'result': claim_info}
        else:
            claim_info = await self.claimtrie_getvalue(parsed_uri.name, block_hash)
            if claim_info:
                claim = {'resolution_type': WINNING, 'result': claim_info}
        if (claim and
                # is not an unclaimed winning name
                (claim['resolution_type'] != WINNING
                 or proof_has_winning_claim(claim['result']['proof']))):
            # claim ids are stored little-endian in the db
            raw_claim_id = unhexlify(claim['result']['claim_id'])[::-1]
            raw_certificate_id = self.db.get_claim_info(raw_claim_id).cert_id
            if raw_certificate_id:
                certificate_id = hash_to_hex_str(raw_certificate_id)
                certificate = await self.claimtrie_getclaimbyid(certificate_id)
                if certificate:
                    certificate = {'resolution_type': CLAIM_ID, 'result': certificate}
                    result['certificate'] = certificate
            result['claim'] = claim
    return result
def get_claims(self, cols, **constraints):
    """Execute a claim search built from high-level keyword constraints.

    Translates ``order_by`` names, comparison-prefixed integer params
    (e.g. ``height='<=400000'``), claim/channel/txid filters and array
    attributes (tag/language/location) into SQL constraints, then runs
    the joined query.

    Fixes over the original: the bare ``except:`` (which also swallowed
    ``KeyboardInterrupt``/``SystemExit`` into the log path) is narrowed to
    ``except Exception:``, and the SELECT statement is built once so the
    query printed on failure is exactly the one that was executed.

    :param cols: comma-separated column expression for the SELECT clause
    :returns: list of fetched rows
    :raises NameError: if an ``order_by`` field is not in ORDER_FIELDS
    """
    if 'order_by' in constraints:
        sql_order_by = []
        for order_by in constraints['order_by']:
            # a leading '^' requests ascending order
            is_asc = order_by.startswith('^')
            column = order_by[1:] if is_asc else order_by
            if column not in self.ORDER_FIELDS:
                raise NameError(f'{column} is not a valid order_by field')
            if column == 'name':
                column = 'normalized'
            sql_order_by.append(
                f"claim.{column} ASC" if is_asc else f"claim.{column} DESC")
        constraints['order_by'] = sql_order_by
    # integer params may carry a comparison prefix that maps to a suffix op
    ops = {'<=': '__lte', '>=': '__gte', '<': '__lt', '>': '__gt'}
    for constraint in self.INTEGER_PARAMS:
        if constraint in constraints:
            value = constraints.pop(constraint)
            postfix = ''
            if isinstance(value, str):
                if len(value) >= 2 and value[:2] in ops:
                    postfix, value = ops[value[:2]], int(value[2:])
                elif len(value) >= 1 and value[0] in ops:
                    postfix, value = ops[value[0]], int(value[1:])
            constraints[f'claim.{constraint}{postfix}'] = value
    if constraints.pop('is_controlling', False):
        if {'sequence', 'amount_order'}.isdisjoint(constraints):
            constraints['claimtrie.claim_hash__is_not_null'] = ''
    if 'sequence' in constraints:
        constraints['order_by'] = 'claim.activation_height ASC'
        constraints['offset'] = int(constraints.pop('sequence')) - 1
        constraints['limit'] = 1
    if 'amount_order' in constraints:
        constraints['order_by'] = 'claim.effective_amount DESC'
        constraints['offset'] = int(constraints.pop('amount_order')) - 1
        constraints['limit'] = 1
    if 'claim_id' in constraints:
        # claim hashes are stored little-endian
        constraints['claim.claim_hash'] = sqlite3.Binary(
            unhexlify(constraints.pop('claim_id'))[::-1])
    if 'name' in constraints:
        constraints['claim.normalized'] = normalize_name(
            constraints.pop('name'))
    if 'channel' in constraints:
        url = URL.parse(constraints.pop('channel'))
        if url.channel.claim_id:
            constraints['channel_id'] = url.channel.claim_id
        else:
            constraints['channel_name'] = url.channel.name
    if 'channel_id' in constraints:
        constraints['channel_hash'] = unhexlify(
            constraints.pop('channel_id'))[::-1]
    if 'channel_hash' in constraints:
        constraints['channel.claim_hash'] = sqlite3.Binary(
            constraints.pop('channel_hash'))
    if 'channel_name' in constraints:
        constraints['channel.normalized'] = normalize_name(
            constraints.pop('channel_name'))
    if 'txid' in constraints:
        tx_hash = unhexlify(constraints.pop('txid'))[::-1]
        nout = constraints.pop('nout', 0)
        constraints['claim.txo_hash'] = sqlite3.Binary(
            tx_hash + struct.pack('<I', nout))
    _apply_constraints_for_array_attributes(constraints, 'tag')
    _apply_constraints_for_array_attributes(constraints, 'language')
    _apply_constraints_for_array_attributes(constraints, 'location')
    # build the statement once so the failure path logs exactly what ran
    select = f"""
        SELECT {cols} FROM claim
        LEFT JOIN claimtrie USING (claim_hash)
        LEFT JOIN claim as channel ON (claim.channel_hash=channel.claim_hash)
        """
    try:
        return self.db.execute(*query(select, **constraints)).fetchall()
    except Exception:
        self.logger.exception('Failed to execute claim search query:')
        print(query(select, **constraints))
        raise