def _checksig(self, name, value, address):
    try:
        parse_lbry_uri(name.decode())  # skip invalid names
        cert_id = Claim.FromString(value).publisherSignature.certificateId[::-1] or None
        if not self.should_validate_signatures:
            return cert_id
        if cert_id:
            cert_claim = self.get_claim_info(cert_id)
            if cert_claim:
                certificate = smart_decode(cert_claim.value)
                claim_dict = smart_decode(value)
                claim_dict.validate_signature(address, certificate)
                return cert_id
    except Exception:
        pass
def update_db(app_db, names_db, height_db, expiring_height):
    outpoint_db = plyvel.DB('db/claim_outpoint/')

    def get_txid_for_claim_id(claim_id):
        txid_nout = outpoint_db.get(claim_id)
        txid = txid_nout[0:64]
        return txid

    expired_names, known_types, txids, expired_channels = {}, set(), {}, {}

    # clear out the existing entries before rebuilding the index
    for key, _ in app_db:
        app_db.delete(key)

    with app_db.write_batch() as writer:
        for (claim_id, height) in height_db:
            key = struct.pack('>I40s', int(height), claim_id)
            try:
                name = names_db.get(claim_id).decode('utf8')
                parsed = parse_lbry_uri(name)
                decoded = smart_decode(values_db.get(claim_id))
                known_types.add(decoded.get('claimType', 'unknown'))
                if decoded.get('claimType') == 'certificateType' or parsed.is_channel:
                    expired_channels[name] = (height, claim_id)
                if int(height) < expiring_height:
                    expired_names[name] = (int(height), claim_id)
                    txids[name] = get_txid_for_claim_id(claim_id)
                writer.put(key, name.encode('utf8'))
            except (DecodeError, UnicodeDecodeError, URIParseError):
                continue
    return expired_names, known_types, txids, expired_channels
@defer.inlineCallbacks
def claim_name(self, name, bid, metadata, certificate_id=None,
               claim_address=None, change_address=None):
    """
    Claim a name, or update if name already claimed by user

    @param name: str, name to claim
    @param bid: float, bid amount
    @param metadata: ClaimDict compliant dict
    @param certificate_id: str (optional), claim id of channel certificate
    @param claim_address: str (optional), address to send claim to
    @param change_address: str (optional), address to send change

    @return: Deferred which returns a dict containing below items
        txid - txid of the resulting transaction
        nout - nout of the resulting claim
        fee - transaction fee paid to make claim
        claim_id - claim id of the claim
    """

    decoded = ClaimDict.load_dict(metadata)
    serialized = decoded.serialized

    if self.get_balance() < Decimal(bid):
        raise InsufficientFundsError()

    claim = yield self._send_name_claim(name, serialized.encode('hex'), bid, certificate_id,
                                        claim_address, change_address)

    if not claim['success']:
        log.error(claim)
        msg = 'Claim to name {} failed: {}'.format(name, claim['reason'])
        raise Exception(msg)

    claim = self._process_claim_out(claim)
    yield self.storage.save_claim(self._get_temp_claim_info(claim, name, bid),
                                  smart_decode(claim['value']))
    defer.returnValue(claim)
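# Hedged usage sketch (not from the source): `wallet` is assumed to be an instance of the
# class defining claim_name above, and `metadata` an already ClaimDict-compliant dict
# (for example, the fixture built in get_test_daemon further down). The result keys
# follow the docstring above.
from twisted.internet import defer

@defer.inlineCallbacks
def publish_example(wallet, metadata):
    claim = yield wallet.claim_name("example-name", 0.01, metadata)
    print("claimed in %s:%i, fee %s, claim id %s" % (
        claim['txid'], claim['nout'], claim['fee'], claim['claim_id']))
    defer.returnValue(claim)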
def get_certificate_and_validate_result(self, claim_result):
    if not claim_result or 'value' not in claim_result:
        return claim_result
    certificate = None
    certificate_id = smart_decode(claim_result['value']).certificate_id
    if certificate_id:
        certificate = yield self.network.get_claims_by_ids(certificate_id.decode())
        certificate = certificate.pop(certificate_id.decode()) if certificate else None
    return self.parse_and_validate_claim_result(claim_result, certificate=certificate)
def api_decodebyclaim(claimid):
    connection_string = get_lbrycrdd_connection_details(
        os.path.expanduser("~") + "/.lbrycrd/lbrycrd.conf")
    rpc = AuthServiceProxy(connection_string)
    claim = rpc.getvalueforname(claimid)
    if claim:
        converted = ''.join([chr(ord(i)) for i in claim['value']])
        decoded = smart_decode(converted)
        claim['value'] = decoded.claim_dict
        return json.dumps(claim)
def _save_claims(transaction):
    content_claims_to_update = []
    support_callbacks = []
    for claim_info in claim_infos:
        outpoint = "%s:%i" % (claim_info['txid'], claim_info['nout'])
        claim_id = claim_info['claim_id']
        name = claim_info['name']
        amount = int(COIN * claim_info['amount'])
        height = claim_info['height']
        address = claim_info['address']
        sequence = claim_info['claim_sequence']
        try:
            # the certificate (channel) id, if any, lives under the claim's publisherSignature
            certificate_id = claim_info['value'].get('publisherSignature', {}).get('certificateId')
        except AttributeError:
            certificate_id = None
        try:
            if claim_info['value'].get('stream', {}).get('source', {}).get('sourceType') == "lbry_sd_hash":
                source_hash = claim_info['value'].get('stream', {}).get('source', {}).get('source')
            else:
                source_hash = None
        except AttributeError:
            source_hash = None
        serialized = claim_info.get('hex') or smart_decode(claim_info['value']).serialized.encode('hex')
        transaction.execute(
            "insert or replace into claim values (?, ?, ?, ?, ?, ?, ?, ?, ?)",
            (outpoint, claim_id, name, amount, height, serialized, certificate_id, address, sequence)
        )
        if 'supports' in claim_info:
            # if this response doesn't have support info don't overwrite the existing
            # support info
            support_callbacks.append(self.save_supports(claim_id, claim_info['supports']))
        if not source_hash:
            continue
        stream_hash = transaction.execute(
            "select file.stream_hash from stream "
            "inner join file on file.stream_hash=stream.stream_hash where sd_hash=?", (source_hash, )
        ).fetchone()
        if not stream_hash:
            continue
        stream_hash = stream_hash[0]
        known_outpoint = transaction.execute(
            "select claim_outpoint from content_claim where stream_hash=?", (stream_hash, )
        ).fetchone()
        known_claim_id = transaction.execute(
            "select claim_id from claim "
            "inner join content_claim c3 ON claim.claim_outpoint=c3.claim_outpoint "
            "where c3.stream_hash=?", (stream_hash, )
        ).fetchone()
        if not known_claim_id:
            # this stream has no content claim associated with it yet
            content_claims_to_update.append((stream_hash, outpoint))
        elif known_outpoint[0] != outpoint:
            # this is an update to the claim already associated with the stream
            content_claims_to_update.append((stream_hash, outpoint))
    update_file_callbacks = []
    for stream_hash, outpoint in content_claims_to_update:
        self._save_content_claim(transaction, outpoint, stream_hash)
        if stream_hash in self.content_claim_callbacks:
            update_file_callbacks.append(self.content_claim_callbacks[stream_hash]())
    return update_file_callbacks, support_callbacks
def validate_claim_signature_and_get_channel_name(claim, certificate_claim,
                                                  claim_address, decoded_certificate=None):
    if not certificate_claim:
        return False, None
    certificate = decoded_certificate or smart_decode(certificate_claim['value'])
    if not isinstance(certificate, ClaimDict):
        raise TypeError("Certificate is not a ClaimDict: %s" % str(type(certificate)))
    if _validate_signed_claim(claim, claim_address, certificate):
        return True, certificate_claim['name']
    return False, None
def api_decodebyclaim(claimid):
    connection_string = get_lbrycrdd_connection_details()
    rpc = AuthServiceProxy(connection_string)
    claim = rpc.getvalueforname(claimid)
    if claim:
        converted = "".join([chr(ord(i)) for i in claim['value']])
        # Decode the claims and dump them back to logstash plugin
        decoded = smart_decode(converted)
        return json.dumps(decoded.claim_dict)
async def reclaim(claim_id, name):
    value = values_db.get(claim_id)
    try:
        decoded = smart_decode(value)
        stripped_sig = decoded.serialized_no_signature
        result = await rpc('claimname', [name, hexlify(stripped_sig), 0.001])
        return {'success': 'code' not in result, 'result': result}
    except (DecodeError, UnicodeDecodeError, AssertionError):
        msg = 'decode failed for %s: %s' % (claim_id, hexlify(value))
        return {'success': False, 'result': msg}
@defer.inlineCallbacks
def save_claim(self, claim_info, claim_dict=None):
    outpoint = "%s:%i" % (claim_info['txid'], claim_info['nout'])
    claim_id = claim_info['claim_id']
    name = claim_info['name']
    amount = int(COIN * claim_info['amount'])
    height = claim_info['height']
    address = claim_info['address']
    sequence = claim_info['claim_sequence']
    claim_dict = claim_dict or smart_decode(claim_info['value'])
    serialized = claim_dict.serialized.encode('hex')

    def _save_claim(transaction):
        transaction.execute(
            "insert or replace into claim values (?, ?, ?, ?, ?, ?, ?, ?, ?)",
            (outpoint, claim_id, name, amount, height, serialized,
             claim_dict.certificate_id, address, sequence)
        )

    yield self.db.runInteraction(_save_claim)

    if 'supports' in claim_info:
        # if this response doesn't have support info don't overwrite the existing
        # support info
        yield self.save_supports(claim_id, claim_info['supports'])

    # check for content claim updates
    if claim_dict.source_hash:
        existing_file_stream_hash = yield self.run_and_return_one_or_none(
            "select file.stream_hash from stream "
            "inner join file on file.stream_hash=stream.stream_hash "
            "where sd_hash=?", claim_dict.source_hash
        )
        if existing_file_stream_hash:
            known_outpoint = yield self.run_and_return_one_or_none(
                "select claim_outpoint from content_claim where stream_hash=?",
                existing_file_stream_hash
            )
            known_claim_id = yield self.run_and_return_one_or_none(
                "select claim_id from claim "
                "inner join content_claim c3 ON claim.claim_outpoint=c3.claim_outpoint "
                "where c3.stream_hash=?", existing_file_stream_hash
            )
            if not known_claim_id:
                # this is a claim matching one of our files that has
                # no associated claim yet
                log.info("discovered content claim %s for stream %s",
                         claim_id, existing_file_stream_hash)
                yield self.save_content_claim(existing_file_stream_hash, outpoint)
            elif known_claim_id and known_claim_id == claim_id:
                if known_outpoint != outpoint:
                    # this is an update for one of our files
                    log.info("updating content claim %s for stream %s",
                             claim_id, existing_file_stream_hash)
                    yield self.save_content_claim(existing_file_stream_hash, outpoint)
                else:
                    # we're up to date already
                    pass
            else:
                # this is a claim containing a clone of a file that we have
                log.warning("claim %s contains the same stream as the one already downloaded "
                            "from claim %s", claim_id, known_claim_id)
def get_value_and_address_by_claimid(claim_id, name):
    lbrycrdd = LBRYcrd(os.path.join(os.path.expanduser("~"),
                                    "Library/Application Support/lbrycrd/lbrycrd.conf"))
    claims = lbrycrdd("getclaimsforname", name)
    for claim in claims['claims']:
        if claim['claimId'] == claim_id:
            vout = get_vout(lbrycrdd("getrawtransaction", claim['txid'], 1), claim['n'])
            script_bytes = vout['scriptPubKey']['hex'].decode('hex')
            address = get_address_from_output_script(script_bytes)[1][1]
            value_bytes = "".join(chr(ord(i)) for i in claim['value'])
            return smart_decode(value_bytes).serialized.encode('hex'), address
def check_name(name):
    lbrycrdd = LBRYcrd(lbrycrdd_path)
    winning = lbrycrdd("getvalueforname", str(name))
    try:
        claim_value = "".join(chr(ord(i)) for i in winning['value'])
        decoded = smart_decode(claim_value)
        winning['value'] = decoded
    except:
        pass
    if winning:
        updates, root = get_claim_chain(winning['txid'], name, lbrycrdd)
        winning['claim updates'] = updates
        winning['claim root'] = root or winning['txid']
    return winning
def api_decode(txid, nout):
    connection_string = get_lbrycrdd_connection_details(
        os.path.expanduser("~") + "/.lbrycrd/lbrycrd.conf")
    rpc = AuthServiceProxy(connection_string)
    result = rpc.getclaimsfortx(txid)
    claim = None
    for claim_out in result:
        if claim_out['nOut'] == int(nout):
            claim = claim_out
            break
    if claim:
        converted = ''.join([chr(ord(i)) for i in claim['value']])
        decoded = smart_decode(converted)
        claim['value'] = decoded.claim_dict
        return json.dumps(claim)
def test_smart_decode_raises(self):
    with self.assertRaises(TypeError):
        smart_decode(1)

    with self.assertRaises(DecodeError):
        smart_decode("aaab")

    with self.assertRaises(DecodeError):
        smart_decode("{'bogus_dict':1}")
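# Hedged sketch (not from the source): illustrates the decode-or-mark-error pattern the
# surrounding examples follow; assumes smart_decode and DecodeError are importable from
# the lbryschema package, as in the tests above.
from lbryschema.decode import smart_decode
from lbryschema.error import DecodeError

def try_decode_claim_value(raw_value):
    # return the decoded claim dict plus its serialized hex, or an error marker on failure
    try:
        decoded = smart_decode(raw_value)
        return {'value': decoded.claim_dict, 'hex': decoded.serialized.encode('hex'), 'error': None}
    except DecodeError:
        return {'value': None, 'hex': raw_value, 'error': 'Failed to decode value'}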
def get_test_daemon(data_rate=None, generous=True, with_fee=False):
    if data_rate is None:
        data_rate = conf.ADJUSTABLE_SETTINGS['data_rate'][1]

    rates = {
        'BTCLBC': {'spot': 3.0, 'ts': util.DEFAULT_ISO_TIME + 1},
        'USDBTC': {'spot': 2.0, 'ts': util.DEFAULT_ISO_TIME + 2}
    }
    component_manager = ComponentManager(
        skip_components=[DATABASE_COMPONENT, DHT_COMPONENT, WALLET_COMPONENT, UPNP_COMPONENT,
                         PEER_PROTOCOL_SERVER_COMPONENT, REFLECTOR_COMPONENT, HASH_ANNOUNCER_COMPONENT,
                         STREAM_IDENTIFIER_COMPONENT, EXCHANGE_RATE_MANAGER_COMPONENT, BLOB_COMPONENT,
                         HEADERS_COMPONENT, RATE_LIMITER_COMPONENT],
        file_manager=FakeFileManager
    )
    daemon = LBRYDaemon(component_manager=component_manager)
    daemon.payment_rate_manager = OnlyFreePaymentsManager()
    daemon.wallet_manager = mock.Mock(spec=LbryWalletManager)
    daemon.wallet_manager.wallet = mock.Mock(spec=Wallet)
    daemon.wallet_manager.wallet.use_encryption = False
    daemon.wallet_manager.network = FakeNetwork()
    daemon.storage = mock.Mock(spec=SQLiteStorage)
    market_feeds = [BTCLBCFeed(), USDBTCFeed()]
    daemon.exchange_rate_manager = DummyExchangeRateManager(market_feeds, rates)
    daemon.file_manager = component_manager.get_component(FILE_MANAGER_COMPONENT)

    metadata = {
        "author": "fake author",
        "language": "en",
        "content_type": "fake/format",
        "description": "fake description",
        "license": "fake license",
        "license_url": "fake license url",
        "nsfw": False,
        "sources": {
            "lbry_sd_hash": 'd2b8b6e907dde95245fe6d144d16c2fdd60c4e0c6463ec98'
                            'b85642d06d8e9414e8fcfdcb7cb13532ec5454fb8fe7f280'
        },
        "thumbnail": "fake thumbnail",
        "title": "fake title",
        "ver": "0.0.3"
    }
    if with_fee:
        metadata.update(
            {"fee": {"USD": {"address": "bQ6BGboPV2SpTMEP7wLNiAcnsZiH8ye6eA", "amount": 0.75}}})
    migrated = smart_decode(json.dumps(metadata))
    daemon._resolve = daemon.wallet_manager.resolve = lambda *_: defer.succeed(
        {"test": {'claim': {'value': migrated.claim_dict}}})

    return daemon
def _decode_claim_result(claim):
    if 'has_signature' in claim and claim['has_signature']:
        if not claim['signature_is_valid']:
            log.warning("lbry://%s#%s has an invalid signature",
                        claim['name'], claim['claim_id'])
    try:
        decoded = smart_decode(claim['value'])
        claim_dict = decoded.claim_dict
        claim['value'] = claim_dict
        claim['hex'] = hexlify(decoded.serialized)
    except DecodeError:
        claim['hex'] = claim['value']
        claim['value'] = None
        claim['error'] = "Failed to decode value"
    return claim
def api_decode(txid, nout):
    connection_string = get_lbrycrdd_connection_details()
    rpc = AuthServiceProxy(connection_string)
    result = rpc.getclaimsfortx(txid)
    claim = None
    for claim_out in result:
        if claim_out['nOut'] == int(nout):
            claim = claim_out
            break
    if claim:
        converted = "".join([chr(ord(i)) for i in claim['value']])
        # Decode the claims and dump them back to logstash plugin
        decoded = smart_decode(converted)
        return json.dumps(decoded.claim_dict)
@defer.inlineCallbacks
def get_claim(self, claim_id):
    claim = yield self._get_claim_by_claimid(claim_id)
    if not claim:
        log.warning("Claim does not exist: %s", claim_id)
        defer.returnValue(None)
    try:
        decoded = smart_decode(claim['value'])
        claim['value'] = decoded.claim_dict
        claim['hex'] = decoded.serialized.encode('hex')
    except DecodeError:
        claim['hex'] = claim['value']
        claim['value'] = None
        claim['error'] = "Failed to decode"
        log.warning("Failed to decode claim value for lbry://%s#%s",
                    claim['name'], claim['claim_id'])
    defer.returnValue(claim)
def parse_and_validate_claim_result(self, claim_result, certificate=None, raw=False):
    if not claim_result or 'value' not in claim_result:
        return claim_result

    claim_result['decoded_claim'] = False
    decoded = None

    if not raw:
        claim_value = claim_result['value']
        try:
            decoded = smart_decode(claim_value)
            claim_result['value'] = decoded.claim_dict
            claim_result['decoded_claim'] = True
        except DecodeError:
            pass

    if decoded:
        claim_result['has_signature'] = False
        if decoded.has_signature:
            if certificate is None:
                log.info("fetching certificate to check claim signature")
                certificate = self.network.get_claims_by_ids(decoded.certificate_id)
                if not certificate:
                    log.warning('Certificate %s not found', decoded.certificate_id)
            claim_result['has_signature'] = True
            claim_result['signature_is_valid'] = False
            validated, channel_name = validate_claim_signature_and_get_channel_name(
                decoded, certificate, claim_result['address'])
            claim_result['channel_name'] = channel_name
            if validated:
                claim_result['signature_is_valid'] = True

    if 'height' in claim_result and claim_result['height'] is None:
        claim_result['height'] = -1

    if 'amount' in claim_result and not isinstance(claim_result['amount'], float):
        claim_result = format_amount_value(claim_result)

    claim_result['permanent_url'] = _get_permanent_url(claim_result)

    return claim_result
def get_test_daemon(data_rate=None, generous=True, with_fee=False):
    if data_rate is None:
        data_rate = conf.ADJUSTABLE_SETTINGS['data_rate'][1]

    rates = {
        'BTCLBC': {'spot': 3.0, 'ts': util.DEFAULT_ISO_TIME + 1},
        'USDBTC': {'spot': 2.0, 'ts': util.DEFAULT_ISO_TIME + 2}
    }
    daemon = LBRYDaemon(None)
    daemon.session = mock.Mock(spec=Session.Session)
    daemon.session.wallet = mock.Mock(spec=Wallet.LBRYumWallet)
    market_feeds = [BTCLBCFeed(), USDBTCFeed()]
    daemon.exchange_rate_manager = DummyExchangeRateManager(market_feeds, rates)
    base_prm = PaymentRateManager.BasePaymentRateManager(rate=data_rate)
    prm = PaymentRateManager.NegotiatedPaymentRateManager(base_prm, DummyBlobAvailabilityTracker(),
                                                          generous=generous)
    daemon.session.payment_rate_manager = prm

    metadata = {
        "author": "fake author",
        "language": "en",
        "content_type": "fake/format",
        "description": "fake description",
        "license": "fake license",
        "license_url": "fake license url",
        "nsfw": False,
        "sources": {
            "lbry_sd_hash": 'd2b8b6e907dde95245fe6d144d16c2fdd60c4e0c6463ec98'
                            'b85642d06d8e9414e8fcfdcb7cb13532ec5454fb8fe7f280'
        },
        "thumbnail": "fake thumbnail",
        "title": "fake title",
        "ver": "0.0.3"
    }
    if with_fee:
        metadata.update(
            {"fee": {"USD": {"address": "bQ6BGboPV2SpTMEP7wLNiAcnsZiH8ye6eA", "amount": 0.75}}})
    daemon._resolve_name = lambda _: defer.succeed(metadata)
    migrated = smart_decode(json.dumps(metadata))
    daemon.session.wallet.resolve = lambda *_: defer.succeed(
        {"test": {'claim': {'value': migrated.claim_dict}}})

    return daemon
@defer.inlineCallbacks
def get_claims_for_name(self, name):
    result = yield self._get_claims_for_name(name)
    claims = result['claims']
    claims_for_return = []
    for claim in claims:
        try:
            decoded = smart_decode(claim['value'])
            claim['value'] = decoded.claim_dict
            claim['hex'] = decoded.serialized.encode('hex')
            claims_for_return.append(claim)
        except DecodeError:
            claim['hex'] = claim['value']
            claim['value'] = None
            claim['error'] = "Failed to decode"
            log.warning("Failed to decode claim value for lbry://%s#%s",
                        claim['name'], claim['claim_id'])
            claims_for_return.append(claim)
    result['claims'] = claims_for_return
    defer.returnValue(result)
@defer.inlineCallbacks
def save_claim(self, claim_info, claim_dict=None):
    outpoint = "%s:%i" % (claim_info['txid'], claim_info['nout'])
    claim_id = claim_info['claim_id']
    name = claim_info['name']
    amount = int(COIN * claim_info['amount'])
    height = claim_info['height']
    address = claim_info['address']
    sequence = claim_info['claim_sequence']
    claim_dict = claim_dict or smart_decode(claim_info['value'])
    serialized = claim_dict.serialized.encode('hex')

    def _save_claim(transaction):
        transaction.execute(
            "insert or replace into claim values (?, ?, ?, ?, ?, ?, ?, ?, ?)",
            (outpoint, claim_id, name, amount, height, serialized,
             claim_dict.certificate_id, address, sequence)
        )

    yield self.db.runInteraction(_save_claim)

    if 'supports' in claim_info:
        # if this response doesn't have support info don't overwrite the existing
        # support info
        yield self.save_supports(claim_id, claim_info['supports'])
def import_signed_claim_transaction(self, claim, claim_id, undo_info):
    """ handle the import of claims/updates signed """
    try:
        decoded_claim = smart_decode(claim.value)
        parsed_uri = parse_lbry_uri(claim.name)
        if decoded_claim.has_signature:
            cert_id = decoded_claim.certificate_id
        else:
            cert_id = None
    except Exception:
        logger.warn("decode error for lbry://{}#{}".format(claim.name, claim_id))
        decoded_claim = None
        cert_id = None

    if type(claim) == deserialize.NameClaim:
        undo_info = self.import_signed_claim(claim, cert_id, claim_id, undo_info)
    elif type(claim) == deserialize.ClaimUpdate:
        undo_info = self.import_signed_update(claim, cert_id, claim_id, undo_info)
    return undo_info
@defer.inlineCallbacks
def _handle_claim_result(self, results):
    if not results:
        raise UnknownNameError("No results to return")

    if 'error' in results:
        if results['error'] == 'name is not claimed':
            raise UnknownNameError(results['error'])
        else:
            raise Exception(results['error'])

    if 'claim' in results:
        claim = results['claim']
        if 'has_signature' in claim and claim['has_signature']:
            if not claim['signature_is_valid']:
                log.warning("lbry://%s#%s has an invalid signature",
                            claim['name'], claim['claim_id'])
                decoded = ClaimDict.load_dict(claim['value'])
                claim_dict = decoded.claim_dict
                claim['value'] = claim_dict
                defer.returnValue(claim)
        try:
            decoded = smart_decode(claim['value'])
            claim_dict = decoded.claim_dict
            outpoint = ClaimOutpoint(claim['txid'], claim['nout'])
            name = claim['name']
            claim['value'] = claim_dict
            claim['hex'] = decoded.serialized.encode('hex')
            yield self._save_name_metadata(name, outpoint, decoded.source_hash)
            yield self._update_claimid(claim['claim_id'], name, outpoint)
        except DecodeError:
            claim['hex'] = claim['value']
            claim['value'] = None
            claim['error'] = "Failed to decode value"
        results = claim
    elif 'value' in results:
        if 'has_signature' in results and results['has_signature']:
            if not results['signature_is_valid']:
                log.warning("lbry://%s#%s has an invalid signature",
                            results['name'], results['claim_id'])
                decoded = ClaimDict.load_dict(results['value'])
                claim_dict = decoded.claim_dict
                results['value'] = claim_dict
                defer.returnValue(results)
        try:
            decoded = ClaimDict.load_dict(results['value'])
            claim_dict = decoded.claim_dict
            claim_hex = decoded.serialized.encode('hex')
            claim_err = None
            outpoint = ClaimOutpoint(results['txid'], results['nout'])
            name = results['name']
            yield self._save_name_metadata(name, outpoint, decoded.source_hash)
            yield self._update_claimid(results['claim_id'], name, outpoint)
        except DecodeError:
            claim_dict = None
            claim_hex = results['value']
            claim_err = "Failed to decode value"
        if claim_err:
            results['error'] = claim_err
        results['hex'] = claim_hex
        results['value'] = claim_dict

    log.info("get claim info lbry://%s#%s", results['name'], results['claim_id'])
    defer.returnValue(results)
def test_hex_decode(self):
    self.assertEqual(decoded_hex_encoded_003, smart_decode(hex_encoded_003).claim_dict)
def cmd_claimtrie_get_value_for_uri(self, block_hash, uri):
    uri = str(uri)
    block_hash = str(block_hash)
    try:
        parsed_uri = parse_lbry_uri(uri)
    except URIParseError as err:
        return {'error': err.message}
    result = {}

    if parsed_uri.is_channel:
        certificate = None
        if parsed_uri.claim_id:
            certificate_info = self.get_claim_info(parsed_uri.claim_id)
            if certificate_info:
                certificate = {'resolution_type': CLAIM_ID, 'result': certificate_info}
        elif parsed_uri.claim_sequence:
            claim_id = self.storage.get_claimid_for_nth_claim_to_name(str(parsed_uri.name),
                                                                      parsed_uri.claim_sequence)
            certificate_info = self.get_claim_info(str(claim_id))
            if certificate_info:
                certificate = {'resolution_type': SEQUENCE, 'result': certificate_info}
        else:
            certificate_info = self.cmd_claimtrie_getvalue(parsed_uri.name, block_hash)
            if certificate_info:
                certificate = {'resolution_type': WINNING, 'result': certificate_info}

        if certificate and not parsed_uri.path:
            result['certificate'] = certificate
            channel_id = certificate['result'].get('claim_id') or certificate['result'].get('claimId')
            channel_id = str(channel_id)
            claim_ids_in_channel = self.storage.get_claims_signed_by(channel_id)
            claims_in_channel = {cid: (self.storage.get_claim_name(cid),
                                       self.storage.get_claim_height(cid))
                                 for cid in claim_ids_in_channel}
            result['unverified_claims_in_channel'] = claims_in_channel
        elif certificate:
            result['certificate'] = certificate
            channel_id = certificate['result'].get('claim_id') or certificate['result'].get('claimId')
            channel_id = str(channel_id)
            claim_ids_matching_name = self.get_signed_claims_with_name_for_channel(channel_id,
                                                                                   parsed_uri.path)
            claims_in_channel = {cid: (self.storage.get_claim_name(cid),
                                       self.storage.get_claim_height(cid))
                                 for cid in claim_ids_matching_name}
            result['unverified_claims_for_name'] = claims_in_channel
    else:
        claim = None
        if parsed_uri.claim_id:
            claim_info = self.get_claim_info(parsed_uri.claim_id)
            if claim_info:
                claim = {'resolution_type': CLAIM_ID, 'result': claim_info}
        elif parsed_uri.claim_sequence:
            claim_id = self.storage.get_claimid_for_nth_claim_to_name(str(parsed_uri.name),
                                                                      parsed_uri.claim_sequence)
            claim_info = self.get_claim_info(str(claim_id))
            if claim_info:
                claim = {'resolution_type': SEQUENCE, 'result': claim_info}
        else:
            claim_info = self.cmd_claimtrie_getvalue(parsed_uri.name, block_hash)
            if claim_info:
                claim = {'resolution_type': WINNING, 'result': claim_info}
        if (claim and
                # is not an unclaimed winning name
                (claim['resolution_type'] != WINNING or
                 lbrycrd_proof_has_winning_claim(claim['result']['proof']))):
            try:
                claim_val = self.get_claim_info(claim['result']['claim_id'])
                decoded = smart_decode(claim_val['value'])
                if decoded.certificate_id:
                    certificate_info = self.get_claim_info(decoded.certificate_id)
                    if certificate_info:
                        certificate = {'resolution_type': CLAIM_ID, 'result': certificate_info}
                        result['certificate'] = certificate
            except DecodeError:
                pass
        result['claim'] = claim

    return result
def sign_claim(private_key, raw_claim, address, claim_id):
    claim = smart_decode(raw_claim)
    return claim.sign(private_key, address, hash_to_hex_str(claim_id), curve=SECP256k1)
def _make_db(new_db):
    # create the new tables
    new_db.executescript(SQLiteStorage.CREATE_TABLES_QUERY)

    # first migrate the blobs
    blobs = blobs_db_cursor.execute("select * from blobs").fetchall()
    _populate_blobs(blobs)  # pylint: disable=no-value-for-parameter
    log.info("migrated %i blobs", new_db.execute("select count(*) from blob").fetchone()[0])

    # used to store the query arguments if we need to try re-importing the lbry file later
    file_args = {}  # <sd_hash>: args tuple
    file_outpoints = {}  # <outpoint tuple>: sd_hash

    # get the file and stream queries ready
    for (rowid, sd_hash, stream_hash, key, stream_name, suggested_file_name, data_rate, status) in \
        lbryfile_db.execute(
            "select distinct lbry_files.rowid, d.sd_blob_hash, lbry_files.*, o.blob_data_rate, o.status "
            "from lbry_files "
            "inner join lbry_file_descriptors d on lbry_files.stream_hash=d.stream_hash "
            "inner join lbry_file_options o on lbry_files.stream_hash=o.stream_hash"):

        # this is try to link the file to a content claim after we've imported all the files
        if rowid in old_rowid_to_outpoint:
            file_outpoints[old_rowid_to_outpoint[rowid]] = sd_hash
        elif sd_hash in old_sd_hash_to_outpoint:
            file_outpoints[old_sd_hash_to_outpoint[sd_hash]] = sd_hash

        sd_hash_to_stream_hash[sd_hash] = stream_hash
        if stream_hash in stream_hash_to_stream_blobs:
            file_args[sd_hash] = (
                sd_hash, stream_hash, key, stream_name,
                suggested_file_name, data_rate or 0.0,
                status, stream_hash_to_stream_blobs.pop(stream_hash)
            )

    # used to store the query arguments if we need to try re-importing the claim
    claim_queries = {}  # <sd_hash>: claim query tuple

    # get the claim queries ready, only keep those with associated files
    for outpoint, sd_hash in file_outpoints.iteritems():
        if outpoint in claim_outpoint_queries:
            claim_queries[sd_hash] = claim_outpoint_queries[outpoint]

    # insert the claims
    new_db.executemany(
        "insert or ignore into claim values (?, ?, ?, ?, ?, ?, ?, ?, ?)",
        [
            (
                "%s:%i" % (claim_arg_tup[0], claim_arg_tup[1]), claim_arg_tup[2],
                claim_arg_tup[3], claim_arg_tup[7], claim_arg_tup[6], claim_arg_tup[8],
                smart_decode(claim_arg_tup[8]).certificate_id, claim_arg_tup[5],
                claim_arg_tup[4]
            )
            for sd_hash, claim_arg_tup in claim_queries.iteritems() if claim_arg_tup
        ]  # sd_hash, (txid, nout, claim_id, name, sequence, address, height, amount, serialized)
    )
    log.info("migrated %i claims", new_db.execute("select count(*) from claim").fetchone()[0])

    damaged_stream_sds = []

    # import the files and get sd hashes of streams to attempt recovering
    for sd_hash, file_query in file_args.iteritems():
        failed_sd = _import_file(*file_query)
        if failed_sd:
            damaged_stream_sds.append(failed_sd)

    # recover damaged streams
    if damaged_stream_sds:
        blob_dir = os.path.join(db_dir, "blobfiles")
        damaged_sds_on_disk = [] if not os.path.isdir(blob_dir) else list(
            {p for p in os.listdir(blob_dir) if p in damaged_stream_sds})
        for damaged_sd in damaged_sds_on_disk:
            try:
                decoded, sd_length = verify_sd_blob(damaged_sd, blob_dir)
                blobs = decoded['blobs']
                _add_recovered_blobs(blobs, damaged_sd, sd_length)  # pylint: disable=no-value-for-parameter
                _import_file(*file_args[damaged_sd])
                damaged_stream_sds.remove(damaged_sd)
            except (OSError, ValueError, TypeError, IOError, AssertionError, sqlite3.IntegrityError):
                continue

    log.info("migrated %i files", new_db.execute("select count(*) from file").fetchone()[0])

    # associate the content claims to their respective files
    for claim_arg_tup in claim_queries.values():
        if claim_arg_tup and (claim_arg_tup[0], claim_arg_tup[1]) in file_outpoints \
                and file_outpoints[(claim_arg_tup[0], claim_arg_tup[1])] in sd_hash_to_stream_hash:
            try:
                new_db.execute(
                    "insert or ignore into content_claim values (?, ?)",
                    (
                        sd_hash_to_stream_hash.get(
                            file_outpoints.get((claim_arg_tup[0], claim_arg_tup[1]))),
                        "%s:%i" % (claim_arg_tup[0], claim_arg_tup[1])
                    )
                )
            except sqlite3.IntegrityError:
                continue

    log.info("migrated %i content claims",
             new_db.execute("select count(*) from content_claim").fetchone()[0])
claim_010_signed_secp256k1 = claim_010_unsigned.sign(secp256k1_private_key, stream_claim_address,
                                                     cert_claim_id, curve=SECP256k1)

secp256k1_cert = ClaimDict.generate_certificate(secp256k1_private_key, curve=SECP256k1)
malformed_secp256k1_cert = ClaimDict.generate_certificate(secp256k1_private_key, curve=SECP256k1)
malformed_secp256k1_cert['keyType'] = 'NIST256p'

formatted = lambda x: json.dumps(x.claim_dict, indent=2)

hex_encoded_003 = claim_010_unsigned.serialized.encode('hex')
decoded_hex_encoded_003 = smart_decode(hex_encoded_003).claim_dict

template = """
claim_id_1 = \"%s\"
claim_address_2 = \"%s\"
claim_address_1 = \"%s\"
nist256p_private_key = \"\"\"%s\"\"\"
nist384p_private_key = \"\"\"%s\"\"\"
secp256k1_private_key = \"\"\"%s\"\"\"
def cmd_claimtrie_get_value_for_uri(self, block_hash, uri):
    uri = str(uri)
    block_hash = str(block_hash)
    cache_key = block_hash + uri
    if cache_key in self.short_term_cache:
        return self.short_term_cache.get(cache_key)
    try:
        parsed_uri = parse_lbry_uri(uri)
    except URIParseError as err:
        return {'error': err.message}
    result = {}

    if parsed_uri.is_channel:
        certificate = None
        if parsed_uri.claim_id:
            certificate_info = self.get_claim_info(parsed_uri.claim_id)
            if certificate_info and certificate_info['name'] == parsed_uri.name:
                certificate = {'resolution_type': CLAIM_ID, 'result': certificate_info}
        elif parsed_uri.claim_sequence:
            claim_id = self.storage.get_claimid_for_nth_claim_to_name(str(parsed_uri.name),
                                                                      parsed_uri.claim_sequence)
            certificate_info = self.get_claim_info(str(claim_id))
            if certificate_info:
                certificate = {'resolution_type': SEQUENCE, 'result': certificate_info}
        else:
            certificate_info = self.cmd_claimtrie_getvalue(parsed_uri.name, block_hash)
            if certificate_info:
                certificate = {'resolution_type': WINNING, 'result': certificate_info}

        if certificate and not parsed_uri.path:
            result['certificate'] = certificate
            channel_id = certificate['result'].get('claim_id') or certificate['result'].get('claimId')
            channel_id = str(channel_id)
            claim_ids_in_channel = self.storage.get_claims_signed_by(channel_id)
            claims_in_channel = {cid: (self.storage.get_claim_name(cid),
                                       self.storage.get_claim_height(cid))
                                 for cid in claim_ids_in_channel}
            result['unverified_claims_in_channel'] = claims_in_channel
        elif certificate:
            result['certificate'] = certificate
            channel_id = certificate['result'].get('claim_id') or certificate['result'].get('claimId')
            channel_id = str(channel_id)
            claim_ids_matching_name = self.get_signed_claims_with_name_for_channel(channel_id,
                                                                                   parsed_uri.path)
            claims_in_channel = {cid: (self.storage.get_claim_name(cid),
                                       self.storage.get_claim_height(cid))
                                 for cid in claim_ids_matching_name}
            result['unverified_claims_for_name'] = claims_in_channel
    else:
        claim = None
        if parsed_uri.claim_id:
            claim_info = self.get_claim_info(parsed_uri.claim_id)
            if claim_info and claim_info['name'] == parsed_uri.name:
                claim = {'resolution_type': CLAIM_ID, 'result': claim_info}
        elif parsed_uri.claim_sequence:
            claim_id = self.storage.get_claimid_for_nth_claim_to_name(str(parsed_uri.name),
                                                                      parsed_uri.claim_sequence)
            claim_info = self.get_claim_info(str(claim_id))
            if claim_info:
                claim = {'resolution_type': SEQUENCE, 'result': claim_info}
        else:
            claim_info = self.cmd_claimtrie_getvalue(parsed_uri.name, block_hash)
            if claim_info:
                claim = {'resolution_type': WINNING, 'result': claim_info}
        if (claim and
                # is not an unclaimed winning name
                (claim['resolution_type'] != WINNING or
                 lbrycrd_proof_has_winning_claim(claim['result']['proof']))):
            try:
                claim_val = self.get_claim_info(claim['result']['claim_id'])
                decoded = smart_decode(claim_val['value'])
                if decoded.certificate_id:
                    certificate_info = self.get_claim_info(decoded.certificate_id)
                    if certificate_info:
                        certificate = {'resolution_type': CLAIM_ID, 'result': certificate_info}
                        result['certificate'] = certificate
            except DecodeError:
                pass
        result['claim'] = claim

    self.short_term_cache.put(cache_key, result)
    return result
def claim(self) -> ClaimDict:
    if self.script.is_claim_name or self.script.is_update_claim:
        return smart_decode(self.script.values['claim'])
    raise ValueError('Only claim name and claim update have the claim payload.')