示例#1
0
 def _checksig(self, name, value, address):
     """Extract the certificate id from a serialized claim, optionally checking its signature.

     Returns the reversed certificateId bytes (or None when absent). Any parse or
     signature failure returns None implicitly -- this is deliberately best-effort,
     since invalid names/claims are simply skipped during indexing.
     """
     try:
         parse_lbry_uri(name.decode())  # skip invalid names
         cert_id = Claim.FromString(value).publisherSignature.certificateId[::-1] or None
         if not self.should_validate_signatures:
             return cert_id
         if cert_id:
             cert_claim = self.db.get_claim_info(cert_id)
             if cert_claim:
                 certificate = smart_decode(cert_claim.value)
                 claim_dict = smart_decode(value)
                 claim_dict.validate_signature(address, certificate)
                 return cert_id
     except Exception:
         # best-effort: a malformed name/claim or a bad signature means "no certificate"
         pass
示例#2
0
 def _checksig(self, name, value, address):
     """Extract the certificate id from a claim value, optionally checking its signature.

     Returns the reversed certificate id bytes. Any parse or signature failure
     returns None implicitly -- deliberately best-effort, so invalid names and
     claims are skipped during indexing rather than aborting it.
     """
     try:
         parse_lbry_uri(name.decode())  # skip invalid names
         claim_dict = smart_decode(value)
         cert_id = unhexlify(claim_dict.certificate_id)[::-1]
         if not self.should_validate_signatures:
             return cert_id
         if cert_id:
             cert_claim = self.db.get_claim_info(cert_id)
             if cert_claim:
                 certificate = smart_decode(cert_claim.value)
                 claim_dict.validate_signature(address, certificate)
                 return cert_id
     except Exception:
         # best-effort: a malformed name/claim or a bad signature means "no certificate"
         pass
示例#3
0
 def _save_claims(transaction):
     """Insert/replace claim rows and queue content-claim re-links for affected streams.

     Bug fix: the content_claim lookups previously compared sqlite3 Cursor objects
     (always truthy, never equal to a string) instead of fetched rows, so the
     "already linked to this outpoint" check never worked and every matching
     stream was unconditionally re-linked. Both queries now call fetchone().
     """
     content_claims_to_update = []
     for claim_info in claim_infos:
         outpoint = "%s:%i" % (claim_info['txid'], claim_info['nout'])
         claim_id = claim_info['claim_id']
         name = claim_info['name']
         amount = lbc_to_dewies(claim_info['amount'])
         height = claim_info['height']
         address = claim_info['address']
         sequence = claim_info['claim_sequence']
         try:
             certificate_id = claim_info['value'].get(
                 'publisherSignature', {}).get('certificateId')
         except AttributeError:
             certificate_id = None
         try:
             if claim_info['value'].get('stream', {}).get(
                     'source', {}).get('sourceType') == "lbry_sd_hash":
                 source_hash = claim_info['value'].get(
                     'stream', {}).get('source', {}).get('source')
             else:
                 source_hash = None
         except AttributeError:
             source_hash = None
         serialized = claim_info.get('hex') or binascii.hexlify(
             smart_decode(claim_info['value']).serialized).decode()
         transaction.execute(
             "insert or replace into claim values (?, ?, ?, ?, ?, ?, ?, ?, ?)",
             (outpoint, claim_id, name, amount, height, serialized,
              certificate_id, address, sequence))
         # if this response doesn't have support info don't overwrite the existing
         # support info
         if 'supports' in claim_info:
             support_callbacks.append(
                 (claim_id, claim_info['supports']))
         if not source_hash:
             continue
         stream_hash = transaction.execute(
             "select file.stream_hash from stream "
             "inner join file on file.stream_hash=stream.stream_hash where sd_hash=?",
             (source_hash, )).fetchone()
         if not stream_hash:
             continue
         stream_hash = stream_hash[0]
         # fetch actual rows -- comparing the cursor itself was the original bug
         known_outpoint = transaction.execute(
             "select claim_outpoint from content_claim where stream_hash=?",
             (stream_hash, )).fetchone()
         known_claim_id = transaction.execute(
             "select claim_id from claim "
             "inner join content_claim c3 ON claim.claim_outpoint=c3.claim_outpoint "
             "where c3.stream_hash=?", (stream_hash, )).fetchone()
         if not known_claim_id:
             # stream has no linked claim yet
             content_claims_to_update.append((stream_hash, outpoint))
         elif not known_outpoint or known_outpoint[0] != outpoint:
             # linked claim is stale -- re-point the stream at the new outpoint
             content_claims_to_update.append((stream_hash, outpoint))
     for stream_hash, outpoint in content_claims_to_update:
         self._save_content_claim(transaction, outpoint, stream_hash)
         if stream_hash in self.content_claim_callbacks:
             update_file_callbacks.append(
                 self.content_claim_callbacks[stream_hash]())
示例#4
0
    async def _download_stream_from_claim(self, node: 'Node', download_directory: str, claim_info: typing.Dict,
                                          file_name: typing.Optional[str] = None) -> typing.Optional[ManagedStream]:
        """Download the stream referenced by *claim_info* and return it as a ManagedStream.

        Raises DownloadSDTimeout if the stream descriptor is never received, and
        DownloadDataTimeout if the task is cancelled before any bytes are written.
        """
        claim = smart_decode(claim_info['value'])
        downloader = StreamDownloader(self.loop, self.config, self.blob_manager, claim.source_hash.decode(),
                                      download_directory, file_name)
        try:
            # NOTE(review): download() is not awaited -- presumably it schedules
            # background work on the loop; confirm it is not a coroutine.
            downloader.download(node)
            await downloader.got_descriptor.wait()
            log.info("got descriptor %s for %s", claim.source_hash.decode(), claim_info['name'])
        except (asyncio.TimeoutError, asyncio.CancelledError):
            log.info("stream timeout")
            downloader.stop()
            log.info("stopped stream")
            raise DownloadSDTimeout(downloader.sd_hash)
        # persist the stream row and its claim association before exposing the stream
        rowid = await self._store_stream(downloader)
        await self.storage.save_content_claim(
            downloader.descriptor.stream_hash, f"{claim_info['txid']}:{claim_info['nout']}"
        )
        stream = ManagedStream(self.loop, self.blob_manager, rowid, downloader.descriptor, download_directory,
                               file_name, downloader, ManagedStream.STATUS_RUNNING)
        stream.set_claim(claim_info, claim)
        self.streams.add(stream)
        try:
            # wait for the first data bytes before declaring the download running
            await stream.downloader.wrote_bytes_event.wait()
            self.wait_for_stream_finished(stream)
            return stream
        except asyncio.CancelledError:
            downloader.stop()
            log.debug("stopped stream")
        # cancelled before any data arrived: tear down and report a data timeout
        await self.stop_stream(stream)
        raise DownloadDataTimeout(downloader.sd_hash)
示例#5
0
File: resolve.py — Project: niteshldd/lbry
 async def get_certificate_and_validate_result(self, claim_result):
     """Fetch the signing certificate referenced by *claim_result* (if any) and
     run the result through parse_and_validate_claim_result with it."""
     if not claim_result or 'value' not in claim_result:
         # nothing decodable -- pass the result through untouched
         return claim_result
     certificate = None
     cert_id = smart_decode(claim_result['value']).certificate_id
     if cert_id:
         fetched = await self.network.get_claims_by_ids(cert_id)
         if fetched:
             certificate = fetched.pop(cert_id)
     return await self.parse_and_validate_claim_result(claim_result, certificate=certificate)
示例#6
0
File: resolve.py — Project: niteshldd/lbry
def validate_claim_signature_and_get_channel_name(claim, certificate_claim,
                                                  claim_address, name, decoded_certificate=None):
    """Check *claim*'s signature against its channel certificate.

    Returns (True, channel_name) when the signature validates, otherwise
    (False, None). Raises TypeError if the certificate does not decode to a
    ClaimDict.
    """
    if not certificate_claim:
        return False, None
    if 'value' not in certificate_claim:
        log.warning('Got an invalid claim while parsing certificates, please report: %s', certificate_claim)
        return False, None
    certificate = decoded_certificate or smart_decode(certificate_claim['value'])
    if not isinstance(certificate, ClaimDict):
        raise TypeError("Certificate is not a ClaimDict: %s" % str(type(certificate)))
    signature_ok = _validate_signed_claim(claim, claim_address, name, certificate)
    return (True, certificate_claim['name']) if signature_ok else (False, None)
示例#7
0
    def test_smart_decode_raises(self):
        """smart_decode rejects non-claim inputs with the expected exceptions."""
        bad_inputs = (
            (1, TypeError),                     # wrong type entirely
            ("aaab", DecodeError),              # not valid claim data
            ("{'bogus_dict':1}", DecodeError),  # looks like a dict, still invalid
        )
        for value, expected in bad_inputs:
            with self.assertRaises(expected):
                smart_decode(value)
示例#8
0
 def __init__(self, stream_hash: str, outpoint: opt_str = None, claim_id: opt_str = None, name: opt_str = None,
              amount: opt_int = None, height: opt_int = None, serialized: opt_str = None,
              channel_claim_id: opt_str = None, address: opt_str = None, claim_sequence: opt_int = None,
              channel_name: opt_str = None):
     """Plain container for the claim row associated with a stream."""
     # stream / claim identity
     self.stream_hash = stream_hash
     self.outpoint = outpoint
     self.claim_id = claim_id
     self.claim_name = name
     self.claim_address = address
     self.claim_sequence = claim_sequence
     # chain metadata
     self.amount = amount
     self.height = height
     # decoded claim payload, when a serialized form was provided
     self.claim: typing.Optional[ClaimDict] = smart_decode(serialized) if serialized else None
     # channel (certificate) association
     self.channel_claim_id = channel_claim_id
     self.channel_name = channel_name
示例#9
0
def get_test_daemon(data_rate=None, generous=True, with_fee=False):
    """Build an LBRYDaemon wired with mocks and fakes for unit tests.

    :param data_rate: payment rate; defaults to the adjustable-settings default
    :param generous: accepted for call-site compatibility (not referenced here)
    :param with_fee: when True, the fake claim metadata carries a USD fee
    """
    if data_rate is None:
        data_rate = conf.ADJUSTABLE_SETTINGS['data_rate'][1]

    exchange_rates = {
        'BTCLBC': {'spot': 3.0, 'ts': test_utils.DEFAULT_ISO_TIME + 1},
        'USDBTC': {'spot': 2.0, 'ts': test_utils.DEFAULT_ISO_TIME + 2},
    }
    manager = ComponentManager(
        skip_components=[DATABASE_COMPONENT, DHT_COMPONENT, WALLET_COMPONENT, UPNP_COMPONENT,
                         PEER_PROTOCOL_SERVER_COMPONENT, REFLECTOR_COMPONENT, HASH_ANNOUNCER_COMPONENT,
                         EXCHANGE_RATE_MANAGER_COMPONENT, BLOB_COMPONENT,
                         HEADERS_COMPONENT, RATE_LIMITER_COMPONENT],
        file_manager=FakeFileManager
    )
    daemon = LBRYDaemon(component_manager=manager)
    daemon.payment_rate_manager = OnlyFreePaymentsManager()
    daemon.wallet_manager = mock.Mock(spec=LbryWalletManager)
    daemon.wallet_manager.wallet = mock.Mock(spec=Wallet)
    daemon.wallet_manager.wallet.use_encryption = False
    daemon.wallet_manager.network = FakeNetwork()
    daemon.storage = mock.Mock(spec=SQLiteStorage)
    daemon.exchange_rate_manager = DummyExchangeRateManager([BTCLBCFeed(), USDBTCFeed()], exchange_rates)
    daemon.file_manager = manager.get_component(FILE_MANAGER_COMPONENT)

    # fake claim metadata served by the stubbed resolve below
    metadata = {
        "author": "fake author",
        "language": "en",
        "content_type": "fake/format",
        "description": "fake description",
        "license": "fake license",
        "license_url": "fake license url",
        "nsfw": False,
        "sources": {
            "lbry_sd_hash": 'd2b8b6e907dde95245fe6d144d16c2fdd60c4e0c6463ec98'
                            'b85642d06d8e9414e8fcfdcb7cb13532ec5454fb8fe7f280'
        },
        "thumbnail": "fake thumbnail",
        "title": "fake title",
        "ver": "0.0.3"
    }
    if with_fee:
        metadata["fee"] = {"USD": {"address": "bQ6BGboPV2SpTMEP7wLNiAcnsZiH8ye6eA", "amount": 0.75}}
    migrated = smart_decode(json.dumps(metadata))
    daemon._resolve = daemon.wallet_manager.resolve = lambda *_: defer.succeed(
        {"test": {'claim': {'value': migrated.claim_dict}}})
    return daemon
示例#10
0
    async def parse_and_validate_claim_result(self,
                                              claim_result,
                                              certificate=None,
                                              raw=False):
        """Decode a resolved claim, check its channel signature, and normalize fields.

        Unless *raw*, the claim's 'value' is replaced with its decoded dict and
        'decoded_claim' is set. For signed claims, 'has_signature',
        'signature_is_valid' and 'channel_name' are filled in, fetching the
        certificate from the network when one is not supplied. A None 'height'
        becomes -1, 'amount' is reformatted, and 'permanent_url' is added.
        """
        if not claim_result or 'value' not in claim_result:
            return claim_result

        claim_result['decoded_claim'] = False
        decoded = None

        if not raw:
            claim_value = claim_result['value']
            try:
                decoded = smart_decode(claim_value)
                claim_result['value'] = decoded.claim_dict
                claim_result['decoded_claim'] = True
            except DecodeError:
                # undecodable values are passed through untouched
                pass

        if decoded:
            claim_result['has_signature'] = False
            if decoded.has_signature:
                if certificate is None:
                    log.info("fetching certificate to check claim signature")
                    certificate = await self.network.get_claims_by_ids(
                        decoded.certificate_id)
                    if not certificate:
                        log.warning('Certificate %s not found',
                                    decoded.certificate_id)
                claim_result['has_signature'] = True
                claim_result['signature_is_valid'] = False
                validated, channel_name = validate_claim_signature_and_get_channel_name(
                    decoded, certificate, claim_result['address'],
                    claim_result['name'])
                claim_result['channel_name'] = channel_name
                if validated:
                    claim_result['signature_is_valid'] = True

        # normalize a null height so callers can compare/sort numerically
        if 'height' in claim_result and claim_result['height'] is None:
            claim_result['height'] = -1

        if 'amount' in claim_result:
            claim_result = format_amount_value(claim_result)

        claim_result['permanent_url'] = _get_permanent_url(
            claim_result, decoded.certificate_id if decoded else None)

        return claim_result
示例#11
0
File: resolve.py — Project: niteshldd/lbry
def _decode_claim_result(claim):
    """Decode a claim's serialized 'value' in place.

    On success, 'value' becomes the decoded dict and 'hex' the hex-serialized
    form; on failure, 'hex' keeps the raw value, 'value' is cleared, and an
    'error' message is recorded. Returns the (mutated) claim dict.
    """
    if claim.get('has_signature') and not claim['signature_is_valid']:
        log.warning("lbry://%s#%s has an invalid signature",
                    claim['name'], claim['claim_id'])
    if 'value' not in claim:
        log.warning('Got an invalid claim while parsing, please report: %s', claim)
        claim['hex'] = None
        claim['value'] = None
        claim['error'] = "Failed to parse: missing value"
        return claim
    try:
        decoded = smart_decode(claim['value'])
        claim['value'] = decoded.claim_dict
        claim['hex'] = hexlify(decoded.serialized)
    except DecodeError:
        # keep the undecodable payload around as 'hex' for debugging
        claim['hex'] = claim['value']
        claim['value'] = None
        claim['error'] = "Failed to decode value"
    return claim
示例#12
0
    def _make_db(new_db):
        """Populate the new schema: blobs, files, claims, and content-claim links.

        NOTE(review): closure over blobs_db_cursor, lbryfile_db, old_rowid_to_outpoint,
        old_sd_hash_to_outpoint, sd_hash_to_stream_hash, stream_hash_to_stream_blobs,
        claim_outpoint_queries and db_dir from the enclosing migration function --
        confirm against the full file.
        """
        # create the new tables
        new_db.executescript(CREATE_TABLES_QUERY)

        # first migrate the blobs
        blobs = blobs_db_cursor.execute("select * from blobs").fetchall()
        _populate_blobs(blobs)  # pylint: disable=no-value-for-parameter
        log.info("migrated %i blobs", new_db.execute("select count(*) from blob").fetchone()[0])

        # used to store the query arguments if we need to try re-importing the lbry file later
        file_args = {}  # <sd_hash>: args tuple

        file_outpoints = {}  # <outpoint tuple>: sd_hash

        # get the file and stream queries ready
        for (rowid, sd_hash, stream_hash, key, stream_name, suggested_file_name, data_rate, status) in \
            lbryfile_db.execute(
                "select distinct lbry_files.rowid, d.sd_blob_hash, lbry_files.*, o.blob_data_rate, o.status "
                "from lbry_files "
                "inner join lbry_file_descriptors d on lbry_files.stream_hash=d.stream_hash "
                "inner join lbry_file_options o on lbry_files.stream_hash=o.stream_hash"):

            # this is try to link the file to a content claim after we've imported all the files
            if rowid in old_rowid_to_outpoint:
                file_outpoints[old_rowid_to_outpoint[rowid]] = sd_hash
            elif sd_hash in old_sd_hash_to_outpoint:
                file_outpoints[old_sd_hash_to_outpoint[sd_hash]] = sd_hash

            sd_hash_to_stream_hash[sd_hash] = stream_hash
            if stream_hash in stream_hash_to_stream_blobs:
                file_args[sd_hash] = (
                    sd_hash, stream_hash, key, stream_name,
                    suggested_file_name, data_rate or 0.0,
                    status, stream_hash_to_stream_blobs.pop(stream_hash)
                )

        # used to store the query arguments if we need to try re-importing the claim
        claim_queries = {}  # <sd_hash>: claim query tuple

        # get the claim queries ready, only keep those with associated files
        for outpoint, sd_hash in file_outpoints.items():
            if outpoint in claim_outpoint_queries:
                claim_queries[sd_hash] = claim_outpoint_queries[outpoint]

        # insert the claims
        new_db.executemany(
            "insert or ignore into claim values (?, ?, ?, ?, ?, ?, ?, ?, ?)",
            [
                (
                    "%s:%i" % (claim_arg_tup[0], claim_arg_tup[1]), claim_arg_tup[2], claim_arg_tup[3],
                    claim_arg_tup[7], claim_arg_tup[6], claim_arg_tup[8],
                    smart_decode(claim_arg_tup[8]).certificate_id, claim_arg_tup[5], claim_arg_tup[4]
                )
                for sd_hash, claim_arg_tup in claim_queries.items() if claim_arg_tup
            ]     # sd_hash,  (txid, nout, claim_id, name, sequence, address, height, amount, serialized)
        )

        log.info("migrated %i claims", new_db.execute("select count(*) from claim").fetchone()[0])

        damaged_stream_sds = []
        # import the files and get sd hashes of streams to attempt recovering
        for sd_hash, file_query in file_args.items():
            failed_sd = _import_file(*file_query)
            if failed_sd:
                damaged_stream_sds.append(failed_sd)

        # recover damaged streams by re-reading their sd blobs from disk
        if damaged_stream_sds:
            blob_dir = os.path.join(db_dir, "blobfiles")
            damaged_sds_on_disk = [] if not os.path.isdir(blob_dir) else list({p for p in os.listdir(blob_dir)
                                                                               if p in damaged_stream_sds})
            for damaged_sd in damaged_sds_on_disk:
                try:
                    decoded, sd_length = verify_sd_blob(damaged_sd, blob_dir)
                    blobs = decoded['blobs']
                    _add_recovered_blobs(blobs, damaged_sd, sd_length)  # pylint: disable=no-value-for-parameter
                    _import_file(*file_args[damaged_sd])
                    damaged_stream_sds.remove(damaged_sd)
                except (OSError, ValueError, TypeError, IOError, AssertionError, sqlite3.IntegrityError):
                    # unrecoverable sd blob: leave it in damaged_stream_sds
                    continue

        log.info("migrated %i files", new_db.execute("select count(*) from file").fetchone()[0])

        # associate the content claims to their respective files
        for claim_arg_tup in claim_queries.values():
            if claim_arg_tup and (claim_arg_tup[0], claim_arg_tup[1]) in file_outpoints \
                    and file_outpoints[(claim_arg_tup[0], claim_arg_tup[1])] in sd_hash_to_stream_hash:
                try:
                    new_db.execute(
                        "insert or ignore into content_claim values (?, ?)",
                        (
                            sd_hash_to_stream_hash.get(file_outpoints.get((claim_arg_tup[0], claim_arg_tup[1]))),
                            "%s:%i" % (claim_arg_tup[0], claim_arg_tup[1])
                        )
                    )
                except sqlite3.IntegrityError:
                    continue

        log.info("migrated %i content claims", new_db.execute("select count(*) from content_claim").fetchone()[0])
示例#13
0
 def claim(self) -> ClaimDict:
     """Decode and return the claim payload carried by this script.

     Raises ValueError for script types that have no claim payload.
     """
     if not (self.script.is_claim_name or self.script.is_update_claim):
         raise ValueError(
             'Only claim name and claim update have the claim payload.')
     return smart_decode(self.script.values['claim'])
示例#14
0
 def test_binary_decode(self):
     """A binary-serialized claim decodes to the expected dictionary."""
     decoded = smart_decode(binary_claim)
     self.assertEqual(expected_binary_claim_decoded, decoded.claim_dict)
示例#15
0
 def test_hex_decode(self):
     """A hex-encoded v0.0.3 claim decodes to the expected dictionary."""
     decoded = smart_decode(hex_encoded_003)
     self.assertEqual(decoded_hex_encoded_003, decoded.claim_dict)
示例#16
0
 async def _update_content_claim(self, stream: ManagedStream):
     """Reload the stream's content claim from storage and attach the decoded value."""
     claim_info = await self.storage.get_content_claim(stream.stream_hash)
     decoded_claim = smart_decode(claim_info['value'])
     stream.set_claim(claim_info, decoded_claim)