Example 1
0
 def put_claim_id_signed_by_cert_id(self, cert_id, claim_id):
     """Record that claim_id is signed by the certificate cert_id.

     Appends the claim to the certificate's signed-claim list and
     writes the updated list back to the cache.
     """
     self.logger.info(
         "[+] Adding signature: {} - {}".format(
             hash_to_hex_str(claim_id), hash_to_hex_str(cert_id)))
     signed_claims = self.get_signed_claim_ids_by_cert_id(cert_id)
     signed_claims.append(claim_id)
     self.claims_signed_by_cert_cache[cert_id] = signed_claims
Example 2
0
 def remove_claim_from_certificate_claims(self, cert_id, claim_id):
     """Drop claim_id from the list of claims signed by cert_id.

     A claim_id that is not recorded against the certificate is
     tolerated; the (possibly unchanged) list is written back to the
     cache either way.
     """
     msg = "[-] Removing signature: {} - {}".format(
         hash_to_hex_str(claim_id), hash_to_hex_str(cert_id))
     self.logger.info(msg)
     signed_claims = self.get_signed_claim_ids_by_cert_id(cert_id)
     try:
         signed_claims.remove(claim_id)
     except ValueError:
         pass  # the claim was never recorded against this certificate
     self.claims_signed_by_cert_cache[cert_id] = signed_claims
Example 3
0
    def backup_blocks(self, raw_blocks):
        """Backup the raw blocks and flush.

        The blocks should be in order of decreasing height, starting
        at self.height.  A flush is performed once the blocks are
        backed up.

        Raises:
            ChainError: if a block's header hash does not match the
                current chain tip.
        """
        self.db.assert_flushed(self.flush_data())
        assert self.height >= len(raw_blocks)

        coin = self.coin
        for raw_block in raw_blocks:
            # Check and update self.tip
            block = coin.block(raw_block, self.height)
            header_hash = coin.header_hash(block.header)
            if header_hash != self.tip:
                raise ChainError(
                    'backup block {} not tip {} at height {:,d}'.format(
                        hash_to_hex_str(header_hash),
                        hash_to_hex_str(self.tip), self.height))
            # Step the tip back one block: the new tip is this block's
            # previous-block hash.
            self.tip = coin.header_prevhash(block.header)
            self.backup_txs(block.transactions)
            self.height -= 1
            # Drop the cumulative tx count recorded for the undone block.
            self.db.tx_counts.pop()

        self.logger.info(f'backed up to height {self.height:,d}')
Example 4
0
 def advance_claim_txs(self, txs, height):
     """Process claim operations in a block's transactions at `height`.

     For each transaction's outputs, handles name claims, claim
     updates (rejecting updates that do not spend the claim they
     update) and supports; then abandons any claim whose outpoint is
     spent by an input not consumed by an update.  Returns the
     accumulated undo information.
     """
     # TODO: generate claim undo info!
     undo_info = []
     add_undo = undo_info.append
     update_inputs = set()
     for tx, txid in txs:
         update_inputs.clear()
         if tx.has_claims:
             for index, output in enumerate(tx.outputs):
                 claim = output.claim
                 if isinstance(claim, NameClaim):
                     add_undo(self.advance_claim_name_transaction(output, height, txid, index))
                 elif isinstance(claim, ClaimUpdate):
                     # A valid update must spend the claim it updates.
                     update_input = self.db.get_update_input(claim, tx.inputs)
                     if update_input:
                         update_inputs.add(update_input)
                         add_undo(self.advance_update_claim(output, height, txid, index))
                     else:
                         info = (hash_to_hex_str(txid), hash_to_hex_str(claim.claim_id),)
                         self.logger.error("REJECTED: {} updating {}".format(*info))
                 elif isinstance(claim, ClaimSupport):
                     self.advance_support(claim, txid, index, height, output.value)
         for txin in tx.inputs:
             # Inputs consumed by a claim update do not abandon the claim.
             if txin not in update_inputs:
                 abandoned_claim_id = self.db.abandon_spent(txin.prev_hash, txin.prev_idx)
                 if abandoned_claim_id:
                     add_undo((abandoned_claim_id, self.db.get_claim_info(abandoned_claim_id)))
     return undo_info
Example 5
0
 def electrum_header(cls, header, height):
     """Deserialize a raw LBRY block header into Electrum's dict form.

     Fixed layout: version (4 bytes) | prev block hash (32) |
     merkle root (32) | claim trie root (32) | timestamp (4) |
     bits (4) | nonce (4).
     """
     version = struct.unpack('<I', header[:4])[0]
     timestamp, bits, nonce = struct.unpack('<III', header[100:112])
     deserialized = {
         'version': version,
         'prev_block_hash': hash_to_hex_str(header[4:36]),
         'merkle_root': hash_to_hex_str(header[36:68]),
         'claim_trie_root': hash_to_hex_str(header[68:100]),
         'timestamp': timestamp,
         'bits': bits,
         'nonce': nonce,
         'block_height': height,
     }
     return deserialized
Example 6
0
 def advance_claim_txs(self, txs, height):
     """Process claim operations for a block's transactions at `height`.

     Re-deserializes each transaction via Transaction(etx.serialize()),
     applies name claims and claim updates found on its outputs
     (rejecting updates that do not spend the claim they update), then
     abandons claims spent by any input not consumed by an update.
     Returns the accumulated undo information.
     """
     # TODO: generate claim undo info!
     undo_info = []
     add_undo = undo_info.append
     update_inputs = set()
     for etx, txid in txs:
         update_inputs.clear()
         tx = Transaction(etx.serialize())
         for index, output in enumerate(tx.outputs):
             if not output.is_claim:
                 continue
             if output.script.is_claim_name:
                 add_undo(self.advance_claim_name_transaction(output, height, txid, index))
             elif output.script.is_update_claim:
                 # A valid update must spend the claim it updates.
                 update_input = self.db.get_update_input(output.claim_hash, tx.inputs)
                 if update_input:
                     update_inputs.add(update_input)
                     add_undo(self.advance_update_claim(output, height, txid, index))
                 else:
                     info = (hash_to_hex_str(txid), output.claim_id,)
                     self.logger.error("REJECTED: {} updating {}".format(*info))
         for txin in tx.inputs:
             # Inputs consumed by a claim update do not abandon anything.
             if txin not in update_inputs:
                 abandoned_claim_id = self.db.abandon_spent(txin.txo_ref.tx_ref.hash, txin.txo_ref.position)
                 if abandoned_claim_id:
                     add_undo((abandoned_claim_id, self.db.get_claim_info(abandoned_claim_id)))
     return undo_info
Example 7
0
 async def claimtrie_getnthclaimforname(self, name, n):
     """Resolve the claim at sequence position n for a name.

     Returns the formatted claim, or None when no claim occupies
     that position.
     """
     wanted = int(n)
     claims = self.db.get_claims_for_name(name.encode('ISO-8859-1'))
     for claim_id, sequence in claims.items():
         if sequence == wanted:
             return await self.claimtrie_getclaimbyid(hash_to_hex_str(claim_id))
Example 8
0
    async def reorg_chain(self, count=None):
        """Handle a chain reorganisation.

        Count is the number of blocks to simulate a reorg, or None for
        a real reorg."""
        if count is None:
            self.logger.info('chain reorg detected')
        else:
            self.logger.info(f'faking a reorg of {count:,d} blocks')
        await self.flush(True)

        async def get_raw_blocks(last_height, hex_hashes):
            # Prefer blocks already stored on disk; fall back to asking
            # the daemon if any height is missing.
            heights = range(last_height, last_height - len(hex_hashes), -1)
            try:
                blocks = [self.db.read_raw_block(height) for height in heights]
                self.logger.info(f'read {len(blocks)} blocks from disk')
                return blocks
            except FileNotFoundError:
                return await self.daemon.raw_blocks(hex_hashes)

        def flush_backup():
            # self.touched can include other addresses which is
            # harmless, but remove None.
            self.touched.discard(None)
            self.db.flush_backup(self.flush_data(), self.touched)

        start, last, hashes = await self.reorg_hashes(count)
        # Reverse and convert to hex strings.
        hashes = [hash_to_hex_str(hash) for hash in reversed(hashes)]
        # Back up in batches of 50 blocks, flushing after each batch.
        for hex_hashes in chunks(hashes, 50):
            raw_blocks = await get_raw_blocks(last, hex_hashes)
            await self.run_in_thread_with_lock(self.backup_blocks, raw_blocks)
            await self.run_in_thread_with_lock(flush_backup)
            last -= len(raw_blocks)
        await self.prefetcher.reset_height(self.height)
Example 9
0
    async def calc_reorg_range(self, count):
        """Calculate the reorg range.

        If count is None this is a real reorg: walk backwards from the
        current height, doubling the probe window each step, until our
        block hashes agree with the daemon's, then reorg everything
        above the first matching block.  Otherwise the reorg is
        simulated and count is taken at face value.

        Returns a (start_height, count) pair.
        """
        def diff_pos(hashes1, hashes2):
            """Returns the index of the first difference in the hash lists.
            If both lists match returns their length."""
            for n, (hash1, hash2) in enumerate(zip(hashes1, hashes2)):
                if hash1 != hash2:
                    return n
            # Fix: previously returned len(hashes), silently closing over
            # the caller's local instead of using this helper's own
            # arguments.  zip() stops at the shorter list, so the matched
            # length is the minimum of the two.
            return min(len(hashes1), len(hashes2))

        if count is None:
            # A real reorg
            start = self.height - 1
            count = 1
            while start > 0:
                hashes = await self.db.fs_block_hashes(start, count)
                hex_hashes = [hash_to_hex_str(hash) for hash in hashes]
                d_hex_hashes = await self.daemon.block_hex_hashes(start, count)
                n = diff_pos(hex_hashes, d_hex_hashes)
                if n > 0:
                    # Found a matching prefix; reorg starts just past it.
                    start += n
                    break
                count = min(count * 2, start)
                start -= count

            count = (self.height - start) + 1
        else:
            start = (self.height - count) + 1

        return start, count
Example 10
0
    def _compact_hashX(self, hashX, hist_map, hist_list,
                       write_items, keys_to_delete):
        """Compress history for a hashX.  hist_list is an ordered list of
        the histories to be compressed.

        Appends (key, row) pairs to write_items for rows that changed,
        adds stale row keys to keys_to_delete, and returns the number
        of bytes to be written.
        """
        # History entries (tx numbers) are 4 bytes each.  Distribute
        # over rows of up to 50KB in size.  A fixed row size means
        # future compactions will not need to update the first N - 1
        # rows.
        max_row_size = self.max_hist_row_entries * 4
        full_hist = b''.join(hist_list)
        nrows = (len(full_hist) + max_row_size - 1) // max_row_size
        if nrows > 4:
            self.logger.info('hashX {} is large: {:,d} entries across '
                             '{:,d} rows'
                             .format(hash_to_hex_str(hashX),
                                     len(full_hist) // 4, nrows))

        # Find what history needs to be written, and what keys need to
        # be deleted.  Start by assuming all keys are to be deleted,
        # and then remove those that are the same on-disk as when
        # compacted.
        write_size = 0
        keys_to_delete.update(hist_map)
        # Fix: n was unbound (NameError at the assert below) when
        # full_hist was empty; -1 keeps the assert and max() consistent.
        n = -1
        for n, chunk in enumerate(util.chunks(full_hist, max_row_size)):
            key = hashX + pack_be_uint16(n)
            if hist_map.get(key) == chunk:
                keys_to_delete.remove(key)
            else:
                write_items.append((key, chunk))
                write_size += len(chunk)

        assert n + 1 == nrows
        self.comp_flush_count = max(self.comp_flush_count, n)

        return write_size
Example 11
0
 def remove_claim_for_name(self, name, claim_id):
     """Remove a claim from a name's sequence and close the gap.

     Claims positioned after the removed one are shifted down by one
     so the sequence stays contiguous.
     """
     self.logger.info("[-] Removing claim from name: {} - {}".format(
         hash_to_hex_str(claim_id), name))
     claims = self.get_claims_for_name(name)
     removed_position = claims.pop(claim_id)
     for other_id in claims:
         if claims[other_id] > removed_position:
             claims[other_id] -= 1
     self.claims_for_name_cache[name] = claims
Example 12
0
 def abandon_spent(self, tx_hash, tx_idx):
     """Mark the claim at a spent outpoint as pending abandonment.

     Returns the claim id when the outpoint carried a claim, else
     None.
     """
     claim_id = self.get_claim_id_from_outpoint(tx_hash, tx_idx)
     if not claim_id:
         return None
     self.logger.info("[!] Abandon: {}".format(hash_to_hex_str(claim_id)))
     outpoints = self.pending_abandons.setdefault(claim_id, [])
     outpoints.append((tx_hash, tx_idx))
     return claim_id
Example 13
0
    def genesis_block(cls, block):
        """Check the Genesis block is the right one for this coin.

        Return the block less its unspendable coinbase.
        """
        header = cls.block_header(block, 0)
        actual_hash = hash_to_hex_str(cls.header_hash(header))
        expected_hash = cls.GENESIS_HASH
        if actual_hash != expected_hash:
            raise CoinError(
                f'genesis block has hash {actual_hash} expected {expected_hash}')
        return block
Example 14
0
    def read_utxo_state(self):
        """Load persisted UTXO DB state into attributes and log a summary.

        Sets db_height, db_tx_count, db_tip, db_version,
        utxo_flush_count, wall_time and first_sync from the stored
        b'state' record, or to empty-chain defaults when the DB is new.

        Raises:
            self.DBError: if the stored state is malformed, the DB
                version is unsupported, or the genesis hash does not
                match this coin's.
        """
        state = self.utxo_db.get(b'state')
        if not state:
            # Fresh database: initialise to empty-chain defaults.
            self.db_height = -1
            self.db_tx_count = 0
            self.db_tip = b'\0' * 32
            self.db_version = max(self.DB_VERSIONS)
            self.utxo_flush_count = 0
            self.wall_time = 0
            self.first_sync = True
        else:
            # State is stored as the repr of a dict; parse it safely.
            state = ast.literal_eval(state.decode())
            if not isinstance(state, dict):
                raise self.DBError('failed reading state from DB')
            self.db_version = state['db_version']
            if self.db_version not in self.DB_VERSIONS:
                raise self.DBError('your UTXO DB version is {} but this '
                                   'software only handles versions {}'.format(
                                       self.db_version, self.DB_VERSIONS))
            # backwards compat
            genesis_hash = state['genesis']
            if isinstance(genesis_hash, bytes):
                genesis_hash = genesis_hash.decode()
            if genesis_hash != self.coin.GENESIS_HASH:
                raise self.DBError(
                    'DB genesis hash {} does not match coin {}'.format(
                        genesis_hash, self.coin.GENESIS_HASH))
            self.db_height = state['height']
            self.db_tx_count = state['tx_count']
            self.db_tip = state['tip']
            self.utxo_flush_count = state['utxo_flush_count']
            self.wall_time = state['wall_time']
            self.first_sync = state['first_sync']

        # These are our state as we move ahead of DB state
        self.fs_height = self.db_height
        self.fs_tx_count = self.db_tx_count
        self.last_flush_tx_count = self.fs_tx_count

        # Log some stats
        self.logger.info('DB version: {:d}'.format(self.db_version))
        self.logger.info('coin: {}'.format(self.coin.NAME))
        self.logger.info('network: {}'.format(self.coin.NET))
        self.logger.info('height: {:,d}'.format(self.db_height))
        self.logger.info('tip: {}'.format(hash_to_hex_str(self.db_tip)))
        self.logger.info('tx count: {:,d}'.format(self.db_tx_count))
        if self.utxo_db.for_sync:
            self.logger.info(f'flushing DB cache at {self.env.cache_MB:,d} MB')
        if self.first_sync:
            self.logger.info('sync time so far: {}'.format(
                util.formatted_time(self.wall_time)))
Example 15
0
    def spend_utxo(self, tx_hash, tx_idx):
        """Spend a UTXO and return the 33-byte value.

        If the UTXO is not in the cache it must be on disk.  We store
        all UTXOs so not finding one indicates a logic error or DB
        corruption.
        """
        # Fast track is it being in the cache
        idx_packed = pack('<H', tx_idx)
        cache_value = self.utxo_cache.pop(tx_hash + idx_packed, None)
        if cache_value:
            return cache_value

        # Spend it from the DB.

        # Key: b'h' + compressed_tx_hash + tx_idx + tx_num
        # Value: hashX
        # Only the first 4 bytes of the tx hash are in the key, so
        # several transactions can share a prefix; collect every match.
        prefix = b'h' + tx_hash[:4] + idx_packed
        candidates = {
            db_key: hashX
            for db_key, hashX in self.db.utxo_db.iterator(prefix=prefix)
        }

        for hdb_key, hashX in candidates.items():
            tx_num_packed = hdb_key[-4:]

            if len(candidates) > 1:
                # Disambiguate truncated-hash collisions by resolving
                # the tx number back to the full transaction hash.
                tx_num, = unpack('<I', tx_num_packed)
                hash, height = self.db.fs_tx_hash(tx_num)
                if hash != tx_hash:
                    assert hash is not None  # Should always be found
                    continue

            # Key: b'u' + address_hashX + tx_idx + tx_num
            # Value: the UTXO value as a 64-bit unsigned integer
            udb_key = b'u' + hashX + hdb_key[-6:]
            utxo_value_packed = self.db.utxo_db.get(udb_key)
            if utxo_value_packed:
                # Remove both entries for this UTXO
                self.db_deletes.append(hdb_key)
                self.db_deletes.append(udb_key)
                return hashX + tx_num_packed + utxo_value_packed

        raise ChainError('UTXO {} / {:,d} not found in "h" table'.format(
            hash_to_hex_str(tx_hash), tx_idx))
Example 16
0
    async def _fetch_and_accept(self, hashes, all_hashes, touched):
        """Fetch a list of mempool transactions.

        hashes: the tx hashes to fetch and deserialize.
        all_hashes: every hash currently in the mempool; prevouts
            found there are excluded from the DB UTXO lookup.
        touched: passed through to _accept_transactions.
        """
        hex_hashes_iter = (hash_to_hex_str(hash) for hash in hashes)
        raw_txs = await self.api.raw_transactions(hex_hashes_iter)

        def deserialize_txs():  # This function is pure
            to_hashX = self.coin.hashX_from_script
            deserializer = self.coin.DESERIALIZER

            txs = {}
            for hash, raw_tx in zip(hashes, raw_txs):
                # The daemon may have evicted the tx from its
                # mempool or it may have gotten in a block
                if not raw_tx:
                    continue
                tx, tx_size = deserializer(raw_tx).read_tx_and_vsize()
                # Convert the inputs and outputs into (hashX, value) pairs
                # Drop generation-like inputs from MemPoolTx.prevouts
                txin_pairs = tuple((txin.prev_hash, txin.prev_idx)
                                   for txin in tx.inputs
                                   if not txin.is_generation())
                txout_pairs = tuple((to_hashX(txout.pk_script), txout.value)
                                    for txout in tx.outputs)
                txs[hash] = MemPoolTx(txin_pairs, None, txout_pairs, 0,
                                      tx_size)
            return txs

        # Thread this potentially slow operation so as not to block
        tx_map = await asyncio.get_event_loop().run_in_executor(
            None, deserialize_txs)

        # Determine all prevouts not in the mempool, and fetch the
        # UTXO information from the database.  Failed prevout lookups
        # return None - concurrent database updates happen - which is
        # relied upon by _accept_transactions. Ignore prevouts that are
        # generation-like.
        prevouts = tuple(prevout for tx in tx_map.values()
                         for prevout in tx.prevouts
                         if prevout[0] not in all_hashes)
        utxos = await self.api.lookup_utxos(prevouts)
        utxo_map = {prevout: utxo for prevout, utxo in zip(prevouts, utxos)}

        return self._accept_transactions(tx_map, utxo_map, touched)
Example 17
0
 def put_claim_for_name(self, name, claim_id):
     """Add a claim to a name's sequence.

     A brand-new claim is placed one past the current highest
     position; a claim already present keeps its existing position.
     """
     self.logger.info("[+] Adding claim {} for name {}.".format(
         hash_to_hex_str(claim_id), name))
     claims = self.get_claims_for_name(name)
     if claim_id not in claims:
         highest = max(claims.values()) if claims else 0
         claims[claim_id] = highest + 1
     self.claims_for_name_cache[name] = claims
Example 18
0
File: tx.py Project: shyba/torba
 def __str__(self):
     """Human-readable form of this input for logging and debugging."""
     return (f"Input({hash_to_hex_str(self.prev_hash)}, {self.prev_idx:d},"
             f" tree={self.tree}, sequence={self.sequence:d})")
Example 19
0
 def __str__(self):
     """Human-readable form of this input for logging and debugging."""
     return "Input({}, {:d}, script={}, sequence={:d})".format(
         hash_to_hex_str(self.prev_hash), self.prev_idx,
         self.script.hex(), self.sequence)
Example 20
0
 def __str__(self):
     """Human-readable form of this input for logging and debugging."""
     return "Input({}, {:d}, tree={}, sequence={:d})".format(
         hash_to_hex_str(self.prev_hash), self.prev_idx,
         self.tree, self.sequence)
Example 21
0
    async def claimtrie_getvalueforuri(self,
                                       block_hash,
                                       uri,
                                       known_certificates=None):
        """Resolve a LBRY URI at block_hash into claim/certificate info.

        Returns a dict which may contain 'certificate', 'claim',
        'unverified_claims_in_channel' or 'unverified_claims_for_name'
        keys, or {'error': ...} when the URI fails to parse.
        known_certificates is accepted but not used in this body —
        TODO confirm whether any caller relies on it.
        """
        # TODO: this thing is huge, refactor
        CLAIM_ID = "claim_id"
        WINNING = "winning"
        SEQUENCE = "sequence"
        # NOTE(review): the two self-assignments below are no-ops,
        # presumably leftovers from an earlier refactor.
        uri = uri
        block_hash = block_hash
        try:
            parsed_uri = parse_lbry_uri(uri)
        except URIParseError as err:
            return {'error': err.message}
        result = {}

        if parsed_uri.is_channel:
            certificate = None

            # TODO: this is also done on the else, refactor
            if parsed_uri.claim_id:
                # Channel addressed by explicit claim id.
                certificate_info = await self.claimtrie_getclaimbyid(
                    parsed_uri.claim_id)
                if certificate_info and certificate_info[
                        'name'] == parsed_uri.name:
                    certificate = {
                        'resolution_type': CLAIM_ID,
                        'result': certificate_info
                    }
            elif parsed_uri.claim_sequence:
                # Channel addressed by sequence position within the name.
                certificate_info = await self.claimtrie_getnthclaimforname(
                    parsed_uri.name, parsed_uri.claim_sequence)
                if certificate_info:
                    certificate = {
                        'resolution_type': SEQUENCE,
                        'result': certificate_info
                    }
            else:
                # No qualifier: take the winning claim for the name.
                certificate_info = await self.claimtrie_getvalue(
                    parsed_uri.name, block_hash)
                if certificate_info:
                    certificate = {
                        'resolution_type': WINNING,
                        'result': certificate_info
                    }

            if certificate and 'claim_id' not in certificate['result']:
                return result

            if certificate and not parsed_uri.path:
                # Bare channel URI: list everything signed by the channel.
                result['certificate'] = certificate
                channel_id = certificate['result']['claim_id']
                claims_in_channel = await self.claimtrie_getclaimssignedbyid(
                    channel_id)
                result['unverified_claims_in_channel'] = {
                    claim['claim_id']: (claim['name'], claim['height'])
                    for claim in claims_in_channel if claim
                }
            elif certificate:
                # Channel URI with a path: restrict to claims matching it.
                result['certificate'] = certificate
                channel_id = certificate['result']['claim_id']
                claim_ids_matching_name = self.get_signed_claims_with_name_for_channel(
                    channel_id, parsed_uri.path)
                claims = await self.batched_formatted_claims_from_daemon(
                    claim_ids_matching_name)

                claims_in_channel = {
                    claim['claim_id']: (claim['name'], claim['height'])
                    for claim in claims
                }
                result['unverified_claims_for_name'] = claims_in_channel
        else:
            # Plain (non-channel) claim resolution, mirroring the above.
            claim = None
            if parsed_uri.claim_id:
                claim_info = await self.claimtrie_getclaimbyid(
                    parsed_uri.claim_id)
                if claim_info and claim_info['name'] == parsed_uri.name:
                    claim = {'resolution_type': CLAIM_ID, 'result': claim_info}
            elif parsed_uri.claim_sequence:
                claim_info = await self.claimtrie_getnthclaimforname(
                    parsed_uri.name, parsed_uri.claim_sequence)
                if claim_info:
                    claim = {'resolution_type': SEQUENCE, 'result': claim_info}
            else:
                claim_info = await self.claimtrie_getvalue(
                    parsed_uri.name, block_hash)
                if claim_info:
                    claim = {'resolution_type': WINNING, 'result': claim_info}
            if (claim and
                    # is not an unclaimed winning name
                (claim['resolution_type'] != WINNING
                 or proof_has_winning_claim(claim['result']['proof']))):
                # Also resolve the signing certificate, if any.
                raw_claim_id = unhexlify(claim['result']['claim_id'])[::-1]
                raw_certificate_id = self.db.get_claim_info(
                    raw_claim_id).cert_id
                if raw_certificate_id:
                    certificate_id = hash_to_hex_str(raw_certificate_id)
                    certificate = await self.claimtrie_getclaimbyid(
                        certificate_id)
                    if certificate:
                        certificate = {
                            'resolution_type': CLAIM_ID,
                            'result': certificate
                        }
                        result['certificate'] = certificate
                result['claim'] = claim
        return result
Example 22
0
 def put_claim_id_for_outpoint(self, tx_hash, tx_idx, claim_id):
     """Map the outpoint (tx_hash, tx_idx) to claim_id in the cache."""
     self.logger.info("[+] Adding outpoint: {}:{} for {}.".format(
         hash_to_hex_str(tx_hash), tx_idx,
         hash_to_hex_str(claim_id) if claim_id else None))
     outpoint_key = tx_hash + struct.pack('>I', tx_idx)
     self.outpoint_to_claim_id_cache[outpoint_key] = claim_id
Example 23
0
 def prev_hex_hash(raw_block):
     """Return the previous-block hash (hex) from a raw block's header."""
     prev_hash_bytes = raw_block[4:36]
     return hash_to_hex_str(prev_hash_bytes)
Example 24
0
 def remove_claim_id_for_outpoint(self, tx_hash, tx_idx):
     """Clear the outpoint -> claim id mapping for (tx_hash, tx_idx)."""
     self.logger.info("[-] Remove outpoint: {}:{}.".format(
         hash_to_hex_str(tx_hash), tx_idx))
     outpoint_key = tx_hash + struct.pack('>I', tx_idx)
     self.outpoint_to_claim_id_cache[outpoint_key] = None
Example 25
0
    async def claimtrie_getvalueforuri(self,
                                       block_hash,
                                       uri,
                                       known_certificates=None):
        """Resolve a LBRY URI at block_hash into claim/certificate info.

        Supports partial claim ids (shorter than CLAIM_ID_MAX_LENGTH)
        via claimtrie_getpartialmatch.  Returns a dict which may
        contain 'certificate', 'claim', 'unverified_claims_in_channel'
        or 'unverified_claims_for_name' keys, or {'error': ...} when
        the URI fails to parse.  known_certificates is accepted but
        not used in this body — TODO confirm whether callers rely on it.
        """
        # TODO: this thing is huge, refactor
        CLAIM_ID = "claim_id"
        WINNING = "winning"
        SEQUENCE = "sequence"
        # NOTE(review): the two self-assignments below are no-ops,
        # presumably leftovers from an earlier refactor.
        uri = uri
        block_hash = block_hash
        try:
            parsed_uri = parse_lbry_uri(uri)
        except URIParseError as err:
            return {'error': err.message}
        result = {}

        if parsed_uri.contains_channel:
            certificate = None

            # TODO: this is also done on the else, refactor
            if parsed_uri.claim_id:
                # Short ids resolve via prefix match; full ids directly.
                if len(parsed_uri.claim_id) < CLAIM_ID_MAX_LENGTH:
                    certificate_info = self.claimtrie_getpartialmatch(
                        parsed_uri.name, parsed_uri.claim_id)
                else:
                    certificate_info = await self.claimtrie_getclaimbyid(
                        parsed_uri.claim_id)
                if certificate_info and self.claim_matches_name(
                        certificate_info, parsed_uri.name):
                    certificate = {
                        'resolution_type': CLAIM_ID,
                        'result': certificate_info
                    }
            elif parsed_uri.claim_sequence:
                # Channel addressed by sequence position within the name.
                certificate_info = await self.claimtrie_getnthclaimforname(
                    parsed_uri.name, parsed_uri.claim_sequence)
                if certificate_info:
                    certificate = {
                        'resolution_type': SEQUENCE,
                        'result': certificate_info
                    }
            else:
                # No qualifier: take the winning claim for the name.
                certificate_info = await self.claimtrie_getvalue(
                    parsed_uri.name, block_hash)
                if certificate_info:
                    certificate = {
                        'resolution_type': WINNING,
                        'result': certificate_info
                    }

            if certificate and 'claim_id' not in certificate['result']:
                return result

            if certificate:
                result['certificate'] = certificate
                channel_id = certificate['result']['claim_id']
                claims_in_channel = self.claimtrie_getclaimssignedbyidminimal(
                    channel_id)
                if not parsed_uri.path:
                    # Bare channel URI: list all claims in the channel.
                    result['unverified_claims_in_channel'] = {
                        claim['claim_id']: (claim['name'], claim['height'])
                        for claim in claims_in_channel
                    }
                else:
                    # making an assumption that there aren't case conflicts on an existing channel
                    norm_path = self.normalize_name(parsed_uri.path)
                    result['unverified_claims_for_name'] = {
                        claim['claim_id']: (claim['name'], claim['height'])
                        for claim in claims_in_channel
                        if self.normalize_name(claim['name']) == norm_path
                    }

        else:
            # Plain (non-channel) claim resolution, mirroring the above.
            claim = None
            if parsed_uri.claim_id:
                if len(parsed_uri.claim_id) < CLAIM_ID_MAX_LENGTH:
                    claim_info = self.claimtrie_getpartialmatch(
                        parsed_uri.name, parsed_uri.claim_id)
                else:
                    claim_info = await self.claimtrie_getclaimbyid(
                        parsed_uri.claim_id)
                if claim_info and self.claim_matches_name(
                        claim_info, parsed_uri.name):
                    claim = {'resolution_type': CLAIM_ID, 'result': claim_info}
            elif parsed_uri.claim_sequence:
                claim_info = await self.claimtrie_getnthclaimforname(
                    parsed_uri.name, parsed_uri.claim_sequence)
                if claim_info:
                    claim = {'resolution_type': SEQUENCE, 'result': claim_info}
            else:
                claim_info = await self.claimtrie_getvalue(
                    parsed_uri.name, block_hash)
                if claim_info:
                    claim = {'resolution_type': WINNING, 'result': claim_info}
            if (claim and
                    # is not an unclaimed winning name
                (claim['resolution_type'] != WINNING
                 or proof_has_winning_claim(claim['result']['proof']))):
                # Also resolve the signing certificate, if any.
                raw_claim_id = unhexlify(claim['result']['claim_id'])[::-1]
                raw_certificate_id = self.db.get_claim_info(
                    raw_claim_id).cert_id
                if raw_certificate_id:
                    certificate_id = hash_to_hex_str(raw_certificate_id)
                    certificate = await self.claimtrie_getclaimbyid(
                        certificate_id)
                    if certificate:
                        certificate = {
                            'resolution_type': CLAIM_ID,
                            'result': certificate
                        }
                        result['certificate'] = certificate
                result['claim'] = claim

        return result
Example 26
0
 async def claimtrie_getclaimssignedbynthtoname(self, name, n):
     """Return claims signed by the certificate at position n of name.

     Resolves the nth claim for the name, then looks up the claims
     signed by it.  Returns None when the position is empty or the
     resolved claim has no claim_id.
     """
     # Bug fix: claimtrie_getnthclaimforname is a coroutine and was
     # previously called without await, so `claim` was a coroutine
     # object and the 'claim_id' membership test could never succeed.
     claim = await self.claimtrie_getnthclaimforname(name, n)
     if claim and 'claim_id' in claim:
         return await self.claimtrie_getclaimssignedbyid(
             hash_to_hex_str(claim['claim_id']))
Example 27
0
 def remove_certificate(self, cert_id):
     """Forget a certificate: reset its signed-claims list to empty."""
     self.logger.info(
         "[-] Removing certificate: {}".format(hash_to_hex_str(cert_id)))
     self.claims_signed_by_cert_cache[cert_id] = []
Example 28
0
 def put_claim_info(self, claim_id, claim_info):
     """Cache the serialized form of claim_info under claim_id."""
     msg = "[+] Adding claim info for: {}".format(hash_to_hex_str(claim_id))
     self.logger.info(msg)
     self.claim_cache[claim_id] = claim_info.serialized
Example 29
0
File: tx.py Project: shyba/torba
 def __str__(self):
     """Human-readable form of this input for logging and debugging."""
     return (f"Input({hash_to_hex_str(self.prev_hash)}, {self.prev_idx:d},"
             f" script={self.script.hex()}, sequence={self.sequence:d})")