Ejemplo n.º 1
0
    def on_post(self, req, resp):
        """Sequence a single block, identified by 'block_id' or 'block_hash'.

        Responds 400 when neither identifier is supplied, 404 when the
        block is unknown, 200 when it was already sequenced, and 201 when
        it was sequenced by this call.
        """
        block_hash = None

        if 'block_id' in req.media:
            block = Block.query(self.session).filter(
                Block.id == req.media.get('block_id')).first()
        elif req.media.get('block_hash'):
            block_hash = req.media.get('block_hash')
            block = Block.query(
                self.session).filter(Block.hash == block_hash).first()
        else:
            resp.status = falcon.HTTP_BAD_REQUEST
            resp.media = {
                'errors': ['Either block_hash or block_id should be supplied']
            }
            # BUG FIX: return here. Previously execution fell through to the
            # `if block:` check below and the 400 response was overwritten
            # with a 404 'Block not found'.
            return

        if block:
            print('Sequencing #{} ...'.format(block.id))

            harvester = PolkascanHarvesterService(self.session,
                                                  type_registry=TYPE_REGISTRY)

            if block.id == 1:
                # Ensure the genesis block is stored before sequencing #1;
                # add_block persists it as a side effect (result not needed).
                harvester.add_block(block.parent_hash)

            block_total = BlockTotal.query(
                self.session).filter_by(id=block.id).first()
            parent_block = Block.query(
                self.session).filter(Block.id == block.id - 1).first()
            parent_block_total = BlockTotal.query(
                self.session).filter_by(id=block.id - 1).first()

            if block_total:
                resp.status = falcon.HTTP_200
                resp.media = {'result': 'already exists', 'blockId': block.id}
            else:
                # sequence_block expects plain dicts (or None) for parents.
                if parent_block_total:
                    parent_block_total = parent_block_total.asdict()

                if parent_block:
                    parent_block = parent_block.asdict()

                harvester.sequence_block(block, parent_block,
                                         parent_block_total)

                self.session.commit()

                resp.status = falcon.HTTP_201
                resp.media = {
                    'result': 'added',
                    'parentHash': block.parent_hash
                }

        else:
            resp.status = falcon.HTTP_404
            resp.media = {'result': 'Block not found'}
Ejemplo n.º 2
0
    def on_post(self, req, resp):
        """Back-fill blocks from a start hash down towards genesis.

        Walks parent hashes from the block given by 'start_hash' (or the
        lowest block already stored) and adds missing ancestors, committing
        once at the end.
        """
        msg = "TODO"
        if req.media.get('start_hash'):
            block = Block.query(self.session).filter(Block.hash == req.media.get('start_hash')).first()
        else:
            # No start hash given: resume from the lowest block we have.
            block = Block.query(self.session).order_by(Block.id.asc()).first()

        if block and block.id != 1:
            harvester = PolkascanHarvesterService(self.session, type_registry=TYPE_REGISTRY)
            block_hash = block.parent_hash
            for nr in range(0, block.id - 1):
                try:
                    block = harvester.add_block(block_hash)
                except BlockAlreadyAdded as e:
                    # NOTE(review): on this path `block` keeps its previous
                    # value, so the next hash is re-derived from the last
                    # successfully added block — confirm this is intended.
                    print('Skipping {}'.format(block_hash))
                block_hash = block.parent_hash
                if block.id == 0:
                    # Reached genesis; nothing further to back-fill.
                    break

            self.session.commit()

            resp.media = {
                'status': 'success',
                'data': {
                    'message': msg
                }
            }
        else:
            resp.status = falcon.HTTP_404
            resp.media = {'result': 'Block not found'}
    def get_item(self, item_id):
        """Resolve *item_id* to a Block: numeric id, '<shard>-<bid>', or hash."""
        if item_id.isnumeric():
            return Block.query(self.session).filter_by(id=item_id).first()

        if '-' in item_id:
            shard_part, bid_part = item_id.split("-")[:2]
            return Block.query(self.session).filter_by(
                bid=int(bid_part), shard_num=int(shard_part)).first()

        return Block.query(self.session).filter_by(hash=item_id).first()
Ejemplo n.º 4
0
def sequence_block_recursive(self,
                             parent_block_data,
                             parent_sequenced_block_data=None):
    """Sequence up to 10 consecutive blocks, then re-queue itself.

    When nothing has been sequenced yet, verifies the chain is at genesis
    and processes the genesis block first. After a full batch of 10, the
    task re-schedules itself via .delay() with the latest parent data.

    Returns a dict reporting progress or the error encountered.
    """
    harvester = PolkascanHarvesterService(self.session,
                                          type_registry=TYPE_REGISTRY)
    harvester.metadata_store = self.metadata_store
    for nr in range(0, 10):
        if not parent_sequenced_block_data:
            # No block ever sequenced, check if chain is at genesis state

            block = Block.query(self.session).order_by('id').first()

            if block.id == 1:
                # Add genesis block
                block = harvester.add_block(block.parent_hash)

            if block.id != 0:
                return {'error': 'Chain not at genesis'}

            harvester.process_genesis(block)
            block_id = 0
        else:
            # Continue from the block after the last sequenced one.
            block_id = parent_sequenced_block_data['id'] + 1

            block = Block.query(self.session).get(block_id)

        if block:
            try:
                sequenced_block = harvester.sequence_block(
                    block, parent_block_data, parent_sequenced_block_data)
                self.session.commit()

                # Current block becomes the parent for the next iteration.
                parent_block_data = block.asdict()
                parent_sequenced_block_data = sequenced_block.asdict()

                if nr == 9 or not sequenced_block:

                    if sequenced_block:
                        if nr == 9:
                            # Batch finished: queue the next batch as a task.
                            sequence_block_recursive.delay(
                                parent_block_data, parent_sequenced_block_data)

                    return {'processedBlockId': block.id, 'amount': nr + 1}

            except IntegrityError as e:
                # Another sequencer already wrote this block id.
                return {
                    'error': 'Sequencer already started',
                    'exception': str(e)
                }
        else:
            return {'error': 'Block {} not found'.format(block_id)}
    def on_get(self, req, resp):
        """Report the harvester head and the block-id gaps still unprocessed."""
        head = Block.query(self.session).order_by(Block.id.desc()).first()

        if head is None:
            resp.media = {
                'status': 'success',
                'data': {
                    'message': 'Harvester waiting for first run'
                }
            }
            return

        missing_sets = Block.get_missing_block_ids(self.session)

        queue = []
        for block_set in missing_sets:
            queue.append({
                'from': block_set['block_from'],
                'to': block_set['block_to']
            })

        resp.status = falcon.HTTP_200
        resp.media = {
            'status': 'success',
            'data': {
                'harvester_head': head.id,
                'block_process_queue': queue
            }
        }
    def on_get(self, req, resp):
        """Expose sequencer/integrity head positions and chain head ids."""
        sequencer_task = Status.get_status(self.session, 'SEQUENCER_TASK_ID')
        integrity_head = Status.get_status(self.session, 'INTEGRITY_HEAD')
        sequencer_head = self.session.query(func.max(BlockTotal.id)).one()[0]

        max_block_id = self.session.query(func.max(Block.id)).one()[0]
        best_block = Block.query(self.session).filter_by(
            id=max_block_id).first()

        best_block_datetime = None
        best_block_nr = None
        if best_block:
            # Timestamp is reported in milliseconds, UTC.
            utc_dt = best_block.datetime.replace(tzinfo=pytz.UTC)
            best_block_datetime = utc_dt.timestamp() * 1000
            best_block_nr = best_block.id

        substrate = SubstrateInterface(SUBSTRATE_RPC_URL)
        chain_head_block_id = substrate.get_block_number(
            substrate.get_chain_head())
        chain_finalized_block_id = substrate.get_block_number(
            substrate.get_chain_finalised_head())

        resp.media = {
            'best_block_datetime': best_block_datetime,
            'best_block_nr': best_block_nr,
            'sequencer_task': sequencer_task.value,
            'sequencer_head': sequencer_head,
            'integrity_head': int(integrity_head.value),
            'chain_head_block_id': chain_head_block_id,
            'chain_finalized_block_id': chain_finalized_block_id
        }
 def get_item(self, item_id):
     """Fetch a BlockTotal by numeric id, or via the hash of its Block."""
     if item_id.isnumeric():
         return BlockTotal.query(self.session).get(item_id)

     block = Block.query(self.session).filter_by(hash=item_id).first()
     if block is None:
         # Unknown hash: same implicit None as the original.
         return None
     return BlockTotal.query(self.session).get(block.id)
Ejemplo n.º 8
0
    def remove_block(self, block_hash):
        """Revert processors for, then delete, the block with *block_hash*.

        Runs accumulation_revert on all event/extrinsic/block processors,
        then deletes the block's events, extrinsics and the block row.
        Assumes the block exists; the caller commits the session.
        """
        # Retrieve block
        block = Block.query(self.db_session).filter_by(hash=block_hash).first()

        # Hoisted out of the loops: one registry serves all lookups below.
        registry = ProcessorRegistry()

        # Revert event processors
        for event in Event.query(self.db_session).filter_by(block_id=block.id):
            for processor_class in registry.get_event_processors(event.module_id, event.event_id):
                event_processor = processor_class(block, event, None)
                event_processor.accumulation_revert(self.db_session)

        # Revert extrinsic processors
        for extrinsic in Extrinsic.query(self.db_session).filter_by(block_id=block.id):
            for processor_class in registry.get_extrinsic_processors(extrinsic.module_id, extrinsic.call_id):
                extrinsic_processor = processor_class(block, extrinsic)
                extrinsic_processor.accumulation_revert(self.db_session)

        # Revert block processors
        for processor_class in registry.get_block_processors():
            block_processor = processor_class(block)
            block_processor.accumulation_revert(self.db_session)

        # Delete events
        for item in Event.query(self.db_session).filter_by(block_id=block.id):
            self.db_session.delete(item)
        # Delete extrinsics
        for item in Extrinsic.query(self.db_session).filter_by(block_id=block.id):
            self.db_session.delete(item)

        # Delete block
        self.db_session.delete(block)
Ejemplo n.º 9
0
def start_init(self):
    """Bootstrap harvesting for every configured shard.

    Skips when an init task is already running or when all shards already
    have block #1; otherwise queues one accumulate_block_recursive task
    per shard starting at the hash of block #3.
    """
    if self.init:
        # BUG FIX: the message had no '{}' placeholder, so .format()
        # silently dropped the task state.
        print('start_init task is running: {}'.format(self.init))
        return {'result': 'waiting init task completed! '}

    n = Block.query(self.session).filter_by(bid=1).count()

    if n >= 4:
        print(' init task is completed! count().n: {} '.format(n))

        return {'result': ' init task is completed! '}
    self.init = True
    print("start_init")
    for shard in SHARDS_TABLE:
        substrate_url = SHARDS_TABLE[shard]
        substrate = SubstrateInterface(substrate_url)
        start_block_hash = substrate.get_block_hash(3)

        print('== start_init  substrate_url {} ==start_block_hash-{}'.format(
            substrate_url, start_block_hash))

        end_block_hash = None

        accumulate_block_recursive.delay(start_block_hash, end_block_hash,
                                         substrate_url)

    return {
        'result': 'start_init job started',
        'SHARDS_TABLE': SHARDS_TABLE,
        'init_task_id': self.request.id
    }
Ejemplo n.º 10
0
def sync_block_account_id(self):
    """Back-fill Block.account_index from 'PreRuntime' Babe digest logs.

    Handles primary ("00") and secondary ("01" with 34 hex chars) digests;
    other payloads are skipped before decoding. Commits once at the end.
    """
    db_session = self.session
    blocks = Block.query(db_session).filter(
        Block.account_index.is_(None)).all()

    for block in blocks:
        log = Log.query(db_session).filter(Log.block_id == block.id).filter(
            Log.type == 'PreRuntime').first()
        if log:
            data = log.data.get("value").get("data")
            if data:
                if data[0:2] != "01" and data[0:2] != "00":
                    continue

                res = RawBabePreDigest(ScaleBytes("0x{}".format(data)))
                if data[0:2] == "01" and len(data) == 34:
                    res.decode()
                    block.account_index = res.value.get("Secondary").get(
                        "authorityIndex")
                elif data[0:2] == "00":
                    res.decode(check_remaining=False)
                    block.account_index = res.value.get("Primary").get(
                        "authorityIndex")
                else:
                    # BUG FIX: raising a str is a TypeError in Python 3 and
                    # the message had no placeholder; raise a real exception
                    # carrying the offending payload.
                    raise ValueError("error log data {}".format(data))

                block.save(db_session)
                print("...................", block.id, block.account_index)
        else:
            print("...................", "Blocks not found")

    db_session.commit()
Ejemplo n.º 11
0
    def on_post(self, req, resp):
        """Harvest one or more blocks, walking parent hashes backwards.

        Accepts 'block_id' or 'block_hash', plus an optional 'amount'
        (default 1). Responds 400 when neither identifier is supplied, 200
        when the block already exists, 201 after adding, 404 otherwise.
        """
        block_hash = None

        if req.media.get('block_id'):
            substrate = SubstrateInterface(
                url=SUBSTRATE_RPC_URL,
                runtime_config=RuntimeConfiguration(),
                type_registry_preset=settings.TYPE_REGISTRY)
            block_hash = substrate.get_block_hash(req.media.get('block_id'))
        elif req.media.get('block_hash'):
            block_hash = req.media.get('block_hash')
        else:
            resp.status = falcon.HTTP_BAD_REQUEST
            resp.media = {
                'errors': ['Either block_hash or block_id should be supplied']
            }
            # BUG FIX: return here. Previously execution fell through to the
            # `if block_hash:` check below and the 400 response was
            # overwritten with a 404 'Block not found'.
            return

        if block_hash:
            print('Processing {} ...'.format(block_hash))
            harvester = PolkascanHarvesterService(
                db_session=self.session,
                type_registry=TYPE_REGISTRY,
                type_registry_file=TYPE_REGISTRY_FILE + '-mst')

            block = Block.query(
                self.session).filter(Block.hash == block_hash).first()

            if block:
                resp.status = falcon.HTTP_200
                resp.media = {
                    'result': 'already exists',
                    'parentHash': block.parent_hash
                }
            else:

                amount = req.media.get('amount', 1)

                for nr in range(0, amount):
                    try:
                        block = harvester.add_block(block_hash)
                    except BlockAlreadyAdded as e:
                        # NOTE(review): if the very first add raises, `block`
                        # is still None here and the next line fails —
                        # confirm whether this path can occur in practice.
                        print('Skipping {}'.format(block_hash))
                    block_hash = block.parent_hash
                    if block.id == 0:
                        break

                self.session.commit()

                resp.status = falcon.HTTP_201
                resp.media = {
                    'result': 'added',
                    'parentHash': block.parent_hash
                }

        else:
            resp.status = falcon.HTTP_404
            resp.media = {'result': 'Block not found'}
Ejemplo n.º 12
0
    def process_genesis(self, block):
        """Process the genesis block: set its datetime and register the
        initial accounts and session.

        Genesis has no timestamp extrinsic, so the datetime is copied from
        the child block. Accounts are read from the Indices EnumSet storage
        pages at the genesis hash; each yields an AccountAudit and an
        AccountIndexAudit row. Finally session 0 is created.
        """
        # Set block time of parent block
        child_block = Block.query(
            self.db_session).filter_by(parent_hash=block.hash).first()
        block.set_datetime(child_block.datetime)

        # Retrieve genesis accounts
        genesis_account_page_count = self.substrate.get_runtime_state(
            module="Indices",
            storage_function="NextEnumSet",
            block_hash=block.hash).get('result', 0)

        # Get Accounts on EnumSet
        block.count_accounts_new = 0
        block.count_accounts = 0

        for enum_set_nr in range(0, genesis_account_page_count + 1):

            genesis_accounts = self.substrate.get_runtime_state(
                module="Indices",
                storage_function="EnumSet",
                params=[enum_set_nr],
                block_hash=block.hash).get('result')

            if genesis_accounts:
                block.count_accounts_new += len(genesis_accounts)
                block.count_accounts += len(genesis_accounts)

                for idx, account_id in enumerate(genesis_accounts):
                    account_audit = AccountAudit(
                        account_id=account_id.replace('0x', ''),
                        block_id=block.id,
                        extrinsic_idx=None,
                        event_idx=None,
                        type_id=ACCOUNT_AUDIT_TYPE_NEW)

                    account_audit.save(self.db_session)

                    # Each EnumSet page holds 64 indices; derive the global
                    # account index from page number and position.
                    account_index_id = enum_set_nr * 64 + idx

                    account_index_audit = AccountIndexAudit(
                        account_index_id=account_index_id,
                        account_id=account_id.replace('0x', ''),
                        block_id=block.id,
                        extrinsic_idx=None,
                        event_idx=None,
                        type_id=ACCOUNT_INDEX_AUDIT_TYPE_NEW)

                    account_index_audit.save(self.db_session)

        block.save(self.db_session)

        # Create initial session
        initial_session_event = NewSessionEventProcessor(block, Event(), None)
        initial_session_event.add_session(db_session=self.db_session,
                                          session_id=0)
Ejemplo n.º 13
0
    def rebuild_search_index(self):
        """Rebuild the SearchIndex table from scratch.

        Truncates the table, then replays every block's extrinsics and
        events through their registered processors' process_search_index
        hooks, committing once per block.
        """
        # Table name comes from the model, not user input.
        self.db_session.execute('truncate table {}'.format(
            SearchIndex.__tablename__))

        for block in Block.query(
                self.db_session).order_by('id').yield_per(1000):

            extrinsic_lookup = {}
            block._accounts_new = []
            block._accounts_reaped = []

            for extrinsic in Extrinsic.query(self.db_session).filter_by(
                    block_id=block.id).order_by('extrinsic_idx'):
                extrinsic_lookup[extrinsic.extrinsic_idx] = extrinsic

                # Add search index for signed extrinsics
                if extrinsic.address:
                    search_index = SearchIndex(
                        index_type_id=settings.SEARCH_INDEX_SIGNED_EXTRINSIC,
                        block_id=block.id,
                        extrinsic_idx=extrinsic.extrinsic_idx,
                        account_id=extrinsic.address)
                    search_index.save(self.db_session)

                # Process extrinsic processors
                for processor_class in ProcessorRegistry(
                ).get_extrinsic_processors(extrinsic.module_id,
                                           extrinsic.call_id):
                    extrinsic_processor = processor_class(
                        block=block,
                        extrinsic=extrinsic,
                        substrate=self.substrate)
                    extrinsic_processor.process_search_index(self.db_session)

            # Events may reference the extrinsic that emitted them; resolve
            # via the lookup built above (missing -> None).
            for event in Event.query(self.db_session).filter_by(
                    block_id=block.id).order_by('event_idx'):
                extrinsic = None
                if event.extrinsic_idx is not None:
                    try:
                        extrinsic = extrinsic_lookup[event.extrinsic_idx]
                    except (IndexError, KeyError):
                        extrinsic = None

                for processor_class in ProcessorRegistry(
                ).get_event_processors(event.module_id, event.event_id):
                    event_processor = processor_class(
                        block,
                        event,
                        extrinsic,
                        metadata=self.metadata_store.get(
                            block.spec_version_id),
                        substrate=self.substrate)
                    event_processor.process_search_index(self.db_session)

            self.db_session.commit()
Ejemplo n.º 14
0
def find(self, bid, shard_num, substrate):
    """Walk backwards from *bid* until the stored block hash matches the
    chain's hash, returning the id of that matching ancestor.
    """
    print('== find params  *shardnum=*{} *==bid=*{}*'.format(shard_num, bid))
    # Compare our stored hash for bid-1 against the chain's hash for bid-1.
    block_before = Block.query(self.session).filter_by(shard_num=shard_num,
                                                       bid=(bid - 1)).first()
    print('== find sql query  *==by bid-1 hash=*{}*'.format(block_before.hash))
    hash_on = substrate.get_block_hash(bid - 1)
    if hash_on == block_before.hash:
        return bid - 1
    else:
        # NOTE(review): the recursive call does not forward `self`; this is
        # only correct if `find` is a bound task whose framework injects it.
        # Confirm, otherwise an argument is silently shifted.
        return find(bid - 1, shard_num, substrate)
Ejemplo n.º 15
0
 def count(self):
     """Print fee rewards earned by the hard-coded coinbase address."""
     coinbase = 'yee1gjjlh3ll709jvdvwvpc0helpw8uh3fdldh3ae3a6xkdm0qu3d4zqg2d5d3'
     message_count = self.session.query(func.count(Block.id)).filter(
         Block.coinbase == coinbase).scalar()
     print('message_count---', message_count)

     rewarded_blocks = Block.query(self.session).filter(
         Block.coinbase == coinbase,
         Block.fee_reward > 0).all()
     for rewarded in rewarded_blocks:
         print(rewarded.fee_reward)
    def get_relationships(self, include_list, item):
        """Build the requested session relationships, keyed by relation name."""
        relationships = {}

        if 'blocks' in include_list:
            blocks_query = Block.query(self.session).filter_by(
                session_id=item.id)
            relationships['blocks'] = blocks_query.order_by(Block.id.desc())

        if 'validators' in include_list:
            validators_query = SessionValidator.query(self.session).filter_by(
                session_id=item.id)
            relationships['validators'] = validators_query.order_by(
                SessionValidator.rank_validator)

        return relationships
Ejemplo n.º 17
0
    def on_post(self, req, resp):
        """Run genesis processing against block #1, if it exists."""
        harvester = PolkascanHarvesterService(self.session,
                                              type_registry=TYPE_REGISTRY)

        block = Block.query(self.session).get(1)
        if block is None:
            result = 'Block #1 required to process genesis'
        else:
            result = harvester.process_genesis(block=block)

        self.session.commit()

        resp.media = {'result': result}
    def serialize_item(self, item):
        """Serialize an extrinsic, enriching it with call documentation,
        block datetime, account data and any runtime error message.
        """
        data = item.serialize()

        runtime_call = RuntimeCall.query(self.session).filter_by(
            module_id=item.module_id,
            call_id=item.call_id,
            spec_version=item.spec_version_id).first()

        # NOTE(review): assumes the call metadata exists; a missing
        # RuntimeCall row would raise AttributeError here — confirm.
        data['attributes']['documentation'] = runtime_call.documentation

        block = Block.query(self.session).get(item.block_id)

        if block.datetime:
            data['attributes']['datetime'] = block.datetime.replace(
                tzinfo=pytz.UTC).isoformat()
        else:
            data['attributes']['datetime'] = None

        if item.account:
            data['attributes']['account'] = item.account.serialize()

        if item.params:
            item.params = self.check_params(item.params, item.serialize_id())

        if item.error:
            # Retrieve ExtrinsicFailed event
            extrinsic_failed_event = Event.query(self.session).filter_by(
                block_id=item.block_id, event_id='ExtrinsicFailed').first()

            # Retrieve runtime error
            if extrinsic_failed_event:
                if 'Module' in extrinsic_failed_event.attributes[0]['value']:

                    # Look up the module-scoped error by its (index, error)
                    # pair for the extrinsic's spec version.
                    error = RuntimeErrorMessage.query(self.session).filter_by(
                        module_index=extrinsic_failed_event.attributes[0]
                        ['value']['Module']['index'],
                        index=extrinsic_failed_event.attributes[0]['value']
                        ['Module']['error'],
                        spec_version=item.spec_version_id).first()

                    if error:
                        data['attributes'][
                            'error_message'] = error.documentation
                elif 'BadOrigin' in extrinsic_failed_event.attributes[0][
                        'value']:
                    data['attributes']['error_message'] = 'Bad origin'
                elif 'CannotLookup' in extrinsic_failed_event.attributes[0][
                        'value']:
                    data['attributes']['error_message'] = 'Cannot lookup'

        return data
    def on_get(self, req, resp, item_id=None):
        """Return (cached) block-reward and fee-reward sums for a coinbase.

        *item_id* is the coinbase address. Responses are cached per
        method+URL; hits and misses are flagged via the X-Cache header.
        """
        resp.status = falcon.HTTP_200

        # TODO make caching more generic for custom resources
        cache_key = '{}-{}'.format(req.method, req.url)

        logger = logging.getLogger('yee')
        logger.setLevel('INFO')
        # BUG FIX: attach the handler once. Previously a fresh StreamHandler
        # was added on every request, duplicating log lines over time.
        if not logger.handlers:
            console_handler = logging.StreamHandler()
            console_handler.setLevel('INFO')
            logger.addHandler(console_handler)

        response = self.cache_region.get(cache_key, self.cache_expiration_time)

        if response is NO_VALUE:
            logger.info(time.strftime("%a %b %d %H:%M:%S %Y",
                                      time.localtime()))
            count = self.session.query(func.count(
                Block.id)).filter(Block.coinbase == item_id).scalar()
            listbl = Block.query(self.session).filter(
                Block.coinbase == item_id, Block.fee_reward > 0).all()

            # select sum(fee_reward) from data_block where coinbase=<addr> and fee_reward>0
            logger.info(time.strftime("%a %b %d %H:%M:%S %Y",
                                      time.localtime()))
            # Renamed from `sum` to stop shadowing the builtin.
            fee_sum = 0
            if len(listbl) > 0:
                for b in listbl:
                    fee_sum = fee_sum + b.fee_reward
                # Convert from base units (1e8) to whole coins.
                fee_sum = fee_sum / 100000000
            logger.info(count)
            logger.info(fee_sum)
            logger.info(time.strftime("%a %b %d %H:%M:%S %Y",
                                      time.localtime()))
            response = self.get_jsonapi_response(data={
                'type': 'AddressFeeSum',
                'attributes': {
                    # NOTE(review): 64 appears to be the fixed per-block
                    # reward — confirm and consider naming this constant.
                    'block_reward_sum': str(count * 64),
                    'fee_reward_sum': str(fee_sum)
                }
            }, )

            self.cache_region.set(cache_key, response)
            resp.set_header('X-Cache', 'MISS')
        else:
            resp.set_header('X-Cache', 'HIT')

        resp.media = response
Ejemplo n.º 20
0
def start_sequencer(self):
    """Kick off block sequencing from the last sequenced block (or genesis)."""
    head_id = self.session.query(func.max(BlockTotal.id)).one()[0]

    if head_id is None:
        # Nothing sequenced yet: start from scratch.
        sequence_block_recursive.delay(parent_block_data=None)
        return

    sequenced_parent = BlockTotal.query(self.session).filter_by(
        id=head_id).first()
    parent = Block.query(self.session).filter_by(id=head_id).first()

    sequence_block_recursive.delay(
        parent_block_data=parent.asdict(),
        parent_sequenced_block_data=sequenced_parent.asdict())
    def get_relationships(self, include_list, item):
        """Build the requested account relationships, keyed by relation name."""
        relationships = {}

        if 'recent_extrinsics' in include_list:
            extrinsics = Extrinsic.query(self.session).filter_by(
                address=item.id).order_by(Extrinsic.block_id.desc())
            relationships['recent_extrinsics'] = extrinsics[:10]

        if 'indices' in include_list:
            indices = AccountIndex.query(self.session).filter_by(
                account_id=item.id)
            relationships['indices'] = indices.order_by(
                AccountIndex.updated_at_block.desc())

        if 'rewards' in include_list:
            # Reward blocks are matched on the bech32 form of the hex id.
            coinbase = bech32.encode(HRP, bytes().fromhex(item.id))
            relationships['rewards'] = Block.query(self.session).filter_by(
                coinbase=coinbase).order_by(Block.id.desc())

        return relationships
Ejemplo n.º 22
0
    def serialize_item(self, item):
        """Serialize an extrinsic with call documentation, block datetime,
        account data and any module error message.
        """
        data = item.serialize()

        runtime_call = RuntimeCall.query(self.session).filter_by(
            module_id=item.module_id,
            call_id=item.call_id,
            spec_version=item.spec_version_id).first()

        # NOTE(review): assumes the call metadata exists; a missing
        # RuntimeCall row would raise AttributeError here — confirm.
        data['attributes']['documentation'] = runtime_call.documentation

        block = Block.query(self.session).get(item.block_id)

        data['attributes']['datetime'] = block.datetime.replace(
            tzinfo=pytz.UTC).isoformat()

        if item.account:
            data['attributes']['account'] = item.account.serialize()

        if item.error:
            # Retrieve ExtrinsicFailed event
            extrinsic_failed_event = Event.query(self.session).filter_by(
                block_id=item.block_id, event_id='ExtrinsicFailed').first()

            # Retrieve runtime error
            if extrinsic_failed_event and 'Module' in extrinsic_failed_event.attributes[
                    0]['value']:

                # NOTE(review): this filters by module_id while a sibling
                # implementation filters by the event's module_index —
                # confirm which lookup is correct for this schema.
                error = RuntimeErrorMessage.query(self.session).filter_by(
                    module_id=item.module_id,
                    index=extrinsic_failed_event.attributes[0]['value']
                    ['Module']['error'],
                    spec_version=item.spec_version_id).first()

                if error:
                    data['attributes']['error_message'] = error.documentation

        return data
Ejemplo n.º 23
0
    def on_get(self, req, resp):
        """List the latest blocks, paginated via 'page' and 'page_size'."""
        page = int(req.params.get('page') or 1)
        page_size = int(req.params.get('page_size') or 20)

        blocks = Block.latest_blocks(self.session, page, page_size)
        resp.status = falcon.HTTP_200

        result = []
        for block_data in blocks:
            author = None
            if block_data.author is not None:
                author = ss58_encode(block_data.author.replace('0x', ''))
            result.append({
                "block_num": block_data.id,
                "event_count": block_data.count_events,
                "extrinsics_count": block_data.count_extrinsics,
                "block_timestamp":
                    block_data.datetime.strftime("%Y-%m-%d %H:%M:%S"),
                "block_hash": block_data.hash,
                "author": author,
                # "block_timestamp": time.mktime(block_data.datetime.timetuple()),
                "finalized": "1" if block_data.author is not None else None,
            })

        count = Block.query(self.session).count()
        resp.media = {
            'status': 'success',
            'data': {
                'result': result,
                'count': count
            }
        }
Ejemplo n.º 24
0
def start_harvester(self, check_gaps=False, shard=None):
    """Celery task: catch up at most 10 new blocks for a single shard.

    The effective shard is always read from the task invocation args
    (``self.request.args[0]``); the ``shard`` keyword parameter only keeps the
    task signature stable, and ``check_gaps`` is currently unused.

    Returns a result dict; raises HarvesterNotshardParamsError when no shard
    arg was supplied and HarvesterCouldNotAddBlock on unexpected add failures.
    """
    shard = self.request.args[0]
    if shard is None:
        raise HarvesterNotshardParamsError(
            'params shard is missing.. stopping harvester ')

    print("start_harvester")
    substrate_url = SHARDS_TABLE[shard]
    print('== start_harvester substrate_url {} =='.format(substrate_url))
    substrate = SubstrateInterface(substrate_url)

    # Wait until the init task has seeded block #1 for at least 4 shards.
    n = Block.query(self.session).filter_by(bid=1).count()

    if n < 4:
        print('waiting init task completed! count().n: {} '.format(n))

        return {'result': 'waiting init task completed! '}

    block_sets = []

    harvester = PolkascanHarvesterService(self.session,
                                          type_registry=TYPE_REGISTRY)
    harvester.metadata_store = self.metadata_store

    start_block_hash = substrate.get_chain_head()
    end_block_hash = None  # NOTE(review): never reassigned below; stays None
    r = 10  # max number of blocks to add in one task run
    block_nr = substrate.get_block_number(start_block_hash)

    max_block = Block.query(self.session).filter_by(
        shard_num=shard.split(".")[1]).order_by(Block.bid.desc()).first()

    print('start block_nr  {} =='.format(block_nr))
    print('start max_block  {} =='.format(max_block.bid))
    # Shrink the range when the chain head is fewer than 10 blocks ahead.
    if block_nr - max_block.bid < 10:
        r = block_nr - max_block.bid

    # Fix: log the actual range r (the original printed max_block.bid here).
    print('current range r: {} =='.format(r))

    try:
        for nr in range(1, r + 1):
            block_hash = substrate.get_block_hash(max_block.bid + nr)

            if harvester.add_block(block_hash, substrate_url):
                print('start_harvester+ Added {} '.format(block_hash))
                self.session.commit()

        # Update persistent metadata store in Celery task
        self.metadata_store = harvester.metadata_store

    except BlockAlreadyAdded as e:
        print('. Skipped {} '.format(block_hash))
    except IntegrityError as e:
        print('. Skipped duplicate {}=={} '.format(block_hash, e))
    except Exception as exc:
        print('! ERROR adding {}'.format(block_hash))
        raise HarvesterCouldNotAddBlock(block_hash) from exc

    block_sets.append({
        'start_block_hash': start_block_hash,
        'end_block_hash': end_block_hash
    })

    # Fix: the original dict literal defined 'result' twice; only the second
    # value survived, so keep that surviving message as the single key.
    return {
        'result':
        'Synch data  from {} to {} blocks check by shardnum of {}'.format(
            max_block.bid + 1, r + max_block.bid + 1, shard),
        'block_sets':
        block_sets
    }
Ejemplo n.º 25
0
    def start_sequencer(self):
        """Run integrity checks, then sequence blocks up to INTEGRITY_HEAD.

        Sequencing walks from the block after the current sequencer head to
        the integrity head, producing a BlockTotal per block via
        ``sequence_block``. Returns a dict with either a ``result`` message
        or an ``error`` when the chain is not at genesis.
        """
        integrity_status = self.integrity_checks()
        self.db_session.commit()

        block_nr = None

        integrity_head = Status.get_status(self.db_session, 'INTEGRITY_HEAD')

        if not integrity_head.value:
            integrity_head.value = 0

        # 3. Check sequence head
        sequencer_head = self.db_session.query(func.max(BlockTotal.id)).one()[0]

        # -1 means nothing has been sequenced yet, so the loop starts at 0.
        if sequencer_head is None:
            sequencer_head = -1

        # Start sequencing process

        sequencer_parent_block = BlockTotal.query(self.db_session).filter_by(id=sequencer_head).first()
        parent_block = Block.query(self.db_session).filter_by(id=sequencer_head).first()

        for block_nr in range(sequencer_head + 1, int(integrity_head.value) + 1):

            if block_nr == 0:
                # No block ever sequenced, check if chain is at genesis state
                assert (not sequencer_parent_block)

                block = Block.query(self.db_session).order_by('id').first()

                if not block:
                    self.db_session.commit()
                    return {'error': 'Chain not at genesis'}

                if block.id == 1:
                    # Add genesis block
                    block = self.add_block(block.parent_hash)

                if block.id != 0:
                    self.db_session.commit()
                    return {'error': 'Chain not at genesis'}

                self.process_genesis(block)

                # Genesis has no parents to feed into sequence_block.
                sequencer_parent_block_data = None
                parent_block_data = None
            else:
                block_id = sequencer_parent_block.id + 1

                # Sanity check: the sequence must be strictly contiguous.
                assert (block_id == block_nr)

                block = Block.query(self.db_session).get(block_nr)

                # A gap in harvested blocks ends this sequencing run early.
                if not block:
                    self.db_session.commit()
                    return {'result': 'Finished at #{}'.format(sequencer_parent_block.id)}

                sequencer_parent_block_data = sequencer_parent_block.asdict()
                parent_block_data = parent_block.asdict()

            sequenced_block = self.sequence_block(block, parent_block_data, sequencer_parent_block_data)
            self.db_session.commit()

            # Current block becomes the parent for the next iteration.
            parent_block = block
            sequencer_parent_block = sequenced_block

        if block_nr:
            return {'result': 'Finished at #{}'.format(block_nr)}
        else:
            return {'result': 'Nothing to sequence'}
Ejemplo n.º 26
0
    def integrity_checks(self):
        """Verify stored blocks form an unbroken chain and advance INTEGRITY_HEAD.

        Walks blocks from the current integrity head towards the finalized
        head in chunks, checking id contiguity and parent-hash linkage.
        Detected reorgs are repaired by re-adding the affected blocks; gaps
        and reorgs raise BlockIntegrityError after persisting the head.
        Returns ``{'integrity_head': <last verified block id>}``.
        """

        # 1. Check finalized head
        substrate = SubstrateInterface(settings.SUBSTRATE_RPC_URL)

        if settings.FINALIZATION_BY_BLOCK_CONFIRMATIONS > 0:
            # Treat blocks with enough confirmations behind the chain head
            # as finalized (clamped at 0).
            finalized_block_hash = substrate.get_chain_head()
            finalized_block_number = max(
                substrate.get_block_number(finalized_block_hash) - settings.FINALIZATION_BY_BLOCK_CONFIRMATIONS, 0
            )
        else:
            finalized_block_hash = substrate.get_chain_finalised_head()
            finalized_block_number = substrate.get_block_number(finalized_block_hash)

        # 2. Check integrity head
        integrity_head = Status.get_status(self.db_session, 'INTEGRITY_HEAD')

        if not integrity_head.value:
            # Only continue if block #1 exists
            if Block.query(self.db_session).filter_by(id=1).count() == 0:
                raise BlockIntegrityError('Chain not at genesis')

            integrity_head.value = 0
        else:
            integrity_head.value = int(integrity_head.value)

        # Re-check from one block before the head so the linkage into the
        # already-verified range is validated as well.
        start_block_id = max(integrity_head.value - 1, 0)
        end_block_id = finalized_block_number
        chunk_size = 1000
        parent_block = None

        if start_block_id < end_block_id:
            # Continue integrity check

            # print('== Start integrity checks from {} to {} =='.format(start_block_id, end_block_id))

            for block_nr in range(start_block_id, end_block_id, chunk_size):
                # TODO replace limit with filter_by block range
                block_range = Block.query(self.db_session).order_by('id')[block_nr:block_nr + chunk_size]
                for block in block_range:
                    if parent_block:
                        if block.id != parent_block.id + 1:

                            # Save integrity head if block hash of parent matches with hash in node
                            if parent_block.hash == substrate.get_block_hash(integrity_head.value):
                                integrity_head.save(self.db_session)
                                self.db_session.commit()

                            raise BlockIntegrityError('Block #{} is missing.. stopping check '.format(parent_block.id + 1))
                        elif block.parent_hash != parent_block.hash:
                            # Parent-hash mismatch: a reorg happened. Record
                            # it, drop both blocks and re-fetch them from the
                            # node before rolling the head back.
                            self.process_reorg_block(parent_block)
                            self.process_reorg_block(block)

                            self.remove_block(block.hash)
                            self.remove_block(parent_block.hash)
                            self.db_session.commit()

                            self.add_block(substrate.get_block_hash(block.id))
                            self.add_block(substrate.get_block_hash(parent_block.id))
                            self.db_session.commit()

                            integrity_head.value = parent_block.id - 1

                            # Save integrity head if block hash of parent matches with hash in node
                            #if parent_block.parent_hash == substrate.get_block_hash(integrity_head.value):
                            integrity_head.save(self.db_session)
                            self.db_session.commit()

                            raise BlockIntegrityError('Block #{} failed integrity checks, Re-adding #{}.. '.format(parent_block.id, block.id))
                        else:
                            # Linkage ok: this block becomes the new head.
                            integrity_head.value = block.id

                    parent_block = block

                    if block.id == end_block_id:
                        break

            # Persist the head only when it matches the node's view.
            if parent_block:
                if parent_block.hash == substrate.get_block_hash(int(integrity_head.value)):
                    integrity_head.save(self.db_session)
                    self.db_session.commit()

        return {'integrity_head': integrity_head.value}
Ejemplo n.º 27
0
    def process_genesis(self, block):
        """Bootstrap chain state from the genesis block.

        Records genesis accounts (via the storage layout active at this
        spec version), hardcoded treasury accounts, the sudo key and the
        initial session. ``block`` is the stored genesis Block (id 0).
        """

        # Set block time of parent block
        child_block = Block.query(self.db_session).filter_by(parent_hash=block.hash).first()
        block.set_datetime(child_block.datetime)

        # Retrieve genesis accounts
        if settings.get_versioned_setting('SUBSTRATE_STORAGE_INDICES', block.spec_version_id) == 'Accounts':

            # Get accounts from storage keys
            storage_key_prefix = self.substrate.generate_storage_hash(
                storage_module='System',
                storage_function='Account',
                metadata_version=settings.SUBSTRATE_METADATA_VERSION
            )

            rpc_result = self.substrate.rpc_request(
                'state_getKeys',
                [storage_key_prefix, block.hash]
            ).get('result')
            # Extract accounts from storage key
            # NOTE(review): 162-char keys are assumed to end in a 32-byte
            # (64 hex chars) account id — confirm against the storage layout.
            genesis_accounts = [storage_key[-64:] for storage_key in rpc_result if len(storage_key) == 162]

            for account_id in genesis_accounts:
                account_audit = AccountAudit(
                    account_id=account_id,
                    block_id=block.id,
                    extrinsic_idx=None,
                    event_idx=None,
                    type_id=settings.ACCOUNT_AUDIT_TYPE_NEW
                )

                account_audit.save(self.db_session)

        elif settings.get_versioned_setting('SUBSTRATE_STORAGE_INDICES', block.spec_version_id) == 'EnumSet':

            genesis_account_page_count = self.substrate.get_runtime_state(
                module="Indices",
                storage_function="NextEnumSet",
                block_hash=block.hash
            ).get('result', 0)

            # Get Accounts on EnumSet
            block.count_accounts_new = 0
            block.count_accounts = 0

            for enum_set_nr in range(0, genesis_account_page_count + 1):

                genesis_accounts = self.substrate.get_runtime_state(
                    module="Indices",
                    storage_function="EnumSet",
                    params=[enum_set_nr],
                    block_hash=block.hash
                ).get('result')

                if genesis_accounts:
                    block.count_accounts_new += len(genesis_accounts)
                    block.count_accounts += len(genesis_accounts)

                    for idx, account_id in enumerate(genesis_accounts):
                        account_audit = AccountAudit(
                            account_id=account_id.replace('0x', ''),
                            block_id=block.id,
                            extrinsic_idx=None,
                            event_idx=None,
                            type_id=settings.ACCOUNT_AUDIT_TYPE_NEW
                        )

                        account_audit.save(self.db_session)

                        # Each EnumSet page holds 64 accounts; derive the
                        # global account index from page number and offset.
                        account_index_id = enum_set_nr * 64 + idx

                        account_index_audit = AccountIndexAudit(
                            account_index_id=account_index_id,
                            account_id=account_id.replace('0x', ''),
                            block_id=block.id,
                            extrinsic_idx=None,
                            event_idx=None,
                            type_id=settings.ACCOUNT_INDEX_AUDIT_TYPE_NEW
                        )

                        account_index_audit.save(self.db_session)

        block.save(self.db_session)

        # Add hardcoded account like treasury stored in settings
        for account_id in settings.SUBSTRATE_TREASURY_ACCOUNTS:
            account_audit = AccountAudit(
                account_id=account_id,
                block_id=block.id,
                extrinsic_idx=None,
                event_idx=None,
                data={'is_treasury': True},
                type_id=settings.ACCOUNT_AUDIT_TYPE_NEW
            )

            account_audit.save(self.db_session)

        # Check for sudo accounts
        try:
            # Update sudo key
            sudo_key = self.substrate.get_runtime_state(
                module='Sudo',
                storage_function='Key',
                block_hash=block.hash
            ).get('result')

            account_audit = AccountAudit(
                account_id=sudo_key.replace('0x', ''),
                block_id=block.id,
                extrinsic_idx=None,
                event_idx=None,
                data={'is_sudo': True},
                type_id=settings.ACCOUNT_AUDIT_TYPE_NEW
            )

            account_audit.save(self.db_session)
        except ValueError:
            # Chains without the Sudo pallet are simply skipped.
            pass

        # Create initial session
        initial_session_event = NewSessionEventProcessor(
            block=block, event=Event(), substrate=self.substrate
        )

        if settings.get_versioned_setting('NEW_SESSION_EVENT_HANDLER', block.spec_version_id):
            initial_session_event.add_session(db_session=self.db_session, session_id=0)
        else:
            initial_session_event.add_session_old(db_session=self.db_session, session_id=0)
Ejemplo n.º 28
0
    def add_block(self, block_hash):
        """Fetch, decode and persist the block identified by ``block_hash``.

        Retrieves the block from the node, decodes its events and extrinsics
        with the metadata of the parent's spec version, runs the registered
        extrinsic/event/block processors, and saves everything through
        ``self.db_session`` (caller commits). Returns the stored Block.

        Raises:
            BlockAlreadyAdded: if a block with this hash is already stored.
        """

        # Check if block is already process
        if Block.query(self.db_session).filter_by(hash=block_hash).count() > 0:
            raise BlockAlreadyAdded(block_hash)

        if settings.SUBSTRATE_MOCK_EXTRINSICS:
            self.substrate.mock_extrinsics = settings.SUBSTRATE_MOCK_EXTRINSICS

        json_block = self.substrate.get_chain_block(block_hash)

        parent_hash = json_block['block']['header'].pop('parentHash')
        block_id = json_block['block']['header'].pop('number')
        extrinsics_root = json_block['block']['header'].pop('extrinsicsRoot')
        state_root = json_block['block']['header'].pop('stateRoot')
        digest_logs = json_block['block']['header'].get('digest', {}).pop('logs', None)

        # Convert block number to numeric
        # The node may return the number as a hex string.
        if not block_id.isnumeric():
            block_id = int(block_id, 16)

        # ==== Get block runtime from Substrate ==================

        self.substrate.init_runtime(block_hash=block_hash)

        self.process_metadata(self.substrate.runtime_version, block_hash)

        # ==== Get parent block runtime ===================

        # Events/extrinsics of a block are decoded with the PARENT's runtime,
        # because a runtime upgrade only takes effect from the next block.
        if block_id > 0:
            json_parent_runtime_version = self.substrate.get_block_runtime_version(parent_hash)

            parent_spec_version = json_parent_runtime_version.get('specVersion', 0)

            self.process_metadata(parent_spec_version, parent_hash)
        else:
            parent_spec_version = self.substrate.runtime_version

        # ==== Set initial block properties =====================

        block = Block(
            id=block_id,
            parent_id=block_id - 1,
            hash=block_hash,
            parent_hash=parent_hash,
            state_root=state_root,
            extrinsics_root=extrinsics_root,
            count_extrinsics=0,
            count_events=0,
            count_accounts_new=0,
            count_accounts_reaped=0,
            count_accounts=0,
            count_events_extrinsic=0,
            count_events_finalization=0,
            count_events_module=0,
            count_events_system=0,
            count_extrinsics_error=0,
            count_extrinsics_signed=0,
            count_extrinsics_signedby_address=0,
            count_extrinsics_signedby_index=0,
            count_extrinsics_success=0,
            count_extrinsics_unsigned=0,
            count_sessions_new=0,
            count_contracts_new=0,
            count_log=0,
            range10000=math.floor(block_id / 10000),
            range100000=math.floor(block_id / 100000),
            range1000000=math.floor(block_id / 1000000),
            spec_version_id=self.substrate.runtime_version,
            logs=digest_logs
        )

        # Set temp helper variables
        block._accounts_new = []
        block._accounts_reaped = []

        # ==== Get block events from Substrate ==================
        # Maps extrinsic_idx -> True/False success, filled from system events.
        extrinsic_success_idx = {}
        events = []

        try:
            events_decoder = self.substrate.get_block_events(block_hash, self.metadata_store[parent_spec_version])

            event_idx = 0

            for event in events_decoder.elements:

                event.value['module_id'] = event.value['module_id'].lower()

                model = Event(
                    block_id=block_id,
                    event_idx=event_idx,
                    phase=event.value['phase'],
                    extrinsic_idx=event.value['extrinsic_idx'],
                    type=event.value['type'],
                    spec_version_id=parent_spec_version,
                    module_id=event.value['module_id'],
                    event_id=event.value['event_id'],
                    system=int(event.value['module_id'] == 'system'),
                    module=int(event.value['module_id'] != 'system'),
                    attributes=event.value['params'],
                    codec_error=False
                )

                # Process event

                # NOTE(review): phase 0 = ApplyExtrinsic, 1 = Finalization —
                # confirm against the chain's Phase enum.
                if event.value['phase'] == 0:
                    block.count_events_extrinsic += 1
                elif event.value['phase'] == 1:
                    block.count_events_finalization += 1

                if event.value['module_id'] == 'system':

                    block.count_events_system += 1

                    # Store result of extrinsic
                    if event.value['event_id'] == 'ExtrinsicSuccess':
                        extrinsic_success_idx[event.value['extrinsic_idx']] = True
                        block.count_extrinsics_success += 1

                    if event.value['event_id'] == 'ExtrinsicFailed':
                        extrinsic_success_idx[event.value['extrinsic_idx']] = False
                        block.count_extrinsics_error += 1
                else:

                    block.count_events_module += 1

                model.save(self.db_session)

                events.append(model)

                event_idx += 1

            block.count_events = len(events_decoder.elements)

        except SubstrateRequestException:
            # Events storage unavailable for this block: record zero events.
            block.count_events = 0

        # === Extract extrinsics from block ====

        extrinsics_data = json_block['block'].pop('extrinsics')

        block.count_extrinsics = len(extrinsics_data)

        extrinsic_idx = 0

        extrinsics = []

        for extrinsic in extrinsics_data:

            extrinsics_decoder = ExtrinsicsDecoder(
                data=ScaleBytes(extrinsic),
                metadata=self.metadata_store[parent_spec_version]
            )

            extrinsic_data = extrinsics_decoder.decode()

            # Lookup result of extrinsic
            extrinsic_success = extrinsic_success_idx.get(extrinsic_idx, False)

            model = Extrinsic(
                block_id=block_id,
                extrinsic_idx=extrinsic_idx,
                extrinsic_hash=extrinsics_decoder.extrinsic_hash,
                extrinsic_length=extrinsic_data.get('extrinsic_length'),
                extrinsic_version=extrinsic_data.get('version_info'),
                signed=extrinsics_decoder.contains_transaction,
                unsigned=not extrinsics_decoder.contains_transaction,
                signedby_address=bool(extrinsics_decoder.contains_transaction and extrinsic_data.get('account_id')),
                signedby_index=bool(extrinsics_decoder.contains_transaction and extrinsic_data.get('account_index')),
                address_length=extrinsic_data.get('account_length'),
                address=extrinsic_data.get('account_id'),
                account_index=extrinsic_data.get('account_index'),
                account_idx=extrinsic_data.get('account_idx'),
                signature=extrinsic_data.get('signature'),
                nonce=extrinsic_data.get('nonce'),
                era=extrinsic_data.get('era'),
                call=extrinsic_data.get('call_code'),
                module_id=extrinsic_data.get('call_module'),
                call_id=extrinsic_data.get('call_function'),
                params=extrinsic_data.get('params'),
                spec_version_id=parent_spec_version,
                success=int(extrinsic_success),
                error=int(not extrinsic_success),
                codec_error=False
            )
            model.save(self.db_session)

            extrinsics.append(model)

            extrinsic_idx += 1

            # Process extrinsic
            if extrinsics_decoder.contains_transaction:
                block.count_extrinsics_signed += 1

                if model.signedby_address:
                    block.count_extrinsics_signedby_address += 1
                if model.signedby_index:
                    block.count_extrinsics_signedby_index += 1

                # Add search index for signed extrinsics
                search_index = SearchIndex(
                    index_type_id=settings.SEARCH_INDEX_SIGNED_EXTRINSIC,
                    block_id=block.id,
                    extrinsic_idx=model.extrinsic_idx,
                    account_id=model.address
                )
                search_index.save(self.db_session)

            else:
                block.count_extrinsics_unsigned += 1

            # Process extrinsic processors
            for processor_class in ProcessorRegistry().get_extrinsic_processors(model.module_id, model.call_id):
                extrinsic_processor = processor_class(block, model, substrate=self.substrate)
                extrinsic_processor.accumulation_hook(self.db_session)
                extrinsic_processor.process_search_index(self.db_session)

        # Process event processors
        for event in events:
            # Link each event to its originating extrinsic, if any.
            extrinsic = None
            if event.extrinsic_idx is not None:
                try:
                    extrinsic = extrinsics[event.extrinsic_idx]
                except IndexError:
                    extrinsic = None

            for processor_class in ProcessorRegistry().get_event_processors(event.module_id, event.event_id):
                event_processor = processor_class(block, event, extrinsic,
                                                  metadata=self.metadata_store.get(block.spec_version_id),
                                                  substrate=self.substrate)
                event_processor.accumulation_hook(self.db_session)
                event_processor.process_search_index(self.db_session)

        # Process block processors
        for processor_class in ProcessorRegistry().get_block_processors():
            block_processor = processor_class(block, substrate=self.substrate, harvester=self)
            block_processor.accumulation_hook(self.db_session)

        # Debug info
        if settings.DEBUG:
            block.debug_info = json_block

        # ==== Save data block ==================================

        block.save(self.db_session)

        return block
 def get_query(self):
     """Base query for this resource: all blocks, newest first."""
     query = Block.query(self.session)
     return query.order_by(Block.id.desc())
 def get_item(self, item_id):
     """Look up a single block by numeric id, falling back to block hash."""
     if not item_id.isnumeric():
         return Block.query(self.session).filter_by(hash=item_id).first()
     return Block.query(self.session).filter_by(id=item_id).first()