Example 1
def accumulate_block_recursive(self,
                               block_hash,
                               end_block_hash=None,
                               substrate_url=None):
    print(
        'start accumulate_block_recursive block_hash {} =='.format(block_hash))
    print('start accumulate_block_recursive substrate_url {} =='.format(
        substrate_url))

    harvester = PolkascanHarvesterService(self.session,
                                          type_registry=TYPE_REGISTRY)
    harvester.metadata_store = self.metadata_store

    # Bail out early when no substrate_url was provided
    if not substrate_url:
        return
        # shard_num = NUM[substrate_url]
        # print('start accumulate_block_recursive shard_num {} =='.format(shard_num))

    substrate = SubstrateInterface(substrate_url)
    block_nr = substrate.get_block_number(block_hash)

    block = None
    max_sequenced_block_id = False
    block_one = None
    add_count = 0

    try:
        for nr in range(0, block_nr + 1):
            if not block or block.bid > 0:
                # Process block
                blocka = harvester.add_block(block_hash, substrate_url)
                if blocka:
                    print('+ Added {} '.format(block_hash))
                    add_count += 1
                    self.session.commit()
                    block = blocka
                if not block:
                    # add_block returned nothing for the very first hash: stop the walk
                    break
                # Break loop if targeted end block hash or genesis block is reached
                if block_hash == end_block_hash or block.bid == 0:
                    block_one = block
                    break
                # Continue with parent block hash
                block_hash = block.parent_hash

        # Update persistent metadata store in Celery task
        self.metadata_store = harvester.metadata_store
        harvester.process_shard_genesis(block_one, substrate_url)

        # if block_hash != end_block_hash and block and block.bid > 0:
        #     accumulate_block_recursive.delay(block.parent_hash, end_block_hash)

    except BlockAlreadyAdded as e:
        print('. Skipped {} '.format(block_hash))
    except IntegrityError as e:
        print('. Skipped duplicate {}=={} '.format(block_hash, e))
    except Exception as exc:
        print('! ERROR adding {}'.format(block_hash))
        raise HarvesterCouldNotAddBlock(block_hash) from exc

    return {
        'result': '{} blocks added'.format(add_count),
        'lastAddedBlockHash': block_hash,
        'sequencerStartedFrom': max_sequenced_block_id
    }
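
The core of Example 1 is a backward walk over the chain: each call to add_block persists one block, and the loop then follows parent_hash until the requested end hash or the genesis block (bid == 0) is reached. Below is a minimal sketch of that pattern only; walk_to_parent is a hypothetical name, and add_block stands in for PolkascanHarvesterService.add_block with the bid/parent_hash fields used above.

def walk_to_parent(add_block, start_hash, end_hash=None):
    """Follow parent_hash links backwards from start_hash until end_hash or genesis."""
    block_hash = start_hash
    added = 0
    while True:
        block = add_block(block_hash)       # persist one block and return its record
        if block is None:
            break                           # nothing was added; stop the walk
        added += 1
        if block_hash == end_hash or block.bid == 0:
            break                           # reached the target hash or the genesis block
        block_hash = block.parent_hash      # step back to the parent block
    return added
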
Example 2
def start_harvester(self, check_gaps=False, shard=None):
    # Fall back to the positional Celery task argument when shard is not passed directly
    if shard is None and self.request.args:
        shard = self.request.args[0]
    if shard is None:
        raise HarvesterNotshardParamsError(
            'shard param is missing, stopping harvester')

    print("start_harvester")
    substrate_url = SHARDS_TABLE[shard]
    print('== start_harvester substrate_url {} =='.format(substrate_url))
    substrate = SubstrateInterface(substrate_url)

    # Wait until the init task has stored block #1 (bid == 1) for enough shards
    n = Block.query(self.session).filter_by(bid=1).count()

    if n < 4:
        print('waiting for init task to complete! block count: {} '.format(n))

        return {'result': 'waiting for init task to complete'}

    block_sets = []

    harvester = PolkascanHarvesterService(self.session,
                                          type_registry=TYPE_REGISTRY)
    harvester.metadata_store = self.metadata_store

    start_block_hash = substrate.get_chain_head()
    end_block_hash = None
    r = 10
    block_nr = substrate.get_block_number(start_block_hash)

    max_block = Block.query(self.session).filter_by(
        shard_num=shard.split(".")[1]).order_by(Block.bid.desc()).first()

    print('start block_nr {} =='.format(block_nr))
    print('start max_block {} =='.format(max_block.bid))
    # Never fetch past the current chain head
    if block_nr - max_block.bid < 10:
        r = block_nr - max_block.bid

    print('current range r: {} =='.format(r))

    try:
        for nr in range(1, r + 1):
            block_hash = substrate.get_block_hash(max_block.bid + nr)

            if harvester.add_block(block_hash, substrate_url):
                print('start_harvester+ Added {} '.format(block_hash))
                self.session.commit()

        # Update persistent metadata store in Celery task
        self.metadata_store = harvester.metadata_store

    except BlockAlreadyAdded as e:
        print('. Skipped {} '.format(block_hash))
    except IntegrityError as e:
        print('. Skipped duplicate {}=={} '.format(block_hash, e))
    except Exception as exc:
        print('! ERROR adding {}'.format(block_hash))
        raise HarvesterCouldNotAddBlock(block_hash) from exc

    block_sets.append({
        'start_block_hash': start_block_hash,
        'end_block_hash': end_block_hash
    })

    return {
        'result': 'Yee data synchronization job SUCCESS: '
                  'synced blocks {} to {} for shard {}'.format(
                      max_block.bid + 1, max_block.bid + r, shard),
        'block_sets': block_sets
    }
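
Example 2 catches up in bounded batches: it reads the highest stored block for the shard, clamps the range to at most 10 new blocks, and fetches hashes starting just past the stored tip. A minimal sketch of that clamping logic, with a hypothetical catchup_range helper (not part of the harvester code above):

def catchup_range(chain_head_nr, max_stored_bid, batch_size=10):
    """Block numbers to fetch on this run: at most one batch past the stored tip."""
    behind = chain_head_nr - max_stored_bid     # how far the database lags the chain head
    r = min(behind, batch_size)                 # clamp to one batch per invocation
    return range(max_stored_bid + 1, max_stored_bid + r + 1)

# e.g. catchup_range(1000, 995) yields block numbers 996, 997, 998, 999, 1000
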
Example 3
def accumulate_block_recursive(self, block_hash, end_block_hash=None):

    harvester = PolkascanHarvesterService(self.session,
                                          type_registry=TYPE_REGISTRY)
    harvester.metadata_store = self.metadata_store

    # If metadata store isn't initialized yet, perform some tests
    if not harvester.metadata_store:
        print('Init: create entrypoints')
        # Check if blocks exists
        max_block_id = self.session.query(func.max(Block.id)).one()[0]

        if not max_block_id:
            # Speed up accumulating by creating several entry points
            substrate = SubstrateInterface(SUBSTRATE_RPC_URL)
            block_nr = substrate.get_block_number(block_hash)
            if block_nr > 100:
                for entry_point in range(0, block_nr, block_nr // 4)[1:-1]:
                    entry_point_hash = substrate.get_block_hash(entry_point)
                    accumulate_block_recursive.delay(entry_point_hash)

    block = None
    max_sequenced_block_id = False

    add_count = 0

    try:

        for nr in range(0, 10):
            if not block or block.id > 0:
                # Process block
                block = harvester.add_block(block_hash)

                print('+ Added {} '.format(block_hash))

                add_count += 1

                self.session.commit()

                # Break loop if targeted end block hash is reached
                if block_hash == end_block_hash or block.id == 0:
                    break

                # Continue with parent block hash
                block_hash = block.parent_hash

        # Update persistent metadata store in Celery task
        self.metadata_store = harvester.metadata_store

        if block_hash != end_block_hash and block and block.id > 0:
            accumulate_block_recursive.delay(block.parent_hash, end_block_hash)

    except BlockAlreadyAdded as e:
        print('. Skipped {} '.format(block_hash))
        start_sequencer.delay()
    except IntegrityError as e:
        print('. Skipped duplicate {} '.format(block_hash))
    except Exception as exc:
        print('! ERROR adding {}'.format(block_hash))
        raise HarvesterCouldNotAddBlock(block_hash) from exc

    return {
        'result': '{} blocks added'.format(add_count),
        'lastAddedBlockHash': block_hash,
        'sequencerStartedFrom': max_sequenced_block_id
    }
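
Example 3 speeds up the initial accumulation by scheduling extra entry points: when the chain is longer than 100 blocks, it splits the block range at the interior quarter marks and queues an accumulate_block_recursive task for each. A minimal sketch of that split, with a hypothetical entry_points helper:

def entry_points(block_nr, parts=4):
    """Interior block numbers at which additional accumulate tasks would start."""
    if block_nr <= 100:
        return []                                    # short chain: a single walk is enough
    step = block_nr // parts
    return list(range(0, block_nr, step))[1:-1]      # drop block 0 and the last quarter mark

# e.g. entry_points(1000) -> [250, 500]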