def scrape_operations(mongo):
    """Fetch all operations (including virtual) from last known block forward."""
    indexer = Indexer(mongo)
    current_block = indexer.get_checkpoint('operations')
    log.info('\n> Fetching operations, starting with block %d...' % current_block)

    chain = Blockchain(mode="irreversible")
    for op in chain.history(start_block=current_block):
        # Persist the operation; duplicate inserts are expected after a
        # restart (we replay from the last checkpoint) and are ignored.
        with suppress(DuplicateKeyError):
            mongo.Operations.insert_one(
                compose(strip_dot_from_keys, json_expand, typify)(op))

        # First operation of a new block means the previous block is fully
        # stored, so checkpoint it (hence the -1) and log progress periodically.
        if op['block_num'] != current_block:
            current_block = op['block_num']
            indexer.set_checkpoint('operations', current_block - 1)

            if current_block % 10 == 0:
                log.info("Checkpoint: %s (%s)" % (
                    current_block,
                    chain.steem.hostname
                ))
def scrape_operations(mongo):
    """Fetch all operations from last known block forward.

    Streams irreversible operations into ``mongo.Operations``, materializes
    comments as they appear, and — once within ~1 day of the chain head —
    accumulates per-account update dicts that are flushed to a background
    worker every ``_batch_size`` blocks.

    :param mongo: project Mongo wrapper exposing the ``Operations`` collection.
    """
    settings = Settings(mongo)
    blockchain = Blockchain(mode="irreversible")
    last_block = settings.last_block()

    # handle batching
    _batch_size = 100
    _head_block_num = blockchain.get_current_block_num()
    batch_dicts = []

    history = blockchain.history(start_block=last_block)

    # The transform pipeline is loop-invariant; build it once instead of
    # re-composing it for every operation.
    transform = compose(strip_dot_from_keys, json_expand, typify)

    def custom_merge(*args):
        """Flatten merged values, dropping falsy entries and duplicates."""
        return list(set(filter(bool, flatten(args))))

    def schedule_batch(_batch_dicts):
        """Send a batch to background worker, and reset _dicts container"""
        _batch = merge_with(custom_merge, *_batch_dicts)
        if _batch:
            batch_update_async.delay(_batch)

    log.info('\n> Fetching operations, starting with block %d...' % last_block)
    for operation in history:
        # handle comments
        if operation['type'] in ['comment', 'delete_comment']:
            post_identifier = "@%s/%s" % (operation['author'],
                                          operation['permlink'])
            if operation['type'] == 'delete_comment':
                delete_comment(mongo, post_identifier)
            else:
                # upsert_comment may raise TypeError on malformed posts;
                # skip those rather than abort the scrape.
                with suppress(TypeError):
                    upsert_comment(
                        mongo,
                        '%s/%s' % (operation['author'],
                                   operation['permlink']))

        # if we're close to blockchain head, enable batching
        recent_blocks = 20 * 60 * 24 * 1  # 1 days worth of blocks
        if last_block > _head_block_num - recent_blocks:
            batch_dicts.append(parse_operation(operation))

        # insert operation; duplicates are expected on restart and ignored
        with suppress(DuplicateKeyError):
            mongo.Operations.insert_one(transform(operation))

        # if this is a new block, checkpoint it, and schedule batch processing
        if operation['block_num'] != last_block:
            # (debug print of last_block removed — it spammed stdout on
            # every single block)
            last_block = operation['block_num']
            # checkpoint the *previous* block: the current one may still be
            # partially consumed from the history stream
            settings.update_last_block(last_block - 1)

            if last_block % 10 == 0:
                _head_block_num = blockchain.get_current_block_num()

            if last_block % _batch_size == 0:
                schedule_batch(batch_dicts)
                batch_dicts = []

            if last_block % 100 == 0:
                log.info("#%s: (%s)" % (last_block,
                                        blockchain.steem.hostname))
'tipu', 'steemvote', 'originalworks', 'withsmn', 'echowhale', 'siditech', 'steemvoter', ] total_posts = 0 boosties = 0 total_claims = Amount('0.000 SBD') total_booster_share = Amount('0.000 SBD') for author_reward in blockchain.history(start_block=trail_24h_block, end_block=last_block, filter_by='author_reward'): author = author_reward['author'] permlink = author_reward['permlink'] sbd_payout = author_reward['sbd_payout'] steem_payout = author_reward['steem_payout'] vesting_payout = author_reward['vesting_payout'] identifier = (author + '/' + permlink) if not 're-' in permlink: post = Post(identifier) if post.is_main_post():