def reblog(cls, account, op_json, block_date):
    """Handle legacy 'reblog' op.

    Inserts or removes a hive_reblogs row for (account, post) and keeps
    the feed cache in sync once initial sync has completed. Silently
    ignores impersonated ops, unknown accounts, comment-level reblogs,
    and reblogs of posts we have no record of.
    """
    blogger = op_json['account']
    author = op_json['author']
    permlink = op_json['permlink']

    if blogger != account:
        return  # impersonation
    if not all(map(Accounts.exists, [author, blogger])):
        return

    post_id, depth = Posts.get_id_and_depth(author, permlink)
    if depth > 0:
        return  # prevent comment reblogs
    if not post_id:
        log.debug("reblog: post not found: %s/%s", author, permlink)
        return

    if op_json.get('delete') == 'delete':
        # FIX: the previous query appended `LIMIT 1`, which PostgreSQL does
        # not accept on DELETE (the ON CONFLICT clause below shows this is
        # a Postgres backend). (account, post_id) is unique per that
        # conflict target, so at most one row matches anyway.
        DB.query("DELETE FROM hive_reblogs WHERE account = :a AND "
                 "post_id = :pid", a=blogger, pid=post_id)
        if not DbState.is_initial_sync():
            FeedCache.delete(post_id, Accounts.get_id(blogger))
    else:
        sql = ("INSERT INTO hive_reblogs (account, post_id, created_at) "
               "VALUES (:a, :pid, :date) ON CONFLICT (account, post_id) DO NOTHING")
        DB.query(sql, a=blogger, pid=post_id, date=block_date)
        if not DbState.is_initial_sync():
            FeedCache.insert(post_id, Accounts.get_id(blogger), block_date)
def delete(cls, op):
    """Flag the post named by op['author']/op['permlink'] as deleted.

    Outside of initial sync, also evicts the post from the cached-post
    layer and (for top-level posts only) from the feed cache.
    """
    pid, depth = cls.get_id_and_depth(op['author'], op['permlink'])
    query("UPDATE hive_posts SET is_deleted = '1' WHERE id = :id", id=pid)

    if DbState.is_initial_sync():
        return  # caches are rebuilt wholesale after initial sync

    CachedPost.delete(pid, op['author'], op['permlink'])
    if depth == 0:
        FeedCache.delete(pid)
def initial(cls):
    """Run the one-time fast sync, then build the caches from scratch.

    Must only be called while DbState reports initial-sync mode.
    """
    assert DbState.is_initial_sync(), "already synced"

    # phase 1: bulk block ingestion (checkpoints first, then live node)
    print("[INIT] *** Initial fast sync ***")
    cls.from_checkpoints()
    cls.from_steemd(is_initial_sync=True)

    # phase 2: populate derived caches
    print("[INIT] *** Initial cache build ***")
    # TODO: disable indexes during this process
    CachedPost.recover_missing_posts()
    FeedCache.rebuild()
def initial(self):
    """Initial sync routine."""
    assert DbState.is_initial_sync(), "already synced"

    # bulk ingestion: checkpoint files first, then catch up from steemd
    log.info("[INIT] *** Initial fast sync ***")
    self.from_checkpoints()
    self.from_steemd(is_initial_sync=True)

    # derived-state build: post cache, feed cache, follow counts
    log.info("[INIT] *** Initial cache build ***")
    CachedPost.recover_missing_posts(self._steem)
    FeedCache.rebuild()
    Follow.force_recount()
def delete(cls, op):
    """Marks a post record as being deleted."""
    pid, depth = cls.get_id_and_depth(op['author'], op['permlink'])
    DB.query("UPDATE hive_posts SET is_deleted = '1' WHERE id = :id", id=pid)

    if DbState.is_initial_sync():
        return  # caches are rebuilt after initial sync completes

    CachedPost.delete(pid, op['author'], op['permlink'])
    if depth == 0:
        # TODO: also remove rows from hive_reblogs -- otherwise the feed
        # cache can end up re-populated with deleted posts in some cases
        FeedCache.delete(pid)
    else:
        # deleting a reply invalidates the parent's child count
        parent = cls._get_parent_by_child_id(pid)
        CachedPost.recount(parent['author'], parent['permlink'], parent['id'])
def run():
    """Entry point: bring state up to date, then sync and listen forever."""
    print("[HIVE] Welcome to hivemind")

    # verify/upgrade db schema and run startup checks
    DbState.initialize()

    # warm the id->name memory map
    Accounts.load_ids()

    if DbState.is_initial_sync():
        # first run: bulk ingest, then build caches from scratch
        print("[INIT] *** Initial fast sync ***")
        sync_from_checkpoints()
        sync_from_steemd()
        print("[INIT] *** Initial cache build ***")
        # todo: disable indexes during this process
        cache_missing_posts()
        FeedCache.rebuild()
        DbState.finish_initial_sync()
    else:
        # recover from fork
        Blocks.verify_head()
        # perform cleanup in case process did not exit cleanly
        cache_missing_posts()

    while True:
        sync_from_steemd()                              # catch up to irreversible block
        CachedPost.dirty_paidouts(Blocks.head_date())   # flag payout backlog
        CachedPost.flush(trx=True)
        listen_steemd()                                 # block-by-block live mode
def _insert_feed_cache(cls, post):
    """Insert the new post into feed cache if it's not a comment."""
    if post['depth']:
        return  # comments (depth > 0) never enter the feed cache
    FeedCache.insert(post['id'], Accounts.get_id(post['author']), post['date'])
def _insert_feed_cache(cls, post):
    """Insert the new post into the feed cache if it's a top-level post.

    Comments (post['depth'] > 0) are skipped; only depth-0 posts feed
    follower feeds.
    """
    if not post['depth']:
        account_id = Accounts.get_id(post['author'])
        FeedCache.insert(post['id'], account_id, post['date'])