def delete(cls, op):
    """Flag the post named by *op* as deleted and purge its cache entries.

    `op` is a delete_comment-style operation dict with 'author' and
    'permlink' keys. The row is soft-deleted (is_deleted flag); cache
    cleanup is skipped entirely during initial sync, when caches are
    rebuilt afterwards anyway.
    """
    post_id, depth = cls.get_id_and_depth(op['author'], op['permlink'])
    query("UPDATE hive_posts SET is_deleted = '1' WHERE id = :id", id=post_id)

    # Guard clause: no cache bookkeeping while the initial sync runs.
    if DbState.is_initial_sync():
        return

    CachedPost.delete(post_id, op['author'], op['permlink'])
    # Top-level posts (depth 0) also appear in the feed cache.
    if depth == 0:
        FeedCache.delete(post_id)
def delete(cls, op):
    """Marks a post record as being deleted.

    `op` is a delete_comment-style operation dict with 'author' and
    'permlink' keys. Performs a soft delete on the row, then — outside
    of initial sync — clears the post's cache entry and keeps derived
    data consistent (feed cache for root posts, parent child-count for
    replies).
    """
    post_id, depth = cls.get_id_and_depth(op['author'], op['permlink'])
    DB.query("UPDATE hive_posts SET is_deleted = '1' WHERE id = :id", id=post_id)

    # Guard clause: cache maintenance is skipped during initial sync.
    if DbState.is_initial_sync():
        return

    CachedPost.delete(post_id, op['author'], op['permlink'])
    if depth == 0:
        # TODO: also delete from hive_reblogs -- otherwise the feed cache
        # can sometimes get populated with deleted posts.
        FeedCache.delete(post_id)
    else:
        # Deleting a reply invalidates the parent's child count, so
        # force a recount of the parent post.
        parent = cls._get_parent_by_child_id(post_id)
        CachedPost.recount(parent['author'], parent['permlink'], parent['id'])
def audit_cache_deleted(db):
    """Scan all posts to check for extraneous cache entries.

    Walks the post id space in fixed-size batches looking for rows that
    are flagged deleted in `hive_posts` yet still have a row in
    `hive_posts_cache`, and evicts each such entry via CachedPost.delete.
    """
    last_id = _last_cached_post_id(db)
    batch_size = 1000000
    # Integer batch count covering ids 1..last_id (last batch may be partial).
    batch_count = int(last_id / batch_size) + 1
    log.info("audit_cache_deleted -- last id: %d, batches: %d", last_id, batch_count)

    sql = """SELECT hp.id, hp.author, hp.permlink
               FROM hive_posts hp
               JOIN hive_posts_cache hpc ON hp.id = hpc.post_id
              WHERE hp.id BETWEEN :lbound AND :ubound
                AND hp.is_deleted = True"""

    for batch in range(batch_count):
        lbound = batch * batch_size + 1
        ubound = (batch + 1) * batch_size
        stale_rows = db.query_all(sql, lbound=lbound, ubound=ubound)
        log.info("%d <= id <= %d: %d to delete", lbound, ubound, len(stale_rows))
        for row in stale_rows:
            CachedPost.delete(row['id'], row['author'], row['permlink'])