def _is_schema_loaded(cls):
    """Check whether the db schema has been initialized (any tables exist)."""
    engine = db_engine()
    if engine == 'mysql':
        return bool(query_one('SHOW TABLES'))
    if engine == 'postgresql':
        sql = """ SELECT 1 FROM pg_catalog.pg_tables WHERE schemaname = 'public' """
        return bool(query_one(sql))
    raise Exception("unknown db engine %s" % engine)
def get_id_and_depth(cls, author, permlink):
    """Return (id, depth) for a post; (None, -1) when the post is unknown."""
    post_id = cls.get_id(author, permlink)
    if not post_id:
        return (None, -1)
    post_depth = query_one("SELECT depth FROM hive_posts WHERE id = :id",
                           id=post_id)
    return (post_id, post_depth)
async def payouts_last_24h():
    """Sum of completed payouts in the last 24 hours.

    Returns 0.0 when no posts were paid out in the window: SUM() over
    zero rows yields SQL NULL, which query_one surfaces as None, and
    the original `float(None)` raised TypeError.
    """
    sql = """
      SELECT SUM(payout) FROM hive_posts_cache WHERE is_paidout = '1'
         AND payout_at > (NOW() AT TIME ZONE 'utc') - INTERVAL '24 HOUR'
    """
    total = query_one(sql)
    # TODO: return Decimal instead of lossy float
    return float(total) if total is not None else 0.0
def run():
    """Entry point: create schema if missing, sync blocks, then follow head."""
    # if tables not created, do so now
    # NOTE(review): 'SHOW TABLES' is MySQL-specific — verify against db_engine()
    if not query_row('SHOW TABLES'):
        print("[INIT] No tables found. Initializing db...")
        setup()

    #TODO: if initial sync is interrupted, cache never rebuilt
    #TODO: do not build partial feed_cache during init_sync
    # if this is the initial sync, batch updates until very end
    is_initial_sync = not query_one("SELECT 1 FROM hive_posts_cache LIMIT 1")

    if is_initial_sync:
        print("[INIT] *** Initial sync ***")
    else:
        # perform cleanup in case process did not exit cleanly
        cache_missing_posts()

    # fast block sync strategies
    sync_from_checkpoints(is_initial_sync)
    sync_from_steemd(is_initial_sync)

    if is_initial_sync:
        print("[INIT] *** Initial sync complete. Rebuilding cache. ***")
        cache_missing_posts()
        rebuild_feed_cache()

    # initialization complete. follow head blocks
    listen_steemd()
def get_post_id(author, permlink):
    """Get post_id from hive db."""
    sql = "SELECT id FROM hive_posts WHERE author = :a AND permlink = :p"
    post_id = query_one(sql, a=author, p=permlink)
    if post_id:
        return post_id
    raise Exception("post not found: %s/%s" % (author, permlink))
def register_posts(ops, date):
    """Insert new posts into hive_posts; edits to existing posts are skipped.

    The original interpolated author/permlink directly into SQL with `%`,
    allowing SQL injection (a permlink containing a quote would at minimum
    break the statement). All values are now bound parameters, matching the
    parameterized style used elsewhere in this file.

    :param ops: iterable of comment-op dicts (author, permlink, parent_*)
    :param date: block timestamp used as created_at
    """
    for op in ops:
        is_edit = query_one(
            "SELECT 1 FROM hive_posts WHERE author = :a AND permlink = :p",
            a=op['author'], p=op['permlink'])
        if is_edit:
            continue  # ignore edits to posts

        # this method needs to perform auth checking e.g. is op.author
        # authorized to post in op.community?
        community_or_blog = create_post_as(op) or op['author']

        if op['parent_author'] == '':
            # top-level post: no parent; category comes from parent_permlink
            parent_id = None
            depth = 0
            category = op['parent_permlink']
        else:
            # comment: inherit category from parent, nest one level deeper
            parent_data = first(query(
                "SELECT id, depth, category FROM hive_posts "
                "WHERE author = :a AND permlink = :p",
                a=op['parent_author'], p=op['parent_permlink']))
            parent_id, parent_depth, category = parent_data
            depth = parent_depth + 1

        # bound NULL handling: passing parent_id=None replaces the old
        # string-spliced `parent_id or 'NULL'` trick
        query(
            "INSERT INTO hive_posts (parent_id, author, permlink, category, "
            "community, depth, created_at) VALUES (:parent_id, :author, "
            ":permlink, :category, :community, :depth, :date)",
            parent_id=parent_id, author=op['author'], permlink=op['permlink'],
            category=category, community=community_or_blog, depth=depth,
            date=date)
def run():
    """Entry point: set up schema if needed, sync blocks, then follow head."""
    if db_needs_setup():
        print("[INIT] Initializing db...")
        setup()

    #TODO: if initial sync is interrupted, cache never rebuilt
    #TODO: do not build partial feed_cache during init_sync
    # if this is the initial sync, batch updates until very end
    is_initial_sync = not query_one("SELECT 1 FROM hive_posts_cache LIMIT 1")

    if is_initial_sync:
        print("[INIT] *** Initial sync. db_last_block: %d ***" % db_last_block())
    else:
        # perform cleanup in case process did not exit cleanly
        cache_missing_posts()

    # prefetch id->name memory map
    Accounts.load_ids()

    # fast block sync strategies
    sync_from_checkpoints(is_initial_sync)
    sync_from_steemd(is_initial_sync)

    # refresh account cache/rankings after the bulk sync completes
    Accounts.cache_old()
    Accounts.update_ranks()

    if is_initial_sync:
        print("[INIT] *** Initial sync complete. Rebuilding cache. ***")
        cache_missing_posts()
        rebuild_feed_cache()

    # initialization complete. follow head blocks
    listen_steemd()
def _check_migrations(cls): cls._ver = query_one("SELECT db_version FROM hive_state LIMIT 1") #assert cls._ver, 'could not load state record' if cls._ver is None: query(""" INSERT INTO hive_state (block_num, db_version, steem_per_mvest, usd_per_steem, sbd_per_steem, dgpo) VALUES (0, 1, 0, 0, 0, '') """) cls._ver = 1 if cls._ver == 0: cls._set_schema_ver(1) if cls._ver == 1: query( "ALTER TABLE hive_posts ALTER COLUMN category SET DEFAULT ''") cls._set_schema_ver(2) if cls._ver == 2: cols = ['steem_per_mvest', 'usd_per_steem', 'sbd_per_steem'] for col in cols: query( "ALTER TABLE hive_state ALTER COLUMN %s TYPE numeric(8,3)" % col) cls._set_schema_ver(3)
async def payouts_total():
    """All-time payout sum: memoized historical total plus recent payouts.

    Fixes: SUM() over zero rows yields NULL/None, which crashed the
    original `precalc_sum + None`; the date is now a bound parameter
    instead of being %-spliced into the SQL.
    """
    # memoized historical sum. To update:
    # SELECT SUM(payout) FROM hive_posts_cache
    # WHERE is_paidout = 1 AND payout_at <= precalc_date
    precalc_date = '2017-08-30 00:00:00'
    precalc_sum = Decimal('19358777.541')

    # sum all payouts since `precalc_date`
    sql = """
      SELECT SUM(payout) FROM hive_posts_cache
       WHERE is_paidout = '1' AND payout_at > :date
    """
    recent = query_one(sql, date=precalc_date) or Decimal(0)
    #TODO: return Decimal instead of lossy float
    return float(precalc_sum + recent)
def cache_missing_posts():
    """Cache any posts present in hive_posts but absent from hive_posts_cache."""
    # cached posts inserted sequentially, so just compare MAX(id)'s
    sql = ("SELECT (SELECT IFNULL(MAX(id), 0) FROM hive_posts) - "
           "(SELECT IFNULL(MAX(post_id), 0) FROM hive_posts_cache)")
    missing_count = query_one(sql)
    print("[INIT] Found {} missing post cache entries".format(missing_count))
    if not missing_count:
        return

    # process in batches of 1m posts
    while True:
        batch = select_missing_posts(1e6)
        if not batch:
            break
        update_posts_batch(batch, get_adapter())
def get_user_role(account: str, community: str) -> str:
    """Resolve an account's role within a community.

    Returns one of: 'owner', 'admin', 'moderator', 'muted', 'member', 'guest'.

    Fixes in this revision:
    - the original concatenated "... FROM hive_members" "WHERE ..." without a
      separating space, producing `hive_membersWHERE` — invalid SQL;
    - values were %-interpolated into the query (SQL injection); now bound;
    - a missing membership row returned None and crashed on subscripting;
      it now falls through to 'guest'.
    """
    if account == community:
        return 'owner'

    roles = query_one(
        "SELECT is_admin, is_mod, is_approved, is_muted "
        "FROM hive_members "
        "WHERE community = :community AND account = :account LIMIT 1",
        community=community, account=account)

    if not roles:
        return 'guest'

    # todo muted precedes member role?
    # return highest role first
    if roles['is_admin']:
        return 'admin'
    elif roles['is_mod']:
        return 'moderator'
    elif roles['is_muted']:
        return 'muted'
    elif roles['is_approved']:
        return 'member'
    return 'guest'
def _get_props_lite():
    """Return a minimal version of get_dynamic_global_properties data."""
    raw = json.loads(query_one("SELECT dgpo FROM hive_state"))

    # convert NAI amounts to legacy
    for key in ('virtual_supply', 'current_supply', 'current_sbd_supply',
                'pending_rewarded_vesting_steem',
                'pending_rewarded_vesting_shares',
                'total_vesting_fund_steem', 'total_vesting_shares'):
        if key in raw:
            raw[key] = legacy_amount(raw[key])

    return dict(
        time=raw['time'], #*
        sbd_print_rate=raw['sbd_print_rate'],
        sbd_interest_rate=raw['sbd_interest_rate'],
        head_block_number=raw['head_block_number'], #*
        total_vesting_shares=raw['total_vesting_shares'],
        total_vesting_fund_steem=raw['total_vesting_fund_steem'],
        last_irreversible_block_num=raw['last_irreversible_block_num'], #*
    )
def get_id(cls, author, permlink):
    """Look up a post id, consulting the in-memory LRU cache first."""
    url = author + '/' + permlink

    if url in cls._ids:
        cls._hits += 1
        # pop + reinsert marks the entry as most-recently-used
        post_id = cls._ids.pop(url)
        cls._ids[url] = post_id
    else:
        cls._miss += 1
        post_id = query_one(
            """SELECT id FROM hive_posts WHERE author = :a AND permlink = :p""",
            a=author, p=permlink)
        if post_id:
            cls._set_id(url, post_id)

    # cache stats -- report once every 10k lookups
    total = cls._hits + cls._miss
    if total % 10000 == 0:
        print("[DEBUG] post.id lookups: %d, hits: %d (%.1f%%), entries: %d"
              % (total, cls._hits, 100.0 * cls._hits / total, len(cls._ids)))

    return post_id
def _get_feed_price():
    """Build a steemd-style price ratio from the stored feed price."""
    usd_per_steem = query_one("SELECT usd_per_steem FROM hive_state")
    return {"base": "%.3f SBD" % usd_per_steem,
            "quote": "1.000 STEEM"}
def _get_account_id(name):
    """Resolve an account name to its hive_accounts id; raise if unknown."""
    account_id = query_one("SELECT id FROM hive_accounts WHERE name = :n",
                           n=name)
    if account_id:
        return account_id
    raise Exception("invalid account `%s`" % name)
def _get_feed_price():
    """Get a steemd-style ratio object representing feed price."""
    sql = "SELECT usd_per_steem FROM hive_state"
    price = query_one(sql)
    return {"base": "%.3f SBD" % price,
            "quote": "1.000 STEEM"}
def _get_props_lite():
    """Return the parsed dgpo blob from hive_state."""
    # TODO: trim this response; really only need: total_vesting_fund_steem,
    # total_vesting_shares, sbd_interest_rate
    raw = query_one("SELECT dgpo FROM hive_state")
    return json.loads(raw)
def _get_account_id(name):
    """Get account id from hive db.

    Validation now raises explicitly instead of using `assert`, which is
    stripped when Python runs with -O; this matches the raising style of
    the file's other account-lookup helpers. Messages are preserved.
    """
    if not name:
        raise Exception('no account name specified')
    _id = query_one("SELECT id FROM hive_accounts WHERE name = :n", n=name)
    if not _id:
        raise Exception("account `%s` not found" % name)
    return _id
def register(cls, ops, block_date):
    """Process incoming post ops: insert new posts, reinstate deleted ones,
    and silently skip edits to live posts.

    :param ops: iterable of comment-op dicts (author, permlink, parent_*)
    :param block_date: block timestamp used as created_at
    """
    from hive.indexer.community import is_community_post_valid

    for op in ops:
        sql = ("SELECT id, is_deleted FROM hive_posts "
               "WHERE author = :a AND permlink = :p")
        ret = query_row(sql, a=op['author'], p=op['permlink'])
        pid = None
        if not ret:
            # post does not exist, go ahead and process it
            pass
        elif not ret[1]:
            # post exists and is not deleted, thus it's an edit. ignore.
            continue
        else:
            # post exists but was deleted. time to reinstate.
            pid = ret[0]

        # set parent & inherited attributes
        if op['parent_author'] == '':
            # top-level post: category comes straight from parent_permlink
            parent_id = None
            depth = 0
            category = op['parent_permlink']
            community = cls._get_op_community(op) or op['author']
        else:
            # comment: inherit category/community from parent, depth + 1
            parent_data = query_row(
                "SELECT id, depth, category, community FROM hive_posts WHERE author = :a "
                "AND permlink = :p", a=op['parent_author'],
                p=op['parent_permlink'])
            parent_id, parent_depth, category, community = parent_data
            depth = parent_depth + 1

        # community must be an existing account
        if not Accounts.exists(community):
            community = op['author']

        # validated community; will return None if invalid & defaults to author.
        is_valid = is_community_post_valid(community, op)
        if not is_valid:
            print("Invalid post @{}/{} in @{}".format(
                op['author'], op['permlink'], community))

        # if we're reusing a previously-deleted post (rare!), update it
        if pid:
            query(
                "UPDATE hive_posts SET is_valid = :is_valid, is_deleted = '0', parent_id = :parent_id, category = :category, community = :community, depth = :depth WHERE id = :id",
                is_valid=is_valid, parent_id=parent_id, category=category,
                community=community, depth=depth, id=pid)
        else:
            sql = """
            INSERT INTO hive_posts (is_valid, parent_id, author, permlink,
                                    category, community, depth, created_at)
            VALUES (:is_valid, :parent_id, :author, :permlink,
                    :category, :community, :depth, :date)
            """
            query(sql, is_valid=is_valid, parent_id=parent_id,
                  author=op['author'], permlink=op['permlink'],
                  category=category, community=community, depth=depth,
                  date=block_date)
            # fetch the id we just inserted
            pid = query_one(
                "SELECT id FROM hive_posts WHERE author = :a AND "
                "permlink = :p", a=op['author'], p=op['permlink'])

        # add top-level posts to feed cache
        # NOTE(review): this tests parent_permlink, but the top-level branch
        # above tests parent_author == '' (top-level posts can carry a
        # non-empty parent_permlink as category) — verify this is intended
        if not op['parent_permlink']:
            sql = "INSERT INTO hive_feed_cache (account_id, post_id, created_at) VALUES (:account_id, :id, :created_at)"
            query(sql, account_id=Accounts.get_id(op['author']), id=pid,
                  created_at=block_date)
def get_post_id(author, permlink):
    """Given an author/permlink, retrieve the id from db."""
    return query_one(
        "SELECT id FROM hive_posts WHERE author = :a "
        "AND permlink = :p AND is_deleted = '0' LIMIT 1",
        a=author, p=permlink)
def _is_feed_cache_empty(cls):
    """True when hive_feed_cache contains no rows at all."""
    any_row = query_one("SELECT 1 FROM hive_feed_cache LIMIT 1")
    return not any_row
def _get_account_id(name):
    """Get account id from hive db."""
    sql = "SELECT id FROM hive_accounts WHERE name = :n"
    account_id = query_one(sql, n=name)
    assert account_id, "invalid account `%s`" % name
    return account_id
def _get_account_id(name):
    """Look up the hive_accounts id for `name`."""
    sql = "SELECT id FROM hive_accounts WHERE name = :n"
    return query_one(sql, n=name)
def is_community(name):
    """Check if named community exists."""
    sql = "SELECT 1 FROM hive_communities WHERE name = :name"
    found = query_one(sql, name=name)
    return bool(found)
def get_account_id(name):
    """Return the account id for a syntactically valid name, else None."""
    if not is_valid_account_name(name):
        return None
    return query_one("SELECT id FROM hive_accounts "
                     "WHERE name = :n LIMIT 1", n=name)
def get_community_privacy(community: str) -> str:
    """Load community privacy level.

    Fixes two defects: the original wrapped the value in double quotes,
    which standard/Postgres SQL parses as an *identifier* rather than a
    string constant, and it %-interpolated user input into the query
    (SQL injection). The name is now a bound parameter.
    """
    type_id = query_one(
        'SELECT type_id from hive_communities WHERE name = :name',
        name=community)
    return PRIVACY_MAP.get(type_id)
def head_num(cls):
    """Get hive's head block number."""
    num = query_one("SELECT num FROM hive_blocks ORDER BY num DESC LIMIT 1")
    return num or 0
def _get_post_id(author, permlink):
    """Get post_id from hive db."""
    return query_one(
        "SELECT id FROM hive_posts WHERE author = :a AND permlink = :p",
        a=author, p=permlink)
def head_date(cls):
    """Get hive's head block date."""
    created = query_one(
        "SELECT created_at FROM hive_blocks ORDER BY num DESC LIMIT 1")
    return str(created or '')
def db_last_block():
    """Return the highest synced block number, or 0 for an empty db."""
    last = query_one("SELECT MAX(num) FROM hive_blocks")
    return last if last else 0