def _process_notify(cls, account, op_json, block_date):
    """Handle legacy 'follow' plugin ops (follow/mute/clear, reblog)

    mark_read {date: {type: 'date'}}

    Any validation failure raises AssertionError, which is caught and
    logged (bad ops are ignored, never fatal).
    """
    try:
        command, payload = valid_op_json(op_json)
        # BUG FIX: ('setLastRead') is just the string 'setLastRead' --
        # a one-element tuple needs a trailing comma. Without it,
        # membership tests against `valid` degrade to substring checks.
        valid_command(command, valid=('setLastRead',))
        if command == 'setLastRead':
            valid_keys(payload, optional=['date'])
            explicit_date = payload.get('date', None)
            if explicit_date is None:
                # no date supplied: fall back to the head block time
                date = block_date
                log.info("setLastRead op: `%s' uses implicit head block time: `%s'", op_json, block_date)
            else:
                date = valid_date(explicit_date)
                # clamp future-dated reads to the head block time
                if date > block_date:
                    log.warning("setLastRead::date: `%s' exceeds head block time. Correcting to head block time: `%s'", date, block_date)
                    date = block_date
            Notify.set_lastread(account, date)
    except AssertionError as e:
        log.warning("notify op fail: %s in %s", e, op_json)
def _notifs(cls, post, pid, level, payout):
    """Generate reply, mention, and vote notifications for a cached post.

    :param post: post row/dict; reads author, parent_author, body,
                 permlink, depth, last_update, net_rshares, active_votes
    :param pid: post id used for all Notify records
    :param level: 'insert' (new post) or 'update' (edit)
    :param payout: pending payout amount used to weight vote notifs
    """
    # pylint: disable=too-many-locals,too-many-branches
    author = post['author']
    author_id = Accounts.get_id(author)
    parent_author = post['parent_author']
    date = post['last_update']

    # reply notif -- only on first insert, and never for self-replies
    if level == 'insert' and parent_author and parent_author != author:
        # globally-muted authors never trigger reply notifications
        irredeemable = parent_author in Mutes.all()
        parent_author_id = Accounts.get_id(parent_author)
        if not irredeemable and not cls._muted(parent_author_id, author_id):
            # depth 1 is a direct reply to a root post; deeper is a comment reply
            ntype = 'reply' if post['depth'] == 1 else 'reply_comment'
            Notify(ntype, src_id=author_id, dst_id=parent_author_id,
                   score=Accounts.default_score(author), post_id=pid,
                   when=date).write()

    # mentions notif -- fires on both inserts and edits
    if level in ('insert', 'update'):
        # only accounts that actually exist; never the author or parent author
        accounts = set(filter(Accounts.exists, mentions(post['body'])))
        accounts -= {author, parent_author}
        score = Accounts.default_score(author)
        # mention cap scales with the author's reputation score
        if score < 30: max_mentions = 5
        elif score < 60: max_mentions = 10
        else: max_mentions = 25
        if len(accounts) <= max_mentions:
            # each extra mention lowers the notif score (2 points per extra,
            # capped so the result never goes negative)
            penalty = min([score, 2 * (len(accounts) - 1)])
            for mention in accounts:
                mention_id = Accounts.get_id(mention)
                # skip repeat mentions on the same post and muted pairs
                if (not cls._mentioned(pid, mention_id)
                        and not cls._muted(mention_id, author_id)):
                    Notify('mention', src_id=author_id,
                           dst_id=mention_id, post_id=pid,
                           when=date, score=(score - penalty)).write()
        else:
            # too many mentions: treat as spammy and emit none
            url = '@%s/%s' % (author, post['permlink'])
            log.info("skip %d mentions in %s", len(accounts), url)

    # votes notif -- only for posts with pending tracked votes
    url = post['author'] + '/' + post['permlink']
    if url in cls._votes:
        # pop the pending voter set for this post
        voters = cls._votes[url]
        del cls._votes[url]
        net = float(post['net_rshares'])
        # dollars-per-rshare ratio; zero when the post has no net rshares
        ratio = float(payout) / net if net else 0
        for vote in post['active_votes']:
            rshares = int(vote['rshares'])
            # ignore voters we aren't tracking and dust votes (< 10e9 rshares)
            if vote['voter'] not in voters or rshares < 10e9:
                continue
            # contribution in thousandths of a dollar
            contrib = int(1000 * ratio * rshares)
            if contrib < 20:
                continue # < $0.020
            voter_id = Accounts.get_id(vote['voter'])
            # only one vote notif per (post, author, voter) triple
            if not cls._voted(pid, author_id, voter_id):
                # score grows 25 points per decimal digit of contrib, capped at 100
                score = min(100, (len(str(contrib)) - 1) * 25) # $1 = 75
                payload = "$%.3f" % (contrib / 1000)
                Notify('vote', src_id=voter_id, dst_id=author_id,
                       when=vote['time'], post_id=pid,
                       score=score, payload=payload).write()
def close_own_db_access(cls):
    """Release the private DB access previously set up for every
    dependent indexer module."""
    dependents = (PostDataCache, Reputations, Votes, Follow, Posts,
                  Reblog, Notify, Accounts, PayoutStats, Mentions)
    for module in dependents:
        module.close_own_db_access()
def setup_own_db_access(cls, sharedDbAdapter):
    """Give every dependent indexer module its own DB access, derived
    from the shared adapter and tagged with the module's name."""
    tagged_modules = ((PostDataCache, "PostDataCache"),
                      (Reputations, "Reputations"),
                      (Votes, "Votes"),
                      (Follow, "Follow"),
                      (Posts, "Posts"),
                      (Reblog, "Reblog"),
                      (Notify, "Notify"),
                      (Accounts, "Accounts"),
                      (PayoutStats, "PayoutStats"),
                      (Mentions, "Mentions"))
    for module, tag in tagged_modules:
        module.setup_own_db_access(sharedDbAdapter, tag)
def _process_notify(cls, account, op_json, block_date):
    """Handle legacy 'follow' plugin ops (follow/mute/clear, reblog)

    mark_read {date: {type: 'date'}}

    Invalid ops raise AssertionError internally and are logged, not fatal.
    """
    try:
        command, payload = valid_op_json(op_json)
        # BUG FIX: ('setLastRead') is a parenthesized string, not a tuple;
        # the trailing comma is required so `valid` is a real one-tuple and
        # membership checks don't degrade to substring checks.
        valid_command(command, valid=('setLastRead',))
        if command == 'setLastRead':
            valid_keys(payload, required=['date'])
            date = valid_date(payload['date'])
            # reject future-dated reads (caught below as a failed op)
            assert date <= block_date
            Notify.set_lastread(account, date)
    except AssertionError as e:
        log.warning("notify op fail: %s in %s", e, op_json)
def validate(self, raw_op):
    """Pre-processing and validation of custom_json payload."""
    log.info("validating @%s op %s", self.actor, raw_op)
    try:
        # structure first, then schema, then permissions -- any failure
        # raises AssertionError and leaves self.valid False
        self._validate_raw_op(raw_op)
        self.action, self.op = raw_op[0], raw_op[1]
        self.actor_id = Accounts.get_id(self.actor)

        self._read_schema()
        self._validate_permissions()

        self.valid = True
    except AssertionError as err:
        # surface the failure back to the actor as an 'error' notification
        Notify('error', dst_id=self.actor_id,
               when=self.date, payload=str(err)).write()
    return self.valid
def register(cls, names, block_date):
    """Block processing: hooks into new account registration.

    `Accounts` calls this method with any newly registered names. This
    method checks for any valid community names and inserts them.
    """
    # full pattern would be r'^hive-[123]\d{4,6}$'; only type-1 is live
    for name in names:
        if not re.match(r'^hive-[1]\d{4,6}$', name):
            continue

        community_type = int(name[5])
        community_id = Accounts.get_id(name)

        # create the community record itself
        DB.query("""INSERT INTO hive_communities (id, name, type_id, created_at)
                    VALUES (:id, :name, :type_id, :date)""",
                 id=community_id, name=name,
                 type_id=community_type, date=block_date)

        # the community account starts out as its own owner
        DB.query("""INSERT INTO hive_roles (community_id, account_id, role_id, created_at)
                    VALUES (:community_id, :account_id, :role_id, :date)""",
                 community_id=community_id, account_id=community_id,
                 role_id=Role.owner.value, date=block_date)

        Notify('new_community', src_id=None, dst_id=community_id,
               when=block_date, community_id=community_id).write()
def follow_op(cls, account, op_json, date):
    """Process an incoming follow op."""
    op = cls._validated_op(account, op_json, date)
    if not op:
        return

    # delta check: nothing to do if stored state already matches
    new_state = op['state']
    old_state = cls._get_follow_db_state(op['flr'], op['flg'])
    if new_state == (old_state or 0):
        return

    # first record for this pair -> insert; otherwise update in place
    if old_state is None:
        sql = """INSERT INTO hive_follows (follower, following, created_at, state)
                 VALUES (:flr, :flg, :at, :state)"""
    else:
        sql = """UPDATE hive_follows SET state = :state
                 WHERE follower = :flr AND following = :flg"""
    DB.query(sql, **op)

    # live-sync only: maintain counters and emit the follow notification
    if DbState.is_initial_sync():
        return
    if new_state == 1:
        Follow.follow(op['flr'], op['flg'])
        if old_state is None:
            score = Accounts.default_score(op_json['follower'])
            Notify('follow', src_id=op['flr'], dst_id=op['flg'],
                   when=op['at'], score=score).write()
    if old_state == 1:
        Follow.unfollow(op['flr'], op['flg'])
def check_ad_payment(cls, op, date, num):
    """Triggers an adFund operation for validated Native Ads transfers.

    :param op: transfer operation (reads memo, amount, from, to)
    :param date: block timestamp used for the error notification
    :param num: block number forwarded to NativeAdOp
    """
    # BUG FIX: must exist before the try block -- the original assigned it
    # only after `assert _post_id`, so a failed assertion made the except
    # handler raise NameError instead of writing the error notification.
    _account_id = None
    memo = op['memo']
    try:
        payment = cls._valid_payment(memo)
        if payment:
            amount, token = parse_amount(op['amount'], bypass_nai_lookup=True)
            params = {
                'amount': amount,
                'token': token,
                'to_account': op['to'],
                'community_name': payment['community_name']
            }
            # local imports to avoid circular dependencies at module load
            from hive.indexer.accounts import Accounts
            from hive.indexer.posts import Posts
            _post_id = Posts.get_id(op['from'], payment['permlink'])
            assert _post_id, 'post not found: @%s/%s' % (
                op['from'], payment['permlink'])
            _account_id = Accounts.get_id(op['from'])
            _community_id = payment['community_id']
            ad_op = NativeAdOp(_community_id, _post_id, _account_id, {
                'action': 'adFund',
                'params': params
            }, num)
            ad_op.validate_op()
            ad_op.process()
    except AssertionError as e:
        payload = str(e)
        Notify('error', dst_id=_account_id, when=date, payload=payload).write()
def insert(cls, op, date):
    """Inserts new post records.

    Builds the row via _build_post, inserts it, captures the generated
    id, and (outside initial sync) updates caches and emits an error
    notification when community validation flagged the post.
    """
    sql = """INSERT INTO hive_posts (is_valid, is_muted, parent_id, author, permlink, category, community_id, depth, created_at) VALUES (:is_valid, :is_muted, :parent_id, :author, :permlink, :category, :community_id, :depth, :date)"""
    # piggyback a currval() select on the same statement to fetch the
    # id just allocated by the hive_posts serial sequence
    sql += ";SELECT currval(pg_get_serial_sequence('hive_posts','id'))"
    post = cls._build_post(op, date)
    result = DB.query(sql, **post)
    post['id'] = int(list(result)[0][0])
    # register the author/permlink -> id mapping for later lookups
    cls._set_id(op['author'] + '/' + op['permlink'], post['id'])

    # cache/notification side effects are skipped during initial sync
    if not DbState.is_initial_sync():
        if post['error']:
            # community validation failed: notify the author
            author_id = Accounts.get_id(post['author'])
            Notify('error', dst_id=author_id, when=date, post_id=post['id'], payload=post['error']).write()
        CachedPost.insert(op['author'], op['permlink'], post['id'])
        if op['parent_author']: # update parent's child count
            CachedPost.recount(op['parent_author'], op['parent_permlink'], post['parent_id'])
        cls._insert_feed_cache(post)
def reblog(cls, account, op_json, block_date):
    """Handle legacy 'reblog' op

    Validates the payload, then either deletes an existing reblog
    (when op_json['delete'] == 'delete') or records a new one, keeping
    the feed cache in step and notifying the post author.
    """
    if ('account' not in op_json
            or 'author' not in op_json
            or 'permlink' not in op_json):
        return
    blogger = op_json['account']
    author = op_json['author']
    permlink = op_json['permlink']

    if blogger != account:
        return  # impersonation
    if not all(map(Accounts.exists, [author, blogger])):
        return

    post_id, depth = Posts.get_id_and_depth(author, permlink)

    if depth > 0:
        return  # prevent comment reblogs

    if not post_id:
        log.debug("reblog: post not found: %s/%s", author, permlink)
        return

    author_id = Accounts.get_id(author)
    blogger_id = Accounts.get_id(blogger)

    if 'delete' in op_json and op_json['delete'] == 'delete':
        # BUG FIX: PostgreSQL's DELETE has no LIMIT clause, so the
        # original statement was a syntax error; the (account, post_id)
        # pair is unique (see the ON CONFLICT target below), so the
        # clause was redundant anyway.
        DB.query(
            "DELETE FROM hive_reblogs WHERE account = :a AND "
            "post_id = :pid", a=blogger, pid=post_id)
        if not DbState.is_initial_sync():
            FeedCache.delete(post_id, blogger_id)
    else:
        sql = ("INSERT INTO hive_reblogs (account, post_id, created_at) "
               "VALUES (:a, :pid, :date) ON CONFLICT (account, post_id) DO NOTHING")
        DB.query(sql, a=blogger, pid=post_id, date=block_date)
        if not DbState.is_initial_sync():
            FeedCache.insert(post_id, blogger_id, block_date)
            Notify('reblog', src_id=blogger_id, dst_id=author_id,
                   post_id=post_id, when=block_date,
                   score=Accounts.default_score(blogger)).write()
def _notify(self, op, **kwargs):
    """Build and persist a notification of type `op` from the actor.

    Score defaults to 35; when the notification targets an account
    directly (no post involved) and that account is not subscribed,
    the score drops to 15.
    """
    dst_id = None
    score = 35
    if self.account_id and not self.post_id:
        dst_id = self.account_id
        if not self._subscribed(self.account_id):
            score = 15

    # BUG FIX: the Notify record was constructed but never persisted;
    # every other Notify call site in this file chains .write().
    Notify(block_num=self.block_num, type_id=op, src_id=self.actor_id,
           dst_id=dst_id, post_id=self.post_id, when=self.date,
           community_id=self.community_id, score=score, **kwargs).write()
def undelete(cls, op, date, pid):
    """Re-allocates an existing record flagged as deleted.

    Clears the deleted/pinned flags and refreshes the row's validity,
    parent, category, community and depth from the rebuilt post; then
    (outside initial sync) refreshes caches and notifies the author if
    community validation produced an error.
    """
    sql = """UPDATE hive_posts SET is_valid = :is_valid, is_muted = :is_muted, is_deleted = '0', is_pinned = '0', parent_id = :parent_id, category = :category, community_id = :community_id, depth = :depth WHERE id = :id"""
    post = cls._build_post(op, date, pid)
    DB.query(sql, **post)

    # cache/notification side effects are skipped during initial sync
    if not DbState.is_initial_sync():
        if post['error']:
            # community validation failed: notify the author
            author_id = Accounts.get_id(post['author'])
            Notify('error', dst_id=author_id, when=date, post_id=post['id'], payload=post['error']).write()
        CachedPost.undelete(pid, post['author'], post['permlink'], post['category'])
        cls._insert_feed_cache(post)
def _notify(self, op, **kwargs):
    """Create and persist a notification of type `op` from the actor."""
    if DbState.is_initial_sync():
        # TODO: set start date for notifs?
        # TODO: address other callers
        return

    # base score 35; a direct-to-account notif (no post involved) for a
    # non-subscribed account is demoted to 15
    score, dst_id = 35, None
    if self.account_id and not self.post_id:
        dst_id = self.account_id
        if not self._subscribed(self.account_id):
            score = 15

    Notify(op, src_id=self.actor_id, dst_id=dst_id, post_id=self.post_id,
           when=self.date, community_id=self.community_id,
           score=score, **kwargs).write()
def validate(self, raw_op):
    """Pre-processing and validation of custom_json payload."""
    log.info("validating @%s op %s", self.actor, raw_op)
    try:
        # structure first, then schema, then permissions -- any failure
        # raises AssertionError and leaves self.valid False
        self._validate_raw_op(raw_op)
        self.action, self.op = raw_op[0], raw_op[1]
        self.actor_id = Accounts.get_id(self.actor)

        self._read_schema()
        self._validate_permissions()

        # native-ad actions get an extra validation pass
        if self.action in NATIVE_AD_ACTIONS:
            if self.block_num < NA_START_BLOCK:
                # ads not active yet; self.valid remains False
                return False
            self.native_ad = NativeAdOp(
                self.community_id, self.post_id, self.account_id,
                {'action': self.action, 'params': self.na_params},
                self.block_num)
            self.native_ad.validate_op()

        self.valid = True
    except AssertionError as err:
        # surface the failure back to the actor as an 'error' notification
        Notify('error', dst_id=self.actor_id,
               when=self.date, payload=str(err)).write()
    return self.valid
def comment_op(cls, op, block_date):
    """Register new/edited/undeleted posts; insert into feed cache.

    Parses json_metadata (tags, image), delegates row creation/update to
    the process_hive_post_operation() DB function, stores post content in
    PostDataCache, and (outside initial sync) notifies the author when
    community validation fails.
    """
    md = {}
    # At least one case where jsonMetadata was double-encoded: condenser#895
    # jsonMetadata = JSON.parse(jsonMetadata);
    try:
        md = loads(op['json_metadata'])
        if not isinstance(md, dict):
            md = {}
    except Exception:
        pass

    tags = []
    if md and 'tags' in md and isinstance(md['tags'], list):
        for tag in md['tags']:
            if tag and isinstance(tag, str):
                tags.append( tag ) # No escaping needed due to used sqlalchemy formatting features

    sql = """
        SELECT is_new_post, id, author_id, permlink_id, post_category, parent_id, community_id, is_valid, is_muted, depth
        FROM process_hive_post_operation((:author)::varchar, (:permlink)::varchar, (:parent_author)::varchar, (:parent_permlink)::varchar, (:date)::timestamp, (:community_support_start_block)::integer, (:block_num)::integer, (:tags)::VARCHAR[]);
        """

    row = DB.query_row(sql, author=op['author'], permlink=op['permlink'],
                       parent_author=op['parent_author'],
                       parent_permlink=op['parent_permlink'], date=block_date,
                       community_support_start_block=Community.start_block,
                       block_num=op['block_num'], tags=tags)

    if not row:
        log.error("Failed to process comment_op: {}".format(op))
        return

    result = dict(row)

    # TODO we need to enhance checking related community post validation and honor is_muted.
    error = cls._verify_post_against_community(op, result['community_id'],
                                               result['is_valid'],
                                               result['is_muted'])

    # pick the first image URL from metadata, if any, and sanitize it
    img_url = None
    if 'image' in md:
        img_url = md['image']
        if isinstance(img_url, list) and img_url:
            img_url = img_url[0]
    if img_url:
        img_url = safe_img_url(img_url)

    is_new_post = result['is_new_post']
    if is_new_post:
        # add content data to hive_post_data
        post_data = dict(title=op['title'] if op['title'] else '',
                         img_url=img_url if img_url else '',
                         body=op['body'] if op['body'] else '',
                         json=op['json_metadata'] if op['json_metadata'] else '')
    else:
        # edit case. Now we need to (potentially) apply patch to the post body.
        # empty new body means no body edit, not clear (same with other data)
        new_body = cls._merge_post_body(id=result['id'], new_body_def=op['body']) if op['body'] else None
        new_title = op['title'] if op['title'] else None
        new_json = op['json_metadata'] if op['json_metadata'] else None
        # when 'new_json' is not empty, 'img_url' should be overwritten even if it is itself empty
        new_img = img_url if img_url else '' if new_json else None
        post_data = dict(title=new_title, img_url=new_img,
                         body=new_body, json=new_json)

    # log.info("Adding author: {}  permlink: {}".format(op['author'], op['permlink']))
    PostDataCache.add_data(result['id'], post_data, is_new_post)

    if not DbState.is_initial_sync():
        if error:
            author_id = result['author_id']
            # BUG FIX: the Notify record was constructed but never
            # persisted; .write() added, matching every other call site.
            Notify(block_num=op['block_num'], type_id='error',
                   dst_id=author_id, when=block_date,
                   post_id=result['id'], payload=error).write()