def generate_spotify_playlist(tracks, playlist_name, username):
    """Create a Spotify playlist from the given tracks.

    :param tracks: list of Track objects (each needs .artist and .track)
    :param playlist_name: name of playlist to create
    :param username: Spotify username
    """
    # Unauthenticated client is sufficient for search-only calls.
    sp = spotipy.Spotify()
    formatted_tracks = [u'artist:"{artist}" track:"{track}"'.format(artist=t.artist, track=t.track)
                        for t in tracks]
    search_res = [sp.search(q=t, type='track', limit=1) for t in formatted_tracks]
    # Keep the URI of the top hit for every query that returned any items.
    track_ids = [(first(r.get('tracks', {}).get('items', {})) or {}).get('uri')
                 for r in search_res if r.get('tracks', {}).get('items')]
    # NOTE(review): `scope` is not defined in this block -- presumably a
    # module-level constant; confirm.
    token = util.prompt_for_user_token(username, scope=scope)
    if token:
        # Re-create the client authenticated, for the write calls below.
        sp = spotipy.Spotify(auth=token)
        sp.trace = False
        playlist = sp.user_playlist_create(username, playlist_name)
        if playlist and playlist.get('id'):
            sp.user_playlist_add_tracks(username, playlist.get('id'), track_ids)
            print "boom!"
    else:
        print "Can't get token for", username
def generate_spotify_playlist(tracks, playlist_name, username):
    """Create a Spotify playlist from the given tracks.

    NOTE(review): this is token-for-token identical to the preceding
    generate_spotify_playlist definition -- likely a duplicated snippet.

    :param tracks: list of Track objects (each needs .artist and .track)
    :param playlist_name: name of playlist to create
    :param username: Spotify username
    """
    # Unauthenticated client is sufficient for search-only calls.
    sp = spotipy.Spotify()
    formatted_tracks = [
        u'artist:"{artist}" track:"{track}"'.format(artist=t.artist, track=t.track)
        for t in tracks
    ]
    search_res = [
        sp.search(q=t, type='track', limit=1) for t in formatted_tracks
    ]
    # Keep the URI of the top hit for every query that returned any items.
    track_ids = [(first(r.get('tracks', {}).get('items', {})) or {}).get('uri')
                 for r in search_res if r.get('tracks', {}).get('items')]
    # NOTE(review): `scope` is not defined in this block -- confirm it is a
    # module-level constant.
    token = util.prompt_for_user_token(username, scope=scope)
    if token:
        # Re-create the client authenticated, for the write calls below.
        sp = spotipy.Spotify(auth=token)
        sp.trace = False
        playlist = sp.user_playlist_create(username, playlist_name)
        if playlist and playlist.get('id'):
            sp.user_playlist_add_tracks(username, playlist.get('id'), track_ids)
            print "boom!"
    else:
        print "Can't get token for", username
def get_community(community_name):
    """Look up one community row by name.

    :param community_name: community account name
    :return: first matching row of hive_communities, or None if absent
    """
    # sqlalchemy:
    # q = select([hive_communities]).where(hive_communities.c.account == community_name).limit(1)
    # conn.execute(q).fetchall()
    # Bound parameter instead of %-interpolation: the original was open to
    # SQL injection via community_name (same :name style as other queries
    # in this file).
    return first(query(
        "SELECT * FROM hive_communities WHERE name = :name LIMIT 1",
        name=community_name))
def generate_spotify_playlist(tracks, playlist_name):
    """Create or update a public Spotify playlist containing `tracks`.

    :param tracks: iterable of Track objects (each needs .artist and .title)
    :param playlist_name: playlist to create, or to refill if it already exists

    NOTE(review): `username`, `client_id`, `client_secret` and
    `check_playlist_exists` are not defined in this block -- presumably
    module-level globals/helpers; confirm.
    """
    token = util.prompt_for_user_token(username, 'playlist-modify-public',
                                       client_id=client_id,
                                       client_secret=client_secret,
                                       redirect_uri='http://localhost/')
    spotify = spotipy.Spotify(auth=token)
    format_search = [f'artist:{t.artist} track:{t.title}' for t in tracks]
    search_res = [
        spotify.search(q=t, type='track', limit=1) for t in format_search
    ]
    # Keep the URI of the top hit for every query that returned any items.
    track_ids = [(first(r.get('tracks', {}).get('items', {})) or {}).get('uri')
                 for r in search_res if r.get('tracks', {}).get('items')]
    # Spotify caps playlist writes at 100 tracks per request.
    chunks = [track_ids[x:x + 100] for x in range(0, len(track_ids), 100)]
    user_playlists = spotify.user_playlists(username)
    if check_playlist_exists(playlist_name, user_playlists['items']):
        # Reuse the id of the existing playlist with this name.
        for p in user_playlists['items']:
            if p.get('name') == playlist_name:
                playlist_id = p.get('id')
    else:
        playlist = spotify.user_playlist_create(username, playlist_name)
        playlist_id = playlist.get('id')
    for chunk in chunks:
        # spotify.user_playlist_remove_all_occurrences_of_tracks(username, playlist_id, chunk)
        # NOTE(review): replace_tracks overwrites the whole playlist on each
        # iteration, so only the LAST chunk survives when there are more than
        # 100 tracks -- confirm whether later chunks should use add_tracks.
        spotify.user_playlist_replace_tracks(username, playlist_id, chunk)
def register_posts(ops, date):
    """Insert new posts into hive_posts, skipping edits of existing posts.

    :param ops: iterable of comment-operation dicts (author, permlink,
                parent_author, parent_permlink, ...)
    :param date: block timestamp used as created_at
    """
    for op in ops:
        # Bound parameters throughout: the original %-interpolation was open
        # to SQL injection via author/permlink.
        is_edit = query_one(
            "SELECT 1 FROM hive_posts WHERE author = :author AND permlink = :permlink",
            author=op['author'], permlink=op['permlink'])
        if is_edit:
            continue  # ignore edits to posts

        # this method needs to perform auth checking e.g. is op.author authorized to post in op.community?
        community_or_blog = create_post_as(op) or op['author']

        if op['parent_author'] == '':
            # top-level post: category is carried in parent_permlink by convention
            parent_id = None
            depth = 0
            category = op['parent_permlink']
        else:
            parent_data = first(query(
                "SELECT id, depth, category FROM hive_posts WHERE author = :author "
                "AND permlink = :permlink",
                author=op['parent_author'], permlink=op['parent_permlink']))
            parent_id, parent_depth, category = parent_data
            depth = parent_depth + 1

        # parent_id=None maps to SQL NULL via the driver (replaces the old
        # manual "'NULL'" string splice).
        query(
            "INSERT INTO hive_posts (parent_id, author, permlink, category, community, depth, created_at) "
            "VALUES (:parent_id, :author, :permlink, :category, :community, :depth, :date)",
            parent_id=parent_id, author=op['author'], permlink=op['permlink'],
            category=category, community=community_or_blog, depth=depth, date=date)
def generate_spotify_playlist(tracks, playlist_name, username):
    """Create a Spotify playlist from the given tracks.

    :param tracks: list of Track objects (each needs .artist and .track)
    :param playlist_name: name of playlist to create
    :param username: Spotify username
    """
    # Unauthenticated client is sufficient for search-only calls.
    sp = spotipy.Spotify()
    formatted_tracks = []
    for t in tracks:
        try:
            formatted_tracks.append(u'artist:"{artist}" track:"{track}"'.format(artist=t.artist, track=t.track))
        except UnicodeDecodeError:
            # Skip tracks whose metadata cannot be decoded (Python 2
            # str/unicode mixing).
            pass
    search_res = [sp.search(q=t, type='track', limit=1) for t in formatted_tracks]
    # Keep the URI of the top hit for every query that returned any items.
    track_ids = [(first(r.get('tracks', {}).get('items', {})) or {}).get('uri')
                 for r in search_res if r.get('tracks', {}).get('items')]
    # NOTE(review): `scope` and the SPOTIFY_* credentials are not defined in
    # this block -- presumably module-level constants; confirm.
    token = util.prompt_for_user_token(username, scope=scope,
                                       client_id=SPOTIFY_API_KEY,
                                       client_secret=SPOTIFY_API_SECRET,
                                       redirect_uri=SPOTIFY_URI)
    if token:
        # Re-create the client authenticated, for the write calls below.
        sp = spotipy.Spotify(auth=token)
        sp.trace = False
        playlist = sp.user_playlist_create(username, playlist_name)
        if playlist and playlist.get('id'):
            sp.user_playlist_add_tracks(username, playlist.get('id'), track_ids)
            print "Playlist has been processed."
    else:
        print "Can't get token for", username
def _process_legacy(cls, account, op_json, block_date): """Handle legacy 'follow' plugin ops (follow/mute/clear, reblog) follow {follower: {type: 'account'}, following: {type: 'account'}, what: {type: 'list'}} reblog {account: {type: 'account'}, author: {type: 'account'}, permlink: {type: 'permlink'}, delete: {type: 'str', optional: True}} """ if not isinstance(op_json, list): return if len(op_json) != 2: return if first(op_json) not in ['follow', 'reblog']: return if not isinstance(second(op_json), dict): return cmd, op_json = op_json # ['follow', {data...}] if cmd == 'follow': Follow.follow_op(account, op_json, block_date) elif cmd == 'reblog': cls.reblog(account, op_json, block_date)
async def follow_stats(account: str):
    """Return following/followers counts for `account` in a single query.

    :param account: account name to compute follow stats for
    :return: first result row carrying `following` and `followers` sums
    """
    sql = """
      SELECT SUM(IF(follower = :account, 1, 0)) following,
             SUM(IF(following = :account, 1, 0)) followers
        FROM hive_follows
       WHERE state = 1
    """
    # Bug fix: the :account placeholder was never bound in the original
    # call (`query(sql)`), so the statement could not execute correctly.
    return first(query(sql, account=account))
def get_post_id_and_depth(author, permlink):
    """Return (id, depth) of a post, or (None, -1) when missing/author empty.

    :param author: post author account name (falsy skips the lookup)
    :param permlink: post permlink
    """
    res = None
    if author:
        # Bound parameters: the original %-interpolation was open to SQL
        # injection via author/permlink.
        res = first(query(
            "SELECT id, depth FROM hive_posts WHERE author = :author AND permlink = :permlink",
            author=author, permlink=permlink))
    return res or (None, -1)
def look_up(ident, config):
    """Open the input device named by `ident` and build its event mapper.

    :return: (mapping, device) where mapping(event) yields
             (event, first-resolved-ecode) for the event's type/code
    """
    device = InputDevice(ident)
    if not config:
        # TODO get config based on device
        pass

    def mapping(event):
        return (event, first(resolve_ecodes({event.type: [event.code]})))

    return mapping, device
def parameter_to_env(resource):
    """Flatten a FHIR Parameters-style resource into a name -> value dict.

    Params carrying a 'resource' key map to that payload; otherwise the
    payload under the first polymorphic value[x] key is used (params with
    an empty 'value' dict are skipped).
    """
    env = {}
    for param in resource["parameter"]:
        name = param["name"]
        if "resource" in param:
            env[name] = param["resource"]
            continue
        value = param["value"]
        polymorphic_key = first(value.keys())
        if polymorphic_key:
            env[name] = value[polymorphic_key]
    return env
def get_account(self, account: str):
    """ Lookup account information such as user profile, public keys, balances, etc.

    Args:
        account (str): STEEM username that we are looking up.

    Returns:
        dict: Account information.
    """
    matches = self.exec('get_accounts', [account])
    return first(matches)
def get_account(self, account):
    """ Lookup account information such as user profile, public keys, balances, etc.

    Args:
        account (str): STEEM username that we are looking up.

    Returns:
        dict: Account information.
    """
    matches = self.call('get_accounts', [account])
    return first(matches)
def recreate_body_diffs(comments):
    """Rebuild full post bodies from stored diffs and render HTML diffs.

    :param comments: list of comment dicts, each carrying a 'body' diff
    :return: (original_text, [html_diff, ...]) -- one rendered diff per
             subsequent revision
    """
    diffs = [comment['body'] for comment in comments]
    versions = reverse_patch(diffs)
    rendered = [versions[0]]
    for idx, version in enumerate(versions[1:]):
        rendered.append(ghdiff.diff(versions[idx], version).replace('\n', ''))
    return first(rendered), list(rest(rendered))
def _validated_op(cls, account, op, date):
    """Validate and normalize a follow-plugin operation.

    Returns a dict(follower, following, state, at) with escaped names and a
    numeric Action state, or None when the op is malformed, unauthorized,
    or effectively a no-op.
    """
    # Structural sanity: 'what' must be a list, follower/following present.
    if (not 'what' in op
            or not isinstance(op['what'], list)
            or not 'follower' in op
            or not 'following' in op):
        log.info("follow_op %s ignored due to basic errors", op)
        return None

    # Only the first 'what' entry is honored; empty list maps to ''.
    what = first(op['what']) or ''
    # ABW: the empty 'what' is used to clear existing 'blog' or 'ignore' state, however it can also be used to
    # introduce new empty relation record in hive_follows adding unnecessary data (it might become a problem
    # only if we wanted to immediately remove empty records)
    # we could add aliases for '' - 'unfollow' and 'unignore'/'unmute'
    # we could add alias for 'ignore' - 'mute'
    defs = {'': Action.Nothing,
            'blog': Action.Blog,
            'follow': Action.Blog,
            'ignore': Action.Ignore,
            'blacklist': Action.Blacklist,
            'follow_blacklist': Action.Follow_blacklist,
            'unblacklist': Action.Unblacklist,
            'unfollow_blacklist': Action.Unfollow_blacklist,
            'follow_muted': Action.Follow_muted,
            'unfollow_muted': Action.Unfollow_muted,
            'reset_blacklist': Action.Reset_blacklist,
            'reset_following_list': Action.Reset_following_list,
            'reset_muted_list': Action.Reset_muted_list,
            'reset_follow_blacklist': Action.Reset_follow_blacklist,
            'reset_follow_muted_list': Action.Reset_follow_muted_list,
            'reset_all_lists': Action.Reset_all_lists}
    if not isinstance(what, str) or what not in defs:
        log.info("follow_op %s ignored due to unknown type of follow", op)
        return None

    # follower is empty or follower account does not exist, or it wasn't that account that authorized operation
    if not op['follower'] or not Accounts.exists(op['follower']) or op['follower'] != account:
        log.info("follow_op %s ignored due to invalid follower", op)
        return None

    # normalize following to list
    op['following'] = op['following'] if isinstance(op['following'], list) else [op['following']]

    # if following name does not exist do not process it: basically equal to drop op for single following entry
    op['following'] = [following for following in op['following']
                       if following and Accounts.exists(following) and following != op['follower']]
    # ABW: note that since you could make 'following' list empty anyway by supplying nonexisting account
    # there was no point in excluding follow_op with provided empty list/empty string - such call actually
    # makes sense for state > 8 when 'following' is ignored
    state = defs[what]
    # Reset-type actions (state >= Reset_blacklist) ignore 'following', so an
    # empty list only voids the op for the lower-numbered actions.
    if not op['following'] and state < Action.Reset_blacklist:
        log.info("follow_op %s is void due to effectively empty list of following", op)
        return None

    return dict(follower=escape_characters(op['follower']),
                following=[escape_characters(following) for following in op['following']],
                state=state,
                at=date)
def _process_legacy(cls, account, op_json, block_date): """Handle legacy 'follow' plugin ops (follow/mute/clear, reblog)""" if not isinstance(op_json, list): return if len(op_json) != 2: return if first(op_json) not in ['follow', 'reblog']: return if not isinstance(second(op_json), dict): return cmd, op_json = op_json # ['follow', {data...}] if cmd == 'follow': Follow.follow_op(account, op_json, block_date) elif cmd == 'reblog': cls.reblog(account, op_json, block_date)
def parameter_to_env(resource):
    """Flatten a FHIR Parameters-style resource into a name -> value dict.

    Resource params map to their 'resource' payload; value params take the
    payload under the first polymorphic value[x] key. The questionnaire /
    questionnaire_response entries are additionally aliased under their
    FHIR resource names.
    """
    env = {}
    for param in resource["parameter"]:
        name = param["name"]
        if "resource" in param:
            env[name] = param["resource"]
            continue
        value = param["value"]
        polymorphic_key = first(value.keys())
        if polymorphic_key:
            env[name] = value[polymorphic_key]

    # Mapping parameters to fhir resource names
    for alias, fhir_name in (("questionnaire", "Questionnaire"),
                             ("questionnaire_response", "QuestionnaireResponse")):
        payload = env.get(alias)
        if payload:
            env[fhir_name] = payload
    return env
def claim_reward_balance(self,
                         reward_steem='0 STEEM',
                         reward_sbd='0 SBD',
                         reward_vests='0 VESTS',
                         account=None):
    """ Claim reward balances.

    By default, this will claim ``all`` outstanding balances. To bypass
    this behaviour, set desired claim amount by setting any of
    `reward_steem`, `reward_sbd` or `reward_vests`.

    Args:
        reward_steem (string): Amount of STEEM you would like to claim.
        reward_sbd (string): Amount of SBD you would like to claim.
        reward_vests (string): Amount of VESTS you would like to claim.
        account (string): The source account for the claim if not
            ``default_account`` is used.

    Raises:
        ValueError: If no account was given and no default_account is set.
    """
    if not account:
        account = configStorage.get("default_account")
    if not account:
        raise ValueError("You need to provide an account")

    # if no values were set by user, claim all outstanding balances on
    # account (each amount string is "<number> <SYMBOL>"; the numeric part
    # is the token before the space)
    if none(float(first(x.split(' ')))
            for x in [reward_sbd, reward_steem, reward_vests]):
        a = Account(account)
        reward_steem = a['reward_steem_balance']
        reward_sbd = a['reward_sbd_balance']
        reward_vests = a['reward_vesting_balance']
    op = operations.ClaimRewardBalance(
        **{
            "account": account,
            "reward_steem": reward_steem,
            "reward_sbd": reward_sbd,
            "reward_vests": reward_vests,
        })
    # Claiming only needs posting authority.
    return self.finalizeOp(op, account, "posting")
def _validated_op(cls, account, op, date): """Validate and normalize the operation.""" if (not 'what' in op or not isinstance(op['what'], list) or not 'follower' in op or not 'following' in op): return what = first(op['what']) or '' defs = {'': 0, 'blog': 1, 'ignore': 2} if what not in defs: return if (op['follower'] == op['following'] # can't follow self or op['follower'] != account # impersonation or not Accounts.exists(op['following']) # invalid account or not Accounts.exists(op['follower'])): # invalid account return return dict(flr=Accounts.get_id(op['follower']), flg=Accounts.get_id(op['following']), state=defs[what], at=date)
def query_one(self, sql, **kwargs):
    """Perform a `SELECT 1*1` -- return the lone scalar, or None."""
    record = first(self._query(sql, **kwargs))
    if not record:
        return None
    return first(record)
def query_row(self, sql, **kwargs):
    """Perform a `SELECT 1*m` -- return only the first result row."""
    return first(self._query(sql, **kwargs))
def post_basic(post):
    """Normalize a raw post: parse json_metadata, clean tags, derive flags.

    :param post: raw post dict from the chain (json_metadata, category,
                 body, cashout_time, payout fields, beneficiaries, ...)
    :return: dict of normalized metadata, tags and payout flags
    """
    md = {}
    try:
        md = json.loads(post['json_metadata'])
        if not isinstance(md, dict):
            md = {}
    except json.decoder.JSONDecodeError:
        pass

    # First image becomes the thumbnail; drop the key entirely if unusable.
    thumb_url = ''
    if md and 'image' in md:
        thumb_url = safe_img_url(first(md['image'])) or ''
        if thumb_url:
            md['image'] = [thumb_url]
        else:
            del md['image']

    # clean up tags, check if nsfw
    tags = [post['category']]
    if md and 'tags' in md and isinstance(md['tags'], list):
        tags = tags + md['tags']
    # Normalize (strip '# ', lowercase, cap at 32 chars), keep at most 5.
    tags = set(
        list(map(lambda tag: (str(tag) or '').strip('# ').lower()[:32], tags))[0:5])
    tags.discard('')
    is_nsfw = 'nsfw' in tags

    # TODO: be strict with nulls, or let them pass?
    body = post['body']
    if body.find('\x00') > -1:
        print("bad body: {}".format(body))
        body = "INVALID"

    # payout date is last_payout if paid, and cashout_time if pending.
    is_paidout = (post['cashout_time'][0:4] == '1969')
    payout_at = post['last_payout'] if is_paidout else post['cashout_time']

    # payout is declined if max_payout = 0, or if 100% is burned
    is_payout_declined = False
    if amount(post['max_accepted_payout']) == 0:
        is_payout_declined = True
    elif len(post['beneficiaries']) == 1:
        benny = first(post['beneficiaries'])
        # weight 10000 == 100% routed to the 'null' (burn) account
        if benny['account'] == 'null' and int(benny['weight']) == 10000:
            is_payout_declined = True

    # payout entirely in SP
    is_full_power = int(post['percent_steem_dollars']) == 0

    return {
        'json_metadata': md,
        'image': thumb_url,
        'tags': tags,
        'is_nsfw': is_nsfw,
        'body': body,
        'preview': body[0:1024],
        'payout_at': payout_at,
        'is_paidout': is_paidout,
        'is_payout_declined': is_payout_declined,
        'is_full_power': is_full_power,
    }
def generate_cached_post_sql(id, post, updated_at):
    """Build the upsert SQL + bind values for a post's hive_posts_cache row.

    NOTE(review): parameter `id` shadows the builtin -- kept for interface
    compatibility.

    :param id: post id (primary key in hive_posts_cache)
    :param post: raw post dict from the chain
    :param updated_at: timestamp recorded in the cache row
    :return: (sql, values) pair ready for execution
    """
    md = None
    try:
        md = json.loads(post['json_metadata'])
        if type(md) is not dict:
            md = {}
    except json.decoder.JSONDecodeError:
        pass

    # First image becomes the thumbnail.
    thumb_url = ''
    if md and 'image' in md:
        thumb_url = get_img_url(first(md['image'])) or ''
        md['image'] = [thumb_url]

    # clean up tags, check if nsfw
    tags = [post['category']]
    if md and 'tags' in md and type(md['tags']) == list:
        tags = tags + md['tags']
    tags = set(map(lambda str: (str or '').lower(), tags))
    is_nsfw = int('nsfw' in tags)

    # payout date is last_payout if paid, and cashout_time if pending.
    payout_at = post['last_payout'] if post['cashout_time'][0:4] == '1969' else post['cashout_time']

    # get total rshares, and create comma-separated vote data blob
    rshares = sum(int(v['rshares']) for v in post['active_votes'])
    csvotes = "\n".join(map(vote_csv_row, post['active_votes']))

    # these are rshares which are PENDING
    payout_declined = False
    if Amount(post['max_accepted_payout']).amount == 0:
        payout_declined = True
    elif len(post['beneficiaries']) == 1:
        benny = first(post['beneficiaries'])
        # weight 10000 == 100% routed to the 'null' (burn) account
        if benny['account'] == 'null' and int(benny['weight']) == 10000:
            payout_declined = True

    # total payout (completed and/or pending)
    payout = sum([
        Amount(post['total_payout_value']).amount,
        Amount(post['curator_payout_value']).amount,
        Amount(post['pending_payout_value']).amount,
    ])

    # total promotion cost
    promoted = Amount(post['promoted']).amount

    # trending scores
    timestamp = parse_time(post['created']).timestamp()
    hot_score = score(rshares, timestamp, 10000)
    trend_score = score(rshares, timestamp, 480000)

    # TODO: evaluate adding these columns. Some CAN be computed upon access.
    # Some need to be in the db if queries will depend on them:
    #   is_hidden, is_no_payout, is_full_power, is_grayed, flag_weight,
    #   total_votes, up_votes
    values = collections.OrderedDict([
        ('post_id', '%d' % id),
        ('title', "%s" % escape(post['title'])),
        ('preview', "%s" % escape(post['body'][0:1024])),
        ('img_url', "%s" % escape(thumb_url)),
        ('payout', "%f" % payout),
        ('promoted', "%f" % promoted),
        ('payout_at', "%s" % payout_at),
        ('updated_at', "%s" % updated_at),
        ('created_at', "%s" % post['created']),
        ('children', "%d" % post['children']),  # TODO: remove this field
        ('rshares', "%d" % rshares),
        ('votes', "%s" % escape(csvotes)),
        ('json', "%s" % escape(json.dumps(md))),
        ('is_nsfw', "%d" % is_nsfw),
        ('sc_trend', "%f" % trend_score),
        ('sc_hot', "%f" % hot_score)
    ])
    fields = values.keys()
    # Upsert: insert all columns, update everything but the key on conflict.
    cols = ', '.join(fields)
    params = ', '.join([':' + k for k in fields])
    update = ', '.join([k + " = :" + k for k in fields][1:])
    sql = "INSERT INTO hive_posts_cache (%s) VALUES (%s) ON DUPLICATE KEY UPDATE %s"
    return (sql % (cols, params, update), values)
def query_one(sql, **kwargs):
    """Return the first column of `sql`'s first row, or None."""
    row = query_row(sql, **kwargs)
    return first(row) if row else None
def post_basic(post):
    """Basic post normalization: json-md, tags, and flags.

    :param post: raw post dict from the chain
    :return: dict of normalized metadata, tags and payout flags
    """
    md = {}
    try:
        md = json.loads(post['json_metadata'])
        if not isinstance(md, dict):
            md = {}
    except Exception:
        pass

    # First image becomes the thumbnail; drop the key entirely if unusable.
    thumb_url = ''
    if md and 'image' in md and md['image']:
        thumb_url = safe_img_url(first(md['image'])) or ''
        if thumb_url:
            md['image'] = [thumb_url]
        else:
            del md['image']

    # clean up tags, check if nsfw
    tags = [post['category']]
    if md and 'tags' in md and isinstance(md['tags'], list):
        tags = tags + md['tags']
    # Normalize (strip '# ', lowercase, cap at 32 chars), keep at most 5.
    tags = set(
        list(map(lambda tag: (str(tag) or '').strip('# ').lower()[:32], tags))[0:5])
    tags.discard('')
    is_nsfw = 'nsfw' in tags

    # NUL bytes are replaced rather than rejected.
    body = post['body']
    if body.find('\x00') > -1:
        #url = post['author'] + '/' + post['permlink']
        body = body.replace('\x00', '[NUL]')

    # payout date is last_payout if paid, and cashout_time if pending.
    is_paidout = (post['cashout_time'][0:4] == '1969')
    payout_at = post['last_payout'] if is_paidout else post['cashout_time']

    # payout is declined if max_payout = 0, or if 100% is burned
    is_payout_declined = False
    if sbd_amount(post['max_accepted_payout']) == 0:
        is_payout_declined = True
    elif len(post['beneficiaries']) == 1:
        benny = first(post['beneficiaries'])
        # weight 10000 == 100% routed to the 'null' (burn) account
        if benny['account'] == 'null' and int(benny['weight']) == 10000:
            is_payout_declined = True

    # payout entirely in SP
    is_full_power = int(post['percent_steem_dollars']) == 0

    return {
        'json_metadata': md,
        'image': thumb_url,
        'tags': tags,
        'is_nsfw': is_nsfw,
        'body': body,
        'preview': body[0:1024],
        'payout_at': payout_at,
        'is_paidout': is_paidout,
        'is_payout_declined': is_payout_declined,
        'is_full_power': is_full_power,
    }
def account_from_auths():
    # Resolve the acting account for a custom op: required_auths takes
    # precedence over required_posting_auths.
    # NOTE(review): reads `op` from an enclosing scope -- this helper only
    # works when nested inside a function that defines `op` (cf.
    # parse_operation); as a standalone top-level def it would raise
    # NameError.
    return first(op.get('required_auths', op.get('required_posting_auths')))
def query_row(sql, **kwargs):
    """Return only the first row of `sql`'s result set."""
    return first(__query(sql, **kwargs))
def query_one(self, sql, **kwargs):
    """Perform a `SELECT 1*1` -- return the single value, or None."""
    row = self.query_row(sql, **kwargs)
    return first(row) if row else None
def post_basic(post):
    """Basic post normalization: json-md, tags, and flags.

    :param post: raw post dict from the chain
    :return: dict of normalized metadata, tags and payout flags
    """
    md = {}
    # At least one case where jsonMetadata was double-encoded: condenser#895
    # jsonMetadata = JSON.parse(jsonMetadata);
    try:
        md = json.loads(post['json_metadata'])
        if not isinstance(md, dict):
            md = {}
    except Exception:
        pass

    # Coerce image metadata to a list of safe URLs; first one is the thumb.
    thumb_url = ''
    if md and 'image' in md:
        if md['image']:
            if not isinstance(md['image'], list):
                md['image'] = [md['image']]
            md['image'] = list(filter(None, map(safe_img_url, md['image'])))
        if md['image']:
            thumb_url = md['image'][0]
        else:
            del md['image']

    # clean up tags, check if nsfw
    tags = [post['category']]
    # if (typeof tags == 'string') tags = tags.split(' '); # legacy condenser compat
    if md and 'tags' in md and isinstance(md['tags'], list):
        tags = tags + md['tags']
    # Normalize (strip '# ', lowercase, cap at 32 chars), dedupe, keep 5.
    tags = map(lambda tag: (str(tag) or '').strip('# ').lower()[:32], tags)
    tags = filter(None, tags)
    tags = list(distinct(tags))[:5]
    is_nsfw = 'nsfw' in tags

    # NUL bytes are replaced rather than rejected.
    body = post['body']
    if body.find('\x00') > -1:
        #url = post['author'] + '/' + post['permlink']
        body = body.replace('\x00', '[NUL]')

    # payout date is last_payout if paid, and cashout_time if pending.
    is_paidout = (post['cashout_time'][0:4] == '1969')
    payout_at = post['last_payout'] if is_paidout else post['cashout_time']

    # payout is declined if max_payout = 0, or if 100% is burned
    is_payout_declined = False
    if sbd_amount(post['max_accepted_payout']) == 0:
        is_payout_declined = True
    elif len(post['beneficiaries']) == 1:
        benny = first(post['beneficiaries'])
        # weight 10000 == 100% routed to the 'null' (burn) account
        if benny['account'] == 'null' and int(benny['weight']) == 10000:
            is_payout_declined = True

    # payout entirely in SP
    is_full_power = int(post['percent_steem_dollars']) == 0

    return {
        'json_metadata': md,
        'image': thumb_url,
        'tags': tags,
        'is_nsfw': is_nsfw,
        'body': body,
        'preview': body[0:1024],
        'payout_at': payout_at,
        'is_paidout': is_paidout,
        'is_payout_declined': is_payout_declined,
        'is_full_power': is_full_power,
    }
def find_api(method_name):
    """ Given a method name, find its API.

    Returns the owning API's name, or None when the method is unknown.
    """
    endpoint = first(where(api_methods, method=method_name))
    return endpoint.get('api') if endpoint else None
def __next__(self):
    """Return the next item, raising StopIteration when the stream is drained.

    Bug fix: the original tested the *item's* truthiness
    (`if not first(self.take(1))`), so any falsy item (0, '', None, ...)
    prematurely terminated iteration. Test the fetched batch for emptiness
    instead, so falsy items pass through.
    """
    batch = list(self.take(1))
    if not batch:
        raise StopIteration
    return batch[0]
def parse_operation(op):
    """Update all relevant collections that this op impacts.

    :param op: one blockchain operation dict, keyed by its 'type'
    :return: dict with 'accounts' (full refresh), 'accounts_light'
             (light refresh) and 'comments' (post identifiers) to update
    """
    op_type = op['type']
    update_accounts_light = set()
    update_accounts_full = set()
    update_comments = set()

    def construct_identifier():
        # '@author/permlink'; falls back to comment_* keys for ops using them
        return '@%s/%s' % (
            op.get('author', op.get('comment_author')),
            op.get('permlink', op.get('comment_permlink')),
        )

    def account_from_auths():
        # required_auths takes precedence over required_posting_auths
        return first(op.get('required_auths', op.get('required_posting_auths')))

    if op_type in ['account_create', 'account_create_with_delegation']:
        update_accounts_light.add(op['creator'])
        update_accounts_full.add(op['new_account_name'])
    elif op_type in [
            'account_update', 'withdraw_vesting', 'claim_reward_balance',
            'return_vesting_delegation', 'account_witness_vote'
    ]:
        update_accounts_light.add(op['account'])
    elif op_type == 'account_witness_proxy':
        update_accounts_light.add(op['account'])
        update_accounts_light.add(op['proxy'])
    elif op_type in ['author_reward', 'comment']:
        update_accounts_light.add(op['author'])
        update_comments.add(construct_identifier())
    elif op_type == 'cancel_transfer_from_savings':
        update_accounts_light.add(op['from'])
    elif op_type == 'change_recovery_account':
        update_accounts_light.add(op['account_to_recover'])
    elif op_type == 'comment_benefactor_reward':
        update_accounts_light.add(op['benefactor'])
    elif op_type in [
            'convert', 'fill_convert_request', 'interest',
            'limit_order_cancel', 'limit_order_create', 'shutdown_witness',
            'witness_update'
    ]:
        # Bug fix: this branch used `op_type == [...]` (comparing a string to
        # a list), which is always False -- these owner-keyed ops were never
        # tracked. Use `in` like every other branch.
        update_accounts_light.add(op['owner'])
    elif op_type == 'curation_reward':
        update_accounts_light.add(op['curator'])
    elif op_type in ['custom', 'custom_json']:
        update_accounts_light.add(account_from_auths())
    elif op_type == 'delegate_vesting_shares':
        update_accounts_light.add(op['delegator'])
        update_accounts_light.add(op['delegatee'])
    elif op_type == 'delete_comment':
        update_accounts_light.add(op['author'])
    elif op_type in [
            'escrow_approve', 'escrow_dispute', 'escrow_release',
            'escrow_transfer'
    ]:
        accs = keep_in_dict(op, ['agent', 'from', 'to', 'who', 'receiver']).values()
        update_accounts_light.update(accs)
    elif op_type == 'feed_publish':
        update_accounts_light.add(op['publisher'])
    elif op_type in ['fill_order']:
        update_accounts_light.add(op['open_owner'])
        update_accounts_light.add(op['current_owner'])
    elif op_type in ['fill_vesting_withdraw']:
        update_accounts_light.add(op['to_account'])
        update_accounts_light.add(op['from_account'])
    elif op_type == 'pow2':
        acc = op['work'][1]['input']['worker_account']
        update_accounts_light.add(acc)
    elif op_type in ['recover_account', 'request_account_recovery']:
        update_accounts_light.add(op['account_to_recover'])
    elif op_type == 'set_withdraw_vesting_route':
        update_accounts_light.add(op['from_account'])
        # update_accounts_light.add(op['to_account'])
    elif op_type in [
            'transfer', 'transfer_from_savings', 'transfer_to_savings',
            'transfer_to_vesting'
    ]:
        accs = keep_in_dict(op, ['agent', 'from', 'to', 'who', 'receiver']).values()
        update_accounts_light.update(accs)
    elif op_type == 'vote':
        update_accounts_light.add(op['voter'])
        update_comments.add(construct_identifier())

    # handle followers
    if op_type == 'custom_json':
        with suppress(ValueError):
            cmd, op_json = json.loads(op['json'])  # ['follow', {data...}]
            if cmd == 'follow':
                accs = keep_in_dict(op_json, ['follower', 'following']).values()
                # follow targets get a full refresh, not a light one
                update_accounts_light.discard(first(accs))
                update_accounts_light.discard(second(accs))
                update_accounts_full.update(accs)

    return {
        'accounts': list(update_accounts_full),
        'accounts_light': list(update_accounts_light),
        'comments': list(update_comments),
    }
def generate_cached_post_sql(pid, post, updated_at):
    """Build the SQL statements refreshing a post's cache and tag rows.

    :param pid: post id (hive_posts_cache primary key)
    :param post: raw post dict from the chain
    :param updated_at: timestamp recorded in the cache row
    :return: list of (sql, params) pairs to execute
    :raises Exception: if the post has no author (no chain state)
    """
    if not post['author']:
        raise Exception("ERROR: post id {} has no chain state.".format(pid))

    md = None
    try:
        md = json.loads(post['json_metadata'])
        if not isinstance(md, dict):
            md = {}
    except json.decoder.JSONDecodeError:
        pass

    # First image becomes the thumbnail.
    thumb_url = ''
    if md and 'image' in md:
        thumb_url = get_img_url(first(md['image'])) or ''
        md['image'] = [thumb_url]

    # clean up tags, check if nsfw
    tags = [post['category']]
    if md and 'tags' in md and isinstance(md['tags'], list):
        tags = tags + md['tags']
    # Normalize (strip '# ', lowercase, cap at 32 chars), keep at most 5.
    tags = set(
        list(map(lambda str: (str or '').strip('# ').lower()[:32], tags))[0:5])
    tags.discard('')
    is_nsfw = int('nsfw' in tags)

    # payout date is last_payout if paid, and cashout_time if pending.
    is_paidout = (post['cashout_time'][0:4] == '1969')
    payout_at = post['last_payout'] if is_paidout else post['cashout_time']

    # get total rshares, and create comma-separated vote data blob
    rshares = sum(int(v['rshares']) for v in post['active_votes'])
    csvotes = "\n".join(map(vote_csv_row, post['active_votes']))

    # payout is declined if max_payout = 0, or if 100% is burned
    payout_declined = False
    if amount(post['max_accepted_payout']) == 0:
        payout_declined = True
    elif len(post['beneficiaries']) == 1:
        benny = first(post['beneficiaries'])
        # weight 10000 == 100% routed to the 'null' (burn) account
        if benny['account'] == 'null' and int(benny['weight']) == 10000:
            payout_declined = True

    # payout entirely in SP
    full_power = int(post['percent_steem_dollars']) == 0

    # total payout (completed and/or pending)
    payout = sum([
        amount(post['total_payout_value']),
        amount(post['curator_payout_value']),
        amount(post['pending_payout_value']),
    ])

    # total promotion cost
    promoted = amount(post['promoted'])

    # trending scores
    timestamp = parse_time(post['created']).timestamp()
    hot_score = score(rshares, timestamp, 10000)
    trend_score = score(rshares, timestamp, 480000)

    # TODO: add get_stats fields
    values = collections.OrderedDict([
        ('post_id', '%d' % pid),
        ('author', "%s" % post['author']),
        ('permlink', "%s" % post['permlink']),
        ('title', "%s" % post['title']),
        ('preview', "%s" % post['body'][0:1024]),
        ('body', "%s" % post['body']),
        ('img_url', "%s" % thumb_url),
        ('payout', "%f" % payout),
        ('promoted', "%f" % promoted),
        ('payout_at', "%s" % payout_at),
        ('updated_at', "%s" % updated_at),
        ('created_at', "%s" % post['created']),
        ('rshares', "%d" % rshares),
        ('votes', "%s" % csvotes),
        ('json', "%s" % json.dumps(md)),
        ('is_nsfw', "%d" % is_nsfw),
        ('is_paidout', "%d" % is_paidout),
        ('sc_trend', "%f" % trend_score),
        ('sc_hot', "%f" % hot_score),
        #('payout_declined', "%d" % int(payout_declined)),
        #('full_power', "%d" % int(full_power)),
    ])
    fields = values.keys()

    # Multiple SQL statements are generated for each post
    sqls = []

    # Update main metadata in the hive_posts_cache table
    cols = ', '.join(fields)
    params = ', '.join([':' + k for k in fields])
    update = ', '.join([k + " = :" + k for k in fields][1:])
    sql = "INSERT INTO hive_posts_cache (%s) VALUES (%s) ON DUPLICATE KEY UPDATE %s"
    sqls.append((sql % (cols, params, update), values))

    # update tag metadata only for top-level posts
    if post['depth'] == 0:
        sql = "DELETE FROM hive_post_tags WHERE post_id = :id"
        sqls.append((sql, {'id': pid}))
        sql = "INSERT IGNORE INTO hive_post_tags (post_id, tag) VALUES "
        params = {}
        vals = []
        for i, tag in enumerate(tags):
            vals.append("(:id, :t%d)" % i)
            params["t%d" % i] = tag
        sqls.append((sql + ','.join(vals), {'id': pid, **params}))

    return sqls
def process_json_follow_op(account, op_json, block_date):
    """Process legacy 'follow' plugin ops (follow/mute/clear, reblog).

    :param account: the account that authorized the custom_json op
    :param op_json: decoded payload -- ['follow'|'reblog', {data...}]
    :param block_date: block timestamp used for created_at fields
    """
    if type(op_json) != list:
        return
    if len(op_json) != 2:
        return
    if first(op_json) not in ['follow', 'reblog']:
        return
    if not isinstance(second(op_json), dict):
        return

    cmd, op_json = op_json  # ['follow', {data...}]
    if cmd == 'follow':
        if type(op_json['what']) != list:
            return
        # empty 'what' means clear the existing follow/ignore state
        what = first(op_json['what']) or 'clear'
        if what not in ['blog', 'clear', 'ignore']:
            return
        if not all([key in op_json for key in ['follower', 'following']]):
            print("bad follow op: {} {}".format(block_date, op_json))
            return

        follower = op_json['follower']
        following = op_json['following']

        if follower != account:
            return  # impersonation
        # Bug fix: the original `all(filter(is_valid_account_name, ...))` was
        # always truthy -- filter() drops invalid names and all([]) is True.
        # map() actually tests every name.
        if not all(map(is_valid_account_name, [follower, following])):
            return  # invalid input

        sql = """
        INSERT IGNORE INTO hive_follows (follower, following, created_at, state)
        VALUES (:fr, :fg, :at, :state) ON DUPLICATE KEY UPDATE state = :state
        """
        state = {'clear': 0, 'blog': 1, 'ignore': 2}[what]
        query(sql, fr=follower, fg=following, at=block_date, state=state)

    elif cmd == 'reblog':
        blogger = op_json['account']
        author = op_json['author']
        permlink = op_json['permlink']

        if blogger != account:
            return  # impersonation
        # Same filter -> map fix as above.
        if not all(map(is_valid_account_name, [author, blogger])):
            return

        post_id, depth = get_post_id_and_depth(author, permlink)
        if depth > 0:
            return  # prevent comment reblogs
        if not post_id:
            print("reblog: post not found: {}/{}".format(author, permlink))
            return

        if 'delete' in op_json and op_json['delete'] == 'delete':
            query(
                "DELETE FROM hive_reblogs WHERE account = :a AND post_id = :pid LIMIT 1",
                a=blogger, pid=post_id)
            sql = "DELETE FROM hive_feed_cache WHERE account = :account AND post_id = :id"
            query(sql, account=blogger, id=post_id)
        else:
            query(
                "INSERT IGNORE INTO hive_reblogs (account, post_id, created_at) "
                "VALUES (:a, :pid, :date)",
                a=blogger, pid=post_id, date=block_date)
            sql = "INSERT IGNORE INTO hive_feed_cache (account, post_id, created_at) VALUES (:account, :id, :created_at)"
            query(sql, account=blogger, id=post_id, created_at=block_date)