Example #1
    def _generate_cache_sqls(cls, accounts, block_date=None):
        if not block_date:
            block_date = get_adapter().head_time()

        sqls = []
        for account in get_adapter().get_accounts(accounts):
            values = {
                'name': account['name'],
                'proxy': account['proxy'],
                'post_count': account['post_count'],
                'reputation': rep_log10(account['reputation']),
                'proxy_weight': amount(account['vesting_shares']),
                'vote_weight': amount(account['vesting_shares'])
                               + amount(account['received_vesting_shares'])
                               - amount(account['delegated_vesting_shares']),
                'kb_used': int(account['lifetime_bandwidth']) / 1e6 / 1024,
                'active_at': account['last_bandwidth_update'],
                'cached_at': block_date,
                **cls._safe_account_metadata(account)
            }

            update = ', '.join([k + " = :" + k for k in values.keys()][1:])
            sql = "UPDATE hive_accounts SET %s WHERE name = :name" % (update)
            sqls.append((sql, values))
        return sqls
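
Each generated pair is a parameterized statement of the form UPDATE hive_accounts SET proxy = :proxy, post_count = :post_count, ... WHERE name = :name. A minimal sketch of a caller that executes the pairs, assuming a SQLAlchemy engine and an Accounts class owning the method (the connection URL and both names are assumptions, not shown in the original):

from sqlalchemy import create_engine, text

engine = create_engine('postgresql:///hive')  # assumed connection string

def cache_accounts(accounts):
    # Accounts is assumed to be the class that defines _generate_cache_sqls above.
    with engine.begin() as conn:  # one transaction for the whole batch
        for sql, values in Accounts._generate_cache_sqls(accounts):
            conn.execute(text(sql), values)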
Example #2
    def get_post_stats(cls, post):
        net_rshares_adj = 0
        neg_rshares = 0
        total_votes = 0
        up_votes = 0
        for vote in post['active_votes']:
            if vote['percent'] == 0:
                continue

            total_votes += 1
            rshares = int(vote['rshares'])
            sign = 1 if vote['percent'] > 0 else -1
            if sign > 0:
                up_votes += 1
            if sign < 0:
                neg_rshares += rshares

            # For graying: sum rshares, but ignore neg rep users and dust downvotes
            neg_rep = str(vote['reputation'])[0] == '-'
            if not (neg_rep and sign < 0 and len(str(rshares)) < 11):
                net_rshares_adj += rshares

        # take negative rshares, divide by 2, truncate 10 digits (plus neg sign),
        #   and count digits. creates a cheap log10, stake-based flag weight.
        #   result: 1 = approx $400 of downvoting stake; 2 = $4,000; etc
        flag_weight = max((len(str(neg_rshares // 2)) - 11, 0))

        allow_delete = post['children'] == 0 and int(post['net_rshares']) <= 0
        has_pending_payout = amount(post['pending_payout_value']) >= 0.02
        author_rep = rep_log10(post['author_reputation'])

        gray_threshold = -9999999999
        low_value_post = net_rshares_adj < gray_threshold and author_rep < 65

        gray = not has_pending_payout and (author_rep < 1 or low_value_post)
        hide = not has_pending_payout and (author_rep < 0)

        return {
            'hide': hide,
            'gray': gray,
            'allow_delete': allow_delete,
            'author_rep': author_rep,
            'flag_weight': flag_weight,
            'total_votes': total_votes,
            'up_votes': up_votes
        }
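
The flag_weight line is a digit-count approximation of log10 of downvote stake. A standalone restatement of the trick, with made-up rshares magnitudes (the dollar equivalents come from the comment's own estimate, not from chain data):

def cheap_flag_weight(neg_rshares):
    # integer-divide by 2, then count digits beyond the first 10 (plus the sign)
    return max(len(str(neg_rshares // 2)) - 11, 0)

assert cheap_flag_weight(0) == 0                    # no downvote stake
assert cheap_flag_weight(-50_000_000_000) == 1      # roughly $400 per the comment above
assert cheap_flag_weight(-500_000_000_000) == 2     # roughly $4,000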
Example #3
def generate_cached_post_sql(pid, post, updated_at):
    if not post['author']:
        raise Exception("ERROR: post id {} has no chain state.".format(pid))

    md = None
    try:
        md = json.loads(post['json_metadata'])
        if not isinstance(md, dict):
            md = {}
    except json.decoder.JSONDecodeError:
        pass

    thumb_url = ''
    if md and 'image' in md:
        thumb_url = safe_img_url(first(md['image'])) or ''
        md['image'] = [thumb_url]

    # clean up tags, check if nsfw
    tags = [post['category']]
    if md and 'tags' in md and isinstance(md['tags'], list):
        tags = tags + md['tags']
    tags = set(list(map(lambda tag: (tag or '').strip('# ').lower()[:32], tags))[0:5])
    tags.discard('')
    is_nsfw = int('nsfw' in tags)

    # payout date is last_payout if paid, and cashout_time if pending.
    is_paidout = (post['cashout_time'][0:4] == '1969')
    payout_at = post['last_payout'] if is_paidout else post['cashout_time']

    # get total rshares, and create comma-separated vote data blob
    rshares = sum(int(v['rshares']) for v in post['active_votes'])
    csvotes = "\n".join(map(vote_csv_row, post['active_votes']))

    payout_declined = False
    if amount(post['max_accepted_payout']) == 0:
        payout_declined = True
    elif len(post['beneficiaries']) == 1:
        benny = first(post['beneficiaries'])
        if benny['account'] == 'null' and int(benny['weight']) == 10000:
            payout_declined = True

    full_power = int(post['percent_steem_dollars']) == 0

    # total payout (completed and/or pending)
    payout = sum([
        amount(post['total_payout_value']),
        amount(post['curator_payout_value']),
        amount(post['pending_payout_value']),
    ])

    # total promotion cost
    promoted = amount(post['promoted'])

    # trending scores
    timestamp = parse_time(post['created']).timestamp()
    hot_score = score(rshares, timestamp, 10000)
    trend_score = score(rshares, timestamp, 480000)

    # TODO: add Posts.get_post_stats fields
    values = collections.OrderedDict([
        ('post_id', '%d' % pid),
        ('author', "%s" % post['author']),
        ('permlink', "%s" % post['permlink']),
        ('title', "%s" % post['title']),
        ('preview', "%s" % post['body'][0:1024]),
        ('body', "%s" % post['body']),
        ('img_url', "%s" % thumb_url),
        ('payout', "%f" % payout),
        ('promoted', "%f" % promoted),
        ('payout_at', "%s" % payout_at),
        ('updated_at', "%s" % updated_at),
        ('created_at', "%s" % post['created']),
        ('rshares', "%d" % rshares),
        ('votes', "%s" % csvotes),
        ('json', "%s" % json.dumps(md)),
        ('is_nsfw', "%d" % is_nsfw),
        ('is_paidout', "%d" % is_paidout),
        ('sc_trend', "%f" % trend_score),
        ('sc_hot', "%f" % hot_score),
        #('payout_declined', "%d" % int(payout_declined)),
        #('full_power', "%d" % int(full_power)),
    ])
    fields = values.keys()

    # Multiple SQL statements are generated for each post
    sqls = []

    # Update main metadata in the hive_posts_cache table
    cols = ', '.join(fields)
    params = ', '.join([':'+k for k in fields])
    update = ', '.join([k+" = :"+k for k in fields][1:])
    sql = "INSERT INTO hive_posts_cache (%s) VALUES (%s) ON DUPLICATE KEY UPDATE %s"
    sqls.append((sql % (cols, params, update), values))

    # update tag metadata only for top-level posts
    if post['depth'] == 0:
        sql = "DELETE FROM hive_post_tags WHERE post_id = :id"
        sqls.append((sql, {'id': pid}))

        if tags:
            sql = "INSERT IGNORE INTO hive_post_tags (post_id, tag) VALUES "
            params = {}
            vals = []
            for i, tag in enumerate(tags):
                vals.append("(:id, :t%d)" % i)
                params["t%d"%i] = tag
            sqls.append((sql + ','.join(vals), {'id': pid, **params}))

    return sqls
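
The ON DUPLICATE KEY UPDATE and INSERT IGNORE statements above imply a MySQL-compatible connection. A sketch of a caller that flushes the generated statements for one post, assuming a SQLAlchemy engine (the URL and the wrapper function name are illustrative only):

from sqlalchemy import create_engine, text

engine = create_engine('mysql+pymysql://user:pass@localhost/hive')  # assumed URL

def flush_cached_post(pid, post, updated_at):
    # run the cache upsert plus any tag maintenance in one transaction
    with engine.begin() as conn:
        for sql, params in generate_cached_post_sql(pid, post, updated_at):
            conn.execute(text(sql), params)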