Esempio n. 1
0
    def curation_stats(self):
        """Summarize curation rewards over the trailing 24 hours and 7 days.

        Scans up to the 5000 most recent ``curation_reward`` entries in the
        account history and converts the accumulated vests to SP.

        :return: dict with keys ``24hr``, ``7d`` and ``avg`` (the 7d figure / 7)
        """
        now = time.time()
        cutoff_24h = now - datetime.timedelta(hours=24).total_seconds()
        cutoff_7d = now - datetime.timedelta(days=7).total_seconds()

        vests_24h = 0.0
        vests_7d = 0.0

        recent = take(5000, self.history_reverse(filter_by="curation_reward"))
        for entry in recent:
            ts = parse_time(entry["timestamp"]).timestamp()
            if ts > cutoff_7d:
                vests_7d += Amount(entry["reward"]).amount
            if ts > cutoff_24h:
                vests_24h += Amount(entry["reward"]).amount

        sp_7d = self.converter.vests_to_sp(vests_7d)
        sp_24h = self.converter.vests_to_sp(vests_24h)
        return {
            "24hr": sp_24h,
            "7d": sp_7d,
            "avg": sp_7d / 7,
        }
Esempio n. 2
0
    def refresh(self):
        """Fetch the post from the blockchain and (re)populate this object.

        Normalizes timestamps to datetimes and amount strings to ``Amount``
        instances, computes trending/hot scores, parses ``json_metadata``
        into a dict, fills ``tags``/``community``, and resolves the root
        post identifier.

        :raises PostDoesNotExist: if no post exists under this identifier
        """
        post_author, post_permlink = resolve_identifier(self.identifier)
        post = self.steemd.get_content(post_author, post_permlink)
        if not post["permlink"]:
            raise PostDoesNotExist("Post does not exist: %s" % self.identifier)

        # If this 'post' comes from an operation, it might carry a patch
        if "body" in post and re.match("^@@", post["body"]):
            self.patched = True

        # TODO: Check
        # This field is returned from blockchain, but it's empty. Fill it
        post['reblogged_by'] = [i for i in self.steemd.get_reblogged_by(post_author, post_permlink) if i != post_author]

        # Parse Times
        parse_times = ["active",
                       "cashout_time",
                       "created",
                       "last_payout",
                       "last_update",
                       "max_cashout_time"]
        for p in parse_times:
            post[p] = parse_time(post.get(p, "1970-01-01T00:00:00"))

        # Parse Amounts
        sbd_amounts = [
            'total_payout_value',
            'max_accepted_payout',
            'pending_payout_value',
            'curator_payout_value',
            'total_pending_payout_value',
            'promoted',
        ]
        for p in sbd_amounts:
            post[p] = Amount(post.get(p, "0.000 GBG"))

        # calculate trending and hot scores for sorting
        post['score_trending'] = calculate_trending(post.get('net_rshares', 0), post['created'])
        post['score_hot'] = calculate_hot(post.get('net_rshares', 0), post['created'])

        # turn json_metadata into python dict
        meta_str = post.get("json_metadata", "{}")
        post['json_metadata'] = silent(json.loads)(meta_str) or {}

        post["tags"] = []
        post['community'] = ''
        if isinstance(post['json_metadata'], dict):
            if post["depth"] == 0:
                tags = [post["parent_permlink"]]
                tags += get_in(post, ['json_metadata', 'tags'], default=[])
                # BUG FIX: the old code filtered `tags` against set(tags),
                # which always produced an empty list. Deduplicate while
                # preserving first-seen order instead.
                post["tags"] = list(dict.fromkeys(tags))

            post['community'] = get_in(post, ['json_metadata', 'community'], default='')

        # If this post is a comment, retrieve the root comment
        self.root_identifier, self.category = self._get_root_identifier(post)

        self._store_post(post)
Esempio n. 3
0
    def filter_by_date(items, start_time, end_time=None):
        """Return the items whose embedded time lies in (start_time, end_time).

        Each item is expected to carry a ``time`` or ``timestamp`` key;
        items with neither key are skipped.

        :param items: iterable of dicts with a ``time``/``timestamp`` field
        :param start_time: lower bound, any string accepted by parse_time
        :param end_time: upper bound; defaults to "now" when omitted
        """
        start_time = parse_time(start_time).timestamp()
        if end_time:
            end_time = parse_time(end_time).timestamp()
        else:
            end_time = time.time()

        filtered_items = []
        for item in items:
            # BUG FIX: item_time was previously unbound (NameError) or stale
            # from an earlier iteration when an item carried neither key.
            item_time = None
            if 'time' in item:
                item_time = item['time']
            elif 'timestamp' in item:
                item_time = item['timestamp']

            if item_time is None:
                continue

            timestamp = parse_time(item_time).timestamp()
            if end_time > timestamp > start_time:
                filtered_items.append(item)

        return filtered_items
Esempio n. 4
0
    def check_node_sync(self) -> bool:
        """Checks if API node is in sync.

        Compares the node's reported head-block time with local UTC time;
        a lag above 6 seconds is treated as out of sync.
        """
        props = self.get_dynamic_global_properties()
        head_time = parse_time(props['time'])
        lag = (datetime.utcnow() - head_time).total_seconds()

        if lag <= 6:
            return True

        log.warning('node out of sync, timediff: {}, waiting...'.format(lag))
        return False
Esempio n. 5
0
    def filter_by_date(items, start_time, end_time=None):
        """Keep only the items whose 'time'/'timestamp' falls between bounds.

        Items lacking both keys are silently skipped. The upper bound
        defaults to the current time when end_time is not given.
        """
        lower = parse_time(start_time).timestamp()
        upper = parse_time(end_time).timestamp() if end_time else time.time()

        kept = []
        for entry in items:
            raw_time = None
            if "time" in entry:
                raw_time = entry["time"]
            elif "timestamp" in entry:
                raw_time = entry["timestamp"]

            if raw_time:
                ts = parse_time(raw_time).timestamp()
                if upper > ts > lower:
                    kept.append(entry)

        return kept
def main(ctx, notify, account):
    """Find all GBG conversion requests.

    Iterates either a single account or every account on the chain,
    prints each pending conversion request sorted by conversion date,
    and optionally sends a small notification transfer to each owner.
    """

    if account:
        accs = [account]
    else:
        ctx.log.debug('total accounts: %s', ctx.helper.get_account_count())
        accs = ctx.helper.get_all_usernames()

    # Obtain the market bid and conversion price up front when we are going
    # to send notifications; abort early if either price is unavailable.
    if notify:
        bid = ctx.helper.get_market_price(type_='bid')
        conversion_price = ctx.helper.converter.sbd_median_price()
        if not bid or not conversion_price:
            ctx.log.critical('failed to obtain price')
            sys.exit(1)

    start = datetime.utcnow()
    total_sum = Decimal('0.000')
    for acc in accs:
        requests = ctx.helper.get_conversion_requests(acc)
        total = Decimal('0.000')
        # Attach a parsed datetime so requests can be sorted and formatted
        for request in requests:
            request['date'] = parse_time(request['conversion_date'])

        # Sort by datetime
        requests = sorted(requests, key=lambda k: k['date'])
        for request in requests:
            amount = request['amount'].split()[0]
            total += Decimal(amount)
            print(
                '{:<16} {:<18} {:>7}'.format(
                    request['owner'], request['amount'], request['date'].strftime('%Y-%m-%d %H:%M')
                )
            )

        total_sum += total

        # 'request' is the last item from the loop above; this branch is only
        # reached when requests is non-empty, so the name is always bound.
        if len(requests) > 1:
            print('{:<16} {:<18} {:<7}'.format(request['owner'], total, 'Total'))

        # Send the notification transfer only to accounts that actually have
        # pending conversion requests.
        if requests and notify:
            msg = ctx.config['notify_message'].format(conversion_price, bid)
            ctx.helper.transfer(acc, '0.001', 'GOLOS', memo=msg, account=ctx.config['notify_account'])

    print('Total on conversion: {}'.format(total_sum))
    ctx.log.debug('getting conversion requests took {:.2f} seconds'.format((datetime.utcnow() - start).total_seconds()))
Esempio n. 7
0
def main(ctx, amount_limit, limit, account):
    """Scan account history looking for transfers.

    Prints every transfer whose amount exceeds amount_limit.
    """

    account = Account(account)

    for entry in account.rawhistory(only_ops=['transfer'], limit=limit):
        ctx.log.debug(pformat(entry))

        op_data = entry[1]['op'][1]
        when = parse_time(entry[1]['timestamp'])
        sender = op_data['from']
        recipient = op_data['to']
        value = Amount(op_data['amount'])
        note = op_data['memo']

        if value.amount > amount_limit:
            print('{}: {:<16} -> {:<16}, {}, {}'.format(when, sender, recipient, value, note))
def main(ctx, min_mgests, account):
    """Find all vesting withdrawals with rates and dates."""

    ctx.log.debug('total accounts: %s', ctx.helper.get_account_count())

    accs = [account] if account else ctx.helper.get_all_usernames()

    start = datetime.utcnow()

    # fetch every account in a single batch call
    accounts = ctx.helper.get_accounts(accs)

    # running totals: overall withdrawal rate and number of matching accounts
    total_rate = 0.0
    matched = 0

    converter = ctx.helper.converter
    sp_per_mvests = converter.steem_per_mvests()

    for acc in accounts:
        shares = Amount(acc['vesting_shares'])
        mgests = shares.amount / 1000000
        withdraw_rate = Amount(acc['vesting_withdraw_rate'])
        next_withdrawal = parse_time(acc['next_vesting_withdrawal'])

        if mgests > min_mgests and withdraw_rate.amount > 1000:
            # Convert vests to golos power locally instead of calling
            # converter.vests_to_sp(), avoiding an API call per iteration.
            rate_gp = withdraw_rate.amount / 1e6 * sp_per_mvests
            gp = shares.amount / 1e6 * sp_per_mvests
            total_rate += rate_gp
            matched += 1

            print('{:<16} {:<18} {:>6.0f} {:>8.0f}'.format(
                acc['name'], next_withdrawal.strftime('%Y-%m-%d %H:%M'), rate_gp, gp))

    ctx.log.debug('accounts iteration took {:.2f} seconds'.format(
        (datetime.utcnow() - start).total_seconds()))

    ctx.log.info(
        'numbers of matching accounts on vesting withdrawal: {}'.format(matched))
    ctx.log.info('sum rate: {:.0f}'.format(total_rate))
Esempio n. 9
0
    def get_voting_power(self, account: str) -> float:
        """
        Calculate real voting power instead of stale info in get_account()

        :param str account: account name
        :return: voting power 0-100
        """

        acc = Account(account)
        stored_vp = acc.voting_power()

        # regenerate power based on the time elapsed since the last vote
        since_last_vote = datetime.utcnow() - parse_time(acc['last_vote_time'])
        regenerated = (STEEMIT_100_PERCENT * since_last_vote.total_seconds()
                       / STEEMIT_VOTE_REGENERATION_SECONDS)

        # cap at a full 100% voting power
        return min(stored_vp + regenerated / 100, 100)
Esempio n. 10
0
    def stream(self, filter_by: Union[str, list] = None, *args, **kwargs):
        """ Yield a stream of operations, starting with current head block.

            Args:
                filter_by (str, list): List of operations to filter for.
                    None (the default) means no filtering.
        """
        # Avoid the mutable default argument anti-pattern: None stands in
        # for "no filter" (behaviorally identical to the old `list()`).
        if filter_by is None:
            filter_by = []
        if isinstance(filter_by, str):
            filter_by = [filter_by]

        for ops in self.stream_from(*args, **kwargs):

            # deal with different self.stream_from() outputs: a dict with a
            # witness_signature is a whole block, which this API rejects
            events = ops
            if isinstance(ops, dict):
                if 'witness_signature' in ops:
                    raise ValueError(
                        'Blockchain.stream() is for operation level streams. '
                        'For block level streaming, use Blockchain.stream_from()'
                    )
                events = [ops]

            for event in events:
                op_type, op = event['op']
                if not filter_by or op_type in filter_by:
                    if kwargs.get('raw_output'):
                        # return unmodified steemd output
                        yield event
                    else:
                        # enrich the op with identity / type / time metadata
                        updated_op = op.copy()
                        updated_op.update({
                            "_id": self.hash_op(event),
                            "type": op_type,
                            "timestamp": parse_time(event.get("timestamp")),
                            "block_num": event.get("block"),
                            "trx_id": event.get("trx_id"),
                        })
                        yield updated_op
Esempio n. 11
0
def typify(value: Union[dict, list, set, str]):
    """ Enhance block operation with native types.

    Typify takes a blockchain operation or dict/list/value,
    and then it parses and converts string types into native data types where appropriate.
    """
    if isinstance(value, dict):
        return walk_values(typify, value)

    if isinstance(value, (list, set)):
        return list(map(typify, value))

    if isinstance(value, str):
        # Asset strings, e.g. "1.000 GOLOS" -> {'amount': ..., 'asset': ...}.
        # Raw strings fix the invalid '\d' escape sequences in the old code
        # (a SyntaxWarning on modern Python).
        if re.match(r'^\d+\.\d+ (GOLOS|GBG|GESTS)$', value):
            return keep_in_dict(dict(Amount(value)), ['amount', 'asset'])

        # ISO-8601-like timestamps -> datetime
        if re.match(r'^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}$', value):
            return parse_time(value)

    return value
Esempio n. 12
0
    def is_last_price_too_old(witness_data: Witness, max_age: int) -> bool:
        """
        Check last price update time and return True if older than max_age.

        :param witness_data: witness object dict, usually :py:class:`golos.witness.Witness` Witness instance
        :param int max_age: max seconds since last update
        """

        last_update = parse_time(witness_data['last_sbd_exchange_update'])
        log.debug('last price update: %s', last_update)
        log.debug('max_age: %s', max_age)

        age = (datetime.utcnow() - last_update).total_seconds()
        log.debug('time passed since last update: %s seconds', age)

        if age <= max_age:
            return False

        log.debug('price too old, need update')
        return True
Esempio n. 13
0
 def time(self):
     """Return this entry's 'timestamp' field parsed into a datetime."""
     raw_ts = self["timestamp"]
     return parse_time(raw_ts)
Esempio n. 14
0
    def get_bandwidth(self, account: str, type_: str = 'market') -> bandwidth:
        """
        Estimate current account bandwidth and usage ratio.

        :param str account: account name
        :param str type_: 'market' used for transfer operations, forum - for posting and voting,
            custom - custom ops
        :raises NotImplementedError: for type_ == 'custom'
        :raises ValueError: for any other unrecognized type_
        """

        acc = Account(account)

        global_props = self.get_dynamic_global_properties()

        # effective vesting shares = own - delegated out + received
        account_vshares = Amount(acc['vesting_shares'])['amount']
        delegated_vshares = Amount(acc['delegated_vesting_shares'])['amount']
        received_vshares = Amount(acc['received_vesting_shares'])['amount']
        account_vshares = account_vshares - delegated_vshares + received_vshares
        log.debug('{:.<30}{:.>30.0f}'.format('account_vshares:',
                                             account_vshares))

        # get bandwidth info from network
        if type_ == 'market':
            account_average_bandwidth = int(acc['average_market_bandwidth'])
            last_bw_update_time = parse_time(
                acc['last_market_bandwidth_update'])
        elif type_ == 'forum':
            account_average_bandwidth = int(acc['average_bandwidth'])
            last_bw_update_time = parse_time(acc['last_bandwidth_update'])
        elif type_ == 'custom':
            # BUG FIX: this branch previously compared the builtin `type`
            # (not the `type_` parameter) to 'custom', so it could never
            # match and an unknown type_ fell through to a NameError below.
            raise NotImplementedError
        else:
            raise ValueError('unknown bandwidth type: {}'.format(type_))

        # seconds passed since last bandwidth update
        elapsed_time = (datetime.utcnow() -
                        last_bw_update_time).total_seconds()

        max_virtual_bandwidth = int(global_props['max_virtual_bandwidth'])
        log.debug('{:.<30}{:.>30.0f}'.format('max_virtual_bandwidth:',
                                             max_virtual_bandwidth))
        log.debug('{:.<30}{:.>30.2f}'.format(
            'max_virtual_bandwidth, KB:',
            max_virtual_bandwidth / STEEMIT_BANDWIDTH_PRECISION / 1024))

        total_vesting_shares = Amount(
            global_props['total_vesting_shares']).amount
        log.debug('{:.<30}{:.>30.0f}'.format('total_vesting_shares:',
                                             total_vesting_shares))

        # calculate bandwidth regeneration: usage decays linearly to zero
        # over the averaging window
        if elapsed_time > STEEMIT_BANDWIDTH_AVERAGE_WINDOW_SECONDS:
            new_bandwidth = 0
        else:
            new_bandwidth = (
                (STEEMIT_BANDWIDTH_AVERAGE_WINDOW_SECONDS - elapsed_time) *
                account_average_bandwidth
            ) / STEEMIT_BANDWIDTH_AVERAGE_WINDOW_SECONDS

        # example code to estimate whether your new transaction will exceed bandwidth or not
        # trx_size = 1024*2 # imagine 2 KB trx
        # trx_bandwidth = trx_size * STEEMIT_BANDWIDTH_PRECISION
        # account_average_bandwidth = new_bandwidth + trx_bandwidth

        account_average_bandwidth = new_bandwidth
        log.debug('{:.<30}{:.>30.0f}'.format('account_average_bandwidth:',
                                             account_average_bandwidth))

        # c++ code:
        # has_bandwidth = (account_vshares * max_virtual_bandwidth) > (account_average_bandwidth * total_vshares);

        avail = account_vshares * max_virtual_bandwidth
        used = account_average_bandwidth * total_vesting_shares
        log.debug('{:.<30}{:.>30.0f}'.format('used:', used))
        log.debug('{:.<30}{:.>30.0f}'.format('avail:', avail))

        used_ratio = used / avail
        log.debug('{:.<30}{:.>30.2%}'.format('used ratio:', used_ratio))

        # account bandwidth is actually a representation of sent bytes, so get these bytes
        used_kb = account_average_bandwidth / STEEMIT_BANDWIDTH_PRECISION / 1024

        # market ops uses x10 bandwidth
        if type_ == 'market':
            used_kb = used_kb / 10
        log.debug('{:.<30}{:.>30.2f}'.format('used KB:', used_kb))

        # available account bandwidth is a fraction of max_virtual_bandwidth based on his portion of
        # total_vesting_shares
        avail_kb = account_vshares / total_vesting_shares * max_virtual_bandwidth / STEEMIT_BANDWIDTH_PRECISION / 1024
        if type_ == 'market':
            avail_kb = avail_kb / 10
        log.debug('{:.<30}{:.>30.2f}'.format('avail KB:', avail_kb))

        if used < avail:
            log.debug('has bandwidth')
        else:
            log.debug('no bandwidth')

        return bandwidth(used_kb, avail_kb, used_ratio)
Esempio n. 15
0
 def time(self):
     """Parse the stored 'timestamp' string and return it as a datetime."""
     return parse_time(self['timestamp'])