Code Example #1
def update_exchange_rates(symbol: str):

    os.makedirs(BASE_DIRECTORY, exist_ok=True)

    coin_gecko_id = CoinGecko.get_id_by_symbol(symbol)

    if not coin_gecko_id:
        return

    max_time = datetime.utcnow()
    max_time = max_time.replace(hour=0, minute=0, second=0, microsecond=0)

    path = os.path.join(BASE_DIRECTORY, symbol + '.csv')

    if not os.path.isfile(path):
        act_date = get_first_transaction_timestamp(symbol)
    else:
        act_date = _get_last_timestamp(path) + timedelta(days=1)

    log.debug('updating token exchange rates')

    with open(path, 'a') as file:
        while act_date < max_time:

            price = CoinGecko.get_exchange_rate(coin_gecko_id, act_date)

            file.write(','.join([act_date.strftime('%Y-%m-%d'),
                                 str(price)]) + '\n')
            file.flush()
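            # brief pause between requests, presumably to stay within the CoinGecko API rate limit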
            time.sleep(1)

            act_date += timedelta(days=1)
Code Example #2
def final_data_payments():
    os.makedirs(STORE_FINAL_DATA_PAYMENTS, exist_ok=True)

    max_time = datetime.utcnow()
    max_time = max_time.replace(hour=0,
                                minute=0,
                                second=0,
                                microsecond=0,
                                tzinfo=pytz.UTC)

    stop_processing = False

    date_to_process = get_first_transaction_timestamp()
    date_last_processed = _get_last_processed_date()
    date_to_process = max(date_to_process,
                          date_last_processed + timedelta(days=1))

    log.debug('generate final data: payments')

    if date_to_process >= max_time:
        return

    while not stop_processing:

        log.debug('creating final payment data for ' +
                  date_to_process.strftime('%Y-%m-%d'))

        # Amount
        # Count
        # Average

        final_data = calculate_daily_payment_data.get_data_for_date(
            date_to_process)

        if len(final_data.keys()) > 0:
            with open(
                    os.path.join(
                        STORE_FINAL_DATA_PAYMENTS,
                        date_to_process.strftime('%Y-%m-%d') + '.json'),
                    'a') as file:

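                # derive the average payment size per currency before writing the day's file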
                for currency in final_data.keys():

                    if final_data[currency]['payment_count'] > 0:
                        final_data[currency]['average'] = final_data[currency][
                            'total_amount'] / final_data[currency][
                                'payment_count']
                    else:
                        final_data[currency]['average'] = 0

                file.write(json.dumps(final_data))

        date_to_process += timedelta(days=1)

        if date_to_process >= max_time:
            stop_processing = True
Code Example #3
def calculate_rolling_retention_data():

    os.makedirs(STORE_ROLLING_RETENTION_DATA, exist_ok=True)

    max_time = datetime.utcnow()
    max_time = max_time.replace(hour=0,
                                minute=0,
                                second=0,
                                microsecond=0,
                                tzinfo=pytz.UTC)

    stop_processing = False

    date_to_process = get_first_transaction_timestamp()

    log.debug('calculate: rolling retention')

    if date_to_process >= max_time:
        return

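    # the total-user snapshot from yesterday is loaded once and reused for every day processed below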
    total_user_from_yesterday = calculate_total_user_data.get_data_for_date(
        max_time - timedelta(days=1))

    while not stop_processing:

        log.debug('calculate rolling retention data for ' +
                  date_to_process.strftime('%Y-%m-%d'))

        final_data = _calculate_retention_data(date_to_process,
                                               total_user_from_yesterday)

        for currency in final_data.keys():
            file_path = os.path.join(
                STORE_ROLLING_RETENTION_DATA, currency,
                date_to_process.strftime('%Y-%m-%d') + '.json')

            os.makedirs(os.path.join(STORE_ROLLING_RETENTION_DATA, currency),
                        exist_ok=True)

            with open(file_path, 'w') as file:
                file.write(json.dumps(final_data[currency]))

        date_to_process += timedelta(days=1)

        if date_to_process >= max_time:
            stop_processing = True
Code Example #4
def _get_next_time_to_process(symbol, symbol_dir):
    last_file_timestamp = None
    last_file = None

    files = [f for f in os.listdir(symbol_dir) if os.path.isfile(os.path.join(symbol_dir, f))]

    # get the file with the highest timestamp
    for file in files:
        filename = file.split('.')[0]

        timestamp = datetime.strptime(filename, '%Y-%m-%d')

        if not last_file_timestamp or timestamp > last_file_timestamp:
            last_file_timestamp = timestamp
            last_file = file

    if last_file_timestamp:
        return last_file_timestamp + timedelta(days=1)
    else:
        return get_first_transaction_timestamp(symbol)
Code Example #5
def calculate_daily_retention_data():

    os.makedirs(STORE_DAILY_RETENTION_DATA, exist_ok=True)

    max_time = datetime.utcnow()
    max_time = max_time.replace(hour=0, minute=0, second=0, microsecond=0, tzinfo=pytz.UTC)

    stop_processing = False

    date_to_process = get_first_transaction_timestamp()
    date_last_processed = _get_last_processed_date()
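    # go back up to 31 days, presumably so retention windows of up to 30 days are recalculated with fresh data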
    date_to_process = max(date_to_process, date_last_processed - timedelta(days=31))

    log.debug('calculate: retention')

    if date_to_process >= max_time:
        return

    while not stop_processing:

        log.debug('creating retention data for ' + date_to_process.strftime('%Y-%m-%d'))

        final_data = _calculate_retention_data(date_to_process)

        for currency in final_data.keys():
            file_path = os.path.join(STORE_DAILY_RETENTION_DATA, currency, date_to_process.strftime('%Y-%m-%d') + '.json')

            os.makedirs(os.path.join(STORE_DAILY_RETENTION_DATA, currency), exist_ok=True)

            with open(file_path, 'w') as file:
                file.write(json.dumps(final_data[currency]))

        date_to_process += timedelta(days=1)

        if date_to_process >= max_time:
            stop_processing = True
Code Example #6
def final_data_general():

    os.makedirs(STORE_FINAL_DATA_GENERAL, exist_ok=True)

    max_time = datetime.utcnow()
    max_time = max_time.replace(hour=0,
                                minute=0,
                                second=0,
                                microsecond=0,
                                tzinfo=pytz.UTC)

    stop_processing = False

    date_to_process = get_first_transaction_timestamp()
    # date_last_processed = _get_last_processed_date()
    # date_to_process = max(date_to_process, date_last_processed + timedelta(days=1))

    log.debug('generate final data: general')

    if date_to_process >= max_time:
        return

    while not stop_processing:

        final_data = {}
        payment_data = calculate_daily_payment_data.get_data_for_date(
            date_to_process)

        file_path = os.path.join(
            STORE_FINAL_DATA_GENERAL,
            date_to_process.strftime('%Y-%m-%d') + '.json')

        if not os.path.isfile(file_path):
            for symbol in payment_data.keys():

                final_data[symbol] = {}

                log.debug('creating final general data for ' +
                          date_to_process.strftime('%Y-%m-%d'))

                # Amount of Coins
                # Velocity

                market_data = calculate_market_data.get_data(
                    symbol, date_to_process)

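                # no market data for this date yet: abort and leave the file unwritten so a later run can retry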
                if not market_data:
                    return

                final_data[symbol]['amount_of_coins'] = market_data[
                    'circulating_supply']
                final_data[symbol]['velocity_m1'] = payment_data[symbol][
                    'total_amount'] / market_data['circulating_supply']

            if len(final_data.keys()) > 0:
                with open(file_path, 'w') as file:
                    file.write(json.dumps(final_data))

        date_to_process += timedelta(days=1)

        if date_to_process >= max_time:
            stop_processing = True
Code Example #7
def calculate_total_user_data():
    os.makedirs(STORE_TOTAL_USER_DIRECTORY, exist_ok=True)

    max_time = datetime.utcnow()
    max_time = max_time.replace(hour=0,
                                minute=0,
                                second=0,
                                microsecond=0,
                                tzinfo=pytz.UTC)

    stop_processing = False

    date_to_process = get_first_transaction_timestamp()
    date_last_processed = _get_last_processed_date()
    date_to_process = max(date_to_process,
                          date_last_processed + timedelta(days=1))

    log.debug('calculate: total user data')

    if date_to_process >= max_time:
        return

    state = _load_state(date_to_process)

    while not stop_processing:

        log.debug('analysing total user data for ' +
                  date_to_process.strftime('%Y-%m-%d'))

        transactions = get_transaction_data(
            date_to_process, type_filter=['bank_MsgMultiSend', 'bank_MsgSend'])

        for transaction in transactions:

            # log.debug(transaction)

            type = transaction[0]
            block = transaction[1]
            timestamp = transaction[2]
            tx_hash = transaction[3]
            amount = int(transaction[4])
            currency = transaction[5]
            from_address = transaction[6]
            to_address = transaction[7]
            tax_amount = transaction[8]
            tax_currency = transaction[9]

            if currency not in state.keys():
                state[currency] = {}

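            # only the sending address is tracked as a user; recipients are not recorded here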
            if from_address not in state[currency]:
                state[currency][from_address] = {
                    'first_seen_timestamp': timestamp,
                }

            state[currency][from_address]['last_seen_timestamp'] = timestamp

        _save_state(date_to_process, state)

        date_to_process += timedelta(days=1)

        if date_to_process >= max_time:
            stop_processing = True
Code Example #8
def final_data_rolling_retention():
    os.makedirs(STORE_FINAL_DATA_USER, exist_ok=True)

    max_time = datetime.utcnow()
    max_time = max_time.replace(hour=0, minute=0, second=0, microsecond=0, tzinfo=pytz.UTC)

    stop_processing = False

    date_to_process = get_first_transaction_timestamp()

    log.debug('generate final data: rolling retention')

    if date_to_process >= max_time:
        return

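    # per-currency set of every address seen so far; it grows across the whole date range, so 'total' is cumulative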
    user_list = {}

    while not stop_processing:

        user_meta_data = {}

        log.debug('creating final rolling retention data for ' + date_to_process.strftime('%Y-%m-%d'))

        # Total
        # New
        # Daily Active
        # retention 7d
        # retention 14d
        # retention 30d

        file_path = os.path.join(STORE_FINAL_DATA_USER, date_to_process.strftime('%Y-%m-%d') + '.json')
        raw_data = calculate_daily_transaction_data.get_user(date_to_process)

        for currency in raw_data.keys():

            if currency not in user_list.keys():
                user_list[currency] = set()

            user_meta_data[currency] = {}

            count_new_user = 0
            count_daily_active = len(raw_data[currency])

            for user_object in raw_data[currency]:

                if not user_object['address'] in user_list[currency]:
                    count_new_user += 1

                user_list[currency].add(user_object['address'])

            user_meta_data[currency]['new'] = count_new_user
            user_meta_data[currency]['daily'] = count_daily_active

        if not os.path.isfile(file_path):

            final_data = {}

            for currency in user_list.keys():

                #
                # calculate retention data
                #
                retention_data = calculate_rolling_retention_data.get_retention_for_date(date_to_process, currency)

                final_data[currency] = {
                    'total': len(user_list[currency]),
                    'new': user_meta_data[currency]['new'] if currency in user_meta_data else 0,
                    'daily': user_meta_data[currency]['daily'] if currency in user_meta_data else 0,
                    'rolling_retention': retention_data,
                }

            if len(raw_data.keys()) > 0:
                with open(file_path, 'w') as file:
                    file.write(json.dumps(final_data))

        date_to_process += timedelta(days=1)

        if date_to_process >= max_time:
            stop_processing = True
Code Example #9
def calculate_daily_payment_data():

    os.makedirs(STORE_DAILY_PAYMENTS_DIRECTORY, exist_ok=True)

    max_time = datetime.utcnow()
    max_time = max_time.replace(hour=0, minute=0, second=0, microsecond=0, tzinfo=pytz.UTC)

    stop_processing = False

    date_to_process = get_first_transaction_timestamp()
    date_last_processed = _get_last_processed_date()
    date_to_process = max(date_to_process, date_last_processed + timedelta(days=1))

    log.debug('calculate: total amount of payments per coin')

    # TODO remove all lines from STORE_DAILY_PAYMENTS_DIRECTORY which are in the future from date_to_process
    # TODO remove all files from STORE_DAILY_ADDRESS_PAYMENTS_DIRECTORY which are in the future from date_to_process

    if date_to_process >= max_time:
        return

    # with open(symbol_file, 'a') as file:
    while not stop_processing:

        log.debug('analysing payment data for ' + date_to_process.strftime('%Y-%m-%d'))

        transactions = get_transaction_data(date_to_process, type_filter=['bank_MsgMultiSend', 'bank_MsgSend'])

        token = dict()

        for transaction in transactions:

            # unpack the raw transaction tuple (positional schema)
            type = transaction[0]
            block = transaction[1]
            timestamp = transaction[2]
            tx_hash = transaction[3]
            amount = int(transaction[4])
            currency = transaction[5]
            from_address = transaction[6]
            to_address = transaction[7]
            tax_amount = int(transaction[8])
            tax_currency = transaction[9]

            if currency not in token.keys():
                token[currency] = {
                    'total_amount': 0,
                    'payment_count': 0,
                    'total_tax_amount': 0,
                    'active_users': dict(),
                }

            token[currency]['payment_count'] += 1
            token[currency]['total_amount'] += amount
            token[currency]['total_tax_amount'] += tax_amount

            if from_address not in token[currency]['active_users'].keys():
                token[currency]['active_users'][from_address] = {
                    'total_amount': 0,
                    'payment_count': 0,
                }

            token[currency]['active_users'][from_address]['total_amount'] += amount
            token[currency]['active_users'][from_address]['payment_count'] += 1

        for currency in token.keys():

            with open(os.path.join(STORE_DAILY_PAYMENTS_DIRECTORY, currency + '.csv'), 'a') as file:

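                # effective tax rate for the day: total tax collected divided by total amount transferred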
                tax_rate = token[currency]['total_tax_amount'] / token[currency]['total_amount']

                file.write(','.join([date_to_process.strftime('%Y-%m-%d'),
                                     str(token[currency]['total_amount']),
                                     str(token[currency]['payment_count']),
                                     f"{tax_rate:.15f}",
                                    ]) + '\n')

            os.makedirs(os.path.join(STORE_DAILY_ADDRESS_PAYMENTS_DIRECTORY, currency), exist_ok=True)

            with open(os.path.join(STORE_DAILY_ADDRESS_PAYMENTS_DIRECTORY,
                                   currency,
                                   date_to_process.strftime('%Y-%m-%d') + '.csv'), 'a') as file:

                for address in token[currency]['active_users'].keys():
                    file.write(','.join([address,
                                         str(token[currency]['active_users'][address]['total_amount']),
                                         str(token[currency]['active_users'][address]['payment_count']),
                                        ]) + '\n')

        date_to_process += timedelta(days=1)

        if date_to_process >= max_time:
            stop_processing = True
Code Example #10
def final_data_transactions():
    os.makedirs(STORE_FINAL_DATA_TRANSACTIONS, exist_ok=True)

    max_time = datetime.utcnow()
    max_time = max_time.replace(hour=0,
                                minute=0,
                                second=0,
                                microsecond=0,
                                tzinfo=pytz.UTC)

    stop_processing = False

    date_to_process = get_first_transaction_timestamp()
    date_last_processed = _get_last_processed_date()
    date_to_process = max(date_to_process,
                          date_last_processed + timedelta(days=1))

    log.debug('generate final data: transactions')

    if date_to_process >= max_time:
        return

    while not stop_processing:

        log.debug('creating final transaction data for ' +
                  date_to_process.strftime('%Y-%m-%d'))

        # Send
        # Deposit
        # Swap
        # Create Validator

        raw_data_send = calculate_daily_transaction_data.get_data(
            date_to_process, 'bank_MsgSend')
        raw_data_multisend = calculate_daily_transaction_data.get_data(
            date_to_process, 'bank_MsgMultiSend')
        raw_data_deposit = calculate_daily_transaction_data.get_data(
            date_to_process, 'gov_MsgDeposit')
        raw_data_swap = calculate_daily_transaction_data.get_data(
            date_to_process, 'market_MsgSwap')
        raw_data_create_validator = calculate_daily_transaction_data.get_data(
            date_to_process, 'staking_MsgCreateValidator')

        final_data = {}

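        # give every currency a 'send' counter up front; it aggregates both MsgSend and MsgMultiSend counts below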
        for raw_data in [
                raw_data_send, raw_data_multisend, raw_data_deposit,
                raw_data_swap, raw_data_create_validator
        ]:
            for currency in raw_data.keys():
                if currency not in final_data.keys():
                    final_data[currency] = {}

                if 'send' not in final_data[currency]:
                    final_data[currency]['send'] = 0

        for currency in raw_data_send.keys():

            final_data[currency]['send'] += raw_data_send[currency]['count']

        for currency in raw_data_multisend.keys():
            final_data[currency]['send'] += raw_data_multisend[currency][
                'count']

        for currency in raw_data_deposit.keys():
            final_data[currency]['deposit'] = raw_data_deposit[currency][
                'count']

        for currency in raw_data_swap.keys():
            final_data[currency]['swap'] = raw_data_swap[currency]['count']

        for currency in raw_data_create_validator.keys():
            final_data[currency][
                'create_validator'] = raw_data_create_validator[currency][
                    'count']

        if len(final_data.keys()) > 0:
            with open(
                    os.path.join(
                        STORE_FINAL_DATA_TRANSACTIONS,
                        date_to_process.strftime('%Y-%m-%d') + '.json'),
                    'a') as file:
                file.write(json.dumps(final_data))

        date_to_process += timedelta(days=1)

        if date_to_process >= max_time:
            stop_processing = True
Code Example #11
def update_realized_market_capitalization():

    os.makedirs(STORE_REALIZED_MARKET_CAP_DATA, exist_ok=True)

    max_time = datetime.utcnow()
    max_time = max_time.replace(hour=0,
                                minute=0,
                                second=0,
                                microsecond=0,
                                tzinfo=pytz.UTC)

    stop_processing = False

    date_to_process = get_first_transaction_timestamp()
    date_last_processed = _get_last_processed_date()
    date_to_process = max(date_to_process,
                          date_last_processed + timedelta(days=1))

    if date_to_process >= max_time:
        return

    state = _load_state(date_to_process)

    log.debug('calculate: realized market cap')

    while not stop_processing:

        transactions = get_transaction_data(
            date_to_process, type_filter=['bank_MsgMultiSend', 'bank_MsgSend'])

        log.debug('processing realized market cap for: ' +
                  str(date_to_process))

        for transaction in transactions:

            start_date = datetime.now()

            type = transaction[0]
            block = transaction[1]
            timestamp = transaction[2]
            tx_hash = transaction[3]
            amount = int(transaction[4])
            currency = transaction[5]
            from_address = transaction[6]
            to_address = transaction[7]
            tax_amount = int(transaction[8])
            tax_currency = transaction[9]

            price = 0.1
            # TODO get the correct market price per token per date
            # first_market_price_date = get_first_market_price_date(symbol)
            #
            # if not first_market_price_date:
            #     log.debug("no market price available")
            #     return
            #
            # if int(timestamp) < first_market_price_date.timestamp():
            #
            #     if init_price:
            #         price = init_price
            #     else:
            #         price = 0
            # else:
            #     price = get_local_exchange_rate(symbol, datetime.utcfromtimestamp(int(timestamp)))

            if currency not in state.keys():
                state[currency] = {}

            if from_address in state[currency].keys():
                from_account = state[currency][from_address]
            else:
                from_account = None

            if to_address in state[currency].keys():
                to_account = state[currency][to_address]
            else:
                to_account = {
                    'balance': 0,
                    'data': [],
                }
                state[currency][to_address] = to_account

            #
            # add transaction to the from-account
            #

            if from_account:

                remaining_value = amount

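                # consume the sender's oldest tranches first (FIFO) until the transferred amount is covered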
                while remaining_value > 0:
                    try:
                        from_amount = from_account['data'][0][1]
                    except Exception:
                        # log.debug(transaction)
                        break

                    if remaining_value < from_amount:
                        from_account['data'][0][1] -= remaining_value
                        remaining_value = 0
                        from_account['data'][0][2] = price

                    else:
                        remaining_value -= from_amount
                        from_account['data'] = from_account['data'][1:]

                from_account['balance'] = max(
                    0,
                    int(from_account['balance']) - amount)

            #
            # add transaction to the to-account
            #

            to_account['data'].append([timestamp, amount, price])
            to_account['balance'] = int(to_account['balance']) + amount

            end_date = datetime.now()
            # print('calculation time: ' + str((end_date - start_date).total_seconds() * 1000))

        #
        # all transactions are processed, saving state to a file
        #
        _save_state(date_to_process, state)

        date_to_process = date_to_process + timedelta(days=1)

        if date_to_process >= max_time:
            stop_processing = True
Code Example #12
def calculate_daily_transaction_data():
    # symbol = token['symbol']
    # symbol_file = STORE_DIRECTORY + symbol

    os.makedirs(STORE_DAILY_TRANSACTIONS_DIRECTORY, exist_ok=True)

    max_time = datetime.utcnow()
    max_time = max_time.replace(hour=0, minute=0, second=0, microsecond=0, tzinfo=pytz.UTC)

    stop_processing = False

    date_to_process = get_first_transaction_timestamp()
    date_last_processed = _get_last_processed_date()
    date_to_process = max(date_to_process, date_last_processed + timedelta(days=1))

    log.debug('calculate: total amount of transactions per coin per type')

    # TODO remove all lines from STORE_DAILY_PAYMENTS_DIRECTORY which are in the future from date_to_process
    # TODO remove all files from STORE_DAILY_ADDRESS_PAYMENTS_DIRECTORY which are in the future from date_to_process

    if date_to_process >= max_time:
        return

    # with open(symbol_file, 'a') as file:
    while not stop_processing:

        log.debug('analysing transaction data for ' + date_to_process.strftime('%Y-%m-%d'))

        transactions = get_transaction_data(date_to_process)

        types = dict()

        for transaction in transactions:

            type = transaction[0]
            block = transaction[1]
            timestamp = transaction[2]
            tx_hash = transaction[3]

            if type not in types.keys():
                types[type] = {
                    'count': 0,
                    'currencies': dict(),
                }

            currency = None

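            # the position of the currency field depends on the message type; types without a currency are counted under 'default'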
            if type == 'bank_MsgMultiSend':
                currency = transaction[5]
            elif type == 'bank_MsgSend':
                currency = transaction[5]
            elif type == 'distribution_MsgWithdrawDelegationReward':
                currency = None
            elif type == 'distribution_MsgWithdrawValidatorCommission':
                currency = None
            elif type == 'gov_MsgDeposit':
                currency = transaction[7]
            elif type == 'gov_MsgSubmitProposal':
                currency = None
            elif type == 'market_MsgSwap':
                currency = None
            elif type == 'oracle_MsgDelegateFeedConsent':
                currency = None
            elif type == 'oracle_MsgExchangeRatePrevote':
                currency = transaction[5]
            elif type == 'oracle_MsgExchangeRateVote':
                currency = transaction[5]
            elif type == 'staking_MsgCreateValidator':
                currency = transaction[6]
            elif type == 'staking_MsgDelegate':
                currency = transaction[7]
            elif type == 'staking_MsgEditValidator':
                currency = None

            if currency and currency not in types[type]['currencies']:
                types[type]['currencies'][currency] = {
                    'count': 0,
                }

            if currency:
                types[type]['currencies'][currency]['count'] += 1
            else:
                types[type]['count'] += 1

        # print(types)

        for type in types.keys():

            os.makedirs(os.path.join(STORE_DAILY_TRANSACTIONS_DIRECTORY, type), exist_ok=True)

            if len(types[type]['currencies']) > 0:

                for currency in types[type]['currencies']:

                    with open(os.path.join(STORE_DAILY_TRANSACTIONS_DIRECTORY, type, currency + '.csv'), 'a') as file:
                        file.write(','.join([date_to_process.strftime('%Y-%m-%d'),
                                             str(types[type]['currencies'][currency]['count']),
                                            ]) + '\n')
            else:
                with open(os.path.join(STORE_DAILY_TRANSACTIONS_DIRECTORY, type, 'default.csv'), 'a') as file:
                    file.write(','.join(
                        [date_to_process.strftime('%Y-%m-%d'),
                         str(types[type]['count']),
                         ]) + '\n')

        date_to_process += timedelta(days=1)

        if date_to_process >= max_time:
            stop_processing = True