Example #1
    api_url_for,
    assert_error_response,
    assert_ok_async_response,
    assert_proper_response,
    wait_for_async_task,
)
from rotkehlchen.tests.utils.checks import assert_serialized_lists_equal
from rotkehlchen.tests.utils.factories import make_ethereum_address
from rotkehlchen.tests.utils.makerdao import mock_proxies
from rotkehlchen.tests.utils.mock import MockResponse
from rotkehlchen.typing import ChecksumEthAddress

mocked_prices = {
    'DAI': {
        'USD': {
            1582699808: FVal('1.002'),
            1584024065: FVal('1.002'),
            1585286480: FVal('1.023'),
            1585286769: FVal('1.023'),
            1585290263: FVal('1.023'),
            1586785858: FVal('1.024'),
            1586788927: FVal('1.024'),
            1586805054: FVal('1.024'),
            1587539880: FVal('1.016'),
            1587539889: FVal('1.016'),
            1587910979: FVal('1.015'),
            1588174425: FVal('1.014'),
            1588664698: FVal('1.006'),
            1588696496: FVal('1.006'),
            1588964616: FVal('1.006'),
            1589989097: FVal('1.003'),
Example #2
    def modify_eth_account(
        self,
        given_account: EthAddress,
        append_or_remove: str,
        add_or_sub: Callable[[FVal, FVal], FVal],
    ) -> None:
        """Either appends or removes an ETH acccount.

        Call with 'append', operator.add to add the account
        Call with 'remove', operator.sub to remove the account
        """
        # Make sure account goes into web3.py as a properly checksummed address
        account = to_checksum_address(given_account)
        eth_usd_price = Inquirer().find_usd_price(A_ETH)
        balance = self.ethchain.get_eth_balance(account)
        usd_balance = balance * eth_usd_price
        if append_or_remove == 'append':
            self.accounts.eth.append(account)
            self.balances[A_ETH][account] = {
                A_ETH: balance,
                'usd_value': usd_balance
            }
        elif append_or_remove == 'remove':
            if account not in self.accounts.eth:
                raise InputError('Tried to remove a non existing ETH account')
            self.accounts.eth.remove(account)
            del self.balances[A_ETH][account]
        else:
            raise ValueError('Programmer error: Should be append or remove')

        if len(self.balances[A_ETH]) == 0:
            # If the last account was removed balance should be 0
            self.totals[A_ETH]['amount'] = FVal(0)
            self.totals[A_ETH]['usd_value'] = FVal(0)
        else:
            self.totals[A_ETH]['amount'] = add_or_sub(
                self.totals[A_ETH].get('amount', FVal(0)),
                balance,
            )
            self.totals[A_ETH]['usd_value'] = add_or_sub(
                self.totals[A_ETH].get('usd_value', FVal(0)),
                usd_balance,
            )

        for token in self.owned_eth_tokens:
            usd_price = Inquirer().find_usd_price(token)
            if usd_price == 0:
                # skip tokens that have no price
                continue

            token_balance = Blockchain._query_token_balances(
                token_asset=token,
                query_callback=self.ethchain.get_token_balance,
                argument=account,
            )
            if token_balance == 0:
                continue

            usd_value = token_balance * usd_price
            if append_or_remove == 'append':
                account_balance = self.balances[A_ETH][account]
                account_balance[token] = token_balance
                account_balance['usd_value'] = account_balance['usd_value'] + usd_value

            self.totals[token] = {
                'amount': add_or_sub(
                    self.totals[token].get('amount', FVal(0)),
                    token_balance,
                ),
                'usd_value': add_or_sub(
                    self.totals[token].get('usd_value', FVal(0)),
                    usd_value,
                ),
            }
Example #3
            outcome = wait_for_async_task(rotkehlchen_api_server, task_id)
            result = outcome['result']
        else:
            result = assert_proper_response_with_result(response)

    assert result['BTC']['amount'] == '1.425'
    assert result['XMR']['amount'] == '50.315'
    assert result['BNB']['amount'] == '155'
    # Check DB to make sure a save happened
    assert rotki.data.db.get_last_balance_save_time() >= now
    assert set(rotki.data.db.query_owned_assets()) == {
        'BTC', 'XMR', 'BNB', 'ETH', 'RDN'
    }


@pytest.mark.parametrize('mocked_current_prices', [{'CYFM': FVal(0)}])
@pytest.mark.parametrize('async_query', [False, True])
def test_add_manually_tracked_balances_no_price(rotkehlchen_api_server,
                                                async_query):
    """Test that adding a manually tracked balance of an asset for which we cant
    query a price is handled properly both in the adding and querying part

    Regression test for https://github.com/rotki/rotki/issues/896"""
    _populate_tags(rotkehlchen_api_server)
    balances: List[Dict[str, Any]] = [{
        "asset": "CYFM",
        "label": "CYFM account",
        "amount": "50.315",
        "tags": ["public"],
        "location": "blockchain",
    }]
Example #4
    def _get_balances_graph(
        self,
        addresses: List[ChecksumEthAddress],
    ) -> ProtocolBalance:
        """Get the addresses' pools data querying the Uniswap subgraph

        Each liquidity position is converted into a <LiquidityPool>.
        """
        address_balances: DDAddressBalances = defaultdict(list)
        known_assets: Set[EthereumToken] = set()
        unknown_assets: Set[UnknownEthereumToken] = set()

        addresses_lower = [address.lower() for address in addresses]
        querystr = format_query_indentation(LIQUIDITY_POSITIONS_QUERY.format())
        param_types = {
            '$limit': 'Int!',
            '$offset': 'Int!',
            '$addresses': '[String!]',
            '$balance': 'BigDecimal!',
        }
        param_values = {
            'limit': GRAPH_QUERY_LIMIT,
            'offset': 0,
            'addresses': addresses_lower,
            'balance': '0',
        }
        while True:
            result = self.graph.query(  # type: ignore # caller already checks
                querystr=querystr,
                param_types=param_types,
                param_values=param_values,
            )
            result_data = result['liquidityPositions']

            for lp in result_data:
                user_address = to_checksum_address(lp['user']['id'])
                user_lp_balance = FVal(lp['liquidityTokenBalance'])
                lp_pair = lp['pair']
                lp_address = to_checksum_address(lp_pair['id'])
                lp_total_supply = FVal(lp_pair['totalSupply'])

                # Insert LP tokens reserves within tokens dicts
                token0 = lp_pair['token0']
                token0['total_amount'] = lp_pair['reserve0']
                token1 = lp_pair['token1']
                token1['total_amount'] = lp_pair['reserve1']

                liquidity_pool_assets = []

                for token in token0, token1:
                    # Get the token <EthereumToken> or <UnknownEthereumToken>
                    asset = get_ethereum_token(
                        symbol=token['symbol'],
                        ethereum_address=to_checksum_address(token['id']),
                        name=token['name'],
                        decimals=int(token['decimals']),
                    )

                    # Classify the asset either as known or unknown
                    if isinstance(asset, EthereumToken):
                        known_assets.add(asset)
                    elif isinstance(asset, UnknownEthereumToken):
                        unknown_assets.add(asset)

                    # Estimate the underlying asset total_amount
                    asset_total_amount = FVal(token['total_amount'])
                    user_asset_balance = (user_lp_balance / lp_total_supply *
                                          asset_total_amount)

                    liquidity_pool_asset = LiquidityPoolAsset(
                        asset=asset,
                        total_amount=asset_total_amount,
                        user_balance=Balance(amount=user_asset_balance),
                    )
                    liquidity_pool_assets.append(liquidity_pool_asset)

                liquidity_pool = LiquidityPool(
                    address=lp_address,
                    assets=liquidity_pool_assets,
                    total_supply=lp_total_supply,
                    user_balance=Balance(amount=user_lp_balance),
                )
                address_balances[user_address].append(liquidity_pool)

            # Check whether an extra request is needed
            if len(result_data) < GRAPH_QUERY_LIMIT:
                break

            # Update pagination step
            param_values = {
                **param_values,
                'offset':
                param_values['offset'] + GRAPH_QUERY_LIMIT,  # type: ignore
            }

        protocol_balance = ProtocolBalance(
            address_balances=dict(address_balances),
            known_assets=known_assets,
            unknown_assets=unknown_assets,
        )
        return protocol_balance
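
The loop above follows a simple offset-pagination pattern: query a page, collect its rows, stop when a page comes back shorter than GRAPH_QUERY_LIMIT, otherwise bump the offset. A minimal standalone sketch of that pattern, using a hypothetical fetch_page(offset, limit) callable instead of the Graph client:

GRAPH_QUERY_LIMIT = 1000  # assumed page size; stands in for the constant used above

def paginate_all(fetch_page, limit=GRAPH_QUERY_LIMIT):
    """Collect every row from an offset-paginated source.

    fetch_page(offset, limit) is a hypothetical callable returning a list of
    at most `limit` rows; a short page signals that it was the last one.
    """
    rows = []
    offset = 0
    while True:
        page = fetch_page(offset, limit)
        rows.extend(page)
        if len(page) < limit:
            break  # last page reached, no extra request needed
        offset += limit
    return rows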
Example #5
    def _get_trades_graph_for_address(
        self,
        address: ChecksumEthAddress,
        start_ts: Timestamp,
        end_ts: Timestamp,
    ) -> List[AMMTrade]:
        """Get the address' trades data querying the Uniswap subgraph

        Each trade (swap) instantiates an <AMMTrade>.

        The trade pair (i.e. BASE_QUOTE) is determined by `reserve0_reserve1`.
        Translated to Uniswap lingo:

        Trade type BUY:
        - `amount1In` (QUOTE, reserve1) is gt 0.
        - `amount0Out` (BASE, reserve0) is gt 0.

        Trade type SELL:
        - `amount0In` (BASE, reserve0) is gt 0.
        - `amount1Out` (QUOTE, reserve1) is gt 0.
        """
        trades: List[AMMTrade] = []
        param_types = {
            '$limit': 'Int!',
            '$offset': 'Int!',
            '$address': 'Bytes!',
            '$start_ts': 'BigInt!',
            '$end_ts': 'BigInt!',
        }
        param_values = {
            'limit': GRAPH_QUERY_LIMIT,
            'offset': 0,
            'address': address.lower(),
            'start_ts': str(start_ts),
            'end_ts': str(end_ts),
        }
        querystr = format_query_indentation(SWAPS_QUERY.format())

        while True:
            result = self.graph.query(  # type: ignore # caller already checks
                querystr=querystr,
                param_types=param_types,
                param_values=param_values,
            )
            result_data = result['swaps']
            for entry in result_data:
                swaps = []
                for swap in entry['transaction']['swaps']:
                    timestamp = swap['timestamp']
                    swap_token0 = swap['pair']['token0']
                    swap_token1 = swap['pair']['token1']
                    token0 = get_ethereum_token(
                        symbol=swap_token0['symbol'],
                        ethereum_address=to_checksum_address(
                            swap_token0['id']),
                        name=swap_token0['name'],
                        decimals=int(swap_token0['decimals']),
                    )
                    token1 = get_ethereum_token(
                        symbol=swap_token1['symbol'],
                        ethereum_address=to_checksum_address(
                            swap_token1['id']),
                        name=swap_token1['name'],
                        decimals=int(swap_token1['decimals']),
                    )
                    amount0_in = FVal(swap['amount0In'])
                    amount1_in = FVal(swap['amount1In'])
                    amount0_out = FVal(swap['amount0Out'])
                    amount1_out = FVal(swap['amount1Out'])
                    swaps.append(
                        AMMSwap(
                            tx_hash=swap['id'].split('-')[0],
                            log_index=int(swap['logIndex']),
                            address=address,
                            from_address=to_checksum_address(swap['sender']),
                            to_address=to_checksum_address(swap['to']),
                            timestamp=Timestamp(int(timestamp)),
                            location=Location.UNISWAP,
                            token0=token0,
                            token1=token1,
                            amount0_in=AssetAmount(amount0_in),
                            amount1_in=AssetAmount(amount1_in),
                            amount0_out=AssetAmount(amount0_out),
                            amount1_out=AssetAmount(amount1_out),
                        ))

                # Now that we got all swaps for a transaction, create the trade object
                trades.extend(self._tx_swaps_to_trades(swaps))

            # Check whether an extra request is needed
            if len(result_data) < GRAPH_QUERY_LIMIT:
                break

            # Update pagination step
            param_values = {
                **param_values,
                'offset':
                param_values['offset'] + GRAPH_QUERY_LIMIT,  # type: ignore
            }
        return trades
Example #6
def test_query_statistics_value_distribution(
    rotkehlchen_api_server_with_exchanges,
    ethereum_accounts,
    btc_accounts,
    start_with_valid_premium,
):
    """Test that using the statistics value distribution endpoint works"""
    start_time = ts_now()
    # Disable caching of query results
    rotki = rotkehlchen_api_server_with_exchanges.rest_api.rotkehlchen
    rotki.chain_manager.cache_ttl_secs = 0
    token_balances = {A_RDN: ['111000', '4000000']}
    setup = setup_balances(
        rotki=rotki,
        ethereum_accounts=ethereum_accounts,
        btc_accounts=btc_accounts,
        token_balances=token_balances,
        manually_tracked_balances=[
            ManuallyTrackedBalance(
                asset=A_EUR,
                label='My EUR bank',
                amount=FVal('1550'),
                location=Location.BANKS,
                tags=None,
            )
        ],
    )

    # query balances and save data in DB to have data to test the statistics endpoint
    with ExitStack() as stack:
        setup.enter_all_patches(stack)
        response = requests.get(
            api_url_for(
                rotkehlchen_api_server_with_exchanges,
                "allbalancesresource",
            ),
            json={'save_data': True},
        )
    assert_proper_response(response)

    def assert_okay_by_location(response):
        """Helper function to run next query and its assertion twice"""
        if start_with_valid_premium:
            assert_proper_response(response)
            data = response.json()
            assert data['message'] == ''
            assert len(data['result']) == 5
            locations = {'poloniex', 'binance', 'banks', 'blockchain', 'total'}
            for entry in data['result']:
                assert len(entry) == 3
                assert entry['time'] >= start_time
                assert entry['usd_value'] is not None
                assert entry['location'] in locations
                locations.remove(entry['location'])
            assert len(locations) == 0
        else:
            assert_error_response(
                response=response,
                contained_in_msg=
                'logged in user testuser does not have a premium subscription',
                status_code=HTTPStatus.CONFLICT,
            )

    # and now test that statistics work fine for distribution by location for json body
    response = requests.get(
        api_url_for(
            rotkehlchen_api_server_with_exchanges,
            "statisticsvaluedistributionresource",
        ),
        json={'distribution_by': 'location'},
    )
    assert_okay_by_location(response)
    # and now test that statistics work fine for distribution by location for query params
    response = requests.get(
        api_url_for(
            rotkehlchen_api_server_with_exchanges,
            "statisticsvaluedistributionresource",
        ) + '?distribution_by=location', )
    assert_okay_by_location(response)

    # finally test that statistics work fine for distribution by asset
    response = requests.get(
        api_url_for(
            rotkehlchen_api_server_with_exchanges,
            "statisticsvaluedistributionresource",
        ),
        json={'distribution_by': 'asset'},
    )
    if start_with_valid_premium:
        assert_proper_response(response)
        data = response.json()
        assert data['message'] == ''
        assert len(data['result']) == 4
        totals = {
            'ETH': get_asset_balance_total('ETH', setup),
            'BTC': get_asset_balance_total('BTC', setup),
            'EUR': get_asset_balance_total('EUR', setup),
            'RDN': get_asset_balance_total('RDN', setup),
        }
        for entry in data['result']:
            assert len(entry) == 5
            assert entry['time'] >= start_time
            assert entry['category'] == 'asset'
            assert entry['usd_value'] is not None
            assert FVal(entry['amount']) == totals[entry['asset']]
    else:
        assert_error_response(
            response=response,
            contained_in_msg=
            'logged in user testuser does not have a premium subscription',
            status_code=HTTPStatus.CONFLICT,
        )
Example #7
    def query_btc_account_balance(self, account):
        btc_resp = request_get('https://blockchain.info/q/addressbalance/%s' % account)
        return FVal(btc_resp) * FVal('0.00000001')  # result is in satoshis
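
A quick check of the satoshi-to-BTC conversion above, with the blockchain.info response stubbed as a plain string so no network call is involved (the value is made up):

from rotkehlchen.fval import FVal

btc_resp = '150000000'  # hypothetical stubbed response: 1.5 BTC expressed in satoshis
assert FVal(btc_resp) * FVal('0.00000001') == FVal('1.5')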
Example #8
def test_combine_stat_dicts():
    a = {
        'EUR': {
            'amount': FVal('50.5'),
            'usd_value': FVal('200.1')
        },
        'BTC': {
            'amount': FVal('2.5'),
            'usd_value': FVal('12200.5')
        },
    }
    b = {
        'RDN': {
            'amount': FVal('15.5'),
            'usd_value': FVal('105.9')
        },
    }
    c = {
        'EUR': {
            'amount': FVal('15.5'),
            'usd_value': FVal('105.9')
        },
        'BTC': {
            'amount': FVal('3.5'),
            'usd_value': FVal('18200.5')
        },
        'ETH': {
            'amount': FVal('100.1'),
            'usd_value': FVal('11200.1')
        },
    }
    result = combine_stat_dicts([a, b, c])
    assert result == {
        'EUR': {
            'amount': FVal('66'),
            'usd_value': FVal('306')
        },
        'RDN': {
            'amount': FVal('15.5'),
            'usd_value': FVal('105.9')
        },
        'ETH': {
            'amount': FVal('100.1'),
            'usd_value': FVal('11200.1')
        },
        'BTC': {
            'amount': FVal('6'),
            'usd_value': FVal('30401')
        },
    }
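
The test above pins down the expected behaviour: amounts and usd_values are summed per asset across all input dicts. A minimal sketch of that summing with plain nested dicts; this illustrates the asserted behaviour and is not the project's actual combine_stat_dicts implementation:

from collections import defaultdict

from rotkehlchen.fval import FVal

def combine_stat_dicts_sketch(dicts):
    # Sum 'amount' and 'usd_value' per asset across all input dicts.
    combined = defaultdict(lambda: {'amount': FVal(0), 'usd_value': FVal(0)})
    for d in dicts:
        for asset, entry in d.items():
            combined[asset]['amount'] += entry['amount']
            combined[asset]['usd_value'] += entry['usd_value']
    return dict(combined)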
Example #9
def test_tuple_in_process_result():
    d = {'overview': [{'foo': (FVal('0.1'), )}]}

    # Process result should detect the tuple and throw
    with pytest.raises(ValueError):
        json.dumps(process_result(d))
Example #10
def trade_from_poloniex(poloniex_trade: Dict[str, Any],
                        pair: TradePair) -> Trade:
    """Turn a poloniex trade returned from poloniex trade history to our common trade
    history format

    Throws:
        - UnsupportedAsset due to asset_from_poloniex()
    """

    trade_type = trade_type_from_string(poloniex_trade['type'])
    amount = FVal(poloniex_trade['amount'])
    rate = FVal(poloniex_trade['rate'])
    perc_fee = FVal(poloniex_trade['fee'])
    base_currency = asset_from_poloniex(get_pair_position_str(pair, 'first'))
    quote_currency = asset_from_poloniex(get_pair_position_str(pair, 'second'))
    timestamp = createTimeStamp(poloniex_trade['date'],
                                formatstr="%Y-%m-%d %H:%M:%S")
    cost = rate * amount
    if trade_type == TradeType.BUY:
        fee = amount * perc_fee
        fee_currency = quote_currency
    elif trade_type == TradeType.SELL:
        fee = cost * perc_fee
        fee_currency = base_currency
    else:
        raise ValueError(
            'Got unexpected trade type "{}" for poloniex trade'.format(
                trade_type))

    if poloniex_trade['category'] == 'settlement':
        if trade_type == TradeType.BUY:
            trade_type = TradeType.SETTLEMENT_BUY
        else:
            trade_type = TradeType.SETTLEMENT_SELL

    log.debug(
        'Processing poloniex Trade',
        sensitive_log=True,
        timestamp=timestamp,
        order_type=trade_type,
        pair=pair,
        base_currency=base_currency,
        quote_currency=quote_currency,
        amount=amount,
        fee=fee,
        rate=rate,
    )

    # Use the converted assets in our pair
    pair = trade_pair_from_assets(base_currency, quote_currency)
    # Since in Poloniex the base currency is the cost currency (i.e. for BTC_ETH
    # we buy ETH with BTC and sell ETH for BTC), we need to turn the pair into the
    # Rotkehlchen convention, which follows the base/quote approach.
    pair = invert_pair(pair)
    return Trade(
        timestamp=timestamp,
        location='poloniex',
        pair=pair,
        trade_type=trade_type,
        amount=amount,
        rate=rate,
        fee=fee,
        fee_currency=fee_currency,
    )
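
A worked instance of the fee branches above, with made-up numbers: for a BUY the fee is taken from the amount (denominated in what the code calls quote_currency), while for a SELL it is taken from the cost (denominated in base_currency):

from rotkehlchen.fval import FVal

amount = FVal('2')        # hypothetical trade amount
rate = FVal('0.05')       # hypothetical rate
perc_fee = FVal('0.002')  # 0.2% fee as reported by poloniex
cost = rate * amount

assert amount * perc_fee == FVal('0.004')   # fee for a BUY
assert cost * perc_fee == FVal('0.0002')    # fee for a SELL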
Example #11
    def query_deposits_withdrawals(
        self,
        start_ts: Timestamp,
        end_ts: Timestamp,
        end_at_least_ts: Timestamp,
    ) -> List:
        with self.lock:
            cache = self.check_trades_cache(
                start_ts,
                end_at_least_ts,
                special_name='deposits_withdrawals',
            )
            cache = cast(Dict, cache)
        if cache is None:
            result = self.returnDepositsWithdrawals(start_ts, end_ts)
            with self.lock:
                self.update_trades_cache(
                    result,
                    start_ts,
                    end_ts,
                    special_name='deposits_withdrawals',
                )
        else:
            result = cache

        log.debug(
            'Poloniex deposits/withdrawal query',
            results_num=len(result['withdrawals']) + len(result['deposits']),
        )

        movements = list()
        for withdrawal in result['withdrawals']:
            try:
                movements.append(
                    AssetMovement(
                        exchange='poloniex',
                        category='withdrawal',
                        timestamp=withdrawal['timestamp'],
                        asset=asset_from_poloniex(withdrawal['currency']),
                        amount=FVal(withdrawal['amount']),
                        fee=FVal(withdrawal['fee']),
                    ))
            except UnsupportedAsset as e:
                self.msg_aggregator.add_warning(
                    f'Found withdrawal of unsupported poloniex asset {e.asset_name}. Ignoring it.',
                )
                continue

        for deposit in result['deposits']:
            try:
                movements.append(
                    AssetMovement(
                        exchange='poloniex',
                        category='deposit',
                        timestamp=deposit['timestamp'],
                        asset=asset_from_poloniex(deposit['currency']),
                        amount=FVal(deposit['amount']),
                        fee=0,
                    ))
            except UnsupportedAsset as e:
                self.msg_aggregator.add_warning(
                    f'Found deposit of unsupported poloniex asset {e.asset_name}. Ignoring it.',
                )
                continue

        return movements
Example #12
def test_query_statistics_value_distribution(
    rotkehlchen_api_server_with_exchanges,
    ethereum_accounts,
    btc_accounts,
    number_of_eth_accounts,
    start_with_valid_premium,
):
    """Test that using the statistics value distribution endpoint works"""
    start_time = ts_now()
    # Disable caching of query results
    rotki = rotkehlchen_api_server_with_exchanges.rest_api.rotkehlchen
    rotki.blockchain.cache_ttl_secs = 0
    setup = setup_balances(rotki, ethereum_accounts, btc_accounts)

    # query balances and save data in DB to have data to test the statistics endpoint
    with setup.poloniex_patch, setup.binance_patch, setup.etherscan_patch, setup.bitcoin_patch:
        response = requests.get(
            api_url_for(
                rotkehlchen_api_server_with_exchanges,
                "allbalancesresource",
            ),
            json={'save_data': True},
        )
    assert_proper_response(response)

    def assert_okay_by_location(response):
        """Helper function to run next query and its assertion twice"""
        if start_with_valid_premium:
            assert_proper_response(response)
            data = response.json()
            assert data['message'] == ''
            assert len(data['result']) == 5
            locations = {'poloniex', 'binance', 'banks', 'blockchain', 'total'}
            for entry in data['result']:
                assert len(entry) == 3
                assert entry['time'] >= start_time
                assert entry['usd_value'] is not None
                assert entry['location'] in locations
                locations.remove(entry['location'])
            assert len(locations) == 0
        else:
            assert_error_response(
                response=response,
                contained_in_msg=
                'logged in user testuser does not have a premium subscription',
                status_code=HTTPStatus.CONFLICT,
            )

    # and now test that statistics work fine for distribution by location for json body
    response = requests.get(
        api_url_for(
            rotkehlchen_api_server_with_exchanges,
            "statisticsvaluedistributionresource",
        ),
        json={'distribution_by': 'location'},
    )
    assert_okay_by_location(response)
    # and now test that statistics work fine for distribution by location for query params
    response = requests.get(
        api_url_for(
            rotkehlchen_api_server_with_exchanges,
            "statisticsvaluedistributionresource",
        ) + '?distribution_by=location', )
    assert_okay_by_location(response)

    # finally test that statistics work fine for distribution by asset
    response = requests.get(
        api_url_for(
            rotkehlchen_api_server_with_exchanges,
            "statisticsvaluedistributionresource",
        ),
        json={'distribution_by': 'asset'},
    )
    if start_with_valid_premium:
        assert_proper_response(response)
        data = response.json()
        assert data['message'] == ''
        assert len(data['result']) == 4
        totals = {
            'ETH': get_asset_balance_total('ETH', setup),
            'BTC': get_asset_balance_total('BTC', setup),
            'EUR': get_asset_balance_total('EUR', setup),
            'RDN': get_asset_balance_total('RDN', setup),
        }
        for entry in data['result']:
            assert len(entry) == 4
            assert entry['time'] >= start_time
            assert entry['usd_value'] is not None
            assert FVal(entry['amount']) == totals[entry['asset']]
    else:
        assert_error_response(
            response=response,
            contained_in_msg=
            'logged in user testuser does not have a premium subscription',
            status_code=HTTPStatus.CONFLICT,
        )
Example #13
def test_query_historical_dsr_with_a_zero_withdrawal(
        rotkehlchen_api_server,
        ethereum_accounts,
        inquirer,  # pylint: disable=unused-argument
):
    """Test DSR for an account that was opened while DSR is 0 and made a 0 DAI withdrawal

    Essentially reproduces the DSR problem reported here: https://github.com/rotki/rotki/issues/1032

    The account in question operates in a zero DSR environment, and the reported
    problem seems to occur simply because a zero DAI withdrawal was attempted
    """
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    original_get_logs = rotki.chain_manager.ethereum.get_logs
    proxies_mapping = {
        # proxy for 0x714696C5a872611F76655Bc163D0131cBAc60a70
        ethereum_accounts[0]: '0xAe9996b76bdAa003ace6D66328A6942565f5768d',
    }
    mock_proxies(rotki, proxies_mapping, 'makerdao_dsr')

    # Query only until a block we know DSR is 0 and we know the number
    # of DSR events
    def mock_get_logs(
            contract_address,
            abi,
            event_name,
            argument_filters,
            from_block,
            to_block='latest',  # pylint: disable=unused-argument
    ):
        return original_get_logs(
            contract_address,
            abi,
            event_name,
            argument_filters,
            from_block,
            to_block=10149816,  # A block at which DSR is still zero
        )

    patched_get_logs = patch.object(
        rotki.chain_manager.ethereum,
        'get_logs',
        side_effect=mock_get_logs,
    )

    with patched_get_logs:
        response = requests.get(api_url_for(
            rotkehlchen_api_server,
            "makerdaodsrhistoryresource",
        ))
    assert_proper_response(response)
    json_data = response.json()
    assert json_data['message'] == ''
    result = json_data['result'][ethereum_accounts[0]]
    assert result['gain_so_far'] == {'amount': '0', 'usd_value': '0'}
    movements = result['movements']
    expected_movements = [{
        'movement_type': 'deposit',
        'gain_so_far': {
            'amount': ZERO,
            'usd_value': ZERO,
        },
        'value': {
            'amount': FVal('79'),
            'usd_value': FVal('79'),
        },
        'block_number': 9953028,
        'timestamp': 1587970286,
        'tx_hash': '0x988aea85b54c5b2834b144e9f7628b524bf9faf3b87821aa520b7bcfb57ab289',
    }, {
        'movement_type': 'withdrawal',
        'gain_so_far': {
            'amount': ZERO,
            'usd_value': ZERO,
        },
        'value': {
            'amount': FVal('79'),
            'usd_value': FVal('79'),
        },
        'block_number': 9968906,
        'timestamp': 1588182567,
        'tx_hash': '0x2a1bee69b9bafe031026dbcc8f199881b568fd767482b5436dd1cd94f2642443',
    }, {
        'movement_type': 'withdrawal',
        'gain_so_far': {
            'amount': ZERO,
            'usd_value': ZERO,
        },
        'value': {
            'amount': ZERO,
            'usd_value': ZERO,
        },
        'block_number': 9968906,
        'timestamp': 1588182567,
        'tx_hash': '0x618fc9542890a2f58ab20a3c12d173b3638af11fda813e61788e242b4fc9a756',
    }]
    assert_serialized_lists_equal(movements, expected_movements, max_diff="1e-26")
    errors = rotki.msg_aggregator.consume_errors()
    assert len(errors) == 0
Example #14
def assert_dsr_history_result_is_correct(result: Dict[str, Any], setup: DSRTestSetup) -> None:
    assert len(result) == 2
    for account, entry in setup.dsr_history_response.items():
        assert len(entry) == len(result[account])
        for key, val in entry.items():
            if key == 'movements':
                assert len(val) == len(result[account]['movements'])
                for idx, movement in enumerate(val):
                    for mov_key, mov_val in movement.items():
                        if mov_key == 'movement_type':
                            assert mov_val == result[account]['movements'][idx][mov_key]
                        elif mov_key in ('gain_so_far', 'value'):
                            assert FVal(mov_val['amount']).is_close(FVal(
                                result[account]['movements'][idx][mov_key]['amount'],
                            ), max_diff='1e-8')
                            assert FVal(mov_val['usd_value']).is_close(FVal(
                                result[account]['movements'][idx][mov_key]['usd_value'],
                            ), max_diff='1e-8')
                        else:
                            assert FVal(mov_val).is_close(FVal(
                                result[account]['movements'][idx][mov_key],
                            ), max_diff='1e-8')
            elif key == 'gain_so_far':
                assert FVal(result[account][key]['amount']).is_close(FVal(val['amount']))
                assert FVal(result[account][key]['usd_value']).is_close(FVal(val['usd_value']))
            else:
                assert FVal(result[account][key]).is_close(FVal(val))
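
The helper above relies on FVal.is_close with a max_diff tolerance; a minimal illustration of that comparison (the tolerance and values here are arbitrary):

from rotkehlchen.fval import FVal

assert FVal('1.000000001').is_close(FVal('1'), max_diff='1e-8')
assert not FVal('1.1').is_close(FVal('1'), max_diff='1e-8')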
Example #15
def test_coinbase_query_deposit_withdrawals(function_scope_coinbase):
    """Test that coinbase deposit/withdrawals history query works fine for the happy path"""
    coinbase = function_scope_coinbase

    with patch.object(coinbase.session,
                      'get',
                      side_effect=mock_normal_coinbase_query):
        movements = coinbase.query_online_deposits_withdrawals(
            start_ts=0,
            end_ts=1576726126,
        )

    warnings = coinbase.msg_aggregator.consume_warnings()
    errors = coinbase.msg_aggregator.consume_errors()
    assert len(warnings) == 0
    assert len(errors) == 0
    assert len(movements) == 3
    expected_movements = [
        AssetMovement(
            location=Location.COINBASE,
            category=AssetMovementCategory.DEPOSIT,
            timestamp=1519001640,
            address=None,
            transaction_id=None,
            asset=A_USD,
            amount=FVal('55'),
            fee_asset=A_USD,
            fee=FVal('0.05'),
            link='1130eaec-07d7-54c4-a72c-2e92826897df',
        ),
        AssetMovement(
            location=Location.COINBASE,
            category=AssetMovementCategory.WITHDRAWAL,
            address=None,
            transaction_id=None,
            timestamp=1485895742,
            asset=A_USD,
            amount=FVal('10.0'),
            fee_asset=A_USD,
            fee=FVal('0.01'),
            link='146eaec-07d7-54c4-a72c-2e92826897df',
        ),
        AssetMovement(
            location=Location.COINBASE,
            category=AssetMovementCategory.WITHDRAWAL,
            address='0x6dcD6449dbCa615e40d696328209686eA95327b2',
            transaction_id=
            '0x558bfa4d2a4ef598ddb92233459c00eda9e6c14cda75e6773b90208cb6938169',
            timestamp=1566726126,
            asset=A_ETH,
            amount=FVal('0.05770427'),
            fee_asset=A_ETH,
            fee=FVal('0.00021'),
            link='id1',
        )
    ]
    assert expected_movements == movements

    # and now try to query within a specific range
    with patch.object(coinbase.session,
                      'get',
                      side_effect=mock_normal_coinbase_query):
        movements = coinbase.query_online_deposits_withdrawals(
            start_ts=0,
            end_ts=1519001650,
        )

    warnings = coinbase.msg_aggregator.consume_warnings()
    errors = coinbase.msg_aggregator.consume_errors()
    assert len(warnings) == 0
    assert len(errors) == 0
    assert len(movements) == 2
    assert movements[0].category == AssetMovementCategory.DEPOSIT
    assert movements[0].timestamp == 1519001640
    assert movements[1].category == AssetMovementCategory.WITHDRAWAL
    assert movements[1].timestamp == 1485895742
Example #16
    assert db.edit_ledger_action(new_entry) is None
    cursor.execute(query, (identifier,))
    updated_entry = LedgerAction.deserialize_from_db(cursor.fetchone())
    assert updated_entry.rate is None
    assert updated_entry.rate_asset is None
    assert updated_entry.link is None
    assert updated_entry.notes is None


@pytest.mark.parametrize('mocked_price_queries', [prices])
@pytest.mark.parametrize('db_settings, expected', [
    ({'taxable_ledger_actions': [
        LedgerActionType.INCOME,
        LedgerActionType.AIRDROP,
        LedgerActionType.LOSS,
    ]}, FVal('961.425')),  # 578.505 + 478.65 -2*400 + 2*400 - 2 * 47.865
    ({'taxable_ledger_actions': []}, 0),
])
def test_taxable_ledger_action_setting(accountant, expected, google_service):
    """Test that ledger actions respect the taxable setting"""
    history = [
        LedgerAction(
            identifier=1,
            timestamp=1476979735,
            action_type=LedgerActionType.INCOME,
            location=Location.EXTERNAL,
            amount=FVal(1),  # 578.505 EUR/BTC from mocked prices
            asset=A_BTC,
            rate=None,
            rate_asset=None,
            link=None,
Example #17
def from_wei(wei_value: FVal) -> FVal:
    return wei_value / FVal(10**18)
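
A usage check for the conversion above: one full unit of an 18-decimal token corresponds to 10**18 wei (this calls the from_wei defined just above):

from rotkehlchen.fval import FVal

assert from_wei(FVal(10**18)) == FVal(1)
assert from_wei(FVal('1500000000000000000')) == FVal('1.5')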
Example #18
def test_ledger_actions_accounting(accountant, google_service):
    """Test for accounting for ledger actions

    Makes sure that Ledger actions are processed in accounting, range is respected
    and that they contribute to the "bought" amount per asset and that also if
    a rate is given then that is used instead of the queried price
    """
    history = [LedgerAction(  # before range - read only for amount not profit
        identifier=1,
        timestamp=1435979735,  # 0.1 EUR per ETH
        action_type=LedgerActionType.INCOME,
        location=Location.EXTERNAL,
        asset=A_ETH,
        amount=AssetAmount(FVal(1)),
        rate=None,
        rate_asset=None,
        link=None,
        notes=None,
    ), LedgerAction(
        identifier=2,
        timestamp=1437279735,  # 250 EUR per BTC
        action_type=LedgerActionType.INCOME,
        location=Location.BLOCKCHAIN,
        asset=A_BTC,
        amount=AssetAmount(FVal(1)),
        rate=FVal('400'),
        rate_asset=A_EUR,
        link='foo',
        notes='we give a rate here',
    ), LedgerAction(
        identifier=3,
        timestamp=1447279735,  # 0.4 EUR per XMR
        action_type=LedgerActionType.DIVIDENDS_INCOME,
        location=Location.KRAKEN,
        asset=A_XMR,
        amount=AssetAmount(FVal(10)),
        rate=None,
        rate_asset=None,
        link=None,
        notes=None,
    ), LedgerAction(
        identifier=4,
        timestamp=1457279735,  # 1 EUR per ETH
        action_type=LedgerActionType.EXPENSE,
        location=Location.EXTERNAL,
        asset=A_ETH,
        amount=AssetAmount(FVal('0.1')),
        rate=None,
        rate_asset=None,
        link=None,
        notes=None,
    ), LedgerAction(
        identifier=5,
        timestamp=1467279735,  # 420 EUR per BTC
        action_type=LedgerActionType.LOSS,
        location=Location.EXTERNAL,
        asset=A_BTC,
        amount=AssetAmount(FVal('0.1')),
        rate=FVal(500),
        rate_asset=A_USD,
        link='foo2',
        notes='we give a rate here',
    ), LedgerAction(  # after range and should be completely ignored
        identifier=6,
        timestamp=1529693374,
        action_type=LedgerActionType.EXPENSE,
        location=Location.EXTERNAL,
        asset=A_ETH,
        amount=AssetAmount(FVal('0.5')),
        rate=FVal(400),
        rate_asset=A_EUR,
        link='foo3',
        notes='we give a rate here too but doesnt matter',
    )]

    accounting_history_process(
        accountant=accountant,
        start_ts=1436979735,
        end_ts=1519693374,
        history_list=history,
    )
    assert accountant.pots[0].cost_basis.get_calculated_asset_amount(A_BTC).is_close('0.9')
    assert accountant.pots[0].cost_basis.get_calculated_asset_amount(A_ETH).is_close('0.9')
    assert accountant.pots[0].cost_basis.get_calculated_asset_amount(A_XMR).is_close('10')
    expected_pnls = PnlTotals({
        # 400 + 0.4*10 - 1*0.1 + 1*0.1 - 1*0.01 - 0.1*500*0.9004 + 0.1*500*0.9004 - 0.1* 400
        AccountingEventType.LEDGER_ACTION: PNL(taxable=FVal('363.99'), free=ZERO),
    })
    check_pnls_and_csv(accountant, expected_pnls, google_service)
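
The expected taxable figure can be re-derived from the comment above; a small arithmetic check mirroring that comment term by term (the 0.9004 factor is the USD/EUR rate assumed there):

from rotkehlchen.fval import FVal

pnl = (
    FVal('400') + FVal('0.4') * FVal('10')
    - FVal('1') * FVal('0.1') + FVal('1') * FVal('0.1')
    - FVal('1') * FVal('0.01')
    - FVal('0.1') * FVal('500') * FVal('0.9004')
    + FVal('0.1') * FVal('500') * FVal('0.9004')
    - FVal('0.1') * FVal('400')
)
assert pnl == FVal('363.99')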
Example #19
def test_query_statistics_asset_balance(
    rotkehlchen_api_server_with_exchanges,
    ethereum_accounts,
    btc_accounts,
    start_with_valid_premium,
):
    """Test that using the statistics asset balance over time endpoint works"""
    start_time = ts_now()
    # Disable caching of query results
    rotki = rotkehlchen_api_server_with_exchanges.rest_api.rotkehlchen
    rotki.chain_manager.cache_ttl_secs = 0
    setup = setup_balances(rotki, ethereum_accounts, btc_accounts)

    # query balances and save data in DB to have data to test the statistics endpoint
    with ExitStack() as stack:
        setup.enter_all_patches(stack)
        response = requests.get(
            api_url_for(
                rotkehlchen_api_server_with_exchanges,
                "allbalancesresource",
            ),
            json={'save_data': True},
        )
    assert_proper_response(response)

    # and now test that statistics work fine for ETH, with default time range (0 - now)
    response = requests.get(
        api_url_for(
            rotkehlchen_api_server_with_exchanges,
            "statisticsassetbalanceresource",
            asset="ETH",
        ), )
    if start_with_valid_premium:
        assert_proper_response(response)
        data = response.json()
        assert data['message'] == ''
        assert len(data['result']) == 1
        entry = data['result'][0]
        assert len(entry) == 4
        assert FVal(entry['amount']) == get_asset_balance_total('ETH', setup)
        assert entry['category'] == 'asset'
        assert entry['time'] >= start_time
        assert entry['usd_value'] is not None
    else:
        assert_error_response(
            response=response,
            contained_in_msg=
            'logged in user testuser does not have a premium subscription',
            status_code=HTTPStatus.CONFLICT,
        )

    # and now test that statistics work fine for BTC, with given time range
    response = requests.get(
        api_url_for(
            rotkehlchen_api_server_with_exchanges,
            "statisticsassetbalanceresource",
            asset="BTC",
        ),
        json={
            'from_timestamp': 0,
            'to_timestamp': start_time + 60000
        },
    )
    if start_with_valid_premium:
        assert_proper_response(response)
        data = response.json()
        assert data['message'] == ''
        assert len(data['result']) == 1
        entry = data['result'][0]
        assert len(entry) == 4
        assert FVal(entry['amount']) == get_asset_balance_total('BTC', setup)
        assert entry['time'] >= start_time
        assert entry['category'] == 'asset'
        assert entry['usd_value'] is not None
    else:
        assert_error_response(
            response=response,
            contained_in_msg=
            'logged in user testuser does not have a premium subscription',
            status_code=HTTPStatus.CONFLICT,
        )

    # finally test that if the time range is not including the saved balances we get nothing back
    response = requests.get(
        api_url_for(
            rotkehlchen_api_server_with_exchanges,
            "statisticsassetbalanceresource",
            asset="BTC",
        ),
        json={
            'from_timestamp': 0,
            'to_timestamp': start_time - 1
        },
    )
    if start_with_valid_premium:
        assert_proper_response(response)
        data = response.json()
        assert data['message'] == ''
        assert len(data['result']) == 0
    else:
        assert_error_response(
            response=response,
            contained_in_msg=
            'logged in user testuser does not have a premium subscription',
            status_code=HTTPStatus.CONFLICT,
        )
Example #20
def test_trade_from_binance(function_scope_binance):
    binance = function_scope_binance
    binance_trades_list = [
        {
            'symbol': 'RDNETH',
            'id': 1,
            'orderId': 1,
            'price': FVal(0.0063213),
            'qty': FVal(5.0),
            'commission': FVal(0.005),
            'commissionAsset': 'RDN',
            'time': 1512561941000,
            'isBuyer': True,
            'isMaker': False,
            'isBestMatch': True,
        },
        {
            'symbol': 'ETHUSDT',
            'id': 2,
            'orderId': 2,
            'price': FVal(481.0),
            'qty': FVal(0.505),
            'commission': FVal(0.242905),
            'commissionAsset': 'USDT',
            'time': 1531117990000,
            'isBuyer': False,
            'isMaker': True,
            'isBestMatch': True,
        },
        {
            'symbol': 'BTCUSDT',
            'id': 3,
            'orderId': 3,
            'price': FVal(6376.39),
            'qty': FVal(0.051942),
            'commission': FVal(0.00005194),
            'commissionAsset': 'BTC',
            'time': 1531728338000,
            'isBuyer': True,
            'isMaker': False,
            'isBestMatch': True,
        },
        {
            'symbol': 'ADAUSDT',
            'id': 4,
            'orderId': 4,
            'price': FVal(0.17442),
            'qty': FVal(285.2),
            'commission': FVal(0.00180015),
            'commissionAsset': 'BNB',
            'time': 1531871806000,
            'isBuyer': False,
            'isMaker': True,
            'isBestMatch': True,
        },
    ]
    our_expected_list = [
        Trade(
            timestamp=1512561941,
            location=Location.BINANCE,
            pair='RDN_ETH',
            trade_type=TradeType.BUY,
            amount=FVal(5.0),
            rate=FVal(0.0063213),
            fee=FVal(0.005),
            fee_currency=A_RDN,
            link='1',
        ),
        Trade(
            timestamp=1531117990,
            location=Location.BINANCE,
            pair='ETH_USDT',
            trade_type=TradeType.SELL,
            amount=FVal(0.505),
            rate=FVal(481.0),
            fee=FVal(0.242905),
            fee_currency=A_USDT,
            link='2',
        ),
        Trade(
            timestamp=1531728338,
            location=Location.BINANCE,
            pair='BTC_USDT',
            trade_type=TradeType.BUY,
            amount=FVal(0.051942),
            rate=FVal(6376.39),
            fee=FVal(0.00005194),
            fee_currency=A_BTC,
            link='3',
        ),
        Trade(
            timestamp=1531871806,
            location=Location.BINANCE,
            pair='ADA_USDT',
            trade_type=TradeType.SELL,
            amount=FVal(285.2),
            rate=FVal(0.17442),
            fee=FVal(0.00180015),
            fee_currency=A_BNB,
            link='4',
        ),
    ]

    for idx, binance_trade in enumerate(binance_trades_list):
        our_trade = trade_from_binance(binance_trade, binance.symbols_to_pair)
        assert our_trade == our_expected_list[idx]
        assert isinstance(our_trade.fee_currency, Asset)
Example #21
def test_query_balances(rotkehlchen_server, function_scope_binance):
    """Test that the query_balances call works properly.

    That means that the balances are all returned in the expected format and
    that they are saved in the DB.

    The test is for a user with fiat balances and with some binance balances.
    """

    # First set the fiat balances
    ok, _ = rotkehlchen_server.set_fiat_balance('USD', '100.5')
    assert ok
    ok, _ = rotkehlchen_server.set_fiat_balance('EUR', '75.5')
    assert ok
    exchanges = rotkehlchen_server.rotkehlchen.exchange_manager.connected_exchanges
    exchanges['binance'] = function_scope_binance

    def mock_binance_balances(url):  # pylint: disable=unused-argument
        return MockResponse(200, BINANCE_BALANCES_RESPONSE)

    mock_binance = patch.object(
        exchanges['binance'].session,
        'get',
        side_effect=mock_binance_balances,
    )

    eur_usd_rate = Inquirer().query_fiat_pair(A_EUR, A_USD)

    eth_usd_rate = FVal('100.5')
    btc_usd_rate = FVal('120.1')

    def mock_query_cryptocompare_for_fiat_price(asset: Asset) -> Price:
        if asset == A_ETH:
            return Price(eth_usd_rate)
        elif asset == A_BTC:
            return Price(btc_usd_rate)

        # else
        raise AssertionError(f'Unexpected asset {asset} at mock cryptocompare query')

    mock_find_usd_price = patch(
        'rotkehlchen.inquirer.query_cryptocompare_for_fiat_price',
        side_effect=mock_query_cryptocompare_for_fiat_price,
    )

    now = ts_now()
    with mock_binance, mock_find_usd_price:
        result = rotkehlchen_server.query_balances(save_data=True)

    assert result['USD']['amount'] == '100.5'
    assert result['USD']['usd_value'] == '100.5'
    eur_amount = FVal('75.5')
    assert result['EUR']['amount'] == str(eur_amount)
    eur_usd_value = eur_amount * eur_usd_rate
    assert result['EUR']['usd_value'] == str(eur_usd_value)
    eth_amount = FVal('4763368.68006011')
    assert result['ETH']['amount'] == str(eth_amount)
    eth_usd_value = eth_amount * eth_usd_rate
    assert result['ETH']['usd_value'] == str(eth_usd_value)
    btc_amount = FVal('4723846.89208129')
    assert result['BTC']['amount'] == str(btc_amount)
    btc_usd_value = btc_amount * btc_usd_rate
    assert result['BTC']['usd_value'] == str(btc_usd_value)
    binance_usd_value = btc_usd_value + eth_usd_value
    assert result['location']['binance']['usd_value'] == str(binance_usd_value)
    banks_usd_value = eur_usd_value + FVal('100.5')
    assert result['location']['banks']['usd_value'] == str(banks_usd_value)
    assert result['net_usd'] == str(banks_usd_value + binance_usd_value)

    # make sure that balances also got saved in the DB
    db = rotkehlchen_server.rotkehlchen.data.db
    save_ts = db.get_last_balance_save_time()
    assert save_ts >= now
    assert save_ts - now < 5, 'Saving balances took too long'

    location_data = db.get_latest_location_value_distribution()
    assert len(location_data) == 4

    assert location_data[0].location == 'banks'
    assert location_data[0].usd_value == str(banks_usd_value)
    assert location_data[1].location == 'binance'
    assert location_data[1].usd_value == str(binance_usd_value)
    assert location_data[2].location == 'blockchain'
    assert location_data[3].location == 'total'
    assert location_data[3].usd_value == str(banks_usd_value + binance_usd_value)
Example #22
def test_binance_query_deposits_withdrawals(function_scope_binance):
    """Test the happy case of binance deposit withdrawal query

    NB: set `start_ts` and `end_ts` with a difference less than 90 days to
    prevent requesting with a time delta.
    """
    start_ts = 1508022000  # 2017-10-15
    end_ts = 1508540400  # 2017-10-21 (less than 90 days since `start_ts`)
    binance = function_scope_binance

    def mock_get_deposit_withdrawal(url):  # pylint: disable=unused-argument
        if 'deposit' in url:
            response_str = BINANCE_DEPOSITS_HISTORY_RESPONSE
        else:
            response_str = BINANCE_WITHDRAWALS_HISTORY_RESPONSE

        return MockResponse(200, response_str)

    with patch.object(binance.session,
                      'get',
                      side_effect=mock_get_deposit_withdrawal):
        movements = binance.query_online_deposits_withdrawals(
            start_ts=Timestamp(start_ts),
            end_ts=Timestamp(end_ts),
        )

    errors = binance.msg_aggregator.consume_errors()
    warnings = binance.msg_aggregator.consume_warnings()
    assert len(errors) == 0
    assert len(warnings) == 0

    assert len(movements) == 4

    assert movements[0].location == Location.BINANCE
    assert movements[0].category == AssetMovementCategory.DEPOSIT
    assert movements[0].timestamp == 1508198532
    assert isinstance(movements[0].asset, Asset)
    assert movements[0].asset == A_ETH
    assert movements[0].amount == FVal('0.04670582')
    assert movements[0].fee == ZERO

    assert movements[1].location == Location.BINANCE
    assert movements[1].category == AssetMovementCategory.DEPOSIT
    assert movements[1].timestamp == 1508398632
    assert isinstance(movements[1].asset, Asset)
    assert movements[1].asset == A_XMR
    assert movements[1].amount == FVal('1000')
    assert movements[1].fee == ZERO

    assert movements[2].location == Location.BINANCE
    assert movements[2].category == AssetMovementCategory.WITHDRAWAL
    assert movements[2].timestamp == 1508488245
    assert isinstance(movements[2].asset, Asset)
    assert movements[2].asset == A_ETH
    assert movements[2].amount == FVal('1')
    assert movements[2].fee == ZERO

    assert movements[3].location == Location.BINANCE
    assert movements[3].category == AssetMovementCategory.WITHDRAWAL
    assert movements[3].timestamp == 1508512521
    assert isinstance(movements[3].asset, Asset)
    assert movements[3].asset == A_XMR
    assert movements[3].amount == FVal('850.1')
    assert movements[3].fee == ZERO
Example #23
    def _get_events_graph(
        self,
        address: ChecksumEthAddress,
        start_ts: Timestamp,
        end_ts: Timestamp,
        event_type: EventType,
    ) -> List[LiquidityPoolEvent]:
        """Get the address' events (mints & burns) querying the Uniswap subgraph
        Each event data is stored in a <LiquidityPoolEvent>.
        """
        address_events: List[LiquidityPoolEvent] = []
        if event_type == EventType.MINT:
            query = MINTS_QUERY
            query_schema = 'mints'
        elif event_type == EventType.BURN:
            query = BURNS_QUERY
            query_schema = 'burns'
        else:
            log.error(
                f'Unexpected event_type: {event_type}. Skipping events query.')
            return address_events

        param_types = {
            '$limit': 'Int!',
            '$offset': 'Int!',
            '$address': 'Bytes!',
            '$start_ts': 'BigInt!',
            '$end_ts': 'BigInt!',
        }
        param_values = {
            'limit': GRAPH_QUERY_LIMIT,
            'offset': 0,
            'address': address.lower(),
            'start_ts': str(start_ts),
            'end_ts': str(end_ts),
        }
        querystr = format_query_indentation(query.format())

        while True:
            result = self.graph.query(  # type: ignore # caller already checks
                querystr=querystr,
                param_types=param_types,
                param_values=param_values,
            )
            result_data = result[query_schema]

            for event in result_data:
                token0_ = event['pair']['token0']
                token1_ = event['pair']['token1']
                token0 = get_ethereum_token(
                    symbol=token0_['symbol'],
                    ethereum_address=to_checksum_address(token0_['id']),
                    name=token0_['name'],
                    decimals=int(token0_['decimals']),
                )
                token1 = get_ethereum_token(
                    symbol=token1_['symbol'],
                    ethereum_address=to_checksum_address(token1_['id']),
                    name=token1_['name'],
                    decimals=int(token1_['decimals']),
                )
                lp_event = LiquidityPoolEvent(
                    tx_hash=event['transaction']['id'],
                    log_index=int(event['logIndex']),
                    address=address,
                    timestamp=Timestamp(int(event['timestamp'])),
                    event_type=event_type,
                    pool_address=to_checksum_address(event['pair']['id']),
                    token0=token0,
                    token1=token1,
                    amount0=AssetAmount(FVal(event['amount0'])),
                    amount1=AssetAmount(FVal(event['amount1'])),
                    usd_price=Price(FVal(event['amountUSD'])),
                    lp_amount=AssetAmount(FVal(event['liquidity'])),
                )
                address_events.append(lp_event)

            # Check whether an extra request is needed
            if len(result_data) < GRAPH_QUERY_LIMIT:
                break

            # Update pagination step
            param_values = {
                **param_values,
                'offset':
                param_values['offset'] + GRAPH_QUERY_LIMIT,  # type: ignore
            }

        return address_events
Example #24
    def query_balances(self) -> Tuple[Optional[dict], str]:
        try:
            old_balances = self.api_query('Balance', req={})
        except RemoteError as e:
            if "Missing key: 'result'" in str(e):
                # handle https://github.com/rotki/rotki/issues/946
                old_balances = {}
            else:
                msg = (
                    'Kraken API request failed. Could not reach kraken due '
                    'to {}'.format(e)
                )
                log.error(msg)
                return None, msg

        balances = {}
        for k, v in old_balances.items():
            v = FVal(v)
            if v == FVal(0):
                continue

            try:
                our_asset = asset_from_kraken(k)
            except UnknownAsset as e:
                self.msg_aggregator.add_warning(
                    f'Found unsupported/unknown kraken asset {e.asset_name}. '
                    f'Ignoring its balance query.',
                )
                continue
            except DeserializationError:
                self.msg_aggregator.add_error(
                    f'Found kraken asset with non-string type {type(k)}. '
                    f'Ignoring its balance query.',
                )
                continue

            entry = {}
            entry['amount'] = v
            if k == 'KFEE':
                # There is no price value for KFEE. TODO: Shouldn't we then just skip the balance?
                entry['usd_value'] = ZERO
            else:
                try:
                    usd_price = Inquirer().find_usd_price(our_asset)
                except RemoteError as e:
                    self.msg_aggregator.add_error(
                        f'Error processing kraken balance entry due to inability to '
                        f'query USD price: {str(e)}. Skipping balance entry',
                    )
                    continue
                entry['usd_value'] = FVal(v * usd_price)

            if our_asset not in balances:
                balances[our_asset] = entry
            else:  # Some assets may appear twice in kraken balance query for different locations
                # Spot/staking for example
                balances[our_asset]['amount'] += entry['amount']
                balances[our_asset]['usd_value'] += entry['usd_value']

            log.debug(
                'kraken balance query result',
                sensitive_log=True,
                currency=our_asset,
                amount=entry['amount'],
                usd_value=entry['usd_value'],
            )

        return balances, ''
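# Editor's addendum, a minimal sketch using plain dicts and Decimal instead of
# rotki's Asset and FVal types: how the duplicate-asset branch above merges two
# Kraken entries for the same asset (e.g. spot and staking balances).
from decimal import Decimal

balances = {}
for asset, entry in (
    ('ETH', {'amount': Decimal('1'), 'usd_value': Decimal('2000')}),
    ('ETH', {'amount': Decimal('0.5'), 'usd_value': Decimal('1000')}),
):
    if asset not in balances:
        balances[asset] = entry
    else:
        balances[asset]['amount'] += entry['amount']
        balances[asset]['usd_value'] += entry['usd_value']

assert balances['ETH']['amount'] == Decimal('1.5')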
Example #25
0
    def _get_trades_graph_v3_for_address(
        self,
        address: ChecksumEthAddress,
        start_ts: Timestamp,
        end_ts: Timestamp,
    ) -> List[AMMTrade]:
        """Get the address' trades data querying the Uniswap subgraph

        Each trade (swap) instantiates an <AMMTrade>.

        The trade pair (i.e. BASE_QUOTE) is determined by `reserve0_reserve1`.
        Translated to Uniswap lingo:

        Trade type BUY:
        - `amount1` (QUOTE, reserve1) is greater than 0.
        - `amount0` (BASE, reserve0) is less than 0.

        Trade type SELL:
        - `amount0` (BASE, reserve0) is greater than 0.
        - `amount1` (QUOTE, reserve1) is less than 0.

        May raise:
        - RemoteError
        """
        trades: List[AMMTrade] = []
        param_types = {
            '$limit': 'Int!',
            '$offset': 'Int!',
            '$address': 'Bytes!',
            '$start_ts': 'BigInt!',
            '$end_ts': 'BigInt!',
        }
        param_values = {
            'limit': GRAPH_QUERY_LIMIT,
            'offset': 0,
            'address': address.lower(),
            'start_ts': str(start_ts),
            'end_ts': str(end_ts),
        }
        querystr = format_query_indentation(V3_SWAPS_QUERY.format())

        while True:
            try:
                result = self.graph_v3.query(
                    querystr=querystr,
                    param_types=param_types,
                    param_values=param_values,
                )
            except RemoteError as e:
                self.msg_aggregator.add_error(
                    SUBGRAPH_REMOTE_ERROR_MSG.format(
                        error_msg=str(e),
                        location=self.location,
                    ),
                )
                raise

            result_data = result['swaps']
            for entry in result_data:
                swaps = []
                for swap in entry['transaction']['swaps']:
                    timestamp = swap['timestamp']
                    swap_token0 = swap['token0']
                    swap_token1 = swap['token1']

                    try:
                        token0_deserialized = deserialize_ethereum_address(
                            swap_token0['id'])
                        token1_deserialized = deserialize_ethereum_address(
                            swap_token1['id'])
                        from_address_deserialized = deserialize_ethereum_address(
                            swap['sender'])
                        to_address_deserialized = deserialize_ethereum_address(
                            swap['recipient'])
                    except DeserializationError:
                        msg = (
                            f'Failed to deserialize addresses in trade from uniswap graph with '
                            f'token 0: {swap_token0["id"]}, token 1: {swap_token1["id"]}, '
                            f'swap sender: {swap["sender"]}, swap receiver {swap["to"]}'
                        )
                        log.error(msg)
                        continue

                    token0 = get_or_create_ethereum_token(
                        userdb=self.database,
                        symbol=swap_token0['symbol'],
                        ethereum_address=token0_deserialized,
                        name=swap_token0['name'],
                        decimals=int(swap_token0['decimals']),
                    )
                    token1 = get_or_create_ethereum_token(
                        userdb=self.database,
                        symbol=swap_token1['symbol'],
                        ethereum_address=token1_deserialized,
                        name=swap_token1['name'],
                        decimals=int(swap_token1['decimals']),
                    )

                    try:
                        if swap['amount0'].startswith('-'):
                            amount0_in = AssetAmount(FVal(ZERO))
                            amount0_out = deserialize_asset_amount_force_positive(
                                swap['amount0'])
                            amount1_in = deserialize_asset_amount_force_positive(
                                swap['amount1'])
                            amount1_out = AssetAmount(FVal(ZERO))
                        else:
                            amount0_in = deserialize_asset_amount_force_positive(
                                swap['amount0'])
                            amount0_out = AssetAmount(FVal(ZERO))
                            amount1_in = AssetAmount(FVal(ZERO))
                            amount1_out = deserialize_asset_amount_force_positive(
                                swap['amount1'])
                    except ValueError as e:
                        log.error(
                            f'Failed to read amounts in Uniswap V3 swap {str(swap)}. '
                            f'{str(e)}.', )
                        continue

                    swaps.append(
                        AMMSwap(
                            tx_hash=swap['id'].split('#')[0],
                            log_index=int(swap['logIndex']),
                            address=address,
                            from_address=from_address_deserialized,
                            to_address=to_address_deserialized,
                            timestamp=Timestamp(int(timestamp)),
                            location=Location.UNISWAP,
                            token0=token0,
                            token1=token1,
                            amount0_in=amount0_in,
                            amount1_in=amount1_in,
                            amount0_out=amount0_out,
                            amount1_out=amount1_out,
                        ))

                # With the new logic the list of swaps can be empty; in that
                # case don't try to make trades from them.
                if len(swaps) == 0:
                    continue

                # Now that we got all swaps for a transaction, create the trade object
                trades.extend(self._tx_swaps_to_trades(swaps))
            # Check whether an extra request is needed
            if len(result_data) < GRAPH_QUERY_LIMIT:
                break

            # Update pagination step
            param_values = {
                **param_values,
                'offset': param_values['offset'] + GRAPH_QUERY_LIMIT,  # type: ignore
            }
        return trades
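# Editor's addendum, a tiny sketch of the sign convention documented in the
# docstring above (hypothetical helper, not part of rotki): the subgraph
# returns signed decimal strings, and a negative amount0 means the address
# received token0, i.e. a BUY of the base token.
def swap_direction(amount0: str) -> str:
    return 'BUY' if amount0.startswith('-') else 'SELL'

assert swap_direction('-12.5') == 'BUY'
assert swap_direction('3.2') == 'SELL'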
Example #26
0
def test_coinbase_query_balances(function_scope_coinbase):
    """Test that coinbase balance query works fine for the happy path"""
    coinbase = function_scope_coinbase

    def mock_coinbase_accounts(url):  # pylint: disable=unused-argument
        response = MockResponse(
            200,
            """
{
  "pagination": {
    "ending_before": null,
    "starting_after": null,
    "limit": 25,
    "order": "desc",
    "previous_uri": null,
    "next_uri": null
  },
  "data": [
    {
      "id": "58542935-67b5-56e1-a3f9-42686e07fa40",
      "name": "My Vault",
      "primary": false,
      "type": "vault",
      "currency": "BTC",
      "balance": {
        "amount": "4.00000000",
        "currency": "BTC"
      },
      "created_at": "2015-01-31T20:49:02Z",
      "updated_at": "2015-01-31T20:49:02Z",
      "resource": "account",
      "resource_path": "/v2/accounts/58542935-67b5-56e1-a3f9-42686e07fa40",
      "ready": true
    },
    {
      "id": "2bbf394c-193b-5b2a-9155-3b4732659ede",
      "name": "My Wallet",
      "primary": true,
      "type": "wallet",
      "currency": "ETH",
      "balance": {
        "amount": "39.59000000",
        "currency": "ETH"
      },
      "created_at": "2015-01-31T20:49:02Z",
      "updated_at": "2015-01-31T20:49:02Z",
      "resource": "account",
      "resource_path": "/v2/accounts/2bbf394c-193b-5b2a-9155-3b4732659ede"
    },
    {
      "id": "68542935-67b5-56e1-a3f9-42686e07fa40",
      "name": "Another Wallet",
      "primary": false,
      "type": "vault",
      "currency": "BTC",
      "balance": {
        "amount": "1.230000000",
        "currency": "BTC"
      },
      "created_at": "2015-01-31T20:49:02Z",
      "updated_at": "2015-01-31T20:49:02Z",
      "resource": "account",
      "resource_path": "/v2/accounts/68542935-67b5-56e1-a3f9-42686e07fa40",
      "ready": true
    }
  ]
}
            """,
        )
        return response

    with patch.object(coinbase.session,
                      'get',
                      side_effect=mock_coinbase_accounts):
        balances, msg = coinbase.query_balances()

    assert msg == ''
    assert len(balances) == 2
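    # The mocked payload contains two BTC accounts (4 BTC vault + 1.23 BTC wallet),
    # which are merged into a single 5.23 BTC balance below.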
    assert balances[A_BTC].amount == FVal('5.23')
    assert balances[A_BTC].usd_value == FVal('7.8450000000')
    assert balances[A_ETH].amount == FVal('39.59')
    assert balances[A_ETH].usd_value == FVal('59.385000000')

    warnings = coinbase.msg_aggregator.consume_warnings()
    errors = coinbase.msg_aggregator.consume_errors()
    assert len(warnings) == 0
    assert len(errors) == 0
Example #27
0
    def _query_pools_balances(
        self,
        balances: DefaultDict[Asset, Balance],
    ) -> DefaultDict[Asset, Balance]:
        """Queries binance pool balances and if any found adds them to `balances`

        May raise:
        - RemoteError
        """
        def process_pool_asset(asset_name: str, asset_amount: FVal) -> None:
            if asset_amount == ZERO:
                return None

            try:
                asset = asset_from_binance(asset_name)
            except UnsupportedAsset as e:
                self.msg_aggregator.add_warning(
                    f'Found unsupported {self.name} asset {asset_name}. '
                    f'Ignoring its {self.name} pool balance query. {str(e)}', )
                return None
            except UnknownAsset as e:
                self.msg_aggregator.add_warning(
                    f'Found unknown {self.name} asset {asset_name}. '
                    f'Ignoring its {self.name} pool balance query. {str(e)}', )
                return None
            except DeserializationError as e:
                self.msg_aggregator.add_error(
                    f'{self.name} balance deserialization error '
                    f'for asset {asset_name}: {str(e)}. Skipping entry.', )
                return None

            try:
                usd_price = Inquirer().find_usd_price(asset)
            except RemoteError as e:
                self.msg_aggregator.add_error(
                    f'Error processing {self.name} balance entry due to inability to '
                    f'query USD price: {str(e)}. Skipping {self.name} pool balance entry',
                )
                return None

            balances[asset] += Balance(
                amount=asset_amount,
                usd_value=asset_amount * usd_price,
            )
            return None

        try:
            response = self.api_query('sapi', 'bswap/liquidity')
        except BinancePermissionError as e:
            log.warning(
                f'Insufficient permission to query {self.name} pool balances. '
                f'Skipping query. Response details: {str(e)}',
            )
            return balances

        try:
            for entry in response:
                for asset_name, asset_amount in entry['share']['asset'].items():
                    process_pool_asset(asset_name, FVal(asset_amount))
        except (KeyError, AttributeError) as e:
            self.msg_aggregator.add_error(
                f'At {self.name} pool balances got unexpected data format. '
                f'Skipping them in the balance query. Check logs for details',
            )
            if isinstance(e, KeyError):
                msg = f'Missing key {str(e)}'
            else:
                msg = str(e)
            log.error(
                f'Unexpected data format returned by {self.name} pools. '
                f'Data: {response}. Error: {msg}', )

        return balances
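# Editor's addendum: the response shape the loop above assumes, reconstructed
# from the keys it accesses (asset names and amounts are made up for illustration).
example_bswap_liquidity_response = [
    {'share': {'asset': {'BUSD': '12.3', 'USDT': '0'}}},
]
for entry in example_bswap_liquidity_response:
    for asset_name, asset_amount in entry['share']['asset'].items():
        print(asset_name, asset_amount)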
Example #28
0
def test_coinbase_query_trade_history(function_scope_coinbase):
    """Test that coinbase trade history query works fine for the happy path"""
    coinbase = function_scope_coinbase

    with patch.object(coinbase.session,
                      'get',
                      side_effect=mock_normal_coinbase_query):
        trades = coinbase.query_trade_history(
            start_ts=0,
            end_ts=TEST_END_TS,
            only_cache=False,
        )

    warnings = coinbase.msg_aggregator.consume_warnings()
    errors = coinbase.msg_aggregator.consume_errors()
    assert len(warnings) == 0
    assert len(errors) == 0
    assert len(trades) == 2
    expected_trades = [
        Trade(
            timestamp=1500853448,
            location=Location.COINBASE,
            pair='BTC_USD',
            trade_type=TradeType.BUY,
            amount=FVal("486.34313725"),
            rate=FVal("9.997920454875299055122012005"),
            fee=FVal("1.01"),
            fee_currency=A_USD,
            link='9e14d574-30fa-5d85-b02c-6be0d851d61d',
        ),
        Trade(
            timestamp=1427402520,
            location=Location.COINBASE,
            pair='ETH_USD',
            trade_type=TradeType.SELL,
            amount=FVal("100.45"),
            rate=FVal("88.90014932802389248382279741"),
            fee=FVal("10.1"),
            fee_currency=A_USD,
            link='1e14d574-30fa-5d85-b02c-6be0d851d61d',
        )
    ]
    assert trades == expected_trades

    # and now try only a smaller time range
    with patch.object(coinbase.session,
                      'get',
                      side_effect=mock_normal_coinbase_query):
        trades = coinbase.query_trade_history(
            start_ts=0,
            end_ts=1451606400,
            only_cache=False,
        )

    warnings = coinbase.msg_aggregator.consume_warnings()
    errors = coinbase.msg_aggregator.consume_errors()
    assert len(warnings) == 0
    assert len(errors) == 0
    assert len(trades) == 1
    assert trades[0].trade_type == TradeType.SELL
    assert trades[0].timestamp == 1427402520
Example #29
0
def satoshis_to_btc(satoshis: FVal) -> FVal:
    return satoshis * FVal('0.00000001')
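# Editor's addendum, a quick usage check (assumes FVal is imported as in the
# rest of the module and compares by numeric value, as Decimal does):
assert satoshis_to_btc(FVal(100_000_000)) == FVal(1)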
Example #30
0
    def _deserialize_channel_withdraw(
            raw_event: Dict[str, Any],
            identity_address_map: Dict[ChecksumAddress, ChecksumAddress],
    ) -> ChannelWithdraw:
        """Deserialize a channel withdraw event. Only for Tom pool.

        May raise DeserializationError.
        """
        inverse_identity_address_map = {
            address: identity for identity, address in identity_address_map.items()
        }
        try:
            event_id = raw_event['id']
            user_address = raw_event['user']
            timestamp = deserialize_timestamp(raw_event['timestamp'])
            amount = FVal(raw_event['amount']) / ADX_AMOUNT_MANTISSA
            channel_id = raw_event['channel']['channelId']
            token_address = raw_event['channel']['tokenAddr']
        except (DeserializationError, KeyError, ValueError) as e:
            msg = str(e)
            if isinstance(e, KeyError):
                msg = f'Missing key in event: {msg}.'

            log.error(
                'Failed to deserialize an AdEx channel withdraw event',
                error=msg,
                raw_event=raw_event,
                identity_address_map=identity_address_map,
            )
            raise DeserializationError(
                'Failed to deserialize an AdEx channel withdraw event. Check logs for more details',  # noqa: E501
            ) from e

        try:
            tx_hash, tx_address, tx_log_index = event_id.split(':')
            log_index = int(tx_log_index)
        except (AttributeError, ValueError) as e:
            msg = str(e)
            if isinstance(e, AttributeError):
                msg = f'Unexpected type in event id: {type(raw_event["id"])}.'

            log.error(
                'Failed to deserialize an AdEx channel withdraw event id',
                error=msg,
                raw_event=raw_event,
                identity_address_map=identity_address_map,
            )
            raise DeserializationError(
                'Failed to deserialize an AdEx channel withdraw event. Check logs for more details',  # noqa: E501
            ) from e

        try:
            address = deserialize_ethereum_address(user_address)
            identity_address = inverse_identity_address_map[address]
            tx_address = deserialize_ethereum_address(tx_address)
            token_address = deserialize_ethereum_address(token_address)
        except (KeyError, DeserializationError) as e:
            msg = str(e)
            if isinstance(e, KeyError):
                msg = f'Missing key in event: {msg}.'

            log.error(
                'Failed to deserialize an AdEx channel withdraw event',
                error=f'Invalid ethereum address in channel withdraw event: {token_address}. {msg}.',  # noqa: E501
                raw_event=raw_event,
                identity_address_map=identity_address_map,
            )
            raise DeserializationError(
                'Failed to deserialize an AdEx channel withdraw event. Check logs for more details',  # noqa: E501
            ) from e

        if tx_address != address:
            msg = (
                f'Unexpected ethereum address in channel withdraw event id: {tx_address}. '
                f'The transaction address does not match the user address: {address}.'
            )
            log.error(
                'Failed to deserialize an AdEx channel withdraw event',
                error=msg,
                raw_event=raw_event,
                identity_address_map=identity_address_map,
            )
            raise DeserializationError(
                'Failed to deserialize an AdEx channel withdraw event. Check logs for more details',  # noqa: E501
            )

        if token_address == A_ADX.ethereum_address:
            token = A_ADX
        elif token_address == A_DAI.ethereum_address:
            token = A_DAI
        else:
            log.error(
                'Failed to deserialize an AdEx channel withdraw event',
                error=f'Unexpected token address: {token_address} on channel: {channel_id}',
                raw_event=raw_event,
                identity_address_map=identity_address_map,
            )
            raise DeserializationError(
                'Failed to deserialize an AdEx channel withdraw event. Check logs for more details',  # noqa: E501
            )

        return ChannelWithdraw(
            tx_hash=tx_hash,
            address=address,
            identity_address=identity_address,
            timestamp=timestamp,
            value=Balance(amount=amount),
            channel_id=channel_id,
            pool_id=TOM_POOL_ID,
            token=token,
            log_index=log_index,
        )
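# Editor's addendum, a minimal sketch of the event id format the code above
# assumes, "<tx_hash>:<tx_address>:<log_index>", using a made-up id:
tx_hash, tx_address, tx_log_index = '0xtxhash:0xcontract:7'.split(':')
assert int(tx_log_index) == 7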