Code example #1
File: trades.py Project: tiemonl/rotki
    def get_history(
            self,
            start_ts: Timestamp,
            end_ts: Timestamp,
            has_premium: bool,
    ) -> HistoryResult:
        """Creates trades and loans history from start_ts to end_ts"""
        log.info(
            'Get/create trade history',
            start_ts=start_ts,
            end_ts=end_ts,
        )
        now = ts_now()
        # start building the full trades history list
        history: List[Union[Trade, MarginPosition]] = []
        asset_movements = []
        loans = []
        empty_or_error = ''

        def populate_history_cb(
                trades_history: List[Trade],
                margin_history: List[MarginPosition],
                result_asset_movements: List[AssetMovement],
                exchange_specific_data: Any,
        ) -> None:
            """This callback will run for succesfull exchange history query"""
            history.extend(trades_history)
            history.extend(margin_history)
            asset_movements.extend(result_asset_movements)

            if exchange_specific_data:
                # This can only be poloniex at the moment
                polo_loans_data = exchange_specific_data
                loans.extend(process_polo_loans(
                    msg_aggregator=self.msg_aggregator,
                    data=polo_loans_data,
                    # We need to have full history of loans available
                    start_ts=Timestamp(0),
                    end_ts=now,
                ))

        def fail_history_cb(error_msg: str) -> None:
            """This callback will run for failure in exchange history query"""
            nonlocal empty_or_error
            empty_or_error += '\n' + error_msg

        for _, exchange in self.exchange_manager.connected_exchanges.items():
            exchange.query_history_with_callbacks(
                # We need to have full history of exchanges available
                start_ts=Timestamp(0),
                end_ts=now,
                success_callback=populate_history_cb,
                fail_callback=fail_history_cb,
            )

        try:
            eth_transactions = self.chain_manager.ethereum.transactions.query(
                address=None,  # all addresses
                # We need to have full history of transactions available
                from_ts=Timestamp(0),
                to_ts=now,
                with_limit=False,  # at the moment ignore the limit for historical processing,
                recent_first=False,  # for history processing we need oldest first
            )
        except RemoteError as e:
            eth_transactions = []
            msg = str(e)
            self.msg_aggregator.add_error(
                f'There was an error when querying etherscan for ethereum transactions: {msg}. '
                f'The final history result will not include ethereum transactions',
            )
            empty_or_error += '\n' + msg

        # Include the external trades in the history
        external_trades = self.db.get_trades(
            # We need to have full history of trades available
            from_ts=Timestamp(0),
            to_ts=now,
            location=Location.EXTERNAL,
        )
        history.extend(external_trades)

        # Include makerdao DSR gains
        defi_events = []
        if self.chain_manager.makerdao_dsr and has_premium:
            dsr_gains = self.chain_manager.makerdao_dsr.get_dsr_gains_in_period(
                from_ts=start_ts,
                to_ts=end_ts,
            )
            for gain, timestamp in dsr_gains:
                if gain > ZERO:
                    defi_events.append(DefiEvent(
                        timestamp=timestamp,
                        event_type=DefiEventType.DSR_LOAN_GAIN,
                        asset=A_DAI,
                        amount=gain,
                    ))

        # Include makerdao vault events
        if self.chain_manager.makerdao_vaults and has_premium:
            vault_details = self.chain_manager.makerdao_vaults.get_vault_details()
            # We count the loss on a vault in the period if the last event is within
            # the given period. It's not a very accurate approach but it's good enough
            # for now. A more detailed approach would need an archive node or log
            # querying to find the owed debt at any given timestamp
            for detail in vault_details:
                last_event_ts = detail.events[-1].timestamp
                if start_ts <= last_event_ts <= end_ts:
                    defi_events.append(DefiEvent(
                        timestamp=last_event_ts,
                        event_type=DefiEventType.MAKERDAO_VAULT_LOSS,
                        asset=A_USD,
                        amount=detail.total_liquidated.usd_value + detail.total_interest_owed,
                    ))

        # include yearn vault events
        if self.chain_manager.yearn_vaults and has_premium:
            yearn_vaults_history = self.chain_manager.yearn_vaults.get_history(
                given_defi_balances=self.chain_manager.defi_balances,
                addresses=self.chain_manager.queried_addresses_for_module('yearn_vaults'),
                reset_db_data=False,
                from_timestamp=start_ts,
                to_timestamp=end_ts,
            )
            for _, vault_mappings in yearn_vaults_history.items():
                for _, vault_history in vault_mappings.items():
                    # Since we can't yet get historical values of vault tokens, count
                    # everything as USD for the purposes of the tax report
                    defi_events.append(DefiEvent(
                        timestamp=ts_now(),
                        event_type=DefiEventType.YEARN_VAULTS_PNL,
                        asset=A_USD,
                        amount=vault_history.profit_loss.usd_value,
                    ))

        # include compound events
        if self.chain_manager.compound and has_premium:
            compound_history = self.chain_manager.compound.get_history(
                given_defi_balances=self.chain_manager.defi_balances,
                addresses=self.chain_manager.queried_addresses_for_module('compound'),
                reset_db_data=False,
                from_timestamp=start_ts,
                to_timestamp=end_ts,
            )
            for event in compound_history['events']:
                if event.event_type != 'liquidation' and event.realized_pnl.amount == ZERO:
                    continue  # skip events with no realized profit/loss

                if event.event_type == 'redeem':
                    defi_events.append(DefiEvent(
                        timestamp=event.timestamp,
                        event_type=DefiEventType.COMPOUND_LOAN_INTEREST,
                        asset=event.to_asset,
                        amount=event.realized_pnl.amount,
                    ))
                elif event.event_type == 'repay':
                    defi_events.append(DefiEvent(
                        timestamp=event.timestamp,
                        event_type=DefiEventType.COMPOUND_DEBT_REPAY,
                        asset=event.asset,
                        amount=event.realized_pnl.amount,
                    ))
                elif event.event_type == 'liquidation':
                    defi_events.append(DefiEvent(
                        timestamp=event.timestamp,
                        event_type=DefiEventType.COMPOUND_LIQUIDATION_DEBT_REPAID,
                        asset=event.asset,
                        amount=event.value.amount,
                    ))
                    defi_events.append(DefiEvent(
                        timestamp=event.timestamp,
                        event_type=DefiEventType.COMPOUND_LIQUIDATION_COLLATERAL_LOST,
                        asset=event.to_asset,
                        amount=event.to_value.amount,
                    ))
                elif event.event_type == 'comp':
                    defi_events.append(DefiEvent(
                        timestamp=event.timestamp,
                        event_type=DefiEventType.COMPOUND_REWARDS,
                        asset=event.asset,
                        amount=event.realized_pnl.amount,
                    ))

        # include aave lending events
        aave = self.chain_manager.aave
        if aave is not None and has_premium:
            mapping = aave.get_history(
                addresses=self.chain_manager.queried_addresses_for_module('aave'),
                reset_db_data=False,
                from_timestamp=start_ts,
                to_timestamp=end_ts,
            )

            now = ts_now()
            for _, aave_history in mapping.items():
                total_amount_per_token: Dict[Asset, FVal] = defaultdict(FVal)
                for event in aave_history.events:
                    if event.timestamp < start_ts:
                        continue
                    if event.timestamp > end_ts:
                        break

                    if event.event_type == 'interest':
                        defi_events.append(DefiEvent(
                            timestamp=event.timestamp,
                            event_type=DefiEventType.AAVE_LOAN_INTEREST,
                            asset=event.asset,
                            amount=event.value.amount,
                        ))
                        total_amount_per_token[event.asset] += event.value.amount

                for token, balance in aave_history.total_earned.items():
                    # Add an extra event per token per address for the remaining unpaid amount
                    if token in total_amount_per_token:
                        defi_events.append(DefiEvent(
                            timestamp=now,
                            event_type=DefiEventType.AAVE_LOAN_INTEREST,
                            asset=token,  # this loop's token, not the last seen event's asset
                            amount=balance.amount - total_amount_per_token[token],
                        ))

        history.sort(key=lambda trade: action_get_timestamp(trade))
        return (
            empty_or_error,
            history,
            loans,
            asset_movements,
            eth_transactions,
            defi_events,
        )
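For context, a minimal sketch of how a caller might unpack the HistoryResult tuple returned above. The trades_historian access path is an assumption for illustration, not necessarily the project's actual wiring:

(
    error_or_empty,  # '' on success, accumulated error messages otherwise
    history,
    loans,
    asset_movements,
    eth_transactions,
    defi_events,
) = rotki.trades_historian.get_history(  # hypothetical access path
    start_ts=Timestamp(0),
    end_ts=ts_now(),
    has_premium=True,
)
if error_or_empty != '':
    log.warning(f'History was created with errors: {error_or_empty}')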
Code example #2
File: vaults.py Project: LefterisJP/rotkehlchen
    def get_vault_history(
            self,
            defi_balances: List['DefiProtocolBalances'],
            vault: YearnVault,
            address: ChecksumEthAddress,
            from_block: int,
            to_block: int,
    ) -> Optional[YearnVaultHistory]:
        from_block = max(from_block, vault.contract.deployed_block)
        last_query = self.database.get_used_query_range(
            name=f'{YEARN_VAULTS_PREFIX}_{vault.name.replace(" ", "_")}_{address}',
        )
        skip_query = last_query and to_block - last_query[1] < MAX_BLOCKTIME_CACHE

        events = self.database.get_yearn_vaults_events(address=address, vault=vault)
        if not skip_query:
            query_from_block = last_query[1] + 1 if last_query else from_block
            new_events = self._get_vault_deposit_events(vault, address, query_from_block, to_block)
            if len(events) == 0 and len(new_events) == 0:
                # After all events have been queried then also update the query range.
                # Even if no events are found for an address we need to remember the range
                self.database.update_used_block_query_range(
                    name=f'{YEARN_VAULTS_PREFIX}_{vault.name.replace(" ", "_")}_{address}',
                    from_block=from_block,
                    to_block=to_block,
                )
                return None

            new_events.extend(
                self._get_vault_withdraw_events(vault, address, query_from_block, to_block),
            )
            # Now update the DB with the new events
            self.database.add_yearn_vaults_events(address, new_events)
            events.extend(new_events)

        # After all events have been queried then also update the query range.
        # Even if no events are found for an address we need to remember the range
        self.database.update_used_block_query_range(
            name=f'{YEARN_VAULTS_PREFIX}_{vault.name.replace(" ", "_")}_{address}',
            from_block=from_block,
            to_block=to_block,
        )
        if len(events) == 0:
            return None

        events.sort(key=lambda x: x.timestamp)
        total_pnl = self._process_vault_events(events)

        current_balance = None
        for balance in defi_balances:
            found_balance = (
                balance.protocol.name == 'yearn.finance • Vaults' and
                balance.base_balance.token_symbol == vault.token.symbol
            )
            if found_balance:
                current_balance = balance.underlying_balances[0].balance
                total_pnl += current_balance
                break

        # Due to the way we calculate usd prices for vaults we need to get the current
        # usd price of the actual pnl amount at this point
        if total_pnl.amount != ZERO:
            usd_price = get_usd_price_zero_if_error(
                asset=vault.underlying_token,
                time=ts_now(),
                location='yearn vault history',
                msg_aggregator=self.msg_aggregator,
            )
            total_pnl.usd_value = usd_price * total_pnl.amount

        return YearnVaultHistory(events=events, profit_loss=total_pnl)
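The skip_query guard near the top re-queries only when the cached block range has fallen more than MAX_BLOCKTIME_CACHE blocks behind to_block. A small illustrative check, with all values assumed:

MAX_BLOCKTIME_CACHE = 250  # assumed value, for illustration only
last_query = (11_000_000, 12_000_000)  # previously covered (from_block, to_block)
to_block = 12_000_100
skip_query = last_query and to_block - last_query[1] < MAX_BLOCKTIME_CACHE
assert skip_query  # only 100 blocks behind the cached range, so no re-query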
Code example #3
File: history.py Project: jbrit/rotki
    def get_history(
            self,
            start_ts: Timestamp,
            end_ts: Timestamp,
            has_premium: bool,
    ) -> HistoryResult:
        """Creates trades and loans history from start_ts to end_ts"""
        log.info(
            'Get/create trade history',
            start_ts=start_ts,
            end_ts=end_ts,
        )
        now = ts_now()
        # start building the full trades history list
        history: List[Union[Trade, MarginPosition]] = []
        asset_movements = []
        loans = []
        empty_or_error = ''

        def populate_history_cb(
                trades_history: List[Trade],
                margin_history: List[MarginPosition],
                result_asset_movements: List[AssetMovement],
                exchange_specific_data: Any,
        ) -> None:
            """This callback will run for succesfull exchange history query"""
            history.extend(trades_history)
            history.extend(margin_history)
            asset_movements.extend(result_asset_movements)

            if exchange_specific_data:
                # This can only be poloniex at the moment
                polo_loans_data = exchange_specific_data
                loans.extend(process_polo_loans(
                    msg_aggregator=self.msg_aggregator,
                    data=polo_loans_data,
                    # We need to have full history of loans available
                    start_ts=Timestamp(0),
                    end_ts=now,
                ))

        def fail_history_cb(error_msg: str) -> None:
            """This callback will run for failure in exchange history query"""
            nonlocal empty_or_error
            empty_or_error += '\n' + error_msg

        for _, exchange in self.exchange_manager.connected_exchanges.items():
            exchange.query_history_with_callbacks(
                # We need to have full history of exchanges available
                start_ts=Timestamp(0),
                end_ts=now,
                success_callback=populate_history_cb,
                fail_callback=fail_history_cb,
            )

        try:
            eth_transactions = query_ethereum_transactions(
                database=self.db,
                etherscan=self.chain_manager.ethereum.etherscan,
                # We need to have full history of transactions available
                from_ts=Timestamp(0),
                to_ts=now,
            )
        except RemoteError as e:
            eth_transactions = []
            msg = str(e)
            self.msg_aggregator.add_error(
                f'There was an error when querying etherscan for ethereum transactions: {msg}. '
                f'The final history result will not include ethereum transactions',
            )
            empty_or_error += '\n' + msg

        # Include the external trades in the history
        external_trades = self.db.get_trades(
            # We need to have full history of trades available
            from_ts=Timestamp(0),
            to_ts=now,
            location=Location.EXTERNAL,
        )
        history.extend(external_trades)

        # Include makerdao DSR gains as a simple gains-only blockchain loan entry
        # for the given time period
        if self.chain_manager.makerdao and has_premium:
            gain = self.chain_manager.makerdao.get_dsr_gains_in_period(
                from_ts=start_ts,
                to_ts=end_ts,
            )
            if gain > ZERO:
                loans.append(Loan(
                    location=Location.BLOCKCHAIN,
                    open_time=start_ts,
                    close_time=end_ts,
                    currency=A_DAI,
                    fee=Fee(ZERO),
                    earned=AssetAmount(gain),
                    amount_lent=AssetAmount(ZERO),
                ))

        history.sort(key=lambda trade: action_get_timestamp(trade))
        return (
            empty_or_error,
            history,
            loans,
            asset_movements,
            eth_transactions,
        )
Code example #4
def test_query_statistics_asset_balance(
    rotkehlchen_api_server_with_exchanges,
    ethereum_accounts,
    btc_accounts,
    start_with_valid_premium,
):
    """Test that using the statistics asset balance over time endpoint works"""
    start_time = ts_now()
    # Disable caching of query results
    rotki = rotkehlchen_api_server_with_exchanges.rest_api.rotkehlchen
    rotki.chain_manager.cache_ttl_secs = 0
    setup = setup_balances(rotki, ethereum_accounts, btc_accounts)

    # query balances and save data in DB to have data to test the statistics endpoint
    with ExitStack() as stack:
        setup.enter_all_patches(stack)
        response = requests.get(
            api_url_for(
                rotkehlchen_api_server_with_exchanges,
                'allbalancesresource',
            ),
            json={'save_data': True},
        )
    assert_proper_response(response)

    # and now test that statistics work fine for ETH, with default time range (0 - now)
    response = requests.get(
        api_url_for(
            rotkehlchen_api_server_with_exchanges,
            "statisticsassetbalanceresource",
            asset="ETH",
        ),
    )
    if start_with_valid_premium:
        result = assert_proper_response_with_result(response)
        assert len(result) == 1
        entry = result[0]
        assert len(entry) == 4
        assert FVal(entry['amount']) == get_asset_balance_total(A_ETH, setup)
        assert entry['category'] == 'asset'
        assert entry['time'] >= start_time
        assert entry['usd_value'] is not None
    else:
        assert_error_response(
            response=response,
            contained_in_msg='logged in user testuser does not have a premium subscription',
            status_code=HTTPStatus.CONFLICT,
        )

    # and now test that statistics work fine for BTC, with given time range
    response = requests.get(
        api_url_for(
            rotkehlchen_api_server_with_exchanges,
            "statisticsassetbalanceresource",
            asset="BTC",
        ),
        json={
            'from_timestamp': 0,
            'to_timestamp': start_time + 60000
        },
    )
    if start_with_valid_premium:
        result = assert_proper_response_with_result(response)
        assert len(result) == 1
        entry = result[0]
        assert len(entry) == 4
        assert FVal(entry['amount']) == get_asset_balance_total(A_BTC, setup)
        assert entry['time'] >= start_time
        assert entry['category'] == 'asset'
        assert entry['usd_value'] is not None
    else:
        assert_error_response(
            response=response,
            contained_in_msg='logged in user testuser does not have a premium subscription',
            status_code=HTTPStatus.CONFLICT,
        )

    # finally test that if the time range is not including the saved balances we get nothing back
    response = requests.get(
        api_url_for(
            rotkehlchen_api_server_with_exchanges,
            "statisticsassetbalanceresource",
            asset="BTC",
        ),
        json={
            'from_timestamp': 0,
            'to_timestamp': start_time - 1
        },
    )
    if start_with_valid_premium:
        result = assert_proper_response_with_result(response)
        assert len(result) == 0
    else:
        assert_error_response(
            response=response,
            contained_in_msg='logged in user testuser does not have a premium subscription',
            status_code=HTTPStatus.CONFLICT,
        )
Code example #5
File: tokens.py Project: LefterisJP/rotkehlchen
    def query_tokens_for_addresses(
        self,
        addresses: List[ChecksumEthAddress],
        force_detection: bool,
    ) -> TokensReturn:
        """Queries/detects token balances for a list of addresses

        If an address's tokens were recently autodetected they are not detected again but the
        balances are simply queried. Unless force_detection is True.

        Returns the token balances of each address and the usd prices of the tokens
        """
        log.debug(
            'Querying/detecting token balances for all addresses',
            force_detection=force_detection,
        )
        ignored_assets = self.db.get_ignored_assets()
        exceptions = [
            # Ignore the veCRV balance in token queries. It's already detected by the
            # defi SDK as part of locked CRV in Vote Escrowed CRV, which is the right
            # way to approach it, as there is no way to assign a price to 1 veCRV: it
            # can be 1 CRV locked for 4 years or 4 CRV locked for 1 year etc.
            string_to_ethereum_address(
                '0x5f3b5DfEb7B28CDbD7FAba78963EE202a494e2A2'),
            # Ignore xsushi for now since it's queried by the defi SDK. We do this
            # because the SDK entry might return other tokens from sushi and we
            # don't fully support sushi yet.
            string_to_ethereum_address(
                '0x8798249c2E607446EfB7Ad49eC89dD1865Ff4272'),
            # Ignore stkAave since it's queried by defi SDK.
            string_to_ethereum_address(
                '0x4da27a545c0c5B758a6BA100e3a049001de870f5'),
            # Ignore the following tokens. They are old tokens of upgraded contracts which
            # duplicated the balances at upgrade instead of doing a token swap.
            # e.g.: https://github.com/rotki/rotki/issues/3548
            # TODO: At some point we should actually remove them from the DB and
            # upgrade possible occurrences in the user DB
            #
            # Old contract of Fetch.ai
            string_to_ethereum_address(
                '0x1D287CC25dAD7cCaF76a26bc660c5F7C8E2a05BD'),
        ]
        for asset in ignored_assets:  # don't query for the ignored tokens
            if asset.is_eth_token():
                # type ignore below since we know the asset is a token
                exceptions.append(EthereumToken.from_asset(asset).ethereum_address)  # type: ignore
        all_tokens = GlobalDBHandler().get_ethereum_tokens(
            exceptions=exceptions,
            except_protocols=['balancer'],
        )
        # With etherscan, chunks of more than 120 tokens result in "request uri too
        # large" errors, so the limitation is the request URI length rather than gas
        etherscan_chunks = list(get_chunks(all_tokens, n=ETHERSCAN_MAX_TOKEN_CHUNK_LENGTH))
        other_chunks = list(get_chunks(all_tokens, n=OTHER_MAX_TOKEN_CHUNK_LENGTH))
        now = ts_now()
        token_usd_price: Dict[EthereumToken, Price] = {}
        result = {}

        for address in addresses:
            saved_list = self.db.get_tokens_for_address_if_time(
                address=address, current_time=now)
            if force_detection or saved_list is None:
                balances = self.detect_tokens_for_address(
                    address=address,
                    token_usd_price=token_usd_price,
                    etherscan_chunks=etherscan_chunks,
                    other_chunks=other_chunks,
                )
            else:
                if len(saved_list) == 0:
                    continue  # Do not query if we know the address has no tokens

                balances = defaultdict(FVal)
                self._get_tokens_balance_and_price(
                    address=address,
                    tokens=saved_list,
                    balances=balances,
                    token_usd_price=token_usd_price,
                    call_order=None,  # use defaults
                )

            result[address] = balances

        return result, token_usd_price
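The chunking above exists because etherscan rejects overly long request URIs. A minimal sketch of a get_chunks helper as it is used here (the real implementation lives elsewhere in the project; this version is an assumption):

def get_chunks(lst, n):
    """Split lst into consecutive chunks of at most n items"""
    for i in range(0, len(lst), n):
        yield lst[i:i + n]

ETHERSCAN_MAX_TOKEN_CHUNK_LENGTH = 120  # assumed from the comment above
chunks = list(get_chunks(list(range(300)), n=ETHERSCAN_MAX_TOKEN_CHUNK_LENGTH))
assert [len(c) for c in chunks] == [120, 120, 60]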
Code example #6
File: test_api.py Project: ltfschoen/rotkehlchen
def test_query_balances(rotkehlchen_server, function_scope_binance):
    """Test that the query_balances call works properly.

    That means that the balances are all returned in the expected format and
    that they are saved in the DB.

    The test is for a user with fiat balances and with some binance balances.
    """

    # First set the fiat balances
    ok, _ = rotkehlchen_server.set_fiat_balance('USD', '100.5')
    assert ok
    ok, _ = rotkehlchen_server.set_fiat_balance('EUR', '75.5')
    assert ok
    rotkehlchen_server.rotkehlchen.binance = function_scope_binance
    rotkehlchen_server.rotkehlchen.connected_exchanges.append('binance')

    def mock_binance_balances(url):  # pylint: disable=unused-argument
        return MockResponse(200, BINANCE_BALANCES_RESPONSE)

    mock_binance = patch.object(
        rotkehlchen_server.rotkehlchen.binance.session,
        'get',
        side_effect=mock_binance_balances,
    )

    eur_usd_rate = Inquirer().query_fiat_pair(A_EUR, A_USD)

    eth_usd_rate = FVal('100.5')
    btc_usd_rate = FVal('120.1')

    def mock_query_cryptocompare_for_fiat_price(asset: Asset) -> Price:
        if asset == A_ETH:
            return Price(eth_usd_rate)
        elif asset == A_BTC:
            return Price(btc_usd_rate)

        # else
        raise AssertionError(
            f'Unexpected asset {asset} at mock cryptocompare query')

    mock_find_usd_price = patch(
        'rotkehlchen.inquirer.query_cryptocompare_for_fiat_price',
        side_effect=mock_query_cryptocompare_for_fiat_price,
    )

    now = ts_now()
    with mock_binance, mock_find_usd_price:
        result = rotkehlchen_server.query_balances(save_data=True)

    assert result['USD']['amount'] == '100.5'
    assert result['USD']['usd_value'] == '100.5'
    eur_amount = FVal('75.5')
    assert result['EUR']['amount'] == str(eur_amount)
    eur_usd_value = eur_amount * eur_usd_rate
    assert result['EUR']['usd_value'] == str(eur_usd_value)
    eth_amount = FVal('4763368.68006011')
    assert result['ETH']['amount'] == str(eth_amount)
    eth_usd_value = eth_amount * eth_usd_rate
    assert result['ETH']['usd_value'] == str(eth_usd_value)
    btc_amount = FVal('4723846.89208129')
    assert result['BTC']['amount'] == str(btc_amount)
    btc_usd_value = btc_amount * btc_usd_rate
    assert result['BTC']['usd_value'] == str(btc_usd_value)
    binance_usd_value = btc_usd_value + eth_usd_value
    assert result['location']['binance']['usd_value'] == str(binance_usd_value)
    banks_usd_value = eur_usd_value + FVal('100.5')
    assert result['location']['banks']['usd_value'] == str(banks_usd_value)
    assert result['net_usd'] == str(banks_usd_value + binance_usd_value)

    # make sure that balances also got saved in the DB
    db = rotkehlchen_server.rotkehlchen.data.db
    save_ts = db.get_last_balance_save_time()
    assert save_ts >= now
    assert save_ts - now < 5, 'Saving balances took too long'

    location_data = db.get_latest_location_value_distribution()
    assert len(location_data) == 4

    assert location_data[0].location == 'banks'
    assert location_data[0].usd_value == str(banks_usd_value)
    assert location_data[1].location == 'binance'
    assert location_data[1].usd_value == str(binance_usd_value)
    assert location_data[2].location == 'blockchain'
    assert location_data[3].location == 'total'
    assert location_data[3].usd_value == str(banks_usd_value + binance_usd_value)
Code example #7
def test_query_statistics_asset_balance_errors(rotkehlchen_api_server,
                                               rest_api_port):
    """Test that errors at the statistics asset balance over time endpoint are hanled properly"""
    start_time = ts_now()

    # Check that no asset given is an error
    response = requests.get(
        f'http://localhost:{rest_api_port}/api/1/statistics/balance')
    assert_error_response(
        response=response,
        status_code=HTTPStatus.NOT_FOUND,
    )

    # Check that an invalid asset given is an error
    response = requests.get(
        api_url_for(
            rotkehlchen_api_server,
            "statisticsassetbalanceresource",
            asset="NOTAREALASSSETLOL",
        ),
        json={
            'from_timestamp': 0,
            'to_timestamp': start_time
        },
    )
    assert_error_response(
        response=response,
        contained_in_msg='Unknown asset NOTAREALASSSETLOL provided',
        status_code=HTTPStatus.BAD_REQUEST,
    )

    # Check that giving invalid value for from_timestamp is an error
    response = requests.get(
        api_url_for(
            rotkehlchen_api_server,
            "statisticsassetbalanceresource",
            asset="BTC",
        ),
        json={
            'from_timestamp': 'dsad',
            'to_timestamp': start_time
        },
    )
    assert_error_response(
        response=response,
        contained_in_msg='Failed to deserialize a timestamp entry from string dsad',
        status_code=HTTPStatus.BAD_REQUEST,
    )

    # Check that giving invalid value for to_timestamp is an error
    response = requests.get(
        api_url_for(
            rotkehlchen_api_server,
            "statisticsassetbalanceresource",
            asset="BTC",
        ),
        json={
            'from_timestamp': 0,
            'to_timestamp': 53434.32
        },
    )
    assert_error_response(
        response=response,
        contained_in_msg='"Failed to deserialize a timestamp entry. Unexpected type',
        status_code=HTTPStatus.BAD_REQUEST,
    )
Code example #8
File: compound.py Project: LefterisJP/rotkehlchen
    def get_balances(
        self,
        given_defi_balances: GIVEN_DEFI_BALANCES,
    ) -> Dict[ChecksumEthAddress, Dict[str, Dict[Asset, CompoundBalance]]]:
        compound_balances = {}
        now = ts_now()
        if isinstance(given_defi_balances, dict):
            defi_balances = given_defi_balances
        else:
            defi_balances = given_defi_balances()

        for account, balance_entries in defi_balances.items():
            lending_map = {}
            borrowing_map = {}
            rewards_map = {}
            for balance_entry in balance_entries:
                if balance_entry.protocol.name not in ('Compound Governance', 'Compound'):
                    continue

                entry = balance_entry.base_balance
                if entry.token_address == ETH_SPECIAL_ADDRESS:
                    asset = A_ETH  # hacky way to specify ETH in compound
                else:
                    try:
                        asset = EthereumToken(entry.token_address)
                    except UnknownAsset:
                        log.error(
                            f'Encountered unknown asset {entry.token_symbol} with address '
                            f'{entry.token_address} in compound. Skipping',
                        )
                        continue

                unclaimed_comp_rewards = (
                    entry.token_address == A_COMP.ethereum_address and
                    balance_entry.protocol.name == 'Compound Governance'
                )
                if unclaimed_comp_rewards:
                    rewards_map[A_COMP] = CompoundBalance(
                        balance_type=BalanceType.ASSET,
                        balance=entry.balance,
                        apy=None,
                    )
                    continue

                if balance_entry.balance_type == 'Asset':
                    # Get the underlying balance
                    underlying_token_address = balance_entry.underlying_balances[0].token_address
                    try:
                        underlying_asset = EthereumToken(underlying_token_address)
                    except UnknownAsset:
                        log.error(
                            f'Encountered unknown token with address '
                            f'{underlying_token_address} in compound. Skipping',
                        )
                        continue

                    lending_map[underlying_asset] = CompoundBalance(
                        balance_type=BalanceType.ASSET,
                        balance=balance_entry.underlying_balances[0].balance,
                        apy=self._get_apy(entry.token_address, supply=True),
                    )
                else:  # 'Debt'
                    try:
                        ctoken = _compound_symbol_to_token(
                            symbol='c' + entry.token_symbol,
                            timestamp=now,
                        )
                    except UnknownAsset:
                        log.error(
                            f'Encountered unknown asset {entry.token_symbol} in '
                            f'compound while figuring out cToken. Skipping',
                        )
                        continue

                    borrowing_map[asset] = CompoundBalance(
                        balance_type=BalanceType.LIABILITY,
                        balance=entry.balance,
                        apy=self._get_apy(ctoken.ethereum_address, supply=False),
                    )

            if lending_map == {} and borrowing_map == {} and rewards_map == {}:
                # no balances for the account
                continue

            compound_balances[account] = {
                'rewards': rewards_map,
                'lending': lending_map,
                'borrowing': borrowing_map,
            }

        return compound_balances  # type: ignore
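A hedged usage sketch of the returned mapping; the compound and chain_manager instances are assumed:

balances = compound.get_balances(given_defi_balances=chain_manager.defi_balances)
for account, per_category in balances.items():
    for category in ('lending', 'borrowing', 'rewards'):
        for asset, compound_balance in per_category[category].items():
            print(account, category, asset, compound_balance.balance)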
Code example #9
def make_random_timestamp(start=1451606400, end=None):
    if end is None:
        end = ts_now()
    return random.randint(start, end)
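A quick usage note: the default start value 1451606400 is 2016-01-01 00:00:00 UTC, so calling the helper with no arguments returns a random timestamp between then and now.

ts = make_random_timestamp()
assert 1451606400 <= ts <= ts_now()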
Code example #10
    def get_historical_data(
            self,
            from_asset: Asset,
            to_asset: Asset,
            timestamp: Timestamp,
            only_check_cache: bool,
    ) -> Optional[List[PriceHistoryEntry]]:
        """
        Get historical hour price data from cryptocompare

        Returns a sorted list of price entries.

        If only_check_cache is True then if the data is not cached locally this will return None

        - May raise RemoteError if there is a problem reaching the cryptocompare server
        or with reading the response returned by the server
        - May raise UnsupportedAsset if from/to asset is not supported by cryptocompare
        """
        log.debug(
            'Retrieving historical price data from cryptocompare',
            from_asset=from_asset,
            to_asset=to_asset,
            timestamp=timestamp,
        )
        cached_data = self._got_cached_data_at_timestamp(
            from_asset=from_asset,
            to_asset=to_asset,
            timestamp=timestamp,
        )
        if cached_data is not None:
            return cached_data.data

        if only_check_cache:
            return None

        cache_key = _get_cache_key(from_asset=from_asset, to_asset=to_asset)
        if cache_key is None:
            return None

        now_ts = ts_now()
        # save time at start of the query, in case the query does not complete due to rate limit
        self.last_histohour_query_ts = now_ts
        if cache_key in self.price_history:
            old_data = self.price_history[cache_key].data
            transformed_old_data = deque([x._asdict() for x in old_data])
            if timestamp > self.price_history[cache_key].end_time:
                # We have a cache but the requested timestamp does not hit it
                new_data = self._get_histohour_data_for_range(
                    from_asset=from_asset,
                    to_asset=to_asset,
                    from_timestamp=now_ts,
                    to_timestamp=self.price_history[cache_key].end_time,
                )
                if len(new_data) == 0:
                    new_history = transformed_old_data
                else:
                    if len(old_data) != 0 and old_data[-1].time == new_data[0]['time']:
                        transformed_old_data.pop()
                    new_history = transformed_old_data + new_data

            else:
                # only other possibility, timestamp < cached start_time
                # Get all available data, even before to_timestamp
                new_data = self._get_histohour_data_for_range(
                    from_asset=from_asset,
                    to_asset=to_asset,
                    from_timestamp=self.price_history[cache_key].start_time,
                    to_timestamp=Timestamp(0),
                )
                if len(new_data) == 0:
                    new_history = transformed_old_data
                else:
                    if len(old_data) != 0 and new_data[-1]['time'] == old_data[0].time:
                        new_data.pop()
                    new_history = new_data + transformed_old_data

            calculated_history = list(new_history)

        else:
            calculated_history = list(self._get_histohour_data_for_range(
                from_asset=from_asset,
                to_asset=to_asset,
                from_timestamp=now_ts,
                to_timestamp=Timestamp(0),
            ))

        if len(calculated_history) == 0:
            return []  # empty list means we found nothing

        # Let's always check for data sanity for the hourly prices.
        _check_hourly_data_sanity(calculated_history, from_asset, to_asset)
        # and now since we actually queried the data let's also cache them
        filename = (
            get_or_make_price_history_dir(self.data_directory) /
            (PRICE_HISTORY_FILE_PREFIX + cache_key + '.json')
        )
        log.info(
            'Updating price history cache',
            filename=filename,
            from_asset=from_asset,
            to_asset=to_asset,
        )
        write_history_data_in_file(
            data=calculated_history,
            filepath=filename,
            start_ts=calculated_history[0]['time'],
            end_ts=now_ts,
        )

        # Finally save the objects in memory and return them
        data_including_time = {
            'data': calculated_history,
            'start_time': calculated_history[0]['time'],
            'end_time': now_ts,
        }
        self.price_history_file[cache_key] = filename
        self.price_history[cache_key] = _dict_history_to_data(data_including_time)
        self.last_histohour_query_ts = ts_now()  # also save when last query finished
        return self.price_history[cache_key].data
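A usage sketch under stated assumptions: a configured cryptocompare instance, and PriceHistoryEntry entries exposing time/low/high fields.

entries = cryptocompare.get_historical_data(
    from_asset=A_ETH,
    to_asset=A_USD,
    timestamp=Timestamp(1577836800),  # 2020-01-01 UTC
    only_check_cache=False,
)
if entries:  # None means the pair could not be queried; [] means nothing was found
    first = entries[0]
    print(first.time, first.low, first.high)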
Code example #11
    def _generate_reports(
        self,
        start_ts: Timestamp,
        end_ts: Timestamp,
        report_type: Literal['fills', 'account'],
        tempdir: str,
    ) -> List[str]:
        """
        Generates all the reports to get historical data from coinbase.

        https://docs.pro.coinbase.com/#reports
        There are 2 types of reports.
        1. Fill reports which are per product id (market)
        2. Account reports which are per account id

        The fill reports have the following data format:
        portfolio,trade id,product,side,created at,size,size unit,price,fee,
        total,price/fee/total unit

        The account reports have the following data format:
        portfolio,type,time,amount,balance,amount/balance unit,transfer id,trade id,order id

        Returns a list of filepaths where the reports were written.

        - Raises the same exceptions as _api_query()
        - Can raise KeyError if the API does not return the expected response format.
        """
        start_date = timestamp_to_iso8601(start_ts)
        end_date = timestamp_to_iso8601(end_ts)

        if report_type == 'fills':
            account_or_product_ids = self._get_products_ids()
            identifier_key = 'product_id'
        else:
            account_or_product_ids = self._get_account_ids()
            identifier_key = 'account_id'

        report_ids = []
        options = {
            'type': report_type,
            'start_date': start_date,
            'end_date': end_date,
            'format': 'csv',
            # The only way to disable emailing the report link is to give an invalid link
            'email': '*****@*****.**',
        }
        for identifier in account_or_product_ids:
            options[identifier_key] = identifier
            post_result = self._api_query('reports',
                                          request_method='POST',
                                          options=options)
            report_ids.append(post_result['id'])

        # At this point all reports must have been queued for creation at the server
        # Now wait until they are ready and pull them one by one
        report_paths = []
        last_change_ts = ts_now()
        while True:
            finished_ids_indices = []
            for idx, report_id in enumerate(report_ids):
                get_result = self._api_query(f'reports/{report_id}',
                                             request_method='GET')
                # Have to add assert here for mypy since the endpoint string is
                # a variable string and can't be overloaded and type checked
                assert isinstance(get_result, dict)
                if get_result['status'] != 'ready':
                    continue
                # a report is ready here so let's reset the timer
                last_change_ts = ts_now()
                file_url = get_result['file_url']
                response = requests.get(file_url)
                length = len(response.content)
                # empty fill reports have length of 95, empty account reports 85
                # So we assume a report of more than 100 chars has data.
                if length > 100:
                    log.debug(
                        f'Got a populated report for id: {report_id}. Writing it to disk'
                    )
                    filepath = os.path.join(tempdir, f'report_{report_id}.csv')
                    with open(filepath, 'wb') as f:
                        f.write(response.content)
                    report_paths.append(filepath)
                else:
                    log.debug(
                        f'Got report for id: {report_id} with length {length}. Skipping it'
                    )

                finished_ids_indices.append(idx)

            if ts_now() - last_change_ts > SECS_TO_WAIT_FOR_REPORT:
                raise RemoteError(
                    f'There has been no response from CoinbasePro reports for over '
                    f'{MINS_TO_WAIT_FOR_REPORT} minutes. Bailing out.',
                )

            # Delete the report ids that have been downloaded. Note: reverse order
            # so that we don't mess up the indices
            for idx in reversed(finished_ids_indices):
                del report_ids[idx]

            # When there are no more ids to query, break out of the loop
            if len(report_ids) == 0:
                break

        return report_paths
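A hedged call-site sketch; the coinbasepro exchange instance is assumed:

import tempfile

with tempfile.TemporaryDirectory() as tempdir:
    report_paths = coinbasepro._generate_reports(
        start_ts=Timestamp(0),
        end_ts=ts_now(),
        report_type='fills',
        tempdir=tempdir,
    )
    for path in report_paths:
        print('downloaded report:', path)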
Code example #12
    def _api_query(self, path: str) -> Dict[str, Any]:
        """Queries cryptocompare

        - May raise RemoteError if there is a problem reaching the cryptocompare server
        or with reading the response returned by the server
        """
        querystr = f'https://min-api.cryptocompare.com/data/{path}'
        log.debug('Querying cryptocompare', url=querystr)
        api_key = self._get_api_key()
        if api_key:
            querystr += '?' if '?' not in querystr else '&'
            querystr += f'api_key={api_key}'

        tries = CRYPTOCOMPARE_QUERY_RETRY_TIMES
        while tries >= 0:
            try:
                response = self.session.get(querystr)
            except requests.exceptions.RequestException as e:
                raise RemoteError(f'Cryptocompare API request failed due to {str(e)}') from e

            try:
                json_ret = rlk_jsonloads_dict(response.text)
            except JSONDecodeError as e:
                raise RemoteError(
                    f'Cryptocompare returned invalid JSON response: {response.text}',
                ) from e

            try:
                # backoff and retry 3 times = 1 + 1.5 + 3 = at most 5.5 secs
                # Failing is also fine, since all calls have secondary data sources
                # for example coingecko
                if json_ret.get('Message', None) == RATE_LIMIT_MSG:
                    self.last_rate_limit = ts_now()
                    if tries >= 1:
                        backoff_seconds = 3 / tries
                        log.debug(
                            f'Got rate limited by cryptocompare. '
                            f'Backing off for {backoff_seconds}',
                        )
                        gevent.sleep(backoff_seconds)
                        tries -= 1
                        continue

                    # else
                    log.debug(
                        f'Got rate limited by cryptocompare and did not manage to get a '
                        f'request through even after {CRYPTOCOMPARE_QUERY_RETRY_TIMES} '
                        f'incremental backoff retries',
                    )

                if json_ret.get('Response', 'Success') != 'Success':
                    error_message = f'Failed to query cryptocompare for: "{querystr}"'
                    if 'Message' in json_ret:
                        error_message += f'. Error: {json_ret["Message"]}'

                    log.warning(
                        'Cryptocompare query failure',
                        url=querystr,
                        error=error_message,
                        status_code=response.status_code,
                    )
                    raise RemoteError(error_message)

                return json_ret['Data'] if 'Data' in json_ret else json_ret
            except KeyError as e:
                raise RemoteError(
                    f'Unexpected format of Cryptocompare json_response. '
                    f'Missing key entry for {str(e)}',
                ) from e

        raise AssertionError('We should never get here')
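The backoff comment above checks out: assuming CRYPTOCOMPARE_QUERY_RETRY_TIMES is 3, the sleep schedule is 3/3, 3/2 and 3/1 seconds.

CRYPTOCOMPARE_QUERY_RETRY_TIMES = 3  # assumed value
schedule = [3 / tries for tries in range(CRYPTOCOMPARE_QUERY_RETRY_TIMES, 0, -1)]
assert schedule == [1.0, 1.5, 3.0]
assert sum(schedule) == 5.5  # matches the 'at most 5.5 secs' comment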
Code example #13
    def rate_limited_in_last(self, seconds: int = CRYPTOCOMPARE_RATE_LIMIT_WAIT_TIME) -> bool:
        """Checks when we were last rate limited by CC and if it was within the given seconds"""
        return ts_now() - self.last_rate_limit <= seconds
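Combined with the query method from example #12, this lets callers avoid hammering a rate-limited API. A sketch, with the endpoint path taken from example #15:

if not cryptocompare.rate_limited_in_last(seconds=60):
    coinlist = cryptocompare._api_query('all/coinlist')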
Code example #14
    def get_vaults_history(
        self,
        eth_balances: Dict[ChecksumEthAddress, BalanceSheet],
        addresses: List[ChecksumEthAddress],
        from_block: int,
        to_block: int,
    ) -> Dict[ChecksumEthAddress, Dict[str, YearnVaultHistory]]:
        query_addresses: List[EthAddress] = []
        query_checksumed_addresses: List[ChecksumEthAddress] = []

        # Skip addresses recently fetched
        for address in addresses:
            last_query = self.database.get_used_query_range(
                name=f'{YEARN_VAULTS_V2_PREFIX}_{address}',
            )
            skip_query = last_query and to_block - last_query[1] < MAX_BLOCKTIME_CACHE
            if not skip_query:
                query_addresses.append(EthAddress(address.lower()))
                query_checksumed_addresses.append(address)

        # If none of the addresses have yearn v2 positions this
        # will return a map of addresses to empty lists
        new_events_addresses = self.graph_inquirer.get_all_events(
            addresses=query_addresses,
            from_block=from_block,
            to_block=to_block,
        )
        current_time = ts_now()
        vaults_histories_per_address: Dict[ChecksumEthAddress, Dict[str, YearnVaultHistory]] = {}

        for address, new_events in new_events_addresses.items():
            # Query events from db for address
            db_events = self.database.get_yearn_vaults_v2_events(
                address=address,
                from_block=from_block,
                to_block=to_block,
            )
            # Flatten the data into a unique list
            events = list(new_events['deposits'])
            events.extend(new_events['withdrawals'])

            if len(db_events) == 0 and len(events) == 0:
                # After all events have been queried then also update the query range.
                # Even if no events are found for an address we need to remember the range
                self.database.update_used_block_query_range(
                    name=f'{YEARN_VAULTS_V2_PREFIX}_{address}',
                    from_block=from_block,
                    to_block=to_block,
                )
                continue
            self.database.add_yearn_vaults_events(address, events)

        for address in addresses:
            all_events = self.database.get_yearn_vaults_v2_events(
                address=address,
                from_block=from_block,
                to_block=to_block,
            )
            vaults_histories: Dict[str, YearnVaultHistory] = {}
            # Dict that stores vault token symbol and their events + total pnl
            vaults: Dict[str, Dict[str, List[YearnVaultEvent]]] = defaultdict(
                lambda: defaultdict(list),
            )
            for event in all_events:
                if event.event_type == 'deposit':
                    vault_token_symbol = event.to_asset.identifier
                    underlying_token = event.from_asset
                else:
                    vault_token_symbol = event.from_asset.identifier
                    underlying_token = event.to_asset
                vaults[vault_token_symbol]['events'].append(event)

            # Sort events in each vault
            for key in vaults.keys():
                vaults[key]['events'].sort(key=lambda x: x.timestamp)
                total_pnl = self._process_vault_events(vaults[key]['events'])
                balances = eth_balances.get(address)

                if balances:
                    for asset, balance in balances.assets.items():
                        found_balance = (
                            isinstance(asset, EthereumToken)
                            and asset.protocol == YEARN_VAULTS_V2_PROTOCOL
                            and asset.symbol == vault_token_symbol)
                        if found_balance:
                            total_pnl += balance.amount
                            break

                # Due to the way we calculate usd prices for vaults we
                # need to get the current usd price of the actual pnl
                # amount at this point
                if total_pnl.amount != ZERO:
                    usd_price = get_usd_price_zero_if_error(
                        asset=underlying_token,
                        time=current_time,
                        location='yearn vault v2 history',
                        msg_aggregator=self.msg_aggregator,
                    )
                    total_pnl.usd_value = usd_price * total_pnl.amount

                vaults_histories[key] = YearnVaultHistory(
                    events=vaults[key]['events'],
                    profit_loss=total_pnl,
                )
            vaults_histories_per_address[address] = vaults_histories

            self.database.update_used_block_query_range(
                name=f'{YEARN_VAULTS_V2_PREFIX}_{address}',
                from_block=from_block,
                to_block=to_block,
            )

        for address in query_checksumed_addresses:
            if (  # the address has no history, omit the key from the final results
                    address in vaults_histories_per_address
                    and len(vaults_histories_per_address[address]) == 0):
                del vaults_histories_per_address[address]

        return vaults_histories_per_address
Code example #15
    def all_coins(self) -> Dict[str, Any]:
        """Gets the list of all the cryptocompare coins"""
        # Get coin list of crypto compare
        invalidate_cache = True
        coinlist_cache_path = os.path.join(self.data_directory, 'cryptocompare_coinlist.json')
        if os.path.isfile(coinlist_cache_path):
            log.info('Found cryptocompare coinlist cache', path=coinlist_cache_path)
            with open(coinlist_cache_path, 'r') as f:
                try:
                    data = rlk_jsonloads_dict(f.read())
                    now = ts_now()
                    invalidate_cache = False

                    # If we got a cache and it's over a month old (2629800 seconds)
                    # then requery cryptocompare
                    if data['time'] < now and now - data['time'] > 2629800:
                        log.info('Cryptocompare coinlist cache is now invalidated')
                        invalidate_cache = True
                        data = data['data']
                except JSONDecodeError:
                    invalidate_cache = True

        if invalidate_cache:
            data = self._api_query('all/coinlist')

            # Also save the cache
            with open(coinlist_cache_path, 'w') as f:
                now = ts_now()
                log.info('Writing coinlist cache', timestamp=now)
                write_data = {'time': now, 'data': data}
                f.write(rlk_jsondumps(write_data))
        else:
            # in any case take the data
            data = data['data']

        # As described in the docs
        # https://min-api.cryptocompare.com/documentation?key=Other&cat=allCoinsWithContentEndpoint
        # This is not the entire list of assets in the system, so I am manually adding
        # here assets I am aware of that already have historical data in the
        # cryptocompare system
        data['DAO'] = object()
        data['USDT'] = object()
        data['VEN'] = object()
        data['AIR*'] = object()  # This is Aircoin
        # This is SpendCoin (https://coinmarketcap.com/currencies/spendcoin/)
        data['SPND'] = object()
        # This is eBitcoinCash (https://coinmarketcap.com/currencies/ebitcoin-cash/)
        data['EBCH'] = object()
        # This is Educare (https://coinmarketcap.com/currencies/educare/)
        data['EKT'] = object()
        # This is Fidelium (https://coinmarketcap.com/currencies/fidelium/)
        data['FID'] = object()
        # This is Knoxstertoken (https://coinmarketcap.com/currencies/knoxstertoken/)
        data['FKX'] = object()
        # This is FNKOS (https://coinmarketcap.com/currencies/fnkos/)
        data['FNKOS'] = object()
        # This is FansTime (https://coinmarketcap.com/currencies/fanstime/)
        data['FTI'] = object()
        # This is Gene Source Code Chain
        # (https://coinmarketcap.com/currencies/gene-source-code-chain/)
        data['GENE*'] = object()
        # This is GazeCoin (https://coinmarketcap.com/currencies/gazecoin/)
        data['GZE'] = object()
        # This is probably HarmonyCoin (https://coinmarketcap.com/currencies/harmonycoin-hmc/)
        data['HMC*'] = object()
        # This is IoTChain (https://coinmarketcap.com/currencies/iot-chain/)
        data['ITC'] = object()
        # This is MFTU (https://coinmarketcap.com/currencies/mainstream-for-the-underground/)
        data['MFTU'] = object()
        # This is Nexxus (https://coinmarketcap.com/currencies/nexxus/)
        data['NXX'] = object()
        # This is Owndata (https://coinmarketcap.com/currencies/owndata/)
        data['OWN'] = object()
        # This is PiplCoin (https://coinmarketcap.com/currencies/piplcoin/)
        data['PIPL'] = object()
        # This is PKG Token (https://coinmarketcap.com/currencies/pkg-token/)
        data['PKG'] = object()
        # This is Qubitica https://coinmarketcap.com/currencies/qubitica/
        data['QBIT'] = object()
        # This is DPRating https://coinmarketcap.com/currencies/dprating/
        data['RATING'] = object()
        # This is RouletteToken https://coinmarketcap.com/currencies/roulettetoken/
        data['RLT'] = object()
        # This is RocketPool https://coinmarketcap.com/currencies/rocket-pool/
        data['RPL'] = object()
        # This is SpeedMiningService (https://coinmarketcap.com/currencies/speed-mining-service/)
        data['SMS'] = object()
        # This is SmartShare (https://coinmarketcap.com/currencies/smartshare/)
        data['SSP'] = object()
        # This is ThoreCoin (https://coinmarketcap.com/currencies/thorecoin/)
        data['THR'] = object()
        # This is Transcodium (https://coinmarketcap.com/currencies/transcodium/)
        data['TNS'] = object()
        # This is XMedChainToken (https://coinmarketcap.com/currencies/xmct/)
        data['XMCT'] = object()
        # This is Xplay (https://coinmarketcap.com/currencies/xpa)
        data['XPA'] = object()

        return data
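The pattern above is a read-through disk cache keyed on a timestamp: 2629800 seconds is roughly one average month (30.44 days), after which the coin list is re-fetched. A minimal standalone sketch of the same logic, with hypothetical names and paths:

import json
import time
from pathlib import Path
from typing import Any, Dict, Optional

MONTH_IN_SECONDS = 2629800  # ~30.44 days, the TTL used above

def read_cached_coinlist(path: Path) -> Optional[Dict[str, Any]]:
    """Return the cached payload if it exists, parses and is under a month old."""
    if not path.is_file():
        return None
    try:
        payload = json.loads(path.read_text())
    except json.JSONDecodeError:
        return None
    if time.time() - payload['time'] > MONTH_IN_SECONDS:
        return None  # stale: the caller should requery and rewrite the cache
    return payload['data']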
Code example #16
def test_add_and_query_manually_tracked_balances(
    rotkehlchen_api_server,
    ethereum_accounts,
):
    """Test that adding and querying manually tracked balances via the API works fine"""
    async_query = random.choice([False, True])
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    setup = setup_balances(rotki,
                           ethereum_accounts=ethereum_accounts,
                           btc_accounts=None)
    _populate_tags(rotkehlchen_api_server)
    response = requests.get(
        api_url_for(
            rotkehlchen_api_server,
            'manuallytrackedbalancesresource',
        ),
        json={'async_query': async_query},
    )
    if async_query:
        task_id = assert_ok_async_response(response)
        outcome = wait_for_async_task(rotkehlchen_api_server, task_id)
        result = outcome['result']
    else:
        result = assert_proper_response_with_result(response)
    assert result['balances'] == [], 'In the beginning we should have no entries'

    balances = _populate_initial_balances(rotkehlchen_api_server)

    # now query and make sure the added balances are returned
    response = requests.get(
        api_url_for(
            rotkehlchen_api_server,
            'manuallytrackedbalancesresource',
        ),
        json={'async_query': async_query},
    )
    if async_query:
        task_id = assert_ok_async_response(response)
        outcome = wait_for_async_task(rotkehlchen_api_server, task_id)
        result = outcome['result']
    else:
        result = assert_proper_response_with_result(response)
    assert_balances_match(expected_balances=balances,
                          returned_balances=result['balances'])

    now = ts_now()
    # Also now test for https://github.com/rotki/rotki/issues/942 by querying for all balances
    # causing all balances to be saved and making sure the manual balances also got saved
    with ExitStack() as stack:
        setup.enter_ethereum_patches(stack)
        response = requests.get(
            api_url_for(
                rotkehlchen_api_server,
                "allbalancesresource",
            ),
            json={'async_query': async_query},
        )
        if async_query:
            task_id = assert_ok_async_response(response)
            outcome = wait_for_async_task(rotkehlchen_api_server, task_id)
            result = outcome['result']
        else:
            result = assert_proper_response_with_result(response)

    assets = result['assets']
    assert len(assets) == 5
    assert assets['BTC']['amount'] == '1.425'
    assert assets['XMR']['amount'] == '50.315'
    assert assets[A_BNB.identifier]['amount'] == '155'
    assert assets['ETH']['amount'] == '3E-12'  # from ethereum on-chain balances
    assert assets[A_RDN.identifier]['amount'] == '4E-12'  # from ethereum on-chain balances
    liabilities = result['liabilities']
    assert len(liabilities) == 2
    assert liabilities['ETH']['amount'] == '2'
    assert liabilities['USD']['amount'] == '100'
    # Check DB to make sure a save happened
    assert rotki.data.db.get_last_balance_save_time() >= now
    assert set(rotki.data.db.query_owned_assets()) == {
        'BTC',
        'XMR',
        A_BNB.identifier,
        'ETH',
        A_RDN.identifier,
    }
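The async/sync branch after each request repeats three times in this test. A hedged refactoring sketch that collapses it into one helper, reusing the assertion utilities these tests already use (the helper name is hypothetical):

def get_result(server, response, async_query: bool):
    """Hypothetical helper collapsing the repeated async/sync branch above."""
    if async_query:
        task_id = assert_ok_async_response(response)
        outcome = wait_for_async_task(server, task_id)
        return outcome['result']
    return assert_proper_response_with_result(response)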
Code example #17
File: test_bitpanda.py Project: rotki/rotki
def test_asset_movements(mock_bitpanda):
    """Test that deposits/withdrawals are correctly queried"""

    def mock_bitpanda_query(url: str, **kwargs):  # pylint: disable=unused-argument
        if '/wallets/transactions' in url:
            return MockResponse(status_code=HTTPStatus.OK, text=WALLET_TX_RESPONSE)
        if '/wallets' in url:
            return MockResponse(status_code=HTTPStatus.OK, text=WALLETS_RESPONSE)
        if '/fiatwallets/transactions' in url:
            return MockResponse(status_code=HTTPStatus.OK, text=FIATWALLETS_TX_RESPONSE)
        if '/fiatwallets' in url:
            return MockResponse(status_code=HTTPStatus.OK, text=FIAT_WALLETS_RESPONSE)

        # else
        raise AssertionError(f'Unexpected url {url} in bitpanda test')

    with patch.object(mock_bitpanda.session, 'get', side_effect=mock_bitpanda_query):
        movements = mock_bitpanda.query_deposits_withdrawals(
            start_ts=0,
            end_ts=ts_now(),
            only_cache=False,
        )

    warnings = mock_bitpanda.msg_aggregator.consume_warnings()
    errors = mock_bitpanda.msg_aggregator.consume_errors()
    assert len(warnings) == 0
    assert len(errors) == 0

    expected_movements = [AssetMovement(
        location=Location.BITPANDA,
        category=AssetMovementCategory.DEPOSIT,
        address=None,
        transaction_id=None,
        timestamp=1631088548,
        asset=A_EUR,
        amount=FVal('25'),
        fee_asset=A_EUR,
        fee=ZERO,
        link='movementid1',
    ), AssetMovement(
        location=Location.BITPANDA,
        category=AssetMovementCategory.WITHDRAWAL,
        address=None,
        transaction_id=None,
        timestamp=1631888548,
        asset=A_EUR,
        amount=FVal('50'),
        fee_asset=A_EUR,
        fee=FVal('0.01'),
        link='movementid2',
    ), AssetMovement(
        location=Location.BITPANDA,
        category=AssetMovementCategory.DEPOSIT,
        address='0x54dca71a34f498e3053cba240895e51da5f89d24',
        transaction_id='0x28cb2ba8ac14bdedb0ad021662b631952ce2514f1e3ff7870882ebe8a8c1b03f',
        timestamp=1633849272,
        asset=A_USDT,
        amount=FVal('6608.34105600'),
        fee_asset=A_USDT,
        fee=ZERO,
        link='XXX',
    ), AssetMovement(
        location=Location.BITPANDA,
        category=AssetMovementCategory.WITHDRAWAL,
        address='0x54dca71a34f498e3053cba240895e51da5f89d24',
        transaction_id='0xe45c1befc0968d2dab0374bc8d1aa3e193136dc769596d42e4d3274475bc7c60',
        timestamp=1597072246,
        asset=A_ETH,
        amount=FVal('1.55165264'),
        fee_asset=A_ETH,
        fee=FVal('0.00762000'),
        link='XXX',
    )]
    assert expected_movements == movements
Code example #18
File: test_statistics.py Project: hjorthjort/rotki
def test_query_statistics_value_distribution(
    rotkehlchen_api_server_with_exchanges,
    ethereum_accounts,
    btc_accounts,
    number_of_eth_accounts,
    start_with_valid_premium,
):
    """Test that using the statistics value distribution endpoint works"""
    start_time = ts_now()
    # Disable caching of query results
    rotki = rotkehlchen_api_server_with_exchanges.rest_api.rotkehlchen
    rotki.chain_manager.cache_ttl_secs = 0
    setup = setup_balances(rotki, ethereum_accounts, btc_accounts)

    # query balances and save data in DB to have data to test the statistics endpoint
    with setup.poloniex_patch, setup.binance_patch, setup.etherscan_patch, setup.bitcoin_patch:
        response = requests.get(
            api_url_for(
                rotkehlchen_api_server_with_exchanges,
                "allbalancesresource",
            ),
            json={'save_data': True},
        )
    assert_proper_response(response)

    def assert_okay_by_location(response):
        """Helper function to run next query and its assertion twice"""
        if start_with_valid_premium:
            assert_proper_response(response)
            data = response.json()
            assert data['message'] == ''
            assert len(data['result']) == 5
            locations = {'poloniex', 'binance', 'banks', 'blockchain', 'total'}
            for entry in data['result']:
                assert len(entry) == 3
                assert entry['time'] >= start_time
                assert entry['usd_value'] is not None
                assert entry['location'] in locations
                locations.remove(entry['location'])
            assert len(locations) == 0
        else:
            assert_error_response(
                response=response,
                contained_in_msg='logged in user testuser does not have a premium subscription',
                status_code=HTTPStatus.CONFLICT,
            )

    # and now test that statistics work fine for distribution by location for json body
    response = requests.get(
        api_url_for(
            rotkehlchen_api_server_with_exchanges,
            "statisticsvaluedistributionresource",
        ),
        json={'distribution_by': 'location'},
    )
    assert_okay_by_location(response)
    # and now test that statistics work fine for distribution by location for query params
    response = requests.get(
        api_url_for(
            rotkehlchen_api_server_with_exchanges,
            "statisticsvaluedistributionresource",
        ) + '?distribution_by=location',
    )
    assert_okay_by_location(response)

    # finally test that statistics work fine for distribution by asset
    response = requests.get(
        api_url_for(
            rotkehlchen_api_server_with_exchanges,
            "statisticsvaluedistributionresource",
        ),
        json={'distribution_by': 'asset'},
    )
    if start_with_valid_premium:
        assert_proper_response(response)
        data = response.json()
        assert data['message'] == ''
        assert len(data['result']) == 4
        totals = {
            'ETH': get_asset_balance_total('ETH', setup),
            'BTC': get_asset_balance_total('BTC', setup),
            'EUR': get_asset_balance_total('EUR', setup),
            'RDN': get_asset_balance_total('RDN', setup),
        }
        for entry in data['result']:
            assert len(entry) == 4
            assert entry['time'] >= start_time
            assert entry['usd_value'] is not None
            assert FVal(entry['amount']) == totals[entry['asset']]
    else:
        assert_error_response(
            response=response,
            contained_in_msg='logged in user testuser does not have a premium subscription',
            status_code=HTTPStatus.CONFLICT,
        )
Code example #19
    def maybe_upload_data_to_server(self, force_upload: bool = False) -> bool:
        # if user has no premium do nothing
        if self.premium is None:
            return False

        if not self.data.db.get_premium_sync() and not force_upload:
            return False

        # upload only once per hour
        diff = ts_now() - self.last_data_upload_ts
        if diff < 3600 and not force_upload:
            return False

        try:
            metadata = self.premium.query_last_data_metadata()
        except RemoteError as e:
            log.debug('upload to server -- fetching metadata error',
                      error=str(e))
            return False
        b64_encoded_data, our_hash = self.data.compress_and_encrypt_db(
            self.password)

        log.debug(
            'CAN_PUSH',
            ours=our_hash,
            theirs=metadata.data_hash,
        )
        if our_hash == metadata.data_hash and not force_upload:
            log.debug('upload to server stopped -- same hash')
            # same hash -- no need to upload anything
            return False

        our_last_write_ts = self.data.db.get_last_write_ts()
        if our_last_write_ts <= metadata.last_modify_ts and not force_upload:
            # Server's DB was modified after our local DB
            log.debug(
                f'upload to server stopped -- remote db({metadata.last_modify_ts}) '
                f'more recent than local({our_last_write_ts})',
            )
            return False

        data_bytes_size = len(base64.b64decode(b64_encoded_data))
        if data_bytes_size < metadata.data_size and not force_upload:
            # Let's be conservative.
            # TODO: Here perhaps prompt user in the future
            log.debug(
                f'upload to server stopped -- remote db({metadata.data_size}) '
                f'bigger than local({data_bytes_size})',
            )
            return False

        try:
            self.premium.upload_data(
                data_blob=b64_encoded_data,
                our_hash=our_hash,
                last_modify_ts=our_last_write_ts,
                compression_type='zlib',
            )
        except RemoteError as e:
            log.debug('upload to server -- upload error', error=str(e))
            return False

        # update the last data upload value
        self.last_data_upload_ts = ts_now()
        self.data.db.update_last_data_upload_ts(self.last_data_upload_ts)
        log.debug('upload to server -- success')
        return True
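The method above is a chain of guard clauses: cheap local checks (premium, the sync setting, an hourly throttle) run before any remote call, and force_upload bypasses everything except the premium check. A condensed, runnable sketch of just the local guards; SyncState and its attribute names are hypothetical stand-ins for what the real method reads:

import time
from dataclasses import dataclass

UPLOAD_PERIOD_SECS = 3600  # the once-per-hour throttle used above

@dataclass
class SyncState:
    """Hypothetical stand-in for the attributes the real method reads."""
    has_premium: bool
    sync_enabled: bool
    last_upload_ts: int

def should_attempt_upload(state: SyncState, force_upload: bool = False) -> bool:
    # Cheap local guards run before any remote metadata query
    if not state.has_premium:
        return False
    if not state.sync_enabled and not force_upload:
        return False
    if time.time() - state.last_upload_ts < UPLOAD_PERIOD_SECS and not force_upload:
        return False
    return True  # the remote hash/timestamp/size checks follow in the real method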
Code example #20
File: graph.py Project: zhiiker/rotki
    def _get_user_data(
        self,
        from_ts: Timestamp,
        to_ts: Timestamp,
        address: ChecksumEthAddress,
        balances: AaveBalances,
    ) -> AaveHistory:
        last_query = self.database.get_used_query_range(
            f'aave_events_{address}')
        db_events = self.database.get_aave_events(address=address)

        now = ts_now()
        last_query_ts = 0
        if last_query is not None:
            last_query_ts = last_query[1]
            from_ts = Timestamp(last_query_ts + 1)

        # All five names are rebound below if we query; they are never mutated,
        # so initializing them to the same empty list is safe
        deposits = withdrawals = borrows = repays = liquidation_calls = []
        query = self.graph.query(
            querystr=USER_EVENTS_QUERY,
            param_types={'$address': 'ID!'},
            param_values={'address': address.lower()},
        )
        user_result = query['users'][0]
        if now - last_query_ts > AAVE_GRAPH_RECENT_SECS:
            # In theory if these were individual queries we should do them only if
            # we have not queried recently. In practice since we only do 1 query above
            # this is useless for now, but keeping the mechanism in case we change
            # the way we query the subgraph
            deposits = self._parse_deposits(user_result['depositHistory'],
                                            from_ts, to_ts)
            withdrawals = self._parse_withdrawals(
                withdrawals=user_result['redeemUnderlyingHistory'],
                from_ts=from_ts,
                to_ts=to_ts,
            )
            borrows = self._parse_borrows(user_result['borrowHistory'],
                                          from_ts, to_ts)
            repays = self._parse_repays(user_result['repayHistory'], from_ts,
                                        to_ts)
            liquidation_calls = self._parse_liquidations(
                user_result['liquidationCallHistory'],
                from_ts,
                to_ts,
            )

        result = self._process_events(
            user_address=address,
            user_result=user_result,
            from_ts=from_ts,
            to_ts=to_ts,
            deposits=deposits,
            withdrawals=withdrawals,
            borrows=borrows,
            repays=repays,
            liquidations=liquidation_calls,
            db_events=db_events,
            balances=balances,
        )

        # Add all new events to the DB
        new_events: List[
            AaveEvent] = deposits + withdrawals + result.interest_events + borrows + repays + liquidation_calls  # type: ignore  # noqa: E501
        self.database.add_aave_events(address, new_events)
        # After all events have been queried then also update the query range.
        # Even if no events are found for an address we need to remember the range
        self.database.update_used_query_range(
            name=f'aave_events_{address}',
            start_ts=Timestamp(0),
            end_ts=now,
        )

        # Sort actions so that actions with same time are sorted deposit -> interest -> withdrawal
        all_events: List[AaveEvent] = new_events + db_events
        sort_map = {
            'deposit': 0,
            'interest': 0.1,
            'withdrawal': 0.2,
            'borrow': 0.3,
            'repay': 0.4,
            'liquidation': 0.5,
        }
        all_events.sort(
            key=lambda event: sort_map[event.event_type] + event.timestamp)
        return AaveHistory(
            events=all_events,
            total_earned_interest=result.total_earned_interest,
            total_lost=result.total_lost,
            total_earned_liquidations=result.total_earned_liquidations,
        )
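The fractional offsets in sort_map exist purely to break ties: events that share a timestamp sort deposit, then interest, then withdrawal and so on, because each event type adds a distinct sub-integer offset to the timestamp. A minimal reproduction with plain tuples in place of AaveEvent objects:

# Minimal reproduction of the tie-breaking trick above, using
# (event_type, timestamp) tuples instead of AaveEvent objects
sort_map = {'deposit': 0, 'interest': 0.1, 'withdrawal': 0.2,
            'borrow': 0.3, 'repay': 0.4, 'liquidation': 0.5}
events = [('withdrawal', 1000), ('deposit', 1000), ('interest', 1000)]
events.sort(key=lambda e: sort_map[e[0]] + e[1])
assert [e[0] for e in events] == ['deposit', 'interest', 'withdrawal']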
Code example #21
def test_query_statistics_value_distribution(
    rotkehlchen_api_server_with_exchanges,
    ethereum_accounts,
    btc_accounts,
    start_with_valid_premium,
):
    """Test that using the statistics value distribution endpoint works"""
    start_time = ts_now()
    # Disable caching of query results
    rotki = rotkehlchen_api_server_with_exchanges.rest_api.rotkehlchen
    rotki.chain_manager.cache_ttl_secs = 0
    token_balances = {A_RDN: ['111000', '4000000']}
    setup = setup_balances(
        rotki=rotki,
        ethereum_accounts=ethereum_accounts,
        btc_accounts=btc_accounts,
        token_balances=token_balances,
        manually_tracked_balances=[
            ManuallyTrackedBalance(
                id=-1,
                asset=A_EUR,
                label='My EUR bank',
                amount=FVal('1550'),
                location=Location.BANKS,
                tags=None,
                balance_type=BalanceType.ASSET,
            )
        ],
    )

    # query balances and save data in DB to have data to test the statistics endpoint
    with ExitStack() as stack:
        setup.enter_all_patches(stack)
        response = requests.get(
            api_url_for(
                rotkehlchen_api_server_with_exchanges,
                "allbalancesresource",
            ),
            json={'save_data': True},
        )
    assert_proper_response(response)

    def assert_okay_by_location(response):
        """Helper function to run next query and its assertion twice"""
        if start_with_valid_premium:
            result = assert_proper_response_with_result(response)
            assert len(result) == 5
            locations = {'poloniex', 'binance', 'banks', 'blockchain', 'total'}
            for entry in result:
                assert len(entry) == 3
                assert entry['time'] >= start_time
                assert entry['usd_value'] is not None
                assert entry['location'] in locations
                locations.remove(entry['location'])
            assert len(locations) == 0
        else:
            assert_error_response(
                response=response,
                contained_in_msg='logged in user testuser does not have a premium subscription',
                status_code=HTTPStatus.CONFLICT,
            )

    # and now test that statistics work fine for distribution by location for json body
    response = requests.get(
        api_url_for(
            rotkehlchen_api_server_with_exchanges,
            "statisticsvaluedistributionresource",
        ),
        json={'distribution_by': 'location'},
    )
    assert_okay_by_location(response)
    # and now test that statistics work fine for distribution by location for query params
    response = requests.get(
        api_url_for(
            rotkehlchen_api_server_with_exchanges,
            "statisticsvaluedistributionresource",
        ) + '?distribution_by=location',
    )
    assert_okay_by_location(response)

    # finally test that statistics work fine for distribution by asset
    response = requests.get(
        api_url_for(
            rotkehlchen_api_server_with_exchanges,
            "statisticsvaluedistributionresource",
        ),
        json={'distribution_by': 'asset'},
    )
    if start_with_valid_premium:
        result = assert_proper_response_with_result(response)
        assert len(result) == 4
        totals = {
            'ETH': get_asset_balance_total(A_ETH, setup),
            'BTC': get_asset_balance_total(A_BTC, setup),
            'EUR': get_asset_balance_total(A_EUR, setup),
            A_RDN.identifier: get_asset_balance_total(A_RDN, setup),
        }
        for entry in result:
            assert len(entry) == 5
            assert entry['time'] >= start_time
            assert entry['category'] == 'asset'
            assert entry['usd_value'] is not None
            assert FVal(entry['amount']) == totals[entry['asset']]
    else:
        assert_error_response(
            response=response,
            contained_in_msg='logged in user testuser does not have a premium subscription',
            status_code=HTTPStatus.CONFLICT,
        )
Code example #22
File: test_bitmex.py Project: smfang/rotkehlchen
def test_bitmex_api_withdrawals_deposit_unexpected_data(test_bitmex):
    """Test getting unexpected data in bitmex withdrawals deposit query is handled gracefully"""
    test_bitmex.cache_ttl_secs = 0

    original_input = """[{
"transactID": "b6c6fd2c-4d0c-b101-a41c-fa5aa1ce7ef1", "account": 126541, "currency": "XBt",
 "transactType": "Withdrawal", "amount": 16960386, "fee": 800, "transactStatus": "Completed",
 "address": "", "tx": "", "text": "", "transactTime": "2018-09-15T12:30:56.475Z",
 "walletBalance": 103394923, "marginBalance": null,
 "timestamp": "2018-09-15T12:30:56.475Z"}]"""
    now = ts_now()

    def query_bitmex_and_test(input_str, expected_warnings_num,
                              expected_errors_num):
        def mock_get_deposit_withdrawal(url, data):  # pylint: disable=unused-argument
            return MockResponse(200, input_str)

        with patch.object(test_bitmex.session,
                          'get',
                          side_effect=mock_get_deposit_withdrawal):
            movements = test_bitmex.query_deposits_withdrawals(
                start_ts=0,
                end_ts=now,
                end_at_least_ts=now,
            )

        if expected_warnings_num == 0 and expected_errors_num == 0:
            assert len(movements) == 1
        else:
            assert len(movements) == 0
            errors = test_bitmex.msg_aggregator.consume_errors()
            warnings = test_bitmex.msg_aggregator.consume_warnings()
            assert len(errors) == expected_errors_num
            assert len(warnings) == expected_warnings_num

    # First try with correct data to make sure everything works
    query_bitmex_and_test(original_input,
                          expected_warnings_num=0,
                          expected_errors_num=0)

    # From here and on present unexpected data
    # invalid timestamp
    given_input = original_input.replace('"2018-09-15T12:30:56.475Z"',
                                         '"dasd"')
    query_bitmex_and_test(given_input,
                          expected_warnings_num=0,
                          expected_errors_num=1)

    # invalid asset
    given_input = original_input.replace('"XBt"', '[]')
    query_bitmex_and_test(given_input,
                          expected_warnings_num=0,
                          expected_errors_num=1)

    # unknown asset
    given_input = original_input.replace('"XBt"', '"dadsdsa"')
    query_bitmex_and_test(given_input,
                          expected_warnings_num=1,
                          expected_errors_num=0)

    # invalid amount
    given_input = original_input.replace('16960386', 'null')
    query_bitmex_and_test(given_input,
                          expected_warnings_num=0,
                          expected_errors_num=1)

    # make sure that fee null/none works
    given_input = original_input.replace('800', 'null')
    query_bitmex_and_test(given_input,
                          expected_warnings_num=0,
                          expected_errors_num=0)

    # invalid fee
    given_input = original_input.replace('800', '"dadsdsa"')
    query_bitmex_and_test(given_input,
                          expected_warnings_num=0,
                          expected_errors_num=1)

    # missing key error
    given_input = original_input.replace('"amount": 16960386,', '')
    query_bitmex_and_test(given_input,
                          expected_warnings_num=0,
                          expected_errors_num=1)

    # check that a missing 'transactType' key is also handled as an error
    given_input = original_input.replace('"transactType": "Withdrawal",', '')
    query_bitmex_and_test(given_input,
                          expected_warnings_num=0,
                          expected_errors_num=1)
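Structurally this test is a small mutation harness: start from a known-good payload, corrupt one field at a time, and assert on the warning/error counters. A self-contained sketch of the same pattern with a toy parser (all names hypothetical):

import json

GOOD = '{"amount": 100, "asset": "BTC"}'

def parse(payload: str) -> dict:
    """Toy parser standing in for the deserialization under test."""
    data = json.loads(payload)
    if not isinstance(data.get('amount'), int) or not isinstance(data.get('asset'), str):
        raise ValueError('unexpected data')
    return data

assert parse(GOOD)  # the pristine payload must pass first
# Then mutate one field at a time and check each corruption is rejected,
# mirroring query_bitmex_and_test() above
for bad in (GOOD.replace('100', 'null'), GOOD.replace('"BTC"', '[]')):
    try:
        parse(bad)
        raise AssertionError(f'{bad} should have been rejected')
    except ValueError:
        pass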
Code example #23
def test_writing_fetching_data(data_dir, username):
    msg_aggregator = MessagesAggregator()
    data = DataHandler(data_dir, msg_aggregator)
    data.unlock(username, '123', create_new=True)

    data.db.add_blockchain_accounts(
        SupportedBlockchain.BITCOIN,
        [BlockchainAccountData(address='1CB7Pbji3tquDtMRp8mBkerimkFzWRkovS')],
    )
    data.db.add_blockchain_accounts(
        SupportedBlockchain.ETHEREUM,
        [
            BlockchainAccountData(
                address='0xd36029d76af6fE4A356528e4Dc66B2C18123597D'),
            BlockchainAccountData(
                address='0x80B369799104a47e98A553f3329812a44A7FaCDc'),
        ],
    )
    accounts = data.db.get_blockchain_accounts()
    assert isinstance(accounts, BlockchainAccounts)
    assert accounts.btc == ['1CB7Pbji3tquDtMRp8mBkerimkFzWRkovS']
    # See that after addition the address has been checksummed
    assert set(accounts.eth) == {
        '0xd36029d76af6fE4A356528e4Dc66B2C18123597D',
        '0x80B369799104a47e98A553f3329812a44A7FaCDc',
    }
    # Add existing account should fail
    with pytest.raises(InputError):  # pylint: disable=no-member
        data.db.add_blockchain_accounts(
            SupportedBlockchain.ETHEREUM,
            [
                BlockchainAccountData(
                    address='0xd36029d76af6fE4A356528e4Dc66B2C18123597D')
            ],
        )
    # Remove non-existing account
    with pytest.raises(InputError):
        data.db.remove_blockchain_accounts(
            SupportedBlockchain.ETHEREUM,
            ['0x136029d76af6fE4A356528e4Dc66B2C18123597D'],
        )
    # Remove existing account
    data.db.remove_blockchain_accounts(
        SupportedBlockchain.ETHEREUM,
        ['0xd36029d76af6fE4A356528e4Dc66B2C18123597D'],
    )
    accounts = data.db.get_blockchain_accounts()
    assert accounts.eth == ['0x80B369799104a47e98A553f3329812a44A7FaCDc']

    result, _ = data.add_ignored_assets([A_DAO])
    assert result
    result, _ = data.add_ignored_assets([A_DOGE])
    assert result
    result, _ = data.add_ignored_assets([A_DOGE])
    assert not result

    ignored_assets = data.db.get_ignored_assets()
    assert all(isinstance(asset, Asset) for asset in ignored_assets)
    assert set(ignored_assets) == {A_DAO, A_DOGE}
    # Test removing asset that is not in the list
    result, msg = data.remove_ignored_assets([A_RDN])
    assert 'not in ignored assets' in msg
    assert not result
    result, _ = data.remove_ignored_assets([A_DOGE])
    assert result
    assert data.db.get_ignored_assets() == [A_DAO]

    # With nothing inserted in settings make sure default values are returned
    result = data.db.get_settings()
    last_write_diff = ts_now() - result.last_write_ts
    # make sure last_write was within 3 secs
    assert 0 <= last_write_diff < 3
    expected_dict = {
        'have_premium': False,
        'eth_rpc_endpoint': 'http://localhost:8545',
        'ksm_rpc_endpoint': 'http://localhost:9933',
        'ui_floating_precision': DEFAULT_UI_FLOATING_PRECISION,
        'version': ROTKEHLCHEN_DB_VERSION,
        'include_crypto2crypto': DEFAULT_INCLUDE_CRYPTO2CRYPTO,
        'include_gas_costs': DEFAULT_INCLUDE_GAS_COSTS,
        'taxfree_after_period': YEAR_IN_SECONDS,
        'balance_save_frequency': DEFAULT_BALANCE_SAVE_FREQUENCY,
        'last_balance_save': 0,
        'main_currency': DEFAULT_MAIN_CURRENCY.identifier,
        'anonymized_logs': DEFAULT_ANONYMIZED_LOGS,
        'date_display_format': DEFAULT_DATE_DISPLAY_FORMAT,
        'last_data_upload_ts': 0,
        'premium_should_sync': False,
        'submit_usage_analytics': True,
        'last_write_ts': 0,
        'kraken_account_type': DEFAULT_KRAKEN_ACCOUNT_TYPE,
        'active_modules': DEFAULT_ACTIVE_MODULES,
        'frontend_settings': '',
        'account_for_assets_movements': DEFAULT_ACCOUNT_FOR_ASSETS_MOVEMENTS,
        'btc_derivation_gap_limit': DEFAULT_BTC_DERIVATION_GAP_LIMIT,
        'calculate_past_cost_basis': DEFAULT_CALCULATE_PAST_COST_BASIS,
        'display_date_in_localtime': DEFAULT_DISPLAY_DATE_IN_LOCALTIME,
        'current_price_oracles': DEFAULT_CURRENT_PRICE_ORACLES,
        'historical_price_oracles': DEFAULT_HISTORICAL_PRICE_ORACLES,
        'taxable_ledger_actions': DEFAULT_TAXABLE_LEDGER_ACTIONS,
    }
    assert len(expected_dict) == len(DBSettings()), 'One or more settings are missing'

    # Make sure that the results are the same. We compare key by key since the
    # last_write_ts check has to be skipped
    result_dict = result._asdict()
    for key, value in expected_dict.items():
        assert key in result_dict
        if key != 'last_write_ts':
            assert value == result_dict[key]
Code example #24
    def get_history(
        self,
        start_ts: Timestamp,
        end_ts: Timestamp,
        has_premium: bool,
    ) -> HistoryResult:
        """Creates trades and loans history from start_ts to end_ts"""
        log.info(
            'Get/create trade history',
            start_ts=start_ts,
            end_ts=end_ts,
        )
        now = ts_now()
        # start creating the all trades history list
        history: List[Union[Trade, MarginPosition]] = []
        asset_movements = []
        loans = []
        empty_or_error = ''

        def populate_history_cb(
            trades_history: List[Trade],
            margin_history: List[MarginPosition],
            result_asset_movements: List[AssetMovement],
            exchange_specific_data: Any,
        ) -> None:
            """This callback will run for succesfull exchange history query"""
            history.extend(trades_history)
            history.extend(margin_history)
            asset_movements.extend(result_asset_movements)

            if exchange_specific_data:
                # This can only be poloniex at the moment
                polo_loans_data = exchange_specific_data
                loans.extend(
                    process_polo_loans(
                        msg_aggregator=self.msg_aggregator,
                        data=polo_loans_data,
                        # We need to have full history of loans available
                        start_ts=Timestamp(0),
                        end_ts=now,
                    ))

        def fail_history_cb(error_msg: str) -> None:
            """This callback will run for failure in exchange history query"""
            nonlocal empty_or_error
            empty_or_error += '\n' + error_msg

        for _, exchange in self.exchange_manager.connected_exchanges.items():
            exchange.query_history_with_callbacks(
                # We need to have full history of exchanges available
                start_ts=Timestamp(0),
                end_ts=now,
                success_callback=populate_history_cb,
                fail_callback=fail_history_cb,
            )

        try:
            eth_transactions = query_ethereum_transactions(
                database=self.db,
                etherscan=self.chain_manager.ethereum.etherscan,
                # We need to have full history of transactions available
                from_ts=Timestamp(0),
                to_ts=now,
            )
        except RemoteError as e:
            eth_transactions = []
            msg = str(e)
            self.msg_aggregator.add_error(
                f'There was an error when querying etherscan for ethereum transactions: {msg}. '
                f'The final history result will not include ethereum transactions',
            )
            empty_or_error += '\n' + msg

        # Include the external trades in the history
        external_trades = self.db.get_trades(
            # We need to have full history of trades available
            from_ts=Timestamp(0),
            to_ts=now,
            location=Location.EXTERNAL,
        )
        history.extend(external_trades)

        # Include makerdao DSR gains
        defi_events = []
        if self.chain_manager.makerdao_dsr and has_premium:
            dsr_gains = self.chain_manager.makerdao_dsr.get_dsr_gains_in_period(
                from_ts=start_ts,
                to_ts=end_ts,
            )
            for gain, timestamp in dsr_gains:
                if gain > ZERO:
                    defi_events.append(
                        DefiEvent(
                            timestamp=timestamp,
                            event_type=DefiEventType.DSR_LOAN_GAIN,
                            asset=A_DAI,
                            amount=gain,
                        ))

        # Include makerdao vault events
        if self.chain_manager.makerdao_vaults and has_premium:
            vault_details = self.chain_manager.makerdao_vaults.get_vault_details()
            # We count the loss on a vault in the period if the last event is within
            # the given period. It's not a very accurate approach but it's good enough
            # for now. A more detailed approach would need archive node or log querying
            # to find owed debt at any given timestamp
            for detail in vault_details:
                last_event_ts = detail.events[-1].timestamp
                if last_event_ts >= start_ts and last_event_ts <= end_ts:
                    defi_events.append(
                        DefiEvent(
                            timestamp=last_event_ts,
                            event_type=DefiEventType.MAKERDAO_VAULT_LOSS,
                            asset=A_USD,
                            amount=detail.total_liquidated_usd +
                            detail.total_interest_owed,
                        ))

        # include aave lending events
        aave = self.chain_manager.aave
        if aave is not None and has_premium:
            mapping = aave.get_history(
                addresses=self.chain_manager.accounts.eth,
                reset_db_data=False,
            )

            now = ts_now()
            for _, aave_history in mapping.items():
                total_amount_per_token: Dict[Asset, FVal] = defaultdict(FVal)
                for event in aave_history.events:
                    if event.event_type == 'interest':
                        defi_events.append(
                            DefiEvent(
                                timestamp=event.timestamp,
                                event_type=DefiEventType.AAVE_LOAN_INTEREST,
                                asset=event.asset,
                                amount=event.value.amount,
                            ))
                        total_amount_per_token[
                            event.asset] += event.value.amount

                for token, balance in aave_history.total_earned.items():
                    # Add an extra event per token per address for the remaining unpaid amount
                    if token in total_amount_per_token:
                        defi_events.append(
                            DefiEvent(
                                timestamp=now,
                                event_type=DefiEventType.AAVE_LOAN_INTEREST,
                                asset=token,
                                amount=balance.amount -
                                total_amount_per_token[token],
                            ))

        history.sort(key=lambda trade: action_get_timestamp(trade))
        return (
            empty_or_error,
            history,
            loans,
            asset_movements,
            eth_transactions,
            defi_events,
        )
Code example #25
def test_add_trades(rotkehlchen_api_server):
    """Test that adding trades to the trades endpoint works as expected"""
    new_trades = [{  # own chain to fiat
        'timestamp': 1575640208,
        'location': 'external',
        'base_asset': 'BTC',
        'quote_asset': 'EUR',
        'trade_type': 'buy',
        'amount': '0.5541',
        'rate': '8422.1',
        'fee': '0.55',
        'fee_currency': 'USD',
        'link': 'optional trader identifier',
        'notes': 'optional notes',
    }, {  # own chain to eth token, with some optional fields (link,notes) missing
        'timestamp': 1585640208,
        'location': 'external',
        'base_asset': 'ETH',
        'quote_asset': A_AAVE.identifier,
        'trade_type': 'buy',
        'amount': '0.5541',
        'rate': '8422.1',
        'fee': '0.55',
        'fee_currency': 'USD',
    }, {  # token to token, with all optional fields (fee,fee_currency,link,notes) missing
        'timestamp': 1595640208,
        'location': 'external',
        'base_asset': A_DAI.identifier,
        'quote_asset': A_AAVE.identifier,
        'trade_type': 'buy',
        'amount': '1.5541',
        'rate': '22.1',
    }]
    # add multiple trades
    all_expected_trades = []
    for new_trade in new_trades:
        response = requests.put(
            api_url_for(
                rotkehlchen_api_server,
                'tradesresource',
            ),
            json=new_trade,
        )
        result = assert_proper_response_with_result(response)
        # And check that the identifier is correctly generated when returning the trade
        new_trade['trade_id'] = Trade(
            **TradeSchema().load(new_trade)).identifier
        expected_trade = new_trade.copy()
        for x in ('fee', 'fee_currency', 'link', 'notes'):
            expected_trade[x] = new_trade.get(x, None)
        assert result == expected_trade
        all_expected_trades.insert(0, expected_trade)
        # and now make sure the trade is saved by querying for it
        response = requests.get(
            api_url_for(
                rotkehlchen_api_server,
                "tradesresource",
            ),
        )
        result = assert_proper_response_with_result(response)
        data = response.json()
        assert data['message'] == ''
        assert result['entries'] == [{
            'entry': x,
            'ignored_in_accounting': False
        } for x in all_expected_trades]  # noqa: E501

    # Test trade with rate 0. Should fail
    zero_rate_trade = {
        'timestamp': 1575640208,
        'location': 'external',
        'base_asset': 'ETH',
        'quote_asset': A_WETH.identifier,
        'trade_type': 'buy',
        'amount': '0.5541',
        'rate': '0',
        'fee': '0.01',
        'fee_currency': 'USD',
        'link': 'optional trader identifier',
        'notes': 'optional notes',
    }
    response = requests.put(
        api_url_for(
            rotkehlchen_api_server,
            'tradesresource',
        ),
        json=zero_rate_trade,
    )
    assert_error_response(
        response=response,
        contained_in_msg='A zero rate is not allowed',
        status_code=HTTPStatus.BAD_REQUEST,
    )

    # Test trade with negative rate. Should fail
    negative_rate_trade = {
        'timestamp': 1575640208,
        'location': 'external',
        'base_asset': 'ETH',
        'quote_asset': A_WETH.identifier,
        'trade_type': 'buy',
        'amount': '0.5541',
        'rate': '-1',
        'fee': '0.01',
        'fee_currency': 'USD',
        'link': 'optional trader identifier',
        'notes': 'optional notes',
    }
    response = requests.put(
        api_url_for(
            rotkehlchen_api_server,
            'tradesresource',
        ),
        json=negative_rate_trade,
    )
    assert_error_response(
        response=response,
        contained_in_msg='A negative price is not allowed',
        status_code=HTTPStatus.BAD_REQUEST,
    )

    # Test trade with an invalid (future) timestamp
    invalid_timestamp_trade = {
        'timestamp': Timestamp(ts_now() + 200),
        'location': 'external',
        'base_asset': 'ETH',
        'quote_asset': A_WETH.identifier,
        'trade_type': 'buy',
        'amount': '0.5541',
        'rate': '1',
        'fee': '0.01',
        'fee_currency': 'USD',
        'link': 'optional trader identifier',
        'notes': 'optional notes',
    }

    response = requests.put(
        api_url_for(
            rotkehlchen_api_server,
            "tradesresource",
        ),
        json=invalid_timestamp_trade,
    )

    assert_error_response(
        response=response,
        contained_in_msg='Given date cannot be in the future',
        status_code=HTTPStatus.BAD_REQUEST,
    )

    # Test with fee & without fee_currency
    fee_and_no_fee_currency_trade = {
        'timestamp': 1595640208,
        'location': 'external',
        'base_asset': 'ETH',
        'quote_asset': 'USD',
        'trade_type': 'buy',
        'amount': '1.5541',
        'rate': '22.1',
        'fee': '0.55',
    }

    response = requests.put(
        api_url_for(
            rotkehlchen_api_server,
            "tradesresource",
        ),
        json=fee_and_no_fee_currency_trade,
    )

    assert_error_response(
        response=response,
        contained_in_msg='fee and fee_currency must be provided',
        status_code=HTTPStatus.BAD_REQUEST,
    )

    # Test with fee is zero
    fee_is_zero_trade = {
        'timestamp': 1595640208,
        'location': 'external',
        'base_asset': 'ETH',
        'quote_asset': 'USD',
        'trade_type': 'buy',
        'amount': '1.5541',
        'rate': '22.1',
        'fee': '0',
        'fee_currency': 'USD',
    }

    response = requests.put(
        api_url_for(
            rotkehlchen_api_server,
            "tradesresource",
        ),
        json=fee_is_zero_trade,
    )

    assert_error_response(
        response=response,
        contained_in_msg='fee cannot be zero',
        status_code=HTTPStatus.BAD_REQUEST,
    )
Code example #26
def test_upload_data_to_server(rotkehlchen_instance, username, db_password):
    """Test our side of uploading data to the server"""
    last_ts = rotkehlchen_instance.data.db.get_last_data_upload_ts()
    assert last_ts == 0

    # Write anything in the DB to set a non-zero last_write_ts
    rotkehlchen_instance.data.db.set_settings(
        ModifiableDBSettings(main_currency=A_GBP))
    last_write_ts = rotkehlchen_instance.data.db.get_last_write_ts()
    _, our_hash = rotkehlchen_instance.data.compress_and_encrypt_db(
        db_password)
    remote_hash = get_different_hash(our_hash)

    def mock_successful_upload_data_to_server(
        url,  # pylint: disable=unused-argument
        data,
        timeout,
    ):
        # Can't compare data blobs as they are encrypted and as such can be
        # different each time
        assert 'data_blob' in data
        assert data['original_hash'] == our_hash
        assert data['last_modify_ts'] == last_write_ts
        assert 'index' in data
        assert len(data['data_blob']) == data['length']
        assert 'nonce' in data
        assert data['compression'] == 'zlib'

        assert timeout == ROTKEHLCHEN_SERVER_TIMEOUT
        return MockResponse(200, '{"success": true}')

    patched_put = patch.object(
        rotkehlchen_instance.premium.session,
        'put',
        side_effect=mock_successful_upload_data_to_server,
    )
    patched_get = create_patched_requests_get_for_premium(
        session=rotkehlchen_instance.premium.session,
        metadata_last_modify_ts=0,
        metadata_data_hash=remote_hash,
        # Smaller Remote DB size
        metadata_data_size=2,
        saved_data='foo',
    )

    now = ts_now()
    with patched_get, patched_put:
        rotkehlchen_instance.premium_sync_manager.maybe_upload_data_to_server()

    last_ts = rotkehlchen_instance.data.db.get_last_data_upload_ts()
    msg = 'The last data upload timestamp should have been saved in the db as now'
    assert last_ts >= now and last_ts - now < 50, msg
    last_ts = rotkehlchen_instance.premium_sync_manager.last_data_upload_ts
    msg = 'The last data upload timestamp should also be in memory'
    assert last_ts >= now and last_ts - now < 50, msg

    # and now logout and login again and make sure that the last_data_upload_ts is correct
    rotkehlchen_instance.logout()
    rotkehlchen_instance.data.unlock(username, db_password, create_new=False)
    assert last_ts == rotkehlchen_instance.premium_sync_manager.last_data_upload_ts
    assert last_ts == rotkehlchen_instance.data.db.get_last_data_upload_ts()
Code example #27
    def maybe_upload_data_to_server(self) -> None:
        # if user has no premium do nothing
        if not self.premium:
            return

        # upload only once per hour
        diff = ts_now() - self.last_data_upload_ts
        if diff < 3600:
            return

        b64_encoded_data, our_hash = self.data.compress_and_encrypt_db(
            self.password)
        try:
            metadata = self.premium.query_last_data_metadata()
        except RemoteError as e:
            log.debug(
                'upload to server stopped -- query last metadata failed',
                error=str(e),
            )
            return

        log.debug(
            'CAN_PUSH',
            ours=our_hash,
            theirs=metadata.data_hash,
        )
        if our_hash == metadata.data_hash:
            log.debug('upload to server stopped -- same hash')
            # same hash -- no need to upload anything
            return

        our_last_write_ts = self.data.db.get_last_write_ts()
        if our_last_write_ts <= metadata.last_modify_ts:
            # Server's DB was modified after our local DB
            log.debug(
                'upload to server stopped -- remote db more recent than local')
            return

        data_bytes_size = len(base64.b64decode(b64_encoded_data))
        if data_bytes_size < metadata.data_size:
            # Let's be conservative.
            # TODO: Here perhaps prompt user in the future
            log.debug(
                'upload to server stopped -- remote db bigger than local')
            return

        try:
            self.premium.upload_data(
                data_blob=b64_encoded_data,
                our_hash=our_hash,
                last_modify_ts=our_last_write_ts,
                compression_type='zlib',
            )
        except RemoteError as e:
            log.debug('upload to server -- upload error', error=str(e))
            return

        # update the last data upload value
        self.last_data_upload_ts = ts_now()
        self.data.db.update_last_data_upload_ts(self.last_data_upload_ts)
        log.debug('upload to server -- success')
Code example #28
File: eth2.py Project: sveitser/rotki
def get_eth2_staking_deposits(
        ethereum: 'EthereumManager',
        addresses: List[ChecksumEthAddress],
        msg_aggregator: MessagesAggregator,
        database: 'DBHandler',
) -> List[Eth2Deposit]:
    """Get the addresses' ETH2 staking deposits

    For any given new address, query on-chain from the ETH2 deposit contract
    deployment timestamp until now.

    For any existing address, query on-chain from the minimum last used query
    range "end_ts" (among all the existing addresses) until now, as long as
    the difference between the two is at least REQUEST_DELTA_TS.

    Then write in DB all the new deposits and finally return them all.
    """
    new_deposits: List[Eth2Deposit] = []
    new_addresses: List[ChecksumEthAddress] = []
    existing_addresses: List[ChecksumEthAddress] = []
    to_ts = ts_now()
    min_from_ts = to_ts

    # Get addresses' last used query range for ETH2 deposits
    for address in addresses:
        entry_name = f'{ETH2_DEPOSITS_PREFIX}_{address}'
        deposits_range = database.get_used_query_range(name=entry_name)

        if not deposits_range:
            new_addresses.append(address)
        else:
            existing_addresses.append(address)
            min_from_ts = min(min_from_ts, deposits_range[1])

    # Get deposits for new addresses
    if new_addresses:
        deposits_ = _get_eth2_staking_deposits_onchain(
            ethereum=ethereum,
            addresses=new_addresses,
            msg_aggregator=msg_aggregator,
            from_ts=ETH2_DEPLOYED_TS,
            to_ts=to_ts,
        )
        new_deposits.extend(deposits_)

        for address in new_addresses:
            entry_name = f'{ETH2_DEPOSITS_PREFIX}_{address}'
            database.update_used_query_range(
                name=entry_name,
                start_ts=ETH2_DEPLOYED_TS,
                end_ts=to_ts,
            )

    # Get new deposits for existing addresses
    if existing_addresses and min_from_ts + REQUEST_DELTA_TS <= to_ts:
        deposits_ = _get_eth2_staking_deposits_onchain(
            ethereum=ethereum,
            addresses=existing_addresses,
            msg_aggregator=msg_aggregator,
            from_ts=Timestamp(min_from_ts),
            to_ts=to_ts,
        )
        new_deposits.extend(deposits_)

        for address in existing_addresses:
            entry_name = f'{ETH2_DEPOSITS_PREFIX}_{address}'
            database.update_used_query_range(
                name=entry_name,
                start_ts=Timestamp(min_from_ts),
                end_ts=to_ts,
            )

    # Insert new deposits in DB
    if new_deposits:
        database.add_eth2_deposits(new_deposits)

    # Fetch all DB deposits for the given addresses
    deposits: List[Eth2Deposit] = []
    for address in addresses:
        db_deposits = database.get_eth2_deposits(address=address)
        deposits.extend(db_deposits)

    deposits.sort(key=lambda deposit: (deposit.timestamp, deposit.log_index))
    return deposits
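The new/existing split above rests on the persisted "used query range" bookkeeping: an address with no recorded range gets a full historical query, while previously seen addresses only need data from the oldest recorded end_ts onwards. A self-contained sketch of that bookkeeping, with an in-memory dict standing in for the database (all names hypothetical):

from typing import Dict, List, Optional, Tuple

# In-memory stand-in for the DB's used-query-range table (name -> (start_ts, end_ts))
_ranges: Dict[str, Tuple[int, int]] = {}

def get_used_query_range(name: str) -> Optional[Tuple[int, int]]:
    return _ranges.get(name)

def update_used_query_range(name: str, start_ts: int, end_ts: int) -> None:
    _ranges[name] = (start_ts, end_ts)

def split_addresses(
        addresses: List[str],
        now: int,
        prefix: str = 'eth2_deposits',
) -> Tuple[List[str], List[str], int]:
    """Mirror the loop above: unseen addresses need a full historical query,
    seen ones only need data from the oldest recorded end_ts onwards."""
    new, existing, min_from_ts = [], [], now
    for address in addresses:
        deposits_range = get_used_query_range(f'{prefix}_{address}')
        if deposits_range is None:
            new.append(address)
        else:
            existing.append(address)
            min_from_ts = min(min_from_ts, deposits_range[1])
    return new, existing, min_from_ts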
Code example #29
    def _manage_call_counter(self, method: str) -> None:
        self.last_query_ts = ts_now()
        if method in ('Ledgers', 'TradesHistory'):
            self.call_counter += 2
        else:
            self.call_counter += 1
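This reads like one half of a Kraken-style rate limiter: 'Ledgers' and 'TradesHistory' cost two call-counter points, everything else costs one, and presumably the counter decays over time so callers can wait for budget. A hedged, self-contained sketch of how such a counter can gate queries; the limit and decay rate here are illustrative, not Kraken's actual values:

import time

class CallBudget:
    """Hypothetical sketch of a decaying call counter gating API usage."""

    def __init__(self, limit: int = 15, decay_per_sec: float = 0.33):
        self.counter = 0.0
        self.limit = limit
        self.decay_per_sec = decay_per_sec
        self.last_ts = time.time()

    def charge(self, method: str) -> None:
        now = time.time()
        # decay the counter for the time elapsed since the last call
        self.counter = max(0.0, self.counter - (now - self.last_ts) * self.decay_per_sec)
        self.last_ts = now
        cost = 2 if method in ('Ledgers', 'TradesHistory') else 1
        while self.counter + cost > self.limit:
            time.sleep(1)  # wait until enough budget has decayed back
            self.counter = max(0.0, self.counter - self.decay_per_sec)
        self.counter += cost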
Code example #30
    def get_historical_data(
        self,
        from_asset: Asset,
        to_asset: Asset,
        timestamp: Timestamp,
        historical_data_start: Timestamp,
    ) -> List[PriceHistoryEntry]:
        """
        Get historical price data from cryptocompare

        Returns a sorted list of price entries.

        - May raise RemoteError if there is a problem reaching the cryptocompare server
        or with reading the response returned by the server
        - May raise UnsupportedAsset if from/to asset is not supported by cryptocompare
        """
        log.debug(
            'Retrieving historical price data from cryptocompare',
            from_asset=from_asset,
            to_asset=to_asset,
            timestamp=timestamp,
        )

        cache_key = PairCacheKey(from_asset.identifier + '_' +
                                 to_asset.identifier)
        got_cached_value = self._got_cached_price(cache_key, timestamp)
        if got_cached_value:
            return self.price_history[cache_key].data

        now_ts = ts_now()
        cryptocompare_hourquerylimit = 2000
        calculated_history: List[Dict[str, Any]] = []

        if historical_data_start <= timestamp:
            end_date = historical_data_start
        else:
            end_date = timestamp
        while True:
            pr_end_date = end_date
            end_date = Timestamp(end_date +
                                 (cryptocompare_hourquerylimit) * 3600)

            log.debug(
                'Querying cryptocompare for hourly historical price',
                from_asset=from_asset,
                to_asset=to_asset,
                cryptocompare_hourquerylimit=cryptocompare_hourquerylimit,
                end_date=end_date,
            )

            resp = self.query_endpoint_histohour(
                from_asset=from_asset,
                to_asset=to_asset,
                limit=2000,
                to_timestamp=end_date,
            )

            if pr_end_date != resp['TimeFrom']:
                # We got more entries than we needed since we are close to now_ts,
                # so skip all the already included entries
                diff = pr_end_date - resp['TimeFrom']
                # If the start date differs from the previous end date by less than
                # 3600 secs do nothing. If by more, skip all already included entries
                if diff >= 3600:
                    if resp['Data'][diff // 3600]['time'] != pr_end_date:
                        raise RemoteError(
                            'Unexpected data format in cryptocompare query_endpoint_histohour. '
                            'Expected to find the previous date timestamp during '
                            'cryptocompare historical data fetching',
                        )
                    # keep only the part from the previous timestamp onwards
                    resp['Data'] = resp['Data'][diff // 3600:]

            # The end dates of a cryptocompare query may not match exactly. Since
            # this is hourly historical data the returned end date can differ from
            # the requested one by up to 3600 secs, but no more.
            end_dates_dont_match = (end_date < now_ts
                                    and resp['TimeTo'] != end_date)
            if end_dates_dont_match:
                if resp['TimeTo'] - end_date >= 3600:
                    raise RemoteError(
                        'Unexpected data format in cryptocompare query_endpoint_histohour. '
                        'End dates do not match.',
                    )
                # if it's just a drift within the hour, update end_date so that
                # it is picked up by the next iterations of the loop
                end_date = resp['TimeTo']

            # If last time slot and first new are the same, skip the first new slot
            last_entry_equal_to_first = (len(calculated_history) != 0
                                         and calculated_history[-1]['time']
                                         == resp['Data'][0]['time'])
            if last_entry_equal_to_first:
                resp['Data'] = resp['Data'][1:]
            calculated_history += resp['Data']
            if end_date >= now_ts:
                break

        # Let's always check for data sanity for the hourly prices.
        _check_hourly_data_sanity(calculated_history, from_asset, to_asset)
        # and now since we actually queried the data let's also cache them
        filename = self.data_directory / ('price_history_' + cache_key +
                                          '.json')
        log.info(
            'Updating price history cache',
            filename=filename,
            from_asset=from_asset,
            to_asset=to_asset,
        )
        write_history_data_in_file(
            data=calculated_history,
            filepath=filename,
            start_ts=historical_data_start,
            end_ts=now_ts,
        )

        # Finally save the objects in memory and return them
        data_including_time = {
            'data': calculated_history,
            'start_time': historical_data_start,
            'end_time': end_date,
        }
        self.price_history_file[cache_key] = filename
        self.price_history[cache_key] = _dict_history_to_data(
            data_including_time)

        return self.price_history[cache_key].data
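The loop above pages through history by advancing the query window 2000 hours at a time (the per-call limit) until it reaches now_ts. A standalone sketch that just computes those windows, leaving out the drift corrections the real code applies:

from typing import List, Tuple

HOUR = 3600
QUERY_LIMIT = 2000  # cryptocompare returns at most 2000 hourly entries per call

def histohour_windows(start_ts: int, now_ts: int) -> List[Tuple[int, int]]:
    """Sketch of the pagination above: successive windows of QUERY_LIMIT hours,
    stopping once a window reaches now_ts (drift handling omitted)."""
    windows = []
    end_date = start_ts
    while True:
        pr_end_date = end_date
        end_date = end_date + QUERY_LIMIT * HOUR
        windows.append((pr_end_date, end_date))
        if end_date >= now_ts:
            break
    return windows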