Example #1
            rotkehlchen_api_server,
            "aavebalancesresource",
        ), json={'async_query': True})
        task_id = assert_ok_async_response(response)
        outcome = wait_for_async_task(rotkehlchen_api_server, task_id)

    assert outcome['result'] is None
    assert outcome['message'] == 'aave module is not activated'


@pytest.mark.parametrize('ethereum_accounts', [['0x2B888954421b424C5D3D9Ce9bB67c9bD47537d12']])
@pytest.mark.parametrize('ethereum_modules', [['aave']])
@pytest.mark.parametrize('start_with_valid_premium', [True])
@pytest.mark.parametrize('mocked_price_queries', [aave_mocked_historical_prices])
@pytest.mark.parametrize('mocked_current_prices', [aave_mocked_current_prices])
@pytest.mark.parametrize('default_mock_price_value', [FVal(1)])
def test_query_aave_history(rotkehlchen_api_server, ethereum_accounts):  # pylint: disable=unused-argument  # noqa: E501
    """Check querying the aave histoy endpoint works. Uses real data.

    Since this actually queries real blockchain data for aave it is a very slow test
    due to the sheer amount of log queries
    """
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    setup = setup_balances(rotki, ethereum_accounts=ethereum_accounts, btc_accounts=None)
    # Since this test is slow we don't run both async and sync in the same test run
    # Instead we randomly choose one. Eventually both cases will be covered.
    async_query = random.choice([True, False])

    with ExitStack() as stack:
        # patch ethereum/etherscan to not autodetect tokens
        setup.enter_ethereum_patches(stack)
Example #2
def test_coinbase_query_balances(function_scope_coinbase):
    """Test that coinbase balance query works fine for the happy path"""
    coinbase = function_scope_coinbase

    def mock_coinbase_accounts(url):  # pylint: disable=unused-argument
        response = MockResponse(
            200,
            """
{
  "pagination": {
    "ending_before": null,
    "starting_after": null,
    "limit": 25,
    "order": "desc",
    "previous_uri": null,
    "next_uri": null
  },
  "data": [
    {
      "id": "58542935-67b5-56e1-a3f9-42686e07fa40",
      "name": "My Vault",
      "primary": false,
      "type": "vault",
      "currency": "BTC",
      "balance": {
        "amount": "4.00000000",
        "currency": "BTC"
      },
      "created_at": "2015-01-31T20:49:02Z",
      "updated_at": "2015-01-31T20:49:02Z",
      "resource": "account",
      "resource_path": "/v2/accounts/58542935-67b5-56e1-a3f9-42686e07fa40",
      "ready": true
    },
    {
      "id": "2bbf394c-193b-5b2a-9155-3b4732659ede",
      "name": "My Wallet",
      "primary": true,
      "type": "wallet",
      "currency": "ETH",
      "balance": {
        "amount": "39.59000000",
        "currency": "ETH"
      },
      "created_at": "2015-01-31T20:49:02Z",
      "updated_at": "2015-01-31T20:49:02Z",
      "resource": "account",
      "resource_path": "/v2/accounts/2bbf394c-193b-5b2a-9155-3b4732659ede"
    },
    {
      "id": "68542935-67b5-56e1-a3f9-42686e07fa40",
      "name": "Another Wallet",
      "primary": false,
      "type": "vault",
      "currency": "BTC",
      "balance": {
        "amount": "1.230000000",
        "currency": "BTC"
      },
      "created_at": "2015-01-31T20:49:02Z",
      "updated_at": "2015-01-31T20:49:02Z",
      "resource": "account",
      "resource_path": "/v2/accounts/68542935-67b5-56e1-a3f9-42686e07fa40",
      "ready": true
    }
  ]
}
            """,
        )
        return response

    with patch.object(coinbase.session,
                      'get',
                      side_effect=mock_coinbase_accounts):
        balances, msg = coinbase.query_balances()

    assert msg == ''
    assert len(balances) == 2
    assert balances[A_BTC]['amount'] == FVal('5.23')
    assert balances[A_ETH]['amount'] == FVal('39.59')
    assert 'usd_value' in balances[A_ETH]
    assert 'usd_value' in balances[A_BTC]

    warnings = coinbase.msg_aggregator.consume_warnings()
    errors = coinbase.msg_aggregator.consume_errors()
    assert len(warnings) == 0
    assert len(errors) == 0
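
The mocked payload above contains two BTC accounts (4.0 in "My Vault" and 1.23 in "Another Wallet"), so the assertions implicitly check that balances of the same currency are summed across accounts. A minimal, self-contained sketch of that aggregation, using Decimal as a stand-in for rotki's FVal:

from collections import defaultdict
from decimal import Decimal  # stand-in for FVal in this illustration

mocked_accounts = [
    {'currency': 'BTC', 'amount': '4.00000000'},
    {'currency': 'ETH', 'amount': '39.59000000'},
    {'currency': 'BTC', 'amount': '1.230000000'},
]
totals = defaultdict(Decimal)
for account in mocked_accounts:
    totals[account['currency']] += Decimal(account['amount'])

assert totals['BTC'] == Decimal('5.23')
assert totals['ETH'] == Decimal('39.59')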
Example #3
def make_random_positive_fval(max_num: int = 1000000) -> FVal:
    return FVal(random.uniform(0, max_num))
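
A minimal usage sketch (illustrative only, with Decimal in place of FVal): random.uniform(0, max_num) returns a float in [0, max_num], so the helper always yields a non-negative value bounded by max_num.

import random
from decimal import Decimal

def make_random_positive_decimal(max_num: int = 1000000) -> Decimal:
    # Same idea as make_random_positive_fval, but using only the standard library
    return Decimal(str(random.uniform(0, max_num)))

value = make_random_positive_decimal(max_num=10)
assert Decimal(0) <= value <= Decimal(10)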
Example #4
def test_query_vaults_usdc(rotkehlchen_api_server, ethereum_accounts):
    """Check vault info and details for a vault with USDC as collateral"""
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    proxies_mapping = {
        ethereum_accounts[0]: '0xBE79958661741079679aFf75DbEd713cE71a979d',  # 7588
    }

    mock_proxies(rotki, proxies_mapping, 'makerdao_vaults')
    response = requests.get(api_url_for(
        rotkehlchen_api_server,
        "makerdaovaultsresource",
    ))
    vaults = assert_proper_response_with_result(response)
    vault_7588 = MakerDAOVault(
        identifier=7588,
        owner=ethereum_accounts[0],
        collateral_type='USDC-A',
        urn='0x56D88244073B2fC17af5B1E6088936D5bAaDc37B',
        collateral_asset=A_USDC,
        collateral=Balance(ZERO, ZERO),
        debt=Balance(ZERO, ZERO),
        collateralization_ratio=None,
        liquidation_ratio=FVal('1.03'),
        liquidation_price=None,
        stability_fee=FVal('0.04'),
    )
    expected_vaults = [vault_7588.serialize()]
    assert_serialized_lists_equal(
        expected_vaults,
        vaults, ignore_keys=['stability_fee', 'liquidation_ratio'],
    )
    response = requests.get(api_url_for(
        rotkehlchen_api_server,
        "makerdaovaultdetailsresource",
    ))
    vault_7588_details = {
        'identifier': 7588,
        'creation_ts': 1585286480,
        'total_interest_owed': '0.00050636718',
        'total_liquidated': {
            'amount': '0',
            'usd_value': '0',
        },
        'events': [{
            'event_type': 'deposit',
            'value': {
                'amount': '45',
                'usd_value': '45',
            },
            'timestamp': 1585286480,
            'tx_hash': '0x8b553dd0e8ee5385ec91105bf911143666d9df0ecd84c04f288278f7658aa7d6',
        }, {
            'event_type': 'generate',
            'value': {
                'amount': '20',
                'usd_value': '20.46',
            },
            'timestamp': 1585286480,
            'tx_hash': '0x8b553dd0e8ee5385ec91105bf911143666d9df0ecd84c04f288278f7658aa7d6',
        }, {
            'event_type': 'generate',
            'value': {
                'amount': '15.99',
                'usd_value': '16.35777',
            },
            'timestamp': 1585286769,
            'tx_hash': '0xdb861c893a51e4649ff3740cd3658cd4c9b1d048d3b8b4d117f4319bd60aee01',
        }, {
            'event_type': 'payback',
            'value': {
                'amount': '35.990506367',
                'usd_value': '36.818288',
            },
            'timestamp': 1585290263,
            'tx_hash': '0xdd7825fe4a93c6f1ffa25a91b6da2396c229fe16b17242ad5c0bf7962928b2ec',
        }, {
            'event_type': 'withdraw',
            'value': {
                'amount': '45',
                'usd_value': '45',
            },
            'timestamp': 1585290300,
            'tx_hash': '0x97462ebba7ce2467787bf6de25a25c24e538cf8a647919112c5f048b6a293408',
        }],
    }
    details = assert_proper_response_with_result(response)
    expected_details = [vault_7588_details]
    assert_serialized_lists_equal(expected_details, details, ignore_keys=['liquidation_ratio'])
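
Assuming total_interest_owed here means the DAI paid back beyond what was generated (an interpretation of the field, not something the snippet states), the expected value can be sanity-checked from the events above; note the payback amount shown is itself rounded, so the check is only approximate.

from decimal import Decimal

generated = Decimal('20') + Decimal('15.99')    # the two 'generate' events
paid_back = Decimal('35.990506367')             # the 'payback' event
interest = paid_back - generated                # ~0.000506367 DAI
assert abs(interest - Decimal('0.00050636718')) < Decimal('0.000001')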
Example #5
    def process_history(
            self,
            start_ts: Timestamp,
            end_ts: Timestamp,
            trade_history: List[Trade],
            margin_history: List[Trade],
            loan_history: Dict,
            asset_movements: List[AssetMovement],
            eth_transactions: List[EthereumTransaction],
    ) -> Dict[str, Any]:
        """Processes the entire history of cryptoworld actions in order to determine
        the price and time at which every asset was obtained and also
        the general and taxable profit/loss.
        """
        log.info(
            'Start of history processing',
            start_ts=start_ts,
            end_ts=end_ts,
        )
        self.events.reset(start_ts, end_ts)
        self.last_gas_price = FVal("2000000000")
        self.start_ts = start_ts
        self.eth_transactions_gas_costs = FVal(0)
        self.asset_movement_fees = FVal(0)
        self.csvexporter.reset_csv_lists()
        self.currently_processed_timestamp = start_ts

        actions: List[TaxableAction] = list(trade_history)
        # If we got loans, we need to interleave them with the full history and re-sort
        if len(loan_history) != 0:
            actions.extend(loan_history)

        if len(asset_movements) != 0:
            actions.extend(asset_movements)

        if len(margin_history) != 0:
            actions.extend(margin_history)

        if len(eth_transactions) != 0:
            actions.extend(eth_transactions)

        actions.sort(
            key=lambda action: action_get_timestamp(action),
        )

        prev_time = Timestamp(0)
        count = 0
        for action in actions:
            try:
                (
                    should_continue,
                    prev_time,
                    count,
                ) = self.process_action(action, end_ts, prev_time, count)
            except PriceQueryUnknownFromAsset as e:
                log.error(f'Skipping trade during history processing: {str(e)}')
                continue

            if not should_continue:
                break

        self.events.calculate_asset_details()
        Inquirer().save_historical_forex_data()

        sum_other_actions = (
            self.events.margin_positions_profit_loss +
            self.events.loan_profit -
            self.events.settlement_losses -
            self.asset_movement_fees -
            self.eth_transactions_gas_costs
        )
        total_taxable_pl = self.events.taxable_trade_profit_loss + sum_other_actions
        return {
            'overview': {
                'loan_profit': str(self.events.loan_profit),
                'margin_positions_profit_loss': str(self.events.margin_positions_profit_loss),
                'settlement_losses': str(self.events.settlement_losses),
                'ethereum_transaction_gas_costs': str(self.eth_transactions_gas_costs),
                'asset_movement_fees': str(self.asset_movement_fees),
                'general_trade_profit_loss': str(self.events.general_trade_profit_loss),
                'taxable_trade_profit_loss': str(self.events.taxable_trade_profit_loss),
                'total_taxable_profit_loss': str(total_taxable_pl),
                'total_profit_loss': str(
                    self.events.general_trade_profit_loss +
                    sum_other_actions,
                ),
            },
            'all_events': self.csvexporter.all_events,
        }
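
The overview returned above is a straight sum over the event buckets: margin and loan profits add to the total, while settlement losses, asset movement fees and gas costs subtract. A standalone sketch of the same arithmetic with made-up numbers:

from decimal import Decimal

margin_pl = Decimal('100')
loan_profit = Decimal('10')
settlement_losses = Decimal('5')
asset_movement_fees = Decimal('2')
gas_costs = Decimal('3')
taxable_trade_pl = Decimal('50')

sum_other_actions = margin_pl + loan_profit - settlement_losses - asset_movement_fees - gas_costs
total_taxable_pl = taxable_trade_pl + sum_other_actions
assert sum_other_actions == Decimal('100')
assert total_taxable_pl == Decimal('150')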
Example #6
    def calculate_spend_cost_basis(
        self,
        spending_amount: FVal,
        spending_asset: Asset,
        timestamp: Timestamp,
    ) -> CostBasisInfo:
        """
        When spending `spending_amount` of `spending_asset` at `timestamp` this function
        uses the first-in-first-out rule to determine the corresponding buy(s) from
        which to do the profit calculation. It also applies the "tax-free after a given
        time period" rule used in some jurisdictions, such as 1 year for Germany.

        Returns the information in a CostBasisInfo object if enough acquisitions have
        been found.
        """
        remaining_sold_amount = spending_amount
        stop_index = -1
        taxfree_bought_cost = ZERO
        taxable_bought_cost = ZERO
        taxable_amount = ZERO
        taxfree_amount = ZERO
        remaining_amount_from_last_buy = FVal('-1')
        matched_acquisitions = []
        asset_events = self.get_events(spending_asset)
        for idx, acquisition_event in enumerate(asset_events.acquisitions):
            if self.taxfree_after_period is None:
                at_taxfree_period = False
            else:
                at_taxfree_period = (acquisition_event.timestamp +
                                     self.taxfree_after_period < timestamp)

            if remaining_sold_amount < acquisition_event.remaining_amount:
                stop_index = idx
                buying_cost = remaining_sold_amount.fma(
                    acquisition_event.rate,
                    (acquisition_event.fee_rate * remaining_sold_amount),
                )

                if at_taxfree_period:
                    taxfree_amount += remaining_sold_amount
                    taxfree_bought_cost += buying_cost
                else:
                    taxable_amount += remaining_sold_amount
                    taxable_bought_cost += buying_cost

                remaining_amount_from_last_buy = acquisition_event.remaining_amount - remaining_sold_amount  # noqa: E501
                log.debug(
                    'Spend uses up part of historical acquisition',
                    tax_status='TAX-FREE' if at_taxfree_period else 'TAXABLE',
                    used_amount=remaining_sold_amount,
                    from_amount=acquisition_event.amount,
                    asset=spending_asset,
                    acquisition_rate=acquisition_event.rate,
                    profit_currency=self.profit_currency,
                    time=self.csv_exporter.timestamp_to_date(
                        acquisition_event.timestamp),
                )
                matched_acquisitions.append(
                    MatchedAcquisition(
                        amount=remaining_sold_amount,
                        event=acquisition_event,
                    ))
                # stop iterating since we found all acquisitions to satisfy this spend
                break

            remaining_sold_amount -= acquisition_event.remaining_amount
            if at_taxfree_period:
                taxfree_amount += acquisition_event.remaining_amount
                taxfree_bought_cost += acquisition_event.acquisition_cost
            else:
                taxable_amount += acquisition_event.remaining_amount
                taxable_bought_cost += acquisition_event.acquisition_cost

            log.debug(
                'Spend uses up entire historical acquisition',
                tax_status='TAX-FREE' if at_taxfree_period else 'TAXABLE',
                bought_amount=acquisition_event.remaining_amount,
                asset=spending_asset,
                acquisition_rate=acquisition_event.rate,
                profit_currency=self.profit_currency,
                time=self.csv_exporter.timestamp_to_date(
                    acquisition_event.timestamp),
            )
            matched_acquisitions.append(
                MatchedAcquisition(
                    amount=acquisition_event.remaining_amount,
                    event=acquisition_event,
                ))
            # and since this event is going to be removed, reduce its remaining amount to zero
            acquisition_event.remaining_amount = ZERO

            # If the sell used up the last historical acquisition
            if idx == len(asset_events.acquisitions) - 1:
                stop_index = idx + 1

        if len(asset_events.acquisitions) == 0:
            self.inform_user_missing_acquisition(spending_asset, timestamp)
            # That means we had no documented acquisition for that asset. This is not good
            # because we can't prove a corresponding acquisition and as such we are forced
            # to treat the entire spend as taxable profit
            return CostBasisInfo(
                taxable_amount=spending_amount,
                taxable_bought_cost=ZERO,
                taxfree_bought_cost=ZERO,
                matched_acquisitions=[],
                is_complete=False,
            )

        is_complete = True
        # Otherwise, delete all the used up acquisitions from the list
        asset_events.used_acquisitions.extend(
            asset_events.acquisitions[:stop_index],
        )
        del asset_events.acquisitions[:stop_index]
        # and modify the amount of the buy where we stopped if there is one
        if remaining_amount_from_last_buy != FVal('-1'):
            asset_events.acquisitions[0].remaining_amount = remaining_amount_from_last_buy  # noqa: E501
        elif remaining_sold_amount != ZERO:
            # if we still have sold amount but no acquisitions to satisfy it then we only
            # found acquisitions to partially satisfy the sell
            adjusted_amount = spending_amount - taxfree_amount
            self.inform_user_missing_acquisition(
                asset=spending_asset,
                time=timestamp,
                found_amount=taxable_amount + taxfree_amount,
                missing_amount=remaining_sold_amount,
            )
            taxable_amount = adjusted_amount
            is_complete = False

        return CostBasisInfo(
            taxable_amount=taxable_amount,
            taxable_bought_cost=taxable_bought_cost,
            taxfree_bought_cost=taxfree_bought_cost,
            matched_acquisitions=matched_acquisitions,
            is_complete=is_complete,
        )
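
A minimal, self-contained illustration of the FIFO matching the docstring describes (plain Decimals rather than the real acquisition events): spending 1.5 units against acquisitions of 1 @ 1000 and 2 @ 2000 consumes the first acquisition entirely and half a unit of the second, for a bought cost of 2000.

from decimal import Decimal

# [remaining_amount, rate] pairs in chronological (FIFO) order
acquisitions = [[Decimal('1'), Decimal('1000')], [Decimal('2'), Decimal('2000')]]
spend = Decimal('1.5')

bought_cost = Decimal('0')
for acquisition in acquisitions:
    if spend < acquisition[0]:
        # The spend is fully covered by part of this acquisition
        bought_cost += spend * acquisition[1]
        acquisition[0] -= spend
        break
    # The spend uses up this acquisition entirely and continues to the next one
    bought_cost += acquisition[0] * acquisition[1]
    spend -= acquisition[0]
    acquisition[0] = Decimal('0')

assert bought_cost == Decimal('2000')            # 1 * 1000 + 0.5 * 2000
assert acquisitions[1][0] == Decimal('1.5')      # half a unit used from the second buy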
Example #7
def test_query_vaults_details_liquidation(rotkehlchen_api_server, ethereum_accounts):
    """Check vault details of a vault with liquidations

    Also use three accounts, two of which have vaults associated with them to test
    that vaults for multiple accounts get detected
    """
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    proxies_mapping = {
        ethereum_accounts[0]: '0x689D4C2229717f877A644A0aAd742D67E5D0a2FB',
        ethereum_accounts[2]: '0x420F88De6dadA0a77Db7b9EdBe3A0C614346031E',
    }
    mock_proxies(rotki, proxies_mapping, 'makerdao_vaults')
    response = requests.get(api_url_for(
        rotkehlchen_api_server,
        "makerdaovaultsresource",
    ))
    vaults = assert_proper_response_with_result(response)
    vault_6021 = {
        'identifier': 6021,
        'owner': ethereum_accounts[2],
        'collateral_type': 'ETH-A',
        'collateral_asset': 'ETH',
        'collateral': {
            'amount': ZERO,
            'usd_value': ZERO,
        },
        'debt': {
            'amount': ZERO,
            'usd_value': ZERO,
        },
        'collateralization_ratio': None,
        'liquidation_ratio': '150.00%',
        'liquidation_price': None,
        'stability_fee': '0.00%',
    }
    vault_8015_with_owner = VAULT_8015.copy()
    vault_8015_with_owner['owner'] = ethereum_accounts[0]
    assert_serialized_dicts_equal(vault_6021, vaults[0], ignore_keys=['stability_fee'])
    assert_serialized_dicts_equal(
        vault_8015_with_owner,
        vaults[1],
        ignore_keys=VAULT_IGNORE_KEYS,
    )
    assert len(vaults) == 2

    response = requests.get(api_url_for(
        rotkehlchen_api_server,
        "makerdaovaultdetailsresource",
    ))
    vault_6021_details = {
        'identifier': 6021,
        'creation_ts': 1582699808,
        'total_interest_owed': '-11078.655097848869',
        'total_liquidated': {
            'amount': '141.7',
            'usd_value': '19191.848',
        },
        'events': [{
            'event_type': 'deposit',
            'value': {
                'amount': '140',
                'usd_value': '31322.2',
            },
            'timestamp': 1582699808,
            'tx_hash': '0x3246ef91fd3d6e1f7c5766de4fa1f0991ba67d92e518447ba8207fe98569c309',
        }, {
            'event_type': 'generate',
            'value': {
                'amount': '14000',
                'usd_value': '14028',
            },
            'timestamp': 1582699808,
            'tx_hash': '0x3246ef91fd3d6e1f7c5766de4fa1f0991ba67d92e518447ba8207fe98569c309',
        }, {
            'event_type': 'deposit',
            'value': {
                'amount': '1.7',
                'usd_value': '331.262',
            },
            'timestamp': 1583958747,
            'tx_hash': '0x65ac798cb9f22068e43fd9ef8303a31e436989062ae87e25650cc44c7788ab62',
        }, {
            'event_type': 'payback',
            'value': {
                'amount': '2921.344902',
                'usd_value': '2927.187591',
            },
            'timestamp': 1584024065,
            'tx_hash': '0x6e44d22d6898ee012369787cd75ea6fb9ace6f995cd157675f370e8ba4a7b9ad',
        }, {
            'event_type': 'liquidation',
            'value': {
                'amount': '50',
                'usd_value': '6772',
            },
            'timestamp': 1584061534,
            'tx_hash': '0xb02050d914ab40f59a9e07eb4f8161ce36eb97cea9c189b027eb1ceeac83a516',
        }, {
            'event_type': 'liquidation',
            'value': {
                'amount': '50',
                'usd_value': '6772',
            },
            'timestamp': 1584061897,
            'tx_hash': '0x678f31d49dd70d76c0ce441343c0060dc600f4c8dbb4cee2b08c6b451b6097cd',
        }, {
            'event_type': 'liquidation',
            'value': {
                'amount': '41.7',
                'usd_value': '5647.848',
            },
            'timestamp': 1584061977,
            'tx_hash': '0xded0f9de641087692555d92a7fa94fa9fa7abf22744b2d16c20a66c5e48a8edf',
        }],
    }
    details = assert_proper_response_with_result(response)
    assert len(details) == 2
    assert_serialized_dicts_equal(vault_6021_details, details[0], ignore_keys=['stability_fee'])
    assert_serialized_dicts_equal(
        VAULT_8015_DETAILS,
        details[1],
        length_list_keymap={'events': 7},
        ignore_keys=['total_interest_owed'],
    )
    assert FVal(details[1]['total_interest_owed']) >= FVal(
        VAULT_8015_DETAILS['total_interest_owed'],
    )
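
The total_liquidated figure in vault_6021_details is just the sum of the three liquidation events; a quick self-contained check:

from decimal import Decimal

liquidations = [
    (Decimal('50'), Decimal('6772')),
    (Decimal('50'), Decimal('6772')),
    (Decimal('41.7'), Decimal('5647.848')),
]
assert sum(amount for amount, _ in liquidations) == Decimal('141.7')
assert sum(usd for _, usd in liquidations) == Decimal('19191.848')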
Example #8
    def add_defi_event(
        self,
        event: DefiEvent,
        profit_loss_in_profit_currency_list: List[FVal],
    ) -> None:
        if not self.create_csv:
            return

        profit_loss_sum = FVal(sum(profit_loss_in_profit_currency_list))
        self.defi_events_csv.append({
            'time':
            self.timestamp_to_date(event.timestamp),
            'type':
            str(event.event_type),
            'got_asset':
            str(event.got_asset) if event.got_asset else '',
            'got_amount':
            str(event.got_balance.amount) if event.got_balance else '',
            'spent_asset':
            str(event.spent_asset) if event.spent_asset else '',
            'spent_amount':
            str(event.spent_balance.amount) if event.spent_balance else '',
            f'profit_loss_in_{self.profit_currency.identifier}':
            profit_loss_sum,
            'tx_hash':
            event.tx_hash if event.tx_hash else '',
            'description':
            event.to_string(timestamp_converter=self.timestamp_to_date),
        })

        paid_asset: Union[EmptyStr, Asset]
        received_asset: Union[EmptyStr, Asset]
        if event.pnl is None:
            return  # don't pollute all events csv with entries that are not useful

        for idx, entry in enumerate(event.pnl):
            if entry.balance.amount > ZERO:
                paid_in_profit_currency = ZERO
                paid_in_asset = ZERO
                paid_asset = S_EMPTYSTR
                received_asset = entry.asset
                received_in_asset = entry.balance.amount
                # The index should be the same as the precalculated profit_currency list amounts
                received_in_profit_currency = profit_loss_in_profit_currency_list[
                    idx]
            else:  # pnl is a loss
                # The index should be the same as the precalculated profit_currency list amounts
                paid_in_profit_currency = profit_loss_in_profit_currency_list[
                    idx]
                paid_in_asset = entry.balance.amount
                paid_asset = entry.asset
                received_asset = S_EMPTYSTR
                received_in_asset = ZERO
                received_in_profit_currency = ZERO

            self.add_to_allevents(
                event_type=EV_DEFI,
                location=Location.BLOCKCHAIN,
                paid_in_profit_currency=paid_in_profit_currency,
                paid_asset=paid_asset,
                paid_in_asset=paid_in_asset,
                received_asset=received_asset,
                received_in_asset=received_in_asset,
                taxable_received_in_profit_currency=received_in_profit_currency,
                total_received_in_profit_currency=received_in_profit_currency,
                timestamp=event.timestamp,
            )
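
The loop above splits each pnl entry on the sign of its balance amount: a positive amount is recorded as something received, a negative one as something paid. A tiny standalone sketch of that convention (hypothetical helper, not part of the real CSV exporter):

from decimal import Decimal

def split_pnl_entry(asset: str, amount: Decimal):
    """Return (paid_asset, paid_in_asset, received_asset, received_in_asset)."""
    if amount > 0:
        return '', Decimal('0'), asset, amount
    return asset, amount, '', Decimal('0')

assert split_pnl_entry('DAI', Decimal('5')) == ('', Decimal('0'), 'DAI', Decimal('5'))
assert split_pnl_entry('ETH', Decimal('-1')) == ('ETH', Decimal('-1'), '', Decimal('0'))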
Example #9
def test_add_and_get_yearn_vault_events(data_dir, username):
    """Test that get yearn vault events works fine and returns only events for what we need"""
    msg_aggregator = MessagesAggregator()
    data = DataHandler(data_dir, msg_aggregator)
    data.unlock(username, '123', create_new=True)

    addr1 = make_ethereum_address()
    addr1_events = [
        YearnVaultEvent(
            event_type='deposit',
            from_asset=A_DAI,
            from_value=Balance(amount=ONE, usd_value=ONE),
            to_asset=A_YV1_DAI,
            to_value=Balance(amount=ONE, usd_value=ONE),
            realized_pnl=None,
            block_number=1,
            timestamp=Timestamp(1),
            tx_hash=
            '0x01653e88600a6492ad6e9ae2af415c990e623479057e4e93b163e65cfb2d4436',
            log_index=1,
            version=1,
        ),
        YearnVaultEvent(
            event_type='withdraw',
            from_asset=A_YV1_DAI,
            from_value=Balance(amount=ONE, usd_value=ONE),
            to_asset=A_DAI,
            to_value=Balance(amount=ONE, usd_value=ONE),
            realized_pnl=Balance(amount=FVal('0.01'), usd_value=FVal('0.01')),
            block_number=2,
            timestamp=Timestamp(2),
            tx_hash=
            '0x4147da3e5d3c0565a99192ce0b32182ab30b8e1067921d9b2a8ef3bd60b7e2ce',
            log_index=2,
            version=1,
        )
    ]
    data.db.add_yearn_vaults_events(address=addr1, events=addr1_events)
    addr2 = make_ethereum_address()
    addr2_events = [
        YearnVaultEvent(
            event_type='deposit',
            from_asset=A_DAI,
            from_value=Balance(amount=ONE, usd_value=ONE),
            to_asset=A_YV1_DAI,
            to_value=Balance(amount=ONE, usd_value=ONE),
            realized_pnl=None,
            block_number=1,
            timestamp=Timestamp(1),
            tx_hash=
            '0x8c094d58f33e8dedcd348cb33b58f3bd447602f1fecb99e51b1c2868029eab55',
            log_index=1,
            version=1,
        ),
        YearnVaultEvent(
            event_type='withdraw',
            from_asset=A_YV1_DAI,
            from_value=Balance(amount=ONE, usd_value=ONE),
            to_asset=A_DAI,
            to_value=Balance(amount=ONE, usd_value=ONE),
            realized_pnl=Balance(amount=FVal('0.01'), usd_value=FVal('0.01')),
            block_number=2,
            timestamp=Timestamp(2),
            tx_hash=
            '0x58c67445d26679623f9b7d56a8be260a275cb6744a1c1ae5a8d6883a5a5c03de',
            log_index=2,
            version=1,
        )
    ]
    data.db.add_yearn_vaults_events(address=addr2, events=addr2_events)

    events = data.db.get_yearn_vaults_events(address=addr1,
                                             vault=YEARN_VAULTS['yDAI'])
    assert events == addr1_events
    events = data.db.get_yearn_vaults_events(address=addr2,
                                             vault=YEARN_VAULTS['yDAI'])
    assert events == addr2_events
Example #10
def mock_find_usd_price(asset):  # pylint: disable=unused-argument
    return mocked_prices.get(asset, FVal('1.5'))
Example #11
def mock_query_fiat_pair(base, quote):  # pylint: disable=unused-argument
    return FVal(1)
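
Helpers like these are typically handed to unittest.mock.patch as a side_effect so that price lookups become deterministic during tests. A hedged sketch of that pattern (FakeInquirer below is a stand-in for illustration, not rotki's real Inquirer, and Decimal stands in for FVal):

from decimal import Decimal
from unittest.mock import patch

mocked_prices = {'ETH': Decimal('2000')}

def mock_find_usd_price(asset):  # pylint: disable=unused-argument
    return mocked_prices.get(asset, Decimal('1.5'))

class FakeInquirer:
    @staticmethod
    def find_usd_price(asset):
        raise AssertionError('the real price query should not run in tests')

with patch.object(FakeInquirer, 'find_usd_price', side_effect=mock_find_usd_price):
    assert FakeInquirer.find_usd_price('ETH') == Decimal('2000')
    assert FakeInquirer.find_usd_price('BTC') == Decimal('1.5')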
Example #12
def test_buying_selling_btc_before_bchfork(accountant):
    history = [{
        "timestamp": 1491593374,  # 04/07/2017
        "pair": "BTC_EUR",
        "type": "buy",
        "rate": 1128.905,
        "cost": 7337.8825,
        "cost_currency": "EUR",
        "fee": 0.55,
        "fee_currency": "EUR",
        "amount": 6.5,
        "location": "external",
    }, {  # selling BTC prefork should also reduce the BCH equivalent -- taxable
        "timestamp": 1500595200,  # 21/07/2017
        "pair": "BTC_EUR",
        "type": "sell",
        "rate": 2380.835,
        "cost": 1190.4175,
        "cost_currency": "EUR",
        "fee": 0.15,
        "fee_currency": "EUR",
        "amount": 0.5,
        "location": "external",
    }, {  # selling BCH after the fork -- taxable
        'timestamp': 1512693374,  # 08/12/2017
        'pair': 'BCH_EUR',  # cryptocompare hourly BCH/EUR price: 995.935
        'type': 'sell',
        'rate': 995.935,
        'cost': 2091.4635,
        'cost_currency': 'EUR',
        'fee': 0.26,
        'fee_currency': 'EUR',
        'amount': 2.1,
        'location': 'kraken',
    }, {
        'timestamp': 1514937600,  # 03/01/2018
        'pair': 'BTC_EUR',
        'type': 'sell',
        'rate': 12404.88,
        'cost': 14885.856,
        'cost_currency': 'EUR',
        'fee': 0.52,
        'fee_currency': 'EUR',
        'amount': 1.2,
        'location': 'kraken',
    }]
    accounting_history_process(accountant, 1436979735, 1519693374, history)

    amount_BCH = FVal(3.9)
    amount_BTC = FVal(4.8)
    buys = accountant.events.events['BCH'].buys
    assert len(buys) == 1
    assert buys[0].amount == amount_BCH
    assert buys[0].timestamp == 1491593374
    assert buys[0].rate == FVal('1128.905')
    assert buys[0].fee_rate.is_close(FVal('0.0846153846154'))
    assert accountant.get_calculated_asset_amount('BCH') == amount_BCH
    assert accountant.get_calculated_asset_amount('BTC') == amount_BTC

    assert accountant.general_trade_pl.is_close("13876.6464615")
    assert accountant.taxable_trade_pl.is_close("13876.6464615")
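
The expected amounts follow directly from the trade history above: the 6.5 BTC bought before the fork are also credited as 6.5 BCH, the 0.5 BTC sold pre-fork reduces both, and each post-fork sale only reduces its own asset; the fee rate is the 0.55 EUR buy fee spread over the 6.5 units bought. A quick check:

from decimal import Decimal

bought = Decimal('6.5')          # pre-fork BTC buy, also credited as BCH
sold_prefork = Decimal('0.5')    # pre-fork BTC sale, reduces both BTC and BCH
assert bought - sold_prefork - Decimal('2.1') == Decimal('3.9')   # remaining BCH
assert bought - sold_prefork - Decimal('1.2') == Decimal('4.8')   # remaining BTC

fee_rate = Decimal('0.55') / Decimal('6.5')
assert abs(fee_rate - Decimal('0.0846153846154')) < Decimal('1e-10')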
Example #13
def get_balancer_test_addr2_expected_trades():
    """In a function since the new(unknown) assets needs to have been loaded in the DB"""
    A_WCRES = EthereumToken.initialize(  # noqa: N806
        address=string_to_ethereum_address(
            '0xa0afAA285Ce85974c3C881256cB7F225e3A1178a'),
        decimals=18,
        symbol='wCRES',
    )
    return [
        AMMTrade(
            trade_type=TradeType.BUY,
            base_asset=A_WETH,
            quote_asset=A_AAVE,
            amount=AssetAmount(FVal('1.616934038985744521')),
            rate=Price(FVal('6.963972908793392530935439799')),
            trade_index=1,
            swaps=[
                AMMSwap(
                    tx_hash=
                    '0x3c457da9b541ae39a7dc781ab04a03938b98b5649512aec2a2d32635c9bbf589',  # noqa: E501
                    log_index=24,
                    address=string_to_ethereum_address(
                        '0x029f388aC4D5C8BfF490550ce0853221030E822b'
                    ),  # noqa: E501
                    from_address=string_to_ethereum_address(
                        '0x0000000000007F150Bd6f54c40A34d7C3d5e9f56'
                    ),  # noqa: E501
                    to_address=string_to_ethereum_address(
                        '0x7c90a3cd7Ec80dd2F633ed562480AbbEEd3bE546'
                    ),  # noqa: E501
                    timestamp=Timestamp(1607008178),
                    location=Location.BALANCER,
                    token0=A_AAVE,
                    token1=A_WETH,
                    amount0_in=AssetAmount(FVal('11.260284842802604032')),
                    amount1_in=AssetAmount(ZERO),
                    amount0_out=AssetAmount(ZERO),
                    amount1_out=AssetAmount(FVal('1.616934038985744521')),
                ),
            ],
        ),
        AMMTrade(
            trade_type=TradeType.BUY,
            base_asset=A_AAVE,
            quote_asset=A_WETH,
            amount=AssetAmount(FVal('11.260286362820602094')),
            rate=Price(FVal('0.1416068599966922676173010716')),
            trade_index=0,
            swaps=[
                AMMSwap(
                    tx_hash=
                    '0x3c457da9b541ae39a7dc781ab04a03938b98b5649512aec2a2d32635c9bbf589',  # noqa: E501
                    log_index=18,
                    address=string_to_ethereum_address(
                        '0x029f388aC4D5C8BfF490550ce0853221030E822b'
                    ),  # noqa: E501
                    from_address=string_to_ethereum_address(
                        '0x0000000000007F150Bd6f54c40A34d7C3d5e9f56'
                    ),  # noqa: E501
                    to_address=string_to_ethereum_address(
                        '0x70985E557aE0CD6dC88189a532e54FbC61927BAd'
                    ),  # noqa: E501
                    timestamp=Timestamp(1607008178),
                    location=Location.BALANCER,
                    token0=A_WETH,
                    token1=A_AAVE,
                    amount0_in=AssetAmount(FVal('1.594533794502600192')),
                    amount1_in=AssetAmount(ZERO),
                    amount0_out=AssetAmount(ZERO),
                    amount1_out=AssetAmount(FVal('11.260286362820602094')),
                ),
            ],
        ),
        AMMTrade(
            trade_type=TradeType.BUY,
            base_asset=A_WETH,
            quote_asset=A_SYN,
            amount=AssetAmount(FVal('1.352902561458047718')),
            rate=Price(FVal('724.4303350385182691258363763')),
            trade_index=0,
            swaps=[
                AMMSwap(
                    tx_hash=
                    '0x5e235216cb03e4eb234014f5ccf3efbfddd40c4576424e2a8204f1d12b96ed35',  # noqa: E501
                    log_index=143,
                    address=string_to_ethereum_address(
                        '0x029f388aC4D5C8BfF490550ce0853221030E822b'
                    ),  # noqa: E501
                    from_address=string_to_ethereum_address(
                        '0x0000000000007F150Bd6f54c40A34d7C3d5e9f56'
                    ),  # noqa: E501
                    to_address=string_to_ethereum_address(
                        '0x8982E9bBf7AC6A49c434aD81D2fF8e16895318e5'
                    ),  # noqa: E501
                    timestamp=Timestamp(1607008218),
                    location=Location.BALANCER,
                    token0=A_SYN,
                    token1=A_WETH,
                    amount0_in=AssetAmount(FVal('980.08365587152306176')),
                    amount1_in=AssetAmount(ZERO),
                    amount0_out=AssetAmount(ZERO),
                    amount1_out=AssetAmount(FVal('1.352902561458047718')),
                ),
            ],
        ),
        AMMTrade(
            trade_type=TradeType.BUY,
            base_asset=A_WETH,
            quote_asset=A_WCRES,
            amount=AssetAmount(FVal('0.205709519074945018')),
            rate=Price(FVal('232.7409943164679514496089589')),
            trade_index=0,
            swaps=[
                AMMSwap(
                    tx_hash=
                    '0xf54be824b4619777f1db0e3da91b0cd52f6dba730c95a75644e2b085e6ab9824',  # noqa: E501
                    log_index=300,
                    address=string_to_ethereum_address(
                        '0x029f388aC4D5C8BfF490550ce0853221030E822b'
                    ),  # noqa: E501
                    from_address=string_to_ethereum_address(
                        '0x0000000000007F150Bd6f54c40A34d7C3d5e9f56'
                    ),  # noqa: E501
                    to_address=string_to_ethereum_address(
                        '0x10996eC4f3E7A1b314EbD966Fa8b1ad0fE0f8307'
                    ),  # noqa: E501
                    timestamp=Timestamp(1607009877),
                    location=Location.BALANCER,
                    token0=A_WCRES,
                    token1=A_WETH,
                    amount0_in=AssetAmount(FVal('47.87703800986513408')),
                    amount1_in=AssetAmount(ZERO),
                    amount0_out=AssetAmount(ZERO),
                    amount1_out=AssetAmount(FVal('0.205709519074945018')),
                ),
            ],
        ),
        AMMTrade(
            trade_type=TradeType.BUY,
            base_asset=A_API3,
            quote_asset=A_WETH,
            amount=AssetAmount(FVal('295.881648100500428692')),
            rate=Price(FVal('0.003346787723157288562491614498')),
            trade_index=0,
            swaps=[
                AMMSwap(
                    tx_hash=
                    '0xfed4e15051e3ce4dc0d2816f719701e5920e40bf41614b5feaa3c5a6a0186c03',  # noqa: E501
                    log_index=22,
                    address=string_to_ethereum_address(
                        '0x029f388aC4D5C8BfF490550ce0853221030E822b'
                    ),  # noqa: E501
                    from_address=string_to_ethereum_address(
                        '0x0000000000007F150Bd6f54c40A34d7C3d5e9f56'
                    ),  # noqa: E501
                    to_address=string_to_ethereum_address(
                        '0x997c0fc9578a8194EFDdE2E0cD7aa6A69cFCD7c1'
                    ),  # noqa: E501
                    timestamp=Timestamp(1607010888),
                    location=Location.BALANCER,
                    token0=A_WETH,
                    token1=A_API3,
                    amount0_in=AssetAmount(FVal('0.990253067370299904')),
                    amount1_in=AssetAmount(ZERO),
                    amount0_out=AssetAmount(ZERO),
                    amount1_out=AssetAmount(FVal('295.881648100500428692')),
                ),
            ],
        ),
        AMMTrade(
            trade_type=TradeType.BUY,
            base_asset=A_WETH,
            quote_asset=A_MFT,
            amount=AssetAmount(FVal('0.686544199299304057')),
            rate=Price(FVal('243775.0324093115004367119900')),
            trade_index=0,
            swaps=[
                AMMSwap(
                    tx_hash=
                    '0xf0147c4b81098676c08ae20ae5bf8f8b60d0ad79eec484f3f93ac6ab49a3c51c',  # noqa: E501
                    log_index=97,
                    address=string_to_ethereum_address(
                        '0x029f388aC4D5C8BfF490550ce0853221030E822b'
                    ),  # noqa: E501
                    from_address=string_to_ethereum_address(
                        '0x0000000000007F150Bd6f54c40A34d7C3d5e9f56'
                    ),  # noqa: E501
                    to_address=string_to_ethereum_address(
                        '0x2Eb6CfbFFC8785Cd0D9f2d233d0a617bF4269eeF'
                    ),  # noqa: E501
                    timestamp=Timestamp(1607015059),
                    location=Location.BALANCER,
                    token0=A_MFT,
                    token1=A_WETH,
                    amount0_in=AssetAmount(FVal('167362.334434612660404224')),
                    amount1_in=AssetAmount(ZERO),
                    amount0_out=AssetAmount(ZERO),
                    amount1_out=AssetAmount(FVal('0.686544199299304057')),
                ),
            ],
        ),
        AMMTrade(
            trade_type=TradeType.BUY,
            base_asset=A_WETH,
            quote_asset=A_AAVE,
            amount=AssetAmount(FVal('3.055412574642681758')),
            rate=Price(FVal('6.916116208273240607778771150')),
            trade_index=1,
            swaps=[
                AMMSwap(
                    tx_hash=
                    '0x67c0e9a0fdd002d0b9d1cca0c8e4ca4d30435bbf57bbf0091396275efaea414b',  # noqa: E501
                    log_index=37,
                    address=string_to_ethereum_address(
                        '0x029f388aC4D5C8BfF490550ce0853221030E822b'
                    ),  # noqa: E501
                    from_address=string_to_ethereum_address(
                        '0x0000000000007F150Bd6f54c40A34d7C3d5e9f56'
                    ),  # noqa: E501
                    to_address=string_to_ethereum_address(
                        '0x0E552307659E70bF61f918f96AA880Cdec40d7E2'
                    ),  # noqa: E501
                    timestamp=Timestamp(1607015339),
                    location=Location.BALANCER,
                    token0=A_AAVE,
                    token1=A_WETH,
                    amount0_in=AssetAmount(FVal('21.131588430448123904')),
                    amount1_in=AssetAmount(ZERO),
                    amount0_out=AssetAmount(ZERO),
                    amount1_out=AssetAmount(FVal('3.055412574642681758')),
                ),
            ],
        ),
        AMMTrade(
            trade_type=TradeType.BUY,
            base_asset=A_AAVE,
            quote_asset=A_WETH,
            amount=AssetAmount(FVal('21.131588567541018817')),
            rate=Price(FVal('0.1435213742524287826717337545')),
            trade_index=0,
            swaps=[
                AMMSwap(
                    tx_hash=
                    '0x67c0e9a0fdd002d0b9d1cca0c8e4ca4d30435bbf57bbf0091396275efaea414b',  # noqa: E501
                    log_index=31,
                    address=string_to_ethereum_address(
                        '0x029f388aC4D5C8BfF490550ce0853221030E822b'
                    ),  # noqa: E501
                    from_address=string_to_ethereum_address(
                        '0x0000000000007F150Bd6f54c40A34d7C3d5e9f56'
                    ),  # noqa: E501
                    to_address=string_to_ethereum_address(
                        '0x7c90a3cd7Ec80dd2F633ed562480AbbEEd3bE546'
                    ),  # noqa: E501
                    timestamp=Timestamp(1607015339),
                    location=Location.BALANCER,
                    token0=A_WETH,
                    token1=A_AAVE,
                    amount0_in=AssetAmount(FVal('3.0328346313504')),
                    amount1_in=AssetAmount(ZERO),
                    amount0_out=AssetAmount(ZERO),
                    amount1_out=AssetAmount(FVal('21.131588567541018817')),
                ),
            ],
        ),
    ]
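
In these expected trades the rate works out to the quote amount that went into the swap divided by the base amount that came out (an observation from the data above, not a claim about how AMMTrade computes it). For the first trade, 11.260284842802604032 AAVE in over 1.616934038985744521 WETH out gives the quoted rate:

from decimal import Decimal

quote_in = Decimal('11.260284842802604032')    # AAVE paid into the swap
base_out = Decimal('1.616934038985744521')     # WETH received from the swap
rate = quote_in / base_out
assert abs(rate - Decimal('6.963972908793392530935439799')) < Decimal('1e-18')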
Example #14
# Top holder of WBTC-WETH pool (0x1eff8af5d577060ba4ac8a29a13525bb0ee2a3d5)
BALANCER_TEST_ADDR1 = string_to_ethereum_address(
    '0x49a2DcC237a65Cc1F412ed47E0594602f6141936')
BALANCER_TEST_ADDR2 = string_to_ethereum_address(
    '0x029f388aC4D5C8BfF490550ce0853221030E822b')
BALANCER_TEST_ADDR3 = string_to_ethereum_address(
    '0x7716a99194d758c8537F056825b75Dd0C8FDD89f')
BALANCER_TEST_ADDR3_POOL1 = EthereumToken.initialize(
    address=string_to_ethereum_address(
        '0x59A19D8c652FA0284f44113D0ff9aBa70bd46fB4'),
    symbol='BPT',
    protocol='balancer',
    underlying_tokens=[
        UnderlyingToken(address=string_to_ethereum_address(
            '0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2'),
                        weight=FVal(0.2)),  # noqa: E501  # WETH
        UnderlyingToken(address=string_to_ethereum_address(
            '0xba100000625a3754423978a60c9317c58a424e3D'),
                        weight=FVal(0.8)),  # noqa: E501  # BAL
    ],
)
BALANCER_TEST_ADDR3_POOL2 = EthereumToken.initialize(
    address=string_to_ethereum_address(
        '0x574FdB861a0247401B317a3E68a83aDEAF758cf6'),
    symbol='BPT',
    protocol='balancer',
    underlying_tokens=[
        UnderlyingToken(address=string_to_ethereum_address(
            '0x0D8775F648430679A709E98d2b0Cb6250d2887EF'),
                        weight=FVal(0.1)),  # noqa: E501  # BAT
        UnderlyingToken(address=string_to_ethereum_address(
Example #15
    cc.price_history = {}
    result = cc.get_cached_data_metadata(
        from_asset=A_BTC,
        to_asset=A_USD,
    )
    assert result is not None
    assert result[0] == 1301536800
    assert result[1] == 1301540400


@pytest.mark.skipif(
    'CI' in os.environ,
    reason='This test would contribute in cryptocompare rate limiting. No need to run often',
)
@pytest.mark.parametrize('from_asset,to_asset,timestamp,price', [
    (A_ETH, A_USD, 1505527200, FVal('262.155')),
    (A_XMR, A_BTC, 1438992000, FVal('0.0026285')),
])
@pytest.mark.parametrize('use_clean_caching_directory', [True])
def test_cryptocompare_historical_data_price(
        data_dir,
        database,
        from_asset,
        to_asset,
        timestamp,
        price,
):
    """Test that the cryptocompare histohour data retrieval works and price is returned

    """
    cc = Cryptocompare(data_directory=data_dir, database=database)
Example #16
def test_add_and_get_aave_events(data_dir, username):
    """Test that get aave events works fine and returns only events for what we need"""
    msg_aggregator = MessagesAggregator()
    data = DataHandler(data_dir, msg_aggregator)
    data.unlock(username, '123', create_new=True)

    addr1 = make_ethereum_address()
    addr1_events = [
        AaveDepositWithdrawalEvent(
            event_type='deposit',
            asset=A_DAI,
            atoken=A_ADAI_V1,
            value=Balance(amount=ONE, usd_value=ONE),
            block_number=1,
            timestamp=Timestamp(1),
            tx_hash=
            '0x01653e88600a6492ad6e9ae2af415c990e623479057e4e93b163e65cfb2d4436',
            log_index=1,
        ),
        AaveDepositWithdrawalEvent(
            event_type='withdrawal',
            asset=A_DAI,
            atoken=A_ADAI_V1,
            value=Balance(amount=ONE, usd_value=ONE),
            block_number=2,
            timestamp=Timestamp(2),
            tx_hash=
            '0x4147da3e5d3c0565a99192ce0b32182ab30b8e1067921d9b2a8ef3bd60b7e2ce',
            log_index=2,
        )
    ]
    data.db.add_aave_events(address=addr1, events=addr1_events)

    addr2 = make_ethereum_address()
    addr2_events = [
        AaveDepositWithdrawalEvent(
            event_type='deposit',
            asset=A_DAI,
            atoken=A_ADAI_V1,
            value=Balance(amount=ONE, usd_value=ONE),
            block_number=1,
            timestamp=Timestamp(1),
            tx_hash=
            '0x8c094d58f33e8dedcd348cb33b58f3bd447602f1fecb99e51b1c2868029eab55',
            log_index=1,
        ),
        AaveDepositWithdrawalEvent(
            event_type='withdrawal',
            asset=A_DAI,
            atoken=A_ADAI_V1,
            value=Balance(amount=ONE, usd_value=ONE),
            block_number=2,
            timestamp=Timestamp(2),
            tx_hash=
            '0x58c67445d26679623f9b7d56a8be260a275cb6744a1c1ae5a8d6883a5a5c03de',
            log_index=2,
        )
    ]
    data.db.add_aave_events(address=addr2, events=addr2_events)

    # addr3 has all types of aave events so we test serialization/deserialization
    addr3 = make_ethereum_address()
    addr3_events = [
        AaveDepositWithdrawalEvent(
            event_type='deposit',
            asset=A_DAI,
            atoken=A_ADAI_V1,
            value=Balance(amount=ONE, usd_value=ONE),
            block_number=1,
            timestamp=Timestamp(1),
            tx_hash=
            '0x9e394d58f33e8dedcd348cb33b58f3bd447602f1fecb99e51b1c2868029eab55',
            log_index=1,
        ),
        AaveDepositWithdrawalEvent(
            event_type='withdrawal',
            asset=A_DAI,
            atoken=A_ADAI_V1,
            value=Balance(amount=ONE, usd_value=ONE),
            block_number=2,
            timestamp=Timestamp(2),
            tx_hash=
            '0x4c167445d26679623f9b7d56a8be260a275cb6744a1c1ae5a8d6883a5a5c03de',
            log_index=2,
        ),
        AaveInterestEvent(
            event_type='interest',
            asset=A_WBTC,
            value=Balance(amount=ONE, usd_value=ONE),
            block_number=4,
            timestamp=Timestamp(4),
            tx_hash=
            '0x49c67445d26679623f9b7d56a8be260a275cb6744a1c1ae5a8d6883a5a5c03de',
            log_index=4,
        ),
        AaveBorrowEvent(
            event_type='borrow',
            asset=A_ETH,
            value=Balance(amount=ONE, usd_value=ONE),
            block_number=5,
            timestamp=Timestamp(5),
            tx_hash=
            '0x19c67445d26679623f9b7d56a8be260a275cb6744a1c1ae5a8d6883a5a5c03de',
            log_index=5,
            borrow_rate_mode='stable',
            borrow_rate=FVal('0.05233232323423432'),
            accrued_borrow_interest=FVal('5.112234'),
        ),
        AaveRepayEvent(
            event_type='repay',
            asset=A_MANA,
            value=Balance(amount=ONE, usd_value=ONE),
            block_number=6,
            timestamp=Timestamp(6),
            tx_hash=
            '0x29c67445d26679623f9b7d56a8be260a275cb6744a1c1ae5a8d6883a5a5c03de',
            log_index=6,
            fee=Balance(amount=FVal('0.1'), usd_value=FVal('0.1')),
        ),
        AaveLiquidationEvent(
            event_type='liquidation',
            collateral_asset=A_ETH,
            collateral_balance=Balance(amount=ONE, usd_value=ONE),
            principal_asset=A_ETH,
            principal_balance=Balance(amount=ONE, usd_value=ONE),
            block_number=7,
            log_index=7,
            timestamp=Timestamp(7),
            tx_hash=
            '0x39c67445d26679623f9b7d56a8be260a275cb6744a1c1ae5a8d6883a5a5c03de',
        )
    ]
    data.db.add_aave_events(address=addr3, events=addr3_events)

    events = data.db.get_aave_events(address=addr1, atoken=A_ADAI_V1)
    assert events == addr1_events
    events = data.db.get_aave_events(address=addr2, atoken=A_ADAI_V1)
    assert events == addr2_events
    events = data.db.get_aave_events(address=addr3)
    assert events == addr3_events

    # check that all aave events are properly hashable (aka can go in a set)
    test_set = set()
    for event in addr3_events:
        test_set.add(event)
    assert len(test_set) == len(addr3_events)
Example #17
    def process_history(
            self,
            start_ts: Timestamp,
            end_ts: Timestamp,
            trade_history: List[Union[Trade, MarginPosition, AMMTrade]],
            loan_history: List[Loan],
            asset_movements: List[AssetMovement],
            eth_transactions: List[EthereumTransaction],
            defi_events: List[DefiEvent],
    ) -> Dict[str, Any]:
        """Processes the entire history of cryptoworld actions in order to determine
        the price and time at which every asset was obtained and also
        the general and taxable profit/loss.

        start_ts here is the timestamp at which to start taking trades and other
        taxable events into account. Not where processing starts from. Processing
        always starts from the very first event we find in the history.
        """
        log.info(
            'Start of history processing',
            start_ts=start_ts,
            end_ts=end_ts,
        )
        self.events.reset(start_ts, end_ts)
        self.last_gas_price = 2000000000
        self.start_ts = start_ts
        self.eth_transactions_gas_costs = FVal(0)
        self.asset_movement_fees = FVal(0)
        self.csvexporter.reset_csv_lists()

        # Ask the DB for the settings once at the start of processing so we get the
        # same settings throughout the entire task
        db_settings = self.db.get_settings()
        self._customize(db_settings)

        actions: List[TaxableAction] = list(trade_history)
        # If we got loans, we need to interleave them with the full history and re-sort
        if len(loan_history) != 0:
            actions.extend(loan_history)

        if len(asset_movements) != 0:
            actions.extend(asset_movements)

        if len(eth_transactions) != 0:
            actions.extend(eth_transactions)

        if len(defi_events) != 0:
            actions.extend(defi_events)

        actions.sort(
            key=lambda action: action_get_timestamp(action),
        )
        # The first ts is the ts of the first action we have in history or 0 for empty history
        first_ts = Timestamp(0) if len(actions) == 0 else action_get_timestamp(actions[0])
        self.currently_processing_timestamp = first_ts
        self.started_processing_timestamp = first_ts

        prev_time = Timestamp(0)
        count = 0
        for action in actions:
            try:
                (
                    should_continue,
                    prev_time,
                ) = self.process_action(action, end_ts, prev_time, db_settings)
            except PriceQueryUnsupportedAsset as e:
                ts = action_get_timestamp(action)
                self.msg_aggregator.add_error(
                    f'Skipping action at '
                    f' {timestamp_to_date(ts, formatstr="%d/%m/%Y, %H:%M:%S")} '
                    f'during history processing due to an asset unknown to '
                    f'cryptocompare being involved. Check logs for details',
                )
                log.error(
                    f'Skipping action {str(action)} during history processing due to '
                    f'cryptocompare not supporting an involved asset: {str(e)}',
                )
                continue
            except NoPriceForGivenTimestamp as e:
                ts = action_get_timestamp(action)
                self.msg_aggregator.add_error(
                    f'Skipping action at '
                    f' {timestamp_to_date(ts, formatstr="%d/%m/%Y, %H:%M:%S")} '
                    f'during history processing due to inability to find a price '
                    f'at that point in time: {str(e)}. Check the logs for more details',
                )
                log.error(
                    f'Skipping action {str(action)} during history processing due to '
                    f'inability to query a price at that time: {str(e)}',
                )
                continue
            except RemoteError as e:
                ts = action_get_timestamp(action)
                self.msg_aggregator.add_error(
                    f'Skipping action at '
                    f' {timestamp_to_date(ts, formatstr="%d/%m/%Y, %H:%M:%S")} '
                    f'during history processing due to inability to reach an external '
                    f'service at that point in time: {str(e)}. Check the logs for more details',
                )
                log.error(
                    f'Skipping action {str(action)} during history processing due to '
                    f'inability to reach an external service at that time: {str(e)}',
                )
                continue

            if not should_continue:
                break

            if count % 500 == 0:
                # This loop can take a very long time depending on the amount of actions
                # to process. We need to yield to other greenlets or else calls to the
                # API may time out
                gevent.sleep(0.5)
            count += 1

        self.events.calculate_asset_details()
        Inquirer().save_historical_forex_data()

        sum_other_actions = (
            self.events.margin_positions_profit_loss +
            self.events.defi_profit_loss +
            self.events.loan_profit -
            self.events.settlement_losses -
            self.asset_movement_fees -
            self.eth_transactions_gas_costs
        )
        total_taxable_pl = self.events.taxable_trade_profit_loss + sum_other_actions
        return {
            'overview': {
                'defi_profit_loss': str(self.events.defi_profit_loss),
                'loan_profit': str(self.events.loan_profit),
                'margin_positions_profit_loss': str(self.events.margin_positions_profit_loss),
                'settlement_losses': str(self.events.settlement_losses),
                'ethereum_transaction_gas_costs': str(self.eth_transactions_gas_costs),
                'asset_movement_fees': str(self.asset_movement_fees),
                'general_trade_profit_loss': str(self.events.general_trade_profit_loss),
                'taxable_trade_profit_loss': str(self.events.taxable_trade_profit_loss),
                'total_taxable_profit_loss': str(total_taxable_pl),
                'total_profit_loss': str(
                    self.events.general_trade_profit_loss +
                    sum_other_actions,
                ),
            },
            'all_events': self.csvexporter.all_events,
        }
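
The count % 500 branch above is the usual cooperative-multitasking trick for long gevent loops: sleeping periodically, even briefly, lets other greenlets (for example API request handlers) run instead of being starved. A minimal standalone sketch of the pattern:

import gevent

def process_many(actions):
    for count, _action in enumerate(actions):
        # ... process the action here ...
        if count % 500 == 0:
            # Yield control so other greenlets are not starved while this
            # long-running loop is busy
            gevent.sleep(0.5)

process_many(range(1200))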
Example #18
    def query_balances(
        self,
        requested_save_data: bool = False,
        timestamp: Timestamp = None,
        ignore_cache: bool = False,
    ) -> Dict[str, Any]:
        """Query all balances rotkehlchen can see.

        If requested_save_data is True then the data are always saved in the DB.
        If it is False then the data are saved only if self.data.should_save_balances()
        is True.
        If timestamp is None then the current timestamp is used.
        If a timestamp is given then that is the time at which the balances are
        going to be saved in the DB.
        If ignore_cache is True then all underlying calls that have a cache ignore it.

        Returns a dictionary with the queried balances.
        """
        log.info('query_balances called',
                 requested_save_data=requested_save_data)

        balances = {}
        problem_free = True
        for exchange in self.exchange_manager.connected_exchanges.values():
            exchange_balances, _ = exchange.query_balances(ignore_cache=ignore_cache)
            # If we got an error, disregard that exchange but make sure we don't save data
            if not isinstance(exchange_balances, dict):
                problem_free = False
            else:
                balances[exchange.name] = exchange_balances

        try:
            blockchain_result = self.chain_manager.query_balances(
                blockchain=None,
                ignore_cache=ignore_cache,
            )
            balances['blockchain'] = {
                asset: balance.to_dict()
                for asset, balance in blockchain_result.totals.items()
            }
        except (RemoteError, EthSyncError) as e:
            problem_free = False
            log.error(f'Querying blockchain balances failed due to: {str(e)}')

        result = self.query_fiat_balances()
        if result != {}:
            balances['banks'] = result
        balances = account_for_manually_tracked_balances(db=self.data.db,
                                                         balances=balances)

        combined = combine_stat_dicts([v for k, v in balances.items()])
        total_usd_per_location = [(k, dict_get_sumof(v, 'usd_value'))
                                  for k, v in balances.items()]

        # calculate net usd value
        net_usd = FVal(0)
        for _, v in combined.items():
            net_usd += FVal(v['usd_value'])

        stats: Dict[str, Any] = {
            'location': {},
            'net_usd': net_usd,
        }
        for entry in total_usd_per_location:
            name = entry[0]
            total = entry[1]
            if net_usd != FVal(0):
                percentage = (total / net_usd).to_percentage()
            else:
                percentage = '0%'
            stats['location'][name] = {
                'usd_value': total,
                'percentage_of_net_value': percentage,
            }

        for k, v in combined.items():
            if net_usd != FVal(0):
                percentage = (v['usd_value'] / net_usd).to_percentage()
            else:
                percentage = '0%'
            combined[k]['percentage_of_net_value'] = percentage

        result_dict = merge_dicts(combined, stats)

        allowed_to_save = requested_save_data or self.data.should_save_balances()

        if problem_free and allowed_to_save:
            if not timestamp:
                timestamp = Timestamp(int(time.time()))
            self.data.save_balances_data(data=result_dict, timestamp=timestamp)
            log.debug('query_balances data saved')
        else:
            log.debug(
                'query_balances data not saved',
                allowed_to_save=allowed_to_save,
                problem_free=problem_free,
            )

        # After adding it to the saved file we can overlay additional data that
        # is not required to be saved in the history file
        try:
            details = self.accountant.events.details
            for asset, (tax_free_amount, average_buy_value) in details.items():
                if asset not in result_dict:
                    continue

                result_dict[asset]['tax_free_amount'] = tax_free_amount
                result_dict[asset]['average_buy_value'] = average_buy_value

                current_price = (
                    result_dict[asset]['usd_value'] / result_dict[asset]['amount']
                )
                if average_buy_value != FVal(0):
                    result_dict[asset]['percent_change'] = (
                        ((current_price - average_buy_value) /
                         average_buy_value) * 100)
                else:
                    result_dict[asset]['percent_change'] = 'INF'

        except AttributeError:
            pass

        return result_dict
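A minimal sketch of how a caller might consume the dictionary returned by query_balances above. The rotki instance is a hypothetical, already-configured Rotkehlchen object; the key layout ('net_usd', 'location', plus one entry per asset) follows from merge_dicts(combined, stats) in the snippet:

# Hypothetical call site; force saving a snapshot and bypass caches
result = rotki.query_balances(requested_save_data=True, ignore_cache=True)

print('Net worth (USD):', result['net_usd'])
for location, stats in result['location'].items():
    print(f"{location}: {stats['usd_value']} USD "
          f"({stats['percentage_of_net_value']} of net value)")

# Every other key is an asset entry produced by combine_stat_dicts
for key, entry in result.items():
    if key in ('net_usd', 'location'):
        continue
    print(key, entry['amount'], entry['usd_value'], entry['percentage_of_net_value'])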
Example No. 19
0
    assert_error_response,
    assert_ok_async_response,
    assert_proper_response_with_result,
    wait_for_async_task,
)
from rotkehlchen.tests.utils.checks import (
    assert_serialized_dicts_equal,
    assert_serialized_lists_equal,
)
from rotkehlchen.tests.utils.constants import A_USDC, A_WBTC
from rotkehlchen.tests.utils.makerdao import mock_proxies

mocked_prices = {
    'ETH': {
        'USD': {
            1582699808: FVal('223.73'),
            1583958747: FVal('194.86'),
            1584061534: FVal('135.44'),
            1584061897: FVal('135.44'),
            1584061977: FVal('135.44'),
            1586785858: FVal('156.82'),
            1587910979: FVal('197.78'),
            1588174425: FVal('215.55'),
            1588964667: FVal('211.54'),
            1589993538: FVal('209.85'),
            1590043499: FVal('198.56'),
            1590043699: FVal('198.56'),
        },
    },
    'DAI': {
        'USD': {
Example No. 20
0
    def create_fake_data(self, args: argparse.Namespace) -> None:
        self._clean_tables()
        from_ts, to_ts = StatisticsFaker._get_timestamps(args)
        starting_amount, min_amount, max_amount = StatisticsFaker._get_amounts(args)
        total_amount = starting_amount
        locations = [deserialize_location(location) for location in args.locations.split(',')]
        assets = [Asset(symbol) for symbol in args.assets.split(',')]
        go_up_probability = FVal(args.go_up_probability)

        # Add the first distribution of location data
        location_data = []
        for idx, value in enumerate(divide_number_in_parts(starting_amount, len(locations))):
            location_data.append(LocationData(
                time=from_ts,
                location=locations[idx].serialize_for_db(),
                usd_value=str(value),
            ))
        # add the location data + total to the DB
        self.db.add_multiple_location_data(location_data + [LocationData(
            time=from_ts,
            location=Location.TOTAL.serialize_for_db(),
            usd_value=str(total_amount),
        )])

        # Add the first distribution of assets
        assets_data = []
        for idx, value in enumerate(divide_number_in_parts(starting_amount, len(assets))):
            assets_data.append(AssetBalance(
                category=BalanceType.ASSET,
                time=from_ts,
                asset=assets[idx],
                amount=str(random.randint(1, 20)),
                usd_value=str(value),
            ))
        self.db.add_multiple_balances(assets_data)

        while from_ts < to_ts:
            print(f'At timestamp: {from_ts}/{to_ts} with total net worth: ${total_amount}')
            new_location_data = []
            new_assets_data = []
            from_ts += args.seconds_between_balance_save
            # remaining_loops = to_ts - from_ts / args.seconds_between_balance_save
            add_usd_value = random.choice([100, 350, 500, 625, 725, 915, 1000])
            add_amount = random.choice([
                FVal('0.1'), FVal('0.23'), FVal('0.34'), FVal('0.69'), FVal('1.85'), FVal('2.54'),
            ])

            go_up = (
                # If any asset's usd value is about to go below zero, go up
                any(FVal(a.usd_value) - FVal(add_usd_value) < 0 for a in assets_data) or
                # If total is going under the min amount go up
                total_amount - add_usd_value < min_amount or
                # If "dice roll" matched and we won't go over the max amount go up
                (
                    add_usd_value + total_amount < max_amount and
                    FVal(random.random()) <= go_up_probability
                )
            )
            if go_up:
                total_amount += add_usd_value
                action = operator.add
            else:
                total_amount -= add_usd_value
                action = operator.sub

            for idx, value in enumerate(divide_number_in_parts(add_usd_value, len(locations))):
                new_location_data.append(LocationData(
                    time=from_ts,
                    location=location_data[idx].location,
                    usd_value=str(action(FVal(location_data[idx].usd_value), value)),
                ))
            # add the location data + total to the DB
            self.db.add_multiple_location_data(new_location_data + [LocationData(
                time=from_ts,
                location=Location.TOTAL.serialize_for_db(),
                usd_value=str(total_amount),
            )])

            for idx, value in enumerate(divide_number_in_parts(add_usd_value, len(assets))):
                old_amount = FVal(assets_data[idx].amount)
                new_amount = action(old_amount, add_amount)
                if new_amount < FVal('0'):
                    new_amount = old_amount + FVal('0.01')
                new_assets_data.append(AssetBalance(
                    category=BalanceType.ASSET,
                    time=from_ts,
                    asset=assets[idx],
                    amount=str(new_amount),
                    usd_value=str(action(FVal(assets_data[idx].usd_value), value)),
                ))
            self.db.add_multiple_balances(new_assets_data)

            location_data = new_location_data
            assets_data = new_assets_data
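The go-up/go-down decision inside the loop can be read in isolation. The sketch below mirrors that branching with Decimal standing in for FVal; it is only an illustration of the condition, not part of the faker itself:

import random
from decimal import Decimal

def should_go_up(asset_usd_values, total_amount, add_usd_value,
                 min_amount, max_amount, go_up_probability):
    """Force a move up if going down would push any asset or the total below
    the floor; otherwise roll the dice, but never exceed the ceiling."""
    would_underflow = any(value - add_usd_value < 0 for value in asset_usd_values)
    below_min = total_amount - add_usd_value < min_amount
    dice_roll = (
        total_amount + add_usd_value < max_amount and
        Decimal(str(random.random())) <= go_up_probability
    )
    return would_underflow or below_min or dice_roll

print(should_go_up(
    asset_usd_values=[Decimal('50'), Decimal('900')],
    total_amount=Decimal('950'),
    add_usd_value=Decimal('100'),
    min_amount=Decimal('500'),
    max_amount=Decimal('5000'),
    go_up_probability=Decimal('0.5'),
))  # True: subtracting 100 would push the first asset below zero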
Example No. 21
0
def test_query_vaults_wbtc(rotkehlchen_api_server, ethereum_accounts):
    """Check vault info and details for a vault with WBTC as collateral"""
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    proxies_mapping = {
        ethereum_accounts[0]: '0x9684e6C1c7B79868839b27F88bA6d5A176367075',  # 8913
    }

    mock_proxies(rotki, proxies_mapping, 'makerdao_vaults')
    response = requests.get(api_url_for(
        rotkehlchen_api_server,
        "makerdaovaultsresource",
    ))
    # That proxy has 3 vaults. We only want to test 8913, which is closed/repaid so just keep that
    vaults = [x for x in assert_proper_response_with_result(response) if x['identifier'] == 8913]
    vault_8913 = MakerDAOVault(
        identifier=8913,
        owner=ethereum_accounts[0],
        collateral_type='WBTC-A',
        urn='0x37f7B3C82A9Edc13FdCcE66E7d500b3698A13294',
        collateral_asset=A_WBTC,
        collateral=Balance(ZERO, ZERO),
        debt=Balance(ZERO, ZERO),
        collateralization_ratio=None,
        liquidation_ratio=FVal(1.5),
        liquidation_price=None,
        stability_fee=FVal(0.02),
    )
    expected_vaults = [vault_8913.serialize()]
    assert_serialized_lists_equal(expected_vaults, vaults, ignore_keys=['stability_fee'])
    # And also make sure that the internal mapping will only query details of 8913
    rotki.chain_manager.makerdao_vaults.vault_mappings = {ethereum_accounts[0]: [vault_8913]}

    response = requests.get(api_url_for(
        rotkehlchen_api_server,
        "makerdaovaultdetailsresource",
    ))
    vault_8913_details = {
        'identifier': 8913,
        'creation_ts': 1588664698,
        'total_interest_owed': '0.1903819198',
        'total_liquidated': {
            'amount': '0',
            'usd_value': '0',
        },
        'events': [{
            'event_type': 'deposit',
            'value': {
                'amount': '0.011',
                'usd_value': '87.06599',
            },
            'timestamp': 1588664698,
            'tx_hash': '0x9ba4a6187fa2c49ba327e7c923846a08a1e972017ec41d3f9f66ef524f7dde59',
        }, {
            'event_type': 'generate',
            'value': {
                'amount': '25',
                'usd_value': '25.15',
            },
            'timestamp': 1588664698,
            'tx_hash': '0x9ba4a6187fa2c49ba327e7c923846a08a1e972017ec41d3f9f66ef524f7dde59',
        }, {
            'event_type': 'payback',
            'value': {
                'amount': '25.000248996',
                'usd_value': '25.15025',
            },
            'timestamp': 1588696496,
            'tx_hash': '0x8bd960e7eb8b9e2b81d2446d1844dd63f94636c7800ea5e3b4d926ea0244c66c',
        }, {
            'event_type': 'deposit',
            'value': {
                'amount': '0.0113',
                'usd_value': '89.440517',
            },
            'timestamp': 1588720248,
            'tx_hash': '0x678c4da562173c102473f1904ff293a767ebac9ec6c7d728ef2fd41acf00a13a',
        }],  # way too many events in the vault, so no need to check them all
    }
    details = assert_proper_response_with_result(response)
    assert len(details) == 1
    assert_serialized_dicts_equal(
        details[0],
        vault_8913_details,
        # Checking only the first 4 events
        length_list_keymap={'events': 4},
    )
Example No. 22
0
def assert_eth_balances_result(
    rotki: Rotkehlchen,
    result: Dict[str, Any],
    eth_accounts: List[str],
    eth_balances: List[str],
    token_balances: Dict[EthereumToken, List[str]],
    also_btc: bool,
    totals_only: bool = False,
) -> None:
    """Asserts for correct ETH blockchain balances when mocked in tests

    If totals_only is given then this is a query for all balances so only the totals are shown
    """
    if not totals_only:
        per_account = result['per_account']
        if also_btc:
            assert len(per_account) == 2
        else:
            assert len(per_account) == 1
        per_account = per_account['ETH']
        assert len(per_account) == len(eth_accounts)
        for idx, account in enumerate(eth_accounts):
            expected_amount = from_wei(FVal(eth_balances[idx]))
            amount = FVal(per_account[account]['assets']['ETH']['amount'])
            usd_value = FVal(
                per_account[account]['assets']['ETH']['usd_value'])
            assert amount == expected_amount
            if amount == ZERO:
                assert usd_value == ZERO
            else:
                assert usd_value > ZERO
            for token, balances in token_balances.items():
                expected_token_amount = FVal(balances[idx])
                if expected_token_amount == ZERO:
                    msg = f'{account} should have no entry for {token}'
                    assert token.identifier not in per_account[account], msg
                else:
                    token_amount = FVal(
                        per_account[account]['assets'][token.identifier]['amount'],
                    )
                    usd_value = FVal(
                        per_account[account]['assets'][token.identifier]['usd_value'],
                    )
                    assert token_amount == from_wei(expected_token_amount)
                    assert usd_value > ZERO

    if totals_only:
        totals = result
    else:
        totals = result['totals']['assets']

    # Check our owned eth tokens here since the test may have changed their number
    owned_assets = set(rotki.chain_manager.totals.assets.keys())
    if not also_btc:
        owned_assets.discard(A_BTC)
    assert len(totals) == len(owned_assets)

    expected_total_eth = sum(
        from_wei(FVal(balance)) for balance in eth_balances)
    assert FVal(totals['ETH']['amount']) == expected_total_eth
    if expected_total_eth == ZERO:
        assert FVal(totals['ETH']['usd_value']) == ZERO
    else:
        assert FVal(totals['ETH']['usd_value']) > ZERO

    for token, balances in token_balances.items():
        symbol = token.identifier

        expected_total_token = sum(
            from_wei(FVal(balance)) for balance in balances)
        assert FVal(totals[symbol]['amount']) == expected_total_token
        if expected_total_token == ZERO:
            msg = f"{FVal(totals[symbol]['usd_value'])} is not ZERO"
            assert FVal(totals[symbol]['usd_value']) == ZERO, msg
        else:
            assert FVal(totals[symbol]['usd_value']) > ZERO
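The assertion helper converts the mocked wei strings to ETH via from_wei. A tiny stand-in for that conversion, assuming the standard 10**18 wei-per-ETH ratio and using Decimal instead of FVal:

from decimal import Decimal

WEI_PER_ETH = 10 ** 18

def from_wei_sketch(wei_value: str) -> Decimal:
    """Turn a wei amount (as used in the mocked eth_balances) into ETH."""
    return Decimal(wei_value) / WEI_PER_ETH

assert from_wei_sketch('2000000000000000000') == Decimal('2')
assert from_wei_sketch('1500000000000000') == Decimal('0.0015')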
Example No. 23
0
def test_query_vaults_usdc_strange(rotkehlchen_api_server, ethereum_accounts):
    """Strange case of a USDC vault that is not queried correctly

    https://oasis.app/borrow/7538?network=mainnet
    """
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    proxies_mapping = {
        ethereum_accounts[0]: '0x15fEaFd4358b8C03c889D6661b0CA1Be3389792F',  # 7538
    }

    mock_proxies(rotki, proxies_mapping, 'makerdao_vaults')
    response = requests.get(api_url_for(
        rotkehlchen_api_server,
        "makerdaovaultsresource",
    ))
    # That proxy has 3 vaults. We only want to test 7538, which is closed/repaid so just keep that
    vaults = [x for x in assert_proper_response_with_result(response) if x['identifier'] == 7538]
    vault_7538 = MakerDAOVault(
        identifier=7538,
        owner=ethereum_accounts[0],
        collateral_type='USDC-A',
        urn='0x70E58566C7baB6faaFE03fbA69DF45Ef4f48223B',
        collateral_asset=A_USDC,
        collateral=Balance(ZERO, ZERO),
        debt=Balance(ZERO, ZERO),
        collateralization_ratio=None,
        liquidation_ratio=FVal(1.1),
        liquidation_price=None,
    )
    expected_vaults = [vault_7538.serialize()]
    assert_serialized_lists_equal(expected_vaults, vaults)
    # And also make sure that the internal mapping will only query details of 7538
    rotki.chain_manager.makerdao_vaults.vault_mappings = {ethereum_accounts[0]: [vault_7538]}

    response = requests.get(api_url_for(
        rotkehlchen_api_server,
        "makerdaovaultdetailsresource",
    ))
    vault_7538_details = {
        'identifier': 7538,
        'creation_ts': 1585145754,
        'total_interest_owed': '0.0005943266',
        'total_liquidated': {
            'amount': '0',
            'usd_value': '0',
        },
        'events': [{
            'event_type': 'deposit',
            'value': {
                'amount': '250.12',
                'usd_value': '250.12',
            },
            'timestamp': 1588664698,
            'tx_hash': '0x9ba4a6187fa2c49ba327e7c923846a08a1e972017ec41d3f9f66ef524f7dde59',
        }, {
            'event_type': 'generate',
            'value': {
                'amount': '25',
                'usd_value': '25',
            },
            'timestamp': 1588664698,
            'tx_hash': '0x9ba4a6187fa2c49ba327e7c923846a08a1e972017ec41d3f9f66ef524f7dde59',
        }, {
            'event_type': 'payback',
            'value': {
                'amount': '25.000248996',
                'usd_value': '25.000248996',
            },
            'timestamp': 1588696496,
            'tx_hash': '0x8bd960e7eb8b9e2b81d2446d1844dd63f94636c7800ea5e3b4d926ea0244c66c',
        }, {
            'event_type': 'deposit',
            'value': {
                'amount': '0.0113',
                'usd_value': '0.0113',
            },
            'timestamp': 1588720248,
            'tx_hash': '0x678c4da562173c102473f1904ff293a767ebac9ec6c7d728ef2fd41acf00a13a',
        }],
    }
    details = assert_proper_response_with_result(response)
    expected_details = [vault_7538_details]
    assert_serialized_lists_equal(expected_details, details)
Example No. 24
0
    def query_historical_price(self, from_asset, to_asset, timestamp):
        """
        Query the historical price on `timestamp` for `from_asset` in `to_asset`.
        So how much `to_asset` does 1 unit of `from_asset` cost.

        Args:
            from_asset (str): The ticker symbol of the asset for which we want to know
                              the price.
            to_asset (str): The ticker symbol of the asset against which we want to
                            know the price.
            timestamp (int): The timestamp at which to query the price
        """
        if from_asset == to_asset:
            return 1

        if from_asset not in self.cryptocompare_coin_list:
            raise PriceQueryUnknownFromAsset(from_asset)

        data = self.get_historical_data(from_asset, to_asset, timestamp)

        # all data are sorted and timestamps are always increasing by 1 hour
        # find the closest entry to the provided timestamp
        assert timestamp > data[0]['time']

        index = convert_to_int((timestamp - data[0]['time']) / 3600,
                               accept_only_exact=False)
        # print("timestamp: {} index: {} data_length: {}".format(timestamp, index, len(data)))
        diff = abs(data[index]['time'] - timestamp)
        if index + 1 <= len(data) - 1:
            diff_p1 = abs(data[index + 1]['time'] - timestamp)
            if diff_p1 < diff:
                index = index + 1

        if data[index]['high'] is None or data[index]['low'] is None:
            # If we get some None in the hourly set price to 0 so that we check daily price
            price = FVal(0)
        else:
            price = FVal((data[index]['high'] + data[index]['low'])) / 2

        if price == 0:
            if from_asset != 'BTC' and to_asset != 'BTC':
                # Just get the BTC price
                asset_btc_price = self.query_historical_price(
                    from_asset, 'BTC', timestamp)
                btc_to_asset_price = self.query_historical_price(
                    'BTC', to_asset, timestamp)
                price = asset_btc_price * btc_to_asset_price
            else:
                # attempt to get the daily price by timestamp
                query_string = (
                    'https://min-api.cryptocompare.com/data/pricehistorical?'
                    'fsym={}&tsyms={}&ts={}'.format(from_asset, to_asset,
                                                    timestamp))
                if to_asset == 'BTC':
                    query_string += '&tryConversion=false'
                resp = request_get(query_string)

                if from_asset not in resp:
                    error_message = 'Failed to query cryptocompare for: "{}"'.format(
                        query_string)
                    raise ValueError(error_message)
                price = FVal(resp[from_asset][to_asset])

                if price == 0:
                    raise NoPriceForGivenTimestamp(
                        from_asset, to_asset,
                        tsToDate(timestamp, formatstr='%d/%m/%Y, %H:%M:%S'))

        return price
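The index arithmetic in the middle of query_historical_price assumes hourly-spaced, sorted candles and picks the entry closest to the requested timestamp. A self-contained sketch of just that lookup, under the same assumptions:

def closest_hourly_index(data, timestamp):
    """Return the index of the hourly entry whose 'time' is closest to timestamp.

    Assumes data is sorted and entries are exactly one hour apart, matching
    the structure consumed above.
    """
    assert timestamp > data[0]['time']
    index = int((timestamp - data[0]['time']) / 3600)
    if index + 1 <= len(data) - 1:
        if abs(data[index + 1]['time'] - timestamp) < abs(data[index]['time'] - timestamp):
            index += 1
    return index

candles = [{'time': 3600 * i} for i in range(5)]  # 0, 3600, ..., 14400
assert closest_hourly_index(candles, 5500) == 2   # 5500 is nearer to 7200 than to 3600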
Example No. 25
0
def assert_serialized_dicts_equal(
    a: Dict,
    b: Dict,
    ignore_keys: Optional[List] = None,
    length_list_keymap: Optional[Dict] = None,
    max_diff: str = "1e-6",
) -> None:
    """Compares serialized dicts so that serialized numbers can be compared for equality"""
    msg = f"Dicts don't have the same key length {len(a)} != {len(b)}"
    assert len(a) == len(b), msg
    for a_key, a_val in a.items():

        if ignore_keys and a_key in ignore_keys:
            continue

        if isinstance(a_val, FVal):
            try:
                compare_val = FVal(b[a_key])
            except ValueError:
                raise AssertionError(
                    f'Could not turn {a_key} amount {b[a_key]} into an FVal')
            msg = f"{a_key} amount doesn't match. {compare_val} != {a_val}"
            assert compare_val.is_close(a_val, max_diff=max_diff), msg
        elif isinstance(b[a_key], FVal):
            try:
                compare_val = FVal(a_val)
            except ValueError:
                raise AssertionError(
                    f'Could not turn {a_key} value {a[a_key]} into an FVal')
            msg = f"{a_key} doesn't match. {compare_val} != {b[a_key]}"
            assert compare_val.is_close(b[a_key], max_diff=max_diff), msg
        elif isinstance(a_val, str) and isinstance(b[a_key], str):
            if a_val == b[a_key]:
                continue

            # if strings are not equal, try to turn them to Fvals
            try:
                afval = FVal(a_val)
            except ValueError:
                raise AssertionError(
                    f'After string comparison failure could not turn {a_val} to a number',
                )

            try:
                bfval = FVal(b[a_key])
            except ValueError:
                raise AssertionError(
                    f'After string comparison failure could not turn {b[a_key]} to a number',
                )
            msg = f"{a_key} doesn't match. {afval} != {bfval}"
            assert afval.is_close(bfval, max_diff=max_diff), msg

        elif isinstance(a_val, dict) and 'amount' in a_val and 'usd_value' in a_val:
            assert 'amount' in b[a_key]
            assert 'usd_value' in b[a_key]

            try:
                compare_val = FVal(b[a_key]['amount'])
            except ValueError:
                raise AssertionError(
                    f'Could not turn {a_key} amount {b[a_key]} into an FVal')
            msg = f"{a_key} amount doesn't match. {compare_val} != {a_val['amount']}"
            assert compare_val.is_close(a_val['amount'],
                                        max_diff=max_diff), msg

            try:
                compare_val = FVal(b[a_key]['usd_value'])
            except ValueError:
                raise AssertionError(
                    f'Could not turn {a_key} usd_value {b[a_key]} into an FVal'
                )
            msg = f"{a_key} usd_value doesn't match. {compare_val} != {a_val['usd_value']}"
            assert compare_val.is_close(a_val['usd_value'],
                                        max_diff=max_diff), msg
        elif isinstance(a_val, list):
            max_length_to_check = None
            if length_list_keymap and a_key in length_list_keymap:
                max_length_to_check = length_list_keymap[a_key]
            assert_serialized_lists_equal(
                a=a_val,
                b=b[a_key],
                max_length_to_check=max_length_to_check,
                ignore_keys=ignore_keys,
                length_list_keymap=length_list_keymap,
            )
        else:
            assert a_val == b[
                a_key], f"{a_key} doesn't match. {a_val} != {b[a_key]}"
Example No. 26
0
    def query_balances(self) -> Tuple[Optional[Dict[Asset, Dict[str, Any]]], str]:
        try:
            resp = self.api_query_dict('returnCompleteBalances', {"account": "all"})
        except RemoteError as e:
            msg = (
                'Poloniex API request failed. Could not reach poloniex due '
                'to {}'.format(e)
            )
            log.error(msg)
            return None, msg

        balances = {}
        for poloniex_asset, v in resp.items():
            available = FVal(v['available'])
            on_orders = FVal(v['onOrders'])
            if (available != FVal(0) or on_orders != FVal(0)):
                try:
                    asset = asset_from_poloniex(poloniex_asset)
                except UnsupportedAsset as e:
                    self.msg_aggregator.add_warning(
                        f'Found unsupported poloniex asset {e.asset_name}. '
                        f'Ignoring its balance query.',
                    )
                    continue
                except UnknownAsset as e:
                    self.msg_aggregator.add_warning(
                        f'Found unknown poloniex asset {e.asset_name}. '
                        f'Ignoring its balance query.',
                    )
                    continue
                except DeserializationError:
                    log.error(
                        f'Unexpected poloniex asset type. Expected string '
                        f'but got {type(poloniex_asset)}',
                    )
                    self.msg_aggregator.add_error(
                        'Found poloniex asset entry with non-string type. '
                        'Ignoring its balance query.',
                    )
                    continue

                entry = {}
                entry['amount'] = available + on_orders
                try:
                    usd_price = Inquirer().find_usd_price(asset=asset)
                except RemoteError as e:
                    self.msg_aggregator.add_error(
                        f'Error processing poloniex balance entry due to inability to '
                        f'query USD price: {str(e)}. Skipping balance entry',
                    )
                    continue

                usd_value = entry['amount'] * usd_price
                entry['usd_value'] = usd_value
                balances[asset] = entry

                log.debug(
                    'Poloniex balance query',
                    sensitive_log=True,
                    currency=asset,
                    amount=entry['amount'],
                    usd_value=usd_value,
                )

        return balances, ''
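The return contract above (a balances dict plus an empty message on success, None plus an error message on failure) is exactly what the outer query_balances in Example No. 18 checks with isinstance. A hedged sketch of a caller honoring that contract; poloniex stands in for a configured exchange object:

# Hypothetical call site for the exchange interface sketched above
balances, msg = poloniex.query_balances()
if balances is None:
    print(f'Skipping poloniex balances: {msg}')
else:
    assert msg == ''
    for asset, entry in balances.items():
        print(asset, entry['amount'], entry['usd_value'])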
Example No. 27
0
def test_coinbase_query_deposit_withdrawals(function_scope_coinbase):
    """Test that coinbase deposit/withdrawals history query works fine for the happy path"""
    coinbase = function_scope_coinbase

    with patch.object(coinbase.session,
                      'get',
                      side_effect=mock_normal_coinbase_query):
        movements = coinbase.query_online_deposits_withdrawals(
            start_ts=0,
            end_ts=1576726126,
        )

    warnings = coinbase.msg_aggregator.consume_warnings()
    errors = coinbase.msg_aggregator.consume_errors()
    assert len(warnings) == 0
    assert len(errors) == 0
    assert len(movements) == 3
    expected_movements = [
        AssetMovement(
            location=Location.COINBASE,
            category=AssetMovementCategory.DEPOSIT,
            timestamp=1519001640,
            asset=A_USD,
            amount=FVal('55'),
            fee_asset=A_USD,
            fee=FVal('0.05'),
            link='1130eaec-07d7-54c4-a72c-2e92826897df',
        ),
        AssetMovement(
            location=Location.COINBASE,
            category=AssetMovementCategory.WITHDRAWAL,
            timestamp=1485895742,
            asset=A_USD,
            amount=FVal('10.0'),
            fee_asset=A_USD,
            fee=FVal('0.01'),
            link='146eaec-07d7-54c4-a72c-2e92826897df',
        ),
        AssetMovement(
            location=Location.COINBASE,
            category=AssetMovementCategory.WITHDRAWAL,
            timestamp=1566726126,
            asset=A_ETH,
            amount=FVal('0.05770427'),
            fee_asset=A_ETH,
            fee=FVal('0.00021'),
            link='id1',
        )
    ]
    assert expected_movements == movements

    # and now try to query within a specific range
    with patch.object(coinbase.session,
                      'get',
                      side_effect=mock_normal_coinbase_query):
        movements = coinbase.query_online_deposits_withdrawals(
            start_ts=0,
            end_ts=1519001650,
        )

    warnings = coinbase.msg_aggregator.consume_warnings()
    errors = coinbase.msg_aggregator.consume_errors()
    assert len(warnings) == 0
    assert len(errors) == 0
    assert len(movements) == 2
    assert movements[0].category == AssetMovementCategory.DEPOSIT
    assert movements[0].timestamp == 1519001640
    assert movements[1].category == AssetMovementCategory.WITHDRAWAL
    assert movements[1].timestamp == 1485895742
Example No. 28
0
def test_bittrex_query_balances_unknown_asset(bittrex):
    def mock_unknown_asset_return(url):  # pylint: disable=unused-argument
        response = MockResponse(
            200,
            """
{
  "success": true,
  "message": "''",
  "result": [
    {
      "Currency": "BTC",
      "Balance": "5.0",
      "Available": "5.0",
      "Pending": 0,
      "CryptoAddress": "DLxcEt3AatMyr2NTatzjsfHNoB9NT62HiF",
      "Requested": false,
      "Uuid": null
    },
    {
      "Currency": "ETH",
      "Balance": "10.0",
      "Available": "10.0",
      "Pending": 0,
      "CryptoAddress": "0xb55a183bf5db01665f9fc5dfba71fc6f8b5e42e6",
      "Requested": false,
      "Uuid": null
    },
    {
      "Currency": "IDONTEXIST",
      "Balance": "15.0",
      "Available": "15.0",
      "Pending": 0,
      "CryptoAddress": "0xb55a183bf5db01665f9fc5dfba71fc6f8b5e42e6",
      "Requested": false,
      "Uuid": null
    },
    {
      "Currency": "PTON",
      "Balance": "15.0",
      "Available": "15.0",
      "Pending": 0,
      "CryptoAddress": "0xb55a183bf5db01665f9fc5dfba71fc6f8b5e42e6",
      "Requested": false,
      "Uuid": null
    }
  ]
}
            """,
        )
        return response

    with patch.object(bittrex.session,
                      'get',
                      side_effect=mock_unknown_asset_return):
        # Test that after querying the assets only ETH and BTC are there
        balances, msg = bittrex.query_balances()

    assert msg == ''
    assert len(balances) == 2
    assert balances[A_BTC]['amount'] == FVal('5.0')
    assert balances[A_ETH]['amount'] == FVal('10.0')

    warnings = bittrex.msg_aggregator.consume_warnings()
    assert len(warnings) == 2
    assert 'unknown bittrex asset IDONTEXIST' in warnings[0]
    assert 'unsupported bittrex asset PTON' in warnings[1]
Example No. 29
0
import random

import pytest
import requests

from rotkehlchen.fval import FVal
from rotkehlchen.tests.utils.api import (
    api_url_for,
    assert_ok_async_response,
    assert_proper_response_with_result,
    wait_for_async_task_with_result,
)


@pytest.mark.parametrize('mocked_current_prices',
                         [{
                             ('BTC', 'USD'): FVal('33183.98'),
                             ('GBP', 'USD'): FVal('1.367'),
                         }])
def test_get_current_assets_price_in_usd(rotkehlchen_api_server):
    async_query = random.choice([False, True])
    response = requests.post(
        api_url_for(
            rotkehlchen_api_server,
            'currentassetspriceresource',
        ),
        json={
            'assets': ['BTC', 'USD', 'GBP'],
            'target_asset': 'USD',
            'async_query': async_query,
        },
    )
Example No. 30
0
def test_eth2_deposits_serialization():
    addr1 = make_ethereum_address()
    addr2 = make_ethereum_address()
    deposits = [
        Eth2Deposit(
            from_address=addr1,
            pubkey=
            '0xb016e31f633a21fbe42a015152399361184f1e2c0803d89823c224994af74a561c4ad8cfc94b18781d589d03e952cd5b',  # noqa: E501
            withdrawal_credentials=
            '0x004c7691c2085648f394ffaef851f3b1d51b95f7263114bc923fc5338f5fc499',  # noqa: E501
            value=Balance(FVal(32), FVal(64)),
            deposit_index=9,
            tx_hash=
            '0xd9eca1c2a0c5ff2f25071713432b21cc4d0ff2e8963edc63a48478e395e08db1',
            log_index=22,
            timestamp=Timestamp(int(1604506685)),
        ),
        Eth2Deposit(
            from_address=addr2,
            pubkey=
            '0xa8ff5fc88412d080a297683c25a791ef77eb52d75b265fabab1f2c2591bb927c35818ac6289bc6680ab252787d0ebab3',  # noqa: E501
            withdrawal_credentials=
            '0x00cfe1c10347d642a8b8daf86d23bcb368076972691445de2cf517ff43765817',  # noqa: E501
            value=Balance(FVal(32), FVal(64)),
            deposit_index=1650,
            tx_hash=
            '0x6905f4d1843fb8c003c1fbbc2c8e6c5f9792f4f44ddb1122553412ee0b128da7',
            log_index=221,
            timestamp=Timestamp(int(1605043544)),
        ),
    ]

    serialized = process_result_list(deposits)
    assert serialized == [
        {
            'from_address': addr1,
            'pubkey':
            '0xb016e31f633a21fbe42a015152399361184f1e2c0803d89823c224994af74a561c4ad8cfc94b18781d589d03e952cd5b',  # noqa: E501
            'withdrawal_credentials':
            '0x004c7691c2085648f394ffaef851f3b1d51b95f7263114bc923fc5338f5fc499',  # noqa: E501
            'value': {
                'amount': '32',
                'usd_value': '64'
            },
            'deposit_index': 9,
            'tx_hash':
            '0xd9eca1c2a0c5ff2f25071713432b21cc4d0ff2e8963edc63a48478e395e08db1',
            'log_index': 22,
            'timestamp': 1604506685,
        },
        {
            'from_address': addr2,
            'pubkey':
            '0xa8ff5fc88412d080a297683c25a791ef77eb52d75b265fabab1f2c2591bb927c35818ac6289bc6680ab252787d0ebab3',  # noqa: E501
            'withdrawal_credentials':
            '0x00cfe1c10347d642a8b8daf86d23bcb368076972691445de2cf517ff43765817',  # noqa: E501
            'value': {
                'amount': '32',
                'usd_value': '64'
            },
            'deposit_index': 1650,
            'tx_hash':
            '0x6905f4d1843fb8c003c1fbbc2c8e6c5f9792f4f44ddb1122553412ee0b128da7',
            'log_index': 221,
            'timestamp': 1605043544,
        },
    ]
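The serialization asserted above boils down to FVal-backed fields (the Balance amounts and usd values) being emitted as strings while plain integers pass through unchanged. A toy stand-in for that behavior, not the real process_result_list, which handles many more types:

from decimal import Decimal

def serialize_value(value):
    """Toy serializer: Decimals become strings, containers recurse, rest passes through."""
    if isinstance(value, Decimal):
        return str(value)
    if isinstance(value, dict):
        return {key: serialize_value(val) for key, val in value.items()}
    if isinstance(value, (list, tuple)):
        return [serialize_value(item) for item in value]
    return value

expected = {'value': {'amount': '32', 'usd_value': '64'}, 'log_index': 22}
assert serialize_value(
    {'value': {'amount': Decimal(32), 'usd_value': Decimal(64)}, 'log_index': 22},
) == expected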