Example 1
    def _get_events_balances(
        self,
        addresses: List[ChecksumEthAddress],
        from_timestamp: Timestamp,
        to_timestamp: Timestamp,
    ) -> AddressEventsBalances:
        """Request via graph all events for new addresses and the latest ones
        for already existing addresses. Then the requested events are written
        in DB and finally all DB events are read, and processed for calculating
        total profit/loss per LP (stored within <LiquidityPoolEventsBalance>).
        """
        address_events_balances: AddressEventsBalances = {}
        address_events: DDAddressEvents = defaultdict(list)
        db_address_events: AddressEvents = {}
        new_addresses: List[ChecksumEthAddress] = []
        existing_addresses: List[ChecksumEthAddress] = []
        min_end_ts: Timestamp = to_timestamp

        # Get addresses' last used query range for Uniswap events
        for address in addresses:
            entry_name = f'{UNISWAP_EVENTS_PREFIX}_{address}'
            events_range = self.database.get_used_query_range(name=entry_name)

            if not events_range:
                new_addresses.append(address)
            else:
                existing_addresses.append(address)
                min_end_ts = min(min_end_ts, events_range[1])

        # Request new addresses' events
        if new_addresses:
            start_ts = Timestamp(0)
            for address in new_addresses:
                for event_type in EventType:
                    new_address_events = self._get_events_graph(
                        address=address,
                        start_ts=start_ts,
                        end_ts=to_timestamp,
                        event_type=event_type,
                    )
                    if new_address_events:
                        address_events[address].extend(new_address_events)

                # Insert new address' last used query range
                self.database.update_used_query_range(
                    name=f'{UNISWAP_EVENTS_PREFIX}_{address}',
                    start_ts=start_ts,
                    end_ts=to_timestamp,
                )

        # Request existing DB addresses' events
        if existing_addresses and min_end_ts <= to_timestamp:
            for address in existing_addresses:
                for event_type in EventType:
                    address_new_events = self._get_events_graph(
                        address=address,
                        start_ts=min_end_ts,
                        end_ts=to_timestamp,
                        event_type=event_type,
                    )
                    if address_new_events:
                        address_events[address].extend(address_new_events)

                # Update existing address' last used query range
                self.database.update_used_query_range(
                    name=f'{UNISWAP_EVENTS_PREFIX}_{address}',
                    start_ts=min_end_ts,
                    end_ts=to_timestamp,
                )

        # Insert requested events in DB
        all_events = []
        for address in filter(lambda address: address in address_events,
                              addresses):
            all_events.extend(address_events[address])

        self.database.add_uniswap_events(all_events)

        # Fetch all DB events within the time range
        for address in addresses:
            db_events = self.database.get_uniswap_events(
                from_ts=from_timestamp,
                to_ts=to_timestamp,
                address=address,
            )
            if db_events:
                # return events with the oldest first
                db_events.sort(
                    key=lambda event: (event.timestamp, event.log_index))
                db_address_events[address] = db_events

        # Request addresses' current balances (UNI-V2s and underlying tokens)
        # if there is no specific time range in this endpoint call (i.e. all
        # events). Current balances in the protocol are needed for an accurate
        # profit/loss calculation.
        # TODO: when this endpoint is called with a specific time range,
        # getting the balances and underlying tokens within that time range
        # requires an archive node. Feature pending to be developed.
        address_balances: AddressBalances = {}  # Empty when specific time range
        if from_timestamp == Timestamp(0):
            address_balances = self.get_balances(addresses)

        # Calculate addresses' event balances (i.e. profit/loss per pool)
        for address, events in db_address_events.items():
            balances = address_balances.get(address, [])  # Empty when specific time range
            events_balances = self._calculate_events_balances(
                address=address,
                events=events,
                balances=balances,
            )
            address_events_balances[address] = events_balances

        return address_events_balances
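
A minimal, standalone sketch of the "used query range" bookkeeping performed at the top of the method above, with a plain dict standing in for the database and hypothetical names; only the classification logic is shown, under those assumptions.

from typing import Dict, List, Tuple

Timestamp = int  # simplified stand-in for the NewType used in these examples

def split_addresses_by_query_range(
        addresses: List[str],
        used_query_ranges: Dict[str, Tuple[Timestamp, Timestamp]],
        to_timestamp: Timestamp,
        prefix: str = 'uniswap_events',
) -> Tuple[List[str], List[str], Timestamp]:
    # Classify addresses into new/existing and compute the minimum end of the
    # already-queried ranges, mirroring the loop in _get_events_balances
    new_addresses: List[str] = []
    existing_addresses: List[str] = []
    min_end_ts = to_timestamp
    for address in addresses:
        events_range = used_query_ranges.get(f'{prefix}_{address}')
        if events_range is None:
            new_addresses.append(address)
        else:
            existing_addresses.append(address)
            min_end_ts = min(min_end_ts, events_range[1])
    return new_addresses, existing_addresses, min_end_ts

# 0xA was never queried before, 0xB was already queried up to t=1000
ranges = {'uniswap_events_0xB': (0, 1000)}
print(split_addresses_by_query_range(['0xA', '0xB'], ranges, to_timestamp=2000))
# (['0xA'], ['0xB'], 1000)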
Example 2
def ts_now() -> Timestamp:
    return Timestamp(int(time.time()))
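
For context, a self-contained sketch of how Timestamp and ts_now fit together, assuming Timestamp is a typing.NewType over int as its usage in the surrounding examples suggests.

import time
from typing import NewType

Timestamp = NewType('Timestamp', int)  # assumed definition, matching its usage above

def ts_now() -> Timestamp:
    # Current UNIX time, truncated to whole seconds
    return Timestamp(int(time.time()))

now = ts_now()
print(now)        # e.g. 1700000000
print(type(now))  # <class 'int'>: NewType only exists for static type checking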
Example 3
def aave_event_from_db(event_tuple: AAVE_EVENT_DB_TUPLE) -> AaveEvent:
    """Turns a tuple read from the DB into an appropriate AaveEvent

    May raise a DeserializationError if something is wrong with the DB data
    """
    event_type = event_tuple[1]
    block_number = event_tuple[2]
    timestamp = Timestamp(event_tuple[3])
    tx_hash = event_tuple[4]
    log_index = event_tuple[5]
    asset2 = None
    if event_tuple[9] is not None:
        try:
            asset2 = Asset(event_tuple[9])
        except UnknownAsset as e:
            raise DeserializationError(
                f'Unknown asset {event_tuple[9]} encountered during deserialization '
                f'of Aave event from DB for asset2',
            ) from e

    try:
        asset1 = Asset(event_tuple[6])
    except UnknownAsset as e:
        raise DeserializationError(
            f'Unknown asset {event_tuple[6]} encountered during deserialization '
            f'of Aave event from DB for asset1',
        ) from e
    asset1_amount = FVal(event_tuple[7])
    asset1_usd_value = FVal(event_tuple[8])

    if event_type in ('deposit', 'withdrawal'):
        return AaveDepositWithdrawalEvent(
            event_type=event_type,
            block_number=block_number,
            timestamp=timestamp,
            tx_hash=tx_hash,
            log_index=log_index,
            asset=asset1,
            atoken=EthereumToken.from_asset(asset2),  # type: ignore # should be a token
            value=Balance(amount=asset1_amount, usd_value=asset1_usd_value),
        )
    if event_type == 'interest':
        return AaveInterestEvent(
            event_type=event_type,
            block_number=block_number,
            timestamp=timestamp,
            tx_hash=tx_hash,
            log_index=log_index,
            asset=asset1,
            value=Balance(amount=asset1_amount, usd_value=asset1_usd_value),
        )
    if event_type == 'borrow':
        if event_tuple[12] not in ('stable', 'variable'):
            raise DeserializationError(
                f'Invalid borrow rate mode encountered in the DB: {event_tuple[12]}',
            )
        borrow_rate_mode: Literal['stable', 'variable'] = event_tuple[12]  # type: ignore
        borrow_rate = deserialize_optional_to_fval(
            value=event_tuple[10],
            name='borrow_rate',
            location='reading aave borrow event from DB',
        )
        accrued_borrow_interest = deserialize_optional_to_fval(
            value=event_tuple[11],
            name='accrued_borrow_interest',
            location='reading aave borrow event from DB',
        )
        return AaveBorrowEvent(
            event_type=event_type,
            block_number=block_number,
            timestamp=timestamp,
            tx_hash=tx_hash,
            log_index=log_index,
            asset=asset1,
            value=Balance(amount=asset1_amount, usd_value=asset1_usd_value),
            borrow_rate_mode=borrow_rate_mode,
            borrow_rate=borrow_rate,
            accrued_borrow_interest=accrued_borrow_interest,
        )
    if event_type == 'repay':
        fee_amount = deserialize_optional_to_fval(
            value=event_tuple[10],
            name='fee_amount',
            location='reading aave repay event from DB',
        )
        fee_usd_value = deserialize_optional_to_fval(
            value=event_tuple[11],
            name='fee_usd_value',
            location='reading aave repay event from DB',
        )
        return AaveRepayEvent(
            event_type=event_type,
            block_number=block_number,
            timestamp=timestamp,
            tx_hash=tx_hash,
            log_index=log_index,
            asset=asset1,
            value=Balance(amount=asset1_amount, usd_value=asset1_usd_value),
            fee=Balance(amount=fee_amount, usd_value=fee_usd_value),
        )
    if event_type == 'liquidation':
        if asset2 is None:
            raise DeserializationError(
                'Did not find asset2 in an aave liquidation event from the DB.',
            )
        principal_amount = deserialize_optional_to_fval(
            value=event_tuple[10],
            name='principal_amount',
            location='reading aave liquidation event from DB',
        )
        principal_usd_value = deserialize_optional_to_fval(
            value=event_tuple[11],
            name='principal_usd_value',
            location='reading aave liquidation event from DB',
        )
        return AaveLiquidationEvent(
            event_type=event_type,
            block_number=block_number,
            timestamp=timestamp,
            tx_hash=tx_hash,
            log_index=log_index,
            collateral_asset=asset1,
            collateral_balance=Balance(amount=asset1_amount, usd_value=asset1_usd_value),
            principal_asset=asset2,
            principal_balance=Balance(
                amount=principal_amount,
                usd_value=principal_usd_value,
            ),
        )
    # else
    raise DeserializationError(
        f'Unknown event type {event_type} encountered during '
        f'deserialization of Aave event from DB',
    )
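
The helper deserialize_optional_to_fval is used above but not shown; the sketch below is a hypothetical minimal version (using Decimal in place of FVal) of the contract it appears to have: a missing or unparseable value is a DB consistency problem and raises DeserializationError.

from decimal import Decimal, InvalidOperation
from typing import Optional

class DeserializationError(Exception):
    pass

def deserialize_optional_to_fval(value: Optional[str], name: str, location: str) -> Decimal:
    # None is not acceptable here: the caller expects a concrete numeric value
    if value is None:
        raise DeserializationError(f'Missing {name} while {location}')
    try:
        return Decimal(value)
    except InvalidOperation as e:
        raise DeserializationError(f'Invalid {name} value {value!r} while {location}') from e

print(deserialize_optional_to_fval('0.1', 'fee_amount', 'reading aave repay event from DB'))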
Example 4
def assert_poloniex_asset_movements(
    to_check_list: List[Any],
    deserialized: bool,
    movements_to_check: Optional[Tuple[int, ...]] = None,
) -> None:
    expected = [
        AssetMovement(
            location=Location.POLONIEX,
            category=AssetMovementCategory.WITHDRAWAL,
            address='0xB7E033598Cb94EF5A35349316D3A2e4f95f308Da',
            transaction_id=
            '0xbd4da74e1a0b81c21d056c6f58a5b306de85d21ddf89992693b812bb117eace4',
            timestamp=Timestamp(1468994442),
            asset=A_ETH,
            amount=FVal('10.0'),
            fee_asset=A_ETH,
            fee=Fee(FVal('0.1')),
            link='2',
        ),
        AssetMovement(
            location=Location.POLONIEX,
            category=AssetMovementCategory.WITHDRAWAL,
            address='131rdg5Rzn6BFufnnQaHhVa5ZtRU1J2EZR',
            transaction_id=
            '2d27ae26fa9c70d6709e27ac94d4ce2fde19b3986926e9f3bfcf3e2d68354ec5',
            timestamp=Timestamp(1458994442),
            asset=A_BTC,
            amount=FVal('5.0'),
            fee_asset=A_BTC,
            fee=Fee(FVal('0.5')),
            link='1',
        ),
        AssetMovement(
            location=Location.POLONIEX,
            category=AssetMovementCategory.DEPOSIT,
            address='131rdg5Rzn6BFufnnQaHhVa5ZtRU1J2EZR',
            transaction_id=
            'b05bdec7430a56b5a5ed34af4a31a54859dda9b7c88a5586bc5d6540cdfbfc7a',
            timestamp=Timestamp(1448994442),
            asset=A_BTC,
            amount=FVal('50.0'),
            fee_asset=A_BTC,
            fee=Fee(FVal('0')),
            link='1',
        ),
        AssetMovement(
            location=Location.POLONIEX,
            category=AssetMovementCategory.DEPOSIT,
            address='0xB7E033598Cb94EF5A35349316D3A2e4f95f308Da',
            transaction_id=
            '0xf7e7eeb44edcad14c0f90a5fffb1cbb4b80e8f9652124a0838f6906ca939ccd2',
            timestamp=Timestamp(1438994442),
            asset=A_ETH,
            amount=FVal('100.0'),
            fee_asset=A_ETH,
            fee=Fee(FVal('0')),
            link='2',
        )
    ]
    assert_asset_movements(expected, to_check_list, deserialized,
                           movements_to_check)
Example 5
def assert_cryptocom_import_results(rotki: Rotkehlchen):
    """A utility function to help assert on correctness of importing data from crypto.com"""
    trades = rotki.data.db.get_trades()
    asset_movements = rotki.data.db.get_asset_movements()
    warnings = rotki.msg_aggregator.consume_warnings()
    errors = rotki.msg_aggregator.consume_errors()
    assert len(errors) == 0
    assert len(warnings) == 0

    def get_trade_note(desc: str):
        return f'{desc}\nSource: crypto.com (CSV import)'

    expected_trades = [
        Trade(
            timestamp=Timestamp(1595833195),
            location=Location.CRYPTOCOM,
            pair=TradePair('ETH_EUR'),
            trade_type=TradeType.BUY,
            amount=AssetAmount(FVal('1.0')),
            rate=Price(FVal('281.14')),
            fee=Fee(ZERO),
            fee_currency=A_USD,
            link='',
            notes=get_trade_note('Buy ETH'),
        ),
        Trade(
            timestamp=Timestamp(1596014214),
            location=Location.CRYPTOCOM,
            pair=TradePair('MCO_EUR'),
            trade_type=TradeType.BUY,
            amount=AssetAmount(FVal('50.0')),
            rate=Price(FVal('3.521')),
            fee=Fee(ZERO),
            fee_currency=A_USD,
            link='',
            notes=get_trade_note('Buy MCO'),
        ),
        Trade(
            timestamp=Timestamp(1596014223),
            location=Location.CRYPTOCOM,
            pair=TradePair('MCO_USD'),
            trade_type=TradeType.BUY,
            amount=AssetAmount(FVal('12.32402069')),
            rate=Price(FVal('4.057117499045678736198226879')),
            fee=Fee(ZERO),
            fee_currency=A_USD,
            link='',
            notes=get_trade_note('Sign-up Bonus Unlocked'),
        ),
        Trade(
            timestamp=Timestamp(1596209827),
            location=Location.CRYPTOCOM,
            pair=TradePair('ETH_MCO'),
            trade_type=TradeType.BUY,
            amount=AssetAmount(FVal('0.14445954600007045')),
            rate=Price(FVal('85.28339137929999991192917299')),
            fee=Fee(ZERO),
            fee_currency=A_USD,
            link='',
            notes=get_trade_note('MCO -> ETH'),
        ),
        Trade(
            timestamp=Timestamp(1596429934),
            location=Location.CRYPTOCOM,
            pair=TradePair('ETH_EUR'),
            trade_type=TradeType.BUY,
            amount=AssetAmount(FVal('0.00061475')),
            rate=Price(FVal('309.0687271248474989833265555')),
            fee=Fee(ZERO),
            fee_currency=A_USD,
            link='',
            notes=get_trade_note('Crypto Earn'),
        ),
        Trade(
            timestamp=Timestamp(1596465565),
            location=Location.CRYPTOCOM,
            pair=TradePair('CRO_MCO'),
            trade_type=TradeType.BUY,
            amount=AssetAmount(FVal('1382.306147552291')),
            rate=Price(FVal('27.6439')),
            fee=Fee(ZERO),
            fee_currency=A_USD,
            link='',
            notes=get_trade_note('MCO/CRO Overall Swap'),
        ),
        Trade(
            timestamp=Timestamp(1596730165),
            location=Location.CRYPTOCOM,
            pair=TradePair('CRO_MCO'),
            trade_type=TradeType.BUY,
            amount=AssetAmount(FVal('1301.64')),
            rate=Price(FVal('26.0328')),
            fee=Fee(ZERO),
            fee_currency=A_USD,
            link='',
            notes=get_trade_note('MCO/CRO Overall Swap'),
        ),
        Trade(
            timestamp=Timestamp(1599934176),
            location=Location.CRYPTOCOM,
            pair=TradePair('CRO_EUR'),
            trade_type=TradeType.BUY,
            amount=AssetAmount(FVal('138.256')),
            rate=Price(FVal('0.1429232727693553986807082514')),
            fee=Fee(ZERO),
            fee_currency=A_USD,
            link='',
            notes=get_trade_note('Card Rebate: Deliveries'),
        ),
        Trade(
            timestamp=Timestamp(1602515376),
            location=Location.CRYPTOCOM,
            pair=TradePair('CRO_EUR'),
            trade_type=TradeType.BUY,
            amount=AssetAmount(FVal('52.151')),
            rate=Price(FVal('0.06692105616383194953116910510')),
            fee=Fee(ZERO),
            fee_currency=A_USD,
            link='',
            notes=get_trade_note('Card Cashback'),
        ),
        Trade(
            timestamp=Timestamp(1602526176),
            location=Location.CRYPTOCOM,
            pair=TradePair('CRO_EUR'),
            trade_type=TradeType.BUY,
            amount=AssetAmount(FVal('482.2566417')),
            rate=Price(FVal('0.08756748243245604635910191136')),
            fee=Fee(ZERO),
            fee_currency=A_USD,
            link='',
            notes=get_trade_note('Referral Bonus Reward'),
        ),
        Trade(
            timestamp=Timestamp(1606833565),
            location=Location.CRYPTOCOM,
            pair=TradePair('CRO_DAI'),
            trade_type=TradeType.BUY,
            amount=AssetAmount(FVal('0.007231228760408149')),
            rate=Price(FVal('14.26830000900286970270179629')),
            fee=Fee(ZERO),
            fee_currency=A_USD,
            link='',
            notes=get_trade_note('Convert Dust'),
        ),
        Trade(
            timestamp=Timestamp(1608024314),
            location=Location.CRYPTOCOM,
            pair=TradePair('CRO_UNI'),
            trade_type=TradeType.BUY,
            amount=AssetAmount(FVal('105.9475889306405164438345865')),
            rate=Price(FVal('144.1809293808791657040427665')),
            fee=Fee(ZERO),
            fee_currency=A_USD,
            link='',
            notes=get_trade_note('Convert Dust'),
        ),
        Trade(
            timestamp=Timestamp(1608024314),
            location=Location.CRYPTOCOM,
            pair=TradePair('CRO_DOT'),
            trade_type=TradeType.BUY,
            amount=AssetAmount(FVal('87.08021007997850666616541352')),
            rate=Price(FVal('306.6322128582378511862892551')),
            fee=Fee(ZERO),
            fee_currency=A_USD,
            link='',
            notes=get_trade_note('Convert Dust'),
        )
    ]
    assert expected_trades == trades

    expected_movements = [
        AssetMovement(
            location=Location.CRYPTOCOM,
            category=AssetMovementCategory.DEPOSIT,
            timestamp=Timestamp(1596992965),
            address=None,
            transaction_id=None,
            asset=A_DAI,
            amount=AssetAmount(FVal('115')),
            fee_asset=A_DAI,
            fee=Fee(ZERO),
            link='',
        ),
        AssetMovement(
            location=Location.CRYPTOCOM,
            category=AssetMovementCategory.WITHDRAWAL,
            address=None,
            transaction_id=None,
            timestamp=Timestamp(1596993025),
            asset=A_DAI,
            amount=AssetAmount(FVal('115')),
            fee_asset=A_DAI,
            fee=Fee(ZERO),
            link='',
        )
    ]
    assert expected_movements == asset_movements
Example 6
ADDR1 = deserialize_ethereum_address('0xfeF0E7635281eF8E3B705e9C5B86e1d3B0eAb397')
ADDR2 = deserialize_ethereum_address('0x00F8a0D8EE1c21151BCcB416bCa1C152f9952D19')
ADDR3 = deserialize_ethereum_address('0x3266F3546a1e5Dc6A15588f3324741A0E20a3B6c')

# List of ADDR1, ADDR2 and ADDR3 deposit events from 1604506685 to 1605044577
# sorted by (timestamp, log_index).
EXPECTED_DEPOSITS = [
    Eth2Deposit(
        from_address=ADDR1,
        pubkey='0xb016e31f633a21fbe42a015152399361184f1e2c0803d89823c224994af74a561c4ad8cfc94b18781d589d03e952cd5b',  # noqa: E501
        withdrawal_credentials='0x004c7691c2085648f394ffaef851f3b1d51b95f7263114bc923fc5338f5fc499',  # noqa: E501
        value=Balance(FVal(32), FVal(64)),
        deposit_index=9,
        tx_hash='0xd9eca1c2a0c5ff2f25071713432b21cc4d0ff2e8963edc63a48478e395e08db1',
        log_index=22,
        timestamp=Timestamp(int(1604506685)),
    ),
    Eth2Deposit(
        from_address=ADDR3,
        pubkey='0x90b2f65cb43d9cdb2279af9f76010d667b9d8d72e908f2515497a7102820ce6bb15302fe2b8dc082fce9718569344ad8',  # noqa: E501
        withdrawal_credentials='0x00a257d19e1650dec1ab59fc9e1cb9a9fc2fe7265b0f27e7d79ff61aeff0a1f0',  # noqa: E501
        value=Balance(FVal(32), FVal(64)),
        deposit_index=993,
        tx_hash='0x3403bd94a1bf185ee18a525499e408a1b9b7d801cff6418e31efda346762e754',
        log_index=266,
        timestamp=Timestamp(int(1604611131)),
    ),
    Eth2Deposit(
        from_address=ADDR3,
        pubkey='0xb4610a24815f1874a12eba7ea9b77126ca16c0aa29a127ba14ba4ee179834f4feb0aa4497baaa50985ad748d15a286cf',  # noqa: E501
        withdrawal_credentials='0x00f7ce43bfb18009abe0e8e5b3a8c0da3c014bc80e4a0a8dccda647f48ea8547',  # noqa: E501
Example 7
    def process_history(
        self,
        start_ts: Timestamp,
        end_ts: Timestamp,
        trade_history: List[Union[Trade, MarginPosition, AMMTrade]],
        loan_history: List[Loan],
        asset_movements: List[AssetMovement],
        eth_transactions: List[EthereumTransaction],
        defi_events: List[DefiEvent],
    ) -> Dict[str, Any]:
        """Processes the entire history of cryptoworld actions in order to determine
        the price and time at which every asset was obtained and also
        the general and taxable profit/loss.

        start_ts here is the timestamp at which to start taking trades and other
        taxable events into account. Not where processing starts from. Processing
        always starts from the very first event we find in the history.
        """
        log.info(
            'Start of history processing',
            start_ts=start_ts,
            end_ts=end_ts,
        )
        self.events.reset(start_ts, end_ts)
        self.last_gas_price = 2000000000
        self.start_ts = start_ts
        self.eth_transactions_gas_costs = FVal(0)
        self.asset_movement_fees = FVal(0)
        self.csvexporter.reset_csv_lists()

        # Ask the DB for the settings once at the start of processing so we get
        # the same settings throughout the entire task
        db_settings = self.db.get_settings()
        self._customize(db_settings)

        actions: List[TaxableAction] = list(trade_history)
        # If we got loans, we need to interleave them with the full history and re-sort
        if len(loan_history) != 0:
            actions.extend(loan_history)

        if len(asset_movements) != 0:
            actions.extend(asset_movements)

        if len(eth_transactions) != 0:
            actions.extend(eth_transactions)

        if len(defi_events) != 0:
            actions.extend(defi_events)

        actions.sort(key=action_get_timestamp)
        # The first ts is the ts of the first action we have in history or 0 for empty history
        first_ts = Timestamp(0) if len(actions) == 0 else action_get_timestamp(
            actions[0])
        self.currently_processing_timestamp = first_ts
        self.started_processing_timestamp = first_ts

        prev_time = Timestamp(0)
        count = 0
        for action in actions:
            try:
                (
                    should_continue,
                    prev_time,
                ) = self.process_action(action, end_ts, prev_time, db_settings)
            except PriceQueryUnsupportedAsset as e:
                ts = action_get_timestamp(action)
                self.msg_aggregator.add_error(
                    f'Skipping action at '
                    f' {timestamp_to_date(ts, formatstr="%d/%m/%Y, %H:%M:%S")} '
                    f'during history processing due to an asset unknown to '
                    f'cryptocompare being involved. Check logs for details',
                )
                log.error(
                    f'Skipping action {str(action)} during history processing due to '
                    f'cryptocompare not supporting an involved asset: {str(e)}',
                )
                continue
            except NoPriceForGivenTimestamp as e:
                ts = action_get_timestamp(action)
                self.msg_aggregator.add_error(
                    f'Skipping action at '
                    f' {timestamp_to_date(ts, formatstr="%d/%m/%Y, %H:%M:%S")} '
                    f'during history processing due to inability to find a price '
                    f'at that point in time: {str(e)}. Check the logs for more details',
                )
                log.error(
                    f'Skipping action {str(action)} during history processing due to '
                    f'inability to query a price at that time: {str(e)}',
                )
                continue
            except RemoteError as e:
                ts = action_get_timestamp(action)
                self.msg_aggregator.add_error(
                    f'Skipping action at '
                    f' {timestamp_to_date(ts, formatstr="%d/%m/%Y, %H:%M:%S")} '
                    f'during history processing due to inability to reach an external '
                    f'service at that point in time: {str(e)}. Check the logs for more details',
                )
                log.error(
                    f'Skipping action {str(action)} during history processing due to '
                    f'inability to reach an external service at that time: {str(e)}',
                )
                continue

            if not should_continue:
                break

            if count % 500 == 0:
                # This loop can take a very long time depending on the amount of actions
                # to process. We need to yield to other greenlets or else calls to the
                # API may time out
                gevent.sleep(0.5)
            count += 1

        self.events.calculate_asset_details()
        Inquirer().save_historical_forex_data()

        sum_other_actions = (self.events.margin_positions_profit_loss +
                             self.events.defi_profit_loss +
                             self.events.loan_profit -
                             self.events.settlement_losses -
                             self.asset_movement_fees -
                             self.eth_transactions_gas_costs)
        total_taxable_pl = self.events.taxable_trade_profit_loss + sum_other_actions
        return {
            'overview': {
                'defi_profit_loss':
                str(self.events.defi_profit_loss),
                'loan_profit':
                str(self.events.loan_profit),
                'margin_positions_profit_loss':
                str(self.events.margin_positions_profit_loss),
                'settlement_losses':
                str(self.events.settlement_losses),
                'ethereum_transaction_gas_costs':
                str(self.eth_transactions_gas_costs),
                'asset_movement_fees':
                str(self.asset_movement_fees),
                'general_trade_profit_loss':
                str(self.events.general_trade_profit_loss),
                'taxable_trade_profit_loss':
                str(self.events.taxable_trade_profit_loss),
                'total_taxable_profit_loss':
                str(total_taxable_pl),
                'total_profit_loss':
                str(self.events.general_trade_profit_loss + sum_other_actions),
            },
            'all_events': self.csvexporter.all_events,
        }
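
A small sketch (not rotki's actual types) of the interleave-and-sort step performed before the main loop above: heterogeneous action lists are merged into one list and ordered chronologically so processing starts from the earliest event.

from dataclasses import dataclass
from typing import List

@dataclass
class Trade:  # hypothetical stand-in carrying only a timestamp
    timestamp: int

@dataclass
class Loan:  # hypothetical stand-in carrying only a timestamp
    timestamp: int

def merge_actions(*action_lists: List) -> List:
    # Flatten all action lists into one and sort chronologically
    actions = [action for lst in action_lists for action in lst]
    actions.sort(key=lambda action: action.timestamp)
    return actions

print(merge_actions([Trade(30), Trade(10)], [Loan(20)]))
# [Trade(timestamp=10), Loan(timestamp=20), Trade(timestamp=30)]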
Example 8
        assert detect_sqlcipher_version() == 4

        sql_mock.return_value = ConnectionMock('5.10.13 somethingelse')
        assert detect_sqlcipher_version() == 5

        sql_mock.return_value = ConnectionMock('3.1.15 somethingelse')
        assert detect_sqlcipher_version() == 3

        with pytest.raises(ValueError):
            sql_mock.return_value = ConnectionMock('no version')
            detect_sqlcipher_version()


asset_balances = [
    AssetBalance(
        time=Timestamp(1451606400),
        asset=A_USD,
        amount='10',
        usd_value='10',
    ), AssetBalance(
        time=Timestamp(1451606401),
        asset=A_ETH,
        amount='2',
        usd_value='1.7068',
    ), AssetBalance(
        time=Timestamp(1465171200),
        asset=A_USD,
        amount='500',
        usd_value='500',
    ), AssetBalance(
        time=Timestamp(1465171201),
Example 9
def test_add_ethereum_transactions(data_dir, username):
    """Test that adding and retrieving ethereum transactions from the DB works fine.

    Also duplicates should be ignored and no error returned
    """
    msg_aggregator = MessagesAggregator()
    data = DataHandler(data_dir, msg_aggregator)
    data.unlock(username, '123', create_new=True)

    tx1 = EthereumTransaction(
        tx_hash=b'1',
        timestamp=Timestamp(1451606400),
        block_number=1,
        from_address=ETH_ADDRESS1,
        to_address=ETH_ADDRESS3,
        value=FVal('2000000'),
        gas=FVal('5000000'),
        gas_price=FVal('2000000000'),
        gas_used=FVal('25000000'),
        input_data=MOCK_INPUT_DATA,
        nonce=1,
    )
    tx2 = EthereumTransaction(
        tx_hash=b'2',
        timestamp=Timestamp(1451706400),
        block_number=3,
        from_address=ETH_ADDRESS2,
        to_address=ETH_ADDRESS3,
        value=FVal('4000000'),
        gas=FVal('5000000'),
        gas_price=FVal('2000000000'),
        gas_used=FVal('25000000'),
        input_data=MOCK_INPUT_DATA,
        nonce=1,
    )
    tx3 = EthereumTransaction(
        tx_hash=b'3',
        timestamp=Timestamp(1452806400),
        block_number=5,
        from_address=ETH_ADDRESS3,
        to_address=ETH_ADDRESS1,
        value=FVal('1000000'),
        gas=FVal('5000000'),
        gas_price=FVal('2000000000'),
        gas_used=FVal('25000000'),
        input_data=MOCK_INPUT_DATA,
        nonce=3,
    )

    # Add and retrieve the first 2 transactions. All should be fine.
    data.db.add_ethereum_transactions([tx1, tx2], from_etherscan=True)
    errors = msg_aggregator.consume_errors()
    warnings = msg_aggregator.consume_warnings()
    assert len(errors) == 0
    assert len(warnings) == 0
    returned_transactions = data.db.get_ethereum_transactions()
    assert returned_transactions == [tx1, tx2]

    # Add the last 2 transactions. Since tx2 already exists in the DB it should be
    # ignored (no errors shown for attempting to add already existing transaction)
    data.db.add_ethereum_transactions([tx2, tx3], from_etherscan=True)
    errors = msg_aggregator.consume_errors()
    warnings = msg_aggregator.consume_warnings()
    assert len(errors) == 0
    assert len(warnings) == 0
    returned_transactions = data.db.get_ethereum_transactions()
    assert returned_transactions == [tx1, tx2, tx3]
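
A minimal sketch of how the duplicate-ignoring behaviour this test relies on can be implemented with SQLite's INSERT OR IGNORE; the table and columns here are illustrative, not rotki's actual schema.

import sqlite3

conn = sqlite3.connect(':memory:')
conn.execute('CREATE TABLE ethereum_transactions (tx_hash BLOB PRIMARY KEY, timestamp INTEGER)')
conn.executemany(
    'INSERT OR IGNORE INTO ethereum_transactions VALUES (?, ?)',
    [(b'1', 1451606400), (b'2', 1451706400)],
)
# Re-inserting tx 2 together with tx 3 silently skips the duplicate
conn.executemany(
    'INSERT OR IGNORE INTO ethereum_transactions VALUES (?, ?)',
    [(b'2', 1451706400), (b'3', 1452806400)],
)
print(conn.execute('SELECT COUNT(*) FROM ethereum_transactions').fetchone()[0])  # 3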
Example 10
TEST_ACCOUNTS = [
    # For mint/redeem
    '0x2B888954421b424C5D3D9Ce9bB67c9bD47537d12',
    # For borrowing/liquidations
    '0xC440f3C87DC4B6843CABc413916220D4f4FeD117',
    # For mint/redeem + comp
    '0xF59D4937BF1305856C3a267bB07791507a3377Ee',
    # For repay
    '0x65304d6aff5096472519ca86a6a1fea31cb47Ced',
]

EXPECTED_EVENTS = [CompoundEvent(
    event_type='mint',
    address=deserialize_ethereum_address('0x2B888954421b424C5D3D9Ce9bB67c9bD47537d12'),
    block_number=9443573,
    timestamp=Timestamp(1581184577),
    asset=A_DAI,
    value=Balance(amount=FVal('2988.4343'), usd_value=FVal('3012.3417744')),
    to_asset=A_CDAI,
    to_value=Balance(amount=FVal('148015.6966153'), usd_value=FVal('3012.3417744')),
    realized_pnl=None,
    tx_hash='0xacc2e21f911a4e438966694e9ad16747878a15dae52de62a09f1ebabc8b26c8d',
    log_index=130,
), CompoundEvent(
    event_type='redeem',
    address=deserialize_ethereum_address('0x2B888954421b424C5D3D9Ce9bB67c9bD47537d12'),
    block_number=9533397,
    timestamp=Timestamp(1582378248),
    asset=A_CDAI,
    value=Balance(amount=FVal('148015.6966153'), usd_value=FVal('3075.319825609865034570156')),
    to_asset=A_DAI,
Example 11
    def get_history(
        self,
        start_ts: Timestamp,
        end_ts: Timestamp,
        has_premium: bool,
    ) -> HistoryResult:
        """Creates trades and loans history from start_ts to end_ts"""
        self._reset_variables()
        step = 0
        total_steps = (
            len(self.exchange_manager.connected_exchanges) +
            NUM_HISTORY_QUERY_STEPS_EXCL_EXCHANGES
        )
        log.info(
            'Get/create trade history',
            start_ts=start_ts,
            end_ts=end_ts,
        )
        # start creating the all trades history list
        history: List[Union[Trade, MarginPosition, AMMTrade]] = []
        asset_movements = []
        loans = []
        empty_or_error = ''

        def populate_history_cb(
            trades_history: List[Trade],
            margin_history: List[MarginPosition],
            result_asset_movements: List[AssetMovement],
            exchange_specific_data: Any,
        ) -> None:
            """This callback will run for succesfull exchange history query"""
            history.extend(trades_history)
            history.extend(margin_history)
            asset_movements.extend(result_asset_movements)

            if exchange_specific_data:
                # This can only be poloniex at the moment
                polo_loans_data = exchange_specific_data
                loans.extend(
                    process_polo_loans(
                        msg_aggregator=self.msg_aggregator,
                        data=polo_loans_data,
                        # We need to have history of loans since before the range
                        start_ts=Timestamp(0),
                        end_ts=end_ts,
                    ))

        def fail_history_cb(error_msg: str) -> None:
            """This callback will run for failure in exchange history query"""
            nonlocal empty_or_error
            empty_or_error += '\n' + error_msg

        for exchange in self.exchange_manager.iterate_exchanges():
            self.processing_state_name = (
                f'Querying {str(exchange.location)} {exchange.name} exchange history'
            )
            exchange.query_history_with_callbacks(
                # We need to have history of exchanges since before the range
                start_ts=Timestamp(0),
                end_ts=end_ts,
                success_callback=populate_history_cb,
                fail_callback=fail_history_cb,
            )
            step = self._increase_progress(step, total_steps)

        try:
            self.processing_state_name = 'Querying ethereum transactions history'
            eth_transactions = self.chain_manager.ethereum.transactions.query(
                addresses=None,  # all addresses
                # We need to have history of transactions since before the range
                from_ts=Timestamp(0),
                to_ts=end_ts,
                with_limit=False,  # at the moment ignore the limit for historical processing
                recent_first=False,  # for history processing we need oldest first
            )
        except RemoteError as e:
            eth_transactions = []
            msg = str(e)
            self.msg_aggregator.add_error(
                f'There was an error when querying etherscan for ethereum transactions: {msg}. '
                f'The final history result will not include ethereum transactions',
            )
            empty_or_error += '\n' + msg
        step = self._increase_progress(step, total_steps)

        # Include the external trades in the history
        self.processing_state_name = 'Querying external trades history'
        external_trades = self.db.get_trades(
            # We need to have history of trades since before the range
            from_ts=Timestamp(0),
            to_ts=end_ts,
            location=Location.EXTERNAL,
        )
        history.extend(external_trades)
        step = self._increase_progress(step, total_steps)

        # include the ledger actions
        self.processing_state_name = 'Querying ledger actions history'
        ledger_actions, _ = self.query_ledger_actions(has_premium,
                                                      from_ts=None,
                                                      to_ts=end_ts)
        step = self._increase_progress(step, total_steps)

        # include AMM trades: balancer, uniswap
        for amm_location in AMMTradeLocations:
            amm_module_name = cast(AMMTRADE_LOCATION_NAMES, str(amm_location))
            amm_module = self.chain_manager.get_module(amm_module_name)
            if has_premium and amm_module:
                self.processing_state_name = f'Querying {amm_module_name} trade history'
                amm_module_trades = amm_module.get_trades(
                    addresses=self.chain_manager.queried_addresses_for_module(
                        amm_module_name),
                    from_timestamp=Timestamp(0),
                    to_timestamp=end_ts,
                    only_cache=False,
                )
                history.extend(amm_module_trades)
            step = self._increase_progress(step, total_steps)

        # Include makerdao DSR gains
        defi_events = []
        makerdao_dsr = self.chain_manager.get_module('makerdao_dsr')
        if makerdao_dsr and has_premium:
            self.processing_state_name = 'Querying makerDAO DSR history'
            defi_events.extend(
                makerdao_dsr.get_history_events(
                    # we need to process all events from history start
                    from_timestamp=Timestamp(0),
                    to_timestamp=end_ts,
                ))
        step = self._increase_progress(step, total_steps)

        # Include makerdao vault events
        makerdao_vaults = self.chain_manager.get_module('makerdao_vaults')
        if makerdao_vaults and has_premium:
            self.processing_state_name = 'Querying makerDAO vaults history'
            defi_events.extend(
                makerdao_vaults.get_history_events(
                    # we need to process all events from history start
                    from_timestamp=Timestamp(0),
                    to_timestamp=end_ts,
                ))
        step = self._increase_progress(step, total_steps)

        # include yearn vault events
        yearn_vaults = self.chain_manager.get_module('yearn_vaults')
        if yearn_vaults and has_premium:
            self.processing_state_name = 'Querying yearn vaults history'
            defi_events.extend(
                yearn_vaults.get_history_events(
                    # we need to process all events from history start
                    from_timestamp=Timestamp(0),
                    to_timestamp=end_ts,
                    addresses=self.chain_manager.queried_addresses_for_module(
                        'yearn_vaults'),
                ))
        step = self._increase_progress(step, total_steps)

        # include compound events
        compound = self.chain_manager.get_module('compound')
        if compound and has_premium:
            self.processing_state_name = 'Querying compound history'
            defi_events.extend(
                compound.get_history_events(
                    # we need to process all events from history start
                    from_timestamp=Timestamp(0),
                    to_timestamp=end_ts,
                    addresses=self.chain_manager.queried_addresses_for_module(
                        'compound'),
                ))
        step = self._increase_progress(step, total_steps)

        # include adex events
        adex = self.chain_manager.get_module('adex')
        if adex is not None and has_premium:
            self.processing_state_name = 'Querying adex staking history'
            defi_events.extend(
                adex.get_history_events(
                    from_timestamp=start_ts,
                    to_timestamp=end_ts,
                    addresses=self.chain_manager.queried_addresses_for_module(
                        'adex'),
                ))
        step = self._increase_progress(step, total_steps)

        # include aave events
        aave = self.chain_manager.get_module('aave')
        if aave is not None and has_premium:
            self.processing_state_name = 'Querying aave history'
            defi_events.extend(
                aave.get_history_events(
                    from_timestamp=start_ts,
                    to_timestamp=end_ts,
                    addresses=self.chain_manager.queried_addresses_for_module(
                        'aave'),
                ))
        step = self._increase_progress(step, total_steps)

        # include eth2 staking events
        eth2 = self.chain_manager.get_module('eth2')
        if eth2 is not None and has_premium:
            self.processing_state_name = 'Querying ETH2 staking history'
            defi_events.extend(
                self.chain_manager.get_eth2_history_events(
                    from_timestamp=start_ts,
                    to_timestamp=end_ts,
                ))
        self._increase_progress(step, total_steps)

        history.sort(key=action_get_timestamp)
        return (
            empty_or_error,
            history,
            loans,
            asset_movements,
            eth_transactions,
            defi_events,
            ledger_actions,
        )
Example 12
    def _get_trades_graph_for_address(
        self,
        address: ChecksumEthAddress,
        start_ts: Timestamp,
        end_ts: Timestamp,
    ) -> List[AMMTrade]:
        """Get the address' trades data querying the Uniswap subgraph

        Each trade (swap) instantiates an <AMMTrade>.

        The trade pair (i.e. BASE_QUOTE) is determined by `reserve0_reserve1`.
        Translated to Uniswap lingo:

        Trade type BUY:
        - `asset1In` (QUOTE, reserve1) is gt 0.
        - `asset0Out` (BASE, reserve0) is gt 0.

        Trade type SELL:
        - `asset0In` (BASE, reserve0) is gt 0.
        - `asset1Out` (QUOTE, reserve1) is gt 0.
        """
        trades: List[AMMTrade] = []
        param_types = {
            '$limit': 'Int!',
            '$offset': 'Int!',
            '$address': 'Bytes!',
            '$start_ts': 'BigInt!',
            '$end_ts': 'BigInt!',
        }
        param_values = {
            'limit': GRAPH_QUERY_LIMIT,
            'offset': 0,
            'address': address.lower(),
            'start_ts': str(start_ts),
            'end_ts': str(end_ts),
        }
        querystr = format_query_indentation(SWAPS_QUERY.format())

        while True:
            result = self.graph.query(  # type: ignore # caller already checks
                querystr=querystr,
                param_types=param_types,
                param_values=param_values,
            )
            result_data = result['swaps']
            for entry in result_data:
                swaps = []
                for swap in entry['transaction']['swaps']:
                    timestamp = swap['timestamp']
                    swap_token0 = swap['pair']['token0']
                    swap_token1 = swap['pair']['token1']
                    token0 = get_ethereum_token(
                        symbol=swap_token0['symbol'],
                        ethereum_address=to_checksum_address(swap_token0['id']),
                        name=swap_token0['name'],
                        decimals=int(swap_token0['decimals']),
                    )
                    token1 = get_ethereum_token(
                        symbol=swap_token1['symbol'],
                        ethereum_address=to_checksum_address(
                            swap_token1['id']),
                        name=swap_token1['name'],
                        decimals=int(swap_token1['decimals']),
                    )
                    amount0_in = FVal(swap['amount0In'])
                    amount1_in = FVal(swap['amount1In'])
                    amount0_out = FVal(swap['amount0Out'])
                    amount1_out = FVal(swap['amount1Out'])
                    swaps.append(
                        AMMSwap(
                            tx_hash=swap['id'].split('-')[0],
                            log_index=int(swap['logIndex']),
                            address=address,
                            from_address=to_checksum_address(swap['sender']),
                            to_address=to_checksum_address(swap['to']),
                            timestamp=Timestamp(int(timestamp)),
                            location=Location.UNISWAP,
                            token0=token0,
                            token1=token1,
                            amount0_in=AssetAmount(amount0_in),
                            amount1_in=AssetAmount(amount1_in),
                            amount0_out=AssetAmount(amount0_out),
                            amount1_out=AssetAmount(amount1_out),
                        ))

                # Now that we got all swaps for a transaction, create the trade object
                trades.extend(self._tx_swaps_to_trades(swaps))

            # Check whether an extra request is needed
            if len(result_data) < GRAPH_QUERY_LIMIT:
                break

            # Update pagination step
            param_values = {
                **param_values,
                'offset': param_values['offset'] + GRAPH_QUERY_LIMIT,  # type: ignore
            }
        return trades
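
A simplified sketch of the BASE/QUOTE convention described in the docstring above (token0 = BASE/reserve0, token1 = QUOTE/reserve1), showing how a single swap's amounts map to a trade side. It is illustrative only and is not rotki's _tx_swaps_to_trades logic.

from decimal import Decimal

def classify_swap(amount0_in: Decimal, amount1_in: Decimal,
                  amount0_out: Decimal, amount1_out: Decimal) -> str:
    if amount1_in > 0 and amount0_out > 0:
        return 'BUY'   # quote goes in, base comes out
    if amount0_in > 0 and amount1_out > 0:
        return 'SELL'  # base goes in, quote comes out
    return 'UNKNOWN'

print(classify_swap(Decimal(0), Decimal(100), Decimal(1), Decimal(0)))  # BUY
print(classify_swap(Decimal(1), Decimal(0), Decimal(0), Decimal(100)))  # SELL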
Example 13
    def _get_trades(
        self,
        addresses: List[ChecksumEthAddress],
        from_timestamp: Timestamp,
        to_timestamp: Timestamp,
    ) -> AddressTrades:
        """Request via graph all trades for new addresses and the latest ones
        for already existing addresses. Then the requested trade are written in
        DB and finally all DB trades are read and returned.
        """
        address_amm_trades: AddressTrades = {}
        db_address_trades: AddressTrades = {}
        new_addresses: List[ChecksumEthAddress] = []
        existing_addresses: List[ChecksumEthAddress] = []
        min_end_ts: Timestamp = to_timestamp

        # Get addresses' last used query range for Uniswap trades
        for address in addresses:
            entry_name = f'{UNISWAP_TRADES_PREFIX}_{address}'
            trades_range = self.database.get_used_query_range(name=entry_name)

            if not trades_range:
                new_addresses.append(address)
            else:
                existing_addresses.append(address)
                min_end_ts = min(min_end_ts, trades_range[1])

        # Request new addresses' trades
        if new_addresses:
            start_ts = Timestamp(0)
            new_address_trades = self._get_trades_graph(
                addresses=new_addresses,
                start_ts=start_ts,
                end_ts=to_timestamp,
            )
            address_amm_trades.update(new_address_trades)

            # Insert last used query range for new addresses
            for address in new_addresses:
                entry_name = f'{UNISWAP_TRADES_PREFIX}_{address}'
                self.database.update_used_query_range(
                    name=entry_name,
                    start_ts=start_ts,
                    end_ts=to_timestamp,
                )

        # Request existing DB addresses' trades
        if existing_addresses and min_end_ts <= to_timestamp:
            address_new_trades = self._get_trades_graph(
                addresses=existing_addresses,
                start_ts=min_end_ts,
                end_ts=to_timestamp,
            )
            address_amm_trades.update(address_new_trades)

            # Update last used query range for existing addresses
            for address in existing_addresses:
                entry_name = f'{UNISWAP_TRADES_PREFIX}_{address}'
                self.database.update_used_query_range(
                    name=entry_name,
                    start_ts=min_end_ts,
                    end_ts=to_timestamp,
                )

        # Insert all unique swaps in the DB
        all_swaps = set()
        for address in filter(lambda address: address in address_amm_trades,
                              addresses):
            for trade in address_amm_trades[address]:
                for swap in trade.swaps:
                    all_swaps.add(swap)

        self.database.add_amm_swaps(list(all_swaps))

        # Fetch all DB Uniswap trades within the time range
        for address in addresses:
            db_swaps = self.database.get_amm_swaps(
                from_ts=from_timestamp,
                to_ts=to_timestamp,
                location=Location.UNISWAP,
                address=address,
            )
            db_trades = self.swaps_to_trades(db_swaps)
            if db_trades:
                db_address_trades[address] = db_trades

        return db_address_trades
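
A short sketch of the swap de-duplication step above: swaps are assumed to be hashable (e.g. NamedTuple-like), so collecting them into a set drops duplicates shared across trades before the single add_amm_swaps call. All names here are illustrative stand-ins.

from typing import List, NamedTuple

class Swap(NamedTuple):  # hypothetical, much slimmer than the real AMMSwap
    tx_hash: str
    log_index: int

class TradeStub(NamedTuple):
    swaps: List[Swap]

address_amm_trades = {
    '0xA': [TradeStub(swaps=[Swap('0xabc', 1), Swap('0xdef', 7)])],
    '0xB': [TradeStub(swaps=[Swap('0xabc', 1)])],  # same swap seen again
}
all_swaps = set()
for trades in address_amm_trades.values():
    for trade in trades:
        all_swaps.update(trade.swaps)
print(len(all_swaps))  # 2: the duplicate swap is only stored once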
Example 14
    def _get_events_graph(
        self,
        address: ChecksumEthAddress,
        start_ts: Timestamp,
        end_ts: Timestamp,
        event_type: EventType,
    ) -> List[LiquidityPoolEvent]:
        """Get the address' events (mints & burns) querying the Uniswap subgraph
        Each event data is stored in a <LiquidityPoolEvent>.
        """
        address_events: List[LiquidityPoolEvent] = []
        if event_type == EventType.MINT:
            query = MINTS_QUERY
            query_schema = 'mints'
        elif event_type == EventType.BURN:
            query = BURNS_QUERY
            query_schema = 'burns'
        else:
            log.error(
                f'Unexpected event_type: {event_type}. Skipping events query.')
            return address_events

        param_types = {
            '$limit': 'Int!',
            '$offset': 'Int!',
            '$address': 'Bytes!',
            '$start_ts': 'BigInt!',
            '$end_ts': 'BigInt!',
        }
        param_values = {
            'limit': GRAPH_QUERY_LIMIT,
            'offset': 0,
            'address': address.lower(),
            'start_ts': str(start_ts),
            'end_ts': str(end_ts),
        }
        querystr = format_query_indentation(query.format())

        while True:
            result = self.graph.query(  # type: ignore # caller already checks
                querystr=querystr,
                param_types=param_types,
                param_values=param_values,
            )
            result_data = result[query_schema]

            for event in result_data:
                token0_ = event['pair']['token0']
                token1_ = event['pair']['token1']
                token0 = get_ethereum_token(
                    symbol=token0_['symbol'],
                    ethereum_address=to_checksum_address(token0_['id']),
                    name=token0_['name'],
                    decimals=int(token0_['decimals']),
                )
                token1 = get_ethereum_token(
                    symbol=token1_['symbol'],
                    ethereum_address=to_checksum_address(token1_['id']),
                    name=token1_['name'],
                    decimals=int(token1_['decimals']),
                )
                lp_event = LiquidityPoolEvent(
                    tx_hash=event['transaction']['id'],
                    log_index=int(event['logIndex']),
                    address=address,
                    timestamp=Timestamp(int(event['timestamp'])),
                    event_type=event_type,
                    pool_address=to_checksum_address(event['pair']['id']),
                    token0=token0,
                    token1=token1,
                    amount0=AssetAmount(FVal(event['amount0'])),
                    amount1=AssetAmount(FVal(event['amount1'])),
                    usd_price=Price(FVal(event['amountUSD'])),
                    lp_amount=AssetAmount(FVal(event['liquidity'])),
                )
                address_events.append(lp_event)

            # Check whether an extra request is needed
            if len(result_data) < GRAPH_QUERY_LIMIT:
                break

            # Update pagination step
            param_values = {
                **param_values,
                'offset': param_values['offset'] + GRAPH_QUERY_LIMIT,  # type: ignore
            }

        return address_events
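
Both subgraph helpers above share the same offset-based pagination pattern; below is a condensed sketch with a stubbed page function standing in for self.graph.query, under that assumption.

from typing import Callable, List

GRAPH_QUERY_LIMIT = 1000  # same constant name as used above

def fetch_all_pages(query_page: Callable[[int, int], List], limit: int = GRAPH_QUERY_LIMIT) -> List:
    # Keep requesting pages until one comes back shorter than the limit,
    # which signals there is nothing left to paginate over
    results: List = []
    offset = 0
    while True:
        page = query_page(limit, offset)
        results.extend(page)
        if len(page) < limit:
            break
        offset += limit
    return results

data = list(range(2500))
fetched = fetch_all_pages(lambda limit, offset: data[offset:offset + limit])
print(len(fetched))  # 2500, gathered over three requests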
Example 15
    def _get_staking_events(
        self,
        addresses: List[ChecksumEthAddress],
        from_timestamp: Timestamp,
        to_timestamp: Timestamp,
        fee_rewards: FeeRewards,
    ) -> ADXStakingEvents:
        """Given a list of addresses returns all their staking events within
        the given time range. The returned events are grouped by type in
        <ADXStakingEvents>.

        For new addresses it requests all the events via subgraph.
        For existing addresses it requests all the events since the latest
        request timestamp (the minimum timestamp among all the existing
        addresses).
        """
        new_addresses: List[ChecksumEthAddress] = []
        existing_addresses: List[ChecksumEthAddress] = []
        min_from_timestamp: Timestamp = to_timestamp

        # Get addresses' last used query range for AdEx events
        for address in addresses:
            entry_name = f'{ADEX_EVENTS_PREFIX}_{address}'
            events_range = self.database.get_used_query_range(name=entry_name)

            if not events_range:
                new_addresses.append(address)
            else:
                existing_addresses.append(address)
                min_from_timestamp = min(min_from_timestamp, events_range[1])

        # Request new addresses' events
        all_new_events: List[Union[Bond, Unbond, UnbondRequest,
                                   ChannelWithdraw]] = []
        if new_addresses:
            new_events = self._get_new_staking_events_graph(
                addresses=new_addresses,
                from_timestamp=Timestamp(0),
                to_timestamp=to_timestamp,
            )
            all_new_events.extend(new_events)

        # Request existing DB addresses' events
        if existing_addresses and to_timestamp > min_from_timestamp:
            new_events = self._get_new_staking_events_graph(
                addresses=existing_addresses,
                from_timestamp=min_from_timestamp,
                to_timestamp=to_timestamp,
            )
            all_new_events.extend(new_events)

        # Add new events in DB
        if all_new_events:
            new_staking_events = self._get_addresses_staking_events_grouped_by_type(
                events=all_new_events,
                addresses=set(addresses),
            )
            self._update_channel_withdraw_events_token(
                channel_withdraws=new_staking_events.channel_withdraws,
                fee_rewards=fee_rewards,
            )
            self._update_events_value(staking_events=new_staking_events)
            self.database.add_adex_events(new_staking_events.get_all())

        # Fetch all DB events within the time range
        db_events = self.database.get_adex_events(
            from_timestamp=from_timestamp,
            to_timestamp=to_timestamp,
        )
        staking_events = self._get_addresses_staking_events_grouped_by_type(
            events=db_events,
            addresses=set(addresses),
        )
        return staking_events
Example 16
        from_asset=asset,
        to_asset=A_USD,
        timestamp=1584662400,
    )
    assert price == expected_price2
    price = cryptocompare.query_current_price(
        from_asset=asset,
        to_asset=A_USD,
    )
    assert price is not None


@pytest.mark.parametrize(
    'from_asset, to_asset, timestamp, expected_price',
    [
        (A_ETH, A_USD, Timestamp(1592629200), Price(ZERO)),
        (A_COMP, A_COMP, Timestamp(1592629200),
         Price(ZERO)),  # both assets COMP
        (A_USD, A_USD, Timestamp(1592629200), Price(ZERO)),  # both assets USD
        (A_COMP, A_USDT, Timestamp(1592629200), Price(ZERO)),  # to_asset USDT
        (A_USDT, A_COMP, Timestamp(1592629200),
         Price(ZERO)),  # from_asset USDT
        (A_COMP, A_USD, Timestamp(1592629200), Price(FVal('239.13'))),
        (A_USD, A_COMP, Timestamp(1592629200),
         Price(FVal('0.004181825785137791159620290219'))),
        (A_COMP, A_USD, Timestamp(1592629201), Price(ZERO)),  # timestamp gt
        (A_USD, A_COMP, Timestamp(1592629201), Price(ZERO)),  # timestamp gt
    ])
def test_check_and_get_special_histohour_price(
    cryptocompare,
    from_asset,
Example no. 17
def query_ethereum_txlist(
    address: EthAddress,
    internal: bool,
    from_block: Optional[int] = None,
    to_block: Optional[int] = None,
) -> List[EthereumTransaction]:
    log.debug(
        'Querying etherscan for tx list',
        sensitive_log=True,
        internal=internal,
        eth_address=address,
        from_block=from_block,
        to_block=to_block,
    )

    result = list()
    if internal:
        reqstring = ('https://api.etherscan.io/api?module=account&action='
                     'txlistinternal&address={}'.format(address))
    else:
        reqstring = ('https://api.etherscan.io/api?module=account&action='
                     'txlist&address={}'.format(address))
    if from_block:
        reqstring += '&startblock={}'.format(from_block)
    if to_block:
        reqstring += '&endblock={}'.format(to_block)

    resp = request_get_dict(reqstring)

    if 'status' not in resp or convert_to_int(resp['status']) != 1:
        # Guard against a missing 'status' key so this block cannot raise a KeyError
        status = convert_to_int(resp['status']) if 'status' in resp else None
        if status == 0 and resp['message'] == 'No transactions found':
            return list()

        log.error(
            'Querying etherscan for tx list failed',
            sensitive_log=True,
            internal=internal,
            eth_address=address,
            from_block=from_block,
            to_block=to_block,
            error=resp['message'],
        )
        # Unknown error
        raise ValueError(
            'Failed to query txlist from etherscan with query: {}. '
            'Response was: {}'.format(reqstring, resp),
        )

    log.debug('Etherscan tx list query result',
              results_num=len(resp['result']))
    for v in resp['result']:
        # internal tx list contains no gasprice
        gas_price = FVal(-1) if internal else FVal(v['gasPrice'])
        result.append(
            EthereumTransaction(
                timestamp=Timestamp(convert_to_int(v['timeStamp'])),
                block_number=convert_to_int(v['blockNumber']),
                hash=v['hash'],
                from_address=v['from'],
                to_address=v['to'],
                value=FVal(v['value']),
                gas=FVal(v['gas']),
                gas_price=gas_price,
                gas_used=FVal(v['gasUsed']),
            ))

    return result
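As an aside on the query assembly above, here is a standalone sketch of the same URL construction with placeholder values (not rotki code). Note that the `if from_block:` check also skips a block number of 0, which is harmless since 0 is effectively Etherscan's default lower bound.

# Standalone sketch of the query-string assembly above; the address is a placeholder.
address = '0x0000000000000000000000000000000000000000'
internal = False
from_block = 5417790
to_block = None

action = 'txlistinternal' if internal else 'txlist'
reqstring = 'https://api.etherscan.io/api?module=account&action={}&address={}'.format(
    action, address,
)
if from_block:
    reqstring += '&startblock={}'.format(from_block)
if to_block:
    reqstring += '&endblock={}'.format(to_block)
print(reqstring)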
Example no. 18
def test_query_online_deposits_withdrawals_case_2(mock_bitfinex):
    """Test pagination logic for asset movements works as expected when a
    request returns a result already processed in the previous request.

    Other things tested:
      - Stop requesting when number of entries is less than limit.

    First request: 2 results
    Second request: 2 results, both movements are repeated from the 1st request.
    Third request: 2 results, first movement is repeated from the 2nd request.
    Fourth request: 1 result

    Movements with id 1 to 4 are expected to be returned.
    """
    api_limit = 2
    mock_bitfinex.first_connection = MagicMock()
    mock_bitfinex.currency_map = {'WBT': 'WBTC'}
    # Deposit WBTC
    movement_1 = """
    [
        1,
        "WBT",
        "Wrapped Bitcoin",
        null,
        null,
        1606899600000,
        1606899700000,
        null,
        null,
        "COMPLETED",
        null,
        null,
        0.26300954,
        -0.00135,
        null,
        null,
        "DESTINATION_ADDRESS",
        null,
        null,
        null,
        "TRANSACTION_ID",
        null
    ]
    """
    # Withdraw WBTC
    movement_2 = """
    [
        2,
        "WBT",
        "Wrapped Bitcoin",
        null,
        null,
        1606901400000,
        1606901500000,
        null,
        null,
        "COMPLETED",
        null,
        null,
        -0.26300954,
        -0.00135,
        null,
        null,
        "DESTINATION_ADDRESS",
        null,
        null,
        null,
        "TRANSACTION_ID",
        null
    ]
    """
    # Withdraw EUR
    movement_3 = """
    [
        3,
        "EUR",
        "Euro",
        null,
        null,
        1606986000000,
        1606986100000,
        null,
        null,
        "COMPLETED",
        null,
        null,
        -0.26300954,
        -0.00135,
        null,
        null,
        "",
        null,
        null,
        null,
        "",
        null
    ]
    """
    # Deposit WBTC
    movement_4 = """
    [
        4,
        "WBT",
        "Wrapped Bitcoin",
        null,
        null,
        1606996800000,
        1606996900000,
        null,
        null,
        "COMPLETED",
        null,
        null,
        0.26300954,
        -0.00135,
        null,
        null,
        "DESTINATION_ADDRESS",
        null,
        null,
        null,
        "TRANSACTION_ID",
        null
    ]
    """

    def get_paginated_response():
        results = [
            f'[{movement_2},{movement_1}]',
            f'[{movement_2},{movement_1}]',
            f'[{movement_3},{movement_2}]',
            f'[{movement_4}]',
        ]
        for result_ in results:
            yield result_

    def mock_api_query_response(endpoint, options):  # pylint: disable=unused-argument
        return MockResponse(HTTPStatus.OK, next(get_response))

    get_response = get_paginated_response()
    api_limit_patch = patch(
        target='rotkehlchen.exchanges.bitfinex.API_MOVEMENTS_MAX_LIMIT',
        new=api_limit,
    )
    api_query_patch = patch.object(
        target=mock_bitfinex,
        attribute='_api_query',
        side_effect=mock_api_query_response,
    )
    with ExitStack() as stack:
        stack.enter_context(api_limit_patch)
        stack.enter_context(api_query_patch)
        asset_movements = mock_bitfinex.query_online_deposits_withdrawals(
            start_ts=Timestamp(0),
            end_ts=Timestamp(int(datetime.now().timestamp())),
        )
        wbtc_fee_asset = Asset('WBTC')
        eur_fee_asset = Asset('EUR')
        expected_asset_movements = [
            AssetMovement(
                timestamp=Timestamp(1606899600),
                location=Location.BITFINEX,
                category=AssetMovementCategory.DEPOSIT,
                address='DESTINATION_ADDRESS',
                transaction_id='TRANSACTION_ID',
                asset=wbtc_fee_asset,
                amount=FVal('0.26300954'),
                fee_asset=wbtc_fee_asset,
                fee=Fee(FVal('0.00135')),
                link=str(1),
            ),
            AssetMovement(
                timestamp=Timestamp(1606901400),
                location=Location.BITFINEX,
                category=AssetMovementCategory.WITHDRAWAL,
                address='DESTINATION_ADDRESS',
                transaction_id='TRANSACTION_ID',
                asset=wbtc_fee_asset,
                amount=FVal('0.26300954'),
                fee_asset=wbtc_fee_asset,
                fee=Fee(FVal('0.00135')),
                link=str(2),
            ),
            AssetMovement(
                timestamp=Timestamp(1606986000),
                location=Location.BITFINEX,
                category=AssetMovementCategory.WITHDRAWAL,
                address=None,
                transaction_id=None,
                asset=eur_fee_asset,
                amount=FVal('0.26300954'),
                fee_asset=eur_fee_asset,
                fee=Fee(FVal('0.00135')),
                link=str(3),
            ),
            AssetMovement(
                timestamp=Timestamp(1606996800),
                location=Location.BITFINEX,
                category=AssetMovementCategory.DEPOSIT,
                address='DESTINATION_ADDRESS',
                transaction_id='TRANSACTION_ID',
                asset=wbtc_fee_asset,
                amount=FVal('0.26300954'),
                fee_asset=wbtc_fee_asset,
                fee=Fee(FVal('0.00135')),
                link=str(4),
            ),
        ]
        assert asset_movements == expected_asset_movements
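The docstring above spells out the rules this paginator has to satisfy: drop entries whose ids were already processed and stop once a page comes back shorter than the API limit. Below is a generic, self-contained sketch of such a dedup-and-advance loop; the names are illustrative and this is not the Bitfinex implementation (which, per the docstring, keeps requesting even when an entire page repeats, whereas the sketch stops early in that case).

from typing import Callable, Dict, List

def paginate_movements(
        fetch_page: Callable[[int, int], List[Dict]],
        limit: int,
) -> List[Dict]:
    """Dedup by id, advance the window to the last timestamp, stop on a short page."""
    results: List[Dict] = []
    seen_ids: set = set()
    start = 0
    while True:
        page = fetch_page(start, limit)
        new_entries = [entry for entry in page if entry['id'] not in seen_ids]
        seen_ids.update(entry['id'] for entry in new_entries)
        results.extend(new_entries)
        if len(page) < limit or not new_entries:
            break  # short page, or nothing new: stop in this simplified sketch
        start = page[-1]['timestamp']  # next request starts at the last seen timestamp
    return results

pages = iter([
    [{'id': 1, 'timestamp': 10}, {'id': 2, 'timestamp': 20}],
    [{'id': 3, 'timestamp': 30}],
])
movements = paginate_movements(lambda start, limit: next(pages), limit=2)
assert [entry['id'] for entry in movements] == [1, 2, 3]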
Example no. 19
def test_get_eth2_staking_deposits_fetch_from_db(  # pylint: disable=unused-argument
        ethereum_manager,
        call_order,
        ethereum_manager_connect_at_start,
        inquirer,
        price_historian,
        freezer,
):
    """
    Test that new on-chain requests for existing addresses require a difference of
    at least REQUEST_DELTA_TS since the last used query range `end_ts`.
    """
    freezer.move_to(datetime.fromtimestamp(EXPECTED_DEPOSITS[0].timestamp))
    ts_now = int(datetime.now().timestamp())  # 1604506685

    database = MagicMock()
    database.get_used_query_range.side_effect = [
        (Timestamp(ts_now - (2 * REQUEST_DELTA_TS)), Timestamp(ts_now)),
        (Timestamp(ts_now - (2 * REQUEST_DELTA_TS)), Timestamp(ts_now)),
        (Timestamp(ts_now - (2 * REQUEST_DELTA_TS)), Timestamp(ts_now)),
    ]
    dbeth2 = MagicMock()
    dbeth2.get_eth2_deposits.side_effect = [
        [],  # no on-chain request, nothing in DB
        [],  # no on-chain request, nothing in DB
        [EXPECTED_DEPOSITS[0]],  # on-chain request, deposit in DB
    ]
    dbeth2_mock = patch('rotkehlchen.chain.ethereum.eth2.DBEth2', return_value=dbeth2)
    with dbeth2_mock, patch(
        'rotkehlchen.chain.ethereum.eth2._get_eth2_staking_deposits_onchain',
    ) as mock_get_eth2_staking_deposits_onchain:
        # 3rd call return
        mock_get_eth2_staking_deposits_onchain.return_value = [EXPECTED_DEPOSITS[0]]

        wait_until_all_nodes_connected(
            ethereum_manager_connect_at_start=ethereum_manager_connect_at_start,
            ethereum=ethereum_manager,
        )
        message_aggregator = MessagesAggregator()

        # First call
        deposit_results_onchain = get_eth2_staking_deposits(
            ethereum=ethereum_manager,
            addresses=[ADDR1],
            msg_aggregator=message_aggregator,
            database=database,
        )
        assert deposit_results_onchain == []
        mock_get_eth2_staking_deposits_onchain.assert_not_called()

        # NB: Move time to ts_now + REQUEST_DELTA_TS - 1s
        freezer.move_to(datetime.fromtimestamp(ts_now + REQUEST_DELTA_TS - 1))

        # Second call
        deposit_results_onchain = get_eth2_staking_deposits(
            ethereum=ethereum_manager,
            addresses=[ADDR1],
            msg_aggregator=message_aggregator,
            database=database,
        )
        assert deposit_results_onchain == []
        mock_get_eth2_staking_deposits_onchain.assert_not_called()

        # NB: Move time to ts_now + REQUEST_DELTA_TS (triggers request)
        freezer.move_to(datetime.fromtimestamp(ts_now + REQUEST_DELTA_TS))

        # Third call
        deposit_results_onchain = get_eth2_staking_deposits(
            ethereum=ethereum_manager,
            addresses=[ADDR1],
            msg_aggregator=message_aggregator,
            database=database,
        )
        assert deposit_results_onchain == [EXPECTED_DEPOSITS[0]]
        mock_get_eth2_staking_deposits_onchain.assert_called_with(
            ethereum=ethereum_manager,
            addresses=[ADDR1],
            msg_aggregator=message_aggregator,
            from_ts=Timestamp(ts_now),
            to_ts=Timestamp(ts_now + REQUEST_DELTA_TS),
        )
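The three calls above boil down to one guard: only hit the chain again once `now` is at least REQUEST_DELTA_TS past the stored query range's `end_ts`. A self-contained sketch of that guard with an assumed delta value (the real constant lives in the eth2 module):

REQUEST_DELTA_TS = 600  # assumed example value, in seconds

def should_query_onchain(now_ts: int, last_range_end_ts: int) -> bool:
    """Return True when the stored range is stale enough to justify a new query."""
    return now_ts - last_range_end_ts >= REQUEST_DELTA_TS

end_ts = 1_604_506_685
assert should_query_onchain(end_ts + REQUEST_DELTA_TS - 1, end_ts) is False
assert should_query_onchain(end_ts + REQUEST_DELTA_TS, end_ts) is True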
Example no. 20
def test_query_online_trade_history_case_1(mock_bitfinex):
    """Test pagination logic for trades works as expected when each request
    does not return a result already processed.

    Other things tested:
      - Stop requesting (break the loop) when result timestamp is greater than
      'end_ts'.
      - '_api_query' call arguments.

    First request: 2 results
    Second request: 2 results
    Third request: 1 result, out of time range (its timestamp is gt 'end_ts')

    Trades with id 1 to 4 are expected to be returned.
    """
    api_limit = 2
    mock_bitfinex.first_connection = MagicMock()
    mock_bitfinex.currency_map = {
        'UST': 'USDt',
        'WBT': 'WBTC',
    }
    mock_bitfinex.pair_bfx_symbols_map = {
        'ETHUST': ('ETH', 'UST'),
        'WBTUSD': ('WBT', 'USD'),
        'ETHEUR': ('ETH', 'EUR'),
    }
    # Buy ETH with USDT
    trade_1 = """
    [
        1,
        "tETH:UST",
        1606899600000,
        10,
        0.26334268,
        187.37,
        "LIMIT",
        null,
        -1,
        -0.09868591,
        "USD"
    ]
    """
    # Sell ETH for USDT
    trade_2 = """
    [
        2,
        "tETH:UST",
        1606901400000,
        20,
        -0.26334268,
        187.37,
        "LIMIT",
        null,
        -1,
        -0.09868591,
        "ETH"
    ]
    """
    # Buy WBTC for USD
    trade_3 = """
    [
        3,
        "tWBTUSD",
        1606932000000,
        30,
        10000.00000000,
        0.00005000,
        "LIMIT",
        null,
        -1,
        -20.00000000,
        "USD"
    ]
    """
    # Sell WBTC for USD
    trade_4 = """
    [
        4,
        "tWBTUSD",
        1606986000000,
        40,
        -10000.00000000,
        0.00005000,
        "LIMIT",
        null,
        -1,
        -20.00000000,
        "BTC"
    ]
    """
    # Sell ETH for EUR, outside time range (gt 'end_ts')
    trade_5 = """
    [
        5,
        "tETH:EUR",
        1606996801000,
        50,
        -0.26334268,
        163.29,
        "LIMIT",
        null,
        -1,
        -0.09868591,
        "ETH"
    ]
    """
    expected_calls = [
        call(
            endpoint='trades',
            options={
                'start': 0,
                'end': 1606996800000,
                'limit': 2,
                'sort': 1,
            },
        ),
        call(
            endpoint='trades',
            options={
                'start': 1606901400000,
                'end': 1606996800000,
                'limit': 2,
                'sort': 1,
            },
        ),
        call(
            endpoint='trades',
            options={
                'start': 1606986000000,
                'end': 1606996800000,
                'limit': 2,
                'sort': 1,
            },
        ),
    ]

    def get_paginated_response():
        results = [
            f'[{trade_1},{trade_2}]',
            f'[{trade_3},{trade_4}]',
            f'[{trade_5}]',
        ]
        for result_ in results:
            yield result_

    def mock_api_query_response(endpoint, options):  # pylint: disable=unused-argument
        return MockResponse(HTTPStatus.OK, next(get_response))

    get_response = get_paginated_response()
    api_limit_patch = patch(
        target='rotkehlchen.exchanges.bitfinex.API_TRADES_MAX_LIMIT',
        new=api_limit,
    )
    api_query_patch = patch.object(
        target=mock_bitfinex,
        attribute='_api_query',
        side_effect=mock_api_query_response,
    )
    with ExitStack() as stack:
        stack.enter_context(api_limit_patch)
        api_query_mock = stack.enter_context(api_query_patch)
        trades = mock_bitfinex.query_online_trade_history(
            start_ts=Timestamp(0),
            end_ts=Timestamp(int(datetime.now().timestamp())),
        )
        assert api_query_mock.call_args_list == expected_calls
        expected_trades = [
            Trade(
                timestamp=Timestamp(1606899600),
                location=Location.BITFINEX,
                pair=TradePair('ETH_USDT'),
                trade_type=TradeType.BUY,
                amount=AssetAmount(FVal('0.26334268')),
                rate=Price(FVal('187.37')),
                fee=Fee(FVal('0.09868591')),
                fee_currency=Asset('USD'),
                link='1',
                notes='',
            ),
            Trade(
                timestamp=Timestamp(1606901400),
                location=Location.BITFINEX,
                pair=TradePair('ETH_USDT'),
                trade_type=TradeType.SELL,
                amount=AssetAmount(FVal('0.26334268')),
                rate=Price(FVal('187.37')),
                fee=Fee(FVal('0.09868591')),
                fee_currency=Asset('ETH'),
                link='2',
                notes='',
            ),
            Trade(
                timestamp=Timestamp(1606932000),
                location=Location.BITFINEX,
                pair=TradePair('WBTC_USD'),
                trade_type=TradeType.BUY,
                amount=AssetAmount(FVal('10000.0')),
                rate=Price(FVal('0.00005')),
                fee=Fee(FVal('20.0')),
                fee_currency=Asset('USD'),
                link='3',
                notes='',
            ),
            Trade(
                timestamp=Timestamp(1606986000),
                location=Location.BITFINEX,
                pair=TradePair('WBTC_USD'),
                trade_type=TradeType.SELL,
                amount=AssetAmount(FVal('10000.0')),
                rate=Price(FVal('0.00005')),
                fee=Fee(FVal('20.0')),
                fee_currency=Asset('BTC'),
                link='4',
                notes='',
            ),
        ]
        assert trades == expected_trades
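One detail worth keeping in mind when reading the raw arrays above: Bitfinex reports timestamps in milliseconds, while the resulting Trade objects carry second precision, and the paginator stops as soon as an entry falls past 'end_ts'. A tiny self-contained illustration of both points:

end_ts = 1606996800  # query upper bound, in seconds
raw_timestamps_ms = [1606899600000, 1606986000000, 1606996801000]  # as returned by the API

kept = [ts_ms // 1000 for ts_ms in raw_timestamps_ms if ts_ms // 1000 <= end_ts]
assert kept == [1606899600, 1606986000]  # the last entry is past end_ts and is dropped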
Example no. 21
def accounting_history_process(
    accountant,
    start_ts: Timestamp,
    end_ts: Timestamp,
    history_list: List[Dict],
    margin_list: Optional[List[MarginPosition]] = None,
    loans_list: Optional[List[Dict]] = None,
    asset_movements_list: Optional[List[AssetMovement]] = None,
    eth_transaction_list: Optional[List[Dict]] = None,
    defi_events_list: Optional[List[DefiEvent]] = None,
    ledger_actions_list: Optional[List[LedgerAction]] = None,
) -> Dict[str, Any]:
    trade_history: Sequence[Union[Trade, MarginPosition]]
    # For filtering the taxable actions list we start with 0 ts so that we have the
    # full history available
    trade_history = trades_from_dictlist(
        given_trades=history_list,
        start_ts=Timestamp(0),
        end_ts=end_ts,
        location='accounting_history_process for tests',
        msg_aggregator=accountant.msg_aggregator,
    )
    # if present, append margin positions to trade history
    if margin_list:
        trade_history.extend(margin_list)  # type: ignore

    asset_movements = []
    if asset_movements_list:
        asset_movements = asset_movements_list

    loan_history = []
    if loans_list:
        loan_history = process_polo_loans(
            msg_aggregator=accountant.msg_aggregator,
            data=loans_list,
            start_ts=Timestamp(0),
            end_ts=end_ts,
        )

    eth_transactions = []
    if eth_transaction_list:
        eth_transactions = eth_transaction_list

    defi_events = []
    if defi_events_list:
        defi_events = defi_events_list

    ledger_actions = []
    if ledger_actions_list:
        ledger_actions = ledger_actions_list

    result = accountant.process_history(
        start_ts=start_ts,
        end_ts=end_ts,
        trade_history=trade_history,
        loan_history=loan_history,
        asset_movements=asset_movements,
        eth_transactions=eth_transactions,
        defi_events=defi_events,
        ledger_actions=ledger_actions,
    )
    return result
Example no. 22
def test_query_online_trade_history_case_2(mock_bitfinex):
    """Test pagination logic for trades works as expected when a request
    returns a result already processed in the previous request.

    Other things tested:
      - Stop requesting when number of entries is less than limit.

    First request: 2 results
    Second request: 2 results, both trades are repeated from the 1st request.
    Third request: 2 results, first trade is repeated from the 2nd request.
    Fourth request: 1 result

    Trades with id 1 to 4 are expected to be returned.
    """
    api_limit = 2
    mock_bitfinex.first_connection = MagicMock()
    mock_bitfinex.currency_map = {
        'UST': 'USDt',
        'WBT': 'WBTC',
    }
    mock_bitfinex.pair_bfx_symbols_map = {
        'ETHUST': ('ETH', 'UST'),
        'WBTUSD': ('WBT', 'USD'),
        'ETHEUR': ('ETH', 'EUR'),
    }
    # Buy ETH with USDT
    trade_1 = """
    [
        1,
        "tETH:UST",
        1606899600000,
        10,
        0.26334268,
        187.37,
        "LIMIT",
        null,
        -1,
        -0.09868591,
        "UST"
    ]
    """
    # Sell ETH for USDT
    trade_2 = """
    [
        2,
        "tETH:UST",
        1606901400000,
        20,
        -0.26334268,
        187.37,
        "LIMIT",
        null,
        -1,
        -0.09868591,
        "ETH"
    ]
    """
    # Buy WBTC for USD
    trade_3 = """
    [
        3,
        "tWBTUSD",
        1606932000000,
        30,
        10000.00000000,
        0.00005000,
        "LIMIT",
        null,
        -1,
        -20.00000000,
        "USD"
    ]
    """
    # Sell WBTC for USD
    trade_4 = """
    [
        4,
        "tWBTUSD",
        1606986000000,
        40,
        -10000.00000000,
        0.00005000,
        "LIMIT",
        null,
        -1,
        -20.00000000,
        "WBT"
    ]
    """

    def get_paginated_response():
        results = [
            f'[{trade_1},{trade_2}]',
            f'[{trade_1},{trade_2}]',  # repeated line
            f'[{trade_2},{trade_3}]',  # contains repeated
            f'[{trade_4}]',
        ]
        for result_ in results:
            yield result_

    def mock_api_query_response(endpoint, options):  # pylint: disable=unused-argument
        return MockResponse(HTTPStatus.OK, next(get_response))

    get_response = get_paginated_response()
    api_limit_patch = patch(
        target='rotkehlchen.exchanges.bitfinex.API_TRADES_MAX_LIMIT',
        new=api_limit,
    )
    api_query_patch = patch.object(
        target=mock_bitfinex,
        attribute='_api_query',
        side_effect=mock_api_query_response,
    )
    with ExitStack() as stack:
        stack.enter_context(api_limit_patch)
        stack.enter_context(api_query_patch)
        trades = mock_bitfinex.query_online_trade_history(
            start_ts=Timestamp(0),
            end_ts=Timestamp(int(datetime.now().timestamp())),
        )
        expected_trades = [
            Trade(
                timestamp=Timestamp(1606899600),
                location=Location.BITFINEX,
                pair=TradePair('ETH_USDT'),
                trade_type=TradeType.BUY,
                amount=AssetAmount(FVal('0.26334268')),
                rate=Price(FVal('187.37')),
                fee=Fee(FVal('0.09868591')),
                fee_currency=Asset('USDT'),
                link='1',
                notes='',
            ),
            Trade(
                timestamp=Timestamp(1606901400),
                location=Location.BITFINEX,
                pair=TradePair('ETH_USDT'),
                trade_type=TradeType.SELL,
                amount=AssetAmount(FVal('0.26334268')),
                rate=Price(FVal('187.37')),
                fee=Fee(FVal('0.09868591')),
                fee_currency=Asset('ETH'),
                link='2',
                notes='',
            ),
            Trade(
                timestamp=Timestamp(1606932000),
                location=Location.BITFINEX,
                pair=TradePair('WBTC_USD'),
                trade_type=TradeType.BUY,
                amount=AssetAmount(FVal('10000.0')),
                rate=Price(FVal('0.00005')),
                fee=Fee(FVal('20.0')),
                fee_currency=Asset('USD'),
                link='3',
                notes='',
            ),
            Trade(
                timestamp=Timestamp(1606986000),
                location=Location.BITFINEX,
                pair=TradePair('WBTC_USD'),
                trade_type=TradeType.SELL,
                amount=AssetAmount(FVal('10000.0')),
                rate=Price(FVal('0.00005')),
                fee=Fee(FVal('20.0')),
                fee_currency=Asset('WBTC'),
                link='4',
                notes='',
            ),
        ]
        assert trades == expected_trades
Example no. 23
def assert_cointracking_import_results(rotki: Rotkehlchen):
    """A utility function to help assert on correctness of importing data from cointracking.info"""
    trades = rotki.data.db.get_trades()
    asset_movements = rotki.data.db.get_asset_movements()
    warnings = rotki.msg_aggregator.consume_warnings()
    errors = rotki.msg_aggregator.consume_errors()
    assert len(errors) == 0
    assert len(warnings) == 3

    expected_trades = [
        Trade(
            timestamp=Timestamp(1566687719),
            location=Location.COINBASE,
            pair=TradePair('ETH_EUR'),
            trade_type=TradeType.BUY,
            amount=AssetAmount(FVal('0.05772716')),
            rate=Price(FVal('190.3783245183029963712055123')),
            fee=Fee(FVal("0.02")),
            fee_currency=A_EUR,
            link='',
            notes='',
        ),
        Trade(
            timestamp=Timestamp(1567418410),
            location=Location.EXTERNAL,
            pair=TradePair('BTC_USD'),
            trade_type=TradeType.BUY,
            amount=AssetAmount(FVal('0.00100000')),
            rate=Price(ZERO),
            fee=Fee(ZERO),
            fee_currency=A_USD,
            link='',
            notes='Just a small gift from someone',
        ),
        Trade(
            timestamp=Timestamp(1567504805),
            location=Location.EXTERNAL,
            pair=TradePair('ETH_USD'),
            trade_type=TradeType.BUY,
            amount=AssetAmount(FVal('2')),
            rate=Price(ZERO),
            fee=Fee(ZERO),
            fee_currency=A_USD,
            link='',
            notes='Sign up bonus',
        )
    ]
    assert expected_trades == trades

    expected_movements = [
        AssetMovement(
            location=Location.POLONIEX,
            category=AssetMovementCategory.DEPOSIT,
            timestamp=Timestamp(1565848624),
            address=None,
            transaction_id=None,
            asset=A_XMR,
            amount=AssetAmount(FVal('5')),
            fee_asset=A_USD,
            fee=Fee(ZERO),
            link='',
        ),
        AssetMovement(
            location=Location.COINBASE,
            category=AssetMovementCategory.WITHDRAWAL,
            address=None,
            transaction_id=None,
            timestamp=Timestamp(1566726155),
            asset=A_ETH,
            amount=AssetAmount(FVal('0.05770427')),
            fee_asset=A_ETH,
            fee=Fee(FVal("0.0001")),
            link='',
        )
    ]
    assert expected_movements == asset_movements
Example no. 24
def test_query_online_deposits_withdrawals_case_1(mock_bitfinex):
    """Test pagination logic for asset movements works as expected when each
    request does not return a result already processed.

    Other things tested:
      - Results are sorted by id in ascending order.
      - Skip result when status is not 'COMPLETED'.
      - Stop requesting (break the loop) when result timestamp is greater than
      'end_ts'.
      - '_api_query' call arguments.

    First request: 2 results
    Second request: 2 results, 1 not completed.
    Third request: 1 result, out of time range (its timestamp is gt 'end_ts')

    Movements with id 1, 2 and 4 are expected to be returned.
    """
    api_limit = 2
    mock_bitfinex.first_connection = MagicMock()
    mock_bitfinex.currency_map = {'WBT': 'WBTC'}
    # Deposit WBTC
    movement_1 = """
    [
        1,
        "WBT",
        "Wrapped Bitcoin",
        null,
        null,
        1606899600000,
        1606899700000,
        null,
        null,
        "COMPLETED",
        null,
        null,
        0.26300954,
        -0.00135,
        null,
        null,
        "DESTINATION_ADDRESS",
        null,
        null,
        null,
        "TRANSACTION_ID",
        null
    ]
    """
    # Withdraw WBTC
    movement_2 = """
    [
        2,
        "WBT",
        "Wrapped Bitcoin",
        null,
        null,
        1606901400000,
        1606901500000,
        null,
        null,
        "COMPLETED",
        null,
        null,
        -0.26300954,
        -0.00135,
        null,
        null,
        "DESTINATION_ADDRESS",
        null,
        null,
        null,
        "TRANSACTION_ID",
        null
    ]
    """
    # Deposit WBTC, not completed
    movement_3 = """
    [
        3,
        "WBT",
        "Wrapped Bitcoin",
        null,
        null,
        1606932000000,
        1606932100000,
        null,
        null,
        "WHATEVER",
        null,
        null,
        0.26300954,
        -0.00135,
        null,
        null,
        "DESTINATION_ADDRESS",
        null,
        null,
        null,
        "TRANSACTION_ID",
        null
    ]
    """
    # Withdraw EUR
    movement_4 = """
    [
        4,
        "EUR",
        "Euro",
        null,
        null,
        1606986000000,
        1606986100000,
        null,
        null,
        "COMPLETED",
        null,
        null,
        -0.26300954,
        -0.00135,
        null,
        null,
        "",
        null,
        null,
        null,
        "",
        null
    ]
    """
    # Deposit WBTC, outside time range (gt 'end_ts')
    movement_5 = """
    [
        5,
        "WBT",
        "Wrapped Bitcoin",
        null,
        null,
        1606996801000,
        1606996901000,
        null,
        null,
        "COMPLETED",
        null,
        null,
        0.26300954,
        -0.00135,
        null,
        null,
        "DESTINATION_ADDRESS",
        null,
        null,
        null,
        "TRANSACTION_ID",
        null
    ]
    """
    expected_calls = [
        call(
            endpoint='movements',
            options={
                'start': 0,
                'end': 1606996800000,
                'limit': 2,
            },
        ),
        call(
            endpoint='movements',
            options={
                'start': 1606901400000,
                'end': 1606996800000,
                'limit': 2,
            },
        ),
        call(
            endpoint='movements',
            options={
                'start': 1606986000000,
                'end': 1606996800000,
                'limit': 2,
            },
        ),
    ]

    def get_paginated_response():
        results = [
            f'[{movement_2},{movement_1}]',
            f'[{movement_4},{movement_3}]',
            f'[{movement_5}]',
        ]
        for result_ in results:
            yield result_

    def mock_api_query_response(endpoint, options):  # pylint: disable=unused-argument
        return MockResponse(HTTPStatus.OK, next(get_response))

    get_response = get_paginated_response()
    api_limit_patch = patch(
        target='rotkehlchen.exchanges.bitfinex.API_MOVEMENTS_MAX_LIMIT',
        new=api_limit,
    )
    api_query_patch = patch.object(
        target=mock_bitfinex,
        attribute='_api_query',
        side_effect=mock_api_query_response,
    )
    with ExitStack() as stack:
        stack.enter_context(api_limit_patch)
        api_query_mock = stack.enter_context(api_query_patch)
        asset_movements = mock_bitfinex.query_online_deposits_withdrawals(
            start_ts=Timestamp(0),
            end_ts=Timestamp(int(datetime.now().timestamp())),
        )
        assert api_query_mock.call_args_list == expected_calls

        wbtc_fee_asset = Asset('WBTC')
        eur_fee_asset = Asset('EUR')
        expected_asset_movements = [
            AssetMovement(
                timestamp=Timestamp(1606899600),
                location=Location.BITFINEX,
                category=AssetMovementCategory.DEPOSIT,
                address='DESTINATION_ADDRESS',
                transaction_id='TRANSACTION_ID',
                asset=wbtc_fee_asset,
                amount=FVal('0.26300954'),
                fee_asset=wbtc_fee_asset,
                fee=Fee(FVal('0.00135')),
                link=str(1),
            ),
            AssetMovement(
                timestamp=Timestamp(1606901400),
                location=Location.BITFINEX,
                category=AssetMovementCategory.WITHDRAWAL,
                address='DESTINATION_ADDRESS',
                transaction_id='TRANSACTION_ID',
                asset=wbtc_fee_asset,
                amount=FVal('0.26300954'),
                fee_asset=wbtc_fee_asset,
                fee=Fee(FVal('0.00135')),
                link=str(2),
            ),
            AssetMovement(
                timestamp=Timestamp(1606986000),
                location=Location.BITFINEX,
                category=AssetMovementCategory.WITHDRAWAL,
                address=None,
                transaction_id=None,
                asset=eur_fee_asset,
                amount=FVal('0.26300954'),
                fee_asset=eur_fee_asset,
                fee=Fee(FVal('0.00135')),
                link=str(4),
            ),
        ]
        assert asset_movements == expected_asset_movements
Example no. 25
    API_SYSTEM_CLOCK_NOT_SYNCED_ERROR_CODE:
    'Invalid timestamp. Is your system clock synced?',
    400003: 'Invalid API key value.',
    400004: 'Invalid API passphrase.',
    400005: 'Invalid API secret.',
    400007:
    'Provided KuCoin API key needs to have "General" permission activated.',
    411100: 'Contact KuCoin support to unfreeze your account',
}
API_PAGE_SIZE_LIMIT = 500
# The rate limit is 1800 requests per minute; exceeding it repeatedly will get
# the IP restricted by the system
API_REQUEST_RETRY_TIMES = 2
API_REQUEST_RETRIES_AFTER_SECONDS = 1

API_V2_TIMESTART = Timestamp(1550448000)  # 2019-02-18T00:00:00Z
API_V2_TIMESTART_MS = API_V2_TIMESTART * 1000
KUCOIN_LAUNCH_TS = Timestamp(1504224000)  # 01/09/2017


class KucoinCase(Enum):
    API_KEY = 1
    BALANCES = 2
    TRADES = 3
    OLD_TRADES = 4
    DEPOSITS = 5
    WITHDRAWALS = 6

    def __str__(self) -> str:
        if self == KucoinCase.API_KEY:
            return 'api_key'
Example no. 26
    def __init__(
        self,
        blockchain_accounts: BlockchainAccounts,
        ethereum_manager: 'EthereumManager',
        msg_aggregator: MessagesAggregator,
        database: DBHandler,
        greenlet_manager: GreenletManager,
        premium: Optional[Premium],
        eth_modules: Optional[List[str]] = None,
    ):
        log.debug('Initializing ChainManager')
        super().__init__()
        self.ethereum = ethereum_manager
        self.database = database
        self.msg_aggregator = msg_aggregator
        self.accounts = blockchain_accounts

        self.defi_balances_last_query_ts = Timestamp(0)
        self.defi_balances: Dict[ChecksumEthAddress,
                                 List[DefiProtocolBalances]] = {}
        # Per account balances
        self.balances = BlockchainBalances(db=database)
        # Per asset total balances
        self.totals: Totals = defaultdict(Balance)
        # TODO: Perhaps turn this mapping into a typed dict?
        self.eth_modules: Dict[str, Union[EthereumModule,
                                          Literal['loading']]] = {}
        if eth_modules:
            for given_module in eth_modules:
                if given_module == 'makerdao_dsr':
                    self.eth_modules['makerdao_dsr'] = MakerDAODSR(
                        ethereum_manager=ethereum_manager,
                        database=self.database,
                        premium=premium,
                        msg_aggregator=msg_aggregator,
                    )
                elif given_module == 'makerdao_vaults':
                    self.eth_modules['makerdao_vaults'] = MakerDAOVaults(
                        ethereum_manager=ethereum_manager,
                        database=self.database,
                        premium=premium,
                        msg_aggregator=msg_aggregator,
                    )
                elif given_module == 'aave':
                    self.eth_modules['aave'] = Aave(
                        ethereum_manager=ethereum_manager,
                        database=self.database,
                        premium=premium,
                        msg_aggregator=msg_aggregator,
                    )
                elif given_module == 'compound':
                    self.eth_modules['compound'] = 'loading'
                    # Since Compound initialization needs a few network calls we do it async
                    greenlet_manager.spawn_and_track(
                        after_seconds=None,
                        task_name='Initialize Compound object',
                        method=self._initialize_compound,
                        premium=premium,
                    )
                elif given_module == 'yearn_vaults':
                    self.eth_modules['yearn_vaults'] = YearnVaults(
                        ethereum_manager=ethereum_manager,
                        database=self.database,
                        premium=premium,
                        msg_aggregator=msg_aggregator,
                    )
                else:
                    log.error(
                        f'Unrecognized module value {given_module} given. Skipping...'
                    )

        self.greenlet_manager = greenlet_manager

        for name, module in self.iterate_modules():
            self.greenlet_manager.spawn_and_track(
                after_seconds=None,
                task_name=f'startup of {name}',
                method=module.on_startup,
            )

        # Since Zerion initialization needs a few ENS calls we do it asynchronously
        self.zerion: Optional[Zerion] = None
        self.greenlet_manager.spawn_and_track(
            after_seconds=None,
            task_name='Initialize Zerion object',
            method=self._initialize_zerion,
        )
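The 'loading' string above is a sentinel kept in the module registry while Compound finishes its asynchronous initialization. A generic, self-contained sketch of that pattern with illustrative names (rotki's own iterate_modules may handle it differently):

from typing import Dict, Iterator, Tuple, Union

class Module:
    def __init__(self, name: str) -> None:
        self.name = name

# Values are either a ready module or the 'loading' placeholder string.
registry: Dict[str, Union[Module, str]] = {
    'aave': Module('aave'),
    'compound': 'loading',  # async initialization has not finished yet
}

def ready_modules(reg: Dict[str, Union[Module, str]]) -> Iterator[Tuple[str, Module]]:
    for name, module in reg.items():
        if module == 'loading':
            continue  # skip entries whose background init has not completed
        yield name, module

assert [name for name, _ in ready_modules(registry)] == ['aave']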
Example no. 27
def create_timestamp(datestr: str, formatstr: str = '%Y-%m-%d %H:%M:%S') -> Timestamp:
    """Can throw ValueError due to strptime"""
    return Timestamp(calendar.timegm(time.strptime(datestr, formatstr)))
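A quick self-contained check of the conversion the helper above performs; the date string is an arbitrary example and, as the docstring warns, a malformed string raises ValueError from strptime:

import calendar
import time

# timegm + strptime interpret the string as UTC, regardless of the local timezone.
ts = calendar.timegm(time.strptime('2018-04-10 22:30:09', '%Y-%m-%d %H:%M:%S'))
assert ts == 1523399409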
        'timestamp': 1523399409,  # 10/04/2018
        'block_number': 5417790,
        'hash': DUMMY_HASH,
        'from_address': DUMMY_ADDRESS,
        'to_address': DUMMY_ADDRESS,
        'value': 12323,
        'gas': 5000000,
        'gas_price': 2100000000,
        'gas_used': 1900000,
    },
]

margin_history = [
    MarginPosition(  # before query period -- BTC/EUR: 422.90
        exchange='poloniex',
        open_time=Timestamp(1463184190),  # 14/05/2016
        close_time=Timestamp(1464393600),  # 28/05/2016
        profit_loss=FVal(0.05),
        pl_currency=A_BTC,
        notes='margin1',
    ), MarginPosition(  # before query period -- BTC/EUR: 542.87
        exchange='poloniex',
        open_time=Timestamp(1472428800),  # 29/08/2016
        close_time=Timestamp(1473897600),  # 15/09/2016
        profit_loss=FVal('-0.042'),
        pl_currency=A_BTC,
        notes='margin2',
    ), MarginPosition(  # BTC/EUR: 1039.935
        exchange='poloniex',
        open_time=Timestamp(1489276800),  # 12/03/2017
        close_time=Timestamp(1491177600),  # 03/04/2017
Example no. 29
    def _get_user_data(
        self,
        from_ts: Timestamp,
        to_ts: Timestamp,
        address: ChecksumEthAddress,
        balances: AaveBalances,
    ) -> AaveHistory:
        last_query = self.database.get_used_query_range(
            f'aave_events_{address}')
        db_events = self.database.get_aave_events(address=address)

        now = ts_now()
        last_query_ts = 0
        if last_query is not None:
            last_query_ts = last_query[1]
            from_ts = Timestamp(last_query_ts + 1)

        # Separate lists (a chained assignment would make all names alias one list)
        deposits, withdrawals, borrows, repays, liquidation_calls = [], [], [], [], []
        query = self.graph.query(
            querystr=USER_EVENTS_QUERY,
            param_types={'$address': 'ID!'},
            param_values={'address': address.lower()},
        )
        user_result = query['users'][0]
        if now - last_query_ts > AAVE_GRAPH_RECENT_SECS:
            # In theory if these were individual queries we should do them only if
            # we have not queried recently. In practice, since we only do 1 query above,
            # this is useless for now, but keeping the mechanism in case we change
            # the way we query the subgraph
            deposits = self._parse_deposits(user_result['depositHistory'],
                                            from_ts, to_ts)
            withdrawals = self._parse_withdrawals(
                withdrawals=user_result['redeemUnderlyingHistory'],
                from_ts=from_ts,
                to_ts=to_ts,
            )
            borrows = self._parse_borrows(user_result['borrowHistory'],
                                          from_ts, to_ts)
            repays = self._parse_repays(user_result['repayHistory'], from_ts,
                                        to_ts)
            liquidation_calls = self._parse_liquidations(
                user_result['liquidationCallHistory'],
                from_ts,
                to_ts,
            )

        result = self._process_events(
            user_address=address,
            user_result=user_result,
            from_ts=from_ts,
            to_ts=to_ts,
            deposits=deposits,
            withdrawals=withdrawals,
            borrows=borrows,
            repays=repays,
            liquidations=liquidation_calls,
            db_events=db_events,
            balances=balances,
        )

        # Add all new events to the DB
        new_events: List[
            AaveEvent] = deposits + withdrawals + result.interest_events + borrows + repays + liquidation_calls  # type: ignore  # noqa: E501
        self.database.add_aave_events(address, new_events)
        # After all events have been queried then also update the query range.
        # Even if no events are found for an address we need to remember the range
        self.database.update_used_query_range(
            name=f'aave_events_{address}',
            start_ts=Timestamp(0),
            end_ts=now,
        )

        # Sort actions so that actions with same time are sorted deposit -> interest -> withdrawal
        all_events: List[AaveEvent] = new_events + db_events
        sort_map = {
            'deposit': 0,
            'interest': 0.1,
            'withdrawal': 0.2,
            'borrow': 0.3,
            'repay': 0.4,
            'liquidation': 0.5
        }  # noqa: E501
        all_events.sort(
            key=lambda event: sort_map[event.event_type] + event.timestamp)
        return AaveHistory(
            events=all_events,
            total_earned_interest=result.total_earned_interest,
            total_lost=result.total_lost,
            total_earned_liquidations=result.total_earned_liquidations,
        )
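The sort above relies on the fractional offsets in sort_map to order events that share a timestamp (deposit before interest before withdrawal, and so on). A tiny self-contained illustration of that trick:

from collections import namedtuple

Event = namedtuple('Event', ['event_type', 'timestamp'])

sort_map = {'deposit': 0, 'interest': 0.1, 'withdrawal': 0.2}
events = [
    Event('withdrawal', 1600000000),
    Event('interest', 1600000000),
    Event('deposit', 1600000000),
]
# The integer part keeps chronological order; the fraction breaks timestamp ties.
events.sort(key=lambda event: sort_map[event.event_type] + event.timestamp)
assert [event.event_type for event in events] == ['deposit', 'interest', 'withdrawal']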
Example no. 30
    def _get_trades_graph_v3_for_address(
        self,
        address: ChecksumEthAddress,
        start_ts: Timestamp,
        end_ts: Timestamp,
    ) -> List[AMMTrade]:
        """Get the address' trades data querying the Uniswap subgraph

        Each trade (swap) instantiates an <AMMTrade>.

        The trade pair (i.e. BASE_QUOTE) is determined by `reserve0_reserve1`.
        Translated to Uniswap lingo:

        Trade type BUY:
        - `amount1` (QUOTE, reserve1) is gt 0.
        - `amount0` (BASE, reserve0) is lt 0.

        Trade type SELL:
        - `amount0` (BASE, reserve0) is gt 0.
        - `amount1` (QUOTE, reserve1) is lt 0.

        May raise:
        - RemoteError
        """
        trades: List[AMMTrade] = []
        param_types = {
            '$limit': 'Int!',
            '$offset': 'Int!',
            '$address': 'Bytes!',
            '$start_ts': 'BigInt!',
            '$end_ts': 'BigInt!',
        }
        param_values = {
            'limit': GRAPH_QUERY_LIMIT,
            'offset': 0,
            'address': address.lower(),
            'start_ts': str(start_ts),
            'end_ts': str(end_ts),
        }
        querystr = format_query_indentation(V3_SWAPS_QUERY.format())

        while True:
            try:
                result = self.graph_v3.query(
                    querystr=querystr,
                    param_types=param_types,
                    param_values=param_values,
                )
            except RemoteError as e:
                self.msg_aggregator.add_error(
                    SUBGRAPH_REMOTE_ERROR_MSG.format(
                        error_msg=str(e),
                        location=self.location,
                    ), )
                raise

            result_data = result['swaps']
            for entry in result_data:
                swaps = []
                for swap in entry['transaction']['swaps']:
                    timestamp = swap['timestamp']
                    swap_token0 = swap['token0']
                    swap_token1 = swap['token1']

                    try:
                        token0_deserialized = deserialize_ethereum_address(
                            swap_token0['id'])
                        token1_deserialized = deserialize_ethereum_address(
                            swap_token1['id'])
                        from_address_deserialized = deserialize_ethereum_address(
                            swap['sender'])
                        to_address_deserialized = deserialize_ethereum_address(
                            swap['recipient'])
                    except DeserializationError:
                        msg = (
                            f'Failed to deserialize addresses in trade from uniswap graph with '
                            f'token 0: {swap_token0["id"]}, token 1: {swap_token1["id"]}, '
                            f'swap sender: {swap["sender"]}, swap receiver: {swap["recipient"]}'
                        )
                        log.error(msg)
                        continue

                    token0 = get_or_create_ethereum_token(
                        userdb=self.database,
                        symbol=swap_token0['symbol'],
                        ethereum_address=token0_deserialized,
                        name=swap_token0['name'],
                        decimals=int(swap_token0['decimals']),
                    )
                    token1 = get_or_create_ethereum_token(
                        userdb=self.database,
                        symbol=swap_token1['symbol'],
                        ethereum_address=token1_deserialized,
                        name=swap_token1['name'],
                        decimals=int(swap_token1['decimals']),
                    )

                    try:
                        if swap['amount0'].startswith('-'):
                            amount0_in = AssetAmount(FVal(ZERO))
                            amount0_out = deserialize_asset_amount_force_positive(
                                swap['amount0'])
                            amount1_in = deserialize_asset_amount_force_positive(
                                swap['amount1'])
                            amount1_out = AssetAmount(FVal(ZERO))
                        else:
                            amount0_in = deserialize_asset_amount_force_positive(
                                swap['amount0'])
                            amount0_out = AssetAmount(FVal(ZERO))
                            amount1_in = AssetAmount(FVal(ZERO))
                            amount1_out = deserialize_asset_amount_force_positive(
                                swap['amount1'])
                    except ValueError as e:
                        log.error(
                            f'Failed to read amounts in Uniswap V3 swap {str(swap)}. '
                            f'{str(e)}.', )
                        continue

                    swaps.append(
                        AMMSwap(
                            tx_hash=swap['id'].split('#')[0],
                            log_index=int(swap['logIndex']),
                            address=address,
                            from_address=from_address_deserialized,
                            to_address=to_address_deserialized,
                            timestamp=Timestamp(int(timestamp)),
                            location=Location.UNISWAP,
                            token0=token0,
                            token1=token1,
                            amount0_in=amount0_in,
                            amount1_in=amount1_in,
                            amount0_out=amount0_out,
                            amount1_out=amount1_out,
                        ))

                # with the new logic the list of swaps can be empty, in that case don't try
                # to make trades from the swaps
                if len(swaps) == 0:
                    continue

                # Now that we got all swaps for a transaction, create the trade object
                trades.extend(self._tx_swaps_to_trades(swaps))
            # Check whether an extra request is needed
            if len(result_data) < GRAPH_QUERY_LIMIT:
                break

            # Update pagination step
            param_values = {
                **param_values,
                'offset':
                param_values['offset'] + GRAPH_QUERY_LIMIT,  # type: ignore
            }
        return trades
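The paging loop above is plain offset pagination against the subgraph: query GRAPH_QUERY_LIMIT rows, stop when fewer come back, otherwise bump the offset. A stripped-down, self-contained sketch of the same loop (fake fetch function, not the Graph client):

from typing import Dict, List

GRAPH_QUERY_LIMIT = 3  # small value so the example terminates quickly

def fetch_swaps(limit: int, offset: int) -> List[Dict]:
    """Stand-in for the subgraph query: 7 fake swaps served in pages."""
    all_swaps = [{'id': i} for i in range(7)]
    return all_swaps[offset:offset + limit]

collected: List[Dict] = []
offset = 0
while True:
    page = fetch_swaps(limit=GRAPH_QUERY_LIMIT, offset=offset)
    collected.extend(page)
    if len(page) < GRAPH_QUERY_LIMIT:
        break  # no more data to request
    offset += GRAPH_QUERY_LIMIT
assert [swap['id'] for swap in collected] == list(range(7))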