Example No. 1
    def _get_user_reserves(
            self, address: ChecksumEthAddress) -> List[AaveUserReserve]:
        query = self.graph.query(
            querystr=USER_RESERVES_QUERY.format(address=address.lower()), )
        query_v2 = self.graph_v2.query(
            querystr=USER_RESERVES_QUERY.format(address=address.lower()), )
        result = []
        for entry in query['userReserves'] + query_v2['userReserves']:
            reserve = entry['reserve']
            try:
                result.append(
                    AaveUserReserve(
                        # The reserve ID is the asset's address concatenated with the address of the market's LendingPoolAddressProvider, in lower case  # noqa: E501
                        address=deserialize_ethereum_address(
                            reserve['id'][:42]),
                        symbol=reserve['symbol'],
                    ))
            except DeserializationError:
                log.error(
                    f'Failed to deserialize reserve address {reserve["id"]}. '
                    f'Skipping this reserve for user address {address}',
                )
                continue

        return result
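
A minimal sketch of the reserve-id slicing used above. The id and addresses below are made up for illustration; the point is only that the first 42 characters ('0x' plus 40 hex digits) are the asset's address.

# Hypothetical Aave subgraph reserve id: the asset address followed by the
# market's LendingPoolAddressProvider address, both lower case, no separator
asset = '0x6b175474e89094c44da98b954eedeac495271d0f'
provider = '0xb53c1a33016b2dc2ff3653530bff1848a515c8c5'
reserve_id = asset + provider

# reserve['id'][:42] keeps '0x' + 40 hex characters, i.e. the asset address
assert reserve_id[:42] == asset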
Example No. 2
    def get_reverse_ens(
        self,
        addresses: List[ChecksumEthAddress],
    ) -> Dict[ChecksumEthAddress, Union[EnsMapping, Timestamp]]:
        """Returns a mapping of addresses to ens mappings if found in the DB

        - If the address has a name mapping in the DB, it is returned as part of the dict
        - If the address maps to None in the DB, the address maps to its last update timestamp
        - If the address is not found in the DB, it is not included in the result
        """
        cursor = self.db.conn.cursor()
        data = cursor.execute(
            f'SELECT ens_name, address, last_update FROM ens_mappings WHERE address IN (? {", ?"*(len(addresses) - 1)})',  # noqa: E501
            addresses,
        )
        result = {}
        for ens_name, raw_address, last_update in data:
            address = ChecksumEthAddress(raw_address)
            if ens_name is None:
                result[address] = last_update
            else:
                result[address] = EnsMapping(
                    address=address,
                    name=ens_name,
                    last_update=last_update,
                )

        return result
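
A self-contained sketch of the placeholder expansion used in the query above, run against a throwaway in-memory table (the table rows and addresses are made up):

import sqlite3

conn = sqlite3.connect(':memory:')
conn.execute('CREATE TABLE ens_mappings(ens_name TEXT, address TEXT PRIMARY KEY, last_update INTEGER)')
conn.execute("INSERT INTO ens_mappings VALUES('rotki.eth', '0xAAAA', 1)")
conn.execute("INSERT INTO ens_mappings VALUES(NULL, '0xBBBB', 2)")

addresses = ['0xAAAA', '0xBBBB', '0xCCCC']
# One '?' plus ', ?' for every additional address, so the number of
# placeholders always matches len(addresses)
placeholders = f'? {", ?" * (len(addresses) - 1)}'
rows = conn.execute(
    f'SELECT ens_name, address, last_update FROM ens_mappings WHERE address IN ({placeholders})',
    addresses,
).fetchall()
# 0xCCCC has no row, so it is simply absent from the result
assert {row[1] for row in rows} == {'0xAAAA', '0xBBBB'}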
Example No. 3
def hex_or_bytes_to_address(value: Union[bytes, str]) -> ChecksumEthAddress:
    """Turns a 32bit bytes/HexBytes or a hexstring into an address

    May raise:
    - DeserializationError if the value can't be converted to a hex string or if it
    does not contain a valid ethereum address.
    """
    try:
        hexstr = hex_or_bytes_to_str(value)
    except ConversionError as e:
        raise DeserializationError(
            f'Could not turn {value!r} to an ethereum address') from e
    try:
        return ChecksumEthAddress(to_checksum_address('0x' + hexstr[24:]))
    except ValueError as e:
        raise DeserializationError(
            f'Invalid ethereum address: {hexstr[24:]}', ) from e
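
A rough illustration of the slicing above: an ABI-encoded address is a 32-byte word whose last 20 bytes are the address, so once converted to a hex string the address starts at character 24. The word below is a made-up example.

# 24 hex characters (12 bytes) of zero padding followed by a 20-byte address,
# 64 hex characters in total
word = '000000000000000000000000' + '6b175474e89094c44da98b954eedeac495271d0f'

assert len(word) == 64
# hexstr[24:] drops the padding, leaving the 40-character address
assert word[24:] == '6b175474e89094c44da98b954eedeac495271d0f'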
Example No. 4
def get_common_params(
    from_ts: Timestamp,
    to_ts: Timestamp,
    address: ChecksumEthAddress,
    address_type: Literal['Bytes!', 'String!'] = 'Bytes!',
) -> Tuple[Dict[str, Any], Dict[str, Any]]:
    param_types = {
        '$start_ts': 'Int!',
        '$end_ts': 'Int!',
        '$address': address_type,
    }
    param_values = {
        'start_ts': from_ts,
        'end_ts': to_ts,
        'address': address.lower(),
    }
    return param_types, param_values
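
A hypothetical call showing the pair of dicts the helper returns. The address and timestamps are invented, and Timestamp/ChecksumEthAddress are assumed to be the usual rotki NewType wrappers with no runtime validation.

param_types, param_values = get_common_params(
    from_ts=Timestamp(1609459200),
    to_ts=Timestamp(1612137600),
    address=ChecksumEthAddress('0x9531C059098e3d194fF87FebB587aB07B30B1306'),
)
# param_types declares the GraphQL variable types:
# {'$start_ts': 'Int!', '$end_ts': 'Int!', '$address': 'Bytes!'}
# param_values carries the raw timestamps and the lower-cased address
assert param_values['address'] == '0x9531c059098e3d194ff87febb587ab07b30b1306'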
Example No. 5
    def _read_subgraph_trades(
        self,
        address: ChecksumEthAddress,
        start_ts: Timestamp,
        end_ts: Timestamp,
    ) -> List[AMMTrade]:
        """Get the address' trades data querying the AMM subgraph

        Each trade (swap) instantiates an <AMMTrade>.

        The trade pair (i.e. BASE_QUOTE) is determined by `reserve0_reserve1`.
        Translated to AMM lingo:

        Trade type BUY:
        - `asset1In` (QUOTE, reserve1) is gt 0.
        - `asset0Out` (BASE, reserve0) is gt 0.

        Trade type SELL:
        - `asset0In` (BASE, reserve0) is gt 0.
        - `asset1Out` (QUOTE, reserve1) is gt 0.

        May raise:
        - RemoteError
        """
        trades: List[AMMTrade] = []
        query_id = '0'
        query_offset = 0
        param_types = {
            '$limit': 'Int!',
            '$offset': 'Int!',
            '$address': 'Bytes!',
            '$start_ts': 'BigInt!',
            '$end_ts': 'BigInt!',
            '$id': 'ID!',
        }
        param_values = {
            'limit': GRAPH_QUERY_LIMIT,
            'offset': 0,
            'address': address.lower(),
            'start_ts': str(start_ts),
            'end_ts': str(end_ts),
            'id': query_id,
        }
        querystr = format_query_indentation(self.swaps_query.format())

        while True:
            try:
                result = self.graph.query(
                    querystr=querystr,
                    param_types=param_types,
                    param_values=param_values,
                )
            except RemoteError as e:
                self.msg_aggregator.add_error(
                    SUBGRAPH_REMOTE_ERROR_MSG.format(error_msg=str(e),
                                                     location=self.location), )
                raise

            for entry in result['swaps']:
                swaps = []
                try:
                    for swap in entry['transaction']['swaps']:
                        timestamp = swap['timestamp']
                        swap_token0 = swap['pair']['token0']
                        swap_token1 = swap['pair']['token1']

                        try:
                            token0_deserialized = deserialize_ethereum_address(
                                swap_token0['id'])
                            token1_deserialized = deserialize_ethereum_address(
                                swap_token1['id'])
                            from_address_deserialized = deserialize_ethereum_address(
                                swap['sender'])  # noqa
                            to_address_deserialized = deserialize_ethereum_address(
                                swap['to'])
                        except DeserializationError:
                            msg = (
                                f'Failed to deserialize addresses in trade from {self.location} graph'  # noqa
                                f' with token 0: {swap_token0["id"]}, token 1: {swap_token1["id"]}, '  # noqa
                                f'swap sender: {swap["sender"]}, swap receiver {swap["to"]}'
                            )
                            log.error(msg)
                            continue

                        token0 = get_or_create_ethereum_token(
                            userdb=self.database,
                            symbol=swap_token0['symbol'],
                            ethereum_address=token0_deserialized,
                            name=swap_token0['name'],
                            decimals=int(swap_token0['decimals']),
                        )
                        token1 = get_or_create_ethereum_token(
                            userdb=self.database,
                            symbol=swap_token1['symbol'],
                            ethereum_address=token1_deserialized,
                            name=swap_token1['name'],
                            decimals=int(swap_token1['decimals']),
                        )

                        try:
                            amount0_in = FVal(swap['amount0In'])
                            amount1_in = FVal(swap['amount1In'])
                            amount0_out = FVal(swap['amount0Out'])
                            amount1_out = FVal(swap['amount1Out'])
                        except ValueError as e:
                            log.error(
                                f'Failed to read amounts in {self.location} swap {str(swap)}. '
                                f'{str(e)}.', )
                            continue

                        swaps.append(
                            AMMSwap(
                                tx_hash=swap['id'].split('-')[0],
                                log_index=int(swap['logIndex']),
                                address=address,
                                from_address=from_address_deserialized,
                                to_address=to_address_deserialized,
                                timestamp=Timestamp(int(timestamp)),
                                location=self.location,
                                token0=token0,
                                token1=token1,
                                amount0_in=AssetAmount(amount0_in),
                                amount1_in=AssetAmount(amount1_in),
                                amount0_out=AssetAmount(amount0_out),
                                amount1_out=AssetAmount(amount1_out),
                            ))
                    query_id = entry['id']
                except KeyError as e:
                    log.error(
                        f'Failed to read trade in {self.location} swap {str(entry)}. '
                        f'{str(e)}.', )
                    continue

                # with the new logic the list of swaps can be empty, in that case don't try
                # to make trades from the swaps
                if len(swaps) == 0:
                    continue

                # Now that we got all swaps for a transaction, create the trade object
                trades.extend(self._tx_swaps_to_trades(swaps))

            # Check whether an extra request is needed
            if len(result['swaps']) < GRAPH_QUERY_LIMIT:
                break

            # Update pagination step
            if query_offset == GRAPH_QUERY_SKIP_LIMIT:
                query_offset = 0
                new_query_id = query_id
            else:
                query_offset += GRAPH_QUERY_LIMIT
                new_query_id = '0'
            param_values = {
                **param_values,
                'id': new_query_id,
                'offset': query_offset,
            }

        return trades
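
The pagination step above works around the subgraph's skip limit: the offset advances by the page size until it reaches the skip limit, then it resets to 0 and the query is instead anchored on ids greater than the last one seen. A rough, isolated sketch of that step (the constants are assumed stand-ins for GRAPH_QUERY_LIMIT and GRAPH_QUERY_SKIP_LIMIT):

from typing import Tuple

QUERY_LIMIT = 1000  # page size, standing in for GRAPH_QUERY_LIMIT
SKIP_LIMIT = 5000   # maximum offset the subgraph accepts, standing in for GRAPH_QUERY_SKIP_LIMIT


def next_page(query_offset: int, last_seen_id: str) -> Tuple[int, str]:
    """Return the (offset, id) pair to use for the next page of results."""
    if query_offset == SKIP_LIMIT:
        # Offset exhausted: restart from 0 but only ask for ids after the last one seen
        return 0, last_seen_id
    # Still within the skip limit: advance the offset and keep the id filter disabled ('0')
    return query_offset + QUERY_LIMIT, '0'


assert next_page(0, '0xabc-5') == (QUERY_LIMIT, '0')
assert next_page(SKIP_LIMIT, '0xabc-5') == (0, '0xabc-5')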
Example No. 6
    def _get_events_graph(
        self,
        address: ChecksumEthAddress,
        start_ts: Timestamp,
        end_ts: Timestamp,
        event_type: EventType,
    ) -> List[LiquidityPoolEvent]:
        """Get the address' events (mints & burns) querying the AMM's subgraph
        Each event data is stored in a <LiquidityPoolEvent>.
        """
        address_events: List[LiquidityPoolEvent] = []
        if event_type == self.mint_event:
            query = MINTS_QUERY
            query_schema = 'mints'
        elif event_type == self.burn_event:
            query = BURNS_QUERY
            query_schema = 'burns'
        else:
            log.error(
                f'Unexpected {self.location} event_type: {event_type}. Skipping events query.',
            )
            return address_events

        query_id = '0'
        query_offset = 0
        param_types = {
            '$limit': 'Int!',
            '$offset': 'Int!',
            '$address': 'Bytes!',
            '$start_ts': 'BigInt!',
            '$end_ts': 'BigInt!',
            '$id': 'ID!',
        }
        param_values = {
            'limit': GRAPH_QUERY_LIMIT,
            'offset': query_offset,
            'address': address.lower(),
            'start_ts': str(start_ts),
            'end_ts': str(end_ts),
            'id': query_id,
        }
        querystr = format_query_indentation(query.format())

        while True:
            try:
                result = self.graph.query(
                    querystr=querystr,
                    param_types=param_types,
                    param_values=param_values,
                )
            except RemoteError as e:
                self.msg_aggregator.add_error(
                    SUBGRAPH_REMOTE_ERROR_MSG.format(error_msg=str(e),
                                                     location=self.location), )
                raise
            except AttributeError as e:
                raise ModuleInitializationFailure(
                    f'{self.location} subgraph remote error') from e

            result_data = result[query_schema]

            for event in result_data:
                token0_ = event['pair']['token0']
                token1_ = event['pair']['token1']

                try:
                    token0_deserialized = deserialize_ethereum_address(
                        token0_['id'])
                    token1_deserialized = deserialize_ethereum_address(
                        token1_['id'])
                    pool_deserialized = deserialize_ethereum_address(
                        event['pair']['id'])
                except DeserializationError as e:
                    msg = (
                        f'Failed to deserialize address involved in liquidity pool event for'
                        f' {self.location}. Token 0: {token0_["id"]}, token 1: {token1_["id"]},'
                        f' pair: {event["pair"]["id"]}.')
                    log.error(msg)
                    raise RemoteError(msg) from e

                token0 = get_or_create_ethereum_token(
                    userdb=self.database,
                    symbol=token0_['symbol'],
                    ethereum_address=token0_deserialized,
                    name=token0_['name'],
                    decimals=int(token0_['decimals']),
                )
                token1 = get_or_create_ethereum_token(
                    userdb=self.database,
                    symbol=token1_['symbol'],
                    ethereum_address=token1_deserialized,
                    name=token1_['name'],
                    decimals=int(token1_['decimals']),
                )
                lp_event = LiquidityPoolEvent(
                    tx_hash=event['transaction']['id'],
                    log_index=int(event['logIndex']),
                    address=address,
                    timestamp=Timestamp(int(event['timestamp'])),
                    event_type=event_type,
                    pool_address=pool_deserialized,
                    token0=token0,
                    token1=token1,
                    amount0=AssetAmount(FVal(event['amount0'])),
                    amount1=AssetAmount(FVal(event['amount1'])),
                    usd_price=Price(FVal(event['amountUSD'])),
                    lp_amount=AssetAmount(FVal(event['liquidity'])),
                )
                address_events.append(lp_event)
                query_id = event['id']

            # Check whether an extra request is needed
            if len(result_data) < GRAPH_QUERY_LIMIT:
                break

            # Update pagination step
            if query_offset == GRAPH_QUERY_SKIP_LIMIT:
                query_offset = 0
                new_query_id = query_id
            else:
                query_offset += GRAPH_QUERY_LIMIT
                new_query_id = '0'
            param_values = {
                **param_values,
                'id': new_query_id,
                'offset': query_offset,
            }

        return address_events
Example No. 7
def test_associated_locations(database):
    """Test that locations imported in different places are correctly stored in database"""
    # Add trades from different locations
    trades = [Trade(
        timestamp=Timestamp(1595833195),
        location=Location.CRYPTOCOM,
        base_asset=A_ETH,
        quote_asset=A_EUR,
        trade_type=TradeType.BUY,
        amount=AssetAmount(FVal('1.0')),
        rate=Price(FVal('281.14')),
        fee=Fee(ZERO),
        fee_currency=A_USD,
        link='',
        notes='',
    ), Trade(
        timestamp=Timestamp(1587825824),
        location=Location.CRYPTOCOM,
        base_asset=A_ETH,
        quote_asset=A_EUR,
        trade_type=TradeType.BUY,
        amount=AssetAmount(FVal('50.0')),
        rate=Price(FVal('3.521')),
        fee=Fee(ZERO),
        fee_currency=A_USD,
        link='',
        notes='',
    ), Trade(
        timestamp=Timestamp(1596014214),
        location=Location.BLOCKFI,
        base_asset=A_ETH,
        quote_asset=A_EUR,
        trade_type=TradeType.BUY,
        amount=AssetAmount(FVal('50.0')),
        rate=Price(FVal('3.521')),
        fee=Fee(ZERO),
        fee_currency=A_USD,
        link='',
        notes='',
    ), Trade(
        timestamp=Timestamp(1565888464),
        location=Location.NEXO,
        base_asset=A_ETH,
        quote_asset=A_EUR,
        trade_type=TradeType.BUY,
        amount=AssetAmount(FVal('50.0')),
        rate=Price(FVal('3.521')),
        fee=Fee(ZERO),
        fee_currency=A_USD,
        link='',
        notes='',
    ), Trade(
        timestamp=Timestamp(1596014214),
        location=Location.NEXO,
        base_asset=A_ETH,
        quote_asset=A_EUR,
        trade_type=TradeType.BUY,
        amount=AssetAmount(FVal('50.0')),
        rate=Price(FVal('3.521')),
        fee=Fee(ZERO),
        fee_currency=A_USD,
        link='',
        notes='',
    ), Trade(
        timestamp=Timestamp(1612051199),
        location=Location.BLOCKFI,
        base_asset=symbol_to_asset_or_token('USDC'),
        quote_asset=symbol_to_asset_or_token('LTC'),
        trade_type=TradeType.BUY,
        amount=AssetAmount(FVal('6404.6')),
        rate=Price(FVal('151.6283999982779809352223797')),
        fee=None,
        fee_currency=None,
        link='',
        notes='One Time',
    ), Trade(
        timestamp=Timestamp(1595833195),
        location=Location.POLONIEX,
        base_asset=A_ETH,
        quote_asset=A_EUR,
        trade_type=TradeType.BUY,
        amount=AssetAmount(FVal('1.0')),
        rate=Price(FVal('281.14')),
        fee=Fee(ZERO),
        fee_currency=A_USD,
        link='',
        notes='',
    ), Trade(
        timestamp=Timestamp(1596429934),
        location=Location.COINBASE,
        base_asset=A_ETH,
        quote_asset=A_EUR,
        trade_type=TradeType.BUY,
        amount=AssetAmount(FVal('0.00061475')),
        rate=Price(FVal('309.0687271248474989833265555')),
        fee=Fee(ZERO),
        fee_currency=A_USD,
        link='',
        notes='',
    ), Trade(
        timestamp=Timestamp(1596429934),
        location=Location.EXTERNAL,
        base_asset=A_ETH,
        quote_asset=A_EUR,
        trade_type=TradeType.BUY,
        amount=AssetAmount(FVal('1')),
        rate=Price(FVal('320')),
        fee=Fee(ZERO),
        fee_currency=A_USD,
        link='',
        notes='',
    )]

    # Add multiple entries for same exchange + connected exchange
    database.add_trades(trades)
    kraken_api_key1 = ApiKey('kraken_api_key')
    kraken_api_secret1 = ApiSecret(b'kraken_api_secret')
    kraken_api_key2 = ApiKey('kraken_api_key2')
    kraken_api_secret2 = ApiSecret(b'kraken_api_secret2')
    binance_api_key = ApiKey('binance_api_key')
    binance_api_secret = ApiSecret(b'binance_api_secret')

    # add mock kraken and binance
    database.add_exchange('kraken1', Location.KRAKEN, kraken_api_key1, kraken_api_secret1)
    database.add_exchange('kraken2', Location.KRAKEN, kraken_api_key2, kraken_api_secret2)
    database.add_exchange('binance', Location.BINANCE, binance_api_key, binance_api_secret)

    # Add uniswap and sushiswap events
    database.add_amm_events([
        LiquidityPoolEvent(
            tx_hash='0x47ea26957ce09e84a51b51dfdab6a4ac1c3672a372eef77b15ef7677174ac847',
            log_index=23,
            address=ChecksumEthAddress('0x3163Bb273E8D9960Ce003fD542bF26b4C529f515'),
            timestamp=Timestamp(1590011534),
            event_type=EventType.MINT_SUSHISWAP,
            pool_address=ChecksumEthAddress('0xa2107FA5B38d9bbd2C461D6EDf11B11A50F6b974'),
            token0=EthereumToken('0x514910771AF9Ca656af840dff83E8264EcF986CA'),
            token1=EthereumToken('0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2'),
            amount0=FVal('3.313676003468974932'),
            amount1=FVal('0.064189269269768657'),
            usd_price=FVal('26.94433946158740371839009166230438'),
            lp_amount=FVal('0.460858304063739927'),
        ),
    ])
    database.add_amm_swaps([
        AMMSwap(
            tx_hash='0xa54bf4c68d435e3c8f432fd7e62b7f8aca497a831a3d3fca305a954484ddd7b2',
            log_index=208,
            address=ChecksumEthAddress('0xa2107FA5B38d9bbd2C461D6EDf11B11A50F6b974'),
            from_address=string_to_ethereum_address('0xd9e1cE17f2641f24aE83637ab66a2cca9C378B9F'),
            to_address=string_to_ethereum_address('0xC9cB53B48A2f3A9e75982685644c1870F1405CCb'),
            timestamp=Timestamp(1609301469),
            location=Location.UNISWAP,
            token0=EthereumToken('0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2'),
            token1=EthereumToken('0xdAC17F958D2ee523a2206206994597C13D831ec7'),
            amount0_in=AssetAmount(FVal('2.6455727132446468')),
            amount1_in=AssetAmount(ZERO),
            amount0_out=AssetAmount(ZERO),
            amount1_out=AssetAmount(FVal('1936.810111')),
        ),
    ])
    database.add_balancer_events([
        BalancerEvent(
            tx_hash='0xa54bf4c68d435e3c8f432fd7e62b7f8aca497a831a3d3fca305a954484ddd7b3',
            log_index=23,
            address=ChecksumEthAddress('0xa2107FA5B38d9bbd2C461D6EDf11B11A50F6b974'),
            timestamp=Timestamp(1609301469),
            event_type=BalancerBPTEventType.MINT,
            pool_address_token=EthereumToken('0x514910771AF9Ca656af840dff83E8264EcF986CA'),
            lp_balance=Balance(amount=FVal(2), usd_value=FVal(3)),
            amounts=[
                AssetAmount(FVal(1)),
                AssetAmount(FVal(2)),
            ],
        ),
    ])
    expected_locations = {
        Location.KRAKEN,
        Location.BINANCE,
        Location.BLOCKFI,
        Location.NEXO,
        Location.CRYPTOCOM,
        Location.POLONIEX,
        Location.COINBASE,
        Location.EXTERNAL,
        Location.SUSHISWAP,
        Location.UNISWAP,
        Location.BALANCER,
    }

    assert set(database.get_associated_locations()) == expected_locations
Example No. 8
def address_to_bytes32(address: ChecksumEthAddress) -> str:
    return '0x' + 24 * '0' + address.lower()[2:]
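
For illustration, the helper left-pads the 20-byte address with 12 zero bytes, giving '0x' plus 64 hex characters (the address here is just an example):

padded = address_to_bytes32(
    ChecksumEthAddress('0x6B175474E89094C44Da98b954EedeAC495271d0F'))
assert len(padded) == 66  # '0x' + 24 padding characters + 40 address characters
assert padded == '0x0000000000000000000000006b175474e89094c44da98b954eedeac495271d0f'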
Example No. 9
    def _get_user_data(
        self,
        from_ts: Timestamp,
        to_ts: Timestamp,
        address: ChecksumEthAddress,
        balances: AaveBalances,
    ) -> AaveHistory:
        last_query = self.database.get_used_query_range(
            f'aave_events_{address}')
        db_events = self.database.get_aave_events(address=address)

        now = ts_now()
        last_query_ts = 0
        if last_query is not None:
            last_query_ts = last_query[1]
            from_ts = Timestamp(last_query_ts + 1)

        deposits: List[AaveDepositWithdrawalEvent] = []
        withdrawals: List[AaveDepositWithdrawalEvent] = []
        borrows: List[AaveBorrowEvent] = []
        repays: List[AaveRepayEvent] = []
        liquidation_calls: List[AaveLiquidationEvent] = []
        query = self.graph.query(
            querystr=USER_EVENTS_QUERY,
            param_types={'$address': 'ID!'},
            param_values={'address': address.lower()},
        )
        query_v2 = self.graph_v2.query(
            querystr=USER_EVENTS_QUERY_V2,
            param_types={'$address': 'ID!'},
            param_values={'address': address.lower()},
        )

        user_merged_data: Dict[str, Any] = defaultdict(list)
        if now - last_query_ts > AAVE_GRAPH_RECENT_SECS:
            # In theory if these were individual queries we should do them only if
            # we have not queried recently. In practise since we only do 1 query above
            # this is useless for now, but keeping the mechanism in case we change
            # the way we query the subgraph
            self._process_graph_query_result(
                query=query,
                deposits=deposits,
                withdrawals=withdrawals,
                borrows=borrows,
                repays=repays,
                liquidation_calls=liquidation_calls,
                user_merged_data=user_merged_data,
                from_ts=from_ts,
                to_ts=to_ts,
            )
            self._process_graph_query_result(
                query=query_v2,
                deposits=deposits,
                withdrawals=withdrawals,
                borrows=borrows,
                repays=repays,
                liquidation_calls=liquidation_calls,
                user_merged_data=user_merged_data,
                from_ts=from_ts,
                to_ts=to_ts,
            )

        result = self._process_events(
            user_address=address,
            user_result=user_merged_data,
            from_ts=from_ts,
            to_ts=to_ts,
            deposits=deposits,
            withdrawals=withdrawals,
            borrows=borrows,
            repays=repays,
            liquidations=liquidation_calls,
            db_events=db_events,
            balances=balances,
        )

        # Add all new events to the DB
        new_events: List[
            AaveEvent] = deposits + withdrawals + result.interest_events + borrows + repays + liquidation_calls  # type: ignore  # noqa: E501
        self.database.add_aave_events(address, new_events)
        # After all events have been queried then also update the query range.
        # Even if no events are found for an address we need to remember the range
        self.database.update_used_query_range(
            name=f'aave_events_{address}',
            start_ts=Timestamp(0),
            end_ts=now,
        )

        # Sort actions so that actions with same time are sorted deposit -> interest -> withdrawal
        all_events: List[AaveEvent] = new_events + db_events
        sort_map = {
            'deposit': 0,
            'interest': 0.1,
            'withdrawal': 0.2,
            'borrow': 0.3,
            'repay': 0.4,
            'liquidation': 0.5,
        }
        all_events.sort(
            key=lambda event: sort_map[event.event_type] + event.timestamp)
        return AaveHistory(
            events=all_events,
            total_earned_interest=result.total_earned_interest,
            total_lost=result.total_lost,
            total_earned_liquidations=result.total_earned_liquidations,
        )
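
A small, self-contained sketch of the sort key used above: the fractional offsets make events that share a timestamp come out as deposit, then interest, then withdrawal, and so on. The (event_type, timestamp) tuples below stand in for AaveEvent objects.

events = [
    ('withdrawal', 1595833195),
    ('interest', 1595833195),
    ('deposit', 1595833195),
    ('repay', 1587825824),
]
sort_map = {
    'deposit': 0,
    'interest': 0.1,
    'withdrawal': 0.2,
    'borrow': 0.3,
    'repay': 0.4,
    'liquidation': 0.5,
}
events.sort(key=lambda event: sort_map[event[0]] + event[1])
# The earlier repay comes first; the three same-timestamp events keep the
# deposit -> interest -> withdrawal ordering
assert [e[0] for e in events] == ['repay', 'deposit', 'interest', 'withdrawal']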
Example No. 10
    def _get_trades_graph_v3_for_address(
        self,
        address: ChecksumEthAddress,
        start_ts: Timestamp,
        end_ts: Timestamp,
    ) -> List[AMMTrade]:
        """Get the address' trades data querying the Uniswap subgraph

        Each trade (swap) instantiates an <AMMTrade>.

        The trade pair (i.e. BASE_QUOTE) is determined by `reserve0_reserve1`.
        Translated to Uniswap lingo:

        Trade type BUY:
        - `amount1` (QUOTE, reserve1) is gt 0.
        - `amount0` (BASE, reserve0) is lt 0.

        Trade type SELL:
        - `amount0` (BASE, reserve0) is gt 0.
        - `amount1` (QUOTE, reserve1) is lt 0.

        May raise:
        - RemoteError
        """
        trades: List[AMMTrade] = []
        param_types = {
            '$limit': 'Int!',
            '$offset': 'Int!',
            '$address': 'Bytes!',
            '$start_ts': 'BigInt!',
            '$end_ts': 'BigInt!',
        }
        param_values = {
            'limit': GRAPH_QUERY_LIMIT,
            'offset': 0,
            'address': address.lower(),
            'start_ts': str(start_ts),
            'end_ts': str(end_ts),
        }
        querystr = format_query_indentation(V3_SWAPS_QUERY.format())

        while True:
            try:
                result = self.graph_v3.query(
                    querystr=querystr,
                    param_types=param_types,
                    param_values=param_values,
                )
            except RemoteError as e:
                self.msg_aggregator.add_error(
                    SUBGRAPH_REMOTE_ERROR_MSG.format(
                        error_msg=str(e),
                        location=self.location,
                    ), )
                raise

            result_data = result['swaps']
            for entry in result_data:
                swaps = []
                for swap in entry['transaction']['swaps']:
                    timestamp = swap['timestamp']
                    swap_token0 = swap['token0']
                    swap_token1 = swap['token1']

                    try:
                        token0_deserialized = deserialize_ethereum_address(
                            swap_token0['id'])
                        token1_deserialized = deserialize_ethereum_address(
                            swap_token1['id'])
                        from_address_deserialized = deserialize_ethereum_address(
                            swap['sender'])
                        to_address_deserialized = deserialize_ethereum_address(
                            swap['recipient'])
                    except DeserializationError:
                        msg = (
                            f'Failed to deserialize addresses in trade from uniswap graph with '
                            f'token 0: {swap_token0["id"]}, token 1: {swap_token1["id"]}, '
                            f'swap sender: {swap["sender"]}, swap receiver {swap["to"]}'
                        )
                        log.error(msg)
                        continue

                    token0 = get_or_create_ethereum_token(
                        userdb=self.database,
                        symbol=swap_token0['symbol'],
                        ethereum_address=token0_deserialized,
                        name=swap_token0['name'],
                        decimals=int(swap_token0['decimals']),
                    )
                    token1 = get_or_create_ethereum_token(
                        userdb=self.database,
                        symbol=swap_token1['symbol'],
                        ethereum_address=token1_deserialized,
                        name=swap_token1['name'],
                        decimals=int(swap_token1['decimals']),
                    )

                    try:
                        if swap['amount0'].startswith('-'):
                            amount0_in = AssetAmount(FVal(ZERO))
                            amount0_out = deserialize_asset_amount_force_positive(
                                swap['amount0'])
                            amount1_in = deserialize_asset_amount_force_positive(
                                swap['amount1'])
                            amount1_out = AssetAmount(FVal(ZERO))
                        else:
                            amount0_in = deserialize_asset_amount_force_positive(
                                swap['amount0'])
                            amount0_out = AssetAmount(FVal(ZERO))
                            amount1_in = AssetAmount(FVal(ZERO))
                            amount1_out = deserialize_asset_amount_force_positive(
                                swap['amount1'])
                    except ValueError as e:
                        log.error(
                            f'Failed to read amounts in Uniswap V3 swap {str(swap)}. '
                            f'{str(e)}.', )
                        continue

                    swaps.append(
                        AMMSwap(
                            tx_hash=swap['id'].split('#')[0],
                            log_index=int(swap['logIndex']),
                            address=address,
                            from_address=from_address_deserialized,
                            to_address=to_address_deserialized,
                            timestamp=Timestamp(int(timestamp)),
                            location=Location.UNISWAP,
                            token0=token0,
                            token1=token1,
                            amount0_in=amount0_in,
                            amount1_in=amount1_in,
                            amount0_out=amount0_out,
                            amount1_out=amount1_out,
                        ))

                # with the new logic the list of swaps can be empty, in that case don't try
                # to make trades from the swaps
                if len(swaps) == 0:
                    continue

                # Now that we got all swaps for a transaction, create the trade object
                trades.extend(self._tx_swaps_to_trades(swaps))
            # Check whether an extra request is needed
            if len(result_data) < GRAPH_QUERY_LIMIT:
                break

            # Update pagination step
            param_values = {
                **param_values,
                'offset':
                param_values['offset'] + GRAPH_QUERY_LIMIT,  # type: ignore
            }
        return trades
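
A condensed sketch of the sign convention handled in the try-block above: in the V3 subgraph a negative amount means that token left the pool toward the trader, so it goes on the 'out' side and the other token on the 'in' side. The helper and values below are hypothetical.

from typing import Tuple


def split_v3_amounts(amount0: str, amount1: str) -> Tuple[str, str, str, str]:
    """Map signed V3 amounts to (amount0_in, amount0_out, amount1_in, amount1_out)."""
    if amount0.startswith('-'):
        # token0 left the pool: it is the swap's output, token1 is the input
        return '0', amount0.lstrip('-'), amount1, '0'
    # token0 entered the pool: it is the input, token1 is the output
    return amount0, '0', '0', amount1.lstrip('-')


# Hypothetical swap: 2.5 of token0 came out of the pool against 100 of token1 paid in
assert split_v3_amounts('-2.5', '100') == ('0', '2.5', '100', '0')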
Example No. 11
def test_update_conflicts(rotkehlchen_api_server, globaldb):
    """Test that conflicts in an asset update are handled properly"""
    async_query = random.choice([False, True])
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    update_1 = """INSERT INTO assets(identifier,type,name,symbol,started, swapped_for, coingecko, cryptocompare, details_reference) VALUES("121-ada-FADS-as", "F","A name","SYMBOL",NULL, NULL,"", "", "121-ada-FADS-as");INSERT INTO common_asset_details(asset_id, forked) VALUES("121-ada-FADS-as", "BTC");
*
INSERT INTO ethereum_tokens(address, decimals, protocol) VALUES("0x6B175474E89094C44Da98b954EedeAC495271d0F", 8, "maker");INSERT INTO assets(identifier,type, name, symbol,started, swapped_for, coingecko, cryptocompare, details_reference) VALUES("_ceth_0x6B175474E89094C44Da98b954EedeAC495271d0F", "C", "New Multi Collateral DAI", "NDAI", 1573672677, NULL, "dai", NULL, "0x6B175474E89094C44Da98b954EedeAC495271d0F");
*
INSERT INTO assets(identifier,type,name,symbol,started, swapped_for, coingecko, cryptocompare, details_reference) VALUES("DASH", "B","Dash","DASH",1337, NULL, "dash-coingecko", NULL, "DASH");INSERT INTO common_asset_details(asset_id, forked) VALUES("DASH", "BTC");
*
INSERT INTO ethereum_tokens(address, decimals, protocol) VALUES("0x1B175474E89094C44Da98b954EedeAC495271d0F", 18, NULL); INSERT INTO assets(identifier,type, name, symbol,started, swapped_for, coingecko, cryptocompare, details_reference) VALUES("_ceth_0x1B175474E89094C44Da98b954EedeAC495271d0F", "C", "Conflicting token", "CTK", 1573672677, NULL, "ctk", NULL, "0x1B175474E89094C44Da98b954EedeAC495271d0F");
*
    """  # noqa: E501
    globaldb.add_asset(  # add a conflicting token
        asset_id='_ceth_0x1B175474E89094C44Da98b954EedeAC495271d0F',
        asset_type=AssetType.ETHEREUM_TOKEN,
        data=EthereumToken.initialize(
            address=ChecksumEthAddress('0x1B175474E89094C44Da98b954EedeAC495271d0F'),
            decimals=12,
            name='Conflicting token',
            symbol='CTK',
            started=None,
            swapped_for=None,
            coingecko='ctk',
            cryptocompare=None,
            protocol=None,
            underlying_tokens=None,
        ),
    )
    globaldb.add_user_owned_assets([Asset('_ceth_0x1B175474E89094C44Da98b954EedeAC495271d0F')])
    update_patch = mock_asset_updates(
        original_requests_get=requests.get,
        latest=999999991,
        updates={"999999991": {
            "changes": 3,
            "min_schema_version": GLOBAL_DB_VERSION,
            "max_schema_version": GLOBAL_DB_VERSION,
        }},
        sql_actions={"999999991": update_1},
    )
    globaldb.add_setting_value(ASSETS_VERSION_KEY, 999999990)
    start_assets_num = len(globaldb.get_all_asset_data(mapping=False))
    with update_patch:
        response = requests.get(
            api_url_for(
                rotkehlchen_api_server,
                'assetupdatesresource',
            ),
            json={'async_query': async_query},
        )
        if async_query:
            task_id = assert_ok_async_response(response)
            outcome = wait_for_async_task(
                rotkehlchen_api_server,
                task_id,
            )
            result = outcome['result']
            assert outcome['message'] == ''
        else:
            result = assert_proper_response_with_result(response)
        assert result['local'] == 999999990
        assert result['remote'] == 999999991
        assert result['new_changes'] == 3

        response = requests.post(
            api_url_for(
                rotkehlchen_api_server,
                'assetupdatesresource',
            ),
            json={'async_query': async_query},
        )
        if async_query:
            task_id = assert_ok_async_response(response)
            outcome = wait_for_async_task(
                rotkehlchen_api_server,
                task_id,
            )
            assert outcome['message'] == 'Found conflicts during assets upgrade'
            result = outcome['result']
        else:
            result = assert_proper_response_with_result(
                response,
                message='Found conflicts during assets upgrade',
                status_code=HTTPStatus.CONFLICT,
            )

        # Make sure that nothing was committed
        assert globaldb.get_setting_value(ASSETS_VERSION_KEY, None) == 999999990
        assert len(globaldb.get_all_asset_data(mapping=False)) == start_assets_num
        with pytest.raises(UnknownAsset):
            Asset('121-ada-FADS-as')
        errors = rotki.msg_aggregator.consume_errors()
        warnings = rotki.msg_aggregator.consume_warnings()
        assert len(errors) == 0, f'Found errors: {errors}'
        assert len(warnings) == 0, f'Found warnings: {warnings}'
        # See that we get 3 conflicts
        expected_result = [{
            'identifier': '_ceth_0x6B175474E89094C44Da98b954EedeAC495271d0F',
            'local': {
                'name': 'Multi Collateral Dai',
                'symbol': 'DAI',
                'asset_type': 'ethereum token',
                'started': 1573672677,
                'forked': None,
                'swapped_for': None,
                'ethereum_address': '0x6B175474E89094C44Da98b954EedeAC495271d0F',
                'decimals': 18,
                'cryptocompare': None,
                'coingecko': 'dai',
                'protocol': None,
            },
            'remote': {
                'name': 'New Multi Collateral DAI',
                'symbol': 'NDAI',
                'asset_type': 'ethereum token',
                'started': 1573672677,
                'forked': None,
                'swapped_for': None,
                'ethereum_address': '0x6B175474E89094C44Da98b954EedeAC495271d0F',
                'decimals': 8,
                'cryptocompare': None,
                'coingecko': 'dai',
                'protocol': 'maker',
            },
        }, {
            'identifier': 'DASH',
            'local': {
                'name': 'Dash',
                'symbol': 'DASH',
                'asset_type': 'own chain',
                'started': 1390095618,
                'forked': None,
                'swapped_for': None,
                'ethereum_address': None,
                'decimals': None,
                'cryptocompare': None,
                'coingecko': 'dash',
                'protocol': None,
            },
            'remote': {
                'name': 'Dash',
                'symbol': 'DASH',
                'asset_type': 'own chain',
                'started': 1337,
                'forked': 'BTC',
                'swapped_for': None,
                'ethereum_address': None,
                'decimals': None,
                'cryptocompare': None,
                'coingecko': 'dash-coingecko',
                'protocol': None,
            },
        }, {
            'identifier': '_ceth_0x1B175474E89094C44Da98b954EedeAC495271d0F',
            'local': {
                'asset_type': 'ethereum token',
                'coingecko': 'ctk',
                'cryptocompare': None,
                'decimals': 12,
                'ethereum_address': '0x1B175474E89094C44Da98b954EedeAC495271d0F',
                'forked': None,
                'name': 'Conflicting token',
                'protocol': None,
                'started': None,
                'swapped_for': None,
                'symbol': 'CTK',
            },
            'remote': {
                'asset_type': 'ethereum token',
                'coingecko': 'ctk',
                'cryptocompare': None,
                'decimals': 18,
                'ethereum_address': '0x1b175474E89094C44DA98B954EeDEAC495271d0f',
                'forked': None,
                'name': 'Conflicting token',
                'protocol': None,
                'started': 1573672677,
                'swapped_for': None,
                'symbol': 'CTK',
            },
        }]
        assert result == expected_result

        # now try the update again but specify the conflict resolutions
        conflicts = {'_ceth_0x6B175474E89094C44Da98b954EedeAC495271d0F': 'remote', 'DASH': 'local', '_ceth_0x1B175474E89094C44Da98b954EedeAC495271d0F': 'remote'}  # noqa: E501
        response = requests.post(
            api_url_for(
                rotkehlchen_api_server,
                'assetupdatesresource',
            ),
            json={'async_query': async_query, 'conflicts': conflicts},
        )
        if async_query:
            task_id = assert_ok_async_response(response)
            outcome = wait_for_async_task(
                rotkehlchen_api_server,
                task_id,
            )
            assert outcome['message'] == ''
            result = outcome['result']
        else:
            result = assert_proper_response_with_result(
                response,
                message='',
                status_code=HTTPStatus.OK,
            )

        cursor = globaldb.conn.cursor()
        # check conflicts were solved as per the given choices and new asset also added
        assert result is True
        assert globaldb.get_setting_value(ASSETS_VERSION_KEY, None) == 999999991
        errors = rotki.msg_aggregator.consume_errors()
        warnings = rotki.msg_aggregator.consume_warnings()
        assert len(errors) == 0, f'Found errors: {errors}'
        assert len(warnings) == 0, f'Found warnings: {warnings}'
        dai = EthereumToken('0x6B175474E89094C44Da98b954EedeAC495271d0F')
        assert dai.identifier == strethaddress_to_identifier('0x6B175474E89094C44Da98b954EedeAC495271d0F')  # noqa: E501
        assert dai.name == 'New Multi Collateral DAI'
        assert dai.symbol == 'NDAI'
        assert dai.asset_type == AssetType.ETHEREUM_TOKEN
        assert dai.started == 1573672677
        assert dai.forked is None
        assert dai.swapped_for is None
        assert dai.coingecko == 'dai'
        assert dai.cryptocompare is None
        assert dai.ethereum_address == '0x6B175474E89094C44Da98b954EedeAC495271d0F'
        assert dai.decimals == 8
        assert dai.protocol == 'maker'
        # make sure data is in both tables
        assert cursor.execute('SELECT COUNT(*) from ethereum_tokens WHERE address="0x6B175474E89094C44Da98b954EedeAC495271d0F";').fetchone()[0] == 1  # noqa: E501
        assert cursor.execute('SELECT COUNT(*) from assets WHERE identifier="_ceth_0x6B175474E89094C44Da98b954EedeAC495271d0F";').fetchone()[0] == 1  # noqa: E501

        dash = Asset('DASH')
        assert dash.identifier == 'DASH'
        assert dash.name == 'Dash'
        assert dash.symbol == 'DASH'
        assert dash.asset_type == AssetType.OWN_CHAIN
        assert dash.started == 1390095618
        assert dash.forked is None
        assert dash.swapped_for is None
        assert dash.coingecko == 'dash'
        assert dash.cryptocompare is None
        assert cursor.execute('SELECT COUNT(*) from common_asset_details WHERE asset_id="DASH";').fetchone()[0] == 1  # noqa: E501
        assert cursor.execute('SELECT COUNT(*) from assets WHERE identifier="DASH";').fetchone()[0] == 1  # noqa: E501

        new_asset = Asset('121-ada-FADS-as')
        assert new_asset.identifier == '121-ada-FADS-as'
        assert new_asset.name == 'A name'
        assert new_asset.symbol == 'SYMBOL'
        assert new_asset.asset_type == AssetType.COUNTERPARTY_TOKEN
        assert new_asset.started is None
        assert new_asset.forked == 'BTC'
        assert new_asset.swapped_for is None
        assert new_asset.coingecko == ''
        assert new_asset.cryptocompare == ''
        assert cursor.execute('SELECT COUNT(*) from common_asset_details WHERE asset_id="121-ada-FADS-as";').fetchone()[0] == 1  # noqa: E501
        assert cursor.execute('SELECT COUNT(*) from assets WHERE identifier="121-ada-FADS-as";').fetchone()[0] == 1  # noqa: E501

        ctk = EthereumToken('0x1B175474E89094C44Da98b954EedeAC495271d0F')
        assert ctk.name == 'Conflicting token'
        assert ctk.symbol == 'CTK'
        assert ctk.asset_type == AssetType.ETHEREUM_TOKEN
        assert ctk.started == 1573672677
        assert ctk.forked is None
        assert ctk.swapped_for is None
        assert ctk.coingecko == 'ctk'
        assert ctk.cryptocompare is None
        assert ctk.ethereum_address == '0x1B175474E89094C44Da98b954EedeAC495271d0F'
        assert ctk.decimals == 18
        assert ctk.protocol is None
        assert cursor.execute('SELECT COUNT(*) from ethereum_tokens WHERE address="0x1B175474E89094C44Da98b954EedeAC495271d0F";').fetchone()[0] == 1  # noqa: E501
        assert cursor.execute('SELECT COUNT(*) from assets WHERE identifier="_ceth_0x1B175474E89094C44Da98b954EedeAC495271d0F";').fetchone()[0] == 1  # noqa: E501