Ejemplo n.º 1
0
    def query_deposits_withdrawals(
            self,
            start_ts: Timestamp,
            end_ts: Timestamp,
    ) -> List[AssetMovement]:
        """Return the user's deposit/withdrawal history for the given time range.

        Movements cached in the local DB are read first; any sub-ranges not
        yet queried are fetched from the exchange and persisted.
        """
        movements = self.db.get_asset_movements(
            from_ts=start_ts,
            to_ts=end_ts,
            location=deserialize_location(self.name),
        )
        range_name = f'{self.name}_asset_movements'
        db_ranges = DBQueryRanges(self.db)
        missing_ranges = db_ranges.get_location_query_ranges(
            location_string=range_name,
            start_ts=start_ts,
            end_ts=end_ts,
        )
        # fetch each not-yet-queried time window from the exchange
        fetched = []
        for query_start_ts, query_end_ts in missing_ranges:
            fetched.extend(self.query_online_deposits_withdrawals(
                start_ts=query_start_ts,
                end_ts=query_end_ts,
            ))

        # persist anything new and mark the whole range as queried
        if fetched:
            self.db.add_asset_movements(fetched)
        db_ranges.update_used_query_range(
            location_string=range_name,
            start_ts=start_ts,
            end_ts=end_ts,
            ranges_to_query=missing_ranges,
        )
        movements.extend(fetched)

        return movements
Ejemplo n.º 2
0
def test_update_used_query_range(database):
    """Check that DBQueryRanges.update_used_query_range correctly extends the
    stored (start, end) range for a location and ignores empty updates."""
    dbranges = DBQueryRanges(database)
    location1 = 'location1'
    location2 = 'location2'

    # seed pre-existing used ranges directly in the DB
    database.update_used_query_range(location1, 15, 25)
    database.update_used_query_range(location2, 10, 125)

    dbranges.update_used_query_range(location1, 0, 10, [])
    msg = 'empty used query range should do nothing'
    assert database.get_used_query_range(location1) == (15, 25), msg

    # (12, 90) overlaps the stored (15, 25), so the stored range widens to (12, 90)
    start_ts = 12
    end_ts = 90
    query_range = dbranges.get_location_query_ranges(location1, start_ts,
                                                     end_ts)
    dbranges.update_used_query_range(
        location1,
        start_ts=start_ts,
        end_ts=end_ts,
        ranges_to_query=query_range,
    )
    assert database.get_used_query_range(location1) == (12, 90)

    # (250, 500) extends the stored (10, 125) only on the right -> (10, 500)
    start_ts = 250
    end_ts = 500
    query_range = dbranges.get_location_query_ranges(location2, start_ts,
                                                     end_ts)
    dbranges.update_used_query_range(
        location2,
        start_ts=start_ts,
        end_ts=end_ts,
        ranges_to_query=query_range,
    )
    assert database.get_used_query_range(location2) == (10, 500)
Ejemplo n.º 3
0
    def query_trade_history(
        self,
        start_ts: Timestamp,
        end_ts: Timestamp,
        only_cache: bool,
    ) -> List[Trade]:
        """Queries the local DB and the remote exchange for the trade history of the user

        Limits the query to the given time range and also if only_cache is True returns
        only what is already saved in the DB without performing an exchange query
        """
        log.debug(f'Querying trade history for {self.name} exchange')
        filter_query = TradesFilterQuery.make(
            from_ts=start_ts,
            to_ts=end_ts,
            location=self.location,
        )
        trades = self.db.get_trades(
            filter_query=filter_query,
            has_premium=
            True,  # this is okay since the returned trades don't make it to the user
        )
        if only_cache:
            return trades

        ranges = DBQueryRanges(self.db)
        location_string = f'{str(self.location)}_trades_{self.name}'
        # determine which sub-ranges of [start_ts, end_ts] still need an online query
        ranges_to_query = ranges.get_location_query_ranges(
            location_string=location_string,
            start_ts=start_ts,
            end_ts=end_ts,
        )

        for query_start_ts, query_end_ts in ranges_to_query:
            # If we have a time frame we have not asked the exchange for trades then
            # go ahead and do that now
            log.debug(
                f'Querying online trade history for {self.name} between '
                f'{query_start_ts} and {query_end_ts}', )
            # queried_range may differ from the requested sub-range (e.g. a
            # partial/paginated exchange response) -- TODO confirm against
            # query_online_trade_history's contract
            new_trades, queried_range = self.query_online_trade_history(
                start_ts=query_start_ts,
                end_ts=query_end_ts,
            )

            # make sure to add them to the DB
            if new_trades != []:
                self.db.add_trades(new_trades)

            # and also set the used queried timestamp range for the exchange.
            # Done per iteration, so already-made progress is persisted even
            # if a later sub-range query raises.
            ranges.update_used_query_range(
                location_string=location_string,
                start_ts=queried_range[0],
                end_ts=queried_range[1],
                ranges_to_query=[queried_range],
            )
            # finally append them to the already returned DB trades
            trades.extend(new_trades)

        return trades
Ejemplo n.º 4
0
    def query_deposits_withdrawals(
        self,
        start_ts: Timestamp,
        end_ts: Timestamp,
        only_cache: bool,
    ) -> List[AssetMovement]:
        """Return the user's deposit/withdrawal history for the given range.

        If only_cache is true only what is already cached in the DB is returned without
        an actual exchange query.
        """
        log.debug(
            f'Querying deposits/withdrawals history for {self.name} exchange')
        movements = self.db.get_asset_movements(
            filter_query=AssetMovementsFilterQuery.make(
                from_ts=start_ts,
                to_ts=end_ts,
                location=self.location,
            ),
            # this is okay since the returned trades don't make it to the user
            has_premium=True,
        )
        if only_cache:
            return movements

        range_name = f'{str(self.location)}_asset_movements_{self.name}'
        db_ranges = DBQueryRanges(self.db)
        missing_ranges = db_ranges.get_location_query_ranges(
            location_string=range_name,
            start_ts=start_ts,
            end_ts=end_ts,
        )
        # fetch each not-yet-queried time window from the exchange
        fetched = []
        for query_start_ts, query_end_ts in missing_ranges:
            log.debug(
                f'Querying online deposits/withdrawals for {self.name} between '
                f'{query_start_ts} and {query_end_ts}', )
            fetched.extend(
                self.query_online_deposits_withdrawals(
                    start_ts=query_start_ts,
                    end_ts=query_end_ts,
                ))

        # persist anything new and mark the whole range as queried
        if fetched:
            self.db.add_asset_movements(fetched)
        db_ranges.update_used_query_range(
            location_string=range_name,
            start_ts=start_ts,
            end_ts=end_ts,
            ranges_to_query=missing_ranges,
        )
        movements.extend(fetched)

        return movements
Ejemplo n.º 5
0
    def query_grant_history(
        self,
        grant_id: Optional[int],
        from_ts: Optional[Timestamp] = None,
        to_ts: Optional[Timestamp] = None,
        only_cache: bool = False,
    ) -> Dict[int, Dict[str, Any]]:
        """Query the gitcoin API (or only the local DB) for a grant's event history.

        May raise:
        - RemoteError if there is an error querying the gitcoin API
        - InputError if only_cache is False and grant_id is missing
        """
        if only_cache:
            return self.get_history_from_db(
                grant_id=grant_id,
                from_ts=from_ts,
                to_ts=to_ts,
            )

        if grant_id is None:
            raise InputError(
                'Attempted to query gitcoin events from the api without specifying a grant id',
            )

        entry_name = f'{GITCOIN_GRANTS_PREFIX}_{grant_id}'
        dbranges = DBQueryRanges(self.db)
        # default to the full gitcoin lifetime when no explicit range is given
        from_timestamp = GITCOIN_START_TS if from_ts is None else from_ts
        to_timestamp = ts_now() if to_ts is None else to_ts
        ranges = dbranges.get_location_query_ranges(
            location_string=entry_name,
            start_ts=from_timestamp,
            end_ts=to_timestamp,
        )
        # discovered by the first period query and threaded through the rest
        grant_created_on: Optional[Timestamp] = None

        for period_range in ranges:
            actions, grant_created_on = self.query_grant_history_period(
                grant_id=grant_id,
                grant_created_on=grant_created_on,
                from_timestamp=period_range[0],
                to_timestamp=period_range[1],
            )
            self.db_ledger.add_ledger_actions(actions)

        dbranges.update_used_query_range(
            location_string=entry_name,
            start_ts=from_timestamp,
            end_ts=to_timestamp,
            ranges_to_query=ranges,
        )
        # read back from the DB so cached and freshly queried events are merged
        return self.get_history_from_db(
            grant_id=grant_id,
            from_ts=from_ts,
            to_ts=to_ts,
        )
Ejemplo n.º 6
0
def test_update_used_query_range(database):
    """Check that update_used_query_range merges the freshly queried ranges
    with what is already stored for a location."""
    dbranges = DBQueryRanges(database)
    location1 = 'location1'
    location2 = 'location2'

    # seed pre-existing used ranges directly in the DB
    database.update_used_query_range(location1, 15, 25)
    database.update_used_query_range(location2, 10, 125)

    # (12, 90) overlaps the stored (15, 25) -> stored range becomes (12, 90)
    start_ts = 12
    end_ts = 90
    query_range = dbranges.get_location_query_ranges(location1, start_ts,
                                                     end_ts)
    dbranges.update_used_query_range(
        location1,
        queried_ranges=[(start_ts, end_ts)] + query_range,
    )
    assert database.get_used_query_range(location1) == (12, 90)

    # (250, 500) extends the stored (10, 125) only on the right -> (10, 500)
    start_ts = 250
    end_ts = 500
    query_range = dbranges.get_location_query_ranges(location2, start_ts,
                                                     end_ts)
    dbranges.update_used_query_range(
        location2,
        queried_ranges=[(start_ts, end_ts)] + query_range,
    )
    assert database.get_used_query_range(location2) == (10, 500)
Ejemplo n.º 7
0
    def query_trade_history(
        self,
        start_ts: Timestamp,
        end_ts: Timestamp,
        only_cache: bool,
    ) -> List[Trade]:
        """Return the user's trade history for the given time range.

        Trades cached in the local DB are always included. If only_cache is
        True no exchange query is made; otherwise any not-yet-queried
        sub-ranges are fetched from the exchange and saved.
        """
        trades = self.db.get_trades(
            from_ts=start_ts,
            to_ts=end_ts,
            location=deserialize_location(self.name),
        )
        if only_cache:
            return trades

        range_name = f'{self.name}_trades'
        db_ranges = DBQueryRanges(self.db)
        missing_ranges = db_ranges.get_location_query_ranges(
            location_string=range_name,
            start_ts=start_ts,
            end_ts=end_ts,
        )

        # fetch each time frame we have not yet asked the exchange about
        fetched_trades = []
        for query_start_ts, query_end_ts in missing_ranges:
            fetched_trades.extend(
                self.query_online_trade_history(
                    start_ts=query_start_ts,
                    end_ts=query_end_ts,
                ))

        # persist anything new and remember the now-covered query range
        if fetched_trades:
            self.db.add_trades(fetched_trades)
        db_ranges.update_used_query_range(
            location_string=range_name,
            start_ts=start_ts,
            end_ts=end_ts,
            ranges_to_query=missing_ranges,
        )
        # finally merge the new trades into the cached DB result
        trades.extend(fetched_trades)

        return trades
Ejemplo n.º 8
0
    def query_income_loss_expense(
        self,
        start_ts: Timestamp,
        end_ts: Timestamp,
        only_cache: bool,
    ) -> List[LedgerAction]:
        """Return the user's income/loss/expense ledger actions for the range.

        If only_cache is true only what is already cached in the DB is returned without
        an actual exchange query.
        """
        db = DBLedgerActions(self.db, self.db.msg_aggregator)
        # has_premium True is fine here since the result of this is not user facing atm
        ledger_actions = db.get_ledger_actions(
            filter_query=LedgerActionsFilterQuery.make(
                from_ts=start_ts,
                to_ts=end_ts,
                location=self.location,
            ),
            has_premium=True,
        )
        if only_cache:
            return ledger_actions

        range_name = f'{str(self.location)}_ledger_actions_{self.name}'
        db_ranges = DBQueryRanges(self.db)
        missing_ranges = db_ranges.get_location_query_ranges(
            location_string=range_name,
            start_ts=start_ts,
            end_ts=end_ts,
        )
        # fetch each not-yet-queried time window from the exchange
        fetched_actions = []
        for query_start_ts, query_end_ts in missing_ranges:
            fetched_actions.extend(
                self.query_online_income_loss_expense(
                    start_ts=query_start_ts,
                    end_ts=query_end_ts,
                ))

        # persist anything new and mark the whole range as queried
        if fetched_actions:
            db.add_ledger_actions(fetched_actions)
        db_ranges.update_used_query_range(
            location_string=range_name,
            start_ts=start_ts,
            end_ts=end_ts,
            ranges_to_query=missing_ranges,
        )
        ledger_actions.extend(fetched_actions)

        return ledger_actions
Ejemplo n.º 9
0
    def query_trade_history(
        self,
        start_ts: Timestamp,
        end_ts: Timestamp,
    ) -> List[Trade]:
        """Queries the local DB and the remote exchange for the trade history of the user"""
        trades = self.db.get_trades(
            from_ts=start_ts,
            to_ts=end_ts,
            location=deserialize_location(self.name),
        )
        ranges = DBQueryRanges(self.db)
        # determine which sub-ranges of [start_ts, end_ts] still need an online query
        ranges_to_query = ranges.get_location_query_ranges(
            location_string=f'{self.name}_trades',
            start_ts=start_ts,
            end_ts=end_ts,
        )

        new_trades = []
        for query_start_ts, query_end_ts in ranges_to_query:
            # If we have a time frame we have not asked the exchange for trades then
            # go ahead and do that now
            try:
                new_trades.extend(
                    self.query_online_trade_history(
                        start_ts=query_start_ts,
                        end_ts=query_end_ts,
                    ))
            except NotImplementedError:
                # only bitmex is expected to leave this unimplemented; the
                # assert enforces that no other exchange silently skips trades
                msg = 'query_online_trade_history should only not be implemented by bitmex'
                assert self.name == 'bitmex', msg

        # make sure to add them to the DB
        if new_trades != []:
            self.db.add_trades(new_trades)
        # and also set the used queried timestamp range for the exchange
        ranges.update_used_query_range(
            location_string=f'{self.name}_trades',
            start_ts=start_ts,
            end_ts=end_ts,
            ranges_to_query=ranges_to_query,
        )
        # finally append them to the already returned DB trades
        trades.extend(new_trades)

        return trades
Ejemplo n.º 10
0
    def query_margin_history(
        self,
        start_ts: Timestamp,
        end_ts: Timestamp,
    ) -> List[MarginPosition]:
        """Return the user's margin position history for the given time range.

        Combines positions cached in the local DB with any new ones fetched
        from the exchange for not-yet-queried sub-ranges.
        """
        log.debug(f'Querying margin history for {self.name} exchange')
        margin_positions = self.db.get_margin_positions(
            from_ts=start_ts,
            to_ts=end_ts,
            location=self.location,
        )
        range_name = f'{str(self.location)}_margins_{self.name}'
        db_ranges = DBQueryRanges(self.db)
        missing_ranges = db_ranges.get_location_query_ranges(
            location_string=range_name,
            start_ts=start_ts,
            end_ts=end_ts,
        )
        # fetch each not-yet-queried time window from the exchange
        fetched_positions = []
        for query_start_ts, query_end_ts in missing_ranges:
            log.debug(
                f'Querying online margin history for {self.name} between '
                f'{query_start_ts} and {query_end_ts}', )
            fetched_positions.extend(
                self.query_online_margin_history(
                    start_ts=query_start_ts,
                    end_ts=query_end_ts,
                ))

        # persist anything new and mark the whole range as queried
        if fetched_positions:
            self.db.add_margin_positions(fetched_positions)
        db_ranges.update_used_query_range(
            location_string=range_name,
            start_ts=start_ts,
            end_ts=end_ts,
            ranges_to_query=missing_ranges,
        )
        # finally merge the new positions into the cached DB result
        margin_positions.extend(fetched_positions)

        return margin_positions
Ejemplo n.º 11
0
    def query_deposits_withdrawals(
        self,
        start_ts: Timestamp,
        end_ts: Timestamp,
        only_cache: bool,
    ) -> List[AssetMovement]:
        """Return the user's deposit/withdrawal history for the given range.

        If only_cache is true only what is already cached in the DB is returned without
        an actual exchange query.
        """
        movements = self.db.get_asset_movements(
            from_ts=start_ts,
            to_ts=end_ts,
            location=self.location,
        )
        if only_cache:
            return movements

        range_name = f'{str(self.location)}_asset_movements'
        db_ranges = DBQueryRanges(self.db)
        missing_ranges = db_ranges.get_location_query_ranges(
            location_string=range_name,
            start_ts=start_ts,
            end_ts=end_ts,
        )
        # fetch each not-yet-queried time window from the exchange
        fetched = []
        for query_start_ts, query_end_ts in missing_ranges:
            fetched.extend(
                self.query_online_deposits_withdrawals(
                    start_ts=query_start_ts,
                    end_ts=query_end_ts,
                ))

        # persist anything new and mark the whole range as queried
        if fetched:
            self.db.add_asset_movements(fetched)
        db_ranges.update_used_query_range(
            location_string=range_name,
            start_ts=start_ts,
            end_ts=end_ts,
            ranges_to_query=missing_ranges,
        )
        movements.extend(fetched)

        return movements
Ejemplo n.º 12
0
    def query_margin_history(
        self,
        start_ts: Timestamp,
        end_ts: Timestamp,
    ) -> List[MarginPosition]:
        """Queries the local DB and the remote exchange for the margin positions history of the user
        """
        margin_positions = self.db.get_margin_positions(
            from_ts=start_ts,
            to_ts=end_ts,
            location=self.name,
        )
        ranges = DBQueryRanges(self.db)
        # determine which sub-ranges of [start_ts, end_ts] still need an online query
        ranges_to_query = ranges.get_location_query_ranges(
            location_string=f'{self.name}_margins',
            start_ts=start_ts,
            end_ts=end_ts,
        )
        new_positions = []
        for query_start_ts, query_end_ts in ranges_to_query:
            try:
                new_positions.extend(
                    self.query_online_margin_history(
                        start_ts=query_start_ts,
                        end_ts=query_end_ts,
                    ))
            except NotImplementedError:
                # presumably exchanges without margin trading support; they
                # contribute nothing -- NOTE(review): the silent skip looks
                # deliberate, confirm no exchange should warn here instead
                pass

        # make sure to add them to the DB
        if new_positions != []:
            self.db.add_margin_positions(new_positions)
        # and also set the last queried timestamp for the exchange
        ranges.update_used_query_range(
            location_string=f'{self.name}_margins',
            start_ts=start_ts,
            end_ts=end_ts,
            ranges_to_query=ranges_to_query,
        )
        # finally append them to the already returned DB margin positions
        margin_positions.extend(new_positions)

        return margin_positions
Ejemplo n.º 13
0
    def single_address_query_transactions(
            self,
            address: ChecksumEthAddress,
            start_ts: Timestamp,
            end_ts: Timestamp,
    ) -> None:
        """Only queries new transactions and adds them to the DB"""
        range_name = f'ethtxs_{address}'
        db_ranges = DBQueryRanges(self.database)
        missing_ranges = db_ranges.get_location_query_ranges(
            location_string=range_name,
            start_ts=start_ts,
            end_ts=end_ts,
        )
        dbethtx = DBEthTx(self.database)
        fetched_txs = []
        for query_start_ts, query_end_ts in missing_ranges:
            try:
                fetched_txs.extend(self.ethereum.etherscan.get_transactions(
                    account=address,
                    from_ts=query_start_ts,
                    to_ts=query_end_ts,
                ))
            except RemoteError as e:
                # best effort: report the failure to the user but keep going
                # with the remaining sub-ranges
                self.ethereum.msg_aggregator.add_error(
                    f'Got error "{str(e)}" while querying ethereum transactions '
                    f'from Etherscan. Transactions not added to the DB '
                    f'from_ts: {query_start_ts} '
                    f'to_ts: {query_end_ts} ',
                )

        # add new transactions to the DB
        if fetched_txs:
            dbethtx.add_ethereum_transactions(fetched_txs)

        # and also set the last queried timestamps for the address
        db_ranges.update_used_query_range(
            location_string=range_name,
            start_ts=start_ts,
            end_ts=end_ts,
            ranges_to_query=missing_ranges,
        )
Ejemplo n.º 14
0
def test_query_transactions_removed_address(
    rotkehlchen_api_server,
    ethereum_accounts,
):
    """Make sure that if an address is removed so are the transactions from the DB"""
    start_ts = 0
    end_ts = 1598453214
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    db = rotki.data.db
    # txs 1 and 2 only involve account[0], so removing it should delete them
    transactions = [
        EthereumTransaction(
            tx_hash=b'1',
            timestamp=0,
            block_number=0,
            from_address=ethereum_accounts[0],
            to_address=make_ethereum_address(),
            value=1,
            gas=1,
            gas_price=1,
            gas_used=1,
            input_data=b'',
            nonce=0,
        ),
        EthereumTransaction(
            tx_hash=b'2',
            timestamp=0,
            block_number=0,
            from_address=ethereum_accounts[0],
            to_address=make_ethereum_address(),
            value=1,
            gas=1,
            gas_price=1,
            gas_used=1,
            input_data=b'',
            nonce=1,
        ),
        EthereumTransaction(  # should remain after deleting account[0]
            tx_hash=b'3',
            timestamp=0,
            block_number=0,
            from_address=make_ethereum_address(),
            to_address=ethereum_accounts[1],
            value=1,
            gas=1,
            gas_price=1,
            gas_used=1,
            input_data=b'',
            nonce=55,
        ),
        EthereumTransaction(  # should remain after deleting account[0]
            tx_hash=b'4',
            timestamp=0,
            block_number=0,
            from_address=ethereum_accounts[1],
            to_address=ethereum_accounts[0],
            value=1,
            gas=1,
            gas_price=1,
            gas_used=1,
            input_data=b'',
            nonce=0,
        ),
        EthereumTransaction(  # should remain after deleting account[0]
            tx_hash=b'5',
            timestamp=0,
            block_number=0,
            from_address=ethereum_accounts[0],
            to_address=ethereum_accounts[1],
            value=1,
            gas=1,
            gas_price=1,
            gas_used=1,
            input_data=b'',
            nonce=0,
        )
    ]
    dbethtx = DBEthTx(db)
    dbethtx.add_ethereum_transactions(transactions)
    # Also make sure to update query ranges so as not to query etherscan at all
    for address in ethereum_accounts:
        DBQueryRanges(db).update_used_query_range(
            location_string=f'ethtxs_{address}',
            start_ts=start_ts,
            end_ts=end_ts,
            ranges_to_query=[],
        )

    # Now remove the first account (do the mocking to not query etherscan for balances)
    setup = setup_balances(
        rotki,
        ethereum_accounts=ethereum_accounts,
        btc_accounts=[],
        eth_balances=['10000', '10000'],
    )
    with ExitStack() as stack:
        setup.enter_ethereum_patches(stack)
        response = requests.delete(api_url_for(
            rotkehlchen_api_server,
            "blockchainsaccountsresource",
            blockchain='ETH',
        ),
                                   json={'accounts': [ethereum_accounts[0]]})
    assert_proper_response_with_result(response)

    # Check that only the 3 remaining transactions involving the other account are returned
    response = requests.get(
        api_url_for(
            rotkehlchen_api_server,
            'ethereumtransactionsresource',
        ), )
    result = assert_proper_response_with_result(response)
    assert len(result['entries']) == 3
    assert result['entries_found'] == 3
Ejemplo n.º 15
0
def test_query_transactions_from_to_address(
    rotkehlchen_api_server,
    ethereum_accounts,
):
    """Make sure that if a transaction is just being sent to an address it's also returned."""
    start_ts = 0
    end_ts = 1598453214
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    db = rotki.data.db
    # account[0] appears in all 3 txs (sender of 1 & 2, recipient of 3);
    # account[1] only receives tx 2
    transactions = [
        EthereumTransaction(
            tx_hash=b'1',
            timestamp=0,
            block_number=0,
            from_address=ethereum_accounts[0],
            to_address=make_ethereum_address(),
            value=1,
            gas=1,
            gas_price=1,
            gas_used=1,
            input_data=b'',
            nonce=0,
        ),
        EthereumTransaction(
            tx_hash=b'2',
            timestamp=0,
            block_number=0,
            from_address=ethereum_accounts[0],
            to_address=ethereum_accounts[1],
            value=1,
            gas=1,
            gas_price=1,
            gas_used=1,
            input_data=b'',
            nonce=1,
        ),
        EthereumTransaction(
            tx_hash=b'3',
            timestamp=0,
            block_number=0,
            from_address=make_ethereum_address(),
            to_address=ethereum_accounts[0],
            value=1,
            gas=1,
            gas_price=1,
            gas_used=1,
            input_data=b'',
            nonce=55,
        )
    ]
    dbethtx = DBEthTx(db)
    dbethtx.add_ethereum_transactions(transactions)
    # Also make sure to update query ranges so as not to query etherscan at all
    for address in ethereum_accounts:
        DBQueryRanges(db).update_used_query_range(
            location_string=f'ethtxs_{address}',
            start_ts=start_ts,
            end_ts=end_ts,
            ranges_to_query=[],
        )

    expected_entries = {ethereum_accounts[0]: 3, ethereum_accounts[1]: 1}
    # Check that we get all transactions correctly even if we query two times
    for _ in range(2):
        for address in ethereum_accounts:
            response = requests.get(
                api_url_for(
                    rotkehlchen_api_server,
                    'ethereumtransactionsresource',
                ),
                json={
                    'from_timestamp': start_ts,
                    'to_timestamp': end_ts,
                    'address': address
                },
            )
            result = assert_proper_response_with_result(response)
            assert len(result['entries']) == expected_entries[address]
            assert result['entries_limit'] == FREE_ETH_TX_LIMIT
            assert result['entries_found'] == expected_entries[address]
            assert result['entries_total'] == 3
Ejemplo n.º 16
0
def test_query_transactions_over_limit(
    rotkehlchen_api_server,
    ethereum_accounts,
    start_with_valid_premium,
):
    """Check the free-tier transaction limit: with premium all entries are
    returned (limit -1); without it the results are capped at FREE_ETH_TX_LIMIT."""
    start_ts = 0
    end_ts = 1598453214
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    db = rotki.data.db
    all_transactions_num = FREE_ETH_TX_LIMIT + 50
    # FREE_ETH_TX_LIMIT - 10 transactions for account[0] ...
    transactions = [
        EthereumTransaction(
            tx_hash=x.to_bytes(2, byteorder='little'),
            timestamp=x,
            block_number=x,
            from_address=ethereum_accounts[0],
            to_address=make_ethereum_address(),
            value=x,
            gas=x,
            gas_price=x,
            gas_used=x,
            input_data=x.to_bytes(2, byteorder='little'),
            nonce=x,
        ) for x in range(FREE_ETH_TX_LIMIT - 10)
    ]
    # ... plus 60 more for account[1], exceeding the free limit in total
    transactions.extend([
        EthereumTransaction(
            tx_hash=(x + 500).to_bytes(2, byteorder='little'),
            timestamp=x,
            block_number=x,
            from_address=ethereum_accounts[1],
            to_address=make_ethereum_address(),
            value=x,
            gas=x,
            gas_price=x,
            gas_used=x,
            input_data=x.to_bytes(2, byteorder='little'),
            nonce=x,
        ) for x in range(60)
    ])

    dbethtx = DBEthTx(db)
    dbethtx.add_ethereum_transactions(transactions)
    # Also make sure to update query ranges so as not to query etherscan at all
    for address in ethereum_accounts:
        DBQueryRanges(db).update_used_query_range(
            location_string=f'ethtxs_{address}',
            start_ts=start_ts,
            end_ts=end_ts,
            ranges_to_query=[],
        )

    # without premium account[1]'s 60 found entries are capped to 10 returned
    free_expected_entries_total = [FREE_ETH_TX_LIMIT - 10, 10]
    free_expected_entries_found = [FREE_ETH_TX_LIMIT - 10, 60]
    premium_expected_entries = [FREE_ETH_TX_LIMIT - 10, 60]

    # Check that we get all transactions correctly even if we query two times
    for _ in range(2):
        # reset the per-query free-tier counter before each round
        response = requests.post(
            api_url_for(
                rotkehlchen_api_server,
                'limitscounterresetresource',
                location='ethereum_transactions',
            ), )
        assert_simple_ok_response(response)
        for idx, address in enumerate(ethereum_accounts):
            response = requests.get(
                api_url_for(
                    rotkehlchen_api_server,
                    'ethereumtransactionsresource',
                ),
                json={
                    'from_timestamp': start_ts,
                    'to_timestamp': end_ts,
                    'address': address
                },
            )
            result = assert_proper_response_with_result(response)
            if start_with_valid_premium:
                assert len(result['entries']) == premium_expected_entries[idx]
                assert result['entries_total'] == all_transactions_num
                assert result['entries_found'] == premium_expected_entries[idx]
                assert result['entries_limit'] == -1
            else:
                assert len(
                    result['entries']) == free_expected_entries_total[idx]
                assert result['entries_total'] == all_transactions_num
                assert result['entries_found'] == free_expected_entries_found[
                    idx]
                assert result['entries_limit'] == FREE_ETH_TX_LIMIT
Ejemplo n.º 17
0
def test_query_asset_movements_over_limit(
    rotkehlchen_api_server_with_exchanges,
    start_with_valid_premium,
):
    """Test that the asset movements endpoint respects the free-tier limit.

    Seeds the DB with more kraken movements than FREE_ASSET_MOVEMENTS_LIMIT
    and checks that premium users see everything while free users are capped.
    """
    start_ts = 0
    end_ts = 1598453214
    server = rotkehlchen_api_server_with_exchanges
    rotki = server.rest_api.rotkehlchen
    # Make sure online kraken is not queried by setting query ranges
    DBQueryRanges(rotki.data.db).update_used_query_range(
        location_string='kraken_asset_movements',
        start_ts=start_ts,
        end_ts=end_ts,
        ranges_to_query=[],
    )
    polo_entries_num = 4
    # Set a ton of kraken asset movements in the DB
    kraken_entries_num = FREE_ASSET_MOVEMENTS_LIMIT + 50
    movements = [
        AssetMovement(location=Location.KRAKEN,
                      category=AssetMovementCategory.DEPOSIT,
                      address=None,
                      transaction_id=None,
                      timestamp=x,
                      asset=A_BTC,
                      amount=FVal(x * 100),
                      fee_asset=A_BTC,
                      fee=FVal(x),
                      link='') for x in range(kraken_entries_num)
    ]
    rotki.data.db.add_asset_movements(movements)
    all_movements_num = kraken_entries_num + polo_entries_num
    setup = prepare_rotki_for_history_processing_test(
        server.rest_api.rotkehlchen)

    # The reported limit depends on the premium status of the user
    expected_limit = -1 if start_with_valid_premium else FREE_ASSET_MOVEMENTS_LIMIT

    # Check that querying movements with/without limits works even if we query two times
    for _ in range(2):
        # query asset movements of polo which has less movements than the limit
        with setup.polo_patch:
            response = requests.get(
                api_url_for(
                    server,
                    "assetmovementsresource",
                ),
                json={'location': 'poloniex'},
            )
        result = assert_proper_response_with_result(response)
        assert result['entries_found'] == all_movements_num
        # BUGFIX: the previous `assert x == -1 if premium else LIMIT` parsed as
        # `assert ((x == -1) if premium else LIMIT)` because the conditional
        # expression binds looser than `==`, so for free users it asserted a
        # truthy constant and never checked anything
        assert result['entries_limit'] == expected_limit
        assert_poloniex_asset_movements(
            [x['entry'] for x in result['entries']], deserialized=True)

        # now query kraken which has a ton of DB entries
        response = requests.get(
            api_url_for(server, "assetmovementsresource"),
            json={'location': 'kraken'},
        )
        result = assert_proper_response_with_result(response)

        if start_with_valid_premium:
            assert len(result['entries']) == kraken_entries_num
            assert result['entries_limit'] == -1
            assert result['entries_found'] == all_movements_num
        else:
            assert len(result['entries']
                       ) == FREE_ASSET_MOVEMENTS_LIMIT - polo_entries_num
            assert result['entries_limit'] == FREE_ASSET_MOVEMENTS_LIMIT
            assert result['entries_found'] == all_movements_num
Ejemplo n.º 18
0
    def single_address_query_transactions(
        self,
        address: ChecksumEthAddress,
        start_ts: Timestamp,
        end_ts: Timestamp,
        with_limit: bool,
        only_cache: bool,
    ) -> List[EthereumTransaction]:
        """Query ethereum transactions of a single address in the given time range.

        DB-cached transactions are always read first. Unless only_cache is True,
        the sub-ranges not yet covered are fetched from Etherscan (both normal
        and internal transactions), saved to the DB and included in the result.
        The result may be truncated when with_limit is True (free tier).
        """
        self.tx_per_address[address] = 0  # reset this address' per-address counter
        transactions = self.database.get_ethereum_transactions(
            from_ts=start_ts,
            to_ts=end_ts,
            address=address,
        )
        if only_cache:
            return self._return_transactions_maybe_limit(
                address=address,
                transactions=transactions,
                with_limit=with_limit,
            )

        ranges = DBQueryRanges(self.database)
        ranges_to_query = ranges.get_location_query_ranges(
            location_string=f'ethtxs_{address}',
            start_ts=start_ts,
            end_ts=end_ts,
        )
        new_transactions = []
        for query_start_ts, query_end_ts in ranges_to_query:
            # query both normal and internal transactions for each sub-range
            for internal in (False, True):
                try:
                    new_transactions.extend(
                        self.etherscan.get_transactions(
                            account=address,
                            internal=internal,
                            from_ts=query_start_ts,
                            to_ts=query_end_ts,
                        ))
                except RemoteError as e:
                    self.msg_aggregator.add_error(
                        f'Got error "{str(e)}" while querying ethereum transactions '
                        f'from Etherscan. Transactions not added to the DB '
                        f'from_ts: {query_start_ts} '
                        f'to_ts: {query_end_ts} '
                        f'internal: {internal}', )

        # add new transactions to the DB
        if new_transactions:  # idiomatic truthiness check instead of != []
            self.database.add_ethereum_transactions(new_transactions,
                                                    from_etherscan=True)
            # And since at least for now the increasingly negative nonce for the internal
            # transactions happens only in the DB writing, requery the entire batch from
            # the DB to get the updated transactions
            transactions = self.database.get_ethereum_transactions(
                from_ts=start_ts,
                to_ts=end_ts,
                address=address,
            )

        # and also set the last queried timestamps for the address
        ranges.update_used_query_range(
            location_string=f'ethtxs_{address}',
            start_ts=start_ts,
            end_ts=end_ts,
            ranges_to_query=ranges_to_query,
        )

        return self._return_transactions_maybe_limit(
            address=address,
            transactions=transactions,
            with_limit=with_limit,
        )
Ejemplo n.º 19
0
    def query_kraken_ledgers(self, start_ts: Timestamp,
                             end_ts: Timestamp) -> bool:
        """
        Query Kraken's ledger to retrieve events and transform them to our internal representation
        of history events. Internally we look for the query range that needs to be queried in the
        range (start_ts, end_ts) to avoid double querying the kraken API when this method is called
        for deposits/withdrawals and trades. The events queried are then stored in the database.

        Returns true if any query to the kraken API was not successful
        """
        ranges = DBQueryRanges(self.db)
        range_query_name = f'{self.location}_history_events_{self.name}'
        # Only the sub-ranges of (start_ts, end_ts) not already covered in the
        # DB are actually queried
        ranges_to_query = ranges.get_location_query_ranges(
            location_string=range_query_name,
            start_ts=start_ts,
            end_ts=end_ts,
        )
        with_errors = False
        for query_start_ts, query_end_ts in ranges_to_query:
            log.debug(
                f'Querying kraken ledger entries from {query_start_ts} to {query_end_ts}'
            )
            try:
                # query_until_finished pages through the Ledgers endpoint;
                # with_errors flags partial failures that it tolerated
                response, with_errors = self.query_until_finished(
                    endpoint='Ledgers',
                    keyname='ledger',
                    start_ts=query_start_ts,
                    end_ts=query_end_ts,
                    extra_dict={},
                )
            except RemoteError as e:
                self.msg_aggregator.add_error(
                    f'Failed to query kraken ledger between {query_start_ts} and '
                    f'{query_end_ts}. {str(e)}', )
                return True

            # Group related events: kraken links the legs of one logical action
            # (e.g. both sides of a trade) by a common refid
            raw_events_groupped = defaultdict(list)
            for raw_event in response:
                raw_events_groupped[raw_event['refid']].append(raw_event)

            new_events = []
            for events in raw_events_groupped.values():
                try:
                    # Sort each group chronologically. The 'time' value is in
                    # seconds; multiplying by 1000 orders at ms resolution.
                    # Deserialization errors raised by the key function
                    # propagate out of sorted() and are caught here.
                    events = sorted(
                        events,
                        key=lambda x: deserialize_fval(x[
                            'time'], 'time', 'kraken ledgers') * 1000,
                    )
                except DeserializationError as e:
                    self.msg_aggregator.add_error(
                        f'Failed to read timestamp in kraken event group '
                        f'due to {str(e)}. For more information read the logs. Skipping event',
                    )
                    log.error(f'Failed to read timestamp for {events}')
                    continue
                group_events, found_unknown_event = history_event_from_kraken(
                    events=events,
                    name=self.name,
                    msg_aggregator=self.msg_aggregator,
                )
                if found_unknown_event:
                    # demote groups containing unrecognized entries so they do
                    # not affect accounting
                    for event in group_events:
                        event.event_type = HistoryEventType.INFORMATIONAL
                new_events.extend(group_events)

            if len(new_events) != 0:
                try:
                    self.history_events_db.add_history_events(new_events)
                except InputError as e:
                    self.msg_aggregator.add_error(
                        f'Failed to save kraken events from {query_start_ts} to {query_end_ts} '
                        f'in database. {str(e)}', )

                # NOTE(review): the used range is only recorded when the group
                # produced events — sub-ranges that returned no events are
                # re-queried on the next call; confirm this is intended
                ranges.update_used_query_range(
                    location_string=range_query_name,
                    queried_ranges=[(start_ts, end_ts)] + ranges_to_query,
                )

            if with_errors is True:
                return True  # we had errors so stop any further queries and quit

        return False  # no errors
Ejemplo n.º 20
0
    def _get_internal_transactions_for_ranges(
        self,
        address: ChecksumEthAddress,
        start_ts: Timestamp,
        end_ts: Timestamp,
    ) -> None:
        """Queries etherscan for all internal transactions of address in the given ranges.

        If any internal transactions are found, they are added in the DB. The
        parent transaction of each internal transaction is fetched and stored
        first if not already present. The used query range for the address is
        updated incrementally, so an interrupted run can resume where it left.
        """
        location_string = f'{RANGE_PREFIX_ETHINTERNALTX}_{address}'
        ranges = DBQueryRanges(self.database)
        # Only query the sub-ranges not already covered by a previous run
        ranges_to_query = ranges.get_location_query_ranges(
            location_string=location_string,
            start_ts=start_ts,
            end_ts=end_ts,
        )
        dbethtx = DBEthTx(self.database)
        for query_start_ts, query_end_ts in ranges_to_query:
            log.debug(
                f'Querying Internal Transactions for {address} -> {query_start_ts} - {query_end_ts}'
            )  # noqa: E501
            try:
                # etherscan yields internal transactions in batches. NOTE: the
                # previous dead `new_internal_txs = []` initialization was
                # removed — it was immediately shadowed by this loop variable
                # and never read. An empty batch simply iterates zero times, so
                # no explicit emptiness check is needed either.
                for new_internal_txs in self.ethereum.etherscan.get_transactions(
                        account=address,
                        from_ts=query_start_ts,
                        to_ts=query_end_ts,
                        action='txlistinternal',
                ):
                    for internal_tx in new_internal_txs:
                        # make sure all internal transaction parent transactions are in the DB
                        gevent.sleep(0)
                        result = dbethtx.get_ethereum_transactions(
                            ETHTransactionsFilterQuery.make(
                                tx_hash=internal_tx.parent_tx_hash,
                            ),
                            has_premium=True,  # ignore limiting here
                        )
                        if len(result) == 0:  # parent transaction is not in the DB. Get it
                            transaction = self.ethereum.get_transaction_by_hash(
                                internal_tx.parent_tx_hash)
                            gevent.sleep(0)
                            dbethtx.add_ethereum_transactions(
                                ethereum_transactions=[transaction],
                                relevant_address=address,
                            )
                            timestamp = transaction.timestamp
                        else:
                            timestamp = result[0].timestamp

                        dbethtx.add_ethereum_internal_transactions(
                            transactions=[internal_tx],
                            relevant_address=address,
                        )
                        log.debug(
                            f'Internal Transactions for {address} -> update range {query_start_ts} - {timestamp}'
                        )  # noqa: E501
                        ranges.update_used_query_range(  # update last queried time for address
                            location_string=location_string,
                            queried_ranges=[(query_start_ts, timestamp)],
                        )
                        # notify websocket listeners about the querying progress
                        self.msg_aggregator.add_message(
                            message_type=WSMessageType.ETHEREUM_TRANSACTION_STATUS,
                            data={
                                'address': address,
                                'period': [query_start_ts, timestamp],
                                'status': str(TransactionStatusStep.QUERYING_INTERNAL_TRANSACTIONS),  # noqa: E501
                            },
                        )

            except RemoteError as e:
                self.ethereum.msg_aggregator.add_error(
                    f'Got error "{str(e)}" while querying internal ethereum transactions '
                    f'from Etherscan. Transactions not added to the DB '
                    f'address: {address} '
                    f'from_ts: {query_start_ts} '
                    f'to_ts: {query_end_ts} ', )
                return

        log.debug(
            f'Internal Transactions for address {address} done. Update range {start_ts} - {end_ts}'
        )  # noqa: E501
        ranges.update_used_query_range(  # entire range is now considered queried
            location_string=location_string,
            queried_ranges=[(start_ts, end_ts)],
        )
Ejemplo n.º 21
0
    def _get_trades(
        self,
        addresses: List[ChecksumEthAddress],
        from_timestamp: Timestamp,
        to_timestamp: Timestamp,
        only_cache: bool,
    ) -> AddressTrades:
        """Request via graph all trades for new addresses and the latest ones
        for already existing addresses. The requested trades are then written
        to the DB and finally all DB trades are read and returned.
        """
        if only_cache:
            return self._fetch_trades_from_db(addresses, from_timestamp,
                                              to_timestamp)

        trades_by_address: AddressTrades = {}
        fresh_addresses: List[ChecksumEthAddress] = []
        known_addresses: List[ChecksumEthAddress] = []
        earliest_known_end: Timestamp = to_timestamp

        dbranges = DBQueryRanges(self.database)
        # Split addresses into never-queried and already-known ones, tracking
        # the earliest end of the known addresses' last used query ranges
        for address in addresses:
            used_range = self.database.get_used_query_range(
                name=f'{self.trades_prefix}_{address}',
            )
            if used_range:
                known_addresses.append(address)
                earliest_known_end = min(earliest_known_end, used_range[1])
            else:
                fresh_addresses.append(address)

        # Request new addresses' trades from the beginning of time
        if fresh_addresses:
            start_ts = Timestamp(0)
            trades_by_address.update(self._get_trades_graph(
                addresses=fresh_addresses,
                start_ts=start_ts,
                end_ts=to_timestamp,
            ))
            # Insert last used query range for new addresses
            for address in fresh_addresses:
                dbranges.update_used_query_range(
                    location_string=f'{self.trades_prefix}_{address}',
                    queried_ranges=[(start_ts, to_timestamp)],
                )

        # Request existing DB addresses' trades only for the uncovered tail
        if known_addresses and to_timestamp > earliest_known_end:
            trades_by_address.update(self._get_trades_graph(
                addresses=known_addresses,
                start_ts=earliest_known_end,
                end_ts=to_timestamp,
            ))
            # Update last used query range for existing addresses
            for address in known_addresses:
                dbranges.update_used_query_range(
                    location_string=f'{self.trades_prefix}_{address}',
                    queried_ranges=[(earliest_known_end, to_timestamp)],
                )

        # Insert all unique swaps to the DB
        unique_swaps = {
            swap
            for address in addresses if address in trades_by_address
            for trade in trades_by_address[address]
            for swap in trade.swaps
        }
        self.database.add_amm_swaps(list(unique_swaps))
        return self._fetch_trades_from_db(addresses, from_timestamp,
                                          to_timestamp)
Ejemplo n.º 22
0
def test_get_location_query_ranges(database):
    """Check get_location_query_ranges against pre-existing used query ranges."""
    dbranges = DBQueryRanges(database)
    location1 = 'location1'
    location2 = 'location2'

    database.update_used_query_range(location1, 15, 25)
    database.update_used_query_range(location2, 10, 125)

    # (location, query start, query end, expected sub-ranges still to query)
    cases = [
        (location1, 0, 2, [(0, 14)]),
        (location1, 8, 17, [(8, 14)]),
        (location1, 19, 23, []),
        (location1, 22, 57, [(26, 57)]),
        (location1, 26, 125, [(26, 125)]),
        (location2, 3, 9, [(3, 9)]),
        (location2, 9, 17, [(9, 9)]),
        (location2, 19, 23, []),
        (location2, 120, 250, [(126, 250)]),
        (location2, 126, 170, [(126, 170)]),
    ]
    for location, query_start, query_end, expected in cases:
        result = dbranges.get_location_query_ranges(location, query_start,
                                                    query_end)
        assert result == expected
Ejemplo n.º 23
0
    def _single_address_query_transactions(
        self,
        address: ChecksumEthAddress,
        start_ts: Timestamp,
        end_ts: Timestamp,
        with_limit: bool,
    ) -> List[EthereumTransaction]:
        """Query ethereum transactions of a single address in the given time range.

        DB-cached transactions plus any new ones fetched from Etherscan (normal
        and internal) are returned. When with_limit is True the result is
        truncated so the total returned across all addresses stays within
        FREE_ETH_TX_LIMIT.
        """
        self.tx_per_address[address] = 0  # reset this address' share of the limit
        transactions = self.database.get_ethereum_transactions(
            from_ts=start_ts,
            to_ts=end_ts,
            address=address,
        )
        ranges = DBQueryRanges(self.database)
        ranges_to_query = ranges.get_location_query_ranges(
            location_string=f'ethtxs_{address}',
            start_ts=start_ts,
            end_ts=end_ts,
        )
        new_transactions = []
        for query_start_ts, query_end_ts in ranges_to_query:
            # query both normal and internal transactions for each sub-range
            for internal in (False, True):
                try:
                    new_transactions.extend(
                        self.etherscan.get_transactions(
                            account=address,
                            internal=internal,
                            from_ts=query_start_ts,
                            to_ts=query_end_ts,
                        ))
                except RemoteError as e:
                    self.msg_aggregator.add_error(
                        f'Got error "{str(e)}" while querying ethereum transactions '
                        f'from Etherscan. Transactions not added to the DB '
                        f'from_ts: {query_start_ts} '
                        f'to_ts: {query_end_ts} '
                        f'internal: {internal}', )

        # add new transactions to the DB
        if new_transactions:  # idiomatic truthiness check instead of != []
            self.database.add_ethereum_transactions(new_transactions,
                                                    from_etherscan=True)
            # And since at least for now the increasingly negative nonce for the internal
            # transactions happens only in the DB writing, requery the entire batch from
            # the DB to get the updated transactions
            transactions = self.database.get_ethereum_transactions(
                from_ts=start_ts,
                to_ts=end_ts,
                address=address,
            )

        # and also set the last queried timestamps for the address
        ranges.update_used_query_range(
            location_string=f'ethtxs_{address}',
            start_ts=start_ts,
            end_ts=end_ts,
            ranges_to_query=ranges_to_query,
        )

        if with_limit:
            # sum over values() instead of iterating items() and discarding keys
            transactions_queried_so_far = sum(self.tx_per_address.values())
            remaining_num_tx = FREE_ETH_TX_LIMIT - transactions_queried_so_far
            returning_tx_length = min(remaining_num_tx, len(transactions))
            # Note down how many we got for this address
            self.tx_per_address[address] = returning_tx_length
            return transactions[:returning_tx_length]

        return transactions
Ejemplo n.º 24
0
    def _get_transactions_for_range(
        self,
        address: ChecksumEthAddress,
        start_ts: Timestamp,
        end_ts: Timestamp,
    ) -> None:
        """Queries etherscan for all ethereum transactions of address in the given ranges.

        If any transactions are found, they are added in the DB and the used
        query range for the address is advanced batch by batch, so an
        interrupted run can resume from the last saved transaction.
        """
        location_string = f'{RANGE_PREFIX_ETHTX}_{address}'
        ranges = DBQueryRanges(self.database)
        # Only query the sub-ranges not already covered by a previous run
        ranges_to_query = ranges.get_location_query_ranges(
            location_string=location_string,
            start_ts=start_ts,
            end_ts=end_ts,
        )
        dbethtx = DBEthTx(self.database)
        for query_start_ts, query_end_ts in ranges_to_query:
            log.debug(
                f'Querying Transactions for {address} -> {query_start_ts} - {query_end_ts}'
            )
            try:
                # etherscan yields transactions in batches
                for new_transactions in self.ethereum.etherscan.get_transactions(
                        account=address,
                        from_ts=query_start_ts,
                        to_ts=query_end_ts,
                        action='txlist',
                ):
                    # add new transactions to the DB
                    if len(new_transactions) != 0:
                        dbethtx.add_ethereum_transactions(
                            ethereum_transactions=new_transactions,
                            relevant_address=address,
                        )
                        # advance the used range only up to the newest saved
                        # transaction's timestamp
                        ranges.update_used_query_range(  # update last queried time for the address
                            location_string=location_string,
                            queried_ranges=[(query_start_ts,
                                             new_transactions[-1].timestamp)],
                        )
                        # notify websocket listeners about querying progress
                        self.msg_aggregator.add_message(
                            message_type=WSMessageType.
                            ETHEREUM_TRANSACTION_STATUS,
                            data={
                                'address':
                                address,
                                'period': [
                                    query_start_ts,
                                    new_transactions[-1].timestamp
                                ],
                                'status':
                                str(TransactionStatusStep.QUERYING_TRANSACTIONS
                                    ),
                            },
                        )

            except RemoteError as e:
                # abort entirely on remote failure; the partial used-range
                # updates above keep already-saved progress
                self.ethereum.msg_aggregator.add_error(
                    f'Got error "{str(e)}" while querying ethereum transactions '
                    f'from Etherscan. Some transactions not added to the DB '
                    f'address: {address} '
                    f'from_ts: {query_start_ts} '
                    f'to_ts: {query_end_ts} ', )
                return

        log.debug(
            f'Transactions done for {address}. Update range {start_ts} - {end_ts}'
        )
        ranges.update_used_query_range(  # entire range is now considered queried
            location_string=location_string,
            queried_ranges=[(start_ts, end_ts)],
        )
Ejemplo n.º 25
0
    def _get_erc20_transfers_for_ranges(
        self,
        address: ChecksumEthAddress,
        start_ts: Timestamp,
        end_ts: Timestamp,
    ) -> None:
        """Queries etherscan for all erc20 transfers of address in the given ranges.

        If any transfers are found, the corresponding transactions are added to
        the DB (fetching them by hash when missing) and the used query range
        for the address is advanced incrementally.
        """
        location_string = f'{RANGE_PREFIX_ETHTOKENTX}_{address}'
        dbethtx = DBEthTx(self.database)
        ranges = DBQueryRanges(self.database)
        # Only query the sub-ranges not already covered by a previous run
        ranges_to_query = ranges.get_location_query_ranges(
            location_string=location_string,
            start_ts=start_ts,
            end_ts=end_ts,
        )
        for query_start_ts, query_end_ts in ranges_to_query:
            log.debug(
                f'Querying ERC20 Transfers for {address} -> {query_start_ts} - {query_end_ts}'
            )  # noqa: E501
            try:
                # etherscan yields the transfers' transaction hashes in batches
                for erc20_tx_hashes in self.ethereum.etherscan.get_token_transaction_hashes(
                        account=address,
                        from_ts=query_start_ts,
                        to_ts=query_end_ts,
                ):
                    for tx_hash in erc20_tx_hashes:
                        tx_hash_bytes = deserialize_evm_tx_hash(tx_hash)
                        result = dbethtx.get_ethereum_transactions(
                            ETHTransactionsFilterQuery.make(
                                tx_hash=tx_hash_bytes),
                            has_premium=True,  # ignore limiting here
                        )
                        if len(result) == 0:  # if transaction is not there add it
                            gevent.sleep(0)
                            transaction = self.ethereum.get_transaction_by_hash(
                                tx_hash_bytes)
                            dbethtx.add_ethereum_transactions(
                                [transaction],
                                relevant_address=address,
                            )
                            timestamp = transaction.timestamp
                        else:
                            timestamp = result[0].timestamp

                        log.debug(
                            f'ERC20 Transfers for {address} -> update range {query_start_ts} - {timestamp}'
                        )  # noqa: E501
                        ranges.update_used_query_range(  # update last queried time for the address
                            location_string=location_string,
                            queried_ranges=[(query_start_ts, timestamp)],
                        )
                        # notify websocket listeners about querying progress
                        self.msg_aggregator.add_message(
                            message_type=WSMessageType.ETHEREUM_TRANSACTION_STATUS,
                            data={
                                'address': address,
                                'period': [query_start_ts, timestamp],
                                'status': str(TransactionStatusStep.QUERYING_ETHEREUM_TOKENS_TRANSACTIONS),  # noqa: E501
                            },
                        )
            except RemoteError as e:
                # BUGFIX: added the missing trailing space after 'transactions'
                # — the implicitly-concatenated message previously read
                # '...token transactionsfrom Etherscan...'
                self.ethereum.msg_aggregator.add_error(
                    f'Got error "{str(e)}" while querying token transactions '
                    f'from Etherscan. Transactions not added to the DB '
                    f'address: {address} '
                    f'from_ts: {query_start_ts} '
                    f'to_ts: {query_end_ts} ', )

        log.debug(
            f'ERC20 Transfers done for address {address}. Update range {start_ts} - {end_ts}'
        )  # noqa: E501
        ranges.update_used_query_range(  # entire range is now considered queried
            location_string=location_string,
            queried_ranges=[(start_ts, end_ts)],
        )