Code Example #1
File: decoder.py Project: LefterisJP/rotkehlchen
 def __init__(
     self,
     database: 'DBHandler',
     ethereum_manager: 'EthereumManager',
     eth_transactions: 'EthTransactions',
     msg_aggregator: MessagesAggregator,
 ):
     self.database = database
     self.all_counterparties: Set[str] = set()
     self.ethereum_manager = ethereum_manager
     self.eth_transactions = eth_transactions
     self.msg_aggregator = msg_aggregator
     self.dbethtx = DBEthTx(self.database)
     self.dbevents = DBHistoryEvents(self.database)
     self.base = BaseDecoderTools(database=database)
     self.event_rules = [  # rules to try for all tx receipt logs decoding
         self._maybe_decode_erc20_approve,
         self._maybe_decode_erc20_721_transfer,
         self._maybe_enrich_transfers,
         self._maybe_decode_governance,
     ]
     self.token_enricher_rules: List[Callable] = [
     ]  # enrichers to run for token transfers
     self.initialize_all_decoders()
     self.undecoded_tx_query_lock = Semaphore()
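
The `event_rules` list above forms a simple rule chain: each `_maybe_*` callable inspects a receipt log and returns either a decoded event or `None`, and the decoder tries them in order (see `try_all_rules` in Code Example #9). A minimal, self-contained sketch of that dispatch pattern, with hypothetical rule names standing in for rotkehlchen's:

from typing import Callable, Dict, List, Optional

# Hypothetical stand-ins for rotkehlchen's rule callables and event type.
Event = Dict[str, str]
Rule = Callable[[Dict[str, str]], Optional[Event]]

def maybe_decode_approve(tx_log: Dict[str, str]) -> Optional[Event]:
    if tx_log.get('topic') == 'Approval':
        return {'type': 'approve'}
    return None

def maybe_decode_transfer(tx_log: Dict[str, str]) -> Optional[Event]:
    if tx_log.get('topic') == 'Transfer':
        return {'type': 'transfer'}
    return None

EVENT_RULES: List[Rule] = [maybe_decode_approve, maybe_decode_transfer]

def try_all_rules(tx_log: Dict[str, str]) -> Optional[Event]:
    # Try each rule in order; the first one that recognizes the log wins.
    for rule in EVENT_RULES:
        event = rule(tx_log)
        if event is not None:
            return event
    return None  # no rule matched -- the log stays undecoded

print(try_all_rules({'topic': 'Transfer'}))  # {'type': 'transfer'}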
Code Example #2
    def query_history_events(
        self,
        filter_query: HistoryEventFilterQuery,
        only_cache: bool,
    ) -> Tuple[List[HistoryBaseEntry], int]:
        if only_cache is False:
            exchanges_list = self.exchange_manager.connected_exchanges.get(
                Location.KRAKEN, [])
            kraken_names = []
            for kraken_instance in exchanges_list:
                with_errors = kraken_instance.query_kraken_ledgers(  # type: ignore
                    start_ts=filter_query.from_ts,
                    end_ts=filter_query.to_ts,
                )
                if with_errors:
                    kraken_names.append(kraken_instance.name)
            if len(kraken_names) != 0:
                self.msg_aggregator.add_error(
                    f'Failed to query some events from Kraken exchanges '
                    f'{",".join(kraken_names)}', )

        db = DBHistoryEvents(self.db)
        has_premium = self.chain_manager.premium is not None
        events, filter_total_found = db.get_history_events_and_limit_info(
            filter_query=filter_query,
            has_premium=has_premium,
        )
        return events, filter_total_found
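
The `only_cache` flag above implements refresh-then-read: when it is False the Kraken ledgers are re-queried and persisted first, but results are always served from the database, so filtering and premium limits are applied in a single place. A minimal sketch of the same idea with hypothetical names, not rotkehlchen's API:

from typing import List, Tuple

class HistoryQuerier:
    """Simplified stand-in for the refresh-then-read pattern above."""

    def __init__(self) -> None:
        self._db_rows: List[str] = []

    def _refresh_from_remote(self) -> None:
        # Stand-in for querying the exchange API and persisting the results.
        self._db_rows = ['event-1', 'event-2']

    def query(self, only_cache: bool) -> Tuple[List[str], int]:
        if only_cache is False:
            self._refresh_from_remote()
        # Always answer from the local store so limits/filters live in one place.
        return self._db_rows, len(self._db_rows)

querier = HistoryQuerier()
print(querier.query(only_cache=True))   # ([], 0) -- nothing cached yet
print(querier.query(only_cache=False))  # (['event-1', 'event-2'], 2)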
Code Example #3
File: ethtx.py Project: LefterisJP/rotkehlchen
    def delete_transactions(self, address: ChecksumEthAddress) -> None:
        """Delete all transaction-related data for the given address from the DB

        That means transactions, receipts, logs and decoded events
        """
        cursor = self.db.conn.cursor()
        dbevents = DBHistoryEvents(self.db)
        cursor.executemany(
            'DELETE FROM used_query_ranges WHERE name=?;',
            [
                (f'{RANGE_PREFIX_ETHTX}_{address}',),
                (f'{RANGE_PREFIX_ETHINTERNALTX}_{address}',),
                (f'{RANGE_PREFIX_ETHTOKENTX}_{address}',),
            ],
        )
        # Get all tx_hashes that are touched by this address and no other address
        result = cursor.execute(
            'SELECT tx_hash from ethtx_address_mappings WHERE address=? AND tx_hash NOT IN ( '
            'SELECT tx_hash from ethtx_address_mappings WHERE address!=?'
            ')',
            (address, address),
        )
        tx_hashes = [make_evm_tx_hash(x[0]) for x in result]
        dbevents.delete_events_by_tx_hash(tx_hashes)
        # Now delete all relevant transactions. By deleting all relevant transactions all tables
        # are cleared thanks to cascading (except for history_events which was cleared above)
        cursor.executemany(
            'DELETE FROM ethereum_transactions WHERE tx_hash=? AND ? NOT IN (SELECT event_identifier FROM history_events)',  # noqa: E501
            [(x, x.hex()) for x in tx_hashes],
        )
        # Delete all remaining evm_tx_mappings so decoding can happen again for customized events
        cursor.executemany(
            'DELETE FROM evm_tx_mappings WHERE tx_hash=? AND blockchain=? AND value=?',
            [(x, 'ETH', HISTORY_MAPPING_DECODED) for x in tx_hashes],
        )
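
`cursor.executemany` above runs the same parameterized DELETE once per tuple in the list. A self-contained sketch of that pattern against an in-memory SQLite database, with simplified range names:

import sqlite3

conn = sqlite3.connect(':memory:')
conn.execute('CREATE TABLE used_query_ranges (name TEXT PRIMARY KEY)')
conn.executemany(
    'INSERT INTO used_query_ranges(name) VALUES (?)',
    [('ethtxs_0xabc',), ('ethinternaltxs_0xabc',), ('other_0xdef',)],
)

address = '0xabc'
# One statement, executed once per parameter tuple, exactly like the
# three range-name deletions above.
conn.executemany(
    'DELETE FROM used_query_ranges WHERE name=?;',
    [(f'ethtxs_{address}',), (f'ethinternaltxs_{address}',)],
)
print(conn.execute('SELECT name FROM used_query_ranges').fetchall())
# [('other_0xdef',)]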
Code Example #4
File: manager.py Project: LefterisJP/rotkehlchen
 def get_base_entries_missing_prices(
     self,
     query_filter: HistoryEventFilterQuery,
 ) -> List[Tuple[str, FVal, Asset, Timestamp]]:
     """
     Searches base entries missing usd prices that have not previously been checked in
     this session.
     """
     # Use a deepcopy to avoid mutations in the filter query if it is used later
     db = DBHistoryEvents(self.database)
     new_query_filter = copy.deepcopy(query_filter)
     new_query_filter.filters.append(
         DBStringFilter(and_op=True, column='usd_value', value='0'), )
     new_query_filter.filters.append(
         DBIgnoreValuesFilter(
             and_op=True,
             column='identifier',
             values=list(self.base_entries_ignore_set),
         ), )
     return db.rows_missing_prices_in_base_entries(
         filter_query=new_query_filter)
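
The deepcopy above matters because `filters` is a mutable list: appending to a shallow copy would also mutate the caller's query object. A tiny illustration with a hypothetical, simplified filter class:

import copy
from dataclasses import dataclass, field
from typing import List

@dataclass
class FilterQuery:
    # Hypothetical, simplified stand-in for HistoryEventFilterQuery.
    filters: List[str] = field(default_factory=list)

shared = FilterQuery(filters=['location=kraken'])
narrowed = copy.deepcopy(shared)
narrowed.filters.append('usd_value=0')

print(shared.filters)    # ['location=kraken'] -- caller's query untouched
print(narrowed.filters)  # ['location=kraken', 'usd_value=0']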
Code Example #5
 def __init__(
     self,
     name: str,
     api_key: ApiKey,
     secret: ApiSecret,
     database: 'DBHandler',
     msg_aggregator: MessagesAggregator,
     kraken_account_type: Optional[KrakenAccountType] = None,
 ):
     super().__init__(
         name=name,
         location=Location.KRAKEN,
         api_key=api_key,
         secret=secret,
         database=database,
     )
     self.msg_aggregator = msg_aggregator
     self.session.headers.update({'API-Key': self.api_key})
     self.set_account_type(kraken_account_type)
     self.call_counter = 0
     self.last_query_ts = 0
     self.history_events_db = DBHistoryEvents(self.db)
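
Updating `self.session.headers` above attaches the API key to every later request made through that `requests.Session`. A small sketch of the pattern with a placeholder credential:

import requests

session = requests.Session()
session.headers.update({'API-Key': 'dummy-key'})  # placeholder, not a real key

# Any later call now carries the header automatically, e.g.:
# session.post('https://api.kraken.com/0/private/Balance', data=...)
print(session.headers['API-Key'])  # dummy-key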
Code Example #6
    def get_history(
        self,
        start_ts: Timestamp,
        end_ts: Timestamp,
        has_premium: bool,
    ) -> Tuple[str, List['AccountingEventMixin']]:
        """
        Creates all events history from start_ts to end_ts. Returns it
        sorted by ascending timestamp.
        """
        self._reset_variables()
        step = 0
        total_steps = len(
            self.exchange_manager.connected_exchanges
        ) + NUM_HISTORY_QUERY_STEPS_EXCL_EXCHANGES  # noqa: E501
        log.info(
            'Get/create trade history',
            start_ts=start_ts,
            end_ts=end_ts,
        )
        # start creating the all trades history list
        history: List['AccountingEventMixin'] = []
        empty_or_error = ''

        def populate_history_cb(
            trades_history: List[Trade],
            margin_history: List[MarginPosition],
            result_asset_movements: List[AssetMovement],
            exchange_specific_data: Any,
        ) -> None:
            """This callback will run for succesfull exchange history query

            We don't include ledger actions here since we simply gather all of them at the end
            """
            history.extend(trades_history)
            history.extend(margin_history)
            history.extend(result_asset_movements)

            if exchange_specific_data:
                # This can only be poloniex at the moment
                polo_loans_data = exchange_specific_data
                history.extend(
                    process_polo_loans(
                        msg_aggregator=self.msg_aggregator,
                        data=polo_loans_data,
                        # We need to have history of loans since before the range
                        start_ts=Timestamp(0),
                        end_ts=end_ts,
                    ))

        def fail_history_cb(error_msg: str) -> None:
            """This callback will run for failure in exchange history query"""
            nonlocal empty_or_error
            empty_or_error += '\n' + error_msg

        for exchange in self.exchange_manager.iterate_exchanges():
            self.processing_state_name = f'Querying {exchange.name} exchange history'
            exchange.query_history_with_callbacks(
                # We need to have history of exchanges since before the range
                start_ts=Timestamp(0),
                end_ts=end_ts,
                success_callback=populate_history_cb,
                fail_callback=fail_history_cb,
            )
            step = self._increase_progress(step, total_steps)

        self.processing_state_name = 'Querying ethereum transactions history'
        tx_filter_query = ETHTransactionsFilterQuery.make(
            limit=None,
            offset=None,
            addresses=None,
            # We need to have history of transactions since before the range
            from_ts=Timestamp(0),
            to_ts=end_ts,
        )
        try:
            _, _ = self.eth_transactions.query(
                filter_query=tx_filter_query,
                has_premium=True,  # ignore limits here. Limit applied at processing
                only_cache=False,
            )
        except RemoteError as e:
            msg = str(e)
            self.msg_aggregator.add_error(
                f'There was an error when querying etherscan for ethereum transactions: {msg}. '
                f'The final history result will not include ethereum transactions',
            )
            empty_or_error += '\n' + msg
        step = self._increase_progress(step, total_steps)

        self.processing_state_name = 'Querying ethereum transaction receipts'
        self.eth_transactions.get_receipts_for_transactions_missing_them()
        step = self._increase_progress(step, total_steps)

        self.processing_state_name = 'Decoding raw transactions'
        self.evm_tx_decoder.get_and_decode_undecoded_transactions(limit=None)
        step = self._increase_progress(step, total_steps)

        # Include all external trades and trades from external exchanges
        for location in EXTERNAL_LOCATION:
            self.processing_state_name = f'Querying {location} trades history'
            external_trades = self.db.get_trades(
                filter_query=TradesFilterQuery.make(location=location),
                has_premium=True,  # we need all trades for accounting -- limit happens later
            )
            history.extend(external_trades)
            step = self._increase_progress(step, total_steps)

        # include all ledger actions
        self.processing_state_name = 'Querying ledger actions history'
        ledger_actions, _ = self.query_ledger_actions(
            filter_query=LedgerActionsFilterQuery.make(),
            only_cache=True,
        )
        history.extend(ledger_actions)
        step = self._increase_progress(step, total_steps)

        # include eth2 staking events
        eth2 = self.chain_manager.get_module('eth2')
        if eth2 is not None and has_premium:
            self.processing_state_name = 'Querying ETH2 staking history'
            try:
                eth2_events = self.chain_manager.get_eth2_history_events(
                    from_timestamp=Timestamp(0),
                    to_timestamp=end_ts,
                )
                history.extend(eth2_events)
            except RemoteError as e:
                self.msg_aggregator.add_error(
                    f'Eth2 events are not included in the PnL report due to {str(e)}',
                )

        step = self._increase_progress(step, total_steps)

        # Include base history entries
        history_events_db = DBHistoryEvents(self.db)
        base_entries, _ = history_events_db.get_history_events_and_limit_info(
            filter_query=HistoryEventFilterQuery.make(
                # We need to have history since before the range
                from_ts=Timestamp(0),
                to_ts=end_ts,
            ),
            has_premium=True,  # ignore limits here. Limit applied at processing
        )
        history.extend(base_entries)
        self._increase_progress(step, total_steps)

        history.sort(  # sort events first by timestamp and if history base by sequence index
            key=lambda x: (
                x.get_timestamp(),
                x.sequence_index if isinstance(x, HistoryBaseEntry) else 1,
            ), )
        return empty_or_error, history
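
The final sort above uses a composite tuple key: Python compares tuples element by element, so events order by timestamp first and, only on timestamp ties, by the secondary index. A tiny illustration:

events = [
    (1650000000, 2, 'swap-leg-2'),
    (1640000000, 1, 'trade'),
    (1650000000, 1, 'swap-leg-1'),
]
events.sort(key=lambda x: (x[0], x[1]))
print([name for _, _, name in events])
# ['trade', 'swap-leg-1', 'swap-leg-2']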
Code Example #7
class Kraken(ExchangeInterface):  # lgtm[py/missing-call-to-init]
    def __init__(
        self,
        name: str,
        api_key: ApiKey,
        secret: ApiSecret,
        database: 'DBHandler',
        msg_aggregator: MessagesAggregator,
        kraken_account_type: Optional[KrakenAccountType] = None,
    ):
        super().__init__(
            name=name,
            location=Location.KRAKEN,
            api_key=api_key,
            secret=secret,
            database=database,
        )
        self.msg_aggregator = msg_aggregator
        self.session.headers.update({'API-Key': self.api_key})
        self.set_account_type(kraken_account_type)
        self.call_counter = 0
        self.last_query_ts = 0
        self.history_events_db = DBHistoryEvents(self.db)

    def set_account_type(self,
                         account_type: Optional[KrakenAccountType]) -> None:
        if account_type is None:
            account_type = DEFAULT_KRAKEN_ACCOUNT_TYPE

        self.account_type = account_type
        if self.account_type == KrakenAccountType.STARTER:
            self.call_limit = 15
            self.reduction_every_secs = 3
        elif self.account_type == KrakenAccountType.INTERMEDIATE:
            self.call_limit = 20
            self.reduction_every_secs = 2
        else:  # Pro
            self.call_limit = 20
            self.reduction_every_secs = 1

    def edit_exchange_credentials(
        self,
        api_key: Optional[ApiKey],
        api_secret: Optional[ApiSecret],
        passphrase: Optional[str],
    ) -> bool:
        changed = super().edit_exchange_credentials(api_key, api_secret,
                                                    passphrase)
        if api_key is not None:
            self.session.headers.update({'API-Key': self.api_key})

        return changed

    def edit_exchange(
        self,
        name: Optional[str],
        api_key: Optional[ApiKey],
        api_secret: Optional[ApiSecret],
        **kwargs: Any,
    ) -> Tuple[bool, str]:
        success, msg = super().edit_exchange(
            name=name,
            api_key=api_key,
            api_secret=api_secret,
            **kwargs,
        )
        if success is False:
            return success, msg

        account_type = kwargs.get(KRAKEN_ACCOUNT_TYPE_KEY)
        if account_type is None:
            return success, msg

        # here we can finally update the account type
        self.set_account_type(account_type)
        return True, ''

    def validate_api_key(self) -> Tuple[bool, str]:
        """Validates that the Kraken API Key is good for usage in Rotkehlchen

        Makes sure that the following permissions are given to the key:
        - Ability to query funds
        - Ability to query open/closed trades
        - Ability to query ledgers
        """
        valid, msg = self._validate_single_api_key_action('Balance')
        if not valid:
            return False, msg
        valid, msg = self._validate_single_api_key_action(
            method_str='TradesHistory',
            req={
                'start': 0,
                'end': 0
            },
        )
        if not valid:
            return False, msg
        valid, msg = self._validate_single_api_key_action(
            method_str='Ledgers',
            req={
                'start': 0,
                'end': 0,
                'type': 'deposit'
            },
        )
        if not valid:
            return False, msg
        return True, ''

    def _validate_single_api_key_action(
        self,
        method_str: str,
        req: Optional[Dict[str, Any]] = None,
    ) -> Tuple[bool, str]:
        try:
            self.api_query(method_str, req)
        except (RemoteError, ValueError) as e:
            error = str(e)
            if 'Incorrect padding' in error:
                return False, 'Provided API Key or secret is invalid'
            if 'EAPI:Invalid key' in error:
                return False, 'Provided API Key is invalid'
            if 'EGeneral:Permission denied' in error:
                msg = (
                    'Provided API Key does not have appropriate permissions. Make '
                    'sure that the "Query Funds", "Query Open/Closed Order and Trades" '
                    'and "Query Ledger Entries" actions are allowed for your Kraken API Key.'
                )
                return False, msg

            # else
            log.error(f'Kraken API key validation error: {str(e)}')
            msg = ('Unknown error at Kraken API key validation. Perhaps API '
                   'Key/Secret combination invalid?')
            return False, msg
        return True, ''

    def first_connection(self) -> None:
        self.first_connection_made = True

    def _manage_call_counter(self, method: str) -> None:
        self.last_query_ts = ts_now()
        if method in ('Ledgers', 'TradesHistory'):
            self.call_counter += 2
        else:
            self.call_counter += 1

    def _query_public(self,
                      method: str,
                      req: Optional[dict] = None) -> Union[Dict, str]:
        """API queries that do not require a valid key/secret pair.

        Arguments:
        method -- API method name (string, no default)
        req    -- additional API request parameters (default: {})
        """
        if req is None:
            req = {}
        urlpath = f'{KRAKEN_BASE_URL}/{KRAKEN_API_VERSION}/public/{method}'
        try:
            response = self.session.post(urlpath,
                                         data=req,
                                         timeout=DEFAULT_TIMEOUT_TUPLE)
        except requests.exceptions.RequestException as e:
            raise RemoteError(
                f'Kraken API request failed due to {str(e)}') from e

        self._manage_call_counter(method)
        return _check_and_get_response(response, method)

    def api_query(self, method: str, req: Optional[dict] = None) -> dict:
        tries = KRAKEN_QUERY_TRIES
        query_method = (self._query_public if method in KRAKEN_PUBLIC_METHODS
                        else self._query_private)
        while tries > 0:
            if self.call_counter + MAX_CALL_COUNTER_INCREASE > self.call_limit:
                # If we are close to the limit, check how much our call counter reduced
                # https://www.kraken.com/features/api#api-call-rate-limit
                secs_since_last_call = ts_now() - self.last_query_ts
                self.call_counter = max(
                    0,
                    self.call_counter -
                    int(secs_since_last_call / self.reduction_every_secs),
                )
                # If still at limit, sleep for an amount big enough for smallest tier reduction
                if self.call_counter + MAX_CALL_COUNTER_INCREASE > self.call_limit:
                    backoff_in_seconds = self.reduction_every_secs * 2
                    log.debug(
                        f'Doing a Kraken API call would now exceed our call counter limit. '
                        f'Backing off for {backoff_in_seconds} seconds',
                        call_counter=self.call_counter,
                    )
                    tries -= 1
                    gevent.sleep(backoff_in_seconds)
                    continue

            log.debug(
                'Kraken API query',
                method=method,
                data=req,
                call_counter=self.call_counter,
            )
            result = query_method(method, req)
            if isinstance(result, str):
                # Got a recoverable error
                backoff_in_seconds = int(KRAKEN_BACKOFF_DIVIDEND / tries)
                log.debug(
                    f'Got recoverable error {result} in a Kraken query of {method}. Will backoff '
                    f'for {backoff_in_seconds} seconds', )
                tries -= 1
                gevent.sleep(backoff_in_seconds)
                continue

            # else success
            return result

        raise RemoteError(
            f'After {KRAKEN_QUERY_TRIES} tries, the kraken query for {method} could still not be completed',
        )
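
Aside: the call-counter logic in `api_query` above is a decaying counter in the style documented at https://www.kraken.com/features/api#api-call-rate-limit: each call adds points and the counter drains at a per-tier rate. A self-contained sketch of the mechanism (parameter values here are illustrative):

import time

class CallCounterLimiter:
    """Minimal sketch of a decaying call counter, as used above."""

    def __init__(self, call_limit: int = 20, reduction_every_secs: int = 2):
        self.call_limit = call_limit
        self.reduction_every_secs = reduction_every_secs
        self.call_counter = 0
        self.last_query_ts = 0.0

    def can_call(self, cost: int = 1) -> bool:
        # First credit back the decay accumulated since the last call.
        elapsed = time.time() - self.last_query_ts
        self.call_counter = max(
            0, self.call_counter - int(elapsed / self.reduction_every_secs),
        )
        return self.call_counter + cost <= self.call_limit

    def record_call(self, cost: int = 1) -> None:
        self.call_counter += cost
        self.last_query_ts = time.time()

limiter = CallCounterLimiter()
if limiter.can_call(cost=2):  # 'Ledgers'-style calls cost 2
    limiter.record_call(cost=2)
print(limiter.call_counter)  # 2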

    def _query_private(self,
                       method: str,
                       req: Optional[dict] = None) -> Union[Dict, str]:
        """API queries that require a valid key/secret pair.

        Arguments:
        method -- API method name (string, no default)
        req    -- additional API request parameters (default: {})

        """
        if req is None:
            req = {}

        urlpath = '/' + KRAKEN_API_VERSION + '/private/' + method
        req['nonce'] = int(1000 * time.time())
        post_data = urlencode(req)
        # any unicode strings must be turned to bytes
        hashable = (str(req['nonce']) + post_data).encode()
        message = urlpath.encode() + hashlib.sha256(hashable).digest()
        signature = hmac.new(
            base64.b64decode(self.secret),
            message,
            hashlib.sha512,
        )
        self.session.headers.update({
            'API-Sign':
            base64.b64encode(signature.digest()),  # type: ignore
        })
        try:
            response = self.session.post(
                KRAKEN_BASE_URL + urlpath,
                data=post_data.encode(),
                timeout=DEFAULT_TIMEOUT_TUPLE,
            )
        except requests.exceptions.RequestException as e:
            raise RemoteError(
                f'Kraken API request failed due to {str(e)}') from e
        self._manage_call_counter(method)

        return _check_and_get_response(response, method)
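
Aside: the signature computed in `_query_private` above follows Kraken's documented scheme: HMAC-SHA512, keyed with the base64-decoded secret, over the URL path concatenated with SHA256(nonce + POST body). A standalone sketch of just that computation (dummy secret, for illustration only):

import base64
import hashlib
import hmac
import time
from urllib.parse import urlencode

def kraken_sign(urlpath: str, req: dict, secret_b64: bytes) -> bytes:
    """Recompute the API-Sign header the same way _query_private does."""
    post_data = urlencode(req)
    hashable = (str(req['nonce']) + post_data).encode()
    message = urlpath.encode() + hashlib.sha256(hashable).digest()
    signature = hmac.new(base64.b64decode(secret_b64), message, hashlib.sha512)
    return base64.b64encode(signature.digest())

secret = base64.b64encode(b'not-a-real-secret')  # dummy credential
req = {'nonce': int(1000 * time.time())}
print(kraken_sign('/0/private/Balance', req, secret))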

    # ---- General exchanges interface ----
    @protect_with_lock()
    @cache_response_timewise()
    def query_balances(self) -> ExchangeQueryBalances:
        try:
            kraken_balances = self.api_query('Balance', req={})
        except RemoteError as e:
            if "Missing key: 'result'" in str(e):
                # handle https://github.com/rotki/rotki/issues/946
                kraken_balances = {}
            else:
                msg = ('Kraken API request failed. Could not reach kraken due '
                       'to {}'.format(e))
                log.error(msg)
                return None, msg

        assets_balance: DefaultDict[Asset, Balance] = defaultdict(Balance)
        for kraken_name, amount_ in kraken_balances.items():
            try:
                amount = deserialize_asset_amount(amount_)
                if amount == ZERO:
                    continue

                our_asset = asset_from_kraken(kraken_name)
            except UnknownAsset as e:
                self.msg_aggregator.add_warning(
                    f'Found unsupported/unknown kraken asset {e.asset_name}. '
                    f'Ignoring its balance query.', )
                continue
            except DeserializationError as e:
                msg = str(e)
                self.msg_aggregator.add_error(
                    f'Error processing kraken balance for {kraken_name}. Check logs '
                    f'for details. Ignoring it.', )
                log.error(
                    'Error processing kraken balance',
                    kraken_name=kraken_name,
                    amount=amount_,
                    error=msg,
                )
                continue

            balance = Balance(amount=amount)
            if our_asset.identifier != 'KFEE':
                # There is no price value for KFEE. TODO: Shouldn't we then just skip the balance?
                try:
                    usd_price = Inquirer().find_usd_price(our_asset)
                except RemoteError as e:
                    self.msg_aggregator.add_error(
                        f'Error processing kraken balance entry due to inability to '
                        f'query USD price: {str(e)}. Skipping balance entry', )
                    continue

                balance.usd_value = balance.amount * usd_price

            assets_balance[our_asset] += balance
            log.debug(
                'kraken balance query result',
                currency=our_asset,
                amount=balance.amount,
                usd_value=balance.usd_value,
            )

        return dict(assets_balance), ''

    def query_until_finished(
        self,
        endpoint: str,
        keyname: str,
        start_ts: Timestamp,
        end_ts: Timestamp,
        extra_dict: Optional[dict] = None,
    ) -> Tuple[List, bool]:
        """ Abstracting away the functionality of querying a kraken endpoint where
        you need to check the 'count' of the returned results and provide sufficient
        calls with enough offset to gather all the data of your query.
        """
        result: List = []

        with_errors = False
        log.debug(
            f'Querying Kraken {endpoint} from {start_ts} to '
            f'{end_ts} with extra_dict {extra_dict}', )
        response = self._query_endpoint_for_period(
            endpoint=endpoint,
            start_ts=start_ts,
            end_ts=end_ts,
            extra_dict=extra_dict,
        )
        count = response['count']
        offset = len(response[keyname])
        result.extend(response[keyname].values())

        log.debug(f'Kraken {endpoint} Query Response with count:{count}')

        while offset < count:
            log.debug(
                f'Querying Kraken {endpoint} from {start_ts} to {end_ts} '
                f'with offset {offset} and extra_dict {extra_dict}', )
            try:
                response = self._query_endpoint_for_period(
                    endpoint=endpoint,
                    start_ts=start_ts,
                    end_ts=end_ts,
                    offset=offset,
                    extra_dict=extra_dict,
                )
            except RemoteError as e:
                with_errors = True
                log.error(
                    f'One of the kraken period queries failed '
                    f'with {str(e)}. Returning only the results we have.', )
                break

            if count != response['count']:
                log.error(
                    f'Kraken unexpected response while querying endpoint for period. '
                    f'Original count was {count} and response returned {response["count"]}',
                )
                with_errors = True
                break

            response_length = len(response[keyname])
            offset += response_length
            if response_length == 0 and offset != count:
                # If we have provided specific filtering then this is a known
                # issue documented below, so skip the warning logging
                # https://github.com/rotki/rotki/issues/116
                if extra_dict:
                    break
                # it is possible that kraken misbehaves and either does not
                # send us enough results or thinks it has more than it really does
                log.warning(
                    f'Missing {count - offset} results when querying kraken endpoint {endpoint}', )
                with_errors = True
                break

            result.extend(response[keyname].values())

        return result, with_errors
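
A compact sketch of the offset-pagination loop that `query_until_finished` implements, against a fake in-memory endpoint that mimics Kraken's 'count' plus offset ('ofs') scheme:

from typing import List

DATA = [f'entry-{i}' for i in range(7)]
PAGE = 3

def fake_endpoint(offset: int) -> dict:
    # Returns one fixed-size page plus the total count, like Kraken does.
    return {'count': len(DATA), 'ledger': DATA[offset:offset + PAGE]}

def query_until_finished() -> List[str]:
    result: List[str] = []
    response = fake_endpoint(0)
    count = response['count']
    result.extend(response['ledger'])
    offset = len(response['ledger'])
    while offset < count:
        response = fake_endpoint(offset)
        if len(response['ledger']) == 0:
            break  # server returned fewer results than advertised
        result.extend(response['ledger'])
        offset += len(response['ledger'])
    return result

print(query_until_finished())  # all seven entries, fetched in pages of 3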

    def query_online_trade_history(
        self,
        start_ts: Timestamp,
        end_ts: Timestamp,
    ) -> Tuple[List[Trade], Tuple[Timestamp, Timestamp]]:
        """
        Query kraken events from database and create trades from them. May raise:
        - RemoteError if the kraken pairs couldn't be queried
        """
        with_errors = self.query_kraken_ledgers(start_ts=start_ts,
                                                end_ts=end_ts)
        filter_query = HistoryEventFilterQuery.make(
            from_ts=Timestamp(start_ts),
            to_ts=Timestamp(end_ts),
            event_types=[
                HistoryEventType.TRADE,
                HistoryEventType.RECEIVE,
                HistoryEventType.SPEND,
                HistoryEventType.ADJUSTMENT,
            ],
            location=Location.KRAKEN,
            location_label=self.name,
        )
        trades_raw = self.history_events_db.get_history_events(
            filter_query=filter_query,
            has_premium=True,
        )
        trades, max_ts = self.process_kraken_trades(trades_raw)
        queried_range = (start_ts, Timestamp(max_ts)) if with_errors else (start_ts, end_ts)
        return trades, queried_range

    def _query_endpoint_for_period(
        self,
        endpoint: str,
        start_ts: Timestamp,
        end_ts: Timestamp,
        offset: Optional[int] = None,
        extra_dict: Optional[dict] = None,
    ) -> dict:
        request: Dict[str, Union[Timestamp, int]] = {}
        request['start'] = start_ts
        request['end'] = end_ts
        if offset is not None:
            request['ofs'] = offset
        if extra_dict is not None:
            request.update(extra_dict)
        result = self.api_query(endpoint, request)
        return result

    def query_online_deposits_withdrawals(
        self,
        start_ts: Timestamp,
        end_ts: Timestamp,
    ) -> List[AssetMovement]:
        self.query_kraken_ledgers(start_ts=start_ts, end_ts=end_ts)
        filter_query = HistoryEventFilterQuery.make(
            from_ts=Timestamp(start_ts),
            to_ts=Timestamp(end_ts),
            event_types=[
                HistoryEventType.DEPOSIT,
                HistoryEventType.WITHDRAWAL,
            ],
            location=Location.KRAKEN,
            location_label=self.name,
        )
        events = self.history_events_db.get_history_events(
            filter_query=filter_query,
            has_premium=True,
        )
        log.debug('Kraken deposit/withdrawals query result',
                  num_results=len(events))
        movements = []
        get_attr = operator.attrgetter('event_identifier')
        # Create a list of lists where each sublist has the events for the same event identifier
        grouped_events = [
            list(g) for k, g in itertools.groupby(sorted(events, key=get_attr),
                                                  get_attr)
        ]  # noqa: E501
        for movement_events in grouped_events:
            if len(movement_events) == 2:
                if movement_events[0].event_subtype == HistoryEventSubType.FEE:
                    fee = Fee(movement_events[0].balance.amount)
                    movement = movement_events[1]
                elif movement_events[1].event_subtype == HistoryEventSubType.FEE:
                    fee = Fee(movement_events[1].balance.amount)
                    movement = movement_events[0]
                else:
                    self.msg_aggregator.add_error(
                        f'Failed to process deposit/withdrawal. {grouped_events}. Ignoring ...',
                    )
                    continue
            else:
                movement = movement_events[0]
                fee = Fee(ZERO)

            amount = movement.balance.amount
            if movement.event_type == HistoryEventType.WITHDRAWAL:
                amount = amount * -1

            try:
                asset = movement.asset
                movement_type = movement.event_type
                movements.append(
                    AssetMovement(
                        location=Location.KRAKEN,
                        category=deserialize_asset_movement_category(
                            movement_type),
                        timestamp=ts_ms_to_sec(movement.timestamp),
                        address=None,  # no data from kraken ledger endpoint
                        transaction_id=None,  # no data from kraken ledger endpoint
                        asset=asset,
                        amount=amount,
                        fee_asset=asset,
                        fee=fee,
                        link=movement.event_identifier,
                    ))
            except UnknownAsset as e:
                self.msg_aggregator.add_warning(
                    f'Found unknown kraken asset {e.asset_name}. '
                    f'Ignoring its deposit/withdrawals query.', )
                continue
            except (DeserializationError, KeyError) as e:
                msg = str(e)
                if isinstance(e, KeyError):
                    msg = f'Missing key entry for {msg}.'
                self.msg_aggregator.add_error(
                    'Failed to deserialize a kraken deposit/withdrawal. '
                    'Check logs for details. Ignoring it.', )
                log.error(
                    'Error processing a kraken deposit/withdrawal.',
                    raw_asset_movement=movement_events,
                    error=msg,
                )
                continue

        return movements
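
Aside: the grouping step above relies on a subtlety of `itertools.groupby`: it only merges adjacent items, so the input must be sorted by the same key first. A small sketch with dict-based stand-ins for the history events:

import itertools
import operator

events = [
    {'event_identifier': 'ref-2', 'subtype': 'fee'},
    {'event_identifier': 'ref-1', 'subtype': None},
    {'event_identifier': 'ref-2', 'subtype': None},
]
get_id = operator.itemgetter('event_identifier')
grouped = [
    list(g) for _, g in itertools.groupby(sorted(events, key=get_id), get_id)
]
print([len(g) for g in grouped])  # [1, 2] -- ref-1 alone, ref-2's two legs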

    def query_online_margin_history(
            self,  # pylint: disable=no-self-use
            start_ts: Timestamp,  # pylint: disable=unused-argument
            end_ts: Timestamp,  # pylint: disable=unused-argument
    ) -> List[MarginPosition]:
        return []  # noop for kraken

    def query_online_income_loss_expense(
            self,  # pylint: disable=no-self-use
            start_ts: Timestamp,  # pylint: disable=unused-argument
            end_ts: Timestamp,  # pylint: disable=unused-argument
    ) -> List[LedgerAction]:
        return []  # noop for kraken

    def process_kraken_events_for_trade(
        self,
        trade_parts: List[HistoryBaseEntry],
        adjustments: List[HistoryBaseEntry],
    ) -> Optional[Trade]:
        """Processes events from trade parts to a trade. If it's an adjustment
        adds it to a separate list"""
        if trade_parts[0].event_type == HistoryEventType.ADJUSTMENT:
            adjustments.append(trade_parts[0])
            return None  # skip as they don't have same refid

        event_id = trade_parts[0].event_identifier
        is_spend_receive = False
        trade_assets = []
        spend_part, receive_part, fee_part, kfee_part = None, None, None, None

        for trade_part in trade_parts:
            if trade_part.event_type == HistoryEventType.RECEIVE:
                is_spend_receive = True
                receive_part = trade_part
            elif trade_part.event_type == HistoryEventType.SPEND:
                if trade_part.event_subtype == HistoryEventSubType.FEE:
                    fee_part = trade_part
                else:
                    is_spend_receive = True
                    spend_part = trade_part
            elif trade_part.event_type == HistoryEventType.TRADE:
                if trade_part.event_subtype == HistoryEventSubType.FEE:
                    fee_part = trade_part
                elif trade_part.asset == A_KFEE:
                    kfee_part = trade_part
                elif trade_part.balance.amount < ZERO:
                    spend_part = trade_part
                else:
                    receive_part = trade_part

            if (trade_part.balance.amount != ZERO
                    and trade_part.event_subtype != HistoryEventSubType.FEE):
                trade_assets.append(trade_part.asset)

        if is_spend_receive and len(trade_parts) < 2:
            log.warning(
                f'Found kraken spend/receive events {event_id} with '
                f'fewer than 2 parts. {trade_parts}', )
            self.msg_aggregator.add_warning(
                f'Found kraken spend/receive events {event_id} with '
                f'fewer than 2 parts. Skipping...', )
            return None

        timestamp = ts_ms_to_sec(trade_parts[0].timestamp)
        exchange_uuid = (str(event_id) + str(timestamp))
        if len(trade_assets) != 2:
            # This can happen sometimes (for lefteris 5 times since start of kraken usage)
            # when the other part of a trade is so small it's 0. So it's either a
            # receive event with no counterpart or a spend event with no counterpart.
            # This happens for really really small amounts. So we add rate 0 trades
            if spend_part is not None:
                base_asset = spend_part.asset
                trade_type = TradeType.SELL
                amount = spend_part.balance.amount * -1
            elif receive_part is not None:
                base_asset = receive_part.asset
                trade_type = TradeType.BUY
                amount = receive_part.balance.amount
            else:
                log.warning(
                    f'Found historic trade entries with no counterpart {trade_parts}'
                )
                return None

            trade = Trade(
                timestamp=timestamp,
                location=Location.KRAKEN,
                base_asset=base_asset,
                quote_asset=A_USD,  # whatever
                trade_type=trade_type,
                amount=AssetAmount(amount),
                rate=Price(ZERO),
                fee=None,
                fee_currency=None,
                link=exchange_uuid,
            )
            return trade

        if spend_part is None or receive_part is None:
            log.error(
                f'Failed to process {event_id}. Could not find spend/receive parts {trade_parts}',
            )
            self.msg_aggregator.add_error(
                f'Failed to read trades for event {event_id}. '
                f'More details are available at the logs', )
            return None

        spend_asset = spend_part.asset
        receive_asset = receive_part.asset
        if spend_asset.is_fiat() or trade_parts[0] == receive_part:
            trade_type = TradeType.BUY
            base_asset = receive_asset
            quote_asset = spend_asset
            amount = receive_part.balance.amount
            if amount == ZERO:
                self.msg_aggregator.add_warning(
                    f'Rate for kraken trade could not be calculated. Base amount is ZERO '
                    f'for event {event_id}. Skipping event', )
                return None

            rate = Price((spend_part.balance.amount / amount) * -1)
        else:
            trade_type = TradeType.SELL
            base_asset = spend_asset
            quote_asset = receive_asset
            amount = -1 * spend_part.balance.amount
            if amount == ZERO:
                self.msg_aggregator.add_warning(
                    f'Rate for kraken trade could not be calculated. Base amount is ZERO '
                    f'for event {event_id}. Skipping event', )
                return None

            rate = Price((receive_part.balance.amount / amount))

        # If kfee was found we use it as the fee for the trade
        if kfee_part is not None and fee_part is None:
            fee = Fee(kfee_part.balance.amount)
            fee_asset = A_KFEE
        elif (None, None) == (fee_part, kfee_part):
            fee = None
            fee_asset = None
        elif fee_part is not None:
            fee = Fee(fee_part.balance.amount)
            fee_asset = fee_part.asset

        trade = Trade(
            timestamp=timestamp,
            location=Location.KRAKEN,
            base_asset=base_asset,
            quote_asset=quote_asset,
            trade_type=trade_type,
            amount=AssetAmount(amount),
            rate=rate,
            fee=fee,
            fee_currency=fee_asset,
            link=exchange_uuid,
        )
        return trade

    def process_kraken_trades(
        self,
        raw_data: List[HistoryBaseEntry],
    ) -> Tuple[List[Trade], Timestamp]:
        """
        Given a list of history events we process them to create Trade objects. The valid
        history event types are
        - Trade
        - Receive
        - Spend
        - Adjustment

        A pair of receive and spend events can be a trade and kraken uses this kind of event
        for instant trades and trades made from the phone app. What we do in order to verify
        that it is a trade is to check if we can find a pair with the same event id.

        Also, on some rare occasions Kraken may forcibly adjust something for you.
        Example would be delisting of DAO token and forcible exchange to ETH.

        Returns:
        - The list of trades processed
        - The biggest timestamp of all the trades processed

        May raise:
        - RemoteError if the pairs couldn't be correctly queried
        """
        trades = []
        max_ts = 0
        get_attr = operator.attrgetter('event_identifier')
        adjustments: List[HistoryBaseEntry] = []
        # Create a list of lists where each sublist has the events for the same event identifier
        grouped_events = [
            list(g) for k, g in itertools.groupby(
                sorted(raw_data, key=get_attr), get_attr)
        ]  # noqa: E501
        for trade_parts in grouped_events:
            trade = self.process_kraken_events_for_trade(
                trade_parts, adjustments)
            if trade is None:
                continue

            trades.append(trade)
            max_ts = max(max_ts, trade.timestamp)

        adjustments.sort(key=lambda x: x.timestamp)
        if len(adjustments) % 2 == 0:
            for a1, a2 in pairwise(adjustments):
                if a1.event_subtype is None or a2.event_subtype is None:
                    log.warning(
                        f'Found two kraken adjustment entries without a subtype: {a1} {a2}',
                    )
                    continue

                if a1.event_subtype == HistoryEventSubType.SPEND and a2.event_subtype == HistoryEventSubType.RECEIVE:  # noqa: E501
                    spend_event = a1
                    receive_event = a2
                elif a2.event_subtype == HistoryEventSubType.SPEND and a1.event_subtype == HistoryEventSubType.RECEIVE:  # noqa: E501
                    spend_event = a2
                    receive_event = a1
                else:
                    log.warning(
                        f'Found two kraken adjustment entries with mismatched subtypes {a1} {a2}',
                    )
                    continue

                rate = Price(
                    abs(receive_event.balance.amount /
                        spend_event.balance.amount))
                trade = Trade(
                    timestamp=ts_ms_to_sec(a1.timestamp),
                    location=Location.KRAKEN,
                    base_asset=receive_event.asset,
                    quote_asset=spend_event.asset,
                    trade_type=TradeType.BUY,
                    amount=AssetAmount(receive_event.balance.amount),
                    rate=rate,
                    fee=None,
                    fee_currency=None,
                    link='adjustment' + a1.event_identifier +
                    a2.event_identifier,
                )
                trades.append(trade)

        else:
            log.warning(
                f'Got an odd number of kraken adjustment historic entries. '
                f'Skipping reading them. {adjustments}', )

        return trades, Timestamp(max_ts)
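
Aside: judging by the even-length check above, the `pairwise` helper used here pairs items without overlap, (s0, s1), (s2, s3), and so on, which is not the same as `itertools.pairwise` (that one yields overlapping pairs). A sketch under that assumption:

from typing import Iterable, Iterator, Tuple, TypeVar

T = TypeVar('T')

def pairwise(iterable: Iterable[T]) -> Iterator[Tuple[T, T]]:
    # s -> (s0, s1), (s2, s3), ...  An odd trailing element is silently
    # dropped by zip, hence the even-count check before the loop above.
    it = iter(iterable)
    return zip(it, it)

adjustments = ['spend-A', 'receive-A', 'spend-B', 'receive-B']
print(list(pairwise(adjustments)))
# [('spend-A', 'receive-A'), ('spend-B', 'receive-B')]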

    @protect_with_lock()
    def query_kraken_ledgers(self, start_ts: Timestamp,
                             end_ts: Timestamp) -> bool:
        """
        Query Kraken's ledger to retrieve events and transform them to our internal representation
        of history events. Internally we look for the query range that needs to be queried in the
        range (start_ts, end_ts) to avoid double querying the kraken API when this method is called
        for deposits/withdrawals and trades. The events queried are then stored in the database.

        Returns true if any query to the kraken API was not successful
        """
        ranges = DBQueryRanges(self.db)
        range_query_name = f'{self.location}_history_events_{self.name}'
        ranges_to_query = ranges.get_location_query_ranges(
            location_string=range_query_name,
            start_ts=start_ts,
            end_ts=end_ts,
        )
        with_errors = False
        for query_start_ts, query_end_ts in ranges_to_query:
            log.debug(
                f'Querying kraken ledger entries from {query_start_ts} to {query_end_ts}'
            )
            try:
                response, with_errors = self.query_until_finished(
                    endpoint='Ledgers',
                    keyname='ledger',
                    start_ts=query_start_ts,
                    end_ts=query_end_ts,
                    extra_dict={},
                )
            except RemoteError as e:
                self.msg_aggregator.add_error(
                    f'Failed to query kraken ledger between {query_start_ts} and '
                    f'{query_end_ts}. {str(e)}', )
                return True

            # Group related events
            raw_events_grouped = defaultdict(list)
            for raw_event in response:
                raw_events_grouped[raw_event['refid']].append(raw_event)

            new_events = []
            for events in raw_events_grouped.values():
                try:
                    events = sorted(
                        events,
                        key=lambda x: deserialize_fval(x['time'], 'time', 'kraken ledgers') * 1000,
                    )
                except DeserializationError as e:
                    self.msg_aggregator.add_error(
                        f'Failed to read timestamp in kraken event group '
                        f'due to {str(e)}. For more information read the logs. Skipping event',
                    )
                    log.error(f'Failed to read timestamp for {events}')
                    continue
                group_events, found_unknown_event = history_event_from_kraken(
                    events=events,
                    name=self.name,
                    msg_aggregator=self.msg_aggregator,
                )
                if found_unknown_event:
                    for event in group_events:
                        event.event_type = HistoryEventType.INFORMATIONAL
                new_events.extend(group_events)

            if len(new_events) != 0:
                try:
                    self.history_events_db.add_history_events(new_events)
                except InputError as e:
                    self.msg_aggregator.add_error(
                        f'Failed to save kraken events from {query_start_ts} to {query_end_ts} '
                        f'in database. {str(e)}', )

                ranges.update_used_query_range(
                    location_string=range_query_name,
                    queried_ranges=[(start_ts, end_ts)] + ranges_to_query,
                )

            if with_errors is True:
                return True  # we had errors so stop any further queries and quit

        return False  # no errors
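
`DBQueryRanges` above tracks which (start, end) windows have already been fetched so the remote API is never asked twice for the same data. A hypothetical, simplified sketch of that bookkeeping, not rotkehlchen's actual implementation:

from typing import List, Tuple

Timestamp = int

def get_ranges_to_query(
    queried: List[Tuple[Timestamp, Timestamp]],
    start_ts: Timestamp,
    end_ts: Timestamp,
) -> List[Tuple[Timestamp, Timestamp]]:
    """Return the sub-ranges of [start_ts, end_ts] not already covered."""
    to_query = []
    cursor = start_ts
    for q_start, q_end in sorted(queried):
        if q_start > cursor:
            to_query.append((cursor, min(q_start - 1, end_ts)))
        cursor = max(cursor, q_end + 1)
        if cursor > end_ts:
            break
    if cursor <= end_ts:
        to_query.append((cursor, end_ts))
    return to_query

# Already queried 100-200; asking for 0-300 only hits the two gaps.
print(get_ranges_to_query([(100, 200)], 0, 300))  # [(0, 99), (201, 300)]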
Code Example #8
def test_add_edit_delete_entries(rotkehlchen_api_server):
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    entries = _add_entries(rotkehlchen_api_server)
    db = DBHistoryEvents(rotki.data.db)
    saved_events = db.get_history_events(HistoryEventFilterQuery.make(), True)
    for idx, event in enumerate(saved_events):
        assert event == entries[idx]

    entry = entries[2]
    # test editing unknown fails
    unknown_id = 42
    json_data = entry_to_input_dict(entry, include_identifier=True)
    json_data['identifier'] = unknown_id
    response = requests.patch(
        api_url_for(rotkehlchen_api_server, 'historybaseentryresource'),
        json=json_data,
    )
    assert_error_response(
        response=response,
        contained_in_msg=f'Tried to edit event with id {unknown_id} but could not find it in the DB',  # noqa: E501
        status_code=HTTPStatus.CONFLICT,
    )
    # test editing by making sequence index same as an existing one fails
    entry.sequence_index = 3
    entry.timestamp = Timestamp(1649924575000)
    json_data = entry_to_input_dict(entry, include_identifier=True)
    response = requests.patch(
        api_url_for(rotkehlchen_api_server, 'historybaseentryresource'),
        json=json_data,
    )
    assert_error_response(
        response=response,
        contained_in_msg='Tried to edit event to have event_identifier 0xf32e81dbaae8a763cad17bc96b77c7d9e8c59cc31ed4378b8109ce4b301adbbc and sequence_index 3 but it already exists',  # noqa: E501
        status_code=HTTPStatus.CONFLICT,
    )
    # test adding an event with the same sequence index as an existing one fails
    entry.sequence_index = 3
    entry.timestamp = Timestamp(1649924575000)
    json_data = entry_to_input_dict(entry, include_identifier=True)
    response = requests.put(
        api_url_for(rotkehlchen_api_server, 'historybaseentryresource'),
        json=json_data,
    )
    assert_error_response(
        response=response,
        contained_in_msg='Failed to add event to the DB due to a DB error: UNIQUE constraint failed: history_events.event_identifier, history_events.sequence_index',  # noqa: E501
        status_code=HTTPStatus.CONFLICT,
    )
    # test editing works
    entry.sequence_index = 4
    entry.timestamp = Timestamp(1639924575000)
    entry.location = Location.UNISWAP
    entry.event_type = HistoryEventType.DEPOSIT
    entry.asset = A_USDT
    entry.balance = Balance(amount=FVal('1500.1'), usd_value=FVal('1499.45'))
    entry.location_label = '0x9531C059098e3d194fF87FebB587aB07B30B1306'
    entry.notes = 'Deposit stuff for staking somewhere'
    entry.event_subtype = HistoryEventSubType.NONE
    entry.counterparty = '0xAB8d71d59827dcc90fEDc5DDb97f87eFfB1B1A5B'
    json_data = entry_to_input_dict(entry, include_identifier=True)
    response = requests.patch(
        api_url_for(rotkehlchen_api_server, 'historybaseentryresource'),
        json=json_data,
    )
    assert_simple_ok_response(response)

    entries.sort(key=lambda x: x.timestamp)  # resort by timestamp
    saved_events = db.get_history_events(HistoryEventFilterQuery.make(), True)
    assert len(saved_events) == 5
    for idx, event in enumerate(saved_events):
        assert event == entries[idx]

    # test deleting unknown fails
    response = requests.delete(
        api_url_for(rotkehlchen_api_server, 'historybaseentryresource'),
        json={'identifiers': [19, 1, 3]},
    )
    assert_error_response(
        response=response,
        contained_in_msg='Tried to remove history event with id 19 which does not exist',
        status_code=HTTPStatus.CONFLICT,
    )
    saved_events = db.get_history_events(HistoryEventFilterQuery.make(), True)
    assert len(saved_events) == 5
    for idx, event in enumerate(saved_events):
        assert event == entries[idx]

    # test deleting works
    response = requests.delete(
        api_url_for(rotkehlchen_api_server, 'historybaseentryresource'),
        json={'identifiers': [2, 4]},
    )
    result = assert_proper_response_with_result(response)
    assert result is True
    saved_events = db.get_history_events(HistoryEventFilterQuery.make(), True)
    # entry is now last since the timestamp was modified
    assert saved_events == [entries[0], entries[3], entry]

    # test that deleting last event of a transaction hash fails
    response = requests.delete(
        api_url_for(rotkehlchen_api_server, 'historybaseentryresource'),
        json={'identifiers': [1]},
    )
    assert_error_response(
        response=response,
        contained_in_msg='Tried to remove history event with id 1 which was the last event of a transaction',  # noqa: E501
        status_code=HTTPStatus.CONFLICT,
    )
    saved_events = db.get_history_events(HistoryEventFilterQuery.make(), True)
    assert saved_events == [entries[0], entries[3], entry]
Code Example #9
File: decoder.py Project: LefterisJP/rotkehlchen
class EVMTransactionDecoder:
    def __init__(
        self,
        database: 'DBHandler',
        ethereum_manager: 'EthereumManager',
        eth_transactions: 'EthTransactions',
        msg_aggregator: MessagesAggregator,
    ):
        self.database = database
        self.all_counterparties: Set[str] = set()
        self.ethereum_manager = ethereum_manager
        self.eth_transactions = eth_transactions
        self.msg_aggregator = msg_aggregator
        self.dbethtx = DBEthTx(self.database)
        self.dbevents = DBHistoryEvents(self.database)
        self.base = BaseDecoderTools(database=database)
        self.event_rules = [  # rules to try for all tx receipt logs decoding
            self._maybe_decode_erc20_approve,
            self._maybe_decode_erc20_721_transfer,
            self._maybe_enrich_transfers,
            self._maybe_decode_governance,
        ]
        self.token_enricher_rules: List[Callable] = [
        ]  # enrichers to run for token transfers
        self.initialize_all_decoders()
        self.undecoded_tx_query_lock = Semaphore()

    def _recursively_initialize_decoders(
        self,
        package: Union[str, ModuleType],
    ) -> Tuple[Dict[ChecksumEthAddress, Tuple[Any, ...]], List[Callable],
               List[Callable], ]:
        if isinstance(package, str):
            package = importlib.import_module(package)
        address_results = {}
        rules_results = []
        enricher_results = []
        for _, name, is_pkg in pkgutil.walk_packages(package.__path__):
            full_name = package.__name__ + '.' + name
            if full_name == __name__:
                continue  # skip -- this is this source file

            if is_pkg:
                submodule = importlib.import_module(full_name)
                # take module name, transform it and find decoder if exists
                class_name = full_name[MODULES_PREFIX_LENGTH:].translate(
                    {ord('.'): None})
                parts = class_name.split('_')
                class_name = ''.join([x.capitalize() for x in parts])
                submodule_decoder = getattr(submodule, f'{class_name}Decoder',
                                            None)

                if submodule_decoder:
                    if class_name in self.decoders:
                        raise ModuleLoadingError(
                            f'Decoder with name {class_name} already loaded')
                    self.decoders[class_name] = submodule_decoder(
                        ethereum_manager=self.ethereum_manager,
                        base_tools=self.base,
                        msg_aggregator=self.msg_aggregator,
                    )
                    address_results.update(
                        self.decoders[class_name].addresses_to_decoders())
                    rules_results.extend(
                        self.decoders[class_name].decoding_rules())
                    enricher_results.extend(
                        self.decoders[class_name].enricher_rules())
                    self.all_counterparties.update(
                        self.decoders[class_name].counterparties())

                recursive_addrs, recursive_rules, recursive_enricher_results = self._recursively_initialize_decoders(
                    full_name)  # noqa: E501
                address_results.update(recursive_addrs)
                rules_results.extend(recursive_rules)
                enricher_results.extend(recursive_enricher_results)

        return address_results, rules_results, enricher_results
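
`_recursively_initialize_decoders` above is a plugin-discovery walk: `pkgutil.walk_packages` enumerates submodules, and a decoder class is looked up by a naming convention derived from the module name. A simplified sketch of the same pattern, assuming a hypothetical package layout like `myapp.decoders.curve_v2`:

import importlib
import pkgutil
from typing import Any, Dict

def discover_decoders(package_name: str) -> Dict[str, Any]:
    """Find classes named like 'CurveV2Decoder' in 'curve_v2' subpackages."""
    package = importlib.import_module(package_name)
    found = {}
    for _, name, is_pkg in pkgutil.walk_packages(package.__path__):
        if not is_pkg:
            continue
        submodule = importlib.import_module(f'{package_name}.{name}')
        # 'curve_v2' -> 'CurveV2': derive the class name by convention,
        # then pick the decoder up only if the submodule defines it.
        class_name = ''.join(part.capitalize() for part in name.split('_'))
        decoder_cls = getattr(submodule, f'{class_name}Decoder', None)
        if decoder_cls is not None:
            found[class_name] = decoder_cls
    return found

# found = discover_decoders('myapp.decoders')  # hypothetical package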

    def initialize_all_decoders(self) -> None:
        """Recursively check all submodules to get all decoder address mappings and rules
        """
        self.decoders: Dict[str, 'DecoderInterface'] = {}
        address_result, rules_result, enrichers_result = self._recursively_initialize_decoders(
            MODULES_PACKAGE)  # noqa: E501
        self.address_mappings = address_result
        self.event_rules.extend(rules_result)
        self.token_enricher_rules.extend(enrichers_result)
        # update with counterparties not in any module
        self.all_counterparties.update([CPT_GAS, CPT_GNOSIS_CHAIN])

    def reload_from_db(self) -> None:
        """Reload all related settings from DB so that decoding happens with latest"""
        self.base.refresh_tracked_accounts()
        for _, decoder in self.decoders.items():
            if isinstance(decoder, CustomizableDateMixin):
                decoder.reload_settings()

    def try_all_rules(
        self,
        token: Optional[EthereumToken],
        tx_log: EthereumTxReceiptLog,
        transaction: EthereumTransaction,
        decoded_events: List[HistoryBaseEntry],
        action_items: List[ActionItem],
    ) -> Optional[HistoryBaseEntry]:
        for rule in self.event_rules:
            event = rule(
                token=token,
                tx_log=tx_log,
                transaction=transaction,
                decoded_events=decoded_events,
                action_items=action_items,
            )
            if event:
                return event

        return None
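
    # The event rules form a first-match chain: each rule either returns a decoded
    # HistoryBaseEntry or None to pass the log on to the next rule. A minimal
    # sketch of a conforming rule (MY_EVENT_TOPIC is a hypothetical constant, not
    # one defined in this file):
    #
    #   def _maybe_decode_my_event(self, token, tx_log, transaction, decoded_events, action_items):
    #       if tx_log.topics[0] != MY_EVENT_TOPIC:
    #           return None  # not ours -- let the next rule try
    #       return HistoryBaseEntry(...)  # build the decoded event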

    def decode_by_address_rules(
        self,
        tx_log: EthereumTxReceiptLog,
        transaction: EthereumTransaction,
        decoded_events: List[HistoryBaseEntry],
        all_logs: List[EthereumTxReceiptLog],
        action_items: List[ActionItem],
    ) -> Tuple[Optional[HistoryBaseEntry], Optional[ActionItem]]:
        """
        Sees if the log is on an address for which we have specific decoders and calls it

        Should catch all underlying errors these decoders will raise. So far known are:
        - DeserializationError
        - ConversionError
        - UnknownAsset
        """
        mapping_result = self.address_mappings.get(tx_log.address)
        if mapping_result is None:
            return None, None
        method = mapping_result[0]

        try:
            if len(mapping_result) == 1:
                result = method(tx_log, transaction, decoded_events, all_logs, action_items)
            else:
                result = method(
                    tx_log, transaction, decoded_events, all_logs, action_items,
                    *mapping_result[1:],
                )
        except (DeserializationError, ConversionError, UnknownAsset) as e:
            log.debug(
                f'Decoding tx log with index {tx_log.log_index} of transaction '
                f'{transaction.tx_hash.hex()} through {method.__name__} failed due to {str(e)}'
            )
            return None, None

        return result
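
    # The values in address_mappings are tuples: the decoding callable first,
    # followed by any extra positional arguments it needs. An illustrative entry
    # (the address and extra argument here are hypothetical):
    #
    #   address_mappings = {
    #       some_contract_address: (pool_decoder.decode_event, pool_token),
    #   }
    #   # a 1-tuple is called with only the standard arguments; longer tuples get
    #   # *mapping_result[1:] appended, as in the branch above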

    def decode_transaction(
        self,
        transaction: EthereumTransaction,
        tx_receipt: EthereumTxReceipt,
    ) -> List[HistoryBaseEntry]:
        """Decodes an ethereum transaction and its receipt and saves result in the DB"""
        cursor = self.database.conn.cursor()
        self.base.reset_sequence_counter()
        # check if any eth transfer happened in the transaction, including in internal transactions
        events = self._maybe_decode_simple_transactions(
            transaction, tx_receipt)
        action_items: List[ActionItem] = []

        # decode transaction logs from the receipt
        for tx_log in tx_receipt.logs:
            event, action_item = self.decode_by_address_rules(
                tx_log, transaction, events, tx_receipt.logs, action_items,
            )
            if action_item:
                action_items.append(action_item)
            if event:
                events.append(event)
                continue

            token = GlobalDBHandler.get_ethereum_token(tx_log.address)
            event = self.try_all_rules(
                token=token,
                tx_log=tx_log,
                transaction=transaction,
                decoded_events=events,
                action_items=action_items,
            )
            if event:
                events.append(event)

        self.dbevents.add_history_events(events)
        cursor.execute(
            'INSERT OR IGNORE INTO evm_tx_mappings(tx_hash, blockchain, value) VALUES(?, ?, ?)',
            (transaction.tx_hash, 'ETH', HISTORY_MAPPING_DECODED),
        )
        self.database.update_last_write()
        return sorted(events, key=lambda x: x.sequence_index, reverse=False)
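
    # End-to-end usage sketch, assuming `decoder`, a stored `tx` and its `receipt`
    # already exist: address-specific decoders run first per log, then the generic
    # event rules, and the result is persisted and returned in sequence order.
    #
    #   events = decoder.decode_transaction(transaction=tx, tx_receipt=receipt)
    #   assert all(a.sequence_index <= b.sequence_index for a, b in zip(events, events[1:]))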

    def get_and_decode_undecoded_transactions(self, limit: Optional[int] = None) -> None:
        """Checks the DB for up to `limit` undecoded transactions and decodes them.

        This is protected from concurrent access by a lock"""
        with self.undecoded_tx_query_lock:
            hashes = self.dbethtx.get_transaction_hashes_not_decoded(
                limit=limit)
            self.decode_transaction_hashes(ignore_cache=False,
                                           tx_hashes=hashes)
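
    # Usage sketch: decode a bounded batch of not-yet-decoded transactions. The
    # undecoded_tx_query_lock taken above keeps two concurrent callers from
    # picking up the same undecoded hashes.
    #
    #   decoder.get_and_decode_undecoded_transactions(limit=50)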

    def decode_transaction_hashes(
        self,
        ignore_cache: bool,
        tx_hashes: Optional[List[EVMTxHash]],
    ) -> List[HistoryBaseEntry]:
        """Make sure that receipts are pulled + events decoded for the given transaction hashes.

        The transaction hashes must exist in the DB at the time of the call

        May raise:
        - DeserializationError if there is a problem deserializing the remote response
        - RemoteError if there is a problem with contacting a remote to get receipts
        - InputError if the transaction hash is not found in the DB
        """
        events = []
        self.reload_from_db()

        # If no transaction hashes are passed, decode all transactions.
        if tx_hashes is None:
            tx_hashes = []
            cursor = self.database.conn.cursor()
            for entry in cursor.execute(
                    'SELECT tx_hash FROM ethereum_transactions'):
                tx_hashes.append(EVMTxHash(entry[0]))

        for tx_hash in tx_hashes:
            try:
                receipt = self.eth_transactions.get_or_query_transaction_receipt(tx_hash)
            except RemoteError as e:
                raise InputError(f'Hash {tx_hash.hex()} does not correspond to a transaction') from e  # noqa: E501

            # TODO: Change this if transaction filter query can accept multiple hashes
            txs = self.dbethtx.get_ethereum_transactions(
                filter_=ETHTransactionsFilterQuery.make(tx_hash=tx_hash),
                has_premium=True,  # ignore limiting here
            )
            events.extend(
                self.get_or_decode_transaction_events(
                    transaction=txs[0],
                    tx_receipt=receipt,
                    ignore_cache=ignore_cache,
                ))

        return events
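
    # Usage sketch (the hash bytes are illustrative): force a re-decode of a
    # single transaction that already exists in the DB.
    #
    #   tx_hash = make_evm_tx_hash(bytes.fromhex('aa' * 32))
    #   events = decoder.decode_transaction_hashes(ignore_cache=True, tx_hashes=[tx_hash])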

    def get_or_decode_transaction_events(
        self,
        transaction: EthereumTransaction,
        tx_receipt: EthereumTxReceipt,
        ignore_cache: bool,
    ) -> List[HistoryBaseEntry]:
        """Get a transaction's events if existing in the DB or decode them"""
        cursor = self.database.conn.cursor()
        if ignore_cache is True:  # delete all decoded events
            self.dbevents.delete_events_by_tx_hash([transaction.tx_hash])
            cursor.execute(
                'DELETE from evm_tx_mappings WHERE tx_hash=? AND blockchain=? AND value=?',
                (transaction.tx_hash, 'ETH', HISTORY_MAPPING_DECODED),
            )
        else:  # see if events are already decoded and return them
            results = cursor.execute(
                'SELECT COUNT(*) from evm_tx_mappings WHERE tx_hash=? AND blockchain=? AND value=?',  # noqa: E501
                (transaction.tx_hash, 'ETH', HISTORY_MAPPING_DECODED),
            )
            if results.fetchone()[0] != 0:  # already decoded and in the DB
                events = self.dbevents.get_history_events(
                    filter_query=HistoryEventFilterQuery.make(
                        event_identifier=transaction.tx_hash.hex(),
                    ),
                    has_premium=True,  # for this function we don't limit anything
                )
                return events

        # else we should decode now
        events = self.decode_transaction(transaction, tx_receipt)
        return events
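
    # The decoded/not-decoded distinction lives in the evm_tx_mappings table: a
    # (tx_hash, 'ETH', HISTORY_MAPPING_DECODED) row marks a transaction as done.
    # Equivalent standalone check (a sketch of the query used above):
    #
    #   cursor.execute(
    #       'SELECT COUNT(*) from evm_tx_mappings WHERE tx_hash=? AND blockchain=? AND value=?',
    #       (tx_hash, 'ETH', HISTORY_MAPPING_DECODED),
    #   )
    #   already_decoded = cursor.fetchone()[0] != 0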

    def _maybe_decode_internal_transactions(
        self,
        tx: EthereumTransaction,
        tx_receipt: EthereumTxReceipt,
        events: List[HistoryBaseEntry],
        tx_hash_hex: str,
        ts_ms: TimestampMS,
    ) -> None:
        """
        check for internal transactions if the transaction is not canceled. This function mutates
        the events argument.
        """
        if tx_receipt.status is False:
            return

        internal_txs = self.dbethtx.get_ethereum_internal_transactions(
            parent_tx_hash=tx.tx_hash, )
        for internal_tx in internal_txs:
            if internal_tx.to_address is None:
                continue  # can that happen? Internal transaction deploying a contract?
            direction_result = self.base.decode_direction(
                internal_tx.from_address, internal_tx.to_address)  # noqa: E501
            if direction_result is None:
                continue

            amount = ZERO if internal_tx.value == 0 else from_wei(
                FVal(internal_tx.value))
            if amount == ZERO:
                continue

            event_type, location_label, counterparty, verb = direction_result
            events.append(
                HistoryBaseEntry(
                    event_identifier=tx_hash_hex,
                    sequence_index=self.base.get_next_sequence_counter(),
                    timestamp=ts_ms,
                    location=Location.BLOCKCHAIN,
                    location_label=location_label,
                    asset=A_ETH,
                    balance=Balance(amount=amount),
                    notes=f'{verb} {amount} ETH {internal_tx.from_address} -> {internal_tx.to_address}',  # noqa: E501
                    event_type=event_type,
                    event_subtype=HistoryEventSubType.NONE,
                    counterparty=counterparty,
                ))

    def _maybe_decode_simple_transactions(
        self,
        tx: EthereumTransaction,
        tx_receipt: EthereumTxReceipt,
    ) -> List[HistoryBaseEntry]:
        """Decodes normal ETH transfers, internal transactions and gas cost payments"""
        events: List[HistoryBaseEntry] = []
        tx_hash_hex = tx.tx_hash.hex()
        ts_ms = ts_sec_to_ms(tx.timestamp)

        # check for gas spent
        direction_result = self.base.decode_direction(tx.from_address,
                                                      tx.to_address)
        if direction_result is not None:
            event_type, location_label, counterparty, verb = direction_result
            if event_type in (HistoryEventType.SPEND,
                              HistoryEventType.TRANSFER):
                eth_burned_as_gas = from_wei(FVal(tx.gas_used * tx.gas_price))
                events.append(
                    HistoryBaseEntry(
                        event_identifier=tx_hash_hex,
                        sequence_index=self.base.get_next_sequence_counter(),
                        timestamp=ts_ms,
                        location=Location.BLOCKCHAIN,
                        location_label=location_label,
                        asset=A_ETH,
                        balance=Balance(amount=eth_burned_as_gas),
                        notes=f'Burned {eth_burned_as_gas} ETH in gas from {location_label}',
                        event_type=HistoryEventType.SPEND,
                        event_subtype=HistoryEventSubType.FEE,
                        counterparty=CPT_GAS,
                    ))

        # Decode internal transactions after gas so that the gas event always has sequence index 0
        self._maybe_decode_internal_transactions(
            tx=tx,
            tx_receipt=tx_receipt,
            events=events,
            tx_hash_hex=tx_hash_hex,
            ts_ms=ts_ms,
        )

        if tx_receipt.status is False or direction_result is None:
            # Nothing more to do for failed transactions or for transactions
            # where no tracked address is involved
            return events

        # now decode the actual transaction eth transfer itself
        amount = ZERO if tx.value == 0 else from_wei(FVal(tx.value))
        if tx.to_address is None:
            if not self.base.is_tracked(tx.from_address):
                return events

            events.append(
                HistoryBaseEntry(  # contract deployment
                    event_identifier=tx_hash_hex,
                    sequence_index=self.base.get_next_sequence_counter(),
                    timestamp=ts_ms,
                    location=Location.BLOCKCHAIN,
                    location_label=tx.from_address,
                    asset=A_ETH,
                    balance=Balance(amount=amount),
                    notes='Contract deployment',
                    event_type=HistoryEventType.INFORMATIONAL,
                    event_subtype=HistoryEventSubType.DEPLOY,
                    counterparty=None,  # TODO: Find out contract address
                ))
            return events

        if amount == ZERO:
            return events

        events.append(
            HistoryBaseEntry(
                event_identifier=tx_hash_hex,
                sequence_index=self.base.get_next_sequence_counter(),
                timestamp=ts_ms,
                location=Location.BLOCKCHAIN,
                location_label=location_label,
                asset=A_ETH,
                balance=Balance(amount=amount),
                notes=f'{verb} {amount} ETH {tx.from_address} -> {tx.to_address}',
                event_type=event_type,
                event_subtype=HistoryEventSubType.NONE,
                counterparty=counterparty,
            ))
        return events
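
    # The gas event's arithmetic: gas_used * gas_price is denominated in wei, so
    # from_wei() converts it to ETH. Worked example with illustrative numbers:
    #
    #   gas_used, gas_price = 21000, 30 * 10**9           # plain transfer at 30 gwei
    #   from_wei(FVal(gas_used * gas_price))              # -> FVal('0.00063') ETH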

    def _maybe_decode_erc20_approve(
            self,
            token: Optional[EthereumToken],
            tx_log: EthereumTxReceiptLog,
            transaction: EthereumTransaction,
            decoded_events: List[HistoryBaseEntry],  # pylint: disable=unused-argument
            action_items: List[ActionItem],  # pylint: disable=unused-argument
    ) -> Optional[HistoryBaseEntry]:
        if tx_log.topics[0] != ERC20_APPROVE or token is None:
            return None

        owner_address = hex_or_bytes_to_address(tx_log.topics[1])
        spender_address = hex_or_bytes_to_address(tx_log.topics[2])

        if not any(
                self.base.is_tracked(x)
                for x in (owner_address, spender_address)):
            return None

        amount_raw = hex_or_bytes_to_int(tx_log.data)
        amount = token_normalized_value(token_amount=amount_raw, token=token)
        notes = f'Approve {amount} {token.symbol} of {owner_address} for spending by {spender_address}'  # noqa: E501
        return HistoryBaseEntry(
            event_identifier=transaction.tx_hash.hex(),
            sequence_index=self.base.get_sequence_index(tx_log),
            timestamp=ts_sec_to_ms(transaction.timestamp),
            location=Location.BLOCKCHAIN,
            location_label=owner_address,
            asset=token,
            balance=Balance(amount=amount),
            notes=notes,
            event_type=HistoryEventType.INFORMATIONAL,
            event_subtype=HistoryEventSubType.APPROVE,
            counterparty=spender_address,
        )
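
    # ERC-20 Approval logs put the owner and spender in the indexed topics and the
    # allowance in the data field, which is what the reads of topics[1], topics[2]
    # and tx_log.data above rely on. Normalization sketch (decimals illustrative):
    #
    #   amount_raw = hex_or_bytes_to_int(tx_log.data)                  # e.g. 5 * 10**18
    #   token_normalized_value(token_amount=amount_raw, token=token)   # -> 5 for 18 decimals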

    def _maybe_decode_erc20_721_transfer(
        self,
        token: Optional[EthereumToken],
        tx_log: EthereumTxReceiptLog,
        transaction: EthereumTransaction,
        decoded_events: List[HistoryBaseEntry],  # pylint: disable=unused-argument
        action_items: List[ActionItem],
    ) -> Optional[HistoryBaseEntry]:
        if tx_log.topics[0] != ERC20_OR_ERC721_TRANSFER:
            return None

        if token is None:
            try:
                found_token = get_or_create_ethereum_token(
                    userdb=self.database,
                    ethereum_address=tx_log.address,
                    ethereum_manager=self.ethereum_manager,
                )
            except NotERC20Conformant:
                return None  # ignore non-ERC20 transfers for now
        else:
            found_token = token

        transfer = self.base.decode_erc20_721_transfer(
            token=found_token,
            tx_log=tx_log,
            transaction=transaction,
        )
        if transfer is None:
            return None

        for idx, action_item in enumerate(action_items):
            if (
                    action_item.asset == found_token and
                    action_item.amount == transfer.balance.amount and
                    action_item.from_event_type == transfer.event_type and
                    action_item.from_event_subtype == transfer.event_subtype
            ):
                if action_item.action == 'skip':
                    action_items.pop(idx)
                    return None

                # else atm only transform
                if action_item.to_event_type is not None:
                    transfer.event_type = action_item.to_event_type
                if action_item.to_event_subtype is not None:
                    transfer.event_subtype = action_item.to_event_subtype
                if action_item.to_notes is not None:
                    transfer.notes = action_item.to_notes
                if action_item.to_counterparty is not None:
                    transfer.counterparty = action_item.to_counterparty
                if action_item.extra_data is not None:
                    transfer.extra_data = action_item.extra_data

                if action_item.paired_event_data is not None:
                    # If there is a paired event to this, take care of the order
                    out_event = transfer
                    in_event = action_item.paired_event_data[0]
                    if action_item.paired_event_data[1] is True:
                        out_event = action_item.paired_event_data[0]
                        in_event = transfer
                    maybe_reshuffle_events(
                        out_event=out_event,
                        in_event=in_event,
                        events_list=decoded_events + [transfer],
                    )

                action_items.pop(idx)
                break  # found an action item and acted on it

        # Add additional information to transfers for different protocols
        self._enrich_protocol_transfers(
            token=found_token,
            tx_log=tx_log,
            transaction=transaction,
            event=transfer,
            action_items=action_items,
        )
        return transfer
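
    # Action items let an earlier decoder rewrite or suppress a transfer it knows
    # will show up later in the same receipt. A sketch of a 'skip' item; the field
    # names mirror the attribute accesses above, but the constructor call itself
    # is an assumption:
    #
    #   ActionItem(
    #       action='skip',
    #       asset=some_token,
    #       amount=expected_amount,
    #       from_event_type=HistoryEventType.RECEIVE,
    #       from_event_subtype=HistoryEventSubType.NONE,
    #   )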

    def _maybe_enrich_transfers(  # pylint: disable=no-self-use
            self,
            token: Optional[EthereumToken],  # pylint: disable=unused-argument
            tx_log: EthereumTxReceiptLog,
            transaction: EthereumTransaction,  # pylint: disable=unused-argument
            decoded_events: List[HistoryBaseEntry],
            action_items: List[ActionItem],  # pylint: disable=unused-argument
    ) -> Optional[HistoryBaseEntry]:
        if tx_log.topics[0] == GTC_CLAIM and tx_log.address == '0xDE3e5a990bCE7fC60a6f017e7c4a95fc4939299E':  # noqa: E501
            for event in decoded_events:
                if event.asset == A_GTC and event.event_type == HistoryEventType.RECEIVE:
                    event.event_subtype = HistoryEventSubType.AIRDROP
                    event.notes = f'Claim {event.balance.amount} GTC from the GTC airdrop'
            return None

        if tx_log.topics[0] == ONEINCH_CLAIM and tx_log.address == '0xE295aD71242373C37C5FdA7B57F26f9eA1088AFe':  # noqa: E501
            for event in decoded_events:
                if event.asset == A_1INCH and event.event_type == HistoryEventType.RECEIVE:
                    event.event_subtype = HistoryEventSubType.AIRDROP
                    event.notes = f'Claim {event.balance.amount} 1INCH from the 1INCH airdrop'  # noqa: E501
            return None

        if tx_log.topics[0] == GNOSIS_CHAIN_BRIDGE_RECEIVE and tx_log.address == '0x88ad09518695c6c3712AC10a214bE5109a655671':  # noqa: E501
            for event in decoded_events:
                if event.event_type == HistoryEventType.RECEIVE:
                    # user bridged from gnosis chain
                    event.event_type = HistoryEventType.TRANSFER
                    event.event_subtype = HistoryEventSubType.BRIDGE
                    event.counterparty = CPT_GNOSIS_CHAIN
                    event.notes = (
                        f'Bridge {event.balance.amount} {event.asset.symbol} from gnosis chain'
                    )

        return None

    def _enrich_protocol_transfers(  # pylint: disable=no-self-use
        self,
        token: EthereumToken,
        tx_log: EthereumTxReceiptLog,
        transaction: EthereumTransaction,
        event: HistoryBaseEntry,
        action_items: List[ActionItem],
    ) -> None:
        """
        Decode special transfers made by contract execution for example at the moment
        of depositing assets or withdrawing.
        It assumes that the event being decoded has been already filtered and is a
        transfer.
        """
        for enrich_call in self.token_enricher_rules:
            transfer_enriched = enrich_call(
                token=token,
                tx_log=tx_log,
                transaction=transaction,
                event=event,
                action_items=action_items,
            )
            if transfer_enriched:
                break
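
    # Enrichers run in registration order and the first truthy return value stops
    # the chain. A minimal sketch of a conforming enricher (the address constant
    # and counterparty label are hypothetical):
    #
    #   def enrich_my_protocol(token, tx_log, transaction, event, action_items):
    #       if tx_log.address != MY_PROTOCOL_ADDRESS:
    #           return None
    #       event.counterparty = 'my-protocol'
    #       return True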

    def _maybe_decode_governance(  # pylint: disable=no-self-use
            self,
            token: Optional[EthereumToken],  # pylint: disable=unused-argument
            tx_log: EthereumTxReceiptLog,
            transaction: EthereumTransaction,
            decoded_events: List[HistoryBaseEntry],  # pylint: disable=unused-argument
            action_items: List[ActionItem],  # pylint: disable=unused-argument
    ) -> Optional[HistoryBaseEntry]:
        if tx_log.topics[0] == GOVERNORALPHA_PROPOSE:
            if tx_log.address == '0xDbD27635A534A3d3169Ef0498beB56Fb9c937489':
                governance_name = 'Gitcoin'
            else:
                governance_name = tx_log.address

            try:
                _, decoded_data = decode_event_data_abi_str(
                    tx_log, GOVERNORALPHA_PROPOSE_ABI)
            except DeserializationError as e:
                log.debug(
                    f'Failed to decode governor alpha event due to {str(e)}')
                return None

            proposal_id = decoded_data[0]
            proposal_text = decoded_data[8]
            notes = f'Create {governance_name} proposal {proposal_id}. {proposal_text}'
            return HistoryBaseEntry(
                event_identifier=transaction.tx_hash.hex(),
                sequence_index=self.base.get_sequence_index(tx_log),
                timestamp=ts_sec_to_ms(transaction.timestamp),
                location=Location.BLOCKCHAIN,
                location_label=transaction.from_address,
                # TODO: This should be null for proposals and other informational events
                asset=A_ETH,
                balance=Balance(),
                notes=notes,
                event_type=HistoryEventType.INFORMATIONAL,
                event_subtype=HistoryEventSubType.GOVERNANCE_PROPOSE,
                counterparty=governance_name,
            )

        return None
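
    # For reference, GovernorAlpha's ProposalCreated event is (id, proposer,
    # targets, values, signatures, calldatas, startBlock, endBlock, description),
    # which is why decoded_data[0] is the proposal id and decoded_data[8] the
    # proposal text in the decoder above.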