Example #1
def test_all_action_types_writtable_in_db(database,
                                          function_scope_messages_aggregator):
    db = DBLedgerActions(database, function_scope_messages_aggregator)

    query = 'SELECT COUNT(*) FROM ledger_actions WHERE identifier=?'
    cursor = database.conn.cursor()

    for entry in LedgerActionType:
        action = LedgerAction(
            identifier=0,  # whatever
            timestamp=1,
            action_type=entry,
            location=Location.EXTERNAL,
            amount=FVal(1),
            asset=A_ETH,
            rate=None,
            rate_asset=None,
            link=None,
            notes=None,
        )
        identifier = db.add_ledger_action(action)
        # Check that changes have been committed to db
        cursor.execute(query, (identifier, ))
        assert cursor.fetchone() == (1, )
    assert len(
        db.get_ledger_actions(
            filter_query=LedgerActionsFilterQuery.make(),
            has_premium=True,
        )) == len(LedgerActionType)
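
These snippets are shown without their import blocks. A hedged sketch of the imports they rely on follows; the module paths match rotki's layout around the time of these examples and may differ in other versions.

# Approximate imports for the examples on this page (paths may vary across rotki versions).
from typing import List, Optional, Tuple

from rotkehlchen.accounting.ledger_actions import (
    GitcoinEventData,
    GitcoinEventTxType,
    LedgerAction,
    LedgerActionType,
)
from rotkehlchen.constants.assets import A_ETH, A_USD, A_USDC
from rotkehlchen.db.filtering import LedgerActionsFilterQuery
from rotkehlchen.db.ledger_actions import DBLedgerActions
from rotkehlchen.fval import FVal
from rotkehlchen.typing import Location, Timestamp  # rotkehlchen.types in newer versions
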
Example #2
File: test_gitcoin.py  Project: rotki/rotki
def test_csv_import(database, price_historian):  # pylint: disable=unused-argument
    imp = GitcoinDataImporter(database)
    csv_path = Path(__file__).resolve().parent.parent / 'data' / 'gitcoin.csv'
    imp.import_gitcoin_csv(csv_path)

    actions = imp.db_ledger.get_ledger_actions(
        filter_query=LedgerActionsFilterQuery.make(location=Location.GITCOIN),
        has_premium=True,
    )

    assert len(actions) == 10
    expected_actions = [
        LedgerAction(
            identifier=1,
            timestamp=Timestamp(1624798800),
            action_type=LedgerActionType.DONATION_RECEIVED,
            location=Location.GITCOIN,
            amount=FVal('0.0004789924016679019628604417823'),
            asset=A_ETH,
            rate=FVal('1983.33'),
            rate_asset=A_USD,
            link='0x00298f72ad40167051e111e6dc2924de08cce7cf0ad00d04ad5a9e58426536a1',
            notes='Gitcoin grant 149 event',
            extra_data=GitcoinEventData(
                tx_id='0x00298f72ad40167051e111e6dc2924de08cce7cf0ad00d04ad5a9e58426536a1',
                grant_id=149,
                clr_round=None,
                tx_type=GitcoinEventTxType.ETHEREUM,
            ),
        ),
        LedgerAction(
            identifier=2,
            timestamp=Timestamp(1624798800),
            action_type=LedgerActionType.DONATION_RECEIVED,
            location=Location.GITCOIN,
            amount=FVal('0.0005092445533521905078832065264'),
            asset=A_ETH,
            rate=FVal('1983.33'),
            rate_asset=A_USD,
            link='sync-tx:5612f84bc20cda25b911af39b792c973bdd5916b3b6868db2420b5dafd705a90',
            notes='Gitcoin grant 149 event',
            extra_data=GitcoinEventData(
                tx_id='5612f84bc20cda25b911af39b792c973bdd5916b3b6868db2420b5dafd705a90',
                grant_id=149,
                clr_round=None,
                tx_type=GitcoinEventTxType.ZKSYNC,
            ),
        )
    ]
    assert expected_actions == actions[:2]
Example #3
    def query_income_loss_expense(
        self,
        start_ts: Timestamp,
        end_ts: Timestamp,
        only_cache: bool,
    ) -> List[LedgerAction]:
        """Queries the local DB and the exchange for the income/loss/expense history of the user

        If only_cache is true only what is already cached in the DB is returned without
        an actual exchange query.
        """
        db = DBLedgerActions(self.db, self.db.msg_aggregator)
        filter_query = LedgerActionsFilterQuery.make(
            from_ts=start_ts,
            to_ts=end_ts,
            location=self.location,
        )
        # has_premium True is fine here since the result of this is not user facing atm
        ledger_actions = db.get_ledger_actions(filter_query=filter_query,
                                               has_premium=True)
        if only_cache:
            return ledger_actions

        ranges = DBQueryRanges(self.db)
        location_string = f'{str(self.location)}_ledger_actions_{self.name}'
        ranges_to_query = ranges.get_location_query_ranges(
            location_string=location_string,
            start_ts=start_ts,
            end_ts=end_ts,
        )
        new_ledger_actions = []
        for query_start_ts, query_end_ts in ranges_to_query:
            new_ledger_actions.extend(
                self.query_online_income_loss_expense(
                    start_ts=query_start_ts,
                    end_ts=query_end_ts,
                ))

        if new_ledger_actions != []:
            db.add_ledger_actions(new_ledger_actions)
        ranges.update_used_query_range(
            location_string=location_string,
            start_ts=start_ts,
            end_ts=end_ts,
            ranges_to_query=ranges_to_query,
        )
        ledger_actions.extend(new_ledger_actions)

        return ledger_actions
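
A hedged usage sketch of the method above: `exchange` stands for any exchange instance that provides it, and the timestamps are arbitrary. With only_cache=True the call returns straight from the DB; with only_cache=False it also queries the exchange for any sub-ranges not yet covered and stores the new actions.

# Illustrative only; `exchange` is any exchange object exposing query_income_loss_expense.
cached_only = exchange.query_income_loss_expense(
    start_ts=Timestamp(0),
    end_ts=Timestamp(1640995200),
    only_cache=True,   # serve whatever is already in the DB, no remote call
)

refreshed = exchange.query_income_loss_expense(
    start_ts=Timestamp(0),
    end_ts=Timestamp(1640995200),
    only_cache=False,  # also query the exchange for ranges not queried before
)
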
Example #4
    def get_ledger_actions_and_limit_info(
            self,
            filter_query: LedgerActionsFilterQuery,
            has_premium: bool,
    ) -> Tuple[List[LedgerAction], int]:
        """Gets all ledger actions for the query from the DB

        Also returns the total number of actions found for the filter
        """
        actions = self.get_ledger_actions(filter_query=filter_query, has_premium=has_premium)
        cursor = self.db.conn.cursor()
        query, bindings = filter_query.prepare(with_pagination=False)
        query = 'SELECT COUNT(*) from ledger_actions ' + query
        total_found_result = cursor.execute(query, bindings)
        return actions, total_found_result.fetchone()[0]
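
The second query above reuses the same filter but asks prepare() to drop pagination, so the COUNT covers every matching row rather than only the current page. A hedged sketch of the intent; the exact clause prepare() emits is an implementation detail of LedgerActionsFilterQuery.

# Conceptual shape of the two queries built from one filter (hedged illustration):
#
#   page_query,  page_bindings  = filter_query.prepare()                       # '... WHERE ... LIMIT ? OFFSET ?'
#   count_query, count_bindings = filter_query.prepare(with_pagination=False)  # same clause, no LIMIT/OFFSET
#
#   SELECT *        from ledger_actions <page_query>    -> one page of actions
#   SELECT COUNT(*) from ledger_actions <count_query>   -> total matches across all pages
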
Example #5
    def get_gitcoin_grant_events(
            self,
            grant_id: Optional[int],
            from_ts: Optional[Timestamp] = None,
            to_ts: Optional[Timestamp] = None,
    ) -> List[LedgerAction]:
        filter_query = LedgerActionsFilterQuery.make(
            from_ts=from_ts,
            to_ts=to_ts,
            location=Location.GITCOIN,
        )
        ledger_actions = self.get_ledger_actions(filter_query=filter_query, has_premium=True)
        if grant_id is None:
            return ledger_actions
        # else
        return [x for x in ledger_actions if x.extra_data.grant_id == grant_id]  # type: ignore
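
A short usage sketch, assuming this method lives on DBLedgerActions alongside get_ledger_actions (it calls self.get_ledger_actions) and using the `database` fixture seen in the tests; grant id 149 is the rotki grant used throughout these examples.

db_ledger = DBLedgerActions(database, database.msg_aggregator)

all_grant_events = db_ledger.get_gitcoin_grant_events(grant_id=None)   # every gitcoin grant
rotki_grant_events = db_ledger.get_gitcoin_grant_events(grant_id=149)  # one grant, filtered in Python on extra_data.grant_id
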
Example #6
    def get_ledger_actions(
            self,
            filter_query: LedgerActionsFilterQuery,
            has_premium: bool,
    ) -> List[LedgerAction]:
        """Returns a list of ledger actions optionally filtered by the given filter.

        Returned list is ordered according to the passed filter query
        """
        cursor = self.db.conn.cursor()
        query_filter, bindings = filter_query.prepare()
        if has_premium:
            query = 'SELECT * from ledger_actions ' + query_filter
            results = cursor.execute(query, bindings)
        else:
            query = 'SELECT * FROM (SELECT * from ledger_actions ORDER BY timestamp DESC LIMIT ?) ' + query_filter  # noqa: E501
            results = cursor.execute(query, [FREE_LEDGER_ACTIONS_LIMIT] + bindings)

        original_query = 'SELECT identifier from ledger_actions ' + query_filter
        gitcoin_query = f'SELECT * from ledger_actions_gitcoin_data WHERE parent_id IN ({original_query});'  # noqa: E501
        cursor2 = self.db.conn.cursor()
        gitcoin_results = cursor2.execute(gitcoin_query, bindings)
        gitcoin_map = {x[0]: x for x in gitcoin_results}

        actions = []
        for result in results:
            try:
                action = LedgerAction.deserialize_from_db(result, gitcoin_map)
            except DeserializationError as e:
                self.msg_aggregator.add_error(
                    f'Error deserializing Ledger Action from the DB. Skipping it.'
                    f'Error was: {str(e)}',
                )
                continue
            except UnknownAsset as e:
                self.msg_aggregator.add_error(
                    f'Error deserializing Ledger Action from the DB. Skipping it. '
                    f'Unknown asset {e.asset_name} found',
                )
                continue

            actions.append(action)

        return actions
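
To make the premium/free split above concrete, here is a sketch of the two query shapes using a hypothetical filter clause; the real clause and bindings come from LedgerActionsFilterQuery.prepare(), and the real FREE_LEDGER_ACTIONS_LIMIT value lives in rotki's constants.

FREE_LEDGER_ACTIONS_LIMIT = 50  # hypothetical value, for illustration only
query_filter = 'WHERE location=? ORDER BY timestamp ASC'  # hypothetical prepare() output
bindings = ['<serialized GITCOIN location>']

premium_query = 'SELECT * from ledger_actions ' + query_filter
premium_bindings = bindings  # the filter sees the whole table

free_query = (
    'SELECT * FROM (SELECT * from ledger_actions ORDER BY timestamp DESC LIMIT ?) '
    + query_filter
)
free_bindings = [FREE_LEDGER_ACTIONS_LIMIT] + bindings
# On the free tier the filter only ever sees the newest FREE_LEDGER_ACTIONS_LIMIT rows.
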
Example #7
    def import_gitcoin_csv(self, filepath: Path) -> None:
        with open(filepath, 'r', encoding='utf-8-sig') as csvfile:
            data = csv.DictReader(csvfile, delimiter=',', quotechar='"')
            actions = []
            for row in data:
                try:
                    action = self._consume_grant_entry(row)
                except UnknownAsset as e:
                    self.db.msg_aggregator.add_warning(
                        f'During gitcoin grant CSV processing found asset {e.asset_name} '
                        f'that cant be matched to a single known asset. Skipping entry.',
                    )
                    continue
                except (DeserializationError, KeyError) as e:
                    msg = str(e)
                    if isinstance(e, KeyError):
                        msg = f'Missing key entry for {msg}.'
                    self.db.msg_aggregator.add_error(
                        'Unexpected data encountered during deserialization of a gitcoin CSV '
                        'entry. Check logs for details and open a bug report.',
                    )
                    log.error(
                        f'Unexpected data encountered during deserialization of a gitcoin '
                        f'CSV entry: {row} . Error was: {msg}', )
                    continue

                if action:
                    actions.append(action)

        db_actions = self.db_ledger.get_ledger_actions(
            filter_query=LedgerActionsFilterQuery.make(
                location=Location.GITCOIN),
            has_premium=True,
        )
        existing_txids = [x.link for x in db_actions]
        self.db_ledger.add_ledger_actions(
            [x for x in actions if x.link not in existing_txids])
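
The last three lines above deduplicate against what is already stored by comparing each parsed action's link with the links of existing gitcoin actions. Restated in isolation with the same in-scope names; a set gives O(1) membership checks and behaves identically here.

        existing_txids = {x.link for x in db_actions}  # set instead of list: O(1) lookups
        self.db_ledger.add_ledger_actions([x for x in actions if x.link not in existing_txids])
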
Example #8
    def get_history(
        self,
        start_ts: Timestamp,
        end_ts: Timestamp,
        has_premium: bool,
    ) -> Tuple[str, List['AccountingEventMixin']]:
        """
        Creates all events history from start_ts to end_ts. Returns it
        sorted by ascending timestamp.
        """
        self._reset_variables()
        step = 0
        total_steps = len(
            self.exchange_manager.connected_exchanges
        ) + NUM_HISTORY_QUERY_STEPS_EXCL_EXCHANGES  # noqa: E501
        log.info(
            'Get/create trade history',
            start_ts=start_ts,
            end_ts=end_ts,
        )
        # start creating the all trades history list
        history: List['AccountingEventMixin'] = []
        empty_or_error = ''

        def populate_history_cb(
            trades_history: List[Trade],
            margin_history: List[MarginPosition],
            result_asset_movements: List[AssetMovement],
            exchange_specific_data: Any,
        ) -> None:
            """This callback will run for succesfull exchange history query

            We don't include ledger actions here since we simply gather all of them at the end
            """
            history.extend(trades_history)
            history.extend(margin_history)
            history.extend(result_asset_movements)

            if exchange_specific_data:
                # This can only be poloniex at the moment
                polo_loans_data = exchange_specific_data
                history.extend(
                    process_polo_loans(
                        msg_aggregator=self.msg_aggregator,
                        data=polo_loans_data,
                        # We need to have history of loans since before the range
                        start_ts=Timestamp(0),
                        end_ts=end_ts,
                    ))

        def fail_history_cb(error_msg: str) -> None:
            """This callback will run for failure in exchange history query"""
            nonlocal empty_or_error
            empty_or_error += '\n' + error_msg

        for exchange in self.exchange_manager.iterate_exchanges():
            self.processing_state_name = f'Querying {exchange.name} exchange history'
            exchange.query_history_with_callbacks(
                # We need to have history of exchanges since before the range
                start_ts=Timestamp(0),
                end_ts=end_ts,
                success_callback=populate_history_cb,
                fail_callback=fail_history_cb,
            )
            step = self._increase_progress(step, total_steps)

        self.processing_state_name = 'Querying ethereum transactions history'
        tx_filter_query = ETHTransactionsFilterQuery.make(
            limit=None,
            offset=None,
            addresses=None,
            # We need to have history of transactions since before the range
            from_ts=Timestamp(0),
            to_ts=end_ts,
        )
        try:
            _, _ = self.eth_transactions.query(
                filter_query=tx_filter_query,
                has_premium=True,  # ignore limits here. Limit applied at processing
                only_cache=False,
            )
        except RemoteError as e:
            msg = str(e)
            self.msg_aggregator.add_error(
                f'There was an error when querying etherscan for ethereum transactions: {msg}'
                f'The final history result will not include ethereum transactions',
            )
            empty_or_error += '\n' + msg
        step = self._increase_progress(step, total_steps)

        self.processing_state_name = 'Querying ethereum transaction receipts'
        self.eth_transactions.get_receipts_for_transactions_missing_them()
        step = self._increase_progress(step, total_steps)

        self.processing_state_name = 'Decoding raw transactions'
        self.evm_tx_decoder.get_and_decode_undecoded_transactions(limit=None)
        step = self._increase_progress(step, total_steps)

        # Include all external trades and trades from external exchanges
        for location in EXTERNAL_LOCATION:
            self.processing_state_name = f'Querying {location} trades history'
            external_trades = self.db.get_trades(
                filter_query=TradesFilterQuery.make(location=location),
                has_premium=True,  # we need all trades for accounting -- limit happens later
            )
            history.extend(external_trades)
            step = self._increase_progress(step, total_steps)

        # include all ledger actions
        self.processing_state_name = 'Querying ledger actions history'
        ledger_actions, _ = self.query_ledger_actions(
            filter_query=LedgerActionsFilterQuery.make(),
            only_cache=True,
        )
        history.extend(ledger_actions)
        step = self._increase_progress(step, total_steps)

        # include eth2 staking events
        eth2 = self.chain_manager.get_module('eth2')
        if eth2 is not None and has_premium:
            self.processing_state_name = 'Querying ETH2 staking history'
            try:
                eth2_events = self.chain_manager.get_eth2_history_events(
                    from_timestamp=Timestamp(0),
                    to_timestamp=end_ts,
                )
                history.extend(eth2_events)
            except RemoteError as e:
                self.msg_aggregator.add_error(
                    f'Eth2 events are not included in the PnL report due to {str(e)}',
                )

        step = self._increase_progress(step, total_steps)

        # Include base history entries
        history_events_db = DBHistoryEvents(self.db)
        base_entries, _ = history_events_db.get_history_events_and_limit_info(
            filter_query=HistoryEventFilterQuery.make(
                # We need to have history since before the range
                from_ts=Timestamp(0),
                to_ts=end_ts,
            ),
            has_premium=True,  # ignore limits here. Limit applied at processing
        )
        history.extend(base_entries)
        self._increase_progress(step, total_steps)

        history.sort(  # sort events first by timestamp and if history base by sequence index
            key=lambda x: (
                x.get_timestamp(),
                x.sequence_index if isinstance(x, HistoryBaseEntry) else 1,
            ), )
        return empty_or_error, history
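
Both this get_history and the older variant in Example #9 drive their progress reporting the same way: total_steps is the number of connected exchanges plus a fixed count of non-exchange stages, and each stage advances the counter once. A toy, self-contained sketch of that bookkeeping with a hypothetical _increase_progress; the real method is part of the historian class and the constants are rotki's own.

def _increase_progress(step: int, total_steps: int) -> int:
    # Hypothetical stand-in: advance the counter and report a percentage.
    step += 1
    print(f'history query: step {step}/{total_steps} ({100 * step // total_steps}%)')
    return step

connected_exchanges = 3  # illustrative
NUM_FIXED_STAGES = 8     # illustrative stand-in for NUM_HISTORY_QUERY_STEPS_EXCL_EXCHANGES
step, total_steps = 0, connected_exchanges + NUM_FIXED_STAGES
for _ in range(total_steps):
    step = _increase_progress(step, total_steps)
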
Example #9
File: events.py  Project: rotki/rotki
    def get_history(
        self,
        start_ts: Timestamp,
        end_ts: Timestamp,
        has_premium: bool,
    ) -> HistoryResult:
        """Creates trades and loans history from start_ts to end_ts"""
        self._reset_variables()
        step = 0
        total_steps = len(
            self.exchange_manager.connected_exchanges
        ) + NUM_HISTORY_QUERY_STEPS_EXCL_EXCHANGES  # noqa: E501
        log.info(
            'Get/create trade history',
            start_ts=start_ts,
            end_ts=end_ts,
        )
        # start creating the all trades history list
        history: List[Union[Trade, MarginPosition, AMMTrade]] = []
        asset_movements = []
        ledger_actions = []
        loans = []
        empty_or_error = ''

        def populate_history_cb(
            trades_history: List[Trade],
            margin_history: List[MarginPosition],
            result_asset_movements: List[AssetMovement],
            result_ledger_actions: List[LedgerAction],
            exchange_specific_data: Any,
        ) -> None:
            """This callback will run for succesfull exchange history query"""
            history.extend(trades_history)
            history.extend(margin_history)
            asset_movements.extend(result_asset_movements)
            ledger_actions.extend(result_ledger_actions)

            if exchange_specific_data:
                # This can only be poloniex at the moment
                polo_loans_data = exchange_specific_data
                loans.extend(
                    process_polo_loans(
                        msg_aggregator=self.msg_aggregator,
                        data=polo_loans_data,
                        # We need to have history of loans since before the range
                        start_ts=Timestamp(0),
                        end_ts=end_ts,
                    ))

        def fail_history_cb(error_msg: str) -> None:
            """This callback will run for failure in exchange history query"""
            nonlocal empty_or_error
            empty_or_error += '\n' + error_msg

        for exchange in self.exchange_manager.iterate_exchanges():
            self.processing_state_name = f'Querying {exchange.name} exchange history'
            exchange.query_history_with_callbacks(
                # We need to have history of exchanges since before the range
                start_ts=Timestamp(0),
                end_ts=end_ts,
                success_callback=populate_history_cb,
                fail_callback=fail_history_cb,
            )
            step = self._increase_progress(step, total_steps)

        try:
            self.processing_state_name = 'Querying ethereum transactions history'
            filter_query = ETHTransactionsFilterQuery.make(
                order_ascending=True,  # for history processing we need oldest first
                limit=None,
                offset=None,
                addresses=None,
                # We need to have history of transactions since before the range
                from_ts=Timestamp(0),
                to_ts=end_ts,
            )
            ethtx_module = EthTransactions(
                ethereum=self.chain_manager.ethereum, database=self.db)
            eth_transactions, _ = ethtx_module.query(
                filter_query=filter_query,
                with_limit=False,  # at the moment ignore the limit for historical processing
                only_cache=False,
            )
        except RemoteError as e:
            eth_transactions = []
            msg = str(e)
            self.msg_aggregator.add_error(
                f'There was an error when querying etherscan for ethereum transactions: {msg}'
                f'The final history result will not include ethereum transactions',
            )
            empty_or_error += '\n' + msg
        step = self._increase_progress(step, total_steps)

        # Include all external trades and trades from external exchanges
        for location in EXTERNAL_LOCATION:
            self.processing_state_name = f'Querying {location} trades history'
            external_trades = self.db.get_trades(
                filter_query=TradesFilterQuery.make(location=location),
                has_premium=True,  # we need all trades for accounting -- limit happens later
            )
            history.extend(external_trades)
            step = self._increase_progress(step, total_steps)

        # include the ledger actions from offline sources
        self.processing_state_name = 'Querying ledger actions history'
        offline_ledger_actions, _ = self.query_ledger_actions(
            filter_query=LedgerActionsFilterQuery.make(),
            only_cache=True,
        )
        unique_ledger_actions = list(
            set(offline_ledger_actions) - set(ledger_actions))
        ledger_actions.extend(unique_ledger_actions)
        step = self._increase_progress(step, total_steps)

        # include AMM trades: balancer, uniswap
        for amm_location in AMMTradeLocations:
            amm_module_name = cast(AMMTRADE_LOCATION_NAMES, str(amm_location))
            amm_module = self.chain_manager.get_module(amm_module_name)
            if has_premium and amm_module:
                self.processing_state_name = f'Querying {amm_module_name} trade history'
                amm_module_trades = amm_module.get_trades(
                    addresses=self.chain_manager.queried_addresses_for_module(
                        amm_module_name),
                    from_timestamp=Timestamp(0),
                    to_timestamp=end_ts,
                    only_cache=False,
                )
                history.extend(amm_module_trades)
            step = self._increase_progress(step, total_steps)

        # Include makerdao DSR gains
        defi_events = []
        makerdao_dsr = self.chain_manager.get_module('makerdao_dsr')
        if makerdao_dsr and has_premium:
            self.processing_state_name = 'Querying makerDAO DSR history'
            defi_events.extend(
                makerdao_dsr.get_history_events(
                    from_timestamp=Timestamp(0),  # we need to process all events from history start
                    to_timestamp=end_ts,
                ))
        step = self._increase_progress(step, total_steps)

        # Include makerdao vault events
        makerdao_vaults = self.chain_manager.get_module('makerdao_vaults')
        if makerdao_vaults and has_premium:
            self.processing_state_name = 'Querying makerDAO vaults history'
            defi_events.extend(
                makerdao_vaults.get_history_events(
                    from_timestamp=Timestamp(0),  # we need to process all events from history start
                    to_timestamp=end_ts,
                ))
        step = self._increase_progress(step, total_steps)

        # include yearn vault events
        yearn_vaults = self.chain_manager.get_module('yearn_vaults')
        if yearn_vaults and has_premium:
            self.processing_state_name = 'Querying yearn vaults history'
            defi_events.extend(
                yearn_vaults.get_history_events(
                    from_timestamp=Timestamp(0),  # we need to process all events from history start
                    to_timestamp=end_ts,
                    addresses=self.chain_manager.queried_addresses_for_module(
                        'yearn_vaults'),
                ))
        step = self._increase_progress(step, total_steps)

        # include compound events
        compound = self.chain_manager.get_module('compound')
        if compound and has_premium:
            self.processing_state_name = 'Querying compound history'
            try:
                # we need to process all events from history start
                defi_events.extend(
                    compound.get_history_events(
                        from_timestamp=Timestamp(0),
                        to_timestamp=end_ts,
                        addresses=self.chain_manager.queried_addresses_for_module('compound'),
                    ))
            except RemoteError as e:
                self.msg_aggregator.add_error(
                    SUBGRAPH_REMOTE_ERROR_MSG.format(protocol="Compound",
                                                     error_msg=str(e)), )

        step = self._increase_progress(step, total_steps)

        # include adex events
        adex = self.chain_manager.get_module('adex')
        if adex is not None and has_premium:
            self.processing_state_name = 'Querying adex staking history'
            defi_events.extend(
                adex.get_history_events(
                    from_timestamp=start_ts,
                    to_timestamp=end_ts,
                    addresses=self.chain_manager.queried_addresses_for_module(
                        'adex'),
                ))
        step = self._increase_progress(step, total_steps)

        # include aave events
        aave = self.chain_manager.get_module('aave')
        if aave is not None and has_premium:
            self.processing_state_name = 'Querying aave history'
            defi_events.extend(
                aave.get_history_events(
                    from_timestamp=start_ts,
                    to_timestamp=end_ts,
                    addresses=self.chain_manager.queried_addresses_for_module(
                        'aave'),
                ))
        step = self._increase_progress(step, total_steps)

        # include eth2 staking events
        eth2 = self.chain_manager.get_module('eth2')
        if eth2 is not None and has_premium:
            self.processing_state_name = 'Querying ETH2 staking history'
            try:
                eth2_events = self.chain_manager.get_eth2_history_events(
                    from_timestamp=start_ts,
                    to_timestamp=end_ts,
                )
                defi_events.extend(eth2_events)
            except RemoteError as e:
                self.msg_aggregator.add_error(
                    f'Eth2 events are not included in the PnL report due to {str(e)}',
                )

        step = self._increase_progress(step, total_steps)

        # include liquity events
        liquity = self.chain_manager.get_module('liquity')
        if liquity is not None and has_premium:
            self.processing_state_name = 'Querying Liquity staking history'
            defi_events.extend(
                liquity.get_history_events(
                    from_timestamp=start_ts,
                    to_timestamp=end_ts,
                    addresses=self.chain_manager.queried_addresses_for_module(
                        'liquity'),
                ))
        self._increase_progress(step, total_steps)

        history.sort(key=action_get_timestamp)
        return (
            empty_or_error,
            history,
            loans,
            asset_movements,
            eth_transactions,
            defi_events,
            ledger_actions,
        )
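
For reference, the HistoryResult returned here is the 7-tuple assembled above; a caller unpacks it in this order (a hedged sketch, with `events_historian` borrowed from the test fixture naming).

(
    empty_or_error,    # '' on success, otherwise accumulated error messages
    history,           # trades, margin positions and AMM trades, sorted by timestamp
    loans,             # processed poloniex loans
    asset_movements,   # exchange deposits and withdrawals
    eth_transactions,  # queried ethereum transactions
    defi_events,       # makerdao / yearn / compound / adex / aave / eth2 / liquity events
    ledger_actions,    # exchange plus offline ledger actions
) = events_historian.get_history(start_ts=start_ts, end_ts=end_ts, has_premium=True)
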
Example #10
def test_delete_grant_events(rotkehlchen_api_server):
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    # Get and save data of 3 different grants in the DB
    id1 = 149
    metadata1 = GitcoinGrantMetadata(
        grant_id=id1,
        name='Rotki - The portfolio tracker and accounting tool that protects your privacy',
        created_on=1571694841,
    )
    json_data = {
        'from_timestamp': 1622162468,  # 28/05/2021
        'to_timestamp': 1622246400,  # 29/05/2021
        'grant_id': id1,
    }
    response = requests.post(api_url_for(
        rotkehlchen_api_server,
        'gitcoineventsresource',
    ), json=json_data)
    assert_proper_response(response)
    id2 = 184
    metadata2 = GitcoinGrantMetadata(
        grant_id=id2,
        name='TrueBlocks',
        created_on=1575424305,
    )
    json_data = {
        'from_timestamp': 1622162468,  # 28/05/2021
        'to_timestamp': 1622246400,  # 29/05/2021
        'grant_id': id2,
    }
    response = requests.post(api_url_for(
        rotkehlchen_api_server,
        'gitcoineventsresource',
    ), json=json_data)
    assert_proper_response(response)
    id3 = 223
    metadata3 = GitcoinGrantMetadata(
        grant_id=id3,
        name='Ethereum Magicians',
        created_on=1578054753,
    )
    json_data = {
        'from_timestamp': 1622162468,  # 28/05/2021
        'to_timestamp': 1622246400,  # 29/05/2021
        'grant_id': id3,
    }
    response = requests.post(api_url_for(
        rotkehlchen_api_server,
        'gitcoineventsresource',
    ), json=json_data)
    assert_proper_response(response)

    # make sure events are saved
    db = rotki.data.db
    ledgerdb = DBLedgerActions(db, db.msg_aggregator)
    actions = ledgerdb.get_ledger_actions(
        filter_query=LedgerActionsFilterQuery.make(location=Location.GITCOIN),
        has_premium=True,
    )
    assert len(actions) == 5
    assert len([x for x in actions if x.extra_data.grant_id == id1]) == 3
    assert len([x for x in actions if x.extra_data.grant_id == id2]) == 1
    assert len([x for x in actions if x.extra_data.grant_id == id3]) == 1
    # make sure db ranges were written
    queryrange = db.get_used_query_range(f'{GITCOIN_GRANTS_PREFIX}_{id1}')
    assert queryrange == (1622162468, 1622246400)
    queryrange = db.get_used_query_range(f'{GITCOIN_GRANTS_PREFIX}_{id2}')
    assert queryrange == (1622162468, 1622246400)
    queryrange = db.get_used_query_range(f'{GITCOIN_GRANTS_PREFIX}_{id3}')
    assert queryrange == (1622162468, 1622246400)
    # make sure grant metadata were written
    assert ledgerdb.get_gitcoin_grant_metadata() == {
        id1: metadata1,
        id2: metadata2,
        id3: metadata3,
    }

    # delete 1 grant's data
    response = requests.delete(api_url_for(
        rotkehlchen_api_server,
        'gitcoineventsresource',
    ), json={'grant_id': id2})
    assert_proper_response(response)
    # check that they got deleted but rest is fine
    actions = ledgerdb.get_ledger_actions(
        filter_query=LedgerActionsFilterQuery.make(location=Location.GITCOIN),
        has_premium=True,
    )
    assert len(actions) == 4
    assert len([x for x in actions if x.extra_data.grant_id == id1]) == 3
    assert len([x for x in actions if x.extra_data.grant_id == id2]) == 0
    assert len([x for x in actions if x.extra_data.grant_id == id3]) == 1
    # make sure db ranges were written
    queryrange = db.get_used_query_range(f'{GITCOIN_GRANTS_PREFIX}_{id1}')
    assert queryrange == (1622162468, 1622246400)
    assert db.get_used_query_range(f'{GITCOIN_GRANTS_PREFIX}_{id2}') is None
    queryrange = db.get_used_query_range(f'{GITCOIN_GRANTS_PREFIX}_{id3}')
    assert queryrange == (1622162468, 1622246400)
    # make sure grant metadata were written
    assert ledgerdb.get_gitcoin_grant_metadata() == {id1: metadata1, id3: metadata3}

    # delete all remaining grant data
    response = requests.delete(api_url_for(
        rotkehlchen_api_server,
        'gitcoineventsresource',
    ))
    assert_proper_response(response)
    # check that they got deleted but rest is fine
    actions = ledgerdb.get_ledger_actions(
        filter_query=LedgerActionsFilterQuery.make(location=Location.GITCOIN),
        has_premium=True,
    )
    assert len(actions) == 0
    # make sure db ranges were written
    assert db.get_used_query_range(f'{GITCOIN_GRANTS_PREFIX}_{id1}') is None
    assert db.get_used_query_range(f'{GITCOIN_GRANTS_PREFIX}_{id2}') is None
    assert db.get_used_query_range(f'{GITCOIN_GRANTS_PREFIX}_{id3}') is None
    # make sure grant metadata were written
    assert ledgerdb.get_gitcoin_grant_metadata() == {}
Example #11
def test_query_ledger_actions(events_historian,
                              function_scope_messages_aggregator):
    """
    Create actions and query the events historian to check that the history
    has events previous to the selected from_ts. This allows us to verify that
    actions before one period are counted in the PnL report to calculate cost basis.
    https://github.com/rotki/rotki/issues/2541
    """

    selected_timestamp = 10
    db = DBLedgerActions(events_historian.db,
                         function_scope_messages_aggregator)

    action = LedgerAction(
        identifier=0,  # whatever
        timestamp=selected_timestamp - 2,
        action_type=LedgerActionType.INCOME,
        location=Location.EXTERNAL,
        amount=FVal(1),
        asset=A_ETH,
        rate=None,
        rate_asset=None,
        link=None,
        notes=None,
    )
    db.add_ledger_action(action)

    action = LedgerAction(
        identifier=0,  # whatever
        timestamp=selected_timestamp + 3,
        action_type=LedgerActionType.EXPENSE,
        location=Location.EXTERNAL,
        amount=FVal(0.5),
        asset=A_ETH,
        rate=None,
        rate_asset=None,
        link=None,
        notes=None,
    )
    db.add_ledger_action(action)

    action = LedgerAction(
        identifier=0,  # whatever
        timestamp=selected_timestamp + 5,
        action_type=LedgerActionType.INCOME,
        location=Location.EXTERNAL,
        amount=FVal(10),
        asset=A_USDC,
        rate=None,
        rate_asset=None,
        link=None,
        notes=None,
    )
    db.add_ledger_action(action)

    actions, length = events_historian.query_ledger_actions(
        filter_query=LedgerActionsFilterQuery.make(to_ts=selected_timestamp + 4),
        only_cache=False,
    )

    assert any((action.timestamp < selected_timestamp for action in actions))
    assert length == 2
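
Spelling out why the two assertions hold (arithmetic only):

selected_timestamp = 10
timestamps = [selected_timestamp - 2, selected_timestamp + 3, selected_timestamp + 5]  # 8, 13, 15
to_ts = selected_timestamp + 4  # 14
matched = [ts for ts in timestamps if ts <= to_ts]  # [8, 13]
assert len(matched) == 2                               # hence length == 2
assert any(ts < selected_timestamp for ts in matched)  # the ts=8 action predates the period
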
Example #12
File: test_gitcoin.py  Project: rotki/rotki
def test_store_same_tx_hash_in_db(database):
    """Test that if somehow during addition a duplicate is added,
    it's ignored and only 1 ends up in the db"""
    action1 = LedgerAction(
        identifier=1,
        timestamp=Timestamp(1624791600),
        action_type=LedgerActionType.DONATION_RECEIVED,
        location=Location.GITCOIN,
        amount=FVal('0.0004789924016679019628604417823'),
        asset=A_ETH,
        rate=FVal('1983.33'),
        rate_asset=A_USD,
        link='0x00298f72ad40167051e111e6dc2924de08cce7cf0ad00d04ad5a9e58426536a1',
        notes='Gitcoin grant 149 event',
        extra_data=GitcoinEventData(
            tx_id='0x00298f72ad40167051e111e6dc2924de08cce7cf0ad00d04ad5a9e58426536a1',
            grant_id=149,
            clr_round=None,
            tx_type=GitcoinEventTxType.ETHEREUM,
        ),
    )
    action2 = LedgerAction(
        identifier=2,
        timestamp=Timestamp(1634791600),
        action_type=LedgerActionType.DONATION_RECEIVED,
        location=Location.GITCOIN,
        amount=FVal('0.789924016679019628604417823'),
        asset=A_ETH,
        rate=FVal('1913.33'),
        rate_asset=A_USD,
        link='0x00298f72ad40167051e111e6dc2924de08cce7cf0ad00d04ad5a9e58426536a1',
        notes='Gitcoin grant 149 event',
        extra_data=GitcoinEventData(
            tx_id='0x00298f72ad40167051e111e6dc2924de08cce7cf0ad00d04ad5a9e58426536a1',
            grant_id=149,
            clr_round=None,
            tx_type=GitcoinEventTxType.ETHEREUM,
        ),
    )
    action3 = LedgerAction(
        identifier=2,
        timestamp=Timestamp(1654791600),
        action_type=LedgerActionType.DONATION_RECEIVED,
        location=Location.GITCOIN,
        amount=FVal('2445533521905078832065264'),
        asset=A_ETH,
        rate=FVal('1973.33'),
        rate_asset=A_USD,
        link='sync-tx:5612f84bc20cda25b911af39b792c973bdd5916b3b6868db2420b5dafd705a90',
        notes='Gitcoin grant 149 event',
        extra_data=GitcoinEventData(
            tx_id='5612f84bc20cda25b911af39b792c973bdd5916b3b6868db2420b5dafd705a90',
            grant_id=149,
            clr_round=None,
            tx_type=GitcoinEventTxType.ZKSYNC,
        ),
    )
    dbledger = DBLedgerActions(database, database.msg_aggregator)
    dbledger.add_ledger_actions([action1, action2, action3])
    stored_actions = dbledger.get_ledger_actions(
        filter_query=LedgerActionsFilterQuery.make(location=Location.GITCOIN),
        has_premium=True,
    )
    assert stored_actions == [action1, action3]
    errors = database.msg_aggregator.consume_errors()
    warnings = database.msg_aggregator.consume_warnings()
    assert len(errors) == 0
    assert len(warnings) == 1
    assert 'Did not add ledger action to DB' in warnings[0]