Example #1
def test_cryptocompare_historical_data_use_cached_price(data_dir, database):
    """Test that the cryptocompare cache is used and also properly deserialized"""
    # Create a cache file for SNGLS_BTC
    contents = """{"start_time": 0, "end_time": 1439390800,
    "data": [{"time": 1438387200, "close": 10, "high": 10, "low": 10, "open": 10,
    "volumefrom": 10, "volumeto": 10}, {"time": 1438390800, "close": 20, "high": 20,
    "low": 20, "open": 20, "volumefrom": 20, "volumeto": 20}]}"""
    with open(os.path.join(data_dir, 'price_history_SNGLS_BTC.json'), 'w') as f:
        f.write(contents)

    cc = Cryptocompare(data_directory=data_dir, database=database)
    with patch.object(cc, 'query_endpoint_histohour') as histohour_mock:
        result = cc.get_historical_data(
            from_asset=A_SNGLS,
            to_asset=A_BTC,
            timestamp=1438390801,
            historical_data_start=0,
        )
        # make sure that histohour was not called, in essence that the cache was used
        assert histohour_mock.call_count == 0

    assert len(result) == 2
    assert isinstance(result[0].low, FVal)
    assert result[0].low == FVal(10)
    assert isinstance(result[0].high, FVal)
    assert result[0].high == FVal(10)
    assert isinstance(result[1].low, FVal)
    assert result[1].low == FVal(20)
    assert isinstance(result[1].high, FVal)
    assert result[1].high == FVal(20)
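The cache file above follows the price_history_<FROM>_<TO>.json naming the test relies on. Below is a minimal stand-alone loader for that file format, inferred purely from the contents string in this test; the helper name and everything not shown above is an assumption, not rotkehlchen code.

import json
from pathlib import Path

from rotkehlchen.fval import FVal


def load_price_history_cache(data_dir, from_symbol, to_symbol):
    """Hypothetical sketch: read a price history cache file as written above.

    Returns (start_time, end_time, entries) with each entry's numeric
    fields converted to FVal, mirroring the deserialization the test checks.
    """
    path = Path(data_dir) / f'price_history_{from_symbol}_{to_symbol}.json'
    raw = json.loads(path.read_text())
    entries = [
        {key: FVal(str(value)) for key, value in entry.items()}
        for entry in raw['data']
    ]
    return raw['start_time'], raw['end_time'], entries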
Example #2
def test_cryptocompare_historical_data_price(
        data_dir,
        database,
        from_asset,
        to_asset,
        timestamp,
        price,
):
    """Test that the cryptocompare histohour data retrieval works and price is returned

    """
    cc = Cryptocompare(data_directory=data_dir, database=database)
    # Get lots of historical prices from at least 1 query after the ts we need
    result = cc.get_historical_data(
        from_asset=from_asset,
        to_asset=to_asset,
        timestamp=timestamp + 2020 * 3600,
        only_check_cache=False,
    )
    # Query the ts we need from the cached data
    result_price = cc._retrieve_price_from_data(
        data=result,
        from_asset=from_asset,
        to_asset=to_asset,
        timestamp=timestamp,
    )
    assert result_price == price
Example #3
def test_cryptocompare_histohour_data_going_backward(data_dir, database, freezer):
    """Test that the cryptocompare histohour data retrieval works properly

    This test checks that doing an additional query in the past works properly
    and that the queried data are properly appended to the cached result. In production
    this scenario should not happen often. The only way it can happen is if cryptocompare
    somehow adds older data than what was previously queried.
    """
    # first timestamp cryptocompare has histohour BTC/USD when queried from this test is
    btc_start_ts = 1279936800
    # first timestamp cryptocompare has histohour BTC/USD is: 1279940400
    now_ts = btc_start_ts + 3600 * 2000 + 122
    # create a cache file for BTC_USD
    contents = """{"start_time": 1301536800, "end_time": 1301540400,
    "data": [{"time": 1301536800, "close": 0.298, "high": 0.298, "low": 0.298, "open": 0.298,
    "volumefrom": 0.298, "volumeto": 0.298}, {"time": 1301540400, "close": 0.298, "high": 0.298,
    "low": 0.298, "open": 0.298, "volumefrom": 0.298, "volumeto": 0.298}]}"""
    price_history_dir = get_or_make_price_history_dir(data_dir)
    with open(price_history_dir / f'{PRICE_HISTORY_FILE_PREFIX}BTC_USD.json', 'w') as f:
        f.write(contents)
    freezer.move_to(datetime.fromtimestamp(now_ts))
    cc = Cryptocompare(data_directory=data_dir, database=database)
    result = cc.get_historical_data(
        from_asset=A_BTC,
        to_asset=A_USD,
        timestamp=now_ts - 3600 * 2 - 55,
        only_check_cache=False,
    )
    cache_key = PairCacheKey('BTC_USD')
    assert len(result) == CRYPTOCOMPARE_HOURQUERYLIMIT * 3 + 2
    check_cc_result(result, forward=False)
    assert cache_key in cc.price_history
    assert cc.price_history[cache_key].start_time == btc_start_ts
    assert cc.price_history[cache_key].end_time == now_ts
    check_cc_result(cc.price_history[cache_key].data, forward=False)
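The check_cc_result helper called here (and in Examples #10, #16 and #21) is not shown on this page. A rough sketch of what such a helper might assert, assuming entries expose a time attribute at hourly spacing; every detail is an assumption, and the real helper lives in the test module and may differ:

def check_cc_result(result, forward):
    """Hypothetical sketch of the helper used but not shown on this page.

    The forward flag presumably selects which price values are expected,
    depending on whether the cache was extended forward or backward.
    """
    assert len(result) != 0
    for idx in range(len(result) - 1):
        # entries are hourly, so consecutive timestamps should differ by 3600
        assert result[idx + 1].time - result[idx].time == 3600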
Example #4
def test_cryptocompare_historical_data_price(
    data_dir,
    database,
    from_asset,
    to_asset,
    timestamp,
    price,
):
    """Test that the cryptocompare histohour data retrieval works and price is returned

    """
    cc = Cryptocompare(data_directory=data_dir, database=database)
    # Get lots of historical prices from at least 1 query after the ts we need
    cc.query_and_store_historical_data(
        from_asset=from_asset,
        to_asset=to_asset,
        timestamp=timestamp + 2020 * 3600,
    )
    # Query the ts we need directly from the cached data
    price_cache_entry = GlobalDBHandler().get_historical_price(
        from_asset=from_asset,
        to_asset=to_asset,
        timestamp=timestamp,
        max_seconds_distance=3600,
        source=HistoricalPriceOracle.CRYPTOCOMPARE,
    )
    assert price_cache_entry.price == price
Example #5
    def __init__(self, args: argparse.Namespace) -> None:
        """Initialize the Rotkehlchen object

        May Raise:
        - SystemPermissionError if the given data directory's permissions
        are not correct.
        """
        self.lock = Semaphore()
        self.lock.acquire()

        # Can also be None after unlock if premium credentials did not
        # authenticate or premium server temporarily offline
        self.premium: Optional[Premium] = None
        self.user_is_logged_in: bool = False
        configure_logging(args)

        self.sleep_secs = args.sleep_secs
        if args.data_dir is None:
            self.data_dir = default_data_directory()
        else:
            self.data_dir = Path(args.data_dir)

        if not os.access(self.data_dir, os.W_OK | os.R_OK):
            raise SystemPermissionError(
                f'The given data directory {self.data_dir} is not readable or writable',
            )
        self.args = args
        self.msg_aggregator = MessagesAggregator()
        self.greenlet_manager = GreenletManager(msg_aggregator=self.msg_aggregator)
        self.exchange_manager = ExchangeManager(msg_aggregator=self.msg_aggregator)
        # Initialize the AssetResolver singleton
        AssetResolver(data_directory=self.data_dir)
        self.data = DataHandler(self.data_dir, self.msg_aggregator)
        self.cryptocompare = Cryptocompare(data_directory=self.data_dir, database=None)
        self.coingecko = Coingecko()
        self.icon_manager = IconManager(data_dir=self.data_dir, coingecko=self.coingecko)
        self.greenlet_manager.spawn_and_track(
            after_seconds=None,
            task_name='periodically_query_icons_until_all_cached',
            method=self.icon_manager.periodically_query_icons_until_all_cached,
            batch_size=ICONS_BATCH_SIZE,
            sleep_time_secs=ICONS_QUERY_SLEEP,
        )
        # Initialize the Inquirer singleton
        Inquirer(
            data_dir=self.data_dir,
            cryptocompare=self.cryptocompare,
            coingecko=self.coingecko,
        )
        # Keeps how many trades we have found per location. Used for free user limiting
        self.actions_per_location: Dict[str, Dict[Location, int]] = {
            'trade': defaultdict(int),
            'asset_movement': defaultdict(int),
        }

        self.lock.release()
        self.shutdown_event = gevent.event.Event()
Example #6
    def __init__(self, args: argparse.Namespace) -> None:
        """Initialize the Rotkehlchen object

        This runs during backend initialization so it should be as light as possible.

        May Raise:
        - SystemPermissionError if the given data directory's permissions
        are not correct.
        """
        # Can also be None after unlock if premium credentials did not
        # authenticate or premium server temporarily offline
        self.premium: Optional[Premium] = None
        self.user_is_logged_in: bool = False
        configure_logging(args)

        self.sleep_secs = args.sleep_secs
        if args.data_dir is None:
            self.data_dir = default_data_directory()
        else:
            self.data_dir = Path(args.data_dir)
            self.data_dir.mkdir(parents=True, exist_ok=True)

        if not os.access(self.data_dir, os.W_OK | os.R_OK):
            raise SystemPermissionError(
                f'The given data directory {self.data_dir} is not readable or writable',
            )
        self.main_loop_spawned = False
        self.args = args
        self.api_task_greenlets: List[gevent.Greenlet] = []
        self.msg_aggregator = MessagesAggregator()
        self.greenlet_manager = GreenletManager(
            msg_aggregator=self.msg_aggregator)
        self.exchange_manager = ExchangeManager(
            msg_aggregator=self.msg_aggregator)
        # Initialize the GlobalDBHandler singleton. Has to be initialized BEFORE asset resolver
        GlobalDBHandler(data_dir=self.data_dir)
        self.data = DataHandler(self.data_dir, self.msg_aggregator)
        self.cryptocompare = Cryptocompare(data_directory=self.data_dir,
                                           database=None)
        self.coingecko = Coingecko()
        self.icon_manager = IconManager(data_dir=self.data_dir,
                                        coingecko=self.coingecko)
        self.assets_updater = AssetsUpdater(self.msg_aggregator)
        # Initialize the Inquirer singleton
        Inquirer(
            data_dir=self.data_dir,
            cryptocompare=self.cryptocompare,
            coingecko=self.coingecko,
        )
        # Keeps how many trades we have found per location. Used for free user limiting
        self.actions_per_location: Dict[str, Dict[Location, int]] = {
            'trade': defaultdict(int),
            'asset_movement': defaultdict(int),
        }
        self.task_manager: Optional[TaskManager] = None
        self.shutdown_event = gevent.event.Event()
Example #7
    def __init__(self, args: argparse.Namespace) -> None:
        self.lock = Semaphore()
        self.lock.acquire()

        # Can also be None after unlock if premium credentials did not
        # authenticate or premium server temporarily offline
        self.premium: Optional[Premium] = None
        self.user_is_logged_in = False

        logfilename = None
        if args.logtarget == 'file':
            logfilename = args.logfile

        if args.loglevel == 'debug':
            loglevel = logging.DEBUG
        elif args.loglevel == 'info':
            loglevel = logging.INFO
        elif args.loglevel == 'warn':
            loglevel = logging.WARN
        elif args.loglevel == 'error':
            loglevel = logging.ERROR
        elif args.loglevel == 'critical':
            loglevel = logging.CRITICAL
        else:
            raise AssertionError('Should never get here. Illegal log value')

        logging.basicConfig(
            filename=logfilename,
            filemode='w',
            level=loglevel,
            format='%(asctime)s -- %(levelname)s:%(name)s:%(message)s',
            datefmt='%d/%m/%Y %H:%M:%S %Z',
        )

        if not args.logfromothermodules:
            logging.getLogger('urllib3').setLevel(logging.CRITICAL)
            logging.getLogger('urllib3.connectionpool').setLevel(
                logging.CRITICAL)

        self.sleep_secs = args.sleep_secs
        self.data_dir = args.data_dir
        self.args = args
        self.msg_aggregator = MessagesAggregator()
        self.greenlet_manager = GreenletManager(
            msg_aggregator=self.msg_aggregator)
        self.exchange_manager = ExchangeManager(
            msg_aggregator=self.msg_aggregator)
        self.all_eth_tokens = AssetResolver().get_all_eth_tokens()
        self.data = DataHandler(self.data_dir, self.msg_aggregator)
        self.cryptocompare = Cryptocompare(data_directory=self.data_dir,
                                           database=None)
        # Initialize the Inquirer singleton
        Inquirer(data_dir=self.data_dir, cryptocompare=self.cryptocompare)

        self.lock.release()
        self.shutdown_event = gevent.event.Event()
Example #8
def test_cryptocompare_query_pricehistorical(accounting_data_dir):
    """Test that cryptocompare price historical query works fine"""
    cc = Cryptocompare(data_directory=accounting_data_dir)
    price = cc.query_endpoint_pricehistorical(
        from_asset=A_SNGLS,
        to_asset=A_BTC,
        timestamp=1475413990,
    )
    # Just test a price is returned
    assert price
Example #9
def test_cryptocompare_historical_data_use_cached_price(data_dir, database, historical_price_test_data):  # pylint: disable=unused-argument  # noqa: E501
    """Test that the cryptocompare cache is used"""
    cc = Cryptocompare(data_directory=data_dir, database=database)
    with patch.object(cc, 'query_endpoint_histohour') as histohour_mock:
        result = cc.query_historical_price(
            from_asset=A_ETH,
            to_asset=A_EUR,
            timestamp=1511627623,
        )
        # make sure that histohour was not called, in essence that the cache was used
        assert histohour_mock.call_count == 0

    assert result == FVal(396.56)
Example #10
def test_cryptocompare_histohour_data_going_backward(data_dir, database,
                                                     freezer):
    """Test that the cryptocompare histohour data retrieval works properly

    This test checks that doing an additional query in the past works properly
    and that the queried data are properly appended to the cached result. In production
    this scenario should not happen often. The only way it can happen is if cryptocompare
    somehow adds older data than what was previously queried.
    """
    globaldb = GlobalDBHandler()
    # first timestamp cryptocompare has histohour BTC/USD when queried from this test is
    btc_start_ts = 1279936800
    # first timestamp cryptocompare has histohour BTC/USD is: 1279940400
    now_ts = btc_start_ts + 3600 * 2000 + 122
    # create a cache file for BTC_USD
    cache_data = [
        HistoricalPrice(
            from_asset=A_BTC,
            to_asset=A_USD,
            source=HistoricalPriceOracle.CRYPTOCOMPARE,
            timestamp=Timestamp(1301536800),
            price=Price(FVal('0.298')),
        ),
        HistoricalPrice(
            from_asset=A_BTC,
            to_asset=A_USD,
            source=HistoricalPriceOracle.CRYPTOCOMPARE,
            timestamp=Timestamp(1301540400),
            price=Price(FVal('0.298')),
        )
    ]
    globaldb.add_historical_prices(cache_data)

    freezer.move_to(datetime.fromtimestamp(now_ts))
    cc = Cryptocompare(data_directory=data_dir, database=database)
    cc.query_and_store_historical_data(
        from_asset=A_BTC,
        to_asset=A_USD,
        timestamp=now_ts - 3600 * 2 - 55,
    )
    result = get_globaldb_cache_entries(from_asset=A_BTC, to_asset=A_USD)
    assert len(result) == CRYPTOCOMPARE_HOURQUERYLIMIT * 3 + 2
    check_cc_result(result, forward=False)
    data_range = globaldb.get_historical_price_range(
        A_BTC, A_USD, HistoricalPriceOracle.CRYPTOCOMPARE,
    )
    assert data_range[0] == btc_start_ts
    assert data_range[1] == 1301540400  # that's the closest ts to now_ts cc returns
Example #11
def create_inquirer(data_directory, should_mock_current_price_queries,
                    mocked_prices) -> Inquirer:
    # Since this is a singleton and we want it initialized every time the fixture
    # is called, make sure its instance always starts from scratch
    Inquirer._Inquirer__instance = None  # type: ignore
    # Get a cryptocompare without a DB since invoking the DB fixture here causes
    # existing-user problems for some tests
    cryptocompare = Cryptocompare(data_directory=data_directory, database=None)
    gecko = Coingecko(data_directory=data_directory)
    inquirer = Inquirer(data_dir=data_directory,
                        cryptocompare=cryptocompare,
                        coingecko=gecko)
    if not should_mock_current_price_queries:
        return inquirer

    def mock_find_usd_price(asset):  # pylint: disable=unused-argument
        return mocked_prices.get(asset, FVal('1.5'))

    inquirer.find_usd_price = mock_find_usd_price  # type: ignore

    def mock_query_fiat_pair(base, quote):  # pylint: disable=unused-argument
        return FVal(1)

    inquirer.query_fiat_pair = mock_query_fiat_pair  # type: ignore

    return inquirer
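The Inquirer._Inquirer__instance = None line works because Python name-mangles double-underscore class attributes, so the singleton's stored instance can be reached and cleared from outside the class. A generic, self-contained version of the same reset pattern; the class and fixture names are illustrative, not from rotkehlchen:

import pytest


class Singleton:
    __instance = None  # name-mangled to _Singleton__instance

    def __new__(cls):
        if cls.__instance is None:
            cls.__instance = super().__new__(cls)
        return cls.__instance


@pytest.fixture
def fresh_singleton():
    # reset through the mangled attribute so every test starts from scratch,
    # exactly like the Inquirer reset in the fixture above
    Singleton._Singleton__instance = None
    return Singleton()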
Example #12
def test_price_queries(price_historian, data_dir, database):
    """Test some historical price queries. Make sure that we test some
    assets not in cryptocompare but in coigecko so the backup mechanism triggers and works"""

    # These should hit cryptocompare
    assert price_historian.query_historical_price(A_BTC, A_EUR,
                                                  1479200704) == FVal('663.66')
    assert price_historian.query_historical_price(
        A_XMR, A_BTC, 1579200704) == FVal('0.007526')
    # this should hit the cryptocompare cache we are creating here
    contents = """{"start_time": 0, "end_time": 1439390800,
    "data": [{"time": 1438387200, "close": 10, "high": 10, "low": 10, "open": 10,
    "volumefrom": 10, "volumeto": 10}, {"time": 1438390800, "close": 20, "high": 20,
    "low": 20, "open": 20, "volumefrom": 20, "volumeto": 20}]}"""
    price_history_dir = get_or_make_price_history_dir(data_dir)
    with open(price_history_dir / f'{PRICE_HISTORY_FILE_PREFIX}DASH_USD.json',
              'w') as f:
        f.write(contents)
    price_historian._PriceHistorian__instance._cryptocompare = Cryptocompare(
        data_directory=data_dir,
        database=database,
    )
    price_historian.set_oracles_order(price_historian._oracles)
    assert price_historian.query_historical_price(A_DASH, A_USD,
                                                  1438387700) == FVal('10')
    # this should hit coingecko, since cornichon is not in cryptocompare
    cornichon = Asset('CORN-2')
    expected_price = FVal('0.07830444726516915')
    assert price_historian.query_historical_price(cornichon, A_USD,
                                                  1608854400) == expected_price
Example #13
def test_empty_histohour(data_dir, database, freezer):
    """Histohour can be empty and can have also floating point zeros like in CHI/EUR

    This test makes sure that an empty list is returned at the very first all zeros
    result that also has floating point and querying stops.

    If cryptocompare actually fixes their zero historical price problem this test can go away
    """
    now_ts = 1610365553
    freezer.move_to(datetime.fromtimestamp(now_ts))
    cc = Cryptocompare(data_directory=data_dir, database=database)
    result = cc.get_historical_data(
        from_asset=Asset('CHI'),
        to_asset=Asset('EUR'),
        timestamp=now_ts,
        only_check_cache=False,
    )
    assert len(result) == 0
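The all-zeros condition described in the docstring can be expressed as a small stand-alone check over raw histohour entries. The entry shape follows the cache contents shown in Example #1; the function itself is an illustrative sketch, not rotkehlchen code:

from rotkehlchen.fval import FVal

ZERO = FVal(0)


def is_all_zeros_histohour(entries):
    """Return True if every histohour entry carries only zero prices.

    Entries are dicts with time/close/high/low/open/volumefrom/volumeto
    keys; floating point zeros like 0.0 compare equal to ZERO as well.
    """
    return all(
        FVal(str(entry['high'])) == ZERO and FVal(str(entry['low'])) == ZERO
        for entry in entries
    )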
Example #14
def test_get_cached_data_metadata(data_dir, database):
    """Test that the get_cached_data_metadata function works correctly
    and returns just the metadata by reading part of the file
    """
    contents = """{"start_time": 1301536800, "end_time": 1301540400,
    "data": [{"time": 1301536800, "close": 0.298, "high": 0.298, "low": 0.298, "open": 0.298,
    "volumefrom": 0.298, "volumeto": 0.298}, {"time": 1301540400, "close": 0.298, "high": 0.298,
    "low": 0.298, "open": 0.298, "volumefrom": 0.298, "volumeto": 0.298}]}"""
    price_history_dir = get_or_make_price_history_dir(data_dir)
    with open(price_history_dir / f'{PRICE_HISTORY_FILE_PREFIX}BTC_USD.json', 'w') as f:
        f.write(contents)
    cc = Cryptocompare(data_directory=data_dir, database=database)
    result = cc.get_cached_data_metadata(
        from_asset=A_BTC,
        to_asset=A_USD,
    )
    assert result is not None
    assert result[0] == 1301536800
    assert result[1] == 1301540400
Example #15
    def __init__(self, data_directory='.'):
        self.data_directory = Path(data_directory)
        with open(self.data_directory / 'buchfink.yaml', 'r') as config_file:
            self.config = yaml.load(config_file, Loader=yaml.SafeLoader)

        self.reports_directory = self.data_directory / "reports"
        self.trades_directory = self.data_directory / "trades"
        self.cache_directory = self.data_directory / "cache"

        self.reports_directory.mkdir(exist_ok=True)
        self.trades_directory.mkdir(exist_ok=True)
        self.cache_directory.mkdir(exist_ok=True)
        (self.cache_directory / 'cryptocompare').mkdir(exist_ok=True)
        (self.cache_directory / 'history').mkdir(exist_ok=True)
        (self.cache_directory / 'inquirer').mkdir(exist_ok=True)

        self.cryptocompare = Cryptocompare(
            self.cache_directory / 'cryptocompare', self)
        self.historian = PriceHistorian(self.cache_directory / 'history',
                                        '01/01/2014', self.cryptocompare)
        self.inquirer = Inquirer(self.cache_directory / 'inquirer',
                                 self.cryptocompare)
        self.msg_aggregator = MessagesAggregator()
        self.greenlet_manager = GreenletManager(
            msg_aggregator=self.msg_aggregator)

        # Initialize blockchain querying modules
        self.etherscan = Etherscan(database=self,
                                   msg_aggregator=self.msg_aggregator)
        self.all_eth_tokens = AssetResolver().get_all_eth_tokens()
        self.alethio = Alethio(
            database=self,
            msg_aggregator=self.msg_aggregator,
            all_eth_tokens=self.all_eth_tokens,
        )
        self.ethereum_manager = EthereumManager(
            ethrpc_endpoint=self.get_eth_rpc_endpoint(),
            etherscan=self.etherscan,
            msg_aggregator=self.msg_aggregator,
        )
        #self.chain_manager = ChainManager(
        #    blockchain_accounts=[],
        #    owned_eth_tokens=[],
        #    ethereum_manager=self.ethereum_manager,
        #    msg_aggregator=self.msg_aggregator,
        #    alethio=alethio,
        #    greenlet_manager=self.greenlet_manager,
        #    premium=False,
        #    eth_modules=ethereum_modules,
        #)
        self.ethereum_analyzer = EthereumAnalyzer(
            ethereum_manager=self.ethereum_manager,
            database=self,
        )
Example #16
def test_cryptocompare_histohour_data_going_forward(data_dir, database,
                                                    freezer):
    """Test that the cryptocompare histohour data retrieval works properly

    This test checks that doing an additional query in the future works properly
    and that the newly returned data are appended to the cached data
    """
    # first timestamp cryptocompare has histohour BTC/USD when queried from this test is
    btc_start_ts = 1279940400
    now_ts = btc_start_ts + 3600 * 2000 + 122
    freezer.move_to(datetime.fromtimestamp(now_ts))
    cc = Cryptocompare(data_directory=data_dir, database=database)
    result = cc.get_historical_data(
        from_asset=A_BTC,
        to_asset=A_USD,
        timestamp=now_ts - 3600 * 2 - 55,
        only_check_cache=False,
    )
    cache_key = PairCacheKey('BTC_USD')
    assert len(result) == CRYPTOCOMPARE_HOURQUERYLIMIT + 1
    assert all(x.low == x.high == FVal('0.05454') for x in result)
    assert cache_key in cc.price_history
    assert cc.price_history[cache_key].start_time == btc_start_ts
    assert cc.price_history[cache_key].end_time == now_ts
    assert all(x.low == x.high == FVal('0.05454')
               for x in cc.price_history[cache_key].data)

    # now let's move a bit to the future and query again to see the cache is appended to
    now_ts = now_ts + 3600 * 2000 * 2 + 4700
    freezer.move_to(datetime.fromtimestamp(now_ts))
    result = cc.get_historical_data(
        from_asset=A_BTC,
        to_asset=A_USD,
        timestamp=now_ts - 3600 * 4 - 55,
        only_check_cache=False,
    )
    assert len(result) == CRYPTOCOMPARE_HOURQUERYLIMIT * 3 + 2
    check_cc_result(result, forward=True)
    assert cache_key in cc.price_history
    assert cc.price_history[cache_key].start_time == btc_start_ts
    assert cc.price_history[cache_key].end_time == now_ts
    check_cc_result(cc.price_history[cache_key].data, forward=True)
Example #17
    def __init__(self, args: argparse.Namespace) -> None:
        """Initialize the Rotkehlchen object

        May Raise:
        - SystemPermissionError if the given data directory's permissions
        are not correct.
        """
        self.lock = Semaphore()
        self.lock.acquire()

        # Can also be None after unlock if premium credentials did not
        # authenticate or premium server temporarily offline
        self.premium: Optional[Premium] = None
        self.user_is_logged_in: bool = False
        configure_logging(args)

        self.sleep_secs = args.sleep_secs
        if args.data_dir is None:
            self.data_dir = default_data_directory()
        else:
            self.data_dir = Path(args.data_dir)

        if not os.access(self.data_dir, os.W_OK | os.R_OK):
            raise SystemPermissionError(
                f'The given data directory {self.data_dir} is not readable or writable',
            )
        self.args = args
        self.msg_aggregator = MessagesAggregator()
        self.greenlet_manager = GreenletManager(
            msg_aggregator=self.msg_aggregator)
        self.exchange_manager = ExchangeManager(
            msg_aggregator=self.msg_aggregator)
        self.data = DataHandler(self.data_dir, self.msg_aggregator)
        self.cryptocompare = Cryptocompare(data_directory=self.data_dir,
                                           database=None)
        # Initialize the Inquirer singleton
        Inquirer(data_dir=self.data_dir, cryptocompare=self.cryptocompare)

        self.lock.release()
        self.shutdown_event = gevent.event.Event()
Example #18
def test_get_cached_data_metadata(data_dir, database):
    """Test that the get_cached_data_metadata function works correctly
    and returns just the metadata by reading part of the file

    The JSON cache data in production are saved as one-line files,
    so here we also keep it on one line on purpose. The regex we
    previously used failed with a one-line JSON file.
    """
    contents = """{"start_time": 1301536800, "end_time": 1301540400, "data": [{"time": 1301536800, "close": 0.298, "high": 0.298, "low": 0.298, "open": 0.298, "volumefrom": 0.298, "volumeto": 0.298}, {"time": 1301540400, "close": 0.298, "high": 0.298, "low": 0.298, "open": 0.298, "volumefrom": 0.298, "volumeto": 0.298}]}"""  # noqa: E501
    price_history_dir = get_or_make_price_history_dir(data_dir)
    with open(price_history_dir / f'{PRICE_HISTORY_FILE_PREFIX}BTC_USD.json', 'w') as f:
        f.write(contents)
    cc = Cryptocompare(data_directory=data_dir, database=database)
    # make sure that _read_cachefile_metadata runs and the metadata is read from the file and not from memory
    cc.price_history = {}
    result = cc.get_cached_data_metadata(
        from_asset=A_BTC,
        to_asset=A_USD,
    )
    assert result is not None
    assert result[0] == 1301536800
    assert result[1] == 1301540400
Example #19
def test_cryptocompare_histohour_query_old_ts_xcp(
        accounting_data_dir,
        price_historian,  # pylint: disable=unused-argument
):
    """Test that as a result of this query a crash does not happen.

    Regression for: https://github.com/rotkehlchenio/rotkehlchen/issues/432
    Unfortunately still no price is found, so we have to expect a NoPriceForGivenTimestamp

    This test is now skipped since it's a subset of:
    test_end_to_end_tax_report::test_unknown_cryptocompare_asset_and_price_not_found_in_history

    When more price data sources are introduced then this should probably be unskipped
    to focus on the cryptocompare case. But at the moment both tests follow the same
    path and are probably slow due to the price querying.
    """
    with pytest.raises(NoPriceForGivenTimestamp):
        cc = Cryptocompare(data_directory=accounting_data_dir)
        cc.query_historical_price(
            from_asset=Asset('XCP'),
            to_asset=A_USD,
            timestamp=1392685761,
            historical_data_start=1438387200,
        )
Example #20
def test_price_queries(price_historian, data_dir, database):
    """Test some historical price queries. Make sure that we test some
    assets not in cryptocompare but in coigecko so the backup mechanism triggers and works"""

    # These should hit cryptocompare
    assert price_historian.query_historical_price(A_BTC, A_EUR,
                                                  1479200704) == FVal('663.66')
    assert price_historian.query_historical_price(
        A_XMR, A_BTC, 1579200704) == FVal('0.007526')
    # this should hit the cryptocompare cache we are creating here
    cache_data = [
        HistoricalPrice(
            from_asset=A_DASH,
            to_asset=A_USD,
            source=HistoricalPriceOracle.CRYPTOCOMPARE,
            timestamp=Timestamp(1438387200),
            price=Price(FVal('10')),
        ),
        HistoricalPrice(
            from_asset=A_DASH,
            to_asset=A_USD,
            source=HistoricalPriceOracle.CRYPTOCOMPARE,
            timestamp=Timestamp(1438390800),
            price=Price(FVal('20')),
        )
    ]
    GlobalDBHandler().add_historical_prices(cache_data)
    price_historian._PriceHistorian__instance._cryptocompare = Cryptocompare(
        data_directory=data_dir,
        database=database,
    )
    price_historian.set_oracles_order(price_historian._oracles)
    assert price_historian.query_historical_price(A_DASH, A_USD,
                                                  1438387700) == FVal('10')
    # this should hit coingecko, since cornichon is not in cryptocompare
    expected_price = FVal('0.07830444726516915')
    assert price_historian.query_historical_price(A_CORN, A_USD,
                                                  1608854400) == expected_price
Example #21
def test_cryptocompare_histohour_data_going_forward(data_dir, database,
                                                    freezer):
    """Test that the cryptocompare histohour data retrieval works properly

    This test checks that doing an additional query in the future works properly
    and that the newly returned data are appended to the cached data
    """
    # first timestamp cryptocompare has histohour BTC/USD when queried from this test is
    btc_start_ts = 1279940400
    now_ts = btc_start_ts + 3600 * 2000 + 122
    freezer.move_to(datetime.fromtimestamp(now_ts))
    cc = Cryptocompare(data_directory=data_dir, database=database)
    cc.query_and_store_historical_data(
        from_asset=A_BTC,
        to_asset=A_USD,
        timestamp=now_ts - 3600 * 2 - 55,
    )

    globaldb = GlobalDBHandler()
    result = get_globaldb_cache_entries(from_asset=A_BTC, to_asset=A_USD)
    assert len(result) == CRYPTOCOMPARE_HOURQUERYLIMIT + 1
    assert all(x.price == Price(FVal(0.05454)) for x in result)
    data_range = globaldb.get_historical_price_range(
        A_BTC, A_USD, HistoricalPriceOracle.CRYPTOCOMPARE,
    )
    assert data_range[0] == btc_start_ts
    assert data_range[1] == 1287140400  # that's the closest ts to now_ts cc returns

    # now let's move a bit to the future and query again to see the cache is appended to
    now_ts = now_ts + 3600 * 2000 * 2 + 4700
    freezer.move_to(datetime.fromtimestamp(now_ts))
    cc.query_and_store_historical_data(
        from_asset=A_BTC,
        to_asset=A_USD,
        timestamp=now_ts - 3600 * 4 - 55,
    )
    result = get_globaldb_cache_entries(from_asset=A_BTC, to_asset=A_USD)
    assert len(result) == CRYPTOCOMPARE_HOURQUERYLIMIT * 3 + 2
    check_cc_result(result, forward=True)
    data_range = globaldb.get_historical_price_range(
        A_BTC, A_USD, HistoricalPriceOracle.CRYPTOCOMPARE,
    )
    assert data_range[0] == btc_start_ts
    assert data_range[1] == 1301544000  # that's the closest ts to now_ts cc returns
Example #22
def price_historian(
    accounting_data_dir,
    inquirer,  # pylint: disable=unused-argument
    should_mock_price_queries,
    mocked_price_queries,
):
    # Since this is a singleton and we want it initialized every time the fixture
    # is called, make sure its instance always starts from scratch
    PriceHistorian._PriceHistorian__instance = None
    historian = PriceHistorian(
        data_directory=accounting_data_dir,
        history_date_start=TEST_HISTORY_DATA_START,
        cryptocompare=Cryptocompare(data_directory=accounting_data_dir),
    )
    if should_mock_price_queries:

        def mock_historical_price_query(from_asset, to_asset, timestamp):
            if from_asset == to_asset:
                return FVal(1)
            return mocked_price_queries[from_asset][to_asset][timestamp]

        historian.query_historical_price = mock_historical_price_query

    return historian
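A sketch of how a test could rely on this fixture when should_mock_price_queries is true. It only uses the from_asset == to_asset shortcut visible in the mock above, so no entry in mocked_price_queries is needed; the test name is illustrative:

from rotkehlchen.constants.assets import A_BTC
from rotkehlchen.fval import FVal


def test_same_asset_price_is_one(price_historian):
    # the mock in the fixture returns FVal(1) whenever from_asset == to_asset
    assert price_historian.query_historical_price(A_BTC, A_BTC, 1479200704) == FVal(1)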
Example #23
class Rotkehlchen():
    def __init__(self, args: argparse.Namespace) -> None:
        """Initialize the Rotkehlchen object

        May Raise:
        - SystemPermissionError if the given data directory's permissions
        are not correct.
        """
        self.lock = Semaphore()
        self.lock.acquire()

        # Can also be None after unlock if premium credentials did not
        # authenticate or premium server temporarily offline
        self.premium: Optional[Premium] = None
        self.user_is_logged_in: bool = False
        configure_logging(args)

        self.sleep_secs = args.sleep_secs
        if args.data_dir is None:
            self.data_dir = default_data_directory()
        else:
            self.data_dir = Path(args.data_dir)

        if not os.access(self.data_dir, os.W_OK | os.R_OK):
            raise SystemPermissionError(
                f'The given data directory {self.data_dir} is not readable or writable',
            )
        self.main_loop_spawned = False
        self.args = args
        self.api_task_greenlets: List[gevent.Greenlet] = []
        self.msg_aggregator = MessagesAggregator()
        self.greenlet_manager = GreenletManager(
            msg_aggregator=self.msg_aggregator)
        self.exchange_manager = ExchangeManager(
            msg_aggregator=self.msg_aggregator)
        # Initialize the AssetResolver singleton
        AssetResolver(data_directory=self.data_dir)
        self.data = DataHandler(self.data_dir, self.msg_aggregator)
        self.cryptocompare = Cryptocompare(data_directory=self.data_dir,
                                           database=None)
        self.coingecko = Coingecko(data_directory=self.data_dir)
        self.icon_manager = IconManager(data_dir=self.data_dir,
                                        coingecko=self.coingecko)
        self.greenlet_manager.spawn_and_track(
            after_seconds=None,
            task_name='periodically_query_icons_until_all_cached',
            exception_is_error=False,
            method=self.icon_manager.periodically_query_icons_until_all_cached,
            batch_size=ICONS_BATCH_SIZE,
            sleep_time_secs=ICONS_QUERY_SLEEP,
        )
        # Initialize the Inquirer singleton
        Inquirer(
            data_dir=self.data_dir,
            cryptocompare=self.cryptocompare,
            coingecko=self.coingecko,
        )
        # Keeps how many trades we have found per location. Used for free user limiting
        self.actions_per_location: Dict[str, Dict[Location, int]] = {
            'trade': defaultdict(int),
            'asset_movement': defaultdict(int),
        }

        self.lock.release()
        self.task_manager: Optional[TaskManager] = None
        self.shutdown_event = gevent.event.Event()

    def reset_after_failed_account_creation_or_login(self) -> None:
        """If the account creation or login failed make sure that the Rotki instance is clear

        Tricky instances are when after either failed premium credentials or user refusal
        to sync premium databases we relogged in.
        """
        self.cryptocompare.db = None

    def unlock_user(
        self,
        user: str,
        password: str,
        create_new: bool,
        sync_approval: Literal['yes', 'no', 'unknown'],
        premium_credentials: Optional[PremiumCredentials],
        initial_settings: Optional[ModifiableDBSettings] = None,
    ) -> None:
        """Unlocks an existing user or creates a new one if `create_new` is True

        May raise:
        - PremiumAuthenticationError if the password can't unlock the database.
        - AuthenticationError if premium_credentials are given and are invalid
        or can't authenticate with the server
        - DBUpgradeError if the rotki DB version is newer than the software or
        there is a DB upgrade and there is an error.
        - SystemPermissionError if the directory or DB file can not be accessed
        """
        log.info(
            'Unlocking user',
            user=user,
            create_new=create_new,
            sync_approval=sync_approval,
            initial_settings=initial_settings,
        )

        # unlock or create the DB
        self.password = password
        self.user_directory = self.data.unlock(user, password, create_new,
                                               initial_settings)
        self.data_importer = DataImporter(db=self.data.db)
        self.last_data_upload_ts = self.data.db.get_last_data_upload_ts()
        self.premium_sync_manager = PremiumSyncManager(data=self.data,
                                                       password=password)
        # set the DB in the external services instances that need it
        self.cryptocompare.set_database(self.data.db)

        # Anything that was set above here has to be cleaned in case of failure in the next step
        # by reset_after_failed_account_creation_or_login()
        try:
            self.premium = self.premium_sync_manager.try_premium_at_start(
                given_premium_credentials=premium_credentials,
                username=user,
                create_new=create_new,
                sync_approval=sync_approval,
            )
        except PremiumAuthenticationError:
            # Reraise it only if this is during the creation of a new account where
            # the premium credentials were given by the user
            if create_new:
                raise
            self.msg_aggregator.add_warning(
                'Could not authenticate the Rotki premium API keys found in the DB.'
                ' Has your subscription expired?', )
            # else let's just continue. User signed in successfully, but they just
            # have unauthenticatable/invalid premium credentials remaining in their DB

        settings = self.get_settings()
        self.greenlet_manager.spawn_and_track(
            after_seconds=None,
            task_name='submit_usage_analytics',
            exception_is_error=False,
            method=maybe_submit_usage_analytics,
            should_submit=settings.submit_usage_analytics,
        )
        self.etherscan = Etherscan(database=self.data.db,
                                   msg_aggregator=self.msg_aggregator)
        self.beaconchain = BeaconChain(database=self.data.db,
                                       msg_aggregator=self.msg_aggregator)
        eth_rpc_endpoint = settings.eth_rpc_endpoint
        # Initialize the price historian singleton
        PriceHistorian(
            data_directory=self.data_dir,
            cryptocompare=self.cryptocompare,
            coingecko=self.coingecko,
        )
        PriceHistorian().set_oracles_order(settings.historical_price_oracles)

        self.accountant = Accountant(
            db=self.data.db,
            user_directory=self.user_directory,
            msg_aggregator=self.msg_aggregator,
            create_csv=True,
        )

        # Initialize the rotkehlchen logger
        LoggingSettings(anonymized_logs=settings.anonymized_logs)
        exchange_credentials = self.data.db.get_exchange_credentials()
        self.exchange_manager.initialize_exchanges(
            exchange_credentials=exchange_credentials,
            database=self.data.db,
        )

        # Initialize blockchain querying modules
        ethereum_manager = EthereumManager(
            ethrpc_endpoint=eth_rpc_endpoint,
            etherscan=self.etherscan,
            database=self.data.db,
            msg_aggregator=self.msg_aggregator,
            greenlet_manager=self.greenlet_manager,
            connect_at_start=ETHEREUM_NODES_TO_CONNECT_AT_START,
        )
        kusama_manager = SubstrateManager(
            chain=SubstrateChain.KUSAMA,
            msg_aggregator=self.msg_aggregator,
            greenlet_manager=self.greenlet_manager,
            connect_at_start=KUSAMA_NODES_TO_CONNECT_AT_START,
            connect_on_startup=self._connect_ksm_manager_on_startup(),
            own_rpc_endpoint=settings.ksm_rpc_endpoint,
        )

        Inquirer().inject_ethereum(ethereum_manager)
        Inquirer().set_oracles_order(settings.current_price_oracles)

        self.chain_manager = ChainManager(
            blockchain_accounts=self.data.db.get_blockchain_accounts(),
            ethereum_manager=ethereum_manager,
            kusama_manager=kusama_manager,
            msg_aggregator=self.msg_aggregator,
            database=self.data.db,
            greenlet_manager=self.greenlet_manager,
            premium=self.premium,
            eth_modules=settings.active_modules,
            data_directory=self.data_dir,
            beaconchain=self.beaconchain,
            btc_derivation_gap_limit=settings.btc_derivation_gap_limit,
        )
        self.events_historian = EventsHistorian(
            user_directory=self.user_directory,
            db=self.data.db,
            msg_aggregator=self.msg_aggregator,
            exchange_manager=self.exchange_manager,
            chain_manager=self.chain_manager,
        )
        self.task_manager = TaskManager(
            max_tasks_num=DEFAULT_MAX_TASKS_NUM,
            greenlet_manager=self.greenlet_manager,
            api_task_greenlets=self.api_task_greenlets,
            database=self.data.db,
            cryptocompare=self.cryptocompare,
            premium_sync_manager=self.premium_sync_manager,
            chain_manager=self.chain_manager,
            exchange_manager=self.exchange_manager,
        )
        self.user_is_logged_in = True
        log.debug('User unlocking complete')

    def logout(self) -> None:
        if not self.user_is_logged_in:
            return

        user = self.data.username
        log.info(
            'Logging out user',
            user=user,
        )
        self.greenlet_manager.clear()
        del self.chain_manager
        self.exchange_manager.delete_all_exchanges()

        # Reset rotkehlchen logger to default
        LoggingSettings(anonymized_logs=DEFAULT_ANONYMIZED_LOGS)

        del self.accountant
        del self.events_historian
        del self.data_importer

        if self.premium is not None:
            del self.premium
        self.data.logout()
        self.password = ''
        self.cryptocompare.unset_database()

        # Make sure no messages leak to other user sessions
        self.msg_aggregator.consume_errors()
        self.msg_aggregator.consume_warnings()
        self.task_manager = None

        self.user_is_logged_in = False
        log.info(
            'User successfully logged out',
            user=user,
        )

    def set_premium_credentials(self, credentials: PremiumCredentials) -> None:
        """
        Sets the premium credentials for Rotki

        Raises PremiumAuthenticationError if the given key is rejected by the Rotkehlchen server
        """
        log.info('Setting new premium credentials')
        if self.premium is not None:
            self.premium.set_credentials(credentials)
        else:
            self.premium = premium_create_and_verify(credentials)

        self.data.db.set_rotkehlchen_premium(credentials)

    def delete_premium_credentials(self) -> Tuple[bool, str]:
        """Deletes the premium credentials for Rotki"""
        msg = ''

        success = self.data.db.del_rotkehlchen_premium()
        if success is False:
            msg = 'The database was unable to delete the Premium keys for the logged-in user'
        self.deactivate_premium_status()
        return success, msg

    def deactivate_premium_status(self) -> None:
        """Deactivate premium in the current session"""
        self.premium = None
        self.premium_sync_manager.premium = None
        self.chain_manager.deactivate_premium_status()

    def start(self) -> gevent.Greenlet:
        assert not self.main_loop_spawned, 'Tried to spawn the main loop twice'
        greenlet = gevent.spawn(self.main_loop)
        self.main_loop_spawned = True
        return greenlet

    def main_loop(self) -> None:
        """Rotki main loop that fires often and runs the task manager's scheduler"""
        while self.shutdown_event.wait(
                timeout=MAIN_LOOP_SECS_DELAY) is not True:
            if self.task_manager is not None:
                self.task_manager.schedule()

    def get_blockchain_account_data(
        self,
        blockchain: SupportedBlockchain,
    ) -> Union[List[BlockchainAccountData], Dict[str, Any]]:
        account_data = self.data.db.get_blockchain_account_data(blockchain)
        if blockchain != SupportedBlockchain.BITCOIN:
            return account_data

        xpub_data = self.data.db.get_bitcoin_xpub_data()
        addresses_to_account_data = {x.address: x for x in account_data}
        address_to_xpub_mappings = self.data.db.get_addresses_to_xpub_mapping(
            list(addresses_to_account_data.keys()),  # type: ignore
        )

        xpub_mappings: Dict['XpubData', List[BlockchainAccountData]] = {}
        for address, xpub_entry in address_to_xpub_mappings.items():
            if xpub_entry not in xpub_mappings:
                xpub_mappings[xpub_entry] = []
            xpub_mappings[xpub_entry].append(
                addresses_to_account_data[address])

        data: Dict[str, Any] = {'standalone': [], 'xpubs': []}
        # Add xpub data
        for xpub_entry in xpub_data:
            data_entry = xpub_entry.serialize()
            addresses = xpub_mappings.get(xpub_entry, None)
            data_entry['addresses'] = addresses if addresses and len(addresses) != 0 else None
            data['xpubs'].append(data_entry)
        # Add standalone addresses
        for account in account_data:
            if account.address not in address_to_xpub_mappings:
                data['standalone'].append(account)

        return data

    def add_blockchain_accounts(
        self,
        blockchain: SupportedBlockchain,
        account_data: List[BlockchainAccountData],
    ) -> BlockchainBalancesUpdate:
        """Adds new blockchain accounts

        Adds the accounts to the blockchain instance and queries them to get the
        updated balances. Also adds them in the DB

        May raise:
        - EthSyncError from modify_blockchain_account
        - InputError if the given accounts list is empty.
        - TagConstraintError if any of the given account data contain unknown tags.
        - RemoteError if an external service such as Etherscan is queried and
          there is a problem with its query.
        """
        self.data.db.ensure_tags_exist(
            given_data=account_data,
            action='adding',
            data_type='blockchain accounts',
        )
        address_type = blockchain.get_address_type()
        updated_balances = self.chain_manager.add_blockchain_accounts(
            blockchain=blockchain,
            accounts=[address_type(entry.address) for entry in account_data],
        )
        self.data.db.add_blockchain_accounts(
            blockchain=blockchain,
            account_data=account_data,
        )

        return updated_balances

    def edit_blockchain_accounts(
        self,
        blockchain: SupportedBlockchain,
        account_data: List[BlockchainAccountData],
    ) -> None:
        """Edits blockchain accounts

        Edits blockchain account data for the given accounts

        May raise:
        - InputError if the given accounts list is empty or if
        any of the accounts to edit do not exist.
        - TagConstraintError if any of the given account data contain unknown tags.
        """
        # First check for validity of account data addresses
        if len(account_data) == 0:
            raise InputError(
                'Empty list of blockchain account data to edit was given')
        accounts = [x.address for x in account_data]
        unknown_accounts = set(accounts).difference(
            self.chain_manager.accounts.get(blockchain))
        if len(unknown_accounts) != 0:
            raise InputError(
                f'Tried to edit unknown {blockchain.value} '
                f'accounts {",".join(unknown_accounts)}', )

        self.data.db.ensure_tags_exist(
            given_data=account_data,
            action='editing',
            data_type='blockchain accounts',
        )

        # Finally edit the accounts
        self.data.db.edit_blockchain_accounts(
            blockchain=blockchain,
            account_data=account_data,
        )

    def remove_blockchain_accounts(
        self,
        blockchain: SupportedBlockchain,
        accounts: ListOfBlockchainAddresses,
    ) -> BlockchainBalancesUpdate:
        """Removes blockchain accounts

        Removes the accounts from the blockchain instance and queries them to get
        the updated balances. Also removes them from the DB

        May raise:
        - RemoteError if an external service such as Etherscan is queried and
          there is a problem with its query.
        - InputError if a non-existing account was given to remove
        """
        balances_update = self.chain_manager.remove_blockchain_accounts(
            blockchain=blockchain,
            accounts=accounts,
        )
        self.data.db.remove_blockchain_accounts(blockchain, accounts)
        return balances_update

    def get_history_query_status(self) -> Dict[str, str]:
        if self.events_historian.progress < FVal('100'):
            processing_state = self.events_historian.processing_state_name
            progress = self.events_historian.progress / 2
        elif self.accountant.first_processed_timestamp == -1:
            processing_state = 'Processing all retrieved historical events'
            progress = FVal(50)
        else:
            processing_state = 'Processing all retrieved historical events'
            # start_ts is min of the query start or the first action timestamp since action
            # processing can start well before query start to calculate cost basis
            start_ts = min(
                self.accountant.events.query_start_ts,
                self.accountant.first_processed_timestamp,
            )
            diff = self.accountant.events.query_end_ts - start_ts
            progress = 50 + 100 * (
                FVal(self.accountant.currently_processing_timestamp - start_ts)
                / FVal(diff) / 2)
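            # Worked example with illustrative numbers: if start_ts is 0,
            # query_end_ts is 200 and currently_processing_timestamp is 100,
            # then diff is 200 and progress = 50 + 100 * (100 / 200 / 2) = 75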

        return {
            'processing_state': str(processing_state),
            'total_progress': str(progress)
        }

    def process_history(
        self,
        start_ts: Timestamp,
        end_ts: Timestamp,
    ) -> Tuple[Dict[str, Any], str]:
        (
            error_or_empty,
            history,
            loan_history,
            asset_movements,
            eth_transactions,
            defi_events,
            ledger_actions,
        ) = self.events_historian.get_history(
            start_ts=start_ts,
            end_ts=end_ts,
            has_premium=self.premium is not None,
        )
        result = self.accountant.process_history(
            start_ts=start_ts,
            end_ts=end_ts,
            trade_history=history,
            loan_history=loan_history,
            asset_movements=asset_movements,
            eth_transactions=eth_transactions,
            defi_events=defi_events,
            ledger_actions=ledger_actions,
        )
        return result, error_or_empty

    @overload
    def _apply_actions_limit(
        self,
        location: Location,
        action_type: Literal['trade'],
        location_actions: TRADES_LIST,
        all_actions: TRADES_LIST,
    ) -> TRADES_LIST:
        ...

    @overload
    def _apply_actions_limit(
        self,
        location: Location,
        action_type: Literal['asset_movement'],
        location_actions: List[AssetMovement],
        all_actions: List[AssetMovement],
    ) -> List[AssetMovement]:
        ...

    def _apply_actions_limit(
        self,
        location: Location,
        action_type: Literal['trade', 'asset_movement'],
        location_actions: Union[TRADES_LIST, List[AssetMovement]],
        all_actions: Union[TRADES_LIST, List[AssetMovement]],
    ) -> Union[TRADES_LIST, List[AssetMovement]]:
        """Take as many actions from location actions and add them to all actions as the limit permits

        Returns the modified (or not) all_actions
        """
        # If we are already at or above the limit return current actions disregarding this location
        actions_mapping = self.actions_per_location[action_type]
        current_num_actions = sum(x for _, x in actions_mapping.items())
        limit = LIMITS_MAPPING[action_type]
        if current_num_actions >= limit:
            return all_actions

        # Find out how many more actions we can return, and depending on that get
        # the number of actions from the location actions and add them to the total
        remaining_num_actions = limit - current_num_actions
        if remaining_num_actions < 0:
            remaining_num_actions = 0

        num_actions_to_take = min(len(location_actions), remaining_num_actions)

        actions_mapping[location] = num_actions_to_take
        all_actions.extend(
            location_actions[0:num_actions_to_take])  # type: ignore
        return all_actions

    def query_trades(
        self,
        from_ts: Timestamp,
        to_ts: Timestamp,
        location: Optional[Location],
    ) -> TRADES_LIST:
        """Queries trades for the given location and time range.
        If no location is given then all external, all exchange and DEX trades are queried.

        DEX Trades are queried only if the user has premium
        If the user does not have premium then a trade limit is applied.

        May raise:
        - RemoteError: If there are problems connecting to any of the remote exchanges
        """
        trades: TRADES_LIST
        if location is not None:
            trades = self.query_location_trades(from_ts, to_ts, location)
        else:
            trades = self.query_location_trades(from_ts, to_ts,
                                                Location.EXTERNAL)
            # crypto.com is not an API-key-supported exchange but the user can import from CSV
            trades.extend(
                self.query_location_trades(from_ts, to_ts, Location.CRYPTOCOM))
            for name, exchange in self.exchange_manager.connected_exchanges.items():
                exchange_trades = exchange.query_trade_history(
                    start_ts=from_ts, end_ts=to_ts)
                if self.premium is None:
                    trades = self._apply_actions_limit(
                        location=deserialize_location(name),
                        action_type='trade',
                        location_actions=exchange_trades,
                        all_actions=trades,
                    )
                else:
                    trades.extend(exchange_trades)

            # for all trades we also need uniswap trades
            if self.premium is not None:
                uniswap = self.chain_manager.uniswap
                if uniswap is not None:
                    trades.extend(
                        uniswap.get_trades(
                            addresses=self.chain_manager.queried_addresses_for_module('uniswap'),
                            from_timestamp=from_ts,
                            to_timestamp=to_ts,
                        ),
                    )

        # return trades with most recent first
        trades.sort(key=lambda x: x.timestamp, reverse=True)
        return trades

    def query_location_trades(
        self,
        from_ts: Timestamp,
        to_ts: Timestamp,
        location: Location,
    ) -> TRADES_LIST:
        # clear the trades queried for this location
        self.actions_per_location['trade'][location] = 0

        # Default to empty so that e.g. a non-premium uniswap query falls through cleanly
        location_trades: TRADES_LIST = []
        if location in (Location.EXTERNAL, Location.CRYPTOCOM):
            location_trades = self.data.db.get_trades(  # type: ignore  # list invariance
                from_ts=from_ts,
                to_ts=to_ts,
                location=location,
            )
        elif location == Location.UNISWAP:
            if self.premium is not None:
                uniswap = self.chain_manager.uniswap
                if uniswap is not None:
                    location_trades = uniswap.get_trades(  # type: ignore  # list invariance
                        addresses=self.chain_manager.queried_addresses_for_module('uniswap'),
                        from_timestamp=from_ts,
                        to_timestamp=to_ts,
                    )
        else:
            # should only be an exchange
            exchange = self.exchange_manager.get(str(location))
            if not exchange:
                logger.warning(
                    f'Tried to query trades from {location} which is either not an '
                    f'exchange or not an exchange the user has connected to', )
                return []

            location_trades = exchange.query_trade_history(start_ts=from_ts, end_ts=to_ts)

        trades: TRADES_LIST = []
        if self.premium is None:
            trades = self._apply_actions_limit(
                location=location,
                action_type='trade',
                location_actions=location_trades,
                all_actions=trades,
            )
        else:
            trades = location_trades

        return trades

    def query_balances(
        self,
        requested_save_data: bool = False,
        timestamp: Optional[Timestamp] = None,
        ignore_cache: bool = False,
    ) -> Dict[str, Any]:
        """Query all balances rotkehlchen can see.

        If requested_save_data is True then the data are always saved in the DB;
        if it is False then data are saved only if self.data.should_save_balances()
        is True.
        If timestamp is None then the current timestamp is used.
        If a timestamp is given then that is the time that the balances are going
        to be saved in the DB.
        If ignore_cache is True then all underlying calls that have a cache ignore it.

        Returns a dictionary with the queried balances.
        """
        log.info('query_balances called',
                 requested_save_data=requested_save_data)

        balances: Dict[str, Dict[Asset, Balance]] = {}
        problem_free = True
        for _, exchange in self.exchange_manager.connected_exchanges.items():
            exchange_balances, _ = exchange.query_balances(
                ignore_cache=ignore_cache)
            # If we got an error, disregard that exchange but make sure we don't save data
            if not isinstance(exchange_balances, dict):
                problem_free = False
            else:
                balances[exchange.name] = exchange_balances

        liabilities: Dict[Asset, Balance]
        try:
            blockchain_result = self.chain_manager.query_balances(
                blockchain=None,
                force_token_detection=ignore_cache,
                ignore_cache=ignore_cache,
            )
            balances[str(Location.BLOCKCHAIN)] = blockchain_result.totals.assets
            liabilities = blockchain_result.totals.liabilities
        except (RemoteError, EthSyncError) as e:
            problem_free = False
            liabilities = {}
            log.error(f'Querying blockchain balances failed due to: {str(e)}')

        balances = account_for_manually_tracked_balances(db=self.data.db,
                                                         balances=balances)

        # Calculate usd totals
        assets_total_balance: DefaultDict[Asset, Balance] = defaultdict(Balance)
        total_usd_per_location: Dict[str, FVal] = {}
        for location, asset_balance in balances.items():
            total_usd_per_location[location] = ZERO
            for asset, balance in asset_balance.items():
                assets_total_balance[asset] += balance
                total_usd_per_location[location] += balance.usd_value

        net_usd = sum((balance.usd_value for balance in assets_total_balance.values()), ZERO)
        liabilities_total_usd = sum((liability.usd_value for liability in liabilities.values()), ZERO)  # noqa: E501
        net_usd -= liabilities_total_usd

        # Calculate location stats
        location_stats: Dict[str, Any] = {}
        for location, total_usd in total_usd_per_location.items():
            if location == str(Location.BLOCKCHAIN):
                total_usd -= liabilities_total_usd

            percentage = (total_usd / net_usd).to_percentage() if net_usd != ZERO else '0%'
            location_stats[location] = {
                'usd_value': total_usd,
                'percentage_of_net_value': percentage,
            }

        # Calculate 'percentage_of_net_value' per asset
        assets_total_balance_as_dict: Dict[Asset, Dict[str, Any]] = {
            asset: balance.to_dict()
            for asset, balance in assets_total_balance.items()
        }
        liabilities_as_dict: Dict[Asset, Dict[str, Any]] = {
            asset: balance.to_dict()
            for asset, balance in liabilities.items()
        }
        for asset, balance_dict in assets_total_balance_as_dict.items():
            percentage = (balance_dict['usd_value'] / net_usd).to_percentage() if net_usd != ZERO else '0%'  # noqa: E501
            assets_total_balance_as_dict[asset]['percentage_of_net_value'] = percentage

        for asset, balance_dict in liabilities_as_dict.items():
            percentage = (balance_dict['usd_value'] / net_usd).to_percentage() if net_usd != ZERO else '0%'  # noqa: E501
            liabilities_as_dict[asset]['percentage_of_net_value'] = percentage

        # Compose balances response
        result_dict = {
            'assets': assets_total_balance_as_dict,
            'liabilities': liabilities_as_dict,
            'location': location_stats,
            'net_usd': net_usd,
        }
        allowed_to_save = requested_save_data or self.data.should_save_balances()

        if problem_free and allowed_to_save:
            if not timestamp:
                timestamp = Timestamp(int(time.time()))
            self.data.db.save_balances_data(data=result_dict,
                                            timestamp=timestamp)
            log.debug('query_balances data saved')
        else:
            log.debug(
                'query_balances data not saved',
                allowed_to_save=allowed_to_save,
                problem_free=problem_free,
            )

        return result_dict
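
The composed response nests per-asset totals, liabilities and per-location stats. An illustrative sketch of the shape with made-up numbers: 42000 USD of assets minus 2000 USD of liabilities gives the 40000 net, and the blockchain location is reduced by the liabilities before its percentage is taken.

# Illustrative result shape for query_balances(); all values are made up.
result = {
    'assets': {
        'BTC': {'amount': '1', 'usd_value': '30000', 'percentage_of_net_value': '75.00%'},
        'ETH': {'amount': '10', 'usd_value': '12000', 'percentage_of_net_value': '30.00%'},
    },
    'liabilities': {
        'DAI': {'amount': '2000', 'usd_value': '2000', 'percentage_of_net_value': '5.00%'},
    },
    'location': {
        'blockchain': {'usd_value': '38000', 'percentage_of_net_value': '95.00%'},
        'kraken': {'usd_value': '2000', 'percentage_of_net_value': '5.00%'},
    },
    'net_usd': '40000',
}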

    def _query_exchange_asset_movements(
        self,
        from_ts: Timestamp,
        to_ts: Timestamp,
        all_movements: List[AssetMovement],
        exchange: Union[ExchangeInterface, Location],
    ) -> List[AssetMovement]:
        if isinstance(exchange, ExchangeInterface):
            location = deserialize_location(exchange.name)
            # clear the asset movements queried for this exchange
            self.actions_per_location['asset_movement'][location] = 0
            location_movements = exchange.query_deposits_withdrawals(
                start_ts=from_ts,
                end_ts=to_ts,
            )
        else:
            assert isinstance(exchange, Location), 'only a location should make it here'
            assert exchange == Location.CRYPTOCOM, 'only cryptocom should make it here'
            location = exchange
            # cryptocom has no exchange integration but we may have DB entries
            self.actions_per_location['asset_movement'][location] = 0
            location_movements = self.data.db.get_asset_movements(
                from_ts=from_ts,
                to_ts=to_ts,
                location=location,
            )

        movements: List[AssetMovement] = []
        if self.premium is None:
            movements = self._apply_actions_limit(
                location=location,
                action_type='asset_movement',
                location_actions=location_movements,
                all_actions=all_movements,
            )
        else:
            all_movements.extend(location_movements)
            movements = all_movements

        return movements

    def query_asset_movements(
        self,
        from_ts: Timestamp,
        to_ts: Timestamp,
        location: Optional[Location],
    ) -> List[AssetMovement]:
        """Queries AssetMovements for the given location and time range.

        If no location is given then all exchange asset movements are queried.
        If the user does not have premium then a limit is applied.
        May raise:
        - RemoteError: If there are problems connecting to any of the remote exchanges
        """
        movements: List[AssetMovement] = []
        if location is not None:
            if location == Location.CRYPTOCOM:
                movements = self._query_exchange_asset_movements(
                    from_ts=from_ts,
                    to_ts=to_ts,
                    all_movements=movements,
                    exchange=Location.CRYPTOCOM,
                )
            else:
                exchange = self.exchange_manager.get(str(location))
                if not exchange:
                    logger.warning(
                        f'Tried to query deposits/withdrawals from {location} which is either '
                        f'not an exchange or not an exchange the user has connected to',
                    )
                    return []
                movements = self._query_exchange_asset_movements(
                    from_ts=from_ts,
                    to_ts=to_ts,
                    all_movements=movements,
                    exchange=exchange,
                )
        else:
            # cryptocom has no exchange integration but we may have DB entries due to csv import
            movements = self._query_exchange_asset_movements(
                from_ts=from_ts,
                to_ts=to_ts,
                all_movements=movements,
                exchange=Location.CRYPTOCOM,
            )
            for _, exchange in self.exchange_manager.connected_exchanges.items():
                self._query_exchange_asset_movements(
                    from_ts=from_ts,
                    to_ts=to_ts,
                    all_movements=movements,
                    exchange=exchange,
                )

        # return movements with most recent first
        movements.sort(key=lambda x: x.timestamp, reverse=True)
        return movements
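
And the asset-movement analogue, again hypothetical and assuming an unlocked `rotki` instance:

# Hypothetical usage sketch, not taken from the rotki codebase.
movements = rotki.query_asset_movements(
    from_ts=Timestamp(0),
    to_ts=Timestamp(1609459200),
    location=None,  # None -> crypto.com CSV entries plus every connected exchange
)
assert all(a.timestamp >= b.timestamp for a, b in zip(movements, movements[1:]))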

    def set_settings(self, settings: ModifiableDBSettings) -> Tuple[bool, str]:
        """Tries to set new settings. Returns True in success or False with message if error"""
        with self.lock:
            if settings.eth_rpc_endpoint is not None:
                result, msg = self.chain_manager.set_eth_rpc_endpoint(
                    settings.eth_rpc_endpoint)
                if not result:
                    return False, msg

            if settings.ksm_rpc_endpoint is not None:
                result, msg = self.chain_manager.set_ksm_rpc_endpoint(
                    settings.ksm_rpc_endpoint)
                if not result:
                    return False, msg

            if settings.kraken_account_type is not None:
                kraken = self.exchange_manager.get('kraken')
                if kraken:
                    kraken.set_account_type(
                        settings.kraken_account_type)  # type: ignore

            if settings.btc_derivation_gap_limit is not None:
                self.chain_manager.btc_derivation_gap_limit = settings.btc_derivation_gap_limit

            if settings.current_price_oracles is not None:
                Inquirer().set_oracles_order(settings.current_price_oracles)

            if settings.historical_price_oracles is not None:
                PriceHistorian().set_oracles_order(
                    settings.historical_price_oracles)

            self.data.db.set_settings(settings)
            return True, ''
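
set_settings applies the side-effectful settings (RPC endpoints, oracle order, Kraken account type) before persisting the rest to the DB, and bails out early if an RPC endpoint is rejected. A hedged usage sketch; the field names are taken from the checks above, but the import path may differ between rotki versions:

# Hypothetical usage sketch assuming an unlocked `rotki` instance.
from rotkehlchen.db.settings import ModifiableDBSettings

success, msg = rotki.set_settings(ModifiableDBSettings(
    eth_rpc_endpoint='http://localhost:8545',
    btc_derivation_gap_limit=20,
))
if not success:
    print(f'Settings were rejected: {msg}')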

    def get_settings(self) -> DBSettings:
        """Returns the db settings with a check whether premium is active or not"""
        db_settings = self.data.db.get_settings(
            have_premium=self.premium is not None)
        return db_settings

    def setup_exchange(
        self,
        name: str,
        api_key: ApiKey,
        api_secret: ApiSecret,
        passphrase: Optional[str] = None,
    ) -> Tuple[bool, str]:
        """
        Setup a new exchange with an api key and an api secret and optionally a passphrase

        By default the api keys are always validated unless validate is False.
        """
        is_success, msg = self.exchange_manager.setup_exchange(
            name=name,
            api_key=api_key,
            api_secret=api_secret,
            database=self.data.db,
            passphrase=passphrase,
        )

        if is_success:
            # Success, save the result in the DB
            self.data.db.add_exchange(name,
                                      api_key,
                                      api_secret,
                                      passphrase=passphrase)
        return is_success, msg

    def remove_exchange(self, name: str) -> Tuple[bool, str]:
        if not self.exchange_manager.has_exchange(name):
            return False, 'Exchange {} is not registered'.format(name)

        self.exchange_manager.delete_exchange(name)
        # Success, remove it also from the DB
        self.data.db.remove_exchange(name)
        self.data.db.delete_used_query_range_for_exchange(name)
        return True, ''

    def query_periodic_data(self) -> Dict[str, Union[bool, Timestamp]]:
        """Query for frequently changing data"""
        result: Dict[str, Union[bool, Timestamp]] = {}

        if self.user_is_logged_in:
            result['last_balance_save'] = self.data.db.get_last_balance_save_time()
            result['eth_node_connection'] = self.chain_manager.ethereum.web3_mapping.get(NodeName.OWN, None) is not None  # noqa: E501
            result['last_data_upload_ts'] = Timestamp(self.premium_sync_manager.last_data_upload_ts)  # noqa: E501
        return result

    def shutdown(self) -> None:
        self.logout()
        self.shutdown_event.set()

    def _connect_ksm_manager_on_startup(self) -> bool:
        return bool(self.data.db.get_blockchain_accounts().ksm)

    def create_oracle_cache(
        self,
        oracle: HistoricalPriceOracle,
        from_asset: Asset,
        to_asset: Asset,
        purge_old: bool,
    ) -> None:
        """Creates the cache of the given asset pair from the start of time
        until now for the given oracle.

        If purge_old is True then any old cache, both in memory and on disk, is purged.

        May raise:
            - RemoteError if there is a problem reaching the oracle
            - UnsupportedAsset if any of the two assets is not supported by the oracle
        """
        if oracle != HistoricalPriceOracle.CRYPTOCOMPARE:
            return  # only for cryptocompare for now

        self.cryptocompare.create_cache(from_asset, to_asset, purge_old)

    def delete_oracle_cache(
        self,
        oracle: HistoricalPriceOracle,
        from_asset: Asset,
        to_asset: Asset,
    ) -> None:
        if oracle != HistoricalPriceOracle.CRYPTOCOMPARE:
            return  # only for cryptocompare for now

        self.cryptocompare.delete_cache(from_asset, to_asset)

    def get_oracle_cache(
            self, oracle: HistoricalPriceOracle) -> List[Dict[str, Any]]:
        if oracle != HistoricalPriceOracle.CRYPTOCOMPARE:
            return []  # only for cryptocompare for now

        return self.cryptocompare.get_all_cache_data()
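
The three oracle-cache methods form a small round trip, and anything other than the cryptocompare oracle is currently a no-op. A hypothetical sketch; the asset constants and import paths are assumptions that may vary between rotki versions:

# Hypothetical usage sketch assuming an unlocked `rotki` instance.
from rotkehlchen.constants.assets import A_BTC, A_USD

rotki.create_oracle_cache(
    oracle=HistoricalPriceOracle.CRYPTOCOMPARE,
    from_asset=A_BTC,
    to_asset=A_USD,
    purge_old=False,  # keep any existing cache and only extend it
)
caches = rotki.get_oracle_cache(HistoricalPriceOracle.CRYPTOCOMPARE)
rotki.delete_oracle_cache(HistoricalPriceOracle.CRYPTOCOMPARE, A_BTC, A_USD)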
Beispiel #24
0
def main():
    arg_parser = aggregator_args()
    args = arg_parser.parse_args()
    msg_aggregator = MessagesAggregator()
    user_data_dir = Path(default_data_directory()) / args.db_user
    database = DBHandler(
        user_data_dir=user_data_dir,
        password=args.db_password,
        msg_aggregator=msg_aggregator,
    )
    our_data = AssetResolver().assets
    paprika = CoinPaprika()
    cmc = None
    cmc_list = None
    root_path = os.path.dirname(
        os.path.dirname(os.path.dirname(os.path.realpath(__file__))))
    data_directory = f'{Path.home()}/.rotkehlchen'
    if args.cmc_api_key:
        cmc = Coinmarketcap(
            data_directory=data_directory,
            api_key=args.cmc_api_key,
        )
        cmc_list = cmc.get_cryptocyrrency_map()

    cryptocompare = Cryptocompare(data_directory=data_directory,
                                  database=database)
    paprika_coins_list = paprika.get_coins_list()
    cryptocompare_coins_map = cryptocompare.all_coins()

    if args.input_file:
        if not os.path.isfile(args.input_file):
            print(f'Given input file {args.input_file} is not a file')
            sys.exit(1)

        with open(args.input_file, 'r') as f:
            input_data = rlk_jsonloads(f.read())

        given_symbols = set(input_data.keys())
        current_symbols = set(our_data.keys())
        if not given_symbols.isdisjoint(current_symbols):
            print(
                f'The following given input symbols already exist in the '
                f'all_assets.json file {given_symbols.intersection(current_symbols)}',
            )
            sys.exit(1)

        # If an input file is given, iterate only its assets and perform checks
        for asset_symbol in input_data.keys():
            input_data = process_asset(
                our_data=input_data,
                asset_symbol=asset_symbol,
                paprika_coins_list=paprika_coins_list,
                paprika=paprika,
                cmc_list=cmc_list,
                cryptocompare_coins_map=cryptocompare_coins_map,
                always_keep_our_time=args.always_keep_our_time,
            )

        # and now combine the two dictionaries to get the final one. Note that no
        # checks are performed for what was in all_assets.json before the script
        # ran in this case
        our_data = {**our_data, **input_data}

    else:
        # Iterate all of the assets of the all_assets.json file and perform checks
        for asset_symbol in our_data.keys():
            our_data = process_asset(
                our_data=our_data,
                asset_symbol=asset_symbol,
                paprika_coins_list=paprika_coins_list,
                paprika=paprika,
                cmc_list=cmc_list,
                cryptocompare_coins_map=cryptocompare_coins_map,
                always_keep_our_time=args.always_keep_our_time,
            )

    # Finally overwrite the all_assets.json with the modified assets
    with open(os.path.join(root_path, 'rotkehlchen', 'data', 'all_assets.json'), 'w') as f:
        f.write(json.dumps(our_data, sort_keys=True, indent=4))
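
Note the merge on the input-file path: `{**our_data, **input_data}` gives the right-hand dict precedence on duplicate keys, although the earlier disjointness check guarantees there are none here. A quick illustration of that precedence:

# Dict-merge precedence: the later unpacking wins on duplicate keys.
base = {'BTC': 1, 'ETH': 2}
extra = {'ETH': 99, 'DOT': 3}
assert {**base, **extra} == {'BTC': 1, 'ETH': 99, 'DOT': 3}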
Beispiel #25
0
def create_inquirer(
    data_directory,
    should_mock_current_price_queries,
    mocked_prices,
    current_price_oracles_order,
    ethereum_manager,
    ignore_mocked_prices_for=None,
) -> Inquirer:
    # Since this is a singleton and we want it initialized every time the fixture
    # is called, make sure its instance always starts from scratch
    Inquirer._Inquirer__instance = None  # type: ignore
    # Get a cryptocompare without a DB since invoking the DB fixture here causes
    # existing-user problems for some tests
    cryptocompare = Cryptocompare(data_directory=data_directory, database=None)
    gecko = Coingecko()
    inquirer = Inquirer(
        data_dir=data_directory,
        cryptocompare=cryptocompare,
        coingecko=gecko,
    )
    if ethereum_manager is not None:
        inquirer.inject_ethereum(ethereum_manager)
        uniswap_v2_oracle = UniswapV2Oracle(ethereum_manager)
        uniswap_v3_oracle = UniswapV3Oracle(ethereum_manager)
        saddle_oracle = SaddleOracle(ethereum_manager)
        Inquirer().add_defi_oracles(
            uniswap_v2=uniswap_v2_oracle,
            uniswap_v3=uniswap_v3_oracle,
            saddle=saddle_oracle,
        )
    inquirer.set_oracles_order(current_price_oracles_order)

    if not should_mock_current_price_queries:
        return inquirer

    def mock_find_price(
            from_asset,
            to_asset,
            ignore_cache: bool = False,  # pylint: disable=unused-argument
    ):
        return mocked_prices.get((from_asset, to_asset), FVal('1.5'))

    def mock_find_usd_price(asset, ignore_cache: bool = False):  # pylint: disable=unused-argument
        return mocked_prices.get(asset, FVal('1.5'))

    if ignore_mocked_prices_for is None:
        inquirer.find_price = mock_find_price  # type: ignore
        inquirer.find_usd_price = mock_find_usd_price  # type: ignore
    else:

        def mock_some_prices(from_asset, to_asset, ignore_cache=False):
            if from_asset.symbol in ignore_mocked_prices_for:
                return inquirer.find_price_old(from_asset, to_asset, ignore_cache)
            return mock_find_price(from_asset, to_asset, ignore_cache)

        def mock_some_usd_prices(asset, ignore_cache=False):
            if asset.symbol in ignore_mocked_prices_for:
                return inquirer.find_usd_price_old(asset, ignore_cache)
            return mock_find_usd_price(asset, ignore_cache)

        inquirer.find_price_old = inquirer.find_price  # type: ignore
        inquirer.find_usd_price_old = inquirer.find_usd_price  # type: ignore
        inquirer.find_price = mock_some_prices  # type: ignore
        inquirer.find_usd_price = mock_some_usd_prices  # type: ignore

    def mock_query_fiat_pair(base, quote):  # pylint: disable=unused-argument
        return FVal(1)

    inquirer._query_fiat_pair = mock_query_fiat_pair  # type: ignore

    return inquirer
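
The fixture swaps methods on a live object and stashes the originals under *_old so that selected symbols can bypass the mock. The pattern in isolation, with hypothetical names:

# Standalone sketch of the selective-mocking pattern used by create_inquirer.
class PriceSource:
    def find_price(self, asset):
        return 42  # stands in for a real network query

source = PriceSource()
mocked_prices = {'BTC': 100}
passthrough = {'ETH'}

source.find_price_old = source.find_price  # keep the real bound method around

def mock_find_price(asset):
    if asset in passthrough:
        return source.find_price_old(asset)  # fall through to the original
    return mocked_prices.get(asset, 1.5)  # default mocked price

source.find_price = mock_find_price  # instance attribute shadows the class method

assert source.find_price('BTC') == 100   # mocked
assert source.find_price('ETH') == 42    # bypasses the mock
assert source.find_price('DOT') == 1.5   # mock default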
Beispiel #26
0
class Rotkehlchen():
    def __init__(self, args: argparse.Namespace) -> None:
        """Initialize the Rotkehlchen object

        May Raise:
        - SystemPermissionError if the given data directory's permissions
        are not correct.
        """
        self.lock = Semaphore()
        self.lock.acquire()

        # Can also be None after unlock if premium credentials did not
        # authenticate or premium server temporarily offline
        self.premium: Optional[Premium] = None
        self.user_is_logged_in: bool = False
        configure_logging(args)

        self.sleep_secs = args.sleep_secs
        if args.data_dir is None:
            self.data_dir = default_data_directory()
        else:
            self.data_dir = Path(args.data_dir)

        if not os.access(self.data_dir, os.W_OK | os.R_OK):
            raise SystemPermissionError(
                f'The given data directory {self.data_dir} is not readable or writable',
            )
        self.args = args
        self.msg_aggregator = MessagesAggregator()
        self.greenlet_manager = GreenletManager(
            msg_aggregator=self.msg_aggregator)
        self.exchange_manager = ExchangeManager(
            msg_aggregator=self.msg_aggregator)
        self.data = DataHandler(self.data_dir, self.msg_aggregator)
        self.cryptocompare = Cryptocompare(data_directory=self.data_dir,
                                           database=None)
        # Initialize the Inquirer singleton
        Inquirer(data_dir=self.data_dir, cryptocompare=self.cryptocompare)

        self.lock.release()
        self.shutdown_event = gevent.event.Event()

    def reset_after_failed_account_creation_or_login(self) -> None:
        """If the account creation or login failed make sure that the Rotki instance is clear

        Tricky cases are when we log in again after either failed premium
        credentials or the user's refusal to sync premium databases.
        """
        self.cryptocompare.db = None

    def unlock_user(
        self,
        user: str,
        password: str,
        create_new: bool,
        sync_approval: Literal['yes', 'no', 'unknown'],
        premium_credentials: Optional[PremiumCredentials],
        initial_settings: Optional[ModifiableDBSettings] = None,
    ) -> None:
        """Unlocks an existing user or creates a new one if `create_new` is True

        May raise:
        - PremiumAuthenticationError if the password can't unlock the database.
        - AuthenticationError if premium_credentials are given and are invalid
        or can't authenticate with the server
        - DBUpgradeError if the rotki DB version is newer than the software or
        there is a DB upgrade and there is an error.
        - SystemPermissionError if the directory or DB file can not be accessed
        """
        log.info(
            'Unlocking user',
            user=user,
            create_new=create_new,
            sync_approval=sync_approval,
            initial_settings=initial_settings,
        )

        # unlock or create the DB
        self.password = password
        self.user_directory = self.data.unlock(user, password, create_new,
                                               initial_settings)
        self.data_importer = DataImporter(db=self.data.db)
        self.last_data_upload_ts = self.data.db.get_last_data_upload_ts()
        self.premium_sync_manager = PremiumSyncManager(data=self.data,
                                                       password=password)
        # set the DB in the external services instances that need it
        self.cryptocompare.set_database(self.data.db)

        # Anything that was set above here has to be cleaned in case of failure in the next step
        # by reset_after_failed_account_creation_or_login()
        try:
            self.premium = self.premium_sync_manager.try_premium_at_start(
                given_premium_credentials=premium_credentials,
                username=user,
                create_new=create_new,
                sync_approval=sync_approval,
            )
        except PremiumAuthenticationError:
            # Reraise it only if this is during the creation of a new account where
            # the premium credentials were given by the user
            if create_new:
                raise
            # else let's just continue. The user signed in successfully, but just
            # has unauthenticatable/invalid premium credentials remaining in the DB

        settings = self.get_settings()
        self.greenlet_manager.spawn_and_track(
            task_name='submit_usage_analytics',
            method=maybe_submit_usage_analytics,
            should_submit=settings.submit_usage_analytics,
        )
        self.etherscan = Etherscan(database=self.data.db,
                                   msg_aggregator=self.msg_aggregator)
        historical_data_start = settings.historical_data_start
        eth_rpc_endpoint = settings.eth_rpc_endpoint
        # Initialize the price historian singleton
        PriceHistorian(
            data_directory=self.data_dir,
            history_date_start=historical_data_start,
            cryptocompare=self.cryptocompare,
        )
        self.accountant = Accountant(
            db=self.data.db,
            user_directory=self.user_directory,
            msg_aggregator=self.msg_aggregator,
            create_csv=True,
        )

        # Initialize the rotkehlchen logger
        LoggingSettings(anonymized_logs=settings.anonymized_logs)
        exchange_credentials = self.data.db.get_exchange_credentials()
        self.exchange_manager.initialize_exchanges(
            exchange_credentials=exchange_credentials,
            database=self.data.db,
        )

        # Initialize blockchain querying modules
        ethereum_manager = EthereumManager(
            ethrpc_endpoint=eth_rpc_endpoint,
            etherscan=self.etherscan,
            msg_aggregator=self.msg_aggregator,
            greenlet_manager=self.greenlet_manager,
            connect_at_start=ETHEREUM_NODES_TO_CONNECT_AT_START,
        )
        self.chain_manager = ChainManager(
            blockchain_accounts=self.data.db.get_blockchain_accounts(),
            ethereum_manager=ethereum_manager,
            msg_aggregator=self.msg_aggregator,
            database=self.data.db,
            greenlet_manager=self.greenlet_manager,
            premium=self.premium,
            eth_modules=settings.active_modules,
        )
        self.ethereum_analyzer = EthereumAnalyzer(
            ethereum_manager=ethereum_manager,
            database=self.data.db,
        )
        self.trades_historian = TradesHistorian(
            user_directory=self.user_directory,
            db=self.data.db,
            msg_aggregator=self.msg_aggregator,
            exchange_manager=self.exchange_manager,
            chain_manager=self.chain_manager,
        )
        self.user_is_logged_in = True
        log.debug('User unlocking complete')
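
A hypothetical unlock call; `rotki` is assumed to be a freshly constructed Rotkehlchen, and sync_approval must be one of the three literals from the signature:

# Hypothetical usage sketch, not taken from the rotki codebase.
rotki.unlock_user(
    user='alice',
    password='supersecret',
    create_new=False,
    sync_approval='unknown',  # 'yes' | 'no' | 'unknown'
    premium_credentials=None,
)
assert rotki.user_is_logged_in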

    def logout(self) -> None:
        if not self.user_is_logged_in:
            return

        user = self.data.username
        log.info(
            'Logging out user',
            user=user,
        )
        del self.chain_manager
        self.exchange_manager.delete_all_exchanges()

        # Reset rotkehlchen logger to default
        LoggingSettings(anonymized_logs=DEFAULT_ANONYMIZED_LOGS)

        del self.accountant
        del self.trades_historian
        del self.data_importer

        if self.premium is not None:
            del self.premium
        self.data.logout()
        self.password = ''
        self.cryptocompare.unset_database()

        # Make sure no messages leak to other user sessions
        self.msg_aggregator.consume_errors()
        self.msg_aggregator.consume_warnings()

        self.user_is_logged_in = False
        log.info(
            'User successfully logged out',
            user=user,
        )

    def set_premium_credentials(self, credentials: PremiumCredentials) -> None:
        """
        Sets the premium credentials for Rotki

        Raises PremiumAuthenticationError if the given key is rejected by the Rotkehlchen server
        """
        log.info('Setting new premium credentials')
        if self.premium is not None:
            self.premium.set_credentials(credentials)
        else:
            self.premium = premium_create_and_verify(credentials)

        self.data.db.set_rotkehlchen_premium(credentials)

    def delete_premium_credentials(self, name: str) -> Tuple[bool, str]:
        """Deletes the premium credentials for Rotki"""
        success: bool
        msg = ''

        if name != self.data.username:
            msg = f'Provided user "{name}" is not the logged in user'
            success = False

        success = self.data.db.del_rotkehlchen_premium()
        if success is False:
            msg = 'The database was unable to delete the Premium keys for the logged-in user'
        self.deactivate_premium_status()
        return success, msg

    def deactivate_premium_status(self) -> None:
        """Deactivate premium in the current session"""
        self.premium = None
        self.premium_sync_manager.premium = None
        self.chain_manager.deactivate_premium_status()

    def start(self) -> gevent.Greenlet:
        return gevent.spawn(self.main_loop)

    def main_loop(self) -> None:
        """Rotki main loop that fires often and manages many different tasks

        Each task remembers the last time it ran successfully and knows how often
        it should run. So each task manages itself.
        """
        while self.shutdown_event.wait(MAIN_LOOP_SECS_DELAY) is not True:
            if self.user_is_logged_in:
                log.debug('Main loop start')
                self.premium_sync_manager.maybe_upload_data_to_server()
                self.ethereum_analyzer.analyze_ethereum_transactions()
                log.debug('Main loop end')
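
main_loop leans on Event.wait returning True only once the event is set and False on timeout, so the same call doubles as tick timer and shutdown signal. The same pattern with the stdlib event, whose API gevent's Event mirrors:

# Sketch of the wait-loop pattern using threading.Event instead of gevent.
import threading

shutdown = threading.Event()
TICK_SECS = 0.01

def loop(max_ticks):
    ticks = 0
    while shutdown.wait(TICK_SECS) is not True:  # False on timeout -> keep running
        ticks += 1
        if ticks >= max_ticks:
            shutdown.set()  # a real app sets this from another thread/greenlet
    return ticks

assert loop(3) == 3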

    def add_blockchain_accounts(
        self,
        blockchain: SupportedBlockchain,
        account_data: List[BlockchainAccountData],
    ) -> BlockchainBalancesUpdate:
        """Adds new blockchain accounts

        Adds the accounts to the blockchain instance and queries them to get the
        updated balances. Also adds them in the DB

        May raise:
        - EthSyncError from modify_blockchain_account
        - InputError if the given accounts list is empty.
        - TagConstraintError if any of the given account data contain unknown tags.
        - RemoteError if an external service such as Etherscan is queried and
          there is a problem with its query.
        """
        self.data.db.ensure_tags_exist(
            given_data=account_data,
            action='adding',
            data_type='blockchain accounts',
        )
        address_type = blockchain.get_address_type()
        updated_balances = self.chain_manager.add_blockchain_accounts(
            blockchain=blockchain,
            accounts=[address_type(entry.address) for entry in account_data],
        )
        self.data.db.add_blockchain_accounts(
            blockchain=blockchain,
            account_data=account_data,
        )

        return updated_balances

    def edit_blockchain_accounts(
        self,
        blockchain: SupportedBlockchain,
        account_data: List[BlockchainAccountData],
    ) -> None:
        """Edits blockchain accounts

        Edits blockchain account data for the given accounts

        May raise:
        - InputError if the given accounts list is empty or if
        any of the accounts to edit do not exist.
        - TagConstraintError if any of the given account data contain unknown tags.
        """
        # First check for validity of account data addresses
        if len(account_data) == 0:
            raise InputError(
                'Empty list of blockchain account data to edit was given')
        accounts = [x.address for x in account_data]
        unknown_accounts = set(accounts).difference(
            self.chain_manager.accounts.get(blockchain))
        if len(unknown_accounts) != 0:
            raise InputError(
                f'Tried to edit unknown {blockchain.value} '
                f'accounts {",".join(unknown_accounts)}', )

        self.data.db.ensure_tags_exist(
            given_data=account_data,
            action='editing',
            data_type='blockchain accounts',
        )

        # Finally edit the accounts
        self.data.db.edit_blockchain_accounts(
            blockchain=blockchain,
            account_data=account_data,
        )

        return None
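
The unknown-account validation above boils down to a set difference between the requested addresses and the tracked ones:

# The validation reduces to a set difference; hypothetical addresses.
tracked = {'0xabc', '0xdef'}
requested = ['0xabc', '0x123']
unknown = set(requested).difference(tracked)
assert unknown == {'0x123'}  # exactly these would trigger the InputError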

    def remove_blockchain_accounts(
        self,
        blockchain: SupportedBlockchain,
        accounts: ListOfBlockchainAddresses,
    ) -> BlockchainBalancesUpdate:
        """Removes blockchain accounts

        Removes the accounts from the blockchain instance and queries them to get
        the updated balances. Also removes them from the DB

        May raise:
        - RemoteError if an external service such as Etherscan is queried and
          there is a problem with its query.
        - InputError if a non-existing account was given to remove
        """
        balances_update = self.chain_manager.remove_blockchain_accounts(
            blockchain=blockchain,
            accounts=accounts,
        )
        self.data.db.remove_blockchain_accounts(blockchain, accounts)
        return balances_update

    def process_history(
        self,
        start_ts: Timestamp,
        end_ts: Timestamp,
    ) -> Tuple[Dict[str, Any], str]:
        (
            error_or_empty,
            history,
            loan_history,
            asset_movements,
            eth_transactions,
            defi_events,
        ) = self.trades_historian.get_history(
            start_ts=start_ts,
            end_ts=end_ts,
            has_premium=self.premium is not None,
        )
        result = self.accountant.process_history(
            start_ts=start_ts,
            end_ts=end_ts,
            trade_history=history,
            loan_history=loan_history,
            asset_movements=asset_movements,
            eth_transactions=eth_transactions,
            defi_events=defi_events,
        )
        return result, error_or_empty

    def query_balances(
        self,
        requested_save_data: bool = False,
        timestamp: Optional[Timestamp] = None,
        ignore_cache: bool = False,
    ) -> Dict[str, Any]:
        """Query all balances rotkehlchen can see.

        If requested_save_data is True then the data are always saved in the DB;
        if it is False then data are saved only if self.data.should_save_balances()
        is True.
        If timestamp is None then the current timestamp is used.
        If a timestamp is given then that is the time that the balances are going
        to be saved in the DB.
        If ignore_cache is True then all underlying calls that have a cache ignore it.

        Returns a dictionary with the queried balances.
        """
        log.info('query_balances called',
                 requested_save_data=requested_save_data)

        balances = {}
        problem_free = True
        for _, exchange in self.exchange_manager.connected_exchanges.items():
            exchange_balances, _ = exchange.query_balances(
                ignore_cache=ignore_cache)
            # If we got an error, disregard that exchange but make sure we don't save data
            if not isinstance(exchange_balances, dict):
                problem_free = False
            else:
                balances[exchange.name] = exchange_balances

        try:
            blockchain_result = self.chain_manager.query_balances(
                blockchain=None,
                force_token_detection=ignore_cache,
                ignore_cache=ignore_cache,
            )
            balances['blockchain'] = {
                asset: balance.to_dict()
                for asset, balance in blockchain_result.totals.items()
            }
        except (RemoteError, EthSyncError) as e:
            problem_free = False
            log.error(f'Querying blockchain balances failed due to: {str(e)}')

        balances = account_for_manually_tracked_balances(db=self.data.db,
                                                         balances=balances)

        combined = combine_stat_dicts(list(balances.values()))
        total_usd_per_location = [(k, dict_get_sumof(v, 'usd_value'))
                                  for k, v in balances.items()]

        # calculate net usd value
        net_usd = FVal(0)
        for _, v in combined.items():
            net_usd += FVal(v['usd_value'])

        stats: Dict[str, Any] = {
            'location': {},
            'net_usd': net_usd,
        }
        for entry in total_usd_per_location:
            name = entry[0]
            total = entry[1]
            if net_usd != FVal(0):
                percentage = (total / net_usd).to_percentage()
            else:
                percentage = '0%'
            stats['location'][name] = {
                'usd_value': total,
                'percentage_of_net_value': percentage,
            }

        for k, v in combined.items():
            if net_usd != FVal(0):
                percentage = (v['usd_value'] / net_usd).to_percentage()
            else:
                percentage = '0%'
            combined[k]['percentage_of_net_value'] = percentage

        result_dict = merge_dicts(combined, stats)

        allowed_to_save = requested_save_data or self.data.should_save_balances()

        if problem_free and allowed_to_save:
            if not timestamp:
                timestamp = Timestamp(int(time.time()))
            self.data.save_balances_data(data=result_dict, timestamp=timestamp)
            log.debug('query_balances data saved')
        else:
            log.debug(
                'query_balances data not saved',
                allowed_to_save=allowed_to_save,
                problem_free=problem_free,
            )

        # After adding it to the saved file we can overlay additional data that
        # is not required to be saved in the history file
        try:
            details = self.accountant.events.details
            for asset, (tax_free_amount, average_buy_value) in details.items():
                if asset not in result_dict:
                    continue

                result_dict[asset]['tax_free_amount'] = tax_free_amount
                result_dict[asset]['average_buy_value'] = average_buy_value

                current_price = result_dict[asset]['usd_value'] / result_dict[asset]['amount']
                if average_buy_value != FVal(0):
                    result_dict[asset]['percent_change'] = (
                        ((current_price - average_buy_value) /
                         average_buy_value) * 100)
                else:
                    result_dict[asset]['percent_change'] = 'INF'

        except AttributeError:
            pass

        return result_dict
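
The percent_change overlay is the usual return formula ((current - cost) / cost) * 100, with the string 'INF' standing in when the average buy value is zero. Worked numbers:

# percent_change as computed above: current price 150 vs average buy 100 -> +50%.
current_price, average_buy_value = 150, 100
assert ((current_price - average_buy_value) / average_buy_value) * 100 == 50.0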

    def set_settings(self, settings: ModifiableDBSettings) -> Tuple[bool, str]:
        """Tries to set new settings. Returns True in success or False with message if error"""
        with self.lock:
            if settings.eth_rpc_endpoint is not None:
                result, msg = self.chain_manager.set_eth_rpc_endpoint(
                    settings.eth_rpc_endpoint)
                if not result:
                    return False, msg

            if settings.kraken_account_type is not None:
                kraken = self.exchange_manager.get('kraken')
                if kraken:
                    kraken.set_account_type(
                        settings.kraken_account_type)  # type: ignore

            self.data.db.set_settings(settings)
            return True, ''

    def get_settings(self) -> DBSettings:
        """Returns the db settings with a check whether premium is active or not"""
        db_settings = self.data.db.get_settings(
            have_premium=self.premium is not None)
        return db_settings

    def setup_exchange(
        self,
        name: str,
        api_key: ApiKey,
        api_secret: ApiSecret,
        passphrase: Optional[str] = None,
    ) -> Tuple[bool, str]:
        """
        Setup a new exchange with an api key and an api secret and optionally a passphrase

        By default the api keys are always validated unless validate is False.
        """
        is_success, msg = self.exchange_manager.setup_exchange(
            name=name,
            api_key=api_key,
            api_secret=api_secret,
            database=self.data.db,
            passphrase=passphrase,
        )

        if is_success:
            # Success, save the result in the DB
            self.data.db.add_exchange(name,
                                      api_key,
                                      api_secret,
                                      passphrase=passphrase)
        return is_success, msg

    def remove_exchange(self, name: str) -> Tuple[bool, str]:
        if not self.exchange_manager.has_exchange(name):
            return False, 'Exchange {} is not registered'.format(name)

        self.exchange_manager.delete_exchange(name)
        # Success, remove it also from the DB
        self.data.db.remove_exchange(name)
        self.data.db.delete_used_query_range_for_exchange(name)
        return True, ''

    def query_periodic_data(self) -> Dict[str, Union[bool, Timestamp]]:
        """Query for frequently changing data"""
        result: Dict[str, Union[bool, Timestamp]] = {}

        if self.user_is_logged_in:
            result['last_balance_save'] = self.data.db.get_last_balance_save_time()
            result['eth_node_connection'] = self.chain_manager.ethereum.web3_mapping.get(NodeName.OWN, None) is not None  # noqa: E501
            result['history_process_start_ts'] = self.accountant.started_processing_timestamp
            result['history_process_current_ts'] = self.accountant.currently_processing_timestamp
        return result

    def shutdown(self) -> None:
        self.logout()
        self.shutdown_event.set()
Beispiel #27
0
def fixture_cryptocompare(data_dir, database):
    return Cryptocompare(data_directory=data_dir, database=database)
Beispiel #28
0
class Rotkehlchen():
    def __init__(self, args: argparse.Namespace) -> None:
        self.lock = Semaphore()
        self.lock.acquire()

        # Can also be None after unlock if premium credentials did not
        # authenticate or premium server temporarily offline
        self.premium: Optional[Premium] = None
        self.user_is_logged_in = False

        logfilename = None
        if args.logtarget == 'file':
            logfilename = args.logfile

        if args.loglevel == 'debug':
            loglevel = logging.DEBUG
        elif args.loglevel == 'info':
            loglevel = logging.INFO
        elif args.loglevel == 'warn':
            loglevel = logging.WARN
        elif args.loglevel == 'error':
            loglevel = logging.ERROR
        elif args.loglevel == 'critical':
            loglevel = logging.CRITICAL
        else:
            raise AssertionError('Should never get here. Illegal log value')

        logging.basicConfig(
            filename=logfilename,
            filemode='w',
            level=loglevel,
            format='%(asctime)s -- %(levelname)s:%(name)s:%(message)s',
            datefmt='%d/%m/%Y %H:%M:%S %Z',
        )

        if not args.logfromothermodules:
            logging.getLogger('urllib3').setLevel(logging.CRITICAL)
            logging.getLogger('urllib3.connectionpool').setLevel(logging.CRITICAL)

        self.sleep_secs = args.sleep_secs
        self.data_dir = args.data_dir
        self.args = args
        self.msg_aggregator = MessagesAggregator()
        self.greenlet_manager = GreenletManager(
            msg_aggregator=self.msg_aggregator)
        self.exchange_manager = ExchangeManager(
            msg_aggregator=self.msg_aggregator)
        self.all_eth_tokens = AssetResolver().get_all_eth_tokens()
        self.data = DataHandler(self.data_dir, self.msg_aggregator)
        self.cryptocompare = Cryptocompare(data_directory=self.data_dir,
                                           database=None)
        # Initialize the Inquirer singleton
        Inquirer(data_dir=self.data_dir, cryptocompare=self.cryptocompare)

        self.lock.release()
        self.shutdown_event = gevent.event.Event()
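
The loglevel if/elif chain in this older __init__ maps CLI strings onto logging constants. A table-driven equivalent (a sketch, not rotki's code) is shorter and fails the same way on unknown values:

# Sketch: table-driven loglevel lookup equivalent to the chain above.
import logging

LOGLEVELS = {
    'debug': logging.DEBUG,
    'info': logging.INFO,
    'warn': logging.WARN,
    'error': logging.ERROR,
    'critical': logging.CRITICAL,
}

def resolve_loglevel(name: str) -> int:
    try:
        return LOGLEVELS[name]
    except KeyError:
        raise AssertionError('Should never get here. Illegal log value') from None

assert resolve_loglevel('warn') == logging.WARN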

    def reset_after_failed_account_creation_or_login(self) -> None:
        """If the account creation or login failed make sure that the Rotki instance is clear

        Tricky cases are when we log in again after either failed premium
        credentials or the user's refusal to sync premium databases.
        """
        self.cryptocompare.db = None

    def unlock_user(
        self,
        user: str,
        password: str,
        create_new: bool,
        sync_approval: str,
        premium_credentials: Optional[PremiumCredentials],
    ) -> None:
        """Unlocks an existing user or creates a new one if `create_new` is True

        May raise:
        - PremiumAuthenticationError if the password can't unlock the database.
        - AuthenticationError if premium_credentials are given and are invalid
        or can't authenticate with the server
        - DBUpgradeError if the rotki DB version is newer than the software or
        there is a DB upgrade and there is an error.
        """
        log.info(
            'Unlocking user',
            user=user,
            create_new=create_new,
            sync_approval=sync_approval,
        )
        # unlock or create the DB
        self.password = password
        self.user_directory = self.data.unlock(user, password, create_new)
        self.data_importer = DataImporter(db=self.data.db)
        self.last_data_upload_ts = self.data.db.get_last_data_upload_ts()
        self.premium_sync_manager = PremiumSyncManager(data=self.data,
                                                       password=password)
        # set the DB in the external services instances that need it
        self.cryptocompare.set_database(self.data.db)

        # Anything that was set above here has to be cleaned in case of failure in the next step
        # by reset_after_failed_account_creation_or_login()
        try:
            self.premium = self.premium_sync_manager.try_premium_at_start(
                given_premium_credentials=premium_credentials,
                username=user,
                create_new=create_new,
                sync_approval=sync_approval,
            )
        except PremiumAuthenticationError:
            # Reraise it only if this is during the creation of a new account where
            # the premium credentials were given by the user
            if create_new:
                raise
            # else let's just continue. The user signed in successfully, but just
            # has unauthenticatable/invalid premium credentials remaining in the DB

        settings = self.get_settings()
        maybe_submit_usage_analytics(settings.submit_usage_analytics)
        self.etherscan = Etherscan(database=self.data.db,
                                   msg_aggregator=self.msg_aggregator)
        alethio = Alethio(
            database=self.data.db,
            msg_aggregator=self.msg_aggregator,
            all_eth_tokens=self.all_eth_tokens,
        )
        historical_data_start = settings.historical_data_start
        eth_rpc_endpoint = settings.eth_rpc_endpoint
        # Initialize the price historian singleton
        PriceHistorian(
            data_directory=self.data_dir,
            history_date_start=historical_data_start,
            cryptocompare=self.cryptocompare,
        )
        self.accountant = Accountant(
            db=self.data.db,
            user_directory=self.user_directory,
            msg_aggregator=self.msg_aggregator,
            create_csv=True,
        )

        # Initialize the rotkehlchen logger
        LoggingSettings(anonymized_logs=settings.anonymized_logs)
        exchange_credentials = self.data.db.get_exchange_credentials()
        self.exchange_manager.initialize_exchanges(
            exchange_credentials=exchange_credentials,
            database=self.data.db,
        )

        # Initialize blockchain querying modules
        ethchain = Ethchain(
            ethrpc_endpoint=eth_rpc_endpoint,
            etherscan=self.etherscan,
            msg_aggregator=self.msg_aggregator,
        )
        makerdao = MakerDAO(
            ethchain=ethchain,
            database=self.data.db,
            msg_aggregator=self.msg_aggregator,
        )
        self.chain_manager = ChainManager(
            blockchain_accounts=self.data.db.get_blockchain_accounts(),
            owned_eth_tokens=self.data.db.get_owned_tokens(),
            ethchain=ethchain,
            msg_aggregator=self.msg_aggregator,
            alethio=alethio,
            greenlet_manager=self.greenlet_manager,
            eth_modules={'makerdao': makerdao},
        )
        self.ethereum_analyzer = EthereumAnalyzer(
            ethchain=ethchain,
            database=self.data.db,
        )
        self.trades_historian = TradesHistorian(
            user_directory=self.user_directory,
            db=self.data.db,
            msg_aggregator=self.msg_aggregator,
            exchange_manager=self.exchange_manager,
            chain_manager=self.chain_manager,
        )
        self.user_is_logged_in = True

    def logout(self) -> None:
        if not self.user_is_logged_in:
            return

        user = self.data.username
        log.info(
            'Logging out user',
            user=user,
        )
        del self.chain_manager
        self.exchange_manager.delete_all_exchanges()

        # Reset rotkehlchen logger to default
        LoggingSettings(anonymized_logs=DEFAULT_ANONYMIZED_LOGS)

        del self.accountant
        del self.trades_historian
        del self.data_importer

        if self.premium is not None:
            del self.premium
        self.data.logout()
        self.password = ''
        self.cryptocompare.unset_database()

        # Make sure no messages leak to other user sessions
        self.msg_aggregator.consume_errors()
        self.msg_aggregator.consume_warnings()

        self.user_is_logged_in = False
        log.info(
            'User successfully logged out',
            user=user,
        )

    def set_premium_credentials(self, credentials: PremiumCredentials) -> None:
        """
        Sets the premium credentials for Rotki

        Raises PremiumAuthenticationError if the given key is rejected by the Rotkehlchen server
        """
        log.info('Setting new premium credentials')
        if self.premium is not None:
            self.premium.set_credentials(credentials)
        else:
            self.premium = premium_create_and_verify(credentials)

        self.data.db.set_rotkehlchen_premium(credentials)

    def start(self) -> gevent.Greenlet:
        return gevent.spawn(self.main_loop)

    def main_loop(self) -> None:
        """Rotki main loop that fires often and manages many different tasks

        Each task remembers the last time it ran successfully and knows how often
        it should run. So each task manages itself.
        """
        while self.shutdown_event.wait(MAIN_LOOP_SECS_DELAY) is not True:
            if self.user_is_logged_in:
                log.debug('Main loop start')
                self.premium_sync_manager.maybe_upload_data_to_server()
                self.ethereum_analyzer.analyze_ethereum_transactions()
                log.debug('Main loop end')

    def add_blockchain_accounts(
        self,
        blockchain: SupportedBlockchain,
        account_data: List[BlockchainAccountData],
    ) -> BlockchainBalancesUpdate:
        """Adds new blockchain accounts

        Adds the accounts to the blockchain instance and queries them to get the
        updated balances. Also adds them in the DB

        May raise:
        - EthSyncError from modify_blockchain_account
        - InputError if the given accounts list is empty.
        - TagConstraintError if any of the given account data contain unknown tags.
        - RemoteError if an external service such as Etherscan is queried and
          there is a problem with its query.
        """
        # First see if any of the given tags for the accounts do not exist in the DB
        existing_tags = self.data.db.get_tags()
        existing_tag_keys = existing_tags.keys()
        unknown_tags: Set[str] = set()
        for entry in account_data:
            if entry.tags is not None:
                unknown_tags.update(
                    set(entry.tags).difference(existing_tag_keys))

        if len(unknown_tags) != 0:
            raise TagConstraintError(
                f'When adding blockchain accounts, unknown tags '
                f'{", ".join(unknown_tags)} were found', )

        updated_balances = self.chain_manager.add_blockchain_accounts(
            blockchain=blockchain,
            accounts=[entry.address for entry in account_data],
        )
        self.data.db.add_blockchain_accounts(
            blockchain=blockchain,
            account_data=account_data,
        )

        return updated_balances

    def edit_blockchain_accounts(
        self,
        blockchain: SupportedBlockchain,
        account_data: List[BlockchainAccountData],
    ) -> None:
        """Edits blockchain accounts

        Edits blockchain account data for the given accounts

        May raise:
        - InputError if the given accounts list is empty or if
        any of the accounts to edit do not exist.
        - TagConstraintError if any of the given account data contain unknown tags.
        """
        # First check for validity of account data addresses
        if len(account_data) == 0:
            raise InputError('Empty list of blockchain account data to edit was given')
        accounts = [x.address for x in account_data]
        unknown_accounts = set(accounts).difference(self.chain_manager.accounts.get(blockchain))
        if len(unknown_accounts) != 0:
            raise InputError(
                f'Tried to edit unknown {blockchain.value} '
                f'accounts {",".join(unknown_accounts)}',
            )

        # Then see if any of the given tags for the accounts do not exist in the DB
        existing_tags = self.data.db.get_tags()
        existing_tag_keys = existing_tags.keys()
        unknown_tags: Set[str] = set()
        for entry in account_data:
            if entry.tags is not None:
                unknown_tags.update(set(entry.tags).difference(existing_tag_keys))

        if len(unknown_tags) != 0:
            raise TagConstraintError(
                f'When editing blockchain accounts, unknown tags '
                f'{", ".join(unknown_tags)} were found',
            )

        # Finally edit the accounts
        self.data.db.edit_blockchain_accounts(
            blockchain=blockchain,
            account_data=account_data,
        )

    def remove_blockchain_accounts(
        self,
        blockchain: SupportedBlockchain,
        accounts: ListOfBlockchainAddresses,
    ) -> BlockchainBalancesUpdate:
        """Removes blockchain accounts

        Removes the accounts from the blockchain instance and queries them to get
        the updated balances. Also removes them from the DB

        May raise:
        - RemoteError if an external service such as Etherscan is queried and
          there is a problem with its query.
        - InputError if a non-existing account was given to remove
        """
        balances_update = self.chain_manager.remove_blockchain_accounts(
            blockchain=blockchain,
            accounts=accounts,
        )
        self.data.db.remove_blockchain_accounts(blockchain, accounts)
        return balances_update

    def add_owned_eth_tokens(
        self,
        tokens: List[EthereumToken],
    ) -> BlockchainBalancesUpdate:
        """Adds tokens to the blockchain state and updates balance of all accounts

        May raise:
        - InputError if some of the tokens already exist
        - RemoteError if an external service such as Etherscan is queried and
          there is a problem with its query.
        - EthSyncError if querying the token balances through a provided ethereum
          client and the chain is not synced
        """
        new_data = self.chain_manager.track_new_tokens(tokens)
        self.data.write_owned_eth_tokens(self.chain_manager.owned_eth_tokens)
        return new_data

    def remove_owned_eth_tokens(
        self,
        tokens: List[EthereumToken],
    ) -> BlockchainBalancesUpdate:
        """
        Removes tokens from the state and stops their balance from being tracked
        for each account

        May raise:
        - RemoteError if an external service such as Etherscan is queried and
          there is a problem with its query.
        - EthSyncError if querying the token balances through a provided ethereum
          client and the chain is not synced
        """
        new_data = self.chain_manager.remove_eth_tokens(tokens)
        self.data.write_owned_eth_tokens(self.chain_manager.owned_eth_tokens)
        return new_data

    def process_history(
        self,
        start_ts: Timestamp,
        end_ts: Timestamp,
    ) -> Tuple[Dict[str, Any], str]:
        (
            error_or_empty,
            history,
            loan_history,
            asset_movements,
            eth_transactions,
        ) = self.trades_historian.get_history(
            start_ts=start_ts,
            end_ts=end_ts,
            has_premium=self.premium is not None,
        )
        result = self.accountant.process_history(
            start_ts=start_ts,
            end_ts=end_ts,
            trade_history=history,
            loan_history=loan_history,
            asset_movements=asset_movements,
            eth_transactions=eth_transactions,
        )
        return result, error_or_empty

    def query_fiat_balances(self) -> Dict[Asset, Dict[str, FVal]]:
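        """Query the fiat balances stored in the DB and compute their current USD value"""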
        result = {}
        balances = self.data.get_fiat_balances()
        for currency, str_amount in balances.items():
            amount = FVal(str_amount)
            usd_rate = Inquirer().query_fiat_pair(currency, A_USD)
            result[currency] = {
                'amount': amount,
                'usd_value': amount * usd_rate,
            }

        return result

    def query_balances(
        self,
        requested_save_data: bool = True,
        timestamp: Optional[Timestamp] = None,
        ignore_cache: bool = False,
    ) -> Dict[str, Any]:
        """Query all balances rotkehlchen can see.

        If requested_save_data is True then the data are saved in the DB.
        If timestamp is None then the current timestamp is used.
        If a timestamp is given then that is the time that the balances are going
        to be saved in the DB.
        If ignore_cache is True then all underlying calls that have a cache ignore it.

        Returns a dictionary with the queried balances.
        """
        log.info('query_balances called',
                 requested_save_data=requested_save_data)

        balances = {}
        problem_free = True
        for _, exchange in self.exchange_manager.connected_exchanges.items():
            exchange_balances, _ = exchange.query_balances(
                ignore_cache=ignore_cache)
            # If we got an error, disregard that exchange but make sure we don't save data
            if not isinstance(exchange_balances, dict):
                problem_free = False
            else:
                balances[exchange.name] = exchange_balances

        try:
            blockchain_result = self.chain_manager.query_balances(
                blockchain=None,
                ignore_cache=ignore_cache,
            )
            balances['blockchain'] = {
                asset: balance.to_dict()
                for asset, balance in blockchain_result.totals.items()
            }
        except (RemoteError, EthSyncError) as e:
            problem_free = False
            log.error(f'Querying blockchain balances failed due to: {str(e)}')

        result = self.query_fiat_balances()
        if result != {}:
            balances['banks'] = result

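        # Merge the per-location balance dicts into a single asset -> stats mapping
        # and compute the total USD value per location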
        combined = combine_stat_dicts([v for k, v in balances.items()])
        total_usd_per_location = [(k, dict_get_sumof(v, 'usd_value'))
                                  for k, v in balances.items()]

        # calculate net usd value
        net_usd = FVal(0)
        for _, v in combined.items():
            net_usd += FVal(v['usd_value'])

        stats: Dict[str, Any] = {
            'location': {},
            'net_usd': net_usd,
        }
        for name, total in total_usd_per_location:
            if net_usd != FVal(0):
                percentage = (total / net_usd).to_percentage()
            else:
                percentage = '0%'
            stats['location'][name] = {
                'usd_value': total,
                'percentage_of_net_value': percentage,
            }

        for k, v in combined.items():
            if net_usd != FVal(0):
                percentage = (v['usd_value'] / net_usd).to_percentage()
            else:
                percentage = '0%'
            combined[k]['percentage_of_net_value'] = percentage

        result_dict = merge_dicts(combined, stats)

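        # Save only when the caller requested it and the DB says a save is due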
        allowed_to_save = requested_save_data and self.data.should_save_balances()
        if problem_free and allowed_to_save:
            if not timestamp:
                timestamp = Timestamp(int(time.time()))
            self.data.save_balances_data(data=result_dict, timestamp=timestamp)
            log.debug('query_balances data saved')
        else:
            log.debug(
                'query_balances data not saved',
                allowed_to_save=allowed_to_save,
                problem_free=problem_free,
            )

        # After adding it to the saved file we can overlay additional data that
        # is not required to be saved in the history file
        try:
            details = self.accountant.events.details
            for asset, (tax_free_amount, average_buy_value) in details.items():
                if asset not in result_dict:
                    continue

                result_dict[asset]['tax_free_amount'] = tax_free_amount
                result_dict[asset]['average_buy_value'] = average_buy_value

                current_price = result_dict[asset]['usd_value'] / result_dict[asset]['amount']
                if average_buy_value != FVal(0):
                    result_dict[asset]['percent_change'] = (
                        ((current_price - average_buy_value) / average_buy_value) * 100
                    )
                else:
                    result_dict[asset]['percent_change'] = 'INF'

        except AttributeError:
            pass

        return result_dict

    def set_settings(self, settings: ModifiableDBSettings) -> Tuple[bool, str]:
        """Tries to set new settings. Returns True in success or False with message if error"""
        with self.lock:
            if settings.eth_rpc_endpoint is not None:
                result, msg = self.chain_manager.set_eth_rpc_endpoint(
                    settings.eth_rpc_endpoint)
                if not result:
                    return False, msg

            self.data.db.set_settings(settings)
            return True, ''

    def get_settings(self) -> DBSettings:
        """Returns the db settings with a check whether premium is active or not"""
        db_settings = self.data.db.get_settings(have_premium=self.premium is not None)
        return db_settings

    def setup_exchange(
        self,
        name: str,
        api_key: ApiKey,
        api_secret: ApiSecret,
        passphrase: Optional[str] = None,
    ) -> Tuple[bool, str]:
        """
        Setup a new exchange with an api key and an api secret

        By default the api keys are always validated unless validate is False.
        """
        is_success, msg = self.exchange_manager.setup_exchange(
            name=name,
            api_key=api_key,
            api_secret=api_secret,
            database=self.data.db,
            passphrase=passphrase,
        )

        if is_success:
            # Success, save the result in the DB
            self.data.db.add_exchange(name, api_key, api_secret, passphrase=passphrase)
        return is_success, msg

    def remove_exchange(self, name: str) -> Tuple[bool, str]:
        if not self.exchange_manager.has_exchange(name):
            return False, f'Exchange {name} is not registered'

        self.exchange_manager.delete_exchange(name)
        # Success, remove it also from the DB
        self.data.db.remove_exchange(name)
        self.data.db.delete_used_query_range_for_exchange(name)
        return True, ''

    def query_periodic_data(self) -> Dict[str, Union[bool, Timestamp]]:
        """Query for frequently changing data"""
        result: Dict[str, Union[bool, Timestamp]] = {}

        if self.user_is_logged_in:
            result['last_balance_save'] = self.data.db.get_last_balance_save_time()
            result['eth_node_connection'] = self.chain_manager.ethchain.connected
            result['history_process_start_ts'] = self.accountant.started_processing_timestamp
            result['history_process_current_ts'] = self.accountant.currently_processing_timestamp
        return result

    def shutdown(self) -> None:
        self.logout()
        self.shutdown_event.set()
Example #29
def fixture_session_cryptocompare(session_data_dir, session_database):
    return Cryptocompare(data_directory=session_data_dir,
                         database=session_database)
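A hedged usage sketch, not part of the scraped source: a test consuming the fixture above, assuming the function is registered as a pytest fixture named session_cryptocompare. `create_cache` is taken from Example #30 further down; the asset constants A_BTC and A_USD are assumed imports.

def test_btc_usd_cache_creation(session_cryptocompare):
    # Warm the BTC/USD historical price cache via the shared instance
    session_cryptocompare.create_cache(
        from_asset=A_BTC,
        to_asset=A_USD,
        purge_old=False,
    )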
Example #30
class Rotkehlchen():
    def __init__(self, args: argparse.Namespace) -> None:
        """Initialize the Rotkehlchen object

        This runs during backend initialization so it should be as light as possible.

        May Raise:
        - SystemPermissionError if the given data directory's permissions
        are not correct.
        """
        # Can also be None after unlock if premium credentials did not
        # authenticate or the premium server is temporarily offline
        self.premium: Optional[Premium] = None
        self.user_is_logged_in: bool = False
        configure_logging(args)

        self.sleep_secs = args.sleep_secs
        if args.data_dir is None:
            self.data_dir = default_data_directory()
        else:
            self.data_dir = Path(args.data_dir)
            self.data_dir.mkdir(parents=True, exist_ok=True)

        if not os.access(self.data_dir, os.W_OK | os.R_OK):
            raise SystemPermissionError(
                f'The given data directory {self.data_dir} is not readable or writable',
            )
        self.main_loop_spawned = False
        self.args = args
        self.api_task_greenlets: List[gevent.Greenlet] = []
        self.msg_aggregator = MessagesAggregator()
        self.greenlet_manager = GreenletManager(msg_aggregator=self.msg_aggregator)
        self.rotki_notifier = RotkiNotifier(greenlet_manager=self.greenlet_manager)
        self.msg_aggregator.rotki_notifier = self.rotki_notifier
        self.exchange_manager = ExchangeManager(msg_aggregator=self.msg_aggregator)
        # Initialize the GlobalDBHandler singleton. Has to be initialized BEFORE asset resolver
        GlobalDBHandler(data_dir=self.data_dir)
        self.data = DataHandler(self.data_dir, self.msg_aggregator)
        self.cryptocompare = Cryptocompare(data_directory=self.data_dir, database=None)
        self.coingecko = Coingecko()
        self.icon_manager = IconManager(data_dir=self.data_dir, coingecko=self.coingecko)
        self.assets_updater = AssetsUpdater(self.msg_aggregator)
        # Initialize the Inquirer singleton
        Inquirer(
            data_dir=self.data_dir,
            cryptocompare=self.cryptocompare,
            coingecko=self.coingecko,
        )
        self.task_manager: Optional[TaskManager] = None
        self.shutdown_event = gevent.event.Event()

    def reset_after_failed_account_creation_or_login(self) -> None:
        """If the account creation or login failed make sure that the rotki instance is clear

        Tricky instances are when after either failed premium credentials or user refusal
        to sync premium databases we relogged in
        """
        self.cryptocompare.db = None

    def unlock_user(
            self,
            user: str,
            password: str,
            create_new: bool,
            sync_approval: Literal['yes', 'no', 'unknown'],
            premium_credentials: Optional[PremiumCredentials],
            initial_settings: Optional[ModifiableDBSettings] = None,
            sync_database: bool = True,
    ) -> None:
        """Unlocks an existing user or creates a new one if `create_new` is True

        May raise:
        - AuthenticationError if the password can't unlock the database.
        - PremiumAuthenticationError if premium_credentials are given and are invalid
        or can't authenticate with the server
        - DBUpgradeError if the rotki DB version is newer than the software or
        there is a DB upgrade and there is an error.
        - SystemPermissionError if the directory or DB file can not be accessed
        """
        log.info(
            'Unlocking user',
            user=user,
            create_new=create_new,
            sync_approval=sync_approval,
            sync_database=sync_database,
            initial_settings=initial_settings,
        )

        # unlock or create the DB
        self.password = password
        self.user_directory = self.data.unlock(user, password, create_new, initial_settings)
        self.data_importer = DataImporter(db=self.data.db)
        self.last_data_upload_ts = self.data.db.get_last_data_upload_ts()
        self.premium_sync_manager = PremiumSyncManager(data=self.data, password=password)
        # set the DB in the external services instances that need it
        self.cryptocompare.set_database(self.data.db)

        # Anything that was set above here has to be cleaned in case of failure in the next step
        # by reset_after_failed_account_creation_or_login()
        try:
            self.premium = self.premium_sync_manager.try_premium_at_start(
                given_premium_credentials=premium_credentials,
                username=user,
                create_new=create_new,
                sync_approval=sync_approval,
                sync_database=sync_database,
            )
        except PremiumAuthenticationError:
            # Reraise it only if this is during the creation of a new account where
            # the premium credentials were given by the user
            if create_new:
                raise
            self.msg_aggregator.add_warning(
                'Could not authenticate the rotki premium API keys found in the DB.'
                ' Has your subscription expired?',
            )
            # else let's just continue. The user signed in successfully but simply
            # has invalid/unusable premium credentials remaining in the DB

        settings = self.get_settings()
        self.greenlet_manager.spawn_and_track(
            after_seconds=None,
            task_name='submit_usage_analytics',
            exception_is_error=False,
            method=maybe_submit_usage_analytics,
            data_dir=self.data_dir,
            should_submit=settings.submit_usage_analytics,
        )
        self.etherscan = Etherscan(database=self.data.db, msg_aggregator=self.msg_aggregator)
        self.beaconchain = BeaconChain(database=self.data.db, msg_aggregator=self.msg_aggregator)
        eth_rpc_endpoint = settings.eth_rpc_endpoint
        # Initialize the price historian singleton
        PriceHistorian(
            data_directory=self.data_dir,
            cryptocompare=self.cryptocompare,
            coingecko=self.coingecko,
        )
        PriceHistorian().set_oracles_order(settings.historical_price_oracles)

        self.accountant = Accountant(
            db=self.data.db,
            user_directory=self.user_directory,
            msg_aggregator=self.msg_aggregator,
            create_csv=True,
            premium=self.premium,
        )

        exchange_credentials = self.data.db.get_exchange_credentials()
        self.exchange_manager.initialize_exchanges(
            exchange_credentials=exchange_credentials,
            database=self.data.db,
        )

        # Initialize blockchain querying modules
        ethereum_manager = EthereumManager(
            ethrpc_endpoint=eth_rpc_endpoint,
            etherscan=self.etherscan,
            msg_aggregator=self.msg_aggregator,
            greenlet_manager=self.greenlet_manager,
            connect_at_start=ETHEREUM_NODES_TO_CONNECT_AT_START,
        )
        kusama_manager = SubstrateManager(
            chain=SubstrateChain.KUSAMA,
            msg_aggregator=self.msg_aggregator,
            greenlet_manager=self.greenlet_manager,
            connect_at_start=KUSAMA_NODES_TO_CONNECT_AT_START,
            connect_on_startup=self._connect_ksm_manager_on_startup(),
            own_rpc_endpoint=settings.ksm_rpc_endpoint,
        )
        polkadot_manager = SubstrateManager(
            chain=SubstrateChain.POLKADOT,
            msg_aggregator=self.msg_aggregator,
            greenlet_manager=self.greenlet_manager,
            connect_at_start=POLKADOT_NODES_TO_CONNECT_AT_START,
            connect_on_startup=self._connect_dot_manager_on_startup(),
            own_rpc_endpoint=settings.dot_rpc_endpoint,
        )
        self.covalent_avalanche = Covalent(
            database=self.data.db,
            msg_aggregator=self.msg_aggregator,
            chain_id=chains_id['avalanche'],
        )
        avalanche_manager = AvalancheManager(
            avaxrpc_endpoint='https://api.avax.network/ext/bc/C/rpc',
            covalent=self.covalent_avalanche,
            msg_aggregator=self.msg_aggregator,
        )

        Inquirer().inject_ethereum(ethereum_manager)
        Inquirer().set_oracles_order(settings.current_price_oracles)

        self.chain_manager = ChainManager(
            blockchain_accounts=self.data.db.get_blockchain_accounts(),
            ethereum_manager=ethereum_manager,
            kusama_manager=kusama_manager,
            polkadot_manager=polkadot_manager,
            avalanche_manager=avalanche_manager,
            msg_aggregator=self.msg_aggregator,
            database=self.data.db,
            greenlet_manager=self.greenlet_manager,
            premium=self.premium,
            eth_modules=settings.active_modules,
            data_directory=self.data_dir,
            beaconchain=self.beaconchain,
            btc_derivation_gap_limit=settings.btc_derivation_gap_limit,
        )
        self.events_historian = EventsHistorian(
            user_directory=self.user_directory,
            db=self.data.db,
            msg_aggregator=self.msg_aggregator,
            exchange_manager=self.exchange_manager,
            chain_manager=self.chain_manager,
        )
        self.task_manager = TaskManager(
            max_tasks_num=DEFAULT_MAX_TASKS_NUM,
            greenlet_manager=self.greenlet_manager,
            api_task_greenlets=self.api_task_greenlets,
            database=self.data.db,
            cryptocompare=self.cryptocompare,
            premium_sync_manager=self.premium_sync_manager,
            chain_manager=self.chain_manager,
            exchange_manager=self.exchange_manager,
        )
        DataMigrationManager(self).maybe_migrate_data()
        self.greenlet_manager.spawn_and_track(
            after_seconds=5,
            task_name='periodically_query_icons_until_all_cached',
            exception_is_error=False,
            method=self.icon_manager.periodically_query_icons_until_all_cached,
            batch_size=ICONS_BATCH_SIZE,
            sleep_time_secs=ICONS_QUERY_SLEEP,
        )
        self.user_is_logged_in = True
        log.debug('User unlocking complete')

    def _logout(self) -> None:
        if not self.user_is_logged_in:
            return
        user = self.data.username
        log.info(
            'Logging out user',
            user=user,
        )

        self.deactivate_premium_status()
        self.greenlet_manager.clear()
        del self.chain_manager
        self.exchange_manager.delete_all_exchanges()

        del self.accountant
        del self.events_historian
        del self.data_importer

        self.data.logout()
        self.password = ''
        self.cryptocompare.unset_database()

        # Make sure no messages leak to other user sessions
        self.msg_aggregator.consume_errors()
        self.msg_aggregator.consume_warnings()
        self.task_manager = None

        self.user_is_logged_in = False
        log.info(
            'User successfully logged out',
            user=user,
        )

    def logout(self) -> None:
        if self.task_manager is None:  # no user logged in?
            return

        with self.task_manager.schedule_lock:
            self._logout()

    def set_premium_credentials(self, credentials: PremiumCredentials) -> None:
        """
        Sets the premium credentials for rotki

        Raises PremiumAuthenticationError if the given key is rejected by the Rotkehlchen server
        """
        log.info('Setting new premium credentials')
        if self.premium is not None:
            self.premium.set_credentials(credentials)
        else:
            self.premium = premium_create_and_verify(credentials)

        self.premium_sync_manager.premium = self.premium
        self.accountant.activate_premium_status(self.premium)
        self.chain_manager.activate_premium_status(self.premium)

        self.data.db.set_rotkehlchen_premium(credentials)

    def deactivate_premium_status(self) -> None:
        """Deactivate premium in the current session"""
        self.premium = None
        self.premium_sync_manager.premium = None
        self.accountant.deactivate_premium_status()
        self.chain_manager.deactivate_premium_status()

    def delete_premium_credentials(self) -> Tuple[bool, str]:
        """Deletes the premium credentials for rotki"""
        msg = ''

        success = self.data.db.del_rotkehlchen_premium()
        if success is False:
            msg = 'The database was unable to delete the Premium keys for the logged-in user'
        self.deactivate_premium_status()
        return success, msg

    def start(self) -> gevent.Greenlet:
        assert not self.main_loop_spawned, 'Tried to spawn the main loop twice'
        greenlet = gevent.spawn(self.main_loop)
        self.main_loop_spawned = True
        return greenlet

    def main_loop(self) -> None:
        """rotki main loop that fires often and runs the task manager's scheduler"""
        while self.shutdown_event.wait(timeout=MAIN_LOOP_SECS_DELAY) is not True:
            if self.task_manager is not None:
                self.task_manager.schedule()

    def get_blockchain_account_data(
            self,
            blockchain: SupportedBlockchain,
    ) -> Union[List[BlockchainAccountData], Dict[str, Any]]:
        account_data = self.data.db.get_blockchain_account_data(blockchain)
        if blockchain != SupportedBlockchain.BITCOIN:
            return account_data

        xpub_data = self.data.db.get_bitcoin_xpub_data()
        addresses_to_account_data = {x.address: x for x in account_data}
        address_to_xpub_mappings = self.data.db.get_addresses_to_xpub_mapping(
            list(addresses_to_account_data.keys()),  # type: ignore
        )

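        # Group the account data of each derived address under its xpub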
        xpub_mappings: Dict['XpubData', List[BlockchainAccountData]] = {}
        for address, xpub_entry in address_to_xpub_mappings.items():
            if xpub_entry not in xpub_mappings:
                xpub_mappings[xpub_entry] = []
            xpub_mappings[xpub_entry].append(addresses_to_account_data[address])

        data: Dict[str, Any] = {'standalone': [], 'xpubs': []}
        # Add xpub data
        for xpub_entry in xpub_data:
            data_entry = xpub_entry.serialize()
            addresses = xpub_mappings.get(xpub_entry, None)
            data_entry['addresses'] = addresses if addresses and len(addresses) != 0 else None
            data['xpubs'].append(data_entry)
        # Add standalone addresses
        for account in account_data:
            if account.address not in address_to_xpub_mappings:
                data['standalone'].append(account)

        return data

    def add_blockchain_accounts(
            self,
            blockchain: SupportedBlockchain,
            account_data: List[BlockchainAccountData],
    ) -> BlockchainBalancesUpdate:
        """Adds new blockchain accounts

        Adds the accounts to the blockchain instance and queries them to get the
        updated balances. Also adds them in the DB

        May raise:
        - EthSyncError from modify_blockchain_account
        - InputError if the given accounts list is empty.
        - TagConstraintError if any of the given account data contain unknown tags.
        - RemoteError if an external service such as Etherscan is queried and
          there is a problem with its query.
        """
        self.data.db.ensure_tags_exist(
            given_data=account_data,
            action='adding',
            data_type='blockchain accounts',
        )
        address_type = blockchain.get_address_type()
        updated_balances = self.chain_manager.add_blockchain_accounts(
            blockchain=blockchain,
            accounts=[address_type(entry.address) for entry in account_data],
        )
        self.data.db.add_blockchain_accounts(
            blockchain=blockchain,
            account_data=account_data,
        )

        return updated_balances

    def edit_blockchain_accounts(
            self,
            blockchain: SupportedBlockchain,
            account_data: List[BlockchainAccountData],
    ) -> None:
        """Edits blockchain accounts

        Edits blockchain account data for the given accounts

        May raise:
        - InputError if the given accounts list is empty or if
        any of the accounts to edit do not exist.
        - TagConstraintError if any of the given account data contain unknown tags.
        """
        # First check for validity of account data addresses
        if len(account_data) == 0:
            raise InputError('Empty list of blockchain account data to edit was given')
        accounts = [x.address for x in account_data]
        unknown_accounts = set(accounts).difference(self.chain_manager.accounts.get(blockchain))
        if len(unknown_accounts) != 0:
            raise InputError(
                f'Tried to edit unknown {blockchain.value} '
                f'accounts {",".join(unknown_accounts)}',
            )

        self.data.db.ensure_tags_exist(
            given_data=account_data,
            action='editing',
            data_type='blockchain accounts',
        )

        # Finally edit the accounts
        self.data.db.edit_blockchain_accounts(
            blockchain=blockchain,
            account_data=account_data,
        )

    def remove_blockchain_accounts(
            self,
            blockchain: SupportedBlockchain,
            accounts: ListOfBlockchainAddresses,
    ) -> BlockchainBalancesUpdate:
        """Removes blockchain accounts

        Removes the accounts from the blockchain instance and queries them to get
        the updated balances. Also removes them from the DB

        May raise:
        - RemoteError if an external service such as Etherscan is queried and
          there is a problem with its query.
        - InputError if a non-existing account was given to remove
        """
        balances_update = self.chain_manager.remove_blockchain_accounts(
            blockchain=blockchain,
            accounts=accounts,
        )
        self.data.db.remove_blockchain_accounts(blockchain, accounts)
        return balances_update

    def get_history_query_status(self) -> Dict[str, str]:
        if self.events_historian.progress < FVal('100'):
            processing_state = self.events_historian.processing_state_name
            progress = self.events_historian.progress / 2
        elif self.accountant.first_processed_timestamp == -1:
            processing_state = 'Processing all retrieved historical events'
            progress = FVal(50)
        else:
            processing_state = 'Processing all retrieved historical events'
            # start_ts is min of the query start or the first action timestamp since action
            # processing can start well before query start to calculate cost basis
            start_ts = min(
                self.accountant.events.query_start_ts,
                self.accountant.first_processed_timestamp,
            )
            diff = self.accountant.events.query_end_ts - start_ts
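            # Map the fraction of processed timestamps onto the 50-100% half of the bar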
            progress = 50 + 100 * (
                FVal(self.accountant.currently_processing_timestamp - start_ts) /
                FVal(diff) / 2)

        return {'processing_state': str(processing_state), 'total_progress': str(progress)}

    def process_history(
            self,
            start_ts: Timestamp,
            end_ts: Timestamp,
    ) -> Tuple[int, str]:
        (
            error_or_empty,
            history,
            loan_history,
            asset_movements,
            eth_transactions,
            defi_events,
            ledger_actions,
        ) = self.events_historian.get_history(
            start_ts=start_ts,
            end_ts=end_ts,
            has_premium=self.premium is not None,
        )
        report_id = self.accountant.process_history(
            start_ts=start_ts,
            end_ts=end_ts,
            trade_history=history,
            loan_history=loan_history,
            asset_movements=asset_movements,
            eth_transactions=eth_transactions,
            defi_events=defi_events,
            ledger_actions=ledger_actions,
        )
        return report_id, error_or_empty

    def query_balances(
            self,
            requested_save_data: bool = False,
            save_despite_errors: bool = False,
            timestamp: Optional[Timestamp] = None,
            ignore_cache: bool = False,
    ) -> Dict[str, Any]:
        """Query all balances rotkehlchen can see.

        If requested_save_data is True then the data are always saved in the DB,
        if it is False then data are saved only if self.data.should_save_balances()
        is True.
        If save_despite_errors is True then the snapshot will be saved even if
        errors occurred during the query.
        If timestamp is None then the current timestamp is used.
        If a timestamp is given then that is the time that the balances are going
        to be saved in the DB.
        If ignore_cache is True then all underlying calls that have a cache ignore it.

        Returns a dictionary with the queried balances.
        """
        log.info(
            'query_balances called',
            requested_save_data=requested_save_data,
            save_despite_errors=save_despite_errors,
        )

        balances: Dict[str, Dict[Asset, Balance]] = {}
        problem_free = True
        for exchange in self.exchange_manager.iterate_exchanges():
            exchange_balances, error_msg = exchange.query_balances(ignore_cache=ignore_cache)
            # If we got an error, disregard that exchange but make sure we don't save data
            if not isinstance(exchange_balances, dict):
                problem_free = False
                self.msg_aggregator.add_message(
                    message_type=WSMessageType.BALANCE_SNAPSHOT_ERROR,
                    data={'location': exchange.name, 'error': error_msg},
                )
            else:
                location_str = str(exchange.location)
                if location_str not in balances:
                    balances[location_str] = exchange_balances
                else:  # multiple exchanges of the same type. Combine their balances
                    balances[location_str] = combine_dicts(
                        balances[location_str],
                        exchange_balances,
                    )

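        # The blockchain totals carry both assets and liabilities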
        liabilities: Dict[Asset, Balance]
        try:
            blockchain_result = self.chain_manager.query_balances(
                blockchain=None,
                force_token_detection=ignore_cache,
                ignore_cache=ignore_cache,
            )
            if len(blockchain_result.totals.assets) != 0:
                balances[str(Location.BLOCKCHAIN)] = blockchain_result.totals.assets
            liabilities = blockchain_result.totals.liabilities
        except (RemoteError, EthSyncError) as e:
            problem_free = False
            liabilities = {}
            log.error(f'Querying blockchain balances failed due to: {str(e)}')
            self.msg_aggregator.add_message(
                message_type=WSMessageType.BALANCE_SNAPSHOT_ERROR,
                data={'location': 'blockchain balances query', 'error': str(e)},
            )

        manually_tracked_liabilities = get_manually_tracked_balances(
            db=self.data.db,
            balance_type=BalanceType.LIABILITY,
        )
        manual_liabilities_as_dict: DefaultDict[Asset, Balance] = defaultdict(Balance)
        for manual_liability in manually_tracked_liabilities:
            manual_liabilities_as_dict[manual_liability.asset] += manual_liability.value

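        # Merge on-chain liabilities with the manually tracked ones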
        liabilities = combine_dicts(liabilities, manual_liabilities_as_dict)
        # retrieve loopring balances if module is activated
        if self.chain_manager.get_module('loopring'):
            try:
                loopring_balances = self.chain_manager.get_loopring_balances()
            except RemoteError as e:
                problem_free = False
                self.msg_aggregator.add_message(
                    message_type=WSMessageType.BALANCE_SNAPSHOT_ERROR,
                    data={'location': 'loopring', 'error': str(e)},
                )
            else:
                if len(loopring_balances) != 0:
                    balances[str(Location.LOOPRING)] = loopring_balances

        # retrieve nft balances if module is activated
        nfts = self.chain_manager.get_module('nfts')
        if nfts is not None:
            try:
                nft_mapping = nfts.get_balances(
                    addresses=self.chain_manager.queried_addresses_for_module('nfts'),
                    return_zero_values=False,
                    ignore_cache=False,
                )
            except RemoteError as e:
                problem_free = False
                self.msg_aggregator.add_message(
                    message_type=WSMessageType.BALANCE_SNAPSHOT_ERROR,
                    data={'location': 'nfts', 'error': str(e)},
                )
            else:
                if len(nft_mapping) != 0:
                    if str(Location.BLOCKCHAIN) not in balances:
                        balances[str(Location.BLOCKCHAIN)] = {}

                    for _, nft_balances in nft_mapping.items():
                        for balance_entry in nft_balances:
                            balances[str(Location.BLOCKCHAIN)][Asset(balance_entry['id'])] = Balance(  # noqa: E501
                                amount=FVal(1),
                                usd_value=balance_entry['usd_price'],
                            )

        balances = account_for_manually_tracked_asset_balances(db=self.data.db, balances=balances)

        # Calculate usd totals
        assets_total_balance: DefaultDict[Asset, Balance] = defaultdict(Balance)
        total_usd_per_location: Dict[str, FVal] = {}
        for location, asset_balance in balances.items():
            total_usd_per_location[location] = ZERO
            for asset, balance in asset_balance.items():
                assets_total_balance[asset] += balance
                total_usd_per_location[location] += balance.usd_value

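        # Net worth is the sum of all asset values minus all liabilities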
        net_usd = sum((balance.usd_value for _, balance in assets_total_balance.items()), ZERO)
        liabilities_total_usd = sum((liability.usd_value for _, liability in liabilities.items()), ZERO)  # noqa: E501
        net_usd -= liabilities_total_usd

        # Calculate location stats
        location_stats: Dict[str, Any] = {}
        for location, total_usd in total_usd_per_location.items():
            if location == str(Location.BLOCKCHAIN):
                total_usd -= liabilities_total_usd

            percentage = (total_usd / net_usd).to_percentage() if net_usd != ZERO else '0%'
            location_stats[location] = {
                'usd_value': total_usd,
                'percentage_of_net_value': percentage,
            }

        # Calculate 'percentage_of_net_value' per asset
        assets_total_balance_as_dict: Dict[Asset, Dict[str, Any]] = {
            asset: balance.to_dict() for asset, balance in assets_total_balance.items()
        }
        liabilities_as_dict: Dict[Asset, Dict[str, Any]] = {
            asset: balance.to_dict() for asset, balance in liabilities.items()
        }
        for asset, balance_dict in assets_total_balance_as_dict.items():
            percentage = (balance_dict['usd_value'] / net_usd).to_percentage() if net_usd != ZERO else '0%'  # noqa: E501
            assets_total_balance_as_dict[asset]['percentage_of_net_value'] = percentage

        for asset, balance_dict in liabilities_as_dict.items():
            percentage = (balance_dict['usd_value'] / net_usd).to_percentage() if net_usd != ZERO else '0%'  # noqa: E501
            liabilities_as_dict[asset]['percentage_of_net_value'] = percentage

        # Compose balances response
        result_dict = {
            'assets': assets_total_balance_as_dict,
            'liabilities': liabilities_as_dict,
            'location': location_stats,
            'net_usd': net_usd,
        }
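        # Save when the caller explicitly asked for it or the DB says a save is due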
        allowed_to_save = requested_save_data or self.data.should_save_balances()

        if (problem_free or save_despite_errors) and allowed_to_save:
            if not timestamp:
                timestamp = Timestamp(int(time.time()))
            self.data.db.save_balances_data(data=result_dict, timestamp=timestamp)
            log.debug('query_balances data saved')
        else:
            log.debug(
                'query_balances data not saved',
                allowed_to_save=allowed_to_save,
                problem_free=problem_free,
                save_despite_errors=save_despite_errors,
            )

        return result_dict

    def set_settings(self, settings: ModifiableDBSettings) -> Tuple[bool, str]:
        """Tries to set new settings. Returns True in success or False with message if error"""
        if settings.eth_rpc_endpoint is not None:
            result, msg = self.chain_manager.set_eth_rpc_endpoint(settings.eth_rpc_endpoint)
            if not result:
                return False, msg

        if settings.ksm_rpc_endpoint is not None:
            result, msg = self.chain_manager.set_ksm_rpc_endpoint(settings.ksm_rpc_endpoint)
            if not result:
                return False, msg

        if settings.dot_rpc_endpoint is not None:
            result, msg = self.chain_manager.set_dot_rpc_endpoint(settings.dot_rpc_endpoint)
            if not result:
                return False, msg

        if settings.btc_derivation_gap_limit is not None:
            self.chain_manager.btc_derivation_gap_limit = settings.btc_derivation_gap_limit

        if settings.current_price_oracles is not None:
            Inquirer().set_oracles_order(settings.current_price_oracles)

        if settings.historical_price_oracles is not None:
            PriceHistorian().set_oracles_order(settings.historical_price_oracles)
        if settings.active_modules is not None:
            self.chain_manager.process_new_modules_list(settings.active_modules)

        self.data.db.set_settings(settings)
        return True, ''

    def get_settings(self) -> DBSettings:
        """Returns the db settings with a check whether premium is active or not"""
        db_settings = self.data.db.get_settings(have_premium=self.premium is not None)
        return db_settings

    def setup_exchange(
            self,
            name: str,
            location: Location,
            api_key: ApiKey,
            api_secret: ApiSecret,
            passphrase: Optional[str] = None,
            kraken_account_type: Optional['KrakenAccountType'] = None,
            PAIRS: Optional[List[str]] = None,  # noqa: N803
            ftx_subaccount: Optional[str] = None,
    ) -> Tuple[bool, str]:
        """
        Setup a new exchange with an api key and an api secret and optionally a passphrase
        """
        is_success, msg = self.exchange_manager.setup_exchange(
            name=name,
            location=location,
            api_key=api_key,
            api_secret=api_secret,
            database=self.data.db,
            passphrase=passphrase,
            ftx_subaccount=ftx_subaccount,
            PAIRS=PAIRS,
        )

        if is_success:
            # Success, save the result in the DB
            self.data.db.add_exchange(
                name=name,
                location=location,
                api_key=api_key,
                api_secret=api_secret,
                passphrase=passphrase,
                kraken_account_type=kraken_account_type,
                PAIRS=PAIRS,
                ftx_subaccount=ftx_subaccount,
            )
        return is_success, msg

    def remove_exchange(self, name: str, location: Location) -> Tuple[bool, str]:
        if self.exchange_manager.get_exchange(name=name, location=location) is None:
            return False, f'{str(location)} exchange {name} is not registered'

        self.exchange_manager.delete_exchange(name=name, location=location)
        # Success, remove it also from the DB
        self.data.db.remove_exchange(name=name, location=location)
        if self.exchange_manager.connected_exchanges.get(location) is None:
            # was last exchange of the location type. Delete used query ranges
            self.data.db.delete_used_query_range_for_exchange(location)
        return True, ''

    def query_periodic_data(self) -> Dict[str, Union[bool, Timestamp]]:
        """Query for frequently changing data"""
        result: Dict[str, Union[bool, Timestamp]] = {}

        if self.user_is_logged_in:
            result['last_balance_save'] = self.data.db.get_last_balance_save_time()
            result['eth_node_connection'] = self.chain_manager.ethereum.web3_mapping.get(NodeName.OWN, None) is not None  # noqa: E501
            result['last_data_upload_ts'] = Timestamp(self.premium_sync_manager.last_data_upload_ts)  # noqa: E501
        return result

    def shutdown(self) -> None:
        self.logout()
        self.shutdown_event.set()

    def _connect_ksm_manager_on_startup(self) -> bool:
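        """Connect to Kusama nodes at startup only if KSM accounts are already tracked"""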
        return bool(self.data.db.get_blockchain_accounts().ksm)

    def _connect_dot_manager_on_startup(self) -> bool:
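        """Connect to Polkadot nodes at startup only if DOT accounts are already tracked"""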
        return bool(self.data.db.get_blockchain_accounts().dot)

    def create_oracle_cache(
            self,
            oracle: HistoricalPriceOracle,
            from_asset: Asset,
            to_asset: Asset,
            purge_old: bool,
    ) -> None:
        """Creates the cache of the given asset pair from the start of time
        until now for the given oracle.

        If purge_old is True then any old cache, both in memory and on disk, is purged.

        May raise:
            - RemoteError if there is a problem reaching the oracle
            - UnsupportedAsset if any of the two assets is not supported by the oracle
        """
        if oracle != HistoricalPriceOracle.CRYPTOCOMPARE:
            return  # only for cryptocompare for now

        self.cryptocompare.create_cache(from_asset, to_asset, purge_old)
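A hedged usage sketch, not part of the scraped source: pre-warming the cryptocompare historical price cache through the method above. `rotki` is assumed to be an unlocked Rotkehlchen instance, and A_BTC / A_USD are assumed asset constants.

rotki.create_oracle_cache(
    oracle=HistoricalPriceOracle.CRYPTOCOMPARE,
    from_asset=A_BTC,
    to_asset=A_USD,
    purge_old=False,
)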