Example #1
def test_db_newer_than_software_raises_error(data_dir, username):
    """
    If the DB version is greater than the version currently known to the
    software, warn the user to use the latest version of the software
    """
    msg_aggregator = MessagesAggregator()
    data = DataHandler(data_dir, msg_aggregator)
    data.unlock(username, '123', create_new=True)
    # Manually set a bigger version than the current known one
    cursor = data.db.conn.cursor()
    cursor.execute(
        'INSERT OR REPLACE INTO settings(name, value) VALUES(?, ?)',
        ('version', str(ROTKEHLCHEN_DB_VERSION + 1)),
    )
    data.db.conn.commit()

    # now relogin and check that an error is thrown
    del data
    data = DataHandler(data_dir, msg_aggregator)
    with pytest.raises(DBUpgradeError):
        data.unlock(username, '123', create_new=False)
Example #2
def test_iconomi_assets_are_known(
        database,
        inquirer,  # pylint: disable=unused-argument
):
    # use a real Iconomi instance so that we always get the latest data
    iconomi = Iconomi(
        api_key=make_api_key(),
        secret=make_api_secret(),
        database=database,
        msg_aggregator=MessagesAggregator(),
    )

    supported_tickers = iconomi.query_supported_tickers()
    for ticker in supported_tickers:
        try:
            _ = asset_from_iconomi(ticker)
        except UnknownAsset as e:
            test_warnings.warn(
                UserWarning(
                    f'Found unknown asset {e.asset_name} in ICONOMI. '
                    f'Support for it has to be added', ))
Example #3
def init_ethereum(rpc_endpoint: str, use_other_nodes: bool) -> EthereumManager:
    nodes_to_connect = ETHEREUM_NODES_TO_CONNECT_AT_START if use_other_nodes else (NodeName.OWN,)
    msg_aggregator = MessagesAggregator()
    etherscan = Etherscan(database=None, msg_aggregator=msg_aggregator)
    api_key = os.environ.get('ETHERSCAN_API_KEY', None)
    greenlet_manager = GreenletManager(msg_aggregator=msg_aggregator)
    etherscan.api_key = api_key
    ethereum = EthereumManager(
        ethrpc_endpoint=rpc_endpoint,
        etherscan=etherscan,
        database=None,
        msg_aggregator=msg_aggregator,
        greenlet_manager=greenlet_manager,
        connect_at_start=nodes_to_connect,
    )
    wait_until_all_nodes_connected(
        ethereum_manager_connect_at_start=nodes_to_connect,
        ethereum=ethereum,
    )
    return ethereum
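
A minimal usage sketch (not from the source) for the init_ethereum helper above, assuming the same module imports are available; the endpoint below is an assumed local node address:

# Hypothetical invocation: connect using only our own node at an assumed local endpoint.
ethereum_manager = init_ethereum(
    rpc_endpoint='http://localhost:8545',  # assumption: adjust to your own node
    use_other_nodes=False,
)
# At this point the returned EthereumManager has waited for its configured nodes to connect.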
Example #4
def test_balance_save_frequency_check(data_dir, username):
    msg_aggregator = MessagesAggregator()
    data = DataHandler(data_dir, msg_aggregator)
    data.unlock(username, '123', create_new=True)

    now = int(time.time())
    data_save_ts = now - 24 * 60 * 60 + 20
    data.db.add_multiple_location_data([
        LocationData(
            time=data_save_ts,
            location=Location.KRAKEN.serialize_for_db(),
            usd_value='1500',
        )
    ])

    assert not data.should_save_balances()
    data.db.set_settings(ModifiableDBSettings(balance_save_frequency=5))
    assert data.should_save_balances()

    last_save_ts = data.db.get_last_balance_save_time()
    assert last_save_ts == data_save_ts
Example #5
def test_binance_assets_are_known(
        accounting_data_dir,
        inquirer,  # pylint: disable=unused-argument
):
    # use a real binance instance so that we always get the latest data
    binance = Binance(
        api_key=base64.b64encode(make_random_b64bytes(128)),
        secret=base64.b64encode(make_random_b64bytes(128)),
        data_dir=accounting_data_dir,
        msg_aggregator=MessagesAggregator(),
    )

    mapping = binance.symbols_to_pair
    binance_assets = set()
    for _, pair in mapping.items():
        binance_assets.add(pair.binance_base_asset)
        binance_assets.add(pair.binance_quote_asset)

    sorted_assets = sorted(binance_assets)
    for binance_asset in sorted_assets:
        _ = asset_from_binance(binance_asset)
Example #6
def test_upgrade_sqlcipher_v3_to_v4_without_dbinfo(user_data_dir):
    """Test that we can upgrade from an sqlcipher v3 to v4 rotkehlchen database
    Issue: https://github.com/rotki/rotki/issues/229
    """
    sqlcipher_version = detect_sqlcipher_version()
    if sqlcipher_version != 4:
        # nothing to test
        return

    # get the v3 database file and copy it into the user's data directory
    dir_path = os.path.dirname(os.path.realpath(__file__))
    copyfile(
        os.path.join(os.path.dirname(dir_path), 'data',
                     'sqlcipher_v3_rotkehlchen.db'),
        user_data_dir / 'rotkehlchen.db',
    )

    # the constructor should migrate it in-place and we should have a working DB
    msg_aggregator = MessagesAggregator()
    db = DBHandler(user_data_dir, '123', msg_aggregator, None)
    assert db.get_version() == ROTKEHLCHEN_DB_VERSION
Example #7
def test_upgrade_db_11_to_12(user_data_dir):
    """Test upgrading the DB from version 11 to version 12.

    Deleting all bittrex data from the DB"""
    msg_aggregator = MessagesAggregator()
    _use_prepared_db(user_data_dir, 'v11_rotkehlchen.db')
    db = _init_db_with_target_version(
        target_version=12,
        user_data_dir=user_data_dir,
        msg_aggregator=msg_aggregator,
    )

    # Make sure that only one trade is left
    cursor = db.conn.cursor()
    results = cursor.execute('SELECT * FROM trades;')
    assert len(results.fetchall()) == 1
    # Same thing for used query ranges
    results = cursor.execute('SELECT * FROM used_query_ranges;')
    assert len(results.fetchall()) == 1
    # Finally also make sure that we have updated to the target version
    assert db.get_version() == 12
Example #8
def test_query_timed_balances(data_dir, username):
    msg_aggregator = MessagesAggregator()
    data = DataHandler(data_dir, msg_aggregator)
    data.unlock(username, '123', create_new=True)
    data.db.add_multiple_balances(asset_balances)

    result = data.db.query_timed_balances(
        from_ts=1451606401,
        to_ts=1485907100,
        asset=A_USD,
    )
    assert len(result) == 1
    assert result[0].time == 1465171200
    assert result[0].category == BalanceType.ASSET
    assert result[0].amount == '500'
    assert result[0].usd_value == '500'

    result = data.db.query_timed_balances(
        from_ts=1451606300,
        to_ts=1485907000,
        asset=A_ETH,
    )
    assert len(result) == 2
    assert result[0].time == 1451606401
    assert result[0].category == BalanceType.ASSET
    assert result[0].amount == '2'
    assert result[0].usd_value == '1.7068'
    assert result[1].time == 1465171201
    assert result[1].category == BalanceType.ASSET
    assert result[1].amount == '10'
    assert result[1].usd_value == '123'

    result = data.db.query_timed_balances(A_ETH)
    assert len(result) == 4
    result = data.db.query_timed_balances(A_ETH, balance_type=BalanceType.LIABILITY)
    assert len(result) == 1
    assert result[0].time == 1485907201
    assert result[0].category == BalanceType.LIABILITY
    assert result[0].amount == '1'
    assert result[0].usd_value == '9.98'
Example #9
def test_multiple_location_data_and_balances_same_timestamp(user_data_dir):
    """Test that adding location and balance data with same timestamp does not crash.

    Regression test for https://github.com/rotki/rotki/issues/1043
    """
    msg_aggregator = MessagesAggregator()
    db = DBHandler(user_data_dir, '123', msg_aggregator, None)

    balances = [
        AssetBalance(
            time=1590676728,
            asset=A_BTC,
            amount='1.0',
            usd_value='8500',
        ), AssetBalance(
            time=1590676728,
            asset=A_BTC,
            amount='1.1',
            usd_value='9100',
        ),
    ]
    db.add_multiple_balances(balances)
    balances = db.query_timed_balances(from_ts=0, to_ts=1590676728, asset=A_BTC)
    assert len(balances) == 1

    locations = [
        LocationData(
            time=1590676728,
            location='H',
            usd_value='55',
        ), LocationData(
            time=1590676728,
            location='H',
            usd_value='56',
        ),
    ]
    db.add_multiple_location_data(locations)
    locations = db.get_latest_location_value_distribution()
    assert len(locations) == 1
    assert locations[0].usd_value == '55'
Example #10
def test_export_import_db(data_dir, username):
    """Create a DB, write some data and then after export/import confirm it's there"""
    msg_aggregator = MessagesAggregator()
    data = DataHandler(data_dir, msg_aggregator)
    data.unlock(username, '123', create_new=True)
    starting_balance = ManuallyTrackedBalance(
        id=-1,
        asset=A_EUR,
        label='foo',
        amount=FVal(10),
        location=Location.BANKS,
        tags=None,
        balance_type=BalanceType.ASSET,
    )
    data.db.add_manually_tracked_balances([starting_balance])
    encoded_data, _ = data.compress_and_encrypt_db('123')

    # The server would return them decoded
    encoded_data = encoded_data.decode()  # pylint: disable=no-member
    data.decompress_and_decrypt_db('123', encoded_data)
    balances = data.db.get_manually_tracked_balances()
    assert balances == [starting_balance]
Example #11
def test_set_get_rotkehlchen_premium_credentials(data_dir, username):
    """Test that setting the premium credentials and getting them back from the DB works
    """
    api_key = (
        'kWT/MaPHwM2W1KUEl2aXtkKG6wJfMW9KxI7SSerI6/QzchC45/GebPV9xYZy7f+VKBeh5nDRBJBCYn7WofMO4Q=='
    )
    secret = (
        'TEF5dFFrOFcwSXNrM2p1aDdHZmlndFRoMTZQRWJhU2dacTdscUZSeHZTRmJLRm5ZaVRlV2NYU'
        'llYR1lxMjlEdUtRdFptelpCYmlXSUZGRTVDNWx3NDNYbjIx')
    credentials = PremiumCredentials(
        given_api_key=api_key,
        given_api_secret=secret,
    )

    msg_aggregator = MessagesAggregator()
    data = DataHandler(data_dir, msg_aggregator)
    data.unlock(username, '123', create_new=True)
    data.db.set_rotkehlchen_premium(credentials)
    returned_credentials = data.db.get_rotkehlchen_premium()
    assert returned_credentials == credentials
    assert returned_credentials.serialize_key() == api_key
    assert returned_credentials.serialize_secret() == secret
Example #12
def test_query_owned_assets(data_dir, username):
    """Test the get_owned_assets with also an unknown asset in the DB"""
    msg_aggregator = MessagesAggregator()
    data = DataHandler(data_dir, msg_aggregator)
    data.unlock(username, '123', create_new=True)

    balances = deepcopy(asset_balances)
    balances.extend([
        AssetBalance(
            category=BalanceType.ASSET,
            time=Timestamp(1488326400),
            asset=A_BTC,
            amount='1',
            usd_value='1222.66',
        ),
        AssetBalance(
            category=BalanceType.ASSET,
            time=Timestamp(1489326500),
            asset=A_XMR,
            amount='2',
            usd_value='33.8',
        ),
    ])
    data.db.add_multiple_balances(balances)
    cursor = data.db.conn.cursor()
    cursor.execute(
        'INSERT INTO timed_balances('
        '    time, currency, amount, usd_value, category) '
        ' VALUES(?, ?, ?, ?, ?)',
        (1469326500, 'ADSADX', '10.1', '100.5', 'A'),
    )
    data.db.conn.commit()

    assets_list = data.db.query_owned_assets()
    assert assets_list == [A_USD, A_ETH, A_DAI, A_BTC, A_XMR]
    assert all(isinstance(x, Asset) for x in assets_list)
    warnings = data.db.msg_aggregator.consume_warnings()
    assert len(warnings) == 1
    assert 'Unknown/unsupported asset ADSADX' in warnings[0]
Example #13
def test_binance_query_trade_history_custom_markets(function_scope_binance,
                                                    user_data_dir):
    """Test that custom pairs are queried correctly"""
    msg_aggregator = MessagesAggregator()
    db = DBHandler(user_data_dir, '123', msg_aggregator, None)

    binance_api_key = ApiKey('binance_api_key')
    binance_api_secret = ApiSecret(b'binance_api_secret')
    db.add_exchange('binance', Location.BINANCE, binance_api_key,
                    binance_api_secret)

    binance = function_scope_binance

    markets = ['ETHBTC', 'BNBBTC', 'BTCUSDC']
    binance.edit_exchange(name=None,
                          api_key=None,
                          api_secret=None,
                          PAIRS=markets)
    count = 0
    p = re.compile(r'symbol=[A-Z]*')
    seen = set()

    def mock_my_trades(url, timeout):  # pylint: disable=unused-argument
        nonlocal count
        if '/fiat/payments' not in url:
            count += 1
            market = p.search(url).group()[7:]
            assert market in markets and market not in seen
            seen.add(market)
        text = '[]'
        return MockResponse(200, text)

    with patch.object(binance.session, 'get', side_effect=mock_my_trades):
        binance.query_trade_history(start_ts=0,
                                    end_ts=1564301134,
                                    only_cache=False)

    assert count == len(markets)
Example #14
def test_balance_save_frequency_check(data_dir, username):
    msg_aggregator = MessagesAggregator()
    data = DataHandler(data_dir, msg_aggregator)
    data.unlock(username, '123', create_new=True)

    now = int(time.time())
    data_save_ts = now - 24 * 60 * 60 + 20
    data.db.add_multiple_location_data([
        LocationData(
            time=data_save_ts,
            location='kraken',
            usd_value='1500',
        )
    ])

    assert not data.should_save_balances()
    success, msg = data.set_settings({'balance_save_frequency': 5})
    assert success
    assert msg == '', f'set settings returned error: "{msg}"'
    assert data.should_save_balances()

    last_save_ts = data.db.get_last_balance_save_time()
    assert last_save_ts == data_save_ts
Example #15
def test_upgrade_sqlcipher_v3_to_v4_with_dbinfo(data_dir):
    sqlcipher_version = detect_sqlcipher_version()
    if sqlcipher_version != 4:
        # nothing to test
        return

    username = '******'
    userdata_dir = os.path.join(data_dir, username)
    os.mkdir(userdata_dir)
    # get the v3 database file and copy it into the user's data directory
    dir_path = os.path.dirname(os.path.realpath(__file__))
    copyfile(
        os.path.join(dir_path, 'data', 'sqlcipher_v3_rotkehlchen.db'),
        os.path.join(userdata_dir, 'rotkehlchen.db'),
    )
    dbinfo = {'sqlcipher_version': 3, 'md5_hash': '20c910c28ca42370e4a5f24d6d4a73d2'}
    with open(os.path.join(userdata_dir, DBINFO_FILENAME), 'w') as f:
        f.write(rlk_jsondumps(dbinfo))

    # the constructor should migrate it in-place and we should have a working DB
    msg_aggregator = MessagesAggregator()
    db = DBHandler(userdata_dir, '123', msg_aggregator)
    assert db.get_version() == ROTKEHLCHEN_DB_VERSION
Example #16
def test_upgrade_db_4_to_5(data_dir, username):
    """Test upgrading the DB from version 4 to version 5, rename BCC to BCH"""
    msg_aggregator = MessagesAggregator()
    data = DataHandler(data_dir, msg_aggregator)
    data.unlock(username, '123', create_new=True)
    # Manually set version
    cursor = data.db.conn.cursor()
    cursor.execute(
        'INSERT OR REPLACE INTO settings(name, value) VALUES(?, ?)',
        ('version', str(4)),
    )
    data.db.conn.commit()
    populate_db_and_check_for_asset_renaming(
        cursor=cursor,
        data=data,
        data_dir=data_dir,
        msg_aggregator=msg_aggregator,
        username=username,
        to_rename_asset='BCC',
        renamed_asset=A_BCH,
    )
    # Also make sure that we have updated the latest DB version constant
    assert data.db.get_version() > 4
Example #17
def test_add_remove_exchange(data_dir, username):
    """Tests that adding and removing an exchange in the DB works.

    Adding an unknown exchange should also fail
    """
    msg_aggregator = MessagesAggregator()
    username = '******'
    userdata_dir = os.path.join(data_dir, username)
    os.mkdir(userdata_dir)
    db = DBHandler(userdata_dir, '123', msg_aggregator)

    # Test that an unknown exchange fails
    with pytest.raises(InputError):
        db.add_exchange('non_existing_exchange', 'api_key', 'api_secret')
    credentials = db.get_exchange_credentials()
    assert len(credentials) == 0

    kraken_api_key = ApiKey('kraken_api_key')
    kraken_api_secret = ApiSecret(b'kraken_api_secret')
    binance_api_key = ApiKey('binance_api_key')
    binance_api_secret = ApiSecret(b'binance_api_secret')

    # add mock kraken and binance
    db.add_exchange('kraken', kraken_api_key, kraken_api_secret)
    db.add_exchange('binance', binance_api_key, binance_api_secret)
    # and check the credentials can be retrieved
    credentials = db.get_exchange_credentials()
    assert len(credentials) == 2
    assert credentials['kraken'].api_key == kraken_api_key
    assert credentials['kraken'].api_secret == kraken_api_secret
    assert credentials['binance'].api_key == binance_api_key
    assert credentials['binance'].api_secret == binance_api_secret

    # remove an exchange and see it works
    db.remove_exchange('kraken')
    credentials = db.get_exchange_credentials()
    assert len(credentials) == 1
Example #18
def test_upgrade_db_8_to_9(data_dir, username):
    """Test upgrading the DB from version 8 to version 9.

    Adding the passphrase column to user credentials"""
    msg_aggregator = MessagesAggregator()
    userdata_dir = os.path.join(data_dir, username)
    os.mkdir(userdata_dir)
    dir_path = os.path.dirname(os.path.realpath(__file__))
    copyfile(
        os.path.join(os.path.dirname(dir_path), 'data', 'v8_rotkehlchen.db'),
        os.path.join(userdata_dir, 'rotkehlchen.db'),
    )

    with target_patch(target_version=9):
        db = DBHandler(user_data_dir=userdata_dir,
                       password='******',
                       msg_aggregator=msg_aggregator)

    cursor = db.conn.cursor()
    results = cursor.execute(
        'SELECT name, api_key, api_secret, passphrase FROM user_credentials;',
    )
    names = {
        'coinbase', 'coinbasepro', 'binance', 'bittrex', 'kraken', 'bitmex'
    }
    for result in results:
        assert result[0] in names
        names.remove(result[0])
        assert result[1] == '9f07a6f548f3d0ddb68fb406353063ba'  # api key
        assert result[2] == (
            'auIO4FWI3HmL1AnhYaNoK0vr4tTaZyAU3/TI9M46V9IeeCPTxyWV'
            '3JCVzHmcVV9+n+v4TbsIyRndaL9XbFkCuQ==')  # api secret
        assert result[3] is None  # passphrase

    assert len(names) == 0, 'not all exchanges were found in the new DB'
    # Finally also make sure that we have updated to the target version
    assert db.get_version() == 9
Example #19
def test_binance_assets_are_known(
        accounting_data_dir,
        inquirer,  # pylint: disable=unused-argument
):
    # use a real binance instance so that we always get the latest data
    binance = Binance(
        api_key=make_api_key(),
        secret=make_api_secret(),
        user_directory=accounting_data_dir,
        msg_aggregator=MessagesAggregator(),
    )

    mapping = binance.symbols_to_pair
    binance_assets = set()
    for _, pair in mapping.items():
        binance_assets.add(pair.binance_base_asset)
        binance_assets.add(pair.binance_quote_asset)

    sorted_assets = sorted(binance_assets)
    for binance_asset in sorted_assets:
        try:
            _ = asset_from_binance(binance_asset)
        except UnsupportedAsset:
            assert binance_asset in UNSUPPORTED_BINANCE_ASSETS
Example #20
def test_data_set_fiat_balances(data_dir, username):
    msg_aggregator = MessagesAggregator()
    data = DataHandler(data_dir, msg_aggregator)
    data.unlock(username, '123', create_new=True)

    amount_eur = AssetAmount(FVal('100'))
    amount_cny = AssetAmount(FVal('500'))

    data.set_fiat_balances({A_EUR: amount_eur})
    data.set_fiat_balances({A_CNY: amount_cny})
    balances = data.get_fiat_balances()
    assert len(balances) == 2
    assert FVal(balances[A_EUR]) == amount_eur
    assert FVal(balances[A_CNY]) == amount_cny

    data.set_fiat_balances({A_EUR: ZERO})
    balances = data.get_fiat_balances()
    assert len(balances) == 1
    assert FVal(balances[A_CNY]) == amount_cny

    # also check that all the fiat assets in the fiat table are in
    # all_assets.json
    for fiat_asset in FIAT_CURRENCIES:
        assert fiat_asset.is_fiat()
Example #21
def test_upgrade_db_15_to_16(user_data_dir):
    """Test upgrading the DB from version 15 to version 16.

    Deletes all transactions and asset movements from the DB, also asset movement query ranges
    """
    msg_aggregator = MessagesAggregator()
    _use_prepared_db(user_data_dir, 'v15_rotkehlchen.db')
    db = _init_db_with_target_version(
        target_version=16,
        user_data_dir=user_data_dir,
        msg_aggregator=msg_aggregator,
    )
    cursor = db.conn.cursor()

    assert cursor.execute('SELECT COUNT(*) FROM ethereum_transactions;').fetchone()[0] == 0
    assert cursor.execute('SELECT COUNT(*) FROM asset_movements;').fetchone()[0] == 0
    # Make sure address and transaction_id exist as part of asset movements
    assert cursor.execute('SELECT address FROM asset_movements;').fetchall() == []
    assert cursor.execute('SELECT transaction_id FROM asset_movements;').fetchall() == []
    # Test that the only remaining query ranges are the non-asset movements ones
    assert cursor.execute('SELECT COUNT(*) FROM used_query_ranges;').fetchone()[0] == 2

    # Finally also make sure that we have updated to the target version
    assert db.get_version() == 16
Example #22
def test_upgrade_db_18_to_19(user_data_dir):
    """Test upgrading the DB from version 18 to version 19.

    Deletes all aave data and recreates table with all the new attributes
    """
    msg_aggregator = MessagesAggregator()
    _use_prepared_db(user_data_dir, 'v18_rotkehlchen.db')
    db = _init_db_with_target_version(
        target_version=19,
        user_data_dir=user_data_dir,
        msg_aggregator=msg_aggregator,
    )
    cursor = db.conn.cursor()

    assert cursor.execute('SELECT COUNT(*) FROM aave_events;').fetchone()[0] == 0
    # Test that query ranges also get cleared
    assert cursor.execute(
        'SELECT COUNT(*) FROM used_query_ranges WHERE name LIKE "aave_events%";',
    ).fetchone()[0] == 0
    # test schema upgrade by using a new column from the upgraded schema. If nonexisting it raises
    cursor.execute('SELECT asset2usd_value_accruedinterest_feeusdvalue FROM aave_events;')

    # Finally also make sure that we have updated to the target version
    assert db.get_version() == 19
Example #23
def test_upgrade_db_1_to_2(data_dir, username):
    """Test upgrading the DB from version 1 to version 2, which means that
    ethereum accounts are now checksummed"""
    msg_aggregator = MessagesAggregator()
    data = DataHandler(data_dir, msg_aggregator)
    with creation_patch, target_patch(1):
        data.unlock(username, '123', create_new=True)
    # Manually input a non checksummed account
    data.db.conn.commit()
    data.db.add_blockchain_account(
        SupportedBlockchain.ETHEREUM,
        '0xe3580c38b0106899f45845e361ea7f8a0062ef12',
    )

    # now relogin and check that the account has been re-saved as checksummed
    del data
    data = DataHandler(data_dir, msg_aggregator)
    with target_patch(target_version=2):
        data.unlock(username, '123', create_new=False)
    accounts = data.db.get_blockchain_accounts()
    assert accounts.eth[0] == '0xe3580C38B0106899F45845E361EA7F8a0062Ef12'
    version = data.db.get_version()
    # Also make sure that we have updated to the target_version
    assert version == 2
Example #24
def test_upgrade_db_3_to_4(data_dir, username):
    """Test upgrading the DB from version 3 to version 4, which means that
    the eth_rpc_port setting is changed to eth_rpc_endpoint"""
    msg_aggregator = MessagesAggregator()
    data = DataHandler(data_dir, msg_aggregator)
    with creation_patch, target_patch(3):
        data.unlock(username, '123', create_new=True)
    # Manually set version and input the old rpcport setting
    cursor = data.db.conn.cursor()
    cursor.execute(
        'INSERT OR REPLACE INTO settings(name, value) VALUES(?, ?)',
        ('version', str(3)),
    )
    cursor.execute(
        'INSERT OR REPLACE INTO settings(name, value) VALUES(?, ?)',
        ('eth_rpc_port', '8585'),
    )
    data.db.conn.commit()

    # now relogin and check that the setting has been changed and the version bumped
    del data
    data = DataHandler(data_dir, msg_aggregator)
    with target_patch(target_version=4):
        data.unlock(username, '123', create_new=False)
    cursor = data.db.conn.cursor()
    query = cursor.execute(
        'SELECT value FROM settings where name="eth_rpc_endpoint";')
    query = query.fetchall()
    assert query[0][0] == 'http://localhost:8585'
    query = cursor.execute(
        'SELECT value FROM settings where name="eth_rpc_port";')
    query = query.fetchall()
    assert len(query) == 0
    version = data.db.get_version()
    # Also make sure that we have updated to the target_version
    assert version == 4
Example #25
def test_get_latest_location_value_distribution(data_dir, username):
    msg_aggregator = MessagesAggregator()
    data = DataHandler(data_dir, msg_aggregator)
    data.unlock(username, '123', create_new=True)

    add_starting_balances(data)
    distribution = data.db.get_latest_location_value_distribution()
    assert len(distribution) == 5
    assert all(entry.time == Timestamp(1491607800) for entry in distribution)
    assert distribution[0].location == 'B'  # kraken location serialized for DB enum
    assert distribution[0].usd_value == '2000'
    assert distribution[1].location == 'C'  # poloniex location serialized for DB enum
    assert distribution[1].usd_value == '100'
    assert distribution[2].location == 'H'  # total location serialized for DB enum
    assert distribution[2].usd_value == '10700.5'
    assert distribution[3].location == 'I'  # banks location serialized for DB enum
    assert distribution[3].usd_value == '10000'
    assert distribution[4].location == 'J'  # blockchain location serialized for DB enum
    assert distribution[4].usd_value == '200000'
Example #26
def test_upgrade_db_9_to_10(data_dir, username):
    """Test upgrading the DB from version 9 to version 10.

    Deleting all entries from used_query_ranges"""
    msg_aggregator = MessagesAggregator()
    userdata_dir = os.path.join(data_dir, username)
    os.mkdir(userdata_dir)
    dir_path = os.path.dirname(os.path.realpath(__file__))
    copyfile(
        os.path.join(os.path.dirname(dir_path), 'data', 'v9_rotkehlchen.db'),
        os.path.join(userdata_dir, 'rotkehlchen.db'),
    )

    with target_patch(target_version=10):
        db = DBHandler(user_data_dir=userdata_dir,
                       password='******',
                       msg_aggregator=msg_aggregator)

    cursor = db.conn.cursor()
    results = cursor.execute(
        'SELECT name, start_ts, end_ts FROM used_query_ranges;', )
    assert len(results.fetchall()) == 0
    # Finally also make sure that we have updated to the target version
    assert db.get_version() == 10
Example #27
def test_add_and_get_aave_events(data_dir, username):
    """Test that get aave events works fine and returns only events for what we need"""
    msg_aggregator = MessagesAggregator()
    data = DataHandler(data_dir, msg_aggregator)
    data.unlock(username, '123', create_new=True)

    addr1 = make_ethereum_address()
    addr1_events = [AaveSimpleEvent(
        event_type='deposit',
        asset=A_DAI,
        value=Balance(amount=FVal(1), usd_value=FVal(1)),
        block_number=1,
        timestamp=Timestamp(1),
        tx_hash='0x01653e88600a6492ad6e9ae2af415c990e623479057e4e93b163e65cfb2d4436',
        log_index=1,
    ), AaveSimpleEvent(
        event_type='withdrawal',
        asset=A_DAI,
        value=Balance(amount=FVal(1), usd_value=FVal(1)),
        block_number=2,
        timestamp=Timestamp(2),
        tx_hash='0x4147da3e5d3c0565a99192ce0b32182ab30b8e1067921d9b2a8ef3bd60b7e2ce',
        log_index=2,
    )]
    data.db.add_aave_events(address=addr1, events=addr1_events)

    addr2 = make_ethereum_address()
    addr2_events = [AaveSimpleEvent(
        event_type='deposit',
        asset=A_DAI,
        value=Balance(amount=FVal(1), usd_value=FVal(1)),
        block_number=1,
        timestamp=Timestamp(1),
        tx_hash='0x8c094d58f33e8dedcd348cb33b58f3bd447602f1fecb99e51b1c2868029eab55',
        log_index=1,
    ), AaveSimpleEvent(
        event_type='withdrawal',
        asset=A_DAI,
        value=Balance(amount=FVal(1), usd_value=FVal(1)),
        block_number=2,
        timestamp=Timestamp(2),
        tx_hash='0x58c67445d26679623f9b7d56a8be260a275cb6744a1c1ae5a8d6883a5a5c03de',
        log_index=2,
    )]
    data.db.add_aave_events(address=addr2, events=addr2_events)

    # addr3 has all types of aave events so we test serialization/deserialization
    addr3 = make_ethereum_address()
    addr3_events = [AaveSimpleEvent(
        event_type='deposit',
        asset=A_DAI,
        value=Balance(amount=FVal(1), usd_value=FVal(1)),
        block_number=1,
        timestamp=Timestamp(1),
        tx_hash='0x9e394d58f33e8dedcd348cb33b58f3bd447602f1fecb99e51b1c2868029eab55',
        log_index=1,
    ), AaveSimpleEvent(
        event_type='withdrawal',
        asset=A_DAI,
        value=Balance(amount=FVal(1), usd_value=FVal(1)),
        block_number=2,
        timestamp=Timestamp(2),
        tx_hash='0x4c167445d26679623f9b7d56a8be260a275cb6744a1c1ae5a8d6883a5a5c03de',
        log_index=2,
    ), AaveSimpleEvent(
        event_type='interest',
        asset=Asset('WBTC'),
        value=Balance(amount=FVal(1), usd_value=FVal(1)),
        block_number=4,
        timestamp=Timestamp(4),
        tx_hash='0x49c67445d26679623f9b7d56a8be260a275cb6744a1c1ae5a8d6883a5a5c03de',
        log_index=4,
    ), AaveBorrowEvent(
        event_type='borrow',
        asset=Asset('ETH'),
        value=Balance(amount=FVal(1), usd_value=FVal(1)),
        block_number=5,
        timestamp=Timestamp(5),
        tx_hash='0x19c67445d26679623f9b7d56a8be260a275cb6744a1c1ae5a8d6883a5a5c03de',
        log_index=5,
        borrow_rate_mode='stable',
        borrow_rate=FVal('0.05233232323423432'),
        accrued_borrow_interest=FVal('5.112234'),
    ), AaveRepayEvent(
        event_type='repay',
        asset=Asset('MANA'),
        value=Balance(amount=FVal(1), usd_value=FVal(1)),
        block_number=6,
        timestamp=Timestamp(6),
        tx_hash='0x29c67445d26679623f9b7d56a8be260a275cb6744a1c1ae5a8d6883a5a5c03de',
        log_index=6,
        fee=Balance(amount=FVal('0.1'), usd_value=FVal('0.1')),
    ), AaveLiquidationEvent(
        event_type='liquidation',
        collateral_asset=Asset('ETH'),
        collateral_balance=Balance(amount=FVal(1), usd_value=FVal(1)),
        principal_asset=Asset('ETH'),
        principal_balance=Balance(amount=FVal(1), usd_value=FVal(1)),
        block_number=7,
        log_index=7,
        timestamp=Timestamp(7),
        tx_hash='0x39c67445d26679623f9b7d56a8be260a275cb6744a1c1ae5a8d6883a5a5c03de',
    )]
    data.db.add_aave_events(address=addr3, events=addr3_events)

    events = data.db.get_aave_events(address=addr1, atoken=EthereumToken('aDAI'))
    assert events == addr1_events
    events = data.db.get_aave_events(address=addr2, atoken=EthereumToken('aDAI'))
    assert events == addr2_events
    events = data.db.get_aave_events(address=addr3)
    assert events == addr3_events

    # check that all aave events are properly hashable (aka can go in a set)
    test_set = set()
    for event in addr3_events:
        test_set.add(event)
    assert len(test_set) == len(addr3_events)
Example #28
class TaskManager():
    def __init__(
        self,
        max_tasks_num: int,
        greenlet_manager: GreenletManager,
        api_task_greenlets: List[gevent.Greenlet],
        database: DBHandler,
        cryptocompare: Cryptocompare,
        premium_sync_manager: Optional[PremiumSyncManager],
        chain_manager: ChainManager,
        exchange_manager: ExchangeManager,
        evm_tx_decoder: 'EVMTransactionDecoder',
        eth_transactions: 'EthTransactions',
        deactivate_premium: Callable,
        query_balances: Callable,
    ) -> None:
        self.max_tasks_num = max_tasks_num
        self.greenlet_manager = greenlet_manager
        self.api_task_greenlets = api_task_greenlets
        self.database = database
        self.cryptocompare = cryptocompare
        self.exchange_manager = exchange_manager
        self.evm_tx_decoder = evm_tx_decoder
        self.eth_transactions = eth_transactions
        self.cryptocompare_queries: Set[CCHistoQuery] = set()
        self.chain_manager = chain_manager
        self.last_xpub_derivation_ts = 0
        self.last_eth_tx_query_ts: DefaultDict[ChecksumEthAddress, int] = defaultdict(int)
        self.last_exchange_query_ts: DefaultDict[ExchangeLocationID, int] = defaultdict(int)
        self.base_entries_ignore_set: Set[str] = set()
        self.prepared_cryptocompare_query = False
        self.greenlet_manager.spawn_and_track(  # Needs to run in greenlet, is slow
            after_seconds=None,
            task_name='Prepare cryptocompare queries',
            exception_is_error=True,
            method=self._prepare_cryptocompare_queries,
        )
        self.deactivate_premium = deactivate_premium
        self.query_balances = query_balances
        self.last_premium_status_check = ts_now()
        self.msg_aggregator = MessagesAggregator()

        self.potential_tasks = [
            self._maybe_schedule_cryptocompare_query,
            self._maybe_schedule_xpub_derivation,
            self._maybe_query_ethereum_transactions,
            self._maybe_schedule_exchange_history_query,
            self._maybe_schedule_ethereum_txreceipts,
            self._maybe_query_missing_prices,
            self._maybe_decode_evm_transactions,
            self._maybe_check_premium_status,
            self._maybe_update_snapshot_balances,
        ]
        if premium_sync_manager is not None:
            self.potential_tasks.append(
                premium_sync_manager.maybe_upload_data_to_server)
        self.schedule_lock = gevent.lock.Semaphore()

    def _prepare_cryptocompare_queries(self) -> None:
        """Prepare the queries to do to cryptocompare

        This would be really slow if the entire json cache files were read but we
        have implemented get_cached_data_metadata to only read the relevant part of the file.
        Before doing that we had to yield with gevent.sleep() at each loop iteration.

        Runs only once in the beginning and then has a number of queries prepared
        for the task manager to schedule
        """
        now_ts = ts_now()
        if self.prepared_cryptocompare_query is True:
            return

        if len(self.cryptocompare_queries) != 0:
            return

        assets = self.database.query_owned_assets()
        main_currency = self.database.get_main_currency()
        for asset in assets:

            if asset.is_fiat() and main_currency.is_fiat():
                continue  # ignore fiat to fiat

            if asset.cryptocompare == '' or main_currency.cryptocompare == '':
                continue  # not supported in cryptocompare

            if asset.cryptocompare is None and asset.symbol is None:
                continue  # type: ignore  # asset.symbol may be None for auto generated underlying tokens # noqa: E501

            data_range = GlobalDBHandler().get_historical_price_range(
                from_asset=asset,
                to_asset=main_currency,
                source=HistoricalPriceOracle.CRYPTOCOMPARE,
            )
            if data_range is not None and now_ts - data_range[1] < CRYPTOCOMPARE_QUERY_AFTER_SECS:  # noqa: E501
                continue

            self.cryptocompare_queries.add(
                CCHistoQuery(from_asset=asset, to_asset=main_currency))

        self.prepared_cryptocompare_query = True

    def _maybe_schedule_cryptocompare_query(self) -> bool:
        """Schedules a cryptocompare query for a single asset history"""
        if self.prepared_cryptocompare_query is False:
            return False

        if len(self.cryptocompare_queries) == 0:
            return False

        # If there is already a cryptocompare query running don't schedule another
        if any('Cryptocompare historical prices' in x.task_name
               for x in self.greenlet_manager.greenlets):
            return False

        now_ts = ts_now()
        # Make sure enough time has passed since the last histohour query for an asset
        # to avoid getting rate limited by cryptocompare
        if now_ts - self.cryptocompare.last_histohour_query_ts <= CRYPTOCOMPARE_HISTOHOUR_FREQUENCY:  # noqa: E501
            return False

        query = self.cryptocompare_queries.pop()
        task_name = f'Cryptocompare historical prices {query.from_asset} / {query.to_asset} query'
        log.debug(f'Scheduling task for {task_name}')
        self.greenlet_manager.spawn_and_track(
            after_seconds=None,
            task_name=task_name,
            exception_is_error=False,
            method=self.cryptocompare.query_and_store_historical_data,
            from_asset=query.from_asset,
            to_asset=query.to_asset,
            timestamp=now_ts,
        )
        return True

    def _maybe_schedule_xpub_derivation(self) -> None:
        """Schedules the xpub derivation task if enough time has passed and if user has xpubs"""
        now = ts_now()
        if now - self.last_xpub_derivation_ts <= XPUB_DERIVATION_FREQUENCY:
            return

        xpubs = self.database.get_bitcoin_xpub_data()
        if len(xpubs) == 0:
            return

        log.debug('Scheduling task for Xpub derivation')
        self.greenlet_manager.spawn_and_track(
            after_seconds=None,
            task_name='Derive new xpub addresses',
            exception_is_error=True,
            method=XpubManager(self.chain_manager).check_for_new_xpub_addresses,
        )
        self.last_xpub_derivation_ts = now

    def _maybe_query_ethereum_transactions(self) -> None:
        """Schedules the ethereum transaction query task if enough time has passed"""
        accounts = self.database.get_blockchain_accounts().eth
        if len(accounts) == 0:
            return

        now = ts_now()
        dbethtx = DBEthTx(self.database)
        queriable_accounts = []
        for account in accounts:
            _, end_ts = dbethtx.get_queried_range(account)
            if now - max(self.last_eth_tx_query_ts[account], end_ts) > ETH_TX_QUERY_FREQUENCY:
                queriable_accounts.append(account)

        if len(queriable_accounts) == 0:
            return

        address = random.choice(queriable_accounts)
        task_name = f'Query ethereum transactions for {address}'
        log.debug(f'Scheduling task to {task_name}')
        self.greenlet_manager.spawn_and_track(
            after_seconds=None,
            task_name=task_name,
            exception_is_error=True,
            method=self.eth_transactions.single_address_query_transactions,
            address=address,
            start_ts=0,
            end_ts=now,
        )
        self.last_eth_tx_query_ts[address] = now

    def _maybe_schedule_ethereum_txreceipts(self) -> None:
        """Schedules the ethereum transaction receipts query task

        The DB check happens first here to see if scheduling would even be needed.
        But the DB query will happen again inside the query task while having the
        lock acquired.
        """
        dbethtx = DBEthTx(self.database)
        hash_results = dbethtx.get_transaction_hashes_no_receipt(
            tx_filter_query=None, limit=TX_RECEIPTS_QUERY_LIMIT)  # noqa: E501
        if len(hash_results) == 0:
            return

        task_name = f'Query {len(hash_results)} ethereum transactions receipts'
        log.debug(f'Scheduling task to {task_name}')
        self.greenlet_manager.spawn_and_track(
            after_seconds=None,
            task_name=task_name,
            exception_is_error=True,
            method=self.eth_transactions.get_receipts_for_transactions_missing_them,
            limit=TX_RECEIPTS_QUERY_LIMIT,
        )

    def _maybe_schedule_exchange_history_query(self) -> None:
        """Schedules the exchange history query task if enough time has passed"""
        if len(self.exchange_manager.connected_exchanges) == 0:
            return

        now = ts_now()
        queriable_exchanges = []
        for exchange in self.exchange_manager.iterate_exchanges():
            if exchange.location in (Location.BINANCE, Location.BINANCEUS):
                continue  # skip binance due to the way their history is queried and rate limiting
            queried_range = self.database.get_used_query_range(
                f'{str(exchange.location)}_trades')
            end_ts = queried_range[1] if queried_range else 0
            if now - max(self.last_exchange_query_ts[exchange.location_id()], end_ts) > EXCHANGE_QUERY_FREQUENCY:  # noqa: E501
                queriable_exchanges.append(exchange)

        if len(queriable_exchanges) == 0:
            return

        exchange = random.choice(queriable_exchanges)
        task_name = f'Query history of {exchange.name} exchange'
        log.debug(f'Scheduling task to {task_name}')
        self.greenlet_manager.spawn_and_track(
            after_seconds=None,
            task_name=task_name,
            exception_is_error=True,
            method=exchange.query_history_with_callbacks,
            start_ts=0,
            end_ts=now,
            success_callback=noop_exchange_success_cb,
            fail_callback=exchange_fail_cb,
        )
        self.last_exchange_query_ts[exchange.location_id()] = now

    def _maybe_query_missing_prices(self) -> None:
        query_filter = HistoryEventFilterQuery.make(limit=100)
        entries = self.get_base_entries_missing_prices(query_filter)
        if len(entries) > 0:
            task_name = 'Periodically query history events prices'
            log.debug(f'Scheduling task to {task_name}')
            self.greenlet_manager.spawn_and_track(
                after_seconds=None,
                task_name=task_name,
                exception_is_error=True,
                method=self.query_missing_prices_of_base_entries,
                entries_missing_prices=entries,
            )

    def get_base_entries_missing_prices(
        self,
        query_filter: HistoryEventFilterQuery,
    ) -> List[Tuple[str, FVal, Asset, Timestamp]]:
        """
        Searches for base entries with missing usd prices that have not previously been
        checked in this session.
        """
        # Use a deepcopy to avoid mutations in the filter query if it is used later
        db = DBHistoryEvents(self.database)
        new_query_filter = copy.deepcopy(query_filter)
        new_query_filter.filters.append(
            DBStringFilter(and_op=True, column='usd_value', value='0'), )
        new_query_filter.filters.append(
            DBIgnoreValuesFilter(
                and_op=True,
                column='identifier',
                values=list(self.base_entries_ignore_set),
            ), )
        return db.rows_missing_prices_in_base_entries(
            filter_query=new_query_filter)

    def query_missing_prices_of_base_entries(
        self,
        entries_missing_prices: List[Tuple[str, FVal, Asset, Timestamp]],
    ) -> None:
        """Queries missing prices for HistoryBaseEntry in database updating
        the price if it is found. Otherwise we add the id to the ignore list
        for this session.
        """
        inquirer = PriceHistorian()
        updates = []
        for identifier, amount, asset, timestamp in entries_missing_prices:
            try:
                price = inquirer.query_historical_price(
                    from_asset=asset,
                    to_asset=A_USD,
                    timestamp=timestamp,
                )
            except (NoPriceForGivenTimestamp, RemoteError) as e:
                log.error(
                    f'Failed to find price for {asset} at {timestamp} in base '
                    f'entry {identifier}. {str(e)}.', )
                self.base_entries_ignore_set.add(identifier)
                continue

            usd_value = amount * price
            updates.append((str(usd_value), identifier))

        query = 'UPDATE history_events SET usd_value=? WHERE rowid=?'
        cursor = self.database.conn.cursor()
        cursor.executemany(query, updates)
        self.database.update_last_write()

    def _maybe_decode_evm_transactions(self) -> None:
        """Schedules the evm transaction decoding task

        The DB check happens first here to see if scheduling would even be needed.
        But the DB query will happen again inside the query task while having the
        lock acquired.
        """
        dbethtx = DBEthTx(self.database)
        hashes = dbethtx.get_transaction_hashes_not_decoded(
            limit=TX_DECODING_LIMIT)
        hashes_length = len(hashes)
        if hashes_length > 0:
            task_name = f'decode {hashes_length} evm transactions'
            log.debug(f'Scheduling periodic task to {task_name}')
            self.greenlet_manager.spawn_and_track(
                after_seconds=None,
                task_name=task_name,
                exception_is_error=True,
                method=self.evm_tx_decoder.get_and_decode_undecoded_transactions,
                limit=TX_DECODING_LIMIT,
            )

    def _maybe_check_premium_status(self) -> None:
        """
        Validates the premium status of the account and, if the credentials are not valid,
        deactivates the user's premium status.
        """
        now = ts_now()
        if now - self.last_premium_status_check < PREMIUM_STATUS_CHECK:
            return

        db_credentials = self.database.get_rotkehlchen_premium()
        if db_credentials:
            try:
                premium_create_and_verify(db_credentials)
            except PremiumAuthenticationError as e:
                message = (
                    f'Could not authenticate with the rotkehlchen server with '
                    f'the API keys found in the Database. Error: {str(e)}. Will '
                    f'deactivate the premium status.')
                self.msg_aggregator.add_error(message)
                self.deactivate_premium()
        self.last_premium_status_check = now

    def _maybe_update_snapshot_balances(self) -> None:
        """
        Update the balances of a user if the difference between last time they were updated
        and the current time exceeds the `balance_save_frequency`.
        """
        if self.database.should_save_balances():
            task_name = 'Periodically update snapshot balances'
            log.debug(f'Scheduling task to {task_name}')
            self.greenlet_manager.spawn_and_track(
                after_seconds=None,
                task_name=task_name,
                exception_is_error=True,
                method=self.query_balances,
                requested_save_data=True,
                save_despite_errors=False,
                timestamp=None,
                ignore_cache=True,
            )

    def _schedule(self) -> None:
        """Schedules background tasks"""
        self.greenlet_manager.clear_finished()
        current_greenlets = len(self.greenlet_manager.greenlets) + len(
            self.api_task_greenlets)
        not_proceed = current_greenlets >= self.max_tasks_num
        log.debug(
            f'At task scheduling. Current greenlets: {current_greenlets} '
            f'Max greenlets: {self.max_tasks_num}. '
            f'{"Will not schedule" if not_proceed else "Will schedule"}.', )
        if not_proceed:
            return  # too busy

        callables = random.sample(
            population=self.potential_tasks,
            k=min(self.max_tasks_num - current_greenlets,
                  len(self.potential_tasks)),
        )

        for callable_fn in callables:
            callable_fn()

    def schedule(self) -> None:
        """Schedules background task while holding the scheduling lock

        Used during logout to make sure no task is being scheduled at the same time
        as logging out
        """
        with self.schedule_lock:
            self._schedule()
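
A hedged sketch (not from the source) of how a TaskManager like the one above might be driven: a plain gevent loop that periodically calls schedule(), which itself enforces max_tasks_num and takes the scheduling lock. The driver function and interval are assumptions for illustration:

import gevent


def run_task_scheduler(task_manager: 'TaskManager', interval: float = 30.0) -> None:
    """Hypothetical driver loop: periodically let the task manager schedule work."""
    while True:
        task_manager.schedule()  # holds the scheduling lock, skips if too many greenlets run
        gevent.sleep(interval)   # yield to other greenlets between scheduling rounds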
Example #29
def test_add_and_get_yearn_vault_events(data_dir, username):
    """Test that get yearn vault events works fine and returns only events for what we need"""
    msg_aggregator = MessagesAggregator()
    data = DataHandler(data_dir, msg_aggregator)
    data.unlock(username, '123', create_new=True)

    addr1 = make_ethereum_address()
    addr1_events = [YearnVaultEvent(
        event_type='deposit',
        from_asset=A_DAI,
        from_value=Balance(amount=FVal(1), usd_value=FVal(1)),
        to_asset=Asset('yDAI'),
        to_value=Balance(amount=FVal(1), usd_value=FVal(1)),
        realized_pnl=None,
        block_number=1,
        timestamp=Timestamp(1),
        tx_hash='0x01653e88600a6492ad6e9ae2af415c990e623479057e4e93b163e65cfb2d4436',
        log_index=1,
    ), YearnVaultEvent(
        event_type='withdraw',
        from_asset=Asset('yDAI'),
        from_value=Balance(amount=FVal(1), usd_value=FVal(1)),
        to_asset=A_DAI,
        to_value=Balance(amount=FVal(1), usd_value=FVal(1)),
        realized_pnl=Balance(amount=FVal('0.01'), usd_value=FVal('0.01')),
        block_number=2,
        timestamp=Timestamp(2),
        tx_hash='0x4147da3e5d3c0565a99192ce0b32182ab30b8e1067921d9b2a8ef3bd60b7e2ce',
        log_index=2,
    )]
    data.db.add_yearn_vaults_events(address=addr1, events=addr1_events)
    addr2 = make_ethereum_address()
    addr2_events = [YearnVaultEvent(
        event_type='deposit',
        from_asset=A_DAI,
        from_value=Balance(amount=FVal(1), usd_value=FVal(1)),
        to_asset=Asset('yDAI'),
        to_value=Balance(amount=FVal(1), usd_value=FVal(1)),
        realized_pnl=None,
        block_number=1,
        timestamp=Timestamp(1),
        tx_hash='0x8c094d58f33e8dedcd348cb33b58f3bd447602f1fecb99e51b1c2868029eab55',
        log_index=1,
    ), YearnVaultEvent(
        event_type='withdraw',
        from_asset=Asset('yDAI'),
        from_value=Balance(amount=FVal(1), usd_value=FVal(1)),
        to_asset=A_DAI,
        to_value=Balance(amount=FVal(1), usd_value=FVal(1)),
        realized_pnl=Balance(amount=FVal('0.01'), usd_value=FVal('0.01')),
        block_number=2,
        timestamp=Timestamp(2),
        tx_hash='0x58c67445d26679623f9b7d56a8be260a275cb6744a1c1ae5a8d6883a5a5c03de',
        log_index=2,
    )]
    data.db.add_yearn_vaults_events(address=addr2, events=addr2_events)

    events = data.db.get_yearn_vaults_events(address=addr1, vault=YEARN_VAULTS['yDAI'])
    assert events == addr1_events
    events = data.db.get_yearn_vaults_events(address=addr2, vault=YEARN_VAULTS['yDAI'])
    assert events == addr2_events
Example #30
def db_settings_from_dict(
    settings_dict: Dict[str, Any],
    msg_aggregator: MessagesAggregator,
) -> DBSettings:
    specified_args: Dict[str, Any] = {}
    for key, value in settings_dict.items():
        if key == 'have_premium':
            specified_args[key] = read_boolean(value)
        elif key == 'version':
            specified_args[key] = int(value)
        elif key == 'historical_data_start':
            specified_args[key] = str(value)
        elif key == 'eth_rpc_endpoint':
            specified_args[key] = str(value)
        elif key == 'ui_floating_precision':
            specified_args[key] = int(value)
        elif key == 'include_crypto2crypto':
            specified_args[key] = read_boolean(value)
        elif key == 'taxfree_after_period':
            # taxfree_after_period can also be None, to signify disabled setting
            if value is None:
                specified_args[key] = value
            else:
                int_value = int(value)
                if int_value <= 0:
                    value = None
                    msg_aggregator.add_warning(
                        f'A negative or zero value ({int_value}) for taxfree_after_period '
                        f'ended up in the DB. Setting it to None. Please open an issue in '
                        f'Github: https://github.com/rotki/rotki/issues/new/choose',
                    )

                else:
                    value = int_value

                specified_args[key] = value
        elif key == 'balance_save_frequency':
            specified_args[key] = int(value)
        elif key == 'main_currency':
            specified_args[key] = Asset(str(value))
        elif key == 'anonymized_logs':
            specified_args[key] = read_boolean(value)
        elif key == 'include_gas_costs':
            specified_args[key] = read_boolean(value)
        elif key == 'date_display_format':
            specified_args[key] = str(value)
        elif key == 'thousand_separator':
            specified_args[key] = str(value)
        elif key == 'decimal_separator':
            specified_args[key] = str(value)
        elif key == 'currency_location':
            specified_args[key] = str(value)
        elif key == 'premium_should_sync':
            specified_args[key] = read_boolean(value)
        elif key == 'last_write_ts':
            specified_args[key] = Timestamp(int(value))
        elif key == 'last_data_upload_ts':
            specified_args[key] = Timestamp(int(value))
        elif key == 'last_balance_save':
            specified_args[key] = Timestamp(int(value))
        elif key == 'submit_usage_analytics':
            specified_args[key] = read_boolean(value)
        elif key == 'kraken_account_type':
            specified_args[key] = KrakenAccountType.deserialize(value)
        elif key == 'active_modules':
            specified_args[key] = json.loads(value)
        elif key == 'frontend_settings':
            specified_args[key] = str(value)
        else:
            msg_aggregator.add_warning(
                f'Unknown DB setting {key} given. Ignoring it. Should not '
                f'happen so please open an issue in Github.', )

    return DBSettings(**specified_args)
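
A short usage sketch (not from the source) for db_settings_from_dict above: converting the name/value pairs read from the settings table into a DBSettings object. The concrete keys and their stored string forms below are assumptions for illustration:

msg_aggregator = MessagesAggregator()
raw_settings = {
    'ui_floating_precision': '2',    # settings are stored as text in the DB
    'balance_save_frequency': '24',
    'main_currency': 'EUR',
    'unknown_key': 'whatever',       # triggers the "Unknown DB setting" warning above
}
settings = db_settings_from_dict(raw_settings, msg_aggregator)
assert settings.ui_floating_precision == 2
assert len(msg_aggregator.consume_warnings()) == 1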