Example #1
def test_writing_fetching_data(data_dir, username):
    msg_aggregator = MessagesAggregator()
    data = DataHandler(data_dir, msg_aggregator)
    data.unlock(username, '123', create_new=True)

    tokens = [A_GNO, A_RDN]
    data.write_owned_eth_tokens(tokens)
    result = data.db.get_owned_tokens()
    assert set(tokens) == set(result)

    data.add_blockchain_account(SupportedBlockchain.BITCOIN,
                                '1CB7Pbji3tquDtMRp8mBkerimkFzWRkovS')
    data.add_blockchain_account(
        SupportedBlockchain.ETHEREUM,
        '0xd36029d76af6fE4A356528e4Dc66B2C18123597D',
    )
    # Add a non-checksummed address
    data.add_blockchain_account(
        SupportedBlockchain.ETHEREUM,
        '0x80b369799104a47e98a553f3329812a44a7facdc',
    )
    accounts = data.db.get_blockchain_accounts()
    assert isinstance(accounts, BlockchainAccounts)
    assert accounts.btc == ['1CB7Pbji3tquDtMRp8mBkerimkFzWRkovS']
    # See that after addition the address has been checksummed
    assert set(accounts.eth) == set([
        '0xd36029d76af6fE4A356528e4Dc66B2C18123597D',
        to_checksum_address('0x80b369799104a47e98a553f3329812a44a7facdc'),
    ])
    # Adding an existing account should fail
    with pytest.raises(sqlcipher.IntegrityError):  # pylint: disable=no-member
        data.add_blockchain_account(
            SupportedBlockchain.ETHEREUM,
            '0xd36029d76af6fE4A356528e4Dc66B2C18123597D',
        )
    # Remove non-existing account
    with pytest.raises(InputError):
        data.remove_blockchain_account(
            SupportedBlockchain.ETHEREUM,
            '0x136029d76af6fE4A356528e4Dc66B2C18123597D',
        )
    # Remove existing account
    data.remove_blockchain_account(
        SupportedBlockchain.ETHEREUM,
        '0xd36029d76af6fE4A356528e4Dc66B2C18123597D',
    )
    accounts = data.db.get_blockchain_accounts()
    assert accounts.eth == [
        to_checksum_address('0x80b369799104a47e98a553f3329812a44a7facdc')
    ]

    result, _ = data.add_ignored_asset('DAO')
    assert result
    result, _ = data.add_ignored_asset('DOGE')
    assert result
    result, _ = data.add_ignored_asset('DOGE')
    assert not result
    # Test adding a non-existing asset
    result, msg = data.add_ignored_asset('dsajdhskajdad')
    assert not result
    assert 'for ignoring is not known/supported' in msg

    ignored_assets = data.db.get_ignored_assets()
    assert all([isinstance(asset, Asset) for asset in ignored_assets])
    assert set(ignored_assets) == set([A_DAO, A_DOGE])
    # Test removing an asset that is not in the ignored list
    result, msg = data.remove_ignored_asset('RDN')
    assert 'not in ignored assets' in msg
    # Test removing a non-existing asset
    result, msg = data.remove_ignored_asset('dshajdhsjkahdjssad')
    assert 'is not known/supported' in msg
    assert not result
    result, _ = data.remove_ignored_asset('DOGE')
    assert result
    assert data.db.get_ignored_assets() == ['DAO']

    # With nothing inserted in settings make sure default values are returned
    result = data.db.get_settings()
    last_write_diff = ts_now() - result['last_write_ts']
    # make sure last_write was within 3 secs
    assert last_write_diff >= 0 and last_write_diff < 3
    del result['last_write_ts']
    assert result == {
        'historical_data_start': DEFAULT_START_DATE,
        'eth_rpc_endpoint': 'http://localhost:8545',
        'ui_floating_precision': DEFAULT_UI_FLOATING_PRECISION,
        'db_version': ROTKEHLCHEN_DB_VERSION,
        'include_crypto2crypto': DEFAULT_INCLUDE_CRYPTO2CRYPTO,
        'include_gas_costs': DEFAULT_INCLUDE_GAS_COSTS,
        'taxfree_after_period': YEAR_IN_SECONDS,
        'balance_save_frequency': DEFAULT_BALANCE_SAVE_FREQUENCY,
        'last_balance_save': 0,
        'main_currency': DEFAULT_MAIN_CURRENCY,
        'anonymized_logs': DEFAULT_ANONYMIZED_LOGS,
        'date_display_format': DEFAULT_DATE_DISPLAY_FORMAT,
        'last_data_upload_ts': 0,
        'premium_should_sync': False,
    }

    # Check setting non-existing settings. Should be ignored
    success, msg = data.set_settings({'nonexisting_setting': 1},
                                     accountant=None)
    assert success
    assert msg != '' and 'nonexisting_setting' in msg
    _, msg = data.set_settings(
        {
            'nonexisting_setting': 1,
            'eth_rpc_endpoint': 'http://localhost:8555',
            'ui_floating_precision': 3,
        },
        accountant=None)
    assert msg != '' and 'nonexisting_setting' in msg

    # Now check nothing funny made it in the db
    result = data.db.get_settings()
    assert result['eth_rpc_endpoint'] == 'http://localhost:8555'
    assert result['ui_floating_precision'] == 3
    assert 'nonexisting_setting' not in result
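
The test above relies on two pytest fixtures, data_dir and username, that are defined elsewhere in the test suite. A minimal sketch of what such fixtures could look like, assuming pytest's built-in tmpdir fixture; the fixture bodies and values here are illustrative assumptions, not the project's actual conftest:

import pytest


@pytest.fixture
def username():
    # Illustrative value; DataHandler.unlock() only needs a plain string
    return 'testuser'


@pytest.fixture
def data_dir(tmpdir):
    # Throwaway directory to hold the encrypted user DB created by DataHandler
    return str(tmpdir)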
Example #2
class Rotkehlchen():
    def __init__(self, args):
        self.lock = Semaphore()
        self.lock.acquire()
        self.results_cache: ResultCache = dict()
        self.premium = None
        self.connected_exchanges = []
        self.user_is_logged_in = False

        logfilename = None
        if args.logtarget == 'file':
            logfilename = args.logfile

        if args.loglevel == 'debug':
            loglevel = logging.DEBUG
        elif args.loglevel == 'info':
            loglevel = logging.INFO
        elif args.loglevel == 'warn':
            loglevel = logging.WARN
        elif args.loglevel == 'error':
            loglevel = logging.ERROR
        elif args.loglevel == 'critical':
            loglevel = logging.CRITICAL
        else:
            raise ValueError('Should never get here. Illegal log value')

        logging.basicConfig(
            filename=logfilename,
            filemode='w',
            level=loglevel,
            format='%(asctime)s -- %(levelname)s:%(name)s:%(message)s',
            datefmt='%d/%m/%Y %H:%M:%S %Z',
        )

        if not args.logfromothermodules:
            logging.getLogger('zerorpc').setLevel(logging.CRITICAL)
            logging.getLogger('zerorpc.channel').setLevel(logging.CRITICAL)
            logging.getLogger('urllib3').setLevel(logging.CRITICAL)
            logging.getLogger('urllib3.connectionpool').setLevel(
                logging.CRITICAL)

        self.sleep_secs = args.sleep_secs
        self.data_dir = args.data_dir
        self.args = args
        self.last_data_upload_ts = 0

        self.poloniex = None
        self.kraken = None
        self.bittrex = None
        self.bitmex = None
        self.binance = None

        self.msg_aggregator = MessagesAggregator()
        self.data = DataHandler(self.data_dir, self.msg_aggregator)
        # Initialize the Inquirer singleton
        Inquirer(data_dir=self.data_dir)

        self.lock.release()
        self.shutdown_event = gevent.event.Event()

    def initialize_exchanges(self, secret_data):
        # initialize exchanges for which we have keys and are not already initialized
        if self.kraken is None and 'kraken' in secret_data:
            self.kraken = Kraken(
                api_key=str.encode(secret_data['kraken']['api_key']),
                secret=str.encode(secret_data['kraken']['api_secret']),
                user_directory=self.user_directory,
                usd_eur_price=Inquirer().query_fiat_pair(S_EUR, A_USD),
            )
            self.connected_exchanges.append('kraken')
            self.trades_historian.set_exchange('kraken', self.kraken)

        if self.poloniex is None and 'poloniex' in secret_data:
            self.poloniex = Poloniex(
                api_key=str.encode(secret_data['poloniex']['api_key']),
                secret=str.encode(secret_data['poloniex']['api_secret']),
                user_directory=self.user_directory,
                msg_aggregator=self.msg_aggregator,
            )
            self.connected_exchanges.append('poloniex')
            self.trades_historian.set_exchange('poloniex', self.poloniex)

        if self.bittrex is None and 'bittrex' in secret_data:
            self.bittrex = Bittrex(
                api_key=str.encode(secret_data['bittrex']['api_key']),
                secret=str.encode(secret_data['bittrex']['api_secret']),
                user_directory=self.user_directory,
                msg_aggregator=self.msg_aggregator,
            )
            self.connected_exchanges.append('bittrex')
            self.trades_historian.set_exchange('bittrex', self.bittrex)

        if self.binance is None and 'binance' in secret_data:
            self.binance = Binance(
                api_key=str.encode(secret_data['binance']['api_key']),
                secret=str.encode(secret_data['binance']['api_secret']),
                data_dir=self.user_directory,
                msg_aggregator=self.msg_aggregator,
            )
            self.connected_exchanges.append('binance')
            self.trades_historian.set_exchange('binance', self.binance)

        if self.bitmex is None and 'bitmex' in secret_data:
            self.bitmex = Bitmex(
                api_key=str.encode(secret_data['bitmex']['api_key']),
                secret=str.encode(secret_data['bitmex']['api_secret']),
                user_directory=self.user_directory,
            )
            self.connected_exchanges.append('bitmex')
            self.trades_historian.set_exchange('bitmex', self.bitmex)

    def remove_all_exchanges(self):
        if self.kraken is not None:
            self.delete_exchange_data('kraken')
        if self.poloniex is not None:
            self.delete_exchange_data('poloniex')
        if self.bittrex is not None:
            self.delete_exchange_data('bittrex')
        if self.binance is not None:
            self.delete_exchange_data('binance')
        if self.bitmex is not None:
            self.delete_exchange_data('bitmex')

    def try_premium_at_start(self, api_key, api_secret, username, create_new,
                             sync_approval):
        """Check if new user provided api pair or we already got one in the DB"""

        if api_key != '':
            assert create_new, 'We should never get here for an already existing account'
            try:
                self.premium = premium_create_and_verify(api_key, api_secret)
            except (IncorrectApiKeyFormat, AuthenticationError) as e:
                log.error('Given API key is invalid')
                # At this point we are at a new user trying to create an account with
                # premium API keys and we failed. But a directory was created. Remove it.
                # But create a backup of it in case something went really wrong
                # and the directory contained data we did not want to lose
                shutil.move(
                    self.user_directory,
                    os.path.join(
                        self.data_dir,
                        f'auto_backup_{username}_{ts_now()}',
                    ),
                )
                raise AuthenticationError(
                    'Could not verify keys for the new account. '
                    '{}'.format(str(e)),
                )

        # else, if we got premium initialize it and try to sync with the server
        premium_credentials = self.data.db.get_rotkehlchen_premium()
        if premium_credentials:
            api_key = premium_credentials[0]
            api_secret = premium_credentials[1]
            try:
                self.premium = premium_create_and_verify(api_key, api_secret)
            except (IncorrectApiKeyFormat, AuthenticationError) as e:
                log.error(
                    f'Could not authenticate with the rotkehlchen server with '
                    f'the API keys found in the Database. Error: {str(e)}', )
                del self.premium
                self.premium = None

        if not self.premium:
            return

        if self.can_sync_data_from_server():
            if sync_approval == 'unknown' and not create_new:
                log.info('DB data at server newer than local')
                raise RotkehlchenPermissionError(
                    'Rotkehlchen Server has newer version of your DB data. '
                    'Should we replace local data with the server\'s?', )
            elif sync_approval == 'yes' or sync_approval == 'unknown' and create_new:
                log.info('User approved data sync from server')
                if self.sync_data_from_server():
                    if create_new:
                        # if we successfully synced data from the server and this is
                        # a new account, make sure the api keys are properly stored
                        # in the DB
                        self.data.db.set_rotkehlchen_premium(
                            api_key, api_secret)
            else:
                log.debug('Could sync data from server but user refused')

    def unlock_user(
        self,
        user: str,
        password: str,
        create_new: bool,
        sync_approval: str,
        api_key: ApiKey,
        api_secret: ApiSecret,
    ) -> None:
        """Unlocks an existing user or creates a new one if `create_new` is True"""
        log.info(
            'Unlocking user',
            user=user,
            create_new=create_new,
            sync_approval=sync_approval,
        )
        # unlock or create the DB
        self.password = password
        self.user_directory = self.data.unlock(user, password, create_new)
        self.try_premium_at_start(
            api_key=api_key,
            api_secret=api_secret,
            username=user,
            create_new=create_new,
            sync_approval=sync_approval,
        )

        secret_data = self.data.db.get_exchange_secrets()
        settings = self.data.db.get_settings()
        historical_data_start = settings['historical_data_start']
        eth_rpc_port = settings['eth_rpc_port']
        self.trades_historian = TradesHistorian(
            user_directory=self.user_directory,
            db=self.data.db,
            eth_accounts=self.data.get_eth_accounts(),
            historical_data_start=historical_data_start,
            msg_aggregator=self.msg_aggregator,
        )
        # Initialize the price historian singleton
        PriceHistorian(
            data_directory=self.data_dir,
            history_date_start=historical_data_start,
            cryptocompare=Cryptocompare(data_directory=self.data_dir),
        )
        db_settings = self.data.db.get_settings()
        self.accountant = Accountant(
            profit_currency=self.data.main_currency(),
            user_directory=self.user_directory,
            create_csv=True,
            ignored_assets=self.data.db.get_ignored_assets(),
            include_crypto2crypto=db_settings['include_crypto2crypto'],
            taxfree_after_period=db_settings['taxfree_after_period'],
            include_gas_costs=db_settings['include_gas_costs'],
        )

        # Initialize the rotkehlchen logger
        LoggingSettings(anonymized_logs=db_settings['anonymized_logs'])
        self.initialize_exchanges(secret_data)

        ethchain = Ethchain(eth_rpc_port)
        self.blockchain = Blockchain(
            blockchain_accounts=self.data.db.get_blockchain_accounts(),
            owned_eth_tokens=self.data.db.get_owned_tokens(),
            ethchain=ethchain,
            msg_aggregator=self.msg_aggregator,
        )
        self.user_is_logged_in = True

    def logout(self):
        if not self.user_is_logged_in:
            return

        user = self.data.username
        log.info(
            'Logging out user',
            user=user,
        )
        del self.blockchain
        self.blockchain = None
        self.remove_all_exchanges()

        # Reset rotkehlchen logger to default
        LoggingSettings(anonymized_logs=DEFAULT_ANONYMIZED_LOGS)

        del self.accountant
        self.accountant = None
        del self.trades_historian
        self.trades_historian = None

        if self.premium is not None:
            del self.premium
            self.premium = None
        self.data.logout()
        self.password = None

        self.user_is_logged_in = False
        log.info(
            'User successfully logged out',
            user=user,
        )

    def set_premium_credentials(self, api_key: ApiKey,
                                api_secret: ApiSecret) -> None:
        """
        Raises IncorrectApiKeyFormat if the given key is not in a proper format
        Raises AuthenticationError if the given key is rejected by the Rotkehlchen server
        """
        log.info('Setting new premium credentials')

        if self.premium is not None:
            self.premium.set_credentials(api_key, api_secret)
        else:
            self.premium = premium_create_and_verify(api_key, api_secret)

        self.data.set_premium_credentials(api_key, api_secret)

    def maybe_upload_data_to_server(self):
        # upload only if unlocked user has premium
        if self.premium is None:
            return

        # upload only once per hour
        diff = ts_now() - self.last_data_upload_ts
        if diff > 3600:
            self.upload_data_to_server()

    def upload_data_to_server(self) -> None:
        log.debug('upload to server -- start')
        data, our_hash = self.data.compress_and_encrypt_db(self.password)
        try:
            result = self.premium.query_last_data_metadata()
        except RemoteError as e:
            log.debug(
                'upload to server -- query last metadata failed',
                error=str(e),
            )
            return

        log.debug(
            'CAN_PUSH',
            ours=our_hash,
            theirs=result['data_hash'],
        )
        if our_hash == result['data_hash']:
            log.debug('upload to server -- same hash')
            # same hash -- no need to upload anything
            return

        our_last_write_ts = self.data.db.get_last_write_ts()
        if our_last_write_ts <= result['last_modify_ts']:
            # Server's DB was modified after our local DB
            log.debug("CAN_PUSH -> 3")
            log.debug('upload to server -- remote db more recent than local')
            return

        try:
            self.premium.upload_data(
                data_blob=data,
                our_hash=our_hash,
                last_modify_ts=our_last_write_ts,
                compression_type='zlib',
            )
        except RemoteError as e:
            log.debug('upload to server -- upload error', error=str(e))
            return

        self.last_data_upload_ts = ts_now()
        log.debug('upload to server -- success')

    def can_sync_data_from_server(self) -> bool:
        log.debug('sync data from server -- start')
        _, our_hash = self.data.compress_and_encrypt_db(self.password)
        try:
            result = self.premium.query_last_data_metadata()
        except RemoteError as e:
            log.debug('sync data from server failed', error=str(e))
            return False

        log.debug(
            'CAN_PULL',
            ours=our_hash,
            theirs=result['data_hash'],
        )
        if our_hash == result['data_hash']:
            log.debug('sync from server -- same hash')
            # same hash -- no need to get anything
            return False

        our_last_write_ts = self.data.db.get_last_write_ts()
        if our_last_write_ts >= result['last_modify_ts']:
            # Local DB is newer than Server DB
            log.debug('sync from server -- local DB more recent than remote')
            return False

        return True

    def sync_data_from_server(self) -> bool:
        try:
            result = self.premium.pull_data()
        except RemoteError as e:
            log.debug('sync from server -- pulling failed.', error=str(e))
            return False

        self.data.decompress_and_decrypt_db(self.password, result)
        return True

    def start(self):
        return gevent.spawn(self.main_loop)

    def main_loop(self):
        while self.shutdown_event.wait(MAIN_LOOP_SECS_DELAY) is not True:
            log.debug('Main loop start')
            if self.poloniex is not None:
                self.poloniex.main_logic()
            if self.kraken is not None:
                self.kraken.main_logic()

            self.maybe_upload_data_to_server()

            log.debug('Main loop end')

    def add_blockchain_account(
        self,
        blockchain: SupportedBlockchain,
        account: BlockchainAddress,
    ) -> Dict:
        try:
            new_data = self.blockchain.add_blockchain_account(
                blockchain, account)
        except (InputError, EthSyncError) as e:
            return simple_result(False, str(e))
        self.data.add_blockchain_account(blockchain, account)
        return accounts_result(new_data['per_account'], new_data['totals'])

    def remove_blockchain_account(
        self,
        blockchain: SupportedBlockchain,
        account: BlockchainAddress,
    ):
        try:
            new_data = self.blockchain.remove_blockchain_account(
                blockchain, account)
        except (InputError, EthSyncError) as e:
            return simple_result(False, str(e))
        self.data.remove_blockchain_account(blockchain, account)
        return accounts_result(new_data['per_account'], new_data['totals'])

    def add_owned_eth_tokens(self, tokens: List[str]):
        ethereum_tokens = [
            EthereumToken(identifier=identifier) for identifier in tokens
        ]
        try:
            new_data = self.blockchain.track_new_tokens(ethereum_tokens)
        except (InputError, EthSyncError) as e:
            return simple_result(False, str(e))

        self.data.write_owned_eth_tokens(self.blockchain.owned_eth_tokens)
        return accounts_result(new_data['per_account'], new_data['totals'])

    def remove_owned_eth_tokens(self, tokens: List[str]):
        ethereum_tokens = [
            EthereumToken(identifier=identifier) for identifier in tokens
        ]
        try:
            new_data = self.blockchain.remove_eth_tokens(ethereum_tokens)
        except InputError as e:
            return simple_result(False, str(e))
        self.data.write_owned_eth_tokens(self.blockchain.owned_eth_tokens)
        return accounts_result(new_data['per_account'], new_data['totals'])

    def process_history(self, start_ts, end_ts):
        (
            error_or_empty,
            history,
            margin_history,
            loan_history,
            asset_movements,
            eth_transactions,
        ) = self.trades_historian.get_history(
            # For entire history processing we need to have full history available
            start_ts=0,
            end_ts=ts_now(),
            end_at_least_ts=end_ts,
        )
        result = self.accountant.process_history(
            start_ts,
            end_ts,
            history,
            margin_history,
            loan_history,
            asset_movements,
            eth_transactions,
        )
        return result, error_or_empty

    def query_fiat_balances(self):
        log.info('query_fiat_balances called')
        result = {}
        balances = self.data.get_fiat_balances()
        for currency, amount in balances.items():
            amount = FVal(amount)
            usd_rate = Inquirer().query_fiat_pair(currency, 'USD')
            result[currency] = {
                'amount': amount,
                'usd_value': amount * usd_rate,
            }

        return result

    def query_balances(
        self,
        requested_save_data: bool = False,
        timestamp: Timestamp = None,
    ) -> Dict[str, Any]:
        """Query all balances rotkehlchen can see.

        If requested_save_data is True then the data are saved in the DB.
        If timestamp is None then the current timestamp is used.
        If a timestamp is given then that is the time that the balances are going
        to be saved in the DB

        Returns a dictionary with the queried balances.
        """
        log.info('query_balances called',
                 requested_save_data=requested_save_data)

        balances = {}
        problem_free = True
        for exchange in self.connected_exchanges:
            exchange_balances, _ = getattr(self, exchange).query_balances()
            # If we got an error, disregard that exchange but make sure we don't save data
            if not isinstance(exchange_balances, dict):
                problem_free = False
            else:
                balances[exchange] = exchange_balances

        result, error_or_empty = self.blockchain.query_balances()
        if error_or_empty == '':
            balances['blockchain'] = result['totals']
        else:
            problem_free = False

        result = self.query_fiat_balances()
        if result != {}:
            balances['banks'] = result

        combined = combine_stat_dicts([v for k, v in balances.items()])
        total_usd_per_location = [(k, dict_get_sumof(v, 'usd_value'))
                                  for k, v in balances.items()]

        # calculate net usd value
        net_usd = FVal(0)
        for _, v in combined.items():
            net_usd += FVal(v['usd_value'])

        stats: Dict[str, Any] = {
            'location': {},
            'net_usd': net_usd,
        }
        for entry in total_usd_per_location:
            name = entry[0]
            total = entry[1]
            if net_usd != FVal(0):
                percentage = (total / net_usd).to_percentage()
            else:
                percentage = '0%'
            stats['location'][name] = {
                'usd_value': total,
                'percentage_of_net_value': percentage,
            }

        for k, v in combined.items():
            if net_usd != FVal(0):
                percentage = (v['usd_value'] / net_usd).to_percentage()
            else:
                percentage = '0%'
            combined[k]['percentage_of_net_value'] = percentage

        result_dict = merge_dicts(combined, stats)

        allowed_to_save = requested_save_data or self.data.should_save_balances()
        if problem_free and allowed_to_save:
            if not timestamp:
                timestamp = Timestamp(int(time.time()))
            self.data.save_balances_data(data=result_dict, timestamp=timestamp)
            log.debug('query_balances data saved')
        else:
            log.debug(
                'query_balances data not saved',
                allowed_to_save=allowed_to_save,
                problem_free=problem_free,
            )

        # After adding it to the saved file we can overlay additional data that
        # is not required to be saved in the history file
        try:
            details = self.data.accountant.details
            for asset, (tax_free_amount, average_buy_value) in details.items():
                if asset not in result_dict:
                    continue

                result_dict[asset]['tax_free_amount'] = tax_free_amount
                result_dict[asset]['average_buy_value'] = average_buy_value

                current_price = result_dict[asset]['usd_value'] / result_dict[
                    asset]['amount']
                if average_buy_value != FVal(0):
                    result_dict[asset]['percent_change'] = (
                        ((current_price - average_buy_value) /
                         average_buy_value) * 100)
                else:
                    result_dict[asset]['percent_change'] = 'INF'

        except AttributeError:
            pass

        return result_dict

    def set_main_currency(self, currency):
        with self.lock:
            self.data.set_main_currency(currency, self.accountant)
            if currency != 'USD':
                self.usd_to_main_currency_rate = Inquirer().query_fiat_pair(
                    'USD', currency)

    def set_settings(self, settings):
        log.info('Add new settings')

        message = ''
        with self.lock:
            if 'eth_rpc_port' in settings:
                result, msg = self.blockchain.set_eth_rpc_port(
                    settings['eth_rpc_port'])
                if not result:
                    # Don't save it in the DB
                    del settings['eth_rpc_port']
                    message += "\nEthereum RPC port not set: " + msg

            if 'main_currency' in settings:
                main_currency = settings['main_currency']
                if main_currency != 'USD':
                    self.usd_to_main_currency_rate = Inquirer().query_fiat_pair(
                        'USD',
                        main_currency,
                    )

            res, msg = self.accountant.customize(settings)
            if not res:
                message += '\n' + msg
                return False, message

            _, msg = self.data.set_settings(settings, self.accountant)
            if msg != '':
                message += '\n' + msg

            # Always return success here but with a message
            return True, message

    def usd_to_main_currency(self, amount):
        main_currency = self.data.main_currency()
        if main_currency != 'USD' and not hasattr(self,
                                                  'usd_to_main_currency_rate'):
            self.usd_to_main_currency_rate = Inquirer().query_fiat_pair(
                'USD', main_currency)

        return self.usd_to_main_currency_rate * amount

    def setup_exchange(
        self,
        name: str,
        api_key: ApiKey,
        api_secret: ApiSecret,
    ) -> Tuple[bool, str]:
        """
        Setup a new exchange with an api key and an api secret

        The given api key and secret are validated before the exchange is registered.
        """
        log.info('setup_exchange', name=name)
        if name not in SUPPORTED_EXCHANGES:
            return False, 'Attempted to register unsupported exchange {}'.format(
                name)

        if getattr(self, name) is not None:
            return False, 'Exchange {} is already registered'.format(name)

        secret_data = {}
        secret_data[name] = {
            'api_key': api_key,
            'api_secret': api_secret,
        }
        self.initialize_exchanges(secret_data)

        exchange = getattr(self, name)
        result, message = exchange.validate_api_key()
        if not result:
            log.error(
                'Failed to validate API key for exchange',
                name=name,
                error=message,
            )
            self.delete_exchange_data(name)
            return False, message

        # Success, save the result in the DB
        self.data.db.add_exchange(name, api_key, api_secret)
        return True, ''

    def delete_exchange_data(self, name):
        self.connected_exchanges.remove(name)
        self.trades_historian.set_exchange(name, None)
        delattr(self, name)
        setattr(self, name, None)

    def remove_exchange(self, name):
        if getattr(self, name) is None:
            return False, 'Exchange {} is not registered'.format(name)

        self.delete_exchange_data(name)
        # Success, remove it also from the DB
        self.data.db.remove_exchange(name)
        return True, ''

    def query_periodic_data(self) -> Dict[str, Union[bool, Timestamp]]:
        """Query for frequently changing data"""
        result = {}

        if self.user_is_logged_in:
            result['last_balance_save'] = self.data.db.get_last_balance_save_time()
            result['eth_node_connection'] = self.blockchain.ethchain.connected
            result['history_process_current_ts'] = (
                self.accountant.currently_processed_timestamp
            )
        return result

    def shutdown(self):
        self.logout()
        self.shutdown_event.set()
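
A rough sketch of how this version of the class might be driven by a caller. It uses only the constructor and method signatures visible above; the argparse.Namespace values, user name, and password are illustrative assumptions:

import argparse

args = argparse.Namespace(
    logtarget='file',
    logfile='rotkehlchen.log',
    loglevel='debug',
    logfromothermodules=False,
    sleep_secs=20,
    data_dir='/tmp/rotki-data',  # illustrative path
)
rotki = Rotkehlchen(args)
# Unlock (or create) a user; empty key/secret means no premium subscription
rotki.unlock_user(
    user='alice',
    password='supersecret',
    create_new=True,
    sync_approval='unknown',
    api_key='',
    api_secret='',
)
balances = rotki.query_balances(requested_save_data=False)
rotki.logout()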
Example #3
class Rotkehlchen():
    def __init__(self, args: argparse.Namespace) -> None:
        self.lock = Semaphore()
        self.lock.acquire()

        self.premium = None
        self.user_is_logged_in = False

        logfilename = None
        if args.logtarget == 'file':
            logfilename = args.logfile

        if args.loglevel == 'debug':
            loglevel = logging.DEBUG
        elif args.loglevel == 'info':
            loglevel = logging.INFO
        elif args.loglevel == 'warn':
            loglevel = logging.WARN
        elif args.loglevel == 'error':
            loglevel = logging.ERROR
        elif args.loglevel == 'critical':
            loglevel = logging.CRITICAL
        else:
            raise ValueError('Should never get here. Illegal log value')

        logging.basicConfig(
            filename=logfilename,
            filemode='w',
            level=loglevel,
            format='%(asctime)s -- %(levelname)s:%(name)s:%(message)s',
            datefmt='%d/%m/%Y %H:%M:%S %Z',
        )

        if not args.logfromothermodules:
            logging.getLogger('zerorpc').setLevel(logging.CRITICAL)
            logging.getLogger('zerorpc.channel').setLevel(logging.CRITICAL)
            logging.getLogger('urllib3').setLevel(logging.CRITICAL)
            logging.getLogger('urllib3.connectionpool').setLevel(
                logging.CRITICAL)

        self.sleep_secs = args.sleep_secs
        self.data_dir = args.data_dir
        self.args = args
        self.msg_aggregator = MessagesAggregator()
        self.exchange_manager = ExchangeManager(
            msg_aggregator=self.msg_aggregator)
        self.data = DataHandler(self.data_dir, self.msg_aggregator)
        # Initialize the Inquirer singleton
        Inquirer(data_dir=self.data_dir)

        self.lock.release()
        self.shutdown_event = gevent.event.Event()

    def unlock_user(
        self,
        user: str,
        password: str,
        create_new: bool,
        sync_approval: str,
        api_key: ApiKey,
        api_secret: ApiSecret,
    ) -> None:
        """Unlocks an existing user or creates a new one if `create_new` is True"""
        log.info(
            'Unlocking user',
            user=user,
            create_new=create_new,
            sync_approval=sync_approval,
        )
        # unlock or create the DB
        self.password = password
        self.user_directory = self.data.unlock(user, password, create_new)
        self.data_importer = DataImporter(db=self.data.db)
        self.last_data_upload_ts = self.data.db.get_last_data_upload_ts()
        self.premium_sync_manager = PremiumSyncManager(data=self.data,
                                                       password=password)
        try:
            self.premium = self.premium_sync_manager.try_premium_at_start(
                api_key=api_key,
                api_secret=api_secret,
                username=user,
                create_new=create_new,
                sync_approval=sync_approval,
            )
        except AuthenticationError:
            # It means that our credentials were not accepted by the server
            # or some other error happened
            pass

        settings = self.data.db.get_settings()
        historical_data_start = settings['historical_data_start']
        # TODO: Once settings returns a named tuple this should go away
        msg = 'setting historical_data_start should be a string'
        assert isinstance(historical_data_start, str), msg
        eth_rpc_endpoint = settings['eth_rpc_endpoint']
        msg = 'setting eth_rpc_endpoint should be a string'
        assert isinstance(eth_rpc_endpoint, str), msg
        self.trades_historian = TradesHistorian(
            user_directory=self.user_directory,
            db=self.data.db,
            eth_accounts=self.data.get_eth_accounts(),
            msg_aggregator=self.msg_aggregator,
            exchange_manager=self.exchange_manager,
        )
        # Initialize the price historian singleton
        PriceHistorian(
            data_directory=self.data_dir,
            history_date_start=historical_data_start,
            cryptocompare=Cryptocompare(data_directory=self.data_dir),
        )
        db_settings = self.data.db.get_settings()
        # TODO: Once settings returns a named tuple these should go away
        crypto2crypto = db_settings['include_crypto2crypto']
        msg = 'settings include_crypto2crypto should be a bool'
        assert isinstance(crypto2crypto, bool), msg
        taxfree_after_period = db_settings['taxfree_after_period']
        msg = 'settings taxfree_after_period should be an int'
        assert isinstance(taxfree_after_period, int), msg
        include_gas_costs = db_settings['include_gas_costs']
        msg = 'settings include_gas_costs should be a bool'
        assert isinstance(include_gas_costs, bool), msg
        self.accountant = Accountant(
            profit_currency=self.data.main_currency(),
            user_directory=self.user_directory,
            msg_aggregator=self.msg_aggregator,
            create_csv=True,
            ignored_assets=self.data.db.get_ignored_assets(),
            include_crypto2crypto=crypto2crypto,
            taxfree_after_period=taxfree_after_period,
            include_gas_costs=include_gas_costs,
        )

        # Initialize the rotkehlchen logger
        LoggingSettings(anonymized_logs=db_settings['anonymized_logs'])
        exchange_credentials = self.data.db.get_exchange_credentials()
        self.exchange_manager.initialize_exchanges(
            exchange_credentials=exchange_credentials,
            database=self.data.db,
        )

        ethchain = Ethchain(eth_rpc_endpoint)
        self.blockchain = Blockchain(
            blockchain_accounts=self.data.db.get_blockchain_accounts(),
            owned_eth_tokens=self.data.db.get_owned_tokens(),
            ethchain=ethchain,
            msg_aggregator=self.msg_aggregator,
        )
        self.user_is_logged_in = True

    def logout(self) -> None:
        if not self.user_is_logged_in:
            return

        user = self.data.username
        log.info(
            'Logging out user',
            user=user,
        )
        del self.blockchain
        self.exchange_manager.delete_all_exchanges()

        # Reset rotkehlchen logger to default
        LoggingSettings(anonymized_logs=DEFAULT_ANONYMIZED_LOGS)

        del self.accountant
        del self.trades_historian
        del self.data_importer

        if self.premium is not None:
            # For some reason mypy does not see that self.premium is set
            del self.premium  # type: ignore
        self.data.logout()
        self.password = ''

        self.user_is_logged_in = False
        log.info(
            'User successfully logged out',
            user=user,
        )

    def set_premium_credentials(self, api_key: ApiKey,
                                api_secret: ApiSecret) -> None:
        """
        Raises IncorrectApiKeyFormat if the given key is not in a proper format
        Raises AuthenticationError if the given key is rejected by the Rotkehlchen server
        """
        log.info('Setting new premium credentials')

        if self.premium is not None:
            # For some reason mypy does not see that self.premium is set
            self.premium.set_credentials(api_key, api_secret)  # type: ignore
        else:
            self.premium = premium_create_and_verify(api_key, api_secret)

        self.data.set_premium_credentials(api_key, api_secret)

    def start(self) -> gevent.Greenlet:
        return gevent.spawn(self.main_loop)

    def main_loop(self) -> None:
        while self.shutdown_event.wait(MAIN_LOOP_SECS_DELAY) is not True:
            log.debug('Main loop start')
            self.premium_sync_manager.maybe_upload_data_to_server()

            log.debug('Main loop end')

    def add_blockchain_account(
        self,
        blockchain: SupportedBlockchain,
        account: BlockchainAddress,
    ) -> Dict:
        try:
            new_data = self.blockchain.add_blockchain_account(
                blockchain, account)
        except (InputError, EthSyncError) as e:
            return simple_result(False, str(e))
        self.data.add_blockchain_account(blockchain, account)
        return accounts_result(new_data['per_account'], new_data['totals'])

    def remove_blockchain_account(
        self,
        blockchain: SupportedBlockchain,
        account: BlockchainAddress,
    ) -> Dict[str, Any]:
        try:
            new_data = self.blockchain.remove_blockchain_account(
                blockchain, account)
        except (InputError, EthSyncError) as e:
            return simple_result(False, str(e))
        self.data.remove_blockchain_account(blockchain, account)
        return accounts_result(new_data['per_account'], new_data['totals'])

    def add_owned_eth_tokens(self, tokens: List[str]) -> Dict[str, Any]:
        ethereum_tokens = [
            EthereumToken(identifier=identifier) for identifier in tokens
        ]
        try:
            new_data = self.blockchain.track_new_tokens(ethereum_tokens)
        except (InputError, EthSyncError) as e:
            return simple_result(False, str(e))

        self.data.write_owned_eth_tokens(self.blockchain.owned_eth_tokens)
        return accounts_result(new_data['per_account'], new_data['totals'])

    def remove_owned_eth_tokens(self, tokens: List[str]) -> Dict[str, Any]:
        ethereum_tokens = [
            EthereumToken(identifier=identifier) for identifier in tokens
        ]
        try:
            new_data = self.blockchain.remove_eth_tokens(ethereum_tokens)
        except InputError as e:
            return simple_result(False, str(e))
        self.data.write_owned_eth_tokens(self.blockchain.owned_eth_tokens)
        return accounts_result(new_data['per_account'], new_data['totals'])

    def process_history(
        self,
        start_ts: Timestamp,
        end_ts: Timestamp,
    ) -> Tuple[Dict[str, Any], str]:
        (
            error_or_empty,
            history,
            loan_history,
            asset_movements,
            eth_transactions,
        ) = self.trades_historian.get_history(
            # For entire history processing we need to have full history available
            start_ts=Timestamp(0),
            end_ts=ts_now(),
        )
        result = self.accountant.process_history(
            start_ts=start_ts,
            end_ts=end_ts,
            trade_history=history,
            loan_history=loan_history,
            asset_movements=asset_movements,
            eth_transactions=eth_transactions,
        )
        return result, error_or_empty

    def query_fiat_balances(self) -> Dict[Asset, Dict[str, FVal]]:
        result = {}
        balances = self.data.get_fiat_balances()
        for currency, str_amount in balances.items():
            amount = FVal(str_amount)
            usd_rate = Inquirer().query_fiat_pair(currency, A_USD)
            result[currency] = {
                'amount': amount,
                'usd_value': amount * usd_rate,
            }

        return result

    def query_balances(
        self,
        requested_save_data: bool = False,
        timestamp: Timestamp = None,
    ) -> Dict[str, Any]:
        """Query all balances rotkehlchen can see.

        If requested_save_data is True then the data are saved in the DB.
        If timestamp is None then the current timestamp is used.
        If a timestamp is given then that is the time that the balances are going
        to be saved in the DB

        Returns a dictionary with the queried balances.
        """
        log.info('query_balances called',
                 requested_save_data=requested_save_data)

        balances = {}
        problem_free = True
        for _, exchange in self.exchange_manager.connected_exchanges.items():
            exchange_balances, _ = exchange.query_balances()
            # If we got an error, disregard that exchange but make sure we don't save data
            if not isinstance(exchange_balances, dict):
                problem_free = False
            else:
                balances[exchange.name] = exchange_balances

        result, error_or_empty = self.blockchain.query_balances()
        if error_or_empty == '':
            balances['blockchain'] = result['totals']
        else:
            problem_free = False

        result = self.query_fiat_balances()
        if result != {}:
            balances['banks'] = result

        combined = combine_stat_dicts([v for k, v in balances.items()])
        total_usd_per_location = [(k, dict_get_sumof(v, 'usd_value'))
                                  for k, v in balances.items()]

        # calculate net usd value
        net_usd = FVal(0)
        for _, v in combined.items():
            net_usd += FVal(v['usd_value'])

        stats: Dict[str, Any] = {
            'location': {},
            'net_usd': net_usd,
        }
        for entry in total_usd_per_location:
            name = entry[0]
            total = entry[1]
            if net_usd != FVal(0):
                percentage = (total / net_usd).to_percentage()
            else:
                percentage = '0%'
            stats['location'][name] = {
                'usd_value': total,
                'percentage_of_net_value': percentage,
            }

        for k, v in combined.items():
            if net_usd != FVal(0):
                percentage = (v['usd_value'] / net_usd).to_percentage()
            else:
                percentage = '0%'
            combined[k]['percentage_of_net_value'] = percentage

        result_dict = merge_dicts(combined, stats)

        allowed_to_save = requested_save_data or self.data.should_save_balances()
        if problem_free and allowed_to_save:
            if not timestamp:
                timestamp = Timestamp(int(time.time()))
            self.data.save_balances_data(data=result_dict, timestamp=timestamp)
            log.debug('query_balances data saved')
        else:
            log.debug(
                'query_balances data not saved',
                allowed_to_save=allowed_to_save,
                problem_free=problem_free,
            )

        # After adding it to the saved file we can overlay additional data that
        # is not required to be saved in the history file
        try:
            details = self.accountant.events.details
            for asset, (tax_free_amount, average_buy_value) in details.items():
                if asset not in result_dict:
                    continue

                result_dict[asset]['tax_free_amount'] = tax_free_amount
                result_dict[asset]['average_buy_value'] = average_buy_value

                current_price = result_dict[asset]['usd_value'] / result_dict[
                    asset]['amount']
                if average_buy_value != FVal(0):
                    result_dict[asset]['percent_change'] = (
                        ((current_price - average_buy_value) /
                         average_buy_value) * 100)
                else:
                    result_dict[asset]['percent_change'] = 'INF'

        except AttributeError:
            pass

        return result_dict

    def set_main_currency(self, currency_string: str) -> Tuple[bool, str]:
        """Takes a currency string from the API and sets it as the main currency for rotki

        Returns True and empty string for success and False and error string for error
        """
        try:
            currency = Asset(currency_string)
        except UnknownAsset:
            msg = f'An unknown asset {currency_string} was given for main currency'
            log.critical(msg)
            return False, msg

        if not currency.is_fiat():
            msg = f'A non-fiat asset {currency_string} was given for main currency'
            log.critical(msg)
            return False, msg

        fiat_currency = FiatAsset(currency.identifier)
        with self.lock:
            self.data.set_main_currency(fiat_currency, self.accountant)
            if currency != A_USD:
                self.usd_to_main_currency_rate = Inquirer().query_fiat_pair(
                    A_USD, currency)

        return True, ''

    def set_settings(self, settings: Dict[str, Any]) -> Tuple[bool, str]:
        log.info('Add new settings')

        message = ''
        with self.lock:
            if 'eth_rpc_endpoint' in settings:
                result, msg = self.blockchain.set_eth_rpc_endpoint(
                    settings['eth_rpc_endpoint'])
                if not result:
                    # Don't save it in the DB
                    del settings['eth_rpc_endpoint']
                    message += "\nEthereum RPC endpoint not set: " + msg

            if 'main_currency' in settings:
                given_symbol = settings['main_currency']
                try:
                    main_currency = Asset(given_symbol)
                except UnknownAsset:
                    return False, f'Unknown fiat currency {given_symbol} provided'
                except DeserializationError:
                    return False, 'Non string type given for fiat currency'

                if not main_currency.is_fiat():
                    msg = (
                        f'Provided symbol for main currency {given_symbol} is '
                        f'not a fiat currency')
                    return False, msg

                if main_currency != A_USD:
                    self.usd_to_main_currency_rate = Inquirer().query_fiat_pair(
                        A_USD,
                        main_currency,
                    )

            res, msg = self.accountant.customize(settings)
            if not res:
                message += '\n' + msg
                return False, message

            _, msg = self.data.set_settings(settings, self.accountant)
            if msg != '':
                message += '\n' + msg

            # Always return success here but with a message
            return True, message

    def setup_exchange(
        self,
        name: str,
        api_key: str,
        api_secret: str,
    ) -> Tuple[bool, str]:
        """
        Setup a new exchange with an api key and an api secret

        The given api key and secret are validated as part of the setup.
        """
        is_success, msg = self.exchange_manager.setup_exchange(
            name=name,
            api_key=api_key,
            api_secret=api_secret,
            database=self.data.db,
        )

        if is_success:
            # Success, save the result in the DB
            self.data.db.add_exchange(name, api_key, api_secret)
        return is_success, msg

    def remove_exchange(self, name: str) -> Tuple[bool, str]:
        if not self.exchange_manager.has_exchange(name):
            return False, 'Exchange {} is not registered'.format(name)

        self.exchange_manager.delete_exchange(name)
        # Success, remove it also from the DB
        self.data.db.remove_exchange(name)
        return True, ''

    def query_periodic_data(self) -> Dict[str, Union[bool, Timestamp]]:
        """Query for frequently changing data"""
        result: Dict[str, Union[bool, Timestamp]] = {}

        if self.user_is_logged_in:
            result['last_balance_save'] = self.data.db.get_last_balance_save_time()
            result['eth_node_connection'] = self.blockchain.ethchain.connected
            result['history_process_start_ts'] = (
                self.accountant.started_processing_timestamp
            )
            result['history_process_current_ts'] = (
                self.accountant.currently_processing_timestamp
            )
        return result

    def shutdown(self) -> None:
        self.logout()
        self.shutdown_event.set()
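
Most mutating operations in this refactored version report failure through a (success, message) tuple instead of raising. A small sketch of how a caller might consume that convention, assuming a rotki instance that has already been unlocked as in the sketch after Example #2; the exchange name and key values are placeholders:

ok, msg = rotki.setup_exchange(
    name='kraken',
    api_key='placeholder-key',        # illustrative only
    api_secret='placeholder-secret',  # illustrative only
)
if not ok:
    print(f'Could not register exchange: {msg}')

ok, msg = rotki.set_main_currency('EUR')
if not ok:
    print(f'Could not change the main currency: {msg}')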
Example #4
class Rotkehlchen(object):
    def __init__(self, args):
        self.lock = Semaphore()
        self.lock.acquire()
        self.results_cache: typing.ResultsCacheMap = dict()
        self.connected_exchanges = []

        logfilename = None
        if args.logtarget == 'file':
            logfilename = args.logfile

        if args.loglevel == 'debug':
            loglevel = logging.DEBUG
        elif args.loglevel == 'info':
            loglevel = logging.INFO
        elif args.loglevel == 'warn':
            loglevel = logging.WARN
        elif args.loglevel == 'error':
            loglevel = logging.ERROR
        elif args.loglevel == 'critical':
            loglevel = logging.CRITICAL
        else:
            raise ValueError('Should never get here. Illegal log value')

        logging.basicConfig(
            filename=logfilename,
            filemode='w',
            level=loglevel,
            format='%(asctime)s -- %(levelname)s:%(name)s:%(message)s',
            datefmt='%d/%m/%Y %H:%M:%S %Z',
        )

        if not args.logfromothermodules:
            logging.getLogger('zerorpc').setLevel(logging.CRITICAL)
            logging.getLogger('zerorpc.channel').setLevel(logging.CRITICAL)
            logging.getLogger('urllib3').setLevel(logging.CRITICAL)
            logging.getLogger('urllib3.connectionpool').setLevel(
                logging.CRITICAL)

        self.sleep_secs = args.sleep_secs
        self.data_dir = args.data_dir
        self.args = args
        self.last_data_upload_ts = 0

        self.poloniex = None
        self.kraken = None
        self.bittrex = None
        self.binance = None

        self.data = DataHandler(self.data_dir)

        self.lock.release()
        self.shutdown_event = gevent.event.Event()

    def initialize_exchanges(self, secret_data):
        # initialize exchanges for which we have keys and are not already initialized
        if self.kraken is None and 'kraken' in secret_data:
            self.kraken = Kraken(
                str.encode(secret_data['kraken']['api_key']),
                str.encode(secret_data['kraken']['api_secret']), self.data_dir)
            self.connected_exchanges.append('kraken')
            self.trades_historian.set_exchange('kraken', self.kraken)

        if self.poloniex is None and 'poloniex' in secret_data:
            self.poloniex = Poloniex(
                str.encode(secret_data['poloniex']['api_key']),
                str.encode(secret_data['poloniex']['api_secret']),
                self.inquirer, self.data_dir)
            self.connected_exchanges.append('poloniex')
            self.trades_historian.set_exchange('poloniex', self.poloniex)

        if self.bittrex is None and 'bittrex' in secret_data:
            self.bittrex = Bittrex(
                str.encode(secret_data['bittrex']['api_key']),
                str.encode(secret_data['bittrex']['api_secret']),
                self.inquirer, self.data_dir)
            self.connected_exchanges.append('bittrex')
            self.trades_historian.set_exchange('bittrex', self.bittrex)

        if self.binance is None and 'binance' in secret_data:
            self.binance = Binance(
                str.encode(secret_data['binance']['api_key']),
                str.encode(secret_data['binance']['api_secret']),
                self.inquirer, self.data_dir)
            self.connected_exchanges.append('binance')
            self.trades_historian.set_exchange('binance', self.binance)

    def try_premium_at_start(self, api_key, api_secret, create_new,
                             sync_approval, user_dir):
        """Check if new user provided api pair or we already got one in the DB"""

        if api_key != '':
            self.premium, valid, empty_or_error = premium_create_and_verify(
                api_key, api_secret)
            if not valid:
                # At this point we are at a new user trying to create an account with
                # premium API keys and we failed. But a directory was created. Remove it.
                shutil.rmtree(user_dir)
                raise AuthenticationError(
                    'Could not verify keys for the new account. '
                    '{}'.format(empty_or_error))
        else:
            # If we got premium initialize it and try to sync with the server
            premium_credentials = self.data.db.get_rotkehlchen_premium()
            if premium_credentials:
                api_key = premium_credentials[0]
                api_secret = premium_credentials[1]
                self.premium, valid, empty_or_error = premium_create_and_verify(
                    api_key, api_secret)
                if not valid:
                    logger.error(
                        'The API keys found in the Database are not valid. Perhaps '
                        'they expired?')
                    del self.premium
                    return
            else:
                # no premium credentials in the DB
                return

        if self.can_sync_data_from_server():
            if sync_approval == 'unknown' and not create_new:
                raise PermissionError(
                    'Rotkehlchen Server has newer version of your DB data. '
                    'Should we replace local data with the server\'s?')
            elif sync_approval == 'yes' or (sync_approval == 'unknown' and create_new):
                logger.debug('User approved data sync from server')
                if self.sync_data_from_server():
                    if create_new:
                        # if we successfully synced data from the server and this is
                        # a new account, make sure the api keys are properly stored
                        # in the DB
                        self.data.db.set_rotkehlchen_premium(
                            api_key, api_secret)
            else:
                logger.debug('Could sync data from server but user refused')

    def unlock_user(self, user, password, create_new, sync_approval, api_key,
                    api_secret):
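        """Unlock or create the user's DB, set up premium sync and initialize all
        per-user components (historians, accountant, exchanges, blockchain)"""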
        # unlock or create the DB
        self.password = password
        user_dir = self.data.unlock(user, password, create_new)
        self.try_premium_at_start(api_key, api_secret, create_new,
                                  sync_approval, user_dir)

        secret_data = self.data.db.get_exchange_secrets()
        settings = self.data.db.get_settings()
        historical_data_start = settings['historical_data_start']
        eth_rpc_port = settings['eth_rpc_port']
        self.trades_historian = TradesHistorian(
            self.data_dir,
            self.data.db,
            self.data.get_eth_accounts(),
            historical_data_start,
        )
        self.price_historian = PriceHistorian(
            self.data_dir,
            historical_data_start,
        )
        db_settings = self.data.db.get_settings()
        self.accountant = Accountant(
            price_historian=self.price_historian,
            profit_currency=self.data.main_currency(),
            user_directory=user_dir,
            create_csv=True,
            ignored_assets=self.data.db.get_ignored_assets(),
            include_crypto2crypto=db_settings['include_crypto2crypto'],
            taxfree_after_period=db_settings['taxfree_after_period'],
        )

        self.inquirer = Inquirer(kraken=self.kraken)
        self.initialize_exchanges(secret_data)

        ethchain = Ethchain(eth_rpc_port)
        self.blockchain = Blockchain(
            blockchain_accounts=self.data.db.get_blockchain_accounts(),
            all_eth_tokens=self.data.eth_tokens,
            owned_eth_tokens=self.data.db.get_owned_tokens(),
            inquirer=self.inquirer,
            ethchain=ethchain,
        )

    def set_premium_credentials(self, api_key, api_secret):
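        """Set new premium API credentials after verifying them, persisting
        them in the DB only if they are valid"""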
        if hasattr(self, 'premium'):
            valid, empty_or_error = self.premium.set_credentials(
                api_key, api_secret)
        else:
            self.premium, valid, empty_or_error = premium_create_and_verify(
                api_key, api_secret)

        if valid:
            self.data.set_premium_credentials(api_key, api_secret)
            return True, ''
        return False, empty_or_error

    def maybe_upload_data_to_server(self):
        logger.debug('Maybe upload to server')
        # upload only if unlocked user has premium
        if not hasattr(self, 'premium'):
            return

        # upload only once per hour
        diff = ts_now() - self.last_data_upload_ts
        if diff > 3600:
            self.upload_data_to_server()

    def upload_data_to_server(self):
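        """Compress and encrypt the local DB and push it to the server, unless the
        remote copy is identical or was modified more recently than ours"""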
        logger.debug('upload to server -- start')
        data, our_hash = self.data.compress_and_encrypt_db(self.password)
        success, result_or_error = self.premium.query_last_data_metadata()
        if not success:
            logger.debug(
                'upload to server -- query last metadata error: {}'.format(
                    result_or_error))
            return

        logger.debug("CAN_PUSH--> OURS: {} THEIRS: {}".format(
            our_hash, result_or_error['data_hash']))
        if our_hash == result_or_error['data_hash']:
            logger.debug('upload to server -- same hash')
            # same hash -- no need to upload anything
            return

        our_last_write_ts = self.data.db.get_last_write_ts()
        if our_last_write_ts <= result_or_error['last_modify_ts']:
            # Server's DB was modified after our local DB
            logger.debug("CAN_PUSH -> 3")
            logger.debug(
                'upload to server -- remote db more recent than local')
            return

        success, result_or_error = self.premium.upload_data(
            data, our_hash, our_last_write_ts, 'zlib')
        if not success:
            logger.debug(
                'upload to server -- upload error: {}'.format(result_or_error))
            return

        self.last_data_upload_ts = ts_now()
        logger.debug('upload to server -- success')

    def can_sync_data_from_server(self):
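        """Return True only if the remote DB differs from the local one and was
        modified more recently than our last local write"""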
        logger.debug('sync data from server -- start')
        data, our_hash = self.data.compress_and_encrypt_db(self.password)
        success, result_or_error = self.premium.query_last_data_metadata()
        if not success:
            logger.debug(
                'sync data from server-- error: {}'.format(result_or_error))
            return False

        logger.debug("CAN_PULL--> OURS: {} THEIRS: {}".format(
            our_hash, result_or_error['data_hash']))
        if our_hash == result_or_error['data_hash']:
            logger.debug('sync from server -- same hash')
            # same hash -- no need to get anything
            return False

        our_last_write_ts = self.data.db.get_last_write_ts()
        if our_last_write_ts >= result_or_error['last_modify_ts']:
            # Local DB is newer than Server DB
            logger.debug(
                'sync from server -- local DB more recent than remote')
            return False

        return True

    def sync_data_from_server(self):
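        """Pull the encrypted DB from the server and replace the local DB with it"""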
        success, error_or_result = self.premium.pull_data()
        if not success:
            logger.debug(
                'sync from server -- pulling error {}'.format(error_or_result))
            return False

        self.data.decompress_and_decrypt_db(self.password,
                                            error_or_result['data'])
        return True

    def start(self):
        return gevent.spawn(self.main_loop)

    def main_loop(self):
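        """Background loop: run periodic per-exchange logic and occasionally upload
        the DB to the server, until the shutdown event is set"""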
        while not self.shutdown_event.is_set():
            logger.debug('Main loop start')
            if self.poloniex is not None:
                self.poloniex.main_logic()
            if self.kraken is not None:
                self.kraken.main_logic()

            self.maybe_upload_data_to_server()

            logger.debug('Main loop end')
            gevent.sleep(MAIN_LOOP_SECS_DELAY)

    def add_blockchain_account(self, blockchain, account):
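        """Start tracking a blockchain account, persist it in the DB and return the
        updated per-account and total balances"""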
        try:
            new_data = self.blockchain.add_blockchain_account(
                blockchain, account)
        except InputError as e:
            return simple_result(False, str(e))
        self.data.add_blockchain_account(blockchain, account)
        return accounts_result(new_data['per_account'], new_data['totals'])

    def remove_blockchain_account(self, blockchain, account):
        try:
            new_data = self.blockchain.remove_blockchain_account(
                blockchain, account)
        except InputError as e:
            return simple_result(False, str(e))
        self.data.remove_blockchain_account(blockchain, account)
        return accounts_result(new_data['per_account'], new_data['totals'])

    def add_owned_eth_tokens(self, tokens):
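        """Start tracking new ethereum tokens, persist the owned token list in the
        DB and return the updated balances"""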
        try:
            new_data = self.blockchain.track_new_tokens(tokens)
        except InputError as e:
            return simple_result(False, str(e))

        self.data.write_owned_eth_tokens(self.blockchain.owned_eth_tokens)
        return accounts_result(new_data['per_account'], new_data['totals'])

    def remove_owned_eth_tokens(self, tokens):
        try:
            new_data = self.blockchain.remove_eth_tokens(tokens)
        except InputError as e:
            return simple_result(False, str(e))
        self.data.write_owned_eth_tokens(self.blockchain.owned_eth_tokens)
        return accounts_result(new_data['per_account'], new_data['totals'])

    def process_history(self, start_ts, end_ts):
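        """Fetch the full trade/loan/movement history and run profit/loss processing
        over the requested time range"""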
        (
            error_or_empty, history, margin_history, loan_history,
            asset_movements, eth_transactions
        ) = self.trades_historian.get_history(
            # For entire history processing we need the full history available
            start_ts=0,
            end_ts=ts_now(),
            end_at_least_ts=end_ts)
        result = self.accountant.process_history(start_ts, end_ts, history,
                                                 margin_history, loan_history,
                                                 asset_movements,
                                                 eth_transactions)
        return result, error_or_empty

    def query_fiat_balances(self):
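        """Return the saved fiat balances together with their current USD value"""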
        result = {}
        balances = self.data.get_fiat_balances()
        for currency, amount in balances.items():
            amount = FVal(amount)
            usd_rate = query_fiat_pair(currency, 'USD')
            result[currency] = {
                'amount': amount,
                'usd_value': amount * usd_rate
            }

        return result

    def query_balances(self, requested_save_data=False):
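        """Query all exchange, blockchain and fiat balances, compute per-location
        and net USD statistics and optionally persist a snapshot of the result"""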
        balances = {}
        problem_free = True
        for exchange in self.connected_exchanges:
            exchange_balances, msg = getattr(self, exchange).query_balances()
            # If we got an error, disregard that exchange but make sure we don't save data
            if not exchange_balances:
                problem_free = False
            else:
                balances[exchange] = exchange_balances

        result = self.blockchain.query_balances()['totals']
        if result != {}:
            balances['blockchain'] = result

        result = self.query_fiat_balances()
        if result != {}:
            balances['banks'] = result

        combined = combine_stat_dicts([v for k, v in balances.items()])
        total_usd_per_location = [(k, dict_get_sumof(v, 'usd_value'))
                                  for k, v in balances.items()]

        # calculate net usd value
        net_usd = FVal(0)
        for k, v in combined.items():
            net_usd += FVal(v['usd_value'])

        stats = {'location': {}, 'net_usd': net_usd}
        for entry in total_usd_per_location:
            name = entry[0]
            total = entry[1]
            stats['location'][name] = {
                'usd_value': total,
                'percentage_of_net_value': (total / net_usd).to_percentage(),
            }

        for k, v in combined.items():
            combined[k]['percentage_of_net_value'] = (v['usd_value'] /
                                                      net_usd).to_percentage()

        result_dict = merge_dicts(combined, stats)

        allowed_to_save = requested_save_data or self.data.should_save_balances()
        if problem_free and allowed_to_save:
            self.data.save_balances_data(result_dict)

        # After saving, we can overlay additional data that does not need to be
        # stored in the history file
        try:
            details = self.data.accountant.details
            for asset, (tax_free_amount, average_buy_value) in details.items():
                if asset not in result_dict:
                    continue

                result_dict[asset]['tax_free_amount'] = tax_free_amount
                result_dict[asset]['average_buy_value'] = average_buy_value

                current_price = (
                    result_dict[asset]['usd_value'] / result_dict[asset]['amount']
                )
                if average_buy_value != FVal(0):
                    result_dict[asset]['percent_change'] = (
                        ((current_price - average_buy_value) /
                         average_buy_value) * 100)
                else:
                    result_dict[asset]['percent_change'] = 'INF'

        except AttributeError:
            pass

        return result_dict

    def set_main_currency(self, currency):
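        """Switch the main currency and refresh the cached USD conversion rate"""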
        with self.lock:
            self.data.set_main_currency(currency, self.accountant)
            if currency != 'USD':
                self.usd_to_main_currency_rate = query_fiat_pair(
                    'USD', currency)

    def set_settings(self, settings):
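        """Validate and apply new settings, keeping only the valid ones and
        collecting any warnings in the returned message"""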
        message = ''
        with self.lock:
            if 'eth_rpc_port' in settings:
                result, msg = self.blockchain.set_eth_rpc_port(
                    settings['eth_rpc_port'])
                if not result:
                    # Don't save it in the DB
                    del settings['eth_rpc_port']
                    message += "\nEthereum RPC port not set: " + msg

            if 'main_currency' in settings:
                main_currency = settings['main_currency']
                if main_currency != 'USD':
                    self.usd_to_main_currency_rate = query_fiat_pair(
                        'USD', main_currency)

            res, msg = self.accountant.customize(settings)
            if not res:
                message += '\n' + msg
                return False, message

            _, msg = self.data.set_settings(settings, self.accountant)
            if msg != '':
                message += '\n' + msg

            # Always return success here but with a message
            return True, message

    def usd_to_main_currency(self, amount):
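        """Convert a USD amount to the main currency, lazily caching the rate"""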
        main_currency = self.data.main_currency()
        if main_currency != 'USD' and not hasattr(self,
                                                  'usd_to_main_currency_rate'):
            self.usd_to_main_currency_rate = query_fiat_pair(
                'USD', main_currency)

        return self.usd_to_main_currency_rate * amount

    def setup_exchange(self, name, api_key, api_secret):
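        """Initialize an exchange with the given API credentials, validate them and
        persist them in the DB on success"""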
        if name not in SUPPORTED_EXCHANGES:
            return False, 'Attempted to register unsupported exchange {}'.format(
                name)

        if getattr(self, name) is not None:
            return False, 'Exchange {} is already registered'.format(name)

        secret_data = {}
        secret_data[name] = {
            'api_key': api_key,
            'api_secret': api_secret,
        }
        self.initialize_exchanges(secret_data)

        exchange = getattr(self, name)
        result, message = exchange.validate_api_key()
        if not result:
            self.delete_exchange_data(name)
            return False, message

        # Success, save the result in the DB
        self.data.db.add_exchange(name, api_key, api_secret)
        return True, ''

    def delete_exchange_data(self, name):
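        """Forget an exchange in memory: remove it from the connected list and the
        trades historian and reset its attribute to None"""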
        self.connected_exchanges.remove(name)
        self.trades_historian.set_exchange(name, None)
        delattr(self, name)
        setattr(self, name, None)

    def remove_exchange(self, name):
        if getattr(self, name) is None:
            return False, 'Exchange {} is not registered'.format(name)

        self.delete_exchange_data(name)
        # Success, remove it also from the DB
        self.data.db.remove_exchange(name)
        return True, ''

    def shutdown(self):
        print("Shutting Down...")
        self.shutdown_event.set()

    def set(self, *args):
        if len(args) < 2:
            return ("ERROR: set requires at least two arguments but "
                    "got: {}".format(args))

        if args[0] == 'poloniex':
            resp = self.poloniex.set(*args[1:])
        else:
            return "ERROR: Unrecognized first argument: {}".format(args[0])

        self.save_data()
        return resp
Example #5
0
def test_writting_fetching_data(data_dir, username):
    data = DataHandler(data_dir)
    data.unlock(username, '123', create_new=True)

    tokens = ['GNO', 'RDN']
    data.write_owned_eth_tokens(tokens)
    result = data.db.get_owned_tokens()
    assert set(tokens) == set(result)

    data.add_blockchain_account('BTC', '1CB7Pbji3tquDtMRp8mBkerimkFzWRkovS')
    data.add_blockchain_account('ETH',
                                '0xd36029d76af6fE4A356528e4Dc66B2C18123597D')
    data.add_blockchain_account('ETH',
                                '0x80b369799104a47e98a553f3329812a44a7facdc')
    accounts = data.db.get_blockchain_accounts()
    assert len(accounts) == 2
    assert accounts['BTC'] == ['1CB7Pbji3tquDtMRp8mBkerimkFzWRkovS']
    assert set(accounts['ETH']) == set([
        '0xd36029d76af6fE4A356528e4Dc66B2C18123597D',
        to_checksum_address('0x80b369799104a47e98a553f3329812a44a7facdc')
    ])
    # Add existing account should fail
    with pytest.raises(sqlcipher.IntegrityError):
        data.add_blockchain_account(
            'ETH', '0xd36029d76af6fE4A356528e4Dc66B2C18123597D')
    # Remove non-existing account
    with pytest.raises(InputError):
        data.remove_blockchain_account(
            'ETH', '0x136029d76af6fE4A356528e4Dc66B2C18123597D')
    # Remove existing account
    data.remove_blockchain_account(
        'ETH', '0xd36029d76af6fE4A356528e4Dc66B2C18123597D')
    accounts = data.db.get_blockchain_accounts()
    assert accounts['ETH'] == [
        to_checksum_address('0x80b369799104a47e98a553f3329812a44a7facdc')
    ]

    result, _ = data.add_ignored_asset('DAO')
    assert result
    result, _ = data.add_ignored_asset('DOGE')
    assert result
    result, _ = data.add_ignored_asset('DOGE')
    assert not result
    assert set(data.db.get_ignored_assets()) == set(['DAO', 'DOGE'])
    result, _ = data.remove_ignored_asset('XXX')
    assert not result
    result, _ = data.remove_ignored_asset('DOGE')
    assert result
    assert data.db.get_ignored_assets() == ['DAO']

    # With nothing inserted in settings make sure default values are returned
    result = data.db.get_settings()
    last_write_diff = ts_now() - result['last_write_ts']
    # make sure last_write was within 3 secs
    assert last_write_diff >= 0 and last_write_diff < 3
    del result['last_write_ts']
    assert result == {
        'historical_data_start': DEFAULT_START_DATE,
        'eth_rpc_port': '8545',
        'ui_floating_precision': DEFAULT_UI_FLOATING_PRECISION,
        'db_version': ROTKEHLCHEN_DB_VERSION,
        'include_crypto2crypto': True,
        'taxfree_after_period': YEAR_IN_SECONDS,
        'balance_save_frequency': DEFAULT_BALANCE_SAVE_FREQUENCY,
        'last_balance_save': 0,
    }

    # Check setting non-existing settings. Should be ignored
    _, msg = data.set_settings({'nonexisting_setting': 1}, accountant=None)
    assert msg != '' and 'nonexisting_setting' in msg
    _, msg = data.set_settings(
        {
            'nonexisting_setting': 1,
            'eth_rpc_port': '8555',
            'ui_floating_precision': 3,
        },
        accountant=None)
    assert msg != '' and 'nonexisting_setting' in msg

    # Now check nothing funny made it in the db
    result = data.db.get_settings()
    assert result['eth_rpc_port'] == '8555'
    assert result['ui_floating_precision'] == 3
    assert 'nonexisting_setting' not in result