def _api_query(  # noqa: F811
        self,
        endpoint: str,
        options: Optional[Dict[str, Any]],
) -> Union[Dict[str, Any], List[Dict[str, Any]]]:
    """Query the Loopring API at the given endpoint, optionally with a querystring.

    May raise:
    - RemoteError: on connection problems, a non-OK status code, invalid JSON
      in the response or an error ``code`` entry inside the decoded response
    - LoopringAPIKeyMismatch: if a 400 response carries loopring error code 104002
    """
    querystr = self.base_url + endpoint
    if options is not None:
        querystr += '?' + urlencode(options)

    logger.debug(f'Querying loopring {querystr}')
    try:
        response = self.session.get(querystr)
    except requests.exceptions.RequestException as e:
        raise RemoteError(
            f'Loopring api query {querystr} failed due to {str(e)}') from e

    # A 400 needs special handling: the body may carry a loopring-specific
    # error code identifying an API key mismatch
    if response.status_code == HTTPStatus.BAD_REQUEST:
        try:
            json_ret = rlk_jsonloads(response.text)
        except JSONDecodeError as e:
            raise RemoteError(
                f'Loopring API {response.url} returned invalid '
                f'JSON response: {response.text}',
            ) from e

        if isinstance(json_ret, dict):
            result_info = json_ret.get('resultInfo', None)
            if result_info:
                code = result_info.get('code', None)
                if code and code == 104002:
                    raise LoopringAPIKeyMismatch()
            # else just let it hit the generic remote error below

    if response.status_code != HTTPStatus.OK:
        raise RemoteError(
            f'Loopring API request {response.url} failed '
            f'with HTTP status code {response.status_code} and text '
            f'{response.text}',
        )

    try:
        json_ret = rlk_jsonloads(response.text)
    except JSONDecodeError as e:
        raise RemoteError(
            f'Loopring API {response.url} returned invalid '
            f'JSON response: {response.text}',
        ) from e

    # Even with a 200 status the body can carry an application-level error
    if isinstance(json_ret, dict) and 'code' in json_ret:
        code = json_ret['code']
        msg = json_ret.get('msg', 'no message')
        raise RemoteError(
            f'Loopring API {response.url} returned an error '
            f'with code: {code} and message: {msg}',
        )

    return json_ret
def _query(
        self,
        module: str,
        subpath: Optional[str] = None,
        options: Optional[Dict[str, Any]] = None,
) -> Union[Dict[str, Any], List[Dict[str, Any]]]:
    """Performs a coingecko query

    May raise:
    - RemoteError if there is a problem querying coingecko
    """
    if options is None:
        options = {}
    url = f'https://api.coingecko.com/api/v3/{module}/'
    if subpath:
        url += subpath

    logger.debug(f'Querying coingecko: {url}?{urlencode(options)}')
    tries = COINGECKO_QUERY_RETRY_TIMES
    while tries >= 0:
        try:
            response = self.session.get(f'{url}?{urlencode(options)}')
        except requests.exceptions.RequestException as e:
            raise RemoteError(
                f'Coingecko API request failed due to {str(e)}') from e

        if response.status_code == 429:
            # Coingecko allows only 100 calls per minute. If you get 429 it means you
            # exceeded this and are throttled until the next minute window
            # backoff and retry 4 times = 2.5 + 3.33 + 5 + 10 = at most 20.8 secs
            if tries >= 1:
                backoff_seconds = 10 / tries
                log.debug(
                    f'Got rate limited by coingecko. '
                    f'Backing off for {backoff_seconds}',
                )
                gevent.sleep(backoff_seconds)
                tries -= 1
                continue

            # else
            log.debug(
                f'Got rate limited by coingecko and did not manage to get a '
                f'request through even after {COINGECKO_QUERY_RETRY_TIMES} '
                f'incremental backoff retries',
            )
        # success, or rate-limit retries exhausted: leave the retry loop and
        # let the status-code check below decide
        break

    if response.status_code != 200:
        msg = (
            f'Coingecko API request {response.url} failed with HTTP status '
            f'code: {response.status_code}')
        raise RemoteError(msg)

    try:
        decoded_json = rlk_jsonloads(response.text)
    except json.decoder.JSONDecodeError as e:
        msg = f'Invalid JSON in Coingecko response. {e}'
        raise RemoteError(msg) from e

    return decoded_json
def request_get(
        url: str,
        timeout: int = ALL_REMOTES_TIMEOUT,
        handle_429: bool = False,
        backoff_in_seconds: Union[int, float] = 0,
) -> Union[Dict, List]:
    """Perform a GET on the given url with retries and decode the JSON reply.

    May raise:
    - InvalidBTCAddress if blockchain.info answers an addressbalance query with 500
    - RemoteError if any other non-200 status code is returned
    - UnableToDecryptRemoteData if the response body is not valid JSON
    """
    # TODO make this a bit more smart. Perhaps conditional on the type of request.
    # Not all requests would need repeated attempts
    response = retry_calls(
        times=QUERY_RETRY_TIMES,
        location='',
        handle_429=handle_429,
        backoff_in_seconds=backoff_in_seconds,
        method_name=url,
        function=requests.get,
        # function's arguments
        url=url,
        timeout=timeout,
    )

    if response.status_code != 200:
        if 'https://blockchain.info/q/addressbalance' in url and response.status_code == 500:
            # For some weird reason blockchain.info returns
            # 500 server error when giving invalid account
            raise InvalidBTCAddress('Invalid BTC address given to blockchain.info')
        # BUG fix: previously any other non-200 response fell through to JSON
        # decoding, masking HTTP errors as decoding errors. Raise explicitly.
        raise RemoteError(
            f'{url} returned status code {response.status_code}',
        )

    try:
        result = rlk_jsonloads(response.text)
    except json.decoder.JSONDecodeError:
        raise UnableToDecryptRemoteData(f'{url} returned malformed json')

    return result
def request_get(
        url: str,
        timeout: int = ALL_REMOTES_TIMEOUT,
        handle_429: bool = False,
        backoff_in_seconds: Union[int, float] = 0,
) -> Union[Dict, List]:
    """Perform a retried GET on ``url`` and return the decoded JSON body.

    Raises RemoteError for a non-200 status code and ValueError when the
    response body cannot be decoded as JSON.
    """
    # TODO make this a bit more smart. Perhaps conditional on the type of request.
    # Not all requests would need repeated attempts
    resp = retry_calls(
        times=5,
        location='',
        handle_429=handle_429,
        backoff_in_seconds=backoff_in_seconds,
        method_name=url,
        function=requests.get,
        # function's arguments
        url=url,
        timeout=timeout,
    )

    if resp.status_code != 200:
        raise RemoteError('Get {} returned status code {}'.format(url, resp.status_code))

    try:
        return rlk_jsonloads(resp.text)
    except json.decoder.JSONDecodeError:
        raise ValueError('{} returned malformed json'.format(url))
def do_read_manual_margin_positions(
        user_directory: FilePath) -> List[MarginPosition]:
    """Load the manually-entered margin positions log and turn each entry
    into a MarginPosition. Returns an empty list if the log file is missing.
    """
    manual_margin_path = os.path.join(user_directory, MANUAL_MARGINS_LOGFILE)
    if not os.path.isfile(manual_margin_path):
        logger.info(
            'Could not find manual margins log file at {}'.format(
                manual_margin_path),
        )
        margin_data = []
    else:
        with open(manual_margin_path, 'r') as f:
            margin_data = rlk_jsonloads(f.read())

    # Now turn the manual margin data to our MarginPosition format
    # The poloniex manual data format is:
    # { "open_time": unix_timestamp, "close_time": unix_timestamp,
    # "btc_profit_loss": floating_point_number for profit or loss,
    # "notes": "optional string with notes on the margin position"
    # }
    return [
        MarginPosition(
            exchange='poloniex',
            open_time=entry['open_time'],
            close_time=entry['close_time'],
            profit_loss=FVal(entry['btc_profit_loss']),
            pl_currency=A_BTC,
            notes=entry['notes'],
        )
        for entry in margin_data
    ]
def _query(
        self,
        module: str,
        subpath: Optional[str] = None,
        options: Optional[Dict[str, Any]] = None,
) -> Union[Dict[str, Any], List[Dict[str, Any]]]:
    """Performs a coingecko query

    May raise:
    - RemoteError if there is a problem querying coingecko
    """
    if options is None:
        options = {}
    url = f'https://api.coingecko.com/api/v3/{module}/'
    if subpath:
        url += subpath

    try:
        response = self.session.get(f'{url}?{urlencode(options)}')
    except requests.exceptions.RequestException as e:
        # chain the underlying exception for easier debugging
        raise RemoteError(f'Coingecko API request failed due to {str(e)}') from e

    if response.status_code != 200:
        raise RemoteError(
            f'Coingecko API request {response.url} failed with HTTP status '
            f'code: {response.status_code}',
        )

    try:
        decoded_json = rlk_jsonloads(response.text)
    except json.decoder.JSONDecodeError as e:
        # BUG fix: the message wrongly mentioned Kraken although this is
        # the Coingecko querier
        raise RemoteError(f'Invalid JSON in Coingecko response. {e}') from e

    return decoded_json
def request_get( url: str, timeout: int = ALL_REMOTES_TIMEOUT, handle_429: bool = False, backoff_in_seconds: Union[int, float] = 0, ) -> Union[Dict, List]: """ May raise: - UnableToDecryptRemoteData from request_get - Remote error if the get request fails """ # TODO make this a bit more smart. Perhaps conditional on the type of request. # Not all requests would need repeated attempts response = retry_calls( times=QUERY_RETRY_TIMES, location='', handle_429=handle_429, backoff_in_seconds=backoff_in_seconds, method_name=url, function=requests.get, # function's arguments url=url, timeout=timeout, ) try: result = rlk_jsonloads(response.text) except json.decoder.JSONDecodeError: raise UnableToDecryptRemoteData(f'{url} returned malformed json') return result
def query_private(self, method: str, req: Optional[dict] = None) -> dict:
    """Mocked kraken private query: serve canned or randomly generated data
    for Balance, TradesHistory and Ledgers, and defer any other method to the
    real implementation.
    """
    self.first_connection()

    if method == 'Balance':
        if self.random_balance_data:
            return generate_random_kraken_balance_response()
        return self.balance_data_return

    if method == 'TradesHistory':
        if self.random_trade_data:
            return generate_random_kraken_trades_data(
                start=req['start'],
                end=req['end'],
                tradeable_pairs=list(self.tradeable_pairs.keys()),
            )
        return rlk_jsonloads(KRAKEN_SPECIFIC_TRADES_HISTORY_RESPONSE)

    if method == 'Ledgers':
        if self.random_ledgers_data:
            return generate_random_kraken_ledger_data(
                start=req['start'],
                end=req['end'],
                ledger_type=req['type'],
            )
        return self.ledger_data_generate_cb(
            start=req['start'],
            end=req['end'],
            ledger_type=req['type'],
        )

    return super().query_private(method, req)
def test_decoding():
    """rlk_jsonloads turns JSON floats and decimal strings into FVal while
    leaving ints and non-numeric strings untouched, recursively."""
    strdata = (
        '{"a": 3.14, "b":5, "c": "foo", "d": "5.42323143", "e": { "u1": "3.221"}, '
        '"f": [2.1, "boo", 3, "4.2324"]}')
    data = rlk_jsonloads(strdata)

    # all values that must have been converted to FVal
    for value, expected in (
            (data['a'], '3.14'),
            (data['d'], '5.42323143'),
            (data['e']['u1'], '3.221'),
            (data['f'][0], '2.1'),
            (data['f'][3], '4.2324'),
    ):
        assert isinstance(value, FVal)
        assert value == FVal(expected)

    # ints stay ints
    assert isinstance(data['b'], int)
    assert data['b'] == 5
    assert isinstance(data['f'][2], int)
    assert data['f'][2] == 3

    # non-numeric strings stay strings
    assert isinstance(data['c'], (str, bytes))
    assert data['c'] == 'foo'
    assert isinstance(data['f'][1], (str, bytes))
    assert data['f'][1] == "boo"
def test_rlk_jsonloads():
    """Decoding a JSON object converts floats and numeric strings to FVal."""
    data = '{"a": "5.4", "b": "foo", "c": 32.1, "d": 5, "e": [1, "a", "5.1"]}'
    expected = {
        'a': FVal('5.4'),
        'b': 'foo',
        'c': FVal('32.1'),
        'd': 5,
        'e': [1, 'a', FVal('5.1')],
    }
    assert rlk_jsonloads(data) == expected
def attempt_connect(self, ethrpc_port: int, mainnet_check=True) -> Tuple[bool, str]:
    """Attempt to connect to a local ethereum node at the given RPC port.

    Returns a (success, message) tuple. On connection the token ABI is loaded
    and, if mainnet_check is True, the genesis hash is compared against the
    mainnet genesis and the node's sync status is reported.
    """
    if self.rpc_port == ethrpc_port and self.connected:
        # We are already connected
        return True, 'Already connected to an ethereum node'

    if self.web3:
        del self.web3

    try:
        self.web3 = Web3(HTTPProvider('http://localhost:{}'.format(ethrpc_port)))
    except requests.exceptions.ConnectionError:
        log.warning('Could not connect to a local ethereum node. Will use etherscan only')
        self.connected = False
        return False, 'Failed to connect to ethereum node at port {}'.format(ethrpc_port)

    if self.web3.isConnected():
        dir_path = os.path.dirname(os.path.realpath(__file__))
        with open(os.path.join(dir_path, 'data', 'token_abi.json'), 'r') as f:
            self.token_abi = rlk_jsonloads(f.read())

        # Also make sure we are actually connected to the Ethereum mainnet
        if mainnet_check:
            # compare the node's genesis block hash against the known mainnet one
            genesis_hash = self.web3.eth.getBlock(0)['hash'].hex()  # pylint: disable=no-member
            target = '0xd4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3'
            if genesis_hash != target:
                log.warning(
                    'Connected to a local ethereum node but it is not on the ethereum mainnet',
                )
                self.connected = False
                message = (
                    'Connected to ethereum node at port {} but it is not on '
                    'the ethereum mainnet'.format(ethrpc_port)
                )
                return False, message

            if self.web3.eth.syncing:  # pylint: disable=no-member
                current_block = self.web3.eth.syncing.currentBlock  # pylint: disable=no-member
                latest_block = self.web3.eth.syncing.highestBlock  # pylint: disable=no-member
                return self.is_synchronized(current_block, latest_block)
            else:
                current_block = self.web3.eth.blockNumber  # pylint: disable=no-member
                latest_block = self.query_eth_highest_block()
                if latest_block is None:
                    return False, 'Could not query latest block from blockcypher.'
                return self.is_synchronized(current_block, latest_block)

        self.connected = True
        return True, ''
    else:
        log.warning('Could not connect to a local ethereum node. Will use etherscan only')
        self.connected = False
        message = 'Failed to connect to ethereum node at port {}'.format(ethrpc_port)

    # If we get here we did not connect
    return False, message
def test_rlk_jsonloads():
    """Floats and decimal strings decode to FVal, while an all-digit string
    decodes to a plain int."""
    data = '{"a": "5.4", "b": "foo", "c": 32.1, "d": 5, "e": [1, "a", "5.1"], "f": "37451082560000003241"}'  # noqa: E501
    expected = {
        'a': FVal('5.4'),
        'b': 'foo',
        'c': FVal('32.1'),
        'd': 5,
        'e': [1, 'a', FVal('5.1')],
        'f': 37451082560000003241,
    }
    assert rlk_jsonloads(data) == expected
def query_private(self, method: str, req: Optional[dict] = None) -> dict:
    """Mocked kraken private query used in tests.

    Serves random or canned data for the Balance, TradesHistory and Ledgers
    methods; any other method is passed to the real implementation. Can also
    simulate a remote error when ``self.remote_errors`` is set.
    """
    # Pretty ugly ... mock a kraken remote error
    if self.remote_errors:
        raise RemoteError('Kraken remote error')

    if method == 'Balance':
        if self.random_balance_data:
            return generate_random_kraken_balance_response()
        # else
        return self.balance_data_return
    elif method == 'TradesHistory':
        if self.random_trade_data:
            return generate_random_kraken_trades_data(
                start=req['start'],
                end=req['end'],
                tradeable_pairs=list(self.tradeable_pairs.keys()),
            )
        # else
        return rlk_jsonloads(KRAKEN_SPECIFIC_TRADES_HISTORY_RESPONSE)
    elif method == 'Ledgers':
        ledger_type = req['type']
        if self.random_ledgers_data:
            return generate_random_kraken_ledger_data(
                start=req['start'],
                end=req['end'],
                ledger_type=ledger_type,
            )

        # else use specific data
        if ledger_type == 'deposit':
            response = KRAKEN_SPECIFIC_DEPOSITS_RESPONSE
        elif ledger_type == 'withdrawal':
            response = KRAKEN_SPECIFIC_WITHDRAWALS_RESPONSE
        else:
            raise AssertionError(
                'Unknown ledger type at kraken ledgers mock query')

        return rlk_jsonloads(response)

    return super().query_private(method, req)
def query_private(self, method: str, req: Optional[dict] = None) -> dict:
    """Mocked kraken private query returning randomly generated data for the
    Balance, TradesHistory and Ledgers methods; everything else is deferred
    to the real implementation.
    """
    self.first_connection()

    if method == 'Balance':
        return generate_random_kraken_balance_response()

    if method == 'TradesHistory':
        generated = [
            generate_random_kraken_trade_data(
                list(self.tradeable_pairs.keys()),
                req['start'],
                req['end'],
            )
            for _ in range(random.randint(1, 49))
        ]
        # Trades is a dict with txid as the key
        trades = {trade['ordertxid']: trade for trade in generated}
        response_str = json.dumps({'trades': trades, 'count': len(generated)})
        return rlk_jsonloads(response_str)

    if method == 'Ledgers':
        generated = [
            generate_random_kraken_ledger_data(
                start_ts=req['start'],
                end_ts=req['end'],
                ledger_type=req['type'],
            )
            for _ in range(random.randint(1, 49))
        ]
        # Ledgers is a dict with txid as the key
        ledgers = {ledger['refid']: ledger for ledger in generated}
        response_str = json.dumps({'ledger': ledgers, 'count': len(generated)})
        return rlk_jsonloads(response_str)

    return super().query_private(method, req)
def generate_random_kraken_ledger_data(start: Timestamp, end: Timestamp, ledger_type):
    """Build a random kraken Ledgers response with 1-49 entries, serialized
    and decoded through rlk_jsonloads like a real API reply."""
    entries = [
        generate_random_single_kraken_ledger_data(
            start_ts=start,
            end_ts=end,
            ledger_type=ledger_type,
        )
        for _ in range(random.randint(1, 49))
    ]
    # Ledgers is a dict with txid as the key
    ledgers = {entry['refid']: entry for entry in entries}
    response_str = json.dumps({'ledger': ledgers, 'count': len(entries)})
    return rlk_jsonloads(response_str)
def _api_query(self, command: str, req: Optional[Dict] = None) -> Union[Dict, List]:
    """Query the poloniex API with the given command.

    Public commands (returnTicker/returnCurrencies) are a plain GET; anything
    else is a signed POST to the trading API.

    May raise RemoteError on a non-200 status code or an invalid JSON response.
    """
    if req is None:
        req = {}

    # public endpoints need no signing
    if command == 'returnTicker' or command == 'returnCurrencies':
        log.debug(f'Querying poloniex for {command}')
        ret = self.session.get(self.public_uri + command)
        return rlk_jsonloads(ret.text)

    req['command'] = command
    with self.lock:
        # Protect this region with a lock since poloniex will reject
        # non-increasing nonces. So if two greenlets come in here at
        # the same time one of them will fail
        req['nonce'] = int(time.time() * 1000)
        post_data = str.encode(urlencode(req))

        # HMAC-SHA512 signature over the urlencoded body, sent in a header
        sign = hmac.new(self.secret, post_data, hashlib.sha512).hexdigest()
        self.session.headers.update({'Sign': sign})

        log.debug(
            'Poloniex private API query',
            command=command,
            post_data=req,
        )
        ret = self.session.post('https://poloniex.com/tradingApi', req)

    if ret.status_code != 200:
        raise RemoteError(
            f'Poloniex query responded with error status code: {ret.status_code}'
            f' and text: {ret.text}',
        )

    try:
        if command == 'returnLendingHistory':
            return rlk_jsonloads_list(ret.text)
        else:
            # For some reason poloniex can also return [] for an empty trades result
            if ret.text == '[]':
                return {}
            else:
                result = rlk_jsonloads_dict(ret.text)
                return _post_process(result)
    except JSONDecodeError:
        raise RemoteError(
            f'Poloniex returned invalid JSON response: {ret.text}')
def generate_random_kraken_trades_data(
        start: Timestamp,
        end: Timestamp,
        tradeable_pairs: List[str],
):
    """Build a random kraken TradesHistory response with 1-49 trades keyed by
    their order transaction id, decoded through rlk_jsonloads like a real
    API reply."""
    generated = [
        generate_random_kraken_trade_data(
            tradeable_pairs,
            start,
            end,
        )
        for _ in range(random.randint(1, 49))
    ]
    # Trades is a dict with txid as the key
    trades = {trade['ordertxid']: trade for trade in generated}
    response_str = json.dumps({'trades': trades, 'count': len(generated)})
    return rlk_jsonloads(response_str)
def request_get(uri: str, timeout: int = ALL_REMOTES_TIMEOUT) -> Union[Dict, List]:
    """Perform a retried GET on ``uri`` and return the decoded JSON body.

    Raises RemoteError on a non-200 status code and ValueError when the
    response body cannot be decoded as JSON.
    """
    # TODO make this a bit more smart. Perhaps conditional on the type of request.
    # Not all requests would need repeated attempts
    # BUG fix: previously ``str(timeout)`` was passed as the second positional
    # argument of requests.get, which is ``params`` -- so the timeout was
    # never actually applied. Bind it as a keyword argument instead.
    response = retry_calls(
        5,
        '',
        uri,
        lambda u: requests.get(u, timeout=timeout),
        uri,
    )

    if response.status_code != 200:
        raise RemoteError('Get {} returned status code {}'.format(uri, response.status_code))

    try:
        result = rlk_jsonloads(response.text)
    except json.decoder.JSONDecodeError:
        raise ValueError('{} returned malformed json'.format(uri))

    return result
def main():
    """Entry point of the asset aggregator script.

    Cross-checks asset data against coin paprika, coinmarketcap (when an API
    key is given) and cryptocompare, then overwrites
    rotkehlchen/data/all_assets.json with the merged result.
    """
    arg_parser = aggregator_args()
    args = arg_parser.parse_args()
    msg_aggregator = MessagesAggregator()
    user_data_dir = Path(default_data_directory()) / args.db_user
    database = DBHandler(
        user_data_dir=user_data_dir,
        password=args.db_password,
        msg_aggregator=msg_aggregator,
    )
    our_data = AssetResolver().assets
    paprika = CoinPaprika()
    cmc = None
    cmc_list = None
    # repository root: three directories up from this file
    root_path = os.path.dirname(
        os.path.dirname(os.path.dirname(os.path.realpath(__file__))))
    data_directory = f'{Path.home()}/.rotkehlchen'
    if args.cmc_api_key:
        cmc = Coinmarketcap(
            data_directory=data_directory,
            api_key=args.cmc_api_key,
        )
        cmc_list = cmc.get_cryptocyrrency_map()

    cryptocompare = Cryptocompare(data_directory=data_directory, database=database)
    paprika_coins_list = paprika.get_coins_list()
    cryptocompare_coins_map = cryptocompare.all_coins()

    if args.input_file:
        if not os.path.isfile(args.input_file):
            print(f'Given input file {args.input_file} is not a file')
            sys.exit(1)

        with open(args.input_file, 'r') as f:
            input_data = rlk_jsonloads(f.read())

        # refuse input symbols that already exist in all_assets.json
        given_symbols = set(input_data.keys())
        current_symbols = set(our_data.keys())
        if not given_symbols.isdisjoint(current_symbols):
            print(
                f'The following given input symbols already exist in the '
                f'all_assets.json file {given_symbols.intersection(current_symbols)}',
            )
            sys.exit(1)

        # If an input file is given, iterate only its assets and perform checks
        # NOTE(review): the loop rebinds input_data while iterating the original
        # dict's keys view; presumably process_asset preserves the key set -- confirm
        for asset_symbol in input_data.keys():
            input_data = process_asset(
                our_data=input_data,
                asset_symbol=asset_symbol,
                paprika_coins_list=paprika_coins_list,
                paprika=paprika,
                cmc_list=cmc_list,
                cryptocompare_coins_map=cryptocompare_coins_map,
                always_keep_our_time=args.always_keep_our_time,
            )

        # and now combine the two dictionaries to get the final one. Note that no
        # checks are performed for what was in all_assets.json before the script
        # ran in this case
        our_data = {**our_data, **input_data}
    else:
        # Iterate all of the assets of the all_assets.json file and perform checks
        for asset_symbol in our_data.keys():
            our_data = process_asset(
                our_data=our_data,
                asset_symbol=asset_symbol,
                paprika_coins_list=paprika_coins_list,
                paprika=paprika,
                cmc_list=cmc_list,
                cryptocompare_coins_map=cryptocompare_coins_map,
                always_keep_our_time=args.always_keep_our_time,
            )

    # Finally overwrite the all_assets.json with the modified assets
    with open(
            os.path.join(root_path, 'rotkehlchen', 'data', 'all_assets.json'),
            'w',
    ) as f:
        f.write(json.dumps(
            our_data,
            sort_keys=True,
            indent=4,
        ),
        )
def attempt_connect(
        self,
        ethrpc_endpoint: str,
        mainnet_check: bool = True,
) -> Tuple[bool, str]:
    """Attempt to connect to the ethereum node at the given RPC endpoint.

    Returns a (success, message) tuple. On connection the token ABI is loaded
    and, if mainnet_check is True, the chain id is verified to be mainnet (1)
    and the node's sync status is checked, warning the user if unverified.
    """
    message = ''
    if self.rpc_endpoint == ethrpc_endpoint and self.connected:
        # We are already connected
        return True, 'Already connected to an ethereum node'

    if self.web3:
        del self.web3

    try:
        parsed_eth_rpc_endpoint = urlparse(ethrpc_endpoint)
        if not parsed_eth_rpc_endpoint.scheme:
            # default to plain http when no scheme was given
            ethrpc_endpoint = f"http://{ethrpc_endpoint}"
        provider = HTTPProvider(
            endpoint_uri=ethrpc_endpoint,
            request_kwargs={'timeout': self.eth_rpc_timeout},
        )
        self.web3 = Web3(provider)
        # retry transient request failures at the middleware level
        self.web3.middleware_onion.inject(http_retry_request_middleware, layer=0)
    except requests.exceptions.ConnectionError:
        log.warning(
            'Could not connect to an ethereum node. Will use etherscan only'
        )
        self.connected = False
        return False, f'Failed to connect to ethereum node at endpoint {ethrpc_endpoint}'

    if self.web3.isConnected():
        dir_path = os.path.dirname(
            os.path.dirname(os.path.dirname(os.path.realpath(__file__))),
        )
        with open(os.path.join(dir_path, 'data', 'token_abi.json'), 'r') as f:
            self.token_abi = rlk_jsonloads(f.read())

        # Also make sure we are actually connected to the Ethereum mainnet
        synchronized = True
        msg = ''
        if mainnet_check:
            chain_id = self.web3.eth.chainId
            if chain_id != 1:
                message = (
                    f'Connected to ethereum node at endpoint {ethrpc_endpoint} but '
                    f'it is not on the ethereum mainnet. The chain id '
                    f'the node is in is {chain_id}.')
                log.warning(message)
                self.connected = False
                return False, message

            if self.web3.eth.syncing:  # pylint: disable=no-member
                current_block = self.web3.eth.syncing.currentBlock  # pylint: disable=no-member
                latest_block = self.web3.eth.syncing.highestBlock  # pylint: disable=no-member
                synchronized, msg = self.is_synchronized(current_block, latest_block)
            else:
                current_block = self.web3.eth.blockNumber  # pylint: disable=no-member
                latest_block = self.query_eth_highest_block()
                if latest_block is None:
                    msg = 'Could not query latest block'
                    log.warning(msg)
                    synchronized = False
                else:
                    synchronized, msg = self.is_synchronized(current_block, latest_block)

        if not synchronized:
            # a stale node is usable but may give wrong answers -- warn the user
            self.msg_aggregator.add_warning(
                'You are using an ethereum node but we could not verify that it is '
                'synchronized in the ethereum mainnet. Balances and other queries '
                'may be incorrect.',
            )

        self.connected = True
        log.info(f'Connected to ethereum node at {ethrpc_endpoint}')
        return True, ''
    else:
        log.warning(
            'Could not connect to an ethereum node. Will use etherscan only'
        )
        self.connected = False
        message = f'Failed to connect to ethereum node at endpoint {ethrpc_endpoint}'

    # If we get here we did not connect
    return False, message
def get_history(self, start_ts, end_ts, end_at_least_ts=None):
    """Gets or creates trades and loans history from start_ts to end_ts or
    if `end_at_least` is given and we have a cache history which satisfies
    it we return the cache
    """
    if end_at_least_ts is None:
        end_at_least_ts = end_ts

    log.info(
        'Get or create trade history',
        start_ts=start_ts,
        end_ts=end_ts,
        end_at_least_ts=end_at_least_ts,
    )

    historyfile_path = os.path.join(self.user_directory, TRADES_HISTORYFILE)
    if os.path.isfile(historyfile_path):
        with open(historyfile_path, 'r') as infile:
            try:
                history_json_data = rlk_jsonloads(infile.read())
            except JSONDecodeError:
                # NOTE(review): on corrupt JSON, history_json_data stays
                # unbound and the data_up_todate call below would raise
                # NameError -- confirm whether this is intended
                pass

        # Check that the cache of every enabled exchange covers the range
        all_history_okay = data_up_todate(history_json_data, start_ts, end_at_least_ts)
        poloniex_history_okay = True
        if self.poloniex is not None:
            poloniex_history_okay = self.poloniex.check_trades_cache(
                start_ts, end_at_least_ts,
            ) is not None
        kraken_history_okay = True
        if self.kraken is not None:
            kraken_history_okay = self.kraken.check_trades_cache(
                start_ts, end_at_least_ts,
            ) is not None
        bittrex_history_okay = True
        if self.bittrex is not None:
            bittrex_history_okay = self.bittrex.check_trades_cache(
                start_ts, end_at_least_ts,
            ) is not None
        bitmex_history_okay = True
        if self.bitmex is not None:
            bitmex_history_okay = self.bitmex.check_trades_cache(
                start_ts, end_at_least_ts,
            ) is not None
        binance_history_okay = True
        if self.binance is not None:
            binance_history_okay = self.binance.check_trades_cache(
                start_ts, end_at_least_ts,
            ) is not None

        # margin positions come either from a cache file or from the
        # manually-entered log
        if not self.read_manual_margin_positions:
            marginfile_path = os.path.join(self.user_directory, MARGIN_HISTORYFILE)
            margin_file_contents = get_jsonfile_contents_or_empty_dict(
                marginfile_path)
            margin_history_is_okay = data_up_todate(
                margin_file_contents,
                start_ts,
                end_at_least_ts,
            )
        else:
            margin_history_is_okay = True
            margin_file_contents = do_read_manual_margin_positions(
                self.user_directory,
            )

        loansfile_path = os.path.join(self.user_directory, LOANS_HISTORYFILE)
        loan_file_contents = get_jsonfile_contents_or_empty_dict(
            loansfile_path)
        loan_history_is_okay = data_up_todate(
            loan_file_contents,
            start_ts,
            end_at_least_ts,
        )

        assetmovementsfile_path = os.path.join(
            self.user_directory,
            ASSETMOVEMENTS_HISTORYFILE,
        )
        asset_movements_contents = get_jsonfile_contents_or_empty_dict(
            assetmovementsfile_path,
        )
        asset_movements_history_is_okay = data_up_todate(
            asset_movements_contents,
            start_ts,
            end_at_least_ts,
        )

        eth_tx_log_path = os.path.join(self.user_directory, ETHEREUM_TX_LOGFILE)
        eth_tx_log_contents = get_jsonfile_contents_or_empty_dict(
            eth_tx_log_path)
        eth_tx_log_history_history_is_okay = data_up_todate(
            eth_tx_log_contents,
            start_ts,
            end_at_least_ts,
        )

        # only use the cache if every single piece of it covers the range
        if (all_history_okay and
                poloniex_history_okay and
                kraken_history_okay and
                bittrex_history_okay and
                bitmex_history_okay and
                binance_history_okay and
                margin_history_is_okay and
                loan_history_is_okay and
                asset_movements_history_is_okay and
                eth_tx_log_history_history_is_okay):
            log.info(
                'Using cached history',
                start_ts=start_ts,
                end_ts=end_ts,
                end_at_least_ts=end_at_least_ts,
            )

            history_trades = trades_from_dictlist(
                history_json_data['data'],
                start_ts,
                end_ts,
            )
            if not self.read_manual_margin_positions:
                margin_trades = trades_from_dictlist(
                    margin_file_contents['data'],
                    start_ts,
                    end_ts,
                )
            else:
                margin_trades = margin_file_contents

            eth_transactions = transactions_from_dictlist(
                eth_tx_log_contents['data'],
                start_ts,
                end_ts,
            )
            asset_movements = asset_movements_from_dictlist(
                asset_movements_contents['data'],
                start_ts,
                end_ts,
            )

            history_trades = include_external_trades(
                self.db,
                start_ts,
                end_ts,
                history_trades,
            )

            # make sure that this is the same as what is returned
            # from create_history
            return (
                '',
                history_trades,
                margin_trades,
                loan_file_contents['data'],
                asset_movements,
                eth_transactions,
            )

    # no usable cache: query all sources and build the history from scratch
    return self.create_history(start_ts, end_ts, end_at_least_ts)
def api_query(self, method: str, options: Optional[Dict] = None) -> Union[List, Dict]:
    """Query the binance API for the given method, signing private endpoints
    and backing off exponentially on 429 rate-limit responses.

    May raise:
    - ValueError if the method is not a known binance endpoint
    - RemoteError on request failure, a rate-limit ban or invalid JSON
    """
    if not options:
        options = {}

    backoff = self.initial_backoff

    while True:
        with self.lock:
            # Protect this region with a lock since binance will reject
            # non-increasing nonces. So if two greenlets come in here at
            # the same time one of them will fail
            if method in V3_ENDPOINTS or method in WAPI_ENDPOINTS:
                api_version = 3
                # Recommended recvWindows is 5000 but we get timeouts with it
                options['recvWindow'] = 10000
                options['timestamp'] = str(int(time.time() * 1000))
                # HMAC-SHA256 signature over the urlencoded query parameters
                signature = hmac.new(
                    self.secret,
                    urlencode(options).encode('utf-8'),
                    hashlib.sha256,
                ).hexdigest()
                options['signature'] = signature
            elif method in V1_ENDPOINTS:
                api_version = 1
            else:
                raise ValueError('Unexpected binance api method {}'.format(method))

            apistr = 'wapi/' if method in WAPI_ENDPOINTS else 'api/'
            request_url = f'{self.uri}{apistr}v{str(api_version)}/{method}?'
            request_url += urlencode(options)

            log.debug('Binance API request', request_url=request_url)

            response = self.session.get(request_url)

        # a 429 past the backoff limit is treated as a hard failure
        limit_ban = response.status_code == 429 and backoff > self.backoff_limit
        if limit_ban or response.status_code not in (200, 429):
            code = 'no code found'
            msg = 'no message found'
            try:
                result = rlk_jsonloads(response.text)
                if isinstance(result, dict):
                    code = result.get('code', code)
                    msg = result.get('msg', msg)
            except JSONDecodeError:
                pass

            raise RemoteError(
                'Binance API request {} for {} failed with HTTP status '
                'code: {}, error code: {} and error message: {}'.format(
                    response.url,
                    method,
                    response.status_code,
                    code,
                    msg,
                ))
        elif response.status_code == 429:
            # NOTE(review): this break looks unreachable since limit_ban
            # already covers backoff > backoff_limit above -- confirm
            if backoff > self.backoff_limit:
                break
            # Binance has limits and if we hit them we should backoff
            # https://github.com/binance-exchange/binance-official-api-docs/blob/master/rest-api.md#limits
            log.debug('Got 429 from Binance. Backing off', seconds=backoff)
            gevent.sleep(backoff)
            backoff = backoff * 2
            continue
        else:
            # success
            break

    try:
        json_ret = rlk_jsonloads(response.text)
    except JSONDecodeError:
        raise RemoteError(f'Binance returned invalid JSON response: {response.text}')

    return json_ret
def _api_query(
        self,
        verb: Literal['get', 'post'],
        path: str,
        options: Optional[Dict] = None,
) -> Dict:
    """
    Queries Bitcoin.de with the given verb for the given path and options

    May raise RemoteError on connection problems, invalid JSON, API key
    errors reported in the response, a failing status code or a non-dict
    response body.
    """
    assert verb in ('get', 'post'), (
        'Given verb {} is not a valid HTTP verb'.format(verb))

    request_path_no_args = '/v4/' + path

    data = ''
    if not options:
        request_path = request_path_no_args
    else:
        request_path = request_path_no_args + '?' + urlencode(options)

    # millisecond timestamp used as the request nonce
    nonce = str(int(time.time() * 1000))
    request_url = self.uri + request_path

    self._generate_signature(
        request_type=verb.upper(),
        url=request_url,
        nonce=nonce,
    )

    headers = {
        'x-api-nonce': nonce,
    }
    if data != '':
        headers.update({
            'Content-Type': 'application/json',
            'Content-Length': str(len(data)),
        })

    log.debug('Bitcoin.de API Query', verb=verb, request_url=request_url)

    try:
        response = getattr(self.session, verb)(request_url, data=data, headers=headers)
    except requests.exceptions.RequestException as e:
        raise RemoteError(
            f'Bitcoin.de API request failed due to {str(e)}') from e

    try:
        json_ret = rlk_jsonloads(response.text)
    except JSONDecodeError as exc:
        raise RemoteError(
            'Bitcoin.de returned invalid JSON response') from exc

    if response.status_code not in (200, 401):
        # surface the specific API-key errors the exchange reports
        if isinstance(json_ret, dict) and 'errors' in json_ret:
            for error in json_ret['errors']:
                if error.get('field') == 'X-API-KEY' and error.get(
                        'code') == 1:
                    raise RemoteError(
                        'Provided API Key is in invalid Format')

                if error.get('code') == 3:
                    raise RemoteError('Provided API Key is invalid')

            raise RemoteError(json_ret['errors'])

        raise RemoteError(
            'Bitcoin.de api request for {} failed with HTTP status code {}'
            .format(
                response.url,
                response.status_code,
            ),
        )

    if not isinstance(json_ret, dict):
        raise RemoteError('Bitcoin.de returned invalid non-dict response')

    return json_ret
def _api_query(
        self,
        verb: Literal['get', 'post'],
        path: str,
        options: Optional[Dict] = None,
        authenticated: bool = True,
) -> Any:
    """
    Queries ICONOMI with the given verb for the given path and options

    Signs the request when ``authenticated`` is True. May raise RemoteError
    on connection problems, invalid JSON or a failing status code.
    """
    assert verb in ('get', 'post'), (
        'Given verb {} is not a valid HTTP verb'.format(verb)
    )

    request_path_no_args = '/v1/' + path

    data = ''
    if not options:
        request_path = request_path_no_args
    else:
        request_path = request_path_no_args + '?' + urlencode(options)

    # millisecond timestamp used both in the signature and as a header
    timestamp = str(int(time.time() * 1000))
    request_url = self.uri + request_path

    headers = {}
    if authenticated:
        signature = self._generate_signature(
            request_type=verb.upper(),
            request_path=request_path_no_args,
            timestamp=timestamp,
        )
        headers.update({
            'ICN-SIGN': signature,
            'ICN-TIMESTAMP': timestamp,
            'ICN-API-KEY': self.api_key,
        })

    if data != '':
        headers.update({
            'Content-Type': 'application/json',
            'Content-Length': str(len(data)),
        })

    log.debug('ICONOMI API Query', verb=verb, request_url=request_url)

    try:
        response = getattr(self.session, verb)(
            request_url,
            data=data,
            timeout=30,
            headers=headers,
        )
    except requests.exceptions.RequestException as e:
        raise RemoteError(f'ICONOMI API request failed due to {str(e)}') from e

    try:
        json_ret = rlk_jsonloads(response.text)
    except JSONDecodeError as exc:
        raise RemoteError('ICONOMI returned invalid JSON response') from exc

    if response.status_code not in (200, 201):
        # prefer the exchange's own error message when present
        if isinstance(json_ret, dict) and 'message' in json_ret:
            raise RemoteError(json_ret['message'])

        raise RemoteError(
            'ICONOMI api request for {} failed with HTTP status code {}'.format(
                response.url,
                response.status_code,
            ),
        )

    return json_ret
def _api_query(
        self,
        verb: str,
        path: str,
        options: Optional[Dict] = None,
) -> Union[List, Dict]:
    """Query the Bitmex API with the given HTTP verb, path and options.

    Private endpoints get a signed request. Raises RemoteError on a failing
    status code, undecodable JSON or an error entry in the decoded reply.
    """
    assert verb in ('get', 'post', 'push'), (
        'Given verb {} is not a valid HTTP verb'.format(verb))

    # signature is valid for 20 seconds
    expires = int(time.time()) + 20

    bare_path = '/api/v1/' + path
    data = ''
    request_path = bare_path if not options else bare_path + '?' + urlencode(options)

    if path in BITMEX_PRIVATE_ENDPOINTS:
        self._generate_signature(
            verb=verb,
            path=request_path,
            expires=expires,
            data=data,
        )

    self.session.headers.update({
        'api-expires': str(expires),
    })
    if data != '':
        self.session.headers.update({
            'Content-Type': 'application/json',
            'Content-Length': str(len(data)),
        })

    request_url = self.uri + request_path
    log.debug('Bitmex API Query', verb=verb, request_url=request_url)
    response = getattr(self.session, verb)(request_url, data=data)

    if response.status_code not in (200, 401):
        raise RemoteError(
            'Bitmex api request for {} failed with HTTP status code {}'.format(
                response.url,
                response.status_code,
            ),
        )

    try:
        decoded = rlk_jsonloads(response.text)
    except JSONDecodeError:
        raise RemoteError('Bitmex returned invalid JSON response')

    if isinstance(decoded, dict) and 'error' in decoded:
        raise RemoteError(decoded['error']['message'])

    return decoded
def get_cached_history(self, start_ts, end_ts, end_at_least_ts=None):
    """Gets all the cached history data instead of querying all external sources
    to create the history through create_history()

    Can raise:
        - HistoryCacheInvalid:
            If any of the cache files are corrupt in any way, missing or
            do not cover the given time range
    """
    if end_at_least_ts is None:
        end_at_least_ts = end_ts

    historyfile_path = os.path.join(self.user_directory, TRADES_HISTORYFILE)
    if not os.path.isfile(historyfile_path):
        raise HistoryCacheInvalid()

    with open(historyfile_path, 'r') as infile:
        try:
            history_json_data = rlk_jsonloads(infile.read())
        except JSONDecodeError as e:
            # Bugfix: this error was previously swallowed with `pass`, which
            # left history_json_data unbound and raised a NameError below
            raise HistoryCacheInvalid('Historical trades cache invalid') from e

    if not data_up_todate(history_json_data, start_ts, end_at_least_ts):
        raise HistoryCacheInvalid('Historical trades cache invalid')
    try:
        history_trades = trades_from_dictlist(
            given_trades=history_json_data['data'],
            start_ts=start_ts,
            end_ts=end_ts,
            location='historical trades',
            msg_aggregator=self.msg_aggregator,
        )
    except KeyError as e:
        raise HistoryCacheInvalid('Historical trades cache invalid') from e

    history_trades = maybe_add_external_trades_to_history(
        db=self.db,
        start_ts=start_ts,
        end_ts=end_ts,
        history=history_trades,
        msg_aggregator=self.msg_aggregator,
    )

    # Validate each exchange's trade cache in one pass instead of four
    # copy-pasted checks. A None exchange means it's not configured.
    for name, exchange in (
            ('Kraken', self.kraken),
            ('Bittrex', self.bittrex),
            ('Binance', self.binance),
            ('Bitmex', self.bitmex),
    ):
        cache_okay = exchange is None or exchange.check_trades_cache(
            start_ts, end_at_least_ts,
        ) is not None
        if not cache_okay:
            raise HistoryCacheInvalid(f'{name} cache is invalid')

    # Poloniex specific
    loan_data = []
    if self.poloniex:
        if not self.poloniex.check_trades_cache(start_ts, end_at_least_ts):
            raise HistoryCacheInvalid('Poloniex cache is invalid')

        loansfile_path = os.path.join(self.user_directory, LOANS_HISTORYFILE)
        loan_file_contents = get_jsonfile_contents_or_empty_dict(loansfile_path)
        loan_history_is_okay = data_up_todate(
            loan_file_contents,
            start_ts,
            end_at_least_ts,
        )
        if not loan_history_is_okay:
            raise HistoryCacheInvalid('Poloniex loan cache is invalid')
        loan_data = loan_file_contents['data']

    # margin positions that have been manually input
    if not self.read_manual_margin_positions:
        marginfile_path = os.path.join(self.user_directory, MARGIN_HISTORYFILE)
        margin_file_contents = get_jsonfile_contents_or_empty_dict(marginfile_path)
        margin_history_is_okay = data_up_todate(
            margin_file_contents,
            start_ts,
            end_at_least_ts,
        )
        if not margin_history_is_okay:
            raise HistoryCacheInvalid('Margin Positions cache is invalid')

        try:
            margin_trades = trades_from_dictlist(
                given_trades=margin_file_contents['data'],
                start_ts=start_ts,
                end_ts=end_ts,
                location='Margin position trades',
                msg_aggregator=self.msg_aggregator,
            )
        except KeyError as e:
            raise HistoryCacheInvalid('Margin Positions cache is invalid') from e
    else:
        margin_trades = do_read_manual_margin_positions(self.user_directory)

    asset_movements = self._get_cached_asset_movements(
        start_ts=start_ts,
        end_ts=end_ts,
        end_at_least_ts=end_at_least_ts,
    )

    eth_tx_log_path = os.path.join(self.user_directory, ETHEREUM_TX_LOGFILE)
    eth_tx_log_contents = get_jsonfile_contents_or_empty_dict(eth_tx_log_path)
    eth_tx_log_history_is_okay = data_up_todate(
        eth_tx_log_contents,
        start_ts,
        end_at_least_ts,
    )
    if not eth_tx_log_history_is_okay:
        raise HistoryCacheInvalid('Ethereum transactions cache is invalid')
    try:
        eth_transactions = transactions_from_dictlist(
            eth_tx_log_contents['data'],
            start_ts,
            end_ts,
        )
    except KeyError as e:
        raise HistoryCacheInvalid('Ethereum transactions cache is invalid') from e

    # make sure that this is the same as what is returned
    # from create_history, except for the first argument
    return (
        history_trades,
        margin_trades,
        loan_data,
        asset_movements,
        eth_transactions,
    )
def attempt_connect(self, ethrpc_endpoint: str, mainnet_check=True) -> Tuple[bool, str]:
    """Try to connect web3 to the given ethereum JSON-RPC endpoint.

    Returns a (success, message) tuple; on failure the message explains why.
    Side effects: mutates self.web3, self.connected and self.token_abi.
    """
    message = ''
    if self.rpc_endpoint == ethrpc_endpoint and self.connected:
        # We are already connected
        return True, 'Already connected to an ethereum node'

    if self.web3:
        # Drop any previous provider before creating a new one
        del self.web3

    try:
        parsed_eth_rpc_endpoint = urlparse(ethrpc_endpoint)
        if not parsed_eth_rpc_endpoint.scheme:
            # Default to plain http when the user gave no URL scheme
            ethrpc_endpoint = f"http://{ethrpc_endpoint}"
        provider = HTTPProvider(ethrpc_endpoint)
        self.web3 = Web3(provider)
    except requests.exceptions.ConnectionError:
        log.warning(
            'Could not connect to an ethereum node. Will use etherscan only'
        )
        self.connected = False
        return False, f'Failed to connect to ethereum node at endpoint {ethrpc_endpoint}'

    if self.web3.isConnected():
        # Load the token ABI shipped in the data dir next to this module
        dir_path = os.path.dirname(os.path.realpath(__file__))
        with open(os.path.join(dir_path, 'data', 'token_abi.json'), 'r') as f:
            self.token_abi = rlk_jsonloads(f.read())

        # Also make sure we are actually connected to the Ethereum mainnet
        synchronized = True
        msg = ''
        if mainnet_check:
            # Compare the genesis block hash against the known mainnet genesis
            genesis_hash = self.web3.eth.getBlock(0)['hash'].hex()  # pylint: disable=no-member
            target = '0xd4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3'
            if genesis_hash != target:
                log.warning(
                    'Connected to an ethereum node but it is not on the ethereum mainnet',
                )
                self.connected = False
                message = (
                    f'Connected to ethereum node at endpoint {ethrpc_endpoint} but '
                    f'it is not on the ethereum mainnet'
                )
                return False, message

            if self.web3.eth.syncing:  # pylint: disable=no-member
                # Node reports it is still syncing: use its own progress numbers
                current_block = self.web3.eth.syncing.currentBlock  # pylint: disable=no-member
                latest_block = self.web3.eth.syncing.highestBlock  # pylint: disable=no-member
                synchronized, msg = self.is_synchronized(current_block, latest_block)
            else:
                # Node claims synced: cross-check its head against an
                # externally queried highest block
                current_block = self.web3.eth.blockNumber  # pylint: disable=no-member
                latest_block = self.query_eth_highest_block()
                if latest_block is None:
                    return False, 'Could not query latest block from blockcypher.'
                synchronized, msg = self.is_synchronized(current_block, latest_block)

        if not synchronized:
            return False, msg

        self.connected = True
        log.info(f'Connected to ethereum node at {ethrpc_endpoint}')
        return True, ''
    else:
        log.warning(
            'Could not connect to an ethereum node. Will use etherscan only'
        )
        self.connected = False
        message = f'Failed to connect to ethereum node at endpoint {ethrpc_endpoint}'

    # If we get here we did not connect
    return False, message
def get_cached_history(self, start_ts, end_ts, end_at_least_ts=None):
    """Gets all the cached history data instead of querying all external sources
    to create the history through create_history()

    Can raise:
        - HistoryCacheInvalid:
            If any of the cache files are corrupt in any way, missing or
            do not cover the given time range
    """
    if end_at_least_ts is None:
        end_at_least_ts = end_ts

    historyfile_path = os.path.join(self.user_directory, TRADES_HISTORYFILE)
    if not os.path.isfile(historyfile_path):
        raise HistoryCacheInvalid()

    with open(historyfile_path, 'r') as infile:
        try:
            history_json_data = rlk_jsonloads(infile.read())
        except JSONDecodeError as e:
            # Bugfix: this error was previously swallowed with `pass`, which
            # left history_json_data unbound and raised a NameError below
            raise HistoryCacheInvalid('Historical trades cache invalid') from e

    if not data_up_todate(history_json_data, start_ts, end_at_least_ts):
        raise HistoryCacheInvalid('Historical trades cache invalid')
    try:
        history_trades = trades_from_dictlist(
            given_trades=history_json_data['data'],
            start_ts=start_ts,
            end_ts=end_ts,
            location='historical trades',
            msg_aggregator=self.msg_aggregator,
        )
    except (KeyError, DeserializationError) as e:
        raise HistoryCacheInvalid('Historical trades cache invalid') from e

    history_trades = maybe_add_external_trades_to_history(
        db=self.db,
        start_ts=start_ts,
        end_ts=end_ts,
        history=history_trades,
        msg_aggregator=self.msg_aggregator,
    )

    # Check the cache of each exchange. Iterate values() directly since
    # the dict keys are unused here.
    poloniex = None
    for exchange in self.exchange_manager.connected_exchanges.values():
        if exchange.name == 'poloniex':
            poloniex = exchange
        if not exchange.check_trades_cache(start_ts, end_at_least_ts):
            raise HistoryCacheInvalid(f'{exchange.name} cache is invalid')

    # Poloniex specific
    loan_data = []
    if poloniex:
        loansfile_path = os.path.join(self.user_directory, LOANS_HISTORYFILE)
        loan_file_contents = get_jsonfile_contents_or_empty_dict(loansfile_path)
        loan_history_is_okay = data_up_todate(
            loan_file_contents,
            start_ts,
            end_at_least_ts,
        )
        if not loan_history_is_okay:
            raise HistoryCacheInvalid('Poloniex loan cache is invalid')
        loan_data = loan_file_contents['data']

    asset_movements = self._get_cached_asset_movements(
        start_ts=start_ts,
        end_ts=end_ts,
        end_at_least_ts=end_at_least_ts,
    )

    eth_tx_log_path = os.path.join(self.user_directory, ETHEREUM_TX_LOGFILE)
    eth_tx_log_contents = get_jsonfile_contents_or_empty_dict(eth_tx_log_path)
    eth_tx_log_history_is_okay = data_up_todate(
        eth_tx_log_contents,
        start_ts,
        end_at_least_ts,
    )
    if not eth_tx_log_history_is_okay:
        raise HistoryCacheInvalid('Ethereum transactions cache is invalid')
    try:
        eth_transactions = transactions_from_dictlist(
            eth_tx_log_contents['data'],
            start_ts,
            end_ts,
        )
    except KeyError as e:
        raise HistoryCacheInvalid('Ethereum transactions cache is invalid') from e

    # make sure that this is the same as what is returned
    # from create_history, except for the first argument
    return (
        history_trades,
        loan_data,
        asset_movements,
        eth_transactions,
    )
raise AssertionError(f'Unexpected {self.name} case: {case}') call_options = options.copy() limit = options['limit'] results: Union[List[Trade], List[AssetMovement]] = [ ] # type: ignore # bug list nothing processed_result_ids: Set[int] = set() retries_left = API_REQUEST_RETRY_TIMES while retries_left >= 0: response = self._api_query( endpoint=endpoint, options=call_options, ) if response.status_code != HTTPStatus.OK: try: error_response = rlk_jsonloads(response.text) except JSONDecodeError: msg = f'{self.name} {case} returned an invalid JSON response: {response.text}.' log.error(msg, options=call_options) self.msg_aggregator.add_error( f'Got remote error while querying {self.name} {case}: {msg}', ) return [] # type: ignore # bug list nothing # Check if the rate limits have been hit (response JSON as dict) if isinstance(error_response, dict): if error_response.get( 'error', None) == API_RATE_LIMITS_ERROR_MESSAGE: if retries_left == 0: msg = ( f'{self.name} {case} request failed after retrying '
def api_query(
        self,
        api_type: BINANCE_API_TYPE,
        method: str,
        options: Optional[Dict] = None,
) -> Union[List, Dict]:
    """Performs a binance api query

    May raise:
     - RemoteError
     - BinancePermissionError
    """
    call_options = options.copy() if options else {}
    while True:
        with self.nonce_lock:
            # Protect this region with a lock since binance will reject
            # non-increasing nonces. So if two greenlets come in here at
            # the same time one of them will fail
            if 'signature' in call_options:
                # Drop the signature from a previous (retried) attempt: it
                # must be recomputed over the fresh timestamp set below
                del call_options['signature']

            is_v3_api_method = api_type == 'api' and method in V3_METHODS
            is_new_futures_api = api_type in ('fapi', 'dapi')
            call_needs_signature = (
                (api_type == 'fapi' and method in FAPI_METHODS) or
                (api_type == 'dapi' and method in FAPI_METHODS) or  # same as fapi
                (api_type == 'sapi' and method in SAPI_METHODS) or
                (api_type == 'wapi' and method in WAPI_METHODS) or
                is_v3_api_method
            )
            if call_needs_signature:
                # Each binance API family is served under a different
                # version segment of the URL path
                if api_type in ('sapi', 'dapi'):
                    api_version = 1
                elif api_type == 'fapi':
                    api_version = 2
                elif api_type == 'wapi' or is_v3_api_method:
                    api_version = 3
                else:
                    raise AssertionError(
                        f'Should never get to signed binance api call for '
                        f'api_type: {api_type} and method {method}',
                    )

                # Recommended recvWindows is 5000 but we get timeouts with it
                call_options['recvWindow'] = 10000
                call_options['timestamp'] = str(ts_now_in_ms() + self.offset_ms)
                # HMAC-SHA256 over the urlencoded options. The signature is
                # appended only after signing, so parameter insertion order
                # above determines the signed byte string.
                signature = hmac.new(
                    self.secret,
                    urlencode(call_options).encode('utf-8'),
                    hashlib.sha256,
                ).hexdigest()
                call_options['signature'] = signature
            elif api_type == 'api' and method in V1_METHODS:
                api_version = 1
            else:
                raise AssertionError(f'Unexpected {self.name} API method {method}')

            # The newer futures APIs (fapi/dapi) live on their own subdomain
            api_subdomain = api_type if is_new_futures_api else 'api'
            request_url = (
                f'https://{api_subdomain}.{self.uri}{api_type}/v{str(api_version)}/{method}?'
            )
            request_url += urlencode(call_options)
            log.debug(f'{self.name} API request', request_url=request_url)
            try:
                response = self.session.get(request_url)
            except requests.exceptions.RequestException as e:
                raise RemoteError(
                    f'{self.name} API request failed due to {str(e)}',
                ) from e

        if response.status_code not in (200, 418, 429):
            # Unexpected status: try to extract binance's error code/message
            # from the body, then raise the appropriate exception type
            code = 'no code found'
            msg = 'no message found'
            try:
                result = rlk_jsonloads(response.text)
                if isinstance(result, dict):
                    code = result.get('code', code)
                    msg = result.get('msg', msg)
            except JSONDecodeError:
                pass

            exception_class: Union[Type[RemoteError], Type[BinancePermissionError]]
            if response.status_code == 401 and code == REJECTED_MBX_KEY:
                # Either API key permission error or if futures/dapi then not enables yet
                exception_class = BinancePermissionError
            else:
                exception_class = RemoteError

            raise exception_class(
                '{} API request {} for {} failed with HTTP status '
                'code: {}, error code: {} and error message: {}'.format(
                    self.name,
                    response.url,
                    method,
                    response.status_code,
                    code,
                    msg,
                ))

        if response.status_code in (418, 429):
            # Binance has limits and if we hit them we should backoff.
            # A Retry-After header is sent with a 418 or 429 responses and
            # will give the number of seconds required to wait, in the case
            # of a 429, to prevent a ban, or, in the case of a 418, until
            # the ban is over.
            # https://binance-docs.github.io/apidocs/spot/en/#limits
            retry_after = int(response.headers.get('retry-after', '0'))
            log.debug(
                f'Got status code {response.status_code} from {self.name}. Backing off',
                seconds=retry_after,
            )
            if retry_after > RETRY_AFTER_LIMIT:
                # Give up rather than block for an excessively long ban
                raise RemoteError(
                    '{} API request {} for {} failed with HTTP status '
                    'code: {} due to a too long retry after value (> {})'.format(
                        self.name,
                        response.url,
                        method,
                        response.status_code,
                        RETRY_AFTER_LIMIT,
                    ))

            gevent.sleep(retry_after)
            continue

        # else success
        break

    try:
        json_ret = rlk_jsonloads(response.text)
    except JSONDecodeError as e:
        raise RemoteError(
            f'{self.name} returned invalid JSON response: {response.text}',
        ) from e

    return json_ret