def process_polo_loans(data, start_ts, end_ts):
    """Convert raw poloniex loan entries into the internal loan format.

    Entries that opened before start_ts are skipped; the walk over the
    reversed input stops at the first entry closing after end_ts.  The
    result is sorted ascending by open time.
    """
    processed = []
    for raw in reversed(data):
        closed_at = createTimeStamp(raw['close'], formatstr="%Y-%m-%d %H:%M:%S")
        opened_at = createTimeStamp(raw['open'], formatstr="%Y-%m-%d %H:%M:%S")
        if opened_at < start_ts:
            continue
        if closed_at > end_ts:
            break
        entry = {
            'open_time': opened_at,
            'close_time': closed_at,
            'currency': raw['currency'],
            'fee': FVal(raw['fee']),
            'earned': FVal(raw['earned']),
            'amount_lent': FVal(raw['amount']),
        }
        log.debug('processing poloniex loan', **make_sensitive(entry))
        processed.append(entry)

    processed.sort(key=lambda entry: entry['open_time'])
    return processed
def parseLoanCSV(self, path):
    """Parse a poloniex lending-history CSV export into a list of loan dicts.

    Replaces any previously parsed history on self.lending_history and
    returns it.  Column layout assumed: 0=currency, 2=amount, 6=earned,
    7=open datetime, 8=close datetime.
    """
    self.lending_history = []
    # Fix: the csv module on Python 3 requires a text-mode file opened
    # with newline='' -- opening in 'rb' raises "iterator should return
    # strings, not bytes".
    with open(path, 'r', newline='') as csvfile:
        history = csv.reader(csvfile, delimiter=',', quotechar='|')
        next(history)  # skip header row
        for row in history:
            self.lending_history.append({
                'currency': row[0],
                'earned': FVal(row[6]),
                'amount': FVal(row[2]),
                'opened': createTimeStamp(row[7]),
                'closed': createTimeStamp(row[8]),
            })
    return self.lending_history
def query_trade_history(
        self,
        start_ts: typing.Timestamp,
        end_ts: typing.Timestamp,
        end_at_least_ts: typing.Timestamp,
        market: Optional[str] = None,
        count: Optional[int] = None,
) -> List:
    """Query bittrex order history between the given timestamps.

    The trades cache is consulted only when no specific market is
    requested; a market-filtered query always hits the API.  The fresh
    result is written back to the cache before returning.
    """
    options: Dict[str, Union[str, int]] = {}
    cache = cast(List, self.check_trades_cache(start_ts, end_at_least_ts))
    if market is not None:
        options['market'] = world_pair_to_bittrex(market)
    elif cache is not None:
        # No market filter and a usable cache -- short-circuit.
        return cache
    if count is not None:
        options['count'] = count

    result = []
    for order in self.api_query('getorderhistory', options):
        ts = createTimeStamp(order['TimeStamp'], formatstr="%Y-%m-%dT%H:%M:%S.%f")
        if start_ts is not None and ts < start_ts:
            continue
        if end_ts is not None and ts > end_ts:
            break
        order['TimeStamp'] = ts
        result.append(order)

    self.update_trades_cache(result, start_ts, end_ts)
    return result
def __init__(self, data_directory, history_date_start=DEFAULT_START_DATE):
    """Load cached price history files and the cryptocompare coin list.

    Reads every price_history_*.json cache into memory and either reads
    the cryptocompare coin list from its cache file or re-queries the
    cryptocompare API when the cache is missing, corrupt or stale.
    """
    self.data_directory = data_directory
    # get the start date for historical data
    self.historical_data_start = createTimeStamp(history_date_start, formatstr="%d/%m/%Y")
    self.price_history = dict()
    # TODO: Check if historical data is after the requested start date
    # Check the data folder and load any cached history
    prefix = os.path.join(self.data_directory, 'price_history_')
    regex = re.compile(prefix + '(.*)\.json')
    files_list = glob.glob(prefix + '*.json')
    for file_ in files_list:
        match = regex.match(file_)
        assert match
        # cache key is the part of the filename between the prefix and '.json'
        cache_key = match.group(1)
        with open(file_, 'rb') as f:
            data = rlk_jsonloads(f.read())
        self.price_history[cache_key] = data

    # Get coin list of crypto compare
    invalidate_cache = True
    coinlist_cache_path = os.path.join(self.data_directory, 'cryptocompare_coinlist.json')
    if os.path.isfile(coinlist_cache_path):
        with open(coinlist_cache_path, 'rb') as f:
            try:
                data = rlk_jsonloads(f.read())
                now = ts_now()
                invalidate_cache = False
                # If we got a cache and its' over a month old then requery cryptocompare
                # (2629800 seconds is ~1 month)
                if data['time'] < now and now - data['time'] > 2629800:
                    invalidate_cache = True
                    data = data['data']
            except JSONDecodeError:
                # Corrupt cache file -- fall through and requery
                invalidate_cache = True

    if invalidate_cache:
        query_string = 'https://www.cryptocompare.com/api/data/coinlist/'
        resp = urlopen(Request(query_string))
        resp = rlk_jsonloads(resp.read())
        if 'Response' not in resp or resp['Response'] != 'Success':
            error_message = 'Failed to query cryptocompare for: "{}"'.format(query_string)
            if 'Message' in resp:
                error_message += ". \nError: {}".format(resp['Message'])
            raise ValueError(error_message)
        data = resp['Data']

        # Also save the cache
        with open(coinlist_cache_path, 'w') as f:
            write_data = {'time': ts_now(), 'data': data}
            f.write(rlk_jsondumps(write_data))
    else:
        # in any case take the data (cache file wraps the list in a
        # {'time': ..., 'data': ...} envelope)
        data = data['data']

    self.cryptocompare_coin_list = data
    # For some reason even though price for the following assets is returned
    # it's not in the coinlist so let's add them here.
    self.cryptocompare_coin_list['DAO'] = object()
    self.cryptocompare_coin_list['USDT'] = object()
def verify_otctrade_data(
        data: ExternalTrade,
) -> Tuple[Optional[typing.Trade], str]:
    """Validate an external (OTC) trade dictionary and build a Trade from it.

    Returns (trade, '') on success or (None, error_message) when any
    field is missing, empty, non-numeric or otherwise invalid.
    """
    # Field-level checks: presence, non-emptiness, numeric form.
    for field in otc_fields:
        if field not in data:
            return None, '{} was not provided'.format(field)
        if data[field] in ('', None) and field not in otc_optional_fields:
            return None, '{} was empty'.format(field)
        if field in otc_numerical_fields and not is_number(data[field]):
            return None, '{} should be a number'.format(field)

    pair = data['otc_pair']
    assert isinstance(pair, str)
    first = get_pair_position(pair, 'first')
    second = get_pair_position(pair, 'second')

    trade_type = cast(str, data['otc_type'])
    amount = FVal(data['otc_amount'])
    rate = FVal(data['otc_rate'])
    fee = FVal(data['otc_fee'])
    fee_currency = cast(typing.Asset, data['otc_fee_currency'])

    try:
        assert isinstance(data['otc_timestamp'], str)
        timestamp = createTimeStamp(data['otc_timestamp'], formatstr='%d/%m/%Y %H:%M')
    except ValueError as err:
        return None, 'Could not process the given datetime: {}'.format(err)

    log.debug(
        'Creating OTC trade data',
        sensitive_log=True,
        pair=pair,
        trade_type=trade_type,
        amount=amount,
        rate=rate,
        fee=fee,
        fee_currency=fee_currency,
    )

    # Cross-field checks.
    if data['otc_fee_currency'] not in (first, second):
        return None, 'Trade fee currency should be one of the two in the currency pair'
    if data['otc_type'] not in ('buy', 'sell'):
        return None, 'Trade type can only be buy or sell'

    return typing.Trade(
        time=timestamp,
        location='external',
        pair=cast(str, pair),
        trade_type=trade_type,
        amount=amount,
        rate=rate,
        fee=fee,
        fee_currency=fee_currency,
        link=cast(str, data['otc_link']),
        notes=cast(str, data['otc_notes']),
    ), ''
def check_otctrade_data_valid(data):
    """Validate OTC trade form data.

    Returns (timestamp, '') when the data is valid, otherwise
    (None, error_message).
    """
    for field in otc_fields:
        if field not in data:
            return None, '{} was not provided'.format(field)
        if data[field] in ('', None) and field not in otc_optional_fields:
            return None, '{} was empty'.format(field)
        if field in otc_numerical_fields and not is_number(data[field]):
            return None, '{} should be a number'.format(field)

    pair = data['otc_pair']
    base = get_pair_position(pair, 'first')
    quote = get_pair_position(pair, 'second')
    if data['otc_fee_currency'] not in (base, quote):
        return None, 'Trade fee currency should be one of the two in the currency pair'
    if data['otc_type'] not in ('buy', 'sell'):
        return None, 'Trade type can only be buy or sell'

    try:
        timestamp = createTimeStamp(data['otc_time'], formatstr='%d/%m/%Y %H:%M')
    except ValueError as err:
        return None, 'Could not process the given datetime: {}'.format(err)

    return timestamp, ''
def trade_from_poloniex(poloniex_trade: Dict[str, Any], pair: TradePair) -> Trade:
    """Turn a poloniex trade returned from poloniex trade history to our common
    trade history format"""
    trade_type = trade_type_from_string(poloniex_trade['type'])
    amount = FVal(poloniex_trade['amount'])
    rate = FVal(poloniex_trade['rate'])
    perc_fee = FVal(poloniex_trade['fee'])
    base_currency = get_pair_position(pair, 'first')
    quote_currency = get_pair_position(pair, 'second')
    timestamp = createTimeStamp(poloniex_trade['date'], formatstr="%Y-%m-%d %H:%M:%S")
    cost = rate * amount

    # The fee is a percentage: taken out of the bought amount on a buy,
    # out of the proceeds on a sell.
    if trade_type == TradeType.BUY:
        fee = amount * perc_fee
        fee_currency = quote_currency
    elif trade_type == TradeType.SELL:
        fee = cost * perc_fee
        fee_currency = base_currency
    else:
        raise ValueError(
            'Got unexpected trade type "{}" for poloniex trade'.format(
                trade_type))

    if poloniex_trade['category'] == 'settlement':
        trade_type = (
            TradeType.SETTLEMENT_BUY
            if trade_type == TradeType.BUY
            else TradeType.SETTLEMENT_SELL
        )

    log.debug(
        'Processing poloniex Trade',
        sensitive_log=True,
        timestamp=timestamp,
        order_type=trade_type,
        pair=pair,
        base_currency=base_currency,
        quote_currency=quote_currency,
        amount=amount,
        fee=fee,
        rate=rate,
    )

    # Since in Poloniex the base currency is the cost currency, iow in poloniex
    # for BTC_ETH we buy ETH with BTC and sell ETH for BTC, we need to turn it
    # into the Rotkehlchen way which is following the base/quote approach.
    return Trade(
        timestamp=timestamp,
        location='poloniex',
        pair=invert_pair(pair),
        trade_type=trade_type,
        amount=amount,
        rate=rate,
        fee=fee,
        fee_currency=fee_currency,
    )
def trade_from_poloniex(poloniex_trade, pair):
    """Turn a poloniex trade returned from poloniex trade history to our common
    trade history format

    Fix: the timestamp assignment previously ended in a stray trailing
    comma, which silently made `timestamp` a 1-element tuple instead of
    the numeric timestamp itself.
    """
    trade_type = poloniex_trade['type']
    amount = FVal(poloniex_trade['amount'])
    rate = FVal(poloniex_trade['rate'])
    perc_fee = FVal(poloniex_trade['fee'])
    base_currency = get_pair_position(pair, 'first')
    quote_currency = get_pair_position(pair, 'second')
    timestamp = createTimeStamp(poloniex_trade['date'], formatstr="%Y-%m-%d %H:%M:%S")

    if trade_type == 'buy':
        cost = rate * amount
        cost_currency = base_currency
        # fee is a percentage taken out of the bought amount
        fee = amount * perc_fee
        fee_currency = quote_currency
    elif trade_type == 'sell':
        cost = amount * rate
        cost_currency = base_currency
        # fee is a percentage taken out of the proceeds
        fee = cost * perc_fee
        fee_currency = base_currency
    else:
        raise ValueError(
            'Got unexpected trade type "{}" for poloniex trade'.format(
                trade_type))

    if poloniex_trade['category'] == 'settlement':
        trade_type = "settlement_%s" % trade_type

    log.debug(
        'Processing poloniex Trade',
        sensitive_log=True,
        timestamp=timestamp,
        order_type=trade_type,
        pair=pair,
        base_currency=base_currency,
        quote_currency=quote_currency,
        amount=amount,
        cost=cost,
        fee=fee,
        rate=rate,
    )

    return Trade(timestamp=timestamp,
                 pair=pair,
                 type=trade_type,
                 rate=rate,
                 cost=cost,
                 cost_currency=cost_currency,
                 fee=fee,
                 fee_currency=fee_currency,
                 amount=amount,
                 location='poloniex')
def __init__(
        self,
        data_directory,
        db,
        eth_accounts,
        historical_data_start,
        start_date='01/11/2015',
):
    """Set up the trades historian state.

    Exchange handles start out as None and are attached later, once the
    corresponding API credentials are known.
    """
    self.data_directory = data_directory
    self.db = db
    self.eth_accounts = eth_accounts
    self.poloniex = None
    self.kraken = None
    self.bittrex = None
    self.binance = None
    self.start_ts = createTimeStamp(start_date, formatstr="%d/%m/%Y")
    # get the start date for historical data
    self.historical_data_start = createTimeStamp(historical_data_start, formatstr="%d/%m/%Y")
    # If this flag is true we attempt to read from the manually logged margin positions file
    self.read_manual_margin_positions = True
def _post_process(before: Dict) -> Dict: """Poloniex uses datetimes so turn them into timestamps here""" after = before if ('return' in after): if (isinstance(after['return'], list)): for x in range(0, len(after['return'])): if (isinstance(after['return'][x], dict)): if ('datetime' in after['return'][x] and 'timestamp' not in after['return'][x]): after['return'][x]['timestamp'] = float( createTimeStamp(after['return'][x]['datetime']), ) return after
def post_process(self, before: Dict) -> Dict:
    """Add a 'timestamp' to each 'return' entry that only has a 'datetime'."""
    after = before
    if 'return' not in after:
        return after
    entries = after['return']
    if not isinstance(entries, list):
        return after
    for entry in entries:
        if not isinstance(entry, dict):
            continue
        if 'datetime' in entry and 'timestamp' not in entry:
            # mutate the entry in place, same as the surrounding code expects
            entry['timestamp'] = float(createTimeStamp(entry['datetime']))
    return after
def from_otc_trade(trade):
    """Convert an OTC form dictionary (otc_* keys) into a flat trade dict."""
    converted = {
        'timestamp': createTimeStamp(trade['otc_timestamp'], formatstr='%d/%m/%Y %H:%M'),
        'location': 'external',
        'pair': trade['otc_pair'],
        'type': trade['otc_type'],
        'amount': str(trade['otc_amount']),
        'rate': str(trade['otc_rate']),
        'fee': str(trade['otc_fee']),
        'fee_currency': trade['otc_fee_currency'],
        'link': trade['otc_link'],
        'notes': trade['otc_notes'],
    }
    # An id is only present for trades being edited, not newly created ones.
    if 'otc_id' in trade:
        converted['id'] = trade['otc_id']
    return converted
def query_loan_history(
        self,
        start_ts: Timestamp,
        end_ts: Timestamp,
        end_at_least_ts: Timestamp,
        from_csv: Optional[bool] = False,
) -> List:
    """
    WARNING: Querying from returnLendingHistory endpoint instead of reading
    from the CSV file can potentially return unexpected/wrong results.

    That is because the `returnLendingHistory` endpoint has a hidden limit
    of 12660 results. In our code we use the limit of 12000 but poloniex may
    change the endpoint to have a lower limit at which case this code will break.

    To be safe compare results of both CSV and endpoint to make sure they agree!
    """
    try:
        if from_csv:
            # NOTE(review): parseLoanCSV is called here with no arguments;
            # elsewhere in this file a parseLoanCSV(self, path) exists --
            # presumably a different class's variant. Verify against caller.
            return self.parseLoanCSV()
    except (OSError, IOError, csv.Error):
        # best-effort: fall back to the API query if the CSV is unreadable
        pass

    with self.lock:
        # We know Loan history cache is a list
        cache = cast(
            List,
            self.check_trades_cache(start_ts, end_at_least_ts, special_name='loan_history'),
        )
        if cache is not None:
            return cache

    loans_query_return_limit = 12000
    result = self.returnLendingHistory(
        start_ts=start_ts,
        end_ts=end_ts,
        limit=loans_query_return_limit,
    )
    data = list(result)
    log.debug('Poloniex loan history query', results_num=len(data))

    # since I don't think we have any guarantees about order of results
    # using a set of loan ids is one way to make sure we get no duplicates
    # if poloniex can guarantee me that the order is going to be ascending/descending
    # per open/close time then this can be improved
    id_set = set()

    # A full page (== limit) means there may be more results: re-query with
    # the window's end moved back to the earliest close time seen so far.
    while len(result) == loans_query_return_limit:
        # Find earliest timestamp to re-query the next batch
        min_ts = end_ts
        for loan in result:
            ts = createTimeStamp(loan['close'], formatstr="%Y-%m-%d %H:%M:%S")
            min_ts = min(min_ts, ts)
            id_set.add(loan['id'])

        result = self.returnLendingHistory(
            start_ts=start_ts,
            end_ts=min_ts,
            limit=loans_query_return_limit,
        )
        log.debug('Poloniex loan history query', results_num=len(result))
        # only keep loans we have not already seen in a previous batch
        for loan in result:
            if loan['id'] not in id_set:
                data.append(loan)

    with self.lock:
        self.update_trades_cache(data, start_ts, end_ts, special_name='loan_history')
    return data
def __init__(self, data_directory, history_date_start):
    """Remember on-disk price-history cache files and load the coin list.

    Unlike the eager variant of this initializer, price history files are
    only indexed by filename here (self.price_history_file) and loaded
    lazily later.  The cryptocompare coin list is read from its cache file
    or re-queried when the cache is missing, corrupt or stale.
    """
    self.data_directory = data_directory
    # get the start date for historical data
    self.historical_data_start = createTimeStamp(history_date_start, formatstr="%d/%m/%Y")
    self.price_history = dict()
    self.price_history_file = dict()

    # Check the data folder and remember the filenames of any cached history
    prefix = os.path.join(self.data_directory, 'price_history_')
    # double the backslashes so the path is usable inside a regex (Windows)
    prefix = prefix.replace('\\', '\\\\')
    regex = re.compile(prefix + '(.*)\\.json')
    files_list = glob.glob(prefix + '*.json')
    for file_ in files_list:
        match = regex.match(file_)
        assert match
        # cache key is the part of the filename between the prefix and '.json'
        cache_key = match.group(1)
        self.price_history_file[cache_key] = file_

    # Get coin list of crypto compare
    invalidate_cache = True
    coinlist_cache_path = os.path.join(self.data_directory, 'cryptocompare_coinlist.json')
    if os.path.isfile(coinlist_cache_path):
        log.info('Found coinlist cache', path=coinlist_cache_path)
        with open(coinlist_cache_path, 'rb') as f:
            try:
                data = rlk_jsonloads(f.read())
                now = ts_now()
                invalidate_cache = False
                # If we got a cache and its' over a month old then requery cryptocompare
                # (2629800 seconds is ~1 month)
                if data['time'] < now and now - data['time'] > 2629800:
                    log.info('Coinlist cache is now invalidated')
                    invalidate_cache = True
                    data = data['data']
            except JSONDecodeError:
                # Corrupt cache file -- fall through and requery
                invalidate_cache = True

    if invalidate_cache:
        query_string = 'https://www.cryptocompare.com/api/data/coinlist/'
        log.debug('Querying cryptocompare', url=query_string)
        resp = request_get(query_string)
        if 'Response' not in resp or resp['Response'] != 'Success':
            error_message = 'Failed to query cryptocompare for: "{}"'.format(
                query_string)
            if 'Message' in resp:
                error_message += ". \nError: {}".format(resp['Message'])
            log.error('Cryptocompare query failure', url=query_string, error=error_message)
            raise ValueError(error_message)
        data = resp['Data']

        # Also save the cache
        with open(coinlist_cache_path, 'w') as f:
            now = ts_now()
            log.info('Writting coinlist cache', timestamp=now)
            write_data = {'time': now, 'data': data}
            f.write(rlk_jsondumps(write_data))
    else:
        # in any case take the data (cache file wraps the list in a
        # {'time': ..., 'data': ...} envelope)
        data = data['data']

    self.cryptocompare_coin_list = data
    # For some reason even though price for the following assets is returned
    # it's not in the coinlist so let's add them here.
    self.cryptocompare_coin_list['DAO'] = object()
    self.cryptocompare_coin_list['USDT'] = object()
def parseLoanCSV_for_analysis(self, from_timestamp, to_timestamp, currency):
    """Compute lending statistics for one currency from the lending CSV.

    Returns a human-readable multi-line report with average rate, average
    loan duration and average amount lent/earned per day over the
    [from_timestamp, to_timestamp] window.

    NOTE(review): this is Python-2-era code -- it uses `long`, `unicode`
    and opens the CSV in 'rb' mode; it will not run unchanged on Python 3.
    NOTE(review): if no loan matches the filters, count/daycount stay 0 and
    the averaging divisions raise ZeroDivisionError.
    """
    lending_history = []
    with open(self.lending_history_file, 'rb') as csvfile:
        history = csv.reader(csvfile, delimiter=',', quotechar='|')
        next(history)  # skip header row
        for row in history:
            lending_history.append({
                'currency': row[0],
                'rate': float(row[1]),
                'amount': float(row[2]),
                'earned': float(row[6]),
                'opened': createTimeStamp(row[7]),
                'closed': createTimeStamp(row[8])
            })
    # Accept either numeric timestamps or date strings for the window bounds
    if not isinstance(from_timestamp, (int, long)):
        from_timestamp = dateToTs(from_timestamp)
    if not isinstance(to_timestamp, (int, long)):
        to_timestamp = dateToTs(to_timestamp)

    # find average lending rate over all loans
    average_rate = 0.0
    average_duration = 0
    count = 0
    minTs = 9999999999999999999
    maxTs = 0
    # rolling per-day accumulators: secscount tracks seconds elapsed within
    # the current "day" bucket; when it exceeds 86400 the bucket is closed
    secscount = 0
    daycount = 0
    daily_earned = 0
    average_daily_earned = 0
    daily_lent = 0
    average_daily_lent = 0
    previousTs = lending_history[0]['opened']
    for loan in lending_history:
        if loan['currency'] != currency:
            continue
        if loan['opened'] < from_timestamp:
            continue
        if loan['closed'] > to_timestamp:
            continue  # TODO just break since csv is ordered
        if loan['opened'] > maxTs:
            maxTs = loan['opened']
        if loan['opened'] < minTs:
            minTs = loan['opened']

        # diff is positive only if the CSV is ordered newest-first --
        # TODO confirm export ordering
        diff = previousTs - loan['opened']
        duration = loan['closed'] - loan['opened']
        if secscount + diff > 86400:
            # close the current day bucket and start a new one
            daycount += 1
            secscount = 0
            average_daily_earned += daily_earned
            daily_earned = 0
            average_daily_lent += daily_lent
            daily_lent = 0
        else:
            secscount += diff
        daily_earned += loan['earned']
        # amount weighted by the fraction of a day the loan was open
        daily_lent += loan['amount'] * duration / 86400
        previousTs = loan['opened']
        average_rate += loan['rate']
        average_duration += duration
        count += 1

    average_rate /= count
    average_duration /= count
    average_daily_earned /= daycount
    average_daily_lent /= daycount
    s = """LENDING ANALYSIS for {}
-----------------------------------
From {} to {}
Average Lending rate: {}
Average duration: {} hours
Average {} lent per day: {}
Average {} earned per day: {}
""".format(
        currency,
        unicode(datetime.utcfromtimestamp(minTs)),
        unicode(datetime.utcfromtimestamp(maxTs)),
        rateToStr(average_rate),
        average_duration / 3600,
        currency,
        average_daily_lent,
        currency,
        average_daily_earned,
    )
    return s