Example No. 1
    def parseLoanCSV(self, path):
        self.lending_history = []
        with open(path, 'rb') as csvfile:
            history = csv.reader(csvfile, delimiter=',', quotechar='|')
            next(history)  # skip header row
            for row in history:
                self.lending_history.append({
                    'currency': row[0],
                    'earned': FVal(row[6]),
                    'amount': FVal(row[2]),
                    'opened': createTimeStamp(row[7]),
                    'closed': createTimeStamp(row[8])
                })
        return self.lending_history
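All of the examples on this page revolve around createTimeStamp, whose definition is not shown anywhere above. A minimal sketch of what such a helper could look like, assuming it parses a date string with time.strptime and returns an integer UTC Unix timestamp (the real helper and its default format string may differ):

import calendar
import time

def createTimeStamp(datestr, formatstr="%Y-%m-%d %H:%M:%S"):
    # Assumed reconstruction: interpret the string as UTC and return the
    # integer Unix timestamp. The default format here is only a guess.
    return int(calendar.timegm(time.strptime(datestr, formatstr)))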
Example No. 2
    def query_trade_history(self,
                            start_ts=None,
                            end_ts=None,
                            end_at_least_ts=None,
                            market=None,
                            count=None):

        options = dict()
        cache = self.check_trades_cache(start_ts, end_at_least_ts)
        if market is not None:
            options['market'] = world_pair_to_bittrex(market)
        elif cache is not None:
            return cache

        if count is not None:
            options['count'] = count
        order_history = self.api_query('getorderhistory', options)

        returned_history = list()
        for order in order_history:
            order_timestamp = createTimeStamp(order['TimeStamp'],
                                              formatstr="%Y-%m-%dT%H:%M:%S.%f")
            if start_ts is not None and order_timestamp < start_ts:
                continue
            if end_ts is not None and order_timestamp > end_ts:
                break
            order['TimeStamp'] = order_timestamp
            returned_history.append(order)

        self.update_trades_cache(returned_history, start_ts, end_ts)
        return returned_history
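A hypothetical call of the method above (the exchange instance and the timestamps are assumptions, not from the source). Note that passing a market skips the cache lookup, while calling it without one returns the cached result when it exists:

# Hypothetical usage of query_trade_history
trades = bittrex.query_trade_history(
    start_ts=1514764800,  # 2018-01-01 00:00:00 UTC
    end_ts=1546300800,    # 2019-01-01 00:00:00 UTC
    market='ETH_BTC',     # converted internally via world_pair_to_bittrex()
)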
Example No. 3
    def __init__(
        self,
        data_directory,
        eth_accounts,
        historical_data_start,
        start_date='01/11/2015',
    ):

        self.poloniex = None
        self.kraken = None
        self.bittrex = None
        self.binance = None
        self.start_ts = createTimeStamp(start_date, formatstr="%d/%m/%Y")
        self.data_directory = data_directory
        self.eth_accounts = eth_accounts
        # get the start date for historical data
        self.historical_data_start = createTimeStamp(historical_data_start,
                                                     formatstr="%d/%m/%Y")
        # If this flag is true we attempt to read from the manually logged margin positions file
        self.read_manual_margin_positions = True
Example No. 4
def process_polo_loans(data, start_ts, end_ts):
    new_data = list()
    for loan in reversed(data):
        close_time = createTimeStamp(loan['close'],
                                     formatstr="%Y-%m-%d %H:%M:%S")
        open_time = createTimeStamp(loan['open'],
                                    formatstr="%Y-%m-%d %H:%M:%S")
        if open_time < start_ts:
            continue
        if close_time > end_ts:
            break
        new_data.append({
            'open_time': open_time,
            'close_time': close_time,
            'currency': loan['currency'],
            'fee': FVal(loan['fee']),
            'earned': FVal(loan['earned']),
            'amount_lent': FVal(loan['amount']),
        })

    new_data.sort(key=lambda loan: loan['open_time'])
    return new_data
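A small worked usage of process_polo_loans with a made-up loan entry, illustrating the expected input keys and the "%Y-%m-%d %H:%M:%S" date format (the sample values are assumptions, not data from the source):

sample_loans = [{
    'open': '2018-01-02 10:00:00',
    'close': '2018-01-03 10:00:00',
    'currency': 'BTC',
    'fee': '0.00001',
    'earned': '0.0001',
    'amount': '0.5',
}]
# Loans opened before start_ts are skipped and iteration stops once a close
# time passes end_ts; the rest come back sorted by open_time with the numeric
# fields wrapped in FVal.
loans = process_polo_loans(sample_loans, start_ts=1514764800, end_ts=1546300800)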
Example No. 5
    def post_process(self, before):
        after = before

        # Add a timestamp wherever an entry has a datetime but no timestamp yet
        if 'return' in after and isinstance(after['return'], list):
            for entry in after['return']:
                if (isinstance(entry, dict)
                        and 'datetime' in entry
                        and 'timestamp' not in entry):
                    entry['timestamp'] = float(
                        createTimeStamp(entry['datetime']))

        return after
Example No. 6
def check_otctrade_data_valid(data):
    for field in otc_fields:
        if field not in data:
            return None, '{} was not provided'.format(field)

        if data[field] in ('', None) and field not in otc_optional_fields:
            return None, '{} was empty'.format(field)

        if field in otc_numerical_fields and not is_number(data[field]):
            return None, '{} should be a number'.format(field)

    if data['otc_type'] not in ('buy', 'sell'):
        return None, 'Trade type can only be buy or sell'

    try:
        timestamp = createTimeStamp(data['otc_time'], formatstr='%d/%m/%Y %H:%M')
    except ValueError as e:
        return None, 'Could not process the given datetime: {}'.format(e)

    return timestamp, ''
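A hedged usage sketch for the validator above; otc_fields, otc_optional_fields and otc_numerical_fields are assumed to be field lists defined in the same module, and the payload values below are purely illustrative:

# Build a payload that covers every expected field (illustrative values only)
data = {field: '1' for field in otc_fields}
data['otc_type'] = 'buy'
data['otc_time'] = '01/11/2015 15:45'  # must match the '%d/%m/%Y %H:%M' format
timestamp, message = check_otctrade_data_valid(data)
if timestamp is None:
    print('Invalid OTC trade: {}'.format(message))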
Example No. 7
def trade_from_poloniex(poloniex_trade, pair):
    """Turn a poloniex trade returned from poloniex trade history to our common trade
    history format"""

    trade_type = poloniex_trade['type']
    amount = FVal(poloniex_trade['amount'])
    rate = FVal(poloniex_trade['rate'])
    perc_fee = FVal(poloniex_trade['fee'])
    base_currency = get_pair_position(pair, 'first')
    quote_currency = get_pair_position(pair, 'second')
    if trade_type == 'buy':
        cost = rate * amount
        cost_currency = base_currency
        fee = amount * perc_fee
        fee_currency = quote_currency
    elif trade_type == 'sell':
        cost = amount * rate
        cost_currency = base_currency
        fee = cost * perc_fee
        fee_currency = base_currency
    else:
        raise ValueError(
            'Got unexpected trade type "{}" for poloniex trade'.format(
                trade_type))

    if poloniex_trade['category'] == 'settlement':
        trade_type = "settlement_%s" % trade_type

    return Trade(timestamp=createTimeStamp(poloniex_trade['date'],
                                           formatstr="%Y-%m-%d %H:%M:%S"),
                 pair=pair,
                 type=trade_type,
                 rate=rate,
                 cost=cost,
                 cost_currency=cost_currency,
                 fee=fee,
                 fee_currency=fee_currency,
                 amount=amount,
                 location='poloniex')
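To make the buy/sell branch above concrete, a tiny worked example with assumed numbers (not taken from the source), using the same FVal wrapper as the function:

# Illustrative values only
amount = FVal('2')
rate = FVal('0.05')
perc_fee = FVal('0.0025')        # a 0.25% fee

buy_cost = rate * amount         # 0.1, denominated in the base currency
buy_fee = amount * perc_fee      # 0.005, charged in the quote currency
sell_cost = amount * rate        # 0.1, denominated in the base currency
sell_fee = sell_cost * perc_fee  # 0.00025, charged in the base currency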
Example No. 8
    def parseLoanCSV_for_analysis(self, from_timestamp, to_timestamp,
                                  currency):
        lending_history = []
        with open(self.lending_history_file, 'rb') as csvfile:
            history = csv.reader(csvfile, delimiter=',', quotechar='|')
            next(history)  # skip header row
            for row in history:
                lending_history.append({
                    'currency': row[0],
                    'rate': float(row[1]),
                    'amount': float(row[2]),
                    'earned': float(row[6]),
                    'opened': createTimeStamp(row[7]),
                    'closed': createTimeStamp(row[8])
                })

        if not isinstance(from_timestamp, (int, long)):
            from_timestamp = dateToTs(from_timestamp)
        if not isinstance(to_timestamp, (int, long)):
            to_timestamp = dateToTs(to_timestamp)

        # find average lending rate over all loans
        average_rate = 0.0
        average_duration = 0
        count = 0
        minTs = 9999999999999999999
        maxTs = 0
        secscount = 0
        daycount = 0
        daily_earned = 0
        average_daily_earned = 0
        daily_lent = 0
        average_daily_lent = 0
        previousTs = lending_history[0]['opened']
        for loan in lending_history:
            if loan['currency'] != currency:
                continue

            if loan['opened'] < from_timestamp:
                continue
            if loan['closed'] > to_timestamp:
                continue  # TODO just break since csv is ordered

            if loan['opened'] > maxTs:
                maxTs = loan['opened']
            if loan['opened'] < minTs:
                minTs = loan['opened']

            diff = previousTs - loan['opened']
            duration = loan['closed'] - loan['opened']
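            # 86400 seconds = one day: once the accumulated gap between loan
            # open times exceeds a day, roll the daily totals into the averages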
            if secscount + diff > 86400:
                daycount += 1
                secscount = 0
                average_daily_earned += daily_earned
                daily_earned = 0
                average_daily_lent += daily_lent
                daily_lent = 0
            else:
                secscount += diff
                daily_earned += loan['earned']
                daily_lent += loan['amount'] * duration / 86400

            previousTs = loan['opened']
            average_rate += loan['rate']
            average_duration += duration
            count += 1

        average_rate /= count
        average_duration /= count
        average_daily_earned /= daycount
        average_daily_lent /= daycount

        s = """LENDING ANALYSIS for {}
-----------------------------------
From {} to {}
Average Lending rate: {}
Average duration: {} hours
Average {} lent per day: {}
Average {} earned per day: {}
""".format(
            currency,
            unicode(datetime.utcfromtimestamp(minTs)),
            unicode(datetime.utcfromtimestamp(maxTs)),
            rateToStr(average_rate),
            average_duration / 3600,
            currency,
            average_daily_lent,
            currency,
            average_daily_earned,
        )
        return s
Example No. 9
    def query_loan_history(self,
                           start_ts,
                           end_ts,
                           end_at_least_ts=None,
                           from_csv=False):
        """
        WARNING: Querying the returnLendingHistory endpoint instead of reading from
        the CSV file can potentially return unexpected/wrong results.

        That is because the `returnLendingHistory` endpoint has a hidden limit
        of 12660 results. In our code we use a limit of 12000, but poloniex may change
        the endpoint to have a lower limit, in which case this code will break.

        To be safe, compare the results of both the CSV and the endpoint to make sure they agree!
        """
        try:
            if from_csv:
                return self.parseLoanCSV()
        except Exception:
            # Fall back to querying the API if the CSV cannot be read/parsed
            pass

        with self.lock:
            cache = self.check_trades_cache(start_ts,
                                            end_at_least_ts,
                                            special_name='loan_history')
        if cache is not None:
            return cache

        loans_query_return_limit = 12000
        result = self.returnLendingHistory(start_ts=start_ts,
                                           end_ts=end_ts,
                                           limit=loans_query_return_limit)
        data = list(result)

        # Since we have no guarantee about the order of results, a set of loan
        # ids is one way to make sure we get no duplicates. If poloniex
        # guaranteed an ascending/descending order per open/close time this
        # could be improved.
        id_set = set()

        while len(result) == loans_query_return_limit:
            # Find earliest timestamp to re-query the next batch
            min_ts = end_ts
            for loan in result:
                ts = createTimeStamp(loan['close'],
                                     formatstr="%Y-%m-%d %H:%M:%S")
                min_ts = min(min_ts, ts)
                id_set.add(loan['id'])

            result = self.returnLendingHistory(start_ts=start_ts,
                                               end_ts=min_ts,
                                               limit=loans_query_return_limit)
            for loan in result:
                if loan['id'] not in id_set:
                    data.append(loan)

        with self.lock:
            self.update_trades_cache(data,
                                     start_ts,
                                     end_ts,
                                     special_name='loan_history')
        return data
Example No. 10
    def __init__(self, data_directory, history_date_start=DEFAULT_START_DATE):
        self.data_directory = data_directory
        # get the start date for historical data
        self.historical_data_start = createTimeStamp(history_date_start,
                                                     formatstr="%d/%m/%Y")

        self.price_history = dict()
        # TODO: Check if historical data is after the requested start date
        # Check the data folder and load any cached history
        prefix = os.path.join(self.data_directory, 'price_history_')
        regex = re.compile(prefix + r'(.*)\.json')
        files_list = glob.glob(prefix + '*.json')
        for file_ in files_list:
            match = regex.match(file_)
            assert match
            cache_key = match.group(1)
            with open(file_, 'rb') as f:
                data = rlk_jsonloads(f.read())
                self.price_history[cache_key] = data

        # Get the coin list from cryptocompare
        invalidate_cache = True
        coinlist_cache_path = os.path.join(self.data_directory,
                                           'cryptocompare_coinlist.json')
        if os.path.isfile(coinlist_cache_path):
            with open(coinlist_cache_path, 'rb') as f:
                try:
                    data = rlk_jsonloads(f.read())
                    now = ts_now()
                    invalidate_cache = False

                    # If we got a cache and it's over a month old then requery cryptocompare
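                    # (2629800 s = 365.25 days * 86400 s / 12, i.e. an average month)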
                    if data['time'] < now and now - data['time'] > 2629800:
                        invalidate_cache = True
                        data = data['data']
                except JSONDecodeError:
                    invalidate_cache = True

        if invalidate_cache:
            query_string = 'https://www.cryptocompare.com/api/data/coinlist/'
            resp = urlopen(Request(query_string))
            resp = rlk_jsonloads(resp.read())
            if 'Response' not in resp or resp['Response'] != 'Success':
                error_message = 'Failed to query cryptocompare for: "{}"'.format(
                    query_string)
                if 'Message' in resp:
                    error_message += ". Error: {}".format(resp['Message'])
                raise ValueError(error_message)
            data = resp['Data']

            # Also save the cache
            with open(coinlist_cache_path, 'w') as f:
                write_data = {'time': ts_now(), 'data': data}
                f.write(rlk_jsondumps(write_data))
        else:
            # The cache is still fresh, so just unwrap its data payload
            data = data['data']

        self.cryptocompare_coin_list = data
        # For some reason, even though prices for the following assets are
        # returned, they are not in the coinlist, so add them here.
        self.cryptocompare_coin_list['DAO'] = object()
        self.cryptocompare_coin_list['USDT'] = object()