Example 1
        def populate_history_cb(
            result_history: Union[List[Trade], List[MarginPosition]],
            result_asset_movements: List[AssetMovement],
            exchange_specific_data: Any,
        ) -> None:
            """This callback will run for succesfull exchange history query"""
            history.extend(result_history)
            asset_movements.extend(result_asset_movements)

            if exchange_specific_data:
                # This can only be poloniex at the moment
                polo_loans_data = exchange_specific_data
                polo_loans.extend(
                    process_polo_loans(
                        msg_aggregator=self.msg_aggregator,
                        data=polo_loans_data,
                        start_ts=start_ts,
                        end_ts=end_ts,
                    ))
                loansfile_path = os.path.join(self.user_directory,
                                              LOANS_HISTORYFILE)
                write_history_data_in_file(polo_loans, loansfile_path,
                                           start_ts, end_ts)
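A note on the pattern: populate_history_cb is a success callback; the exchange query hands its results to the callback instead of returning them. A minimal self-contained sketch of that pattern, with a stub in place of the real query (the query_exchange_history name is illustrative, not this codebase's actual API):

from typing import Any, Callable, List

def query_exchange_history(
        success_callback: Callable[[List[Any], List[Any], Any], None],
        fail_callback: Callable[[str], None],
) -> None:
    # Stub standing in for a real exchange query: on success the results
    # are handed to the callback instead of being returned to the caller.
    try:
        trades: List[Any] = []  # pretend these came from the remote server
        movements: List[Any] = []
        exchange_specific_data = None  # e.g. poloniex loan data
        success_callback(trades, movements, exchange_specific_data)
    except Exception as e:  # the real code would catch RemoteError here
        fail_callback(str(e))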
Example 2
    def get_historical_data(
        self,
        from_asset: Asset,
        to_asset: Asset,
        timestamp: Timestamp,
        historical_data_start: Timestamp,
    ) -> List[PriceHistoryEntry]:
        """
        Get historical price data from cryptocompare

        Returns a sorted list of price entries.

        - May raise RemoteError if there is a problem reaching the cryptocompare server
        or with reading the response returned by the server
        - May raise UnsupportedAsset if from/to asset is not supported by cryptocompare
        """
        log.debug(
            'Retrieving historical price data from cryptocompare',
            from_asset=from_asset,
            to_asset=to_asset,
            timestamp=timestamp,
        )

        cache_key = PairCacheKey(from_asset.identifier + '_' +
                                 to_asset.identifier)
        got_cached_value = self._got_cached_price(cache_key, timestamp)
        if got_cached_value:
            return self.price_history[cache_key].data

        now_ts = ts_now()
        cryptocompare_hourquerylimit = 2000
        calculated_history: List[Dict[str, Any]] = []

        if historical_data_start <= timestamp:
            end_date = historical_data_start
        else:
            end_date = timestamp
        while True:
            pr_end_date = end_date
            end_date = Timestamp(end_date +
                                 (cryptocompare_hourquerylimit) * 3600)

            log.debug(
                'Querying cryptocompare for hourly historical price',
                from_asset=from_asset,
                to_asset=to_asset,
                cryptocompare_hourquerylimit=cryptocompare_hourquerylimit,
                end_date=end_date,
            )

            resp = self.query_endpoint_histohour(
                from_asset=from_asset,
                to_asset=to_asset,
                limit=2000,
                to_timestamp=end_date,
            )

            if pr_end_date != resp['TimeFrom']:
                # If we got more entries than needed because we are close to
                # now_ts, skip the entries that are already included
                diff = pr_end_date - resp['TimeFrom']
                # If the start date differs from the previous end date by less
                # than 3600 secs do nothing. If by more, skip all the entries
                # that are already included
                if diff >= 3600:
                    if resp['Data'][diff // 3600]['time'] != pr_end_date:
                        raise RemoteError(
                            'Unexpected data format in cryptocompare query_endpoint_histohour. '
                            'Expected to find the previous date timestamp during '
                            'cryptocompare historical data fetching', )
                    # keep only the part from the previous timestamp onward
                    resp['Data'] = resp['Data'][diff // 3600:]

            # The end date returned by a cryptocompare query may differ from
            # the requested one by up to 3600 secs, since this is hourly
            # historical data, but no more than that.
            end_dates_dont_match = (end_date < now_ts
                                    and resp['TimeTo'] != end_date)
            if end_dates_dont_match:
                if resp['TimeTo'] - end_date >= 3600:
                    raise RemoteError(
                        'Unexpected data format in cryptocompare query_endpoint_histohour. '
                        'End dates do not match.', )
                else:
                    # but if it is just a drift within the hour, update end_date
                    # so the next loop iterations can pick it up
                    end_date = resp['TimeTo']

            # If last time slot and first new are the same, skip the first new slot
            last_entry_equal_to_first = (len(calculated_history) != 0
                                         and calculated_history[-1]['time']
                                         == resp['Data'][0]['time'])
            if last_entry_equal_to_first:
                resp['Data'] = resp['Data'][1:]
            calculated_history += resp['Data']
            if end_date >= now_ts:
                break

        # Let's always check for data sanity for the hourly prices.
        _check_hourly_data_sanity(calculated_history, from_asset, to_asset)
        # and now since we actually queried the data let's also cache them
        filename = self.data_directory / ('price_history_' + cache_key +
                                          '.json')
        log.info(
            'Updating price history cache',
            filename=filename,
            from_asset=from_asset,
            to_asset=to_asset,
        )
        write_history_data_in_file(
            data=calculated_history,
            filepath=filename,
            start_ts=historical_data_start,
            end_ts=now_ts,
        )

        # Finally save the objects in memory and return them
        data_including_time = {
            'data': calculated_history,
            'start_time': historical_data_start,
            'end_time': end_date,
        }
        self.price_history_file[cache_key] = filename
        self.price_history[cache_key] = _dict_history_to_data(
            data_including_time)

        return self.price_history[cache_key].data
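The loop above pages through the histohour endpoint in fixed windows of 2000 hourly entries, i.e. 2000 * 3600 seconds per query. A standalone sketch of just that windowing arithmetic, with assumed example timestamps and no network calls:

HOUR = 3600
QUERY_LIMIT = 2000  # cryptocompare's histohour entries-per-query limit

def histohour_windows(start_ts: int, now_ts: int):
    """Yield (window_start, window_end) pairs until now_ts is covered."""
    end_date = start_ts
    while True:
        window_start = end_date
        end_date = window_start + QUERY_LIMIT * HOUR
        yield window_start, end_date
        if end_date >= now_ts:
            break

# Covering ~3 months of hourly data (2020-01-01 to 2020-04-01) needs two queries:
assert list(histohour_windows(1577836800, 1585699200)) == [
    (1577836800, 1585036800),
    (1585036800, 1592236800),
]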
Example 3
    def create_history(self, start_ts, end_ts, end_at_least_ts):
        """Creates trades and loans history from start_ts to end_ts or if
        `end_at_least` is given and we have a cache history for that particular source
        which satisfies it we return the cache
        """
        log.info(
            'Starting trade history creation',
            start_ts=start_ts,
            end_ts=end_ts,
            end_at_least_ts=end_at_least_ts,
        )

        # start building the list that will hold the history of all trades
        history = list()
        asset_movements = list()
        empty_or_error = ''

        if self.kraken is not None:
            try:
                kraken_history = self.kraken.query_trade_history(
                    start_ts=start_ts,
                    end_ts=end_ts,
                    end_at_least_ts=end_at_least_ts,
                )
                history.extend(kraken_history)

                kraken_asset_movements = self.kraken.query_deposits_withdrawals(
                    start_ts=start_ts,
                    end_ts=end_ts,
                    end_at_least_ts=end_at_least_ts,
                )
                asset_movements.extend(kraken_asset_movements)

            except RemoteError as e:
                empty_or_error += '\n' + str(e)

        poloniex_query_error = False
        poloniex_margin_trades = []
        polo_loans = []
        try:
            (
                history,
                asset_movements,
                poloniex_margin_trades,
                polo_loans,
            ) = self.query_poloniex_history(
                history,
                asset_movements,
                start_ts,
                end_ts,
                end_at_least_ts,
            )
        except RemoteError as e:
            empty_or_error += '\n' + str(e)
            poloniex_query_error = True

        if self.bittrex is not None:
            try:
                bittrex_history = self.bittrex.query_trade_history(
                    start_ts=start_ts,
                    end_ts=end_ts,
                    end_at_least_ts=end_at_least_ts,
                )
                history.extend(bittrex_history)

            except RemoteError as e:
                empty_or_error += '\n' + str(e)

        if self.bitmex is not None:
            try:
                bitmex_history = self.bitmex.query_trade_history(
                    start_ts=start_ts,
                    end_ts=end_ts,
                    end_at_least_ts=end_at_least_ts,
                )
                for trade in bitmex_history:
                    history.append(trade_from_bitmex(trade))

                bitmex_asset_movements = self.bitmex.query_deposits_withdrawals(
                    start_ts=start_ts,
                    end_ts=end_ts,
                    end_at_least_ts=end_at_least_ts,
                )
                asset_movements.extend(bitmex_asset_movements)

            except RemoteError as e:
                empty_or_error += '\n' + str(e)

        if self.binance is not None:
            try:
                binance_history = self.binance.query_trade_history(
                    start_ts=start_ts,
                    end_ts=end_ts,
                    end_at_least_ts=end_at_least_ts,
                )
                history.extend(binance_history)
            except RemoteError as e:
                empty_or_error += '\n' + str(e)

        eth_transactions = []  # keep it bound even if the query below fails
        try:
            eth_transactions = query_etherscan_for_transactions(
                self.eth_accounts)
        except RemoteError as e:
            empty_or_error += '\n' + str(e)

        # We sort it here ... but accounting re-sorts the entire actions list
        # when it runs, so unless the sorted order is relied on somewhere else
        # too, perhaps we can skip it?
        history.sort(key=lambda trade: trade.timestamp)
        history = limit_trade_list_to_period(history, start_ts, end_ts)

        # Write to files
        historyfile_path = os.path.join(self.user_directory,
                                        TRADES_HISTORYFILE)
        write_tupledata_history_in_file(history, historyfile_path, start_ts,
                                        end_ts)
        if self.poloniex is not None:
            if not self.read_manual_margin_positions and not poloniex_query_error:
                marginfile_path = os.path.join(self.user_directory,
                                               MARGIN_HISTORYFILE)
                write_tupledata_history_in_file(
                    poloniex_margin_trades,
                    marginfile_path,
                    start_ts,
                    end_ts,
                )

        if not poloniex_query_error:
            loansfile_path = os.path.join(self.user_directory,
                                          LOANS_HISTORYFILE)
            write_history_data_in_file(polo_loans, loansfile_path, start_ts,
                                       end_ts)
        assetmovementsfile_path = os.path.join(self.user_directory,
                                               ASSETMOVEMENTS_HISTORYFILE)
        write_tupledata_history_in_file(asset_movements,
                                        assetmovementsfile_path, start_ts,
                                        end_ts)
        eth_tx_log_path = os.path.join(self.user_directory,
                                       ETHEREUM_TX_LOGFILE)
        write_tupledata_history_in_file(eth_transactions, eth_tx_log_path,
                                        start_ts, end_ts)

        # After writing everything to files, include the external trades in the history
        history = maybe_add_external_trades_to_history(
            db=self.db,
            start_ts=start_ts,
            end_ts=end_ts,
            history=history,
            msg_aggregator=self.msg_aggregator,
        )

        return (
            empty_or_error,
            history,
            poloniex_margin_trades,
            polo_loans,
            asset_movements,
            eth_transactions,
        )
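The defining pattern in create_history is error accumulation: each source is queried inside its own try/except RemoteError and failure messages are concatenated into empty_or_error, so one unreachable exchange does not abort the rest. A minimal self-contained sketch of that pattern:

from typing import Callable, List, Tuple

class RemoteError(Exception):
    """Stand-in for the RemoteError raised by the real exchange queries."""

def query_all(sources: List[Callable[[], List[str]]]) -> Tuple[str, List[str]]:
    history: List[str] = []
    empty_or_error = ''
    for query in sources:
        try:
            history.extend(query())
        except RemoteError as e:
            empty_or_error += '\n' + str(e)  # accumulate instead of aborting
    return empty_or_error, history

def failing_query() -> List[str]:
    raise RemoteError('exchange unreachable')

errors, trades = query_all([lambda: ['trade1'], failing_query, lambda: ['trade2']])
assert trades == ['trade1', 'trade2'] and 'unreachable' in errors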
Example 4
def write_tupledata_history_in_file(history, filepath, start_ts, end_ts):
    out_history = [tr._asdict() for tr in history]
    write_history_data_in_file(out_history, filepath, start_ts, end_ts)
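The wrapper assumes every history entry is a NamedTuple, whose _asdict() yields a JSON-serializable dict for write_history_data_in_file. A self-contained illustration with a simplified stand-in for the real trade type:

from typing import NamedTuple

class SimpleTrade(NamedTuple):  # simplified stand-in for the real Trade type
    timestamp: int
    pair: str
    amount: str

history = [SimpleTrade(1609459200, 'BTC_EUR', '0.5')]
out_history = [tr._asdict() for tr in history]
assert out_history == [{'timestamp': 1609459200, 'pair': 'BTC_EUR', 'amount': '0.5'}]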
Example 5
    def get_historical_data(
        self,
        from_asset: Asset,
        to_asset: Asset,
        timestamp: Timestamp,
        only_check_cache: bool,
    ) -> Optional[List[PriceHistoryEntry]]:
        """
        Get historical hour price data from cryptocompare

        Returns a sorted list of price entries.

        If only_check_cache is True then if the data is not cached locally this will return None

        - May raise RemoteError if there is a problem reaching the cryptocompare server
        or with reading the response returned by the server
        - May raise UnsupportedAsset if from/to asset is not supported by cryptocompare
        """
        log.debug(
            'Retrieving historical price data from cryptocompare',
            from_asset=from_asset,
            to_asset=to_asset,
            timestamp=timestamp,
        )
        cache_key = PairCacheKey(from_asset.identifier + '_' +
                                 to_asset.identifier)
        cached_data = self._got_cached_data_at_timestamp(
            from_asset=from_asset,
            to_asset=to_asset,
            timestamp=timestamp,
        )
        if cached_data is not None:
            return cached_data.data

        if only_check_cache:
            return None

        now_ts = ts_now()
        # save the time at the start of the query, in case the query does not complete due to rate limits
        self.last_histohour_query_ts = now_ts
        if cache_key in self.price_history:
            old_data = self.price_history[cache_key].data
            if timestamp > self.price_history[cache_key].end_time:
                # We have a cache but the requested timestamp does not hit it
                new_data = self._get_histohour_data_for_range(
                    from_asset=from_asset,
                    to_asset=to_asset,
                    from_timestamp=now_ts,
                    to_timestamp=self.price_history[cache_key].end_time,
                )
                if old_data[-1].time == new_data[0]['time']:
                    old_data = old_data[:-1]
                new_history = deque([x._asdict() for x in old_data]) + new_data
            else:
                # only other possibility, timestamp < cached start_time
                # Get all available data, even before to_timestamp
                new_data = self._get_histohour_data_for_range(
                    from_asset=from_asset,
                    to_asset=to_asset,
                    from_timestamp=self.price_history[cache_key].start_time,
                    to_timestamp=Timestamp(0),
                )
                if new_data[-1]['time'] == old_data[0].time:
                    new_data.pop()
                new_history = new_data + deque([x._asdict() for x in old_data])

            calculated_history = list(new_history)

        else:
            calculated_history = list(
                self._get_histohour_data_for_range(
                    from_asset=from_asset,
                    to_asset=to_asset,
                    from_timestamp=now_ts,
                    to_timestamp=Timestamp(0),
                ))

        if len(calculated_history) == 0:
            return []  # empty list means we found nothing

        # Let's always check for data sanity for the hourly prices.
        _check_hourly_data_sanity(calculated_history, from_asset, to_asset)
        # and now since we actually queried the data let's also cache them
        filename = (get_or_make_price_history_dir(self.data_directory) /
                    (PRICE_HISTORY_FILE_PREFIX + cache_key + '.json'))
        log.info(
            'Updating price history cache',
            filename=filename,
            from_asset=from_asset,
            to_asset=to_asset,
        )
        write_history_data_in_file(
            data=calculated_history,
            filepath=filename,
            start_ts=calculated_history[0]['time'],
            end_ts=now_ts,
        )

        # Finally save the objects in memory and return them
        data_including_time = {
            'data': calculated_history,
            'start_time': calculated_history[0]['time'],
            'end_time': now_ts,
        }
        self.price_history_file[cache_key] = filename
        self.price_history[cache_key] = _dict_history_to_data(
            data_including_time)
        self.last_histohour_query_ts = ts_now()  # also save when the last query finished
        return self.price_history[cache_key].data
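The cache-extension branches above stitch cached and freshly fetched hourly data together, dropping one entry when the boundary hour appears on both sides. A standalone sketch of the forward merge, using plain dicts in place of PriceHistoryEntry:

from collections import deque
from typing import Any, Deque, Dict, List

def merge_forward(
        old_data: List[Dict[str, Any]],
        new_data: List[Dict[str, Any]],
) -> Deque[Dict[str, Any]]:
    """Append freshly fetched entries after the cached ones, deduping the boundary."""
    if old_data and new_data and old_data[-1]['time'] == new_data[0]['time']:
        old_data = old_data[:-1]  # the boundary hour exists on both sides
    return deque(old_data) + deque(new_data)

old = [{'time': 3600, 'price': 1}, {'time': 7200, 'price': 2}]
new = [{'time': 7200, 'price': 2}, {'time': 10800, 'price': 3}]
assert [e['time'] for e in merge_forward(old, new)] == [3600, 7200, 10800]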
Example 6
    def get_historical_data(
        self,
        from_asset: Asset,
        to_asset: Asset,
        timestamp: Timestamp,
        historical_data_start: Timestamp,
    ) -> List[PriceHistoryEntry]:
        """
        Get historical price data from cryptocompare

        Returns a sorted list of price entries.
        """
        log.debug(
            'Retrieving historical price data from cryptocompare',
            from_asset=from_asset,
            to_asset=to_asset,
            timestamp=timestamp,
        )

        cache_key = PairCacheKey(from_asset.identifier + '_' +
                                 to_asset.identifier)
        got_cached_value = self.got_cached_price(cache_key, timestamp)
        if got_cached_value:
            return self.price_history[cache_key].data

        now_ts = int(time.time())
        cryptocompare_hourquerylimit = 2000
        calculated_history: List = list()

        if historical_data_start <= timestamp:
            end_date = historical_data_start
        else:
            end_date = timestamp
        while True:
            pr_end_date = end_date
            end_date = Timestamp(end_date +
                                 (cryptocompare_hourquerylimit) * 3600)

            log.debug(
                'Querying cryptocompare for hourly historical price',
                from_asset=from_asset,
                to_asset=to_asset,
                cryptocompare_hourquerylimit=cryptocompare_hourquerylimit,
                end_date=end_date,
            )

            resp = self.query_endpoint_histohour(
                from_asset=from_asset,
                to_asset=to_asset,
                limit=2000,
                to_timestamp=end_date,
            )

            if pr_end_date != resp['TimeFrom']:
                # If we got more entries than needed because we are close to
                # now_ts, skip the entries that are already included
                diff = pr_end_date - resp['TimeFrom']
                if resp['Data'][diff // 3600]['time'] != pr_end_date:
                    raise ValueError(
                        'Expected to find the previous date timestamp during '
                        'historical data fetching', )
                # keep only the part from the previous timestamp onward
                resp['Data'] = resp['Data'][diff // 3600:]

            if end_date < now_ts and resp['TimeTo'] != end_date:
                raise ValueError('End dates do not match')

            # If last time slot and first new are the same, skip the first new slot
            last_entry_equal_to_first = (len(calculated_history) != 0
                                         and calculated_history[-1]['time']
                                         == resp['Data'][0]['time'])
            if last_entry_equal_to_first:
                resp['Data'] = resp['Data'][1:]
            calculated_history += resp['Data']
            if end_date >= now_ts:
                break

        # Let's always check for data sanity for the hourly prices.
        assert _check_hourly_data_sanity(calculated_history, from_asset,
                                         to_asset)
        # and now since we actually queried the data let's also cache them
        filename = FilePath(
            os.path.join(self.data_directory,
                         'price_history_' + cache_key + '.json'), )
        log.info(
            'Updating price history cache',
            filename=filename,
            from_asset=from_asset,
            to_asset=to_asset,
        )
        write_history_data_in_file(
            data=calculated_history,
            filepath=filename,
            start_ts=historical_data_start,
            end_ts=now_ts,
        )

        # Finally save the objects in memory and return them
        data_including_time = {
            'data': calculated_history,
            'start_time': historical_data_start,
            'end_time': end_date,
        }
        self.price_history_file[cache_key] = filename
        self.price_history[cache_key] = _dict_history_to_data(
            data_including_time)

        return self.price_history[cache_key].data
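The diff // 3600 logic above converts the seconds gap between the previous window's end and the response's TimeFrom into an index into the hourly Data array, then drops the duplicated leading entries. A self-contained sketch of that slice:

from typing import Any, Dict, List

HOUR = 3600

def trim_overlap(
        data: List[Dict[str, Any]],
        time_from: int,
        prev_end: int,
) -> List[Dict[str, Any]]:
    """Drop hourly entries that precede prev_end, indexing by whole hours."""
    diff = prev_end - time_from
    if diff >= HOUR:
        index = diff // HOUR
        if data[index]['time'] != prev_end:
            raise ValueError('hourly data is not aligned with the previous window')
        data = data[index:]
    return data

# The response starts two hours before the previous window ended, so the
# first two entries are already included and get sliced off:
data = [{'time': t} for t in (0, 3600, 7200, 10800)]
assert trim_overlap(data, time_from=0, prev_end=7200)[0]['time'] == 7200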