Example #1
    def _get_cached_asset_movements(
        self,
        start_ts: Timestamp,
        end_ts: Timestamp,
        end_at_least_ts: Timestamp,
    ) -> List[AssetMovement]:
        """
        Attempts to read the cache of asset movements and returns a list of them.

        Can raise HistoryCacheInvalid if there is a problem with the cache.
        """
        assetmovementsfile_path = os.path.join(
            self.user_directory,
            ASSETMOVEMENTS_HISTORYFILE,
        )
        asset_movements_contents = get_jsonfile_contents_or_empty_dict(
            FilePath(assetmovementsfile_path),
        )
        asset_movements_history_is_okay = data_up_todate(
            asset_movements_contents,
            start_ts,
            end_at_least_ts,
        )
        if not asset_movements_history_is_okay:
            raise HistoryCacheInvalid('Asset Movements cache is invalid')

        try:
            asset_movements = asset_movements_from_dictlist(
                asset_movements_contents['data'],
                start_ts,
                end_ts,
            )
        except KeyError:
            raise HistoryCacheInvalid('Asset Movements cache is invalid')

        return asset_movements
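
For reference, the reader above delegates both the file loading and the freshness test to helpers: get_jsonfile_contents_or_empty_dict(), as its name suggests, yields an empty dict when the file has no usable contents, and data_up_todate() decides whether the cached contents cover the requested range. A minimal sketch of that range check, assuming hypothetical 'start_time'/'end_time' metadata keys rather than the project's actual cache format:

    from typing import Any, Dict


    def data_up_todate(
            contents: Dict[str, Any],
            start_ts: int,
            end_at_least_ts: int,
    ) -> bool:
        """Sketch: True if the cached contents cover [start_ts, end_at_least_ts]."""
        # 'start_time' and 'end_time' are assumed metadata keys for this sketch,
        # not the confirmed on-disk cache layout.
        if 'data' not in contents or 'start_time' not in contents or 'end_time' not in contents:
            return False
        # The cache must start no later than the requested start and extend at
        # least to the minimum acceptable end timestamp.
        return contents['start_time'] <= start_ts and contents['end_time'] >= end_at_least_ts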
Example #2
    def get_cached_history(self, start_ts, end_ts, end_at_least_ts=None):
        """Gets all the cached history data instead of querying all external sources
        to create the history through create_history()

        Can raise:
            - HistoryCacheInvalid:
                If any of the cache files are corrupt in any way, missing or
                do not cover the given time range
        """
        if end_at_least_ts is None:
            end_at_least_ts = end_ts

        historyfile_path = os.path.join(self.user_directory,
                                        TRADES_HISTORYFILE)
        if not os.path.isfile(historyfile_path):
            raise HistoryCacheInvalid()

        with open(historyfile_path, 'r') as infile:
            try:
                history_json_data = rlk_jsonloads(infile.read())
            except JSONDecodeError:
                # A corrupt cache file is treated like a missing one
                raise HistoryCacheInvalid('Historical trades cache invalid')

        if not data_up_todate(history_json_data, start_ts, end_at_least_ts):
            raise HistoryCacheInvalid('Historical trades cache invalid')
        try:
            history_trades = trades_from_dictlist(
                given_trades=history_json_data['data'],
                start_ts=start_ts,
                end_ts=end_ts,
                location='historical trades',
                msg_aggregator=self.msg_aggregator,
            )
        except KeyError:
            raise HistoryCacheInvalid('Historical trades cache invalid')
        history_trades = maybe_add_external_trades_to_history(
            db=self.db,
            start_ts=start_ts,
            end_ts=end_ts,
            history=history_trades,
            msg_aggregator=self.msg_aggregator,
        )

        kraken_okay = (
            self.kraken is None or
            self.kraken.check_trades_cache(start_ts, end_at_least_ts) is not None
        )
        if not kraken_okay:
            raise HistoryCacheInvalid('Kraken cache is invalid')

        bittrex_okay = (
            self.bittrex is None or
            self.bittrex.check_trades_cache(start_ts, end_at_least_ts) is not None
        )
        if not bittrex_okay:
            raise HistoryCacheInvalid('Bittrex cache is invalid')

        binance_okay = (
            self.binance is None or
            self.binance.check_trades_cache(start_ts, end_at_least_ts) is not None
        )
        if not binance_okay:
            raise HistoryCacheInvalid('Binance cache is invalid')

        bitmex_okay = (
            self.bitmex is None or
            self.bitmex.check_trades_cache(start_ts, end_at_least_ts) is not None
        )
        if not bitmex_okay:
            raise HistoryCacheInvalid('Bitmex cache is invalid')

        # Poloniex specific
        loan_data = []
        if self.poloniex:
            if not self.poloniex.check_trades_cache(start_ts, end_at_least_ts):
                raise HistoryCacheInvalid('Poloniex cache is invalid')

            loansfile_path = os.path.join(self.user_directory,
                                          LOANS_HISTORYFILE)
            loan_file_contents = get_jsonfile_contents_or_empty_dict(
                loansfile_path)
            loan_history_is_okay = data_up_todate(
                loan_file_contents,
                start_ts,
                end_at_least_ts,
            )
            if not loan_history_is_okay:
                raise HistoryCacheInvalid('Poloniex loan cache is invalid')
            loan_data = loan_file_contents['data']

        # margin positions that have been manually input
        if not self.read_manual_margin_positions:
            marginfile_path = os.path.join(self.user_directory,
                                           MARGIN_HISTORYFILE)
            margin_file_contents = get_jsonfile_contents_or_empty_dict(
                marginfile_path)
            margin_history_is_okay = data_up_todate(
                margin_file_contents,
                start_ts,
                end_at_least_ts,
            )
            if not margin_history_is_okay:
                raise HistoryCacheInvalid('Margin Positions cache is invalid')

            try:
                margin_trades = trades_from_dictlist(
                    given_trades=margin_file_contents['data'],
                    start_ts=start_ts,
                    end_ts=end_ts,
                    location='Margin position trades',
                    msg_aggregator=self.msg_aggregator,
                )
            except KeyError:
                raise HistoryCacheInvalid('Margin Positions cache is invalid')

        else:
            margin_trades = do_read_manual_margin_positions(self.user_directory)

        asset_movements = self._get_cached_asset_movements(
            start_ts=start_ts,
            end_ts=end_ts,
            end_at_least_ts=end_at_least_ts,
        )

        eth_tx_log_path = os.path.join(self.user_directory,
                                       ETHEREUM_TX_LOGFILE)
        eth_tx_log_contents = get_jsonfile_contents_or_empty_dict(
            eth_tx_log_path)
        eth_tx_log_history_is_okay = data_up_todate(
            eth_tx_log_contents,
            start_ts,
            end_at_least_ts,
        )
        if not eth_tx_log_history_is_okay:
            raise HistoryCacheInvalid('Ethereum transactions cache is invalid')

        try:
            eth_transactions = transactions_from_dictlist(
                eth_tx_log_contents['data'],
                start_ts,
                end_ts,
            )
        except KeyError:
            raise HistoryCacheInvalid('Ethereum transactions cache is invalid')

        # make sure that this is the same as what is returned
        # from create_history, except for the first argument
        return (
            history_trades,
            margin_trades,
            loan_data,
            asset_movements,
            eth_transactions,
        )
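
Every failure path in get_cached_history() surfaces as HistoryCacheInvalid, and the docstring presents the method as the cached counterpart of create_history(). A sketch of how a caller could use that contract; the wrapper, the historian object and the create_history() signature are assumptions for illustration, not project code:

    # HistoryCacheInvalid is the exception class used throughout the examples;
    # it is assumed to be importable from the surrounding module.
    def get_history(historian, start_ts, end_ts, end_at_least_ts=None):
        """Prefer the cache; rebuild from external sources only on cache failure."""
        try:
            return historian.get_cached_history(start_ts, end_ts, end_at_least_ts)
        except HistoryCacheInvalid:
            # Per the closing comment above, create_history() returns the same
            # tuple with one extra leading element; it is sliced off here so
            # both branches yield the same shape (signature assumed).
            return historian.create_history(start_ts, end_ts)[1:]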
Example #3
    def get_cached_history(self, start_ts, end_ts, end_at_least_ts=None):
        """Gets all the cached history data instead of querying all external sources
        to create the history through create_history()

        Can raise:
            - HistoryCacheInvalid:
                If any of the cache files are corrupt in any way, missing or
                do not cover the given time range
        """
        if end_at_least_ts is None:
            end_at_least_ts = end_ts

        historyfile_path = os.path.join(self.user_directory,
                                        TRADES_HISTORYFILE)
        if not os.path.isfile(historyfile_path):
            raise HistoryCacheInvalid()

        with open(historyfile_path, 'r') as infile:
            try:
                history_json_data = rlk_jsonloads(infile.read())
            except JSONDecodeError:
                # A corrupt cache file is treated like a missing one
                raise HistoryCacheInvalid('Historical trades cache invalid')

        if not data_up_todate(history_json_data, start_ts, end_at_least_ts):
            raise HistoryCacheInvalid('Historical trades cache invalid')
        try:
            history_trades = trades_from_dictlist(
                given_trades=history_json_data['data'],
                start_ts=start_ts,
                end_ts=end_ts,
                location='historical trades',
                msg_aggregator=self.msg_aggregator,
            )
        except (KeyError, DeserializationError):
            raise HistoryCacheInvalid('Historical trades cache invalid')

        history_trades = maybe_add_external_trades_to_history(
            db=self.db,
            start_ts=start_ts,
            end_ts=end_ts,
            history=history_trades,
            msg_aggregator=self.msg_aggregator,
        )

        # Check the cache of each exchange
        poloniex = None
        for _, exchange in self.exchange_manager.connected_exchanges.items():
            if exchange.name == 'poloniex':
                poloniex = exchange
            if not exchange.check_trades_cache(start_ts, end_at_least_ts):
                raise HistoryCacheInvalid(f'{exchange.name} cache is invalid')

        # Poloniex specific
        loan_data = []
        if poloniex:
            loansfile_path = os.path.join(self.user_directory,
                                          LOANS_HISTORYFILE)
            loan_file_contents = get_jsonfile_contents_or_empty_dict(
                loansfile_path)
            loan_history_is_okay = data_up_todate(
                loan_file_contents,
                start_ts,
                end_at_least_ts,
            )
            if not loan_history_is_okay:
                raise HistoryCacheInvalid('Poloniex loan cache is invalid')
            loan_data = loan_file_contents['data']

        asset_movements = self._get_cached_asset_movements(
            start_ts=start_ts,
            end_ts=end_ts,
            end_at_least_ts=end_at_least_ts,
        )

        eth_tx_log_path = os.path.join(self.user_directory,
                                       ETHEREUM_TX_LOGFILE)
        eth_tx_log_contents = get_jsonfile_contents_or_empty_dict(
            eth_tx_log_path)
        eth_tx_log_history_is_okay = data_up_todate(
            eth_tx_log_contents,
            start_ts,
            end_at_least_ts,
        )
        if not eth_tx_log_history_is_okay:
            raise HistoryCacheInvalid('Ethereum transactions cache is invalid')

        try:
            eth_transactions = transactions_from_dictlist(
                eth_tx_log_contents['data'],
                start_ts,
                end_ts,
            )
        except KeyError:
            raise HistoryCacheInvalid('Ethereum transactions cache is invalid')

        # make sure that this is the same as what is returned
        # from create_history, except for the first argument
        return (
            history_trades,
            loan_data,
            asset_movements,
            eth_transactions,
        )
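
The third version folds the earlier per-exchange blocks into a single loop over exchange_manager.connected_exchanges, which presumes every connected exchange exposes check_trades_cache() and that a falsy return means the cache does not cover the range. Note that the loop tests plain truthiness, whereas the earlier version compared against None, so a legitimately empty cached list would now also be rejected. A hypothetical stub illustrating the assumed contract:

    from typing import Any, List, Optional


    class ExchangeStub:
        """Hypothetical connected exchange, used only to illustrate the contract."""

        def __init__(self, name: str, cached_trades: Optional[List[Any]]) -> None:
            self.name = name
            self._cached_trades = cached_trades

        def check_trades_cache(
                self,
                start_ts: int,
                end_at_least_ts: int,
        ) -> Optional[List[Any]]:
            # Assumed contract: a real exchange returns its cached trades only
            # when they cover [start_ts, end_at_least_ts] and None otherwise;
            # this stub simply hands back whatever it was constructed with.
            return self._cached_trades


    connected_exchanges = {
        'kraken': ExchangeStub('kraken', [{'pair': 'BTC_EUR'}]),
        'poloniex': ExchangeStub('poloniex', None),  # missing or stale cache
    }
    for _, exchange in connected_exchanges.items():
        if not exchange.check_trades_cache(1609459200, 1612137600):
            # In get_cached_history() this would raise HistoryCacheInvalid.
            print(f'{exchange.name} cache is invalid')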