Code Example #1
    def fork_new_thread(self, creator_env: Optional['ExecEnv'] = None) -> None:
        """
        Copies over creator_env's settings and creates new database accessors for this thread.
        """
        if creator_env is None and self._creator_env is None:
            raise ValueError(
                'Can\'t fork an execution environment into a new thread without a creator env'
            )
        elif creator_env is None:
            creator_env = self._creator_env

        self.env_type = creator_env.env_type
        self._time = creator_env._time
        self._data_collector = creator_env._data_collector
        self._data_loaded = creator_env._data_loaded
        self._pid = os.getpid()

        # Create new database accessors for the new thread
        self._mongo = MongoManager(logfeed_program=self.logfeed_program,
                                   env_type=self.env_type)
        self._redis = RedisManager(logfeed_process=self.logfeed_process,
                                   env_type=self.env_type)

        # Load settings and use them to init db connections
        try:
            self._load_settings_from_config()
            self._init_db_connections()
            self._load_settings_from_redis()
        except Exception as e:
            self.error_main(
                'ExecEnv could not load settings and connect to databases:')
            self.warn_main(traceback.format_exc())
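
A minimal usage sketch, assuming live_env is an ExecEnv that has already completed setup_first_time (the worker function and its symbol are hypothetical):

import threading

def worker(creator_env: 'ExecEnv') -> None:
    # Fork before touching the databases: mongo() and redis() raise an
    # EnvironmentError when called from a pid other than the one recorded at fork time.
    env = ExecEnv(creator_env.logfeed_program, creator_env.logfeed_process)
    env.fork_new_thread(creator_env=creator_env)
    candles = env.get_latest_candles('SPY', minutes=30)

threading.Thread(target=worker, args=(live_env,)).start()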
Code Example #2
def fork_sim_env_visuals() -> 'ExecEnv':
    """
    Returns an execution environment of type VISUAL_GENERATION that can be used by the calling thread.
    """
    from tc2.env.ExecEnv import ExecEnv
    from tc2.env.EnvType import EnvType
    from tc2.env.TimeEnv import TimeEnv
    from tc2.data.data_storage.redis.RedisManager import RedisManager
    from tc2.data.data_storage.mongo.MongoManager import MongoManager
    from tc2.data.stock_data_collection.PolygonDataCollector import PolygonDataCollector

    if shared.sim_env_visuals is None:
        shared.sim_env_visuals = ExecEnv(shared.program.logfeed_program,
                                         shared.program.logfeed_visuals)
        sim_time = TimeEnv(datetime.now())
        shared.sim_env_visuals.setup_first_time(
            env_type=EnvType.VISUAL_GENERATION,
            time=sim_time,
            data_collector=PolygonDataCollector(
                logfeed_program=shared.program.logfeed_program,
                logfeed_process=shared.program.logfeed_visuals,
                time_env=sim_time),
            mongo=MongoManager(shared.program.logfeed_visuals,
                               EnvType.VISUAL_GENERATION),
            redis=RedisManager(shared.program.logfeed_visuals,
                               EnvType.VISUAL_GENERATION))
        return shared.sim_env_visuals

    # Wipe databases
    shared.sim_env_visuals.reset_dbs()

    shared.sim_env_visuals.fork_new_thread(creator_env=shared.sim_env_visuals)
    return shared.sim_env_visuals
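
A hedged sketch of a visuals job consuming this helper (generate_visuals_job and its body are hypothetical):

def generate_visuals_job(symbol: str) -> None:
    # The first call sets up the shared VISUAL_GENERATION environment; later
    # calls wipe its databases and fork fresh accessors for the calling thread.
    env = fork_sim_env_visuals()
    candles = env.get_latest_candles(symbol, minutes=60)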
Code Example #3
def fork_sim_env_health() -> 'ExecEnv':
    """
    Returns an execution environment of type HEALTH_CHECKING that can be used by the calling thread.
    """
    from tc2.env.ExecEnv import ExecEnv
    from tc2.env.EnvType import EnvType
    from tc2.env.TimeEnv import TimeEnv
    from tc2.data.data_storage.redis.RedisManager import RedisManager
    from tc2.data.data_storage.mongo.MongoManager import MongoManager
    from tc2.data.stock_data_collection.PolygonDataCollector import PolygonDataCollector

    if shared.sim_env_health is None:
        shared.sim_env_health = ExecEnv(None, None)
        sim_time = TimeEnv(datetime.now())
        shared.sim_env_health.setup_first_time(
            env_type=EnvType.HEALTH_CHECKING,
            time=sim_time,
            data_collector=PolygonDataCollector(logfeed_program=None,
                                                logfeed_process=None,
                                                time_env=sim_time),
            mongo=MongoManager(None, EnvType.HEALTH_CHECKING),
            redis=RedisManager(None, EnvType.HEALTH_CHECKING))
        return shared.sim_env_health

    # Wipe databases
    shared.sim_env_health.reset_dbs()

    shared.sim_env_health.fork_new_thread(creator_env=shared.sim_env_health)
    return shared.sim_env_health
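
The helper acts as a lazily-created singleton. A small sketch of the reuse semantics, assuming shared.sim_env_health starts out as None:

env_first = fork_sim_env_health()   # first call: setup_first_time on a fresh env
env_again = fork_sim_env_health()   # later calls: reset_dbs(), then fork_new_thread()
assert env_first is env_again       # the same shared instance is returned each time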
Code Example #4
File: TC2Program.py  Project: maxilie/TC2_public
    def load_pre_reqs(self) -> None:
        # Initialize log feeds.
        self.logfeed_data = LogFeed(LogCategory.DATA)
        self.logfeed_data.log(LogLevel.ERROR,
                              '.             ...PROGRAM RESTARTED...')
        self.logfeed_trading = LogFeed(LogCategory.LIVE_TRADING)
        self.logfeed_trading.log(LogLevel.ERROR,
                                 '.             ...PROGRAM RESTARTED...')
        self.logfeed_optimization = LogFeed(LogCategory.OPTIMIZATION)
        self.logfeed_optimization.log(LogLevel.ERROR,
                                      '.             ...PROGRAM RESTARTED...')
        self.logfeed_visuals = LogFeed(LogCategory.VISUALS)
        self.logfeed_visuals.log(LogLevel.ERROR,
                                 '.             ...PROGRAM RESTARTED...')
        self.logfeed_api = LogFeed(LogCategory.API)
        self.logfeed_api.log(LogLevel.ERROR,
                             '.             ...PROGRAM RESTARTED...')

        # Create time environment for live data collection and trading.
        live_time_env = TimeEnv(datetime.now())

        # Create database managers but don't initialize connections.
        live_redis = RedisManager(self.logfeed_program, EnvType.LIVE)
        live_mongo = MongoManager(self.logfeed_program, EnvType.LIVE)

        # Initialize collector manager to access polygon.io.
        live_data_collector = PolygonDataCollector(
            logfeed_program=self.logfeed_program,
            logfeed_process=self.logfeed_data,
            time_env=live_time_env)

        # Initialize the live execution environment with program logs.
        self.live_env = ExecEnv(logfeed_program=self.logfeed_program,
                                logfeed_process=self.logfeed_program)

        # Setup the live execution environment with live time & data variables.
        self.live_env.setup_first_time(env_type=EnvType.LIVE,
                                       time=live_time_env,
                                       data_collector=live_data_collector,
                                       mongo=live_mongo,
                                       redis=live_redis)

        # Set Alpaca credentials as environment variables so we don't have to pass them around.
        live_trading = Settings.get_endpoint(self.live_env) == BrokerEndpoint.LIVE
        os.environ['APCA_API_BASE_URL'] = 'https://api.alpaca.markets' \
            if live_trading else 'https://paper-api.alpaca.markets'
        os.environ['APCA_API_KEY_ID'] = self.live_env.get_setting('alpaca.live_key_id') \
            if live_trading else self.live_env.get_setting('alpaca.paper_key_id')
        os.environ['APCA_API_SECRET_KEY'] = self.live_env.get_setting('alpaca.live_secret_key') \
            if live_trading else self.live_env.get_setting('alpaca.paper_secret_key')
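        # Polygon.io is keyed with the live Alpaca key ID, regardless of the paper/live endpoint.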
        os.environ['POLYGON_KEY_ID'] = self.live_env.get_setting(
            'alpaca.live_key_id')
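
With the credentials exported, downstream code can read the broker configuration straight from the process environment. A minimal sketch (the is_paper_endpoint helper is hypothetical):

import os

def is_paper_endpoint() -> bool:
    # load_pre_reqs points APCA_API_BASE_URL at the paper endpoint unless live trading is enabled.
    return 'paper' in os.environ.get('APCA_API_BASE_URL', '')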
Code Example #5
    def run(self) -> None:
        self.program.logfeed_program.log(
            LogLevel.INFO, 'Debug task setting up simulation environment')

        # Set simulation parameters.
        day_date = date(year=2020, month=3, day=10)

        # Clone live environment so it can run on this thread.
        live_env = ExecEnv(self.program.logfeed_program,
                           self.program.logfeed_program, self.program.live_env)
        live_env.fork_new_thread()

        # Initialize simulation environment.
        sim_time_env = TimeEnv(
            datetime.combine(day_date, time(hour=11, minute=3, second=40)))
        sim_data_collector = PolygonDataCollector(
            logfeed_program=self.program.logfeed_program,
            logfeed_process=self.program.logfeed_program,
            time_env=sim_time_env)
        sim_redis = RedisManager(self.program.logfeed_program,
                                 EnvType.STARTUP_DEBUG_1)
        sim_mongo = MongoManager(self.program.logfeed_program,
                                 EnvType.STARTUP_DEBUG_1)
        sim_env = ExecEnv(self.program.logfeed_program,
                          self.program.logfeed_program)
        sim_env.setup_first_time(env_type=EnvType.STARTUP_DEBUG_1,
                                 time=sim_time_env,
                                 data_collector=sim_data_collector,
                                 mongo=sim_mongo,
                                 redis=sim_redis)

        # Place the strategy in a simulated environment.
        strategy = LongShortStrategy(env=sim_env,
                                     symbols=['SPY', 'SPXL', 'SPXS'])

        # Simulate the strategy so its output gets printed to logfeed_optimization.
        self.program.logfeed_program.log(
            LogLevel.INFO, 'Creating StrategySimulator for debug task')
        simulator = StrategySimulator(strategy,
                                      live_env,
                                      all_symbols=['SPY', 'SPXL', 'SPXS'])
        self.program.logfeed_program.log(
            LogLevel.INFO, 'Running simulation of LongShortStrategy')
        simulator.run(warmup_days=2)
        self.program.logfeed_program.log(
            LogLevel.INFO, f'Completed LongShortStrategy simulation. '
            f'Results: {strategy.run_info.to_json()}')
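
Because the task runs under EnvType.STARTUP_DEBUG_1, its Mongo and Redis data is namespaced apart from the live environment's, and its clock is independent. A brief sketch of those assumptions:

sim_env.reset_dbs()                 # clears only STARTUP_DEBUG_1 data; LIVE data is untouched
sim_env.time().set_moment(          # advance the simulated clock without affecting live_env
    datetime.combine(day_date, time(hour=11, minute=30)))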
Code Example #6
class ExecEnv(Loggable):
    """
    Mediates sharing and separation of data and time environments (live vs simulated) across multiple threads/processes.
    Every thread/process has its own ExecEnv for each EnvType it uses.
    One thread/process can access the mongo data of another by forking its ExecEnv.
    """

    # The type of this execution environment.
    env_type: EnvType

    # Program settings - one instance shared across threads and ExecEnv's, even of different EnvType.
    _settings: 'multiprocessing dict'

    # Time environment - identical clones shared across threads running the same EnvType.
    _time: TimeEnv

    # Data collector - identical clones shared across threads running the same EnvType.
    _data_collector: AbstractDataCollector

    # Data load status - one instance shared across threads running the same EnvType.
    _data_loaded: 'multiprocessing dict'

    # MongoDB manager - specific to this (thread, ExecEnv) pair.
    _mongo: MongoManager

    # Redis manager - specific to this (thread, ExecEnv) pair.
    _redis: RedisManager

    # The process id of this thread.
    _pid: int

    # List shared across threads to ensure each EnvType is created from scratch only once.
    instantiated_env_types: 'multiprocessing list' = None
    """
    Time accessor...
    """
    def time(self) -> TimeEnv:
        """Provides access to the environment's time variables and methods."""
        return self._time

    """
    Database accessors...
    """

    def mongo(self) -> MongoManager:
        """Provides access to MongoDB while ensuring calls are made from only one thread."""
        if os.getpid() != self._pid:
            raise EnvironmentError(
                f'Thread with pid {os.getpid()} tried to access MongoDB '
                f'for thread with pid {self._pid}')
        return self._mongo

    def redis(self) -> RedisManager:
        """Provides access to Redis while ensuring calls are made from only one thread."""
        if os.getpid() != self._pid:
            raise EnvironmentError(
                f'Thread with pid {os.getpid()} tried to access Redis '
                f'for thread with pid {self._pid}')
        return self._redis

    """
    Settings getter & setter...
    """

    def get_setting(self, setting_name: str) -> str:
        """Returns a setting value as a raw string."""
        return self._settings.get(setting_name.lower(), '')

    def save_setting(self, setting_name: str, setting_val: str) -> None:
        """Saves the setting to Redis, which will cause it to persist even if the config.properties file is reset."""
        self._settings[setting_name.lower()] = setting_val
        self.redis().set_setting(setting_name.lower(), setting_val)

    """
    DataCollector accessor...
    """

    def data_collector(self) -> AbstractDataCollector:
        """Provides access to the environment's data collection functionalities."""
        return self._data_collector

    """
    Data load marking methods...
    """

    def is_data_loaded(self) -> bool:
        """Returns True only if a thread on this EnvType has called mark_data_as_loaded()."""
        return self._data_loaded[self.env_type.value]

    def mark_data_as_loaded(self) -> None:
        """Marks data as loaded for all ExecEnv's of the same EnvType (even across different threads)."""
        self._data_loaded[self.env_type.value] = True

    def mark_data_as_busy(self) -> None:
        """Marks data as busy for all ExecEnv's of this EnvType (even across different threads)."""
        self._data_loaded[self.env_type.value] = False

    """
    Data fetching methods...
    """

    def get_latest_candles(self, symbol: str, minutes: float) -> List[Candle]:
        """
        Combines today's cached redis candles with historical candles from MongoDB.
        NOTE: if the current time is 10:32:54, minutes=32 means "fetch candles starting at 10:00:00".

        :param minutes: minutes of open-market data to fetch, NOT total minutes including hours when the market is closed
        """

        # Calculate the furthest back we should go in time
        start_moment: datetime = self.time().now().replace(microsecond=0)
        mins_accounted_for = 0
        while mins_accounted_for < minutes:
            # Go back minute-by-minute
            start_moment = start_moment - timedelta(minutes=1)

            # When market close is reached, go back to the previous market day
            if not self.time().is_open(start_moment):
                start_moment = datetime.combine(
                    self.time().get_prev_mkt_day(start_moment.date()),
                    CLOSE_TIME.replace(second=start_moment.second)) - timedelta(minutes=1)

            mins_accounted_for += 1

        # Fetch candles by working backward from now
        candles = []
        day_date = self.time().get_next_mkt_day()
        while day_date >= start_moment.date():
            day_date = self.time().get_prev_mkt_day(day_date)
            if day_date == self.time().now().date() and self.env_type is EnvType.LIVE:
                # Fetch today's candles from the redis cache
                cached_candles = self.redis().get_cached_candles(symbol, day_date)
                # Use candles that fall within the desired time interval
                cached_candles.sort(key=lambda candle_to_sort: candle_to_sort.moment,
                                    reverse=True)
                for candle in cached_candles:
                    if start_moment - timedelta(milliseconds=1) <= candle.moment <= self.time().now():
                        candles.append(candle)
            else:
                # Fetch previous days' candles from MongoDB
                mongo_candles = self.mongo().load_symbol_day(symbol, day_date).candles
                # Use candles that fall within the desired time interval
                for candle in mongo_candles:
                    if start_moment - timedelta(milliseconds=1) <= candle.moment <= self.time().now():
                        candles.append(candle)

        # Sort candles into ascending order
        candles.sort(key=lambda candle_to_sort: candle_to_sort.moment)
        return candles

    """
    Init methods...
    """

    def __init__(self,
                 logfeed_program: LogFeed,
                 logfeed_process: LogFeed,
                 creator_env: Optional['ExecEnv'] = None):
        super().__init__(logfeed_program, logfeed_process)
        self._creator_env = creator_env

    def setup_first_time(self, env_type: EnvType, time: TimeEnv,
                         data_collector: AbstractDataCollector,
                         mongo: MongoManager, redis: RedisManager) -> None:
        """
        Initializes database connections and marks data as not loaded.
        """

        # Ensure this is called only once per EnvType
        if ExecEnv.instantiated_env_types is None:
            ExecEnv.instantiated_env_types = multiprocessing.Manager().list()
        if env_type.name in ExecEnv.instantiated_env_types:
            raise Exception(f'Tried to setup {env_type.name} ExecEnv twice')
        else:
            ExecEnv.instantiated_env_types.append(env_type.name)

        # Init the environment's variables
        self.env_type = env_type
        self._time = time
        self._data_collector = data_collector
        try:
            if env_type.value not in self._data_loaded.keys():
                self._data_loaded[env_type.value] = False
        except AttributeError:
            # First EnvType set up in this process: create the shared flag dict
            self._data_loaded = multiprocessing.Manager().dict()
            self._data_loaded[env_type.value] = False
        self._mongo = mongo
        self._redis = redis
        self._settings = multiprocessing.Manager().dict()
        self._pid = os.getpid()

        # Validate environment types of this environment and its database managers
        if not (env_type == mongo.env_type == redis.env_type):
            self.error_main(
                'COULD NOT SETUP EXECUTION ENVIRONMENT! '
                'EnvType of the ExecEnv must match that of its mongo and redis managers...'
            )

        # Load settings and use them to init db connections
        try:
            self._load_settings_from_config()
            self._init_db_connections()
            self._load_settings_from_redis()
        except Exception as e:
            self.error_main(
                'ExecEnv could not load settings and connect to the databases:'
            )
            self.warn_main(traceback.format_exc())

    def clone_same_thread(self,
                          creator_env: Optional['ExecEnv'] = None) -> None:
        """
        Copies creator_env's variables into this ExecEnv.
        """
        if creator_env is None and self._creator_env is None:
            raise ValueError(
                'Can\'t clone an execution environment on the same thread without a creator env'
            )
        elif creator_env is None:
            creator_env = self._creator_env

        # Ensure this ExecEnv and creator_env are on the same thread
        if os.getpid() != creator_env._pid:
            raise EnvironmentError(
                f'Tried to clone thread #{creator_env._pid}\'s ExecEnv variables in '
                f'thread #{os.getpid()}. Use self.fork_new_thread() instead.')
        self.env_type = creator_env.env_type
        self._time = creator_env._time
        self._data_collector = creator_env._data_collector
        self._mongo = creator_env._mongo
        self._redis = creator_env._redis
        self._pid = creator_env._pid
        self._settings = creator_env._settings
        self._data_loaded = creator_env._data_loaded

    def fork_new_thread(self, creator_env: Optional['ExecEnv'] = None) -> None:
        """
        Copies over creator_env's settings and creates new database accessors for this thread.
        """
        if creator_env is None and self._creator_env is None:
            raise ValueError(
                'Can\'t fork an execution environment into a new thread without a creator env'
            )
        elif creator_env is None:
            creator_env = self._creator_env

        self.env_type = creator_env.env_type
        self._time = creator_env._time
        self._data_collector = creator_env._data_collector
        self._data_loaded = creator_env._data_loaded
        self._pid = os.getpid()

        # Create new database accessors for the new thread
        self._mongo = MongoManager(logfeed_program=self.logfeed_program,
                                   env_type=self.env_type)
        self._redis = RedisManager(logfeed_process=self.logfeed_process,
                                   env_type=self.env_type)

        # Load settings and use them to init db connections
        try:
            self._load_settings_from_config()
            self._init_db_connections()
            self._load_settings_from_redis()
        except Exception as e:
            self.error_main(
                'ExecEnv could not load settings and connect to databases:')
            self.warn_main(traceback.format_exc())

    """
    Private init methods...
    """

    def _load_settings_from_config(self) -> None:
        """
        Parses the config file and loads its settings into memory.
        These should only include static settings (e.g. database credentials).
        """
        try:
            with open('config.properties') as file:
                lines = file.readlines()
            self._settings = {}
            for line in lines:
                if line.startswith('#') or len(line.strip()) == 0:
                    continue
                comps = line.split('=')
                if len(comps) < 2:
                    self.warn_process(f'Invalid config line: "{line.strip()}"')
                    continue
                key = comps[0].strip().lower()
                # Re-join with '=' so values that themselves contain '=' survive intact
                val = '='.join(comps[1:]).strip()
                self._settings[key] = val
        except Exception:
            self.error_process('Error loading settings from config file:')
            self.warn_process(traceback.format_exc())

    def _init_db_connections(self) -> None:
        if not self._mongo.connect(user=self.get_setting('mongo.user'),
                                   password=self.get_setting('mongo.pass'),
                                   ip=self.get_setting('mongo.ip'),
                                   port=self.get_setting('mongo.port')):
            self.error_main(
                f'{self.env_type.name} environment could not initialize connection to MongoDB database'
            )

        if not self._redis.connect(ip=self.get_setting('redis.ip'),
                                   port=self.get_setting('redis.port')):
            self.error_main(
                f'{self.env_type.name} environment could not initialize connection to Redis database'
            )

    def _load_settings_from_redis(self) -> None:
        """
        Overwrites any config-file settings with settings saved in Redis.
        """
        try:
            setting_keys = ['symbols', 'alpaca.endpoint']
            for setting_key in setting_keys:
                setting_str = self.redis().get_setting(setting_key)
                if setting_str is not None:
                    self._settings[setting_key] = setting_str
                self.redis().set_setting(setting_key,
                                         self._settings[setting_key])

        except Exception as e:
            self.error_process('Error loading settings from Redis:')
            self.warn_process(traceback.format_exc())

    """
    Reset method...
    """

    def reset_dbs(self) -> None:
        """
        Clears MongoDB and Redis databases of all data stored under this EnvType.
        """
        self.mongo().clear_db()
        self.redis().clear_db()
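
A condensed sketch of the lifecycle this class implies, assuming env is an ExecEnv that has completed setup_first_time:

# Settings merged from config.properties and Redis are available after setup
mongo_ip = env.get_setting('mongo.ip')

# The data-loaded flag is shared by every ExecEnv of the same EnvType, across threads
if not env.is_data_loaded():
    env.mark_data_as_busy()
    # ... collect and store data via env.data_collector() and env.mongo() ...
    env.mark_data_as_loaded()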
Code Example #7
    def optimize_strategy(self, strategy: AbstractStrategy,
                          symbol: str) -> None:
        """
        Runs simulations from START_DATE thru two days ago.
        Tries hundreds of model scoring systems and picks the highest performing one.
        """
        self.info_process(
            f'Optimizing {strategy.__class__.__name__}\'s weights using symbol: {symbol}'
        )
        end_date = self.time().now() - timedelta(days=2)
        dates_on_file = self.mongo().get_dates_on_file(symbol, START_DATE,
                                                       end_date)
        start_index = OPTIMIZATION_WARMUP_DAYS
        if len(dates_on_file) < start_index + 3:
            self.warn_process(
                f'Insufficient historical data ({len(dates_on_file)} days) for {symbol}'
            )
            return
        evaluation = None

        # Initialize objects that make up a kind of container for this evaluation
        sim_time_env = TimeEnv(
            datetime.combine(dates_on_file[start_index - 1], OPEN_TIME))
        sim_env = ExecEnv(self.logfeed_program, self.logfeed_process)
        sim_env.setup_first_time(env_type=EnvType.OPTIMIZATION,
                                 time=sim_time_env,
                                 data_collector=PolygonDataCollector(
                                     logfeed_program=self.logfeed_program,
                                     logfeed_process=self.logfeed_process,
                                     time_env=sim_time_env),
                                 mongo=MongoManager(self.logfeed_program,
                                                    EnvType.OPTIMIZATION),
                                 redis=RedisManager(self.logfeed_process,
                                                    EnvType.OPTIMIZATION))

        # Create a ModelFeeder for the simulated environment
        sim_model_feeder = ModelFeeder(sim_env)

        # Place the strategy in the simulated environment
        strategy = self._clone_strategy(strategy, sim_env)

        # Copy data we need from live environment into simulated environment
        data_copy_error = candle_util.init_simulation_data(
            live_env=self,
            sim_env=sim_env,
            symbols=[strategy.get_symbol()],
            days=start_index - 2,
            end_date=dates_on_file[start_index - 1],
            model_feeder=sim_model_feeder)
        if data_copy_error is not None:
            self.warn_process(data_copy_error)
            return

        for day_to_eval in dates_on_file[start_index:len(dates_on_file) - 2]:
            # Cancel simulations part-way through if a stop has been requested
            if not self.running:
                return

            # Copy day's data into the simulated environment but don't train analysis models
            data_copy_error = candle_util.init_simulation_data(
                live_env=self,
                sim_env=sim_env,
                symbols=[strategy.get_symbol()],
                days=2,
                end_date=dates_on_file[start_index - 1],
                model_feeder=sim_model_feeder,
                skip_last_day_training=True)
            if data_copy_error is not None:
                self.warn_process(data_copy_error)
                self.warn_process(
                    f'Optimization of {strategy.__class__.__name__} on '
                    f'{symbol} failed because the program is missing data on {day_to_eval:%Y-%m-%d}'
                )
                return

            # Move the perspective to the historical day
            sim_env.time().set_moment(
                datetime.combine(day_to_eval,
                                 strategy.times_active().get_start_time()))

            # Create a new strategy for this run
            strategy = self._clone_strategy(strategy, sim_env)

            # Run evaluation on the day
            # TODO Change this to run an optimization simulation
            next_evaluation = StrategyEvaluator(strategy).evaluate()

            # Merge the results with all the evaluations from previous days
            if evaluation is None:
                evaluation = next_evaluation
                evaluation._calculate_metrics()
            else:
                evaluation.combine(next_evaluation)

        # Print results after evaluating each day
        if evaluation is not None:
            self.warn_process(
                f'Evaluation results of {strategy.__class__.__name__} for {symbol}:\n'
                f'\ttotal days = {evaluation.days_evaluated}, viable days: {evaluation.days_viable}, '
                f'pct days entered = {100 * evaluation.days_entered / evaluation.days_evaluated}%, '
                f'avg profit = {evaluation.avg_profit},\n'
                f'\tmedian profit = {evaluation.med_profit}, win ratio = {evaluation.win_ratio}, '
                f'entry-attempt ratio = {evaluation.entry_ratio}')

        return
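
A hedged usage sketch (assuming optimizer is an instance of the class exposing this method, and that the 'symbols' setting is a comma-separated list; both are assumptions):

for symbol in optimizer.get_setting('symbols').split(','):
    optimizer.optimize_strategy(strategy, symbol.strip())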