Example 1
    def test_KFold(self):
        df = DF_TEST.copy()

        # fit an sklearn MLP classifier on lagged technical indicator features
        fit = df.model.fit(
            SkModel(
                MLPClassifier(activation='tanh', hidden_layer_sizes=(60, 50), random_state=42, max_iter=2),
                FeaturesAndLabels(
                    features=extract_with_post_processor(
                        [
                            lambda df: df["Close"].ta.trix(),
                            lambda df: df["Close"].ta.ppo(),
                            lambda df: df["Close"].ta.apo(),
                            lambda df: df["Close"].ta.macd(),
                            lambda df: df.ta.adx(),
                        ],
                        lambda df: df.ta.rnn(range(10))
                    ),
                    labels=[
                        lambda df: df["Close"].ta.sma(period=60) \
                            .ta.cross(df["Close"].ta.sma(period=20)) \
                            .ta.rnn([1, 2, 3, 4, 5]) \
                            .abs() \
                            .sum(axis=1) \
                            .shift(-5) \
                            .astype(bool)
                    ]
                )
            ),
            RandomSplits(test_size=0.4,
                         test_validate_split_seed=42,
                         cross_validation=(1, KFoldBoostRareEvents(n_splits=5).split))
        )
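
The nested lambdas above are pandas-ml-utils feature selectors, and the post-processor `df.ta.rnn(range(10))` turns each indicator into a 10-step lagged window. The following is a minimal, self-contained sketch of that windowing idea in plain pandas; the helper name `make_lagged_windows` and the random toy data are assumptions for illustration, not library API.

import numpy as np
import pandas as pd

def make_lagged_windows(features: pd.DataFrame, lags=range(10)) -> pd.DataFrame:
    # stack every feature column at several lags, mimicking an RNN-style window
    lagged = {(col, lag): features[col].shift(lag)
              for col in features.columns
              for lag in lags}
    return pd.DataFrame(lagged, index=features.index).dropna()

# toy input standing in for the indicator columns above
raw = pd.DataFrame({"trix": np.random.randn(100), "macd": np.random.randn(100)})
windows = make_lagged_windows(raw, lags=range(10))
print(windows.shape)  # (91, 20): 9 rows lost to lagging, 2 features x 10 lags
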
Example 2
    def test_reinforcement(self):
        # given a data frame
        df = DF_TEST.copy()

        # and a trading agent
        class ARGym(TradingAgentGym):

            def calculate_trade_reward(self, portfolio_performance_log):
                return portfolio_performance_log["net"].iloc[-1]

            def next_observation(self, idx, features, labels, targets, weights):
                return features

        # when we fit the agent
        fit = df.model.fit(
            ReinforcementModel(
                lambda: PPO2('MlpLstmPolicy',
                             DummyVecEnv([lambda: ARGym((28, 2), initial_capital=100000)]),
                             nminibatches=1),
                FeaturesAndLabels(
                    features=extract_with_post_processor(
                        [
                            lambda df: df.ta.atr(),
                            lambda df: df["Close"].ta.trix(),
                        ],
                        lambda df: df.ta.rnn(28)
                    ),
                    targets=[
                        lambda df: df["Close"]
                    ],
                    labels=[Constant(0)],
                )
            ),
            RandomSequences(0.1, 0.7, max_folds=None),
            total_timesteps=128 * 2,
            verbose=1,
            render='system'
        )

        print(fit.test_summary.df[PREDICTION_COLUMN_NAME])

        prediction = df.model.predict(fit.model, tail=3)
        print(prediction[PREDICTION_COLUMN_NAME])
        self.assertEqual(3, len(prediction))
        self.assertGreater(len(fit.model.reward_history), 0)
        self.assertGreater(len(fit.model.reward_history[0]), 1)
        self.assertGreater(len(fit.model.reward_history[0][1]), 1)
        backtest = df.model.backtest(fit.model).df
        print(backtest[PREDICTION_COLUMN_NAME])
        self.assertEqual(3, len(prediction))
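
The custom Gym's reward is simply the last `net` value of the performance log it is handed. A tiny stand-alone illustration of that calculation follows; the column names of the log are an assumption about what the framework passes in.

import pandas as pd

# hypothetical portfolio performance log as calculate_trade_reward might receive it
portfolio_performance_log = pd.DataFrame({
    "gross": [100_000, 100_500, 100_200],
    "net":   [100_000, 100_450, 100_180],
})

reward = portfolio_performance_log["net"].iloc[-1]  # same expression as in ARGym
print(reward)  # 100180
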
Example 3
    def post_process(selectors, post_processors):
        # early exit if we actually do not post process
        if post_processors is None:
            return selectors

        # make the post processors iterable and execute them one after the other
        pps = post_processors if isinstance(post_processors,
                                            list) else [post_processors]
        previous = selectors

        for pp in pps:
            previous = extract_with_post_processor(previous, pp)

        return previous
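
`post_process` simply folds a list of post-processors over the selectors, wrapping each step in `extract_with_post_processor`. The same chaining pattern is shown below with plain callables, as a sketch independent of pandas-ml-utils (the name `chain` is hypothetical):

def chain(value, post_processors):
    # same normalization and left-to-right fold as post_process above
    if post_processors is None:
        return value
    pps = post_processors if isinstance(post_processors, list) else [post_processors]
    for pp in pps:
        value = pp(value)
    return value

print(chain(3, [lambda x: x + 1, lambda x: x * 10]))  # 40
print(chain(3, lambda x: x - 1))                      # 2
print(chain(3, None))                                 # 3
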
Example 4
    def test_future_bband_quantile_classification(self):
        df = DF_TEST.copy()

        # fit a classifier on one-hot encoded future Bollinger band quantile buckets
        fit = df.model.fit(
            SkModel(
                MLPClassifier(activation='tanh',
                              hidden_layer_sizes=(60, 50),
                              random_state=42,
                              warm_start=True,
                              max_iter=2),
                FeaturesAndLabels(
                    features=extract_with_post_processor([
                        lambda df: df["Close"].ta.macd()._[
                            ['macd.*', 'signal.*']],
                        lambda df: df.ta.adx()._[['+DI', '-DM', '+DM']],
                        lambda df: df["Close"].ta.mom(),
                        lambda df: df["Close"].ta.apo(),
                        lambda df: df.ta.atr(),
                        lambda df: df["Close"].ta.trix(),
                    ], lambda df: df.ta.rnn(280)),
                    labels=[
                        lambda df: df["Close"].ta.future_bband_quantile(
                            include_mean=False).ta.one_hot_encode_discrete()
                    ],
                    targets=[
                        lambda df: df["Close"].ta.bbands()[["lower", "upper"]]
                    ]),
                summary_provider=ClassificationSummary,
            ),
            RandomSplits(
                test_size=0.4,
                test_validate_split_seed=42,
                cross_validation=(1, KEquallyWeightEvents(n_splits=3).split)))

        print(fit)
        prediction = df.model.predict(fit.model, tail=3)
        self.assertEqual(3, len(prediction))
        self.assertEqual(
            (3, ),
            np.array(prediction[PREDICTION_COLUMN_NAME].iloc[-1, -1]).shape)

        target_predictions = prediction.map_prediction_to_target()
        print(target_predictions)
        self.assertEqual(9, len(target_predictions))
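
`ta.one_hot_encode_discrete` turns the discrete quantile bucket per row into a one-hot vector, which is why the last prediction cell holds an array of shape `(3,)`. A conceptual stand-in with plain pandas follows; the 0/1/2 bucket coding is an assumption for illustration.

import pandas as pd

# hypothetical buckets: 0 = below lower band, 1 = between bands, 2 = above upper band
buckets = pd.Series([0, 1, 2, 1, 1, 0])
one_hot = pd.get_dummies(buckets).astype(int)
print(one_hot.iloc[-1].to_numpy())        # [1 0 0]
print(one_hot.iloc[-1].to_numpy().shape)  # (3,)
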
Example 5
    def test_keras_model(self):
        df = DF_TEST.copy()

        # provider returning a freshly compiled keras model for every fit
        def model_provider():
            model = Sequential([
                Reshape((28 * 2,), input_shape=(28, 2)),
                Dense(60, activation='tanh'),
                Dense(50, activation='tanh'),
                Dense(1, activation="sigmoid")
            ])

            model.compile(Adam(), loss='mse')

            return model

        fit = df.model.fit(
            KerasModel(
                model_provider,
                FeaturesAndLabels(
                    features=extract_with_post_processor([
                        lambda df: df["Close"].ta.rsi(),
                        lambda df: (df["Volume"] / df["Volume"].ta.ema(14) - 1).rename("RelVolume")
                    ], lambda df: df.ta.rnn(28)),
                    labels=[
                        lambda df: (df["Close"] > df["Open"]).shift(-1),
                    ],
                    sample_weights=["Volume"]
                ),
                # kwargs
                forecasting_time_steps=7,
                epochs=2
            )
        )

        print(fit)

        prediction = df.model.predict(fit.model)
        print(prediction)
        print(type(prediction[PREDICTION_COLUMN_NAME, 0].iloc[-1]))
        self.assertIsInstance(prediction[PREDICTION_COLUMN_NAME, 0].iloc[-1], (float, np.float32, np.float64))

        backtest = df.model.backtest(fit.model)
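
The provider can be sanity-checked outside the KerasModel wrapper by rebuilding the same network and feeding it random `(28, 2)` windows. The imports below assume `tensorflow.keras`, which matches the layer and optimizer names used above.

import numpy as np
from tensorflow.keras.layers import Dense, Reshape
from tensorflow.keras.models import Sequential
from tensorflow.keras.optimizers import Adam

# same architecture as model_provider above
model = Sequential([
    Reshape((28 * 2,), input_shape=(28, 2)),
    Dense(60, activation='tanh'),
    Dense(50, activation='tanh'),
    Dense(1, activation="sigmoid"),
])
model.compile(Adam(), loss='mse')

x = np.random.rand(16, 28, 2).astype("float32")
y = (np.random.rand(16) > 0.5).astype("float32")
model.fit(x, y, epochs=1, verbose=0)
print(model.predict(x[:3]).shape)  # (3, 1): one sigmoid probability per window
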
Example 6
    def test_debug(self):
        df = DF_TEST.copy()

        # same Bollinger band quantile setup, expressed with the .q / .ml accessors
        fit = df.model.fit(
            SkModel(
                MLPClassifier(activation='tanh', hidden_layer_sizes=(60, 50), random_state=42, warm_start=True, max_iter=2),
                FeaturesAndLabels(
                    features=extract_with_post_processor(
                        [
                            lambda df: df["Close"].q.ta_macd().ml[['macd.*', 'signal.*']],
                            lambda df: df.q.ta_adx().ml[['+DI', '-DM', '+DM']],
                            lambda df: df["Close"].q.ta_mom(),
                            lambda df: df["Close"].q.ta_apo(),
                            lambda df: df.q.ta_atr(),
                            lambda df: df["Close"].q.ta_trix(),
                        ],
                        lambda df: df.q.ta_rnn(280)
                    ),
                    labels=[
                        lambda df: df["Close"].q.ta_future_bband_quantile().q.ta_one_hot_encode_discrete()
                    ],
                    targets=[
                        lambda df: df["Close"].q.ta_bbands()[["lower", "upper"]]
                    ]
                ),
                summary_provider=ClassificationSummary,
            ),
            test_size=0.4,
            test_validate_split_seed=42,
            cross_validation=(1, KEquallyWeightEvents(n_splits=3).split),
        )

        print(fit)
        prediction = df.model.predict(fit.model, tail=3)
        self.assertEqual(3, len(prediction))

        target_predictions = prediction.map_prediction_to_target()
        print(target_predictions)
        self.assertEqual(9, len(target_predictions))
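
The second element of the `cross_validation` tuple is a callable with a KFold-style `.split` signature yielding `(train_idx, test_idx)` pairs; `KEquallyWeightEvents` and `KFoldBoostRareEvents` are assumed to follow the same protocol as sklearn's `KFold`, shown here for reference.

import numpy as np
from sklearn.model_selection import KFold

# six samples, three folds: each fold yields train and test index arrays
x = np.arange(12).reshape(6, 2)
for train_idx, test_idx in KFold(n_splits=3).split(x):
    print(train_idx, test_idx)
# e.g. [2 3 4 5] [0 1], [0 1 4 5] [2 3], [0 1 2 3] [4 5]
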