Example #1
import numpy as np
import ta  # technical-analysis library providing ta.utils.dropna

from creme import linear_model, metrics, preprocessing

# stock_data, format_timeseries_dataframe, format_look_ahead, fibonacci,
# fibonacci_rsi and roll_dataframe_stats are project-local helpers.


def main():
    df = stock_data()
    df = ta.utils.dropna(df)
    df = format_timeseries_dataframe(df, "Timestamp")
    df = format_look_ahead(df, "Close", size=-4)
    df = df.dropna()  # dropna() returns a copy, so the result must be reassigned
    # Label the direction of the look-ahead close: 1 = up, -1 = down, 0 = flat.
    df['log_returns'] = 0
    df['log_returns'] = np.where(df["Close_future"] > df["Close"], 1,
                                 df['log_returns'])
    df['log_returns'] = np.where(df["Close_future"] < df["Close"], -1,
                                 df['log_returns'])
    df = fibonacci(df)
    df = fibonacci_rsi(df)
    # df = super_hyper_mega_average_true_range(df)
    df = df.drop(columns=[
        'Open', 'High', 'Low', 'Volume_Currency', 'Weighted_Price',
        'Volume_BTC', 'Close', 'above_below_close', 'Close_future'
    ])
    df = df.rename(columns={"log_returns": "y"})
    model = (preprocessing.MinMaxScaler()
             | linear_model.PAClassifier(C=0.01, mode=1))
    report = metrics.ClassificationReport()

    roll_dataframe_stats(df, model=model, metric=report)
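
The | operator composes the scaler and the passive-aggressive classifier into a single creme pipeline, which roll_dataframe_stats then drives row by row. A minimal sketch of such a driving loop, assuming creme's one-sample fit_one/predict_one API and the "y" column produced above (progressive validation: predict first, then learn):

    for x in df.to_dict(orient='records'):
        y = x.pop('y')
        y_pred = model.predict_one(x)
        if y_pred is not None:  # unfitted models may not predict yet
            report.update(y, y_pred)
        model = model.fit_one(x, y)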
Example #2
@pytest.mark.parametrize('estimator, check', [
    pytest.param(
        copy.deepcopy(estimator), check, id=f'{estimator}:{check.__name__}')
    for estimator in list(get_all_estimators()) + [
        feature_extraction.TFIDF(),
        linear_model.LogisticRegression(),
        preprocessing.StandardScaler() | linear_model.LinearRegression(),
        preprocessing.StandardScaler() | linear_model.PAClassifier(),
        preprocessing.StandardScaler()
        | multiclass.OneVsRestClassifier(linear_model.LogisticRegression()),
        preprocessing.StandardScaler()
        | multiclass.OneVsRestClassifier(linear_model.PAClassifier()),
        naive_bayes.GaussianNB(),
        preprocessing.StandardScaler(),
        cluster.KMeans(n_clusters=5, seed=42),
        preprocessing.MinMaxScaler(),
        preprocessing.MinMaxScaler() + preprocessing.StandardScaler(),
        preprocessing.PolynomialExtender(),
        feature_selection.VarianceThreshold(),
        feature_selection.SelectKBest(similarity=stats.PearsonCorrelation())
    ] for check in utils.estimator_checks.yield_checks(estimator)
])
def test_check_estimator(estimator, check):
    check(estimator)
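
The parametrization relies on utils.estimator_checks.yield_checks, which yields one check function per contract an estimator must honour; each check takes the estimator itself as its only argument. A minimal sketch of running the same checks outside of pytest:

    from creme import linear_model, utils

    model = linear_model.LogisticRegression()
    for check in utils.estimator_checks.yield_checks(model):
        check(model)  # raises AssertionError if a contract is violated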
Example #3
File: test_.py Project: rajagurunath/creme
import importlib
import inspect

from creme import (base, compose, dummy, ensemble, feature_extraction,
                   feature_selection, linear_model, multiclass, multioutput,
                   naive_bayes, preprocessing, stats, tree)

# CremeBaseWrapper and SKLBaseWrapper come from creme's compat layer; the
# exact import path depends on the creme version in use.


def get_all_estimators():
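    """Instantiate and yield one instance of every concrete creme estimator.

    Estimators whose constructors require arguments are special-cased below;
    wrappers and other classes that cannot be checked generically are skipped.
    """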

    ignored = (CremeBaseWrapper, SKLBaseWrapper, base.Wrapper,
               compose.FuncTransformer, ensemble.GroupRegressor,
               ensemble.StackingBinaryClassifier, feature_extraction.Agg,
               feature_extraction.TargetAgg, feature_extraction.Differ,
               linear_model.FMRegressor, linear_model.SoftmaxRegression,
               multioutput.ClassifierChain, multioutput.RegressorChain,
               naive_bayes.BernoulliNB, naive_bayes.ComplementNB,
               preprocessing.OneHotEncoder, tree.DecisionTreeClassifier)

    def is_estimator(obj):
        return inspect.isclass(obj) and issubclass(obj, base.Estimator)

    for submodule in importlib.import_module('creme').__all__:
        for name, obj in inspect.getmembers(
                importlib.import_module(f'creme.{submodule}'), is_estimator):

            if issubclass(obj, ignored):
                continue

            if issubclass(obj, dummy.StatisticRegressor):
                inst = obj(statistic=stats.Mean())

            elif issubclass(obj, ensemble.BaggingClassifier):
                inst = obj(linear_model.LogisticRegression())

            elif issubclass(obj, ensemble.BaggingRegressor):
                inst = obj(linear_model.LinearRegression())

            elif issubclass(obj, ensemble.HedgeBinaryClassifier):
                inst = obj([
                    linear_model.LogisticRegression(),
                    linear_model.PAClassifier()
                ])

            elif issubclass(obj, ensemble.HedgeRegressor):
                inst = obj([
                    preprocessing.StandardScaler()
                    | linear_model.LinearRegression(intercept_lr=0.1),
                    preprocessing.StandardScaler()
                    | linear_model.PARegressor(),
                ])

            elif issubclass(obj, feature_selection.RandomDiscarder):
                inst = obj(n_to_keep=5)

            elif issubclass(obj, feature_selection.SelectKBest):
                inst = obj(similarity=stats.PearsonCorrelation())

            elif issubclass(obj, linear_model.LinearRegression):
                inst = preprocessing.StandardScaler() | obj(intercept_lr=0.1)

            elif issubclass(obj, linear_model.PARegressor):
                inst = preprocessing.StandardScaler() | obj()

            elif issubclass(obj, multiclass.OneVsRestClassifier):
                inst = obj(binary_classifier=linear_model.LogisticRegression())

            else:
                inst = obj()

            yield inst
Example #4
            fit_intercept=False
        ),
        classes=[False, True]
    )
]),
'sklearn PassiveAggressiveClassifier': compose.Pipeline([
    preprocessing.StandardScaler(),
    compat.CremeClassifierWrapper(
        sklearn_estimator=PassiveAggressiveClassifier(),
        classes=[False, True]
    )
]),
'No-change classifier': dummy.NoChangeClassifier(),
'Passive-aggressive II': compose.Pipeline([
    preprocessing.StandardScaler(),
    linear_model.PAClassifier(C=1, mode=2)
]),
'Logistic regression w/ VanillaSGD': compose.Pipeline([
    preprocessing.StandardScaler(),
    linear_model.LogisticRegression(
        optimizer=optim.VanillaSGD(
            lr=optim.OptimalLR()
        )
    )
]),
'Logistic regression w/ Adam': compose.Pipeline([
    preprocessing.StandardScaler(),
    linear_model.LogisticRegression(optim.Adam(optim.OptimalLR()))
]),
'Logistic regression w/ AdaGrad': compose.Pipeline([
    preprocessing.StandardScaler(),
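
The fragment above mixes native creme pipelines with sklearn models adapted through compat.CremeClassifierWrapper, which gives a batch sklearn estimator creme's one-sample interface. A minimal sketch of that adapter in isolation (the feature dicts and labels are made up for illustration):

    from creme import compat
    from sklearn.linear_model import PassiveAggressiveClassifier

    model = compat.CremeClassifierWrapper(
        sklearn_estimator=PassiveAggressiveClassifier(),
        classes=[False, True]  # sklearn's partial_fit needs the label set up front
    )
    model = model.fit_one({'x1': 0.5, 'x2': -1.2}, True)  # hypothetical sample
    print(model.predict_one({'x1': 0.4, 'x2': -1.0}))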
Example #5
import functools

from creme import compat, linear_model, metrics, optim, preprocessing, stream
from sklearn import datasets
from sklearn import linear_model as sk_linear_model

import benchmark  # project-local module exposing benchmark.benchmark(...)


def main():

    benchmark.benchmark(
        get_X_y=functools.partial(stream.iter_sklearn_dataset, datasets.load_breast_cancer()),
        n=569,
        get_pp=preprocessing.StandardScaler,
        models=[
            ('creme', 'Log reg', linear_model.LogisticRegression(
                optimizer=optim.VanillaSGD(0.01),
                l2=0,
                intercept_lr=0.01
            )),
            ('sklearn', 'SGD', compat.CremeClassifierWrapper(
                sklearn_estimator=sk_linear_model.SGDClassifier(
                    loss='log',
                    learning_rate='constant',
                    eta0=0.01,
                    penalty='none'
                ),
                classes=[False, True]
            )),

            ('creme', 'PA-I', linear_model.PAClassifier(
                C=0.01,
                mode=1,
                fit_intercept=True
            )),
            ('sklearn', 'PA-I', compat.CremeClassifierWrapper(
                sklearn_estimator=sk_linear_model.PassiveAggressiveClassifier(
                    C=0.01,
                    loss='hinge',
                    fit_intercept=True
                ),
                classes=[False, True]
            )),

            # ('creme', 'PA-I', linear_model.PAClassifier(C=1, mode=1)),
            # ('creme', 'PA-II', linear_model.PAClassifier(C=1, mode=2)),
            # ('sklearn', 'PA-I', compat.CremeClassifierWrapper(
            #     sklearn_estimator=sk_linear_model.PassiveAggressiveClassifier(
            #         C=1,
            #         loss='hinge'
            #     ),
            #     classes=[False, True]
            # )),
            # ('sklearn', 'PA-II', compat.CremeClassifierWrapper(
            #     sklearn_estimator=sk_linear_model.PassiveAggressiveClassifier(
            #         C=1,
            #         loss='squared_hinge'
            #     ),
            #     classes=[False, True]
            # )),

            # ('sklearn', 'Logistic regression NI', compat.CremeClassifierWrapper(
            #     sklearn_estimator=sk_linear_model.SGDClassifier(
            #         loss='log',
            #         learning_rate='constant',
            #         eta0=0.01,
            #         fit_intercept=True,
            #         penalty='none'
            #     ),
            #     classes=[False, True]
            # )),
            # ('sklearn', 'PA-I NI', compat.CremeClassifierWrapper(
            #     sklearn_estimator=sk_linear_model.PassiveAggressiveClassifier(
            #         C=1,
            #         loss='hinge',
            #         fit_intercept=False
            #     ),
            #     classes=[False, True]
            # )),
            # ('sklearn', 'PA-II NI', compat.CremeClassifierWrapper(
            #     sklearn_estimator=sk_linear_model.PassiveAggressiveClassifier(
            #         C=1,
            #         loss='squared_hinge',
            #         fit_intercept=False
            #     ),
            #     classes=[False, True]
            # )),
        ],
        get_metric=metrics.Accuracy
    )
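
Both libraries are fed the exact same stream: stream.iter_sklearn_dataset turns a loaded sklearn dataset into one (dict, label) pair per observation, which is the input shape every creme model above expects. The hyperparameters are also matched (VanillaSGD(0.01) against SGDClassifier with learning_rate='constant' and eta0=0.01), so accuracy differences reflect the implementations rather than the settings. A minimal sketch of the feed on its own:

    from creme import stream
    from sklearn import datasets

    # Peek at the first observation: a dict of features plus its label.
    X_y = stream.iter_sklearn_dataset(datasets.load_breast_cancer())
    x, y = next(iter(X_y))
    print(y, sorted(x)[:3])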