def main():
    """Build a labeled BTC feature frame and run an online classification benchmark.

    Loads raw stock data, cleans it, derives fibonacci-based features and a
    3-class direction label (+1 up / -1 down / 0 flat), then rolls a
    MinMaxScaler | PAClassifier pipeline over the frame, collecting a
    classification report.
    """
    df = stock_data()
    df = ta.utils.dropna(df)
    df = format_timeseries_dataframe(df, "Timestamp")
    # Shift Close 4 rows into the future so each row carries its own target.
    df = format_look_ahead(df, "Close", size=-4)
    # BUG FIX: dropna() is not in-place and its result was discarded —
    # the NaNs introduced by the look-ahead shift survived into training.
    df = df.dropna()
    # 3-class direction label: 1 = price rises, -1 = price falls, 0 = flat.
    # BUG FIX: the original np.where(..., 1, 1) had identical branches, so
    # every row (including flat ones) was labeled 1 before the -1 pass and
    # the `= 0` initialization was dead code; the else-branch must be 0.
    df['log_returns'] = np.where(df["Close_future"] > df["Close"], 1, 0)
    df['log_returns'] = np.where(df["Close_future"] < df["Close"], -1, df['log_returns'])
    df = fibonacci(df)
    df = fibonacci_rsi(df)
    # df = super_hyper_mega_average_true_range(df)
    # Drop raw price/volume columns so only engineered features remain, and
    # keep the future-peek columns out of the feature set (label leakage).
    df = df.drop(columns=[
        'Open', 'High', 'Low', 'Volume_Currency', 'Weighted_Price',
        'Volume_BTC', 'Close', 'above_below_close', 'Close_future',
    ])
    df = df.rename(columns={"log_returns": "y"})
    # Online pipeline: scale features, then a passive-aggressive classifier.
    model = (
        preprocessing.MinMaxScaler()
        | linear_model.PAClassifier(C=0.01, mode=1)
    )
    report = metrics.ClassificationReport()
    roll_dataframe_stats(df, model=model, metric=report)
else:
    # Tail of a generator defined above this chunk (presumably an estimator
    # enumerator/fixture — start is outside view, so context is unconfirmed):
    # instantiate the class and hand the fresh instance to the caller.
    inst = obj()
    yield inst


# Run every generic estimator check against every estimator: all
# auto-discovered estimators plus a hand-picked set of pipelines, wrappers
# and standalone models. deepcopy gives each parametrized case its own
# fresh copy so checks cannot leak fitted state into one another.
@pytest.mark.parametrize('estimator, check', [
    pytest.param(
        copy.deepcopy(estimator),
        check,
        # Human-readable test id: "<estimator repr>:<check name>".
        id=f'{estimator}:{check.__name__}'
    )
    for estimator in list(get_all_estimators()) + [
        feature_extraction.TFIDF(),
        linear_model.LogisticRegression(),
        preprocessing.StandardScaler() | linear_model.LinearRegression(),
        preprocessing.StandardScaler() | linear_model.PAClassifier(),
        preprocessing.StandardScaler() | multiclass.OneVsRestClassifier(linear_model.LogisticRegression()),
        preprocessing.StandardScaler() | multiclass.OneVsRestClassifier(linear_model.PAClassifier()),
        naive_bayes.GaussianNB(),
        preprocessing.StandardScaler(),
        cluster.KMeans(n_clusters=5, seed=42),
        preprocessing.MinMaxScaler(),
        preprocessing.MinMaxScaler() + preprocessing.StandardScaler(),
        preprocessing.PolynomialExtender(),
        feature_selection.VarianceThreshold(),
        feature_selection.SelectKBest(similarity=stats.PearsonCorrelation())
    ]
    # Each estimator advertises its own applicable checks.
    for check in utils.estimator_checks.yield_checks(estimator)
])
def test_check_estimator(estimator, check):
    # Each parametrized case is one (estimator, check) pair; the check
    # callable raises on contract violation.
    check(estimator)
})]
    # (tail of a parametrize case opened above this chunk — the decorator
    # header and first entries are outside view)
    }, 2 + 3 * 4),
    # Pipeline: grid over the LinearRegression step's optimizer choices.
    # Expected count: 2 SGD lr values + 3 beta_1 x 4 lr Adam combos = 14.
    (preprocessing.StandardScaler() | linear_model.LinearRegression(), {
        'LinearRegression': {
            'optimizer': [(optim.SGD, {
                'lr': [1, 2]
            }), (optim.Adam, {
                'beta_1': [.1, .01, .001],
                'lr': [.1, .01, .001, .0001]
            })]
        }
    }, 2 + 3 * 4),
    # Pipeline with a swappable 'Scaler' slot (None placeholder filled by
    # the grid). Expected count: 3 scalers x 3 learning rates = 9.
    (compose.Pipeline(('Scaler', None), linear_model.LinearRegression()), {
        'Scaler': [
            preprocessing.MinMaxScaler(),
            preprocessing.MaxAbsScaler(),
            preprocessing.StandardScaler()
        ],
        'LinearRegression': {
            'optimizer': {
                'lr': [1e-1, 1e-2, 1e-3]
            }
        }
    }, 3 * 3)
])
def test_expand_param_grid_count(model, param_grid, count):
    # expand_param_grid must enumerate exactly the cartesian product implied
    # by the grid spec; each case carries its expected combination count.
    assert len(utils.expand_param_grid(model, param_grid)) == count


# Body of this test lies beyond the visible chunk.
def test_decision_tree_max_depth():