Example #1
0
def main():
    """Benchmark creme linear regressors against scikit-learn's SGDRegressor
    on the Boston housing dataset, scored with MSE."""
    models = [
        ('creme', 'LinReg',
         linear_model.LinearRegression(optimizer=optim.VanillaSGD(0.01),
                                       l2=0.)),
        ('creme', 'GLM',
         linear_model.GLMRegressor(optimizer=optim.VanillaSGD(0.01),
                                   l2=0.)),
        # Same GLM, but wrapped so the target is detrended online; the
        # intercept is frozen (intercept_lr=0.) since the detrender handles it.
        ('creme', 'GLM detrend',
         meta.Detrender(
             linear_model.GLMRegressor(optimizer=optim.VanillaSGD(0.01),
                                       l2=0.,
                                       intercept_lr=0.))),
        # scikit-learn baseline configured to match: constant lr, no penalty.
        ('sklearn', 'SGD',
         compat.CremeRegressorWrapper(
             sklearn_estimator=sk_linear_model.SGDRegressor(
                 learning_rate='constant',
                 eta0=0.01,
                 fit_intercept=True,
                 penalty='none'))),
    ]
    benchmark.benchmark(
        get_X_y=functools.partial(stream.iter_sklearn_dataset,
                                  datasets.load_boston()),
        n=506,  # number of rows in the Boston housing dataset
        get_pp=preprocessing.StandardScaler,
        models=models,
        get_metric=metrics.MSE)
Example #2
0
def main():
    """Benchmark creme regressors on the bike-sharing dataset (182,470 rows),
    scored with MSE."""

    def add_hour(x):
        """Derive an 'hour' feature from the 'moment' timestamp, in place."""
        x['hour'] = x['moment'].hour
        return x

    def make_pipeline():
        """Shared preprocessing: selected raw weather features plus a running
        per-(station, hour) mean of the target, followed by standard scaling."""
        return (
            compose.Whitelister('clouds', 'humidity', 'pressure', 'temperature',
                                'wind') +
            (add_hour | feature_extraction.TargetAgg(by=['station', 'hour'],
                                                     how=stats.Mean()))
        ) | preprocessing.StandardScaler()

    benchmark.benchmark(
        get_X_y=datasets.fetch_bikes,
        n=182470,
        get_pp=make_pipeline,
        models=[
            ('creme', 'GLM',
             linear_model.GLMRegressor(optimizer=optim.VanillaSGD(0.01),
                                       l2=0.)),
            # BUG FIX: this entry was also labelled 'GLM', making the two
            # models indistinguishable in the results table. Label the
            # detrended variant distinctly, consistent with the other
            # benchmark scripts.
            ('creme', 'GLM detrend',
             meta.Detrender(
                 linear_model.GLMRegressor(optimizer=optim.VanillaSGD(0.01),
                                           l2=0.))),
        ],
        get_metric=metrics.MSE)
Example #3
0
# Resolve the optimizer name in `opt` to a creme optimizer instance.
# Factories are lazy (lambdas) so only the selected optimizer's
# hyper-parameters are evaluated — same semantics as the original if/elif
# chain, without the un-Pythonic parenthesized conditions.
# NOTE(review): lr, beta_1, beta_2, eps, gamma, final_lr, rho, alpha, beta,
# l1 and l2 are assumed to be defined earlier in this script — confirm.
_OPTIMIZER_FACTORIES = {
    "AdaBound": lambda: optim.AdaBound(lr, beta_1, beta_2, eps, gamma, final_lr),
    "AdaDelta": lambda: optim.AdaDelta(rho, eps),
    "AdaGrad": lambda: optim.AdaGrad(lr, eps),
    "Adam": lambda: optim.Adam(lr, beta_1, beta_2, eps),
    "FTRLProximal": lambda: optim.FTRLProximal(alpha, beta, l1, l2),
    "Momentum": lambda: optim.Momentum(lr, rho),
    "RMSProp": lambda: optim.RMSProp(lr, rho, eps),
    "VanillaSGD": lambda: optim.VanillaSGD(lr),
    "NesterovMomentum": lambda: optim.NesterovMomentum(lr, rho),
}
_factory = _OPTIMIZER_FACTORIES.get(opt)
# Unknown optimizer names fall back to None, matching the original else branch.
optimizer = _factory() if _factory is not None else None

# Collects results to emit; presumably keyed per request — TODO confirm usage
# in the rest of this script (excerpt is truncated here).
output = {}

# Main serve loop: block on stdin for each incoming request.
# NOTE(review): `init`, `optimizer` and `l2` are assumed to be defined earlier
# in the script; the loop body continues beyond this excerpt.
while True:

    # Wait for a request: blocks until a line arrives on stdin.
    data = input()

    # Lazily create the softmax regression model on the first request only.
    if (init == 0):
        MNlog_reg = linear_model.SoftmaxRegression(optimizer, l2=l2)
        init = 1
Example #4
0
def main():
    """Benchmark logistic regression (creme vs. scikit-learn SGD) on the
    electricity dataset (45,312 rows), scored with accuracy."""
    # creme model: vanilla SGD with a constant 0.05 learning rate, no penalty.
    creme_logreg = linear_model.LogisticRegression(
        optimizer=optim.VanillaSGD(0.05),
        l2=0,
        intercept_lr=0.05,
    )
    # scikit-learn counterpart configured to match the creme settings.
    sklearn_logreg = compat.CremeClassifierWrapper(
        sklearn_estimator=sk_linear_model.SGDClassifier(
            loss='log',
            learning_rate='constant',
            eta0=0.05,
            penalty='none',
        ),
        classes=[False, True],
    )
    benchmark.benchmark(
        get_X_y=datasets.fetch_electricity,
        n=45312,
        get_pp=preprocessing.StandardScaler,
        models=[
            ('creme', 'Logistic regression', creme_logreg),
            ('sklearn', 'Logistic regression', sklearn_logreg),
        ],
        get_metric=metrics.Accuracy)
Example #5
0
     preprocessing.StandardScaler(),
     # sklearn passive-aggressive classifier adapted to the creme API;
     # presumably the 'Passive-aggressive I' entry of this dict — TODO confirm
     # against the key, which is above this excerpt.
     compat.CremeClassifierWrapper(
         sklearn_estimator=PassiveAggressiveClassifier(),
         classes=[False, True]
     )
 ]),
 # Baseline: always predicts the previously seen label.
 'No-change classifier': dummy.NoChangeClassifier(),
 'Passive-aggressive II': compose.Pipeline([
     preprocessing.StandardScaler(),
     linear_model.PAClassifier(C=1, mode=2)
 ]),
 # The remaining entries share the same shape: scale, then logistic
 # regression differing only in the optimizer; all use OptimalLR scheduling.
 'Logistic regression w/ VanillaSGD': compose.Pipeline([
     preprocessing.StandardScaler(),
     linear_model.LogisticRegression(
         optimizer=optim.VanillaSGD(
             lr=optim.OptimalLR()
         )
     )
 ]),
 'Logistic regression w/ Adam': compose.Pipeline([
     preprocessing.StandardScaler(),
     linear_model.LogisticRegression(optim.Adam(optim.OptimalLR()))
 ]),
 'Logistic regression w/ AdaGrad': compose.Pipeline([
     preprocessing.StandardScaler(),
     linear_model.LogisticRegression(optim.AdaGrad(optim.OptimalLR()))
 ]),
 'Logistic regression w/ RMSProp': compose.Pipeline([
     preprocessing.StandardScaler(),
     linear_model.LogisticRegression(optim.RMSProp(optim.OptimalLR()))
 ])
Example #6
0
def main():
    """Benchmark creme vs. scikit-learn classifiers on the breast cancer
    dataset (569 rows), scored with accuracy."""
    models = [
        # Matched pair: creme logistic regression vs. sklearn SGD log-loss,
        # both with a constant 0.01 learning rate and no penalty.
        ('creme', 'Log reg',
         linear_model.LogisticRegression(
             optimizer=optim.VanillaSGD(0.01),
             l2=0,
             intercept_lr=0.01)),
        ('sklearn', 'SGD',
         compat.CremeClassifierWrapper(
             sklearn_estimator=sk_linear_model.SGDClassifier(
                 loss='log',
                 learning_rate='constant',
                 eta0=0.01,
                 penalty='none'),
             classes=[False, True])),
        # Matched pair: passive-aggressive I with identical C and intercept.
        ('creme', 'PA-I',
         linear_model.PAClassifier(
             C=0.01,
             mode=1,
             fit_intercept=True)),
        ('sklearn', 'PA-I',
         compat.CremeClassifierWrapper(
             sklearn_estimator=sk_linear_model.PassiveAggressiveClassifier(
                 C=0.01,
                 loss='hinge',
                 fit_intercept=True),
             classes=[False, True])),
    ]
    benchmark.benchmark(
        get_X_y=functools.partial(stream.iter_sklearn_dataset,
                                  datasets.load_breast_cancer()),
        n=569,  # number of rows in the breast cancer dataset
        get_pp=preprocessing.StandardScaler,
        models=models,
        get_metric=metrics.Accuracy)
Example #7
0
        'total_mastery_points_ratio': total_points_ratio,
        'rank_ratio': rank_ratio
    }


# Registry of named pipelines to persist; key is the model version label.
MODELS = {
    # v0: hand-crafted ratio features from process_match, plus one-hot encoded
    # 'mode' and 'type', standard-scaled, feeding an online linear regression
    # trained with vanilla SGD (lr=0.005).
    'v0': (compose.FuncTransformer(process_match) | compose.TransformerUnion([
        compose.Whitelister(
            'champion_mastery_points_ratio',
            'total_mastery_points_ratio',
            'rank_ratio',
        ),
        # sparse=False presumably keeps features as plain dicts for the
        # downstream scaler — TODO confirm against creme's OneHotEncoder docs.
        preprocessing.OneHotEncoder('mode', sparse=False),
        preprocessing.OneHotEncoder('type', sparse=False)
    ]) | preprocessing.StandardScaler()
           | linear_model.LinearRegression(optim.VanillaSGD(0.005)))
}


class Command(base.BaseCommand):
    """Management command that persists every pipeline in MODELS, skipping
    any name that is already stored in the database."""

    def handle(self, *args, **options):

        print(f'Adding models with creme version {creme.__version__}')

        for name, pipeline in MODELS.items():
            # Insert only pipelines not yet stored under this name.
            if not models.CremeModel.objects.filter(name=name).exists():
                models.CremeModel(name=name, pipeline=pipeline).save()
            else:
                print(f'\t{name} has already been added')