Example #1
    def _fit(self, recommender_data, users, items, matrix):
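        # Offline matrix-factorization fit: a FactorModel trained by an
        # OfflineIteratingImplicitLearner with a point-wise MSE objective,
        # gradient updates and a uniform negative sample generator.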
        model = rs.FactorModel(**self.parameter_defaults(
            begin_min=-0.01,
            begin_max=0.01,
            dimension=10,
            initialize_all=False,
            seed=254938879,
        ))

        updater = rs.FactorModelGradientUpdater(**self.parameter_defaults(
            learning_rate=0.05, regularization_rate=0.0))
        updater.set_model(model)

        negative_sample_generator = rs.UniformNegativeSampleGenerator(
            **self.parameter_defaults(negative_rate=0))
        negative_sample_generator.set_train_matrix(matrix)
        negative_sample_generator.set_items(items)

        point_wise = rs.ObjectiveMSE()
        gradient_computer = rs.GradientComputerPointWise()
        gradient_computer.set_objective(point_wise)
        gradient_computer.set_model(model)

        learner = rs.OfflineIteratingImplicitLearner(**self.parameter_defaults(
            seed=254938879,
            number_of_iterations=9,
        ))
        learner.set_gradient_computer(gradient_computer)
        learner.set_negative_sample_generator(negative_sample_generator)
        learner.set_model(model)
        learner.set_recommender_data(recommender_data)
        learner.add_gradient_updater(updater)

        return (model, learner)
Example #2
    def _fit(self, recommender_data, users, items, matrix):
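        # Same factor model, but here the gradient computer is chained as an
        # updater of the negative sample generator, and training runs through an
        # OfflineIteratingOnlineLearnerWrapper with shuffled iterations; the seeds
        # can be overridden via 'factor_seed' and 'learner_seed'.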
        model = rs.FactorModel(**self.parameter_defaults(
            begin_min=-0.01,
            begin_max=0.01,
            dimension=10,
            initialize_all=False,
            seed=self.parameter_default('factor_seed', 67439852),
        ))

        updater = rs.FactorModelGradientUpdater(**self.parameter_defaults(
            learning_rate=0.05, regularization_rate=0.0))
        updater.set_model(model)

        point_wise = rs.ObjectiveMSE()
        gradient_computer = rs.GradientComputerPointWise()
        gradient_computer.set_objective(point_wise)
        gradient_computer.set_model(model)
        gradient_computer.add_gradient_updater(updater)

        negative_sample_generator = rs.UniformNegativeSampleGenerator(
            **self.parameter_defaults(
                negative_rate=0, initialize_all=False, max_item=-1))
        negative_sample_generator.set_train_matrix(matrix)
        negative_sample_generator.set_items(items)
        negative_sample_generator.add_updater(gradient_computer)

        learner = rs.OfflineIteratingOnlineLearnerWrapper(
            **self.parameter_defaults(
                seed=self.parameter_default('learner_seed', 254938879),
                number_of_iterations=9,
                shuffle=True,
            ))
        learner.add_iterate_updater(negative_sample_generator)

        return (model, learner)
    def _config(self, top_k, seed):
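        # Online asymmetric factor model: a gradient updater and a plain model
        # updater are both attached, gradients flow through the negative sample
        # generator (negative_rate=20), and the updater chain is returned for the
        # online experiment.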
        model = rs.AsymmetricFactorModel(
            **self.parameter_defaults(begin_min=-0.01,
                                      begin_max=0.01,
                                      dimension=10,
                                      use_sigmoid=False,
                                      norm_type="exponential",
                                      gamma=0.8,
                                      initialize_all=False))

        gradient_updater = rs.AsymmetricFactorModelGradientUpdater(
            **self.parameter_defaults(
                learning_rate=0.05,
                cumulative_item_updates=False,
            ))
        gradient_updater.set_model(model)
        simple_updater = rs.AsymmetricFactorModelUpdater()
        simple_updater.set_model(model)

        point_wise = rs.ObjectiveMSE()
        gradient_computer = rs.GradientComputerPointWise()
        gradient_computer.set_objective(point_wise)
        gradient_computer.set_model(model)
        gradient_computer.add_gradient_updater(gradient_updater)

        negative_sample_generator = rs.UniformNegativeSampleGenerator(
            **self.parameter_defaults(
                negative_rate=20,
                initialize_all=False,
                seed=928357823,
            ))
        negative_sample_generator.add_updater(gradient_computer)

        return (model, [negative_sample_generator, simple_updater], [])
Example #4
    def _config(self, top_k, seed):
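        # Online FactorModel pipeline built around an ImplicitGradientLearner:
        # gradient updater, uniform negative sample generator and point-wise MSE
        # gradient computer are all registered on the learner.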

        model = rs.FactorModel(**self.parameter_defaults(
            begin_min=-0.01,
            begin_max=0.01,
            dimension=10,
            initialize_all=False,
        ))

        updater = rs.FactorModelGradientUpdater(**self.parameter_defaults(
            learning_rate=0.05, regularization_rate=0.0))
        updater.set_model(model)

        learner = rs.ImplicitGradientLearner()
        learner.add_gradient_updater(updater)
        learner.set_model(model)

        negative_sample_generator = rs.UniformNegativeSampleGenerator(
            **self.parameter_defaults(
                negative_rate=0.0,
                initialize_all=False,
                seed=67439852,
                filter_repeats=False,
            ))
        learner.set_negative_sample_generator(negative_sample_generator)

        point_wise = rs.ObjectiveMSE()
        gradient_computer = rs.GradientComputerPointWise()
        gradient_computer.set_objective(point_wise)
        gradient_computer.set_model(model)
        learner.set_gradient_computer(gradient_computer)

        return (model, learner, [])
Example #5
    def config(self, elems):
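        # Full online experiment config: the FactorModel pipeline from the
        # previous examples plus a FactorModelFilter and an OnlinePredictor
        # (daily time frames) registered as a logger.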
        config = self.parameter_defaults(
            top_k=100,
            min_time=0,
            seed=0,
            out_file=None,
            filters=[],
            loggers=[],
        )

        model = rs.FactorModel(**self.parameter_defaults(
            begin_min=-0.01,
            begin_max=0.01,
            dimension=10,
            initialize_all=False,
        ))

        updater = rs.FactorModelGradientUpdater(**self.parameter_defaults(
            learning_rate=0.05, regularization_rate=0.0))
        updater.set_model(model)

        learner = rs.ImplicitGradientLearner()
        learner.add_gradient_updater(updater)
        learner.set_model(model)

        negative_sample_generator = rs.UniformNegativeSampleGenerator(
            **self.parameter_defaults(
                negative_rate=0.0,
                initialize_all=False,
                seed=0,
            ))
        learner.set_negative_sample_generator(negative_sample_generator)

        point_wise = rs.ObjectiveMSE()
        gradient_computer = rs.GradientComputerPointWise()
        gradient_computer.set_objective(point_wise)
        gradient_computer.set_model(model)
        learner.set_gradient_computer(gradient_computer)

        fmfilter = rs.FactorModelFilter()
        fmfilter.set_model(model)

        prediction_creator = rs.PredictionCreatorGlobal(
            **self.parameter_defaults(
                top_k=10000,
                # initial_threshold=1000,
                lookback=0))
        prediction_creator.set_model(model)
        prediction_creator.set_filter(fmfilter)
        online_predictor = rs.OnlinePredictor(**self.parameter_defaults(
            min_time=0, time_frame=86400, file_name=""))
        online_predictor.set_prediction_creator(prediction_creator)

        config['loggers'].append(online_predictor)

        return {'config': config, 'model': model, 'learner': learner}
    def _config(self, top_k, seed):
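        # FactorModel with a periodic offline learner: depending on
        # 'timeframe_length', either an OfflineImplicitGradientLearner or a
        # PeriodicTimeframeImplicitGradientLearner is instantiated.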
        model = rs.FactorModel(**self.parameter_defaults(
            begin_min=-0.01,
            begin_max=0.01,
            dimension=10,
            initialize_all=False,
        ))

        updater = rs.FactorModelGradientUpdater(**self.parameter_defaults(
            learning_rate=0.05, regularization_rate=0.0))
        updater.set_model(model)

        learner_parameters = self.parameter_defaults(
            number_of_iterations=3,
            start_time=-1,
            period_length=86400,
            write_model=False,
            read_model=False,
            clear_model=False,
            learn=True,
            base_out_file_name="",
            base_in_file_name="",
            timeframe_length=0,
        )

        if learner_parameters['timeframe_length'] == 0:
            learner_parameters.pop('timeframe_length', None)
            learner = rs.OfflineImplicitGradientLearner(**learner_parameters)
        else:
            learner = rs.PeriodicTimeframeImplicitGradientLearner(
                **learner_parameters)

        learner.set_model(model)
        learner.add_gradient_updater(updater)

        negative_sample_generator = rs.UniformNegativeSampleGenerator(
            **self.parameter_defaults(
                negative_rate=0,
                initialize_all=False,
                seed=67439852,
                filter_repeats=False,
            ))

        learner.set_negative_sample_generator(negative_sample_generator)

        point_wise = rs.ObjectiveMSE()
        gradient_computer = rs.GradientComputerPointWise()
        gradient_computer.set_objective(point_wise)
        gradient_computer.set_model(model)
        learner.set_gradient_computer(gradient_computer)

        return (model, learner, [], [])
Example #7
    def config(self, elems):
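        # Online SVD++ experiment: an SvdppModel with both a gradient updater and
        # a simple model updater on an ImplicitGradientLearner, negative_rate=20.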
        config = self.parameter_defaults(
            top_k=100,
            min_time=0,
            seed=0,
            out_file=None,
            filters=[],
            loggers=[],
        )

        model = rs.SvdppModel(**self.parameter_defaults(
            begin_min=-0.01,
            begin_max=0.01,
            dimension=10,
            use_sigmoid=False,
            norm_type="exponential",
            gamma=0.8,
            user_vector_weight=0.5,
            history_weight=0.5
        ))

        gradient_updater = rs.SvdppModelGradientUpdater(**self.parameter_defaults(
            learning_rate=0.05,
            cumulative_item_updates=False,
        ))
        gradient_updater.set_model(model)
        simple_updater = rs.SvdppModelUpdater()
        simple_updater.set_model(model)

        learner = rs.ImplicitGradientLearner()
        learner.add_gradient_updater(gradient_updater)
        learner.add_simple_updater(simple_updater)
        learner.set_model(model)

        negative_sample_generator = rs.UniformNegativeSampleGenerator(**self.parameter_defaults(
            negative_rate=20,
            initialize_all=False,
            seed=928357823,
        ))
        learner.set_negative_sample_generator(negative_sample_generator)

        point_wise = rs.ObjectiveMSE()
        gradient_computer = rs.GradientComputerPointWise()
        gradient_computer.set_objective(point_wise)
        gradient_computer.set_model(model)
        learner.set_gradient_computer(gradient_computer)

        return {
            'config': config,
            'model': model,
            'learner': learner
        }
    def _config(self, top_k, seed):
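        # Expert combination: a RandomChoosingCombinedModel mixes a popularity
        # model, two untouched popularity models (constant 0 predictions) and a
        # factor model, with expert weights adapted by the
        # RandomChoosingCombinedModelExpertUpdater.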
        model = rs.RandomChoosingCombinedModel()
        updater = rs.RandomChoosingCombinedModelExpertUpdater(
            **self.parameter_defaults(
                eta=0.1,
                top_k=top_k,
                loss_type="abs",
            ))
        updater.set_model(model)
        pop_model = rs.PopularityModel()
        model.add_model(pop_model)
        pop_updater = rs.PopularityModelUpdater()
        pop_updater.set_model(pop_model)

        # Two popularity models that are never updated: they predict 0 for all
        # items and users.
        pop_model2 = rs.PopularityModel()
        model.add_model(pop_model2)
        pop_model3 = rs.PopularityModel()
        model.add_model(pop_model3)

        factor_model = rs.FactorModel(**self.parameter_defaults(
            begin_min=-0.01,
            begin_max=0.01,
            dimension=10,
            initialize_all=False,
        ))
        model.add_model(factor_model)

        factor_updater = rs.FactorModelGradientUpdater(
            **self.parameter_defaults(learning_rate=0.05,
                                      regularization_rate=0.0))
        factor_updater.set_model(factor_model)

        objective = rs.ObjectiveMSE()
        gradient_computer = rs.GradientComputerPointWise()
        gradient_computer.set_objective(objective)
        gradient_computer.set_model(factor_model)
        gradient_computer.add_gradient_updater(factor_updater)

        negative_sample_generator = rs.UniformNegativeSampleGenerator(
            **self.parameter_defaults(
                negative_rate=10,
                initialize_all=False,
                seed=67439852,
                filter_repeats=False,
            ))
        negative_sample_generator.add_updater(gradient_computer)

        return (model, [updater, pop_updater,
                        negative_sample_generator], [], [])
Example #9
    def config(self, elems):
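        # Offline FactorModel training wired into an online-style config; the
        # commented-out lines mark where the data iterator, train matrix and item
        # list would be injected.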
        config = self.parameter_defaults(
            top_k=100,
            min_time=0,
            loggers=[],
        )

        model = rs.FactorModel(**self.parameter_defaults(
            begin_min=-0.01,
            begin_max=0.01,
            dimension=10,
            initialize_all=False,
        ))

        updater = rs.FactorModelGradientUpdater(**self.parameter_defaults(
            learning_rate=0.05, regularization_rate=0.0))
        updater.set_model(model)

        learner = rs.OfflineImplicitGradientLearner(
            **self.parameter_defaults(number_of_iterations=3,
                                      start_time=-1,
                                      period_length=86400,
                                      write_model=False,
                                      read_model=False,
                                      clear_model=False,
                                      learn=True,
                                      base_out_file_name="",
                                      base_in_file_name=""))
        learner.set_model(model)
        # learner.set_recommender_data_iterator(elems['recommender_data_iterator'])
        learner.add_gradient_updater(updater)

        negative_sample_generator = rs.UniformNegativeSampleGenerator(
            **self.parameter_defaults(
                negative_rate=0,
                initialize_all=False,
            ))
        # negative_sample_generator.set_train_matrix(elems['train_matrix'])
        # negative_sample_generator.set_items(elems['items'])
        learner.set_negative_sample_generator(negative_sample_generator)

        point_wise = rs.ObjectiveMSE()
        gradient_computer = rs.GradientComputerPointWise()
        gradient_computer.set_objective(point_wise)
        gradient_computer.set_model(model)
        learner.set_gradient_computer(gradient_computer)

        return {'config': config, 'model': model, 'learner': learner}
Example #10
    def _fit(self, recommender_data, users, items, matrix):
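        # Offline SVD++ fit: the simple updater runs as an early updater before
        # the 20 shuffled iterations of negative-sample-driven gradient updates
        # (negative_rate=9, constant norm).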
        model = rs.SvdppModel(**self.parameter_defaults(
            begin_min=self.parameter_default("begin_min", -0.01),
            begin_max=self.parameter_default("begin_max", 0.01),
            dimension=self.parameter_default("dimension", 10),
            use_sigmoid=False,
            norm_type="constant",
            gamma=1,
            user_vector_weight=0.5,
            history_weight=0.5))

        gradient_updater = rs.SvdppModelGradientUpdater(
            **self.parameter_defaults(
                learning_rate=0.05,
                cumulative_item_updates=False,
            ))
        gradient_updater.set_model(model)
        simple_updater = rs.SvdppModelUpdater()
        simple_updater.set_model(model)

        point_wise = rs.ObjectiveMSE()
        gradient_computer = rs.GradientComputerPointWise()
        gradient_computer.set_objective(point_wise)
        gradient_computer.set_model(model)
        gradient_computer.add_gradient_updater(gradient_updater)

        negative_sample_generator = rs.UniformNegativeSampleGenerator(
            **self.parameter_defaults(
                negative_rate=9, initialize_all=False, max_item=-1))
        negative_sample_generator.set_train_matrix(matrix)
        negative_sample_generator.set_items(items)
        negative_sample_generator.add_updater(gradient_computer)

        learner = rs.OfflineIteratingOnlineLearnerWrapper(
            **self.parameter_defaults(
                seed=254938879,
                number_of_iterations=20,
                shuffle=True,
            ))
        learner.add_early_updater(simple_updater)
        learner.add_iterate_updater(negative_sample_generator)

        return (model, learner)
Example #11
    def config(self, elems):
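        # Online FactorModel config: the same ImplicitGradientLearner pipeline as
        # in Example #4, returned together with the default top-k/logger settings.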
        config = self.parameter_defaults(
            top_k=100,
            min_time=0,
            seed=0,
            out_file=None,
            filters=[],
            loggers=[],
        )

        model = rs.FactorModel(**self.parameter_defaults(
            begin_min=-0.01,
            begin_max=0.01,
            dimension=10,
            initialize_all=False,
        ))

        updater = rs.FactorModelGradientUpdater(**self.parameter_defaults(
            learning_rate=0.05, regularization_rate=0.0))
        updater.set_model(model)

        learner = rs.ImplicitGradientLearner()
        learner.add_gradient_updater(updater)
        learner.set_model(model)

        negative_sample_generator = rs.UniformNegativeSampleGenerator(
            **self.parameter_defaults(
                negative_rate=0.0,
                initialize_all=False,
                seed=0,
            ))
        learner.set_negative_sample_generator(negative_sample_generator)

        point_wise = rs.ObjectiveMSE()
        gradient_computer = rs.GradientComputerPointWise()
        gradient_computer.set_objective(point_wise)
        gradient_computer.set_model(model)
        learner.set_gradient_computer(gradient_computer)

        return {'config': config, 'model': model, 'learner': learner}
    def _config(self, top_k, seed):
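        # Model combination: a CombinedModel wraps a popularity model and a
        # factor model; only the factor model receives gradient updates, fed by
        # the negative sample generator (negative_rate=10).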
        model = rs.CombinedModel(**self.parameter_defaults(
            los_file_name="my_log_file",
            log_frequency=100000,
            use_user_weights=False,
        ))
        pop_model = rs.PopularityModel()
        model.add_model(pop_model)
        pop_updater = rs.PopularityModelUpdater()
        pop_updater.set_model(pop_model)

        factor_model = rs.FactorModel(**self.parameter_defaults(
            begin_min=-0.01,
            begin_max=0.01,
            dimension=10,
            initialize_all=False,
        ))
        model.add_model(factor_model)

        factor_updater = rs.FactorModelGradientUpdater(
            **self.parameter_defaults(learning_rate=0.05,
                                      regularization_rate=0.0))
        factor_updater.set_model(factor_model)

        objective = rs.ObjectiveMSE()
        gradient_computer = rs.GradientComputerPointWise()
        gradient_computer.set_objective(objective)
        gradient_computer.set_model(factor_model)
        gradient_computer.add_gradient_updater(factor_updater)

        negative_sample_generator = rs.UniformNegativeSampleGenerator(
            **self.parameter_defaults(
                negative_rate=10,
                initialize_all=False,
                seed=67439852,
                filter_repeats=False,
            ))
        negative_sample_generator.add_updater(gradient_computer)

        return (model, [pop_updater, negative_sample_generator], [], [])
Example #13
    def _fit(self, recommender_data, users, items, matrix):
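        # Offline asymmetric factor model fit: besides the gradient updater, an
        # AsymmetricFactorModelUpdater is registered as an early simple updater on
        # the OfflineIteratingImplicitLearner.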
        model = rs.AsymmetricFactorModel(
            begin_min=self.parameter_default("begin_min", -0.01),
            begin_max=self.parameter_default("begin_max", 0.01),
            dimension=self.parameter_default("dimension", 10),
            use_sigmoid=False,
            norm_type="disabled",
            gamma=1)

        updater = rs.AsymmetricFactorModelGradientUpdater(
            **self.parameter_defaults(learning_rate=0.05,
                                      regularization_rate=0.0))
        updater.set_model(model)
        simple_updater = rs.AsymmetricFactorModelUpdater()
        simple_updater.set_model(model)

        negative_sample_generator = rs.UniformNegativeSampleGenerator(
            **self.parameter_defaults(negative_rate=0))
        negative_sample_generator.set_train_matrix(matrix)
        negative_sample_generator.set_items(items)

        point_wise = rs.ObjectiveMSE()
        gradient_computer = rs.GradientComputerPointWise()
        gradient_computer.set_objective(point_wise)
        gradient_computer.set_model(model)

        learner = rs.OfflineIteratingImplicitLearner(**self.parameter_defaults(
            seed=254938879,
            number_of_iterations=9,
        ))
        learner.set_gradient_computer(gradient_computer)
        learner.set_negative_sample_generator(negative_sample_generator)
        learner.set_model(model)
        learner.set_recommender_data(recommender_data)
        learner.add_gradient_updater(updater)
        learner.add_early_simple_updater(simple_updater)

        return (model, learner)
Example #14
    def _config(self, top_k, seed):
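        # Minimal online setup: only the negative sample generator
        # (negative_rate=100) is returned as the updater chain; it forwards
        # samples to the point-wise MSE gradient computer updating the FactorModel.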
        #config = self.parameter_defaults(
        #    top_k=100,
        #    evaluation_start_time=0,
        #    seed=0,
        #    out_file=None,
        #    filters=[],
        #    loggers=[],
        #)

        model = rs.FactorModel(**self.parameter_defaults(
            begin_min=-0.01,
            begin_max=0.01,
            dimension=10,
            initialize_all=False,
        ))

        updater = rs.FactorModelGradientUpdater(**self.parameter_defaults(
            learning_rate=0.05, regularization_rate=0.0))
        updater.set_model(model)

        point_wise = rs.ObjectiveMSE()
        gradient_computer = rs.GradientComputerPointWise()
        gradient_computer.set_objective(point_wise)
        gradient_computer.set_model(model)
        gradient_computer.add_gradient_updater(updater)

        negative_sample_generator = rs.UniformNegativeSampleGenerator(
            **self.parameter_defaults(
                negative_rate=100,
                initialize_all=False,
                seed=67439852,
                filter_repeats=False,
            ))
        negative_sample_generator.add_updater(gradient_computer)

        return (model, [negative_sample_generator], [])
Example #15
    def _config(self, top_k, seed):
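        # Periodically retrained FactorModel: a PeriodicOfflineLearnerWrapper
        # reruns the offline iterating learner every 86400 seconds, feeding it
        # either the complete past or timeframes, depending on 'timeframe_length'.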
        model = rs.FactorModel(**self.parameter_defaults(
            begin_min=-0.01,
            begin_max=0.01,
            dimension=10,
            initialize_all=False,
        ))

        updater = rs.FactorModelGradientUpdater(**self.parameter_defaults(
            learning_rate=0.05, regularization_rate=0.0))
        updater.set_model(model)

        point_wise = rs.ObjectiveMSE()
        gradient_computer = rs.GradientComputerPointWise()
        gradient_computer.set_objective(point_wise)
        gradient_computer.set_model(model)
        gradient_computer.add_gradient_updater(updater)

        negative_sample_generator = rs.UniformNegativeSampleGenerator(
            **self.parameter_defaults(
                negative_rate=0,
                initialize_all=False,
                seed=67439852,
                filter_repeats=False,
            ))
        negative_sample_generator.add_updater(gradient_computer)

        offline_learner = rs.OfflineIteratingOnlineLearnerWrapper(
            **self.parameter_defaults(
                seed=254938879,
                number_of_iterations=3,
                shuffle=True,
            ))
        offline_learner.add_iterate_updater(negative_sample_generator)

        online_learner = rs.PeriodicOfflineLearnerWrapper(
            **self.parameter_defaults(
                write_model=False,
                read_model=False,
                clear_model=False,
                learn=True,
                base_out_file_name="",
                base_in_file_name="",
            ))
        online_learner.set_model(model)
        online_learner.add_offline_learner(offline_learner)

        data_generator_parameters = self.parameter_defaults(
            timeframe_length=0)
        if data_generator_parameters['timeframe_length'] == 0:
            data_generator = rs.CompletePastDataGenerator()
        else:
            data_generator = rs.TimeframeDataGenerator(
                **data_generator_parameters)
        online_learner.set_data_generator(data_generator)
        period_computer = rs.PeriodComputer(**self.parameter_defaults(
            period_length=86400,
            start_time=-1,
            period_mode="time",
        ))
        online_learner.set_period_computer(period_computer)

        return (model, online_learner, [])
    def _config(self, top_k, seed):
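        # Combined batch + online training on a shared FactorModel: a periodically
        # rerun offline (batch) learner plus an online negative-sample-driven
        # gradient updater, each with its own learning rate and negative rate.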

        model = rs.FactorModel(**self.parameter_defaults(
            begin_min=-0.01,
            begin_max=0.01,
            dimension=10,
            initialize_all=False,
        ))

        #
        # batch
        #

        # updater
        batch_updater = rs.FactorModelGradientUpdater(**self.parameter_defaults(
            learning_rate=self.parameter_default('batch_learning_rate', 0.05),
            regularization_rate=0.0))
        batch_updater.set_model(model)

        # objective
        point_wise = rs.ObjectiveMSE()
        batch_gradient_computer = rs.GradientComputerPointWise()
        batch_gradient_computer.set_objective(point_wise)
        batch_gradient_computer.set_model(model)
        batch_gradient_computer.add_gradient_updater(batch_updater)

        # negative sample generator
        batch_negative_sample_generator = rs.UniformNegativeSampleGenerator(
            **self.parameter_defaults(
                negative_rate=self.parameter_default('batch_negative_rate',
                                                     70),
                initialize_all=False,
                seed=67439852,
                filter_repeats=False,
            ))
        batch_negative_sample_generator.add_updater(batch_gradient_computer)

        batch_offline_learner = rs.OfflineIteratingOnlineLearnerWrapper(
            **self.parameter_defaults(
                seed=254938879,
                number_of_iterations=3,
                shuffle=True,
            ))
        batch_offline_learner.add_iterate_updater(
            batch_negative_sample_generator)

        batch_online_learner = rs.PeriodicOfflineLearnerWrapper(
            **self.parameter_defaults(
                write_model=False,
                read_model=False,
                clear_model=False,
                learn=True,
                base_out_file_name="",
                base_in_file_name="",
            ))
        batch_online_learner.set_model(model)
        batch_online_learner.add_offline_learner(batch_offline_learner)

        batch_data_generator_parameters = self.parameter_defaults(
            timeframe_length=0)
        if batch_data_generator_parameters['timeframe_length'] == 0:
            print("Full experiment")
            batch_data_generator = rs.CompletePastDataGenerator()
        else:
            print("Timeframe experiment")
            batch_data_generator = rs.TimeframeDataGenerator(
                **batch_data_generator_parameters)
        batch_online_learner.set_data_generator(batch_data_generator)
        batch_period_computer = rs.PeriodComputer(**self.parameter_defaults(
            period_length=86400,
            start_time=-1,
            period_mode="time",
        ))
        batch_online_learner.set_period_computer(batch_period_computer)

        #
        # online
        #

        # updater
        online_updater = rs.FactorModelGradientUpdater(**self.parameter_defaults(
            learning_rate=self.parameter_default('online_learning_rate', 0.2),
            regularization_rate=0.0))
        online_updater.set_model(model)

        # objective
        point_wise = rs.ObjectiveMSE()
        online_gradient_computer = rs.GradientComputerPointWise()
        online_gradient_computer.set_objective(point_wise)
        online_gradient_computer.set_model(model)
        online_gradient_computer.add_gradient_updater(online_updater)

        # negative sample generator
        online_negative_sample_generator = rs.UniformNegativeSampleGenerator(
            **self.parameter_defaults(
                negative_rate=self.parameter_default('online_negative_rate',
                                                     100),
                initialize_all=False,
                seed=67439852,
                filter_repeats=False,
            ))
        online_negative_sample_generator.add_updater(online_gradient_computer)

        learner = [batch_online_learner, online_negative_sample_generator]

        return (model, learner, [])
Example #17
    def _config(self, top_k, seed):
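        # Batch + online variant built from gradient learners: an
        # OfflineImplicitGradientLearner (or its periodic timeframe counterpart)
        # plus an online ImplicitGradientLearner, sharing the same FactorModel.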

        model = rs.FactorModel(**self.parameter_defaults(
            begin_min=-0.01,
            begin_max=0.01,
            dimension=10,
            initialize_all=False,
        ))

        #
        # batch
        #

        # updater
        batch_updater = rs.FactorModelGradientUpdater(**self.parameter_defaults(
            learning_rate=self.parameter_default('batch_learning_rate', 0.05),
            regularization_rate=0.0))
        batch_updater.set_model(model)

        # negative sample generator
        batch_negative_sample_generator = rs.UniformNegativeSampleGenerator(
            **self.parameter_defaults(
                negative_rate=self.parameter_default('batch_negative_rate',
                                                     70),
                initialize_all=False,
                seed=67439852,
                filter_repeats=False,
            ))

        # objective
        point_wise = rs.ObjectiveMSE()
        batch_gradient_computer = rs.GradientComputerPointWise()
        batch_gradient_computer.set_objective(point_wise)
        batch_gradient_computer.set_model(model)

        # learner
        batch_learner_parameters = self.parameter_defaults(
            number_of_iterations=9,
            start_time=-1,
            period_length=86400,
            write_model=False,
            read_model=False,
            clear_model=False,
            learn=True,
            base_out_file_name="",
            base_in_file_name="",
            timeframe_length=0,
        )

        if batch_learner_parameters['timeframe_length'] == 0:
            batch_learner_parameters.pop('timeframe_length', None)
            batch_learner = rs.OfflineImplicitGradientLearner(
                **batch_learner_parameters)
        else:
            batch_learner = rs.PeriodicTimeframeImplicitGradientLearner(
                **batch_learner_parameters)

        batch_learner.set_model(model)
        batch_learner.add_gradient_updater(batch_updater)
        batch_learner.set_gradient_computer(batch_gradient_computer)
        batch_learner.set_negative_sample_generator(
            batch_negative_sample_generator)

        #
        # online
        #

        # updater
        online_updater = rs.FactorModelGradientUpdater(**self.parameter_defaults(
            learning_rate=self.parameter_default('online_learning_rate', 0.2),
            regularization_rate=0.0))
        online_updater.set_model(model)

        # negative sample generator
        online_negative_sample_generator = rs.UniformNegativeSampleGenerator(
            **self.parameter_defaults(
                negative_rate=self.parameter_default('online_negative_rate',
                                                     100),
                initialize_all=False,
                seed=67439852,
                filter_repeats=False,
            ))

        # objective
        point_wise = rs.ObjectiveMSE()
        online_gradient_computer = rs.GradientComputerPointWise()
        online_gradient_computer.set_objective(point_wise)
        online_gradient_computer.set_model(model)

        # learner
        online_learner = rs.ImplicitGradientLearner()
        online_learner.add_gradient_updater(online_updater)
        online_learner.set_model(model)
        online_learner.set_negative_sample_generator(
            online_negative_sample_generator)
        online_learner.set_gradient_computer(online_gradient_computer)

        learner = [batch_learner, online_learner]

        return (model, learner, [], [])
    def config(self, elems):
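        # Online-config version of the batch + online setup above, returning the
        # shared FactorModel and both learners alongside the logger settings.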
        config = self.parameter_defaults(
            top_k=100,
            min_time=0,
            loggers=[],
        )

        model = rs.FactorModel(**self.parameter_defaults(
            begin_min=-0.01,
            begin_max=0.01,
            dimension=10,
            initialize_all=False,
        ))

        #
        # batch
        #

        # updater
        batch_updater = rs.FactorModelGradientUpdater(
            learning_rate=self.parameter_default('batch_learning_rate', 0.05),
            regularization_rate=0.0)
        batch_updater.set_model(model)

        # negative sample generator
        batch_negative_sample_generator = rs.UniformNegativeSampleGenerator(
            **self.parameter_defaults(
                negative_rate=self.parameter_default('batch_negative_rate',
                                                     70),
                initialize_all=False,
            ))

        # objective
        point_wise = rs.ObjectiveMSE()
        batch_gradient_computer = rs.GradientComputerPointWise()
        batch_gradient_computer.set_objective(point_wise)
        batch_gradient_computer.set_model(model)

        # learner
        batch_learner = rs.OfflineImplicitGradientLearner(
            **self.parameter_defaults(number_of_iterations=9,
                                      start_time=-1,
                                      period_length=86400,
                                      write_model=False,
                                      read_model=False,
                                      clear_model=False,
                                      learn=True,
                                      base_out_file_name="",
                                      base_in_file_name=""))
        batch_learner.set_model(model)
        batch_learner.add_gradient_updater(batch_updater)
        batch_learner.set_gradient_computer(batch_gradient_computer)
        batch_learner.set_negative_sample_generator(
            batch_negative_sample_generator)

        #
        # online
        #

        # updater
        online_updater = rs.FactorModelGradientUpdater(
            learning_rate=self.parameter_default('online_learning_rate', 0.2),
            regularization_rate=0.0)
        online_updater.set_model(model)

        # negative sample generator
        online_negative_sample_generator = rs.UniformNegativeSampleGenerator(
            **self.parameter_defaults(
                negative_rate=self.parameter_default('online_negative_rate',
                                                     100),
                initialize_all=False,
            ))

        # objective
        point_wise = rs.ObjectiveMSE()
        online_gradient_computer = rs.GradientComputerPointWise()
        online_gradient_computer.set_objective(point_wise)
        online_gradient_computer.set_model(model)

        # learner
        online_learner = rs.ImplicitGradientLearner()
        online_learner.add_gradient_updater(online_updater)
        online_learner.set_model(model)
        online_learner.set_negative_sample_generator(
            online_negative_sample_generator)
        online_learner.set_gradient_computer(online_gradient_computer)

        learner = [batch_learner, online_learner]

        return {'config': config, 'model': model, 'learner': learner}