    def _config(self, top_k, seed):
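        # matrix factorization model with 10-dimensional factors, initialized in [-0.01, 0.01]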
        model = rs.FactorModel(**self.parameter_defaults(
            begin_min=-0.01,
            begin_max=0.01,
            dimension=10,
            initialize_all=False,
        ))

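        # gradient updater that applies SGD steps to the factor model (no regularization)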
        updater = rs.FactorModelGradientUpdater(**self.parameter_defaults(
            learning_rate=0.05, regularization_rate=0.0))
        updater.set_model(model)

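        # learner configuration; timeframe_length == 0 selects the offline learner below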
        learner_parameters = self.parameter_defaults(
            number_of_iterations=3,
            start_time=-1,
            period_length=86400,
            write_model=False,
            read_model=False,
            clear_model=False,
            learn=True,
            base_out_file_name="",
            base_in_file_name="",
            timeframe_length=0,
        )

        if learner_parameters['timeframe_length'] == 0:
            learner_parameters.pop('timeframe_length', None)
            learner = rs.OfflineImplicitGradientLearner(**learner_parameters)
        else:
            learner = rs.PeriodicTimeframeImplicitGradientLearner(
                **learner_parameters)

        learner.set_model(model)
        learner.add_gradient_updater(updater)

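        # negative sample generator for implicit feedback (negative_rate=0: no negatives drawn here)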
        negative_sample_generator = rs.UniformNegativeSampleGenerator(
            **self.parameter_defaults(
                negative_rate=0,
                initialize_all=False,
                seed=67439852,
                filter_repeats=False,
            ))

        learner.set_negative_sample_generator(negative_sample_generator)

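        # pointwise MSE objective wired into the gradient computer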
        point_wise = rs.ObjectiveMSE()
        gradient_computer = rs.GradientComputerPointWise()
        gradient_computer.set_objective(point_wise)
        gradient_computer.set_model(model)
        learner.set_gradient_computer(gradient_computer)

        return (model, learner, [], [])
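
For context, this `_config` has the shape of an Alpenglow experiment configuration: `rs` would be the library's `Getters` module, and the method would be overridden on an experiment class whose constructor keyword arguments feed `parameter_defaults`. Below is a minimal usage sketch under that assumption, using the packaged `FactorExperiment` for illustration; the dataset path and hyperparameter values are placeholders, not taken from the snippet.

import pandas as pd
from alpenglow.experiments import FactorExperiment
from alpenglow.evaluation import DcgScore

# Implicit feedback events with user, item and time columns (placeholder path).
data = pd.read_csv("sample_dataset.csv")

# Constructor kwargs override the defaults set inside _config via parameter_defaults().
experiment = FactorExperiment(
    top_k=100,
    seed=254938879,
    dimension=10,
    negative_rate=100,
)

rankings = experiment.run(data, verbose=True)
rankings['dcg'] = DcgScore(rankings)
print(rankings['dcg'].mean())
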
Example #2
    def _config(self, top_k, seed):

        model = rs.FactorModel(**self.parameter_defaults(
            begin_min=-0.01,
            begin_max=0.01,
            dimension=10,
            initialize_all=False,
        ))

        #
        # batch
        #

        # updater
        batch_updater = rs.FactorModelGradientUpdater(**self.parameter_defaults(
            learning_rate=self.parameter_default('batch_learning_rate', 0.05),
            regularization_rate=0.0,
        ))
        batch_updater.set_model(model)

        # negative sample generator
        batch_negative_sample_generator = rs.UniformNegativeSampleGenerator(
            **self.parameter_defaults(
                negative_rate=self.parameter_default('batch_negative_rate', 70),
                initialize_all=False,
                seed=67439852,
                filter_repeats=False,
            ))

        # objective
        point_wise = rs.ObjectiveMSE()
        batch_gradient_computer = rs.GradientComputerPointWise()
        batch_gradient_computer.set_objective(point_wise)
        batch_gradient_computer.set_model(model)

        # learner
        batch_learner_parameters = self.parameter_defaults(
            number_of_iterations=9,
            start_time=-1,
            period_length=86400,
            write_model=False,
            read_model=False,
            clear_model=False,
            learn=True,
            base_out_file_name="",
            base_in_file_name="",
            timeframe_length=0,
        )

        if batch_learner_parameters['timeframe_length'] == 0:
            batch_learner_parameters.pop('timeframe_length', None)
            batch_learner = rs.OfflineImplicitGradientLearner(
                **batch_learner_parameters)
        else:
            batch_learner = rs.PeriodicTimeframeImplicitGradientLearner(
                **batch_learner_parameters)

        batch_learner.set_model(model)
        batch_learner.add_gradient_updater(batch_updater)
        batch_learner.set_gradient_computer(batch_gradient_computer)
        batch_learner.set_negative_sample_generator(
            batch_negative_sample_generator)

        #
        # online
        #

        # updater
        online_updater = rs.FactorModelGradientUpdater(**self.parameter_defaults(
            learning_rate=self.parameter_default('online_learning_rate', 0.2),
            regularization_rate=0.0,
        ))
        online_updater.set_model(model)

        # negative sample generator
        online_negative_sample_generator = rs.UniformNegativeSampleGenerator(
            **self.parameter_defaults(
                negative_rate=self.parameter_default('online_negative_rate', 100),
                initialize_all=False,
                seed=67439852,
                filter_repeats=False,
            ))

        # objective
        point_wise = rs.ObjectiveMSE()
        online_gradient_computer = rs.GradientComputerPointWise()
        online_gradient_computer.set_objective(point_wise)
        online_gradient_computer.set_model(model)

        # learner
        online_learner = rs.ImplicitGradientLearner()
        online_learner.add_gradient_updater(online_updater)
        online_learner.set_model(model)
        online_learner.set_negative_sample_generator(
            online_negative_sample_generator)
        online_learner.set_gradient_computer(online_gradient_computer)

        learner = [batch_learner, online_learner]

        return (model, learner, [], [])
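
This second variant shares a single factor model between two learners: a batch learner configured like the first example, and an online `ImplicitGradientLearner` that keeps applying gradient updates per event. The `parameter_default('batch_learning_rate', ...)`-style lookups suggest that the batch and online hyperparameters can be overridden from the experiment constructor. A hedged sketch, assuming the snippet belongs to Alpenglow's `BatchAndOnlineFactorExperiment` or an equivalent class exposing the same parameter keys:

import pandas as pd
from alpenglow.experiments import BatchAndOnlineFactorExperiment

# Same event format as in the previous sketch (placeholder path).
data = pd.read_csv("sample_dataset.csv")

experiment = BatchAndOnlineFactorExperiment(
    top_k=100,
    seed=254938879,
    # keys mirror the parameter_default() lookups in _config above;
    # anything omitted falls back to the defaults defined there
    batch_learning_rate=0.05,
    online_learning_rate=0.2,
    batch_negative_rate=70,
    online_negative_rate=100,
)
rankings = experiment.run(data, verbose=True)
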