def _config(self, top_k, seed):
    # model
    model = rs.FactorModel(**self.parameter_defaults(
        begin_min=-0.01,
        begin_max=0.01,
        dimension=10,
        initialize_all=False,
    ))
    # updater
    updater = rs.FactorModelGradientUpdater(**self.parameter_defaults(
        learning_rate=0.05,
        regularization_rate=0.0))
    updater.set_model(model)
    # learner
    learner = rs.ImplicitGradientLearner()
    learner.add_gradient_updater(updater)
    learner.set_model(model)
    # negative sample generator
    negative_sample_generator = rs.UniformNegativeSampleGenerator(
        **self.parameter_defaults(
            negative_rate=0.0,
            initialize_all=False,
            seed=67439852,
            filter_repeats=False,
        ))
    learner.set_negative_sample_generator(negative_sample_generator)
    # objective
    point_wise = rs.ObjectiveMSE()
    gradient_computer = rs.GradientComputerPointWise()
    gradient_computer.set_objective(point_wise)
    gradient_computer.set_model(model)
    learner.set_gradient_computer(gradient_computer)
    return (model, learner, [])
def config(self, elems):
    config = self.parameter_defaults(
        top_k=100,
        min_time=0,
        seed=0,
        out_file=None,
        filters=[],
        loggers=[],
    )
    # model
    model = rs.FactorModel(**self.parameter_defaults(
        begin_min=-0.01,
        begin_max=0.01,
        dimension=10,
        initialize_all=False,
    ))
    # updater
    updater = rs.FactorModelGradientUpdater(**self.parameter_defaults(
        learning_rate=0.05,
        regularization_rate=0.0))
    updater.set_model(model)
    # learner
    learner = rs.ImplicitGradientLearner()
    learner.add_gradient_updater(updater)
    learner.set_model(model)
    # negative sample generator
    negative_sample_generator = rs.UniformNegativeSampleGenerator(
        **self.parameter_defaults(
            negative_rate=0.0,
            initialize_all=False,
            seed=0,
        ))
    learner.set_negative_sample_generator(negative_sample_generator)
    # objective
    point_wise = rs.ObjectiveMSE()
    gradient_computer = rs.GradientComputerPointWise()
    gradient_computer.set_objective(point_wise)
    gradient_computer.set_model(model)
    learner.set_gradient_computer(gradient_computer)
    # prediction logger
    fmfilter = rs.FactorModelFilter()
    fmfilter.set_model(model)
    prediction_creator = rs.PredictionCreatorGlobal(
        **self.parameter_defaults(
            top_k=10000,
            # initial_threshold=1000,
            lookback=0))
    prediction_creator.set_model(model)
    prediction_creator.set_filter(fmfilter)
    online_predictor = rs.OnlinePredictor(**self.parameter_defaults(
        min_time=0,
        time_frame=86400,
        file_name=""))
    online_predictor.set_prediction_creator(prediction_creator)
    config['loggers'].append(online_predictor)
    return {'config': config, 'model': model, 'learner': learner}
def config(self, elems):
    config = self.parameter_defaults(
        top_k=100,
        min_time=0,
        seed=0,
        out_file=None,
        filters=[],
        loggers=[],
    )
    # model
    model = rs.SvdppModel(**self.parameter_defaults(
        begin_min=-0.01,
        begin_max=0.01,
        dimension=10,
        use_sigmoid=False,
        norm_type="exponential",
        gamma=0.8,
        user_vector_weight=0.5,
        history_weight=0.5,
    ))
    # updaters
    gradient_updater = rs.SvdppModelGradientUpdater(**self.parameter_defaults(
        learning_rate=0.05,
        cumulative_item_updates=False,
    ))
    gradient_updater.set_model(model)
    simple_updater = rs.SvdppModelUpdater()
    simple_updater.set_model(model)
    # learner
    learner = rs.ImplicitGradientLearner()
    learner.add_gradient_updater(gradient_updater)
    learner.add_simple_updater(simple_updater)
    learner.set_model(model)
    # negative sample generator
    negative_sample_generator = rs.UniformNegativeSampleGenerator(
        **self.parameter_defaults(
            negative_rate=20,
            initialize_all=False,
            seed=928357823,
        ))
    learner.set_negative_sample_generator(negative_sample_generator)
    # objective
    point_wise = rs.ObjectiveMSE()
    gradient_computer = rs.GradientComputerPointWise()
    gradient_computer.set_objective(point_wise)
    gradient_computer.set_model(model)
    learner.set_gradient_computer(gradient_computer)
    return {'config': config, 'model': model, 'learner': learner}
def config(self, elems):
    config = self.parameter_defaults(
        top_k=100,
        min_time=0,
        seed=0,
        out_file=None,
        filters=[],
        loggers=[],
    )
    # model
    model = rs.FactorModel(**self.parameter_defaults(
        begin_min=-0.01,
        begin_max=0.01,
        dimension=10,
        initialize_all=False,
    ))
    # updater
    updater = rs.FactorModelGradientUpdater(**self.parameter_defaults(
        learning_rate=0.05,
        regularization_rate=0.0))
    updater.set_model(model)
    # learner
    learner = rs.ImplicitGradientLearner()
    learner.add_gradient_updater(updater)
    learner.set_model(model)
    # negative sample generator
    negative_sample_generator = rs.UniformNegativeSampleGenerator(
        **self.parameter_defaults(
            negative_rate=0.0,
            initialize_all=False,
            seed=0,
        ))
    learner.set_negative_sample_generator(negative_sample_generator)
    # objective
    point_wise = rs.ObjectiveMSE()
    gradient_computer = rs.GradientComputerPointWise()
    gradient_computer.set_objective(point_wise)
    gradient_computer.set_model(model)
    learner.set_gradient_computer(gradient_computer)
    return {'config': config, 'model': model, 'learner': learner}
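# The config methods in this section all funnel their hard-coded values
# through self.parameter_defaults(...) and self.parameter_default(...).
# The snippet below is a minimal, hypothetical sketch of that pattern
# (call-site defaults overridden by whatever was passed to the experiment
# constructor); the class and attribute names here are illustrative
# assumptions, not the library's actual implementation.
class ParameterDefaultsSketch:
    def __init__(self, **parameters):
        # keyword arguments given when the experiment is constructed
        self.parameters = parameters

    def parameter_defaults(self, **defaults):
        # keep the call-site defaults, but let constructor arguments win
        return {
            key: self.parameters.get(key, value)
            for key, value in defaults.items()
        }

    def parameter_default(self, name, default):
        # single-value variant of the same lookup
        return self.parameters.get(name, default)

# e.g. ParameterDefaultsSketch(dimension=5).parameter_defaults(
#     dimension=10, learning_rate=0.05)
# -> {'dimension': 5, 'learning_rate': 0.05}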
def _config(self, top_k, seed):
    # model
    model = rs.AsymmetricFactorModel(**self.parameter_defaults(
        begin_min=-0.01,
        begin_max=0.01,
        dimension=10,
        use_sigmoid=False,
        norm_type="exponential",
        gamma=0.8))
    # updaters
    gradient_updater = rs.AsymmetricFactorModelGradientUpdater(
        **self.parameter_defaults(
            learning_rate=0.05,
            cumulative_item_updates=False,
        ))
    gradient_updater.set_model(model)
    simple_updater = rs.AsymmetricFactorModelUpdater()
    simple_updater.set_model(model)
    # learner
    learner = rs.ImplicitGradientLearner()
    learner.add_gradient_updater(gradient_updater)
    learner.add_simple_updater(simple_updater)
    learner.set_model(model)
    # negative sample generator
    negative_sample_generator = rs.UniformNegativeSampleGenerator(
        **self.parameter_defaults(
            negative_rate=20,
            initialize_all=False,
            seed=928357823,
        ))
    learner.set_negative_sample_generator(negative_sample_generator)
    # objective
    point_wise = rs.ObjectiveMSE()
    gradient_computer = rs.GradientComputerPointWise()
    gradient_computer.set_objective(point_wise)
    gradient_computer.set_model(model)
    learner.set_gradient_computer(gradient_computer)
    return (model, learner, [], [])
def _config(self, top_k, seed):
    model = rs.FactorModel(**self.parameter_defaults(
        begin_min=-0.01,
        begin_max=0.01,
        dimension=10,
        initialize_all=False,
    ))
    #
    # batch
    #
    # updater
    batch_updater = rs.FactorModelGradientUpdater(**self.parameter_defaults(
        learning_rate=self.parameter_default('batch_learning_rate', 0.05),
        regularization_rate=0.0))
    batch_updater.set_model(model)
    # negative sample generator
    batch_negative_sample_generator = rs.UniformNegativeSampleGenerator(
        **self.parameter_defaults(
            negative_rate=self.parameter_default('batch_negative_rate', 70),
            initialize_all=False,
            seed=67439852,
            filter_repeats=False,
        ))
    # objective
    point_wise = rs.ObjectiveMSE()
    batch_gradient_computer = rs.GradientComputerPointWise()
    batch_gradient_computer.set_objective(point_wise)
    batch_gradient_computer.set_model(model)
    # learner
    batch_learner_parameters = self.parameter_defaults(
        number_of_iterations=9,
        start_time=-1,
        period_length=86400,
        write_model=False,
        read_model=False,
        clear_model=False,
        learn=True,
        base_out_file_name="",
        base_in_file_name="",
        timeframe_length=0,
    )
    # choose the batch learner variant: a plain offline learner when no
    # timeframe is set, a periodic timeframe learner otherwise
    if batch_learner_parameters['timeframe_length'] == 0:
        batch_learner_parameters.pop('timeframe_length', None)
        batch_learner = rs.OfflineImplicitGradientLearner(
            **batch_learner_parameters)
    else:
        batch_learner = rs.PeriodicTimeframeImplicitGradientLearner(
            **batch_learner_parameters)
    batch_learner.set_model(model)
    batch_learner.add_gradient_updater(batch_updater)
    batch_learner.set_gradient_computer(batch_gradient_computer)
    batch_learner.set_negative_sample_generator(
        batch_negative_sample_generator)
    #
    # online
    #
    # updater
    online_updater = rs.FactorModelGradientUpdater(**self.parameter_defaults(
        learning_rate=self.parameter_default('online_learning_rate', 0.2),
        regularization_rate=0.0))
    online_updater.set_model(model)
    # negative sample generator
    online_negative_sample_generator = rs.UniformNegativeSampleGenerator(
        **self.parameter_defaults(
            negative_rate=self.parameter_default('online_negative_rate', 100),
            initialize_all=False,
            seed=67439852,
            filter_repeats=False,
        ))
    # objective
    point_wise = rs.ObjectiveMSE()
    online_gradient_computer = rs.GradientComputerPointWise()
    online_gradient_computer.set_objective(point_wise)
    online_gradient_computer.set_model(model)
    # learner
    online_learner = rs.ImplicitGradientLearner()
    online_learner.add_gradient_updater(online_updater)
    online_learner.set_model(model)
    online_learner.set_negative_sample_generator(
        online_negative_sample_generator)
    online_learner.set_gradient_computer(online_gradient_computer)
    learner = [batch_learner, online_learner]
    return (model, learner, [], [])
def config(self, elems):
    config = self.parameter_defaults(
        top_k=100,
        min_time=0,
        loggers=[],
    )
    model = rs.FactorModel(**self.parameter_defaults(
        begin_min=-0.01,
        begin_max=0.01,
        dimension=10,
        initialize_all=False,
    ))
    #
    # batch
    #
    # updater
    batch_updater = rs.FactorModelGradientUpdater(
        learning_rate=self.parameter_default('batch_learning_rate', 0.05),
        regularization_rate=0.0)
    batch_updater.set_model(model)
    # negative sample generator
    batch_negative_sample_generator = rs.UniformNegativeSampleGenerator(
        **self.parameter_defaults(
            negative_rate=self.parameter_default('batch_negative_rate', 70),
            initialize_all=False,
        ))
    # objective
    point_wise = rs.ObjectiveMSE()
    batch_gradient_computer = rs.GradientComputerPointWise()
    batch_gradient_computer.set_objective(point_wise)
    batch_gradient_computer.set_model(model)
    # learner
    batch_learner = rs.OfflineImplicitGradientLearner(
        **self.parameter_defaults(
            number_of_iterations=9,
            start_time=-1,
            period_length=86400,
            write_model=False,
            read_model=False,
            clear_model=False,
            learn=True,
            base_out_file_name="",
            base_in_file_name=""))
    batch_learner.set_model(model)
    batch_learner.add_gradient_updater(batch_updater)
    batch_learner.set_gradient_computer(batch_gradient_computer)
    batch_learner.set_negative_sample_generator(
        batch_negative_sample_generator)
    #
    # online
    #
    # updater
    online_updater = rs.FactorModelGradientUpdater(
        learning_rate=self.parameter_default('online_learning_rate', 0.2),
        regularization_rate=0.0)
    online_updater.set_model(model)
    # negative sample generator
    online_negative_sample_generator = rs.UniformNegativeSampleGenerator(
        **self.parameter_defaults(
            negative_rate=self.parameter_default('online_negative_rate', 100),
            initialize_all=False,
        ))
    # objective
    point_wise = rs.ObjectiveMSE()
    online_gradient_computer = rs.GradientComputerPointWise()
    online_gradient_computer.set_objective(point_wise)
    online_gradient_computer.set_model(model)
    # learner
    online_learner = rs.ImplicitGradientLearner()
    online_learner.add_gradient_updater(online_updater)
    online_learner.set_model(model)
    online_learner.set_negative_sample_generator(
        online_negative_sample_generator)
    online_learner.set_gradient_computer(online_gradient_computer)
    learner = [batch_learner, online_learner]
    return {'config': config, 'model': model, 'learner': learner}
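# A rough usage sketch: assuming these _config/config methods live in
# Alpenglow-style experiment classes (e.g. alpenglow.experiments.
# FactorExperiment), an experiment built from them would typically be
# driven as below. The dataset path, column layout and parameter values
# are placeholders, not taken from the code above.
import pandas as pd
from alpenglow.experiments import FactorExperiment
from alpenglow.evaluation import DcgScore

# implicit feedback stream with (at least) time, user and item columns
data = pd.read_csv("dataset.csv")

experiment = FactorExperiment(
    top_k=100,
    seed=254938879,
    dimension=10,
    learning_rate=0.05,
    negative_rate=100,
)
rankings = experiment.run(data, verbose=True)

# rank-based evaluation of the online top-k recommendations
rankings['dcg'] = DcgScore(rankings)
print(rankings['dcg'].mean())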