def send_report(self):
    """Write the widget's current settings into the report."""
    pca_desc = report.bool_str(self.apply_pca)
    if self.apply_pca:
        # Append component count only when PCA is actually enabled.
        pca_desc += report.plural(
            ", {number} component{s}", self.pca_components)
    items = (
        ("Normalize data", report.bool_str(self.normalize)),
        ("PCA preprocessing", pca_desc),
        ("Metric", METRICS[self.metric_idx][0]),
        ("k neighbors", self.k_neighbors),
        ("Resolution", self.resolution),
    )
    self.report_items(items)
def send_report(self):
    """Report the clustering settings currently configured in the widget."""
    # Describe the PCA step; add the component count when it is enabled.
    pca_desc = report.bool_str(self.apply_pca)
    if self.apply_pca:
        pca_desc += report.plural(
            ', {number} component{s}', self.pca_components)
    self.report_items((
        ('PCA preprocessing', pca_desc),
        ('Metric', METRICS[self.metric_idx][0]),
        ('k neighbors', self.k_neighbors),
        ('Resolution', self.resolution),
    ))
def get_learner_parameters(self):
    """Collect the learner's settings as (label, value) pairs for the report.

    Conditional entries (epsilon, regularization strength, L1 ratio,
    learning-rate extras) are included only when the currently selected
    option actually uses them.
    """
    # Loss functions that expose an epsilon parameter.
    epsilon_losses = (
        'huber', 'epsilon_insensitive', 'squared_epsilon_insensitive')
    params = OrderedDict({})

    # Classification loss
    cls_loss = self.cls_losses[self.cls_loss_function_index]
    params['Classification loss function'] = cls_loss[0]
    if cls_loss[1] in epsilon_losses:
        params['Epsilon (ε) for classification'] = self.cls_epsilon

    # Regression loss
    reg_loss = self.reg_losses[self.reg_loss_function_index]
    params['Regression loss function'] = reg_loss[0]
    if reg_loss[1] in epsilon_losses:
        params['Epsilon (ε) for regression'] = self.reg_epsilon

    # Regularization and its strength / mixing parameters
    penalty = self.penalties[self.penalty_index]
    params['Regularization'] = penalty[0]
    if penalty[1] in ('l1', 'l2', 'elasticnet'):
        params['Regularization strength (α)'] = self.alpha
    if penalty[1] in ('elasticnet',):
        params['Elastic Net mixing parameter (L1 ratio)'] = self.l1_ratio

    # Learning-rate schedule
    learning_rate = self.learning_rates[self.learning_rate_index]
    params['Learning rate'] = learning_rate[0]
    if learning_rate[1] in ('constant', 'invscaling'):
        params['Initial learning rate (η<sub>0</sub>)'] = self.eta0
    if learning_rate[1] in ('invscaling',):
        params['Inverse scaling exponent (t)'] = self.power_t

    params['Shuffle data after each iteration'] = bool_str(self.shuffle)
    if self.use_random_state:
        params['Random seed for shuffling'] = self.random_state
    return list(params.items())
def get_learner_parameters(self):
    """Return the learner's settings as a list of (label, value) pairs.

    Entries for epsilon, regularization strength, the Elastic Net mixing
    ratio, and learning-rate extras appear only when the selected option
    makes use of them.
    """
    # Losses that take an epsilon parameter.
    needs_epsilon = ('huber', 'epsilon_insensitive',
                     'squared_epsilon_insensitive')
    cls_loss = self.cls_losses[self.cls_loss_function_index]
    reg_loss = self.reg_losses[self.reg_loss_function_index]
    penalty = self.penalties[self.penalty_index]
    lrate = self.learning_rates[self.learning_rate_index]

    items = [('Classification loss function', cls_loss[0])]
    if cls_loss[1] in needs_epsilon:
        items.append(('Epsilon (ε) for classification', self.cls_epsilon))

    items.append(('Regression loss function', reg_loss[0]))
    if reg_loss[1] in needs_epsilon:
        items.append(('Epsilon (ε) for regression', self.reg_epsilon))

    items.append(('Regularization', penalty[0]))
    if penalty[1] in ('l1', 'l2', 'elasticnet'):
        items.append(('Regularization strength (α)', self.alpha))
    if penalty[1] == 'elasticnet':
        items.append(('Elastic Net mixing parameter (L1 ratio)',
                      self.l1_ratio))

    items.append(('Learning rate', lrate[0]))
    if lrate[1] in ('constant', 'invscaling'):
        items.append(('Initial learning rate (η<sub>0</sub>)', self.eta0))
    if lrate[1] == 'invscaling':
        items.append(('Inverse scaling exponent (t)', self.power_t))

    items.append(('Shuffle data after each iteration',
                  bool_str(self.shuffle)))
    if self.use_random_state:
        items.append(('Random seed for shuffling', self.random_state))
    return items