def _validate_linear_feature_columns(self, features): if self._linear_feature_columns is None: self._linear_feature_columns = layers.infer_real_valued_columns(features) self._feature_columns_inferred = True elif self._feature_columns_inferred: this_dict = {c.name: c for c in self._linear_feature_columns} that_dict = { c.name: c for c in layers.infer_real_valued_columns(features) } if this_dict != that_dict: raise ValueError( "Feature columns, expected %s, got %s.", (this_dict, that_dict))
def _get_train_ops(self, features, targets):
  """See base class.

  Builds the SDCA training path when an SDCA optimizer is configured;
  otherwise delegates to the base class. Returns `(train_ops, loss)`.
  """
  # Lazily infer real-valued feature columns when the caller supplied none.
  if self._linear_feature_columns is None:
    self._linear_feature_columns = layers.infer_real_valued_columns(
        features)
  # Non-SDCA optimizers use the generic training path of the base class.
  if not isinstance(self._linear_optimizer, sdca_optimizer.SDCAOptimizer):
    return super(LinearClassifier, self)._get_train_ops(features, targets)

  # SDCA currently supports binary classification only.
  if self._n_classes > 2:
    raise ValueError(
        "SDCA does not currently support multi-class classification.")
  global_step = contrib_variables.get_global_step()
  assert global_step
  # Build the linear logits and keep the column->variable mapping, which
  # the SDCA optimizer needs to construct its per-column updates.
  logits, columns_to_variables, _ = layers.weighted_sum_from_feature_columns(
      columns_to_tensors=features,
      feature_columns=self._linear_feature_columns,
      num_outputs=self._num_label_columns(),
      weight_collections=[self._linear_weight_collection],
      name="linear")
  # Force the centered-bias update to run before the loss is computed.
  with ops.control_dependencies([self._centered_bias()]):
    loss = self._loss(logits, targets, self._get_weight_tensor(features))
  logging_ops.scalar_summary("loss", loss)
  train_ops = self._linear_optimizer.get_train_step(
      self._linear_feature_columns, self._weight_column_name,
      "logistic_loss", features, targets, columns_to_variables,
      global_step)
  return train_ops, loss
def _get_train_ops(self, features, targets):
  """See base class.

  Builds the SDCA training path when an SDCA optimizer is configured;
  otherwise delegates to the base class. Returns `(train_ops, loss)`.
  """
  # Lazily infer real-valued feature columns when the caller supplied none.
  if self._linear_feature_columns is None:
    self._linear_feature_columns = layers.infer_real_valued_columns(features)
  # Non-SDCA optimizers use the generic training path of the base class.
  if not isinstance(self._linear_optimizer, sdca_optimizer.SDCAOptimizer):
    return super(LinearClassifier, self)._get_train_ops(features, targets)

  # SDCA currently supports binary classification only.
  if self._n_classes > 2:
    raise ValueError(
        "SDCA does not currently support multi-class classification.")
  global_step = contrib_variables.get_global_step()
  assert global_step
  # Build the linear logits and keep the column->variable mapping, which
  # the SDCA optimizer needs to construct its per-column updates.
  logits, columns_to_variables, _ = layers.weighted_sum_from_feature_columns(
      columns_to_tensors=features,
      feature_columns=self._linear_feature_columns,
      num_outputs=self._num_label_columns(),
      weight_collections=[self._linear_weight_collection],
      name="linear")
  # Force the centered-bias update to run before the loss is computed.
  with ops.control_dependencies([self._centered_bias()]):
    loss = self._loss(logits, targets, self._get_weight_tensor(features))
  logging_ops.scalar_summary("loss", loss)
  train_ops = self._linear_optimizer.get_train_step(
      self._linear_feature_columns, self._weight_column_name,
      "logistic_loss", features, targets, columns_to_variables, global_step)
  return train_ops, loss
def _get_train_ops(self, features, targets):
  """See base class.

  Rejects SDCA optimizers (unsupported for regression), infers feature
  columns from `features` when none were supplied, then delegates to the
  base-class implementation.
  """
  # SDCA only handles classification; fail fast before touching any state.
  optimizer = self._linear_optimizer
  if isinstance(optimizer, sdca_optimizer.SDCAOptimizer):
    raise ValueError("SDCAOptimizer does not currently support regression.")
  if self._linear_feature_columns is None:
    inferred_columns = layers.infer_real_valued_columns(features)
    self._linear_feature_columns = inferred_columns
  return super(LinearRegressor, self)._get_train_ops(features, targets)
def _get_train_ops(self, features, targets):
  """See base class.

  Raises for SDCA optimizers (regression is unsupported), fills in
  inferred feature columns if none were given, and defers the rest to the
  base class.
  """
  # Check the optimizer first so no state is mutated on the error path.
  if isinstance(self._linear_optimizer, sdca_optimizer.SDCAOptimizer):
    raise ValueError("SDCAOptimizer does not currently support regression.")
  columns = self._linear_feature_columns
  if columns is None:
    self._linear_feature_columns = layers.infer_real_valued_columns(features)
  return super(LinearRegressor, self)._get_train_ops(features, targets)
def infer_real_valued_columns_from_input_fn(input_fn):
  """Creates `FeatureColumn` objects for inputs defined by `input_fn`.

  All inputs are interpreted as dense, fixed-length float values. A
  throwaway local graph is used while `input_fn` builds its tensors; the
  graph is discarded once the columns have been inferred.

  Args:
    input_fn: Function returning a tuple of input and target `Tensor`
      objects.

  Returns:
    List of `FeatureColumn` objects.
  """
  scratch_graph = ops.Graph()
  with scratch_graph.as_default():
    inputs, _unused_targets = input_fn()
    return layers.infer_real_valued_columns(inputs)
def _get_train_ops(self, features, targets):
  """See base class.

  Infers real-valued feature columns from `features` when none were
  supplied, then defers to the base-class implementation.
  """
  if self._linear_feature_columns is None:
    inferred = layers.infer_real_valued_columns(features)
    self._linear_feature_columns = inferred
  return super(LinearClassifier, self)._get_train_ops(features, targets)
def _get_train_ops(self, features, targets):
  """See base class.

  Fills in inferred real-valued feature columns if the caller supplied
  none, then delegates training-op construction to the base class.
  """
  columns = self._dnn_feature_columns
  if columns is None:
    self._dnn_feature_columns = layers.infer_real_valued_columns(features)
  return super(DNNRegressor, self)._get_train_ops(features, targets)
def _get_train_ops(self, features, targets):
  """See base class.

  Lazily infers feature columns from `features` before handing off to the
  base-class training path.
  """
  if self._linear_feature_columns is None:
    self._linear_feature_columns = (
        layers.infer_real_valued_columns(features))
  return super(LinearClassifier, self)._get_train_ops(features, targets)