Example #1
    def interpret_softmax_expr(self, expr, **kwargs):
        # No language-specific softmax helper available: expand the softmax
        # into basic AST expressions and interpret those instead.
        if self.softmax_function_name is NotImplemented:
            return self._do_interpret(fallback_expressions.softmax(expr.exprs),
                                      **kwargs)
        # Otherwise emit a call to the named softmax function over a vector
        # of the interpreted sub-expressions.
        self.with_vectors = True
        self.with_math_module = True
        nested = [self._do_interpret(expr, **kwargs) for expr in expr.exprs]
        return self._cg.function_invocation(self.softmax_function_name,
                                            self._cg.vector_init(nested))
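
For reference, a minimal plain-Python sketch of the math that the fallback expansion encodes, i.e. exp(s_i) / sum_j exp(s_j) over the raw scores; softmax_reference is a hypothetical helper for illustration only and is not part of m2cgen:

import math

def softmax_reference(scores):
    # Same computation as the expression tree built by
    # fallback_expressions.softmax: exp(s_i) / sum_j exp(s_j).
    exps = [math.exp(s) for s in scores]
    total = sum(exps)
    return [e / total for e in exps]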
Example #2
    def _assemble_multi_class_output(self, estimator_params):
        # Multi-class output is calculated based on discussion in
        # https://github.com/dmlc/xgboost/issues/1746#issuecomment-295962863
        # and the enhancement to support boosted forests in XGBoost.
        splits = _split_estimator_params_by_classes(
            estimator_params, self._output_size,
            self.multiclass_params_seq_len)

        base_score = self._base_score
        exprs = [
            self._assemble_single_output(e, base_score=base_score, split_idx=i)
            for i, e in enumerate(splits)
        ]

        proba_exprs = fallback_expressions.softmax(exprs)
        return ast.VectorVal(proba_exprs)
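
As a rough illustration of what this assembles, assuming each class's estimators contribute additive raw scores on top of the base score (the values and names below are hypothetical, not m2cgen API):

import math

# Hypothetical per-class raw scores: sum of tree outputs plus base_score.
base_score = 0.5
per_class_tree_outputs = [[0.2, -0.1], [0.7, 0.3], [-0.4, 0.1]]
margins = [sum(trees) + base_score for trees in per_class_tree_outputs]

# Softmax turns the per-class margins into class probabilities.
exps = [math.exp(m) for m in margins]
total = sum(exps)
probabilities = [e / total for e in exps]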
Example #3
    def _multi_class_convert_output(self, exprs):
        # Convert raw per-class outputs to probabilities via the fallback softmax.
        return fallback_expressions.softmax(exprs)