Example #1
from webdnn.graph.operators.max import Max
from webdnn.graph.operators.min import Min
from webdnn.graph.operators.prod import Prod
from webdnn.graph.operators.rsqrt import Rsqrt
from webdnn.graph.operators.scalar_add import ScalarAdd
from webdnn.graph.operators.scalar_mul import ScalarMul
from webdnn.graph.operators.select import Select
from webdnn.graph.operators.sigmoid import Sigmoid
from webdnn.graph.operators.sum import Sum
from webdnn.graph.operators.tanh import Tanh
from webdnn.graph.operators.tensordot import Tensordot
from webdnn.graph.order import Order
from webdnn.graph.variables.constant_variable import ConstantVariable
from webdnn.util import console

TensorFlowConverter.register_handler("Abs")(unary_op_handler(Abs))


@TensorFlowConverter.register_handler("Acos")
def acos_handler(converter: TensorFlowConverter, tf_op: "tf.Operation"):
    raise NotImplementedError(
        f"[TensorFlowConverter] {tf_op.type} is not supported yet.")


@TensorFlowConverter.register_handler("Acosh")
def acosh_handler(converter: TensorFlowConverter, tf_op: "tf.Operation"):
    raise NotImplementedError(
        f"[TensorFlowConverter] {tf_op.type} is not supported yet.")


TensorFlowConverter.register_handler("Add")(elementwise_binary_op_handler(
Example #2
@TensorFlowConverter.register_handler("Dilation2DBackpropFilter")
def dilation2_d_backprop_filter_handler(converter: TensorFlowConverter,
                                        tf_op: "tf.Operation"):
    raise NotImplementedError(
        f"[TensorFlowConverter] {tf_op.type} is not supported yet.")


@TensorFlowConverter.register_handler("Dilation2DBackpropInput")
def dilation2_d_backprop_input_handler(converter: TensorFlowConverter,
                                       tf_op: "tf.Operation"):
    raise NotImplementedError(
        f"[TensorFlowConverter] {tf_op.type} is not supported yet.")


TensorFlowConverter.register_handler("Elu")(unary_op_handler(Elu))


@TensorFlowConverter.register_handler("EluGrad")
def elu_grad_handler(converter: TensorFlowConverter, tf_op: "tf.Operation"):
    raise NotImplementedError(
        f"[TensorFlowConverter] {tf_op.type} is not supported yet.")


@TensorFlowConverter.register_handler("FractionalAvgPoolGrad")
def fractional_avg_pool_grad_handler(converter: TensorFlowConverter,
                                     tf_op: "tf.Operation"):
    raise NotImplementedError(
        f"[TensorFlowConverter] {tf_op.type} is not supported yet.")

Example #3
from webdnn.graph.operators.rsqrt import Rsqrt
from webdnn.graph.operators.scalar_add import ScalarAdd
from webdnn.graph.operators.scalar_mul import ScalarMul
from webdnn.graph.operators.select import Select
from webdnn.graph.operators.sigmoid import Sigmoid
from webdnn.graph.operators.sin import Sin
from webdnn.graph.operators.sinh import Sinh
from webdnn.graph.operators.sum import Sum
from webdnn.graph.operators.tan import Tan
from webdnn.graph.operators.tanh import Tanh
from webdnn.graph.operators.tensordot import Tensordot
from webdnn.graph.order import Order
from webdnn.graph.variables.constant_variable import ConstantVariable
from webdnn.util import console

TensorFlowConverter.register_handler("Abs")(unary_op_handler(Abs))

TensorFlowConverter.register_handler("Acos")(unary_op_handler(Acos))

TensorFlowConverter.register_handler("Acosh")(unary_op_handler(Acosh))

TensorFlowConverter.register_handler("Add")(elementwise_binary_op_handler(
    ElementwiseAdd, ScalarAdd))

TensorFlowConverter.register_handler("AddN")(elementwise_binary_op_handler(
    ElementwiseAdd, ScalarAdd))


@TensorFlowConverter.register_handler("All")
def all_handler(converter: TensorFlowConverter, tf_op: "tf.Operation"):
    raise NotImplementedError(
        f"[TensorFlowConverter] {tf_op.type} is not supported yet.")
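
elementwise_binary_op_handler pairs an elementwise operator (ElementwiseAdd) with a scalar variant (ScalarAdd), so that adding a constant scalar can be lowered to the cheaper scalar form. A plausible sketch is below; the constant-scalar detection and the operator constructor arguments are assumptions for illustration, not WebDNN's actual code.

def elementwise_binary_op_handler(ElementwiseOp, ScalarOp):
    # Sketch only: API details beyond get_variable/set_variable are illustrative assumptions.
    def handler(converter: TensorFlowConverter, tf_op: "tf.Operation"):
        a = converter.get_variable(tf_op.inputs[0])
        b = converter.get_variable(tf_op.inputs[1])
        if isinstance(b, ConstantVariable) and b.size == 1:
            y, = ScalarOp(None, value=float(b.data.flatten()[0]))(a)  # variable combined with constant scalar
        elif isinstance(a, ConstantVariable) and a.size == 1:
            y, = ScalarOp(None, value=float(a.data.flatten()[0]))(b)  # constant scalar combined with variable
        else:
            y, = ElementwiseOp(None)(a, b)                            # general elementwise case
        converter.set_variable(tf_op.outputs[0], y)
    return handler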