def test_exit_condition(transformer_factory):
    bsz = 16
    class_num = 10

    # Limit the maximum absolute value of tensor elements to 7.9.
    #
    # The tensors are filled with np.random.randn, which can in principle return any value,
    # but values above 5 are highly improbable and appear very rarely. A limit of 7.9
    # therefore almost never modifies the tested tensor, yet it prevents occasional random
    # failures when the test runs in a continuous-integration environment.
    # 7.9 is the approximate upper bound of the range [4, 8); all numbers in this range can
    # be expressed by a flexpoint number with the same dec.
    # Why not 15.9, the approximate upper bound of the [8, 16) range? Values above 8 are
    # highly improbable, and when they do appear they can cause random failures by reducing
    # the accuracy of every other number in the tensor, since most values drawn from a
    # normal distribution are close to 0.

    is_flex = is_flex_factory(transformer_factory)
    clip_val = 7.9 if is_flex else 0

    N, Y = ng.make_axis(bsz), ng.make_axis(class_num)
    y_val = rng.randn_abs_clip(ng.make_axes([N, Y]), clip_max=clip_val)
    y = ng.constant(y_val, ng.make_axes([N, Y]))

    likelihood = ng.log(ng.softmax(y, normalization_axes=y.axes[1]))

    with ExecutorFactory() as ex:
        comp = ex.executor(likelihood)

        val1 = comp()
        val2 = comp()
        ng.testing.assert_allclose(val1, val2, atol=0, rtol=0)
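The helper rng.randn_abs_clip used above belongs to the test utilities and is not shown here; a minimal NumPy sketch of what it presumably does (absolute values of standard-normal samples, clipped at clip_max when the limit is positive) could look like this:

import numpy as np

def randn_abs_clip(shape, clip_max=0.0):
    # Hypothetical stand-in for the test helper: |N(0, 1)| samples,
    # optionally clipped at clip_max (0 means "no clipping").
    values = np.absolute(np.random.randn(*shape))
    if clip_max > 0:
        values = np.minimum(values, clip_max)
    return values

# e.g. a (16, 10) tensor whose entries never exceed 7.9
sample = randn_abs_clip((16, 10), clip_max=7.9)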
Example #2
def unary_op(op_str, a):
    if op_str == "Abs":
        return ng.abs(a)
    elif op_str == "Acos":
        return ng.acos(a)
    elif op_str == "Asin":
        return ng.asin(a)
    elif op_str == "Atan":
        return ng.atan(a)
    elif op_str == "Ceiling":
        return ng.ceiling(a)
    elif op_str == "Cos":
        return ng.cos(a)
    elif op_str == "Cosh":
        return ng.cosh(a)
    elif op_str == "Floor":
        return ng.floor(a)
    elif op_str == "log":
        return ng.log(a)
    elif op_str == "exp":
        return ng.exp(a)
    elif op_str == "negative":
        return ng.negative(a)
    elif op_str == "Sign":
        return ng.sign(a)
    elif op_str == "Sin":
        return ng.sin(a)
    elif op_str == "Sinh":
        return ng.sinh(a)
    elif op_str == "Sqrt":
        return ng.sqrt(a)
    elif op_str == "Tan":
        return ng.tan(a)
    elif op_str == "Tanh":
        return ng.tanh(a)
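The chain above simply returns None for an op_str it does not recognize. As a design note, an equivalent dictionary-based dispatch (a sketch, not what the test suite itself uses) keeps the mapping compact and fails loudly with a KeyError on unknown names:

UNARY_OPS = {
    "Abs": ng.abs, "Acos": ng.acos, "Asin": ng.asin, "Atan": ng.atan,
    "Ceiling": ng.ceiling, "Cos": ng.cos, "Cosh": ng.cosh, "Floor": ng.floor,
    "log": ng.log, "exp": ng.exp, "negative": ng.negative, "Sign": ng.sign,
    "Sin": ng.sin, "Sinh": ng.sinh, "Sqrt": ng.sqrt, "Tan": ng.tan, "Tanh": ng.tanh,
}

def unary_op_dispatch(op_str, a):
    # Same behavior as the if/elif chain, but raises on unrecognized names.
    return UNARY_OPS[op_str](a)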
Example #3
def test_log_sigmoid_deriv(transformer_factory, input_tensor):
    """TODO."""
    p_u = input_tensor
    u = rng.uniform(-3.0, 3.0, p_u.axes)

    log_val_u = ng.log(ng.sigmoid(p_u))

    check_derivative(log_val_u, p_u, 0.001, u, atol=1e-2, rtol=1e-2)
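For reference, the analytic derivative being checked is d/dx log(sigmoid(x)) = 1 - sigmoid(x) = sigmoid(-x); a standalone NumPy sanity check of that identity (independent of ngraph) might look like:

import numpy as np

def sigmoid(x):
    return 1.0 / (1.0 + np.exp(-x))

x = np.linspace(-3.0, 3.0, 7)
eps = 1e-5
# central finite difference of log(sigmoid(x))
numeric = (np.log(sigmoid(x + eps)) - np.log(sigmoid(x - eps))) / (2 * eps)
np.testing.assert_allclose(numeric, sigmoid(-x), atol=1e-6)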
Example #4
def ReduceLogSumExp(
        onnx_node,
        ng_inputs):  # type: (NodeWrapper, List[NgraphNode]) -> NgraphNode
    """Compute the log sum exponent of the input tensor's element' along the provided axes."""
    op = ng.exp(ng_inputs[0])
    op = make_reduction_op(ng.sum, onnx_node, op)
    op = ng.log(op)
    return op
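Composing exp, sum, and log directly, as above, can overflow for large inputs. A numerically stable NumPy formulation of the same reduction (a sketch, not the converter's code) shifts by the maximum before exponentiating:

import numpy as np

def stable_log_sum_exp(x, axis):
    # log(sum(exp(x), axis)) without overflow: factor out the per-slice maximum.
    m = np.max(x, axis=axis, keepdims=True)
    return np.log(np.sum(np.exp(x - m), axis=axis)) + np.squeeze(m, axis=axis)

x = np.array([[1000.0, 1001.0], [3.0, 4.0]])
print(stable_log_sum_exp(x, axis=1))  # finite results where the naive formula overflows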
Example #5
def Softplus(onnx_node,
             ng_inputs):  # type: (NodeWrapper, List[NgraphNode]) -> NgraphNode
    """Apply Softplus function, f(x) = ln(exp(x) + 1) to the input tensor element-wise.

    :param onnx_node: The ONNX node representing this operation.
    :param ng_inputs: The input tensors.
    :return: The tensor with applied Softplus operation.
    """
    return ng.log((ng.exp(ng_inputs[0]) + 1))
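The direct formula ln(exp(x) + 1) overflows once exp(x) exceeds the floating-point range. A numerically robust NumPy equivalent (a sketch, not the converter's implementation) uses logaddexp:

import numpy as np

def stable_softplus(x):
    # softplus(x) = log(exp(x) + 1) = logaddexp(x, 0); never overflows.
    return np.logaddexp(x, 0.0)

print(stable_softplus(np.array([-50.0, 0.0, 50.0, 1000.0])))  # [~0, log(2), ~50, ~1000]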
Example #6
def test_log_sigmoid_deriv(transformer_factory):
    """TODO."""
    axes = ng.make_axes([ng.make_axis(20), ng.make_axis(128)])
    p_u = ng.placeholder(axes)
    u = rng.uniform(-3.0, 3.0, p_u.axes)

    log_val_u = ng.log(ng.sigmoid(p_u))

    check_derivative(log_val_u, p_u, 0.001, u, atol=1e-2, rtol=1e-2)
Example #7
def get_simple_graph():
    ax = ng.make_axes([ng.make_axis(name='C', length=1)])
    base_op = ng.constant(5.0, ax).named("weird_name#@$")
    base_op.metadata["string"] = "stringval"
    simple_graph = ng.log(ng.exp(base_op))
    simple_graph.metadata.update(string_val="foo",
                                 bool_val=True,
                                 float_val=6.5,
                                 int_val=2)
    return base_op, simple_graph
Example #8
def test_logreg(transformer_factory):
    # xs: (C, N), y: (N,)
    xs = np.array([[0.52, 0.88, 0.52, 0.74], [1.12, -1.08, 0.06, -2.49],
                   [0.77, 0.15, -1.3, 1.39]])
    ys = np.array([1, 1, 0, 1])
    max_iter = 10
    alpha = 0.1
    thetas = np.array([0., 0., 0.])

    np_logreg = NumpyLogreg(xs, ys, thetas)

    C, N = ng.make_axis(length=3), ng.make_axis(length=4)

    # input tensors
    xs_v = ng.placeholder((C, N))
    ys_v = ng.placeholder([N])
    alpha_v = ng.placeholder(())
    thetas_var = ng.variable([C - 1], initial_value=thetas)

    # define ops
    ys_pred = ng.sigmoid(ng.dot(thetas_var, xs_v))
    log_likelihoods = ng.log(ys_pred) * ys_v + ng.log(1 - ys_pred) * (1 - ys_v)
    loss = -ng.sum(log_likelihoods, reduction_axes=[N])
    grad_comp = ng.deriv(loss, thetas_var)
    grad = ng.sequential([
        ng.assign(thetas_var, thetas_var - alpha_v * grad_comp), thetas_var,
        grad_comp
    ])

    # transformer
    transformer = ngt.make_transformer()
    train_eval_func = transformer.computation([grad, loss, thetas_var], xs_v,
                                              ys_v, alpha_v)

    # evaluate
    for i in range(max_iter):
        grad_np, loss_np, thetas_np = np_logreg.optimize(alpha)
        grad_ng, loss_ng, thetas_ng = train_eval_func(xs, ys, alpha)
        assert ng.testing.allclose(loss_np, loss_ng)
        assert ng.testing.allclose(grad_np, grad_ng)
        assert ng.testing.allclose(thetas_np, thetas_ng)

    transformer.close()
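NumpyLogreg is defined elsewhere in the test module; a hypothetical NumPy sketch of the reference computation it presumably performs (negative log-likelihood, its gradient, and one gradient-descent step) is:

import numpy as np

class NumpyLogreg(object):
    # Hypothetical stand-in; the real reference class lives in the test module.
    def __init__(self, xs, ys, thetas):
        self.xs, self.ys, self.thetas = xs, ys, thetas.copy()

    def optimize(self, alpha):
        ys_pred = 1.0 / (1.0 + np.exp(-np.dot(self.thetas, self.xs)))
        log_likelihoods = np.log(ys_pred) * self.ys + np.log(1 - ys_pred) * (1 - self.ys)
        loss = -np.sum(log_likelihoods)
        grad = -np.dot(self.ys - ys_pred, self.xs.T)  # d(loss)/d(thetas)
        self.thetas -= alpha * grad
        return grad, loss, self.thetas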
Example #9
def ReduceLogSum(
        onnx_node,
        ng_inputs):  # type: (NodeWrapper, List[NgraphNode]) -> NgraphNode
    """Compute the log sum of the input tensor's element along the provided axes.

    :param onnx_node: The ONNX node representing this operation.
    :param ng_inputs: The input tensors.
    :return: The tensor with applied ReduceLogSum operation.
    """
    sum_node = make_reduction_op(ng.sum, onnx_node, ng_inputs[0])
    return ng.log(sum_node)
Example #10
    def ReduceElements(self, cntk_op, inputs):
        """
        Returns a reduction operation (max, min, mean, sum, prod) or a calculation which matches
        CNTK's LogSum reduction (`reduce_log_sum_exp` function).

        Arguments:
            cntk_op: CNTK operation to be imported.
            inputs: List of inputs to this node.

        Returns:
            A ngraph Op.
        """
        assert len(inputs) == 1

        reduction_op_name = cntk_op.attributes.get('reductionOpName')
        # CNTK API defines a reductionKeepDimensions flag, but we currently don't use it
        # keep_dimensions = cntk_op.attributes.get('reductionKeepDimensions', False)

        cntk_op_attribute_axes = []
        if cntk_op.attributes.get('axisVec'):
            cntk_op_attribute_axes.extend(cntk_op.attributes.get('axisVec'))
        elif cntk_op.attributes.get('axis'):
            cntk_op_attribute_axes.append(cntk_op.attributes.get('axis'))

        # CNTK axes are numbered in reverse order: the last axis is labeled 0, the previous 1, etc.
        reduction_axes_indexes = [len(inputs[0].axes) - 1 - i
                                  for (_, _, i) in cntk_op_attribute_axes]
        reduction_ng_axes_list = [axis for (i, axis) in enumerate(inputs[0].axes)
                                  if i in reduction_axes_indexes]
        reduction_ng_axes = ng.Axes(axes=reduction_ng_axes_list)

        if reduction_op_name == 'Max':
            return ng.max(inputs[0], reduction_axes=reduction_ng_axes).named(cntk_op.uid)

        if reduction_op_name == 'Min':
            return ng.min(inputs[0], reduction_axes=reduction_ng_axes).named(cntk_op.uid)

        if reduction_op_name == 'Mean':
            return ng.mean(inputs[0], reduction_axes=reduction_ng_axes).named(cntk_op.uid)

        if reduction_op_name == 'Sum':
            return ng.sum(inputs[0], reduction_axes=reduction_ng_axes).named(cntk_op.uid)

        if reduction_op_name == 'Prod':
            return ng.prod(inputs[0], reduction_axes=reduction_ng_axes).named(cntk_op.uid)

        if reduction_op_name == 'LogSum':
            return ng.log(ng.sum(ng.exp(inputs[0]), reduction_axes=reduction_ng_axes))\
                .named(cntk_op.uid)

        raise NotImplementedError('CNTKImporter: ReduceElements does not support operation %s'
                                  % reduction_op_name)
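CNTK numbers static axes from the end (the last axis is index 0, the one before it 1, and so on), so the arithmetic above flips each CNTK index into ngraph's left-to-right order. A tiny illustration, assuming a 3-axis input:

# CNTK asks to reduce its axis 0, i.e. the last axis of the tensor.
num_axes = 3
cntk_axis_index = 0
ngraph_axis_index = num_axes - 1 - cntk_axis_index  # == 2, the last ngraph axis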
Example #11
    def Log(self, tf_node, inputs):
        """
        Natural log of x element-wise.

        Arguments:
            tf_node: NodeDef object, the tensorflow node to convert.
            inputs: List of ngraph Ops as inputs to this node.

        Returns:
            A ngraph Op corresponding to the tensorflow node.

        Inputs to tf_node:
            x, name
        """
        return ng.log(inputs[0]).named(tf_node.name)
Example #12
def test_exit_condition(transformer_factory):
    bsz = 16
    class_num = 10

    N, Y = ng.make_axis(bsz), ng.make_axis(class_num)
    y_val = np.absolute(np.random.randn(bsz, class_num))
    y = ng.constant(y_val, ng.make_axes([N, Y]))

    likelihood = ng.log(ng.softmax(y, normalization_axes=y.axes[1]))

    with ExecutorFactory() as ex:
        comp = ex.executor(likelihood)

        val1 = comp()
        val2 = comp()
        np.testing.assert_allclose(val1, val2, atol=0, rtol=0)
Example #13
def LogSoftmax(
        onnx_node,
        ng_inputs):  # type: (NodeWrapper, List[NgraphNode]) -> NgraphNode
    """Compute logarithm of softmax values for each layer in the batch of the given input.

    :param onnx_node: The ONNX node representing this operation.
    :param ng_inputs: The input tensors.
    :return: The tensor with applied LogSoftmax operation.
    """
    data = ng_inputs[0]
    axis = onnx_node.get_attribute_value('axis', 1)
    if axis < 0 or axis >= len(data.shape):
        raise ValueError(
            'LogSoftmax node (%s): provided axis attribute is out of input tensor'
            ' dimensions range.' % onnx_node.name)
    return ng.log(ng.softmax(data, range(axis, len(data.shape))))
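Mathematically log(softmax(x)) along an axis equals x - logsumexp(x); a numerically stable NumPy sketch of the same computation (independent of the converter above) is:

import numpy as np

def log_softmax(x, axis=-1):
    # log(softmax(x)) = x - log(sum(exp(x))), with a max-shift for stability.
    m = np.max(x, axis=axis, keepdims=True)
    shifted = x - m
    return shifted - np.log(np.sum(np.exp(shifted), axis=axis, keepdims=True))

x = np.array([[1.0, 2.0, 3.0]])
print(np.exp(log_softmax(x, axis=1)).sum(axis=1))  # each row sums to 1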
Example #14
def unary_op(op_str, a):
    if op_str == 'Abs':
        return ng.abs(a)
    elif op_str == 'Acos':
        return ng.acos(a)
    elif op_str == 'Asin':
        return ng.asin(a)
    elif op_str == 'Atan':
        return ng.atan(a)
    elif op_str == 'Ceiling':
        return ng.ceiling(a)
    elif op_str == 'Cos':
        return ng.cos(a)
    elif op_str == 'Cosh':
        return ng.cosh(a)
    elif op_str == 'Floor':
        return ng.floor(a)
    elif op_str == 'log':
        return ng.log(a)
    elif op_str == 'exp':
        return ng.exp(a)
    elif op_str == 'negative':
        return ng.negative(a)
    elif op_str == 'Reverse':
        return ng.reverse(a, np.array([1]), 'index')
    elif op_str == 'Sign':
        return ng.sign(a)
    elif op_str == 'Sin':
        return ng.sin(a)
    elif op_str == 'Sinh':
        return ng.sinh(a)
    elif op_str == 'Sqrt':
        return ng.sqrt(a)
    elif op_str == 'Tan':
        return ng.tan(a)
    elif op_str == 'Tanh':
        return ng.tanh(a)
Example #15
    def __call__(self,
                 H_concat,
                 states=None,
                 output=None,
                 reset_cells=True,
                 input_data=None):
        """
        Arguments:
        ----------
        H_concat: concatenated forward and reverse unrolled outputs of the
                  `MatchLSTMCell_withAttention` cell
        states: previous LSTM state
        output: hidden state from the previous timestep
        reset_cells: whether to reset the LSTM cells
        input_data: the ArrayIterator object for training data
                    (contains the length of each sentence)

        """

        rec_axis_pr = H_concat.axes.recurrent_axis()
        const_one = ng.constant(const=1, axes=[self.dummy_axis])

        b_k_lists = []
        # rec_axis_hy=H_hy.axes.recurrent_axis()
        for i in range(0, 2):
            if output is None:
                h_k_old = ng.constant(axes=[self.F, self.N], const=0)
            else:
                h_k_old = ng.cast_axes(output, [self.F, self.N])

            sum_1 = ng.dot(
                self.V_answer,
                ng.cast_axes(H_concat,
                             [self.lstm_feature_new, rec_axis_pr, self.N]))
            sum_1 = ng.cast_axes(
                sum_1, [self.hidden_rows, self.hidden_cols_para, self.N])

            int_sum2 = ng.dot(self.W_a, h_k_old)
            int_sum = int_sum2  # +self.b_a
            int_sum = ng.ExpandDims(int_sum, self.dummy_axis, 1)

            # Following notations from the paper
            # Compute Attention Vector
            F_i_int = sum_1 + ng.axes_with_order(
                ng.dot(int_sum, self.e_q),
                [self.hidden_rows, self.hidden_cols_para, self.N])

            F_i = ng.tanh(F_i_int)  # Attention Vector

            b_k_sum1 = ng.dot(self.v_lr, F_i)
            # Masking with -inf (the log of the 0/1 length mask) for positions beyond
            # the paragraph length ensures those positions get probability 0 after the softmax
            mask_loss_new = ng.log(ng.dot(const_one, input_data['para_len']))
            mask_loss_new = ng.axes_with_order(
                ng.cast_axes(mask_loss_new, [self.N, self.hidden_cols_para]),
                [self.hidden_cols_para, self.N])

            # Add mask to the required logits
            b_k = ng.softmax(b_k_sum1 + mask_loss_new)
            b_k_req = ng.softmax(b_k_sum1 + mask_loss_new)
            b_k_repeated = ng.cast_axes(
                ng.dot(self.e_q2, ng.ExpandDims(b_k, self.dummy_axis, 0)),
                [H_concat.axes[0], rec_axis_pr, self.N])

            inputs_lstm = ng.sum(ng.multiply(H_concat, b_k_repeated),
                                 rec_axis_pr)

            # LSTM Cell calculations
            if self.out_axes is None:
                self.out_axes = self.feature_axes + inputs_lstm.axes.batch_axis(
                )
            if states is None:
                states = self.initialize_states(inputs_lstm.axes.batch_axis(),
                                                reset_cells=reset_cells)
            assert self.out_axes == states['h'].axes

            for gate in self._gate_names:
                transform = self.gate_transform[gate]
                gate_input = self.i2h[gate](inputs_lstm) + self.h2h[gate](
                    states['h'])
                self.gate_output[gate] = ng.cast_role(transform(gate_input),
                                                      self.out_axes)

            states['c'] = (states['c'] * self.gate_output['f'] +
                           self.gate_output['i'] * self.gate_output['g'])
            states['h'] = self.gate_output['o'] * self.activation(states['c'])
            states['h'] = ng.cast_role(states['h'], self.out_axes)

            output = states['h']

            # append required outputs
            b_k_lists.append(b_k_req)

        return b_k_lists
Example #16
def get_loss(thetas, xs, ys):
    ys_pred = predict(thetas, xs)
    log_likelihoods = ng.log(ys_pred) * ys + ng.log(1 - ys_pred) * (1 - ys)
    loss = -ng.sum(log_likelihoods, reduction_axes=[N])
    return loss
Example #17
def log(x, name=None):
    return ng.log(x).named(name)
Example #18
    def __call__(self,
                 H_pr,
                 h_ip,
                 states,
                 output=None,
                 reset_cells=True,
                 input_data=None):
        """
        Arguments:
        ----------
        H_pr : Encoding for question
        h_ip: Sliced input of paragraph encoding for a particular time step
        states: State of the LSTM cell
        output: previous hidden state
        input_data: the ArrayIterator object for training data (contains the
                    length of each sentence)
        """
        # get recurrent axis for question
        rec_axis_pr = H_pr.axes.recurrent_axis()
        const_one = ng.constant(const=1, axes=[self.dummy_axis])
        # if the first word of a paragraph is encountered, initialize the
        # previous LSTM hidden state to zeros
        if output is None:
            h_r_old = ng.constant(axes=[self.F, self.N], const=0)
        else:
            h_r_old = ng.cast_axes(output, [self.F, self.N])

        # Compute attention vector
        sum_1 = ng.dot(self.W_q, H_pr)
        sum_1 = ng.cast_axes(sum_1,
                             [self.hidden_rows, self.hidden_cols_ques, self.N])
        int_sum1 = ng.dot(self.W_p, h_ip)
        int_sum2 = ng.dot(self.W_r, h_r_old)
        int_sum = int_sum1 + int_sum2 + self.b_p
        int_sum = ng.ExpandDims(int_sum, self.dummy_axis, 1)

        # masking for the attention vector
        req_mask = ng.axes_with_order(
            ng.cast_axes(ng.dot(self.e_q2, input_data['question_len']),
                         [self.hidden_rows, self.N, self.hidden_cols_ques]),
            [self.hidden_rows, self.hidden_cols_ques, self.N])

        req_mask_2 = ng.axes_with_order(
            ng.cast_axes(ng.dot(const_one, input_data['question_len']),
                         [self.N, self.hidden_cols_ques]),
            [self.hidden_cols_ques, self.N])

        G_i_int = sum_1 + ng.multiply(
            req_mask,
            ng.axes_with_order(
                ng.dot(int_sum, self.e_q),
                [self.hidden_rows, self.hidden_cols_ques, self.N]))

        G_i = ng.tanh(G_i_int)
        # Attention Vector
        at_sum1 = ng.dot(self.w_lr, G_i)
        at = ng.softmax(at_sum1 + ng.log(req_mask_2))
        at_repeated = ng.cast_axes(
            ng.dot(self.e_q2, ng.ExpandDims(at, self.dummy_axis, 0)),
            [self.F, rec_axis_pr, self.N])

        # Stack the 2 vectors as per the equation in the paper
        z1 = h_ip
        z2 = ng.sum(ng.multiply(H_pr, at_repeated), rec_axis_pr)
        # represents the input to the lstm_cell
        # ng.concat_along_axis([z1,z2],self.F)
        inputs_lstm = ng.dot(self.ZX, z1) + ng.dot(self.ZY, z2)

        # LSTM cell computations (from the LSTM branch in ngraph)
        if self.out_axes is None:
            self.out_axes = self.feature_axes + inputs_lstm.axes.batch_axis()
        if states is None:
            states = self.initialize_states(inputs_lstm.axes.batch_axis(),
                                            reset_cells=reset_cells)
        assert self.out_axes == states['h'].axes

        for gate in self._gate_names:
            transform = self.gate_transform[gate]
            gate_input = self.i2h[gate](inputs_lstm) + self.h2h[gate](
                states['h'])
            self.gate_output[gate] = ng.cast_role(transform(gate_input),
                                                  self.out_axes)

        states['c'] = (states['c'] * self.gate_output['f'] +
                       self.gate_output['i'] * self.gate_output['g'])
        states['h'] = self.gate_output['o'] * self.activation(states['c'])
        states['h'] = ng.cast_role(states['h'], self.out_axes)
        # return unrolled output and state of LSTM cell
        return ng.cast_axes(states['h'], axes=[self.F, self.N]), states
Example #19
def get_simple_graph():
    base_op = ng.constant(5.0)
    simple_graph = ng.log(ng.exp(base_op))
    return base_op, simple_graph
Example #20
def Log(onnx_node, ng_inputs):  # type: (NodeWrapper, List[NgraphNode]) -> NgraphNode
    """Calculate the natural log of the input tensor elementwise."""
    return ng.log(ng_inputs[0])
Example #21
def Log(onnx_node, ng_inputs):  # type: (NodeWrapper, List[TensorOp]) -> Op
    return ng.log(ng_inputs[0])
Example #22
def Softplus(onnx_node, ng_inputs):  # type: (NodeWrapper, List[NgraphNode]) -> NgraphNode
    """Apply Softplus function, f(x) = ln(exp(x) + 1) to the input tensor elementwise."""
    return ng.log((ng.exp(ng_inputs[0]) + 1))
Example #23
def get_simple_graph():
    ax = ng.make_axes([ng.make_axis(name='C', length=1)])
    base_op = ng.constant(5.0, ax)
    simple_graph = ng.log(ng.exp(base_op))
    return base_op, simple_graph
Example #24
z = ng.placeholder(axes=z_ax)

# image placeholder
C = ng.make_axis(name='C', length=1)
D = ng.make_axis(name='D', length=1)
H = ng.make_axis(name='H', length=28)
W = ng.make_axis(name='W', length=28)
image_axes = ng.make_axes([C, D, H, W, N])
image = ng.placeholder(axes=image_axes)

# build network graph
generated = generator(z)
D1 = discriminator(image)
D2 = discriminator(generated)

loss_d = -ng.log(D1) - ng.log(1 - D2)
mean_cost_d = ng.mean(loss_d, out_axes=[])
loss_g = -ng.log(D2)
mean_cost_g = ng.mean(loss_g, out_axes=[])

optimizer_d = make_optimizer(name='discriminator_optimizer')
optimizer_g = make_optimizer(name='generator_optimizer')
updates_d = optimizer_d(loss_d, subgraph=discriminator)
updates_g = optimizer_g(loss_g, subgraph=generator)

# compile computations
generator_train_inputs = {'noise': z}
discriminator_train_inputs = {'image': image, 'noise': z}

generator_train_outputs = {
    'batch_cost': mean_cost_g,
Example #25
def ReduceLogSumExp(onnx_node,
                    ng_inputs):  # type: (NodeWrapper, List[TensorOp]) -> Op
    op = ng.exp(ng_inputs[0])
    op = make_reduction_op(ng.sum, onnx_node, op)
    op = ng.log(op)
    return op