Example #1
    def variable_update(self, variable, grad, scale_factor):
        epsilon, decay = (self.epsilon, self.decay_rate)
        grad = clip_gradient_value(grad, self.gradient_clip_value)
        state = ng.persistent_tensor(axes=variable.axes, initial_value=0.)
        updates = ng.sequential([
            ng.assign(state, decay * state + (1.0 - decay) * ng.square(grad)),
            ng.assign(variable, variable - ((scale_factor * grad * self.lrate)
                                            / (ng.sqrt(state + epsilon) + epsilon)))
        ])
        return updates
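For reference, a minimal NumPy sketch of the update this graph expresses (a hypothetical helper, not the ngraph API; note that `epsilon` is applied twice, inside the square root and again outside):

import numpy as np

def rmsprop_step(variable, grad, state, lrate, decay, epsilon, scale_factor=1.0):
    # Sketch of the RMSProp update in Example #1, with plain arrays.
    state = decay * state + (1.0 - decay) * np.square(grad)  # EMA of squared gradients
    variable = variable - (scale_factor * grad * lrate) / (np.sqrt(state + epsilon) + epsilon)
    return variable, state

Example #2 below is the Adagrad variant: it replaces the exponential moving average with a running sum, `state = state + np.square(grad)`, and divides by `np.sqrt(state + epsilon)` only.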
Example #2
    def variable_update(self, variable, grad, scale_factor):
        grad = clip_gradient_value(grad, self.gradient_clip_value)
        state = ng.persistent_tensor(axes=grad.axes, initial_value=0.)
        updates = ng.sequential([
            ng.assign(state, state + ng.square(grad)),
            ng.assign(
                variable, variable - (scale_factor * self.lrate * grad) /
                (ng.sqrt(state + self.epsilon)))
        ])
        return updates
Example #3
def test_sequential_side(M):
    x1_np = 2
    x2_np = 3
    b_np = 1
    x_np = np.array([1, 2, 3], dtype=np.float32)

    x = ng.variable([M], initial_value=x_np)
    x1 = ng.persistent_tensor(axes=(), initial_value=x1_np)
    x2 = ng.persistent_tensor(axes=(), initial_value=x2_np)
    x1_vo = ng.value_of(x1)
    x2_vo = ng.value_of(x2)
    b = ng.persistent_tensor(axes=(), initial_value=b_np)

    y = ng.sequential([
        x1_vo, x2_vo,
        ng.assign(x1,
                  ng.sum(x, out_axes=()) + x1 * b + (1 - b)),
        ng.assign(x2,
                  ng.mean(x, out_axes=()) + x2 * b + (1 - b)), x * 2
    ])

    with ExecutorFactory() as ex:
        main_effect = ex.executor((y, x1_vo, x2_vo, x1, x2))
        current_values = ex.executor((x1, x2))

        # Run main path #1
        y_val, x1_init_val, x2_init_val, x1_final_val, x2_final_val = main_effect(
        )
        y_np = x_np * 2

        assert np.allclose(y_val, y_np)
        assert np.allclose(x1_init_val, x1_np)
        assert np.allclose(x2_init_val, x2_np)
        x1_np = np.sum(x_np) + x1_np * b_np + (1 - b_np)
        x2_np = np.mean(x_np) + x2_np * b_np + (1 - b_np)
        assert np.allclose(x1_final_val, x1_np)
        assert np.allclose(x2_final_val, x2_np)

        x1_val, x2_val = current_values()
        assert np.allclose(x1_val, x1_np)
        assert np.allclose(x2_val, x2_np)

        # Run main path #2 (Should be the same as before)
        y_val, x1_init_val, x2_init_val, x1_final_val, x2_final_val = main_effect(
        )
        y_np = x_np * 2

        assert np.allclose(y_val, y_np)
        assert np.allclose(x1_init_val, x1_np)
        assert np.allclose(x2_init_val, x2_np)
        x1_np = np.sum(x_np) + x1_np * b_np + (1 - b_np)
        x2_np = np.mean(x_np) + x2_np * b_np + (1 - b_np)
        assert np.allclose(x1_final_val, x1_np)
        assert np.allclose(x2_final_val, x2_np)
Example #4
    def variable_update(self, variable, grad, scale_factor):
        m = ng.persistent_tensor(axes=grad.axes, initial_value=0.)
        v = ng.persistent_tensor(axes=grad.axes, initial_value=0.)
        # self.ell is the bias-corrected learning rate computed in __call__
        # (see Example #19 below).
        updates = ng.sequential([
            ng.assign(m, m * self.beta_1 + (1 - self.beta_1) * grad),
            ng.assign(v, v * self.beta_2 + (1 - self.beta_2) * grad * grad),
            ng.assign(
                variable, variable - (scale_factor * self.ell * m) /
                (ng.sqrt(v) + self.epsilon))
        ])
        return updates
Example #5
    def variable_update(self, variable, grad, scale_factor):
        updates = []
        velocity = ng.persistent_tensor(
            axes=variable.axes, initial_value=0.).named(variable.name + '_vel')
        clip_grad = clip_gradient_value(grad, self.gradient_clip_value)
        lr = -self.lrate * (scale_factor * clip_grad + self.wdecay * variable)
        updates.append(ng.assign(velocity, velocity * self.momentum_coef + lr))
        if self.nesterov:
            delta = (self.momentum_coef * velocity + lr)
        else:
            delta = velocity
        updates.append(ng.assign(variable, variable + delta))
        return ng.sequential(updates)
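A plain NumPy sketch of the momentum rule above (hypothetical helper, not the ngraph API; Example #10 below is the same update plus a `reduce_func` metadata hint for data-parallel gradient reduction):

def momentum_step(variable, grad, velocity, lrate, momentum_coef,
                  wdecay=0.0, scale_factor=1.0, nesterov=False):
    # Sketch of Examples #5/#10 with plain arrays.
    step = -lrate * (scale_factor * grad + wdecay * variable)
    velocity = momentum_coef * velocity + step
    # Nesterov looks one momentum step ahead; plain momentum applies velocity directly.
    delta = momentum_coef * velocity + step if nesterov else velocity
    return variable + delta, velocity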
Example #6
    def variable_update(self, variable, grad, scale_factor):
        epsilon, decay = (self.epsilon, self.decay_rate)
        grad = clip_gradient_value(grad, self.gradient_clip_value)
        state = ng.persistent_tensor(axes=variable.axes, initial_value=1.)
        velocity = ng.persistent_tensor(
            axes=variable.axes, initial_value=0.).named(variable.name + '_vel')
        updates = ng.sequential([
            ng.assign(state, decay * state + (1.0 - decay) * ng.square(grad)),
            ng.assign(
                velocity, velocity * self.momentum +
                (self.lrate * scale_factor * grad / ng.sqrt(state + epsilon)) +
                self.lrate * self.wdecay * variable),
            ng.assign(variable, variable - velocity)
        ])
        return updates
Example #7
def test_query_state(device):
    import ngraph as ng
    from ngraph.impl import Function
    input_data = ng.parameter([5, 7], name="input_data", dtype=np.float32)
    rv = ng.read_value(input_data, "var_id_667")
    #a = ng.add(rv, input_data)
    node = ng.assign(rv, "var_id_667")
    res = ng.result(rv, "res")
    func = Function([res], sinks=[node], parameters=[input_data], name='test')
    caps = Function.to_capsule(func)

    net = ie.IENetwork(caps)
    ie_core = ie.IECore()
    exec_net = ie_core.load_network(network=net,
                                    device_name=device,
                                    num_requests=1)
    request = exec_net.requests[0]
    mem_states = request.query_state()
    mem_state = mem_states[0]
    with pytest.raises(ValueError) as e:
        ones_arr = np.ones(shape=(1, 800), dtype=np.float32)
        mem_state.state.buffer[:] = ones_arr
    assert "assignment destination is read-only" in str(e.value)
    assert mem_state.name == 'id_1'
    assert mem_state.state.tensor_desc.precision == 'FP32'
Example #8
    def __call__(self,
                 cost_func,
                 variables=None,
                 subgraph=None,
                 warning=False):
        """
        Arguments:
            cost_func (Op): The cost function to optimize
            variables (list of variables): List of variables to optimize
            subgraph (SubGraph): A subgraph instance containing all variables to optimize
            warning (bool): If True displays warning message if any variables
                            specified do not participate in batch cost computation

        .. Note::
            If subgraph is provided, the variables to optimize will be taken from it.
            Otherwise, they can be provided explicitly by passing a list as `variables`.
            If neither `subgraph` nor `variables` is provided, the variables to optimize will be
            all trainable variables on which `cost` depends.
        """

        all_updates = []
        batch_cost = ng.sum(cost_func, out_axes=())
        if cost_func.axes.batch_axis() is None:
            batch_size = 1
        else:
            batch_size = cost_func.axes.batch_axis().length

        # determine variables to optimize
        if subgraph is not None:
            if variables is not None:
                raise ValueError(
                    "variables and subgraph cannot both be specified.")
            variables = list(subgraph.variables.values())

        if variables is None:
            variables = batch_cost.variables()
        elif warning:
            all_variables = batch_cost.variables()
            selected_variables = all_variables & set(variables)
            if len(selected_variables) < len(variables):
                logger.warn(
                    "not all selected variables participate in cost computation"
                )

        # gradients
        grads = [ng.deriv(batch_cost, v) / batch_size for v in variables]
        scale_factor = clip_gradient_norm(grads, self.gradient_clip_norm)

        # updates
        for variable, grad in zip(variables, grads):
            updates = self.variable_update(variable, grad, scale_factor)
            all_updates.append(updates)
        updates = ng.doall(all_updates)
        grads = ng.doall(grads)
        clips = ng.doall([
            ng.assign(variable,
                      clip_weight_value(variable, self.weight_clip_value))
            for variable in variables
        ])
        return ng.sequential([grads, updates, clips, 0])
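A hypothetical usage sketch, following the training-op pattern of Example #29 below (`optimizer`, `total_loss`, `placeholders`, and `transformer` are assumed to be set up elsewhere):

# The returned sequential evaluates grads, applies updates, clips weights,
# and yields 0, so it can be chained with the loss into a single train op.
train_output = ng.sequential([optimizer(total_loss), total_loss])
train_op = transformer.add_computation(ng.computation(train_output, *placeholders))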
Example #9
    def Assign(self, tf_node, inputs):
        """
        Assign `value` to `ref`.

        Arguments:
            tf_node: NodeDef object, the tensorflow node to convert.
            inputs: List of ngraph Ops as inputs to this node.

        Returns:
            An ngraph Op corresponding to the tensorflow node.

        Inputs to tf_node:
            ref, value, validate_shape, use_locking, name
        """
        """
        TODO: currently cannot fully support the TensorFlow semantics.
        1. Assign in TF returns the assigned tensor; in ngraph, it returns
           None.
        2. In TF, if the assigned tensor is not used, then it retains the
           original value.
        """
        ref, value = inputs
        assert ref.axes.lengths == value.axes.lengths, "shape not the same"
        value = ng.cast_axes(value, ref.axes)

        return ng.assign(ref, value)
Example #10
    def variable_update(self, variable, grad, scale_factor):
        updates = []
        velocity = ng.persistent_tensor(
            axes=variable.axes, initial_value=0.).named(variable.name + '_vel')
        # add metadata to the gradient node indicating that it should be
        # reduced across data-parallel workers before being used for optimization
        grad.metadata['reduce_func'] = 'sum'
        clip_grad = clip_gradient_value(grad, self.gradient_clip_value)
        lr = -self.lrate * (scale_factor * clip_grad + self.wdecay * variable)
        updates.append(ng.assign(velocity, velocity * self.momentum_coef + lr))
        if self.nesterov:
            delta = (self.momentum_coef * velocity + lr)
        else:
            delta = velocity
        updates.append(ng.assign(variable, variable + delta))
        return ng.sequential(updates)
Example #11
def test_setting(M):
    with ExecutorFactory() as ex:
        axes = ng.make_axes([M])

        np_x = np.array([1, 2, 3], dtype=np.float32)
        np_y = np.array([1, 3, 5], dtype=np.float32)

        x = ng.constant(np_x, axes)
        y = ng.constant(np_y, axes)

        v = ng.variable(axes, initial_value=x)

        f_v = ex.executor(v)

        vset = ng.sequential([ng.assign(v, v + y), v])
        f_v1 = ex.executor(vset)

        f_v2 = ex.executor(v)

        e_v = f_v().copy()
        assert ng.testing.allclose(e_v, np_x)
        e_v1 = f_v1().copy()
        assert ng.testing.allclose(e_v1, np_x + np_y)
        e_v2 = f_v2().copy()
        assert ng.testing.allclose(e_v2, np_x + np_y)
Example #12
    def get_restore_op(self):
        """
        Get variable restoring ngraph op from TF model checkpoint

        Returns:
            A `ng.doall` op that restores the stored weights in TF model
            checkpoint
        """
        if self._graph is None:
            raise ValueError("self._graph is None, import meta_graph first.")
        if self._checkpoint_path is None:
            raise ValueError("self._checkpoint_path is None, please specify"
                             "checkpoint_path while importing meta_graph.")
        with self._graph.as_default():
            tf_variables = tf.all_variables()
            ng_variables = self.get_op_handle(tf_variables)
            ng_restore_ops = []
            with tf.Session() as sess:
                self.saver.restore(sess, self._checkpoint_path)
                for tf_variable, ng_variable in zip(tf_variables,
                                                    ng_variables):
                    val = sess.run(tf_variable)
                    with ng.Op.saved_user_deps():
                        restore_op = ng.assign(ng_variable, val)
                        ng_restore_ops.append(restore_op)
            with ng.Op.saved_user_deps():
                ng_restore_ops = ng.doall(ng_restore_ops)
            return ng_restore_ops
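A hypothetical usage sketch (assuming an `importer` instance of this class and a transformer as in Example #22):

restore_op = importer.get_restore_op()
restore_func = transformer.computation(restore_op)
restore_func()  # copies the checkpoint values into the ngraph variables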
Example #13
    def __call__(self, cost_func):
        with ng.Op.saved_user_deps():
            state_updates, param_updates = [], []
            batch_cost = ng.sum(cost_func, out_axes=())
            batch_size = cost_func.axes.batch_axes()[0].length

            grads = [
                ng.deriv(batch_cost, v) / batch_size
                for v in batch_cost.variables()
            ]
            scale_factor = clip_gradient_norm(
                grads) if self.gradient_clip_norm else 1

            epsilon, decay = (self.epsilon, self.decay_rate)
            for i, (variable,
                    grad) in enumerate(zip(batch_cost.variables(), grads)):
                grad = clip_gradient_value(grad, self.gradient_clip_value)

                state = ng.persistent_tensor(axes=variable.axes,
                                             initial_value=0.)
                state_updates.append(
                    ng.assign(lvalue=state,
                              rvalue=decay * state +
                              (1.0 - decay) * ng.square(grad)).named(
                                  'state_u_%s' % i))

                param_updates.append(
                    ng.assign(
                        lvalue=variable,
                        rvalue=variable -
                        ((scale_factor * grad * self.learning_rate) /
                         (ng.sqrt(state + epsilon) + epsilon)),
                    ).named('var_u_%s' % i))

            lr_update = [
                ng.assign(
                    self.learning_rate,
                    self.schedule.get_learning_rate(self.learning_rate,
                                                    self.iteration_index))
            ]

            updates = ng.doall(state_updates + param_updates + lr_update)
            self.iteration_index += 1

        return updates
Example #14
def test_modify_state():
    with ExecutorFactory() as ex:
        N = ng.make_axis(3, name='N')
        x_np = np.ones((N.length)) * 4
        x = ng.variable([N], initial_value=x_np).named('x')
        val = ng.sequential([ng.assign(x, x + x), x])
        f = ex.executor(val)
        x_val = f()
        assert np.allclose(x_np + x_np, x_val)
Example #15
def test_assign():
    input_data = ng.parameter([5, 7], name="input_data", dtype=np.int32)
    rv = ng.read_value(input_data, "var_id_667")
    node = ng.assign(rv, "var_id_667")

    assert node.get_type_name() == "Assign"
    assert node.get_output_size() == 1
    assert list(node.get_output_shape(0)) == [5, 7]
    assert node.get_output_element_type(0) == Type.i32
Example #16
def create_function_with_memory(input_shape, data_type):
    input_data = ng.parameter(input_shape, name="input_data", dtype=data_type)
    rv = ng.read_value(input_data, "var_id_667")
    add = ng.add(rv, input_data, name="MemoryAdd")
    node = ng.assign(add, "var_id_667")
    res = ng.result(add, "res")
    func = Function(results=[res], sinks=[node], parameters=[input_data], name="name")
    caps = Function.to_capsule(func)
    return caps
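A hypothetical usage sketch, reusing the `ie` loading pattern from Example #7:

caps = create_function_with_memory([5, 7], np.float32)
net = ie.IENetwork(caps)
exec_net = ie.IECore().load_network(network=net, device_name="CPU", num_requests=1)
request = exec_net.requests[0]
mem_states = request.query_state()  # exposes the "var_id_667" state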
Example #17
def test_sequential(N):
    x = ng.variable([N], initial_value=0)
    x0 = x + x
    x1 = x + x
    p = ng.sequential([x0, ng.assign(x, 2), x1, x0])
    with ExecutorFactory() as ex:
        x0_val, x1_val, p_val = ex.executor([x0, x1, p])()
    # x0 is evaluated before the assignment and x1 after it; the sequential
    # returns the value x0 already had, not a recomputation with x == 2.
    assert x0_val == 0
    assert x1_val == 4
    assert p_val == 0
Example #18
    def train_outputs(self, in_obj):
        in_axes = in_obj.axes.sample_axes()
        red_axes = ng.make_axes()
        if len(in_axes.role_axes(ar.Channel)) != 0:
            red_axes += in_axes.sample_axes() - in_axes.role_axes(ar.Channel)
        red_axes += in_obj.axes.batch_axes()
        out_axes = in_axes - red_axes

        self.gamma = self.gamma or ng.variable(axes=out_axes, initial_value=1.0).named('gamma')
        self.beta = self.beta or ng.variable(axes=out_axes, initial_value=0.0).named('beta')
        self.gvar = self.gvar or ng.persistent_tensor(axes=out_axes, initial_value=1.0)
        self.gmean = self.gmean or ng.persistent_tensor(axes=out_axes, initial_value=1.0)

        xmean = ng.mean(in_obj, reduction_axes=red_axes)
        xvar = ng.variance(in_obj, reduction_axes=red_axes)
        ng.assign(self.gmean, self.gmean * self.rho + xmean * (1.0 - self.rho))
        ng.assign(self.gvar, self.gvar * self.rho + xvar * (1.0 - self.rho))

        return self.gamma * (in_obj - xmean) / ng.sqrt(xvar + self.eps) + self.beta
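Note that the two `ng.assign` results above are discarded rather than threaded through `ng.sequential` as in the other examples, so their execution appears to rely on implicit user-dependency tracking. A hedged sketch of an explicitly sequenced equivalent for the final return:

return ng.sequential([
    ng.assign(self.gmean, self.gmean * self.rho + xmean * (1.0 - self.rho)),
    ng.assign(self.gvar, self.gvar * self.rho + xvar * (1.0 - self.rho)),
    self.gamma * (in_obj - xmean) / ng.sqrt(xvar + self.eps) + self.beta,
])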
Example #19
    def __call__(self, *args, **kwargs):
        if len(self.ops) == 0:
            self.beta_1 = ng.constant(self.beta_1, dtype=np.float32)
            self.beta_2 = ng.constant(self.beta_2, dtype=np.float32)
            self.t = ng.persistent_tensor(axes=(), initial_value=0)

        self.t = ng.sequential([ng.assign(self.t, self.t + 1), self.t])
        self.ell = self.lrate * ng.sqrt(1 - self.beta_2**self.t) / (
            1 - self.beta_1**self.t)

        return super(Adam, self).__call__(*args, **kwargs)
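Putting Examples #4 and #19 together, a NumPy sketch of the full bias-corrected Adam step (hypothetical helper, not the ngraph API):

import numpy as np

def adam_step(variable, grad, m, v, t, lrate, beta_1, beta_2, epsilon):
    # t is the 1-based step counter maintained by self.t above.
    m = beta_1 * m + (1 - beta_1) * grad
    v = beta_2 * v + (1 - beta_2) * grad * grad
    ell = lrate * np.sqrt(1 - beta_2 ** t) / (1 - beta_1 ** t)  # bias-corrected rate
    variable = variable - ell * m / (np.sqrt(v) + epsilon)
    return variable, m, v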
Example #20
    def __call__(self, in_obj):
        if not self.initialized:
            w_axis = ng.make_axis()
            self.weight = ng.variable(axes=[w_axis],
                                      initial_value=2,
                                      metadata={"label": LABELS["weight"]})
            self.side_effect = ng.persistent_tensor(axes=[w_axis],
                                                    initial_value=0)

        return ng.sequential([ng.assign(self.side_effect, self.weight),
                              self.weight * in_obj])
Example #21
def test_liveness():
    with ExecutorFactory() as ex:

        x = ng.variable(axes=[]).named('x')
        y = ng.variable(axes=[]).named('y')
        w1 = ng.variable(axes=[]).named('w1')
        w2 = ng.variable(axes=[]).named('w2')

        x2 = x * w1
        x3 = (x2 * w2).named('result')
        cost = x3 - y

        dw1 = ng.deriv(cost, w1)
        dw2 = ng.deriv(cost, w2)

        upd1 = ng.assign(w1, w1 + dw1)
        upd2 = ng.assign(w2, w2 + dw2)
        seq_stuff = ng.sequential([upd1, upd2, x3])

        exc = ex.executor(seq_stuff)
        return exc
Example #22
def ngraph_logreg(xs_np, ys_np, max_iter, alpha):
    # axis
    C, N = ng.make_axis("C"), ng.make_axis("N")

    def sigmoid(x):
        return 1. / (1. + ng.exp(-x))

    def predict(thetas, xs):
        return sigmoid(ng.dot(thetas, xs))

    def get_loss(thetas, xs, ys):
        ys_pred = predict(thetas, xs)
        log_likelihoods = ng.log(ys_pred) * ys + ng.log(1 - ys_pred) * (1 - ys)
        loss = -ng.sum(log_likelihoods, reduction_axes=[N])
        return loss

    # axis
    C.length = 3
    N.length = 4

    # input tensors
    xs = ng.placeholder((C, N))
    ys = ng.placeholder([N])

    # init weights
    thetas_np = np.array([0., 0., 0.])
    thetas_numpy_tensor = ng.constant(thetas_np, [C])
    thetas = ng.variable([C - 1], initial_value=thetas_numpy_tensor)

    # define ops
    loss = get_loss(thetas, xs, ys)
    variable = list(loss.variables())[0]  # we only have one variable thetas
    grad = ng.deriv(loss, variable)
    with ng.Op.saved_user_deps():
        update = ng.assign(variable, variable - alpha * grad)

    # transformer
    transformer = ngt.make_transformer()
    train_eval_func = transformer.computation([grad, loss, thetas, update], xs,
                                              ys)

    # evaluate
    loss_collect = []
    grad_collect = []
    thetas_collect = []
    for i in range(max_iter):
        grad_val, loss_val, thetas_val, _ = train_eval_func(xs_np, ys_np)
        loss_collect.append(loss_val.copy())
        grad_collect.append(grad_val.copy())
        thetas_collect.append(thetas_val.copy())

    return loss_collect, grad_collect, thetas_collect
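For reference, the gradient that `ng.deriv(loss, variable)` computes here has a closed form; with `xs_np` of shape (C, N) and `ys_np` of shape (N,), the derivative of the negative log-likelihood reduces to (a NumPy sketch):

ys_pred = 1. / (1. + np.exp(-thetas_np @ xs_np))  # sigmoid(thetas . xs), shape (N,)
grad_np = xs_np @ (ys_pred - ys_np)               # shape (C,)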
Example #23
def test_assign(transformer_factory, operands, test_name):
    v = ng.variable(())
    ng_placeholder = ng.placeholder(())
    vset = ng.sequential([ng.assign(v, ng_placeholder), v])
    iterations = len(operands) != 1
    with executor(vset, ng_placeholder) as ex:
        for i in operands:
            flex_result = ex(i[0])
            print("flex: ", flex_result)
            print("expected: ", i[1])
            if iterations:
                assert_allclose(flex_result, i[1])
            else:
                assert flex_result == i[1]
Example #24
def test_scope_ops(input_placeholder):
    """
    Test scope_ops creates a subgraph with correct attributes
    """

    with scope_ops(name="foo") as subgraph:
        w = ng.variable(ng.make_axis(), initial_value=1, name="W")
        y = w * input_placeholder
        z = y + 4
        v1 = ng.persistent_tensor(w.axes, initial_value=0, name="effect1")
        v2 = ng.persistent_tensor(w.axes, initial_value=0, name="effect2")
        ng.sequential([ng.assign(v1, w), ng.assign(v2, w), z.named("output")])

    assert len(subgraph.inputs) == 1
    assert input_placeholder.unscoped_name in subgraph.inputs

    assert len(subgraph.variables) == 1
    assert "W" in subgraph.variables

    assert len(subgraph.outputs) == 1
    assert "output" in subgraph.outputs

    assert len(subgraph.side_effects) == 2
Example #25
    def AssignAdd(self, tf_node, inputs):
        """
        Assign `ref` + `value` to `ref`.
        Update 'ref' by adding 'value' to it.

        Arguments:
            tf_node: NodeDef object, the tensorflow node to convert.
            inputs: List of ngraph Ops as inputs to this node.

        Returns:
            An ngraph Op corresponding to the tensorflow node.

        Inputs to tf_node:
            ref, value, use_locking, name
        """
        ref, value = inputs
        assert ref.axes.lengths == value.axes.lengths, "shape not the same"
        value = ng.cast_axes(value, ref.axes)

        if tf_node.name in self.init_assign_op_names:
            with ng.Op.saved_user_deps():
                return ng.assign(ref, ref + value)
        else:
            return ng.assign(ref, ref + value)
Example #26
    def minimize(self, cost, variables):
        """
        Minimize cost by returning update Ops.

        Arguments:
            cost: The cost Op to be minimized
            variables: TODO

        Returns:
            A doall op containing setitems to variable ops.
        """

        assert cost is not None
        assert variables is not None

        return ng.doall([ng.assign(variable,
                                   variable - self.compute_lr_op * ng.deriv(cost, variable))
                         for variable in variables])
Example #27
    def ApplyGradientDescent(self, tf_node, inputs):
        """
        Apply gradient descent
        CPU reference: https://goo.gl/oMq2HA
        GPU reference: https://goo.gl/US3t0r

        Arguments:
            tf_node: NodeDef object, the tensorflow node to convert.
            inputs: List of ngraph Ops as inputs to this node.

        Returns:
            An ngraph Op corresponding to the tensorflow node.

        Inputs to tf_node:
            value, learning rate, gradient
        """
        var, lr, grad = inputs
        return ng.assign(var, var - lr * grad)
Example #28
def test_logreg(transformer_factory):
    # xs: (C, N), y: (N,)
    xs = np.array([[0.52, 0.88, 0.52, 0.74], [1.12, -1.08, 0.06, -2.49],
                   [0.77, 0.15, -1.3, 1.39]])
    ys = np.array([1, 1, 0, 1])
    max_iter = 10
    alpha = 0.1
    thetas = np.array([0., 0., 0.])

    np_logreg = NumpyLogreg(xs, ys, thetas)

    C, N = ng.make_axis(length=3), ng.make_axis(length=4)

    # input tensors
    xs_v = ng.placeholder((C, N))
    ys_v = ng.placeholder([N])
    alpha_v = ng.placeholder(())
    thetas_var = ng.variable([C - 1], initial_value=thetas)

    # define ops
    ys_pred = ng.sigmoid(ng.dot(thetas_var, xs_v))
    log_likelihoods = ng.log(ys_pred) * ys_v + ng.log(1 - ys_pred) * (1 - ys_v)
    loss = -ng.sum(log_likelihoods, reduction_axes=[N])
    grad_comp = ng.deriv(loss, thetas_var)
    grad = ng.sequential([
        ng.assign(thetas_var, thetas_var - alpha_v * grad_comp), thetas_var,
        grad_comp
    ])

    # transformer
    transformer = ngt.make_transformer()
    train_eval_func = transformer.computation([grad, loss, thetas_var], xs_v,
                                              ys_v, alpha_v)

    # evaluate
    for i in range(max_iter):
        grad_np, loss_np, thetas_np = np_logreg.optimize(alpha)
        grad_ng, loss_ng, thetas_ng = train_eval_func(xs, ys, alpha)
        assert ng.testing.allclose(loss_np, loss_ng)
        assert ng.testing.allclose(grad_np, grad_ng)
        assert ng.testing.allclose(thetas_np, thetas_ng)

    transformer.close()
Example #29
    def __init__(self,
                 tuning_parameters,
                 name="",
                 global_network=None,
                 network_is_local=True):
        Architecture.__init__(self, tuning_parameters, name)
        assert tuning_parameters.agent.neon_support, 'Neon is not supported for this agent'
        self.clip_error = tuning_parameters.clip_gradients
        self.total_loss = None
        self.epoch = 0
        self.inputs = []
        self.outputs = []
        self.targets = []
        self.losses = []

        self.transformer = tuning_parameters.sess
        self.network = self.get_model(tuning_parameters)
        self.accumulated_gradients = []

        # training and inference ops
        train_output = ng.sequential(
            [self.optimizer(self.total_loss), self.total_loss])
        placeholders = self.inputs + self.targets
        self.train_op = self.transformer.add_computation(
            ng.computation(train_output, *placeholders))
        self.predict_op = self.transformer.add_computation(
            ng.computation(self.outputs, self.inputs[0]))

        # update weights from array op
        self.weights = [
            ng.placeholder(w.axes) for w in self.total_loss.variables()
        ]
        self.set_weights_ops = []
        for target_variable, variable in zip(self.total_loss.variables(),
                                             self.weights):
            self.set_weights_ops.append(
                self.transformer.add_computation(
                    ng.computation(ng.assign(target_variable, variable),
                                   variable)))

        # get weights op
        self.get_variables = self.transformer.add_computation(
            ng.computation(self.total_loss.variables()))
Example #30
    def minimize(self, cost):
        """
        Minimize cost by returning update Ops.

        Arguments:
            cost: The cost Op to be minimized

        Returns:
            A doall op containing setitems to variable ops.
        """
        variables = list(cost.variables())
        grads = [ng.deriv(cost, variable) for variable in variables]
        with ng.Op.saved_user_deps():
            param_updates = [
                ng.assign(variable, variable - self.lrate * grad)
                for variable, grad in zip(variables, grads)
            ]
            updates = ng.doall(param_updates)
        return updates