def test_mutate_input():
    """Assigning into a non-variable symbol must raise NNVMError."""
    var = sym.Variable('x')
    out = sym.conv2d(data=var, name='conv')
    written = sym.assign(var, out)
    _ = sym.add(written, var)
    # The assign target must itself be a variable; an assign node is not.
    raised = False
    try:
        sym.assign(written, written)
    except NNVMError:
        raised = True
    assert raised
def minimize(self, obj):
    """Return a grouped symbol applying one SGD step to every input variable of *obj*."""
    weights = obj.list_input_variables()
    gradients = _base.gradients(obj, weights)
    # One assign per (weight, gradient) pair: w <- w + (-lr) * g.
    step_ops = [
        _sym.assign(weight, weight + (-self.learning_rate) * grad)
        for weight, grad in zip(weights, gradients)
    ]
    return _base.group(*step_ops)
def Variable(init=None, name=None):
    """Create a variable symbol, optionally registering an initializer.

    Generalized so that ``init`` is optional (backward compatible: all
    existing calls pass it positionally), matching the newer signature used
    elsewhere in this module.

    Parameters
    ----------
    init : Symbol, optional
        Symbolic expression whose value initializes the variable. When
        given, an assign statement is appended to the global init list.
    name : str, optional
        Name hint; a unique name is generated through NameManager when
        omitted.

    Returns
    -------
    Symbol
        The newly created variable symbol.

    Raises
    ------
    TypeError
        If ``init`` is provided but is not a Symbol.
    """
    name = NameManager.current.get(name, 'variable')
    v = symbol.Variable(name)
    if init is not None:
        if not isinstance(init, symbol.Symbol):
            raise TypeError("Expect initialization expression to be Symbol")
        # Initialization is deferred: recorded here, executed by the runtime.
        _all_variable_inits.append(symbol.assign(v, init))
    return v
def Variable(init=None, name=None):
    """Create a variable symbol, queueing an init assignment when one is given."""
    unique_name = NameManager.current.get(name, 'variable')
    var = symbol.Variable(unique_name)
    # No initializer: nothing to record.
    if init is None:
        return var
    if not isinstance(init, symbol.Symbol):
        raise TypeError("Expect initialization expression to be Symbol")
    # Deferred initialization: the assign is collected globally and run later.
    _all_variable_inits.append(symbol.assign(var, init))
    return var
def minimize(self, obj):
    """Build the Adam update graph for all input variables of *obj*.

    Creates one first-moment (m) and one second-moment (v) accumulator
    variable per input, advances the shared timestep ``self.t``, and
    returns a grouped symbol of per-variable assign updates.

    NOTE(review): each call appends fresh m/v slots to ``self.m``/``self.v``;
    calling minimize twice on the same optimizer would accumulate duplicate
    slots — presumably minimize is called once per optimizer. TODO confirm.
    """
    variables = obj.list_input_variables()
    grads = _base.gradients(obj, variables)
    updates = []
    # Allocate moment accumulators, initialized to zeros of each variable's shape.
    for i, v in enumerate(variables):
        self.m.append(_base.Variable(_sym.zeros_like(v), self.name + '_m' + str(i)))
        self.v.append(_base.Variable(_sym.zeros_like(v), self.name + '_v' + str(i)))
    # Advance the timestep symbolically; update_t carries the incremented value.
    update_t = _sym.assign(self.t, self.t + 1)
    # Bias-correction factor folded into the learning rate:
    # lr_t = lr * sqrt(1 - beta2^t) / (1 - beta1^t)
    rate = _sym.sqrt(1 - self.beta2 ** update_t) / (1 - self.beta1 ** update_t)
    lr_t = self.learning_rate * rate
    for var, g, m, v in zip(variables, grads, self.m, self.v):
        # m <- beta1*m + (1-beta1)*g ; v <- beta2*v + (1-beta2)*g^2
        update_m = _sym.assign(m, self.beta1 * m + (1 - self.beta1) * g)
        update_v = _sym.assign(v, self.beta2 * v + (1 - self.beta2) * g * g)
        # Using the assign results (update_m/update_v) chains the data
        # dependency so moments are updated before the variable is.
        update_var = _sym.assign(var, var - lr_t * update_m / (_sym.sqrt(update_v) + self.epsilon))
        updates.append(update_var)
    return _base.group(*updates)
def test_list_args():
    """Inputs that are mutated must be listed as aux state, others as read-only."""
    data = sym.Variable('x')
    bias = sym.Variable('z')
    conv = sym.conv2d(data=data, name='conv', dev='gpu')
    total = sym.add(conv, bias, name='add1')
    # Writing 'x' after it was read turns it into auxiliary state.
    written = sym.assign(data, total, name='assign')
    assert written.list_inputs('read_only') == ['conv_weight', 'z']
    assert written.list_inputs('aux_state') == ['x']
def test_list_args():
    """Mutated inputs appear under 'aux_state'; the rest under 'read_only'."""
    data = sym.Variable('x')
    bias = sym.Variable('z')
    conv = sym.conv2d(data=data, name='conv', dev='gpu')
    total = sym.add(conv, bias, name='add1')
    # Writing 'x' after it was read turns it into auxiliary state.
    written = sym.assign(data, total, name='assign')
    assert written.list_input_names('read_only') == ['conv_weight', 'z']
    assert written.list_input_names('aux_state') == ['x']
def minimize(self, obj):
    """Construct a grouped symbol applying one Adam step to *obj*'s input variables."""
    weights = obj.list_input_variables()
    gradients = _base.gradients(obj, weights)
    # Allocate first/second-moment accumulators, one pair per weight.
    for idx, weight in enumerate(weights):
        self.m.append(
            _base.Variable(_sym.zeros_like(weight), self.name + '_m' + str(idx)))
        self.v.append(
            _base.Variable(_sym.zeros_like(weight), self.name + '_v' + str(idx)))
    # Advance the shared timestep; the assign's result is the new t.
    step = _sym.assign(self.t, self.t + 1)
    # Bias-corrected learning rate: lr * sqrt(1 - beta2^t) / (1 - beta1^t).
    correction = _sym.sqrt(1 - self.beta2**step) / (1 - self.beta1**step)
    scaled_lr = self.learning_rate * correction
    ops = []
    for weight, grad, m_acc, v_acc in zip(weights, gradients, self.m, self.v):
        new_m = _sym.assign(m_acc, self.beta1 * m_acc + (1 - self.beta1) * grad)
        new_v = _sym.assign(v_acc, self.beta2 * v_acc + (1 - self.beta2) * grad * grad)
        # Consuming new_m/new_v chains the moment updates before the weight write.
        ops.append(_sym.assign(
            weight, weight - scaled_lr * new_m / (_sym.sqrt(new_v) + self.epsilon)))
    return _base.group(*ops)
def test_order_mutation_pass():
    """OrderMutation must insert control deps enforcing read/write ordering on 'x'."""
    data = sym.Variable('x')
    conv = sym.conv2d(data=data, name='conv', dev='gpu')
    summed = sym.add(conv, data, name='add1')  # write after read
    written = sym.assign(data, summed, name='assign')
    # read after write
    readback = sym.add(summed, data, name='add2')
    g = graph.create(sym.Group([readback, written]))
    serialized = g.apply(['OrderMutation', 'SaveJSON']).json_attr('json')
    nodes = json.loads(serialized)['nodes']
    index = {node['name']: pos for pos, node in enumerate(nodes)}
    # add2 reads x after the assign, so it must be ordered after it.
    assert index['assign'] in nodes[index['add2']]['control_deps']
    # The assign must wait for every earlier reader of x.
    assert index['conv'] in nodes[index['assign']]['control_deps']
    assert index['add1'] in nodes[index['assign']]['control_deps']
    # The mutated input is referenced at version 1 after the write.
    assert nodes[index['assign']]['inputs'][0][2] == 1
def test_control_dep():
    """Control dependencies can be attached explicitly to a symbol."""
    inp = sym.Variable('x')
    conv = sym.conv2d(data=inp, name='conv')
    written = sym.assign(inp, conv)
    doubled = sym.add(inp, inp)
    # Order doubled after both the assign and the conv.
    doubled._add_control_deps([written, conv])