def __call__(self, in_obj, init_state=None):
    """
    Sets shape-based parameters of this layer given an input tuple, int, or input layer.

    Arguments:
        in_obj (int, tuple, Layer or Tensor): object that provides shape
                                              information for the layer
        init_state (Tensor or list): object that provides initial state

    Returns:
        if sum_out or concat_out - rnn_out (Tensor): output
        otherwise - rnn_out (list of Tensors): list of length 2
    """
    if isinstance(in_obj, collections.Sequence):
        if len(in_obj) != 2:
            raise ValueError("If in_obj is a sequence, it must have length 2")
        if in_obj[0].axes != in_obj[1].axes:
            raise ValueError("If in_obj is a sequence, each element must have the same axes")
        fwd_in = in_obj[0]
        bwd_in = in_obj[1]
    else:
        fwd_in = in_obj
        bwd_in = in_obj

    if isinstance(init_state, collections.Sequence):
        if len(init_state) != 2:
            raise ValueError("If init_state is a sequence, it must have length 2")
        if init_state[0].axes != init_state[1].axes:
            raise ValueError("If init_state is a sequence, "
                             "each element must have the same axes")
        fwd_init = init_state[0]
        bwd_init = init_state[1]
    else:
        fwd_init = init_state
        bwd_init = init_state

    with ng.metadata(direction="fwd"):
        fwd_out = self.fwd_rnn(fwd_in, fwd_init)
    with ng.metadata(direction="bwd"):
        bwd_out = ng.cast_role(self.bwd_rnn(bwd_in, bwd_init), fwd_out.axes)

    if self.sum_out:
        return fwd_out + bwd_out
    elif self.concat_out:
        ax = fwd_out.axes.feature_axes()
        if len(ax) == 1:
            ax = ax[0]
        else:
            raise ValueError(("Multiple hidden axes: {}. "
                              "Unable to concatenate automatically").format(ax))
        return ng.concat_along_axis([fwd_out, bwd_out], ax)
    else:
        return fwd_out, bwd_out
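# Illustrative sketch (not part of the original source) of the merge performed by the
# concat_out branch above: the forward and backward RNN outputs share a single feature
# (hidden) axis, and concatenating along it doubles the hidden size, while sum_out keeps
# the original size. Axis names and lengths below are assumptions chosen for the example.
import ngraph as ng

hidden = ng.make_axis(name='hidden', length=4)
time = ng.make_axis(name='time', length=5)
fwd_out = ng.placeholder(axes=[hidden, time])
bwd_out = ng.placeholder(axes=[hidden, time])

# concat_out: the merged hidden axis has length 8
merged = ng.concat_along_axis([fwd_out, bwd_out], hidden)
# sum_out: elementwise sum keeps the hidden axis at length 4
summed = fwd_out + bwd_out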
def __call__(self, in_obj):
    branch_1_output = self.branch_1(in_obj)

    branch_2_output = self.branch_2[0](in_obj)
    branch_2_output = self.branch_2[1](branch_2_output)

    branch_3_output = self.branch_3[0](in_obj)
    branch_3_output = self.branch_3[1](branch_3_output)

    branch_4_output = self.branch_4[0](in_obj)
    branch_4_output = self.branch_4[1](branch_4_output)

    outputs = [branch_1_output, branch_2_output, branch_3_output, branch_4_output]

    # This does the equivalent of neon's merge-broadcast
    return ng.concat_along_axis(outputs, branch_1_output.axes.channel_axis())
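# Illustrative sketch (not from the original source) of the branch merge above: four branch
# outputs that agree on every axis except the channel axis are joined along that axis, so
# the merged channel length is the sum of the branch channel lengths. In the snippet above,
# branch_1_output.axes.channel_axis() locates the axis automatically; here it is passed
# explicitly. Axis names and lengths are assumptions made only for the example.
import ngraph as ng

C = ng.make_axis(name='C', length=16)
H = ng.make_axis(name='H', length=7)
W = ng.make_axis(name='W', length=7)
branches = [ng.placeholder(axes=[C, H, W]) for _ in range(4)]

merged = ng.concat_along_axis(branches, C)  # merged channel axis has length 64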
def test_concatenate(transformer_factory, concatenate_variables):
    x_list, np_list, pos = concatenate_variables

    with ExecutorFactory() as ex:
        v = ng.concat_along_axis(x_list, x_list[0].axes[pos])
        d = ng.deriv(v, x_list[0],
                     error=ng.constant(np.ones(v.axes.lengths), axes=v.axes))

        f = ex.executor([v, d])
        e_v, e_d = f()
        np_v = np.concatenate(np_list, axis=pos)
        assert ng.testing.allclose(e_v.copy(), np_v)
        assert ng.testing.allclose(e_d.copy(), np.ones(x_list[0].axes.lengths))
def test_concatenate():
    with ExecutorFactory() as ex:
        A = ng.make_axis(name='A', length=3)
        B = ng.make_axis(name='B', length=4)
        np_shape = (A.length, B.length)
        x0_np = -np.ones(np_shape)
        x1_np = np.ones(np_shape)
        x0_ng = ng.persistent_tensor([A, B], initial_value=x0_np).named('x0')
        x1_ng = ng.persistent_tensor([A, B], initial_value=x1_np).named('x1')

        j_np = np.concatenate([x0_np, x1_np], axis=0)
        j_ng = ng.concat_along_axis([x0_ng, x1_ng], A)

        f = ex.executor(j_ng)
        j_val = f()

        ng.testing.assert_allclose(j_val, j_np)
def __call__(self, in_obj, merge_axis=None):
    outputs = [branch(in_obj) for branch in self.branches]
    if isinstance(merge_axis, str):
        merge_axis = ng.make_axis(name=merge_axis)

    if self.mode == 'concat':
        # Concatenate along the given axis, defaulting to the channel axis
        if merge_axis is None:
            merge_axis = outputs[0].axes.channel_axis()
        outputs = ng.concat_along_axis(outputs, merge_axis)
    elif self.mode is None:
        # Return the output list directly
        pass
    else:
        pass

    return outputs
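# Illustrative sketch (not from the original source) of the string-to-axis conversion above:
# when merge_axis is given as a string, a bare axis carrying only that name is built with
# ng.make_axis(name=...) and used to select the matching axis of the branch outputs. The
# wide-and-deep snippet below relies on the same pattern with ng.make_axis(name="F").
# Names and lengths here are assumptions made only for the example.
import ngraph as ng

F = ng.make_axis(name='F', length=10)
N = ng.make_axis(name='N', length=32)
out_a = ng.placeholder(axes=[F, N])
out_b = ng.placeholder(axes=[F, N])

merge_axis = ng.make_axis(name='F')  # identified by name only
merged = ng.concat_along_axis([out_a, out_b], merge_axis)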
def __call__(self, batch_size, placeholders):
    embedding_ops = []
    for idx, lut in enumerate(self.luts):
        embedding_op = lut(placeholders['embeddings_placeholders'][idx])
        embedding_ops.append(embedding_op)

    # Concatenate the dense inputs with all embedding outputs along the feature axis "F"
    X_deep = ng.concat_along_axis([placeholders['X_d']] + embedding_ops,
                                  ng.make_axis(name="F"))

    self.wide_deep = ng.sigmoid(
        self.deep_layers(X_deep) +
        self.linear_layer(placeholders['X_w']) +
        ng.variable((), initial_value=0.5).named('b'))

    return self.wide_deep
def test_concatenate(transformer_factory, concatenate_variables):
    if transformer_factory.name == flex_gpu_transformer_name:
        pytest.skip("Allowed to fail until PR2")

    x_list, np_list, role, pos = concatenate_variables

    with ExecutorFactory() as ex:
        v = ng.concat_along_axis(x_list, x_list[0].axes[pos])
        v2 = ng.concat_role_axis(x_list, role)
        d = ng.deriv(v, x_list[0],
                     error=ng.constant(np.ones(v.axes.lengths), axes=v.axes))

        f = ex.executor([v, v2, d])
        e_v, e_v2, e_d = f()
        np_v = np.concatenate(np_list, axis=pos)
        assert ng.testing.allclose(e_v.copy(), np_v)
        assert ng.testing.allclose(e_v2.copy(), np_v)
        assert ng.testing.allclose(e_d.copy(), np.ones(x_list[0].axes.lengths))
def test_concat_different_axis_lengths(transformer_factory):
    ax1 = ng.make_axis(length=3, name="concat")
    ax2 = ng.make_axis(length=2, name="concat")
    ax3 = ng.make_axis(length=10, name="other")

    x = ng.placeholder(axes=[ax1, ax3])
    y = ng.placeholder(axes=[ax2, ax3])
    np_x = np.zeros(x.axes.lengths)
    np_y = np.zeros(y.axes.lengths)

    # ax1 and ax2 have the same name, so this should work
    v = ng.concat_along_axis([x, y], ax1)
    with ExecutorFactory() as ex:
        f = ex.executor(v, x, y)
        e_v = f(np_x, np_y)
        np_v = np.concatenate([np_x, np_y], axis=0)
        ng.testing.assert_allclose(e_v.copy(), np_v)
def Concat(onnx_node, ng_inputs):  # type: (NodeWrapper, List[TensorOp]) -> Op
    """Concatenate a list of tensors into a single tensor."""
    axis = onnx_node.get_attribute_value('axis', 0)

    if len(ng_inputs) < 2:
        raise ValueError('Concat node (%s): requires at least 2 inputs, %d given.'
                         % (onnx_node.name, len(ng_inputs)))

    unique_input_ranks = {len(node.axes) for node in ng_inputs}
    if len(unique_input_ranks) != 1:
        raise ValueError('Concat node (%s): input tensors must be of equal rank.'
                         % onnx_node.name)

    if axis >= unique_input_ranks.pop():
        raise ValueError('Concat node (%s): `axis` attribute is out of range.'
                         % onnx_node.name)

    ng_axis = ng_inputs[0].axes[axis]
    return ng.concat_along_axis(ng_inputs, ng_axis)
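# Illustrative sketch (not from the original source) of the axis lookup done by the importer
# above: ONNX identifies the concatenation axis by integer position, and ng_inputs[0].axes[axis]
# maps that position to the corresponding named ngraph axis. Axis names and lengths below are
# assumptions made only for the example.
import ngraph as ng

N = ng.make_axis(name='N', length=2)
C = ng.make_axis(name='C', length=3)
x = ng.placeholder(axes=[N, C])
y = ng.placeholder(axes=[N, C])

axis = 1                 # ONNX-style integer attribute
ng_axis = x.axes[axis]   # resolves to the axis named 'C'
out = ng.concat_along_axis([x, y], ng_axis)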
                                            H_pr, H_hy, init_states=None,
                                            reset_cells=True, return_sequence=True,
                                            reverse_mode=True, input_data=inputs)

# Mask unrolled outputs to the length of each sentence
outputs_forward_1 = ng.multiply(outputs_forward, mask_para)
outputs_reverse_1 = ng.multiply(outputs_reverse, mask_para)

# Dropout layer for each of the unrolled outputs
outputs_forward = dropout_3(outputs_forward_1, keep=drop_pointer)
outputs_reverse = dropout_4(outputs_reverse_1, keep=drop_pointer)

outputs_final = ng.concat_along_axis([outputs_forward, outputs_reverse],
                                     axis=outputs_reverse.axes.feature_axes()[0])

# Answer pointer pass
logits_concat = answer_init(outputs_final, states=None, output=None,
                            reset_cells=True, input_data=inputs)

# Logits
logits1 = ng.cast_axes(logits_concat[0], [ax.Y, N])
logits2 = ng.cast_axes(logits_concat[1], [ax.Y, N])

# Compute loss function
label1 = ng.slice_along_axis(inputs['answer'],
                             axis=inputs['answer'].axes.feature_axes()[0],
                             idx=0)