def get_layer(self, name):
    """Look up a v2 Layer instance by its registered name.

    :param name: name of the layer to look up
    :return: whatever ``v2_layer.get_layer`` returns for *name*
        (presumably the Layer instance, or a falsy value when the
        name is unknown — TODO confirm against v2_layer)
    """
    layer = v2_layer.get_layer(name)
    return layer
def data_layers(self):
    """Collect every data layer in the current network proto.

    Walks ``self.proto().layers``, resolves each entry through
    ``v2_layer.get_layer``, and keeps only those whose ``layer_type``
    is ``LayerType.DATA``.

    :return: dict mapping layer name -> v2 Layer instance
    """
    resolved = (
        (cfg.name, v2_layer.get_layer(cfg.name))
        for cfg in self.proto().layers
    )
    return {
        name: layer
        for name, layer in resolved
        if layer and layer.layer_type == conf_helps.LayerType.DATA
    }
def DRelu_scaled(self, x_in=None, name=None, shape=None, only_func=False, **kwargs):
    """Build a DRelu_scaled layer and apply (or return) its perform callable.

    :param x_in: input tensor/expression fed to the layer (ignored when
        ``only_func`` is true)
    :param name: optional layer name
    :param shape: optional layer shape
    :param only_func: when true, return the unbound ``perform`` callable
        instead of invoking it on ``x_in``
    :return: the layer output, or the ``perform`` callable itself
    """
    # Only one branch of the original conditional ever ran, so the
    # layer is constructed exactly once either way.
    layer = get_layer(self, name=name, layer_type="DRelu_scaled", shape=shape, **kwargs)
    if only_func:
        return layer.perform
    return layer.perform(x_in)
def Wmatrix(self, name=None, shape=None, **kwargs):
    """Build a Wmatrix layer and evaluate it with no inputs.

    :param name: optional layer name
    :param shape: optional layer shape
    :return: result of the layer's ``perform()`` call
    """
    layer = get_layer(self, name=name, layer_type="Wmatrix", shape=shape, **kwargs)
    return layer.perform()
def attention(self, x_in=None, h_in=None, name=None, shape=None, **kwargs):
    """Build an attention layer and apply it to the given inputs.

    :param x_in: primary input (presumably the attended-over sequence —
        TODO confirm against the attention layer implementation)
    :param h_in: secondary input (presumably the query/hidden state)
    :param name: optional layer name
    :param shape: optional layer shape
    :return: result of the layer's ``perform(x_in, h_in)`` call
    """
    layer = get_layer(self, name=name, layer_type="attention", shape=shape, **kwargs)
    return layer.perform(x_in, h_in)
def embedding(self, x_in=None, name=None, shape=None, **kwargs):
    """Build an embedding layer and apply it to ``x_in``.

    :param x_in: input fed to the embedding layer
    :param name: optional layer name
    :param shape: optional layer shape
    :return: result of the layer's ``perform(x_in)`` call
    """
    layer = get_layer(self, name=name, layer_type="embedding", shape=shape, **kwargs)
    return layer.perform(x_in)
def lstm_flatten(self, x_in=None, name=None, shape=None, return_seq=True, **kwargs):
    """Build an lstm_flatten layer and apply it to ``x_in``.

    :param x_in: input fed to the layer
    :param name: optional layer name
    :param shape: optional layer shape
    :param return_seq: forwarded as the second positional argument of
        ``perform`` (presumably whether to return the full sequence —
        TODO confirm against the lstm_flatten implementation)
    :return: result of the layer's ``perform(x_in, return_seq)`` call
    """
    layer = get_layer(self, name=name, layer_type="lstm_flatten", shape=shape, **kwargs)
    return layer.perform(x_in, return_seq)
def gru_seq(self, x_in=None, rec_in=None, name=None, shape=None, **kwargs):
    """Build a gru_seq layer and apply it to the given inputs.

    :param x_in: primary input fed to the layer
    :param rec_in: recurrent input (presumably the previous hidden
        state — TODO confirm against the gru_seq implementation)
    :param name: optional layer name
    :param shape: optional layer shape
    :return: result of the layer's ``perform(x_in, rec_in)`` call
    """
    layer = get_layer(self, name=name, layer_type="gru_seq", shape=shape, **kwargs)
    return layer.perform(x_in, rec_in)
def h_softmax(self, x_in=None, y_in=None, name=None, shape=None, **kwargs):
    """Build a hierarchical-softmax layer and apply it to the inputs.

    :param x_in: input features fed to the layer
    :param y_in: target input (presumably labels for the hierarchical
        softmax loss — TODO confirm against the h_softmax implementation)
    :param name: optional layer name
    :param shape: optional layer shape
    :return: result of the layer's ``perform(x_in, y_in)`` call
    """
    layer = get_layer(self, name=name, layer_type="h_softmax", shape=shape, **kwargs)
    return layer.perform(x_in, y_in)
def fc(self, x_in=None, name=None, shape=None, **kwargs):
    """Build a fully-connected layer and apply it to ``x_in``.

    :param x_in: input fed to the layer
    :param name: optional layer name
    :param shape: optional layer shape
    :return: result of the layer's ``perform(x_in)`` call
    """
    layer = get_layer(self, name=name, layer_type="fc", shape=shape, **kwargs)
    return layer.perform(x_in)