Example #1
    def __init__(self,
                 input_dim,
                 output_dim,
                 hidden_dim,
                 max_len_out=None,
                 loss="nl",
                 optimizer="ada"):
        Sequential.__init__(self,
                            use_mask=True,
                            input_value_type="int32",
                            prediction_type="vector",
                            prediction_value_type='int32')
        self.option[Option.MAX_LEN_OUT] = max_len_out
        self.option[Option.INPUT_DIM] = input_dim
        self.option[Option.OUTPUT_DIM] = output_dim
        self.option[Option.IS_SEQUENCE_WORK] = True

        self.len_out = tensor.iscalar('lo')  # symbolic output-sequence length
        l0 = Reverse()  # feed the input sequence to the encoder in reverse
        l1 = Embedding(input_dim, hidden_dim)
        l2 = LSTM(hidden_dim, hidden_dim)  # encoder: final state summarizes the input
        l3 = Repeat(times=self.len_out)  # tile the encoder state once per output step
        l4 = LSTM(hidden_dim,
                  hidden_dim,
                  return_sequences=True,
                  need_swap_axis=True)  # decoder; the repeated state needs its axes swapped
        l5 = TimeDitributed(
            core_layer=Dense(hidden_dim, output_dim, activation="softmax"))

        self.add_layer([l0, l1, l2, l3, l4, l5])
        self.compile(loss=loss, optimizer=optimizer)
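
A minimal NumPy sketch of what the Reverse/Repeat pair above does; the semantics are an assumption read off the layer names (Reverse flips the time axis of the encoder input, Repeat tiles the encoder's final state once per output step):

import numpy as np

x = np.arange(5)                      # token ids of one input sequence
x_rev = x[::-1]                       # Reverse(): the encoder reads the input backwards
h_enc = np.random.randn(8)            # final encoder state, hidden_dim = 8
len_out = 3                           # value bound to the 'lo' scalar at run time
h_rep = np.tile(h_enc, (len_out, 1))  # Repeat(times=len_out): (3, 8) decoder input
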
Example #2
    def __init__(self,
                 input_dim,
                 output_dim,
                 hidden_dim,
                 dep=2,
                 loss="nl",
                 optimizer="ada",
                 we_dict=None,
                 map=None):
        Sequential.__init__(self,
                            use_mask=True,
                            input_value_type="int32",
                            prediction_type="vector",
                            prediction_value_type='int32')

        self.option[Option.LOSS] = loss
        self.option[Option.OPTIMIZER] = optimizer
        l1 = Embedding(input_dim, hidden_dim, we_dict=we_dict, map=map)
        self.add_layer(l1)
        for _ in range(dep):  # stack `dep` LSTM layers
            l2 = LSTM(hidden_dim, hidden_dim, return_sequences=True)
            self.add_layer(l2)

        l4 = TimeDitributed(
            core_layer=Dense(hidden_dim, output_dim, activation="softmax"))

        self.add_layer(l4)
        self.option[Option.IS_SEQUENCE_WORK] = True
        self.compile()
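
The TimeDitributed(Dense(..., activation="softmax")) head used above amounts to one weight matrix shared across all timesteps, giving a label distribution per step; a self-contained NumPy illustration (not this library's code):

import numpy as np

def softmax(z):
    e = np.exp(z - z.max(axis=-1, keepdims=True))
    return e / e.sum(axis=-1, keepdims=True)

H = np.random.randn(6, 50)   # LSTM outputs: (timesteps, hidden_dim)
W = np.random.randn(50, 10)  # one Dense weight matrix, shared across time
b = np.zeros(10)
probs = softmax(H @ W + b)   # (timesteps, output_dim): per-step label distributions
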
Example #3
    def __init__(self,
                 input_dim1,
                 input_dim2,
                 output_dim,
                 hidden_dim1,
                 hidden_dim2,
                 dep=2,
                 loss="nl",
                 optimizer="ada",
                 we_dict1=None,
                 we_dict2=None,
                 map1=None,
                 map2=None,
                 use_noise=True):
        Sequential.__init__(self,
                            use_mask=True,
                            input_value_type="int32",
                            prediction_type="vector",
                            prediction_value_type='int32',
                            use_noise=use_noise)

        self.extra_input = tensor.matrix('x_extra', dtype="int32")
        self.extra_output = None

        self.option[Option.LOSS] = loss
        self.option[Option.OPTIMIZER] = optimizer
        l1 = Embedding(input_dim1, hidden_dim1, we_dict=we_dict1, map=map1)
        l2 = Embedding(input_dim2, hidden_dim2, we_dict=we_dict2, map=map2)
        l3 = Merge(layers=[l1, l2])  # combine the two embedded streams
        self.add_layer([l1, l2, l3])

        l4 = Dropout(hidden_dim1 + hidden_dim2, theano_rng=RandomStreams(128))
        self.add_layer(l4)

        for _ in range(dep):
            l5 = LSTM(hidden_dim1 + hidden_dim2,
                      hidden_dim1 + hidden_dim2,
                      return_sequences=True)
            self.add_layer(l5)

        l6 = Dropout(hidden_dim1 + hidden_dim2, theano_rng=RandomStreams(128))
        self.add_layer(l6)

        l7 = TimeDitributed(core_layer=Dense(hidden_dim1 + hidden_dim2,
                                             output_dim,
                                             activation="softmax"))
        self.add_layer(l7)

        self.option[Option.IS_SEQUENCE_WORK] = True
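
Merge's behavior is not shown here, but every downstream layer is hidden_dim1 + hidden_dim2 wide, which implies concatenation along the feature axis; a NumPy sketch with illustrative dimensions:

import numpy as np

e1 = np.random.randn(5, 100)  # stream 1: 5 timesteps, hidden_dim1 = 100
e2 = np.random.randn(5, 20)   # stream 2: 5 timesteps, hidden_dim2 = 20
merged = np.concatenate([e1, e2], axis=-1)  # (5, 120), fed to the LSTM stack
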
Example #4
    def __init__(self, input_dim, hidden_dim, loss="ce", optimizer="ada"):

        Sequential.__init__(self, prediction_type="matrix")
        l1 = Dense(input_dim=input_dim,
                   output_dim=hidden_dim,
                   activation="sigmoid")
        l2 = Dense(
            input_dim=hidden_dim,
            output_dim=input_dim,
            activation="sigmoid",
        )
        self.add_layer(l1)
        self.add_layer(l2)
        self.compile(loss=loss, optimizer=optimizer)
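
For reference, loss="ce" on sigmoid outputs is conventionally the per-feature binary cross-entropy between the input and its reconstruction; whether this library's "ce" matches this exactly is an assumption:

import numpy as np

def reconstruction_ce(x, x_hat, eps=1e-7):
    # Sum binary cross-entropy over features, average over the batch.
    x_hat = np.clip(x_hat, eps, 1 - eps)
    return -(x * np.log(x_hat) + (1 - x) * np.log(1 - x_hat)).sum(axis=-1).mean()
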
Example #5
    def __init__(self, input_dim, hidden_dim, loss="ce", optimizer="ada"):

        Sequential.__init__(self, prediction_type="matrix")
        self.add_layer(Input(input_dim=input_dim))
        for nd in hidden_dim:  # here hidden_dim is a list of layer widths
            self.add_layer(
                Dense(input_dim=self.layers[-1].option[Option.OUTPUT_DIM],
                      output_dim=nd,
                      activation="sigmoid"))
        self.add_layer(
            Dense(input_dim=self.layers[-1].option[Option.OUTPUT_DIM],
                  output_dim=input_dim,
                  activation="sigmoid"))
        self.compile(loss=loss, optimizer=optimizer)
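
Unlike Example #4, hidden_dim here is a list of layer widths: one sigmoid Dense layer per entry, plus a final layer mapping back to input_dim. A hypothetical instantiation (DeepAutoEncoder is a placeholder name for whatever class this __init__ belongs to):

# 784 -> 256 -> 64 -> 256 -> 784, all sigmoid layers
model = DeepAutoEncoder(input_dim=784, hidden_dim=[256, 64, 256])
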
Example #6
    def __init__(self,
                 input_dim,
                 output_dim,
                 hidden_dim,
                 dep=2,
                 loss="nl",
                 optimizer="ada",
                 we_dict=None,
                 map=None,
                 use_noise=True):
        '''
        :param input_dim: vocabulary size fed to the embedding layer
        :param output_dim: number of output labels
        :param hidden_dim: embedding and LSTM hidden size
        :param dep: number of stacked LSTM layers
        :param loss: name of the loss function
        :param optimizer: name of the optimizer
        :param we_dict: word embedding dictionary
        :param map: mapping from word to index
        :param use_noise: whether dropout noise is applied
        :return:
        '''
        Sequential.__init__(self,
                            use_mask=True,
                            input_value_type="int32",
                            prediction_type="vector",
                            prediction_value_type='int32',
                            use_noise=use_noise)

        self.option[Option.LOSS] = loss
        self.option[Option.OPTIMIZER] = optimizer
        l1 = Embedding(input_dim, hidden_dim, we_dict=we_dict,
                       map=map)  # first layer is an embedding layer
        self.add_layer(l1)

        l2 = Dropout(hidden_dim, theano_rng=RandomStreams(128))
        self.add_layer(l2)
        for _ in range(dep):
            l3 = LSTM(hidden_dim, hidden_dim, return_sequences=True)
            self.add_layer(l3)

        l4 = Dropout(hidden_dim, theano_rng=RandomStreams(128))
        self.add_layer(l4)
        l5 = TimeDitributed(
            core_layer=Dense(hidden_dim, output_dim, activation="softmax"))

        self.add_layer(l5)
        self.option[Option.IS_SEQUENCE_WORK] = True
Example #7
    def __init__(self, input_dim, hidden_dim, loss="ce", optimizer="sgd"):

        Sequential.__init__(self, prediction_type="matrix")
        rng = np.random.RandomState(123)
        w_lamda = 4 * np.sqrt(6. / (input_dim + hidden_dim))  # Glorot-style bound for sigmoid units
        l0 = NoisyInput(input_dim=input_dim, corruption_level=0.)  # denoising input (no corruption here)
        l1 = Dense(input_dim=input_dim,
                   output_dim=hidden_dim,
                   activation="sigmoid",
                   rng=rng,
                   w_lamda=w_lamda)
        l2 = Dense(input_dim=hidden_dim,
                   output_dim=input_dim,
                   activation="sigmoid")
        self.add_layer(l0)
        self.add_layer(l1)
        self.add_layer(l2)
        self.compile(loss=loss, optimizer=optimizer)
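
The w_lamda above is the classic Glorot uniform-initialization bound, scaled by 4 as recommended for sigmoid units; presumably Dense draws its weights uniformly from [-w_lamda, w_lamda], as in this sketch:

import numpy as np

rng = np.random.RandomState(123)
fan_in, fan_out = 784, 500  # illustrative input_dim and hidden_dim
bound = 4 * np.sqrt(6. / (fan_in + fan_out))
W = rng.uniform(low=-bound, high=bound, size=(fan_in, fan_out))
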
Example #8
    def __init__(self,
                 input_dim,
                 output_dim,
                 hidden_dim,
                 loss="nl",
                 optimizer="ada"):
        Sequential.__init__(self,
                            use_mask=True,
                            input_value_type="int64",
                            hidden_dim=hidden_dim,
                            loss=loss,
                            optimizer=optimizer)
        l1 = Embedding(input_dim, hidden_dim)
        l2 = LSTM(hidden_dim, hidden_dim, return_sequences=True)
        l3 = MeanPooling()  # collapse the output sequence to one vector
        l4 = Dropout(hidden_dim, theano_rng=RandomStreams(128))
        l5 = Dense(hidden_dim, output_dim, activation="softmax")
        self.add_layer([l1, l2, l3, l4, l5])
        self.compile()
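
A NumPy sketch of what MeanPooling presumably computes over the LSTM's returned sequence; since the model is built with use_mask=True, padded steps are likely excluded from the average (that masking detail is an assumption):

import numpy as np

h = np.random.randn(7, 4)  # LSTM outputs: (timesteps, hidden_dim)
mask = np.array([1, 1, 1, 1, 1, 0, 0], dtype=float)  # last two steps are padding
pooled = (h * mask[:, None]).sum(axis=0) / mask.sum()  # (hidden_dim,) sequence vector
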
Example #9
    def __init__(self,
                 input_dim1,
                 input_dim2,
                 output_dim1,
                 output_dim2,
                 hidden_dim1,
                 hidden_dim2,
                 semantic_label_map,
                 dep=2,
                 loss="nl",
                 optimizer="ada",
                 we_dict1=None,
                 we_dict2=None,
                 map1=None,
                 map2=None,
                 use_noise=True):
        Sequential.__init__(self,
                            use_mask=True,
                            input_value_type="int32",
                            prediction_type="vector",
                            prediction_value_type='int32',
                            use_noise=use_noise)

        self.extra_input = tensor.matrix('x_extra', dtype="int32")
        self.extra_output = None
        self.extra_prediction = tensor.ivector('ye')
        self.extra_gold = tensor.ivector('ge')

        self.option[Option.LOSS] = loss
        self.option[Option.OPTIMIZER] = optimizer
        l1 = Embedding(input_dim1, hidden_dim1, we_dict=we_dict1, map=map1)

        l2 = Embedding(input_dim2, hidden_dim2, we_dict=we_dict2, map=map2)

        self.add_layer([l1, l2])

        l11 = Dropout(hidden_dim1, theano_rng=RandomStreams(128))
        self.add_layer(l11)

        l21 = Dropout(hidden_dim2, theano_rng=RandomStreams(128))
        self.add_layer(l21)

        l4 = LSTM(hidden_dim1, hidden_dim1, return_sequences=True)

        l5 = LSTM(hidden_dim2, hidden_dim2, return_sequences=True)

        self.add_layer([l4, l5])
        l22 = Dropout(hidden_dim2, theano_rng=RandomStreams(128))
        self.add_layer(l22)
        l6 = TimeDitributed(
            core_layer=Dense(hidden_dim2, output_dim2, activation="softmax"))

        l12 = Dropout(hidden_dim1, theano_rng=RandomStreams(128))
        self.add_layer(l12)

        l7 = SoftmaxHybrid(hidden_dim1, hidden_dim2, output_dim1,
                           semantic_label_map)

        l8 = TimeDitributedHybrid(core_layer=l7)
        self.add_layer([l6, l8])

        self.option[Option.IS_SEQUENCE_WORK] = True