def _test_save_load(self, attention):
    """Round-trip a model containing *attention* through save/load.

    Builds Input -> Embedding -> Bi-LSTM -> attention -> Dense, saves the
    compiled model to a temporary .h5 file, reloads it with the layer's
    custom objects, and checks the reloaded model exposes the expected
    number of outputs (two when the layer also returns its weights).
    """
    inputs = keras.layers.Input(shape=(None,), name='Input')
    embedded = keras.layers.Embedding(
        input_dim=3,
        output_dim=5,
        mask_zero=True,
        name='Embedding',
    )(inputs)
    encoded = keras.layers.Bidirectional(
        keras.layers.LSTM(units=7, return_sequences=True),
        name='Bi-LSTM',
    )(embedded)

    if attention.return_attention:
        attended, att_weights = attention(encoded)
    else:
        attended = attention(encoded)
    softmax = keras.layers.Dense(units=2, activation='softmax', name='Softmax')(attended)

    losses = {'Softmax': 'sparse_categorical_crossentropy'}
    if attention.return_attention:
        model_outputs = [softmax, att_weights]
        # The attention-weight output needs its own loss to be trainable.
        losses[attention.name] = 'mse'
    else:
        model_outputs = softmax

    model = keras.models.Model(inputs=inputs, outputs=model_outputs)
    model.compile(optimizer='adam', loss=losses)

    # Randomized file name so concurrent test runs do not collide.
    model_path = os.path.join(tempfile.gettempdir(),
                              'keras_weighted_att_test_sl_%f.h5' % np.random.random())
    model.save(model_path)
    model = keras.models.load_model(model_path,
                                    custom_objects=Attention.get_custom_objects())
    model.summary(line_length=100)

    expected_outputs = 2 if attention.return_attention else 1
    self.assertEqual(expected_outputs, len(model.outputs))
def get_model(args):
    """Build or load the model named by ``args.model_id``.

    Two known attention models are reconstructed directly in code (a
    workaround so the attention layer can be visualized); any other id is
    treated as a directory holding ``model.json``. In every case the
    weights are then loaded from ``<model_id>/model.h5``.

    :param args: parsed CLI namespace; only ``model_id`` is read here.
    :return: a keras model with weights loaded, summary already printed.
    """
    modelname = args.model_id
    # Workaround for getting visualisation for attention layers.
    if modelname == "model_folk100k_melody_2lstm32_attention":
        # Trained on data shaped (100000, 64, 58).
        model = att_model(32, False, 2, True)
    elif modelname == "model_folk100k_melody_bi3lstm64_attention":
        model = att_model(64, True, 3, True)
    else:
        # Fix: read the JSON via a context manager — the original
        # open(...).read() leaked the file handle.
        with open(os.path.join(modelname, "model.json"), "r") as json_file:
            json_model = json_file.read()
        model = keras.models.model_from_json(
            json_model,
            custom_objects=SeqWeightedAttention.get_custom_objects())
    model.load_weights(os.path.join(modelname, "model.h5"))
    print(model.summary(line_length=100))
    return model
def get_model(args, dshape):
    """Create a new model or restore an existing one from its directory.

    With ``args.new`` set, a fresh architecture is generated from the CLI
    options, its JSON written to ``<model_dir>/model.json`` (with the
    architecture source copied alongside) and the epoch counters reset.
    Otherwise the model is rebuilt from the stored JSON and its weights
    reloaded from ``<model_dir>/model.h5``. Both paths compile the model
    with Adam / categorical cross-entropy before returning.

    :param args: parsed CLI namespace (``new``, ``layers``, ``bi``,
        ``att`` and ``cells`` are read here).
    :param dshape: training-data shape; ``dshape[1] - 1`` is the sequence
        length and ``dshape[2]`` the feature size fed to the architecture.
    :return: ``(compiled model, model_dir)`` tuple.
    """
    model_dir = get_model_dir(args)

    loss = 'categorical_crossentropy'
    optimizer = keras.optimizers.Adam(lr=0.005)
    model_json_path = os.path.join(model_dir, "model.json")

    if args.new:
        print('generating NEW model...')
        model = new_architecture(dshape[1] - 1, dshape[2], args.layers,
                                 args.bi, args.att, args.cells)
        # Keep a copy of the architecture source next to the model.
        shutil.copy('architecture.py', model_dir)
        print('storing model json in %s' % model_json_path)
        with open(model_json_path, "w") as json_file:
            json_file.write(model.to_json())
        # Fresh model: start epoch counting from scratch.
        delete_epoch_counters(model_dir)
    else:
        print('using existing model...')
        # Fix: read the JSON via a context manager — the original
        # open(...).read() leaked the file handle.
        with open(model_json_path, "r") as json_file:
            model = keras.models.model_from_json(
                json_file.read(),
                custom_objects=SeqWeightedAttention.get_custom_objects())

        model_weights_path = os.path.join(model_dir, "model.h5")
        print('loading existing weights from %s...' % model_weights_path)
        model.load_weights(model_weights_path)

    # Both branches compiled identically, so do it once here.
    model.compile(loss=loss, optimizer=optimizer)

    print(model.summary())

    return model, model_dir
def get_encode(text_list, token_dict, maxlen=100):
    """Tokenize each text with the BERT tokenizer and pad to *maxlen*.

    NOTE(review): the original ``def`` line and docstring opening were
    destroyed by extraction (stray "Exemple #4" / "0" lines stood in
    their place). The signature is reconstructed from the call site
    ``get_encode(text_list, token_dict)`` below, with ``maxlen`` exposed
    as a keyword defaulting to the value used there — confirm against
    the original source.

    :param text_list: iterable of strings to encode.
    :param token_dict: vocabulary mapping handed to ``Tokenizer``.
    :param maxlen: pad/truncate length applied to both outputs.
    :return: ``[X1, X2]`` — token-id and segment-id matrices.
    """
    X1 = []
    X2 = []
    tokenizer = Tokenizer(token_dict)
    for line in text_list:
        x1, x2 = tokenizer.encode(first=line)
        X1.append(x1)
        X2.append(x2)
    # Post-pad / post-truncate so every row shares a fixed length.
    X1 = sequence.pad_sequences(X1, maxlen=maxlen, padding='post', truncating='post')
    X2 = sequence.pad_sequences(X2, maxlen=maxlen, padding='post', truncating='post')
    return [X1, X2]
if __name__ == "__main__":
    # Demo: BERT-encode two sentences, embed them, and run the embeddings
    # through a saved model containing a SeqWeightedAttention layer.
    maxlen = 100
    text_list = [
        "TW 0:02 / 41:54 Mind Your Language Season 3 Episode 2 Who Loves Ya Baby? | Funny TV Show (GM)",
        "I have a dream",
    ]
    token_dict = get_token_dict(dict_path)
    X1, X2 = get_encode(text_list, token_dict)
    print(X1)
    wordvec = build_bert_model(X1, X2)
    print(wordvec)
    # (A YAML-based load path was tried here previously and abandoned.)
    print("loading weights")
    model = keras.models.load_model(
        "test_keras_bert4.h5",
        custom_objects=SeqWeightedAttention.get_custom_objects())
    result = model.predict(wordvec)
    print(result)
    del model