Example #1
import os

import pandas as pd

from core.siamese import Siamese


# Smoke test on the tiny dummy dataset: build embeddings and predictions,
# then inspect the cached pickles written to the model's cache directory.
def test_embeddings():
    model = Siamese('dummy', input_shape=(6, 8, 3), embedding_size=3)

    model.make_embeddings('data_tiny/train',
                          'data_tiny/train.csv',
                          batch_size=1)
    emb = pd.read_pickle(os.path.join(model.cache_dir, 'embeddings.pkl'))
    print(emb)

    model.predict('data_tiny/train')
    pred = pd.read_pickle(os.path.join(model.cache_dir, 'predictions.pkl'))
    print(pred)
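

# Assumed entry point (not in the original snippet): run the smoke test when
# the file is executed directly.
if __name__ == '__main__':
    test_embeddings()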
Example #2
import sys
sys.path.insert(0, '../')

from core.siamese import Siamese

model = Siamese('mobilenet_like', input_shape=(672, 896, 3), embedding_size=128, train_hidden_layers=True)
#model.load_weights('../trained/final_weights.h5')
model.load_weights('trained/final_weights.h5')
model.train('../data/train', 'train.csv', meta_dir='../data/meta', epochs=40, batch_size=20, learning_rate=0.001, margin=1.0)
Example #3
from core.siamese import Siamese

model = Siamese('mobilenet_like',
                input_shape=(672, 896, 3),
                embedding_size=128,
                strategy='batch_all')
# model.load_weights('cache/cache-190208-093118/training/checkpoint-03.h5')
# TODO mark pretrained mobilenet
model.load_weights('cache/cache-190208-093118/training/checkpoint-03.h5')
model.train('data/train.csv',
            'data/train',
            epochs=20,
            batch_size=32,
            learning_rate=0.0001,
            margin=2.0)
Example #4
from core.siamese import Siamese


# model = Siamese('shallow_mnist', input_shape=(28, 28, 3), embedding_size=64, strategy='batch_all')
# model.load_weights('cache/cache-190213-131256/training/checkpoint-07.h5')
model = Siamese.restore_from_config('cache/cache-190220-203237/config.json')
model.make_embeddings('data_mnist/train', 'data_mnist/train.csv', batch_size=200)
model.predict('data_mnist/train_subset')
model.make_csv('cache/cache-190213-131256/idx_to_whales_mapping.npy')
# model.load_embeddings('cache/cache-190205-070856/embeddings.pkl')
# model.load_predictions('cache/cache-190205-072026/predictions.pkl')
# model.make_kaggle_csv('cache/cache-190205-065005/idx_to_whales_mapping.npy')
# model.draw_tsne(model.predictions.values[:, 1:])

Example #5
from core.siamese import Siamese

model = Siamese(input_shape=(224, 224, 3), n_classes=5004)
model.load_weights('data/new_whalizer.h5')

model.predict_new_whales('data/test', 'data/submission_no_new_whales.csv')
Example #6
from core.siamese import Siamese

model = Siamese('mobilenet_like',
                input_shape=(672, 896, 3),
                embedding_size=128,
                strategy='batch_hard')
model.load_weights('cache/cache-190213-095205/training/checkpoint-07.h5')
model.load_embeddings('cache/cache-190213-113426/embeddings.pkl')
# model.make_embeddings('data/train.csv', 'data/train', batch_size=32)
# model.predict('data/test')
model.predict('data/train_subset')
model.make_csv('cache/cache-190208-093118/idx_to_whales_mapping.npy')
Example #7
from core.siamese import Siamese

model = Siamese('shallow_mnist',
                input_shape=(28, 28, 3),
                embedding_size=64,
                strategy='batch_all')
#model.train('data_mnist/train.csv', 'data_mnist/train', 'C:/Users/Sergei/Desktop/humpback-whale-identification/data_mnist/meta',
#            epochs=10, batch_size=100, learning_rate=0.0001, margin=1.0)

model.train(
    'data_mnist/train',
    'data_mnist/train.csv',
    'C:/Users/Sergei/Desktop/humpback-whale-identification/data_mnist/meta',
    epochs=10,
    batch_size=100,
    learning_rate=0.0001,
    margin=0.5)
Example #8
from core.siamese import Siamese

model = Siamese('mobilenet_like',
                input_shape=(672, 896, 3),
                embedding_size=128)
model.load_weights('trained/final_weights.h5')

#model.make_embeddings('data/train', 'data/train.csv', batch_size=5, meta_dir='data/meta')
model.load_embeddings('trained/embeddings.pkl')

model.predict('data/test', meta_dir='data/meta')
#model.load_predictions('trained/predictions.pkl')

model.make_kaggle_csv('data/meta/idx_to_whales_mapping.npy')
Example #9
from core.siamese import Siamese

model = Siamese('mobilenet_like',
                input_shape=(672, 896, 3),
                embedding_size=128)
model.load_weights('model/mobilenet_imagenet.h5')
#model.load_weights('trained/final_weights.h5')

#model.train('data/train', 'data/split_train.csv', meta_dir='D:\\IdeaProjects\\whales\\data\\meta', epochs=20, batch_size=24, learning_rate=0.001)
model.train('data/train',
            'data/train.csv',
            meta_dir='data/meta',
            epochs=500,
            batch_size=25,
            learning_rate=0.0005,
            margin=1.0)
Example #10
import numpy as np
import pandas as pd

from utils.sequence import WhalesSequence
from core.siamese import Siamese

csv = 'val.csv'
#csv = 'train.csv'
mode = 'cos_angular'
#mode = 'classification'
input_shape = (224, 224, 3)
img_dir = '../data/train'

val = pd.read_csv(csv)
true_labels = val["Id"].values

if mode == 'classification':
    model = Siamese(input_shape=(224, 224, 3),
                    train_hidden_layers=True,
                    n_classes=8)
    model.load_weights('final_weights.h5')

    img_names = val['Image'].values
    bboxes = pd.read_pickle('../data/meta/bboxes.pkl').set_index('filename')
    whales_seq = WhalesSequence(img_dir,
                                bboxes=bboxes,
                                input_shape=input_shape,
                                x_set=img_names,
                                batch_size=1)
    pred = model.model.predict_generator(whales_seq, verbose=1)
    pred_labels = np.argmax(pred, axis=1).reshape(-1)

    for i, whale in enumerate(np.sort(np.unique(true_labels))):
        true_labels[np.where(true_labels == whale)] = i
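    # NOTE: the original snippet breaks off here and the 'cos_angular' branch
    # is not shown. The two lines below are an assumed continuation, not part
    # of the source: they score the classification branch by comparing the
    # predicted class indices with the ground-truth labels remapped above.
    accuracy = np.mean(pred_labels == true_labels.astype(int))
    print('top-1 accuracy:', accuracy)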
Example #11
import sys
sys.path.insert(0, '../')

from core.siamese import Siamese

model = Siamese('mobilenet_like', input_shape=(672, 896, 3), embedding_size=128)
model.load_weights('trained/final_weights.h5')

model.make_embeddings('../data/train', 'train.csv', mappings_filename='../data/meta/whales_to_idx_mapping.npy', batch_size=25)
#model.load_embeddings('trained/embeddings.pkl')

model.predict('../data/train', 'val.csv')
#model.predict('../data/train', 'train.csv')
Example #12
from core.siamese import Siamese


model = Siamese('resnet_like_33', input_shape=(384, 512, 3), embedding_size=128, strategy='batch_all')
model.load_weights('trained/checkpoint-05.h5')
model.load_embeddings('trained/embeddings.pkl')
model.load_predictions('trained/predictions.pkl')

model.make_kaggle_csv('trained/idx_to_whales_mapping.npy')


Example #13
from core.siamese import Siamese

model = Siamese('shallow_mnist',
                input_shape=(28, 28, 3),
                embedding_size=64,
                strategy='batch_all')
model.train('data_mnist/train.csv',
            'data_mnist/train',
            epochs=10,
            batch_size=100,
            learning_rate=0.0001,
            margin=1.0)
Example #14
from core.siamese import Siamese

# using softmax loss classification
# model = Siamese(input_shape=(224, 224, 3), n_classes=5004)
# model.load_weights('trained/final_weights.h5')
# model.predict('data/test')
# model.make_kaggle_csv('data/meta/idx_to_whales_mapping.npy')

# using cos_angular embeddings
model = Siamese(input_shape=(224, 224, 3),
                n_classes=5004,
                mode='cosface',
                train=False)
model.load_weights('trained/final_weights.h5')

model.make_embeddings('data/train',
                      'data/train.csv',
                      batch_size=25,
                      meta_dir='data/meta')
#model.load_embeddings('trained/embeddings.pkl')

model.predict_using_embeddings('data/test', meta_dir='data/meta')
#model.load_predictions('trained/predictions.pkl')

model.make_kaggle_csv('data/meta/idx_to_whales_mapping.npy')
Example #15
from core.siamese import Siamese

model = Siamese.restore_from_config('cache/cache-190226-111833/config.json')
model.make_embeddings('data/train', 'data/train.csv', batch_size=24)

# model.predict('data/train')
# model.make_csv('cache/cache-190225-233850/idx_to_whales_mapping.npy')

model.predict('data/test')
model.make_kaggle_csv('data/meta/idx_to_whales_mapping.npy')

Example #16
import sys
sys.path.insert(0, '../')

from core.siamese import Siamese

#model = Siamese(input_shape=(224, 224, 3), train_hidden_layers=False, n_classes=8, mode='classification')
model = Siamese(input_shape=(224, 224, 3),
                train_hidden_layers=True,
                n_classes=8,
                mode='arcface')
#model.load_weights('../trained/final_weights.h5')
model.load_weights('trained/final_weights.h5')
model.train('../data/train',
            'train.csv',
            meta_dir='../data/meta',
            epochs=70,
            batch_size=5,
            learning_rate=0.0001)
Example #17
# inference to get cos angle embeddings

import sys
sys.path.insert(0, '../')

from core.siamese import Siamese

model = Siamese(input_shape=(224, 224, 3),
                n_classes=8,
                mode='arcface',
                train=False)
model.load_weights('trained/final_weights.h5')

model.make_embeddings(
    '../data/train',
    'train.csv',
    mappings_filename='../data/meta/whales_to_idx_mapping.npy',
    batch_size=25)
#model.load_embeddings('trained/embeddings.pkl')

model.predict_using_embeddings('../data/train', 'val.csv')
#model.predict_using_embeddings('../data/train', 'train.csv')
Example #18
from core.siamese import Siamese


model = Siamese('shallow_mnist', input_shape=(28, 28, 3), embedding_size=64, strategy='batch_all')
model.load_weights('cache/cache-190213-131256/training/checkpoint-07.h5')
model.make_embeddings('data_mnist/train.csv', 'data_mnist/train', batch_size=200)
model.predict('data_mnist/train_subset')
model.make_csv('cache/cache-190213-131256/idx_to_whales_mapping.npy')
# model.load_embeddings('cache/cache-190205-070856/embeddings.pkl')
# model.load_predictions('cache/cache-190205-072026/predictions.pkl')
# model.make_kaggle_csv('cache/cache-190205-065005/idx_to_whales_mapping.npy')
# model.draw_tsne(model.predictions.values[:, 1:])

Example #19
from core.siamese import Siamese


model = Siamese('resnet_like_33', input_shape=(384, 512, 3), embedding_size=128, strategy='batch_all')
model.load_weights('trained/checkpoint-05.h5')
model.load_embeddings('trained/embeddings.pkl')
model.load_predictions('trained/predictions.pkl')

model.draw_tsne(model.predictions.values[:, 1:])