m_i, m_k, map_att, gtmap, score, y_true, y_pred, n_classes, batch_size = model(images)
    loss = loss_fun(m_i, m_k, map_att, gtmap, score, y_true, y_pred, n_classes, batch_size)
    print("Current TestLoss: {}".format(loss))
'''

# quick test: run this module directly

if __name__ == '__main__':
    # tf.compat.v1.enable_eager_execution()
    gpu = tf.config.experimental.list_physical_devices('GPU')
    print("Num GPUs Available: ", len(gpu))
    # path_root = os.path.abspath(os.path.dirname(__file__))
    database = DataSet('../basemodel')  # alternative roots: "/content/gdrive/My Drive/data", path_root

    # DS, DS_test = database.load_gpu()  # image_batch, label_batch
    DS = database.load(GPU=False, train=True, batch_size=4)
    # DS_test = database.load(GPU=False, train=False, batch_size = 32)

    modelaki = FinalModel()

    loss_fun = Loss().loss_MA
    opt_fun = tf.keras.optimizers.SGD(learning_rate=0.001, momentum=0.9)

    # ckpt = tf.train.Checkpoint(step=tf.Variable(1), optimizer=opt_fun, net=modelaki)
    # manager = tf.train.CheckpointManager(ckpt, path_root + '/tf_ckpts',
    #                                     max_to_keep=3)  # keep only the three most recent checkpoints
    # ckpt.restore(manager.latest_checkpoint)  # pickup training from where you left off

    train_loss = tf.keras.metrics.Mean(name='train_loss')
    train_accuracy = tf.keras.metrics.Accuracy(name='train_accuracy')
    test_loss = tf.keras.metrics.Mean(name='test_loss')
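
    # Minimal training-step sketch, assuming the model and loss_MA signatures
    # shown in the commented example at the top of this file (model(images)
    # returns the tuple that loss_MA consumes unchanged).
    def train_step(images):
        with tf.GradientTape() as tape:
            m_i, m_k, map_att, gtmap, score, y_true, y_pred, n_classes, batch_size = modelaki(images)
            loss = loss_fun(m_i, m_k, map_att, gtmap, score, y_true, y_pred, n_classes, batch_size)
        grads = tape.gradient(loss, modelaki.trainable_variables)
        opt_fun.apply_gradients(zip(grads, modelaki.trainable_variables))
        train_loss(loss)
        return loss

    # Example usage (assumes DS yields (image, label) batches):
    # for images, labels in DS:
    #     train_step(images)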
    # Example 2: restore the model from a checkpoint and run a forward pass
    #net = FinalModel()
    #new_root = tf.train.Checkpoint(net=net)
    #status = new_root.restore(tf.train.latest_checkpoint('./tf_ckpts/'))
    net = FinalModel()
    ckpt = tf.train.Checkpoint(step=tf.Variable(1, dtype=tf.int32),
                               optimizer=opt_fun,
                               net=net)
    ckpt.restore(tf.train.latest_checkpoint('./tf_ckpts/'))
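
    # Restore sanity-check sketch: report whether a checkpoint was actually
    # found at the path used above.
    latest_ckpt = tf.train.latest_checkpoint('./tf_ckpts/')
    if latest_ckpt:
        print("Restored from {}".format(latest_ckpt))
    else:
        print("No checkpoint found in ./tf_ckpts/, initializing from scratch.")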

    #DATA
    path_root = os.path.abspath(os.path.dirname(__file__))
    bird_data = DataSet("/Volumes/Watermelon")  # DataSet(path_root)
    phi_train = bird_data.get_phi(set=0)
    w = bird_data.get_w(alpha=1)  # (50*150)
    train_class_list, test_class_list = bird_data.get_class_split(mode="easy")
    train_ds = bird_data.load(GPU=False, train=True, batch_size=32)
    #test_ds = bird_data.load(GPU=False, train=False, batch_size=4) #.load_gpu(batch_size=4)
    PHI = bird_data.get_phi(set=0)  # same attribute matrix as phi_train above
    for im, label in train_ds:
        #im_path = "/Volumes/Watermelon/CUB_200_2011/CUB_200_2011/images/059.California_Gull/"
        #img = tf.io.read_file(im_path)
        #im = database.decode_img(img)
        # net(tf.expand_dims(im, 0), PHI) would run a single decoded image instead
        m0, m1, mask0, mask1, scores, phi, y_pred, C = net(im, PHI)
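        # Sanity-check sketch: print the shapes of a few outputs per batch.
        print("scores shape:", scores.shape, "y_pred shape:", y_pred.shape)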

    nu = 50   # number of unseen classes
    ns = 150  # number of seen classes
    W = tf.ones((nu, ns))               # placeholder weight matrix (unseen x seen)
    seen_classes = tf.ones((ns, 28))    # placeholder seen-class semantic vectors
    unseen_classes = tf.ones((nu, 28))  # placeholder unseen-class semantic vectors
    phi_test = bird_data.get_phi(set=1)