예제 #1
0
    def __init__(self, nb_classes, resnet_layers, input_shape, weights):
        """Instantiate a PSPNet, preferring cached Keras files on disk.

        Downloads the published weights when no cached file exists, then
        either deserializes the cached model or rebuilds it from scratch
        and imports npy weights.
        """
        self.input_shape = input_shape
        self.nb_classes = nb_classes
        weights_dir = join("..", "weights", "keras")
        json_path = join(weights_dir, weights + ".json")
        h5_path = join(weights_dir, weights + ".h5")

        # Fetch the published weights only when neither cached file exists.
        if not (isfile(json_path) or isfile(h5_path)):
            download_weights(weights)

        if isfile(json_path) and isfile(h5_path):
            print("Keras model & weights found, loading...")
            with open(json_path, 'r') as file_handle:
                try:
                    self.model = model_from_json(file_handle.read())
                except ValueError as err:
                    # A model serialized under one Python major version may not
                    # deserialize under another ("bad marshal data"); see
                    # https://github.com/fchollet/keras/issues/7440
                    print("Couldn't import model from json because it was build using a different python version: %s" % err)
                    print("Rebuilding pspnet model ...")
                    self.model = build_pspnet(nb_classes=nb_classes,
                                              resnet_layers=resnet_layers,
                                              input_shape=self.input_shape)
                    # Re-cache the architecture under the running Python version.
                    print("Saving pspnet to disk ...")
                    with open(json_path, 'w') as out_handle:
                        out_handle.write(self.model.to_json())
            self.model.load_weights(h5_path)
        else:
            print("No Keras model & weights found, import from npy weights.")
            self.model = build_pspnet(nb_classes=nb_classes,
                                      resnet_layers=resnet_layers,
                                      input_shape=self.input_shape)
            self.set_npy_weights(weights)
예제 #2
0
def update_weights_json_to_python36p(dataset):
    """Re-serialize a PSPNet architecture to JSON under the running Python.

    Builds the network for the given dataset and overwrites its cached
    JSON architecture file, so the file can be loaded by this Python
    version (Keras JSON written by another major version may not load).

    Parameters
    ----------
    dataset : str
        Either 'cityscapes' or 'ade20k'; any other value is a no-op.
    """
    # Per-dataset build configuration:
    # (nb_classes, resnet depth, input side length, cached JSON path).
    configs = {
        'cityscapes': (19, 101, 713,
                       'weights/keras/pspnet101_cityscapes.json'),
        'ade20k': (150, 50, 473,
                   'weights/keras/pspnet50_ade20k.json'),
    }
    if dataset not in configs:
        return
    nb_classes, resnet_layers, side, json_path = configs[dataset]
    model = layers.build_pspnet(nb_classes=nb_classes,
                                resnet_layers=resnet_layers,
                                input_shape=(side, side),
                                activation='softmax')
    with open(json_path, 'w') as json_file:
        json_file.write(model.to_json())
예제 #3
0
    def __init__(self, nb_classes, resnet_layers, input_shape, weights):
        """Instantiate a PSPNet, using cached Keras artifacts when present."""
        self.input_shape = input_shape
        base = join("weights", "keras", weights)
        json_path = base + ".json"
        h5_path = base + ".h5"

        cached = isfile(json_path) and isfile(h5_path)
        if not cached:
            # No cached architecture/weights pair: rebuild and import npy weights.
            print("No Keras model & weights found, import from npy weights.")
            self.model = layers.build_pspnet(nb_classes=nb_classes,
                                             resnet_layers=resnet_layers,
                                             input_shape=self.input_shape)
            self.set_npy_weights(weights)
        else:
            print("Keras model & weights found, loading...")
            with open(json_path, 'r') as file_handle:
                self.model = model_from_json(file_handle.read())
            self.model.load_weights(h5_path)
예제 #4
0
 def __init__(self, nb_classes, resnet_layers, input_shape):
     """Build a PSPNet and load the bundled VOC2012 Keras weights."""
     self.input_shape = input_shape
     build_kwargs = dict(nb_classes=nb_classes,
                         layers=resnet_layers,
                         input_shape=input_shape)
     self.model = layers.build_pspnet(**build_kwargs)
     print("Load pre-trained weights")
     self.model.load_weights("weights/keras/pspnet101_voc2012.h5")
예제 #5
0
    def __init__(self, params, checkpoint=None):
        """Instantiate a PSPNet from a checkpoint, a cached model, or npy weights."""
        print("params %s" % params)
        print("checkpoint %s" % checkpoint)
        self.input_shape = params['input_shape']

        # A full training checkpoint takes precedence over any cached model.
        if checkpoint is not None:
            print("Loading from checkpoint %s" % checkpoint)
            self.model = load_model(checkpoint)
            return

        # Otherwise look for a cached compiled Keras model on disk.
        model_path = join(
            "weights", "keras",
            params['name'] + "_" + params['activation'] + ".hdf5")
        if isfile(model_path):
            print("Cached Keras model found, loading %s" % model_path)
            self.model = load_model(model_path)
            return

        # Nothing cached: build the network and import npy weights.
        print("No Keras model found, import from npy weights.")
        self.model = layers.build_pspnet(
            nb_classes=params['nb_classes'],
            resnet_layers=params['resnet_layers'],
            input_shape=params['input_shape'],
            activation=params['activation'])
        self.set_npy_weights(params['name'], model_path)
예제 #6
0
    def __init__(self, nb_classes, resnet_layers, input_shape, weights):
        """Instantiate a PSPNet; `weights` selects cached files or a saved model."""
        self.input_shape = input_shape

        # Channel means of the ImageNet-pretrained ResNet backbone (RGB order).
        self.data_mean = np.array([[[123.68, 116.779, 103.939]]])

        if 'pspnet' not in weights:
            # Anything else is treated as a path to a complete saved model.
            print('Load pre-trained weights')
            self.model = load_model(weights)
            return

        json_path = join("weights", "keras", weights + ".json")
        h5_path = join("weights", "keras", weights + ".h5")
        if os.path.isfile(json_path) and os.path.isfile(h5_path):
            print("Keras model & weights found, loading...")
            # The custom Interp layer must be in scope for deserialization.
            with CustomObjectScope({'Interp': layers.Interp}):
                with open(json_path, 'r') as file_handle:
                    self.model = model_from_json(file_handle.read())
            self.model.load_weights(h5_path)
        else:
            print(
                "No Keras model & weights found, import from npy weights.")
            self.model = layers.build_pspnet(nb_classes=nb_classes,
                                             resnet_layers=resnet_layers,
                                             input_shape=self.input_shape)
            self.set_npy_weights(weights)
예제 #7
0
 def __init__(self, nb_classes, resnet_layers, input_shape):
     """Construct the PSPNet graph and load the VOC2012 Keras weights."""
     self.input_shape = input_shape
     self.model = layers.build_pspnet(
         nb_classes=nb_classes, layers=resnet_layers, input_shape=input_shape)
     print("Load pre-trained weights")
     self.model.load_weights("weights/keras/pspnet101_voc2012.h5")
예제 #8
0
def predict(datadir, logdir, input_size, nb_classes, resnet_layers, batchsize,
            weights, initial_epoch, pre_trained, sep):
    """Run the model on one validation batch and visualize the predictions.

    Loads the model either from a saved Keras file (``weights``) or by
    building the network and importing ``pre_trained`` npy weights, then
    shows the first validation image overlaid with the predicted masks.
    """
    # BUG FIX: branch on the `weights` parameter instead of the module-level
    # `args`, which is not guaranteed to exist and silently ignored the
    # argument.
    if weights:
        model = load_model(weights)
    else:
        model = layers.build_pspnet(nb_classes=nb_classes,
                                    resnet_layers=resnet_layers,
                                    input_shape=input_size)
        # (Dead `if False:` branch loading a fixed checkpoint removed.)
        set_npy_weights(pre_trained, model)
    train_generator, val_generator = data_generator_s31(datadir=datadir,
                                                        batch_size=batchsize,
                                                        input_size=input_size,
                                                        nb_classes=nb_classes,
                                                        separator=sep)
    # ImageNet channel means used to center the input before prediction.
    DATA_MEAN = np.array([[[123.68, 116.779, 103.939]]])
    # BUG FIX: keep the fetched batch — `batch` was referenced below but
    # never defined, causing a NameError at plot time.
    batch = next(iter(val_generator))
    img = np.array(batch[0])[0, ..., ::-1]  # first image, channels reversed
    img = img - DATA_MEAN
    img = img[:, :, ::-1]  # reverse channels back for the network
    # BUG FIX: astype returns a new array; the result was being discarded.
    img = img.astype('float32')
    a = model.predict_on_batch(img[None, ...])
    plt.imshow(batch[0][0, ...].astype('uint8')[..., ::-1])
    plt.imshow(a[0, ..., 1] > 0.5, alpha=0.4)
    plt.show()
    plt.imshow(
        model.predict_on_batch(batch[0] - DATA_MEAN[None, ..., ::-1]).argmax(
            axis=-1)[0, ...])
    plt.show()
예제 #9
0
    def __init__(self,
                 nb_classes,
                 resnet_layers,
                 input_shape,
                 weights,
                 path="./weights"):
        """Instantiate a PSPNet plus a small averaging graph for flip inference.

        Parameters
        ----------
        nb_classes : int
            Number of output classes (used only when rebuilding the model).
        resnet_layers : int
            ResNet backbone depth (used only when rebuilding the model).
        input_shape : tuple
            Spatial input size of the network.
        weights : str
            Base name of the cached Keras model/weights files.
        path : str
            NOTE(review): this argument is ignored — it is unconditionally
            overwritten below with a path derived from this source file.
        """
        self.input_shape = input_shape
        # Resolve the weights directory relative to this file, discarding
        # the `path` argument (see NOTE above).
        path = realpath(split(realpath(__file__))[0] + "/../weights/")
        print(path)
        sys.stdout.flush()
        print("build extra model for speedup")

        # Side graph that averages a score map with its horizontally flipped
        # counterpart; shapes suggest (batch, 473, 473, 150) class scores —
        # TODO confirm against the caller.
        inp = K.placeholder(shape=(None, 473, 473, 150), name='a')
        inp2 = K.placeholder(shape=(None, 473, 473, 150), name='a_flip')
        inp_flip = K.reverse(inp2, axes=2)  # undo the flip along the width axis
        cmb = (inp + inp_flip) / 2
        self.M_comb = [cmb, inp, inp2]

        # Prefer cached Keras architecture + weights; otherwise rebuild and
        # import npy weights.
        json_path = join(path, "keras", weights + ".json")
        h5_path = join(path, "keras", weights + ".h5")
        if isfile(json_path) and isfile(h5_path):
            print("Keras model & weights found, loading...")
            with open(json_path, 'r') as file_handle:
                self.model = model_from_json(file_handle.read())

            # layer_flip = keras.layers.Lambda(lambda x: K.reverse(x,axes=2),name="extened_lda_rev")
            # # layer_split_1=keras.layers.Lambda(lambda x: K.slice(x, [0,0,0,0], [K.shape(x)[0]/2,K.shape(x)[1],K.shape(x)[2],K.shape(x)[3]]))
            # # layer_split_2=keras.layers.Lambda(lambda x: K.slice(x, [K.shape(x)[0]/2,0,0,0], [K.shape(x)[0]/2,K.shape(x)[1],K.shape(x)[2],K.shape(x)[3]]))

            # layer_split_1= keras.layers.Lambda(lambda x: x[:K.shape(x)[0]//2,:,:,:], output_shape=lambda x:x ,name="extened_lda_s1")
            # layer_split_2= keras.layers.Lambda(lambda x: x[K.shape(x)[0]//2:,:,:,:], output_shape=lambda x:x ,name="extened_lda_s2")
            # layer_dev=keras.layers.Lambda(lambda x: x/2, output_shape=lambda x:x,name="extened_lda_dev" )
            # model_in=self.model.get_layer(name="input_1").input
            # model_out=self.model.get_layer(name="activation_58").output

            # model_out_normal=layer_split_1(model_out)
            # model_out_flip=layer_flip(layer_split_2(model_out))
            # cmb_0=keras.layers.Add(name="extened_add")([model_out_normal,model_out_flip])
            # cmb=layer_dev(cmb_0)
            # self.model=Model(model_in,cmb)
            self.model.load_weights(h5_path)
            print("Keras model & weights found, loaded success")
        else:
            print("No Keras model & weights found, import from npy weights.")
            self.model = layers.build_pspnet(nb_classes=nb_classes,
                                             resnet_layers=resnet_layers,
                                             input_shape=self.input_shape)

            self.set_npy_weights(weights)
예제 #10
0
def train(datadir, logdir, input_size, nb_classes, resnet_layers, batchsize, weights, initial_epoch, pre_trained, sep):
    """Train a PSPNet on the data under ``datadir``.

    Loads the model from a saved Keras file (``weights``) when given,
    otherwise builds the network and imports ``pre_trained`` npy weights,
    then trains indefinitely with the configured callbacks.
    """
    # BUG FIX: branch on the `weights` parameter instead of the module-level
    # `args`, which silently ignored the argument passed in.
    if weights:
        model = load_model(weights)
    else:
        model = layers.build_pspnet(nb_classes=nb_classes,
                                    resnet_layers=resnet_layers,
                                    input_shape=input_size)
        set_npy_weights(pre_trained, model)
    # (Unused `dataset_len` computation removed.)
    train_generator, val_generator = data_generator_s31(
        datadir=datadir, batch_size=batchsize, input_size=input_size, nb_classes=nb_classes, separator=sep)
    model.fit_generator(
        generator=train_generator,
        epochs=100000, verbose=True, steps_per_epoch=500,
        callbacks=callbacks(logdir), initial_epoch=initial_epoch)
예제 #11
0
def train(datadir, logdir, input_size, nb_classes, resnet_layers, batchsize, weights, initial_epoch, pre_trained, sep):
    """Train a PSPNet on the data under ``datadir``.

    Loads the model from a saved Keras file (``weights``) when given,
    otherwise builds the network and imports ``pre_trained`` npy weights,
    then trains indefinitely with the configured callbacks.
    """
    # BUG FIX: branch on the `weights` parameter instead of the module-level
    # `args`, which silently ignored the argument passed in.
    if weights:
        model = load_model(weights)
    else:
        model = layers.build_pspnet(nb_classes=nb_classes,
                                    resnet_layers=resnet_layers,
                                    input_shape=input_size)
        set_npy_weights(pre_trained, model)
    # (Unused `dataset_len` computation removed.)
    train_generator, val_generator = data_generator_s31(
        datadir=datadir, batch_size=batchsize, input_size=input_size, nb_classes=nb_classes, separator=sep)
    model.fit_generator(
        generator=train_generator,
        epochs=100000, verbose=True, steps_per_epoch=500,
        callbacks=callbacks(logdir), initial_epoch=initial_epoch)
예제 #12
0
 def __init__(self, nb_classes, resnet_layers, input_shape, weights):
     """Instantiate a PSPNet from cached files, npy weights, or a saved model."""
     self.input_shape = input_shape
     json_path = join("weights", "keras", weights + ".json")
     h5_path = join("weights", "keras", weights + ".h5")

     if 'pspnet' not in weights:
         # Treat `weights` as a path to a complete saved Keras model.
         print('Load pre-trained weights')
         self.model = load_model(weights)
         return

     if os.path.isfile(json_path) and os.path.isfile(h5_path):
         print("Keras model & weights found, loading...")
         # The custom Interp layer must be registered for deserialization.
         with CustomObjectScope({'Interp': layers.Interp}):
             with open(json_path, 'r') as file_handle:
                 self.model = model_from_json(file_handle.read())
         self.model.load_weights(h5_path)
         return

     print("No Keras model & weights found, import from npy weights.")
     self.model = layers.build_pspnet(nb_classes=nb_classes,
                                      resnet_layers=resnet_layers,
                                      input_shape=self.input_shape)
     self.set_npy_weights(weights)
예제 #13
0
 def __init__(self, nb_classes, resnet_layers, input_shape, weights):
     """Set up a PSPNet; `weights` selects cached files or a saved model path."""
     self.input_shape = input_shape
     json_path = join("weights", "keras", weights + ".json")
     h5_path = join("weights", "keras", weights + ".h5")
     if 'pspnet' in weights:
         cached = os.path.isfile(json_path) and os.path.isfile(h5_path)
         if not cached:
             # Nothing cached: rebuild the graph and import npy weights.
             print("No Keras model & weights found, import from npy weights.")
             self.model = layers.build_pspnet(nb_classes=nb_classes,
                                              resnet_layers=resnet_layers,
                                              input_shape=self.input_shape)
             self.set_npy_weights(weights)
         else:
             print("Keras model & weights found, loading...")
             # Register the custom Interp layer for deserialization.
             with CustomObjectScope({'Interp': layers.Interp}):
                 with open(json_path, 'r') as file_handle:
                     self.model = model_from_json(file_handle.read())
             self.model.load_weights(h5_path)
     else:
         print('Load pre-trained weights')
         self.model = load_model(weights)
예제 #14
0
 def __init__(self):
     # Build the PSPNet graph with the builder's default configuration.
     self.model = layers.build_pspnet()
예제 #15
0
                cv2.resize(cv2.imread(PATH_Camvid + txt[i][1][0:][:-1]),
                           (713, 713)))[:, :, 0])
        n = n + 1
        s = str(i) + '/' + str(k)  # string for output
        print('{0}\r'.format(s), end='')  # just print and flush
        time.sleep(0.2)
    print('Loaded Camvid Images from', j, ' to ', k)
    return train_data, train_label


# Select the architecture: SegNet, or a ResNet-50 PSPNet with 713x713 input.
# NOTE(review): `model_names`, `nb_classes` and `pretrainded` (sic) are
# expected to be defined earlier in this script — confirm before running.
if model_names == 'segnet':
    model = segnet()
else:
    resnet_layers = 50
    model = layers.build_pspnet(nb_classes=nb_classes,
                                resnet_layers=resnet_layers,
                                input_shape=(713, 713))
# # Save model into Json
# model_json = model.to_json()
# with open("PSPNet_Resnet50.json", "w") as json_file:
#     json_file.write(model_json)
#     print ("Model Save to Json")
# autoencoder = segnet()
# print (autoencoder.summary())
# Optionally resume from a previously trained 2-class checkpoint.
if pretrainded == 1:
    model.load_weights(
        'Final_Training_2_Class_pspnet_coarse_lr_0.001_50nb_epochadam_2800_samples.h5'
    )
    print('Weights Loaded...')

# print(autoencoder.summary())
예제 #16
0
from keras.optimizers import SGD
import layers_builder as layers
from tensorflow.python.client import device_lib
from keras.utils import plot_model
from keras.utils import print_summary

if __name__ == '__main__':

    source_model = 'pspnet50_ade20k'
    input_shape = (473, 473)
    num_classes_new = 3
    model_name_new = 'pspnet50_all-train'
    dir_name = 'weights/keras'

    model = layers.build_pspnet(nb_classes=num_classes_new,
                                resnet_layers=50,
                                input_shape=input_shape)

    #LOAD OLD WEIGHTS
    #Ignore final layer with weights
    layerNameOld = model.layers[
        -3].name  #set to old name after loading weights, to plot model correctly
    model.layers[-3].name += '_custom'
    h5_path = '{}/{}.h5'.format(dir_name, source_model)
    model.load_weights(h5_path, by_name=True)
    model.layers[-3].name = layerNameOld
    #only set final layer trainable
    #for i in range(len(model.layers)):
    #    model.layers[i].trainable = False
    #model.layers[-3].trainable = True
    #compile again since changed trainable