Code Example #1
File: train_hrf.py  Project: DanielGutmann/TextureDL
def main():
    parser = argparse.ArgumentParser(description='Train a model')
    parser.add_argument(
        '-load_path',
        type=str,
        help='Loads the initial model structure and weights from this location'
    )
    parser.add_argument('-debug',
                        action='store_true',
                        default=False,
                        help='use debug mode')

    dataFolder = os.getcwd() + '/hrf'
    #dataFolder = os.getcwd() + '/data10';
    modelFolder = dataFolder + '/Model100'
    args = parser.parse_args()
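    # network hyperparameters (nb_filter is defined here but not used below)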
    kernel_size = 7
    nb_layer = 7
    nb_filter = [10, 10]
    if hasattr(args, 'load_path') and args.load_path is not None:
        print("Loading Model from: " + args.load_path)
        fileName = args.load_path
        model = e.load_model(fileName)
        print("Model Loaded")
    else:
        print("Creating new model")
        model = create_model_seg(numLayers=nb_layer, kernel_size=kernel_size)
        #set training parameters
        sgd = opt.SGD(lr=0.000001, decay=0.0005, momentum=0.9, nesterov=True)
        model.compile(loss='mean_squared_error', optimizer=sgd)

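    # load training images and labels via the project's helper module `e` and run an initial evaluation pass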
    predicted, im, label, image_files = e.load_model_images_and_evaluate(
        model,
        dataFolderPath=dataFolder,
        labelFolder='hrf_splitted_gt_100',
        dataFolder='hrf_splitted_100')

    #recompile with the training optimizer
    sgd = opt.SGD(lr=0.0000001, decay=0.0005, momentum=0.9, nesterov=True)
    model.compile(loss='mean_squared_error', optimizer=sgd)

    #start training
    batchsize = 200
    nb_epoch = 100
    store_model_interval_in_epochs = 10
    model_file_prefix = 'm_layer_' + str(nb_layer) + 'kernel_' + str(
        kernel_size) + 'iter_'
    store_model_path = modelFolder + '/'
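    # train in rounds of store_model_interval_in_epochs epochs, saving a checkpoint after each round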
    steps = nb_epoch // store_model_interval_in_epochs
    for iter in range(steps):
        h = model.fit(im,
                      label,
                      batch_size=batchsize,
                      nb_epoch=store_model_interval_in_epochs)
        print("Storing model...")
        fileName = model_file_prefix + '_' + str(iter) + '.h5'
        model.save(store_model_path + fileName, overwrite=True)

    model.save(store_model_path + fileName, overwrite=True)
Code Example #2
def main():

    dataFolder = os.getcwd() + '/data600'
    modelFolder = dataFolder + '/latestModel'

    if hasattr(args, 'load_path') and args.load_path is not None:
        print("Loading Model from: " + args.load_path)
        modelFolder = args.load_path
        fileName = modelFolder + '/Keras_model_weights.h5'
        model = e.load_model(fileName)
        print("Model Loaded")
    else:
        print("Creating new model")
        model = e.create_model_seg()
        #set training parameters
        sgd = opt.SGD(lr=0.0001, decay=0.0005, momentum=0.9, nesterov=True)
        model.compile(loss='mean_squared_error', optimizer=sgd)

    predicted, im, label, image_files = e.load_model_images_and_evaluate(
        model, dataFolder)

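    # build a second model (create_model_seg(9), presumably a deeper variant) and copy convolution weights over from the loaded model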
    appendedModel = e.create_model_seg(9)
    numberOfLayer = len(model.layers)
    print('Number of Layers: ' + str(numberOfLayer))
    convType = (Convolution2D)
    for i in range(0, numberOfLayer - 1):
        #if(isinstance(type(model.layers[i]),convType)): # TODO review why not working
        #if(isinstance(type(model.layers[i]),Convolution2D)) :
        if ('Convolution2D' in str(type(model.layers[i]))):
            print('Copying weights')
            copy_weights(model, appendedModel, i)

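    # evaluate both the appended and the original model on the same data so their losses can be compared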
    topResults, bottomResults, im, label = e.load_images_and_evaluate1(
        appendedModel, args, dataFolder)

    sgd = opt.SGD(lr=0.00001, decay=0.0005, momentum=0.9, nesterov=True)
    appendedModel.compile(loss='mean_squared_error', optimizer=sgd)
    lossesForAppendedModel = appendedModel.evaluate(im, label, batch_size=10)
    lossesForOriginalModel = model.evaluate(im, label, batch_size=10)

    print(lossesForAppendedModel)
    print(lossesForOriginalModel)

    #start training
    nb_epoch = 50
    store_model_interval_in_epochs = 10
    model_file_prefix = 'Appended_model_weights'
    store_model_path = modelFolder
    steps = nb_epoch // store_model_interval_in_epochs
    for iter in range(steps):
        h = appendedModel.fit(im,
                              label,
                              batch_size=100,
                              nb_epoch=store_model_interval_in_epochs)
        print("Storing model...")
        fileName = model_file_prefix + '_' + str(iter) + '.h5'
        appendedModel.save(store_model_path + fileName, overwrite=True)

    appendedModel.save(store_model_path + fileName, overwrite=True)
Code Example #3
def main():

    #load/create model
    dataFolder = os.getcwd() + '/data_test_orient'
    modelFolder = dataFolder + '/Model'
    k = 10

    if hasattr(args, 'load_path') and args.load_path is not None:
        print("Loading Model from: " + args.load_path)
        strings = args.load_path.split('\\')
        modelName = strings[len(strings) - 1]
        modelName = modelName.split('.')[0]
        fileName = args.load_path
        model = e.load_model(fileName)
        print("Model Loaded")
    else:
        raise Exception('Specify model file -load_path <modelfile>')

    predicted, im, label, image_files = e.load_model_images_and_evaluate(model, dataFolder)
    print('Predicted Results Shape: ' + str(predicted.shape))
    mse = find_mse(label, predicted)
    sortedIndices = np.argsort(mse)
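    # argsort orders images by ascending MSE: the last k indices are the highest-error (top-k) images, the first k the lowest-error ones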
      

    topkFolderName = 'topk_predicted_' + modelName
    topkFullPath = dataFolder + '/' + topkFolderName
    if not os.path.exists(topkFullPath):
        create_results_folder(topkFullPath)
    topkIndices = sortedIndices[sortedIndices.size - k:sortedIndices.size]
    print(topkIndices)

    lm.save_results(predicted, image_files, topkIndices, topkFolderName)

    bottomkFolderName = 'bottomk_predicted_' + modelName
    bottomkFullPath = dataFolder + '/' + bottomkFolderName
    if not os.path.exists(bottomkFullPath):
        create_results_folder(bottomkFullPath)

    lm.save_results(predicted, image_files, sortedIndices[0:k], bottomkFolderName)

    #save predicted images for topk and bottomk

    topkFolderName = 'topk_im_' + modelName
    topkFullPath = dataFolder + '/' + topkFolderName
    if not os.path.exists(topkFullPath):
        create_results_folder(topkFullPath)
    topkIndices = sortedIndices[sortedIndices.size - k:sortedIndices.size]
    print(topkIndices)

    lm.save_results(im, image_files, topkIndices, topkFolderName)

    
    bottomkFolderName = 'bottomk_im_' + modelName
    bottomkFullPath = dataFolder + '/' + bottomkFolderName
    if not os.path.exists(bottomkFullPath):
        create_results_folder(bottomkFullPath)

    lm.save_results(im, image_files, sortedIndices[0:k], bottomkFolderName)
Code Example #4
def main():

    #load/create model
    dataFolderPath = os.getcwd() + '/data_test_orient'
    modelFolder = dataFolderPath + '/Model'

    if hasattr(args, 'load_path') and args.load_path is not None:
        print("Loading Model from: " + args.load_path)
        fileName = args.load_path
        model = e.load_model(fileName)
        print("Model Loaded")
    else:
        print("Creating new model")
        model = e.create_model_seg()
        #set training parameters
        sgd = opt.SGD(lr=0.0001, decay=0.0005, momentum=0.9, nesterov=True)
        model.compile(loss='mean_squared_error', optimizer=sgd)

    topResults, bottomResults, im, label = e.evaluate_top_and_bottom_k(
        model=model, dataFolderPath=dataFolderPath, k=1)
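    # topResults / bottomResults hold the image, label and prediction panels for the highest- and lowest-error samples (k=1)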

    #Display image, label and predicted output for the image with highest error
    top1Fig = plt.figure(1, figsize=(15, 8))
    plt.title('Input Image', loc='left')
    plt.title('Actual Label', loc='center')
    plt.title('Predicted Label', loc='right')
    plt.axis('off')

    disp_images(top1Fig, topResults[0, :, :, :], 1, 3, pad=1, cmap=cm.binary)

    #save the results figure
    resultsFolderName = modelFolder + '/results'
    if not os.path.exists(resultsFolderName):
        create_results_folder(resultsFolderName)

    resultFigFileName = resultsFolderName + '/' + 'top1' + '.png'
    plt.savefig(resultFigFileName)

    #Display image, label and predicted output for the image with lowest error
    bottom1Fig = plt.figure(2, figsize=(15, 8))
    plt.title('Input Image', loc='left')
    plt.title('Actual Label', loc='center')
    plt.title('Predicted Label', loc='right')
    plt.axis('off')

    disp_images(bottom1Fig,
                bottomResults[0, :, :, :],
                1,
                3,
                pad=1,
                cmap=cm.binary)

    #save the results figure
    resultFigFileName = resultsFolderName + '/' + 'bottom1' + '.png'
    plt.savefig(resultFigFileName)

    plt.show()
Code Example #5
def main():

    #load/create model
    dataFolder = os.getcwd() + '/data600'
    modelFolder = dataFolder + '/Model'

    if hasattr(args, 'load_path') and args.load_path is not None:
        print("Loading Model from: " + args.load_path)
        fileName = args.load_path
        model = e.load_model(fileName)
        print("Model Loaded")
    else:
        print("Creating new model")
        model = e.create_model_seg()
        #set training parameters
        sgd = opt.SGD(lr=0.0001, decay=0.0005, momentum=0.9, nesterov=True)
        model.compile(loss='mean_squared_error', optimizer=sgd)

    predicted, im, label, image_files = e.load_model_images_and_evaluate(
        model, dataFolder, 5)

    #Display image, and output of first layer

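    # build a Keras backend function that returns the output of the chosen layer; the learning_phase input is set to 0 (test mode) when it is called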
    layer = 1
    layer_out = model.layers[layer]
    inputs = [backend.learning_phase()] + model.inputs

    _convout1_f = backend.function(inputs, layer_out.output)

    def convout1_f(X):
        # The [0] is to disable the training phase flag
        return _convout1_f([0] + [X])

    imagesToVisualize1 = np.zeros([1, 400, 200, 1])
    imagesToVisualize1[0, :, :, 0] = np.squeeze(im[0, :, :])

    convout1 = np.squeeze(convout1_f(imagesToVisualize1))
    print('Output shape of layer ' + str(layer) + ': ' + str(convout1.shape))
    numImages = imagesToVisualize1.shape[0]
    if len(convout1.shape) == 3:
        numFilters = convout1.shape[2]
    else:
        numFilters = convout1.shape[3]

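    # copy each filter's activation map into one array so all feature maps can be shown in a single figure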
    imagesToVisualize = np.zeros([1, 400, 200, numFilters])

    filterNum = 0
    imageNum = 0
    position = 0
    print('Number of filters: ' + str(numFilters))
    while imageNum < numImages:
        while filterNum < numFilters:
            if len(convout1.shape) == 4:
                imToShow = convout1[imageNum, :, :, filterNum]
            else:
                imToShow = convout1[:, :, filterNum]
            imagesToVisualize[0, :, :, position] = np.squeeze(imToShow)
            position = position + 1
            filterNum = filterNum + 1
        imageNum = imageNum + 1
        filterNum = 0

    layer1Fig = plt.figure(1, figsize=(15, 8))
    plt.title('Output of layer ' + str(layer), loc='center')
    plt.axis('off')
    plt.tight_layout()
    disp_single_image_results(layer1Fig,
                              im[0, :, :],
                              np.squeeze(imagesToVisualize),
                              2,
                              5,
                              pad=0.8,
                              cmap=cm.binary)

    #save the results figure
    #resultsFolderName = dataFolder + '/results';
    #if not os.path.exists(resultsFolderName) :
    #   create_results_folder(resultsFolderName)

    #resultFigFileName = resultsFolderName + '/' + 'layer1_output'+'.png';
    #plt.savefig(resultFigFileName);

    imageFig = plt.figure(2, (15, 8))
    plt.title("Input Image")
    plt.axis('off')
    plt.tight_layout()
    print('Shape of input image:' + str(im[0, :, :].shape))
    plt.imshow(np.squeeze(im[0, :, :]), cmap=cm.binary)

    plt.show()
Code Example #6
def main():

    dataFolder = os.getcwd() + '/data_prev'
    modelFolder = dataFolder + '/latestModel'

    #load first model
    if hasattr(args, 'model1_path') and args.model1_path is not None:
        print("Loading Model from: " + args.model1_path)
        model1Folder = args.model1_path
        fileName = model1Folder + '/Keras_model_weights.h5'
        model1 = e.load_model(fileName)
        print("Model Loaded")
    else:
        print("Creating new model")
        model1 = e.create_model_seg()
        #set training parameters
        sgd = opt.SGD(lr=0.0001, decay=0.0005, momentum=0.9, nesterov=True)
        model1.compile(loss='mean_squared_error', optimizer=sgd)

    #load second model
    if hasattr(args, 'model2_path') and args.model2_path is not None:
        print("Loading Model from: " + args.model2_path)
        model2Folder = args.model2_path
        fileName = model2Folder + '/Keras_model_weights.h5'
        model2 = e.load_model(fileName)
        print("Model Loaded")
    else:
        print("Creating new model")
        model2 = e.create_model_seg()
        #set training parameters
        sgd = opt.SGD(lr=0.0001, decay=0.0005, momentum=0.9, nesterov=True)
        model2.compile(loss='mean_squared_error', optimizer=sgd)

    topResults, bottomResults, im, label = e.evaluate_top_and_bottom_k(
        model1, dataFolder)
    topResults2, bottomResults2, im2, label2 = e.evaluate_top_and_bottom_k(
        model2, dataFolder)

    #Display image, label and predicted output for the image with highest error
    imagesToshowTop = np.zeros((400, 200, 3))
    imagesToshowTop[:, :, 0] = topResults[:, :, 1]
    imagesToshowTop[:, :, 1] = topResults[:, :, 2]
    imagesToshowTop[:, :, 2] = topResults2[:, :, 2]
    top1Fig = plt.figure(1, figsize=(15, 8))
    plt.title('Label', loc='left')
    plt.title('Model-1 Result', loc='center')
    plt.title('Model-2 Result', loc='right')
    plt.axis('off')

    disp_images(top1Fig, imagesToshowTop, 1, 3, pad=1, cmap=cm.binary)

    #save the results figure
    resultsFolderName = dataFolder + '/results'
    if not os.path.exists(resultsFolderName):
        create_results_folder(resultsFolderName)

    resultFigFileName = resultsFolderName + '/' + 'top1' + '.png'
    plt.savefig(resultFigFileName)

    #Display image, label and predicted output for the image with lowest error
    imagesToshowBottom = np.zeros((400, 200, 3))
    imagesToshowBottom[:, :, 0] = bottomResults[:, :, 1]
    imagesToshowBottom[:, :, 1] = bottomResults[:, :, 2]
    imagesToshowBottom[:, :, 2] = bottomResults2[:, :, 2]
    bottom1Fig = plt.figure(2, figsize=(15, 8))
    plt.title('Label', loc='left')
    plt.title('Model-1 Result', loc='center')
    plt.title('Model-2 Result', loc='right')
    plt.axis('off')

    disp_images(bottom1Fig, imagesToshowBottom, 1, 3, pad=1, cmap=cm.binary)

    #save the results figure
    resultFigFileName = resultsFolderName + '/' + 'bottom1' + '.png'
    plt.savefig(resultFigFileName)

    plt.show()
Code Example #7
def main():
    #load/create model
    dataFolder = os.getcwd() + '/data600'
    modelFolder = dataFolder + '/Model'

    if hasattr(args, 'load_path') and args.load_path is not None:
        print("Loading Model from: " + args.load_path)
        fileName = args.load_path
        model = e.load_model(fileName)
        print("Model Loaded")
    else:
        print("Creating new model")
        model = e.create_model_seg()
        #set training parameters
        sgd = opt.SGD(lr=0.0001, decay=0.0005, momentum=0.9, nesterov=True)
        model.compile(loss='mean_squared_error', optimizer=sgd)

    predicted, im, label, image_files = e.load_model_images_and_evaluate(
        model=model, dataFolderPath=dataFolder, numImages=5)

    # plot the model weights
    layer = 1

    #W = model.layers[layer].W.get_value(borrow=True);
    #W = np.squeeze(W)
    #bias = model.layers[layer].B.get_value(borrow.True);

    weights = model.layers[layer].get_weights()[0]
    bias = model.layers[layer].get_weights()[1]

    print("weights Shape : ", weights.shape)
    print("weights Dimension : ", len(weights.shape))

    print("bias Shape : ", bias.shape)
    print("bias Dimension : ", len(bias.shape))

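    # dump the (channel-0) weights of every filter in this layer to weights.txt, one block per filter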
    wfile = open('weights.txt', 'w')

    for i in range(weights.shape[3]):
        print('Filter-' + str(i), file=wfile)
        for j in range(weights.shape[0]):
            for k in range(weights.shape[1]):
                print(weights[j, k, 0, i], file=wfile)
            print('\r\n', file=wfile)

    wfile.close()

    pl.figure(1, figsize=(15, 15))
    pl.title('Convolution layer ' + str(layer) + ' weights')

    nice_imshow(pl.gca(), make_mosaic(weights, 10, 10), cmap=cm.binary)
    figFileName = args.load_path + '/' + 'layer_' + str(layer) + '.png'
    #pl.savefig(figFileName);

    freq, values = np.histogram(weights, bins=1000)
    pl.figure(2, figsize=(15, 15))
    pl.title('Histogram of weights Layer:' + str(layer))
    pl.plot(values[1:len(values)], freq)

    pruningThreshold = 20
    numberOfFiltersToPrune = 0

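    # build a pruning mask: channel/filter pairs whose kernel weights are all below pruningThreshold in absolute value are marked 0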
    if len(weights.shape) == 4:
        pruningMask = np.ones((weights.shape[2], weights.shape[3]))
        for i in range(weights.shape[2]):  # for each input channel
            for j in range(weights.shape[3]):  # for each filter
                weightMatrix = weights[:, :, i, j]
                maxW = np.amax(weights[:, :, i, j])
                minW = np.amin(weights[:, :, i, j])
                if abs(minW) < pruningThreshold and abs(
                        maxW) < pruningThreshold:
                    print('(' + str(i) + ',' + str(j) + ')')
                    numberOfFiltersToPrune = numberOfFiltersToPrune + 1
                    pruningMask[i][j] = 0

    pl.figure(3, figsize=(15, 15))
    pl.title('Bias values')
    pl.plot(bias)

    print('Number of kernels marked for pruning = ' + str(numberOfFiltersToPrune))

    #open files
    pl.show()
Code Example #8
def main():

    dataFolder = os.getcwd() + '/data_prev'

    #load first model
    if hasattr(args, 'model1_path') and args.model1_path is not None:
        print("Loading Model from: " + args.model1_path)
        model1Folder = args.model1_path
        fileName = model1Folder + '/Keras_model_weights.h5'
        model1 = e.load_model(fileName)
        print("Model Loaded")
    else:
        print("Creating new model")
        model1 = e.create_model_seg()
        #set training parameters
        sgd = opt.SGD(lr=0.0001, decay=0.0005, momentum=0.9, nesterov=True)
        model1.compile(loss='mean_squared_error', optimizer=sgd)

    #load second model
    if hasattr(args, 'model2_path') and args.model2_path is not None:
        print("Loading Model from: " + args.model2_path)
        model2Folder = args.model2_path
        fileName = model2Folder + '/Appended_model_weights.h5'
        model2 = e.load_model(fileName)
        print("Model Loaded")
    else:
        print("Creating new model")
        model2 = e.create_model_seg()
        #set training parameters
        sgd = opt.SGD(lr=0.0001, decay=0.0005, momentum=0.9, nesterov=True)
        model2.compile(loss='mean_squared_error', optimizer=sgd)

    #compare model1 and model2 and summarize
    print('\t\t\t' + 'Model1' + '\t\t\t\t' + 'Model2')
    print('Number of Layers\t' + str(len(model1.layers)) + '\t\t\t\t' +
          str(len(model2.layers)))
    maxLayers = len(model1.layers)
    model = model1
    if len(model2.layers) > maxLayers:
        maxLayers = len(model2.layers)
        model = model2

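    # print a side-by-side table of layer types, comparing weights where both models have a Convolution2D layer at the same position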
    for i in range(maxLayers):
        #if(isinstance(type(model.layers[i]),convType)): # TODO review why not working
        #if(isinstance(type(model.layers[i]),Convolution2D)) :
        s = 'Layer_' + str(i) + '\t\t\t'
        layerType1 = None
        layerType2 = None
        if (i < len(model1.layers)):
            layerType1 = parseClassName(str(type(model1.layers[i])))
            s = s + layerType1
        else:
            s = s + '----'
        s = s + '\t\t\t'

        if (i < len(model2.layers)):
            layerType2 = parseClassName(str(type(model2.layers[i])))
            s = s + parseClassName(str(type(model2.layers[i])))
        else:
            s = s + '----'

        if (layerType1 is not None) and (layerType2 is not None) and (
                layerType1 == layerType2):
            if layerType1 == 'Convolution2D':
                s = s + '\t\t' + compare_layers(model1.layers[i],
                                                model2.layers[i])
        print(s)