def generate_single_svm_train(train_file):
    """Build (or restore from cache) the SVM training set for one list file.

    The cache directory is the list file's path with its extension removed.
    Returns a tuple ``(images, labels, rects)`` loaded from .npy files.
    """
    save_path = train_file.rsplit('.', 1)[0].strip()
    if not os.path.exists(save_path):
        os.makedirs(save_path)
    # Extract proposals only when the cache directory is still empty.
    if not os.listdir(save_path):
        print("reading %s's svm dataset" % train_file.split('\\')[-1])
        # Keep positive/negative counts balanced (a heavily skewed set biases
        # the classifier toward negatives): IoU < 0.1 -> negative, > 0.6 ->
        # positive, everything in between is held out as test samples so the
        # first pass produces some positives for later hard-negative mining.
        prep.load_train_proposals(train_file, 2, save_path,
                                  threshold=0.1, is_svm=True, save=True)
    print("restoring svm dataset")
    images, labels, rects = prep.load_from_npy(save_path, is_svm=True)
    return images, labels, rects
Example #2
0
def generate_single_svm_train(train_file):
    """Build (or restore from cache) the SVM training set for one list file.

    The cache directory is the list file's path with its extension removed.
    Returns a tuple ``(images, labels)`` loaded from .npy files.
    """
    save_path = train_file.rsplit('.', 1)[0].strip()
    # Bug fix: os.listdir raises FileNotFoundError when the cache directory
    # has never been created. Guard it the same way the sibling variant of
    # this function does.
    if not os.path.exists(save_path):
        os.makedirs(save_path)
    if len(os.listdir(save_path)) == 0:
        print("reading %s's svm dataset" % train_file.split('\\')[-1])
        # IoU threshold 0.3 separates negatives from the rest of the proposals.
        prep.load_train_proposals(train_file,
                                  2,
                                  save_path,
                                  threshold=0.3,
                                  is_svm=True,
                                  save=True)
    print("restoring svm dataset")
    images, labels = prep.load_from_npy(save_path)

    return images, labels
def generate_single_svm_train(one_class_train_file):
    """Load (or build and pickle-cache) the SVM training set for one class.

    Returns a tuple ``(images, Y)``.
    """
    # Bug fix: the original body mixed tab and space indentation, which is a
    # TabError under Python 3; re-indented consistently with 4 spaces.
    trainfile = one_class_train_file
    # The cache file lives next to the list file, with a .pkl extension.
    savepath = one_class_train_file.replace('txt', 'pkl')
    images = []
    Y = []
    if os.path.isfile(savepath):
        print("restoring svm dataset " + savepath)
        images, Y = prep.load_from_pkl(savepath)
    else:
        print("loading svm dataset " + savepath)
        images, Y = prep.load_train_proposals(trainfile, 2, threshold=0.3,
                                              svm=True, save=True,
                                              save_path=savepath)
    return images, Y
Example #4
0
def generate_single_svm_train(one_class_train_file):
    """Return ``(images, Y)`` for one class, using a .pkl cache when present."""
    pkl_path = one_class_train_file.replace('txt', 'pkl')
    if os.path.isfile(pkl_path):
        # Fast path: proposals were already extracted and pickled.
        print("restoring svm dataset " + pkl_path)
        images, Y = prep.load_from_pkl(pkl_path)
        return images, Y
    # Slow path: extract region proposals from the list file and cache them.
    print("loading svm dataset " + pkl_path)
    images, Y = prep.load_train_proposals(one_class_train_file, 2,
                                          threshold=0.3, svm=True,
                                          save=True, save_path=pkl_path)
    return images, Y
Example #5
0
        n_epoch=20,
        validation_set=0.1,
        shuffle=True,
        show_metric=True,
        batch_size=64,
        snapshot_step=200,
        snapshot_epoch=False,
        run_id='alexnet_rcnnflowers2'
    )  # epoch = 1000 Start training (apply gradient descent algorithm)
    # Save the model
    model.save('fine_tune_model_save.model')


if __name__ == '__main__':
    # Prefer the pickled dataset; otherwise build it from the list file.
    if os.path.isfile('dataset.pkl'):
        print("Loading Data")
        X, Y = prep.load_from_pkl('dataset.pkl')
    else:
        print("Reading Data")
        X, Y = prep.load_train_proposals('refine_list.txt', 2, save=True)
    print("DONE")

    # Resume fine-tuning when a previously saved model file exists.
    restore = os.path.isfile('fine_tune_model_save.model')
    if restore:
        print("Continue training")

    net = create_alexnet(3, restore)
    fine_tune_Alexnet(net, X, Y)
              n_epoch=1,
              validation_set=0.1,
              shuffle=True,
              show_metric=True,
              batch_size=64,
              snapshot_step=200,
              snapshot_epoch=False,
              run_id='alexnet_rcnnflowers2')
    # Save the model
    model.save(fine_tune_model_path)


if __name__ == '__main__':
    data_set = config.FINE_TUNE_DATA
    # Regenerate the .npy training data only when the cache directory is empty.
    if not os.listdir(data_set):
        print("Reading Data")
        prep.load_train_proposals(config.FINE_TUNE_LIST, 2,
                                  save=True, save_path=data_set)
    print("Loading Data")
    X, Y = prep.load_from_npy(data_set)

    # A TensorFlow checkpoint leaves an .index file behind; its presence
    # means fine-tuning can resume from the saved weights.
    restore = os.path.isfile(config.FINE_TUNE_MODEL_PATH + '.index')
    if restore:
        print("Continue fine-tune")

    # three classes include background
    net = create_alexnet(config.FINE_TUNE_CLASS, restore=restore)
    fine_tune_Alexnet(net, X, Y, config.SAVE_MODEL_PATH,
                      config.FINE_TUNE_MODEL_PATH)
	print("Loading the fine tuned model")
    	model.load('fine_tune_model_save.model')
    elif os.path.isfile('model_save.model'):
	print("Loading the alexnet")
	model.load('model_save.model')
    else:
	print("No file to load, error")
        return False
    model.fit(X, Y, n_epoch=10, validation_set=0.1, shuffle=True,
              show_metric=True, batch_size=64, snapshot_step=200,
              snapshot_epoch=False, run_id='alexnet_rcnnflowers2') # epoch = 1000
    # Save the model
    model.save('fine_tune_model_save.model')

if __name__ == '__main__':
    # Bug fix: the original block mixed tab and space indentation, which is a
    # TabError under Python 3; re-indented consistently with 4 spaces.
    # Prefer the pickled dataset; otherwise build it from the list file.
    if os.path.isfile('dataset.pkl'):
        print("Loading Data")
        X, Y = prep.load_from_pkl('dataset.pkl')
    else:
        print("Reading Data")
        X, Y = prep.load_train_proposals('refine_list.txt', 2, save=True)
    print("DONE")
    # Resume fine-tuning when a previously saved model file exists.
    restore = False
    if os.path.isfile('fine_tune_model_save.model'):
        restore = True
        print("Continue training")
    net = create_alexnet(3, restore)
    fine_tune_Alexnet(net, X, Y)