# Hyperparameter sweep: for each candidate patch_offset regenerate the patch
# dataset, then iterate over candidate codebook sizes.
# NOTE: print(...) with a single parenthesized argument behaves identically
# under Python 2 and Python 3, unlike the bare `print` statement.
for patch_offset in HYPERPARAMETERS_OPTIONS["patch_offset"]:
    hyperparameters["patch_offset"] = patch_offset
    print("HYPERPARAMETER: patch_offset = " + str(patch_offset))

    # Generate patches for the current offset (cached: force_refresh=False
    # skips regeneration when the patches already exist on disk).
    print("---------------------")
    print("## generating patches from '" + IMG_NAME + "' ("
          + str(IMG_SIZE[0]) + "x" + str(IMG_SIZE[1])
          + "; " + str(IMG_BBOX) + ")")
    patch_generator.generate_patches(
        IMG_BBOX,
        IMG_SIZE,
        patch_size=hyperparameters["patch_size"],
        offset_steps=hyperparameters["patch_offset"],
        target_folder=DATASET_DIR,
        force_refresh=False,
        data_folder=IMG_NAME,
    )
    print("")

    # Inner sweep over codebook sizes; dataset_split tracks whether a
    # previous iteration split the dataset so it can be undone first.
    dataset_split = 0
    for codebook_size in HYPERPARAMETERS_OPTIONS["codebook_size"]:
        hyperparameters["codebook_size"] = codebook_size
        print("HYPERPARAMETER: codebook_size = " + str(codebook_size))

        # Undo dataset splitting left over from the previous iteration.
        if dataset_split:
            all_files = algo.get_imgfiles(TRAININGSET_DIR)
def get_imgfiles(path):
    """Return the image files found directly inside *path*.

    A file qualifies when its lower-cased extension appears in the
    module-level ``EXTENSIONS`` collection.  Backslashes are replaced with
    forward slashes so the returned paths look the same on Windows and
    POSIX systems.

    :param path: directory to scan (non-recursive)
    :return: list of normalized file paths
    """
    return [join(path, basename(fname)).replace("\\", "/")
            for fname in glob(path + "/*")
            if splitext(fname)[-1].lower() in EXTENSIONS]


if __name__ == '__main__':
    import patch_generator

    # Generate (or reuse, via force_refresh=False) the patch dataset for
    # this DOP tile.  BBOX, DOP_SIZE, PATCH_SIZE, PATCH_OFFSET_STEPS,
    # DATASETPATH and `f` are module-level settings defined elsewhere in
    # the file.
    patch_generator.generate_patches(
        BBOX,
        DOP_SIZE,
        patch_size=PATCH_SIZE,
        offset_steps=PATCH_OFFSET_STEPS,
        target_folder=DATASETPATH,
        data_folder='dop' + f,
        force_refresh=False,
    )

    # Ensure the temp directory exists.  Catch only OSError (e.g. the
    # directory already exists) — the original bare `except:` also hid
    # KeyboardInterrupt/SystemExit and genuine bugs.
    try:
        os.makedirs(TMP_DIR)
    except OSError:
        pass
    # NOTE(review): double-underscore attribute — works at module level
    # (no name mangling outside a class body), but presumably a "private"
    # helper of `algo`; confirm it is meant to be called from here.
    algo.__clear_dir(TMP_DIR)

    # Binary classification: two categories.
    cats = [0, 1]
    ncats = len(cats)