def generate_tfrecord_files(tfrecords_path, images_path, images_per_file):
    """Shard the JPEGs under images_path into TFRecord files of ~images_per_file each."""
    make_dir(tfrecords_path)
    images_path_pattern = images_path + "*/*.jpeg"
    found_images = len(tf.io.gfile.glob(images_path_pattern))
    print(
        "Pattern matches {} images which will be rewritten as {} TFRecord files "
        "containing ~{} images each.".format(
            found_images, math.ceil(found_images / images_per_file), images_per_file))
    images = tf.data.Dataset.list_files(images_path_pattern)

    # Pick the platform-specific encoder; fail loudly on unsupported platforms
    # instead of hitting an UnboundLocalError further down.
    if SYSTEM in ("Linux", "Darwin"):
        encode_image = _encode_image_tfrecord_linux
    elif SYSTEM == "Windows":
        encode_image = _encode_image_tfrecord_windows
    else:
        raise RuntimeError("Unsupported platform: {}".format(SYSTEM))

    dataset = images.map(encode_image, num_parallel_calls=AUTO).batch(images_per_file)
    for file_number, (image, label, file_name) in enumerate(
            tqdm(dataset, desc="Generating TFRecords")):
        tfrecord_filename = tfrecords_path + "{:02d}-{}.tfrecord".format(
            file_number, images_per_file)
        images_in_this_file = image.numpy().shape[0]  # the last batch may be smaller
        if not os.path.isfile(tfrecord_filename):  # skip shards that already exist
            with tf.io.TFRecordWriter(tfrecord_filename) as out_file:
                for i in range(images_in_this_file):
                    example = _to_tfrecord(
                        out_file,
                        np.array(image)[i],
                        label.numpy()[i],
                        file_name.numpy()[i],
                    )
                    out_file.write(example.SerializeToString())

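# Hedged sketch: _to_tfrecord is referenced above but defined elsewhere in the
# repo. A minimal version compatible with the call site might look like the
# stand-in below; the feature names ("image", "label", "file_name") and the
# JPEG re-encoding are assumptions for illustration, not the module's actual
# serialisation format.
def _to_tfrecord_sketch(out_file, image, label, file_name):
    """Pack one (image, label, file name) triple into a tf.train.Example."""
    # out_file mirrors the call site's signature; it is not used here.
    feature = {
        "image": tf.train.Feature(bytes_list=tf.train.BytesList(
            value=[tf.io.encode_jpeg(tf.cast(image, tf.uint8)).numpy()])),
        "label": tf.train.Feature(int64_list=tf.train.Int64List(
            value=[int(label)])),
        "file_name": tf.train.Feature(bytes_list=tf.train.BytesList(
            value=[file_name])),
    }
    return tf.train.Example(features=tf.train.Features(feature=feature))
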
def detect():
    """Split input images into tiles, run YOLO on each tile, then rebuild full-size results."""
    src_dir = get_dirname(0)
    in_dir = make_dirname(src_dir, "in")
    tmp_dir = make_dirname(src_dir, "tmp")
    out_dir = make_dirname(src_dir, "out")
    make_dir(tmp_dir)

    # Split every input image into tiles under tmp/<image>/images.
    for filename in os.listdir(in_dir):
        tmp_split = make_dirname(tmp_dir, filename.split(".")[0], "images")
        make_dir(tmp_split)
        split_image(in_dir, filename, tmp_split)

    # Run detection on each tile folder, writing one CSV per input image.
    detector = Yolo()
    for dirname in os.listdir(tmp_dir):
        img_dir = make_dirname(tmp_dir, dirname, "images")
        make_dir(make_dirname(tmp_dir, dirname, "csv"))
        csv_dir = make_dirname(tmp_dir, dirname, "csv", dirname + ".csv")
        detector.predict(img_dir, csv_dir)

    with open(make_dirname(src_dir, "model_weights", "data_classes.txt"), "r") as class_file:
        classes = [line.rstrip("\n") for line in class_file]

    # Stitch the per-tile detections back onto the original images.
    for img_name in os.listdir(tmp_dir):
        img_file = [
            filename for filename in os.listdir(in_dir) if img_name in filename
        ][0]
        img_file = make_dirname(in_dir, img_file)
        csv_file = make_dirname(tmp_dir, img_name, "csv", img_name + ".csv")
        make_dir(make_dirname(out_dir, img_name, "csv"))
        make_dir(make_dirname(out_dir, img_name, "images"))
        out_img = make_dirname(out_dir, img_name, "images",
                               "result_" + img_name + ".jpg")
        out_csv = make_dirname(out_dir, img_name, "csv",
                               "result_" + img_name + ".csv")
        rebuild_image(img_file, csv_file, out_img, out_csv, classes)

    clear_dir(tmp_dir)

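# Hedged sketch: make_dirname and make_dir are helpers from elsewhere in the
# repo. Judging by their call sites (make_dirname only joins path components,
# make_dir creates the folder and returns its path, since run_training uses
# the return value), minimal stand-ins could look like this; both are
# assumptions for illustration, not the repo's actual definitions.
def make_dirname_sketch(*parts):
    """Join path components, like os.path.join."""
    return os.path.join(*parts)

def make_dir_sketch(path):
    """Create the folder if it is missing and return its path."""
    os.makedirs(path, exist_ok=True)
    return path
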
def get_callbacks(config):
    callbacks = []
    if config.save_model:
        print("Saving model as " + config.save_model)
        filepath = "./Saved Models"
        make_dir(filepath)
        callbacks.append(
            ModelCheckpoint(
                filepath=filepath + "/" + config.save_model,
                save_best_only=True,
                save_weights_only=False,
                monitor="val_accuracy",
                mode="max",
                verbose=1,
                save_freq="epoch",
            )
        )
    return callbacks

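# Hedged usage sketch: how the callback list would typically be passed to
# training. SimpleNamespace stands in for the project's real config object;
# only the attribute get_callbacks actually reads (config.save_model) matters,
# and the file name is a hypothetical placeholder.
def _demo_get_callbacks():
    from types import SimpleNamespace
    config = SimpleNamespace(save_model="best_model.h5")  # hypothetical name
    callbacks = get_callbacks(config)
    # model.fit(train_ds, validation_data=val_ds, epochs=10, callbacks=callbacks)
    return callbacks
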
def combine_heatmap_image(image_array, heatmap, file_names, folder_path):
    """Colourise each heatmap, overlay it on its image, and save the result."""
    image_array = np.uint8(255 * image_array)
    heatmap = np.uint8(255 * heatmap)

    # Look-up table mapping each 0-255 heatmap value to an RGB colour.
    colormap = cm.get_cmap("inferno")
    colors = colormap(np.arange(256))[:, :3]

    make_dir(folder_path)  # create the output folder once, not per image
    for hm, im, name in zip(heatmap, image_array, file_names):
        colored_heatmap = colors[hm]
        colored_heatmap = array_to_img(colored_heatmap)
        # PIL's resize takes (width, height); image_array is (N, H, W, C).
        colored_heatmap = colored_heatmap.resize(
            (image_array.shape[2], image_array.shape[1]))
        colored_heatmap = img_to_array(colored_heatmap)
        superimposed_img = colored_heatmap * 0.7 + im
        superimposed_img = array_to_img(superimposed_img)
        superimposed_img.save(folder_path + name)

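# Hedged sketch: combine_heatmap_image expects one 2-D heatmap in [0, 1] per
# image. One common way to obtain such maps is Grad-CAM; whether this project
# uses it is an assumption, and the conv layer name must come from the caller.
def make_gradcam_heatmaps_sketch(model, images, last_conv_layer_name):
    """Return one Grad-CAM heatmap per image, normalised to [0, 1]."""
    grad_model = tf.keras.Model(
        model.inputs,
        [model.get_layer(last_conv_layer_name).output, model.output])
    with tf.GradientTape() as tape:
        conv_out, preds = grad_model(images)
        top_scores = tf.reduce_max(preds, axis=-1)  # score of the predicted class
    grads = tape.gradient(top_scores, conv_out)
    # Weight each feature map by its average gradient, then combine.
    weights = tf.reduce_mean(grads, axis=(1, 2), keepdims=True)
    cams = tf.nn.relu(tf.reduce_sum(weights * conv_out, axis=-1))
    cams /= tf.reduce_max(cams, axis=(1, 2), keepdims=True) + 1e-8
    return cams.numpy()
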
def plot_clusters(X_embedded, labels, title="", save_path=None, s=1, loc="upper left"):
    """
    Visualises the observations from X_embedded with the corresponding clusters in a 2D plot
    """
    fig, ax = plt.subplots()
    ax.xaxis.set_major_formatter(NullFormatter())
    ax.yaxis.set_major_formatter(NullFormatter())
    fig.patch.set_visible(False)
    ax.axis("off")
    scatter = ax.scatter(x=X_embedded[:, 0], y=X_embedded[:, 1],
                         cmap="Paired", c=labels, s=s, vmin=0, vmax=6)
    ax.set_title(title, fontsize=12)
    ax.legend(*scatter.legend_elements(), loc=loc, title="Cluster",
              prop={"size": 10}, fancybox=True)
    if save_path is not None:
        # save_path is a file name, so create its parent folder, not the path itself.
        save_dir = os.path.dirname(save_path)
        if save_dir:
            make_dir(save_dir)
        plt.savefig(save_path)
    plt.show()

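# Hedged usage sketch with synthetic data; in the project the embedding would
# come from a dimensionality reduction step (e.g. t-SNE, an assumption here)
# and the labels from a clustering algorithm.
def _demo_plot_clusters():
    rng = np.random.default_rng(0)
    labels = rng.integers(0, 3, size=300)  # three fake clusters
    X_embedded = rng.normal(size=(300, 2)) + labels[:, None] * 4.0
    plot_clusters(X_embedded, labels, title="Synthetic clusters")
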
def run_training(num_generations, config_filename=None, restore=False,
                 restore_folder=None, restore_checkpoint=None):
    # Create the population.
    if not restore:
        # Create a new, timestamped folder for the current run.
        time_stamp = datetime.datetime.now()
        current_folder = make_dir(os.path.join(
            PATH_TO_RES, "NEAT-AI",
            str(time_stamp).split(".")[0].replace(":", "-").replace(" ", "_")))
        if config_filename is None:
            path_to_config = os.path.join(PATH_TO_CONFIGS, "neat_test_config")
        else:
            path_to_config = os.path.join(PATH_TO_CONFIGS, config_filename)
        # Copy the config file for later use.
        shutil.copy(path_to_config, os.path.join(current_folder, "configfile"))
        neat_config = neat.Config(neat.DefaultGenome, neat.DefaultReproduction,
                                  neat.DefaultSpeciesSet, neat.DefaultStagnation,
                                  path_to_config)
        make_dir(os.path.join(current_folder, "checkpoints"))
        p = neat.Population(config=neat_config)
    else:
        # Load the saved config file.
        path_to_config = os.path.join(PATH_TO_SAVINGS, restore_folder, "configfile")
        neat_config = neat.Config(neat.DefaultGenome, neat.DefaultReproduction,
                                  neat.DefaultSpeciesSet, neat.DefaultStagnation,
                                  path_to_config)
        # Restore the population from a checkpoint.
        path_to_restore = os.path.join(PATH_TO_SAVINGS, restore_folder,
                                       "checkpoints",
                                       "checkpoint-" + str(restore_checkpoint))
        p = neat.Checkpointer.restore_checkpoint(path_to_restore)
        current_folder = os.path.abspath(
            os.path.join(path_to_restore, os.pardir, os.pardir))

    # Show progress on the console.
    p.add_reporter(neat.StdOutReporter(True))
    # Reporter that collects statistics over the generations and tracks the best genome.
    stats = neat.StatisticsReporter()
    p.add_reporter(stats)
    # Reporter that saves the population state during the evolution.
    p.add_reporter(neat.Checkpointer(
        generation_interval=1,
        filename_prefix=os.path.join(current_folder, "checkpoints", "checkpoint-")))

    try:
        for _ in range(num_generations):
            # Run one generation at a time so the current best can be saved each step.
            current_best = p.run(fitness_function=fitness_function, n=1)
            # Save a visualisation of this generation's best genome.
            visualize.draw_net(config=neat_config, genome=current_best,
                               node_names=IO_NAMES, view=False,
                               filename=os.path.join(
                                   current_folder, "checkpoints",
                                   "checkpoint-{}-best".format(p.generation - 1)),
                               fmt="svg")
            # Pickle the genome for later use.
            with open(os.path.join(current_folder, "checkpoints",
                                   "checkpoint-{}-best.p".format(p.generation - 1)),
                      "wb") as f:
                pickle.dump(current_best, f)
    except Exception as e:
        print(e)
    finally:
        winner = p.best_genome
        # Save a visualisation of the overall winner and the run statistics.
        print("\nBest genome:\n{!s}".format(winner))
        visualize.draw_net(config=neat_config, genome=winner, node_names=IO_NAMES,
                           view=False,
                           filename=os.path.join(current_folder, "winner"),
                           fmt="svg")
        # Pickle the winner for later use.
        with open(os.path.join(current_folder, "winner.p"), "wb") as f:
            pickle.dump(winner, f)
        visualize.plot_stats(stats, ylog=False, view=False,
                             filename=os.path.join(current_folder, "avg_fitness.svg"))
        visualize.plot_species(stats, view=False,
                               filename=os.path.join(current_folder, "speciation.svg"))
        print("finished")

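# Hedged usage sketch: start a fresh run, or resume an earlier one. The
# generation count, folder name, and checkpoint number are placeholders, not
# values from this repo; restore_folder follows the timestamp format the
# function itself produces.
if __name__ == "__main__":
    run_training(num_generations=50, config_filename="neat_test_config")
    # To resume instead:
    # run_training(num_generations=50, restore=True,
    #              restore_folder="2023-01-01_12-00-00", restore_checkpoint=10)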