def get_batch_of_stored_bottlenecks(sess, image_lists, batch_size, category,
                                    bottleneck_dir, image_dir,
                                    jpeg_data_tensor, decoded_image_tensor,
                                    resized_input_tensor, bottleneck_tensor):
    class_count = len(image_lists.keys())
    bottlenecks = []
    ground_truths = []
    filenames = []
    if batch_size >= 0:
        # Retrieve a random sample of bottlenecks.
        for i in range(batch_size):
            label_index = random.randrange(class_count)
            label_name = list(image_lists.keys())[label_index]
            image_index = random.randrange(MAX_NUM_IMAGES_PER_CLASS + 1)
            image_name = utils.get_image_path(image_lists, label_name,
                                              image_index, image_dir,
                                              category)
            bottleneck = get_bottleneck(sess, image_lists, label_name,
                                        image_index, image_dir, category,
                                        bottleneck_dir, jpeg_data_tensor,
                                        decoded_image_tensor,
                                        resized_input_tensor,
                                        bottleneck_tensor)
            ground_truth = np.zeros(class_count, dtype=np.float32)
            ground_truth[label_index] = 1.0
            bottlenecks.append(bottleneck)
            ground_truths.append(ground_truth)
            filenames.append(image_name)
    else:
        # Retrieve all bottlenecks.
        # Used mainly for the validation set.
        for label_index, label_name in enumerate(image_lists.keys()):
            for image_index, image_name in enumerate(
                    image_lists[label_name][category]):
                image_name = utils.get_image_path(image_lists, label_name,
                                                  image_index, image_dir,
                                                  category)
                bottleneck = get_bottleneck(sess, image_lists, label_name,
                                            image_index, image_dir, category,
                                            bottleneck_dir, jpeg_data_tensor,
                                            decoded_image_tensor,
                                            resized_input_tensor,
                                            bottleneck_tensor)
                ground_truth = np.zeros(class_count, dtype=np.float32)
                ground_truth[label_index] = 1.0
                bottlenecks.append(bottleneck)
                ground_truths.append(ground_truth)
                filenames.append(image_name)
    return bottlenecks, ground_truths, filenames
def create_bottleneck(bottleneck_path, image_lists, label_name, index,
                      image_dir, category, sess, jpeg_data_tensor,
                      decoded_image_tensor, resized_input_tensor,
                      bottleneck_tensor):
    tf.logging.info("Creating Bottleneck at {}".format(bottleneck_path))
    image_path = utils.get_image_path(image_lists, label_name, index,
                                      image_dir, category)
    if not gfile.Exists(image_path):
        tf.logging.fatal("File does not exist {}".format(image_path))
    image_data = gfile.FastGFile(image_path, "rb").read()
    try:
        # Run the image through the network up to the bottleneck layer.
        bottleneck_values = run_bottleneck_on_image(sess, image_data,
                                                    jpeg_data_tensor,
                                                    decoded_image_tensor,
                                                    resized_input_tensor,
                                                    bottleneck_tensor)
    except Exception as e:
        raise RuntimeError("Error bottlenecking {}\n{}".format(image_path,
                                                               str(e)))
    # Serialize the bottleneck vector as comma-separated text and cache it.
    bottleneck_string = ",".join(str(x) for x in bottleneck_values)
    bottleneck_directory = "/".join(bottleneck_path.split("/")[:-1])
    utils.create_directory(bottleneck_directory)
    with open(bottleneck_path, "w") as bottleneck_file:
        bottleneck_file.write(bottleneck_string)
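# The helpers above cache each bottleneck as a plain text file of
# comma-separated floats. A minimal read-back sketch of that on-disk format
# follows; it is a hypothetical, illustrative helper (the module's own
# get_bottleneck presumably handles this), not part of the original code.
def load_bottleneck_from_file(bottleneck_path):
    """Parse a comma-separated bottleneck file back into a list of floats."""
    with open(bottleneck_path, "r") as bottleneck_file:
        bottleneck_string = bottleneck_file.read()
    return [float(x) for x in bottleneck_string.split(",")]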