Example #1
# The snippets on this page use the Keras 1.x-era API; project helpers such as
# full_path, OrigData, untar_data, zipdir, upload_s3, and download_s3 are
# defined elsewhere in the module.
import os
import pickle
import time

from keras.applications.vgg16 import VGG16
from keras.layers import Input, BatchNormalization
from keras.models import Model


def train_bottleneck_features(batch_size, save):
    data = OrigData(batch_size=batch_size)
    inputs = Input(shape=data.shape())
    # Create the base pre-trained model: VGG16 without its classifier head,
    # with batch normalization applied to the convolutional output.
    base_model = VGG16(input_tensor=inputs, include_top=False)
    output = BatchNormalization()(base_model.output)

    model = Model(input=inputs, output=output)

    print('Bottleneck training')

    for generator in data.generators:
        results = {'left': [], 'right': [], 'center': []}

        for direction in ['left', 'right', 'center']:
            t = time.time()
            print("Generating bottleneck data for generator:", generator.name,
                  "and direction:", direction)
            generator.set_direction(direction)
            results[direction] = model.predict_generator(
                generator, generator.size())
            print("Done generating output. Took", time.time() - t, "seconds.")

        file_name = "bottleneck_data/" + generator.name + ".p"
        with open(full_path(file_name), 'wb') as f:
            pickle.dump(results, f)

        if save:
            print("Saving file", file_name)
            upload_s3(file_name)
            os.remove(full_path(file_name))
            print("Removed", file_name)
        else:
            print("Not saving files.")
Example #2
def unpack():
    # Unpack the downloaded archive once, then move it into the data folder.
    if not os.path.isdir(full_path(_folder + "/" + _file)):
        print("unpacking", _filegz)
        untar_data(_filegz)
        shutil.move(full_path(_file), full_path(_folder))
    else:
        print(_file, "already unpacked.")
Example #3
def per_file_folder():
    # Give each .bag file its own folder, named after the file's stem.
    for bag_file in glob.glob(_folder + "/" + _file + "/*.bag"):
        folder = bag_file.split(".")[0]

        if not os.path.exists(full_path(folder)):
            os.makedirs(full_path(folder))

        print("Moving", bag_file, "to", folder)
        shutil.move(full_path(bag_file), full_path(folder))
Example #4
def download():
    # Fetch the archive from S3 only if it is not already on disk.
    if not os.path.isfile(full_path(_filegz)):
        print("Unable to find " + _filegz + ". Downloading now...")
        download_s3(_filegz)
        print('Download Finished!')
    else:
        print(_filegz, "already downloaded.")
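
Examples #2 through #4 form a small data-preparation pipeline. A minimal sketch of the intended call order in a hypothetical prepare_data wrapper, assuming the module-level _filegz, _file, and _folder names are configured:

def prepare_data():
    # Fetch the archive if needed (Example #4), unpack it once (Example #2),
    # then give each .bag file its own folder (Example #3).
    download()
    unpack()
    per_file_folder()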
Example #5
def __init__(self, batch_size=32):
    self.batch_size = batch_size
    data_folder = full_path("image_data")
    # One sub-folder per recorded video inside image_data.
    self.video_folders = [
        os.path.join(data_folder, child)
        for child in os.listdir(data_folder)
        if os.path.isdir(os.path.join(data_folder, child))
    ]
    self.generators = self._generators()
Example #6
def put_tensorboard_logs():
    data_folder = full_path('logs')
    # Name and path of the archive that will hold the TensorBoard logs.
    zipfile_name = 'tensorboard.zip'
    zipfile_path = full_path(zipfile_name)

    print("Zipping folder", data_folder)
    zf = zipfile.ZipFile(zipfile_path, "w")
    zipdir(data_folder, zf)
    zf.close()
    print("Finished zipping folder", data_folder)

    upload_s3(zipfile_name)
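
zipdir is a project helper that is not shown on this page. A plausible sketch, assuming it simply walks the folder and writes every file into the already-open archive (hypothetical reconstruction, not the project's actual code):

import os

def zipdir(path, zf):
    # Add every file under path to the open ZipFile handle zf.
    for root, dirs, files in os.walk(path):
        for name in files:
            zf.write(os.path.join(root, name))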
Example #7
def __init__(self, batch_size=32, video_frames=100, verbose=False):
    self.batch_size = batch_size
    self.video_frames = video_frames
    image_data = full_path("image_data")

    # Map each video folder under image_data to its pickled bottleneck file.
    self.video_datasets = {}
    for child in os.listdir(image_data):
        if not os.path.isdir(os.path.join(image_data, child)):
            continue

        self.video_datasets[child] = (os.path.join(image_data, child),
                                      "bottleneck_data/{}.p".format(child))

    self.datasets = [VideoDataset("HMB_1", "image_data/HMB_1",
                                  "bottleneck_data/HMB_1.p",
                                  self.batch_size, self.video_frames,
                                  verbose=verbose)]
Example #8
def image(self, index):
    # Nominal frame dimensions; not used below.
    i_width = 320
    i_height = 240
    path = self.video_folder + "/" + self.df['filename'][index]
    return img_to_array(load_img(full_path(path)))
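
A short usage sketch for Example #8, assuming ds is an instance of the dataset class this method belongs to. With TensorFlow dimension ordering, img_to_array returns a float array shaped (height, width, channels), so frames matching the i_width/i_height constants above decode to (240, 320, 3):

frame = ds.image(0)   # decode one frame to a NumPy array
print(frame.shape)    # (240, 320, 3) for 320x240 RGB frames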
Example #9
def train_model(model, data, config, include_tensorboard):
    model_history = History()
    model_history.on_train_begin()
    saver = ModelCheckpoint(full_path(config.model_file()), verbose=1,
                            save_best_only=True, period=1)
    saver.set_model(model)
    early_stopping = EarlyStopping(min_delta=config.min_delta,
                                   patience=config.patience, verbose=1)
    early_stopping.set_model(model)
    early_stopping.on_train_begin()
    csv_logger = CSVLogger(full_path(config.csv_log_file()))
    csv_logger.on_train_begin()
    if include_tensorboard:
        tensorboard = TensorBoard(histogram_freq=10, write_images=True)
        tensorboard.set_model(model)
    else:
        tensorboard = Callback()  # no-op stand-in

    epoch = 0
    stop = False
    while epoch <= config.max_epochs and not stop:
        epoch_history = History()
        epoch_history.on_train_begin()
        valid_sizes = []
        train_sizes = []
        print("Epoch:", epoch)
        for dataset in data.datasets:
            print("dataset:", dataset.name)
            model.reset_states()
            dataset.reset_generators()

            # The first training generator comes with a validation set.
            valid_sizes.append(dataset.valid_generators[0].size())
            train_sizes.append(dataset.train_generators[0].size())
            fit_history = model.fit_generator(dataset.train_generators[0],
                dataset.train_generators[0].size(),
                nb_epoch=1,
                verbose=0,
                validation_data=dataset.valid_generators[0],
                nb_val_samples=dataset.valid_generators[0].size())

            epoch_history.on_epoch_end(epoch, last_logs(fit_history))

            # The second training generator has no validation data.
            train_sizes.append(dataset.train_generators[1].size())
            fit_history = model.fit_generator(dataset.train_generators[1],
                dataset.train_generators[1].size(),
                nb_epoch=1,
                verbose=0)

            epoch_history.on_epoch_end(epoch, last_logs(fit_history))

        # Aggregate the per-dataset logs and feed them to every callback.
        epoch_logs = average_logs(epoch_history, train_sizes, valid_sizes)
        model_history.on_epoch_end(epoch, logs=epoch_logs)
        saver.on_epoch_end(epoch, logs=epoch_logs)
        early_stopping.on_epoch_end(epoch, epoch_logs)
        csv_logger.on_epoch_end(epoch, epoch_logs)
        tensorboard.on_epoch_end(epoch, epoch_logs)
        epoch += 1

        if early_stopping.stopped_epoch > 0:
            stop = True

    early_stopping.on_train_end()
    csv_logger.on_train_end()
    tensorboard.on_train_end({})
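
last_logs and average_logs are project helpers that are not shown on this page. A plausible sketch of last_logs, assuming it extracts the final epoch's metrics from a Keras History object (hypothetical reconstruction, not the project's actual code):

def last_logs(history):
    # Collapse a Keras History into its final epoch's metrics,
    # e.g. {'loss': 0.12, 'val_loss': 0.15}.
    return {key: values[-1] for key, values in history.history.items()}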