Example #1
 def __init__(self,
              input_file_path,
              images_per_batch=50,
              train_percentage=0.8,
              max_sample_records=1000):
     self.input_file_path = input_file_path
     folders = os.listdir(self.input_file_path)
     folders = sanitize_data_folders(folders)
     self.train_folders, self.test_folders = Dataset.train_test_split(
         folders)
     self.train_percentage = train_percentage
     self.max_sample_records = max_sample_records
     self.train_metadata_summaries, self.train_metadata = summarize_metadata(
         self.input_file_path, self.train_folders)
     self.train_folder_weights = self.get_folder_weights(self.train_folders)
     self.test_metadata_summaries, self.test_metadata = summarize_metadata(
         self.input_file_path, self.test_folders)
     self.test_folder_weights = self.get_folder_weights(self.test_folders)
     self.images_per_batch = images_per_batch
     self.images_per_epoch = int(
         self.train_metadata_summaries['image_count'] *
         self.train_percentage)
     self.batches_per_epoch = int(self.images_per_epoch /
                                  self.images_per_batch)
     self.samples_per_epoch = int(self.images_per_epoch /
                                  self.max_sample_records)
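A minimal usage sketch for this constructor, assuming the surrounding Dataset class and its project helpers (sanitize_data_folders, summarize_metadata, train_test_split, get_folder_weights) are importable, and that '/path/to/data' is a placeholder directory of numbered session folders:

dataset = Dataset(input_file_path='/path/to/data',
                  images_per_batch=50,
                  train_percentage=0.8,
                  max_sample_records=1000)
# Derived sizes are available immediately after construction.
print(dataset.batches_per_epoch, dataset.samples_per_epoch)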
Example #2
 def __init__(self, input_file_path, images_per_batch=50,
              train_percentage=0.8, max_sample_records=1000):
     self.input_file_path = input_file_path
     folders = os.listdir(self.input_file_path)
     folders = sanitize_data_folders(folders)
     self.train_percentage = train_percentage
     self.train_folders, self.test_folders = self.train_test_split(folders)
     self.max_sample_records = max_sample_records
     self.train_metadata_summaries, self.train_metadata = summarize_metadata(self.input_file_path, self.train_folders)
     self.train_folder_weights = self.get_folder_weights(self.train_folders)
     self.test_metadata_summaries, self.test_metadata = summarize_metadata(self.input_file_path, self.test_folders)
     self.test_folder_weights = self.get_folder_weights(self.test_folders)
     self.images_per_batch = images_per_batch
     self.images_per_epoch = self.train_metadata_summaries['image_count']
     self.batches_per_epoch = int(self.images_per_epoch / self.images_per_batch)
     self.samples_per_epoch = int(self.images_per_epoch / self.max_sample_records)
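To make the epoch arithmetic concrete with hypothetical numbers (not taken from the source), suppose the training metadata reports image_count = 10000:

image_count = 10000                                             # hypothetical value
images_per_batch = 50
max_sample_records = 1000
train_percentage = 0.8
# Example #1 scales by train_percentage:
images_per_epoch = int(image_count * train_percentage)          # 8000
batches_per_epoch = int(images_per_epoch / images_per_batch)    # 160
samples_per_epoch = int(images_per_epoch / max_sample_records)  # 8
# Example #2 uses the full image_count, giving 10000 images and 200 batches per epoch.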
Example #3
def data_prep(data_path, rgb=True):

    #gamma_map = make_gamma_tables(np.arange(1.0,3.0,0.5))
    gamma_map = make_gamma_tables([1])

    data_folders = os.listdir(data_path)
    data_folders = sanitize_data_folders(data_folders)
    shuffle(data_folders)
    #data_folders = data_folders[:10]
    train_folder_size = int(len(data_folders) * 0.8)

    train_predictors = []
    train_targets = []
    for folder in data_folders[:train_folder_size]:
        print("Started session: " + str(folder))
        predictors, targets = process_session(data_path + '/' + folder,
                                              gamma_map, rgb)
        train_predictors.extend(predictors)
        train_targets.extend(targets)
        print("Completed session: " + str(folder))
    train_predictors_np = np.array(train_predictors)
    train_targets_np = np.array(train_targets)

    validation_predictors = []
    validation_targets = []
    for folder in data_folders[train_folder_size:]:
        print("Started session: " + str(folder))
        predictors, targets = process_session(data_path + '/' + folder,
                                              gamma_map, rgb)
        validation_predictors.extend(predictors)
        validation_targets.extend(targets)
        print("Completed session: " + str(folder))
    validation_predictors_np = np.array(validation_predictors)
    validation_targets_np = np.array(validation_targets)

    max_folder = max([int(folder) for folder in data_folders])
    new_file_name = None
    if rgb:
        new_file_name = '/data_rgb_' + str(max_folder)
    else:
        new_file_name = '/data_bw_' + str(max_folder)
    np.savez(data_path + new_file_name,
             train_predictors=train_predictors_np,
             train_targets=train_targets_np,
             validation_predictors=validation_predictors_np,
             validation_targets=validation_targets_np)
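A usage sketch, assuming the helpers data_prep relies on (make_gamma_tables, process_session, sanitize_data_folders) are importable and that '/path/to/data' is a placeholder containing numerically named session folders:

import numpy as np

data_prep('/path/to/data', rgb=True)
# np.savez appends '.npz'; the archive keys match the keyword arguments above.
# The id in the file name (here 42) is whatever max_folder turned out to be.
archive = np.load('/path/to/data/data_rgb_42.npz')
print(archive['train_predictors'].shape, archive['validation_targets'].shape)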
Example #4
def data_prep(data_path, rgb=True):

    #gamma_map = make_gamma_tables(np.arange(1.0,3.0,0.5))
    gamma_map = make_gamma_tables([1])

    data_folders = os.listdir(data_path)
    data_folders = sanitize_data_folders(data_folders)
    shuffle(data_folders)
    #data_folders = data_folders[:10]
    train_folder_size = int(len(data_folders) * 0.8)

    train_predictors = []
    train_targets = []
    for folder in data_folders[:train_folder_size]:
        print("Started session: " + str(folder))
        predictors, targets = process_session(data_path + '/' + folder, gamma_map, rgb)
        train_predictors.extend(predictors)
        train_targets.extend(targets)
        print("Completed session: " + str(folder))
    train_predictors_np = np.array(train_predictors)
    train_targets_np = np.array(train_targets)

    validation_predictors = []
    validation_targets = []
    for folder in data_folders[train_folder_size:]:
        print("Started session: " + str(folder))
        predictors, targets = process_session(data_path + '/' + folder, gamma_map, rgb)
        validation_predictors.extend(predictors)
        validation_targets.extend(targets)
        print("Completed session: " + str(folder))
    validation_predictors_np = np.array(validation_predictors)
    validation_targets_np = np.array(validation_targets)

    max_folder = max([int(folder) for folder in data_folders])
    new_file_name = None
    if rgb:
        new_file_name = '/data_rgb_' + str(max_folder)
    else:
        new_file_name = '/data_bw_' + str(max_folder)
    np.savez(data_path + new_file_name,
             train_predictors=train_predictors_np,
             train_targets=train_targets_np,
             validation_predictors=validation_predictors_np,
             validation_targets=validation_targets_np)
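sanitize_data_folders itself is not shown in these examples; because later code calls int(folder) on every entry, it presumably keeps only numerically named session folders. A minimal sketch under that assumption, not the project's actual implementation:

def sanitize_data_folders(folders):
    # Keep only directory names that look like numeric session ids,
    # since the callers later convert each entry with int(folder).
    return [folder for folder in folders if folder.isdigit()]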
Example #5
import os

import numpy as np

# make_gamma_tables, process_session and video_to_rgb_npz are used below but
# never imported in the original example; they are assumed to live in the
# project's util module alongside shell_command and sanitize_data_folders.
from util import (shell_command, sanitize_data_folders, make_gamma_tables,
                  process_session, video_to_rgb_npz)


def write_metadata(input_dir_path):
    input_file_path = input_dir_path + '/predictors_and_targets.npz'
    npzfile = np.load(input_file_path)
    image_count = len(npzfile['predictors'])
    metadata_file_path = input_dir_path + '/metadata.txt'
    with open(metadata_file_path, 'w') as writer:
        writer.write('image_count:' + str(image_count) + '\n')


data_path = 'C:\\Users\\Zak\\Self-Driving-Car\\data'
data_folders = os.listdir(data_path)
data_folders = sanitize_data_folders(data_folders)
gamma_map = make_gamma_tables([1]) # I should refactor-out the gamma at some point. It's not needed here
rgb = True

for folder in data_folders:
    cmd = 'dir ' + data_path + '\\' + folder
    dir_contents = str(shell_command(cmd))
    print("Started work on " + str(folder))
    print(dir_contents)
    input_dir_path = data_path + '/' + folder
    if 'predictors_and_targets.npz' not in dir_contents:
        predictors, targets = process_session(data_path + '/' + folder, gamma_map, rgb)
        video_to_rgb_npz(input_dir_path, predictors, targets)
        print("Completed work on: " + str(folder) + ". Created new npz and metadata files.")
        write_metadata(input_dir_path)
    elif 'metadata.csv' not in dir_contents:
        # The original example is truncated after this line; a plausible completion
        # is to regenerate only the metadata when the npz file already exists.
        write_metadata(input_dir_path)
        print("Completed work on: " + str(folder) + ". Created metadata file only.")
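shell_command is also not shown; from its use here it appears to run a shell command and return the captured output, which the caller wraps in str() before checking for file names. A minimal sketch under that assumption, not the project's actual helper:

import subprocess

def shell_command(cmd):
    # Run the command through the shell and return its stdout (bytes);
    # str() on the result still contains the listed file names.
    return subprocess.check_output(cmd, shell=True)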
Example #6
import os

import numpy as np

# As in Example #5, make_gamma_tables, process_session and video_to_rgb_npz are
# used below but never imported in the original; they are assumed to live in the
# project's util module alongside shell_command and sanitize_data_folders.
from util import (shell_command, sanitize_data_folders, make_gamma_tables,
                  process_session, video_to_rgb_npz)


def write_metadata(input_dir_path):
    input_file_path = input_dir_path + '/predictors_and_targets.npz'
    npzfile = np.load(input_file_path)
    image_count = len(npzfile['predictors'])
    metadata_file_path = input_dir_path + '/metadata.txt'
    with open(metadata_file_path, 'w') as writer:
        writer.write('image_count:' + str(image_count) + '\n')


data_path = '/Users/ryanzotti/Documents/repos/Self_Driving_RC_Car/data'
data_folders = os.listdir(data_path)
data_folders = sanitize_data_folders(data_folders)
gamma_map = make_gamma_tables([1]) # I should refactor-out the gamma at some point. It's not needed here
rgb = True

for folder in data_folders:
    cmd = 'ls ' + data_path + '/' + folder
    dir_contents = str(shell_command(cmd))
    print("Started work on " + str(folder))
    print(dir_contents)
    input_dir_path = data_path + '/' + folder
    if 'predictors_and_targets.npz' not in dir_contents:
        predictors, targets = process_session(data_path + '/' + folder, gamma_map, rgb)
        video_to_rgb_npz(input_dir_path, predictors, targets)
        print("Completed work on: " + str(folder) + ". Created new npz and metadata files.")
        write_metadata(input_dir_path)
    elif 'metadata.csv' not in dir_contents:
        # The original example is truncated after this line; a plausible completion
        # is to regenerate only the metadata when the npz file already exists.
        write_metadata(input_dir_path)
        print("Completed work on: " + str(folder) + ". Created metadata file only.")
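write_metadata stores one 'key:value' line per metric, which is presumably what summarize_metadata in Examples #1 and #2 consumes. A minimal sketch of reading such a file back (hypothetical helper, not the project's code):

def read_metadata(input_dir_path):
    # Parse the 'key:value' lines written by write_metadata above.
    summary = {}
    with open(input_dir_path + '/metadata.txt') as reader:
        for line in reader:
            key, value = line.strip().split(':')
            summary[key] = int(value)
    return summary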