def compute_on_dir(self, dirpath, outdir):
    """
    Computes the output of all images in a directory with 'im' in their name.

    Args:
        dirpath (string): path to the data folder.
        outdir (string): path to the output folder. Created if not existing.

    Returns:
        Nothing
    """
    if not os.path.isdir(outdir):
        os.mkdir(outdir)
    # Count the input images ('im' in the filename) in the data folder.
    _, _, files = next(os.walk(dirpath))
    dir_len = 0
    for file in files:
        if 'im' in file:
            dir_len += 1
    gen = BatchGenerator(traindir=dirpath,
                         city_model=self,
                         trainsetsize=dir_len,
                         batchsize=1,
                         traindirsize=dir_len)
    counter = 0
    inputs = gen.generate_input_only(option=self.prop_dict['trainset'][0])
    for x in inputs:
        # Stop after processing every image in the folder once.
        if counter >= dir_len:
            break
        x = np.squeeze(x, axis=0)
        y = np.squeeze(self.compute_output(x), axis=0)
        Im = Image.fromarray(x[:, :, 0:3].astype('uint8'))
        Out = Image.fromarray(y.astype('uint8'))
        Im.save(os.path.join(outdir, 'input_' + str(counter) + '_.png'))
        Out.save(os.path.join(outdir, 'output_' + str(counter) + '_.png'))
        counter += 1
        print(counter)
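# Usage sketch for compute_on_dir (assumed workflow and placeholder paths; the
# module name 'cityscape_model' is an assumption, not part of the project).
# The model is loaded from its folder and its weights restored before
# computing outputs, mirroring the evaluation scripts further down.
from cityscape_model import CityScapeModel  # assumed module name

model = CityScapeModel('models/my_city_model')
model.compile()
model.load_weights()
# Writes paired input_<i>_.png / output_<i>_.png files into the output folder.
model.compute_on_dir('data/validation', 'outputs/validation')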
def on_train_begin(self, logs={}):
    if len(self.citymodel.prop_dict['valset']) > 0:
        self.gen = BatchGenerator(
            traindir=self.citymodel.prop_dict['valset'][1],
            city_model=self.citymodel,
            trainsetsize=self.citymodel.prop_dict['valset'][2],
            batchsize=self.num_ins,
            traindirsize=self.citymodel.prop_dict['valset'][2])
        self.x, _ = next(
            self.gen.generate_batch(
                option=self.citymodel.prop_dict['valset'][0]))
    else:
        self.gen = BatchGenerator(
            traindir=self.citymodel.prop_dict['trainset'][1],
            city_model=self.citymodel,
            trainsetsize=self.citymodel.prop_dict['trainset'][2],
            batchsize=self.num_ins)
        self.x, _ = next(
            self.gen.generate_batch(
                option=self.citymodel.prop_dict['trainset'][0]))
    for i in range(self.num_ins):
        bob = self.x[i, :, :, :]
        bob = bob[:, :, 0:3].astype(np.uint8)
        Input = Image.fromarray(bob)
        Input.save(
            join(self.citymodel.prop_dict['directory'], 'watch',
                 self.citymodel.prop_dict['name'] + str(i) + '_input.png'))
def evaluate_full(self, validation_dir, validation_option='', logfile=None):
    valdirsize = len([name for name in os.listdir(validation_dir)
                      if os.path.isfile(os.path.join(validation_dir, name))
                      and 'im' in name])
    val_gen = BatchGenerator(traindir=validation_dir,
                             city_model=self,
                             trainsetsize=valdirsize,
                             batchsize=1,
                             traindirsize=valdirsize)
    counter = 0
    gen = val_gen.generate_input_only(validation_option)
    metrics = ['acc']
    for met in Metrics.valid_metrics:
        metrics.append(Metrics.create_metrics(met, self))
def evaluate(self, valdirsize=None, outputfile=None):
    """
    Evaluates the model over the validation set.

    Args:
        valdirsize (integer): size of the validation set. Default is the
            value defined in the prop_dict of the model.
        outputfile (string): path to a csv file that will be created to
            store the results.

    Returns:
        Nothing
    """
    if not valdirsize:
        valdirsize = self.prop_dict['valset'][2]
    val_gen = BatchGenerator(traindir=self.prop_dict['valset'][1],
                             city_model=self,
                             trainsetsize=valdirsize,
                             batchsize=1,
                             traindirsize=valdirsize)
    counter = 0
    gen = val_gen.generate_batch(option=self.prop_dict['valset'][0])
    self.compile()
    out_dict_list = []
    for (x_val, y_val) in gen:
        # Only evaluate one pass over the validation set.
        if counter < valdirsize:
            line = {}
            values = self.model.test_on_batch(x_val, y_val)
            line['loss'] = values[0]
            for i in range(1, len(values)):
                line[self.prop_dict['metrics'][i - 1]] = values[i]
            print(line)
            out_dict_list.append(line)
            counter += 1
        else:
            break
    if outputfile:
        with open(outputfile, mode='w') as out:
            writer = csv.DictWriter(out, out_dict_list[0].keys())
            writer.writeheader()
            writer.writerows(out_dict_list)
    # Average the per-image results; work on a copy so the raw lines stay intact.
    means = dict(out_dict_list[0])
    for key in means:
        for i in range(1, valdirsize):
            means[key] += out_dict_list[i][key]
        means[key] /= valdirsize
    print("The results over the validation set are:")
    print(means)
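# Usage sketch for evaluate(), assuming the model's prop_dict already declares
# a 'valset' entry. The module name and paths are placeholders.
from cityscape_model import CityScapeModel  # assumed module name

model = CityScapeModel('models/my_city_model')
model.load_weights()
# evaluate() compiles the model itself; this writes one CSV line per image
# and prints the averaged metrics at the end.
model.evaluate(outputfile='results/validation_metrics.csv')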
def train(self, epochs, batch_size, save=True):
    """
    Trains the neural network according to the values passed as arguments.

    Args:
        epochs (int): number of epochs to train on.
        batch_size (int): size of the batch to use.
        save (bool): whether to save the model at the end of training or not.

    Returns:
        Nothing
    """
    # First, we compile the model.
    print('compiling')
    self.compile()
    # We then build the callback functions, distinguishing between built-in
    # Keras callbacks and custom callbacks.
    print("Building Callback functions...")
    call_list = []
    for call_def in self.prop_dict['callbacks']:
        if call_def[0] == 'tensorboard':
            call = keras.callbacks.TensorBoard(
                log_dir=os.path.join(self.prop_dict['directory'], 'logs',
                                     self.prop_dict['name']),
                histogram_freq=1,
                write_graph=True)
        elif call_def[0] == 'csv':
            call = keras.callbacks.CSVLogger(
                filename=os.path.join(self.prop_dict['directory'], 'logs',
                                      self.prop_dict['name'] + '.csv'),
                separator=',',
                append=True)
        elif call_def[0] == 'ckpt':
            call = keras.callbacks.ModelCheckpoint(
                filepath=os.path.join(self.prop_dict['directory'], 'saves',
                                      self.prop_dict['name']),
                verbose=2,
                save_weights_only=True)
        else:
            call = callbacks.callbacks_dict[call_def[0]](self,
                                                         options=call_def[1])
        call_list.append(call)
    batch_gen = BatchGenerator(traindir=self.prop_dict['trainset'][1],
                               city_model=self,
                               trainsetsize=self.prop_dict['trainset'][2],
                               batchsize=batch_size)
    if len(self.prop_dict['valset']) > 0:
        val_gen = BatchGenerator(traindir=self.prop_dict['valset'][1],
                                 city_model=self,
                                 trainsetsize=self.prop_dict['valset'][2],
                                 batchsize=batch_size,
                                 traindirsize=100)
        self.model.fit_generator(
            generator=batch_gen.generate_batch(option=self.prop_dict['trainset'][0]),
            steps_per_epoch=batch_gen.epoch_size,
            epochs=epochs,
            verbose=2,
            callbacks=call_list,
            validation_data=val_gen.generate_batch(option=self.prop_dict['valset'][0]),
            validation_steps=1)
    else:
        self.model.fit_generator(
            generator=batch_gen.generate_batch(option=self.prop_dict['trainset'][0]),
            steps_per_epoch=batch_gen.epoch_size,
            epochs=epochs,
            verbose=2,
            callbacks=call_list)
    if save:
        print('Saving model')
        self.save_tojson()
        self.save_net()
        self.save_net(weights_only=True)
        print('done')
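# Usage sketch for train(), assuming the model folder already describes the
# network, the train/validation sets and the callbacks in its prop_dict.
# Module name, path and hyper-parameter values are placeholders.
from cityscape_model import CityScapeModel  # assumed module name

model = CityScapeModel('models/my_city_model')
# Train for 50 epochs with batches of 4 images and save the model afterwards.
model.train(epochs=50, batch_size=4, save=True)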
class ViewOutput_3D(Callback):
    """
    Allows the user to save an output of the network as a png file every
    specified step.
    """

    def __init__(self, citymodel, options):
        """
        :param citymodel: the cityscape model.
        :param options: dictionary of the options. Can contain:

            * 'batch_interval': an int, how often we want to save an image.
              Default is 0.
            * 'num_ins': number of inputs to compute. For now, only 1 is
              relevant.
            * 'on_epoch': a bool, whether to print on every epoch or not.
              Default is True.
        """
        super(ViewOutput_3D, self).__init__()
        self.citymodel = citymodel
        self.batch_interval = 0
        self.i = 0
        self.epoch_counter = 0
        self.num_ins = 1
        self.on_epoch = True
        self.gen = None
        self.x = None
        if 'batch_interval' in options:
            self.batch_interval = options['batch_interval']
        if 'num_ins' in options:
            self.num_ins = options['num_ins']
        if 'on_epoch' in options:
            self.on_epoch = options['on_epoch']

    def on_train_begin(self, logs={}):
        # Pick the watched inputs from the validation set if one is defined,
        # otherwise from the training set.
        if len(self.citymodel.prop_dict['valset']) > 0:
            self.gen = BatchGenerator(
                traindir=self.citymodel.prop_dict['valset'][1],
                city_model=self.citymodel,
                trainsetsize=self.citymodel.prop_dict['valset'][2],
                batchsize=self.num_ins,
                traindirsize=self.citymodel.prop_dict['valset'][2])
            self.x, _ = next(
                self.gen.generate_batch_for_3D(
                    self.citymodel.prop_dict['constraints']))
        else:
            self.gen = BatchGenerator(
                traindir=self.citymodel.prop_dict['trainset'][1],
                city_model=self.citymodel,
                trainsetsize=self.citymodel.prop_dict['trainset'][2],
                batchsize=self.num_ins)
            self.x, _ = next(
                self.gen.generate_batch_for_3D(
                    self.citymodel.prop_dict['constraints']))
        # Save the RGB channels of the watched inputs once, for comparison
        # with the outputs written during training.
        for i in range(self.num_ins):
            bob = self.x[i, :, :, :]
            bob = bob[:, :, 0:3].astype('uint8')
            bob = np.squeeze(bob)
            Input = Image.fromarray(bob)
            Input.save(
                join(self.citymodel.prop_dict['directory'], 'watch',
                     self.citymodel.prop_dict['name'] + str(i) + '_input.png'))

    def on_batch_end(self, batch, logs={}):
        if self.batch_interval != 0:
            self.i = self.i + 1
            if self.i % self.batch_interval == 0:
                y = self.citymodel.model.predict_on_batch(self.x)
                y = np.argmax(y, axis=3)
                for i in range(self.num_ins):
                    Output = Image.fromarray(y[i, :, :].astype(np.uint8))
                    Output.save(
                        join(self.citymodel.prop_dict['directory'], 'watch',
                             self.citymodel.prop_dict['name'] + str(i) +
                             '_output_' + str(self.epoch_counter) + '_' +
                             str(self.i) + '_.png'))

    def on_epoch_end(self, epoch, logs={}):
        if self.on_epoch:
            y = self.citymodel.model.predict_on_batch(self.x)
            y = np.argmax(y, axis=3)
            for i in range(self.num_ins):
                Output = Image.fromarray(y[i, :, :].astype(np.uint8))
                Output.save(
                    join(self.citymodel.prop_dict['directory'], 'watch',
                         self.citymodel.prop_dict['name'] + str(i) +
                         '_output_epoch_' + str(self.epoch_counter) + '_.png'))
        self.epoch_counter += 1
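# Sketch of a prop_dict['callbacks'] entry that train() would turn into a
# ViewOutput_3D instance through callbacks.callbacks_dict. The key 'view_3D'
# is an assumption; the real key depends on how callbacks_dict is registered
# in this project. Built-in names ('csv', 'ckpt', 'tensorboard') are handled
# directly in train().
example_callbacks = [
    ('csv', {}),
    ('ckpt', {}),
    ('view_3D', {'batch_interval': 200,  # save an output every 200 batches
                 'num_ins': 1,           # only 1 watched input for now
                 'on_epoch': True}),     # also save at the end of each epoch
]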
def main():
    try:
        opts, args = getopt.gnu_getopt(sys.argv[1:], 'hav:',
                                       ['help', 'all', 'visualize='])
    except getopt.error as msg:
        print(msg)
        print(__doc__)
        sys.exit(-1)
    all_metrics = False
    visu_folder = None
    for o, a in opts:
        if o in ['-h', '--help']:
            print(__doc__)
            sys.exit(1)
        if o in ['-a', '--all']:
            all_metrics = True
        if o in ['-v', '--visualize']:
            visu_folder = a
    model_folder = args[0]
    validation_folder = args[1]
    output_folder = args[2]
    model = CityScapeModel(model_folder)
    if not isdir(output_folder):
        mkdir(output_folder)
    if visu_folder:
        if not isdir(visu_folder):
            mkdir(visu_folder)
    if all_metrics:
        # Evaluate with accuracy, global IoU and one per-category IoU metric.
        mets = ['acc', 'iou']
        for i in range(model.prop_dict['num_labs']):
            mets.append('cat-iou_' + str(i))
        model.define_metrics(*mets)
    testdirsize = dir_size(join(validation_folder, 'RGB'))
    test_gen = BatchGenerator(city_model=model,
                              traindir=validation_folder,
                              traindirsize=testdirsize,
                              trainsetsize=testdirsize,
                              batchsize=1)
    gen = test_gen.generate_batch_for_3D(model.prop_dict['constraints'])
    counter = 0
    model.compile()
    model.load_weights()
    out_dict_list = []
    for (x_test, y_test) in gen:
        if counter < testdirsize:
            line = {}
            begin_time = time.time()
            values = model.model.test_on_batch(x_test, y_test)
            end_time = time.time()
            line['loss'] = values[0]
            line['time'] = end_time - begin_time
            for i in range(1, len(values)):
                line[model.prop_dict['metrics'][i - 1]] = values[i]
            print(line)
            out_dict_list.append(line)
            if visu_folder:
                # Also save the predicted label map next to the raw metrics.
                im_name = test_gen.name_list[counter]
                y_pred = model.model.predict_on_batch(x_test)
                y_pred = np.argmax(y_pred, axis=-1)
                y_pred = y_pred.astype(dtype='uint8')
                y_pred = np.squeeze(y_pred)
                out_image = Image.fromarray(y_pred)
                out_image.save(join(visu_folder, im_name))
            counter += 1
        else:
            break
    with open(join(output_folder, 'raw_output.csv'), 'w') as f:
        writer = csv.DictWriter(f, out_dict_list[0].keys())
        writer.writeheader()
        writer.writerows(out_dict_list)
    # Average the per-image results; work on a copy so the raw lines stay intact.
    means = dict(out_dict_list[0])
    for key in means.keys():
        for i in range(1, testdirsize):
            means[key] += out_dict_list[i][key]
        means[key] /= testdirsize
    with open(join(output_folder, 'mean_output.csv'), 'w') as f:
        writer = csv.DictWriter(f, means.keys())
        writer.writeheader()
        writer.writerow(means)
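# Example invocation of this 3D evaluation script (the script name and folder
# layout are placeholders; only the -a/-v flags and the three positional
# arguments come from the getopt definition above):
#   python evaluate_3d.py -a -v results/predictions \
#       models/my_3d_model data/validation results/eval_3d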
def main():
    try:
        opts, args = getopt.gnu_getopt(sys.argv[1:], 'ha', ['help', 'all'])
    except getopt.error as msg:
        print(msg)
        print("for help, try -h or --help")
        sys.exit(1)
    all_metrics = False
    for o, a in opts:
        if o in ['-h', '--help']:
            print(__doc__)
            sys.exit(2)
        if o in ['-a', '--all']:
            all_metrics = True
    if len(args) != 3:
        print("Incorrect number of arguments, see -h or --help")
        sys.exit(1)
    model_folder = args[0]
    test_folder = args[1]
    output_folder = args[2]
    model = CityScapeModel(model_folder)
    if not isdir(output_folder):
        mkdir(output_folder)
    if all_metrics:
        # Evaluate with accuracy, global IoU and one per-category IoU metric.
        mets = ['acc', 'iou']
        for i in range(model.prop_dict['num_labs']):
            mets.append('cat-iou_' + str(i))
        model.define_metrics(*mets)
    testdirsize = len([
        name for name in listdir(test_folder)
        if isfile(join(test_folder, name)) and 'im' in name
    ])
    test_gen = BatchGenerator(traindir=test_folder,
                              city_model=model,
                              traindirsize=testdirsize,
                              trainsetsize=testdirsize,
                              batchsize=1)
    gen = test_gen.generate_batch(option=model.prop_dict['valset'][0])
    counter = 0
    model.compile()
    model.load_weights()
    out_dict_list = []
    for (x_test, y_test) in gen:
        if counter < testdirsize:
            line = {}
            begin_time = time.time()
            values = model.model.test_on_batch(x_test, y_test)
            end_time = time.time()
            line['loss'] = values[0]
            line['time'] = end_time - begin_time
            for i in range(1, len(values)):
                line[model.prop_dict['metrics'][i - 1]] = values[i]
            print(line)
            out_dict_list.append(line)
            counter += 1
        else:
            break
    with open(join(output_folder, 'raw_output.csv'), 'w') as f:
        writer = csv.DictWriter(f, out_dict_list[0].keys())
        writer.writeheader()
        writer.writerows(out_dict_list)
    # Average the per-image results; work on a copy so the raw lines stay intact.
    means = dict(out_dict_list[0])
    for key in means.keys():
        for i in range(1, testdirsize):
            means[key] += out_dict_list[i][key]
        means[key] /= testdirsize
    with open(join(output_folder, 'mean_output.csv'), 'w') as f:
        writer = csv.DictWriter(f, means.keys())
        writer.writeheader()
        writer.writerow(means)
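# Example invocation of this evaluation script (placeholder script and folder
# names; it expects exactly three positional arguments):
#   python evaluate_model.py -a models/my_city_model data/test results/test_eval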