def train(dt):
    """Run the full pipeline (dataset conversion, training, export, freeze)
    using the module-level defaults d_a, overridden by the entries in dt."""
    if 'dataset_dir' not in dt:
        return "No dataset directory specified"
    # Override the default arguments with the caller-supplied ones.
    for key in dt:
        d_a[key] = dt[key]
    d_a['freeze_dict']['input_checkpoint'] = os.path.join(
        d_a['train_dir'], 'model.ckpt-' + str(d_a['max_number_of_steps']))

    start = time.time()
    # Convert the raw dataset into training/validation shards.
    train_size, val_size, num_classes = convert_data.run(
        dt['dataset_dir'], d_a['dataset_name'],
        d_a['validation_percentage'], d_a['num_shards'])

    start_train = time.time()
    train_image_classifier_gen.main(
        d_a['train_dir'], d_a['num_clones'], d_a['clone_on_cpu'],
        train_size, val_size, num_classes,
        d_a['worker_replicas'], d_a['log_every_n_steps'],
        d_a['save_interval_secs'], d_a['weight_decay'],
        d_a['optimization'], d_a['learning_rate'],
        d_a['moving_average_decay'], d_a['dataset'],
        d_a['max_number_of_steps'], d_a['checkpoint'])
    end_train = time.time()

    # Export the inference graph and freeze it with the trained weights.
    export_inference_graph.main(d_a['export'], train_size, val_size, num_classes)
    freeze_graph.main(d_a['freeze_dict'])
    end = time.time()

    train_time = end_train - start_train
    full_time = end - start
    rest = full_time - train_time
    return {'training_time': train_time, 'full_time': full_time, 'rest': rest}
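# A minimal usage sketch of train() (not part of the original module): the
# demo_train name, the dataset path, and the step override are hypothetical
# examples; d_a is assumed to already hold the remaining defaults.
def demo_train():
    timings = train({
        'dataset_dir': '/data/flowers',   # hypothetical dataset location
        'max_number_of_steps': 1000,      # hypothetical override of the default
    })
    if isinstance(timings, dict):
        print('training: %.1fs, total: %.1fs, overhead: %.1fs'
              % (timings['training_time'], timings['full_time'], timings['rest']))
    else:
        # train() returns an error string when 'dataset_dir' is missing.
        print(timings)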
    'num_shards': args.num_shards,
    'train_dir': args.train_dir,
    'num_clones': args.num_clones,
    'worker_replicas': args.worker_replicas,
    'log_every_n_steps': args.log_every_n_steps,
    'save_interval_secs': args.save_interval_secs,
    'weight_decay': args.weight_decay,
    'moving_average_decay': args.moving_average_decay,
    'max_number_of_steps': args.max_number_of_steps,
    'clone_on_cpu': args.clone_on_cpu,
    'optimization': optimization,
    'learning_rate': learning_rate,
    'dataset': dataset,
    'checkpoint': checkpoint,
    'export': export,
    'freeze_dict': freeze_dict
}
with open('default_arguments.json', 'w') as js:
    json.dump(j, js)'''

train_size, val_size, num_classes = convert_data.run(
    dataset_dir, dataset_name, validation_percentage, num_shards)
train_image_classifier_gen.main(
    train_dir, num_clones, clone_on_cpu,
    train_size, val_size, num_classes,
    worker_replicas, log_every_n_steps, save_interval_secs,
    weight_decay, optimization, learning_rate,
    moving_average_decay, dataset, max_number_of_steps, checkpoint)
export_inference_graph.main(export, train_size, val_size, num_classes)
freeze_graph.main(freeze_dict)
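# A minimal sketch (an assumption, not original code) of how the defaults
# dumped to default_arguments.json above could be read back, e.g. to build a
# dictionary like the d_a used by train(); load_default_arguments is a
# hypothetical helper name.
def load_default_arguments(path='default_arguments.json'):
    with open(path) as js:
        return json.load(js)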
def test_freeze_graph(self):
    print('test_freeze_graph')
    argv = [self.pretrained_model, self.frozen_graph_filename]
    args = freeze_graph.parse_arguments(argv)
    freeze_graph.main(args)
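# A minimal sketch (an assumption, not original code) of the unittest context
# the method above relies on: the FreezeGraphTest class name and the fixture
# paths in setUp are hypothetical stand-ins for whatever the real test
# provides for self.pretrained_model and self.frozen_graph_filename.
import unittest

class FreezeGraphTest(unittest.TestCase):
    def setUp(self):
        # Hypothetical fixture paths; the real suite presumably points at an
        # existing checkpoint and a writable output file.
        self.pretrained_model = 'train_dir/model.ckpt-1000'
        self.frozen_graph_filename = 'train_dir/frozen_graph.pb'

    def test_freeze_graph(self):
        argv = [self.pretrained_model, self.frozen_graph_filename]
        args = freeze_graph.parse_arguments(argv)
        freeze_graph.main(args)

if __name__ == '__main__':
    unittest.main()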