def main(_):
    """Train a KittiBox model from the hypes file given via --hypes.

    Side effects: mutates TV_DIR_RUNS in os.environ, creates the training
    folder on disk, and runs the full training loop.
    """
    utils.set_gpus_to_use()

    # Fail early with an actionable message if the TensorVision submodule
    # has not been checked out.
    try:
        import tensorvision.train
    except ImportError:
        logging.error("Could not import the submodules.")
        logging.error("Please execute:"
                      "'git submodule update --init --recursive'")
        exit(1)

    # Guard against a missing --hypes flag; open(None) would otherwise
    # raise an opaque TypeError (matches the sibling entry points).
    if tf.app.flags.FLAGS.hypes is None:
        logging.error("No hype file is given.")
        logging.info("Usage: python train.py --hypes hypes/KittiBox.json")
        exit(1)

    with open(tf.app.flags.FLAGS.hypes, 'r') as f:
        logging.info("f: %s", f)
        hypes = json.load(f)

    # utils.load_plugins()  # intentionally disabled in this variant

    # Keep all KittiBox runs in their own subdirectory of TV_DIR_RUNS.
    if 'TV_DIR_RUNS' in os.environ:
        os.environ['TV_DIR_RUNS'] = os.path.join(os.environ['TV_DIR_RUNS'],
                                                 'KittiBox')
    utils.set_dirs(hypes, tf.app.flags.FLAGS.hypes)
    utils._add_paths_to_sys(hypes)

    logging.info("Initialize training folder")
    train.initialize_training_folder(hypes)
    # train.maybe_download_and_extract(hypes)  # intentionally disabled
    logging.info("Start training")
    train.do_training(hypes)
def main(_):
    """Entry point: load hypes, apply --mod overrides, launch finetuning."""
    utils.set_gpus_to_use()

    if tf.app.flags.FLAGS.hypes is None:
        logging.error("No hype file is given.")
        logging.info("Usage: python train.py --hypes hypes/KittiClass.json")
        exit(1)

    with open(tf.app.flags.FLAGS.hypes, 'r') as hypes_fh:
        logging.info("f: %s", hypes_fh)
        hypes = commentjson.load(hypes_fh)

    utils.load_plugins()

    # --mod carries a Python dict literal of hypes overrides.
    if tf.app.flags.FLAGS.mod is not None:
        import ast
        overrides = ast.literal_eval(tf.app.flags.FLAGS.mod)
        dict_merge(hypes, overrides)

    # Keep KittiSeg runs in their own subdirectory of TV_DIR_RUNS.
    if 'TV_DIR_RUNS' in os.environ:
        runs_dir = os.path.join(os.environ['TV_DIR_RUNS'], 'KittiSeg')
        os.environ['TV_DIR_RUNS'] = runs_dir

    utils.set_dirs(hypes, tf.app.flags.FLAGS.hypes)
    utils._add_paths_to_sys(hypes)

    train.maybe_download_and_extract(hypes)
    logging.info("Initialize training folder")
    train.initialize_training_folder(hypes)
    logging.info("Start finetuning")
    do_finetuning(hypes)
def main(hypes_path='../config/fcn8_seg.json'):
    """Run training for an FCN8 segmentation configuration.

    Args:
        hypes_path: path to the hyper-parameter JSON file. Defaults to the
            previously hard-coded config so existing callers are unaffected.
    """
    with open(hypes_path, 'r') as f:
        logging.info("f: %s", f)
        hypes = commentjson.load(f)

    utils.set_dirs(hypes, hypes_path)
    utils._add_paths_to_sys(hypes)

    logging.info("Initialize training folder")
    train.initialize_training_folder(hypes)
    logging.info("Start training")
    print('start')
    train.do_training(hypes)
    print('end')
def main(_):
    """Train a model, importing the vendored submodules via sys.path.

    NOTE(review): the submodules are placed on sys.path directly instead of
    being wrapped in a try/except; a wrong path surfaces as a plain
    ImportError at the imports below.
    """
    utils.set_gpus_to_use()

    # Make the git submodules importable. If these imports fail, run:
    # 'git submodule update --init --recursive'
    sys.path.append("submodules/tensorflow-fcn")
    sys.path.append("submodules/TensorVision")
    import tensorvision.train
    import tensorflow_fcn.utils

    if tf.app.flags.FLAGS.hypes is None:
        logging.error("No hype file is given.")
        logging.info("Usage: python train.py --hypes hypes/KittiClass.json")
        exit(1)

    with open(tf.app.flags.FLAGS.hypes, 'r') as f:
        logging.info("f: %s", f)
        hypes = commentjson.load(f)

    utils.load_plugins()

    # --mod carries a Python dict literal of hypes overrides.
    if tf.app.flags.FLAGS.mod is not None:
        import ast
        mod_dict = ast.literal_eval(tf.app.flags.FLAGS.mod)
        dict_merge(hypes, mod_dict)

    # Keep KittiSeg runs in their own subdirectory of TV_DIR_RUNS.
    if 'TV_DIR_RUNS' in os.environ:
        os.environ['TV_DIR_RUNS'] = os.path.join(os.environ['TV_DIR_RUNS'],
                                                 'KittiSeg')
    utils.set_dirs(hypes, tf.app.flags.FLAGS.hypes)
    utils._add_paths_to_sys(hypes)

    train.maybe_download_and_extract(hypes)
    logging.info("Initialize training folder")
    train.initialize_training_folder(hypes)
    logging.info("Start training")
    train.do_training(hypes)
def main(_):
    """Load hypes, fold in CLI flag overrides, then run training."""
    utils.set_gpus_to_use()

    # Actionable error when the git submodules are missing.
    try:
        import tensorvision.train
        import tensorflow_fcn.utils
    except ImportError:
        logging.error("Could not import the submodules.")
        logging.error("Please execute:"
                      "'git submodule update --init --recursive'")
        exit(1)

    if tf.app.flags.FLAGS.hypes is None:
        logging.error("No hype file is given.")
        logging.info("Usage: python train.py --hypes hypes/KittiClass.json")
        exit(1)

    with open(tf.app.flags.FLAGS.hypes, 'r') as hypes_fh:
        logging.info("f: %s", hypes_fh)
        hypes = commentjson.load(hypes_fh)

    # Individual command-line flags override their hypes counterparts.
    hypes['dist'] = FLAGS.dist
    if FLAGS.layers:
        hypes['arch']['layers'] = FLAGS.layers
    if FLAGS.lr:
        hypes['solver']['learning_rate'] = FLAGS.lr
    if FLAGS.optimizer:
        hypes['solver']['opt'] = FLAGS.optimizer

    utils.load_plugins()

    # --mod carries a Python dict literal of hypes overrides.
    if tf.app.flags.FLAGS.mod is not None:
        import ast
        overrides = ast.literal_eval(tf.app.flags.FLAGS.mod)
        dict_merge(hypes, overrides)

    if 'TV_DIR_RUNS' in os.environ:
        os.environ['TV_DIR_RUNS'] = os.path.join(os.environ['TV_DIR_RUNS'],
                                                 'KittiSeg')
    utils.set_dirs(hypes, tf.app.flags.FLAGS.hypes)
    utils._add_paths_to_sys(hypes)

    train.maybe_download_and_extract(hypes)
    logging.info("Initialize training folder")
    train.initialize_training_folder(hypes)
    train.do_training(hypes)
def main(_):
    """Prepare training, then launch the genetic-algorithm encoder search."""
    utils.set_gpus_to_use()

    # Actionable error when the git submodules are missing.
    try:
        import tensorvision.train
        import tensorflow_fcn.utils
    except ImportError:
        logging.error("Could not import the submodules.")
        logging.error("Please execute:"
                      "'git submodule update --init --recursive'")
        exit(1)

    if tf.app.flags.FLAGS.hypes is None:
        logging.error("No hype file is given.")
        logging.info("Usage: python train.py --hypes hypes/KittiClass.json")
        exit(1)

    with open(tf.app.flags.FLAGS.hypes, 'r') as hypes_fh:
        logging.info("f: %s", hypes_fh)
        hypes = commentjson.load(hypes_fh)

    utils.load_plugins()

    # --mod carries a Python dict literal of hypes overrides.
    if tf.app.flags.FLAGS.mod is not None:
        import ast
        overrides = ast.literal_eval(tf.app.flags.FLAGS.mod)
        dict_merge(hypes, overrides)

    if 'TV_DIR_RUNS' in os.environ:
        os.environ['TV_DIR_RUNS'] = os.path.join(os.environ['TV_DIR_RUNS'],
                                                 'KittiSeg')
    utils.set_dirs(hypes, tf.app.flags.FLAGS.hypes)
    utils._add_paths_to_sys(hypes)

    train.maybe_download_and_extract(hypes)
    logging.info("Initialize training folder")
    train.initialize_training_folder(hypes)
    logging.info("Start training")

    # Remember the real encoder, then point hypes at a stub so the GA can
    # substitute candidate encoder architectures during the search.
    encoder_path = hypes['model']['architecture_file']
    hypes['model']['architecture_file'] = '../encoder/stub.py'
    hypes['ga_data'] = 'ga_data.json'
    run_genetic_algorithm(hypes, encoder_path)
def main(_):
    """Train a MediSeg model from the hypes file given via --hypes."""
    utils.set_gpus_to_use()

    # Guard against a missing --hypes flag; open(None) would otherwise
    # raise an opaque TypeError (consistent with the sibling entry points).
    if tf.app.flags.FLAGS.hypes is None:
        logging.error("No hype file is given.")
        logging.info("Usage: python train.py --hypes hypes.json")
        exit(1)

    with open(tf.app.flags.FLAGS.hypes, 'r') as f:
        logging.info("f: %s", f)
        hypes = json.load(f)

    utils.load_plugins()

    # Keep MediSeg runs in their own subdirectory of TV_DIR_RUNS.
    if 'TV_DIR_RUNS' in os.environ:
        os.environ['TV_DIR_RUNS'] = os.path.join(os.environ['TV_DIR_RUNS'],
                                                 'MediSeg')
    utils.set_dirs(hypes, tf.app.flags.FLAGS.hypes)
    utils._add_paths_to_sys(hypes)

    logging.info("Initialize training folder")
    train.initialize_training_folder(hypes)
    train.maybe_download_and_extract(hypes)
    logging.info("Start training")
    train.do_training(hypes)
def main(_):
    """Entry point: pin dirs to the SemSeg_DATA layout and train."""
    utils.set_gpus_to_use()

    # Actionable error when the git submodules are missing.
    try:
        import tensorvision.train
        import tensorflow_fcn.utils
    except ImportError:
        logging.error("Could not import the submodules.")
        logging.error("Please execute:"
                      "'git submodule update --init --recursive'")
        exit(1)

    if tf.app.flags.FLAGS.hypes is None:
        logging.error("No hype file is given.")
        logging.info("Usage: python train.py --hypes hypes/KittiClass.json")
        exit(1)

    with open(tf.app.flags.FLAGS.hypes, 'r') as hypes_fh:
        logging.info("f: %s", hypes_fh)
        hypes = commentjson.load(hypes_fh)

    utils.load_plugins()

    # --mod carries a Python dict literal of hypes overrides.
    if tf.app.flags.FLAGS.mod is not None:
        import ast
        overrides = ast.literal_eval(tf.app.flags.FLAGS.mod)
        dict_merge(hypes, overrides)

    # Force data and run directories to the fixed SemSeg_DATA layout,
    # overriding whatever is in the environment.
    os.environ["TV_DIR_DATA"] = "../../SemSeg_DATA/DATA"
    os.environ["TV_DIR_RUNS"] = "../../SemSeg_DATA/RUNS"

    utils.set_dirs(hypes, tf.app.flags.FLAGS.hypes)
    utils._add_paths_to_sys(hypes)

    train.maybe_download_and_extract(hypes)
    logging.info("Initialize training folder")
    train.initialize_training_folder(hypes)
    logging.info("Start training")
    train.do_training(hypes)
def main(_):
    """Standard KittiSeg training entry point."""
    utils.set_gpus_to_use()

    # The training code lives in git submodules; fail with an actionable
    # message if they have not been checked out.
    try:
        import tensorvision.train
        import tensorflow_fcn.utils
    except ImportError:
        logging.error("Could not import the submodules.")
        logging.error("Please execute:"
                      "'git submodule update --init --recursive'")
        exit(1)

    if tf.app.flags.FLAGS.hypes is None:
        logging.error("No hype file is given.")
        logging.info("Usage: python train.py --hypes hypes/KittiClass.json")
        exit(1)

    with open(tf.app.flags.FLAGS.hypes, 'r') as fh:
        logging.info("f: %s", fh)
        hypes = commentjson.load(fh)

    utils.load_plugins()

    # --mod carries a Python dict literal of hypes overrides.
    if tf.app.flags.FLAGS.mod is not None:
        import ast
        cli_overrides = ast.literal_eval(tf.app.flags.FLAGS.mod)
        dict_merge(hypes, cli_overrides)

    # Keep KittiSeg runs in their own subdirectory of TV_DIR_RUNS.
    if 'TV_DIR_RUNS' in os.environ:
        os.environ['TV_DIR_RUNS'] = os.path.join(os.environ['TV_DIR_RUNS'],
                                                 'KittiSeg')
    utils.set_dirs(hypes, tf.app.flags.FLAGS.hypes)
    utils._add_paths_to_sys(hypes)

    train.maybe_download_and_extract(hypes)
    logging.info("Initialize training folder")
    train.initialize_training_folder(hypes)
    logging.info("Start training")
    train.do_training(hypes)
def main(_):
    """Initialize GPUs, plugins and folders, then run TensorVision training."""
    logging.info(
        "Initializing GPUs, plugins and creating the essential folders")
    utils.set_gpus_to_use()

    if FLAGS.hypes is None:
        logging.error("No hypes are given.")
        logging.error("Usage: python train.py --hypes hypes.json")
        logging.error(" tf: tv-train --hypes hypes.json")
        exit(1)

    with open(FLAGS.hypes) as fh:
        logging.info("f: %s", fh)
        hypes = commentjson.load(fh)

    # --mod carries a Python dict literal of hypes overrides.
    if FLAGS.mod is not None:
        import ast
        overrides = ast.literal_eval(FLAGS.mod)
        dict_merge(hypes, overrides)

    logging.info("Loading plugins")
    utils.load_plugins()

    logging.info("Set dirs")
    utils.set_dirs(hypes, FLAGS.hypes)

    logging.info("Add paths to sys")
    utils._add_paths_to_sys(hypes)

    logging.info("Initialize training folder")
    train.initialize_training_folder(hypes)

    # NOTE(review): presumably clears graph state created while
    # initializing the folder before the training graph is built — confirm.
    tf.reset_default_graph()

    logging.info("Start training")
    train.do_training(hypes)
def build_united_model(meta_hypes):
    """Build the joint multi-task training graph described by meta_hypes.

    For every model named in meta_hypes['model_list'] this loads the
    model's own hypes file, prepares its training folder, builds its
    input queues and training graph, recombines the per-model losses,
    starts a TensorVision session, adds per-model inference graphs, and
    starts the input enqueuing threads.

    Returns:
        (subhypes, submodules, subgraph, tv_sess) — per-model hypes dicts,
        per-model module sets, per-model graph handles, and the shared
        TensorVision session dict.
    """
    logging.info("Initialize training folder")
    subhypes = {}
    subgraph = {}
    submodules = {}
    subqueues = {}
    subgraph['debug_ops'] = {}
    base_path = meta_hypes['dirs']['base_path']
    first_iter = True
    for model in meta_hypes['model_list']:
        # Each model has its own hypes file, resolved relative to base_path.
        subhypes_file = os.path.join(base_path, meta_hypes['models'][model])
        with open(subhypes_file, 'r') as f:
            logging.info("f: %s", f)
            subhypes[model] = json.load(f)
        hypes = subhypes[model]
        utils.set_dirs(hypes, subhypes_file)
        # All models share the meta output and data directories.
        hypes['dirs']['output_dir'] = meta_hypes['dirs']['output_dir']
        hypes['dirs']['data_dir'] = meta_hypes['dirs']['data_dir']
        # Initialize the training folder; only log for the first model.
        train.initialize_training_folder(hypes, files_dir=model,
                                         logging=first_iter)
        # Propagate the image directory back to the meta hypes.
        meta_hypes['dirs']['image_dir'] = hypes['dirs']['image_dir']
        # Load this model's modules under a per-model name postfix.
        submodules[model] = utils.load_modules_from_hypes(
            hypes, postfix="_%s" % model)
        modules = submodules[model]
        logging.info("Build %s computation Graph.", model)
        with tf.name_scope("Queues_%s" % model):
            subqueues[model] = modules['input'].create_queues(hypes, 'train')
        logging.info('Building Model: %s' % model)
        subgraph[model] = build_training_graph(hypes, subqueues[model],
                                               modules, first_iter)
        first_iter = False
    # Two models -> detection + segmentation loss recombination;
    # otherwise the three-way (det/seg/class) variant is used.
    if len(meta_hypes['models']) == 2:
        _recombine_2_losses(meta_hypes, subgraph, subhypes, submodules)
    else:
        _recombine_3_losses(meta_hypes, subgraph, subhypes, submodules)
    # The session is started from the first model's hypes.
    hypes = subhypes[meta_hypes['model_list'][0]]
    tv_sess = core.start_tv_session(hypes)
    sess = tv_sess['sess']
    for model in meta_hypes['model_list']:
        hypes = subhypes[model]
        modules = submodules[model]
        optimizer = modules['solver']  # NOTE(review): unused here
        # Build a single-image inference graph per model, reusing the
        # training variables.
        with tf.name_scope('Validation_%s' % model):
            tf.get_variable_scope().reuse_variables()
            # Placeholder fed with one image; expand to a batch of size 1.
            image_pl = tf.placeholder(tf.float32)
            image = tf.expand_dims(image_pl, 0)
            inf_out = core.build_inference_graph(hypes, modules, image=image)
            subgraph[model]['image_pl'] = image_pl
            subgraph[model]['inf_out'] = inf_out
        # Start the data load
        modules['input'].start_enqueuing_threads(hypes, subqueues[model],
                                                 'train', sess)
    # Persist the merged meta hypes next to the run output.
    target_file = os.path.join(meta_hypes['dirs']['output_dir'],
                               'hypes.json')
    with open(target_file, 'w') as outfile:
        json.dump(meta_hypes, outfile, indent=2, sort_keys=True)
    return subhypes, submodules, subgraph, tv_sess
def build_united_model(meta_hypes):
    """Build the joint multi-task training graph described by meta_hypes.

    NOTE(review): this appears to duplicate an identically named function
    elsewhere in the file — consider consolidating.

    Loads each sub-model's hypes, prepares its training folder, builds
    queues and training graphs, recombines losses, starts a TensorVision
    session, attaches per-model inference graphs, and starts enqueuing.

    Returns:
        (subhypes, submodules, subgraph, tv_sess).
    """
    logging.info("Initialize training folder")
    subhypes = {}
    subgraph = {}
    submodules = {}
    subqueues = {}
    subgraph['debug_ops'] = {}
    base_path = meta_hypes['dirs']['base_path']
    first_iter = True
    for model in meta_hypes['model_list']:
        # Each model has its own hypes file, resolved relative to base_path.
        subhypes_file = os.path.join(base_path, meta_hypes['models'][model])
        with open(subhypes_file, 'r') as f:
            logging.info("f: %s", f)
            subhypes[model] = json.load(f)
        hypes = subhypes[model]
        utils.set_dirs(hypes, subhypes_file)
        # All models share the meta output and data directories.
        hypes['dirs']['output_dir'] = meta_hypes['dirs']['output_dir']
        hypes['dirs']['data_dir'] = meta_hypes['dirs']['data_dir']
        # Initialize the training folder; only log for the first model.
        train.initialize_training_folder(hypes, files_dir=model,
                                         logging=first_iter)
        # Propagate the image directory back to the meta hypes.
        meta_hypes['dirs']['image_dir'] = hypes['dirs']['image_dir']
        # Load this model's modules under a per-model name postfix.
        submodules[model] = utils.load_modules_from_hypes(hypes,
                                                          postfix="_%s" % model)
        modules = submodules[model]
        logging.info("Build %s computation Graph.", model)
        with tf.name_scope("Queues_%s" % model):
            subqueues[model] = modules['input'].create_queues(hypes, 'train')
        logging.info('Building Model: %s' % model)
        subgraph[model] = build_training_graph(hypes, subqueues[model],
                                               modules, first_iter)
        first_iter = False
    # Two models -> two-way loss recombination, otherwise three-way.
    if len(meta_hypes['models']) == 2:
        _recombine_2_losses(meta_hypes, subgraph, subhypes, submodules)
    else:
        _recombine_3_losses(meta_hypes, subgraph, subhypes, submodules)
    # The session is started from the first model's hypes.
    hypes = subhypes[meta_hypes['model_list'][0]]
    tv_sess = core.start_tv_session(hypes)
    sess = tv_sess['sess']
    for model in meta_hypes['model_list']:
        hypes = subhypes[model]
        modules = submodules[model]
        optimizer = modules['solver']  # NOTE(review): unused here
        # Build a single-image inference graph per model, reusing the
        # training variables.
        with tf.name_scope('Validation_%s' % model):
            tf.get_variable_scope().reuse_variables()
            # Placeholder fed with one image; expand to a batch of size 1.
            image_pl = tf.placeholder(tf.float32)
            image = tf.expand_dims(image_pl, 0)
            inf_out = core.build_inference_graph(hypes, modules, image=image)
            subgraph[model]['image_pl'] = image_pl
            subgraph[model]['inf_out'] = inf_out
        # Start the data load
        modules['input'].start_enqueuing_threads(hypes, subqueues[model],
                                                 'train', sess)
    # Persist the merged meta hypes next to the run output.
    target_file = \
        os.path.join(meta_hypes['dirs']['output_dir'], 'hypes.json')
    with open(target_file, 'w') as outfile:
        json.dump(meta_hypes, outfile, indent=2, sort_keys=True)
    return subhypes, submodules, subgraph, tv_sess