def train(pkl_path):
    """Train a ProbabilisticSegmentationBP model on the cached RBO APC
    datasets and pickle the fitted model to *pkl_path*.

    Args:
        pkl_path (str): destination file path for the pickled model.

    NOTE(review): relies on module-level names ``params``, ``module_path``,
    ``apc_data``, ``load_datasets``, ``combine_datasets`` and
    ``ProbabilisticSegmentationBP`` being defined elsewhere in this file --
    confirm.  A later ``def train(dataset_path, params)`` in this file
    shadows this definition when both are at module level.
    """
    # Make pickled objects that reference the 'apc_data' module loadable.
    sys.modules['apc_data'] = apc_data

    data_path = os.path.join(module_path, 'rbo_segmentation', 'data')
    cache_path = os.path.join(data_path, 'cache')
    dataset_path = os.path.join(data_path, 'rbo_apc')

    dataset_names = (
        ["berlin_runs/" + str(i + 1) for i in range(3)] +
        ["berlin_samples", "berlin_selected"] +
        ["seattle_runs/" + str(i + 1) for i in range(5)] +
        ["seattle_test"])

    # Load from cached data, then build the combined splits.
    datasets = load_datasets(dataset_names, dataset_path, cache_path)
    datasets['berlin_runs'] = combine_datasets(
        [datasets["berlin_runs/" + str(i + 1)] for i in range(3)])
    datasets['seattle_runs'] = combine_datasets(
        [datasets["seattle_runs/" + str(i + 1)] for i in range(5)])
    datasets['training_berlin_and_seattle'] = combine_datasets(
        [datasets['berlin_selected'], datasets['berlin_runs']])

    bp = ProbabilisticSegmentationBP(**params)
    # Only the 'berlin_selected' split is actually used for fitting;
    # the combined splits above are computed but not consumed here.
    train_set = datasets['berlin_selected']
    bp.fit(train_set)

    with open(pkl_path, 'wb') as f:
        pickle.dump(bp, f)
    print("saved")
def train(dataset_path, params):
    """Train a ProbabilisticSegmentationBP model on a 2016 APC dataset
    directory and pickle the fitted model.

    Args:
        dataset_path (str): root directory scanned recursively for ``*.jpg``
            files, e.g.
            ``/home/leus/ros/indigo/src/start-jsk/jsk_apc/jsk_apc2016_common/data/tokyo_run``.
        params (dict): keyword arguments forwarded to
            ``ProbabilisticSegmentationBP``.

    NOTE(review): this redefines ``train`` -- an earlier
    ``def train(pkl_path)`` in this file is shadowed when both are defined
    at module level.  Also relies on the module-level name ``common_path``.
    """
    pkl_path = os.path.join(common_path, 'data', 'trained_segmenter_2016.pkl')

    bp = ProbabilisticSegmentationBP(**params)

    # Collect the path prefix (full path minus the '.jpg' suffix) of every
    # image under dataset_path; each prefix identifies one labeled sample.
    suffix = '.jpg'
    data_file_prefixes = []
    for dir_name, _sub_dirs, files in os.walk(dataset_path):
        for fname in files:
            if fname.endswith(suffix):
                data_file_prefixes.append(
                    os.path.join(dir_name, fname[:-len(suffix)]))
    print(data_file_prefixes)

    # Initialize an empty dataset and append one sample per prefix.
    dataset = APCDataSet(from_pkl=True)
    for file_prefix in data_file_prefixes:
        dataset.samples.append(
            APCSample(data_2016_prefix=file_prefix,
                      labeled=True, is_2016=True, infer_shelf_mask=True))

    bp.fit(dataset)
    print("done fitting")

    with open(pkl_path, 'wb') as f:
        pickle.dump(bp, f)
    print("done dumping model")