def evaluate():
    """Segment SAX slices for each evaluation case and write per-case volumes.

    Runs the FCN segmenter twice, once per normalization scheme ('pMS' and
    'p1090'), computes systolic/diastolic volumes for every case in
    c.fcn_eval_cases, and writes one output set per scheme via
    du.write_outputs.  Python 2 code (print statement).

    NOTE(review): relies on project modules `du`, `m`, `c` and `os`/`sys`
    being in scope at module level — not visible in this chunk.
    """
    # label_map is only referenced by the commented-out optimize_w code
    # below; it is currently unused (kept to preserve behavior).
    label_map = du.get_label_map(os.path.join(c.data_kaggle, 'train.csv'))
    # Percentile-window normalization per scheme; None = no pctwise norm.
    normdict = {'p1090': (10, 90), 'pMS': None}
    for pstr in ['pMS', 'p1090']:
        # Ensemble FCN segmenter trained with this normalization scheme.
        segment_fn = m.get_segmenter_function(os.path.join(
            c.params_dir, 'fcn_v2_' + pstr), c.fcn_img_size, ensemble=True,
            version=2)
        dsets = []
        for s in range(*c.fcn_eval_cases):
            dset = du.CNN_Dataset(s)
            if len(dset.slices_ver) < 5:
                # NOTE(review): no-op — this branch does nothing. It looks
                # like it was meant to be `continue` (skip cases with too
                # few vertical slices); confirm intent before changing.
                pass
            dset.segment(
                segment_fn,
                lambda x: du.segmenter_data_transform(
                    x, rotate=None, normalize_pctwise=normdict[pstr]))
            sys_vol, dias_vol = du.calculate_areas(
                dset, dset.counts, dset.sys_time, dset.dias_time,
                end_slice_include=False)
            # Clamp systole to at least 15% of diastole (guards against
            # degenerate segmentations producing near-zero systolic volume).
            sys_vol = max(sys_vol, 0.15 * dias_vol)
            dset.sys_vol, dset.dias_vol = (sys_vol, dias_vol)
            print '#{} {} {}'.format(dset.name, sys_vol, dias_vol)
            dsets.append(dset)
            # Free image data; only the computed volumes are needed below.
            dset.unload()
        #vector_map = { int(ds.name):([1, ds.sys_vol], [1, ds.dias_vol]) for ds in dsets }
        #w_func = du.optimize_w(vector_map, label_map, 2, max_w = 8, above_below=True)
        du.write_outputs(dsets, c.output_dir, pstr)
def evaluate():
    """Segment 4-chamber (4ch) images and estimate ventricular volumes.

    For each case in c.ch4_eval_cases, segments the 4ch view with an
    ensemble FCN and approximates the volume at each time step by treating
    each image column of the segmented region as a circular disc
    (diameter = nonzero pixel count).  Writes (min, max) volume per case —
    i.e. systolic/diastolic estimates — to ch4_volumes_map.csv.
    Python 2 code (print statement, xrange, iteritems).

    NOTE(review): relies on project modules `du`, `m`, `c` plus `np`/`os`
    at module level — not visible in this chunk.
    """
    ch4_seg_fn = m.get_segmenter_function(
        os.path.join(c.params_dir, 'ch4seg_v2'), c.fcn_img_size,
        ensemble=True, version=2)
    # Unused here (kept to preserve behavior); labels are not needed for
    # writing the raw volume map.
    label_map = du.get_label_map(os.path.join(c.data_kaggle, 'train.csv'))
    # do 4ch segmentation and calculate volume as if it were circular
    calc_map = {}
    print 'segmenting 4ch images and calculating volumes'
    for s in xrange(*c.ch4_eval_cases):
        if s % 10 == 0:
            print 'processing example {}'.format(s)
        # SAX images not needed for the 4ch-only pass.
        dset = du.CNN_Dataset(s, load_sax_images=False)
        if dset.ch4_images is not None:
            dset.segment_ch4(
                ch4_seg_fn,
                lambda x: du.segmenter_data_transform(x, rotate=None))
            ch4counts = [np.count_nonzero(ch4s_) for ch4s_ in dset.ch4seg]  # count for each time
            # Only compute volumes when the segmenter found anything at all.
            if sum(ch4counts) > 0:
                volumes = np.empty(dset.ch4seg.shape[0])
                for t in xrange(dset.ch4seg.shape[0]):
                    # Per-column diameters of the segmented region.
                    # NOTE(review): the loop bound uses shape[0] (rows)
                    # while indexing columns i — presumably the masks are
                    # square (c.fcn_img_size); confirm.
                    diams = np.array([
                        np.count_nonzero(dset.ch4seg[t, :, i])
                        for i in xrange(dset.ch4seg[t].shape[0])])
                    # Sum of disc volumes (pi*d^2/4 per unit column),
                    # scaled by the cubed mm-per-pixel factor, in mL.
                    volumes[t] = sum((dset.ch4_line_mult ** 3) * np.pi
                                     * d ** 2 / 4 for d in diams) / 1000.
                # (min, max) over time ~ (systolic, diastolic) estimate.
                calc_map[s] = min(volumes), max(volumes)
    out_lines = ['{},{:.2f},{:.2f}\n'.format(s, calc[0], calc[1])
                 for s, calc in calc_map.iteritems()]
    with open(os.path.join(c.output_dir, 'ch4_volumes_map.csv'), 'w') as wr:
        wr.writelines(out_lines)
'''
def evaluate():
    """Segment 4-chamber (4ch) images and estimate ventricular volumes.

    Duplicate (reformatted) copy of the preceding evaluate(); it appears to
    sit inside a triple-quoted (commented-out) region of the original file.
    For each case in c.ch4_eval_cases, segments the 4ch view and
    approximates volume per time step by treating each image column of the
    segmented region as a circular disc.  Writes (min, max) volume per case
    to ch4_volumes_map.csv.  Python 2 code.

    NOTE(review): relies on project modules `du`, `m`, `c` plus `np`/`os`
    at module level — not visible in this chunk.
    """
    ch4_seg_fn = m.get_segmenter_function(
        os.path.join(c.params_dir, 'ch4seg_v2'), c.fcn_img_size,
        ensemble=True, version=2)
    # Unused here (kept to preserve behavior).
    label_map = du.get_label_map(os.path.join(c.data_kaggle, 'train.csv'))
    # do 4ch segmentation and calculate volume as if it were circular
    calc_map = {}
    print 'segmenting 4ch images and calculating volumes'
    for s in xrange(*c.ch4_eval_cases):
        if s % 10 == 0:
            print 'processing example {}'.format(s)
        dset = du.CNN_Dataset(s, load_sax_images=False)
        if dset.ch4_images is not None:
            dset.segment_ch4(
                ch4_seg_fn,
                lambda x: du.segmenter_data_transform(x, rotate=None))
            ch4counts = [np.count_nonzero(ch4s_) for ch4s_ in dset.ch4seg]  # count for each time
            if sum(ch4counts) > 0:
                volumes = np.empty(dset.ch4seg.shape[0])
                for t in xrange(dset.ch4seg.shape[0]):
                    # NOTE(review): loop bound uses shape[0] (rows) while
                    # indexing columns i — presumably square masks; confirm.
                    diams = np.array([
                        np.count_nonzero(dset.ch4seg[t, :, i])
                        for i in xrange(dset.ch4seg[t].shape[0])
                    ])
                    volumes[t] = sum((dset.ch4_line_mult**3) * np.pi * d**2 / 4
                                     for d in diams) / 1000.
                calc_map[s] = min(volumes), max(volumes)
    out_lines = [
        '{},{:.2f},{:.2f}\n'.format(s, calc[0], calc[1])
        for s, calc in calc_map.iteritems()
    ]
    with open(os.path.join(c.output_dir, 'ch4_volumes_map.csv'), 'w') as wr:
        wr.writelines(out_lines)
'''
np.random.seed(2345) #compare with 1 (10,95) if train_CV: segment_fn = m.get_segmenter_function(c.params_dir, c.fcn_img_size, NCV=5, version=version, param_file_key=vvv) else: segment_fn = m.get_segmenter_function( c.params_dir + '/fcn_v{}_{}_f5.npz'.format(version, vvv), c.fcn_img_size, NCV=False, version=version) label_map = du.get_label_map(os.path.join(c.data_kaggle, 'train.csv')) dsets = [] for s in range(M, N + 1): start = time.time() dset = du.CNN_Dataset(s) if len(dset.slices_ver) < 3: pass dset.segment(segment_fn, segment_transform = lambda x:du.segmenter_data_transform(x, rotate=None, normalize_pctwise=pct_norm),\ segment_transform2 = lambda x:du.segmenter_data_transform(x, rotate=None, normalize_pctwise=pct_norm2)) #plt.imshow(dset.counts) #plt.show() sys_vol, dias_vol = dset.calculate_areas() print '{} - #{} s={} t={} {:0.2f} sec, {} {}'.format(
# --- SAX segmentation/evaluation fragment (Python 2 script code) ---
# Reformatted variant of the preceding fragment, with normalization
# windows defined up front.  Free names (noc, train_CV, version, vvv,
# M, N) are presumably defined earlier in the full file; confirm.
# Percentile windows for the two normalization passes.
pct_norm = (10, 90)
pct_norm2 = (20, 99)
generate_contours = 0
if noc == "auto":
    # NOTE(review): dead assignment — generate_contours is already 0, so
    # this branch has no effect.  Possibly a leftover toggle; confirm.
    generate_contours = 0
np.random.seed(2345)
# compare with 1 (10,95)
if train_CV:
    # Cross-validated segmenter: 5 folds, selected by param_file_key.
    segment_fn = m.get_segmenter_function(
        c.params_dir, c.fcn_img_size, NCV=5, version=version,
        param_file_key=vvv)
else:
    # Single fixed parameter file, no cross-validation.
    segment_fn = m.get_segmenter_function(
        c.params_dir + "/fcn_v{}_{}_f5.npz".format(version, vvv),
        c.fcn_img_size, NCV=False, version=version
    )
label_map = du.get_label_map(os.path.join(c.data_kaggle, "train.csv"))
dsets = []
for s in range(M, N + 1):
    start = time.time()
    dset = du.CNN_Dataset(s)
    if len(dset.slices_ver) < 3:
        # NOTE(review): no-op — does nothing; looks like it was meant to
        # be `continue` (skip cases with too few slices).  Confirm intent.
        pass
    # Two transforms with different percentile normalizations.
    dset.segment(
        segment_fn,
        segment_transform=lambda x: du.segmenter_data_transform(
            x, rotate=None, normalize_pctwise=pct_norm),
        segment_transform2=lambda x: du.segmenter_data_transform(
            x, rotate=None, normalize_pctwise=pct_norm2),
    )
    # plt.imshow(dset.counts)
    # plt.show()