def script_visualizeScoreResults(test_output_file,gt_output_file,gt_data_output_file,out_file_html,rel_path,means,out_dir):
	data=np.load(gt_data_output_file);
	gt_label=np.load(gt_output_file);
	pred_label=np.load(test_output_file);
	print data.shape,gt_label.shape,pred_label.shape

	# print data.shape;
	im_paths=[];captions=[];
	correct=0;
	for im_no in range(data.shape[0]):
		data_path=os.path.join(out_dir,str(im_no)+'_data.png');
		# gt_path=os.path.join(out_dir,str(im_no)+'_gt.png');
		# pred_path=os.path.join(out_dir,str(im_no)+'_pred.png');
		# scipy.misc.imsave(data_path,reshapeMat(data[im_no],means));

		visualize.saveMatAsImage(reshapeMat(data[im_no],means)/255,data_path);
		pred_label_curr=pred_label[im_no,0];
		gt_label_curr=gt_label[im_no,0];
		# visualize.saveMatAsImage(reshapeMat(gt[im_no],means),gt_path);
		# visualize.saveMatAsImage(reshapeMat(out[im_no],means),pred_path);
		im_paths.append([data_path.replace(rel_path[0],rel_path[1])]);
		if (pred_label_curr*gt_label_curr)>=0:
			correct=correct+1;

		captions.append(['Pred '+str(pred_label_curr)+' GT '+str(gt_label_curr)]);
		# if im_no==10:
		# 	break;
	print correct
	visualize.writeHTML(out_file_html,im_paths,captions,height=224,width=224);
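
# Hedged usage sketch (not from the original source): the .npy paths below are
# hypothetical placeholders for the saved prediction scores, ground-truth labels,
# and input data; rel_path and means mirror the values used in main() further down.
def example_visualizeScoreResults():
	out_dir_example='/disk2/example/score_vis';
	util.mkdir(out_dir_example);
	script_visualizeScoreResults('/disk2/example/score_pred.npy',
		'/disk2/example/score_gt_label.npy',
		'/disk2/example/score_gt_data.npy',
		os.path.join(out_dir_example,'visualize.html'),
		['/disk2','../../../..'],
		[122,117,104],
		out_dir_example);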
# Example #2
def script_compareHashWithToyExperiment(params):
    in_file = params.in_file;
    num_hash_tables_all = params.num_hash_tables_all;
    key_type = params.key_type;
    out_file_indices = params.out_file_indices;
    out_file_pres = params.out_file_pres;
    out_file_html = params.out_file_html;
    rel_path = params.rel_path;

    [features_test,features_train,labels_test,labels_train,_,_,indices,_]=pickle.load(open(in_file,'rb'));
    visualize.saveMatAsImage(indices,out_file_indices);    
    
    hammings=[];
    for out_file_pre,num_hash_tables in zip(out_file_pres,num_hash_tables_all):
        indices_hash = getIndicesHash(features_test,features_train,num_hash_tables,key_type);
        visualize.saveMatAsImage(indices_hash,out_file_pre+'.png');    
        hamming=util.getHammingDistance(indices,indices_hash);
        pickle.dump([indices_hash,indices,hamming],open(out_file_pre+'.p','wb'));

        hammings.append(np.mean(hamming));
    
    sizes = scipy.misc.imread(out_file_indices);
    sizes = sizes.shape

    im_files_html=[];
    captions_html=[];
    for idx,out_file_pre in enumerate(out_file_pres):
        out_file_curr=out_file_pre+'.png'
        key_str=str(key_type);
        key_str=key_str.replace('<type ','').replace('>','');
        caption_curr='NN Hash. Num Hash Tables: '+str(num_hash_tables_all[idx])+' '+'Hamming Distance: '+str(hammings[idx]);
        im_files_html.append([out_file_indices.replace(rel_path[0],rel_path[1]),out_file_curr.replace(rel_path[0],rel_path[1])])
        captions_html.append(['NN cosine',caption_curr]);

    visualize.writeHTML(out_file_html,im_files_html,captions_html,sizes[0]/2,sizes[1]/2);
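
# Hedged usage sketch (not from the original source): `params` only needs the
# attributes read above, so a namedtuple stands in for it here; in_file is assumed
# to hold the pickled toy-experiment tuple unpacked above, and every path below is
# a hypothetical placeholder.
from collections import namedtuple

HashCompareParams = namedtuple('HashCompareParams', [
    'in_file', 'num_hash_tables_all', 'key_type', 'out_file_indices',
    'out_file_pres', 'out_file_html', 'rel_path'
]);

def example_compareHashWithToyExperiment():
    params = HashCompareParams(
        in_file='/disk2/example/toy_experiment.p',
        num_hash_tables_all=[1, 5, 10],
        key_type=str,
        out_file_indices='/disk2/example/indices_nn.png',
        out_file_pres=['/disk2/example/hash_' + str(n) for n in [1, 5, 10]],
        out_file_html='/disk2/example/hash_comparison.html',
        rel_path=['/disk2', '../../../..']);
    script_compareHashWithToyExperiment(params);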
# Example #3
def script_saveHashAnalysisImages(params):
    path_to_db = params.path_to_db
    class_labels_map = params.class_labels_map
    percents = params.percents
    out_file_class_pre = params.out_file_class_pre
    out_file_hash_simple = params.out_file_hash_simple
    out_file_hash_byClass = params.out_file_hash_byClass
    hashtable = params.hashtable
    inc = params.inc
    dtype = params.dtype
    # in_file = params.in_file;

    if not os.path.exists(out_file_class_pre + '.npz'):
        mani = Tube_Manipulator(path_to_db)

        mani.openSession()
        ids = mani.selectMix((Tube.class_idx_pascal, TubeHash.hash_val),
                             (TubeHash.hash_table == hashtable, ))
        mani.closeSession()

        ids = np.array(ids, dtype=dtype)
        np.savez(out_file_class_pre, ids)

    ids = np.load(out_file_class_pre + '.npz')['arr_0']
    # ids=np.load(in_file)['arr_0'];

    counts_all, class_ids_breakdown = getClassIdsCount(ids[:, 0], ids[:, 1])
    ranks = getDiscriminativeScore(counts_all)

    sort_idx = np.argsort(ranks)
    counts_all = [counts_all[idx] for idx in sort_idx]
    class_ids_breakdown = [class_ids_breakdown[idx] for idx in sort_idx]
    im_simple = getHashAnalysisIm(counts_all,
                                  class_ids_breakdown,
                                  inc=inc,
                                  colorByClass=False)
    im_byClass = getHashAnalysisIm(counts_all,
                                   class_ids_breakdown,
                                   inc=inc,
                                   colorByClass=True)

    visualize.saveMatAsImage(im_simple, out_file_hash_simple)
    visualize.saveMatAsImage(im_byClass, out_file_hash_byClass)

    counts_all_ravel = np.array([c for counts in counts_all for c in counts])
    class_ids_breakdown_ravel = np.array(
        [c for class_ids in class_ids_breakdown for c in class_ids])
    class_id_pascal, class_idx_pascal = zip(*class_labels_map)

    for class_id_idx, class_id in enumerate(class_idx_pascal):
        frequency = counts_all_ravel[class_ids_breakdown_ravel == class_id]
        out_file = out_file_class_pre + '_' + class_id_pascal[
            class_id_idx] + '.png'
        title = class_id_pascal[class_id_idx] + ' ' + str(class_id)
        cum_freq, idx_perc = getCumulativeInfo(frequency, percents)
        savePerClassCumulativeGraph(cum_freq / float(cum_freq[-1]), idx_perc,
                                    percents, out_file, title)
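
# Hedged usage sketch (not from the original source): `params` only needs the
# attributes read above; the tube database path, the (class name, class index)
# pairs implied by zip(*class_labels_map), and the output file prefixes are all
# hypothetical placeholders.
from collections import namedtuple

HashAnalysisParams = namedtuple('HashAnalysisParams', [
    'path_to_db', 'class_labels_map', 'percents', 'out_file_class_pre',
    'out_file_hash_simple', 'out_file_hash_byClass', 'hashtable', 'inc', 'dtype'
])

def example_saveHashAnalysisImages():
    params = HashAnalysisParams(
        path_to_db='/disk2/example/tubes.db',
        class_labels_map=[('aeroplane', 0), ('person', 14)],
        percents=[0.5, 0.9],
        out_file_class_pre='/disk2/example/hash_analysis/class_counts',
        out_file_hash_simple='/disk2/example/hash_analysis/hash_simple.png',
        out_file_hash_byClass='/disk2/example/hash_analysis/hash_byClass.png',
        hashtable=0,
        inc=5,
        dtype='int')
    script_saveHashAnalysisImages(params)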
# Example #4
def script_compareHashWithToyExperiment(params):
    in_file = params.in_file
    num_hash_tables_all = params.num_hash_tables_all
    key_type = params.key_type
    out_file_indices = params.out_file_indices
    out_file_pres = params.out_file_pres
    out_file_html = params.out_file_html
    rel_path = params.rel_path

    [
        features_test, features_train, labels_test, labels_train, _, _,
        indices, _
    ] = pickle.load(open(in_file, 'rb'))
    visualize.saveMatAsImage(indices, out_file_indices)

    hammings = []
    for out_file_pre, num_hash_tables in zip(out_file_pres,
                                             num_hash_tables_all):
        indices_hash = getIndicesHash(features_test, features_train,
                                      num_hash_tables, key_type)
        visualize.saveMatAsImage(indices_hash, out_file_pre + '.png')
        hamming = util.getHammingDistance(indices, indices_hash)
        pickle.dump([indices_hash, indices, hamming],
                    open(out_file_pre + '.p', 'wb'))

        hammings.append(np.mean(hamming))

    sizes = scipy.misc.imread(out_file_indices)
    sizes = sizes.shape

    im_files_html = []
    captions_html = []
    for idx, out_file_pre in enumerate(out_file_pres):
        out_file_curr = out_file_pre + '.png'
        key_str = str(key_type)
        key_str = key_str.replace('<type ', '').replace('>', '')
        caption_curr = 'NN Hash. Num Hash Tables: ' + str(
            num_hash_tables_all[idx]) + ' ' + 'Hamming Distance: ' + str(
                hammings[idx])
        im_files_html.append([
            out_file_indices.replace(rel_path[0], rel_path[1]),
            out_file_curr.replace(rel_path[0], rel_path[1])
        ])
        captions_html.append(['NN cosine', caption_curr])

    visualize.writeHTML(out_file_html, im_files_html, captions_html,
                        sizes[0] / 2, sizes[1] / 2)
# Example #5
def script_saveHashAnalysisImages(params):
    path_to_db = params.path_to_db;
    class_labels_map = params.class_labels_map;
    percents = params.percents;
    out_file_class_pre = params.out_file_class_pre;
    out_file_hash_simple = params.out_file_hash_simple;
    out_file_hash_byClass = params.out_file_hash_byClass;
    hashtable = params.hashtable;
    inc = params.inc;
    dtype = params.dtype;
    # in_file = params.in_file;

    if not os.path.exists(out_file_class_pre+'.npz'):
        mani=Tube_Manipulator(path_to_db);

        mani.openSession();
        ids=mani.selectMix((Tube.class_idx_pascal,TubeHash.hash_val),(TubeHash.hash_table==hashtable,));
        mani.closeSession();
        
        ids=np.array(ids,dtype=dtype);
        np.savez(out_file_class_pre,ids);

    ids=np.load(out_file_class_pre+'.npz')['arr_0'];
    # ids=np.load(in_file)['arr_0'];
    
    counts_all,class_ids_breakdown = getClassIdsCount(ids[:,0],ids[:,1]);
    ranks = getDiscriminativeScore(counts_all);

    sort_idx=np.argsort(ranks);
    counts_all=[counts_all[idx] for idx in sort_idx];
    class_ids_breakdown=[class_ids_breakdown[idx] for idx in sort_idx];
    im_simple = getHashAnalysisIm(counts_all,class_ids_breakdown,inc=inc,colorByClass=False);
    im_byClass = getHashAnalysisIm(counts_all,class_ids_breakdown,inc=inc,colorByClass=True);

    visualize.saveMatAsImage(im_simple,out_file_hash_simple)
    visualize.saveMatAsImage(im_byClass,out_file_hash_byClass)

    counts_all_ravel=np.array([c for counts in counts_all for c in counts]);
    class_ids_breakdown_ravel=np.array([c for class_ids in class_ids_breakdown for c in class_ids]);
    class_id_pascal,class_idx_pascal = zip(*class_labels_map);

    for class_id_idx,class_id in enumerate(class_idx_pascal):
        frequency = counts_all_ravel[class_ids_breakdown_ravel==class_id]
        out_file=out_file_class_pre+'_'+class_id_pascal[class_id_idx]+'.png'
        title=class_id_pascal[class_id_idx]+' '+str(class_id)        
        cum_freq,idx_perc=getCumulativeInfo(frequency,percents)
        savePerClassCumulativeGraph(cum_freq/float(cum_freq[-1]),idx_perc,percents,out_file,title)
def script_visualizeSegResults(pred_file,gt_output_file,gt_data_output_file,out_file_html,rel_path,means,out_dir):
	data=np.load(gt_data_output_file);
	gt= np.load(gt_output_file);
	out = np.load(pred_file);

	print data.shape;
	im_paths=[];captions=[];
	
	for im_no in range(data.shape[0]):
		print im_no;
		data_path=os.path.join(out_dir,str(im_no)+'_data.png');
		gt_path=os.path.join(out_dir,str(im_no)+'_gt.png');
		pred_path=os.path.join(out_dir,str(im_no)+'_pred.png');
		# scipy.misc.imsave(data_path,reshapeMat(data[im_no],means));
		visualize.saveMatAsImage(reshapeMat(data[im_no],means)/255,data_path);
		visualize.saveMatAsImage(reshapeMat(gt[im_no],means),gt_path);
		visualize.saveMatAsImage(reshapeMat(out[im_no],means),pred_path);
		im_paths.append([data_path.replace(rel_path[0],rel_path[1]),gt_path.replace(rel_path[0],rel_path[1]),pred_path.replace(rel_path[0],rel_path[1])]);
		captions.append(['im','mask_gt','mask_pred']);
		# if im_no==10:
		# 	break;

	visualize.writeHTML(out_file_html,im_paths,captions,height=224,width=224);
def main():
	# # out_dir='/disk2/marchExperiments/deep_proposals/testing_3_28_2/images';
	# # out_dir='/disk2/marchExperiments/deep_proposals/new_design/training_human/images';
	# out_dir='/disk2/aprilExperiments/dual_flow/onlyHuman/images';
	# out_dir_old='/disk2/aprilExperiments/dual_flow/onlyHuman/images_old';
	# img_pre='img';
	# ims=[file_curr for file_curr in os.listdir(out_dir) if file_curr.startswith(img_pre) and not file_curr.endswith('.npy')];
	# img_pre='img';
	# mask_pre='pred_mask';
	
	# out_file_html=os.path.join(out_dir,'visualize.html');
	# rel_path_old=['/disk2','../../../..']
	# rel_path=['/disk2','../../../../..']
	# img_paths=[];
	# captions=[];
	

	# for file_curr in ims:
	# 	print file_curr
	# 	file_curr_split=file_curr.split('_');
	# 	file_curr_pre='_'.join(file_curr_split[:4]);
	# 	# print file_curr_pre;
	# 	# print file_curr_split
	# 	# print file_curr;
	# 	im_path_old=[os.path.join(out_dir_old,x) for x in os.listdir(out_dir_old) if x.startswith(file_curr_pre)][0];
	# 	im_path = os.path.join(out_dir,file_curr);
	# 	# print im_path_old;
	# 	# print os.path.exists(im_path_old);
	# 	# raw_input();
	# 	mask_path=im_path.replace(img_pre,mask_pre);
	# 	mask_path_old=im_path_old.replace(img_pre,mask_pre);
		
	# 	im=scipy.misc.imread(mask_path);
	# 	mask_path=mask_path+'_heat.png'
	# 	visualize.saveMatAsImage(im[:,:,0],mask_path)
	# 	im=scipy.misc.imread(mask_path_old);
	# 	mask_path_old=mask_path_old+'_heat.png'
	# 	visualize.saveMatAsImage(im[:,:,0],mask_path_old)

	# 	img_path=im_path;
	# 	img_paths.append([img_path.replace(rel_path[0],rel_path[1]),mask_path.replace(rel_path[0],rel_path[1]),im_path_old.replace(rel_path_old[0],rel_path_old[1]),mask_path_old.replace(rel_path_old[0],rel_path_old[1])]);
	# 	captions.append([img_path[img_path.rindex('/')+1:]+' flow',mask_path[mask_path.rindex('/')+1:]+' flow',im_path_old[im_path_old.rindex('/')+1:]+' old',mask_path_old[mask_path_old.rindex('/')+1:]+' old']);

	# visualize.writeHTML(out_file_html,img_paths,captions,height=224,width=224);	


	# return

	out_dir='/disk2/aprilExperiments/headC_160/noFlow_gaussian_all/images_test';
	out_file_html=os.path.join(out_dir,'visualize.html');
	rel_path=['/disk2','../../../..']
	img_paths=[];
	captions=[];
	img_pre='img';
	mask_pre='pred_mask';
	imgs_all=[file_curr for file_curr in os.listdir(out_dir) if file_curr.startswith(img_pre) and not file_curr.endswith('.npy')];

	lists=[[],[],[],[]];
	caption_lists=[[],[],[],[]]
	for file_curr in imgs_all:
		file_curr_split=file_curr.split('_');
		print file_curr_split
		if file_curr_split[3]=='pos':
			if file_curr_split[4]=='correct.png':
				lists[0].append(file_curr);
				caption_lists[0].append('Positive Correct')
			else:
				lists[1].append(file_curr);
				caption_lists[1].append('Positive Wrong')
		else:
			if file_curr_split[4]=='correct.png':
				lists[2].append(file_curr);
				caption_lists[2].append('Negative Correct')
			else:
				lists[3].append(file_curr);
				caption_lists[3].append('Negative Wrong')

		# print file_curr_split
		# print file_curr;
		# raw_input();

	lists=[file_curr for file_curr_list in lists for file_curr in file_curr_list];
	caption_lists=[file_curr for file_curr_list in caption_lists for file_curr in file_curr_list];

	for idx,file_curr in enumerate(lists):
		img_path=os.path.join(out_dir,file_curr);
		mask_path=img_path.replace(img_pre,mask_pre);
		
		# img_path_old=img_path.replace(out_dir,out_dir_old);
		# mask_path_old=mask_path.replace(out_dir,out_dir_old);

		im=scipy.misc.imread(mask_path);
		mask_path=mask_path+'_heat.png'
		# visualize.showMat(im[:,:,0]);
		visualize.saveMatAsImage(im[:,:,0],mask_path)
		# img_paths.append([img_path.replace(rel_path[0],rel_path[1]),mask_path.replace(rel_path[0],rel_path[1]),img_path_old.replace(rel_path[0],rel_path[1]),mask_path_old.replace(rel_path[0],rel_path[1])]);
		# captions.append([caption_lists[idx]+' img',caption_lists[idx]+' mask',caption_lists[idx]+'OLD img',caption_lists[idx]+'OLD mask']);

		img_paths.append([img_path.replace(rel_path[0],rel_path[1]),mask_path.replace(rel_path[0],rel_path[1])]);
		captions.append([caption_lists[idx]+' img',caption_lists[idx]+' mask']);

	visualize.writeHTML(out_file_html,img_paths,captions,height=224,width=224);	

	# 	img_row_curr=[];
	# 	img_curr=os.path.join(out_dir,'img_'+str(i)+'.png');
	# 	img_row_curr.append(img_curr.replace(rel_path[0],rel_path[1]));
	# 	for j in range(1,3):
	# 		img_curr=os.path.join(out_dir,'img_crop_'+str(j)+'_'+str(i)+'.png');
	# 		img_row_curr.append(img_curr.replace(rel_path[0],rel_path[1]));
	# 	img_paths.append(img_row_curr);
	# 	captions.append(['','','']);
	# 
	


	return

	out_dir='/disk2/marchExperiments/deep_proposals/checkNegScaling';
	out_file_html=os.path.join(out_dir,'visualize.html');
	rel_path=['/disk2','../../../..']
	img_paths=[];
	captions=[];

	for i in range(1,11):
		img_row_curr=[];
		img_curr=os.path.join(out_dir,'img_'+str(i)+'.png');
		img_row_curr.append(img_curr.replace(rel_path[0],rel_path[1]));
		for j in range(1,3):
			img_curr=os.path.join(out_dir,'img_crop_'+str(j)+'_'+str(i)+'.png');
			img_row_curr.append(img_curr.replace(rel_path[0],rel_path[1]));
		img_paths.append(img_row_curr);
		captions.append(['','','']);
	visualize.writeHTML(out_file_html,img_paths,captions);
			




	return
	text_input='/disk2/februaryExperiments/deep_proposals/positive_data.txt';
	test_output_file='/disk2/februaryExperiments/deep_proposals/model_no_seg_test.npy';
	gt_output_file='/disk2/februaryExperiments/deep_proposals/model_no_seg_gt.npy';
	gt_data_output_file='/disk2/februaryExperiments/deep_proposals/model_no_seg_gt_data.npy';
	# out_dir='/disk2/februaryExperiments/deep_proposals/model_no_score_test';
	out_dir='/disk2/marchExperiments/deep_proposals/model_no_seg_test';
	
	test_output_file='/disk2/marchExperiments/deep_proposals/debugging_nan/pred.npy';
	gt_output_file='/disk2/marchExperiments/deep_proposals/debugging_nan/label.npy';
	gt_data_output_file='/disk2/marchExperiments/deep_proposals/debugging_nan/im.npy';


	out_dir='/disk2/marchExperiments/deep_proposals/model_seg_tan_test';
	util.mkdir(out_dir);
	
	out_file_html=os.path.join(out_dir,'visualize.html');
	
	rel_path=['/disk2','../../../..'];

	means=[122,117,104]

	script_visualizeSegResults(test_output_file,gt_output_file,gt_data_output_file,out_file_html,rel_path,means,out_dir)

	# script_visualizeSegResults(gt_data_output_file)

	# script_visualizeScoreResults(test_output_file,gt_output_file,gt_data_output_file,out_file_html,rel_path,means,out_dir)

	# lines=util.readLinesFromFile(text_input);
	# lines=lines[:100];

	# out=np.load(test_output_file);
	# gt=np.load(gt_output_file);
	



	return

	out_files=[];
	gt_files=[];
	mask_files=[];

	print out.shape,gt.shape
	for im_no in range(out.shape[0]):
		print im_no
		im=out[im_no];
		im=im.reshape((im.shape[1],im.shape[2]));
		# print im.shape
		out_file=os.path.join(out_dir,str(im_no+1)+'.png');
		visualize.saveMatAsImage(im,out_file);
		out_files.append(out_file);

		im=gt[im_no];
		im=im.reshape((im.shape[1],im.shape[2]));
		# print im.shape
		out_file=os.path.join(out_dir,str(im_no+1)+'_mask.png');
		visualize.saveMatAsImage(im,out_file);
		gt_files.append(out_file);


	out_file_html=os.path.join(out_dir,'visualize.html');
	rel_path=['/disk2','../../../..'];
	im_paths=[];captions=[];
	for idx,line in enumerate(lines):
		im_path=line[:line.index(' ')];
		# mask_path=line[line.index(' '):];
		# mask=scipy.misc.imread(mask_path);

		im_path=im_path.replace(rel_path[0],rel_path[1]);
		im_paths.append([im_path,out_files[idx].replace(rel_path[0],rel_path[1]),
			gt_files[idx].replace(rel_path[0],rel_path[1])]);
		captions.append(['im','pred','gt']);

	visualize.writeHTML(out_file_html,im_paths,captions,height=224,width=224);
def main():

	# dir_flo='/disk2/aprilExperiments/flo_subset_transfer';
	# dir_model_results='/disk2/aprilExperiments/flo_subset_predictions';
	# dir_meta_im='/disk2/marchExperiments/youtube';
	# util.mkdir(dir_model_results);

	# flo_files=[file_curr for file_curr in os.listdir(dir_flo) if file_curr.endswith('.flo')];
	# img_files_all=[];

	# for file_curr in flo_files:
	# 	video_name=file_curr[:file_curr.index('.')]
	# 	img_file=os.path.join(dir_meta_im,video_name,'images_transfer',file_curr.replace('.flo','.jpg'));
	# 	img_files_all.append(img_file+' 1');

	# util.writeFile(os.path.join(dir_model_results,'test.txt'),img_files_all);

	# # /disk2/aprilExperiments/flo_subset_predictions/test.txt /home/maheenrashid/Downloads/debugging_jacob/optical_flow_prediction/examples/opticalflow/final.caffemodel 1

	# return


	dir_flo='/disk2/aprilExperiments/flo_subset_transfer';
	dir_meta_im='/disk2/marchExperiments/youtube';

	out_dir_flo_im='/disk2/aprilExperiments/flo_im';
	out_dir_tif_im='/disk2/aprilExperiments/tif_im';

	util.mkdir(out_dir_flo_im);	
	util.mkdir(out_dir_tif_im);

	out_file_html='/disk2/aprilExperiments/flo_im_visualize.html';
	rel_path_img=['/disk2','../../../..'];
	rel_path_tif=['/disk2','../../../..'];
	rel_path_flo=['/disk2','../../..'];

	flo_files=[os.path.join(dir_flo,file_curr) for file_curr in os.listdir(dir_flo) if file_curr.endswith('.flo')];
	# flo_files=flo_files[:10];
	

	img_paths_all=[];
	captions_all=[]
	for flo_file in flo_files:
		flo_just_name=flo_file[flo_file.rindex('/')+1:flo_file.rindex('.')];
		video_name=flo_just_name[:flo_just_name.index('.')];
		flo_file_np=os.path.join(out_dir_flo_im,flo_just_name+'.npy');
		if os.path.exists(flo_file_np):
			continue;
		print flo_file
		try:
			flo=util.readFlowFile(flo_file,flip=False)
		except:
			print 'ERROR';
			continue;

		
		np.save(flo_file_np,flo);

		print flo.shape
		out_flo_name_x=os.path.join(out_dir_flo_im,flo_just_name+'_x.png');
		visualize.saveMatAsImage(flo[:,:,0],out_flo_name_x);
		out_flo_name_y=os.path.join(out_dir_flo_im,flo_just_name+'_y.png');
		visualize.saveMatAsImage(flo[:,:,1],out_flo_name_y);

		jpg_name=os.path.join(dir_meta_im,video_name,'images_transfer',flo_just_name+'.jpg');
		tif_name=os.path.join(dir_meta_im,video_name,'images_transfer',flo_just_name+'.tif');

		tif=scipy.misc.imread(tif_name);
		print tif.shape,np.min(tif),np.max(tif);
		tif_just_name=flo_just_name;
		out_tif_name_x=os.path.join(out_dir_tif_im,tif_just_name+'_x.png');
		visualize.saveMatAsImage(tif[:,:,0],out_tif_name_x);
		out_tif_name_y=os.path.join(out_dir_tif_im,tif_just_name+'_y.png');
		visualize.saveMatAsImage(tif[:,:,1],out_tif_name_y);

		img_paths_all.append([jpg_name.replace(rel_path_img[0],rel_path_img[1]),out_flo_name_x.replace(rel_path_flo[0],rel_path_flo[1]),out_flo_name_y.replace(rel_path_flo[0],rel_path_flo[1]),
								out_tif_name_x.replace(rel_path_tif[0],rel_path_tif[1]),out_tif_name_y.replace(rel_path_tif[0],rel_path_tif[1])]);
		captions_all.append([flo_just_name,'x_flo','y_flo','cluster_x','cluster_y']);

	visualize.writeHTML(out_file_html,img_paths_all,captions_all,300,300);
def main():
	# out_dir='/disk2/aprilExperiments/headC_160/figuring_test/im_pred';
	# seg_file='1_1_seg.png'
	# seg_curr=np.load(os.path.join(out_dir,seg_file))[0];
	# print seg_curr[:,0];
	# seg_curr=np.load(os.path.join(out_dir,'1_2_seg.png'))[0];
	# print seg_curr[:,0];
	# # min_value=np.min(seg_curr);
	# # print min_value
	# # bin_curr=np.sum(seg_curr==min_value,axis=0);
	# # print bin_curr.shape;
	# # print bin_curr
	# # idx_emp=np.where(bin_curr==seg_curr.shape[0]);
	# # print len(idx_emp);
	# # print idx_emp
	# # print np.max(idx_emp);

	# # bin_curr=np.sum(seg_curr==0,axis=1);
	# # print bin_curr.shape;
	# # idx_emp=np.where(bin_curr==seg_curr.shape[1]);
	# # print len(idx_emp);
	# # print np.max(idx_emp);
	# # print idx_emp
	

	# return
	im_path='/disk2/ms_coco/train2014/COCO_train2014_000000460565.jpg';

	out_dir='/disk2/aprilExperiments/headC_160/figuring_test/im_pred_score';
	im_size_org=(427,640);
	score_list=getScoresList(out_dir);

	# out_dir_new='/disk2/aprilExperiments/headC_160/figuring_test/im_pred_score';
	score_all=np.load(os.path.join(out_dir,'all_score.npy'))[0][0];
	print score_all.shape

	for i in range(score_all.shape[0]):
		for j in range(score_all.shape[1]):
			score_bef=score_list[(i+1,j+1)];
			score_now=score_all[i,j];
			print score_bef,score_now,i,j
			print np.abs(score_bef-score_now)
			assert np.abs(score_bef-score_now)<0.00001;

	visualize.saveMatAsImage(score_all,os.path.join(out_dir,'new_mat.png'))


	return
	scores=score_list.values();
		
	idx_max=np.argmax(scores);

	idx_sort=np.argsort(scores)[::-1];
	# print idx_sort
	# return
	# [::-1];
	# print idx_sort[0],idx_max
	print scores
	idx_max=idx_sort[19];
	print scores[idx_max]

	print idx_max,scores[idx_max];
	# return
	keys=score_list.keys();
	[r,c]=zip(*keys);
	max_r=max(r);
	max_c=max(c);

	max_idx=keys[idx_max];
	
	pix_starts=[];
	good_ones=[max_idx];
	im=Image.open(im_path);

	for r_idx,c_idx in good_ones:
		print r_idx,c_idx;
		pix_start=getPixStart(r_idx-1,c_idx-1,im_size_org,max_r,max_c);
		pix_starts.append(pix_start);


		seg_curr=os.path.join(out_dir,str(r_idx)+'_'+str(c_idx)+'_seg.npy');
		seg_curr=np.load(seg_curr)[0];
		heatmap=getHeatMap(seg_curr);
		im=np.array(im);
		im=im*0.5;
		heatmap=heatmap*0.5;
		print pix_start
		# pix_start=[pix_start_curr-160 for pix_start_curr in pix_start]
		im_rel=im[pix_start[0]:pix_start[0]+160,pix_start[1]:pix_start[1]+160]
		im[pix_start[0]:pix_start[0]+160,pix_start[1]:pix_start[1]+160]=im_rel+heatmap[:min(heatmap.shape[0],im_rel.shape[0]),:min(heatmap.shape[1],im_rel.shape[1])];
	

	im=Image.fromarray(np.uint8(im));
	# create the draw handle on the final image so the rectangles end up in check.png
	draw = ImageDraw.Draw(im)
	for pix_start in pix_starts:
		draw.rectangle([pix_start[1],pix_start[0],pix_start[1]+160,pix_start[0]+160]);

	im.save(os.path.join(out_dir,'check.png'));




	return
	for r_idx in range(0,max_rows,9):
		row_curr=[];
		scores_row=[];
		for c_idx in range(0,max_cols,9):
			seg_curr=np.load(os.path.join(out_dir,str(r_idx+1)+'_'+str(c_idx+1)+'_seg.png'))[0];
			seg_curr[seg_curr<0]=0;	
			score_curr=np.load(os.path.join(out_dir,str(r_idx+1)+'_'+str(c_idx+1)+'_score.png'))[0];
			print seg_curr.shape,score_curr;
			row_curr.append(seg_curr);
			scores_row.append(score_curr);
		
		scores.append(scores_row);
		
		row_curr=np.hstack(tuple(row_curr));
		if r_idx==0:
			img_yet=row_curr;
		else:
			img_yet=np.vstack((img_yet,row_curr));

	print img_yet.shape

	visualize.saveMatAsImage(img_yet,os.path.join(out_dir,'full_img.png'))
# Example #10
def main():
	in_dir='/disk2/aprilExperiments/testing_neg_torch';
	# check_dir=os.path.join(in_dir,'check_crops_invalid');
	# out_file_html='visualize_crops.html';
	visualize.writeHTMLForFolder(in_dir,'.png',200,200);

	return

	in_dir='/disk2/aprilExperiments/testing_neg_fixed_test';
	img_path=os.path.join(in_dir,'1.png');
	crop_path_pos=img_path.replace('.png','_crop_pos.npy');
	crop_path_neg=img_path.replace('.png','_crop_neg.npy');

	bbox_path=img_path.replace('.png','_bbox.npy');

	im=scipy.misc.imread(img_path);
	bbox=np.load(bbox_path);
	crop_box_pos=np.load(crop_path_pos);
	crop_box_neg=np.load(crop_path_neg);

	# crop_box_pos=np.array(crop_path_pos,dty);
	# crop_box_neg=np.array(crop_path_neg,dty);


	out_file_neg_box=img_path.replace('.png','_box_neg.png');
	out_file_pos_box=img_path.replace('.png','_box_pos.png');
	im=Image.open(img_path);
	# draw=Image.ImageDraw(im);

	# draw = ImageDraw.Draw(im)
	# for crop_box in crop_box_neg:
	# 	# draw.rectangle([(bbox[box_no,0],bbox[box_no,1]),(bbox[box_no,0]+bbox[box_no,2],bbox[box_no,1]+bbox[box_no,3])],outline=(255,255,255))
	# 	draw.rectangle([(crop_box[0],crop_box[1]),(crop_box[2],crop_box[3])],outline=(0,0,255))

	# box_no=0;
	# draw.rectangle([(bbox[box_no,0],bbox[box_no,1]),(bbox[box_no,0]+bbox[box_no,2],bbox[box_no,1]+bbox[box_no,3])],outline=(255,255,255))
	# del draw

	# print out_file_neg_box
	# im.save(out_file_neg_box, "PNG");

	

	
	to_plot=[];
	print crop_box_pos.shape

	# min_x_req=bbox[0,0]-224;
	# min_y_req=bbox[0,1]-224;
	# max_x_req=bbox[0,0]+bbox[0,2]+224;
	# max_y_req=bbox[0,1]+bbox[0,3]+224;

	center_x=bbox[0,0]+bbox[0,2]/2;
	center_y=bbox[0,1]+bbox[0,3]/2;
	min_x_req=center_x-224;
	min_y_req=center_y-224;
	max_x_req=center_x+224;
	max_y_req=center_y+224;


	for idx_crop_box_curr,crop_box_curr in enumerate(crop_box_pos):
		# if idx_crop_box_curr%100==0:
		# 	print idx_crop_box_curr;
		[min_x,min_y,max_x,max_y]=list(crop_box_curr);
		if min_x>=min_x_req and min_y>=min_y_req and max_x<=max_x_req and max_y<=max_y_req:
			to_plot.append(idx_crop_box_curr);


	print len(to_plot)

	# draw = ImageDraw.Draw(im);
	crop_check_dir=os.path.join(in_dir,'check_crops_invalid');
	util.mkdir(crop_check_dir);
	print crop_check_dir;
	# return
	im_np=np.array(im);

	for idx_idx_box,crop_box in enumerate(crop_box_neg):
		if idx_idx_box%1000==0:
			print idx_idx_box;
		im_curr=im_np[crop_box[1]:crop_box[3],crop_box[0]:crop_box[2],:];
		out_file=os.path.join(crop_check_dir,str(idx_idx_box)+'.png');
		scipy.misc.imsave(out_file,im_curr);

	# 	draw.rectangle([(crop_box[0],crop_box[1]),(crop_box[2],crop_box[3])],outline=(0,0,255))

	# box_no=0;
	# draw.rectangle([(bbox[box_no,0],bbox[box_no,1]),(bbox[box_no,0]+bbox[box_no,2],bbox[box_no,1]+bbox[box_no,3])],outline=(255,255,255))
	# del draw
	
	# print out_file_pos_box
	# im.save(out_file_pos_box, "PNG");


	return
	im=scipy.misc.imread(img_path);
	bbox=np.load(bbox_path);
	crop_box_pos=np.load(crop_path_pos);
	crop_box_neg=np.load(crop_path_neg);

	print bbox.shape
	ys=bbox[:,1];
	print ys.shape
	print np.argmax(ys);
	print bbox[np.argmax(ys)]

	# return
	crop_box_pos=np.array(crop_box_pos,dtype='int');
	crop_box_neg=np.array(crop_box_neg,dtype='int');
	
	print crop_box_pos.shape
	print crop_box_neg.shape
	new_im=np.zeros((im.shape[0],im.shape[1]));
	for crop_box_curr in crop_box_neg:
		# print crop_box_curr
		new_im[crop_box_curr[1]:crop_box_curr[3],crop_box_curr[0]:crop_box_curr[2]]=new_im[crop_box_curr[1]:crop_box_curr[3],crop_box_curr[0]:crop_box_curr[2]]+1;

	im_new=new_im;
	visualize.saveMatAsImage(im_new,os.path.join(in_dir,'heat_map_neg.png'));

	new_im=np.zeros((im.shape[0],im.shape[1]));
	for crop_box_curr in crop_box_pos:
		# print crop_box_curr
		new_im[crop_box_curr[1]:crop_box_curr[3],crop_box_curr[0]:crop_box_curr[2]]=new_im[crop_box_curr[1]:crop_box_curr[3],crop_box_curr[0]:crop_box_curr[2]]+1;

	im_new=new_im;
	print im_new.shape
	visualize.saveMatAsImage(im_new,os.path.join(in_dir,'heat_map_pos.png'));





	

	return
	script_makeNegImages();
	return

	in_dir='/disk2/aprilExperiments/testing_neg';
	img_path=os.path.join(in_dir,'71.png');
	crop_path=img_path.replace('.png','_crop.npy');
	bbox_path=img_path.replace('.png','_bbox.npy');

	max_dim=128;
	tolerance=32;

	bbox=np.load(bbox_path);
	crop_box=np.load(crop_path);
	im = Image.open(img_path);
	
	box_to_plot,scale,new_tolerance=getCorrespondingTolerance(bbox[0],max_dim,tolerance);

	# print (center_box,center_crop)

	center_box=[bbox[0,0]+bbox[0,2]/2.0,bbox[0,1]+bbox[0,3]/2.0];
	center_crop=[crop_box[0]+(crop_box[2]-crop_box[0])/2.0,crop_box[1]+(crop_box[3]-crop_box[1])/2.0];
	print center_box,center_crop
	dist_centers=np.sqrt(np.sum(np.power(np.array(center_box)-np.array(center_crop),2)));
	print (new_tolerance,dist_centers)

	if dist_centers<new_tolerance:
		scaleTest(bbox[0],crop_box,[0.5,2]);
def main():

    dir_meta='/disk2/aprilExperiments/deep_proposals/flow_all_humans/';
    dir_other_images='/disk2/aprilExperiments/deep_proposals/flow_neg/flo_subset_for_pos_cropped';
    dir_pos=os.path.join(dir_meta,'results');
    out_file_match=os.path.join(dir_meta,'match.txt');

    dir_curr='/disk2/aprilExperiments/deep_proposals/flow_neg_subset_debug/results';
    dir_small_im=dir_curr+'_images'
    dir_full_crops='/disk2/aprilExperiments/deep_proposals/flow_neg/flo_subset_for_pos_cropped'

    list_files=[os.path.join(dir_curr,file_curr) for file_curr in os.listdir(dir_curr) if file_curr.endswith('.h5')]
    img_files=[];
    h5_names=[];
    for list_file in list_files:
        img_file=util.readLinesFromFile(list_file.replace('.h5','.txt'))[0].strip();

        h5_names.append(list_file[list_file.rindex('/')+1:list_file.rindex('.')]);
        
        img_file=img_file[img_file.rindex('/')+1:];
        img_files.append(img_file);


    # h5_names_sorted=[int(name) for name in h5_names];
    # h5_names_sorted.sort();



    out_file_html=os.path.join(dir_full_crops,'comparison.html');
    imgs_full_crops=[file_curr for file_curr in os.listdir(dir_full_crops) if file_curr.endswith('.png')];

    img_paths=[];
    captions=[];
    rel_path=['/disk2','../../../..'];
    for img_file in imgs_full_crops:
        img_file_full=os.path.join(dir_full_crops,img_file);
        print img_files[0],img_file
        
        # h5_name_rel=int(h5_names[img_files.index(img_file)])
        img_file_small=os.path.join(dir_small_im,h5_names[img_files.index(img_file)]+'.jpg');



        img_paths.append([img_file_full.replace(rel_path[0],rel_path[1]),img_file_small.replace(rel_path[0],rel_path[1])]);
        captions.append(['Crop full flow','Flow on Cropped']);
    util.writeHTML(out_file_html,img_paths,captions,240,240);





    # # util.mkdir(out_dir_matches);
    # # writeh5ImgFile(dir_pos,out_file_match);
    # img_files_to_find=[file_curr for file_curr in os.listdir(dir_other_images) if file_curr.endswith('.png')];
    # lines=util.readLinesFromFile(out_file_match);
    # just_imgs=[line[line.rindex('/')+1:] for line in lines];
    # idx_matches=[];
    # # idx_matches=[just_imgs.index(file_curr) for file_curr in img_files_to_find];
    # # h5_files=[lines[idx_match][:lines[idx_match].index(' ')] for idx_match in idx_matches];
    # # print h5_files[0],len(h5_files);
    # # for file_curr in h5_files:
    # for img_file_to_find in img_files_to_find:
    #     if img_file_to_find in just_imgs:
    #         idx_match=just_imgs.index(img_file_to_find)
    #     else:
    #         continue;
    #     file_curr=lines[idx_match][:lines[idx_match].index(' ')]
    #     out_file_curr=os.path.join(out_dir_matches,file_curr[file_curr.rindex('/')+1:]);
    #     shutil.copyfile(file_curr,out_file_curr);

    #     file_curr=file_curr.replace('.h5','.txt');
    #     out_file_curr=out_file_curr.replace('.h5','.txt');
        
    #     shutil.copyfile(file_curr,out_file_curr);

    # # for file_curr in img_files_to_find:
    # #     idx_match=lines.index(file_curr);
    # #     idx_matches.append(idx_match)

    # return
    # dir_curr='/disk2/aprilExperiments/deep_proposals/flow_neg/flo_subset_for_pos_cropped';
    # visualize.writeHTMLForFolder(dir_curr,'.jpg');

    return    
    dir_meta='/disk2/aprilExperiments/deep_proposals/flow_neg/';
    dir_neg=os.path.join(dir_meta,'results');
    out_file_rec=os.path.join(dir_meta,'ims_to_analyze.npz')
    out_file_match=os.path.join(dir_meta,'match.txt');

    lines=util.readLinesFromFile('/disk2/aprilExperiments/deep_proposals/positives_person.txt');

    clusters_file='/home/maheenrashid/Downloads/debugging_jacob/optical_flow_prediction_test/examples/opticalflow/clusters.mat';

    arrs=np.load(out_file_rec)
    negs=arrs['negs'];
    h5_files=[line[:line.index(' ')] for line in negs];
    dir_flo_im=os.path.join(dir_meta,'flo_subset_for_pos');
    util.mkdir(dir_flo_im);
    replace_paths=['','']; 

    srf.script_saveFloAsNpPred(clusters_file,h5_files,dir_flo_im,replace_paths)

    return
    print lines[0];
    imgs=[line[:line.index(' ')] for line in lines];

    num_to_keep=100;
    random.shuffle(imgs);
    imgs=imgs[:num_to_keep];
    imgs_neg=[img[img.rindex('/')+1:img.rindex('_')] for img in imgs];
    print imgs_neg[0]

    lines_neg=util.readLinesFromFile(out_file_match);
    print 'lines_neg',len(lines_neg);
    print lines_neg[0];
    rel_imgs=[];
    for idx_lines,line in enumerate(lines_neg):
        # if idx_lines%100==0:
        #     print idx_lines;
        rel_img_part=line[line.rindex(' ')+1:];
        rel_img_part=rel_img_part[rel_img_part.rindex('/')+1:rel_img_part.rindex('.')];
        rel_imgs.append(rel_img_part);
    print len(rel_imgs);
    print rel_imgs[0];
    
    idx_rel=[];
    paths_rel=[];
    for idx_curr,img_curr in enumerate(imgs_neg):
        print idx_curr
        idx=rel_imgs.index(img_curr);
        idx_rel.append(idx);
        paths_rel.append(lines_neg[idx]);

    imgs=np.array(imgs);
    paths_rel=np.array(paths_rel);
    np.savez(out_file_rec,pos=imgs,negs=paths_rel);


        # lines=[];
    # h5_files=[os.path.join(dir_neg,file_curr) for file_curr in os.listdir(dir_neg) if file_curr.endswith('.h5')];
    # print len(h5_files)
    # for idx_file_curr,file_curr in enumerate(h5_files):
    #     if idx_file_curr%100==0:
    #         print idx_file_curr
    #     img_file=util.readLinesFromFile(file_curr.replace('.h5','.txt'))[0].strip();
    #     # print file_curr,img_file
    #     lines.append(file_curr+' '+img_file);

    # util.writeFile(out_file_match,lines);


    return

    dir_meta='/disk2/aprilExperiments/flo_debug/results'
    h5_file=os.path.join(dir_meta,'0.h5');
    clusters_file='/home/maheenrashid/Downloads/debugging_jacob/optical_flow_prediction_test/examples/opticalflow/clusters.mat';
    img_file = util.readLinesFromFile(h5_file.replace('.h5','.txt'))[0].strip();
    
    with h5py.File(clusters_file,'r') as hf:
        # print hf.keys();
        C=np.array(hf.get('C'));
    C=C.T    

    flow_mat = getFlowMat(h5_file,C);
    im=scipy.misc.imread(img_file);

    flow_mat=cv2.resize(flow_mat,(im.shape[1],im.shape[0]));
    # print flow_mat.shape,im.shape
    out_file_mat=h5_file.replace('.h5','.mat');
    scipy.io.savemat(out_file_mat,{'N':flow_mat});

    out_file_im=os.path.join(dir_meta,'from_mat.png');

    string="img_file='"+img_file+"';flo_mat='"+out_file_mat+"';out_file='"+out_file_im+"';"
    print string
    
    
    return
    out_dir='/disk2/aprilExperiments/flo_all_predictions/results'
    dir_flo_im='/disk2/aprilExperiments/flo_all_predictions/flo_npy';
    util.mkdir(dir_flo_im);
    clusters_file='/home/maheenrashid/Downloads/debugging_jacob/optical_flow_prediction_test/examples/opticalflow/clusters.mat';
    vision3_path='/disk2/marchExperiments';
    hpc_path='/group/leegrp/maheen_data';
    h5_files=[os.path.join(out_dir,file_curr) for file_curr in os.listdir(out_dir) if file_curr.endswith('.h5')];
    
    img_files=[];
    for h5_file in h5_files[:100]:
        img_file=util.readLinesFromFile(h5_file.replace('.h5','.txt'))[0].strip();
        img_files.append(img_file);

    script_saveFloAsNpPred(clusters_file,h5_files,img_files,dir_flo_im)        
    

    return

    dir_pred='/disk2/aprilExperiments/flo_all_predictions'
    dir_im_meta='/disk2/marchExperiments/youtube'
    file_name='youtube_list_flo_paths.txt';
    test_file=os.path.join(dir_pred,'test.txt');

    flo_files=util.readLinesFromFile(os.path.join(dir_pred,file_name));
    print len(flo_files);
    random.shuffle(flo_files);

    im_paths=[];

    for flo_file in flo_files:
        flo_name=flo_file[flo_file.rindex('/')+1:];
        video_name=flo_name[:flo_name.index('.')];
        im_path=os.path.join(dir_im_meta,video_name,'images_transfer',flo_name[:flo_name.rindex('.')]+'.jpg');
        im_paths.append(im_path+' 1');
        # assert os.path.exists(im_path);
    
    print len(im_paths);
    print im_paths[0];

    util.writeFile(test_file,im_paths);
    print test_file;

    



    return
    dir_flo_pred='/disk2/aprilExperiments/flo_subset_predictions/pred_flo_im';
    dir_flo_gt='/disk2/aprilExperiments/flo_im';
    dir_im_meta='/disk2/marchExperiments/youtube'
    out_dir_debug='/disk2/aprilExperiments/flo_debug';


    flo_names=[file_curr for file_curr in os.listdir(dir_flo_pred) if file_curr.endswith('.npy')];

    errors=[];

    for flo_name in flo_names[:100]:
    
        video_name=flo_name[:flo_name.index('.')];
        im_path=os.path.join(dir_im_meta,video_name,'images_transfer',flo_name[:flo_name.rindex('.')]+'.jpg');        

        gt_flo_file=os.path.join(dir_flo_gt,flo_name)
        # print gt_flo_file
        gt_flo=np.load(gt_flo_file);
        # print im_path.replace('/disk2','vision3.cs.ucdavis.edu:1000');
        im=scipy.misc.imread(im_path);
        
        gt_flo_cv2=bringToImageFrame(gt_flo,im.shape);
        gt_flo_sp=bringToImageFrameSP(gt_flo,im.shape);
        tol=1.0
        error=np.sum(abs(gt_flo_sp-gt_flo_cv2)<tol)/float(gt_flo_sp.size);
        errors.append(error);

    print min(errors),max(errors),np.mean(errors);

    


    return

    out_file=os.path.join(out_dir_debug,flo_name);
    out_file_mat=os.path.join(out_dir_debug,flo_name[:flo_name.rindex('.')]+'.mat');
    scipy.io.savemat(out_file_mat,{'img':gt_flo});
    print out_file
    np.save(out_file,gt_flo);
    out_file_gt_flo=os.path.join(out_dir_debug,'flo_gt_no_rescale.png');
    print out_file_gt_flo


    return
    for flo_name in flo_names[:1]:
    
        video_name=flo_name[:flo_name.index('.')];
        im_path=os.path.join(dir_im_meta,video_name,'images_transfer',flo_name[:flo_name.rindex('.')]+'.jpg');
        im=scipy.misc.imread(im_path);
        # print im.shape
        gt_flo=np.load(os.path.join(dir_flo_gt,flo_name));
        pred_flo=np.load(os.path.join(dir_flo_pred,flo_name));
        # print gt_flo.shape
        # print pred_flo.shape

        # pred_flo=bringToImageFrame(pred_flo,im.shape);
        mag_pred_flo_bef=np.power(np.power(pred_flo[:,:,0],2)+np.power(pred_flo[:,:,1],2),0.5)
        pred_flo=cv2.resize(pred_flo,(im.shape[1],im.shape[0]));
        mag_pred_flo_aft=np.power(np.power(pred_flo[:,:,0],2)+np.power(pred_flo[:,:,1],2),0.5)

        mag_gt_flo_bef=np.power(np.power(gt_flo[:,:,0],2)+np.power(gt_flo[:,:,1],2),0.5)
        gt_flo=bringToImageFrame(gt_flo,im.shape);
        mag_gt_flo_aft=np.power(np.power(gt_flo[:,:,0],2)+np.power(gt_flo[:,:,1],2),0.5)

        print pred_flo.shape,gt_flo.shape
        # pred_flo = cv2.resize(pred_flo, (im.shape[1],im.shape[0]))
        # gt_flo = cv2.resize(gt_flo, (im.shape[1],im.shape[0]));

        # pred_flo=makeUnitMag(pred_flo);
        # gt_flo=makeUnitMag(gt_flo);

        pred_values=[np.sort(pred_flo[:,:,0].ravel()),np.sort(pred_flo[:,:,1].ravel())]
        
        gt_values=[np.sort(gt_flo[:,:,0].ravel()),np.sort(gt_flo[:,:,1].ravel())]

        # print np.max(np.power(np.power(pred_values[0],2)+np.power(pred_values[1],2),0.5))
        # print np.max(np.power(np.power(gt_values[0],2)+np.power(gt_values[1],2),0.5))

        # print np.min(pred_values[0]),np.min(gt_values[0]);
        # print np.min(pred_values[1]),np.min(gt_values[1]);

        # print gt_values[0].shape,gt_values[1].shape

        
        util.mkdir(out_dir_debug);

        fig=plt.figure();
        ax1 = fig.add_subplot(221)
        ax1.plot(np.sort(mag_pred_flo_bef.ravel()));

        ax2 = fig.add_subplot(222)
        ax2.plot(np.sort(mag_pred_flo_aft.ravel()))

        ax3 = fig.add_subplot(223)
        ax3.plot(np.sort(mag_gt_flo_bef.ravel()))

        ax4 = fig.add_subplot(224)
        ax4.plot(np.sort(mag_gt_flo_aft.ravel()))


        plt.tight_layout()
        plt.savefig(os.path.join(out_dir_debug,'ranges.png'));




    return

    dir_flo_org='/disk2/aprilExperiments/flo_im';
    dir_flo_pred='/disk2/aprilExperiments/flo_subset_predictions/pred_flo_im';
    dir_im_meta='/disk2/marchExperiments/youtube'
    im_names=[file_curr[:file_curr.rindex('_')] for file_curr in os.listdir(dir_flo_org) if file_curr.endswith('_x.png')];
    
    out_file_html='/disk2/aprilExperiments/flo_subset_predictions/visualizeFlosComparison.html';
    img_paths_all=[];
    captions_all=[];
    for im_name in im_names:
        video_name=im_name[:im_name.index('.')];
        jpg_file=os.path.join(dir_im_meta,video_name,'images_transfer',im_name+'.jpg');

        x_flo_org=os.path.join(dir_flo_org,im_name+'_x.png');
        y_flo_org=os.path.join(dir_flo_org,im_name+'_y.png');
        
        x_flo_pred=os.path.join(dir_flo_pred,im_name+'_x.png');
        y_flo_pred=os.path.join(dir_flo_pred,im_name+'_y.png');

        row=[makeRelPath(jpg_file),makeRelPath(x_flo_org),makeRelPath(y_flo_org),makeRelPath(x_flo_pred),makeRelPath(y_flo_pred)];
        captions=['im','org_x','org_y','pred_x','pred_y'];
        img_paths_all.append(row);
        captions_all.append(captions)

    visualize.writeHTML(out_file_html,img_paths_all,captions_all,200,200);

    return
    results_dir='/disk2/aprilExperiments/flo_subset_predictions/results';
    dir_flo_im='/disk2/aprilExperiments/flo_subset_predictions/pred_flo_im';
    util.mkdir(dir_flo_im);

    clusters_file='/home/maheenrashid/Downloads/debugging_jacob/optical_flow_prediction_test/examples/opticalflow/clusters.mat';

    with h5py.File(clusters_file,'r') as hf:
        print hf.keys();
        C=np.array(hf.get('C'));
    C=C.T    

    # img_files=[];
    h5_files=[os.path.join(results_dir,file_curr) for file_curr in os.listdir(results_dir) if file_curr.endswith('.h5')];
    for idx_h5_file,h5_file in enumerate(h5_files):
        print idx_h5_file
        img_file=util.readLinesFromFile(h5_file.replace('.h5','.txt'))[0];
        img_name=img_file[img_file.rindex('/')+1:img_file.rindex('.')];
        # img_files.append(img_file);

        flo=getFlowMat(h5_file,C);
        out_file_flo=os.path.join(dir_flo_im,img_name+'.npy');
        np.save(out_file_flo,flo);

        out_file_flo_x=os.path.join(dir_flo_im,img_name+'_x.png');
        out_file_flo_y=os.path.join(dir_flo_im,img_name+'_y.png');
        visualize.saveMatAsImage(flo[:,:,0],out_file_flo_x);
        visualize.saveMatAsImage(flo[:,:,1],out_file_flo_y);

    visualize.writeHTMLForFolder(dir_flo_im,ext='_x.png',height=300,width=300)


    return

    # pos_file_org='/disk2/aprilExperiments/deep_proposals/positives_person.txt';
    # flow_dir='/disk2/aprilExperiments/deep_proposals/flow_all_humans/results_flow';
    # out_dir='/disk2/aprilExperiments/dual_flow/onlyHuman_all_xavier';

    # pos_file_new=os.path.join(out_dir,'positives.txt');

    pos_file_org='/disk2/marchExperiments/deep_proposals/negatives.txt';
    flow_dir='/disk2/aprilExperiments/deep_proposals/flow_neg/results_flow';
    out_dir='/disk2/aprilExperiments/dual_flow/onlyHuman_all_xavier';

    pos_file_new=os.path.join(out_dir,'negatives.txt');
    

    pos_data=util.readLinesFromFile(pos_file_org);
    pos_data_new=[];
    for idx_pos_data_curr,pos_data_curr in enumerate(pos_data):
        if idx_pos_data_curr%100==0:
            print idx_pos_data_curr

        img_name=pos_data_curr[:pos_data_curr.index(' ')];
        img_name=img_name[img_name.rindex('/')+1:];
        flow_name=img_name[:img_name.rindex('.')]+'_flow.png';
        flow_path=os.path.join(flow_dir,flow_name);
        if os.path.exists(flow_path):
            pos_data_new_curr=pos_data_curr+' '+flow_path;
            pos_data_new.append(pos_data_new_curr);

    print len(pos_data_new)
    util.writeFile(pos_file_new,pos_data_new);


    return
    dir_flow='/disk2/aprilExperiments/deep_proposals/flow_all_humans/results';
    out_dir_flow_im='/disk2/aprilExperiments/deep_proposals/flow_all_humans/results_flow';
    util.mkdir(out_dir_flow_im);

    clusters_file='/home/maheenrashid/Downloads/debugging_jacob/optical_flow_prediction_test/examples/opticalflow/clusters.mat';

    with h5py.File(clusters_file,'r') as hf:
        print hf.keys();
        C=np.array(hf.get('C'));
    C=C.T


    h5_files=[os.path.join(dir_flow,file_curr) for file_curr in os.listdir(dir_flow) if file_curr.endswith('.h5')];
    img_files=[];
    for idx,h5_file in enumerate(h5_files):
        if idx%100==0:
            print idx;
        img_file=util.readLinesFromFile(h5_file.replace('.h5','.txt'))[0].strip();
        img_files.append(img_file);

    args=[];
    count_missing=0;
    for idx,(h5_file,img_file) in enumerate(zip(h5_files,img_files)):
        if not os.path.exists(img_file):
            count_missing=count_missing+1;
            continue;
        # print idx,h5_file,img_file
        out_file_flow=img_file[:img_file.rindex('.')]+'_flow.png';
        out_file_flow=out_file_flow[out_file_flow.rindex('/')+1:]
        out_file_flow=os.path.join(out_dir_flow_im,out_file_flow);

        arg_curr=(h5_file,img_file,out_file_flow,C,idx);
        args.append(arg_curr);

    print len(args);
    print count_missing;
    print len(img_files);
    # for arg in args[:10]:
    #     arg=list(arg);
        # print arg[:-2];

    print 'starting Pool';
    p=multiprocessing.Pool(8);
    p.map(saveFlowImage,args);


        # out_file_flow=h5_file.replace('.h5','.png');
        # saveFlowImage(h5_file,img_file,out_file_flow,C);
        # print h5_files.index(h5_file),out_file_flow
    

    return

     # /disk2/marchExperiments/youtube/dog_11_1/images_transfer/dog_11_1.avi_000326.tif

    out_dir_results='/disk2/aprilExperiments/testing_flow/debug/results'

    out_dir='/disk2/aprilExperiments/testing_flow/debug';
    img_path='/disk2/marchExperiments/youtube/dog_11_1/images_transfer/dog_11_1.avi_000326.jpg'
    tif_path='/disk2/marchExperiments/youtube/dog_11_1/images_transfer/dog_11_1.avi_000326.tif';
    
    pred_path=os.path.join(out_dir_results,'0.h5');

    clusters_file='/home/maheenrashid/Downloads/debugging_jacob/optical_flow_prediction_test/examples/opticalflow/clusters.mat';

    C=getClusters(clusters_file);
    

    img=scipy.misc.imread(img_path)
    tif=scipy.misc.imread(tif_path);
    tif=tif[:,:,:2];
    for idx in range(tif.shape[2]):
        print np.min(tif[:,:,idx]),np.max(tif[:,:,idx]);
    flo=getFlowGT(tif,C);

    with h5py.File(pred_path,'r') as hf:
        data = hf.get('Outputs')
        data = np.array(data)

    flo_pred=assignToFlowSoft(data,C)
    flo_pred=cv2.resize(flo_pred, (flo.shape[1],flo.shape[0]))

    print np.min(flo),np.max(flo);
    print np.min(flo_pred),np.max(flo_pred);

    print flo_pred.shape
    print flo.shape
    print img.shape
    print tif.shape

    out_img=os.path.join(out_dir,'img.png');
    tif_x=os.path.join(out_dir,'tif_x.png');
    tif_y=os.path.join(out_dir,'tif_y.png');
    flo_x=os.path.join(out_dir,'flo_x.png');
    flo_y=os.path.join(out_dir,'flo_y.png');

    flo_pred_x=os.path.join(out_dir,'flo_pred_x.png');
    flo_pred_y=os.path.join(out_dir,'flo_pred_y.png');


    plt.figure();plt.imshow(img);plt.savefig(out_img);plt.close();
    plt.figure();plt.imshow(tif[:,:,0]);plt.savefig(tif_x);plt.close();
    plt.figure();plt.imshow(tif[:,:,1]);plt.savefig(tif_y);plt.close();
    plt.figure();plt.imshow(flo[:,:,0]);plt.savefig(flo_x);plt.close();
    plt.figure();plt.imshow(flo[:,:,1]);plt.savefig(flo_y);plt.close();

    plt.figure();plt.imshow(flo_pred[:,:,0]);plt.savefig(flo_pred_x);plt.close();
    plt.figure();plt.imshow(flo_pred[:,:,1]);plt.savefig(flo_pred_y);plt.close();



    return

    dir_prop='/disk2/aprilExperiments/deep_proposals/addingFlow/';
    util.mkdir(dir_prop);    
    out_file_pos=os.path.join(dir_prop,'positives.txt');
    out_file_neg=os.path.join(dir_prop,'negatives.txt');

    pos_file_org='/disk2/aprilExperiments/deep_proposals/positives_person.txt';
    neg_file_org='/disk2/marchExperiments/deep_proposals/negatives.txt';
    dir_flow_pos='/disk2/aprilExperiments/deep_proposals/flow/results';
    dir_flow_neg='/disk2/aprilExperiments/deep_proposals/flow/results_neg';

    num_to_keep=100;

    flow_files,im_files=getFlowImFiles(dir_flow_pos);
    pos_data=util.readLinesFromFile(pos_file_org);
    pos_data=pos_data[:100];
    writeNewFileWithFlow(pos_data,flow_files,im_files,out_file_pos)

    flow_files,im_files=getFlowImFiles(dir_flow_neg);
    pos_data=util.readLinesFromFile(neg_file_org);
    pos_data=pos_data[:100];
    writeNewFileWithFlow(pos_data,flow_files,im_files,out_file_neg)

    return
    dir_flow='/disk2/aprilExperiments/deep_proposals/flow/results_neg';
    clusters_file='/home/maheenrashid/Downloads/debugging_jacob/optical_flow_prediction_test/examples/opticalflow/clusters.mat';

    with h5py.File(clusters_file,'r') as hf:
        print hf.keys();
        C=np.array(hf.get('C'));
    C=C.T


    h5_files=[os.path.join(dir_flow,file_curr) for file_curr in os.listdir(dir_flow) if file_curr.endswith('.h5')];
    img_files=[];
    for h5_file in h5_files:
        img_file=util.readLinesFromFile(h5_file.replace('.h5','.txt'))[0].strip();
        img_files.append(img_file);


    for h5_file,img_file in zip(h5_files,img_files):
        out_file_flow=h5_file.replace('.h5','.png');
        saveFlowImage(h5_file,img_file,out_file_flow,C);
        print h5_files.index(h5_file),out_file_flow
        
        





    return
    clusters_file='/home/maheenrashid/Downloads/debugging_jacob/optical_flow_prediction_test/examples/opticalflow/clusters.mat';
    with h5py.File(clusters_file,'r') as hf:
        print hf.keys();
        C=np.array(hf.get('C'));
    C=C.T
    print C.shape;



    # return
    theImage='/disk2/februaryExperiments/deep_proposals/positives/COCO_train2014_000000024215_61400.png';
    theImage =scipy.misc.imread(theImage);

    

    file_name='/disk2/aprilExperiments/deep_proposals/flow/results/44.h5';        

    with h5py.File(file_name,'r') as hf:
        print('List of arrays in this file: \n', hf.keys())
        data = hf.get('Outputs')
        np_data = np.array(data)
        print('Shape of the array dataset_1: \n', np_data.shape)

    flow=assignToFlowSoft(np_data.ravel(),C);

    # print np.min(flow),np.max(flow);

    # flow_new_x=scipy.misc.imresize(flow[:,:,0],(theImage.shape[0],theImage.shape[1]))

    # flow_new_y=scipy.misc.imresize(flow[:,:,1],(theImage.shape[0],theImage.shape[1]))
    # flow=np.dstack((flow_new_x,flow_new_y));
    # print np.min(flow),np.max(flow);
    flow = cv2.resize(flow, (240,240))
    print flow.shape
    flow_old=flow;
    # f=h5py.File(file_name,'r')
    # output=f.get('/Outputs')
    # f.close();

    # output=np.array(output);

    # return
    
    # h5disp(file_name, '/Outputs');
    # theTemp = h5read(file_name, '/Outputs');
    # theOutput = [];
    # for j = 1:size(theTemp,2)
    #     for k = 1:size(theTemp,1)
    #         for l = 1:size(theTemp,3)
    #             theOutput = [theOutput theTemp(k,j,l)];
    #         end
    #     end
    # end

    # N = assignToFlowSoft(theOutput(:), C);

    # N = imresize(N, [size(theImage,1) size(theImage,2)]);


    # return
    path_to_mat='/disk2/temp/checking_h5.mat';
    mat_data=scipy.io.loadmat(path_to_mat)
    flow_new=mat_data['N'];
    # print flow_new.shape

    # print flow[:3,:3,0];
    # print flow_new[:3,:3,0];
    # ans=np.isclose(flow_new,flow);
    # print ans
    # print ans
    # scipy.misc.imsave('/disk2/temp/a.png',255*np.dstack((np.dstack((ans[:,:,0],ans[:,:,0])),ans[:,:,0])))
    # scipy.misc.imsave('/disk2/temp/b.png',255*np.dstack((np.dstack((ans[:,:,1],ans[:,:,1])),ans[:,:,1])))


    # return

    out_flow_range_old='/disk2/temp/checking_h5_flow_old.png';
    out_flow_range_new='/disk2/temp/checking_h5_flow_new.png';
    out_flow_range_img='/disk2/temp/checking_h5_flow_img.png';

    plt.ion();

    plt.figure();
    plt.plot(np.sort(np.ravel(flow_old)));
    plt.savefig(out_flow_range_old);
    
    

    plt.figure();
    plt.plot(np.sort(np.ravel(flow_new)));
    plt.savefig(out_flow_range_new);

    # return
    out_flow_x='/disk2/temp/checking_h5_x.png';
    out_flow_y='/disk2/temp/checking_h5_y.png';
    out_mag='/disk2/temp/checking_h5_mag.png';
    
    mag=np.power(np.power(flow[:,:,0],2)+np.power(flow[:,:,1],2),0.5)
    flow=np.dstack((flow,mag));
    print flow.shape

    min_v=np.min(flow);
    flow=flow+abs(min_v);
    max_v=np.max(flow);
    flow=flow/max_v
    flow=flow*255;

    flow=scipy.misc.imread(out_mag);
    plt.figure();
    plt.plot(np.sort(np.ravel(flow[:,:,2])));
    plt.savefig(out_flow_range_img);
    
    return
    
    # flow=flow/np.dstack((mag,mag));
    # flow=flow
    # flow=flow+abs(np.min(flow))
    # flow=255*flow;


    # print np.min(flow[:,:,0]),np.max(flow[:,:,0]);
    # print np.min(flow[:,:,1]),np.max(flow[:,:,1]);
    for idx in range(flow.shape[2]):
        mag=flow[:,:,idx];
        print np.min(mag),np.max(mag);
    
    im_x=np.dstack((np.dstack((flow[:,:,0],flow[:,:,0])),flow[:,:,0]));
    im_y=np.dstack((np.dstack((flow[:,:,1],flow[:,:,1])),flow[:,:,1]));
    # mag=np.dstack((np.dstack((mag,mag)),mag));
    # mag=np.dstack((np.dstack((flow[:,:,0],flow[:,:,1])),mag))
    scipy.misc.imsave(out_flow_x,im_x);
    scipy.misc.imsave(out_flow_y,im_y);
    scipy.misc.imsave(out_mag,flow);