Example #1
0
def loadCOCOAndOverSeg(im_set="test", detector="sf", N_SPIX=1000, fold=0):
    """Load COCO images/segmentations plus a geodesic K-means over-segmentation.

    Results are cached in /tmp; on a cache hit the pickled (optionally
    lz4-compressed) data is loaded instead of being recomputed.

    :param im_set:   image-set selector: "train", "valid" or "test"
    :param detector: boundary detector name: 'sf' (StructuredForest),
                     'mssf' (MultiScaleStructuredForest), 'st' (SketchTokens);
                     any other value falls back to DirectedSobel
    :param N_SPIX:   target number of superpixels per image
    :param fold:     dataset fold forwarded to dataset.loadCOCO2014
    :return: (over_segs, segmentations, []) -- the trailing empty list keeps
             the return shape aligned with loadVOCAndOverSeg (which returns
             bounding boxes there)
    """
    from pickle import dumps, loads
    try:
        # Optional lz4 compression of the cache entries; fall back to
        # identity functions when lz4 is not installed.
        import lz4, pickle
        decompress = lambda s: pickle.loads(lz4.decompress(s))
        # NOTE(review): lz4.compressHC is a legacy API; modern lz4 releases
        # moved it to lz4.block -- confirm against the installed version.
        compress = lambda o: lz4.compressHC(pickle.dumps(o))
    except ImportError:  # narrowed from a bare except: only the import can fail
        compress = lambda x: x
        decompress = lambda x: x
    from gop import contour, dataset, segmentation
    FILE_NAME = '/tmp/coco_%s_%s_%d_%d.dat' % (im_set, detector, N_SPIX, fold)

    # Fast path: reuse the cached over-segmentation if present.
    try:
        with open(FILE_NAME, 'rb') as f:
            # NOTE(review): unpickling from a world-writable /tmp path is
            # unsafe with untrusted data; acceptable only as a private cache.
            over_segs, segmentations = loads(f.read())
        over_seg = segmentation.ImageOverSegmentationVec()
        for i in over_segs:
            over_seg.append(decompress(i))
        return over_seg, [decompress(i) for i in segmentations], []
    except FileNotFoundError:
        pass

    # Load the dataset
    data = dataset.loadCOCO2014(im_set == "train", im_set == "valid", fold)

    # COCO has some pretty gray scale images (WTF!!!)
    images = [
        e['image'] if e['image'].C == 3 else e['image'].tileC(3) for e in data
    ]
    try:
        segmentations = [e['segmentation'] for e in data]
    except KeyError:  # test images carry no ground-truth segmentation
        segmentations = []

    # Do the over-segmentation
    if detector == 'sf':
        detector = contour.StructuredForest()
        detector.load('../data/sf.dat')
    elif detector == "mssf":
        detector = contour.MultiScaleStructuredForest()
        detector.load("../data/sf.dat")
    elif detector == 'st':
        detector = contour.SketchTokens()
        detector.load('../data/st_full_c.dat')
    else:
        detector = contour.DirectedSobel()

    if detector is not None:
        over_segs = segmentation.generateGeodesicKMeans(
            detector, images, N_SPIX)

    # Persist the cache; items are compressed element-wise so each can be
    # decompressed individually on load.
    with open(FILE_NAME, 'wb') as f:
        f.write(
            dumps(
                ([compress(i)
                  for i in over_segs], [compress(i) for i in segmentations])))

    return over_segs, segmentations, []
Example #2
0
def loadVOCAndOverSeg(im_set="test", detector="sf", N_SPIX=1000, EVAL_DIFFICULT=False, year="2012"):
    """Load PASCAL VOC images, segmentations and boxes plus an over-segmentation.

    Results are cached in /tmp; on a cache hit the pickled (optionally
    lz4-compressed) data is loaded instead of being recomputed.

    :param im_set:         image-set selector: "train", "valid" or "test"
    :param detector:       boundary detector name: 'sf', 'mssf', 'st';
                           any other value falls back to DirectedSobel
    :param N_SPIX:         target number of superpixels per image
    :param EVAL_DIFFICULT: include boxes flagged 'difficult' when True
    :param year:           VOC release year selecting dataset.loadVOC<year>
    :return: (over_segs, segmentations, boxes)
    """
    from pickle import dumps, loads
    try:
        # Optional lz4 compression of the cache entries; fall back to
        # identity functions when lz4 is not installed.
        import lz4, pickle
        decompress = lambda s: pickle.loads(lz4.decompress(s))
        # NOTE(review): lz4.compressHC is a legacy API; modern lz4 releases
        # moved it to lz4.block -- confirm against the installed version.
        compress = lambda o: lz4.compressHC(pickle.dumps(o))
    except ImportError:  # narrowed from a bare except: only the import can fail
        compress = lambda x: x
        decompress = lambda x: x
    from gop import contour, dataset, segmentation
    FILE_NAME = '/tmp/%s_%s_%d_%d_%s.dat' % (im_set, detector, N_SPIX, EVAL_DIFFICULT, year)

    # Fast path: reuse the cached over-segmentation if present.
    try:
        with open(FILE_NAME, 'rb') as f:
            # NOTE(review): unpickling from a world-writable /tmp path is
            # unsafe with untrusted data; acceptable only as a private cache.
            over_segs, segmentations, boxes = loads(f.read())
        over_seg = segmentation.ImageOverSegmentationVec()
        for i in over_segs:
            over_seg.append(decompress(i))
        return over_seg, [decompress(i) for i in segmentations], [decompress(i) for i in boxes]
    except IOError:  # kept broad (vs FileNotFoundError) to match original recompute-on-any-read-failure behavior
        pass

    # Load the dataset; getattr replaces the original eval() -- same lookup,
    # no arbitrary code execution if `year` ever comes from outside.
    data = getattr(dataset, "loadVOC%s" % year)(im_set == "train", im_set == "valid", im_set == "test")

    images = [e['image'] for e in data]
    try:
        segmentations = [e['segmentation'] for e in data]
    except KeyError:  # test images carry no ground-truth segmentation
        segmentations = []
    # Ground-truth boxes, skipping 'difficult' ones unless requested.
    boxes = [[a['bbox'] for a in e['annotation'] if not a['difficult'] or EVAL_DIFFICULT] for e in data]

    # Do the over-segmentation
    if detector == 'sf':
        detector = contour.StructuredForest()
        detector.load('../data/sf.dat')
    elif detector == "mssf":
        detector = contour.MultiScaleStructuredForest()
        detector.load("../data/sf.dat")
    elif detector == 'st':
        detector = contour.SketchTokens()
        detector.load('../data/st_full_c.dat')
    else:
        detector = contour.DirectedSobel()

    if detector is not None:
        over_segs = segmentation.generateGeodesicKMeans(detector, images, N_SPIX)

    # Persist the cache; items are compressed element-wise so each can be
    # decompressed individually on load.
    with open(FILE_NAME, 'wb') as f:
        f.write(dumps(([compress(i) for i in over_segs], [compress(i) for i in segmentations], [compress(i) for i in boxes])))

    return over_segs, segmentations, boxes
Example #3
0
File: util.py  Project: ILoveFree2/gop
def loadCOCOAndOverSeg(im_set="test", detector="sf", N_SPIX=1000, fold=0):
    """Load COCO images/segmentations plus a geodesic K-means over-segmentation.

    A pickled (optionally lz4-compressed) cache in /tmp short-circuits the
    computation on subsequent calls with the same arguments.

    :param im_set:   image-set selector: "train", "valid" or "test"
    :param detector: 'sf', 'mssf' or 'st' select a learned boundary detector;
                     anything else falls back to DirectedSobel
    :param N_SPIX:   target number of superpixels per image
    :param fold:     dataset fold forwarded to dataset.loadCOCO2014
    :return: (over_segs, segmentations, []) -- trailing [] mirrors the boxes
             slot of the VOC loader so callers can unpack both uniformly
    """
    from pickle import dumps, loads
    try:
        # lz4 is optional: without it the cache stores raw objects.
        import lz4, pickle
        decompress = lambda s: pickle.loads(lz4.decompress(s))
        # NOTE(review): lz4.compressHC is legacy; newer lz4 exposes this in
        # lz4.block -- verify against the installed version.
        compress = lambda o: lz4.compressHC(pickle.dumps(o))
    except ImportError:  # narrowed from a bare except: only the import can fail
        compress = lambda x: x
        decompress = lambda x: x
    from gop import contour, dataset, segmentation
    FILE_NAME = '/tmp/coco_%s_%s_%d_%d.dat' % (im_set, detector, N_SPIX, fold)

    # Cache hit: rebuild the over-segmentation vector from the stored blobs.
    try:
        with open(FILE_NAME, 'rb') as f:
            # NOTE(review): unpickling a world-writable /tmp file is unsafe
            # with untrusted data; fine as a private local cache.
            over_segs, segmentations = loads(f.read())
        over_seg = segmentation.ImageOverSegmentationVec()
        for i in over_segs:
            over_seg.append(decompress(i))
        return over_seg, [decompress(i) for i in segmentations], []
    except FileNotFoundError:
        pass

    # Load the dataset
    data = dataset.loadCOCO2014(im_set == "train", im_set == "valid", fold)

    # COCO has some pretty gray scale images (WTF!!!)
    images = [e['image'] if e['image'].C == 3 else e['image'].tileC(3) for e in data]
    try:
        segmentations = [e['segmentation'] for e in data]
    except KeyError:  # test images carry no ground-truth segmentation
        segmentations = []

    # Do the over-segmentation
    if detector == 'sf':
        detector = contour.StructuredForest()
        detector.load('../data/sf.dat')
    elif detector == "mssf":
        detector = contour.MultiScaleStructuredForest()
        detector.load("../data/sf.dat")
    elif detector == 'st':
        detector = contour.SketchTokens()
        detector.load('../data/st_full_c.dat')
    else:
        detector = contour.DirectedSobel()

    if detector is not None:
        over_segs = segmentation.generateGeodesicKMeans(detector, images, N_SPIX)

    # Persist the cache; items are compressed element-wise so each can be
    # decompressed individually on load.
    with open(FILE_NAME, 'wb') as f:
        f.write(dumps(([compress(i) for i in over_segs], [compress(i) for i in segmentations])))

    return over_segs, segmentations, []