def create_micro_functions(myfile, coeffs, kernel_header_filename):
    ''' Emit the generated micro-kernel source file.

    myfile is the file to which we are writing
    coeffs is the set of all coefficients
    kernel_header_filename is the generated header to #include
    '''
    write_line(myfile, 0, '#include "%s"' % kernel_header_filename)
    write_break(myfile)

    # Shared assembly scaffolding used by every micro kernel.
    abc_micro_kernel_gen.write_common_rankk_macro_assembly(myfile)
    write_break(myfile)
    abc_micro_kernel_gen.macro_initialize_assembly(myfile)
    # Disabled rank-k assembly macros, kept for reference:
    #write_break( myfile )
    #abc_micro_kernel_gen.macro_rankk_xor0_assembly( myfile )
    #write_break( myfile )
    #abc_micro_kernel_gen.macro_rankk_loopkiter_assembly( myfile )
    #write_break( myfile )
    #abc_micro_kernel_gen.macro_rankk_loopkleft_assembly( myfile )
    #write_break( myfile )
    #abc_micro_kernel_gen.macro_rankk_postaccum_assembly( myfile )
    write_break(myfile)

    # One micro kernel per non-empty coefficient column of the C matrix.
    for idx, coeff_set in enumerate(transpose(coeffs[2])):
        if not coeff_set:
            continue
        nonzero_coeffs = [c for c in coeff_set if is_nonzero(c)]
        # NOTE(review): the 23-coefficient cap presumably reflects a
        # register-budget limit in the generated assembly — confirm.
        if len(nonzero_coeffs) <= 23:
            abc_micro_kernel_gen.generate_micro_kernel(
                myfile, nonzero_coeffs, idx)
        write_break(myfile)
def create_packm_functions(myfile, coeffs):
    ''' Generate all of the custom add functions.

    myfile is the file to which we are writing
    coeffs is the set of all coefficients
    '''
    def all_adds(coeff_sets, name):
        # Emit one add routine per non-empty coefficient set.
        for idx, coeff_set in enumerate(coeff_sets):
            if coeff_set:
                write_packm_func(myfile, coeff_set, idx, name)
                write_break(myfile)

    # S matrices formed from A subblocks
    all_adds(transpose(coeffs[0]), 'A')
    # T matrices formed from B subblocks
    all_adds(transpose(coeffs[1]), 'B')
def create_kernel_header(myfile, coeffs):
    ''' Write the kernel header: one declaration per non-empty
    coefficient set of the C (output) matrix.

    myfile is the file to which we are writing
    coeffs is the set of all coefficients
    '''
    #write_line( myfile, 0, '#include "bl_dgemm_kernel.h"' )
    write_break(myfile)
    abc_micro_kernel_gen.write_header_start(myfile)
    for i, coeff_set in enumerate(transpose(coeffs[2])):
        if len(coeff_set) > 0:
            # Only the nonzero coefficients participate in the kernel.
            # (The original also computed len(nonzero_coeffs) into an
            # unused local `nnz`; removed.)
            nonzero_coeffs = [coeff for coeff in coeff_set
                              if is_nonzero(coeff)]
            abc_micro_kernel_gen.generate_kernel_header(
                myfile, nonzero_coeffs, i)
            write_break(myfile)
    abc_micro_kernel_gen.write_header_end(myfile)
def create_macro_functions(myfile, coeffs):
    ''' Emit one macro function for every non-empty coefficient
    set of the C (output) matrix.
    '''
    for idx, cset in enumerate(transpose(coeffs[2])):
        if cset:
            write_macro_func(myfile, cset, idx, 'C')
            write_break(myfile)
shapelyPoligons = [] for poligon in mark.poligons: pol = Polygon(poligon) shapelyPoligons.append(pol) #Generating random poligons refLocalPoligons = [] for i in range(negativeNum): negAngle = random.random() * 90 negIndex = int(random.random() * len(refRects)) shift = (np.random.rand(2) * img.shape).astype(dtype=int)[::-1] #rotate and transpose negPoligon = refRects[negIndex].copy() negPoligon = common.rotate(negPoligon, negAngle) negPoligon = common.transpose(negPoligon, shift) negPolygonShapely = Polygon(negPoligon) #Check collision noCollisions = True if not imgPolygonShapely.contains(negPolygonShapely): noCollisions = False for shapelyPoligon in shapelyPoligons: if shapelyPoligon.intersects(negPolygonShapely): noCollisions = False if masked and not common.checkMaskContains(mask, negPoligon): noCollisions = False if noCollisions:
from common import Input, transpose, cat
from collections import Counter

if __name__ == '__main__':
    # Read whitespace-separated message rows for puzzle 6.
    data = Input(6).read().split()
    # Walk the columns; the answer is the most frequent character
    # in each column, concatenated.
    columns = list(transpose(data))
    counters = [Counter(column) for column in columns]
    most_common_chars = [counter.most_common(1)[0][0]
                         for counter in counters]
    print(cat(most_common_chars))
from common import transpose, first, groupby, cat
import unittest

if __name__ == '__main__':
    # Sanity checks for the shared `common` helpers.
    # transpose flips rows and columns.
    assert tuple(transpose(((1, 2, 3), (4, 5, 6)))) == ((1, 4), (2, 5), (3, 6))
    # first returns the initial element of any sequence.
    assert first('abc') == first(['a', 'b', 'c']) == 'a'
    # cat joins string pieces.
    assert cat(['a', 'b', 'c']) == 'abc'
    # groupby buckets items by the key function.
    assert (groupby(['test', 'one', 'two', 'three', 'four'], key=len) == {
        3: ['one', 'two'],
        4: ['test', 'four'],
        5: ['three'],
    })
    unittest.main()
outParser = markParser.MarkParser()
refRects, refP, refAngles = common.loadParams(paramsPath)
for mark in parser.marks:
    snappedPoligons = []
    for poligon in mark.poligons:
        # Estimate the polygon's orientation from its two long edges,
        # then snap it to the nearest reference angle.
        edge1, edge2 = common.getLongLines(poligon)
        rawAngle = np.mean([common.getAngle(edge1), common.getAngle(edge2)])
        snapAngle = min(refAngles, key=lambda a: abs(a - rawAngle))

        center = poligon.mean(axis=0)

        # Pick the reference rectangle whose perimeter is closest.
        perim = common.perim(poligon)
        closestPerim = min(refP, key=lambda p: abs(p - perim))
        refPoligon = refRects[refP.index(closestPerim)]

        # Place the reference rectangle at the mark's angle and center.
        placed = common.rotate(refPoligon, snapAngle)
        placed = common.transpose(placed, center)
        placedShapely = Polygon(placed)

        # Reject placements outside the image or (optionally) the mask.
        if not imgPolygonShapely.contains(placedShapely):
            continue
        if masked and not common.checkMaskContains(mask, placed):
            continue
        snappedPoligons.append(placed)
    outParser.marks.append(markParser.Mark(mark.imageName, snappedPoligons))
outParser.save(outListPath)
img = cv2.imread(imagePath, cv2.CV_LOAD_IMAGE_GRAYSCALE) mask = cv2.imread(maskPath, cv2.CV_LOAD_IMAGE_GRAYSCALE) refRects, refP, refAngles = common.loadParams(paramsPath) outPoligon = [(0, 0), (img.shape[1], 0), (img.shape[1], img.shape[0]), (0, img.shape[0])] imgPolygonShapely = Polygon(outPoligon) for rectIndex in range(len(refRects)): rect = refRects[rectIndex] for angleIndex in range(len(refAngles)): angle = refAngles[angleIndex] for x in range(0, img.shape[1] - 50, 8): for y in range(0, img.shape[0] - 50, 8): rotRect = common.rotate(rect, angle) trRotRect = common.transpose(rotRect, np.asarray((x, y))) if mask[y, x] != 255: continue polygonShapely = Polygon(trRotRect) if not imgPolygonShapely.contains(polygonShapely): continue crop = common.extractCrop(img, trRotRect, size) hist = hog.compute(crop) #print hist #print hist.shape pred = classifier.predict(np.transpose(hist)) print pred