def rotate_points(points):
    new_points = []
    for point in points:
        #print point
        point[1] += 0.01
        new_points.append(rotation.rotate(point[0], point[1]))
    return new_points
def run():
    sdl2.ext.init()
    window = sdl2.ext.Window("TriFan", window_size)
    renderer = sdl2.ext.Renderer(window)
    sprite_factory = sdl2.ext.SpriteFactory(sdl2.ext.TEXTURE, renderer=renderer)
    renderer.blendmode = sdl2.SDL_BLENDMODE_BLEND
    window.show()
    renderer.clear(clear_color)
    rot_per_div = 2 * math.pi / divisions
    center = (window_size[0] / 2, window_size[1] / 2)
    rot_inc = 0  # math.pi/600
    rot = 2 * math.pi / 600
    length = center[1] / 4
    total_rot = 0
    sub = 1
    total_lip = length_inc_percentage
    running = True
    while running:
        events = sdl2.ext.get_events()
        for event in events:
            if event.type == sdl2.SDL_QUIT or (event.type == sdl2.SDL_KEYDOWN and event.key.keysym.sym == sdl2.SDLK_ESCAPE):
                running = False
        renderer.clear(clear_color)
        total_rot += rot_inc
        total_lip -= 0.01
        for p in range(0, 6):
            for m in range(0, divisions):
                trim = (6 - p) * length * total_lip  # length_inc_percentage
                current_length = length - trim
                for n in range(0, 30):  # divisions
                    p2 = translation.translate(rotation.rotate((0, current_length), total_rot), center)
                    renderer.draw_line((center[0], center[1], p2[0], p2[1]), 0x2f606060 + p * 0x001010)
                    total_rot += rot  # rot_per_div
                    current_length -= sub
        renderer.present()
    return 0
def rotate_minutiae(self, coords, image):
    mask = np.zeros(image.shape)
    rows, cols = image.shape
    angle, r, c = correctrotation(image)
    angle = angle * np.pi / 180
    rotatedcoords = []
    for x, y, t in coords:
        xd, yd = rotate([r / 2, c / 2], [x, y], angle)
        rotatedcoords.append((xd, yd, t))
    #print rotatedcoords
    for x, y, t in rotatedcoords:
        mask[int(round(x)), int(round(y))] = 1
    return rotatedcoords, angle, mask
def change_in_momentum(momentum_in, axis_of_rotation, two_theta):
    """Finds the change in momentum vector given the incoming momentum, the axis
    of rotation and the two-theta angle.

    Args:
        momentum_in: The momentum vector of the incident beam. Units keV/c.
        axis_of_rotation: The axis perpendicular to the diffraction plane,
            centred on the sample.
        two_theta: The two-theta angle of diffraction. Angle must be passed in
            degrees.

    Returns:
        change_in_momentum: The change in momentum vector.
    """
    _q = r.rotation_to_quaternion(axis_of_rotation, two_theta)
    momentum_out = r.rotate(momentum_in, _q)
    return momentum_out - momentum_in
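A hedged usage sketch (not from the source; the values and the assumption that the arguments are 3-component numpy arrays are illustrative). For an incident momentum lying in the diffraction plane, the magnitude of the returned vector should follow |delta_p| = 2 * |p_in| * sin(two_theta / 2).

# Illustrative only: a 10 keV/c beam along +x, diffracted by two_theta = 90 degrees
# about the z axis (perpendicular to the x-y diffraction plane).
import numpy as np

momentum_in = np.array([10.0, 0.0, 0.0])        # keV/c
axis_of_rotation = np.array([0.0, 0.0, 1.0])    # z axis
delta_p = change_in_momentum(momentum_in, axis_of_rotation, 90.0)
print(np.linalg.norm(delta_p))                  # expected ~ 2 * 10 * sin(45 deg) = 14.14 keV/c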
def normalize(motion, translate='', rotate='', scale='', clean=True):
    """! Apply normalization to a motion. The input motion is not modified.

    @param motion numpy.array: The motion to normalize
    @param translate: The normalization for translating the motions
    @param rotate: The normalization for rotating the motions
    @param scale: The normalization for scaling the motions
    @param clean: Remove duplicate points and large jumps, default True
    @return: The normalized motion and the normalization parameters
    """
    out = motion
    translationRef = translation.translate(out[:, 1:4], translate)
    rotationRef = rotation.rotate(out[:, 1:8], rotate)
    scalingRef = scaling.scale(out[:, 1:4], scale)
    if clean:
        out, removedPoints = cleaning.clean(motion)
    return out, translationRef, rotationRef, scalingRef
def run():
    sdl2.ext.init()
    window = sdl2.ext.Window("Leaf", window_size)
    renderer = sdl2.ext.Renderer(window)
    sprite_factory = sdl2.ext.SpriteFactory(sdl2.ext.TEXTURE, renderer=renderer)
    renderer.blendmode = sdl2.SDL_BLENDMODE_BLEND
    window.show()
    renderer.clear(clear_color)
    delta_theta = 2 * math.pi / steps
    running = True
    while running:
        events = sdl2.ext.get_events()
        for event in events:
            if event.type == sdl2.SDL_QUIT or (event.type == sdl2.SDL_KEYDOWN and event.key.keysym.sym == sdl2.SDLK_ESCAPE):
                running = False
        center = (int(window_size[0] / 2), int(window_size[1] / 2))
        point = (int(window_size[0] / 4), int(window_size[1] / 3))
        renderer.draw_point(translation.translate(point, center), draw_color)
        for i in range(0, steps):
            new_point = rotation.rotate(point, delta_theta * i)
            percent = (2 * math.pi) / (i + delta_theta)
            #print(percent)
            renderer.draw_point(translation.translate(center, new_point), draw_color - int(percent * 0xffffffff))
        renderer.present()
    return 0
def normalize(allPolygones, name):
    # Translation transform: move the model's centre of mass to the origin of space
    mp, transed_points = translation.translate(allPolygones)
    # Rotation transform
    rotated_points = rotation.rotate(transed_points)
    # Scaling transform: returns the final pre-processed model.
    # final_points is a 2-D n x 3 array; each row is one vertex of a triangle.
    final_points = scaling.scale(mp, rotated_points)
    # final_points starts out as a list; convert it to a numpy.ndarray
    final_points = np.array(final_points)
    #extractEigvect(final_points, name)
    #eigvectflat(final_points, name)
    # Test: generate the 2-D projection histogram
    #histogram.getHist(final_points)
    allTris = []
    for i in range(len(final_points) // 3):
        allTris.append(final_points[i * 3:i * 3 + 3])
    histogram.getTriHist(allTris, final_points, name, 200, 200)
def update_global_map_obstacles(self):
    rotated_view_window = rotate(self.view_window, 360 - self.direction)
    N = 5
    M = 160
    # current position of the agent in the global map
    curr_i = self.position[0]
    curr_j = self.position[1]
    # loop over the view window and copy it into the global map
    for i in range(N):
        for j in range(N):
            element = rotated_view_window[i][j]
            # indices to copy into the global map
            Iidx = curr_i + i - 2
            Jidx = curr_j + j - 2
            # Conditions for writing to the global map:
            # 1) The copy indices must not exceed the global map bounds.
            # 2) Do not copy the agent's own cell.
            if (Iidx >= 0 and Iidx < M and Jidx >= 0 and Jidx < M
                    and not (i == 2 and j == 2)):
                if [Iidx, Jidx] in self.placedStonesCoordinates:
                    self.global_map_obstacles[Iidx][Jidx] = ' '
                else:
                    self.global_map_obstacles[Iidx][Jidx] = element
            elif (i == 2 and j == 2):
                self.global_map_obstacles[Iidx][Jidx] = '^'
                self.top_global_map_scores[Iidx][Jidx] = -10000
                if [Iidx, Jidx] not in self.exploredCoordinates:
                    self.exploredCoordinates.append([Iidx, Jidx])
                if [Iidx, Jidx] in self.visibleCoordinates:
                    self.visibleCoordinates.remove([Iidx, Jidx])
                if [Iidx, Jidx] in self.visibleItems:
                    self.visibleItems.remove([Iidx, Jidx])
            if [Iidx, Jidx] not in self.exploredCoordinates and [Iidx, Jidx] not in self.visibleCoordinates:
                if self.global_map_obstacles[Iidx][Jidx] in self.visibleItemsPts:
                    self.visibleItems.append([Iidx, Jidx])
                astar_path = self.astarMinimum(self.AstarGlobalMap, self.position,
                                               [Iidx, Jidx], self.items, self.onRaft)
                totalScores, remove_flag, reachScores, coordScores = self.calculateTotalCoordScores(
                    Iidx, Jidx, astar_path, self.global_map_obstacles,
                    self.onRaft, self.items, self.position)
                if remove_flag:
                    self.top_global_map_scores[Iidx][Jidx] = 0
                    self.exploredCoordinates.append([Iidx, Jidx])
                    remove_flag = 0
                else:
                    self.top_global_map_scores[Iidx][Jidx] = totalScores
                    self.visibleCoordinates.append([Iidx, Jidx])
    self.AstarGlobalMap.updateGrid(self.global_map_obstacles, self.onRaft)
def test_rotation_around_z_axis(self):
    point = np.array((1, 0, 0))
    point = rotation.rotate(point, axis_vector=Z_AXIS_VECTOR, theta=np.pi / 2)
    self._validate_is_close(point, np.array((0, 1, 0)))
def run():
    sdl2.ext.init()
    window = sdl2.ext.Window("Weight", window_size)
    renderer = sdl2.ext.Renderer(window)
    sprite_factory = sdl2.ext.SpriteFactory(sdl2.ext.TEXTURE, renderer=renderer)
    renderer.blendmode = sdl2.SDL_BLENDMODE_BLEND
    num_x = width / x_spacing
    num_y = height / y_spacing
    points = []
    offset_points = []
    for y in range(int(num_y)):
        for x in range(int(num_x)):
            points.append([0, 0])
            offset_points.append((x * x_spacing, y * y_spacing))
    window.show()
    renderer.clear(clear_color)
    center_position = [int(width / 2), int(height / 2)]
    pattern = [
        [0.10, 0.25, 0.50, 0.65, 0.75, 0.65, 0.50, 0.25, 0.10],
        [0.25, 0.50, 0.75, 1.00, 1.25, 1.00, 0.75, 0.50, 0.25],
        [0.25, 0.75, 1.00, 1.25, 1.50, 1.25, 1.00, 0.75, 0.25],
        [0.25, 0.50, 0.75, 1.00, 1.25, 1.00, 0.75, 0.50, 0.25],
        [0.10, 0.25, 0.50, 0.65, 0.75, 0.65, 0.50, 0.25, 0.10]
    ]
    mouse_down = False
    auto_center = [0, 160]
    theta = 0
    output = []
    running = True
    while running:
        events = sdl2.ext.get_events()
        for event in events:
            if event.type == sdl2.SDL_QUIT or (event.type == sdl2.SDL_KEYDOWN and event.key.keysym.sym == sdl2.SDLK_ESCAPE):
                running = False
            elif event.type == sdl2.SDL_MOUSEMOTION:
                center_position = [int(event.motion.x), int(event.motion.y)]
                if event.motion.x == 0 or event.motion.y == 0 or event.motion.x == width - 1 or event.motion.y == height - 1:
                    mouse_down = False
                else:
                    mouse_down = True
        theta -= 0.0075
        rotated_auto_center = translation.translate(rotation.rotate(auto_center, theta), [width / 3, height / 3])
        neg_rotated_auto_center = translation.translate(rotation.rotate(auto_center, -theta), [2 * width / 3, height / 3])
        placeholder = [center_position[0], center_position[1], 10, 10]
        #renderer.clear(clear_color)
        #renderer.draw_rect(placeholder, 0x80FF00FF)
        if mouse_down:
            points = perturb_points(points, offset_points, pattern, center_position)
        else:
            points = perturb_points(points, offset_points, pattern, neg_rotated_auto_center)
            points = perturb_points(points, offset_points, pattern, rotated_auto_center)
        output_cur = translation.translate(points, offset_points)
        renderer.draw_line(output_cur, 0x40204000)
        if len(output) < 1:
            output.append(output_cur)
        else:
            renderer.draw_point(output[0], clear_color)
            renderer.draw_line(output[0], clear_color)
            output.append(output_cur)
            del output[0]
        for point in points:
            if point[1] > 0:
                point[1] -= 0.75
        renderer.present()
    return 0
for num in x:
    x_trans = []
    y_trans = []
    for i in range(num):
        line = next(f)
        line = line[0:len(line) - 1]
        X, y = ri.read_train_struct()
        line = line.split(' ')
        offset = []
        c = X[int(line[1]), :]
        example = np.array(c).reshape(8, 16)
        if line[0] == 'r':
            alpha = int(line[2])
            x_result = r.rotate(example, alpha)
            x_result = x_result.reshape(128,)
        elif line[0] == 't':
            offset.append(int(line[2]))
            offset.append(int(line[3]))
            x_result = t.translate(example, offset)
            x_result = x_result.reshape(128,)
        x_trans.append(x_result)
        y_trans.append(train_data[int(line[1])])
    if num == 0:
        clf = svm.train(X_train, y_train, i / len(y_train))
    else:
        clf = svm.train(x_trans, y_trans, i / len(y_trans))
    y_test, y_pred, score = svm.test(clf, X_test, y_test)
    test_accuracy.append(score * 100)
import numpy as np
import argparse
import rotation
import cv2

ap = argparse.ArgumentParser()
ap.add_argument("-i", "--image", required=True, help="Enter Path to the Image")
args = vars(ap.parse_args())

image = cv2.imread(args["image"])
cv2.imshow("Original", image)
rotated = rotation.rotate(image, 10)
cv2.imshow("Rotated", rotated)
cv2.waitKey(0)
def sift_init():
    """Init SIFT descriptors and keypoints for all images in query/.

    :return: just pass them to sift_match
    """
    if LOAD_SIFT == 0:
        query_img_name = [[], [], []]
        query_img = [[], [], []]
        query_img_hog = [[], [], []]
        des = [[], [], []]
        sift_pt = [[], [], []]
        for direct in range(FRONT, NONE):
            for base_path, folder_list, file_list in os.walk('query/' + direct_lower_str[direct]):
                for file_name in file_list:
                    filename = os.path.join(base_path, file_name)
                    if filename[-4:] != '.png' and filename[-4:] != '.jpg':
                        continue
                    if base_path == 'query/side':
                        query_img_name[direct].append(filename)
                        query_img[direct].append(cv2.imread(filename, cv2.IMREAD_GRAYSCALE))
                        img_hog_temp = cv2.imread(filename)
                        img_hog_reverse = cv2.flip(img_hog_temp, -1)  # flipped horizontally & vertically
                        query_img_hog[direct].append([img_hog_temp, img_hog_reverse])
                    else:
                        query_img_name[direct].append(filename)
                        query_img[direct].append(cv2.imread(filename, cv2.IMREAD_GRAYSCALE))
                        query_img_hog[direct].append(cv2.imread(filename))
                # rotate query img in rotate directory for augmentation
                for folder_name in folder_list:
                    if folder_name != 'rotate':
                        continue
                    for base_path_rot, folder_list_rot, file_list_rot in os.walk('query/' + direct_lower_str[direct] + '/rotate'):
                        for file_name_rot in file_list_rot:
                            filename_rot = os.path.join(base_path_rot, file_name_rot)
                            if filename_rot[-4:] != '.png' and filename_rot[-4:] != '.jpg':
                                continue
                            query_img_name[direct].append(filename_rot)
                            query_img[direct].append(cv2.imread(filename_rot, cv2.IMREAD_GRAYSCALE))
                            img_hog_temp = cv2.imread(filename_rot)
                            img_hog_aug = rotation.rotate(img_hog_temp)
                            query_img_hog[direct].append(img_hog_aug)
                break  # only traverse top level
        # in case of inconsistency
        img_file_map = dict()
        for direct in range(FRONT, NONE):
            for img_name_temp, img_temp, img_hog_temp in zip(query_img_name[direct], query_img[direct], query_img_hog[direct]):
                img_file_map[img_name_temp] = [img_temp, img_hog_temp]
            query_img_name[direct].sort()
        query_img = [[], [], []]
        query_img_hog = [[], [], []]
        for direct in range(FRONT, NONE):
            for img_name_temp in query_img_name[direct]:
                query_img[direct].append(img_file_map[img_name_temp][0])
                query_img_hog[direct].append(img_file_map[img_name_temp][1])
        # Initiate HOG fd
        #fd = hog.hog_des(query_img_hog)
        fd = hog.load_fd()
        # Initiate SIFT detector
        orb = cv2.ORB_create()
        # find the keypoints and descriptors with SIFT
        for direct in range(FRONT, NONE):
            for img_temp, img_name in zip(query_img[direct], query_img_name[direct]):
                kp_temp, des_temp = orb.detectAndCompute(img_temp, None)
                if des_temp is None:
                    if DUMP == 1:
                        print(img_name + ": SIFT cannot detect keypoints and descriptor")
                    des[direct].append(None)
                    sift_pt[direct].append(None)
                    continue
                des[direct].append(des_temp)
                pt_list = []
                for pt_temp in kp_temp:
                    pt_list.append((pt_temp.pt, pt_temp.size, pt_temp.angle,
                                    pt_temp.response, pt_temp.octave, pt_temp.class_id))
                sift_pt[direct].append(pt_list)
        # load label json data
        img_json_map = dict()
        for direct in range(FRONT, NONE):
            for img_name_temp in query_img_name[direct]:
                img_json_map[img_name_temp] = json.load(open(str(img_name_temp.split('.')[0]) + '.json'))
        # pack together
        sift_data = [query_img_name, query_img_hog, query_img_name,
                     sift_pt, des, fd, img_json_map]
        with open('match.dat', 'wb') as f:
            pickle.dump(sift_data, f)
        return sift_data
    else:
        target = 'match.dat'
        if os.path.getsize(target) > 0:
            with open(target, "rb") as f:
                unpickler = pickle.Unpickler(f)
                sift_data = unpickler.load()
        return sift_data
print(error)

with open("Trial_1.log", "r") as f:
    curr_line = f.readline()
    count = 10
    ans = []
    temp = {}
    last_x_gyro = 0
    last_y_gyro = 0
    last_z_gyro = 0
    for curr_line in f:
        if curr_line == "-":
            ans.append(temp)
            temp = rotate(temp)
            insert = ("INSERT INTO `sensor_raw_one` (x_accel_one, y_accel_one, z_accel_one, x_gyro, y_gyro, z_gyro, temperature, x_accel_two, y_accel_two, z_accel_two, timestamp, x_angular_accel, y_angular_accel, z_angular_accel) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)")
            try:
                cursor.execute(insert, (float(temp["x_accel_one"]), float(temp["y_accel_one"]),
                                        float(temp["z_accel_one"]), float(temp["x_gyro"]),
                                        float(temp["y_gyro"]), float(temp["z_gyro"]),
                                        float(temp["temp"]), float(temp["x_accel_two"]),
                                        float(temp["y_accel_two"]), float(temp["z_accel_two"]),
                                        float(temp["timestamp"]),
                                        float((float(temp["x_gyro"]) - last_x_gyro) / 2),
                                        float((float(temp["y_gyro"]) - last_y_gyro) / 2),
                                        float((float(temp["z_gyro"]) - last_z_gyro) / 2)))
                cursor.execute("COMMIT")
            except pymysql.IntegrityError:
                print("Error. Username must be unique")
                print(temp)
            temp = {}
            count = 11
            break
        # newline
        if count == 11:
def local_rotation(self):
    rotation.rotate(self.pad)
    self.show_pad()
def main():
    logging.basicConfig(level=logging.INFO)
    today = date.today()

    parser = argparse.ArgumentParser(add_help=False)
    mutually_exclusive_group = parser.add_mutually_exclusive_group(required=True)
    mutually_exclusive_group.add_argument("-r", help="-r: Pacbio HiFi reads from your species")
    mutually_exclusive_group.add_argument("-c", help="-c: Assembled fasta contigs/scaffolds to be searched to find mitogenome")
    parser.add_argument("-h", "--help", action="help", default=argparse.SUPPRESS,
                        help="Print this help message.")
    parser.add_argument("-f", help="-f: Close-related mitogenome in fasta format", required=True)
    parser.add_argument("-g", help="-g: Close-related species mitogenome in genbank format", required=True)
    parser.add_argument("-t", help="-t: Number of threads for (i) hifiasm and (ii) the blast search",
                        required=True, type=int)
    parser.add_argument("-p", help="-p: Percentage of query in the blast match with close-related mito",
                        type=int, default=50)
    parser.add_argument("-m", help="-m: Number of bits for HiFiasm bloom filter [it maps to -f in HiFiasm] (default = 0)",
                        type=int, default=0)
    parser.add_argument('--circular-size', help='Size to consider when checking for circularization',
                        type=int, default=220)
    parser.add_argument('--circular-offset',
                        help='Offset from start and finish to consider when looking for circularization',
                        type=int, default=40)
    parser.add_argument("-o", type=str, default='1',
                        help="""-o: Organism genetic code following NCBI table (for mitogenome annotation):
    1. The Standard Code 2. The Vertebrate Mitochondrial Code 3. The Yeast Mitochondrial Code
    4. The Mold, Protozoan, and Coelenterate Mitochondrial Code and the Mycoplasma/Spiroplasma Code
    5. The Invertebrate Mitochondrial Code 6. The Ciliate, Dasycladacean and Hexamita Nuclear Code
    9. The Echinoderm and Flatworm Mitochondrial Code 10. The Euplotid Nuclear Code
    11. The Bacterial, Archaeal and Plant Plastid Code 12. The Alternative Yeast Nuclear Code
    13. The Ascidian Mitochondrial Code 14. The Alternative Flatworm Mitochondrial Code
    16. Chlorophycean Mitochondrial Code 21. Trematode Mitochondrial Code
    22. Scenedesmus obliquus Mitochondrial Code 23. Thraustochytrium Mitochondrial Code
    24. Pterobranchia Mitochondrial Code 25. Candidate Division SR1 and Gracilibacteria Code""")
    args = parser.parse_args()

    print("MitoHifi v2.2" + "\n")
    print("Started at:", today)

    # measure the length of the related mitogenome
    rel_mito_len = getMitoLength.get_mito_length(args.f)
    print("Length of related mitogenome is: {} bp".format(rel_mito_len))

    # calculate maximum contig size accepted by mitofinder when annotating the contigs
    max_contig_size = 5 * rel_mito_len

    # if input are reads, map them to the related mitogenome and assemble the mapped ones
    if args.r:
        print("\nRunning MitoHifi pipeline in reads mode\n")
        print("\nFirst we map your PacBio HiFi reads to the close-related mitogenome\n")
        print(shlex.split(args.r))
        minimap_cmd = ["minimap2", "-t", str(args.t), "--secondary=no", "-ax", "map-pb", args.f] + shlex.split(args.r)
        samtools_cmd = ["samtools", "view", "-@", str(args.t), "-S", "-b", "-F4", "-F", "0x800"]
        minimap = subprocess.Popen(minimap_cmd, stdout=subprocess.PIPE)
        mapped_reads_f = open("reads.HiFiMapped.bam", "w")
        subprocess.run(samtools_cmd, stderr=subprocess.STDOUT, stdin=minimap.stdout, stdout=mapped_reads_f)
        minimap.wait()
        minimap.stdout.close()

        print("\nNow we filter out any mapped reads that are larger than the reference mitogenome to avoid NUMTs\n")
        mapped_fasta_f = open("gbk.HiFiMapped.bam.fasta", "w")
        subprocess.run(["samtools", "fasta", "reads.HiFiMapped.bam"], stdout=mapped_fasta_f)
        filterfasta.filterFasta(minLength=rel_mito_len, neg=True,
                                inStream="gbk.HiFiMapped.bam.fasta",
                                outPath="gbk.HiFiMapped.bam.filtered.fasta")

        print("\nNow let's run hifiasm to assemble the mapped and filtered reads!\n")
        with open("hifiasm.log", "w") as hifiasm_log_f:
            subprocess.run(["hifiasm", "-t", str(args.t), "-f", str(args.m),
                            "-o", "gbk.HiFiMapped.bam.filtered.assembled",
                            "gbk.HiFiMapped.bam.filtered.fasta"],
                           stderr=subprocess.STDOUT, stdout=hifiasm_log_f)

        gfa2fa_script = os.path.join(os.path.dirname(__file__), "gfa2fa")  # gets path to gfa2fa script
        with open("gbk.HiFiMapped.bam.filtered.assembled.p_ctg.fa", "w") as p_ctg_f:
            subprocess.run([gfa2fa_script, "gbk.HiFiMapped.bam.filtered.assembled.p_ctg.gfa"], stdout=p_ctg_f)
        with open("gbk.HiFiMapped.bam.filtered.assembled.a_ctg.fa", "w") as a_ctg_f:
            subprocess.run([gfa2fa_script, "gbk.HiFiMapped.bam.filtered.assembled.a_ctg.gfa"], stdout=a_ctg_f)
        with open("hifiasm.contigs.fasta", "w") as hifiasm_f:
            subprocess.run(["cat", "gbk.HiFiMapped.bam.filtered.assembled.p_ctg.fa",
                            "gbk.HiFiMapped.bam.filtered.assembled.a_ctg.fa"], stdout=hifiasm_f)
        contigs = "hifiasm.contigs.fasta"
    else:
        print("\nRunning MitoHifi pipeline in contigs mode\n")
        print("Fixing potentially conflicting FASTA headers...\n")
        original_contigs = args.c
        fixContigHeaders.fix_headers(original_contigs, "fixed_header_contigs.fasta")
        os.remove(original_contigs)  # remove original contig file
        # replace original contigs file by the version that has the headers fixed
        shutil.move("fixed_header_contigs.fasta", original_contigs)
        contigs = original_contigs

    print("\nLet's run the blast of the contigs versus the close-related mitogenome\n")
    makeblastdb = "makeblastdb -in " + args.f + " -dbtype nucl"
    print(makeblastdb)
    subprocess.run(["makeblastdb", "-in", args.f, "-dbtype", "nucl"], stderr=subprocess.STDOUT)
    print("\nmakeblastdb done. Running blast with the contigs\n")
    subprocess.run(["blastn", "-query", contigs, "-db", args.f, "-num_threads", str(args.t),
                    "-out", "contigs.blastn", "-outfmt", "6 std qlen slen"],
                   stderr=subprocess.STDOUT)
    print("Blast done!" + "\n")

    # the next script parses a series of conditions to exclude blast hits with NUMTs
    parse_blast.parse_blast(query_perc=args.p)

    # We check for circularisation

    # select contigs to be circularized:
    # first look for contigs in parsed_blast.txt
    contigs_ids = get_contigs_ids("parsed_blast.txt")

    # if we don't find contigs in parsed_blast.txt,
    # look for contigs in parsed_blast_all.txt
    if len(contigs_ids) == 0:
        contigs_ids = get_contigs_ids("parsed_blast_all.txt")

    # if we can't find any contigs even in parsed_blast_all.txt, then we exit the pipeline
    if len(contigs_ids) == 0:
        sys.exit("""\n Attention! \n The 'parsed_blast.txt' and 'parsed_blast_all.txt' files are empty.
 The pipeline has stopped !! \n You need to run further scripts to check if you have mito reads pulled to a large NUMT!!""")

    # records all contigs kept for the downstream steps in a file called 'contigs_ids.txt'
    with open("contigs_ids.txt", "w") as f:
        for contig_id in contigs_ids:
            f.write(contig_id + "\n")

    # removes the file that contains the circularization history if it already exists
    try:
        os.remove('all_contigs.circularisationCheck.txt')
    except OSError:
        pass

    print("\n" + "6-) Now we are going to circularize, annotate and rotate each contig which is a potential mitogenome" + "\n")

    # creates a dictionary that will save frameshift information for each contig
    contig_shifts = {}
    # iterates through each contig
    for contig_id in contigs_ids:
        print("\n" + "Working with contig: " + contig_id + "\n")
        # retrieves the FASTA files for each contig
        filterfasta.filterFasta(idList=[contig_id], inStream=contigs,
                                outPath="".join([contig_id, ".mito.fa"]))
        # circularizes each contig and saves circularization history to a file
        circularization_history = get_circo_mito(contig_id, args.circular_size, args.circular_offset)
        for circularization_event in circularization_history:
            with open('all_contigs.circularisationCheck.txt', 'a') as f:
                f.write("\t".join([contig_id, circularization_event, "\n"]))
        # annotates mitogenome(s) using mitofinder
        print("Running mitofinder with maximum contig size of {} bp".format(max_contig_size))
        subprocess.run(["mitofinder", "--max-contig-size", str(max_contig_size),
                        "-j", contig_id + ".annotation", "-a", contig_id + ".mitogenome.fa",
                        "-r", args.g, "-o", args.o, "-p", str(args.t)],
                       stderr=subprocess.STDOUT)
        # rotates the mitogenome
        mitogenome_gb = os.path.join(contig_id + ".annotation",
                                     contig_id + ".annotation_MitoFinder_mitfi_Final_Results",
                                     contig_id + ".annotation_mtDNA_contig.gb")
        if not os.path.isfile(mitogenome_gb):
            warnings.warn("Contig " + contig_id + " does not have an annotation file, check MitoFinder's output")
            continue
        start, strand = rotation.get_phe_pos(mitogenome_gb)
        genome = contig_id + ".mitogenome.fa"
        if start == None:
            warnings.warn('tRNA-Phe is not present in file ' + mitogenome_gb + '... Skipping contig ' + contig_id + '\n')
            continue
        new_gb = None
        if strand == -1:
            genome_rc = contig_id + "_RC.mitogenome.fa"
            rc = os.path.join(os.path.dirname(genome), genome_rc)
            rotation.make_rc(genome, rc)
            new_gb = rotation.annotate(os.path.dirname(genome), os.path.abspath(genome_rc),
                                       os.path.abspath(args.g), contig_id, args.o,
                                       max_contig_size, args.t)
            start, strand = rotation.get_phe_pos(new_gb)
            genome = rc
        rotation.rotate(genome, start, contig_id)
        print(' '.join(['Rotated to tRNA-Phe genome is at',
                        os.path.join(os.path.dirname(genome), contig_id + '.mitogenome.rotated.fa')]))
        if new_gb:
            print("new_gb: ")
            print(new_gb)
            print('Mitogenome annotation is at ', new_gb)
        # check frameshifts in genes from contig and save findings to the
        # `contig_shifts` dictionary
        frameshifts = findFrameShits.find_frameshifts(mitogenome_gb)
        contig_dir = os.path.join("potential_contigs", contig_id)
        mitogenome_location = os.path.join(contig_dir, mitogenome_gb)
        contig_shifts[contig_id] = [frameshifts, mitogenome_location]

    # align final mitogenome rotated contigs
    print("\n" + "7-) Now the final rotated contigs will be aligned" + "\n")
    # list all final rotated contigs
    contigs_files = []
    for curr_file in os.listdir('.'):
        if curr_file.endswith('mitogenome.rotated.fa'):
            contigs_files.append(curr_file)
    # first concatenate all rotated contigs into a single multifasta file
    print("List of contigs that will be aligned: " + str(contigs_files) + "\n")
    concat_fasta = alignContigs.concatenate_contigs(contigs_files)
    # then run MAFFT alignment between the rotated contigs using the multifasta as input and clustal as output format
    alignContigs.mafft_align(multifasta_file=concat_fasta, threads=args.t, clustal_format=True)

    print("\n" + "8-) Now we will choose the most representative contig" + "\n")
    repr_contig_id, repr_contig_cluster = getReprContig.get_repr_contig("all_mitogenomes.rotated.fa", args.t)
    print("Representative contig is {} that belongs to {}. This contig will be our final mitogenome. See all contigs and clusters in cdhit.out.clstr".format(repr_contig_id, repr_contig_cluster))

    repr_contig_fasta = repr_contig_id + ".mitogenome.rotated.fa"
    repr_contig_get_gb = ["mitofinder", "--max-contig-size", str(max_contig_size),
                          "-j", "final_mitogenome.annotation", "-a", repr_contig_fasta,
                          "-r", args.g, "-o", args.o, "-p", str(args.p)]
    subprocess.run(repr_contig_get_gb, stderr=subprocess.STDOUT)

    final_fasta = os.path.join("final_mitogenome.annotation",
                               "final_mitogenome.annotation_MitoFinder_mitfi_Final_Results",
                               "final_mitogenome.annotation_mtDNA_contig.fasta")
    final_gbk = os.path.join("final_mitogenome.annotation",
                             "final_mitogenome.annotation_MitoFinder_mitfi_Final_Results",
                             "final_mitogenome.annotation_mtDNA_contig.gb")

    # Generate contigs stats
    ## Print first two lines (comment and header)
    frameshifts = findFrameShits.find_frameshifts(final_gbk)
    if not frameshifts:
        all_frameshifts = "No frameshift found"
    elif len(frameshifts) == 1:
        all_frameshifts = "".join(frameshifts)
    elif len(frameshifts) > 1:
        all_frameshifts = ";".join(frameshifts)
    with open("contigs_stats.tsv", "w") as f:
        f.write("\t".join(["contig_id", "frameshifts_found", "genbank_file\n"]))
        f.write("\t".join(["final_mitogenome", all_frameshifts, "final_mitogenome.gb\n"]))

    ## Iterate over each contig and print its info (ID, frameshifts and genbank file used
    ## to search for the frameshifts)
    for contig_id in contig_shifts:
        # if contig is the representative, skip writing the stats
        # because we have already written it (final_mitogenome)
        if contig_id == repr_contig_id:
            continue
        frameshifts = contig_shifts[contig_id][0]
        genbank_path = contig_shifts[contig_id][1]
        if not frameshifts:
            all_frameshifts = "No frameshift found"
        elif len(frameshifts) == 1:
            all_frameshifts = "".join(frameshifts)
        elif len(frameshifts) > 1:
            all_frameshifts = ";".join(frameshifts)
        with open("contigs_stats.tsv", "a+") as f:
            f.write("\t".join([contig_id, all_frameshifts, genbank_path + "\n"]))

    # copying final FASTA and GBK to working directory
    shutil.copy(final_fasta, "final_mitogenome.fasta")
    shutil.copy(final_gbk, "final_mitogenome.gb")

    # cleaning up working directory
    cleanUpCWD.clean_up_work_dir(contigs_ids)

    print("Pipeline finished!")
def tamper(model):
    test_acc = []
    wrd_acr = []
    y_pred = []
    X_train, y_train = ri.read_train_struct()
    X_test, y_test = ri.read_test_struct()
    for num in x:
        print(num)
        X_trans = copy.deepcopy(X_train)
        for i in range(num):
            line = next(f)
            line = line.split(' ')
            offset = []
            c = X_trans[int(line[1]), :]
            example = np.array(c).reshape(16, 8)
            if line[0] == 'r':
                alpha = float(line[2])
                x_result = r.rotate(example, alpha)
                x_result = x_result.reshape(128,)
            elif line[0] == 't':
                offset.append(int(line[2]))
                offset.append(int(line[3]))
                x_result = t.translate(example, offset)
                x_result = x_result.reshape(128,)
            x_max_old = 0
            x_min_old = 255
            x_min_new = 0
            x_max_new = 1
            new_value = restore_range(x_result, x_max_old, x_min_old, x_min_new, x_max_new)
            X_trans[int(line[1])] = new_value
        # training
        if num == 0:
            if model == 'svm':
                clf = svm.train(X_train, y_train, 1000 / len(y_train))
            else:
                x_y = X_train, y_train
                optimize.get_params(x_y)
                a = np.loadtxt("best_Weights_tampered", usecols=(0,))
                W = np.array(a[:26 * 128].reshape(26, 128))
                T = np.array(a[26 * 128:26 * 128 + 26 * 26].reshape(26, 26))
        # training with more than 1 transformation
        else:
            if model == 'svm':
                clf = svm.train(X_trans, y_train, 1000 / len(y_train))
            else:
                x_y = X_trans, y_train
                print(type(x_y))
                optimize.get_params(x_y)
                a = np.loadtxt("best_Weights_tampered", usecols=(0,))
                W = np.array(a[:26 * 128].reshape(26, 128))
                T = np.array(a[26 * 128:26 * 128 + 26 * 26].reshape(26, 26))
        # testing
        if model == 'svm':
            y_pred, score = svm.test(clf, X_test, y_test)
        else:
            y_pred = decode.max_sum(X_test, W, T)
            y_pred = [y + 1 for y in y_pred]
            y_test = y_test.reshape(26198,)
            y_pred = np.array(y_pred).reshape(len(y_pred))
            print(y_test)
            print(y_pred)
            score = max_sum_decode.get_test_accuracy(y_test, y_pred)
        test_acc.append(score * 100)
        y_test = y_test.reshape(len(y_test))
        given_words, pred_words = svm.form_words(y_test, y_pred)
        w_acc = svm.word_accuracy(given_words, pred_words)
        wrd_acr.append(w_acc * 100)
    return test_acc, wrd_acr
def pose_cb(self, msg):
    # TODO: Implement
    self.current_pose = msg
    closest_dist = float("inf")
    self.closest_wpt = self.base_wpts.waypoints[0]
    d1 = lambda a, b: math.sqrt((a.x - b.x)**2 + (a.y - b.y)**2 + (a.z - b.z)**2)
    for i in range(len(self.base_wpts.waypoints)):
        wpt = self.base_wpts.waypoints[i]
        dist = d1(msg.pose.position, wpt.pose.pose.position)
        if dist < closest_dist:
            closest_dist = dist
            self.closest_wpt_id = i
            self.closest_wpt = wpt
    # determine whether the closest waypoint is in front of the car
    pos_to_vector = lambda p: np.array([p.x, p.y, p.z])
    ori_to_vector = lambda p: np.array([p.x, p.y, p.z, p.w])
    # note that ROS represents the orientation as x,y,z,w but the utils expect w,x,y,z
    cpose = pos_to_vector(self.current_pose.pose.position)
    cori = ori_to_vector(self.current_pose.pose.orientation)
    ppose = pos_to_vector(self.closest_wpt.pose.pose.position)
    ppose_car = rotate(ppose - cpose, cori)
    if ppose_car[0] <= 0.5:
        # the closest waypoint is not in front of the car, so take the next one
        self.closest_wpt_id += 1
        self.closest_wpt_id %= self.num_base_wpts
    new_wpts = Lane()
    end_wpt_id = (self.closest_wpt_id + LOOKAHEAD_WPS) % self.num_base_wpts
    if end_wpt_id < self.closest_wpt_id:
        new_wpts.waypoints = self.base_wpts.waypoints[self.closest_wpt_id:] + self.base_wpts.waypoints[:end_wpt_id]
    else:
        new_wpts.waypoints = self.base_wpts.waypoints[self.closest_wpt_id:end_wpt_id]
    rospy.loginfo("traffic: %d" % self.traffic_wpt_id)
    if self.traffic_wpt_id != -1:
        # have traffic light
        next_decay_wpt_id = self.traffic_wpt_id - self.closest_wpt_id
        num_decay_wpts = 0
        decay_ongoing = True
        # stop in next 5 waypoints
        for i in range(5):
            stop_wpt_id = next_decay_wpt_id + i
            if stop_wpt_id < len(new_wpts.waypoints):
                self.set_waypoint_velocity(new_wpts.waypoints, stop_wpt_id, 0)
        # and decay in previous waypoints
        while decay_ongoing and next_decay_wpt_id >= 0:
            new_velocity = num_decay_wpts * TL_DECAY_RATE
            if self.get_waypoint_velocity(new_wpts.waypoints[next_decay_wpt_id]) < new_velocity:
                decay_ongoing = False  # decay finished
            else:
                self.set_waypoint_velocity(new_wpts.waypoints, next_decay_wpt_id, new_velocity)
            next_decay_wpt_id -= 1
            num_decay_wpts += 1
    self.final_waypoints_pub.publish(new_wpts)
    rospy.loginfo("waypoints published " + str(self.closest_wpt_id) + " " + str(end_wpt_id))
def _step(self):
    """Simulate one step.

    This function simulates one 'step' in the simulation of an electron's
    path. It calls a method from the Scatterer, called Scatter(), which
    determines the kind of scattering event (elastic or inelastic), the
    distance the electron travels before the scattering event, and the new
    direction of the electron relative to its old direction, in polar
    coordinates (radians).
    """
    # First get the distance the electron travels before being scattered,
    # as well as the kind of scattering event and the angle of scattering.
    kind, d, theta, phi = self.scatterer.Scatter()
    # The new time is the distance travelled, divided by the length of the
    # velocity vector.
    velocity = self.e.vector[3:6]
    position = self.e.vector[0:3].copy()
    time = d / np.linalg.norm(velocity)
    old_position = position.copy()
    # Determine the new position.
    new_position = position.copy() + (velocity * time)
    if self.scatteringMedium.inside(new_position):
        # Check if the next potential scattering event is inside or outside the
        # scattering medium. If it is still inside the scattering medium, then
        # the electron is scattered once again.
        self.e.vector[0:3] = new_position
        # Determine the new velocity after scattering. The velocity vector
        # needs to be transformed into a different coordinate system to
        # perform the rotation.
        new_velocity = rotate(velocity, theta, phi)
        self.e.vector[3:6] = new_velocity
        # Check the kind of scattering (elastic or inelastic) and increment
        # the count accordingly.
        if kind == 'elastic':
            # Increment the elastic scattering count.
            self.e.vector[8] += 1
        else:
            # Increment the inelastic scattering count.
            self.e.vector[7] += 1
            delta_E = self.scatterer.getDeltaKE()
            self.e.updateKineticEnergy(delta_E)
            if self.e.kinetic_energy > 0:
                self.scatterer.setXSect(self.e.kinetic_energy)
            else:
                self.e.kinetic_energy = 0
                self.e.vector[3:6] = np.array([0, 0, 0])
        # Determine the new time.
        self.e.vector[6] += time
        self.e.pathlength += d
    else:
        # This condition is run if the electron's new position is not inside
        # the scattering medium. Then the scattering event is rejected and
        # the electron is projected onto the Sphere.
        # To determine the path length within the scattering medium, first
        # find the intersection with the surface of the scatterer.
        scat_inter = self.scatteringMedium.getIntersection(old_position.copy(), velocity)
        # Get the distance from the intersection to the previous position.
        d = np.linalg.norm(old_position - scat_inter)
        # Append this distance to the path lengths.
        self.e.pathlength += d
        # Then find the coordinates where the electron will intersect the boundary.
        intersect = self.boundary.getIntersection(position, velocity)
        # Set the electron's current position coordinates to the intersection coordinates.
        self.e.vector[0:3] = intersect
        # Get the new time.
        distance = np.linalg.norm(intersect - position)
        new_time = distance / np.linalg.norm(velocity)
        self.e.vector[6] = new_time
        # Collect all intersected electrons into a list.
        self.intersections += [list(self.e.vector)]
        self.intersected = True
def process_contig_02(ref_tRNA, threads_per_contig, circular_size, circular_offset,
                      contigs, max_contig_size, rel_gbk, gen_code, contig_id):
    """Rotate a contig related to a reference tRNA gene and calculate contig statistics.

    Args:
        ref_tRNA (str): tRNA gene to be used as reference for rotation (contig starts at reference tRNA)
        threads_per_contig (int): number of threads to be used
        circular_size (int): size to consider when checking for circularization
        circular_offset (int): offset from start and finish to consider when looking for circularization
        contigs (str): filename of contigs file (containing all contigs)
        max_contig_size (int): maximum contig size allowed
        rel_gbk (str): filename of related mito genbank file
        gen_code (str): species genetic code
        contig_id (str): target contig ID

    Returns:
        None
    """
    logging.info(f"Started {contig_id} rotation.")
    if not os.path.isfile(f"{contig_id}.trnas"):
        warnings.warn(f"Contig {contig_id} does not have annotated tRNAs, skipping it...")
        return
    # start stays None if the reference tRNA is not found in the .trnas file
    start = None
    strand = None
    with open(f"{contig_id}.trnas", "r") as infile:
        for line in infile:
            if line.strip().split("\t")[0] == ref_tRNA:
                start = int(line.strip().split("\t")[1])
                strand = int(line.strip().split("\t")[2])
    if start == None:
        warnings.warn(f"Reference gene {ref_tRNA} is not present in contig {contig_id}. Skipping contig...")
        return
    mitogenome_gb = os.path.join(contig_id + ".annotation",
                                 contig_id + ".annotation_MitoFinder_mitfi_Final_Results",
                                 contig_id + ".annotation_mtDNA_contig.gb")
    genome = contig_id + ".mitogenome.fa"
    if strand == -1:
        logging.info(f"{ref_tRNA} is at reverse complement of {contig_id}.mitogenome.fa")
        logging.info(f"For that reason we'll reverse complement {contig_id}.mitogenome.fa before the rotation")
        genome_rc = contig_id + "_RC.mitogenome.fa"
        rc = os.path.join(os.path.dirname(genome), genome_rc)
        rotation.make_rc(genome, rc)
        logging.info(f"Reverse complement generated: {contig_id}_RC.mitogenome.fa. Starting reverse complement annotation...")
        mitogenome_gb = rotation.annotate(os.path.dirname(genome), os.path.abspath(genome_rc),
                                          os.path.abspath(rel_gbk), contig_id, gen_code,
                                          max_contig_size, str(threads_per_contig))
        logging.info(f"Annotation of reverse complement for contig {contig_id} done")
        genome = rc
        if not os.path.isfile(mitogenome_gb):
            warnings.warn("Contig " + contig_id + " does not have a reverse complemented annotation file, check MitoFinder's log")
            return
    rotation.rotate(genome, start, contig_id)
    try:
        f = open(mitogenome_gb)
    except FileNotFoundError:
        sys.exit(f"""Annotation file {mitogenome_gb} not found. An error may have occurred when annotating contig {contig_id}. Check MitoFinder""")
    finally:
        f.close()
    rotated_file = os.path.join(os.path.dirname(genome), contig_id + '.mitogenome.rotated.fa')
    logging.info(f"Rotation of {contig_id} done. Rotated contig is at {rotated_file}")

    # check frameshifts in genes from contig and save findings to the
    # `{contig_id}.individual.stats` intermediate file
    frameshifts = findFrameShifts.find_frameshifts(mitogenome_gb)
    gb_len, num_genes = findFrameShifts.get_gb_stats(mitogenome_gb)
    contig_dir = os.path.join("potential_contigs", contig_id)
    mitogenome_location = os.path.join(contig_dir, mitogenome_gb)
    is_circ = get_circularization_info(contig_id)
    if not frameshifts:
        all_frameshifts = "No frameshift found"
    elif len(frameshifts) == 1:
        all_frameshifts = "".join(frameshifts)
    elif len(frameshifts) > 1:
        all_frameshifts = ";".join(frameshifts)
    with open(f"{contig_id}.individual.stats", "w") as outfile:
        outfile.write("\t".join([contig_id, all_frameshifts, mitogenome_location,
                                 gb_len, num_genes, str(is_circ) + "\n"]))
def extract_y_scale(input_file, coordinate, working_dir):
    print(working_dir)
    img = cv2.imread(input_file, cv2.IMREAD_GRAYSCALE)
    rows, cols = img.shape
    img = img[0:rows, 0:coordinate]
    temp_file = os.path.join(working_dir, 'temp.png')
    temp1_file = os.path.join(working_dir, 'temp1.png')
    cv2.imwrite(temp_file, img)
    cv2.imwrite(temp1_file, img)
    output_file = os.path.join(working_dir, "test3.png")
    rotation.rotate(temp_file, output_file, 1)
    img = cv2.imread(output_file, cv2.IMREAD_GRAYSCALE)
    cv2.imwrite(temp1_file, img)
    rows, cols = img.shape
    out_hocr_file = os.path.join(working_dir, "out1")
    os.system("tesseract " + output_file + " " + out_hocr_file + " hocr")
    output_array = []
    y_pos = []
    f = open(out_hocr_file + ".hocr", 'r')
    data = f.read()
    soup = BeautifulSoup(data)
    mini = 1000000000
    for i in soup.find_all('span', {'class': 'ocr_line'}):
        h = i.get('title').split(' ')
        k = h[1:5]
        k[-1] = k[-1][:-1]
        print(k)
        if int(k[3]) < mini:
            mini = int(k[3])
    img = img[mini:rows, 0:cols]
    cv2.imwrite(temp_file, img)
    rotation.rotate(temp_file, output_file, 0)
    out_hocr_file_2 = os.path.join(working_dir, "out")
    os.system("tesseract " + output_file + " " + out_hocr_file_2 + " hocr")
    f = open(out_hocr_file_2 + ".hocr", 'r')
    data = f.read()
    a = []
    index = []
    soup = BeautifulSoup(data)
    count = 0
    for i in soup.find_all('span', {'class': 'ocrx_word'}):
        print(i.text)
        h = i.get('title').split(' ')
        k = h[1:5]
        k[-1] = k[-1][:-1]
        # OCR often misreads zero as the letter O, so map it back
        x = ''
        for char in i.text:
            if char == 'O' or char == 'o':
                x += '0'
            else:
                x += char
        print(x)
        count = count + 1
        try:
            y = float(x)
            output_array.append(float(x))
            y_pos.append((int(k[1]) + int(k[3])) / 2)
            index.append(count)
        except:
            continue
    y1 = output_array[0]
    y2 = output_array[1]
    pos1 = y_pos[0]
    pos2 = y_pos[1]
    scale = abs(y1 - y2) / abs(index[0] - index[1])
    return y1, y2, pos1, pos2, scale
img = cv2.imread('ocr4.png')

# Convert to gray
img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)

# Histogram equalization - degrades the result here
#img = cv2.equalizeHist(img)

# Create a CLAHE object - CLAHE histogram normalization, changes practically nothing
clahe = cv2.createCLAHE(clipLimit=1.0, tileGridSize=(1, 1))
img = clahe.apply(img)

# Apply threshold to get an image with only black and white - binarization
#blur = cv2.GaussianBlur(img, (5, 5), 0)  # Gaussian filtering
#ret3, img = cv2.threshold(blur, 0, 255, cv2.THRESH_BINARY + cv2.THRESH_OTSU)
th, img = cv2.threshold(img, 127, 255, cv2.THRESH_BINARY_INV | cv2.THRESH_OTSU)

# Apply dilation and erosion to remove some noise
kernel = np.ones((1, 1), np.uint8)
img = cv2.erode(img, kernel, iterations=1)
#img = cv2.dilate(img, kernel, iterations=2)

# Optional additional filtering
img = cv2.GaussianBlur(img, (1, 1), 0)

rotated = rotate(img)

# show results
cv2.imshow('rotated', rotated)
cv2.waitKey()
def extract_y_scale(input_file, coordinate, working_dir):
    print(working_dir)
    img = cv2.imread(input_file, cv2.IMREAD_GRAYSCALE)
    rows, cols = img.shape
    img = img[0:rows, 0:coordinate]
    temp_file = os.path.join(working_dir, 'temp.png')
    temp1_file = os.path.join(working_dir, 'temp1.png')
    cv2.imwrite(temp_file, img)
    cv2.imwrite(temp1_file, img)
    output_file = os.path.join(working_dir, "test3.png")
    testing_file = os.path.join(working_dir, "rectangle_y.png")
    rotation.rotate(temp_file, output_file, 1)
    img = cv2.imread(output_file, cv2.IMREAD_GRAYSCALE)
    cv2.imwrite(temp1_file, img)
    rows, cols = img.shape
    out_hocr_file = os.path.join(working_dir, "out1")
    os.system("tesseract " + output_file + " " + out_hocr_file + " hocr")
    output_array = []
    y_pos = []
    f = open(out_hocr_file + ".hocr", 'r')
    data = f.read()
    soup = BeautifulSoup(data)
    mini = 1000000000
    for i in soup.find_all('span', {'class': 'ocr_line'}):
        h = i.get('title').split(' ')
        k = h[1:5]
        k[-1] = k[-1][:-1]
        print(k)
        if int(k[3]) < mini:
            mini = int(k[3])
    img = img[mini:rows, 0:cols]
    cv2.imwrite(temp_file, img)
    rotation.rotate(temp_file, output_file, 0)
    out_hocr_file_2 = os.path.join(working_dir, "out")
    img1 = cv2.imread(output_file)
    os.system("tesseract " + output_file + " " + out_hocr_file_2 + " hocr")
    f = open(out_hocr_file_2 + ".hocr", 'r')
    data = f.read()
    a = []
    index = []
    soup = BeautifulSoup(data)
    count = 0
    for i in soup.find_all('span', {'class': 'ocrx_word'}):
        print(i.text)
        h = i.get('title').split(' ')
        k = h[1:5]
        k[-1] = k[-1][:-1]
        cv2.rectangle(img1, (int(k[0]) - 1, int(k[1]) + 1), (int(k[2]) + 1, int(k[3]) - 1), (0, 255, 0), 3)
        # OCR often misreads zero as the letter O, so map it back
        x = ''
        for char in i.text:
            if char == 'O' or char == 'o':
                x += '0'
            else:
                x += char
        print(x)
        count = count + 1
        try:
            y = float(x)
            output_array.append(float(x))
            y_pos.append((int(k[1]) + int(k[3])) / 2)
            index.append(count)
        except:
            continue
    cv2.imwrite(testing_file, img1)
    y1 = output_array[0]
    y2 = output_array[1]
    pos1 = y_pos[0]
    pos2 = y_pos[1]
    scale = abs(y1 - y2) / abs(index[0] - index[1])
    return y1, y2, pos1, pos2, scale
def grow_leaf(renderer, start_pos, growth_direction_vector, parent_node, branches, subdivision_factor, color):
    leaf_development_factor = parent_node.root_value
    # growth direction vector is (magnitude, rotation)
    if growth_direction_vector[0] < growth_rate:
        return
    if leaf_development_factor >= 1:
        # saturate leaf development
        leaf_development_factor = 1
    wind_step = 0.0001
    if math.fabs(grow_leaf.wind_target - grow_leaf.toward_wind) < wind_step:
        grow_leaf.wind_target = (random.random() * math.pi / 8) - math.pi / 16
    if grow_leaf.toward_wind < grow_leaf.wind_target:
        grow_leaf.toward_wind += wind_step
    elif grow_leaf.toward_wind > grow_leaf.wind_target:
        grow_leaf.toward_wind -= wind_step
    rotated_point = translation.translate(
        start_pos,
        rotation.rotate((growth_direction_vector[0], 0),
                        growth_direction_vector[1] + grow_leaf.toward_wind))
    center_line = (int(start_pos[0]), int(start_pos[1]), int(rotated_point[0]), int(rotated_point[1]))
    interpolator = interpolate.linear_interpolate
    number_subdivisions = int(1 / subdivision_factor)
    developed_line = interpolator(center_line, leaf_development_factor)
    renderer.draw_line(developed_line, color)
    # use leaf envelope to decide when to branch and modulate interpolation based on leaf_development_factor
    # create branches
    branch((developed_line[2], developed_line[3]), leaf_development_factor, subdivision_factor,
           branches, growth_direction_vector, renderer, parent_node, color - 0x102000)
# rotate query img in rotate directory for augmentation
for folder_name in folder_list:
    if folder_name != 'rotate':
        continue
    for base_path_rot, folder_list_rot, file_list_rot in os.walk('query/' + direct_lower_str[direct] + '/rotate'):
        for file_name_rot in file_list_rot:
            filename_rot = os.path.join(base_path_rot, file_name_rot)
            if filename_rot[-4:] != '.png' and filename_rot[-4:] != '.jpg':
                continue
            imgname[direct].append(filename_rot)
            img[direct].append(cv2.imread(filename_rot, cv2.IMREAD_GRAYSCALE))
            img_hog_temp = cv2.imread(filename_rot)
            img_hog_aug = rotation.rotate(img_hog_temp)
            img_hog[direct].append(img_hog_aug)
break  # only traverse top level

# get HOG descriptor
if HOG_APP == 1:
    if REBUILD_HOG == 1:
        fd = hog.hog_des(img_hog)
    else:
        fd = hog.load_fd()
else:
    fd = None

# Initiate SIFT detector
orb = cv2.ORB_create()
def correction(image_path):
    rotate(image_path)
    skew_correction(image_path)
    return 0
assert Euler == False, 'Only use quaternions'
while not terminateConnection and samples < NUMBEROFSAMPLES:
    curTime = time.time()
    try:
        data = communicateToServer(server_address, sock)
        if Euler:
            orientation = (data[0], data[1], data[2])
            acc = (data[3], data[4], data[5])
        elif Quaternion:
            orientation = (data[0], data[1], data[2], data[3])
            acc = (data[4], data[5], data[6])
        if DisplayGraphMode:
            if Euler:
                rotation.rotationMatrix = rotation.rotate(orientation[0], orientation[1], orientation[2])
            elif Quaternion:
                rotation.rotationMatrix = rotation.rotateQuaternion(orientation[0], orientation[1],
                                                                    orientation[2], orientation[3])
        dt = curTime - nextTick
        if trackMovement and curTime > nextTick:
            nextTick = time.time() + 1 / samplerate
            print('acceleration: x = %0.2f, y = %0.2f, z = %0.2f' % (acc[0], acc[1], acc[2]))
            LinearTracking1.integrate(acc[0], acc[1], acc[2], function='Riemann')
            rotation.accx = acc[0]
            rotation.accy = acc[1]