def main():
    #screenshot.snapscreen()  # take a screenshot and save it to the Steam directory
    #print_time('Screenshot')
    raw_location = screenshot.get_raw_location()  # location of the original screenshot
    #print(raw_location)
    region.region(raw_location)  # process the screenshot and save it to the cache folder
    #print_time('Region')
    img_location = region.get_img_location()  # location of the bar image
    #print(img_location)
    items = ocr(img_location)  # list of item names
    #print_time('OCR')
    info = output.get_info(items)  # get the price list
    #print_time('Get_info')
    out_GUI.popwindow(info)  # display the results in a window
def simulation():
    data_path = "/Users/test/Uni/Masterarbeit/thesis_simulations/data"
    burnup = '0.5MWd'
    #burnup = '2MWd'

    spent_fuel_fname = "SERPENT_outputs_NatU_percentages.npy"
    spent_fuel_fname = os.path.join(data_path, spent_fuel_fname)
    if burnup == '0.5MWd':
        spent_rep_fuel_fname = "SERPENT_outputs_RepU_05MWd_percentages.npy"
    elif burnup == '2MWd':
        spent_rep_fuel_fname = "SERPENT_outputs_RepU_2MWd_percentages.npy"
    spent_rep_fuel_fname = os.path.join(data_path, spent_rep_fuel_fname)

    arch = archetypes.archetypes()
    commod = commodity.commodity()
    ctrl = control.control()
    fac = facility.facility(burnup)
    # not needed, but it should be called during the consistency check
    inst = institution.institution(burnup)
    recipes = recipe.recipe(spent_fuel=spent_fuel_fname,
                            spent_rep_fuel=spent_rep_fuel_fname,
                            burnup=burnup)
    reg = region.region(burnup)

    return {"simulation": {**arch, **commod, **ctrl, **fac, **recipes, **reg}}
def setup(image_type):
    name = 'salt-%s' % image_type
    name = identify(name)
    print(name)
    create_policy(name, 'image_policy.json')
    queue = sqs.create_queue(name)
    mapping = {'region': region(),
               'queue_url': queue.url,
               'image_type': image_type}
    data = user_data('image_data', mapping)
    return name, data, queue
def simulation():
    arch = archetypes.archetypes()
    commod = commodity.commodity()
    ctrl = control.control()
    fac = facility.facility()
    recipes = recipe.recipe()
    reg = region.region()
    return {"simulation": {**arch, **commod, **ctrl, **fac, **recipes, **reg}}
def get_region(self):
    """****************************************************************************************
    Task: returns a list of regions with all intervals

    Output:
        regions: list of regions
    ****************************************************************************************"""
    regions = []
    # open() replaces the Python 2 file() builtin; the context manager closes the file
    with open(self.filename) as fd:
        for line in fd:
            aline = line.split('\t')
            # new region
            r = region.region(aline[0], aline[1], aline[2], aline[3])
            regions.append(r)
    return regions
def parser(self):
    ua = dict(DesiredCapabilities.CHROME)
    options = webdriver.ChromeOptions()
    options.add_argument('headless')
    options.add_argument('window-size=1920x935')
    driver = webdriver.Chrome("C:\\Users\\fromt\\Desktop\\covid_bot\\chromedriver.exe",
                              chrome_options=options)
    driver.get("https://coronavirus-monitor.ru/coronavirus-v-rossii/")
    driver.implicitly_wait(30)
    driver.find_element_by_class_name("js-table-show-more").click()
    driver.implicitly_wait(30)
    regions = driver.find_elements_by_class_name("statistics-row")
    del regions[-1]

    x = 1
    while x == 1:
        rus = russia()
        rus.region.clear()
        for g in regions:
            s = g.text.split('\n')
            if len(s) > 1:
                reg = region()
                reg.Name = s[1]
                # pad missing "+N today" cells so the indices below stay aligned
                if "+" not in s[3]:
                    s.insert(3, "")
                    s.insert(4, "")
                if "+" not in s[6]:
                    s.insert(6, "")
                    s.insert(7, "")
                if "+" not in s[9]:
                    s.insert(9, "")
                    s.insert(10, "")
                reg.Active = s[2]
                reg.Active_today = s[3]
                reg.Deaths = s[5]
                reg.Deaths_today = s[6]
                reg.Recovered = s[8]
                reg.Recovered_today = s[9]
                rus.region.append(reg)
        if len(rus.region) == 85:
            x = 0

    with open('entry.pickle', 'wb') as f:
        pickle.dump(rus.region, f)
    driver.close()
    print(len(rus.region))
def fillImgRegions(self):
    regionID = 1
    self.edges = cv2.Canny(self.grayImage, 40, 120)
    self.mask = cv2.copyMakeBorder(self.edges, 1, 1, 1, 1, cv2.BORDER_CONSTANT, value=255)
    floodFlags = cv2.FLOODFILL_MASK_ONLY | 4 | 1 << 8
    for i in range(0, 240, 1):
        for j in range(0, 320, 1):
            # We found a new region:
            if self.imgRegions[i][j] == -1:
                # Optimize this, it's the part that makes it stupid slow
                if self.edges[i][j] == 0:
                    _, _, newMask, rect = cv2.floodFill(self.grayImage, self.mask, (j, i), 1,
                                                        loDiff=10, upDiff=10, flags=floodFlags)
                    newRegion = region(regionID, rect)
                    for k in range(rect[0], rect[0] + rect[2], 1):
                        for l in range(rect[1], rect[1] + rect[3], 1):
                            if newMask[l + 1][k + 1] == 1 and self.imgRegions[l][k] == -1:
                                self.imgRegions[l][k] = regionID
                    self.listRegions.append(copy.deepcopy(newRegion))
                    regionID += 1
    #self.mask = cv2.copyMakeBorder(self.edges, 1, 1, 1, 1, cv2.BORDER_CONSTANT, value=255)
    for i in range(1, 239, 1):
        for j in range(1, 319, 1):
            if self.imgRegions[i][j] == -1:
                for k in range(-1, 2, 1):
                    for l in range(-1, 2, 1):
                        if self.imgRegions[i + k][j + l] != -1 and self.imgRegions[i][j] == -1:
                            self.imgRegions[i][j] = self.imgRegions[i + k][j + l]
import sys
import time

import boto
from boto.ec2.autoscale import LaunchConfiguration
from boto.ec2.autoscale import AutoScalingGroup

from data import user_data, env
from identity import identify
from region import region
from access import create_policy
from control import while_not_try

autoscale = boto.ec2.autoscale.connect_to_region(region())
ec2 = boto.ec2.connect_to_region(region())
route53 = boto.connect_route53()

ami_timestamp = sys.argv[2]
master_ami = 'salt-master-%s' % ami_timestamp
minion_ami = 'salt-minion-%s' % ami_timestamp

images = ec2.get_all_images(owners=['self'])
master_ami = [image.id for image in images if image.name == master_ami][0]
minion_ami = [image.id for image in images if image.name == minion_ami][0]

master = identify('salt-master')
minion = identify('salt-minion')

master_security = ec2.create_security_group(master, master)
minion_security = ec2.create_security_group(minion, minion)
master_security.authorize(src_group=minion_security)

zone = route53.get_zones()[0]
mapping = {'zone_id': zone.id, 'dns': env('salt_master_dns')}
import time

from boto import sqs
from multiprocessing import Pool

from identity import identify
from data import user_data
from access import create_policy, delete_policy
from instance import start_instance, wait_instance, stop_instance
from region import region

sqs = sqs.connect_to_region(region())


def build_image(image_type):
    name, data, queue = setup(image_type)
    instance = start_instance(name, data)
    wait_instance(queue)
    stop_instance(instance)
    salt_image = instance.create_image(name)
    print(salt_image)
    cleanup(name, instance, queue)


def setup(image_type):
    name = 'salt-%s' % image_type
    name = identify(name)
    print(name)
    create_policy(name, 'image_policy.json')
    queue = sqs.create_queue(name)
    mapping = {'region': region(),
               'queue_url': queue.url,
               'image_type': image_type}
    data = user_data('image_data', mapping)
    return name, data, queue


def cleanup(name, instance, queue):
def update_region():
    cn_db = db_reg.region(con_db)
    data = request.json
    reg1 = reg.region(data['region_id'], data['region_description'])
    rs = cn_db.update(reg1)
    return jsonify({'message': rs}), 200
def one_region(region_id):
    e = reg.region(region_id=region_id)
    rs = db_reg.region(con_db).get_by_id(e)
    if rs[1] != 200:
        return jsonify({'message': rs[0]}), rs[1]
    return jsonify({'message': rs[0].to_json()}), 200
def insert_region():
    cn_db = db_reg.region(con_db)
    data = request.json
    reg1 = reg.region(1, data['region_description'])
    rs = cn_db.insert(reg1)
    return jsonify({'message': rs}), 200
def processRegionData(regionData, regionPath):
    # remove comments & empty lines
    lines = regionData.replace('\t', '').splitlines()        # remove tabs, split lines
    lines = [line.partition(cString)[0] for line in lines]   # remove comments
    lines = [line for line in lines if notEmpty(line)]       # remove empty lines

    # separate lines into standalones (should be empty) and present regions.
    standaloneLines, sections = getStandaloneLinesAndSections(lines)
    if len(standaloneLines) > 0:
        print("Warning: region being loaded (%s) has declarative lines outside of any region." % regionPath)

    ################################################
    ## get region data from 'region_data' section ##
    ################################################
    if 'region_data' not in sections.keys():  # If we didn't find a section labeled 'region_data',
        print("Error: section 'region_data' not found in region (%s)" % regionPath)  # inform the user that the region data file is invalid
        raise ValueError  # and exit out with an error.

    regionData = getValuePairs(sections['region_data'])
    requiredRegionDataKeys = [
        ['name',  str],  # These are config values necessary to initiate the region object.
        ['ID',    int],  # More to be added later, as region objects grow more complex.
        ['x_dim', int],
        ['y_dim', int],
        ['bg',    str],
        ['mg',    str],
        ['fg',    str],
    ]
    regionDataComplete, invalidPair = checkDictContents(regionData, requiredRegionDataKeys)
    if not regionDataComplete:
        print("Error: region data for region (%s) incomplete." % regionPath)
        print("Missing or invalid config variable '%s' in section 'region_data' with type '%s'" % (invalidPair[0], invalidPair[1]))
        raise ValueError

    regionName = regionData['name']        # Get values from the regionData dictionary.
    regionID   = int(regionData['ID'])     # These will be used when declaring the region object;
    regionXDim = int(regionData['x_dim'])  # more to be added as regions become more complex.
    regionYDim = int(regionData['y_dim'])
    regionBG   = regionData['bg']
    regionMG   = regionData['mg']
    regionFG   = regionData['fg']

    #######################
    ## extract room data ##
    #######################
    if 'rooms' not in sections.keys():
        print("Error: section 'rooms' not found in region (%s)" % regionPath)
        raise ValueError

    roomStandaloneLines, roomSections = getStandaloneLinesAndSections(sections['rooms'])
    if len(roomStandaloneLines) > 0:  # warn user if stray lines are present in the rooms data
        print("Warning: declarative lines found in the 'rooms' section of region (%s). They will be ignored." % regionPath)

    # declare array of rooms. By default each is None; a room not declared in the region file does not exist.
    rooms = [[None for nY in range(regionYDim)] for nX in range(regionXDim)]
    for location in roomSections.keys():
        roomX, delimiter, roomY = location.partition(',')
        try:  # Make sure declarative coordinates are valid integers
            roomX = int(roomX)
            roomY = int(roomY)
        except:
            print("Error: room found with invalid declarative coordinates in region (%s)." % regionPath)
            print("Declarative coordinates are '%s' and should be of form 'x,y' (x and y integers)" % location)
            raise ValueError
        if not ((0 <= roomX < regionXDim) and (0 <= roomY < regionYDim)):  # make sure coordinates are within
            print("Error: room found with invalid declarative coordinates in region (%s)." % regionPath)  # the allowed bounds set by
            print("Coordinates (%s) outside allowed range (0 <= x < x_dim), (0 <= y < y_dim)" % location)  # the region size
            raise ValueError
        rooms[roomX][roomY] = createRoom(roomSections[location], regionPath, roomX, roomY)

    return region(
        regionName,
        regionID,
        regionXDim,
        regionYDim,
        regionBG,
        regionMG,
        regionFG,
        rooms,
    )
from spatial_pooler import spatial_pooler
from region import region
import numpy as np

if __name__ == "__main__":
    input_shape = (2, 2, 2)
    reg_size = 5
    reg1 = region(reg_size, input_shape)
    sp = spatial_pooler()

    # run the spatial pooler on random binary inputs
    for x in range(500):
        active_input = np.random.randint(2, size=input_shape)
        print(active_input)
        sp.run(reg1, active_input)

    # show the region's patterns for a few more random inputs
    for x in range(40):
        active_input = np.random.randint(2, size=input_shape)
        print(active_input)
        reg1.pattern.show(active_input)
# Get communicator
comm = MPI.COMM_WORLD
start_time = t.time()

# Region 1 (starts iteration)
if comm.Get_rank() == 1:
    # Define region
    points = np.array([(0, 2), (1, 2), (1, 1), (1, 0), (0, 0), (0, 1)])
    edge_type = np.array(['d', 'd', 'd', 'd', 'd', 'd'])
    fetch = np.array([None, 3, None, None, 2, None])
    edge_init = np.array(
        [radiator_heat, None, wall_heat, window_heat, None, wall_heat])

    # Create region
    r = region(points, edge_type, fetch, edge_init, dx)
    print("Process 1 define region:", t.time() - start_time, " s")

    # Solve system
    res = r.solve(comm)
    comm.send(res, dest=0)

# Region 2 (bottom left)
if comm.Get_rank() == 2:
    # Define region
    points = np.array([(0, 1), (1, 1), (1, 0), (0, 0)])
    edge_type = np.array(['d', 'n', 'd', 'd'])
    fetch = np.array([None, 1, None, None])
    edge_init = np.array([wall_heat, None, wall_heat, radiator_heat])

    # Create region
    r = region(points, edge_type, fetch, edge_init, dx)
def load_region(self, file_location):
    reg = region()
    reg.load_region(file_location)
    self.regions = reg
from boto import ec2
import time

from control import while_not_try
from region import region, ami

ec2 = ec2.connect_to_region(region())
aws_image = ami()
size = 't1.micro'


def start_instance(name, data):
    print('reserving instance for %s' % name)
    print(data)
    reserve_instance = lambda: ec2.run_instances(aws_image,
                                                 key_name='sandy',
                                                 user_data=data,
                                                 instance_type=size,
                                                 instance_profile_name=name)
    reservation = while_not_try(reserve_instance)
    instance = reservation.instances[0]
    print(instance)
    return instance


def wait_instance(queue):
    print('waiting for message from %s' % queue.name)
    get_message = lambda: queue.get_messages(wait_time_seconds=10)[0]
    while_not_try(get_message)


def stop_instance(instance):
    print('stopping instance for %s' % instance.id)
    instance.stop()
    status = instance.update()
    while status != 'stopped':
        print(status)
        status = instance.update()
def getOffTarget(self, offset, coverageThreshold, target, outfile, tmpdir=None):
    """****************************************************************************************
    Task: selects off-target (+offset) regions with a coverage > coverageThreshold

    Inputs:
        offset: integer indicating the number of bases to extend the target.
        coverageThreshold: integer indicating the coverage threshold to select the region.
        target: ROIs bed file.

    Outputs: a new bedgraph file will be created containing selected regions.
    ****************************************************************************************"""
    pid = str(os.getpid())
    tmpbed = tmpdir + '/' + pid + '.extended.bed'
    bed = bed_file.bed_file(target)
    extendedBed = bed.extendnoref(offset, tmpbed)
    sortedBed = extendedBed.my_sort_bed()
    nonOverlappingBed = sortedBed.non_overlapping_exons(-1)  # Base 0, it is a standard BED
    finalBed = nonOverlappingBed.my_sort_bed()  # BED file in base 0
    finalBed.load_custom(-1)  # Load chromosome and positions in base 0
    bed_region = finalBed.get_region()
    bed_index = 0  # index to control bed_region position

    fd = open(self.filename)  # open() instead of the Python 2 file() builtin
    header = fd.readline()
    reading = True  # boolean to control while loop
    chr_found = False
    batch_n = 1
    fdw = open(outfile, 'w')
    while reading:
        batch, fd = self.get_batch(fd, 10000000)
        #print(batch_n)
        batch_n = batch_n + 1
        if batch == []:
            reading = False
        else:
            for line in batch:
                aline = line.replace('\n', '').split(' ')
                # new region
                r = region.region(aline[0], aline[1], aline[2], aline[3])
                search_open = True
                while search_open:
                    type_overlap = r.overlap_type(bed_region[bed_index])
                    if type_overlap == 0:  # bed region comes before bedgraph region
                        search_open = True
                        if bed_index + 1 < len(bed_region) and (not chr_found or r.chrom == bed_region[bed_index].chrom):
                            bed_index = bed_index + 1
                        elif r.value >= coverageThreshold:
                            search_open = False
                            for region_selected in r - bed_region[bed_index]:
                                fdw.write(str(region_selected))
                        else:
                            search_open = False
                    elif type_overlap == -1:  # bed region comes after bedgraph region
                        search_open = False
                        chr_found = True
                        if r.value >= coverageThreshold:
                            for region_selected in r - bed_region[bed_index]:
                                fdw.write(str(region_selected))
                    else:  # overlapping regions
                        search_open = False
                        chr_found = True
                        if r.value >= coverageThreshold:
                            for region_selected in r - bed_region[bed_index]:
                                fdw.write(str(region_selected))
    fd.close()
    fdw.close()  # make sure the selected regions are flushed to disk
def extractMetrics(network):
    limit = 0.6
    mu_lim = 0.05
    diffCells = 0
    nodes = network.nodes()
    subNet = nx.Graph()
    node_ref = dict()
    nP = len(nodes)
    center = GetCenter(nodes)
    data = np.zeros([nP, 2])
    for i in range(nP):
        nb = nodes[i]
        if nb.state == "D":
            diffCells = diffCells + 1.
        ind = int(nb.ID)
        node_ref[ind] = i
        reg = region(i)
        subNet.add_node(reg)
    regs = subNet.nodes()
    for i in range(nP):
        node = nodes[i]
        reg = regs[i]
        reg.savePos(node.location[0], node.location[1])
        if node.state == "U":
            val = 0
        else:
            val = 1
        nbs = network.neighbors(node)
        for j in range(len(nbs)):
            nb = nbs[j]
            ind = int(nb.ID)
            subNet.add_edge(reg, regs[node_ref[ind]])
            if nb.state == "D":
                val = val + 1.
        num = len(nbs) + 1
        reg.changeValue(val / num, num)
        data[i, 0] = num
        data[i, 1] = val
    cell_bodies = nx.connected_components(subNet)
    if len(cell_bodies) > 1:
        for i in range(1, len(cell_bodies)):
            clust = cell_bodies[i]
            for j in range(len(clust)):
                nb = clust[j]
                ind = int(nb.ID)
                subNet.remove_node(regs[node_ref[ind]])
    mu = np.average(data[:, 1] / data[:, 0], weights=data[:, 0])
    edges = subNet.edges()
    high_nodes = []
    low_nodes = []
    for node in subNet.nodes():
        if node.diffPer > limit or (mu > mu_lim and node.diffPer > mu):
            high_nodes.append(node)
            node.change_state(1)
        else:
            low_nodes.append(node)
    D = subNet.copy()
    UD = subNet.copy()
    nodes = subNet.nodes()
    d_edges = D.edges()
    ud_edges = UD.edges()
    for i in range(len(d_edges)):
        edge = d_edges[i]
        edge2 = ud_edges[i]
        obj1 = edge[0]
        obj2 = edge[1]
        obj3 = edge2[0]
        obj4 = edge2[1]
        if obj1.state == 0 or obj2.state == 0:
            D.remove_edge(obj1, obj2)
        if obj3.state == 1 or obj4.state == 1:
            UD.remove_edge(obj3, obj4)
    outside_nodes, inside_nodes = closest_nbs(subNet)
    inout_ratios = np.zeros([2, 1])
    for i in range(len(inside_nodes)):
        node = inside_nodes[i]
        if node.diffPer > limit or (mu > mu_lim and node.diffPer > mu):
            inout_ratios[0] = inout_ratios[0] + 1.
    for i in range(len(outside_nodes)):
        node = outside_nodes[i]
        if node.diffPer > limit or (mu > mu_lim and node.diffPer > mu):
            inout_ratios[1] = inout_ratios[1] + 1.

    dat_anal = np.zeros(7)
    dat_anal[0] = inout_ratios[0] / len(inside_nodes)
    dat_anal[1] = inout_ratios[1] / len(outside_nodes)
    dat_anal[6] = dat_anal[1] / (0.75 + dat_anal[0])
    p = nx.connected_components(D)
    pud = nx.connected_components(UD)
    rad_avg = []
    for j in range(len(pud)):
        if pud[j][0].state == 0:
            r, com = GetRadiusandCenter(pud[j])  # also get the number of nodes
            avg, std = getAverageRadialDistance(pud[j], com)
            rad_avg.append(avg)
    ud_crd_avg = np.average(rad_avg)
    ##################
    if np.isnan(ud_crd_avg):
        ud_crd_avg = 0
    rs = []
    num_nodes = []
    rej_clust = 0
    rej_clustUD = 0
    clusters = []
    clustersUD = []
    for i in range(len(p)):
        group = p[i]
        if len(group) > 3:
            clusters.append(group)
        elif group[0].state == 1:
            rej_clust = rej_clust + 1
    for i in range(len(pud)):
        group = pud[i]
        if len(group) > 3:
            clustersUD.append(group)
        elif group[0].state == 0:
            rej_clustUD = rej_clustUD + 1
    edges = D.edges()
    edgesUD = UD.edges()
    num_nodes = float(len(subNet.nodes()))
    for i in range(len(clustersUD)):
        clust_UD = nx.Graph()
        clust_UD.add_nodes_from(clustersUD[i])
        group = clustersUD[i]
        for j in range(len(group)):
            for k in range(j, (len(group))):
                cell1 = group[j]
                cell2 = group[k]
                cell_dist = dist(cell1.pos, cell2.pos)
                dat_anal[5] = max(dat_anal[5], cell_dist)
        for j in range(len(edgesUD)):
            edge = edgesUD[j]
            obj1 = edge[0]
            obj2 = edge[1]
            if obj1 in clustersUD[i] and obj2 in clustersUD[i]:
                clust_UD.add_edge(obj1, obj2)
        x = nx.average_shortest_path_length(clust_UD)
        dat_anal[2] = max(x, dat_anal[2])
    dat_anal[2] = dat_anal[2] / nx.average_shortest_path_length(subNet)
    dat_anal[3] = float(rej_clust / (num_nodes))
    dat_anal[4] = float(ud_crd_avg / num_nodes)
    dat_anal[5] = float(dat_anal[5] / num_nodes)
    return dat_anal
# create the dict of odpair classes
odclasses = dict()  # {(o, d): class, ...}
for orig in list(np.arange(1, numRegions + 1, 1)):  # fill the classes
    for dest in list(np.arange(1, numRegions + 1, 1)):
        odclasses[(orig, dest)] = odpair(orig, dest, slot, lamMLE[(orig, dest)],
                                         window, orderSerOD[(orig, dest)])
        odclasses[(orig, dest)].updateObsStarts(prevStartsWin[(orig, dest)])  # add prev. starts in window
        odclasses[(orig, dest)].updateObsEnds(prevEndsWin[(orig, dest)])      # add prev. ends in window
        odclasses[(orig, dest)].createFutureStarts()  # creates future starts
        odclasses[(orig, dest)].createFutureEnds()    # creates future ends
print('... initialized odpair classes ...')

# create the regions and implement the optimization
regclasses = dict()
for reg in list(np.arange(1, numRegions + 1, 1)):
    regclasses[reg] = region(reg, slot, window, odclasses)
    regclasses[reg].updateObsStarts()
    regclasses[reg].updateObsEnds()
    regclasses[reg].createFutureStarts()
    regclasses[reg].createFutureEnds()
    loadProc[slot][reg], PS[slot][reg], OS[slot][reg], PE[slot][reg], OE[slot][reg] = regclasses[reg].loadProcess()
    regclasses[reg].nowStart()
    regclasses[reg].nowEnd()
    probs[slot][reg], zs[slot][reg], status[slot][reg], opval[slot][reg] = regclasses[reg].optimize(beta_c, beta_d, weight)
    for key, pk in enumerate(probs[slot][reg][:, 0]):  # kills small negative values due to numerical error
        if pk <= 0:
            print('... warning, probabilities are too close to zero! ...')
            probs[slot][reg][key, 0] = 0.00000001
    probs[slot][reg][0, 0] += 1 - sum(list(probs[slot][reg][:, 0]))  # kills round-off errors, makes sure probabilities sum to 1
print('... done creating regions and optimizing ...')
def fillImgRegions(self):
    #print("start" + str(self.imgRegions))
    #np.set_printoptions(threshold=np.inf)
    regionID = 1
    regionList = []
    #print("image: " + str(self.grayImage.shape))
    #self.printNumpyArray(self.grayImage)
    self.edges = cv2.Canny(self.grayImage, 40, 120)
    #print("edges: " + str(self.edges))
    #self.printNumpyArray(self.edges)
    self.mask = cv2.copyMakeBorder(self.edges, 1, 1, 1, 1, cv2.BORDER_CONSTANT, value=255)
    #print(self.mask.shape)
    #self.printNumpyArray(self.mask)
    #print("borders shape: " + str(self.mask.shape))
    #print(self.mask)
    '''
    print("Edge size:" + str(self.edges.shape))
    print("Image shape" + str(self.grayImage.shape))
    print("Regions shape" + str(self.imgRegions.shape))
    print("We got here")
    #plt.subplot(121), plt.imshow(self.edges, cmap='gray')
    #plt.show()
    '''
    dialogValue = self.spinBoxDifference.value()
    print(dialogValue)
    if self.checkBoxRange.isChecked():
        floodFlags = cv2.FLOODFILL_MASK_ONLY | 4 | 1 << 8
    else:
        floodFlags = cv2.FLOODFILL_MASK_ONLY | 4 | cv2.FLOODFILL_FIXED_RANGE | 1 << 8

    for i in range(0, 240, 1):
        for j in range(0, 320, 1):
            # We found a new region:
            # Optimize this, it's the part that makes it stupid slow
            if self.imgRegions[i][j] == -1:
                if self.edges[i][j] == 0:
                    _, _, newMask, rect = cv2.floodFill(self.grayImage, self.mask, (j, i), 1,
                                                        loDiff=dialogValue, upDiff=dialogValue,
                                                        flags=floodFlags)
                    print(rect)
                    newRegion = region(regionID, rect)
                    for k in range(rect[0], rect[0] + rect[2], 1):
                        for l in range(rect[1], rect[1] + rect[3], 1):
                            if newMask[l + 1][k + 1] == 1 and self.imgRegions[l][k] == -1:
                                self.imgRegions[l][k] = regionID
                                newRegion.addPoint(self.grayImage[l][k])
                    newRegion.calcAverage()
                    regionList.append(copy.deepcopy(newRegion))
                    regionID += 1
    #self.mask = cv2.copyMakeBorder(self.edges, 1, 1, 1, 1, cv2.BORDER_CONSTANT, value=255)

    for i in range(1, 239, 1):
        for j in range(1, 319, 1):
            if self.imgRegions[i][j] == -1:
                for k in range(-1, 2, 1):
                    for l in range(-1, 2, 1):
                        if self.imgRegions[i + k][j + l] != -1 and self.imgRegions[i][j] == -1:
                            self.imgRegions[i][j] = self.imgRegions[i + k][j + l]

    if self.checkBoxMerge.isChecked():
        print("Merging")
        for i in range(1, 239, 1):
            for j in range(1, 319, 1):
                found = False
                for k in range(-1, 2, 1):
                    if found:
                        break
                    for l in range(-1, 2, 1):
                        if found:
                            break
                        if self.imgRegions[i][j] != self.imgRegions[i + k][j + l]:
                            regionList[self.imgRegions[i][j] - 1].addFrontierPoint(
                                [i, j, self.imgRegions[i + k][j + l]])
                            #print("Point coords: ", i, " ", j, " Region ID: ", self.imgRegions[i][j])
        for i in regionList:
            if not i.deleted:
                borderRegions = i.regionsInBorder()
                for j in borderRegions:
                    otherRegion = regionList[j - 1]
                    if i.regionSize() < otherRegion.regionSize():
                        smallestRegion = i.id
                        biggest = j
                    else:
                        smallestRegion = j
                        biggest = i.id
                    percentageOfBorder = regionList[smallestRegion - 1].percentageOfBorder(self.edges, biggest)
                    percentageOfFrontier = regionList[smallestRegion - 1].percentageOfFrontier(biggest)
                    if percentageOfBorder > 0.4 and percentageOfFrontier > 0.4:
                        for k in range(240):
                            for l in range(320):
                                if self.imgRegions[k][l] == smallestRegion:
                                    self.imgRegions[k][l] = biggest
                        regionList[biggest - 1].mergeRegion(regionList[smallestRegion - 1])
                        regionList[smallestRegion - 1].deleted = True
                        #regionList.pop(smallestRegion - 1)
    '''
    What I need to do:
    For each region, look at its frontier. For each distinct neighbouring value, check which
    of the two regions is smaller. For the smaller one, check whether the number of points
    with that value exceeds a certain percentage and whether few of those points belong to a
    Canny edge. If so, traverse that region's rectangle and set all its points to the other value.
    '''
    for i in range(240):
        for j in range(320):
            regionIndex = self.imgRegions[i][j] - 1
            region2 = regionList[regionIndex]
            avgGrey = region2.returnAverage()
            self.grayImageDest[i][j] = int(avgGrey)
    print("Number of regions after: ", len(regionList))

    checkBreak = False
    if self.checkBoxBorders.isChecked():
        # We skip the first row/column to avoid going out of bounds. Could be handled with an
        # explicit bounds check instead, but that makes everything much slower.
        for i in range(1, 239, 1):
            for j in range(1, 319, 1):
                checkBreak = False
                for k in range(1, -2, -1):
                    if checkBreak:
                        break
                    for l in range(1, -2, -1):
                        if self.imgRegions[i][j] != self.imgRegions[i + k][j + l]:
                            self.grayImageDest[i][j] = 255
                            checkBreak = True
                            break
    '''
    #Set borders to black.
    for i in range(0, 240, 1):
        for j in range(0, 320, 1):
            if self.imgRegions[i][j] == -1:
                self.imgRegions[i][j] = 0
    '''
    #print("Result: " + str(self.imgRegions))
    #print(self.imgRegions.shape)
    #print(np.unique(self.imgRegions))
    #plt.subplot(121), plt.imshow(self.imgRegions, cmap='gray')
    #plt.show()
    #cv2.imwrite("result.png", self.imgRegions)
    #self.grayImageDest = cv2.resize(self.grayImageDest, (320, 240))
    #self.grayImageDest = cv2.cvtColor(self.grayImageDest, cv2.COLOR_BGR2GRAY)
    self.visorD.set_open_cv_image(self.grayImageDest)
    self.visorD.update()
    self.imgRegions = np.full((240, 320), -1, dtype=np.int32)