def main(var=None):
    """Client entry point: set up the VFS, logging, networking, GUI and world
    state, then enter the main loop via run().

    :param var: unused here; presumably reserved for a launcher argument — TODO confirm.
    """
    # Create the directories
    #filesystem.home(oo = True)
    #print "Path:", filesystem.home()
    #LOG_FILENAME = 'client.log'
    #logging.basicConfig(filename=LOG_FILENAME,level=logging.DEBUG,)
    vfs.createVFS()
    # Redirect stdout through the custom Logger so all prints are captured.
    sys.stdout = Logger()
    connection = network.ServerSocket()
    script = gui.Script()
    #messenger.toggleVerbose()
    #audioManager = Audio.AudioManager()
    world = World()
    guiController = gui.GUIController(script)
    guiController.makeMainMenu()
    # NOTE(review): host/port are never passed to anything visible here —
    # presumably consumed elsewhere (e.g. by run() or the ServerSocket); confirm.
    serverHost = 'localhost'
    serverPort = 52003
    reg = region.Region()
    kmcontroller = controllers.KMController()
    run()
def read_metadata(me, f):
    """Parse the metadata section of file object ``f`` into ``me``.

    Reads a 12-byte header (magic, total size, reserved) and then splits the
    remaining payload into typed chunks:
      ATTR -> "key=value" pairs stored in me.attrs
      REGN -> nested chunk stream parsed into a region.Region (me.regions)
      TSET / TILE -> not supported yet (raises)
      anything else -> kept raw in me.chunks
    Raises Exception on a bad magic number or an unsupported chunk type.
    """
    magic, totalsize, _ = struct.unpack('<III', f.read(12))
    if magic != METADATA_MAGIC:
        raise Exception('bad metadata magic')
    # totalsize includes the 12-byte header that was just consumed.
    rawchunks = chunks.read_chunks(f, totalsize - 12)
    me.chunks = []
    me.regions = []
    me.attrs = {}
    for t, d in rawchunks:
        if t == 'ATTR':
            # Split on the first '=' only — the value may itself contain '='.
            k, v = d.split('=', 1)
            me.attrs[k] = v
        elif t == 'REGN':
            # A region payload is itself a chunk stream; reparse from memory.
            rgnchunks = chunks.read_chunks(cStringIO.StringIO(d), len(d))
            rgn = region.Region(rgnchunks)
            me.regions.append(rgn)
        elif t == 'TSET':
            raise Exception("can't handle TSET chunk yet")
        elif t == 'TILE':
            raise Exception("can't handle TILE chunk yet")
        else:
            # add it to the unparsed chunks list
            me.chunks.append((t, d))
def __init__(self, NW_corner, SE_corner, data='10m', **kwargs):
    '''
    NW_corner, SE_corner parameters are lat/lon tuples
    '''
    # Load the DEM tile containing the NW corner at the requested resolution,
    # then slice out the rectangle bounded by the two corners.
    self.Region = region.Region(NW_corner[0], NW_corner[1], data=data)
    self.elev = self.Region.slice_subregion(NW_corner, SE_corner)
    # Index/columns of the elevation DataFrame carry the lat/lon coordinates.
    self.LAT_ARR = self.elev.index.values.astype(float)
    self.LON_ARR = self.elev.columns.values.astype(float)
def to_region(obj: Dict, image_folder: str = '', voyager: str = '') -> region.Region:
    """Convert one VIA-style annotation row (dict) into a region.Region.

    Image dimensions are probed from disk when the file exists; otherwise
    they default to 0x0. KU-id and group-id labels are attached when present.
    """
    filename = obj['#filename']
    file_path = os.path.join(image_folder, filename)
    if not voyager:
        voyager = prefix_for_filename(filename)

    # Probe the image for its dimensions; fall back to 0x0 when missing.
    x_size, y_size = 0, 0
    if os.path.exists(file_path):
        x_size, y_size = Image.open(file_path).size

    file = region.ImageFile(path=file_path, x_size=x_size, y_size=y_size)

    shape_info = json.loads(obj['region_shape_attributes'])
    label_info = json.loads(obj['region_attributes'])

    labels = []
    ku_id = label_info.get(ku_id_key_1, '') or label_info.get(ku_id_key_2, '')
    if ku_id:
        labels.append(region.Label(tp=const.TYPE_KU_ID, id=int(ku_id)))

    group_id = label_info.get(group_id_key_1, '') or label_info.get(
        group_id_key_2, '')
    if group_id:
        labels.append(
            region.Label(
                tp=const.TYPE_GROUP_ID,
                id=int(group_id),
                extra={'voyager': voyager},
            ))

    return region.Region(
        file=file,
        shape=shape_info['name'],
        x_min=int(shape_info['x']),
        y_min=int(shape_info['y']),
        x_length=int(shape_info['width']),
        y_length=int(shape_info['height']),
        labels=labels,
    )
def testRegion(self):
    """Region.add accepts conforming rows and rejects outliers.

    Presumably Section(0.05, 0.15) defines the tolerance band — confirm
    against the Region/Section implementation.
    """
    _sec = region.Section(0.05, 0.15)
    _reg = region.Region(_sec)
    # First two rows are accepted.
    self.assertTrue(_reg.add(3.5, 3.4, 3.2, 3.49, 3.15, 988232))
    self.assertTrue(_reg.add(3.48, 3.33, 3.1, 3.46, 3.16, 899000))
    # Third row is rejected, so it must not appear in the stored data.
    self.assertFalse(_reg.add(3.57, 3.41, 3.21, 3.56, 3.19, 8923000))
    self.assertEqual(_reg.len(), 2)
    self.assertListEqual(_reg.getData(),
                         [[3.5, 3.4, 3.2, 3.49, 3.15, 988232],
                          [3.48, 3.33, 3.1, 3.46, 3.16, 899000]])
def S21(x):
    """Build the ('S', 2, 1) region for coordinate x (uses enclosing self/rg/g)."""
    key = ('S', 2, 1)
    xn = self.xn[key](x)
    # Normalized offset shared by the lower p-bound and the boundary line.
    frac = (xn - self.d_min) / self.d

    def lower_q(p):
        return p - 1 + frac

    return rg.Region(key, (1 - frac, 1, lambda p: 0, lower_q),
                     g.dist, g.distq_p)
def S22(x):
    """Build the ('S', 2, 2) region for coordinate x (uses enclosing self/rg/g)."""
    key = ('S', 2, 2)
    # Removed dead local `param = self.domx[key]`: it was never referenced,
    # and no sibling region builder (S11/S12/S21, R*) performs this lookup.
    xn = self.xn[key](x)

    def l22(p):
        # Upper boundary line shifted by the normalized x position.
        return p + 1 - (xn - self.d_min) / self.d

    return rg.Region(('S', 2, 2),
                     (0, (xn - self.d_min) / self.d, l22, lambda p: 1),
                     g.dist, g.distq_p)
def S11(x):
    """Build the ('S', 1, 1) region for coordinate x (uses enclosing self/rg/g)."""
    key = ('S', 1, 1)
    xn = self.xn[key](x)
    offset = xn / self.d

    def clipped_lower(p):
        # Boundary line p - offset, clipped at zero from below.
        return max(0, p - offset)

    return rg.Region(key, (0, 1, clipped_lower, lambda p: p),
                     g.dist, g.distq_p)
def testGetColumnAverage(self):
    """getColumnAverage(i) returns the mean of column i over all added rows."""
    _sec = region.Section(0.05, 0.15)
    _reg = region.Region(_sec)
    _reg.add(3.5, 3.4, 3.2, 3.49, 3.15, 988232)
    _reg.add(3.48, 3.33, 3.1, 3.46, 3.16, 899000)
    _reg.add(3.49, 3.41, 3.21, 3.56, 3.19, 892000)
    # NOTE(review): exact float equality — this only holds if the
    # implementation sums and divides the same way; fragile if it doesn't.
    self.assertEqual(float((3.5 + 3.48 + 3.49) / 3), _reg.getColumnAverage(0))
    self.assertEqual(float((3.4 + 3.33 + 3.41) / 3), _reg.getColumnAverage(1))
    self.assertEqual(float((3.2 + 3.1 + 3.21) / 3), _reg.getColumnAverage(2))
def os_load_oar(password, sim_path, region_name, oar_path, params=None):
    """XML-RPC handler: load an OAR archive into the named region.

    :param password: caller credential, checked against the configured one
    :param sim_path: simulator installation path
    :param region_name: region to load the archive into
    :param oar_path: URL/path of the .oar file
    :param params: optional extra options forwarded to Region.os_load_oar
    :return: dict with "success" (and "message" on failure)
    """
    logging.main_logger.debug("[xml-rpc] Method 'os_load_oar' called.")
    # check password
    if not check_password(password):
        return wrong_password
    # Fixed mutable-default-argument pitfall: `params={}` was shared
    # across calls; use None sentinel instead.
    if params is None:
        params = {}
    # compute the answer
    myregion = region.Region(sim_path, region_name)
    # NOTE(review): the OAR name is extracted as the 6th path component minus
    # a 4-char extension — assumes a fixed URL layout; confirm with callers.
    if myregion.os_load_oar(oar_path.split('/')[5][:-4], params):
        return {"success": True}
    return {"success": False, "message": 'Error loading oar'}
def S12(x):
    """Build the ('S', 1, 2) region for coordinate x (uses enclosing self/rg/g)."""
    key = ('S', 1, 2)
    xn = self.xn[key](x)
    offset = xn / self.d

    def clipped_upper(p):
        # Boundary line p + offset, clipped at one from above.
        return min(1, p + offset)

    return rg.Region(key, (0, 1, lambda p: p, clipped_upper),
                     g.dist, g.distq_p)
def _run(self, madness, winner=None, second=None):
    """Simulate a full tournament bracket for self.year.

    :param madness: upset factor forwarded to every matchup algorithm call
    :param winner: optional forced pick passed to each region via set_sf
    :param second: optional second forced pick passed to each region via set_sf
    :return: results dict keyed by region name plus semifinal, final-four,
             championship, champion, 2nd_place and upsets entries
    """
    results = {
        'year': self.year,
        'south': None,
        'west': None,
        'east': None,
        'midwest': None,
        'semi1': None,
        'semi2': None,
        'final_four': None,
        'finalist': None,
        'champion': None
    }
    final_four = {'south': None, 'west': None, 'east': None, 'midwest': None}
    # Database lookup, so cache this info
    all_regions = data.all_regions(self.year)
    for key in all_regions:
        r = region.Region(key, all_regions[key], self.algorithm)
        r.set_sf(winner, second)
        # Winner of region goes to final four
        final_four[key] = r(madness)
        results[key] = r
    semis = []
    for match in data.ff_games(self.year):
        team1, team2 = match
        semis.append(
            self.algorithm((final_four[team1], final_four[team2]), madness))
    # Each semifinal result is callable; calling it yields (winner, loser).
    finalist = (semis[0]()[0], semis[1]()[0])
    self.fixup(finalist)
    championship = self.algorithm(finalist, madness)
    champion, loser = championship()
    results['semi1'] = semis[0]
    results['semi2'] = semis[1]
    results['final_four'] = final_four
    results['finalist'] = finalist
    results['championship'] = championship
    results['champion'] = champion
    results['2nd_place'] = loser
    results['upsets'] = self._upsets(results)
    return results
def R21(x):
    """Build the ('R', 2, 1) region for coordinate x (uses enclosing self/rg/g)."""
    key = ('R', 2, 1)
    xn = self.xn[key](x)
    # Lower p-bound: boundary-line intersection, floored at p1.
    p_lo = max(self.p1, (self.de - xn + self.A[2, 1]) / self.de)

    def q_upper(p):
        line = (-self.de + self.de * p + xn - self.A[2, 1]) / self.df
        return min(self.q2, line)

    return rg.Region(key, (p_lo, 1, lambda p: 0, q_upper), g.dist, g.distq_p)
def R12(x):
    """Build the ('R', 1, 2) region for coordinate x (uses enclosing self/rg/g)."""
    key = ('R', 1, 2)
    xn = self.xn[key](x)
    # Upper p-bound: boundary-line intersection, capped at p2.
    p_hi = min(self.p2, (xn - self.A[1, 2]) / self.de)

    def q_lower(p):
        line = (self.df + self.de * p - xn + self.A[1, 2]) / self.df
        return max(self.q1, line)

    return rg.Region(key, (0, p_hi, q_lower, lambda p: 1), g.dist, g.distq_p)
def __init__(self, lats, lons, data='10m', **kwargs):
    '''
    lats, lons params are lists of latitude and longitude points of the
    Trace positions throughout the Region
    '''
    self.Region = region.Region(lats, lons, data=data)
    self.lats = lats
    self.lons = lons
    self.coords = list(zip(lats, lons))
    print('Generating elevation profile')
    # Bilinear interpolation along the trace; results are in meters.
    self.elev_m = self.bilin_walk()
    # Convert meters to feet (3.280839895 ft per meter) — the original
    # comment said "to meters", which contradicted the factor.
    self.elev = [i * 3.280839895 for i in self.elev_m]
    print('Generating grade profile')
    self.grade = self.get_grade_profile()
def R11(x):
    """Build the ('R', 1, 1) region for coordinate x (uses enclosing self/rg/g)."""
    key = ('R', 1, 1)
    xn = self.xn[key](x)
    # Upper p-bound: boundary-line intersection, capped at p1.
    p_hi = min(self.p1, (xn - self.A[1, 1]) / self.de)

    def q_upper(p):
        line = (-self.de * p + xn - self.A[1, 1]) / self.df
        return min(self.q1, line)

    return rg.Region(key, (0, p_hi, lambda p: 0, q_upper), g.dist, g.distq_p)
def R22(x):
    """Build the ('R', 2, 2) region for coordinate x (uses enclosing self/rg/g)."""
    key = ('R', 2, 2)
    xn = self.xn[key](x)
    # Lower p-bound: boundary-line intersection, floored at p2.
    p_lo = max(self.p2, (self.de - xn + self.A[2, 2]) / self.de)

    def q_lower(p):
        line = (self.de + self.df - self.de * p - xn + self.A[2, 2]) / self.df
        return max(self.q2, line)

    return rg.Region(key, (p_lo, 1, q_lower, lambda p: 1), g.dist, g.distq_p)
def __init__(self, c_flow=None, transf_flow=None, subg=None):
    """
    :param c_flow: original control flow graph (only for those who don't have parents)
    :param transf_flow: if the CFG was changed, then this is the graph
    :param subg: subgraph which this represents
    """
    self._c_flow = None
    self._isSubflow = None
    self._regions = None
    self._curr_graph = None
    self._domTree = None
    self.entryId = None
    self.exitId = None
    if c_flow is not None:
        assert isinstance(c_flow, fparser.control_flow.ControlFlow)
        self._isSubflow = False
        self.entryId = c_flow._entryId
        self.exitId = c_flow._exitId
        self._c_flow = c_flow
        self._regions = region.RegionCollection(c_flow.get_max_id())
        # Set graph.
        self._curr_graph = get_skeleton_graph(
            c_flow)  # make a copy, because we change stuff
        for n in self._curr_graph:
            # each node becomes a region. why is each node a region (w/o any trafo)?
            r_graph = networkx.DiGraph()
            r_graph.add_node(n)
            self._regions.add_region(region.Region(n, r_graph, None))
    else:
        assert isinstance(transf_flow, TransformedFlowGraph)
        assert isinstance(subg, networkx.DiGraph)
        self._isSubflow = True
        self.entryId = transf_flow.get_entry_id()
        # BUG FIX: previously this line assigned get_exit_id() to
        # self.entryId (copy-paste error), clobbering the entry id and
        # leaving self.exitId as None for subflows. Mirrors the c_flow
        # branch above, which sets both ids.
        self.exitId = transf_flow.get_exit_id()
        self.copy_references(self, transf_flow)
        # Set graph
        self._curr_graph = subg
def main():
    """Server entry point: wire up filesystem, file logging, network, chat
    and game state, then start the messenger loop."""
    vfs = filesystem.FileSystem()
    # Finish setting up logger
    logPath = vfs.logs + 'server.log'
    logFile = logging.FileHandler(logPath)
    formatter = logging.Formatter(
        "%(asctime)s - %(name)s - %(levelname)s - %(message)s")
    logFile.setFormatter(formatter)
    logger.addHandler(logFile)
    logger.info("Logging file handler added. Logging to %s" % logPath)
    network = Network(HOST, PORT)
    chatServer = chat.ChatServer()
    state = GameState()
    reg = region.Region()
    try:
        messenger.start()
    # FIX: `except Exception, e` is Python-2-only syntax; the `as` form is
    # valid on Python 2.6+ and required on Python 3.
    except Exception as e:
        logger.exception(e)
def __init__(self, redis, features={}): Generator.__init__(self, redis, features) self.logger = logging.getLogger(__name__) if not hasattr(self, 'region'): print "noregion!!!" self.region = region.Region(self.redis) self.gatheringplace = Business(self.redis, {'kind': 'bus_' + self.gatheringplace}) if not hasattr(self, 'leader'): self.leader = leader.Leader(self.redis, {"location": self}) #self.leader = Leader(self.redis) if not hasattr(self, 'name'): self.name = Name(self.redis, 'city') self.citizen = NPC(self.redis) self.calculate_population() self.calculate_racial_breakdown() self.select_subraces()
def load_regions(self, load=False, load_all=False):
    """Populate self.regions from the simulator's Regions.ini file.

    :return: True when at least one region section was loaded, else False.
    """
    logging.main_logger.debug("[sim] 'load_regions' called")
    # Hoist the ini path — it was rebuilt three times in the original.
    ini_path = self.path + '/bin/Regions/Regions.ini'
    # check if regions.ini exists
    if not os.path.isfile(ini_path):
        logging.main_logger.warning(
            "[sim] No Regions.ini file found : %s" % ini_path)
        return False
    import ConfigParser
    regions_ini = ConfigParser.ConfigParser()
    regions_ini.read(ini_path)
    sections = regions_ini.sections()
    if not sections:
        logging.main_logger.warning(
            "[sim] No region found in Regions.ini file")
        return False
    import region
    self.regions.extend(
        region.Region(self.path, section, load, load_all)
        for section in sections)
    return True
#return 'grey' return random.choice(Config.colorList[1:-1]) else: return Config.colorList[-1] if __name__ == '__main__': # Generate points points = generatePoints(Config.xNumRegions, Config.yNumRegions, Config.regionSizePx, Config.regionRegularity) if debug and 1: print(points) # Initialise list of regions regionList = [] for i in range(0, Config.totalNumRegions): newRegion = region.Region() regionList.append( newRegion ) # Set origin of each region for region in regionList: region.setOrigin(points[region.id]) # generate voronoi from numpy array of points vor = Voronoi(points) if debug and 0: # Voronoi gives array of all vertices (Voronoi.vertices) print(vor.vertices) # An array of regions is provided (Voronoi.regions) print(vor.regions) # Each region in the array is described as an array of indexes that correspond to the array of vertices # The order regions appear in the region array is given by a 1d array (Voronoi.point_region)
# This example demonstrates how speed optimization slows down for curves. # We construct a very sharp curve on an otherwise straight path, and observe that # the speed optimization causes us to slow down for that curve. # It plots the results. import region from numpy import * import matplotlib.pyplot as plt from trajectory_optimization import * from trajectory_plotting import * # Construct our region workspace = region.Region((0.,0.),(1.,1.)) # Construct our path and plot it P = [[.1,i*.05] for i in range(1,19)] P.extend([.15+.05*i,.9] for i in range(18)) bubbles = [P,[.025 for i in range(len(P))]] P = asarray(P) u = 5*ones((P.shape[0]-2,2)) v = P[2:,:] - P[0:-2,:] P = elastic_stretching(P,v,u,bubbles) workspace.plot_region() PlotTrajectory(P) PlotBubbles(bubbles) plt.title("Sharp Turn Trajectory") plt.savefig("ex2_trajectory.pdf") plt.clf() # Do speed optimization and see that it slows down for sharp turn result = speed_optimization(P)
SE_pnt_evans = (39.558016, -105.610281) # TODO: add get_path function to allow different 1m data tiles to be retrieved #path_1m_data = '/mnt/e/DEM_Database/NED_1m/x47y440/USGS_NED_one_meter_x47y440_CO_SoPlatteRiver_Lot5_2013_IMG_2015.img' path_1m_data = '/Volumes/Fleet Storage/DEM_Database/NED_1m/x47y440/USGS_NED_one_meter_x47y440_CO_SoPlatteRiver_Lot5_2013_IMG_2015.img' #path_1m_data = '/mnt/e/DEM_Database/NED_1m/x48y440/USGS_NED_one_meter_x48y440_CO_SoPlatteRiver_Lot5_2013_IMG_2015.img' path_1m_meta = '/Volumes/Fleet Storage/DEM_Database/NED_1m/x47y440/USGS_NED_one_meter_x47y440_CO_SoPlatteRiver_Lot5_2013_IMG_2015_meta.xml' #path_1m_meta = '/mnt/e/DEM_Database/NED_1m/x47y440/USGS_NED_one_meter_x47y440_CO_SoPlatteRiver_Lot5_2013_IMG_2015_meta.xml' path_10m_data = '/Volumes/Fleet Storage/DEM_Database/NED_13/grid/n40w106/grdn40w106_13/w001001.adf' #path_10m_data = '/mnt/e/DEM_Database/NED_13/grid/n40w106/grdn40w106_13/w001001.adf' data_store_path = '/Users/ckennedy/projects/RoadGrade_Cory/data/' #data_store_path = '/home/ckennedy/repos/RoadGrade_Cory/data/' ''' 1m dataset ''' # get elevation dataframe lookoutMt_tile_1m = region.Region(NW_pnt_lookout[0], NW_pnt_lookout[1], data='1m') print(lookoutMt_tile_1m.Elev) # slice area of interest theM_1m_df = lookoutMt_tile_1m.slice_subregion(NW_pnt_lookout, SE_pnt_lookout) print(theM_1m_df) # save df to a csv file theM_1m_df.to_csv(data_store_path + 'theM_1m.csv') # convert Dataframe to 2d numpy array theM_1m_2d = theM_1m_df.values # flatten array theM_1m_1d = theM_1m_2d.flatten()
def write_to_file(me, name):
    """Serialize the level to file ``name``: bitmap (if any), then metadata,
    then tile data. (Python 2 code: uses the `file()` builtin and print
    statements.)"""
    f = file(name, 'wb')
    meta = me.metadata_to_string()
    if me.bmp:
        # The bitmap writer is told the metadata length — presumably so it
        # can reserve/record space for it; confirm in bmp.write_to_file.
        me.bmp.write_to_file(f, len(meta))
        f.write(meta)
        # Written metadata must end on a 4-byte boundary.
        assert (f.tell() & 3) == 0
    elif meta:
        print "WARNING: metadata can't be saved because no tileset is present"
    me.write_tile_data(f)


if __name__ == '__main__':
    # test code
    l = LVLFile('testin.lvl')
    l.set_attr('NAME', 'lvl for testing metadata')
    l.set_attr('TESTFIELD', 'blah blah blah')
    # Opaque binary payload used as a raw region chunk.
    d = '\xa1\xf3!\xf3I"\x01\xa0\xc6 c`c#7\xe0b\xa0\xc7\t@#\xf4\x96'
    rgninit = [('rTIL', d), ('rFOO', 'blahrg'), ('rNAM', 'test region #1')]
    rgn = region.Region(rgninit)
    l.add_region(rgn)
    l.chunks.append(('CFOO', 'custom data goes here'))
    l.write_to_file('testout.lvl')
    # Round-trip: re-read the written file and write it out again.
    l = LVLFile('testout.lvl')
    l.write_to_file('testout2.lvl')
# This example demonstrates calling our code to run our algorithms on a simple geometry. # It plots the results. import region from numpy import * import matplotlib.pyplot as plt from trajectory_optimization import * from trajectory_plotting import * from fmt import * k = 1000 rk = .05 workspace = region.Region((0., 0.), (1., 1.)) obs = [ region.Region((.1, .1), (.4, .4)), region.Region((.6, .6), (.9, .9)), region.Region((.6, .1), (1.0, .5)) ] goal = region.Region((.9, .9), (1., 1.)) max_iter = 1000 xinit = (.5, .1) # Offline computation of neighbors data = offline_sampling(k, rk, xinit, workspace, obs) # FMT Algorithm path = FMT(k, rk, xinit, workspace,
def parsePDF(fileLocation):
    """Download a report PDF and populate the global ``regions`` dict.

    Pages at indices 8-10 are scanned for table rows of the form
    "<Name words> <number> <number>"; rows mentioning "health" become
    region.Region entries keyed by the cleaned unit name. Populations are
    then merged in from HealthUnitPopulations.txt (name line followed by a
    population line — assumed format, confirm against the data file).
    """
    global regions
    regions.clear()
    print(fileLocation)
    # The triple-quoted block below is a disabled urllib/ssl download path
    # kept for reference (it is a no-op string expression at runtime).
    '''
    hdr = {'sec-ch-ua': '"Google Chrome";v="87", " Not;A Brand";v="99", "Chromium";v="87"', 'sec-ch-ua-mobile': '?0', 'Upgrade-Insecure-Requests': '1', 'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36', 'accept': '*/*'}
    requestWHeader = Request(fileLocation, headers=hdr)
    pdfFile = urlopen(requestWHeader,context = ssl.SSLContext())
    data = pdfFile.read()
    bytesFile = io.BytesIO(data)
    read_pdf = PyPDF2.PdfFileReader(bytesFile)
    '''
    payload = {}
    # Browser-like headers so the server doesn't reject the scripted request.
    headers = {
        'sec-ch-ua': '"Google Chrome";v="87", " Not;A Brand";v="99", "Chromium";v="87"',
        'sec-ch-ua-mobile': '?0',
        'Upgrade-Insecure-Requests': '1',
        'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36'
    }
    response = requests.request("GET", fileLocation, headers=headers, data=payload)
    data = response.content
    bytesFile = io.BytesIO(data)
    read_pdf = PyPDF2.PdfFileReader(bytesFile)
    # Hard-coded page indices holding the health-unit table.
    pages = []
    page = read_pdf.getPage(8)
    pages.append(page)
    page = read_pdf.getPage(9)
    pages.append(page)
    page = read_pdf.getPage(10)
    pages.append(page)
    for page in pages:
        page_content = page.extractText().replace("\n", "")
        # print(page_content)
        # Match "Name Words 1,234 5,678": a name followed by two numeric columns.
        x = re.findall("[A-Z][a-z&A-Z ,*/-]+ +[-0-9,*]+ +[-0-9,*]+", page_content)
        healthUnits = []
        for subString in x:
            if "health" in subString.lower():
                healthUnits.append(subString)
        for subString in healthUnits:
            healthUnitsWithValues = subString.split(" ")
            healthUnitsWithValues = list(filter(None, healthUnitsWithValues))
            # Last two fields are the two numeric columns, stripped of
            # everything but digits, '/', and '-'.
            currLastDay = re.sub('[^0-9/-]', '', healthUnitsWithValues[-2])
            currDay = re.sub('[^0-9/-]', '', healthUnitsWithValues[-1])
            # All earlier fields re-join into the unit name (letters/spaces only).
            tempName = ""
            for x in healthUnitsWithValues[:-2]:
                tempName = tempName + x + " "
            tempName = re.sub('[^a-z A-Z]', '', tempName[:-1])
            tempRegion = region.Region(tempName, 0, currDay, currLastDay)
            regions[tempName] = tempRegion
    popFile = open("HealthUnitPopulations.txt", 'r')
    # Iterating the file and calling next() on it advances the same iterator:
    # a name line is followed immediately by its population line.
    for x in popFile:
        currRegion = x.replace("\n", "")
        if currRegion in regions.keys():
            population = int(next(popFile))
            regions[currRegion].setPopulation(population)
            regions[currRegion].calculatePer100()
    popFile.close()
def reduce_single_loop(self, n, level, parentloop):
    """Collapse the loop headed by node ``n`` into a single region node.

    The loop's subgraph is removed from the current graph, replaced by a
    fresh region node, and the subgraph plus a ReducedLoopTransf is stored
    in the region collection.

    :param n: loop header node id (original CFG numbering)
    :param level: loop nesting level recorded on the transformation
    :param parentloop: enclosing loop recorded on the transformation
    """
    def translate_node_id(b):
        """
        Header nodes of already reduced loops are replaced by new nodes.
        Given the original node id, this function returns the new block id
        if loop is already reduced, else it returns the given node id itself.
        """
        new_id = self._regions._loopRegions_i.get(b, None)
        if new_id is None:
            return b
        else:
            return new_id

    def get_loop_nodes():
        """Returns loop nodes, including header node."""
        lInfo = self._c_flow.get_loop_info()
        bNodes = lInfo.get_body_nodes(n)
        # Body nodes of inner, already-reduced loops map to their region ids.
        bNodes = set([translate_node_id(b) for b in bNodes])
        return bNodes.union([n])

    def get_exit_edges(lNodes):
        """Return a set of exit edges (loop node -> non-loop successor)."""
        ee = set([])
        for _n in lNodes:
            succ = list(self._curr_graph.successors(_n))
            for s in succ:
                if s not in lNodes:
                    ee.add((_n, s))
        return ee

    def get_entry_edges():
        """
        Returns loop entry edge(s). Not all loops contain preheader nodes,
        therefore the header node may contain multiple entry edges.
        """
        lInfo = self._c_flow.get_loop_info()
        # Entry edges = in-edges of the header minus its back edges.
        ie = set(self._curr_graph.in_edges(n))
        be = set([(translate_node_id(b), n) for b in lInfo.get_back_edges(n)])
        log.debug("get_entry_edges: ie={}, be={}".format(ie, be))
        # FIXME: Check if entry edge comes from an already reduced loop!
        #        translate_node_id() must return the new region id for
        #        original reduced loop body nodes too.
        return ie.difference(be)

    self._graph_changed()
    # 0. First get all loop nodes
    lNodes = get_loop_nodes()
    # 1. Get entry and exit edges.
    e_entry = get_entry_edges()
    e_exit = get_exit_edges(lNodes)
    # 2. Save a copy of loop's subgraph, remove loop nodes from control flow graph.
    lSubg = networkx.DiGraph(self._curr_graph.subgraph(lNodes))
    self._curr_graph.remove_nodes_from(lNodes)
    # 3. Insert new dummy node in graph that represents the reduced loop
    r_id = self._regions.generate_new_region_id()
    self._curr_graph.add_node(r_id)
    # Log
    log.debug("New loop region id: {}".format(r_id))
    log.debug("Entry edges: {}".format(e_entry))
    log.debug("Exit edges: {}".format(e_exit))
    # 4. Add entry and exit edges for this node
    self._curr_graph.add_edges_from([(b, r_id) for b, _ in e_entry])
    self._curr_graph.add_edges_from([(r_id, b) for _, b in e_exit])
    # 5. Save subgraph as 'region' attribute, first add the new region.
    tf = transformation.ReducedLoopTransf(n, e_exit, level=level,
                                          parentloop=parentloop)
    self._regions.add_region(region.Region(r_id, lSubg, tf))
    self._curr_graph.nodes[r_id]['region'] = self._regions.get_region(r_id)
def region_hook(d):
    # JSON object_hook-style adapter: map one decoded dict row to a Region.
    # NOTE(review): assumes keys 'Geo', 'Region', 'Location',
    # 'Failover Region' and 'Status' are always present — KeyError otherwise.
    return region.Region(d['Geo'], d['Region'], d['Location'],
                         d['Failover Region'], d['Status'])
# This example demonstrates calling our code to run our algorithms on a more complex geometry. # It plots the results. import region from numpy import * import matplotlib.pyplot as plt from trajectory_optimization import * from trajectory_plotting import * from fmt import * k = 1000 rk = .1 workspace = region.Region((0., 0.), (1., 2.)) # obs = [region.Region((.1,.1),(.4,.4)),region.Region((.6,.6),(.9,.9)),region.Region((.6,.1),(1.0,.5))] obs = [ region.Region((.1, .1), (.4, .4)), region.Region((.3, .6), (.9, .9)), region.Region((.6, .1), (1.0, .5)), region.Region((.1, .5), (.2, 1.2)), region.Region((.3, 1.2), (1.0, 1.9)) ] goal = region.Region((.1, 1.5), (.2, 1.7)) max_iter = 1000 xinit = (.5, .1) # FMT Algorithm for obstacle in obs: obstacle.plot_region() goal.plot_region() path = FMT(k, rk, xinit, workspace, obs, goal, max_iter, with_plotting=True)