def cal_heatf(): """calculate the heat of formation as following: \delta H_f = E_system + 4RT + POP + n_CI_C + n_HI_H POP: contribution of high energy conformations. (default -0.2 kcal/mol) I_C: heat increament for C (see STD) I_H: heat increament for H (see STD) @ref: reaxFF 2001 """ # get E_system assert os.path.exists("fort.74") f = open("fort.74", "r") ener = float(f.readline()[27:37]) f.close() assert os.path.exists("geo") fname = "geo" a = Geo(fname) b = a.parser() # get atom map b.assignAtomTypes() # get element type b.assignEleTypes() ht = {} for i in b.map: ht[i[1]] = 0 for i in b.atoms: ener = ener + STD[i.element] ener = ener + 4*300*8.314/4184 + (-0.21) print b.name, ener
def __init__(self):
    """Dialog initializer: prepare geocoder state and build the UI."""
    QtWidgets.QDialog.__init__(self)
    self.geo_locator = Geo()            # geocoding backend
    self.info = {}                      # collected user input
    self.location_coordinates = None    # resolved coordinates, set later
    self.setupUi()
def __init__(self, geo_in, C, zs, z_fine, angles, n_double):
    """Construct a geometry derived from an existing Geo.

    geo_in: source Geo object
    C: CosmoPie object
    zs: tomographic z bins
    z_fine: fine resolution z slices
    angles, n_double: stored for use by subclass methods
    """
    self.n_double = n_double
    self.angles = angles
    self.geo_in = geo_in
    Geo.__init__(self, zs, C, z_fine)
    # expand our alm table to the same maximum l as the source geometry
    self.expand_alm_table(geo_in._l_max)
def __init__(self, geo_in, mask_in, C, zs, z_fine):
    """geo_in, mask_in: Geo objects; the mask may not exceed the geo's area."""
    self.mask_in = mask_in
    self.geo_in = geo_in
    # a mask larger than the geometry itself would be nonsensical
    assert self.geo_in.angular_area() - self.mask_in.angular_area() >= 0.
    Geo.__init__(self, zs, C, z_fine)
    # only l values resolved by BOTH input tables are meaningful here
    self.expand_alm_table(np.min([geo_in._l_max, mask_in._l_max]))
def check_postcode_found(self, postcode, expected_result):
    """Check that a postcode resolves to the expected (easting, northing)
    and that exactly the right per-area CSV file was opened once."""
    # mocked file yields one one-letter and one two-letter area entry
    self.m.return_value = [self.valid_entry_one_letter, self.valid_entry_two_letters]
    with patch('geo.open', self.m, create=True):
        g = Geo()
        e, n = g.getEastingAndNorthing(postcode)
        assert_equal((e, n), expected_result)
        # the data file is named after the postcode's leading area letters
        postcode_dir = url_for('static', filename='postcodes')
        filename = re.match('([a-z]{1,2})[0-9]', postcode.replace(' ', '')).group(1)
        filename = os.path.join(postcode_dir, filename + '.csv')
        self.m.assert_called_once_with(filename)
def __init__(self, zs, C, z_fine):
    """create an analytic geo of the full sky
    inputs:
        zs: the tomographic z bins
        C: a CosmoPie object
        z_fine: the resolution z slices
    """
    self.z_fine = z_fine
    self.C = C
    Geo.__init__(self, zs, C, z_fine)
def __init__(self, window, zoom=12):
    """Set up the main map window: UI, geocoder, folium map and seed data.

    window: the widget the generated UI is installed into
    zoom: initial folium zoom level
    """
    super().__init__()
    self.setupUi(window)
    # input: tuple of starting coordinates
    self.app = QtWidgets.QApplication(sys.argv)
    # dataframe header names for better referencing
    self.addr_name = 'Address'
    self.lat_name = 'Lat'
    self.lon_name = 'Lon'
    # default view centered on Saint Louis, MO
    self.start_lat = 38.6268039
    self.start_lon = -90.1994097
    self.geo_locator = Geo()
    self.map = folium.Map(location=(self.start_lat, self.start_lon),
                          zoom_start=zoom)
    # support files live next to this module (Windows-style paths)
    self.default_map_save = path.dirname(
        path.abspath(__file__)) + r"\map.html"
    self.address_pickle_location = path.dirname(
        path.abspath(__file__)) + r"\cities_dictionary.pkl"
    self.city_lookup_dict = None
    self.save_location = None
    self.map_save_path = None
    # bookkeeping flags for the edit workflow
    self.unsaved_changes = False
    self.editing_cells = False
    self.last_edited_data = None
    # seed row so the model/view has something to display at startup
    self.data = pd.DataFrame(
        {  # will only be used for passthrough to model
            'Name': ['Eric Chung'],  # type(str)
            self.addr_name: ['Saint Louis, MO'],  # type(str)
            'Tags': ['Me, Myself, I'],  # type(list)
            self.lat_name: str(self.start_lat),  # type(float)
            self.lon_name: str(self.start_lon),  # type(float)
            'Last visited': [datetime.now().strftime('%Y-%m-%d %H:%M:%S')],  # type(float)
            'Dates visited': ['All the time'],  # type(list)
            'Date added': [
                datetime.fromtimestamp(777186000).strftime(
                    '%Y-%m-%d %H:%M:%S')
            ],  # type(float)
            'Description': ['0']  # type(str)
        })
    self._setup_map()
    self._setup_column_delegates()
    self._setup_connections()
    self._setup_column_width_rules()
    self._load_address_dict_from_pickle()
    self.stackedWidget_main.setCurrentIndex(0)
def test(self):
    ''' for test: seed in-memory actions, geos, reviews and a zeroed score
    matrix instead of reading them from the database '''
    self.actions = ['ちょっと', '一人で', '女性と', 'しっぽり']
    self.geos = Geos([
        Geo(1, 'くれしま', 'http://test.com', '宴会に最適!', 'エンジェルがいるよ.'),
        Geo(2, 'くれない', 'http://test.com', '宴会に最適!', 'エンジェルがいるよ.'),
        Geo(3, '鳥貴族', 'http://test.com', '宴会に最適!', 'エンジェルがいるよ.'),
        Geo(4, '鳥次郎', 'http://test.com', '宴会に最適!', 'エンジェルがいるよ.'),
        Geo(5, '大島', 'http://test.com', '宴会に最適!', 'エンジェルがいるよ.'),
        Geo(6, '順菜', 'http://test.com', '宴会に最適!', 'エンジェルがいるよ.'),
        Geo(7, '魔境', 'http://test.com', '宴会に最適!', 'エンジェルがいるよ.'),
        Geo(8, '眠い', 'http://test.com', '宴会に最適!', 'エンジェルがいるよ.')
    ])
    # reviews keyed by geo id; some geos deliberately have none
    self.reviews = {
        1: ['なんて日だ!', 'ちょっと飲むにはいい店です.', 'ちょっと飲むの楽しい'],
        2: ['なんて日だ!'],
        3: [],
        4: ['ちょっとだけと思ったのに気づいたら飲む飲む', '一人で参戦!飲む', '楽しい'],
        5: ['女性としっぽりと飲むでました', '女性とちょっとだけ飲む'],
        6: [],
        7: ['しっぽり!'],
        8: ['ちょっと', '一人でゆっくりと飲む']
    }
    # one score row per action, one column per geo
    self.scores = np.zeros([len(self.actions), len(self.geos.geos)])
def collect_gdacs_data(self):
    """Fetch the GDACS RSS feed and persist each alert as a point Geo.

    Entries lacking a gdacs_calculationtype are skipped.  Errors are
    reported and the method returns without raising.
    """
    try:
        d = feedparser.parse('https://gdacs.org/xml/rss.xml')
        for entry in d['entries']:
            if entry.get('gdacs_calculationtype'):
                output = {
                    'type': 'Feature',
                    'id': entry['id'],
                    'geometry': {
                        'type': 'Point',
                        "coordinates": [
                            float(entry['geo_long']),
                            float(entry['geo_lat'])
                        ],
                    },
                    'properties': {
                        'type': entry['gdacs_calculationtype'],
                        'title': entry['title']
                    }
                }
                feature = Feature(id=output['id'],
                                  properties=output['properties'],
                                  geometry=Point(
                                      output['geometry']['coordinates']))
                Geo(feature).save()
    except Exception as e:
        # bug fix: the message and the exception were passed to print() as
        # two separate arguments (printing a tuple-like line); apply
        # %-formatting explicitly instead
        print("error fetching gdacs data: %s" % e)
        return
    print("retrieved docs from gdacs")
def get(self, room_name=None):
    """Serve the chat page, resolving the visitor's city from their IP, and
    handle login-subdomain requests by setting the session cookie on the
    bare domain and redirecting there."""
    if room_name is None:
        room_name = 'index.html'
    # best-effort GeoIP city lookup for display purposes
    location = Geo.get_location_from_ip(self.request.remote_ip)
    if location is None or location.city.name is None:
        city = None
    else:
        city = location.city.name.lower()
    prefix = 'login-'
    if self.request.host.startswith(prefix):
        # hosts look like login-<session>.teaorbit.com[:NNNN]
        matches = re.match('^{p}([a-zA-Z0-9_-]+)\.(teaorbit\.com|tea.local)(:\d{{4}})?$'.format(p=prefix), self.request.host)
        if matches:
            session_id = matches.group(1)
            base_host = matches.group(2)
            port = matches.group(3) or ""
            print "domain", base_host
            print "session", session_id
            print "port", port
            # long-lived session cookie (~5.5 years) scoped to the bare domain
            self.set_cookie(config.cookie_name, session_id, domain=".{host}".format(host=base_host), expires=None, path='/', expires_days=2000)
            return self.redirect("http://{host}{port}/{channel}".format(host=base_host, channel=room_name, port=port))
    client = self.request.headers.get('X-Requested-By', 'Web')
    return self.render("templates/index.html", STATIC_URL=STATIC_URL, room_name=room_name, client=client, version=config.version, city=city)
def parseFormData(self, data):
    """Parse widget form data into a Geo.

    Returns None for empty input, passes through an existing Geo, and
    geocodes the address when no coordinates were supplied.
    Raises WidgetError for non-dict input or bad coordinate values.
    """
    if self.isEmptyDatamodel(data):
        return None
    if isinstance(data, Geo):
        return data
    if not isinstance(data, dict):
        raise WidgetError('Expected multiple values for "%s"' % self.title)
    try:
        # empty-string coordinates are normalised to None
        lat = data.get('lat', None)
        if not lat:
            lat = None
        lon = data.get('lon', None)
        if not lon:
            lon = None
        address = data.get('address', '').strip()
        # geocode only when we have an address but neither coordinate
        if address and lat is lon is None:
            coordinates = geocoding.location_geocode(address)
            if coordinates is not None:
                lat, lon = coordinates
        return Geo(lat, lon, address)
    except ValueError, e:
        raise WidgetError(str(e))
def convertValue(self, value):
    """Pass through a valid Geo (or None); an empty Geo collapses to None.

    Raises WidgetError for anything that is neither None nor a Geo.
    """
    if value is None or isinstance(value, Geo):
        # an empty Geo carries no information; normalise it to None
        if value == Geo():
            return None
        return value
    raise WidgetError('Bad value for GeoWidget: %s' % repr(value))
def __init__(self, options):
    """Dialog built from one label/input pair per option name.

    options: type(list) of names for each option to input
    """
    QtWidgets.QDialog.__init__(self)
    self.obj_list = [
    ]  # storage for dynamic variables since can't name them all..# .
    self.info = {}
    self.options = options
    self.setupUi()
    # obj_list alternates label/input widgets, so option i's input sits at 2*i+1
    self.address_input = self.obj_list[2 * self.options.index('Address') + 1]
    self.lat_input = self.obj_list[2 * self.options.index('Lat') + 1]
    self.lon_input = self.obj_list[2 * self.options.index('Lon') + 1]
    self.geo_locator = Geo()
    # keep address and lat/lon fields in sync as edits complete
    self.address_input.editingFinished.connect(self.address_event)
    self.lat_input.editingFinished.connect(self.lat_lon_event)
    self.lon_input.editingFinished.connect(self.lat_lon_event)
def _convert_to_form_string(self, value):
    """
    We don't actually convert to a string here; GeoWidget needs the Geo
    instance all the way to the template.  An empty Geo maps to None.
    """
    return None if value == Geo() else value
def __init__(self, zs, pixels, C, z_fine, l_max, hard_l_max=np.inf):
    """pixelated geometry
    inputs:
        zs: tomographic z bins
        pixels: pixels in format np.array([(theta,phi,area)]), area in steradians
        C: CosmoPie object
        z_fine: the fine z slices
        l_max: l to expand the alm table to now
        hard_l_max: absolute maximum possible l to resolve
    """
    self.hard_l_max = hard_l_max
    self.pixels = pixels
    Geo.__init__(self, zs, C, z_fine)
    # only the monopole is known initially: total pixel area / sqrt(4 pi)
    self._l_max = 0
    self.alm_table[(0, 0)] = np.sum(pixels[:, 2]) / np.sqrt(4. * np.pi)
    self.expand_alm_table(l_max)
def fetch_country_boundaries(request, country):
    """ Fetch country boundaries and return the district containing the
    requested lon/lat query point. """
    started = time.time()
    if request.method == 'GET':
        print(country)
        gadm = Boundary.objects.all().filter(country=country)
        boundary_geojson = gadm[0].geo_json
        # query point as (lon, lat)
        point = (float(request.query_params["lon"]),
                 float(request.query_params["lat"]))
        district = Geo().find_district(point, boundary_geojson)
        print("%f seconds" % (time.time() - started))
        return Response(district)
def get_geo_list():
    """Load every Geo from the local geocoder.db.

    Iterates the `cities` table and, for each city row, reads that city's
    own table of buildings, yielding one Geo per (city, building) pair.
    """
    with sqlite3.connect('geocoder.db') as conn:
        cursor = conn.cursor()
        cursor.execute('''select * from cities''')
        row = cursor.fetchall()
        geo_list = []
        for city in row:
            # table name taken from column 2 of the cities row; sqlite
            # placeholders cannot quote identifiers, but NOTE(review): this
            # is unsafe if the cities table can hold untrusted names
            cursor.execute('''select * from "{0[2]}"'''.format(city))
            row = cursor.fetchall()
            geo_list += [Geo(city, building) for building in row]
        return geo_list
def main():
    """Command-line driver: parse a geo file, then run the requested
    conversions and geometric measurements."""
    parser = argparse.ArgumentParser()
    parser.add_argument("fname", default="geo", nargs="?", help="geo file name")
    parser.add_argument("-c", action="store_true", help="convert the file to other formats (geo, xyz, gjf, lammps)")
    parser.add_argument("-pbc", action="store_true", help="using default pbc 5nm * 5nm * 5nm")
    parser.add_argument("-b", nargs=2, type=int, help="get the bond distance between a1, a2, a3")
    parser.add_argument("-a", nargs=3, type=int,help="get the angle of a1-a2-a3")
    parser.add_argument("-vol", action="store_true", help="get the volume of the simulation box")
    args = parser.parse_args()
    #print b.getBondDist(3,2)
    fname = args.fname
    assert os.path.exists(fname)
    a = Geo(fname)
    b = a.parser()
    b.assignEleTypes()
    b.assignAtomTypes2()
    if args.c:
        print "converting %s to geo, xyz, gjf and lammps..."%fname
        if args.pbc:
            # default orthogonal 5 nm cubic box (units of 0.1 nm)
            b.pbc = [50, 50, 50, 90.0, 90.0, 90.0]
        convertors(b)
    if args.b:
        a1 = args.b[0]
        a2 = args.b[1]
        val = b.getBondDist(a1, a2)
        print "Distance between %d and %d is %.3f."%(a1, a2, val)
    if args.a:
        a1 = args.a[0]
        a2 = args.a[1]
        a3 = args.a[2]
        val = b.getAngle(a1, a2, a3)
        print "Angle of %d-%d-%d is %.3f."%(a1, a2, a3, val)
    if args.vol:
        vol = b.getVol()
        print "Volume is %.3f"%vol
def stops():
    """Retrieve list of bus stops (type: STBC) around the user's location"""
    tfl_api = TflApi()
    # prefer explicit coordinates; fall back to a postcode lookup
    try:
        lat = request.form['lat']
        lon = request.form['lon']
    except BadRequestKeyError:
        try:
            postcode = request.form['postcode']
        except BadRequestKeyError:
            return render_template('main.html')  # bail out
        # Convert postcode to coordinates
        geo = Geo(postcode)
        lat, lon = geo.getLatitudeAndLongitude()
    stops = tfl_api.getStopList(lat, lon)
    stopcodes = [s['StopCode1'] for s in stops]
    busses = tfl_api.getBusList(stopcodes)
    results = []
    for stop in stops:
        result = {
            'name': '{name} ({letter})'.format(name=stop['StopPointName'],
                                               letter=stop['StopPointIndicator']),
            'busses': []
        }
        # arrivals at this stop, soonest first
        busses_for_stopcode = [b for b in busses if b['StopCode1'] == stop['StopCode1']]
        for bus in sorted(busses_for_stopcode, key=lambda i: i['EstimatedTime']):
            result['busses'].append({
                'number': bus['LineName'],
                'direction': bus['DestinationText'],
                'eta': U.format_time(bus['EstimatedTime'])
            })
        results.append(result)
    return render_template('results.html', results=results)
def get_geos_and_reviews_from_db(self, db='ieyasu'):
    '''
    Get geos(restaurants) from db
    Now specify '京都市' where res.LstPrf = "A2601"

    Args:
        db: str
    Returns:
        None (populates self.geos and self.reviews)
    '''
    db_connection = get_db_connection(db)
    cursor = db_connection.cursor()
    try:
        # left join: restaurants with no reviews still appear (rev.* = NULL)
        sql = 'select res.restaurant_id, res.name, res.url, res.pr_comment_title, res.pr_comment_body, rev.title, rev.body from restaurants as res left join reviews as rev on res.restaurant_id = rev.restaurant_id where res.LstPrf = "A2601" order by res.id;'
        cursor.execute(sql)
        result = cursor.fetchall()
        geo_ids = []
        for row in result:
            geo_id = row[0]
            name = row[1]
            # NULL columns are normalised to empty strings
            geo_url = '' if row[2] is None else row[2]
            pr_title = '' if row[3] is None else row[3]
            pr_body = '' if row[4] is None else row[4]
            geo = Geo(geo_id, name, geo_url, pr_title, pr_body)
            # the join repeats each restaurant once per review; add it once
            if geo_id not in geo_ids:
                geo_ids.append(geo_id)
                self.geos.append(geo)
            rvw_title = '' if row[5] is None else row[5]
            rvw_body = '' if row[6] is None else row[6]
            review = rvw_title + rvw_body
            if geo_id in self.reviews:
                self.reviews[geo_id].append(review)
            else:
                self.reviews[geo_id] = [review]
    except MySQLdb.Error as e:
        print('MySQLdb.Error: ', e)
    except Exception as e:
        traceback.print_exc()
        print(e)
    finally:
        cursor.close()
        db_connection.close()
def collect_eq_data(self):
    """Fetch the USGS all-month earthquake GeoJSON feed and persist each
    quake as a Geo.  Errors are reported and the method returns without
    raising."""
    try:
        response = requests.get(
            "https://earthquake.usgs.gov/earthquakes/feed/v1.0/summary/all_month.geojson"
        )
        for feature in response.json().get('features'):
            Geo(
                Feature(id=feature.get('id'),
                        properties=feature.get('properties'),
                        geometry=Point(
                            feature['geometry']['coordinates']))).save()
    except Exception as e:
        # bug fix: print() received the format string and the exception as
        # two arguments; apply %-formatting explicitly
        print("error fetching USGS earthquake data: %s" % e)
        return
    print("retrieved docs from usgs")
def densify(targetNodes, factor):
    """Insert `factor` interpolated points between each pair of consecutive
    nodes, copying attributes from the current node."""
    for i, curr in enumerate(targetNodes):
        # the last node has no successor to interpolate toward
        if (i == len(targetNodes) - 1):
            break
        next = targetNodes[i + 1]  # NOTE(review): shadows the builtin `next`
        parent = curr.getparent()  # NOTE(review): unused
        # Obtain data from both current and next nodes
        nodes = {'curr': curr, 'next': next}
        for key, node in nodes.items():
            nodes[key] = {
                # 'time': DateTime.strptime(node[0].text, '%Y-%m-%dT%H:%M:%SZ'),
                'lat': float(node.attrib['lat']),
                'lon': float(node.attrib['lon']),
                'node': node,
            }
        # using the geodata, create a virtual line to get information.
        line = Geo([nodes['curr']['lat'], nodes['curr']['lon']],
                   [nodes['next']['lat'], nodes['next']['lon']])
        # subdivide the line into corresponding chunks and add the to the tree after curr
        # NOTE(review): distance * (1 / (j + 1)) produces fractions 1, 1/2,
        # 1/3, ... of the full distance rather than an even subdivision such
        # as (j + 1) / factor — confirm this spacing is intended
        for j in range(factor):
            pointC = line.point(line.distance * (1 / (j + 1)))
            node = DeepCopy(curr)
            node.attrib['lat'] = '%.7f' % round(pointC[0], 7)
            node.attrib['lon'] = '%.7f' % round(pointC[1], 7)
            curr.addnext(node)
def get(self, address): latlong_address = geofind().lookuplatlong(address) # latlong_address = {'longitude':-118.0768, 'latitude':38.0512} point_geojson = Point( (latlong_address['longitude'], latlong_address['latitude'])) geojson_results = [] results = Geo(Feature(geometry=point_geojson)).intersects_with() for result in results: geojson_results.append( Feature(id=result.get('id'), properties=result.get('properties'), geometry=Point(result['geometry']['coordinates']))) return jsonify(FeatureCollection(geojson_results))
def getUnitList(self, location):
    """Return all living units inside a bounding box of radius 2 around
    `location`, each summarised as a plain dict."""
    bBox = Geo.boundingBox(location, 2)
    db.execute("SELECT * FROM units WHERE health > 0 AND lat > ? AND lat < ? AND lon > ? AND lon < ?", (bBox['latMin'], bBox['latMax'], bBox['lonMin'], bBox['lonMax']))
    reply = []
    for unit in db.fetchall():
        reply.append({
            'unitID': unit['unit_id'],
            'userID': unit['user_id'],
            'type': unit['type'],
            'location': dict(lat=unit['lat'], lon=unit['lon']),
            'target': dict(lat=unit['target_lat'], lon=unit['target_lon']),
            'health': unit['health'],
        })
    return reply
def get_arrival_time(self, route, point): print 'Retrieving arrival time for route: %s point: %s' % (route, point) self.get_route_config(route) if not self.routes_config.get(route): return "'Route %s doesn't exist" % route stops = self.routes_config[route] points = [(float(stop['lat']), float(stop['lon'])) for stop in stops] #closest_stops = Geo.get_neighbors(point, points, 0.1) # within half a mile closest_stop = Geo.closest_point(point, points) stop = stops[closest_stop[1]] print 'Closest stop for %s is %s' % (point, stop['title']) result = self._format(self.get_stop_arrival_time(route, stop['tag'])) return result
def post(self):
    """Intersect posted GeoJSON (a Feature or a FeatureCollection) with the
    stored geometries and return the matches as a FeatureCollection."""
    geojson_payload = geojson.loads(json.dumps(request.json))
    # normalise to a list of features
    if isinstance(geojson_payload, geojson.feature.FeatureCollection):
        features = geojson_payload['features']
    else:
        features = [geojson_payload]
    geojson_results = []
    for feature in features:
        for result in Geo(feature).intersects_with():
            geojson_results.append(
                Feature(id=result.get('id'),
                        properties=result.get('properties'),
                        geometry=Point(result['geometry']['coordinates'])))
    return jsonify(FeatureCollection(geojson_results))
def __init__(self, db_conn, db_name):
    """Open the MongoDB connection and wire up sub-collections and logging.

    db_conn: MongoDB host to connect to
    db_name: database name to select
    """
    from pymongo import Connection
    print "Opening MongoDB connection"
    self.conn = Connection(host=db_conn)
    self.db = self.conn[db_name]
    # Open subcollections
    self.knowledge = Knowledge(self)
    self.frames = Frames(self)
    #self.map = Map(self)
    self.geo = Geo(self)
    # Logging
    from wifistalker import Log
    header = 'DB'
    self.log = Log(self, use_stdout=True, header=header)
    # Log collection; entries expire after one hour
    self._log = self['log']
    self._log.ensure_index('stamp_utc', expireAfterSeconds=60*60)
def __init__(self, zs, thetas, phis, theta_in, phi_in, C, z_fine, l_max, poly_params):
    """
    inputs:
        zs: the tomographic z bins
        thetas,phis: an array of theta values for the edges in radians, last value should be first for closure, edges will be clockwise
        theta_in,phi_in: a theta and phi known to be outside, needed for finding intersect for now
        C: a CosmoPie object
        z_fine: the resolution z slices
        l_max: the maximum l to compute the alm table to
        poly_params: a dict of parameters
    """
    self.poly_params = poly_params
    self.n_double = poly_params['n_double']
    self.l_max = l_max
    #maximum alm already available, only a00 available at start
    self._l_max = 0
    self.n_v = thetas.size - 1  # number of vertices (closure repeats the first)
    self.bounding_theta = thetas - np.pi / 2.  #to radec
    self.bounding_phi = np.pi - phis
    self.bounding_xyz = np.asarray(
        sgv.radec_to_vector(self.bounding_phi, self.bounding_theta,
                            degrees=False)).T
    self.theta_in = theta_in
    self.phi_in = phi_in
    self.thetas_orig = thetas
    self.phis_orig = phis
    #this gets correct internal angles with specified vertex order (copied from spherical_polygon clas)
    angle_body = gca.angle(self.bounding_xyz[:-2], self.bounding_xyz[1:-1],
                           self.bounding_xyz[2:], degrees=False)
    angle_end = gca.angle(self.bounding_xyz[-2], self.bounding_xyz[0],
                          self.bounding_xyz[1], degrees=False)
    self.internal_angles = 2. * np.pi - np.hstack([angle_body, angle_end])
    Geo.__init__(self, zs, C, z_fine)
    self.sp_poly = get_poly(thetas, phis, theta_in, phi_in)
    # cross-check the two independent area computations
    print("PolygonGeo: area calculated by SphericalPolygon: " +
          str(self.sp_poly.area()))
    print("PolygonGeo: area calculated by PolygonGeo: " +
          str(self.angular_area()) + " sr or " +
          str(self.angular_area() * (180. / np.pi)**2) + " deg^2")
    # monopole term: total area / sqrt(4 pi)
    self.alm_table = {(0, 0): self.angular_area() / np.sqrt(4. * np.pi)}
    # per-edge quantities filled in by the loop below
    self.z_hats = np.zeros((self.n_v, 3))
    self.y_hats = np.zeros_like(self.z_hats)
    self.xps = np.zeros_like(self.z_hats)
    self.betas = np.zeros(self.n_v)
    self.theta_alphas = np.zeros(self.n_v)
    self.omega_alphas = np.zeros(self.n_v)
    self.gamma_alphas = np.zeros(self.n_v)
    # for each edge: edge length beta and the Euler angles of the rotation
    # aligning the pole with the edge's normal (z_hat)
    for itr1 in range(0, self.n_v):
        itr2 = itr1 + 1
        pa1 = self.bounding_xyz[itr1]  #vertex 1
        pa2 = self.bounding_xyz[itr2]  #vertex 2
        cos_beta12 = np.dot(pa2, pa1)  #cos of angle between pa1 and pa2
        cross_12 = np.cross(pa2, pa1)
        sin_beta12 = np.linalg.norm(cross_12)  #magnitude of cross product
        self.betas[itr1] = np.arctan2(
            sin_beta12, cos_beta12)  #angle between pa1 and pa2
        #angle should be in quadrant expected by arccos because angle should be <pi
        beta_alt = np.arccos(cos_beta12)
        assert np.isclose(sin_beta12, np.sin(self.betas[itr1]))
        assert np.isclose(sin_beta12**2 + cos_beta12**2, 1.)
        assert np.isclose(beta_alt, self.betas[itr1])
        #define z_hat if possible
        if np.isclose(self.betas[itr1], 0.):
            print(
                "PolygonGeo: side length 0, directions unconstrained, picking directions arbitrarily"
            )
            #z_hat is not uniquely defined here so arbitrarily pick one orthogonal to pa1
            arbitrary = np.zeros(3)
            if not np.isclose(np.abs(pa1[0]), 1.):
                arbitrary[0] = 1.
            elif not np.isclose(np.abs(pa1[1]), 1.):
                arbitrary[1] = 1.
            else:
                arbitrary[2] = 1.
            cross_12 = np.cross(arbitrary, pa1)
            self.z_hats[itr1] = cross_12 / np.linalg.norm(cross_12)
        elif np.isclose(self.betas[itr1], np.pi):
            raise RuntimeError(
                "PolygonGeo: Spherical polygons with sides of length pi are not uniquely determined"
            )
        else:
            self.z_hats[
                itr1] = cross_12 / sin_beta12  #direction of cross product
        #three euler rotation angles
        if not (np.isclose(self.z_hats[itr1, 1], 0.)
                and np.isclose(self.z_hats[itr1, 0], 0.)):
            self.theta_alphas[itr1] = -np.arccos(self.z_hats[itr1, 2])
            y1 = np.cross(self.z_hats[itr1], pa1)
            self.y_hats[itr1] = y1
            # sanity checks: rotating pa1 by beta about z_hat reaches pa2
            assert np.allclose(
                pa1 * np.cos(self.betas[itr1]) -
                y1 * np.sin(self.betas[itr1]), pa2)
            assert np.allclose(np.cross(pa1, y1), self.z_hats[itr1])
            self.xps[itr1] = np.array([
                self.z_hats[itr1][1] * pa1[0] - self.z_hats[itr1][0] * pa1[1],
                self.z_hats[itr1][1] * y1[0] - self.z_hats[itr1][0] * y1[1],
                0.
            ])
            self.gamma_alphas[itr1] = np.arctan2(-self.z_hats[itr1, 0],
                                                 self.z_hats[itr1, 1])
            # equivalent formula as a consistency check (mod 2 pi)
            gamma_alpha2 = np.arctan2(self.z_hats[itr1, 1],
                                      self.z_hats[itr1, 0]) - np.pi / 2.
            assert np.isclose(np.mod(self.gamma_alphas[itr1] + 0.000001,
                                     2. * np.pi),
                              np.mod(gamma_alpha2 + 0.000001, 2. * np.pi),
                              atol=1.e-5)
            self.omega_alphas[itr1] = -np.arctan2(self.xps[itr1, 1],
                                                  self.xps[itr1, 0])
        else:
            self.omega_alphas[itr1] = 0.
            self.gamma_alphas[itr1] = np.arctan2(pa1[1], pa1[0])
            #need to handle the case where z||z_hat separately (so don't divide by 0)
            if self.z_hats[itr1, 2] < 0:
                print("PolygonGeo: setting theta_alpha to pi at " + str(itr1))
                self.theta_alphas[itr1] = np.pi
            else:
                print("PolygonGeo: setting theta_alpha to 0 at " + str(itr1))
                self.theta_alphas[itr1] = 0.
    self.expand_alm_table(l_max)
    print("PolygonGeo: finished initialization")
""" parse the geo file with multi configuration into seperated files """ import os from utilities import parseBlock from mytype import System, Molecule, Atom from geo import Geo from output_conf import toGeo from output_conf import toXyz os.chdir("/home/tao/Documents/debug/geofile") parseBlock("geo", 1) for i in range(204): fname = "out%03d"%i a = Geo(fname) b = a.parser() b.assignAtomTypes() toGeo(b, b.name+'.geo') toXyz(b, b.name+'.xyz')
def generate( state, zoom ):
    """Render one state's places at one zoom level and split the output into
    256x256 map tiles, padding the surrounding grid with blank tiles."""
    global geo, scaleoffset
    print '----------------------------------------'
    print 'Generating %s zoom %d' %( state, zoom )
    # draw at 10x resolution; the draw commands start with 'scale .1,.1'
    scale = 10
    geo = Geo( zoom, 256*scale )
    pixgeo = Geo( zoom, 256 )
    #exec re.sub( '.+\(', 'data = (', readFile( '%s/%s.js' %( shapespath, state ) ) )
    json = readFile( '%s/%s.js' %( shapespath, state ) )
    # the .js file is a JSONP-style callback; strip the wrapper and eval it
    exec re.sub( '^.+\(', 'data = (', json )
    places = data['places']
    #t1 = time.time()
    places = filterCONUS( places )
    #outer = pixgeo.pixFromGeoBounds( featuresBounds(features) )
    bounds = placesBounds( places )
    outer = pixgeo.pixFromGeoBounds( bounds )
    outer = pixgeo.inflateBounds( outer, 8 )
    gridoffset, gridsize = pixgeo.tileBounds( outer )
    scaleoffset = pixgeo.scalePoint( gridoffset, scale )
    print 'Offset:[%d,%d], Size:[%d,%d]' %( gridoffset[0], gridoffset[1], gridsize[0], gridsize[1] )
    # accumulate ImageMagick draw commands
    draw = [ 'scale .1,.1\n' ]
    draw.append( 'stroke-width 10\n' )
    drawPlaces( draw, places )
    cmdfile = 'draw.tmp'
    writeFile( cmdfile, ''.join(draw) )
    #t2 = time.time()
    #print '%0.3f seconds to generate commands' %( t2 - t1 )
    crop = True
    if crop:
        cropcmd = '-crop 256x256'
    else:
        cropcmd = ''
    blank = magick.blank( gridsize )
    base = '%s/%s/%s-%d' %( tilespath, state, state, zoom )
    command = ( '%s -draw "@%s" %s ' + base + '.png' )%( blank, cmdfile, cropcmd )
    #command = ( '%s -draw "@draw.cmd" %s -depth 8 -type Palette -floodfill 0x0 white -background white -transparent-color white ' + base + '.png' )%( blank, cropcmd )
    #command = ( 'null: -resize %dx%d! -floodfill 0x0 white -draw "@draw.cmd" %s -depth 8 -type Palette -background white -transparent white -transparent-color white ' + base + '.png' )%( gridsize[0], gridsize[1], cropcmd )
    #command = 'null: -resize %(cx)dx%(cy)d! -draw "@draw.cmd" %(crop)s tile%(zoom)d.png' %({
    #    'cx': gridsize[0],
    #    'cy': gridsize[1],
    #    'crop': crop,
    #    'zoom': zoom
    #})
    magick.convert( command )
    if crop:
        xyCount = 2 << zoom
        n = 0
        # TODO: refactor
        # tile-grid rectangle produced by the render, padded by 2 blank tiles
        # on every side (clamped to the world tile count)
        xMin = gridoffset[0] / 256
        xMinEdge = max( xMin - 2, 0 )
        yMin = gridoffset[1] / 256
        yMinEdge = max( yMin - 2, 0 )
        xN = gridsize[0] / 256
        yN = gridsize[1] /256
        xLim = xMin + xN
        xLimEdge = min( xLim + 2, xyCount )
        yLim = yMin + yN
        yLimEdge = min( yLim + 2, xyCount )
        nMoving = xN * yN
        nCopying = ( xLimEdge - xMinEdge ) * ( yLimEdge - yMinEdge ) - nMoving
        print 'Moving %d tiles, copying %d blank tiles...' %( nMoving, nCopying )
        t1 = time.time()
        for y in xrange( yMinEdge, yLimEdge ):
            for x in xrange( xMinEdge, xLimEdge ):
                target = '%s-%d-%d.png' %( base, y, x )
                if xMin <= x < xLim and yMin <= y < yLim:
                    if xN == 1 and yN == 1:
                        source = '%s.png' %( base )
                    else:
                        source = '%s-%d.png' %( base, n )
                    if os.path.exists( target ):
                        os.remove( target )
                    # NOTE(review): files <= 415 bytes are presumably empty
                    # tiles emitted by ImageMagick — confirm the threshold
                    if os.stat(source)[stat.ST_SIZE] > 415:
                        os.rename( source, target )
                    else:
                        os.remove( source )
                        shutil.copy( 'blanktile.png', target )
                    n += 1
                else:
                    shutil.copy( 'blanktile.png', target )
        t2 = time.time()
        print '%0.3f seconds to move files' %( t2 - t1 )
""" read the geo file and output to data (LAMMPS), geo and xyz file. """ from mytype import System, Molecule, Atom from geo import Geo from output_conf import toData from output_conf import toGeo from output_conf import toXyz testfile = "../../debug/geo" a = Geo(testfile) b = a.parser() b.assignAtomTypes() toData(b) toGeo(b) toXyz(b)
def check_invalid_postcode(self, postcode):
    """A malformed postcode must raise PostcodeMalformedError."""
    with patch('geo.open', self.m, create=True):
        geo = Geo()
        with assert_raises(PostcodeMalformedError):
            geo.getEastingAndNorthing(postcode)
unitID = data['id'] lat = data['lat'] lon = data['lon'] db.execute("SELECT unit_id, lat, lon FROM units WHERE unit_id = ? AND user_id = ? AND health > 0", (unitID,userID)) res = db.fetchone() if res is None: replyDic['status'] = 0 reply = json.dumps(replyDic) self.transport.write(reply + '\n') return #check if unit is with range userLoc = user.get_location() distance = Geo.distance(userLoc, dict(lat=res['lat'], lon=res['lon'])) print distance if distance > 2000: replyDic['status'] = 0 reply = json.dumps(replyDic) self.transport.write(reply + '\n') return # update db db.execute("UPDATE units SET target_lat = ?, target_lon = ? WHERE unit_id = ? AND user_id = ?", (lat, lon, unitID, userID)) sql.commit() replyDic['status'] = 1 replyDic['units'] = self.getUnitList(user.get_location()) reply = json.dumps(replyDic)
for i, u in enumerate(U_dev): userLocation[u] = P_dev[i] data = (A, X_train, Y_train, X_dev, Y_dev, X_test, Y_test, U_train, U_dev, U_test, classLatMedian, classLonMedian, userLocation, vocab) if not builddata: logging.info('dumping data in {} ...'.format(str(dump_file))) dump_obj(data, dump_file) logging.info('data dump finished!') return data dataset = 'geotext' path = "C:\\Users\\61484\\Graph_Convolutional_Networks\\data\\geo" dataset = Geo(path, dataset, transform=None) data = dataset[0] A, X_train, Y_train, X_dev, Y_dev, X_test, Y_test, U_train, U_dev, U_test, classLatMedian, classLonMedian, userLocation, vocab = get_geo_data( dataset.raw_dir, 'dump.pkl') U = U_train + U_dev + U_test locs = np.array([userLocation[u] for u in U]) class Net(torch.nn.Module): def __init__(self): super(Net, self).__init__() self.lin1 = Sequential(Linear(dataset.num_features, 300)) self.conv1 = GCNConv(300, dataset.num_classes) #self.conv2 = GCNConv(300, 300)
def check_postcode_not_found(self, file_content, postcode):
    """A postcode absent from the data file must raise PostcodeNotFoundError."""
    self.m.return_value = file_content
    with patch('geo.open', self.m, create=True):
        geo = Geo()
        with assert_raises(PostcodeNotFoundError):
            geo.getEastingAndNorthing(postcode)
""" parse the geo file with multi configuration into seperated files """ import os from utilities import parseBlock from mytype import System, Molecule, Atom from geo import Geo from output_conf import toGeo from output_conf import toXyz os.chdir("/home/tao/Documents/debug/geofile") parseBlock("geo", 1) for i in range(204): fname = "out%03d" % i a = Geo(fname) b = a.parser() b.assignAtomTypes() toGeo(b, b.name + '.geo') toXyz(b, b.name + '.xyz')
def generate( state, stateFilename, countyFilename, path, zoom ):
    """Render state (and optionally county) shapefiles at one zoom level and
    split the output into 256x256 map tiles padded with blanks."""
    global geo, scaleoffset
    print '----------------------------------------'
    print 'Generating %s %s zoom %d' %( stateFilename, countyFilename, zoom )
    # draw at 10x resolution; the command list begins with 'scale .1,.1'
    scale = 10
    geo = Geo( zoom, 256*scale )
    pixgeo = Geo( zoom, 256 )
    stateShapefile = loadshapefile( stateFilename )
    t1 = time.time()
    stateFeatures = stateShapefile['features']
    print '%d state features' % len(stateFeatures)
    stateFeatures = filterCONUS( stateFeatures )
    print '%d features in CONUS states' % len(stateFeatures)
    #writeFile( 'features.csv', shpUtils.dumpFeatureInfo(features) )
    #outer = pixgeo.pixFromGeoBounds( featuresBounds(features) )
    fb = featuresBounds( stateFeatures )
    outer = pixgeo.pixFromGeoBounds( fb )
    outer = pixgeo.inflateBounds( outer, 8 )
    gridoffset, gridsize = pixgeo.tileBounds( outer )
    scaleoffset = pixgeo.scalePoint( gridoffset, scale )
    print 'Offset:[%d,%d], Size:[%d,%d]' %( gridoffset[0], gridoffset[1], gridsize[0], gridsize[1] )
    draw = [ 'scale .1,.1\n' ]
    if countyFilename:
        # county outlines first (thin, random colors), states on top (thick)
        countyShapefile = loadshapefile( countyFilename )
        countyFeatures = countyShapefile['features']
        print '%d county features' % len(countyFeatures)
        countyFeatures = filterCONUS( countyFeatures )
        print '%d features in CONUS counties' % len(countyFeatures)
        draw.append( 'stroke-width 10\n' )
        drawFeatures( draw, countyFeatures, getRandomColor )
        draw.append( 'stroke-width 20\n' )
        drawFeatures( draw, stateFeatures, None )
    else:
        draw.append( 'stroke-width 10\n' )
        drawFeatures( draw, stateFeatures, getRandomColor )
    writeFile( 'draw.cmd', ''.join(draw) )
    t2 = time.time()
    print '%0.3f seconds to generate commands' %( t2 - t1 )
    crop = True
    if crop:
        cropcmd = '-crop 256x256'
    else:
        cropcmd = ''
    blank = magick.blank( gridsize )
    base = '%s/tile-%d' %( path, zoom )
    command = ( '%s -draw "@draw.cmd" %s ' + base + '.png' )%( blank, cropcmd )
    #command = ( '%s -draw "@draw.cmd" %s -depth 8 -type Palette -floodfill 0x0 white -background white -transparent-color white ' + base + '.png' )%( blank, cropcmd )
    #command = ( 'null: -resize %dx%d! -floodfill 0x0 white -draw "@draw.cmd" %s -depth 8 -type Palette -background white -transparent white -transparent-color white ' + base + '.png' )%( gridsize[0], gridsize[1], cropcmd )
    #command = 'null: -resize %(cx)dx%(cy)d! -draw "@draw.cmd" %(crop)s tile%(zoom)d.png' %({
    #    'cx': gridsize[0],
    #    'cy': gridsize[1],
    #    'crop': crop,
    #    'zoom': zoom
    #})
    magick.convert( command )
    if crop:
        xyCount = 2 << zoom
        n = 0
        # TODO: refactor
        # tile-grid rectangle produced by the render, padded by 2 blank tiles
        # on every side (clamped to the world tile count)
        xMin = gridoffset[0] / 256
        xMinEdge = max( xMin - 2, 0 )
        yMin = gridoffset[1] / 256
        yMinEdge = max( yMin - 2, 0 )
        xN = gridsize[0] / 256
        yN = gridsize[1] /256
        xLim = xMin + xN
        xLimEdge = min( xLim + 2, xyCount )
        yLim = yMin + yN
        yLimEdge = min( yLim + 2, xyCount )
        nMoving = xN * yN
        nCopying = ( xLimEdge - xMinEdge ) * ( yLimEdge - yMinEdge ) - nMoving
        print 'Moving %d tiles, copying %d blank tiles...' %( nMoving, nCopying )
        t1 = time.time()
        for y in xrange( yMinEdge, yLimEdge ):
            for x in xrange( xMinEdge, xLimEdge ):
                target = '%s-%d-%d.png' %( base, y, x )
                if xMin <= x < xLim and yMin <= y < yLim:
                    if xN == 1 and yN == 1:
                        source = '%s.png' %( base )
                    else:
                        source = '%s-%d.png' %( base, n )
                    if os.path.exists( target ):
                        os.remove( target )
                    # NOTE(review): files <= 415 bytes are presumably empty
                    # tiles emitted by ImageMagick — confirm the threshold
                    if os.stat(source)[stat.ST_SIZE] > 415:
                        os.rename( source, target )
                    else:
                        os.remove( source )
                        shutil.copy( 'blanktile.png', target )
                    n += 1
                else:
                    shutil.copy( 'blanktile.png', target )
        t2 = time.time()
        print '%0.3f seconds to move files' %( t2 - t1 )
def isEmptyDatamodel(self, value):
    """Return True when the datamodel value is one of the 'empty' sentinels."""
    empties = (None, '', {}, Geo())
    return value in empties
def test_getLatitudeAndLongitude():
    """A known easting/northing must convert to the expected lat/lon."""
    geo = Geo()
    lat, lon = geo.getLatitudeAndLongitude(651409.903, 313177.270)
    # expected values to 6 decimal places
    assert_almost_equal(lat, 52.657570, places=6)
    assert_almost_equal(lon, 1.717922, places=6)
def run(self): lastFired = dict() sql = lite.connect('db.sqlite') db = sql.cursor() db.row_factory = lite.Row while True: db.execute("SELECT * FROM units WHERE health > 0 AND lat != target_lat OR lon != target_lon") data = db.fetchall() for unit in data: lat1 = unit['lat'] lon1 = unit['lon'] lat2 = unit['target_lat'] lon2 = unit['target_lon'] unitID = unit['unit_id'] distance = Geo.distance(dict(lat=lat1, lon=lon1), dict(lat=lat2, lon=lon2)) steps = round(distance / 4); if steps >= 1: lat1 = lat1 + ((lat2 - lat1) / steps) lon1 = lon1 + ((lon2 - lon1) / steps) else: lat1 = lat2 lon1 = lon2 db.execute("UPDATE units SET lat = ?, lon = ? WHERE unit_id = ?", (lat1, lon1, unitID)) #random gives each unit a chance to fire first db.execute("SELECT * FROM units WHERE health > 0 ORDER BY RANDOM()") data = db.fetchall() for unit in data: t = int(round(time.time() * 1000)) lat1 = unit['lat'] lon1 = unit['lon'] unitID = unit['unit_id'] userID = unit['user_id'] if unitID in lastFired and t - lastFired[unitID] < 500: continue #randomize firing if random.randint(1, 5) != 1: continue #get closest enemy unit within range of weapons bBox = Geo.boundingBox(dict(lat=lat1, lon=lon1), 0.10) db.execute("SELECT * FROM units WHERE user_id != ? AND health > 0 AND lat > ? AND lat < ? AND lon > ? AND lon < ? LIMIT 1", (userID, bBox['latMin'], bBox['latMax'], bBox['lonMin'], bBox['lonMax'])) target = db.fetchone() if target is not None: dis = Geo.distance(dict(lat=lat1, lon=lon1), dict(lat=target['lat'], lon=target['lon'])) if dis < 100: lastFired[unitID] = t targetID = target['unit_id'] db.execute("UPDATE units SET health = health - 10 WHERE unit_id = ?", (targetID,)) print "{0} shot {1} ({2})".format(unitID, targetID, (int(target['health']) - 10)) sql.commit() time.sleep(0.2)
from collections import defaultdict import math from geo import Geo # RTT absoluto quiere decir desde mi pc hasta el hop i. # RTT relativo (o RTT a secas) quiere decir desde el hop i-1 hasta el hop i. def avg(l): return sum(map(float, l)) / len(l) global_geo = Geo() class Location(): def __init__(self, ip): self._ip = ip location = global_geo.locate(ip) self._city = location[0] self._latitude = location[1] self._longitude = location[2] def city(self): return self._city def latitude(self): return self._latitude
def generate(state, zoom):
    """Render the map tiles for one US state at the given zoom level.

    Reads '<shapespath>/<state>.js' (a JSONP-style file of place shapes),
    draws all places into one large image via ImageMagick, and -- when
    cropping is enabled -- slices the result into 256x256 tiles named
    '<state>-<zoom>-<row>-<col>.png', padding the border with blank tiles.

    Side effects: sets module globals `geo` and `scaleoffset`, writes
    'draw.tmp', and creates/renames files under `tilespath`.
    """
    global geo, scaleoffset
    print '----------------------------------------'
    print 'Generating %s zoom %d' % (state, zoom)
    # Draw at 10x resolution; the draw script below scales back by .1
    # for sub-pixel accuracy.
    scale = 10
    geo = Geo(zoom, 256 * scale)   # scaled-up projection (shared global)
    pixgeo = Geo(zoom, 256)        # true pixel-space projection
    #exec re.sub( '.+\(', 'data = (', readFile( '%s/%s.js' %( shapespath, state ) ) )
    # The .js file is a JSONP callback; strip everything up to the first
    # '(' and exec the remainder so the payload is bound to `data`.
    # NOTE(review): exec of file content is fine for trusted local data,
    # unsafe if shapes files ever come from an untrusted source.
    json = readFile('%s/%s.js' % (shapespath, state))  # shadows stdlib `json`
    exec re.sub('^.+\(', 'data = (', json)
    places = data['places']
    #t1 = time.time()
    places = filterCONUS(places)  # presumably keeps continental-US places only -- TODO confirm
    #outer = pixgeo.pixFromGeoBounds( featuresBounds(features) )
    # Bounding box of all places in pixel space, padded by 8px and
    # snapped to the 256px tile grid.
    bounds = placesBounds(places)
    outer = pixgeo.pixFromGeoBounds(bounds)
    outer = pixgeo.inflateBounds(outer, 8)
    gridoffset, gridsize = pixgeo.tileBounds(outer)
    # Same offset expressed in the 10x drawing space.
    scaleoffset = pixgeo.scalePoint(gridoffset, scale)
    print 'Offset:[%d,%d], Size:[%d,%d]' % (gridoffset[0], gridoffset[1], gridsize[0], gridsize[1])
    # Build the ImageMagick draw script: everything drawn at 10x, scaled
    # back down by the leading 'scale .1,.1' command.
    draw = ['scale .1,.1\n']
    draw.append('stroke-width 10\n')
    drawPlaces(draw, places)
    cmdfile = 'draw.tmp'
    writeFile(cmdfile, ''.join(draw))
    #t2 = time.time()
    #print '%0.3f seconds to generate commands' %( t2 - t1 )
    crop = True
    if crop:
        cropcmd = '-crop 256x256'
    else:
        cropcmd = ''
    # Render: blank canvas, replay the draw script, optionally crop the
    # result into 256x256 tiles.
    blank = magick.blank(gridsize)
    base = '%s/%s/%s-%d' % (tilespath, state, state, zoom)
    command = ('%s -draw "@%s" %s ' + base + '.png') % (blank, cmdfile, cropcmd)
    #command = ( '%s -draw "@draw.cmd" %s -depth 8 -type Palette -floodfill 0x0 white -background white -transparent-color white ' + base + '.png' )%( blank, cropcmd )
    #command = ( 'null: -resize %dx%d! -floodfill 0x0 white -draw "@draw.cmd" %s -depth 8 -type Palette -background white -transparent white -transparent-color white ' + base + '.png' )%( gridsize[0], gridsize[1], cropcmd )
    #command = 'null: -resize %(cx)dx%(cy)d! -draw "@draw.cmd" %(crop)s tile%(zoom)d.png' %({
    #	'cx': gridsize[0],
    #	'cy': gridsize[1],
    #	'crop': crop,
    #	'zoom': zoom
    #})
    magick.convert(command)
    if crop:
        # ImageMagick names the crops base-0.png, base-1.png, ... in
        # row-major order; rename them onto the tile grid and surround
        # the state with a 2-tile border of blank tiles.
        xyCount = 2 << zoom  # tiles per axis at this zoom -- NOTE(review): 2<<zoom, not 1<<zoom; confirm intended
        n = 0
        # TODO: refactor
        xMin = gridoffset[0] / 256
        xMinEdge = max(xMin - 2, 0)
        yMin = gridoffset[1] / 256
        yMinEdge = max(yMin - 2, 0)
        xN = gridsize[0] / 256
        yN = gridsize[1] / 256
        xLim = xMin + xN
        xLimEdge = min(xLim + 2, xyCount)
        yLim = yMin + yN
        yLimEdge = min(yLim + 2, xyCount)
        nMoving = xN * yN
        nCopying = (xLimEdge - xMinEdge) * (yLimEdge - yMinEdge) - nMoving
        print 'Moving %d tiles, copying %d blank tiles...' % (nMoving, nCopying)
        t1 = time.time()
        for y in xrange(yMinEdge, yLimEdge):
            for x in xrange(xMinEdge, xLimEdge):
                # NOTE(review): target name is base-<y>-<x>.png (row
                # before column) -- confirm consumers expect this order.
                target = '%s-%d-%d.png' % (base, y, x)
                if xMin <= x < xLim and yMin <= y < yLim:
                    if xN == 1 and yN == 1:
                        # single tile: ImageMagick wrote base.png directly
                        source = '%s.png' % (base)
                    else:
                        source = '%s-%d.png' % (base, n)
                    if os.path.exists(target):
                        os.remove(target)
                    # Files <= 415 bytes are treated as empty tiles and
                    # replaced with the shared blank tile. (415 --
                    # presumably the blank PNG's size; TODO confirm.)
                    if os.stat(source)[stat.ST_SIZE] > 415:
                        os.rename(source, target)
                    else:
                        os.remove(source)
                        shutil.copy('blanktile.png', target)
                    n += 1
                else:
                    shutil.copy('blanktile.png', target)
        t2 = time.time()
        print '%0.3f seconds to move files' % (t2 - t1)
from pprint import pprint from geo import Geo, MultiThreadGeo ADDRESS_VEC: list = ['Minsk', 'Moscow', 'Bangui', '453 Booth Street, Ottawa ON'] # ===== Basic Geo Class ===== geo = Geo() coords = geo.get_coordinates(ADDRESS_VEC, method_name='yandex') coords_table = geo.get_multiple_method_coordinates(ADDRESS_VEC) pprint(coords) print(f'\n{"=" * 30}\n') print(coords_table) # ===== Multi Thread Geo ===== mt_geo = MultiThreadGeo(geo)