def test(self):
    """Populate fixture data: actions, geos, per-geo reviews, and a zeroed score matrix."""
    self.actions = ['ちょっと', '一人で', '女性と', 'しっぽり']
    # All fixture geos share the same URL / PR strings; only id and name differ.
    names = ['くれしま', 'くれない', '鳥貴族', '鳥次郎', '大島', '順菜', '魔境', '眠い']
    self.geos = Geos([
        Geo(i, name, 'http://test.com', '宴会に最適!', 'エンジェルがいるよ.')
        for i, name in enumerate(names, start=1)
    ])
    self.reviews = {
        1: ['なんて日だ!', 'ちょっと飲むにはいい店です.', 'ちょっと飲むの楽しい'],
        2: ['なんて日だ!'],
        3: [],
        4: ['ちょっとだけと思ったのに気づいたら飲む飲む', '一人で参戦!飲む', '楽しい'],
        5: ['女性としっぽりと飲むでました', '女性とちょっとだけ飲む'],
        6: [],
        7: ['しっぽり!'],
        8: ['ちょっと', '一人でゆっくりと飲む'],
    }
    # One score row per action, one column per geo.
    self.scores = np.zeros([len(self.actions), len(self.geos.geos)])
def cal_heatf(): """calculate the heat of formation as following: \delta H_f = E_system + 4RT + POP + n_CI_C + n_HI_H POP: contribution of high energy conformations. (default -0.2 kcal/mol) I_C: heat increament for C (see STD) I_H: heat increament for H (see STD) @ref: reaxFF 2001 """ # get E_system assert os.path.exists("fort.74") f = open("fort.74", "r") ener = float(f.readline()[27:37]) f.close() assert os.path.exists("geo") fname = "geo" a = Geo(fname) b = a.parser() # get atom map b.assignAtomTypes() # get element type b.assignEleTypes() ht = {} for i in b.map: ht[i[1]] = 0 for i in b.atoms: ener = ener + STD[i.element] ener = ener + 4*300*8.314/4184 + (-0.21) print b.name, ener
def parseFormData(self, data): if self.isEmptyDatamodel(data): return None if isinstance(data, Geo): return data if not isinstance(data, dict): raise WidgetError('Expected multiple values for "%s"' % self.title) try: lat = data.get('lat', None) if not lat: lat = None lon = data.get('lon', None) if not lon: lon = None address = data.get('address', '').strip() if address and lat is lon is None: coordinates = geocoding.location_geocode(address) if coordinates is not None: lat, lon = coordinates return Geo(lat, lon, address) except ValueError, e: raise WidgetError(str(e))
def __init__(self):
    """Location-entry dialog; sets up geocoding support and the UI."""
    QtWidgets.QDialog.__init__(self)
    self.info = {}
    self.geo_locator = Geo()          # geocoder used to resolve addresses
    self.location_coordinates = None  # filled in after a successful lookup
    self.setupUi()
def convertValue(self, value):
    """Normalise a widget value: reject non-Geo values, map an empty Geo to None."""
    if value is not None and not isinstance(value, Geo):
        raise WidgetError('Bad value for GeoWidget: %s' % repr(value))
    if value == Geo():
        return None
    return value
def collect_gdacs_data(self):
    """Fetch the GDACS RSS feed and persist each calculable entry as a Geo point.

    Only entries carrying a `gdacs_calculationtype` are stored; each becomes a
    GeoJSON Feature with a Point geometry at (geo_long, geo_lat).
    """
    try:
        d = feedparser.parse('https://gdacs.org/xml/rss.xml')
        for entry in d['entries']:
            if entry.get('gdacs_calculationtype'):
                feature = Feature(
                    id=entry['id'],
                    properties={
                        'type': entry['gdacs_calculationtype'],
                        'title': entry['title'],
                    },
                    geometry=Point(
                        [float(entry['geo_long']),
                         float(entry['geo_lat'])]))
                Geo(feature).save()
    except Exception as e:
        # BUG FIX: print() does not do %-interpolation of extra args
        # (that is the logging API); interpolate explicitly.
        print("error fetching gdacs data: %s" % e)
        return
    print("retrieved docs from gdacs")
def _convert_to_form_string(self, value):
    """
    We don't actually convert to a string here; GeoWidget needs the Geo
    instance all the way to the template
    """
    return None if value == Geo() else value
def get_geo_list():
    """Load all Geo objects from geocoder.db: one per (city, building) pair.

    Reads the `cities` table, then for each city reads its per-city table
    (named by the city row's third column).

    Returns:
        list of Geo(city_row, building_row).

    Fix over the original: `with sqlite3.connect(...)` only commits/rolls back
    on exit — it never closes the connection. Close it explicitly.
    """
    conn = sqlite3.connect('geocoder.db')
    try:
        cursor = conn.cursor()
        cursor.execute('''select * from cities''')
        cities = cursor.fetchall()
        geo_list = []
        for city in cities:
            # NOTE(review): the table name is interpolated from data in the
            # cities table itself (city[2]); safe only while that data is
            # trusted — sqlite placeholders cannot parameterise identifiers.
            cursor.execute('''select * from "{0[2]}"'''.format(city))
            for building in cursor.fetchall():
                geo_list.append(Geo(city, building))
        return geo_list
    finally:
        conn.close()
def __init__(self, window, zoom=12):
    """Main map-window controller: builds the folium map, the backing
    DataFrame, and all Qt wiring.

    Args:
        window: Qt window this Ui is installed into (passed to setupUi).
        zoom: initial folium zoom level.
    """
    super().__init__()
    self.setupUi(window)
    # input: tuple of starting coordinates
    self.app = QtWidgets.QApplication(sys.argv)
    # dataframe header names for better referencing
    self.addr_name = 'Address'
    self.lat_name = 'Lat'
    self.lon_name = 'Lon'
    # Default start location — presumably Saint Louis, MO (matches the
    # seed row below); TODO confirm.
    self.start_lat = 38.6268039
    self.start_lon = -90.1994097
    self.geo_locator = Geo()
    self.map = folium.Map(location=(self.start_lat, self.start_lon),
                          zoom_start=zoom)
    # Artifacts are written next to this module.
    self.default_map_save = path.dirname(
        path.abspath(__file__)) + r"\map.html"
    self.address_pickle_location = path.dirname(
        path.abspath(__file__)) + r"\cities_dictionary.pkl"
    self.city_lookup_dict = None
    self.save_location = None
    self.map_save_path = None
    # Editing state flags used by the table-editing handlers.
    self.unsaved_changes = False
    self.editing_cells = False
    self.last_edited_data = None
    # Seed row for the model; the trailing comments record the intended
    # column types (note lat/lon are stored as str here despite "float").
    self.data = pd.DataFrame(
        {  # will only be used for passthrough to model
            'Name': ['Eric Chung'],  # type(str)
            self.addr_name: ['Saint Louis, MO'],  # type(str)
            'Tags': ['Me, Myself, I'],  # type(list)
            self.lat_name: str(self.start_lat),  # type(float)
            self.lon_name: str(self.start_lon),  # type(float)
            'Last visited':
            [datetime.now().strftime('%Y-%m-%d %H:%M:%S')],  # type(float)
            'Dates visited': ['All the time'],  # type(list)
            'Date added': [
                datetime.fromtimestamp(777186000).strftime(
                    '%Y-%m-%d %H:%M:%S')
            ],  # type(float)
            'Description': ['0']  # type(str)
        })
    self._setup_map()
    self._setup_column_delegates()
    self._setup_connections()
    self._setup_column_width_rules()
    self._load_address_dict_from_pickle()
    self.stackedWidget_main.setCurrentIndex(0)
def get_geos_and_reviews_from_db(self, db='ieyasu'):
    '''
    Get geos(restaurants) from db
    Now specify '京都市' where res.LstPrf = "A2601"

    Args:
        db: str
    Returns:
        None
    '''
    db_connection = get_db_connection(db)
    cursor = db_connection.cursor()
    try:
        sql = 'select res.restaurant_id, res.name, res.url, res.pr_comment_title, res.pr_comment_body, rev.title, rev.body from restaurants as res left join reviews as rev on res.restaurant_id = rev.restaurant_id where res.LstPrf = "A2601" order by res.id;'
        cursor.execute(sql)
        seen_geo_ids = []
        for (geo_id, name, url, pr_title, pr_body,
             rvw_title, rvw_body) in cursor.fetchall():
            # NULL columns come back as None; normalise them to ''.
            url = '' if url is None else url
            pr_title = '' if pr_title is None else pr_title
            pr_body = '' if pr_body is None else pr_body
            geo = Geo(geo_id, name, url, pr_title, pr_body)
            # The join yields one row per review, so the same restaurant
            # repeats; keep only its first occurrence in self.geos.
            if geo_id not in seen_geo_ids:
                seen_geo_ids.append(geo_id)
                self.geos.append(geo)
            rvw_title = '' if rvw_title is None else rvw_title
            rvw_body = '' if rvw_body is None else rvw_body
            self.reviews.setdefault(geo_id, []).append(rvw_title + rvw_body)
    except MySQLdb.Error as e:
        print('MySQLdb.Error: ', e)
    except Exception as e:
        traceback.print_exc()
        print(e)
    finally:
        cursor.close()
        db_connection.close()
def collect_eq_data(self):
    """Fetch the USGS all-month earthquake GeoJSON feed and persist each feature."""
    try:
        response = requests.get(
            "https://earthquake.usgs.gov/earthquakes/feed/v1.0/summary/all_month.geojson"
        )
        for feature in response.json().get('features'):
            Geo(
                Feature(id=feature.get('id'),
                        properties=feature.get('properties'),
                        geometry=Point(
                            feature['geometry']['coordinates']))).save()
    except Exception as e:
        # BUG FIX: print() does not do %-interpolation of extra args
        # (that is the logging API); interpolate explicitly.
        print("error fetching USGS earthquake data: %s" % e)
        return
    print("retrieved docs from usgs")
def __init__(self, options):
    """Dialog with one labelled input per option name.

    Args:
        options: list of field names; must include 'Address', 'Lat', 'Lon'.
    """
    QtWidgets.QDialog.__init__(self)
    # storage for dynamically created widgets, since we can't name them all
    self.obj_list = []
    self.info = {}
    self.options = options
    self.setupUi()

    # setupUi appends (label, input) pairs, so option i's input widget
    # lives at index 2*i + 1.
    def _input_for(name):
        return self.obj_list[2 * self.options.index(name) + 1]

    self.address_input = _input_for('Address')
    self.lat_input = _input_for('Lat')
    self.lon_input = _input_for('Lon')
    self.geo_locator = Geo()
    self.address_input.editingFinished.connect(self.address_event)
    self.lat_input.editingFinished.connect(self.lat_lon_event)
    self.lon_input.editingFinished.connect(self.lat_lon_event)
def get(self, address):
    """Geocode *address* and return every stored feature intersecting that point."""
    latlong = geofind().lookuplatlong(address)
    # latlong_address = {'longitude':-118.0768, 'latitude':38.0512}
    query_point = Point((latlong['longitude'], latlong['latitude']))
    matches = Geo(Feature(geometry=query_point)).intersects_with()
    hits = [
        Feature(id=m.get('id'),
                properties=m.get('properties'),
                geometry=Point(m['geometry']['coordinates']))
        for m in matches
    ]
    return jsonify(FeatureCollection(hits))
def fetch_country_boundaries(request, country):
    """Find the district containing the query point within *country*'s GeoJSON."""
    start = time.time()
    if request.method == 'GET':
        print(country)
        gadm = Boundary.objects.all().filter(country=country)
        # First matching boundary's GeoJSON — assumes one record per country.
        gj = gadm[0].geo_json
        point = (float(request.query_params["lon"]),
                 float(request.query_params["lat"]))
        dist = Geo().find_district(point, gj)
        print("%f seconds" % (time.time() - start))
        return Response(dist)
def __init__(self, db_conn, db_name): from pymongo import Connection print "Opening MongoDB connection" self.conn = Connection(host=db_conn) self.db = self.conn[db_name] # Open subcollections self.knowledge = Knowledge(self) self.frames = Frames(self) #self.map = Map(self) self.geo = Geo(self) # Logging from wifistalker import Log header = 'DB' self.log = Log(self, use_stdout=True, header=header) # Log collection self._log = self['log'] self._log.ensure_index('stamp_utc', expireAfterSeconds=60*60)
def post(self):
    """Intersect posted GeoJSON (Feature or FeatureCollection) with stored geometries."""
    payload = geojson.loads(json.dumps(request.json))
    if isinstance(payload, geojson.feature.FeatureCollection):
        features = payload['features']
    else:
        features = [payload]
    hits = []
    for feature in features:
        for result in Geo(feature).intersects_with():
            hits.append(
                Feature(id=result.get('id'),
                        properties=result.get('properties'),
                        geometry=Point(result['geometry']['coordinates'])))
    return jsonify(FeatureCollection(hits))
def main(): parser = argparse.ArgumentParser() parser.add_argument("fname", default="geo", nargs="?", help="geo file name") parser.add_argument("-c", action="store_true", help="convert the file to other formats (geo, xyz, gjf, lammps)") parser.add_argument("-pbc", action="store_true", help="using default pbc 5nm * 5nm * 5nm") parser.add_argument("-b", nargs=2, type=int, help="get the bond distance between a1, a2, a3") parser.add_argument("-a", nargs=3, type=int,help="get the angle of a1-a2-a3") parser.add_argument("-vol", action="store_true", help="get the volume of the simulation box") args = parser.parse_args() #print b.getBondDist(3,2) fname = args.fname assert os.path.exists(fname) a = Geo(fname) b = a.parser() b.assignEleTypes() b.assignAtomTypes2() if args.c: print "converting %s to geo, xyz, gjf and lammps..."%fname if args.pbc: b.pbc = [50, 50, 50, 90.0, 90.0, 90.0] convertors(b) if args.b: a1 = args.b[0] a2 = args.b[1] val = b.getBondDist(a1, a2) print "Distance between %d and %d is %.3f."%(a1, a2, val) if args.a: a1 = args.a[0] a2 = args.a[1] a3 = args.a[2] val = b.getAngle(a1, a2, a3) print "Angle of %d-%d-%d is %.3f."%(a1, a2, a3, val) if args.vol: vol = b.getVol() print "Volume is %.3f"%vol
def densify(targetNodes, factor):
    """Insert *factor* interpolated points between each consecutive pair of nodes.

    BUG FIX: the original computed ``line.distance * (1 / (j + 1))`` — that
    places points at harmonic fractions (1, 1/2, 1/3, ...) of the segment
    rather than subdividing it evenly, and under Python-2 integer division
    ``1 / (j + 1)`` degenerates to 1, 0, 0, ...  Points are now spaced at
    ``(j + 1) / (factor + 1)`` of the segment length.
    """
    for i, curr in enumerate(targetNodes):
        if i == len(targetNodes) - 1:
            break
        nxt = targetNodes[i + 1]  # renamed: `next` shadowed the builtin
        # Pull lat/lon off both endpoints of the segment.
        curr_lat = float(curr.attrib['lat'])
        curr_lon = float(curr.attrib['lon'])
        next_lat = float(nxt.attrib['lat'])
        next_lon = float(nxt.attrib['lon'])
        # Virtual line through the two endpoints, used for interpolation.
        line = Geo([curr_lat, curr_lon], [next_lat, next_lon])
        # addnext() inserts each node immediately after curr, which reverses
        # insertion order — so iterate fractions from largest to smallest to
        # leave the siblings in ascending order along the segment.
        for j in reversed(range(factor)):
            fraction = float(j + 1) / (factor + 1)
            pointC = line.point(line.distance * fraction)
            node = DeepCopy(curr)
            node.attrib['lat'] = '%.7f' % round(pointC[0], 7)
            node.attrib['lon'] = '%.7f' % round(pointC[1], 7)
            curr.addnext(node)
for i, u in enumerate(U_dev): userLocation[u] = P_dev[i] data = (A, X_train, Y_train, X_dev, Y_dev, X_test, Y_test, U_train, U_dev, U_test, classLatMedian, classLonMedian, userLocation, vocab) if not builddata: logging.info('dumping data in {} ...'.format(str(dump_file))) dump_obj(data, dump_file) logging.info('data dump finished!') return data dataset = 'geotext' path = "C:\\Users\\61484\\Graph_Convolutional_Networks\\data\\geo" dataset = Geo(path, dataset, transform=None) data = dataset[0] A, X_train, Y_train, X_dev, Y_dev, X_test, Y_test, U_train, U_dev, U_test, classLatMedian, classLonMedian, userLocation, vocab = get_geo_data( dataset.raw_dir, 'dump.pkl') U = U_train + U_dev + U_test locs = np.array([userLocation[u] for u in U]) class Net(torch.nn.Module): def __init__(self): super(Net, self).__init__() self.lin1 = Sequential(Linear(dataset.num_features, 300)) self.conv1 = GCNConv(300, dataset.num_classes) #self.conv2 = GCNConv(300, 300)
"""Demo script: resolve a handful of addresses with Geo and MultiThreadGeo."""
from pprint import pprint
from geo import Geo, MultiThreadGeo

# Sample inputs: three city names plus one full street address.
ADDRESS_VEC: list = ['Minsk', 'Moscow', 'Bangui', '453 Booth Street, Ottawa ON']

# ===== Basic Geo Class =====
geo = Geo()
# Single-backend lookup ('yandex') vs. a table comparing every backend.
coords = geo.get_coordinates(ADDRESS_VEC, method_name='yandex')
coords_table = geo.get_multiple_method_coordinates(ADDRESS_VEC)
pprint(coords)
print(f'\n{"=" * 30}\n')
print(coords_table)

# ===== Multi Thread Geo =====
# Threaded wrapper around the same Geo instance.
mt_geo = MultiThreadGeo(geo)
from collections import defaultdict
import math
from geo import Geo

# Absolute RTT means from my PC to hop i.
# Relative RTT (or just RTT) means from hop i-1 to hop i.


def avg(l):
    # Arithmetic mean; map(float, ...) tolerates numeric strings.
    return sum(map(float, l)) / len(l)


# Shared geolocation backend reused by every Location instance.
global_geo = Geo()


class Location():
    """Resolved geolocation (city, latitude, longitude) of an IP address."""

    def __init__(self, ip):
        self._ip = ip
        # locate() appears to return (city, latitude, longitude) — TODO confirm.
        location = global_geo.locate(ip)
        self._city = location[0]
        self._latitude = location[1]
        self._longitude = location[2]

    def city(self):
        return self._city

    def latitude(self):
        return self._latitude
def isEmptyDatamodel(self, value):
    """Return True when *value* is one of the 'empty' sentinels for this widget."""
    empty_sentinels = (None, '', {}, Geo())
    return value in empty_sentinels
""" read the geo file and output to data (LAMMPS), geo and xyz file. """ from mytype import System, Molecule, Atom from geo import Geo from output_conf import toData from output_conf import toGeo from output_conf import toXyz testfile = "../../debug/geo" a = Geo(testfile) b = a.parser() b.assignAtomTypes() toData(b) toGeo(b) toXyz(b)
def generate(state, stateFilename, countyFilename, path, zoom):
    """Render state (and optionally county) shapefile features into map tiles.

    Draws ImageMagick commands at 10x resolution (then 'scale .1,.1' shrinks
    them), rasterises one big image, optionally crops it into 256px tiles,
    and pads the surrounding grid with blank tiles.
    """
    global geo, scaleoffset
    print '----------------------------------------'
    print 'Generating %s %s zoom %d' % (stateFilename, countyFilename, zoom)
    # Oversample by 10x for draw precision; the draw script scales back down.
    scale = 10
    geo = Geo(zoom, 256 * scale)
    pixgeo = Geo(zoom, 256)
    stateShapefile = loadshapefile(stateFilename)
    t1 = time.time()
    stateFeatures = stateShapefile['features']
    print '%d state features' % len(stateFeatures)
    stateFeatures = filterCONUS(stateFeatures)
    print '%d features in CONUS states' % len(stateFeatures)
    #writeFile( 'features.csv', shpUtils.dumpFeatureInfo(features) )
    #outer = pixgeo.pixFromGeoBounds( featuresBounds(features) )
    # Bounding box of all features, inflated, then snapped to tile grid.
    fb = featuresBounds(stateFeatures)
    outer = pixgeo.pixFromGeoBounds(fb)
    outer = pixgeo.inflateBounds(outer, 8)
    gridoffset, gridsize = pixgeo.tileBounds(outer)
    scaleoffset = pixgeo.scalePoint(gridoffset, scale)
    print 'Offset:[%d,%d], Size:[%d,%d]' % (gridoffset[0], gridoffset[1], gridsize[0], gridsize[1])
    draw = ['scale .1,.1\n']
    if countyFilename:
        countyShapefile = loadshapefile(countyFilename)
        countyFeatures = countyShapefile['features']
        print '%d county features' % len(countyFeatures)
        countyFeatures = filterCONUS(countyFeatures)
        print '%d features in CONUS counties' % len(countyFeatures)
        # Counties in random colors under thicker state outlines.
        draw.append('stroke-width 10\n')
        drawFeatures(draw, countyFeatures, getRandomColor)
        draw.append('stroke-width 20\n')
        drawFeatures(draw, stateFeatures, None)
    else:
        draw.append('stroke-width 10\n')
        drawFeatures(draw, stateFeatures, getRandomColor)
    writeFile('draw.cmd', ''.join(draw))
    t2 = time.time()
    print '%0.3f seconds to generate commands' % (t2 - t1)
    crop = True
    if crop:
        cropcmd = '-crop 256x256'
    else:
        cropcmd = ''
    blank = magick.blank(gridsize)
    base = '%s/tile-%d' % (path, zoom)
    command = ('%s -draw "@draw.cmd" %s ' + base + '.png') % (blank, cropcmd)
    # Older command variants kept for reference:
    #command = ( '%s -draw "@draw.cmd" %s -depth 8 -type Palette -floodfill 0x0 white -background white -transparent-color white ' + base + '.png' )%( blank, cropcmd )
    #command = ( 'null: -resize %dx%d! -floodfill 0x0 white -draw "@draw.cmd" %s -depth 8 -type Palette -background white -transparent white -transparent-color white ' + base + '.png' )%( gridsize[0], gridsize[1], cropcmd )
    #command = 'null: -resize %(cx)dx%(cy)d! -draw "@draw.cmd" %(crop)s tile%(zoom)d.png' %({
    #    'cx': gridsize[0],
    #    'cy': gridsize[1],
    #    'crop': crop,
    #    'zoom': zoom
    #})
    magick.convert(command)
    if crop:
        xyCount = 2 << zoom
        n = 0
        # TODO: refactor
        # Tile-grid bounds of the rendered area, plus a 2-tile blank border.
        xMin = gridoffset[0] / 256
        xMinEdge = max(xMin - 2, 0)
        yMin = gridoffset[1] / 256
        yMinEdge = max(yMin - 2, 0)
        xN = gridsize[0] / 256
        yN = gridsize[1] / 256
        xLim = xMin + xN
        xLimEdge = min(xLim + 2, xyCount)
        yLim = yMin + yN
        yLimEdge = min(yLim + 2, xyCount)
        nMoving = xN * yN
        nCopying = (xLimEdge - xMinEdge) * (yLimEdge - yMinEdge) - nMoving
        print 'Moving %d tiles, copying %d blank tiles...' % (nMoving, nCopying)
        t1 = time.time()
        for y in xrange(yMinEdge, yLimEdge):
            for x in xrange(xMinEdge, xLimEdge):
                target = '%s-%d-%d.png' % (base, y, x)
                if xMin <= x < xLim and yMin <= y < yLim:
                    if xN == 1 and yN == 1:
                        source = '%s.png' % (base)
                    else:
                        source = '%s-%d.png' % (base, n)
                    if os.path.exists(target):
                        os.remove(target)
                    # Files at/below 415 bytes are presumably empty tiles
                    # emitted by ImageMagick — replace with the canned blank
                    # tile; TODO confirm the threshold.
                    if os.stat(source)[stat.ST_SIZE] > 415:
                        os.rename(source, target)
                    else:
                        os.remove(source)
                        shutil.copy('blanktile.png', target)
                    n += 1
                else:
                    shutil.copy('blanktile.png', target)
        t2 = time.time()
        print '%0.3f seconds to move files' % (t2 - t1)
""" parse the geo file with multi configuration into seperated files """ import os from utilities import parseBlock from mytype import System, Molecule, Atom from geo import Geo from output_conf import toGeo from output_conf import toXyz os.chdir("/home/tao/Documents/debug/geofile") parseBlock("geo", 1) for i in range(204): fname = "out%03d" % i a = Geo(fname) b = a.parser() b.assignAtomTypes() toGeo(b, b.name + '.geo') toXyz(b, b.name + '.xyz')
def generate(state, zoom):
    """Render a state's places (from its .js shape data) into map tiles.

    Same pipeline as the shapefile-based generator: draw at 10x, rasterise,
    optionally crop into 256px tiles, pad the grid with blank tiles.
    """
    global geo, scaleoffset
    print '----------------------------------------'
    print 'Generating %s zoom %d' % (state, zoom)
    # Oversample by 10x for draw precision; the draw script scales back down.
    scale = 10
    geo = Geo(zoom, 256 * scale)
    pixgeo = Geo(zoom, 256)
    #exec re.sub( '.+\(', 'data = (', readFile( '%s/%s.js' %( shapespath, state ) ) )
    json = readFile('%s/%s.js' % (shapespath, state))
    # The .js file is a JSONP-style callback; strip the call wrapper and
    # evaluate the payload into `data`.
    exec re.sub('^.+\(', 'data = (', json)
    places = data['places']
    #t1 = time.time()
    places = filterCONUS(places)
    #outer = pixgeo.pixFromGeoBounds( featuresBounds(features) )
    # Bounding box of all places, inflated, then snapped to tile grid.
    bounds = placesBounds(places)
    outer = pixgeo.pixFromGeoBounds(bounds)
    outer = pixgeo.inflateBounds(outer, 8)
    gridoffset, gridsize = pixgeo.tileBounds(outer)
    scaleoffset = pixgeo.scalePoint(gridoffset, scale)
    print 'Offset:[%d,%d], Size:[%d,%d]' % (gridoffset[0], gridoffset[1], gridsize[0], gridsize[1])
    draw = ['scale .1,.1\n']
    draw.append('stroke-width 10\n')
    drawPlaces(draw, places)
    cmdfile = 'draw.tmp'
    writeFile(cmdfile, ''.join(draw))
    #t2 = time.time()
    #print '%0.3f seconds to generate commands' %( t2 - t1 )
    crop = True
    if crop:
        cropcmd = '-crop 256x256'
    else:
        cropcmd = ''
    blank = magick.blank(gridsize)
    base = '%s/%s/%s-%d' % (tilespath, state, state, zoom)
    command = ('%s -draw "@%s" %s ' + base + '.png') % (blank, cmdfile, cropcmd)
    # Older command variants kept for reference:
    #command = ( '%s -draw "@draw.cmd" %s -depth 8 -type Palette -floodfill 0x0 white -background white -transparent-color white ' + base + '.png' )%( blank, cropcmd )
    #command = ( 'null: -resize %dx%d! -floodfill 0x0 white -draw "@draw.cmd" %s -depth 8 -type Palette -background white -transparent white -transparent-color white ' + base + '.png' )%( gridsize[0], gridsize[1], cropcmd )
    #command = 'null: -resize %(cx)dx%(cy)d! -draw "@draw.cmd" %(crop)s tile%(zoom)d.png' %({
    #    'cx': gridsize[0],
    #    'cy': gridsize[1],
    #    'crop': crop,
    #    'zoom': zoom
    #})
    magick.convert(command)
    if crop:
        xyCount = 2 << zoom
        n = 0
        # TODO: refactor
        # Tile-grid bounds of the rendered area, plus a 2-tile blank border.
        xMin = gridoffset[0] / 256
        xMinEdge = max(xMin - 2, 0)
        yMin = gridoffset[1] / 256
        yMinEdge = max(yMin - 2, 0)
        xN = gridsize[0] / 256
        yN = gridsize[1] / 256
        xLim = xMin + xN
        xLimEdge = min(xLim + 2, xyCount)
        yLim = yMin + yN
        yLimEdge = min(yLim + 2, xyCount)
        nMoving = xN * yN
        nCopying = (xLimEdge - xMinEdge) * (yLimEdge - yMinEdge) - nMoving
        print 'Moving %d tiles, copying %d blank tiles...' % (nMoving, nCopying)
        t1 = time.time()
        for y in xrange(yMinEdge, yLimEdge):
            for x in xrange(xMinEdge, xLimEdge):
                target = '%s-%d-%d.png' % (base, y, x)
                if xMin <= x < xLim and yMin <= y < yLim:
                    if xN == 1 and yN == 1:
                        source = '%s.png' % (base)
                    else:
                        source = '%s-%d.png' % (base, n)
                    if os.path.exists(target):
                        os.remove(target)
                    # Files at/below 415 bytes are presumably empty tiles
                    # emitted by ImageMagick — replace with the canned blank
                    # tile; TODO confirm the threshold.
                    if os.stat(source)[stat.ST_SIZE] > 415:
                        os.rename(source, target)
                    else:
                        os.remove(source)
                        shutil.copy('blanktile.png', target)
                    n += 1
                else:
                    shutil.copy('blanktile.png', target)
        t2 = time.time()
        print '%0.3f seconds to move files' % (t2 - t1)