Example #1
def get_inner_hashtable(df,maxsize):
	# getting extrema
	extrema = get_extrema(df)

	# getting upper left and lower right points
	ul = [extrema['w'],extrema['n']]
	lr = [extrema['e'],extrema['s']]
	# getting geohash for ul and lr
	# assuming 8 for the time being as abs min
	ulhash = geohash.encode(ul[1],ul[0],12)
	lrhash = geohash.encode(lr[1],lr[0],12)


	# setting while loop to continue iterating until a dataframe of shape
	# bigger than 3x3 is returned
	ind = 0
	current = 0
		
	if maxsize == False:
		# setting the size of the proper ul hashs and lr hashs
		ulhash = ulhash[:current+2]
		lrhash = lrhash[:current+2]
	else:
		# setting the size of the proper ul hashs and lr hashs
		ulhash = ulhash[:maxsize]
		lrhash = lrhash[:maxsize]	

	# getting corners
	ulhash = get_corner(ulhash,'ul')
	lrhash = get_corner(lrhash,'lr')

	# making the inner most hashtable needed
	hashtable = make_points_geohash(ulhash,lrhash,maxsize)

	return hashtable
Example #2
def is_nearby(building_id, lat, lon, precis=6):
	target_hash = geohash.encode(lat, lon, precision=precis)
	if building_id not in buildings:
		return False
	building = buildings[building_id]
	if "location" in building:
		building_hash = geohash.encode(float(building["location"]["latitude"]), float(building["location"]["longitude"]), precision=precis)
		if target_hash == building_hash:
			return True
	return False
Example #3
def find_nearest_buildings(lat, lon, precis=6):
	retval = []
	target_hash = geohash.encode(lat, lon, precision=precis)
	print target_hash
	for building in buildings.itervalues():
		if "location" in building:
			print building["location"]["latitude"]
			building_hash = geohash.encode(float(building["location"]["latitude"]), float(building["location"]["longitude"]), precision=precis)
			if target_hash == building_hash:
				retval.append(building)
	return retval
Example #4
def get_corner(hash,corner):	
	lat,long,latdelta,longdelta = geohash.decode_exactly(hash)

	# ul corner
	if corner == 'ul':
		lat = lat + (3 * latdelta)
		long = long - (3 * longdelta)
		return geohash.encode(lat,long,len(hash))
	elif corner == 'lr':
		lat = lat - (3 * latdelta)
		long = long + (3 * longdelta)
		return geohash.encode(lat,long,len(hash))
Example #5
def fill_geohashs(data,size):
	# check whether the first point and last point are the same; if not, append to the table
	data = first_last(data)

	
	extrema = get_extrema(data)

	# getting upper left and lower right points
	ul = [extrema['w'],extrema['n']]
	lr = [extrema['e'],extrema['s']]


	# getting geohash for ul and lr
	# assuming 8 for the time being as abs min
	ulhash = geohash.encode(ul[1],ul[0],size)
	lrhash = geohash.encode(lr[1],lr[0],size)

	lat,long,latdelta,longdelta = geohash.decode_exactly(ulhash)

	latdelta,longdelta = latdelta * 2.0,longdelta * 2.0

	hashsize = ((latdelta ** 2) + (longdelta ** 2)) ** .5

	count = 0
	for row in data.columns.values.tolist():
		if 'lat' in str(row).lower():
			latheader = row
		elif 'long' in str(row).lower():
			longheader = row
		count += 1


	count = 0
	newlist = []
	for row in data[[longheader,latheader]].values.tolist():
		if count == 0:
			count = 1
		else:
			dist = distance(oldrow,row)
			if dist > hashsize / 5.0:
				number = (dist / hashsize) * 5.0
				number = int(number)
				newlist += generate_points(number,oldrow,row)[1:]
			else:
				newlist.append(row)
		oldrow = row

	newlist = pd.DataFrame(newlist,columns=['LONG','LAT'])
	newlist = map_table(newlist,size,map_only=True)
	return newlist
Example #6
File: __init__.py Project: ihucos/geonear
    def loc2geohash(self, loc):
        """
        Used internally to convert the **loc keyword arguments into a geohash.

        :param latlon: E.g. (38.70, -90.29)
        :param location: String to be be geocoded.  Example "Sophienstr. 9, 10178 Berlin".
        :param geohash: A geohash. Example: "u33dbczk"
        :param who: Use the location of another pin.
        """
        if 'latlon' in loc:
            return geohash.encode(loc['latlon'],
                                  precision=self._geohash_precision)

        elif 'location' in loc:

            cache_geocoding = (
                loc['cache_geocoding'] if 'cache_geocoding' in loc
                else self._cache_geocoding)

            # take geohash from cache if geocoding caching is enabled
            if cache_geocoding:
                cache_key = hash_iter(('geocoding',
                                       'google',
                                       str(self._geohash_precision),
                                       loc['location']))
                gh = self._redis.get(cache_key)
                if gh:
                    return gh

            # geocode the location to a geohash
            lat, lon = self._nominatim.geocode(loc['location'])
            gh = geohash.encode(lat, lon,
                                precision=self._geohash_precision)

            # cache the geocoded location if caching is enabled
            if cache_geocoding:
                self._redis.setex(cache_key, self._cache_geocoding, gh)

            return gh

        elif 'geohash' in loc:
            return geohash.encode(*geohash.decode(loc['geohash']),
                                  precision=self._geohash_precision)

        elif 'who' in loc:
            return self.geohash(loc['who'])

        else:
            raise TypeError('wrong location specification')
Example #7
File: camera.py Project: tomasvdw/thas
    def check_line(cls, loc1, loc2, userid):
        "Check if the given line intersects with a camera"

        # get all cameras in the neighbourhood
        hashes = geohash.expand(geohash.encode(loc1.lat, loc1.lon, 7))
        if str(loc1) != str(loc2):
            hashes.extend(geohash.expand(geohash.encode(loc2.lat, loc2.lon, 7)))
        sets = ["camloc:" + h for h in hashes]
        cams = rd.sunion(sets)

        for camstring in cams:
            cam = Camera(camstring)
            rate = cam.check_camera_line(loc1, loc2, userid)

        return False
Example #8
File: walkens.py Project: wesleyks/walkens
def storeMark():
	markId = str(uuid.uuid4())
	color = request.form['color']
	x = request.form['x']
	y = request.form['y']
	modifiedX = float(x) / 1112.0
	modifiedY = float(y) / 1112.0
	gHash = geohash.encode(modifiedX, modifiedY, 4)
	value = {
		'action': 'add',
		'type': 'm',
		'uuid': markId,
		'color': color,
		'x': x,
		'y': y,
		'vx': 0.0,
		'vy': 0.0
	}
	valueJson = json.dumps(value)
	redisClient.publish(gHash, valueJson)
	mark = {
		'_id': markId,
		'geoHash': gHash,
		'utcDate': datetime.utcnow(),
		'value': value
	}
	mongoMarks.insert(mark)
	if production:
		logging.info('data: ' + valueJson)
	return '0'
Example #9
File: geohash3.py Project: jokoon/eio
def load_from2():
    filename = r"C:\Users\epsi\Downloads\worldcitiespop.txt"
    filename = "~/Downloads/worldcitiespop.txt"
    filename = "/Users/bidet/Downloads/worldcitiespop.txt"
    filename = r"C:\_code\temporary\worldcitiespop.txt"
    filename = "/home/jorinovski/repos/jonas-histo/simplemaps-worldcities-basic.csv"
    filename = "/home/jorinovski/repos/jonas-histo/worldcitiespop.txt"

    # Country,City,AccentCity,Region,Population,Latitude,Longitude
    keystr = "Country,City,AccentCity,Region,Population,Latitude,Longitude"
    keys = keystr.split(',')
    geohashes = []
    lines = [a.split(',') for a in open(filename, encoding='Latin-1')][1:]
    print('lines done', len(lines))
    print(lines[343])
    
    cities = [{keys[k]:data for k,data in enumerate(line)} for line in lines]
    print('cities done', len(cities))
    print(cities[343])
    
    cities_gh = [dict(c, **{"geohash":geohash.encode(float(c['Latitude']),float(c['Longitude']))}) for c in cities]
    print('cities_gh done', len(cities_gh))
    print(cities_gh[343])

    insert = [(city["City"],city["Population"],city["Latitude"],city["Longitude"],city["geohash"]) for city in cities_gh]
    print(len(insert))
    print(insert[343])
    c.executemany('INSERT INTO cities_large (City, Population, Latitude, Longitude, geohash) VALUES (?,?,?,?,?)', insert)
    conn.commit()
Example #10
  def loadZips(self):
    sf = open('states.txt')
    states = {}
    for line in sf:
      cols = line.strip().split(' ')
      states[cols[0]] = cols[1]
   
    f = open('uszip.txt')

    self.zips = {}
    for line in f:
      cols = line.strip('\n').split('\t')
      zipc = cols[1]
      city = cols[2]
      statename = cols[3]
      state = cols[4]
      countyname = cols[5]
      county = cols[6]
      lat = float(cols[9])
      lon = float(cols[10])
      ghash = encode(lat,lon)[0:4]
      if not states.has_key(state):
          continue
      if not self.zips.has_key(ghash):
         self.zips[ghash] = []
      self.zips[ghash].append({'zipc':zipc,
                               'city': city, 
                               'state': state, 
                               'statename': statename, 
                               'county': states[state] + county, 
                               'countyname': countyname, 
                               'lat':lat,'lon':lon})
Example #11
def load_shops(filename):
    file_path = data_path(filename)
    with open(file_path, "rb") as shops:
        dict_reader = csv.DictReader(shops)
        query_text = "INSERT INTO shop " + "(id, name, latitude, longitude, geohash) " + "VALUES "
        query = []
        param = []
        counter = 0
        for row in dict_reader:
            query.append("( ?, ?, ?, ?, ? )")

            param.append(row["id"])
            param.append(row["name"])
            param.append(row["lat"])
            param.append(row["lng"])

            param.append(encode(float(row["lat"]), float(row["lng"])))
            counter += 1
            if counter == 499:
                batch_text = query_text + ",".join(query)
                db.engine.execute(batch_text, param)
                query = []
                param = []
                counter = 0
        query_text += ",".join(query)
        db.engine.execute(query_text, param)
Example #12
def solve_xmin(pt1,pt2,size):
	positiondict = {'neg':[4,6,2,5,0,1],'pos':[3,7,2,5,0,1],'vert':[2,5],'zero':[0,1]}
	
	ghash = geohash.encode(pt1[1],pt1[0],size)
	
	# getting neighbors
	neighbors = geohash.neighbors(ghash)

	# setting up variables for slope determination
	y1,y2,y3,y4,xmin = get_corner_points(neighbors)

	# getting slope
	slope = get_slope(pt1,pt2)

	y = interpol_point(xmin,slope,pt1)
	if y < y1 and y > y2:
		slope = 'neg'
		tang = 'pos'
	elif y < y2 and y > y3:
		slope = 'zero'
		tang = 'vert'
	elif y < y3 and y > y4:
		slope = 'pos'
		tang = 'neg'
	elif y >= y1 or y <= y4:
		slope = 'vert'
		tang = 'zero'
	if tang == 'pos' or tang == 'neg':
		pos1,pos2,pos3,pos4,pos5,pos6 = positiondict[tang]
		return [pos1,pos2,pos3,pos4,pos5,pos6]
	pos1,pos2 = positiondict[tang]
	return [pos1,pos2]
Example #13
def get_cells_in_circle(lat, lon, radius, precision):
  """Gets all geohash cells inside a circle, sorted approximately by distance.

  Args:
    lat: float, the latitude of the circle center.
    lon: float, the longitude of the circle center.
    radius: float, the radius of the circle in meters.
    precision: int, the precision of the geohash.

  Returns:
    list, the list of geohash cells.
  """
  # Get all cells that are in the circle (with the max_resolution).
  # Start from the center cell.
  cur_set = set([geohash.encode(lat, lon, precision)])
  all_set = set(cur_set)
  result = list(cur_set)
  while cur_set:
    # Gradually extend the found cells (all_set) layer by layer.
    new_set = set([])
    for cell in cur_set:
      for one_neighbor in geohash.neighbors(cell):
        if one_neighbor in all_set:
          continue
        (nb_lat, nb_lon) = geohash.decode(one_neighbor)
        if distance(nb_lat, nb_lon, lat, lon) < radius:
          new_set.add(one_neighbor)
    all_set.update(new_set)
    result.extend(list(new_set))
    cur_set = new_set

  return result
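A standalone sketch of the same layer-by-layer expansion, assuming the python-geohash package and a haversine helper written here for illustration; the centre point and radius are arbitrary:

import math
import geohash

def haversine_m(lat1, lon1, lat2, lon2):
    # great-circle distance in metres
    r = 6371000.0
    dlat = math.radians(lat2 - lat1)
    dlon = math.radians(lon2 - lon1)
    a = (math.sin(dlat / 2) ** 2
         + math.cos(math.radians(lat1)) * math.cos(math.radians(lat2)) * math.sin(dlon / 2) ** 2)
    return 2 * r * math.asin(math.sqrt(a))

center_lat, center_lon, radius_m = 48.8584, 2.2945, 500.0
center = geohash.encode(center_lat, center_lon, 7)
seen, frontier, cells = {center}, {center}, [center]
while frontier:
    # grow outwards one ring of neighbours at a time, keeping cells inside the radius
    ring = set()
    for cell in frontier:
        for nb in geohash.neighbors(cell):
            if nb in seen:
                continue
            seen.add(nb)
            nb_lat, nb_lon = geohash.decode(nb)
            if haversine_m(nb_lat, nb_lon, center_lat, center_lon) < radius_m:
                ring.add(nb)
    cells.extend(ring)
    frontier = ring
print(len(cells), 'precision-7 cells within', radius_m, 'metres')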
Example #14
File: walkens.py Project: wesleyks/walkens
def hashesToSearch(x, y):
	gHashes = set()
	for i in [-7.0, -4.0, -1.0, 0.0, 1.0, 4.0, 7.0]:
		for j in [-6.0, -2.0, 0.0, 2.0, 6.0]:
			subHash = geohash.encode((x + i * offsetX) / 1112.0, (y + j * offsetY) / 1112.0, 4)
			gHashes.add(subHash)
	return gHashes
Example #15
def filterFromSignature(data,hmap,maxDistance=5,heatThreshold=.01):
  ndata = copy.deepcopy(data)
  geohashlength = hmap['geohashlength']
  for l in range(len(data)):
    h = geohash.encode(data[l]['lat'],data[l]['lon'],geohashlength)
    visited = set([h])
    neighbors = set([h])
    nonZeroNeighbors = [h] if (h in hmap and hmap[h]>heatThreshold) else []
    d=0
    while (len(nonZeroNeighbors)==0 and d<maxDistance):
      nneighbors = set([])
      for n in neighbors:
        nneighbors.update(g for g in geohash.neighbors(n) if g not in visited)
      neighbors = nneighbors
      for n in neighbors:
        if (n in hmap and hmap[n]>heatThreshold):
          nonZeroNeighbors.append(n)
      visited.update(neighbors)
      d+=1

    if len(nonZeroNeighbors)>0:
      if len(nonZeroNeighbors)>1:
        print h,nonZeroNeighbors
      lat,lon=0.,0.
      for n in nonZeroNeighbors:
        dlat,dlon = geohash.decode(n)
        lat += dlat
        lon += dlon
      ndata[l]= Location(lat/len(nonZeroNeighbors),
                            lon/len(nonZeroNeighbors),
                            data[l].timestamp)

  return ndata
Example #16
File: walkens.py Project: wesleyks/walkens
def storePosition():
	playerId = request.form['uuid']
	color = request.form['color']
	x = request.form['x']
	vx = request.form['vx']
	y = request.form['y']
	vy = request.form['vy']
	action = request.form['action']
	modifiedX = float(x) / 1112.0
	modifiedY = float(y) / 1112.0
	gHash = geohash.encode(modifiedX, modifiedY, 4)
	value = {
		'action': action,
		'type': 'p',
		'uuid': playerId,
		'color': color,
		'x': x,
		'vx': vx,
		'y': y,
		'vy': vy
	}
	valueJson = json.dumps(value)
	redisClient.publish(gHash, valueJson)
	if production:
		logging.info('data: ' + valueJson)
	return gHash
Example #17
 def handle(self, reset=False, *args, **kwargs):
     if reset:
         base_qs = Hospital.objects
     else:
         base_qs = Hospital.objects.not_geocoded()
     self.hospitals_queryset = base_qs.only(*self.address_fields).order_by("name")
     if not self.confirm():
         return
     for args in self.get_addresses():
         hospital_id, coords, err = self.geocode_address(*args)
         if err:
             self.failed_lookups.append((hospital_id, err))
         else:
             hospital = Hospital.objects.get(pk=hospital_id)
             hospital.latitude = coords[0]
             hospital.longitude = coords[1]
             hospital.geohash = geohash.encode(*coords, precision=12)
             try:
                 hospital.save()
             except decimal.InvalidOperation, e:
                 raise CommandError(e)
             ascii_name = self.name_lookup[hospital_id].encode('ascii', 'ignore')
             self.stdout.write("Coordinates added for {0}.\n".format(ascii_name))
         # Avoid spamming the API.
         time.sleep(self.sleep_time)
Example #18
 def put(self, lat, lon, key):
   """Adds or updates one key with its related latitude and longitude.
   Args:
     - `lat`: float, latitude
     - `lon`: float, longitude
     - `key`: str, the key to add to Redis.
   """
   GeoSearch._rw_lock.writer_acquire()
   try:
     new_cells = set([geohash.encode(lat, lon, precision=p)
                      for p in range(self._min_p, self._max_p + 1)])
     key_k2l = '%s:%s' % (PREFIX_K2L, key)
     non_existing_cells = set([])
     if self._r.exists(key_k2l):
       old_cells = set(self._r.smembers(key_k2l))
       non_existing_cells = old_cells - new_cells
       new_cells = new_cells - old_cells
     # Remove non-existing ones.
     for one_cell in non_existing_cells:
       self._r.srem(key_k2l, one_cell)
       self._r.srem('%s:%s' % (PREFIX_L2K, one_cell), key)
     # Add new ones.
     for one_cell in new_cells:
       self._r.sadd(key_k2l, one_cell)
       self._r.sadd('%s:%s' % (PREFIX_L2K, one_cell), key)
   finally:
     GeoSearch._rw_lock.writer_release()
Example #19
File: device.py Project: chriswhsu/cassa
    def __init__(self, external_identifier, name, device_uuid=None, geohash=None, measures=None, tags=None,
                 parent_device_uuid=None, latitude=None, longitude=None):
        """ initialize new device object
        """


        # prevent inconsistent geospatial data from making it into the repository
        # re-derive lat / long
        if geohash and (latitude or longitude):
            (latitude, longitude) = gh.decode(geohash)

        # but populate both to facilitate queries that need lat/long data

        if (latitude and longitude) and not geohash:
            geohash = gh.encode(latitude, longitude)

        if geohash and not (latitude or longitude):
            (latitude, longitude) = gh.decode(geohash)

        self.external_identifier = external_identifier
        self.name = name
        self.geohash = geohash
        self.measures = measures
        self.tags = tags
        self.parent_device_id = parent_device_uuid
        self.latitude = latitude
        self.longitude = longitude

        # get a uuid if one wasn't passed in.
        if device_uuid is None:
            self.device_uuid = uuid.uuid4()
        elif isinstance(device_uuid, basestring):
            self.device_uuid = uuid.UUID(device_uuid)
        else:
            self.device_uuid = device_uuid
Example #20
 def get_point_hash(self, point):
     """
     return geohash for given point with self.precision
     :param point: GeoPoint instance
     :return: string
     """
     return geohash.encode(point.latitude, point.longitude, self.precision)
Example #21
def geohash_key_for_element(elem):
    "Return a geohash key for element 'elem'."

    lat = min(C.MAXGHLAT, float(elem.get(C.LAT)) / __SCALEFACTOR)
    lon = float(elem.get(C.LON)) / __SCALEFACTOR

    return geohash.encode(lat, lon, precision=__GHKEYLENGTH)
Example #22
    def get_sites_near_latlon(self, lat, lon):
        ghash = geohash.encode(float(lat), float(lon))

        hashes = geohash.neighbors(ghash)
        hashes.append(ghash)

        sites = rds.zrange(sitekey, 0, -1, withscores = True)

        hashmatches = {}

        for chars in range(6,3,-1):
            for sitehash, id in sites:
                for currenthash in hashes:

                    if currenthash[0:chars] == sitehash[0:chars]:
                        hashmatches[sitehash] = int(id)

            if len(hashmatches) > 0:
                break

        sites = []

        if len(hashmatches) > 0:

            for hash, id in hashmatches.items():
                site = self.fetch_id(id)
                if site:
                    sites.append(site)

        return sites
Example #23
	def on_data(self, data):
		global counter
		js = json.loads(data.decode('utf-8'))
		if 'coordinates' in js and  js['coordinates']!=None:
			print js['coordinates']
			c = js['coordinates']['coordinates']
			print c
			g = geohash.encode(c[1],c[0])
			t = twitter_epoch(js['created_at'])
			#self.map[g[:5],(t/3600)%24]+=1
			self.map[g[:5],(t/60)%60]+=1
			self.time_geo[(t/60)%60][g[:5]]+=1
			self.counter+=1
			print self.counter
			print self.size
			print self.size<self.counter

		if self.counter>args.size:
			map_to_csv(self.map)
			_to_json(self.time_geo)
			self.counter = 0
			print 'writing to file'
		#if 'text' in js:
		# 	print js['text']
		# 	self.tweets.append(data)
		# 	self.counter+=1
		#	print self.counter
		# 	if self.counter>int(self.size):
		# 		with open(self.folder+'/'+datetime.datetime.now().isoformat('_'),'a') as f:
		# 			f.write(',\n'.join(self.tweets).encode('UTF-8'))
		# 			self.counter = 0
		# 			del self.tweets[:]
		return True
Example #24
def my_encode(argstring):
	argstring = str.split(argstring,'_')
	lat,long,precision = float(argstring[0]),float(argstring[1]),int(argstring[2])
	try:
		hash = geohash.encode(lat,long,precision)
	except:
		hash = ''
	return hash
Example #25
    def proximity_search(self, latitude, longitude, radius):
        """
        Given a centerpoint, find everything within a radius around
        that latitude and longitude, returned in order.

        :param latitude: floating point latitude
        :param longitude: floating point longitude
        :param radius: radius in meters.
        :return:
        """

        hashcode = geohash.encode(latitude=latitude, longitude=longitude)
        centerpoint = (latitude, longitude)

        tmp_hashcode = ''
        for x in hashcode:
            # Go through the hashcode character by character
            tmp_hashcode += x
            lat, lng, delta_lat, delta_lng = geohash.decode(tmp_hashcode,
                                                            delta=True)
            overall_lat = 2 * 1000 * haversine(
                point1=(latitude - delta_lat, longitude),
                point2=(latitude + delta_lat, longitude)
            )
            overall_lng = 2 * 1000 * haversine(
                point1=(latitude, longitude-delta_lng),
                point2=(latitude, longitude+delta_lng)
            )

            dist = min(overall_lng, overall_lat)
            if dist < radius:
                tmp_hashcode = tmp_hashcode[:-1]
                break

        if tmp_hashcode == '':
            raise ValueError('Radius larger than earth')

        precision = len(tmp_hashcode)

        search_hashes = self._get_adjoining_hashes(hashcode=hashcode,
                                                   precision=precision)
        search_hashes.append(tmp_hashcode)

        possible_points = []
        result_values = []

        for search_hash in search_hashes:
            possible_points.extend(self.storage.values(prefix=search_hash))

        for point_id in possible_points:
            point = self.points_by_id[point_id]
            dist = 1000 * haversine(centerpoint, point)
            if dist <= radius:
                result_values.append((point_id, dist))

        sorted_results = sorted(result_values, key = lambda x: x[1])
        final_results = [x[0] for x in sorted_results]
        return final_results
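A minimal sketch of the precision-selection step described in the docstring above, assuming the python-geohash package and the haversine package (distances in kilometres); the point and radius are arbitrary:

import geohash
from haversine import haversine

def radius_precision(lat, lon, radius_m, max_precision=12):
    # truncate the full-precision hash until a single cell is narrower than the radius,
    # then back off one character so the cell still covers the search radius
    full_hash = geohash.encode(lat, lon, max_precision)
    prefix = ''
    for ch in full_hash:
        prefix += ch
        cell_lat, cell_lon, dlat, dlon = geohash.decode_exactly(prefix)
        height_m = 1000 * haversine((cell_lat - dlat, cell_lon), (cell_lat + dlat, cell_lon))
        width_m = 1000 * haversine((cell_lat, cell_lon - dlon), (cell_lat, cell_lon + dlon))
        if min(height_m, width_m) < radius_m:
            return max(len(prefix) - 1, 1)
    return len(prefix)

print(radius_precision(40.7128, -74.0060, 500.0))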
Example #26
def fetch_site_id(id):

    key = "site:%s" % id
    site = rds.get(key)

    if site:
        site = json.loads(site)

    if site == None:
        url = "http://www.museumoflondon.org.uk/laarcWS/v1/rest/?op=GetSite&search_type=bykey&terms=%d" % id

        code = 0
        tries = 0
        response = None
        root = None

        while code != 200:
            response = urllib2.urlopen(url)
            code = response.code
            root = etree.XML(response.read())

            if root.find(".//ErrorCode") is not None:
                code = int(root.find(".//ErrorCode").text.strip())

            if code != 200:
                tries += 1
                print "Retrying %s, try %s" % (id, tries)
                eventlet.sleep(1)

                if tries > 20:
                    break


        if root.find(".//Site") is not None:

            (latitude, longitude) = (float(root.find(".//Latitude").text), float(root.find(".//Longitude").text))

            site = {
                'id':           root.find(".//Site").get('id'),
                'year':         root.find(".//SiteYear").text,
                'name':         root.find(".//SiteName").text,
                'description':  root.find(".//Description").text.strip(),
                'article':      root.find(".//GazetteerArticle").text.strip(),
                'period':       [l.strip() for l in root.find(".//Period").text.split(',')],
                'latitude':     latitude,
                'longitude':    longitude,
                'location':     root.find(".//Location").text,
                'geohash':      geohash.encode(latitude, longitude),
            }
            
            rds.set(key, json.dumps(site))
            rds.zadd('sites', site['geohash'], int(site['id']))

        else:
            pass
            rds.set(key, '')

    return site
Example #27
File: shell.py Project: xlqian/addok
 def do_geohash(self, latlon):
     """Compute a geohash from latitude and longitude.
     GEOHASH 48.1234 2.9876"""
     try:
         lat, lon = map(float, latlon.split())
     except ValueError:
         print(red('Invalid lat and lon {}'.format(latlon)))
     else:
         print(white(geohash.encode(lat, lon, config.GEOHASH_PRECISION)))
Example #28
File: iprange.py Project: kenchung/geodis
 def __init__(self, range_min, range_max, lat, lon, zipcode='', country=''):
     self.range_min = range_min
     self.range_max = range_max
     self.lat = lat
     self.lon = lon
     self.zipcode = zipcode
     self.country = country
     self.geo_key = hasher.encode(lat, lon)
     self.key = '%s:%s:%s:%s' % (self.range_min, self.range_max, self.zipcode, self.country)
Example #29
File: core.py Project: digideskio/addok
 def geohash_key(self):
     if self.lat and self.lon and self._geohash_key is None:
         geoh = geohash.encode(self.lat, self.lon, config.GEOHASH_PRECISION)
         self._geohash_key = compute_geohash_key(geoh)
         if self._geohash_key:
             self.debug('Computed geohash key %s', self._geohash_key)
         else:
             self.debug('Empty geohash key, deleting %s', self._geohash_key)
     return self._geohash_key
Example #30
def test(n=10, scale=1, k=5, GeohashTreeCls=GeohashTree):
    locations = [(random.uniform(-90.0, 90.0), random.uniform(-180.0, 180.0)) for i in range(n)]
    g = GeohashTreeCls() # GeoPileTree()
    for p in locations:
        h = geohash.encode(*p)
        #print h
        #g[h] = random.randint(0, 1)
        g.setValue(h,random.randint(0, 9)) # "=" doesn't be used when g = GeoPileTree()
    g.printS()
    print (g.keys()[n/2], g[g.keys()[n/2]])
    _test_surroundings(g.keys()[n/2], g, scale)
    _test_nn_value(g.keys()[n/2], g, k)
    for p in locations:
        h = geohash.encode(*p)
        #g.removeMatchedValue(h, random.randint(0, 0))
        #del g[h]
    g.printS()
    return g
Example #31
    def get_geocodes(self, bbox):
        """Return a list of keys covering a given area.

        Parameters:

        bbox -- Bounding box of the desired region.
        """

        # TODO: Make this more efficient for sparse areas of the map.
        w, s, e, n = map(float, bbox)

        n = min(C.MAXGHLAT, n)  # work around a geohash library
        s = min(C.MAXGHLAT, s)  # limitation

        assert (w <= e and s <= n)

        gcset = set()
        gc = geohash.encode(s, w, self.precision)

        bl = geohash.bbox(gc)  # Box containing point (s,w).

        s_ = bl['s']
        while s_ < n:  # Step south to north.
            w_ = bl['w']

            gc = geohash.encode(s_, w_, self.precision)
            bb_sn = geohash.bbox(gc)  # bounding box in S->N direction

            while w_ < e:  # Step west to east.
                gcset.add(gc)

                bb_we = geohash.bbox(gc)  # in W->E direction
                w_ = bb_we['e']

                gc = geohash.encode(s_, w_, self.precision)

            s_ = bb_sn['n']

        assert (len(gcset) > 0)

        return [gc for gc in gcset]
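A self-contained sketch of the same south-to-north, west-to-east stepping over geohash bounding boxes, assuming the python-geohash package; the bounding-box values are arbitrary:

import geohash

def cover_bbox(w, s, e, n, precision=6):
    # collect the geohash cells whose boxes tile the (w, s, e, n) rectangle
    cells = set()
    lat = s
    while lat < n:  # step south to north
        lon = w
        row_box = geohash.bbox(geohash.encode(lat, lon, precision))
        while lon < e:  # step west to east
            cell = geohash.encode(lat, lon, precision)
            cells.add(cell)
            lon = geohash.bbox(cell)['e']
        lat = row_box['n']
    return cells

print(sorted(cover_bbox(13.3, 52.4, 13.5, 52.6)))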
Example #32
File: line_index.py Project: qdhqf/ult
def fill_geohashs(data,name,size,maxdistance,hashsize):
	global ghashdict
	# check whether the first point and last point are the same; if not, append to the table

	count = 0
	geohashlist = []
	tangslist = []
	currentdist = 0.
	neighbors = []
	ghashdict = {}
	dist = 0
	ind = 0
	for row in data:
		if count == 0:
			count = 1
			geohashlist.append([geohash.encode(row[1],row[0],9),'%s,%s,%s' % (geohash.encode(row[1],row[0],9),name,str(currentdist))])
		else:
			slope = get_slope(oldrow,row)
			x1,y1 = oldrow
			dist = distance(oldrow,row)

			if dist > hashsize / 5.0 or ind == 0:
				number = (dist / hashsize) * 5.0
				number = int(number)				

				if ind == 0 and not dist == 0 and not number > 10:
					ind = 1
					number = 10

				addghashs = generate_points_geohash(number,oldrow,row,name,size,currentdist,maxdistance)[1:]

				geohashlist += addghashs
			else:
				point = row
				geohashlist.append([geohash.encode(point[1],point[0],9),'%s,%s,%s' % (geohash.encode(point[1],point[0],9),name,str(currentdist))])

			currentdist += dist

		oldrow = row

	return geohashlist
Example #33
 def get(self, lat, lon):
     mhash = encode(lat, lon)[0:4]
     minzip = None
     if self.zips.has_key(mhash):
         mindist = -1
         for candidate in self.zips[mhash]:
             dist = self.distance([lat, lon],
                                  [candidate['lat'], candidate['lon']])
             if mindist == -1 or dist < mindist:
                 mindist = dist
                 minzip = candidate
     return minzip
Example #34
def get_hashsize(ul,size):
	# getting geohash for ul and lr
	# assuming 8 for the time being as abs min
	ulhash = geohash.encode(ul[1],ul[0],size)

	lat,long,latdelta,longdelta = geohash.decode_exactly(ulhash)

	latdelta,longdelta = latdelta * 2.0,longdelta * 2.0

	hashsize = ((latdelta ** 2) + (longdelta ** 2)) ** .5

	return hashsize
Example #35
def geohashing(lat, lng, precision=0):
    """ Geohash lat lng """
    if precision != 0:
        gh = geohash.encode(lat, lng, precision=precision)
    else:
        gh = geohash.encode(lat, lng)

    if geohash_coordinates:
        gc = geohash.decode(gh)
        return gh, gc
    else:
        return gh
Example #36
    def execute(self, current_location: Location) -> Optional[Location]:
        """Execution of the Movement Evaluation Policy"""

        if random.random() <= settings.COURIER_MOVEMENT_PROBABILITY:
            current_geohash = geohash.encode(*current_location.coordinates, precision=6)
            geohash_neighbors = geohash.neighbors(current_geohash)
            destination_geohash = random.choice(geohash_neighbors)
            destination_coordinates = geohash.decode(destination_geohash)

            return Location(lat=destination_coordinates[0], lng=destination_coordinates[1])

        return None
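A standalone sketch of the random-neighbour move above, assuming the python-geohash package; the coordinates are arbitrary:

import random
import geohash

lat, lng = 4.65, -74.06
current_cell = geohash.encode(lat, lng, precision=6)
destination_cell = random.choice(geohash.neighbors(current_cell))
print(current_cell, '->', destination_cell, geohash.decode(destination_cell))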
Example #37
def process_data():
    ph = request.args['ph']
    print ph
    bacteria = request.args['bacteria']
    lead = request.args['lead']
    latitude = float(request.args['latitude'])
    longitude = float(request.args['longitude'])
    water_index = calculate_water_index()
    print water_index
    geohash = Geohash.encode(latitude, longitude, 9)
    write_data(ph, bacteria, lead, longitude, latitude, water_index, geohash)
    return "Done!"
Example #38
 def get_location(self, ip):
     if self.config["local_range"] in ip:
         return self.config["default_location"]
     try:
         query = self.geoip_city.city(ip)
         ghash = geohash.encode(query.location.latitude,
                                query.location.longitude)
         return dict(country=query.country.name,
                     city=query.city.name,
                     geohash=ghash)
     except geoip2.errors.AddressNotFoundError:
         return dict(country="", city="", geohash="")
Example #39
def create_air_station_list(data, station_url):
    try:
        t_in_milliseconds = data["last_measurement"]["calibrated"]["when"][
            "$date"]
        t_date_time = datetime.datetime.fromtimestamp(t_in_milliseconds /
                                                      1000.0)
        coord = (convert_gps_2_latlng(
            data["last_measurement"]["calibrated"]["readings"]["GPS"]["lon"]),
                 convert_gps_2_latlng(data["last_measurement"]["calibrated"]
                                      ["readings"]["GPS"]["lat"]))

        station = {
            "id":
            "ein-aireas-" + str(int(data["_id"])),
            "type":
            "AirQualityObserved",
            "dateObserved":
            str(t_date_time),
            "location": {
                "type": "Point",
                "coordinates": coord
            },
            "geohash":
            geohash.encode(coord[1], coord[0]),
            "source":
            station_url,
            "relativeHumidity":
            data["last_measurement"]["calibrated"]["readings"]["RelHum"],
            "temperature":
            data["last_measurement"]["calibrated"]["readings"]["Temp"],
            "airQualityLevel":
            "moderate",
            "PM1":
            data["last_measurement"]["calibrated"]["readings"]["PM1"],
            "PM25":
            data["last_measurement"]["calibrated"]["readings"]["PM25"],
            "Ozon":
            data["last_measurement"]["calibrated"]["readings"]["Ozon"],
            "PM10":
            data["last_measurement"]["calibrated"]["readings"]["PM10"],
            "NO2":
            data["last_measurement"]["calibrated"]["readings"]["NO2"]
        }
        air_stations.append(station)

        # print(json.dumps(station, indent=4, sort_keys=True))
    except KeyError as e:
        error_desc = {
            "id": station_url,
            "desc": "Wrong format or missing fields!"
        }
        error_list.append(error_desc)
        print("Data source problem, skipping: " + station_url)
Example #40
def update_user_coordinate(longitude, latitude, user):
    if user.latest_longitude and user.latest_latitude:
        UserOldCoordinate.objects.create(user=user,
                                         longitude=user.latest_longitude,
                                         latitude=user.latest_latitude)
    user.latest_longitude = float(longitude)
    user.latest_latitude = float(latitude)
    geo_hash = geohash.encode(user.latest_latitude, user.latest_longitude)
    user.geo_hash = geo_hash
    user.updated_coordinate_time = datetime.now()
    user.save()
    return geo_hash
Example #41
File: line_index.py Project: qdhqf/ult
def generate_points_geohash(number_of_points,point1,point2,name,size,currentdist,maxdistance):
	# getting x points
	geohashlist = []
	if number_of_points == 0:
		return []

	x1,x2 = point1[0],point2[0]
	xdelta = (float(x2) - float(x1)) / float(number_of_points)
	xcurrent = x1

	# getting y points
	y1,y2 = point1[1],point2[1]
	ydelta = (float(y2) - float(y1)) / float(number_of_points)
	ycurrent = y1
	g1 = geohash.encode(y1,x1,size)
	geohashlist = ['GEOHASH',geohash.encode(y1,x1,size)]
	pointdelta = (xdelta ** 2 + ydelta ** 2) ** .5
	current = currentdist
	stringlist = [[g1,'%s,%s,%s' % (g1,name,str(currentdist),)]]
	count = 0
	strmaxdistance = str(maxdistance)
	while count < number_of_points:
		count += 1
		xcurrent += xdelta
		ycurrent += ydelta
		current += pointdelta
		ghash = geohash.encode(ycurrent,xcurrent,size)
		geohashlist.append(ghash)
		stringlist.append([ghash,'%s,%s,%s' % (ghash,name,str(current))])
	geohashlist.append(geohash.encode(point2[1],point2[0],size))
	lastdist = currentdist + distance(point1,point2)
	g2 = geohash.encode(y2,x2,size)

	stringlist.append([g2,'%s,%s,%s' % (g2,name,str(lastdist))])
	indexs = np.unique(geohashlist,return_index=True)[1]
	try:
		stringlist = [stringlist[i] for i in sorted(indexs)]
	except:
		return []
	return stringlist
Example #42
def main():
    args = parse_args()

    # get country geometry
    country = json.load(open(args.country_geojson))['features'][0]
    polygon = ee.Geometry.Polygon(country['geometry']['coordinates'])
    geohashes_country = polygon2geohash(polygon, precision=5, coarse_precision=5)

    # Get locations of sightings, and restrict to AOI
    df = pd.read_csv(args.hopper_csv)
    df['geohash'] = df[['Y', 'X']].apply(lambda x: geohash.encode(*x, precision=5), axis=1).values
    df = df.loc[df.STARTDATE > args.start_date].loc[df['geohash'].isin(geohashes_country)]
    df['STARTDATE'] = pd.to_datetime(df.STARTDATE)

    # Encode locations as geohashes and get surrounding geohashes
    gh = set(df['geohash'])
    for _ in range(30):
        for g in list(gh):
            gh |= set(geohash.expand(g))

    gh = list(gh.intersection(geohashes_country))

    random.shuffle(gh)
    gh = gh[:len(gh) // 3]
    gh.extend(list(df['geohash']))
    gh = list(set(gh))

    # Prepare to load data
    os.makedirs(args.outdir, exist_ok=True)

    # Get all geohashes of interest for around date where a hopper sighting occurs
    interval = 30
    delta = date.fromisoformat('2020-06-01') - date.fromisoformat(args.start_date)

    locs = []
    for i in range(int(delta.days/30)):
        start_date = date.fromisoformat(args.start_date) + timedelta(days=i*interval)
        end_date = start_date + timedelta(days=interval)
        for i in range(len(gh)):
            locs.append({'date_start': str(start_date),
                         'date_end': str(end_date),
                         'geohash': gh[i]})

    # Run jobs in parallel
    jobs = []
    for loc in locs:
        job = delayed(get_one_sentinel)(loc, outdir=args.outdir)
        jobs.append(job)

    random.shuffle(jobs)

    _ = Parallel(backend='multiprocessing', n_jobs=args.n_jobs, verbose=1, batch_size=4)(tqdm(jobs))
Example #43
    def parse_category(self, response):
        result = json.loads(response.text)
        category_id_list = list()
        for i in range(1, len(result)):
            detail = result[i]
            sub_categories = detail.get('sub_categories')
            for sub in sub_categories:
                category_item = CategoryItem()
                category_item['count'] = sub.get('count')
                category_item['id'] = sub.get('id')
                image = sub.get('image_url')
                if image.endswith('png'):
                    category_item['image_url'] = '/'.join([
                        'http://fuss10.elemecdn.com', image[0], image[1:3],
                        image[3:]
                    ]) + '.' + image[-3:]
                elif image.endswith('jpeg'):
                    category_item['image_url'] = '/'.join([
                        'http://fuss10.elemecdn.com', image[0], image[1:3],
                        image[3:]
                    ]) + '.' + image[-4:]
                else:
                    category_item['image_url'] = '/'.join([
                        'http://fuss10.elemecdn.com', image[0], image[1:3],
                        image[3:]
                    ]) + '.' + image[32:]
                category_item['level'] = sub.get('level')
                category_item['name'] = sub.get('name') + detail.get('name') if category_item['id'] == \
                                                                                detail.get('ids')[0] else sub.get(
                    'name')
                yield category_item
                category_id_list.append(sub.get('id'))

        self.logger.debug('category_id_list: {}'.format(category_id_list))
        lat = response.meta.get('lat')
        lng = response.meta.get('lng')
        geohashcode = geohash.encode(lat, lng)
        # for category_id in category_id_list[:]:
        for category_id in [252, 254, 271, 273]:
            yield Request(self.category_url.format(lat=lat,
                                                   lng=lng,
                                                   category_id=category_id,
                                                   geohashcode=geohashcode,
                                                   offset=0),
                          callback=self.parse_shops,
                          meta={
                              'lat': lat,
                              'lng': lng,
                              'category_id': category_id,
                              'geohashcode': geohashcode,
                              'offset': 0
                          })
Example #44
File: models.py Project: hanurp/neighbors
    def save(self,
             force_insert=False,
             force_update=False,
             using=None,
             update_fields=None):
        self.geo_hash = geohash.encode(longitude=self.lon, latitude=self.lat)
        if update_fields:
            update_fields = update_fields + ['geo_hash']

        super(Person, self).save(force_insert=force_insert,
                                 force_update=force_update,
                                 using=using,
                                 update_fields=update_fields)
Example #45
    def polygon_into_geohash(self, geo: BaseGeometry, accuracy: int = 7) -> list:
        """
        Split the polygon into geohash blocks of a fixed precision and arrange them into a matrix by position

        Parameters
        ----------
        geo : shapely.geometry.base.BaseGeometry
            The target polygon
        accuracy : int, optional
            Geohash precision, defaults to 7

        Returns
        ----------
        list
            The list of geohash strings produced by the split

        Examples
        ----------
        >>> g = GeohashOperator()
        >>> p = Polygon([[116.40233516693117, 39.95442126877703], [116.40233516693117, 39.95744689749303], [116.4070386902313, 39.95744689749303], [116.4070386902313, 39.95442126877703]])
        >>> g.polygon_into_geohash(p)
        [['wx4g2f1', 'wx4g2f4', 'wx4g2f5', 'wx4g2fh', 'wx4g2fj'],
        ['wx4g2cc', 'wx4g2cf', 'wx4g2cg', 'wx4g2cu', 'wx4g2cv'],
        ['wx4g2c9', 'wx4g2cd', 'wx4g2ce', 'wx4g2cs', 'wx4g2ct'],
        ['wx4g2c3', 'wx4g2c6', 'wx4g2c7', 'wx4g2ck', 'wx4g2cm']]

        See Also
        ----------
        nearby_geohash : Find the geohash codes of neighbouring blocks
        geohash_to_polygon : Convert a geohash string into its rectangle
        geohash_lonlac : Get the boundary longitude/latitude of a geohash string
        """
        boundary = geo.bounds
        geo_list, line_geohash = [], []
        horizontal_geohash = vertical_geohash = geohash.encode(boundary[1], boundary[0], accuracy)
        while True:
            vertical_geohash_polygon = self.geohash_to_polygon(vertical_geohash)
            if geo.contains(vertical_geohash_polygon) or geo.intersects(vertical_geohash_polygon):
                line_geohash.append(vertical_geohash)
                vertical_geohash = self.nearby_geohash(str(vertical_geohash), 3)
            elif self.geohash_lonlac(vertical_geohash, 'w') < boundary[2]:
                vertical_geohash = self.nearby_geohash(str(vertical_geohash), 3)
            else:
                if line_geohash:
                    geo_list.append(line_geohash)
                    line_geohash = []
                horizontal_geohash = vertical_geohash = self.nearby_geohash(horizontal_geohash, 1)
                horizontal_geohash_polygon = self.geohash_to_polygon(horizontal_geohash)
                if not (geo.contains(horizontal_geohash_polygon) or geo.intersects(horizontal_geohash_polygon) or (
                        self.geohash_lonlac(horizontal_geohash, 's') < boundary[3])):
                    return geo_list[::-1]
Example #46
def load_item(info_jsn, id_dic):
    item = dict()
    item['business_id'] = info_jsn['id']
    item['name'] = info_jsn['name']
    item['address'] = info_jsn['address']
    item['telephone'] = info_jsn['call_center']
    item['month_saled'] = info_jsn['month_sale_num']
    item['shop_announcement'] = info_jsn['bulletin']
    longitude = str(info_jsn['longitude'])
    latitude = str(info_jsn['latitude'])
    item['latitude'] = latitude[:2] + '.' + latitude[2:]
    item['longitude'] = longitude[:3] + '.' + longitude[3:]
    item['geohash'] = encode(float(item['latitude']), float(item['longitude']))
    item['avg_rating'] = info_jsn['wm_poi_score']
    item['business_url'] = 'http://i.waimai.meituan.com/wxi/restaurant/%s' % info_jsn['id']
    item['photo_url'] = info_jsn['pic_url']
    item['float_minimum_order_amount'] = info_jsn['min_price']
    item['float_delivery_fee'] = info_jsn['shipping_fee']

    item['delivery_consume_time'] = info_jsn['avg_delivery_time']
    item['work_time'] = info_jsn['shipping_time']

    md5 = ''
    for k, j in item.items():
        md5 += str(j)
    item['md5'] = hashlib.md5(md5.encode('utf8')).hexdigest()

    item['mt_poi_id'] = id_dic[str(item['business_id'])]

    item = (
        item['business_id'],
        item['name'],
        item['address'],
        item['telephone'],
        item['month_saled'],
        item['shop_announcement'],
        item['latitude'],
        item['longitude'],
        item['geohash'],
        item['avg_rating'],
        item['business_url'],
        item['photo_url'],
        item['float_minimum_order_amount'],
        item['float_delivery_fee'],
        item['delivery_consume_time'],
        item['work_time'],
        item['md5'],
        item['mt_poi_id']
    )
    return item
Example #47
    def on_result_ping(self, ping):
        if self.db_client:
            try:
                if not self.probes.has_key(ping["prb_id"]):
                    data = requests.get("https://atlas.ripe.net/api/v2/probes/%s" % ping["prb_id"])
                    if data.status_code == 200:
                        self.probes[ping["prb_id"]] = data.json()
                        try:
                            self.geohash[ping["prb_id"]] = geohash.encode(data.json()["geometry"]["coordinates"][1], data.json()["geometry"]["coordinates"][0])
                        except:
                            print "Failed on ", data.json()["geometry"]["coordinates"]

                json_body = [
                    {
                        "measurement": "ping_avg",
                        "tags": {
                            "prb_id": "{}".format(ping['prb_id']),
                            "msm_id": "{}".format(ping['msm_id']),
                            "src_addr": "{}".format(ping['src_addr']),
                            "dst_addr": "{}".format(ping['dst_addr']),
                            "country" : self.probes[ping["prb_id"]].get("country_code", ""),
                            "asn" : self.probes[ping["prb_id"]].get("asn_v4", 0),
                            "geohash" : "{}".format(self.geohash[ping["prb_id"]]),
                        },
                        # convert time into nanoseconds
                        "time": int(ping['timestamp']) * (10**9),
                        "fields": {
                            "value": float(ping['avg'])
                        }
                    },
                    {
                        "measurement": "ping_loss",
                        "tags": {
                            #"prb_id": "{}".format(ping['prb_id']),
                            #"msm_id": "{}".format(ping['msm_id']),
                            "src_addr": "{}".format(ping['src_addr']),
                            "dst_addr": "{}".format(ping['dst_addr']),
                            "country" : self.probes[ping["prb_id"]]["country_code"],
                            "asn" : self.probes[ping["prb_id"]].get("asn_v4", 0),
                            "geohash" : "{}".format(self.geohash[ping["prb_id"]]),
                        },
                        # convert time into nanoseconds
                        "time": int(ping['timestamp']) * (10**9),
                        "fields": {
                            "value": int(ping["sent"])-int(ping["rcvd"]),
                        }
                    },
                ]
                self.db_client.write_points(json_body)
            except KeyError:
                pass
Example #48
 def get_near_palce(data):
     driver_geohash_value = geohash.encode(data['driver_lng'],
                                           data['driver_lat'], 10)
     points = FinalGeoHashPickUpPlace.objects.filter(
         geohash_value__startswith=driver_geohash_value)
     res = []
     for point in points:
         t = {}
         t['pickup_longitude'] = point.pickup_longitude
         t['pickup_latitude'] = point.pickup_latitude
         res.append((t, point.point_num))
     res = sorted(res, key=lambda x: x[1], reverse=True)
     print(len(res))
     return res
Example #49
File: build.py Project: airq-dev/airq-data
def create_sensors():
    print("Creating sensors")
    results = get_purpleair_data()
    num_created = 0
    for result in results:
        if result.get("DEVICE_LOCATIONTYPE") != "outside":
            continue
        if result.get("ParentID"):
            # I don't know what this means but feel it's probably
            # best to skip?
            continue
        if result.get("LastSeen") < datetime.datetime.now().timestamp() - (
                24 * 60 * 60):
            # Out of date / maybe dead
            continue
        pm25 = result.get("PM2_5Value")
        if not pm25:
            continue
        try:
            pm25 = float(pm25)
        except (TypeError, ValueError):
            continue
        if pm25 < 0 or pm25 > 500:
            # Something is very wrong
            continue
        latitude = result.get("Lat")
        longitude = result.get("Lon")
        if not latitude or not longitude:
            continue
        gh = geohash.encode(latitude, longitude)
        conn = get_connection()
        cursor = conn.cursor()
        cursor.execute(
            textwrap.dedent("""
            INSERT INTO sensors
            VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
        """),
            (
                result["ID"],
                round(float(latitude), ndigits=6),
                round(float(longitude), ndigits=6),
                *list(gh),
            ),
        )
        conn.commit()
        num_created += 1
        if num_created % 50 == 0:
            print(
                f"Created {num_created} sensors of {len(results)} purpleair sensors"
            )
Example #50
    def six_words(self, lat_long):
        """Convert coordinate to a combination of six words

        The coordinate is defined by latitude and longitude
        in degrees.

        With six words the word list contains only words
        which are short, easy to pronounce and easy distinguish.
        """
        lat, lon = lat_long
        gh = geohash.encode(lat, lon, 9)
        words = "-".join(self.six_wordlist[p]
                         for p in self.to_bytes(self.pad(gh)))
        return words
Example #51
 def Loop(self):
     """Adds point data with GPS values to point queue."""
     gps_sensor = GPS()
     while not self.stop_process_signal.value:
         report = gps_sensor.GetReport()
         if report:
             point = gps_pb2.Point()
             point.lat = report.lat
             point.lon = report.lon
             if report.get('alt'):
                 point.alt = report.alt
             point.speed_ms = report.speed
             point.geohash = geohash.encode(point.lat, point.lon)
             self.AddPointToQueue(point)
Example #52
def generate_points_geohash(number_of_points,point1,point2,areaindex,size):
	# getting x points
	geohashlist = []

	x1,x2 = point1[0],point2[0]
	xdelta = (float(x2) - float(x1)) / float(number_of_points)
	xcurrent = x1

	# getting y points
	y1,y2 = point1[1],point2[1]
	ydelta = (float(y2) - float(y1)) / float(number_of_points)
	ycurrent = y1

	geohashlist = ['GEOHASH',geohash.encode(point1[1],point1[0],size)]

	count = 0
	while count < number_of_points:
		count += 1
		xcurrent += xdelta
		ycurrent += ydelta
		geohashlist.append(geohash.encode(ycurrent,xcurrent,size))
	geohashlist.append(geohash.encode(point2[1],point2[0],size))
	return geohashlist
Example #53
def daily():
    coords = {
        'lat': float(request.args.get('lat')),
        'long': float(request.args.get('long'))
    }
    hashgeo = geohash.encode(coords['lat'], coords['long'])[:6]
    forecast = requests.get(f'https://api.weather.bom.gov.au/v1/locations/{hashgeo}/forecasts/daily')
    try:
        print(f'{forecast.json()}')
        return forecast.json()
    except Exception:
        print("Exception")
        print(forecast)
        return json.dumps({"error": "Invalid data"})
Example #54
def neighbor(geo_hash, direction):
    """
    Find neighbor of a geohash string in certain direction.
    :param geo_hash: geohash string
    :type geo_hash: str
    :param direction: Direction is a two-element array, i.e. [1,0] means north, [1,1] means northeast
    :type direction: list
    :return: geohash string
    :rtype: str
    """
    decode_result = geohash.decode_exactly(geo_hash)
    neighbor_lat = decode_result[0] + direction[0] * decode_result[2] * 2
    neighbor_lon = decode_result[1] + direction[1] * decode_result[3] * 2
    return geohash.encode(neighbor_lat, neighbor_lon, len(geo_hash))
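A quick usage sketch of the function above, assuming the python-geohash package and the neighbor function defined here; the input point is arbitrary:

import geohash

cell = geohash.encode(38.70, -90.29, 8)
north = neighbor(cell, [1, 0])      # one cell height to the north
northeast = neighbor(cell, [1, 1])  # diagonal neighbour
print(cell, north, northeast)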
Example #55
def findNearestProvince(lat, lon, K, ifset, folderPath="./dics/"):
    # print(lat, lon)
    #folderPath = "./dics/"
    precis = 6
    code = geohash.encode(lat, lon, precision=precis)
    print("GeoHash Code: " + code)

    # neighbours = geohash.neighbours(code)
    pre3 = code[0:3]
    json_list = os.listdir(folderPath)
    list.sort(json_list)
    # print(json_list)

    n1 = findHelper(pre3, json_list, 10)  #find 10 nearest json files
    # print(n1)
    # print(n1,n2)

    middle = n1[-1]
    count = K + 1
    i = 0
    provinces = []
    keys = []
    minus = 1
    incre = 0
    laslons = []
    while count > 1:
        fileName = folderPath + n1[middle + i]
        incre = incre + 1
        i = i + incre * minus
        minus = minus * (-1)
        with open(fileName) as f:
            file = json.load(f)

            key, res = dictFinder(code, file, count)

            count = count - int(key[-1])

            for loop in range(len(key) - 1):
                provinces.append(res[loop])
                keys.append(distanceCal(code, key[loop]))
                laslons.append(geohash.decode(key[loop]))

    # if ifset==0 : return keys,provinces,laslons
    check = set()
    finalResult = []
    for j in range(len(keys)):
        if provinces[j] in check and ifset == 1: continue
        check.add(provinces[j])
        finalResult.append([keys[j], provinces[j], laslons[j]])
    return finalResult
Example #56
 def parse(self, response):
     res = json.loads(response.text, encoding='gbk')
     city = CitiesItem()
     for i in res:
         for data in res[i]:
             city['abbr'] = data['abbr']
             city['id'] = data['id']
             city['latitude'] = data['latitude']
             city['longitude'] = data['longitude']
             city['geohash'] = geohash.encode(data['latitude'],
                                              data['longitude'])
             city['name'] = data['name']
             city['pinyin'] = data['pinyin']
             yield city
Example #57
def limit_df_coordinates(df, precision=5):
    '''
    Combines coordinates based on geohash precision
    :param coordinates: list of (lat, lon) tuples
    :param precision: geohash precision
    :return:
    '''

    hashes = [tup + (encode(getattr(tup,'lat'), getattr(tup,'lon'), precision),) for tup in df.itertuples()]
    temp_dict = dict()
    for tup in hashes: temp_dict[tup[-1]] = temp_dict.get(tup[-1], tup[1:-1])
    used_coords = list(temp_dict.values())
    del temp_dict
    return pd.DataFrame.from_records(used_coords,  columns=df.columns)
Example #58
File: models.py Project: wgilpin/shout
 def get_unique_place(cls, request, return_existing=True):
   try:
     it = ndb.Key(Item,int(request.get('key'))).get()
   except:
     it = None
   if it:
     logging.debug('get_unique_place exists '+it.place_name)
     return it if return_existing else None
   place_name = request.get('new-title')
   if not place_name:
     place_name = request.get('place_name')
   logging.debug('get_unique_place name '+place_name)
   if 'latitude' in request.params:
     lat = float(request.get('latitude'))
   else:
     lat = float(request.get('lat'))
   if 'longitude' in request.params:
     lng = float(request.get('longitude'))
   else:
     lng = float(request.get('lng'))
   geo_code = geohash.encode(lat, lng, precision=6)
   local_results = Item.query().\
     filter(Item.geo_hash >geo_code).\
     filter(Item.geo_hash < geo_code + "{")
   lower_name = place_name.lower()
   for place in local_results:
     if lower_name in place.place_name.lower():
       logging.debug('get_unique_place Found "%s"@[%f.4,%f.4]'%
                     (place_name,lat,lng))
       return place if return_existing else None
   it = Item(place_name=place_name)
   it.lat = lat
   it.lng = lng
   it.geo_hash = geohash.encode(lat, lng)
   logging.debug("get_unique_place - create item %s@[%f.4,%f.4]"%
                (it.place_name, it.lat, it.lng))
   return it
Example #59
def _sensors_sync(
    purpleair_data: typing.List[typing.Dict[str,
                                            typing.Any]]) -> typing.List[int]:
    existing_sensor_map = {s.id: s for s in Sensor.query.all()}

    updates = []
    new_sensors = []
    moved_sensor_ids = []
    for result in purpleair_data:
        if _is_valid_reading(result):
            sensor = existing_sensor_map.get(result["ID"])
            latitude = result["Lat"]
            longitude = result["Lon"]
            pm25 = float(result["PM2_5Value"])
            data: typing.Dict[str, typing.Any] = {
                "id": result["ID"],
                "latest_reading": pm25,
                "updated_at": result["LastSeen"],
            }

            if (not sensor or sensor.latitude != latitude
                    or sensor.longitude != longitude):
                gh = geohash.encode(latitude, longitude)
                data.update(
                    latitude=latitude,
                    longitude=longitude,
                    **{
                        f"geohash_bit_{i}": c
                        for i, c in enumerate(gh, start=1)
                    },
                )
                moved_sensor_ids.append(result["ID"])

            if sensor:
                updates.append(data)
            else:
                new_sensors.append(Sensor(**data))

    if new_sensors:
        logger.info("Creating %s sensors", len(new_sensors))
        db.session.bulk_save_objects(new_sensors)
        db.session.commit()

    if updates:
        logger.info("Updating %s sensors", len(updates))
        db.session.bulk_update_mappings(Sensor, updates)
        db.session.commit()

    return moved_sensor_ids
Example #60
 def log(self, client, session, runNo, interval):  # edit for JSON class
     for i in range(0, len(self.sensorData)):
         if (self.sensorData[i] == "None"):  # skip if sensor has no data
             continue
         else:
             print "TIMESTAMP: " + self.sensorData[i][0]
             tags = {"run": runNo}
             fields = {}
             if i == 3:  # special insertion for GPS data
                 rawLatitude = self.sensorData[i][
                     2]  # assuming value is North of equator
                 assert (len(rawLatitude) >= 4)  # assert valid latitude
                 minutesIndex = rawLatitude.find('.') - 2
                 if (minutesIndex < 0):  # no decimal found
                     minutesIndex = 2
                 latitude = float(rawLatitude[0:minutesIndex]) + float(
                     rawLatitude[minutesIndex:]) / 60
                 #print "LAT: " + str(rawLatitude) + " | " + str(rawLatitude[0:minutesIndex]) + " | " + str(rawLatitude[minutesIndex:]) + " | " + str(latitude) + " (" + str(minutesIndex) + ")"
                 rawLongitude = self.sensorData[i][
                     3]  # assuming value is West of Prime Meridian (hence -1 multiplication)
                 assert (len(rawLongitude) >= 4)  # assert valid longitude
                 minutesIndex = rawLongitude.find('.') - 2
                 if (minutesIndex < 0):  # no decimal found
                     minutesIndex = 2
                 longitude = -1 * (float(rawLongitude[0:minutesIndex]) +
                                   float(rawLongitude[minutesIndex:]) / 60)
                 geohashValue = str(geohash.encode(latitude, longitude))
                 #print "LONG: " + str(rawLongitude) + " | " + str(rawLongitude[0:minutesIndex]) + " | " + str(rawLongitude[minutesIndex:]) + " | " + str(longitude) + " (" + str(minutesIndex) + ")"
                 #print "GEO:" + geohashValue
                 tags["geohash"] = geohashValue
                 fields["metric"] = 1
             else:
                 for j in range(2, len(self.sensorData[i])):
                     if (self.sensorData[i][j] == ""
                         ):  # skip if sensor value is empty
                         continue
                     else:
                         fields[sensorDict.get(self.sensorData[i][1])[
                             j - 2]] = self.sensorData[i][
                                 j]  # create dictionary for JSON body
             json_body = [{
                 "measurement": session,
                 "tags": tags,
                 "time": time.ctime(float(self.sensorData[i][0])),
                 "fields": fields
             }]
             # Write JSON to InfluxDB
             client.write_points(json_body)
             print "LOGGED TO INFLUXDB\n"