def mapper(self, _, line):
    for row in csv.reader([line]):
        pickup_lat = row[17]
        pickup_lon = row[18]
        dropoff_lat = row[20]
        dropoff_lon = row[21]
        trip_seconds = row[4]
        # Skips header
        if trip_seconds == 'Trip Seconds':
            final_geohash = None
            trip_seconds = 0
            continue
        # Skips rows with empty cells
        if not pickup_lat or not pickup_lon \
                or not dropoff_lat or not dropoff_lon \
                or not trip_seconds:
            final_geohash = None
            trip_seconds = 0
            continue
        time_of_day = process_time(row)
        if not time_of_day:
            final_geohash = None
            trip_seconds = 0
        else:
            pickup_geohash = geohash2.encode(float(pickup_lat), float(pickup_lon), 7)
            dropoff_geohash = geohash2.encode(float(dropoff_lat), float(dropoff_lon), 7)
            final_geohash = pickup_geohash + "_" + dropoff_geohash + "_" + time_of_day
        yield final_geohash, (trip_seconds, 1)
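# Added sketch (not from the source): a minimal mrjob-style reducer that
# could pair with the mapper above, averaging trip duration from the
# (seconds, count) pairs. The method name and placement are assumptions.
def reducer(self, key, values):
    total_seconds = 0.0
    total_trips = 0
    for seconds, count in values:
        total_seconds += float(seconds)
        total_trips += count
    if key is not None and total_trips:
        # average trip duration for this pickup/dropoff/time-of-day key
        yield key, total_seconds / total_trips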
def lochasher(lines):
    hashDict = {}
    for line in lines:
        if type(line[1]) != tuple:
            hashed = Geohash.encode(round(float(line[0][0]), 6),
                                    round(float(line[0][1]), 6), 7)
            hashDict.setdefault(hashed, []).append(line)
        else:
            hashed = Geohash.encode(round(float(line[0][1]), 6),
                                    round(float(line[0][0]), 6), 7)
            hashedneighbors = geohash.neighbors(hashed)
            hashedneighbors.append(hashed)
            hashDict.setdefault((' '.join(hashedneighbors), line[1]), []).append(line)
    return hashDict
def putDataInfoMysql():
    sqlConnection = my.connect(host="amina.uhurulabs.org", user="******",
                               passwd="", db="sensors")
    cursor = sqlConnection.cursor()
    for key in data["stations"]:
        elevation = str(key.get("elevation"))
        name = str(key.get("name"))
        lng = str(key["location"].get("lng"))
        lat = str(key["location"].get("lat"))
        lastMeasurement = str(key.get("lastMeasurement"))
        firstMeasurement = str(key.get("firstMeasurement"))
        timezoneOffset = str(key.get("timezoneOffset"))
        battery = str(key.get("battery"))
        tahmoId = str(key.get("id"))
        deviceId = str(key.get("deviceId"))
        first = convertToUnixTimeStamp(firstMeasurement)
        gHash = Geohash.encode((key["location"].get("lat")),
                               (key["location"].get("lng")), precision=10)
        #print tahmoId, deviceId, name, lng, lat, elevation, battery, firstMeasurement, first, lastMeasurement, gHash
        sqlCommand = ("INSERT INTO weatherstations (tahmoId, name, longitude, latitude, "
                      "elevation, battery, deviceId, firstMeasurement, lastMeasurement, geohash) "
                      "VALUES ('" + tahmoId + "','" + name + "','" + lng + "','" + lat + "','"
                      + elevation + "','" + battery + "','" + deviceId + "','"
                      + firstMeasurement + "','" + lastMeasurement + "','" + gHash + "')")
        print sqlCommand
        try:
            cursor.execute(sqlCommand)
            sqlConnection.commit()
        except my.Error as e:
            print e
            sqlConnection.rollback()
    sqlConnection.close()
def update_restaurant(restaurant):
    print('Updating DynamoDB with restaurant {} in table {}'.format(
        restaurant['restaurant-id'], config.restaurants_dynamodb_table_name))
    geohash = Geohash.encode(
        restaurant['restaurant-location']['lat'],
        restaurant['restaurant-location']['lng'],
        precision=config.restaurants_dynamodb_geohash_precision)
    print('DYNAMODB: Adding restaurant: {} ({})'.format(
        restaurant['restaurant-id'], geohash))
    response = config.restaurants_dynamodb_table.update_item(
        Key={
            config.restaurants_pkey: geohash,
            config.restaurants_skey: restaurant['restaurant-id']
        },
        UpdateExpression="SET #restaurant_name = :restaurant_name",
        ExpressionAttributeNames={'#restaurant_name': 'restaurant-name'},
        ExpressionAttributeValues={
            ':restaurant_name': restaurant['restaurant-name']
        },
        ReturnValues="UPDATED_NEW")
    return response
def to_influxdb_point(station_data):
    return {
        'measurement': 'bikes',
        'tags': {
            'scheme_id': station_data['schemeId'],
            'scheme_name': station_data['schemeShortName'],
            'station_id': station_data['stationId'],
            'station_name': station_data['name'],
            'station_name_irish': station_data['nameIrish']
        },
        'time': TZ_CORK.localize(
            dateutil.parser.parse(station_data['dateStatus'],
                                  dayfirst=True)).isoformat(),
        'fields': {
            'bikes_available': station_data['bikesAvailable'],
            'docks_available': station_data['docksAvailable'],
            'docks_count': station_data['docksCount'],
            'station_status': station_data['status'],
            'latitude': float(station_data['latitude']),
            'longitude': float(station_data['longitude']),
            'geohash': Geohash.encode(float(station_data['latitude']),
                                      float(station_data['longitude']), 12)
        }
    }
def parse_category(self, response):
    json_data = json.loads(response.body)
    cates = self.__parse_category(json_data)
    geo_hash_val = geohash.encode(response.meta['item']['lat'],
                                  response.meta['item']['lng'], precision=11)
    for cate in cates:
        cate_id = cate['cate1_id']
        if 'cate2_id' in cate:
            cate_id = cate['cate2_id']
        url = "https://mainsite-restapi.ele.me/shopping/restaurants?extras%5B%5D=activities"
        url += "&geohash=%s&latitude=%s&limit=24&longitude=%s" % (
            geo_hash_val, response.meta['item']['lat'], response.meta['item']['lng'])
        url += "&restaurant_category_ids%%5B%%5D=%s&sign=%s&offset=0" % (
            cate_id, int(time.time() * 1000))
        request_context = {
            'url': url,
            'grab_type': GRAB_TYPE_SHOPLIST,
            'cate1': cate['cate1_name'],
            'city': response.meta['item']['city'],
            'spot_id': response.meta['item']['spot_id'],
            'spot_name': response.meta['item']['spot_name'],
            'lng': response.meta['item']['lng'],
            'lat': response.meta['item']['lat'],
            'offset': 0
        }
        if 'cate2_id' in cate:
            request_context['cate2'] = cate['cate2_name']
        yield scrapy.Request(url, dont_filter=True, callback=self.parse_shop,
                             meta={'item': request_context})
def test_proximity(self):
    famhash = Geohash.encode(self.family[0], self.family[1], precision=20)
    nexthash = Geohash.encode(self.next_door[0], self.next_door[1], precision=20)
    sturthash = Geohash.encode(self.sturt_mall[0], self.sturt_mall[1], precision=20)
    rockhash = Geohash.encode(self.the_rock[0], self.the_rock[1], precision=20)
    assert famhash[:5] == nexthash[:5]
    assert famhash[:4] == sturthash[:4]
    assert famhash[:2] == rockhash[:2]
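# Added illustration (not from the source): nearby points usually share a
# long common geohash prefix, which is what the assertions above exploit.
# Caveat: two very close points can still straddle a cell boundary and share
# almost no prefix, so prefix comparison is only a heuristic. The
# coordinates below are made-up assumptions.
a = Geohash.encode(-35.1082, 147.3598)
b = Geohash.encode(-35.1086, 147.3599)
print(a, b)  # inspect how many leading characters agree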
def testhashgeo():
    # Testing hashgeo
    cookies = configparser.get('google', 'cookies')
    headers = configparser.get('google', 'headers')
    url = "https://www.google.com/maps/timeline/kml?authuser=0&pb=!1m8!1m3!1i{0}!2i{1}!3i{2}!2m3!1i{0}!2i{1}!3i{2}".format(year, month, day)
    r = requests.get(url, headers=headers, cookies=cookies)
    o = xmltodict.parse(r.text)
    name = o['kml']['Document']['name']
    keys = []
    values = []
    when = o['kml']['Document']['Placemark']['gx:Track']['when']
    for i in when:
        newdate = (parser.parse(i)).astimezone(tz.tzlocal())
        a = newdate.strftime("%Y-%m-%d %H:%M:%S")
        keys.append(a)
    where = o['kml']['Document']['Placemark']['gx:Track']['gx:coord']
    for i in where:
        lon, lat = i[:-2].split()
        coord = lat + "," + lon
        hashtest = Geohash.encode(float(lat), float(lon))
        print str(coord) + " - " + str(hashtest)
def addUsers(response):
    for i in response["hits"]["hits"]:
        text = ""
        lat = round(i["_source"]["location"]["latitude"], 3)
        lon = round(i["_source"]["location"]["longitude"], 3)
        ghash = Geohash.encode(lat, lon)
        link = i["_source"]["link"]
        id = i["_id"]
        print id
        datetime = i["_source"]["created_time"]
        words = []
        if i["_source"]["caption"]:
            text = i["_source"]["caption"]["text"].encode("ascii", "ignore").replace("\n", " ").replace("\t", " ")
            # words = i['_source']['caption']['text'].encode('ascii','ignore').replace('\n',' ').replace('\t',' ').lower().split()
        username = i["_source"]["user"]["username"]
        output.write(
            "\t".join(
                (
                    id,
                    str(lat),
                    str(lon),
                    ghash,
                    link,
                    str(datetime),
                    username,
                    text,
                    i["_source"]["images"]["standard_resolution"]["url"],
                )
            )
            + "\n"
        )
def createLocationObservation(iesGraph, mmsi, timestamp, lat, lon, obs=None):
    print(mmsi, timestamp)
    # Add the location transponder - we don't know this is necessarily a vessel.
    # All we know is that we have a LocationTransponder.
    lt = createLocationTransponder(iesGraph=iesGraph, mmsi=mmsi)
    # Now create the observation event
    lo = instantiate(iesGraph=iesGraph, _class=locationObservation)
    # If track emulation is not required, obs will be None. If it's not None,
    # make the LocationObservation (lo) part of the overall track observation
    if obs:
        addToGraph(iesGraph=iesGraph, subject=lo, predicate=ipao, obj=obs)
    # ...and the ParticularPeriod in which the observation occurred
    putInPeriod(iesGraph=iesGraph, item=lo, iso8601TimeString=timestamp)
    # And involve the transponder in that location observation
    ltPart = instantiate(iesGraph=iesGraph, _class=observedTarget)
    addToGraph(iesGraph=iesGraph, subject=ltPart, predicate=ipo, obj=lt)  # participation of the transponder
    addToGraph(iesGraph=iesGraph, subject=ltPart, predicate=ipi, obj=lo)  # participation in the LocationObservation
    # Now the observed location, a geopoint with a lat and long - using a
    # geohash to give each point a unique uri
    gp = URIRef(dataUri + "geohash_" + Geohash.encode(lat, lon))
    instantiate(iesGraph=iesGraph, _class=geoPoint, instance=gp)
    # Add the lat and long values as identifiers of the geopoint...firstly
    # creating repeatable URIs for them so they don't overwrite
    latObj = URIRef(gp.toPython() + "_lat")
    lonObj = URIRef(gp.toPython() + "_lon")
    instantiate(iesGraph=iesGraph, _class=latitude, instance=latObj)
    instantiate(iesGraph=iesGraph, _class=longitude, instance=lonObj)
    addToGraph(iesGraph=iesGraph, subject=gp, predicate=iib, obj=latObj)
    addToGraph(iesGraph=iesGraph, subject=gp, predicate=iib, obj=lonObj)
    # Add the representation values to the lat and lon objects
    addToGraph(iesGraph=iesGraph, subject=latObj, predicate=rv,
               obj=Literal(lat, datatype=XSD.string))
    addToGraph(iesGraph=iesGraph, subject=lonObj, predicate=rv,
               obj=Literal(lon, datatype=XSD.string))
    # Now the participation of the GeoPoint in the Observation
    gpPart = instantiate(iesGraph=iesGraph, _class=observedLocation)
    addToGraph(iesGraph=iesGraph, subject=gpPart, predicate=ipo, obj=gp)  # participation of the GeoPoint
    addToGraph(iesGraph=iesGraph, subject=gpPart, predicate=ipi, obj=lo)  # participation in the LocationObservation
def handle_event(self, event, sender, level, formatted_msg, data):
    if self.mqttc == None:
        return
    #sender_name = type(sender).__name__
    #if formatted_msg:
    #    message = "[{}] {}".format(event, formatted_msg)
    #else:
    #    message = '{}: {}'.format(event, str(data))
    if event == 'catchable_pokemon':
        #self.mqttc.publish("pgomapcatch/all", str(data))
        #print data
        if data['pokemon_id']:
            #self.mqttc.publish("pgomapcatch/all/catchable/"+str(data['pokemon_id']), str(data))
            # precision=4 mean 19545 meters, http://stackoverflow.com/questions/13836416/geohash-and-max-distance
            geo_hash = Geohash.encode(data['latitude'], data['longitude'], precision=4)
            self.mqttc.publish(
                "pgomapgeo/" + geo_hash + "/" + str(data['pokemon_id']),
                str(data['latitude']) + "," + str(data['longitude']) + "," +
                str(data['encounter_id']) + "," + str(data['pokemon_id']) + "," +
                str(data['expiration_timestamp_ms']) + "," + str(data['pokemon_name']))
            self.mqttc.publish(
                "pgomapcatch/all/catchable/" + str(data['pokemon_id']),
                str(data['latitude']) + "," + str(data['longitude']) + "," +
                str(data['encounter_id']) + "," + str(data['pokemon_id']) + "," +
                str(data['expiration_timestamp_ms']) + "," + str(data['pokemon_name']))
def data2influxdb(dataset_title, organization_title, dateTime, ip):
    dataset = dataset_title
    organization = organization_title
    geoIPdata = reader.get(ip)
    try:
        lat = float(geoIPdata['location']['latitude'])
        lon = float(geoIPdata['location']['longitude'])
        geohash_data = Geohash.encode(float(lat), float(lon))
    except:
        geohash_data = float(0)
    try:
        country = geoIPdata['registered_country']['iso_code']
    except:
        country = 'null'
    try:
        city = geoIPdata['city']['names']['en']
    except:
        city = 'null'
    access_log = [{
        'measurement': 'access',
        'tags': {
            'dataset': dataset,
            'organization': organization,
            'geohash': geohash_data,
            'country': country,
            'city': city
        },
        'fields': {'value': 1},
        'time': dateTime
    }]
    print(access_log)
    client.write_points(access_log)
def start_requests(self):
    conn = pymysql.connect(host='10.15.1.24', user='******', passwd='hh$writer',
                           db='o2o', charset='utf8', connect_timeout=5000,
                           cursorclass=pymysql.cursors.DictCursor)
    cur = conn.cursor()
    sql = '''
        select city, spot_name, spot_id, lng, lat
        from t_hh_gaode_hotspots limit 10;
    '''
    cur.execute(sql)
    temps = cur.fetchall()
    for i, r in enumerate(temps):
        # Swap the pair if lng/lat were stored in the wrong order
        if r['lng'] < r['lat']:
            a = r['lng']
            r['lng'] = r['lat']
            r['lat'] = a
        a = Geohash.encode(r['lat'], r['lng'], 12)
        url = 'http://waimai.meituan.com/home/' + a
        headers = {
            'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
            'Accept-Encoding': 'gzip, deflate',
            'Accept-Language': 'zh-CN,zh;q=0.8',
            'Host': 'waimai.meituan.com',
            'Upgrade-Insecure-Requests': '1',
            "User-Agent": "Mozilla/5.0 (MSIE 9.0; Windows NT 6.1; Trident/5.0)",
        }
        yield scrapy.Request(url, headers=headers,
                             meta={'item': {'lat': r['lat'], 'lng': r['lng'],
                                            'spot_name': r['spot_name'],
                                            'base_url': url}},
                             dont_filter=True)
def prediction_to_geohash2():
    predict = np.load("prediction.npy")
    data = pd.read_csv("test.csv")
    data = list(data["orderid"])
    print(len(data))
    print(predict.shape[0])
    label = []
    num = predict.shape[0]
    #num = 100
    for i in tqdm.trange(num):
        temp = predict[i]
        label.append(gh.encode(temp[0], temp[1], precision=7))
    result = ''
    for i in tqdm.trange(len(label)):
        a = random.randint(a=0, b=len(label) - 1)
        b = random.randint(a=0, b=len(label) - 1)
        # Just an experiment: since labels may not repeat, pad with random picks
        # TODO: try writing with pandas instead of building one big string
        result = result + str(data[i]) + "," + label[i] + "," + label[a] + "," + label[b] + "\n"
    with open("submission.csv", "w") as f:
        f.write(result)
def authorize_spotify():
    # First check we have a location
    location = "New York, NY"  # request.form['location']
    if not location:
        index()
    payload = {'address': location, 'key': BaseConfig.GOOGLE_GEOCODING}
    GOOGLE_GEOCODE_URL = "https://maps.googleapis.com/maps/api/geocode/json"
    urlparams = urllib.urlencode(payload)
    r = requests.get("%s?%s" % (GOOGLE_GEOCODE_URL, urlparams))
    if not r.status_code == 200:
        return "Error"
    json_data = json.loads(r.text)
    lat = json_data["results"][0]["geometry"]["location"]["lat"]
    lng = json_data["results"][0]["geometry"]["location"]["lng"]
    session['geohash'] = Geohash.encode(lat, lng)
    # TODO implement state & show_dialog
    # https://github.com/plamere/spotipy/issues/211
    # stateKey = 'spotify_auth_state'
    # state = ''.join(choice(ascii_uppercase) for i in range(16))
    # res.cookie(stateKey, state)
    oauth = oauth2.SpotifyOAuth(
        client_id=BaseConfig.SPOTIFY_CLIENT_ID,
        client_secret=BaseConfig.SPOTIFY_CLIENT_SECRET,
        redirect_uri=BaseConfig.SPOTIFY_REDIRECT_URL,
        scope='user-library-read')
    return redirect(oauth.get_authorize_url())
def insert_into_all_tables(self, data):
    column_names = [
        "ciudad", "pais_destino", "divisa", "competidor", "comision",
        "tasa_cambio", "timestamp", "lat", "lon", "num_agente",
        "importe_nominal"
    ]
    column_values = []
    for i in column_names:
        column_values.append(data[i])
    column_names.append("importe_destino")
    importe_destino = 100 * float(data["tasa_cambio"]) - float(data["comision"])
    column_values.append(importe_destino)
    for table_name in self.tables.keys():
        if table_name.startswith("geo"):
            column_names.append("geohash")
            column_values.append(
                Geohash.encode(float(data['lat']), float(data['lon'])))
            self._insert_data(table_name, column_names, column_values)
            # Remove last items.
            column_names.pop(-1)
            column_values.pop(-1)
        else:
            self._insert_data(table_name, column_names, column_values)
def construct_geo_location_query(lon=None, lat=None, distance=None,
                                 dist_unit=None):
    """
    Returns a geo location query for Couchbase and Elastic search
    """
    from lib.couchbase_helper.data import LON_LAT
    if not lon:
        lon_lat = random.choice(LON_LAT)
        lon = lon_lat[0]
        lat = lon_lat[1]
        distance = random.choice([10, 100, 500, 1000, 10000])
        dist_unit = random.choice(["km", "mi"])
    fts_query = {
        "location": {
            "lon": lon,
            "lat": lat
        },
        "distance": str(distance) + dist_unit,
        "field": "geo"
    }
    es_query = {
        "query": {
            "match_all": {}
        },
        "filter": {
            "geo_distance": {
                "distance": str(distance) + dist_unit,
                "geo": {
                    "lat": lat,
                    "lon": lon
                }
            }
        }
    }
    case = random.randint(0, 3)
    # Geo Location as array
    if case == 1:
        fts_query['location'] = [lon, lat]
        es_query['filter']['geo_distance']['geo'] = [lon, lat]
    # Geo Location as string
    if case == 2:
        fts_query['location'] = "{0},{1}".format(lat, lon)
        es_query['filter']['geo_distance']['geo'] = "{0},{1}".format(lat, lon)
    # Geo Location as Geohash
    if case == 3:
        geohash = Geohash.encode(lat, lon, precision=random.randint(3, 8))
        fts_query['location'] = geohash
        es_query['filter']['geo_distance']['geo'] = geohash
    # Geo Location stays an object of lat and lon if case == 0
    return fts_query, es_query
def logparse(LOGPATH, INFLUXHOST, INFLUXPORT, INFLUXDBDB, INFLUXUSER,
             INFLUXUSERPASS, MEASUREMENT, GEOIPDB, INODE, HOSTNAME):  # NOQA
    # Preparing variables and params
    IPS = {}
    COUNT = {}
    GEOHASH = {}
    # HOSTNAME = os.uname()[1]
    CLIENT = InfluxDBClient(host=INFLUXHOST, port=INFLUXPORT,
                            username=INFLUXUSER, password=INFLUXUSERPASS,
                            database=INFLUXDBDB)  # NOQA
    re_IPV4 = re.compile('(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})')
    re_IPV6 = re.compile(
        '(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))'
    )  # NOQA
    GI = geoip2.database.Reader(GEOIPDB)
    # Main loop to parse access.log file in tailf style with sending metrics
    with open(LOGPATH, "r") as FILE:
        STR_RESULTS = os.stat(LOGPATH)
        ST_SIZE = STR_RESULTS[6]
        FILE.seek(ST_SIZE)
        while True:
            METRICS = []
            WHERE = FILE.tell()
            LINE = FILE.readline()
            INODENEW = os.stat(LOGPATH).st_ino
            if INODE != INODENEW:
                break
            if not LINE:
                time.sleep(1)
                FILE.seek(WHERE)
            else:
                if re_IPV4.match(LINE):
                    m = re_IPV4.match(LINE)
                    IP = m.group(1)
                elif re_IPV6.match(LINE):
                    m = re_IPV6.match(LINE)
                    IP = m.group(1)
                if ipadd(IP).iptype() == 'PUBLIC' and IP:
                    INFO = GI.city(IP)
                    if INFO is not None:
                        HASH = Geohash.encode(INFO.location.latitude, INFO.location.longitude)  # NOQA
                        COUNT['count'] = 1
                        GEOHASH['geohash'] = HASH
                        GEOHASH['host'] = HOSTNAME
                        GEOHASH['country_code'] = INFO.country.iso_code
                        IPS['tags'] = GEOHASH
                        IPS['fields'] = COUNT
                        IPS['measurement'] = MEASUREMENT
                        METRICS.append(IPS)
                        # Sending json data to InfluxDB
                        CLIENT.write_points(METRICS)
def get_shop_detail_info1(url):
    try:
        headers = {}
        headers["User-Agent"] = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.84 Safari/537.36"
        headers["Referer"] = "http://www.dianping.com/search/keyword/1/0_%E8%8E%98%E5%BA%84/p1?aid=20245884%2C6212826%2C15998012"
        headers['Cookie'] = '_lxsdk_cuid=160c6a7a179c8-02393e4445bc62-b7a103e-100200-160c6a7a179c8; _lxsdk=160c6a7a179c8-02393e4445bc62-b7a103e-100200-160c6a7a179c8; cy=1; cye=shanghai; _hc.v=0f32a3b6-7385-0888-333e-5c3045e7cad3.1515161390; s_ViewType=10; aburl=1; wed_user_path=27760|0; __mta=251920054.1515203025830.1515203025830.1515203025830.1; _lxsdk_s=160c901323c-8b8-3a9-dad%7C%7C218'
        content = HttpUtils.get(url, headers, 'utf-8')
        print content
        soup = BeautifulSoup(content, "lxml")
        # shop_expand_addr = soup.find(id="basic-info").find_all("div", "address")[0].find_all("span", "item")[0].get_text().strip()
        # tel
        shop_expand_tel = soup.find(id="basic-info").find_all("p", "tel")[0].find_all("span", "item")[0].get_text().strip()
        # open_time
        shop_expand_open = ''
        infos = soup.find(id="basic-info").find_all("div", "other")[0].find_all("p", "info")
        if infos is not None and len(infos) > 0:
            for info in infos:
                text = info.find_all("span", "info-name")[0].get_text().encode('utf-8')
                if text.find('时间') > 0:
                    shop_expand_open = info.find_all("span", "item")[0].get_text().strip()
                    break
        # photo
        photo_url = 'http://www.dianping.com/ajax/json/shopDynamic/shopTabs?shopId=90951360&cityId=1&shopName=Regiustea%E5%A4%A9%E5%BE%A1%E7%9A%87%E8%8C%B6&power=5&mainCategoryId=244&shopType=10&shopCityId=1&_token=eJxVTt1ugjAYfZdeN9ACbYHEC3WOieKGMowzXiAwJEAtlAx12buvJu5iyZecn%2B%2Bc5HyDbp4BFyOELAzBV94BF2ANaRRA0Ev1IZgSzBgxqGNBkP73mG1CcOziJ%2BDuiYMgNenhbqyV3mPDRtBG6AAVNShkjB2gYam7Z%2BYqAk59L1xdH4ZBy8qEi5IXWnpudHk6C91BDsEmRWoJUI0mUg2F1QOTB%2FZ%2FOlDTVVaWBVcs9y%2FRRlqy%2FVwHMorfr8gMbi%2Br1%2BWsXt2u9nS6Lj%2BaRRETP%2BfPfZO0p5iTSWoNzmbyOpt6YufVbeJdmtDbMZLvGuN8rP0238YirURnZQFfCF1u34Qe663PPVxQsZwXODuG4bgaR3Ul8nA0Aj%2B%2FFDJm5g%3D%3D&uuid=40e49af0-9fce-a1c6-0e90-dbacb64cb3cb.1516376513&platform=1&partner=150&originUrl=http%3A%2F%2Fwww.dianping.com%2Fshop%2F90951360'
        shop_style_photos = []
        # map
        shop_extra = {}
        if content.find("window.shop_config") > -1:
            js_str_start = content.find("window.shop_config")
            js_str_suf = content[js_str_start:]
            js_str_start = js_str_suf.find("{")
            js_str_end = js_str_suf.find("</script>")
            js_str = js_str_suf[js_str_start:js_str_end]
            json_obj = jsonpickle.decode(js_str)
            shop_extra['shop_name'] = json_obj['shopName']
            shop_extra['shop_addr'] = json_obj['address']
            shop_extra['shop_id'] = json_obj['shopId']
            shop_extra['shop_full_name'] = json_obj['fullName']
            shop_extra['shop_lat'] = json_obj['shopGlat']
            shop_extra['shop_lng'] = json_obj['shopGlng']
            shop_extra['shop_type'] = json_obj['shopType']
            shop_extra['shop_first_category'] = json_obj['categoryName']
            shop_extra['shop_second_category'] = json_obj['mainCategoryName']
            shop_extra['shop_logo'] = json_obj['defaultPic'][:json_obj['defaultPic'].find('.jpg') + 4]
            shop_extra['shop_geohash'] = Geohash.encode(float(json_obj['shopGlat']), float(json_obj['shopGlng']), 8)
        shop_extra['shop_tel'] = shop_expand_tel
        shop_extra['shop_open'] = shop_expand_open
        shop_extra['shop_photos'] = shop_style_photos
        return shop_extra
    except Exception, e:
        traceback.print_exc(e)
        print url
def test_basic(self):
    hash = Geohash.encode(self.family[0], self.family[1], precision=20)
    (lats, lons) = Geohash.decode(hash)
    assert float(lats) == self.family[0]
    assert float(lons) == self.family[1]
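# Added note (assumption flagged): with the classic Geohash package,
# decode() returns the coordinates as strings trimmed to the precision the
# hash supports, hence the float() casts above. A minimal round trip:
h = Geohash.encode(48.8584, 2.2945, precision=20)
lat_s, lon_s = Geohash.decode(h)
print(float(lat_s), float(lon_s))  # approximately 48.8584, 2.2945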
def update_geohash(self):
    """
    Sets the geohash to correspond to the coordinates stored in self.geo.
    """
    if self.geo:
        self.geohash = Geohash.encode(self.geo.lat, self.geo.lon, precision=20)
def features():
    obj = request.get_json(force=True)
    lat = float(obj["lat"])
    lon = float(obj["lon"])
    amenity = obj["amenity"]
    geohash = Geohash.encode(lat, lon)
    obj["geohash"] = geohash
    print(json.dumps(obj))
    sql = """
    WITH q1 AS (
      SELECT name,
        ST_Distance(ST_MakePoint(%s, %s)::GEOGRAPHY, ref_point)::NUMERIC(9, 2) dist_m,
        ST_Y(ref_point::GEOMETRY) lat,
        ST_X(ref_point::GEOMETRY) lon,
        date_time,
        key_value
      FROM osm
      WHERE
    """
    if useGeohash:
        sql += "geohash4 = SUBSTRING(%s FOR 4) AND amenity = %s"
    else:
        sql += "ST_DWithin(ST_MakePoint(%s, %s)::GEOGRAPHY, ref_point, 5.0E+03, TRUE) AND key_value && ARRAY[%s]"
    sql += """
    )
    SELECT * FROM q1
    """
    if useGeohash:
        sql += "WHERE dist_m < 5.0E+03"
    sql += """
    ORDER BY dist_m ASC
    LIMIT 10;
    """
    rv = []
    conn = get_db()
    #print("SQL:\n" + sql + "\n")
    with conn.cursor() as cur:
        try:
            if useGeohash:
                cur.execute(sql, (lon, lat, geohash, amenity))
            else:
                cur.execute(sql, (lon, lat, lon, lat, "amenity=" + amenity))
            for row in cur:
                (name, dist_m, lat, lon, dt, kv) = row
                d = {}
                d["name"] = name
                d["amenity"] = amenity
                d["dist_m"] = str(dist_m)
                d["lat"] = lat
                d["lon"] = lon
                #print("Feature: " + json.dumps(d))
                rv.append(d)
        except:
            logging.debug("Search: status message: {}".format(cur.statusmessage))
    return Response(json.dumps(rv), status=200, mimetype="application/json")
def get_around_geohashs(self):
    around_geohashs = {}
    self._get_min_lat_unit()
    self._get_min_long_unit()
    # north
    around_geohashs['n'] = Geohash.encode(self.lat + self.min_lat_unit,
                                          self.long, self.precision)
    # center
    around_geohashs['c'] = Geohash.encode(self.lat, self.long, self.precision)
    # south
    around_geohashs['s'] = Geohash.encode(self.lat - self.min_lat_unit,
                                          self.long, self.precision)
    # northwest
    around_geohashs['nw'] = Geohash.encode(self.lat + self.min_lat_unit,
                                           self.long - self.min_long_unit,
                                           self.precision)
    # southwest
    around_geohashs['sw'] = Geohash.encode(self.lat - self.min_lat_unit,
                                           self.long - self.min_long_unit,
                                           self.precision)
    # northeast
    around_geohashs['ne'] = Geohash.encode(self.lat + self.min_lat_unit,
                                           self.long + self.min_long_unit,
                                           self.precision)
    # southeast
    around_geohashs['se'] = Geohash.encode(self.lat - self.min_lat_unit,
                                           self.long + self.min_long_unit,
                                           self.precision)
    return around_geohashs
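# Added cross-check (not from the source), assuming the python-geohash
# package is installed (the lowercase `geohash` module used elsewhere in
# this collection): its neighbors() returns the eight surrounding cells
# directly and can be used to validate the manual lat/long offsets above.
import geohash as pygeohash
center = pygeohash.encode(39.92, 116.39, 6)
print(pygeohash.neighbors(center))  # eight adjacent cells at precision 6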
def get_geohash_index(points, precision=24):
    """
    get a list of geohashes
    :param points: a list of GPS points as (longitude, latitude) pairs
    :param precision: the number of characters in each geohash; the longer
        the string is, the more precise the geohash is
    :return: a list of geohash strings
    """
    geo_index = [Geohash.encode(latitude=p[1], longitude=p[0], precision=precision)
                 for p in points]
    return geo_index
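# Added usage sketch (coordinates are assumptions): each point is a
# (longitude, latitude) pair, matching the p[1]/p[0] order above. Note the
# Geohash package counts precision in base32 characters, not bits.
points = [(-122.4194, 37.7749), (-122.4180, 37.7760)]
print(get_geohash_index(points, precision=6))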
def save(self, *args, **kwargs):
    # if not self.point and not self.area:
    #     raise ValidationError(_(u"You must at least set a point or choose "
    #                             u"an area."))
    if not self.label:
        self.label = self.adr1
    if self.point:
        self.geohash = Geohash.encode(self.point.y, self.point.x)
    return super(Location, self).save(*args, **kwargs)
def vpnPlusOnlineSessions(token):
    url = baseurl + '/webapi/entry.cgi'
    headers = {
        'Content-Type': 'application/x-www-form-urlencoded'
    }
    body = {
        'Content-Type': 'application/x-www-form-urlencoded',
        'api': 'SYNO.VPNPlus.Connectivity',
        'version': '1',
        'method': 'list',
        'status': 'online',
        '_sid': token
    }
    r = session.post(url, headers=headers, data=body)
    jsonDic = json.loads(r.text)
    result = jsonDic['data']['cnt_list']
    influxPayloadVpnPlusActiveSessions = []
    for item in result:
        if item['ip_from'] != "0.0.0.0":
            INFO = GI.city(item['ip_from'])
            HASH = Geohash.encode(INFO.location.latitude, INFO.location.longitude)
            country_code = INFO.country.iso_code
            if INFO.city.name != "" and INFO.city.name is not None:
                location_name = INFO.city.name
            else:
                location_name = country_code
        else:
            HASH = "Blocked"
            location_name = "Blocked"
            country_code = "Blocked"
        influxPayloadVpnPlusActiveSessions.append(
            {
                "measurement": "OUTLAND.Remote.Network.VPNPlus",
                "tags": {
                    "username": item['username'],
                    "geohash": HASH,
                    "country_code": country_code,
                    "location_name": location_name,
                    "externalIP": item['ip_from'],
                    "internalIP": item['signature']
                },
                "time": now_iso(),
                "fields": {
                    "externalIP": item['ip_from'],
                    "internalIP": item['signature'],
                    "abnormal": item['abnormal'],
                    "download": item['download'],
                    "upload": item['upload'],
                    "count": 1,
                    "time_duration": item['time_duration'],
                    "time_start": datetime.fromtimestamp(float(item['time_start'])).strftime('%Y-%m-%d %H:%M:%S')
                }
            }
        )
    influx_sender(influxPayloadVpnPlusActiveSessions, 'telegraf')
    return influxPayloadVpnPlusActiveSessions
def geohash_encoding(gcjLat, gcjLng, precision=12):
    if not isinstance(gcjLat, float) or not isinstance(gcjLng, float):
        raise ValueError("input lat and lng are not floats")
    if not isinstance(precision, int):
        raise ValueError("input precision is not an int")
    if not 1 <= precision <= 12:
        raise ValueError("precision must be between 1 and 12")
    wgsLat, wgsLng = gcj2wgs(gcjLat, gcjLng)
    return Geohash.encode(wgsLat, wgsLng, precision)
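# Added usage sketch (not from the source): encode a GCJ-02 coordinate,
# assuming gcj2wgs() is importable from the surrounding module. The sample
# point is an assumption.
print(geohash_encoding(39.915, 116.404, precision=8))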
def fetch_data(self):
    data = requests.get(self.url).json()
    shelters = data['shelters']
    # Scan for potential dupes by lat/lon (using geohash)
    by_geohash = {}
    for shelter in shelters:
        if shelter['id'] in IGNORE_DUPE_IDS:
            continue
        geohash = Geohash.encode(
            shelter['latitude'],
            shelter['longitude'],
            precision=GEOHASH_PRECISION,
        )
        by_geohash.setdefault(geohash, []).append(shelter)
    dupe_groups = [
        pair for pair in by_geohash.items()
        if (
            # More than one shelter in this group
            len(pair[1]) > 1
            # Group is not invalid lat/lon
            and pair[0] != ('0' * GEOHASH_PRECISION))
    ]
    no_latlons = by_geohash.get('0' * GEOHASH_PRECISION) or []
    return {
        'dupe_groups': [{
            'geohash': dupe_group[0],
            'shelters': [{
                'id': shelter['id'],
                'name': shelter['shelter'],
                'address': shelter['address'],
                'latitude': shelter['latitude'],
                'longitude': shelter['longitude'],
                'google_maps': 'https://www.google.com/maps/search/%(latitude)s,%(longitude)s' % shelter,
                'view_url': 'https://irma-api.herokuapp.com/shelters/%s' % shelter['id'],
            } for shelter in dupe_group[1]],
        } for dupe_group in dupe_groups],
        'no_latitude_longitude': [{
            'id': shelter['id'],
            'name': shelter['shelter'],
            'address': shelter['address'],
            'view_url': 'https://irma-api.herokuapp.com/shelters/%s' % shelter['id'],
        } for shelter in no_latlons]
    }
def create_geo_time_key(timestamp, lat, lng):
    # Map the coordinates to a geohash cell
    geohash = Geohash.encode(lat, lng, precision=5)
    # Round the time down (floor) to a whole 5-minute boundary
    tm = datetime.datetime.utcfromtimestamp(timestamp)
    tm = tm - datetime.timedelta(minutes=tm.minute % 5,
                                 seconds=tm.second,
                                 microseconds=tm.microsecond)
    ts_5mins = str(time.mktime(tm.timetuple()))
    # Combined key (time + space)
    key = geohash + "_" + ts_5mins[:-2]
    return key
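# Added sketch of the resulting key shape (sample values are assumptions):
# a 5-character geohash cell joined to the epoch seconds of the enclosing
# 5-minute bucket, e.g. "wx4g0_1534567500".
print(create_geo_time_key(1534567890, 39.9042, 116.4074))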
def get_geohash(wkt):
    ''' generate a geohash from a wkt '''
    if type(wkt) == str:
        wkt = loads(wkt)
    y = wkt.centroid.y
    x = wkt.centroid.x
    ghash = Geohash.encode(float(y), float(x), precision=10)
    return ghash
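# Added usage sketch (not from the source), assuming `loads` here is
# shapely.wkt.loads: the geohash is always taken at the geometry's centroid.
print(get_geohash("POINT (30 10)"))          # hash of lat 10, lon 30
print(get_geohash("LINESTRING (0 0, 2 2)"))  # hash of the line's centroid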
def _create_geohash(self, latitude, longitude, radius, precision, as_set=True):
    x = 0.0
    y = 0.0
    geohashes = []
    returned = None
    grid_width = [
        5009400.0, 1252300.0, 156500.0, 39100.0, 4900.0, 1200.0, 152.9, 38.2,
        4.8, 1.2, 0.149, 0.0370
    ]
    grid_height = [
        4992600.0, 624100.0, 156000.0, 19500.0, 4900.0, 609.4, 152.4, 19.0,
        4.8, 0.595, 0.149, 0.0199
    ]
    height = (grid_height[precision - 1]) / 2
    width = (grid_width[precision - 1]) / 2
    lat_moves = int(math.ceil(radius / height))
    lon_moves = int(math.ceil(radius / width))
    for i in range(0, lat_moves):
        temp_lat = y + height * i
        for j in range(0, lon_moves):
            temp_lon = x + width * j
            if self._in_circle_check(temp_lat, temp_lon, y, x, radius):
                x_cen, y_cen = self._get_centroid(temp_lat, temp_lon, height, width)
                lat, lon = self._convert_to_latlon(y_cen, x_cen, latitude, longitude)
                geohashes += [Geohash.encode(lat, lon, precision)]
                lat, lon = self._convert_to_latlon(-y_cen, x_cen, latitude, longitude)
                geohashes += [Geohash.encode(lat, lon, precision)]
                lat, lon = self._convert_to_latlon(y_cen, -x_cen, latitude, longitude)
                geohashes += [Geohash.encode(lat, lon, precision)]
                lat, lon = self._convert_to_latlon(-y_cen, -x_cen, latitude, longitude)
                geohashes += [Geohash.encode(lat, lon, precision)]
    returned = set(geohashes) if as_set else ','.join(set(geohashes))
    return returned
def get_shop_detail_info1(url):
    try:
        content = HttpUtils.get(url, headers, 'utf-8')
        print content
        soup = BeautifulSoup(content, "lxml")
        # shop_expand_addr = soup.find(id="basic-info").find_all("div", "address")[0].find_all("span", "item")[0].get_text().strip()
        # tel
        shop_expand_tel = ''
        if len(soup.find(id="basic-info").find_all("p", "tel")) > 0 and \
                len(soup.find(id="basic-info").find_all("p", "tel")[0].find_all("span", "item")) > 0:
            shop_expand_tel = soup.find(id="basic-info").find_all("p", "tel")[0].find_all("span", "item")[0].get_text().strip()
        # open_time
        shop_expand_open = ''
        infos = soup.find(id="basic-info").find_all("div", "other")[0].find_all("p", "info")
        if infos is not None and len(infos) > 0:
            for info in infos:
                text = info.find_all("span", "info-name")[0].get_text().encode('utf-8')
                if text.find('时间') > 0:
                    shop_expand_open = info.find_all("span", "item")[0].get_text().strip()
                    break
        # photo
        photo_url = 'http://www.dianping.com/ajax/json/shopDynamic/shopTabs?shopId=90951360&cityId=1&shopName=Regiustea%E5%A4%A9%E5%BE%A1%E7%9A%87%E8%8C%B6&power=5&mainCategoryId=244&shopType=10&shopCityId=1&_token=eJxVTt1ugjAYfZdeN9ACbYHEC3WOieKGMowzXiAwJEAtlAx12buvJu5iyZecn%2B%2Bc5HyDbp4BFyOELAzBV94BF2ANaRRA0Ev1IZgSzBgxqGNBkP73mG1CcOziJ%2BDuiYMgNenhbqyV3mPDRtBG6AAVNShkjB2gYam7Z%2BYqAk59L1xdH4ZBy8qEi5IXWnpudHk6C91BDsEmRWoJUI0mUg2F1QOTB%2FZ%2FOlDTVVaWBVcs9y%2FRRlqy%2FVwHMorfr8gMbi%2Br1%2BWsXt2u9nS6Lj%2BaRRETP%2BfPfZO0p5iTSWoNzmbyOpt6YufVbeJdmtDbMZLvGuN8rP0238YirURnZQFfCF1u34Qe663PPVxQsZwXODuG4bgaR3Ul8nA0Aj%2B%2FFDJm5g%3D%3D&uuid=40e49af0-9fce-a1c6-0e90-dbacb64cb3cb.1516376513&platform=1&partner=150&originUrl=http%3A%2F%2Fwww.dianping.com%2Fshop%2F90951360'
        shop_style_photos = []
        # map
        shop_extra = {}
        if content.find("window.shop_config") > -1:
            js_str_start = content.find("window.shop_config")
            js_str_suf = content[js_str_start:]
            js_str_start = js_str_suf.find("{")
            js_str_end = js_str_suf.find("</script>")
            js_str = js_str_suf[js_str_start:js_str_end]
            json_obj = jsonpickle.decode(js_str)
            shop_extra['shop_name'] = json_obj['shopName']
            shop_extra['shop_addr'] = json_obj['address']
            shop_extra['shop_id'] = json_obj['shopId']
            shop_extra['shop_full_name'] = json_obj['fullName']
            shop_extra['shop_lat'] = json_obj['shopGlat']
            shop_extra['shop_lng'] = json_obj['shopGlng']
            shop_extra['shop_type'] = json_obj['shopType']
            shop_extra['shop_first_category'] = json_obj['categoryName']
            shop_extra['shop_second_category'] = json_obj['mainCategoryName']
            shop_extra['shop_logo'] = json_obj['defaultPic'][:json_obj['defaultPic'].find('.jpg') + 4]
            shop_extra['shop_geohash'] = Geohash.encode(float(json_obj['shopGlat']), float(json_obj['shopGlng']), 8)
        shop_extra['shop_tel'] = shop_expand_tel
        shop_extra['shop_open'] = shop_expand_open
        shop_extra['shop_photos'] = shop_style_photos
        return shop_extra
    except Exception, e:
        traceback.print_exc(e)
        print 'exception : ' + url
def _to_es_model(self):
    from wselasticsearch.models import IpGeolocationModel
    return IpGeolocationModel(
        geolocation=Geohash.encode(self.latitude, self.longitude),
        country_code=self.country_code,
        region=self.region,
        geo_source=self.geo_source,
        postal_code=self.postal_code,
        latitude=self.latitude,
        longitude=self.longitude,
    )
def position_update():
    jsonRequest = request.json
    Position(
        id=ObjectId('556018800640fd52df330d31'),
        lat=jsonRequest['lat'],
        long=jsonRequest['long'],
        geohash=Geohash.encode(jsonRequest['lat'], jsonRequest['long'], precision=16),
        updated=datetime.datetime.utcnow()
    ).save()
    return ('', 204)
def setup_start_points_to_trip(self):
    """
    Input: Route_name eg '22'
    Output: Dictionary which maps all of the trips associated with that
        route to a gps coordinate where the trips start
    Discussion:
    TODO: This should live in schedule puller class
    """
    for trip_id in self.trip_to_dep_arr_times.keys():
        self.start_point_to_trips[trip_id] = Geohash.encode(37.76048, -122.38895)
def post(self):
    if cgi.escape(self.request.get('lat')) and cgi.escape(self.request.get('lon')):
        lat = cgi.escape(self.request.get('lat'))
        lon = cgi.escape(self.request.get('lon'))
        if self.request.get('address'):
            lat = 0
            lon = 0
        precision = cgi.escape(self.request.get('precision')) or 20
        geohash = Geohash.encode(float(lat), float(lon))
        if geohash.startswith('c2'):
            geohash = geohash.replace('c2', '')
            geohash = geohash[0:int(precision)]
            pdxhash = {'pdxhash': geohash}
        else:
            pdxhash = {'error': 'Not in Portland'}
        self.response.out.write(simplejson.dumps(pdxhash) + "\n")
def get(self, point, buffer_size=0, multiple=False):
    """
    lookup state and county based on geohash of coordinates from tweet
    """
    lon, lat = point
    geohash = Geohash.encode(lat, lon, precision=self.precision)
    key = (geohash, buffer_size, multiple)
    if key in self.geohash_cache:
        # cache hit on geohash
        self.hit += 1
        #print self.hit, self.miss
        return self.geohash_cache[key]
    # cache miss on geohash
    self.miss += 1
    # project point to ESRI:102005
    lat, lon = Geohash.decode(geohash)
    proj_point = project([float(lon), float(lat)])
    args = dict(buffer_size=buffer_size, multiple=multiple)
    payload = self.get_object(proj_point, **args)
    self.geohash_cache[key] = payload
    return payload
def get_inventory(self):
    starttime = UTCDateTime()
    endtime = UTCDateTime()
    client = Client(self.fdsn_server)
    for s in self.streams:
        net, sta, chan, loc = s
        inv = client.get_stations(starttime=starttime, endtime=endtime,
                                  network=net, station=sta, location=loc,
                                  channel=chan, level="response")
        channels = set(inv.get_contents()['channels'])
        for c in channels:
            try:
                coords = inv.get_coordinates(c, datetime=starttime)
            except:
                try:
                    coords = inv.get_coordinates(c)
                except:
                    print c, "No matching coordinates found"
                    continue
            latitude = coords['latitude']
            longitude = coords['longitude']
            elevation = coords['elevation']
            self.station_coordinfo[c] = \
                {"latitude": latitude,
                 "longitude": longitude,
                 "elevation": elevation,
                 "geohash": Geohash.encode(latitude, longitude, precision=7)}
def get_Geohash(self, obj):
    if obj.Latitude and obj.Longitude:
        return Geohash.encode(float(obj.Latitude), float(obj.Longitude))
    else:
        return None
def geohash_for_point(self, pt, precision=12):
    gh = Geohash.encode(pt['latitude'], pt['longitude'], precision=precision)
    logging.debug("geohash (%s) for %s, %s: %s" %
                  (precision, pt['latitude'], pt['longitude'], gh))
    return gh
def costAlgo(r):
    parsedJson = json.loads(r[1])
    # This needs to change depending on the stream information. Then we
    # handle that and calculate the cost.
    fromHash = parsedJson["from"]
    geohash = Geohash.encode(fromHash.get("lat"), fromHash.get("lon"), 9)
    dest = parsedJson["to"]
    toHash = Geohash.encode(dest.get("lat"), dest.get("lon"), 8)
    keyHash = str(parsedJson["key"])
    tStamp = str(parsedJson["timestamp"])
    value = float(parsedJson["value"])
    # Try to connect and insert into our database
    try:
        conn = psycopg2.connect("dbname='DRP' user='******' password='******'")
    except:
        print "I am unable to connect to the database."
    cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    try:
        # Shorten the geohash prefix until a matching way is found
        result = None
        while result == None:
            cur.execute("SELECT * FROM geohashed_ways WHERE geohash LIKE '" + geohash + "%';")
            result = cur.fetchone()
            geohash = geohash[:-1]
    except:
        print "I can't SELECT."
    # Compute the cost using the updated data that was streamed and data
    # already existing in our database.
    tagDict = pghstore.loads(result['tags'])
    isAccident = False
    if keyHash == 'TRAFFIC_INCIDENT':
        cost = value * 10
        isAccident = True
    else:
        cost = value
    for k, v in tagDict.items():
        if k == 'lanes':
            if tagDict[k] == 2 and result['oneway'] == 'yes':
                if isAccident:
                    cost = cost + (value * 10)
                else:
                    cost = cost + 20
            elif tagDict[k] == 2 and result['oneway'] == 'no':
                if isAccident:
                    cost = cost + (value * 5)
                else:
                    cost = cost + 15
            elif tagDict[k] > 2 and result['oneway'] == 'yes':
                if isAccident:
                    cost = cost + (value * 5)
                else:
                    cost = cost + 10
            elif tagDict[k] > 2 and result['oneway'] == 'no':
                if isAccident:
                    cost = cost + (value * 2)
                else:
                    cost = cost + 5
        elif k == 'highway':
            if tagDict[k] == 'primary':
                cost = cost - 20
            elif tagDict[k] == 'secondary':
                cost = cost - 15
            elif tagDict[k] == 'trunk':
                cost = cost - 10
            elif tagDict[k] == 'trunk_link':
                cost = cost - 5
        elif k == 'maxspeed':
            if tagDict[k] >= 50:
                cost = cost - tagDict[k]
            else:
                cost = cost + tagDict[k]
    try:
        # Store the updated cost for the way in our database.
        cur.execute("UPDATE ways SET cost = " + str(cost) +
                    " WHERE ways.id = " + str(result['id']) + ";")
    except:
        print "I can't UPDATE."
    cur.close()
    conn.close()
    else:
        raise ValueError('coordenadas mal formadas: %r' % coords)
    return lat, long


export = {}

with zipfile.ZipFile(ARQ) as kmz:
    doc = kmz.open('doc.kml')
    raiz = None
    qt_lidos = 0
    capital = True
    for event, elem in ElementTree.iterparse(doc):
        ns = elem.tag[:elem.tag.find('}') + 1]
        if elem.tag == ns + 'Placemark':
            nome, uf = extrair_nome_uf(ns, elem)
            codmun = extrair_codmun(ns, elem)
            lat, long = extrair_coords(ns, elem)
            qt_lidos += 1
            g_hash = Geohash.encode(lat, long)
            export[codmun] = (uf, int(capital), g_hash, lat, long, nome)
            #break
        elif elem.tag == ns + 'Folder':
            nome_folder = elem.findtext(ns + 'name')
            assert capital == (nome_folder == u'Capitais'), 'folder e flag nao batem'
            if capital and nome_folder == u'Capitais':
                capital = False

with open('coords.json', 'wb') as saida:
    json.dump(export, saida, indent=2)

print qt_lidos, 'tuplas salvas'
#coding:utf8
import Geohash

# lng: longitude (经度)
# lat: latitude (纬度)
lng = 116.37439
lat = 39.94758

h = Geohash.encode(lat, lng)
print h
print Geohash.decode(h)
def geohash(latitude, longitude):
    return Geohash.encode(float(latitude), float(longitude))
def build_geohash_id(lat, lon):
    """Build a GR id with an embedded geohash"""
    gh = Geohash.encode(lat, lon)
    id = "GR_%s" % (gh)
    return str(id)
for values2 in cursor2:
    docStandort = dict(zip(cursor2.column_names, values2))
    breite = None
    laenge = None
    if docStandort["standort_laenge"] and docStandort["standort_breite"]:
        breite = docStandort["standort_breite"]
        laenge = docStandort["standort_laenge"]
        docStandort["koordinaten_institutionengenau"] = True
    elif docStandort["ort_laenge"] and docStandort["ort_breite"]:
        breite = docStandort["ort_breite"]
        laenge = docStandort["ort_laenge"]
        docStandort["koordinaten_institutionengenau"] = False
    if breite and laenge:
        docStandort["koordinaten"] = str(breite) + "," + str(laenge)
        docStandort["geohash"] = []
        geohash = Geohash.encode(breite, laenge)
        i = 1
        while (i <= len(geohash)):
            docStandort["geohash"] += [('%02d' % i) + "-" + geohash[0:i]]
            i += 1
    del docStandort["standort_laenge"]
    del docStandort["standort_breite"]
    del docStandort["ort_laenge"]
    del docStandort["ort_breite"]
    if docStandort["bistum_uid"]:
        queryBistumURL = """
            SELECT url.url, url.bemerkung, url_typ.name AS url_typ
def _get_geohash(self, town):
    if town is not None:
        return geohash.encode(town.latitude, town.longitude)
    else:
        return None
def change_corr(cor, precision=5):
    x, y = cor
    geohash = Geohash.encode(x, y, precision=precision)
    x, y = Geohash.decode(geohash)
    return [float(x), float(y)]
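# Added illustration (not from the source): the encode/decode round trip
# snaps a coordinate to the centre of its geohash cell, so nearby inputs
# collapse to the same point. Sample values are assumptions; at precision=5
# a cell is roughly 4.9 km on a side.
print(change_corr([39.9821, 116.3065]))
print(change_corr([39.9823, 116.3067]))  # usually identical to the above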
def generate_cache_key(self, lat, lon):
    return Geohash.encode(lat, lon)
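# Added note (not from the source): a geohash cache key buckets every
# coordinate in the same cell together. At the Geohash package's default
# precision (12 characters) almost every distinct point gets its own entry,
# so passing a coarser precision widens sharing. Values are assumptions.
k1 = Geohash.encode(51.5007, -0.1246, precision=6)
k2 = Geohash.encode(51.5009, -0.1248, precision=6)
print(k1 == k2)  # usually True: both fall in the same ~1.2 km cell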
        simplergeo.__init__(self, **kwargs)
        self.cfg = cfg


if __name__ == '__main__':
    import Geohash
    import time
    import json

    token = 'YER_SIMPLEGEO_TOKEN'
    secret = 'YER_SIMPLEGEO_SECRET'
    layer_name = 'YER_SIMPLEGEO_LAYER'

    lat = 37.764845
    lon = -122.419857
    uid = Geohash.encode(lat, lon)

    args = {
        'geometry': {
            'type': 'Point',
            'coordinates': [lon, lat],
        },
        'created': int(time.time()),
        'properties': {},
        'type': 'Feature',
    }

    geo = simplegeo(token=token, secret=secret, debug=True)
    req = '/records/%s/%s.json' % (layer_name, uid)
    rsp = geo.execute_request_simple(req, method='PUT', args=json.dumps(args))
# 1000 API calls per day are free -> every 86 seconds. So 100 seconds sleep
if now > last + 100:
    last = now
    lat = random.randint(35, 60)
    lon = random.randint(-11, 30)
    print 'coords: %d,%d' % (lat, lon)
    try:
        forecast = forecastio.manual('https://api.forecast.io/forecast/%s/%d,%d?exclude=minutely,hourly,daily,flags' % (forecast_io_api_key, lat, lon))
    except:
        forecast = None
        pass
    if forecast != None:
        #print forecast.json
        # geohash 3 has a lat/lon error of 0.7, so it should be ok for lat and lon as int
        geohash = Geohash.encode(lat, lon, precision=3)
        if forecast.json.has_key('alerts'):
            alerts = forecast.json['alerts']
            for alert in alerts:
                print alert
                ttl = alert['expires'] - now
                payloaddict = dict(_type='msg', tst=now, ttl=ttl, prio=2,
                                   icon='fa-exclamation-triangle',
                                   desc=alert['description'],
                                   title=alert['title'], url=alert['uri'])
                client.publish('msg/forecast.io/%s' % geohash,
                               payload=json.dumps(payloaddict), qos=2, retain=True)
                print 'pub: ' + 'msg/forecast.io/%s' % geohash
        else:
            if forecast.json.has_key('currently'):
                currently = forecast.json['currently']
                weather = '%s' % (currently['summary'])
                payloaddict = dict(_type='msg', tst=currently['time'], ttl=24 * 3600,
                                   prio=1, icon='fa-exclamation-triangle',
                                   title='Local Weather', desc=weather,
                                   url='http://forecast.io')
            else: