def _get_data_link(start_time):
    """Return a 'data' hyperlink to the raw-location view.

    The link covers a four-minute window centred on *start_time*
    (two minutes either side), expressed in epoch milliseconds.
    """
    epoch = datetime(1970, 1, 1)
    utcmsecs = int((start_time - epoch).total_seconds() * 1000)
    params = {
        location_atom.DISPLAY: '',
        location_atom.TIME: utcmsecs - 120000,   # start 2 min early
        location_atom.LEN: '240000'              # 4 min span, in ms
    }
    return html.hyperlink(html.urlencode(urls.LOCATION__URL, params), 'data')
def render_page(isin, screen_name, u_id, visit_date=None):
    """Render the visit-history detail page for one user and date.

    isin        : signed-in flag, passed through to the page shell
    screen_name : display name, passed through to the page shell
    u_id        : user id whose history is listed
    visit_date  : date to detail; used as both endpoints of the range

    Returns the rendered page produced by sp(_render_html(...)).
    """
    clust_hist = \
        location_data.do_get_visit_history_detail(u_id, visit_date, visit_date)
    data = {}
    clusters = []
    last_entry_date = None
    temp_dict = None
    for row in clust_hist:
        entry_date = row['entry_date']
        # rows arrive ordered by date; start a new group when it changes
        if entry_date != last_entry_date:
            if temp_dict is not None:
                # no deepcopy needed: the group is rebuilt, never mutated
                # after it has been appended
                clusters.append(temp_dict)
            temp_dict = {
                'entry_date': row['entry_date'],
                'entry_day': row['entry_day'],
                'items': []
            }
        mapurl_data = {
            loc_atom.PLACE_ID: row['place_id'],
            loc_atom.DESCR: row['place'],
            loc_atom.LATITUDE: row['clust_avg_lat'],
            loc_atom.LONGITUDE: row['clust_avg_lon'],
            loc_atom.PROXIMITY: row['proximity']
        }
        mapurl = html.urlencode(urls.PLACE__URL, mapurl_data)
        # accumulate this visit under the current date group
        temp_dict['items'].append({
            'entry_day': row['entry_day'],
            'entry_time': row['entry_time'],
            'exit_day': row['exit_day'],
            'exit_time': row['exit_time'],
            'total_time': row['total_time'],
            'place': html.hyperlink(mapurl, row['place'])
        })
        last_entry_date = entry_date
    # flush the final group; guard against an empty history (the original
    # appended unconditionally, leaving a [None] clusters list)
    if temp_dict is not None:
        clusters.append(temp_dict)
    data['clusters'] = clusters
    return sp(_render_html(data), isin, screen_name)
def _get_place_gen(u_id, p_id, data):
    """Yield one rendered HTML fragment per place in *data*.

    u_id : user id
    p_id : place id; when not None, the place's visit history is included
    data : iterable of place rows; if a row has non-zero proximity it is
           treated as a new (not yet saved) place and rendered as an
           'add' page

    Yields 'No places defined yet' when *data* is empty.
    """
    hasplaces = False
    for item in data:
        hasplaces = True
        # A non-zero proximity marks an unknown/new place.  Place rows are
        # brittle, so any failure to read the key means "not new".
        # (Was a bare `except:`; narrowed so SystemExit/KeyboardInterrupt
        # are no longer swallowed.)
        isnew = False
        try:
            if item[loc_atom.PROXIMITY] != 0:
                isnew = True
        except Exception:
            pass
        pid = item[loc_atom.PLACE_ID]
        lat = str(item[loc_atom.LATITUDE])
        lon = str(item[loc_atom.LONGITUDE])
        descr = item[loc_atom.DESCR]
        at_dist = item[loc_atom.AT_DIST]
        maplink = html.hyperlink(
            'http://maps.google.com/maps?q=' + lat + ',+' + lon,
            'map')
        lookuplink = html.hyperlink(
            'http://nominatim.openstreetmap.org/reverse?format=json&lat=' + \
            lat + '&lon=' + lon + '&zoom=18&addressdetails=1',
            'lookup')
        # template context for the pystache place template
        place = {
            'isnew': isnew,
            'place_url': urls.PLACES__URL,
            'lat_atom': loc_atom.LATITUDE,
            'lat': lat,
            'lon_atom': loc_atom.LONGITUDE,
            'lon': lon,
            'pid_atom': loc_atom.PLACE_ID,
            'pid': pid,
            'descr_atom': loc_atom.DESCR,
            'descr': descr,
            'map': maplink,
            'lookup': lookuplink,
            'divclass': global_atoms.MESSAGE_DIV_CLASS,
            'divid': global_atoms.MESSAGE_DIV_ID_OUTER,
            'at_dist_atom': loc_atom.AT_DIST,
            'at_dist': at_dist,
            'showhistory': False
        }
        if p_id is not None:
            place['showhistory'] = True
            hist = [
                dict(x)
                for x in loc_data.do_get_visit_history_rollup(u_id, p_id)
            ]
            # renamed from `item`: the original shadowed the outer loop var
            for histrow in hist:
                dateurl = html.urlencode(urls.VISIT_HISTORY_DETAIL__URL,
                                         {'date': histrow['entry_date']})
                datelink = html.hyperlink(dateurl, str(histrow['entry_date']))
                histrow['entry_date'] = datelink
            place['history'] = hist
        yield pystache.render(_TMPL, place)
    if not hasplaces:
        yield 'No places defined yet'
def render_page(isin, screen_name, u_id):
    """Render the visit-history rollup page for one user.

    isin        : signed-in flag, passed through to the page shell
    screen_name : display name, passed through to the page shell
    u_id        : user id whose history and top places are listed

    Returns the rendered page produced by sp(render_html(...)).
    """
    loc_hist = location_data.do_get_visit_history_rollup(u_id)

    # top places, each linked to its place-detail page
    top_places = [dict(x) for x in location_data.do_get_top_places(u_id)]
    for place in top_places:
        url_data = {
            loc_atom.PLACE_ID: place[loc_atom.PLACE_ID],
            loc_atom.PROXIMITY: 0
        }
        url = html.urlencode(urls.PLACE__URL, url_data)
        place['place_url'] = html.hyperlink(url, place[loc_atom.DESCR])

    data = {}
    visits = []
    last_entry_date = None
    temp_dict = None
    for row in loc_hist:
        entry_date = row['entry_date']
        entry_day = row['entry_day']
        # rows arrive ordered by date; start a new group when it changes
        if entry_date != last_entry_date:
            if temp_dict is not None:
                # no deepcopy needed: the group is rebuilt, never mutated
                # after it has been appended
                visits.append(temp_dict)
            detailurl = html.urlencode(urls.VISIT_HISTORY_DETAIL__URL,
                                       {'date': str(entry_date)})
            temp_dict = {
                'datelink': html.hyperlink(
                    detailurl,
                    '[' + entry_day + ' ' + str(entry_date) + ']'),
                'items': []
            }
        mapurl_data = {
            loc_atom.PLACE_ID: row['place_id'],
            loc_atom.DESCR: row['place'],
            loc_atom.LATITUDE: row['visit_avg_lat'],
            loc_atom.LONGITUDE: row['visit_avg_lon'],
            loc_atom.PROXIMITY: row['proximity']
        }
        mapurl = html.urlencode(urls.PLACE__URL, mapurl_data)
        # distance may be NULL in the database; keep None in that case
        dist = int(row['dist']) if row['dist'] is not None else None
        temp_dict['items'].append({
            'visit_count': row['visit_count'],
            'visit_time': row['total_visit_time'],
            'dist': dist,
            'place': html.hyperlink(mapurl, row['place'])
        })
        last_entry_date = entry_date
    # flush the final group; guard against an empty history (the original
    # appended unconditionally, leaving a [None] visits list)
    if temp_dict is not None:
        visits.append(temp_dict)

    data['visits'] = visits
    data['top_places'] = top_places
    data['places_url'] = html.hyperlink(urls.PLACES__URL, 'Manage My Places')
    data['places_recalc_url'] = \
        html.hyperlink(urls.VISIT_HISTORY_RECALC__URL, 'Re-analyze My Places')
    return sp(render_html(data), isin, screen_name)
def get_location_gen(usrid, view, time, length, display):
    """Generator yielding the raw-location page/export for *usrid*.

    usrid   : user id (bound as a DB query parameter)
    view    : None for the HTML listing, 'csv' or 'latlon' for exports
    time    : window start in epoch ms (digit string), 'all', or None
              for "one window back from now"
    length  : window length in ms (digit string); defaults to one day
    display : when not None, HTML chrome (nav links, <pre> wrapper) is
              emitted around the data
    """
    displaydefined = display is not None
    accuracythresh = '500'  # drop fixes with accuracy >= this value

    epoch = datetime.utcfromtimestamp(0)
    now = datetime.utcnow()
    nowdelta = now - epoch

    # window length: one day unless a numeric 'length' (ms) is supplied
    onedayms = 1000 * 60 * 60 * 24
    deltanum = onedayms
    if length and length.isdigit():
        deltanum = int(length)  # was long(); int() works on py2 and py3
    rangedelta = timedelta(milliseconds=deltanum)

    # default window start: one window back from "now", in epoch millis
    # (deliberately ignores sub-second remainder, as the original did)
    yesterdaydelta = nowdelta - rangedelta
    seconds = yesterdaydelta.seconds
    seconds += yesterdaydelta.days * 24 * 60 * 60
    millis = seconds * 1000
    time = str(millis) if time is None else time

    begtimestr = ''
    endtimestr = ''
    timephrase = ''
    # 'all' means no time filter; anything else must be epoch millis
    timenum = 0 if time is None or time == 'all' else int(time)
    if time is not None and time.isdigit():
        delta = timedelta(milliseconds=timenum)
        begtimestr = str(delta + epoch)
        endtimestr = str(delta + epoch + rangedelta)
        timephrase = \
            '''and (time > '%(b)s' and time < '%(e)s')''' % \
            {'b': begtimestr, 'e': endtimestr}

    # NOTE(review): the query is assembled by concatenation, but every
    # interpolated piece is either a constant or derived from isdigit()-
    # validated input, and usrid is passed as a bound parameter.
    selectphrase = 'loc.sess_id, latitude, longitude, altitude, accuracy, time'
    orderphrase = 'order by time asc'
    query = \
        ' select ' + selectphrase + \
        ''' from location as loc
            join _user_session as us on us.sess_id = loc.sess_id
            join _user as u on u.u_id = us.u_id
            where u.u_id = %(usrid)s and accuracy < ''' + \
        accuracythresh + ' ' + timephrase + ' ' + orderphrase
    # latlon view: distinct coordinate pairs only
    filterquery = \
        'with filter as (' + query + ''')
         select distinct on (latitude, longitude) latitude, longitude
         from filter'''
    cur = db.sendtodb(filterquery if view == 'latlon' else query,
                      {'usrid': usrid})

    if displaydefined:
        yield \
            html.hyperlink(urls.PLACES__URL, 'My Places') + ' | ' + \
            html.br() + html.br()
        if time is not None:
            # prev/next navigation, one window either way
            lastframe = html.urlencode(urls.LOCATION__URL, {
                loc_atom.DISPLAY: '',
                loc_atom.TIME: str(timenum - deltanum),
                loc_atom.LEN: length,
                loc_atom.EXPORT: view})
            nextframe = html.urlencode(urls.LOCATION__URL, {
                loc_atom.DISPLAY: '',
                loc_atom.TIME: str(timenum + deltanum),
                loc_atom.LEN: length,
                loc_atom.EXPORT: view})
            yield \
                '[' + html.hyperlink(lastframe, '<<<') + '] ' + \
                begtimestr + ' - ' + endtimestr + \
                ' [' + html.hyperlink(nextframe, '>>>') + ']' + html.br()
        # view/download variants of the current frame
        viewframe = html.urlencode(urls.LOCATION__URL, {
            loc_atom.TIME: time,
            loc_atom.LEN: length,
            loc_atom.DISPLAY: ''})
        viewcsvframe = html.urlencode(urls.LOCATION__URL, {
            loc_atom.EXPORT: 'csv',
            loc_atom.TIME: time,
            loc_atom.LEN: length,
            loc_atom.DISPLAY: ''})
        viewlatlonframe = html.urlencode(urls.LOCATION__URL, {
            loc_atom.EXPORT: 'latlon',
            loc_atom.TIME: time,
            loc_atom.LEN: length,
            loc_atom.DISPLAY: ''})
        downloadcsvframe = html.urlencode(urls.LOCATION__URL, {
            loc_atom.EXPORT: 'csv',
            loc_atom.TIME: time,
            loc_atom.LEN: length})
        downloadlatlonframe = html.urlencode(urls.LOCATION__URL, {
            loc_atom.EXPORT: 'latlon',
            loc_atom.TIME: time,
            loc_atom.LEN: length})
        downloadcsvall = html.urlencode(urls.LOCATION__URL, {
            loc_atom.EXPORT: 'csv',
            loc_atom.TIME: 'all'})
        links = [
            html.hyperlink(viewframe, 'view frame'),
            html.hyperlink(viewlatlonframe, 'view frame latlon'),
            html.hyperlink(viewcsvframe, 'view frame csv'),
            html.hyperlink(downloadcsvframe, 'download frame csv'),
            html.hyperlink(downloadlatlonframe, 'download frame latlon'),
            html.hyperlink(downloadcsvall, 'download all csv')
        ]
        yield " | ".join(links) + html.br() + html.br()

    if view == 'csv' or view == 'latlon':
        # (removed dead code: timepair/lenpair/exporturl were computed here
        #  and never used; the dict.items() + dict.items() concatenation
        #  was also py2-only and raises TypeError under py3)
        if displaydefined:
            yield '<pre>'
        if view == 'csv':
            yield 'sess,time,lat,lon,alt,acc\n'

    for row in cur:
        if view == 'csv':
            rowtime = str(row['time'])
            sess = str(row['sess_id'])
            lat = str(row['latitude'])
            lon = str(row['longitude'])
            alt = str(row['altitude'])
            acc = str(row['accuracy'])
            yield sess + ',' + rowtime + ',' + lat + ',' + lon + ',' + \
                alt + ',' + acc + '\n'
        elif view == 'latlon':
            lat = str(row['latitude'])
            lon = str(row['longitude'])
            yield lat + ',' + lon + '\n'
        else:
            # HTML listing: one line per fix with map + reverse-lookup links
            rowtime = str(row['time'])
            lat = str(row['latitude'])
            lon = str(row['longitude'])
            maplink = html.hyperlink(
                html.urlencode('http://maps.google.com/maps',
                               {'q': lat + ',' + lon}),
                'map')
            revlookuplink = html.hyperlink(
                html.urlencode(
                    'http://nominatim.openstreetmap.org/reverse',
                    {
                        'format': 'json',
                        'lat': lat,
                        'lon': lon,
                        'zoom': '18',
                        'addressdetails': '1'
                    }),
                'lookup')
            # (removed dead code: markurl/markform built a 'Mark' button
            #  that was never yielded; restore from history if needed)
            yield \
                rowtime + ': ' + \
                lat + ', ' + lon + \
                ' [' + maplink + '] | ' + \
                '[' + revlookuplink + '] ' + \
                html.br()

    if displaydefined and (view == 'csv' or view == 'latlon'):
        yield '</pre>'