def getRoutes(refresh):

    if refresh is False:
        # do we already have it in the datastore?
        api = db.GqlQuery('select * from StaticAPIs where method = :1',
                          utils.GETROUTES).get()
        if api is not None:
            logging.debug('---> datastore hit')
            return api.json

    logging.debug('---> datastore lookup starting!')

    # walk the RouteListing entities in batches of 1,000 and count every
    # unique route/direction pair we are tracking
    offset = 0
    q = RouteListing.all()
    routes = q.fetch(1000)
    hits = {}
    response_dict = {'status': 0, 'timestamp': utils.getLocalTimestamp()}
    while len(routes) > 0:
        offset += len(routes)
        for r in routes:
            # are we tracking this route/direction pair?
            key = r.route + ':' + r.direction
            hits[key] = hits.get(key, 0) + 1

        # get more routes
        routes = q.fetch(1000, offset)

    # collapse the route/direction pairs into a map of route -> direction labels
    routeMap = {}
    for k, v in hits.iteritems():
        routeID, direction = k.split(':')
        directionLabel = utils.getDirectionLabel(direction)
        logging.debug('adding direction %s to route %s' % (directionLabel, routeID))
        if routeID in routeMap:
            routeMap[routeID].append(directionLabel)
        else:
            routeMap[routeID] = [directionLabel]

    route_results = []
    for k, v in routeMap.iteritems():
        route_results.append({'routeID': k, 'directions': v})

    # add the populated route details to the response
    response_dict.update({'routes': route_results})

    json = simplejson.dumps(response_dict)

    # cache the serialized result in the datastore for subsequent calls
    static = StaticAPIs()
    static.method = utils.GETROUTES
    static.json = json
    static.put()

    return json
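# A minimal usage sketch (illustrative only; the handler class and query
# parameter below are assumptions, not part of this module). The JSON that
# getRoutes() produces looks roughly like:
#
#   {"status": 0,
#    "timestamp": "...",
#    "routes": [{"routeID": "15", "directions": ["eastbound", "westbound"]}]}
#
# so a webapp handler could serve it directly:
#
#   class RoutesHandler(webapp.RequestHandler):
#       def get(self):
#           refresh = self.request.get('refresh') == 'true'  # hypothetical param
#           self.response.headers['Content-Type'] = 'application/json'
#           self.response.out.write(getRoutes(refresh))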
def handle_result(rpc, stopID, routeID, sid, directionID):
    routes = None
    result = None
    try:
        # go fetch the webpage for this route/stop!
        result = rpc.get_result()
    except urlfetch.DownloadError:
        logging.error("API: Error loading page. route %s, stop %s" % (routeID, stopID))
        if result:
            logging.error("API: Error status: %s" % result.status_code)
            logging.error("API: Error header: %s" % result.headers)
            logging.error("API: Error content: %s" % result.content)

    directionLabel = utils.getDirectionLabel(directionID)
    arrival = "0"
    textBody = "unknown"
    valid = False
    if result is None or result.status_code != 200:
        logging.error("API: Exiting early: error fetching URL")
        textBody = "error " + routeID + " (missing data)"
    else:
        soup = BeautifulSoup(result.content)
        for slot in soup.html.body.findAll("a", "ada"):
            # only take the first time entry
            if slot["title"].split(":")[0].isdigit():
                arrival = slot["title"]
                textBody = arrival.replace("P.M.", "pm").replace("A.M.", "am")
                valid = True
                break

    # the original implementation leveraged the datastore to store and
    # ultimately sort the results when we got all of the routes back.
    # we'll continue to use the model definition, but never actually store
    # the results in the datastore.
    stop = BusStopAggregation()
    stop.stopID = stopID
    stop.routeID = routeID
    stop.sid = sid
    stop.arrivalTime = textBody
    stop.destination = directionLabel

    # turn the arrival time into absolute minutes so the results can be sorted
    if valid:
        hours = int(arrival.split(":")[0])
        if arrival.find("P.M.") > 0 and hours < 12:
            hours += 12
        minutes = int(arrival.split(":")[1].split()[0])
        stop.time = (hours * 60) + minutes
    else:
        # no parseable arrival time for this entry; fall back to a sentinel
        stop.time = -1

    stop.text = textBody + " toward %s" % directionLabel

    # instead of shoving this in the datastore, we're going to shove
    # it in a local variable and retrieve it with the sid later
    # old implementation --> stop.put()
    insert_result(sid, stop)

    # decrement the in-flight fetch counter; the callback that brings it to
    # zero is the one that glues all of the messages together
    counter = memcache.decr(sid)
    if counter == 0:
        # put them all together
        memcache.delete(sid)
        # routes = aggregateAsynchResults(sid)

    return routes
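# Sketch of the fan-out side that handle_result() expects (function and
# variable names here are illustrative assumptions, not the actual
# implementation in this project): one asynchronous urlfetch RPC per
# route/stop pair, plus a memcache counter keyed by sid so the callback that
# decrements it to zero knows the whole batch has finished.
#
#   def spawn_fetches(sid, pairs):
#       # pairs: iterable of (stopID, routeID, directionID, url) tuples
#       memcache.set(sid, len(pairs))            # one decrement per result
#       rpcs = []
#       for stopID, routeID, directionID, url in pairs:
#           rpc = urlfetch.create_rpc(deadline=10)
#           rpc.callback = lambda rpc=rpc, s=stopID, r=routeID, d=directionID: \
#               handle_result(rpc, s, r, sid, d)
#           urlfetch.make_fetch_call(rpc, url)
#           rpcs.append(rpc)
#       # wait for every RPC so all callbacks (and thus handle_result) run
#       for rpc in rpcs:
#           rpc.wait()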