def country_report(country_code):
    """Return a JSON overview report for the given country.

    The report holds the country's details, a medicine-price overview and
    the list of approved procurements (newest first).  The serialized
    report is cached under ``country_overview_<CODE>``.

    Raises:
        ApiException: 400 if reports are unavailable for the country,
            401 if the current user is anonymous or inactive.
    """
    country_code = country_code.upper()
    if not available_countries.get(country_code):
        raise ApiException(400, "Reports are not available for the country that you specified.")
    if g.user is None or not g.user.is_active():
        raise ApiException(401, "You need to be logged-in in order to access this resource.")
    country = Country.query.filter_by(code=country_code).one()
    report_json = cache.retrieve('country_overview_' + country.code)
    if report_json:
        logger.debug('loading country overview from cache')
    else:
        procurements = Procurement.query.filter_by(country=country) \
            .filter_by(approved=True) \
            .order_by(Procurement.start_date.desc(), Procurement.end_date.desc()) \
            .all()
        report = {
            'country': country.to_dict(),
            'medicines': calculate_country_overview(country),
            # include_related=True pulls nested objects into each dict
            'procurements': [p.to_dict(include_related=True) for p in procurements],
        }
        report_json = json.dumps(report, cls=serializers.CustomEncoder)
        cache.store('country_overview_' + country.code, report_json)
    return send_api_response(report_json)
def gotprices(self, request):
    """Merge received price data into the cached trip list and respond.

    Decodes the JSON body of *request*, attaches the matching price
    record to each stored trip (matched on ``journeyId``), caches each
    enriched trip under the "sj" namespace, and finally answers with the
    cached trip for the current search (or an error).
    """
    try:
        price = tornado.escape.json_decode(request.body)
    except ValueError:
        # json_decode raises ValueError on malformed/empty JSON;
        # the original used a bare `except:` here.
        self.returnerror("No trip price date receaved")
        return None
    price_rows = price["data"]
    trips = self.trips["data"]["rows"]
    for trip in trips:
        for price_row in price_rows:
            if price_row["journeyId"] != trip["id"]:
                continue
            trip["pricedata"] = price_row
            trip["departureDate"] = self.getdate
            trip["departureLocation"] = self.getfrom
            trip["arrivalLocation"] = self.getto
            data = Empty()
            data.getdate = self.getdate
            data.getfrom = self.getfrom
            data.gettime = trip["departureTime"]
            data.getto = self.getto
            data.gettotime = trip["arrivalTime"]
            cache.store("sj", data, trip)
            break  # at most one price per trip
    try:
        self.returnrequest(cache.get("sj", self))
    except Exception:
        # cache.get raises when no cached trip matches this search;
        # narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # are no longer swallowed.
        self.returnerror("Trip not found in search")
def autocomplete(query):
    """
    Return the name and medicine_id of each medicine that matches the
    given query.
    """
    medicine_list_json = cache.retrieve('medicine_list')
    if medicine_list_json:
        logger.debug('calculating autocomplete from cache')
        medicine_list = json.loads(medicine_list_json)
    else:
        medicine_list = calculate_autocomplete()
        cache.store('medicine_list',
                    json.dumps(medicine_list, cls=serializers.CustomEncoder))

    matches = []
    needle = query.lower()
    for medicine in medicine_list:
        position = medicine['name'].lower().find(needle)
        if position == -1:
            continue
        matches.append({
            'medicine_id': medicine['medicine_id'],
            'name': medicine['name'],
            'index': position,
        })
        # stop scanning once we clearly have more than enough candidates
        if len(matches) > 20:
            break

    # best (earliest) match position first, then cap the response at 10
    matches.sort(key=itemgetter('index'))
    matches = matches[:10]
    return send_api_response(json.dumps(matches))
def country_ranking():
    """Return the JSON country ranking, computing and caching it on a miss."""
    ranking_json = cache.retrieve('country_ranking')
    if not ranking_json:
        ranking_json = json.dumps(calculate_country_rankings())
        cache.store('country_ranking', ranking_json)
    else:
        logger.debug('loading country ranking from cache')
    return send_api_response(ranking_json)
def overview():
    """
    Give a broad overview of the size of -, and recent activity related to,
    the database.
    """
    overview_json = cache.retrieve('db_overview')
    if overview_json:
        logger.debug("DB overview served from cache")
        return send_api_response(overview_json)
    # Serialize exactly once, with the custom encoder.  The original
    # cached json.dumps(tmp, cls=serializers.CustomEncoder) but returned
    # a plain json.dumps(tmp), so a cache miss could produce different
    # output than later cache hits (or fail on types only the custom
    # encoder handles).
    overview_json = json.dumps(calculate_db_overview(), cls=serializers.CustomEncoder)
    cache.store('db_overview', overview_json)
    return send_api_response(overview_json)
def request(self, request_body, verbose=1, update_cache=False):
    """POST *request_body* to ``self.uri`` and return the decoded JSON reply.

    Replies are cached keyed on ``uri + body``; the cached reply is
    reused unless *update_cache* is true.

    NOTE(review): *verbose* is accepted for interface compatibility but
    is never used in this body.
    """
    rsp = cache.load(self.uri + request_body)
    # `is None` instead of `== None`: equality can be overridden and is
    # not the identity test intended here.
    if rsp is None or update_cache:
        req = urllib2.Request(url=self.uri)
        req.add_data(data=request_body)
        headers = [
            ('Content-Type', 'application/json; charset=utf-8'),
            ('Accept-Encoding', 'text'),
            ('Accept', 'application/json,application/json,application/jsonrequest')]
        for name, value in headers:
            req.add_header(name, value)
        rsp = urllib2.urlopen(req).read()
        cache.store(self.uri + request_body, rsp)
    return simplejson.loads(rsp)
def get_item(url=None, path=None):
    """Fetch and decode a JSON item from *url* (or local *path*), via the cache."""
    if path:
        url = path2url(path)

    cached = cache.load(url)
    if cached:
        # cached value is the JSON-dumped Response tuple
        response = Response(*json.loads(cached))
    else:
        response = _get_request(url)
        cache.store(url, json.dumps(response))

    if response.ok:
        return json.loads(response.content)

    logging.error('Status {} on {}'.format(response.status, response.url))
    return None
def actions(self):
    '''
    Construct a dict of dicts: actions are keys and
    dictionary of attributes/values are values. Cached too.
    '''
    # cache key is unique per RA (class:provider:type)
    id = "ra_actions-%s" % self.ra_string()
    if cache.is_cached(id):
        return cache.retrieve(id)
    # need the parsed meta-data XML to read the actions from
    if self.mk_ra_node() is None:
        return None
    d = {}
    for c in self.ra_elem.xpath("//actions/action"):
        name = c.get("name")
        if not name or name in self.skip_ops:
            continue
        if name == "monitor":
            # disambiguate monitor ops (e.g. by role/interval) so that
            # multiple monitor actions don't collide on one dict key
            name = monitor_name_node(c)
        d[name] = {}
        for a in c.attrib.keys():
            if a in self.skip_op_attr:
                continue
            v = c.get(a)
            if v:
                d[name][a] = v
    # add monitor ops without role, if they don't already
    # exist
    d2 = {}
    for op in d.keys():
        if re.match("monitor_[^0-9]", op):
            norole_op = re.sub(r'monitor_[^0-9_]+_(.*)', r'monitor_\1', op)
            if not norole_op in d:
                d2[norole_op] = d[op]
    d.update(d2)
    # cache.store presumably returns the stored value -- the other
    # cached helpers in this file rely on the same behavior
    return cache.store(id, d)
def params(self):
    '''
    Construct a dict of dicts: parameters are keys and
    dictionary of attributes/values are values. Cached too.
    '''
    # renamed from `id`/`type` to stop shadowing the builtins
    cache_id = "ra_params-%s" % self.ra_string()
    if cache.is_cached(cache_id):
        return cache.retrieve(cache_id)
    # need the parsed meta-data XML to read the parameters from
    if self.mk_ra_node() is None:
        return None
    d = {}
    for c in self.ra_elem.xpath("//parameters/parameter"):
        name = c.get("name")
        if not name:
            continue
        param_type, default = self.param_type_default(c)
        d[name] = {
            "required": c.get("required"),
            "unique": c.get("unique"),
            "type": param_type,
            "default": default,
        }
    return cache.store(cache_id, d)
def _response(parsed_query, query, fast_mode=False):
    """Create response text based on `parsed_query` and `query` data.

    If `fast_mode` is True, process only requests that can be handled
    very fast (cached and static files).
    """
    answer = None
    # the signature identifies the request for caching purposes
    cache_signature = cache.get_signature(
        parsed_query["user_agent"],
        parsed_query["request_url"],
        parsed_query["ip_addr"],
        parsed_query["lang"])
    answer = cache.get(cache_signature)

    # static help/about pages override any cached answer
    if parsed_query['orig_location'] in PLAIN_TEXT_PAGES:
        answer = show_text_file(parsed_query['orig_location'], parsed_query['lang'])
        if parsed_query['html_output']:
            answer = render_template('index.html', body=answer)

    # in fast mode this returns even when answer is falsy
    if answer or fast_mode:
        return answer

    # at this point, we could not handle the query fast,
    # so we handle it with all available logic
    loc = (parsed_query['orig_location'] or "").lower()
    if parsed_query.get("view"):
        if not parsed_query.get("location"):
            parsed_query["location"] = loc
        output = wttr_line(query, parsed_query)
    elif loc == 'moon' or loc.startswith('moon@'):
        output = get_moon(parsed_query)
    else:
        output = get_wetter(parsed_query)

    if parsed_query.get('png_filename'):
        if parsed_query.get("view") != "v3":
            # originally it was just a usual function call,
            # but it was a blocking call, so it was moved
            # to separate threads:
            #
            # output = fmt.png.render_ansi(
            #     output, options=parsed_query)
            result = TASKS.spawn(fmt.png.render_ansi,
                                 cache._update_answer(output),
                                 options=parsed_query)
            output = result.get()
    else:
        # append the "follow me" footer / buttons unless disabled
        if query.get('days', '3') != '0' \
            and not query.get('no-follow-line') \
            and ((parsed_query.get("view") or "v2")[:2] in ["v2", "v3"]):
            if parsed_query['html_output']:
                output = add_buttons(output)
            else:
                message = get_message('FOLLOW_ME', parsed_query['lang'])
                if parsed_query.get('no-terminal', False):
                    message = remove_ansi(message)
                output += '\n' + message + '\n'

    # cache.store presumably returns the stored answer -- verify
    return cache.store(cache_signature, output)
def ra_providers(ra_type, ra_class="ocf"):
    'List of providers for a class:type.'
    cache_id = "ra_providers-%s-%s" % (ra_class, ra_type)
    if cache.is_cached(cache_id):
        return cache.retrieve(cache_id)
    providers = sorted(ra_if().providers(ra_type, ra_class))
    return cache.store(cache_id, providers)
def ra_classes():
    '''
    List of RA classes.
    '''
    if cache.is_cached("ra_classes"):
        return cache.retrieve("ra_classes")
    classes = sorted(ra_if().classes())
    return cache.store("ra_classes", classes)
def speak(text):
    """Synthesize and play a speech

    Args:
        text (str): Text to synthesize

    Returns:
        None.  Audio is played as a side effect; the synthesized bytes
        are cached keyed on the cleaned input text.
    """
    # Preprocess text
    text = Text.clean(text)

    # Play cached speech if possible
    cached_speech = cache.retrieve(text)
    if cached_speech:
        CACHE_REPORT((text,))
        play(cached_speech[1])
        return

    print("Synthesizing speech")
    segments = Text.paginate(text, conf.page_limit)

    # If speech is short, flush it to disk before play
    # (speed up retries for interrupted speeches)
    if len(segments) == 1:
        short_text = segments[0][1]
        SHORT_REPORT((short_text,))
        buffer = speech.synth(short_text)
        # store under the cleaned full text -- that is the key
        # cache.retrieve() is called with above
        mp3 = cache.store(text, buffer)
        play(mp3)
        return

    buffers = []
    for num, segment_text in segments:
        PROGRESS_REPORT((num, segment_text))
        stream = speech.synth(segment_text)
        buffers.append(stream)
        play(stream)
    buffer = b"".join(buffers)
    # BUG FIX: the original loop variable shadowed `text`, so the joined
    # speech was stored under the *last segment's* text and long speeches
    # could never get a cache hit.  Store under the cleaned full text.
    cache.store(text, buffer)
def ra_providers_all(ra_class="ocf"):
    '''
    List of providers for a class.
    '''
    cache_id = "ra_providers_all-%s" % ra_class
    if cache.is_cached(cache_id):
        return cache.retrieve(cache_id)
    resource_dir = os.path.join(os.environ["OCF_ROOT"], "resource.d")
    if not os.path.isdir(resource_dir):
        return []
    providers = sorted(entry for entry in os.listdir(resource_dir)
                       if os.path.isdir(os.path.join(resource_dir, entry)))
    return cache.store(cache_id, providers)
def ra_providers_all(ra_class="ocf"):
    '''
    List of providers for a class.
    '''
    id = "ra_providers_all-%s" % ra_class
    if cache.is_cached(id):
        return cache.retrieve(id)
    # Use os.path.join instead of "%s/%s" formatting, stop shadowing the
    # `dir` builtin, and guard against a missing resource.d directory --
    # the sibling implementations return [] rather than raising OSError.
    ocf = os.path.join(os.environ["OCF_ROOT"], "resource.d")
    if os.path.isdir(ocf):
        return cache.store(id, sorted([s for s in os.listdir(ocf)
                                       if os.path.isdir(os.path.join(ocf, s))]))
    return []
def meta(self):
    '''
    RA meta-data as raw xml.
    '''
    sid = "ra_meta-%s" % self.ra_string()
    if cache.is_cached(sid):
        return cache.retrieve(sid)
    # some RA classes get their meta-data from a helper program
    if self.ra_class in constants.meta_progs:
        raw_xml = prog_meta(self.ra_class)
    else:
        raw_xml = ra_if().meta(self.ra_class, self.ra_type, self.ra_provider)
    if not raw_xml:
        return None
    self.debug("read and cached meta-data")
    return cache.store(sid, raw_xml)
def ra_providers_all(ra_class="ocf"):
    '''
    List of providers for a class.
    '''
    key = "ra_providers_all-%s" % ra_class
    if cache.is_cached(key):
        return cache.retrieve(key)
    base = os.path.join(os.environ["OCF_ROOT"], "resource.d")
    if not os.path.isdir(base):
        return []
    # each provider is a subdirectory of OCF_ROOT/resource.d
    subdirs = [name for name in os.listdir(base)
               if os.path.isdir(os.path.join(base, name))]
    subdirs.sort()
    return cache.store(key, subdirs)
def ra_types(ra_class="ocf", ra_provider=""):
    '''
    List of RA type for a class.
    '''
    if not ra_class:
        ra_class = "ocf"
    id = "ra_types-%s-%s" % (ra_class, ra_provider)
    if cache.is_cached(id):
        return cache.retrieve(id)
    # Use a set instead of shadowing the `list` builtin; the original
    # `ra not in list` check was O(n) per item.  sorted(set) yields the
    # same sorted, duplicate-free list.
    types = set()
    for ra in ra_if().types(ra_class):
        if not ra_provider or ra_provider in ra_providers(ra, ra_class):
            types.add(ra)
    return cache.store(id, sorted(types))
def searchdone(self, response):
    """Handle a finished trip search.

    Caches every trip from the JSON *response* body under the "ot"
    namespace, then answers the HTTP request with the price of the trip
    matching the current search arguments (or a JSON error).
    """
    global cache
    trips = json.loads(response.body)
    for trip in trips:
        data = Empty()
        # 'Departure'/'Arrival' look like ISO timestamps: date is the
        # first 10 chars, HH:MM is the [-8:-3] slice -- confirm format
        data.getdate = trip['Departure'][:10]
        data.getfrom = self.getfrom
        data.gettime = trip['Departure'][-8:-3]
        data.getto = self.getto
        data.gettotime = trip['Arrival'][-8:-3]
        # the original assigned cache.store(...)'s return value to
        # `price`, which was immediately overwritten below -- dropped
        cache.store('ot', data, trip)
    try:
        price = cache.get('ot', self)
        outdata = {"travelerAge": 35,
                   "travelerIsStudent": False,
                   "sellername": "Östgötatr.",
                   "price": "",
                   "currency": "SEK",
                   "validPrice": True,
                   "url": "http://www.ostgotatrafiken.se"}
        outdata['departureTime'] = self.get_argument('departureTime')
        outdata['arrivalTime'] = self.get_argument('arrivalTime')
        outdata['date'] = self.get_argument('date')
        outdata['from'] = self.get_argument('from')
        outdata['to'] = self.get_argument('to')
        outdata['price'] = price['Prices'][3]['Price']
        outdata['validPrice'] = 1
        self.write(outdata)
        self.finish()
        return
    except Exception:
        # narrowed from a bare `except:`; keep the best-effort fallback
        # but stop swallowing SystemExit/KeyboardInterrupt
        self.write({'error': 'No trip found'})
        self.finish()
def wttr(location, request):
    """ Main rendering function, it processes incoming weather queries.
    Depending on user agent it returns output in HTML or ANSI format.

    Incoming data:
        request.args
        request.headers
        request.remote_addr
        request.referrer
        request.query_string
    """

    def _wrap_response(response_text, html_output):
        # pick the mimetype matching the negotiated output format
        response = make_response(response_text)
        response.mimetype = 'text/html' if html_output else 'text/plain'
        return response

    if is_location_blocked(location):
        return ""

    ip_addr = client_ip_address(request)
    try:
        LIMITS.check_ip(ip_addr)
    except RuntimeError as exception:
        # rate-limit exceeded: the message itself is the response body
        return str(exception)

    # a trailing ".png" requests a rendered image of the report
    png_filename = None
    if location is not None and location.lower().endswith(".png"):
        png_filename = location
        location = location[:-4]

    lang, fmt = get_answer_language_and_format(request)
    query = parse_query.parse_query(request.args)
    html_output = get_output_format(request, query)
    user_agent = request.headers.get('User-Agent', '').lower()

    # generating cache signature
    cache_signature = cache.get_signature(user_agent, request.url, ip_addr, lang)

    answer = cache.get(cache_signature)
    if answer:
        return _wrap_response(answer, html_output)

    # static help/about pages short-circuit the weather lookup
    if location in PLAIN_TEXT_PAGES:
        help_ = show_text_file(location, lang)
        if html_output:
            return _wrap_response(render_template('index.html', body=help_),
                                  html_output)
        return _wrap_response(help_, html_output)

    # "a:b:c" locations cycle through the alternatives over time
    if location and ':' in location:
        location = cyclic_location_selection(location, query.get('period', 1))

    orig_location = location
    if not png_filename:
        location, override_location_name, full_address, country, query_source_location = \
            location_processing(location, ip_addr)

        us_ip = query_source_location[
            1] == 'United States' and 'slack' not in user_agent
        query = parse_query.metric_or_imperial(query, lang, us_ip=us_ip)

        # logging query
        orig_location_utf8 = (orig_location or "").encode('utf-8')
        location_utf8 = location.encode('utf-8')
        use_imperial = query.get('use_imperial', False)
        log(" ".join(
            map(str, [
                ip_addr, user_agent, orig_location_utf8,
                location_utf8, use_imperial, lang
            ])))

    if country and location != NOT_FOUND_LOCATION:
        location = "%s,%s" % (location, country)

    # We are ready to return the answer
    try:
        # one-line "format" answers (e.g. ?format=3) take priority
        if fmt or 'format' in query:
            response_text = wttr_line(location, override_location_name,
                                      full_address, query, lang, fmt)
            fmt = fmt or query.get('format')
            # cache.store presumably returns the stored text -- verify
            response_text = cache.store(cache_signature, response_text)
            return _wrap_response(response_text, html_output)

        if png_filename:
            options = {'ip_addr': ip_addr, 'lang': lang, 'location': location}
            options.update(query)
            cached_png_file = wttrin_png.make_wttr_in_png(png_filename,
                                                          options=options)
            response = make_response(
                send_file(cached_png_file,
                          attachment_filename=png_filename,
                          mimetype='image/png'))
            for key, value in {
                    'Cache-Control': 'no-cache, no-store, must-revalidate',
                    'Pragma': 'no-cache',
                    'Expires': '0',
            }.items():
                response.headers[key] = value

            # Trying to disable github caching
            return response

        if orig_location and (orig_location.lower() == 'moon'
                              or orig_location.lower().startswith('moon@')):
            output = get_moon(orig_location,
                              html=html_output,
                              lang=lang,
                              query=query)
        else:
            output = get_wetter(
                location,
                ip_addr,
                html=html_output,
                lang=lang,
                query=query,
                location_name=override_location_name,
                full_address=full_address,
                url=request.url,
            )

        # append the "follow me" footer / buttons unless disabled
        if query.get('days', '3') != '0' and not query.get('no-follow-line'):
            if html_output:
                output = add_buttons(output)
            else:
                #output += '\n' + get_message('NEW_FEATURE', lang).encode('utf-8')
                output += '\n' + get_message('FOLLOW_ME', lang).encode('utf-8') + '\n'
        return _wrap_response(output, html_output)
    except Exception as exception:
        # if 'Malformed response' in str(exception) \
        #         or 'API key has reached calls per day allowed limit' in str(exception):
        if html_output:
            return _wrap_response(MALFORMED_RESPONSE_HTML_PAGE, html_output)
        return _wrap_response(
            get_message('CAPACITY_LIMIT_REACHED', lang).encode('utf-8'),
            html_output)
def log_event(description, user):
    """Record *user*'s email in the cache, keyed on the cleaned description."""
    cache.store(clean_description(description), user.email)
IGNORE CASE; """ user_repos = defaultdict(list) # Trying to lookup in cache newusers = cache.lookup(users, user_repos) print "{} users found in cache".format(len(users) - len(newusers)) sc = utils.SimpleClient() query = QUERY.format(','.join(["'{}'".format(x) for x in newusers])) repos = set() if len(newusers) != 0 : for x in sc.runSyncQuery(query): repos.add(x[0]) user_repos[x[1]].append(x[0]) # Store users in cache cache.store(user_repos) def measure_history(u1, u2): return len(set(u1).intersection(set(u2))) sys.stdout.write("{}\t".format(' '.ljust(10))) for u in users: sys.stdout.write("| {} ".format(u.ljust(5)[:5])) sys.stdout.write("\n") data = {} data["users"] = users data["repos"] = user_repos data["timestamp"] = cache.timestamp() l = len(users) matrix = [[0]*l for i in range(l)]