def networks(type="hs"):
    file_name = "network_{0}.json.gz".format(type)
    cached_q = cached_query(file_name)
    if cached_q:
        ret = make_response(cached_q)
    else:
        path = datavivadir + "/static/json/networks/{0}".format(file_name)
        # read the pre-gzipped file as bytes and cache it for next time
        gzip_file = open(path, "rb").read()
        cached_query(file_name, gzip_file)
        ret = make_response(gzip_file)
    ret.headers['Content-Encoding'] = 'gzip'
    ret.headers['Content-Length'] = str(len(ret.data))
    return ret
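# networks() above (and coords() below) call cached_query() with one argument
# to read and two arguments to write. A minimal sketch of that contract,
# assuming a werkzeug SimpleCache backend (the real helper in this repo may
# sit on Redis or memcached instead); the *_sketch names are hypothetical and
# only here for illustration:
from werkzeug.contrib.cache import SimpleCache

_cache_sketch = SimpleCache()

def cached_query_sketch(key, value=None):
    if value is None:
        # read: return the cached gzipped bytes, or None on a miss
        return _cache_sketch.get(key)
    # write: store the gzipped payload under the key
    _cache_sketch.set(key, value)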
def coords(id="all"):
    if id == "all":
        file_name = "bra_states.json.gz"
    else:
        file_name = "{0}_munic.json.gz".format(id)
    cached_q = cached_query(file_name)
    if cached_q:
        ret = make_response(cached_q)
    else:
        path = datavivadir + "/static/json/coords/{0}".format(file_name)
        # read the pre-gzipped file as bytes and cache it for next time
        gzip_file = open(path, "rb").read()
        cached_query(file_name, gzip_file)
        ret = make_response(gzip_file)
    ret.headers['Content-Encoding'] = 'gzip'
    ret.headers['Content-Length'] = str(len(ret.data))
    return ret
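# networks() and coords() share the same cache-or-disk gzip passthrough
# pattern, so the common path could live in one helper. A hedged refactor
# sketch (the helper name is hypothetical, not part of the existing API):
def _gzipped_static_json(file_name, subdir):
    cached_q = cached_query(file_name)
    if cached_q:
        ret = make_response(cached_q)
    else:
        path = datavivadir + "/static/json/{0}/{1}".format(subdir, file_name)
        gzip_file = open(path, "rb").read()
        cached_query(file_name, gzip_file)
        ret = make_response(gzip_file)
    ret.headers['Content-Encoding'] = 'gzip'
    ret.headers['Content-Length'] = str(len(ret.data))
    return ret

# with which networks(), for example, would reduce to:
#     return _gzipped_static_json("network_{0}.json.gz".format(type), "networks")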
def attrs(attr="bra", Attr_id=None):
    Attr = globals()[attr.title()]
    Attr_weight_mergeid = "{0}_id".format(attr)

    # each attr type is weighted by a different yearly stat: population for
    # geographies, employment for industries/occupations, export value for
    # products/countries
    if attr == "bra":
        Attr_weight_tbl = Yb
        Attr_weight_col = "population"
    elif attr == "isic":
        Attr_weight_tbl = Yi
        Attr_weight_col = "num_emp"
    elif attr == "cbo":
        Attr_weight_tbl = Yo
        Attr_weight_col = "num_emp"
    elif attr == "hs":
        Attr_weight_tbl = Yp
        Attr_weight_col = "val_usd"
    elif attr == "wld":
        Attr_weight_tbl = Yw
        Attr_weight_col = "val_usd"

    # the ID lengths that constitute each attr's valid nesting levels
    depths = {
        "bra": [2, 4, 7, 8],
        "isic": [1, 3, 5],
        "cbo": [1, 2, 4],
        "hs": [2, 4, 6],
        "wld": [2, 5],
    }

    depth = request.args.get('depth', None)
    order = request.args.get('order', None)
    offset = request.args.get('offset', None)
    limit = request.args.get('limit', None)
    if offset:
        offset = float(offset)
        limit = limit or 50
    lang = request.args.get('lang', None) or g.locale
    ret = {}

    dataset = "rais"
    if Attr == Cbo or Attr == Hs:
        dataset = "secex"
    latest_year = __latest_year__[dataset]

    cache_id = request.path + lang
    if depth:
        cache_id = cache_id + "/" + depth

    # first, check whether this query has already been cached
    cached_q = cached_query(cache_id)
    if cached_q and limit is None:
        ret = make_response(cached_q)
        ret.headers['Content-Encoding'] = 'gzip'
        ret.headers['Content-Length'] = str(len(ret.data))
        return ret

    # if an ID is supplied, only return matches for it
    if Attr_id:
        # '.show.' indicates that we are looking for attrs nested under a
        # parent ID at a specific nesting level
        if ".show." in Attr_id:
            this_attr, ret["nesting_level"] = Attr_id.split(".show.")
            # filter the table by parent ID and requested nesting level
            attrs = Attr.query \
                .filter(Attr.id.startswith(this_attr)) \
                .filter(func.char_length(Attr.id) == ret["nesting_level"]) \
                .all()
        # 'show.' indicates that we are looking for a specific nesting level
        elif "show." in Attr_id:
            ret["nesting_level"] = Attr_id.split(".")[1]
            # filter the table by the requested nesting level
            attrs = Attr.query \
                .filter(func.char_length(Attr.id) == ret["nesting_level"]) \
                .all()
        # a '.' here means we want all attrs within a certain distance
        elif "." in Attr_id:
            this_attr, distance = Attr_id.split(".")
            this_attr = Attr.query.get_or_404(this_attr)
            attrs = this_attr.get_neighbors(distance)
        else:
            attrs = [Attr.query.get_or_404(Attr_id)]
        ret["data"] = [fix_name(a.serialize(), lang) for a in attrs]

    # an ID/filter was not provided, so return the full listing
    else:
        query = db.session.query(Attr, Attr_weight_tbl) \
            .outerjoin(Attr_weight_tbl, and_(
                getattr(Attr_weight_tbl, "{0}_id".format(attr)) == Attr.id,
                Attr_weight_tbl.year == latest_year))

        if depth:
            query = query.filter(func.char_length(Attr.id) == depth)
        else:
            query = query.filter(func.char_length(Attr.id).in_(depths[attr]))

        if order:
            direction = "asc"
            if "." in order:
                o, direction = order.split(".")
            else:
                o = order
            if o == "name":
                o = "name_{0}".format(lang)
            # the weight column lives on the weight table, everything else on Attr
            if o == Attr_weight_col:
                order_table = Attr_weight_tbl
            else:
                order_table = Attr
            if direction == "asc":
                query = query.order_by(asc(getattr(order_table, o)))
            elif direction == "desc":
                query = query.order_by(desc(getattr(order_table, o)))

        if limit:
            query = query.limit(limit).offset(offset)

        attrs_all = query.all()

        # for unfiltered requests, also work out which attrs actually have
        # data available in the DB
        attrs_w_data = None
        if depth is None and limit is None:
            attrs_w_data = db.session.query(Attr, Attr_weight_tbl) \
                .filter(getattr(Attr_weight_tbl, Attr_weight_mergeid) == Attr.id) \
                .group_by(Attr.id)
            attrs_w_data = [a[0].id for a in attrs_w_data]

        attrs = []
        for i, a in enumerate(attrs_all):
            b = a[0].serialize()
            # attach the weight value, defaulting to 0 when no row joined
            b[Attr_weight_col] = a[1].serialize()[Attr_weight_col] if a[1] else 0
            a = b
            if attrs_w_data:
                a["available"] = a["id"] in attrs_w_data
            # Minas Gerais municipalities (8-char IDs prefixed 'mg') also get
            # their planning region (plr) ID attached
            if Attr_weight_col == "population" and len(a["id"]) == 8 and a["id"][:2] == "mg":
                plr = Bra.query.get_or_404(a["id"]).pr2.first()
                if plr:
                    a["plr"] = plr.id
            if order:
                # guard against a missing offset when only 'order' is given
                a["rank"] = int(i + (offset or 0) + 1)
            attrs.append(fix_name(a, lang))
        ret["data"] = attrs

    ret = jsonify(ret)
    ret.data = gzip_data(ret.data)

    if limit is None and cached_q is None:
        cached_query(cache_id, ret.data)

    ret.headers['Content-Encoding'] = 'gzip'
    ret.headers['Content-Length'] = str(len(ret.data))
    return ret
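# The Attr_id conventions parsed above support four lookup styles; a few
# illustrative request paths (the '/attrs/' URL prefix is an assumption
# based on how these views appear to be routed, and the IDs are examples
# only):
#
#   /attrs/bra/mg.show.8    -> all attrs under 'mg' at nesting level 8
#   /attrs/bra/show.2       -> all attrs at nesting level 2
#   /attrs/wld/nausa.10     -> all attrs within distance 10 of 'nausa'
#   /attrs/hs/010101        -> the single attr with that exact ID
#
# Without an ID, the query parameters drive the listing, e.g.:
#
#   /attrs/hs/?depth=6&order=val_usd.desc&limit=10&lang=pt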
def embed(app_name=None, dataset=None, bra_id=None, filter1=None,
          filter2=None, output=None):
    lang = request.args.get('lang', None) or g.locale

    if request.is_xhr:
        cache_id = request.path + lang
        cached_q = cached_query(cache_id)
        if cached_q:
            ret = make_response(cached_q)
            ret.headers['Content-Encoding'] = 'gzip'
            ret.headers['Content-Length'] = str(len(ret.data))
            return ret

    '''Since the "builds" are held in the database with placeholders for
    attributes, i.e. <cbo>, <hs>, <isic>, we need to convert the IDs given
    in the URL to these placeholders, i.e.

        - a0111  = <isic>
        - 010101 = <hs>
        - all    = all
    '''
    build_filter1 = filter1
    if dataset == "rais" and build_filter1 != "all":
        build_filter1 = "<isic>"
    if dataset == "secex" and build_filter1 != "all":
        build_filter1 = "<hs>"

    build_filter2 = filter2
    if dataset == "rais" and build_filter2 != "all":
        build_filter2 = "<cbo>"
    if dataset == "secex" and build_filter2 != "all":
        build_filter2 = "<wld>"

    '''This is an instance of the Build class for the selected app,
    determined by the combination of app_type, dataset, filters and output.
    '''
    current_app = App.query.filter_by(type=app_name).first_or_404()
    current_build = Build.query.filter_by(app=current_app, dataset=dataset,
                                          filter1=build_filter1,
                                          filter2=build_filter2,
                                          output=output).first_or_404()
    current_build.set_filter1(filter1)
    current_build.set_filter2(filter2)
    current_build.set_bra(bra_id)

    '''Get the recommended app list to pass along with the data.'''
    filler_bra = bra_id
    filler1 = filter1
    filler2 = filter2
    if output == "isic" or output == "hs":
        filler1 = "filler"
    elif output == "cbo" or output == "wld":
        filler2 = "filler"
    elif output == "bra":
        filler_bra = "filler"
    recs = recommend(app_name=app_name, dataset=dataset, bra_id=filler_bra,
                     filter1=filler1, filter2=filler2, output=output)

    '''Every possible build, required by the embed page for populating the
    build dropdown.
    '''
    all_builds = Build.query.all()
    for build in all_builds:
        build.set_filter1(filter1)
        build.set_filter2(filter2)
        build.set_bra(bra_id)

    '''Get URL query parameters from the request.args object to return
    to the view.
    '''
    global_vars = {x[0]: x[1] for x in request.args.items()}
    if "controls" not in global_vars:
        global_vars["controls"] = "true"

    '''If the user is logged in, check whether they have starred this app.'''
    starred = 0
    app_id = "/".join([app_name, dataset, bra_id, filter1, filter2, output])
    if g.user and g.user.is_authenticated():
        is_starred = Starred.query.filter_by(user=g.user, app_id=app_id).first()
        starred = 1 if is_starred else -1

    if request.is_xhr:
        ret = jsonify({
            "current_build": current_build.serialize(),
            "all_builds": [b.serialize() for b in all_builds],
            "recommendations": json.loads(recs.data),
            "starred": starred
        })
        ret.data = gzip_data(ret.data)
        ret.headers['Content-Encoding'] = 'gzip'
        ret.headers['Content-Length'] = str(len(ret.data))
        if cached_q is None:
            cached_query(cache_id, ret.data)
    else:
        ret = make_response(render_template("apps/embed.html",
                                            all_builds=all_builds,
                                            starred=starred,
                                            form=DownloadForm(),
                                            current_build=current_build,
                                            global_vars=json.dumps(global_vars)))

    ret.headers.add('Last-Modified', datetime.now())
    ret.headers.add('Cache-Control', 'no-store, no-cache, must-revalidate, '
                    'post-check=0, pre-check=0')
    ret.headers.add('Pragma', 'no-cache')

    return ret
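# attrs() and embed() compress their JSON payloads by hand with gzip_data()
# before setting the Content-Encoding header. A minimal sketch of that
# helper, assuming it simply returns the gzip-compressed bytes (the real
# implementation lives elsewhere in this repo; the _sketch name is
# hypothetical):
import gzip
from io import BytesIO

def gzip_data_sketch(data):
    buf = BytesIO()
    # GzipFile writes a gzip stream into the in-memory buffer
    with gzip.GzipFile(mode='wb', fileobj=buf) as f:
        f.write(data)
    return buf.getvalue()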