import json
import urllib.parse

from flask import request, Response, url_for

# The service client modules (dc, dc_service, shared_api, landing_page_api)
# and the local get_* helpers used below are assumed to be imported or defined
# elsewhere in this module.


def get_stat_set_within_place():
    """Gets the statistical variable values for child places of a certain
    place type contained in a parent place at a given date. If no date is
    given, returns values for the most recent date.

    Returns:
        Dict keyed by statvar DCIDs with dicts as values. See `SourceSeries` in
        https://github.com/datacommonsorg/mixer/blob/master/proto/mixer.proto
        for the definition of the inner dicts. In particular, the values for
        "val" are dicts keyed by child place DCIDs with the statvar values as
        values.
    """
    parent_place = request.args.get("parent_place")
    if not parent_place:
        return Response(json.dumps("error: must provide a parent_place field"),
                        400,
                        mimetype='application/json')
    child_type = request.args.get("child_type")
    if not child_type:
        return Response(json.dumps("error: must provide a child_type field"),
                        400,
                        mimetype='application/json')
    stat_vars = request.args.getlist("stat_vars")
    if not stat_vars:
        return Response(json.dumps("error: must provide a stat_vars field"),
                        400,
                        mimetype='application/json')
    date = request.args.get("date")
    return Response(json.dumps(
        dc.get_stat_set_within_place(parent_place, child_type, stat_vars,
                                     date)),
                    200,
                    mimetype='application/json')
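# A minimal sketch of calling the endpoint above from a client. The route path
# "/api/stats/within-place" is an assumption (use whatever path this handler
# is actually registered under); "geoId/06" and "Count_Person" are real Data
# Commons DCIDs used purely for illustration. `requests` encodes the list
# value as repeated query parameters, which matches request.args.getlist.
#
#   import requests
#
#   resp = requests.get(
#       "https://example.com/api/stats/within-place",  # hypothetical path
#       params={
#           "parent_place": "geoId/06",  # California
#           "child_type": "County",
#           "stat_vars": ["Count_Person"],
#       })
#   stats = resp.json()  # keyed by statvar DCID; see the docstring above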
def choropleth_data(dcid):
    """Gets the stat var data needed for choropleth charts for a given place.

    Returns:
        {
            [stat var]: {
                date: string,
                data: {
                    [dcid]: number,
                    ...
                },
                numDataPoints: number,
                exploreUrl: string,
                sources: [string],
            },
            ...
        }
    """
    configs = get_choropleth_configs()
    stat_vars, denoms = shared_api.get_stat_vars(configs)
    display_dcid, display_level = get_choropleth_display_level(dcid)
    geos = []
    if display_dcid and display_level:
        geos = dc_service.get_places_in([display_dcid],
                                        display_level).get(display_dcid, [])
    if not stat_vars or not geos:
        return Response(json.dumps({}), 200, mimetype='application/json')
    # Get data for all the stat vars for every place we will need and process
    # the data.
    sv_data = dc_service.get_stat_set_within_place(display_dcid, display_level,
                                                   list(stat_vars), "")
    sv_data_values = sv_data.get('data', {})
    sv_metadata = sv_data.get('metadata', {})
    denoms_data = get_denoms_data(geos, denoms)

    result = {}
    # Process the data for each config.
    for cc in configs:
        # We should only be making choropleths for configs with a single stat
        # var.
        sv = cc['statsVars'][0]
        cc_sv_data_values = sv_data_values.get(sv, {}).get('stat', {})
        denom = landing_page_api.get_denom(cc, True)
        cc_denom_data = denoms_data.get(denom, {})
        scaling = cc.get('scaling', 1)
        if 'relatedChart' in cc:
            scaling = cc['relatedChart'].get('scaling', scaling)
        sources = set()
        dates = set()
        data_dict = dict()
        # Process the data for each place we have stat var data for.
        for place_dcid in cc_sv_data_values:
            dcid_sv_data = cc_sv_data_values.get(place_dcid)
            # Process and then update data_dict with the value for this
            # place_dcid.
            val = get_value(place_dcid, dcid_sv_data, denom, cc_denom_data,
                            scaling)
            if not val:
                continue
            data_dict[place_dcid] = val
            # Add the date of the stat var value for this place_dcid to the
            # set of dates.
            dates.add(dcid_sv_data.get("date", ""))
            # Add the stat var source and the denom source (if there is a
            # denom) to the set of sources.
            metadata_hash = dcid_sv_data.get('metaHash', "")
            source = sv_metadata.get(str(metadata_hash),
                                     {}).get('provenanceUrl', "")
            sources.add(source)
            if denom:
                sources.add(
                    cc_denom_data.get(place_dcid, {}).get('provenanceUrl', ""))
        # Build the exploreUrl.
        # TODO: webdriver test to check that the right choropleth loads.
        is_scaled = (('relatedChart' in cc and
                      cc['relatedChart'].get('scale', False)) or
                     ('denominator' in cc))
        url_anchor = '&pd={}&ept={}&sv={}'.format(dcid, display_level, sv)
        if is_scaled:
            url_anchor += "&pc=1"
        explore_url = urllib.parse.unquote(
            url_for('tools.map', _anchor=url_anchor))
        # Process the set of sources and set of dates collected for this chart
        # config.
        sources = filter(lambda x: x != "", sources)
        date_range = shared_api.get_date_range(dates)
        # Build the result for this chart config and add it to the result.
        cc_result = {
            'date': date_range,
            'data': data_dict,
            'numDataPoints': len(data_dict),
            # TODO (chejennifer): exploreUrl should link to choropleth tool
            # once the tool is ready.
            'exploreUrl': explore_url,
            'sources': sorted(sources)
        }
        result[sv] = cc_result
    return Response(json.dumps(result), 200, mimetype='application/json')
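# A minimal sketch of consuming the payload produced above, assuming the
# handler is registered at a route like "/api/choropleth/data/<path:dcid>"
# (the path is hypothetical). Each top-level entry in the response corresponds
# to one chart config, keyed by its stat var.
#
#   import requests
#
#   resp = requests.get("https://example.com/api/choropleth/data/geoId/06")
#   for sv, chart in resp.json().items():
#       print(sv, chart["date"], chart["numDataPoints"], chart["sources"])
#       # chart["data"] maps child place DCIDs to (possibly scaled) values.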