def tableapi():
    config = configuration()

    # Test case: hard-coded handle and year range, no request parameters.
    customyear = ''
    fromyear = '1800'
    toyear = '2000'
    customcountrycodes = '380,250,276,804'
    handle = "F16UDU"
    DEBUG = 0
    logscale = 0

    # Load the static JSON export for this handle into a dataframe.
    apifile = str(handle) + ".json"
    jsonapi = config['apiroot'] + "/collabs/static/data/" + apifile
    dataframe = load_api_data(jsonapi, '')

    # Resolve location codes and the selected countries.
    loccodes = loadcodes(dataframe)
    (ctr, header) = countryset(customcountrycodes, loccodes)

    # Build the year/value frame and turn it into CSV plus aggregates.
    indicator = ''
    (frame, years, values, dates) = createframe(indicator, loccodes, dataframe,
                                                customyear, fromyear, toyear,
                                                ctr, logscale, DEBUG)
    names = ['indicator', 'm', 'ctrcode', 'country', 'year', 'intcode',
             'value', 'id']
    (csvdata, aggrdata) = combinedata(ctr, frame, loccodes)
    return (csvdata, aggrdata)
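# The load_api_data() helper used above is defined elsewhere in the project.
# The sketch below is a hypothetical stand-in (the name, the requests
# dependency and the ignored second argument are assumptions), showing one
# way the static JSON export could be fetched and turned into a DataFrame.
import pandas as pd
import requests


def load_api_data_sketch(jsonapi, indicator=''):
    # Fetch the static JSON export from the collabs data directory.
    response = requests.get(jsonapi)
    response.raise_for_status()
    # Load the JSON records into a DataFrame for the downstream filters.
    return pd.DataFrame(response.json())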
def tableapi():
    # Default filter: modern classification, years 1500-2012, no countries.
    config = configuration()
    switch = 'modern'
    datafilter = {}
    datafilter['ctrlist'] = ''
    customyear = ''
    fromyear = '1500'
    datafilter['startyear'] = fromyear
    toyear = '2012'
    datafilter['endyear'] = toyear
    customcountrycodes = ''
    (aggr, logscale, dataset, handles) = ('', '', '', [])

    # Select countries from repeated ?loc= parameters.
    f = request.args
    for key in f.keys():
        if key == 'loc':
            for value in sorted(f.getlist(key)):
                if value:
                    customcountrycodes = str(customcountrycodes) + str(value) + ','
    if customcountrycodes:
        customcountrycodes = customcountrycodes[:-1]

    # handle = "F16UDU"
    # HANDLE: resolve a panel to its first pid, or keep the raw handle.
    if request.args.get('handle'):
        handledataset = request.args.get('handle')
        try:
            (pids, pidslist) = pidfrompanel(handledataset)
            handles.append(pids[0])
        except Exception:
            handles.append(handledataset)
            nopanel = 'yes'

    if request.args.get('dataset'):
        dataset = request.args.get('dataset')
    if request.args.get('hist'):
        switch = 'historical'

    # Explicit ?ctrlist= overrides the ?loc= selection.
    if request.args.get('ctrlist'):
        customcountrycodes = ''
        tmpcustomcountrycodes = request.args.get('ctrlist')
        c = tmpcustomcountrycodes.split(',')
        for ids in sorted(c):
            if ids:
                customcountrycodes = str(customcountrycodes) + str(ids) + ','
        customcountrycodes = customcountrycodes[:-1]
        datafilter['ctrlist'] = customcountrycodes
    if not customcountrycodes:
        customcountrycodes = '528'

    # Year range and output options.
    if request.args.get('yearmin'):
        fromyear = request.args.get('yearmin')
        datafilter['startyear'] = fromyear
    if request.args.get('yearmax'):
        toyear = request.args.get('yearmax')
        datafilter['endyear'] = toyear
    if request.args.get('aggr'):
        aggr = request.args.get('aggr')
    # Log scales switch
    if request.args.get('logscale'):
        logscale = request.args.get('logscale')

    DEBUG = 0
    old = ''
    if old:
        # Old path: read the static JSON export and build the frame locally.
        apifile = str(dataset) + ".json"
        jsonapi = config['apiroot'] + "/collabs/static/data/" + apifile
        dataframe = load_api_data(jsonapi, '')
        loccodes = loadcodes(dataframe)
        (ctr, header) = countryset(customcountrycodes, loccodes)
        indicator = ''
        (frame, years, values, dates, original) = createframe(indicator, loccodes,
                                                              dataframe, customyear,
                                                              fromyear, toyear, ctr,
                                                              logscale, DEBUG)
        names = ['indicator', 'm', 'ctrcode', 'country', 'year', 'intcode',
                 'value', 'id']
        (csvdata, aggrdata) = combinedata(ctr, frame, loccodes)
    # New version is fast
    else:
        (geocoder, geolist, oecd2webmapper, modern, historical) = request_geocoder(config, '')
        (origdata, maindata, metadata) = request_datasets(config, switch, modern,
                                                          historical, handles, geolist)
        (subsets, panel) = ({}, [])
        for handle in handles:
            (datasubset, ctrlist) = datasetfilter(maindata[handle], datafilter)
            if not datasubset.empty:
                datasubset = datasubset.dropna(how='all')
                panel.append(datasubset)
                subsets[handle] = datasubset

        classification = modern
        if switch == 'historical':
            classification = historical
        (csvdata, aggrdata) = dataset_to_csv(config, subsets[handles[0]],
                                             classification)

    # Return aggregates instead of raw rows when ?aggr= is requested.
    if aggr:
        csvdata = aggrdata
    return (csvdata, aggrdata)
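# tableapi() reads its parameters from request.args, so it is meant to run
# inside a Flask request context. The wiring below is a minimal illustrative
# sketch only: the app object, URL path and CSV response handling are
# assumptions, not part of the original module.
from flask import Flask, Response

app = Flask(__name__)


@app.route('/api/tableapi')
def tableapi_endpoint():
    # Example request: /api/tableapi?handle=F16UDU&ctrlist=528&yearmin=1800&yearmax=2000
    (csvdata, aggrdata) = tableapi()
    # Aggregated output is already folded into csvdata when ?aggr= is set.
    return Response(csvdata, mimetype='text/csv')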