def loadpanel(jsonapi, yearmin, yearmax, ctrlist):
    """Load panel datasets from the JSON API, filtered by year range and country list.

    Returns (panel, cleanedpanel, names): the list of per-dataset DataFrames,
    their concatenation with empty strings replaced by NaN ('' when nothing
    matched), and a handle -> title mapping.
    """
    cleanedpanel = ''
    dataframe = load_api_data(jsonapi, '')
    panel = []
    names = {}
    for dataitem in dataframe:
        handle = dataitem['handle']
        # Fall back to the handle when the dataset carries no title
        # (replaces the old bare try/except).
        names[handle] = dataitem.get('title', handle)
        (dataset, codes) = paneldatafilter(dataitem['data'], int(yearmin),
                                           int(yearmax), ctrlist, handle)
        if not dataset.empty:
            panel.append(dataset)
    if panel:
        totalpanel = pd.concat(panel)
        # The old dropna(axis=1, how='any') result was immediately overwritten
        # and therefore dead code; it has been removed.
        # Replace empty values with NaN
        cleanedpanel = totalpanel.replace(r'', np.nan, regex=True)
    return (panel, cleanedpanel, names)
def printme():
    """Render the 'printall' view for the dataset named in the request args.

    Resolves the dataset ID for the requested handle, fetches its citation
    from Dataverse, and renders 'printall.html' with the map copyright
    notice appropriate for the requested year.
    """
    config = configuration()
    if config['error']:
        return config['error']
    year = request.args.get("year")
    handle = request.args.get("handle")
    handles = []
    handles.append(handle)
    hquery = formdatasetquery(handles, '')
    datainfo = readdatasets('datasets', json.loads(hquery))
    # Keep the last datasetID found; 228 is the fallback.  Seeding the
    # default up front fixes the old bug where an *empty* result set raised
    # NameError later (no exception fired, so the bare except never ran).
    datasetID = 228
    try:
        for item in datainfo:
            datasetID = item['datasetID']
    except (TypeError, KeyError):
        # readdatasets may return a non-iterable or records without the key.
        datasetID = 228
    root = config['dataverseroot'] + "/api/datasets/" + str(datasetID) + "/versions/?key=" + config['key'] + "&show_entity_ids=true&q=authorName:*"
    data = load_api_data(root, 1)
    (title, citation) = get_citation(data['data'])
    uhandle = handle
    uhandle = uhandle.replace('hdl:', '')
    mapcopyright = config['cshapes_copyright']
    # Pre-1946 maps come from Geacron instead of CShapes.
    if int(year) < 1946:
        # "Note: Map polygons provided by Geacron <a href=\"geacron.com\">http://www.geacron.com</a>"
        mapcopyright = config['geacron_copyright']
    resp = make_response(render_template('printall.html', title=title, citation=citation, mapcopyright=mapcopyright, year=year, handle=handle, uhandle=uhandle))
    return resp
def datasets(settings=''):
    """Export the topics list as 'index.csv' and send it as a download.

    Fetches per-topic statistics from the website API, adds map/data URLs to
    each topic, writes one header row (sorted keys of the first topic) plus
    one data row per topic, and returns the file as an attachment.
    """
    config = configuration()
    if config['error']:
        return config['error']
    (year, code, website, server, imagepathloc, imagepathweb, viewerpath, path, geojson, datarange, custom) = readglobalvars()
    topicapiurl = website + "/api/topicslist"
    topicstats = load_api_data(topicapiurl, '', '', '', '', '')
    localfile = 'index.csv'
    filename = imagepathloc + '/' + localfile
    # Context manager closes (and flushes) the file before send_from_directory
    # reads it back; the old code leaked the handle.  NOTE(review): "wb+" is a
    # Python 2 csv idiom — on Python 3 this must be open(filename, "w", newline="").
    with open(filename, "wb+") as csvfile:
        f = csv.writer(csvfile)
        varlist = []
        firstline = 0
        for code in sorted(topicstats):
            dataset = topicstats[code]
            mapurl = website + "/site?code=" + dataset['topic_code'] + "&year=" + str(dataset['startyear'])
            dataurl = website + '/api/data?code=' + dataset['topic_code']
            topicstats[code]['urlmap'] = mapurl
            topicstats[code]['urldata'] = dataurl
            datarow = []
            # Emit the header row exactly once, from the first topic's keys.
            if firstline == 0:
                for row in sorted(dataset):
                    varlist.append(row)
                f.writerow(varlist)
                firstline = 1
            for row in sorted(dataset):
                datarow.append(dataset[row])
            f.writerow(datarow)
    return send_from_directory(imagepathloc, localfile, as_attachment=True)
def treemap(settings=''):
    """Render the treemap view for one or more dataset handles.

    Reads its inputs from the Flask request args ('face', 'handle',
    'historical'/'hist', 'year', 'ctrlist') and returns the rendered
    'treemap.html' response.  The 'settings' parameter is currently unused.
    """
    (years, ctrlist) = ([], '')
    showpanel = 'yes'
    config = configuration()
    if config['error']:
        return config['error']
    (historical, handle, handles, thisyear) = ('', '', [], '')
    # A 'face' arg names a single dataset handle directly.
    if request.args.get('face'):
        facehandle = request.args.get('face')
        if facehandle not in handles:
            handles.append(facehandle)
            handle = facehandle
    # A 'handle' arg may be a panel expression; pidfrompanel() extracts the
    # individual PIDs, and on any failure the raw value is used as-is.
    if request.args.get('handle'):
        handledataset = request.args.get('handle')
        try:
            (pids, pidslist) = pidfrompanel(handledataset)
            handle = pids[0]
            handles.append(handle)
        except:  # NOTE(review): bare except also hides unexpected errors
            handles.append(handledataset)
            handle = handledataset
            nopanel = 'yes'
    if request.args.get('historical'):
        historical = request.args.get('historical')
    if request.args.get('year'):
        thisyear = request.args.get('year')
    # 'hist' overrides 'historical' when both are present.
    if request.args.get('hist'):
        historical = request.args.get('hist')
    if request.args.get('ctrlist'):
        ctrlist = request.args.get('ctrlist')
        # The configured default country list means "no filter".
        if ctrlist == config['ctrlist']:
            ctrlist = ''
    mainlink = '&handle=' + str(handle)
    try:
        (title, units, years) = dpemetadata(config, handle)
    except:  # fall back to generic metadata when the lookup fails
        (title, units, years) = ('Panel Data', '', [])
    # Propagate the active filters into the chart/bar/panel/treemap links.
    if historical:
        mainlink = str(mainlink) + '&historical=on'
    if thisyear:
        mainlink = str(mainlink) + '&year=' + str(thisyear)
    if ctrlist:
        mainlink = str(mainlink) + '&ctrlist=' + str(ctrlist)
    links = graphlinks(mainlink)
    # Ask the treemap API which years are available for the first handle.
    # NOTE(review): handles[0] raises IndexError when neither 'face' nor
    # 'handle' was supplied, and years[-1] fails on an empty year list.
    apitreemap = config['apiroot'] + "/api/treemap?action=showyears&handle=" + str(handles[0]) + "&ctrlist=" + str(ctrlist)
    years = load_api_data(apitreemap, 1)
    total = len(years)
    lastyear = years[-1]
    resp = make_response(render_template('treemap.html', handle=handle, chartlib=links['chartlib'], barlib=links['barlib'], panellib=links['panellib'], treemaplib=links['treemaplib'], q=handle, showpanel=showpanel, historical=historical, title=title, thisyear=thisyear, years=years, total=total, lastyear=lastyear, ctrlist=ctrlist))
    return resp
def test_stats():
    """Smoke test: load the sandbox panel API and filter each dataset to 1990-2010.

    Returns the list of non-empty filtered DataFrames.
    """
    jsonapi = "http://dpe.sandbox.socialhistoryservices.org/api/datasets?handle=Panel[%27hdl%3A10622/4X6NCK%27%2C%20%27hdl%3A10622/I0YK5M%27%2C%20%27hdl%3A10622/ZWRBOY%27]"
    yearmin = '1990'
    yearmax = '2010'
    ctrlist = ''
    # ctrlist = '76,578,620,554'
    dataframe = load_api_data(jsonapi, '')
    # The unused 'allcodes' local from the original was removed.
    panel = []
    for dataitem in dataframe:
        handle = dataitem['handle']
        (dataset, codes) = paneldatafilter(dataitem['data'], int(yearmin),
                                           int(yearmax), ctrlist, handle)
        if not dataset.empty:
            panel.append(dataset)
    return panel
def test_stats():
    """Smoke test: fetch the sandbox panel datasets and keep the non-empty 1990-2010 subsets."""
    jsonapi = "http://dpe.sandbox.socialhistoryservices.org/api/datasets?handle=Panel[%27hdl%3A10622/4X6NCK%27%2C%20%27hdl%3A10622/I0YK5M%27%2C%20%27hdl%3A10622/ZWRBOY%27]"
    (yearmin, yearmax, ctrlist) = ("1990", "2010", "")
    # ctrlist = '76,578,620,554'
    allcodes = {}
    panel = []
    for record in load_api_data(jsonapi, ""):
        pid = record["handle"]
        (subset, codes) = paneldatafilter(record["data"], int(yearmin),
                                          int(yearmax), ctrlist, pid)
        if not subset.empty:
            panel.append(subset)
    return panel
def loadpanel(jsonapi, yearmin, yearmax, ctrlist):
    """Load panel datasets from the JSON API, filtered by year range and country list.

    Returns (panel, cleanedpanel, names): the list of per-dataset DataFrames,
    their concatenation with empty strings replaced by NaN ('' when nothing
    matched), and a handle -> title mapping.  The previous version built
    'panel' and 'names' but fell off the end returning None; the return was
    restored to match the loadpanel() contract used elsewhere in this module.
    """
    cleanedpanel = ''
    dataframe = load_api_data(jsonapi, '')
    panel = []
    names = {}
    for dataitem in dataframe:
        handle = dataitem['handle']
        # Fall back to the handle when the dataset carries no title.
        names[handle] = dataitem.get('title', handle)
        (dataset, codes) = paneldatafilter(dataitem['data'], int(yearmin),
                                           int(yearmax), ctrlist, handle)
        if not dataset.empty:
            panel.append(dataset)
    if panel:
        totalpanel = pd.concat(panel)
        # Replace empty values with NaN
        cleanedpanel = totalpanel.replace(r'', np.nan, regex=True)
    return (panel, cleanedpanel, names)
def tableapi():
    """Fixed test-case variant: build CSV and aggregate table data for handle F16UDU, 1800-2000."""
    config = configuration()
    # Test case
    (customyear, indicator) = ('', '')
    (fromyear, toyear) = ('1800', '2000')
    customcountrycodes = '380,250,276,804'
    handle = "F16UDU"
    (DEBUG, logscale) = (0, 0)
    jsonapi = config['apiroot'] + "/collabs/static/data/" + str(handle) + ".json"
    dataframe = load_api_data(jsonapi, '')
    loccodes = loadcodes(dataframe)
    (ctr, header) = countryset(customcountrycodes, loccodes)
    (frame, years, values, dates) = createframe(indicator, loccodes, dataframe,
                                                customyear, fromyear, toyear,
                                                ctr, logscale, DEBUG)
    names = ['indicator', 'm', 'ctrcode', 'country', 'year', 'intcode', 'value', 'id']
    (csvdata, aggrdata) = combinedata(ctr, frame, loccodes)
    return (csvdata, aggrdata)
def searchdata(query):
    """Proxy a search query to the Dataverse search API and return the raw JSON result."""
    config = configuration()
    # NOTE(review): 'query' is interpolated into the URL unencoded — it should
    # be URL-quoted if it can contain spaces, '&' or '#'.
    searchapi = "".join([config['dataverseroot'], "/api/search?q=", query,
                         "&key=", config['key']])
    return json.dumps(load_api_data(searchapi, ''))
def tableapi():
    """API endpoint: build CSV (and aggregated) table data for the requested datasets.

    Reads filter settings from the Flask request args ('loc', 'handle',
    'dataset', 'hist', 'ctrlist', 'yearmin', 'yearmax', 'aggr', 'logscale')
    and returns the tuple (csvdata, aggrdata).
    """
    # years in filter
    config = configuration()
    switch = 'modern'  # classification scheme; a 'hist' arg switches to historical
    datafilter = {}
    datafilter['ctrlist'] = ''
    customyear = ''
    fromyear = '1500'
    datafilter['startyear'] = fromyear
    toyear = '2012'
    datafilter['endyear'] = toyear
    customcountrycodes = ''
    (aggr, logscale, dataset, handles) = ('', '', '', [])
    # Select countries: collect every non-empty 'loc' arg into a
    # comma-separated country-code string.
    f = request.args
    for key in f.keys():
        if key == 'loc':
            for value in sorted(f.getlist(key)):
                if value:
                    customcountrycodes = str(customcountrycodes) + str(value) + ','
    if customcountrycodes:
        # strip the trailing comma
        customcountrycodes = customcountrycodes[:-1]
    #handle = "F16UDU"
    # HANDLE: may be a panel expression; pidfrompanel() extracts the PIDs,
    # otherwise the raw value is used as a single handle.
    if request.args.get('handle'):
        handledataset = request.args.get('handle')
        try:
            (pids, pidslist) = pidfrompanel(handledataset)
            handles.append(pids[0])
        except:  # NOTE(review): bare except also hides unexpected errors
            handles.append(handledataset)
            nopanel = 'yes'
    if request.args.get('dataset'):
        dataset = request.args.get('dataset')
    if request.args.get('hist'):
        switch = 'historical'
    # An explicit 'ctrlist' arg overrides any 'loc' selection.
    if request.args.get('ctrlist'):
        customcountrycodes = ''
        tmpcustomcountrycodes = request.args.get('ctrlist')
        c = tmpcustomcountrycodes.split(',')
        for ids in sorted(c):
            if ids:
                customcountrycodes = str(customcountrycodes) + str(ids) + ','
        customcountrycodes = customcountrycodes[:-1]
        datafilter['ctrlist'] = customcountrycodes
    # Default country code 528 — presumably the Netherlands; TODO confirm.
    if not customcountrycodes:
        customcountrycodes = '528'
    if request.args.get('yearmin'):
        fromyear = request.args.get('yearmin')
        datafilter['startyear'] = fromyear
    if request.args.get('yearmax'):
        toyear = request.args.get('yearmax')
        datafilter['endyear'] = toyear
    if request.args.get('aggr'):
        aggr = request.args.get('aggr')
    # Log scales switch
    if request.args.get('logscale'):
        logscale = request.args.get('logscale')
    DEBUG = 0
    old = ''
    # Legacy static-JSON code path; 'old' is always falsy, so it is disabled.
    if old:
        apifile = str(dataset) + ".json"
        jsonapi = config['apiroot'] + "/collabs/static/data/" + apifile
        dataframe = load_api_data(jsonapi, '')
        loccodes = loadcodes(dataframe)
        (ctr, header) = countryset(customcountrycodes, loccodes)
        indicator = ''
        (frame, years, values, dates, original) = createframe(indicator, loccodes,
            dataframe, customyear, fromyear, toyear, ctr, logscale, DEBUG)
        names = ['indicator', 'm', 'ctrcode', 'country', 'year', 'intcode', 'value', 'id']
        (csvdata, aggrdata) = combinedata(ctr, frame, loccodes)
    # New version is fast
    else:
        (geocoder, geolist, oecd2webmapper, modern, historical) = request_geocoder(config, '')
        (origdata, maindata, metadata) = request_datasets(config, switch, modern, historical, handles, geolist)
        (subsets, panel) = ({}, [])
        for handle in handles:
            (datasubset, ctrlist) = datasetfilter(maindata[handle], datafilter)
            if not datasubset.empty:
                # Drop rows that are entirely NaN before collecting.
                datasubset = datasubset.dropna(how='all')
                panel.append(datasubset)
                subsets[handle] = datasubset
        classification = modern
        if switch == 'historical':
            classification = historical
        # NOTE(review): raises IndexError/KeyError when no handle was given or
        # the first handle's filtered subset was empty.
        (csvdata, aggrdata) = dataset_to_csv(config, subsets[handles[0]], classification)
    if aggr:
        csvdata = aggrdata
    return (csvdata, aggrdata)
def searchdata(query):
    """Forward *query* to the Dataverse search endpoint and return the JSON-encoded response."""
    config = configuration()
    base = config['dataverseroot']
    key = config['key']
    searchapi = base + "/api/search?q=" + query + "&key=" + key
    dataframe = load_api_data(searchapi, '')
    return json.dumps(dataframe)
def tableapi():
    """API endpoint: assemble CSV (and aggregated) table data for the requested datasets.

    Filter settings come from the Flask request args ('loc', 'handle',
    'dataset', 'hist', 'ctrlist', 'yearmin', 'yearmax', 'aggr', 'logscale');
    returns the tuple (csvdata, aggrdata).
    """
    # years in filter
    config = configuration()
    switch = 'modern'  # classification scheme; a 'hist' arg switches to historical
    datafilter = {}
    datafilter['ctrlist'] = ''
    customyear = ''
    fromyear = '1500'
    datafilter['startyear'] = fromyear
    toyear = '2012'
    datafilter['endyear'] = toyear
    customcountrycodes = ''
    (aggr, logscale, dataset, handles) = ('', '', '', [])
    # Select countries: gather every non-empty 'loc' arg into a
    # comma-separated country-code string.
    f = request.args
    for key in f.keys():
        if key == 'loc':
            for value in sorted(f.getlist(key)):
                if value:
                    customcountrycodes = str(customcountrycodes) + str(
                        value) + ','
    if customcountrycodes:
        # strip the trailing comma
        customcountrycodes = customcountrycodes[:-1]
    #handle = "F16UDU"
    # HANDLE: may be a panel expression; pidfrompanel() extracts the PIDs,
    # otherwise the raw value is used as a single handle.
    if request.args.get('handle'):
        handledataset = request.args.get('handle')
        try:
            (pids, pidslist) = pidfrompanel(handledataset)
            handles.append(pids[0])
        except:  # NOTE(review): bare except also hides unexpected errors
            handles.append(handledataset)
            nopanel = 'yes'
    if request.args.get('dataset'):
        dataset = request.args.get('dataset')
    if request.args.get('hist'):
        switch = 'historical'
    # An explicit 'ctrlist' arg overrides any 'loc' selection.
    if request.args.get('ctrlist'):
        customcountrycodes = ''
        tmpcustomcountrycodes = request.args.get('ctrlist')
        c = tmpcustomcountrycodes.split(',')
        for ids in sorted(c):
            if ids:
                customcountrycodes = str(customcountrycodes) + str(ids) + ','
        customcountrycodes = customcountrycodes[:-1]
        datafilter['ctrlist'] = customcountrycodes
    # Default country code 528 — presumably the Netherlands; TODO confirm.
    if not customcountrycodes:
        customcountrycodes = '528'
    if request.args.get('yearmin'):
        fromyear = request.args.get('yearmin')
        datafilter['startyear'] = fromyear
    if request.args.get('yearmax'):
        toyear = request.args.get('yearmax')
        datafilter['endyear'] = toyear
    if request.args.get('aggr'):
        aggr = request.args.get('aggr')
    # Log scales switch
    if request.args.get('logscale'):
        logscale = request.args.get('logscale')
    DEBUG = 0
    old = ''
    # Legacy static-JSON code path; 'old' is always falsy, so it is disabled.
    if old:
        apifile = str(dataset) + ".json"
        jsonapi = config['apiroot'] + "/collabs/static/data/" + apifile
        dataframe = load_api_data(jsonapi, '')
        loccodes = loadcodes(dataframe)
        (ctr, header) = countryset(customcountrycodes, loccodes)
        indicator = ''
        (frame, years, values, dates, original) = createframe(indicator,
            loccodes, dataframe, customyear, fromyear, toyear, ctr, logscale, DEBUG)
        names = [
            'indicator', 'm', 'ctrcode', 'country', 'year', 'intcode', 'value', 'id'
        ]
        (csvdata, aggrdata) = combinedata(ctr, frame, loccodes)
    # New version is fast
    else:
        (geocoder, geolist, oecd2webmapper, modern, historical) = request_geocoder(config, '')
        (origdata, maindata, metadata) = request_datasets(config, switch, modern, historical, handles, geolist)
        (subsets, panel) = ({}, [])
        for handle in handles:
            (datasubset, ctrlist) = datasetfilter(maindata[handle], datafilter)
            if not datasubset.empty:
                # Drop rows that are entirely NaN before collecting.
                datasubset = datasubset.dropna(how='all')
                panel.append(datasubset)
                subsets[handle] = datasubset
        classification = modern
        if switch == 'historical':
            classification = historical
        # NOTE(review): raises IndexError/KeyError when no handle was given or
        # the first handle's filtered subset was empty.
        (csvdata, aggrdata) = dataset_to_csv(config, subsets[handles[0]], classification)
    if aggr:
        csvdata = aggrdata
    return (csvdata, aggrdata)