Example #1
def geocoder():
    config = configuration()
    config['remote'] = ''
    remote = 'on'

    # Geocoder
    handle = config['geocoderhandle']
    (classification, geodataset, title, units) = content2dataframe(config, config['geocoderhandle'])

    fromyear = 1500
    toyear = 2016
    cfilter = ''
    if request.args.get('name'):
        cfilter = request.args.get('name')

    if fromyear:
        historical = ''
        if historical == 'old':
            api = config['apiroot'] + "/collabs/static/data/historical.json"
            (regions, countries, ctr2reg, webmapper, geocoder) = histo(api, cfilter)
        else:
            (geocoder, geolist, oecd) = buildgeocoder(geodataset, config, cfilter)

    data = json.dumps(geocoder, encoding="utf-8", sort_keys=True, indent=4)
    return Response(data,  mimetype='application/json')
Example #2
def tableapis(handle, customcountrycodes, fromyear, toyear, customyear, logflag):
    # years in filter
    config = {}
    indicator = ''
    config = configuration()
    DEBUG = 0
    try:
        (dataset, revid, cliopid, clearpid) = findpid(handle)
    except:
        dataset = handle

    try:
        apifile = str(dataset) + ".json"
        jsonapi = config['apiroot'] + "/collabs/static/data/" + apifile
        dataframe = load_api_data(jsonapi, '')
    except:
        jsonapi = config['apiroot'] + "/api/datasets?handle=Panel[" + handle + "]"
        datajson = load_api_data(jsonapi, '')
        for handledata in datajson:
            dataframe = handledata['data']

    # DEBUG2
    #print dataframe
    loccodes = loadcodes(dataframe)
    (ctr, header) = countryset(customcountrycodes, loccodes)
    (frame, years, values, dates, original) = createframe(indicator, loccodes, dataframe, customyear, fromyear, toyear, ctr, logflag, DEBUG)
    names = ['indicator', 'm', 'ctrcode', 'country', 'year', 'intcode', 'value', 'id']

    (csvdata, aggrdata) = combinedata(ctr, frame, loccodes)

    return (years, frame, csvdata, aggrdata, original)
Example #3
def indicators():
    #data = load_indicators("indicators.csv")
    config = configuration()
    pid = config['topicindex']

    if pid:
        (handles, pidslist) = pidfrompanel(pid)
        hquery = formdatasetquery(handles,'')
        datainfo = readdatasets('datasets', json.loads(hquery))
        csvio = StringIO(str(datainfo[0]['csvframe']))
        data = pd.read_csv(csvio, sep='\t', dtype='unicode', quoting=csv.QUOTE_NONE)
        columns = []
        for item in data.columns:
            col = re.sub(r"\"", "", item)
            columns.append(col)
        data.columns = columns
        storeddata = readdatasets('datasets', '')
        linking = {}
        for item in storeddata:
            try:
                linking[item['title']] = item['handle']
            except:
                skip = 'yes'
        data['handle'] = ''
        data = data.drop('ID', axis=1)
        for row in data.index:
            title = data.loc[row, 'Name']
            try:
                data.loc[row, 'handle'] = linking[title]
            except:
                data.loc[row, 'handle'] = ''
        return Response(data.to_json(orient='records'), mimetype='application/json')
    else:
        return 'No data'
Example #4
def totalstatistics(settings=''):
    config = configuration()
    if config['error']:
        return config['error']

    handles = []

    if request.args.get('handle'):
        handledataset = request.args.get('handle')
        (dataset, revid, cliopid, clearpid) = findpid(handledataset)
        handles.append(dataset)

    if request.args.get('dataset'):
        dataset = request.args.get('dataset')
        handles.append(dataset)

    if request.args.get('yearmin'):
        fromyear = request.args.get('yearmin')
    if request.args.get('yearmax'):
        toyear = request.args.get('yearmax')

    html = ''
    for dataset in handles:
        jsonapi = config['apiroot'] + "/collabs/static/data/" + str(dataset) + ".json"
        data = createdata(jsonapi)
        d = data.describe()
        show = d.transpose()
        stats = show.to_html()
        html = html + stats + '<br>'

    return html
Example #5
def save_serial_list(task, serial_list_dict):
    configs = configuration()
    root = configs.get_log_analysis_path()
    serial_list_path = os.path.join(root, task, __MY_VAR_ROOT_PATH__,__FN_RESULTS_LIST__)
    serial_display_path = os.path.join(root, task, __MY_VAR_ROOT_PATH__, __FN_DISPLAY_RESULTS_LIST__)
    
    file_result = open(serial_list_path, 'w')
    file_result_display = open(serial_display_path, 'w')

    for key, value in serial_list_dict.items():
        
        # write serial result into file /serial.txt/ for later display
        linekey = key
        # print linekey
        file_result_display.write(linekey)
        # print '-----'
        file_result_display.write('\n-----\n')
        linevalue = ''
        if isinstance(value, list):
            for v in value:
                linevalue = linevalue + str(v) + '|'
            linevalue = linevalue.strip('|')
        else:
            linevalue = str(value)
        # print linevalue
        file_result_display.write(linevalue)
        # print '=========='
        file_result_display.write('\n==========\n')

        # write serial result into file /serial/ for later use
        serial_to_write = key + '=' + linevalue
        file_result.write(serial_to_write + '\n')
    file_result_display.close()
    file_result.close()
Example #6
def get(settings=''):
    config = configuration()
    if config['error']:
        return config['error']

    (year, code, website, server, imagepathloc, imagepathweb, viewerpath, path, geojson, datarange, custom) = readglobalvars()
    image = request.args.get('image')
    gzip = request.args.get('nlgis')
    svg = request.args.get('svg')
    pdf = request.args.get('pdf')
    outfile = ''

    thismimetype = 'image'
    if image:
        outfile = image
    if gzip:
        thismimetype = 'application/x-gzip'
        outfile = gzip
    if svg:
        thismimetype = 'text/plain'
        outfile = svg
    if pdf:
        thismimetype = 'application/pdf'
        outfile = pdf

    if image:
        return send_from_directory(imagepathloc, outfile, mimetype=thismimetype)
    else:
        return send_from_directory(imagepathloc, outfile, as_attachment=True)
Example #7
def compiledataset(csvfile):
    handles = []
    remote = 'on'

    # Geocoder
    config = configuration()
    config['remote'] = 'yes'
    dataframe = loaddataset_fromfile(config, csvfile)
    dataset = dataframe
    title = dataframe.columns[1]
    units = dataframe.ix[0][1]
    metadata = {}
    switch = 'historical'
    #dataset = dataset.convert_objects(convert_numeric=True)
    dataset.columns = dataset.ix[1]
    dataset.index = dataset[config['moderncode']]
    if '1' in dataset.columns:
        dataset = dataset.drop('1', axis=1)

    #dataset = dataset[2:]
    (intcodes, notint) = selectint(dataset.columns)
    #for colname in notint:
    #    dataset = dataset.drop(colname, axis=1)
    dataset.columns = notint + intcodes
    return (dataset, title, units)
Example #8
def advanced_statistics():
    (yearmin, yearmax, ctrlist) = (1500, 2020, '')
    config = configuration()
    handles = []

    handledataset = ''
    if request.args.get('handle'):
        handledataset = request.args.get('handle')
        handledataset = handledataset.replace(" ", '')

    if request.args.get('dataset'):
        dataset = request.args.get('dataset')
        handles.append(dataset)

    if request.args.get('yearmin'):
        yearmin = request.args.get('yearmin')
    if request.args.get('yearmax'):
        yearmax = request.args.get('yearmax')
    if request.args.get('ctrlist'):
        ctrlist = request.args.get('ctrlist')

    modern = moderncodes(config['modernnames'], config['apiroot'])
    jsonapi = config['apiroot'] + '/api/datasets?handle=' + str(handledataset)

    (panel, cleanedpanel, names) = loadpanel(jsonapi, yearmin, yearmax, ctrlist)
    (header, data, countries, handles, vhandles) = advpanel2dict(cleanedpanel)

    ctrlimit = 200
    #result = panel2csv(header, data, thisyear, countries, handles, vhandles, ctrlimit, modern)
    #maindataframe = data2statistics(handles, cleanedpanel)
    #showhtml = statistics_tojson(maindataframe, modern)
    data = handle2statistics(handles, cleanedpanel)
    showhtml = statistics2table(data)
    return showhtml
Example #9
def testdownload():
    config = configuration()
    DEBUG = 0
    API_TOKEN=config['key']
    cmd = "--insecure -u " + API_TOKEN + ": " + config['dataverseroot'] + "/dvn/api/data-deposit/v1.1/swordv2/statement/study/"
    tmpdir = "/tmp/test"
    filename = randomword(10)
    arc = "data" + filename + ".zip"
    finaldir = "/home/dpe/tmp"
    if filename:
        finaldir = str(finaldir) + '/' + str(filename)
        tmpdir = str(tmpdir) + '/' + str(filename)

    pid = "hdl:10622/73BBBI"
    pid = "hdl:10622/4X6NCK"
    pid = "hdl:10622/JPIRSD"
    try:
        os.mkdir(tmpdir)
        os.mkdir(finaldir)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise e
        pass

    zipfile = get_papers(config['dataverseroot'], config['key'], cmd, pid, tmpdir, arc, finaldir)
    print zipfile
    return
Example #10
def predict_task(task, serials):
    global lut_api_key
    config = configuration()
    lut_api_key = config.read_LUT(1)

    task_log_path = os.path.join(config.get_log_analysis_path(), task, task)
    try:
        task_log_file = open(task_log_path, 'r')
    except:
        print 'OPEN LOG FILE FAILED! (' + task_log_path + ')'
    else:
        api_serial = []
        last_time = 0
        predict_count = 0
        predict_hitted = 0
        total_api_count = 0
        next_api_key = ''

        random_hitted = 0
        for line in task_log_file:
            linearray = line.split('\t')
            api = linearray[5]
            
            # check whether it is an api
            if api in lut_api_key:
                apikey = lut_api_key[api]
            else:
                continue
            
            # check whether timed out
            if (last_time > 0):
                if (int(linearray[3]) - last_time > __VAR_TIME_INTERVAL__*1000):
                    api_serial = []
                else:
                    # it is a valid api, check whether the prediction hit
                    total_api_count = total_api_count + 1
                    if (next_api_key != ''):
                        random_next_api = chr(random.randint(ord('A'), ord('A')+len(lut_api_key)-1))
                        if (apikey == random_next_api):
                            random_hitted = random_hitted + 1
                    
                    if (apikey == next_api_key):
                        predict_hitted = predict_hitted + 1
                        # print 'PREDICT HITTED!'
            next_api_key = ''
            last_time = int(linearray[3])

            api_serial.append(apikey)
            predict = prediction(api_serial, serials)
            if (predict != False):
                predict_count = predict_count + 1
                next_api_key = predict
                
        task_log_file.close()
        print '===== PREDICT TASK COMPLETED ====='
        time.sleep(2)
        print 'TOTAL API COUNT:' + str(total_api_count)
        print 'PREDICTED COUNT:' + str(predict_count)
        print 'PREDICTED HITTED COUNT:' + str(predict_hitted)
        print 'RANDOM HITTED COUNT:' + str(random_hitted)
Example #11
def webmapper():
    config = configuration()
    api = config['apiroot'] + "/collabs/static/data/historical.json"
    (regions, countries, ctr2reg, webmapper, geocoder) = histo(api)

    data = json.dumps(webmapper, encoding="utf-8", sort_keys=True, indent=4)
    return Response(data,  mimetype='application/json')
Example #12
def advanced(settings=''):
    config = configuration()
    if config['error']:
        return config['error']

    (year, code, website, server, imagepathloc, imagepathweb, viewerpath, path, geojson, datarange, custom) = readglobalvars()

    for name in request.cookies:
        settings = settings + ' ' + name + '=' + request.cookies[name]
    #return settings

    image = imagepathweb + '/' + year + '.png'

    settings = ''
    resp = make_response(render_template('advanced.html', image=image, settings=settings, r=request.cookies))
  
    # Cookie revision
    erase = {}
    for name in request.cookies:
        on = request.cookies[name]
        try:
            if request.args[name]:
                i = 1
        except:
            if on == 'on':
                erase[name] = on
                resp.set_cookie(name, '')

    for name in request.args:
        resp.set_cookie(name, request.args[name])

    return resp
Example #13
def printme():
    config = configuration()
    if config['error']:
        return config['error']

    year = request.args.get("year")
    handle = request.args.get("handle")
    handles = []
    handles.append(handle)

    hquery = formdatasetquery(handles,'')
    datainfo = readdatasets('datasets', json.loads(hquery))
    try:
        for item in datainfo:
            datasetID = item['datasetID']
    except:
        datasetID = 228
    root = config['dataverseroot'] + "/api/datasets/" + str(datasetID) + "/versions/?key=" + config['key'] + "&show_entity_ids=true&q=authorName:*"
    data = load_api_data(root, 1)
    (title, citation) = get_citation(data['data'])
    uhandle = handle
    uhandle = uhandle.replace('hdl:', '')
    mapcopyright = config['cshapes_copyright']
    if int(year) < 1946:
        mapcopyright = config['geacron_copyright']
    # "Note: Map polygons provided by Geacron <a href=\"geacron.com\">http://www.geacron.com</a>"
    resp = make_response(render_template('printall.html', title=title, citation=citation, mapcopyright=mapcopyright, year=year, handle=handle, uhandle=uhandle))
    return resp
Example #14
def datasets(settings=''):
    config = configuration()
    if config['error']:
        return config['error']

    (year, code, website, server, imagepathloc, imagepathweb, viewerpath, path, geojson, datarange, custom) = readglobalvars()
    topicapiurl = website + "/api/topicslist"
    topicstats = load_api_data(topicapiurl, '', '', '', '', '')
    localfile = 'index.csv'
    filename = imagepathloc + '/' + localfile
    f = csv.writer(open(filename, "wb+"))

    varlist = []
    firstline = 0
    for code in sorted(topicstats):
        dataset = topicstats[code]
        mapurl = website + "/site?code=" + dataset['topic_code'] + "&year=" + str(dataset['startyear'])
        dataurl = website + '/api/data?code=' + dataset['topic_code']
        topicstats[code]['urlmap'] = mapurl
        topicstats[code]['urldata'] = dataurl
        datarow = []    
        if firstline == 0:
            for row in sorted(dataset):
                varlist.append(row)    
            f.writerow(varlist)
            firstline = 1
        
        for row in sorted(dataset):        
            datarow.append(dataset[row])
        f.writerow(datarow) 
    return send_from_directory(imagepathloc, localfile, as_attachment=True)
Example #15
    def finalizeOrder(self, cursor, cashier):
        try:
            owner=self.book.owner
        except:
            cfg = configuration()
            owner = cfg.get("default_owner")
        cursor.execute ("""
        INSERT INTO transactionLog SET
        action = "SALE",
        amount = %s,
        cashier = %s,
        date = NOW(),
        info = %s,
        schedule = %s,
        owner = %s,
        cart_id=''
        """,(self.price,cashier,"[%s] %s" % (self.getDistributor(),self.getName()),self.price_schedule,owner))

        try:
            self.book.sellme() # marks as sold
        except:
            print "sellme failed"

        cursor.close()
Example #16
    def update_pages(self, event):
        master_page_name = [m for m in self.pages.keys() if self.pages[m].master][0]
        master_page = self.pages[master_page_name]
        new_master_price = float(master_page.GetValue().replace("$", ""))
        cfg = configuration()
        for mp in cfg.get("multiple_prices"):
            if mp[0] != master_page_name:
                #print mp[1]
                self.pages[mp[0]].SetValue("%s" % (mp[1] * new_master_price))
Example #17
def boundaries(settings=''):
    config = configuration()
    if config['error']:
        return config['error']

    root = config['apiroot']
    dataset = 'dataframe66_'
    jsondataset = getboundaries(root, dataset)
    return Response(json.dumps(jsondataset), mimetype='application/json')
Example #18
def treemap(settings=''):
    (years, ctrlist) = ([], '')
    showpanel = 'yes'
    config = configuration()
    if config['error']:
        return config['error'] 

    (historical, handle, handles, thisyear) = ('', '', [], '')
    if request.args.get('face'):
        facehandle = request.args.get('face')
        if facehandle not in handles:
            handles.append(facehandle)
            handle = facehandle

    if request.args.get('handle'):
        handledataset = request.args.get('handle')
        try:
            (pids, pidslist) = pidfrompanel(handledataset)
            handle = pids[0]
            handles.append(handle)
        except:
            handles.append(handledataset)
            handle = handledataset
            nopanel = 'yes'
    if request.args.get('historical'):
        historical = request.args.get('historical')
    if request.args.get('year'):
        thisyear = request.args.get('year')
    if request.args.get('hist'):
        historical = request.args.get('hist')
    if request.args.get('ctrlist'):
        ctrlist = request.args.get('ctrlist')
        if ctrlist == config['ctrlist']:
            ctrlist = ''

    mainlink = '&handle=' + str(handle)
    try:
        (title, units, years) = dpemetadata(config, handle)
    except:
        (title, units, years) = ('Panel Data', '', [])

    if historical:
        mainlink = str(mainlink) + '&historical=on'
    if thisyear:
        mainlink = str(mainlink) + '&year=' + str(thisyear)
    if ctrlist:
        mainlink = str(mainlink) + '&ctrlist=' + str(ctrlist)

    links = graphlinks(mainlink)
    apitreemap = config['apiroot'] + "/api/treemap?action=showyears&handle=" + str(handles[0]) + "&ctrlist=" + str(ctrlist)
    years = load_api_data(apitreemap, 1)
    total = len(years)
    lastyear = years[-1]

    resp = make_response(render_template('treemap.html', handle=handle, chartlib=links['chartlib'], barlib=links['barlib'], panellib=links['panellib'], treemaplib=links['treemaplib'], q=handle, showpanel=showpanel, historical=historical, title=title, thisyear=thisyear, years=years, total=total, lastyear=lastyear, ctrlist=ctrlist))
    return resp
Example #19
def browse(settings=''):
    activepage = 'Dashboard'
    config = configuration()
    if config['error']:
        return config['error']

    pages = getindex(activepage)
    dataverse = config['dataverseroot']
    resp = make_response(render_template('dataverse.html', active=activepage, pages=pages, dataverse=dataverse))
    return resp
Example #20
    def _init_ctrls(self, prnt):
        self.cfg = configuration()
        title = self.cfg.get("title")
        wxFrame.__init__(self, id=-1, name='', parent=prnt,
                         pos=wxPoint(35, 47),
                         style=wxDEFAULT_FRAME_STYLE, title=title)
        self._init_utils()
        self.SetSizer(self.build_GUI())
        self.Fit()
        self.global_sizer.SetSizeHints(self)
Example #21
def metadata(dataset):
    #return ('xxx', '', '')
    config = configuration()
    (pid, fileid, revid, clearpid) = findpid(dataset)
    #return ('xxx', '', '')
    data = {}
    if pid:
        query = pid
        apiurl = config['dataverseroot'] + "/api/search?q=" + query + '&key=' + config['key'] + '&type=dataset'
        data = load_dataverse(apiurl)
    return (data, pid, fileid)
Example #22
def dialog():
    pid = ''
    root = ''
    config = configuration()
    if request.args.get('pid'):
        pid = request.args.get('pid')
        zipfile = downloadzip(pid)
        root = config['clearance'] + "/collabs/static/tmp/" + zipfile

    resp = make_response(render_template('dialog.html', download=root))
    return resp
Example #23
def datasets():
    config = configuration()
    (jsondata, pid) = ('', '')
    handles = []
    combineddataset = []
    resultdataset = ''
    datainfo = []
    outformat = 'json'

    if request.args.get('format'):
        outformat = request.args.get('format')
    if request.args.get('handle'):
        pid = request.args.get('handle')
    if request.args.get('latest'):
        dataset = config['defaulthandle']
        return dataset

    if pid:
        (handles, pidslist) = pidfrompanel(pid)

        hquery = formdatasetquery(handles,'')
        datainfo = readdatasets('datasets', json.loads(hquery))
        #if not datainfo:
            #datainfo.append(pid)

        for dataset in datainfo:
            data = {}
            handle = dataset['handle']
            if outformat == 'json':
                jsondata = str(dataset['data'])
                jsondata = jsondata.replace(".0,", ",")
                json_dict = ast.literal_eval(jsondata.strip())
                data['handle'] = handle
                try:
                    data['title'] = dataset['title']
                    data['units'] = dataset['units']
                    data['datasetID'] = dataset['datasetID']
                except:
                    data['title'] = 'Title'
                    data['units'] = 'Units'
                    data['datasetID'] = 228
                data['data'] = json_dict
                combineddataset.append(data)
            elif outformat == 'csv':
                data['data'] = dataset['csvframe']
                resultdataset = data['data']

    if outformat == 'json':
        if combineddataset:
            finaldata = json.dumps(combineddataset, encoding="utf-8", sort_keys=True, indent=4)
            return Response(finaldata, mimetype='application/json')
    elif outformat == 'csv':
        return Response(resultdataset,  mimetype='text/plain')
Example #24
def export(settings=''):
    config = configuration()
    if config['error']:
        return config['error']

    activepage = 'Dashboard'
    perlbin = "/usr/bin/perl "
    path = config['datapath']
    varproject = request.args.get('project')
    varbase = request.args.get('base')
    dataset = request.args.get('dataset')
    fileID = request.args.get('fileID')
    maincontent = config['error']
    cmd = "/bin/cat " + path + fileID + '.csv'
    docheck = re.match(r'.*[;|\/\:\(\)]', cmd)
    if not docheck:
        p = Popen(cmd, shell=True, stdin=PIPE, stdout=PIPE, stderr=STDOUT, close_fds=True)
        maincontent = p.communicate()[0]
    else:
        maincontent = 'Something went wrong..'
    return maincontent
Example #25
def search(qurl):
    # Global settings
    config = configuration()
    root = config['dataverseroot']

    # Load topics
    topicurl = config['apiroot'] + "/collabs/static/data/dataframe100_0.json"
    topics = loadjson(topicurl)

    # Input
    IDS = getindicators(qurl)

    datasets = dataset_search(root, IDS, topics)
    return datasets
Example #26
def read_serial_list(task):
    serial_list_dict = {}
    configs = configuration()
    root = configs.get_log_analysis_path()
    serial_list_path = os.path.join(root, task, __MY_VAR_ROOT_PATH__,__FN_RESULTS_LIST__)
    if os.path.isfile(serial_list_path):
        file_serial_list = open(serial_list_path, 'r')
        for line in file_serial_list:
            arrays = line.split('=')
            serial = arrays[0]
            values = arrays[1].split('|')
            serial_list_dict[serial] = values
        file_serial_list.close()
    return serial_list_dict
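
A quick round-trip sketch (not part of the original listing; the task name is
made up) showing how save_serial_list and read_serial_list fit together:

# save_serial_list writes one "key=value1|value2|..." line per serial;
# read_serial_list splits those lines back into a dict of lists.
save_serial_list('task01', {'serialA': ['login', 'query', 'logout']})
restored = read_serial_list('task01')
# Note: the last value keeps its trailing newline ('logout\n'), because
# read_serial_list splits each raw line without stripping it first.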
Example #27
def load_indicators(filename):
    config = configuration()
    csvfile = config['clearance'] + "/collabs/static/data/" + filename
    ufile = urlopen(csvfile)
    data = pd.read_csv(ufile, delimiter='\t')
    df = data
    if csvfile:
        d = [
            dict([(colname, row[i]) for i, colname in enumerate(df.columns)])
            for row in df.values
        ]

    return json.dumps(d)
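
The nested comprehension above builds one dict per DataFrame row. For
reference, pandas can build the same list of records directly; an equivalent
sketch (values may still come back as numpy scalar types):

# drop-in replacement for the comprehension above
d = df.to_dict(orient='records')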
Example #28
def visualize():
    config = configuration()
    resp = 'visualize'
    view = 'panel'
    if request.args.get('view'):
        view = request.args.get('view')

    if config['error']:
        return config['error']

    if view == 'panel':
        resp = panel()
    elif view == 'time-series':
        resp = chartlib()
    elif view == 'treemap':
        resp = treemap()
    return resp
Example #29
def dataverse_search(apiurl):
    dataframe = loadjson(apiurl)
    config = configuration()
    topics = topicscoder(config)

    info = []
    tmpinfo = []
    panel = {}
    panel['url'] = 'url'
    panel['indicator'] = '<b>&nbsp;Panel data</b>'
    panel['description'] = '<b>&nbsp;All datasets in panel format</b>'
    panel['name'] = '&nbsp;Panel data'
    panel['topic'] = '<b>&nbsp;Selected topics</b>'
    panel['pid'] = 'Panel'
    panel['citation'] = 'citation'

    link = config['apiroot'] + "/collabs/dashboard?dataset="
    handles = []
    for item in dataframe['data']['items']:
        datasets = {}
        datasets['url'] = item['url']
        datasets['pid'] = item['global_id']
        handles.append(datasets['pid'])
        datasets['indicator'] = '&nbsp;' + item['name']
        try:
            datasets['topic'] = '&nbsp;' + topics[item['name']]
        except:
            datasets['topic'] = '&nbsp;' + item['description']
        abstract = str(item['description'])
        resume = re.search(r'^(.+?\.)\s+', str(item['description']))
        if resume:
            abstract = resume.group(1)
        datasets['description'] = '&nbsp;' + abstract
        datasets['startyear'] = 1500
        datasets['endyear'] = 2010
        datasets['analyze'] = link + item['global_id'] + '&action=visualize'

        tmpinfo.append(datasets)

    panel['pid'] = 'Panel' + str(handles)
    info.append(panel)
    for data in tmpinfo:
        info.append(data)

    return info
Example #30
def chartonprint(webpage, fileformat, year, code, proxy): 
    # Print function
    (cmd, size, fileonweb) = ('', '1024', '')
    code = str(randomword(10))

    # Configuration
    config = configuration()
    path = config['path']
    phantompath = config['phantompath']
    imagepathloc = config['imagepath'] 
    filesvg = config['tmpdir'] + '/' + year + '_' + code + '.svg'  
    print filesvg

    if fileformat != 'shapefile':  # shapefiles skip the phantomjs rendering step
        cmd = phantompath + "/phantomjs/lib/phantom/bin/phantomjs --ssl-protocol=any --disk-cache=true " + path + "/static/renderHTML.js '" + webpage + "'"

        p = Popen(cmd, shell=True, stdin=PIPE, stdout=PIPE, stderr=STDOUT, close_fds=True)
        html = p.communicate()[0]
        result = re.findall(r'<svg.+?</svg>', html, re.DOTALL)
        if year:
            svgfile = open(filesvg, "w")
            svgfile.write(result[0])
            svgfile.close()

    if fileformat == 'SVG':
        svgfileout = '/get?svg=' + year + '_' + code + '_' + "map.svg"
        return "<a href=\"" + svgfileout + "\">Download SVG file</a>"

    if fileformat == 'png':
        outfile = year + '_' + code + '_' + 'map.png'
        outdirfile = imagepathloc + '/' + outfile
        cmd = "/usr/bin/inkscape " + filesvg + " -e " + outdirfile + " -h " + size + " -D -b '#ffffff'"
        fileonweb = config['proxy']  + config['imagepathonweb'] + '/' + outfile

    if cmd:
        p = Popen(cmd, shell=True, stdin=PIPE, stdout=PIPE, stderr=STDOUT, close_fds=True)
        result = p.communicate()[0]
        image = outfile

    return fileonweb
Example #31
    def __init__(self, configlocation):
        self.config = configuration(configlocation)
Example #32
from ynab import ynabapi
from bunq2ynab import sync

LOGGER = configure_logger(__name__)

# Add commandline switches:
parser = argparse.ArgumentParser()
parser.add_argument('-l', action='store_true', help="Run in list mode")
args = parser.parse_args()

# Set configuration file path
config_file_path = os.path.join(os.path.dirname(__file__), 'config.json')
config_location = 'file:' + config_file_path

# Initialize Config, Bunq and YNAB
config = configuration(config_location)
b = bunqapi(config_location)
y = ynabapi(config_location)

# Start program
if args.l:
    LOGGER.info('Running in LIST mode, no accounts will be synced.')
    b.list_users()
    y.list_budget()
else:
    for i in config.value['bunq2ynab']:
        for key, value in i.items():
            assert value, '{0} cannot be an empty string. Setup your sync pairs in your config'.format(
                key)

    try:
Example #33
    def __init__(self, configlocation):
        self.config = configuration(configlocation)
        self.url = 'https://api.youneedabudget.com/'
Example #34
preserving state from one request to the next.  In some 
situations, hidden fields can be a useful alternative to 
using cookies (the session variable). 
"""

import flask
import logging

# Our own modules
import config

###
# Globals
###
app = flask.Flask(__name__)
CONFIG = config.configuration(proxied=(__name__ != "__main__"))
app.secret_key = CONFIG.SECRET_KEY  # Should allow using session variables

###
# Pages
###


@app.route("/")
@app.route("/index")
def index():
    """The main page of the application.
    In this app, we will carry state from screen
    to screen in hidden fields.  Initially we 
    know nothing. 
    """
Example #35
from PIL import Image
import numpy as np
import cv2
from torchvision import transforms as trans
from config import configuration

MEAN = [0.5, 0.5, 0.5]
STD = [0.5, 0.5, 0.5]
TARGET_IMAGE_SIZE = configuration().inputSize

def image_norm(fname):
    img = Image.open(fname, 'r')
    if img.size[0] == 0 or img.size[1] == 0:
        return 0

    resize_img = img.resize((TARGET_IMAGE_SIZE, TARGET_IMAGE_SIZE))

    if len(resize_img.split()) == 1:
        gray_img = np.asarray(((np.float32(resize_img) / 255.0)-MEAN[0])/STD[0])
    elif len(resize_img.split()) == 3:
        resize_img = resize_img.convert('L')
        gray_img = np.asarray(((np.float32(resize_img) / 255.0)-MEAN[0])/STD[0])
    else:
        return 0
    normImg = np.asarray([gray_img])

    return normImg


Example #36
Build an application and launch it.  This is the entry point that creates the
Flask application.  This should be passed to FLASK_APP::

    export FLASK_APP=app
    flask run

"""
import logging
import atexit
import config
import signal
import sys
from webapp import create_app, requestManager, db
from webapp.models import Monitor

config_class = config.configuration()


def configure_logging(config):
    """Configure application logging for `app`"""
    from logging.config import dictConfig
    default_format = \
        '[%(asctime)s] %(levelname)s ' \
        'in %(module)s (%(threadName)s): %(message)s'
    logging_config = {
        'version': 1,
        'formatters': {
            'default': {
                'format': default_format,
            }
        },
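
The snippet is cut off inside logging_config. For comparison, a minimal
complete dictConfig using the same formatter; the handler name and levels
here are assumptions, not the original values:

logging_config = {
    'version': 1,
    'formatters': {'default': {'format': default_format}},
    'handlers': {
        'console': {
            'class': 'logging.StreamHandler',
            'formatter': 'default',
        },
    },
    'root': {'level': 'INFO', 'handlers': ['console']},
}
dictConfig(logging_config)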
Example #37
def main():
    handle = ''
    dataverse = ''
    customkey = ''
    config = configuration()

    try:
        myopts, args = getopt.getopt(sys.argv[1:], "H:r:d:k:D:")
    except getopt.GetoptError as e:
        print(str(e))
        print(
            "Usage: %s -H handle -r rhandle -d dataverse -k customkey -D customdv"
            % sys.argv[0])
        sys.exit(2)

    (handle, rhandle, customdv) = ('', '', '')
    for o, a in myopts:
        if o == '-H':
            handle = a
        if o == '-r':
            rhandle = a
        if o == '-d':
            dataverse = a
        if o == '-k':
            customkey = a
        if o == '-D':
            customdv = a

    dataset = {}
    DEBUG = ''
    path = config['path']
    # Default dataverse
    root = config['dataverseroot']
    key = config['key']
    dvname = config['branch']
    title = 'Title'
    units = 'Units'

    if dataverse:
        root = dataverse
    if customkey:
        key = customkey
    if customdv:
        dvname = customdv

    files = []
    if rhandle:
        contentsapi = root + "/api/dataverses/" + dvname + "/contents?key=" + key
        print contentsapi
        newdata = load_api_data(contentsapi, '')
        metadata = newdata['data']
        for item in metadata:
            dv = item['id']
            files = getfiles(root, dv, key)

    if handle:
        print handle
        (datahandle, datasetID, fileID) = parsehandle(handle)
        files.append(fileID)

    for fileID in files:
        fullpath = downloadfile(root, path, fileID, key)
        print fullpath
        (pid, revid, cliohandle, clearpid) = findpid(handle)
        (jsonfile, csvfile) = ('', '')
        #try:
        if pid:
            handle = pid
            try:
                (jsonfile, csvfile, tmptitle,
                 tmpunits) = dataextractor(fullpath, path, pid, fileID)
            except:
                resultfile = config['tmpdir'] + "/" + fileID
                (jsonfile, csvfile, tmptitle,
                 tmpunits) = excelvalidator(config['phantompath'], fullpath,
                                            resultfile, config['tmpdir'])

            if jsonfile:
                remove = removedata('datasets', 'handle', clearpid)
                try:
                    title = str(tmptitle)
                    units = str(tmpunits)
                except:
                    pass
                print "ADD " + str(jsonfile)
                datasetadd(jsonfile, csvfile, clearpid, handle, title, units,
                           datasetID)
                print handle
                print clearpid
                print datasetID
Example #38
def initialization():
    """
    This function reads the user-defined parameters and paths from :mod:`config.py`, then adds additional parameters related
    to the shapefiles. 
    First, it saves the spatial scope of the problem.
    Then, it distinguishes between countries, exclusive economic zones and subregions. For each one of them, 
    it saves the geodataframes, the number of features, and the coordinates of the bounding boxes of each feature.
    Finally, it saves the number of rows and columns in the low and high resolution, and a georeference dictionary
    used for saving tif files.

    :return: The updated dictionaries param and paths.
    :rtype: tuple(dict, dict)
    """
    timecheck("Start")
    # import param and paths
    paths, param = configuration()

    # Read shapefile of scope
    scope_shp = gpd.read_file(paths["spatial_scope"])
    param["spatial_scope"] = define_spatial_scope(scope_shp)

    res_weather = param["res_weather"]
    res_desired = param["res_desired"]
    Crd_all = crd_merra(param["spatial_scope"], res_weather)[0]
    param["Crd_all"] = Crd_all
    ymax, xmax, ymin, xmin = Crd_all
    bounds_box = Polygon([(xmin, ymin), (xmin, ymax), (xmax, ymax), (xmax, ymin)])

    timecheck("Read shapefile of countries")
    # Extract land areas
    countries_shp = gpd.read_file(paths["Countries"], bbox=scope_shp)
    countries_shp = countries_shp.to_crs({"init": "epsg:4326"})

    # Crop all polygons and take the part inside the bounding box
    countries_shp["geometry"] = countries_shp["geometry"].buffer(0)
    countries_shp["geometry"] = countries_shp["geometry"].intersection(bounds_box)
    countries_shp = countries_shp[countries_shp.geometry.area > 0]
    param["regions_land"] = countries_shp
    param["nRegions_land"] = len(param["regions_land"])
    Crd_regions_land = np.zeros((param["nRegions_land"], 4))

    for reg in range(0, param["nRegions_land"]):
        # Box coordinates for MERRA2 data
        r = countries_shp.bounds.iloc[reg]
        box = np.array([r["maxy"], r["maxx"], r["miny"], r["minx"]])[np.newaxis]
        Crd_regions_land[reg, :] = crd_merra(box, res_weather)

    timecheck("Read shapefile of EEZ")
    # Extract sea areas
    eez_shp = gpd.read_file(paths["EEZ_global"], bbox=scope_shp)
    eez_shp = eez_shp.to_crs({"init": "epsg:4326"})

    # Crop all polygons and take the part inside the bounding box
    eez_shp["geometry"] = eez_shp["geometry"].buffer(0)
    eez_shp["geometry"] = eez_shp["geometry"].intersection(bounds_box)
    eez_shp = eez_shp[eez_shp.geometry.area > 0]
    param["regions_sea"] = eez_shp
    param["nRegions_sea"] = len(param["regions_sea"])
    Crd_regions_sea = np.zeros((param["nRegions_sea"], 4))

    for reg in range(0, param["nRegions_sea"]):
        # Box coordinates for MERRA2 data
        r = eez_shp.bounds.iloc[reg]
        box = np.array([r["maxy"], r["maxx"], r["miny"], r["minx"]])[np.newaxis]
        Crd_regions_sea[reg, :] = crd_merra(box, res_weather)

    timecheck("Read shapefile of subregions")
    # Read shapefile of regions
    regions_shp = gpd.read_file(paths["subregions"], bbox=scope_shp)
    regions_shp = regions_shp.to_crs({"init": "epsg:4326"})

    # Crop all polygons and take the part inside the bounding box
    regions_shp["geometry"] = regions_shp["geometry"].intersection(bounds_box)
    regions_shp = regions_shp[regions_shp.geometry.area > 0]
    regions_shp.sort_values(by=["NAME_0"], inplace=True)
    regions_shp.reset_index(inplace=True)
    param["regions_sub"] = regions_shp
    param["nRegions_sub"] = len(param["regions_sub"])
    Crd_regions_sub = np.zeros((param["nRegions_sub"], 4))

    for reg in range(0, param["nRegions_sub"]):
        # Box coordinates for MERRA2 data
        r = regions_shp.bounds.iloc[reg]
        box = np.array([r["maxy"], r["maxx"], r["miny"], r["minx"]])[np.newaxis]
        Crd_regions_sub[reg, :] = crd_merra(box, res_weather)

    # Saving parameters
    param["Crd_subregions"] = Crd_regions_sub
    param["Crd_regions"] = np.concatenate((Crd_regions_land, Crd_regions_sea), axis=0)

    # Indices and matrix dimensions
    Ind_all_low = ind_merra(Crd_all, Crd_all, res_weather)
    Ind_all_high = ind_merra(Crd_all, Crd_all, res_desired)

    param["m_high"] = int((Ind_all_high[:, 0] - Ind_all_high[:, 2] + 1)[0])  # number of rows
    param["n_high"] = int((Ind_all_high[:, 1] - Ind_all_high[:, 3] + 1)[0])  # number of columns
    param["m_low"] = int((Ind_all_low[:, 0] - Ind_all_low[:, 2] + 1)[0])  # number of rows
    param["n_low"] = int((Ind_all_low[:, 1] - Ind_all_low[:, 3] + 1)[0])  # number of columns
    param["GeoRef"] = calc_geotiff(Crd_all, res_desired)
    timecheck("End")

    # Display initial information
    print("\nRegion: " + param["region_name"] + " - Year: " + str(param["year"]))
    print("Folder Path: " + paths["region"] + "\n")

    return paths, param
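
A usage sketch for the function above, assuming config.py provides the paths
and parameters the docstring describes:

paths, param = initialization()
# param now holds the cropped GeoDataFrames and raster dimensions, e.g.
# param["regions_land"], param["Crd_regions"], param["m_high"], param["n_high"]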
Example #39
    def __init__(self):
        current_configuration = configuration()
        current_configuration.parse("config.ini")
Example #40
def download():
    (classification, pid, root, switch, datafile) = ('modern', '', '',
                                                     'modern', '')
    handle = ''
    config = configuration()
    cmd = "--insecure -u " + config['key'] + ": " + config[
        'dataverseroot'] + "/dvn/api/data-deposit/v1.1/swordv2/statement/study/"

    config['remote'] = ''
    datafilter = {}
    datafilter['startyear'] = '1500'
    datafilter['endyear'] = '2010'
    datafilter['ctrlist'] = ''

    tmpdir = config['tmpdir']
    filerandom = randomword(10)
    #filerandom = '12345'
    arc = "data" + filerandom + ".zip"
    filename = filerandom
    finaldir = config['path'] + '/static/tmp'
    # ToDO
    if filename:
        finaldir = str(finaldir) + '/' + str(filename)
        tmpdir = str(tmpdir) + '/' + str(filename)

    try:
        os.mkdir(tmpdir)
        os.mkdir(finaldir)
    except OSError:
        pass  # directories may already exist

    if request.args.get('handle'):
        handle = request.args.get('handle')
    if request.args.get('type[0]') == 'historical':
        classification = request.args.get('type[0]')
        switch = classification
    if request.args.get('y[min]'):
        datafilter['startyear'] = request.args.get('y[min]')
    if request.args.get('y[max]'):
        datafilter['endyear'] = request.args.get('y[max]')

    # Select countries
    customcountrycodes = ''
    f = request.args
    for key in f.keys():
        if is_location(key):
            for value in sorted(f.getlist(key)):
                customcountrycodes = str(customcountrycodes) + str(value) + ','
    if customcountrycodes:
        customcountrycodes = customcountrycodes[:-1]
        datafilter['ctrlist'] = customcountrycodes

    if request.args.get('ctrlist'):
        datafilter['ctrlist'] = request.args.get('ctrlist')

    if request.args.get('pid'):
        pid = request.args.get('pid')
        ispanel = ''
        try:
            (pids, pidslist) = pidfrompanel(pid)
            handles = pids
            handle = pids[0]
            match = re.match(r'Panel\[(.+)\]', pid)
            if match:
                ispanel = 'yes'
        except:
            handles = [pid]
            handle = pid

        if ispanel:
            dirforzip = ''
            for handle in handles:
                dirforzip = get_papers(config['dataverseroot'], config['key'],
                                       cmd, handle, tmpdir, arc, finaldir)

            (header, panelcells, metadata,
             totalpanel) = build_panel(config, switch, handles, datafilter)
            filename = "paneldata.xlsx"
            metadata = []
            datadir = config['webtest']
            localoutfile = panel2excel(dirforzip, filename, header, panelcells,
                                       metadata)
            arc = 'dataarchive.zip'
            compile2zip(dirforzip, arc)
            root = config['apiroot'] + "/collabs/static/tmp/" + str(arc)
            return redirect(root, code=301)

    if classification:
        outfile = "clioinfra.xlsx"
        dirforzip = get_papers(config['dataverseroot'], config['key'], cmd,
                               handle, tmpdir, arc, finaldir)
        #fullpath = config['webtest'] + "/" + str(outfile)
        fullpath = dirforzip

        # Check selection
        isselection = 'yes'
        if datafilter['startyear'] == '1500':
            if datafilter['ctrlist'] == '':
                isselection = 'yes'

        if isselection:
            (datafile, outfilefinal,
             finalsubset) = dataframe_compiler(config, fullpath, handle,
                                               classification, datafilter)
            #return datafile.to_html()
        else:
            # Copy original dataset
            source = os.listdir(tmpdir)
            for excelfile in source:
                shutil.copy(tmpdir + '/' + excelfile, dirforzip)

        #return outfilefinal
        arc = 'dataarchive.zip'
        if datafile:
            arc = "%s_%s.zip" % (datafile, switch)
        compile2zip(dirforzip, arc)
        root = config['apiroot'] + "/collabs/static/tmp/" + str(arc)
        #root = config['apiroot'] + "/collabs/static/tmp/" + str(outfile)
        return redirect(root, code=301)
    else:
        zipfile = downloadzip(pid)
        # CHANGE
        #return zipfile
        # DEBUG1
        root = config['apiroot'] + "/collabs/static/tmp/" + zipfile
        # HTML
        #resp = make_response(render_template('progress.html', download=root))
        #return "<a href=\"" + str(root) + "\">Download dataset(s) with all papers (zip archive)</a>"
        #return resp
        return redirect(root, code=301)
Example #41
# import datetime # But we still need time
from dateutil import tz  # For interpreting local times

# OAuth2  - Google library implementation for convenience
from oauth2client import client
import httplib2  # used in oauth2 flow

# Google API for services
from apiclient import discovery

###
# Globals
###
import config
if __name__ == "__main__":
    CONFIG = config.configuration()
else:
    CONFIG = config.configuration(proxied=True)

app = flask.Flask(__name__)
app.debug = CONFIG.DEBUG
app.logger.setLevel(logging.DEBUG)
app.secret_key = CONFIG.SECRET_KEY

SCOPES = 'https://www.googleapis.com/auth/calendar.readonly'
CLIENT_SECRET_FILE = CONFIG.GOOGLE_KEY_FILE  ## You'll need this
APPLICATION_NAME = 'MeetMe class project'

checked = {}

Example #42
#############################
sys.path.append(
    os.path.abspath(os.path.join(os.path.dirname(__file__), '../modules')))
from config import configuration


def loadjson(apiurl):
    jsondataurl = apiurl

    req = urllib2.Request(jsondataurl)
    opener = urllib2.build_opener()
    f = opener.open(req)
    dataframe = simplejson.load(f)
    return dataframe


config = configuration()

connection = Connection(host, token)
dataverse = connection.get_dataverse('clioinfra')

for item in dataverse.get_contents():
    # u'protocol': u'hdl', u'authority': u'10622' u'identifier': u'R8EJJF'
    try:
        handle = str(item['protocol']) + ':' + str(
            item['authority']) + "/" + str(item['identifier'])
        datasetid = item['id']
        url = "https://" + str(host) + "/api/datasets/" + str(
            datasetid) + "/versions/1.0?&key=" + str(token)
        print item
        dataframe = loadjson(url)
        for fileitem in dataframe['data']['files']:
"""

import flask
import logging
import arrow  # Replacement for datetime, based on moment.js

# Our own modules
import pre  # Preprocess schedule file
import config  # Configure from configuration files or command line

###
# Globals
###
app = flask.Flask(__name__)
if __name__ == "__main__":
    configuration = config.configuration()
else:
    # If we aren't main, the command line doesn't belong to us
    configuration = config.configuration(proxied=True)

if configuration.DEBUG:
    app.logger.setLevel(logging.DEBUG)

# Pre-processed schedule is global, so be careful to update
# it atomically in the view functions.
#
schedule = pre.process(open(configuration.SYLLABUS))

###
# Pages
# Each of these transmits the default "200/OK" header
Example #44
def downloadzip(pid):
    DEBUG = 0
    (fullpath) = ('')
    fullmetadata = {}
    logscale = 0

    config = configuration()
    config['remote'] = 'on'
    API_TOKEN = config['key']
    HOSTNAME = config['dataverseroot']
    cmd = "--insecure -u " + API_TOKEN + ": " + HOSTNAME + "/dvn/api/data-deposit/v1.1/swordv2/statement/study/"
    tmpdir = config['tmpdir']
    filerandom = randomword(10)
    #filerandom = '12345'
    arc = "data" + filerandom + ".zip"
    filename = filerandom
    finaldir = config['path'] + '/static/tmp'
    # ToDO
    if filename:
        finaldir = str(finaldir) + '/' + str(filename)
        tmpdir = str(tmpdir) + '/' + str(filename)

    try:
        os.mkdir(tmpdir)
        os.mkdir(finaldir)
    except OSError:
        pass  # directories may already exist

    customyear = ''
    fromyear = request.args.get('y[min]')
    toyear = request.args.get('y[max]')
    historical = request.args.get('type[0]')
    (handles, pidslist) = pidfrompanel(pid)
    try:
        if pidslist:
            fullmetadata = load_fullmetadata(pidslist)
    except:
        showwarning = 1

    # Log scales switch
    if request.args.get('logscale'):
        logscale = 1

    # Select countries
    customcountrycodes = ''
    f = request.args
    for key in f.keys():
        if is_location(key):
            for value in sorted(f.getlist(key)):
                customcountrycodes = str(customcountrycodes) + str(value) + ','
    if customcountrycodes:
        customcountrycodes = customcountrycodes[:-1]

    if handles:
        if historical:
            api = config['apiroot'] + "/collabs/static/data/historical.json"
            (regions, countries, ctr2reg, webmapper, geocoder) = histo(api, '')
            hist = countries
        else:
            hist = ''

    (classification, geodataset, title,
     units) = content2dataframe(config, config['geocoderhandle'])

    #geocoder = buildgeocoder(geodataset, config)
    (modern, historical) = loadgeocoder(config, geodataset, 'geocoder')
    for handle in handles:
        #if remote:
        #    (class1, dataset) = loaddataset_fromurl(config, handle)
        #else:
        #    dataset = loaddataset(handles)

        #(cfilter, notint) = selectint(activeindex.values)
        #(moderndata, historicaldata) = loadgeocoder(dataset, '')
        # CHANGE
        #return str(dataset.index)
        (header, panelcells, codes, datahub, data, handle2ind, unit2ind,
         original) = data2panel(handles, customcountrycodes, fromyear, toyear,
                                customyear, hist, logscale)
        filename = filename + '.xls'
        fullpath = panel2excel(finaldir, filename, header, panelcells,
                               fullmetadata)
    else:
        # Clio format download
        zipfile = get_papers(HOSTNAME, API_TOKEN, cmd, pid, tmpdir, arc,
                             finaldir)
        (alonepid, revid, cliohandle, clearpid) = findpid(pid)
        if alonepid:
            handles = [clearpid]

        for pid in handles:
            if historical:
                api = config['apiroot'] + "/collabs/static/data/historical.json"
                (regions, countries, ctr2reg, webmapper,
                 geocoder) = histo(api, '')
                hist = countries
            else:
                hist = ''
            filename = filename + '.xls'
            # 2DEBUG
            (header, panelcells, codes, datahub, data, handle2ind, unit2ind,
             originalvalues) = data2panel(handles, customcountrycodes,
                                          fromyear, toyear, customyear, hist,
                                          logscale)
            #codes = hist
            #return str(fullmetadata)
            metadata = fullmetadata
            result = individual_dataset(finaldir, filename, handle2ind[pid],
                                        unit2ind[pid], datahub, data[pid],
                                        codes, metadata)

    try:
        for everypid in handles:
            # Download papers
            zipfile = get_papers(HOSTNAME, API_TOKEN, cmd, everypid, tmpdir,
                                 arc, finaldir)
    except:
        nopapers = 1

    compile2zip(finaldir, arc)
    filename = arc
    return filename
Example #45
import os
from flask import Flask, redirect, url_for, request, render_template
from pymongo import MongoClient
import flask
import arrow  # Replacement for datetime, based on moment.js
import acp_times # Brevet time calculations
import config
import time
import logging
import datetime
app = flask.Flask(__name__)
CONFIG = config.configuration()
app.secret_key = CONFIG.SECRET_KEY

client = MongoClient("db", 27017)
db = client.tododb
db.tododb.drop()


'''@app.route('/')
def todo():
    _items = db.tododb.find()
    items = [item for item in _items]

    return render_template('todo.html', items=items)

@app.route('/new', methods=['POST'])
def new():
    item_doc = {
        'name': request.form['name'],
Example #46
def dataapi():
    (datafilter, handles) = ({}, [])
    datafilter['ctrlist'] = ''
    logscale = ''
    config = configuration()
    customyear = ''
    fromyear = '1500'
    toyear = '2012'
    categoriesMax = 6
    countriesNum = 200
    geocoder = ''
    (special, getrange, colormap, pallette, customcountrycodes,
     switch) = ('', '', '', '', '', 'modern')

    if request.args.get('special'):
        special = request.args.get('special')
    if request.args.get('logscale'):
        logscale = request.args.get('logscale')
    if request.args.get('year'):
        customyear = request.args.get('year')
        datafilter['startyear'] = customyear
        datafilter['endyear'] = customyear
    if request.args.get('catmax'):
        categoriesMax = int(request.args.get('catmax'))
    if request.args.get('getrange'):
        getrange = request.args.get('getrange')
    if request.args.get('colors'):
        pallette = request.args.get('colors')
    if request.args.get('colormap'):
        colormap = request.args.get('colormap')
    if request.args.get('geocoder'):
        switch = request.args.get('geocoder')
        if switch == 'on':
            switch = 'modern'
    if request.args.get('handle'):
        handlestring = request.args.get('handle')
        ishandle = re.search(r'(hdl:\d+\/\w+)', handlestring)
        if ishandle:
            handle = ishandle.group(1)
            handle = handle.replace("'", "")
        else:
            handle = handlestring
        handles.append(handle)

    if request.args.get('ctrlist'):
        customcountrycodes = ''
        tmpcustomcountrycodes = request.args.get('ctrlist')
        c = tmpcustomcountrycodes.split(',')
        for ids in sorted(c):
            if ids:
                customcountrycodes = str(customcountrycodes) + str(ids) + ','
        customcountrycodes = customcountrycodes[:-1]
        datafilter['ctrlist'] = tmpcustomcountrycodes

    hist = {}
    try:
        if len(customcountrycodes):
            countriesNum = len(customcountrycodes.split(','))
            if countriesNum < categoriesMax:
                if countriesNum >= 1:
                    categoriesMax = countriesNum
    except:
        pass

    # Old version of panel data
    #(header, panelcells, codes, x1, x2, x3, x4, originalvalues) = data2panel(handles, customcountrycodes, fromyear, toyear, customyear, hist, logscale)
    panelcells = []
    # New version is fast
    if config:
        (geocoder, geolist, oecd2webmapper, modern,
         historical) = request_geocoder(config, '')
        (subsets, panel) = ({}, [])
        try:
            (origdata, maindata,
             metadata) = request_datasets(config, switch, modern, historical,
                                          handles, geolist)

            for handle in handles:
                (datasubset, ctrlist) = datasetfilter(maindata[handle],
                                                      datafilter)
                if not datasubset.empty:
                    datasubset = datasubset.dropna(how='all')
                    panel.append(datasubset)
                    subsets[handle] = datasubset
        except:
            subsets[handles[0]] = get_iishvisitors_frame(int(customyear))

        (panelcells, originalvalues) = dataset2panel(config,
                                                     subsets[handles[0]],
                                                     modern, logscale)
    #(header, panelcells, codes, x1, x2, x3, x4, originalvalues) = data2panel(handles, customcountrycodes, fromyear, toyear, customyear, hist, logscale)

    #modern = moderncodes(config['modernnames'], config['apiroot'])
    #jsondata = data2json(modern, codes, panelcells)
    #data = json.dumps(jsondata, ensure_ascii=False, sort_keys=True, indent=4)
    # SCALES
    if switch:
        if switch == 'historical':
            geocoder = historical
        else:
            geocoder = modern
    #geocoder = ''
    (defaultcolor, colors) = getcolors(categoriesMax, pallette, colormap)
    (catlimit, ranges, dataset) = getscales(config, panelcells, colors,
                                            categoriesMax, geocoder,
                                            originalvalues, switch, logscale)

    if getrange:
        (showrange, tmprange) = combinerange(ranges)
        webscale = webscales(showrange, colors, defaultcolor)
        data = json.dumps(webscale,
                          ensure_ascii=False,
                          sort_keys=True,
                          indent=4)
        return Response(data, mimetype='application/json')
    else:
        data = json.dumps(dataset,
                          ensure_ascii=False,
                          sort_keys=True,
                          indent=4)
        return Response(data, mimetype='application/json')
Example #47
import os
from flask import Flask, render_template, session, request
from flaskext.mysql import MySQL

from config import configuration
import logging

logging.basicConfig(format=' %(levelname)s - %(asctime)s - %(message)s ',
                    level=logging.DEBUG)

app = Flask(__name__)

mysql = MySQL()
app.config['MYSQL_DATABASE_USER'] = configuration().getDbUser()
app.config['MYSQL_DATABASE_PASSWORD'] = configuration().getDbPass()
app.config['MYSQL_DATABASE_DB'] = configuration().getDatabase()
app.config['MYSQL_DATABASE_HOST'] = configuration().getLocalhost()
mysql.init_app(app)


@app.route('/', methods=['GET'])
def home():
    if not session.get('logged_in'):
        return render_template('login.html')

    else:
        configuration().getAppUser()
        logging.info(getTotalFlights())
        logging.info(getAllFlights2020())
        logging.info(getAllFlights2019())
        logging.info(getAllFlights2018())
Example #48
def tableapi():
    # years in filter
    config = configuration()
    switch = 'modern'
    datafilter = {}
    datafilter['ctrlist'] = ''
    customyear = ''
    fromyear = '1500'
    datafilter['startyear'] = fromyear
    toyear = '2012'
    datafilter['endyear'] = toyear
    customcountrycodes = ''
    (aggr, logscale, dataset, handles) = ('', '', '', [])

    # Select countries
    f = request.args
    customcountrycodes = ','.join(
        str(value) for value in sorted(f.getlist('loc')) if value)

    #handle = "F16UDU"
    # HANDLE
    if request.args.get('handle'):
        handledataset = request.args.get('handle')
        try:
            (pids, pidslist) = pidfrompanel(handledataset)
            handles.append(pids[0])
        except:
            handles.append(handledataset)
            nopanel = 'yes'
    if request.args.get('dataset'):
        dataset = request.args.get('dataset')
    if request.args.get('hist'):
        switch = 'historical'
    if request.args.get('ctrlist'):
        tmpcustomcountrycodes = request.args.get('ctrlist')
        customcountrycodes = ','.join(
            ids for ids in sorted(tmpcustomcountrycodes.split(',')) if ids)
        datafilter['ctrlist'] = customcountrycodes

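    # Fall back to the Netherlands (ISO numeric code 528) when no countries are selected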
    if not customcountrycodes:
        customcountrycodes = '528'
    if request.args.get('yearmin'):
        fromyear = request.args.get('yearmin')
        datafilter['startyear'] = fromyear
    if request.args.get('yearmax'):
        toyear = request.args.get('yearmax')
        datafilter['endyear'] = toyear
    if request.args.get('aggr'):
        aggr = request.args.get('aggr')
    # Log scales switch
    if request.args.get('logscale'):
        logscale = request.args.get('logscale')
    DEBUG = 0

    old = ''
    if old:
        apifile = str(dataset) + ".json"
        jsonapi = config['apiroot'] + "/collabs/static/data/" + apifile
        dataframe = load_api_data(jsonapi, '')
        loccodes = loadcodes(dataframe)
        (ctr, header) = countryset(customcountrycodes, loccodes)
        indicator = ''
        (frame, years, values, dates,
         original) = createframe(indicator, loccodes, dataframe, customyear,
                                 fromyear, toyear, ctr, logscale, DEBUG)
        names = [
            'indicator', 'm', 'ctrcode', 'country', 'year', 'intcode', 'value',
            'id'
        ]
        (csvdata, aggrdata) = combinedata(ctr, frame, loccodes)
    # New version is fast
    else:
        (geocoder, geolist, oecd2webmapper, modern,
         historical) = request_geocoder(config, '')
        (origdata, maindata,
         metadata) = request_datasets(config, switch, modern, historical,
                                      handles, geolist)
        (subsets, panel) = ({}, [])

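        # Keep only non-empty filtered frames, indexed by dataset handle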
        for handle in handles:
            (datasubset, ctrlist) = datasetfilter(maindata[handle], datafilter)
            if not datasubset.empty:
                datasubset = datasubset.dropna(how='all')
                panel.append(datasubset)
                subsets[handle] = datasubset
        classification = modern
        if switch == 'historical':
            classification = historical
        (csvdata, aggrdata) = dataset_to_csv(config, subsets[handles[0]],
                                             classification)

    if aggr:
        csvdata = aggrdata

    return (csvdata, aggrdata)
Example #49
0
def initialization():
    """
    This function reads the user-defined parameters and paths from :mod:`config.py`, then checks the validity of the input files. If they are missing or are not rasters,
    a warning is thrown and the code is exited. The same applies if the rasters do not have the same resolution or scope.
    
    If the input files are valid, the CSV file in *input_stats* is generated and filled with the information that is already available. It will be called by other functions
    and eventually filled by them.

    :return: The updated dictionaries param and paths.
    :rtype: tuple(dict, dict)
    """
    timecheck("Start")
    # import param and paths
    paths, param = configuration()

    # Check whether the inputs are correct
    if not len(paths["inputs"]):
        warn("No input file given", UserWarning)
        sys.exit(0)
    for input_file in paths["inputs"]:
        if not os.path.isfile(input_file):
            warn("File does not exist: " + input_file, UserWarning)
            sys.exit(0)
        elif not input_file.endswith(".tif"):
            warn("File is not raster: " + input_file, UserWarning)
            sys.exit(0)

    # Check that all rasters have the same scope and resolution
    for input_file in paths["inputs"]:
        dataset = gdal.Open(input_file)
        (upper_left_x, x_size, x_rotation, upper_left_y, y_rotation, y_size) = dataset.GetGeoTransform()
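        # Corner coordinates [north, east, south, west]; y_size is negative for north-up rasters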
        Crd_all = np.array(
            [[upper_left_y], [upper_left_x + x_size * dataset.RasterXSize], [upper_left_y + y_size * dataset.RasterYSize], [upper_left_x]]
        )
        if input_file == paths["inputs"][0]:
            Crd_all_old = Crd_all
            x_size_old = x_size
            y_size_old = y_size
        elif (Crd_all_old != Crd_all).any() or (x_size_old != x_size) or (y_size_old != y_size):
            warn("Not the same scope / resolution!", UserWarning)
            sys.exit(0)
        param["Crd_all"] = Crd_all
        param["res_desired"] = np.array([abs(x_size), abs(y_size)])
        param["GeoRef"] = calc_geotiff(Crd_all, param["res_desired"])

    # Create dataframe for input stats
    df = pd.DataFrame(
        index=[
            "map_parts_total",
            "output_raster_columns",
            "output_raster_rows",
            "ref_part_name",
            "size_max",
            "std_max",
            "max_no_of_cl_ref",
            "max_no_of_cl_total",
        ],
        columns=["value"],
    )
    df.loc[["output_raster_columns", "output_raster_rows"], "value"] = (dataset.RasterXSize, dataset.RasterYSize)
    if not os.path.exists(paths["input_stats"]):
        df.to_csv(paths["input_stats"], sep=";", decimal=",")
    timecheck("End")

    return paths, param
Example #50
0
def searchdata(query):
    config = configuration()
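    # Dataverse search API: /api/search?q=<query>&key=<API token>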
    searchapi = config[
        'dataverseroot'] + "/api/search?q=" + query + "&key=" + config['key']
    dataframe = load_api_data(searchapi, '')
    return json.dumps(dataframe)
Example #51
0
"""
John Doe's Flask API.
"""

import config
import os
from flask import Flask, render_template, abort, send_from_directory

app = Flask(__name__)

options = config.configuration()

ILLEGAL_CHARS = ["//", "~", ".."]
DOCROOT = options.DOCROOT


@app.route("/")
def hello():
    return "UOCIS docker demo!\n"


@app.route("/<path:filename>")
def get_page(filename):
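    # Reject path traversal attempts ("//", "~", "..") before touching the filesystem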
    if any((char in filename) for char in ILLEGAL_CHARS):
        abort(403)
    if filename not in os.listdir(DOCROOT):
        abort(404)
    return send_from_directory(DOCROOT, filename), 200


@app.errorhandler(404)
def not_found(error):
    # Hypothetical handler body; the source snippet ends at the decorator
    return "Not found\n", 404
Example #52
0
def treemapweb():
    (thisyear, datafilter, yearmin, lastyear, handles) = (0, {}, 1500, 2010,
                                                          [])
    (action, switch, geodataset) = ('', 'modern', '')
    config = configuration()
    datafilter['startyear'] = yearmin
    datafilter['endyear'] = lastyear
    datafilter['ctrlist'] = ''

    handle = ''
    if request.args.get('handle'):
        handledataset = request.args.get('handle')
        try:
            (pids, pidslist) = pidfrompanel(handledataset)
            handle = pids[0]
            handles.append(handle)
        except:
            handles.append(handledataset)
            nopanel = 'yes'

    if request.args.get('face'):
        handle = request.args.get('face')
        handles.append(handle)
    if request.args.get('year'):
        thisyear = request.args.get('year')
    if request.args.get('action'):
        action = request.args.get('action')
    if request.args.get('ctrlist'):
        datafilter['ctrlist'] = request.args.get('ctrlist')

    if int(thisyear) > 0:
        datafilter['startyear'] = int(thisyear)
        datafilter['endyear'] = int(thisyear)

    if request.args.get('historical'):
        switch = 'historical'
    # Geocoder
    (classification, geodataset, title,
     units) = content2dataframe(config, config['geocoderhandle'])

    #(modern, historical) = loadgeocoder(config, geodataset, 'geocoder')
    (geocoder, geolist, oecd2webmapper, modern,
     historical) = request_geocoder(config, '')

    if switch == 'modern':
        activeindex = modern.index
        coder = modern
    else:
        activeindex = historical.index
        coder = historical

    # Loading dataset in dataframe
    try:
        (class1, dataset, title, units) = content2dataframe(config, handle)
    except:
        return 'No dataset ' + handle

    (cfilter, notint) = selectint(activeindex.values)
    (origdata, maindata, metadata) = request_datasets(config, switch, modern,
                                                      historical, handles,
                                                      geolist)
    (subsets, panel) = ({}, [])

    # Show only available years
    if action == 'showyears':
        years = []
        datafilter['startyear'] = yearmin
        datafilter['endyear'] = lastyear
        (datasubset, ctrlist) = datasetfilter(maindata[handles[0]], datafilter)
        # Remove years without any values
        if not datafilter['ctrlist']:
            if np.nan in datasubset.index:
                datasubset = datasubset.drop(np.nan, axis=0)
        for colyear in datasubset.columns:
            if datasubset[colyear].count() == 0:
                datasubset = datasubset.drop(colyear, axis=1)

        (years, notyears) = selectint(datasubset.columns)
        # YEARS
        return Response(json.dumps(years), mimetype='application/json')

    # Process all indicators
    for handle in handles:
        (datasubset, ctrlist) = datasetfilter(maindata[handle], datafilter)
        if not datasubset.empty:
            #datasubset = datasubset.dropna(how='all')
            if not datafilter['ctrlist']:
                if np.nan in datasubset.index:
                    datasubset = datasubset.drop(np.nan, axis=0)
            panel.append(datasubset)
            subsets[handle] = datasubset

    maindata = subsets[handles[0]]
    treemapdata = buildtreemap(config, maindata, switch, cfilter, coder)
    return Response(treemapdata, mimetype='application/json')
Example #53
0
import falcon
from falcon_multipart.middleware import MultipartMiddleware

from config import configuration
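# Called for its side effects, before the views are imported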
configuration()
from views import RecognitionView, LabelsView, RecognitionOldView

api = application = falcon.API(middleware=[MultipartMiddleware()])

recognition_view = RecognitionView()
api.add_route('/api/v2/recognition', recognition_view)
labels_view = LabelsView()
api.add_route('/api/v1/labels', labels_view)
recognition_old_view = RecognitionOldView()
api.add_route('/api/v1/recognition', recognition_old_view)
Example #54
0
def panel():
    (thisyear, datafilter, handle, yearmin, yearmax, ctrlist,
     lastyear, logscale) = (1950, {}, '', '1500', '2020', '', 2010, '')
    handles = []
    config = configuration()
    datafilter['startyear'] = yearmin
    datafilter['endyear'] = lastyear
    datafilter['ctrlist'] = config['ctrlist']

    #modern = moderncodes(config['modernnames'], config['apiroot'])
    if request.args.get('handle'):
        handle = str(request.args.get('handle'))
        handle = handle.replace(" ", "")
        handle = handle.replace("'", "")
        try:
            (pids, pidslist) = pidfrompanel(handle)
            handles = pids
        except:
            nopanel = 'yes'
            handles.append(handle)
    if request.args.get('face'):
        facehandle = request.args.get('face')
        if facehandle not in handles:
            handles.append(facehandle)
    if request.args.get('dataset'):
        dataset = request.args.get('dataset')
    if request.args.get('ctrlist'):
        customcountrycodes = ''
        ctrlist = request.args.get('ctrlist')
        datafilter['ctrlist'] = ctrlist
    if request.args.get('logscale'):
        logscale = request.args.get('logscale')
    if request.args.get('year'):
        thisyear = request.args.get('year')
        datafilter['startyear'] = int(thisyear)
        datafilter['endyear'] = int(thisyear)
    if request.args.get('yearmin'):
        fromyear = request.args.get('yearmin')
        datafilter['startyear'] = fromyear
    if request.args.get('yearmax'):
        toyear = request.args.get('yearmax')
        datafilter['endyear'] = toyear
    if request.args.get('hist'):
        switch = 'historical'
        if datafilter['ctrlist'] == '':
            datafilter['ctrlist'] = config['histctrlist']
    else:
        switch = 'modern'

    (geocoder, geolist, oecd2webmapper, modern,
     historical) = request_geocoder(config, '')
    (origdata, maindata, metadata) = request_datasets(config, switch, modern,
                                                      historical, handles,
                                                      geolist)
    (subsets, subsetyears, panel) = ({}, [], [])

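    # Filter each dataset, prune empty years, and record the usable years per handle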
    for handle in handles:
        (datasubset, ctrlist) = datasetfilter(maindata[handle], datafilter)
        datasubset['handle'] = handle
        if not datasubset.empty:
            datasubset = datasubset.dropna(how='all')
            try:
                if np.nan in datasubset.index:
                    datasubset = datasubset.drop(np.nan, axis=0)
            except:
                skip = 'yes'

            for year in datasubset:
                if datasubset[year].count() == 0:
                    datasubset = datasubset.drop(year, axis=1)

            (datayears, notyears) = selectint(datasubset.columns)
            panel.append(datasubset)
            subsets[handle] = datasubset
            subsetyears.append(datayears)

    dataframe = subsets
    ctrlimit = 10

    # Trying to find the best year with most filled data values
    try:
        bestyearlist = subsetyears[0]
        for i in range(1, len(subsetyears)):
            bestyearlist = list(set(bestyearlist) & set(subsetyears[i]))
        #bestyearlist = bestyearlist.sort()
        thisyear = bestyearlist[0]
    except:
        bestyearlist = []

    allcodes = {}
    panel = []
    names = {}

    for handle in dataframe:
        try:
            names[handle] = metadata[handle]['title']
        except:
            names[handle] = 'title'
        try:
            #(dataset, codes) = paneldatafilter(dataframe[handle], int(yearmin), int(yearmax), ctrlist, handle)
            dataset = dataframe[handle]
            if not dataset.empty:
                panel.append(dataset)
        except:
            nodata = 0

    if panel:
        totalpanel = pd.concat(panel)
        cleanedpanel = totalpanel.dropna(axis=1, how='any')
        cleanedpanel = totalpanel

    #return str(cleanedpanel.to_html())
    totalpanel = cleanedpanel
    if int(thisyear) <= 0:
        thisyear = totalpanel.columns[-2]
    result = ''
    original = {}
    if thisyear:
        # Both 'historical' and 'hist' select the historical classification
        if switch in ('historical', 'hist'):
            geocoder = historical
        else:
            geocoder = modern
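        # Build a CSV payload: 'Country' header plus one column per dataset title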
        result = 'Country,'
        for handle in handles:
            result = result + str(metadata[handle]['title']) + ','
        result = result[:-1]

        known = {}
        for code in totalpanel.index:
            if str(code) not in known:
                result = result + '\n' + str(
                    geocoder.loc[int(code)][config['webmappercountry']])
                for handle in handles:
                    tmpframe = totalpanel.loc[totalpanel['handle'] == handle]
                    try:
                        (thisval,
                         original) = value2scale(tmpframe.loc[code][thisyear],
                                                 logscale, original)
                    except:
                        thisval = 'NaN'
                    result = result + ',' + str(thisval)
                known[str(code)] = code

        # Return the CSV unless only the year list is requested;
        # the 'showyears' branch below handles that case
        if not request.args.get('showyears'):
            return Response(result, mimetype='text/plain')

        (allyears, notyears) = selectint(cleanedpanel.columns)
        (codes, notcodes) = selectint(cleanedpanel.index)
        cleanedpanel.index = codes
        (header, data, countries, handles,
         vhandles) = panel2dict(config, cleanedpanel, names)
        #return str(data)
        #thisyear = 1882
        #return str(countries)
        #return str(countries)
        years = []
        for year in sorted(data):
            try:
                years.append(int(year))
                lastyear = year
            except:
                skip = 1

        # Return only years
        if request.args.get('showyears'):
            yearsdata = {}
            yearsdata['years'] = years
            yearsdata['latestyear'] = lastyear
            #yearsdata['data'] = data
            yearsjson = json.dumps(yearsdata,
                                   ensure_ascii=False,
                                   sort_keys=True,
                                   indent=4)
            return Response(yearsjson, mimetype='application/json')

    return Response(result, mimetype='text/plain')
Example #55
0
def main():
    handle = ''
    dataverse = ''
    customkey = ''
    config = configuration()

    try:
        myopts, args = getopt.getopt(sys.argv[1:], "H:r:d:k:D:")
    except getopt.GetoptError as e:
        print(str(e))
        print(
            "Usage: %s -H handle -r roothandle -d dataverse -k key -D dataversename"
            % sys.argv[0])
        sys.exit(2)

    (handle, rhandle, customdv) = ('', '', '')
    for o, a in myopts:
        if o == '-H':
            handle = a
        if o == '-r':
            rhandle = a
        if o == '-d':
            dataverse = a
        if o == '-k':
            customkey = a
        if o == '-D':
            customdv = a

    dataset = {}
    DEBUG = ''
    path = config['path']
    # Default dataverse
    root = config['dataverseroot']
    key = config['key']
    dvname = config['branch']

    if dataverse:
        root = dataverse
    if customkey:
        key = customkey
    if customdv:
        dvname = customdv

    files = []
    if rhandle:
        contentsapi = root + "/api/dataverses/" + dvname + "/contents?key=" + key
        print(contentsapi)
        newdata = load_api_data(contentsapi, '')
        metadata = newdata['data']
        for item in metadata:
            dv = item['id']
            files = getfiles(root, dv, key)

    if handle:
        print(handle)
        (datahandle, datasetID, fileID) = parsehandle(handle)
        files.append(fileID)

    for fileID in files:
        fullpath = downloadfile(root, path, fileID, key)
        print(fullpath)
        (pid, revid, cliohandle, clearpid) = findpid(handle)
        #try:
        if pid:
            handle = pid
            jsonfile = dataextractor(fullpath, path, pid, fileID)
            if jsonfile:
                title = 'Test'
                datasetadd(jsonfile, clearpid, handle, title)
                print(handle)
                print(clearpid)
Example #56
0
def test_boundaries(self):
    config = configuration()
    root = config['apiroot']
    dataset = 'dataframe66_'
    countries = getboundaries(root, dataset)
    self.assertEqual(countries[1879][0], 'Austria-Hungary')