def signup(settings=''):
    """Render the project details form and store submitted project settings."""
    user = ''
    config = configuration()
    if config['error']:
        return config['error']

    (admin, user, tabnum, createnews, editnews) = ('', '', 0, '', '')
    fields = {}
    checkboxes = {}
    fieldslist = ["dataverse", "apitoken", "email", "passwd", "uri", "title",
                  "logo", "description", "summary", "about", "contact",
                  "partners", "news", "newstitle", "newssummary", "newstext"]

    if request.args.get("user"):
        user = request.args.get("user")
    if user == 'admin':
        admin = 'user'
    try:
        if session['name']:
            user = session['name']
    except KeyError:
        pass

    # Collect every known field and its 'closed<field>' counterpart from the
    # query string or, failing that, from the submitted form.
    for field in fieldslist:
        cfield = "%s%s" % ("closed", field)
        fields[field] = ''
        fields[cfield] = ''
        if request.args.get(field):
            fields[field] = request.args.get(field)
        elif request.form.getlist(field):
            fields[field] = str(request.form.getlist(field)[0])
        if request.args.get(cfield):
            fields[cfield] = request.args.get(cfield)
        elif request.form.getlist(cfield):
            fields[cfield] = str(request.form.getlist(cfield)[0])

    uriparams = {}
    if request.args:
        for key in request.args.keys():
            uriparams[key] = str(request.args.getlist(key))
    elif request.form:
        for key in request.form.keys():
            uriparams[key] = str(request.form.getlist(key))

    # Normalise checkbox values ("on") to 'checked' so the templates can reuse them.
    for key in uriparams:
        value = uriparams[key]
        if value == "[u'on']":
            value = 'checked'
        fields[key] = value
        checkboxes[key] = value

    if request.args.get('project'):
        # Existing project: load its stored settings and show the closed view.
        fieldsall = readdata('projects', 'uri', request.args.get('project'))
        for f in fieldsall:
            fields = f
        if request.args.get('view') == 'newseditor':
            tabnum = 2
        if request.args.get('action') == 'create':
            createnews = 'yes'
        if request.args.get('action') == 'edit':
            editnews = 'yes'
        return make_response(render_template('closedprojectdetails.html',
                                             fields=fields, checkboxes=str(checkboxes),
                                             admin=admin, username=user, tabnum=tabnum,
                                             createnews=createnews, editnews=editnews))
    else:
        # Clean settings first: replace any stored record for this URI.
        if len(fields['uri']):
            remove = removedata('projects', 'uri', fields['uri'])
            result = data2store('projects', fields)
            return redirect(config['apiroot'] + '/' + fields['uri'], code=301)
        else:
            return make_response(render_template('projectdetails.html',
                                                 fields=fields, checkboxes=str(checkboxes),
                                                 admin=admin, username=user))
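# A minimal sketch (assumption, not part of the project) of how this view could be
# wired into a Flask application. The '/signup' route, the secret key and the
# standalone app object are illustrative only; the real application registers its
# views elsewhere, and the view itself relies on flask.request, flask.session,
# render_template, make_response and redirect being imported at module level.
from flask import Flask

app = Flask(__name__)
app.secret_key = 'change-me'  # needed because signup() reads session['name']

# GET serves prefilled forms and project lookups; POST handles form submissions.
app.add_url_rule('/signup', 'signup', signup, methods=['GET', 'POST'])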
def main():
    """Download tabular files from a Dataverse and register the extracted datasets."""
    handle = ''
    dataverse = ''
    customkey = ''
    config = configuration()

    try:
        myopts, args = getopt.getopt(sys.argv[1:], "H:r:d:k:D:")
    except getopt.GetoptError as e:
        print(str(e))
        print("Usage: %s -H handle -r roothandle -d dataverseroot -k apikey -D dataversename" % sys.argv[0])
        sys.exit(2)

    (handle, rhandle, customdv) = ('', '', '')
    for o, a in myopts:
        if o == '-H':
            handle = a
        if o == '-r':
            rhandle = a
        if o == '-d':
            dataverse = a
        if o == '-k':
            customkey = a
        if o == '-D':
            customdv = a

    dataset = {}
    DEBUG = ''
    path = config['path']

    # Default Dataverse connection settings; command-line options override them.
    root = config['dataverseroot']
    key = config['key']
    dvname = config['branch']
    title = 'Title'
    units = 'Units'
    if dataverse:
        root = dataverse
    if customkey:
        key = customkey
    if customdv:
        dvname = customdv

    files = []
    if rhandle:
        # Walk every dataverse below the configured branch and collect its file IDs.
        contentsapi = root + "/api/dataverses/" + dvname + "/contents?key=" + key
        print(contentsapi)
        newdata = load_api_data(contentsapi, '')
        metadata = newdata['data']
        for item in metadata:
            dv = item['id']
            files = getfiles(root, dv, key)

    if handle:
        # A single handle resolves to one dataset and file ID.
        print(handle)
        (datahandle, datasetID, fileID) = parsehandle(handle)
        files.append(fileID)

    for fileID in files:
        fullpath = downloadfile(root, path, fileID, key)
        print(fullpath)
        (pid, revid, cliohandle, clearpid) = findpid(handle)
        (jsonfile, csvfile) = ('', '')
        if pid:
            handle = pid
            try:
                (jsonfile, csvfile, tmptitle, tmpunits) = dataextractor(fullpath, path, pid, fileID)
            except Exception:
                # Fall back to the Excel validator when direct extraction fails.
                resultfile = config['tmpdir'] + "/" + fileID
                (jsonfile, csvfile, tmptitle, tmpunits) = excelvalidator(config['phantompath'], fullpath, resultfile, config['tmpdir'])

            if jsonfile:
                remove = removedata('datasets', 'handle', clearpid)
                try:
                    title = str(tmptitle)
                    units = str(tmpunits)
                except Exception:
                    pass
                print("ADD " + str(jsonfile))
                datasetadd(jsonfile, csvfile, clearpid, handle, title, units, datasetID)
                print(handle)
                print(clearpid)
                print(datasetID)
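# Standard entry-point guard so the importer can be run from the command line.
# The invocations below are illustrative; the script name and handle are placeholders.
#
#   python importer.py -H hdl:XXXX/YYYY -k <api key>
#   python importer.py -r root -d https://dataverse.example.org -D <dataverse name>
#
if __name__ == '__main__':
    main()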