def check_auth():
    """Attach the current account to ``request.account``.

    Resolution order: a GitHub id stored in the session wins; otherwise an
    API key from the ``Authorization`` header or ``api_key`` query argument
    is tried. Raises :class:`Unauthorized` when a credential is present but
    matches no account (and clears a stale session id). With no credential
    at all, ``request.account`` is set to ``None``.
    """
    key = request.headers.get("Authorization") or request.args.get("api_key")
    github_id = session.get("id")
    if github_id:
        request.account = Account.by_github_id(github_id)
        if request.account is None:
            # Stale session: the referenced account no longer exists.
            del session["id"]
            raise Unauthorized()
    elif key is not None:
        request.account = Account.by_api_key(key)
        if request.account is None:
            raise Unauthorized()
    else:
        request.account = None
def check_auth():
    """Populate ``request.account`` from the session or an API key.

    A logged-in session (``session['id']``) takes precedence over an API
    key supplied via the ``Authorization`` header or the ``api_key`` query
    parameter. An invalid credential raises :class:`Unauthorized`; an
    absent one leaves ``request.account`` as ``None``.
    """
    api_key = request.headers.get('Authorization') or \
        request.args.get('api_key')
    if not session.get('id'):
        if api_key is None:
            request.account = None
            return
        request.account = Account.by_api_key(api_key)
        if request.account is None:
            raise Unauthorized()
        return
    request.account = Account.by_github_id(session.get('id'))
    if request.account is None:
        # Session points at a deleted account -- drop it and reject.
        del session['id']
        raise Unauthorized()
def authorized(resp):
    """OAuth callback: store the GitHub token and profile in the session.

    ``resp`` is the provider callback payload. Creates an :class:`Account`
    on first login, flashes a greeting and redirects to the index.
    """
    # Idiomatic membership test (was: `not "access_token" in resp`).
    if "access_token" not in resp:
        return redirect(url_for("index"))
    access_token = resp["access_token"]
    session["access_token"] = access_token, ""
    # NOTE(review): verify=False disables TLS certificate validation, and the
    # token travels in the query string (it may end up in proxy/server logs).
    # Both should be revisited.
    res = requests.get("https://api.github.com/user?access_token=%s" % access_token,
                       verify=False)
    # NOTE(review): `res.json` as an attribute is the pre-1.0 requests API --
    # confirm the pinned requests version before changing it to a call.
    for k, v in res.json.items():
        session[k] = v
    account = Account.by_github_id(res.json.get("id"))
    if account is None:
        account = Account.create(res.json)
        db.session.commit()
    flash("Welcome back, %s." % account.login, "success")
    return redirect(url_for("index"))
def authorize():
    """Finish the GitHub OAuth dance and log the user in.

    Exchanges the callback code for a token, copies the GitHub profile into
    the session, and creates an :class:`Account` for unknown users unless
    ``SIGNUP_DISABLED`` is configured. Redirects to the site root.
    """
    token = oauth.github.authorize_access_token()
    info = oauth.github.get('user', token=token).json()
    for field, value in info.items():
        session[field] = value
    account = Account.by_github_id(info.get('id'))
    if account is None:
        # First visit -- create an account, unless sign-up is switched off.
        if app.config.get('SIGNUP_DISABLED'):
            raise Forbidden("Sorry, account creation is disabled")
        Account.create(info)
        db.session.commit()
    return redirect('/')
def authorized(resp):
    """OAuth callback: persist the GitHub token and profile, then redirect.

    Creates an :class:`Account` for first-time users. Always redirects
    (to the index when no token was granted, to the root otherwise).
    """
    if "access_token" not in resp:
        return redirect(url_for("index"))
    token = resp["access_token"]
    session["access_token"] = token, ""
    # NOTE(review): verify=False skips TLS validation -- confirm intentional.
    res = requests.get(
        "https://api.github.com/user?access_token=%s" % token, verify=False)
    profile = res.json()
    for key in profile:
        session[key] = profile[key]
    account = Account.by_github_id(profile.get("id"))
    if account is None:
        account = Account.create(profile)
        db.session.commit()
    return redirect("/")
def authorized(resp):
    """OAuth callback: store the GitHub token and profile in the session.

    ``resp`` is the provider callback payload. Creates an :class:`Account`
    on first login and redirects to the site root.
    """
    # Idiomatic membership test (was: `not 'access_token' in resp`).
    if 'access_token' not in resp:
        return redirect(url_for('index'))
    access_token = resp['access_token']
    session['access_token'] = access_token, ''
    # NOTE(review): verify=False disables TLS certificate validation, and the
    # token is sent in the query string (loggable) -- revisit both.
    res = requests.get('https://api.github.com/user?access_token=%s' % access_token,
                       verify=False)
    data = res.json()
    for k, v in data.items():
        session[k] = v
    account = Account.by_github_id(data.get('id'))
    if account is None:
        account = Account.create(data)
        db.session.commit()
    return redirect('/')
def authorized(resp):
    """OAuth callback: store the GitHub token and profile in the session.

    ``resp`` is the provider callback payload. Creates an :class:`Account`
    on first login, flashes a greeting and redirects to the index.
    """
    # Idiomatic membership test (was: `not 'access_token' in resp`).
    if 'access_token' not in resp:
        return redirect(url_for('index'))
    access_token = resp['access_token']
    session['access_token'] = access_token, ''
    # NOTE(review): verify=False disables TLS certificate validation, and the
    # token travels in the query string (loggable) -- revisit both.
    res = requests.get('https://api.github.com/user?access_token=%s' % access_token,
                       verify=False)
    # NOTE(review): `res.json` as an attribute is the pre-1.0 requests API --
    # confirm the pinned requests version before changing it to a call.
    for k, v in res.json.items():
        session[k] = v
    account = Account.by_github_id(res.json.get('id'))
    if account is None:
        account = Account.create(res.json)
        db.session.commit()
    flash("Welcome back, %s." % account.login, "success")
    return redirect(url_for('index'))
def import_upload(dataset_name, id, account_id, entity_col, alias_col):
    """Import an uploaded sheet into *dataset_name* as entities and aliases.

    For each row, the configured entity/alias columns are popped off and the
    remainder kept as per-row ``data``. Aliases are matched to their entity
    when both columns are present.

    Note: the ``id`` parameter shadows the builtin; kept for interface
    compatibility (it is the upload signature/identifier).
    """
    dataset = Dataset.find(dataset_name)
    account = Account.by_id(account_id)
    metadata, row_set = parse_upload(dataset, id)
    headers = detect_headers(row_set)
    for row in row_set:
        data = dict([(c.column, c.value) for c in row])
        entity = data.pop(entity_col) if entity_col else None
        alias = data.pop(alias_col) if alias_col else None
        # Reset per row: previously these could be unbound (NameError) or
        # stale from an earlier row when the guarded branches below did not
        # run but the final match step did.
        alias_obj = None
        entity_obj = None
        if alias_col and alias is not None and len(alias) and alias != entity:
            d = {'name': alias, 'data': data}
            alias_obj = Alias.lookup(dataset, d, account, match_entity=False)
            # Row data is attached to the alias; don't duplicate on entity.
            data = {}
        if entity_col and entity is not None and len(entity):
            d = {'name': entity, 'data': data}
            entity_obj = Entity.by_name(dataset, entity)
            if entity_obj is None:
                entity_obj = Entity.create(dataset, d, account)
            entity_obj.data = data
        # Only link when both objects were actually produced for THIS row.
        if alias_obj is not None and entity_obj is not None:
            alias_obj.match(dataset, {'choice': entity_obj.id}, account)
    db.session.commit()
    flush_cache(dataset)
def import_upload(dataset_name, sig, account_id, value_col, link_col):
    """Import an uploaded sheet into *dataset_name* as values and links.

    Each row is split into the link column, the value column and residual
    per-row ``data``; when both columns are configured, the link is matched
    to the value. Commits once at the end.
    """
    dataset = Dataset.find(dataset_name)
    account = Account.by_id(account_id)
    metadata, row_set = parse_upload(dataset, sig)
    headers = detect_headers(row_set)
    for row in row_set:
        data = {cell.column: cell.value for cell in row}
        value = data.pop(value_col) if value_col else None
        link = data.pop(link_col) if link_col else None
        if link_col:
            link_obj = Link.lookup(dataset, {'key': link, 'data': data},
                                   account, match_value=False)
            # Row data now lives on the link; clear it for the value side.
            data = {}
        if value_col:
            value_obj = Value.by_value(dataset, value)
            if value_obj is None:
                value_obj = Value.create(dataset,
                                         {'value': value, 'data': data},
                                         account)
            value_obj.data = data
        if link_col and value_col:
            link_obj.match(dataset, {'choice': value_obj.id}, account)
    db.session.commit()
def authorized(resp):
    """OAuth callback: store the GitHub token and profile in the session.

    ``resp`` is the provider callback payload. Creates an :class:`Account`
    on first login, flashes a greeting and redirects to the index.
    """
    # Idiomatic membership test (was: `not 'access_token' in resp`).
    if 'access_token' not in resp:
        return redirect(url_for('index'))
    access_token = resp['access_token']
    session['access_token'] = access_token, ''
    # NOTE(review): verify=False disables TLS certificate validation, and the
    # token travels in the query string (loggable) -- revisit both.
    res = requests.get('https://api.github.com/user?access_token=%s' % access_token,
                       verify=False)
    # Removed leftover debug statement: `print [res.content]`.
    # NOTE(review): `res.json` as an attribute is the pre-1.0 requests API --
    # confirm the pinned requests version before changing it to a call.
    for k, v in res.json.items():
        session[k] = v
    account = Account.by_github_id(res.json.get('id'))
    if account is None:
        account = Account.create(res.json)
        db.session.commit()
    flash("Welcome back, %s." % account.login, "success")
    return redirect(url_for('index'))
def authorized(resp):
    """OAuth callback: persist the GitHub token and profile, then redirect.

    Unknown users get an :class:`Account` created for them, unless
    ``SIGNUP_DISABLED`` is configured, in which case :class:`Forbidden`
    is raised. Always redirects on success.
    """
    if 'access_token' not in resp:
        return redirect(url_for('index'))
    token = resp['access_token']
    session['access_token'] = token, ''
    # NOTE(review): verify=False skips TLS validation -- confirm intentional.
    res = requests.get(
        'https://api.github.com/user?access_token=%s' % token, verify=False)
    profile = res.json()
    for field, value in profile.items():
        session[field] = value
    account = Account.by_github_id(profile.get('id'))
    if account is None:
        if app.config.get('SIGNUP_DISABLED'):
            raise Forbidden("Sorry, account creation is disabled")
        account = Account.create(profile)
        db.session.commit()
    return redirect('/')
def import_upload(upload_id, account_id, mapping):
    """Import entities from a stored upload using a column *mapping*.

    Rows with a ``canonical`` value (aliases) are processed after plain
    entities so their targets already exist. Existing entities are matched
    by id, then by name; otherwise created. Defaults (``canonical``,
    ``invalid``) and attributes are merged from the existing entity when
    the mapping did not supply them. Commits every 100 rows and once at
    the end; per-row validation errors are logged and skipped.
    """
    upload = Upload.all().filter_by(id=upload_id).first()
    account = Account.by_id(account_id)
    mapped = mapping['columns'].values()
    rows = [apply_mapping(r, mapping) for r in upload.tab.dict]
    # put aliases second.
    rows = sorted(rows, key=lambda r: 2 if r.get('canonical') else 1)
    for i, row in enumerate(rows):
        try:
            entity = None
            if row.get('id'):
                entity = Entity.by_id(row.get('id'))
            if entity is None:
                entity = Entity.by_name(upload.dataset, row.get('name'))
            if entity is None:
                entity = Entity.create(upload.dataset, row, account)
            # restore some defaults:
            if entity.canonical_id and 'canonical' not in mapped:
                row['canonical'] = entity.canonical_id
            if entity.invalid and 'invalid' not in mapped:
                row['invalid'] = entity.invalid
            if entity.attributes:
                attributes = entity.attributes.copy()
            else:
                attributes = {}
            attributes.update(row['attributes'])
            row['attributes'] = attributes
            entity.update(row, account)
            # Use logging, consistent with the rest of this function
            # (was a stray debug `print(entity)`).
            logging.debug(entity)
            if i % 100 == 0:
                db.session.commit()
                logging.debug('Commit')
        except Invalid as inv:
            logging.warning('Exception during import: {}'.format(str(inv)))
    db.session.commit()
    logging.info('Import Completed')
def import_upload(upload_id, account_id, mapping):
    """Import entities from a stored upload using a column *mapping*.

    Rows with a ``canonical`` value (aliases) are processed after plain
    entities so their targets already exist. Existing entities are matched
    by id, then by name; otherwise created. Defaults (``canonical``,
    ``invalid``) and attributes are merged from the existing entity when
    the mapping did not supply them. Per-row validation errors are logged
    and skipped.
    """
    upload = Upload.all().filter_by(id=upload_id).first()
    account = Account.by_id(account_id)
    mapped = mapping['columns'].values()
    rows = [apply_mapping(r, mapping) for r in upload.tab.dict]
    # put aliases second.
    rows = sorted(rows, key=lambda r: 2 if r.get('canonical') else 1)
    for i, row in enumerate(rows):
        try:
            entity = None
            if row.get('id'):
                entity = Entity.by_id(row.get('id'))
            if entity is None:
                entity = Entity.by_name(upload.dataset, row.get('name'))
            if entity is None:
                entity = Entity.create(upload.dataset, row, account)
            # restore some defaults:
            if entity.canonical_id and 'canonical' not in mapped:
                row['canonical'] = entity.canonical_id
            if entity.invalid and 'invalid' not in mapped:
                row['invalid'] = entity.invalid
            if entity.attributes:
                attributes = entity.attributes.copy()
            else:
                attributes = {}
            attributes.update(row['attributes'])
            row['attributes'] = attributes
            entity.update(row, account)
            # Python 3 + logging (was Python 2 `print entity` / `print 'COMMIT'`
            # and `except Invalid, inv`).
            logging.debug(entity)
            if i % 100 == 0:
                logging.debug('Commit')
                db.session.commit()
        except Invalid as inv:
            logging.warning('Exception during import: %s', inv)
    # Final commit: without it, rows after the last 100-row checkpoint were
    # silently left uncommitted.
    db.session.commit()
def import_upload(dataset_name, sig, account_id, entity_col, alias_col):
    """Import an uploaded sheet into *dataset_name* as entities and aliases.

    For each row, the configured entity/alias columns are popped off and the
    remainder kept as per-row ``data``. Aliases are matched to their entity
    when both columns are present. Commits once, then flushes the cache.
    """
    dataset = Dataset.find(dataset_name)
    account = Account.by_id(account_id)
    metadata, row_set = parse_upload(dataset, sig)
    headers = detect_headers(row_set)
    for row in row_set:
        data = dict([(c.column, c.value) for c in row])
        entity = data.pop(entity_col) if entity_col else None
        alias = data.pop(alias_col) if alias_col else None
        # Reset per row: previously these could be unbound (NameError) or
        # stale from an earlier row when the guarded branches below did not
        # run but the final match step did.
        alias_obj = None
        entity_obj = None
        if alias_col and alias is not None and len(alias) and alias != entity:
            d = {'name': alias, 'data': data}
            alias_obj = Alias.lookup(dataset, d, account, match_entity=False)
            # Row data is attached to the alias; don't duplicate on entity.
            data = {}
        if entity_col and entity is not None and len(entity):
            d = {'name': entity, 'data': data}
            entity_obj = Entity.by_name(dataset, entity)
            if entity_obj is None:
                entity_obj = Entity.create(dataset, d, account)
            entity_obj.data = data
        # Only link when both objects were actually produced for THIS row.
        if alias_obj is not None and entity_obj is not None:
            alias_obj.match(dataset, {'choice': entity_obj.id}, account)
    db.session.commit()
    flush_cache()