def create():
    """Create an entity from the request payload and redirect to its view."""
    payload = request_data()
    target = Dataset.from_form(payload)
    authz.require(authz.dataset_edit(target))
    new_entity = Entity.create(target, payload, request.account)
    db.session.commit()
    return redirect(url_for('.view', id=new_entity.id))
def edit(dataset):
    """Render the dataset settings form, pre-filled with current values."""
    ds = Dataset.find(dataset)
    authz.require(authz.dataset_manage(ds))
    page = render_template('dataset/edit.html', dataset=ds,
                           algorithms=get_algorithms())
    return htmlfill.render(page, defaults=ds.as_dict())
def create():
    """Create a dataset, unless creation is disabled by configuration."""
    authz.require(authz.dataset_create())
    if app.config.get('DATASET_CREATION_DISABLED'):
        raise Forbidden("Sorry, dataset creation is disabled")
    ds = Dataset.create(request_data(), request.account)
    db.session.commit()
    return redirect(url_for('.view', dataset=ds.name))
def match(dataset, alias, random=False):
    """Show candidate entities for an alias so a reviewer can pick a match.

    The form defaults to the stored choice; if none exists and the top
    candidate scores 50 or below, it defaults to 'INVALID'.
    """
    dataset = Dataset.find(dataset)
    authz.require(authz.dataset_edit(dataset))
    alias = Alias.find(dataset, alias)
    random = random or request.args.get('random') == 'True'
    choices = match_op(alias.name, dataset,
                       query=request.args.get('query'))
    pager = Pager(choices, '.match', dataset=dataset.name,
                  alias=alias.id, limit=10)
    # HACK: Fetch only the entities on the selected page.
    # Tuple-parameter lambdas (PEP 3113) are Python-2-only; unpack in a
    # comprehension instead so this also runs on Python 3.
    page = pager.query[pager.offset:pager.offset + pager.limit]
    entities = Entity.id_map(dataset, [e for (c, e, s) in page])
    for i, (c, e, s) in enumerate(pager.query):
        if e in entities:
            pager.query[i] = (c, entities.get(e), s)
    html = render_template('alias/match.html', dataset=dataset,
                           alias=alias, choices=pager, random=random)
    choice = 'INVALID' if alias.is_invalid else alias.entity_id
    if len(choices) and choice is None:
        c, e, s = choices[0]
        choice = 'INVALID' if s <= 50 else e.id
    return htmlfill.render(html, force_defaults=False,
                           defaults={'choice': choice,
                                     'name': alias.name,
                                     'query': request.args.get('query', ''),
                                     'random': random})
def match(dataset, link, random=False):
    """Show candidate values for a link so a reviewer can pick a match.

    The form defaults to the stored choice; if none exists and the top
    candidate scores 50 or below, it defaults to 'INVALID'.
    """
    dataset = Dataset.find(dataset)
    authz.require(authz.dataset_edit(dataset))
    link = Link.find(dataset, link)
    random = random or request.args.get('random') == 'True'
    choices = match_op(link.key, dataset,
                       query=request.args.get('query'))
    pager = Pager(choices, '.match', dataset=dataset.name,
                  link=link.id, limit=10)
    # HACK: Fetch only the values on the selected page.
    # Tuple-parameter lambdas (PEP 3113) are Python-2-only; unpack in a
    # comprehension instead so this also runs on Python 3.
    page = pager.query[pager.offset:pager.offset + pager.limit]
    value_objs = Value.id_map(dataset, [v for (c, v, s) in page])
    for i, (c, v, s) in enumerate(pager.query):
        if v in value_objs:
            pager.query[i] = (c, value_objs.get(v), s)
    html = render_template('link/match.html', dataset=dataset,
                           link=link, choices=pager, random=random)
    choice = 'INVALID' if link.is_invalid else link.value_id
    if len(choices) and choice is None:
        c, v, s = choices[0]
        choice = 'INVALID' if s <= 50 else v
    return htmlfill.render(html, force_defaults=False,
                           defaults={'choice': choice,
                                     'value': link.key,
                                     'query': request.args.get('query', ''),
                                     'random': random})
def create():
    """Create a dataset from the request payload; re-render the creation
    form with the submitted data on validation errors."""
    authz.require(authz.dataset_create())
    data = request_content()
    try:
        dataset = Dataset.create(data, request.account)
        db.session.commit()
        return redirect(url_for('.view', dataset=dataset.name))
    except Invalid as inv:
        # 'except X, e' was removed in Python 3; 'as' works since 2.6.
        return handle_invalid(inv, new, data=data)
def match_random(dataset):
    """Redirect to the match screen for a randomly chosen unmatched alias."""
    dataset = Dataset.find(dataset)
    authz.require(authz.dataset_edit(dataset))
    pending = Alias.all_unmatched(dataset)
    total = pending.count()
    if not total:
        # Nothing left to match; go back to the dataset overview.
        return redirect(url_for('dataset.view', dataset=dataset.name))
    pick = pending.offset(randint(0, total - 1)).first()
    return redirect(url_for('.match', dataset=dataset.name,
                            alias=pick.id, random=True))
def upload(dataset):
    """Accept an uploaded file and send the user on to column mapping."""
    dataset = Dataset.find(dataset)
    authz.require(authz.dataset_edit(dataset))
    file_ = request.files.get('file')
    if not (file_ and file_.filename):
        raise Invalid("No file.", None, None,
                      error_dict={'file': "You need to upload a file"})
    upload = upload_file(dataset, file_, request.account)
    return redirect(url_for('.map', dataset=dataset.name, id=upload.id))
def upload(dataset):
    """Store an uploaded file for a dataset and return it as JSON."""
    dataset = Dataset.find(dataset)
    authz.require(authz.dataset_edit(dataset))
    file_ = request.files.get('file')
    if not (file_ and file_.filename):
        errors = {'file': "You need to upload a file"}
        raise Invalid("No file.", None, None, error_dict=errors)
    upload = Upload.create(dataset, request.account, file_)
    db.session.commit()
    return jsonify(upload)
def match_random(dataset):
    """Pick an unmatched alias at random and redirect to its match screen."""
    dataset = Dataset.find(dataset)
    authz.require(authz.dataset_edit(dataset))
    unmatched = Alias.all_unmatched(dataset)
    remaining = unmatched.count()
    if not remaining:
        # All aliases handled; fall back to the dataset page.
        return redirect(url_for('dataset.view', dataset=dataset.name))
    chosen = unmatched.offset(randint(0, remaining - 1)).first()
    return redirect(url_for('.match', dataset=dataset.name,
                            alias=chosen.id, random=True))
def upload(dataset):
    """Accept an uploaded file; on a missing file re-render the upload form."""
    dataset = Dataset.find(dataset)
    authz.require(authz.dataset_edit(dataset))
    file_ = request.files.get('file')
    if not (file_ and file_.filename):
        problem = Invalid("No file.", None, None,
                          error_dict={'file': "You need to upload a file"})
        return handle_invalid(problem, form, data={}, args=[dataset.name])
    sig = upload_file(dataset, file_)
    return redirect(url_for('.map', dataset=dataset.name, sig=sig))
def update(dataset):
    """Apply edited dataset metadata; on validation errors re-show the
    edit form with the submitted values."""
    dataset = Dataset.find(dataset)
    authz.require(authz.dataset_manage(dataset))
    data = request_content()
    try:
        dataset.update(data)
        db.session.commit()
        flash("Updated %s" % dataset.label, "success")
        return redirect(url_for(".view", dataset=dataset.name))
    except Invalid as inv:
        # 'except X, e' was removed in Python 3; 'as' works since 2.6.
        return handle_invalid(inv, edit, args=[dataset.name], data=data)
def update(dataset):
    """Apply edited dataset metadata and flush its cache; on validation
    errors re-show the edit form with the submitted values."""
    dataset = Dataset.find(dataset)
    authz.require(authz.dataset_manage(dataset))
    data = request_content()
    try:
        # Invalidate cached data before mutating the dataset.
        flush_cache(dataset)
        dataset.update(data)
        db.session.commit()
        flash("Updated %s" % dataset.label, 'success')
        return redirect(url_for('.view', dataset=dataset.name))
    except Invalid as inv:
        # 'except X, e' was removed in Python 3; 'as' works since 2.6.
        return handle_invalid(inv, edit, args=[dataset.name], data=data)
def match_save(dataset, alias):
    """Persist a reviewer's match decision for an alias; re-render the
    match form on validation errors.

    NOTE(review): the success path falls through without a return value —
    confirm whether a redirect is expected here, as in sibling views.
    """
    dataset = Dataset.find(dataset)
    authz.require(authz.dataset_edit(dataset))
    alias = Alias.find(dataset, alias)
    random = request.form.get('random') == 'True'
    data = request_content()
    try:
        alias.match(dataset, data, request.account)
        db.session.commit()
    except Invalid as inv:
        # 'except X, e' was removed in Python 3; 'as' works since 2.6.
        return handle_invalid(inv, match, data=data,
                              args=[dataset.name, alias.id, random])
def update(dataset, entity):
    """Update an entity with submitted data; on validation errors
    re-render the entity view with the submitted values."""
    dataset = Dataset.find(dataset)
    authz.require(authz.dataset_edit(dataset))
    entity = Entity.find(dataset, entity)
    data = request_content()
    try:
        entity.update(data, request.account)
        db.session.commit()
        flash("Updated %s" % entity.display_name, 'success')
        return redirect(url_for('.view', dataset=dataset.name,
                                entity=entity.id))
    except Invalid as inv:
        # 'except X, e' was removed in Python 3; 'as' works since 2.6.
        return handle_invalid(inv, view, data=data,
                              args=[dataset.name, entity.id])
def submit(dataset, id):
    """Kick off the background import of an upload, requiring at least one
    of the entity/alias column selections."""
    dataset = Dataset.find(dataset)
    authz.require(authz.dataset_edit(dataset))
    data = request_content()
    entity_col = data.get('entity') or None
    alias_col = data.get('alias') or None
    if not (entity_col or alias_col):
        # Fixed message grammar: "a alias" -> "an alias".
        flash('You need to pick either an alias or entity column!', 'error')
        # NOTE(review): 'map' here is the sibling view function, which
        # shadows the builtin of the same name.
        return map(dataset.name, id)
    import_upload.delay(dataset.name, id, request.account.id,
                        entity_col, alias_col)
    flash('Loading data...', 'success')
    return redirect(url_for('dataset.view', dataset=dataset.name))
def submit(dataset, sig):
    """Kick off the background import of an upload (by signature), requiring
    at least one of the entity/alias column selections."""
    dataset = Dataset.find(dataset)
    authz.require(authz.dataset_edit(dataset))
    data = request_content()
    entity_col = data.get('entity') or None
    alias_col = data.get('alias') or None
    if not (entity_col or alias_col):
        # Fixed message grammar: "a alias" -> "an alias".
        flash('You need to pick either an alias or entity column!', 'error')
        # NOTE(review): 'map' here is the sibling view function, which
        # shadows the builtin of the same name.
        return map(dataset.name, sig)
    import_upload.delay(dataset.name, sig, request.account.id,
                        entity_col, alias_col)
    flash('Loading data...', 'success')
    return redirect(url_for('dataset.view', dataset=dataset.name))
def create(dataset):
    """Create a value within a dataset; on validation errors re-render
    the dataset view with the submitted data."""
    dataset = Dataset.find(dataset)
    authz.require(authz.dataset_edit(dataset))
    data = request_content()
    try:
        value = Value.create(dataset, data, request.account)
        db.session.commit()
        return redirect(url_for('.view', dataset=dataset.name,
                                value=value.id))
    except Invalid as inv:
        # 'except X, e' was removed in Python 3; 'as' works since 2.6.
        return handle_invalid(inv, view_dataset, data=data,
                              args=[dataset.name])
def update(dataset, value):
    """Update a value with submitted data; on validation errors re-render
    the value view with the submitted values."""
    dataset = Dataset.find(dataset)
    authz.require(authz.dataset_edit(dataset))
    value = Value.find(dataset, value)
    data = request_content()
    try:
        value.update(data, request.account)
        db.session.commit()
        flash("Updated %s" % value.value, 'success')
        return redirect(url_for('.view', dataset=dataset.name,
                                value=value.id))
    except Invalid as inv:
        # 'except X, e' was removed in Python 3; 'as' works since 2.6.
        return handle_invalid(inv, view, data=data,
                              args=[dataset.name, value.id])
def submit(dataset, sig):
    """Queue the import of an upload; at least one of the value/link
    column selections must be supplied."""
    dataset = Dataset.find(dataset)
    authz.require(authz.dataset_edit(dataset))
    data = request_content()
    value_col = data.get('value') or None
    link_col = data.get('link') or None
    if value_col is None and link_col is None:
        flash('You need to pick either a link or value column!', 'error')
        # NOTE(review): 'map' is the sibling view function, shadowing the
        # builtin of the same name.
        return map(dataset.name, sig)
    import_upload.delay(dataset.name, sig, request.account.id,
                        value_col, link_col)
    flash('Loading data...', 'success')
    return redirect(url_for('dataset.view', dataset=dataset.name))
def match_save(dataset, alias):
    """Persist a reviewer's match decision for an alias; re-render the
    match form on validation errors.

    NOTE(review): the success path falls through without a return value —
    confirm whether a redirect is expected here, as in sibling views.
    """
    dataset = Dataset.find(dataset)
    authz.require(authz.dataset_edit(dataset))
    alias = Alias.find(dataset, alias)
    random = request.form.get('random') == 'True'
    data = request_content()
    try:
        alias.match(dataset, data, request.account)
        db.session.commit()
    except Invalid as inv:
        # 'except X, e' was removed in Python 3; 'as' works since 2.6.
        return handle_invalid(inv, match, data=data,
                              args=[dataset.name, alias.id, random])
def match_save(dataset, link):
    """Persist a reviewer's match decision for a link, priming the
    candidate cache on a positive match; re-render the match form on
    validation errors.

    NOTE(review): the success path falls through without a return value —
    confirm whether a redirect is expected here, as in sibling views.
    """
    dataset = Dataset.find(dataset)
    authz.require(authz.dataset_edit(dataset))
    link = Link.find(dataset, link)
    random = request.form.get('random') == 'True'
    data = request_content()
    try:
        link.match(dataset, data, request.account)
        if link.value is not None:
            add_candidate_to_cache(dataset, link.key, link.value.id)
        db.session.commit()
    except Invalid as inv:
        # 'except X, e' was removed in Python 3; 'as' works since 2.6.
        return handle_invalid(inv, match, data=data,
                              args=[dataset.name, link.id, random])
def create(dataset):
    """Create an entity within a dataset; on validation errors re-render
    the dataset view with the submitted data."""
    dataset = Dataset.find(dataset)
    authz.require(authz.dataset_edit(dataset))
    data = request_content()
    try:
        entity = Entity.create(dataset, data, request.account)
        db.session.commit()
        return redirect(
            url_for('.view', dataset=dataset.name, entity=entity.id))
    except Invalid as inv:
        # 'except X, e' was removed in Python 3; 'as' works since 2.6.
        return handle_invalid(inv, view_dataset, data=data,
                              args=[dataset.name])
def merge(dataset, value):
    """Merge a value into a target value and flush the dataset cache;
    re-render the value view on validation errors."""
    dataset = Dataset.find(dataset)
    authz.require(authz.dataset_edit(dataset))
    value = Value.find(dataset, value)
    data = request_content()
    try:
        target = value.merge_into(data, request.account)
        # The merge changes candidate data, so drop the cached state.
        flush_cache(dataset)
        db.session.commit()
        flash("Merged %s" % value.value, 'success')
        return redirect(url_for('.view', dataset=dataset.name,
                                value=target.id))
    except Invalid as inv:
        # 'except X, e' was removed in Python 3; 'as' works since 2.6.
        # Removed leftover debug 'print inv' statement.
        return handle_invalid(inv, view, data=data,
                              args=[dataset.name, value.id])
def update(dataset, entity):
    """Update an entity with submitted data; on validation errors
    re-render the entity view with the submitted values."""
    dataset = Dataset.find(dataset)
    authz.require(authz.dataset_edit(dataset))
    entity = Entity.find(dataset, entity)
    data = request_content()
    try:
        entity.update(data, request.account)
        db.session.commit()
        flash("Updated %s" % entity.display_name, 'success')
        return redirect(
            url_for('.view', dataset=dataset.name, entity=entity.id))
    except Invalid as inv:
        # 'except X, e' was removed in Python 3; 'as' works since 2.6.
        return handle_invalid(inv, view, data=data,
                              args=[dataset.name, entity.id])
def process(dataset, id):
    """Validate an upload's column mapping and queue the import.

    Raises Invalid when a mapped header is not present in the uploaded
    file, or when neither a 'name' nor an 'id' field is mapped.
    """
    dataset = Dataset.find(dataset)
    authz.require(authz.dataset_edit(dataset))
    upload = Upload.find(dataset, id)
    mapping = request_data()
    mapping['reviewed'] = mapping.get('reviewed') or False
    mapping['columns'] = mapping.get('columns', {})
    fields = mapping['columns'].values()
    for header in mapping['columns'].keys():
        if header not in upload.tab.headers:
            raise Invalid("Invalid header: %s" % header, None, None)
    if 'name' not in fields and 'id' not in fields:
        # Fixed user-facing typo: "definies" -> "defines".
        raise Invalid("You have not selected a field that defines "
                      "entity names.", None, None)
    import_upload.delay(upload.id, request.account.id, mapping)
    return jsonify({'status': 'Loading data...'})
def match(dataset, link, random=False):
    """Render the match form for a link with its candidate values."""
    dataset = Dataset.find(dataset)
    authz.require(authz.dataset_edit(dataset))
    link = Link.find(dataset, link)
    random = random or request.args.get('random') == 'True'
    query = request.args.get('query')
    choices = match_op(link.key, dataset, query=query)
    pager = Pager(choices, '.match', dataset=dataset.name,
                  link=link.id, limit=10)
    html = render_template('link/match.html', dataset=dataset,
                           link=link, choices=pager, random=random)
    # Default to the stored decision; fall back to the top candidate.
    choice = 'INVALID' if link.is_invalid else link.value_id
    if len(choices):
        if choice is None:
            choice = choices[0][1].id
    return htmlfill.render(html, force_defaults=False,
                           defaults={'choice': choice,
                                     'value': link.key,
                                     'query': request.args.get('query', ''),
                                     'random': random})
def match(dataset, alias, random=False):
    """Show candidate entities for an alias so a reviewer can pick a match.

    The form defaults to the stored choice; if none exists and the top
    candidate scores 50 or below, it defaults to 'INVALID'.
    """
    dataset = Dataset.find(dataset)
    authz.require(authz.dataset_edit(dataset))
    alias = Alias.find(dataset, alias)
    random = random or request.args.get('random') == 'True'
    choices = match_op(alias.name, dataset,
                       query=request.args.get('query'))
    pager = Pager(choices, '.match', dataset=dataset.name,
                  alias=alias.id, limit=10)
    # HACK: Fetch only the entities on the selected page.
    # Tuple-parameter lambdas (PEP 3113) are Python-2-only; unpack in a
    # comprehension instead so this also runs on Python 3.
    page = pager.query[pager.offset:pager.offset + pager.limit]
    entities = Entity.id_map(dataset, [e for (c, e, s) in page])
    for i, (c, e, s) in enumerate(pager.query):
        if e in entities:
            pager.query[i] = (c, entities.get(e), s)
    html = render_template('alias/match.html', dataset=dataset,
                           alias=alias, choices=pager, random=random)
    choice = 'INVALID' if alias.is_invalid else alias.entity_id
    if len(choices) and choice is None:
        c, e, s = choices[0]
        choice = 'INVALID' if s <= 50 else e.id
    return htmlfill.render(html, force_defaults=False,
                           defaults={
                               'choice': choice,
                               'name': alias.name,
                               'query': request.args.get('query', ''),
                               'random': random
                           })
def update(id):
    """Update an entity (looked up by id) and redirect to its view."""
    entity = Entity.by_id(id)
    authz.require(authz.dataset_edit(entity.dataset))
    entity.update(request_data(), request.account)
    db.session.commit()
    return redirect(url_for('.view', id=entity.id))
def form(dataset):
    """Render the file-upload form for a dataset."""
    ds = Dataset.find(dataset)
    authz.require(authz.dataset_edit(ds))
    return render_template('upload/form.html', dataset=ds)
def delete(dataset):
    """Delete a dataset and respond with 204 No Content."""
    ds = Dataset.find(dataset)
    authz.require(authz.dataset_manage(ds))
    ds.delete()
    db.session.commit()
    return ('', 204)
def update(dataset):
    """Apply submitted metadata to a dataset and redirect to its view."""
    ds = Dataset.find(dataset)
    authz.require(authz.dataset_manage(ds))
    ds.update(request_data())
    db.session.commit()
    return redirect(url_for('.view', dataset=ds.name))
def view(dataset, id):
    """Return a dataset's upload as JSON."""
    ds = Dataset.find(dataset)
    authz.require(authz.dataset_edit(ds))
    return jsonify(Upload.find(ds, id))
def logout():
    """Clear the session, confirm the logout, and return to the index."""
    authz.require(authz.logged_in())
    session.clear()
    flash("You've been logged out.", "success")
    return redirect(url_for('index'))
def account():
    """Render the account page showing the current user's API key."""
    authz.require(authz.logged_in())
    key = request.account.api_key
    return render_template("account.html", api_key=key)
def new():
    """Render the dataset creation form."""
    authz.require(authz.dataset_create())
    return render_template('dataset/new.html')
def logout():
    """Clear the session and return to the site root."""
    authz.require(authz.logged_in())
    session.clear()
    return redirect('/')
def map(dataset, id):
    """Render the column-mapping screen for an upload."""
    ds = Dataset.find(dataset)
    authz.require(authz.dataset_edit(ds))
    context = get_map_metadata(ds, id)
    return render_template('upload/map.html', **context)
def logout():
    """Clear the session and return to the index page.

    NOTE(review): a logout flash message was commented out here in the
    original — confirm whether the silent logout is intentional.
    """
    authz.require(authz.logged_in())
    session.clear()
    return redirect(url_for('index'))
def account():
    """Render the account page showing the current user's API key."""
    authz.require(authz.logged_in())
    key = request.account.api_key
    return render_template('account.html', api_key=key)
def map(dataset, sig):
    """Render the column-mapping screen for an upload signature."""
    ds = Dataset.find(dataset)
    authz.require(authz.dataset_edit(ds))
    context = get_map_metadata(ds, sig)
    return render_template('upload/map.html', **context)
def create():
    """Create a dataset from the request payload and redirect to it."""
    authz.require(authz.dataset_create())
    ds = Dataset.create(request_data(), request.account)
    db.session.commit()
    return redirect(url_for('.view', dataset=ds.name))