def report(collection_id):
    # Export the cross-referencing results for a collection as an Excel
    # download; `merge` collapses everything into a single sheet.
    collection = obj_or_404(Collection.by_id(collection_id))
    require(request.authz.can_read(collection.id))
    output = generate_excel(collection, request.authz,
                            links=arg_bool('links'),
                            one_sheet=arg_bool('merge'))
    outputfile = "%s Cross-referenced.xlsx" % string_value(collection.label)
    return send_file(output, as_attachment=True,
                     attachment_filename=outputfile)
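# `arg_bool` is used by every view in this module. A minimal sketch of
# what it presumably does (assumed implementation; the real helper may
# accept more truthy spellings or a default argument):
from flask import request


def arg_bool(name):
    """Read a boolean flag from the request query string."""
    value = request.args.get(name, '')
    return value.strip().lower() in ('true', '1', 'yes', 'y', 'on')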
def index():
    # Paginated entity search; `doc_counts` optionally annotates each
    # result with the number of matching documents.
    q = entities_query(request.args)
    q['size'] = get_limit(default=50)
    q['from'] = get_offset()
    doc_counts = arg_bool('doc_counts')
    res = execute_entities_query(request.args, q, doc_counts=doc_counts)
    return jsonify(res)
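# Sketches of the pagination helpers referenced above. The clamping
# bounds are assumptions; search-backed result windows are typically
# capped, so both values are bounded to a sane range:
def get_limit(default=50, max_limit=10000):
    """Read `limit` from the query string, clamped to [0, max_limit]."""
    try:
        limit = int(request.args.get('limit', default))
    except (TypeError, ValueError):
        limit = default
    return max(0, min(max_limit, limit))


def get_offset(max_offset=10000):
    """Read `offset` from the query string, clamped to [0, max_offset]."""
    try:
        offset = int(request.args.get('offset', 0))
    except (TypeError, ValueError):
        offset = 0
    return max(0, min(max_offset, offset))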
def index():
    # List contexts, newest first; `imports` restricts the listing to
    # contexts backed by an imported resource.
    authz.require(authz.system_read())
    q = Context.all()
    if arg_bool('imports'):
        q = q.filter(Context.resource_name != None)  # noqa
    q = q.order_by(Context.updated_at.desc())
    return jsonify(Pager(q))
def update(id):
    # Update an entity in place, optionally merging the submitted data
    # with the stored record, then queue it for re-analysis.
    entity = obj_or_404(Entity.by_id(id))
    entity = Entity.save(get_data(entity=entity),
                         collection_id=entity.collection_id,
                         merge=arg_bool('merge'))
    db.session.commit()
    analyze_entity.delay(entity.id)
    return view(entity.id)
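# A sketch of the `obj_or_404` guard used by several views here: it
# turns a failed model lookup into an HTTP 404 (assumed behaviour):
from werkzeug.exceptions import NotFound


def obj_or_404(obj):
    """Abort with 404 if a lookup returned None, else pass it through."""
    if obj is None:
        raise NotFound()
    return obj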
def update(id):
    # Validate the payload against the schema, optionally merge it with
    # the stored data, then refresh the search index for the entity and
    # its collection.
    _, entity = get_entity(id, request.authz.WRITE)
    data = parse_request(schema=EntitySchema)
    if arg_bool('merge'):
        data['data'] = merge_data(data.get('data') or {}, entity.data or {})
    entity.update(data)
    db.session.commit()
    update_entity(entity)
    update_collection(entity.collection)
    return view(entity.id)
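# A sketch of `merge_data` as used above: a recursive overlay in which
# the submitted payload wins over the stored record (assumed semantics;
# the real helper may treat lists or None differently):
def merge_data(new, old):
    """Recursively overlay `new` onto `old`, preferring `new`."""
    if isinstance(new, dict) and isinstance(old, dict):
        merged = dict(old)
        for key, value in new.items():
            merged[key] = merge_data(value, old.get(key))
        return merged
    return old if new is None else new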
def update(id):
    # Update an entity, restricting its collections to those the user
    # can write to, plus the ones it already belongs to.
    entity = obj_or_404(Entity.by_id(id))
    check_authz(entity, authz.WRITE)
    data = request_data()
    data['id'] = entity.id
    possible_collections = authz.collections(authz.WRITE)
    possible_collections.extend([c.id for c in entity.collections])
    data['collections'] = [c for c in get_collections(data)
                           if c.id in possible_collections]
    entity = Entity.save(data, merge=arg_bool('merge'))
    db.session.commit()
    update_entity(entity)
    return view(entity.id)
def update(id):
    # Update an entity within its collection, touching the collection's
    # timestamp and logging the change.
    _, entity = get_entity(id, request.authz.WRITE)
    try:
        entity = Entity.save(request_data(), entity.collection,
                             merge=arg_bool('merge'))
    except (ValueError, TypeError) as ve:
        raise BadRequest(str(ve))
    entity.collection.touch()
    db.session.commit()
    log_event(request, entity_id=entity.id)
    update_entity(entity)
    return view(entity.id)
def update(id):
    # Update an entity that may live in several collections, filtered
    # down to those the user may write to, touching each affected
    # collection's timestamp.
    entity = obj_or_404(Entity.by_id(id))
    check_authz(entity, authz.WRITE)
    data = request_data()
    data["id"] = entity.id
    possible_collections = authz.collections(authz.WRITE)
    possible_collections.extend([c.id for c in entity.collections])
    collections = [c for c in get_collections(data)
                   if c.id in possible_collections]
    try:
        entity = Entity.save(data, collections, merge=arg_bool("merge"))
    except ValueError as ve:
        raise BadRequest(str(ve))
    for collection in entity.collections:
        collection.touch()
    db.session.commit()
    log_event(request, entity_id=entity.id)
    update_entity(entity)
    return view(entity.id)
def index():
    # List entities, optionally filtered by dataset and name substring,
    # serialized as paged JSON or CSV, with an optional download header.
    entities = Entity.all()
    dataset = None  # default, so the download branch cannot hit an unbound name
    dataset_arg = request.args.get('dataset')
    if dataset_arg is not None:
        dataset = Dataset.find(dataset_arg)
        entities = entities.filter_by(dataset=dataset)
    filter_name = request.args.get('filter_name', '')
    if len(filter_name):
        query = '%' + filter_name + '%'
        entities = entities.filter(Entity.name.ilike(query))
    # TODO: other filters.
    format = request.args.get('format', 'json').lower().strip()
    if format == 'csv':
        res = csvify(entities)
    else:
        pager = Pager(entities)
        res = jsonify(pager.to_dict())
    if arg_bool('download'):
        fn = dataset_filename(dataset, format)
        res.headers['Content-Disposition'] = 'attachment; filename=' + fn
    return res
def similar(id):
    # Find entities similar to the given one, searched across the
    # collections the user can read (or write, when `writeable` is set).
    entity = obj_or_404(Entity.by_id(id))
    check_authz(entity, authz.READ)
    action = authz.WRITE if arg_bool('writeable') else authz.READ
    collections = authz.collections(action)
    return jsonify(similar_entities(entity, request.args, collections))