def decode(self, node, cstruct):
    """Deserialize *cstruct* into a Project.

    Accepts an existing Project (returned as-is), a slug string, or a
    dict carrying a 'slug' key. Returns None when nothing matches.
    """
    if isinstance(cstruct, Project):
        return cstruct
    if isinstance(cstruct, basestring):
        return Project.by_slug(cstruct)
    if isinstance(cstruct, dict):
        # Look the slug up once instead of calling .get() twice.
        slug = cstruct.get('slug')
        if slug:
            return Project.by_slug(slug)
    return None
def save(data, project=None):
    """ Create or update a project with a given slug. """
    # Validation raises before any database mutation happens.
    data = validate(data, project)
    operation = 'create' if project is None else 'update'
    if project is None:
        # Creation path: slug and author are only ever set here.
        project = Project()
        project.slug = data.get('slug')
        project.author = data.get('author')
        # Local import avoids a circular dependency with the
        # permissions logic module.
        from grano.logic import permissions as permissions_logic
        # Grant the creating account admin rights on the new project.
        permissions_logic.save({
            'account': data.get('author'),
            'project': project,
            'admin': True
        })
    # Fields below are updated on both create and update.
    project.settings = data.get('settings')
    project.label = data.get('label')
    project.private = data.get('private')
    project.updated_at = datetime.utcnow()
    db.session.add(project)
    # TODO: make this nicer - separate files?
    # NOTE(review): the base schema fixture is (re-)imported on every
    # save, not only on create — presumably import_schema is idempotent;
    # confirm before relying on it.
    from grano.logic.schemata import import_schema
    with app.open_resource('fixtures/base.yaml') as fh:
        import_schema(project, fh)
    # Flush so the project has an ID before change handlers run.
    db.session.flush()
    _project_changed(project.slug, operation)
    return project
def schema_import(project, path):
    """ Load a schema specification from a YAML file. """
    project_obj = Project.by_slug(project)
    if project_obj is None:
        # No such project yet: create a stub owned by the console account.
        stub = {"slug": project, "label": project,
                "author": console_account()}
        project_obj = save_project(stub)
    with open(path, "r") as fh:
        import_schema(project_obj, fh)
def es_index(project=None):
    """ Re-index all entities in the system, or those in a project. """
    project_obj = None
    if project is not None:
        # A slug was given: resolve it, failing loudly if unknown.
        project_obj = Project.by_slug(project)
        if project_obj is None:
            raise ValueError("Project not found.")
    script_indexer.index_project(project=project_obj)
def suggest_property(slug):
    """Suggest entity attribute names matching a prefix (Refine-style)."""
    project = object_or_404(Project.by_slug(slug))
    authz.require(authz.project_read(project))
    pattern = '%%%s%%' % request.args.get('prefix', '')
    query = db.session.query(Attribute).join(Schema)
    query = query.filter(Schema.obj == 'entity')
    query = query.filter(Schema.project == project)
    query = query.filter(or_(Attribute.label.ilike(pattern),
                             Attribute.name.ilike(pattern)))
    query = query.limit(get_limit(default=5))
    matches = [{
        'name': attr.label,
        'n:type': {'id': '/properties/property', 'name': 'Property'},
        'id': attr.name
    } for attr in query]
    return jsonify({
        "code": "/api/status/ok",
        "status": "200 OK",
        "prefix": request.args.get('prefix', ''),
        "result": matches
    })
def create(slug):
    """Create a schema inside the project; returns it with HTTP 201."""
    project = object_or_404(Project.by_slug(slug))
    authz.require(authz.project_manage(project))
    schema = schemata.save(request_data({'project': project}))
    db.session.commit()
    return jsonify(schemata.to_rest(schema), status=201)
def suggest_property(slug):
    """Autocomplete entity attribute names for a prefix query."""
    project = object_or_404(Project.by_slug(slug))
    authz.require(authz.project_read(project))
    pattern = "%%%s%%" % request.args.get("prefix", "")
    log.info("Suggesting property names in %s: %r", project.slug, pattern)
    query = db.session.query(Attribute).join(Schema)
    query = query.filter(Schema.obj == "entity")
    query = query.filter(Schema.project == project)
    query = query.filter(or_(Attribute.label.ilike(pattern),
                             Attribute.name.ilike(pattern)))
    query = query.limit(get_limit(default=5))
    matches = []
    for attribute in query:
        entry = {"name": attribute.label, "id": attribute.name}
        entry["n:type"] = {"id": "/properties/property", "name": "Property"}
        matches.append(entry)
    return jsonify({
        "code": "/api/status/ok",
        "status": "200 OK",
        "prefix": request.args.get("prefix", ""),
        "result": matches,
    })
def reconcile(slug):
    """Reconciliation API, emulating the Google Refine API.

    See: http://code.google.com/p/google-refine/wiki/ReconciliationServiceApi
    """
    project = object_or_404(Project.by_slug(slug))
    authz.require(authz.project_read(project))
    # TODO: Add proper support for types and namespacing.
    data = request.args.copy()
    data.update(request.form.copy())
    if 'query' in data:
        # Single-query mode; the query is either a JSON object or the
        # flat form parameters themselves.
        raw = data.get('query')
        if raw.startswith('{'):
            try:
                single = json.loads(raw)
            except ValueError:
                raise BadRequest()
        else:
            single = data
        return jsonify(reconcile_op(project, single))
    if 'queries' in data:
        # Batch mode: a JSON map of named queries, answered key by key.
        try:
            batch = json.loads(data.get('queries'))
        except ValueError:
            raise BadRequest()
        results = {}
        for key, sub_query in batch.items():
            results[key] = reconcile_op(project, sub_query)
        return jsonify(results)
    # No query at all: return the service metadata document.
    return reconcile_index(project)
def delete(slug, name):
    """Delete a named schema; responds with HTTP 410 Gone on success."""
    project = object_or_404(Project.by_slug(slug))
    authz.require(authz.project_manage(project))
    target = object_or_404(Schema.by_name(project, name))
    schemata.delete(target)
    db.session.commit()
    raise Gone()
def create(slug):
    """Create a new schema in the project and return it (HTTP 201)."""
    project = object_or_404(Project.by_slug(slug))
    authz.require(authz.project_manage(project))
    payload = request_data({"project": project})
    new_schema = schemata.save(payload)
    db.session.commit()
    return jsonify(new_schema, status=201)
def update(slug):
    """Update project metadata; the requesting account becomes the author."""
    project = object_or_404(Project.by_slug(slug))
    authz.require(authz.project_manage(project))
    payload = request_data({'author': request.account})
    saved = projects.save(payload, project=project)
    db.session.commit()
    return jsonify(saved)
def suggest_type(slug):
    """Suggest visible entity schema types whose label/name match a prefix."""
    project = object_or_404(Project.by_slug(slug))
    authz.require(authz.project_read(project))
    pattern = '%%%s%%' % request.args.get('prefix', '')
    log.info("Suggesting types in %s: %r", project.slug, pattern)
    query = db.session.query(Schema)
    query = query.filter(Schema.obj == 'entity')
    query = query.filter(Schema.hidden == False)  # noqa
    query = query.filter(Schema.project == project)
    query = query.filter(or_(Schema.label.ilike(pattern),
                             Schema.name.ilike(pattern)))
    query = query.limit(get_limit(default=5))
    matches = [{'name': s.label, 'id': '/%s/%s' % (slug, s.name)}
               for s in query]
    return jsonify({
        "code": "/api/status/ok",
        "status": "200 OK",
        "prefix": request.args.get('prefix', ''),
        "result": matches
    })
def view(slug, name):
    """Render a single schema; public projects are cache-validated."""
    project = object_or_404(Project.by_slug(slug))
    authz.require(authz.project_read(project))
    if not project.private:
        # Only public projects may be answered from HTTP caches.
        validate_cache(last_modified=project.updated_at)
    target = object_or_404(Schema.by_name(project, name))
    return jsonify(target)
def create(slug):
    """Grant a new permission on the project (HTTP 201)."""
    project = object_or_404(Project.by_slug(slug))
    authz.require(authz.project_manage(project))
    permission = permissions.save(request_data({'project': project}))
    db.session.commit()
    return jsonify(permission, status=201)
def suggest_property(slug):
    """Suggest attribute names for a project's entities (Refine-style)."""
    project = object_or_404(Project.by_slug(slug))
    authz.require(authz.project_read(project))
    term = request.args.get('prefix', '')
    pattern = '%%%s%%' % term
    log.info("Suggesting property names in %s: %r", project.slug, pattern)
    q = db.session.query(Attribute).join(Schema)
    q = q.filter(Schema.obj == 'entity')
    q = q.filter(Schema.project == project)
    q = q.filter(or_(Attribute.label.ilike(pattern),
                     Attribute.name.ilike(pattern)))
    q = q.limit(get_limit(default=5))
    matches = []
    for attr in q:
        matches.append({
            'name': attr.label,
            'n:type': {'id': '/properties/property', 'name': 'Property'},
            'id': attr.name
        })
    return jsonify({
        "code": "/api/status/ok",
        "status": "200 OK",
        "prefix": term,
        "result": matches
    })
def delete(slug, id):
    """Revoke a permission from the project; responds with HTTP 410."""
    project = object_or_404(Project.by_slug(slug))
    authz.require(authz.project_manage(project))
    perm = object_or_404(Permission.by_project_and_id(project, id))
    permissions.delete(perm)
    db.session.commit()
    raise Gone()
def rebuild():
    """ Execute the change processing handlers for all entities and
    relations currently loaded. This can be used as a housekeeping
    function. """
    for project in Project.all():
        # Replay project-level change events first, then schemata.
        _project_changed(project.slug, 'delete')
        _project_changed(project.slug, 'create')
        for schema in project.schemata:
            _schema_changed(schema.project.slug, schema.name, 'delete')
            _schema_changed(schema.project.slug, schema.name, 'create')
        # Canonical entities only (same_as=None skips alias records).
        eq = Entity.all().filter_by(same_as=None)
        eq = eq.filter_by(project=project)
        for i, entity in enumerate(eq):
            if i > 0 and i % 1000 == 0:
                log.info("Rebuilt: %s entities", i)
            _entity_changed(entity.id, 'delete')
            _entity_changed(entity.id, 'create')
        rq = Relation.all().filter_by(project=project)
        for i, relation in enumerate(rq):
            if i > 0 and i % 1000 == 0:
                # Fix: progress message said "relation" (singular),
                # inconsistent with the "entities" message above.
                log.info("Rebuilt: %s relations", i)
            _relation_changed(relation.id, 'delete')
            _relation_changed(relation.id, 'create')
def graph(slug):
    """Export the project graph, either as GEXF XML or as JSON.

    Security fix: this variant performed no authorization check and
    validated the HTTP cache even for private projects, unlike the
    sibling graph views — read access is now required and only public
    projects are cache-validated.
    """
    project = object_or_404(Project.by_slug(slug))
    authz.require(authz.project_read(project))
    extractor = GraphExtractor(project_id=project.id)
    if not project.private:
        validate_cache(keys=extractor.to_hash())
    if extractor.format == 'gexf':
        return Response(extractor.to_gexf(), mimetype='text/xml')
    return jsonify(extractor.to_dict())
def facet_schema_list(obj, facets, project_slug='openinterests'):
    """Resolve facet terms into (Schema, count) pairs.

    Unknown or hidden schemata are skipped. The project slug used for
    the lookup was hard-coded; it is now a keyword parameter whose
    default preserves the original behavior.
    """
    results = []
    project = Project.by_slug(project_slug)
    for facet in facets:
        schema = Schema.by_name(project, facet.get('term'))
        if schema is not None and not schema.hidden:
            results.append((schema, facet.get('count')))
    return results
def index(slug):
    """Pager over all schemata of a project; honours the 'full' flag."""
    project = object_or_404(Project.by_slug(slug))
    authz.require(authz.project_read(project))
    validate_cache(last_modified=project.updated_at)
    schema_query = Schema.all().filter_by(project=project)
    pager = Pager(schema_query, slug=slug)
    full = arg_bool("full")
    return jsonify(pager, index=not full)
def index():
    """List projects readable by the current account, with index stats."""
    q = Project.all().outerjoin(Permission)
    # A project is visible if it is public, or the account holds a
    # reader permission on it.
    visible = or_(Project.private == False,
                  and_(Permission.reader == True,
                       Permission.account == request.account))
    q = q.filter(visible)
    pager = Pager(q)

    def convert(items):
        return [projects.to_rest_index_stats(p) for p in items]

    return jsonify(pager.to_dict(convert))
def update(slug, id):
    """Update an existing permission record on the project."""
    project = object_or_404(Project.by_slug(slug))
    authz.require(authz.project_manage(project))
    perm = object_or_404(Permission.by_project_and_id(project, id))
    payload = request_data({'project': project})
    perm = permissions.save(payload, permission=perm)
    db.session.commit()
    return jsonify(perm)
def index(slug):
    """Pager over a project's permissions (managers only)."""
    project = object_or_404(Project.by_slug(slug))
    authz.require(authz.project_manage(project))
    perms = Permission.all().filter_by(project=project)
    pager = Pager(perms, slug=slug)
    validate_cache(keys=pager.cache_keys())
    return jsonify(pager, index=True)
def index(slug):
    """List a project's permissions in REST index form (managers only)."""
    project = object_or_404(Project.by_slug(slug))
    authz.require(authz.project_manage(project))
    perm_query = Permission.all().filter_by(project=project)
    pager = Pager(perm_query)

    def convert(perms):
        return [permissions.to_rest_index(p) for p in perms]

    return jsonify(pager.to_dict(convert))
def index(slug):
    """List a project's schemata in REST index form.

    Security/consistency fix: this variant performed no authorization
    check, unlike the sibling schemata index view which requires read
    access — private projects' schemata were listable by anyone.
    """
    project = object_or_404(Project.by_slug(slug))
    authz.require(authz.project_read(project))
    validate_cache(last_modified=project.updated_at)
    query = Schema.all().filter_by(project=project)
    pager = Pager(query)

    def convert(items):
        return [schemata.to_rest_index(s) for s in items]

    return jsonify(pager.to_dict(convert))
def index(slug):
    """Pager over a project's schemata; 'full' disables the index form."""
    project = object_or_404(Project.by_slug(slug))
    authz.require(authz.project_read(project))
    validate_cache(last_modified=project.updated_at)
    query = Schema.all()
    query = query.filter_by(project=project)
    pager = Pager(query, slug=slug)
    index_only = not arg_bool('full')
    return jsonify(pager, index=index_only)
def update(slug, name):
    """Update a named schema within the project."""
    project = object_or_404(Project.by_slug(slug))
    authz.require(authz.project_manage(project))
    schema = object_or_404(Schema.by_name(project, name))
    payload = request_data({'project': project})
    schema = schemata.save(payload, schema=schema)
    db.session.commit()
    return jsonify(schema)
def update(slug, name):
    """Update a named schema and return its REST representation.

    Bug fix: the result of schemata.save() was mistakenly bound to
    `project` instead of `schema`, clobbering the project variable and
    serializing the stale pre-save schema object (compare the sibling
    update view, which binds it correctly).
    """
    project = object_or_404(Project.by_slug(slug))
    authz.require(authz.project_manage(project))
    schema = object_or_404(Schema.by_name(project, name))
    data = request_data({'project': project})
    schema = schemata.save(data, schema=schema)
    db.session.commit()
    return jsonify(schemata.to_rest(schema))
def graph(slug):
    """Project graph endpoint: GEXF XML for 'gexf', JSON otherwise."""
    project = object_or_404(Project.by_slug(slug))
    authz.require(authz.project_read(project))
    exporter = GraphExtractor(project_id=project.id)
    if not project.private:
        # Only public projects may be served from cache.
        validate_cache(keys=exporter.to_hash())
    if exporter.format == 'gexf':
        return Response(exporter.to_gexf(), mimetype='text/xml')
    return jsonify(exporter)
def index():
    """Pager over all projects the requesting account may read."""
    readable = and_(Permission.reader == True,
                    Permission.account == request.account)
    q = Project.all()
    q = q.outerjoin(Permission)
    # Public projects, plus private ones with a reader permission.
    q = q.filter(or_(Project.private == False, readable))
    q = q.distinct()
    pager = Pager(q)
    validate_cache(keys=pager.cache_keys())
    return jsonify(pager, index=True)
def aliases(slug):
    """Download the project's entity aliases as a CSV attachment."""
    project = object_or_404(Project.by_slug(slug))
    authz.require(authz.project_read(project))
    buf = StringIO()
    export_aliases(project, buf)
    # Rewind so send_file streams from the start of the buffer.
    buf.seek(0)
    res = send_file(buf, mimetype='text/csv')
    disposition = 'filename=%s-aliases.csv' % project.slug
    res.headers['Content-Disposition'] = disposition
    return res
def graph(slug):
    """Export the project graph as GEXF XML or a JSON dictionary."""
    project = object_or_404(Project.by_slug(slug))
    authz.require(authz.project_read(project))
    extractor = GraphExtractor(project_id=project.id)
    if not project.private:
        validate_cache(keys=extractor.to_hash())
    if extractor.format != 'gexf':
        return jsonify(extractor.to_dict())
    return Response(extractor.to_gexf(), mimetype='text/xml')
def delete(slug, name):
    """Delete a schema: 410 when removed, otherwise return it unchanged."""
    project = object_or_404(Project.by_slug(slug))
    authz.require(authz.project_manage(project))
    schema = object_or_404(Schema.by_name(project, name))
    removed = schemata.delete(schema)
    db.session.commit()
    if not removed:
        # Deletion was refused; hand the still-existing schema back.
        return jsonify(schema)
    raise Gone()
def schema_import(project, path):
    """ Load a schema specification from a YAML file. """
    pobj = Project.by_slug(project)
    if pobj is None:
        # Unknown slug: bootstrap a project under the console account.
        pobj = save_project(dict(slug=project,
                                 label=project,
                                 author=console_account()))
    with open(path, 'r') as fh:
        import_schema(pobj, fh)
def suggest_entity(slug):
    """Suggest API, emulating the Google Refine API.

    See: https://github.com/OpenRefine/OpenRefine/wiki/Reconciliation-Service-API
    """
    project = object_or_404(Project.by_slug(slug))
    authz.require(authz.project_read(project))
    pattern = "%%%s%%" % request.args.get("prefix", "")
    log.info("Suggesting entities in %s: %r", project.slug, pattern)
    query = db.session.query(Entity).join(Property).join(Project)
    query = query.filter(Property.name == "name")
    query = query.filter(Property.active == True)  # noqa
    query = query.filter(Property.entity_id == Entity.id)
    query = query.filter(Property.value_string.ilike(pattern))
    query = query.filter(Project.slug == slug)
    if "type" in request.args:
        schema_name = request.args.get("type")
        if "/" in schema_name:
            # Accept namespaced ids of the form /<project>/<schema>.
            schema_name = schema_name.rsplit("/", 1)[-1]
        query = query.join(Schema).filter(Schema.name == schema_name)
    query = query.distinct().limit(get_limit(default=5))
    matches = []
    for entity in query:
        types = [{"id": "/" + project.slug + "/" + s.name, "name": s.label}
                 for s in entity.schemata if not s.hidden]
        matches.append({
            "name": entity["name"].value,
            "n:type": types[0] if types else {},
            "type": types,
            "uri": url_for("entities_api.view", id=entity.id, _external=True),
            "id": entity.id,
        })
    return jsonify({
        "code": "/api/status/ok",
        "status": "200 OK",
        "prefix": request.args.get("prefix", ""),
        "result": matches,
    })
def index():
    """Cached pager over every project visible to the current account."""
    query = Project.all().outerjoin(Permission)
    query = query.filter(or_(
        Project.private == False,
        and_(Permission.reader == True,
             Permission.account == request.account)))
    # The outer join can duplicate projects with several permissions.
    query = query.distinct()
    pager = Pager(query)
    validate_cache(keys=pager.cache_keys())
    return jsonify(pager, index=True)
def suggest_entity(slug):
    """Suggest API, emulating the Google Refine API.

    See: https://github.com/OpenRefine/OpenRefine/wiki/Reconciliation-Service-API
    """
    project = object_or_404(Project.by_slug(slug))
    authz.require(authz.project_read(project))
    pattern = '%%%s%%' % request.args.get('prefix', '')
    log.info("Suggesting entities in %s: %r", project.slug, pattern)
    query = db.session.query(Entity)
    query = query.join(Property).join(Project)
    query = query.filter(Property.name == 'name')
    query = query.filter(Property.active == True)  # noqa
    query = query.filter(Property.entity_id == Entity.id)
    query = query.filter(Property.value_string.ilike(pattern))
    query = query.filter(Project.slug == slug)
    if 'type' in request.args:
        type_name = request.args.get('type')
        if '/' in type_name:
            # Strip any /<project>/ namespace prefix.
            _, type_name = type_name.rsplit('/', 1)
        query = query.join(Schema)
        query = query.filter(Schema.name == type_name)
    query = query.distinct()
    query = query.limit(get_limit(default=5))
    matches = []
    for entity in query:
        schema_type = {
            'id': '/' + project.slug + '/' + entity.schema.name,
            'name': entity.schema.label
        }
        matches.append({
            'name': entity['name'].value,
            'n:type': schema_type,
            'type': [schema_type],
            'uri': url_for('entities_api.view', id=entity.id, _external=True),
            'id': entity.id
        })
    return jsonify({
        "code": "/api/status/ok",
        "status": "200 OK",
        "prefix": request.args.get('prefix', ''),
        "result": matches
    })
def validate(data, project):
    """Validate a project payload and return the deserialized mapping."""

    def same_project(slug):
        # The slug may only be held by the project being edited.
        return Project.by_slug(slug) == project

    slug_free = colander.Function(same_project, message="Project exists")

    class ProjectValidator(colander.MappingSchema):
        slug = colander.SchemaNode(
            colander.String(),
            validator=colander.All(database_name, slug_free))
        label = colander.SchemaNode(
            colander.String(),
            validator=colander.Length(min=3))
        author = colander.SchemaNode(AccountRef())
        settings = colander.SchemaNode(colander.Mapping(), missing={})

    return ProjectValidator().deserialize(data)
def _project_and_permission(private=False, reader=False, editor=False,
                            admin=False, account_id=1):
    """Persist a project plus a single permission row (test helper)."""
    project = Project(private=private)
    db.session.add(project)
    # Commit so project.id is assigned before the permission refers to it.
    db.session.commit()
    permission = Permission(
        account_id=account_id,
        project_id=project.id,
        reader=reader,
        editor=editor,
        admin=admin)
    db.session.add(permission)
    db.session.commit()
    return project, permission
def __init__(self, project_slug, source_url=None, project_label=None,
             project_settings=None, account=None, ignore_errors=True):
    """Bind the loader to a project, creating or updating it as needed."""
    self.source_url = source_url
    # Default to the console account when none is supplied.
    self.account = account or accounts.console_account()
    self.ignore_errors = ignore_errors
    existing = Project.by_slug(project_slug)
    # Falsy values fall back to the stored settings/label (or sensible
    # defaults when the project does not exist yet).
    project_settings = project_settings or \
        (existing.settings if existing else {})
    project_label = project_label or \
        (existing.label if existing else project_slug)
    self.project = projects.save({
        'slug': project_slug,
        'author': self.account,
        'label': project_label,
        'settings': project_settings
    }, project=existing)
def __init__(self, project_slug, source_url=None, project_label=None,
             project_settings=None):
    """Bind to a project by slug, creating or updating it via the
    console account.

    Bug fix: the label fallback read `project.labek` (typo), raising
    AttributeError whenever the project already existed and no
    project_label was passed.
    """
    self.source_url = source_url
    self.account = accounts.console_account()
    project = Project.by_slug(project_slug)
    project_settings = project_settings or \
        (project.settings if project else {})
    project_label = project_label or \
        (project.label if project else project_slug)
    self.project = projects.save(
        {
            'slug': project_slug,
            'author': self.account,
            'label': project_label,
            'settings': project_settings
        }, project=project)
def query(slug):
    """Run an ad-hoc query against a project.

    POST carries the query as a JSON body; GET passes it in the
    'query' parameter. Responds with the parsed query node, the
    results and the total count.

    Bug fix: the null-check used `assert query is not None` inside the
    try block; asserts are stripped under `python -O`, which would let
    a missing query through. Replaced with an explicit check.
    """
    project = object_or_404(Project.by_slug(slug))
    authz.require(authz.project_read(project))
    if request.method == 'POST':
        query = request.json
    else:
        try:
            query = json.loads(request.args.get('query', 'null'))
        except (TypeError, ValueError):
            raise BadRequest('Invalid data submitted')
        if query is None:
            raise BadRequest('Invalid data submitted')
    eq = run_query(project, query)
    return jsonify({
        'status': 'ok',
        'query': eq.node,
        'results': eq.run(),
        'total': eq.count()
    })