def merge(id, other_id):
    """Fold the entity `other_id` into entity `id`, then re-index both."""
    target = obj_or_404(Entity.by_id(id))
    check_authz(target, authz.WRITE)
    source = obj_or_404(Entity.by_id(other_id))
    check_authz(source, authz.WRITE)
    target.merge(source)
    db.session.commit()
    for changed in (target, source):
        update_entity(changed)
    return view(target.id)
def delete(id):
    """Delete an entity and queue re-analysis of material that referenced it."""
    ent = obj_or_404(Entity.by_id(id))
    authz.require(authz.collection_write(ent.collection_id))
    ent.delete()
    db.session.commit()
    analyze_entity.delay(id)
    return jsonify({'status': 'ok'})
def delete(id):
    """Delete an entity after a write-access check, logging the event."""
    ent = obj_or_404(Entity.by_id(id))
    check_authz(ent, authz.WRITE)
    delete_entity(ent)
    db.session.commit()
    log_event(request, entity_id=ent.id)
    return jsonify({'status': 'ok'})
def delete(id):
    """Soft-delete an entity and push the change out."""
    ent = obj_or_404(Entity.by_id(id))
    check_authz(ent, authz.WRITE)
    ent.delete()
    db.session.commit()
    # NOTE(review): re-indexing after deletion — presumably this propagates
    # the removal to the search index; confirm against `update_entity`.
    update_entity(ent)
    return jsonify({'status': 'ok'})
def prune_entity(collection, entity_id=None, job_id=None): """Prune handles the full deletion of an entity outside of the HTTP request cycle. This involves cleaning up adjacent entities like xref results, notifications and so on.""" # This is recursive and will also delete any entities which # reference the given entity. Usually this is going to be child # documents, or directoships referencing a person. It's a pretty # dangerous operation, though. log.info("[%s] Prune entity: %s", collection, entity_id) for adjacent in index.iter_adjacent(collection.id, entity_id): log.warning("Recursive delete: %s", adjacent.get("id")) delete_entity(collection, adjacent, job_id=job_id) flush_notifications(entity_id, clazz=Entity) obj = Entity.by_id(entity_id, collection=collection) if obj is not None: obj.delete() doc = Document.by_id(entity_id, collection=collection) if doc is not None: doc.delete() EntitySetItem.delete_by_entity(entity_id) Mapping.delete_by_table(entity_id) xref_index.delete_xref(collection, entity_id=entity_id) aggregator = get_aggregator(collection) aggregator.delete(entity_id=entity_id) refresh_entity(collection, entity_id) collection.touch() db.session.commit()
def upsert_entity(data, collection, authz=None, sync=False):
    """Create or update an entity in the database.

    This has a side effect of migrating entities created via the _bulk API
    or a mapper to a database entity in the event that it gets edited by
    the user.
    """
    entity = None
    entity_id = collection.ns.sign(data.get("id"))
    if entity_id is not None:
        entity = Entity.by_id(entity_id, collection=collection)
    if entity is None:
        # Record the creating role when an authz context is available.
        role_id = authz.id if authz is not None else None
        entity = Entity.create(data, collection, role_id=role_id)
    else:
        entity.update(data, collection)
    # Inline name properties from adjacent entities. See the
    # docstring on `inline_names` for a more detailed discussion.
    proxy = entity.to_proxy()
    entity_ids = proxy.get_type_values(registry.entity)
    for rel in index.entities_by_ids(entity_ids):
        inline_names(proxy, model.get_proxy(rel))
    entity.data = proxy.properties
    db.session.add(entity)
    # NOTE(review): the session is added to but not committed here —
    # presumably the caller commits; confirm.
    delete_aggregator_entity(collection, entity.id)
    index.index_proxy(collection, proxy, sync=sync)
    refresh_entity(collection, entity.id)
    return entity.id
def delete(id):
    """Delete a watchlist entity and queue re-analysis for it."""
    ent = obj_or_404(Entity.by_id(id))
    authz.require(authz.watchlist_write(ent.watchlist_id))
    ent.delete()
    db.session.commit()
    analyze_entity.delay(id)
    return jsonify({"status": "ok"})
def fetch_entity(entity_id):
    """Load an entity from the search index, overlaying database data."""
    indexed = get_entity(entity_id)
    db_obj = Entity.by_id(entity_id)
    if db_obj is not None:
        # Database data takes precedence over the indexed copy.
        indexed['data'] = db_obj.data
    return indexed, db_obj
def upsert_entity(data, collection, authz=None, sync=False, sign=False, job_id=None):
    """Create or update an entity in the database. This has a side effect of
    migrating entities created via the _bulk API or a mapper to a database
    entity in the event that it gets edited by the user.
    """
    # Imported here — presumably to avoid a circular import; confirm.
    from aleph.logic.profiles import profile_fragments

    entity = None
    entity_id = collection.ns.sign(data.get("id"))
    if entity_id is not None:
        entity = Entity.by_id(entity_id, collection=collection)
    if entity is None:
        # Record the creating role when an authz context is available.
        role_id = authz.id if authz is not None else None
        entity = Entity.create(data, collection, sign=sign, role_id=role_id)
    else:
        entity.update(data, collection, sign=sign)
    collection.touch()
    # Replace the aggregator's copy of this entity, then re-apply any
    # profile fragments before re-indexing.
    proxy = entity.to_proxy()
    aggregator = get_aggregator(collection)
    aggregator.delete(entity_id=proxy.id)
    aggregator.put(proxy, origin=MODEL_ORIGIN)
    profile_fragments(collection, aggregator, entity_id=proxy.id)
    index.index_proxy(collection, proxy, sync=sync)
    refresh_entity(collection, proxy.id)
    # Queue follow-up processing outside the request cycle.
    queue_task(collection, OP_UPDATE_ENTITY, job_id=job_id, entity_id=proxy.id)
    return entity.id
def update(id):
    """Save changes to an entity and queue it for re-analysis."""
    # NOTE(review): unlike the sibling `update` endpoints in this file, this
    # one performs no permission check after loading the entity — confirm
    # that authorisation happens upstream, or add a check here.
    entity = obj_or_404(Entity.by_id(id))
    entity = Entity.save(get_data(entity=entity),
                         collection_id=entity.collection_id,
                         merge=arg_bool('merge'))
    db.session.commit()
    analyze_entity.delay(entity.id)
    return view(entity.id)
def delete(id):
    """Delete a list entity and refresh the selectors it contributed."""
    ent = obj_or_404(Entity.by_id(id))
    authz.require(authz.list_write(ent.list_id))
    # Capture the selectors before deletion so they can be refreshed after.
    affected = ent.terms
    ent.delete()
    db.session.commit()
    refresh_selectors.delay(list(affected))
    return jsonify({'status': 'ok'})
def delete(id):
    """Remove a list entity, then refresh the selectors that referenced it."""
    ent = obj_or_404(Entity.by_id(id))
    authz.require(authz.list_write(ent.list_id))
    stale = ent.terms
    ent.delete()
    db.session.commit()
    refresh_selectors.delay(list(stale))
    return jsonify({"status": "ok"})
def analyze_entity(entity_id):
    """Queue re-analysis of every document mentioning the given entity."""
    processed = set()
    match_query = {'term': {'entities.entity_id': entity_id}}
    for doc_id in query_doc_ids(match_query):
        analyze_document.delay(doc_id)
        processed.add(doc_id)
    ent = Entity.by_id(entity_id)
    if ent is not None:
        analyze_terms(ent.terms, seen=processed)
def update(id):
    """Update a watchlist entity, checking write access on both the current
    entity and the target watchlist."""
    ent = obj_or_404(Entity.by_id(id))
    authz.require(authz.watchlist_write(ent.watchlist_id))
    data = EntityForm().deserialize(request_data())
    target = data.get("watchlist")
    authz.require(target)
    authz.require(authz.watchlist_write(target.id))
    ent.update(data)
    target.touch()
    db.session.commit()
    analyze_entity.delay(ent.id)
    return view(ent.id)
def update(id):
    """Update a list entity and refresh only the selectors that changed."""
    ent = obj_or_404(Entity.by_id(id))
    authz.require(authz.list_write(ent.list_id))
    data = EntityForm().deserialize(request_data())
    authz.require(data["list"])
    authz.require(authz.list_write(data["list"].id))
    before = ent.terms
    ent.update(data)
    db.session.commit()
    # Selectors added or removed by this update are the only ones affected.
    changed = before.symmetric_difference(ent.terms)
    refresh_selectors.delay(list(changed))
    return view(ent.id)
def update(id):
    """Update a collection entity, verifying write access on both the
    current and the target collection."""
    ent = obj_or_404(Entity.by_id(id))
    authz.require(authz.collection_write(ent.collection_id))
    data = request_data()
    target = data.get('collection')
    authz.require(target)
    authz.require(authz.collection_write(target.id))
    ent.update(data)
    target.touch()
    db.session.commit()
    analyze_entity.delay(ent.id)
    return view(ent.id)
def update(id):
    """Apply edits to a list entity, then refresh changed selectors."""
    ent = obj_or_404(Entity.by_id(id))
    authz.require(authz.list_write(ent.list_id))
    data = EntityForm().deserialize(request_data())
    authz.require(data['list'])
    authz.require(authz.list_write(data['list'].id))
    previous = ent.terms
    ent.update(data)
    db.session.commit()
    delta = previous.symmetric_difference(ent.terms)
    refresh_selectors.delay(list(delta))
    return view(ent.id)
def update(id):
    """Save entity edits, limiting collections to those the user may write
    plus those the entity already belongs to."""
    ent = obj_or_404(Entity.by_id(id))
    check_authz(ent, authz.WRITE)
    data = request_data()
    data['id'] = ent.id
    allowed = authz.collections(authz.WRITE)
    allowed.extend([c.id for c in ent.collections])
    data['collections'] = [c for c in get_collections(data)
                           if c.id in allowed]
    ent = Entity.save(data, merge=arg_bool('merge'))
    db.session.commit()
    update_entity(ent)
    return view(ent.id)
def fetch_entity(entity_id):
    """Load entities from both the ES index and the database."""
    entity = load_entity(entity_id)
    obj = Entity.by_id(entity_id)
    if obj is not None:
        if entity is not None:
            # Database fields take precedence over the indexed copy.
            entity.update(obj.to_dict())
        else:
            # Not in the index: construct the index form from the DB object.
            entity = obj.to_index()
            entity = finalize_index(entity, obj.schema)
        entity['ids'] = EntityIdentity.entity_ids(entity_id)
    elif entity is not None:
        # Index-only entity: it identifies only itself.
        entity['ids'] = [entity.get('id')]
    return entity, obj
def update(id):
    """Persist entity edits; target collections are restricted to writeable
    ones and the entity's existing collections."""
    ent = obj_or_404(Entity.by_id(id))
    check_authz(ent, authz.WRITE)
    data = request_data()
    data['id'] = ent.id
    writable_ids = authz.collections(authz.WRITE)
    writable_ids.extend([c.id for c in ent.collections])
    data['collections'] = [
        coll for coll in get_collections(data)
        if coll.id in writable_ids
    ]
    ent = Entity.save(data, merge=arg_bool('merge'))
    db.session.commit()
    update_entity(ent)
    return view(ent.id)
def delete_entity(entity, deleted_at=None, sync=False):
    """Delete an entity (index dict) plus everything that references it."""
    # This is recursive and will also delete any entities which
    # reference the given entity. Usually this is going to be child
    # documents, or directorships referencing a person. It's a pretty
    # dangerous operation, though.
    for adjacent in index.iter_adjacent(entity):
        log.warning("Recursive delete: %r", adjacent)
        delete_entity(adjacent, deleted_at=deleted_at, sync=sync)
    flush_notifications(entity.get('id'), clazz=Entity)
    # The ID may refer to a DB entity, a document, or only an index record.
    obj = Entity.by_id(entity.get('id'))
    if obj is not None:
        obj.delete(deleted_at=deleted_at)
    doc = Document.by_id(entity.get('id'))
    if doc is not None:
        doc.delete(deleted_at=deleted_at)
    index.delete_entity(entity.get('id'), sync=sync)
    refresh_entity(entity)
def update(id):
    """Update an entity, possibly moving it between writeable collections.

    Returns the serialized entity view; raises BadRequest when the
    submitted data fails validation in `Entity.save`.
    """
    entity = obj_or_404(Entity.by_id(id))
    check_authz(entity, authz.WRITE)
    data = request_data()
    data["id"] = entity.id
    # The entity may only be placed into collections the user can write,
    # or collections it already belongs to.
    possible_collections = authz.collections(authz.WRITE)
    possible_collections.extend([c.id for c in entity.collections])
    collections = [c for c in get_collections(data) if c.id in possible_collections]
    try:
        entity = Entity.save(data, collections, merge=arg_bool("merge"))
    except ValueError as ve:
        # BUG FIX: Python 3 exceptions have no `.message` attribute, so
        # `ve.message` raised AttributeError instead of returning a 400.
        raise BadRequest(str(ve))
    for collection in entity.collections:
        collection.touch()
    db.session.commit()
    log_event(request, entity_id=entity.id)
    update_entity(entity)
    return view(entity.id)
def upsert_entity(data, collection, sync=False):
    """Create or update an entity in the database, then re-index it."""
    entity = None
    entity_id = collection.ns.sign(data.get('id'))
    if entity_id is not None:
        # Include soft-deleted rows so an upsert can revive them.
        entity = Entity.by_id(entity_id, collection=collection, deleted=True)
        # TODO: migrate softly from index.
    if entity is None:
        entity = Entity.create(data, collection)
    else:
        entity.update(data, collection)
    collection.touch()
    db.session.commit()
    # Drop any aggregator copy before re-indexing the DB version.
    delete_aggregator_entity(collection, entity.id)
    index.index_entity(entity, sync=sync)
    refresh_entity(entity.id, sync=sync)
    refresh_collection(collection.id, sync=sync)
    return entity.id
def generate_graph(args):
    """Build a weighted co-occurrence graph of entities found in documents
    matching the given query arguments."""
    fields = ['id', 'collection', 'entities.uuid', 'entities.name',
              'entities.$schema']
    query = documents_query(args, fields=fields, facets=False)
    query = {'query': query['query']}
    graph = nx.MultiGraph()
    for doc in scan_iter(query):
        entities = set()
        for entity in doc.get('_source').get('entities', []):
            uuid = entity.get('uuid')
            if not graph.has_node(uuid):
                obj = Entity.by_id(uuid)
                # BUG FIX: the index may reference entities that no longer
                # exist in the database; skip them instead of crashing on
                # `obj.name` with an AttributeError.
                if obj is None:
                    continue
                graph.add_node(uuid, label=obj.name, schema=obj.type)
            entities.add(uuid)
        # Link every pair of entities co-occurring in the same document.
        for (src, dst) in combinations(entities, 2):
            graph.add_edge(src, dst, weight=1)
    graph = multigraph_to_weighted(graph)
    return paginate_graph(graph)
def upsert_entity(data, collection, validate=True, sync=False):
    """Create or update an entity in the database.

    This has a side effect of migrating entities created via the _bulk API
    or a mapper to a database entity in the event that it gets edited by
    the user.
    """
    entity = None
    entity_id = collection.ns.sign(data.get('id'))
    if entity_id is not None:
        # Include soft-deleted rows so an upsert can revive them.
        entity = Entity.by_id(entity_id, collection=collection, deleted=True)
        # TODO: migrate softly from index.
    if entity is None:
        entity = Entity.create(data, collection, validate=validate)
    else:
        entity.update(data, collection, validate=validate)
    collection.touch()
    db.session.commit()
    # Drop any aggregator copy before re-indexing the DB version.
    delete_aggregator_entity(collection, entity.id)
    index.index_entity(entity, sync=sync)
    refresh_entity(entity.id, sync=sync)
    refresh_collection(collection.id, sync=sync)
    return entity.id
def delete_entity(collection, entity, deleted_at=None, sync=False):
    """Delete an entity (index dict) from its collection, recursively
    removing anything that references it."""
    # This is recursive and will also delete any entities which
    # reference the given entity. Usually this is going to be child
    # documents, or directorships referencing a person. It's a pretty
    # dangerous operation, though.
    entity_id = collection.ns.sign(entity.get("id"))
    for adjacent in index.iter_adjacent(entity):
        log.warning("Recursive delete: %r", adjacent)
        delete_entity(collection, adjacent, deleted_at=deleted_at, sync=sync)
    flush_notifications(entity_id, clazz=Entity)
    # The ID may refer to a DB entity, a document, or only an index record.
    obj = Entity.by_id(entity_id, collection=collection)
    if obj is not None:
        obj.delete()
    doc = Document.by_id(entity_id, collection=collection)
    if doc is not None:
        doc.delete()
    index.delete_entity(entity_id, sync=sync)
    # Remove references held by entity sets, mappings and xref results.
    EntitySetItem.delete_by_entity(entity_id)
    Mapping.delete_by_table(entity_id)
    xref_index.delete_xref(collection, entity_id=entity_id, sync=sync)
    delete_aggregator_entity(collection, entity_id)
    refresh_entity(collection, entity_id)
def generate_graph(args):
    """Build a weighted graph linking entities that co-occur in documents."""
    fields = ['id', 'collection', 'entities.uuid', 'entities.name',
              'entities.$schema']
    query = documents_query(args, fields=fields, facets=False)
    query = {'query': query['query']}
    graph = nx.MultiGraph()
    for doc in scan_iter(query):
        mentioned = set()
        for mention in doc.get('_source').get('entities', []):
            uuid = mention.get('uuid')
            if not graph.has_node(uuid):
                obj = Entity.by_id(uuid)
                # Skip entities missing from the database.
                if obj is None:
                    continue
                graph.add_node(uuid, label=obj.name, schema=obj.type)
            mentioned.add(uuid)
        # Each co-occurring pair in a document contributes one edge.
        for (src, dst) in combinations(mentioned, 2):
            graph.add_edge(src, dst, weight=1)
    graph = multigraph_to_weighted(graph)
    return paginate_graph(graph)
def similar(id):
    """List entities similar to the given entity."""
    ent = obj_or_404(Entity.by_id(id))
    check_authz(ent, authz.READ)
    return jsonify(similar_entities(ent, request.args))
def view(id):
    """Return a single entity after a read-access check."""
    ent = obj_or_404(Entity.by_id(id))
    check_authz(ent, authz.READ)
    return jsonify(ent)
def view(id):
    """Return a single entity, requiring read access on its collection."""
    ent = obj_or_404(Entity.by_id(id))
    authz.require(authz.collection_read(ent.collection_id))
    return jsonify(ent)
def view(id):
    """Return a single entity, requiring read access on its watchlist."""
    ent = obj_or_404(Entity.by_id(id))
    authz.require(authz.watchlist_read(ent.watchlist_id))
    return jsonify(ent)
def similar(id):
    """List similar entities, scoped to the user's readable (or, when
    requested, writeable) collections."""
    ent = obj_or_404(Entity.by_id(id))
    check_authz(ent, authz.READ)
    action = authz.WRITE if arg_bool('writeable') else authz.READ
    scope = authz.collections(action)
    return jsonify(similar_entities(ent, request.args, scope))
def get_db_entity(entity_id, action=Authz.READ):
    """Fetch an entity from the database, first consulting the index copy.

    Raises MethodNotAllowed when there is no database-backed entity.
    """
    # Result discarded — presumably this enforces access/existence via the
    # index; confirm against `get_index_entity`.
    get_index_entity(entity_id, action=action)
    db_obj = Entity.by_id(entity_id)
    if db_obj is None:
        raise MethodNotAllowed(description="Cannot write this entity")
    return db_obj
def view(id):
    """Return a single entity, logging the access event."""
    ent = obj_or_404(Entity.by_id(id))
    check_authz(ent, authz.READ)
    log_event(request, entity_id=ent.id)
    return jsonify(ent)
def view(id):
    """Return a single entity, requiring read access on its list."""
    ent = obj_or_404(Entity.by_id(id))
    authz.require(authz.list_read(ent.list_id))
    return jsonify(ent)