def log_object_saved(sender, instance, fields_changed, cached_data):
    """Signal listener: invalidate the saved object's v2 API URL.

    If the instance exposes an ``absolute_api_v2_url``, enqueue a
    ``ban_url`` task for it; instances without that attribute (or with
    it unset) are ignored.
    """
    url = getattr(instance, 'absolute_api_v2_url', None)
    if url is not None:
        enqueue_task(ban_url.s(url))
def after_register(src, dst, user):
    """Blinker listener for registration initiations.

    Enqueues a chain of archive tasks for the registration and its
    primary descendants. Child registrations only run the
    ``before_archive`` hook; the top-level registration enqueues the
    whole chain.

    :param src: Node being registered
    :param dst: registration Node
    :param user: registration initiator
    """
    archiver_utils.before_archive(dst, user)
    if dst.root != dst:
        # Not a top-level registration; the root enqueues the chain.
        return
    signatures = [
        archive.si(job_pk=node.archive_job._id)
        for node in node_and_primary_descendants(dst)
    ]
    handlers.enqueue_task(celery.chain(*signatures))
def after_register(src, dst, user):
    """Blinker listener for registration initiations.

    Enqueues a chain of archive tasks for the registration and its
    primary descendants. Child registrations only run the
    ``before_archive`` hook; the top-level registration enqueues the
    whole chain.

    :param src: Node being registered
    :param dst: registration Node
    :param user: registration initiator
    """
    # Prevent circular import with app.py
    from website.archiver import tasks
    archiver_utils.before_archive(dst, user)
    if dst.root != dst:  # if not top-level registration
        return
    # BUG FIX: ``tasks.archive(job_pk=...)`` calls the task function
    # immediately (synchronously, outside the worker); ``.si(...)`` builds
    # an immutable signature so the work actually runs inside the chain.
    archive_tasks = [
        tasks.archive.si(job_pk=node.archive_job._id)
        for node in dst.node_and_primary_descendants()
    ]
    # Unpack the signatures, matching celery's canonical chain(*sigs) form.
    handlers.enqueue_task(
        celery.chain(*archive_tasks)
    )
def update_node(node_id, updated_fields):
    """Apply ``updated_fields`` to the node, via Celery when enabled.

    Dispatches ``_update_node`` as an async task signature when
    ``settings.USE_CELERY`` is set; otherwise runs it synchronously
    in-process.
    """
    if not settings.USE_CELERY:
        _update_node(node_id, updated_fields)
        return
    enqueue_task(_update_node.s(node_id, updated_fields))
@requires_search
def search(query, index=None, doc_type=None):
    """Run ``query`` against the search backend.

    Falls back to the configured ``ELASTIC_INDEX`` when no index is given.
    """
    index = index or settings.ELASTIC_INDEX
    return search_engine.search(query, index=index, doc_type=doc_type)


@requires_search
def update_node(node, index=None, bulk=False, async=True):
    # NOTE(review): ``async`` became a reserved keyword in Python 3.7;
    # this signature is Python-2-only as written. Renaming it would break
    # keyword callers, so it is left untouched here.
    """Update a node's search document, asynchronously by default.

    When ``async`` is truthy, hands off to ``update_node_async`` (queued
    through Celery when ``USE_CELERY`` is set); otherwise updates the
    document synchronously and returns the backend's result.
    """
    if async:
        node_id = node._id
        # We need the transaction to be committed before trying to run celery tasks.
        # For example, when updating a Node's privacy, is_public must be True in the
        # database in order for method that updates the Node's elastic search document
        # to run correctly.
        if settings.USE_CELERY:
            enqueue_task(search_engine.update_node_async.s(node_id=node_id, index=index, bulk=bulk))
        else:
            search_engine.update_node_async(node_id=node_id, index=index, bulk=bulk)
    else:
        index = index or settings.ELASTIC_INDEX
        return search_engine.update_node(node, index=index, bulk=bulk)


@requires_search
def bulk_update_nodes(serialize, nodes, index=None):
    """Bulk-update search documents for ``nodes`` using ``serialize``."""
    index = index or settings.ELASTIC_INDEX
    search_engine.bulk_update_nodes(serialize, nodes, index=index)


@requires_search
def delete_node(node, index=None):
    # NOTE(review): this definition appears truncated in the visible
    # source — the body ends after computing ``doc_type`` without using
    # it. Confirm the remainder (presumably the backend delete call)
    # against the full file.
    index = index or settings.ELASTIC_INDEX
    doc_type = node.project_or_component
def search(query, index=None, doc_type=None): index = index or settings.ELASTIC_INDEX return search_engine.search(query, index=index, doc_type=doc_type) @requires_search def update_node(node, index=None, bulk=False, async=True): if async: node_id = node._id # We need the transaction to be committed before trying to run celery tasks. # For example, when updating a Node's privacy, is_public must be True in the # database in order for method that updates the Node's elastic search document # to run correctly. if settings.USE_CELERY: enqueue_task( search_engine.update_node_async.s(node_id=node_id, index=index, bulk=bulk)) else: search_engine.update_node_async(node_id=node_id, index=index, bulk=bulk) else: index = index or settings.ELASTIC_INDEX return search_engine.update_node(node, index=index, bulk=bulk) @requires_search def bulk_update_nodes(serialize, nodes, index=None): index = index or settings.ELASTIC_INDEX search_engine.bulk_update_nodes(serialize, nodes, index=index)