def send_group_chat_notifications(sender_username, text, group_id):
    """Fan out one per-recipient notification task for a group chat message.

    Finds the most recent bookkeeping ``Task`` row (status "starting") that
    matches this invocation's payload, marks it "processing", enqueues a
    ``user.send_group_chat_notification`` Celery task for every group member
    other than the sender (persisting a child ``Task`` row per recipient),
    then records success or the captured error on the bookkeeping row.
    """
    with app.app_context():
        group = Group.query.filter_by(id=group_id).first()

        # Locate the bookkeeping row created when this job was scheduled.
        parent_task = (
            Task.query.filter(
                and_(
                    Task.name == "send_group_chat_notifications",
                    Task.payload["sender_username"].as_string() == sender_username,
                    Task.payload["text"].as_string() == text,
                    Task.payload["group_id"].as_integer() == group_id,
                    Task.status == "starting",
                )
            )
            .order_by(Task.started_at.desc())
            .first()
        )
        parent_task.status = "processing"
        parent_task.save_to_db(db)

        # TODO: @prithajnath Need a context manager to retry this block
        error = None
        try:
            for membership in group.users:
                member = membership.user
                if sender_username == member.username:
                    # Don't notify the author of the message.
                    continue

                child_task = Task(
                    name="send_group_chat_notification",
                    started_at=datetime.now(),
                    status="starting",
                    payload={
                        "receiver_email": member.email,
                        "receiver_username": member.username,
                        "sender_username": sender_username,
                        "group_name": group.name,
                        "text": text,
                    },
                )
                child_task.save_to_db(db)
                app_celery.send_task(
                    "user.send_group_chat_notification",
                    (
                        member.email,
                        member.username,
                        sender_username,
                        group.name,
                        text,
                    ),
                )
        except Exception as e:
            error = str(e)

        # Finalize the bookkeeping row, recording any captured failure.
        parent_task.status = "finished"
        if error:
            parent_task.error = error
        parent_task.finished_at = datetime.now()
        parent_task.save_to_db(db)
def run_github_aggregator(cls, location):
    """Index all GitHub users found at *location* into Elasticsearch and
    queue a contribution-indexing task for each.

    :param location: Location string used to look up GitHub users.
    """
    for user in cls.get_github_users_by_location(location=location):
        # Serialize once; the original built UserSerializer(user).data twice
        # per user, doing the same work for both the index call and the task.
        serialized = UserSerializer(user).data
        ElasticSearchIndexerServices.index_user_in_elasticsearch(
            user_id=user.user_id,
            user_serializer_data=serialized,
        )
        celery.send_task(
            "index_user_contributions",
            [user.user_id, user.user_profile_page, serialized],
        )
def train(project_train_task):
    """Trains the machine learning model using the celery task name.

    :param project_train_task: Celery task name for training the machine
        learning model of the project.
    """
    # Celery task must be defined in the Celery beat schedule
    if project_train_task in Config.CELERYBEAT_SCHEDULE:
        print("Executing the celery task '{}' in parallel".format(
            project_train_task))
        # FIX: send_task() does not forward arbitrary keyword arguments to the
        # task — ``force_retrain=True`` was swallowed as a message option and
        # never reached the worker. Task kwargs must go in the ``kwargs`` dict.
        celery.send_task(project_train_task, kwargs={"force_retrain": True})
    else:
        print("'{}' is not a valid celery task name for training a machine "
              "learning model".format(project_train_task))
def make_pairs(group_id):
    """Run the secret-santa pairing pipeline for a group.

    Marks the matching bookkeeping ``Task`` row as processing, runs the
    pairing chain, then either records the retry error or refreshes the
    pairs view and enqueues one ``user.send_secret_santa_email`` task per
    giver.
    """
    with app.app_context():
        task = Task.query.filter(
            and_(
                Task.status == "starting",
                Task.payload["group_id"].as_integer() == group_id,
            )
        ).first()
        task.status = "processing"
        task.save_to_db(db)

        result = chain(_make_pairs_async, _make_pairs, pipe={"group_id": group_id})

        task.status = "finished"
        task.finished_at = datetime.now()
        task.save_to_db(db)

        # Update pair creation status
        # FIX: use isinstance() instead of ``type(result) == RetryException``
        # so subclasses of RetryException are also treated as failures.
        if isinstance(result, RetryException):
            task.error = str(result)
            task.save_to_db(db)
        else:
            final_pairs = result["final_pairs"]
            # Refresh materialized view manually just to be safe
            all_latest_pairs_view.refresh()
            for giver in final_pairs:
                send_email_task = Task(
                    name="send_secret_santa_email",
                    payload={
                        "email": giver,
                        "group_id": group_id,
                    },
                    started_at=datetime.now(),
                    status="starting",
                )
                send_email_task.save_to_db(db)
                app_celery.send_task("user.send_secret_santa_email",
                                     (giver, group_id))
def post(self) -> Dict:
    """Queue an image-scraping task for the URL supplied in the request body.

    Returns:
        Dict: mapping with the Celery task ID under ``"task_id"``.
    """
    body = PageImageTask.parser.parse_args()
    scrape_task = celery.send_task("celery_scrape_images", args=[body["url"]])
    return {"task_id": scrape_task.id}
def ping():
    """Handle the 'workers' page: dispatch a ping task to the master queue
    and render a page that polls it via ajax.

    :return: An HTML response with embedded javascript.
    """
    ping_task = celery.send_task('task.ping', [],
                                 queue='master',
                                 routing_key='master')
    State.tasks[ping_task.id] = ping_task
    page = render_template('ping.html',
                           title='Chorus - Workers',
                           url='ajax_ping/%s' % ping_task.id)
    return make_response(page)
def queue():
    """Display the queue history page, backed by a fresh ping task.

    :return: An HTML response.
    """
    ping_task = celery.send_task('task.ping', [],
                                 queue='master',
                                 routing_key='master')
    State.tasks[ping_task.id] = ping_task
    return render_template('queue.html',
                           title='Chorus - Queue',
                           url='/ajax_queue/%s' % ping_task.id,
                           validation=False)
def status(host):
    """Display the containers running on an individual host.

    :param host: The host in question, as a string.
    :return: HTML response containing the web page.
    """
    ping_task = celery.send_task('task.ping', [],
                                 queue='master',
                                 routing_key='master')
    State.tasks[ping_task.id] = ping_task
    page = render_template('status.html',
                           title='Chorus - %s' % host,
                           host=host,
                           url='ajax_status/%s/%s' % (ping_task.id, host))
    return make_response(page)
def enqueue_task(config: dict):
    """Persist a pending mwoffliner task document and dispatch it to Celery.

    :param config: offliner configuration forwarded to the worker.
    :return: the MongoDB id of the inserted task document.
    :raises errors.InternalError: when the document fails schema validation.
    """
    document = {
        'status': TaskStatus.PENDING.name,
        'timestamp': {
            'creation': datetime.utcnow(),
            'termination': None,
        },
        'offliner': {
            'name': "mwoffliner",
            'config': config,
        },
        'logs': [],
    }

    # Reject malformed documents before they ever reach the database.
    if not Validator(Tasks.schema).validate(document):
        raise errors.InternalError()

    task_id = Tasks().insert_one(document).inserted_id
    # Reuse the DB id as the Celery task id so the two can be correlated.
    celery.send_task('mwoffliner',
                     task_id=str(task_id),
                     kwargs={'offliner_config': config})
    return task_id
def schedule(options):
    """Create a database entry for a scheduled build based on the 'options'
    dict provided by the user via the web form, and dispatch it to the feeder.

    :param options: A dictionary of values that define the build parameters.
    :return: None
    """
    # Flush any pending session state before creating the build row.
    db.session.commit()

    build = models.Build(
        bid=options["bid"],
        created=datetime.datetime.utcnow(),
        options=options,
        scheduled=True,
        state="Scheduled",
    )
    build.task = celery.send_task(
        "task.schedule",
        [build.options, app.config["CELERY_TIMEOUT"]],
        queue="feeder",
        routing_key="feeder",
    )
    # Persist only the id — the AsyncResult itself is not a DB column.
    build.task_id = build.task.id
    build.start = datetime.datetime.utcnow()

    db.session.add(build)
    db.session.commit()
def ajax_stop_container(host, bid):
    """Ajax endpoint (from '/status') that sends a kill order for a build's
    container on a given host.

    :param host: The host in question.
    :param bid: The bid associated with this build, used to find the container.
    :return: JSON informing the page javascript of success (1) or failure (0).
    """
    worker = 'celery@' + host
    kill_task = celery.send_task('task.kill', [worker, bid],
                                 queue='master', routing_key='master')
    try:
        kill_task.get(timeout=TIMEOUT)
        # Update State to Indicate Stopped
        build = models.Build.query.filter_by(bid=bid).first()
        if build is not None:
            build.state = 'Stopped'
            build.scheduled = True
            db.session.commit()
        reply = 1
    except Exception as e:
        app.logger.error(str(e))
        reply = 0
    return jsonify(status=reply)
def ajax_queue_stop_container(bid):
    """Ajax endpoint (from '/queue' or '/dockerfile') that sends a kill
    signal to a build's container.

    :param bid: The bid associated with this build, used to find the container.
    :return: A redirect back to the /queue page to show updated status.
    """
    build = models.Build.query.filter_by(bid=bid).first()
    if build is None:
        reply = 0
    else:
        kill_task = celery.send_task('task.kill',
                                     [build.options['host'], bid],
                                     queue='master', routing_key='master')
        try:
            kill_task.get(timeout=TIMEOUT)
            # Update State to Indicate Stopped
            build = models.Build.query.filter_by(bid=bid).first()
            if build is not None:
                build.state = 'Stopped'
                build.scheduled = True
                db.session.commit()
            reply = 1
        except Exception as e:
            app.logger.error(str(e))
            reply = 0
    # NOTE(review): ``reply`` is computed but never returned — the page
    # always redirects; confirm whether a status response was intended.
    return make_response(redirect('/queue'))
def submit():
    """Validate the JSON request body and dispatch it as a 'tasks.submit'
    Celery task, returning the wrapped task response."""
    payload = request.get_json()
    validate_options(payload)
    submit_task = celery.send_task('tasks.submit', args=[payload])
    return get_response(submit_task)
def send_remote_task(task, args):
    """Dispatch a Celery task by name and return its AsyncResult.

    :param task: registered Celery task name.
    :param args: positional arguments forwarded to the task.
    :return: the ``AsyncResult`` handle for the dispatched task.
    """
    # FIX: the original assigned the result to a local shadowing ``task``
    # and discarded it; return it so callers can poll the task id/state.
    # Callers that ignore the return value are unaffected.
    return celery.send_task(task, args)