def _publish_complete(self, task):
    """Publish the final completion message for a task group.

    Triggered when all tasks in a group have completed, and also when
    tasks are stopped because of failures. Collects the warnings and
    errors from every task in the step and sends the summary as the
    final callback message to the user of the queue.

    :param task: task whose step has finished
    :return: None
    """
    all_tasks = get_tasks_for_stepid(task.stepid)

    # Flatten the per-task summary lists into one list each.
    warnings = [w for t in all_tasks for w in t.summary['warnings']]
    errors = [e for t in all_tasks for e in t.summary['errors']]

    msg = {
        **task.extra_msg,
        'header': {
            'jobid': task.jobid,
            'stepid': task.stepid,
            **task.extra_header,
        },
        'summary': {
            'warnings': warnings,
            'errors': errors,
        }
    }
    publish(WORKFLOW_EXCHANGE, task.key_prefix + '.' + TASK_COMPLETE, msg)
def test_publish(self, mock_async_connection):
    """publish() should open an async connection and forward its arguments."""
    exchange = 'exchange'
    key = 'key'
    msg = 'msg'

    publish(exchange, key, msg)

    # The connection is used as a context manager; the published message
    # must be delivered on the object returned by __enter__.
    connection = mock_async_connection.return_value.__enter__.return_value
    connection.publish.assert_called_with(exchange, key, msg)
    mock_async_connection.assert_called_with(CONNECTION_PARAMS)
def end_of_workflow(self, msg):
    """Finish a workflow: fire the optional completion hook, then end the job.

    If the message header carries an 'on_workflow_complete' dict with
    'exchange' and 'key' entries, the message (with that entry popped
    from its header) is re-published there before the job is ended.

    :param msg: workflow message with a 'header' dict
    :return: None
    """
    logger.configure(msg, "WORKFLOW")
    on_complete = msg['header'].pop('on_workflow_complete', None)
    if on_complete is not None:
        # The hook must be a dict providing both routing parts.
        is_valid = isinstance(on_complete, dict) and \
            all(key in on_complete for key in ['exchange', 'key'])
        if is_valid:
            publish(on_complete['exchange'], on_complete['key'], msg)
            logger.info(f"Publish on_workflow_complete to {on_complete['exchange']} with {on_complete['key']}")
        else:
            logger.error("on_workflow_complete should be a dict with keys 'exchange' and 'key'")
    logger.info("End of workflow")
    job_end(msg["header"].get("jobid"))
def publish_job(self, name, request):
    """Build and publish a workflow-request message for a start command.

    Looks up the start command by name, extracts and validates its
    arguments from the request, then publishes the workflow request.

    :param name: name of the registered start command
    :param request: incoming request holding the command arguments
    :return: the message that was published
    """
    command = self.startcommands.get(name)
    args = self._extract_args(command, request)
    command.validate_arguments(args)

    workflow = {'workflow_name': command.workflow}
    # Only include an explicit start step when the command defines one.
    if command.start_step:
        workflow['step_name'] = command.start_step

    msg = {
        'workflow': workflow,
        'header': args,
    }
    publish(WORKFLOW_EXCHANGE, WORKFLOW_REQUEST_KEY, msg)
    return msg
def kvk_endpoint():
    """Accepts KvK update berichten sent by KvK.

    Decodes the raw request body, stores it as a KvkUpdateMessage and
    publishes the stored message id on the message exchange.

    :return: plain-text acknowledgement response
    """
    raw_bericht = request.data.decode('utf-8')
    kvk_bericht = KvkUpdateBericht(raw_bericht)

    message = KvkUpdateMessage()
    message.message = raw_bericht
    message.kvk_nummer = kvk_bericht.get_kvk_nummer()

    with DatabaseSession() as session:
        message = KvkUpdateMessageRepository(session).save(message)
        # Publish while the session is open so message.id is available.
        publish(MESSAGE_EXCHANGE, KVK_MESSAGE_KEY, {'message_id': message.id})
    return Response('OK. Message received. Thank you, good bye.')
def _queue_task(self, task):
    """Publish a task request message and mark the task as queued.

    :param task: Task object to queue
    :return: None
    """
    header = {
        'jobid': task.jobid,
        'stepid': task.stepid,
        'process_id': task.process_id,
        **task.extra_header,
    }
    msg = {
        **task.extra_msg,
        'taskid': task.id,
        'id': task.name,
        'header': header,
    }
    publish(WORKFLOW_EXCHANGE, task.key_prefix + '.' + TASK_REQUEST, msg)

    # Record the queued status together with the queueing timestamp.
    task_update({
        'status': self.STATUS_QUEUED,
        'id': task.id,
        'start': datetime.now(),  # NOTE(review): naive local time — confirm UTC is not expected
    })
def start_step(key, msg):
    """Publish the request message that starts the workflow step for *key*."""
    routing_key = f"{key}.request"
    publish(WORKFLOW_EXCHANGE, routing_key, msg)
# Command-line interface: parse the export arguments and publish an
# "export.start" message on the export queue.
parser.add_argument(
    'catalogue',
    type=str,
    # Fixed: help text had an unbalanced parenthesis.
    help='the name of the data catalog (example: "meetbouten")')
parser.add_argument(
    'collection',
    type=str,
    help='the name of the data collection (example: "meetbouten")')
parser.add_argument(
    'filename',
    type=str,
    help='the name of the file to write the output to (example: "MBT_MEETBOUT.dat")')
parser.add_argument(
    'destination',
    nargs='?',
    type=str,
    default="Objectstore",
    choices=["Objectstore", "File"],
    help='destination, default is Objectstore')
args = parser.parse_args()

export_args = {
    "catalogue": args.catalogue,
    "collection": args.collection,
    "filename": args.filename,
    "destination": args.destination,
}
publish(EXPORT_QUEUE, "export.start", export_args)
def handle_result(msg):
    """Re-publish *msg* on its workflow hook key, if one is configured."""
    key = _get_hook_key(msg)
    if key:
        publish(WORKFLOW_EXCHANGE, key, msg)
def on_workflow_progress(msg):
    """Forward a workflow progress message to its hook key, if configured."""
    hook_key = _get_hook_key(msg)
    if not hook_key:
        # No hook registered for this message; nothing to forward.
        return
    publish(WORKFLOW_EXCHANGE, hook_key, msg)