def publish_experiment_job_log(self,
                               log_line,
                               status,
                               experiment_uuid,
                               experiment_name,
                               job_uuid,
                               task_type=None,
                               task_idx=None):
    """Fan out one experiment-job log line.

    The line is always handed off to the events Celery task; when Redis
    reports that the job or its experiment is being watched, the line is
    additionally streamed on the internal exchange. Streaming is best
    effort: Redis and broker failures never propagate to the caller.
    """
    try:
        # The sidecar may hand us raw bytes; plain str has no .decode.
        log_line = log_line.decode('utf-8')
    except AttributeError:
        pass

    self._logger.info("Publishing log event for task: %s.%s, %s",
                      task_type, task_idx, experiment_name)
    task_kwargs = {
        'experiment_name': experiment_name,
        'experiment_uuid': experiment_uuid,
        'job_uuid': job_uuid,
        'log_line': log_line,
        'task_type': task_type,
        'task_idx': task_idx,
    }
    celery_app.send_task(
        EventsCeleryTasks.EVENTS_HANDLE_LOGS_EXPERIMENT_JOB,
        kwargs=task_kwargs)

    # An unreachable Redis is treated as "nobody is watching".
    try:
        should_stream = (
            RedisToStream.is_monitored_job_logs(job_uuid) or
            RedisToStream.is_monitored_experiment_logs(experiment_uuid))
    except RedisError:
        should_stream = False
    if not should_stream:
        return

    self._logger.info("Streaming new log event for experiment: %s",
                      experiment_uuid)
    stream_routing_key = '{}.{}.{}'.format(
        RoutingKeys.LOGS_SIDECARS, experiment_uuid, job_uuid)
    with celery_app.producer_or_acquire(None) as producer:
        try:
            producer.publish(
                {
                    'experiment_uuid': experiment_uuid,
                    'job_uuid': job_uuid,
                    'log_line': log_line,
                    'status': status,
                    'task_type': task_type,
                    'task_idx': task_idx
                },
                routing_key=stream_routing_key,
                exchange=settings.INTERNAL_EXCHANGE,
            )
        except (TimeoutError, AMQPError):
            # Never let a broker hiccup break log publishing.
            pass
def publish_log(log_line,
                status,
                experiment_uuid,
                experiment_name,
                job_uuid,
                task_type=None,
                task_idx=None):
    """Publish one experiment-job log line.

    The line is always dispatched to the ``handle_events_job_logs`` task;
    when Redis reports the job or experiment is being monitored, it is
    also streamed on the internal exchange. Streaming is best effort:
    Redis and broker errors are swallowed.
    """
    try:
        # Sidecar output may be bytes; str has no .decode.
        log_line = log_line.decode('utf-8')
    except AttributeError:
        pass
    # Lazy %-style logging args: no string formatting happens when INFO
    # is disabled, and this matches the logging style of the sibling
    # publishers in this module (which use "%s" placeholders).
    logger.info("Publishing log event for task: %s.%s, %s",
                task_type, task_idx, experiment_name)
    handle_events_job_logs.delay(experiment_name=experiment_name,
                                 experiment_uuid=experiment_uuid,
                                 job_uuid=job_uuid,
                                 log_line=log_line,
                                 task_type=task_type,
                                 task_idx=task_idx)
    # Treat an unreachable Redis as "nobody is watching".
    try:
        should_stream = (
            RedisToStream.is_monitored_job_logs(job_uuid) or
            RedisToStream.is_monitored_experiment_logs(experiment_uuid))
    except RedisError:
        should_stream = False
    if should_stream:
        logger.info("Streaming new log event for experiment: %s",
                    experiment_uuid)
        with celery_app.producer_or_acquire(None) as producer:
            try:
                producer.publish(
                    {
                        'experiment_uuid': experiment_uuid,
                        'job_uuid': job_uuid,
                        'log_line': log_line,
                        'status': status,
                        'task_type': task_type,
                        'task_idx': task_idx
                    },
                    routing_key='{}.{}.{}'.format(RoutingKeys.LOGS_SIDECARS,
                                                  experiment_uuid,
                                                  job_uuid),
                    exchange=settings.INTERNAL_EXCHANGE,
                )
            except (TimeoutError, AMQPError):
                # Streaming must never fail log publishing.
                pass
def publish_experiment_job_log(self,
                               log_lines,
                               status,
                               experiment_uuid,
                               experiment_name,
                               job_uuid,
                               task_type=None,
                               task_idx=None):
    """Fan out a batch of experiment-job log lines.

    The lines are always dispatched to the events Celery task; when
    Redis says the job or experiment is being watched, they are also
    streamed on the internal exchange. Redis and broker failures are
    swallowed so publishing never raises.
    """
    self._logger.debug("Publishing log event for task: %s.%s, %s",
                       task_type, task_idx, experiment_name)
    celery_app.send_task(
        EventsCeleryTasks.EVENTS_HANDLE_LOGS_EXPERIMENT_JOB,
        kwargs={
            'experiment_name': experiment_name,
            'experiment_uuid': experiment_uuid,
            'job_uuid': job_uuid,
            'log_lines': log_lines,
            'task_type': task_type,
            'task_idx': task_idx})

    # An unreachable Redis means "nobody is watching".
    try:
        watched = (
            RedisToStream.is_monitored_job_logs(job_uuid) or
            RedisToStream.is_monitored_experiment_logs(experiment_uuid))
    except RedisError:
        watched = False
    if not watched:
        return

    self._logger.info("Streaming new log event for experiment: %s",
                      experiment_uuid)
    message = {
        'experiment_uuid': experiment_uuid,
        'job_uuid': job_uuid,
        'log_lines': log_lines,
        'status': status,
        'task_type': task_type,
        'task_idx': task_idx
    }
    with celery_app.producer_or_acquire(None) as producer:
        try:
            producer.publish(
                message,
                routing_key='{}.{}.{}'.format(RoutingKeys.LOGS_SIDECARS,
                                              experiment_uuid,
                                              job_uuid),
                exchange=settings.INTERNAL_EXCHANGE,
            )
        except (TimeoutError, AMQPError):
            # Best-effort streaming only.
            pass
def _stream_job_log(self, job_uuid, log_lines, routing_key):
    """Best-effort stream of job log lines on the internal exchange.

    Nothing is published when the job is not monitored or when Redis is
    unreachable; broker timeouts and AMQP errors are swallowed.
    """
    try:
        monitored = RedisToStream.is_monitored_job_logs(job_uuid)
    except RedisError:
        # Unreachable Redis: assume nobody is watching.
        monitored = False
    if not monitored:
        return

    self._logger.info("Streaming new log event for job: %s", job_uuid)
    message = {
        'job_uuid': job_uuid,
        'log_lines': log_lines,
    }
    with celery_app.producer_or_acquire(None) as producer:
        try:
            producer.publish(
                message,
                routing_key='{}.{}'.format(routing_key, job_uuid),
                exchange=settings.INTERNAL_EXCHANGE,
            )
        except (TimeoutError, AMQPError):
            # Streaming must never raise into the caller.
            pass