def _compute_workflow(dbstore):
    """Cache thread's target callable that computes the workflow.

    This runnable is called by the thread's run() method when the thread
    starts.  It computes the workflows data, serializes it, and stores it
    in _WORKFLOWS_JSON.  The computation repeats indefinitely, constantly
    updating _WORKFLOWS_JSON until the pinball_ui server stops.

    Args:
        dbstore: The store to retrieve runs status.
    """
    global _WORKFLOWS_JSON
    data_builder = DataBuilder(dbstore, use_cache=True)
    while True:
        try:
            LOG.info("Workflow data computation starting.")
            workflows_data = data_builder.get_workflows()
            schedules_data = data_builder.get_schedules()
            _WORKFLOWS_JSON = _serialize(workflows_data, schedules_data)
            LOG.info("Workflow data computation complete.")
        except Exception as e:
            LOG.exception(e)
        # TODO(mao): Tune this parameter depending on future
        # pinball user experience.
        # TODO(mao): Make this computation run at scheduled time intervals
        # and cancel the next execution if the previous job hasn't
        # finished.
        # Sleep outside the try block: previously a persistent failure
        # skipped the sleep and turned this loop into a hot spin.
        time.sleep(60 * 20)
def is_failed(self, store):
    """Return True iff the workflow finished in a non-success state.

    Args:
        store: The store to retrieve runs status.
    Returns:
        True when workflow data exists and its status is neither
        RUNNING nor SUCCESS; False otherwise.
    """
    builder = DataBuilder(store, use_cache=True)
    data = builder.get_workflow(self.workflow)
    if not data:
        return False
    return data.status not in (Status.RUNNING, Status.SUCCESS)
def get_context_data(self, **kwargs):
    """Populate the template context with the formatted token fields."""
    context = super(TokenView, self).get_context_data(**kwargs)
    token_name = self.request.GET['path']
    token_data = DataBuilder(DbStore()).get_token(token_name)
    # Merge every formatted token field into the context.
    context.update(token_data.format())
    return context
def get_context_data(self, **kwargs):
    """Populate the template context with the formatted schedule."""
    context = super(ScheduleView, self).get_context_data(**kwargs)
    workflow = self.request.GET['workflow']
    schedule_data = DataBuilder(DbStore()).get_schedule(workflow)
    # Merge the formatted schedule fields, then flatten the email list.
    context.update(schedule_data.format())
    context['emails'] = ' '.join(schedule_data.emails)
    return context
def schedules(_):
    """Return all workflow schedules serialized as a JSON HTTP response.

    Returns:
        HttpResponse with a JSON payload on success, or a 500 response
        carrying the traceback on failure.
    """
    try:
        data_builder = DataBuilder(DbStore())
        schedules_data = data_builder.get_schedules()
        schedules_json = _serialize(schedules_data)
    # Narrowed from a bare except, which also swallowed SystemExit and
    # KeyboardInterrupt.
    except Exception:
        LOG.exception('')
        return HttpResponseServerError(traceback.format_exc())
    else:
        # content_type replaces the deprecated mimetype kwarg, matching
        # the executions() view in this file.
        return HttpResponse(schedules_json, content_type='application/json')
def instances(request):
    """Return instances of a workflow serialized as a JSON HTTP response.

    Args:
        request: Django request; GET must contain 'workflow'.
    Returns:
        HttpResponse with a JSON payload on success, or a 500 response
        carrying the traceback on failure.
    """
    try:
        workflow = request.GET['workflow']
        data_builder = DataBuilder(DbStore(), use_cache=True)
        instances_data = data_builder.get_instances(workflow)
        instances_json = _serialize(instances_data)
    # Narrowed from a bare except, which also swallowed SystemExit and
    # KeyboardInterrupt.
    except Exception:
        LOG.exception('')
        return HttpResponseServerError(traceback.format_exc())
    else:
        # content_type replaces the deprecated mimetype kwarg, matching
        # the executions() view in this file.
        return HttpResponse(instances_json, content_type='application/json')
def token_paths(request):
    """Return token paths under a given path prefix as a JSON HTTP response.

    Args:
        request: Django request; GET must contain 'path'.
    Returns:
        HttpResponse with a JSON payload on success, or a 500 response
        carrying the traceback on failure.
    """
    try:
        path = request.GET['path']
        data_builder = DataBuilder(DbStore())
        tokens_data = data_builder.get_token_paths(path)
        tokens_json = _serialize(tokens_data)
    # Narrowed from a bare except, which also swallowed SystemExit and
    # KeyboardInterrupt.
    except Exception:
        LOG.exception('')
        return HttpResponseServerError(traceback.format_exc())
    else:
        # content_type replaces the deprecated mimetype kwarg, matching
        # the executions() view in this file.
        return HttpResponse(tokens_json, content_type='application/json')
def __init__(self, client, store, emailer):
    """Initialize the worker and its collaborators.

    Args:
        client: The client used to communicate with the master.
        store: The store backing the data builder.
        emailer: The emailer used to send notifications.
    """
    self._client = client
    self._emailer = emailer
    self._data_builder = DataBuilder(store)
    self._inspector = Inspector(client)
    self._name = get_unique_name()
    # Mutable run-state, populated as the worker claims work.
    self._owned_job_token = None
    self._lease_renewer = None
    self._executor = None
    # The lock synchronizes access to shared attributes between the worker
    # thread and the lease renewer thread.
    self._lock = threading.Lock()
    self._test_only_end_if_no_runnable = False
def jobs(request):
    """Return jobs of a workflow instance serialized as a JSON HTTP response.

    Args:
        request: Django request; GET must contain 'workflow' and
            'instance'.  The special instance value 'latest' resolves to
            the most recent instance of the workflow.
    Returns:
        HttpResponse with a JSON payload on success, or a 500 response
        carrying the traceback on failure.
    """
    try:
        data_builder = DataBuilder(DbStore(), use_cache=True)
        workflow = request.GET['workflow']
        instance = request.GET['instance']
        if instance == 'latest':
            instance = data_builder.get_latest_instance(workflow).instance
        jobs_data = data_builder.get_jobs(workflow, instance)
        jobs_json = _serialize(jobs_data)
    # Narrowed from a bare except, which also swallowed SystemExit and
    # KeyboardInterrupt.
    except Exception:
        LOG.exception('')
        return HttpResponseServerError(traceback.format_exc())
    else:
        # content_type replaces the deprecated mimetype kwarg, matching
        # the executions() view in this file.
        return HttpResponse(jobs_json, content_type='application/json')
def _get_running_instances(self, store):
    """Find running instances of the workflow.

    Args:
        store: The store to query for workflow instance status.
    Returns:
        List of running workflow instance names.
    """
    builder = DataBuilder(store, use_cache=True)
    return [instance.instance
            for instance in builder.get_instances(self.workflow)
            if instance.status == Status.RUNNING]
def file_content(request):
    """Return the content of a job execution log file as plain text.

    Args:
        request: Django request; GET must contain 'workflow', 'instance',
            'job', 'execution' (non-negative integer), and 'log_type'.
    Returns:
        HttpResponse with the file content on success, or a 500 response
        on a negative execution number or any failure.
    """
    try:
        workflow = request.GET['workflow']
        instance = request.GET['instance']
        job = request.GET['job']
        execution = int(request.GET['execution'])
        log_type = request.GET['log_type']
        if execution < 0:
            # str() is required here: concatenating the int directly
            # raised TypeError, so the intended error message was never
            # delivered to the client.
            return HttpResponseServerError(
                'execution must not be negative; got ' + str(execution))
        data_builder = DataBuilder(DbStore())
        file_data = data_builder.get_file_content(workflow, instance, job,
                                                  execution, log_type)
    # Narrowed from a bare except, which also swallowed SystemExit and
    # KeyboardInterrupt.
    except Exception:
        LOG.exception('')
        return HttpResponseServerError(traceback.format_exc())
    else:
        # content_type replaces the deprecated mimetype kwarg, matching
        # the executions() view in this file.
        return HttpResponse(file_data, content_type='text/plain')
def executions(request):
    """Return executions of a job serialized as a JSON HTTP response.

    Args:
        request: Django request; GET must contain 'workflow' and 'job'.
            If 'instance' is provided, only that instance's executions are
            returned; otherwise executions across all instances are.
    Returns:
        HttpResponse with a JSON payload on success, or a 500 response
        carrying the traceback on failure.
    """
    try:
        workflow = request.GET['workflow']
        instance = request.GET.get('instance')
        job = request.GET['job']
        data_builder = DataBuilder(DbStore())
        if instance:
            executions_data = data_builder.get_executions(
                workflow, instance, job)
        else:
            executions_data = data_builder.get_executions_across_instances(
                workflow, job)
        executions_json = _serialize(executions_data)
    # Narrowed from a bare except, which also swallowed SystemExit and
    # KeyboardInterrupt.
    except Exception:
        LOG.exception('')
        return HttpResponseServerError(traceback.format_exc())
    else:
        return HttpResponse(executions_json, content_type='application/json')
def graph(request):
    """Render a workflow graph as an SVG HTTP response.

    Args:
        request: Django request; GET must contain 'workflow'.  If
            'instance' is present (the value 'latest' resolves to the most
            recent instance), the graph reflects that instance's job data;
            otherwise the graph is built from the workflow parser.
    Returns:
        HttpResponse with SVG content on success, or a 500 response
        carrying the traceback on failure.
    """
    try:
        data_builder = DataBuilder(DbStore(), use_cache=True)
        workflow = request.GET['workflow']
        if 'instance' in request.GET:
            instance = request.GET['instance']
            if instance == 'latest':
                instance = data_builder.get_latest_instance(
                    workflow).instance
            jobs_data = data_builder.get_jobs(workflow=workflow,
                                              instance=instance)
            instance_data = data_builder.get_instance(workflow=workflow,
                                                      instance=instance)
            workflow_graph = WorkflowGraph(jobs_data, instance_data)
        else:
            workflow_graph = WorkflowGraph.from_parser(workflow)
    # Narrowed from a bare except, which also swallowed SystemExit and
    # KeyboardInterrupt.
    except Exception:
        LOG.exception('')
        return HttpResponseServerError(traceback.format_exc())
    else:
        # content_type replaces the deprecated mimetype kwarg, matching
        # the executions() view in this file.
        return HttpResponse(workflow_graph.get_svg(),
                            content_type='image/svg+xml')
def get_context_data(self, **kwargs):
    """Populate the template context with a formatted job execution."""
    context = super(ExecutionView, self).get_context_data(**kwargs)
    params = self.request.GET
    workflow = params['workflow']
    instance = params['instance']
    job = params['job']
    execution = int(params['execution'])
    execution_data = DataBuilder(DbStore()).get_execution(
        workflow, instance, job, execution)
    # Merge the formatted execution fields into the context.
    context.update(execution_data.format())
    # Render properties as a comma-separated list of key=value pairs.
    context['properties'] = ', '.join(
        '%s=%s' % item for item in execution_data.properties.items())
    # Blank out fields that have no meaningful value yet.
    if not execution_data.end_time:
        context['end_time'] = ''
    if execution_data.exit_code is None:
        context['exit_code'] = ''
    return context
def status(request):
    """Return system status (pending signals, master liveness) as JSON.

    Args:
        request: Django request; GET may contain 'workflow' and
            'instance' to scope the signal lookup.
    Returns:
        HttpResponse with a JSON list of status strings on success, or a
        500 response carrying the traceback on failure.
    """
    try:
        workflow = request.GET.get('workflow')
        instance = request.GET.get('instance')
        data_builder = DataBuilder(DbStore())
        status = []
        # Signals are mutually exclusive in priority order.
        if data_builder.is_signal_set(workflow, instance, Signal.EXIT):
            status = ['exiting']
        elif data_builder.is_signal_set(workflow, instance, Signal.ABORT):
            status = ['aborting']
        elif data_builder.is_signal_set(workflow, instance, Signal.DRAIN):
            status = ['draining']
        if not _is_master_alive():
            status.append('no master at %s:%d' % (socket.gethostname(),
                                                  PinballConfig.MASTER_PORT))
        status_json = json.dumps(status)
    # Narrowed from a bare except, which also swallowed SystemExit and
    # KeyboardInterrupt.
    except Exception:
        LOG.exception('')
        return HttpResponseServerError(traceback.format_exc())
    else:
        # content_type replaces the deprecated mimetype kwarg, matching
        # the executions() view in this file.
        return HttpResponse(status_json, content_type='application/json')
def setUp(self):
    """Create a fresh ephemeral store and a data builder backed by it."""
    store = EphemeralStore()
    self._store = store
    self._data_builder = DataBuilder(store)
def is_running(self, store):
    """Return True iff the workflow currently has status RUNNING.

    Args:
        store: The store to retrieve runs status.
    Returns:
        True when workflow data exists and its status is RUNNING;
        False otherwise.
    """
    builder = DataBuilder(store, use_cache=True)
    data = builder.get_workflow(self.workflow)
    return bool(data) and data.status == Status.RUNNING