Exemplo n.º 1
0
def _compute_workflow(dbstore):
    """Cache thread's target callable that computes the workflow.

    This runnable is called by the thread's run() method when the thread
    starts. It computes workflow data, serializes it, and stores it in
    _WORKFLOWS_JSON. The computation repeats indefinitely, refreshing
    _WORKFLOWS_JSON until the pinball_ui server stops.

    Args:
        dbstore: The store to retrieve runs status.
    """
    global _WORKFLOWS_JSON
    data_builder = DataBuilder(dbstore, use_cache=True)
    while True:
        try:
            LOG.info("Workflow data computation starting.")
            workflows_data = data_builder.get_workflows()
            schedules_data = data_builder.get_schedules()
            _WORKFLOWS_JSON = _serialize(workflows_data, schedules_data)
            LOG.info("Workflow data computation complete.")
        except Exception as e:
            LOG.exception(e)
        # Sleep unconditionally (success or failure).  Sleeping only on
        # success made a persistently-failing computation retry in a tight
        # loop, burning CPU and flooding the logs.
        # TODO(mao): Tune this parameter depending on future
        # pinball user experience.
        # TODO(mao): Make this computation run at scheduled time intervals
        # and cancel the next execution if the previous job hasn't
        # finished.
        time.sleep(60 * 20)
Exemplo n.º 2
0
 def is_failed(self, store):
     """Return True if the workflow exists and finished unsuccessfully.

     Args:
         store: The store to query for workflow status.
     Returns:
         True if the workflow's status is neither RUNNING nor SUCCESS;
         False if the workflow is unknown.
     """
     builder = DataBuilder(store, use_cache=True)
     data = builder.get_workflow(self.workflow)
     if data:
         return data.status not in (Status.RUNNING, Status.SUCCESS)
     return False
Exemplo n.º 3
0
 def get_context_data(self, **kwargs):
     """Populate the template context with the formatted token data.

     The token path is read from the 'path' GET parameter.
     """
     context = super(TokenView, self).get_context_data(**kwargs)
     token_name = self.request.GET['path']
     builder = DataBuilder(DbStore())
     token_data = builder.get_token(token_name)
     # Merge every formatted field of the token into the context.
     context.update(token_data.format())
     return context
Exemplo n.º 4
0
 def get_context_data(self, **kwargs):
     """Populate the template context with the formatted schedule.

     The workflow name is read from the 'workflow' GET parameter.
     """
     context = super(ScheduleView, self).get_context_data(**kwargs)
     workflow_name = self.request.GET['workflow']
     builder = DataBuilder(DbStore())
     schedule = builder.get_schedule(workflow_name)
     # Merge every formatted field of the schedule into the context.
     context.update(schedule.format())
     context['emails'] = ' '.join(schedule.emails)
     return context
Exemplo n.º 5
0
def schedules(_):
    """Return all workflow schedules as a JSON HTTP response.

    Args:
        _: The HTTP request (unused).
    Returns:
        HttpResponse with serialized schedules on success, otherwise
        HttpResponseServerError carrying the traceback.
    """
    try:
        data_builder = DataBuilder(DbStore())
        schedules_data = data_builder.get_schedules()
        schedules_json = _serialize(schedules_data)
    # Narrowed from a bare except: so SystemExit/KeyboardInterrupt still
    # propagate.
    except Exception:
        LOG.exception('')
        return HttpResponseServerError(traceback.format_exc())
    else:
        # content_type matches executions() below; the mimetype kwarg was
        # removed in Django 1.7.
        return HttpResponse(schedules_json, content_type='application/json')
Exemplo n.º 6
0
def instances(request):
    """Return instances of a workflow as a JSON HTTP response.

    Args:
        request: The HTTP request; expects a 'workflow' GET parameter.
    Returns:
        HttpResponse with serialized instances on success, otherwise
        HttpResponseServerError carrying the traceback.
    """
    try:
        workflow = request.GET['workflow']
        data_builder = DataBuilder(DbStore(), use_cache=True)
        instances_data = data_builder.get_instances(workflow)
        instances_json = _serialize(instances_data)
    # Narrowed from a bare except: so SystemExit/KeyboardInterrupt still
    # propagate.
    except Exception:
        LOG.exception('')
        return HttpResponseServerError(traceback.format_exc())
    else:
        # content_type matches executions() below; the mimetype kwarg was
        # removed in Django 1.7.
        return HttpResponse(instances_json, content_type='application/json')
Exemplo n.º 7
0
def token_paths(request):
    """Return token paths under a given path as a JSON HTTP response.

    Args:
        request: The HTTP request; expects a 'path' GET parameter.
    Returns:
        HttpResponse with serialized token paths on success, otherwise
        HttpResponseServerError carrying the traceback.
    """
    try:
        path = request.GET['path']
        data_builder = DataBuilder(DbStore())
        tokens_data = data_builder.get_token_paths(path)
        tokens_json = _serialize(tokens_data)
    # Narrowed from a bare except: so SystemExit/KeyboardInterrupt still
    # propagate.
    except Exception:
        LOG.exception('')
        return HttpResponseServerError(traceback.format_exc())
    else:
        # content_type matches executions() below; the mimetype kwarg was
        # removed in Django 1.7.
        return HttpResponse(tokens_json, content_type='application/json')
Exemplo n.º 8
0
 def __init__(self, client, store, emailer):
     """Initialize the worker.

     Args:
         client: The client used to talk to the master.
         store: The store used to build workflow/job data.
         emailer: The emailer used to send notifications.
     """
     self._client = client
     self._emailer = emailer
     self._data_builder = DataBuilder(store)
     # Job token currently owned by this worker, if any.
     self._owned_job_token = None
     # Unique name identifying this worker instance.
     self._name = get_unique_name()
     self._inspector = Inspector(client)
     # The lock synchronizes access to shared attributes between the worker
     # thread and the lease renewer thread.
     self._lock = threading.Lock()
     self._lease_renewer = None
     self._executor = None
     # Test hook: when True, the worker stops if there is nothing runnable.
     self._test_only_end_if_no_runnable = False
Exemplo n.º 9
0
def jobs(request):
    """Return jobs of a workflow instance as a JSON HTTP response.

    Args:
        request: The HTTP request; expects 'workflow' and 'instance' GET
            parameters.  The special instance value 'latest' resolves to
            the most recent instance of the workflow.
    Returns:
        HttpResponse with serialized jobs on success, otherwise
        HttpResponseServerError carrying the traceback.
    """
    try:
        data_builder = DataBuilder(DbStore(), use_cache=True)
        workflow = request.GET['workflow']
        instance = request.GET['instance']
        if instance == 'latest':
            instance = data_builder.get_latest_instance(workflow).instance
        jobs_data = data_builder.get_jobs(workflow, instance)
        jobs_json = _serialize(jobs_data)
    # Narrowed from a bare except: so SystemExit/KeyboardInterrupt still
    # propagate.
    except Exception:
        LOG.exception('')
        return HttpResponseServerError(traceback.format_exc())
    else:
        # content_type matches executions() below; the mimetype kwarg was
        # removed in Django 1.7.
        return HttpResponse(jobs_json, content_type='application/json')
Exemplo n.º 10
0
    def _get_running_instances(self, store):
        """Find running instances of the workflow.

        Args:
            store: The store to query for workflow instance status.
        Returns:
            List of running workflow instance names.
        """
        builder = DataBuilder(store, use_cache=True)
        return [inst.instance
                for inst in builder.get_instances(self.workflow)
                if inst.status == Status.RUNNING]
Exemplo n.º 11
0
def file_content(request):
    """Return the content of a job log file as a plain-text HTTP response.

    Args:
        request: The HTTP request; expects 'workflow', 'instance', 'job',
            'execution' (non-negative int) and 'log_type' GET parameters.
    Returns:
        HttpResponse with the file content on success, otherwise
        HttpResponseServerError carrying an error message or traceback.
    """
    try:
        workflow = request.GET['workflow']
        instance = request.GET['instance']
        job = request.GET['job']
        execution = int(request.GET['execution'])
        log_type = request.GET['log_type']
        if execution < 0:
            # Bug fix: the original concatenated a str with an int, which
            # raised TypeError instead of producing this message.
            return HttpResponseServerError(
                'execution must not be negative; got %d' % execution)
        data_builder = DataBuilder(DbStore())
        file_data = data_builder.get_file_content(workflow, instance, job,
                                                  execution, log_type)
    # Narrowed from a bare except: so SystemExit/KeyboardInterrupt still
    # propagate.
    except Exception:
        LOG.exception('')
        return HttpResponseServerError(traceback.format_exc())
    else:
        # content_type matches executions() below; the mimetype kwarg was
        # removed in Django 1.7.
        return HttpResponse(file_data, content_type='text/plain')
Exemplo n.º 12
0
def executions(request):
    """Return executions of a job as a JSON HTTP response.

    Args:
        request: The HTTP request; expects 'workflow' and 'job' GET
            parameters, and optionally 'instance'.  Without an instance,
            executions across all instances are returned.
    Returns:
        HttpResponse with serialized executions on success, otherwise
        HttpResponseServerError carrying the traceback.
    """
    try:
        workflow = request.GET['workflow']
        instance = request.GET.get('instance')
        job = request.GET['job']
        data_builder = DataBuilder(DbStore())
        if instance:
            executions_data = data_builder.get_executions(
                workflow, instance, job)
        else:
            executions_data = data_builder.get_executions_across_instances(
                workflow, job)
        executions_json = _serialize(executions_data)
    # Narrowed from a bare except: so SystemExit/KeyboardInterrupt still
    # propagate.
    except Exception:
        LOG.exception('')
        return HttpResponseServerError(traceback.format_exc())
    else:
        return HttpResponse(executions_json, content_type='application/json')
Exemplo n.º 13
0
def graph(request):
    """Return a workflow graph rendered as SVG.

    Args:
        request: The HTTP request; expects a 'workflow' GET parameter and
            optionally 'instance'.  With an instance ('latest' resolves to
            the most recent one), the graph reflects that instance's job
            states; without one, the graph comes from the workflow parser.
    Returns:
        HttpResponse with the SVG graph on success, otherwise
        HttpResponseServerError carrying the traceback.
    """
    try:
        data_builder = DataBuilder(DbStore(), use_cache=True)
        workflow = request.GET['workflow']
        if 'instance' in request.GET:
            instance = request.GET['instance']
            if instance == 'latest':
                instance = data_builder.get_latest_instance(workflow).instance
            jobs_data = data_builder.get_jobs(workflow=workflow,
                                              instance=instance)
            instance_data = data_builder.get_instance(workflow=workflow,
                                                      instance=instance)
            workflow_graph = WorkflowGraph(jobs_data, instance_data)
        else:
            workflow_graph = WorkflowGraph.from_parser(workflow)
    # Narrowed from a bare except: so SystemExit/KeyboardInterrupt still
    # propagate.
    except Exception:
        LOG.exception('')
        return HttpResponseServerError(traceback.format_exc())
    else:
        # content_type matches executions() above; the mimetype kwarg was
        # removed in Django 1.7.
        return HttpResponse(workflow_graph.get_svg(),
                            content_type='image/svg+xml')
Exemplo n.º 14
0
 def get_context_data(self, **kwargs):
     """Populate the template context with a single execution's details.

     Reads 'workflow', 'instance', 'job' and 'execution' GET parameters,
     merges the formatted execution data into the context, and blanks out
     end_time/exit_code for executions that have not finished.
     """
     context = super(ExecutionView, self).get_context_data(**kwargs)
     params = self.request.GET
     workflow = params['workflow']
     instance = params['instance']
     job = params['job']
     execution = int(params['execution'])
     builder = DataBuilder(DbStore())
     execution_data = builder.get_execution(workflow, instance, job,
                                            execution)
     context.update(execution_data.format())
     context['properties'] = ', '.join(
         '%s=%s' % pair for pair in execution_data.properties.items())
     if not execution_data.end_time:
         context['end_time'] = ''
     if execution_data.exit_code is None:
         context['exit_code'] = ''
     return context
Exemplo n.º 15
0
def status(request):
    """Return global system status as a JSON HTTP response.

    Reports any pending signal (exit/abort/drain) for the optional
    'workflow'/'instance' GET-parameter scope, and appends a warning if
    the master is not reachable.

    Args:
        request: The HTTP request; 'workflow' and 'instance' GET
            parameters are optional.
    Returns:
        HttpResponse with a JSON list of status strings on success,
        otherwise HttpResponseServerError carrying the traceback.
    """
    try:
        workflow = request.GET.get('workflow')
        instance = request.GET.get('instance')
        data_builder = DataBuilder(DbStore())
        # Renamed from 'status' to avoid shadowing this view function.
        status_list = []
        if data_builder.is_signal_set(workflow, instance, Signal.EXIT):
            status_list = ['exiting']
        elif data_builder.is_signal_set(workflow, instance, Signal.ABORT):
            status_list = ['aborting']
        elif data_builder.is_signal_set(workflow, instance, Signal.DRAIN):
            status_list = ['draining']
        if not _is_master_alive():
            status_list.append('no master at %s:%d' % (
                socket.gethostname(), PinballConfig.MASTER_PORT))
        status_json = json.dumps(status_list)
    # Narrowed from a bare except: so SystemExit/KeyboardInterrupt still
    # propagate.
    except Exception:
        LOG.exception('')
        return HttpResponseServerError(traceback.format_exc())
    else:
        # content_type matches executions() above; the mimetype kwarg was
        # removed in Django 1.7.
        return HttpResponse(status_json, content_type='application/json')
Exemplo n.º 16
0
 def setUp(self):
     """Create a fresh in-memory store and data builder for each test."""
     # EphemeralStore keeps everything in memory, so tests need no
     # external database.
     self._store = EphemeralStore()
     self._data_builder = DataBuilder(self._store)
Exemplo n.º 17
0
 def is_running(self, store):
     """Return True if the workflow currently has RUNNING status.

     Args:
         store: The store to query for workflow status.
     Returns:
         True only when the workflow is known and its status is RUNNING.
     """
     builder = DataBuilder(store, use_cache=True)
     data = builder.get_workflow(self.workflow)
     return bool(data) and data.status == Status.RUNNING