Ejemplo n.º 1 (0 votos)
def _compute_workflow(dbstore):
    """Cache thread's target callable that computes the workflow.

    This runnable is called by the thread's run() method when the thread
    starts. It computes the workflows data, serializes it, and stores it
    in _WORKFLOWS_JSON. The computation repeats indefinitely, refreshing
    _WORKFLOWS_JSON until the pinball_ui server stops.

    Args:
        dbstore: The store to retrieve runs status.
    """
    global _WORKFLOWS_JSON
    data_builder = DataBuilder(dbstore, use_cache=True)
    while True:
        try:
            LOG.info("Workflow data computation starting.")
            workflows_data = data_builder.get_workflows()
            schedules_data = data_builder.get_schedules()
            _WORKFLOWS_JSON = _serialize(workflows_data, schedules_data)
            LOG.info("Workflow data computation complete.")
        except Exception as e:
            LOG.exception(e)
        # Sleep outside the try so a failed iteration also waits before
        # retrying, instead of busy-looping on a persistent error.
        # TODO(mao): Tune this parameter depending on future
        # pinball user experience.
        # TODO(mao): Make this computation run at scheduled time intervals
        # and cancel the next execution if the previous job hasn't
        # finished.
        time.sleep(60 * 20)
Ejemplo n.º 2 (0 votos)
def _compute_workflow(dbstore):
    """Cache thread's target callable that computes the workflow.

    This runnable is called by the thread's run() method when the thread
    starts. It computes the workflows data, serializes it, and stores it
    in _WORKFLOWS_JSON. The computation repeats indefinitely, refreshing
    _WORKFLOWS_JSON until the pinball_ui server stops.

    Args:
        dbstore: The store to retrieve runs status.
    """
    global _WORKFLOWS_JSON
    data_builder = DataBuilder(dbstore, use_cache=True)
    while True:
        try:
            LOG.info("Workflow data computation starting.")
            workflows_data = data_builder.get_workflows()
            schedules_data = data_builder.get_schedules()
            _WORKFLOWS_JSON = _serialize(workflows_data, schedules_data)
            LOG.info("Workflow data computation complete.")
        except Exception as e:
            LOG.exception(e)
        # Sleep outside the try so a failed iteration also waits before
        # retrying, instead of busy-looping on a persistent error.
        # TODO(mao): Tune this parameter depending on future
        # pinball user experience.
        # TODO(mao): Make this computation run at scheduled time intervals
        # and cancel the next execution if the previous job hasn't
        # finished.
        time.sleep(60 * 20)
Ejemplo n.º 3 (0 votos)
def schedules(_):
    """Django view returning all schedules as a JSON response.

    Args:
        _: Unused Django request object.

    Returns:
        HttpResponse with the serialized schedules JSON on success, or
        HttpResponseServerError carrying the traceback on failure.
    """
    try:
        data_builder = DataBuilder(DbStore())
        schedules_data = data_builder.get_schedules()
        schedules_json = _serialize(schedules_data)
    # Narrowed from a bare 'except:' which would also swallow
    # SystemExit and KeyboardInterrupt.
    except Exception:
        LOG.exception('')
        return HttpResponseServerError(traceback.format_exc())
    else:
        # 'mimetype=' was deprecated in Django 1.5 and removed in 1.7;
        # 'content_type=' is the long-supported equivalent.
        return HttpResponse(schedules_json, content_type='application/json')
Ejemplo n.º 4 (0 votos)
def schedules(_):
    """Django view returning all schedules as a JSON response.

    Args:
        _: Unused Django request object.

    Returns:
        HttpResponse with the serialized schedules JSON on success, or
        HttpResponseServerError carrying the traceback on failure.
    """
    try:
        data_builder = DataBuilder(DbStore())
        schedules_data = data_builder.get_schedules()
        schedules_json = _serialize(schedules_data)
    # Narrowed from a bare 'except:' which would also swallow
    # SystemExit and KeyboardInterrupt.
    except Exception:
        LOG.exception('')
        return HttpResponseServerError(traceback.format_exc())
    else:
        # 'mimetype=' was deprecated in Django 1.5 and removed in 1.7;
        # 'content_type=' is the long-supported equivalent.
        return HttpResponse(schedules_json, content_type='application/json')