Example 1
def main():
    parser = argparse.ArgumentParser(
        description='Start Pinball master server.')
    parser.add_argument(
        '-c',
        '--config_file',
        dest='config_file',
        required=True,
        help='full path to the Pinball configuration file')
    parser.add_argument(
        '-p',
        '--port',
        dest='port',
        type=int,
        default=PinballConfig.MASTER_PORT,
        help='port to run on')
    options = parser.parse_args(sys.argv[1:])

    PinballConfig.parse(options.config_file)
    master_port = options.port if options.port else PinballConfig.MASTER_PORT
    factory = Factory(master_port=master_port)

    # The reason why these imports are not at the top level is that some of the
    # imported code (db models initializing table names) depends on parameters
    # passed on the command line (master name).  Those imports need to be delayed
    # until after command line parameter parsing.
    from pinball.persistence.store import DbStore
    factory.create_master(DbStore())
    factory.run_master_server()
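Given the two arguments defined above, a typical invocation passes the configuration file and, optionally, a port; the script name and paths below are illustrative, not taken from the listing:

python master_server.py --config_file /path/to/pinball.cfg --port 9090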
Example 2
def get_context_data(self, **kwargs):
    context = super(TokenView, self).get_context_data(**kwargs)
    token_name = self.request.GET['path']
    data_builder = DataBuilder(DbStore())
    token_data = data_builder.get_token(token_name)
    # Copy the formatted token fields into the template context.
    token_format = token_data.format()
    for key, value in token_format.items():
        context[key] = value
    return context
Example 3
def get_context_data(self, **kwargs):
    context = super(ScheduleView, self).get_context_data(**kwargs)
    workflow = self.request.GET['workflow']
    data_builder = DataBuilder(DbStore())
    schedule_data = data_builder.get_schedule(workflow)
    # Copy the formatted schedule fields into the template context and
    # render the notification emails as a space-separated string.
    formatted_schedule = schedule_data.format()
    for key, value in formatted_schedule.items():
        context[key] = value
    context['emails'] = ' '.join(schedule_data.emails)
    return context
Example 4
def schedules(_):
    try:
        data_builder = DataBuilder(DbStore())
        schedules_data = data_builder.get_schedules()
        schedules_json = _serialize(schedules_data)
    except:
        LOG.exception('')
        return HttpResponseServerError(traceback.format_exc())
    else:
        return HttpResponse(schedules_json, mimetype='application/json')
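The _serialize helper used here and in the following views is not part of this listing; a minimal sketch of what it might do, assuming each data object exposes a format() method like the token and schedule data in Examples 2 and 3:

import json

def _serialize(data_objects):
    # Hypothetical implementation: render each data object to its formatted
    # dictionary and dump the resulting list as a JSON string.
    return json.dumps([data.format() for data in data_objects])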
Example 5
def instances(request):
    try:
        workflow = request.GET['workflow']
        data_builder = DataBuilder(DbStore(), use_cache=True)
        instances_data = data_builder.get_instances(workflow)
        instances_json = _serialize(instances_data)
    except:
        LOG.exception('')
        return HttpResponseServerError(traceback.format_exc())
    else:
        return HttpResponse(instances_json, mimetype='application/json')
Example 6
def token_paths(request):
    try:
        path = request.GET['path']
        data_builder = DataBuilder(DbStore())
        tokens_data = data_builder.get_token_paths(path)
        tokens_json = _serialize(tokens_data)
    except:
        LOG.exception('')
        return HttpResponseServerError(traceback.format_exc())
    else:
        return HttpResponse(tokens_json, mimetype='application/json')
Example 7
def jobs(request):
    try:
        data_builder = DataBuilder(DbStore(), use_cache=True)
        workflow = request.GET['workflow']
        instance = request.GET['instance']
        if instance == 'latest':
            instance = data_builder.get_latest_instance(workflow).instance
        jobs_data = data_builder.get_jobs(workflow, instance)
        jobs_json = _serialize(jobs_data)
    except:
        LOG.exception('')
        return HttpResponseServerError(traceback.format_exc())
    else:
        return HttpResponse(jobs_json, mimetype='application/json')
Example 8
def file_content(request):
    try:
        workflow = request.GET['workflow']
        instance = request.GET['instance']
        job = request.GET['job']
        execution = int(request.GET['execution'])
        log_type = request.GET['log_type']
        if execution < 0:
            return HttpResponseServerError(
                'execution must not be negative; got %d' % execution)
        data_builder = DataBuilder(DbStore())
        file_data = data_builder.get_file_content(workflow, instance, job,
                                                  execution, log_type)
    except:
        LOG.exception('')
        return HttpResponseServerError(traceback.format_exc())
    else:
        return HttpResponse(file_data, mimetype='text/plain')
Example 9
def executions(request):
    try:
        workflow = request.GET['workflow']
        instance = request.GET.get('instance')
        job = request.GET['job']
        data_builder = DataBuilder(DbStore())
        if instance:
            executions_data = data_builder.get_executions(
                workflow, instance, job)
        else:
            executions_data = data_builder.get_executions_across_instances(
                workflow, job)
        executions_json = _serialize(executions_data)
    except:
        LOG.exception('')
        return HttpResponseServerError(traceback.format_exc())
    else:
        return HttpResponse(executions_json, content_type='application/json')
Example 10
def graph(request):
    try:
        data_builder = DataBuilder(DbStore(), use_cache=True)
        workflow = request.GET['workflow']
        if 'instance' in request.GET:
            instance = request.GET['instance']
            if instance == 'latest':
                instance = data_builder.get_latest_instance(workflow).instance
            jobs_data = data_builder.get_jobs(workflow=workflow,
                                              instance=instance)
            instance_data = data_builder.get_instance(workflow=workflow,
                                                      instance=instance)
            workflow_graph = WorkflowGraph(jobs_data, instance_data)
        else:
            workflow_graph = WorkflowGraph.from_parser(workflow)
    except:
        LOG.exception('')
        return HttpResponseServerError(traceback.format_exc())
    else:
        return HttpResponse(workflow_graph.get_svg(), mimetype='image/svg+xml')
Example 11
def status(request):
    try:
        workflow = request.GET.get('workflow')
        instance = request.GET.get('instance')
        data_builder = DataBuilder(DbStore())
        status = []
        if data_builder.is_signal_set(workflow, instance, Signal.EXIT):
            status = ['exiting']
        elif data_builder.is_signal_set(workflow, instance, Signal.ABORT):
            status = ['aborting']
        elif data_builder.is_signal_set(workflow, instance, Signal.DRAIN):
            status = ['draining']
        if not _is_master_alive():
            status.append('no master at %s:%d' % (socket.gethostname(),
                                                  PinballConfig.MASTER_PORT))
        status_json = json.dumps(status)
    except:
        LOG.exception('')
        return HttpResponseServerError(traceback.format_exc())
    else:
        return HttpResponse(status_json, mimetype='application/json')
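_is_master_alive is defined elsewhere in the module; one plausible sketch, assuming the check is a plain TCP connection attempt against the configured master port:

def _is_master_alive():
    # Hypothetical check: report whether a TCP connection to the master port
    # on this host succeeds within a short timeout.
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.settimeout(1)
    try:
        sock.connect((socket.gethostname(), PinballConfig.MASTER_PORT))
        return True
    except socket.error:
        return False
    finally:
        sock.close()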
Example 12
def get_context_data(self, **kwargs):
    context = super(ExecutionView, self).get_context_data(**kwargs)
    workflow = self.request.GET['workflow']
    instance = self.request.GET['instance']
    job = self.request.GET['job']
    execution = int(self.request.GET['execution'])
    data_builder = DataBuilder(DbStore())
    execution_data = data_builder.get_execution(workflow, instance, job,
                                                execution)
    formatted_data = execution_data.format()
    for key, value in formatted_data.items():
        context[key] = value
    # Flatten the execution properties into a single 'key=value' list.
    properties = []
    for key, value in execution_data.properties.items():
        properties.append('%s=%s' % (key, value))
    context['properties'] = ', '.join(properties)
    # Blank out fields that are not set so the template renders cleanly.
    if not execution_data.end_time:
        context['end_time'] = ''
    if execution_data.exit_code is None:
        context['exit_code'] = ''
    return context
Example 13
def main():
    parser = argparse.ArgumentParser(
        description='Generate test Pinball workflow data.')
    parser.add_argument('-w',
                        '--active_workflows',
                        dest='active_workflows',
                        type=int,
                        default=5,
                        help='number of active workflows to generate')
    parser.add_argument('-r',
                        '--archived_workflows',
                        dest='archived_workflows',
                        type=int,
                        default=5,
                        help='number of archived workflows to generate')
    parser.add_argument('-i',
                        '--instances',
                        dest='instances',
                        type=int,
                        default=5,
                        help='number of instances per workflow')
    parser.add_argument('-j',
                        '--jobs',
                        dest='jobs',
                        type=int,
                        default=5,
                        help='number of jobs per workflow')
    parser.add_argument('-e',
                        '--executions',
                        dest='executions',
                        type=int,
                        default=5,
                        help='number of executions per job')
    options = parser.parse_args(sys.argv[1:])

    generate_workflows(options.active_workflows, options.archived_workflows,
                       options.instances, options.jobs, options.executions,
                       DbStore())
Example 14
def _run_scheduler(factory, emailer):
    client = factory.get_client()
    scheduler = Scheduler(client, DbStore(), emailer)
    scheduler.run()
Example 15
def main():
    _register_signal_listener()

    parser = argparse.ArgumentParser(
        description='Start Pinball master and workers.')
    parser.add_argument(
        '-c',
        '--config_file',
        dest='config_file',
        required=True,
        help='full path to the Pinball configuration file')
    parser.add_argument(
        '-m',
        '--mode',
        dest='mode',
        choices=['master', 'scheduler', 'workers', 'ui'],
        default='master',
        help='execution mode')

    options = parser.parse_args(sys.argv[1:])
    PinballConfig.parse(options.config_file)

    if hasattr(PinballConfig, 'MASTER_NAME') and PinballConfig.MASTER_NAME:
        master_name(PinballConfig.MASTER_NAME)
    _pinball_imports()
    if PinballConfig.UI_HOST:
        emailer = Emailer(PinballConfig.UI_HOST, PinballConfig.UI_PORT)
    else:
        emailer = Emailer(socket.gethostname(), PinballConfig.UI_PORT)

    if options.mode == 'ui':
        hostport = '%s:%d' % (socket.gethostname(), PinballConfig.UI_PORT)
        cache_thread.start_cache_thread(DbStore())
        if not PinballConfig.UI_HOST:
            hostport = 'localhost:%d' % PinballConfig.UI_PORT

        # Disable the reloader to prevent automatic restarts on file changes.
        # The problem with auto-reloading is that it starts multiple
        # processes, some of which become orphans if the UI is killed the
        # wrong way.
        management.call_command('runserver', hostport, interactive=False,
                                use_reloader=False)
        return

    factory = Factory(master_hostname=PinballConfig.MASTER_HOST,
                      master_port=PinballConfig.MASTER_PORT)
    threads = []
    if options.mode == 'master':
        factory.create_master(DbStore())
    elif options.mode == 'scheduler':
        threads.append(_create_scheduler(factory, emailer))
    else:
        assert options.mode == 'workers'
        if PinballConfig.UI_HOST:
            emailer = Emailer(PinballConfig.UI_HOST, PinballConfig.UI_PORT)
        else:
            emailer = Emailer(socket.gethostname(), PinballConfig.UI_PORT)
        threads = _create_workers(PinballConfig.WORKERS, factory, emailer)

    try:
        if options.mode == 'master':
            factory.run_master_server()
        else:
            _wait_for_threads(threads)
    except KeyboardInterrupt:
        LOG.info('Exiting')
        sys.exit()
Example 16
def _run_worker(factory, emailer):
    client = factory.get_client()
    worker = Worker(client, DbStore(), emailer)
    worker.run()
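The _create_workers helper referenced in Example 15 is not shown in this listing; a plausible sketch, assuming it simply wraps the _run_worker loop above in one thread per worker:

import threading

def _create_workers(num_workers, factory, emailer):
    # Hypothetical helper: start one thread per worker, each running the
    # _run_worker loop from this example, and return the started threads.
    threads = []
    for _ in range(num_workers):
        thread = threading.Thread(target=_run_worker, args=(factory, emailer))
        thread.start()
        threads.append(thread)
    return threads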