def recipe_builder(job_id, recipeset_id, recipefile, overrides, fqdn):
    """Load a recipe description and schedule it for *fqdn*.

    recipefile is a JSON file holding [recipexml, machines, tasks, args];
    recipexml is itself a path (opened via find_open) whose contents become
    the recipe body.  The merged arguments are persisted in the global
    runtime under 'args/<job>/<recipeset>/<recipe>' so that reruns reuse
    previously stored values (setdefault semantics).

    Returns the recipe id (taken from args, default 99).
    """
    global runtime
    log.debug("recipe_builder(%r, %r, %r, %r, %r)"
              % (job_id, recipeset_id, recipefile, overrides, fqdn))
    fobj = find_open(recipefile)
    try:
        recipexml, machines, tasks, args = json.load(fobj)
    finally:
        fobj.close()
    fobj = find_open(recipexml)
    try:
        recipe = fobj.read()
    finally:
        fobj.close()
    args['beah_root'] = beah.tools.get_data_root().next() # pylint: disable=E1101
    args['beah_py_root'] = beah.tools.get_root()
    args.update(overrides)
    args['job_id'] = job_id
    args['recipeset_id'] = recipeset_id
    recipe_id = args.setdefault('recipe_id', 99)
    # Task ids appear to be integers (the '%d' format requires it) —
    # seed per-task status/result slots without clobbering existing ones.
    for task in tasks:
        args.setdefault('task%d_stat' % task, 'Waiting')
        args.setdefault('task%d_res' % task, 'None')
    for idx, machine in enumerate(machines):
        args.setdefault('machine%d' % idx, machine)
        args.setdefault('machine%d_stat' % idx, 'None')
    # Persist args in the runtime; stored values win over freshly built ones.
    rtargs = runtimes.TypeDict(
        runtime, 'args/%s/%s/%s' % (job_id, recipeset_id, recipe_id))
    for key, value in args.items():
        rtargs.setdefault(key, value)
    # Make sure fqdn occupies some machine slot; fall back to slot 0.
    for idx in range(len(machines)):
        if rtargs.get('machine%d' % idx, '') == fqdn:
            log.debug("recipe_builder: found %r as %r" % (fqdn, idx))
            break
    else:
        log.debug("recipe_builder: %r not found, using machine 0" % (fqdn, ))
        rtargs['machine0'] = fqdn
    log.debug("recipe_builder: args=%r" % (dict(rtargs), ))
    # Warn about slots left empty or still holding the placeholder name.
    for idx in range(len(machines)):
        slot = rtargs.get('machine%d' % idx, '')
        if slot in ('', 'machine%d' % idx):
            log.warning("Machine %d was not specified.", idx)
    schedule(fqdn, recipe_id, recipe, rtargs, tasks)
    return recipe_id
def __init__(self, task_path, env):
    """Set up the RHTS task harness and launch the task.

    Builds the task environment, configures per-task logging, reads the
    task's testinfo.desc metadata (best effort), opens a per-task shelve
    runtime, starts the result server on a local TCP port, exports the
    environment to a file and finally calls self.server_started().

    task_path -- task path, stored on the instance.
    env -- mapping to seed the environment from, or USE_DEFAULT to
        inherit a copy of os.environ.

    NOTE(review): the environment (env or os.environ) must provide
    JOBID, RECIPESETID, RECIPEID and TASKID — the taskid template below
    raises KeyError otherwise; confirm against callers.
    """
    self.process = None
    self.listener = None
    self.task_path = task_path
    self.__done = False
    self.__waits_for = []
    # FIXME: is inheriting the whole environment desirable?
    if env is not USE_DEFAULT:
        self.env = dict(env)
    else:
        self.env = dict(os.environ)
    # FIXME: Any other env.variables to set?
    # FIXME: What values should be used here?
    # - some values could be received from LC when task is scheduled, but
    #   it would create a dependency!
    # - let's use fake values, and let the Backend translate it (if
    #   supported)
    # - e.g. JOBID, RECIPESETID, RECIPEID are not interesting at all
    # - use task_id for RECIPESETID, and BE (or LC eventually) should
    #   be able to find about the rest...
    taskid = "J%(JOBID)s-S%(RECIPESETID)s-R%(RECIPEID)s-T%(TASKID)s" % self.env
    # FIXME! use tempfile and upload log when process ends.
    log = logging.getLogger('rhts_task')
    twmisc.twisted_logging(log, level=logging.WARNING)
    ll = self.env.get('BEAH_TASK_LOG', "warning")
    log.setLevel(str2log_level(ll))
    make_log_handler(log, LOG_PATH, "rhts_task_%s.log" % (taskid, ),
                     syslog=True,
                     console=self.env.get('BEAH_TASK_CONSOLE', False))
    # parse task's metadata (best effort — failures only disable the
    # metadata-derived settings below):
    try:
        from rhts import testinfo
        ti = testinfo.parse_file(
            os.path.join(self.env['TESTPATH'], 'testinfo.desc'),
            raise_errors=False)
    except Exception:
        # Was a bare `except:` — narrowed so KeyboardInterrupt/SystemExit
        # are no longer swallowed; metadata errors stay non-fatal.
        log.error("Error in tasks metadata: %s" % format_exc())
        ti = None
    if ti is not None:
        # Metadata may supply additional environment variables...
        for k, v in getattr(ti, 'environment', {}).iteritems():
            self.env.setdefault(k, v)
        # ...and options; a leading '-' negates an option (empty value).
        for o in getattr(ti, 'options', []):
            opt_lower = o.lower()
            # startswith is safe on '' — the old opt_lower[0] == '-'
            # raised IndexError for an empty option string.
            if opt_lower.startswith('-'):
                opt_lower = opt_lower[1:]
                value = ''
            else:
                value = 'yes'
            if opt_lower.startswith('compatible'):
                self.env.setdefault('RHTS_OPTION_COMPATIBLE', value)
            elif opt_lower.startswith('compatservice'):
                self.env.setdefault('RHTS_OPTION_COMPAT_SERVICE', value)
            elif opt_lower.startswith('strongeravc'):
                self.env.setdefault('RHTS_OPTION_STRONGER_AVC', value)
    # update log level if necessary (metadata may have set BEAH_TASK_LOG):
    ll2 = self.env.get('BEAH_TASK_LOG', ll)
    if ll2 != ll:
        log.setLevel(str2log_level(ll2))
    # No point in storing everything in one big file. Use one file per task
    rt = runtimes.ShelveRuntime(RUNTIME_PATHNAME_TEMPLATE % taskid)
    self.__files = runtimes.TypeDict(rt, 'files')
    # FIXME: use configurable range of ports.
    self.variables = runtimes.TypeDict(rt, 'variables')
    port = self.variables.setdefault(
        'port',
        int(self.env.get('RHTS_PORT', random.randint(7080, 7099))))
    self.variables.setdefault('nohup', False)
    self.variables.setdefault('has_result', False)
    # use no digests by default... Seems waste of time on localhost.
    self.env.setdefault('DIGEST_METHOD', 'no_digest')
    self.env.setdefault('TESTORDER', '123')  # FIXME: More sensible default
    # update defaults:
    for k, v in self.ENV_DEFAULTS.iteritems():
        self.env.setdefault(k, v)
    # provide sensible defaults for selected system env.variables:
    self.env.setdefault('HOME', '/root')
    self.env.setdefault('LANG', 'en_US.UTF-8')
    # FIXME: should any checks go here?
    # e.g. does Makefile PURPOSE exist? try running `make testinfo.desc`? ...
    self.controller = ControllerLink(self)
    stdio.StandardIO(self.controller)
    self.task = RHTSTask(self)
    self.server = RHTSServer(self)
    # If IPv6 has not been disabled, attempt to listen on IPv6
    # otherwise fall back to IPv4
    def listen_tcp(interface):
        return reactor.listenTCP(port, self.server, interface=interface)
    conf = beah.config.get_conf('beah')
    if not parse_bool(conf.get('DEFAULT', 'IPV6_DISABLED')):
        try:
            listen_tcp('::1')
            self.env['RESULT_SERVER'] = '[::1]:%s' % port
        except CannotListenError:
            listen_tcp('127.0.0.1')
            self.env['RESULT_SERVER'] = '127.0.0.1:%s' % port
    else:
        listen_tcp('127.0.0.1')
        self.env['RESULT_SERVER'] = '127.0.0.1:%s' % port
    # save env:
    env_file = ENV_PATHNAME_TEMPLATE % taskid
    self.env['RHTS_ENV'] = env_file
    jsonenv.export_env(env_file, self.env)
    # Execute rhts-test-runner.sh
    self.server_started()
def start_server(conf=None, backend_host=None, backend_port=None,
                 backend_adaptor=BackendAdaptor_JSON,
                 task_host=None, task_port=None,
                 task_adaptor=TaskAdaptor_JSON,
                 spawn=None):
    """Configure and start the beah Controller and its listeners.

    Reads configuration (loading the default 'beah' config when *conf* is
    not given), sets up logging and runtime/log directories, creates a
    Controller backed by a shelve runtime, then registers backend and task
    listeners on TCP ports and/or (on POSIX) UNIX sockets.  Explicit
    host/port arguments override the configured values.

    Returns the Controller instance.  The Twisted reactor is NOT started
    here — listeners only become active once the caller runs the reactor.
    """
    # CONFIG:
    if not conf:
        config.beah_conf()
        conf = config.get_conf('beah')
    # LOGGING:
    twisted_logging(log, level=logging.WARNING)
    log.setLevel(str2log_level(conf.get('CONTROLLER', 'LOG')))
    # Create a directory for runtime
    # FIXME: should try to create a temp path if following fails:
    ensuredir(conf.get('CONTROLLER', 'VAR_ROOT'))
    # Create a directory for logging and check permissions
    lp = conf.get('CONTROLLER', 'LOG_PATH')
    make_log_handler(log, lp, conf.get('CONTROLLER', 'LOG_FILE_NAME'),
                     syslog=True,
                     console=conf.get('CONTROLLER', 'CONSOLE_LOG', False))
    # In DEVEL mode wrap the main classes with verbose call tracing.
    if parse_bool(config.get_conf('beah').get('CONTROLLER', 'DEVEL')):
        print_this = log_this(log.debug, True)
        make_class_verbose(Controller, print_this)
        make_class_verbose(MasterTask, print_this)
    # RUN:
    backend_host = backend_host or conf.get('BACKEND', 'INTERFACE')
    backend_port = backend_port or conf.get('BACKEND', 'PORT')
    task_host = task_host or conf.get('TASK', 'INTERFACE')
    task_port = task_port or int(conf.get('TASK', 'PORT'))
    if os.name == 'posix':
        # On POSIX an empty port means "TCP disabled"; UNIX sockets are
        # available as configured.
        if backend_port != '':
            backend_port = int(backend_port)
        backend_socket = conf.get('BACKEND', 'SOCKET')
        if task_port != '':
            task_port = int(task_port)
        task_socket = conf.get('TASK', 'SOCKET')
    else:
        # No UNIX sockets elsewhere — TCP ports are mandatory.
        backend_port = int(backend_port)
        backend_socket = ''
        task_port = int(task_port)
        task_socket = ''
    # Persistent runtime state shared with the Controller:
    runtime = runtimes.ShelveRuntime(
        conf.get('CONTROLLER', 'RUNTIME_FILE_NAME'))
    runtime.vars = runtimes.TypeDict(runtime, 'vars')
    runtime.tasks = runtimes.TypeDict(runtime, 'tasks')
    controller = Controller(spawn or Spawn(task_host, task_port,
                                           socket=task_socket),
                            runtime=runtime)
    def on_killed():
        # Stop immediately when no backends are attached; otherwise give
        # them a 2 second grace period before stopping the reactor.
        if not controller.backends:
            reactor.stop()
            return
        reactor.callLater(2, reactor.stop)
    controller.on_killed = on_killed
    log.info("################################")
    log.info("# Starting a Controller... #")
    log.info("################################")
    # Backend listener: TCP (loopback / fixed interface / all interfaces)
    # and, when configured, a UNIX socket.
    backend_listener = BackendListener(controller, backend_adaptor)
    if backend_port != '':
        if backend_host == 'localhost':
            listening = listen_loopback_tcp(backend_port, backend_listener)
        elif backend_host:
            listening = reactor.listenTCP(backend_port, backend_listener,
                                          interface=backend_host)
        else:
            listening = listen_all_tcp(backend_port, backend_listener,
                                       ipv6_disabled=parse_bool(
                                           conf.get('DEFAULT',
                                                    'IPV6_DISABLED')))
        log.info("Controller: BackendListener listening on %s port %s",
                 listening.getHost().host, listening.getHost().port)
    if backend_socket:
        if os.path.exists(backend_socket):
            # clean-up after e.g. system crash:
            log.warning("Controller: BackendListener cleaning %s",
                        backend_socket)
            os.remove(backend_socket)
        log.info("Controller: BackendListener listening on %s",
                 backend_socket)
        reactor.listenUNIX(backend_socket, backend_listener)
    # Task listener: same scheme as the backend listener above.
    task_listener = TaskListener(controller, task_adaptor)
    if task_port != '':
        if task_host == 'localhost':
            listening = listen_loopback_tcp(task_port, task_listener)
        elif task_host:
            listening = reactor.listenTCP(task_port, task_listener,
                                          interface=task_host)
        else:
            listening = listen_all_tcp(task_port, task_listener,
                                       ipv6_disabled=parse_bool(
                                           conf.get('DEFAULT',
                                                    'IPV6_DISABLED')))
        log.info("Controller: TaskListener listening on %s port %s",
                 listening.getHost().host, listening.getHost().port)
    if task_socket:
        if os.path.exists(task_socket):
            # clean-up after e.g. system crash:
            log.warning("Controller: TaskListener cleaning %s", task_socket)
            os.remove(task_socket)
        log.info("Controller: TaskListener listening on %s", task_socket)
        reactor.listenUNIX(task_socket, task_listener)
    return controller
def testAll(self):
    """Exercise the runtime wrappers end to end: populate a shelve-backed
    runtime, verify dict/list/addict behaviour, then reopen the store and
    confirm everything persisted."""
    test_db = '.test-runtime.db.tmp'
    rt = TestingRuntime(test_db)
    rt.tasks = runtimes.TypeDict(rt, 'tasks')
    rt.tqueue = runtimes.TypeList(rt, 'testqueue')
    rt.addict = runtimes.TypeAddict(rt, 'addict')
    # Populate the typed dict views.
    for key, val in (('var1', 'Hello'), ('var2', 'World'), ('var3', '!')):
        rt.vars[key] = val
    rt.vars.update(x=1, y=2, d=dict(en="Hi", cz="Ahoj", sk="Ahoj"))
    for fid in ('f1', 'f2', 'f3'):
        rt.files[fid] = dict(name='file/%s' % fid, id=fid)
    del rt.files['f3']
    rt.tasks['1'] = 'task1'
    rt.tasks['2'] = 'task2'
    # List behaviour: drain, then rebuild.
    while len(rt.queue) > 0:
        rt.queue.pop()
    assert len(rt.queue) == 0
    rt.queue.append('first')
    rt.queue.extend(['second', 'third', 'fourth'])
    # NOTE: TypeList's += appends the operand as a single element — the
    # assertion below expects 'fifth', not its individual characters.
    rt.queue += 'fifth'
    assert rt.queue == ['first', 'second', 'third', 'fourth', 'fifth']
    assert rt.queue != ['first', 'second', 'third', 'fourth']
    assert rt.queue != ['first', 'second', 'third', 'fourth', 'fifth',
                        'sixth']
    # Item assignment by positive and negative index:
    rt.queue[0] = '1st'
    rt.queue[4] = '5th'
    assert rt.queue == ['1st', 'second', 'third', 'fourth', '5th']
    rt.queue[-5] = 'First'
    rt.queue[-1] = 'Fifth'
    assert rt.queue == ['First', 'second', 'third', 'fourth', 'Fifth']
    rt.queue.check()
    # Deletion by positive and negative index until empty:
    rt.tqueue.extend([0, 1, 2, 3])
    del rt.tqueue[3]
    del rt.tqueue[-1]
    del rt.tqueue[0]
    del rt.tqueue[0]
    assert rt.tqueue == []
    rt.tqueue.check()
    # TypeAddict: entries with a None value are not retained (see the
    # has_key assertions below); a None key is accepted on write.
    rt.addict[None] = 'b'
    rt.addict['a'] = None
    rt.addict['b'] = 'c'
    rt.addict.update(dict(c=None, d='e'))
    assert not rt.addict.has_key('a')
    assert rt.addict['b'] == 'c'
    assert not rt.addict.has_key('c')
    assert rt.addict['d'] == 'e'
    rt.close()
    # Reopen the same database and verify persistence of every view.
    rt = TestingRuntime(test_db)
    rt.tasks = runtimes.TypeDict(rt, 'tasks')
    rt.tqueue = runtimes.TypeList(rt, 'testqueue')
    rt.addict = runtimes.TypeAddict(rt, 'addict')
    assert rt.vars['var1'] == 'Hello'
    assert rt.vars['var2'] == 'World'
    assert rt.vars['var3'] == '!'
    assert rt.vars['x'] == 1
    assert rt.vars['y'] == 2
    assert rt.vars['d']['en'] == 'Hi'
    assert rt.files['f1'] == dict(name='file/f1', id='f1')
    assert rt.files['f2'] == dict(name='file/f2', id='f2')
    assert rt.files.get('f3', None) is None
    assert rt.tasks['1'] == 'task1'
    assert rt.tasks['2'] == 'task2'
    assert rt.queue == ['First', 'second', 'third', 'fourth', 'Fifth']
    assert rt.tqueue == []
    assert not rt.addict.has_key('a')
    assert rt.addict['b'] == 'c'
    assert not rt.addict.has_key('c')
    assert rt.addict['d'] == 'e'
    rt.close()
def __init__(self, fname):
    """Open the shelve store *fname* and expose the typed views the
    tests rely on (vars, files, queue)."""
    runtimes.ShelveRuntime.__init__(self, fname)
    # Each view lives under its own key prefix in the same store.
    self.queue = runtimes.TypeList(self, 'queue')
    self.files = runtimes.TypeDict(self, 'file')
    self.vars = runtimes.TypeDict(self, 'var')