Example #1
 def spawn(controller, backend, task_info, env, args):
     task_env = dict(env)
     # 1. set environment variables:
     # BEAH_THOST - host name
     # BEAH_TPORT - port
     # BEAH_TSOCKET - socket
     # BEAH_TID - task id, used by the task to introduce itself when opening the socket
     task_id = task_info['id']
     conf = config.get_conf('beah')
     env_file = os.path.join(conf.get('TASK', 'VAR_ROOT'),
                             ENV_PATHNAME_TEMPLATE % task_id)
     dict_update(
         task_env,
         CALLED_BY_BEAH="1",
         BEAH_THOST=str(host),
         BEAH_TPORT=str(port),
         BEAH_TSOCKET=str(socket),
         BEAH_TID=str(task_id),
         BEAH_ROOT=conf.get('TASK', 'ROOT'),
         BEAH_ENV=env_file,
     )
     ll = conf.get('TASK', 'LOG')
     task_env.setdefault('BEAH_TASK_LOG', ll)
     task_env.setdefault('BEAH_TASK_CONSOLE',
                         conf.get('TASK', 'CONSOLE_LOG', 'False'))
     task_env.setdefault('TERM', 'dumb')
     val = os.getenv('PYTHONPATH')
     if val:
         task_env['PYTHONPATH'] = val
     jsonenv.export_env(env_file, task_env)
     # 2. spawn a task
     protocol = (proto or TaskStdoutProtocol)(task_id)
     protocol.controller = controller
     log.debug('spawn: Environment: %r.', task_env)
     reactor.spawnProcess(protocol,
                          task_info['file'],
                          args=[task_info['file']] + (args or []),
                          env=task_env)
     # FIXME: send an answer to backend(?)
     return protocol.task_protocol
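This example builds a per-task environment, serializes it to an env file with jsonenv.export_env, and launches the task under Twisted via reactor.spawnProcess. The names host, port, socket and proto are not defined in the snippet itself; they appear to come from an enclosing scope, and dict_update is a project helper. Below is a minimal sketch, assuming dict_update simply merges keyword arguments into the dict and that the exported env file holds a plain JSON object; load_task_env is a hypothetical counterpart showing how a spawned task could pick the variables back up, not the project's actual code:

    import json
    import os

    def dict_update(d, **kwargs):
        # merge keyword arguments into the mapping in place, as used above
        d.update(kwargs)
        return d

    def load_task_env(environ=os.environ):
        # hypothetical reader run inside the spawned task: merge the serialized
        # environment pointed to by BEAH_ENV back into a plain dict
        env = dict(environ)
        env_file = env.get('BEAH_ENV')
        if env_file and os.path.exists(env_file):
            f = open(env_file)
            try:
                env.update(json.load(f))  # assumes jsonenv wrote a JSON object
            finally:
                f.close()
        return env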
Example #2
    def __init__(self, task_path, env):
        self.process = None
        self.listener = None
        self.task_path = task_path
        self.__done = False
        self.__waits_for = []

        # FIXME: is inheriting the whole environment desirable?
        if env is not USE_DEFAULT:
            self.env = dict(env)
        else:
            self.env = dict(os.environ)

        # FIXME: Any other env.variables to set?
        # FIXME: What values should be used here?
        # - some values could be received from LC when task is scheduled, but
        #   it would create a dependency!
        #   - let's use fake values, and let the Backend translate it (if
        #     supported)
        #     - e.g. JOBID, RECIPESETID, RECIPEID are not interesting at all
        #     - use task_id for RECIPESETID, and BE (or LC eventually) should
        #       be able to find about the rest...

        taskid = "J%(JOBID)s-S%(RECIPESETID)s-R%(RECIPEID)s-T%(TASKID)s" % self.env

        # FIXME! use tempfile and upload log when process ends.
        log = logging.getLogger('rhts_task')
        twmisc.twisted_logging(log, level=logging.WARNING)
        ll = self.env.get('BEAH_TASK_LOG', "warning")
        log.setLevel(str2log_level(ll))
        make_log_handler(log,
                         LOG_PATH,
                         "rhts_task_%s.log" % (taskid, ),
                         syslog=True,
                         console=self.env.get('BEAH_TASK_CONSOLE', False))

        # parse task's metadata:
        try:
            from rhts import testinfo
            ti = testinfo.parse_file(os.path.join(self.env['TESTPATH'],
                                                  'testinfo.desc'),
                                     raise_errors=False)
        except:
            log.error("Error in tasks metadata: %s" % format_exc())
            ti = None
        if ti is not None:
            for k, v in getattr(ti, 'environment', {}).iteritems():
                self.env.setdefault(k, v)
            for o in getattr(ti, 'options', []):
                opt_lower = o.lower()
                if opt_lower[0] == '-':
                    opt_lower = opt_lower[1:]
                    value = ''
                else:
                    value = 'yes'
                if opt_lower.startswith('compatible'):
                    self.env.setdefault('RHTS_OPTION_COMPATIBLE', value)
                elif opt_lower.startswith('compatservice'):
                    self.env.setdefault('RHTS_OPTION_COMPAT_SERVICE', value)
                elif opt_lower.startswith('strongeravc'):
                    self.env.setdefault('RHTS_OPTION_STRONGER_AVC', value)

        # update log level if necessary:
        ll2 = self.env.get('BEAH_TASK_LOG', ll)
        if ll2 != ll:
            log.setLevel(str2log_level(ll2))

        # No point in storing everything in one big file. Use one file per task
        rt = runtimes.ShelveRuntime(RUNTIME_PATHNAME_TEMPLATE % taskid)
        self.__files = runtimes.TypeDict(rt, 'files')

        # FIXME: use configurable range of ports.
        self.variables = runtimes.TypeDict(rt, 'variables')
        port = self.variables.setdefault(
            'port', int(self.env.get('RHTS_PORT', random.randint(7080, 7099))))
        self.variables.setdefault('nohup', False)
        self.variables.setdefault('has_result', False)

        # use no digests by default; computing digests seems a waste of time on localhost.
        self.env.setdefault('DIGEST_METHOD', 'no_digest')
        self.env.setdefault('TESTORDER', '123')  # FIXME: pick a more sensible default

        # update defaults:
        for k, v in self.ENV_DEFAULTS.iteritems():
            self.env.setdefault(k, v)

        # provide sensible defaults for selected system env.variables:
        self.env.setdefault('HOME', '/root')
        self.env.setdefault('LANG', 'en_US.UTF-8')

        # FIXME: should any checks go here?
        # e.g. does Makefile PURPOSE exist? try running `make testinfo.desc`? ...
        self.controller = ControllerLink(self)
        stdio.StandardIO(self.controller)
        self.task = RHTSTask(self)
        self.server = RHTSServer(self)

        # If IPv6 has not been disabled, attempt to listen on IPv6
        # otherwise fall back to IPv4
        def listen_tcp(interface):
            return reactor.listenTCP(port, self.server, interface=interface)

        conf = beah.config.get_conf('beah')
        if not parse_bool(conf.get('DEFAULT', 'IPV6_DISABLED')):
            try:
                listen_tcp('::1')
                self.env['RESULT_SERVER'] = '[::1]:%s' % port
            except CannotListenError:
                listen_tcp('127.0.0.1')
                self.env['RESULT_SERVER'] = '127.0.0.1:%s' % port
        else:
            listen_tcp('127.0.0.1')
            self.env['RESULT_SERVER'] = '127.0.0.1:%s' % port
        # save env:
        env_file = ENV_PATHNAME_TEMPLATE % taskid
        self.env['RHTS_ENV'] = env_file
        jsonenv.export_env(env_file, self.env)

        # Execute rhts-test-runner.sh
        self.server_started()
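The constructor derives a composite task id from the environment (for example, JOBID=1, RECIPESETID=2, RECIPEID=3 and TASKID=4 yield "J1-S2-R3-T4"), sets up per-task logging, folds metadata from testinfo.desc into the environment, persists a few values in a per-task shelve, and finally starts the result server, preferring the IPv6 loopback and falling back to IPv4. That fallback can be shown in isolation; the sketch below is a simplified stand-alone version of the pattern, where ipv6_disabled is a hypothetical flag standing in for the IPV6_DISABLED configuration option:

    from twisted.internet import reactor
    from twisted.internet.error import CannotListenError

    def start_result_server(port, factory, ipv6_disabled=False):
        # try the IPv6 loopback first unless disabled, then fall back to IPv4;
        # return the RESULT_SERVER address the task should be given
        if not ipv6_disabled:
            try:
                reactor.listenTCP(port, factory, interface='::1')
                return '[::1]:%s' % port
            except CannotListenError:
                pass
        reactor.listenTCP(port, factory, interface='127.0.0.1')
        return '127.0.0.1:%s' % port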
Example #3
    def testMain(self):
        def unlink_failsafe(filename):
            # best-effort cleanup: ignore errors for files that were never created
            try:
                os.unlink(filename)
            except:
                pass

        def check_empty_db(filename):
            # a missing or empty db must succeed and leave the passed dict untouched
            env = {}
            assert jsonenv.update_env(filename, env, jsonenv.RAISE)
            assert env == {}
            env = {"ITEM": "val"}
            assert jsonenv.update_env(filename, env, jsonenv.RAISE)
            assert env == {"ITEM": "val"}

        def check_db(filename, expected=None):
            # the stored values must land in an initially empty dict
            env = {}
            assert jsonenv.update_env(filename, env, jsonenv.SKIP)
            if expected is not None:
                if env != expected:
                    # debugging aid kept from the original, normally disabled:
                    #print "got:", env
                    #print "file(%s): <<END" % filename
                    #print open(filename, "r").read()
                    #print "END"
                    assert env == expected

        d = tempfile.mkdtemp()
        try:
            #print >> sys.stderr, "Tempdir: %s" % d

            fn = os.path.join(d, "empty.db")
            try:
                check_empty_db(fn)
            finally:
                unlink_failsafe(fn)

            # check non-empty DB will add all values to dict:
            env = {"VAR1": "VAL1", "VAR2": "VAL2"}
            fn = os.path.join(d, "test.db")
            assert jsonenv.export_env(fn, env, jsonenv.RAISE)
            try:
                check_db(fn, env)
                env2 = {}
                assert jsonenv.update_env(fn, env2, jsonenv.RAISE)
                assert env == env2
            finally:
                unlink_failsafe(fn)

            # check RAISE:
            env = {"VAR1": "VAL1", "VAR2": "VAL2", "BAD": object()}
            fn = os.path.join(d, "test.db")
            try:
                try:
                    jsonenv.export_env(fn, env, jsonenv.RAISE)
                    assert False
                except:
                    pass
                check_empty_db(fn)
            finally:
                unlink_failsafe(fn)

            # check ABORT:
            env = {"VAR1": "VAL1", "VAR2": "VAL2", "BAD": object()}
            fn = os.path.join(d, "test.db")
            try:
                assert not jsonenv.export_env(fn, env, jsonenv.ABORT)
                check_empty_db(fn)
            finally:
                unlink_failsafe(fn)

            # check SKIP:
            env = {"VAR1": "VAL1", "VAR2": "VAL2", "BAD": object()}
            fn = os.path.join(d, "test.db")
            try:
                assert not jsonenv.export_env(fn, env, jsonenv.SKIP)
                env2 = {}
                assert jsonenv.update_env(fn, env2, jsonenv.RAISE)
                env3 = dict(env)
                del env3["BAD"]
                assert env2 == env3
            finally:
                unlink_failsafe(fn)

        finally:
            shutil.rmtree(d)
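The test drives jsonenv through an export/update round trip and through its three error-handling policies for values that cannot be serialized: RAISE propagates the error and leaves no file behind, ABORT returns a false value and writes nothing, and SKIP returns a false value but writes the serializable subset. The stand-in below is only meant to make those semantics explicit; it is written to satisfy the assertions above and is not the project's actual jsonenv module (the policy constants, the JSON storage format, and the setdefault merge are assumptions):

    import json
    import os

    RAISE, ABORT, SKIP = 'raise', 'abort', 'skip'  # assumed policy markers

    def export_env(filename, env, on_error=RAISE):
        # serialize env to filename; return True only if every value was written
        ok = True
        serializable = {}
        for key, value in env.items():
            try:
                json.dumps(value)
            except TypeError:
                if on_error == RAISE:
                    raise            # propagate before any file is created
                ok = False
                if on_error == ABORT:
                    return False     # write nothing at all
                continue             # SKIP: drop just this value
            serializable[key] = value
        f = open(filename, 'w')
        try:
            json.dump(serializable, f)
        finally:
            f.close()
        return ok

    def update_env(filename, env, on_error=RAISE):
        # a missing or empty file contributes nothing but is not an error;
        # on_error would govern corrupt-file handling, which the test never exercises
        if not os.path.exists(filename) or os.path.getsize(filename) == 0:
            return True
        f = open(filename, 'r')
        try:
            stored = json.load(f)
        finally:
            f.close()
        for key, value in stored.items():
            # existing keys win here; whether the real module overwrites instead
            # is not pinned down by the assertions above
            env.setdefault(key, value)
        return True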