Example No. 1
 def getAndUpdateFullRevisionList(self, node_test_suite):
     full_revision_list = []
     config = self.config
     log = self.log
     try:
         for vcs_repository in node_test_suite.vcs_repository_list:
             repository_path = vcs_repository['repository_path']
             repository_id = vcs_repository['repository_id']
             branch = vcs_repository.get('branch')
             # Make sure we have local repository
             updater = Updater(
                 repository_path,
                 git_binary=config['git_binary'],
                 branch=branch,
                 log=log,
                 process_manager=self.process_manager,
                 working_directory=node_test_suite.working_directory,
                 url=vcs_repository["url"])
             updater.checkout()
             revision = "-".join(updater.getRevision())
             full_revision_list.append('%s=%s' % (repository_id, revision))
         node_test_suite.revision = ','.join(full_revision_list)
     except SubprocessError, e:
         log("Error while getting repository, ignoring this test suite : %r"
             % (e, ),
             exc_info=sys.exc_info())
         full_revision_list = None
     return full_revision_list
Example No. 2
 def getAndUpdateFullRevisionList(self, node_test_suite):
   full_revision_list = []
   config = self.config
   log = self.log
   for vcs_repository in node_test_suite.vcs_repository_list:
     repository_path = vcs_repository['repository_path']
     repository_id = vcs_repository['repository_id']
     branch = vcs_repository.get('branch')
     # Make sure we have local repository
     updater = Updater(repository_path, git_binary=config['git_binary'],
        branch=branch, log=log, process_manager=self.process_manager,
        working_directory=node_test_suite.working_directory,
        url=vcs_repository["url"])
     updater.checkout()
     revision = "-".join(updater.getRevision())
     full_revision_list.append('%s=%s' % (repository_id, revision))
   node_test_suite.revision = ','.join(full_revision_list)
   return full_revision_list
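
These variants are methods of a test-node class and take a node_test_suite object carrying the repository configuration. The sketch below shows how such a call might be driven; StubNodeTestSuite, test_node, the paths and the URL are hypothetical stand-ins, not names from the original code.

# Minimal stand-in exposing only the attributes the method reads.
class StubNodeTestSuite(object):
  def __init__(self, working_directory, vcs_repository_list):
    self.working_directory = working_directory
    self.vcs_repository_list = vcs_repository_list
    self.revision = None

node_test_suite = StubNodeTestSuite(
  working_directory='/srv/testnode/working',           # hypothetical path
  vcs_repository_list=[{
    'url': 'https://git.example.org/project.git',      # hypothetical URL
    'repository_id': 'project',
    'repository_path': '/srv/testnode/working/project',
    'branch': 'master',
  }])

# test_node is assumed to be an already configured instance of the class
# that defines getAndUpdateFullRevisionList.
full_revision_list = test_node.getAndUpdateFullRevisionList(node_test_suite)
# On success node_test_suite.revision holds a comma-separated string of
# 'repository_id=revision' entries and the same list is returned; the
# variants that catch SubprocessError return None instead.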
Example No. 3
 def getAndUpdateFullRevisionList(self, node_test_suite):
   full_revision_list = []
   config = self.config
   log = self.log
   try:
     for vcs_repository in node_test_suite.vcs_repository_list:
       repository_path = vcs_repository['repository_path']
       repository_id = vcs_repository['repository_id']
       branch = vcs_repository.get('branch')
       # Make sure we have local repository
       updater = Updater(repository_path, git_binary=config['git_binary'],
          branch=branch, log=log, process_manager=self.process_manager,
          working_directory=node_test_suite.working_directory,
          url=vcs_repository["url"])
       updater.checkout()
       revision = "-".join(updater.getRevision())
       full_revision_list.append('%s=%s' % (repository_id, revision))
     node_test_suite.revision = ','.join(full_revision_list)
   except SubprocessError, e:
     log("Error while getting repository, ignoring this test suite : %r" % (e,), exc_info=sys.exc_info())
      full_revision_list = None
    return full_revision_list
Example No. 4
 def getAndUpdateFullRevisionList(self, node_test_suite):
     full_revision_list = []
     config = self.config
     log = self.log
     for vcs_repository in node_test_suite.vcs_repository_list:
         repository_path = vcs_repository['repository_path']
         repository_id = vcs_repository['repository_id']
         branch = vcs_repository.get('branch')
         # Make sure we have local repository
         updater = Updater(
             repository_path,
             git_binary=config['git_binary'],
             branch=branch,
             log=log,
             process_manager=self.process_manager,
             working_directory=node_test_suite.working_directory,
             url=vcs_repository["url"])
         updater.checkout()
         revision = "-".join(updater.getRevision())
         full_revision_list.append('%s=%s' % (repository_id, revision))
     node_test_suite.revision = ','.join(full_revision_list)
     return full_revision_list
Example No. 5
 def getAndUpdateFullRevisionList(self, node_test_suite):
   full_revision_list = []
   config = self.config
   log = self.log
   for vcs_repository in node_test_suite.vcs_repository_list:
     repository_path = vcs_repository['repository_path']
     repository_id = vcs_repository['repository_id']
     if not os.path.exists(repository_path):
       parameter_list = [config['git_binary'], 'clone',
                         vcs_repository['url']]
       if vcs_repository.get('branch') is not None:
         parameter_list.extend(['-b',vcs_repository.get('branch')])
       parameter_list.append(repository_path)
       log(subprocess.check_output(parameter_list, stderr=subprocess.STDOUT))
     # Make sure we have local repository
     updater = Updater(repository_path, git_binary=config['git_binary'],
        log=log, process_manager=self.process_manager)
     updater.checkout()
     revision = "-".join(updater.getRevision())
     full_revision_list.append('%s=%s' % (repository_id, revision))
   node_test_suite.revision = ','.join(full_revision_list)
   return full_revision_list
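
Example No. 5 also performs the initial clone itself when the working copy is missing. With made-up values, the parameter_list it builds corresponds to the following invocation:

# Hypothetical values; mirrors the order built above:
# [git_binary, 'clone', url, '-b', branch, repository_path]
parameter_list = ['/usr/bin/git', 'clone',
                  'https://git.example.org/project.git',
                  '-b', 'master',
                  '/srv/testnode/working/project']
# i.e.  git clone <url> -b <branch> <repository_path>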
Example No. 6
 def updateRevisionList(self, node_test_suite):
     config = self.config
     log = self.log
     revision_list = []
     try:
         for vcs_repository in node_test_suite.vcs_repository_list:
             repository_path = vcs_repository["repository_path"]
             repository_id = vcs_repository["repository_id"]
             branch = vcs_repository.get("branch")
             # Make sure we have local repository
             updater = Updater(
                 repository_path,
                 git_binary=config["git_binary"],
                 branch=branch,
                 log=log,
                 process_manager=self.process_manager,
                 working_directory=node_test_suite.working_directory,
                 url=vcs_repository["url"],
             )
             updater.checkout()
             revision_list.append((repository_id, updater.getRevision()))
     except SubprocessError, e:
         log("Error while getting repository, ignoring this test suite", exc_info=1)
         return False
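
Unlike the other variants, Example No. 6 collects raw (repository_id, revision) pairs rather than the joined 'id=revision' string. Assuming, as the '-'.join calls in the other examples imply, that getRevision() returns a tuple of strings, the pair form maps onto the comma-separated string like this (all values made up):

revision_list = [('project1', ('60000', '3b1a2c9')),
                 ('project2', ('9000', '7fc2d41'))]
revision_string = ','.join('%s=%s' % (repository_id, '-'.join(revision))
                           for repository_id, revision in revision_list)
# revision_string == 'project1=60000-3b1a2c9,project2=9000-7fc2d41'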
Example No. 7
  def run(self):
    log = self.log
    process_manager = self.process_manager
    config = self.config
    slapgrid = None
    previous_revision = None

    run_software = True
    # Write our own software.cfg to use the local repository
    custom_profile_path = os.path.join(config['working_directory'], 'software.cfg')
    config['custom_profile_path'] = custom_profile_path
    vcs_repository_list = config['vcs_repository_list']
    profile_content = ''
    assert len(vcs_repository_list), "we must have at least one repository"
    try:
      # BBB: Accept global profile_path, which is the same as setting it for the
      # first configured repository.
      profile_path = config.pop(PROFILE_PATH_KEY)
    except KeyError:
      pass
    else:
      vcs_repository_list[0][PROFILE_PATH_KEY] = profile_path
    profile_path_count = 0
    for vcs_repository in vcs_repository_list:
      url = vcs_repository['url']
      buildout_section_id = vcs_repository.get('buildout_section_id', None)
      repository_id = buildout_section_id or \
                                    url.split('/')[-1].split('.')[0]
      repository_path = os.path.join(config['working_directory'],repository_id)
      vcs_repository['repository_id'] = repository_id
      vcs_repository['repository_path'] = repository_path
      try:
        profile_path = vcs_repository[PROFILE_PATH_KEY]
      except KeyError:
        pass
      else:
        profile_path_count += 1
        if profile_path_count > 1:
          raise ValueError(PROFILE_PATH_KEY + ' defined more than once')
        profile_content = """
[buildout]
extends = %(software_config_path)s
""" %  {'software_config_path': os.path.join(repository_path, profile_path)}

      if not(buildout_section_id is None):
        profile_content += """
[%(buildout_section_id)s]
repository = %(repository_path)s
branch = %(branch)s
""" %  {'buildout_section_id': buildout_section_id,
        'repository_path' : repository_path,
        'branch' : vcs_repository.get('branch','master')}

    if not profile_path_count:
      raise ValueError(PROFILE_PATH_KEY + ' not defined')
    custom_profile = open(custom_profile_path, 'w')
    custom_profile.write(profile_content)
    custom_profile.close()
    config['repository_path'] = repository_path
    sys.path.append(repository_path)
    test_suite_title = config['test_suite_title'] or config['test_suite']

    retry = False
    retry_software_count = 0
    same_revision_count = 0
    try:
      while True:
        remote_test_result_needs_cleanup = False
        remote_logger = None
        remote_logger_thread = None
        try:
          # kill processes from previous loop if any
          process_manager.killPreviousRun()
          full_revision_list = []
          # Make sure we have local repository
          for vcs_repository in vcs_repository_list:
            repository_path = vcs_repository['repository_path']
            repository_id = vcs_repository['repository_id']
            if not os.path.exists(repository_path):
              parameter_list = [config['git_binary'], 'clone',
                                vcs_repository['url']]
              if vcs_repository.get('branch') is not None:
                parameter_list.extend(['-b',vcs_repository.get('branch')])
              parameter_list.append(repository_path)
              log(subprocess.check_output(parameter_list, stderr=subprocess.STDOUT))
            # Make sure we have local repository
            updater = Updater(repository_path, git_binary=config['git_binary'],
              log=log, process_manager=process_manager)
            updater.checkout()
            revision = "-".join(updater.getRevision())
            full_revision_list.append('%s=%s' % (repository_id, revision))
          revision = ','.join(full_revision_list)
          if previous_revision == revision:
            log('Same Revision')
            same_revision_count += 1
            if not(retry) and same_revision_count <= 2:
              log('Sleeping a bit since same revision')
              time.sleep(DEFAULT_SLEEP_TIMEOUT)
              continue
            same_revision_count = 0
            log('Retrying install or checking if previous test was cancelled')
          retry = False
          previous_revision = revision
          portal_url = config['test_suite_master_url']
          test_result_path = None
          test_result = (test_result_path, revision)
          if portal_url:
            if portal_url[-1] != '/':
              portal_url += '/'
            portal = xmlrpclib.ServerProxy("%s%s" %
                        (portal_url, 'portal_task_distribution'),
                        allow_none=1)
            assert safeRpcCall(log, portal, "getProtocolRevision", True) == 1
            test_result = safeRpcCall(log, portal, "createTestResult", True,
              config['test_suite'], revision, [],
              False, test_suite_title,
              config['test_node_title'], config['project_title'])
            remote_test_result_needs_cleanup = True
            
          log("testnode, test_result : %r" % (test_result, ))
          if test_result:
            test_result_path, test_revision = test_result
            if config.get('log_file'):
              remote_logger = RemoteLogger(log, config['log_file'],
                                           config['test_node_title'],
                                           process_manager)
              remote_logger.portal = portal
              remote_logger.test_result_path = test_result_path
              remote_logger_thread = threading.Thread(target=remote_logger)
              remote_logger_thread.start()
            if revision != test_revision:
              previous_revision = test_revision
              log('Disagreement on tested revision, checking out:')
              for i, repository_revision in enumerate(test_revision.split(',')):
                vcs_repository = vcs_repository_list[i]
                repository_path = vcs_repository['repository_path']
                revision = repository_revision.rsplit('-', 1)[1]
                # other testnodes on other boxes are already ready to test another
                # revision
                log('  %s at %s' % (repository_path, revision))
                updater = Updater(repository_path, git_binary=config['git_binary'],
                                  revision=revision, log=log,
                                  process_manager=process_manager)
                updater.checkout()

            # Now prepare the installation of SlapOS and create instance
            slapproxy_log = os.path.join(config['log_directory'],
                'slapproxy.log')
            log('Configured slapproxy log to %r' % slapproxy_log)
            log('testnode, retry_software_count : %r' % retry_software_count)
            slapos_controler = SlapOSControler.SlapOSControler(config,
              log=log, slapproxy_log=slapproxy_log, process_manager=process_manager,
              reset_software=(retry_software_count>0 and retry_software_count%10 == 0))
            for method_name in ("runSoftwareRelease", "runComputerPartition",):
              slapos_method = getattr(slapos_controler, method_name)
              status_dict = slapos_method(config,
                environment=config['environment'],
                )
              if status_dict['status_code'] != 0:
                retry = True
                retry_software_count += 1
                raise SubprocessError(status_dict)
              else:
                retry_software_count = 0
            # Give some time so computer partitions may start
            # as partitions can be of any kind, we have no reliable way (and
            # likely never will have one) to check whether they are up or not ...
            time.sleep(20)

            run_test_suite_path_list = glob.glob("%s/*/bin/runTestSuite" %config['instance_root'])
            if not len(run_test_suite_path_list):
              raise ValueError('No runTestSuite provided in installed partitions.')
            run_test_suite_path = run_test_suite_path_list[0]
            run_test_suite_revision = revision
            if isinstance(revision, tuple):
              revision = ','.join(revision)
            # Deal with Shebang size limitation
            line = open(run_test_suite_path, 'r').readline()
            invocation_list = []
            if line[:2] == '#!':
              invocation_list = line[2:].split()
            invocation_list.extend([run_test_suite_path,
                                    '--test_suite', config['test_suite'],
                                    '--revision', revision,
                                    '--test_suite_title', test_suite_title,
                                    '--node_quantity', config['node_quantity'],
                                    '--master_url', portal_url])
            bt5_path_list = config.get("bt5_path")
            if bt5_path_list not in ('', None,):
              invocation_list.extend(["--bt5_path", bt5_path_list])
            # From this point, test runner becomes responsible for updating test
            # result. We only do cleanup if the test runner itself is not able
            # to run.
            process_manager.spawn(*invocation_list,
              cwd=config['test_suite_directory'],
              log_prefix='runTestSuite', get_output=False)
            if remote_logger:
              remote_logger.quit = True
              remote_logger_thread.join()
        except SubprocessError, e:
          log("SubprocessError", exc_info=sys.exc_info())
          if remote_logger:
            remote_logger.finish = True
            remote_logger_thread.join()
          if remote_test_result_needs_cleanup:
            safeRpcCall(log, portal, "reportTaskFailure", True,
              test_result_path, e.status_dict, config['test_node_title'])
          log("SubprocessError, going to sleep %s" % DEFAULT_SLEEP_TIMEOUT)
          time.sleep(DEFAULT_SLEEP_TIMEOUT)
          continue
        except CancellationError, e:
          log("CancellationError", exc_info=sys.exc_info())
          process_manager.under_cancellation = False
          retry = True
          continue
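
The block above marked '# Deal with Shebang size limitation' exists because the kernel caps the length of a script's '#!' line; by reading the interpreter from that first line and putting it explicitly at the front of the command, the runner never executes through the shebang itself. A standalone sketch of just that step (the path is hypothetical):

import subprocess

# Hypothetical path to a generated script whose '#!' line may be very long.
run_test_suite_path = '/srv/testnode/instance/part0/bin/runTestSuite'
with open(run_test_suite_path) as f:
  first_line = f.readline()
invocation_list = []
if first_line[:2] == '#!':
  # e.g. '#!/very/long/path/to/python2.7 -S' -> interpreter plus its options
  invocation_list = first_line[2:].split()
invocation_list.append(run_test_suite_path)
subprocess.call(invocation_list)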
Example No. 8
def run(args):
    config = args[0]
    slapgrid = None
    supervisord_pid_file = os.path.join(config["instance_root"], "var", "run", "supervisord.pid")
    subprocess.check_call([config["git_binary"], "config", "--global", "http.sslVerify", "false"])
    previous_revision = None

    run_software = True
    # Write our own software.cfg to use the local repository
    custom_profile_path = os.path.join(config["working_directory"], "software.cfg")
    config["custom_profile_path"] = custom_profile_path
    vcs_repository_list = config["vcs_repository_list"]
    profile_content = None
    assert len(vcs_repository_list), "we must have at least one repository"
    for vcs_repository in vcs_repository_list:
        url = vcs_repository["url"]
        buildout_section_id = vcs_repository.get("buildout_section_id", None)
        repository_id = buildout_section_id or url.split("/")[-1].split(".")[0]
        repository_path = os.path.join(config["working_directory"], repository_id)
        vcs_repository["repository_id"] = repository_id
        vcs_repository["repository_path"] = repository_path
        if profile_content is None:
            profile_content = """
[buildout]
extends = %(software_config_path)s
""" % {
                "software_config_path": os.path.join(repository_path, config["profile_path"])
            }
        if not (buildout_section_id is None):
            profile_content += """
[%(buildout_section_id)s]
repository = %(repository_path)s
branch = %(branch)s
""" % {
                "buildout_section_id": buildout_section_id,
                "repository_path": repository_path,
                "branch": vcs_repository.get("branch", "master"),
            }

    custom_profile = open(custom_profile_path, "w")
    custom_profile.write(profile_content)
    custom_profile.close()
    config["repository_path"] = repository_path
    sys.path.append(repository_path)
    test_suite_title = config["test_suite_title"] or config["test_suite"]

    retry_software = False
    try:
        while True:
            # kill processes from previous loop if any
            try:
                for pgpid in process_group_pid_set:
                    try:
                        os.killpg(pgpid, signal.SIGTERM)
                    except:
                        pass
                process_group_pid_set.clear()
                full_revision_list = []
                # Make sure we have local repository
                for vcs_repository in vcs_repository_list:
                    repository_path = vcs_repository["repository_path"]
                    repository_id = vcs_repository["repository_id"]
                    if not os.path.exists(repository_path):
                        parameter_list = [config["git_binary"], "clone", vcs_repository["url"]]
                        if vcs_repository.get("branch") is not None:
                            parameter_list.extend(["-b", vcs_repository.get("branch")])
                        parameter_list.append(repository_path)
                        subprocess.check_call(parameter_list)
                    # Make sure we have local repository
                    updater = Updater(repository_path, git_binary=config["git_binary"])
                    updater.checkout()
                    revision = "-".join(updater.getRevision())
                    full_revision_list.append("%s=%s" % (repository_id, revision))
                revision = ",".join(full_revision_list)
                if previous_revision == revision:
                    time.sleep(120)
                    if not (retry_software):
                        continue
                retry_software = False
                previous_revision = revision

                print config
                portal_url = config["test_suite_master_url"]
                test_result_path = None
                test_result = (test_result_path, revision)
                if portal_url:
                    if portal_url[-1] != "/":
                        portal_url += "/"
                    portal = xmlrpclib.ServerProxy("%s%s" % (portal_url, "portal_task_distribution"), allow_none=1)
                    master = portal.portal_task_distribution
                    assert master.getProtocolRevision() == 1
                    test_result = safeRpcCall(
                        master.createTestResult,
                        config["test_suite"],
                        revision,
                        [],
                        False,
                        test_suite_title,
                        config["test_node_title"],
                        config["project_title"],
                    )
                print "testnode, test_result : %r" % (test_result,)
                if test_result:
                    test_result_path, test_revision = test_result
                    if revision != test_revision:
                        for i, repository_revision in enumerate(test_revision.split(",")):
                            vcs_repository = vcs_repository_list[i]
                            repository_path = vcs_repository["repository_path"]
                            # other testnodes on other boxes are already ready to test another
                            # revision
                            updater = Updater(
                                repository_path,
                                git_binary=config["git_binary"],
                                revision=repository_revision.split("-")[1],
                            )
                            updater.checkout()

                    # Now prepare the installation of SlapOS and create instance
                    slapos_controler = SlapOSControler(config, process_group_pid_set=process_group_pid_set)
                    for method_name in ("runSoftwareRelease", "runComputerPartition"):
                        stdout, stderr = getInputOutputFileList(config, method_name)
                        slapos_method = getattr(slapos_controler, method_name)
                        status_dict = slapos_method(
                            config,
                            environment=config["environment"],
                            process_group_pid_set=process_group_pid_set,
                            stdout=stdout,
                            stderr=stderr,
                        )
                        if status_dict["status_code"] != 0:
                            break
                    if status_dict["status_code"] != 0:
                        safeRpcCall(master.reportTaskFailure, test_result_path, status_dict, config["test_node_title"])
                        retry_software = True
                        continue

                    partition_path = os.path.join(config["instance_root"], config["partition_reference"])
                    run_test_suite_path = os.path.join(partition_path, "bin", "runTestSuite")
                    if not os.path.exists(run_test_suite_path):
                        raise ValueError("No %r provided" % run_test_suite_path)

                    run_test_suite_revision = revision
                    if isinstance(revision, tuple):
                        revision = ",".join(revision)
                    # Deal with Shebang size limitation
                    file_object = open(run_test_suite_path, "r")
                    line = file_object.readline()
                    file_object.close()
                    invocation_list = []
                    if line[:2] == "#!":
                        invocation_list = line[2:].split()
                    invocation_list.extend(
                        [
                            run_test_suite_path,
                            "--test_suite",
                            config["test_suite"],
                            "--revision",
                            revision,
                            "--test_suite_title",
                            test_suite_title,
                            "--node_quantity",
                            config["node_quantity"],
                            "--master_url",
                            config["test_suite_master_url"],
                        ]
                    )
                    run_test_suite = subprocess.Popen(invocation_list)
                    process_group_pid_set.add(run_test_suite.pid)
                    run_test_suite.wait()
                    process_group_pid_set.remove(run_test_suite.pid)
            except SubprocessError:
                time.sleep(120)
                continue

    finally:
        # Nice way to kill *everything* generated by run process -- process
        # groups working only in POSIX compliant systems
        # Exceptions are swallowed during cleanup phase
        print "going to kill %r" % (process_group_pid_set,)
        for pgpid in process_group_pid_set:
            try:
                os.killpg(pgpid, signal.SIGTERM)
            except:
                pass
        try:
            if os.path.exists(supervisord_pid_file):
                os.kill(int(open(supervisord_pid_file).read().strip()), signal.SIGTERM)
        except:
            pass
Example No. 9
def run(config):
  log = config['logger']
  slapgrid = None
  global supervisord_pid_file
  supervisord_pid_file = os.path.join(config['instance_root'], 'var', 'run',
        'supervisord.pid')
  previous_revision = None

  run_software = True
  # Write our own software.cfg to use the local repository
  custom_profile_path = os.path.join(config['working_directory'], 'software.cfg')
  config['custom_profile_path'] = custom_profile_path
  vcs_repository_list = config['vcs_repository_list']
  profile_content = None
  assert len(vcs_repository_list), "we must have at least one repository"
  try:
    # BBB: Accept global profile_path, which is the same as setting it for the
    # first configured repository.
    profile_path = config.pop(PROFILE_PATH_KEY)
  except KeyError:
    pass
  else:
    vcs_repository_list[0][PROFILE_PATH_KEY] = profile_path
  for vcs_repository in vcs_repository_list:
    url = vcs_repository['url']
    buildout_section_id = vcs_repository.get('buildout_section_id', None)
    repository_id = buildout_section_id or \
                                  url.split('/')[-1].split('.')[0]
    repository_path = os.path.join(config['working_directory'],repository_id)
    vcs_repository['repository_id'] = repository_id
    vcs_repository['repository_path'] = repository_path
    try:
      profile_path = vcs_repository[PROFILE_PATH_KEY]
    except KeyError:
      pass
    else:
      if profile_content is not None:
        raise ValueError(PROFILE_PATH_KEY + ' defined more than once')
      profile_content = """
[buildout]
extends = %(software_config_path)s
""" %  {'software_config_path': os.path.join(repository_path, profile_path)}
    if not(buildout_section_id is None):
      profile_content += """
[%(buildout_section_id)s]
repository = %(repository_path)s
branch = %(branch)s
""" %  {'buildout_section_id': buildout_section_id,
        'repository_path' : repository_path,
        'branch' : vcs_repository.get('branch','master')}

  if profile_content is None:
    raise ValueError(PROFILE_PATH_KEY + ' not defined')
  custom_profile = open(custom_profile_path, 'w')
  custom_profile.write(profile_content)
  custom_profile.close()
  config['repository_path'] = repository_path
  sys.path.append(repository_path)
  test_suite_title = config['test_suite_title'] or config['test_suite']

  retry_software = False
  try:
    while True:
      remote_test_result_needs_cleanup = False
      # kill processes from previous loop if any
      try:
        killPreviousRun()
        process_group_pid_set.clear()
        full_revision_list = []
        # Make sure we have local repository
        for vcs_repository in vcs_repository_list:
          repository_path = vcs_repository['repository_path']
          repository_id = vcs_repository['repository_id']
          if not os.path.exists(repository_path):
            parameter_list = [config['git_binary'], 'clone',
                              vcs_repository['url']]
            if vcs_repository.get('branch') is not None:
              parameter_list.extend(['-b',vcs_repository.get('branch')])
            parameter_list.append(repository_path)
            log(subprocess.check_output(parameter_list, stderr=subprocess.STDOUT))
          # Make sure we have local repository
          updater = Updater(repository_path, git_binary=config['git_binary'],
            log=log, realtime_output=False)
          updater.checkout()
          revision = "-".join(updater.getRevision())
          full_revision_list.append('%s=%s' % (repository_id, revision))
        revision = ','.join(full_revision_list)
        if previous_revision == revision:
          log('Sleeping a bit')
          time.sleep(120)
          if not(retry_software):
            continue
          log('Retrying install')
        retry_software = False
        previous_revision = revision

        portal_url = config['test_suite_master_url']
        test_result_path = None
        test_result = (test_result_path, revision)
        if portal_url:
          if portal_url[-1] != '/':
            portal_url += '/'
          portal = xmlrpclib.ServerProxy("%s%s" %
                      (portal_url, 'portal_task_distribution'),
                      allow_none=1)
          master = portal.portal_task_distribution
          assert safeRpcCall(master.getProtocolRevision) == 1
          test_result = safeRpcCall(master.createTestResult,
            config['test_suite'], revision, [],
            False, test_suite_title,
            config['test_node_title'], config['project_title'])
          remote_test_result_needs_cleanup = True
        log("testnode, test_result : %r" % (test_result, ))
        if test_result:
          test_result_path, test_revision = test_result
          if revision != test_revision:
            log('Disagreement on tested revision, checking out:')
            for i, repository_revision in enumerate(test_revision.split(',')):
              vcs_repository = vcs_repository_list[i]
              repository_path = vcs_repository['repository_path']
              revision = repository_revision.split('-')[1]
              # other testnodes on other boxes are already ready to test another
              # revision
              log('  %s at %s' % (repository_path, revision))
              updater = Updater(repository_path, git_binary=config['git_binary'],
                                revision=revision, log=log,
                                realtime_output=False)
              updater.checkout()

          # Now prepare the installation of SlapOS and create instance
          slapproxy_log = os.path.join(config['log_directory'],
              'slapproxy.log')
          log('Configured slapproxy log to %r' % slapproxy_log)
          slapos_controler = SlapOSControler.SlapOSControler(config,
            process_group_pid_set=process_group_pid_set, log=log,
            slapproxy_log=slapproxy_log)
          for method_name in ("runSoftwareRelease", "runComputerPartition"):
            stdout, stderr = getInputOutputFileList(config, method_name)
            slapos_method = getattr(slapos_controler, method_name)
            status_dict = slapos_method(config,
              environment=config['environment'],
              process_group_pid_set=process_group_pid_set,
              stdout=stdout, stderr=stderr
              )
            if status_dict['status_code'] != 0:
              retry_software = True
              raise SubprocessError(status_dict)

          run_test_suite_path = config['runTestSuite']
          if not os.path.exists(run_test_suite_path):
            raise SubprocessError({
              'command': 'os.path.exists(run_test_suite_path)',
              'status_code': 1,
              'stdout': '',
              'stderr': 'File does not exist: %r' % (run_test_suite_path, ),
            })

          run_test_suite_revision = revision
          if isinstance(revision, tuple):
            revision = ','.join(revision)
          # Deal with Shebang size limitation
          line = open(run_test_suite_path, 'r').readline()
          invocation_list = []
          if line[:2] == '#!':
            invocation_list = line[2:].split()
          invocation_list.extend([run_test_suite_path,
                                  '--test_suite', config['test_suite'],
                                  '--revision', revision,
                                  '--test_suite_title', test_suite_title,
                                  '--node_quantity', config['node_quantity'],
                                  '--master_url', config['test_suite_master_url']])
          # From this point, test runner becomes responsible for updating test
          # result.
          # XXX: is it good for all cases (eg: test runner fails too early for
          # any custom code to pick the failure up and react ?)
          remote_test_result_needs_cleanup = False
          run_test_suite = subprocess.Popen(invocation_list,
            preexec_fn=os.setsid, cwd=config['test_suite_directory'],
            stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
          process_group_pid_set.add(run_test_suite.pid)
          log(run_test_suite.communicate()[0])
          process_group_pid_set.remove(run_test_suite.pid)
      except SubprocessError, e:
        if remote_test_result_needs_cleanup:
          safeRpcCall(master.reportTaskFailure,
            test_result_path, e.status_dict, config['test_node_title'])
        time.sleep(120)
        continue

  finally:
    # Nice way to kill *everything* generated by run process -- process
    # groups working only in POSIX compliant systems
    # Exceptions are swallowed during cleanup phase
    killPreviousRun()
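
Both full run() examples clean up by killing process groups: Example No. 9 starts the test-suite runner in its own session via preexec_fn=os.setsid, so os.killpg on the child's pid later takes down everything it spawned. A minimal, self-contained sketch of that pattern, with a hypothetical long-running command standing in for runTestSuite:

import os
import signal
import subprocess

# Start the child as the leader of a new session/process group.
child = subprocess.Popen(['sleep', '1000'], preexec_fn=os.setsid)
process_group_pid_set = set([child.pid])

# Cleanup: for a session leader, the process-group id equals its pid,
# so this signals the child and every process it forked.
for pgpid in process_group_pid_set:
  try:
    os.killpg(pgpid, signal.SIGTERM)
  except OSError:
    pass
process_group_pid_set.clear()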