  def run_test_on_jita(
      self, cluster, commit_id=None, build_url=None, job_ids_list=None,
      index=None, hypervisor=None, hypervisor_version=None, branch=None,
      input_type="build_url", build_type="opt", test=None, username=None,
      test_upgrade=None, base_commit=None):

    # NEEDS CHANGE
    if not hypervisor:
      hypervisor = FLAGS.hypervisor
    if not hypervisor_version:
      hypervisor_version = FLAGS.hypervisor_version
    if not branch:
      branch = FLAGS.branch
    if not test:
      test = FLAGS.test_name
    if not username:
      username = USERNAME
    test_framework = FLAGS.test_framework

    if test_upgrade is None:
      test_upgrade = FLAGS.upgrade_test
      base_commit = FLAGS.upgrade_base_commit

    if test_upgrade:
      phoenix_commit = base_commit
    else:
      phoenix_commit = commit_id

    if not self.image_cluster(cluster, phoenix_commit, build_url, username,
                              hypervisor, hypervisor_version, branch,
                              build_type):
      job_ids_list[index] = False
      ERROR("Imaging failed on cluster %s" % cluster)
      return False

    jita = JitaRest()
    jita_test = jita.get_test(test, test_framework)
    if test_upgrade:
      svm_installer_url = self.get_svm_installer(commit_id, branch, build_type)
      if not svm_installer_url:
        ERROR("Could not get an installer for commit id %s" % commit_id)
        job_ids_list[index] = False
        return False
      INFO("Found build %s" % svm_installer_url)

      args_map = {"target_rel": svm_installer_url}
    else:
      args_map = None

    INFO("Going to submit task for commit id: %s" % commit_id)
    response_obj = jita.submit_task(
      cluster, commit_id, branch, [jita_test.get_id()],
      test_framework=test_framework, label="auto_bisector", args_map=args_map)

    if response_obj.is_success():
      job_ids_list[index] = response_obj.get_task_id()
      return True
    else:
      ERROR("Task submit failed: %s" % response_obj.get_message())
      return False
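
  # Usage sketch (not from the original source): run_test_on_jita() writes its
  # result into job_ids_list[index], which suggests it is meant to be driven
  # from one worker thread per cluster. The cluster names and commit id below
  # are hypothetical.
  #
  #   import threading
  #
  #   clusters = ["cluster-a", "cluster-b"]
  #   job_ids_list = [None] * len(clusters)
  #   threads = [threading.Thread(target=self.run_test_on_jita,
  #                               args=(cluster,),
  #                               kwargs={"commit_id": "deadbeef",
  #                                       "job_ids_list": job_ids_list,
  #                                       "index": idx})
  #              for idx, cluster in enumerate(clusters)]
  #   for t in threads:
  #     t.start()
  #   for t in threads:
  #     t.join()
  #   # Each slot now holds a Jita task id, or False if imaging/submit failed.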

  def get_svm_installer(self, commit_id, branch, build_type):
    """
    Get the build location for the given commit id and branch.
    """
    INFO("Getting installer for commit %s" % commit_id)
    filer = FLAGS.lab
    jita = JitaRest()
    build_url = jita.get_build_url(commit_id, build_type, filer)
    if build_url:
      INFO("Found build %s" % build_url)
      return build_url

    INFO("Checking if build is in progress")
    build_task_id = jita.check_build_in_progress(commit_id, branch, build_type,
                                                 filer)

    status = None
    if build_task_id:
      status = jita.get_build_task(build_task_id).get_status()

    if not build_task_id or status in ["failed", "aborted", "skipped"]:
      INFO("Could not find an existing build. Hence requesting build")
      res = jita.request_build(commit_id, branch, build_type, filer)
      if res.get_status():
        build_task_id = res.get_build_task_id()
      else:
        ERROR("Build request for commit id %s failed" % commit_id)
        return False

    if self.wait_for_build_to_complete(build_task_id):
      return jita.get_build_url(commit_id, build_type, filer)

    return False

  def wait_for_build_to_complete(self, build_task_id, timeout=7200,
                                 poll_period=180):
    """
    Poll until the build task completes or the timeout (in seconds) expires.
    Returns True if the build passed, False otherwise.
    """
    expired_time = 0
    while expired_time < timeout:
      if self.is_build_complete(build_task_id):
        break
      time.sleep(poll_period)
      expired_time += poll_period

    jita = JitaRest()
    status = jita.get_build_task(build_task_id).get_status()
    if status != "passed":
      ERROR("Timed out waiting for build to complete or build failed. Status:"
            " %s" % status)
      return False
    return True
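
  # is_build_complete() is used by wait_for_build_to_complete() above but is
  # not part of this excerpt. A minimal sketch, assuming the JitaRest build
  # task exposes the same status strings used elsewhere in this class:
  #
  #   def is_build_complete(self, build_task_id):
  #     """Return True once the build task has reached a terminal state."""
  #     status = JitaRest().get_build_task(build_task_id).get_status()
  #     return status in ["passed", "failed", "aborted", "skipped"]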
Example #4
def run_test_on_nucloud(test=None,
                        username=None,
                        node_pool=None,
                        branch=None,
                        build_url=None,
                        build_type="opt",
                        preferred_hypervisor=None,
                        commit_id=None):
    """
    Run the given test on nucloud and return the job id if submit succeeds.
    """
    if not test:
        test = FLAGS.test_name
    if not username:
        username = USERNAME
    if not node_pool:
        node_pool = NODE_POOL
    if not branch:
        branch = FLAGS.branch
    if not preferred_hypervisor:
        preferred_hypervisor = FLAGS.hypervisor

    nucloud = NuCloudREST()
    INFO("Submitting job to Nucloud for commit id %s" % commit_id)
    response_obj = nucloud.submit_job(
        [test],
        username,
        node_pool,
        branch=branch,
        build_url=build_url,
        build_type=build_type,
        preferred_hypervisor=preferred_hypervisor,
        commit_id=commit_id,
        skip_email_report=True)

    INFO(response_obj.get_message())
    if response_obj.is_success():
        return response_obj.get_job_id()
    else:
        ERROR("Failed to trigger job in nucloud %s " %
              response_obj.get_message())
        return False
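
# Usage sketch for run_test_on_nucloud() (not from the original source): submit
# the configured test for a hypothetical commit and poll for the verdict with
# the wait_for_test_result() helper used by the main loop further below.
#
#   job_id = run_test_on_nucloud(commit_id="deadbeef", build_type="opt")
#   if job_id and wait_for_test_result(job_id):
#       INFO("Test passed for commit deadbeef")
#   else:
#       ERROR("Test failed or could not be submitted for commit deadbeef")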

  def binary_search_commit(self, commit_search_list, input_type):
    """
    This is the main function that implements the binary search flow through
    the given commit_search_list.

    Params:
        commit_search_list: The list of gerrit change ids or commits to be
                            searched.
        input_type: String to indicate the type of input for the commits.
    """
    INFO("Below are the list of commits being searched:\n%s" %
         pprint.pformat(commit_search_list))

    if len(commit_search_list) == 0:
      ERROR("Length of commit_search_list is 0, this is probably a bug")
      return None

    elif len(commit_search_list) == 1:
      INFO("Returning %s " % commit_search_list[0])
      return commit_search_list[0]

    elif len(commit_search_list) == 2:
      if commit_search_list[0] in self.global_results_dict.keys():
        if self.global_results_dict[commit_search_list[0]]:
          return commit_search_list[1]
        else:
          return commit_search_list[0]
      if commit_search_list[1] in self.global_results_dict.keys():
        if self.global_results_dict[commit_search_list[1]]:
          return commit_search_list[0]

      # We reach here if we don't already have a test result for the first
      # commit (a failing result for the second commit alone does not tell us
      # which of the two commits is the offending one).
      response_map = {}
      response_map[commit_search_list[0]] = self.run_test_on_nucloud(
        build_url=self.commits_dict[commit_search_list[0]],
        commit_id=commit_search_list[0], input_type=input_type)
      self.wait_for_test_results(response_map)

      if self.global_results_dict[commit_search_list[0]]:
        return commit_search_list[1]
      else:
        return commit_search_list[0]

    # Get the number of free clusters from the pool.
    free_nodes = self.get_free_nodes_from_jarvis()
    num_free_clusters = len(free_nodes) / 3
    INFO("free clusters in nucloud: %s " % num_free_clusters)
    search_list_len = len(commit_search_list)

    # If the number of free clusters is less than the number of commits,
    # let us do a binary search.
    if num_free_clusters < search_list_len:
      mid_commit_index = search_list_len / 2
      first_half_mid_commit_index = mid_commit_index / 2
      second_half_mid_commit_index = (
        mid_commit_index + 1 + search_list_len) / 2
      index_list = [mid_commit_index, first_half_mid_commit_index,
                    second_half_mid_commit_index]
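      # Worked example (not from the original source): for a search list of 9
      # commits, mid_commit_index = 9 / 2 = 4,
      # first_half_mid_commit_index = 4 / 2 = 2 and
      # second_half_mid_commit_index = (4 + 1 + 9) / 2 = 7, so the commits at
      # indices 2, 4 and 7 are tested in one round and the recursion below
      # narrows the search to one of [0..2], [2..4], [4..7] or [7..8].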
      INFO("Commits selected for verification are: %s, %s and %s" % (
        commit_search_list[mid_commit_index], commit_search_list[
          first_half_mid_commit_index], commit_search_list[
          second_half_mid_commit_index]))

      response_map = {}
      for index in index_list:
        # If we already have the test result for this commit don't run the test
        # again.
        if commit_search_list[index] in self.global_results_dict.keys():
          continue

        response_map[commit_search_list[index]] = self.run_test_on_nucloud(
          build_url=self.commits_dict[commit_search_list[index]],
          commit_id=commit_search_list[index], input_type=input_type)

      self.wait_for_test_results(response_map)
      INFO("Results from the run are: %s" % self.global_results_dict)

      # Based on the test result, call the function again.
      if not self.global_results_dict[commit_search_list[
        first_half_mid_commit_index]]:
        INFO("Narrowing the search based on the results to commits between "
             "%s and %s" % (commit_search_list[0], commit_search_list[
              first_half_mid_commit_index]))
        return self.binary_search_commit(commit_search_list[
          0:(first_half_mid_commit_index+1)], input_type)
      elif not self.global_results_dict[commit_search_list[mid_commit_index]]:
        INFO("Narrowing the search based on the results to commits between "
             "%s and %s" % (commit_search_list[first_half_mid_commit_index],
                            commit_search_list[mid_commit_index]))
        return self.binary_search_commit(
          commit_search_list[first_half_mid_commit_index:(mid_commit_index+1)],
          input_type)
      elif not self.global_results_dict[commit_search_list[
        second_half_mid_commit_index]]:
        INFO("Narrowing the search based on the results to commits between "
             "%s and %s" % (commit_search_list[mid_commit_index],
                            commit_search_list[second_half_mid_commit_index]))
        return self.binary_search_commit(
          commit_search_list[mid_commit_index:(second_half_mid_commit_index+1)],
          input_type)
      else:
        INFO("Narrowing the search based on the results to commits between "
             "%s and %s" % (commit_search_list[second_half_mid_commit_index],
                            commit_search_list[-1]))
        return self.binary_search_commit(
          commit_search_list[second_half_mid_commit_index:], input_type)
    else:
      # We have enough clusters. Trigger all the runs in parallel.
      response_map = {}
      for commit in commit_search_list:
        if commit in self.global_results_dict.keys():
          continue
        response_map[commit] = self.run_test_on_nucloud(
          build_url=self.commits_dict[commit],
          commit_id=commit, input_type=input_type)

      self.wait_for_test_results(response_map)
      INFO("Results from the run are: %s" % self.global_results_dict)

      for commit in commit_search_list:
        if not self.global_results_dict[commit]:
          INFO("Returning the offending commit %s" % commit)
          return commit
  def transfer_powershell_module(uvm, out_dir, port,
                                 username="******",
                                 password="******"):
    """
    Transfer Powershell modules to the VM at
    ${Env:ProgramFiles}\WindowsPowershell\Modules.
    uvm: Windows VM for which the Powershell module needs to be transferred.
    out_dir: Test runner output directory from where the files should be
             copied.
    port: Http server port from which the Windows VM will be able to pull the
          modules.
    username: Username of the account to access the Windows VM
    password: Password for the above account.
    Returns True on success, False on failure.
    """
    hyperv_ps_modules_dir = (
        "%s/%s" % (top_dir(), WindowsVMCommandUtil.WINDOWS_VM_MODULE_PATH))
    agave_posh_modules = []
    for (root, _, filenames) in os.walk(hyperv_ps_modules_dir):
      for filename in filenames:
        if filename.endswith(".psm1"):
          module_path = os.path.join(root, filename)
          agave_posh_modules.append(module_path)
    for agave_posh_module in agave_posh_modules:
      try:
        module_basename = os.path.basename(agave_posh_module)
        os.symlink(agave_posh_module, "%s/%s" % (out_dir, module_basename))
      except OSError as ose:
        if ose.errno != errno.EEXIST:
          ERROR("Failed to create a symlink to the Hyper-V Agave PS module "
                "in %s: %s" % (out_dir, str(ose)))

    succeeded = True
    for agave_posh_module in agave_posh_modules:
      module_basename = os.path.basename(agave_posh_module)
      module_name_no_extension = module_basename.rsplit(".psm1")[0]
      remote_path = (r"${Env:ProgramFiles}\WindowsPowershell\Modules\%s" %
                     module_name_no_extension)
      INFO("Transferring the NutanixWindows Powershell modules to %s"
           % uvm.ip())

      # Determine the local IP that has reachability to this host.
      sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
      sock.connect((uvm.ip(), 80)) # Port number doesn't matter.
      local_ip = sock.getsockname()[0]
      sock.close()
      install_posh_module_cmd = """
        $ErrorActionPreference = "Stop"
        $dest = New-Item -ItemType Directory -Path {remote_path} -Force
        Invoke-WebRequest http://{ip}:{port}/output/{module_name} `
          -OutFile $dest\\{module_name}""".format(remote_path=remote_path,
                                                  ip=local_ip, port=port,
                                                  module_name=module_basename)
      if FLAGS.agave_verbose:
        INFO("Transferring the %s Powershell module to %s using command: %s" %
             (module_name_no_extension, uvm.ip(), install_posh_module_cmd))
      ret, _, stderr = WindowsVMCommandUtil.execute_command(
          uvm, install_posh_module_cmd, username=username, password=password)
      if ret or stderr.strip():
        ERROR("Failed transferring %s Powershell module to %s: %s" %
              (module_name_no_extension, uvm.ip(), stderr))
        succeeded = False
    return succeeded
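
  # Sketch (not from the original source): transfer_powershell_module() assumes
  # an HTTP server is already exporting the runner's output directory on `port`
  # so the guest can fetch http://<local_ip>:<port>/output/<module>.psm1. One
  # way to provide that with the Python 2 standard library, assuming out_dir is
  # the ".../output" directory referenced in the URL:
  #
  #   import os
  #   import threading
  #   import SimpleHTTPServer
  #   import SocketServer
  #
  #   def serve_output_dir(out_dir, port):
  #     os.chdir(os.path.dirname(out_dir))  # expose out_dir as /output/
  #     handler = SimpleHTTPServer.SimpleHTTPRequestHandler
  #     httpd = SocketServer.TCPServer(("", port), handler)
  #     server_thread = threading.Thread(target=httpd.serve_forever)
  #     server_thread.daemon = True
  #     server_thread.start()
  #     return httpd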
Example #7
        print "%s\nUsage: %s ARGS\n%s" % (err, sys.argv[0], FLAGS)
        sys.exit(1)

    file_template = "run_test_log_%d" % (int(time.time()))
    setup_logging(file_template)

    while True:
        top_dir = os.environ['TOP']
        filename = top_dir + "/" + FLAGS.last_good_commit_file
        with open(filename) as f:
            last_good_commit = f.read()
        latest_commit = get_latest_commit()
        job_id = run_test_on_nucloud(commit_id=latest_commit)
        if not job_id:
            ERROR("Triggering test on Nucloud failed")
            continue
        if not wait_for_test_result(job_id):
            # Trigger auto bisect
            auto_bisect = Auto_Bisect(last_good_commit, latest_commit)
            offending_commit = auto_bisect.find_offending_commit()
            FATAL("Commit %s has broken the test" % offending_commit)
        else:
            with open(filename, "w") as f:
                f.write(latest_commit)
            last_good_commit = latest_commit
            latest_commit = get_latest_commit()
            commits = get_commits_in_range(last_good_commit, latest_commit)
            while len(commits) < 10:
                # Sleep for 10 mins
                time.sleep(600)

  def log_error(self, msg):
    """
    See superclass for documentation.
    """
    ERROR(msg)