Code example #1
import logging
import os

import google.auth

# AirflowClient, trigger_tf_k8s_tests_dag, wait_for_tf_k8s_tests, and the
# prow module are helpers defined elsewhere in the surrounding repository.

def _run_dag_and_wait():
    """Run and wait for the DAG to finish.

  Returns:
    state: The end state of the DAG.
  """
    artifacts_path = os.path.join(prow.get_gcs_output(), "artifacts")
    logging.info("Artifacts will be saved to: %s", artifacts_path)

    conf = {
        "PULL_NUMBER": os.getenv("PULL_NUMBER", ""),
        "PULL_PULL_SHA": os.getenv("PULL_PULL_SHA", ""),
        "PULL_BASE_SHA": os.getenv("PULL_BASE_SHA", ""),
        "ARTIFACTS_PATH": artifacts_path,
    }

    # TODO(jlewi): We should probably configure Ingress and IAP for the
    # Airflow server and use a static IP.
    PROW_K8S_MASTER = "35.202.163.166"
    base_url = ("https://{0}/api/v1/proxy/namespaces/default/services"
                "/airflow:80").format(PROW_K8S_MASTER)

    credentials, _ = google.auth.default(
        scopes=["https://www.googleapis.com/auth/cloud-platform"])

    # verify=False disables TLS certificate verification because we reach the
    # server through the bare master IP above (see the TODO about Ingress/IAP).
    client = AirflowClient(base_url, credentials, verify=False)

    run_id, _ = trigger_tf_k8s_tests_dag(client, conf)

    state = wait_for_tf_k8s_tests(client, run_id)
    return state
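
The AirflowClient, trigger_tf_k8s_tests_dag, and wait_for_tf_k8s_tests helpers above are defined elsewhere in the repository and are not shown here. As a rough, hypothetical illustration of the waiting half, here is a minimal polling sketch; the get_dag_run_state method, the state names, and the timeout values are assumptions, not the repository's actual API.

import logging
import time


def wait_for_dag_run(client, dag_id, run_id,
                     timeout_seconds=3600, polling_interval=30):
    """Poll the Airflow server until the DAG run reaches a terminal state."""
    deadline = time.time() + timeout_seconds
    while time.time() < deadline:
        # get_dag_run_state is an assumed method standing in for whatever
        # status call the real AirflowClient exposes.
        state = client.get_dag_run_state(dag_id, run_id)
        logging.info("DAG %s run %s is in state %s", dag_id, run_id, state)
        if state in ("success", "failed"):
            return state
        time.sleep(polling_interval)
    raise RuntimeError("Timed out waiting for DAG run {0}".format(run_id))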
Code example #2
import logging
import os
import sys
import tempfile

from google.cloud import storage

# The prow and util modules, like _print_debug_info and _run_dag_and_wait
# (shown in Code example #1), are defined elsewhere in the repository.

def main():
    """Trigger Airflow pipelines.

  This main program is intended to be triggered by PROW and used to launch
  The Airflow pipelines comprising our test and release pipelines.
  """
    _print_debug_info()
    # TODO(jlewi): Need to upload various artifacts for gubernator
    # https://github.com/kubernetes/test-infra/tree/master/gubernator.
    # e.g. started.json.
    test_dir = tempfile.mkdtemp(prefix="tmpTfCrdTest")

    # Setup a logging file handler. This file will be the build log.
    root_logger = logging.getLogger()

    build_log = os.path.join(test_dir, "build-log.txt")
    file_handler = logging.FileHandler(build_log)
    # We need to explicitly set the formatter because the file handler will
    # not pick up the root logger's basicConfig settings.
    formatter = logging.Formatter(
        fmt=("%(levelname)s|%(asctime)s"
             "|%(pathname)s|%(lineno)d| %(message)s"),
        datefmt="%Y-%m-%dT%H:%M:%S")
    file_handler.setFormatter(formatter)
    root_logger.addHandler(file_handler)

    logging.info("test_dir: %s", test_dir)

    # Activate the service account for gcloud. If no key file is provided,
    # the caller must already be logged in.
    if os.getenv("GOOGLE_APPLICATION_CREDENTIALS"):
        logging.info("GOOGLE_APPLICATION_CREDENTIALS=%s",
                     os.getenv("GOOGLE_APPLICATION_CREDENTIALS"))
        util.run([
            "gcloud", "auth",
            "activate-service-account", "--key-file={0}".format(
                os.getenv("GOOGLE_APPLICATION_CREDENTIALS"))
        ])
    job_name = os.getenv("JOB_NAME", "")
    build_number = os.getenv("BUILD_NUMBER")
    pull_number = os.getenv("PULL_NUMBER")
    output_dir = prow.get_gcs_output()

    sha = prow.get_commit_from_env()
    gcs_client = storage.Client()
    # Record the start of the run (e.g. started.json for Gubernator).
    prow.create_started(gcs_client, output_dir, sha)

    symlink = prow.get_symlink_output(pull_number, job_name, build_number)
    if symlink:
        prow.create_symlink(gcs_client, symlink, output_dir)

    dag_state = _run_dag_and_wait()

    dag_success = dag_state == "success"

    artifacts_dir = os.path.join(output_dir, "artifacts")
    junit_files = [
        "junit_e2e.xml", "junit_gpu-tests.xml", "junit_pycheckslint.xml",
        "junit_pycheckstest.xml", "junit_setupcluster.xml"
    ]
    # The run only counts as a success if none of the JUnit reports written
    # by the DAG contain errors.
    test_success = prow.check_no_errors(gcs_client, artifacts_dir, junit_files)

    success = test_success and dag_success

    if success:
        job_name = os.getenv("JOB_NAME", "unknown")
        prow.create_latest(gcs_client, job_name, sha)

    prow.create_finished(gcs_client, output_dir, success)

    file_handler.flush()
    prow.upload_outputs(gcs_client, output_dir, build_log)

    if not success:
        # Exit with a non-zero exit code by raising an exception.
        logging.error("One or more test steps failed; exiting with a "
                      "non-zero exit code.")
        sys.exit(1)
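
The excerpt does not include the script's entry point. A minimal sketch of one, assuming the module is invoked directly by Prow, would configure the root logger (the basicConfig that the file-handler comment above refers to) with the same format string and then call main():

if __name__ == "__main__":
    logging.basicConfig(
        level=logging.INFO,
        format=("%(levelname)s|%(asctime)s"
                "|%(pathname)s|%(lineno)d| %(message)s"),
        datefmt="%Y-%m-%dT%H:%M:%S")
    main()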