Example #1
    def testCreateFinished(self, mock_time):  # pylint: disable=no-self-use
        """Test create finished"""
        mock_time.return_value = 1000
        gcs_client = mock.MagicMock(spec=storage.Client)
        blob = prow.create_finished(gcs_client, "gs://bucket/output", True)

        expected = {
            "timestamp": 1000,
            "result": "SUCCESS",
            "metadata": {},
        }
        blob.upload_from_string.assert_called_once_with(json.dumps(expected))
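
For context, here is a minimal sketch of a create_finished implementation that would satisfy this test (the mock_time argument implies that time.time is patched via mock.patch). The body below is an illustrative assumption, not the actual prow module:

import json
import time


def create_finished(gcs_client, output_dir, success):
    """Hypothetical sketch: upload finished.json under output_dir."""
    finished = {
        "timestamp": int(time.time()),
        "result": "SUCCESS" if success else "FAILURE",
        "metadata": {},
    }
    # output_dir is a GCS path such as gs://bucket/output.
    bucket_name, path = output_dir[len("gs://"):].split("/", 1)
    bucket = gcs_client.get_bucket(bucket_name)
    blob = bucket.blob(path + "/finished.json")
    blob.upload_from_string(json.dumps(finished))
    return blob
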
Example #2
def main():
    """Trigger Airflow pipelines.

    This program is intended to be triggered by Prow and launches the
    Airflow pipelines that make up our test and release process.
    """
    _print_debug_info()
    # TODO(jlewi): Need to upload various artifacts for gubernator
    # https://github.com/kubernetes/test-infra/tree/master/gubernator.
    # e.g. started.json.
    test_dir = tempfile.mkdtemp(prefix="tmpTfCrdTest")

    # Setup a logging file handler. This file will be the build log.
    root_logger = logging.getLogger()

    build_log = os.path.join(test_dir, "build-log.txt")
    file_handler = logging.FileHandler(build_log)
    # Set the formatter explicitly; a handler added by hand does not
    # inherit the format configured via basicConfig.
    formatter = logging.Formatter(
        fmt=("%(levelname)s|%(asctime)s"
             "|%(pathname)s|%(lineno)d| %(message)s"),
        datefmt="%Y-%m-%dT%H:%M:%S")
    file_handler.setFormatter(formatter)
    root_logger.addHandler(file_handler)

    logging.info("test_dir: %s", test_dir)

    # Activate the service account for gcloud.
    # If no service account key is provided, the caller must already be
    # logged in.
    if os.getenv("GOOGLE_APPLICATION_CREDENTIALS"):
        logging.info("GOOGLE_APPLICATION_CREDENTIALS=%s",
                     os.getenv("GOOGLE_APPLICATION_CREDENTIALS"))
        util.run([
            "gcloud", "auth",
            "activate-service-account", "--key-file={0}".format(
                os.getenv("GOOGLE_APPLICATION_CREDENTIALS"))
        ])
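
    # Prow supplies the job context through environment variables
    # (JOB_NAME, BUILD_NUMBER, PULL_NUMBER).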
    job_name = os.getenv("JOB_NAME", "")
    build_number = os.getenv("BUILD_NUMBER")
    pull_number = os.getenv("PULL_NUMBER")
    output_dir = prow.get_gcs_output()

    sha = prow.get_commit_from_env()
    gcs_client = storage.Client()
    prow.create_started(gcs_client, output_dir, sha)

    symlink = prow.get_symlink_output(pull_number, job_name, build_number)
    if symlink:
        prow.create_symlink(gcs_client, symlink, output_dir)

    dag_state = _run_dag_and_wait()

    dag_success = dag_state == "success"

    artifacts_dir = os.path.join(output_dir, "artifacts")
    junit_files = [
        "junit_e2e.xml", "junit_gpu-tests.xml", "junit_pycheckslint.xml",
        "junit_pycheckstest.xml", "junit_setupcluster.xml"
    ]
    test_success = prow.check_no_errors(gcs_client, artifacts_dir, junit_files)

    success = test_success and dag_success

    if success:
        job_name = os.getenv("JOB_NAME", "unknown")
        prow.create_latest(gcs_client, job_name, sha)

    prow.create_finished(gcs_client, output_dir, success)

    file_handler.flush()
    prow.upload_outputs(gcs_client, output_dir, build_log)

    if not success:
        # Exit with a non-zero exit code by raising an exception.
        logging.error(
            "One or more test steps failed; exiting with a non-zero exit "
            "code.")
        sys.exit(1)
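
One detail from Example #2 worth isolating: a FileHandler added to the root logger does not inherit the format configured via logging.basicConfig, so the formatter has to be set on the handler itself. A standalone sketch of the same build-log pattern (directory prefix and messages are illustrative):

import logging
import os
import tempfile

logging.basicConfig(level=logging.INFO,
                    format="%(levelname)s|%(asctime)s|%(message)s")

log_dir = tempfile.mkdtemp(prefix="tmpBuildLog")  # illustrative temp dir
build_log = os.path.join(log_dir, "build-log.txt")

file_handler = logging.FileHandler(build_log)
# Formatters are per handler; attach one matching the console format.
file_handler.setFormatter(logging.Formatter(
    fmt="%(levelname)s|%(asctime)s|%(pathname)s|%(lineno)d| %(message)s",
    datefmt="%Y-%m-%dT%H:%M:%S"))
logging.getLogger().addHandler(file_handler)

logging.info("Messages now go to both the console and %s", build_log)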