Example #1
0
def run_tests(args):
  """Discover and run every *_test.py file under args.src_dir.

  Each test file is executed as its own python subprocess. Results are
  collected as test_util.TestCase objects and, when --junit_path is set,
  written out as a junit XML file (optionally uploaded to GCS).

  Args:
    args: Parsed command line arguments; must provide src_dir, junit_path
      and project attributes.
  """
  # Print out the pylint version because different versions can produce
  # different results.
  util.run(["pylint", "--version"])

  # kubeflow_testing is imported as a submodule so we should exclude it
  # TODO(jlewi): Perhaps we should get a list of submodules and exclude
  # them automatically?
  dir_excludes = ["kubeflow_testing", "vendor"]
  includes = ["*_test.py"]
  test_cases = []

  env = os.environ.copy()
  # TODO(jlewi): Once we switch to using Argo I think we can stop setting
  # the PYTHONPATH here and just inheriting it from the environment.
  # When we use ARGO each step will run in its own pod and we can set the
  # PYTHONPATH environment variable as needed for that pod.
  env["PYTHONPATH"] = (
    args.src_dir + ":" + os.path.join(args.src_dir, "kubeflow_testing", "py"))

  num_failed = 0
  for root, dirs, files in os.walk(args.src_dir, topdown=True):
    # Prune excluded directories in place so os.walk never descends
    # into them (excludes could be done with fnmatch.filter and a
    # complementary set, but it's more annoying to read).
    dirs[:] = [d for d in dirs if d not in dir_excludes]
    for pat in includes:
      for f in fnmatch.filter(files, pat):
        full_path = os.path.join(root, f)

        test_case = test_util.TestCase()
        test_case.class_name = "pytest"
        # Report the path relative to src_dir for readable junit output.
        test_case.name = full_path[len(args.src_dir):]
        start_time = time.time()
        test_cases.append(test_case)
        try:
          util.run(["python", full_path], cwd=args.src_dir, env=env)
        except subprocess.CalledProcessError:
          test_case.failure = "{0} failed.".format(test_case.name)
          num_failed += 1
        finally:
          # Record wall-clock duration whether the test passed or failed.
          test_case.time = time.time() - start_time

  if num_failed:
    logging.error("%s tests failed.", num_failed)
  else:
    # Bug fix: previously logged "No lint issues." — a copy-paste from
    # run_lint that is misleading for a test runner.
    logging.info("All tests passed.")

  if not args.junit_path:
    logging.info("No --junit_path.")
    return

  gcs_client = None
  if args.junit_path.startswith("gs://"):
    gcs_client = storage.Client(project=args.project)

  test_util.create_junit_xml_file(test_cases, args.junit_path, gcs_client)
Example #2
0
def _push_image(image, latest_image):
    """Push the versioned and latest tags of an image to its registry.

    Images hosted on gcr.io go through the gcloud docker wrapper;
    everything else uses plain docker.
    """
    if "gcr.io" in image:
        push_cmd = ["gcloud", "docker", "--", "push"]
    else:
        push_cmd = ["docker", "push"]

    for target in (image, latest_image):
        util.run(push_cmd + [target])
        logging.info("Pushed image: %s", target)
Example #3
0
def build_operator_image(root_dir,
                         registry,
                         project=None,
                         should_push=True,
                         version_tag=None):
    """Build the main docker image for the TFJob CRD.

    Args:
      root_dir: Root directory of the repository.
      registry: The registry to use.
      project: If set it will be built using GCB.
      should_push: Should push the image to the registry. Default is True.
        (Only consulted for local docker builds; GCB pushes the image as
        part of the build.)
      version_tag: Optional tag for the version. If not specified derive
        the tag from the git hash.

    Returns:
      build_info: Dictionary containing information about the build, with
        keys "image" (fully qualified version-tagged image name) and
        "commit" (the git SHA the image was built from).
    """
    # Stage everything the Dockerfile needs into a fresh temp directory so
    # the docker build context contains only what we copy below.
    context_dir = tempfile.mkdtemp(prefix="tmpTFJobCrdContext")
    logging.info("context_dir: %s", context_dir)
    # mkdtemp already creates the directory; this is purely defensive.
    if not os.path.exists(context_dir):
        os.makedirs(context_dir)

    go_path = os.environ["GOPATH"]
    commit = build_and_push_image.GetGitHash(root_dir)

    # Bug fix: compute the version tag *before* building the go binaries.
    # Previously the tag default was derived after `go install`, so when no
    # version_tag was supplied the binary was stamped with
    # "...version.Version=None".
    if not version_tag:
        logging.info("No version tag specified; computing tag automatically.")
        n = datetime.datetime.now()
        version_tag = n.strftime("v%Y%m%d") + "-" + commit
    logging.info("Using version tag: %s", version_tag)

    # Build the go binaries.
    targets = [
        "github.com/kubeflow/tf-operator/cmd/tf-operator.v1",
    ]
    for t in targets:
        if t in ["github.com/kubeflow/tf-operator/cmd/tf-operator.v1"]:
            # Embed the git SHA and version into the binary via -ldflags so
            # the operator can report its own build information at runtime.
            util.run([
                "go", "install", "-ldflags",
                '''-X github.com/kubeflow/tf-operator/pkg/version.GitSHA={}
          -X github.com/kubeflow/tf-operator/pkg/version.Version={}'''.format(
                    commit, version_tag), t
            ])
            continue
        util.run(["go", "install", t])

    # If the release is not done from a Linux machine
    # we need to grab the artefacts from /bin/linux_amd64
    bin_path = "bin"
    if platform.system() != "Linux":
        bin_path += "/linux_amd64"

    # List of paths to copy relative to root.
    sources = [
        "build/images/tf_operator/Dockerfile",
        "examples/tf_sample/tf_smoke.py",
        os.path.join(go_path, bin_path,
                     "tf-operator.v1"), "cmd", "pkg", "third_party", "vendor"
    ]

    # Copy each source (file or directory) into the docker build context.
    for s in sources:
        src_path = os.path.join(root_dir, s)
        dest_path = os.path.join(context_dir, os.path.basename(s))
        if os.path.exists(dest_path):
            os.unlink(dest_path)
        if os.path.isdir(src_path):
            shutil.copytree(src_path, dest_path, symlinks=True)
        else:
            shutil.copyfile(src_path, dest_path)

    image_base = registry + "/tf_operator"
    image = image_base + ":" + version_tag
    latest_image = image_base + ":latest"

    if project:
        # Build remotely with Google Cloud Build; GCB also pushes the
        # image, so should_push is not consulted on this path.
        util.run([
            "gcloud", "builds", "submit", context_dir, "--tag=" + image,
            "--project=" + project
        ])

        # Add the latest tag.
        util.run([
            "gcloud", "container", "images", "add-tag", "--quiet", image,
            latest_image
        ])

    else:
        util.run(["docker", "build", "-t", image, context_dir])
        logging.info("Built image: %s", image)

        util.run(["docker", "tag", image, latest_image])

        if should_push:
            _push_image(image, latest_image)

    output = {
        "image": image,
        "commit": commit,
    }
    return output
Example #4
0
def run_lint(args):
  """Run pylint over every .py file under args.src_dir.

  Files below excluded directories (submodules, vendored code, the test
  app) are skipped. When --junit_path is set, a single junit test case
  summarizing the lint run is written (optionally to GCS).

  Args:
    args: Parsed command line arguments; must provide src_dir, junit_path
      and project attributes.
  """
  start_time = time.time()
  # Print out the pylint version because different versions can produce
  # different results.
  util.run(["pylint", "--version"])

  # kubeflow_testing is imported as a submodule so we should exclude it
  # TODO(jlewi): Perhaps we should get a list of submodules and exclude
  # them automatically?
  dir_excludes = [
    "kubeflow_testing",
    "test/test-app",
    "vendor",
  ]
  full_dir_excludes = [
    os.path.join(os.path.abspath(args.src_dir), f) for f in dir_excludes
  ]
  includes = ["*.py"]
  failed_files = []
  rc_file = os.path.join(args.src_dir, ".pylintrc")
  for root, dirs, files in os.walk(os.path.abspath(args.src_dir), topdown=True):
    # Skip any root that falls inside an excluded tree. This prefix check
    # also covers nested excludes like "test/test-app" that in-place
    # pruning below can't express.
    exclude = False
    for e in full_dir_excludes:
      if root.startswith(e):
        exclude = True
        break
    if exclude:
      continue

    # Bug fix: this used to be `dirs[:] = [d for d in dirs]`, a no-op copy,
    # so os.walk still descended into every excluded tree and each root was
    # then skipped one at a time above. Prune directly-excluded children so
    # the walk never enters them.
    dirs[:] = [
      d for d in dirs if os.path.join(root, d) not in full_dir_excludes
    ]
    for pat in includes:
      for f in fnmatch.filter(files, pat):
        full_path = os.path.join(root, f)
        try:
          util.run(["pylint", "--rcfile=" + rc_file, full_path],
                   cwd=args.src_dir)
        except subprocess.CalledProcessError:
          # Record the path relative to src_dir for readable reports.
          failed_files.append(full_path[len(args.src_dir):])

  if failed_files:
    failed_files.sort()
    logging.error("%s files had lint errors:\n%s", len(failed_files),
                  "\n".join(failed_files))
  else:
    logging.info("No lint issues.")

  if not args.junit_path:
    logging.info("No --junit_path.")
    return

  test_case = test_util.TestCase()
  test_case.class_name = "pylint"
  test_case.name = "pylint"
  test_case.time = time.time() - start_time
  if failed_files:
    test_case.failure = "Files with lint issues: {0}".format(
      ", ".join(failed_files))

  gcs_client = None
  if args.junit_path.startswith("gs://"):
    gcs_client = storage.Client(project=args.project)

  test_util.create_junit_xml_file([test_case], args.junit_path, gcs_client)