Example No. 1
    def __collect_gce_quota(self,
                            project,
                            region,
                            project_percent=100.0,
                            region_percent=100.0):
        project_info_json = check_subprocess(
            'gcloud compute project-info describe'
            ' --format yaml'
            ' --project %s' % project)
        project_info = yaml.safe_load(project_info_json)
        # Sometimes GCE returns entries and leaves out the "metric" it was for.
        # We'll ignore those and stick them in 'UNKNOWN' for simplicity.
        project_quota = {
            'gce_global_%s' % info.get('metric', 'UNKNOWN'): int(
                max(
                    1,
                    math.floor(project_percent *
                               (info['limit'] - info['usage']))))
            for info in project_info['quotas']
        }

        region_info_json = check_subprocess('gcloud compute regions describe'
                                            ' --format yaml'
                                            ' %s' % region)
        region_info = yaml.safe_load(region_info_json)
        region_quota = {
            'gce_region_%s' % info.get('metric', 'UNKNOWN'): int(
                max(
                    1,
                    math.floor(region_percent *
                               (info['limit'] - info['usage']))))
            for info in region_info['quotas']
        }
        return project_quota, region_quota
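All of the snippets below lean on the same check_subprocess helper from Spinnaker's buildtool. As a rough mental model only -- the real helper also handles logging, echoing, and extra keyword arguments such as echo= -- it behaves like this minimal sketch:

import shlex
import subprocess

def run_subprocess(command, **kwargs):
    # Sketch: run a shell-style command string and capture its output.
    process = subprocess.Popen(
        shlex.split(command), stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT, **kwargs)
    stdout, _ = process.communicate()
    return process.returncode, stdout.decode('utf-8').rstrip()

def check_subprocess(command, **kwargs):
    # Sketch: like run_subprocess, but raise if the command fails.
    returncode, stdout = run_subprocess(command, **kwargs)
    if returncode != 0:
        raise RuntimeError('%r exited with %d' % (command, returncode))
    return stdout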
Example No. 2
  def publish_halyard_version_commits(self, repository):
    """Publish the halyard build to the bucket.

    This also writes the built version to
        <output_dir>/halyard/last_version_commit.yml
    so callers can know what version was written.

    NOTE(ewiseblatt): 20180110 Halyard's policies should be revisited here.
    Although this is a "Publish" it is not a release. It is publishing
    the 'nightly' build which isn't really nightly, just 'last-build',
    which could even be on an older branch than latest.
    """
    commit_id = self.source_code_manager.git.query_local_repository_commit_id(
        repository.git_dir)

    # This is only because we need a file for gsutil cp to copy.
    # We already need gsutil, so it's easier to just use it again here.
    output_dir = self.get_output_dir()
    tmp_path = os.path.join(output_dir, self.HALYARD_VERSIONS_BASENAME)

    contents = self.load_halyard_version_commits()
    new_entry = '{version}: {commit}\n'.format(
        version=self.__build_version, commit=commit_id)

    logging.info('Updating %s with %s', self.__versions_url, new_entry)
    if contents and contents[-1] != '\n':
      contents += '\n'
    contents = contents + new_entry
    with open(tmp_path, 'w') as stream:
      stream.write(contents)
    check_subprocess('gsutil cp {path} {url}'.format(
        path=tmp_path, url=self.__versions_url))
    self.__emit_last_commit_entry(new_entry)
Example No. 3
def __tag_images(versions_to_tag, project, account, project_images,
                 bom_contents_by_name):
    images_to_tag = set([])
    for bom_version in versions_to_tag:
        to_tag = [
            i for i in __derive_images_from_bom(
                bom_version, bom_contents_by_name) if i in project_images
        ]
        images_to_tag.update(to_tag)
    for image in images_to_tag:
        return_code, stdout = run_subprocess(
            'gcloud compute images describe'
            ' --project={project} --account={account} --format=json {image}'.
            format(project=project, account=account, image=image),
            echo=False)
        # Adding labels is idempotent; adding the same label again doesn't break anything.
        if not return_code:
            payload_str = stdout.strip()
            timestamp = json.loads(payload_str)['creationTimestamp']
            timestamp = timestamp[:timestamp.index('T')]
            check_subprocess(
                'gcloud compute images add-labels --project={project} --account={account} --labels={key}={timestamp} {image}'
                .format(project=project,
                        account=account,
                        key=PUBLISHED_TAG_KEY,
                        timestamp=timestamp,
                        image=image))
Example No. 4
  def _do_repository(self, repository):
    """Implements RepositoryCommandProcessor interface."""
    name = repository.name
    build_version = self.scm.get_repository_service_build_version(repository)

    # TODO(jacobkiefer): go version && go env?
    self.source_code_manager.ensure_local_repository(repository)
    config_root = repository.git_dir

    for dist_arch in DIST_ARCH_LIST:
      # The sub-directory the binaries are stored in is specified by
      # ${build_version}/${dist}.
      version_bin_path = ('spin/{}/{}/{}/spin'
                          .format(build_version, dist_arch.dist, dist_arch.arch))
      nightly_bin_path = ('spin/nightly/{}/{}/spin'
                          .format(dist_arch.dist, dist_arch.arch))

      logging.info('Building spin binary for %s', dist_arch)
      check_subprocess('env CGO_ENABLED=0 GOOS={} GOARCH={} go build .'
                       .format(dist_arch.dist, dist_arch.arch),
                       cwd=config_root)

      spin_path = '{}/spin'.format(config_root)
      self.__gcs_uploader.upload_from_filename(
        version_bin_path, spin_path)
      self.__gcs_uploader.upload_from_filename(
        nightly_bin_path, spin_path)
      os.remove(spin_path)

    output_dir = self.get_output_dir()
    latest_path = os.path.join(output_dir, 'latest')
    with open(latest_path, 'w') as latest_file:
      latest_file.write(build_version)
    self.__gcs_uploader.upload_from_filename(
      'spin/latest', latest_path)
Example No. 5
 def __generate_json_from_url(
     self, repository, server_url, output_path):
   """Build the swagger json from the swagger endpoint."""
   ensure_dir_exists(os.path.dirname(output_path))
   logging.info('Generating swagger docs for %s', repository.name)
   check_subprocess('curl -s {url} -o {output_path}'
                    .format(url=server_url, output_path=output_path))
Example No. 6
    def __collect_gce_quota(self,
                            project,
                            region,
                            project_percent=100.0,
                            region_percent=100.0):
        project_info_json = check_subprocess(
            'gcloud compute project-info describe'
            ' --format yaml'
            ' --project %s' % project)
        project_info = yaml.safe_load(project_info_json)
        project_quota = {
            'gce_global_%s' % info['metric']: int(
                max(
                    1,
                    math.floor(project_percent *
                               (info['limit'] - info['usage']))))
            for info in project_info['quotas']
        }

        region_info_json = check_subprocess('gcloud compute regions describe'
                                            ' --format yaml'
                                            ' %s' % region)
        region_info = yaml.safe_load(region_info_json)
        region_quota = {
            'gce_region_%s' % info['metric']: int(
                max(
                    1,
                    math.floor(region_percent *
                               (info['limit'] - info['usage']))))
            for info in region_info['quotas']
        }
        return project_quota, region_quota
Example No. 7
    def publish_halyard_version_commits(self, repository):
        """Publish the halyard build to the bucket.

    This also writes the built version to
        <output_dir>/halyard/last_version_commit.yml
    so callers can know what version was written.

    NOTE(ewiseblatt): 20180110 Halyard's policies should be revisited here.
    Although this is a "Publish" it is not a release. It is publishing
    the 'nightly' build which isn't really nightly, just 'last-build',
    which could even be on an older branch than latest.
    """
        commit_id = self.source_code_manager.git.query_local_repository_commit_id(
            repository.git_dir)

        # This is only because we need a file for gsutil cp to copy.
        # We already need gsutil, so it's easier to just use it again here.
        output_dir = self.get_output_dir()
        tmp_path = os.path.join(output_dir, self.HALYARD_VERSIONS_BASENAME)

        contents = self.load_halyard_version_commits()
        new_entry = '{version}: {commit}\n'.format(
            version=self.__build_version, commit=commit_id)

        logging.info('Updating %s with %s', self.__versions_url, new_entry)
        if contents and contents[-1] != '\n':
            contents += '\n'
        contents = contents + new_entry
        with open(tmp_path, 'w') as stream:
            stream.write(contents)
        check_subprocess('gsutil cp {path} {url}'.format(
            path=tmp_path, url=self.__versions_url))
        self.__emit_last_commit_entry(new_entry)
Example No. 8
def make_standard_git_repo(git_dir):
  """Initialize local standard test repos.

  These are used by tests that interact with a git repository.
  """
  branch_commits = {'ORIGIN': git_dir}
  repo_name = os.path.basename(git_dir)

  run_git = lambda cmd: 'git %s' % cmd
  os.makedirs(git_dir)
  logging.debug('Initializing git repository in "%s"', git_dir)

  check_subprocess_sequence(
      [
          'touch  %s-basefile.txt' % repo_name,
          run_git('init'),
          run_git('add %s-basefile.txt' % repo_name),
          run_git('commit -a -m "feat(first): first commit"'),
          run_git('tag %s HEAD' % BASE_VERSION_TAG),
      ],
      cwd=git_dir)
  branch_commits['master'] = check_subprocess('git rev-parse HEAD', cwd=git_dir)

  check_subprocess_sequence(
      [
          run_git('checkout -b ' + PATCH_BRANCH),
          'touch %s-patchfile.txt' % repo_name,
          run_git('add %s-patchfile.txt' % repo_name),
          run_git('commit -a -m "fix(patch): added patch change"')
      ],
      cwd=git_dir)
  branch_commits[PATCH_BRANCH] = check_subprocess(
      'git rev-parse HEAD', cwd=git_dir)

  check_subprocess_sequence(
      [
          run_git('checkout master'),
          run_git('checkout -b %s-branch' % repo_name),
          'touch %s-unique.txt' % repo_name,
          run_git('add %s-unique.txt' % repo_name),
          run_git('commit -a -m "chore(uniq): unique commit"')
      ],
      cwd=git_dir)
  branch_commits['%s-branch' % repo_name] = check_subprocess(
      'git rev-parse HEAD', cwd=git_dir)

  check_subprocess_sequence(
      [
          run_git('checkout master'),
          run_git('checkout -b %s' % UNTAGGED_BRANCH),
          'touch %s-untagged.txt' % repo_name,
          run_git('add %s-untagged.txt' % repo_name),
          run_git('commit -a -m "chore(uniq): untagged commit"'),
      ],
      cwd=git_dir)
  branch_commits[UNTAGGED_BRANCH] = check_subprocess(
      'git rev-parse HEAD', cwd=git_dir)

  return branch_commits
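A hypothetical use of this fixture: build the standard repo under a temp directory, then look up the commit ids it recorded (the directory name here is made up).

import os
import tempfile

base_temp_dir = tempfile.mkdtemp(prefix='git_test')
commits = make_standard_git_repo(os.path.join(base_temp_dir, 'testrepo'))

# 'ORIGIN' maps to the repo path itself; each branch name maps to the
# `git rev-parse HEAD` output captured when that branch was committed.
print(commits['ORIGIN'])
print(commits['master'])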
Example No. 9
 def __generate_json_from_url(self, repository, server_url, output_path):
     """Build the swagger json from the swagger endpoint."""
     ensure_dir_exists(os.path.dirname(output_path))
     logging.info("Generating swagger docs for %s", repository.name)
     check_subprocess(
         "curl -s {url} -o {output_path}".format(
             url=server_url, output_path=output_path
         )
     )
Example No. 10
    def _do_repository(self, repository):
        """Implements RepositoryCommandProcessor interface."""
        name = repository.name
        build_version = self.scm.get_repository_service_build_version(
            repository)

        # TODO(jacobkiefer): go version && go env?
        self.source_code_manager.ensure_local_repository(repository)
        config_root = repository.git_dir

        check_subprocess('go get -d -v', cwd=config_root)
        for dist_arch in DIST_ARCH_LIST:
            # The sub-directory the binaries are stored in is specified by
            # ${build_version}/${dist}.
            version_bin_path = ('spin/{}/{}/{}/spin'.format(
                build_version, dist_arch.dist, dist_arch.arch))
            nightly_bin_path = ('spin/nightly/{}/{}/spin'.format(
                dist_arch.dist, dist_arch.arch))

            context = '%s-%s' % (dist_arch.dist, dist_arch.arch)
            logfile = self.get_logfile_path(repository.name + '-build-' +
                                            context)
            logging.info('Building spin binary for %s', dist_arch)
            labels = {
                'repository': repository.name,
                'dist': dist_arch.dist,
                'arch': dist_arch.arch
            }
            env = dict(os.environ)
            env.update({
                'CGO_ENABLED': '0',
                'GOOS': dist_arch.dist,
                'GOARCH': dist_arch.arch
            })
            self.metrics.time_call(
                'GoBuild',
                labels,
                self.metrics.default_determine_outcome_labels,
                check_subprocesses_to_logfile,
                'Building spin ' + context,
                logfile, ['go build .'],
                cwd=config_root,
                env=env)

            spin_path = '{}/spin'.format(config_root)
            self.__gcs_uploader.upload_from_filename(version_bin_path,
                                                     spin_path)
            self.__gcs_uploader.upload_from_filename(nightly_bin_path,
                                                     spin_path)
            os.remove(spin_path)

        output_dir = self.get_output_dir()
        latest_path = os.path.join(output_dir, 'latest')
        with open(latest_path, 'w') as latest_file:
            latest_file.write(build_version)
        self.__gcs_uploader.upload_from_filename('spin/latest', latest_path)
Example No. 11
def make_standard_git_repo(git_dir):
    """Initialize local standard test repos.

  These are used by tests that interact with a git repository.
  """
    branch_commits = {'ORIGIN': git_dir}
    repo_name = os.path.basename(git_dir)

    run_git = lambda cmd: 'git %s' % cmd
    os.makedirs(git_dir)
    logging.debug('Initializing git repository in "%s"', git_dir)

    check_subprocess_sequence([
        'touch  %s-basefile.txt' % repo_name,
        run_git('init'),
        run_git('add %s-basefile.txt' % repo_name),
        run_git('commit -a -m "feat(first): first commit"'),
        run_git('tag %s HEAD' % BASE_VERSION_TAG),
    ],
                              cwd=git_dir)
    branch_commits['master'] = check_subprocess('git rev-parse HEAD',
                                                cwd=git_dir)

    check_subprocess_sequence([
        run_git('checkout -b ' + PATCH_BRANCH),
        'touch %s-patchfile.txt' % repo_name,
        run_git('add %s-patchfile.txt' % repo_name),
        run_git('commit -a -m "fix(patch): added patch change"')
    ],
                              cwd=git_dir)
    branch_commits[PATCH_BRANCH] = check_subprocess('git rev-parse HEAD',
                                                    cwd=git_dir)

    check_subprocess_sequence([
        run_git('checkout master'),
        run_git('checkout -b %s-branch' % repo_name),
        'touch %s-unique.txt' % repo_name,
        run_git('add %s-unique.txt' % repo_name),
        run_git('commit -a -m "chore(uniq): unique commit"')
    ],
                              cwd=git_dir)
    branch_commits['%s-branch' % repo_name] = check_subprocess(
        'git rev-parse HEAD', cwd=git_dir)

    check_subprocess_sequence([
        run_git('checkout master'),
        run_git('checkout -b %s' % UNTAGGED_BRANCH),
        'touch %s-untagged.txt' % repo_name,
        run_git('add %s-untagged.txt' % repo_name),
        run_git('commit -a -m "chore(uniq): untagged commit"'),
    ],
                              cwd=git_dir)
    branch_commits[UNTAGGED_BRANCH] = check_subprocess('git rev-parse HEAD',
                                                       cwd=git_dir)

    return branch_commits
Example No. 12
  def build_all_distributions(self, repository):
    name = repository.name
    source_info = self.source_code_manager.refresh_source_info(
      repository, self.options.build_number)
    self.__build_version = source_info.to_build_version()
    config_root = repository.git_dir

    check_subprocess('go get -d -v', cwd=config_root)
    for dist_arch in DIST_ARCH_LIST:
      # The sub-directory the binaries are stored in is specified by
      # ${build_version}/${dist}.
      version_bin_path = ('spin/{}/{}/{}/spin'
                          .format(self.__build_version, dist_arch.dist, dist_arch.arch))

      context = '%s-%s' % (dist_arch.dist, dist_arch.arch)
      logfile = self.get_logfile_path(
          repository.name + '-build-' + context)
      logging.info('Building spin binary for %s', dist_arch)
      labels = {'repository': repository.name,
                'dist': dist_arch.dist,
                'arch': dist_arch.arch}
      env = dict(os.environ)
      env.update({'CGO_ENABLED': '0',
                  'GOOS': dist_arch.dist,
                  'GOARCH': dist_arch.arch})

      # Note: spin CLI is coupled to the Gate major and minor version.
      # Gate is a routing server, so features and breaking changes in Gate
      # must be reflected in spin since it is a client.
      dash = self.__gate_version.find('-')
      gate_semver = self.__gate_version[:dash]

      prefix = os.path.join(repository.origin, 'version')
      double_slash = prefix.find('//')
      # Trim the scheme prefix to format the go package path properly.
      if double_slash != -1:
        prefix = prefix[double_slash + 2:]
      if prefix.endswith('/'):
        prefix = prefix[:-1]


      # Unset ReleasePhase tag for proper versions.
      ldflags = ('-ldflags "-X {pref}.Version={gate_version}'
                 ' -X {pref}.ReleasePhase="'.format(
                     pref=prefix, gate_version=gate_semver))
      cmd = 'go build {ldflags} .'.format(ldflags=ldflags)
      self.metrics.time_call(
          'GoBuild', labels, self.metrics.default_determine_outcome_labels,
          check_subprocesses_to_logfile, 'Building spin ' + context, logfile,
          [cmd], cwd=config_root, env=env)

      spin_path = '{}/spin'.format(config_root)
      self.__gcs_uploader.upload_from_filename(
        version_bin_path, spin_path)
      os.remove(spin_path)
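A standalone sketch of the prefix trimming and -ldflags assembly above; the origin URL and Gate version are illustrative values, not taken from the build.

prefix = 'https://github.com/spinnaker/spin/version'
double_slash = prefix.find('//')
if double_slash != -1:
    prefix = prefix[double_slash + 2:]  # 'github.com/spinnaker/spin/version'
gate_semver = '1.19.3'
ldflags = ('-ldflags "-X {pref}.Version={ver}'
           ' -X {pref}.ReleasePhase="'.format(pref=prefix, ver=gate_semver))
print('go build {} .'.format(ldflags))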
Example No. 13
  def test_check_subprocess_fail(self):
    if os.path.exists('/bin/false'):
      false_path = '/bin/false'
    elif os.path.exists('/usr/bin/false'):
      false_path = '/usr/bin/false'
    else:
      raise NotImplementedError('Unsupported test platform.')

    tests = [false_path, '/bin/ls /abc/def']
    for test in tests:
      with self.assertRaises(ExecutionError) as ex:
        check_subprocess(test)
      self.assertTrue(hasattr(ex.exception, 'loggedit'))
Example No. 14
    def test_check_subprocess_fail(self):
        if os.path.exists("/bin/false"):
            false_path = "/bin/false"
        elif os.path.exists("/usr/bin/false"):
            false_path = "/usr/bin/false"
        else:
            raise NotImplementedError("Unsupported test platform.")

        tests = [false_path, "/bin/ls /abc/def"]
        for test in tests:
            with self.assertRaises(ExecutionError) as ex:
                check_subprocess(test)
            self.assertTrue(hasattr(ex.exception, "loggedit"))
Example No. 15
def create_instance(options):
    """Creates new GCE VM instance for development."""
    project = get_project(options)
    print('Creating instance {project}/{zone}/{instance}'.format(
        project=project, zone=get_zone(options), instance=options.instance))
    print('  with --machine_type={type} and --disk_size={disk_size}...'
          .format(type=options.machine_type, disk_size=options.disk_size))

    google_dev_dir = os.path.join(os.path.dirname(__file__), '../google/dev')
    dev_dir = os.path.dirname(__file__)
    project_dir = os.path.join(dev_dir, '..')

    install_dir = '{dir}/../install'.format(dir=dev_dir)

    startup_command = ['/opt/spinnaker/install/install_development.sh']
    fd, temp_startup = tempfile.mkstemp()
    os.write(fd, str.encode(';'.join(startup_command)))
    os.close(fd)

    metadata_files = [
        'startup-script={google_dev_dir}/google_install_loader.py'
        ',sh_bootstrap_dev={dev_dir}/bootstrap_dev.sh'
        ',sh_install_development={dev_dir}/install_development.sh'
        ',startup_command={temp_startup}'
        .format(google_dev_dir=google_dev_dir,
                dev_dir=dev_dir,
                project_dir=project_dir,
                temp_startup=temp_startup)]

    metadata = ','.join([
        'startup_loader_files='
        'sh_install_development'
        '+sh_bootstrap_dev'])

    command = ['gcloud', 'compute', 'instances', 'create',
               options.instance,
               '--project', get_project(options),
               '--zone', get_zone(options),
               '--machine-type', options.machine_type,
               '--image-family', 'ubuntu-1604-lts',
               '--image-project', 'ubuntu-os-cloud',
               '--scopes', options.scopes,
               '--boot-disk-size={size}'.format(size=options.disk_size),
               '--boot-disk-type={type}'.format(type=options.disk_type),
               '--metadata', metadata,
               '--metadata-from-file={files}'.format(
                   files=','.join(metadata_files))]
    if options.address:
        command.extend(['--address', options.address])

    check_subprocess(' '.join(command))
Example No. 16
def create_instance(options):
    """Creates new GCE VM instance for development."""
    project = get_project(options)
    print('Creating instance {project}/{zone}/{instance}'.format(
        project=project, zone=get_zone(options), instance=options.instance))
    print('  with --machine_type={type} and --disk_size={disk_size}...'
          .format(type=options.machine_type, disk_size=options.disk_size))

    google_dev_dir = os.path.join(os.path.dirname(__file__), '../google/dev')
    dev_dir = os.path.dirname(__file__)
    project_dir = os.path.join(dev_dir, '..')

    install_dir = '{dir}/../install'.format(dir=dev_dir)

    startup_command = ['/opt/spinnaker/install/install_development.sh']
    fd, temp_startup = tempfile.mkstemp()
    os.write(fd, str.encode(';'.join(startup_command)))
    os.close(fd)

    metadata_files = [
        'startup-script={google_dev_dir}/google_install_loader.py'
        ',sh_bootstrap_dev={dev_dir}/bootstrap_dev.sh'
        ',sh_install_development={dev_dir}/install_development.sh'
        ',startup_command={temp_startup}'
        .format(google_dev_dir=google_dev_dir,
                dev_dir=dev_dir,
                project_dir=project_dir,
                temp_startup=temp_startup)]

    metadata = ','.join([
        'startup_loader_files='
        'sh_install_development'
        '+sh_bootstrap_dev'])

    command = ['gcloud', 'compute', 'instances', 'create',
               options.instance,
               '--project', get_project(options),
               '--zone', get_zone(options),
               '--machine-type', options.machine_type,
               '--image-family', 'ubuntu-1404-lts',
               '--image-project', 'ubuntu-os-cloud',
               '--scopes', options.scopes,
               '--boot-disk-size={size}'.format(size=options.disk_size),
               '--boot-disk-type={type}'.format(type=options.disk_type),
               '--metadata', metadata,
               '--metadata-from-file={files}'.format(
                   files=','.join(metadata_files))]
    if options.address:
        command.extend(['--address', options.address])

    check_subprocess(' '.join(command))
Example No. 17
  def build_swagger_docs(self, repository, json_path):
    """Build the API from the swagger endpoint."""
    if repository.name != 'gate':
      raise_and_log_error(
          UnexpectedError('Repo "%s" != "gate"' % repository.name))

    docs_dir = os.path.dirname(json_path)
    check_subprocess(
        'java -jar {jar_path} generate -i {json_path} -l html2'
        ' -o {output_dir} -t {templates_directory}'
        .format(jar_path=self.options.swagger_codegen_cli_jar_path,
                json_path=json_path, output_dir=docs_dir,
                templates_directory=self.__templates_directory))
    logging.info('Writing docs to directory %s', docs_dir)
Example No. 18
    def build_swagger_docs(self, repository, json_path):
        """Build the API from the swagger endpoint."""
        if repository.name != 'gate':
            raise_and_log_error(
                UnexpectedError('Repo "%s" != "gate"' % repository.name))

        docs_dir = os.path.dirname(json_path)
        check_subprocess(
            'java -jar {jar_path} generate -i {json_path} -l html2'
            ' -o {output_dir} -t {templates_directory}'.format(
                jar_path=self.options.swagger_codegen_cli_jar_path,
                json_path=json_path,
                output_dir=docs_dir,
                templates_directory=self.__templates_directory))
        logging.info('Writing docs to directory %s', docs_dir)
Example No. 19
    def __collect_halconfig_files(self, repository):
        """Gets the component config files and writes them into the output_dir."""
        name = repository.name
        if (name not in SPINNAKER_BOM_REPOSITORY_NAMES
                or name in ['spinnaker']):
            logging.debug('%s does not use config files -- skipping', name)
            return

        if name == 'spinnaker-monitoring':
            config_root = os.path.join(repository.git_dir,
                                       'spinnaker-monitoring-daemon')
        else:
            config_root = repository.git_dir

        service_name = self.scm.repository_name_to_service_name(
            repository.name)
        target_dir = os.path.join(self.get_output_dir(), 'halconfig',
                                  service_name)
        ensure_dir_exists(target_dir)

        config_path = os.path.join(config_root, 'halconfig')
        logging.info('Copying configs from %s...', config_path)
        if not os.path.exists(config_path) and repository.name == 'kayenta':
            logging.warning('Kayenta does not yet have a halconfig.')
            return
        for profile in os.listdir(config_path):
            profile_path = os.path.join(config_path, profile)
            if os.path.isfile(profile_path):
                shutil.copyfile(profile_path,
                                os.path.join(target_dir, profile))
                logging.debug('Copied profile to %s', profile_path)
            elif not os.path.isdir(profile_path):
                logging.warning('%s is neither file nor directory -- ignoring',
                                profile_path)
                continue
            else:
                tar_path = os.path.join(
                    target_dir, '{profile}.tar.gz'.format(profile=profile))
                file_list = ' '.join(os.listdir(profile_path))

                # NOTE: For historic reasons this is not actually compressed
                # even though the tar_path says ".tar.gz"
                check_subprocess(
                    'tar cf {path} -C {profile} {file_list}'.format(
                        path=tar_path,
                        profile=profile_path,
                        file_list=file_list))
                logging.debug('Copied profile to %s', tar_path)
Example No. 20
    def test_clone_upstream(self):
        git = self.git
        test_parent = os.path.join(self.base_temp_dir, 'test_clone_upstream')
        os.makedirs(test_parent)

        test_dir = os.path.join(test_parent, TEST_REPO_NAME)
        repository = GitRepositorySpec(TEST_REPO_NAME,
                                       git_dir=test_dir,
                                       origin=self.git_dir)
        git.clone_repository_to_path(repository)
        self.assertTrue(os.path.exists(os.path.join(test_dir, 'base_file')))

        want_tags = git.query_tag_commits(self.git_dir, TAG_VERSION_PATTERN)
        have_tags = git.query_tag_commits(test_dir, TAG_VERSION_PATTERN)
        self.assertEqual(want_tags, have_tags)

        got = check_subprocess('git -C "{dir}" remote -v'.format(dir=test_dir))
        # Disable pushes to the origin
        # No upstream since origin is upstream
        self.assertEqual(
            '\n'.join([
                'origin\t{origin} (fetch)'.format(origin=self.git_dir),
                'origin\tdisabled (push)'
            ]), got)

        reference = git.determine_git_repository_spec(test_dir)
        expect = GitRepositorySpec(os.path.basename(self.git_dir),
                                   origin=self.git_dir,
                                   git_dir=test_dir)
        self.assertEqual(expect, reference)
Example No. 21
def __delete_unused_bom_images(options):
    if options.json_path:
        client = storage.Client.from_service_account_json(options.json_path)
    else:
        client = storage.Client()
    versions_to_tag, possible_versions_to_delete, bom_contents_by_name = __partition_boms(
        client, options.bom_bucket_name)
    if options.additional_boms_to_tag:
        additional_boms_to_tag = options.additional_boms_to_tag.split(',')
        print('Adding additional BOM versions to tag: {}'.format(
            additional_boms_to_tag))
        versions_to_tag.extend(additional_boms_to_tag)
    print('Tagging versions: {}'.format(versions_to_tag))
    print('Deleting versions: {}'.format(possible_versions_to_delete))

    project = options.project
    service_account = options.service_account
    image_list_str = check_subprocess(
        'gcloud compute images list --format=json --project={project} --account={account}'
        .format(project=project, account=service_account),
        echo=False)
    image_list = json.loads(image_list_str)
    project_images = set([image['name'] for image in image_list])
    __tag_images(versions_to_tag, project, service_account, project_images,
                 bom_contents_by_name)
    __write_image_delete_script(possible_versions_to_delete,
                                options.days_before, project, service_account,
                                project_images, bom_contents_by_name)
Example No. 22
  def determine_halyard_commit(self):
    """Determine the commit_id that we want to publish."""
    options = self.options
    versions_url = options.halyard_version_commits_url
    if not versions_url:
      versions_url = '{base}/{filename}'.format(
          base=options.halyard_bucket_base_url,
          filename=BuildHalyardCommand.HALYARD_VERSIONS_BASENAME)

    if os.path.exists(versions_url):
      logging.debug('Loading halyard version info from file %s', versions_url)
      with open(versions_url, 'r') as stream:
        version_data = stream.read()
    else:
      logging.debug('Loading halyard version info from bucket %s', versions_url)
      gsutil_output = check_subprocess(
          'gsutil cat {url}'.format(url=versions_url), stderr=subprocess.PIPE)

      # The latest version of gsutil prints a bunch of python warnings to stdout
      # (see b/152449160). This file is a series of lines that look like...
      #   0.41.0-180209172926: 05f1e832ab438e5a980d1102e84cdb348a0ab055
      # ...so we'll just throw out any lines that don't start with digits.
      valid_lines = [line for line in gsutil_output.splitlines()
                     if line and line[0].isdigit()]
      version_data = "\n".join(valid_lines)

    commit = yaml.safe_load(version_data).get(options.halyard_version)
    if commit is None:
      raise_and_log_error(
          ConfigError('Unknown halyard version "{version}" in "{url}"'.format(
              version=options.halyard_version, url=versions_url)))
    return commit
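A quick illustration of the stdout filtering above, using the line format quoted in the comment (the warning line is made up):

gsutil_output = ('WARNING: some python deprecation notice\n'
                 '0.41.0-180209172926: 05f1e832ab438e5a980d1102e84cdb348a0ab055\n')
valid_lines = [line for line in gsutil_output.splitlines()
               if line and line[0].isdigit()]
print('\n'.join(valid_lines))
# -> 0.41.0-180209172926: 05f1e832ab438e5a980d1102e84cdb348a0ab055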
Example No. 23
  def test_clone_upstream(self):
    git = self.git
    test_parent = os.path.join(self.base_temp_dir, 'test_clone_upstream')
    os.makedirs(test_parent)

    test_dir = os.path.join(test_parent, TEST_REPO_NAME)
    repository = GitRepositorySpec(
        TEST_REPO_NAME, git_dir=test_dir, origin=self.git_dir)
    git.clone_repository_to_path(repository)
    self.assertTrue(os.path.exists(os.path.join(test_dir, 'base_file')))

    want_tags = git.query_tag_commits(self.git_dir, TAG_VERSION_PATTERN)
    have_tags = git.query_tag_commits(test_dir, TAG_VERSION_PATTERN)
    self.assertEqual(want_tags, have_tags)

    got = check_subprocess('git -C "{dir}" remote -v'.format(dir=test_dir))
    # Disable pushes to the origin
    # No upstream since origin is upstream
    self.assertEqual(
        '\n'.join([
            'origin\t{origin} (fetch)'.format(origin=self.git_dir),
            'origin\tdisabled (push)']),
        got)

    reference = git.determine_git_repository_spec(test_dir)
    expect = GitRepositorySpec(os.path.basename(self.git_dir),
                               origin=self.git_dir, git_dir=test_dir)
    self.assertEqual(expect, reference)
Example No. 24
  def collect_gcb_versions(self, pool):
    options = self.options
    logging.debug('Collecting GCB versions from %s', options.docker_registry)
    command_parts = ['gcloud',
                     '--format=json',
                     'container images list',
                     '--repository', options.docker_registry]
    if options.gcb_service_account:
      logging.debug('Using account %s', options.gcb_service_account)
      command_parts.extend(['--account', options.gcb_service_account])

    response = check_subprocess(' '.join(command_parts))
    images = [entry['name'] for entry in json.loads(response)]
    image_versions = pool.map(self.query_gcr_image_versions, images)

    image_map = {}
    for name, versions in image_versions:
      image_map[name] = versions

    path = os.path.join(
        self.get_output_dir(),
        options.docker_registry.replace('/', '__') + '__gcb_versions.yml')
    logging.info('Writing %s versions to %s', options.docker_registry, path)
    write_to_path(yaml.dump(image_map,
                            allow_unicode=True,
                            default_flow_style=False), path)
    return image_map
Example No. 25
    def do_run_subprocess_ok(self, check, logfile=None):
        if os.path.exists('/bin/true'):
            true_path = '/bin/true'
        elif os.path.exists('/usr/bin/true'):
            true_path = '/usr/bin/true'
        else:
            raise NotImplementedError('Unsupported test platform.')

        tests = [(true_path, ''), ('/bin/echo Hello', 'Hello'),
                 ('/bin/echo "Hello"', 'Hello'),
                 ('/bin/echo "Hello World"', 'Hello World'),
                 ('/bin/echo "Hello\nWorld"', 'Hello\nWorld'),
                 ('/bin/echo \'"Hello World"\'', '"Hello World"')]
        for cmd, expect in tests:
            if logfile:
                output = check_subprocesses_to_logfile('Test Logfile', logfile,
                                                       [cmd])
            elif check:
                output = check_subprocess(cmd)
            else:
                code, output = run_subprocess(cmd)
                self.assertEqual(0, code)

            if logfile:
                self.assertTrue(os.path.exists(logfile))
                self.assertIsNone(output)
                with io.open(logfile, 'r', encoding='utf-8') as stream:
                    lines = stream.read().split('\n')
                self.assertTrue('Spawning' in lines[0])
                self.assertTrue('process completed with' in lines[-2])
                body = '\n'.join(lines[3:-3]).strip()
                self.assertEqual(expect, body)
            else:
                self.assertEqual(expect, output)
Example No. 26
  def do_run_subprocess_ok(self, check, logfile=None):
    if os.path.exists('/bin/true'):
      true_path = '/bin/true'
    elif os.path.exists('/usr/bin/true'):
      true_path = '/usr/bin/true'
    else:
      raise NotImplementedError('Unsupported test platform.')

    tests = [(true_path, ''),
             ('/bin/echo Hello', 'Hello'),
             ('/bin/echo "Hello"', 'Hello'),
             ('/bin/echo "Hello World"', 'Hello World'),
             ('/bin/echo "Hello\nWorld"', 'Hello\nWorld'),
             ('/bin/echo \'"Hello World"\'', '"Hello World"')]
    for cmd, expect in tests:
      if logfile:
        output = check_subprocesses_to_logfile('Test Logfile', logfile, [cmd])
      elif check:
        output = check_subprocess(cmd)
      else:
        code, output = run_subprocess(cmd)
        self.assertEqual(0, code)

      if logfile:
        self.assertTrue(os.path.exists(logfile))
        self.assertIsNone(output)
        with io.open(logfile, 'r', encoding='utf-8') as stream:
          lines = stream.read().split('\n')
        self.assertTrue('Spawning' in lines[0])
        self.assertTrue('process completed with' in lines[-2])
        body = '\n'.join(lines[3:-3]).strip()
        self.assertEqual(expect, body)
      else:
        self.assertEqual(expect, output)
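The assertions above pin down the assumed logfile layout: a 'Spawning' line first, the command output as the body, and a 'process completed with' line near the end. A minimal writer that would satisfy them for a single command (a sketch, not the real check_subprocesses_to_logfile):

import io
import subprocess

def sketch_subprocesses_to_logfile(title, logfile, commands, cwd=None, env=None):
    # Sketch: tee each command's output to the logfile with the framing
    # lines the test above expects, returning None like the real helper.
    with io.open(logfile, 'w', encoding='utf-8') as stream:
        for cmd in commands:
            stream.write(u'Spawning %r for %s\n\n\n' % (cmd, title))
            result = subprocess.run(
                cmd, shell=True, cwd=cwd, env=env,
                stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
            stream.write(result.stdout.decode('utf-8'))
            stream.write(u'\n\n\nprocess completed with %d\n'
                         % result.returncode)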
Example No. 27
 def query_remote_repository_commit_id(self, url, branch):
     """Returns the current commit for the remote repository."""
     args = {}
     self.__inject_auth(args)
     result = check_subprocess('git ls-remote %s %s' % (url, branch),
                               **args)
     return result.split('\t')[0]
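git ls-remote prints lines of the form "<commit>\t<ref>", so taking the text before the first tab yields the commit id. A hypothetical call, assuming git_runner is an instance of the class defining this method:

commit_id = git_runner.query_remote_repository_commit_id(
    'https://github.com/spinnaker/halyard', 'master')
print(commit_id)  # e.g. '05f1e832ab438e5a980d1102e84cdb348a0ab055'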
Example No. 28
 def __check_gcb_image(self, repository, version):
     """Determine if gcb image already exists."""
     options = self.options
     image_name = self.scm.repository_name_to_service_name(repository.name)
     command = [
         'gcloud', '--account', options.gcb_service_account, 'container',
         'images', 'list-tags', options.docker_registry + '/' + image_name,
         '--filter="%s"' % version, '--format=json'
     ]
     got = check_subprocess(' '.join(command))
     if got.strip() != '[]':
         labels = {
             'repository': repository.name,
             'artifact': 'gcr-container'
         }
         if self.options.skip_existing:
             logging.info('Already have %s -- skipping build', image_name)
             self.metrics.inc_counter('ReuseArtifact', labels)
             return True
         if self.options.delete_existing:
             self.__delete_gcb_image(repository, image_name, version)
         else:
             raise_and_log_error(
                 ConfigError('Already have {name} version {version}'.format(
                     name=image_name, version=version)))
     return False
Example No. 29
    def determine_commit(self, repository):
        """Determine the commit_id that we want to publish."""
        if repository.name != 'halyard':
            raise_and_log_error(
                ConfigError('Unexpected repository "%s"' % repository.name))

        options = self.options
        versions_url = options.halyard_version_commits_url
        if not versions_url:
            versions_url = '{base}/{filename}'.format(
                base=options.halyard_bucket_base_url,
                filename=BuildHalyardCommand.HALYARD_VERSIONS_BASENAME)

        if os.path.exists(versions_url):
            logging.debug('Loading halyard version info from file %s',
                          versions_url)
            with open(versions_url, 'r') as stream:
                version_data = stream.read()
        else:
            logging.debug('Loading halyard version info from bucket %s',
                          versions_url)
            version_data = check_subprocess(
                'gsutil cat {url}'.format(url=versions_url))

        commit = yaml.safe_load(version_data).get(options.halyard_version)
        if commit is None:
            raise_and_log_error(
                ConfigError(
                    'Unknown halyard version "{version}" in "{url}"'.format(
                        version=options.halyard_version, url=versions_url)))
        return commit
Example No. 30
    def test_determine_tag_at_patch(self):
        git = self.git
        test_method = git.query_local_repository_commits_to_existing_tag_from_id

        tests = [(BRANCH_A, VERSION_A), (BRANCH_B, VERSION_B)]
        for branch, version in tests:
            new_version = str(version)
            new_version = new_version[:-1] + '1'
            self.run_git('checkout ' + branch)
            self.run_git('checkout -b {branch}-patch'.format(branch=branch))
            pending_messages = []
            for change in ['first', 'second']:
                new_path = os.path.join(self.git_dir, change + '_file')
                check_subprocess('touch "{path}"'.format(path=new_path))
                self.run_git('add "{path}"'.format(path=new_path))
                message = 'fix(test): Made {change} change for testing.'.format(
                    change=change)
                self.run_git(
                    'commit -a -m "{message}"'.format(message=message))
                pending_messages.append(' ' * 4 + message)

            # Clone the repo because <test_method> only works on remote
            # repositories, so we need a local clone whose remote is the
            # test repo we set up. The remote constraint exists because
            # the method wants to use "branch -r".
            clone_dir = os.path.join(self.base_temp_dir, 'tag_at_patch',
                                     branch)
            os.makedirs(clone_dir)
            check_subprocess('git clone {source} {target}'.format(
                source=self.git_dir, target=clone_dir))
            commit_id = git.query_local_repository_commit_id(clone_dir)

            # The pending changes show up for the old tag (and are most recent first)
            all_tags = git.query_tag_commits(clone_dir, TAG_VERSION_PATTERN)
            tag, messages = test_method(clone_dir, commit_id, all_tags)
            self.assertEqual(version, tag)
            self.assertEqual(len(pending_messages), len(messages))
            self.assertEqual(sorted(pending_messages, reverse=True),
                             [m.message for m in messages])

            # When we re-tag at this change,
            # the new tag shows up without pending change.
            self.run_git('tag {version} HEAD'.format(version=new_version))
            all_tags = git.query_tag_commits(self.git_dir, TAG_VERSION_PATTERN)

            tag, messages = test_method(self.git_dir, commit_id, all_tags)
            self.assertEqual(new_version, tag)
            self.assertEqual([], messages)
Example No. 31
  def initiate_github_pull_request(
      self, git_dir, message, base='master', head=None):
    """Initialize a pull request for the given commit on the given branch.

    Args:
      git_dir: [path] The local repository to initiate the pull request with.
      message: [string] The pull request message. If this is multiple lines
         then the first line will be the title, subsequent lines will
         be the PR description.
      base: [string] The base reference for the pull request.
         The default is master, but this could be a BRANCH or OWNER:BRANCH
      head: [string] The branch to use for the pull request. By default this
         is the current branch state of the git_dir repository. This
         too can be BRANCH or OWNER:BRANCH. This branch must have already been
         pushed to the origin repository -- not the local repository.
    """
    options = self.options
    message = message.strip()
    if options.pr_notify_list:
      message += '\n\n@' + ', @'.join(options.pr_notify_list.split(','))

    hub_args = []
    if base:
      hub_args.extend(['-b', base])
    if head:
      hub_args.extend(['-h', head])

    if options.git_never_push:
      logging.warning(
          'SKIP creating pull request because --git_never_push=true.'
          '\nCommand would have been: %s',
          'git -C "{dir}" pull-request {args} -m {msg!r}'.format(
              dir=git_dir, args=' '.join(hub_args), msg=message))
      return

    message_path = None
    if message.find('\n') < 0:
      hub_args.extend(['-m', message])
    else:
      fd, message_path = tempfile.mkstemp(prefix='hubmsg')
      os.write(fd, message.encode('utf-8'))
      os.close(fd)
      hub_args.extend(['-F', message_path])

    logging.info(
        'Initiating pull request in %s from %s to %s with message:\n%s',
        git_dir, base, head if head else '<current branch>', message)

    try:
      kwargs = {}
      self.__inject_auth(kwargs)
      output = check_subprocess(
          'hub -C "{dir}" pull-request {args}'.format(
              dir=git_dir, args=' '.join(hub_args)),
          **kwargs)
      logging.info(output)
    finally:
      if message_path:
        os.remove(message_path)
Example No. 32
    def setUpClass(cls):
        cls.git = GitRunner(make_default_options())
        cls.base_temp_dir = tempfile.mkdtemp(prefix='git_test')
        cls.git_dir = os.path.join(cls.base_temp_dir, 'commit_message_test')
        os.makedirs(cls.git_dir)

        git_dir = cls.git_dir
        gitify = lambda args: 'git -C "{dir}" {args}'.format(dir=git_dir,
                                                             args=args)
        check_subprocess_sequence([
            gitify('init'),
            'touch "{dir}/base_file"'.format(dir=git_dir),
            gitify('add "{dir}/base_file"'.format(dir=git_dir)),
            gitify('commit -a -m "feat(test): added file"'),
            gitify(
                'tag {base_version} HEAD'.format(base_version=VERSION_BASE)),
            gitify('checkout -b {patch_branch}'.format(
                patch_branch=cls.PATCH_BRANCH)),
            'touch "{dir}/patch_file"'.format(dir=git_dir),
            gitify('add "{dir}/patch_file"'.format(dir=git_dir)),
            gitify('commit -a -m "fix(testA): added patch_file"'),
            gitify('checkout -b {minor_branch}'.format(
                minor_branch=cls.MINOR_BRANCH)),
            'touch "{dir}/minor_file"'.format(dir=git_dir),
            gitify('add "{dir}/minor_file"'.format(dir=git_dir)),
            gitify('commit -a -m "chore(testB): added minor_file"'),
            gitify('checkout -b {major_branch}'.format(
                major_branch=cls.MAJOR_BRANCH)),
            'touch "{dir}/major_file"'.format(dir=git_dir),
            gitify('add "{dir}/major_file"'.format(dir=git_dir)),
            gitify('commit -a -m'
                   ' "feat(testC): added major_file\n'
                   '\nInterestingly enough, this is a BREAKING CHANGE.'
                   '"'),
            gitify('checkout -b {merged_branch}'.format(
                merged_branch=cls.MERGED_BRANCH)),
            gitify('reset --hard HEAD~3'),
            gitify('merge --squash HEAD@{1}')
        ])
        env = dict(os.environ)
        if os.path.exists('/bin/true'):
            env['EDITOR'] = '/bin/true'
        elif os.path.exists('/usr/bin/true'):
            env['EDITOR'] = '/usr/bin/true'
        else:
            raise NotImplementedError('platform not supported for this test')
        check_subprocess('git -C "{dir}" commit'.format(dir=git_dir), env=env)
Example No. 33
  def test_determine_tag_at_patch(self):
    git = self.git
    test_method = git.query_local_repository_commits_to_existing_tag_from_id

    tests = [(BRANCH_A, VERSION_A),
             (BRANCH_B, VERSION_B)]
    for branch, version in tests:
      new_version = str(version)
      new_version = new_version[:-1] + '1'
      self.run_git('checkout ' + branch)
      self.run_git('checkout -b {branch}-patch'.format(branch=branch))
      pending_messages = []
      for change in ['first', 'second']:
        new_path = os.path.join(self.git_dir, change + '_file')
        check_subprocess('touch "{path}"'.format(path=new_path))
        self.run_git('add "{path}"'.format(path=new_path))
        message = 'fix(test): Made {change} change for testing.'.format(
            change=change)
        self.run_git('commit -a -m "{message}"'.format(message=message))
        pending_messages.append(' '*4 + message)

      # Clone the repo because <test_method> only works on remote
      # repositories, so we need a local clone whose remote is the
      # test repo we set up. The remote constraint exists because
      # the method wants to use "branch -r".
      clone_dir = os.path.join(self.base_temp_dir, 'tag_at_patch', branch)
      os.makedirs(clone_dir)
      check_subprocess('git clone {source} {target}'.format(
          source=self.git_dir, target=clone_dir))
      commit_id = git.query_local_repository_commit_id(clone_dir)

      # The pending changes show up for the old tag (and are most recent first)
      all_tags = git.query_tag_commits(clone_dir, TAG_VERSION_PATTERN)
      tag, messages = test_method(clone_dir, commit_id, all_tags)
      self.assertEqual(version, tag)
      self.assertEqual(len(pending_messages), len(messages))
      self.assertEqual(sorted(pending_messages, reverse=True),
                       [m.message for m in messages])

      # When we re-tag at this change,
      # the new tag shows up without pending change.
      self.run_git('tag {version} HEAD'.format(version=new_version))
      all_tags = git.query_tag_commits(self.git_dir, TAG_VERSION_PATTERN)

      tag, messages = test_method(self.git_dir, commit_id, all_tags)
      self.assertEqual(new_version, tag)
      self.assertEqual([], messages)
Example No. 34
def get_project(options):
    """Determine the default project name.

    The default project name is the gcloud configured default project.
    """
    if not options.project:
      stdout = check_subprocess('gcloud config list')
      options.project = re.search('project = (.*)\n', stdout).group(1)
    return options.project
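A sketch of the regex extraction above against representative "gcloud config list" output; the exact output format is an assumption here.

import re

stdout = '[core]\nproject = my-project\n'
match = re.search('project = (.*)\n', stdout)
print(match.group(1))  # my-project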
Example No. 35
    def test_clone_origin(self):
        git = self.git

        # Make the origin we're going to test the clone against
        # This is intentionally different from upstream so that
        # we can confirm that upstream is also set up properly.
        origin_user = '******'
        origin_basedir = os.path.join(self.base_temp_dir, origin_user)
        os.makedirs(origin_basedir)
        check_subprocess('git -C "{origin_dir}" clone "{upstream}"'.format(
            origin_dir=origin_basedir, upstream=self.git_dir))

        test_parent = os.path.join(self.base_temp_dir, 'test_clone_origin')
        os.makedirs(test_parent)

        test_dir = os.path.join(test_parent, TEST_REPO_NAME)
        origin_dir = os.path.join(origin_basedir, TEST_REPO_NAME)
        repository = GitRepositorySpec(TEST_REPO_NAME,
                                       git_dir=test_dir,
                                       origin=origin_dir,
                                       upstream=self.git_dir)
        self.git.clone_repository_to_path(repository)

        want_tags = git.query_tag_commits(self.git_dir, TAG_VERSION_PATTERN)
        have_tags = git.query_tag_commits(test_dir, TAG_VERSION_PATTERN)
        self.assertEqual(want_tags, have_tags)

        got = check_subprocess('git -C "{dir}" remote -v'.format(dir=test_dir))

        # Upstream repo is configured for pulls, but not for pushes.
        self.assertEqual(
            '\n'.join([
                'origin\t{origin} (fetch)'.format(origin=origin_dir),
                'origin\t{origin} (push)'.format(origin=origin_dir),
                'upstream\t{upstream} (fetch)'.format(upstream=self.git_dir),
                'upstream\tdisabled (push)'
            ]), got)

        reference = git.determine_git_repository_spec(test_dir)
        expect = GitRepositorySpec(os.path.basename(origin_dir),
                                   upstream=self.git_dir,
                                   origin=origin_dir,
                                   git_dir=test_dir)
        self.assertEqual(expect, reference)
Example No. 36
  def test_clone_origin(self):
    git = self.git

    # Make the origin we're going to test the clone against
    # This is intentionally different from upstream so that
    # we can confirm that upstream is also set up properly.
    origin_user = '******'
    origin_basedir = os.path.join(self.base_temp_dir, origin_user)
    os.makedirs(origin_basedir)
    check_subprocess(
        'git -C "{origin_dir}" clone "{upstream}"'.format(
            origin_dir=origin_basedir, upstream=self.git_dir))

    test_parent = os.path.join(self.base_temp_dir, 'test_clone_origin')
    os.makedirs(test_parent)

    test_dir = os.path.join(test_parent, TEST_REPO_NAME)
    origin_dir = os.path.join(origin_basedir, TEST_REPO_NAME)
    repository = GitRepositorySpec(
        TEST_REPO_NAME,
        git_dir=test_dir, origin=origin_dir, upstream=self.git_dir)
    self.git.clone_repository_to_path(repository)

    want_tags = git.query_tag_commits(self.git_dir, TAG_VERSION_PATTERN)
    have_tags = git.query_tag_commits(test_dir, TAG_VERSION_PATTERN)
    self.assertEqual(want_tags, have_tags)

    got = check_subprocess('git -C "{dir}" remote -v'.format(dir=test_dir))

    # Upstream repo is configured for pulls, but not for pushes.
    self.assertEqual(
        '\n'.join([
            'origin\t{origin} (fetch)'.format(origin=origin_dir),
            'origin\t{origin} (push)'.format(origin=origin_dir),
            'upstream\t{upstream} (fetch)'.format(upstream=self.git_dir),
            'upstream\tdisabled (push)'
            ]),
        got)

    reference = git.determine_git_repository_spec(test_dir)
    expect = GitRepositorySpec(
        os.path.basename(origin_dir),
        upstream=self.git_dir, origin=origin_dir, git_dir=test_dir)
    self.assertEqual(expect, reference)
Esempio n. 40
    def __collect_gce_quota(
        self,
        gcloud_account,
        project,
        region,
        project_percent=100.0,
        region_percent=100.0,
    ):
        project_info_json = check_subprocess(
            "gcloud compute project-info describe"
            " --account {gcloud_account}"
            " --format yaml"
            " --project {project}".format(project=project,
                                          gcloud_account=gcloud_account))
        project_info = yaml.safe_load(project_info_json)
        # Sometimes GCE returns entries but leaves out the "metric" they are
        # for. We'll ignore those and stick them in 'UNKNOWN' for simplicity.
        project_quota = {
            "gce_global_%s" % info.get("metric", "UNKNOWN"): int(
                max(
                    1,
                    math.floor(project_percent *
                               (info["limit"] - info["usage"]))))
            for info in project_info["quotas"]
        }

        region_info_json = check_subprocess("gcloud compute regions describe"
                                            " --account {gcloud_account}"
                                            " --format yaml"
                                            " --project {project}"
                                            " {region}".format(
                                                gcloud_account=gcloud_account,
                                                project=project,
                                                region=region))
        region_info = yaml.safe_load(region_info_json)
        region_quota = {
            "gce_region_%s" % info.get("metric", "UNKNOWN"): int(
                max(
                    1,
                    math.floor(region_percent *
                               (info["limit"] - info["usage"]))))
            for info in region_info["quotas"]
        }
        return project_quota, region_quota
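A hedged usage sketch from inside the same class: the 'gce_global_' and 'gce_region_' key prefixes keep the two dicts disjoint, so they can be merged into a single headroom map (the account, project, and region values below are placeholders):

# Illustrative only -- merge project-level and regional headroom.
project_quota, region_quota = self.__collect_gce_quota(
    "builder@example.iam.gserviceaccount.com", "my-project", "us-central1",
    project_percent=50.0, region_percent=50.0)
headroom = dict(project_quota)
headroom.update(region_quota)
# Every value is at least 1: int(max(1, floor(percent * (limit - usage)))).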
Example 41
    def build_all_distributions(self, repository):
        name = repository.name
        source_info = self.source_code_manager.refresh_source_info(
            repository, self.options.build_number)
        self.__build_version = source_info.to_build_version()
        config_root = repository.git_dir

        check_subprocess('go get -d -v', cwd=config_root)
        for dist_arch in DIST_ARCH_LIST:
            # Binaries are stored in a sub-directory keyed by
            # ${build_version}/${dist}/${arch}.
            version_bin_path = ('spin/{}/{}/{}/spin'.format(
                self.__build_version, dist_arch.dist, dist_arch.arch))

            context = '%s-%s' % (dist_arch.dist, dist_arch.arch)
            logfile = self.get_logfile_path(repository.name + '-build-' +
                                            context)
            logging.info('Building spin binary for %s', dist_arch)
            labels = {
                'repository': repository.name,
                'dist': dist_arch.dist,
                'arch': dist_arch.arch
            }
            env = dict(os.environ)
            env.update({
                'CGO_ENABLED': '0',
                'GOOS': dist_arch.dist,
                'GOARCH': dist_arch.arch
            })
            self.metrics.time_call(
                'GoBuild',
                labels,
                self.metrics.default_determine_outcome_labels,
                check_subprocesses_to_logfile,
                'Building spin ' + context,
                logfile, ['go build .'],
                cwd=config_root,
                env=env)

            spin_path = '{}/spin'.format(config_root)
            self.__gcs_uploader.upload_from_filename(version_bin_path,
                                                     spin_path)
            os.remove(spin_path)
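The build loop relies on Go's environment-driven cross-compilation: CGO_ENABLED=0 yields a static binary and GOOS/GOARCH select the target platform. A self-contained sketch of that pattern, with illustrative targets and a hypothetical source path:

import os
import subprocess

# Illustrative: one static binary per (GOOS, GOARCH) target.
for goos, goarch in [('linux', 'amd64'), ('darwin', 'amd64')]:
    env = dict(os.environ)
    env.update({'CGO_ENABLED': '0', 'GOOS': goos, 'GOARCH': goarch})
    subprocess.check_call(['go', 'build', '.'], cwd='/path/to/spin', env=env)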
Example 42
 def load_bom_from_url(self, url):
   """Returns the bom specification dict from a gcs url."""
   logging.debug('Loading %s', url)
   try:
     text = check_subprocess('gsutil cat ' + url)
     return yaml.safe_load(text)
   except Exception as ex:
     self.__bad_files[self.url_to_bom_name(url)] = exception_to_message(ex)
     maybe_log_exception('load_bom_from_url', ex,
                         action_msg='Skipping %s' % url)
     return None
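Because failures are recorded in __bad_files and surface to the caller as None, callers can fan out over many bom URLs and keep only the readable ones. A small illustrative loop (the URLs are placeholders):

# Illustrative caller: skip boms that failed to load.
boms = {}
for url in ['gs://my-bucket/bom/1.2.3.yml', 'gs://my-bucket/bom/1.2.4.yml']:
    bom = self.load_bom_from_url(url)
    if bom is not None:
        boms[url] = bom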
Example 43
  def test_determine_tag_at_later_branch(self):
    git = self.git
    test_method = git.query_local_repository_commits_to_existing_tag_from_id

    # Clone the repo first: <test_method> only works on repositories that
    # have a remote, so we put a local clone in front of the test repo we
    # set up. The remote constraint exists because it wants "branch -r".
    clone_dir = os.path.join(self.base_temp_dir, 'tag_at_later_branch')
    os.makedirs(clone_dir)
    check_subprocess('git clone {source} {target}'.format(
        source=self.git_dir, target=clone_dir))

    check_subprocess('git -C {dir} checkout '.format(dir=clone_dir) + BRANCH_C)
    commit_id = git.query_local_repository_commit_id(clone_dir)
    all_tags = git.query_tag_commits(clone_dir, TAG_VERSION_PATTERN)
    tag, messages = test_method(clone_dir, commit_id, all_tags)
    self.assertEqual(VERSION_B, tag)
    self.assertEqual(2, len(messages))
    self.assertTrue(messages[0].message.find('added c_file') >= 0)
    self.assertTrue(messages[1].message.find('added master_file') >= 0)
Example 44
  def _do_command(self):
    """Wraps base method so we can start/stop redis if needed."""
    self._ensure_templates_directory()
    redis_name = 'redis-server'
    start_redis = run_subprocess('sudo service %s status' % redis_name)[0] != 0
    logging.debug('redis-server %s running.',
                  'IS NOT' if start_redis else 'IS')

    try:
      if start_redis:
        check_subprocess('sudo service %s start' % redis_name)
      super(BuildApiDocsCommand, self)._do_command()
    finally:
      if start_redis:
        # pylint: disable=broad-except
        try:
          check_subprocess('sudo service %s stop' % redis_name)
        except Exception as ex:
          maybe_log_exception(
              self.name, ex,
              'Ignoring exception while stopping temporary ' + redis_name)
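This method leans on the division of labor running through all these examples: run_subprocess returns a (returncode, output) pair so callers can probe, while check_subprocess raises on a nonzero exit so callers can enforce. A minimal sketch of that contract, assuming both helpers shell out and capture combined output; the real implementations are richer:

import subprocess

# Minimal sketch, not the real helpers.
def run_subprocess(command):
    proc = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT)
    stdout, _ = proc.communicate()
    return proc.returncode, stdout.decode('utf-8').strip()

def check_subprocess(command):
    returncode, output = run_subprocess(command)
    if returncode != 0:
        raise RuntimeError('"%s" exited with %d: %s'
                           % (command, returncode, output))
    return output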
Example 45
  def __collect_halconfig_files(self, repository):
    """Gets the component config files and writes them into the output_dir."""
    name = repository.name
    if (name not in SPINNAKER_BOM_REPOSITORY_NAMES
        or name in ['spin']):
      logging.debug('%s does not use config files -- skipping', name)
      return

    if name == 'spinnaker-monitoring':
      config_root = os.path.join(
          repository.git_dir, 'spinnaker-monitoring-daemon')
    else:
      config_root = repository.git_dir

    service_name = self.scm.repository_name_to_service_name(repository.name)
    target_dir = os.path.join(self.get_output_dir(), 'halconfig', service_name)
    ensure_dir_exists(target_dir)

    config_path = os.path.join(config_root, 'halconfig')
    logging.info('Copying configs from %s...', config_path)
    for profile in os.listdir(config_path):
      profile_path = os.path.join(config_path, profile)
      if os.path.isfile(profile_path):
        shutil.copyfile(profile_path, os.path.join(target_dir, profile))
        logging.debug('Copied profile to %s', profile_path)
      elif not os.path.isdir(profile_path):
        logging.warning('%s is neither file nor directory -- ignoring',
                        profile_path)
        continue
      else:
        tar_path = os.path.join(
            target_dir, '{profile}.tar.gz'.format(profile=profile))
        file_list = ' '.join(os.listdir(profile_path))

        # NOTE: For historical reasons this archive is not actually
        # compressed even though tar_path ends in ".tar.gz".
        check_subprocess(
            'tar cf {path} -C {profile} {file_list}'.format(
                path=tar_path, profile=profile_path, file_list=file_list))
        logging.debug('Copied profile to %s', tar_path)
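Since the archive is written with plain 'tar cf' despite its '.tar.gz' name, readers should not assume gzip. Python's tarfile copes either way because mode 'r:*' sniffs the actual compression (the path below is hypothetical):

import tarfile

# 'r:*' transparently opens plain or gzipped tars, so the misleading
# extension is harmless to readers.
with tarfile.open('/tmp/halconfig/service/profile.tar.gz', 'r:*') as archive:
    archive.extractall('/tmp/halconfig/service/profile')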
Example 46
  def __collect_gce_quota(self, project, region,
                          project_percent=100.0, region_percent=100.0):
    project_info_json = check_subprocess('gcloud compute project-info describe'
                                         ' --format yaml'
                                         ' --project %s' % project)
    project_info = yaml.safe_load(project_info_json)
    # Sometimes GCE returns entries but leaves out the "metric" they are
    # for. We'll ignore those and stick them in 'UNKNOWN' for simplicity.
    project_quota = {
        'gce_global_%s' % info.get('metric', 'UNKNOWN'): int(max(
            1, math.floor(project_percent * (info['limit'] - info['usage']))))
        for info in project_info['quotas']
    }

    region_info_json = check_subprocess('gcloud compute regions describe'
                                        ' --format yaml'
                                        ' --project %s'
                                        ' %s' % (project, region))
    region_info = yaml.safe_load(region_info_json)
    region_quota = {
        'gce_region_%s' % info.get('metric', 'UNKNOWN'): int(max(
            1, math.floor(region_percent * (info['limit'] - info['usage']))))
        for info in region_info['quotas']
    }
    return project_quota, region_quota
Example 47
  def have_image(self, repository):
    """Determine if we already have an image for the repository or not."""
    bom = self.source_code_manager.bom
    dependencies = bom['dependencies']
    services = bom['services']
    service_name = self.scm.repository_name_to_service_name(repository.name)
    if service_name in dependencies:
      build_version = dependencies[service_name]['version']
    else:
      build_version = services[service_name]['version']

    options = self.options
    image_name = 'spinnaker-{repo}-{version}'.format(
        repo=repository.name,
        version=build_version.replace('.', '-').replace(':', '-'))
    lookup_command = ['gcloud', '--account', options.build_gce_service_account,
                      'compute', 'images', 'list', '--filter', image_name,
                      '--project', self.__image_project,
                      '--quiet', '--format=json']
    logging.debug('Checking for existing image for "%s"', repository.name)
    got = check_subprocess(' '.join(lookup_command))
    if got.strip() == '[]':
      return False
    labels = {'repository': repository.name, 'artifact': 'gce-image'}
    if self.options.skip_existing:
      logging.info('Already have %s -- skipping build', image_name)
      self.metrics.inc_counter('ReuseArtifact', labels)
      return True
    if not self.options.delete_existing:
      raise_and_log_error(
          ConfigError('Already have image "{name}"'.format(name=image_name)))

    delete_command = ['gcloud', '--account', options.gcb_service_account,
                      'compute', 'images', 'delete', image_name,
                      '--project', options.build_gce_project,
                      '--quiet']
    logging.debug('Deleting existing image %s', image_name)
    self.metrics.count_call(
        'DeleteArtifact', labels,
        'Attempts to delete existing GCE images.',
        check_subprocess, ' '.join(delete_command))
    return False
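The comparison against '[]' works because --format=json prints an empty JSON array when the filter matches nothing. A slightly sturdier sketch would parse the payload instead of string-matching it (illustrative, not the author's code):

import json

got = '[]'  # stand-in for check_subprocess(' '.join(lookup_command))
have_existing_image = bool(json.loads(got.strip() or '[]'))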
Example 48
 def __check_gcb_image(self, repository, version):
   """Determine if gcb image already exists."""
   options = self.options
   image_name = self.scm.repository_name_to_service_name(repository.name)
   command = ['gcloud', '--account', options.gcb_service_account,
              'container', 'images', 'list-tags',
              options.docker_registry + '/' + image_name,
              '--filter="%s"' % version,
              '--format=json']
   got = check_subprocess(' '.join(command))
   if got.strip() != '[]':
     labels = {'repository': repository.name, 'artifact': 'gcr-container'}
     if self.options.skip_existing:
       logging.info('Already have %s -- skipping build', image_name)
       self.metrics.inc_counter('ReuseArtifact', labels)
       return True
     if self.options.delete_existing:
       self.__delete_gcb_image(repository, image_name, version)
     else:
       raise_and_log_error(
           ConfigError('Already have {name} version {version}'.format(
               name=image_name, version=version)))
   return False
Example 49
  def determine_halyard_commit(self):
    """Determine the commit_id that we want to publish."""
    options = self.options
    versions_url = options.halyard_version_commits_url
    if not versions_url:
      versions_url = '{base}/{filename}'.format(
          base=options.halyard_bucket_base_url,
          filename=BuildHalyardCommand.HALYARD_VERSIONS_BASENAME)

    if os.path.exists(versions_url):
      logging.debug('Loading halyard version info from file %s', versions_url)
      with open(versions_url, 'r') as stream:
        version_data = stream.read()
    else:
      logging.debug('Loading halyard version info from bucket %s', versions_url)
      version_data = check_subprocess(
          'gsutil cat {url}'.format(url=versions_url))

    commit = yaml.safe_load(version_data).get(options.halyard_version)
    if commit is None:
      raise_and_log_error(
          ConfigError('Unknown halyard version "{version}" in "{url}"'.format(
              version=options.halyard_version, url=versions_url)))
    return commit
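The versions file read here is assumed to be a flat YAML mapping of halyard version to commit id, which is why safe_load(...).get(version) is the whole lookup. A small illustration with fabricated contents:

import yaml

# Assumed file shape; both versions and commit ids below are made up.
version_data = '\n'.join([
    '1.0.0: 0123456789abcdef0123456789abcdef01234567',
    '1.1.0: 89abcdef0123456789abcdef0123456789abcdef',
])
commit = yaml.safe_load(version_data).get('1.1.0')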
Example 50
 def check_run(self, command_line):
   """Run hal with the supplied command_line."""
   args = ' --color false --daemon-endpoint http://{daemon} '.format(
       daemon=self.__options.halyard_daemon)
   return check_subprocess(self.__hal_path + args + command_line)
Example 51
  @classmethod
  def setUpClass(cls):
    cls.git = GitRunner(make_default_options())
    cls.base_temp_dir = tempfile.mkdtemp(prefix='git_test')
    cls.git_dir = os.path.join(cls.base_temp_dir, 'commit_message_test')
    os.makedirs(cls.git_dir)

    git_dir = cls.git_dir
    gitify = lambda args: 'git -C "{dir}" {args}'.format(dir=git_dir, args=args)
    check_subprocess_sequence([
        gitify('init'),
        'touch "{dir}/base_file"'.format(dir=git_dir),
        gitify('add "{dir}/base_file"'.format(dir=git_dir)),
        gitify('commit -a -m "feat(test): added file"'),
        gitify('tag {base_version} HEAD'.format(base_version=VERSION_BASE)),

        # For testing patches
        gitify('checkout -b {patch_branch}'.format(
            patch_branch=cls.PATCH_BRANCH)),
        'touch "{dir}/patch_file"'.format(dir=git_dir),
        gitify('add "{dir}/patch_file"'.format(dir=git_dir)),
        gitify('commit -a -m "fix(testA): added patch_file"'),

        # For testing minor versions
        gitify('checkout -b {minor_branch}'.format(
            minor_branch=cls.MINOR_BRANCH)),
        'touch "{dir}/minor_file"'.format(dir=git_dir),
        gitify('add "{dir}/minor_file"'.format(dir=git_dir)),
        gitify('commit -a -m "feat(testB): added minor_file"'),

        # For testing major versions
        gitify('checkout -b {major_branch}'.format(
            major_branch=cls.MAJOR_BRANCH)),
        'touch "{dir}/major_file"'.format(dir=git_dir),
        gitify('add "{dir}/major_file"'.format(dir=git_dir)),
        gitify('commit -a -m'
               ' "feat(testC): added major_file\n'
               '\nInterestingly enough, this is a BREAKING CHANGE.'
               '"'),

        # For testing composite commits from a merge of commits
        gitify('checkout -b {merged_branch}'.format(
            merged_branch=cls.MERGED_BRANCH)),
        gitify('reset --hard HEAD~3'),
        gitify('merge --squash HEAD@{1}'),
    ])

    env = dict(os.environ)
    if os.path.exists('/bin/true'):
      env['EDITOR'] = '/bin/true'
    elif os.path.exists('/usr/bin/true'):
      env['EDITOR'] = '/usr/bin/true'
    else:
      raise NotImplementedError('platform not supported for this test')
    check_subprocess('git -C "{dir}" commit'.format(dir=git_dir), env=env)

    # For testing changelog from a commit
    check_subprocess_sequence([
        gitify('checkout {minor_branch}'.format(
            minor_branch=cls.MINOR_BRANCH)),
        gitify('checkout -b {x_branch}'.format(
            x_branch=cls.PATCH_MINOR_BRANCH)),
        'touch "{dir}/xbefore_file"'.format(dir=git_dir),
        gitify('add "{dir}/xbefore_file"'.format(dir=git_dir)),
        gitify('commit -a -m "feat(test): COMMIT AT TAG"'),
        gitify('tag {x_marker} HEAD'.format(x_marker=cls.PATCH_MINOR_X)),
        'touch "{dir}/x_first"'.format(dir=git_dir),
        gitify('add "{dir}/x_first"'.format(dir=git_dir)),
        gitify('commit -a -m "fix(test): First Fix"'),
        'rm "{dir}/x_first"'.format(dir=git_dir),
        gitify('commit -a -m "fix(test): Second Fix"'),
    ])
Example 52
 @classmethod
 def run_git(cls, command):
   return check_subprocess(
       'git -C "{dir}" {command}'.format(dir=cls.git_dir, command=command))
Example 53
 def check_run(self, git_dir, command, **kwargs):
   """Wrapper around check_subprocess."""
   self.__inject_auth(kwargs)
   return check_subprocess(
       'git -C "{dir}" {command}'.format(dir=git_dir, command=command),
       **kwargs)
Example 54
 def query_remote_repository_commit_id(self, url, branch):
   """Returns the current commit for the remote repository."""
   args = {}
   self.__inject_auth(args)
   result = check_subprocess('git ls-remote %s %s' % (url, branch), **args)
   return result.split('\t')[0]
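git ls-remote emits one '<sha>\t<ref>' line per matching ref, so splitting on the tab and taking the first field yields the commit id. A quick illustration with a canned line; note the real method assumes the ref exists, since an empty result would return an empty string:

# Illustrative ls-remote output.
line = '4f8c1a2b3d4e5f60718293a4b5c6d7e8f9012345\trefs/heads/master'
commit_id = line.split('\t')[0]  # -> the 40-char sha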