Example #1
1
  def determine_new_tag(self):
    """Determines the next semver tag for the repository at the path.

    If the commit at HEAD is already tagged with a tag matching --tag_regex_str,
    this function is a no-op. Otherwise it determines the semantic version bump
    for the commits since the last tag matching 'version-X.Y.Z' and suggests a new tag
    based on the commit messages. This suggestion can be overridden with
    --next_tag, which will be used if there are any commits after the last
    semver tag matching 'version-X.Y.Z'.

    Returns:
      [VersionBump]: Next semantic version tag to be used, along with what type
      of version bump it was. Version tag is of the form 'version-X.Y.Z'.
    """
    if self.__next_tag:
      return VersionBump(self.__next_tag, self.get_head_commit())

    # 'git log' entries of the form '$hash $commit_title'
    log_onelines = run_quick('git -C {path} log --pretty=oneline'.format(path=self.path),
                             echo=False).stdout.strip().split('\n')
    commit_hashes = [line.split(' ')[0].strip() for line in log_onelines]

    # Full commit messages, including bodies for finding 'BREAKING CHANGE:'.
    msgs = [
      run_quick('git -C {path} log -n 1 --pretty=medium {hash}'.format(path=self.path, hash=h),
                echo=False).stdout.strip() for h in commit_hashes
    ]

    if len(commit_hashes) != len(msgs):
      raise IOError('Git commit hash list and commit message list are unequal sizes.')

    return self.bump_semver(self.__current_version, commit_hashes, msgs)
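
The bump_semver helper this returns through is not shown in the example. Below is a minimal free-function sketch of the assumed logic, based on conventional-commit titles and the 'BREAKING CHANGE:' body marker mentioned above; the SemanticVersion and VersionBump shapes are assumptions inferred from the call sites (the real classes carry more fields, e.g. a bump-type flag).

import re
from collections import namedtuple

# Assumed shapes, inferred from how determine_new_tag() uses them.
SemanticVersion = namedtuple('SemanticVersion',
                             ['major', 'minor', 'patch', 'hash', 'tag'])
VersionBump = namedtuple('VersionBump', ['version_str', 'commit_hash'])

def bump_semver(current_version, commit_hashes, msgs):
  """Sketch: pick the largest bump implied by commits since the last tag."""
  bump = 'patch'
  for commit_hash, msg in zip(commit_hashes, msgs):
    if commit_hash == current_version.hash:
      break  # Reached the commit already carrying the previous version tag.
    if 'BREAKING CHANGE:' in msg:
      bump = 'major'
    elif bump != 'major' and re.search(r'^\s*feat[(:]', msg, re.MULTILINE):
      bump = 'minor'
  major, minor, patch = (current_version.major, current_version.minor,
                         current_version.patch)
  if bump == 'major':
    major, minor, patch = major + 1, 0, 0
  elif bump == 'minor':
    minor, patch = minor + 1, 0
  else:
    patch += 1
  return VersionBump('version-{0}.{1}.{2}'.format(major, minor, patch),
                     commit_hashes[0])
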
Example #2
0
  def generate_changelog(self):
    """Generate a release changelog and write it to a file.

    The changelog contains a section per microservice that describes the
    changes made since the last Spinnaker release, along with the version
    information.
    """
    changelog = ['Spinnaker {0}\n'.format(self.__toplevel_version)]
    for comp, hash in self.__changelog_start_hashes.iteritems():
      version = self.__version_from_tag(comp)

      # Generate the changelog for the component.
      print 'Generating changelog for {comp}...'.format(comp=comp)
      # Assumes the remote repository is aliased as 'origin'.
      component_url = run_quick('git -C {path} config --get remote.origin.url'
                                .format(path=comp)).stdout.strip()
      if component_url.endswith('.git'):
        component_url = component_url.replace('.git', '')
      result = run_quick('cd {comp}; clog -r {url} -f {hash} --setversion {version}; cd ..'
                         .format(comp=comp, url=component_url, hash=hash, version=version))
      if result.returncode != 0:
        print "Changelog generation failed for {0} with \n{1}\n exiting...".format(comp, result.stdout)
        exit(result.returncode)
      # Capitalize the component name for the section header.
      comp_cap = comp[0].upper() + comp[1:]
      changelog.append('# {0}\n{1}'.format(comp_cap, result.stdout))
    print 'Writing changelog...'
    # Write the changelog with the toplevel version without the build number.
    # This is ok since the changelog is only published if the toplevel version is released.
    changelog_file = self.__changelog_output or '{0}-changelog.md'.format(self.__toplevel_version)
    with open(changelog_file, 'w') as clog:
      clog.write('\n'.join(changelog))
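
The __version_from_tag helper is not shown here. A hedged sketch of what it is assumed to do, namely derive the bare 'X.Y.Z' string by stripping the 'version-' prefix from the component's release tag; the __component_versions lookup is an assumption borrowed from a later example in this set.

  def __version_from_tag(self, comp):
    """Sketch: derive 'X.Y.Z' from a component's 'version-X.Y.Z' tag."""
    tag = self.__component_versions[comp].version_str  # assumed lookup
    prefix = 'version-'
    return tag[len(prefix):] if tag.startswith(prefix) else tag
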
Example #3
0
  def __tag_gcb_mirror(cls, name, mirror_base_url, gradle_root, sync_branch, logfile):
    add_mirror_cmds = [
      'git remote add mirror {base_url}/{name}.git'.format(base_url=mirror_base_url, name=name),
      'git fetch mirror'
    ]
    run_shell_and_log(add_mirror_cmds, logfile, cwd=gradle_root)

    # Strip each line: 'git branch -r' indents branch names with whitespace.
    all_remote_branches = [
      line.strip() for line in
      run_quick('git -C {name} branch -r'.format(name=name),
                echo=False).stdout.strip().splitlines()
    ]
    checkout_cmd = ''
    print all_remote_branches
    if 'mirror/{}'.format(sync_branch) in all_remote_branches:
      checkout_cmd = 'git checkout mirror/{branch}'.format(branch=sync_branch)
    else:
      checkout_cmd = 'git checkout {branch}'.format(branch=sync_branch)

    tag = run_quick('cat {name}-gcb-trigger.yml'.format(name=name), echo=False).stdout.strip()
    cmds = [
      checkout_cmd,
      'git merge origin/{branch}'.format(branch=sync_branch),
      'git push mirror {branch}'.format(branch=sync_branch),
      'git push mirror {tag}'.format(tag=tag)
    ]
    if os.path.exists(logfile):
      os.remove(logfile)
    run_shell_and_log(cmds, logfile, cwd=gradle_root)
    return tag
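
run_shell_and_log is used throughout these examples but never defined. A minimal sketch, assuming it runs each shell command in order from cwd, appends combined stdout/stderr to logfile, and stops on the first failure; the real helper may differ.

import subprocess

def run_shell_and_log(cmds, logfile, cwd=None):
  """Sketch: run shell commands in order, appending output to logfile."""
  with open(logfile, 'a') as log:
    for cmd in cmds:
      log.write('Running: {0}\n'.format(cmd))
      proc = subprocess.Popen(cmd, shell=True, cwd=cwd,
                              stdout=subprocess.PIPE,
                              stderr=subprocess.STDOUT)
      out, _ = proc.communicate()
      log.write(out.decode('utf-8') if isinstance(out, bytes) else out)
      if proc.returncode != 0:
        raise RuntimeError('Command failed: {0}'.format(cmd))
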
Example #4
0
 def create_stable_branch(self):
     """Creates a branch from --initial_branch/HEAD named --stable_branch.
 """
     run_quick('git -C {path} checkout {initial_branch}'.format(
         path=self.path, initial_branch=self.__initial_branch))
     run_quick('git -C {path} checkout -b {stable_branch}'.format(
         path=self.path, stable_branch=self.__stable_branch))
Example #5
0
    def __tag_gcb_mirror(cls, name, mirror_base_url, gradle_root, sync_branch,
                         logfile):
        add_mirror_cmds = [
            'git remote add mirror {base_url}/{name}.git'.format(
                base_url=mirror_base_url, name=name), 'git fetch mirror'
        ]
        run_shell_and_log(add_mirror_cmds, logfile, cwd=gradle_root)

        # Strip each line: 'git branch -r' indents branch names with whitespace.
        all_remote_branches = [
            line.strip() for line in run_quick(
                'git -C {name} branch -r'.format(name=name),
                echo=False).stdout.strip().splitlines()
        ]
        checkout_cmd = ''
        print all_remote_branches
        if 'mirror/{}'.format(sync_branch) in all_remote_branches:
            checkout_cmd = 'git checkout mirror/{branch}'.format(
                branch=sync_branch)
        else:
            checkout_cmd = 'git checkout {branch}'.format(branch=sync_branch)

        tag = run_quick('cat {name}-gcb-trigger.yml'.format(name=name),
                        echo=False).stdout.strip()
        cmds = [
            checkout_cmd,
            'git merge origin/{branch}'.format(branch=sync_branch),
            'git push mirror {branch}'.format(branch=sync_branch),
            'git push mirror {tag}'.format(tag=tag)
        ]
        if os.path.exists(logfile):
            os.remove(logfile)
        run_shell_and_log(cmds, logfile, cwd=gradle_root)
        return tag
Example #6
0
  def determine_new_tag(self):
    """Determines the next semver tag for the repository at the path.

    If the commit at HEAD is already tagged with a tag matching --tag_regex_str,
    this function is a no-op. Otherwise it determines the semantic version bump
    for the commits since the last tag matching 'version-X.Y.Z' and suggests a new tag
    based on the commit messages. This suggestion can be overridden with
    --next_tag, which will be used if there are any commits after the last
    semver tag matching 'version-X.Y.Z'.

    Returns:
      [VersionBump]: Next semantic version tag to be used, along with what type
      of version bump it was. Version tag is of the form 'version-X.Y.Z'.
    """
    if self.__next_tag:
      return VersionBump(self.__next_tag, self.get_head_commit())

    # 'git log' entries of the form '$hash $commit_title'
    log_onelines = run_quick('git -C {path} log --pretty=oneline'.format(path=self.path),
                             echo=False).stdout.strip().split('\n')
    commit_hashes = [line.split(' ')[0].strip() for line in log_onelines]

    # Full commit messages, including bodies for finding 'BREAKING CHANGE:'.
    msgs = [
      run_quick('git -C {path} log -n 1 --pretty=medium {hash}'.format(path=self.path, hash=h),
                echo=False).stdout.strip() for h in commit_hashes
    ]

    if len(commit_hashes) != len(msgs):
      raise IOError('Git commit hash list and commit message list are unequal sizes.')

    return self.bump_semver(self.__current_version, commit_hashes, msgs)
Example #7
0
    def tag_head(self):
        """Tags the current branch's HEAD with the next semver tag.

    Returns:
      [VersionBump]: The version bump used to tag the git repository, or None
      if the tagging fails.
    """
        if self.__is_head_current():
            # We manually specified a tag and want to override with that one.
            if self.__next_tag:
                self.__tag_head_with_build(self.__next_tag)
                run_quick('git -C {path} tag {next_tag} HEAD'.format(
                    path=self.path, next_tag=self.__next_tag))
                return VersionBump(self.__next_tag, self.get_head_commit())
            # We didn't manually specify, but want to force a rebuild of the old tag.
            elif self.__force_rebuild:
                self.__tag_head_with_build(self.__current_version.tag)
                return VersionBump(self.__current_version.tag,
                                   self.get_head_commit(),
                                   patch=True)
            # Else fail.
            else:
                logging.warn(
                    "There is already a tag of the form 'version-X.Y.Z' at HEAD. Not forcing rebuild."
                )
                return None
        else:
            version_bump = self.determine_new_tag()
            # This tag is for logical identification for developers. This will be pushed
            # to the upstream git repository if we choose to use this version in a
            # formal Spinnaker product release.
            run_quick('git -C {path} tag {next_tag} HEAD'.format(
                path=self.path, next_tag=version_bump.version_str))
            self.__tag_head_with_build(version_bump.version_str)
            return version_bump
Example #8
0
  def generate_changelog(self):
    """Generate a release changelog and write it to a file.

    The changelog contains a section per microservice that describes the
    changes made since the last Spinnaker release, along with the version
    information.
    """
    changelog = ['Spinnaker {0}\n'.format(self.__toplevel_version)]
    for comp, hash in self.__changelog_start_hashes.iteritems():
      version = self.__version_from_tag(comp)

      # Generate the changelog for the component.
      print 'Generating changelog for {comp}...'.format(comp=comp)
      # Assumes the remote repository is aliased as 'origin'.
      component_url = run_quick('git -C {path} config --get remote.origin.url'
                                .format(path=comp)).stdout.strip()
      if component_url.endswith('.git'):
        component_url = component_url.replace('.git', '')
      result = run_quick('cd {comp}; clog -r {url} -f {hash} --setversion {version}; cd ..'
                         .format(comp=comp, url=component_url, hash=hash, version=version))
      if result.returncode != 0:
        print "Changelog generation failed for {0} with \n{1}\n exiting...".format(comp, result.stdout)
        exit(result.returncode)
      # Capitalize the component name for the section header.
      comp_cap = comp[0].upper() + comp[1:]
      changelog.append('# {0}\n{1}'.format(comp_cap, result.stdout))
    print 'Writing changelog...'
    # Write the changelog with the toplevel version without the build number.
    # This is ok since the changelog is only published if the toplevel version is released.
    changelog_file = self.__changelog_output or '{0}-changelog.md'.format(self.__toplevel_version)
    with open(changelog_file, 'w') as clog:
      clog.write('\n'.join(changelog))
Example #9
0
    def tag_head(self):
        """Tags the current branch's HEAD with the next semver tag.
    """
        version_bump = self.determine_next_tag()
        if version_bump is None:
            # next_tag is None if we don't need to add a tag. There is already a
            # 'version-X.Y.Z-$build' tag at HEAD.
            logging.warn(
                "There is already a tag of the form 'version-X.Y.Z-$build' at HEAD."
            )
            return

        next_tag = '{0}-{1}'.format(version_bump.version_str,
                                    self.__build_number)
        # This tag is for logical identification for developers. This will be pushed
        # to the upstream git repository if we choose to use this version in a
        # formal Spinnaker product release.
        run_quick('git -C {path} tag {next_tag} HEAD'.format(
            path=self.__path, next_tag=next_tag))

        # This tag is for gradle to use as the package version. It incorporates the
        # build number for uniqueness when publishing. This tag is of the form
        # 'X.Y.Z-$build_number' for gradle to use correctly. This is not pushed
        # to the upstream git repository.
        first_dash_idx = next_tag.find('-')  # find() returns -1 when absent; index() would raise.
        if first_dash_idx != -1:
            gradle_version = next_tag[first_dash_idx + 1:]
            run_quick('git -C {path} tag {next_tag} HEAD'.format(
                path=self.__path, next_tag=gradle_version))
Example #10
0
    def determine_next_tag(self):
        """Determines the next semver tag for the repository at the path.

    If the commit at HEAD is already tagged with a tag matching --tag_regex_str,
    this function is a no-op. Otherwise it determines the semantic version bump
    for the commits since the last tag matching 'version-X.Y.Z-$build' and suggests a new tag
    based on the commit messages. This suggestion can be overridden with
    --next_tag, which will be used if there are any commits after the last
    semver tag matching 'version-X.Y.Z-$build'.

    Returns:
      [VersionBump]: Next semantic version tag to be used, along with what type
      of version bump it was. Version tag is of the form 'version-X.Y.Z'.
    """
        head_commit_res = run_quick(
            'git -C {path} rev-parse HEAD'.format(path=self.__path),
            echo=False)
        head_commit = head_commit_res.stdout.strip()

        sorted_filtered_tags = sorted(self.__filtered_tags,
                                      key=lambda ht: ht.version,
                                      reverse=True)

        if len(sorted_filtered_tags) == 0:
            raise GitTagMissingException(
                "No previous version tags of the form 'version-X.Y.Z-$build'.")

        prev_version = sorted_filtered_tags[0]
        if prev_version.hash == head_commit:
            # HEAD already has a tag matching 'version-X.Y.Z-$build', so we don't want to add
            # another.
            logging.warn(
                "There is already a tag of the form 'version-X.Y.Z-$build' at HEAD."
            )
            return None

        if self.__next_tag:
            return self.__next_tag

        # 'git log' entries of the form '$hash $commit_title'
        log_onelines = run_quick(
            'git -C {path} log --pretty=oneline'.format(path=self.__path),
            echo=False).stdout.strip().split('\n')
        commit_hashes = [line.split(' ')[0].strip() for line in log_onelines]

        # Full commit messages, including bodies for finding 'BREAKING CHANGE:'.
        msgs = [
            run_quick('git -C {path} log -n 1 --pretty=medium {hash}'.format(
                path=self.__path, hash=h),
                      echo=False).stdout.strip() for h in commit_hashes
        ]

        if len(commit_hashes) != len(msgs):
            raise IOError(
                'Git commit hash list and commit message list are unequal sizes.'
            )

        return self.bump_semver(prev_version, commit_hashes, msgs)
Example #11
0
  def delete_unwanted_tags(self):
    """Locally deletes tags that don't match TAG_MATCHER.

    This is so that gradle will use the latest resolved semantic version from
    our tag pattern when it builds the package.
    """
    for bad_hash_tag in self.__tags_to_delete:
      # NOTE: The following command prints output to STDOUT, so we don't
      # explicitly log anything.
      run_quick('git -C {path} tag -d {tag}'
                .format(path=self.path, tag=bad_hash_tag.tag))
Example #12
0
  def delete_unwanted_tags(self):
    """Locally deletes tags that don't match TAG_MATCHER.

    This is so that gradle will use the latest resolved semantic version from
    our tag pattern when it builds the package.
    """
    print ('Deleting {0} unwanted git tags locally from {1}'
           .format(len(self.__tags_to_delete), self.path))
    for bad_hash_tag in self.__tags_to_delete:
      run_quick('git -C {path} tag -d {tag}'
                .format(path=self.path, tag=bad_hash_tag.tag), echo=False)
Example #13
0
  def delete_unwanted_tags(self):
    """Locally deletes tags that don't match TAG_MATCHER.

    This is so that gradle will use the latest resolved semantic version from
    our tag pattern when it builds the package.
    """
    print ('Deleting {0} unwanted git tags locally from {1}'
           .format(len(self.__tags_to_delete), self.path))
    for bad_hash_tag in self.__tags_to_delete:
      run_quick('git -C {path} tag -d {tag}'
                .format(path=self.path, tag=bad_hash_tag.tag), echo=False)
Example #14
0
    def delete_unwanted_tags(self):
        """Locally deletes tags that don't match TAG_MATCHER.

    This is so that gradle will use the latest resolved semantic version from
    our tag pattern when it builds the package.
    """
        for bad_hash_tag in self.__tags_to_delete:
            # NOTE: The following command prints output to STDOUT, so we don't
            # explicitly log anything.
            run_quick('git -C {path} tag -d {tag}'.format(
                path=self.path, tag=bad_hash_tag.tag))
Example #15
0
  def create_tarball(self):
    """Create a tar.gz file from the instance specified by the options.

    The file will be written to options.tarball_uri.
    It can be later turned into a GCE image by passing it as the --source-uri
    to gcloud images create.
    """
    project = self.__project
    basename = os.path.basename(self.options.tarball_uri).replace('_', '-')
    first_dot = basename.find('.')
    if first_dot != -1:
        basename = basename[0:first_dot]
    disk_name = '{name}-export'.format(name=basename)
    print 'Attaching external disk "{disk}" to extract image tarball.'.format(
        disk=disk_name)

    # TODO(ewiseblatt): 20151002
    # Add an option to reuse an existing disk to reduce the cycle time.
    # Then guard the create/format/destroy around this option.
    # Still may want/need to attach/detach it here to reduce race conditions
    # on its use since it can only be bound to one instance at a time.
    check_run_quick('gcloud compute disks create'
                    ' {disk_name} --project {project} --zone {zone} --size=10'
                    .format(disk_name=disk_name,
                            project=self.__project,
                            zone=self.__zone),
                    echo=False)

    check_run_quick('gcloud compute instances attach-disk {instance}'
                    ' --disk={disk_name} --device-name=export-disk'
                    ' --project={project} --zone={zone}'
                    .format(instance=self.__instance,
                            disk_name=disk_name,
                            project=self.__project,
                            zone=self.__zone),
                    echo=False)
    try:
      self.__extract_image_tarball_helper()
    finally:
      print 'Detaching and deleting external disk.'
      run_quick('gcloud compute instances detach-disk -q {instance}'
                ' --disk={disk_name} --project={project} --zone={zone}'
                .format(instance=self.__instance,
                        disk_name=disk_name,
                        project=self.__project,
                        zone=self.__zone),
                echo=False)
      run_quick('gcloud compute disks delete -q {disk_name}'
                ' --project={project} --zone={zone}'
                .format(disk_name=disk_name,
                        project=self.__project,
                        zone=self.__zone),
                echo=False)
Example #16
0
  def __is_ready(self):
    description = run_quick(
        'aws ec2 describe-instances'
        ' --profile {region}'
        ' --output json'
        ' --instance-ids {id}'
        ' --query "Reservations[*].Instances[*]"'
        .format(region=self.options.deploy_aws_region,
                id=self.__instance_id),
        echo=False)
    if description.returncode != 0:
      logging.warning('Could not determine public IP: %s', description)
      return False

    # The result is an array of reservations of arrays of instances,
    # but we only expect one, so fish out the first instance info.
    info = json.JSONDecoder().decode(description.stdout)[0][0]
    state = info.get('State', {}).get('Name')
    if state in ['pending', 'initializing']:
      logging.info('Waiting for %s to finish initializing (state=%s)',
                   self.__instance_id, state)
      return False

    if state in ['shutting-down', 'terminated']:
      raise ValueError('VM failed: {0}'.format(info))

    logging.info('%s is in state %s', self.__instance_id, state)
    self.set_instance_ip(info.get('PublicIpAddress'))
    # Attempt to ssh into it so we know it is accepting connections when
    # we return. It takes time to start up.
    logging.info('Checking if it is ready for ssh...')
    check = run_quick(
        'ssh'
        ' -i {ssh_key}'
        ' -o StrictHostKeyChecking=no'
        ' -o UserKnownHostsFile=/dev/null'
        ' {user}@{ip}'
        ' "exit 0"'
        .format(user=self.hal_user,
                ip=self.instance_ip,
                ssh_key=self.ssh_key_path),
        echo=False)
    if check.returncode == 0:
      logging.info('READY')
      return True

    # Sometimes ssh accepts but authentication still fails
    # for a while. If this is the case, then try again
    # through the whole loop to distinguish the VM going away.
    logging.info('%s\nNot yet ready...', check.stdout.strip())
    return False
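
For reference, the nesting this code indexes with [0][0]: the --query above yields a list of reservations, each itself a list of instances. A trimmed, illustrative sketch of that shape (field values are not from a real instance):

EXAMPLE_DESCRIPTION = [
    [   # first (and only expected) reservation
        {
            'State': {'Name': 'running'},
            'PublicIpAddress': '203.0.113.10'
        }
    ]
]
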
Example #17
0
  def __is_ready(self):
    description = run_quick(
        'aws ec2 describe-instances'
        ' --profile {region}'
        ' --output json'
        ' --instance-ids {id}'
        ' --query "Reservations[*].Instances[*]"'
        .format(region=self.options.deploy_aws_region,
                id=self.__instance_id),
        echo=False)
    if description.returncode != 0:
      logging.warning('Could not determine public IP: %s', description)
      return False

    # The result is an array of reservations of arrays of instances,
    # but we only expect one, so fish out the first instance info.
    info = json.JSONDecoder().decode(description.stdout)[0][0]
    state = info.get('State', {}).get('Name')
    if state in ['pending', 'initializing']:
      logging.info('Waiting for %s to finish initializing (state=%s)',
                   self.__instance_id, state)
      return False

    if state in ['shutting-down', 'terminated']:
      raise ValueError('VM failed: {0}'.format(info))

    logging.info('%s is in state %s', self.__instance_id, state)
    self.set_instance_ip(info.get('PublicIpAddress'))
    # Attempt to ssh into it so we know it is accepting connections when
    # we return. It takes time to start up.
    logging.info('Checking if it is ready for ssh...')
    check = run_quick(
        'ssh'
        ' -i {ssh_key}'
        ' -o StrictHostKeyChecking=no'
        ' -o UserKnownHostsFile=/dev/null'
        ' {user}@{ip}'
        ' "exit 0"'
        .format(user=self.hal_user,
                ip=self.instance_ip,
                ssh_key=self.ssh_key_path),
        echo=False)
    if check.returncode == 0:
      logging.info('READY')
      return True

    # Sometimes ssh accepts but authentication still fails
    # for a while. If this is the case, then try again
    # through the whole loop to distinguish the VM going away.
    logging.info('%s\nNot yet ready...', check.stdout.strip())
    return False
Example #18
0
  def cleanup_instance(self):
    """If we deployed an instance, tear it down."""
    if self.options.instance:
      print 'Leaving pre-existing instance {name}'.format(
          name=self.options.instance)
      return

    print 'Deleting instance {name}'.format(name=self.__instance)
    run_quick('gcloud compute instances delete {name}'
              '  --zone={zone} --project={project}'
              .format(name=self.__instance,
                      zone=self.__zone,
                      project=self.__project),
              echo=False)
Example #19
0
def __write_image_delete_script(possible_versions_to_delete, days_before, project,
                                account, project_images, bom_contents_by_name):
  images_to_delete = set([])
  print 'Calculating images for {} versions to delete.'.format(len(possible_versions_to_delete))
  for bom_version in possible_versions_to_delete:
    deletable = [i for i in __derive_images_from_bom(bom_version, bom_contents_by_name) if i in project_images]
    images_to_delete.update(deletable)
  delete_script_lines = []
  for image in images_to_delete:
    result = run_quick('gcloud compute images describe --project={project} --account={account} --format=json {image}'
                       .format(project=project, account=account, image=image), echo=False)
    if result.returncode:
      # Some BOMs may refer to service versions without HA images.
      print('Lookup for image {image} in project {project} failed, ignoring'
            .format(image=image, project=project))
      continue
    json_str = result.stdout.strip()
    payload = json.loads(json_str)

    if __image_age_days(payload) > days_before:
      labels = payload.get('labels', None)
      if not labels or PUBLISHED_TAG_KEY not in labels:
        line = 'gcloud compute images delete --project={project} --account={account} {image} -q'.format(project=project, account=account, image=image)
        delete_script_lines.append(line)
  delete_script = '\n'.join(delete_script_lines)
  timestamp = '{:%Y%m%d%H%M%S}'.format(datetime.datetime.utcnow())
  script_name = 'delete-images-{}'.format(timestamp)
  with open(script_name, 'w') as script:
    script.write(delete_script)
  print 'Wrote image janitor script to {}'.format(script_name)
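
__image_age_days is referenced above but not shown. A hedged sketch, assuming it counts whole days between the image's RFC 3339 'creationTimestamp' and now, dropping everything after the 'T' the same way the tagging code elsewhere in this set does:

import datetime

def __image_age_days(payload):
  """Sketch: whole days since the image's creationTimestamp date."""
  timestamp = payload['creationTimestamp']  # e.g. '2018-01-05T09:30:00-08:00'
  created = datetime.datetime.strptime(timestamp[:timestamp.index('T')],
                                       '%Y-%m-%d')
  return (datetime.datetime.utcnow() - created).days
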
Example #20
0
  def generate_changelog(self):
    """Generate a release changelog and write it to a file.

    The changelog contains a section per microservice that describes the
    changes made since the last Spinnaker release, along with the version
    information.
    """
    changelog = ['Spinnaker {0}\n'.format(self.__toplevel_version)]
    for comp, hash in self.__changelog_start_hashes.iteritems():
      version_bump = self.__component_versions[comp]
      next_tag_with_build = '{0}-{1}'.format(version_bump.version_str,
                                             self.build_number)
      first_dash_idx = next_tag_with_build.index('-')
      version = next_tag_with_build[first_dash_idx + 1:]

      # Generate the changelog for the component.
      print 'Generating changelog for {comp}...'.format(comp=comp)
      result = run_quick('cd {comp}; clog -f {hash} --setversion {version}; cd ..'
                         .format(comp=comp, hash=hash, version=version))
      if result.returncode != 0:
        print "Changelog generation failed for {0} with \n{1}\n exiting...".format(comp, result.stdout)
        exit(result.returncode)
      # Capitalize the component name for the section header.
      comp_cap = comp[0].upper() + comp[1:]
      changelog.append('# {0}\n{1}'.format(comp_cap, result.stdout))
    print 'Writing changelog...'
    # Write the changelog with the toplevel version without the build number.
    # This is ok since the changelog is only published if the toplevel version is released.
    changelog_file = self.__changelog_output or '{0}-changelog.md'.format(self.__toplevel_version)
    with open(changelog_file, 'w') as clog:
      clog.write('\n'.join(changelog))
Example #21
0
    def do_undeploy(self):
        """Implements the BaseBomValidateDeployer interface."""
        options = self.options
        logging.info('Terminating "%s"', options.deploy_aws_name)

        if self.__instance_id:
            all_ids = [self.__instance_id]
        else:
            lookup_response = run_and_monitor(
                'aws ec2 describe-instances'
                ' --filters "Name=tag:Name,Values={name}'
                ',Name=instance-state-name,Values=running"'.format(
                    name=options.deploy_aws_name),
                echo=False)
            if lookup_response.returncode != 0:
                raise ValueError('Could not lookup instance id: {0}'.format(
                    lookup_response))
            exists = json.JSONDecoder().decode(
                lookup_response.stdout).get('Reservations')
            if not exists:
                logging.warning('"%s" is not running', options.deploy_aws_name)
                return
            # Each reservation in 'Reservations' carries its own instance list.
            all_ids = [
                instance['InstanceId'] for reservation in exists
                for instance in reservation['Instances']
            ]

        for instance_id in all_ids:
            logging.info('Terminating "%s" instanceId=%s',
                         options.deploy_aws_name, instance_id)
            response = run_quick(
                'aws ec2 terminate-instances --instance-ids {id}'.format(
                    id=instance_id))
            if response.returncode != 0:
                logging.warning('Failed to delete "%s" instanceId=%s',
                                options.deploy_aws_name, instance_id)
Example #22
0
    def maybe_generate_clean_user_local():
        """Generate a spinnaker-local.yml file without environment variables refs"""
        user_dir = DevInstallationParameters.USER_CONFIG_DIR
        user_config_path = os.path.join(user_dir, 'spinnaker-local.yml')
        if os.path.exists(user_config_path):
            return
        if not os.path.exists(user_dir):
            os.mkdir(user_dir)

        with open(
                '{config_dir}/default-spinnaker-local.yml'.format(
                    config_dir=DevInstallationParameters.INSTALLED_CONFIG_DIR),
                'r') as f:
            content = f.read()

        content = populate_aws_yml(content)
        content = populate_google_yml(content)

        with open(user_config_path, 'w') as f:
            f.write(content)
        os.chmod(user_config_path, 0600)

        change_path = os.path.join(os.path.dirname(os.path.dirname(__file__)),
                                   'install', 'change_cassandra.sh')
        got = run_quick(change_path + ' --echo=inMemory --front50=gcs' +
                        ' --change_defaults=false --change_local=true',
                        echo=False)
Example #23
0
    def __tag_head_with_build(self, version_bump_tag):
        """Tags the current branch's HEAD with the next semver gradle build tag.

    Args:
      version_bump_tag [String]: Semver string to add as a gradle build tag.
    """
        next_tag_with_build = '{0}-{1}'.format(version_bump_tag,
                                               self.build_number)
        # This tag is for gradle to use as the package version. It incorporates the
        # build number for uniqueness when publishing. This tag is of the form
        # 'X.Y.Z-$build_number' for gradle to use correctly. This is not pushed
        # to the upstream git repository.
        first_dash_idx = next_tag_with_build.index('-')
        gradle_version = next_tag_with_build[first_dash_idx + 1:]
        run_quick('git -C {path} tag {next_tag} HEAD'.format(
            path=self.path, next_tag=gradle_version))
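
A worked example of the slicing above, with illustrative values:

next_tag_with_build = 'version-1.2.3-45'         # version_bump_tag + '-' + build_number
first_dash_idx = next_tag_with_build.index('-')  # 7: the dash after 'version'
gradle_version = next_tag_with_build[first_dash_idx + 1:]
print(gradle_version)                            # '1.2.3-45', the package version for gradle
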
Example #24
0
def __tag_images(versions_to_tag, project, account, project_images,
                 bom_contents_by_name):
    images_to_tag = set([])
    for bom_version in versions_to_tag:
        to_tag = [
            i for i in __derive_images_from_bom(
                bom_version, bom_contents_by_name) if i in project_images
        ]
        images_to_tag.update(to_tag)
    for image in images_to_tag:
        result = run_quick(
            'gcloud compute images describe --project={project} --account={account} --format=json {image}'
            .format(project=project, account=account, image=image),
            echo=False)
        # Adding labels is idempotent, adding the same label again doesn't break anything.
        if not result.returncode:
            payload_str = result.stdout.strip()
            timestamp = json.loads(payload_str)['creationTimestamp']
            timestamp = timestamp[:timestamp.index('T')]
            check_run_quick(
                'gcloud compute images add-labels --project={project} --account={account} --labels={key}={timestamp} {image}'
                .format(project=project,
                        account=account,
                        key=PUBLISHED_TAG_KEY,
                        timestamp=timestamp,
                        image=image))
Example #25
0
 def get_head_commit(self):
   """Retrieves the head commit hash.
   """
   head_commit_res = run_quick('git -C {path} rev-parse HEAD'
                               .format(path=self.path),
                               echo=False)
   return head_commit_res.stdout.strip()
Example #26
0
 def get_head_commit(self):
   """Retrieves the head commit hash.
   """
   head_commit_res = run_quick('git -C {path} rev-parse HEAD'
                               .format(path=self.path),
                               echo=False)
   return head_commit_res.stdout.strip()
Example #27
0
    def __publish_config(self, component, profile_path):
        """Publishes the yaml configuration consumed by Halyard for the component.

    Args:
      component [string]: Name of the Spinnaker component.
      profile_path [string]: Path to component's yaml configuration file.
    """
        for profile in os.listdir(profile_path):
            full_profile = os.path.join(profile_path, profile)
            if os.path.isfile(full_profile):
                # Don't zip with `tar` if the profile is a file.
                self.__hal_upload_profile(component, self.__bom_file,
                                          full_profile)
            elif os.path.isdir(full_profile):
                tar_path = '{0}.tar.gz'.format(full_profile)
                # -z so the archive matches its .tar.gz name.
                result = run_quick('tar -czvf {0} -C {1} $(ls {1})'.format(
                    tar_path, full_profile))
                if result.returncode != 0:
                    print "Creating a tar archive of '{0}/*' failed with \n{1}\nexiting...".format(
                        full_profile, result.stdout)
                    exit(result.returncode)
                self.__hal_upload_profile(component, self.__bom_file, tar_path)
            else:
                print 'Listed profile was neither a file nor a directory. Ignoring...'
                continue
Example #28
0
  def validate_options_helper(cls, options):
    """Adds custom configuration parameters to argument parser.

    This is a helper function for make_deployer().
    """
    if not options.deploy_google_project:
      raise ValueError('--deploy_google_project not specified.')
    if not options.deploy_google_instance:
      raise ValueError('--deploy_google_instance not specified.')
    if not options.deploy_hal_google_service_account:
      raise ValueError('--deploy_hal_google_service_account not specified.')

    if options.deploy_deploy:
      response = run_quick(
          'gcloud compute instances describe'
          ' --account {gcloud_account}'
          ' --project {project} --zone {zone} {instance}'
          .format(gcloud_account=options.deploy_hal_google_service_account,
                  project=options.deploy_google_project,
                  zone=options.deploy_google_zone,
                  instance=options.deploy_google_instance),
          echo=False)

      if response.returncode == 0:
        raise ValueError(
            '"{instance}" already exists in project={project} zone={zone}'
            .format(instance=options.deploy_google_instance,
                    project=options.deploy_google_project,
                    zone=options.deploy_google_zone))
Example #29
0
def make_remote_directories(options):
    # Use 'dirs' rather than shadowing the builtin 'all'.
    dirs = []
    if options.copy_personal_files:
        dirs.append('.gradle')
    if options.aws_credentials:
        dirs.append('.aws')
    if options.master_yml:
        dirs.append('.spinnaker')
    if options.copy_gcloud_config:
        dirs.append('.config/gcloud')

    if dirs:
        command = ' '.join([
            'gcloud compute ssh',
            options.instance,
            '--project', get_project(options),
            '--zone', get_zone(options),
            '--command=\'bash -c "for i in {0}; do mkdir -p \\$i; done"\''.format(' '.join(dirs))])

        while True:
            result = run_quick(command, echo=False)
            if not result.returncode:
                break
            print 'New instance does not seem ready yet...retry in 5s.'
            time.sleep(5)
Example #30
0
def copy_file(options, source, target):
    if os.path.exists(source):
        # TODO(ewiseblatt): we can use scp here instead, and pass the
        # credentials we want to copy with rather than the additional command
        # below. But we need to figure out the IP address to copy to.
        # For now, do it the long way.
        print 'Copying {source}'.format(source=source)
        command = ' '.join([
            'gcloud compute copy-files',
            '--project', get_project(options),
            '--zone', options.zone,
            source,
            '{instance}:{target}'.format(instance=options.instance,
                                         target=target)])
        while True:
            result = run_quick(command, echo=False)
            if not result.returncode:
                break
            print 'New instance does not seem ready yet...retry in 5s.'
            time.sleep(5)

        command = ' '.join([
            'gcloud compute ssh',
            '--command="chmod 600 /home/{gcp_user}/{target}"'.format(
                gcp_user=os.environ['LOGNAME'], target=target),
            options.instance,
            '--project', get_project(options),
            '--zone', options.zone])
        check_run_quick(command, echo=False)
Example #31
0
  def __tag_head_with_build(self, version_bump_tag):
    """Tags the current branch's HEAD with the next semver gradle build tag.

    Args:
      version_bump_tag [String]: Semver string to add as a gradle build tag.
    """
    next_tag_with_build = '{0}-{1}'.format(version_bump_tag,
                                           self.build_number)
    # This tag is for gradle to use as the package version. It incorporates the
    # build number for uniqueness when publishing. This tag is of the form
    # 'X.Y.Z-$build_number' for gradle to use correctly. This is not pushed
    # to the upstream git repository.
    first_dash_idx = next_tag_with_build.index('-')
    gradle_version = next_tag_with_build[first_dash_idx + 1:]
    run_quick('git -C {path} tag {next_tag} HEAD'
              .format(path=self.path, next_tag=gradle_version))
Example #32
0
  def maybe_generate_clean_user_local():
    """Generate a spinnaker-local.yml file without environment variables refs"""
    user_dir = DevInstallationParameters.USER_CONFIG_DIR
    user_config_path = os.path.join(user_dir, 'spinnaker-local.yml')
    if os.path.exists(user_config_path):
      return
    if not os.path.exists(user_dir):
      os.mkdir(user_dir)

    with open('{config_dir}/default-spinnaker-local.yml'.format(
                  config_dir=DevInstallationParameters.INSTALLED_CONFIG_DIR),
              'r') as f:
      content = f.read()

    content = populate_aws_yml(content)
    content = populate_google_yml(content)

    with open(user_config_path, 'w') as f:
      f.write(content)
    os.chmod(user_config_path, 0600)

    change_path = os.path.join(os.path.dirname(os.path.dirname(__file__)),
                               'install', 'change_cassandra.sh')
    got = run_quick(change_path
                    + ' --echo=inMemory --front50=gcs'
                    + ' --change_defaults=false --change_local=true',
                    echo=False)
Example #33
0
    def validate_options_helper(cls, options):
        """Adds custom configuration parameters to argument parser.

    This is a helper function for make_deployer().
    """
        if not options.deploy_google_project:
            raise ValueError('--deploy_google_project not specified.')
        if not options.deploy_google_instance:
            raise ValueError('--deploy_google_instance not specified.')
        if not options.deploy_hal_google_service_account:
            raise ValueError(
                '--deploy_hal_google_service_account not specified.')

        if options.deploy_deploy:
            response = run_quick(
                'gcloud compute instances describe'
                ' --account {gcloud_account}'
                ' --project {project} --zone {zone} {instance}'.format(
                    gcloud_account=options.deploy_hal_google_service_account,
                    project=options.deploy_google_project,
                    zone=options.deploy_google_zone,
                    instance=options.deploy_google_instance),
                echo=False)

            if response.returncode == 0:
                raise ValueError(
                    '"{instance}" already exists in project={project} zone={zone}'
                    .format(instance=options.deploy_google_instance,
                            project=options.deploy_google_project,
                            zone=options.deploy_google_zone))
Example #34
0
 def __hal_upload_profile(self, component, bom_file, profile_path):
     result = run_quick(
         'hal admin publish profile {0} --color false --bom-path {1} --profile-path {2}'
         .format(component, bom_file, profile_path))
     if result.returncode != 0:
         print "'hal admin publish profile' command failed with: \n{0}\nexiting...".format(
             result.stdout)
         exit(result.returncode)
Example #35
0
def check_s3_path(path):
    check_result = run_quick('aws --version', echo=False)
    if check_result.returncode:
        error = """
ERROR: aws is required to retrieve the spinnaker release from S3.
       If you already have aws, fix your path.
       Otherwise install awscli with "sudo apt-get install awscli".
       Then run again.
"""
        raise RuntimeError(error)

    result = run_quick('aws s3 ls ' + path, echo=False)
    if result.returncode:
        error = ('The path "{dir}" does not seem to exist within S3.'
                 ' aws s3 ls returned "{stdout}"\n'.format(
                     dir=path, stdout=result.stdout.strip()))
        raise RuntimeError(error)
Example #36
0
def check_s3_path(path):
  check_result = run_quick('aws --version', echo=False)
  if check_result.returncode:
    error = """
ERROR: aws is required to retrieve the spinnaker release from S3.
       If you already have aws, fix your path.
       Otherwise install awscli with "sudo apt-get install awscli".
       Then run again.
"""
    raise RuntimeError(error)

  result = run_quick('aws s3 ls ' + path, echo=False)
  if result.returncode:
    error = ('The path "{dir}" does not seem to exist within S3.'
             ' aws s3 ls returned "{stdout}"\n'.format(
                 dir=path, stdout=result.stdout.strip()))
    raise RuntimeError(error)
Example #37
0
 def __hal_upload_profile(self, component, bom_file, profile_path):
   result = run_quick(
     'hal admin publish profile {0} --color false --bom-path {1} --profile-path {2}'
     .format(component, bom_file, profile_path)
   )
   if result.returncode != 0:
     print "'hal admin publish profile' command failed with: \n{0}\nexiting...".format(result.stdout)
     exit(result.returncode)
Example #38
0
 def unpack_bom(self):
   """Load the release candidate BOM into memory.
   """
   bom_yaml_string = run_quick('hal versions bom {0} --color false'
                               .format(self.__rc_version), echo=False).stdout.strip()
   print bom_yaml_string
   self.__bom_dict = yaml.load(bom_yaml_string)
   print self.__bom_dict
Example #39
0
def check_gcloud():
    result = run_quick('gcloud --version', echo=False)
    if not result.returncode:
        return

    sys.stderr.write('ERROR: This program requires gcloud. To obtain gcloud:\n'
                     '       curl https://sdk.cloud.google.com | bash\n')
    sys.exit(-1)
Example #40
0
def ensure_gcs_bucket(name, project=''):
  """Ensure that the desired GCS bucket exists, creating it if needed.

  Args:
    name [string]: The bucket name.
    project [string]: Optional Google Project id that will own the bucket.
      If none is provided, then the bucket will be associated with the default
      bucket configured to gcloud.

  Raises:
    RuntimeError if the bucket could not be created.
  """
  bucket = 'gs://' + name
  if not project:
      config_result = run_quick('gcloud config list', echo=False)
      error = None
      if config_result.returncode:
        error = 'Could not run gcloud: {error}'.format(
            error=config_result.stdout)
      else:
        match = re.search('(?m)^project = (.*)', config_result.stdout)
        if not match:
          error = ('gcloud is not configured with a default project.\n'
                   'run gcloud config or provide a --google_project.\n')
      if error:
        raise SystemError(error)

      project = match.group(1)

  list_result = run_quick('gsutil list -p ' + project, echo=False)
  if list_result.returncode:
    error = ('Could not create Google Cloud Storage bucket'
             ' "{name}" in project "{project}":\n{error}'
             .format(name=name, project=project, error=list_result.stdout))
    raise RuntimeError(error)

  if re.search('(?m)^{bucket}/\n'.format(bucket=bucket), list_result.stdout):
    sys.stderr.write(
        'WARNING: "{bucket}" already exists. Overwriting.\n'.format(
        bucket=bucket))
  else:
    print 'Creating GCS bucket "{bucket}" in project "{project}".'.format(
        bucket=bucket, project=project)
    check_run_quick('gsutil mb -p {project} {bucket}'
                    .format(project=project, bucket=bucket),
                    echo=True)
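
A usage sketch with illustrative names; when no project is given, the function falls back to the project parsed from 'gcloud config list':

ensure_gcs_bucket('my-spinnaker-bucket', project='my-gcp-project')
ensure_gcs_bucket('my-spinnaker-bucket')  # uses the default gcloud project
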
Example #41
0
 def unpack_bom(self):
   """Load the release candidate BOM into memory.
   """
   bom_yaml_string = run_quick('hal version bom {0} --color false --quiet'
                               .format(self.__rc_version), echo=False).stdout.strip()
   print 'bom yaml string pulled by hal: \n\n{0}\n\n'.format(bom_yaml_string)
   self.__bom_dict = yaml.load(bom_yaml_string)
   print self.__bom_dict
Example #42
0
def ensure_gcs_bucket(name, project=''):
    """Ensure that the desired GCS bucket exists, creating it if needed.

  Args:
    name [string]: The bucket name.
    project [string]: Optional Google Project id that will own the bucket.
      If none is provided, then the bucket will be associated with the default
      bucket configured to gcloud.

  Raises:
    RuntimeError if the bucket could not be created.
  """
    bucket = 'gs://' + name
    if not project:
        config_result = run_quick('gcloud config list', echo=False)
        error = None
        if config_result.returncode:
            error = 'Could not run gcloud: {error}'.format(
                error=config_result.stdout)
        else:
            match = re.search('(?m)^project = (.*)', config_result.stdout)
            if not match:
                error = ('gcloud is not configured with a default project.\n'
                         'run gcloud config or provide a --google_project.\n')
        if error:
            raise SystemError(error)

        project = match.group(1)

    list_result = run_quick('gsutil list -p ' + project, echo=False)
    if list_result.returncode:
        error = ('Could not create Google Cloud Storage bucket'
                 ' "{name}" in project "{project}":\n{error}'.format(
                     name=name, project=project, error=list_result.stdout))
        raise RuntimeError(error)

    if re.search('(?m)^{bucket}/\n'.format(bucket=bucket), list_result.stdout):
        sys.stderr.write(
            'WARNING: "{bucket}" already exists. Overwriting.\n'.format(
                bucket=bucket))
    else:
        print 'Creating GCS bucket "{bucket}" in project "{project}".'.format(
            bucket=bucket, project=project)
        check_run_quick('gsutil mb -p {project} {bucket}'.format(
            project=project, bucket=bucket),
                        echo=True)
Example #43
0
 def unpack_bom(self):
   """Load the release candidate BOM into memory.
   """
   bom_yaml_string = run_quick('hal version bom {0} --color false --quiet'
                               .format(self.__rc_version), echo=False).stdout.strip()
   print 'bom yaml string pulled by hal: \n\n{0}\n\n'.format(bom_yaml_string)
   self.__bom_dict = yaml.load(bom_yaml_string)
   print self.__bom_dict
Example #44
0
def check_gcloud():
    result = run_quick('gcloud --version', echo=False)
    if not result.returncode:
        return

    sys.stderr.write('ERROR: This program requires gcloud. To obtain gcloud:\n'
                     '       curl https://sdk.cloud.google.com | bash\n')
    sys.exit(-1)
Example #45
0
    def __listen_gcb_build_status(cls, name, subscription, tag, gcb_project,
                                  gcb_service_account, logfile):
        def fail_build(name):
            raise Exception(
                'GCB triggered build for {} timed out'.format(name))

        # Set a timer to fail the build on timeout. Note the one-element
        # tuple: (name) without a comma would pass the bare string.
        timer = threading.Timer(GCB_BUILD_STATUS_TIMEOUT, fail_build, (name,))
        timer.start()

        # Poll Google Cloud Pubsub for the build status
        completed = False
        try:
            while not completed:
                pulled = subscription.pull()
                for ack_id, message in pulled:
                    comp_name = ''
                    if name == 'spinnaker-monitoring':
                        comp_name = 'monitoring-daemon'
                    else:
                        comp_name = name
                    payload = json.loads(message.data)
                    repo_name = payload['source']['repoSource']['repoName']
                    tag_name = payload['source']['repoSource']['tagName']
                    if repo_name == comp_name and tag_name == tag:
                        subscription.acknowledge([ack_id])
                        status = payload['status']
                        print 'Received status: {} for building tag {} of {}'.format(
                            status, tag_name, comp_name)
                        if status in ['SUCCESS', 'FAILURE']:
                            completed = True
                            build_id = payload['id']
                            print 'Retrieving logs for build_id: {}'.format(
                                build_id)
                            get_log_cmd = (
                                'gcloud container builds log --project {project} --account {account} {id}'
                                .format(project=gcb_project,
                                        account=gcb_service_account,
                                        id=build_id))
                            build_log = run_quick(get_log_cmd,
                                                  echo=False).stdout.strip()
                            with open(logfile, 'a') as log:
                                log.write(
                                    'Fetching GCB build logs with: {}\n---\n'.
                                    format(get_log_cmd))
                                log.write(build_log)
                                log.write(
                                    '\n---\nFinished fetching GCB build logs')
                            if status == 'FAILURE':
                                raise Exception(
                                    'Triggered GCB build for {name} failed.'
                                    .format(name=comp_name))
                # Sleep between pulls while waiting for a matching status.
                if not completed:
                    time.sleep(10)
        finally:
            timer.cancel()
            subscription.delete()
Example #46
0
  def do_deploy(self, script, files_to_upload):
    """Implements the BaseBomValidateDeployer interface."""
    options = self.options
    ensure_empty_ssh_key(self.__ssh_key_path, self.__hal_user)

    script_path = write_script_to_path(script, path=None)
    files_to_upload.add(script_path)
    if options.jenkins_master_name:
      write_data_to_secure_path(
          os.environ.get('JENKINS_MASTER_PASSWORD'),
          path=os.path.join(os.sep, 'tmp', 'jenkins_{name}_password'
                            .format(name=options.jenkins_master_name)),
          is_script=True)

    try:
      self.do_create_vm(options)

      copy_files = (
          'scp'
          ' -i {ssh_key_path}'
          ' -o StrictHostKeyChecking=no'
          ' -o UserKnownHostsFile=/dev/null'
          ' {files} {ip}:~'
          .format(ssh_key_path=self.__ssh_key_path,
                  files=' '.join(files_to_upload),
                  ip=self.instance_ip))
      logging.info('Copying files %s', copy_files)

      # pylint: disable=unused-variable
      for retry in range(0, 10):
        result = run_quick(copy_files)
        if result.returncode == 0:
          break
        time.sleep(2)

      if result.returncode != 0:
        check_run_quick(copy_files)
    except Exception as ex:
      logging.error('Caught %s', ex)
      raise
    finally:
      os.remove(script_path)

    logging.info('Running install script')
    try:
      check_run_and_monitor(
          'ssh'
          ' -i {ssh_key}'
          ' -o StrictHostKeyChecking=no'
          ' -o UserKnownHostsFile=/dev/null'
          ' {ip}'
          ' "sudo ./{script_name}"'
          .format(ip=self.instance_ip,
                  ssh_key=self.__ssh_key_path,
                  script_name=os.path.basename(script_path)))
    except RuntimeError:
      raise RuntimeError('Halyard deployment failed.')
Example #47
0
 def __unpack_bom(self):
     """Load the release candidate BOM into memory.
 """
     bom_yaml_string = run_quick(
         'hal versions bom {0} --color false'.format(self.__rc_version),
         echo=False).stdout.strip()
     print bom_yaml_string
     self.__bom_dict = yaml.load(bom_yaml_string)
     print self.__bom_dict
Example #48
0
    def do_deploy(self, script, files_to_upload):
        """Implements the BaseBomValidateDeployer interface."""
        options = self.options
        ensure_empty_ssh_key(self.__ssh_key_path, self.__hal_user)

        script_path = write_script_to_path(script, path=None)
        files_to_upload.add(script_path)
        if options.jenkins_master_name:
            write_data_to_secure_path(
                os.environ.get('JENKINS_MASTER_PASSWORD'),
                path=os.path.join(
                    os.sep, 'tmp', 'jenkins_{name}_password'.format(
                        name=options.jenkins_master_name)),
                is_script=True)

        try:
            self.do_create_vm(options)

            copy_files = ('scp'
                          ' -i {ssh_key_path}'
                          ' -o StrictHostKeyChecking=no'
                          ' -o UserKnownHostsFile=/dev/null'
                          ' {files} {ip}:~'.format(
                              ssh_key_path=self.__ssh_key_path,
                              files=' '.join(files_to_upload),
                              ip=self.instance_ip))
            logging.info('Copying files %s', copy_files)

            # pylint: disable=unused-variable
            for retry in range(0, 10):
                result = run_quick(copy_files)
                if result.returncode == 0:
                    break
                time.sleep(2)

            if result.returncode != 0:
                check_run_quick(copy_files)
        except Exception as ex:
            logging.error('Caught %s', ex)
            raise
        finally:
            os.remove(script_path)

        logging.info('Running install script')
        try:
            check_run_and_monitor(
                'ssh'
                ' -i {ssh_key}'
                ' -o StrictHostKeyChecking=no'
                ' -o UserKnownHostsFile=/dev/null'
                ' {ip}'
                ' "sudo ./{script_name}"'.format(
                    ip=self.instance_ip,
                    ssh_key=self.__ssh_key_path,
                    script_name=os.path.basename(script_path)))
        except RuntimeError:
            raise RuntimeError('Halyard deployment failed.')
Example #49
0
def check_google_path(path):
  check_result = run_quick('gsutil --version', echo=False)
  if check_result.returncode:
    error = """
ERROR: gsutil is required to retrieve the spinnaker release from GCS.
       If you already have gsutil, fix your path.
       Otherwise follow the instructions at
       https://cloud.google.com/storage/docs/gsutil_install?hl=en#install
       and be sure you run gsutil config.
       Then run again.
"""
    raise RuntimeError(error)

  result = run_quick('gsutil ls ' + path, echo=False)
  if result.returncode:
    error = ('The path "{dir}" does not seem to exist within GCS.'
             ' gsutil ls returned "{stdout}"\n'.format(
                 dir=path, stdout=result.stdout.strip()))
    raise RuntimeError(error)
Example #50
0
def safe_mkdir(dir):
    """Create a local directory if it does not already exist.

    Args:
      dir [string]: The path to the directory to create.
    """
    result = run_quick('sudo mkdir -p "{dir}"'.format(dir=dir), echo=False)
    if result.returncode:
        raise RuntimeError('Could not create directory "{dir}": {error}'.format(
            dir=dir, error=result.stdout))
Example #51
0
 def __jar_build(cls, name, gradle_root):
   version = run_quick('cat {name}-component-version.yml'.format(name=name),
                       echo=False).stdout.strip()
   cmds = [
     './release/all.sh {version} nightly'.format(version=version),
   ]
   logfile = '{name}-jar-build.log'.format(name=name)
   if os.path.exists(logfile):
     os.remove(logfile)
   run_shell_and_log(cmds, logfile, cwd=gradle_root)
Example #52
0
 def __docker_build(cls, name, gradle_root):
   docker_tag = run_quick('cat {name}-docker.yml'.format(name=name),
                          echo=False).stdout.strip()
   cmds = [
     'docker build -f Dockerfile -t {docker_tag} .'.format(docker_tag=docker_tag),
     'docker push {docker_tag}'.format(docker_tag=docker_tag)
   ]
   logfile = '{name}-docker-build.log'.format(name=name)
   if os.path.exists(logfile):
     os.remove(logfile)
   run_shell_and_log(cmds, logfile, cwd=gradle_root)
Example #53
0
  def publish_bom(self, bom_path):
    """Publishes the BOM using Halyard.

    Assumes that Halyard is installed and correctly configured on the current
    machine.
    """
    result = run_quick('hal admin publish bom --color false --bom-path {0}'
                       .format(bom_path))
    if result.returncode != 0:
      print "'hal admin publish bom' command failed with: \n{0}\nexiting...".format(result.stdout)
      exit(result.returncode)
Example #54
0
  def __is_head_current(self):
    """Checks if the current version is at HEAD.

    Returns:
      [Boolean]: True if the current version tag is on HEAD, else False.
    """
    head_commit_res = run_quick('git -C {path} rev-parse HEAD'
                                    .format(path=self.path),
                                echo=False)
    head_commit = head_commit_res.stdout.strip()
    return self.__current_version.hash == head_commit
Example #55
0
  def cleanup_instance(self):
    """If we deployed an instance, tear it down."""
    if self.options.instance:
      print 'Leaving pre-existing instance {name}'.format(
          name=self.options.instance)
      return

    print 'Deleting snapshot {name}-snapshot'.format(name=self.__instance)
    check_run_quick('gcloud {account} compute snapshots delete -q {name}-snapshot'
                    .format(account=self.__gcloud_account_arg, name=self.__instance),
                    echo=False)

    print 'Deleting instance {name}'.format(name=self.__instance)
    run_quick('gcloud {account} compute instances delete -q {name}'
              '  --zone={zone} --project={project}'
              .format(account=self.__gcloud_account_arg,
                      name=self.__instance,
                      zone=self.__zone,
                      project=self.__project),
              echo=False)
Example #56
0
def install_gcloud(options):
  if not options.gcloud:
    return

  result = run_quick('gcloud --version', echo=False)
  if not result.returncode:
    print 'GCloud is already installed:\n    {version_info}'.format(
      version_info=result.stdout.replace('\n', '\n    '))
    return

  print 'Installing GCloud.'
  check_run_and_monitor('curl https://sdk.cloud.google.com | bash', echo=True)
Example #57
0
def try_until_ready(command):
    while True:
        result = run_quick(command, echo=False)
        if not result.returncode:
            break
        msg = result.stderr or result.stdout
        if 'refused' in msg:  # find() > 0 would miss a match at index 0.
            print 'New instance does not seem ready yet...retry in 5s.'
        else:
            print msg.strip()
            print 'Retrying in 5s.'
        time.sleep(5)
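
A usage sketch with illustrative names, polling a freshly created instance until ssh accepts connections:

try_until_ready('gcloud compute ssh my-instance'
                ' --project my-project --zone us-central1-f'
                ' --command="exit 0"')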