Example #1
 def __checkout_githubio_repo(self):
   """Clones the spinnaker.github.io git repo.
   """
   check_run_quick('git clone {0}'.format(self.__githubio_repo_uri))
   self.__repo_name = os.path.basename(self.__githubio_repo_uri)
   if self.__repo_name.endswith('.git'):
     # Strip only the trailing '.git'; replace() would also eat the
     # '.git' inside a name like 'spinnaker.github.io'.
     self.__repo_name = self.__repo_name[:-len('.git')]
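Every example in this section shells out through a check_run_quick helper (and a non-raising run_quick variant) whose definition is not included here. As a rough sketch only, assuming a raising subprocess wrapper that returns an object with returncode and stdout attributes (the real Spinnaker helper may differ in its logging and result type):

import collections
import subprocess

# Hypothetical result type; the examples below read .returncode and .stdout.
CommandResult = collections.namedtuple('CommandResult', ['returncode', 'stdout'])

def check_run_quick(command, echo=True):
    # Sketch: run a shell command, echo it if asked, raise on failure.
    if echo:
        print(command)
    process = subprocess.Popen(command, shell=True,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.STDOUT)
    stdout, _ = process.communicate()
    if process.returncode != 0:
        raise RuntimeError('Command failed ({0}): {1}'.format(
            process.returncode, command))
    return CommandResult(process.returncode, stdout)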
Example #2
    def __push_halyard_tag_and_branch(self):
        """Pushes a stable branch and git version tag to --github_publisher's Halyard repository.
    """
        major, minor, _ = self.__stable_version.split('.')
        self.__stable_branch = format_stable_branch(major, minor)

        if self.__patch_release:
            check_run_quick('git -C halyard checkout {0}'.format(
                self.__stable_branch))
        else:
            # Create new release branch.
            check_run_quick('git -C halyard checkout -b {0}'.format(
                self.__stable_branch))

        repo_to_push = '[email protected]:{owner}/halyard.git'.format(
            owner=self.__github_publisher)
        check_run_quick(
            'git -C halyard remote add release {url}'.format(url=repo_to_push))

        print('Pushing Halyard stable branch {branch} to {repo}'.format(
            branch=self.__stable_branch, repo=repo_to_push))
        check_run_quick('git -C halyard push release {branch}'.format(
            branch=self.__stable_branch))

        print('Pushing Halyard stable version tag {tag} to {repo}'.format(
            tag=self.__stable_version_tag, repo=self.__halyard_repo_uri))
        check_run_quick('git -C halyard push release {tag}'.format(
            tag=self.__stable_version_tag))
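format_stable_branch is not defined in this section. Based on the comment in Example #22 ('The stable branch will look like release-<major>.<minor>.x'), a plausible sketch is:

def format_stable_branch(major, minor):
    # Assumed naming convention, per the branch comment in Example #22.
    return 'release-{major}.{minor}.x'.format(major=major, minor=minor)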
Example #3
    def __extract_image_tarball_helper(self):
        """Helper function for make_image_tarball that does the work.

        Note that the work happens on the instance itself. So this function
        builds a remote command that it then executes on the prototype instance.
        """
        print 'Creating image tarball.'

        tar_path = self.options.tarball_uri
        tar_name = os.path.basename(tar_path)
        remote_script = [
            'sudo mkdir /mnt/exportdisk',
            'sudo mkfs.ext4 -F /dev/disk/by-id/google-export-disk',
            'sudo mount -t ext4 -o discard,defaults'
            ' /dev/disk/by-id/google-export-disk /mnt/exportdisk',
            'sudo mkdir /mnt/snapshotdisk',
            'sudo mount /dev/disk/by-id/google-snapshot-disk /mnt/snapshotdisk',
            'cd /mnt/snapshotdisk', 'sudo rm -rf home/*',
            'sudo dd if=/dev/disk/by-id/google-snapshot-disk'
            ' of=/mnt/exportdisk/disk.raw bs=4096', 'cd /mnt/exportdisk',
            'sudo tar czvf {tar_name} disk.raw'.format(tar_name=tar_name),
            'gsutil -q cp /mnt/exportdisk/{tar_name} {output_path}'.format(
                tar_name=tar_name, output_path=tar_path)
        ]

        command = '; '.join(remote_script)
        print 'Running: {0}'.format(command)
        check_run_quick('gcloud {account} compute ssh --command="{command}"'
                        ' --project {project} --zone {zone} {instance}'.format(
                            account=self.__gcloud_account_arg,
                            command=command.replace('"', r'\"'),
                            project=self.__project,
                            zone=self.__zone,
                            instance=self.__instance))
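The command.replace('"', r'\"') call escapes any double quotes in the assembled remote script so it survives being wrapped in --command="...". A small illustration with a made-up command:

command = 'sudo bash -c "echo hi"'
print(command.replace('"', r'\"'))  # sudo bash -c \"echo hi\"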
Example #4
def __tag_images(versions_to_tag, project, account, project_images,
                 bom_contents_by_name):
    images_to_tag = set([])
    for bom_version in versions_to_tag:
        to_tag = [
            i for i in __derive_images_from_bom(
                bom_version, bom_contents_by_name) if i in project_images
        ]
        images_to_tag.update(to_tag)
    for image in images_to_tag:
        result = run_quick(
            'gcloud compute images describe --project={project} --account={account} --format=json {image}'
            .format(project=project, account=account, image=image),
            echo=False)
        # Adding labels is idempotent, adding the same label again doesn't break anything.
        if not result.returncode:
            payload_str = result.stdout.strip()
            timestamp = json.loads(payload_str)['creationTimestamp']
            timestamp = timestamp[:timestamp.index('T')]
            check_run_quick(
                'gcloud compute images add-labels --project={project} --account={account} --labels={key}={timestamp} {image}'
                .format(project=project,
                        account=account,
                        key=PUBLISHED_TAG_KEY,
                        timestamp=timestamp,
                        image=image))
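For reference, creationTimestamp is an RFC 3339 string, so slicing up to the 'T' keeps just the date that becomes the label value:

# Illustration with a made-up timestamp:
timestamp = '2018-05-14T11:25:23.456-07:00'
print(timestamp[:timestamp.index('T')])  # 2018-05-14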
Example #5
 def __checkout_githubio_repo(self):
     """Clones the spinnaker.github.io git repo.
 """
     check_run_quick('git clone {0}'.format(self.__githubio_repo_uri))
     self.__repo_name = os.path.basename(self.__githubio_repo_uri)
     if self.__repo_name.endswith('.git'):
         self.__repo_name = self.__repo_name[:-len('.git')]
Example #6
 def __update_versions_tracking_file(self):
     """Updates the global versions.yml tracking file to point at the new
     version of Halyard.
     """
     check_run_quick(
         "hal admin publish latest-halyard {}".format(self.__stable_version)
     )
Example #7
 def __generate_halyard_docs(self):
     """Builds Halyard's CLI, which writes the new documentation locally to halyard/docs/commands.md"""
     check_run_quick(
         "git -C halyard rev-parse HEAD | xargs git -C halyard checkout ;"
     )
     cmds = ["make"]
     run_shell_and_log(cmds, "halyard-generate-docs.log", cwd="halyard/halyard-cli")
Example #8
def __record_halyard_nightly_version(version_bump, options):
    """Record the version and commit hash at which Halyard was built in a bucket.

    Assumes that gsutil is installed on the machine this script is run from.

    This function uses `gsutil rsync` to read the GCS file, changes it in-place,
    and then uses `gsutil rsync` to write the file again. `rsync` is eventually
    consistent, so running this script (or manually manipulating the GCS file)
    concurrently could likely result in file corruption. Don't parallelize this.
    """
    bucket_uri = options.hal_nightly_bucket_uri
    build_number = options.build_number
    local_bucket_name = os.path.basename(bucket_uri)
    # Copy all the bucket contents to local (-r) and get rid of extra stuff (-d).
    if not os.path.exists(local_bucket_name):
        os.mkdir(local_bucket_name)
    check_run_quick('gsutil rsync -r -d {remote_uri} {local_bucket}'.format(
        remote_uri=bucket_uri, local_bucket=local_bucket_name))
    hal_version = version_bump.version_str.replace('version-', '')
    full_hal_version = '{version}-{build}'.format(version=hal_version,
                                                  build=build_number)
    new_hal_nightly_entry = ('{full_hal_version}: {commit}'.format(
        full_hal_version=full_hal_version, commit=version_bump.commit_hash))
    nightly_entry_file = '{0}/nightly-version-commits.yml'.format(
        local_bucket_name)
    with open(nightly_entry_file, 'a') as nef:
        nef.write('{0}\n'.format(new_hal_nightly_entry))
    # Now sync the local dir with the bucket again after the update.
    check_run_quick('gsutil rsync -r -d {local_bucket} {remote_uri}'.format(
        remote_uri=bucket_uri, local_bucket=local_bucket_name))

    # Opening with 'w' stomps the old file.
    with open(options.output_built_halyard_version, 'w') as hal_version_file:
        hal_version_file.write('{}'.format(full_hal_version))
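A worked example (with made-up values) of the entry format this appends to nightly-version-commits.yml:

version_str = 'version-0.45.0'
hal_version = version_str.replace('version-', '')  # '0.45.0'
full_hal_version = '{version}-{build}'.format(version=hal_version, build=42)
print('{0}: {1}'.format(full_hal_version, 'a1b2c3d'))  # 0.45.0-42: a1b2c3d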
Example #9
 def do_fetch_service_log_file(self, service, log_dir):
     k8s_namespace = self.options.deploy_k8s_namespace
     service_pod = self.__get_pod_name(k8s_namespace, service)
     path = os.path.join(log_dir, service + '.log')
     write_data_to_secure_path('', path)
     check_run_quick('kubectl -n {namespace} logs {pod} >> {path}'.format(
         namespace=k8s_namespace, pod=service_pod, path=path))
Example #10
  def deploy_instance(self):
    """Deploy an instance (from an image) so we can get at its disks.

    This isn't necessarily efficient, but is simple since we already have
    means to create images.
    """
    if self.__instance:
      print 'Using existing instance {name}'.format(name=self.__instance)
      return

    if not self.options.image:
      raise ValueError('Neither --instance nor --image was specified.')

    instance = 'build-spinnaker-tarball-{unique}'.format(
        unique=time.strftime('%Y%m%d%H%M%S'))

    print 'Deploying temporary instance {name}'.format(name=instance)
    check_run_quick('gcloud compute instances create {name}'
                    ' --zone={zone} --project={project}'
                    ' --image={image} --image-project={image_project}'
                    ' --scopes compute-rw,storage-rw'
                    .format(name=instance,
                            zone=self.__zone,
                            project=self.__project,
                            image=self.options.image,
                            image_project=self.options.image_project),
                    echo=False)
    self.__instance = instance
Example #11
def install_nvm(options):
  print '---------- Installing NVM ---------'
  check_run_quick('sudo chmod 775 /usr/local')
  check_run_quick('sudo mkdir -m 777 -p /usr/local/node /usr/local/nvm')

  result = check_fetch(
    'https://raw.githubusercontent.com/creationix/nvm/{nvm_version}/install.sh'
    .format(nvm_version=NVM_VERSION))

  fd, temp = tempfile.mkstemp()
  os.write(fd, result.content)
  os.close(fd)

  try:
    run_and_monitor(
        'bash -c "NVM_DIR=/usr/local/nvm source {temp}"'.format(temp=temp))
  finally:
    os.remove(temp)

#  curl -o- https://raw.githubusercontent.com/creationix/nvm/v0.26.0/install.sh | NVM_DIR=/usr/local/nvm bash


  check_run_and_monitor('sudo bash -c "cat > /etc/profile.d/nvm.sh"',
                        input=__NVM_SCRIPT)

  print '---------- Installing Node {version} ---------'.format(
    version=NODE_VERSION)

  run_and_monitor('bash -c "source /etc/profile.d/nvm.sh'
                  '; nvm install {version}'
                  '; nvm alias default {version}"'
                  .format(version=NODE_VERSION))
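__NVM_SCRIPT is referenced above but not defined in this section. Presumably it is a small profile.d snippet that exposes nvm to login shells; a purely hypothetical sketch:

# Hypothetical contents; the real constant is not shown here.
__NVM_SCRIPT = '''\
export NVM_DIR=/usr/local/nvm
source "$NVM_DIR/nvm.sh"
'''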
Example #12
  def __extract_image_tarball_helper(self):
    """Helper function for make_image_tarball that does the work.

    Note that the work happens on the instance itself. So this function
    builds a remote command that it then executes on the prototype instance.
    """
    print 'Creating image tarball.'
    set_excludes_bash_command = (
        'EXCLUDES=`python -c'
        ' "import glob; print \',\'.join(glob.glob(\'/home/*\'))"`')

    tar_path = self.options.tarball_uri
    tar_name = os.path.basename(tar_path)
    remote_script = [
      'sudo mkdir /mnt/tmp',
      'sudo /usr/share/google/safe_format_and_mount -m'
          ' "mkfs.ext4 -F" /dev/sdb /mnt/tmp',
      set_excludes_bash_command,
      'sudo gcimagebundle -d /dev/sda -o /mnt/tmp'
          ' --log_file=/tmp/export.log --output_file_name={tar_name}'
          ' --excludes=/tmp,\\$EXCLUDES'.format(tar_name=tar_name),
      'gsutil -q cp /mnt/tmp/{tar_name} {output_path}'.format(
          tar_name=tar_name, output_path=tar_path)]

    command = '; '.join(remote_script)
    check_run_quick('gcloud compute ssh --command="{command}"'
                    ' --project {project} --zone {zone} {instance}'
                    .format(command=command.replace('"', r'\"'),
                            project=self.__project,
                            zone=self.__zone,
                            instance=self.__instance))
Example #13
def copy_file(options, source, target):
    if os.path.exists(source):
        # TODO(ewiseblatt): we can use scp here instead, and pass the
        # credentials we want to copy with rather than the additional command
        # below. But we need to figure out the IP address to copy to.
        # For now, do it the long way.
        print 'Copying {source}'.format(source=source)
        command = ' '.join([
            'gcloud compute copy-files',
            '--project', get_project(options),
            '--zone', options.zone,
            source,
            '{instance}:{target}'.format(instance=options.instance,
                                         target=target)])
        while True:
            result = run_quick(command, echo=False)
            if not result.returncode:
                break
            print 'New instance does not seem ready yet...retry in 5s.'
            time.sleep(5)

        command = ' '.join([
            'gcloud compute ssh',
            '--command="chmod 600 /home/{gcp_user}/{target}"'.format(
                gcp_user=os.environ['LOGNAME'], target=target),
            options.instance,
            '--project', get_project(options),
            '--zone', options.zone])
        check_run_quick(command, echo=False)
Example #14
def __record_halyard_nightly_version(version_bump, options):
  """Record the version and commit hash at which Halyard was built in a bucket.

  Assumes that gsutil is installed on the machine this script is run from.

  This function uses `gsutil rsync` to read the GCS file, changes it in-place,
  and then uses `gsutil rsync` to write the file again. `rsync` is eventually
  consistent, so running this script (or manually manipulating the GCS file)
  concurrently could likely result in file corruption. Don't parallelize this.
  """
  bucket_uri = options.hal_nightly_bucket_uri
  build_number = options.build_number
  local_bucket_name = os.path.basename(bucket_uri)
  # Copy all the bucket contents to local (-r) and get rid of extra stuff (-d).
  if not os.path.exists(local_bucket_name):
    os.mkdir(local_bucket_name)
  check_run_quick('gsutil rsync -r -d {remote_uri} {local_bucket}'
                  .format(remote_uri=bucket_uri, local_bucket=local_bucket_name))
  hal_version = version_bump.version_str.replace('version-', '')
  full_hal_version = '{version}-{build}'.format(version=hal_version, build=build_number)
  new_hal_nightly_entry = ('{full_hal_version}: {commit}'
                           .format(full_hal_version=full_hal_version, commit=version_bump.commit_hash))
  nightly_entry_file = '{0}/nightly-version-commits.yml'.format(local_bucket_name)
  with open(nightly_entry_file, 'a') as nef:
    nef.write('{0}\n'.format(new_hal_nightly_entry))
  # Now sync the local dir with the bucket again after the update.
  check_run_quick('gsutil rsync -r -d {local_bucket} {remote_uri}'
                  .format(remote_uri=bucket_uri, local_bucket=local_bucket_name))

  # Opening with 'w' stomps the old file.
  with open(options.output_built_halyard_version, 'w') as hal_version_file:
    hal_version_file.write('{}'.format(full_hal_version))
Example #15
 def __generate_halyard_docs(self):
   """Builds Halyard's CLI, which writes the new documentation locally to halyard/docs/commands.md
   """
   check_run_quick('git -C halyard rev-parse HEAD | xargs git -C halyard checkout ;')
   cmds = [
     'make'
   ]
   run_shell_and_log(cmds, 'halyard-generate-docs.log', cwd='halyard/halyard-cli')
Example #16
  def do_deploy(self, script, files_to_upload):
    """Implements the BaseBomValidateDeployer interface."""
    options = self.options
    ensure_empty_ssh_key(self.__ssh_key_path, self.__hal_user)

    script_path = write_script_to_path(script, path=None)
    files_to_upload.add(script_path)
    if options.jenkins_master_name:
      write_data_to_secure_path(
          os.environ.get('JENKINS_MASTER_PASSWORD'),
          path=os.path.join(os.sep, 'tmp', 'jenkins_{name}_password'
                            .format(name=options.jenkins_master_name)),
          is_script=True)

    try:
      self.do_create_vm(options)

      copy_files = (
          'scp'
          ' -i {ssh_key_path}'
          ' -o StrictHostKeyChecking=no'
          ' -o UserKnownHostsFile=/dev/null'
          ' {files} {ip}:~'
          .format(ssh_key_path=self.__ssh_key_path,
                  files=' '.join(files_to_upload),
                  ip=self.instance_ip))
      logging.info('Copying files %s', copy_files)

      # pylint: disable=unused-variable
      for retry in range(0, 10):
        result = run_quick(copy_files)
        if result.returncode == 0:
          break
        time.sleep(2)

      if result.returncode != 0:
        check_run_quick(copy_files)
    except Exception as ex:
      logging.error('Caught %s', ex)
      raise
    finally:
      os.remove(script_path)

    logging.info('Running install script')
    try:
      check_run_and_monitor(
          'ssh'
          ' -i {ssh_key}'
          ' -o StrictHostKeyChecking=no'
          ' -o UserKnownHostsFile=/dev/null'
          ' {ip}'
          ' "sudo ./{script_name}"'
          .format(ip=self.instance_ip,
                  ssh_key=self.__ssh_key_path,
                  script_name=os.path.basename(script_path)))
    except RuntimeError:
      raise RuntimeError('Halyard deployment failed.')
Example #17
    def do_deploy(self, script, files_to_upload):
        """Implements the BaseBomValidateDeployer interface."""
        options = self.options
        ensure_empty_ssh_key(self.__ssh_key_path, self.__hal_user)

        script_path = write_script_to_path(script, path=None)
        files_to_upload.add(script_path)
        if options.jenkins_master_name:
            write_data_to_secure_path(
                os.environ.get('JENKINS_MASTER_PASSWORD'),
                path=os.path.join(
                    os.sep, 'tmp', 'jenkins_{name}_password'.format(
                        name=options.jenkins_master_name)),
                is_script=True)

        try:
            self.do_create_vm(options)

            copy_files = ('scp'
                          ' -i {ssh_key_path}'
                          ' -o StrictHostKeyChecking=no'
                          ' -o UserKnownHostsFile=/dev/null'
                          ' {files} {ip}:~'.format(
                              ssh_key_path=self.__ssh_key_path,
                              files=' '.join(files_to_upload),
                              ip=self.instance_ip))
            logging.info('Copying files %s', copy_files)

            # pylint: disable=unused-variable
            for retry in range(0, 10):
                result = run_quick(copy_files)
                if result.returncode == 0:
                    break
                time.sleep(2)

            if result.returncode != 0:
                check_run_quick(copy_files)
        except Exception as ex:
            logging.error('Caught %s', ex)
            raise
        finally:
            os.remove(script_path)

        logging.info('Running install script')
        try:
            check_run_and_monitor(
                'ssh'
                ' -i {ssh_key}'
                ' -o StrictHostKeyChecking=no'
                ' -o UserKnownHostsFile=/dev/null'
                ' {ip}'
                ' "sudo ./{script_name}"'.format(
                    ip=self.instance_ip,
                    ssh_key=self.__ssh_key_path,
                    script_name=os.path.basename(script_path)))
        except RuntimeError:
            raise RuntimeError('Halyard deployment failed.')
Example #18
def copy_master_yml(options):
    """Copy the specified master spinnaker-local.yml, and credentials.

    This will look for paths to credentials within the spinnaker-local.yml, and
    copy those as well. The paths to the credentials (and the reference
    in the config file) will be changed to reflect the filesystem on the
    new instance, which may be different than on this instance.

    Args:
      options [Namespace]: The parser namespace options contain information
        about the instance we're going to copy to, as well as the source
        of the master spinnaker-local.yml file.
    """
    print 'Creating .spinnaker directory...'
    check_run_quick('gcloud compute ssh --command "mkdir -p .spinnaker"'
                    ' --project={project} --zone={zone} {instance}'
                    .format(project=get_project(options),
                            zone=options.zone,
                            instance=options.instance),
                    echo=False)

    bindings = YamlBindings()
    bindings.import_path(options.master_yml)

    try:
        json_credential_path = bindings.get(
            'providers.google.primaryCredentials.jsonPath')
    except KeyError:
        json_credential_path = None

    gcp_home = os.path.join('/home', os.environ['LOGNAME'], '.spinnaker')

    # If there are credentials, write them to this path
    gcp_credential_path = os.path.join(gcp_home, 'google-credentials.json')

    with open(options.master_yml, 'r') as f:
        content = f.read()

    # Replace all the occurrences of the original credentials path with the
    # path that we are going to place the file in on the new instance.
    if json_credential_path:
        content = content.replace(json_credential_path, gcp_credential_path)

    fd, temp_path = tempfile.mkstemp()
    os.write(fd, content)
    os.close(fd)
    actual_path = temp_path

    # Copy the credentials here. The cfg file will be copied after.
    copy_file(options, actual_path, '.spinnaker/spinnaker-local.yml')

    if json_credential_path:
        copy_file(options, json_credential_path,
                  '.spinnaker/google-credentials.json')

    if temp_path:
        os.remove(temp_path)
Example #19
def create_instance(options):
    """Creates new GCE VM instance for development."""
    project = get_project(options)
    print 'Creating instance {project}/{zone}/{instance}'.format(
        project=project, zone=get_zone(options), instance=options.instance)
    print('  with --machine_type={type} and --disk_size={disk_size}...'.format(
        type=options.machine_type, disk_size=options.disk_size))

    google_dev_dir = os.path.join(os.path.dirname(__file__), '../google/dev')
    dev_dir = os.path.dirname(__file__)
    project_dir = os.path.join(dev_dir, '..')

    install_dir = '{dir}/../install'.format(dir=dev_dir)

    startup_command = [
        '/opt/spinnaker/install/install_spinnaker.sh'
        ' --dependencies_only', '/opt/spinnaker/install/install_development.sh'
    ]
    fd, temp_startup = tempfile.mkstemp()
    os.write(fd, ';'.join(startup_command))
    os.close(fd)

    metadata_files = [
        'startup-script={google_dev_dir}/google_install_loader.py'
        ',sh_bootstrap_dev={dev_dir}/bootstrap_dev.sh'
        ',sh_install_spinnaker={project_dir}/InstallSpinnaker.sh'
        ',sh_install_development={dev_dir}/install_development.sh'
        ',startup_command={temp_startup}'.format(google_dev_dir=google_dev_dir,
                                                 dev_dir=dev_dir,
                                                 project_dir=project_dir,
                                                 temp_startup=temp_startup)
    ]

    metadata = ','.join([
        'startup_loader_files='
        'sh_install_spinnaker'
        '+sh_install_development'
        '+sh_bootstrap_dev'
    ])

    command = [
        'gcloud', 'compute', 'instances', 'create', options.instance,
        '--project',
        get_project(options), '--zone',
        get_zone(options), '--machine-type', options.machine_type,
        '--image-family', 'ubuntu-1404-lts', '--image-project',
        'ubuntu-os-cloud', '--scopes', options.scopes,
        '--boot-disk-size={size}'.format(size=options.disk_size),
        '--boot-disk-type={type}'.format(type=options.disk_type), '--metadata',
        metadata,
        '--metadata-from-file={files}'.format(files=','.join(metadata_files))
    ]
    if options.address:
        command.extend(['--address', options.address])

    check_run_quick(' '.join(command), echo=False)
Example #20
def create_instance(options):
    """Creates new GCE VM instance for development."""
    project = get_project(options)
    print 'Creating instance {project}/{zone}/{instance}'.format(
        project=project, zone=get_zone(options), instance=options.instance)
    print ('  with --machine_type={type} and --disk_size={disk_size}...'
           .format(type=options.machine_type, disk_size=options.disk_size))

    google_dev_dir = os.path.join(os.path.dirname(__file__), '../google/dev')
    dev_dir = os.path.dirname(__file__)
    project_dir = os.path.join(dev_dir, '..')

    install_dir = '{dir}/../install'.format(dir=dev_dir)

    startup_command = ['/opt/spinnaker/install/install_spinnaker.sh'
                           ' --dependencies_only',
                       '/opt/spinnaker/install/install_development.sh']
    fd, temp_startup = tempfile.mkstemp()
    os.write(fd, ';'.join(startup_command))
    os.close(fd)

    metadata_files = [
        'startup-script={google_dev_dir}/google_install_loader.py'
        ',sh_bootstrap_dev={dev_dir}/bootstrap_dev.sh'
        ',sh_install_spinnaker={project_dir}/InstallSpinnaker.sh'
        ',sh_install_development={dev_dir}/install_development.sh'
        ',startup_command={temp_startup}'
        .format(google_dev_dir=google_dev_dir,
                dev_dir=dev_dir,
                project_dir=project_dir,
                temp_startup=temp_startup)]

    metadata = ','.join([
        'startup_loader_files='
        'sh_install_spinnaker'
        '+sh_install_development'
        '+sh_bootstrap_dev'])

    command = ['gcloud', 'compute', 'instances', 'create',
               options.instance,
               '--project', get_project(options),
               '--zone', get_zone(options),
               '--machine-type', options.machine_type,
               '--image-family', 'ubuntu-1404-lts',
               '--image-project', 'ubuntu-os-cloud',
               '--scopes', options.scopes,
               '--boot-disk-size={size}'.format(size=options.disk_size),
               '--boot-disk-type={type}'.format(type=options.disk_type),
               '--metadata', metadata,
               '--metadata-from-file={files}'.format(
                   files=','.join(metadata_files))]
    if options.address:
        command.extend(['--address', options.address])

    check_run_quick(' '.join(command), echo=False)
Example #21
    def git_clone(self, repository, owner=None):
        """Clone the specified repository

        Args:
          repository [string]: The name of the github repository (without owner).
          owner [string]: An explicit repository owner.
                 If not provided use the configured options.
        """
        name = repository.name
        repository_dir = get_repository_dir(name)
        upstream_user = repository.owner
        branch = self.pull_branch or 'master'
        origin_url = self.get_github_repository_url(repository, owner=owner)
        upstream_url = 'https://github.com/{upstream_user}/{name}.git'.format(
            upstream_user=upstream_user, name=name)

        # Don't echo because we're going to hide some failure.
        print 'Cloning {name} from {origin_url} -b {branch}.'.format(
            name=name, origin_url=origin_url, branch=branch)
        shell_result = run_and_monitor('git clone {url} -b {branch}'.format(
            url=origin_url, branch=branch),
                                       echo=False)
        if not shell_result.returncode:
            if shell_result.stdout:
                print shell_result.stdout
        else:
            if repository in self.__extra_repositories:
                sys.stderr.write(
                    'WARNING: Missing optional repository {name}.\n'.format(
                        name=name))
                sys.stderr.write('         Continue on without it.\n')
                return
            sys.stderr.write(shell_result.stderr or shell_result.stdout)
            sys.stderr.write(
                'FATAL: Cannot continue without required repository {name}.\n'
                '       Consider using github to fork one from {upstream}.\n'.
                format(name=name, upstream=upstream_url))
            raise SystemExit(
                'Repository {url} not found.'.format(url=origin_url))

        if self.__options.add_upstream and origin_url != upstream_url:
            print '  Adding upstream repository {upstream}.'.format(
                upstream=upstream_url)
            check_run_quick('git -C "{dir}" remote add upstream {url}'.format(
                dir=repository_dir, url=upstream_url),
                            echo=False)

        if self.__options.disable_upstream_push:
            which = 'upstream' if origin_url != upstream_url else 'origin'
            print '  Disabling git pushes to {which} {upstream}'.format(
                which=which, upstream=upstream_url)
            check_run_quick(
                'git -C "{dir}" remote set-url --push {which} disabled'.format(
                    dir=repository_dir, which=which),
                echo=False)
Example #22
    def push_branch_and_tags(self):
        """Creates a release branch and pushes tags to the microservice repos owned by --github_publisher.

        A private key that has access to --github_publisher's github repos
        needs to be added to a running ssh-agent on the machine this script
        will run on:

        > <copy or rsync the key to the vm>
        > eval `ssh-agent`
        > ssh-add ~/.ssh/<key with access to github repos>
        """
        major, minor, _ = self.__release_version.split('.')

        # The stable branch will look like release-<major>.<minor>.x since nebula
        # enforces restrictions on what branches it does releases from.
        # https://github.com/nebula-plugins/nebula-release-plugin#extension-provided
        stable_branch = format_stable_branch(major, minor)
        for comp in COMPONENTS:
            comp_path = os.path.join(self.base_dir, comp)
            if self.__patch_release:
                check_run_quick('git -C {0} checkout {1}'.format(
                    comp_path, stable_branch))
            else:
                # Create new release branch.
                check_run_quick('git -C {0} checkout -b {1}'.format(
                    comp_path, stable_branch))

            version_tag_build = ''
            if comp == 'spinnaker-monitoring':
                version_tag_build = 'version-{0}'.format(
                    self.__bom_dict[SERVICES]['monitoring-daemon'][VERSION])
            else:
                version_tag_build = 'version-{0}'.format(
                    self.__bom_dict[SERVICES][comp][VERSION])

            last_dash = version_tag_build.rindex('-')
            version_tag = version_tag_build[:last_dash]
            repo_to_push = ('[email protected]:{owner}/{comp}.git'.format(
                owner=self.__github_publisher, comp=comp))
            check_run_quick('git -C {comp} remote add release {url}'.format(
                comp=comp_path, url=repo_to_push))
            check_run_quick('git -C {comp} push release {branch}'.format(
                comp=comp_path, branch=stable_branch))

            repo = self.__github.get_repo('{owner}/{comp}'.format(
                owner=self.__github_publisher, comp=comp))
            paginated_tags = repo.get_tags()
            tag_names = [tag.name for tag in paginated_tags]
            if version_tag not in tag_names:
                # The tag doesn't exist and we need to push a tag.
                print('pushing version tag {tag} to {owner}/{comp}'.format(
                    tag=version_tag, owner=self.__github_publisher, comp=comp))
                check_run_quick('git -C {comp} push release {tag}'.format(
                    comp=comp_path, tag=version_tag))
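The rindex('-') slice strips the trailing build-number suffix from the tag recorded in the BOM, leaving only the semantic version. With a made-up tag:

version_tag_build = 'version-0.11.0-20180402130000'
version_tag = version_tag_build[:version_tag_build.rindex('-')]
print(version_tag)  # version-0.11.0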
Example #23
  def git_clone(self, repository, owner=None):
      """Clone the specified repository

      Args:
        repository [string]: The name of the github repository (without owner).
        owner [string]: An explicit repository owner.
               If not provided use the configured options.
      """
      name = repository.name
      repository_dir = get_repository_dir(name)
      upstream_user = repository.owner
      branch = self.pull_branch or 'master'
      origin_url = self.get_github_repository_url(repository, owner=owner)
      upstream_url = 'https://github.com/{upstream_user}/{name}.git'.format(
              upstream_user=upstream_user, name=name)

      # Don't echo because we're going to hide some failure.
      print 'Cloning {name} from {origin_url} -b {branch}.'.format(
          name=name, origin_url=origin_url, branch=branch)
      shell_result = run_and_monitor(
          'git clone {url} -b {branch}'.format(url=origin_url, branch=branch),
          echo=False)
      if not shell_result.returncode:
          if shell_result.stdout:
              print shell_result.stdout
      else:
          if repository in self.__extra_repositories:
             sys.stderr.write('WARNING: Missing optional repository {name}.\n'
                                  .format(name=name))
             sys.stderr.write('         Continue on without it.\n')
             return
          sys.stderr.write(shell_result.stderr or shell_result.stdout)
          sys.stderr.write(
              'FATAL: Cannot continue without required repository {name}.\n'
              '       Consider using github to fork one from {upstream}.\n'.
              format(name=name, upstream=upstream_url))
          raise SystemExit('Repository {url} not found.'.format(url=origin_url))

      if self.__options.add_upstream and origin_url != upstream_url:
          print '  Adding upstream repository {upstream}.'.format(
              upstream=upstream_url)
          check_run_quick('git -C "{dir}" remote add upstream {url}'
                              .format(dir=repository_dir, url=upstream_url),
                          echo=False)

      if self.__options.disable_upstream_push:
          which = 'upstream' if origin_url != upstream_url else 'origin'
          print '  Disabling git pushes to {which} {upstream}'.format(
              which=which, upstream=upstream_url)
          check_run_quick(
              'git -C "{dir}" remote set-url --push {which} disabled'
                  .format(dir=repository_dir, which=which),
              echo=False)
Example #24
  def create_tarball(self):
    """Create a tar.gz file from the instance specified by the options.

    The file will be written to options.tarball_uri.
    It can be later turned into a GCE image by passing it as the --source-uri
    to gcloud images create.
    """
    project = self.__project
    basename = os.path.basename(self.options.tarball_uri).replace('_', '-')
    first_dot = basename.find('.')
    if first_dot != -1:  # find() returns -1 when there is no dot
      basename = basename[0:first_dot]
    disk_name = '{name}-export'.format(name=basename)
    print 'Attaching external disk "{disk}" to extract image tarball.'.format(
        disk=disk_name)

    # TODO(ewiseblatt): 20151002
    # Add an option to reuse an existing disk to reduce the cycle time.
    # Then guard the create/format/destroy around this option.
    # Still may want/need to attach/detach it here to reduce race conditions
    # on its use since it can only be bound to once instance at a time.
    check_run_quick('gcloud compute disks create '
                    ' {disk_name} --project {project} --zone {zone} --size=10'
                    .format(disk_name=disk_name,
                            project=self.__project,
                            zone=self.__zone),
                    echo=False)

    check_run_quick('gcloud compute instances attach-disk {instance}'
                    ' --disk={disk_name} --device-name=export-disk'
                    ' --project={project} --zone={zone}'
                    .format(instance=self.__instance,
                            disk_name=disk_name,
                            project=self.__project,
                            zone=self.__zone),
                    echo=False)
    try:
      self.__extract_image_tarball_helper()
    finally:
      print 'Detaching and deleting external disk.'
      run_quick('gcloud compute instances detach-disk -q {instance}'
                ' --disk={disk_name} --project={project} --zone={zone}'
                .format(instance=self.__instance,
                        disk_name=disk_name,
                        project=self.__project,
                        zone=self.__zone),
                echo=False)
      run_quick('gcloud compute disks delete -q {disk_name}'
                ' --project={project} --zone={zone}'
                .format(disk_name=disk_name,
                        project=self.__project,
                        zone=self.__zone),
                echo=False)
Example #25
 def do_fetch_service_log_file(self, service, log_dir):
     """Implements the BaseBomValidateDeployer interface."""
     write_data_to_secure_path('', os.path.join(log_dir, service + '.log'))
     check_run_quick('scp'
                     ' -i {ssh_key}'
                     ' -o StrictHostKeyChecking=no'
                     ' -o UserKnownHostsFile=/dev/null'
                     ' {ip}:/var/log/spinnaker/{service}/{service}.log'
                     ' {log_dir}'.format(ip=self.instance_ip,
                                         ssh_key=self.ssh_key_path,
                                         service=service,
                                         log_dir=log_dir))
Example #26
  def __determine_gate_version(self):
    bom_file = 'bom.yml'
    check_run_quick('gsutil cat gs://halconfig/bom/{spinnaker_version}.yml > {bom_file}'
                    .format(spinnaker_version=self.__spinnaker_version, bom_file=bom_file))

    with open(bom_file, 'r') as stream:
      try:
        bom = yaml.load(stream)
        return bom['services']['gate']['version']
      except yaml.YAMLError as err:
        print 'Failed to load Gate version from BOM.'
        raise err
Example #27
    def do_fetch_service_log_file(self, service, log_dir):
        """Retrieve log file for the given service's pod.

        Args:
          service: [string] The service's log to get
          log_dir: [string] The directory name to write the logs into.
        """
        k8s_namespace = self.options.deploy_k8s_namespace
        service_pod = self.__get_pod_name(k8s_namespace, service)
        path = os.path.join(log_dir, service + '.log')
        write_data_to_secure_path('', path)
        check_run_quick('kubectl -n {namespace} logs {pod} >> {path}'.format(
            namespace=k8s_namespace, pod=service_pod, path=path))
Example #28
  def __publish_halyard_docs(self):
    """ Formats Halyard's documentation, then pushes to Spinnaker's documentation repository.
    """
    docs_source = 'halyard/docs/commands.md'
    docs_target = '{repo_name}/reference/halyard/commands.md'.format(repo_name=self.__docs_repo_name)

    repo_uri = '[email protected]:{repo_owner}/{repo_name}'.format(repo_owner=self.__docs_repo_owner,
                                                                repo_name=self.__docs_repo_name)
    check_run_quick('git clone {repo_uri}'.format(repo_uri=repo_uri))

    with open(docs_source, 'r') as source:
      with open(docs_target, 'w') as target:
        header = '\n'.join([
          '---',
          'layout: single',
          'title:  "Commands"',
          'sidebar:',
          '  nav: reference',
          '---',
          '',
          'Published: {}'.format(datetime
              .datetime
              .utcnow()
              .strftime('%Y-%m-%d %H:%M:%S')),
          '',
        ])
        target.write(header + source.read())

    commit_message = 'docs(halyard): {version}'.format(version=self.__stable_version)
    check_run_quick('git -C {repo_name} add reference/halyard/commands.md'.format(repo_name=self.__docs_repo_name))
    check_run_quick('git -C {repo_name} commit -m "{message}"'
                    .format(repo_name=self.__docs_repo_name, message=commit_message))
    check_run_quick('git -C {repo_name} push origin master'.format(repo_name=self.__docs_repo_name))
Example #29
def ensure_empty_ssh_key(path, user):
    """Ensure there is an ssh key at the given path.

    It is assumed that this key has no password associated with it so we
    can use it for ssh/scp.
    """

    if os.path.exists(path):
        return

    logging.debug('Creating %s SSH key for user "%s"', path, user)
    check_run_quick('ssh-keygen -N "" -t rsa -f {path} -C {user}'
                    '; sed "s/^ssh-rsa/{user}:ssh-rsa/" -i {path}'.format(
                        user=user, path=path))
Example #30
  def do_fetch_service_log_file(self, service, log_dir):
    """Retrieve log file for the given service's pod.

    Args:
      service: [string] The service's log to get
      log_dir: [string] The directory name to write the logs into.
    """
    k8s_namespace = self.options.deploy_k8s_namespace
    service_pod = self.__get_pod_name(k8s_namespace, service)
    path = os.path.join(log_dir, service + '.log')
    write_data_to_secure_path('', path)
    check_run_quick(
        'kubectl -n {namespace} logs {pod} >> {path}'
        .format(namespace=k8s_namespace, pod=service_pod, path=path))
Example #31
  def __publish_halyard_docs(self):
    """ Formats Halyard's documentation, then pushes to Spinnaker's documentation repository.
    """
    docs_source = 'halyard/docs/commands.md'
    docs_target = '{repo_name}/reference/halyard/commands.md'.format(repo_name=self.__docs_repo_name)

    repo_uri = '[email protected]:{repo_owner}/{repo_name}'.format(repo_owner=self.__docs_repo_owner,
                                                                repo_name=self.__docs_repo_name)
    check_run_quick('git clone {repo_uri}'.format(repo_uri=repo_uri))

    with open(docs_source, 'r') as source:
      with open(docs_target, 'w') as target:
        header = '\n'.join([
          '---',
          'layout: single',
          'title:  "Commands"',
          'sidebar:',
          '  nav: reference',
          '---',
          '',
          ''
        ])
        target.write(header + source.read())

    commit_message = 'docs(halyard): {version}'.format(version=self.__stable_version)
    check_run_quick('git -C {repo_name} add reference/halyard/commands.md'.format(repo_name=self.__docs_repo_name))
    check_run_quick('git -C {repo_name} commit -m "{message}"'
                    .format(repo_name=self.__docs_repo_name, message=commit_message))
    check_run_quick('git -C {repo_name} push origin master'.format(repo_name=self.__docs_repo_name))
Example #32
 def do_fetch_service_log_file(self, service, log_dir):
   """Implements the BaseBomValidateDeployer interface."""
   write_data_to_secure_path('', os.path.join(log_dir, service + '.log'))
   check_run_quick(
       'scp'
       ' -i {ssh_key}'
       ' -o StrictHostKeyChecking=no'
       ' -o UserKnownHostsFile=/dev/null'
       ' {ip}:/var/log/spinnaker/{service}/{service}.log'
       ' {log_dir}'
       .format(ip=self.instance_ip,
               ssh_key=self.ssh_key_path,
               service=service,
               log_dir=log_dir))
Example #33
def ensure_empty_ssh_key(path, user):
  """Ensure there is an ssh key at the given path.

  It is assumed that this key has no password associated with it so we
  can use it for ssh/scp.
  """

  if os.path.exists(path):
    return

  logging.debug('Creating %s SSH key for user "%s"', path, user)
  check_run_quick(
      'ssh-keygen -N "" -t rsa -f {path} -C {user}'
      '; sed "s/^ssh-rsa/{user}:ssh-rsa/" -i {path}'
      .format(user=user, path=path))
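The sed expression prefixes the key material with '<user>:', presumably because GCE-style ssh-keys metadata expects 'user:ssh-rsa ...' lines. The equivalent rewrite in Python, with a made-up key line and user:

line = 'ssh-rsa AAAAB3NzaC1yc2E... builder@host'
print(line.replace('ssh-rsa', 'jenkins:ssh-rsa', 1))
# jenkins:ssh-rsa AAAAB3NzaC1yc2E... builder@host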
Example #34
  def push_branch_and_tags(self):
    """Creates a release branch and pushes tags to the microservice repos owned by --github_publisher.

    A private key that has access to --github_publisher's github repos needs
    to be added to a running ssh-agent on the machine this script will run on:

    > <copy or rsync the key to the vm>
    > eval `ssh-agent`
    > ssh-add ~/.ssh/<key with access to github repos>

    """
    major, minor, _ = self.__release_version.split('.')

    # The stable branch will look like <major>.<minor>.X since nebula
    # enforces restrictions on what branches it does releases from.
    # https://github.com/nebula-plugins/nebula-release-plugin#extension-provided
    stable_branch = '.'.join([major, minor, 'X'])
    for comp in COMPONENTS:
      if self.__patch_release:
        check_run_quick('git -C {0} checkout {1}'.format(comp, stable_branch))
      else:
        # Create new release branch.
        check_run_quick('git -C {0} checkout -b {1}'.format(comp, stable_branch))

      version_tag_build = ''
      if comp == 'spinnaker-monitoring':
        version_tag_build = 'version-{0}'.format(self.__bom_dict[SERVICES]['monitoring-daemon'][VERSION])
      else:
        version_tag_build = 'version-{0}'.format(self.__bom_dict[SERVICES][comp][VERSION])

      last_dash = version_tag_build.rindex('-')
      version_tag = version_tag_build[:last_dash]
      repo_to_push = ('[email protected]:{owner}/{comp}.git'
                      .format(owner=self.__github_publisher, comp=comp))
      check_run_quick('git -C {comp} remote add release {url}'
                      .format(comp=comp, url=repo_to_push))
      check_run_quick('git -C {comp} push release {branch}'
                      .format(comp=comp, branch=stable_branch))

      repo = self.__github.get_repo('{owner}/{comp}'.format(owner=self.__github_publisher, comp=comp))
      paginated_tags = repo.get_tags()
      tag_names = [tag.name for tag in paginated_tags]
      if version_tag not in tag_names:
        # The tag doesn't exist and we need to push a tag.
        print ('pushing version tag {tag} to {owner}/{comp}'
               .format(tag=version_tag, owner=self.__github_publisher, comp=comp))
        check_run_quick('git -C {comp} push release {tag}'
                        .format(comp=comp, tag=version_tag))
Example #35
 def do_fetch_service_log_file(self, service, log_dir):
   """Implements the BaseBomValidateDeployer interface."""
   options = self.options
   write_data_to_secure_path('', os.path.join(log_dir, service + '.log'))
   check_run_quick(
       'gcloud compute copy-files '
       ' --ssh-key-file {ssh_key}'
       ' --project {project} --zone {zone}'
       ' {instance}:/var/log/spinnaker/{service}/{service}.log'
       ' {log_dir}'
       .format(project=options.google_deploy_project,
               zone=options.google_deploy_zone,
               instance=options.google_deploy_instance,
               ssh_key=self.EMPTY_SSH_KEY,
               service=service,
               log_dir=log_dir))
Example #36
def ensure_empty_ssh_key(path):
  """Ensure there is an ssh key at the given path.

  It is assumed that this key has no password associated with it so we
  can use it for ssh/scp.
  """

  pub_path = path + '.pub'
  if os.path.exists(pub_path):
    return

  logging.debug('Creating %s SSH key', path)
  check_run_quick(
      'ssh-keygen -N "" -t rsa -f {path} -C $USER'
      '; sed "s/^ssh-rsa/$USER:ssh-rsa/" -i {path}'
      .format(path=path))
Example #37
def __delete_unused_bom_images(options):
  client = None
  if options.json_path:
    client = storage.Client.from_service_account_json(options.json_path)
  else:
    client = storage.Client()
  versions_to_tag, possible_versions_to_delete, bom_contents_by_name = __partition_boms(client, options.bom_bucket_name)
  if options.additional_boms_to_tag:
    additional_boms_to_tag = options.additional_boms_to_tag.split(',')
    print('Adding additional BOM versions to tag: {}'.format(additional_boms_to_tag))
    versions_to_tag.extend(additional_boms_to_tag)
  print('Tagging versions: {}'.format(versions_to_tag))
  print('Deleting versions: {}'.format(possible_versions_to_delete))

  project = options.project
  service_account = options.service_account
  image_list_str = check_run_quick('gcloud compute images list --format=json --project={project} --account={account}'
                                   .format(project=project, account=service_account), echo=False).stdout.strip()
  image_list = json.loads(image_list_str)
  project_images = set([image['name'] for image in image_list])
  __tag_images(versions_to_tag, project, service_account, project_images,
               bom_contents_by_name)
  __write_image_delete_script(possible_versions_to_delete, options.days_before, project,
                              service_account, project_images,
                              bom_contents_by_name)
Example #38
 def __load_bom(self):
   """Load the release candidate BOM into memory.
   """
   bom_yaml_string = check_run_quick('hal version bom {0} --color false --quiet'
                                     .format(self.__bom_version), echo=False).stdout.strip()
   print 'bom yaml string pulled by hal: \n\n{0}\n\n'.format(bom_yaml_string)
   self.__bom_dict = yaml.load(bom_yaml_string)
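The SERVICES and VERSION constants used to index __bom_dict (see Example #34) are not defined in this section; given that Example #26 reads bom['services']['gate']['version'], they are presumably just the keys 'services' and 'version'. A hedged sketch of the BOM shape:

SERVICES = 'services'  # assumed constant values
VERSION = 'version'
example_bom = {
    'services': {
        'gate': {'version': '1.5.0-20180402130000'},
        'monitoring-daemon': {'version': '0.4.0-20180402130000'},
    },
}
print(example_bom[SERVICES]['gate'][VERSION])  # 1.5.0-20180402130000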
Example #39
    def __get_pod_name(self, k8s_namespace, service):
        """Determine the pod name for the deployed service."""
        options = self.options
        response = check_run_quick(
            'kubectl {context} get pods --namespace {namespace}'
            ' | gawk -F "[[:space:]]+" "/{service}-v/ {{print \\$1}}" | tail -1'
            .format(
                context=('--context {0}'.format(options.k8s_account_context)
                         if options.k8s_account_context else ''),
                namespace=k8s_namespace,
                service=service))
        pod = response.stdout.strip()
        if not pod:
            message = 'There is no pod for "{service}" in {namespace}'.format(
                service=service, namespace=k8s_namespace)
            logging.error(message)
            raise ValueError(message)

        if response.returncode != 0:
            message = 'Could not find pod for "{service}".: {error}'.format(
                service=service, error=response.stdout.strip())
            logging.error(message)
            raise ValueError(message)
        else:
            print '{0} -> "{1}"'.format(service, response.stdout)

        return response.stdout.strip()
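The gawk filter above keeps the first whitespace-separated column of the last pod line matching '<service>-v'. In Python terms, with a made-up kubectl row:

line = 'gate-v000-4x2lq   1/1   Running   0   2m'
print(line.split()[0])  # gate-v000-4x2lq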
Example #40
  def __open_changelog_pull_request(self):
    """Opens a pull request from --github_publisher's repo to the upstream 'spinnaker' repo.

    Uses 'hub' to open the pull request (https://github.com/github/hub).
    This assumes that 'hub' is installed on the machine running this script.
    """
    title = 'Changelog for version {0}'.format(self.__version)
    branch_head = '{user}:{branch}'.format(user=self.__github_publisher, branch=self.__changelog_branch)
    with open('message', 'w') as msg_file:
      # TODO(jacobkiefer): Add notification to spinnaker/google-reviewers in body.
      message = '{title}'.format(title=title)
      msg_file.write(message)

    base = 'spinnaker:master'
    check_run_quick('hub -C spinnaker.github.io pull-request -b {base} -h {head} -F message'
                    .format(base=base, head=branch_head))
Example #41
  def __get_pod_name(self, k8s_namespace, service):
    """Determine the pod name for the deployed service."""
    options = self.options
    response = check_run_quick(
        'kubectl {context} get pods --namespace {namespace}'
        ' | gawk -F "[[:space:]]+" "/{service}-v/ {{print \\$1}}" | tail -1'
        .format(context=('--context {0}'.format(options.k8s_account_context)
                         if options.k8s_account_context
                         else ''),
                namespace=k8s_namespace,
                service=service))
    pod = response.stdout.strip()
    if not pod:
      message = 'There is no pod for "{service}" in {namespace}'.format(
          service=service, namespace=k8s_namespace)
      logging.error(message)
      raise ValueError(message)

    if response.returncode != 0:
      message = 'Could not find pod for "{service}".: {error}'.format(
          service=service,
          error=response.stdout.strip())
      logging.error(message)
      raise ValueError(message)
    else:
      print '{0} -> "{1}"'.format(service, response.stdout)

    return response.stdout.strip()
Example #42
def get_default_project():
    """Determine the default project name.

    The default project name is the gcloud configured default project.
    """
    result = check_run_quick('gcloud config list', echo=False)
    return re.search('project = (.*)\n', result.stdout).group(1)
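For illustration, with hypothetical `gcloud config list` output:

import re

stdout = '[core]\nproject = my-project\n'
print(re.search('project = (.*)\n', stdout).group(1))  # my-project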
Example #43
def get_default_project():
  """Determine the default project name.

  The default project name is the gcloud configured default project.
  """
  result = check_run_quick('gcloud config list', echo=False)
  return re.search('project = (.*)\n', result.stdout).group(1)
Example #44
  def __push_docs_to_repo(self):
    docs_path = 'reference/api/docs.html'
    commit_message = ('docs(api): API Documentation for Spinnaker {spinnaker_version}'
                      .format(spinnaker_version=self.__spinnaker_version))

    check_run_quick('mv index.html {repo_name}/{docs_path}'.format(repo_name=self.__repo_name, docs_path=docs_path))
    check_run_quick('git -C {repo_name} add {docs_path}'.format(repo_name=self.__repo_name, docs_path=docs_path))
    check_run_quick('git -C {repo_name} commit -m "{message}"'.format(repo_name=self.__repo_name, message=commit_message))
    check_run_quick('git -C {repo_name} push origin master'.format(repo_name=self.__repo_name))
Example #45
  def publish_to_bintray(self, source, package, version, path, debian_tags=''):
    bintray_key = os.environ['BINTRAY_KEY']
    bintray_user = os.environ['BINTRAY_USER']
    parts = self.__options.bintray_repo.split('/')
    if len(parts) != 2:
      raise ValueError(
          'Expected --bintray_repo to be in the form <owner>/<repo>')
    subject, repo = parts[0], parts[1]

    deb_filename = os.path.basename(path)
    if (deb_filename.startswith('spinnaker-')
        and not package.startswith('spinnaker')):
      package = 'spinnaker-' + package

    if debian_tags and debian_tags[0] != ';':
      debian_tags = ';' + debian_tags

    url = ('https://api.bintray.com/content'
               '/{subject}/{repo}/{package}/{version}/{path}'
               '{debian_tags}'
               ';publish=1;override=1'
                   .format(subject=subject, repo=repo, package=package,
                           version=version, path=path,
                           debian_tags=debian_tags))

    if False:
        # This results in a 405
        with open(source, 'r') as f:
          data = f.read()
        request = urllib2.Request(url)
        encoded_auth = base64.encodestring('{user}:{pwd}'.format(
            user=bintray_user, pwd=bintray_key))[:-1]  # strip eoln

        request.add_header('Authorization', 'Basic ' + encoded_auth)
        result = urllib2.urlopen(request, data)
        request.get_method = lambda: 'PUT'
        code = result.getcode()
        if code < 200 or code >= 300:
          raise ValueError('Could not write {url}\n{response}\n'.format(
            url=url, response=result.read()))
    else:
        # Use curl to workaround for now.
        command = 'curl -s -u{user}:{key} -X PUT -T "{source}" "{url}"'.format(
            user=bintray_user, key=bintray_key, source=source, url=url)
        check_run_quick(command, echo=False)

    print 'Wrote {source} to {url}'.format(source=source, url=url)
Example #46
def ensure_gcs_bucket(name, project=''):
  """Ensure that the desired GCS bucket exists, creating it if needed.

  Args:
    name [string]: The bucket name.
    project [string]: Optional Google Project id that will own the bucket.
      If none is provided, then the bucket will be associated with the default
      project configured in gcloud.

  Raises:
    RuntimeError if the bucket could not be created.
  """
  bucket = 'gs://' + name
  if not project:
      config_result = run_quick('gcloud config list', echo=False)
      error = None
      if config_result.returncode:
        error = 'Could not run gcloud: {error}'.format(
            error=config_result.stdout)
      else:
        match = re.search('(?m)^project = (.*)', config_result.stdout)
        if not match:
          error = ('gcloud is not configured with a default project.\n'
                   'run gcloud config or provide a --google_project.\n')
      if error:
        raise SystemError(error)

      project = match.group(1)

  list_result = run_quick('gsutil list -p ' +  project, echo=False)
  if list_result.returncode:
    error = ('Could not create Google Cloud Storage bucket '
             '"{name}" in project "{project}":\n{error}'
             .format(name=name, project=project, error=list_result.stdout))
    raise RuntimeError(error)

  if re.search('(?m)^{bucket}/\n'.format(bucket=bucket), list_result.stdout):
    sys.stderr.write(
        'WARNING: "{bucket}" already exists. Overwriting.\n'.format(
        bucket=bucket))
  else:
    print 'Creating GCS bucket "{bucket}" in project "{project}".'.format(
        bucket=bucket, project=project)
    check_run_quick('gsutil mb -p {project} {bucket}'
                    .format(project=project, bucket=bucket),
                    echo=True)
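# Hypothetical usage sketch (not from the original source): the bucket and
# project names are placeholders. Omitting project falls back to the
# gcloud-configured default project.
def _example_ensure_bucket():
  ensure_gcs_bucket('example-spinnaker-artifacts', project='example-project')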
Example #47
0
def ensure_gcs_bucket(name, project=''):
    """Ensure that the desired GCS bucket exists, creating it if needed.

    Args:
      name [string]: The bucket name.
      project [string]: Optional Google Project id that will own the bucket.
        If none is provided, the bucket will be associated with the default
        project configured in gcloud.

    Raises:
      RuntimeError if the bucket could not be created.
    """
    bucket = 'gs://' + name
    if not project:
        config_result = run_quick('gcloud config list', echo=False)
        error = None
        if config_result.returncode:
            error = 'Could not run gcloud: {error}'.format(
                error=config_result.stdout)
        else:
            match = re.search('(?m)^project = (.*)', config_result.stdout)
            if not match:
                error = ('gcloud is not configured with a default project.\n'
                         'run gcloud config or provide a --google_project.\n')
        if error:
            raise SystemError(error)

        project = match.group(1)

    list_result = run_quick('gsutil list -p ' + project, echo=False)
    if list_result.returncode:
        error = ('Could not create Google Cloud Storage bucket '
                 '"{name}" in project "{project}":\n{error}'.format(
                     name=name, project=project, error=list_result.stdout))
        raise RuntimeError(error)

    if re.search('(?m)^{bucket}/\n'.format(bucket=bucket), list_result.stdout):
        sys.stderr.write(
            'WARNING: "{bucket}" already exists. Overwriting.\n'.format(
                bucket=bucket))
    else:
        print 'Creating GCS bucket "{bucket}" in project "{project}".'.format(
            bucket=bucket, project=project)
        check_run_quick('gsutil mb -p {project} {bucket}'.format(
            project=project, bucket=bucket),
                        echo=True)
Example #48
0
def inject_spring_config_location(options, subsystem):
  """Add spinnaker.yml to the spring config location path.

  This might be temporary. Once this is standardized, perhaps the packages
  will already ship with it.
  """
  if subsystem == "deck":
    return

  path = os.path.join('/opt', subsystem, 'bin', subsystem)
  with open(path, 'r') as f:
    content = f.read()
  match = re.search('\nDEFAULT_JVM_OPTS=(.+)\n', content)
  if not match:
    raise ValueError('Expected DEFAULT_JVM_OPTS in ' + path)
  value = match.group(1)

  if value.find('-Dspring.config.location=') >= 0:
    sys.stderr.write(
        'WARNING: spring.config.location was already explicitly defined.'
        '\nLeaving ' + match.group(0) + '\n')  # Show the whole line.
    return

  new_content = [content[0:match.start(1)]]

  # If the existing value is quoted, splice inside the opening quote and use
  # the opposite quote style for the injected property so they do not clash.
  offset = 1 if value[0] == '\'' or value[0] == '"' else 0
  quote = '"' if value[0] == '\'' else '\''
  root = '/opt/spinnaker/config'
  home = '/root/.spinnaker'
  new_content.append(value[0:offset])
  new_content.append('{quote}-Dspring.config.location={root}/,{home}/{quote}'
                     .format(quote=quote, home=home, root=root))
  new_content.append(' ')
  new_content.append(content[match.start(1) + offset:])

  # Write to a temp file, then move it into place preserving permissions.
  fd, temp = tempfile.mkstemp()
  os.write(fd, ''.join(new_content))
  os.close(fd)

  check_run_quick(
      'chmod --reference={path} {temp}'.format(path=path, temp=temp),
      echo=False)
  check_run_quick(
      'sudo mv {temp} {path}'.format(temp=temp, path=path),
      echo=False)
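# A self-contained sketch (assumption, not from the original source) of the
# splice performed above: given a gradle start-script line such as
# DEFAULT_JVM_OPTS='"-Xmx1024m"', the property is injected inside the outer
# quoting, using the opposite quote style so the quotes do not clash.
def _example_spring_splice():
  content = '\nDEFAULT_JVM_OPTS=\'"-Xmx1024m"\'\n'
  match = re.search('\nDEFAULT_JVM_OPTS=(.+)\n', content)
  value = match.group(1)                       # '"-Xmx1024m"'
  offset = 1 if value[0] in ('\'', '"') else 0
  quote = '"' if value[0] == '\'' else '\''
  injected = ('{quote}-Dspring.config.location='
              '/opt/spinnaker/config/,/root/.spinnaker/{quote} '
              .format(quote=quote))
  # Yields: DEFAULT_JVM_OPTS='"-Dspring.config.location=..." "-Xmx1024m"'
  return (content[:match.start(1)] + value[:offset]
          + injected + content[match.start(1) + offset:])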
Example #49
0
def get_project(options):
    """Determine the default project name.

    The default project name is the gcloud configured default project.
    """
    if not options.project:
        result = check_run_quick('gcloud config list', echo=False)
        options.project = re.search('project = (.*)\n', result.stdout).group(1)
    return options.project
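# Hypothetical usage sketch (not from the original source): options is any
# object with a mutable .project attribute, e.g. an argparse namespace.
def _example_get_project():
  import argparse
  options = argparse.Namespace(project='')
  # Falls through to `gcloud config list` because options.project is empty.
  return get_project(options)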
Example #50
0
  def __extract_image_tarball_helper(self):
    """Helper function for make_image_tarball that does the work.

    Note that the work happens on the instance itself. So this function
    builds a remote command that it then executes on the prototype instance.
    """
    print 'Creating image tarball.'

    tar_path = self.options.tarball_uri
    tar_name = os.path.basename(tar_path)
    remote_script = [
      'sudo mkdir /mnt/exportdisk',
      'sudo mkfs.ext4 -F /dev/disk/by-id/google-export-disk',
      'sudo mount -t ext4 -o discard,defaults'
          ' /dev/disk/by-id/google-export-disk /mnt/exportdisk',

      'sudo mkdir /mnt/snapshotdisk',
      'sudo mount /dev/disk/by-id/google-snapshot-disk /mnt/snapshotdisk',
      'cd /mnt/snapshotdisk',
      'sudo rm -rf home/*',
      'sudo rm -rf tmp/*',
      # Truncate under sudo; a plain shell redirect would not run as root.
      'if [[ -f root/.ssh/authorized_keys ]]; then'
      ' sudo truncate -s 0 root/.ssh/authorized_keys'
      '; fi',
      'sudo find var/log -type f -exec rm {} \;',

      'sudo dd if=/dev/disk/by-id/google-snapshot-disk'
          ' of=/mnt/exportdisk/disk.raw bs=4096',
      'cd /mnt/exportdisk',

      'sudo tar czvf {tar_name} disk.raw'.format(tar_name=tar_name),
      'gsutil -q cp /mnt/exportdisk/{tar_name} {output_path}'.format(
          tar_name=tar_name, output_path=tar_path)
    ]

    command = '; '.join(remote_script)
    print 'Running: {0}'.format(command)
    check_run_quick('gcloud {account} compute ssh --command="{command}"'
                    ' --project {project} --zone {zone} {instance}'
                    .format(account=self.__gcloud_account_arg,
                            command=command.replace('"', r'\"'),
                            project=self.__project,
                            zone=self.__zone,
                            instance=self.__instance))
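# A standalone sketch (assumption, not from the original source) of how the
# remote script above is flattened and quoted for `gcloud compute ssh`.
def _example_remote_command():
  remote_script = ['cd /mnt/exportdisk',
                   'sudo tar czvf image.tar.gz disk.raw']
  command = '; '.join(remote_script)
  # Double quotes inside the script are escaped so the whole thing survives
  # being embedded in --command="..." on the gcloud command line.
  return '--command="{0}"'.format(command.replace('"', r'\"'))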
Example #51
0
def add_gcevm_to_etc_hosts(options):
  """Add gcevm as an alias for localhost to ease working with a SOCKS proxy."""
  with open('/etc/hosts', 'r') as f:
      content = f.read()
  modified = content.replace('127.0.0.1 localhost',
                             '127.0.0.1 localhost gcevm')

  fd, tmp = tempfile.mkstemp()
  os.write(fd, modified)
  os.close(fd)
  try:
    check_run_quick('sudo bash -c "'
                    'chown --reference=/etc/hosts {tmp}'
                    '; chmod --reference=/etc/hosts {tmp}'
                    '; mv {tmp} /etc/hosts'
                    '"'.format(tmp=tmp),
                    echo=False)
  except BaseException:
    os.remove(tmp)  # Clean up the temp file, then let the error propagate.
    raise
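# A self-contained sketch (assumption, not from the original source) of the
# substitution performed above, without touching the real /etc/hosts.
def _example_add_alias():
  content = '127.0.0.1 localhost\n::1 ip6-localhost\n'
  # Afterwards "gcevm" resolves to loopback, so a browser tunneled through
  # the SOCKS proxy can reach e.g. http://gcevm:9000 on the remote VM.
  return content.replace('127.0.0.1 localhost', '127.0.0.1 localhost gcevm')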
Example #52
0
  def publish_release_bom(self):
    """Read, update, and publish a release candidate BOM.
    """
    new_bom_file = '{0}.yml'.format(self.__release_version)
    self.__bom_dict[VERSION] = self.__release_version
    self.write_bom_file(new_bom_file, self.__bom_dict)
    self.publish_bom(new_bom_file)
    # Re-write the 'latest' Spinnaker version.
    if self.__alias:
      alias_file = '{0}.yml'.format(self.__alias)
      self.write_bom_file(alias_file, self.__bom_dict)
      self.publish_bom(alias_file)

    # Update the available Spinnaker versions.
    check_run_quick(
        'hal admin publish version --version {version}'
        ' --alias "{alias}" --changelog {changelog}'
        .format(version=self.__release_version,
                alias=self.__release_name,
                changelog=self.__gist_uri))
    check_run_quick('hal admin publish latest {version}'
                    .format(version=self.__release_version))
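# A hypothetical sketch (not from the original source) of the BOM flow above.
# The dictionary shape, the VERSION key name, and the writer interface are
# all assumptions for illustration.
def _example_publish_bom(writer):
  VERSION = 'version'   # assumed key, mirroring __bom_dict[VERSION] above
  bom = {VERSION: '', 'services': {'clouddriver': {'version': '1.0.0'}}}
  bom[VERSION] = '1.0.0'                 # stamp the release version
  writer.write_bom_file('1.0.0.yml', bom)
  writer.publish_bom('1.0.0.yml')        # assumed interface, as used above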