Example No. 1
  def pull_from_origin(self, repository):
      """Pulls the current branch from the git origin.

      Args:
        repository [string]: The local repository to update.
      """
      name = repository.name
      repository_dir = get_repository_dir(name)
      if not os.path.exists(repository_dir):
          self.git_clone(repository)
          return

      print 'Updating {name} from origin'.format(name=name)
      branch = self.get_local_branch_name(name)
      if branch != self.pull_branch:
          sys.stderr.write(
              'WARNING: Updating {name} branch={branch}, *NOT* "{want}"\n'
                  .format(name=name, branch=branch, want=self.pull_branch))
      try:
        check_run_and_monitor('git -C "{dir}" pull origin {branch}'
                                  .format(dir=repository_dir, branch=branch),
                              echo=True)
      except RuntimeError:
        result = check_run_and_monitor('git -C "{dir}" branch -r'
                                           .format(dir=repository_dir),
                                       echo=False)
        if result.stdout.find(
            'origin/{branch}\n'.format(branch=branch)) >= 0:
          raise
        sys.stderr.write(
              'WARNING {name} branch={branch} is not known to the origin.\n'
              .format(name=name, branch=branch))
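
These examples call helper functions such as run_quick, check_run_quick, run_and_monitor, and check_run_and_monitor that are defined elsewhere in the surrounding codebase. Judging from how they are used here, each runs a shell command and returns a result with returncode and stdout attributes, and the check_ variants raise RuntimeError on a nonzero exit. A rough, hypothetical stand-in along those lines (not the real helpers, and CommandResult is an invented name) might look like:

import collections
import subprocess

# Hypothetical result type; the real helpers return something similar.
CommandResult = collections.namedtuple('CommandResult', ['returncode', 'stdout'])

def run_quick(command, echo=True):
  """Run a shell command, returning its exit code and combined output."""
  process = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE,
                             stderr=subprocess.STDOUT)
  stdout, _ = process.communicate()
  if echo:
    print stdout
  return CommandResult(process.returncode, stdout)

def check_run_and_monitor(command, echo=True):
  """Run a shell command and raise RuntimeError if it fails."""
  result = run_quick(command, echo=echo)
  if result.returncode != 0:
    raise RuntimeError('Command failed: {0}'.format(command))
  return result
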
Example No. 2
def install_apache(options):
    """Install Apache2

    This will update /etc/apache2/ports.conf so Apache listens on DECK_PORT
    instead of its default port 80.

    Args:
      options: ArgumentParserNamespace options.
    """
    if not options.apache:
        print '--noapache skips Apache install.'
        return

    print 'Installing apache2...'
    check_install_package('apache2', version=None)

    # Change apache to run on port $DECK_PORT by default.
    # We write the new content to a temp file and sudo it into place below.
    with open('/etc/apache2/ports.conf', 'r') as f:
        content = f.read()
    print 'Changing default port to {0}'.format(DECK_PORT)
    content = content.replace('Listen 80\n', 'Listen {0}\n'.format(DECK_PORT))
    # write changes to a temp file so we can atomically replace the old one
    fd, temp_path = tempfile.mkstemp()
    os.write(fd, content)
    os.close(fd)

    # Replace the file while preserving the original owner and protection bits.
    check_run_and_monitor('sudo bash -c "'
                          'chmod --reference={etc} {temp}'
                          '; chown --reference={etc} {temp}'
                          '; mv {temp} {etc}"'
                          .format(etc='/etc/apache2/ports.conf', temp=temp_path),
                          echo=False)
    check_run_and_monitor('sudo apt-get install -f -y', echo=True)
Example No. 3
  def pull_from_upstream_if_master(self, repository):
      """Pulls the master branch from the upstream repository.

      This will only have effect if the local repository exists
      and is currently in the master branch.

      Args:
        repository [string]: The name of the local repository to update.
      """
      name = repository.name
      repository_dir = get_repository_dir(name)
      if not os.path.exists(repository_dir):
          self.pull_from_origin(repository)
          if not os.path.exists(repository_dir):
            return

      branch = self.get_local_branch_name(name)
      if branch != 'master':
          sys.stderr.write('Skipping {name} because it is in branch={branch}.\n'
                           .format(name=name, branch=branch))
          return

      print 'Pulling master {name} from upstream'.format(name=name)
      check_run_and_monitor('git -C "{dir}" pull upstream master --tags'
                                .format(dir=repository_dir),
                            echo=True)
Example No. 4
  def do_create_vm(self, options):
    """Implements the BaseBomValidateDeployer interface."""
    logging.info('Creating "%s" in project "%s"',
                 options.deploy_google_instance,
                 options.deploy_google_project)
    with open(self.ssh_key_path + '.pub', 'r') as f:
      ssh_key = f.read().strip()
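    # GCE ssh-keys metadata entries take the form "<username>:<public key>", so prefix the hal user.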
    if ssh_key.startswith('ssh-rsa'):
      ssh_key = self.hal_user + ':' + ssh_key

    check_run_and_monitor(
        'gcloud compute instances create'
        ' --account {gcloud_account}'
        ' --machine-type n1-standard-4'
        ' --image-family ubuntu-1404-lts'
        ' --image-project ubuntu-os-cloud'
        ' --metadata block-project-ssh-keys=TRUE,ssh-keys="{ssh_key}"'
        ' --project {project} --zone {zone}'
        ' --network {network}'
        ' --tags {network_tags}'
        ' --scopes {scopes}'
        ' {instance}'
        .format(gcloud_account=options.deploy_hal_google_service_account,
                project=options.deploy_google_project,
                zone=options.deploy_google_zone,
                scopes='compute-rw,storage-full,logging-write,monitoring',
                network=options.deploy_google_network,
                network_tags=options.deploy_google_tags,
                ssh_key=ssh_key,
                instance=options.deploy_google_instance))
Example No. 5
  def push_to_origin_if_target_branch(self, repository):
      """Pushes the current target branch of the local repository to the origin.

      This will only have effect if the local repository exists
      and is currently in the target branch.

      Args:
        repository [string]: The name of the local repository to push from.
      """
      name = repository.name
      repository_dir = get_repository_dir(name)
      if not os.path.exists(repository_dir):
          sys.stderr.write('Skipping {name} because it does not yet exist.\n'
                               .format(name=name))
          return

      branch = self.get_local_branch_name(name)
      if branch != self.push_branch:
          sys.stderr.write(
              'Skipping {name} because it is in branch={branch}, not {want}.\n'
                  .format(name=name, branch=branch, want=self.push_branch))
          return

      print 'Pushing {name} to origin.'.format(name=name)
      check_run_and_monitor('git -C "{dir}" push origin {branch} --tags'.format(
                                dir=repository_dir, branch=self.push_branch),
                            echo=True)
Example No. 6
def install_apache(options):
    """Install Apache2

    This will update /etc/apache2/ports.conf so Apache listens on DECK_PORT
    instead of its default port 80.

    Args:
      options: ArgumentParserNamespace options.
    """
    if not options.apache:
        print '--noapache skips Apache install.'
        return

    print 'Installing apache2...'
    check_install_package('apache2', version=None)

    # Change apache to run on port $DECK_PORT by default.
    # We write the new content to a temp file and sudo it into place below.
    with open('/etc/apache2/ports.conf', 'r') as f:
        content = f.read()
    print 'Changing default port to {0}'.format(DECK_PORT)
    content = content.replace('Listen 80\n', 'Listen {0}\n'.format(DECK_PORT))
    # write changes to a temp file so we can atomically replace the old one
    fd, temp_path = tempfile.mkstemp()
    os.write(fd, content)
    os.close(fd)

    # Replace the file while preserving the original owner and protection bits.
    check_run_and_monitor('sudo bash -c "'
                          'chmod --reference={etc} {temp}'
                          '; chown --reference={etc} {temp}'
                          '; mv {temp} {etc}"'.format(
                              etc='/etc/apache2/ports.conf', temp=temp_path),
                          echo=False)
    check_run_and_monitor('sudo apt-get install -f -y', echo=True)
Example No. 7
    def do_create_vm(self, options):
        """Implements the BaseBomValidateDeployer interface."""
        logging.info('Creating "%s" in project "%s"',
                     options.deploy_google_instance,
                     options.deploy_google_project)
        with open(self.ssh_key_path + '.pub', 'r') as f:
            ssh_key = f.read().strip()
        if ssh_key.startswith('ssh-rsa'):
            ssh_key = self.hal_user + ':' + ssh_key

        check_run_and_monitor(
            'gcloud compute instances create'
            ' --account {gcloud_account}'
            ' --machine-type n1-standard-4'
            ' --image-family ubuntu-1404-lts'
            ' --image-project ubuntu-os-cloud'
            ' --metadata block-project-ssh-keys=TRUE,ssh-keys="{ssh_key}"'
            ' --project {project} --zone {zone}'
            ' --scopes {scopes}'
            ' {instance}'.format(
                gcloud_account=options.deploy_hal_google_service_account,
                project=options.deploy_google_project,
                zone=options.deploy_google_zone,
                scopes='compute-rw,storage-full,logging-write,monitoring',
                ssh_key=ssh_key,
                instance=options.deploy_google_instance))
Example No. 8
def install_nvm(options):
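  # Install NVM under /usr/local/nvm, then install the configured Node version and make it the default.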
  print '---------- Installing NVM ---------'
  check_run_quick('sudo chmod 775 /usr/local')
  check_run_quick('sudo mkdir -m 777 -p /usr/local/node /usr/local/nvm')

  result = check_fetch(
    'https://raw.githubusercontent.com/creationix/nvm/{nvm_version}/install.sh'
    .format(nvm_version=NVM_VERSION))

  fd, temp = tempfile.mkstemp()
  os.write(fd, result.content)
  os.close(fd)

  try:
    run_and_monitor(
        'bash -c "NVM_DIR=/usr/local/nvm source {temp}"'.format(temp=temp))
  finally:
    os.remove(temp)

#  curl -o- https://raw.githubusercontent.com/creationix/nvm/v0.26.0/install.sh | NVM_DIR=/usr/local/nvm bash


  check_run_and_monitor('sudo bash -c "cat > /etc/profile.d/nvm.sh"',
                        input=__NVM_SCRIPT)

  print '---------- Installing Node {version} ---------'.format(
    version=NODE_VERSION)

  run_and_monitor('bash -c "source /etc/profile.d/nvm.sh'
                  '; nvm install {version}'
                  '; nvm alias default {version}"'
                  .format(version=NODE_VERSION))
Example No. 9
  def pull_from_origin(self, repository):
      """Pulls the current branch from the git origin.

      Args:
        repository [string]: The local repository to update.
      """
      name = repository.name
      repository_dir = get_repository_dir(name)
      if not os.path.exists(repository_dir):
          self.git_clone(repository)
          return

      print 'Updating {name} from origin'.format(name=name)
      branch = self.get_local_branch_name(name)
      if branch != self.pull_branch:
          sys.stderr.write(
              'WARNING: Updating {name} branch={branch}, *NOT* "{want}"\n'
                  .format(name=name, branch=branch, want=self.pull_branch))
      try:
        check_run_and_monitor('git -C "{dir}" pull origin {branch} --tags'
                                  .format(dir=repository_dir, branch=branch),
                              echo=True)
      except RuntimeError:
        result = check_run_and_monitor('git -C "{dir}" branch -r'
                                           .format(dir=repository_dir),
                                       echo=False)
        if result.stdout.find(
            'origin/{branch}\n'.format(branch=branch)) >= 0:
          raise
        sys.stderr.write(
              'WARNING {name} branch={branch} is not known to the origin.\n'
              .format(name=name, branch=branch))
Example No. 10
def install_os_updates(options):
    if not options.update_os:
        print 'Skipping os upgrades.'
        return

    print 'Upgrading packages...'
    check_run_and_monitor('sudo apt-get -y update', echo=True)
    check_run_and_monitor('sudo apt-get -y dist-upgrade', echo=True)
Example No. 11
def install_os_updates(options):
  if not options.update_os:
      print 'Skipping os upgrades.'
      return

  print 'Upgrading packages...'
  check_run_and_monitor('sudo apt-get -y update', echo=True)
  check_run_and_monitor('sudo apt-get -y dist-upgrade', echo=True)
Example No. 12
    def do_deploy(self, script, files_to_upload):
        """Implements the BaseBomValidateDeployer interface."""
        options = self.options
        ensure_empty_ssh_key(self.__ssh_key_path, self.__hal_user)

        script_path = write_script_to_path(script, path=None)
        files_to_upload.add(script_path)
        if options.jenkins_master_name:
            write_data_to_secure_path(
                os.environ.get('JENKINS_MASTER_PASSWORD'),
                path=os.path.join(
                    os.sep, 'tmp', 'jenkins_{name}_password'.format(
                        name=options.jenkins_master_name)),
                is_script=True)

        try:
            self.do_create_vm(options)

            copy_files = ('scp'
                          ' -i {ssh_key_path}'
                          ' -o StrictHostKeyChecking=no'
                          ' -o UserKnownHostsFile=/dev/null'
                          ' {files} {ip}:~'.format(
                              ssh_key_path=self.__ssh_key_path,
                              files=' '.join(files_to_upload),
                              ip=self.instance_ip))
            logging.info('Copying files %s', copy_files)

            # pylint: disable=unused-variable
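            # Retry the upload for a while; if it still fails, check_run_quick will surface the error.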
            for retry in range(0, 10):
                result = run_quick(copy_files)
                if result.returncode == 0:
                    break
                time.sleep(2)

            if result.returncode != 0:
                check_run_quick(copy_files)
        except Exception as ex:
            logging.error('Caught %s', ex)
            raise
        finally:
            os.remove(script_path)

        logging.info('Running install script')
        try:
            check_run_and_monitor(
                'ssh'
                ' -i {ssh_key}'
                ' -o StrictHostKeyChecking=no'
                ' -o UserKnownHostsFile=/dev/null'
                ' {ip}'
                ' "sudo ./{script_name}"'.format(
                    ip=self.instance_ip,
                    ssh_key=self.__ssh_key_path,
                    script_name=os.path.basename(script_path)))
        except RuntimeError:
            raise RuntimeError('Halyard deployment failed.')
Example No. 13
  def do_deploy(self, script, files_to_upload):
    """Implements the BaseBomValidateDeployer interface."""
    options = self.options
    ensure_empty_ssh_key(self.__ssh_key_path, self.__hal_user)

    script_path = write_script_to_path(script, path=None)
    files_to_upload.add(script_path)
    if options.jenkins_master_name:
      write_data_to_secure_path(
          os.environ.get('JENKINS_MASTER_PASSWORD'),
          path=os.path.join(os.sep, 'tmp', 'jenkins_{name}_password'
                            .format(name=options.jenkins_master_name)),
          is_script=True)

    try:
      self.do_create_vm(options)

      copy_files = (
          'scp'
          ' -i {ssh_key_path}'
          ' -o StrictHostKeyChecking=no'
          ' -o UserKnownHostsFile=/dev/null'
          ' {files} {ip}:~'
          .format(ssh_key_path=self.__ssh_key_path,
                  files=' '.join(files_to_upload),
                  ip=self.instance_ip))
      logging.info('Copying files %s', copy_files)

      # pylint: disable=unused-variable
      for retry in range(0, 10):
        result = run_quick(copy_files)
        if result.returncode == 0:
          break
        time.sleep(2)

      if result.returncode != 0:
        check_run_quick(copy_files)
    except Exception as ex:
      logging.error('Caught %s', ex)
      raise
    finally:
      os.remove(script_path)

    logging.info('Running install script')
    try:
      check_run_and_monitor(
          'ssh'
          ' -i {ssh_key}'
          ' -o StrictHostKeyChecking=no'
          ' -o UserKnownHostsFile=/dev/null'
          ' {ip}'
          ' "sudo ./{script_name}"'
          .format(ip=self.instance_ip,
                  ssh_key=self.__ssh_key_path,
                  script_name=os.path.basename(script_path)))
    except RuntimeError:
      raise RuntimeError('Halyard deployment failed.')
Example No. 14
def install_gcloud(options):
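  # Skip if gcloud was not requested or is already installed; otherwise run the Cloud SDK install script.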
  if not options.gcloud:
      return

  result = run_quick('gcloud --version', echo=False)
  if not result.returncode:
    print 'GCloud is already installed:\n    {version_info}'.format(
      version_info=result.stdout.replace('\n', '\n    '))
    return

  print 'Installing GCloud.'
  check_run_and_monitor('curl https://sdk.cloud.google.com | bash', echo=True)
Example No. 15
def check_install_package(name, version=None, options=[]):
  """Install the specified package, with specific version if provide.

  Args:
    name: The unversioned package name.
    version: If provided, the specific version to install.
    options: Additional command-line options to apt-get install.
  """
  package_name = name
  if version:
      package_name += '={0}'.format(version)

  command = ['sudo apt-get -q -y']
  command.extend(options)
  command.extend(['install', package_name])
  check_run_and_monitor(' '.join(command), echo=True)
Example No. 16
def check_install_package(name, version=None, options=[]):
    """Install the specified package, with specific version if provide.

  Args:
    name: The unversioned package name.
    version: If provided, the specific version to install.
    options: Additional command-line options to apt-get install.
  """
    package_name = name
    if version:
        package_name += '={0}'.format(version)

    command = ['sudo apt-get -q -y']
    command.extend(options)
    command.extend(['install', package_name])
    check_run_and_monitor(' '.join(command), echo=True)
Example No. 17
 def do_undeploy(self):
     """Implements the BaseBomValidateDeployer interface."""
     options = self.options
     if options.deploy_spinnaker_type == 'distributed':
         run_and_monitor('ssh'
                         ' -i {ssh_key}'
                         ' -o StrictHostKeyChecking=no'
                         ' -o UserKnownHostsFile=/dev/null'
                         ' {ip} sudo hal deploy clean'.format(
                             ip=self.instance_ip,
                             ssh_key=self.ssh_key_path))
     check_run_and_monitor('az vm delete -y'
                           ' --name {name}'
                           ' --resource-group {rg}'.format(
                               name=options.deploy_azure_name,
                               rg=options.deploy_azure_resource_group))
Example No. 18
 def do_undeploy(self):
   """Implements the BaseBomValidateDeployer interface."""
   options = self.options
   if options.deploy_spinnaker_type == 'distributed':
     run_and_monitor(
         'ssh'
         ' -i {ssh_key}'
         ' -o StrictHostKeyChecking=no'
         ' -o UserKnownHostsFile=/dev/null'
         ' {ip} sudo hal deploy clean'
         .format(ip=self.instance_ip, ssh_key=self.ssh_key_path))
   check_run_and_monitor(
       'az vm delete -y'
       ' --name {name}'
       ' --resource-group {rg}'
       .format(name=options.deploy_azure_name,
               rg=options.deploy_azure_resource_group))
Example No. 19
def _install_spinnaker_packages_helper(options, bucket):
  """Install the spinnaker packages from the specified path.

  Args:
    bucket [string]: The path to install from, or a storage service URI.
  """
  if not options.spinnaker:
      return

  print 'Installing Spinnaker components from {0}.'.format(bucket)

  install_config_dir = get_config_install_dir(options)
  spinnaker_dir = get_spinnaker_dir(options)

  with open(os.path.join(spinnaker_dir, 'release_config.cfg'), 'r') as f:
    content = f.read()
    package_list = (re.search('\nPACKAGE_LIST="(.*?)"', content)
                    .group(1).split())


  ###########################
  # Copy Subsystem Packages
  ###########################
  print 'Downloading spinnaker release packages...'
  package_dir = os.path.join(spinnaker_dir, 'install')
  safe_mkdir(package_dir)
  jobs = []
  for pkg in package_list:
    jobs.append(start_copy_file(options,
                                os.path.join(bucket, pkg), package_dir))

  check_wait_for_copy_complete(jobs)

  for pkg in package_list:
    print 'Installing {0}.'.format(pkg)

    # Let this fail because it may have dependencies
    # that we'll pick up below.
    run_and_monitor('sudo dpkg -i ' + os.path.join(package_dir, pkg))
    check_run_and_monitor('sudo apt-get install -f -y')
    # Convert package name to install directory name.
    inject_spring_config_location(options, pkg[0:pkg.find('_')])

  # Install package dependencies
  check_run_and_monitor('sudo apt-get install -f -y')
Example No. 20
def _install_spinnaker_packages_helper(options, bucket):
    """Install the spinnaker packages from the specified path.

  Args:
    bucket [string]: The path to install from, or a storage service URI.
  """
    if not options.spinnaker:
        return

    print 'Installing Spinnaker components from {0}.'.format(bucket)

    install_config_dir = get_config_install_dir(options)
    spinnaker_dir = get_spinnaker_dir(options)

    with open(os.path.join(spinnaker_dir, 'release_config.cfg'), 'r') as f:
        content = f.read()
        package_list = (re.search('\nPACKAGE_LIST="(.*?)"',
                                  content).group(1).split())

    ###########################
    # Copy Subsystem Packages
    ###########################
    print 'Downloading spinnaker release packages...'
    package_dir = os.path.join(spinnaker_dir, 'install')
    safe_mkdir(package_dir)
    jobs = []
    for pkg in package_list:
        jobs.append(
            start_copy_file(options, os.path.join(bucket, pkg), package_dir))

    check_wait_for_copy_complete(jobs)

    for pkg in package_list:
        print 'Installing {0}.'.format(pkg)

        # Let this fail because it may have dependencies
        # that we'll pick up below.
        run_and_monitor('sudo dpkg -i ' + os.path.join(package_dir, pkg))
        check_run_and_monitor('sudo apt-get install -f -y')
        # Convert package name to install directory name.
        inject_spring_config_location(options, pkg[0:pkg.find('_')])

    # Install package dependencies
    check_run_and_monitor('sudo apt-get install -f -y')
Example No. 21
 def do_undeploy(self):
   """Implements the BaseBomValidateDeployer interface."""
   options = self.options
   if options.deploy_spinnaker_type == 'distributed':
     run_and_monitor(
         'gcloud compute ssh'
         ' --ssh-key-file {ssh_key}'
         ' --project {project} --zone {zone} {instance}'
         ' --command "sudo hal deploy clean"'
         .format(project=options.google_deploy_project,
                 zone=options.google_deploy_zone,
                 instance=options.google_deploy_instance,
                 ssh_key=self.EMPTY_SSH_KEY))
   check_run_and_monitor(
       'gcloud -q compute instances delete'
       ' --project {project} --zone {zone} {instance}'
       .format(project=options.google_deploy_project,
               zone=options.google_deploy_zone,
               instance=options.google_deploy_instance))
Example No. 22
 def do_undeploy(self):
     """Implements the BaseBomValidateDeployer interface."""
     options = self.options
     if options.deploy_spinnaker_type == 'distributed':
         run_and_monitor('ssh'
                         ' -i {ssh_key}'
                         ' -o StrictHostKeyChecking=no'
                         ' -o UserKnownHostsFile=/dev/null'
                         ' {instance} sudo hal deploy clean'.format(
                             instance=options.google_deploy_instance,
                             ssh_key=self.__ssh_key_path))
     check_run_and_monitor(
         'gcloud -q compute instances delete'
         ' --account {gcloud_account}'
         ' --project {project} --zone {zone} {instance}'.format(
             gcloud_account=options.deploy_hal_google_service_account,
             project=options.google_deploy_project,
             zone=options.google_deploy_zone,
             instance=options.google_deploy_instance))
Example No. 23
  def do_undeploy(self):
    """Implements the BaseBomValidateDeployer interface."""
    options = self.options
    if options.deploy_spinnaker_type == 'distributed':
      run_and_monitor(
          'ssh'
          ' -i {ssh_key}'
          ' -o StrictHostKeyChecking=no'
          ' -o UserKnownHostsFile=/dev/null'
          ' {ip} sudo hal deploy clean'
          .format(ip=self.instance_ip, ssh_key=self.ssh_key_path))

    check_run_and_monitor(
        'gcloud -q compute instances delete'
        ' --account {gcloud_account}'
        ' --project {project} --zone {zone} {instance}'
        .format(gcloud_account=options.deploy_hal_google_service_account,
                project=options.deploy_google_project,
                zone=options.deploy_google_zone,
                instance=options.deploy_google_instance))
Example No. 24
def install_cassandra(options):
    """Install Cassandra.

    Args:
      options: ArgumentParserNamespace options.
    """
    if not options.cassandra:
        print '--nocassandra skipping Cassandra install.'
        return

    print 'Installing Cassandra...'
    check_options(options)
    preferred_version = None
    if not options.package_manager:
        root = 'https://archive.apache.org/dist/cassandra/debian/pool/main/c'
        try:
          os.mkdir('downloads')
        except OSError:
          pass

        preferred_version = EXPLICIT_CASSANDRA_VERSION
        cassandra = 'cassandra_{ver}_all.deb'.format(ver=preferred_version)
        tools = 'cassandra-tools_{ver}_all.deb'.format(ver=preferred_version)

        fetch_result = check_fetch(
            '{root}/cassandra/{cassandra}'.format(root=root, cassandra=cassandra))

        with open('downloads/{cassandra}'
                  .format(cassandra=cassandra), 'w') as f:
            f.write(fetch_result.content)

        fetch_result = check_fetch(
            '{root}/cassandra/{tools}'
            .format(root=root, tools=tools))
        with open('downloads/{tools}'
                  .format(tools=tools), 'w') as f:
            f.write(fetch_result.content)

        check_run_and_monitor('sudo dpkg -i downloads/' + cassandra, echo=True)
        check_run_and_monitor('sudo dpkg -i downloads/' + tools, echo=True)
    else:
      check_run_and_monitor(
          'sudo add-apt-repository -s'
          ' "deb http://www.apache.org/dist/cassandra/debian 21x main"',
          echo=True)

    check_run_and_monitor('sudo apt-get -q -y update', echo=True)
    check_install_package('cassandra', version=preferred_version,
                          options=['--force-yes'])
Example No. 25
def install_cassandra(options):
    """Install Cassandra.

    Args:
      options: ArgumentParserNamespace options.
    """
    if not options.cassandra:
        print '--nocassandra skipping Cassandra install.'
        return

    print 'Installing Cassandra...'
    check_options(options)
    preferred_version = None
    if not options.package_manager:
        root = 'https://archive.apache.org/dist/cassandra/debian/pool/main/c'
        try:
            os.mkdir('downloads')
        except OSError:
            pass

        preferred_version = EXPLICIT_CASSANDRA_VERSION
        cassandra = 'cassandra_{ver}_all.deb'.format(ver=preferred_version)
        tools = 'cassandra-tools_{ver}_all.deb'.format(ver=preferred_version)

        fetch_result = check_fetch('{root}/cassandra/{cassandra}'.format(
            root=root, cassandra=cassandra))

        with open('downloads/{cassandra}'.format(cassandra=cassandra),
                  'w') as f:
            f.write(fetch_result.content)

        fetch_result = check_fetch('{root}/cassandra/{tools}'.format(
            root=root, tools=tools))
        with open('downloads/{tools}'.format(tools=tools), 'w') as f:
            f.write(fetch_result.content)

        check_run_and_monitor('sudo dpkg -i downloads/' + cassandra, echo=True)
        check_run_and_monitor('sudo dpkg -i downloads/' + tools, echo=True)
    else:
        check_run_and_monitor(
            'sudo add-apt-repository -s'
            ' "deb http://www.apache.org/dist/cassandra/debian 21x main"',
            echo=True)

    check_run_and_monitor('sudo apt-get -q -y update', echo=True)
    check_install_package('cassandra',
                          version=preferred_version,
                          options=['--force-yes'])
Example No. 26
  def do_create_vm(self, options):
    """Implements GenericVmValidateBomDeployer interface."""
    pem_basename = os.path.basename(options.deploy_aws_pem_path)
    key_pair_name = os.path.splitext(pem_basename)[0]
    logging.info('Creating "%s" with key-pair "%s"',
                 options.deploy_aws_name, key_pair_name)

    response = check_run_and_monitor(
        'aws ec2 run-instances'
        ' --profile {region}'
        ' --output json'
        ' --count 1'
        ' --image-id {ami}'
        ' --instance-type {type}'
        ' --key-name {key_pair_name}'
        ' --security-group-ids {sg}'
        .format(region=options.deploy_aws_region,
                ami=options.deploy_aws_ami,
                type='t2.xlarge',  # 4 core x 16G
                key_pair_name=key_pair_name,
                sg=options.deploy_aws_security_group),
        echo=False)
    doc = json.JSONDecoder().decode(response.stdout)
    self.__instance_id = doc["Instances"][0]["InstanceId"]
    logging.info('Created instance id=%s to tag as "%s"',
                 self.__instance_id, options.deploy_aws_name)

    # It's slow to start up and sometimes there is a race condition
    # in which describe-instances doesn't know about our id even though
    # create-tags did, or create-tags doesn't know about the new id.
    time.sleep(5)
    end_time = time.time() + 10*60
    did_tag = False
    while time.time() < end_time:
      if not did_tag:
        tag_response = run_quick(
            'aws ec2 create-tags'
            ' --region {region}'
            ' --resources {instance_id}'
            ' --tags "Key=Name,Value={name}"'
            .format(region=options.deploy_aws_region,
                    instance_id=self.__instance_id,
                    name=options.deploy_aws_name),
            echo=False)
        did_tag = tag_response.returncode == 0
      if self.__is_ready():
        return
      time.sleep(5)
    raise RuntimeError('Giving up waiting for deployment.')
Example No. 27
  def do_create_vm(self, options):
    """Implements GenericVmValidateBomDeployer interface."""
    pem_basename = os.path.basename(options.deploy_aws_pem_path)
    key_pair_name = os.path.splitext(pem_basename)[0]
    logging.info('Creating "%s" with key-pair "%s"',
                 options.deploy_aws_name, key_pair_name)

    response = check_run_and_monitor(
        'aws ec2 run-instances'
        ' --profile {region}'
        ' --output json'
        ' --count 1'
        ' --image-id {ami}'
        ' --instance-type {type}'
        ' --key-name {key_pair_name}'
        ' --security-group-ids {sg}'
        .format(region=options.deploy_aws_region,
                ami=options.deploy_aws_ami,
                type='t2.xlarge',  # 4 core x 16G
                key_pair_name=key_pair_name,
                sg=options.deploy_aws_security_group),
        echo=False)
    doc = json.JSONDecoder().decode(response.stdout)
    self.__instance_id = doc["Instances"][0]["InstanceId"]
    logging.info('Created instance id=%s to tag as "%s"',
                 self.__instance_id, options.deploy_aws_name)

    # It's slow to start up and sometimes there is a race condition
    # in which describe-instances doesn't know about our id even though
    # create-tags did, or create-tags doesn't know about the new id.
    time.sleep(5)
    end_time = time.time() + 10*60
    did_tag = False
    while time.time() < end_time:
      if not did_tag:
        tag_response = run_quick(
            'aws ec2 create-tags'
            ' --region {region}'
            ' --resources {instance_id}'
            ' --tags "Key=Name,Value={name}"'
            .format(region=options.deploy_aws_region,
                    instance_id=self.__instance_id,
                    name=options.deploy_aws_name),
            echo=False)
        did_tag = tag_response.returncode == 0
      if self.__is_ready():
        return
      time.sleep(5)
    raise RuntimeError('Giving up waiting for deployment.')
Example No. 28
  def do_create_vm(self, options):
    """Implements GenericVmValidateBomDeployer interface."""
    logging.info('Creating "%s" in resource-group "%s"',
                 options.deploy_azure_name,
                 options.deploy_azure_resource_group)

    response = check_run_and_monitor(
        'az vm create'
        ' --name {name}'
        ' --resource-group {rg}'
        ' --location {location}'
        ' --image Canonical:UbuntuServer:14.04.5-LTS:latest'
        ' --use-unmanaged-disk'
        ' --storage-sku Standard_LRS'
        ' --size Standard_D12_v2_Promo'
        ' --ssh-key-value {ssh_key_path}.pub'
        .format(name=options.deploy_azure_name,
                rg=options.deploy_azure_resource_group,
                location=options.deploy_azure_location,
                ssh_key_path=self.ssh_key_path))
    self.set_instance_ip(json.JSONDecoder().decode(
        response.stdout)['publicIpAddress'])
Example No. 29
    def do_create_vm(self, options):
        """Implements GenericVmValidateBomDeployer interface."""
        logging.info('Creating "%s" in resource-group "%s"',
                     options.deploy_azure_name,
                     options.deploy_azure_resource_group)

        response = check_run_and_monitor(
            'az vm create'
            ' --name {name}'
            ' --resource-group {rg}'
            ' --location {location}'
            ' --image Canonical:UbuntuServer:14.04.5-LTS:latest'
            ' --use-unmanaged-disk'
            ' --storage-sku Standard_LRS'
            ' --size Standard_D12_v2_Promo'
            ' --ssh-key-value {ssh_key_path}.pub'.format(
                name=options.deploy_azure_name,
                rg=options.deploy_azure_resource_group,
                location=options.deploy_azure_location,
                ssh_key_path=self.ssh_key_path))
        self.instance_ip = json.JSONDecoder().decode(
            response.stdout)['publicIpAddress']
Example No. 30
def install_java(options, which='jre'):
    """Install java.

    TODO(ewiseblatt):
    This requires a package manager, but only because I'm not sure how
    to install it without one. If you are not using a package manager,
    then version 1.8 must already be installed.

    Args:
      options: ArgumentParserNamespace options.
      which: Install either 'jre' or 'jdk'.
    """
    if not options.jdk:
        print '--nojdk skipping Java install.'
        return

    if which != 'jre' and which != 'jdk':
        raise ValueError('Expected which=(jdk|jre)')

    check_options(options)
    if not options.package_manager:
        msg = check_java_version()
        if msg:
          sys.stderr.write(
              ('{msg}\nSorry, Java must already be installed using the'
               ' package manager.\n'.format(msg=msg)))
          raise SystemExit('Java must already be installed.')
        else:
          print 'Using existing java.'
          return

    print 'Installing OpenJdk...'
    check_run_and_monitor('sudo add-apt-repository -y ppa:openjdk-r/ppa',
                          echo=True)
    check_run_and_monitor('sudo apt-get -y update', echo=True)

    check_install_package('openjdk-8-{which}'.format(which=which),
                          version=EXPLICIT_OPENJDK_8_VERSION)
    cmd = ['sudo', 'update-java-alternatives']
    if which == 'jre':
        cmd.append('--jre')
    cmd.extend(['-s', '/usr/lib/jvm/java-1.8.0-openjdk-amd64'])
    check_run_and_monitor(' '.join(cmd), echo=True)
Example No. 31
def install_java(options, which='jre'):
    """Install java.

    TODO(ewiseblatt):
    This requires a package manager, but only because I'm not sure how
    to install it without one. If you are not using a package manager,
    then version 1.8 must already be installed.

    Args:
      options: ArgumentParserNamespace options.
      which: Install either 'jre' or 'jdk'.
    """
    if not options.jdk:
        print '--nojdk skipping Java install.'
        return

    if which != 'jre' and which != 'jdk':
        raise ValueError('Expected which=(jdk|jre)')

    check_options(options)
    if not options.package_manager:
        msg = check_java_version()
        if msg:
            sys.stderr.write(
                ('{msg}\nSorry, Java must already be installed using the'
                 ' package manager.\n'.format(msg=msg)))
            raise SystemExit('Java must already be installed.')
        else:
            print 'Using existing java.'
            return

    print 'Installing OpenJdk...'
    check_run_and_monitor('sudo add-apt-repository -y ppa:openjdk-r/ppa',
                          echo=True)
    check_run_and_monitor('sudo apt-get -y update', echo=True)

    check_install_package('openjdk-8-{which}'.format(which=which),
                          version=EXPLICIT_OPENJDK_8_VERSION)
    cmd = ['sudo', 'update-java-alternatives']
    if which == 'jre':
        cmd.append('--jre')
    cmd.extend(['-s', '/usr/lib/jvm/java-1.8.0-openjdk-amd64'])
    check_run_and_monitor(' '.join(cmd), echo=True)
Example No. 32
def install_awscli(options):
  if not options.awscli:
      return
  print 'Installing AWS CLI'
  check_run_and_monitor('sudo apt-get install -y awscli', echo=True)
Example No. 33
def install_build_tools(options):
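  # Install git, zip, and the build-essential toolchain, then NVM for Node.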
  check_run_and_monitor('sudo apt-get update')
  check_run_and_monitor('sudo apt-get install -y git')
  check_run_and_monitor('sudo apt-get install -y zip')
  check_run_and_monitor('sudo apt-get install -y build-essential')
  install_nvm(options)
Example No. 34
 def checkout_branch_as_hash(self):
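      # Resolve HEAD to its commit hash, then check out that hash (detached HEAD).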
     hash = check_run_and_monitor(
         'git -C {path} rev-parse HEAD'.format(path=self.path), echo=True)
     check_run_and_monitor('git -C {path} checkout {hash}'.format(
         path=self.path, hash=hash.stdout),
                           echo=True)
Example No. 35
    def do_deploy(self, script, files_to_upload):
        """Implements the BaseBomValidateDeployer interface."""
        options = self.options
        ensure_empty_ssh_key(self.__ssh_key_path, self.hal_user)

        script_parts = []
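        # Prepend chmod/chown commands so the hal user owns each uploaded file before the main script runs.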
        for path in files_to_upload:
            filename = os.path.basename(path)
            script_parts.append('sudo chmod 600 {file}'.format(file=filename))
            script_parts.append('sudo chown {user}:{user} {file}'.format(
                user=self.hal_user, file=filename))

        script_parts.extend(script)
        script_path = write_script_to_path(script_parts, path=None)
        files_to_upload.add(script_path)
        if options.jenkins_master_name:
            write_data_to_secure_path(
                os.environ.get('JENKINS_MASTER_PASSWORD'),
                path=os.path.join(
                    os.sep, 'tmp', 'jenkins_{name}_password'.format(
                        name=options.jenkins_master_name)),
                is_script=True)

        try:
            self.do_create_vm(options)

            copy_files = ('scp'
                          ' -i {ssh_key_path}'
                          ' -o StrictHostKeyChecking=no'
                          ' -o UserKnownHostsFile=/dev/null'
                          ' {files}'
                          ' {user}@{ip}:~'.format(
                              ssh_key_path=self.__ssh_key_path,
                              files=' '.join(files_to_upload),
                              user=self.hal_user,
                              ip=self.instance_ip))
            logging.info('Copying files %s', copy_files)

            # pylint: disable=unused-variable
            for retry in range(0, 10):
                result = run_quick(copy_files)
                if result.returncode == 0:
                    break
                time.sleep(2)

            if result.returncode != 0:
                check_run_quick(copy_files)
        except Exception as ex:
            logging.error('Caught %s', ex)
            raise
        finally:
            os.remove(script_path)

        try:
            logging.info('Waiting for ssh...')
            end_time = time.time() + 30
            logging.info('Entering while %f < %f', time.time(), end_time)
            while time.time() < end_time:
                logging.info('Running quick...')
                ready_response = run_quick('ssh'
                                           ' -i {ssh_key}'
                                           ' -o StrictHostKeyChecking=no'
                                           ' -o UserKnownHostsFile=/dev/null'
                                           ' {user}@{ip}'
                                           ' "exit 0"'.format(
                                               user=self.hal_user,
                                               ip=self.instance_ip,
                                               ssh_key=self.__ssh_key_path),
                                           echo=False)
                logging.info('got %s', ready_response)
                if ready_response.returncode == 0:
                    logging.info('ssh is ready.')
                    break
                logging.info('ssh not yet ready...')
                time.sleep(1)

            logging.info('Running install script')
            check_run_and_monitor(
                'ssh'
                ' -i {ssh_key}'
                ' -o StrictHostKeyChecking=no'
                ' -o UserKnownHostsFile=/dev/null'
                ' {user}@{ip}'
                ' ./{script_name}'.format(
                    user=self.hal_user,
                    ip=self.instance_ip,
                    ssh_key=self.__ssh_key_path,
                    script_name=os.path.basename(script_path)))
        except RuntimeError as error:
            logging.error('Caught runtime error: %s', error)
            raise RuntimeError('Halyard deployment failed.')
        except Exception as ex:
            print str(ex)
            logging.exception('Unexpected exception: %s', ex)
            raise
Example No. 36
 def checkout_branch_as_hash(self):
   hash = check_run_and_monitor('git -C {path} rev-parse HEAD'.format(path=self.path), echo=True)
   check_run_and_monitor('git -C {path} checkout {hash}'.format(path=self.path, hash=hash.stdout), echo=True)
Example No. 37
    def do_deploy(self, script, files_to_upload):
        """Implements the BaseBomValidateDeployer interface."""
        options = self.options
        ensure_empty_ssh_key(self.__ssh_key_path, self.__hal_user)

        script_path = write_script_to_path(script, path=None)
        files_to_upload.add(script_path)
        if options.jenkins_master_name:
            write_data_to_secure_path(
                os.environ.get('JENKINS_MASTER_PASSWORD'),
                path=os.path.join(
                    os.sep, 'tmp', 'jenkins_{name}_password'.format(
                        name=options.jenkins_master_name)),
                is_script=True)

        try:
            logging.info('Creating "%s" in project "%s"',
                         options.google_deploy_instance,
                         options.google_deploy_project)
            with open(self.__ssh_key_path + '.pub', 'r') as f:
                ssh_key = f.read().strip()
            if ssh_key.startswith('ssh-rsa'):
                ssh_key = self.__hal_user + ':' + ssh_key

            check_run_and_monitor(
                'gcloud compute instances create'
                ' --account {gcloud_account}'
                ' --machine-type n1-standard-4'
                ' --image-family ubuntu-1404-lts'
                ' --image-project ubuntu-os-cloud'
                ' --metadata block-project-ssh-keys=TRUE,ssh-keys="{ssh_key}"'
                ' --project {project} --zone {zone}'
                ' --scopes {scopes}'
                ' {instance}'.format(
                    gcloud_account=options.deploy_hal_google_service_account,
                    project=options.google_deploy_project,
                    zone=options.google_deploy_zone,
                    scopes='compute-rw,storage-full,logging-write,monitoring',
                    ssh_key=ssh_key,
                    instance=options.google_deploy_instance))
            response = check_run_quick(
                'gcloud compute instances describe'
                ' --account {gcloud_account}'
                ' --project {project} --zone {zone} {instance}'.format(
                    gcloud_account=options.deploy_hal_google_service_account,
                    project=options.google_deploy_project,
                    zone=options.google_deploy_zone,
                    instance=options.google_deploy_instance))
            self.__instance_ip = re.search(r'networkIP: ([0-9\.]+)',
                                           response.stdout).group(1)
            copy_files = ('scp'
                          ' -i {ssh_key}'
                          ' -o StrictHostKeyChecking=no'
                          ' -o UserKnownHostsFile=/dev/null'
                          ' {files} {instance}:~'.format(
                              ssh_key=self.__ssh_key_path,
                              files=' '.join(files_to_upload),
                              instance=self.__instance_ip))
            logging.info('Copying files %s', copy_files)

            # pylint: disable=unused-variable
            for retry in range(0, 10):
                result = run_quick(copy_files)
                if result.returncode == 0:
                    break
                time.sleep(2)

            if result.returncode != 0:
                check_run_quick(copy_files)
        finally:
            os.remove(script_path)

        logging.info('Running install script')
        try:
            check_run_and_monitor(
                'ssh'
                ' -i {ssh_key}'
                ' -o StrictHostKeyChecking=no'
                ' -o UserKnownHostsFile=/dev/null'
                ' {instance}'
                ' "sudo ./{script_name}"'.format(
                    instance=options.google_deploy_instance,
                    ssh_key=self.__ssh_key_path,
                    script_name=os.path.basename(script_path)))
        except RuntimeError as err:
            raise RuntimeError('Halyard deployment failed.')
Example No. 38
  def do_deploy(self, script, files_to_upload):
    """Implements the BaseBomValidateDeployer interface."""
    options = self.options
    ensure_empty_ssh_key(self.__ssh_key_path, self.hal_user)

    script_parts = []
    for path in files_to_upload:
      filename = os.path.basename(path)
      script_parts.append('sudo chmod 600 {file}'.format(file=filename))
      script_parts.append('sudo chown {user}:{user} {file}'
                          .format(user=self.hal_user, file=filename))

    script_parts.extend(script)
    script_path = write_script_to_path(script_parts, path=None)
    files_to_upload.add(script_path)
    if options.jenkins_master_name:
      write_data_to_secure_path(
          os.environ.get('JENKINS_MASTER_PASSWORD'),
          path=os.path.join(os.sep, 'tmp', 'jenkins_{name}_password'
                            .format(name=options.jenkins_master_name)),
          is_script=True)

    try:
      self.do_create_vm(options)

      copy_files = (
          'scp'
          ' -i {ssh_key_path}'
          ' -o StrictHostKeyChecking=no'
          ' -o UserKnownHostsFile=/dev/null'
          ' {files}'
          ' {user}@{ip}:~'
          .format(ssh_key_path=self.__ssh_key_path,
                  files=' '.join(files_to_upload),
                  user=self.hal_user,
                  ip=self.instance_ip))
      logging.info('Copying files %s', copy_files)

      # pylint: disable=unused-variable
      for retry in range(0, 10):
        result = run_quick(copy_files)
        if result.returncode == 0:
          break
        time.sleep(2)

      if result.returncode != 0:
        check_run_quick(copy_files)
    except Exception as ex:
      logging.error('Caught %s', ex)
      raise
    finally:
      os.remove(script_path)

    try:
      logging.info('Waiting for ssh...')
      end_time = time.time() + 30
      logging.info('Entering while %f < %f', time.time(), end_time)
      while time.time() < end_time:
        logging.info('Running quick...')
        ready_response = run_quick(
            'ssh'
            ' -i {ssh_key}'
            ' -o StrictHostKeyChecking=no'
            ' -o UserKnownHostsFile=/dev/null'
            ' {user}@{ip}'
            ' "exit 0"'
            .format(user=self.hal_user,
                    ip=self.instance_ip,
                    ssh_key=self.__ssh_key_path),
            echo=False)
        logging.info('got %s', ready_response)
        if ready_response.returncode == 0:
          logging.info('ssh is ready.')
          break
        logging.info('ssh not yet ready...')
        time.sleep(1)

      logging.info('Running install script')
      check_run_and_monitor(
          'ssh'
          ' -i {ssh_key}'
          ' -o StrictHostKeyChecking=no'
          ' -o UserKnownHostsFile=/dev/null'
          ' {user}@{ip}'
          ' ./{script_name}'
          .format(user=self.hal_user,
                  ip=self.instance_ip,
                  ssh_key=self.__ssh_key_path,
                  script_name=os.path.basename(script_path)))
    except RuntimeError as error:
      logging.error('Caught runtime error: %s', error)
      raise RuntimeError('Halyard deployment failed.')
    except Exception as ex:
      print str(ex)
      logging.exception('Unexpected exception: %s', ex)
      raise
Example No. 39
  def do_deploy(self, script, files_to_upload):
    """Implements the BaseBomValidateDeployer interface."""
    options = self.options
    ensure_empty_ssh_key(self.EMPTY_SSH_KEY)

    script_path = write_script_to_path(script, path=None)
    files_to_upload.add(script_path)
    if options.jenkins_master_name:
      write_data_to_secure_path(
          os.environ.get('JENKINS_MASTER_PASSWORD'),
          path=os.path.join(os.sep, 'tmp', 'jenkins_{name}_password'
                            .format(name=options.jenkins_master_name)),
          is_script=True)

    try:
      logging.info('Creating "%s" in project "%s"',
                   options.google_deploy_instance,
                   options.google_deploy_project)
      check_run_and_monitor(
          'gcloud compute instances create'
          ' --machine-type n1-standard-4'
          ' --image-family ubuntu-1404-lts'
          ' --image-project ubuntu-os-cloud'
          ' --metadata block-project-ssh-keys=TRUE'
          ' --project {project} --zone {zone}'
          ' --scopes {scopes}'
          ' --metadata-from-file ssh-keys={ssh_key}'
          ' {instance}'
          .format(project=options.google_deploy_project,
                  zone=options.google_deploy_zone,
                  scopes='compute-rw,storage-full,logging-write,monitoring',
                  ssh_key=self.EMPTY_SSH_KEY,
                  instance=options.google_deploy_instance))

      copy_files = (
          'gcloud compute copy-files '
          ' --ssh-key-file {ssh_key}'
          ' --project {project} --zone {zone}'
          ' {files} {instance}:.'
          .format(project=options.google_deploy_project,
                  ssh_key=self.EMPTY_SSH_KEY,
                  zone=options.google_deploy_zone,
                  instance=options.google_deploy_instance,
                  files=' '.join(files_to_upload)))
      logging.info('Copying files %s', copy_files)

      # pylint: disable=unused-variable
      for retry in range(0, 10):
        result = run_quick(copy_files)
        if result.returncode == 0:
          break
        time.sleep(2)

      if result.returncode != 0:
        check_run_quick(copy_files)
    finally:
      os.remove(script_path)

    logging.info('Running install script')
    check_run_and_monitor(
        'gcloud compute ssh'
        ' --ssh-key-file {ssh_key}'
        ' --project {project} --zone {zone} {instance}'
        ' --command "sudo ./{script_name}"'
        .format(project=options.google_deploy_project,
                zone=options.google_deploy_zone,
                instance=options.google_deploy_instance,
                ssh_key=self.EMPTY_SSH_KEY,
                script_name=os.path.basename(script_path)))