Beispiel #1
0
  def Run(self, args):
    """This is what gets called when the user runs this command.

    Validates the flag combination, resolves the repository (GitHub vs. Cloud
    Source Repositories, including mirrored repos), verifies the target
    cluster exists, prepares the GCS config staging location, and then
    configures either pull-request previewing or a git-push build trigger.

    Args:
      args: an argparse namespace. All the arguments that were provided to this
        command invocation.

    Returns:
      Some value that we want to have printed later.

    Raises:
      c_exceptions.InvalidArgumentException: if incompatible or malformed flag
        values are supplied.
      c_exceptions.RequiredArgumentException: if a flag required by the chosen
        --repo-type is missing, or if the default staging bucket already
        exists in another project.
    """

    # Pull-request previewing is GitHub-only and is mutually exclusive with
    # --namespace, which applies only to git-push (branch/tag) triggers.
    if args.pull_request_preview:
      if args.repo_type != 'github':
        raise c_exceptions.InvalidArgumentException(
            '--repo-type',
            "Repo type must be 'github' to configure pull request previewing.")
      if args.namespace:
        raise c_exceptions.InvalidArgumentException(
            '--namespace',
            'Namespace must not be provided to configure pull request '
            'previewing. --namespace must only be provided when configuring '
            'automated deployments with the --branch-pattern or --tag-pattern '
            'flags.')
      if args.preview_expiry <= 0:
        raise c_exceptions.InvalidArgumentException(
            '--preview-expiry',
            'Preview expiry must be > 0.')

    # Determine github or csr
    github_repo_name = None
    github_repo_owner = None
    csr_repo_name = None

    if args.repo_type == 'github':
      if not args.repo_owner:
        raise c_exceptions.RequiredArgumentException(
            '--repo-owner',
            'Repo owner is required for --repo-type=github.')
      github_repo_name = args.repo_name
      github_repo_owner = args.repo_owner
      # We do not have to verify that this repo exists because the request to
      # create the BuildTrigger will fail with the appropriate message asking
      # the user to connect their repo, if the repo is not found.

    elif args.repo_type == 'csr':
      if args.repo_owner:
        raise c_exceptions.InvalidArgumentException(
            '--repo-owner',
            'Repo owner must not be provided for --repo-type=csr.')
      csr_repo_name = args.repo_name
      self._VerifyCSRRepoExists(csr_repo_name)

    elif args.repo_type == 'bitbucket_mirrored':
      if not args.repo_owner:
        raise c_exceptions.RequiredArgumentException(
            '--repo-owner',
            'Repo owner is required for --repo-type=bitbucket_mirrored.')
      # Mirrored repos live in CSR under a conventional '<type>_<owner>_<name>'
      # repo name.
      csr_repo_name = 'bitbucket_{}_{}'.format(args.repo_owner, args.repo_name)
      self._VerifyBitbucketCSRRepoExists(
          csr_repo_name, args.repo_owner, args.repo_name)

    elif args.repo_type == 'github_mirrored':
      if not args.repo_owner:
        raise c_exceptions.RequiredArgumentException(
            '--repo-owner',
            'Repo owner is required for --repo-type=github_mirrored.')
      csr_repo_name = 'github_{}_{}'.format(args.repo_owner, args.repo_name)
      self._VerifyGitHubCSRRepoExists(
          csr_repo_name, args.repo_owner, args.repo_name)

    self._VerifyClusterExists(args.cluster, args.location)

    # Determine app_name; fall back to the repo name when --app-name is unset.
    if args.app_name:
      app_name = args.app_name
    else:
      app_name = args.repo_name

    # Determine gcs_config_staging_dir_bucket, gcs_config_staging_dir_object
    if args.gcs_config_staging_dir is None:
      gcs_config_staging_dir_bucket = \
        staging_bucket_util.GetDefaultStagingBucket()
      gcs_config_staging_dir_object = 'deploy/config'
    else:
      # Try to parse the flag as a bucket+object path first; fall back to a
      # bucket-only URI (object part absent) when that parse fails.
      try:
        gcs_config_staging_dir_ref = resources.REGISTRY.Parse(
            args.gcs_config_staging_dir, collection='storage.objects')
        gcs_config_staging_dir_object = gcs_config_staging_dir_ref.object
      except resources.WrongResourceCollectionException:
        gcs_config_staging_dir_ref = resources.REGISTRY.Parse(
            args.gcs_config_staging_dir, collection='storage.buckets')
        gcs_config_staging_dir_object = None
      gcs_config_staging_dir_bucket = gcs_config_staging_dir_ref.bucket

    gcs_client = storage_api.StorageClient()
    gcs_client.CreateBucketIfNotExists(gcs_config_staging_dir_bucket)

    # If we are using a default bucket check that it is owned by user project
    # (b/33046325)
    if (args.gcs_config_staging_dir is None
        and not staging_bucket_util.BucketIsInProject(
            gcs_client, gcs_config_staging_dir_bucket)):
      raise c_exceptions.RequiredArgumentException(
          '--gcs-config-staging-dir',
          'A bucket with name {} already exists and is owned by '
          'another project. Specify a bucket using '
          '--gcs-config-staging-dir.'.format(gcs_config_staging_dir_bucket))

    if gcs_config_staging_dir_object:
      gcs_config_staging_path = '{}/{}'.format(
          gcs_config_staging_dir_bucket, gcs_config_staging_dir_object)
    else:
      gcs_config_staging_path = gcs_config_staging_dir_bucket

    # Print the IAM command the user must run themselves; this command does
    # not modify the project's IAM policy.
    project = properties.VALUES.core.project.Get(required=True)
    project_number = projects_util.GetProjectNumber(project)
    cloudbuild_service_account = '{}@cloudbuild.gserviceaccount.com'.format(
        project_number)
    log.status.Print(
        'Add the roles/container.developer role to your Cloud Build '
        'service agent account, if you have not already done so. This allows '
        'the account to deploy to your cluster:\n\n'
        'gcloud projects add-iam-policy-binding {project} '
        '--member=serviceAccount:{service_account_email} '
        '--role=roles/container.developer --project={project}\n'.format(
            project=project,
            service_account_email=cloudbuild_service_account
        ))

    # Dispatch to the appropriate configuration path. For PR preview only the
    # GitHub repo fields are relevant (validated above); for git-push triggers
    # either the CSR or GitHub fields are set depending on --repo-type.
    if args.pull_request_preview:
      self._ConfigurePRPreview(
          repo_owner=github_repo_owner,
          repo_name=github_repo_name,
          pull_request_pattern=args.pull_request_pattern,
          preview_expiry=args.preview_expiry,
          comment_control=args.comment_control,
          dockerfile_path=args.dockerfile,
          app_name=app_name,
          config_path=args.config,
          expose_port=args.expose,
          gcs_config_staging_path=gcs_config_staging_path,
          cluster=args.cluster,
          location=args.location)
    else:
      self._ConfigureGitPushBuildTrigger(
          repo_type=args.repo_type,
          csr_repo_name=csr_repo_name,
          github_repo_owner=github_repo_owner,
          github_repo_name=github_repo_name,
          branch_pattern=args.branch_pattern,
          tag_pattern=args.tag_pattern,
          dockerfile_path=args.dockerfile,
          app_name=app_name,
          config_path=args.config,
          namespace=args.namespace,
          expose_port=args.expose,
          gcs_config_staging_path=gcs_config_staging_path,
          cluster=args.cluster,
          location=args.location)
Beispiel #2
0
    def Run(self, args):
        """This is what gets called when the user runs this command.

        Validates source-related flags, resolves the container image, app name
        and app version, stages the source (unless --no-source), builds the
        build config, and submits the build.

        Args:
          args: an argparse namespace. All the arguments that were provided to
            this command invocation.

        Returns:
          Some value that we want to have printed later.

        Raises:
          FailedDeployException: If the build is completed and not 'SUCCESS'.
          c_exceptions.InvalidArgumentException: if source flags conflict or
            --expose is negative.
          c_exceptions.RequiredArgumentException: if --no-source is combined
            with flags that require a source, or if the default staging bucket
            already exists in another project.
        """

        if not args.source and not args.no_source:
            raise c_exceptions.InvalidArgumentException(
                '--no-source', 'To omit source, use the --no-source flag.')

        # --no-source conflicts with flags that only make sense with a source.
        if args.no_source:
            if args.tag:
                raise c_exceptions.RequiredArgumentException(
                    'SOURCE', 'Source is required to build container image.')
            if args.config:
                raise c_exceptions.RequiredArgumentException(
                    'SOURCE',
                    'Source is required when specifying --config because it is a '
                    'relative path in the source directory.')

        # If a prebuilt --image was given and there is no config to stage, no
        # source is needed at all.
        do_build_and_push = args.image is None
        if not do_build_and_push and not args.config:
            args.no_source = True

        image = self._DetermineImageFromArgs(args)

        # Determine app_name; fall back to the image's short name.
        if args.app_name:
            app_name = args.app_name
        else:
            app_name = self._ImageName(image)

        # Determine app_version. Priority: explicit flag > image tag > git
        # HEAD commit SHA of a clean GitHub working tree.
        app_version = None
        # A ':' without '@' means 'name:tag' form (not a digest reference).
        image_has_tag = '@' not in image and ':' in image
        if args.app_version:
            app_version = args.app_version
        elif image_has_tag:
            app_version = image.split(':')[-1]  # Set version to tag
        elif args.source:
            if git.IsGithubRepository(
                    args.source) and not git.HasPendingChanges(args.source):
                commit_sha = git.GetGitHeadRevision(args.source)
                if commit_sha:
                    app_version = commit_sha

        # Validate expose
        # NOTE(review): port 0 passes this check (only negatives rejected) —
        # confirm whether 0 is intentionally allowed.
        if args.expose and args.expose < 0:
            raise c_exceptions.InvalidArgumentException(
                '--expose', 'port number is invalid')

        # Determine gcs_staging_dir_bucket and gcs_staging_dir_object
        if args.gcs_staging_dir is None:
            gcs_staging_dir_bucket = staging_bucket_util.GetDefaultStagingBucket(
            )
            gcs_staging_dir_object = 'deploy'
        else:
            # Parse as bucket+object first; fall back to a bucket-only URI.
            try:
                gcs_staging_dir_ref = resources.REGISTRY.Parse(
                    args.gcs_staging_dir, collection='storage.objects')
                gcs_staging_dir_object = gcs_staging_dir_ref.object
            except resources.WrongResourceCollectionException:
                gcs_staging_dir_ref = resources.REGISTRY.Parse(
                    args.gcs_staging_dir, collection='storage.buckets')
                gcs_staging_dir_object = None
            gcs_staging_dir_bucket = gcs_staging_dir_ref.bucket

        gcs_client = storage_api.StorageClient()
        try:
            gcs_client.CreateBucketIfNotExists(
                gcs_staging_dir_bucket,
                check_ownership=args.gcs_staging_dir is None)
        except storage_api.BucketInWrongProjectError:
            # If we're using the default bucket but it already exists in a different
            # project, then it could belong to a malicious attacker (b/33046325).
            raise c_exceptions.RequiredArgumentException(
                '--gcs-staging-dir',
                'A bucket with name {} already exists and is owned by '
                'another project. Specify a bucket using '
                '--gcs-staging-dir.'.format(gcs_staging_dir_bucket))

        if gcs_staging_dir_object:
            gcs_config_staging_path = '{}/{}/config'.format(
                gcs_staging_dir_bucket, gcs_staging_dir_object)
        else:
            gcs_config_staging_path = gcs_staging_dir_bucket

        if not args.no_source:
            staged_source = self._StageSource(args.source,
                                              gcs_staging_dir_bucket,
                                              gcs_staging_dir_object)
        else:
            staged_source = None

        messages = cloudbuild_util.GetMessagesModule()
        build_config = build_util.CreateBuild(
            messages,
            build_timeout=properties.VALUES.builds.timeout.Get(),
            build_and_push=do_build_and_push,
            staged_source=staged_source,
            image=image,
            dockerfile_path='Dockerfile',
            app_name=app_name,
            app_version=app_version,
            config_path=args.config,
            namespace=args.namespace,
            expose_port=args.expose,
            gcs_config_staging_path=gcs_config_staging_path,
            cluster=args.cluster,
            location=args.location,
            build_tags=([] if not args.app_name else [args.app_name]))

        client = cloudbuild_util.GetClientInstance()
        self._SubmitBuild(client, messages, build_config,
                          gcs_config_staging_path, args.config is None,
                          args.async_)
Beispiel #3
0
def _SetSource(build_config, messages, is_specified_source, no_source, source,
               gcs_source_staging_dir, ignore_file):
    """Set the source for the build config.

    Stages the given source (local file, local directory, or gs:// object) in
    a GCS staging location and records it on build_config.source.

    Args:
      build_config: the Build message whose `source` field is populated.
      messages: Cloud Build messages module used to construct Source and
        StorageSource messages.
      is_specified_source: bool, True if the user explicitly specified a
        source (so --no-source must not override it).
      no_source: bool, True if the user requested a source-less build.
      source: str or None, local path or gs:// URI of the build source.
      gcs_source_staging_dir: str or None, gs:// URI of the staging directory;
        when None a default staging bucket is derived.
      ignore_file: str or None, override for the .gcloudignore file used when
        snapshotting a directory.

    Returns:
      The modified build_config.

    Raises:
      c_exceptions.RequiredArgumentException: if the default staging bucket
        already exists but is owned by another project.
      c_exceptions.InvalidArgumentException: if source is omitted without
        --no-source.
      c_exceptions.BadFileException: if the local source does not exist or has
        a disallowed file extension.
    """
    default_gcs_source = False
    default_bucket_name = None
    if gcs_source_staging_dir is None:
        default_gcs_source = True
        default_bucket_name = staging_bucket_util.GetDefaultStagingBucket()
        gcs_source_staging_dir = 'gs://{}/source'.format(default_bucket_name)
    gcs_client = storage_api.StorageClient()

    # --no-source overrides the default --source.
    if not is_specified_source and no_source:
        source = None

    gcs_source_staging = None
    if source:
        # Preserve the source's own extension for files / gs:// objects;
        # directories are tarballed as .tgz.
        suffix = '.tgz'
        if source.startswith('gs://') or os.path.isfile(source):
            _, suffix = os.path.splitext(source)

        # Next, stage the source to Cloud Storage under a unique name.
        staged_object = '{stamp}-{uuid}{suffix}'.format(
            stamp=times.GetTimeStampFromDateTime(times.Now()),
            uuid=uuid.uuid4().hex,
            suffix=suffix,
        )
        gcs_source_staging_dir = resources.REGISTRY.Parse(
            gcs_source_staging_dir, collection='storage.objects')

        # We create the bucket (if it does not exist) first. If we do an existence
        # check and then create the bucket ourselves, it would be possible for an
        # attacker to get lucky and beat us to creating the bucket. Block on this
        # creation to avoid this race condition.
        gcs_client.CreateBucketIfNotExists(gcs_source_staging_dir.bucket)

        # If no bucket is specified (for the source `default_gcs_source`), check
        # that the default bucket is also owned by the project (b/33046325).
        if default_gcs_source and not staging_bucket_util.BucketIsInProject(
                gcs_client, default_bucket_name):
            raise c_exceptions.RequiredArgumentException(
                'gcs-source-staging-dir',
                'A bucket with name {} already exists and is owned by '
                'another project. Specify a bucket using '
                '--gcs-source-staging-dir.'.format(default_bucket_name))

        if gcs_source_staging_dir.object:
            staged_object = gcs_source_staging_dir.object + '/' + staged_object
        gcs_source_staging = resources.REGISTRY.Create(
            collection='storage.objects',
            bucket=gcs_source_staging_dir.bucket,
            object=staged_object)

        if source.startswith('gs://'):
            # Source already in GCS: server-side copy into the staging object.
            gcs_source = resources.REGISTRY.Parse(source,
                                                  collection='storage.objects')
            staged_source_obj = gcs_client.Rewrite(gcs_source,
                                                   gcs_source_staging)
            build_config.source = messages.Source(
                storageSource=messages.StorageSource(
                    bucket=staged_source_obj.bucket,
                    object=staged_source_obj.name,
                    generation=staged_source_obj.generation,
                ))
        else:
            if not os.path.exists(source):
                raise c_exceptions.BadFileException(
                    'could not find source [{src}]'.format(src=source))
            if os.path.isdir(source):
                source_snapshot = snapshot.Snapshot(source,
                                                    ignore_file=ignore_file)
                size_str = resource_transform.TransformSize(
                    source_snapshot.uncompressed_size)
                log.status.Print(
                    'Creating temporary tarball archive of {num_files} file(s)'
                    ' totalling {size} before compression.'.format(
                        num_files=len(source_snapshot.files), size=size_str))
                staged_source_obj = source_snapshot.CopyTarballToGCS(
                    gcs_client, gcs_source_staging, ignore_file=ignore_file)
                build_config.source = messages.Source(
                    storageSource=messages.StorageSource(
                        bucket=staged_source_obj.bucket,
                        object=staged_source_obj.name,
                        generation=staged_source_obj.generation,
                    ))
            elif os.path.isfile(source):
                unused_root, ext = os.path.splitext(source)
                if ext not in _ALLOWED_SOURCE_EXT:
                    # Bug fix: the '{src}' placeholder was previously never
                    # substituted (the message was built with '+' instead of
                    # str.format), so users saw the literal '[{src}]'.
                    raise c_exceptions.BadFileException(
                        'Local file [{src}] is none of {extensions}'.format(
                            src=source,
                            extensions=', '.join(_ALLOWED_SOURCE_EXT)))
                log.status.Print('Uploading local file [{src}] to '
                                 '[gs://{bucket}/{object}].'.format(
                                     src=source,
                                     bucket=gcs_source_staging.bucket,
                                     object=gcs_source_staging.object,
                                 ))
                staged_source_obj = gcs_client.CopyFileToGCS(
                    source, gcs_source_staging)
                build_config.source = messages.Source(
                    storageSource=messages.StorageSource(
                        bucket=staged_source_obj.bucket,
                        object=staged_source_obj.name,
                        generation=staged_source_obj.generation,
                    ))
    else:
        # No source
        if not no_source:
            raise c_exceptions.InvalidArgumentException(
                '--no-source', 'To omit source, use the --no-source flag.')

    return build_config
Beispiel #4
0
    def Run(self, args):
        """This is what gets called when the user runs this command.

        Builds the build config (from --tag, kaniko, or --config), stages the
        source, submits the build, and optionally streams its logs.

        Args:
          args: an argparse namespace. All the arguments that were provided to
            this command invocation.

        Returns:
          Some value that we want to have printed later.

        Raises:
          FailedBuildException: If the build is completed and not 'SUCCESS'.
          c_exceptions.InvalidArgumentException: for malformed or conflicting
            flag values.
          c_exceptions.RequiredArgumentException: if the default staging
            bucket already exists in another project.
          c_exceptions.OneOfArgumentsRequiredException: if neither --tag nor
            --config is given.
          c_exceptions.BadFileException: if the local source is missing or has
            a disallowed extension.
        """

        default_gcs_source = False
        default_bucket_name = None
        if args.gcs_source_staging_dir is None:
            default_gcs_source = True
            default_bucket_name = staging_bucket_util.GetDefaultStagingBucket()
            args.gcs_source_staging_dir = 'gs://{}/source'.format(
                default_bucket_name)

        client = cloudbuild_util.GetClientInstance()
        messages = cloudbuild_util.GetMessagesModule()

        gcs_client = storage_api.StorageClient()

        # First, create the build request.
        build_timeout = properties.VALUES.builds.timeout.Get()

        if build_timeout is not None:
            try:
                # A bare number is interpreted as seconds.
                build_timeout_secs = int(build_timeout)
            except ValueError:
                build_timeout_duration = times.ParseDuration(build_timeout)
                build_timeout_secs = int(build_timeout_duration.total_seconds)
            timeout_str = six.text_type(build_timeout_secs) + 's'
        else:
            timeout_str = None

        if args.tag is not None:
            if (properties.VALUES.builds.check_tag.GetBool()
                    and 'gcr.io/' not in args.tag):
                raise c_exceptions.InvalidArgumentException(
                    '--tag',
                    'Tag value must be in the gcr.io/* or *.gcr.io/* namespace.'
                )
            if properties.VALUES.builds.use_kaniko.GetBool():
                # kaniko build: a single kaniko step builds and pushes; cache
                # TTL of 0h effectively disables the layer cache.
                if args.no_cache:
                    ttl = '0h'
                else:
                    ttl = '{}h'.format(
                        properties.VALUES.builds.kaniko_cache_ttl.Get())
                build_config = messages.Build(
                    steps=[
                        messages.BuildStep(
                            name=properties.VALUES.builds.kaniko_image.Get(),
                            args=[
                                '--destination',
                                args.tag,
                                '--cache',
                                '--cache-ttl',
                                ttl,
                                '--cache-dir',
                                '',
                            ],
                        ),
                    ],
                    timeout=timeout_str,
                    substitutions=cloudbuild_util.EncodeSubstitutions(
                        args.substitutions, messages))
            else:
                if args.no_cache:
                    raise c_exceptions.InvalidArgumentException(
                        'no-cache',
                        'Cannot specify --no-cache if builds/use_kaniko property is '
                        'False')
                # Plain docker build step; the built image is pushed via the
                # Build message's `images` field.
                build_config = messages.Build(
                    images=[args.tag],
                    steps=[
                        messages.BuildStep(
                            name='gcr.io/cloud-builders/docker',
                            args=[
                                'build', '--network', 'cloudbuild',
                                '--no-cache', '-t', args.tag, '.'
                            ],
                        ),
                    ],
                    timeout=timeout_str,
                    substitutions=cloudbuild_util.EncodeSubstitutions(
                        args.substitutions, messages))
        elif args.config is not None:
            if args.no_cache:
                raise c_exceptions.ConflictingArgumentsException(
                    '--config', '--no-cache')
            if not args.config:
                raise c_exceptions.InvalidArgumentException(
                    '--config', 'Config file path must not be empty.')
            build_config = config.LoadCloudbuildConfigFromPath(
                args.config, messages, params=args.substitutions)
        else:
            raise c_exceptions.OneOfArgumentsRequiredException(
                ['--tag', '--config'],
                'Requires either a docker tag or a config file.')

        # If timeout was set by flag, overwrite the config file.
        if timeout_str:
            build_config.timeout = timeout_str

        # --no-source overrides the default --source.
        if not args.IsSpecified('source') and args.no_source:
            args.source = None

        gcs_source_staging = None
        if args.source:
            # Preserve the source's extension for files / gs:// objects;
            # directories are tarballed as .tgz.
            suffix = '.tgz'
            if args.source.startswith('gs://') or os.path.isfile(args.source):
                _, suffix = os.path.splitext(args.source)

            # Next, stage the source to Cloud Storage under a unique name.
            staged_object = '{stamp}-{uuid}{suffix}'.format(
                stamp=times.GetTimeStampFromDateTime(times.Now()),
                uuid=uuid.uuid4().hex,
                suffix=suffix,
            )
            gcs_source_staging_dir = resources.REGISTRY.Parse(
                args.gcs_source_staging_dir, collection='storage.objects')

            # We create the bucket (if it does not exist) first. If we do an existence
            # check and then create the bucket ourselves, it would be possible for an
            # attacker to get lucky and beat us to creating the bucket. Block on this
            # creation to avoid this race condition.
            gcs_client.CreateBucketIfNotExists(gcs_source_staging_dir.bucket)

            # If no bucket is specified (for the source `default_gcs_source`), check
            # that the default bucket is also owned by the project (b/33046325).
            if default_gcs_source and not staging_bucket_util.BucketIsInProject(
                    gcs_client, default_bucket_name):
                raise c_exceptions.RequiredArgumentException(
                    'gcs-source-staging-dir',
                    'A bucket with name {} already exists and is owned by '
                    'another project. Specify a bucket using '
                    '--gcs-source-staging-dir.'.format(default_bucket_name))

            if gcs_source_staging_dir.object:
                staged_object = gcs_source_staging_dir.object + '/' + staged_object
            gcs_source_staging = resources.REGISTRY.Create(
                collection='storage.objects',
                bucket=gcs_source_staging_dir.bucket,
                object=staged_object)

            if args.source.startswith('gs://'):
                # Source already in GCS: server-side copy into staging.
                gcs_source = resources.REGISTRY.Parse(
                    args.source, collection='storage.objects')
                staged_source_obj = gcs_client.Rewrite(gcs_source,
                                                       gcs_source_staging)
                build_config.source = messages.Source(
                    storageSource=messages.StorageSource(
                        bucket=staged_source_obj.bucket,
                        object=staged_source_obj.name,
                        generation=staged_source_obj.generation,
                    ))
            else:
                if not os.path.exists(args.source):
                    raise c_exceptions.BadFileException(
                        'could not find source [{src}]'.format(
                            src=args.source))
                if os.path.isdir(args.source):
                    source_snapshot = snapshot.Snapshot(
                        args.source, ignore_file=args.ignore_file)
                    size_str = resource_transform.TransformSize(
                        source_snapshot.uncompressed_size)
                    log.status.Print(
                        'Creating temporary tarball archive of {num_files} file(s)'
                        ' totalling {size} before compression.'.format(
                            num_files=len(source_snapshot.files),
                            size=size_str))
                    staged_source_obj = source_snapshot.CopyTarballToGCS(
                        gcs_client,
                        gcs_source_staging,
                        ignore_file=args.ignore_file)
                    build_config.source = messages.Source(
                        storageSource=messages.StorageSource(
                            bucket=staged_source_obj.bucket,
                            object=staged_source_obj.name,
                            generation=staged_source_obj.generation,
                        ))
                elif os.path.isfile(args.source):
                    unused_root, ext = os.path.splitext(args.source)
                    if ext not in _ALLOWED_SOURCE_EXT:
                        # Bug fix: the '{src}' placeholder was previously never
                        # substituted (message built with '+' instead of
                        # str.format), so users saw the literal '[{src}]'.
                        raise c_exceptions.BadFileException(
                            'Local file [{src}] is none of {extensions}'.format(
                                src=args.source,
                                extensions=', '.join(_ALLOWED_SOURCE_EXT)))
                    log.status.Print('Uploading local file [{src}] to '
                                     '[gs://{bucket}/{object}].'.format(
                                         src=args.source,
                                         bucket=gcs_source_staging.bucket,
                                         object=gcs_source_staging.object,
                                     ))
                    staged_source_obj = gcs_client.CopyFileToGCS(
                        args.source, gcs_source_staging)
                    build_config.source = messages.Source(
                        storageSource=messages.StorageSource(
                            bucket=staged_source_obj.bucket,
                            object=staged_source_obj.name,
                            generation=staged_source_obj.generation,
                        ))
        else:
            # No source
            if not args.no_source:
                raise c_exceptions.InvalidArgumentException(
                    '--no-source', 'To omit source, use the --no-source flag.')

        if args.gcs_log_dir:
            # NOTE(review): a bucket-only --gcs-log-dir would leave `.object`
            # unset here — confirm the flag always includes an object path.
            gcs_log_dir = resources.REGISTRY.Parse(
                args.gcs_log_dir, collection='storage.objects')

            build_config.logsBucket = ('gs://' + gcs_log_dir.bucket + '/' +
                                       gcs_log_dir.object)

        # Machine type.
        if args.machine_type is not None:
            machine_type = flags.GetMachineType(args.machine_type)
            if not build_config.options:
                build_config.options = messages.BuildOptions()
            build_config.options.machineType = machine_type

        # Disk size.
        if args.disk_size is not None:
            disk_size = compute_utils.BytesToGb(args.disk_size)
            if not build_config.options:
                build_config.options = messages.BuildOptions()
            build_config.options.diskSizeGb = int(disk_size)

        log.debug('submitting build: ' + repr(build_config))

        # Start the build.
        op = client.projects_builds.Create(
            messages.CloudbuildProjectsBuildsCreateRequest(
                build=build_config,
                projectId=properties.VALUES.core.project.Get()))
        # Renamed from `json` to avoid shadowing the stdlib module name.
        metadata_json = encoding.MessageToJson(op.metadata)
        build = encoding.JsonToMessage(messages.BuildOperationMetadata,
                                       metadata_json).build

        build_ref = resources.REGISTRY.Create(
            collection='cloudbuild.projects.builds',
            projectId=build.projectId,
            id=build.id)

        log.CreatedResource(build_ref)
        if build.logUrl:
            log.status.Print('Logs are available at [{log_url}].'.format(
                log_url=build.logUrl))
        else:
            log.status.Print('Logs are available in the Cloud Console.')

        # If the command is run --async, we just print out a reference to the build.
        if args.async_:
            return build

        mash_handler = execution.MashHandler(
            execution.GetCancelBuildHandler(client, messages, build_ref))

        # Otherwise, logs are streamed from GCS.
        with execution_utils.CtrlCSection(mash_handler):
            build = cb_logs.CloudBuildClient(client,
                                             messages).Stream(build_ref)

        if build.status == messages.Build.StatusValueValuesEnum.TIMEOUT:
            log.status.Print(
                'Your build timed out. Use the [--timeout=DURATION] flag to change '
                'the timeout threshold.')

        if build.status != messages.Build.StatusValueValuesEnum.SUCCESS:
            raise FailedBuildException(build)

        return build
Beispiel #5
0
def _SetSource(build_config,
               messages,
               is_specified_source,
               no_source,
               source,
               gcs_source_staging_dir,
               ignore_file,
               hide_logs=False):
  """Set the source for the build config.

  Stages the given source (local file, local directory, or gs:// object)
  into a Cloud Storage staging location and records it on the build config.

  Args:
    build_config: A messages.Build object; its `source` field is populated.
    messages: The Cloud Build messages module.
    is_specified_source: bool, whether the user explicitly passed --source.
    no_source: bool, whether the user passed --no-source.
    source: str or None, a local path or gs:// URI of the build source.
    gcs_source_staging_dir: str or None, gs:// directory in which to stage
      the source. When None, a default per-project staging bucket is used.
    ignore_file: str or None, override for the .gcloudignore file.
    hide_logs: bool, suppress progress messages when True.

  Returns:
    The modified build_config.

  Raises:
    BucketForbiddenError: The staging bucket cannot be accessed.
    c_exceptions.RequiredArgumentException: The default staging bucket
      already exists but is owned by another project.
    c_exceptions.BadFileException: The source path does not exist or has an
      unsupported extension.
    c_exceptions.InvalidArgumentException: No source was given and
      --no-source was not passed.
  """
  default_gcs_source = False
  default_bucket_name = None
  if gcs_source_staging_dir is None:
    default_gcs_source = True
    default_bucket_name = staging_bucket_util.GetDefaultStagingBucket()
    gcs_source_staging_dir = 'gs://{}/source'.format(default_bucket_name)
  gcs_client = storage_api.StorageClient()

  # --no-source overrides the default --source.
  if not is_specified_source and no_source:
    source = None

  gcs_source_staging = None
  if source:
    # Preserve the original extension for gs:// objects and local files so
    # the backend can detect the archive type; directories are tarred as .tgz.
    suffix = '.tgz'
    if source.startswith('gs://') or os.path.isfile(source):
      _, suffix = os.path.splitext(source)

    # Next, stage the source to Cloud Storage under a unique object name.
    staged_object = '{stamp}-{uuid}{suffix}'.format(
        stamp=times.GetTimeStampFromDateTime(times.Now()),
        uuid=uuid.uuid4().hex,
        suffix=suffix,
    )
    gcs_source_staging_dir = resources.REGISTRY.Parse(
        gcs_source_staging_dir, collection='storage.objects')

    try:
      # Only enforce ownership for the implicit default bucket (see below).
      gcs_client.CreateBucketIfNotExists(
          gcs_source_staging_dir.bucket, check_ownership=default_gcs_source)
    except api_exceptions.HttpForbiddenError:
      raise BucketForbiddenError(
          'The user is forbidden from accessing the bucket [{}]. Please check '
          'your organization\'s policy or if the user has the "serviceusage.services.use" permission'
          .format(gcs_source_staging_dir.bucket))
    except storage_api.BucketInWrongProjectError:
      # If we're using the default bucket but it already exists in a different
      # project, then it could belong to a malicious attacker (b/33046325).
      raise c_exceptions.RequiredArgumentException(
          'gcs-source-staging-dir',
          'A bucket with name {} already exists and is owned by '
          'another project. Specify a bucket using '
          '--gcs-source-staging-dir.'.format(default_bucket_name))

    if gcs_source_staging_dir.object:
      staged_object = gcs_source_staging_dir.object + '/' + staged_object
    gcs_source_staging = resources.REGISTRY.Create(
        collection='storage.objects',
        bucket=gcs_source_staging_dir.bucket,
        object=staged_object)

    if source.startswith('gs://'):
      # Source already lives in GCS: server-side copy into the staging object.
      gcs_source = resources.REGISTRY.Parse(
          source, collection='storage.objects')
      staged_source_obj = gcs_client.Rewrite(gcs_source, gcs_source_staging)
      build_config.source = messages.Source(
          storageSource=messages.StorageSource(
              bucket=staged_source_obj.bucket,
              object=staged_source_obj.name,
              generation=staged_source_obj.generation,
          ))
    else:
      if not os.path.exists(source):
        raise c_exceptions.BadFileException(
            'could not find source [{src}]'.format(src=source))
      if os.path.isdir(source):
        # Tar up the directory (honoring .gcloudignore) and upload it.
        source_snapshot = snapshot.Snapshot(source, ignore_file=ignore_file)
        size_str = resource_transform.TransformSize(
            source_snapshot.uncompressed_size)
        if not hide_logs:
          log.status.Print(
              'Creating temporary tarball archive of {num_files} file(s)'
              ' totalling {size} before compression.'.format(
                  num_files=len(source_snapshot.files), size=size_str))
        staged_source_obj = source_snapshot.CopyTarballToGCS(
            gcs_client,
            gcs_source_staging,
            ignore_file=ignore_file,
            hide_logs=hide_logs)
        build_config.source = messages.Source(
            storageSource=messages.StorageSource(
                bucket=staged_source_obj.bucket,
                object=staged_source_obj.name,
                generation=staged_source_obj.generation,
            ))
      elif os.path.isfile(source):
        unused_root, ext = os.path.splitext(source)
        if ext not in _ALLOWED_SOURCE_EXT:
          # Fixed: the original concatenated a template containing '{src}'
          # without ever formatting it, so the error message printed the
          # literal text '{src}' instead of the offending file path.
          raise c_exceptions.BadFileException(
              'Local file [{src}] is none of {exts}'.format(
                  src=source, exts=', '.join(_ALLOWED_SOURCE_EXT)))
        if not hide_logs:
          log.status.Print('Uploading local file [{src}] to '
                           '[gs://{bucket}/{object}].'.format(
                               src=source,
                               bucket=gcs_source_staging.bucket,
                               object=gcs_source_staging.object,
                           ))
        staged_source_obj = gcs_client.CopyFileToGCS(source, gcs_source_staging)
        build_config.source = messages.Source(
            storageSource=messages.StorageSource(
                bucket=staged_source_obj.bucket,
                object=staged_source_obj.name,
                generation=staged_source_obj.generation,
            ))
  else:
    # No source
    if not no_source:
      raise c_exceptions.InvalidArgumentException(
          '--no-source', 'To omit source, use the --no-source flag.')

  return build_config
Beispiel #6
0
def CreateBuildConfig(tag, no_cache, messages, substitutions, arg_config,
                      is_specified_source, no_source, source,
                      gcs_source_staging_dir, ignore_file, arg_gcs_log_dir,
                      arg_machine_type, arg_disk_size):
    """Returns a build config.

    Builds a messages.Build from either a docker tag (--tag) or a config
    file (--config), stages the source to Cloud Storage, and applies the
    timeout, logs-bucket, machine-type, and disk-size overrides.

    Args:
      tag: str or None, docker tag to build (mutually exclusive with
        arg_config).
      no_cache: bool, disable layer caching (kaniko only; conflicts with
        --config and with non-kaniko builds).
      messages: The Cloud Build messages module.
      substitutions: Build parameter substitutions.
      arg_config: str or None, path to a cloudbuild config file.
      is_specified_source: bool, whether the user explicitly passed --source.
      no_source: bool, whether the user passed --no-source.
      source: str or None, local path or gs:// URI of the build source.
      gcs_source_staging_dir: str or None, gs:// directory to stage source
        in; a default per-project bucket is used when None.
      ignore_file: str or None, override for the .gcloudignore file.
      arg_gcs_log_dir: str or None, gs:// directory for build logs.
      arg_machine_type: Machine type flag value, or None.
      arg_disk_size: Disk size flag value (bytes), or None.

    Returns:
      The populated messages.Build object.

    Raises:
      c_exceptions.InvalidArgumentException: Bad tag namespace, empty config
        path, --no-cache without kaniko, or missing source without
        --no-source.
      c_exceptions.ConflictingArgumentsException: --no-cache with --config.
      c_exceptions.OneOfArgumentsRequiredException: Neither --tag nor
        --config was given.
      c_exceptions.RequiredArgumentException: Default staging bucket is
        owned by another project.
      c_exceptions.BadFileException: Source path missing or has an
        unsupported extension.
    """
    # Get the build timeout.
    build_timeout = properties.VALUES.builds.timeout.Get()
    if build_timeout is not None:
        try:
            # A bare number is interpreted as seconds.
            build_timeout_secs = int(build_timeout)
        except ValueError:
            build_timeout_duration = times.ParseDuration(build_timeout)
            build_timeout_secs = int(build_timeout_duration.total_seconds)
        timeout_str = six.text_type(build_timeout_secs) + 's'
    else:
        timeout_str = None

    if tag is not None:
        if (properties.VALUES.builds.check_tag.GetBool()
                and 'gcr.io/' not in tag):
            raise c_exceptions.InvalidArgumentException(
                '--tag',
                'Tag value must be in the gcr.io/* or *.gcr.io/* namespace.')
        if properties.VALUES.builds.use_kaniko.GetBool():
            # Kaniko build: caching is controlled via a TTL; --no-cache maps
            # to a zero-hour TTL.
            if no_cache:
                ttl = '0h'
            else:
                ttl = '{}h'.format(
                    properties.VALUES.builds.kaniko_cache_ttl.Get())
            build_config = messages.Build(
                steps=[
                    messages.BuildStep(
                        name=properties.VALUES.builds.kaniko_image.Get(),
                        args=[
                            '--destination',
                            tag,
                            '--cache',
                            '--cache-ttl',
                            ttl,
                            '--cache-dir',
                            '',
                        ],
                    ),
                ],
                timeout=timeout_str,
                substitutions=cloudbuild_util.EncodeSubstitutions(
                    substitutions, messages))
        else:
            if no_cache:
                raise c_exceptions.InvalidArgumentException(
                    'no-cache',
                    'Cannot specify --no-cache if builds/use_kaniko property is '
                    'False')
            # Plain docker build of the given tag.
            build_config = messages.Build(
                images=[tag],
                steps=[
                    messages.BuildStep(
                        name='gcr.io/cloud-builders/docker',
                        args=[
                            'build', '--network', 'cloudbuild', '--no-cache',
                            '-t', tag, '.'
                        ],
                    ),
                ],
                timeout=timeout_str,
                substitutions=cloudbuild_util.EncodeSubstitutions(
                    substitutions, messages))
    elif arg_config is not None:
        if no_cache:
            raise c_exceptions.ConflictingArgumentsException(
                '--config', '--no-cache')
        if not arg_config:
            raise c_exceptions.InvalidArgumentException(
                '--config', 'Config file path must not be empty.')
        build_config = config.LoadCloudbuildConfigFromPath(
            arg_config, messages, params=substitutions)
    else:
        raise c_exceptions.OneOfArgumentsRequiredException(
            ['--tag', '--config'],
            'Requires either a docker tag or a config file.')

    # If timeout was set by flag, overwrite the config file.
    if timeout_str:
        build_config.timeout = timeout_str

    # Set the source for the build config.
    default_gcs_source = False
    default_bucket_name = None
    if gcs_source_staging_dir is None:
        default_gcs_source = True
        default_bucket_name = staging_bucket_util.GetDefaultStagingBucket()
        gcs_source_staging_dir = 'gs://{}/source'.format(default_bucket_name)
    gcs_client = storage_api.StorageClient()

    # --no-source overrides the default --source.
    if not is_specified_source and no_source:
        source = None

    gcs_source_staging = None
    if source:
        # Preserve the original extension for gs:// objects and local files;
        # directories are tarred as .tgz.
        suffix = '.tgz'
        if source.startswith('gs://') or os.path.isfile(source):
            _, suffix = os.path.splitext(source)

        # Next, stage the source to Cloud Storage under a unique object name.
        staged_object = '{stamp}-{uuid}{suffix}'.format(
            stamp=times.GetTimeStampFromDateTime(times.Now()),
            uuid=uuid.uuid4().hex,
            suffix=suffix,
        )
        gcs_source_staging_dir = resources.REGISTRY.Parse(
            gcs_source_staging_dir, collection='storage.objects')

        # We create the bucket (if it does not exist) first. If we do an existence
        # check and then create the bucket ourselves, it would be possible for an
        # attacker to get lucky and beat us to creating the bucket. Block on this
        # creation to avoid this race condition.
        gcs_client.CreateBucketIfNotExists(gcs_source_staging_dir.bucket)

        # If no bucket is specified (for the source `default_gcs_source`), check
        # that the default bucket is also owned by the project (b/33046325).
        if default_gcs_source and not staging_bucket_util.BucketIsInProject(
                gcs_client, default_bucket_name):
            raise c_exceptions.RequiredArgumentException(
                'gcs-source-staging-dir',
                'A bucket with name {} already exists and is owned by '
                'another project. Specify a bucket using '
                '--gcs-source-staging-dir.'.format(default_bucket_name))

        if gcs_source_staging_dir.object:
            staged_object = gcs_source_staging_dir.object + '/' + staged_object
        gcs_source_staging = resources.REGISTRY.Create(
            collection='storage.objects',
            bucket=gcs_source_staging_dir.bucket,
            object=staged_object)

        if source.startswith('gs://'):
            # Source already in GCS: server-side copy into the staging object.
            gcs_source = resources.REGISTRY.Parse(source,
                                                  collection='storage.objects')
            staged_source_obj = gcs_client.Rewrite(gcs_source,
                                                   gcs_source_staging)
            build_config.source = messages.Source(
                storageSource=messages.StorageSource(
                    bucket=staged_source_obj.bucket,
                    object=staged_source_obj.name,
                    generation=staged_source_obj.generation,
                ))
        else:
            if not os.path.exists(source):
                raise c_exceptions.BadFileException(
                    'could not find source [{src}]'.format(src=source))
            if os.path.isdir(source):
                # Tar up the directory (honoring .gcloudignore) and upload it.
                source_snapshot = snapshot.Snapshot(source,
                                                    ignore_file=ignore_file)
                size_str = resource_transform.TransformSize(
                    source_snapshot.uncompressed_size)
                log.status.Print(
                    'Creating temporary tarball archive of {num_files} file(s)'
                    ' totalling {size} before compression.'.format(
                        num_files=len(source_snapshot.files), size=size_str))
                staged_source_obj = source_snapshot.CopyTarballToGCS(
                    gcs_client, gcs_source_staging, ignore_file=ignore_file)
                build_config.source = messages.Source(
                    storageSource=messages.StorageSource(
                        bucket=staged_source_obj.bucket,
                        object=staged_source_obj.name,
                        generation=staged_source_obj.generation,
                    ))
            elif os.path.isfile(source):
                unused_root, ext = os.path.splitext(source)
                if ext not in _ALLOWED_SOURCE_EXT:
                    # Fixed: the original concatenated a template containing
                    # '{src}' without formatting it, so the error printed the
                    # literal placeholder instead of the file path.
                    raise c_exceptions.BadFileException(
                        'Local file [{src}] is none of {exts}'.format(
                            src=source,
                            exts=', '.join(_ALLOWED_SOURCE_EXT)))
                log.status.Print('Uploading local file [{src}] to '
                                 '[gs://{bucket}/{object}].'.format(
                                     src=source,
                                     bucket=gcs_source_staging.bucket,
                                     object=gcs_source_staging.object,
                                 ))
                staged_source_obj = gcs_client.CopyFileToGCS(
                    source, gcs_source_staging)
                build_config.source = messages.Source(
                    storageSource=messages.StorageSource(
                        bucket=staged_source_obj.bucket,
                        object=staged_source_obj.name,
                        generation=staged_source_obj.generation,
                    ))
    else:
        # No source
        if not no_source:
            raise c_exceptions.InvalidArgumentException(
                '--no-source', 'To omit source, use the --no-source flag.')

    # Set a Google Cloud Storage directory to hold build logs.
    if arg_gcs_log_dir:
        gcs_log_dir = resources.REGISTRY.Parse(arg_gcs_log_dir,
                                               collection='storage.objects')
        build_config.logsBucket = ('gs://' + gcs_log_dir.bucket + '/' +
                                   gcs_log_dir.object)

    # Set the machine type used to run the build.
    if arg_machine_type is not None:
        machine_type = flags.GetMachineType(arg_machine_type)
        if not build_config.options:
            build_config.options = messages.BuildOptions()
        build_config.options.machineType = machine_type

    # Set the disk size used to run the build.
    if arg_disk_size is not None:
        disk_size = compute_utils.BytesToGb(arg_disk_size)
        if not build_config.options:
            build_config.options = messages.BuildOptions()
        build_config.options.diskSizeGb = int(disk_size)

    return build_config
Beispiel #7
0
    def Run(self, args):
        """This is what gets called when the user runs this command.

    Args:
      args: an argparse namespace. All the arguments that were provided to this
        command invocation.

    Returns:
      Some value that we want to have printed later.
    """

        # Flag cross-validation: PR previewing requires a GitHub-app repo,
        # forbids --namespace (which belongs to git-push triggers configured
        # via --branch-pattern/--tag-pattern), and needs a positive expiry.
        if args.pull_request_preview:
            if args.repo_type != 'github':
                raise c_exceptions.InvalidArgumentException(
                    '--repo-type',
                    "Repo type must be 'github' to configure pull request previewing."
                )
            if args.namespace:
                raise c_exceptions.InvalidArgumentException(
                    '--namespace',
                    'Namespace must not be provided to configure pull request '
                    'previewing. --namespace must only be provided when configuring '
                    'automated deployments with the --branch-pattern or --tag-pattern '
                    'flags.')
            if args.preview_expiry <= 0:
                raise c_exceptions.InvalidArgumentException(
                    '--preview-expiry', 'Preview expiry must be > 0.')

        # Determine image based on repo type
        image = None

        # Determine github app or csr
        github_repo_name = None
        github_repo_owner = None
        csr_repo_name = None

        project = properties.VALUES.core.project.Get(required=True)

        if args.repo_type == 'github':
            if not args.repo_owner:
                raise c_exceptions.RequiredArgumentException(
                    '--repo-owner',
                    'Repo owner is required for --repo-type=github.')
            # $COMMIT_SHA is substituted by Cloud Build at trigger time.
            image = 'gcr.io/{}/github.com/{}/{}:$COMMIT_SHA'.format(
                project, args.repo_owner, args.repo_name)
            github_repo_name = args.repo_name
            github_repo_owner = args.repo_owner
            # We do not have to verify that this repo exists because the request to
            # create the BuildTrigger will fail with the appropriate message asking
            # the user to connect their repo, if the repo is not found.

        elif args.repo_type == 'csr':
            if args.repo_owner:
                raise c_exceptions.InvalidArgumentException(
                    '--repo-owner',
                    'Repo owner must not be provided for --repo-type=csr.')
            image = 'gcr.io/{}/{}:$COMMIT_SHA'.format(project, args.repo_name)
            csr_repo_name = args.repo_name
            self._VerifyCSRRepoExists(csr_repo_name)

        elif args.repo_type == 'bitbucket_mirrored':
            # Mirrored repos are accessed through their CSR mirror, named
            # '<host>_<owner>_<repo>' by convention.
            if not args.repo_owner:
                raise c_exceptions.RequiredArgumentException(
                    '--repo-owner',
                    'Repo owner is required for --repo-type=bitbucket_mirrored.'
                )
            image = 'gcr.io/{}/bitbucket.org/{}/{}:$COMMIT_SHA'.format(
                project, args.repo_owner, args.repo_name)
            csr_repo_name = 'bitbucket_{}_{}'.format(args.repo_owner,
                                                     args.repo_name)
            self._VerifyBitbucketCSRRepoExists(csr_repo_name, args.repo_owner,
                                               args.repo_name)

        elif args.repo_type == 'github_mirrored':
            if not args.repo_owner:
                raise c_exceptions.RequiredArgumentException(
                    '--repo-owner',
                    'Repo owner is required for --repo-type=github_mirrored.')
            image = 'gcr.io/{}/github.com/{}/{}:$COMMIT_SHA'.format(
                project, args.repo_owner, args.repo_name)
            csr_repo_name = 'github_{}_{}'.format(args.repo_owner,
                                                  args.repo_name)
            self._VerifyGitHubCSRRepoExists(csr_repo_name, args.repo_owner,
                                            args.repo_name)

        # Fail fast if the target GKE cluster does not exist.
        self._VerifyClusterExists(args.cluster, args.location)

        # Determine app_name
        if args.app_name:
            app_name = args.app_name
        else:
            app_name = args.repo_name

        # Determine gcs_config_staging_dir_bucket, gcs_config_staging_dir_object
        if args.gcs_config_staging_dir is None:
            gcs_config_staging_dir_bucket = \
              staging_bucket_util.GetDefaultStagingBucket()
            gcs_config_staging_dir_object = 'deploy/config'
        else:
            # The flag may name either an object path or a bare bucket; try
            # the object collection first, then fall back to bucket-only.
            try:
                gcs_config_staging_dir_ref = resources.REGISTRY.Parse(
                    args.gcs_config_staging_dir, collection='storage.objects')
                gcs_config_staging_dir_object = gcs_config_staging_dir_ref.object
            except resources.WrongResourceCollectionException:
                gcs_config_staging_dir_ref = resources.REGISTRY.Parse(
                    args.gcs_config_staging_dir, collection='storage.buckets')
                gcs_config_staging_dir_object = None
            gcs_config_staging_dir_bucket = gcs_config_staging_dir_ref.bucket

        gcs_client = storage_api.StorageClient()
        try:
            # Ownership is only enforced for the implicit default bucket.
            gcs_client.CreateBucketIfNotExists(
                gcs_config_staging_dir_bucket,
                check_ownership=args.gcs_config_staging_dir is None)
        except storage_api.BucketInWrongProjectError:
            # If we're using the default bucket but it already exists in a different
            # project, then it could belong to a malicious attacker (b/33046325).
            raise c_exceptions.RequiredArgumentException(
                '--gcs-config-staging-dir',
                'A bucket with name {} already exists and is owned by '
                'another project. Specify a bucket using '
                '--gcs-config-staging-dir.'.format(
                    gcs_config_staging_dir_bucket))

        if gcs_config_staging_dir_object:
            gcs_config_staging_path = '{}/{}'.format(
                gcs_config_staging_dir_bucket, gcs_config_staging_dir_object)
        else:
            gcs_config_staging_path = gcs_config_staging_dir_bucket

        if args.pull_request_preview:
            log.status.Print(
                'Setting up previewing {} on pull requests.\n'.format(
                    github_repo_name))
            self._ConfigurePRPreview(
                repo_owner=github_repo_owner,
                repo_name=github_repo_name,
                pull_request_pattern=args.pull_request_pattern,
                preview_expiry=args.preview_expiry,
                comment_control=args.comment_control,
                image=image,
                dockerfile_path=args.dockerfile,
                app_name=app_name,
                config_path=args.config,
                expose_port=args.expose,
                gcs_config_staging_path=gcs_config_staging_path,
                cluster=args.cluster,
                location=args.location)
        else:
            log.status.Print(
                'Setting up automated deployments for {}.\n'.format(
                    args.repo_name))
            self._ConfigureGitPushBuildTrigger(
                repo_type=args.repo_type,
                csr_repo_name=csr_repo_name,
                github_repo_owner=github_repo_owner,
                github_repo_name=github_repo_name,
                branch_pattern=args.branch_pattern,
                tag_pattern=args.tag_pattern,
                image=image,
                dockerfile_path=args.dockerfile,
                app_name=app_name,
                config_path=args.config,
                namespace=args.namespace,
                expose_port=args.expose,
                gcs_config_staging_path=gcs_config_staging_path,
                cluster=args.cluster,
                location=args.location)