Example #1
0
    def LoadCloudBuild(self, params):
        """Load and parameterize the cloudbuild.yaml for this runtime version.

        The file is pulled from under the app/runtime_builders_root property
        value. Supported schemes are Cloud Storage ('gs://') and the local
        filesystem ('file://').

        Args:
          params: dict, a dictionary of values to be substituted in to the
            cloudbuild.yaml template corresponding to this runtime version.

        Returns:
          Build message, the parsed and parameterized cloudbuild.yaml file.

        Raises:
          CloudBuildLoadError: if the cloudbuild.yaml file could not be loaded.
        """
        root = properties.VALUES.app.runtime_builders_root.Get(required=True)
        file_name = self.ToYamlFileName()
        messages = cloudbuild_util.GetMessagesModule()

        if root.startswith('file://'):
            local_path = os.path.join(root[len('file://'):], file_name)
            try:
                return cloudbuild_config.LoadCloudbuildConfig(
                    local_path, messages=messages, params=params)
            except cloudbuild_config.NotFoundException:
                raise CloudBuildFileNotFound(file_name, root, self)

        if root.startswith('gs://'):
            # Cloud Storage always uses '/' as separator, regardless of the
            # local platform.
            prefix = root if root.endswith('/') else root + '/'
            obj = storage_util.ObjectReference.FromUrl(prefix + file_name)
            client = storage_api.StorageClient()
            # TODO(b/34169164): keep this in-memory.
            with file_utils.TemporaryDirectory() as scratch_dir:
                local_copy = os.path.join(scratch_dir, 'cloudbuild.yaml')
                try:
                    client.CopyFileFromGCS(obj.bucket_ref, obj.name, local_copy)
                except calliope_exceptions.BadFileException:
                    raise CloudBuildObjectNotFound(
                        obj.name, obj.bucket_ref.ToBucketUrl(), self)
                return cloudbuild_config.LoadCloudbuildConfig(
                    local_copy, messages=messages, params=params)

        raise InvalidRuntimeBuilderPath(root)
Example #2
0
    def Run(self, args):
        """This is what gets called when the user runs this command.

    Stages the build source in Cloud Storage, submits the build, and (unless
    --async was given) streams its logs until completion.

    Args:
      args: an argparse namespace. All the arguments that were provided to this
        command invocation.

    Returns:
      Build message, the submitted (and, unless --async was given, completed)
      build.

    Raises:
      c_exceptions.InvalidArgumentException: if --tag is not in the gcr.io
        namespace.
      c_exceptions.OneOfArgumentsRequiredException: if neither --tag nor
        --config was provided.
      c_exceptions.BadFileException: if the local source is missing or has an
        unsupported extension.
      FailedBuildException: If the build is completed and not 'SUCCESS'.
    """

        # Default the staging and log locations to a per-project bucket.
        if args.gcs_source_staging_dir is None:
            args.gcs_source_staging_dir = 'gs://{project}_cloudbuild/source'.format(
                project=properties.VALUES.core.project.Get(), )
        if args.gcs_log_dir is None:
            args.gcs_log_dir = 'gs://{project}_cloudbuild/logs'.format(
                project=properties.VALUES.core.project.Get(), )

        client = core_apis.GetClientInstance('cloudbuild', 'v1')
        messages = core_apis.GetMessagesModule('cloudbuild', 'v1')
        registry = self.context['registry']

        gcs_client = storage_api.StorageClient()

        # First, create the build request.
        build_timeout = properties.VALUES.container.build_timeout.Get()
        if build_timeout is not None:
            timeout_str = build_timeout + 's'
        else:
            timeout_str = None

        if args.tag:
            if 'gcr.io/' not in args.tag:
                raise c_exceptions.InvalidArgumentException(
                    '--tag',
                    'Tag value must be in the gcr.io/* or *.gcr.io/* namespace.'
                )
            build_config = messages.Build(
                images=[args.tag],
                steps=[
                    messages.BuildStep(
                        name='gcr.io/cloud-builders/docker',
                        args=['build', '--no-cache', '-t', args.tag, '.'],
                    ),
                ],
                timeout=timeout_str,
            )
        elif args.config:
            build_config = config.LoadCloudbuildConfig(args.config, messages)
        else:
            # Fix: build_config was previously left unbound here, causing a
            # NameError below when neither --tag nor --config was supplied.
            raise c_exceptions.OneOfArgumentsRequiredException(
                ['--tag', '--config'],
                'Requires a docker tag or a config file.')

        # A timeout in the config file wins over the property default.
        if build_config.timeout is None:
            build_config.timeout = timeout_str

        # Preserve the archive extension of an already-packaged source.
        suffix = '.tgz'
        if args.source.startswith('gs://') or os.path.isfile(args.source):
            _, suffix = os.path.splitext(args.source)

        # Next, stage the source to Cloud Storage.
        # Fix: the fallback 'null' must be wrapped in a list -- joining the
        # bare string iterates its characters and produced 'n_u_l_l'.
        staged_object = '{stamp}_{tag_ish}{suffix}'.format(
            stamp=times.GetTimeStampFromDateTime(times.Now()),
            tag_ish='_'.join(build_config.images or ['null']).replace('/', '_'),
            suffix=suffix,
        )
        gcs_source_staging_dir = registry.Parse(args.gcs_source_staging_dir,
                                                collection='storage.objects')
        gcs_client.CreateBucketIfNotExists(gcs_source_staging_dir.bucket)
        if gcs_source_staging_dir.object:
            staged_object = gcs_source_staging_dir.object + '/' + staged_object

        gcs_source_staging = registry.Create(
            collection='storage.objects',
            bucket=gcs_source_staging_dir.bucket,
            object=staged_object)

        if args.source.startswith('gs://'):
            gcs_source = registry.Parse(args.source,
                                        collection='storage.objects')
            staged_source_obj = gcs_client.Copy(gcs_source, gcs_source_staging)
            build_config.source = messages.Source(
                storageSource=messages.StorageSource(
                    bucket=staged_source_obj.bucket,
                    object=staged_source_obj.name,
                    generation=staged_source_obj.generation,
                ))
        else:
            if not os.path.exists(args.source):
                raise c_exceptions.BadFileException(
                    'could not find source [{src}]'.format(src=args.source))
            if os.path.isdir(args.source):
                source_snapshot = snapshot.Snapshot(args.source)
                size_str = resource_transform.TransformSize(
                    source_snapshot.uncompressed_size)
                log.status.write(
                    'Creating temporary tarball archive of {num_files} file(s)'
                    ' totalling {size} before compression.\n'.format(
                        num_files=len(source_snapshot.files), size=size_str))
                staged_source_obj = source_snapshot.CopyTarballToGCS(
                    gcs_client, gcs_source_staging)
                build_config.source = messages.Source(
                    storageSource=messages.StorageSource(
                        bucket=staged_source_obj.bucket,
                        object=staged_source_obj.name,
                        generation=staged_source_obj.generation,
                    ))
            elif os.path.isfile(args.source):
                unused_root, ext = os.path.splitext(args.source)
                if ext not in _ALLOWED_SOURCE_EXT:
                    # Fix: the original concatenated the extension list without
                    # ever calling .format, so users saw a literal '{src}'.
                    raise c_exceptions.BadFileException(
                        'Local file [{src}] is none of {allowed}'.format(
                            src=args.source,
                            allowed=', '.join(_ALLOWED_SOURCE_EXT)))
                log.status.write('Uploading local file [{src}] to '
                                 '[gs://{bucket}/{object}]\n'.format(
                                     src=args.source,
                                     bucket=gcs_source_staging.bucket,
                                     object=gcs_source_staging.object,
                                 ))
                staged_source_obj = gcs_client.CopyFileToGCS(
                    storage_util.BucketReference.FromBucketUrl(
                        gcs_source_staging.bucket), args.source,
                    gcs_source_staging.object)
                build_config.source = messages.Source(
                    storageSource=messages.StorageSource(
                        bucket=staged_source_obj.bucket,
                        object=staged_source_obj.name,
                        generation=staged_source_obj.generation,
                    ))

        gcs_log_dir = registry.Parse(args.gcs_log_dir,
                                     collection='storage.objects')

        if gcs_log_dir.bucket != gcs_source_staging.bucket:
            # Create the logs bucket if it does not yet exist.
            gcs_client.CreateBucketIfNotExists(gcs_log_dir.bucket)
        build_config.logsBucket = 'gs://' + gcs_log_dir.bucket + '/' + gcs_log_dir.object

        log.debug('submitting build: ' + repr(build_config))

        # Start the build.
        op = client.projects_builds.Create(
            messages.CloudbuildProjectsBuildsCreateRequest(
                build=build_config,
                projectId=properties.VALUES.core.project.Get()))
        json = encoding.MessageToJson(op.metadata)
        build = encoding.JsonToMessage(messages.BuildOperationMetadata,
                                       json).build

        build_ref = registry.Create(collection='cloudbuild.projects.builds',
                                    projectId=build.projectId,
                                    id=build.id)

        log.CreatedResource(build_ref)
        if build.logUrl:
            log.status.write(
                'Logs are permanently available at [{log_url}]\n'.format(
                    log_url=build.logUrl))
        else:
            log.status.write('Logs are available in the Cloud Console.\n')

        # If the command is run --async, we just print out a reference to the
        # build. Fix: 'async' is a reserved word in Python 3, so 'args.async'
        # is a syntax error; the attribute must be read via getattr.
        if getattr(args, 'async', False):
            return build

        # Otherwise, logs are streamed from GCS.
        build = cb_logs.CloudBuildClient(client, messages).Stream(build_ref)

        if build.status != messages.Build.StatusValueValuesEnum.SUCCESS:
            raise FailedBuildException(build.status)

        return build
Example #3
0
    def Run(self, args):
        """This is what gets called when the user runs this command.

    Stages the build source in Cloud Storage, submits the build, and (unless
    --async was given) streams its logs until completion.

    Args:
      args: an argparse namespace. All the arguments that were provided to this
        command invocation.

    Returns:
      Build message, the submitted (and, unless --async was given, completed)
      build.

    Raises:
      c_exceptions.InvalidArgumentException: if --tag is not in the gcr.io
        namespace.
      c_exceptions.OneOfArgumentsRequiredException: if neither --tag nor
        --config was provided.
      c_exceptions.RequiredArgumentException: if the default bucket name is
        taken by another project.
      c_exceptions.BadFileException: if the local source is missing or has an
        unsupported extension.
      FailedBuildException: If the build is completed and not 'SUCCESS'.
    """

        project = properties.VALUES.core.project.Get()
        # Derive a legal default bucket name from the project id.
        safe_project = project.replace(':', '_')
        safe_project = safe_project.replace('.', '_')
        # The string 'google' is not allowed in bucket names.
        safe_project = safe_project.replace('google', 'elgoog')

        default_bucket_name = '{}_cloudbuild'.format(safe_project)

        default_gcs_source = False
        if args.gcs_source_staging_dir is None:
            default_gcs_source = True
            args.gcs_source_staging_dir = 'gs://{}/source'.format(
                default_bucket_name)

        default_gcs_log_dir = False
        if args.gcs_log_dir is None:
            default_gcs_log_dir = True
            args.gcs_log_dir = 'gs://{}/logs'.format(default_bucket_name)

        client = cloudbuild_util.GetClientInstance()
        messages = cloudbuild_util.GetMessagesModule()
        registry = self.context['registry']

        gcs_client = storage_api.StorageClient()

        # First, create the build request.
        build_timeout = properties.VALUES.container.build_timeout.Get()

        if build_timeout is not None:
            try:
                # A bare number is interpreted as seconds.
                build_timeout_secs = int(build_timeout)
            except ValueError:
                build_timeout_duration = times.ParseDuration(build_timeout)
                build_timeout_secs = int(build_timeout_duration.total_seconds)
            timeout_str = str(build_timeout_secs) + 's'
        else:
            timeout_str = None

        if args.tag:
            if 'gcr.io/' not in args.tag:
                raise c_exceptions.InvalidArgumentException(
                    '--tag',
                    'Tag value must be in the gcr.io/* or *.gcr.io/* namespace.'
                )
            build_config = messages.Build(
                images=[args.tag],
                steps=[
                    messages.BuildStep(
                        name='gcr.io/cloud-builders/docker',
                        args=['build', '--no-cache', '-t', args.tag, '.'],
                    ),
                ],
                timeout=timeout_str,
            )
        elif args.config:
            build_config = config.LoadCloudbuildConfig(args.config, messages)
        else:
            # Fix: build_config was previously left unbound here, causing a
            # NameError below when neither --tag nor --config was supplied.
            raise c_exceptions.OneOfArgumentsRequiredException(
                ['--tag', '--config'],
                'Requires a docker tag or a config file.')

        # If timeout was set by flag, overwrite the config file.
        if timeout_str:
            build_config.timeout = timeout_str

        # Preserve the archive extension of an already-packaged source.
        suffix = '.tgz'
        if args.source.startswith('gs://') or os.path.isfile(args.source):
            _, suffix = os.path.splitext(args.source)

        # Next, stage the source to Cloud Storage.
        staged_object = '{stamp}{suffix}'.format(
            stamp=times.GetTimeStampFromDateTime(times.Now()),
            suffix=suffix,
        )
        gcs_source_staging_dir = registry.Parse(args.gcs_source_staging_dir,
                                                collection='storage.objects')

        # We first try to create the bucket, before doing all the checks, in order
        # to avoid a race condition. If we do the check first, an attacker could
        # be lucky enough to create the bucket after the check and before this
        # bucket creation.
        gcs_client.CreateBucketIfNotExists(gcs_source_staging_dir.bucket)

        # If no bucket is specified (for the source `default_gcs_source` or for the
        # logs `default_gcs_log_dir`), check that the default bucket is also owned
        # by the project (b/33046325).
        if default_gcs_source or default_gcs_log_dir:
            # This request returns only the buckets owned by the project.
            bucket_list_req = gcs_client.messages.StorageBucketsListRequest(
                project=project, prefix=default_bucket_name)
            bucket_list = gcs_client.client.buckets.List(bucket_list_req)
            found_bucket = any(bucket.id == default_bucket_name
                               for bucket in bucket_list.items)
            if not found_bucket:
                if default_gcs_source:
                    # Fix: flag names use dashes, matching the --gcs-log-dir
                    # message in the branch below (was gcs_source_staging_dir).
                    raise c_exceptions.RequiredArgumentException(
                        'gcs-source-staging-dir',
                        'A bucket with name {} already exists and is owned by '
                        'another project. Specify a bucket using '
                        '--gcs-source-staging-dir.'.format(
                            default_bucket_name))
                elif default_gcs_log_dir:
                    raise c_exceptions.RequiredArgumentException(
                        'gcs-log-dir',
                        'A bucket with name {} already exists and is owned by '
                        'another project. Specify a bucket to hold build logs '
                        'using --gcs-log-dir.'.format(default_bucket_name))

        if gcs_source_staging_dir.object:
            staged_object = gcs_source_staging_dir.object + '/' + staged_object

        gcs_source_staging = registry.Create(
            collection='storage.objects',
            bucket=gcs_source_staging_dir.bucket,
            object=staged_object)

        if args.source.startswith('gs://'):
            gcs_source = registry.Parse(args.source,
                                        collection='storage.objects')
            staged_source_obj = gcs_client.Rewrite(gcs_source,
                                                   gcs_source_staging)
            build_config.source = messages.Source(
                storageSource=messages.StorageSource(
                    bucket=staged_source_obj.bucket,
                    object=staged_source_obj.name,
                    generation=staged_source_obj.generation,
                ))
        else:
            if not os.path.exists(args.source):
                raise c_exceptions.BadFileException(
                    'could not find source [{src}]'.format(src=args.source))
            if os.path.isdir(args.source):
                source_snapshot = snapshot.Snapshot(args.source)
                size_str = resource_transform.TransformSize(
                    source_snapshot.uncompressed_size)
                log.status.Print(
                    'Creating temporary tarball archive of {num_files} file(s)'
                    ' totalling {size} before compression.'.format(
                        num_files=len(source_snapshot.files), size=size_str))
                staged_source_obj = source_snapshot.CopyTarballToGCS(
                    gcs_client, gcs_source_staging)
                build_config.source = messages.Source(
                    storageSource=messages.StorageSource(
                        bucket=staged_source_obj.bucket,
                        object=staged_source_obj.name,
                        generation=staged_source_obj.generation,
                    ))
            elif os.path.isfile(args.source):
                unused_root, ext = os.path.splitext(args.source)
                if ext not in _ALLOWED_SOURCE_EXT:
                    # Fix: the original concatenated the extension list without
                    # ever calling .format, so users saw a literal '{src}'.
                    raise c_exceptions.BadFileException(
                        'Local file [{src}] is none of {allowed}'.format(
                            src=args.source,
                            allowed=', '.join(_ALLOWED_SOURCE_EXT)))
                log.status.Print('Uploading local file [{src}] to '
                                 '[gs://{bucket}/{object}].'.format(
                                     src=args.source,
                                     bucket=gcs_source_staging.bucket,
                                     object=gcs_source_staging.object,
                                 ))
                staged_source_obj = gcs_client.CopyFileToGCS(
                    storage_util.BucketReference.FromBucketUrl(
                        gcs_source_staging.bucket), args.source,
                    gcs_source_staging.object)
                build_config.source = messages.Source(
                    storageSource=messages.StorageSource(
                        bucket=staged_source_obj.bucket,
                        object=staged_source_obj.name,
                        generation=staged_source_obj.generation,
                    ))

        gcs_log_dir = registry.Parse(args.gcs_log_dir,
                                     collection='storage.objects')

        if gcs_log_dir.bucket != gcs_source_staging.bucket:
            # Create the logs bucket if it does not yet exist.
            gcs_client.CreateBucketIfNotExists(gcs_log_dir.bucket)
        build_config.logsBucket = 'gs://' + gcs_log_dir.bucket + '/' + gcs_log_dir.object

        log.debug('submitting build: ' + repr(build_config))

        # Start the build.
        op = client.projects_builds.Create(
            messages.CloudbuildProjectsBuildsCreateRequest(
                build=build_config,
                projectId=properties.VALUES.core.project.Get()))
        json = encoding.MessageToJson(op.metadata)
        build = encoding.JsonToMessage(messages.BuildOperationMetadata,
                                       json).build

        build_ref = registry.Create(collection='cloudbuild.projects.builds',
                                    projectId=build.projectId,
                                    id=build.id)

        log.CreatedResource(build_ref)
        if build.logUrl:
            log.status.Print(
                'Logs are permanently available at [{log_url}].'.format(
                    log_url=build.logUrl))
        else:
            log.status.Print('Logs are available in the Cloud Console.')

        # If the command is run --async, we just print out a reference to the
        # build. Fix: 'async' is a reserved word in Python 3, so 'args.async'
        # is a syntax error; the attribute must be read via getattr.
        if getattr(args, 'async', False):
            return build

        # Allow Ctrl-C to cancel the in-progress build rather than just the
        # local log streaming.
        mash_handler = execution.MashHandler(
            execution.GetCancelBuildHandler(client, messages, build_ref))

        # Otherwise, logs are streamed from GCS.
        with execution_utils.CtrlCSection(mash_handler):
            build = cb_logs.CloudBuildClient(client,
                                             messages).Stream(build_ref)

        if build.status == messages.Build.StatusValueValuesEnum.TIMEOUT:
            log.status.Print(
                'Your build timed out. Use the [--timeout=DURATION] flag to change '
                'the timeout threshold.')

        if build.status != messages.Build.StatusValueValuesEnum.SUCCESS:
            raise FailedBuildException(build)

        return build