def testJsonSubs(self):
  """Make sure that $PROJECT_ID gets replaced as appropriate."""
  properties.VALUES.core.project.Set('myproj')
  self.Touch(
      '.', 'subs.json', """
{
  "steps": [
    {"name": "gcr.io/$PROJECT_ID/step",
     "args": "gcr.io/$PROJECT_ID/simple",
     "env": "project=$PROJECT_ID"
    }
  ],
  "images": "gcr.io/$PROJECT_ID/simple"
}
""")
  actual = config.LoadCloudbuildConfigFromPath('subs.json', self.messages)
  # Build the expected message piece by piece for readability.
  step = self.messages.BuildStep(
      name='gcr.io/$PROJECT_ID/step',
      args=['gcr.io/$PROJECT_ID/simple'],
      env=['project=$PROJECT_ID'],
  )
  expected = self.messages.Build(
      steps=[step],
      images=['gcr.io/$PROJECT_ID/simple'],
  )
  self.assertEqual(actual, expected)
def testWaitForSnake(self):
  """snake_case `wait_for` maps onto the camelCase `waitFor` proto field."""
  self.Touch(
      '.', 'waitFor.yaml', """
steps:
- name: gcr.io/cloud-builders/docker
  args:
  - build
  - -t
  - gcr.io/my-project/simple
  wait_for: ['foo', 'bar']
images: gcr.io/my-project/simple
""")
  actual = config.LoadCloudbuildConfigFromPath('waitFor.yaml', self.messages)
  step = self.messages.BuildStep(
      name='gcr.io/cloud-builders/docker',
      args=['build', '-t', 'gcr.io/my-project/simple'],
      waitFor=['foo', 'bar'],
  )
  expected = self.messages.Build(
      steps=[step],
      images=['gcr.io/my-project/simple'],
  )
  self.assertEqual(actual, expected)
def testLoadJson(self):
  """A basic JSON config loads into the expected Build message."""
  self.Touch(
      '.', 'basic.json', """
{
  "steps": [
    {"name": "gcr.io/cloud-builders/docker",
     "args": ["build", "-t", "gcr.io/my-project/simple"]
    }
  ],
  "images": "gcr.io/my-project/simple"
}
""")
  actual = config.LoadCloudbuildConfigFromPath('basic.json', self.messages)
  step = self.messages.BuildStep(
      name='gcr.io/cloud-builders/docker',
      args=['build', '-t', 'gcr.io/my-project/simple'],
  )
  expected = self.messages.Build(
      steps=[step],
      images=['gcr.io/my-project/simple'],
  )
  self.assertEqual(actual, expected)
def testBadEncoding(self):
  """A file that parses as neither JSON nor a YAML mapping is rejected."""
  path = 'garbage.garbage'
  self.Touch('.', path, """
this file is neither json nor yaml
""")
  with self.assertRaisesRegex(cloudbuild_util.ParserError,
                              'Could not parse as a dictionary'):
    config.LoadCloudbuildConfigFromPath(path, self.messages)
def testBadConfigSource(self):
  """Not allowed to specify the source since it comes in from an argument."""
  # Case 1: a config that tries to set `source` is rejected.
  self.Touch(
      '.', 'has_source.yaml', """
source:
  storageSource:
    bucket: boo
    object: oo
""")
  with self.assertRaisesRegex(config.InvalidBuildConfigException,
                              'config cannot specify source'):
    config.LoadCloudbuildConfigFromPath('has_source.yaml', self.messages)
  # Case 2: a config with no steps at all is rejected.
  self.Touch('.', 'no_steps.yaml', """
images: foobar
""")
  with self.assertRaisesRegex(config.InvalidBuildConfigException,
                              'config must list at least one step'):
    config.LoadCloudbuildConfigFromPath('no_steps.yaml', self.messages)
def testLoadJsonWithParameters(self):
  """User-supplied substitutions merge over file substitutions (JSON)."""
  self.Touch(
      '.', 'basic.json', """
{
  "steps": [
    {"name": "gcr.io/cloud-builders/docker",
     "args": ["build", "-t", "gcr.io/$_DAY_OF_WEEK/$_FAVORITE_COLOR"]
    }
  ],
  "substitutions": {
    "_DAY_OF_WEEK": "monday",
    "_BEST_BEER": "orval"
  },
  "images": "gcr.io/$_DAY_OF_WEEK/$_FAVORITE_COLOR",
  "timeout": "gcr.io/$_DAY_OF_WEEK/$_FAVORITE_COLOR"
}
""")
  actual = config.LoadCloudbuildConfigFromPath(
      'basic.json', self.messages,
      {'_DAY_OF_WEEK': 'tuesday', '_FAVORITE_COLOR': 'blue'})
  # Only substitute images/steps, not any other fields (see `timeout`)
  # (there are very few string fields in Build that aren't output only, so use
  # a nonsensical one to test this.)
  prop = self.messages.Build.SubstitutionsValue.AdditionalProperty
  subs = self.messages.Build.SubstitutionsValue(additionalProperties=[
      prop(key='_BEST_BEER', value='orval'),
      prop(key='_DAY_OF_WEEK', value='tuesday'),
      prop(key='_FAVORITE_COLOR', value='blue'),
  ])
  expected = self.messages.Build(
      steps=[
          self.messages.BuildStep(
              name='gcr.io/cloud-builders/docker',
              args=['build', '-t', 'gcr.io/$_DAY_OF_WEEK/$_FAVORITE_COLOR'],
          ),
      ],
      substitutions=subs,
      images=['gcr.io/$_DAY_OF_WEEK/$_FAVORITE_COLOR'],
      timeout='gcr.io/$_DAY_OF_WEEK/$_FAVORITE_COLOR')
  self.assertEqual(actual, expected)
def testYamlSyntaxError(self):
  """Misplaced brace at the end of the document."""
  path = 'error.yaml'
  self.Touch(
      '.', path, """
steps:
- name: gcr.io/cloud-builders/docker
  args:
  - build
  - -t
  - gcr.io/my-project/simple
images: gcr.io/my-project/simple
}
""")
  # The parser error message should name the offending file.
  with self.assertRaisesRegex(cloudbuild_util.ParserError, 'error.yaml'):
    config.LoadCloudbuildConfigFromPath(path, self.messages)
def testYamlUnusedField(self):
  """testYamlUnusedField checks the misindented tags field."""
  path = 'error.yaml'
  # `tags` is indented under the step, where the proto has no such field.
  self.Touch(
      '.', path, """
steps:
- name: gcr.io/cloud-builders/docker
  args:
  - build
  - -t
  - gcr.io/my-project/simple
  tags: sometag
images: gcr.io/my-project/simple1
""")
  expected_error = r'error.yaml as build config: .steps\[0\].tags: unused'
  with self.assertRaisesRegex(cloudbuild_util.ParseProtoException,
                              expected_error):
    config.LoadCloudbuildConfigFromPath(path, self.messages)
def testJsonUnusedField(self):
  """testJsonUnusedField checks the misplaced tags field."""
  path = 'error.json'
  self.Touch(
      '.', path, """
{
  "steps": [
    {"name": "gcr.io/cloud-builders/docker",
     "args": ["build", "-t", "gcr.io/$PROJECT_ID/simple", "."],
     "tags": "sometag"
    }
  ],
  "images": "gcr.io/$PROJECT_ID/simple"
}
""")
  expected_error = r'error.json as build config: .steps\[0\].tags: unused'
  with self.assertRaisesRegex(cloudbuild_util.ParseProtoException,
                              expected_error):
    config.LoadCloudbuildConfigFromPath(path, self.messages)
def testYamlUnusedNested(self):
  """Only present an error for the highest-level mistake."""
  path = 'error.yaml'
  # Everything under `extra` is bogus, but only `.extra` should be reported.
  self.Touch(
      '.', path, """
steps:
- name: gcr.io/cloud-builders/docker
  args:
  - build
  - -t
  - gcr.io/my-project/simple
extra:
  data:
    is: "bad"
images: gcr.io/my-project/simple1
""")
  expected_error = r'error\.yaml as build config: \.extra: unused'
  with self.assertRaisesRegex(cloudbuild_util.ParseProtoException,
                              expected_error):
    config.LoadCloudbuildConfigFromPath(path, self.messages)
def testJsonMultipleUnused(self):
  """Several unknown fields at the same level are reported together."""
  path = 'error.json'
  self.Touch(
      '.', path, """
{
  "steps": [
    {"name": "gcr.io/cloud-builders/docker",
     "args": ["build", "-t", "gcr.io/$PROJECT_ID/simple", "."],
     "bogus": "200s",
     "foo": "bar"
    }
  ],
  "images": "gcr.io/$PROJECT_ID/simple"
}
""")
  expected_error = (
      r'error\.json as build config: \.steps\[0\]\.{bogus,foo}: unused')
  with self.assertRaisesRegex(cloudbuild_util.ParseProtoException,
                              expected_error):
    config.LoadCloudbuildConfigFromPath(path, self.messages)
def testLoadYamlWithParameters(self):
  """User-supplied substitutions are recorded in the Build message (YAML)."""
  self.Touch(
      '.', 'basic.yaml', """
steps:
- name: gcr.io/cloud-builders/docker
  args:
  - build
  - -t
  - gcr.io/$_DAY_OF_WEEK/$_FAVORITE_COLOR
images: gcr.io/$_DAY_OF_WEEK/$_FAVORITE_COLOR
timeout: gcr.io/$_DAY_OF_WEEK/$_FAVORITE_COLOR
""")
  actual = config.LoadCloudbuildConfigFromPath(
      'basic.yaml', self.messages,
      {'_DAY_OF_WEEK': 'tuesday', '_FAVORITE_COLOR': 'blue'})
  # Only substitute images/steps, not any other fields (see `timeout`)
  # (there are very few string fields in Build that aren't output only, so use
  # a nonsensical one to test this.)
  prop = self.messages.Build.SubstitutionsValue.AdditionalProperty
  subs = self.messages.Build.SubstitutionsValue(additionalProperties=[
      prop(key='_DAY_OF_WEEK', value='tuesday'),
      prop(key='_FAVORITE_COLOR', value='blue'),
  ])
  expected = self.messages.Build(
      steps=[
          self.messages.BuildStep(
              name='gcr.io/cloud-builders/docker',
              args=['build', '-t', 'gcr.io/$_DAY_OF_WEEK/$_FAVORITE_COLOR'],
          ),
      ],
      substitutions=subs,
      images=['gcr.io/$_DAY_OF_WEEK/$_FAVORITE_COLOR'],
      timeout='gcr.io/$_DAY_OF_WEEK/$_FAVORITE_COLOR')
  self.assertEqual(actual, expected)
def testYamlMultipleUnused(self):
  """More than one mistake on the same level gets a more interesting error."""
  path = 'error.yaml'
  self.Touch(
      '.', path, """
steps:
- name: gcr.io/cloud-builders/docker
  args:
  - build
  - -t
  - gcr.io/my-project/simple
extra:
  data:
    is: "bad"
nonsense: "bad as well"
images: gcr.io/my-project/simple1
""")
  expected_error = (
      r'error\.yaml as build config: \.\{extra,nonsense\}: unused')
  with self.assertRaisesRegex(cloudbuild_util.ParseProtoException,
                              expected_error):
    config.LoadCloudbuildConfigFromPath(path, self.messages)
def testYamlSubs(self):
  """Make sure that $PROJECT_ID gets replaced as appropriate."""
  properties.VALUES.core.project.Set('myproj')
  self.Touch(
      '.', 'subs.yaml', """
steps:
- name: gcr.io/$PROJECT_ID/step
  args: gcr.io/$PROJECT_ID/simple
  env: project=$PROJECT_ID
images: gcr.io/$PROJECT_ID/simple
""")
  actual = config.LoadCloudbuildConfigFromPath('subs.yaml', self.messages)
  step = self.messages.BuildStep(
      name='gcr.io/$PROJECT_ID/step',
      args=['gcr.io/$PROJECT_ID/simple'],
      env=['project=$PROJECT_ID'],
  )
  expected = self.messages.Build(
      steps=[step],
      images=['gcr.io/$PROJECT_ID/simple'],
  )
  self.assertEqual(actual, expected)
def testLoadYaml(self):
  """A basic YAML config loads into the expected Build message."""
  self.Touch(
      '.', 'basic.yaml', """
steps:
- name: gcr.io/cloud-builders/docker
  args:
  - build
  - -t
  - gcr.io/my-project/simple
images: gcr.io/my-project/simple
""")
  actual = config.LoadCloudbuildConfigFromPath('basic.yaml', self.messages)
  step = self.messages.BuildStep(
      name='gcr.io/cloud-builders/docker',
      args=['build', '-t', 'gcr.io/my-project/simple'],
  )
  expected = self.messages.Build(
      steps=[step],
      images=['gcr.io/my-project/simple'],
  )
  self.assertEqual(actual, expected)
def _SetBuildSteps(tag, no_cache, messages, substitutions, arg_config,
                   timeout_str):
  """Set build steps.

  Builds the Build message either from a docker image tag (--tag) or from a
  cloudbuild config file (--config); exactly one of the two must be provided.

  Args:
    tag: str or None, docker image tag to build and push.
    no_cache: bool, whether to disable layer caching.
    messages: Cloud Build proto messages module.
    substitutions: dict or None, user-provided substitutions.
    arg_config: str or None, path to a cloudbuild config file.
    timeout_str: str or None, build timeout (e.g. '300s').

  Returns:
    messages.Build, the build to submit.

  Raises:
    c_exceptions.InvalidArgumentException: if the tag is not in a supported
      registry namespace, the config path is empty, or --no-cache is used
      without kaniko.
    c_exceptions.ConflictingArgumentsException: if --config is combined with
      --no-cache.
    c_exceptions.OneOfArgumentsRequiredException: if neither --tag nor
      --config was given.
  """
  if tag is not None:
    # Tag validation is gated behind the builds/check_tag property.
    if (properties.VALUES.builds.check_tag.GetBool() and
        not any(reg in tag for reg in _SUPPORTED_REGISTRIES)):
      raise c_exceptions.InvalidArgumentException(
          '--tag', 'Tag value must be in the gcr.io/*, *.gcr.io/*, '
          'or *.pkg.dev/* namespace.')
    if properties.VALUES.builds.use_kaniko.GetBool():
      # Kaniko path: --no-cache maps to a zero cache TTL.
      if no_cache:
        ttl = '0h'
      else:
        ttl = '{}h'.format(properties.VALUES.builds.kaniko_cache_ttl.Get())
      build_config = messages.Build(
          steps=[
              messages.BuildStep(
                  name=properties.VALUES.builds.kaniko_image.Get(),
                  args=[
                      '--destination', tag, '--cache', '--cache-ttl', ttl,
                      '--cache-dir', '',
                  ],
              ),
          ],
          timeout=timeout_str,
          substitutions=cloudbuild_util.EncodeSubstitutions(
              substitutions, messages))
    else:
      # Plain docker build path; --no-cache is only meaningful with kaniko.
      if no_cache:
        raise c_exceptions.InvalidArgumentException(
            'no-cache',
            'Cannot specify --no-cache if builds/use_kaniko property is '
            'False')
      build_config = messages.Build(
          images=[tag],
          steps=[
              messages.BuildStep(
                  name='gcr.io/cloud-builders/docker',
                  args=[
                      'build', '--network', 'cloudbuild', '--no-cache', '-t',
                      tag, '.'
                  ],
              ),
          ],
          timeout=timeout_str,
          substitutions=cloudbuild_util.EncodeSubstitutions(
              substitutions, messages))
  elif arg_config is not None:
    if no_cache:
      raise c_exceptions.ConflictingArgumentsException(
          '--config', '--no-cache')
    if not arg_config:
      raise c_exceptions.InvalidArgumentException(
          '--config', 'Config file path must not be empty.')
    build_config = config.LoadCloudbuildConfigFromPath(
        arg_config, messages, params=substitutions)
  else:
    raise c_exceptions.OneOfArgumentsRequiredException(
        ['--tag', '--config'],
        'Requires either a docker tag or a config file.')
  # If timeout was set by flag, overwrite the config file.
  if timeout_str:
    build_config.timeout = timeout_str
  return build_config
def _SetBuildStepsAlpha(tag, no_cache, messages, substitutions, arg_config,
                        timeout_str, buildpack):
  """Set build steps.

  Alpha variant of _SetBuildSteps that additionally supports buildpacks
  (--pack). Exactly one of --tag, --pack, or --config must be provided.

  Args:
    tag: str or None, docker image tag to build and push.
    no_cache: bool, whether to disable layer caching.
    messages: Cloud Build proto messages module.
    substitutions: dict or None, user-provided substitutions.
    arg_config: str or None, path to a cloudbuild config file.
    timeout_str: str or None, build timeout (e.g. '300s').
    buildpack: list of dicts or None, --pack argument values; only the first
      entry is consulted (keys: 'builder', 'image', 'env').

  Returns:
    messages.Build, the build to submit.

  Raises:
    c_exceptions.InvalidArgumentException: bad tag/image/config values, or
      --no-cache without kaniko.
    c_exceptions.ConflictingArgumentsException: --config with --no-cache.
    c_exceptions.OneOfArgumentsRequiredException: none of --tag, --config,
      --pack was given.
  """
  if tag is not None:
    # Tag validation is gated behind the builds/check_tag property.
    if (properties.VALUES.builds.check_tag.GetBool() and
        not any(reg in tag for reg in _SUPPORTED_REGISTRIES)):
      raise c_exceptions.InvalidArgumentException(
          '--tag', 'Tag value must be in the gcr.io/*, *.gcr.io/*, '
          'or *.pkg.dev/* namespace.')
    if properties.VALUES.builds.use_kaniko.GetBool():
      # Kaniko path: --no-cache maps to a zero cache TTL.
      if no_cache:
        ttl = '0h'
      else:
        ttl = '{}h'.format(properties.VALUES.builds.kaniko_cache_ttl.Get())
      build_config = messages.Build(
          steps=[
              messages.BuildStep(
                  name=properties.VALUES.builds.kaniko_image.Get(),
                  args=[
                      '--destination', tag, '--cache', '--cache-ttl', ttl,
                      '--cache-dir', '',
                  ],
              ),
          ],
          timeout=timeout_str,
          substitutions=cloudbuild_util.EncodeSubstitutions(
              substitutions, messages))
    else:
      # Plain docker build path; --no-cache is only meaningful with kaniko.
      if no_cache:
        raise c_exceptions.InvalidArgumentException(
            'no-cache',
            'Cannot specify --no-cache if builds/use_kaniko property is '
            'False')
      build_config = messages.Build(
          images=[tag],
          steps=[
              messages.BuildStep(
                  name='gcr.io/cloud-builders/docker',
                  args=[
                      'build', '--network', 'cloudbuild', '--no-cache', '-t',
                      tag, '.'
                  ],
              ),
          ],
          timeout=timeout_str,
          substitutions=cloudbuild_util.EncodeSubstitutions(
              substitutions, messages))
  elif buildpack is not None:
    # Buildpack path: run `pack build` via the skaffold pack builder image.
    if not buildpack:
      raise c_exceptions.InvalidArgumentException(
          '--pack', 'Image value must not be empty.')
    # Fall back to the default builder when none was supplied.
    if buildpack[0].get('builder') is None:
      builder = _DEFAULT_BUILDPACK_BUILDER
    else:
      builder = buildpack[0].get('builder')
    if buildpack[0].get('image') is None:
      raise c_exceptions.InvalidArgumentException(
          '--pack', 'Image value must not be empty.')
    image = buildpack[0].get('image')
    if (properties.VALUES.builds.check_tag.GetBool() and
        not any(reg in image for reg in _SUPPORTED_REGISTRIES)):
      raise c_exceptions.InvalidArgumentException(
          '--pack',
          'Image value must be in the gcr.io/*, *.gcr.io/*, or *.pkg.dev/* namespace.'
      )
    env = buildpack[0].get('env')
    pack_args = ['build', image, '--builder', builder]
    if env is not None:
      pack_args.append('--env')
      pack_args.append(env)
    build_config = messages.Build(
        images=[image],
        steps=[
            messages.BuildStep(
                name='gcr.io/k8s-skaffold/pack',
                entrypoint='pack',
                args=pack_args,
            ),
        ],
        timeout=timeout_str,
        substitutions=cloudbuild_util.EncodeSubstitutions(
            substitutions, messages))
  elif arg_config is not None:
    if no_cache:
      raise c_exceptions.ConflictingArgumentsException(
          '--config', '--no-cache')
    if not arg_config:
      raise c_exceptions.InvalidArgumentException(
          '--config', 'Config file path must not be empty.')
    build_config = config.LoadCloudbuildConfigFromPath(
        arg_config, messages, params=substitutions)
  else:
    raise c_exceptions.OneOfArgumentsRequiredException(
        ['--tag', '--config', '--pack'],
        'Requires either a docker tag, a config file, or pack argument.')
  # If timeout was set by flag, overwrite the config file.
  if timeout_str:
    build_config.timeout = timeout_str
  return build_config
def Run(self, args):
  """This is what gets called when the user runs this command.

  Args:
    args: an argparse namespace. All the arguments that were provided to this
      command invocation.

  Returns:
    Some value that we want to have printed later.

  Raises:
    FailedBuildException: If the build is completed and not 'SUCCESS'.
  """
  project = properties.VALUES.core.project.Get()
  # Derive a GCS-legal default bucket name from the project id.
  safe_project = project.replace(':', '_')
  safe_project = safe_project.replace('.', '_')
  # The string 'google' is not allowed in bucket names.
  safe_project = safe_project.replace('google', 'elgoog')
  default_bucket_name = '{}_cloudbuild'.format(safe_project)
  # Track whether the staging/log locations were defaulted; ownership of the
  # default bucket is verified further down only in that case.
  default_gcs_source = False
  if args.gcs_source_staging_dir is None:
    default_gcs_source = True
    args.gcs_source_staging_dir = 'gs://{}/source'.format(default_bucket_name)
  default_gcs_log_dir = False
  if args.gcs_log_dir is None:
    default_gcs_log_dir = True
    args.gcs_log_dir = 'gs://{}/logs'.format(default_bucket_name)
  client = cloudbuild_util.GetClientInstance()
  messages = cloudbuild_util.GetMessagesModule()
  gcs_client = storage_api.StorageClient()

  # First, create the build request.
  build_timeout = properties.VALUES.container.build_timeout.Get()
  if build_timeout is not None:
    try:
      # A bare number is interpreted as seconds.
      build_timeout_secs = int(build_timeout)
    except ValueError:
      # Otherwise parse it as a duration string (e.g. '2h').
      build_timeout_duration = times.ParseDuration(build_timeout)
      build_timeout_secs = int(build_timeout_duration.total_seconds)
    timeout_str = str(build_timeout_secs) + 's'
  else:
    timeout_str = None

  if args.tag:
    if 'gcr.io/' not in args.tag:
      raise c_exceptions.InvalidArgumentException(
          '--tag',
          'Tag value must be in the gcr.io/* or *.gcr.io/* namespace.')
    # A --tag build is a canned single-step docker build.
    build_config = messages.Build(
        images=[args.tag],
        steps=[
            messages.BuildStep(
                name='gcr.io/cloud-builders/docker',
                args=['build', '--no-cache', '-t', args.tag, '.'],
            ),
        ],
        timeout=timeout_str,
        substitutions=cloudbuild_util.EncodeSubstitutions(
            args.substitutions, messages)
    )
  elif args.config:
    build_config = config.LoadCloudbuildConfigFromPath(
        args.config, messages, params=args.substitutions)
  # NOTE(review): if neither --tag nor --config is set, build_config is never
  # assigned and the code below raises NameError — presumably argparse
  # enforces one of the two upstream; confirm against the arg definitions.

  # If timeout was set by flag, overwrite the config file.
  if timeout_str:
    build_config.timeout = timeout_str

  # Preserve the extension of a file/GCS source; directories are tarred.
  suffix = '.tgz'
  if args.source.startswith('gs://') or os.path.isfile(args.source):
    _, suffix = os.path.splitext(args.source)

  # Next, stage the source to Cloud Storage.
  staged_object = '{stamp}{suffix}'.format(
      stamp=times.GetTimeStampFromDateTime(times.Now()),
      suffix=suffix,
  )
  gcs_source_staging_dir = resources.REGISTRY.Parse(
      args.gcs_source_staging_dir, collection='storage.objects')

  # We first try to create the bucket, before doing all the checks, in order
  # to avoid a race condition. If we do the check first, an attacker could
  # be lucky enough to create the bucket after the check and before this
  # bucket creation.
  gcs_client.CreateBucketIfNotExists(gcs_source_staging_dir.bucket)

  # If no bucket is specified (for the source `default_gcs_source` or for the
  # logs `default_gcs_log_dir`), check that the default bucket is also owned
  # by the project (b/33046325).
  if default_gcs_source or default_gcs_log_dir:
    # This request returns only the buckets owned by the project.
    bucket_list_req = gcs_client.messages.StorageBucketsListRequest(
        project=project, prefix=default_bucket_name)
    bucket_list = gcs_client.client.buckets.List(bucket_list_req)
    found_bucket = False
    for bucket in bucket_list.items:
      if bucket.id == default_bucket_name:
        found_bucket = True
        break
    if not found_bucket:
      if default_gcs_source:
        raise c_exceptions.RequiredArgumentException(
            'gcs_source_staging_dir',
            'A bucket with name {} already exists and is owned by '
            'another project. Specify a bucket using '
            '--gcs_source_staging_dir.'.format(default_bucket_name))
      elif default_gcs_log_dir:
        raise c_exceptions.RequiredArgumentException(
            'gcs-log-dir',
            'A bucket with name {} already exists and is owned by '
            'another project. Specify a bucket to hold build logs '
            'using --gcs-log-dir.'.format(default_bucket_name))

  # Stage the source object under the user-specified prefix, if any.
  if gcs_source_staging_dir.object:
    staged_object = gcs_source_staging_dir.object + '/' + staged_object
  gcs_source_staging = resources.REGISTRY.Create(
      collection='storage.objects',
      bucket=gcs_source_staging_dir.bucket,
      object=staged_object)
  if args.source.startswith('gs://'):
    # GCS-to-GCS copy of an already-uploaded source.
    gcs_source = resources.REGISTRY.Parse(
        args.source, collection='storage.objects')
    staged_source_obj = gcs_client.Rewrite(gcs_source, gcs_source_staging)
    build_config.source = messages.Source(
        storageSource=messages.StorageSource(
            bucket=staged_source_obj.bucket,
            object=staged_source_obj.name,
            generation=staged_source_obj.generation,
        ))
  else:
    if not os.path.exists(args.source):
      raise c_exceptions.BadFileException(
          'could not find source [{src}]'.format(src=args.source))
    if os.path.isdir(args.source):
      # Local directory: tar it up and upload the tarball.
      source_snapshot = snapshot.Snapshot(args.source)
      size_str = resource_transform.TransformSize(
          source_snapshot.uncompressed_size)
      log.status.Print(
          'Creating temporary tarball archive of {num_files} file(s)'
          ' totalling {size} before compression.'.format(
              num_files=len(source_snapshot.files), size=size_str))
      staged_source_obj = source_snapshot.CopyTarballToGCS(
          gcs_client, gcs_source_staging)
      build_config.source = messages.Source(
          storageSource=messages.StorageSource(
              bucket=staged_source_obj.bucket,
              object=staged_source_obj.name,
              generation=staged_source_obj.generation,
          ))
    elif os.path.isfile(args.source):
      # Local archive file: upload as-is after validating the extension.
      unused_root, ext = os.path.splitext(args.source)
      if ext not in _ALLOWED_SOURCE_EXT:
        # NOTE(review): the '{src}' placeholder is never .format()-ted here,
        # so the literal text '[{src}]' appears in the error message — likely
        # a missing .format(src=args.source); confirm and fix separately.
        raise c_exceptions.BadFileException(
            'Local file [{src}] is none of ' + ', '.join(_ALLOWED_SOURCE_EXT))
      log.status.Print(
          'Uploading local file [{src}] to '
          '[gs://{bucket}/{object}].'.format(
              src=args.source,
              bucket=gcs_source_staging.bucket,
              object=gcs_source_staging.object,
          ))
      staged_source_obj = gcs_client.CopyFileToGCS(
          storage_util.BucketReference.FromBucketUrl(
              gcs_source_staging.bucket),
          args.source, gcs_source_staging.object)
      build_config.source = messages.Source(
          storageSource=messages.StorageSource(
              bucket=staged_source_obj.bucket,
              object=staged_source_obj.name,
              generation=staged_source_obj.generation,
          ))

  gcs_log_dir = resources.REGISTRY.Parse(
      args.gcs_log_dir, collection='storage.objects')
  if gcs_log_dir.bucket != gcs_source_staging.bucket:
    # Create the logs bucket if it does not yet exist.
    gcs_client.CreateBucketIfNotExists(gcs_log_dir.bucket)
  build_config.logsBucket = 'gs://'+gcs_log_dir.bucket+'/'+gcs_log_dir.object

  log.debug('submitting build: '+repr(build_config))

  # Start the build.
  op = client.projects_builds.Create(
      messages.CloudbuildProjectsBuildsCreateRequest(
          build=build_config,
          projectId=properties.VALUES.core.project.Get()))
  # Round-trip the operation metadata through JSON to get a typed Build.
  json = encoding.MessageToJson(op.metadata)
  build = encoding.JsonToMessage(messages.BuildOperationMetadata, json).build

  build_ref = resources.REGISTRY.Create(
      collection='cloudbuild.projects.builds',
      projectId=build.projectId,
      id=build.id)
  log.CreatedResource(build_ref)
  if build.logUrl:
    log.status.Print('Logs are available at [{log_url}].'.format(
        log_url=build.logUrl))
  else:
    log.status.Print('Logs are available in the Cloud Console.')

  # If the command is run --async, we just print out a reference to the build.
  # NOTE(review): `async` is a reserved word in Python 3.7+; this attribute
  # access is only valid under Python 2-era gcloud — confirm runtime target.
  if args.async:
    return build

  mash_handler = execution.MashHandler(
      execution.GetCancelBuildHandler(client, messages, build_ref))

  # Otherwise, logs are streamed from GCS.
  with execution_utils.CtrlCSection(mash_handler):
    build = cb_logs.CloudBuildClient(client, messages).Stream(build_ref)

  if build.status == messages.Build.StatusValueValuesEnum.TIMEOUT:
    log.status.Print(
        'Your build timed out. Use the [--timeout=DURATION] flag to change '
        'the timeout threshold.')
  if build.status != messages.Build.StatusValueValuesEnum.SUCCESS:
    raise FailedBuildException(build)
  return build
def testNoFile(self):
  """Loading a nonexistent config path raises MissingFileError."""
  missing_path = 'not-here.json'
  with self.assertRaises(files.MissingFileError):
    config.LoadCloudbuildConfigFromPath(missing_path, self.messages)
def Run(self, args):
  """This is what gets called when the user runs this command.

  Args:
    args: an argparse namespace. All the arguments that were provided to this
      command invocation.

  Returns:
    Some value that we want to have printed later.

  Raises:
    FailedBuildException: If the build is completed and not 'SUCCESS'.
  """
  project = properties.VALUES.core.project.Get(required=True)
  # Derive a GCS-legal default bucket name from the project id.
  safe_project = project.replace(':', '_')
  safe_project = safe_project.replace('.', '_')
  # The string 'google' is not allowed in bucket names.
  safe_project = safe_project.replace('google', 'elgoog')
  default_bucket_name = '{}_cloudbuild'.format(safe_project)
  # Track whether the staging location was defaulted; ownership of the
  # default bucket is verified further down only in that case.
  default_gcs_source = False
  if args.gcs_source_staging_dir is None:
    default_gcs_source = True
    args.gcs_source_staging_dir = 'gs://{}/source'.format(default_bucket_name)
  client = cloudbuild_util.GetClientInstance()
  messages = cloudbuild_util.GetMessagesModule()
  gcs_client = storage_api.StorageClient()

  # First, create the build request.
  build_timeout = properties.VALUES.builds.timeout.Get()
  if build_timeout is not None:
    try:
      # A bare number is interpreted as seconds.
      build_timeout_secs = int(build_timeout)
    except ValueError:
      # Otherwise parse it as a duration string (e.g. '2h').
      build_timeout_duration = times.ParseDuration(build_timeout)
      build_timeout_secs = int(build_timeout_duration.total_seconds)
    timeout_str = six.text_type(build_timeout_secs) + 's'
  else:
    timeout_str = None

  if args.tag is not None:
    # Tag validation is gated behind the builds/check_tag property.
    if (properties.VALUES.builds.check_tag.GetBool() and
        'gcr.io/' not in args.tag):
      raise c_exceptions.InvalidArgumentException(
          '--tag',
          'Tag value must be in the gcr.io/* or *.gcr.io/* namespace.')
    if properties.VALUES.builds.use_kaniko.GetBool():
      # Kaniko path: --no-cache maps to a zero cache TTL.
      if args.no_cache:
        ttl = '0h'
      else:
        ttl = '{}h'.format(properties.VALUES.builds.kaniko_cache_ttl.Get())
      build_config = messages.Build(
          steps=[
              messages.BuildStep(
                  name=properties.VALUES.builds.kaniko_image.Get(),
                  args=[
                      '--destination', args.tag, '--cache', 'true',
                      '--cache-ttl', ttl
                  ],
              ),
          ],
          timeout=timeout_str,
          substitutions=cloudbuild_util.EncodeSubstitutions(
              args.substitutions, messages))
    else:
      # Plain docker build path; --no-cache is only meaningful with kaniko.
      if args.no_cache:
        raise c_exceptions.InvalidArgumentException(
            'no-cache',
            'Cannot specify --no-cache if builds/use_kaniko property is '
            'False')
      build_config = messages.Build(
          images=[args.tag],
          steps=[
              messages.BuildStep(
                  name='gcr.io/cloud-builders/docker',
                  args=[
                      'build', '--network', 'cloudbuild', '--no-cache', '-t',
                      args.tag, '.'
                  ],
              ),
          ],
          timeout=timeout_str,
          substitutions=cloudbuild_util.EncodeSubstitutions(
              args.substitutions, messages))
  elif args.config is not None:
    if args.no_cache:
      raise c_exceptions.ConflictingArgumentsException(
          '--config', '--no-cache')
    if not args.config:
      raise c_exceptions.InvalidArgumentException(
          '--config', 'Config file path must not be empty.')
    build_config = config.LoadCloudbuildConfigFromPath(
        args.config, messages, params=args.substitutions)
  else:
    raise c_exceptions.OneOfArgumentsRequiredException(
        ['--tag', '--config'],
        'Requires either a docker tag or a config file.')

  # If timeout was set by flag, overwrite the config file.
  if timeout_str:
    build_config.timeout = timeout_str

  # --no-source overrides the default --source.
  if not args.IsSpecified('source') and args.no_source:
    args.source = None

  gcs_source_staging = None
  if args.source:
    # Preserve the extension of a file/GCS source; directories are tarred.
    suffix = '.tgz'
    if args.source.startswith('gs://') or os.path.isfile(args.source):
      _, suffix = os.path.splitext(args.source)

    # Next, stage the source to Cloud Storage.
    staged_object = '{stamp}-{uuid}{suffix}'.format(
        stamp=times.GetTimeStampFromDateTime(times.Now()),
        uuid=uuid.uuid4().hex,
        suffix=suffix,
    )
    gcs_source_staging_dir = resources.REGISTRY.Parse(
        args.gcs_source_staging_dir, collection='storage.objects')

    # We create the bucket (if it does not exist) first. If we do an existence
    # check and then create the bucket ourselves, it would be possible for an
    # attacker to get lucky and beat us to creating the bucket. Block on this
    # creation to avoid this race condition.
    gcs_client.CreateBucketIfNotExists(gcs_source_staging_dir.bucket)

    # If no bucket is specified (for the source `default_gcs_source`), check
    # that the default bucket is also owned by the project (b/33046325).
    if default_gcs_source:
      # This request returns only the buckets owned by the project.
      bucket_list_req = gcs_client.messages.StorageBucketsListRequest(
          project=project, prefix=default_bucket_name)
      bucket_list = gcs_client.client.buckets.List(bucket_list_req)
      found_bucket = False
      for bucket in bucket_list.items:
        if bucket.id == default_bucket_name:
          found_bucket = True
          break
      if not found_bucket:
        if default_gcs_source:
          raise c_exceptions.RequiredArgumentException(
              'gcs_source_staging_dir',
              'A bucket with name {} already exists and is owned by '
              'another project. Specify a bucket using '
              '--gcs_source_staging_dir.'.format(default_bucket_name))

    # Stage the source object under the user-specified prefix, if any.
    if gcs_source_staging_dir.object:
      staged_object = gcs_source_staging_dir.object + '/' + staged_object
    gcs_source_staging = resources.REGISTRY.Create(
        collection='storage.objects',
        bucket=gcs_source_staging_dir.bucket,
        object=staged_object)
    if args.source.startswith('gs://'):
      # GCS-to-GCS copy of an already-uploaded source.
      gcs_source = resources.REGISTRY.Parse(
          args.source, collection='storage.objects')
      staged_source_obj = gcs_client.Rewrite(gcs_source, gcs_source_staging)
      build_config.source = messages.Source(
          storageSource=messages.StorageSource(
              bucket=staged_source_obj.bucket,
              object=staged_source_obj.name,
              generation=staged_source_obj.generation,
          ))
    else:
      if not os.path.exists(args.source):
        raise c_exceptions.BadFileException(
            'could not find source [{src}]'.format(src=args.source))
      if os.path.isdir(args.source):
        # Local directory: tar it up and upload the tarball.
        source_snapshot = snapshot.Snapshot(args.source,
                                            ignore_file=args.ignore_file)
        size_str = resource_transform.TransformSize(
            source_snapshot.uncompressed_size)
        log.status.Print(
            'Creating temporary tarball archive of {num_files} file(s)'
            ' totalling {size} before compression.'.format(
                num_files=len(source_snapshot.files), size=size_str))
        staged_source_obj = source_snapshot.CopyTarballToGCS(
            gcs_client, gcs_source_staging, ignore_file=args.ignore_file)
        build_config.source = messages.Source(
            storageSource=messages.StorageSource(
                bucket=staged_source_obj.bucket,
                object=staged_source_obj.name,
                generation=staged_source_obj.generation,
            ))
      elif os.path.isfile(args.source):
        # Local archive file: upload as-is after validating the extension.
        unused_root, ext = os.path.splitext(args.source)
        if ext not in _ALLOWED_SOURCE_EXT:
          # NOTE(review): the '{src}' placeholder is never .format()-ted
          # here, so the literal text '[{src}]' appears in the error message
          # — likely a missing .format(src=args.source); confirm and fix
          # separately.
          raise c_exceptions.BadFileException(
              'Local file [{src}] is none of ' +
              ', '.join(_ALLOWED_SOURCE_EXT))
        log.status.Print('Uploading local file [{src}] to '
                         '[gs://{bucket}/{object}].'.format(
                             src=args.source,
                             bucket=gcs_source_staging.bucket,
                             object=gcs_source_staging.object,
                         ))
        staged_source_obj = gcs_client.CopyFileToGCS(args.source,
                                                     gcs_source_staging)
        build_config.source = messages.Source(
            storageSource=messages.StorageSource(
                bucket=staged_source_obj.bucket,
                object=staged_source_obj.name,
                generation=staged_source_obj.generation,
            ))
  else:
    # No source
    if not args.no_source:
      raise c_exceptions.InvalidArgumentException(
          '--no-source', 'To omit source, use the --no-source flag.')

  if args.gcs_log_dir:
    gcs_log_dir = resources.REGISTRY.Parse(
        args.gcs_log_dir, collection='storage.objects')
    build_config.logsBucket = ('gs://' + gcs_log_dir.bucket + '/' +
                               gcs_log_dir.object)

  # Machine type.
  if args.machine_type is not None:
    machine_type = Submit._machine_type_flag_map.GetEnumForChoice(
        args.machine_type)
    if not build_config.options:
      build_config.options = messages.BuildOptions()
    build_config.options.machineType = machine_type

  # Disk size.
  if args.disk_size is not None:
    disk_size = compute_utils.BytesToGb(args.disk_size)
    if not build_config.options:
      build_config.options = messages.BuildOptions()
    build_config.options.diskSizeGb = int(disk_size)

  log.debug('submitting build: ' + repr(build_config))

  # Start the build.
  op = client.projects_builds.Create(
      messages.CloudbuildProjectsBuildsCreateRequest(
          build=build_config,
          projectId=properties.VALUES.core.project.Get()))
  # Round-trip the operation metadata through JSON to get a typed Build.
  json = encoding.MessageToJson(op.metadata)
  build = encoding.JsonToMessage(messages.BuildOperationMetadata, json).build

  build_ref = resources.REGISTRY.Create(
      collection='cloudbuild.projects.builds',
      projectId=build.projectId,
      id=build.id)
  log.CreatedResource(build_ref)
  if build.logUrl:
    log.status.Print(
        'Logs are available at [{log_url}].'.format(log_url=build.logUrl))
  else:
    log.status.Print('Logs are available in the Cloud Console.')

  # If the command is run --async, we just print out a reference to the build.
  # NOTE(review): `async` is a reserved word in Python 3.7+; this attribute
  # access is only valid under Python 2-era gcloud — confirm runtime target.
  if args.async:
    return build

  mash_handler = execution.MashHandler(
      execution.GetCancelBuildHandler(client, messages, build_ref))

  # Otherwise, logs are streamed from GCS.
  with execution_utils.CtrlCSection(mash_handler):
    build = cb_logs.CloudBuildClient(client, messages).Stream(build_ref)

  if build.status == messages.Build.StatusValueValuesEnum.TIMEOUT:
    log.status.Print(
        'Your build timed out. Use the [--timeout=DURATION] flag to change '
        'the timeout threshold.')
  if build.status != messages.Build.StatusValueValuesEnum.SUCCESS:
    raise FailedBuildException(build)
  return build
def CreateBuildConfig(tag, no_cache, messages, substitutions, arg_config,
                      is_specified_source, no_source, source,
                      gcs_source_staging_dir, ignore_file, arg_gcs_log_dir,
                      arg_machine_type, arg_disk_size):
  """Returns a build config.

  Builds a Cloud Build `Build` message from either a docker tag (`--tag`) or
  a build config file (`--config`), then attaches the build source: a GCS
  object, a local directory (tarballed and uploaded), or a local archive file
  — unless `--no-source` was given. Finally applies log dir, machine type,
  and disk size overrides.

  Args:
    tag: str or None, docker tag to build (mutually exclusive with
      arg_config; exactly one must be provided).
    no_cache: bool, disable caching (kaniko cache TTL 0h, or docker
      `--no-cache`; invalid with `--config` or when kaniko is off).
    messages: Cloud Build API proto message module used to construct the
      Build message.
    substitutions: substitutions mapping encoded into the build config.
    arg_config: str or None, path to a cloudbuild config file.
    is_specified_source: bool, whether `--source` was given explicitly.
    no_source: bool, whether `--no-source` was given.
    source: str or None, the build source: a `gs://` object path, a local
      directory, or a local file with an extension in _ALLOWED_SOURCE_EXT.
    gcs_source_staging_dir: str or None, GCS directory to stage source in;
      defaults to the project's default staging bucket when None.
    ignore_file: override of the ignore file (e.g. .gcloudignore) used when
      snapshotting a source directory.
    arg_gcs_log_dir: str or None, GCS directory to hold build logs.
    arg_machine_type: machine type flag value, or None.
    arg_disk_size: disk size flag value (bytes), or None.

  Returns:
    messages.Build, the fully populated build config.

  Raises:
    c_exceptions.InvalidArgumentException: bad tag namespace, --no-cache
      without kaniko, or a source given without --no-source being honored.
    c_exceptions.ConflictingArgumentsException: --config with --no-cache.
    c_exceptions.OneOfArgumentsRequiredException: neither --tag nor --config.
    c_exceptions.RequiredArgumentException: default staging bucket exists but
      is owned by another project.
    c_exceptions.BadFileException: missing source path or disallowed local
      file extension.
  """
  # Get the build timeout.
  build_timeout = properties.VALUES.builds.timeout.Get()
  if build_timeout is not None:
    try:
      # A bare number is interpreted as seconds.
      build_timeout_secs = int(build_timeout)
    except ValueError:
      build_timeout_duration = times.ParseDuration(build_timeout)
      build_timeout_secs = int(build_timeout_duration.total_seconds)
    timeout_str = six.text_type(build_timeout_secs) + 's'
  else:
    timeout_str = None

  if tag is not None:
    if (properties.VALUES.builds.check_tag.GetBool() and
        'gcr.io/' not in tag):
      raise c_exceptions.InvalidArgumentException(
          '--tag',
          'Tag value must be in the gcr.io/* or *.gcr.io/* namespace.')
    if properties.VALUES.builds.use_kaniko.GetBool():
      if no_cache:
        ttl = '0h'
      else:
        ttl = '{}h'.format(properties.VALUES.builds.kaniko_cache_ttl.Get())
      build_config = messages.Build(
          steps=[
              messages.BuildStep(
                  name=properties.VALUES.builds.kaniko_image.Get(),
                  args=[
                      '--destination',
                      tag,
                      '--cache',
                      '--cache-ttl',
                      ttl,
                      '--cache-dir',
                      '',
                  ],
              ),
          ],
          timeout=timeout_str,
          substitutions=cloudbuild_util.EncodeSubstitutions(
              substitutions, messages))
    else:
      if no_cache:
        raise c_exceptions.InvalidArgumentException(
            'no-cache',
            'Cannot specify --no-cache if builds/use_kaniko property is '
            'False')
      build_config = messages.Build(
          images=[tag],
          steps=[
              messages.BuildStep(
                  name='gcr.io/cloud-builders/docker',
                  args=[
                      'build', '--network', 'cloudbuild', '--no-cache', '-t',
                      tag, '.'
                  ],
              ),
          ],
          timeout=timeout_str,
          substitutions=cloudbuild_util.EncodeSubstitutions(
              substitutions, messages))
  elif arg_config is not None:
    if no_cache:
      raise c_exceptions.ConflictingArgumentsException(
          '--config', '--no-cache')
    if not arg_config:
      raise c_exceptions.InvalidArgumentException(
          '--config', 'Config file path must not be empty.')
    build_config = config.LoadCloudbuildConfigFromPath(
        arg_config, messages, params=substitutions)
  else:
    raise c_exceptions.OneOfArgumentsRequiredException(
        ['--tag', '--config'],
        'Requires either a docker tag or a config file.')

  # If timeout was set by flag, overwrite the config file.
  if timeout_str:
    build_config.timeout = timeout_str

  # Set the source for the build config.
  default_gcs_source = False
  default_bucket_name = None
  if gcs_source_staging_dir is None:
    default_gcs_source = True
    default_bucket_name = staging_bucket_util.GetDefaultStagingBucket()
    gcs_source_staging_dir = 'gs://{}/source'.format(default_bucket_name)
  gcs_client = storage_api.StorageClient()

  # --no-source overrides the default --source.
  if not is_specified_source and no_source:
    source = None

  gcs_source_staging = None
  if source:
    suffix = '.tgz'
    if source.startswith('gs://') or os.path.isfile(source):
      _, suffix = os.path.splitext(source)

    # Next, stage the source to Cloud Storage.
    staged_object = '{stamp}-{uuid}{suffix}'.format(
        stamp=times.GetTimeStampFromDateTime(times.Now()),
        uuid=uuid.uuid4().hex,
        suffix=suffix,
    )
    gcs_source_staging_dir = resources.REGISTRY.Parse(
        gcs_source_staging_dir, collection='storage.objects')

    # We create the bucket (if it does not exist) first. If we do an existence
    # check and then create the bucket ourselves, it would be possible for an
    # attacker to get lucky and beat us to creating the bucket. Block on this
    # creation to avoid this race condition.
    gcs_client.CreateBucketIfNotExists(gcs_source_staging_dir.bucket)

    # If no bucket is specified (for the source `default_gcs_source`), check
    # that the default bucket is also owned by the project (b/33046325).
    if default_gcs_source and not staging_bucket_util.BucketIsInProject(
        gcs_client, default_bucket_name):
      raise c_exceptions.RequiredArgumentException(
          'gcs-source-staging-dir',
          'A bucket with name {} already exists and is owned by '
          'another project. Specify a bucket using '
          '--gcs-source-staging-dir.'.format(default_bucket_name))

    if gcs_source_staging_dir.object:
      staged_object = gcs_source_staging_dir.object + '/' + staged_object
    gcs_source_staging = resources.REGISTRY.Create(
        collection='storage.objects',
        bucket=gcs_source_staging_dir.bucket,
        object=staged_object)

    if source.startswith('gs://'):
      # Source is already in GCS: copy (rewrite) it into the staging location.
      gcs_source = resources.REGISTRY.Parse(
          source, collection='storage.objects')
      staged_source_obj = gcs_client.Rewrite(gcs_source, gcs_source_staging)
      build_config.source = messages.Source(
          storageSource=messages.StorageSource(
              bucket=staged_source_obj.bucket,
              object=staged_source_obj.name,
              generation=staged_source_obj.generation,
          ))
    else:
      if not os.path.exists(source):
        raise c_exceptions.BadFileException(
            'could not find source [{src}]'.format(src=source))
      if os.path.isdir(source):
        # Local directory: snapshot it into a tarball and upload.
        source_snapshot = snapshot.Snapshot(source, ignore_file=ignore_file)
        size_str = resource_transform.TransformSize(
            source_snapshot.uncompressed_size)
        log.status.Print(
            'Creating temporary tarball archive of {num_files} file(s)'
            ' totalling {size} before compression.'.format(
                num_files=len(source_snapshot.files), size=size_str))
        staged_source_obj = source_snapshot.CopyTarballToGCS(
            gcs_client, gcs_source_staging, ignore_file=ignore_file)
        build_config.source = messages.Source(
            storageSource=messages.StorageSource(
                bucket=staged_source_obj.bucket,
                object=staged_source_obj.name,
                generation=staged_source_obj.generation,
            ))
      elif os.path.isfile(source):
        # Local archive file: only allowed extensions may be uploaded as-is.
        unused_root, ext = os.path.splitext(source)
        if ext not in _ALLOWED_SOURCE_EXT:
          # BUGFIX: the message previously concatenated the literal text
          # 'Local file [{src}] is none of ' without ever calling .format(),
          # so users saw '{src}' verbatim instead of their file path.
          raise c_exceptions.BadFileException(
              'Local file [{src}] is none of {exts}'.format(
                  src=source, exts=', '.join(_ALLOWED_SOURCE_EXT)))
        log.status.Print('Uploading local file [{src}] to '
                         '[gs://{bucket}/{object}].'.format(
                             src=source,
                             bucket=gcs_source_staging.bucket,
                             object=gcs_source_staging.object,
                         ))
        staged_source_obj = gcs_client.CopyFileToGCS(
            source, gcs_source_staging)
        build_config.source = messages.Source(
            storageSource=messages.StorageSource(
                bucket=staged_source_obj.bucket,
                object=staged_source_obj.name,
                generation=staged_source_obj.generation,
            ))
  else:
    # No source was provided; this is only valid with an explicit --no-source.
    if not no_source:
      raise c_exceptions.InvalidArgumentException(
          '--no-source', 'To omit source, use the --no-source flag.')

  # Set a Google Cloud Storage directory to hold build logs.
  if arg_gcs_log_dir:
    gcs_log_dir = resources.REGISTRY.Parse(
        arg_gcs_log_dir, collection='storage.objects')
    build_config.logsBucket = ('gs://' + gcs_log_dir.bucket + '/' +
                               gcs_log_dir.object)

  # Set the machine type used to run the build.
  if arg_machine_type is not None:
    machine_type = flags.GetMachineType(arg_machine_type)
    if not build_config.options:
      build_config.options = messages.BuildOptions()
    build_config.options.machineType = machine_type

  # Set the disk size used to run the build.
  if arg_disk_size is not None:
    disk_size = compute_utils.BytesToGb(arg_disk_size)
    if not build_config.options:
      build_config.options = messages.BuildOptions()
    build_config.options.diskSizeGb = int(disk_size)

  return build_config