Esempio n. 1
0
  def testCopyTarballToGcs_ignorefile(self):
    """Verify CopyTarballToGCS honors an explicit custom ignore file."""
    dest_object = resources.REGISTRY.Create(
        collection='storage.objects', bucket='bucket', object='object')
    storage_stub = FakeStorageClient()

    # Project dir with one file to keep and two entries the ignore file drops.
    project_dir = self.CreateTempDir('project')
    for name, contents in (
        ('Dockerfile', 'empty'),
        ('file_to_ignore', 'empty'),
        ('.custom_ignore_file', '.custom_ignore_file\nfile_to_ignore')):
      self._writeFile(os.path.join(project_dir, name), contents)

    source_snapshot = snapshot.Snapshot(
        project_dir, ignore_file='.custom_ignore_file')
    tarball_bytes = source_snapshot.CopyTarballToGCS(
        storage_stub, dest_object, ignore_file='.custom_ignore_file')
    archive = tarfile.open(fileobj=BytesIO(tarball_bytes), mode='r:*')
    self.assertEqual(len(archive.getmembers()), 1)
    self.assertEqual(archive.getmember('Dockerfile').size, 5)
    archive.close()

    # This message doesn't display with an explicit gcloudignore file
    self.AssertErrNotContains('Some files were not included')
    self.AssertErrNotContains('Check the gcloud log')
    self.AssertLogContains('Using ignore file')
Esempio n. 2
0
 def testMakeTarball_BrokenSymlink(self):
   """A dangling symlink is excluded from the generated tarball."""
   project_dir = self.CreateTempDir('project')
   self._writeFile(os.path.join(project_dir, 'Dockerfile'), 'empty')
   # Create a symlink whose target does not exist.
   os.symlink(os.path.join(project_dir, 'does-not-exist'),
              os.path.join(project_dir, 'link'))
   with files.ChDir(project_dir):
     with files.TemporaryDirectory() as tmp_dir:
       archive = snapshot.Snapshot(project_dir)._MakeTarball(
           os.path.join(tmp_dir, 'file.tgz'))
       # Only the regular file should have been archived.
       self.assertEqual(len(archive.getmembers()), 1)
       self.assertEqual(archive.getmember('Dockerfile').size, 5)
       # Remove the broken symlink; it would break test cleanup otherwise.
       os.remove('link')
       archive.close()
Esempio n. 3
0
 def testMakeTarball_gcloudignore(self):
   """Entries listed in .gcloudignore are excluded from the tarball."""
   project_dir = self.CreateTempDir('project')
   self._writeFile(os.path.join(project_dir, 'Dockerfile'), 'empty')
   self._writeFile(os.path.join(project_dir, 'file_to_ignore'), 'empty')
   # Ignore dotfiles (so the ignore file itself too) and file_to_ignore.
   self._writeFile(os.path.join(project_dir, '.gcloudignore'),
                   '.*\nfile_to_ignore')
   with files.ChDir(project_dir):
     with files.TemporaryDirectory() as tmp_dir:
       archive = snapshot.Snapshot(project_dir)._MakeTarball(
           os.path.join(tmp_dir, 'file.tgz'))
       self.assertEqual(len(archive.getmembers()), 1)
       self.assertEqual(archive.getmember('Dockerfile').size, 5)
       archive.close()
Esempio n. 4
0
 def testMakeTarball_NestedDir(self):
   """A nested file is archived together with its parent directories."""
   project_dir = self.CreateTempDir('project')
   self._writeFile(os.path.join(project_dir, 'path', 'to', 'Dockerfile'),
                   'empty')
   with files.ChDir(project_dir):
     with files.TemporaryDirectory() as tmp_dir:
       archive = snapshot.Snapshot(project_dir)._MakeTarball(
           os.path.join(tmp_dir, 'file.tgz'))
       # One regular file plus the two directories on its path.
       self.assertEqual(len(archive.getmembers()), 3)
       self.assertEqual(archive.getmember('path/to/Dockerfile').size, 5)
       self.assertTrue(archive.getmember('path').isdir())
       self.assertTrue(archive.getmember('path/to').isdir())
       archive.close()
Esempio n. 5
0
 def testMakeTarball_Symlink(self):
   """A symlink to an existing file is stored as a link entry."""
   project_dir = self.CreateTempDir('project')
   self._writeFile(os.path.join(project_dir, 'Dockerfile'), 'empty')
   os.symlink(os.path.join(project_dir, 'Dockerfile'),
              os.path.join(project_dir, 'link'))
   with files.ChDir(project_dir):
     with files.TemporaryDirectory() as tmp_dir:
       archive = snapshot.Snapshot(project_dir)._MakeTarball(
           os.path.join(tmp_dir, 'file.tgz'))
       self.assertEqual(len(archive.getmembers()), 2)
       self.assertEqual(archive.getmember('Dockerfile').size, 5)
       # Link entries carry no payload of their own.
       self.assertEqual(archive.getmember('link').size, 0)
       archive.close()
Esempio n. 6
0
 def testMakeTarball(self):
   """A single file is archived with its size kept and uid/gid zeroed."""
   project_dir = self.CreateTempDir('project')
   self._writeFile(os.path.join(project_dir, 'Dockerfile'), 'empty')
   with files.ChDir(project_dir):
     with files.TemporaryDirectory() as tmp_dir:
       archive = snapshot.Snapshot(project_dir)._MakeTarball(
           os.path.join(tmp_dir, 'file.tgz'))
       self.assertEqual(len(archive.getmembers()), 1)
       member = archive.getmember('Dockerfile')
       self.assertEqual(member.size, 5)
       # _MakeTarball is expected to zero out ownership in the archive.
       self.assertEqual(member.uid, 0)
       self.assertEqual(member.gid, 0)
       archive.close()
Esempio n. 7
0
 def testMakeTarball_EmptyDir(self):
   """An empty directory is archived and its permission bits survive."""
   project_dir = self.CreateTempDir('project')
   empty_dir = os.path.join(project_dir, 'emptydir')
   os.mkdir(empty_dir)
   os.chmod(empty_dir, 0o777)
   self._writeFile(os.path.join(project_dir, 'Dockerfile'), 'empty')
   with files.ChDir(project_dir):
     with files.TemporaryDirectory() as tmp_dir:
       archive = snapshot.Snapshot(project_dir)._MakeTarball(
           os.path.join(tmp_dir, 'file.tgz'))
       self.assertEqual(len(archive.getmembers()), 2)
       self.assertEqual(archive.getmember('Dockerfile').size, 5)
       dir_member = archive.getmember('emptydir')
       self.assertTrue(dir_member.isdir())
       # Compare only the rwx permission bits of the stored mode.
       permission_mask = stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO
       self.assertEqual(dir_member.mode & permission_mask, 0o777)
       archive.close()
Esempio n. 8
0
  def testCopyTarballToGcs_NoIgnoredFiles(self):
    """With nothing ignored, no skipped-files warnings are emitted."""
    dest_object = resources.REGISTRY.Create(
        collection='storage.objects', bucket='bucket', object='object')
    storage_stub = FakeStorageClient()

    project_dir = self.CreateTempDir('project')
    self._writeFile(os.path.join(project_dir, 'Dockerfile'), 'empty')

    tarball_bytes = snapshot.Snapshot(project_dir).CopyTarballToGCS(
        storage_stub, dest_object)
    archive = tarfile.open(fileobj=BytesIO(tarball_bytes), mode='r:*')
    self.assertEqual(len(archive.getmembers()), 1)
    self.assertEqual(archive.getmember('Dockerfile').size, 5)
    archive.close()

    # No .gcloudignore was generated, so no warnings should be printed.
    self.assertFalse(os.path.exists(os.path.join(project_dir, '.gcloudignore')))
    self.AssertErrNotContains('Some files were not included')
    self.AssertErrNotContains('Check the gcloud log')
Esempio n. 9
0
def _UploadSnapshot(source):
    """Uploads snapshot of the source directory.

    Args:
      source: str, Path to local directory to be uploaded

    Returns:
      Source message with uploaded source archive.

    Raises:
      BadFileException: if source directory does not exist.
      FunctionBuilderError: if source is invalid.
    """
    # Reject paths that don't exist or that point at a plain file.
    if not os.path.exists(source):
        raise c_exceptions.BadFileException(
            'could not find source [{src}]'.format(src=source))
    if os.path.isfile(source):
        raise FunctionBuilderError('cannot use local file for source')

    source_snapshot = snapshot.Snapshot(source)
    size_str = resource_transform.TransformSize(
        source_snapshot.uncompressed_size)
    log.status.Print(
        'Creating temporary tarball archive of {num_files} file(s)'
        ' totalling {size} before compression.'.format(
            num_files=len(source_snapshot.files), size=size_str))

    messages = cloudbuild_util.GetMessagesModule()
    gcs_client = storage_api.StorageClient()

    # Stage the tarball in the prepared staging object and reference it in
    # the returned Source message.
    staged_source_obj = source_snapshot.CopyTarballToGCS(
        gcs_client, _PrepareStagingObject())
    return messages.Source(
        storageSource=messages.StorageSource(
            bucket=staged_source_obj.bucket, object=staged_source_obj.name))
Esempio n. 10
0
    def Run(self, args):
        """This is what gets called when the user runs this command.

        Args:
          args: an argparse namespace. All the arguments that were provided to
            this command invocation.

        Returns:
          Some value that we want to have printed later.

        Raises:
          FailedBuildException: If the build is completed and not 'SUCCESS'.
        """
        # Default staging and log locations to the project's
        # <project>_cloudbuild bucket.
        if args.gcs_source_staging_dir is None:
            args.gcs_source_staging_dir = 'gs://{project}_cloudbuild/source'.format(
                project=properties.VALUES.core.project.Get(), )
        if args.gcs_log_dir is None:
            args.gcs_log_dir = 'gs://{project}_cloudbuild/logs'.format(
                project=properties.VALUES.core.project.Get(), )

        client = core_apis.GetClientInstance('cloudbuild', 'v1')
        messages = core_apis.GetMessagesModule('cloudbuild', 'v1')
        registry = self.context['registry']

        gcs_client = storage_api.StorageClient()

        # First, create the build request.
        build_timeout = properties.VALUES.container.build_timeout.Get()
        if build_timeout is not None:
            timeout_str = build_timeout + 's'
        else:
            timeout_str = None

        if args.tag:
            if 'gcr.io/' not in args.tag:
                raise c_exceptions.InvalidArgumentException(
                    '--tag',
                    'Tag value must be in the gcr.io/* or *.gcr.io/* namespace.'
                )
            build_config = messages.Build(
                images=[args.tag],
                steps=[
                    messages.BuildStep(
                        name='gcr.io/cloud-builders/docker',
                        args=['build', '--no-cache', '-t', args.tag, '.'],
                    ),
                ],
                timeout=timeout_str,
            )
        elif args.config:
            build_config = config.LoadCloudbuildConfig(args.config, messages)
        else:
            # Fix: previously build_config was left unbound here, so the code
            # below crashed with UnboundLocalError; fail with a clear message.
            raise c_exceptions.OneOfArgumentsRequiredException(
                ['--tag', '--config'],
                'Requires either a docker tag or a config file.')

        if build_config.timeout is None:
            build_config.timeout = timeout_str

        # Preserve the original archive extension for GCS or local-file
        # sources; directories are tarred into .tgz.
        suffix = '.tgz'
        if args.source.startswith('gs://') or os.path.isfile(args.source):
            _, suffix = os.path.splitext(args.source)

        # Next, stage the source to Cloud Storage.
        staged_object = '{stamp}_{tag_ish}{suffix}'.format(
            stamp=times.GetTimeStampFromDateTime(times.Now()),
            tag_ish='_'.join(build_config.images or 'null').replace('/', '_'),
            suffix=suffix,
        )
        gcs_source_staging_dir = registry.Parse(args.gcs_source_staging_dir,
                                                collection='storage.objects')
        gcs_client.CreateBucketIfNotExists(gcs_source_staging_dir.bucket)
        if gcs_source_staging_dir.object:
            staged_object = gcs_source_staging_dir.object + '/' + staged_object

        gcs_source_staging = registry.Create(
            collection='storage.objects',
            bucket=gcs_source_staging_dir.bucket,
            object=staged_object)

        if args.source.startswith('gs://'):
            # Source already lives in GCS: copy into the staging location.
            gcs_source = registry.Parse(args.source,
                                        collection='storage.objects')
            staged_source_obj = gcs_client.Copy(gcs_source, gcs_source_staging)
            build_config.source = messages.Source(
                storageSource=messages.StorageSource(
                    bucket=staged_source_obj.bucket,
                    object=staged_source_obj.name,
                    generation=staged_source_obj.generation,
                ))
        else:
            if not os.path.exists(args.source):
                raise c_exceptions.BadFileException(
                    'could not find source [{src}]'.format(src=args.source))
            if os.path.isdir(args.source):
                # Local directory: snapshot it into a tarball and upload.
                source_snapshot = snapshot.Snapshot(args.source)
                size_str = resource_transform.TransformSize(
                    source_snapshot.uncompressed_size)
                log.status.write(
                    'Creating temporary tarball archive of {num_files} file(s)'
                    ' totalling {size} before compression.\n'.format(
                        num_files=len(source_snapshot.files), size=size_str))
                staged_source_obj = source_snapshot.CopyTarballToGCS(
                    gcs_client, gcs_source_staging)
                build_config.source = messages.Source(
                    storageSource=messages.StorageSource(
                        bucket=staged_source_obj.bucket,
                        object=staged_source_obj.name,
                        generation=staged_source_obj.generation,
                    ))
            elif os.path.isfile(args.source):
                unused_root, ext = os.path.splitext(args.source)
                if ext not in _ALLOWED_SOURCE_EXT:
                    # Fix: the original never called .format, so the message
                    # printed a literal '{src}' placeholder.
                    raise c_exceptions.BadFileException(
                        'Local file [{src}] is none of '.format(
                            src=args.source) +
                        ', '.join(_ALLOWED_SOURCE_EXT))
                log.status.write('Uploading local file [{src}] to '
                                 '[gs://{bucket}/{object}]\n'.format(
                                     src=args.source,
                                     bucket=gcs_source_staging.bucket,
                                     object=gcs_source_staging.object,
                                 ))
                staged_source_obj = gcs_client.CopyFileToGCS(
                    storage_util.BucketReference.FromBucketUrl(
                        gcs_source_staging.bucket), args.source,
                    gcs_source_staging.object)
                build_config.source = messages.Source(
                    storageSource=messages.StorageSource(
                        bucket=staged_source_obj.bucket,
                        object=staged_source_obj.name,
                        generation=staged_source_obj.generation,
                    ))

        gcs_log_dir = registry.Parse(args.gcs_log_dir,
                                     collection='storage.objects')

        if gcs_log_dir.bucket != gcs_source_staging.bucket:
            # Create the logs bucket if it does not yet exist.
            gcs_client.CreateBucketIfNotExists(gcs_log_dir.bucket)
        build_config.logsBucket = 'gs://' + gcs_log_dir.bucket + '/' + gcs_log_dir.object

        log.debug('submitting build: ' + repr(build_config))

        # Start the build.
        op = client.projects_builds.Create(
            messages.CloudbuildProjectsBuildsCreateRequest(
                build=build_config,
                projectId=properties.VALUES.core.project.Get()))
        json = encoding.MessageToJson(op.metadata)
        build = encoding.JsonToMessage(messages.BuildOperationMetadata,
                                       json).build

        build_ref = registry.Create(collection='cloudbuild.projects.builds',
                                    projectId=build.projectId,
                                    id=build.id)

        log.CreatedResource(build_ref)
        if build.logUrl:
            log.status.write(
                'Logs are permanently available at [{log_url}]\n'.format(
                    log_url=build.logUrl))
        else:
            log.status.write('Logs are available in the Cloud Console.\n')

        # If the command is run --async, we just print out a reference to the
        # build. Fix: 'async' became a reserved keyword in Python 3.7, so the
        # attribute cannot be spelled 'args.async'; fetch it with getattr.
        if getattr(args, 'async'):
            return build

        # Otherwise, logs are streamed from GCS.
        build = cb_logs.CloudBuildClient(client, messages).Stream(build_ref)

        if build.status != messages.Build.StatusValueValuesEnum.SUCCESS:
            raise FailedBuildException(build.status)

        return build
Esempio n. 11
0
    def _StageSourceAndConfigFiles(self, args, messages, build):
        """Stages the source and config files in a staging Google Cloud Storage bucket.

        Args:
          args: argparse namespace from the DeployGKE command.
          messages: Cloud Build messages module.
          build: Cloud Build config; mutated in place (source, artifacts,
            steps).
        """

        project = properties.VALUES.core.project.Get(required=True)
        # Derive a bucket-name-safe identifier from the project id.
        safe_project = project.replace(':', '_')
        safe_project = safe_project.replace('.', '_')
        # The string 'google' is not allowed in bucket names.
        safe_project = safe_project.replace('google', 'elgoog')

        gcs_client = storage_api.StorageClient()

        default_bucket_name = '{}_cloudbuild'.format(safe_project)

        gcs_staging_dir_name = (args.gcs_staging_dir
                                if args.gcs_staging_dir else
                                'gs://{}/deploy'.format(default_bucket_name))

        # The staging dir may parse as an object path ('gs://bucket/dir') or
        # as a bare bucket ('gs://bucket'); fall back to the bucket collection.
        try:
            gcs_staging_dir = resources.REGISTRY.Parse(
                gcs_staging_dir_name, collection='storage.objects')
            gcs_staging_dir_obj = gcs_staging_dir.object
        except resources.WrongResourceCollectionException:
            gcs_staging_dir = resources.REGISTRY.Parse(
                gcs_staging_dir_name, collection='storage.buckets')
            gcs_staging_dir_obj = None

        gcs_client.CreateBucketIfNotExists(gcs_staging_dir.bucket)

        if args.gcs_staging_dir is None:
            # Check that the default bucket is also owned by the project (b/33046325)
            bucket_list_req = gcs_client.messages.StorageBucketsListRequest(
                project=project, prefix=default_bucket_name)
            bucket_list = gcs_client.client.buckets.List(bucket_list_req)

            if not any(bucket.id == default_bucket_name
                       for bucket in bucket_list.items):
                raise c_exceptions.RequiredArgumentException(
                    '--gcs-staging-dir',
                    'A bucket with name {} already exists and is owned by '
                    'another project. Specify a bucket using '
                    '--gcs-staging-dir.'.format(default_bucket_name))

        if args.source:
            # Keep the original extension when the source is already an
            # archive (GCS object or local file).
            suffix = '.tgz'
            if args.source.startswith('gs://') or os.path.isfile(args.source):
                _, suffix = os.path.splitext(args.source)

            # Timestamp plus uuid keeps staged object names unique.
            staged_source = 'source/{stamp}-{uuid}{suffix}'.format(
                stamp=times.GetTimeStampFromDateTime(times.Now()),
                uuid=uuid.uuid4().hex,
                suffix=suffix,
            )

            if gcs_staging_dir_obj:
                staged_source = gcs_staging_dir_obj + '/' + staged_source
            gcs_source_staging = resources.REGISTRY.Create(
                collection='storage.objects',
                bucket=gcs_staging_dir.bucket,
                object=staged_source)

            staged_source_obj = None

            if args.source.startswith('gs://'):
                # Source already lives in GCS: server-side rewrite into the
                # staging location.
                gcs_source = resources.REGISTRY.Parse(
                    args.source, collection='storage.objects')
                staged_source_obj = gcs_client.Rewrite(gcs_source,
                                                       gcs_source_staging)
            else:
                if not os.path.exists(args.source):
                    raise c_exceptions.BadFileException(
                        'could not find source [{src}]'.format(
                            src=args.source))
                elif os.path.isdir(args.source):
                    # Local directory: snapshot into a tarball and upload.
                    source_snapshot = snapshot.Snapshot(args.source)
                    size_str = resource_transform.TransformSize(
                        source_snapshot.uncompressed_size)
                    log.status.Print(
                        'Creating temporary tarball archive of {num_files} file(s)'
                        ' totalling {size} before compression.'.format(
                            num_files=len(source_snapshot.files),
                            size=size_str))
                    staged_source_obj = source_snapshot.CopyTarballToGCS(
                        gcs_client, gcs_source_staging)
                elif os.path.isfile(args.source):
                    # Local archive file: only known archive extensions.
                    unused_root, ext = os.path.splitext(args.source)
                    if ext not in _ALLOWED_SOURCE_EXT:
                        raise c_exceptions.BadFileException(
                            'Local file [{src}] is none of '.format(
                                src=args.source) +
                            ', '.join(_ALLOWED_SOURCE_EXT))
                    log.status.Print('Uploading local file [{src}] to '
                                     '[gs://{bucket}/{object}].'.format(
                                         src=args.source,
                                         bucket=gcs_source_staging.bucket,
                                         object=gcs_source_staging.object,
                                     ))
                    staged_source_obj = gcs_client.CopyFileToGCS(
                        args.source, gcs_source_staging)

            build.source = messages.Source(
                storageSource=messages.StorageSource(
                    bucket=staged_source_obj.bucket,
                    object=staged_source_obj.name,
                    generation=staged_source_obj.generation,
                ))
        if gcs_staging_dir_obj:
            config_path = gcs_staging_dir.bucket + '/' + gcs_staging_dir_obj
        else:
            config_path = gcs_staging_dir.bucket

        # Expanded configs produced by the build are exported as artifacts.
        build.artifacts = messages.Artifacts(objects=messages.ArtifactObjects(
            location='gs://{}/config/$BUILD_ID/expanded'.format(config_path),
            paths=['output/expanded/*'],
        ))

        # Add a step that copies the suggested configs next to the expanded
        # ones.
        build.steps.append(
            messages.BuildStep(
                name='gcr.io/cloud-builders/gsutil',
                args=[
                    'cp', '-r', 'output/suggested',
                    'gs://{}/config/$BUILD_ID/suggested'.format(config_path)
                ],
            ))
        return
def _SetSource(release_config,
               source,
               gcs_source_staging_dir,
               ignore_file,
               skaffold_version,
               location,
               hide_logs=False):
  """Set the source for the release config.

  Sets the source for the release config and creates a default Cloud Storage
  bucket with location for staging if gcs-source-staging-dir is not specified.

  Args:
    release_config: a Release message
    source: the location of the source files
    gcs_source_staging_dir: directory in google cloud storage to use for staging
    ignore_file: the ignore file to use
    skaffold_version: version of Skaffold binary
    location: the cloud region for the release
    hide_logs: whether to show logs, defaults to False

  Returns:
    Modified release_config
  """
  safe_project_id = staging_bucket_util.GetSafeProject()
  default_gcs_source = False
  default_bucket_name = staging_bucket_util.GetDefaultStagingBucket(
      safe_project_id, location)

  # No explicit staging dir: fall back to the default per-project bucket and
  # remember that, so bucket ownership can be verified below.
  if gcs_source_staging_dir is None:
    default_gcs_source = True
    gcs_source_staging_dir = _SOURCE_STAGING_TEMPLATE.format(
        default_bucket_name)

  if not gcs_source_staging_dir.startswith('gs://'):
    raise c_exceptions.InvalidArgumentException('--gcs-source-staging-dir',
                                                'must be a GCS bucket')

  gcs_client = storage_api.StorageClient()
  # Preserve the original extension when the source is already an archive.
  suffix = '.tgz'
  if source.startswith('gs://') or os.path.isfile(source):
    _, suffix = os.path.splitext(source)

  # Next, stage the source to Cloud Storage.
  # Timestamp plus uuid keeps staged object names unique.
  staged_object = '{stamp}-{uuid}{suffix}'.format(
      stamp=times.GetTimeStampFromDateTime(times.Now()),
      uuid=uuid.uuid4().hex,
      suffix=suffix,
  )
  gcs_source_staging_dir = resources.REGISTRY.Parse(
      gcs_source_staging_dir, collection='storage.objects')

  try:
    gcs_client.CreateBucketIfNotExists(
        gcs_source_staging_dir.bucket,
        location=location,
        check_ownership=default_gcs_source)
  except storage_api.BucketInWrongProjectError:
    # If we're using the default bucket but it already exists in a different
    # project, then it could belong to a malicious attacker (b/33046325).
    raise c_exceptions.RequiredArgumentException(
        'gcs-source-staging-dir',
        'A bucket with name {} already exists and is owned by '
        'another project. Specify a bucket using '
        '--gcs-source-staging-dir.'.format(default_bucket_name))

  if gcs_source_staging_dir.object:
    staged_object = gcs_source_staging_dir.object + '/' + staged_object
  gcs_source_staging = resources.REGISTRY.Create(
      collection='storage.objects',
      bucket=gcs_source_staging_dir.bucket,
      object=staged_object)
  if source.startswith('gs://'):
    # Source already lives in GCS: server-side rewrite into staging.
    gcs_source = resources.REGISTRY.Parse(source, collection='storage.objects')
    staged_source_obj = gcs_client.Rewrite(gcs_source, gcs_source_staging)
    release_config.skaffoldConfigUri = 'gs://{bucket}/{object}'.format(
        bucket=staged_source_obj.bucket, object=staged_source_obj.name)
  else:
    if os.path.isdir(source):
      # Local directory: snapshot into a tarball and upload it.
      source_snapshot = snapshot.Snapshot(source, ignore_file=ignore_file)
      size_str = resource_transform.TransformSize(
          source_snapshot.uncompressed_size)
      if not hide_logs:
        log.status.Print(
            'Creating temporary tarball archive of {num_files} file(s)'
            ' totalling {size} before compression.'.format(
                num_files=len(source_snapshot.files), size=size_str))
      staged_source_obj = source_snapshot.CopyTarballToGCS(
          gcs_client,
          gcs_source_staging,
          ignore_file=ignore_file,
          hide_logs=hide_logs)
      release_config.skaffoldConfigUri = 'gs://{bucket}/{object}'.format(
          bucket=staged_source_obj.bucket, object=staged_source_obj.name)
    elif os.path.isfile(source):
      # Local archive file: upload it as-is.
      if not hide_logs:
        log.status.Print('Uploading local file [{src}] to '
                         '[gs://{bucket}/{object}].'.format(
                             src=source,
                             bucket=gcs_source_staging.bucket,
                             object=gcs_source_staging.object,
                         ))
      staged_source_obj = gcs_client.CopyFileToGCS(source, gcs_source_staging)
      release_config.skaffoldConfigUri = 'gs://{bucket}/{object}'.format(
          bucket=staged_source_obj.bucket, object=staged_source_obj.name)

  if skaffold_version:
    release_config.skaffoldVersion = skaffold_version

  return release_config
Esempio n. 13
0
def _SetSource(build_config,
               messages,
               is_specified_source,
               no_source,
               source,
               gcs_source_staging_dir,
               ignore_file,
               hide_logs=False):
  """Set the source for the build config.

  Stages the user's source (local directory, local archive file, or existing
  GCS object) into a GCS staging location and records it on the build config.

  Args:
    build_config: Build message whose `source` field is populated in place.
    messages: Cloud Build messages module used to construct Source messages.
    is_specified_source: bool, True if the user explicitly provided --source.
    no_source: bool, True if the user passed --no-source.
    source: str or None, local path, local file, or gs:// URI of the source.
    gcs_source_staging_dir: str or None, gs:// URI of the staging directory;
      when None a default bucket derived from the project is used.
    ignore_file: str or None, override for the default .gcloudignore file.
    hide_logs: bool, if True suppress status messages while staging.

  Returns:
    The same build_config, with build_config.source set when a source is used.

  Raises:
    BucketForbiddenError: if the staging bucket cannot be accessed.
    c_exceptions.RequiredArgumentException: if the default staging bucket
      already exists in another project.
    c_exceptions.BadFileException: if the source path does not exist or a
      local file has a disallowed extension.
    c_exceptions.InvalidArgumentException: if source is omitted without
      passing --no-source.
  """
  default_gcs_source = False
  default_bucket_name = None
  if gcs_source_staging_dir is None:
    default_gcs_source = True
    default_bucket_name = staging_bucket_util.GetDefaultStagingBucket()
    gcs_source_staging_dir = 'gs://{}/source'.format(default_bucket_name)
  gcs_client = storage_api.StorageClient()

  # --no-source overrides the default --source.
  if not is_specified_source and no_source:
    source = None

  gcs_source_staging = None
  if source:
    # Local directories are tarred up as .tgz; existing files and GCS objects
    # keep their own extension.
    suffix = '.tgz'
    if source.startswith('gs://') or os.path.isfile(source):
      _, suffix = os.path.splitext(source)

    # Next, stage the source to Cloud Storage under a unique object name.
    staged_object = '{stamp}-{uuid}{suffix}'.format(
        stamp=times.GetTimeStampFromDateTime(times.Now()),
        uuid=uuid.uuid4().hex,
        suffix=suffix,
    )
    gcs_source_staging_dir = resources.REGISTRY.Parse(
        gcs_source_staging_dir, collection='storage.objects')

    try:
      gcs_client.CreateBucketIfNotExists(
          gcs_source_staging_dir.bucket, check_ownership=default_gcs_source)
    except api_exceptions.HttpForbiddenError:
      raise BucketForbiddenError(
          'The user is forbidden from accessing the bucket [{}]. Please check '
          'your organization\'s policy or if the user has the "serviceusage.services.use" permission'
          .format(gcs_source_staging_dir.bucket))
    except storage_api.BucketInWrongProjectError:
      # If we're using the default bucket but it already exists in a different
      # project, then it could belong to a malicious attacker (b/33046325).
      raise c_exceptions.RequiredArgumentException(
          'gcs-source-staging-dir',
          'A bucket with name {} already exists and is owned by '
          'another project. Specify a bucket using '
          '--gcs-source-staging-dir.'.format(default_bucket_name))

    if gcs_source_staging_dir.object:
      staged_object = gcs_source_staging_dir.object + '/' + staged_object
    gcs_source_staging = resources.REGISTRY.Create(
        collection='storage.objects',
        bucket=gcs_source_staging_dir.bucket,
        object=staged_object)

    if source.startswith('gs://'):
      # Source already lives in GCS: server-side copy into the staging object.
      gcs_source = resources.REGISTRY.Parse(
          source, collection='storage.objects')
      staged_source_obj = gcs_client.Rewrite(gcs_source, gcs_source_staging)
      build_config.source = messages.Source(
          storageSource=messages.StorageSource(
              bucket=staged_source_obj.bucket,
              object=staged_source_obj.name,
              generation=staged_source_obj.generation,
          ))
    else:
      if not os.path.exists(source):
        raise c_exceptions.BadFileException(
            'could not find source [{src}]'.format(src=source))
      if os.path.isdir(source):
        # Tar up the directory (honoring the ignore file) and upload it.
        source_snapshot = snapshot.Snapshot(source, ignore_file=ignore_file)
        size_str = resource_transform.TransformSize(
            source_snapshot.uncompressed_size)
        if not hide_logs:
          log.status.Print(
              'Creating temporary tarball archive of {num_files} file(s)'
              ' totalling {size} before compression.'.format(
                  num_files=len(source_snapshot.files), size=size_str))
        staged_source_obj = source_snapshot.CopyTarballToGCS(
            gcs_client,
            gcs_source_staging,
            ignore_file=ignore_file,
            hide_logs=hide_logs)
        build_config.source = messages.Source(
            storageSource=messages.StorageSource(
                bucket=staged_source_obj.bucket,
                object=staged_source_obj.name,
                generation=staged_source_obj.generation,
            ))
      elif os.path.isfile(source):
        unused_root, ext = os.path.splitext(source)
        if ext not in _ALLOWED_SOURCE_EXT:
          # Bug fix: the original concatenated the literal '{src}' placeholder
          # ('Local file [{src}] is none of ' + ...) without ever formatting
          # it, so users saw '[{src}]' verbatim in the error message.
          raise c_exceptions.BadFileException(
              'Local file [{src}] is none of {extensions}'.format(
                  src=source, extensions=', '.join(_ALLOWED_SOURCE_EXT)))
        if not hide_logs:
          log.status.Print('Uploading local file [{src}] to '
                           '[gs://{bucket}/{object}].'.format(
                               src=source,
                               bucket=gcs_source_staging.bucket,
                               object=gcs_source_staging.object,
                           ))
        staged_source_obj = gcs_client.CopyFileToGCS(source, gcs_source_staging)
        build_config.source = messages.Source(
            storageSource=messages.StorageSource(
                bucket=staged_source_obj.bucket,
                object=staged_source_obj.name,
                generation=staged_source_obj.generation,
            ))
  else:
    # No source
    if not no_source:
      raise c_exceptions.InvalidArgumentException(
          '--no-source', 'To omit source, use the --no-source flag.')

  return build_config
Esempio n. 14
0
  def Run(self, args):
    """This is what gets called when the user runs this command.

    Args:
      args: an argparse namespace. All the arguments that were provided to this
        command invocation.

    Returns:
      Some value that we want to have printed later.

    Raises:
      FailedBuildException: If the build is completed and not 'SUCCESS'.
    """

    # Derive a default staging-bucket name from the project id; ':' and '.'
    # are not valid in bucket names, so they are replaced with '_'.
    project = properties.VALUES.core.project.Get(required=True)
    safe_project = project.replace(':', '_')
    safe_project = safe_project.replace('.', '_')
    # The string 'google' is not allowed in bucket names.
    safe_project = safe_project.replace('google', 'elgoog')

    default_bucket_name = '{}_cloudbuild'.format(safe_project)

    # Remember whether the staging dir was defaulted; the ownership check
    # below only applies to the default bucket.
    default_gcs_source = False
    if args.gcs_source_staging_dir is None:
      default_gcs_source = True
      args.gcs_source_staging_dir = 'gs://{}/source'.format(default_bucket_name)

    client = cloudbuild_util.GetClientInstance()
    messages = cloudbuild_util.GetMessagesModule()

    gcs_client = storage_api.StorageClient()

    # First, create the build request.
    build_timeout = properties.VALUES.builds.timeout.Get()

    if build_timeout is not None:
      try:
        # A bare number is interpreted as seconds.
        build_timeout_secs = int(build_timeout)
      except ValueError:
        # Otherwise parse it as a duration string (e.g. '2h'), then convert
        # to whole seconds for the API.
        build_timeout_duration = times.ParseDuration(build_timeout)
        build_timeout_secs = int(build_timeout_duration.total_seconds)
      timeout_str = six.text_type(build_timeout_secs) + 's'
    else:
      timeout_str = None

    if args.tag is not None:
      if (properties.VALUES.builds.check_tag.GetBool() and
          'gcr.io/' not in args.tag):
        raise c_exceptions.InvalidArgumentException(
            '--tag',
            'Tag value must be in the gcr.io/* or *.gcr.io/* namespace.')
      if properties.VALUES.builds.use_kaniko.GetBool():
        # --no-cache with kaniko is expressed as a zero cache TTL.
        if args.no_cache:
          ttl = '0h'
        else:
          ttl = '{}h'.format(properties.VALUES.builds.kaniko_cache_ttl.Get())
        build_config = messages.Build(
            steps=[
                messages.BuildStep(
                    name=properties.VALUES.builds.kaniko_image.Get(),
                    args=[
                        '--destination', args.tag, '--cache', 'true',
                        '--cache-ttl', ttl
                    ],
                ),
            ],
            timeout=timeout_str,
            substitutions=cloudbuild_util.EncodeSubstitutions(
                args.substitutions, messages))
      else:
        if args.no_cache:
          raise c_exceptions.InvalidArgumentException(
              'no-cache',
              'Cannot specify --no-cache if builds/use_kaniko property is '
              'False')
        # Default: single docker-build step producing the tagged image.
        build_config = messages.Build(
            images=[args.tag],
            steps=[
                messages.BuildStep(
                    name='gcr.io/cloud-builders/docker',
                    args=[
                        'build', '--network', 'cloudbuild', '--no-cache', '-t',
                        args.tag, '.'
                    ],
                ),
            ],
            timeout=timeout_str,
            substitutions=cloudbuild_util.EncodeSubstitutions(
                args.substitutions, messages))
    elif args.config is not None:
      if args.no_cache:
        raise c_exceptions.ConflictingArgumentsException(
            '--config', '--no-cache')
      if not args.config:
        raise c_exceptions.InvalidArgumentException(
            '--config', 'Config file path must not be empty.')
      build_config = config.LoadCloudbuildConfigFromPath(
          args.config, messages, params=args.substitutions)
    else:
      raise c_exceptions.OneOfArgumentsRequiredException(
          ['--tag', '--config'],
          'Requires either a docker tag or a config file.')

    # If timeout was set by flag, overwrite the config file.
    if timeout_str:
      build_config.timeout = timeout_str

    # --no-source overrides the default --source.
    if not args.IsSpecified('source') and args.no_source:
      args.source = None

    gcs_source_staging = None
    if args.source:
      # Local directories are tarred up as .tgz; existing files and GCS
      # objects keep their own extension.
      suffix = '.tgz'
      if args.source.startswith('gs://') or os.path.isfile(args.source):
        _, suffix = os.path.splitext(args.source)

      # Next, stage the source to Cloud Storage.
      staged_object = '{stamp}-{uuid}{suffix}'.format(
          stamp=times.GetTimeStampFromDateTime(times.Now()),
          uuid=uuid.uuid4().hex,
          suffix=suffix,
      )
      gcs_source_staging_dir = resources.REGISTRY.Parse(
          args.gcs_source_staging_dir, collection='storage.objects')

      # We create the bucket (if it does not exist) first. If we do an existence
      # check and then create the bucket ourselves, it would be possible for an
      # attacker to get lucky and beat us to creating the bucket. Block on this
      # creation to avoid this race condition.
      gcs_client.CreateBucketIfNotExists(gcs_source_staging_dir.bucket)

      # If no bucket is specified (for the source `default_gcs_source`), check
      # that the default bucket is also owned by the project (b/33046325).
      if default_gcs_source:
        # This request returns only the buckets owned by the project.
        bucket_list_req = gcs_client.messages.StorageBucketsListRequest(
            project=project, prefix=default_bucket_name)
        bucket_list = gcs_client.client.buckets.List(bucket_list_req)
        found_bucket = False
        for bucket in bucket_list.items:
          if bucket.id == default_bucket_name:
            found_bucket = True
            break
        if not found_bucket:
          # NOTE(review): this inner check is redundant — we are already
          # inside `if default_gcs_source:` above.
          if default_gcs_source:
            raise c_exceptions.RequiredArgumentException(
                'gcs_source_staging_dir',
                'A bucket with name {} already exists and is owned by '
                'another project. Specify a bucket using '
                '--gcs_source_staging_dir.'.format(default_bucket_name))

      if gcs_source_staging_dir.object:
        staged_object = gcs_source_staging_dir.object + '/' + staged_object
      gcs_source_staging = resources.REGISTRY.Create(
          collection='storage.objects',
          bucket=gcs_source_staging_dir.bucket,
          object=staged_object)

      if args.source.startswith('gs://'):
        # Source already lives in GCS: server-side copy into staging object.
        gcs_source = resources.REGISTRY.Parse(
            args.source, collection='storage.objects')
        staged_source_obj = gcs_client.Rewrite(gcs_source, gcs_source_staging)
        build_config.source = messages.Source(
            storageSource=messages.StorageSource(
                bucket=staged_source_obj.bucket,
                object=staged_source_obj.name,
                generation=staged_source_obj.generation,
            ))
      else:
        if not os.path.exists(args.source):
          raise c_exceptions.BadFileException(
              'could not find source [{src}]'.format(src=args.source))
        if os.path.isdir(args.source):
          # Tar up the directory (honoring the ignore file) and upload it.
          source_snapshot = snapshot.Snapshot(args.source,
                                              ignore_file=args.ignore_file)
          size_str = resource_transform.TransformSize(
              source_snapshot.uncompressed_size)
          log.status.Print(
              'Creating temporary tarball archive of {num_files} file(s)'
              ' totalling {size} before compression.'.format(
                  num_files=len(source_snapshot.files), size=size_str))
          staged_source_obj = source_snapshot.CopyTarballToGCS(
              gcs_client, gcs_source_staging, ignore_file=args.ignore_file)
          build_config.source = messages.Source(
              storageSource=messages.StorageSource(
                  bucket=staged_source_obj.bucket,
                  object=staged_source_obj.name,
                  generation=staged_source_obj.generation,
              ))
        elif os.path.isfile(args.source):
          unused_root, ext = os.path.splitext(args.source)
          if ext not in _ALLOWED_SOURCE_EXT:
            # NOTE(review): '{src}' is never formatted here, so the message
            # shows the literal placeholder — should use .format(src=...).
            raise c_exceptions.BadFileException(
                'Local file [{src}] is none of ' +
                ', '.join(_ALLOWED_SOURCE_EXT))
          log.status.Print('Uploading local file [{src}] to '
                           '[gs://{bucket}/{object}].'.format(
                               src=args.source,
                               bucket=gcs_source_staging.bucket,
                               object=gcs_source_staging.object,
                           ))
          staged_source_obj = gcs_client.CopyFileToGCS(args.source,
                                                       gcs_source_staging)
          build_config.source = messages.Source(
              storageSource=messages.StorageSource(
                  bucket=staged_source_obj.bucket,
                  object=staged_source_obj.name,
                  generation=staged_source_obj.generation,
              ))
    else:
      # No source
      if not args.no_source:
        raise c_exceptions.InvalidArgumentException(
            '--no-source', 'To omit source, use the --no-source flag.')

    if args.gcs_log_dir:
      gcs_log_dir = resources.REGISTRY.Parse(
          args.gcs_log_dir, collection='storage.objects')

      build_config.logsBucket = ('gs://' + gcs_log_dir.bucket + '/' +
                                 gcs_log_dir.object)

    # Machine type.
    if args.machine_type is not None:
      machine_type = Submit._machine_type_flag_map.GetEnumForChoice(
          args.machine_type)
      if not build_config.options:
        build_config.options = messages.BuildOptions()
      build_config.options.machineType = machine_type

    # Disk size.
    if args.disk_size is not None:
      disk_size = compute_utils.BytesToGb(args.disk_size)
      if not build_config.options:
        build_config.options = messages.BuildOptions()
      build_config.options.diskSizeGb = int(disk_size)

    log.debug('submitting build: ' + repr(build_config))

    # Start the build.
    op = client.projects_builds.Create(
        messages.CloudbuildProjectsBuildsCreateRequest(
            build=build_config, projectId=properties.VALUES.core.project.Get()))
    json = encoding.MessageToJson(op.metadata)
    build = encoding.JsonToMessage(messages.BuildOperationMetadata, json).build

    build_ref = resources.REGISTRY.Create(
        collection='cloudbuild.projects.builds',
        projectId=build.projectId,
        id=build.id)

    log.CreatedResource(build_ref)
    if build.logUrl:
      log.status.Print(
          'Logs are available at [{log_url}].'.format(log_url=build.logUrl))
    else:
      log.status.Print('Logs are available in the Cloud Console.')

    # If the command is run --async, we just print out a reference to the build.
    # NOTE(review): 'async' is a reserved keyword in Python 3.7+, so
    # `args.async` only parses on older interpreters — confirm the target
    # runtime (newer gcloud code uses `args.async_`).
    if args.async:
      return build

    mash_handler = execution.MashHandler(
        execution.GetCancelBuildHandler(client, messages, build_ref))

    # Otherwise, logs are streamed from GCS.
    with execution_utils.CtrlCSection(mash_handler):
      build = cb_logs.CloudBuildClient(client, messages).Stream(build_ref)

    if build.status == messages.Build.StatusValueValuesEnum.TIMEOUT:
      log.status.Print(
          'Your build timed out. Use the [--timeout=DURATION] flag to change '
          'the timeout threshold.')

    if build.status != messages.Build.StatusValueValuesEnum.SUCCESS:
      raise FailedBuildException(build)

    return build
Esempio n. 15
0
def _SetSource(build_config, messages, is_specified_source, no_source, source,
               gcs_source_staging_dir, ignore_file):
    """Set the source for the build config.

    Stages the user's source (local directory, local archive file, or existing
    GCS object) into a GCS staging location and records it on the build config.

    Args:
        build_config: Build message whose `source` field is populated in place.
        messages: Cloud Build messages module used to construct Source
            messages.
        is_specified_source: bool, True if the user explicitly gave --source.
        no_source: bool, True if the user passed --no-source.
        source: str or None, local path, local file, or gs:// URI.
        gcs_source_staging_dir: str or None, gs:// URI of the staging
            directory; when None a default project bucket is used.
        ignore_file: str or None, override for the default .gcloudignore file.

    Returns:
        The same build_config, with build_config.source set when a source is
        used.

    Raises:
        c_exceptions.RequiredArgumentException: if the default staging bucket
            already exists in another project.
        c_exceptions.BadFileException: if the source path does not exist or a
            local file has a disallowed extension.
        c_exceptions.InvalidArgumentException: if source is omitted without
            passing --no-source.
    """
    default_gcs_source = False
    default_bucket_name = None
    if gcs_source_staging_dir is None:
        default_gcs_source = True
        default_bucket_name = staging_bucket_util.GetDefaultStagingBucket()
        gcs_source_staging_dir = 'gs://{}/source'.format(default_bucket_name)
    gcs_client = storage_api.StorageClient()

    # --no-source overrides the default --source.
    if not is_specified_source and no_source:
        source = None

    gcs_source_staging = None
    if source:
        # Local directories are tarred up as .tgz; existing files and GCS
        # objects keep their own extension.
        suffix = '.tgz'
        if source.startswith('gs://') or os.path.isfile(source):
            _, suffix = os.path.splitext(source)

        # Next, stage the source to Cloud Storage under a unique object name.
        staged_object = '{stamp}-{uuid}{suffix}'.format(
            stamp=times.GetTimeStampFromDateTime(times.Now()),
            uuid=uuid.uuid4().hex,
            suffix=suffix,
        )
        gcs_source_staging_dir = resources.REGISTRY.Parse(
            gcs_source_staging_dir, collection='storage.objects')

        # We create the bucket (if it does not exist) first. If we do an existence
        # check and then create the bucket ourselves, it would be possible for an
        # attacker to get lucky and beat us to creating the bucket. Block on this
        # creation to avoid this race condition.
        gcs_client.CreateBucketIfNotExists(gcs_source_staging_dir.bucket)

        # If no bucket is specified (for the source `default_gcs_source`), check
        # that the default bucket is also owned by the project (b/33046325).
        if default_gcs_source and not staging_bucket_util.BucketIsInProject(
                gcs_client, default_bucket_name):
            raise c_exceptions.RequiredArgumentException(
                'gcs-source-staging-dir',
                'A bucket with name {} already exists and is owned by '
                'another project. Specify a bucket using '
                '--gcs-source-staging-dir.'.format(default_bucket_name))

        if gcs_source_staging_dir.object:
            staged_object = gcs_source_staging_dir.object + '/' + staged_object
        gcs_source_staging = resources.REGISTRY.Create(
            collection='storage.objects',
            bucket=gcs_source_staging_dir.bucket,
            object=staged_object)

        if source.startswith('gs://'):
            # Source already lives in GCS: server-side copy into staging.
            gcs_source = resources.REGISTRY.Parse(source,
                                                  collection='storage.objects')
            staged_source_obj = gcs_client.Rewrite(gcs_source,
                                                   gcs_source_staging)
            build_config.source = messages.Source(
                storageSource=messages.StorageSource(
                    bucket=staged_source_obj.bucket,
                    object=staged_source_obj.name,
                    generation=staged_source_obj.generation,
                ))
        else:
            if not os.path.exists(source):
                raise c_exceptions.BadFileException(
                    'could not find source [{src}]'.format(src=source))
            if os.path.isdir(source):
                # Tar up the directory (honoring the ignore file), upload it.
                source_snapshot = snapshot.Snapshot(source,
                                                    ignore_file=ignore_file)
                size_str = resource_transform.TransformSize(
                    source_snapshot.uncompressed_size)
                log.status.Print(
                    'Creating temporary tarball archive of {num_files} file(s)'
                    ' totalling {size} before compression.'.format(
                        num_files=len(source_snapshot.files), size=size_str))
                staged_source_obj = source_snapshot.CopyTarballToGCS(
                    gcs_client, gcs_source_staging, ignore_file=ignore_file)
                build_config.source = messages.Source(
                    storageSource=messages.StorageSource(
                        bucket=staged_source_obj.bucket,
                        object=staged_source_obj.name,
                        generation=staged_source_obj.generation,
                    ))
            elif os.path.isfile(source):
                unused_root, ext = os.path.splitext(source)
                if ext not in _ALLOWED_SOURCE_EXT:
                    # Bug fix: the original concatenated the literal '{src}'
                    # placeholder without formatting it, so users saw
                    # '[{src}]' verbatim in the error message.
                    raise c_exceptions.BadFileException(
                        'Local file [{src}] is none of {extensions}'.format(
                            src=source,
                            extensions=', '.join(_ALLOWED_SOURCE_EXT)))
                log.status.Print('Uploading local file [{src}] to '
                                 '[gs://{bucket}/{object}].'.format(
                                     src=source,
                                     bucket=gcs_source_staging.bucket,
                                     object=gcs_source_staging.object,
                                 ))
                staged_source_obj = gcs_client.CopyFileToGCS(
                    source, gcs_source_staging)
                build_config.source = messages.Source(
                    storageSource=messages.StorageSource(
                        bucket=staged_source_obj.bucket,
                        object=staged_source_obj.name,
                        generation=staged_source_obj.generation,
                    ))
    else:
        # No source
        if not no_source:
            raise c_exceptions.InvalidArgumentException(
                '--no-source', 'To omit source, use the --no-source flag.')

    return build_config
Esempio n. 16
0
  def Run(self, args):
    """This is what gets called when the user runs this command.

    Args:
      args: an argparse namespace. All the arguments that were provided to this
        command invocation.

    Returns:
      Some value that we want to have printed later.

    Raises:
      FailedBuildException: If the build is completed and not 'SUCCESS'.
    """

    # Derive a default bucket name from the project id; ':' and '.' are not
    # valid in bucket names, so they are replaced with '_'.
    project = properties.VALUES.core.project.Get()
    safe_project = project.replace(':', '_')
    safe_project = safe_project.replace('.', '_')
    # The string 'google' is not allowed in bucket names.
    safe_project = safe_project.replace('google', 'elgoog')

    default_bucket_name = '{}_cloudbuild'.format(safe_project)

    # Remember whether the source staging dir was defaulted; the ownership
    # check below only applies to defaulted buckets.
    default_gcs_source = False
    if args.gcs_source_staging_dir is None:
      default_gcs_source = True
      args.gcs_source_staging_dir = 'gs://{}/source'.format(default_bucket_name)

    # Same for the logs dir default.
    default_gcs_log_dir = False
    if args.gcs_log_dir is None:
      default_gcs_log_dir = True
      args.gcs_log_dir = 'gs://{}/logs'.format(default_bucket_name)

    client = cloudbuild_util.GetClientInstance()
    messages = cloudbuild_util.GetMessagesModule()

    gcs_client = storage_api.StorageClient()

    # First, create the build request.
    build_timeout = properties.VALUES.container.build_timeout.Get()

    if build_timeout is not None:
      try:
        # A bare number is interpreted as seconds.
        build_timeout_secs = int(build_timeout)
      except ValueError:
        # Otherwise parse it as a duration string (e.g. '2h') and convert to
        # whole seconds for the API.
        build_timeout_duration = times.ParseDuration(build_timeout)
        build_timeout_secs = int(build_timeout_duration.total_seconds)
      timeout_str = str(build_timeout_secs) + 's'
    else:
      timeout_str = None

    if args.tag:
      if 'gcr.io/' not in args.tag:
        raise c_exceptions.InvalidArgumentException(
            '--tag',
            'Tag value must be in the gcr.io/* or *.gcr.io/* namespace.')
      # Single docker-build step producing the tagged image.
      build_config = messages.Build(
          images=[args.tag],
          steps=[
              messages.BuildStep(
                  name='gcr.io/cloud-builders/docker',
                  args=['build', '--no-cache', '-t', args.tag, '.'],
              ),
          ],
          timeout=timeout_str,
          substitutions=cloudbuild_util.EncodeSubstitutions(args.substitutions,
                                                            messages)
      )
    elif args.config:
      # NOTE(review): if neither --tag nor --config is given, build_config is
      # never assigned and the code below raises NameError — presumably the
      # arg parser enforces one of them; confirm, or add an explicit error.
      build_config = config.LoadCloudbuildConfigFromPath(
          args.config, messages, params=args.substitutions)

    # If timeout was set by flag, overwrite the config file.
    if timeout_str:
      build_config.timeout = timeout_str

    # Local directories are tarred up as .tgz; existing files and GCS objects
    # keep their own extension.
    suffix = '.tgz'
    if args.source.startswith('gs://') or os.path.isfile(args.source):
      _, suffix = os.path.splitext(args.source)

    # Next, stage the source to Cloud Storage.
    # NOTE(review): unlike the newer variant, the staged object name has no
    # random component — concurrent submissions in the same second could
    # collide; confirm this is acceptable here.
    staged_object = '{stamp}{suffix}'.format(
        stamp=times.GetTimeStampFromDateTime(times.Now()),
        suffix=suffix,
    )
    gcs_source_staging_dir = resources.REGISTRY.Parse(
        args.gcs_source_staging_dir, collection='storage.objects')

    # We first try to create the bucket, before doing all the checks, in order
    # to avoid a race condition. If we do the check first, an attacker could
    # be lucky enough to create the bucket after the check and before this
    # bucket creation.
    gcs_client.CreateBucketIfNotExists(gcs_source_staging_dir.bucket)

    # If no bucket is specified (for the source `default_gcs_source` or for the
    # logs `default_gcs_log_dir`), check that the default bucket is also owned
    # by the project (b/33046325).
    if default_gcs_source or default_gcs_log_dir:
      # This request returns only the buckets owned by the project.
      bucket_list_req = gcs_client.messages.StorageBucketsListRequest(
          project=project,
          prefix=default_bucket_name)
      bucket_list = gcs_client.client.buckets.List(bucket_list_req)
      found_bucket = False
      for bucket in bucket_list.items:
        if bucket.id == default_bucket_name:
          found_bucket = True
          break
      if not found_bucket:
        if default_gcs_source:
          raise c_exceptions.RequiredArgumentException(
              'gcs_source_staging_dir',
              'A bucket with name {} already exists and is owned by '
              'another project. Specify a bucket using '
              '--gcs_source_staging_dir.'.format(default_bucket_name))
        elif default_gcs_log_dir:
          raise c_exceptions.RequiredArgumentException(
              'gcs-log-dir',
              'A bucket with name {} already exists and is owned by '
              'another project. Specify a bucket to hold build logs '
              'using --gcs-log-dir.'.format(default_bucket_name))

    if gcs_source_staging_dir.object:
      staged_object = gcs_source_staging_dir.object + '/' + staged_object

    gcs_source_staging = resources.REGISTRY.Create(
        collection='storage.objects',
        bucket=gcs_source_staging_dir.bucket,
        object=staged_object)

    if args.source.startswith('gs://'):
      # Source already lives in GCS: server-side copy into the staging object.
      gcs_source = resources.REGISTRY.Parse(
          args.source, collection='storage.objects')
      staged_source_obj = gcs_client.Rewrite(gcs_source, gcs_source_staging)
      build_config.source = messages.Source(
          storageSource=messages.StorageSource(
              bucket=staged_source_obj.bucket,
              object=staged_source_obj.name,
              generation=staged_source_obj.generation,
          ))
    else:
      if not os.path.exists(args.source):
        raise c_exceptions.BadFileException(
            'could not find source [{src}]'.format(src=args.source))
      if os.path.isdir(args.source):
        # Tar up the directory and upload it.
        source_snapshot = snapshot.Snapshot(args.source)
        size_str = resource_transform.TransformSize(
            source_snapshot.uncompressed_size)
        log.status.Print(
            'Creating temporary tarball archive of {num_files} file(s)'
            ' totalling {size} before compression.'.format(
                num_files=len(source_snapshot.files),
                size=size_str))
        staged_source_obj = source_snapshot.CopyTarballToGCS(
            gcs_client, gcs_source_staging)
        build_config.source = messages.Source(
            storageSource=messages.StorageSource(
                bucket=staged_source_obj.bucket,
                object=staged_source_obj.name,
                generation=staged_source_obj.generation,
            ))
      elif os.path.isfile(args.source):
        unused_root, ext = os.path.splitext(args.source)
        if ext not in _ALLOWED_SOURCE_EXT:
          # NOTE(review): '{src}' is never formatted here, so the message
          # shows the literal placeholder — should use .format(src=...).
          raise c_exceptions.BadFileException(
              'Local file [{src}] is none of '+', '.join(_ALLOWED_SOURCE_EXT))
        log.status.Print(
            'Uploading local file [{src}] to '
            '[gs://{bucket}/{object}].'.format(
                src=args.source,
                bucket=gcs_source_staging.bucket,
                object=gcs_source_staging.object,
            ))
        # Older CopyFileToGCS signature: takes a bucket reference, source
        # path, and destination object name.
        staged_source_obj = gcs_client.CopyFileToGCS(
            storage_util.BucketReference.FromBucketUrl(
                gcs_source_staging.bucket),
            args.source, gcs_source_staging.object)
        build_config.source = messages.Source(
            storageSource=messages.StorageSource(
                bucket=staged_source_obj.bucket,
                object=staged_source_obj.name,
                generation=staged_source_obj.generation,
            ))

    gcs_log_dir = resources.REGISTRY.Parse(
        args.gcs_log_dir, collection='storage.objects')

    if gcs_log_dir.bucket != gcs_source_staging.bucket:
      # Create the logs bucket if it does not yet exist.
      gcs_client.CreateBucketIfNotExists(gcs_log_dir.bucket)
    build_config.logsBucket = 'gs://'+gcs_log_dir.bucket+'/'+gcs_log_dir.object

    log.debug('submitting build: '+repr(build_config))

    # Start the build.
    op = client.projects_builds.Create(
        messages.CloudbuildProjectsBuildsCreateRequest(
            build=build_config,
            projectId=properties.VALUES.core.project.Get()))
    json = encoding.MessageToJson(op.metadata)
    build = encoding.JsonToMessage(messages.BuildOperationMetadata, json).build

    build_ref = resources.REGISTRY.Create(
        collection='cloudbuild.projects.builds',
        projectId=build.projectId,
        id=build.id)

    log.CreatedResource(build_ref)
    if build.logUrl:
      log.status.Print('Logs are available at [{log_url}].'.format(
          log_url=build.logUrl))
    else:
      log.status.Print('Logs are available in the Cloud Console.')

    # If the command is run --async, we just print out a reference to the build.
    # NOTE(review): 'async' is a reserved keyword in Python 3.7+, so
    # `args.async` only parses on older interpreters — confirm the target
    # runtime (newer gcloud code uses `args.async_`).
    if args.async:
      return build

    mash_handler = execution.MashHandler(
        execution.GetCancelBuildHandler(client, messages, build_ref))

    # Otherwise, logs are streamed from GCS.
    with execution_utils.CtrlCSection(mash_handler):
      build = cb_logs.CloudBuildClient(client, messages).Stream(build_ref)

    if build.status == messages.Build.StatusValueValuesEnum.TIMEOUT:
      log.status.Print(
          'Your build timed out. Use the [--timeout=DURATION] flag to change '
          'the timeout threshold.')

    if build.status != messages.Build.StatusValueValuesEnum.SUCCESS:
      raise FailedBuildException(build)

    return build
Esempio n. 17
0
def CreateBuildConfig(tag, no_cache, messages, substitutions, arg_config,
                      is_specified_source, no_source, source,
                      gcs_source_staging_dir, ignore_file, arg_gcs_log_dir,
                      arg_machine_type, arg_disk_size):
    """Returns a build config.

    Builds a Cloud Build `Build` message from either a docker tag
    (optionally via kaniko) or an explicit cloudbuild config file, stages
    the source (local dir, local archive, or gs:// object) into a Cloud
    Storage staging location, and applies log-dir, machine-type and
    disk-size overrides.

    Args:
      tag: str or None. Docker tag to build.
      no_cache: bool. If True, disable the kaniko cache (only valid when
        builds/use_kaniko is set and --tag is used).
      messages: Module containing the Cloud Build API messages.
      substitutions: dict or None. Build substitutions.
      arg_config: str or None. Path to a cloudbuild config file.
      is_specified_source: bool. True if --source was passed explicitly.
      no_source: bool. True if --no-source was passed.
      source: str or None. Local path or gs:// URI of the build source.
      gcs_source_staging_dir: str or None. gs:// directory to stage the
        source into; a default per-project bucket is used when None.
      ignore_file: str or None. Override for the .gcloudignore file.
      arg_gcs_log_dir: str or None. gs:// directory for build logs.
      arg_machine_type: str or None. Machine type flag value.
      arg_disk_size: str or None. Disk size flag value.

    Returns:
      messages.Build, the build configuration ready to submit.

    Raises:
      c_exceptions.InvalidArgumentException: for bad flag values.
      c_exceptions.ConflictingArgumentsException: for conflicting flags.
      c_exceptions.OneOfArgumentsRequiredException: if neither --tag nor
        --config was given.
      c_exceptions.RequiredArgumentException: if the default staging
        bucket is owned by another project.
      c_exceptions.BadFileException: if the source is missing or a local
        file has a disallowed extension.
    """
    # Get the build timeout.
    build_timeout = properties.VALUES.builds.timeout.Get()
    if build_timeout is not None:
        try:
            # A bare number is interpreted as seconds.
            build_timeout_secs = int(build_timeout)
        except ValueError:
            build_timeout_duration = times.ParseDuration(build_timeout)
            build_timeout_secs = int(build_timeout_duration.total_seconds)
        timeout_str = six.text_type(build_timeout_secs) + 's'
    else:
        timeout_str = None

    if tag is not None:
        if (properties.VALUES.builds.check_tag.GetBool()
                and 'gcr.io/' not in tag):
            raise c_exceptions.InvalidArgumentException(
                '--tag',
                'Tag value must be in the gcr.io/* or *.gcr.io/* namespace.')
        if properties.VALUES.builds.use_kaniko.GetBool():
            # A zero TTL disables the kaniko cache entirely.
            if no_cache:
                ttl = '0h'
            else:
                ttl = '{}h'.format(
                    properties.VALUES.builds.kaniko_cache_ttl.Get())
            build_config = messages.Build(
                steps=[
                    messages.BuildStep(
                        name=properties.VALUES.builds.kaniko_image.Get(),
                        args=[
                            '--destination',
                            tag,
                            '--cache',
                            '--cache-ttl',
                            ttl,
                            '--cache-dir',
                            '',
                        ],
                    ),
                ],
                timeout=timeout_str,
                substitutions=cloudbuild_util.EncodeSubstitutions(
                    substitutions, messages))
        else:
            if no_cache:
                raise c_exceptions.InvalidArgumentException(
                    'no-cache',
                    'Cannot specify --no-cache if builds/use_kaniko property is '
                    'False')
            build_config = messages.Build(
                images=[tag],
                steps=[
                    messages.BuildStep(
                        name='gcr.io/cloud-builders/docker',
                        args=[
                            'build', '--network', 'cloudbuild', '--no-cache',
                            '-t', tag, '.'
                        ],
                    ),
                ],
                timeout=timeout_str,
                substitutions=cloudbuild_util.EncodeSubstitutions(
                    substitutions, messages))
    elif arg_config is not None:
        if no_cache:
            raise c_exceptions.ConflictingArgumentsException(
                '--config', '--no-cache')
        if not arg_config:
            raise c_exceptions.InvalidArgumentException(
                '--config', 'Config file path must not be empty.')
        build_config = config.LoadCloudbuildConfigFromPath(
            arg_config, messages, params=substitutions)
    else:
        raise c_exceptions.OneOfArgumentsRequiredException(
            ['--tag', '--config'],
            'Requires either a docker tag or a config file.')

    # If timeout was set by flag, overwrite the config file.
    if timeout_str:
        build_config.timeout = timeout_str

    # Set the source for the build config.
    default_gcs_source = False
    default_bucket_name = None
    if gcs_source_staging_dir is None:
        default_gcs_source = True
        default_bucket_name = staging_bucket_util.GetDefaultStagingBucket()
        gcs_source_staging_dir = 'gs://{}/source'.format(default_bucket_name)
    gcs_client = storage_api.StorageClient()

    # --no-source overrides the default --source.
    if not is_specified_source and no_source:
        source = None

    gcs_source_staging = None
    if source:
        # Directories are uploaded as .tgz snapshots; files and GCS
        # objects keep their own extension.
        suffix = '.tgz'
        if source.startswith('gs://') or os.path.isfile(source):
            _, suffix = os.path.splitext(source)

        # Next, stage the source to Cloud Storage.
        staged_object = '{stamp}-{uuid}{suffix}'.format(
            stamp=times.GetTimeStampFromDateTime(times.Now()),
            uuid=uuid.uuid4().hex,
            suffix=suffix,
        )
        gcs_source_staging_dir = resources.REGISTRY.Parse(
            gcs_source_staging_dir, collection='storage.objects')

        # We create the bucket (if it does not exist) first. If we do an existence
        # check and then create the bucket ourselves, it would be possible for an
        # attacker to get lucky and beat us to creating the bucket. Block on this
        # creation to avoid this race condition.
        gcs_client.CreateBucketIfNotExists(gcs_source_staging_dir.bucket)

        # If no bucket is specified (for the source `default_gcs_source`), check
        # that the default bucket is also owned by the project (b/33046325).
        if default_gcs_source and not staging_bucket_util.BucketIsInProject(
                gcs_client, default_bucket_name):
            raise c_exceptions.RequiredArgumentException(
                'gcs-source-staging-dir',
                'A bucket with name {} already exists and is owned by '
                'another project. Specify a bucket using '
                '--gcs-source-staging-dir.'.format(default_bucket_name))

        if gcs_source_staging_dir.object:
            staged_object = gcs_source_staging_dir.object + '/' + staged_object
        gcs_source_staging = resources.REGISTRY.Create(
            collection='storage.objects',
            bucket=gcs_source_staging_dir.bucket,
            object=staged_object)

        if source.startswith('gs://'):
            # Server-side copy of an already-uploaded source object.
            gcs_source = resources.REGISTRY.Parse(source,
                                                  collection='storage.objects')
            staged_source_obj = gcs_client.Rewrite(gcs_source,
                                                   gcs_source_staging)
            build_config.source = messages.Source(
                storageSource=messages.StorageSource(
                    bucket=staged_source_obj.bucket,
                    object=staged_source_obj.name,
                    generation=staged_source_obj.generation,
                ))
        else:
            if not os.path.exists(source):
                raise c_exceptions.BadFileException(
                    'could not find source [{src}]'.format(src=source))
            if os.path.isdir(source):
                source_snapshot = snapshot.Snapshot(source,
                                                    ignore_file=ignore_file)
                size_str = resource_transform.TransformSize(
                    source_snapshot.uncompressed_size)
                log.status.Print(
                    'Creating temporary tarball archive of {num_files} file(s)'
                    ' totalling {size} before compression.'.format(
                        num_files=len(source_snapshot.files), size=size_str))
                staged_source_obj = source_snapshot.CopyTarballToGCS(
                    gcs_client, gcs_source_staging, ignore_file=ignore_file)
                build_config.source = messages.Source(
                    storageSource=messages.StorageSource(
                        bucket=staged_source_obj.bucket,
                        object=staged_source_obj.name,
                        generation=staged_source_obj.generation,
                    ))
            elif os.path.isfile(source):
                unused_root, ext = os.path.splitext(source)
                if ext not in _ALLOWED_SOURCE_EXT:
                    # Fix: apply .format so the actual path (not the literal
                    # '{src}' placeholder) appears in the error message.
                    raise c_exceptions.BadFileException(
                        'Local file [{src}] is none of '.format(src=source) +
                        ', '.join(_ALLOWED_SOURCE_EXT))
                log.status.Print('Uploading local file [{src}] to '
                                 '[gs://{bucket}/{object}].'.format(
                                     src=source,
                                     bucket=gcs_source_staging.bucket,
                                     object=gcs_source_staging.object,
                                 ))
                staged_source_obj = gcs_client.CopyFileToGCS(
                    source, gcs_source_staging)
                build_config.source = messages.Source(
                    storageSource=messages.StorageSource(
                        bucket=staged_source_obj.bucket,
                        object=staged_source_obj.name,
                        generation=staged_source_obj.generation,
                    ))
    else:
        # No source
        if not no_source:
            raise c_exceptions.InvalidArgumentException(
                '--no-source', 'To omit source, use the --no-source flag.')

    # Set a Google Cloud Storage directory to hold build logs.
    if arg_gcs_log_dir:
        gcs_log_dir = resources.REGISTRY.Parse(arg_gcs_log_dir,
                                               collection='storage.objects')
        build_config.logsBucket = ('gs://' + gcs_log_dir.bucket + '/' +
                                   gcs_log_dir.object)

    # Set the machine type used to run the build.
    if arg_machine_type is not None:
        machine_type = flags.GetMachineType(arg_machine_type)
        if not build_config.options:
            build_config.options = messages.BuildOptions()
        build_config.options.machineType = machine_type

    # Set the disk size used to run the build.
    if arg_disk_size is not None:
        disk_size = compute_utils.BytesToGb(arg_disk_size)
        if not build_config.options:
            build_config.options = messages.BuildOptions()
        build_config.options.diskSizeGb = int(disk_size)

    return build_config
Esempio n. 18
0
    def _StageSource(self, source, gcs_staging_dir_bucket,
                     gcs_staging_dir_object):
        """Stages source onto the provided bucket and returns its reference.

    Args:
      source: Path to source repo as a directory on a local disk or a
        gzipped archive file (.tar.gz) in Google Cloud Storage.
      gcs_staging_dir_bucket: Bucket name of staging directory.
      gcs_staging_dir_object: Bucket object of staging directory.

    Returns:
      Reference to the staged source, which has bucket, name, and generation
        fields.
    """

        # Local directories are tarballed to .tgz; files and GCS objects
        # keep whatever extension they already have.
        archive_suffix = '.tgz'
        if source.startswith('gs://') or os.path.isfile(source):
            _, archive_suffix = os.path.splitext(source)

        # Unique destination name: timestamp + random hex + extension.
        object_name = 'source/{stamp}-{uuid}{suffix}'.format(
            stamp=times.GetTimeStampFromDateTime(times.Now()),
            uuid=uuid.uuid4().hex,
            suffix=archive_suffix,
        )
        if gcs_staging_dir_object:
            object_name = gcs_staging_dir_object + '/' + object_name

        staging_ref = resources.REGISTRY.Create(
            collection='storage.objects',
            bucket=gcs_staging_dir_bucket,
            object=object_name)
        client = storage_api.StorageClient()

        if source.startswith('gs://'):
            # Already in GCS: server-side copy into the staging location.
            src_ref = resources.REGISTRY.Parse(
                source, collection='storage.objects')
            staged_source = client.Rewrite(src_ref, staging_ref)
        elif not os.path.exists(source):
            raise c_exceptions.BadFileException(
                'could not find source [{src}]'.format(src=source))
        elif os.path.isdir(source):
            # Snapshot the directory and upload it as a tarball.
            src_snapshot = snapshot.Snapshot(source)
            size_str = resource_transform.TransformSize(
                src_snapshot.uncompressed_size)
            log.status.Print(
                'Creating temporary tarball archive of {num_files} file(s)'
                ' totalling {size} before compression.'.format(
                    num_files=len(src_snapshot.files), size=size_str))
            staged_source = src_snapshot.CopyTarballToGCS(
                client, staging_ref)
        elif os.path.isfile(source):
            _, ext = os.path.splitext(source)
            if ext not in _ALLOWED_SOURCE_EXT:
                raise c_exceptions.BadFileException(
                    'Local file [{src}] is none of '.format(src=source) +
                    ', '.join(_ALLOWED_SOURCE_EXT))
            log.status.Print('Uploading local file [{src}] to '
                             '[gs://{bucket}/{object}].'.format(
                                 src=source,
                                 bucket=staging_ref.bucket,
                                 object=staging_ref.object,
                             ))
            staged_source = client.CopyFileToGCS(source, staging_ref)

        return staged_source
Esempio n. 19
0
def _SetSource(release_config,
               source,
               gcs_source_staging_dir,
               gcs_render_dir,
               ignore_file,
               hide_logs=False):
    """Set the source for the release config.

    Stages the given source (local directory, local archive, or gs://
    object) into a Cloud Storage staging location and records the staged
    object's URI in `release_config.skaffoldConfigUri`.

    Args:
      release_config: Release message to mutate.
      source: str. Local path or gs:// URI of the skaffold source.
      gcs_source_staging_dir: str or None. gs:// staging directory; a
        default per-project bucket path is used when None.
      gcs_render_dir: str or None. gs:// bucket for rendered manifests;
        stored on `release_config.manifestBucket` when provided.
      ignore_file: str or None. Override for the .gcloudignore file.
      hide_logs: bool. Suppress status messages when True.

    Returns:
      The mutated release_config.

    Raises:
      c_exceptions.InvalidArgumentException: if a staging/render dir is
        not a GCS path.
      c_exceptions.RequiredArgumentException: if the default staging
        bucket is owned by another project.
      c_exceptions.BadFileException: if the source is missing or a local
        file has a disallowed extension.
    """
    safe_project_id = staging_bucket_util.GetSafeProject()
    default_gcs_source = False
    default_bucket_name = staging_bucket_util.GetDefaultStagingBucket(
        safe_project_id)
    if gcs_source_staging_dir is None:
        default_gcs_source = True
        gcs_source_staging_dir = _SKAFFOLD_CONFIG_PATH.format(
            default_bucket_name)

    if not gcs_source_staging_dir.startswith('gs://'):
        raise c_exceptions.InvalidArgumentException('--gcs-source-staging-dir',
                                                    'must be a GCS bucket')

    if gcs_render_dir:
        if not gcs_render_dir.startswith('gs://'):
            raise c_exceptions.InvalidArgumentException(
                '--gcs-render-dir', 'must be a GCS bucket')
        # Leave this field unset as default. The server will create a new bucket.
        release_config.manifestBucket = gcs_render_dir

    gcs_client = storage_api.StorageClient()
    # Directories are uploaded as .tgz; files/objects keep their extension.
    suffix = '.tgz'
    if source.startswith('gs://') or os.path.isfile(source):
        _, suffix = os.path.splitext(source)

    # Next, stage the source to Cloud Storage.
    staged_object = '{stamp}-{uuid}{suffix}'.format(
        stamp=times.GetTimeStampFromDateTime(times.Now()),
        uuid=uuid.uuid4().hex,
        suffix=suffix,
    )
    gcs_source_staging_dir = resources.REGISTRY.Parse(
        gcs_source_staging_dir, collection='storage.objects')

    try:
        gcs_client.CreateBucketIfNotExists(gcs_source_staging_dir.bucket,
                                           check_ownership=default_gcs_source)
    except storage_api.BucketInWrongProjectError:
        # If we're using the default bucket but it already exists in a different
        # project, then it could belong to a malicious attacker (b/33046325).
        raise c_exceptions.RequiredArgumentException(
            'gcs-source-staging-dir',
            'A bucket with name {} already exists and is owned by '
            'another project. Specify a bucket using '
            '--gcs-source-staging-dir.'.format(default_bucket_name))

    if gcs_source_staging_dir.object:
        staged_object = gcs_source_staging_dir.object + '/' + staged_object
    gcs_source_staging = resources.REGISTRY.Create(
        collection='storage.objects',
        bucket=gcs_source_staging_dir.bucket,
        object=staged_object)
    if source.startswith('gs://'):
        # Server-side copy of an already-uploaded source object.
        gcs_source = resources.REGISTRY.Parse(source,
                                              collection='storage.objects')
        staged_source_obj = gcs_client.Rewrite(gcs_source, gcs_source_staging)
        release_config.skaffoldConfigUri = 'gs://{bucket}/{object}'.format(
            bucket=staged_source_obj.bucket, object=staged_source_obj.name)
    else:
        if not os.path.exists(source):
            raise c_exceptions.BadFileException(
                'could not find source [{src}]'.format(src=source))
        if os.path.isdir(source):
            source_snapshot = snapshot.Snapshot(source,
                                                ignore_file=ignore_file)
            size_str = resource_transform.TransformSize(
                source_snapshot.uncompressed_size)
            if not hide_logs:
                log.status.Print(
                    'Creating temporary tarball archive of {num_files} file(s)'
                    ' totalling {size} before compression.'.format(
                        num_files=len(source_snapshot.files), size=size_str))
            staged_source_obj = source_snapshot.CopyTarballToGCS(
                gcs_client,
                gcs_source_staging,
                ignore_file=ignore_file,
                hide_logs=hide_logs)
            release_config.skaffoldConfigUri = 'gs://{bucket}/{object}'.format(
                bucket=staged_source_obj.bucket, object=staged_source_obj.name)
        elif os.path.isfile(source):
            _, ext = os.path.splitext(source)
            if ext not in _ALLOWED_SOURCE_EXT:
                # Fix: apply .format so the actual path (not the literal
                # '{src}' placeholder) appears in the error message.
                raise c_exceptions.BadFileException(
                    'local file [{src}] is none of '.format(src=source) +
                    ', '.join(_ALLOWED_SOURCE_EXT))
            if not hide_logs:
                log.status.Print('Uploading local file [{src}] to '
                                 '[gs://{bucket}/{object}].'.format(
                                     src=source,
                                     bucket=gcs_source_staging.bucket,
                                     object=gcs_source_staging.object,
                                 ))
            staged_source_obj = gcs_client.CopyFileToGCS(
                source, gcs_source_staging)
            release_config.skaffoldConfigUri = 'gs://{bucket}/{object}'.format(
                bucket=staged_source_obj.bucket, object=staged_source_obj.name)
    return release_config