Code example #1
  def SetUp(self):
    self.StartPatch('time.sleep')  # To speed up tests with polling

    self.mocked_cloudbuild_v1 = mock.Client(
        core_apis.GetClientClass('cloudbuild', 'v1'))
    self.mocked_cloudbuild_v1.Mock()
    self.addCleanup(self.mocked_cloudbuild_v1.Unmock)
    self.cloudbuild_v1_messages = core_apis.GetMessagesModule(
        'cloudbuild', 'v1')

    self.mocked_storage_v1 = mock.Client(
        core_apis.GetClientClass('storage', 'v1'))
    self.mocked_storage_v1.Mock()
    self.addCleanup(self.mocked_storage_v1.Unmock)
    self.storage_v1_messages = core_apis.GetMessagesModule('storage', 'v1')

    properties.VALUES.core.project.Set('my-project')

    messages = self.cloudbuild_v1_messages
    self._statuses = messages.Build.StatusValueValuesEnum
    self._vmtypes = messages.BuildOptions.MachineTypeValueValuesEnum

    frozen_time = times.ParseDateTime('2016-05-26T00:05:00.000000Z')
    frozen_uuid = uuid.uuid4()
    self.frozen_zip_filename = 'source/{frozen_time}-{frozen_uuid}.zip'.format(
        frozen_time=times.GetTimeStampFromDateTime(frozen_time),
        frozen_uuid=frozen_uuid.hex)
    self.frozen_tgz_filename = 'source/{frozen_time}-{frozen_uuid}.tgz'.format(
        frozen_time=times.GetTimeStampFromDateTime(frozen_time),
        frozen_uuid=frozen_uuid.hex)
    self.StartObjectPatch(times, 'Now', return_value=frozen_time)
    self.StartObjectPatch(uuid, 'uuid4', return_value=frozen_uuid)
    self.StartObjectPatch(os.path, 'getsize', return_value=100)
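
Note: the fixture above freezes times.Now and uuid.uuid4 so the staged filenames are deterministic. A minimal standalone sketch of the resulting name, with datetime.timestamp() standing in for times.GetTimeStampFromDateTime (both yield float seconds since the Unix epoch):

import datetime
import uuid

frozen_time = datetime.datetime(2016, 5, 26, 0, 5, 0,
                                tzinfo=datetime.timezone.utc)
frozen_uuid = uuid.uuid4()
# The fixture formats 'source/{stamp}-{uuid}.zip' from these frozen values.
print('source/{stamp}-{uuid}.zip'.format(
    stamp=frozen_time.timestamp(), uuid=frozen_uuid.hex))
# -> source/1464221100.0-<32 hex digits>.zip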
Code example #2
    def SetUp(self):
        properties.VALUES.core.project.Set('my-project')
        self.StartPatch('time.sleep')  # To speed up tests with polling

        self.mocked_cloudbuild_v1 = mock.Client(
            core_apis.GetClientClass('cloudbuild', 'v1'))
        self.mocked_cloudbuild_v1.Mock()
        self.addCleanup(self.mocked_cloudbuild_v1.Unmock)
        self.build_msg = core_apis.GetMessagesModule('cloudbuild', 'v1')
        self._step_statuses = self.build_msg.BuildStep.StatusValueValuesEnum
        self._statuses = self.build_msg.Build.StatusValueValuesEnum
        self._sub_options = self.build_msg.BuildOptions.SubstitutionOptionValueValuesEnum

        self.mocked_storage_v1 = mock.Client(
            core_apis.GetClientClass('storage', 'v1'))
        self.mocked_storage_v1.Mock()
        self.addCleanup(self.mocked_storage_v1.Unmock)
        self.storage_msg = core_apis.GetMessagesModule('storage', 'v1')

        self.frozen_time = '2019-07-29T00:05:00.000000Z'
        frozen_time = times.ParseDateTime(self.frozen_time)
        frozen_uuid = uuid.uuid4()
        self.frozen_tgz_filename = '{frozen_time}-{frozen_uuid}.tgz'.format(
            frozen_time=times.GetTimeStampFromDateTime(frozen_time),
            frozen_uuid=frozen_uuid.hex)
        self.frozen_tgz_filepath = 'deploy/source/{}'.format(
            self.frozen_tgz_filename)
        self.StartObjectPatch(times, 'Now', return_value=frozen_time)
        self.StartObjectPatch(uuid, 'uuid4', return_value=frozen_uuid)
        self.StartObjectPatch(os.path, 'getsize', return_value=100)
Code example #3
def GetSSHKeyExpirationFromArgs(args):
    """Converts flags to an ssh key expiration in datetime and micros."""
    if args.ssh_key_expiration:
        # this argument is checked in ParseFutureDatetime to be sure that it
        # is not already expired.  I.e. the expiration should be in the future.
        expiration = args.ssh_key_expiration
    elif args.ssh_key_expire_after:
        expiration = times.Now() + datetime.timedelta(
            seconds=args.ssh_key_expire_after)
    else:
        return None, None

    expiration_micros = times.GetTimeStampFromDateTime(expiration) * 1e6
    return expiration, int(expiration_micros)
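
A hedged usage sketch for GetSSHKeyExpirationFromArgs: args mimics the argparse namespace the command would supply, and ssh_key_expire_after is assumed to be a number of seconds (ssh_key_expiration would already be a parsed datetime).

import argparse

args = argparse.Namespace(ssh_key_expiration=None, ssh_key_expire_after=3600)
expiration, expiration_micros = GetSSHKeyExpirationFromArgs(args)
# expiration is a datetime one hour after times.Now(); expiration_micros is
# the same instant as integer microseconds since the epoch, i.e.
# int(times.GetTimeStampFromDateTime(expiration) * 1e6).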
Code example #4
def _RewriteTimeTerm(key, op, operand):
    """Rewrites <createTime op operand>."""
    if op not in ['<', '<=', '=', ':', '>=', '>']:
        return None
    try:
        dt = times.ParseDateTime(operand)
    except ValueError as e:
        raise ValueError(
            '{operand}: date-time value expected for {key}: {error}'.format(
                operand=operand, key=key, error=str(e)))
    seconds = int(times.GetTimeStampFromDateTime(dt))
    if op == ':':
        op = '='
    elif op == '<':
        op = '<='
        seconds -= 1
    elif op == '>':
        op = '>='
        seconds += 1
    return '{key} {op} {seconds}'.format(key=key, op=op, seconds=seconds)
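
For illustration, assuming the operand parses as UTC: a strict '<' bound is rewritten into the inclusive '<=' one second earlier, presumably because the rewritten term is compared in whole epoch seconds.

term = _RewriteTimeTerm('createTime', '<', '2016-05-26T00:00:00Z')
# ParseDateTime gives 1464220800 epoch seconds; '<' becomes '<=' and one
# second is subtracted, so term == 'createTime <= 1464220799'.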
Code example #5
File: submit.py Project: krisztinagy/master_thesis
  def Run(self, args):
    """This is what gets called when the user runs this command.

    Args:
      args: an argparse namespace. All the arguments that were provided to this
        command invocation.

    Returns:
      Some value that we want to have printed later.

    Raises:
      FailedBuildException: If the build is completed and not 'SUCCESS'.
    """

    project = properties.VALUES.core.project.Get()
    safe_project = project.replace(':', '_')
    safe_project = safe_project.replace('.', '_')
    # The string 'google' is not allowed in bucket names.
    safe_project = safe_project.replace('google', 'elgoog')

    default_bucket_name = '{}_cloudbuild'.format(safe_project)

    default_gcs_source = False
    if args.gcs_source_staging_dir is None:
      default_gcs_source = True
      args.gcs_source_staging_dir = 'gs://{}/source'.format(default_bucket_name)

    default_gcs_log_dir = False
    if args.gcs_log_dir is None:
      default_gcs_log_dir = True
      args.gcs_log_dir = 'gs://{}/logs'.format(default_bucket_name)

    client = cloudbuild_util.GetClientInstance()
    messages = cloudbuild_util.GetMessagesModule()

    gcs_client = storage_api.StorageClient()

    # First, create the build request.
    build_timeout = properties.VALUES.container.build_timeout.Get()

    if build_timeout is not None:
      try:
        # A bare number is interpreted as seconds.
        build_timeout_secs = int(build_timeout)
      except ValueError:
        build_timeout_duration = times.ParseDuration(build_timeout)
        build_timeout_secs = int(build_timeout_duration.total_seconds)
      timeout_str = str(build_timeout_secs) + 's'
    else:
      timeout_str = None

    if args.tag:
      if 'gcr.io/' not in args.tag:
        raise c_exceptions.InvalidArgumentException(
            '--tag',
            'Tag value must be in the gcr.io/* or *.gcr.io/* namespace.')
      build_config = messages.Build(
          images=[args.tag],
          steps=[
              messages.BuildStep(
                  name='gcr.io/cloud-builders/docker',
                  args=['build', '--no-cache', '-t', args.tag, '.'],
              ),
          ],
          timeout=timeout_str,
          substitutions=cloudbuild_util.EncodeSubstitutions(args.substitutions,
                                                            messages)
      )
    elif args.config:
      build_config = config.LoadCloudbuildConfigFromPath(
          args.config, messages, params=args.substitutions)

    # If timeout was set by flag, overwrite the config file.
    if timeout_str:
      build_config.timeout = timeout_str

    suffix = '.tgz'
    if args.source.startswith('gs://') or os.path.isfile(args.source):
      _, suffix = os.path.splitext(args.source)

    # Next, stage the source to Cloud Storage.
    staged_object = '{stamp}{suffix}'.format(
        stamp=times.GetTimeStampFromDateTime(times.Now()),
        suffix=suffix,
    )
    gcs_source_staging_dir = resources.REGISTRY.Parse(
        args.gcs_source_staging_dir, collection='storage.objects')

    # We first try to create the bucket, before doing all the checks, in order
    # to avoid a race condition. If we do the check first, an attacker could
    # be lucky enough to create the bucket after the check and before this
    # bucket creation.
    gcs_client.CreateBucketIfNotExists(gcs_source_staging_dir.bucket)

    # If no bucket is specified (for the source `default_gcs_source` or for the
    # logs `default_gcs_log_dir`), check that the default bucket is also owned
    # by the project (b/33046325).
    if default_gcs_source or default_gcs_log_dir:
      # This request returns only the buckets owned by the project.
      bucket_list_req = gcs_client.messages.StorageBucketsListRequest(
          project=project,
          prefix=default_bucket_name)
      bucket_list = gcs_client.client.buckets.List(bucket_list_req)
      found_bucket = False
      for bucket in bucket_list.items:
        if bucket.id == default_bucket_name:
          found_bucket = True
          break
      if not found_bucket:
        if default_gcs_source:
          raise c_exceptions.RequiredArgumentException(
              'gcs_source_staging_dir',
              'A bucket with name {} already exists and is owned by '
              'another project. Specify a bucket using '
              '--gcs_source_staging_dir.'.format(default_bucket_name))
        elif default_gcs_log_dir:
          raise c_exceptions.RequiredArgumentException(
              'gcs-log-dir',
              'A bucket with name {} already exists and is owned by '
              'another project. Specify a bucket to hold build logs '
              'using --gcs-log-dir.'.format(default_bucket_name))

    if gcs_source_staging_dir.object:
      staged_object = gcs_source_staging_dir.object + '/' + staged_object

    gcs_source_staging = resources.REGISTRY.Create(
        collection='storage.objects',
        bucket=gcs_source_staging_dir.bucket,
        object=staged_object)

    if args.source.startswith('gs://'):
      gcs_source = resources.REGISTRY.Parse(
          args.source, collection='storage.objects')
      staged_source_obj = gcs_client.Rewrite(gcs_source, gcs_source_staging)
      build_config.source = messages.Source(
          storageSource=messages.StorageSource(
              bucket=staged_source_obj.bucket,
              object=staged_source_obj.name,
              generation=staged_source_obj.generation,
          ))
    else:
      if not os.path.exists(args.source):
        raise c_exceptions.BadFileException(
            'could not find source [{src}]'.format(src=args.source))
      if os.path.isdir(args.source):
        source_snapshot = snapshot.Snapshot(args.source)
        size_str = resource_transform.TransformSize(
            source_snapshot.uncompressed_size)
        log.status.Print(
            'Creating temporary tarball archive of {num_files} file(s)'
            ' totalling {size} before compression.'.format(
                num_files=len(source_snapshot.files),
                size=size_str))
        staged_source_obj = source_snapshot.CopyTarballToGCS(
            gcs_client, gcs_source_staging)
        build_config.source = messages.Source(
            storageSource=messages.StorageSource(
                bucket=staged_source_obj.bucket,
                object=staged_source_obj.name,
                generation=staged_source_obj.generation,
            ))
      elif os.path.isfile(args.source):
        unused_root, ext = os.path.splitext(args.source)
        if ext not in _ALLOWED_SOURCE_EXT:
          raise c_exceptions.BadFileException(
              'Local file [{src}] is none of {exts}'.format(
                  src=args.source, exts=', '.join(_ALLOWED_SOURCE_EXT)))
        log.status.Print(
            'Uploading local file [{src}] to '
            '[gs://{bucket}/{object}].'.format(
                src=args.source,
                bucket=gcs_source_staging.bucket,
                object=gcs_source_staging.object,
            ))
        staged_source_obj = gcs_client.CopyFileToGCS(
            storage_util.BucketReference.FromBucketUrl(
                gcs_source_staging.bucket),
            args.source, gcs_source_staging.object)
        build_config.source = messages.Source(
            storageSource=messages.StorageSource(
                bucket=staged_source_obj.bucket,
                object=staged_source_obj.name,
                generation=staged_source_obj.generation,
            ))

    gcs_log_dir = resources.REGISTRY.Parse(
        args.gcs_log_dir, collection='storage.objects')

    if gcs_log_dir.bucket != gcs_source_staging.bucket:
      # Create the logs bucket if it does not yet exist.
      gcs_client.CreateBucketIfNotExists(gcs_log_dir.bucket)
    build_config.logsBucket = 'gs://'+gcs_log_dir.bucket+'/'+gcs_log_dir.object

    log.debug('submitting build: '+repr(build_config))

    # Start the build.
    op = client.projects_builds.Create(
        messages.CloudbuildProjectsBuildsCreateRequest(
            build=build_config,
            projectId=properties.VALUES.core.project.Get()))
    json = encoding.MessageToJson(op.metadata)
    build = encoding.JsonToMessage(messages.BuildOperationMetadata, json).build

    build_ref = resources.REGISTRY.Create(
        collection='cloudbuild.projects.builds',
        projectId=build.projectId,
        id=build.id)

    log.CreatedResource(build_ref)
    if build.logUrl:
      log.status.Print('Logs are available at [{log_url}].'.format(
          log_url=build.logUrl))
    else:
      log.status.Print('Logs are available in the Cloud Console.')

    # If the command is run --async, we just print out a reference to the build.
    if args.async:
      return build

    mash_handler = execution.MashHandler(
        execution.GetCancelBuildHandler(client, messages, build_ref))

    # Otherwise, logs are streamed from GCS.
    with execution_utils.CtrlCSection(mash_handler):
      build = cb_logs.CloudBuildClient(client, messages).Stream(build_ref)

    if build.status == messages.Build.StatusValueValuesEnum.TIMEOUT:
      log.status.Print(
          'Your build timed out. Use the [--timeout=DURATION] flag to change '
          'the timeout threshold.')

    if build.status != messages.Build.StatusValueValuesEnum.SUCCESS:
      raise FailedBuildException(build)

    return build
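
The timeout handling near the top of Run accepts either a bare number (seconds) or a duration string. A sketch of both paths, assuming as the code above does that times.ParseDuration accepts strings like '2h' and that the returned duration exposes total_seconds:

for build_timeout in ('120', '2h'):
    try:
        build_timeout_secs = int(build_timeout)  # bare number: seconds
    except ValueError:
        build_timeout_duration = times.ParseDuration(build_timeout)
        build_timeout_secs = int(build_timeout_duration.total_seconds)
    print(str(build_timeout_secs) + 's')
# -> '120s', then '7200s'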
Code example #6
def FormatDateTime(string):
    """Returns a yyyy-mm-dd hh:mm:ss formatted date/time for string."""
    dt = times.ParseDateTime(string)
    if not times.GetTimeStampFromDateTime(dt):
        return None
    return times.FormatDateTime(dt, '%Y-%m-%d %H:%M:%S')
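
Usage sketch; note that because the function tests the raw timestamp for truthiness, a date-time falling exactly on the Unix epoch (timestamp 0) also returns None:

print(FormatDateTime('2016-05-26T00:05:00.000000Z'))
# -> '2016-05-26 00:05:00' (rendered in the parsed datetime's timezone)
print(FormatDateTime('1970-01-01T00:00:00.000000Z'))
# -> None, since a timestamp of 0 is falsy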
Code example #7
File: submit_util.py Project: PinTrees/novelhub
def _SetSource(build_config, messages, is_specified_source, no_source, source,
               gcs_source_staging_dir, ignore_file):
    """Set the source for the build config."""
    default_gcs_source = False
    default_bucket_name = None
    if gcs_source_staging_dir is None:
        default_gcs_source = True
        default_bucket_name = staging_bucket_util.GetDefaultStagingBucket()
        gcs_source_staging_dir = 'gs://{}/source'.format(default_bucket_name)
    gcs_client = storage_api.StorageClient()

    # --no-source overrides the default --source.
    if not is_specified_source and no_source:
        source = None

    gcs_source_staging = None
    if source:
        suffix = '.tgz'
        if source.startswith('gs://') or os.path.isfile(source):
            _, suffix = os.path.splitext(source)

        # Next, stage the source to Cloud Storage.
        staged_object = '{stamp}-{uuid}{suffix}'.format(
            stamp=times.GetTimeStampFromDateTime(times.Now()),
            uuid=uuid.uuid4().hex,
            suffix=suffix,
        )
        gcs_source_staging_dir = resources.REGISTRY.Parse(
            gcs_source_staging_dir, collection='storage.objects')

        # We create the bucket (if it does not exist) first. If we do an existence
        # check and then create the bucket ourselves, it would be possible for an
        # attacker to get lucky and beat us to creating the bucket. Block on this
        # creation to avoid this race condition.
        gcs_client.CreateBucketIfNotExists(gcs_source_staging_dir.bucket)

        # If no bucket is specified (for the source `default_gcs_source`), check
        # that the default bucket is also owned by the project (b/33046325).
        if default_gcs_source and not staging_bucket_util.BucketIsInProject(
                gcs_client, default_bucket_name):
            raise c_exceptions.RequiredArgumentException(
                'gcs-source-staging-dir',
                'A bucket with name {} already exists and is owned by '
                'another project. Specify a bucket using '
                '--gcs-source-staging-dir.'.format(default_bucket_name))

        if gcs_source_staging_dir.object:
            staged_object = gcs_source_staging_dir.object + '/' + staged_object
        gcs_source_staging = resources.REGISTRY.Create(
            collection='storage.objects',
            bucket=gcs_source_staging_dir.bucket,
            object=staged_object)

        if source.startswith('gs://'):
            gcs_source = resources.REGISTRY.Parse(source,
                                                  collection='storage.objects')
            staged_source_obj = gcs_client.Rewrite(gcs_source,
                                                   gcs_source_staging)
            build_config.source = messages.Source(
                storageSource=messages.StorageSource(
                    bucket=staged_source_obj.bucket,
                    object=staged_source_obj.name,
                    generation=staged_source_obj.generation,
                ))
        else:
            if not os.path.exists(source):
                raise c_exceptions.BadFileException(
                    'could not find source [{src}]'.format(src=source))
            if os.path.isdir(source):
                source_snapshot = snapshot.Snapshot(source,
                                                    ignore_file=ignore_file)
                size_str = resource_transform.TransformSize(
                    source_snapshot.uncompressed_size)
                log.status.Print(
                    'Creating temporary tarball archive of {num_files} file(s)'
                    ' totalling {size} before compression.'.format(
                        num_files=len(source_snapshot.files), size=size_str))
                staged_source_obj = source_snapshot.CopyTarballToGCS(
                    gcs_client, gcs_source_staging, ignore_file=ignore_file)
                build_config.source = messages.Source(
                    storageSource=messages.StorageSource(
                        bucket=staged_source_obj.bucket,
                        object=staged_source_obj.name,
                        generation=staged_source_obj.generation,
                    ))
            elif os.path.isfile(source):
                unused_root, ext = os.path.splitext(source)
                if ext not in _ALLOWED_SOURCE_EXT:
                    raise c_exceptions.BadFileException(
                        'Local file [{src}] is none of {exts}'.format(
                            src=source, exts=', '.join(_ALLOWED_SOURCE_EXT)))
                log.status.Print('Uploading local file [{src}] to '
                                 '[gs://{bucket}/{object}].'.format(
                                     src=source,
                                     bucket=gcs_source_staging.bucket,
                                     object=gcs_source_staging.object,
                                 ))
                staged_source_obj = gcs_client.CopyFileToGCS(
                    source, gcs_source_staging)
                build_config.source = messages.Source(
                    storageSource=messages.StorageSource(
                        bucket=staged_source_obj.bucket,
                        object=staged_source_obj.name,
                        generation=staged_source_obj.generation,
                    ))
    else:
        # No source
        if not no_source:
            raise c_exceptions.InvalidArgumentException(
                '--no-source', 'To omit source, use the --no-source flag.')

    return build_config
Code example #8
    def Run(self, args):
        """This is what gets called when the user runs this command.

    Args:
      args: an argparse namespace. All the arguments that were provided to this
        command invocation.

    Returns:
      Some value that we want to have printed later.

    Raises:
      FailedBuildException: If the build is completed and not 'SUCCESS'.
    """

        if args.gcs_source_staging_dir is None:
            args.gcs_source_staging_dir = 'gs://{project}_cloudbuild/source'.format(
                project=properties.VALUES.core.project.Get(), )
        if args.gcs_log_dir is None:
            args.gcs_log_dir = 'gs://{project}_cloudbuild/logs'.format(
                project=properties.VALUES.core.project.Get(), )

        client = core_apis.GetClientInstance('cloudbuild', 'v1')
        messages = core_apis.GetMessagesModule('cloudbuild', 'v1')
        registry = self.context['registry']

        gcs_client = storage_api.StorageClient()

        # First, create the build request.
        build_timeout = properties.VALUES.container.build_timeout.Get()
        if build_timeout is not None:
            timeout_str = build_timeout + 's'
        else:
            timeout_str = None

        if args.tag:
            if 'gcr.io/' not in args.tag:
                raise c_exceptions.InvalidArgumentException(
                    '--tag',
                    'Tag value must be in the gcr.io/* or *.gcr.io/* namespace.'
                )
            build_config = messages.Build(
                images=[args.tag],
                steps=[
                    messages.BuildStep(
                        name='gcr.io/cloud-builders/docker',
                        args=['build', '--no-cache', '-t', args.tag, '.'],
                    ),
                ],
                timeout=timeout_str,
            )
        elif args.config:
            build_config = config.LoadCloudbuildConfig(args.config, messages)

        if build_config.timeout is None:
            build_config.timeout = timeout_str

        suffix = '.tgz'
        if args.source.startswith('gs://') or os.path.isfile(args.source):
            _, suffix = os.path.splitext(args.source)

        # Next, stage the source to Cloud Storage.
        staged_object = '{stamp}_{tag_ish}{suffix}'.format(
            stamp=times.GetTimeStampFromDateTime(times.Now()),
            tag_ish='_'.join(build_config.images or ['null']).replace('/', '_'),
            suffix=suffix,
        )
        gcs_source_staging_dir = registry.Parse(args.gcs_source_staging_dir,
                                                collection='storage.objects')
        gcs_client.CreateBucketIfNotExists(gcs_source_staging_dir.bucket)
        if gcs_source_staging_dir.object:
            staged_object = gcs_source_staging_dir.object + '/' + staged_object

        gcs_source_staging = registry.Create(
            collection='storage.objects',
            bucket=gcs_source_staging_dir.bucket,
            object=staged_object)

        if args.source.startswith('gs://'):
            gcs_source = registry.Parse(args.source,
                                        collection='storage.objects')
            staged_source_obj = gcs_client.Copy(gcs_source, gcs_source_staging)
            build_config.source = messages.Source(
                storageSource=messages.StorageSource(
                    bucket=staged_source_obj.bucket,
                    object=staged_source_obj.name,
                    generation=staged_source_obj.generation,
                ))
        else:
            if not os.path.exists(args.source):
                raise c_exceptions.BadFileException(
                    'could not find source [{src}]'.format(src=args.source))
            if os.path.isdir(args.source):
                source_snapshot = snapshot.Snapshot(args.source)
                size_str = resource_transform.TransformSize(
                    source_snapshot.uncompressed_size)
                log.status.write(
                    'Creating temporary tarball archive of {num_files} file(s)'
                    ' totalling {size} before compression.\n'.format(
                        num_files=len(source_snapshot.files), size=size_str))
                staged_source_obj = source_snapshot.CopyTarballToGCS(
                    gcs_client, gcs_source_staging)
                build_config.source = messages.Source(
                    storageSource=messages.StorageSource(
                        bucket=staged_source_obj.bucket,
                        object=staged_source_obj.name,
                        generation=staged_source_obj.generation,
                    ))
            elif os.path.isfile(args.source):
                unused_root, ext = os.path.splitext(args.source)
                if ext not in _ALLOWED_SOURCE_EXT:
                    raise c_exceptions.BadFileException(
                        'Local file [{src}] is none of {exts}'.format(
                            src=args.source,
                            exts=', '.join(_ALLOWED_SOURCE_EXT)))
                log.status.write('Uploading local file [{src}] to '
                                 '[gs://{bucket}/{object}]\n'.format(
                                     src=args.source,
                                     bucket=gcs_source_staging.bucket,
                                     object=gcs_source_staging.object,
                                 ))
                staged_source_obj = gcs_client.CopyFileToGCS(
                    storage_util.BucketReference.FromBucketUrl(
                        gcs_source_staging.bucket), args.source,
                    gcs_source_staging.object)
                build_config.source = messages.Source(
                    storageSource=messages.StorageSource(
                        bucket=staged_source_obj.bucket,
                        object=staged_source_obj.name,
                        generation=staged_source_obj.generation,
                    ))

        gcs_log_dir = registry.Parse(args.gcs_log_dir,
                                     collection='storage.objects')

        if gcs_log_dir.bucket != gcs_source_staging.bucket:
            # Create the logs bucket if it does not yet exist.
            gcs_client.CreateBucketIfNotExists(gcs_log_dir.bucket)
        build_config.logsBucket = 'gs://' + gcs_log_dir.bucket + '/' + gcs_log_dir.object

        log.debug('submitting build: ' + repr(build_config))

        # Start the build.
        op = client.projects_builds.Create(
            messages.CloudbuildProjectsBuildsCreateRequest(
                build=build_config,
                projectId=properties.VALUES.core.project.Get()))
        json = encoding.MessageToJson(op.metadata)
        build = encoding.JsonToMessage(messages.BuildOperationMetadata,
                                       json).build

        build_ref = registry.Create(collection='cloudbuild.projects.builds',
                                    projectId=build.projectId,
                                    id=build.id)

        log.CreatedResource(build_ref)
        if build.logUrl:
            log.status.write(
                'Logs are permanently available at [{log_url}]\n'.format(
                    log_url=build.logUrl))
        else:
            log.status.write('Logs are available in the Cloud Console.\n')

        # If the command is run --async, we just print out a reference to the build.
        if args.async:
            return build

        # Otherwise, logs are streamed from GCS.
        build = cb_logs.CloudBuildClient(client, messages).Stream(build_ref)

        if build.status != messages.Build.StatusValueValuesEnum.SUCCESS:
            raise FailedBuildException(build.status)

        return build
Code example #9
    def _StageSourceAndConfigFiles(self, args, messages, build):
        """Stages the source and config files in a staging Google Cloud Storage bucket.

    Args:
      args: argsparse object from the DeployGKE command.
      messages: Cloud Build messages module.
      build: Cloud Build config.
    """

        project = properties.VALUES.core.project.Get(required=True)
        safe_project = project.replace(':', '_')
        safe_project = safe_project.replace('.', '_')
        # The string 'google' is not allowed in bucket names.
        safe_project = safe_project.replace('google', 'elgoog')

        gcs_client = storage_api.StorageClient()

        default_bucket_name = '{}_cloudbuild'.format(safe_project)

        gcs_staging_dir_name = (args.gcs_staging_dir
                                if args.gcs_staging_dir else
                                'gs://{}/deploy'.format(default_bucket_name))

        try:
            gcs_staging_dir = resources.REGISTRY.Parse(
                gcs_staging_dir_name, collection='storage.objects')
            gcs_staging_dir_obj = gcs_staging_dir.object
        except resources.WrongResourceCollectionException:
            gcs_staging_dir = resources.REGISTRY.Parse(
                gcs_staging_dir_name, collection='storage.buckets')
            gcs_staging_dir_obj = None

        gcs_client.CreateBucketIfNotExists(gcs_staging_dir.bucket)

        if args.gcs_staging_dir is None:
            # Check that the default bucket is also owned by the project (b/33046325)
            bucket_list_req = gcs_client.messages.StorageBucketsListRequest(
                project=project, prefix=default_bucket_name)
            bucket_list = gcs_client.client.buckets.List(bucket_list_req)

            if not any(bucket.id == default_bucket_name
                       for bucket in bucket_list.items):
                raise c_exceptions.RequiredArgumentException(
                    '--gcs-staging-dir',
                    'A bucket with name {} already exists and is owned by '
                    'another project. Specify a bucket using '
                    '--gcs-staging-dir.'.format(default_bucket_name))

        if args.source:
            suffix = '.tgz'
            if args.source.startswith('gs://') or os.path.isfile(args.source):
                _, suffix = os.path.splitext(args.source)

            staged_source = 'source/{stamp}-{uuid}{suffix}'.format(
                stamp=times.GetTimeStampFromDateTime(times.Now()),
                uuid=uuid.uuid4().hex,
                suffix=suffix,
            )

            if gcs_staging_dir_obj:
                staged_source = gcs_staging_dir_obj + '/' + staged_source
            gcs_source_staging = resources.REGISTRY.Create(
                collection='storage.objects',
                bucket=gcs_staging_dir.bucket,
                object=staged_source)

            staged_source_obj = None

            if args.source.startswith('gs://'):
                gcs_source = resources.REGISTRY.Parse(
                    args.source, collection='storage.objects')
                staged_source_obj = gcs_client.Rewrite(gcs_source,
                                                       gcs_source_staging)
            else:
                if not os.path.exists(args.source):
                    raise c_exceptions.BadFileException(
                        'could not find source [{src}]'.format(
                            src=args.source))
                elif os.path.isdir(args.source):
                    source_snapshot = snapshot.Snapshot(args.source)
                    size_str = resource_transform.TransformSize(
                        source_snapshot.uncompressed_size)
                    log.status.Print(
                        'Creating temporary tarball archive of {num_files} file(s)'
                        ' totalling {size} before compression.'.format(
                            num_files=len(source_snapshot.files),
                            size=size_str))
                    staged_source_obj = source_snapshot.CopyTarballToGCS(
                        gcs_client, gcs_source_staging)
                elif os.path.isfile(args.source):
                    unused_root, ext = os.path.splitext(args.source)
                    if ext not in _ALLOWED_SOURCE_EXT:
                        raise c_exceptions.BadFileException(
                            'Local file [{src}] is none of '.format(
                                src=args.source) +
                            ', '.join(_ALLOWED_SOURCE_EXT))
                    log.status.Print('Uploading local file [{src}] to '
                                     '[gs://{bucket}/{object}].'.format(
                                         src=args.source,
                                         bucket=gcs_source_staging.bucket,
                                         object=gcs_source_staging.object,
                                     ))
                    staged_source_obj = gcs_client.CopyFileToGCS(
                        args.source, gcs_source_staging)

            build.source = messages.Source(
                storageSource=messages.StorageSource(
                    bucket=staged_source_obj.bucket,
                    object=staged_source_obj.name,
                    generation=staged_source_obj.generation,
                ))
        if gcs_staging_dir_obj:
            config_path = gcs_staging_dir.bucket + '/' + gcs_staging_dir_obj
        else:
            config_path = gcs_staging_dir.bucket

        build.artifacts = messages.Artifacts(objects=messages.ArtifactObjects(
            location='gs://{}/config/$BUILD_ID/expanded'.format(config_path),
            paths=['output/expanded/*'],
        ))

        build.steps.append(
            messages.BuildStep(
                name='gcr.io/cloud-builders/gsutil',
                args=[
                    'cp', '-r', 'output/suggested',
                    'gs://{}/config/$BUILD_ID/suggested'.format(config_path)
                ],
            ))
        return
Code example #10
File: submit_util.py Project: bopopescu/GoogleAPI
def CreateBuildConfig(tag, no_cache, messages, substitutions, arg_config,
                      is_specified_source, no_source, source,
                      gcs_source_staging_dir, ignore_file, arg_gcs_log_dir,
                      arg_machine_type, arg_disk_size):
    """Returns a build config."""
    # Get the build timeout.
    build_timeout = properties.VALUES.builds.timeout.Get()
    if build_timeout is not None:
        try:
            # A bare number is interpreted as seconds.
            build_timeout_secs = int(build_timeout)
        except ValueError:
            build_timeout_duration = times.ParseDuration(build_timeout)
            build_timeout_secs = int(build_timeout_duration.total_seconds)
        timeout_str = six.text_type(build_timeout_secs) + 's'
    else:
        timeout_str = None

    if tag is not None:
        if (properties.VALUES.builds.check_tag.GetBool()
                and 'gcr.io/' not in tag):
            raise c_exceptions.InvalidArgumentException(
                '--tag',
                'Tag value must be in the gcr.io/* or *.gcr.io/* namespace.')
        if properties.VALUES.builds.use_kaniko.GetBool():
            if no_cache:
                ttl = '0h'
            else:
                ttl = '{}h'.format(
                    properties.VALUES.builds.kaniko_cache_ttl.Get())
            build_config = messages.Build(
                steps=[
                    messages.BuildStep(
                        name=properties.VALUES.builds.kaniko_image.Get(),
                        args=[
                            '--destination',
                            tag,
                            '--cache',
                            '--cache-ttl',
                            ttl,
                            '--cache-dir',
                            '',
                        ],
                    ),
                ],
                timeout=timeout_str,
                substitutions=cloudbuild_util.EncodeSubstitutions(
                    substitutions, messages))
        else:
            if no_cache:
                raise c_exceptions.InvalidArgumentException(
                    'no-cache',
                    'Cannot specify --no-cache if builds/use_kaniko property is '
                    'False')
            build_config = messages.Build(
                images=[tag],
                steps=[
                    messages.BuildStep(
                        name='gcr.io/cloud-builders/docker',
                        args=[
                            'build', '--network', 'cloudbuild', '--no-cache',
                            '-t', tag, '.'
                        ],
                    ),
                ],
                timeout=timeout_str,
                substitutions=cloudbuild_util.EncodeSubstitutions(
                    substitutions, messages))
    elif arg_config is not None:
        if no_cache:
            raise c_exceptions.ConflictingArgumentsException(
                '--config', '--no-cache')
        if not arg_config:
            raise c_exceptions.InvalidArgumentException(
                '--config', 'Config file path must not be empty.')
        build_config = config.LoadCloudbuildConfigFromPath(
            arg_config, messages, params=substitutions)
    else:
        raise c_exceptions.OneOfArgumentsRequiredException(
            ['--tag', '--config'],
            'Requires either a docker tag or a config file.')

    # If timeout was set by flag, overwrite the config file.
    if timeout_str:
        build_config.timeout = timeout_str

    # Set the source for the build config.
    default_gcs_source = False
    default_bucket_name = None
    if gcs_source_staging_dir is None:
        default_gcs_source = True
        default_bucket_name = staging_bucket_util.GetDefaultStagingBucket()
        gcs_source_staging_dir = 'gs://{}/source'.format(default_bucket_name)
    gcs_client = storage_api.StorageClient()

    # --no-source overrides the default --source.
    if not is_specified_source and no_source:
        source = None

    gcs_source_staging = None
    if source:
        suffix = '.tgz'
        if source.startswith('gs://') or os.path.isfile(source):
            _, suffix = os.path.splitext(source)

        # Next, stage the source to Cloud Storage.
        staged_object = '{stamp}-{uuid}{suffix}'.format(
            stamp=times.GetTimeStampFromDateTime(times.Now()),
            uuid=uuid.uuid4().hex,
            suffix=suffix,
        )
        gcs_source_staging_dir = resources.REGISTRY.Parse(
            gcs_source_staging_dir, collection='storage.objects')

        # We create the bucket (if it does not exist) first. If we do an existence
        # check and then create the bucket ourselves, it would be possible for an
        # attacker to get lucky and beat us to creating the bucket. Block on this
        # creation to avoid this race condition.
        gcs_client.CreateBucketIfNotExists(gcs_source_staging_dir.bucket)

        # If no bucket is specified (for the source `default_gcs_source`), check
        # that the default bucket is also owned by the project (b/33046325).
        if default_gcs_source and not staging_bucket_util.BucketIsInProject(
                gcs_client, default_bucket_name):
            raise c_exceptions.RequiredArgumentException(
                'gcs-source-staging-dir',
                'A bucket with name {} already exists and is owned by '
                'another project. Specify a bucket using '
                '--gcs-source-staging-dir.'.format(default_bucket_name))

        if gcs_source_staging_dir.object:
            staged_object = gcs_source_staging_dir.object + '/' + staged_object
        gcs_source_staging = resources.REGISTRY.Create(
            collection='storage.objects',
            bucket=gcs_source_staging_dir.bucket,
            object=staged_object)

        if source.startswith('gs://'):
            gcs_source = resources.REGISTRY.Parse(source,
                                                  collection='storage.objects')
            staged_source_obj = gcs_client.Rewrite(gcs_source,
                                                   gcs_source_staging)
            build_config.source = messages.Source(
                storageSource=messages.StorageSource(
                    bucket=staged_source_obj.bucket,
                    object=staged_source_obj.name,
                    generation=staged_source_obj.generation,
                ))
        else:
            if not os.path.exists(source):
                raise c_exceptions.BadFileException(
                    'could not find source [{src}]'.format(src=source))
            if os.path.isdir(source):
                source_snapshot = snapshot.Snapshot(source,
                                                    ignore_file=ignore_file)
                size_str = resource_transform.TransformSize(
                    source_snapshot.uncompressed_size)
                log.status.Print(
                    'Creating temporary tarball archive of {num_files} file(s)'
                    ' totalling {size} before compression.'.format(
                        num_files=len(source_snapshot.files), size=size_str))
                staged_source_obj = source_snapshot.CopyTarballToGCS(
                    gcs_client, gcs_source_staging, ignore_file=ignore_file)
                build_config.source = messages.Source(
                    storageSource=messages.StorageSource(
                        bucket=staged_source_obj.bucket,
                        object=staged_source_obj.name,
                        generation=staged_source_obj.generation,
                    ))
            elif os.path.isfile(source):
                unused_root, ext = os.path.splitext(source)
                if ext not in _ALLOWED_SOURCE_EXT:
                    raise c_exceptions.BadFileException(
                        'Local file [{src}] is none of {exts}'.format(
                            src=source, exts=', '.join(_ALLOWED_SOURCE_EXT)))
                log.status.Print('Uploading local file [{src}] to '
                                 '[gs://{bucket}/{object}].'.format(
                                     src=source,
                                     bucket=gcs_source_staging.bucket,
                                     object=gcs_source_staging.object,
                                 ))
                staged_source_obj = gcs_client.CopyFileToGCS(
                    source, gcs_source_staging)
                build_config.source = messages.Source(
                    storageSource=messages.StorageSource(
                        bucket=staged_source_obj.bucket,
                        object=staged_source_obj.name,
                        generation=staged_source_obj.generation,
                    ))
    else:
        # No source
        if not no_source:
            raise c_exceptions.InvalidArgumentException(
                '--no-source', 'To omit source, use the --no-source flag.')

    # Set a Google Cloud Storage directory to hold build logs.
    if arg_gcs_log_dir:
        gcs_log_dir = resources.REGISTRY.Parse(arg_gcs_log_dir,
                                               collection='storage.objects')
        build_config.logsBucket = ('gs://' + gcs_log_dir.bucket + '/' +
                                   gcs_log_dir.object)

    # Set the machine type used to run the build.
    if arg_machine_type is not None:
        machine_type = flags.GetMachineType(arg_machine_type)
        if not build_config.options:
            build_config.options = messages.BuildOptions()
        build_config.options.machineType = machine_type

    # Set the disk size used to run the build.
    if arg_disk_size is not None:
        disk_size = compute_utils.BytesToGb(arg_disk_size)
        if not build_config.options:
            build_config.options = messages.BuildOptions()
        build_config.options.diskSizeGb = int(disk_size)

    return build_config
Code example #11
def _SetSource(release_config,
               source,
               gcs_source_staging_dir,
               ignore_file,
               skaffold_version,
               location,
               hide_logs=False):
  """Set the source for the release config.

  Sets the source for the release config and creates a default Cloud Storage
  bucket with location for staging if gcs-source-staging-dir is not specified.

  Args:
    release_config: a Release message
    source: the location of the source files
    gcs_source_staging_dir: directory in google cloud storage to use for staging
    ignore_file: the ignore file to use
    skaffold_version: version of Skaffold binary
    location: the cloud region for the release
    hide_logs: whether to show logs, defaults to False

  Returns:
    Modified release_config
  """
  safe_project_id = staging_bucket_util.GetSafeProject()
  default_gcs_source = False
  default_bucket_name = staging_bucket_util.GetDefaultStagingBucket(
      safe_project_id, location)

  if gcs_source_staging_dir is None:
    default_gcs_source = True
    gcs_source_staging_dir = _SOURCE_STAGING_TEMPLATE.format(
        default_bucket_name)

  if not gcs_source_staging_dir.startswith('gs://'):
    raise c_exceptions.InvalidArgumentException('--gcs-source-staging-dir',
                                                'must be a GCS bucket')

  gcs_client = storage_api.StorageClient()
  suffix = '.tgz'
  if source.startswith('gs://') or os.path.isfile(source):
    _, suffix = os.path.splitext(source)

  # Next, stage the source to Cloud Storage.
  staged_object = '{stamp}-{uuid}{suffix}'.format(
      stamp=times.GetTimeStampFromDateTime(times.Now()),
      uuid=uuid.uuid4().hex,
      suffix=suffix,
  )
  gcs_source_staging_dir = resources.REGISTRY.Parse(
      gcs_source_staging_dir, collection='storage.objects')

  try:
    gcs_client.CreateBucketIfNotExists(
        gcs_source_staging_dir.bucket,
        location=location,
        check_ownership=default_gcs_source)
  except storage_api.BucketInWrongProjectError:
    # If we're using the default bucket but it already exists in a different
    # project, then it could belong to a malicious attacker (b/33046325).
    raise c_exceptions.RequiredArgumentException(
        'gcs-source-staging-dir',
        'A bucket with name {} already exists and is owned by '
        'another project. Specify a bucket using '
        '--gcs-source-staging-dir.'.format(default_bucket_name))

  if gcs_source_staging_dir.object:
    staged_object = gcs_source_staging_dir.object + '/' + staged_object
  gcs_source_staging = resources.REGISTRY.Create(
      collection='storage.objects',
      bucket=gcs_source_staging_dir.bucket,
      object=staged_object)
  if source.startswith('gs://'):
    gcs_source = resources.REGISTRY.Parse(source, collection='storage.objects')
    staged_source_obj = gcs_client.Rewrite(gcs_source, gcs_source_staging)
    release_config.skaffoldConfigUri = 'gs://{bucket}/{object}'.format(
        bucket=staged_source_obj.bucket, object=staged_source_obj.name)
  else:
    if os.path.isdir(source):
      source_snapshot = snapshot.Snapshot(source, ignore_file=ignore_file)
      size_str = resource_transform.TransformSize(
          source_snapshot.uncompressed_size)
      if not hide_logs:
        log.status.Print(
            'Creating temporary tarball archive of {num_files} file(s)'
            ' totalling {size} before compression.'.format(
                num_files=len(source_snapshot.files), size=size_str))
      staged_source_obj = source_snapshot.CopyTarballToGCS(
          gcs_client,
          gcs_source_staging,
          ignore_file=ignore_file,
          hide_logs=hide_logs)
      release_config.skaffoldConfigUri = 'gs://{bucket}/{object}'.format(
          bucket=staged_source_obj.bucket, object=staged_source_obj.name)
    elif os.path.isfile(source):
      if not hide_logs:
        log.status.Print('Uploading local file [{src}] to '
                         '[gs://{bucket}/{object}].'.format(
                             src=source,
                             bucket=gcs_source_staging.bucket,
                             object=gcs_source_staging.object,
                         ))
      staged_source_obj = gcs_client.CopyFileToGCS(source, gcs_source_staging)
      release_config.skaffoldConfigUri = 'gs://{bucket}/{object}'.format(
          bucket=staged_source_obj.bucket, object=staged_source_obj.name)

  if skaffold_version:
    release_config.skaffoldVersion = skaffold_version

  return release_config
Code example #12
def _UploadSourceToGCS(source, stage_bucket, ignore_file):
    """Uploads local content to GCS.

  This will ensure that the source and destination exist before triggering the
  upload.

  Args:
    source: string, a local path.
    stage_bucket: optional string. When not provided, the default staging bucket
      will be used (see GetDefaultStagingBucket). This string is of the
      format "gs://bucket-name/". A "source" object will be created under this
        bucket, and any uploaded artifacts will be stored there.
    ignore_file: string, a path to a gcloudignore file.

  Returns:
    A string in the format "gs://path/to/resulting/upload".

  Raises:
    RequiredArgumentException: if stage-bucket is owned by another project.
    BadFileException: if the source doesn't exist or isn't a directory.
  """
    gcs_client = storage_api.StorageClient()

    # The object name to use for our GCS storage.
    gcs_object_name = 'source'

    if stage_bucket is None:
        used_default_bucket_name = True
        gcs_source_bucket_name = staging_bucket_util.GetDefaultStagingBucket()
        gcs_source_staging_dir = 'gs://{0}/{1}'.format(gcs_source_bucket_name,
                                                       gcs_object_name)
    else:
        used_default_bucket_name = False
        gcs_source_bucket_name = stage_bucket
        gcs_source_staging_dir = stage_bucket + gcs_object_name

    # By calling REGISTRY.Parse on "gs://my-bucket/foo", the result's "bucket"
    # property will be "my-bucket" and the "object" property will be "foo".
    gcs_source_staging_dir_ref = resources.REGISTRY.Parse(
        gcs_source_staging_dir, collection='storage.objects')

    # Make sure the bucket exists
    try:
        gcs_client.CreateBucketIfNotExists(
            gcs_source_staging_dir_ref.bucket,
            check_ownership=used_default_bucket_name)
    except storage_api.BucketInWrongProjectError:
        # If we're using the default bucket but it already exists in a different
        # project, then it could belong to a malicious attacker (b/33046325).
        raise c_exceptions.RequiredArgumentException(
            'stage-bucket',
            'A bucket with name {} already exists and is owned by '
            'another project. Specify a bucket using '
            '--stage-bucket.'.format(gcs_source_staging_dir_ref.bucket))

    # This will look something like this:
    # "1615850562.234312-044e784992744951b0cd71c0b011edce"
    staged_object = '{stamp}-{uuid}'.format(
        stamp=times.GetTimeStampFromDateTime(times.Now()),
        uuid=uuid.uuid4().hex,
    )

    if gcs_source_staging_dir_ref.object:
        staged_object = gcs_source_staging_dir_ref.object + '/' + staged_object

    gcs_source_staging = resources.REGISTRY.Create(
        collection='storage.objects',
        bucket=gcs_source_staging_dir_ref.bucket,
        object=staged_object)

    if not os.path.exists(source):
        raise c_exceptions.BadFileException(
            'could not find source [{}]'.format(source))

    if not os.path.isdir(source):
        raise c_exceptions.BadFileException(
            'source is not a directory [{}]'.format(source))

    _UploadSourceDirToGCS(gcs_client, source, gcs_source_staging, ignore_file)

    upload_bucket = 'gs://{0}/{1}'.format(gcs_source_staging.bucket,
                                          gcs_source_staging.object)

    return upload_bucket
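
A standalone approximation of the staged object name built above, with time.time() standing in for times.GetTimeStampFromDateTime(times.Now()):

import time
import uuid

staged_object = '{stamp}-{uuid}'.format(stamp=time.time(), uuid=uuid.uuid4().hex)
# e.g. '1615850562.234312-044e784992744951b0cd71c0b011edce', matching the
# example in the docstring above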
Code example #13
def _SetSource(build_config,
               messages,
               is_specified_source,
               no_source,
               source,
               gcs_source_staging_dir,
               ignore_file,
               hide_logs=False):
  """Set the source for the build config."""
  default_gcs_source = False
  default_bucket_name = None
  if gcs_source_staging_dir is None:
    default_gcs_source = True
    default_bucket_name = staging_bucket_util.GetDefaultStagingBucket()
    gcs_source_staging_dir = 'gs://{}/source'.format(default_bucket_name)
  gcs_client = storage_api.StorageClient()

  # --no-source overrides the default --source.
  if not is_specified_source and no_source:
    source = None

  gcs_source_staging = None
  if source:
    suffix = '.tgz'
    if source.startswith('gs://') or os.path.isfile(source):
      _, suffix = os.path.splitext(source)

    # Next, stage the source to Cloud Storage.
    staged_object = '{stamp}-{uuid}{suffix}'.format(
        stamp=times.GetTimeStampFromDateTime(times.Now()),
        uuid=uuid.uuid4().hex,
        suffix=suffix,
    )
    gcs_source_staging_dir = resources.REGISTRY.Parse(
        gcs_source_staging_dir, collection='storage.objects')

    try:
      gcs_client.CreateBucketIfNotExists(
          gcs_source_staging_dir.bucket, check_ownership=default_gcs_source)
    except api_exceptions.HttpForbiddenError:
      raise BucketForbiddenError(
          'The user is forbidden from accessing the bucket [{}]. Please check '
          'your organization\'s policy or if the user has the "serviceusage.services.use" permission'
          .format(gcs_source_staging_dir.bucket))
    except storage_api.BucketInWrongProjectError:
      # If we're using the default bucket but it already exists in a different
      # project, then it could belong to a malicious attacker (b/33046325).
      raise c_exceptions.RequiredArgumentException(
          'gcs-source-staging-dir',
          'A bucket with name {} already exists and is owned by '
          'another project. Specify a bucket using '
          '--gcs-source-staging-dir.'.format(default_bucket_name))

    if gcs_source_staging_dir.object:
      staged_object = gcs_source_staging_dir.object + '/' + staged_object
    gcs_source_staging = resources.REGISTRY.Create(
        collection='storage.objects',
        bucket=gcs_source_staging_dir.bucket,
        object=staged_object)

    if source.startswith('gs://'):
      gcs_source = resources.REGISTRY.Parse(
          source, collection='storage.objects')
      staged_source_obj = gcs_client.Rewrite(gcs_source, gcs_source_staging)
      build_config.source = messages.Source(
          storageSource=messages.StorageSource(
              bucket=staged_source_obj.bucket,
              object=staged_source_obj.name,
              generation=staged_source_obj.generation,
          ))
    else:
      if not os.path.exists(source):
        raise c_exceptions.BadFileException(
            'could not find source [{src}]'.format(src=source))
      if os.path.isdir(source):
        source_snapshot = snapshot.Snapshot(source, ignore_file=ignore_file)
        size_str = resource_transform.TransformSize(
            source_snapshot.uncompressed_size)
        if not hide_logs:
          log.status.Print(
              'Creating temporary tarball archive of {num_files} file(s)'
              ' totalling {size} before compression.'.format(
                  num_files=len(source_snapshot.files), size=size_str))
        staged_source_obj = source_snapshot.CopyTarballToGCS(
            gcs_client,
            gcs_source_staging,
            ignore_file=ignore_file,
            hide_logs=hide_logs)
        build_config.source = messages.Source(
            storageSource=messages.StorageSource(
                bucket=staged_source_obj.bucket,
                object=staged_source_obj.name,
                generation=staged_source_obj.generation,
            ))
      elif os.path.isfile(source):
        unused_root, ext = os.path.splitext(source)
        if ext not in _ALLOWED_SOURCE_EXT:
          raise c_exceptions.BadFileException(
              'Local file [{src}] is none of {exts}'.format(
                  src=source, exts=', '.join(_ALLOWED_SOURCE_EXT)))
        if not hide_logs:
          log.status.Print('Uploading local file [{src}] to '
                           '[gs://{bucket}/{object}].'.format(
                               src=source,
                               bucket=gcs_source_staging.bucket,
                               object=gcs_source_staging.object,
                           ))
        staged_source_obj = gcs_client.CopyFileToGCS(source, gcs_source_staging)
        build_config.source = messages.Source(
            storageSource=messages.StorageSource(
                bucket=staged_source_obj.bucket,
                object=staged_source_obj.name,
                generation=staged_source_obj.generation,
            ))
  else:
    # No source
    if not no_source:
      raise c_exceptions.InvalidArgumentException(
          '--no-source', 'To omit source, use the --no-source flag.')

  return build_config
Code example #14
def _SetSource(release_config,
               source,
               gcs_source_staging_dir,
               gcs_render_dir,
               ignore_file,
               hide_logs=False):
    """Set the source for the release config."""
    safe_project_id = staging_bucket_util.GetSafeProject()
    default_gcs_source = False
    default_bucket_name = staging_bucket_util.GetDefaultStagingBucket(
        safe_project_id)
    if gcs_source_staging_dir is None:
        default_gcs_source = True
        gcs_source_staging_dir = _SKAFFOLD_CONFIG_PATH.format(
            default_bucket_name)

    if not gcs_source_staging_dir.startswith('gs://'):
        raise c_exceptions.InvalidArgumentException('--gcs-source-staging-dir',
                                                    'must be a GCS bucket')

    if gcs_render_dir:
        if not gcs_render_dir.startswith('gs://'):
            raise c_exceptions.InvalidArgumentException(
                '--gcs-render-dir', 'must be a GCS bucket')
        # If --gcs-render-dir is omitted, this field stays unset and the
        # server creates a new bucket by default.
        release_config.manifestBucket = gcs_render_dir

    gcs_client = storage_api.StorageClient()
    suffix = '.tgz'
    if source.startswith('gs://') or os.path.isfile(source):
        _, suffix = os.path.splitext(source)

    # Next, stage the source to Cloud Storage.
    staged_object = '{stamp}-{uuid}{suffix}'.format(
        stamp=times.GetTimeStampFromDateTime(times.Now()),
        uuid=uuid.uuid4().hex,
        suffix=suffix,
    )
    gcs_source_staging_dir = resources.REGISTRY.Parse(
        gcs_source_staging_dir, collection='storage.objects')

    try:
        gcs_client.CreateBucketIfNotExists(gcs_source_staging_dir.bucket,
                                           check_ownership=default_gcs_source)
    except storage_api.BucketInWrongProjectError:
        # If we're using the default bucket but it already exists in a different
        # project, then it could belong to a malicious attacker (b/33046325).
        raise c_exceptions.RequiredArgumentException(
            'gcs-source-staging-dir',
            'A bucket with name {} already exists and is owned by '
            'another project. Specify a bucket using '
            '--gcs-source-staging-dir.'.format(default_bucket_name))

    if gcs_source_staging_dir.object:
        staged_object = gcs_source_staging_dir.object + '/' + staged_object
    gcs_source_staging = resources.REGISTRY.Create(
        collection='storage.objects',
        bucket=gcs_source_staging_dir.bucket,
        object=staged_object)
    if source.startswith('gs://'):
        gcs_source = resources.REGISTRY.Parse(source,
                                              collection='storage.objects')
        staged_source_obj = gcs_client.Rewrite(gcs_source, gcs_source_staging)
        release_config.skaffoldConfigUri = 'gs://{bucket}/{object}'.format(
            bucket=staged_source_obj.bucket, object=staged_source_obj.name)
    else:
        if not os.path.exists(source):
            raise c_exceptions.BadFileException(
                'could not find source [{src}]'.format(src=source))
        if os.path.isdir(source):
            source_snapshot = snapshot.Snapshot(source,
                                                ignore_file=ignore_file)
            size_str = resource_transform.TransformSize(
                source_snapshot.uncompressed_size)
            if not hide_logs:
                log.status.Print(
                    'Creating temporary tarball archive of {num_files} file(s)'
                    ' totalling {size} before compression.'.format(
                        num_files=len(source_snapshot.files), size=size_str))
            staged_source_obj = source_snapshot.CopyTarballToGCS(
                gcs_client,
                gcs_source_staging,
                ignore_file=ignore_file,
                hide_logs=hide_logs)
            release_config.skaffoldConfigUri = 'gs://{bucket}/{object}'.format(
                bucket=staged_source_obj.bucket, object=staged_source_obj.name)
        elif os.path.isfile(source):
            _, ext = os.path.splitext(source)
            if ext not in _ALLOWED_SOURCE_EXT:
                raise c_exceptions.BadFileException(
                    'Local file [{src}] is none of '.format(src=source) +
                    ', '.join(_ALLOWED_SOURCE_EXT))
            if not hide_logs:
                log.status.Print('Uploading local file [{src}] to '
                                 '[gs://{bucket}/{object}].'.format(
                                     src=source,
                                     bucket=gcs_source_staging.bucket,
                                     object=gcs_source_staging.object,
                                 ))
            staged_source_obj = gcs_client.CopyFileToGCS(
                source, gcs_source_staging)
            release_config.skaffoldConfigUri = 'gs://{bucket}/{object}'.format(
                bucket=staged_source_obj.bucket, object=staged_source_obj.name)
    return release_config
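A hypothetical call site for _SetSource, assuming a Cloud Deploy messages module with a Release message; the argument values are illustrative, not taken from the example:

# Hypothetical usage sketch; release_config's type and the argument
# values below are illustrative.
release_config = messages.Release()
release_config = _SetSource(
    release_config,
    source='.',                   # tar up the current directory
    gcs_source_staging_dir=None,  # fall back to the default staging bucket
    gcs_render_dir=None,          # let the server create a render bucket
    ignore_file=None)
log.status.Print(release_config.skaffoldConfigUri)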
Code example #15
  def Run(self, args):
    """This is what gets called when the user runs this command.

    Args:
      args: an argparse namespace. All the arguments that were provided to this
        command invocation.

    Returns:
      Some value that we want to have printed later.

    Raises:
      FailedBuildException: If the build is completed and not 'SUCCESS'.
    """

    project = properties.VALUES.core.project.Get(required=True)
    safe_project = project.replace(':', '_')
    safe_project = safe_project.replace('.', '_')
    # The string 'google' is not allowed in bucket names.
    safe_project = safe_project.replace('google', 'elgoog')

    default_bucket_name = '{}_cloudbuild'.format(safe_project)

    default_gcs_source = False
    if args.gcs_source_staging_dir is None:
      default_gcs_source = True
      args.gcs_source_staging_dir = 'gs://{}/source'.format(default_bucket_name)

    client = cloudbuild_util.GetClientInstance()
    messages = cloudbuild_util.GetMessagesModule()

    gcs_client = storage_api.StorageClient()

    # First, create the build request.
    build_timeout = properties.VALUES.builds.timeout.Get()

    if build_timeout is not None:
      try:
        # A bare number is interpreted as seconds.
        build_timeout_secs = int(build_timeout)
      except ValueError:
        build_timeout_duration = times.ParseDuration(build_timeout)
        build_timeout_secs = int(build_timeout_duration.total_seconds)
      timeout_str = six.text_type(build_timeout_secs) + 's'
    else:
      timeout_str = None

    if args.tag is not None:
      if (properties.VALUES.builds.check_tag.GetBool() and
          'gcr.io/' not in args.tag):
        raise c_exceptions.InvalidArgumentException(
            '--tag',
            'Tag value must be in the gcr.io/* or *.gcr.io/* namespace.')
      if properties.VALUES.builds.use_kaniko.GetBool():
        if args.no_cache:
          ttl = '0h'
        else:
          ttl = '{}h'.format(properties.VALUES.builds.kaniko_cache_ttl.Get())
        build_config = messages.Build(
            steps=[
                messages.BuildStep(
                    name=properties.VALUES.builds.kaniko_image.Get(),
                    args=[
                        '--destination', args.tag, '--cache', 'true',
                        '--cache-ttl', ttl
                    ],
                ),
            ],
            timeout=timeout_str,
            substitutions=cloudbuild_util.EncodeSubstitutions(
                args.substitutions, messages))
      else:
        if args.no_cache:
          raise c_exceptions.InvalidArgumentException(
              'no-cache',
              'Cannot specify --no-cache if builds/use_kaniko property is '
              'False')
        build_config = messages.Build(
            images=[args.tag],
            steps=[
                messages.BuildStep(
                    name='gcr.io/cloud-builders/docker',
                    args=[
                        'build', '--network', 'cloudbuild', '--no-cache', '-t',
                        args.tag, '.'
                    ],
                ),
            ],
            timeout=timeout_str,
            substitutions=cloudbuild_util.EncodeSubstitutions(
                args.substitutions, messages))
    elif args.config is not None:
      if args.no_cache:
        raise c_exceptions.ConflictingArgumentsException(
            '--config', '--no-cache')
      if not args.config:
        raise c_exceptions.InvalidArgumentException(
            '--config', 'Config file path must not be empty.')
      build_config = config.LoadCloudbuildConfigFromPath(
          args.config, messages, params=args.substitutions)
    else:
      raise c_exceptions.OneOfArgumentsRequiredException(
          ['--tag', '--config'],
          'Requires either a docker tag or a config file.')

    # If timeout was set by flag, overwrite the config file.
    if timeout_str:
      build_config.timeout = timeout_str

    # --no-source overrides the default --source.
    if not args.IsSpecified('source') and args.no_source:
      args.source = None

    gcs_source_staging = None
    if args.source:
      suffix = '.tgz'
      if args.source.startswith('gs://') or os.path.isfile(args.source):
        _, suffix = os.path.splitext(args.source)

      # Next, stage the source to Cloud Storage.
      staged_object = '{stamp}-{uuid}{suffix}'.format(
          stamp=times.GetTimeStampFromDateTime(times.Now()),
          uuid=uuid.uuid4().hex,
          suffix=suffix,
      )
      gcs_source_staging_dir = resources.REGISTRY.Parse(
          args.gcs_source_staging_dir, collection='storage.objects')

      # We create the bucket (if it does not exist) first. If we do an existence
      # check and then create the bucket ourselves, it would be possible for an
      # attacker to get lucky and beat us to creating the bucket. Block on this
      # creation to avoid this race condition.
      gcs_client.CreateBucketIfNotExists(gcs_source_staging_dir.bucket)

      # If no bucket is specified (for the source `default_gcs_source`), check
      # that the default bucket is also owned by the project (b/33046325).
      if default_gcs_source:
        # This request returns only the buckets owned by the project.
        bucket_list_req = gcs_client.messages.StorageBucketsListRequest(
            project=project, prefix=default_bucket_name)
        bucket_list = gcs_client.client.buckets.List(bucket_list_req)
        found_bucket = False
        for bucket in bucket_list.items:
          if bucket.id == default_bucket_name:
            found_bucket = True
            break
        if not found_bucket:
          raise c_exceptions.RequiredArgumentException(
              'gcs-source-staging-dir',
              'A bucket with name {} already exists and is owned by '
              'another project. Specify a bucket using '
              '--gcs-source-staging-dir.'.format(default_bucket_name))

      if gcs_source_staging_dir.object:
        staged_object = gcs_source_staging_dir.object + '/' + staged_object
      gcs_source_staging = resources.REGISTRY.Create(
          collection='storage.objects',
          bucket=gcs_source_staging_dir.bucket,
          object=staged_object)

      if args.source.startswith('gs://'):
        gcs_source = resources.REGISTRY.Parse(
            args.source, collection='storage.objects')
        staged_source_obj = gcs_client.Rewrite(gcs_source, gcs_source_staging)
        build_config.source = messages.Source(
            storageSource=messages.StorageSource(
                bucket=staged_source_obj.bucket,
                object=staged_source_obj.name,
                generation=staged_source_obj.generation,
            ))
      else:
        if not os.path.exists(args.source):
          raise c_exceptions.BadFileException(
              'could not find source [{src}]'.format(src=args.source))
        if os.path.isdir(args.source):
          source_snapshot = snapshot.Snapshot(args.source,
                                              ignore_file=args.ignore_file)
          size_str = resource_transform.TransformSize(
              source_snapshot.uncompressed_size)
          log.status.Print(
              'Creating temporary tarball archive of {num_files} file(s)'
              ' totalling {size} before compression.'.format(
                  num_files=len(source_snapshot.files), size=size_str))
          staged_source_obj = source_snapshot.CopyTarballToGCS(
              gcs_client, gcs_source_staging, ignore_file=args.ignore_file)
          build_config.source = messages.Source(
              storageSource=messages.StorageSource(
                  bucket=staged_source_obj.bucket,
                  object=staged_source_obj.name,
                  generation=staged_source_obj.generation,
              ))
        elif os.path.isfile(args.source):
          unused_root, ext = os.path.splitext(args.source)
          if ext not in _ALLOWED_SOURCE_EXT:
            raise c_exceptions.BadFileException(
                'Local file [{src}] is none of '.format(src=args.source) +
                ', '.join(_ALLOWED_SOURCE_EXT))
          log.status.Print('Uploading local file [{src}] to '
                           '[gs://{bucket}/{object}].'.format(
                               src=args.source,
                               bucket=gcs_source_staging.bucket,
                               object=gcs_source_staging.object,
                           ))
          staged_source_obj = gcs_client.CopyFileToGCS(args.source,
                                                       gcs_source_staging)
          build_config.source = messages.Source(
              storageSource=messages.StorageSource(
                  bucket=staged_source_obj.bucket,
                  object=staged_source_obj.name,
                  generation=staged_source_obj.generation,
              ))
    else:
      # No source
      if not args.no_source:
        raise c_exceptions.InvalidArgumentException(
            '--no-source', 'To omit source, use the --no-source flag.')

    if args.gcs_log_dir:
      gcs_log_dir = resources.REGISTRY.Parse(
          args.gcs_log_dir, collection='storage.objects')

      build_config.logsBucket = ('gs://' + gcs_log_dir.bucket + '/' +
                                 gcs_log_dir.object)

    # Machine type.
    if args.machine_type is not None:
      machine_type = Submit._machine_type_flag_map.GetEnumForChoice(
          args.machine_type)
      if not build_config.options:
        build_config.options = messages.BuildOptions()
      build_config.options.machineType = machine_type

    # Disk size.
    if args.disk_size is not None:
      disk_size = compute_utils.BytesToGb(args.disk_size)
      if not build_config.options:
        build_config.options = messages.BuildOptions()
      build_config.options.diskSizeGb = int(disk_size)

    log.debug('submitting build: ' + repr(build_config))

    # Start the build.
    op = client.projects_builds.Create(
        messages.CloudbuildProjectsBuildsCreateRequest(
            build=build_config, projectId=properties.VALUES.core.project.Get()))
    json = encoding.MessageToJson(op.metadata)
    build = encoding.JsonToMessage(messages.BuildOperationMetadata, json).build

    build_ref = resources.REGISTRY.Create(
        collection='cloudbuild.projects.builds',
        projectId=build.projectId,
        id=build.id)

    log.CreatedResource(build_ref)
    if build.logUrl:
      log.status.Print(
          'Logs are available at [{log_url}].'.format(log_url=build.logUrl))
    else:
      log.status.Print('Logs are available in the Cloud Console.')

    # If the command is run --async, we just print out a reference to the build.
    # ('async' is a reserved word in Python 3, so argparse stores the flag
    # under the attribute name 'async_'.)
    if args.async_:
      return build

    mash_handler = execution.MashHandler(
        execution.GetCancelBuildHandler(client, messages, build_ref))

    # Otherwise, logs are streamed from GCS.
    with execution_utils.CtrlCSection(mash_handler):
      build = cb_logs.CloudBuildClient(client, messages).Stream(build_ref)

    if build.status == messages.Build.StatusValueValuesEnum.TIMEOUT:
      log.status.Print(
          'Your build timed out. Use the [--timeout=DURATION] flag to change '
          'the timeout threshold.')

    if build.status != messages.Build.StatusValueValuesEnum.SUCCESS:
      raise FailedBuildException(build)

    return build
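The timeout handling near the top of Run accepts either a bare number (seconds) or a duration string. A standalone sketch of the same normalization, substituting a toy parser for the gcloud-internal times.ParseDuration (which accepts full ISO 8601 durations):

# Standalone sketch of the timeout normalization above. The toy parser
# only handles '<N>s'/'<N>m'/'<N>h' suffixes; times.ParseDuration does more.
def normalize_timeout(value):
  if value is None:
    return None
  try:
    return '{}s'.format(int(value))  # a bare number is seconds
  except ValueError:
    units = {'s': 1, 'm': 60, 'h': 3600}
    return '{}s'.format(int(float(value[:-1]) * units[value[-1]]))

assert normalize_timeout('600') == '600s'
assert normalize_timeout('10m') == '600s'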
Code example #16
    def _StageSource(self, source, gcs_staging_dir_bucket,
                     gcs_staging_dir_object):
        """Stages source onto the provided bucket and returns its reference.

    Args:
      source: Path to source repo as a directory on a local disk or a
        gzipped archive file (.tar.gz) in Google Cloud Storage.
      gcs_staging_dir_bucket: Bucket name of staging directory.
      gcs_staging_dir_object: Bucket object of staging directory.

    Returns:
      Reference to the staged source, which has bucket, name, and generation
        fields.
    """

        suffix = '.tgz'
        if source.startswith('gs://') or os.path.isfile(source):
            _, suffix = os.path.splitext(source)

        source_object = 'source/{stamp}-{uuid}{suffix}'.format(
            stamp=times.GetTimeStampFromDateTime(times.Now()),
            uuid=uuid.uuid4().hex,
            suffix=suffix,
        )

        if gcs_staging_dir_object:
            source_object = gcs_staging_dir_object + '/' + source_object

        gcs_source_staging = resources.REGISTRY.Create(
            collection='storage.objects',
            bucket=gcs_staging_dir_bucket,
            object=source_object)

        gcs_client = storage_api.StorageClient()
        if source.startswith('gs://'):
            gcs_source = resources.REGISTRY.Parse(source,
                                                  collection='storage.objects')
            staged_source = gcs_client.Rewrite(gcs_source, gcs_source_staging)
        else:
            if not os.path.exists(source):
                raise c_exceptions.BadFileException(
                    'could not find source [{src}]'.format(src=source))
            elif os.path.isdir(source):
                source_snapshot = snapshot.Snapshot(source)
                size_str = resource_transform.TransformSize(
                    source_snapshot.uncompressed_size)
                log.status.Print(
                    'Creating temporary tarball archive of {num_files} file(s)'
                    ' totalling {size} before compression.'.format(
                        num_files=len(source_snapshot.files), size=size_str))
                staged_source = source_snapshot.CopyTarballToGCS(
                    gcs_client, gcs_source_staging)
            elif os.path.isfile(source):
                unused_root, ext = os.path.splitext(source)
                if ext not in _ALLOWED_SOURCE_EXT:
                    raise c_exceptions.BadFileException(
                        'Local file [{src}] is none of '.format(src=source) +
                        ', '.join(_ALLOWED_SOURCE_EXT))
                log.status.Print('Uploading local file [{src}] to '
                                 '[gs://{bucket}/{object}].'.format(
                                     src=source,
                                     bucket=gcs_source_staging.bucket,
                                     object=gcs_source_staging.object,
                                 ))
                staged_source = gcs_client.CopyFileToGCS(
                    source, gcs_source_staging)

        return staged_source
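A hypothetical caller of _StageSource, wiring the returned object reference into a Source message the way the earlier examples do; the bucket and object names are illustrative:

# Hypothetical usage; the bucket/object values below are illustrative.
staged = self._StageSource('./app', 'my-project_cloudbuild', 'deploy')
build_config.source = messages.Source(
    storageSource=messages.StorageSource(
        bucket=staged.bucket,
        object=staged.name,
        generation=staged.generation,
    ))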
Code example #17
  def Run(self, args):
    """This is what gets called when the user runs this command.

    Args:
      args: an argparse namespace. All the arguments that were provided to this
        command invocation.

    Yields:
      Some value that we want to have printed later.
    """

    client = core_apis.GetClientInstance('cloudbuild', 'v1')
    messages = core_apis.GetMessagesModule('cloudbuild', 'v1')

    if args.ongoing:
      tz = times.GetTimeZone('UTC')
      now = times.Now(tz)
      now_seconds = times.GetTimeStampFromDateTime(now)

    # We are wrapping list_pager.YieldFromList in another yield loop so that
    # we can use custom exit-early and filtering functionality. This code will
    # be simplified once the cloudbuild service supports server-side filtering.
    #
    # The exit-early is to ensure that, when listing ongoing builds, the command
    # doesn't page through the entire history of terminated builds to find out
    # that there weren't any. The build list will always be delivered in sorted
    # order with createTime descending.
    #
    # The custom filtering checks build.status to see if a build is ongoing or
    # not, and skips those that are terminated.
    #
    # We copy and decrement the limit, because otherwise YieldFromList would
    # not understand when to stop, due to skipping terminated builds.
    #
    # We cannot give YieldFromList a predicate, because it would not know that
    # it needs to stop paging after a certain time threshold - with no ongoing
    # builds it would page through the entire build history.

    limit = args.limit

    for build in list_pager.YieldFromList(
        client.projects_builds,
        messages.CloudbuildProjectsBuildsListRequest(
            pageSize=args.page_size,
            projectId=properties.VALUES.core.project.Get()),
        field='builds',
        batch_size_attribute='pageSize'):
      if args.ongoing:
        tz_create_time = build.createTime
        create_time = times.ParseDateTime(tz_create_time, tz)
        create_seconds = times.GetTimeStampFromDateTime(create_time)
        delta_seconds = now_seconds - create_seconds
        if delta_seconds > _ONGOING_THRESHOLD_SECONDS:
          break
        if build.status not in [
            messages.Build.StatusValueValuesEnum.QUEUED,
            messages.Build.StatusValueValuesEnum.WORKING]:
          continue
      yield build
      if limit is not None:
        limit -= 1
        if limit == 0:
          break
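The exit-early check compares each build's age against a module-level cutoff defined elsewhere in the file; a plausible definition (the real window may differ):

# Assumed cutoff for the 'ongoing' scan; gcloud's actual value may differ.
_ONGOING_THRESHOLD_SECONDS = 60 * 60 * 24 * 7  # stop paging past ~7 days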