def _UploadFiles(service, code_bucket_ref):
    """Upload the service's files to the code bucket, when needed.

    A "hermetic" service — an image-based (i.e. Flexible) deployment that
    does not also serve static files — needs no upload. For any other
    service, the files are copied to the code bucket; whether gsutil or the
    native upload path is used depends on the app.use_gsutil property.

    Args:
      service: configuration for service to upload files for
      code_bucket_ref: cloud_storage.BucketReference, the code bucket to
        upload to

    Returns:
      A manifest of files uploaded in the format expected by the Admin API,
      or None if no upload was necessary.
    """
    if service.is_hermetic:
        # Hermetic services ship everything inside the Docker image.
        return None
    if properties.VALUES.app.use_gsutil.GetBool():
        result = deploy_app_command_util.CopyFilesToCodeBucket(
            service, code_bucket_ref)
        metrics.CustomTimedEvent(metric_names.COPY_APP_FILES)
    else:
        result = deploy_app_command_util.CopyFilesToCodeBucketNoGsUtil(
            service, code_bucket_ref)
        metrics.CustomTimedEvent(metric_names.COPY_APP_FILES_NO_GSUTIL)
    return result
Example #2
0
    def _PossiblyUploadFiles(self, image, service_info, source_dir,
                             code_bucket_ref, flex_image_build_option):
        """Upload files for this deployment, if required for this service.

        An upload happens when flex_image_build_option is
        FlexImageBuildOptions.ON_SERVER, or when the deployment is
        non-hermetic and no prebuilt image was supplied.

        Args:
          image: str or None, the URL for the Docker image to be deployed
            (if image already exists).
          service_info: yaml_parsing.ServiceYamlInfo, service configuration
            to be deployed
          source_dir: str, path to the service's source directory
          code_bucket_ref: cloud_storage.BucketReference where the service's
            files have been uploaded
          flex_image_build_option: FlexImageBuildOptions, whether a flex
            deployment should upload files so that the server can build the
            image or build the image on client.

        Returns:
          Dictionary mapping source files to Google Cloud Storage locations,
          or None when no upload is needed.
        """
        build_on_server = (
            flex_image_build_option == FlexImageBuildOptions.ON_SERVER)
        # Non-hermetic services need their files outside the Docker image,
        # unless an image was already built.
        needs_upload = build_on_server or (
            not image and not service_info.is_hermetic)
        if not needs_upload:
            return None
        return deploy_app_command_util.CopyFilesToCodeBucket(
            service_info, source_dir, code_bucket_ref)
Example #3
0
    def Deploy(self, service, new_version, code_bucket_ref, image,
               all_services, gcr_domain):
        """Deploy the given service.

        Performs all deployment steps for the given service (if applicable):
        * Enable endpoints (for beta deployments)
        * Build and push the Docker image (Flex only, if image_url not
          provided)
        * Upload files (non-hermetic deployments)
        * Create the new version
        * Promote the version to receive all traffic (if --promote given
          (default))
        * Stop the previous version (if new version promoted and
          --stop-previous-version given (default))

        Args:
          service: yaml_parsing.ServiceYamlInfo, service configuration to be
            deployed
          new_version: version_util.Version describing where to deploy the
            service
          code_bucket_ref: cloud_storage.BucketReference where the service's
            files have been uploaded
          image: str or None, the URL for the Docker image to be deployed
            (if image already exists).
          all_services: dict of service ID to service_util.Service objects
            corresponding to all pre-existing services (used to determine
            how to promote this version to receive all traffic, if
            applicable).
          gcr_domain: str, Cloud Registry domain, determines the physical
            location of the image. E.g. `us.gcr.io`.
        """
        log.status.Print(
            'Beginning deployment of service [{service}]...'.format(
                service=new_version.service))

        # Managed VMs is a deprecated environment; warn but continue.
        if service.env is util.Environment.MANAGED_VMS:
            log.warning(MANAGED_VMS_DEPRECATION_WARNING)
        # Stage the app if the runtime requires it; otherwise deploy straight
        # from the directory containing the service's config file.
        with self.stager.Stage(service.file, service.runtime,
                               service.env) as staging_dir:
            source_dir = staging_dir or os.path.dirname(service.file)
            endpoints_info = self._PossiblyConfigureEndpoints(
                service, source_dir, new_version)
            # May rebind `image` to a freshly built image URL (Flex).
            image = self._PossiblyBuildAndPush(new_version, service,
                                               source_dir, image,
                                               code_bucket_ref, gcr_domain)
            manifest = None
            # "Non-hermetic" services require file upload outside the Docker image.
            if not service.is_hermetic:
                manifest = deploy_app_command_util.CopyFilesToCodeBucket(
                    service, source_dir, code_bucket_ref,
                    self.deploy_options.upload_strategy)

            # Actually create the new version of the service.
            message = 'Updating service [{service}]'.format(
                service=new_version.service)
            with progress_tracker.ProgressTracker(message):
                metrics.CustomTimedEvent(metric_names.DEPLOY_API_START)
                self.api_client.DeployService(new_version.service,
                                              new_version.id, service,
                                              manifest, image, endpoints_info)
                metrics.CustomTimedEvent(metric_names.DEPLOY_API)
                # Routes traffic to the new version (and possibly stops the
                # previous one), according to the deploy options.
                self._PossiblyPromote(all_services, new_version)
    def testLifecyclePolicy(self):
        """Check that life cycle policy is inspected.

        The methods are unit tested in other classes. This is more of an
        integration test, to ensure that the actual file upload respects the
        lifecycle policy (files older than the bucket's TTL are re-uploaded).
        """

        # Temporarily disable the lifecycle mocks so the real lifecycle
        # inspection path runs against the apitools expectations below.
        self.lifecycle_patcher.stop()
        messages = cloud_storage_util.storage_v1
        # Pin "now" so the age-based boundary below is deterministic.
        self.now_mock = self.StartObjectPatch(times,
                                              'Now',
                                              return_value=datetime.datetime(
                                                  2018, 4, 30))

        # Expect buckets.Get for lifecycle metadata: a single Delete rule
        # with a 15-day age condition.
        rules = [
            messages.Bucket.LifecycleValue.RuleValueListEntry(
                action=messages.Bucket.LifecycleValue.RuleValueListEntry.
                ActionValue(type='Delete'),
                condition=messages.Bucket.LifecycleValue.RuleValueListEntry.
                ConditionValue(age=15))
        ]
        self.apitools_client.buckets.Get.Expect(
            messages.StorageBucketsGetRequest(bucket=self._BUCKET_NAME),
            response=messages.Bucket(lifecycle=messages.Bucket.LifecycleValue(
                rule=rules)))

        # The boundary is 2018-04-16 00:00: somecontents5 falls just before
        # it (stale), somecontents6 just after (still fresh).
        file_list = [('somecontents5', datetime.datetime(2018, 4, 15, 23)),
                     ('somecontents6', datetime.datetime(2018, 4, 16, 2))]
        objects = messages.Objects(items=[
            messages.Object(name=cloud_storage_util.GetSha(c), timeCreated=d)
            for c, d in file_list
        ])

        self.apitools_client.objects.List.Expect(
            messages.StorageObjectsListRequest(bucket=self._BUCKET_NAME),
            response=objects)

        # We are re-uploading app.yaml due to old age, to be safe
        self.ExpectUploads([('app.yaml', 'somecontents5')])

        manifest = deploy_app_command_util.CopyFilesToCodeBucket(
            self.sub_dir, self.sub_dir_fnames, self._BUCKET)

        # Only files under extra/ should be in the extra manifest
        extra_module_files = [
            f for f in _FILES if os.path.dirname(f) == 'extra'
        ]
        rel_paths = [os.path.relpath(f, 'extra/') for f in extra_module_files]
        self._AssertFilesInManifest(
            [rel_path.replace('\\', '/') for rel_path in rel_paths], manifest)

        # re-enable the lifecycle mocks for subsequent tests
        self.lifecycle_patcher.start()
    def testSingleModuleEmptyBucket(self):
        """Every file is uploaded when the bucket starts out empty."""
        self.ExpectList([])
        self.ExpectUploads(six.iteritems(_FILES))

        manifest = deploy_app_command_util.CopyFilesToCodeBucket(
            self.top_dir, self.top_dir_fnames, self._BUCKET)

        expected = [name.replace('\\', '/') for name in _FILES]
        self._AssertFilesInManifest(expected, manifest)
        self.get_pool_mock.assert_called_once_with(16)
    def testSingleModuleLargeFile(self):
        """An over-limit file aborts the copy with LargeFileError."""
        self.ExpectList([])

        # 13 isn't arbitrary, it comes from the largest object in _FILES
        expected_regex = (
            r'Cannot upload file \[.*\], which has size \[13\] '
            r'\(greater than maximum allowed size of \[12\]\).')
        with self.assertRaisesRegex(deploy_app_command_util.LargeFileError,
                                    expected_regex):
            deploy_app_command_util.CopyFilesToCodeBucket(
                self.top_dir, self.top_dir_fnames, self._BUCKET,
                max_file_size=12)
    def testListBucketError(self):
        """A 404 while listing surfaces as BucketNotFoundError."""
        not_found = http_error.MakeHttpError(
            code=404,
            url='https://www.googleapis.com/storage/v1/b/missing_bucket/o',
            message='Not Found',
            reason='notFound')
        self.ExpectListException(not_found)

        expected_regex = (
            r'Could not list bucket: \[{}\] bucket does not exist.'.format(
                'somebucket'))
        with self.assertRaisesRegex(storage_api.BucketNotFoundError,
                                    expected_regex):
            deploy_app_command_util.CopyFilesToCodeBucket(
                self.top_dir, self.top_dir_fnames, self._BUCKET)
    def testSingleModulePartialUpload(self):
        """Files already present in the bucket are not re-uploaded."""
        sorted_files = sorted(six.iteritems(_FILES))
        # Pretend the first two are already in the bucket; only the
        # remainder should be uploaded.
        self.ExpectList(sorted_files[:2])
        self.ExpectUploads(sorted_files[2:])

        manifest = deploy_app_command_util.CopyFilesToCodeBucket(
            self.top_dir, self.top_dir_fnames, self._BUCKET)

        expected = [name.replace('\\', '/') for name in _FILES]
        self._AssertFilesInManifest(expected, manifest)
 def testSingleModuleEmptyBucketSourceContext(self):
     """Source contexts are generated and included in the upload set."""
     self.ExpectList([])
     self.ExpectUploads(six.iteritems(_FILES_WITH_SOURCE_CONTEXTS))
     # Stub out context generation so the test controls the fake contexts.
     with mock.patch.object(
             context_util, '_GetSourceContexts', autospec=True,
             return_value=source_context_util.FAKE_CONTEXTS) as get_contexts:
         manifest = deploy_app_command_util.CopyFilesToCodeBucket(
             self.top_dir, self.top_dir_fnames, self._BUCKET)
     get_contexts.assert_called_once_with(self.top_dir)
     expected = [
         name.replace('\\', '/') for name in _FILES_WITH_SOURCE_CONTEXTS
     ]
     self._AssertFilesInManifest(expected, manifest)
    def _PossiblyUploadFiles(self, image, service_info, upload_dir,
                             source_files, code_bucket_ref,
                             flex_image_build_option):
        """Upload files for this deployment, if required for this service.

        An upload happens when no prebuilt image was supplied and either
        flex_image_build_option is FlexImageBuildOptions.ON_SERVER or the
        deployment is non-hermetic.

        Args:
          image: str or None, the URL for the Docker image to be deployed
            (if image already exists).
          service_info: yaml_parsing.ServiceYamlInfo, service configuration
            to be deployed
          upload_dir: str, path to the service's upload directory
          source_files: [str], relative paths to upload.
          code_bucket_ref: cloud_storage.BucketReference where the service's
            files have been uploaded
          flex_image_build_option: FlexImageBuildOptions, whether a flex
            deployment should upload files so that the server can build the
            image or build the image on client or build the image on client
            using the buildpacks.

        Returns:
          Dictionary mapping source files to Google Cloud Storage locations,
          or None when no upload is needed.

        Raises:
          RequiredFileMissingError: if a required file is not uploaded.
        """
        build_on_server = (
            flex_image_build_option == FlexImageBuildOptions.ON_SERVER)
        # "Non-hermetic" services require file upload outside the Docker
        # image, unless an image was already built.
        if image or not (build_on_server or not service_info.is_hermetic):
            return None

        # Flex deployments must include the app.yaml among the uploads.
        if (service_info.env == env.FLEX and not _AppYamlInSourceFiles(
                source_files, service_info.GetAppYamlBasename())):
            raise RequiredFileMissingError(service_info.GetAppYamlBasename())

        # Some standard-environment runtimes enforce a per-file size cap.
        limit = None
        if (service_info.env == env.STANDARD
                and service_info.runtime in _RUNTIMES_WITH_FILE_SIZE_LIMITS):
            limit = _MAX_FILE_SIZE_STANDARD
        return deploy_app_command_util.CopyFilesToCodeBucket(
            upload_dir, source_files, code_bucket_ref, max_file_size=limit)
Example #11
0
    def Run(self, args):
        """Perform the deployment described by the parsed command-line args.

        Builds/pushes Docker images as needed, uploads non-hermetic module
        files to the code bucket, deploys each module (via the Admin API or
        the legacy admin console), optionally promotes the new version, and
        finally updates any config files.

        Args:
          args: argparse.Namespace, the parsed arguments for this command.

        Returns:
          Dictionary mapping deployed module names to their serving URLs.
        """
        if args.env_vars:
            # log.warn is a deprecated alias of log.warning (and the other
            # snippets in this codebase already use log.warning).
            log.warning(
                'The env-vars flag is deprecated, and will soon be removed.')
        # Do this up-front to print applicable warnings early
        promote = deploy_command_util.GetPromoteFromArgs(args)

        project = properties.VALUES.core.project.Get(required=True)
        version = args.version or util.GenerateVersionId()
        use_cloud_build = properties.VALUES.app.use_cloud_build.GetBool()

        app_config = yaml_parsing.AppConfigSet(args.deployables)

        # The flag takes precedence over the property when choosing where
        # the Docker build runs; default is a remote build.
        remote_build = True
        docker_build_property = properties.VALUES.app.docker_build.Get()
        if args.docker_build:
            remote_build = args.docker_build == 'remote'
        elif docker_build_property:
            remote_build = docker_build_property == 'remote'

        gae_client = appengine_client.AppengineClient(args.server)
        api_client = appengine_api_client.GetApiClient(self.Http(timeout=None))
        log.debug(
            'API endpoint: [{endpoint}], API version: [{version}]'.format(
                endpoint=api_client.client.url,
                version=api_client.api_version))
        cloudbuild_client = cloudbuild_v1.CloudbuildV1(http=self.Http(),
                                                       get_credentials=False)
        deployed_urls = _DisplayProposedDeployment(project, app_config,
                                                   version, promote)
        if args.version or promote:
            # Prompt if there's a chance that you're overwriting something important:
            # If the version is set manually, you could be deploying over something.
            # If you're setting the new deployment to be the default version, you're
            # changing the target of the default URL.
            # Otherwise, all existing URLs will continue to work, so need to prompt.
            console_io.PromptContinue(default=True,
                                      throw_if_unattended=False,
                                      cancel_on_no=True)

        log.status.Print('Beginning deployment...')

        code_bucket = None
        if use_cloud_build:
            # If using Argo CloudBuild, we'll need to upload source to a GCS bucket.
            code_bucket = self._GetCodeBucket(api_client, args)

        modules = app_config.Modules()
        if args.image_url:
            if len(modules) != 1:
                raise exceptions.ToolException(
                    'No more than one module may be deployed when using the '
                    'image-url flag')
            for registry in constants.ALL_SUPPORTED_REGISTRIES:
                if args.image_url.startswith(registry):
                    break
            else:
                raise exceptions.ToolException(
                    '%s is not in a supported registry.  Supported registries are %s'
                    % (args.image_url, constants.ALL_SUPPORTED_REGISTRIES))
            # modules.keys()[0] breaks on Python 3 where keys() is a
            # non-subscriptable view; next(iter(...)) works on both 2 and 3.
            module = next(iter(modules))
            images = {module: args.image_url}
        else:
            images = deploy_command_util.BuildAndPushDockerImages(
                modules, version, gae_client, cloudbuild_client, code_bucket,
                self.cli, remote_build)

        deployment_manifests = {}
        if app_config.NonHermeticModules() and self.use_admin_api:
            # TODO(clouser): Consider doing this in parallel with
            # BuildAndPushDockerImage.
            code_bucket = self._GetCodeBucket(api_client, args)
            metrics.CustomTimedEvent(metric_names.GET_CODE_BUCKET)
            log.debug('Using bucket [{b}].'.format(b=code_bucket))
            if not code_bucket:
                raise exceptions.ToolException(
                    ('Could not retrieve the default Google '
                     'Cloud Storage bucket for [{a}]. '
                     'Please try again or use the [bucket] '
                     'argument.').format(a=project))
            deployment_manifests = deploy_app_command_util.CopyFilesToCodeBucket(
                app_config.NonHermeticModules().items(), code_bucket)
            metrics.CustomTimedEvent(metric_names.COPY_APP_FILES)

        # Now do deployment. dict.iteritems() does not exist on Python 3;
        # items() works on both 2 and 3.
        for (module, info) in app_config.Modules().items():
            message = 'Updating module [{module}]'.format(module=module)
            with console_io.ProgressTracker(message):
                if args.force:
                    gae_client.CancelDeployment(module=module, version=version)
                    metrics.CustomTimedEvent(metric_names.CANCEL_DEPLOYMENT)

                if info.is_hermetic or self.use_admin_api:
                    api_client.DeployModule(module, version, info,
                                            deployment_manifests.get(module),
                                            images.get(module))
                    metrics.CustomTimedEvent(metric_names.DEPLOY_API)
                else:
                    gae_client.DeployModule(module, version, info.parsed,
                                            info.file)
                    metrics.CustomTimedEvent(metric_names.DEPLOY_ADMIN_CONSOLE)

                if promote:
                    if info.is_hermetic or self.use_admin_api:
                        api_client.SetDefaultVersion(module, version)
                        metrics.CustomTimedEvent(
                            metric_names.SET_DEFAULT_VERSION_API)
                    else:
                        gae_client.SetDefaultVersion(modules=[module],
                                                     version=version)
                        metrics.CustomTimedEvent(
                            metric_names.SET_DEFAULT_VERSION_ADMIN_CONSOLE)

        # Config files.
        for (c, info) in app_config.Configs().items():
            message = 'Updating config [{config}]'.format(config=c)
            with console_io.ProgressTracker(message):
                gae_client.UpdateConfig(c, info.parsed)
        return deployed_urls