def _GetDockerfiles(info, dockerfile_dir):
  """Returns map of in-memory Docker-related files to be packaged.

  Returns the files in memory so that we don't have to drop them on disk;
  instead, we include them in the archive sent to App Engine directly.

  Args:
    info: (googlecloudsdk.api_lib.app.yaml_parsing.ServiceYamlInfo)
      The service config.
    dockerfile_dir: str, path to the directory to fingerprint and generate
      Dockerfiles for.

  Raises:
    UnsatisfiedRequirementsError: Raised if the code in the directory doesn't
      satisfy the requirements of the specified runtime type.

  Returns:
    A dictionary mapping filename relative to the archive root (str) to file
    contents (str).
  """
  params = ext_runtime.Params(appinfo=info.parsed, deploy=True)
  configurator = fingerprinter.IdentifyDirectory(dockerfile_dir, params)
  if configurator:
    dockerfiles = configurator.GenerateConfigData()
    return {d.filename: d.contents for d in dockerfiles}
  else:
    raise UnsatisfiedRequirementsError(
        'Your application does not satisfy all of the requirements for a '
        'runtime of type [{0}].  Please correct the errors and try '
        'again.'.format(info.runtime))
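# --- Illustrative sketch, not part of the original module ---
# A minimal, hypothetical example of consuming the map returned above: assuming
# `service_info` is a yaml_parsing.ServiceYamlInfo parsed elsewhere, the
# generated contents could simply be written next to app.yaml for inspection.
# `_WriteGeneratedDockerfiles` is a made-up helper, not part of the SDK.
def _WriteGeneratedDockerfiles(service_info):
  """Hypothetical helper: writes the in-memory Docker files to disk."""
  source_dir = os.path.dirname(service_info.file)
  for rel_path, contents in _GetDockerfiles(service_info, source_dir).items():
    target = os.path.join(source_dir, rel_path)
    with open(target, 'w') as f:
      f.write(contents)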
def _GetDockerfileCreator(info):
    """Returns a function to create a dockerfile if the user doesn't have one.

  Args:
    info: (googlecloudsdk.api_lib.app.yaml_parsing.ServiceYamlInfo)
      The service config.

  Raises:
    DockerfileError: Raised if a user supplied a Dockerfile and a non-custom
      runtime.
    NoDockerfileError: Raised if a user didn't supply a Dockerfile and chose a
      custom runtime.
    UnsatisfiedRequirementsError: Raised if the code in the directory doesn't
      satisfy the requirements of the specified runtime type.
  Returns:
    callable(), a function that can be used to create the correct Dockerfile
    later on.
  """
    # Use the path to app.yaml (info.file) to determine the location of the
    # Dockerfile.
    dockerfile_dir = os.path.dirname(info.file)
    dockerfile = os.path.join(dockerfile_dir, 'Dockerfile')

    if info.runtime != 'custom' and os.path.exists(dockerfile):
        raise DockerfileError(
            'There is a Dockerfile in the current directory, and the runtime field '
            'in {0} is currently set to [runtime: {1}]. To use your Dockerfile to '
            'build a custom runtime, set the runtime field in {0} to '
            '[runtime: custom]. To continue using the [{1}] runtime, please omit '
            'the Dockerfile from this directory.'.format(
                info.file, info.runtime))

    # If we're "custom", there needs to be a Dockerfile.
    if info.runtime == 'custom':
        if os.path.exists(dockerfile):
            log.info('Using %s found in %s', config.DOCKERFILE, dockerfile_dir)

            def NullGenerator():
                return lambda: None

            return NullGenerator
        else:
            raise NoDockerfileError(
                'You must provide your own Dockerfile when using a custom runtime.  '
                'Otherwise provide a "runtime" field with one of the supported '
                'runtimes.')

    # Check the fingerprinting based code.
    params = ext_runtime.Params(appinfo=info.parsed, deploy=True)
    configurator = fingerprinter.IdentifyDirectory(dockerfile_dir, params)
    if configurator:
        return configurator.GenerateConfigs
    # Then throw an error.
    else:
        raise UnsatisfiedRequirementsError(
            'Your application does not satisfy all of the requirements for a '
            'runtime of type [{0}].  Please correct the errors and try '
            'again.'.format(info.runtime))
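# --- Illustrative sketch, not part of the original module ---
# The creator returned above is deferred: each branch hands back a callable that
# does the actual work only when invoked (the custom-runtime branch returns a
# no-op lambda). A hypothetical caller, assuming `service_info` is a parsed
# ServiceYamlInfo, might use it like this; `_EnsureDockerfiles` is made up here.
def _EnsureDockerfiles(service_info):
    """Hypothetical helper: invokes the deferred Dockerfile creator."""
    dockerfile_creator = _GetDockerfileCreator(service_info)
    return dockerfile_creator()  # Generates the configs, or no-ops for custom.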
def _GetDockerfiles(info, dockerfile_dir):
    """Returns file objects to create dockerfiles if the user doesn't have them.

  Args:
    info: (googlecloudsdk.api_lib.app.yaml_parsing.ServiceYamlInfo)
      The service config.
    dockerfile_dir: str, path to the directory with the Dockerfile
  Raises:
    DockerfileError: Raised if a user supplied a Dockerfile and a non-custom
      runtime.
    NoDockerfileError: Raised if a user didn't supply a Dockerfile and chose a
      custom runtime.
    UnsatisfiedRequirementsError: Raised if the code in the directory doesn't
      satisfy the requirements of the specified runtime type.
  Returns:
    A dictionary of filename (str) to Dockerfile contents (str).
  """
    dockerfile = os.path.join(dockerfile_dir, 'Dockerfile')

    if info.runtime != 'custom' and os.path.exists(dockerfile):
        raise DockerfileError(
            'There is a Dockerfile in the current directory, and the runtime field '
            'in {0} is currently set to [runtime: {1}]. To use your Dockerfile to '
            'build a custom runtime, set the runtime field in {0} to '
            '[runtime: custom]. To continue using the [{1}] runtime, please omit '
            'the Dockerfile from this directory.'.format(
                info.file, info.runtime))

    # If we're "custom", there needs to be a Dockerfile.
    if info.runtime == 'custom':
        if os.path.exists(dockerfile):
            log.info('Using %s found in %s', config.DOCKERFILE, dockerfile_dir)
            return {}
        else:
            raise NoDockerfileError(
                'You must provide your own Dockerfile when using a custom runtime.  '
                'Otherwise provide a "runtime" field with one of the supported '
                'runtimes.')

    # Check the fingerprinting based code.
    gen_files = {}
    params = ext_runtime.Params(appinfo=info.parsed, deploy=True)
    configurator = fingerprinter.IdentifyDirectory(dockerfile_dir, params)
    if configurator:
        dockerfiles = configurator.GenerateConfigData()
        gen_files.update((d.filename, d.contents) for d in dockerfiles)
        return gen_files
    # Then throw an error.
    else:
        raise UnsatisfiedRequirementsError(
            'Your application does not satisfy all of the requirements for a '
            'runtime of type [{0}].  Please correct the errors and try '
            'again.'.format(info.runtime))
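# --- Illustrative sketch, not part of the original module ---
# A hedged example of feeding the returned map into an upload manifest:
# `source_files` is assumed to be a dict of files already gathered from disk,
# and generated entries should never shadow files the user actually wrote.
# `_MergeGeneratedFiles` is a made-up helper, not part of the SDK.
def _MergeGeneratedFiles(service_info, source_files):
    """Hypothetical helper: merges generated Docker files into a manifest."""
    generated = _GetDockerfiles(service_info, os.path.dirname(service_info.file))
    merged = dict(source_files)
    for rel_path, contents in generated.items():
        if rel_path not in merged:  # Keep user-supplied files untouched.
            merged[rel_path] = contents
    return merged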
def CreateAppYamlForAppDirectory(directory):
    """Ensures that an app.yaml exists or creates it if necessary.

  Attempts to fingerprint the directory and create one if needed. This is an
  interactive process. If this does not raise an error, the app.yaml is
  guaranteed to exist once this is done.

  Args:
    directory: str, The path to the directory to create the app.yaml in.

  Raises:
    NoAppIdentifiedError: If the application type could not be identified, or
        if a yaml file could not be generated based on the state of the source.

  Returns:
    str, The path to the created app.yaml file.
  """
    console_io.PromptContinue(
        'Deployment to Google App Engine requires an app.yaml file. '
        'This command will run `gcloud beta app gen-config` to generate an '
        'app.yaml file for you in the current directory (if the current '
        'directory does not contain an App Engine service, please answer '
        '"no").',
        cancel_on_no=True)
    # These params indicate that we don't have an app.yaml, that we don't want
    # to generate Docker files here (we will do that in a single place later),
    # and that we don't want to persist the Dockerfiles.
    params = ext_runtime.Params(appinfo=None, deploy=False, custom=False)
    configurator = fingerprinter.IdentifyDirectory(directory, params=params)
    if configurator is None:
        raise app_exc.NoAppIdentifiedError(
            'Could not identify an app in the current directory.\n\n'
            'Please prepare an app.yaml file for your application manually '
            'and deploy again.')
    configurator.MaybeWriteAppYaml()
    yaml_path = os.path.join(directory, DEFAULT_DEPLOYABLE)
    if not os.path.exists(yaml_path):
        raise app_exc.NoAppIdentifiedError(
            'Failed to create an app.yaml for your app.\n\n'
            'Please prepare an app.yaml file for your application manually '
            'and deploy again.')
    return yaml_path
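# --- Illustrative sketch, not part of the original module ---
# A hypothetical call site: only fall back to interactive generation when no
# app.yaml is present, assuming DEFAULT_DEPLOYABLE is the 'app.yaml' constant
# used above. `_EnsureAppYaml` is a made-up helper, not part of the SDK.
def _EnsureAppYaml(directory):
    """Hypothetical helper: returns an existing app.yaml path or generates one."""
    yaml_path = os.path.join(directory, DEFAULT_DEPLOYABLE)
    if os.path.exists(yaml_path):
        return yaml_path
    return CreateAppYamlForAppDirectory(directory)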
def _GetDockerfileCreator(info, config_cleanup=None):
    """Returns a function to create a dockerfile if the user doesn't have one.

  Args:
    info: (googlecloudsdk.api_lib.app.yaml_parsing.ModuleYamlInfo)
      The module config.
    config_cleanup: (callable() or None) If a temporary Dockerfile has already
      been created during the course of the deployment, this should be a
      callable that deletes it.

  Raises:
    DockerfileError: Raised if a user supplied a Dockerfile and a non-custom
      runtime.
    NoDockerfileError: Raised if a user didn't supply a Dockerfile and chose a
      custom runtime.
    UnsupportedRuntimeError: Raised if we can't detect a runtime.
  Returns:
    callable(), a function that can be used to create the correct Dockerfile
    later on.
  """
    # Use the path to app.yaml (info.file) to determine the location of the
    # Dockerfile.
    dockerfile_dir = os.path.dirname(info.file)
    dockerfile = os.path.join(dockerfile_dir, 'Dockerfile')

    if config_cleanup:
        # Dockerfile has already been generated. It still needs to be cleaned up.
        # This must be before the other conditions, since it's a special case.
        return lambda: config_cleanup

    if info.runtime != 'custom' and os.path.exists(dockerfile):
        raise DockerfileError(
            'There is a Dockerfile in the current directory, and the runtime field '
            'in {0} is currently set to [runtime: {1}]. To use your Dockerfile to '
            'build a custom runtime, set the runtime field in {0} to '
            '[runtime: custom]. To continue using the [{1}] runtime, please omit '
            'the Dockerfile from this directory.'.format(
                info.file, info.runtime))

    # If we're "custom", there needs to be a Dockerfile.
    if info.runtime == 'custom':
        if os.path.exists(dockerfile):
            log.info('Using %s found in %s', config.DOCKERFILE, dockerfile_dir)

            def NullGenerator():
                return lambda: None

            return NullGenerator
        else:
            raise NoDockerfileError(
                'You must provide your own Dockerfile when using a custom runtime.  '
                'Otherwise provide a "runtime" field with one of the supported '
                'runtimes.')

    # Check the fingerprinting based code.
    params = fingerprinting.Params(appinfo=info.parsed, deploy=True)
    configurator = fingerprinter.IdentifyDirectory(dockerfile_dir, params)
    if configurator:
        return configurator.GenerateConfigs
    # Then throw an error.
    else:
        raise UnsupportedRuntimeError(
            'We were unable to detect the runtime to use for this application. '
            'Please specify the [runtime] field in your application yaml file '
            'or check that your application is configured correctly.')
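# --- Illustrative sketch, not part of the original module ---
# The config_cleanup and custom-runtime branches above return a cleanup callable
# once the creator is invoked; assuming GenerateConfigs behaves the same way, a
# hypothetical caller can guarantee temporary Dockerfiles are removed even when
# the build step fails. `module_info` is assumed to be a parsed ModuleYamlInfo
# and `build_fn` an arbitrary build step; the helper is made up for illustration.
def _BuildWithGeneratedConfigs(module_info, build_fn, config_cleanup=None):
    """Hypothetical helper: generates configs, runs build_fn, then cleans up."""
    dockerfile_creator = _GetDockerfileCreator(module_info, config_cleanup)
    cleanup = dockerfile_creator()
    try:
        return build_fn()
    finally:
        if cleanup:
            cleanup()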
    def Run(self, args):
        project = properties.VALUES.core.project.Get(required=True)
        version = args.version or util.GenerateVersionId()
        use_cloud_build = properties.VALUES.app.use_cloud_build.GetBool()

        config_cleanup = None
        if args.deployables:
            app_config = yaml_parsing.AppConfigSet(args.deployables)
        else:
            if not os.path.exists(DEFAULT_DEPLOYABLE):
                console_io.PromptContinue(
                    'Deployment to Google App Engine requires an app.yaml file. '
                    'This command will run `gcloud preview app gen-config` to generate '
                    'an app.yaml file for you in the current directory (if the current '
                    'directory does not contain an App Engine module, please answer '
                    '"no").',
                    cancel_on_no=True)
                # This generates the app.yaml AND the Dockerfile (and related files).
                params = ext_runtime.Params(deploy=True)
                configurator = fingerprinter.IdentifyDirectory(os.getcwd(),
                                                               params=params)
                if configurator is None:
                    raise NoAppIdentifiedError(
                        'Could not identify an app in the current directory.\n\n'
                        'Please prepare an app.yaml file for your application manually '
                        'and deploy again.')
                config_cleanup = configurator.GenerateConfigs()
                log.status.Print(
                    '\nCreated [{0}] in the current directory.\n'.format(
                        DEFAULT_DEPLOYABLE))
            app_config = yaml_parsing.AppConfigSet([DEFAULT_DEPLOYABLE])

        # If the app has enabled Endpoints API Management features, pass
        # control to the cloud_endpoints handler.
        for _, module in app_config.Modules().items():
            if module and module.parsed and module.parsed.beta_settings:
                bs = module.parsed.beta_settings
                use_endpoints = bs.get('use_endpoints_api_management',
                                       '').lower()
                if (use_endpoints in ('true', '1', 'yes')
                        and bs.get('endpoints_swagger_spec_file')):
                    cloud_endpoints.PushServiceConfig(
                        bs.get('endpoints_swagger_spec_file'), project,
                        apis.GetClientInstance('servicemanagement', 'v1',
                                               self.Http()),
                        apis.GetMessagesModule('servicemanagement', 'v1'))

        remote_build = True
        docker_build_property = properties.VALUES.app.docker_build.Get()
        if args.docker_build:
            remote_build = args.docker_build == 'remote'
        elif docker_build_property:
            remote_build = docker_build_property == 'remote'

        clients = _AppEngineClients(
            appengine_client.AppengineClient(args.server,
                                             args.ignore_bad_certs),
            appengine_api_client.GetApiClient(self.Http(timeout=None)))
        log.debug(
            'API endpoint: [{endpoint}], API version: [{version}]'.format(
                endpoint=clients.api.client.url,
                version=clients.api.api_version))
        cloudbuild_client = apis.GetClientInstance('cloudbuild', 'v1',
                                                   self.Http())
        storage_client = apis.GetClientInstance('storage', 'v1', self.Http())
        promote = properties.VALUES.app.promote_by_default.GetBool()
        deployed_urls = _DisplayProposedDeployment(project, app_config,
                                                   version, promote)
        if args.version or promote:
            # Prompt if there's a chance that you're overwriting something important:
            # If the version is set manually, you could be deploying over something.
            # If you're setting the new deployment to be the default version, you're
            # changing the target of the default URL.
            # Otherwise, all existing URLs will continue to work, so no need to prompt.
            console_io.PromptContinue(default=True,
                                      throw_if_unattended=False,
                                      cancel_on_no=True)

        log.status.Print('Beginning deployment...')

        source_contexts = []
        if args.repo_info_file:
            if args.image_url:
                raise NoRepoInfoWithImageUrlError()

            try:
                with open(args.repo_info_file, 'r') as f:
                    source_contexts = json.load(f)
            except (ValueError, IOError) as ex:
                raise RepoInfoLoadError(args.repo_info_file, ex)
            if isinstance(source_contexts, dict):
                # This is an old-style source-context.json file. Convert to a new-
                # style array of extended contexts.
                source_contexts = [
                    context_util.ExtendContextDict(source_contexts)
                ]

        code_bucket_ref = None
        if use_cloud_build or app_config.NonHermeticModules():
            # If using Argo CloudBuild, we'll need to upload source to a GCS bucket.
            code_bucket_ref = self._GetCodeBucket(clients.api, args)
            metrics.CustomTimedEvent(metric_names.GET_CODE_BUCKET)
            log.debug('Using bucket [{b}].'.format(b=code_bucket_ref))

        modules = app_config.Modules()
        if any([m.RequiresImage() for m in modules.values()]):
            deploy_command_util.DoPrepareManagedVms(clients.gae)
        if args.image_url:
            if len(modules) != 1:
                raise MultiDeployError()
            for registry in constants.ALL_SUPPORTED_REGISTRIES:
                if args.image_url.startswith(registry):
                    break
            else:
                raise UnsupportedRegistryError(args.image_url)
            module = modules.keys()[0]
            images = {module: args.image_url}
        else:
            images = deploy_command_util.BuildAndPushDockerImages(
                modules, version, cloudbuild_client, storage_client,
                self.Http(), code_bucket_ref, self.cli, remote_build,
                source_contexts, config_cleanup)

        deployment_manifests = {}
        if app_config.NonHermeticModules():
            if properties.VALUES.app.use_gsutil.GetBool():
                copy_func = deploy_app_command_util.CopyFilesToCodeBucket
                metric_name = metric_names.COPY_APP_FILES
            else:
                copy_func = deploy_app_command_util.CopyFilesToCodeBucketNoGsUtil
                metric_name = metric_names.COPY_APP_FILES_NO_GSUTIL

            deployment_manifests = copy_func(
                app_config.NonHermeticModules().items(), code_bucket_ref,
                source_contexts, storage_client)
            metrics.CustomTimedEvent(metric_name)

        all_services = clients.api.ListServices()
        # Now do deployment.
        for (module, info) in app_config.Modules().iteritems():
            message = 'Updating module [{module}]'.format(module=module)
            with console_io.ProgressTracker(message):
                if args.force:
                    log.warning(
                        'The --force argument is deprecated and no longer '
                        'required. It will be removed in a future release.')

                clients.api.DeployModule(module, version, info,
                                         deployment_manifests.get(module),
                                         images.get(module))
                metrics.CustomTimedEvent(metric_names.DEPLOY_API)

                stop_previous_version = (
                    deploy_command_util.GetStopPreviousVersionFromArgs(args))
                if promote:
                    new_version = version_util.Version(project, module,
                                                       version)
                    _Promote(all_services, new_version, clients,
                             stop_previous_version)
                elif stop_previous_version:
                    log.info(
                        'Not stopping previous version because new version was not '
                        'promoted.')

        # Config files.
        for (c, info) in app_config.Configs().iteritems():
            message = 'Updating config [{config}]'.format(config=c)
            with console_io.ProgressTracker(message):
                clients.gae.UpdateConfig(c, info.parsed)
        return deployed_urls
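# --- Illustrative sketch, not part of the original command ---
# The docker-build resolution near the top of Run() gives the --docker-build
# flag precedence over the app/docker_build property and defaults to a remote
# build. A standalone restatement of that precedence, assuming plain string
# inputs; `_UseRemoteBuild` is a made-up helper.
def _UseRemoteBuild(flag_value, property_value):
    """Hypothetical helper mirroring the precedence used in Run()."""
    if flag_value:
        return flag_value == 'remote'
    if property_value:
        return property_value == 'remote'
    return True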