def _GetYamlPath(source_dir, service_path, skip_files, gen_files):
  """Returns the yaml path, optionally updating gen_files.

  Args:
    source_dir: str, the absolute path to the root of the application
      directory.
    service_path: str, the absolute path to the service YAML file
    skip_files: appengine.api.Validation._RegexStr, the validated regex object
      from the service info file.
    gen_files: dict, the dict of files to generate. May be updated if a file
      needs to be generated.

  Returns:
    str, the relative path to the service YAML file that should be used for
      build.
  """
  if files.IsDirAncestorOf(source_dir, service_path):
    relative = os.path.relpath(service_path, start=source_dir)
    # Only usable directly if the upload will not filter it out.
    if not util.ShouldSkip(skip_files, relative):
      return relative
  # The yaml is outside the upload root (or would be skipped), so stage a
  # copy of its contents under a content-derived name in gen_files.
  contents = files.ReadFileContents(service_path)
  # The checksum only guarantees a unique, stable filename; it is not a
  # security measure.
  digest = files.Checksum().AddContents(contents).HexDigest()
  staged_name = '_app_{}.yaml'.format(digest)
  gen_files[staged_name] = contents
  return staged_name
def _CopyIfNotWritable(source_dir, temp_dir):
  """Returns a writable directory with the same contents as source_dir.

  If source_dir is writable, it is used. Otherwise, a directory 'dest' inside
  of temp_dir is used.

  Args:
    source_dir: str, the directory to (potentially) copy
    temp_dir: str, the path to a writable temporary directory in which to
      store any copied code.

  Returns:
    str, the path to a writable directory with the same contents as
      source_dir (i.e. source_dir, if it's writable, or a copy otherwise).

  Raises:
    UploadFailureError: if the command exits non-zero.
  """
  # Fast path: nothing to copy when the tree is already writable.
  if files.HasWriteAccessInDir(source_dir):
    return source_dir

  # Copying the source into a subdirectory of itself would recurse forever.
  if files.IsDirAncestorOf(source_dir, temp_dir):
    raise UncopyablePackageError(
        'Cannot copy directory since working directory [{}] is inside of '
        'source directory [{}].'.format(temp_dir, source_dir))

  copy_target = os.path.join(temp_dir, 'dest')
  log.debug('Copying local source tree from [%s] to [%s]', source_dir,
            copy_target)
  try:
    files.CopyTree(source_dir, copy_target)
  except OSError:
    raise UncopyablePackageError(
        'Cannot write to working location [{}]'.format(copy_target))
  return copy_target
def BuildAndPushDockerImage(
    project,
    service,
    source_dir,
    version_id,
    code_bucket_ref,
    gcr_domain,
    runtime_builder_strategy=runtime_builders.RuntimeBuilderStrategy.NEVER):
  """Builds and pushes a set of docker images.

  Args:
    project: str, The project being deployed to.
    service: ServiceYamlInfo, The parsed service config.
    source_dir: str, path to the service's source directory
    version_id: The version id to deploy these services under.
    code_bucket_ref: The reference to the GCS bucket where the source will be
      uploaded.
    gcr_domain: str, Cloud Registry domain, determines the physical location
      of the image. E.g. `us.gcr.io`.
    runtime_builder_strategy: runtime_builders.RuntimeBuilderStrategy, whether
      to use the new CloudBuild-based runtime builders (alternative is old
      externalized runtimes).

  Returns:
    str, The name of the pushed container image.

  Raises:
    DockerfileError: if a Dockerfile is present, but the runtime is not
      "custom".
    NoDockerfileError: Raised if a user didn't supply a Dockerfile and chose
      a custom runtime.
    UnsatisfiedRequirementsError: Raised if the code in the directory doesn't
      satisfy the requirements of the specified runtime type.
  """
  needs_dockerfile = _NeedsDockerfile(service, source_dir)
  use_runtime_builders = runtime_builder_strategy.ShouldUseRuntimeBuilders(
      service.runtime, needs_dockerfile)

  # Nothing to do if this is not an image-based deployment.
  if not service.RequiresImage():
    return None
  log.status.Print(
      'Building and pushing image for service [{service}]'.format(
          service=service.module))

  gen_files = dict(_GetSourceContextsForUpload(source_dir))
  if needs_dockerfile and not use_runtime_builders:
    # Runtime builders generate a Dockerfile in the Cloud, so we only need to
    # generate one locally when we are *not* using runtime builders.
    gen_files.update(_GetDockerfiles(service, source_dir))

  image = docker_image.Image(
      dockerfile_dir=source_dir,
      repo=_GetImageName(project, service.module, version_id, gcr_domain),
      nocache=False,
      tag=config.DOCKER_IMAGE_TAG)

  metrics.CustomTimedEvent(metric_names.CLOUDBUILD_UPLOAD_START)
  object_ref = storage_util.ObjectReference(code_bucket_ref, image.tagged_repo)

  # Delegate to _GetYamlPath rather than duplicating its logic inline. The
  # helper additionally handles a service yaml matched by skip_files (it must
  # then be staged via gen_files, or the uploaded source would not contain
  # the yaml the build needs).
  relative_yaml_path = _GetYamlPath(source_dir, service.file,
                                    service.parsed.skip_files.regex, gen_files)

  try:
    cloud_build.UploadSource(image.dockerfile_dir, object_ref,
                             gen_files=gen_files,
                             skip_files=service.parsed.skip_files.regex)
  except (OSError, IOError) as err:
    # On Windows, overly-long paths surface as I/O errors; give a targeted
    # error message instead of a cryptic OSError.
    if platforms.OperatingSystem.IsWindows():
      if err.filename and len(err.filename) > _WINDOWS_MAX_PATH:
        raise WindowMaxPathError(err.filename)
    raise
  metrics.CustomTimedEvent(metric_names.CLOUDBUILD_UPLOAD)

  if use_runtime_builders:
    builder_reference = runtime_builders.FromServiceInfo(service, source_dir)
    log.info('Using runtime builder [%s]', builder_reference.build_file_uri)
    builder_reference.WarnIfDeprecated()
    # The build substitution expects a POSIX-style path regardless of the
    # local OS path separator.
    yaml_path = posixpath.join(*relative_yaml_path.split(os.sep))
    build = builder_reference.LoadCloudBuild(
        {'_OUTPUT_IMAGE': image.tagged_repo,
         '_GAE_APPLICATION_YAML_PATH': yaml_path})
    # TODO(b/37542869) Remove this hack once the API can take the gs:// path
    # as a runtime name.
    service.runtime = builder_reference.runtime
    service.parsed.SetEffectiveRuntime(builder_reference.runtime)
  else:
    build = cloud_build.GetDefaultBuild(image.tagged_repo)

  metrics.CustomTimedEvent(metric_names.CLOUDBUILD_EXECUTE_START)
  cloudbuild_build.CloudBuildClient().ExecuteCloudBuild(
      cloud_build.FixUpBuild(build, object_ref), project=project)
  metrics.CustomTimedEvent(metric_names.CLOUDBUILD_EXECUTE)

  return image.tagged_repo
def Update(self, update_seed=None, allow_no_backup=False,
           throw_if_unattended=False):
  """Performs an update of the given components.

  If no components are provided, it will attempt to update everything you
  have installed.

  Args:
    update_seed: list of str, A list of component ids to update.
    allow_no_backup: bool, True if we want to allow the updater to run
      without creating a backup.  This lets us be in the root directory of
      the SDK and still do an update.  It is more fragile if there is a
      failure, so we only do it if necessary.
    throw_if_unattended: bool, True to throw an exception on prompts when
      not running in interactive mode.

  Returns:
    bool, True if the update succeeded (or there was nothing to do), False
      if it was cancelled by the user.

  Raises:
    InvalidComponentError: If any of the given component ids do not exist.
  """
  # Hash rc files before and after the update so we can tell the user to
  # restart their shell only if something actually changed.
  md5dict1 = self._HashRcfiles(_SHELL_RCFILES)
  self._EnsureNotDisabled()
  try:
    install_state, diff = self._GetStateAndDiff(
        command_path='components.update')
  except snapshots.IncompatibleSchemaVersionError as e:
    # The installed snapshot schema is too old to diff against; fall back to
    # a full reinstall.
    return self._ReinstallOnError(e)

  # Remember whether the caller named components explicitly; used below to
  # pick the right user-facing message.
  original_update_seed = update_seed
  if update_seed:
    invalid_seeds = diff.InvalidUpdateSeeds(update_seed)
    if invalid_seeds:
      if os.environ.get('CLOUDSDK_REINSTALL_COMPONENTS'):
        # We are doing a reinstall.  Ignore any components that no longer
        # exist.
        update_seed = set(update_seed) - invalid_seeds
      else:
        # Known-removed components get a warning instead of an error; any
        # remaining unknown ids are a hard failure.
        ignored = set(_IGNORED_MISSING_COMPONENTS)
        deprecated = invalid_seeds & ignored
        for item in deprecated:
          log.warning('Component [%s] no longer exists.', item)
          additional_msg = _IGNORED_MISSING_COMPONENTS.get(item)
          if additional_msg:
            log.warning(additional_msg)
        invalid_seeds -= ignored
        if invalid_seeds:
          raise InvalidComponentError(
              'The following components are unknown [{invalid_seeds}]'
              .format(invalid_seeds=', '.join(invalid_seeds)))
        update_seed = set(update_seed) - deprecated
  else:
    # No explicit seed: update everything currently installed.
    update_seed = diff.current.components.keys()

  to_remove = diff.ToRemove(update_seed)
  to_install = diff.ToInstall(update_seed)

  self.__Write(log.status)
  if not to_remove and not to_install:
    self.__Write(log.status, 'All components are up to date.')
    # Record that we checked against the latest snapshot even though nothing
    # changed, so the periodic update nag is reset.
    with install_state.LastUpdateCheck() as update_check:
      update_check.SetFromSnapshot(diff.latest, force=True,
                                   platform_filter=self.__platform_filter)
    return True

  current_os = platforms.OperatingSystem.Current()
  if (current_os is platforms.OperatingSystem.WINDOWS and
      file_utils.IsDirAncestorOf(self.__sdk_root, sys.executable)):
    # On Windows, you can't use a Python installed within a directory to move
    # that directory, which means that with a bundled Python, updates will
    # fail.  To get around this, we copy the Python interpreter to a
    # temporary directory and run it there.
    # There's no issue that the `.py` files themselves are inside the install
    # directory, because the Python interpreter loads them into memory and
    # closes them immediately.
    RestartCommand(python=_CopyPython(), block=False)
    sys.exit(0)

  # If explicitly listing components, you are probably installing and not
  # doing a full update, change the message to be more clear.
  if original_update_seed:
    latest_msg = 'Installing components from version: '
  else:
    latest_msg = 'You will be upgraded to version: '
  self._PrintVersions(diff, latest_msg=latest_msg)

  disable_backup = self._ShouldDoFastUpdate(allow_no_backup=allow_no_backup)
  # Partition into pure removals, updates (remove+install), and new installs
  # for the pending-actions summary.
  self._PrintPendingAction(diff.DetailsForCurrent(to_remove - to_install),
                           'removed')
  self._PrintPendingAction(diff.DetailsForLatest(to_remove & to_install),
                           'updated')
  self._PrintPendingAction(diff.DetailsForLatest(to_install - to_remove),
                           'installed')
  self.__Write(log.status)

  release_notes.PrintReleaseNotesDiff(
      diff.latest.sdk_definition.release_notes_url,
      config.INSTALLATION_CONFIG.version,
      diff.latest.version)

  message = self._GetDontCancelMessage(disable_backup)
  if not console_io.PromptContinue(
      message=message, throw_if_unattended=throw_if_unattended):
    return False

  components_to_install = diff.DetailsForLatest(to_install)
  components_to_remove = diff.DetailsForCurrent(to_remove)

  for c in components_to_install:
    metrics.Installs(c.id, c.version.version_string)

  if disable_backup:
    # Fast path: mutate the live installation in place; guard against
    # interruption since there is no backup to roll back to.
    with execution_utils.UninterruptibleSection(stream=log.status):
      self.__Write(log.status, 'Performing in place update...\n')
      self._UpdateWithProgressBar(components_to_remove, 'Uninstalling',
                                  install_state.Uninstall,
                                  first=True,
                                  last=not components_to_install)
      self._UpdateWithProgressBar(components_to_install, 'Installing',
                                  self._InstallFunction(install_state, diff),
                                  first=not components_to_remove,
                                  last=True)
  else:
    # Safe path: clone to a staging area, apply changes there, then swap the
    # staging area in (keeping the old installation as a backup).
    with console_io.ProgressBar(
        label='Creating update staging area', stream=log.status,
        last=False) as pb:
      staging_state = install_state.CloneToStaging(pb.SetProgress)
    self._UpdateWithProgressBar(components_to_remove, 'Uninstalling',
                                staging_state.Uninstall,
                                first=False, last=False)
    self._UpdateWithProgressBar(components_to_install, 'Installing',
                                self._InstallFunction(staging_state, diff),
                                first=False, last=False)
    with console_io.ProgressBar(
        label='Creating backup and activating new installation',
        stream=log.status, first=False) as pb:
      install_state.ReplaceWith(staging_state, pb.SetProgress)

  with install_state.LastUpdateCheck() as update_check:
    update_check.SetFromSnapshot(diff.latest, force=True,
                                 platform_filter=self.__platform_filter)
  md5dict2 = self._HashRcfiles(_SHELL_RCFILES)
  if md5dict1 != md5dict2:
    self.__Write(log.status,
                 '\nStart a new shell for the changes to take effect.\n')
  self.__Write(log.status, '\nUpdate done!\n')

  if self.__warn:
    bad_commands = self.FindAllOldToolsOnPath()
    if bad_commands and not os.environ.get(
        'CLOUDSDK_REINSTALL_COMPONENTS'):
      log.warning("""\
There are older versions of Google Cloud Platform tools on your system PATH.
Please remove the following to avoid accidentally invoking these old tools:

{0}

""".format('\n'.join(bad_commands)))
  return True