def WriteTraceEmailToConfig(self, config_name, trace_email):
  """Writes a [core] section carrying trace_email into the named config file."""
  files.MakeDir(self.named_config_dir)
  config_path = os.path.join(self.named_config_dir,
                             'config_{0}'.format(config_name))
  contents = '[core]\n' 'trace_email = {0}\n'.format(trace_email)
  with open(config_path, 'w') as config_file:
    config_file.write(contents)
def testMarkdownGenerators(self): """CommandMarkdownGenerator and CliTreeMarkdownGenerator should match.""" # The normal help doc generation flow: generate the markdown for the loaded # CLI into a directory, one markdown file per command. command_directory = os.path.join(self.temp_path, 'command') walker_util.DocumentGenerator(self.cli, command_directory, 'markdown', '.md').Walk() # The help doc generation flow under test: generate the markdown for the # generated cli_tree into a directory, one markdown file per command using # the same markdown file name scheme as above. tree_directory = os.path.join(self.temp_path, 'tree') files.MakeDir(tree_directory) internal_tree = walker_util.GCloudTreeGenerator(self.cli).Walk() external_tree = io.StringIO() resource_printer.Print(resources=internal_tree, print_format='json', out=external_tree) tree = json.loads(external_tree.getvalue()) GenerateMarkdownFromCliTree(tree, tree, tree_directory) # Compare the output dir contents. accumulator = Accumulator() help_util.DirDiff(command_directory, tree_directory, accumulator) self.assertEqual(0, accumulator.GetChanges())
def Create(root_directory):
  """Create a workspace rooted at root_directory and return it.

  Args:
    root_directory: str, Where to root the new workspace.

  Returns:
    The Workspace.

  Raises:
    InvalidWorkspaceException: If the desired directory is already inside an
      existing gcloud workspace.
    CannotCreateWorkspaceException: If the directory for the workspace does
      not exist.
  """
  enclosing = files.FindDirectoryContaining(
      root_directory, config.Paths.CLOUDSDK_WORKSPACE_CONFIG_DIR_NAME)
  if enclosing:
    raise InvalidWorkspaceException(root_directory, enclosing)
  if not os.path.exists(root_directory):
    raise CannotCreateWorkspaceException(root_directory)
  config_dir = os.path.join(root_directory,
                            config.Paths.CLOUDSDK_WORKSPACE_CONFIG_DIR_NAME)
  files.MakeDir(config_dir)
  log.status.write('Initialized gcloud directory in [{path}].\n'.format(
      path=config_dir))
  return Workspace(root_directory=root_directory)
def Run(self, args):
  """Generates source context files for args.source_directory.

  Writes the old-style source-context.json and the new source-contexts.json
  into args.output_directory.
  """
  contexts = context_util.CalculateExtendedSourceContexts(
      args.source_directory)
  # First create the old-style source-context.json file
  output_file = context_util.CONTEXT_FILENAME
  output_directory = args.output_directory
  output_file = os.path.join(output_directory, output_file)
  if context_util.HasPendingChanges(args.source_directory):
    # log.warn is a deprecated alias; use log.warning for consistency with
    # the sibling deprecated command that already calls log.warning.
    log.warning(
        'There are uncommitted changes in directory [{0}].\n'
        'The generated source context files will not reflect the current '
        'state of your source code.\n'
        'For best results, commit all changes and re-run this command.\n'
        .format(args.source_directory))
  best_context = context_util.BestSourceContext(contexts)
  files.MakeDir(output_directory)
  with open(output_file, 'w') as f:
    json.dump(best_context, f, indent=2, sort_keys=True)
  # Create the new source-contexts.json file.
  with open(
      os.path.join(output_directory, context_util.EXT_CONTEXT_FILENAME),
      'w') as f:
    json.dump(contexts, f, indent=2, sort_keys=True)
def Visit(self, node, parent, is_group):
  """Renders a help text doc for each node in the CLI tree.

  Args:
    node: group/command CommandCommon info.
    parent: The parent Visit() return value, None at the top level.
    is_group: True if node is a group, otherwise it is a command.

  Returns:
    The parent value, ignored here.
  """
  # Set up the destination dir for this level.
  command = node.GetPath()
  if is_group:
    directory = os.path.join(self._directory, *command[1:])
    # 0o755 replaces the legacy 0755 spelling, which is a syntax error on
    # Python 3; the permission bits are unchanged.
    files.MakeDir(directory, mode=0o755)
  else:
    directory = os.path.join(self._directory, *command[1:-1])
  # Render the help text document.
  path = os.path.join(directory, 'GROUP' if is_group else command[-1])
  with open(path, 'w') as f:
    md = markdown.Markdown(node)
    render_document.RenderDocument(style='text',
                                   fin=cStringIO.StringIO(md),
                                   out=f)
  return parent
def PersistProperty(file_path, section, name, value):
  """Persists a value for a given property to a specific property file.

  Args:
    file_path: str, The path to the property file to update.
    section: str, The section name of the property to set.
    name: str, The name of the property to set.
    value: str, The value to set for the given property, or None to unset it.
  """
  cfg = configparser.ConfigParser()
  cfg.read(file_path)
  if not cfg.has_section(section):
    # Unsetting a property in a missing section is a no-op.
    if value is None:
      return
    cfg.add_section(section)
  if value is None:
    cfg.remove_option(section, name)
  else:
    cfg.set(section, name, str(value))
  properties_dir = os.path.split(file_path)[0]
  files.MakeDir(properties_dir)
  with open(file_path, 'w') as fp:
    cfg.write(fp)
def __init__(self, cli, directory, hidden=False, progress_callback=None,
             restrict=None):
  """Constructor.

  Args:
    cli: The Cloud SDK CLI object.
    directory: The devsite output directory path name.
    hidden: Boolean indicating whether to consider the hidden CLI.
    progress_callback: f(float), The function to call to update the progress
      bar or None for no progress bar.
    restrict: Restricts the walk to the command/group dotted paths in this
      list. For example, restrict=['gcloud.alpha.test', 'gcloud.topic']
      restricts the walk to the 'gcloud topic' and 'gcloud alpha test'
      commands/groups.
  """
  super(DevSiteGenerator, self).__init__(cli)
  self._directory = directory
  files.MakeDir(self._directory)
  self._need_section_tag = []
  self._toc_root = files.FileWriter(os.path.join(self._directory, self._TOC))
  # Seed the root table of contents with its fixed header lines.
  for header_line in ('toc:\n',
                      '- title: "gcloud Reference"\n',
                      ' path: %s\n' % self._REFERENCE,
                      ' section:\n'):
    self._toc_root.write(header_line)
  self._toc_main = None
def Run(self, args):
  """Run the upload command.

  Uploads args.directory to the given branch and, if requested, writes a
  source-context.json file into args.source_context_directory.

  Returns:
    A single-element list holding the upload result dict.

  Raises:
    exceptions.InvalidArgumentException: If args.directory is not a directory.
  """
  if not os.path.isdir(args.directory):
    raise exceptions.InvalidArgumentException(
        'directory', args.directory + ' is not a directory.')
  mgr = upload.UploadManager()
  result = mgr.Upload(args.branch, args.directory)
  output_dir = args.source_context_directory
  if output_dir:
    files.MakeDir(output_dir)
    output_dir = os.path.realpath(output_dir)
    extended_contexts = result['source_contexts']
    result['context_file'] = os.path.join(output_dir, 'source-context.json')
    # Compute the best context once and reuse it (the original called
    # BestSourceContext twice with the same argument).
    best_context = context_util.BestSourceContext(extended_contexts)
    result['best_context'] = best_context
    with open(result['context_file'], 'w') as f:
      json.dump(best_context, f)
  log.status.write('Wrote {0} file(s), {1} bytes.\n'.format(
      result['files_written'], result['size_written']))
  files_skipped = result['files_skipped']
  if files_skipped:
    log.status.write(
        'Skipped {0} file(s) due to size limitations.\n'.format(
            files_skipped))
  return [result]
def __init__(self, cli, directory, hidden=False, progress_callback=None,
             restrict=None):
  """Constructor.

  Args:
    cli: The Cloud SDK CLI object.
    directory: The Help Text output directory path name.
    hidden: Boolean indicating whether to consider the hidden CLI.
    progress_callback: f(float), The function to call to update the progress
      bar or None for no progress bar.
    restrict: Restricts the walk to the command/group dotted paths in this
      list. For example, restrict=['gcloud.alpha.test', 'gcloud.topic']
      restricts the walk to the 'gcloud topic' and 'gcloud alpha test'
      commands/groups.
  """
  super(HelpTextGenerator, self).__init__(
      cli, progress_callback=progress_callback, restrict=restrict)
  # Ensure the output directory exists before any node is visited.
  files.MakeDir(directory)
  self._directory = directory
def CreateConfig(config_name):
  """Creates a configuration with the given name.

  Args:
    config_name: str, The name of the configuration to create.

  Returns:
    Configuration, The configuration that was just created (inactive).

  Raises:
    NamedConfigError: If the configuration already exists.
    NamedConfigFileAccessError: If there a problem manipulating the
      configuration files.
  """
  _EnsureValidConfigName(config_name, allow_reserved=False)
  paths = config.Paths()
  file_path = _FileForConfig(config_name, paths)
  if os.path.exists(file_path):
    message = 'Cannot create configuration [{0}], it already exists.'.format(
        config_name)
    raise NamedConfigError(message)
  try:
    # An empty file marks the configuration's existence.
    file_utils.MakeDir(paths.named_config_directory)
    file_utils.WriteFileContents(file_path, '')
  except file_utils.Error as error:
    message = (
        'Failed to create configuration [{0}]. Ensure you have the correct '
        'permissions on [{1}]'.format(config_name,
                                      paths.named_config_directory))
    raise NamedConfigFileAccessError(message, error)
  return Configuration(config_name, is_active=False)
def testBuildPackages_TempDirInSubdir(self):
  # A temporary directory *inside* the package root should be fine, since no
  # copy is required in that case.
  tmp_subdir = os.path.join(self.package_root, 'tmp')
  files.MakeDir(tmp_subdir)
  self._RunExpectingPackages(['trainer-0.0.0.tar.gz'])
def Run(self, args):
  """Generates the source context file for a deprecated command surface."""
  log.warning('This command is deprecated. Please use '
              '`gcloud beta source debug gen-repo-info-file` instead.')
  contexts = context_util.CalculateExtendedSourceContexts(
      args.source_directory)
  # First create the old-style source-context.json file
  if args.output_file:
    log.warning(
        'The --output-file option is deprecated and will soon be removed.')
    output_file = args.output_file
    output_directory = os.path.dirname(output_file)
  else:
    output_file = context_util.CONTEXT_FILENAME
    output_directory = ''
  if not output_directory:
    if args.output_directory:
      output_directory = args.output_directory
      output_file = os.path.join(output_directory, output_file)
    else:
      output_directory = '.'
  best_context = context_util.BestSourceContext(contexts)
  files.MakeDir(output_directory)
  with open(output_file, 'w') as f:
    json.dump(best_context, f, indent=2, sort_keys=True)
def CreateTempDir(self, name=None):
  """Creates (and returns the path of) a directory under self.root_path.

  A falsy name yields an anonymous tempfile.mkdtemp directory instead.
  """
  if not name:
    return tempfile.mkdtemp(dir=self.root_path)
  path = os.path.join(self.root_path, name)
  file_utils.MakeDir(path)
  return path
def SetUp(self):
  """Builds a test completion CLI tree and patches the lookup module."""

  # Fake output stream whose staticmethods close over the enclosing SetUp
  # `self`, recording close/write calls as test attributes.
  class _FakeStream(object):

    @staticmethod
    def close():
      self.completions_closed = True

    @staticmethod
    def write(s):
      self.completions_value = s

  cli_dir = os.path.join(self.temp_path, 'data', 'cli')
  files.MakeDir(cli_dir)
  self.WalkTestCli('sdk4')
  with files.FileWriter(os.path.join(cli_dir, 'gcloud_completions.py')) as f:
    self.root = generate.ListCompletionTree(cli=self.test_cli, out=f)
  self.completions_closed = False
  self.completions_value = None
  self.StartObjectPatch(lookup, '_OpenCompletionsOutputStream',
                        return_value=_FakeStream())
  if 'gcloud_completions' in sys.modules:
    # At least one test exercises the real import in the lookup module. That
    # one skips this branch, but it poisons sys.modules and hangs around for
    # the remaining tests. This mocks the subsequent tests to return the test
    # CLI tree generated above.
    self.StartObjectPatch(lookup, 'LoadCompletionCliTree',
                          return_value=self.root)
  self.StartObjectPatch(lookup, '_GetInstallationRootDir',
                        return_value=self.temp_path)
  self.env = {lookup.IFS_ENV_VAR: ' '}
def Run(self, staging_area, jar_file, app_dir, appyaml=None):
  """Stages a jar app for the java11 runtime.

  Copies the jar into the staging area, writes (or copies) an app.yaml, and
  links/copies any Class-Path dependency jars found next to the app.

  Args:
    staging_area: str, path of the staging directory.
    jar_file: str, path of the application jar.
    app_dir: str, directory containing the app and its dependency jars.
    appyaml: str or None, path of an existing app.yaml to copy; when None a
      minimal 'runtime: java11' app.yaml is generated.

  Returns:
    The staging_area path.

  Raises:
    NoMainClassError: If the jar manifest has no Main-Class entry.
  """
  # Logic is simple: copy the jar in the staged area, and create a simple
  # file app.yaml for runtime: java11.
  shutil.copy2(jar_file, staging_area)
  if appyaml:
    shutil.copyfile(appyaml, os.path.join(staging_area, 'app.yaml'))
  else:
    files.WriteFileContents(os.path.join(staging_area, 'app.yaml'),
                            'runtime: java11\n',
                            private=True)
  manifest = jarfile.ReadManifest(jar_file)
  if manifest:
    main_entry = manifest.main_section.get('Main-Class')
    if main_entry is None:
      raise NoMainClassError()
    classpath_entry = manifest.main_section.get('Class-Path')
    if classpath_entry:
      # Class-Path is a whitespace-separated list of relative jar paths.
      libs = classpath_entry.split()
      for lib in libs:
        dependent_file = os.path.join(app_dir, lib)
        # We copy the dep jar in the correct staging sub directories
        # and only if it exists.
        if os.path.isfile(dependent_file):
          destination = os.path.join(staging_area, lib)
          files.MakeDir(
              os.path.abspath(
                  os.path.join(destination, os.pardir)))
          try:
            # Symlink when possible (AttributeError covers platforms where
            # os.symlink is missing; OSError covers permission failures).
            os.symlink(dependent_file, destination)
          except (AttributeError, OSError):
            log.debug(
                'Could not symlink files in staging directory, falling back '
                'to copying')
            shutil.copy(dependent_file, destination)
  return staging_area
def Run(self, args):
  """Run the capture upload command.

  Uploads the capture and writes a source-context.json file into
  args.output_directory.

  Returns:
    A single-element list holding the (copied) result dict.
  """
  mgr = capture.CaptureManager()
  result = mgr.UploadCapture(args.capture_id, args.source_location,
                             args.target_path)
  output_dir = args.output_directory
  files.MakeDir(output_dir)
  output_dir = os.path.realpath(output_dir)
  extended_contexts = result['source_contexts']
  # Copy before mutating so the manager's dict is left untouched.
  result = dict(result)
  result['context_file'] = os.path.join(output_dir, 'source-context.json')
  # Compute the best context once and reuse it (the original called
  # BestSourceContext twice with the same argument).
  best_context = context_util.BestSourceContext(extended_contexts)
  result['best_context'] = best_context
  with open(result['context_file'], 'w') as f:
    json.dump(best_context, f)
  log.status.write('Wrote {0} file(s), {1} bytes.\n'.format(
      result['files_written'], result['size_written']))
  files_skipped = result['files_skipped']
  if files_skipped:
    log.status.write('Skipped {0} file(s) due to size limitations.\n'.format(
        files_skipped))
  return [result]
def __init__(self, sdk_root):
  """Initializes the installation state for the given sdk install.

  Args:
    sdk_root: str, The file path of the root of the SDK installation.

  Raises:
    ValueError: If the given SDK root does not exist.
  """
  if not os.path.isdir(sdk_root):
    raise ValueError(
        'The given Cloud SDK root does not exist: [{0}]'.format(
            sdk_root))

  self.__sdk_root = sdk_root
  self._state_directory = os.path.join(sdk_root,
                                       InstallationState.STATE_DIR_NAME)
  self.__backup_directory = os.path.join(self._state_directory,
                                         InstallationState.BACKUP_DIR_NAME)
  self.__trash_directory = os.path.join(self._state_directory,
                                        InstallationState.TRASH_DIR_NAME)

  self.__sdk_staging_root = (os.path.normpath(self.__sdk_root) +
                             InstallationState.STAGING_ROOT_SUFFIX)

  # The original iterated over a single-element list here; a direct check on
  # the one directory is equivalent and clearer.
  if not os.path.isdir(self._state_directory):
    file_utils.MakeDir(self._state_directory)
def Run(self, args):
  """Generates source context files, reporting usage errors cleanly.

  Raises:
    core_exceptions.Error: If source contexts cannot be calculated for
      args.source_directory (usage error, reported instead of crashing).
  """
  try:
    contexts = context_util.CalculateExtendedSourceContexts(
        args.source_directory)
  except context_util.GenerateSourceContextError as e:
    # This is a usage error. Wrap it with core_exceptions.Error to report
    # it properly (i.e., as an error instead of a crash).
    raise core_exceptions.Error(e)

  # First create the old-style source-context.json file
  output_file = context_util.CONTEXT_FILENAME
  output_directory = args.output_directory
  output_file = os.path.join(output_directory, output_file)
  if context_util.HasPendingChanges(args.source_directory):
    # log.warn is a deprecated alias; use log.warning for consistency with
    # the sibling deprecated command that already calls log.warning.
    log.warning(
        'There are uncommitted changes in directory [{0}].\n'
        'The generated source context files will not reflect the current '
        'state of your source code.\n'
        'For best results, commit all changes and re-run this command.\n'
        .format(args.source_directory))
  best_context = context_util.BestSourceContext(contexts)
  files.MakeDir(output_directory)
  with open(output_file, 'w') as f:
    json.dump(best_context, f, indent=2, sort_keys=True)
  # Create the new source-contexts.json file.
  with open(
      os.path.join(output_directory, context_util.EXT_CONTEXT_FILENAME),
      'w') as f:
    json.dump(contexts, f, indent=2, sort_keys=True)
def PersistProperty(file_path, section, name, value):
  """Persists a value for a given property to a specific property file.

  Args:
    file_path: str, The path to the property file to update.
    section: str, The section name of the property to set.
    name: str, The name of the property to set.
    value: str, The value to set for the given property, or None to unset it.
  """
  cfg = configparser.ConfigParser()
  cfg.read(file_path)
  if not cfg.has_section(section):
    # Unsetting a property in a missing section is a no-op.
    if value is None:
      return
    cfg.add_section(section)
  if value is None:
    cfg.remove_option(section, name)
  else:
    cfg.set(section, name, str(value))
  properties_dir = os.path.split(file_path)[0]
  files.MakeDir(properties_dir)
  # They changed the interface for configparser. On Python 2 it operates with
  # byte strings, on Python 3 it operates with text strings.
  writer = files.BinaryFileWriter if six.PY2 else files.FileWriter
  with writer(file_path) as fp:
    cfg.write(fp)
def Execute(self, scenario_context):
  """Writes the configured file (binary or resolved text) under the cwd."""
  target = os.path.join(os.getcwd(), self._path)
  files.MakeDir(os.path.dirname(target))
  if self._binary_contents:
    files.WriteBinaryFileContents(target, self._binary_contents)
    return
  resolver = scenario_context.resource_ref_resolver
  files.WriteFileContents(target, resolver.Resolve(self._contents))
def testGenerateHelpDocsHtmlDir(self):
  # Generate HTML help docs into a scratch dir and compare against goldens.
  with files.TemporaryDirectory() as temp_dir:
    html_dir = os.path.join(temp_dir, 'www')
    files.MakeDir(html_dir)
    self.Run(['meta', 'generate-help-docs', '--html-dir=' + html_dir])
    self.AssertDirectoryIsGolden(html_dir, __file__, 'html.dir')
    self.AssertFileIsGolden(
        os.path.join(html_dir, '_menu_.html'), __file__, '_menu_.html')
def CreateTestDirectory(self, directory):
  """Creates directory/test containing an empty file and a non-empty file."""
  test_dir = os.path.join(directory, 'test')
  file_utils.MakeDir(test_dir)
  with open(os.path.join(test_dir, 'empty.file'), 'w'):
    pass
  with open(os.path.join(test_dir, 'something.file'), 'w') as out:
    out.write('something\n')
  return test_dir
def TestGenClient(self):
  """Generates an API client in a temp dir and diffs it against checked-in code.

  NOTE(review): api_name, api_version, api_config, base_dir and apis_dir are
  not parameters — they appear to be closure variables from an enclosing
  scope; confirm against the full file.
  """
  api_version_in_targets = api_config.get('version', api_version)
  prefix = api_name + '_' + api_version_in_targets
  with files.TemporaryDirectory() as tmp_dir_path:
    # Place discovery doc into tmp folder.
    discovery_dir = os.path.join(tmp_dir_path, apis_dir)
    files.MakeDir(discovery_dir)
    shutil.copy(
        _GetClientPath(base_dir, apis_dir, api_config['discovery_doc']),
        discovery_dir)
    # Create parent folder __init__ files, as they do not exist in tmp dir,
    # this is to avoid unnecessary warnings which generally does not happen.
    api_dir = os.path.join(discovery_dir, api_name)
    files.MakeDir(api_dir)
    with open(os.path.join(discovery_dir, '__init__.py'), 'w'):
      pass
    with open(os.path.join(api_dir, '__init__.py'), 'w'):
      pass
    generate.GenerateApi(tmp_dir_path, apis_dir, api_name, api_version,
                         api_config)
    generate.GenerateResourceModule(tmp_dir_path, apis_dir, api_name,
                                    api_version, api_config['discovery_doc'],
                                    api_config.get('resources', {}))
    expected_files = set([
        prefix + '_client.py', prefix + '_messages.py', 'resources.py',
        '__init__.py'
    ])
    output_dir = os.path.join(tmp_dir_path, apis_dir, api_name, api_version)
    actual_files = set(os.listdir(output_dir))
    # Generation may omit some files, but must not produce unexpected ones.
    self.assertTrue(
        actual_files <= expected_files,
        'At most expected {0} but got {1}'.format(
            expected_files, actual_files))
    # Every generated file must match the checked-in copy byte for byte.
    for file_name in actual_files:
      AssertDiffEqual(
          files.ReadFileContents(
              _GetClientPath(base_dir, apis_dir, api_name, api_version,
                             file_name)),
          files.ReadFileContents(os.path.join(output_dir, file_name)))
def GetDefaultName():
  """Returns the default resource cache name."""
  parts = [config.Paths().cache_dir]
  account = properties.VALUES.core.account.Get(required=False)
  if account:
    # Per-account caches live in a subdirectory named after the account.
    parts.append(account)
  files.MakeDir(os.path.join(*parts))
  parts.append('resource.cache')
  return os.path.join(*parts)
def testPartialDefaultKubeconfig(self):
  # Seed the default path with a partial kubeconfig; Default() should
  # normalize it to the canonical empty kubeconfig.
  path = kconfig.Kubeconfig.DefaultPath()
  file_utils.MakeDir(os.path.dirname(path))
  with open(path, 'w') as out:
    out.write('clusters: []')
  self.assertTrue(os.path.isfile(path))
  kconfig.Kubeconfig.Default()
  with open(path, 'r') as rendered:
    self.assertEqual(rendered.read(), _EMPTY_KUBECONFIG)
def _TableDirPath():
  """Returns (and creates if needed) root/.install/static_completion."""
  paths = config.Paths()
  # Completion table will be stored at root/.install/static_completion.
  path = os.path.join(
      paths.sdk_root, paths.CLOUDSDK_STATE_DIR, 'static_completion')
  # Ensure directory exists.
  files.MakeDir(path)
  return path
def _IndexDirPath():
  """Returns (and creates if needed) root/.install/help_text."""
  paths = config.Paths()
  # Table will be stored at root/.install/help_text.
  path = os.path.join(paths.sdk_root, paths.CLOUDSDK_STATE_DIR, 'help_text')
  # Ensure directory exists.
  files.MakeDir(path)
  return path
def CreateFiles(self, paths):
  """Creates each relative path under self.tmpdir; returns the full paths."""
  created = []
  for rel_path in paths:
    full_path = os.path.join(self.tmpdir, os.path.normpath(rel_path))
    file_utils.MakeDir(os.path.dirname(full_path))
    with open(full_path, 'w') as out:
      out.write('contents of ' + rel_path)
    created.append(full_path)
  return created
def __init__(self, cli, directory):
  """Constructor.

  Args:
    cli: The Cloud SDK CLI object.
    directory: The help text output directory path name.
  """
  super(HelpTextGenerator, self).__init__(cli)
  # Ensure the output directory exists before any node is visited.
  files.MakeDir(directory)
  self._directory = directory
def LoadOrCreate(cls, filename):
  """Loads the kubeconfig at filename, recreating an empty one on failure."""
  try:
    return cls.LoadFromFile(filename)
  except (Error, IOError) as error:
    log.debug('unable to load default kubeconfig: {0}; recreating {1}'.
              format(error, filename))
    # Rebuild from scratch: ensure the directory exists, then persist an
    # empty kubeconfig at the same path.
    file_utils.MakeDir(os.path.dirname(filename))
    fresh = cls(EmptyKubeconfig(), filename)
    fresh.SaveToFile()
    return fresh