Example #1
    def Run(self, args):
        if not args.IsSpecified('service_name'):
            dir_name = os.path.basename(os.path.dirname(args.dockerfile))
            service_name = console_io.PromptWithDefault(message='Service name',
                                                        default=dir_name)
        else:
            service_name = args.service_name

        if not args.IsSpecified('image_name'):
            default_image_name = 'gcr.io/{project}/{service}'.format(
                project=properties.VALUES.core.project.Get(required=True),
                service=service_name)
            image_name = console_io.PromptWithDefault(
                message='Docker image tag', default=default_image_name)
        else:
            image_name = args.image_name

        kubernetes_configs = local.CreatePodAndService(service_name,
                                                       image_name)
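        # Emit the generated Pod and Service configs as a single
        # multi-document YAML file (yaml.dump_all writes one document per
        # config in the sequence).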
        with files.FileWriter(args.kubernetes_file) as output:
            yaml.dump_all(kubernetes_configs, output)

        skaffold_yaml_text = _SKAFFOLD_TEMPLATE.format(
            image_name=image_name,
            context_path=args.build_context_directory
            or os.path.dirname(args.dockerfile) or '.')
        with files.FileWriter(args.skaffold_file) as output:
            output.write(skaffold_yaml_text)
Example #2
  def Run(self, args):
    settings = local.Settings.FromArgs(args)
    local_file_generator = local_files.LocalRuntimeFiles(settings)

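    # FileWriter opens the target path for writing and closes it on exit.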
    with files.FileWriter(args.kubernetes_file) as output:
      output.write(six.u(local_file_generator.KubernetesConfig()))

    if not args.no_skaffold_file:
      with files.FileWriter(args.skaffold_file) as output:
        output.write(
            six.u(local_file_generator.SkaffoldConfig(args.kubernetes_file)))
Example #3
def Run(cmd, show_output=True):
    """Run command and optionally send the output to /dev/null or nul."""
    if show_output:
        subprocess.check_call(cmd)
    else:
        with file_utils.FileWriter(os.devnull) as devnull:
            subprocess.check_call(cmd, stdout=devnull, stderr=devnull)
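A quick usage sketch for the helper above (the echo command is just a
hypothetical example):

    Run(['echo', 'hello'])                     # output is shown on the console
    Run(['echo', 'hello'], show_output=False)  # output discarded via os.devnull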
Example #4
    def Run(self, args):
        holder = base_classes.ComputeApiHolder(self.ReleaseTrack())
        client = holder.client

        (backend_services_utils.
         IsDefaultRegionalBackendServicePropertyNoneWarnOtherwise())
        backend_service_ref = (
            flags.GLOBAL_REGIONAL_BACKEND_SERVICE_ARG.ResolveAsResource(
                args,
                holder.resources,
                scope_lister=compute_flags.GetDefaultScopeLister(client)))

        get_request = backend_services_utils.ComposeGetRequest(
            client, backend_service_ref)

        backend_service = client.MakeRequests([get_request])[0]

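        # Write the export to the requested destination file, or to stdout.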
        if args.destination:
            with files.FileWriter(args.destination) as stream:
                export_util.Export(message=backend_service,
                                   stream=stream,
                                   schema_path=self.GetSchemaPath())
        else:
            export_util.Export(message=backend_service,
                               stream=sys.stdout,
                               schema_path=self.GetSchemaPath())
Example #5
    def testImportFromFile(self):
        target_grpc_proxy = copy.deepcopy(self._existing_target_grpc_proxy)
        target_grpc_proxy.description = 'changed'
        self.make_requests.side_effect = iter(
            [[test_resources.TARGET_GRPC_PROXIES_ALPHA[0]],
             [target_grpc_proxy]])

        # Write the modified target_grpc_proxies to a file
        file_name = os.path.join(self.temp_path, 'temp-tgp.yaml')
        with files.FileWriter(file_name) as stream:
            export_util.Export(message=target_grpc_proxy, stream=stream)
        self.WriteInput('y\n')

        response = self.RunImport('{0} --source {1}'.format(
            self._resource_name, file_name))
        self.CheckRequests([
            (self._target_grpc_proxies_api, 'Get',
             self.messages.ComputeTargetGrpcProxiesGetRequest(
                 project='my-project', targetGrpcProxy='target-grpc-proxy-1'))
        ], [(self._target_grpc_proxies_api, 'Patch',
             self.messages.ComputeTargetGrpcProxiesPatchRequest(
                 project='my-project',
                 targetGrpcProxy=self._resource_name,
                 targetGrpcProxyResource=target_grpc_proxy))])
        self.assertEqual(response, target_grpc_proxy)
Example #6
def GenerateModuleFile(import_data, project, dest_file=None, dest_dir=None):
  """Generate terraform modules file from template."""
  output_file_name = os.path.join(dest_dir, dest_file)
  output_template = _BuildTemplate('terraform_module_template.tpl')
  module_contents = set()
  for import_path, _ in import_data:
    module_source, module_name = ConstructModuleParameters(
        import_path, dest_dir)
    module_contents.add((module_name, module_source))
  module_declarations = []
  for module in module_contents:
    module_declarations.append(
        MODULE_TEMPLATE.format(module_name=module[0], module_source=module[1]))

  context = {'project': project}
  context['modules'] = os.linesep.join(module_declarations)

  try:
    with files.FileWriter(output_file_name, create_path=True) as f:
      ctx = runtime.Context(f, **context)
      output_template.render_context(ctx)
    os.chmod(output_file_name, 0o755)
  except files.Error as e:
    raise TerraformGenerationError(  # pylint: disable=raise-missing-from
        'Error writing import script: {}'.format(e))
  return output_file_name, len(module_contents)
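MODULE_TEMPLATE itself is not shown in the snippet; a plausible shape, offered
purely as an assumption, is a Terraform module block with the two placeholders
used above:

    # Hypothetical template, not the actual googlecloudsdk constant:
    MODULE_TEMPLATE = '''module "{module_name}" {{
      source = "{module_source}"
    }}
    '''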
Example #7
    def testImportAutoscalingPolicies_update_declineOverwrite(self):
        policy = self.MakeAutoscalingPolicy('fake-project',
                                            'antarctica-north42', 'policy-1')

        # Write to a file so that we can use stdin to decline the prompt. Otherwise,
        # we wouldn't have a way to demarcate where the policy ends and declining
        # the prompt starts.
        file_name = os.path.join(self.temp_path, 'policy.yaml')
        with files.FileWriter(file_name) as stream:
            export_util.Export(
                message=policy,
                stream=stream,
            )

        expected_request_policy = copy.deepcopy(policy)
        expected_request_policy.name = None

        self.mock_client.projects_regions_autoscalingPolicies.Create.Expect(
            self.messages.
            DataprocProjectsRegionsAutoscalingPoliciesCreateRequest(
                parent='projects/fake-project/regions/antarctica-north42',
                autoscalingPolicy=expected_request_policy,
            ),
            exception=self.MakeHttpError(status_code=409))

        # Don't pass --quiet, and decline the prompt
        self.WriteInput('n\n')
        with self.AssertRaisesExceptionMatches(
                console_io.OperationCancelledError, 'Aborted by user.'):
            self.RunDataproc(
                'autoscaling-policies import policy-1 --source {}'.format(
                    file_name))
Example #8
    def Run(self, args):
        holder = base_classes.ComputeApiHolder(self.ReleaseTrack())
        client = holder.client

        # Retrieve the specified compute instance.
        instance_ref = flags.INSTANCE_ARG.ResolveAsResource(
            args,
            holder.resources,
            scope_lister=flags.GetInstanceZoneScopeLister(client))

        request = client.messages.ComputeInstancesGetRequest(
            **instance_ref.AsDict())
        instance = client.MakeRequests([(client.apitools_client.instances,
                                         'Get', request)])[0]

        # Get JSON Schema for Compute Engine instances (located in
        # third_party/py/googlecloudsdk/schemas/...).
        schema_path = self.GetSchemaPath(for_help=False)

        # Write configuration data to either designated file or stdout.
        if args.destination:
            with files.FileWriter(args.destination) as stream:
                export_util.Export(message=instance,
                                   stream=stream,
                                   schema_path=schema_path)
            return log.status.Print('Exported [{}] to \'{}\'.'.format(
                instance.name, args.destination))
        else:
            export_util.Export(message=instance,
                               stream=sys.stdout,
                               schema_path=schema_path)
Example #9
    def Run(self, args):
        """Exports a build trigger.

        Args:
          args: an argparse namespace. All the arguments that were provided to
            this command invocation.
        """
        client = cloudbuild_util.GetClientInstance()
        messages = cloudbuild_util.GetMessagesModule()

        project = properties.VALUES.core.project.Get(required=True)
        location = args.region or cloudbuild_util.DEFAULT_REGION
        trigger = args.TRIGGER

        name = resources.REGISTRY.Parse(
            trigger,
            params={
                'projectsId': project,
                'locationsId': location,
                'triggersId': trigger,
            },
            collection='cloudbuild.projects.locations.triggers').RelativeName()

        got_trigger = client.projects_locations_triggers.Get(
            messages.CloudbuildProjectsLocationsTriggersGetRequest(
                name=name, triggerId=trigger))
        with files.FileWriter(args.destination) as out:
            yaml.dump(encoding.MessageToDict(got_trigger), stream=out)
Example #10
 def _DownloadLogs(self, valid_dates, sentinel, output_file, append):
     """Common utility method for both normal and append modes."""
     # A temporary file is used because the API for requesting logs
     # gives us the newest logs first.  We write them in this order to
     # the temporary file and then read the temporary file backwards,
     # copying to the output file line by line (special-casing null
     # bytes).
     tf = tempfile.TemporaryFile()
     last_offset = None
     try:
         while True:
             new_offset = self.RequestLogLines(tf, last_offset, valid_dates,
                                               sentinel)
             if not new_offset or new_offset == last_offset:
                 break
             last_offset = new_offset
         if output_file == '-':
             of = log.out
         else:
             try:
                 of = files.FileWriter(output_file, append=append)
             except files.Error as e:
                 raise CannotOpenFileError(output_file, e)
         try:
             line_count = CopyReversedLines(tf, of)
         finally:
             of.flush()
             if of is not log.out:
                 of.close()  # pytype: disable=attribute-error
     finally:
         tf.close()
     log.info('Copied %d records.', line_count)
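CopyReversedLines is defined elsewhere in the SDK; per the comment above, it
copies the temporary file's lines to the output newest-last and returns the
record count. A minimal sketch, assuming UTF-8 text and ignoring the null-byte
special-casing the comment mentions:

    def CopyReversedLines(src, dst):
        # Rewind the temporary file, then emit its lines in reverse order.
        src.seek(0)
        lines = src.read().decode('utf-8').splitlines(True)
        for line in reversed(lines):
            dst.write(line)
        return len(lines)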
Example #11
    def SetUp(self):
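        # Fake output stream that records what the completion lookup writes
        # and whether it was closed.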
        class _FakeStream(object):
            @staticmethod
            def close():
                self.completions_closed = True

            @staticmethod
            def write(s):
                self.completions_value = s

        cli_dir = os.path.join(self.temp_path, 'data', 'cli')
        files.MakeDir(cli_dir)
        self.WalkTestCli('sdk4')
        with files.FileWriter(os.path.join(cli_dir,
                                           'gcloud_completions.py')) as f:
            self.root = generate.ListCompletionTree(cli=self.test_cli, out=f)
        self.completions_closed = False
        self.completions_value = None
        self.StartObjectPatch(lookup,
                              '_OpenCompletionsOutputStream',
                              return_value=_FakeStream())
        if 'gcloud_completions' in sys.modules:
            # At least one test exercises the real import in the lookup module. That
            # one skips this branch, but it poisons sys.modules and hangs around for
            # the remaining tests. This mock makes the subsequent tests return
            # the test CLI tree generated above.
            self.StartObjectPatch(lookup,
                                  'LoadCompletionCliTree',
                                  return_value=self.root)
        self.StartObjectPatch(lookup,
                              '_GetInstallationRootDir',
                              return_value=self.temp_path)
        self.env = {lookup.IFS_ENV_VAR: ' '}
Example #12
    def _ProvisionClientCert(self, cmd, cert_path):
        """Executes certificate provider to obtain client certificate and keys."""
        try:
            # monkey-patch command line args to get password protected cert
            pass_arg = '--with_passphrase'
            if '--print_certificate' in cmd and pass_arg not in cmd:
                cmd.append(pass_arg)

            cert_pem_io = io.StringIO()
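            # Run the provider, capturing its PEM output in memory.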
            ret_val = execution_utils.Exec(cmd,
                                           no_exit=True,
                                           out_func=cert_pem_io.write,
                                           err_func=log.file_only_logger.debug)
            if ret_val:
                raise CertProviderUnexpectedExit(
                    'certificate provider exited with error')

            sections = _SplitPemIntoSections(cert_pem_io.getvalue())
            with files.FileWriter(cert_path) as f:
                f.write(sections['CERTIFICATE'])
                f.write(sections['ENCRYPTED PRIVATE KEY'])
            self.client_cert_password = sections['PASSPHRASE'].splitlines()[1]
        except (files.Error, execution_utils.PermissionError,
                execution_utils.InvalidCommandError,
                CertProviderUnexpectedExit) as e:
            raise CertProvisionException(e)
        except KeyError as e:
            raise CertProvisionException(
                'Invalid output format from certificate provider, no %s' % e)
Example #13
 def testGenerateExistingCommandsNoOverwrite(self):
     self.fake_message_module = self.StartObjectPatch(
         apis, 'GetMessagesModule', return_value=fake_api_message_module)
     self.fake_collection_info = self.StartObjectPatch(
         resources.Registry,
         'GetCollectionInfo',
         return_value=FAKEAPI_ICECREAM_INFO)
     output_dir = self.temp_path
     command = [
         'meta', 'generate-command', 'compute.instances', '--output-dir',
         output_dir
     ]
     file_contents = 'throwaway'
     for command_type in SUPPORTED_COMMANDS:  # make all command types pre-exist
         yaml_filename = command_type.replace('-', '_') + '.yaml'
         yaml_filepath = os.path.join(output_dir, yaml_filename)
         with files.FileWriter(yaml_filepath, create_path=True) as f:
             f.write(file_contents)
     mock_file_writer = self.StartObjectPatch(files, 'FileWriter')
     self.mock_prompt.return_value = False
     self.Run(command)
     self.assertEqual(self.mock_prompt.call_count, len(SUPPORTED_COMMANDS))
     if mock_file_writer.call_count > 0:
         self.assertEqual(mock_file_writer.call_count,
                          1)  # only write should be survey response
     else:
         self.assertEqual(self.mock_prompt.call_count,
                          0)  # no survey, no write
     self.AssertFileEquals(file_contents, yaml_filepath)
Example #14
    def testImportWorkflowTemplatesUpdateExisting(self):
        # Provided template does not have an id or a name.
        provided_template = self.MakeWorkflowTemplate()
        provided_template.id = None
        provided_template.name = None

        get_response = self.MakeWorkflowTemplate()
        get_response.version = 1

        # The id, name, and version are populated before we make the update request.
        expected_request = copy.deepcopy(provided_template)
        expected_request.id = self.WORKFLOW_TEMPLATE
        expected_request.name = self.WorkflowTemplateName()
        expected_request.version = 1

        # Response has version incremented.
        expected_response = copy.deepcopy(expected_request)
        expected_response.version = 2

        # Write test template to file.
        file_name = os.path.join(self.temp_path, 'template.yaml')
        with files.FileWriter(file_name) as stream:
            export_util.Export(message=provided_template, stream=stream)

        self.ExpectGetWorkflowTemplate(response=get_response)
        self.ExpectUpdateWorkflowTemplate(workflow_template=expected_request,
                                          response=expected_response)
        self.WriteInput('y\n')
        result = self.RunDataproc(
            'workflow-templates import {0} --source {1}'.format(
                self.WORKFLOW_TEMPLATE, file_name))
        self.AssertMessagesEqual(expected_response, result)
Example #15
    def testImportWorkflowTemplatesCreateNewWithRegion(self):
        # Set region property.
        properties.VALUES.dataproc.region.Set('us-test1')

        # Provided template does not have an id or a name.
        provided_template = self.MakeWorkflowTemplate()
        provided_template.id = None
        provided_template.name = None

        # The id is populated before we make the create request.
        expected_request = copy.deepcopy(provided_template)
        expected_request.id = self.WORKFLOW_TEMPLATE

        # The create response has the name populated.
        expected_response = copy.deepcopy(expected_request)
        expected_response.name = self.WorkflowTemplateName(region='us-test1')

        # Write test template to file.
        file_name = os.path.join(self.temp_path, 'template.yaml')
        with files.FileWriter(file_name) as stream:
            export_util.Export(message=provided_template, stream=stream)

        self.ExpectGetWorkflowTemplate(
            name=self.WorkflowTemplateName(region='us-test1'),
            exception=self.MakeHttpError(status_code=404))
        parent = self.WorkflowTemplateParentName(region='us-test1')
        self.ExpectCreateWorkflowTemplate(workflow_template=expected_request,
                                          response=expected_response,
                                          parent=parent)
        result = self.RunDataproc(
            'workflow-templates import {0} --source {1}'.format(
                self.WORKFLOW_TEMPLATE, file_name))
        self.AssertMessagesEqual(expected_response, result)
Example #16
    def Run(self, args):
        dataproc = dp.Dataproc(self.ReleaseTrack())
        messages = dataproc.messages

        policy_ref = args.CONCEPTS.autoscaling_policy.Parse()

        request = messages.DataprocProjectsRegionsAutoscalingPoliciesGetRequest(
            name=policy_ref.RelativeName())
        policy = dataproc.client.projects_regions_autoscalingPolicies.Get(
            request)

        schema_path = export_util.GetSchemaPath('dataproc',
                                                dataproc.api_version,
                                                'AutoscalingPolicy',
                                                for_help=False)
        if args.destination:
            with files.FileWriter(args.destination) as stream:
                export_util.Export(message=policy,
                                   stream=stream,
                                   schema_path=schema_path)
        else:
            # Print to stdout
            export_util.Export(message=policy,
                               stream=sys.stdout,
                               schema_path=schema_path)
Example #17
 def testGenerateTree(self):
   cli_dir = os.path.join(self.temp_path, 'data', 'cli')
   files.MakeDir(cli_dir)
   self.WalkTestCli('sdk13')
   with files.FileWriter(os.path.join(cli_dir, 'gcloud_coverage.py')) as f:
     self.root = generate.OutputCoverageTree(cli=self.test_cli, out=f)
   self.assertEqual(
       self.root['sdk'],
       {
           '--yes': True,
           '--no': False,
           'group': self.root['sdk']['group'],
           'require-coverage': self.root['sdk']['require-coverage']
       })
   self.assertTrue(self.root['--help'])
   self.assertEqual(
       self.root['sdk']['require-coverage'],
       {
           '--needs_coverage': True,
           '--also_needs_coverage': True
       })
   self.assertEqual(
       self.root['sdk']['group']['do-not-require-coverage'],
       {
           '--no_include': False,
           '--sort-by': False,
           '--filter': False,
           '--limit': False,
           '--page-size': False,
           '--uri': False,
       })
Example #18
  def Run(self, args):
    dataproc = dp.Dataproc(self.ReleaseTrack())

    template_ref = args.CONCEPTS.template.Parse()

    # Get specified version, or most recent version if no version arg provided.
    workflow_template = dataproc.GetRegionsWorkflowTemplate(
        template_ref, args.version)

    # Filter out OUTPUT_ONLY fields and resource identifying fields. Note this
    # needs to be kept in sync with v1 workflow_templates.proto.
    workflow_template.id = None
    workflow_template.name = None
    workflow_template.version = None
    workflow_template.createTime = None
    workflow_template.updateTime = None
    # We do not need to clear any fields from workflow_template.placement.
    # 1) Managed cluster:
    #    a) cluster_name is really a name prefix, so it's okay that multiple
    #       templates have the same value.
    #    b) The server does not resolve OUTPUT_ONLY fields when storing a
    #       workflow template, so cluster_config is fine as is.
    # 2) Cluster selector: there are no OUTPUT_ONLY or directly resource
    # identifying fields here.

    if args.destination:
      with files.FileWriter(args.destination) as stream:
        export_util.Export(message=workflow_template, stream=stream)
    else:
      export_util.Export(message=workflow_template, stream=sys.stdout)
Example #19
  def Visit(self, node, parent, is_group):
    """Renders document file for each node in the CLI tree.

    Args:
      node: group/command CommandCommon info.
      parent: The parent Visit() return value, None at the top level.
      is_group: True if node is a group, otherwise it is a command.

    Returns:
      The parent value, ignored here.
    """

    if self._style == 'linter':
      meta_data = actions.GetCommandMetaData(node)
    else:
      meta_data = None
    command = node.GetPath()
    path = os.path.join(self._directory, '_'.join(command)) + self._suffix
    with files.FileWriter(path) as f:
      md = markdown.Markdown(node)
      render_document.RenderDocument(style=self._style,
                                     title=' '.join(command),
                                     fin=io.StringIO(md),
                                     out=f,
                                     command_metadata=meta_data)
    return parent
Example #20
def Dump(cli, path=None, name=DEFAULT_CLI_NAME, branch=None):
  """Dumps the CLI tree to a JSON file.

  The tree is processed by cli_tree._Serialize() to minimize the JSON file size
  and generation time.

  Args:
    cli: The CLI.
    path: The JSON file path to dump to, the standard output if '-', the
      default CLI tree path if None.
    name: The CLI name.
    branch: The path of the CLI subtree to generate.

  Returns:
    The generated CLI tree.
  """
  if path is None:
    path = CliTreePath()
  tree = _GenerateRoot(cli=cli, path=path, name=name, branch=branch)
  if path == '-':
    _DumpToFile(tree, sys.stdout)
  else:
    with files.FileWriter(path) as f:
      _DumpToFile(tree, f)
  from googlecloudsdk.core.resource import resource_projector

  return resource_projector.MakeSerializable(tree)
Example #21
    def Run(self, args):
        api_version = 'v1'
        # If in the future there are differences between API versions, do NOT
        # use this pattern of checking ReleaseTrack. Break this into multiple
        # classes.
        if self.ReleaseTrack() == base.ReleaseTrack.BETA:
            api_version = 'v1beta2'

        if os.path.isfile(args.transaction_file):
            raise transaction_util.TransactionFileAlreadyExists(
                'Transaction already exists at [{0}]'.format(
                    args.transaction_file))

        dns = apis.GetClientInstance('dns', api_version)

        # Get the managed-zone.
        zone_ref = util.GetRegistry(api_version).Parse(
            args.zone,
            params={
                'project': properties.VALUES.core.project.GetOrFail,
            },
            collection='dns.managedZones')

        try:
            zone = dns.managedZones.Get(
                dns.MESSAGES_MODULE.DnsManagedZonesGetRequest(
                    project=zone_ref.project,
                    managedZone=zone_ref.managedZone))
        except apitools_exceptions.HttpError as error:
            raise calliope_exceptions.HttpException(error)

        # Initialize an empty change
        change = dns.MESSAGES_MODULE.Change()

        # Get the SOA record; there will be exactly one.
        # Add an addition and a deletion to the change to increment the SOA.
        records = [
            record for record in list_pager.YieldFromList(
                dns.resourceRecordSets,
                dns.MESSAGES_MODULE.DnsResourceRecordSetsListRequest(
                    project=zone_ref.project,
                    managedZone=zone_ref.Name(),
                    name=zone.dnsName,
                    type='SOA'),
                field='rrsets')
        ]
        change.deletions.append(records[0])
        change.additions.append(
            import_util.NextSOARecordSet(records[0], api_version=api_version))

        # Write change to transaction file
        try:
            with files.FileWriter(args.transaction_file) as transaction_file:
                transaction_util.WriteToYamlFile(transaction_file, change)
        except Exception as exp:
            msg = 'Unable to write transaction [{0}] because [{1}]'
            msg = msg.format(args.transaction_file, exp)
            raise transaction_util.UnableToAccessTransactionFile(msg)

        log.status.Print('Transaction started [{0}].'.format(
            args.transaction_file))
Example #22
  def Visit(self, node, parent, is_group):
    """Renders a help text doc for each node in the CLI tree.

    Args:
      node: group/command CommandCommon info.
      parent: The parent Visit() return value, None at the top level.
      is_group: True if node is a group, otherwise it is a command.

    Returns:
      The parent value, ignored here.
    """
    # Set up the destination dir for this level.
    command = node.GetPath()

    if is_group:
      directory = os.path.join(self._directory, *command[1:])
    else:
      directory = os.path.join(self._directory, *command[1:-1])

    files.MakeDir(directory, mode=0o755)
    # Render the help text document.
    path = os.path.join(directory, 'GROUP' if is_group else command[-1])
    with files.FileWriter(path) as f:
      md = markdown.Markdown(node)
      render_document.RenderDocument(style='text', fin=io.StringIO(md),
                                     out=f)
    return parent
Example #23
    def Run(self, args):
        # Get the security policy.
        holder = base_classes.ComputeApiHolder(self.ReleaseTrack())
        ref = self.SECURITY_POLICY_ARG.ResolveAsResource(
            args, holder.resources)

        requests = []
        security_policy = client.SecurityPolicy(ref,
                                                compute_client=holder.client)
        requests.extend(security_policy.Describe(only_generate_request=True))
        resources = holder.client.MakeRequests(requests)

        # Export the security policy.
        try:
            with files.FileWriter(args.file_name) as export_file:
                if args.file_format == 'json':
                    security_policies_utils.WriteToFile(
                        export_file, resources[0], 'json')
                else:
                    security_policies_utils.WriteToFile(
                        export_file, resources[0], 'yaml')
        except EnvironmentError as exp:
            msg = 'Unable to export security policy to file [{0}]: {1}'.format(
                args.file_name, exp)
            raise exceptions.BadFileException(msg)

        log.status.Print('Exported security policy to [{0}].'.format(
            args.file_name))
Example #24
  def __init__(self, cli, directory, hidden=False, progress_callback=None,
               restrict=None):
    """Constructor.

    Args:
      cli: The Cloud SDK CLI object.
      directory: The devsite output directory path name.
      hidden: Boolean indicating whether to consider the hidden CLI.
      progress_callback: f(float), The function to call to update the progress
        bar or None for no progress bar.
      restrict: Restricts the walk to the command/group dotted paths in this
        list. For example, restrict=['gcloud.alpha.test', 'gcloud.topic']
        restricts the walk to the 'gcloud topic' and 'gcloud alpha test'
        commands/groups.

    """
    super(DevSiteGenerator, self).__init__(cli)
    self._directory = directory
    files.MakeDir(self._directory)
    self._need_section_tag = []
    toc_path = os.path.join(self._directory, self._TOC)
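    # The TOC writer stays open across the whole walk (and is presumably
    # closed when the walk finishes), so FileWriter is used directly here
    # rather than as a context manager.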
    self._toc_root = files.FileWriter(toc_path)
    self._toc_root.write('toc:\n')
    self._toc_root.write('- title: "gcloud Reference"\n')
    self._toc_root.write('  path: %s\n' % self._REFERENCE)
    self._toc_root.write('  section:\n')
    self._toc_main = None
Example #25
  def testInstantiateWorkflowFromFile(self):
    template_yaml = """
      jobs:
      - stepId: teragen
        hadoopJob:
          mainJarFileUri: file:///usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar
          args:
          - teragen
          - "10"
          - hdfs:///tmp/terasort-input
      placement:
        managedCluster:
          clusterName: "workflow-template-gcloud-e2e"
          config:
            gceClusterConfig:
              zoneUri: {zone}
            softwareConfig:
              properties:
                dataproc:fake.property: "fake-value"
    """.format(zone=self.zone)

    file_name = os.path.join(self.temp_path, 'template.yaml')
    with files.FileWriter(file_name) as stream:
      stream.write(template_yaml)

    self.RunDataproc(
        'workflow-templates instantiate-from-file --file {file_name}'.format(
            file_name=file_name))
Example #26
    def Run(self, args):
        dataproc = dp.Dataproc(self.ReleaseTrack())

        cluster_ref = args.CONCEPTS.cluster.Parse()

        request = dataproc.messages.DataprocProjectsRegionsClustersGetRequest(
            projectId=cluster_ref.projectId,
            region=cluster_ref.region,
            clusterName=cluster_ref.clusterName)

        cluster = dataproc.client.projects_regions_clusters.Get(request)

        # Filter out Dataproc-generated labels.
        clusters.DeleteGeneratedLabels(cluster, dataproc)

        schema_path = self.GetSchemaPath()
        if args.destination:
            with files.FileWriter(args.destination) as stream:
                export_util.Export(message=cluster,
                                   stream=stream,
                                   schema_path=schema_path)
        else:
            export_util.Export(message=cluster,
                               stream=sys.stdout,
                               schema_path=schema_path)
Example #27
  def __init__(self, command_name, root_command_args=None):
    """Initializes the CLI tree generator.

    Args:
      command_name: str, The name of the CLI tree command (e.g. 'gsutil').
      root_command_args: [str], The argument list to invoke the root CLI tree
        command. Examples:
        * ['gcloud']
        * ['python', '/tmp/tarball_dir/gsutil/gsutil']
    Raises:
      CommandInvocationError: If the provided root command cannot be invoked.
    """
    if root_command_args:
      with files.FileWriter(os.devnull) as devnull:
        try:
          # We don't actually care about whether the root command succeeds here;
          # we just want to see if it can be invoked.
          subprocess.Popen(root_command_args, stdin=devnull, stdout=devnull,
                           stderr=devnull).communicate()
        except OSError as e:
          raise CommandInvocationError(e)

    self.command_name = command_name
    self._root_command_args = root_command_args or [command_name]
    self._cli_version = None  # For memoizing GetVersion()
Example #28
    def Run(self, args):
        req = self._CreateDecryptRequest(args)
        client = cloudkms_base.GetClientInstance()
        try:
            resp = client.projects_locations_keyRings_cryptoKeys.Decrypt(req)
        # Intercept INVALID_ARGUMENT errors related to checksum verification to
        # present a user-friendly message. All other errors are surfaced as-is.
        except apitools_exceptions.HttpBadRequestError as error:
            e2e_integrity.ProcessHttpBadRequestError(error)

        if self._PerformIntegrityVerification(args):
            self._VerifyResponseIntegrityFields(req, resp)

        try:
            if resp.plaintext is None:
                with files.FileWriter(args.plaintext_file):
                    # to create an empty file
                    pass
                log.Print('Decrypted file is empty')
            else:
                log.WriteToFileOrStdout(args.plaintext_file,
                                        resp.plaintext,
                                        binary=True,
                                        overwrite=True)
        except files.Error as e:
            raise exceptions.BadFileException(e)
Example #29
 def SavePromptRecordToFile(self):
     """Serializes data to the cache file."""
     if not self._dirty:
         return
     with file_utils.FileWriter(self._cache_file_path) as f:
         yaml.dump(self._ToDictionary(), stream=f)
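     # Mark the in-memory record clean once it has been persisted.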
     self._dirty = False
Example #30
 def _GetRawManPageText(self):
     """Returns the raw man page text."""
     try:
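         # Send man(1) stderr to the null device while capturing stdout.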
         with files.FileWriter(os.devnull) as f:
             return subprocess.check_output(['man', self.command], stderr=f)
     except (OSError, subprocess.CalledProcessError):
         raise NoManPageTextForCommand(
             'Cannot get man(1) command man page text for [{}].'.format(
                 self.command))
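Across these examples the pattern is consistent: files.FileWriter returns a
writable file object, normally used as a context manager, with create_path=True
creating missing parent directories and append=True opening in append mode. A
minimal sketch with a hypothetical path, assuming the module lives at
googlecloudsdk.core.util.files as the aliases above suggest:

    from googlecloudsdk.core.util import files

    # Hypothetical output path; create_path=True makes intermediate directories.
    with files.FileWriter('/tmp/demo/out.yaml', create_path=True) as f:
        f.write('key: value\n')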