Example #1
 def Args(cls, parser):
     flags.AddConcepts(
         parser,
         flags.GetAttestorPresentationSpec(
             required=True,
             positional=False,
             group_help=(
                 'The attestor on which the public key should be updated.'),
         ),
     )
     parser.add_argument('public_key_id',
                         help='The ID of the public key to update.')
     file_group = parser.add_mutually_exclusive_group()
     file_group.add_argument(
         '--public-key-file',
         action=actions.DeprecationAction(
             'public-key-file',
             warn=
             'This flag is deprecated. Use --pgp-public-key-file instead.'),
         type=arg_parsers.FileContents(),
         help='The path to a file containing the '
         'updated ASCII-armored PGP public key.')
     file_group.add_argument('--pgp-public-key-file',
                             type=arg_parsers.FileContents(),
                             help='The path to a file containing the '
                             'updated ASCII-armored PGP public key.')
     parser.add_argument('--comment',
                         help='The comment describing the public key.')
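Example #1 relies on arg_parsers.FileContents() as the argparse type= callable, so the parsed flag value is the file's text rather than its path. Below is a minimal, runnable sketch of that idea using only the standard library; file_contents and the temporary key file are illustrative stand-ins, not the gcloud SDK implementation.

import argparse
import tempfile


def file_contents(path):
    # Rough stand-in for arg_parsers.FileContents(): resolve the flag to the
    # text of the named file instead of the path itself.
    with open(path, 'r', encoding='utf-8') as f:
        return f.read()


# Create a small file so the sketch can be run end to end.
with tempfile.NamedTemporaryFile('w', suffix='.asc', delete=False) as tmp:
    tmp.write('-----BEGIN PGP PUBLIC KEY BLOCK-----\n...')
    key_path = tmp.name

parser = argparse.ArgumentParser()
parser.add_argument('--pgp-public-key-file', type=file_contents,
                    help='Path to a key file; the stored value is its contents.')
args = parser.parse_args(['--pgp-public-key-file', key_path])
print(args.pgp_public_key_file.splitlines()[0])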
Example #2
 def Args(cls, parser):
     flags.AddConcepts(
         parser,
         flags.GetAttestorPresentationSpec(
             required=True,
             positional=False,
             group_help=(
                 'The attestor to which the public key should be added.'),
         ),
     )
     # TODO(b/133451183): Remove deprecated flag.
     if cls.ReleaseTrack() == base.ReleaseTrack.GA:
         parser.add_argument('--pgp-public-key-file',
                             type=arg_parsers.FileContents(),
                             required=True,
                             help='The path to the file containing the '
                             'ASCII-armored PGP public key to add.')
     else:
         pgp_group = parser.add_mutually_exclusive_group(required=True)
         pgp_group.add_argument('--public-key-file',
                                action=actions.DeprecationAction(
                                    'public-key-file',
                                    warn='This flag is deprecated. '
                                    'Use --pgp-public-key-file instead.'),
                                type=arg_parsers.FileContents(),
                                help='The path to the file containing the '
                                'ASCII-armored PGP public key to add.')
         pgp_group.add_argument('--pgp-public-key-file',
                                type=arg_parsers.FileContents(),
                                help='The path to the file containing the '
                                'ASCII-armored PGP public key to add.')
     parser.add_argument('--comment',
                         help='The comment describing the public key.')
Example #3
    def Args(cls, parser):
        flags.AddConcepts(
            parser,
            flags.GetAttestorPresentationSpec(
                required=True,
                positional=False,
                group_help=(
                    'The attestor to which the public key should be added.'),
            ),
        )
        parser.add_argument('--comment',
                            help='The comment describing the public key.')

        key_group = parser.add_group(mutex=True, required=True)
        pgp_group = key_group.add_group()
        pgp_group.add_argument('--pgp-public-key-file',
                               type=arg_parsers.FileContents(),
                               help='The path to the file containing the '
                               'ASCII-armored PGP public key to add.')
        kms_group = key_group.add_group()
        flags.AddConcepts(
            kms_group,
            flags.GetCryptoKeyVersionPresentationSpec(
                base_name='keyversion',
                required=True,
                positional=False,
                use_global_project_flag=False,
                group_help=textwrap.dedent("""\
              The Cloud KMS (Key Management Service) CryptoKeyVersion whose
              public key will be added to the attestor.""")),
        )
        pkix_group = key_group.add_group()
        pkix_group.add_argument(
            '--pkix-public-key-file',
            required=True,
            type=arg_parsers.FileContents(),
            help='The path to the file containing the PKIX public key to add.')
        pkix_group.add_argument('--pkix-public-key-algorithm',
                                choices=pkix.GetAlgorithmMapper().choices,
                                required=True,
                                help=textwrap.dedent("""\
            The signing algorithm of the associated key. This will be used to
            verify the signatures associated with this key."""))

        parser.add_argument('--public-key-id-override',
                            type=str,
                            help=textwrap.dedent("""\
          If provided, the ID to replace the default API-generated one. All IDs
          must be valid URIs as defined by RFC 3986
          (https://tools.ietf.org/html/rfc3986).

          When creating Attestations to be verified by this key, one must always
          provide this custom ID as the public key ID."""))
Example #4
def AddArguments(parser, bucket_metric_enabled=False):
    """Add arguments for logging metrics update command."""
    parser.add_argument('metric_name',
                        help='The name of the log-based metric to update.')
    config_group = parser.add_argument_group(
        help='Data about the metric to update.', mutex=True, required=True)
    legacy_mode_group = config_group.add_argument_group(
        help=('Arguments to specify information about simple counter logs-'
              'based metrics.'))
    legacy_mode_group.add_argument(
        '--description',
        required=False,
        help=('A new description for the metric. '
              'If omitted, the description is not changed.'))
    legacy_mode_group.add_argument(
        '--log-filter',
        required=False,
        help=('A new filter string for the metric. '
              'If omitted, the filter is not changed.'))
    config_group.add_argument('--config-from-file',
                              help=('A path to a YAML file specifying the '
                                    'updates to be made to the logs-based '
                                    'metric.'),
                              type=arg_parsers.FileContents())

    if bucket_metric_enabled:
        legacy_mode_group.add_argument(
            '--bucket-name',
            help='The Log Bucket name which owns the log-based metric.')
Example #5
def AddGitLabConfigArgs(parser, update=False):
  """Set up all the argparse flags for creating or updating a GitLab config.

  Args:
    parser: An argparse.ArgumentParser-like object.
    update: If true, use the version of the flags for updating a config.
      Otherwise, use the version for creating a config.

  Returns:
    The parser argument with GitLab config flags added in.
  """
  parser.add_argument(
      '--host-uri',
      required=not update,
      help='The host uri of the GitLab Enterprise instance.')
  parser.add_argument(
      '--user-name',
      required=not update,
      help='The GitLab user name that should be associated with this config.')
  parser.add_argument(
      '--api-key-secret-version',
      required=not update,
      help='Secret Manager resource containing the Cloud Build API key that should be associated with this config. The secret is specified in resource URL format projects/{secret_project}/secrets/{secret_name}/versions/{secret_version}.'
  )
  parser.add_argument(
      '--api-access-token-secret-version',
      required=not update,
      help='Secret Manager resource containing the API access token. The secret is specified in resource URL format projects/{secret_project}/secrets/{secret_name}/versions/{secret_version}.'
  )
  parser.add_argument(
      '--read-access-token-secret-version',
      required=not update,
      help='Secret Manager resource containing the read access token. The secret is specified in resource URL format projects/{secret_project}/secrets/{secret_name}/versions/{secret_version}.'
  )
  parser.add_argument(
      '--webhook-secret-secret-version',
      required=not update,
      help='Secret Manager resource containing the webhook secret. The secret is specified in resource URL format projects/{secret_project}/secrets/{secret_name}/versions/{secret_version}.'
  )
  parser.add_argument(
      '--ssl-ca-file',
      type=arg_parsers.FileContents(),
      help='Path to a local file that contains SSL certificate to use for requests to GitLab Enterprise. The certificate should be in PEM format.'
  )
  parser.add_argument(
      '--service-directory-service',
      help="""\
Service Directory service that should be used when making calls to the GitLab Enterprise instance.

If not specified, calls will be made over the public internet.
""")
  if not update:
    parser.add_argument(
        '--name', required=True, help='The name of the GitLab config.')
    parser.add_argument(
        '--region',
        required=True,
        help='The Cloud location of the GitLab config.')

  return parser
Example #6
def AddReservationsFromFileFlag(parser, custom_text=None):
    help_text = (
        custom_text if custom_text else
        'Path to a YAML file of multiple reservations\' configuration.')
    return parser.add_argument('--reservations-from-file',
                               type=arg_parsers.FileContents(),
                               help=help_text)
Example #7
  def Args(parser):
    parser.add_argument(
        'template_file_gcs_path',
        metavar='TEMPLATE_FILE_GCS_PATH',
        help=('The Google Cloud Storage location of the flex template file. '
              'Overrides if file already exists.'),
        type=arg_parsers.RegexpValidator(r'^gs://.*',
                                         'Must begin with \'gs://\''))

    parser.add_argument(
        '--image',
        help=('Path to any image registry location of the flex template '
              'image.'),
        required=True)

    parser.add_argument(
        '--sdk-language',
        help=('SDK language of the flex template job.'),
        choices=['JAVA', 'PYTHON'],
        required=True)

    parser.add_argument(
        '--metadata-file',
        help='Local path to the metadata json file for the flex template.',
        type=arg_parsers.FileContents())

    parser.add_argument(
        '--print-only',
        help=('Prints the container spec to stdout. Does not save in '
              'Google Cloud Storage.'),
        default=False,
        action=actions.StoreBooleanProperty(
            properties.VALUES.dataflow.print_only))
Example #8
def ParseConditionFromFile(condition_from_file):
    """Read condition from YAML or JSON file."""

    condition = arg_parsers.FileContents()(condition_from_file)
    condition_dict = iam_util.ParseYamlOrJsonCondition(
        condition, _ConditionFileFormatException(condition_from_file))
    return condition_dict
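The helper above chains FileContents() with the SDK's iam_util.ParseYamlOrJsonCondition. As a standalone approximation (not the SDK helper), yaml.safe_load from PyYAML covers both formats, since a JSON document is also valid YAML; parse_condition_text is an illustrative name.

import yaml  # PyYAML; a JSON document is also parseable as YAML.


def parse_condition_text(condition_text):
    # Standalone approximation of ParseConditionFromFile's parsing step:
    # accept YAML or JSON text and return the IAM condition as a dict.
    condition = yaml.safe_load(condition_text)
    if not isinstance(condition, dict):
        raise ValueError('The condition file must contain a single mapping.')
    return condition


print(parse_condition_text(
    '{"title": "expiry", '
    '"expression": "request.time < timestamp(\'2025-01-01T00:00:00Z\')"}'))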
Example #9
def AddNodeAffinityFlagToParser(parser):
    """Adds a node affinity flag used for scheduling instances."""
    sole_tenancy_group = parser.add_group('Sole Tenancy.', mutex=True)
    sole_tenancy_group.add_argument('--node-affinity-file',
                                    type=arg_parsers.FileContents(),
                                    help="""\
          The JSON/YAML file containing the configuration of desired nodes onto
          which this instance could be scheduled. These rules filter the nodes
          according to their node affinity labels. A node's affinity labels come
          from the node template of the group the node is in.

          The file should contain a list of JSON/YAML objects with the
          following fields:

          *key*::: Corresponds to the node affinity label keys of
          the Node resource.
          *operator*::: Specifies the node selection type. Must be one of:
            `IN`: Requires Compute Engine to seek for matched nodes.
            `NOT_IN`: Requires Compute Engine to avoid certain nodes.
          *values*::: Optional. A list of values which correspond to the node
          affinity label values of the Node resource.
          """)
    sole_tenancy_group.add_argument(
        '--node-group',
        help='The name of the node group to schedule this instance on.')
    sole_tenancy_group.add_argument(
        '--node', help='The name of the node to schedule this instance on.')
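The --node-affinity-file help above spells out a file format with key, operator, and values fields. A hypothetical file body consistent with that description (label keys and values here are purely illustrative) could look like this, shown being loaded with PyYAML:

import yaml  # PyYAML, for illustration only.

# Hypothetical contents for --node-affinity-file.
node_affinity_file = """\
- key: workload
  operator: IN
  values: [frontend, batch]
- key: license
  operator: NOT_IN
  values: [byol]
"""
print(yaml.safe_load(node_affinity_file))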
Example #10
def AddReservationArgGroup(parser):
    """Adds all flags needed for reservations creation."""
    reservations_manage_group = parser.add_group(
        'Manage the reservations to be created with the commitment.',
        mutex=True)

    reservations_manage_group.add_argument(
        '--reservations-from-file',
        type=arg_parsers.FileContents(),
        help='Path to a YAML file of multiple reservations\' configuration.')

    single_reservation_group = reservations_manage_group.add_argument_group(
        help='Manage the reservation to be created with the commitment.')
    resource_args.GetReservationResourceArg(
        positional=False).AddArgument(single_reservation_group)
    single_reservation_group.add_argument(
        '--reservation-type',
        hidden=True,
        choices=['specific'],
        default='specific',
        help='The type of the reservation to be created.')

    specific_sku_reservation_group = single_reservation_group.add_argument_group(
        help='Manage the specific SKU reservation properties to create.')
    AddFlagsToSpecificSkuGroup(specific_sku_reservation_group)
Example #11
def AddCycleFrequencyArgs(parser, flag_suffix, start_time_help,
                          cadence_help, supports_hourly=False,
                          has_restricted_start_times=False,
                          supports_weekly=False, parent_group=None):
  """Add Cycle Frequency args for Resource Policies."""
  if parent_group:
    freq_group = parent_group.add_argument_group(
        'Cycle Frequency Group.', mutex=True)
  else:
    freq_group = parser.add_argument_group(
        'Cycle Frequency Group.', required=True, mutex=True)
  if has_restricted_start_times:
    start_time_help += """\
        Valid choices are 00:00, 04:00, 08:00, 12:00,
        16:00 and 20:00 UTC. For example, `--start-time="03:00-05"`
        (which gets converted to 08:00 UTC)."""
  freq_flags_group = freq_group.add_group(
      'From flags:' if supports_weekly else '')
  freq_flags_group.add_argument(
      '--start-time', required=True,
      type=arg_parsers.Datetime.Parse,
      help=start_time_help)
  cadence_group = freq_flags_group.add_group(mutex=True, required=True)
  cadence_group.add_argument(
      '--daily-{}'.format(flag_suffix),
      dest='daily_cycle',
      action='store_true',
      help='{} starts daily at START_TIME.'.format(cadence_help))

  if supports_hourly:
    cadence_group.add_argument(
        '--hourly-{}'.format(flag_suffix),
        metavar='HOURS',
        dest='hourly_cycle',
        type=arg_parsers.BoundedInt(lower_bound=1),
        help='{} occurs every n hours starting at START_TIME.'.format(
            cadence_help))

  if supports_weekly:
    base.ChoiceArgument(
        '--weekly-{}'.format(flag_suffix),
        dest='weekly_cycle',
        choices=['monday', 'tuesday', 'wednesday', 'thursday', 'friday',
                 'saturday', 'sunday'],
        help_str='{} occurs weekly on WEEKLY_{} at START_TIME.'.format(
            cadence_help, flag_suffix.upper())).AddToParser(cadence_group)
    freq_file_group = freq_group.add_group('From file:')
    freq_file_group.add_argument(
        '--weekly-{}-from-file'.format(flag_suffix),
        dest='weekly_cycle_from_file',
        type=arg_parsers.FileContents(),
        help="""\
        A JSON/YAML file which specifies a weekly schedule. It should be a
        list of objects with the following fields:

        day: Day of the week with the same choices as `--weekly-{}`.
        startTime: Start time of the snapshot schedule with the same format
            as --start-time.
        """.format(flag_suffix))
Example #12
def AddSelfManagedCertificateDataFlagsToParser(parser, is_required):
    """Adds certificate file and private key file flags."""
    # If the group itself is not required, the command will fail if
    # 1. any argument in the group is provided and
    # 2. any required argument in the group is not provided.
    cert_flag = base.Argument('--certificate-file',
                              help='The certificate data in PEM-encoded form.',
                              type=arg_parsers.FileContents(),
                              required=True)
    key_flag = base.Argument('--private-key-file',
                             help='The private key data in PEM-encoded form.',
                             type=arg_parsers.FileContents(),
                             required=True)

    group = base.ArgumentGroup(
        help='Arguments to configure self-managed certificate data.',
        required=is_required,
        category=base.COMMONLY_USED_FLAGS if not is_required else None)
    group.AddArgument(cert_flag)
    group.AddArgument(key_flag)
    group.AddToParser(parser)
Example #13
def AddMessageFlags(parser, resource, flag=None):
  """Adds flags for specifying a message as a string/file to the parser."""
  message_group = parser.add_group(mutex=True)
  message_group.add_argument(
      '--{}'.format(flag or resource),
      help='The {} as a string, in either JSON or YAML format.'.format(
          resource))
  message_group.add_argument(
      '--{}-from-file'.format(flag or resource),
      type=arg_parsers.FileContents(),
      help='The path to a JSON or YAML file containing the {}.'.format(
          resource))
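AddMessageFlags targets the gcloud calliope parser (parser.add_group is not part of standard argparse), but the string-or-file pattern it builds can be sketched with plain argparse; the --policy flag names below are illustrative.

import argparse

parser = argparse.ArgumentParser()
group = parser.add_mutually_exclusive_group()
# The bare flag takes the message inline; the -from-file variant opens the
# named file at parse time (the SDK's FileContents() returns its text instead).
group.add_argument('--policy',
                   help='The policy as a string, in either JSON or YAML format.')
group.add_argument('--policy-from-file', type=argparse.FileType('r'),
                   help='The path to a JSON or YAML file containing the policy.')

args = parser.parse_args(['--policy', '{"displayName": "demo"}'])
print(args.policy)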
Example #14
 def Args(cls, parser):
     flags.AddConcepts(
         parser,
         flags.GetAttestorPresentationSpec(
             required=True,
             positional=False,
             group_help=(
                 'The attestor to which the public key should be added.'),
         ),
     )
     parser.add_argument('--pgp-public-key-file',
                         type=arg_parsers.FileContents(),
                         required=True,
                         help='The path to the file containing the '
                         'ASCII-armored PGP public key to add.')
     parser.add_argument('--comment',
                         help='The comment describing the public key.')
Example #15
def GeneratePublicKeyDataFromFile(path):
    """Generate public key data from a path.

  Args:
    path: (bytes) the public key file path given by the command.

  Raises:
    InvalidArgumentException: if the public key file path provided does not
                              exist or is too large.
  Returns:
    A public key encoded using the UTF-8 charset.
  """
    try:
        public_key_data = arg_parsers.FileContents()(path).strip()
    except arg_parsers.ArgumentTypeError as e:
        raise gcloud_exceptions.InvalidArgumentException(
            'public_key_file',
            '{}. Please double check your input and try again.'.format(e))
    return public_key_data.encode('utf-8')
Example #16
def AddPolicySettingsFlags(parser, update=False):
  """Adds policy settings flags to the parser."""
  policy_settings_group = parser.add_group(help="""\
      Policy Settings.
      If any of these are specified, they will overwrite fields in the
      `--policy` or `--policy-from-file` flags if specified.""")
  AddDisplayNameFlag(policy_settings_group, resource='Alert Policy')
  AddCombinerFlag(policy_settings_group, resource='Alert Policy')
  enabled_kwargs = {
      'action': arg_parsers.StoreTrueFalseAction if update else 'store_true'
  }
  if not update:
    # Can't specify default if using StoreTrueFalseAction.
    enabled_kwargs['default'] = True
  policy_settings_group.add_argument(
      '--enabled', help='If the policy is enabled.', **enabled_kwargs)

  documentation_group = policy_settings_group.add_group(help='Documentation')
  documentation_group.add_argument(
      '--documentation-format',
      default='text/markdown' if not update else None,
      help='The MIME type that should be used with `--documentation` or '
           '`--documentation-from-file`. Currently, only "text/markdown" is '
           'supported.')
  documentation_string_group = documentation_group.add_group(mutex=True)
  documentation_string_group.add_argument(
      '--documentation',
      help='The documentation to be included with the policy.')
  documentation_string_group.add_argument(
      '--documentation-from-file',
      type=arg_parsers.FileContents(),
      help='The path to a file containing the documentation to be included '
           'with the policy.')
  if update:
    repeated.AddPrimitiveArgs(
        policy_settings_group,
        'Alert Policy',
        'notification-channels',
        'Notification Channels')
    AddUpdateLabelsFlags(
        'user-labels', policy_settings_group, group_text='User Labels')
  else:
    AddCreateLabelsFlag(policy_settings_group, 'user-labels', 'policy')
Example #17
 def Args(parser):
     """Register flags for this command."""
     parser.add_argument('metric_name', help='The name of the new metric.')
     config_group = parser.add_argument_group(
         help='Data about the new metric.', mutex=True, required=True)
     legacy_mode_group = config_group.add_argument_group(
         help=('A group of arguments to specify simple counter logs-based '
               'metrics. '))
     legacy_mode_group.add_argument('--description',
                                    required=True,
                                    help='The metric\'s description.')
     legacy_mode_group.add_argument('--log-filter',
                                    required=True,
                                    help='The metric\'s filter expression.')
     config_group.add_argument(
         '--config-from-file',
         help=('A path to a YAML or JSON file specifying '
               'the logs-based metric to create.'),
         type=arg_parsers.FileContents())
Example #18
def AddResourceTypeFlags(parser):
    """Add resource-type flag to parser."""
    group = parser.add_group(mutex=True, required=False)
    group.add_argument('--resource-types',
                       type=arg_parsers.ArgList(),
                       metavar='RESOURCE_TYPE',
                       help="""List of Config Connector KRM Kinds to export.
  For a full list of supported resource types for a given parent scope run:

  $ {parent_command} list-resource-types --[project|organization|folder]=<PARENT>
  """)
    group.add_argument(
        '--resource-types-file',
        type=arg_parsers.FileContents(),
        metavar='RESOURCE_TYPE_FILE',
        help=
        """A comma (',') or newline ('\\n') separated file containing the list of
      Config Connector KRM Kinds to export.
  For a full list of supported resource types for a given parent scope run:

  $ {parent_command} list-resource-types --[project|organization|folder]=<PARENT>
  """)
Example #19
def AddBitbucketServerConfigArgs(parser, update=False):
    """Set up all the argparse flags for creating or updating a Bitbucket Server config.

  Args:
    parser: An argparse.ArgumentParser-like object.
    update: If true, use the version of the flags for updating a config.
      Otherwise, use the version for creating a config.

  Returns:
    The parser argument with Bitbucket Server config flags added in.
  """
    parser.add_argument('--host-uri',
                        required=not update,
                        help='The host uri of the Bitbucket Server instance.')
    parser.add_argument(
        '--user-name',
        required=not update,
        help=
        'The Bitbucket Server user name that should be associated with this config.'
    )
    parser.add_argument(
        '--api-key',
        required=not update,
        help=
        'The Cloud Build API key that should be associated with this config.')
    parser.add_argument(
        '--admin-access-token-secret-version',
        required=not update,
        help=
        'Secret Manager resource containing the admin access token. The secret is specified in resource URL format projects/{secret_project}/secrets/{secret_name}/versions/{secret_version}.'
    )
    parser.add_argument(
        '--read-access-token-secret-version',
        required=not update,
        help=
        'Secret Manager resource containing the read access token. The secret is specified in resource URL format projects/{secret_project}/secrets/{secret_name}/versions/{secret_version}.'
    )
    parser.add_argument(
        '--webhook-secret-secret-version',
        required=not update,
        help=
        'Secret Manager resource containing the webhook secret. The secret is specified in resource URL format projects/{secret_project}/secrets/{secret_name}/versions/{secret_version}.'
    )
    parser.add_argument(
        '--ssl-ca-file',
        type=arg_parsers.FileContents(),
        help=
        'Path to a local file that contains SSL certificate to use for requests to Bitbucket Server. The certificate should be in PEM format.'
    )
    if not update:
        parser.add_argument(
            '--name',
            required=True,
            help='The config name of the Bitbucket Server connection.')
        parser.add_argument('--peered-network',
                            help="""\
VPC network that should be used when making calls to the Bitbucket Server instance.

If not specified, calls will be made over the public internet.
""")
    if update:
        parser.add_argument(
            'CONFIG',
            help=
            'The unique identifier of the Bitbucket Server Config to be updated.'
        )
    return parser
Example #20
def _CommonArgs(parser):
    """Register flags for this command.

  Args:
    parser: argparse.ArgumentParser to register arguments with.
  """
    image_args = parser.add_mutually_exclusive_group(required=True)
    image_building_args = image_args.add_argument_group()
    parser.add_argument(
        'template_file_gcs_path',
        metavar='TEMPLATE_FILE_GCS_PATH',
        help=('The Google Cloud Storage location of the flex template file. '
              'Overrides if file already exists.'),
        type=arg_parsers.RegexpValidator(r'^gs://.*',
                                         'Must begin with \'gs://\''))

    image_args.add_argument(
        '--image',
        help=('Path to any image registry location of the prebuilt flex '
              'template image.'))

    parser.add_argument('--sdk-language',
                        help=('SDK language of the flex template job.'),
                        choices=['JAVA', 'PYTHON'],
                        required=True)

    parser.add_argument(
        '--metadata-file',
        help='Local path to the metadata json file for the flex template.',
        type=arg_parsers.FileContents())

    parser.add_argument(
        '--print-only',
        help=('Prints the container spec to stdout. Does not save in '
              'Google Cloud Storage.'),
        default=False,
        action=actions.StoreBooleanProperty(
            properties.VALUES.dataflow.print_only))

    image_building_args.add_argument(
        '--image-gcr-path',
        help=('The Google Container Registry location to store the flex '
              'template image to be built.'),
        type=arg_parsers.RegexpValidator(r'^gcr.io/.*',
                                         'Must begin with \'gcr.io/\''),
        required=True)

    image_building_args.add_argument(
        '--jar',
        metavar='JAR',
        type=arg_parsers.ArgList(),
        action=arg_parsers.UpdateAction,
        help=('Local path to your dataflow pipeline jar file and all their '
              'dependent jar files required for the flex template classpath. '
              'You can pass them as a comma separated list or repeat '
              'individually with --jar flag. Ex: --jar="code.jar,dep.jar" or '
              '--jar code.jar, --jar dep.jar.'),
        required=True)

    image_building_args.add_argument(
        '--flex-template-base-image',
        help=('Flex template base image to be used while building the '
              'container image. Allowed choices are JAVA8, JAVA11 or gcr.io '
              'path of the specific version of the base image. For JAVA8 and '
              'JAVA11 option, we use the latest base image version to build '
              'the container. You can also provide a specific version from '
              'this link  https://gcr.io/dataflow-templates-base/'),
        type=arg_parsers.RegexpValidator(
            r'^JAVA11$|^JAVA8$|^gcr.io/.*',
            'Must be JAVA11 or JAVA8 or begin with \'gcr.io/\''),
        required=True)

    image_building_args.add_argument(
        '--env',
        metavar='ENV',
        type=arg_parsers.ArgDict(),
        action=arg_parsers.UpdateAction,
        help=
        ('Environment variables to create for the Dockerfile. '
         'You can pass them as a comma separated list or repeat individually '
         'with --env flag. Ex: --env="A=B,C=D" or --env A=B, --env C=D. '
         'You can find the list of supported environment variables at this '
         'link: https://cloud.google.com/dataflow/docs/guides/templates/'
         'troubleshooting-flex-templates'
         '#setting_required_dockerfile_environment_variables'),
        required=True)
Example #21
def _CommonArgs(parser):
    """Registers flags for this command.

  Args:
    parser: argparse.ArgumentParser to register arguments with.
  """
    image_args = parser.add_mutually_exclusive_group(required=True)
    image_building_args = image_args.add_argument_group()
    parser.add_argument(
        'template_file_gcs_path',
        metavar='TEMPLATE_FILE_GCS_PATH',
        help=('The Google Cloud Storage location of the flex template file. '
              'Overrides if file already exists.'),
        type=arg_parsers.RegexpValidator(r'^gs://.*',
                                         'Must begin with \'gs://\''))

    image_args.add_argument(
        '--image',
        help=('Path to any image registry location of the prebuilt flex '
              'template image.'))

    parser.add_argument(
        '--image-repository-username-secret-id',
        help=(
            'Secret Manager secret id for the username to authenticate to '
            'private registry. Should be in the format '
            'projects/{project}/secrets/{secret}/versions/{secret_version} or '
            'projects/{project}/secrets/{secret}. If the version is not '
            'provided latest version will be used.'),
        type=arg_parsers.RegexpValidator(
            r'^projects\/[^\n\r\/]+\/secrets\/[^\n\r\/]+(\/versions\/[^\n\r\/]+)?$',
            'Must be in the format '
            '\'projects/{project}/secrets/{secret}\' or '
            '\'projects/{project}/secrets/{secret}/versions/{secret_version}\'.'
        ))

    parser.add_argument(
        '--image-repository-password-secret-id',
        help=(
            'Secret Manager secret id for the password to authenticate to '
            'private registry. Should be in the format '
            'projects/{project}/secrets/{secret}/versions/{secret_version} or '
            'projects/{project}/secrets/{secret}. If the version is not '
            'provided latest version will be used.'),
        type=arg_parsers.RegexpValidator(
            r'^projects\/[^\n\r\/]+\/secrets\/[^\n\r\/]+(\/versions\/[^\n\r\/]+)?$',
            'Must be in the format '
            '\'projects/{project}/secrets/{secret}\' or '
            '\'projects/{project}/secrets/{secret}/versions/{secret_version}\'.'
        ))

    parser.add_argument(
        '--image-repository-cert-path',
        help=
        ('The full URL to self-signed certificate of private registry in '
         'Cloud Storage. For example, gs://mybucket/mycerts/selfsigned.crt. '
         'The certificate provided in Cloud Storage must be DER-encoded and '
         'may be supplied in binary or printable (Base64) encoding. If the '
         'certificate is provided in Base64 encoding, it must be bounded at '
         'the beginning by -----BEGIN CERTIFICATE-----, and must be bounded '
         'at the end by -----END CERTIFICATE-----. If this parameter is '
         'provided, the docker daemon in the template launcher will be '
         'instructed to trust that certificate. '),
        type=arg_parsers.RegexpValidator(r'^gs://.*',
                                         'Must begin with \'gs://\''))

    parser.add_argument('--sdk-language',
                        help=('SDK language of the flex template job.'),
                        choices=['JAVA', 'PYTHON'],
                        required=True)

    parser.add_argument(
        '--metadata-file',
        help='Local path to the metadata json file for the flex template.',
        type=arg_parsers.FileContents())

    parser.add_argument(
        '--print-only',
        help=('Prints the container spec to stdout. Does not save in '
              'Google Cloud Storage.'),
        default=False,
        action=actions.StoreBooleanProperty(
            properties.VALUES.dataflow.print_only))

    parser.add_argument(
        '--staging-location',
        help=('Default Google Cloud Storage location to stage local files. '
              "(Must be a URL beginning with 'gs://'.)"),
        type=arg_parsers.RegexpValidator(r'^gs://.*',
                                         'Must begin with \'gs://\''))

    parser.add_argument(
        '--temp-location',
        help=(
            'Default Google Cloud Storage location to stage temporary files. '
            'If not set, defaults to the value for --staging-location. '
            "(Must be a URL beginning with 'gs://'.)"),
        type=arg_parsers.RegexpValidator(r'^gs://.*',
                                         'Must begin with \'gs://\''))

    parser.add_argument('--service-account-email',
                        type=arg_parsers.RegexpValidator(
                            r'.*@.*\..*',
                            'must provide a valid email address'),
                        help='Default service account to run the workers as.')

    parser.add_argument('--max-workers',
                        type=int,
                        help='Default maximum number of workers to run.')

    parser.add_argument(
        '--disable-public-ips',
        action=actions.StoreBooleanProperty(
            properties.VALUES.dataflow.disable_public_ips),
        help='Cloud Dataflow workers must not use public IP addresses.')

    parser.add_argument('--num-workers',
                        type=int,
                        help='Initial number of workers to use by default.')

    parser.add_argument(
        '--worker-machine-type',
        help='Default type of machine to use for workers. Defaults to '
        'server-specified.')

    parser.add_argument(
        '--subnetwork',
        help='Default Compute Engine subnetwork for launching instances '
        'to run your pipeline.')

    parser.add_argument(
        '--network',
        help='Default Compute Engine network for launching instances to '
        'run your pipeline.')

    parser.add_argument(
        '--dataflow-kms-key',
        help='Default Cloud KMS key to protect the job resources.')

    region_group = parser.add_mutually_exclusive_group()
    region_group.add_argument('--worker-region',
                              help='Default region to run the workers in.')

    region_group.add_argument('--worker-zone',
                              help='Default zone to run the workers in.')

    parser.add_argument(
        '--enable-streaming-engine',
        action=actions.StoreBooleanProperty(
            properties.VALUES.dataflow.enable_streaming_engine),
        help='Enable Streaming Engine for the streaming job by default.')

    parser.add_argument(
        '--gcs-log-dir',
        help=('Google Cloud Storage directory to save build logs. '
              "(Must be a URL beginning with 'gs://'.)"),
        type=arg_parsers.RegexpValidator(r'^gs://.*',
                                         'Must begin with \'gs://\''),
        default=None)

    parser.add_argument('--additional-experiments',
                        metavar='ADDITIONAL_EXPERIMENTS',
                        type=arg_parsers.ArgList(),
                        action=arg_parsers.UpdateAction,
                        help=('Default experiments to pass to the job.'))

    parser.add_argument('--additional-user-labels',
                        metavar='ADDITIONAL_USER_LABELS',
                        type=arg_parsers.ArgDict(),
                        action=arg_parsers.UpdateAction,
                        help=('Default user labels to pass to the job.'))

    image_building_args.add_argument(
        '--image-gcr-path',
        help=('The Google Container Registry or Google Artifact Registry '
              'location to store the flex template image to be built.'),
        type=arg_parsers.RegexpValidator(
            r'^(.*\.){0,1}gcr.io/.*|^(.){2,}-docker.pkg.dev/.*',
            ('Must begin with \'[multi-region.]gcr.io/\' or '
             '\'[region.]-docker.pkg.dev/\'. Please check '
             'https://cloud.google.com/container-registry/docs/overview '
             'for available multi-regions in GCR or '
             'https://cloud.google.com/artifact-registry/docs/repo-organize#'
             'locations for available location in GAR')),
        required=True)
    pipeline_args = image_building_args.add_mutually_exclusive_group(
        required=True)
    pipeline_args.add_argument(
        '--jar',
        metavar='JAR',
        type=arg_parsers.ArgList(),
        action=arg_parsers.UpdateAction,
        help=('Local path to your dataflow pipeline jar file and all their '
              'dependent jar files required for the flex template classpath. '
              'You can pass them as a comma separated list or repeat '
              'individually with --jar flag. Ex: --jar="code.jar,dep.jar" or '
              '--jar code.jar, --jar dep.jar.'))

    pipeline_args.add_argument(
        '--py-path',
        metavar='PY_PATH',
        type=arg_parsers.ArgList(),
        action=arg_parsers.UpdateAction,
        help=(
            'Local path to your dataflow pipeline python files and all their '
            'dependent files required for the flex template classpath. '
            'You can pass them as a comma separated list or repeat '
            'individually with --py-path flag. '
            'Ex: --py-path="path/pipeline/,path/dependency/" or '
            '--py-path path/pipeline/, --py-path path/dependency/.'))

    image_building_args.add_argument(
        '--flex-template-base-image',
        help=('Flex template base image to be used while building the '
              'container image. Allowed choices are JAVA8, JAVA11 or gcr.io '
              'path of the specific version of the base image. For JAVA8 and '
              'JAVA11 option, we use the latest base image version to build '
              'the container. You can also provide a specific version from '
              'this link  https://gcr.io/dataflow-templates-base/'),
        type=arg_parsers.RegexpValidator(
            r'^JAVA11$|^JAVA8$|^PYTHON3$|^gcr.io/.*',
            'Must be JAVA11, JAVA8, PYTHON3 or begin with \'gcr.io/\''),
        required=True)

    image_building_args.add_argument(
        '--env',
        metavar='ENV',
        type=arg_parsers.ArgDict(),
        action=arg_parsers.UpdateAction,
        help=
        ('Environment variables to create for the Dockerfile. '
         'You can pass them as a comma separated list or repeat individually '
         'with --env flag. Ex: --env="A=B,C=D" or --env A=B, --env C=D. '
         'When you reference files or directories in environment variables, '
         'specify paths relative to the paths passed via --py-path. Ex: if you '
         'pass --py-path="path/pipeline/", then set '
         'FLEX_TEMPLATE_PYTHON_PY_FILE="pipeline/pipeline.py". '
         'You can find the list of supported environment variables at this '
         'link: https://cloud.google.com/dataflow/docs/guides/templates/'
         'configuring-flex-templates'
         '#setting_required_dockerfile_environment_variables.'),
        required=True)