Code example #1
def AddStatusFlag(parser):
  """Add the status argument.

  Args:
    parser: An argparse parser that you can use to add arguments that go
        on the command line after this command. Positional arguments are
        allowed.
  """
  parser.add_argument('--status',
                      help='Optional status for a composite type.',
                      choices=['DEPRECATED', 'EXPERIMENTAL', 'SUPPORTED'],
                      default=None)


template_flag_arg_type = arg_parsers.RegexpValidator(
    r'.*\.(py|jinja)',
    'must be a python (".py") or jinja (".jinja") file')


def AddTemplateFlag(parser):
  """Add the template path argument.

  Args:
    parser: An argparse parser that you can use to add arguments that go
        on the command line after this command. Positional arguments are
        allowed.
  """
  parser.add_argument('--template',
                      help=('Path to a python or jinja file (local or via URL) '
                            'that defines the composite type. If you want to '
                            'provide a schema, that file must be in the same '
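For reference across all of these examples: arg_parsers.RegexpValidator(pattern, description) builds a callable suitable for argparse's type= parameter, returning the value unchanged when it matches the pattern and raising an error carrying description otherwise. Below is a minimal stand-in sketch using only the standard library (the name regexp_validator is hypothetical, not an SDK symbol; full-match anchoring is assumed to mirror the SDK's behavior):

import argparse
import re


def regexp_validator(pattern, description):
    # Hypothetical stand-in for arg_parsers.RegexpValidator: returns a
    # callable usable as argparse's type= that raises ArgumentTypeError
    # unless the whole value matches the pattern.
    def validate(value):
        if not re.match('^(?:' + pattern + ')$', value):
            raise argparse.ArgumentTypeError(
                'Bad value [{0}]: {1}'.format(value, description))
        return value
    return validate


parser = argparse.ArgumentParser()
parser.add_argument(
    '--template',
    type=regexp_validator(r'.*\.(py|jinja)',
                          'must be a python (".py") or jinja (".jinja") file'))
print(parser.parse_args(['--template', 'my_type.jinja']).template)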
Code example #2
File: flags.py Project: bopopescu/facerec-1
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Common arguments for `gcloud source repos` commands."""
from __future__ import absolute_import
from __future__ import unicode_literals
from googlecloudsdk.calliope import arg_parsers
from googlecloudsdk.command_lib.source import resource_args
from googlecloudsdk.command_lib.util.concepts import concept_parsers

REPO_NAME_VALIDATOR = arg_parsers.RegexpValidator(
    '[A-Za-z0-9_][-_A-Za-z0-9/]{0,127}',
    'repository name may contain between 1 and 128 (inclusive) letters, digits, '
    'hyphens, underscores and slashes.')


def AddPushblockFlags(group):
    """Add pushblock enabled/disabled flags to the given group."""

    group.add_argument('--enable-pushblock',
                       action='store_true',
                       help="""\
Enable PushBlock for all repositories under current project.
PushBlock allows repository owners to block git push transactions containing
private key data.""")

    group.add_argument('--disable-pushblock',
                       action='store_true',
Code example #3
  Args:
    app: App resource for this project
    project: str, The name of the current project.

  Returns:
    storage_util.BucketReference, The bucket to use.
  """
  # Attempt to retrieve the default appspot bucket, if one can be created.
  log.debug('No bucket specified, retrieving default bucket.')
  if not app.codeBucket:
    raise exceptions.DefaultBucketAccessError(project)
  return storage_util.BucketReference.FromBucketUrl(app.codeBucket)


VERSION_TYPE = arg_parsers.RegexpValidator(
    appinfo.MODULE_VERSION_ID_RE_STRING,
    'May only contain lowercase letters, digits, and hyphens. '
    'Must begin and end with a letter or digit. Must not exceed 63 characters.')


def ValidateImageUrl(image_url, services):
  """Check the user-provided image URL.

  Ensures that:
  - it is consistent with the services being deployed (there must be exactly
    one)
  - it is an image in a supported Docker registry

  Args:
    image_url: str, the URL of the image to deploy provided by the user
    services: list, the services to deploy
Code example #4
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""cloud-shell scp command."""

from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals

from googlecloudsdk.calliope import arg_parsers
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.cloud_shell import util
from googlecloudsdk.command_lib.util.ssh import ssh
from googlecloudsdk.core import log

FILE_TYPE = arg_parsers.RegexpValidator(
    r'^(cloudshell|localhost):.*$', 'must start with cloudshell: or localhost:')


def ToFileReference(path, remote):
  if path.startswith('cloudshell:'):
    return ssh.FileReference.FromPath(
        path.replace('cloudshell', str(remote), 1))
  elif path.startswith('localhost:'):
    return ssh.FileReference.FromPath(path.replace('localhost:', '', 1))
  else:
    raise Exception('invalid path: ' + path)


@base.ReleaseTracks(base.ReleaseTrack.ALPHA)
class SshAlpha(base.Command):
  """Copies files between Cloud Shell and the local machine."""
Code example #5
def CommonArgs(parser):
    """Register flags applicable to all template launches.

  Args:
    parser: argparse.ArgumentParser to register arguments with.
  """
    parser.add_argument('--dataflow-kms-key',
                        help='The Cloud KMS key to protect the job resources.')

    parser.add_argument(
        '--disable-public-ips',
        action=actions.StoreBooleanProperty(
            properties.VALUES.dataflow.disable_public_ips),
        help='The Cloud Dataflow workers must not use public IP addresses.')

    parser.add_argument('--max-workers',
                        type=int,
                        help='The maximum number of workers to run.')

    parser.add_argument(
        '--network',
        help='The Compute Engine network for launching instances to '
        'run your pipeline.')

    parser.add_argument('--num-workers',
                        type=int,
                        help='The initial number of workers to use.')

    parser.add_argument('--service-account-email',
                        type=arg_parsers.RegexpValidator(
                            r'.*@.*\..*',
                            'must provide a valid email address'),
                        help='The service account to run the workers as.')

    parser.add_argument(
        '--subnetwork',
        help='The Compute Engine subnetwork for launching instances '
        'to run your pipeline.')

    parser.add_argument(
        '--worker-machine-type',
        help='The type of machine to use for workers. Defaults to '
        'server-specified.')

    group = parser.add_group(mutex=True, help='Worker location options.')

    group.add_argument('--worker-region',
                       type=arg_parsers.RegexpValidator(
                           r'\w+-\w+\d', 'must provide a valid region'),
                       help='The region to run the workers in.')

    group.add_argument('--worker-zone',
                       type=arg_parsers.RegexpValidator(
                           r'\w+-\w+\d-\w', 'must provide a valid zone'),
                       help='The zone to run the workers in.')

    group.add_argument(
        '--zone',
        type=arg_parsers.RegexpValidator(r'\w+-\w+\d-\w',
                                         'must provide a valid zone'),
        help='The zone to run the workers in.',
        action=actions.DeprecationAction(
            '--zone',
            warn=('The {flag_name} option is deprecated; '
                  'use --worker-region or --worker-zone instead.'),
            removed=False))
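Note that the --worker-region and --worker-zone validators above are shape checks only, not lookups against real Compute Engine locations. A quick standalone probe of the same patterns (anchors added explicitly, assuming the SDK validator requires a full match):

import re

region_re = re.compile(r'^(?:\w+-\w+\d)$')      # --worker-region
zone_re = re.compile(r'^(?:\w+-\w+\d-\w)$')     # --worker-zone / --zone
print(bool(region_re.match('us-central1')))     # True
print(bool(zone_re.match('us-central1-a')))     # True
print(bool(zone_re.match('us-central1')))       # False: no zone letter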
Code example #6
File: flags.py Project: hiroshiyoshida1980/jpopjam
def _GetHeaderArgValidator():
    return arg_parsers.RegexpValidator(
        r'^(\S+):(.+)$', 'Must be of the form: "HEADER_FIELD: HEADER_VALUE".')
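A standalone sketch of what the header pattern accepts and how its two groups split a value (plain re; the sample header is made up for illustration — the SDK validator itself only accepts or rejects):

import re

header_re = re.compile(r'^(\S+):(.+)$')
match = header_re.match('X-Goog-User-Project: my-project')
if match:
    field, value = match.group(1), match.group(2).strip()
    print(field, '=', value)  # X-Goog-User-Project = my-project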
Code example #7
File: sql_util.py Project: PinTrees/novelhub
def ArgsForSqlQuery(parser):
    """Register flags for running a SQL query.

  Args:
    parser: The argparse.ArgParser to configure with query arguments.
  """
    job_utils.CommonArgs(parser)

    parser.add_argument('query',
                        metavar='QUERY',
                        help='The SQL query to execute.')

    parser.add_argument(
        '--job-name',
        help='The unique name to assign to the Cloud Dataflow job.',
        required=True)

    parser.add_argument(
        '--region',
        type=arg_parsers.RegexpValidator(r'\w+-\w+\d',
                                         'must provide a valid region'),
        help=('The region ID of the job\'s regional endpoint. ' +
              dataflow_util.DEFAULT_REGION_MESSAGE),
        required=True)

    output_group = parser.add_group(
        required=True, help='The destination(s) for the output of the query.')

    concept_parsers.ConceptParser([
        presentation_specs.ResourcePresentationSpec(
            '--bigquery-table',
            concepts.ResourceSpec(
                'bigquery.tables',
                resource_name='BigQuery table',
                tableId=concepts.ResourceParameterAttributeConfig(
                    name='bigquery-table', help_text='The BigQuery table ID.'),
                projectId=concepts.ResourceParameterAttributeConfig(
                    name='bigquery-project',
                    help_text='The BigQuery project ID.'),
                datasetId=concepts.ResourceParameterAttributeConfig(
                    name='bigquery-dataset',
                    help_text='The BigQuery dataset ID.')),
            'The BigQuery table to write query output to.',
            prefixes=False,
            group=output_group),
        presentation_specs.ResourcePresentationSpec(
            '--pubsub-topic',
            concepts.ResourceSpec(
                'pubsub.projects.topics',
                resource_name='Pub/Sub topic',
                topicsId=concepts.ResourceParameterAttributeConfig(
                    name='pubsub-topic', help_text='The Pub/Sub topic ID.'),
                projectsId=concepts.ResourceParameterAttributeConfig(
                    name='pubsub-project',
                    help_text='The Pub/Sub project ID.')),
            'The Cloud Pub/Sub topic to write query output to.',
            prefixes=False,
            group=output_group),
    ]).AddToParser(parser)

    parser.add_argument(
        '--bigquery-write-disposition',
        help='The behavior of the BigQuery write operation.',
        choices=['write-empty', 'write-truncate', 'write-append'],
        default='write-empty')

    parser.add_argument('--pubsub-create-disposition',
                        help='The behavior of the Pub/Sub create operation.',
                        choices=['create-if-not-found', 'fail-if-not-found'],
                        default='create-if-not-found')

    parameter_group = parser.add_mutually_exclusive_group()

    parameter_group.add_argument(
        '--parameter',
        action='append',
        help='Parameters to pass to a query. Parameters must use the format '
        'name:type:value, for example min_word_count:INT64:250.')

    parameter_group.add_argument(
        '--parameters-file',
        help='Path to a file containing query parameters in JSON format.'
        ' e.g. [{"parameterType": {"type": "STRING"}, "parameterValue":'
        ' {"value": "foo"}, "name": "x"}, {"parameterType": {"type":'
        ' "FLOAT64"}, "parameterValue": {"value": "1.0"}, "name": "y"}]')

    parser.add_argument(
        '--dry-run',
        action='store_true',
        help='Construct but do not run the SQL pipeline, for smoke testing.')

    parser.add_argument(
        '--sql-launcher-template',
        hidden=True,
        help='The full GCS path to a SQL launcher template spec, e.g. '
        'gs://dataflow-sql-templates-us-west1/cloud_dataflow_sql_launcher_template_20200128_RC00/sql_launcher_template. '
        'If not specified, defaults to the latest release in the region. '
        'Note that older releases are not guaranteed to be compatible.')
Code example #8
def AccountNameValidator():
  return arg_parsers.RegexpValidator(
      r'[a-z][a-z0-9\-]{3,61}[a-z0-9]',
      'Service account name must be between 5 and 63 characters (inclusive), '
      'must begin with a lowercase letter, and consist of alphanumeric '
      'characters that can be separated by hyphens.')
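A standalone pass/fail check of the account-name pattern (anchors added, assuming full-match semantics):

import re

name_re = re.compile(r'^(?:[a-z][a-z0-9\-]{3,61}[a-z0-9])$')
for name in ('my-robot', 'ab', '9starts-with-digit'):
    print(name, '->', bool(name_re.match(name)))
# my-robot -> True   (5-63 chars, lowercase start, alphanumeric end)
# ab -> False        (shorter than the 5-character minimum)
# 9starts-with-digit -> False  (must begin with a lowercase letter)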
Code example #9
def _CommonArgs(parser):
    """Registers flags for this command.

  Args:
    parser: argparse.ArgumentParser to register arguments with.
  """
    image_args = parser.add_mutually_exclusive_group(required=True)
    image_building_args = image_args.add_argument_group()
    parser.add_argument(
        'template_file_gcs_path',
        metavar='TEMPLATE_FILE_GCS_PATH',
        help=('The Google Cloud Storage location of the flex template file. '
              'Overrides the file if it already exists.'),
        type=arg_parsers.RegexpValidator(r'^gs://.*',
                                         'Must begin with \'gs://\''))

    image_args.add_argument(
        '--image',
        help=('Path to any image registry location of the prebuilt flex '
              'template image.'))

    parser.add_argument(
        '--image-repository-username-secret-id',
        help=(
            'Secret Manager secret id for the username to authenticate to '
            'private registry. Should be in the format '
            'projects/{project}/secrets/{secret}/versions/{secret_version} or '
            'projects/{project}/secrets/{secret}. If the version is not '
            'provided, the latest version will be used.'),
        type=arg_parsers.RegexpValidator(
            r'^projects\/[^\n\r\/]+\/secrets\/[^\n\r\/]+(\/versions\/[^\n\r\/]+)?$',
            'Must be in the format '
            '\'projects/{project}/secrets/{secret}\' or '
            '\'projects/{project}/secrets/{secret}/versions/{secret_version}\'.'
        ))

    parser.add_argument(
        '--image-repository-password-secret-id',
        help=(
            'Secret Manager secret id for the password to authenticate to '
            'private registry. Should be in the format '
            'projects/{project}/secrets/{secret}/versions/{secret_version} or '
            'projects/{project}/secrets/{secret}. If the version is not '
            'provided, the latest version will be used.'),
        type=arg_parsers.RegexpValidator(
            r'^projects\/[^\n\r\/]+\/secrets\/[^\n\r\/]+(\/versions\/[^\n\r\/]+)?$',
            'Must be in the format '
            '\'projects/{project}/secrets/{secret}\' or '
            '\'projects/{project}/secrets/{secret}/versions/{secret_version}\'.'
        ))

    parser.add_argument(
        '--image-repository-cert-path',
        help=
        ('The full URL to the self-signed certificate of the private registry in '
         'Cloud Storage. For example, gs://mybucket/mycerts/selfsigned.crt. '
         'The certificate provided in Cloud Storage must be DER-encoded and '
         'may be supplied in binary or printable (Base64) encoding. If the '
         'certificate is provided in Base64 encoding, it must be bounded at '
         'the beginning by -----BEGIN CERTIFICATE-----, and must be bounded '
         'at the end by -----END CERTIFICATE-----. If this parameter is '
         'provided, the docker daemon in the template launcher will be '
         'instructed to trust that certificate. '),
        type=arg_parsers.RegexpValidator(r'^gs://.*',
                                         'Must begin with \'gs://\''))

    parser.add_argument('--sdk-language',
                        help=('SDK language of the flex template job.'),
                        choices=['JAVA', 'PYTHON'],
                        required=True)

    parser.add_argument(
        '--metadata-file',
        help='Local path to the metadata json file for the flex template.',
        type=arg_parsers.FileContents())

    parser.add_argument(
        '--print-only',
        help=('Prints the container spec to stdout. Does not save in '
              'Google Cloud Storage.'),
        default=False,
        action=actions.StoreBooleanProperty(
            properties.VALUES.dataflow.print_only))

    parser.add_argument(
        '--staging-location',
        help=('Default Google Cloud Storage location to stage local files.'
              "(Must be a URL beginning with 'gs://'.)"),
        type=arg_parsers.RegexpValidator(r'^gs://.*',
                                         'Must begin with \'gs://\''))

    parser.add_argument(
        '--temp-location',
        help=(
            'Default Google Cloud Storage location to stage temporary files. '
            'If not set, defaults to the value for --staging-location.'
            "(Must be a URL beginning with 'gs://'.)"),
        type=arg_parsers.RegexpValidator(r'^gs://.*',
                                         'Must begin with \'gs://\''))

    parser.add_argument('--service-account-email',
                        type=arg_parsers.RegexpValidator(
                            r'.*@.*\..*',
                            'must provide a valid email address'),
                        help='Default service account to run the workers as.')

    parser.add_argument('--max-workers',
                        type=int,
                        help='Default maximum number of workers to run.')

    parser.add_argument(
        '--disable-public-ips',
        action=actions.StoreBooleanProperty(
            properties.VALUES.dataflow.disable_public_ips),
        help='Cloud Dataflow workers must not use public IP addresses.')

    parser.add_argument('--num-workers',
                        type=int,
                        help='Initial number of workers to use by default.')

    parser.add_argument(
        '--worker-machine-type',
        help='Default type of machine to use for workers. Defaults to '
        'server-specified.')

    parser.add_argument(
        '--subnetwork',
        help='Default Compute Engine subnetwork for launching instances '
        'to run your pipeline.')

    parser.add_argument(
        '--network',
        help='Default Compute Engine network for launching instances to '
        'run your pipeline.')

    parser.add_argument(
        '--dataflow-kms-key',
        help='Default Cloud KMS key to protect the job resources.')

    region_group = parser.add_mutually_exclusive_group()
    region_group.add_argument('--worker-region',
                              help='Default region to run the workers in.')

    region_group.add_argument('--worker-zone',
                              help='Default zone to run the workers in.')

    parser.add_argument(
        '--enable-streaming-engine',
        action=actions.StoreBooleanProperty(
            properties.VALUES.dataflow.enable_streaming_engine),
        help='Enable Streaming Engine for the streaming job by default.')

    parser.add_argument(
        '--gcs-log-dir',
        help=('Google Cloud Storage directory to save build logs.'
              "(Must be a URL beginning with 'gs://'.)"),
        type=arg_parsers.RegexpValidator(r'^gs://.*',
                                         'Must begin with \'gs://\''),
        default=None)

    parser.add_argument('--additional-experiments',
                        metavar='ADDITIONAL_EXPERIMENTS',
                        type=arg_parsers.ArgList(),
                        action=arg_parsers.UpdateAction,
                        help=('Default experiments to pass to the job.'))

    parser.add_argument('--additional-user-labels',
                        metavar='ADDITIONAL_USER_LABELS',
                        type=arg_parsers.ArgDict(),
                        action=arg_parsers.UpdateAction,
                        help=('Default user labels to pass to the job.'))

    image_building_args.add_argument(
        '--image-gcr-path',
        help=('The Google Container Registry or Google Artifact Registry '
              'location to store the flex template image to be built.'),
        type=arg_parsers.RegexpValidator(
            r'^(.*\.){0,1}gcr.io/.*|^(.){2,}-docker.pkg.dev/.*',
            ('Must begin with \'[multi-region.]gcr.io/\' or '
             '\'[region.]-docker.pkg.dev/\'. Please check '
             'https://cloud.google.com/container-registry/docs/overview '
             'for available multi-regions in GCR or '
             'https://cloud.google.com/artifact-registry/docs/repo-organize#'
             'locations for available locations in GAR')),
        required=True)
    pipeline_args = image_building_args.add_mutually_exclusive_group(
        required=True)
    pipeline_args.add_argument(
        '--jar',
        metavar='JAR',
        type=arg_parsers.ArgList(),
        action=arg_parsers.UpdateAction,
        help=('Local path to your dataflow pipeline jar file and all its '
              'dependent jar files required for the flex template classpath. '
              'You can pass them as a comma separated list or repeat '
              'individually with --jar flag. Ex: --jar="code.jar,dep.jar" or '
              '--jar code.jar, --jar dep.jar.'))

    pipeline_args.add_argument(
        '--py-path',
        metavar='PY_PATH',
        type=arg_parsers.ArgList(),
        action=arg_parsers.UpdateAction,
        help=(
            'Local path to your dataflow pipeline python files and all their '
            'dependent files required for the flex template classpath. '
            'You can pass them as a comma separated list or repeat '
            'individually with --py-path flag. '
            'Ex: --py-path="path/pipeline/,path/dependency/" or '
            '--py-path path/pipeline/, --py-path path/dependency/.'))

    image_building_args.add_argument(
        '--flex-template-base-image',
        help=('Flex template base image to be used while building the '
              'container image. Allowed choices are JAVA8, JAVA11 or gcr.io '
              'path of the specific version of the base image. For JAVA8 and '
              'JAVA11 option, we use the latest base image version to build '
              'the container. You can also provide a specific version from '
              'this link  https://gcr.io/dataflow-templates-base/'),
        type=arg_parsers.RegexpValidator(
            r'^JAVA11$|^JAVA8$|^PYTHON3$|^gcr.io/.*',
            'Must be JAVA11, JAVA8, PYTHON3 or begin with \'gcr.io/\''),
        required=True)

    image_building_args.add_argument(
        '--env',
        metavar='ENV',
        type=arg_parsers.ArgDict(),
        action=arg_parsers.UpdateAction,
        help=
        ('Environment variables to create for the Dockerfile. '
         'You can pass them as a comma separated list or repeat individually '
         'with --env flag. Ex: --env="A=B,C=D" or --env A=B, --env C=D. '
         'When you reference files/dirs in env variables, specify the path '
         'relative to the paths passed via --py-path. Ex: if you pass '
         '--py-path="path/pipeline/", then set '
         'FLEX_TEMPLATE_PYTHON_PY_FILE="pipeline/pipeline.py". '
         'You can find the list of supported environment variables at this '
         'link: https://cloud.google.com/dataflow/docs/guides/templates/'
         'configuring-flex-templates'
         '#setting_required_dockerfile_environment_variables.'),
        required=True)
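The Secret Manager secret-id pattern used twice above makes the /versions/... suffix optional; it already carries its own ^...$ anchors. A standalone check:

import re

secret_re = re.compile(
    r'^projects\/[^\n\r\/]+\/secrets\/[^\n\r\/]+(\/versions\/[^\n\r\/]+)?$')
for sid in ('projects/p/secrets/s',
            'projects/p/secrets/s/versions/7',
            'projects/p/secrets'):
    print(sid, '->', bool(secret_re.match(sid)))
# projects/p/secrets/s -> True            (version omitted: latest is used)
# projects/p/secrets/s/versions/7 -> True
# projects/p/secrets -> False             (secret id missing)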
Code example #10
File: run.py Project: piotradamczyk5/gcloud_cli
    def Args(parser):
        """Register flags for this command.

    Args:
      parser: argparse.ArgumentParser to register arguments with.
    """
        parser.add_argument('job_name',
                            metavar='JOB_NAME',
                            help='Unique name to assign to the job.')

        parser.add_argument(
            '--template-file-gcs-location',
            help=('Google Cloud Storage location of the flex template to run. '
                  "(Must be a URL beginning with 'gs://'.)"),
            type=arg_parsers.RegexpValidator(r'^gs://.*',
                                             'Must begin with \'gs://\''),
            required=True)

        parser.add_argument(
            '--region',
            metavar='REGION_ID',
            help=('Region ID of the job\'s regional endpoint. ' +
                  dataflow_util.DEFAULT_REGION_MESSAGE))

        parser.add_argument(
            '--staging-location',
            help=('Google Cloud Storage location to stage temporary files. '
                  "(Must be a URL beginning with 'gs://'.)"),
            type=arg_parsers.RegexpValidator(r'^gs://.*',
                                             'Must begin with \'gs://\''))

        parser.add_argument('--service-account-email',
                            type=arg_parsers.RegexpValidator(
                                r'.*@.*\..*',
                                'must provide a valid email address'),
                            help='Service account to run the workers as.')

        parser.add_argument('--max-workers',
                            type=int,
                            help='Maximum number of workers to run.')

        parser.add_argument(
            '--disable-public-ips',
            action=actions.StoreBooleanProperty(
                properties.VALUES.dataflow.disable_public_ips),
            help='Cloud Dataflow workers must not use public IP addresses.')

        parser.add_argument('--num-workers',
                            type=int,
                            help='Initial number of workers to use.')

        parser.add_argument(
            '--worker-machine-type',
            help='Type of machine to use for workers. Defaults to '
            'server-specified.')

        parser.add_argument(
            '--subnetwork',
            help='Compute Engine subnetwork for launching instances '
            'to run your pipeline.')

        parser.add_argument(
            '--network',
            help='Compute Engine network for launching instances to '
            'run your pipeline.')

        parser.add_argument('--dataflow-kms-key',
                            help='Cloud KMS key to protect the job resources.')

        region_group = parser.add_mutually_exclusive_group()
        region_group.add_argument('--worker-region',
                                  help='Region to run the workers in.')

        region_group.add_argument('--worker-zone',
                                  help='Zone to run the workers in.')

        parser.add_argument(
            '--enable-streaming-engine',
            action=actions.StoreBooleanProperty(
                properties.VALUES.dataflow.enable_streaming_engine),
            help='Enable Streaming Engine for the streaming job.')

        parser.add_argument(
            '--additional-experiments',
            metavar='ADDITIONAL_EXPERIMENTS',
            type=arg_parsers.ArgList(),
            help=('Additional experiments to pass to the job.'))

        parser.add_argument(
            '--additional-user-labels',
            metavar='ADDITIONAL_USER_LABELS',
            type=arg_parsers.ArgDict(),
            action=arg_parsers.UpdateAction,
            help=('Additional user labels to pass to the job.'))

        parser.add_argument('--parameters',
                            metavar='PARAMETERS',
                            type=arg_parsers.ArgDict(),
                            action=arg_parsers.UpdateAction,
                            help=('Parameters to pass to the job.'))
Code example #11
def _CommonArgs(parser, support_max_pods_per_node, release_track):
    """Common arguments that apply to all ReleaseTracks."""
    resource_args.AddEnvironmentResourceArg(parser, 'to create')
    base.ASYNC_FLAG.AddToParser(parser)
    parser.add_argument(
        '--node-count',
        type=int,
        help='The number of nodes to create to run the environment.')
    parser.add_argument(
        '--zone',
        help='The Compute Engine zone in which the environment will '
        'be created. For example `--zone=us-central1-a`.')
    parser.add_argument(
        '--machine-type',
        help='The Compute Engine machine type '
        '(https://cloud.google.com/compute/docs/machine-types) to use for '
        'nodes. For example `--machine-type=n1-standard-1`.')
    parser.add_argument(
        '--disk-size',
        type=arg_parsers.BinarySize(lower_bound='20GB',
                                    upper_bound='64TB',
                                    suggested_binary_size_scales=['GB', 'TB']),
        help='The disk size for each VM node in the environment. The minimum '
        'size is 20GB, and the maximum is 64TB. Specified value must be an '
        'integer multiple of gigabytes. Cannot be updated after the '
        'environment has been created. If units are not provided, defaults to '
        'GB.',
        action=flags.V1ExclusiveStoreAction)
    networking_group = parser.add_group(
        help='Virtual Private Cloud networking')
    networking_group.add_argument(
        '--network',
        required=True,
        help='The Compute Engine Network to which the environment will '
        'be connected. If a \'Custom Subnet Network\' is provided, '
        '`--subnetwork` must be specified as well.')
    networking_group.add_argument(
        '--subnetwork',
        help='The Compute Engine subnetwork '
        '(https://cloud.google.com/compute/docs/subnetworks) to which the '
        'environment will be connected.')
    labels_util.AddCreateLabelsFlags(parser)
    flags.CREATE_ENV_VARS_FLAG.AddToParser(parser)
    # Default is provided by API server.
    parser.add_argument(
        '--service-account',
        help='The Google Cloud Platform service account to be used by the node '
        'VMs. If a service account is not specified, the "default" Compute '
        'Engine service account for the project is used. Cannot be updated.')
    # Default is provided by API server.
    parser.add_argument(
        '--oauth-scopes',
        help='The set of Google API scopes to be made available on all of the '
        'node VMs. Defaults to '
        '[\'https://www.googleapis.com/auth/cloud-platform\']. Cannot be '
        'updated.',
        type=arg_parsers.ArgList(),
        metavar='SCOPE',
        action=arg_parsers.UpdateAction)
    parser.add_argument(
        '--tags',
        help='The set of instance tags applied to all node VMs. Tags are used '
        'to identify valid sources or targets for network firewalls. Each tag '
        'within the list must comply with RFC 1035. Cannot be updated.',
        type=arg_parsers.ArgList(),
        metavar='TAG',
        action=arg_parsers.UpdateAction)

    # API server will validate key/value pairs.
    parser.add_argument('--airflow-configs',
                        help="""\
A list of Airflow software configuration override KEY=VALUE pairs to set. For
information on how to structure KEYs and VALUEs, run
`$ {top_command} help composer environments update`.""",
                        type=arg_parsers.ArgDict(),
                        metavar='KEY=VALUE',
                        action=arg_parsers.UpdateAction)

    parser.add_argument(
        '--python-version',
        type=str,
        choices={
            '2': 'Created environment will use Python 2',
            '3': 'Created environment will use Python 3'
        },
        action=flags.V1ExclusiveStoreAction,
        help='The Python version to be used within the created environment. '
        'Supplied value should represent the desired major Python version. '
        'Cannot be updated.')

    version_group = parser.add_mutually_exclusive_group()
    airflow_version_type = arg_parsers.RegexpValidator(
        r'^(\d+(?:\.\d+(?:\.\d+)?)?)', 'must be in the form X[.Y[.Z]].')
    version_group.add_argument(
        '--airflow-version',
        type=airflow_version_type,
        help="""Version of Apache Airflow to run in the environment.

      Must be of the form `X[.Y[.Z]]`, where `[]` denotes optional fragments.

      The current Cloud Composer version will be used within the created
      environment. The Apache Airflow version is a semantic version or an alias
      in the form of major or major.minor version numbers, resolved to the
      latest matching Apache Airflow version supported in the current Cloud
      Composer version. The resolved version is stored in the created
      environment.""")

    image_version_type = arg_parsers.RegexpValidator(
        r'^composer-(\d+(?:\.\d+\.\d+(?:-[a-z]+\.\d+)?)?|latest)-airflow-(\d+(?:\.\d+(?:\.\d+)?)?)',
        'must be in the form \'composer-A[.B.C[-D.E]]-airflow-X[.Y[.Z]]\'; '
        '\'latest\' may be provided in place of the Cloud Composer version '
        'string. For example: \'composer-latest-airflow-1.10.0\'.')
    version_group.add_argument(
        '--image-version',
        type=image_version_type,
        help="""Version of the image to run in the environment.

      The image version encapsulates the versions of both Cloud Composer
      and Apache Airflow. Must be of the form
      `composer-A[.B.C[-D.E]]-airflow-X[.Y[.Z]]`, where `[]` denotes optional
      fragments.

      The Cloud Composer portion of the image version is a semantic version or
      an alias in the form of major version number or `latest`, resolved to the
      current Cloud Composer version. The Apache Airflow portion of the image
      version is a semantic version or an alias in the form of major or
      major.minor version numbers, resolved to the latest matching Apache
      Airflow version supported in the given Cloud Composer version. The
      resolved versions are stored in the created environment.""")
    flags.AddIpAliasEnvironmentFlags(parser, support_max_pods_per_node)
    flags.AddPrivateIpEnvironmentFlags(parser)
    web_server_group = parser.add_mutually_exclusive_group()
    flags.WEB_SERVER_ALLOW_IP.AddToParser(web_server_group)
    flags.WEB_SERVER_ALLOW_ALL.AddToParser(web_server_group)
    flags.WEB_SERVER_DENY_ALL.AddToParser(web_server_group)
    flags.CLOUD_SQL_MACHINE_TYPE.AddToParser(parser)
    flags.WEB_SERVER_MACHINE_TYPE.AddToParser(parser)
    flags.AddMaintenanceWindowFlagsGroup(parser)

    permission_info = '{} must hold permission {}'.format(
        "The 'Cloud Composer Service Agent' service account",
        "'Cloud KMS CryptoKey Encrypter/Decrypter'")
    kms_resource_args.AddKmsKeyResourceArg(parser,
                                           'environment',
                                           permission_info=permission_info)

    if release_track == base.ReleaseTrack.GA:
        flags.ENVIRONMENT_SIZE_GA.choice_arg.AddToParser(parser)
    elif release_track == base.ReleaseTrack.BETA:
        flags.ENVIRONMENT_SIZE_BETA.choice_arg.AddToParser(parser)
    elif release_track == base.ReleaseTrack.ALPHA:
        flags.ENVIRONMENT_SIZE_ALPHA.choice_arg.AddToParser(parser)

    autoscaling_group_parser = parser.add_argument_group(
        flags.AUTOSCALING_FLAG_GROUP_DESCRIPTION)
    flags.SCHEDULER_CPU.AddToParser(autoscaling_group_parser)
    flags.WORKER_CPU.AddToParser(autoscaling_group_parser)
    flags.WEB_SERVER_CPU.AddToParser(autoscaling_group_parser)
    flags.SCHEDULER_MEMORY.AddToParser(autoscaling_group_parser)
    flags.WORKER_MEMORY.AddToParser(autoscaling_group_parser)
    flags.WEB_SERVER_MEMORY.AddToParser(autoscaling_group_parser)
    flags.SCHEDULER_STORAGE.AddToParser(autoscaling_group_parser)
    flags.WORKER_STORAGE.AddToParser(autoscaling_group_parser)
    flags.WEB_SERVER_STORAGE.AddToParser(autoscaling_group_parser)
    flags.MIN_WORKERS.AddToParser(autoscaling_group_parser)
    flags.MAX_WORKERS.AddToParser(autoscaling_group_parser)
    flags.NUM_SCHEDULERS.AddToParser(autoscaling_group_parser)
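A standalone probe of the --image-version pattern above (a '$' is appended here, assuming the SDK validator requires a full match):

import re

image_re = re.compile(
    r'^composer-(\d+(?:\.\d+\.\d+(?:-[a-z]+\.\d+)?)?|latest)'
    r'-airflow-(\d+(?:\.\d+(?:\.\d+)?)?)$')
for version in ('composer-latest-airflow-1.10.0',
                'composer-2-airflow-2.3',
                'airflow-1.10.0'):
    print(version, '->', bool(image_re.match(version)))
# composer-latest-airflow-1.10.0 -> True
# composer-2-airflow-2.3 -> True   (major-only aliases are accepted)
# airflow-1.10.0 -> False          (Cloud Composer portion is required)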
Code example #12
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Common arguments for `gcloud source repos` commands."""
from googlecloudsdk.calliope import arg_parsers

REPO_NAME_VALIDATOR = arg_parsers.RegexpValidator(
    '[-_A-Za-z0-9]{1,63}',
    'repository name may contain between 1 and 63 (inclusive) letters, digits, '
    'hyphens, and underscores.')
Code example #13
def AddDeviceFlagsToParser(parser, default_for_blocked_flags=True):
    """Add flags for device commands to parser.

  Args:
    parser: argparse parser to which to add these flags.
    default_for_blocked_flags: bool, whether to populate default values for
        device blocked state flags.
  """
    blocked_state_help_text = (
        'If {0}, connections from this device will fail.\n\n'
        'Can be used to temporarily prevent the device from '
        'connecting if, for example, the sensor is generating bad '
        'data and needs maintenance.\n\n')
    enable_device_format_args = ('disabled', )
    blocked_format_args = ('blocked', )
    if not default_for_blocked_flags:
        blocked_state_help_text += (
            '+\n\n'  # '+' here preserves markdown indentation.
            'Use `--{1}` to enable connections and `--{2}` to disable.')
        enable_device_format_args += ('enable-device', 'no-enable-device')
        blocked_format_args += ('no-blocked', 'blocked')
    else:
        blocked_state_help_text += (
            '+\n\n'
            'Connections to the device are not blocked by default.')

    blocked_state_args = parser.add_mutually_exclusive_group()
    # Defaults are set to None because with nested groups, help text isn't being
    # generated correctly.
    blocked_state_args.add_argument(
        '--enable-device',
        default=None,
        action=actions.DeprecationAction(
            '--[no-]enable-device',
            warn=('Flag {flag_name} is deprecated. '
                  'Use --[no-]blocked instead.'),
            action='store_true'),
        help=blocked_state_help_text.format(*enable_device_format_args))
    blocked_state_args.add_argument(
        '--blocked',
        default=None,
        action='store_true',
        help=blocked_state_help_text.format(*blocked_format_args))

    metadata_key_validator = arg_parsers.RegexpValidator(
        r'[a-zA-Z0-9-_]{1,127}',
        'Invalid metadata key. Keys should only contain the following characters '
        '[a-zA-Z0-9-_] and be fewer than 128 bytes in length.')
    base.Argument('--metadata',
                  metavar='KEY=VALUE',
                  type=arg_parsers.ArgDict(key_type=metadata_key_validator),
                  help="""\
The metadata key/value pairs assigned to devices. This metadata is not
interpreted or indexed by Cloud IoT Core. It can be used to add contextual
information for the device.

Keys should only contain the following characters ```[a-zA-Z0-9-_]``` and be
fewer than 128 bytes in length. Values are free-form strings. Each value must
be fewer than or equal to 32 KB in size.

The total size of all keys and values must be less than 256 KB, and the
maximum number of key-value pairs is 500.
""").AddToParser(parser)

    base.Argument(
        '--metadata-from-file',
        metavar='KEY=PATH',
        type=arg_parsers.ArgDict(key_type=metadata_key_validator),
        help=(
            'Same as --metadata, but the metadata values will be read from the '
            'file specified by path.')).AddToParser(parser)
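A standalone pass/fail check of the metadata-key pattern (anchors added, assuming full-match semantics):

import re

key_re = re.compile(r'^(?:[a-zA-Z0-9-_]{1,127})$')
for key in ('sensor-id', 'firmware_rev', 'bad key!'):
    print(key, '->', bool(key_re.match(key)))
# sensor-id -> True
# firmware_rev -> True
# bad key! -> False   (space and '!' are outside the allowed set)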
Code example #14
def _CommonArgs(parser):
  """Common arguments that apply to all ReleaseTracks."""
  resource_args.AddEnvironmentResourceArg(parser, 'to create')
  base.ASYNC_FLAG.AddToParser(parser)
  parser.add_argument(
      '--node-count',
      type=int,
      help='The number of nodes to create to run the environment.')
  parser.add_argument(
      '--zone',
      help='The Compute Engine zone in which the environment will '
      'be created. For example `--zone=us-central1-a`.')
  parser.add_argument(
      '--machine-type',
      help='The Compute Engine machine type '
      '(https://cloud.google.com/compute/docs/machine-types) to use for '
      'nodes. For example `--machine-type=n1-standard-1`.')
  parser.add_argument(
      '--disk-size',
      default='100GB',
      type=arg_parsers.BinarySize(
          lower_bound='20GB',
          upper_bound='64TB',
          suggested_binary_size_scales=['GB', 'TB']),
      help='The disk size for each VM node in the environment. The minimum '
      'size is 20GB, and the maximum is 64TB. Specified value must be an '
      'integer multiple of gigabytes. Cannot be updated after the '
      'environment has been created. If units are not provided, defaults to '
      'GB.')
  networking_group = parser.add_group(help='Virtual Private Cloud networking')
  networking_group.add_argument(
      '--network',
      required=True,
      help='The Compute Engine Network to which the environment will '
      'be connected. If a \'Custom Subnet Network\' is provided, '
      '`--subnetwork` must be specified as well.')
  networking_group.add_argument(
      '--subnetwork',
      help='The Compute Engine subnetwork '
      '(https://cloud.google.com/compute/docs/subnetworks) to which the '
      'environment will be connected.')
  labels_util.AddCreateLabelsFlags(parser)
  flags.CREATE_ENV_VARS_FLAG.AddToParser(parser)
  # Default is provided by API server.
  parser.add_argument(
      '--service-account',
      help='The Google Cloud Platform service account to be used by the node '
      'VMs. If a service account is not specified, the "default" Compute '
      'Engine service account for the project is used. Cannot be updated.')
  # Default is provided by API server.
  parser.add_argument(
      '--oauth-scopes',
      help='The set of Google API scopes to be made available on all of the '
      'node VMs. Defaults to '
      '[\'https://www.googleapis.com/auth/cloud-platform\']. Cannot be '
      'updated.',
      type=arg_parsers.ArgList(),
      metavar='SCOPE',
      action=arg_parsers.UpdateAction)
  parser.add_argument(
      '--tags',
      help='The set of instance tags applied to all node VMs. Tags are used '
      'to identify valid sources or targets for network firewalls. Each tag '
      'within the list must comply with RFC 1035. Cannot be updated.',
      type=arg_parsers.ArgList(),
      metavar='TAG',
      action=arg_parsers.UpdateAction)

  # API server will validate key/value pairs.
  parser.add_argument(
      '--airflow-configs',
      help="""\
A list of Airflow software configuration override KEY=VALUE pairs to set. For
information on how to structure KEYs and VALUEs, run
`$ {top_command} help composer environments update`.""",
      type=arg_parsers.ArgDict(),
      metavar='KEY=VALUE',
      action=arg_parsers.UpdateAction)

  parser.add_argument(
      '--python-version',
      type=str,
      choices={
          '2': 'Created environment will use Python 2',
          '3': 'Created environment will use Python 3'
      },
      help='The Python version to be used within the created environment. '
      'Supplied value should represent the desired major Python version. '
      'Cannot be updated.')

  version_group = parser.add_mutually_exclusive_group()
  airflow_version_type = arg_parsers.RegexpValidator(
      r'^(\d+\.\d+(?:\.\d+)?)', 'must be in the form X.Y[.Z].')
  version_group.add_argument(
      '--airflow-version',
      type=airflow_version_type,
      help="""Version of Airflow to run in the environment.

      Must be of the form `X.Y[.Z]`.

      The latest supported Cloud Composer version will be used within
      the created environment.""")

  image_version_type = arg_parsers.RegexpValidator(
      r'^composer-(\d+\.\d+\.\d+|latest)-airflow-(\d+\.\d+(?:\.\d+)?)',
      'must be in the form \'composer-A.B.C-airflow-X.Y[.Z]\'; '
      '\'latest\' may be provided in place of the Cloud Composer version '
      'string. For example: \'composer-latest-airflow-1.10.0\'.')
  version_group.add_argument(
      '--image-version',
      type=image_version_type,
      help="""Version of the image to run in the environment.

      The image version encapsulates the versions of both Cloud Composer
      and Apache Airflow. Must be of the form `composer-A.B.C-airflow-X.Y[.Z]`.

      The Cloud Composer and Airflow versions are semantic versions.
      `latest` can be provided instead of an explicit Cloud Composer
      version number indicating that the server will replace `latest`
      with the current Cloud Composer version. For the Apache Airflow
      portion, the patch version can be omitted and the current
      version will be selected. The version numbers that are used will
      be stored.""")
Code example #15
File: build.py Project: piotradamczyk5/gcloud_cli
def _CommonArgs(parser):
    """Register flags for this command.

  Args:
    parser: argparse.ArgumentParser to register arguments with.
  """
    image_args = parser.add_mutually_exclusive_group(required=True)
    image_building_args = image_args.add_argument_group()
    parser.add_argument(
        'template_file_gcs_path',
        metavar='TEMPLATE_FILE_GCS_PATH',
        help=('The Google Cloud Storage location of the flex template file. '
              'Overrides the file if it already exists.'),
        type=arg_parsers.RegexpValidator(r'^gs://.*',
                                         'Must begin with \'gs://\''))

    image_args.add_argument(
        '--image',
        help=('Path to any image registry location of the prebuilt flex '
              'template image.'))

    parser.add_argument('--sdk-language',
                        help=('SDK language of the flex template job.'),
                        choices=['JAVA', 'PYTHON'],
                        required=True)

    parser.add_argument(
        '--metadata-file',
        help='Local path to the metadata json file for the flex template.',
        type=arg_parsers.FileContents())

    parser.add_argument(
        '--print-only',
        help=('Prints the container spec to stdout. Does not save in '
              'Google Cloud Storage.'),
        default=False,
        action=actions.StoreBooleanProperty(
            properties.VALUES.dataflow.print_only))

    image_building_args.add_argument(
        '--image-gcr-path',
        help=('The Google Container Registry location to store the flex '
              'template image to be built.'),
        type=arg_parsers.RegexpValidator(r'^gcr.io/.*',
                                         'Must begin with \'gcr.io/\''),
        required=True)

    image_building_args.add_argument(
        '--jar',
        metavar='JAR',
        type=arg_parsers.ArgList(),
        action=arg_parsers.UpdateAction,
        help=('Local path to your dataflow pipeline jar file and all its '
              'dependent jar files required for the flex template classpath. '
              'You can pass them as a comma separated list or repeat '
              'individually with --jar flag. Ex: --jar="code.jar,dep.jar" or '
              '--jar code.jar, --jar dep.jar.'),
        required=True)

    image_building_args.add_argument(
        '--flex-template-base-image',
        help=('Flex template base image to be used while building the '
              'container image. Allowed choices are JAVA8, JAVA11 or gcr.io '
              'path of the specific version of the base image. For JAVA8 and '
              'JAVA11 option, we use the latest base image version to build '
              'the container. You can also provide a specific version from '
              'this link  https://gcr.io/dataflow-templates-base/'),
        type=arg_parsers.RegexpValidator(
            r'^JAVA11$|^JAVA8$|^gcr.io/.*',
            'Must be JAVA11 or JAVA8 or begin with \'gcr.io/\''),
        required=True)

    image_building_args.add_argument(
        '--env',
        metavar='ENV',
        type=arg_parsers.ArgDict(),
        action=arg_parsers.UpdateAction,
        help=
        ('Environment variables to create for the Dockerfile. '
         'You can pass them as a comma separated list or repeat individually '
         'with --env flag. Ex: --env="A=B,C=D" or --env A=B, --env C=D. '
         'You can find the list of supported environment variables at this '
         'link: https://cloud.google.com/dataflow/docs/guides/templates/'
         'troubleshooting-flex-templates'
         '#setting_required_dockerfile_environment_variables'),
        required=True)
Code example #16
File: flags.py Project: bopopescu/GoogleAPI
import re

from googlecloudsdk.calliope import actions
from googlecloudsdk.calliope import arg_parsers
from googlecloudsdk.calliope import base
from googlecloudsdk.calliope import exceptions
from googlecloudsdk.command_lib.composer import parsers
from googlecloudsdk.command_lib.composer import util as command_util
from googlecloudsdk.command_lib.util.args import labels_util
from googlecloudsdk.core import properties

import ipaddress
import six


AIRFLOW_VERSION_TYPE = arg_parsers.RegexpValidator(
    r'^(\d+\.\d+(?:\.\d+)?)', 'must be in the form X.Y[.Z].')

IMAGE_VERSION_TYPE = arg_parsers.RegexpValidator(
    r'^composer-(\d+\.\d+\.\d+|latest)-airflow-(\d+\.\d+(?:\.\d+)?)',
    'must be in the form \'composer-A.B.C-airflow-X.Y[.Z]\'; '
    '\'latest\' may be provided in place of the Cloud Composer version '
    'string. For example: \'composer-latest-airflow-1.10.0\'.')

# TODO(b/118349075): Refactor global Argument definitions to be factory methods.
ENVIRONMENT_NAME_ARG = base.Argument(
    'name', metavar='NAME', help='The name of an environment.')

MULTI_ENVIRONMENT_NAME_ARG = base.Argument(
    'name', metavar='NAME', nargs='+', help='The name of an environment.')

MULTI_OPERATION_NAME_ARG = base.Argument(
Code example #17
File: run.py Project: martcatnip/sturdy-potato-story
def _CommonArgs(parser):
    """Register flags for this command.

  Args:
    parser: argparse.ArgumentParser to register arguments with.

  """
    parser.add_argument('job_name',
                        metavar='JOB_NAME',
                        help='The unique name to assign to the job.')

    parser.add_argument(
        '--gcs-location',
        help=('The Google Cloud Storage location of the job template to run. '
              "(Must be a URL beginning with 'gs://'.)"),
        type=arg_parsers.RegexpValidator(r'^gs://.*',
                                         'Must begin with \'gs://\''),
        required=True)

    parser.add_argument(
        '--staging-location',
        help=('The Google Cloud Storage location to stage temporary files. '
              "(Must be a URL beginning with 'gs://'.)"),
        type=arg_parsers.RegexpValidator(r'^gs://.*',
                                         'Must begin with \'gs://\''))

    parser.add_argument('--zone',
                        type=arg_parsers.RegexpValidator(
                            r'\w+-\w+\d-\w', 'must provide a valid zone'),
                        help='The zone to run the workers in.')

    parser.add_argument('--service-account-email',
                        type=arg_parsers.RegexpValidator(
                            r'.*@.*\..*',
                            'must provide a valid email address'),
                        help='The service account to run the workers as.')

    parser.add_argument('--max-workers',
                        type=int,
                        help='The maximum number of workers to run.')

    parser.add_argument('--parameters',
                        metavar='PARAMETERS',
                        type=arg_parsers.ArgDict(),
                        action=arg_parsers.UpdateAction,
                        help='The parameters to pass to the job.')

    # TODO(b/139889563): Mark as required when default region is removed
    parser.add_argument(
        '--region',
        metavar='REGION_ID',
        help=('The region ID of the job\'s regional endpoint. ' +
              dataflow_util.DEFAULT_REGION_MESSAGE))

    parser.add_argument(
        '--disable-public-ips',
        action=actions.StoreBooleanProperty(
            properties.VALUES.dataflow.disable_public_ips),
        help='The Cloud Dataflow workers must not use public IP addresses.')

    parser.add_argument('--dataflow-kms-key',
                        help='The Cloud KMS key to protect the job resources.')
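Finally, the service-account email pattern shared by several examples above is deliberately loose: any string with an '@' followed somewhere later by a '.' passes. A standalone check (anchors added, assuming full-match semantics):

import re

email_re = re.compile(r'^(?:.*@.*\..*)$')
for addr in ('worker@my-project.iam.gserviceaccount.com', 'not-an-email'):
    print(addr, '->', bool(email_re.match(addr)))
# worker@my-project.iam.gserviceaccount.com -> True
# not-an-email -> False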