def _DeclareDnsFlags():
  """Declare global command-line flags for the DNS API client.

  Idempotent: if the flags are already registered (detected via the
  presence of 'api_endpoint'), this returns immediately, so it is safe
  to call more than once.
  """
  if 'api_endpoint' in flags.FLAGS:
    return
  flags.DEFINE_string(
      'api_endpoint', u'https://www.googleapis.com/dns/v1/',
      'URL of the API endpoint to use.', short_name='dns_url')
  flags.DEFINE_string(
      'history_file', u'~/.dns.v1.history',
      'File with interactive shell history.')
  flags.DEFINE_multistring(
      'add_header', [],
      'Additional http headers (as key=value strings). '
      'Can be specified multiple times.')
  flags.DEFINE_string(
      'service_account_json_keyfile', '',
      'Filename for a JSON service account key downloaded'
      ' from the Developer Console.')
  flags.DEFINE_enum(
      'alt', u'json', [u'json'], u'Data format for the response.')
  flags.DEFINE_string(
      'fields', None,
      u'Selector specifying which fields to include in a partial response.')
  flags.DEFINE_string(
      'key', None,
      u'API key. Your API key identifies your project and provides you with '
      u'API access, quota, and reports. Required unless you provide an OAuth '
      u'2.0 token.')
  flags.DEFINE_string(
      'oauth_token', None, u'OAuth 2.0 token for the current user.')
  # Fix: a boolean flag's default should be a real bool, not the string
  # 'True' (gflags happened to coerce it, but the intent is clearer this way).
  flags.DEFINE_boolean(
      'prettyPrint', True,
      u'Returns response with indentations and line breaks.')
  flags.DEFINE_string(
      'quotaUser', None,
      u'Available to use for quota purposes for server-side applications. Can'
      u' be any arbitrary string assigned to a user, but should not exceed 40'
      u' characters. Overrides userIp if both are provided.')
  flags.DEFINE_string(
      'trace', None,
      'A tracing token of the form "token:<tokenid>" to include in api '
      'requests.')
  flags.DEFINE_string(
      'userIp', None,
      u'IP address of the site where the request originates. Use this if you '
      u'want to enforce per-user limits.')
def __init__(self, flag_values):
  """Define the VM metadata flags and remember the flag registry.

  Args:
    flag_values: The flags.FlagValues registry to define the flags in.
  """
  inline_help = ('Metadata to be made available within the VM '
                 'environment via the local metadata server. This '
                 'should be in the form key:value. Metadata keys '
                 'must be unique')
  file_help = ('Metadata to be made available within the VM '
               'environment via the local metadata server. The '
               'value is loaded from a file. This should be in '
               'the form key:filename. Metadata keys must be '
               'unique')
  flags.DEFINE_multistring('metadata', [], inline_help,
                           flag_values=flag_values)
  flags.DEFINE_multistring('metadata_from_file', [], file_help,
                           flag_values=flag_values)
  self._flags = flag_values
def _DeclareIamFlags():
  """Declare global command-line flags for the IAM API client.

  Idempotent: if the flags are already registered (detected via the
  presence of 'api_endpoint'), this returns immediately, so it is safe
  to call more than once.
  """
  if 'api_endpoint' in flags.FLAGS:
    return
  flags.DEFINE_string('api_endpoint', u'https://iam.googleapis.com/',
                      'URL of the API endpoint to use.', short_name='iam_url')
  flags.DEFINE_string('history_file', u'~/.iam.v1.history',
                      'File with interactive shell history.')
  flags.DEFINE_multistring(
      'add_header', [],
      'Additional http headers (as key=value strings). '
      'Can be specified multiple times.')
  flags.DEFINE_string(
      'service_account_json_keyfile', '',
      'Filename for a JSON service account key downloaded'
      ' from the Developer Console.')
  flags.DEFINE_enum('f__xgafv', u'_1', [u'_1', u'_2'], u'V1 error format.')
  flags.DEFINE_string('access_token', None, u'OAuth access token.')
  flags.DEFINE_enum('alt', u'json', [u'json', u'media', u'proto'],
                    u'Data format for response.')
  flags.DEFINE_string('bearer_token', None, u'OAuth bearer token.')
  flags.DEFINE_string('callback', None, u'JSONP')
  flags.DEFINE_string(
      'fields', None,
      u'Selector specifying which fields to include in a partial response.')
  flags.DEFINE_string(
      'key', None,
      u'API key. Your API key identifies your project and provides you with '
      u'API access, quota, and reports. Required unless you provide an OAuth '
      u'2.0 token.')
  flags.DEFINE_string('oauth_token', None,
                      u'OAuth 2.0 token for the current user.')
  # Fix: boolean flags take a real bool default, not the string 'True'.
  flags.DEFINE_boolean('pp', True, u'Pretty-print response.')
  flags.DEFINE_boolean(
      'prettyPrint', True,
      u'Returns response with indentations and line breaks.')
  flags.DEFINE_string(
      'quotaUser', None,
      u'Available to use for quota purposes for server-side applications. Can'
      u' be any arbitrary string assigned to a user, but should not exceed 40'
      u' characters.')
  flags.DEFINE_string(
      'trace', None,
      'A tracing token of the form "token:<tokenid>" to include in api '
      'requests.')
  # Fix: this help string was split by a stray line break in the source;
  # rejoined into a single literal.
  flags.DEFINE_string(
      'uploadType', None,
      u'Legacy upload protocol for media (e.g. "media", "multipart").')
  flags.DEFINE_string(
      'upload_protocol', None,
      u'Upload protocol for media (e.g. "raw", "multipart").')
def make_gflags():
  """Declare one gflag per entry in DEBIAN_FIELDS.

  For each field tuple, the flag name is the first element (the content
  header name) lowercased with '-' replaced by '_'. If the tuple has a
  fourth element it is used as the default: a list default produces a
  multistring flag, anything else a string flag; without one the default
  is None. Fields whose second element is truthy are marked required.
  """
  for field in DEBIAN_FIELDS:
    fieldname = field[0].replace('-', '_').lower()
    msg = 'The value for the %s content header entry.' % field[0]
    if len(field) > 3:
      # isinstance is the idiomatic type test (and tolerates subclasses),
      # unlike an identity comparison on type().
      if isinstance(field[3], list):
        gflags.DEFINE_multistring(fieldname, field[3], msg)
      else:
        gflags.DEFINE_string(fieldname, field[3], msg)
    else:
      gflags.DEFINE_string(fieldname, None, msg)
    if field[1]:
      gflags.MarkFlagAsRequired(fieldname)
def testFlagHelpInXML_MultiString(self):
  """Checks the XML help emitted for a multistring flag.

  The <meaning> element should carry the auto-appended "repeat this
  option" note, and default/current should render as Python list reprs.
  """
  gflags.DEFINE_multistring('to_delete', ['a.cc', 'b.h'], 'Files to delete',
                            flag_values=self.fv)
  # NOTE(review): the leading whitespace inside these literals looks like it
  # may have been collapsed by whitespace mangling — verify against the
  # original test file before relying on exact spacing.
  expected_output = (
      ' <flag>\n'
      ' <file>tool</file>\n'
      ' <name>to_delete</name>\n'
      ' <meaning>Files to delete;\n '
      'repeat this option to specify a list of values</meaning>\n'
      ' <default>[\'a.cc\', \'b.h\']</default>\n'
      ' <current>[\'a.cc\', \'b.h\']</current>\n'
      ' <type>multi string</type>\n'
      ' </flag>\n')
  self._CheckFlagHelpInXML('to_delete', 'tool', expected_output)
def __init__(self, name, flag_values):
  """Register the ssh-related flags for this command.

  Args:
    name: Command name, forwarded to the base class.
    flag_values: The flags.FlagValues registry to define flags in.
  """
  super(SshInstanceBase, self).__init__(name, flag_values)
  define_int = flags.DEFINE_integer
  define_multi = flags.DEFINE_multistring
  define_int('ssh_port', 22, 'TCP port to connect to',
             flag_values=flag_values)
  define_multi('ssh_arg', [], 'Additional arguments to pass to ssh',
               flag_values=flag_values)
  define_int(
      'ssh_key_push_wait_time',
      300,  # 5 minutes
      'Number of seconds to wait for updates to project-wide ssh keys '
      'to cascade to the instances within the project',
      flag_values=flag_values)
def __init__(self, name, flag_values, **kwargs):
  """Register the download-related command-line flags.

  Args:
    name: Command name, forwarded to the base class.
    flag_values: The flags.FlagValues registry to define flags in.
    **kwargs: Additional keyword arguments forwarded to the base class.
  """
  super(DownloadCommand, self).__init__(name, flag_values, **kwargs)
  # Command-specific flags.
  flags.DEFINE_multistring('file_path', [],
                           'Remote file to download.',
                           flag_values=flag_values)
  flags.DEFINE_string('dir_path', None,
                      'Remote directory to download.',
                      flag_values=flag_values)
  flags.DEFINE_bool('recursive', False,
                    'Downloads from a directory recursively',
                    flag_values=flag_values)
  # Fix: typo in user-facing help text ('recusion' -> 'recursion').
  flags.DEFINE_integer(
      'depth', None,
      'Specifies recursion depth if "recursive" is specified',
      flag_values=flag_values)
short_name='d') gflags.DEFINE_boolean('debug', False, 'Log folder contents as being fetched') gflags.DEFINE_string('logfile', 'drive.log', 'Location of file to write the log') gflags.DEFINE_string('drive_id', 'root', 'ID of the folder whose contents are to be fetched') gflags.DEFINE_enum( 'export', 'OO', ['PDF', 'OO', 'MSO'], 'Export format. Export to PDF, OpenOffice, or MS Office format') gflags.DEFINE_boolean('from_folders_list', False, 'Download only the folders in folders.json') gflags.DEFINE_boolean("list_folder_names", False, 'List of all the folders in folders.json') gflags.DEFINE_multistring( 'add_folder', ['default_name', 'default_drive_id'], '''Adds folder to folders.json. Have to use it two times with the name of the folder and then with drive id of the folder. Usage : --add_folder 'name of the folder' --add_folder 'folder id'.''') def export_type(): #Defining a "export_format" dictionary: # *key = source mimeType of the Gdoc # *value = a list of the target mimeType (index 0) + the target file extension (index 1) #Values change according to the "export format" defined by the user. #Maybe is there a cleaner way to do this? if FLAGS['export'].value == 'MSO': return { 'application/vnd.google-apps.document': ('application/vnd.openxmlformats-officedocument.wordprocessingml.document',
import io
import json
import os
import os.path
import subprocess
import sys
import re
import tarfile
import tempfile

from container import archive

# Flags describing the output tarball and the files/dirs to place in it.
gflags.DEFINE_string('output', None, 'The output file, mandatory')
gflags.MarkFlagAsRequired('output')
gflags.DEFINE_multistring('file', [], 'A file to add to the layer')
gflags.DEFINE_string('manifest', None,
                     'JSON manifest of contents to add to the layer')
gflags.DEFINE_multistring('empty_file', [],
                          'An empty file to add to the layer')
gflags.DEFINE_multistring('empty_dir', [], 'An empty dir to add to the layer')
gflags.DEFINE_string(
    'mode', None, 'Force the mode on the added files (in octal).')
gflags.DEFINE_string(
    'mtime', None,
    'Set mtime on tar file entries. May be an integer or the'
    ' value "portable", to get the value 2000-01-01, which is'
    ' usable with non *nix OSes.')
]) flags.DEFINE_boolean( 'strict', False, 'Whether to validate against the stricter Closure style. ' 'This includes ' + (', '.join(Rule.CLOSURE_RULES)) + '.') flags.DEFINE_multistring( 'jslint_error', [], 'List of specific lint errors to check. Here is a list' ' of accepted values:\n' ' - ' + Rule.ALL + ': enables all following errors.\n' ' - ' + Rule.BLANK_LINES_AT_TOP_LEVEL + ': validates' 'number of blank lines between blocks at top level.\n' ' - ' + Rule.INDENTATION + ': checks correct ' 'indentation of code.\n' ' - ' + Rule.WELL_FORMED_AUTHOR + ': validates the ' '@author JsDoc tags.\n' ' - ' + Rule.NO_BRACES_AROUND_INHERIT_DOC + ': ' 'forbids braces around @inheritdoc JsDoc tags.\n' ' - ' + Rule.BRACES_AROUND_TYPE + ': enforces braces ' 'around types in JsDoc tags.\n' ' - ' + Rule.OPTIONAL_TYPE_MARKER + ': checks correct ' 'use of optional marker = in param types.\n' ' - ' + Rule.UNUSED_PRIVATE_MEMBERS + ': checks for ' 'unused private variables.\n' ' - ' + Rule.UNUSED_LOCAL_VARIABLES + ': checks for ' 'unused local variables.\n') def ShouldCheck(rule): """Returns whether the optional rule should be checked. Computes different flags (strict, jslint_error, jslint_noerror) to find out if
"launch", "local", "The launch mode. See mincepie.launcher.launch() for details.") gflags.DEFINE_integer( "num_clients", 1, "The number of clients. Does not apply in the case of MPI.") gflags.RegisterValidator('num_clients', lambda x: x > 0, message='--num_clients must be positive.') # slurm flags gflags.DEFINE_string("slurm_shebang", "#!/bin/bash", "The shebang of the slurm batch script") gflags.DEFINE_string("slurm_python_bin", "python", "The command to call python") gflags.DEFINE_string("sbatch_bin", "sbatch", "The command to call sbatch") gflags.DEFINE_string("scancel_bin", "scancel", "The command to call scancel") gflags.DEFINE_multistring("sbatch_args", [], "The sbatch arguments") # easy access to FLAGS FLAGS = gflags.FLAGS def process_argv(argv): """processes the arguments using gflags """ try: # parse flags inputlist = gflags.FLAGS(argv) except gflags.FlagsError as message: print('%s\\nUsage: %s ARGS\\n%s' % (message, argv[0], gflags.FLAGS)) sys.exit(1) # set some common stuff logging.basicConfig(level=FLAGS.loglevel)
import gflags

from permissions_lib import define
from permissions_lib import use
from permissions_lib import utils
from pylib import app
from pylib import db

FLAGS = gflags.FLAGS

# Flags selecting where permissions are read from and published to.
gflags.DEFINE_string('db', None, 'DB spec to read from/push to')
gflags.DEFINE_integer('push_duration', None,
                      'Staggered push duration (seconds)')
gflags.DEFINE_multistring(
    'file', None,
    'File containing permissions settings on which to '
    'operate. The file may contain one or more named sets '
    'of permissions.')
# Key management flags.
gflags.DEFINE_string('public_keyfile', None,
                     'File to write/read public encryption key to/from')
gflags.DEFINE_string('private_keyfile', None,
                     'File to write/read private encryption key to/from')
gflags.DEFINE_integer('key_bits', None, 'Size (in bits) of new RSA key')
gflags.DEFINE_string('set', None, 'Set name to publish to')
gflags.DEFINE_string('source_set', None,
                     'Set name to publish from; defaults to target set')
gflags.DEFINE_boolean(
    'incremental', True,
    'Write only differences to existing tables, instead of '
    'clearing tables and starting from scratch')
gflags.DEFINE_multistring('comment_name', [], 'Comment name to retrieve.')
def __init__(self, name, flag_values):
  """Register the command-line flags for creating a new instance.

  Args:
    name: Command name, forwarded to the base class.
    flag_values: The flags.FlagValues registry to define flags in.
  """
  super(AddInstance, self).__init__(name, flag_values)
  flags.DEFINE_string('description', '', 'Instance description',
                      flag_values=flag_values)
  flags.DEFINE_string(
      'image', None,
      'Image name. To get a list of images built by Google, '
      'run \'gcutil listimages --project=projects/google\'. '
      'To get a list of images you have built, run \'gcutil '
      'listimages\'.',
      flag_values=flag_values)
  flags.DEFINE_string(
      'machine_type', None,
      'Machine type name. To get a list of available machine '
      'types, run \'gcutil listmachinetypes\'.',
      flag_values=flag_values)
  flags.DEFINE_string('network', 'default',
                      'The network to which to attach the instance.',
                      flag_values=flag_values)
  flags.DEFINE_string(
      'internal_ip_address', '',
      'The internal (within the specified network) IP '
      'address for the instance; if not set the instance '
      'will be assigned an appropriate address.',
      flag_values=flag_values)
  flags.DEFINE_string(
      'external_ip_address', self.EPHEMERAL_ACCESS_CONFIG_NAT_IP,
      'The external NAT IP of the new instance. The default '
      'value "ephemeral" indicates the service should choose '
      'an available ephemeral IP. The value "none" (or an '
      'empty string) indicates no external IP will be '
      'assigned to the new instance. If an explicit IP is '
      'given, that IP must be reserved by the project and '
      'not yet assigned to another instance.',
      flag_values=flag_values)
  flags.DEFINE_multistring(
      'disk', [],
      'The name of a disk to be attached to the '
      'instance. The name may be followed by a '
      'comma-separated list of name=value pairs '
      'specifying options. Legal option names are '
      '\'deviceName\', to specify the disk\'s device '
      'name, and \'mode\', to indicate whether the disk '
      'should be attached READ_WRITE (the default) or '
      'READ_ONLY',
      flag_values=flag_values)
  # Fix: this help string was split by a stray line break in the source;
  # rejoined into a single literal.
  flags.DEFINE_boolean(
      'use_compute_key', False,
      'Whether or not to include the default '
      'Google Compute Engine ssh key as one of the '
      'authorized ssh keys for the created instance. This '
      'has the side effect of disabling project-wide ssh '
      'key management for the instance.',
      flag_values=flag_values)
  flags.DEFINE_boolean(
      'add_compute_key_to_project', None,
      'Whether or not to add the default Google Compute '
      'Engine ssh key as one of the authorized ssh keys '
      'for the project. If the default key has already '
      'been added to the project, then this will have no '
      'effect. The default behavior is to add the key to '
      'the project if no instance-specific keys are '
      'defined.',
      flag_values=flag_values)
  flags.DEFINE_list(
      'authorized_ssh_keys', [],
      'Fix the list of user/key-file pairs to the specified '
      'entries, disabling project-wide key management for this '
      'instance. These are specified as a comma separated list '
      'of colon separated entries: '
      'user1:keyfile1,user2:keyfile2,...',
      flag_values=flag_values)
  flags.DEFINE_string('zone', None, 'The zone for this instance.',
                      flag_values=flag_values)
  flags.DEFINE_string(
      'service_account', 'default',
      'The service account whose credentials are to be made'
      ' available for this instance.',
      flag_values=flag_values)
  flags.DEFINE_list(
      'service_account_scopes', [],
      'The scopes of credentials of the above service'
      ' account that are to be made available for this'
      ' instance (comma separated). There are also a set of '
      'scope aliases supported: %s'
      % ', '.join(sorted(scopes.SCOPE_ALIASES.keys())),
      flag_values=flag_values)
  flags.DEFINE_boolean(
      'wait_until_running', False,
      'Whether the program should wait until the instance is'
      ' in running state.',
      flag_values=flag_values)
  flags.DEFINE_list('tags', [],
                    'A set of tags applied to this instance. Used for '
                    'filtering and to configure network firewall rules '
                    '(comma separated).',
                    flag_values=flag_values)
  self._metadata_flags_processor = metadata.MetadataFlagsProcessor(
      flag_values)
"""ADB debugging binary. Call it similar to how you call android's adb. Takes either --serial or --port_path to connect to a device. """ import os import sys import gflags import adb_commands import common_cli gflags.ADOPT_module_key_flags(common_cli) gflags.DEFINE_multistring('rsa_key_path', '~/.android/adbkey', 'RSA key(s) to use') gflags.DEFINE_integer( 'auth_timeout_s', 60, 'Seconds to wait for the dialog to be accepted when using ' 'authenticated ADB.') FLAGS = gflags.FLAGS def GetRSAKwargs(): if FLAGS.rsa_key_path: return { 'rsa_keys': [ adb_commands.M2CryptoSigner(os.path.expanduser(path)) for path in FLAGS.rsa_key_path ], 'auth_timeout_ms':
# Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Module to handle log persistence.""" import logging import os import time import gflags FLAGS = gflags.FLAGS gflags.DEFINE_multistring('output_dir', ['./'], 'Directories to which to output test results.') def PersistTestRun(test_run): # pylint: disable=invalid-name """Persists a testrun proto to disk. Args: test_run: The TestRun proto to persist. Returns: True iff the TestRun was successfully persisted to disk, False otherwise. """ if not test_run.IsInitialized(): logging.error( 'Request to persist uninitialized TestRun, ignoring (fields: %s)', test_run.FindInitializationErrors())
import boto import boto.sqs import boto.sdb import boto.s3 import json import gflags from fractions import Fraction FLAGS = gflags.FLAGS gflags.DEFINE_string('source', None, 'Source SQS queue to read from') gflags.RegisterValidator('source', lambda x: x is not None, 'You must specify a source queue') gflags.DEFINE_multistring('targets', [], 'Select SQS queue target') gflags.DEFINE_string('region', 'us-west-1', 'AWS region to connect to') gflags.DEFINE_string('bucket', 'wnyc.org-foliage-exif', 'bucket to read from') gflags.RegisterValidator('bucket', lambda x: x is not None, 'You must specify a bucket') gflags.DEFINE_string('domain', None, 'Domain') gflags.DEFINE_float('threshold', 5.0, 'Brightness threshold for outdoors') def main(argv=None, stdin=None, stdout=None, stderr=None): import sys argv = argv or sys.argv stdin = stdin or sys.stdin stdout = stdout or sys.stdout stderr = stderr or sys.stderr try: argv = FLAGS(argv)[1:] except gflags.FlagsError, e:
"""Generate a Skylark file containing a map of hash of bazel installers.""" from __future__ import print_function import gflags import urllib2 import sys _URL_FORMAT = "http://releases.bazel.build/{version}/release/bazel-{version}-installer-{platform}.sh.sha256" _URL_EXISTS = "http://releases.bazel.build/{version}/release/index.html" gflags.DEFINE_string("output", "bazel_hash_dict.bzl", "The output file") gflags.DEFINE_string("map_name", "BAZEL_HASH_DICT", "The name of the generated map in the output file") gflags.DEFINE_multistring("platforms", ["darwin-x86_64", "linux-x86_64"], "List of platforms to download SHA-256.") gflags.DEFINE_string("minimum_version", "0.15.2", "The lowest version of Bazel supported") FLAGS = gflags.FLAGS #versions bazel team decided to skip and not cut skipped_versions = [[0, 17, 0]] def get_hash_map(f): """Construct the hash map reading the release website, writing it to f.""" splitted_version = FLAGS.minimum_version.split(".") if len(splitted_version) != 3: sys.stderr.write(("Invalid version '%s', "
# forward hardware joystick commands through the simulator.
gflags.DEFINE_enum('joystick', None, ['programmed', 'software', 'hardware'],
                   'Which type of joystick to use with the simulator.',
                   short_name='j')
gflags.DEFINE_string('flight_plan', None, 'Flight plan.')
# Fix: the original used SQL-style quote doubling ('Don''t'), which in
# Python is adjacent-literal concatenation and rendered as "Dont"; use a
# backslash escape to get the intended apostrophe.
gflags.DEFINE_bool('kill', False,
                   'Don\'t run anything, but kill all sim-related processes.',
                   short_name='k')
gflags.DEFINE_bool('log', False, 'Whether to log.', short_name='l')
gflags.DEFINE_string('log_file', '',
                     'Generate a symlink to the log file with this name.')
gflags.DEFINE_multistring('monitor', None, 'Monitors to run.',
                          short_name='m')
gflags.DEFINE_multistring('webmonitor', None, 'Web Monitors to run.',
                          short_name='M')
gflags.DEFINE_bool('monitor_sim', False,
                   'Observe simulator data in the web monitor.',
                   short_name='S')
gflags.DEFINE_integer('num_controllers', 1, 'Number of controllers to run.')
gflags.DEFINE_string('overrides', None, 'JSON string of override parameters.',
                     short_name='o')
gflags.DEFINE_bool('load_state', False,
                   'Whether to load the sim and controller state from a '
                   'previous run.')
gflags.DEFINE_float('time', None, 'Duration of simulator run.',
                    short_name='t')
gflags.DEFINE_float('save_state_time', None,
                    'Time at which to save the state.')
'transform.') gflags.DEFINE_string('basename', '', 'Base filename for output files.') gflags.DEFINE_float( 'min_coherence', 0.0, 'Minimum coherence required to include data point on ' 'transfer function plot.') gflags.DEFINE_enum( 'detrend', 'mean', ['none', 'mean', 'linear'], 'Detrending algorithm to be applied before taking the ' 'Fourier transform.') gflags.DEFINE_boolean('do_unwrap', True, 'Whether phase should be unwrapped.') gflags.DEFINE_float('settling_time', 5.0, 'Duration [s] to skip at beginning of time series.') gflags.DEFINE_float('shutdown_time', 5.0, 'Duration [s] to skip at end of time series.') gflags.DEFINE_multistring('motor_name', ['ALL'], 'Name of motor to process.') gflags.MarkFlagAsRequired('motor_name') FLAGS = gflags.FLAGS def main(argv): def PrintUsage(argv, error=None): if error: print '\nError: %s\n' % error print 'Usage: %s --motor_name PBI logfile.h5 [logfile2.h5 ...]\n%s' % ( argv[0], FLAGS) try: argv = FLAGS(argv) except gflags.FlagsError, e:
lower_bound=0) flags.DEFINE_integer( 'ts_end', None, 'Timestamp to end export, in seconds since the epoch. Only use this option ' 'for debugging. Normally, ts_end is calculated automatically from: ' 'ts_start + length.', lower_bound=0) flags.DEFINE_integer( 'ts_offset', 600, 'Amount of time (seconds) that must have passed after ' 'ts_end to ensure that values cached by collectd have been flushed to disk ' 'before attempting an export.', lower_bound=0) flags.DEFINE_multistring( 'ignored_experiments', [], 'List of experiment names to ignore. Experiment ' 'must be in "slice.site" form not "site_slice".') flags.DEFINE_bool('pretty_json', None, 'Add extra indenting to json output (for debugging).') flags.DEFINE_string('output_dir', EXPORT_DIR, 'Root directory of json output files.') flags.DEFINE_string( 'output', None, 'Name of json output file. Set automatically if not given.') flags.DEFINE_string( 'export_metrics', METRIC_MAP_CONF, 'File name with metric map. The metric ' 'map defines canonical metric names for raw, metric names taken from ' 'collectd RRD files.') flags.DEFINE_bool('verbose', False, 'Increase verbosity level.') flags.DEFINE_bool('show_nagios', False, 'Shows collectd-nagios commands to monitor metrics.')
from __future__ import absolute_import from __future__ import print_function import os import subprocess import sys import tempfile import gflags import makani from makani.lib.python.autogen import autogen_util gflags.DEFINE_string('header', None, 'Input .h file.') gflags.DEFINE_string('output', None, 'Output .py file.') gflags.DEFINE_multistring('include_dir', None, 'Directory of included header files.') gflags.DEFINE_list('defines', None, 'Compilation definitions.') gflags.DEFINE_list('shared_libs', None, 'Shared libraries against which the Python wrapper module ' 'will link.') gflags.DEFINE_string('shared_lib_root', None, 'Root of the shared lib tree. This is needed to make ' 'relative references to .so files work properly.') FLAGS = gflags.FLAGS def main(argv):
'Miminum required version is %s.' % OAUTH2CLIENT_REQUIRED_VERSION) elif (parse_version(oauth2client_version) < parse_version(OAUTH2CLIENT_REQUIRED_VERSION)): downloadUsage(('oauth2client module version %s is too old.\n' + 'Miminum required version is %s.') % (oauth2client_version, OAUTH2CLIENT_REQUIRED_VERSION)) # # End of the import boilerplate # FLAGS = gflags.FLAGS gflags.DEFINE_multistring( 'scope', 'https://www.googleapis.com/auth/devstorage.full_control', 'API scope to use') gflags.DEFINE_string('client_id', None, 'Client-id for installed applications', short_name='i') gflags.DEFINE_string('client_secret', None, 'Client-secret for installed applications', short_name='s') gflags.DEFINE_string('credentials_file', os.path.expanduser(os.path.join('~', '.oauth2.dat')), 'File name for storing OAuth 2.0 credentials.',
FLAGS = gflags.FLAGS gflags.DEFINE_enum('logging_level', 'INFO', ['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'], 'Set the level of logging detail.') gflags.DEFINE_string( 'client_secrets', os.path.join(os.path.dirname(__file__), 'client_secrets.json'), 'The OAuth 2.0 client secrets file to use') gflags.DEFINE_string('project_id', None, 'The Cloud Storage project id') gflags.DEFINE_string('bucket', None, 'The bucket to upload to') gflags.DEFINE_string('publish_version', None, 'The version being published (e.g. 1.23)') gflags.DEFINE_multistring( 'publish', [], 'A file to publish to Cloud Storage; this may be specified multiple times') gflags.DEFINE_enum('acl', 'private', ['private', 'public-read', 'authenticated-read'], 'The ACLs to assign to the uploaded files') API_VERSION = '2' DEFAULT_SECRETS_FILE = os.path.join(os.path.dirname(__file__), 'client_secrets.json') OAUTH_CREDENTIALS_FILE = '.credentials.dat' OAUTH_SCOPE = 'https://www.googleapis.com/auth/devstorage.full_control' class Error(Exception): def __init__(self, status, message): self.status = status
} binding_models = { 'pod': pod_binding, 'callback': callback_binding, 'enum': enum_binding, 'by_value': by_value_binding, 'by_pointer': by_pointer_binding, 'unsized_array': unsized_array_binding, 'nullable': nullable_binding } FLAGS = gflags.FLAGS gflags.DEFINE_multistring( 'binding-module', [], 'include a binding model' ' module. Value is name:path where \'name\' is the' ' binding model name, and \'path\' is the binding' ' model module path.') gflags.DEFINE_multistring( 'generator-module', [], 'include a generator module.' ' Value is name:path where \'name\' is the generator' ' name, and \'path\' is the generator module path.') gflags.DEFINE_multistring('generate', [], 'the generator to use') gflags.DEFINE_string('output-dir', '.', 'the output directory') gflags.DEFINE_boolean( 'exclusive-lock', False, 'Use file locking to make sure' ' there is only one instance running at a time.') gflags.DEFINE_boolean(
from util import google_fonts as fonts FLAGS = flags.FLAGS flags.DEFINE_boolean('suppress_pass', True, 'Whether to print pass: results') flags.DEFINE_boolean('check_metadata', True, 'Whether to check METADATA values') flags.DEFINE_boolean('check_font', True, 'Whether to check font values') flags.DEFINE_string('repair_script', None, 'Where to write a repair script') _FIX_TYPE_OPTS = [ 'all', 'name', 'filename', 'postScriptName', 'fullName', 'fsSelection', 'fsType', 'usWeightClass' ] flags.DEFINE_multistring( 'fix_type', 'all', 'What types of problems should be fixed by ' 'repair_script. Choices: ' + ', '.join(_FIX_TYPE_OPTS)) ResultMessageTuple = collections.namedtuple( 'ResultMessageTuple', ['happy', 'message', 'path', 'repair_script']) def _HappyResult(message, path): return ResultMessageTuple(True, message, path, None) def _SadResult(message, path, repair_script=None): return ResultMessageTuple(False, message, path, repair_script) def _DropEmptyPathSegments(path):
def testWriteHelpInXMLFormat(self):
  """End-to-end check of FlagValues.WriteHelpInXMLFormat.

  Defines one flag of each major kind (plus flags from another module,
  only some of which are declared key), renders the XML help, compares
  it to a template, and checks the output parses as valid XML.
  """
  fv = gflags.FlagValues()
  # Since these flags are defined by the top module, they are all key.
  gflags.DEFINE_integer('index', 17, 'An integer flag', flag_values=fv)
  gflags.DEFINE_integer('nb_iters', 17, 'An integer flag',
                        lower_bound=5, upper_bound=27, flag_values=fv)
  gflags.DEFINE_string('file_path', '/path/to/my/dir', 'A test string flag.',
                       flag_values=fv)
  gflags.DEFINE_boolean('use_hack', False, 'Use performance hack',
                        flag_values=fv)
  gflags.DEFINE_enum('cc_version', 'stable', ['stable', 'experimental'],
                     'Compiler version to use.', flag_values=fv)
  gflags.DEFINE_list('files', 'a.cc,a.h,archive/old.zip', 'Files to process.',
                     flag_values=fv)
  gflags.DEFINE_list('allow_users', ['alice', 'bob'], 'Users with access.',
                     flag_values=fv)
  gflags.DEFINE_spaceseplist('dirs', 'src libs bins',
                             'Directories to create.', flag_values=fv)
  gflags.DEFINE_multistring('to_delete', ['a.cc', 'b.h'], 'Files to delete',
                            flag_values=fv)
  gflags.DEFINE_multi_int('cols', [5, 7, 23], 'Columns to select',
                          flag_values=fv)
  # Define a few flags in a different module.
  module_bar.DefineFlags(flag_values=fv)
  # And declare only a few of them to be key. This way, we have
  # different kinds of flags, defined in different modules, and not
  # all of them are key flags.
  gflags.DECLARE_key_flag('tmod_bar_z', flag_values=fv)
  gflags.DECLARE_key_flag('tmod_bar_u', flag_values=fv)
  # Generate flag help in XML format in the StringIO sio.
  sio = StringIO.StringIO()
  fv.WriteHelpInXMLFormat(sio)
  # Check that we got the expected result.
  expected_output_template = EXPECTED_HELP_XML_START
  main_module_name = gflags._GetMainModule()
  module_bar_name = module_bar.__name__
  # The order of the per-module sections in the template depends on how
  # the two module names compare lexically.
  if main_module_name < module_bar_name:
    expected_output_template += EXPECTED_HELP_XML_FOR_FLAGS_FROM_MAIN_MODULE
    expected_output_template += EXPECTED_HELP_XML_FOR_FLAGS_FROM_MODULE_BAR
  else:
    expected_output_template += EXPECTED_HELP_XML_FOR_FLAGS_FROM_MODULE_BAR
    expected_output_template += EXPECTED_HELP_XML_FOR_FLAGS_FROM_MAIN_MODULE
  expected_output_template += EXPECTED_HELP_XML_END
  # XML representation of the whitespace list separators.
  whitespace_separators = _ListSeparatorsInXMLFormat(string.whitespace,
                                                     indent=' ')
  expected_output = (expected_output_template % {
      'usage_doc': sys.modules['__main__'].__doc__,
      'main_module_name': main_module_name,
      'module_bar_name': module_bar_name,
      'whitespace_separators': whitespace_separators
  })
  actual_output = sio.getvalue()
  self.assertMultiLineEqual(actual_output, expected_output)
  # Also check that our result is valid XML. minidom.parseString
  # throws an xml.parsers.expat.ExpatError in case of an error.
  xml.dom.minidom.parseString(actual_output)
# Output location and packaging flags for the generated client code.
flags.DEFINE_string(
    'outdir', '',
    'Directory name for output files. (Defaults to the API name.)')
flags.DEFINE_boolean(
    'overwrite', False,
    'Only overwrite the output directory if this flag is specified.')
flags.DEFINE_string(
    'root_package_dir', '',
    'Ultimate destination for generated code (used for generating '
    'correct import lines). Defaults to the value of FLAGS.outdir.')
flags.DEFINE_string(
    'root_package', '',
    'Python import path for where these modules should be imported from.')
flags.DEFINE_multistring(
    'strip_prefix', [],
    'Prefix to strip from type names in the discovery document. (May '
    'be specified multiple times.)')
# Credentials to bake into the generated client.
flags.DEFINE_string('api_key', None, 'API key to use for API access.')
flags.DEFINE_string('client_id', None,
                    'Client ID to use for the generated client.')
flags.DEFINE_string('client_secret', None,
                    'Client secret for the generated client.')
flags.DEFINE_multistring(
    'scope', [],
    'Scopes to request in the generated client. May be specified more than '
    'once.')
flags.DEFINE_string(
    'user_agent', '',
    'User agent for the generated client. Defaults to <api>-generated/0.1.')
flags.DEFINE_boolean('generate_cli', True, 'If True, a CLI is also generated.')
# limitations under the License. """Determines the list of structure to be checked from command line arguments.""" __author__ = ('[email protected] (Robert Walker)', '[email protected] (Andy Perelson)') import glob import os import re import gflags as flags FLAGS = flags.FLAGS flags.DEFINE_multistring('recurse', None, 'Recurse in to the subdirectories of the given path', short_name='r') flags.DEFINE_list( 'exclude_directories', ('_demos'), 'Exclude the specified directories (only applicable along with -r or ' '--presubmit)', short_name='e') flags.DEFINE_list('exclude_files', ('deps.js'), 'Exclude the specified structure', short_name='x') def MatchesSuffixes(filename, suffixes): """Returns whether the given filename matches one of the given suffixes. Args:
'affects behavior that expects user interaction, like whether ' 'debug_mode will break into the debugger and lowers the frequency ' 'of informational printing.') flags.DEFINE_enum( 'format', None, ['none', 'json', 'prettyjson', 'csv', 'sparse', 'pretty'], 'Format for command output. Options include:' '\n pretty: formatted table output' '\n sparse: simpler table output' '\n prettyjson: easy-to-read JSON format' '\n json: maximally compact JSON' '\n csv: csv format with header' '\nThe first three are intended to be human-readable, and the latter ' 'three are for passing to another program. If no format is selected, ' 'one will be chosen based on the command run.') flags.DEFINE_multistring( 'job_property', None, 'Additional key-value pairs to include in the properties field of ' 'the job configuration') # No period: Multistring adds flagspec suffix. flags.DEFINE_boolean( 'use_gce_service_account', False, 'Use this when running on a Google Compute Engine instance to use service ' 'account credentials instead of stored credentials. For more information, ' 'see: https://developers.google.com/compute/docs/authentication') flags.DEFINE_string( 'service_account', '', 'Use this service account email address for authorization. ' 'For example, [email protected].') flags.DEFINE_string( 'service_account_private_key_file', '', 'Filename that contains the service account private key. ' 'Required if --service_account is specified.') flags.DEFINE_string(
print " google-api-python-client" print "Or, try: " print " easy_install --upgrade google-api-python-client" print " easy_install -U matplotlib" traceback.print_exc() sys.exit(1) gflags.DEFINE_string('query', None, "Required: name of SQL query in $PWD/sql/", short_name='q') gflags.DEFINE_string('csvfile', None, "Optional: name of CSV file instead of --query", short_name='c') gflags.DEFINE_multistring('mergefiles', [], "Optional: merge multiple CSV files", short_name='m') gflags.DEFINE_string('timestamp', None, "Required: X-axis column name with timestamps.", short_name='t') gflags.DEFINE_multistring('columns', [], ("Required: Y-axis column name to plot as a line. "+ "Can be specified multiple times. "+ "To add an error bar to the line, add a second "+ "column with a comma, such as: column1,column2. "), short_name='l') gflags.DEFINE_bool("refresh", False, ("By default, query results are cached and reused "+