def Main(argv):
    """Script main entry point: define, mark, and parse command-line flags.

    Exits with status 1 (after printing usage) if flag parsing fails.
    """
    gflags.DEFINE_string('bin_file', None, 'Full path to output binary file.')
    gflags.DEFINE_string('elf_file', None, 'Full path to input elf file.')
    gflags.DEFINE_string(
        'aio_node', 'kAioNodeUnknown',
        'AioNode enumeration name or short name of node to '
        'encode in the bin file.')
    gflags.DEFINE_string('ip_address', None,
                         'IP address of node to encode in bin file.')
    gflags.DEFINE_string('hardware_type', 'kHardwareTypeUnknown',
                         'Hardware type of node to encode in bin file.')
    gflags.DEFINE_string('file_type', None,
                         'Specify "application" or "bootloader".')
    gflags.MarkFlagAsRequired('bin_file')
    gflags.MarkFlagAsRequired('elf_file')
    gflags.MarkFlagAsRequired('file_type')
    flags = gflags.FLAGS
    try:
        argv = flags(argv)
    except gflags.FlagsError as e:
        # 'as e' is valid on Python 2.6+ and required on Python 3, unlike the
        # old 'except X, e' form; print(...) with one argument works on both.
        print('%s\nUsage: %s ARGS\n%s' % (e, sys.argv[0], flags))
        sys.exit(1)
def main(argv):
    """Mark the phantomjs flags required and parse the command line.

    Exits with status 1 (after printing usage) if flag parsing fails.
    """
    gflags.MarkFlagAsRequired('phantomjs_binary')
    gflags.MarkFlagAsRequired('phantomjs_script')
    try:
        argv = FLAGS(argv)
    except gflags.FlagsError as e:
        # Python 2.6+/3-compatible except syntax and print call.
        print('%s\nUsage: %s ARGS\n%s' % (e, sys.argv[0], FLAGS))
        sys.exit(1)
def ParseFlags(argv):
    """Define and parse gflags parameters."""
    gflags.DEFINE_string('database', 'hardware.db',
                         'Full path to shared hardware database file.')
    # The remaining flags have no usable default and must be given explicitly.
    required_flags = [
        ('config', 'Full path to test configuration file.'),
        ('q7_bin', 'Full path to Q7 binary directory.'),
        ('tms570_bin', 'Full path to TMS570 binary directory.'),
    ]
    for flag_name, help_text in required_flags:
        gflags.DEFINE_string(flag_name, None, help_text)
        gflags.MarkFlagAsRequired(flag_name)
    return gflags.FLAGS(argv)
def main(argv):
    """Parse flags, then export histograms and a summary for cases A and B."""
    gflags.MarkFlagAsRequired('a')
    gflags.MarkFlagAsRequired('b')
    try:
        argv = FLAGS(argv)
    except gflags.FlagsError as e:
        usage = '%s\nUsage: %s -a filename_a.json -b filename_b.json\n%s' % (
            e, sys.argv[0], FLAGS)
        print(usage)
        sys.exit(1)
    _ExportHistograms(FLAGS.case_a, FLAGS.case_b, FLAGS.change_desc,
                      FLAGS.case_a_url, FLAGS.case_b_url, FLAGS.output_dir)
    _WriteSummaryFile(FLAGS.case_a, FLAGS.case_b, FLAGS.threshold,
                      FLAGS.output_dir)
def main(argv):
    """Implement a simple demo for computing error CDFs.

    Defines all command-line flags, then parses argv; exits with status 1
    (after printing usage) if parsing fails.
    """
    # Input/output flags.
    gflags.DEFINE_string('input_file', None, 'Full path to wing HDF5 log file.')
    gflags.MarkFlagAsRequired('input_file')
    gflags.DEFINE_string('output_file', None, 'Full path to output MAT file.')
    gflags.MarkFlagAsRequired('output_file')
    # Segment processing flags.
    gflags.DEFINE_integer('increment', 100,
                          'Integer number of messages between segments.')
    gflags.DEFINE_integer('seg_length', 1000,
                          'Integer number of messages in each segment.')
    # Evaluate segments over a specific time interval.
    gflags.DEFINE_float('start_time', -float('inf'),
                        'Start time to evaluate segment errors.')
    gflags.DEFINE_float('end_time', float('inf'),
                        'End time to evaluate segment errors.')
    # Override default parameters.
    gflags.DEFINE_list('params', [],
                       'A comma-separated list of param=value tokens, where '
                       'each param describes the dot path to a parameter in '
                       'EstimatorParams.')
    gflags.RegisterValidator(
        'params',
        lambda l: all(len(s.split('=')) == 2 for s in l),
        message='Invalid key=value parameter syntax.')
    # Scenarios to process.
    gflags.DEFINE_bool('scenario_pure_inertial', False,
                       'Process pure inertial scenario.')
    gflags.DEFINE_bool('scenario_gps_dropout', False,
                       'Process GPS dropout scenario.')
    # Common faults to introduce.
    gflags.DEFINE_bool('fault_weather', False,
                       'Fault weather subsystems to avoid an assert when '
                       'reprocessing historical data.')
    gflags.DEFINE_bool('fault_glas', False, 'Fault GLAS subsystems.')
    # Specify flight for special handling.
    gflags.DEFINE_string('flight', None,
                        'Fix known issues associated with the given flight.')
    try:
        argv = gflags.FLAGS(argv)
    except gflags.FlagsError as e:
        # Python 2.6+/3-compatible except syntax and print call.
        print('{}\nUsage: {} ARGS\n{}'.format(e, sys.argv[0], gflags.FLAGS))
        sys.exit(1)
def main(argv):
    """Parse flags and run the autoglide analysis."""
    gflags.MarkFlagAsRequired('f')
    try:
        argv = FLAGS(argv)
    except gflags.FlagsError as e:
        message = '%s\nUsage: %s -f filename.json\n%s' % (e, sys.argv[0], FLAGS)
        print(message)
        sys.exit(1)
    _autoglide_analysis(FLAGS)
def ParseFlags(argv):
    """Define and parse gflags parameters."""
    flag_specs = [
        ('database', 'hardware.db',
         'Full path to shared hardware database file.'),
        ('config', None,
         'Full path to test fixture hardware configuration file.'),
    ]
    for name, default, help_text in flag_specs:
        gflags.DEFINE_string(name, default, help_text)
    # The configuration file has no default, so the user must supply it.
    gflags.MarkFlagAsRequired('config')
    return gflags.FLAGS(argv)
def _define_flags_args(func, flag_values):
    """Define string flags from the name and doc of a function's args.

    Args:
        func: Function whose signature and docstring drive flag creation.
        flag_values: gflags FlagValues registry to define the flags in.
    """
    args_doc = parse_args_doc(func.__doc__)
    for func_arg, param_obj in _arg_names(func).items():
        # inspect.Parameter.empty is a sentinel object; compare by identity,
        # not equality, so an argument whose default defines __eq__ oddly
        # cannot be misclassified.
        has_default_val = param_obj.default is not inspect.Parameter.empty
        gflags.DEFINE_string(func_arg,
                             param_obj.default if has_default_val else None,
                             args_doc.get(func_arg),
                             flag_values=flag_values)
        if not has_default_val:
            # Arguments without defaults must be supplied on the command line.
            gflags.MarkFlagAsRequired(func_arg, flag_values=flag_values)
def ParseFlags(argv):
    """Parse common command line arguments.

    Exits with status 1 (after printing usage) if flag parsing fails.
    """
    gflags.DEFINE_string('config_file', None, 'Full path to Python config file.')
    gflags.DEFINE_string('prefix', None,
                         'Function and enumeration prefix name (snake case).')
    gflags.DEFINE_string('source_file', None, 'Full path to output source file.')
    gflags.DEFINE_string('header_file', None, 'Full path to output header file.')
    gflags.DEFINE_string('autogen_root', makani.HOME,
                         'Root of the source tree for the output files.')
    gflags.MarkFlagAsRequired('prefix')
    gflags.MarkFlagAsRequired('config_file')
    try:
        # NOTE(review): the parsed argv is not returned; callers appear to
        # rely on the side effect of populating gflags.FLAGS.
        argv = gflags.FLAGS(argv)
    except gflags.FlagsError as e:
        # Python 2.6+/3-compatible except syntax and print call.
        print('{}\nUsage: {} ARGS\n{}'.format(e, sys.argv[0], gflags.FLAGS))
        sys.exit(1)
def make_gflags():
    """Define one command-line flag per entry in DEBIAN_FIELDS.

    Each field tuple is read as (header-name, required, ..., [default]):
    a list default yields a multistring flag, any other default a string
    flag, and a truthy field[1] marks the flag required.
    """
    for field in DEBIAN_FIELDS:
        fieldname = field[0].replace('-', '_').lower()
        msg = 'The value for the %s content header entry.' % field[0]
        if len(field) > 3:
            # isinstance is the idiomatic type test (and handles subclasses),
            # unlike comparing type(x) directly.
            if isinstance(field[3], list):
                gflags.DEFINE_multistring(fieldname, field[3], msg)
            else:
                gflags.DEFINE_string(fieldname, field[3], msg)
        else:
            gflags.DEFINE_string(fieldname, None, msg)
        if field[1]:
            gflags.MarkFlagAsRequired(fieldname)
import os
import re
import sys  # Bug fix: sys.argv/sys.exit were used below without importing sys.

import gflags

# Create an npy file given a google word2vec file (not binary) within the given
# dictfile; those unknown to dictfile in word2vec will be initialized randomly.
FLAGS = gflags.FLAGS
gflags.DEFINE_string("dictfile", None,
                     "dict file that hold all tokens used in model training")
gflags.DEFINE_string("w2vfile", None, "google word2vec file, maybe very large")
gflags.DEFINE_integer("topn", None,
                      "Top N frequency tokens u want to filter the data with")
gflags.MarkFlagAsRequired('dictfile')
gflags.MarkFlagAsRequired('topn')
gflags.MarkFlagAsRequired('w2vfile')

try:
    FLAGS(sys.argv)
except gflags.FlagsError as e:
    # Single-argument print(...) behaves identically on Python 2 and 3.
    print("\n%s" % e)
    print(FLAGS.GetHelp(include_special_flags=False))
    sys.exit(1)

# Accumulators filled while reading the word2vec file.
w2v = {}
dim = 0
token_cnt = 0
dicfile = FLAGS.dictfile
"to do this)."))
# Training hyperparameter flags.
gflags.DEFINE_integer("total_epochs", 500, "Total number of epochs.")
gflags.DEFINE_boolean("reweight", True, "Try re-weighting.")
gflags.DEFINE_string("frames",
                     "-10 -9 -8 -7 -6 -5 -4 -3 -2 -1 0 1 2 3 4 5 6 7 8 9 10",
                     "Frames to process.")
# gflags.DEFINE_float(
#     "hantman_weight_decay", 0.0001, "Weight decay value.")
gflags.DEFINE_float("learning_rate", 0.001, "Learning rate.")
gflags.DEFINE_integer("hantman_mini_batch", 256,
                      "Mini batch size for training.")
# needed to limit how much to process during eval. important for speed purposes.
gflags.DEFINE_integer("seq_len", 1500, "Sequence length.")
# These flags are defined elsewhere (presumably in arg_parsing/cuda_flags);
# here they are only marked mandatory.
gflags.MarkFlagAsRequired("out_dir")
gflags.MarkFlagAsRequired("train_file")
gflags.MarkFlagAsRequired("test_file")
# gflags.DEFINE_boolean("help", False, "Help")
gflags.ADOPT_module_key_flags(arg_parsing)
gflags.ADOPT_module_key_flags(flags.cuda_flags)

# Names of the output classes this script works with.
g_label_names = ["lift", "hand", "grab", "supinate", "mouth", "chew"]


def _setup_opts(argv):
    """Parse inputs."""
    FLAGS = gflags.FLAGS
    opts = arg_parsing.setup_opts(argv, FLAGS)
    # this is dumb... passing in negative numbers to DEFINE_multi_int doesn't
__author__ = '[email protected] (Sergio Gomes)'

import sys

import gflags
from oauth2client.client import AccessTokenRefreshError

import sample_utils

# Maximum number of results to request per API page.
MAX_PAGE_SIZE = 50

# Declare command-line flags, and set them as required.
gflags.DEFINE_string('account_id', None,
                     'The ID of the account with the specified ad unit',
                     short_name='a')
gflags.MarkFlagAsRequired('account_id')
gflags.DEFINE_string('ad_client_id', None,
                     'The ID of the ad client with the specified ad unit',
                     short_name='c')
gflags.MarkFlagAsRequired('ad_client_id')
gflags.DEFINE_string('ad_unit_id', None,
                     'The ID of the ad unit for which to get custom channels',
                     short_name='u')
gflags.MarkFlagAsRequired('ad_unit_id')


def main(argv):
import gflags
import gzip
import io
import json
import os
import os.path
import subprocess
import sys
import re
import tarfile
import tempfile

from container import archive

# Flags describing the layer contents to assemble.
gflags.DEFINE_string('output', None, 'The output file, mandatory')
gflags.MarkFlagAsRequired('output')
gflags.DEFINE_multistring('file', [], 'A file to add to the layer')
gflags.DEFINE_string('manifest', None,
                     'JSON manifest of contents to add to the layer')
gflags.DEFINE_multistring('empty_file', [], 'An empty file to add to the layer')
gflags.DEFINE_multistring('empty_dir', [], 'An empty dir to add to the layer')
gflags.DEFINE_string(
    'mode', None, 'Force the mode on the added files (in octal).')
gflags.DEFINE_string(
    'mtime', None, 'Set mtime on tar file entries. May be an integer or the'
    ' value "portable", to get the value 2000-01-01, which is'
import os
import os.path
import sys

# Allow running from a checkout: default to this file's own directory.
WHEELBARROW_HOME = os.getenv('WHEELBARROW_HOME', os.path.dirname(__file__))
sys.path.append(WHEELBARROW_HOME)

from host.nfs_analysis_setup_agent import NfsAnalysisSetupAgent
from host.scoring.result_directory_scorer import ScoreResultDirectory
import host.vm_launcher

# NOTE(review): gflags is used below but not imported in this chunk; it is
# presumably imported earlier in the file -- confirm.
FLAGS = gflags.FLAGS
gflags.DEFINE_string('image', None, 'The VM image which should be used.',
                     short_name='i')
gflags.MarkFlagAsRequired('image')
gflags.DEFINE_integer('memory', 4096, 'Amount of physical memory for the VM.',
                      short_name='m')
gflags.DEFINE_integer('timeout', 120, 'VM timeout, after which it will be '
                      'killed.', short_name='t')
gflags.DEFINE_string('batchfile', None, 'A file containing a description of '
                     'packages to be downloaded.', short_name='b')
gflags.DEFINE_string('nfshost', None, 'The path to an NFS share on the host.',
import threading
import time

import gflags

from frontend import frontend_util
from util import api
from util import consts
from util import stdlog

FLAGS = gflags.FLAGS

# Path to the BAILOUT marker file, one directory above this module.
BAILOUT_FILE = os.path.join(os.path.dirname(__file__), os.pardir, 'BAILOUT')

gflags.DEFINE_string('alice_solver', None, 'Path to alice solver binary.')
gflags.MarkFlagAsRequired('alice_solver')
gflags.DEFINE_integer('initial_arguments', 3,
                      'Number of arguments initially given to cardinal.')
gflags.DEFINE_string(
    'detail_log_dir', None,
    'Problem details for post-mortem are logged to this directory.')
gflags.MarkFlagAsRequired('detail_log_dir')
gflags.DEFINE_boolean('keep_going', False, 'Keep going even on expiration')
gflags.DEFINE_integer('time_limit_sec', 300, 'Time limit in seconds')


class Alice(object):
'Set the level of logging detail.')
gflags.DEFINE_string('model_id', None,
                     'The unique name for the predictive model (ex foo)')
gflags.DEFINE_string('data_file', None,
                     'Full Google Storage path of csv data (ex bucket/object)')
gflags.DEFINE_string('pmml_file', None,
                     'Full Google Storage path of pmml for '
                     'preprocessing (ex bucket/object)')
gflags.MarkFlagAsRequired('model_id')
gflags.MarkFlagAsRequired('data_file')
gflags.MarkFlagAsRequired('pmml_file')


def main(argv):
    # Let the gflags module process the command-line arguments
    try:
        argv = FLAGS(argv)
    except gflags.FlagsError, e:
        # NOTE(review): '\\n' in this source prints a literal backslash-n;
        # this probably intended '\n' newlines -- confirm before changing.
        print '%s\\nUsage: %s ARGS\\n%s' % (e, argv[0], FLAGS)
        sys.exit(1)
    # Set the logging according to the command-line flag
    logging.getLogger().setLevel(getattr(logging, FLAGS.logging_level))
    # If the Credentials don't exist or are invalid run through the native client
FLOW = flow_from_clientsecrets( CLIENT_SECRETS, scope='https://www.googleapis.com/auth/prediction', message=MISSING_CLIENT_SECRETS_MESSAGE) # The gflags module makes defining command-line options easy for # applications. Run this program with the '--help' argument to see # all the flags that it understands. gflags.DEFINE_enum('logging_level', 'ERROR', ['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'], 'Set the level of logging detail.') gflags.DEFINE_string( 'object_name', None, 'Full Google Storage path of csv data (ex bucket/object)') gflags.MarkFlagAsRequired('object_name') gflags.DEFINE_string('id', None, 'Model Id of your choosing to name trained model') gflags.MarkFlagAsRequired('id') # Time to wait (in seconds) between successive checks of training status. SLEEP_TIME = 10 def print_header(line): '''Format and print header block sized to length of line''' header_str = '=' header_line = header_str * len(line) print '\n' + header_line print line
__author__ = '[email protected] (Sergio Gomes)'

import sys

import gflags
from oauth2client.client import AccessTokenRefreshError

import sample_utils

# Maximum number of results to request per list() call.
MAX_PAGE_SIZE = 50

# Declare command-line flags, and set them as required.
gflags.DEFINE_string('ad_client_id', None,
                     'The ad client ID for which to get URL channels',
                     short_name='c')
gflags.MarkFlagAsRequired('ad_client_id')


def main(argv):
    # Process flags and read their values.
    sample_utils.process_flags(argv)
    ad_client_id = gflags.FLAGS.ad_client_id

    # Authenticate and construct service.
    service = sample_utils.initialize_service()

    try:
        # Retrieve URL channel list in pages and display data as we receive it.
        request = service.urlchannels().list(adClientId=ad_client_id,
                                             maxResults=MAX_PAGE_SIZE)
import sys
import logging
import os

import media_common

# Location of the daemon's PID file.
PIDFILE = '/var/run/media_library_server.pid'

# NOTE(review): gflags is used below but not imported in this chunk; it is
# presumably imported earlier in the file -- confirm.
gflags.DEFINE_string(
    'group_name', '', 'The name of the group to use' +
    ' for the daemon process')
gflags.DEFINE_string('user_name', '', 'The name of the user to use' +
                     ' for the daemon process')
gflags.DEFINE_string('media_dir', None, 'Directory of media library')
gflags.DEFINE_boolean('daemon', True, 'Run the server as a daemon')
gflags.MarkFlagAsRequired('media_dir')
gflags.MarkFlagAsRequired('group_name')
gflags.MarkFlagAsRequired('user_name')

FLAGS = gflags.FLAGS

# Module-level repository instance shared by the XML-RPC handlers.
repository = media_common.Repository()


def run_server(lib_base_dir):
    """Open the repository at lib_base_dir and serve XML-RPC requests."""
    repository.open(lib_base_dir)
    logging.info('opened repository at %s', lib_base_dir)
    # NOTE(review): server address and port are hard-coded -- consider flags.
    server = SimpleXMLRPCServer(('192.168.1.27', 9333), logRequests=True)
    server.register_function(check_file_hash)
    try:
        print 'Use Control-C to exit'
# See the License for the specific language governing permissions and # limitations under the License. """Strip static qualifier from C functions to enable unit testing.""" import os import re import sys import textwrap import gflags import makani gflags.DEFINE_string('autogen_root', makani.HOME, 'Root of the source tree for the output files.') gflags.DEFINE_string('input_source', None, 'Full path to input source file.') gflags.MarkFlagAsRequired('input_source') gflags.DEFINE_string('output_source', None, 'Full path to output source file.') gflags.DEFINE_string('output_header', None, 'Full path to output header file.') gflags.DEFINE_string('static_prefix', '', 'Function prefix to prepend to static functions.') gflags.DEFINE_string('stub', '', 'List of functions to rename.') gflags.DEFINE_string('stub_prefix', 'Stubbed', 'Function prefix to prepend to stub functions.') FLAGS = gflags.FLAGS def main(argv): try: argv = FLAGS(argv) except gflags.FlagsError, e: print '{}\nUsage: {} ARGS\n{}'.format(e, sys.argv[0], FLAGS)
# insert `makani.HOME` into the start of any CDLL calls. This works in # conjunction with the value of MAKANI_HOME used by a Python executable (it # points to the runfiles directory) to appropriately resolve the .so. # # On a related note, it would be really nice to replace h2py with SWIG. if shared_libs: cmd = ('sed -i -e'.split() + ['1s/^/import os\\nimport makani\\n/', '-e', 's|%s/||g' % FLAGS.shared_lib_root, '-e', r's/CDLL(\(.*\)/CDLL(os.path.join(makani.HOME, \1)/', FLAGS.output]) subprocess.check_call(cmd) # Define helpful variables. found_directory = False with open(FLAGS.output, 'a') as f: for include_dir in FLAGS.include_dir: relpath = os.path.relpath(FLAGS.header, start=include_dir) if not relpath.startswith('..'): f.write('H2PY_HEADER_FILE = \'{}\'\n'.format(relpath)) found_directory = True break assert found_directory if __name__ == '__main__': gflags.MarkFlagAsRequired('header') gflags.MarkFlagAsRequired('output') gflags.MarkFlagAsRequired('include_dir') main(sys.argv)
#coding=utf-8
import codecs
import re
import sys

import gflags

# Filter out sentences with tokens not match regex bellow
FLAGS = gflags.FLAGS
gflags.DEFINE_string("datafile", None, "datafile")
gflags.MarkFlagAsRequired('datafile')

try:
    FLAGS(sys.argv)
except gflags.FlagsError as e:
    # Single-argument print(...) behaves identically on Python 2 and 3.
    print("\n%s" % e)
    print(FLAGS.GetHelp(include_special_flags=False))
    sys.exit(1)

filename = FLAGS.datafile
with codecs.open(filename, "r") as f:
    for line in f:
        line = line.strip()
        # Skip lines containing any character outside the allowed set.
        # Idiom fix: compare against None with 'is not', never '!='.
        if re.search(u"[^\u4e00-\u9fa5,。:?!0-9.0123456789a-zA-Z:?!\"\- \t]",
                     line.decode("utf-8")) is not None:
            continue
        print(line)
gflags.DEFINE_string('basename', '', 'Base filename for output files.')
gflags.DEFINE_float(
    'min_coherence', 0.0,
    'Minimum coherence required to include data point on '
    'transfer function plot.')
gflags.DEFINE_enum(
    'detrend', 'mean', ['none', 'mean', 'linear'],
    'Detrending algorithm to be applied before taking the '
    'Fourier transform.')
gflags.DEFINE_boolean('do_unwrap', True, 'Whether phase should be unwrapped.')
gflags.DEFINE_float('settling_time', 5.0,
                    'Duration [s] to skip at beginning of time series.')
gflags.DEFINE_float('shutdown_time', 5.0,
                    'Duration [s] to skip at end of time series.')
gflags.DEFINE_multistring('motor_name', ['ALL'], 'Name of motor to process.')
# NOTE(review): 'motor_name' already has a non-None default (['ALL']), so
# marking it required is redundant -- confirm the default is intentional.
gflags.MarkFlagAsRequired('motor_name')

FLAGS = gflags.FLAGS


def main(argv):

    def PrintUsage(argv, error=None):
        # Print the optional error followed by this tool's usage text.
        if error:
            print '\nError: %s\n' % error
        print 'Usage: %s --motor_name PBI logfile.h5 [logfile2.h5 ...]\n%s' % (
            argv[0], FLAGS)

    try:
        argv = FLAGS(argv)
    except gflags.FlagsError, e:
        PrintUsage(argv, e)
#!/usr/bin/python
import driver
import gflags
import sys

FLAGS = gflags.FLAGS

gflags.DEFINE_integer('zone', None, 'ID of the zone')
gflags.DEFINE_bool('state', None, 'Desired state of the zone')
gflags.MarkFlagAsRequired('zone')
gflags.MarkFlagAsRequired('state')


def main():
    """Parse flags, set the requested sprinkler zone, and print zone states."""
    argv = sys.argv
    try:
        positional_args = gflags.FLAGS(argv)  # parse flags
    except gflags.FlagsError as e:
        # Python 2.6+/3-compatible except syntax and print call.
        print('%s\nUsage: %s ARGS\n%s' % (
            e, sys.argv[0], gflags.FLAGS.MainModuleHelp()))
        sys.exit(1)
    sprink = driver.SprinklerDriver()
    sprink.set_zone(FLAGS.zone, FLAGS.state)
    print(sprink.get_zone_states())
    return


if __name__ == "__main__":
    main()
import pandas as pd
import matplotlib.pyplot as plt
import numpy as np
import gflags  # sudo pip install python-gflags
import sys
import scipy as sp  # sudo pip install spicy
from scipy import stats
import math

#gflags.DEFINE_string('cris', None, 'un parametro para testear', short_name='c')
gflags.DEFINE_string('size', None, 'The model size', short_name='s')
# Device names are passed as a single comma-separated list.
gflags.DEFINE_list('device_name', None, 'Broken device name', short_name='d')
#gflags.DEFINE_string('size', 149, 'The model size', short_name='s')
gflags.MarkFlagAsRequired('size')
gflags.MarkFlagAsRequired('device_name')

FLAGS = gflags.FLAGS

try:
    argv = FLAGS(sys.argv)  # parse flags
except gflags.FlagsError, e:
    print '%s\nUsage: %s ARGS\n%s' % (e, sys.argv[0], FLAGS)
    sys.exit(1)

#print "anduvo", FLAGS.size

# Accumulator frames populated in the loop below.
df_Command = pd.DataFrame()
df_Devices = pd.DataFrame()
df_Task = pd.DataFrame()

for prob in range(10, 100, 10):
import sys

import future.utils
import gflags
import hyou
import oauth2client.client

FLAGS = gflags.FLAGS

# Default OAuth2 client credentials for the test application.
TEST_CLIENT_ID = ('958069810280-th697if59r9scrf1qh0sg6gd9d9u0kts.'
                  'apps.googleusercontent.com')
TEST_CLIENT_SECRET = '5nlcvd54WycOd8h8w7HD0avT'

gflags.DEFINE_string('client_id', TEST_CLIENT_ID, '')
gflags.DEFINE_string('client_secret', TEST_CLIENT_SECRET, '')
gflags.MarkFlagAsRequired('client_id')
gflags.MarkFlagAsRequired('client_secret')


def main(argv):
    # Exactly one positional argument (the output path) is expected.
    if len(argv) != 2:
        print('usage: generate_oauth2_credentials.py OUTPUT_JSON_PATH')
        return 1
    output_json_path = argv[1]

    # Build the OAuth2 web-server flow for the hyou scopes.
    flow = oauth2client.client.OAuth2WebServerFlow(
        client_id=FLAGS.client_id,
        client_secret=FLAGS.client_secret,
        scope=hyou.SCOPES)
    url = flow.step1_get_authorize_url('urn:ietf:wg:oauth:2.0:oob')
import sys

import gflags
import glatitude
import httplib2

FLAGS = gflags.FLAGS

gflags.DEFINE_string('key', None, 'OAuth client ID')
gflags.DEFINE_string('secret', None, 'OAuth secret')
gflags.DEFINE_string('url', None, 'API URL')
gflags.MarkFlagAsRequired('key')
gflags.MarkFlagAsRequired('secret')
gflags.MarkFlagAsRequired('url')

try:
    sys.argv = FLAGS(sys.argv)
except gflags.FlagsError as e:
    # Python 2.6+/3-compatible except syntax and print call.
    print('''\
%s
Usage: %s ARGS
%s''' % (e, sys.argv[0], FLAGS))
    sys.exit(1)

# Authorize an HTTP client with the OAuth credentials and fetch the URL.
credentials = glatitude.auth(FLAGS.key, FLAGS.secret)
http = httplib2.Http()
credentials.authorize(http)
resp, content = http.request(FLAGS.url)
print(content)
import hashlib
import json
import os
import sys

import gflags
import jsonschema
import M2Crypto

from cpp_generator import generate_cpp_header
from java_generator import generate_java_source

FLAGS = gflags.FLAGS

gflags.DEFINE_string("log_list", None, "Logs list file to parse and print.")
gflags.MarkFlagAsRequired("log_list")
gflags.DEFINE_string("signature", None,
                     "Signature file over the list of logs.")
gflags.DEFINE_string("signer_key", None, "Public key of the log list signer.")
gflags.DEFINE_string("log_list_schema",
                     os.path.join(os.path.dirname(sys.argv[0]), "data",
                                  "log_list_schema.json"),
                     "JSON schema for the list of logs.")
# Typo fix in the two help strings below: "specifed" -> "specified".
gflags.DEFINE_string("header_output", None,
                     "If specified, generates C++ code for Chromium.")
gflags.DEFINE_string("java_output", None, "If specified, generates Java code.")
gflags.DEFINE_string("java_class", "org.conscrypt.ct.KnownLogs",
                     "Fully qualified name of the generated class.")
gflags.DEFINE_boolean("skip_signature_check", False,
                      "Skip signature check (only validate schema).")
import Queue
import subprocess
import sys
import threading

import gflags

from util import stdlog

FLAGS = gflags.FLAGS

# Repository root, one level above this module's directory.
# NOTE(review): `os` is used here; assumed imported earlier in the file.
ROOT_DIR = os.path.join(os.path.dirname(__file__), os.pardir)

# Every flag below is mandatory, so define and mark each one in a single pass.
for _define, _name, _help in [
    (gflags.DEFINE_string, 'cluster_solver',
     'Path to cluster solver binary.'),
    (gflags.DEFINE_string, 'problemset_file',
     'Path to the problemset TSV file.'),
    (gflags.DEFINE_integer, 'threads',
     'Number of threads.'),
    (gflags.DEFINE_integer, 'time_limit_sec',
     'Time limit per problem in seconds.'),
]:
    _define(_name, None, _help)
    gflags.MarkFlagAsRequired(_name)