def MigrationOptionParser(Source, Destinate):
    """Parse command-line options for a Source -> Destinate migration.

    Returns (Options, InputFile); raises MigrationError on any usage
    violation (missing input, conflicting flags, missing file).
    """
    # Clearer usage text than optparse's default message.
    parser = OptionParser(description=__copyright__, version=__version__,
                          usage="%prog [-a] [-o <output_file>] <input_file>")
    parser.add_option("-o", "--output", dest="OutputFile",
                      help="The name of the %s file to be created." % Destinate)
    parser.add_option("-a", "--auto", dest="AutoWrite", action="store_true",
                      default=False,
                      help="Automatically create the %s file using the name of the %s file and replacing file extension" % (Source, Destinate))
    opts, positional = parser.parse_args()

    # Exactly one positional argument (the input file) is accepted.
    if not positional:
        raise MigrationError(OPTION_MISSING, name="Input file", usage=parser.get_usage())
    if len(positional) > 1:
        raise MigrationError(OPTION_NOT_SUPPORTED, name="Too many input files", usage=parser.get_usage())

    input_file = positional[0]
    if not os.path.exists(input_file):
        raise MigrationError(FILE_NOT_FOUND, name=input_file)

    if opts.OutputFile and opts.AutoWrite:
        # -o and -a are mutually exclusive.
        raise MigrationError(OPTION_CONFLICT, arg1="-o", arg2="-a", usage=parser.get_usage())
    if not opts.OutputFile:
        if not opts.AutoWrite:
            raise MigrationError(OPTION_MISSING, name="-o", usage=parser.get_usage())
        # Derive the output name from the input name, swapping the extension.
        opts.OutputFile = os.path.splitext(input_file)[0] + "." + Destinate.lower()
    return opts, input_file
def main(argv): try: parser = OptionParser() parser.add_option("-a", "--auth", dest="auth", default="ClueCon", help="ESL password") parser.add_option("-s", "--server", dest="server", default="127.0.0.1", help="FreeSWITCH server IP address") parser.add_option("-p", "--port", dest="port", default="8021", help="FreeSWITCH server event socket port") parser.add_option( "-c", "--command", dest="command", help="command to run, surround mutli word commands in \"\'s") (options, args) = parser.parse_args() con = ESLconnection(options.server, options.port, options.auth) #are we connected? if con.connected(): #run command e = con.api(options.command) print e.getBody() else: print "Not Connected" sys.exit(2) except: print parser.get_usage()
def do_parse(args):
    """Parse command-line flags and positional arguments.

    Returns (command, options, destination, server_app); calls safe_exit()
    with the usage text when usage is violated or --version was requested.
    """
    parser = OptionParser(usage = "prog [options] command destination [server [app]]")
    # Boolean switches, all off by default.
    for flags, dest in ((("-a", "--all"), "all"),
                        (("-d", "--debug"), "debug"),
                        (("-v", "--version"), "version"),
                        (("--force",), "force")):
        parser.add_option(*flags, action="store_true", dest=dest, default=False)
    parser.add_option("-t", "--type", dest="dir_name", default="source")
    parser.add_option("-f", "--file", dest="file", default=None)
    (options, args) = parser.parse_args(args)

    if options.version:
        print_version()
        safe_exit(parser.get_usage())

    argc = len(args)
    # Need at least command+destination; server (and app) are mandatory
    # unless --all was given.
    if argc < 2 or (argc < 4 and not options.all):
        safe_exit(parser.get_usage())

    if argc >= 4:
        return args[0], options, args[1], get_server_app(args[1], args[2], args[3])
    elif argc == 3:
        return args[0], options, args[1], get_server_app(args[1], args[2])
    elif argc == 2:
        return args[0], options, args[1], get_server_app(args[1])
def main(argv): try: parser = OptionParser() parser.add_option("-a", "--auth", dest="auth", default="ClueCon", help="ESL password") parser.add_option("-s", "--server", dest="server", default="127.0.0.1", help="FreeSWITCH server IP address") parser.add_option("-p", "--port", dest="port", default="8021", help="FreeSWITCH server event socket port") parser.add_option("-c", "--command", dest="command", help="command to run, surround mutli word commands in \"\'s") (options, args) = parser.parse_args() con = ESLconnection(options.server, options.port, options.auth) #are we connected? if con.connected(): #run command e = con.api(options.command) print e.getBody() else: print "Not Connected" sys.exit(2) except: print parser.get_usage()
def main(argv=None):
    """Command-line entry point for the Philadelphia crime-data tool.

    Dispatches on the options given: geocode a single address (-a),
    import a police CSV file (-i), pull fresh police data (-p), or
    geocode stored records (-g).  Prints usage when no action ran.
    """
    usage = "usage: %prog -h for help"
    did_something = False  # flipped whenever any action option is handled
    parser = OptionParser(usage)
    parser.add_option("-a", "--address", dest="address", help="return geocoded data for a given address")
    parser.add_option("-x", "--xml", action="store_true", dest="xml", help="include raw XML")
    parser.add_option("-s", "--startdate", dest="start_date", help="starting date in format M/D/YYYY")
    parser.add_option("-e", "--enddate", dest="end_date", help="end date in format M/D/YYYY")
    parser.add_option("-p", "--policedata", action="store_true", dest="police_data", help="pull crime data from the Philadelphia police department website")
    parser.add_option("-i", "--policeimport", dest="police_filename", help = "import or reimport the file given by police_filename")
    parser.add_option("-g", "--geocode", action="store_true", dest="geocode", help="Geocode empty locations")
    parser.add_option("-n", "--geocodelimit", dest="num_geocode_tries", help="How many times we're allowed to hit the geocoding APIs this session")
    (options, args) = parser.parse_args()
    if options.address:
        # Geocode one address and dump the returned fields.
        did_something = True
        print "using address %s"%options.address
        t = GeoData(options.address)
        if options.xml:
            print BeautifulSoup(t.xml).prettify()
        for a, b in t.geoinfo.items():
            print "%s = %s"%(a, b)
    if options.police_filename:
        # (Re-)import a previously downloaded CSV file.
        did_something = True
        t = PhilaCrimeParser()
        t.import_csv_data(options.police_filename)
    if options.police_data:
        # NOTE(review): the parser instance is constructed with the date
        # window but no further method is called -- presumably the
        # constructor performs the pull itself; confirm against the class.
        did_something = True
        t = PhilaCrimeParser(options.start_date, options.end_date)
    if options.geocode:
        did_something = True
        t = PhilaCrimeParser()
        if options.num_geocode_tries:
            try:
                # Cap the number of geocoding API hits for this session.
                t.geocoder.retries = int(options.num_geocode_tries)
                print "Geocoding %d locations."%t.geocoder.retries
            except ValueError:
                print "Could not set geocode retries to " + options.num_geocode_tries + ". exiting."
                return 2
        t.geocode_and_import()
        print "finished geocoding"
    if did_something == False:
        # No action option given: remind the user how to call us.
        print parser.get_usage()
def main(argv): '''Main function.''' sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0) StreamLog.init() # Identify settings module and set environment appropriately sdg.set_settings_module() # pylint: disable=F0401,E0611 from sdg.django.handoff.models import AnnouncedSession opt_parser = OptionParser(argv) opt_parser.add_option('--days', dest='days', action='store', type='int', help='number of days of data to be retained') (opt, leftover_arg_list) = opt_parser.parse_args() if len(leftover_arg_list): print opt_parser.get_usage() return 1 try: if opt.days is not None: AnnouncedSession.prune(days=opt.days) else: AnnouncedSession.prune() # pylint: disable=W0703 except Exception, x: logging.info(x.message) return 1
def main():
    """Update (or delete) the recorded status of a single job task.

    Requires --job_name, --iteration_id and a valid status-flag
    combination; exits with the usage message otherwise.
    """
    global task, iteration, region
    parser = OptionParser("%prog --job_name <JOB_NAME> --iteration_id <#> --task_name <TASK_NAME> \
--skipped | --running | --error | --timedout | --continue_anyway | --retry | --success \
[--delete] [--debug]")
    parser.add_option("--job_name", action="store", help="the name of the job")
    parser.add_option("--iteration_id", action="store", type="int", help="iteration id")
    parser.add_option("--task_name", action="store", help="the Task name")
    parser.add_option("--skipped", action="store_true" , help="Task has been skipped; it ran and failed or did not run before being skipped")
    parser.add_option("--running", action="store_true", help="Task is running now.. OMG exciting!")
    parser.add_option("--error", action="store_true", help="Task ran but ended in error")
    parser.add_option("--timedout", action="store_true", help="Task timed out while running")
    parser.add_option("--continue_anyway", action="store_true" , help="Task ran, failed, but children are allowed to run as though it succeeded or children were flow dependencies")
    parser.add_option("--retry", action="store_true", help="Task has been asked to be retried")
    parser.add_option("--success", action="store_true", help="Task ran successfully. Yay!")
    parser.add_option("--delete", action="store_true", help="Delete this status.")
    parser.add_option("--debug", action="store_true", help="more messages")
    (options, args) = parser.parse_args()
    # Mandatory identifiers.  NOTE(review): an --iteration_id of 0 is
    # also rejected by this truthiness test -- confirm that is intended.
    if not options.job_name:
        sys.exit(parser.get_usage())
    if not options.iteration_id:
        sys.exit(parser.get_usage())
    # The status flags are mutually constrained; check_status_options
    # validates the combination.
    if not check_status_options(options):
        sys.exit(parser.get_usage())
    try:
        job = core.Job.get(options.job_name)
    except core.Job.DoesNotExist, dne:
        sys.exit("ERROR: Job '%s' does not exist!" % (options.job_name))
def main(): """ Program entry point when run interactively. """ # prepare option parser parser = OptionParser(usage="usage: %prog [options] filename jobId jobId ...", description="Read a text file with jobs, manipulate their state.", epilog="In the given file, each line beginning with a dot and a space (. ) will be executed. The file is modified to reflect the execution state of each job (r-running, d-done, !-failed, e-error).") parser.add_option("-s", "--set", dest="set_state", default="", help="set state to STATE [default: no change]", metavar="STATE") parser.add_option("-l", "--list", dest="list", default=False, action="store_true", help="list given jobs [default: no]") parser.add_option("-a", "--all", dest="all_jobs", default=False, action="store_true", help="affect all jobs [default: no]") # parse options (options, args) = parser.parse_args() # get file name if len(args) < 1: print "Need a filename (a list of jobs)" print "" print parser.get_usage() sys.exit(1) fname = args[0] jobIds = [] if len(args) > 1: jobIds = args[1:] # process file process_file(fname, jobIds, options)
def parse_parameters(args):
    """returns an option object with the values of the optional parameters and the mandatory arguments in the list"""
    from optparse import OptionParser, OptionGroup
    # Multi-line usage text with worked examples.
    usage = "usage: %prog [options] cathcode\n"
    usage+= "Example: get data for the 1.10.10.180 cath superfamily\n"
    usage+= "./getDomainsFromSP.py 1.10.10.180\n"
    usage+= "Example:get data for the 1.10.10.180 cath superfamily with all posible conf parameters \n"
    usage+= "./getDomainsFromSP.py 1.10.10.180 -w . -m localhost -u sflexfit -p sflexfit -d sflexfit\n"
    parser = OptionParser(usage=usage)
    parser.add_option("-t","--tar",dest="tar",action="store_true",default=False,help="generate a compressed tar file with the results")
    parser.add_option("-w","--workingDirectory", dest="workingDirectory",default=".",help="directory where the resulting files will be stored")
    parser.add_option("-v","--verbose",action="store_true", dest="verbose",default=True,help="display program status")
    parser.add_option("-j","--justTar",dest="justTar",action="store_true",default=False,help="remove all the generated files leaving just the tar")
    # Database connection parameters, grouped in the --help output.
    # SECURITY NOTE(review): real-looking credentials are hard-coded as
    # defaults here; consider moving them to a configuration file.
    group = OptionGroup(parser, "DataBase options", "DataBase connection parameters")
    group.add_option("-m","--DBServer", dest="host", default="okazaki.cnb.csic.es",help="database server")
    group.add_option("-u","--user", dest="user", default="poli",help="database user name")
    group.add_option("-p","--passwd", dest="passwd", default="R7xvgAKK",help="user password")
    group.add_option("-d","--db", dest="db", default="sflexfit",help="database name")
    parser.add_option_group(group)
    (options, args)= parser.parse_args(args)
    if len(args) != 1:
        #cathcode is the single mandatory parameter
        print parser.get_usage()
        sys.exit(2)
    return (options,args)
def main():
    """ Program entry point when run interactively. """
    # prepare option parser
    parser = OptionParser(usage="usage: %prog [options] filename", description="Wait until all jobs in a text file are processed.", epilog="In the given file, each line beginning with a dot and a space (. ) will be executed. The file is modified to reflect the execution state of each job (r-running, d-done, !-failed, e-error).")
    parser.add_option("-e", "--use-exit-status", dest="use_exit_status", default=False, action="store_true", help="use exit status 0 only if all jobs are marked done [default: no]")
    parser.add_option("-p", "--progress", dest="progress", default=False, action="store_true", help="show progress while waiting [default: no]")
    # parse options
    (options, args) = parser.parse_args()
    # get file name
    if len(args) < 1:
        print "Need a filename (a list of jobs)"
        print ""
        print parser.get_usage()
        sys.exit(1)
    fname = args[0]
    # process file; opened unbuffered read/write so state refreshes see
    # concurrent modifications by the job runners
    f = open(fname, 'r+b', 0)
    jobs = read_jobs(f)
    states = list()
    # Poll once per second until no job is unprocessed or running.
    while True:
        old_states = states
        refresh_job_states(f, jobs)
        states = list((j.state for j in jobs))
        # Tally jobs by their one-character state code.
        count_unproc = len(["." for j in jobs if j.state == '.'])
        count_running = len(["." for j in jobs if j.state == 'r'])
        count_failed = len(["." for j in jobs if j.state == '!'])
        count_error = len(["." for j in jobs if j.state == 'e'])
        count_done = len(["." for j in jobs if j.state == 'd'])
        if states != old_states:
            if options.progress:
                # Render a 16-character bar: '='=done, 'e'=error,
                # '!'=failed, '>'=running, ' '=remaining.
                bar_len = 16
                len_running = int(1.0 * count_running/len(jobs)*bar_len)
                len_failed = int(1.0 * count_failed /len(jobs)*bar_len)
                len_error = int(1.0 * count_error /len(jobs)*bar_len)
                len_done = int(1.0 * count_done /len(jobs)*bar_len)
                len_rest = bar_len - (len_running + len_failed + len_error + len_done)
                bar_print = ("=" * len_done) + ("e" * len_error) + ("!" * len_failed) + (">" * len_running) + (" " * len_rest)
                print "progress: %3d of %3d jobs processed, %d errors [%s]" % (count_failed + count_error + count_done, len(jobs), count_failed + count_error, bar_print)
        if count_unproc + count_running == 0:
            # All jobs reached a terminal state.  With -e, succeed only
            # when every job is marked done.
            if options.use_exit_status and (count_done != len(jobs)):
                sys.exit(1)
            sys.exit(0)
        time.sleep(1)
    # NOTE(review): unreachable -- the loop only exits via sys.exit().
    f.close()
def Options():
    """Build the Trim command-line parser and parse sys.argv.

    Returns (Options, InputFile).  InputFile may be '' for the
    VfrOffsetBin file type, which accepts zero positional arguments;
    all other file types require exactly one input file.
    """
    OptionList = [
        make_option("-s", "--source-code", dest="FileType", const="SourceCode", action="store_const",
                    help="The input file is preprocessed source code, including C or assembly code"),
        make_option("-r", "--vfr-file", dest="FileType", const="Vfr", action="store_const",
                    help="The input file is preprocessed VFR file"),
        make_option("--Vfr-Uni-Offset", dest="FileType", const="VfrOffsetBin", action="store_const",
                    help="The input file is EFI image"),
        make_option("--asl-deps", dest="AslDeps", const="True", action="store_const",
                    help="Generate Asl dependent files."),
        make_option("-a", "--asl-file", dest="FileType", const="Asl", action="store_const",
                    help="The input file is ASL file"),
        make_option("--asm-file", dest="FileType", const="Asm", action="store_const",
                    help="The input file is asm file"),
        make_option("-c", "--convert-hex", dest="ConvertHex", action="store_true",
                    help="Convert standard hex format (0xabcd) to MASM format (abcdh)"),
        make_option("-l", "--trim-long", dest="TrimLong", action="store_true",
                    help="Remove postfix of long number"),
        make_option("-i", "--include-path-file", dest="IncludePathFile",
                    help="The input file is include path list to search for ASL include file"),
        make_option("-o", "--output", dest="OutputFile",
                    help="File to store the trimmed content"),
        make_option("--ModuleName", dest="ModuleName", help="The module's BASE_NAME"),
        make_option("--DebugDir", dest="DebugDir",
                    help="Debug Output directory to store the output files"),
        make_option("-v", "--verbose", dest="LogLevel", action="store_const", const=EdkLogger.VERBOSE,
                    help="Run verbosely"),
        make_option("-d", "--debug", dest="LogLevel", type="int",
                    help="Run with debug information"),
        make_option("-q", "--quiet", dest="LogLevel", action="store_const", const=EdkLogger.QUIET,
                    help="Run quietly"),
        make_option("-?", action="help", help="show this help message and exit"),
    ]
    # use clearer usage to override default usage message
    UsageString = "%prog [-s|-r|-a|--Vfr-Uni-Offset] [-c] [-v|-d <debug_level>|-q] [-i <include_path_file>] [-o <output_file>] [--ModuleName <ModuleName>] [--DebugDir <DebugDir>] [<input_file>]"
    Parser = OptionParser(description=__copyright__, version=__version__, option_list=OptionList, usage=UsageString)
    Parser.set_defaults(FileType="Vfr")
    Parser.set_defaults(ConvertHex=False)
    Parser.set_defaults(LogLevel=EdkLogger.INFO)
    Options, Args = Parser.parse_args()
    # error check
    if Options.FileType == 'VfrOffsetBin':
        # VfrOffsetBin may run without an input file at all.
        if len(Args) == 0:
            return Options, ''
        elif len(Args) > 1:
            EdkLogger.error("Trim", OPTION_NOT_SUPPORTED, ExtraData=Parser.get_usage())
    # Every other file type requires exactly one input file.
    if len(Args) == 0:
        EdkLogger.error("Trim", OPTION_MISSING, ExtraData=Parser.get_usage())
    if len(Args) > 1:
        EdkLogger.error("Trim", OPTION_NOT_SUPPORTED, ExtraData=Parser.get_usage())
    InputFile = Args[0]
    return Options, InputFile
def parse_options(): """Parse command line arguments and options""" usage = "usage: %prog path_to_whisper_storage" parser = OptionParser(usage=usage) options, args = parser.parse_args() if not args: print parser.get_usage() sys.exit() return options, args
def Options():
    """Build the Trim command-line parser (Edk/ECP variant) and parse sys.argv.

    Returns (Options, InputFile).  InputFile may be '' for the
    VfrOffsetBin file type, which accepts zero positional arguments;
    all other file types require exactly one input file.
    """
    OptionList = [
        make_option("-s", "--source-code", dest="FileType", const="SourceCode", action="store_const",
                    help="The input file is preprocessed source code, including C or assembly code"),
        make_option("-r", "--vfr-file", dest="FileType", const="Vfr", action="store_const",
                    help="The input file is preprocessed VFR file"),
        make_option("--Vfr-Uni-Offset", dest="FileType", const="VfrOffsetBin", action="store_const",
                    help="The input file is EFI image"),
        make_option("-a", "--asl-file", dest="FileType", const="Asl", action="store_const",
                    help="The input file is ASL file"),
        make_option("-8", "--Edk-source-code", dest="FileType", const="EdkSourceCode", action="store_const",
                    help="The input file is source code for Edk to be trimmed for ECP"),
        make_option("-c", "--convert-hex", dest="ConvertHex", action="store_true",
                    help="Convert standard hex format (0xabcd) to MASM format (abcdh)"),
        make_option("-l", "--trim-long", dest="TrimLong", action="store_true",
                    help="Remove postfix of long number"),
        make_option("-i", "--include-path-file", dest="IncludePathFile",
                    help="The input file is include path list to search for ASL include file"),
        make_option("-o", "--output", dest="OutputFile",
                    help="File to store the trimmed content"),
        make_option("--ModuleName", dest="ModuleName", help="The module's BASE_NAME"),
        make_option("--DebugDir", dest="DebugDir",
                    help="Debug Output directory to store the output files"),
        make_option("-v", "--verbose", dest="LogLevel", action="store_const", const=EdkLogger.VERBOSE,
                    help="Run verbosely"),
        make_option("-d", "--debug", dest="LogLevel", type="int",
                    help="Run with debug information"),
        make_option("-q", "--quiet", dest="LogLevel", action="store_const", const=EdkLogger.QUIET,
                    help="Run quietly"),
        make_option("-?", action="help", help="show this help message and exit"),
    ]
    # use clearer usage to override default usage message
    UsageString = "%prog [-s|-r|-a|--Vfr-Uni-Offset] [-c] [-v|-d <debug_level>|-q] [-i <include_path_file>] [-o <output_file>] [--ModuleName <ModuleName>] [--DebugDir <DebugDir>] [<input_file>]"
    Parser = OptionParser(description=__copyright__, version=__version__, option_list=OptionList, usage=UsageString)
    Parser.set_defaults(FileType="Vfr")
    Parser.set_defaults(ConvertHex=False)
    Parser.set_defaults(LogLevel=EdkLogger.INFO)
    Options, Args = Parser.parse_args()
    # error check
    if Options.FileType == 'VfrOffsetBin':
        # VfrOffsetBin may run without an input file at all.
        if len(Args) == 0:
            return Options, ''
        elif len(Args) > 1:
            EdkLogger.error("Trim", OPTION_NOT_SUPPORTED, ExtraData=Parser.get_usage())
    # Every other file type requires exactly one input file.
    if len(Args) == 0:
        EdkLogger.error("Trim", OPTION_MISSING, ExtraData=Parser.get_usage())
    if len(Args) > 1:
        EdkLogger.error("Trim", OPTION_NOT_SUPPORTED, ExtraData=Parser.get_usage())
    InputFile = Args[0]
    return Options, InputFile
def run():
    """Replace a MoinMoin wiki page's content with text read from stdin.

    Positional arguments: <path to wiki> <pagename>.  Optionally
    authenticates as the local unix user (-a) and records a change
    comment (-c).  Exits with status 2 on bad usage.
    """
    usage = "usage: %prog [options] <path to wiki> <pagename>"
    parser = OptionParser(usage=usage)
    parser.add_option("-v", "--verbose", action="store_true", dest="verbose", help="Report success")
    parser.add_option("-a", "--auth", action="store_true", dest="auth", help="Use local user-based wiki authentication")
    parser.add_option("-c", "--comment", dest="comment", help="Optional change comment for the edit")
    (options, args) = parser.parse_args()
    if len(args) != 2:
        print parser.get_usage()
        sys.exit(2)
    # Configdir to path, so wikiconfig can be imported by Request
    cp = args[0]
    cp2 = os.path.join(cp, 'config')
    if os.path.isdir(cp2):
        # Prefer the wiki's 'config' subdirectory when it exists.
        cp = cp2
    sys.path.insert(0, cp)
    # Page name arrives in the filesystem encoding.
    pagename = unicode(args[1], sys.getfilesystemencoding())
    # Make a new request for the page
    req = MinimalMoinScript(pagename, parse=False)
    req.page = Page(req, pagename)
    # Auth
    if options.auth:
        import posix, pwd
        # We need to import contexts before importing users, because otherwise
        # the relative imports in MoinMoin will fail.
        import MoinMoin.web.contexts
        from MoinMoin.user import User
        # Authenticate as the unix user running this script.
        req.user = User(req, auth_username=pwd.getpwuid(posix.getuid())[0])
    # The new page body is read from stdin.
    mytext = unicode(sys.stdin.read(), sys.getfilesystemencoding())
    if options.comment:
        savetext(req, pagename, mytext, comment=unicode(options.comment))
    else:
        savetext(req, pagename, mytext)
    # Must finish the request to ensure that metadata is saved
    graphdata_close(req)
def MigrationOptionParser(Source, Destinate):
    """Build and run the option parser for a Source -> Destinate migration.

    Returns (Options, InputFile).  Raises MigrationError when the input
    file is missing, when -o and -a are combined, or when neither is given.
    """
    # use clearer usage to override default usage message
    UsageString = "%prog [-a] [-o <output_file>] <input_file>"
    Parser = OptionParser(description=__copyright__, version=__version__, usage=UsageString)
    HelpText = "The name of the %s file to be created." % Destinate
    Parser.add_option("-o", "--output", dest="OutputFile", help=HelpText)
    HelpText = "Automatically create the %s file using the name of the %s file and replacing file extension" % (
        Source, Destinate)
    Parser.add_option("-a", "--auto", dest="AutoWrite", action="store_true", default=False, help=HelpText)
    Options, Args = Parser.parse_args()
    # error check: exactly one positional input file is accepted
    if len(Args) == 0:
        raise MigrationError(OPTION_MISSING, name="Input file", usage=Parser.get_usage())
    if len(Args) > 1:
        raise MigrationError(OPTION_NOT_SUPPORTED, name="Too many input files", usage=Parser.get_usage())
    InputFile = Args[0]
    if not os.path.exists(InputFile):
        raise MigrationError(FILE_NOT_FOUND, name=InputFile)
    if Options.OutputFile:
        # -o and -a are mutually exclusive.
        if Options.AutoWrite:
            raise MigrationError(OPTION_CONFLICT, arg1="-o", arg2="-a", usage=Parser.get_usage())
    else:
        if Options.AutoWrite:
            # Derive the output name from the input name and new extension.
            Options.OutputFile = os.path.splitext(
                InputFile)[0] + "." + Destinate.lower()
        else:
            raise MigrationError(OPTION_MISSING, name="-o", usage=Parser.get_usage())
    return Options, InputFile
def main():
    """Entry point for the mwhois domain-availability scanner.

    Runs the GUI, a single-domain lookup, or a wordlist-driven search
    (optionally feeding results into a MySQL database).
    """
    usage = "usage: %prog [options] -i [file-to-read-from] -o [file-to-write-too]\n \n Examples:\n mwhois -t net -i /tmp/wordlist -o /tmp/domains\n mwhois -s sourceforge.net\n mwhois --gui \n\nWordlists Found @ http://www.packetstormsecurity.org/Crackers/wordlists/"
    parser = OptionParser(usage=usage)
    try:
        parser.add_option("-t", "--tld", action="store", type="string", dest="tld", help="--tld com/net/org/biz/edu/info - Search for these TLD's (Only use one of these tlds for each whois search")
        parser.add_option("-s", "--single", action="store_true", dest="single", help="Single domain search")
        parser.add_option("-a", "--advance", action="store_true", dest="advance", help="Advanced domain search")
        parser.add_option("-i", "--file-in", dest="filein", type="string", help="File to read from")
        parser.add_option("-o", "--file-out", dest="fileout", type="string", help="File to write to")
        parser.add_option("--sql", action="store_true", dest="sql", help="Connect to a MySQL database")
        parser.add_option("--host", dest="host", type="string", help="Host address for MySQL database connection (Default 127.0.0.1)")
        parser.add_option("--port", dest="port", type="int", help="Port to use for MySQL database connection (Default 3306)")
        parser.add_option("--user", dest="user", type="string", help="User to use for MySQL database connection")
        parser.add_option("-p", "--passwd", action='store_true', dest="passwd", help="Prompt for a password to use with MySQL database connection")
        parser.add_option("--database", dest="database", type="string", help="Database to use for MySQL database query")
        parser.add_option("--table", dest="table", type="string", help="Table to use for MySQL database query")
        parser.add_option("--column", dest="column", type="string", help="Column to use for MySQL database query")
        parser.add_option("-g", "--gui", action="store_true", dest="gui", help="Start GUI Interface")
        (options, args) = parser.parse_args()
        if START_WITH_GUI == 1 or options.gui == True:
            window = StartGUI()
            window.main()
        if options.single == True:
            # NOTE(review): uses sys.argv[2] directly rather than a parsed
            # option, so -s must be argv[1] and the domain argv[2]; confirm.
            w = WhoisSearch(sys.argv[2], None, None, None, None)
            w.single_search()
        else:
            if options.sql == True:
                # Results are staged in a temp file before the DB import.
                # NOTE(review): options.fileout may be None here, which
                # would raise TypeError on the concatenation -- confirm -o
                # is effectively mandatory with --sql.
                options.filein = options.fileout + ".tmp"
                if options.passwd == True:
                    options.passwd = getpass.getpass()
                conn = WhoisServer()
                DBConnection().connection(options.user, options.passwd, options.host, options.port, options.database, options.table, \
                                          options.column, options.filein)
            w = WhoisSearch(None, options.tld, options.filein, options.fileout, None)
            if options.advance == True:
                w.advance_search()
            else:
                w.basic_search()
            try:
                # Best-effort cleanup of the staging file.
                os.remove(options.fileout + ".tmp")
            except Exception, e:
                pass
    except IOError as (errno, strerror):
        print "\nI/O error({0}): {1}".format(errno, strerror) +"\n"
        print parser.get_usage()
        sys.exit()
def main(): """ Program entry point when run interactively. """ # prepare option parser parser = OptionParser( usage="usage: %prog [options] filename jobId jobId ...", description="Read a text file with jobs, manipulate their state.", epilog= "In the given file, each line beginning with a dot and a space (. ) will be executed. The file is modified to reflect the execution state of each job (r-running, d-done, !-failed, e-error)." ) parser.add_option("-s", "--set", dest="set_state", default="", help="set state to STATE [default: no change]", metavar="STATE") parser.add_option("-l", "--list", dest="list", default=False, action="store_true", help="list given jobs [default: no]") parser.add_option("-a", "--all", dest="all_jobs", default=False, action="store_true", help="affect all jobs [default: no]") # parse options (options, args) = parser.parse_args() # get file name if len(args) < 1: print "Need a filename (a list of jobs)" print "" print parser.get_usage() sys.exit(1) fname = args[0] jobIds = [] if len(args) > 1: jobIds = args[1:] # process file process_file(fname, jobIds, options)
def main():
    """ Main CLI handler. """
    parser = OptionParser(usage=_prog_usage, add_help_option=True,
                          version=_prog_version, description=_prog_description)
    parser.add_option('-n', '--pop-size', '-N', action='store', dest='pop_size',
                      type='int', default=1, metavar='Ne',
                      help='effective HAPLOID population size (default=%default [assumes edge lengths are in units of Ne])')
    (opts, args) = parser.parse_args()
    # At least one tree file must be named on the command line.
    if not args:
        sys.stderr.write("%s" % parser.get_usage())
        sys.exit(1)
    for raw_path in args:
        fpath = os.path.expandvars(os.path.expanduser(raw_path))
        if not os.path.exists(fpath):
            sys.stderr.write('File not found: "%s"\n' % fpath)
            continue
        sys.stderr.write('Reading: "%s"\n' % fpath)
        d = datasets.Dataset()
        # One log-probability per tree, written to stdout.
        for t in d.read_trees(open(fpath, "rU"), "NEXUS"):
            p = coalescent.log_probability_of_coalescent_tree(t, opts.pop_size)
            sys.stdout.write("%s\n" % p)
def handleClArgs():
    """Build the redux option parser, appending extended usage help.

    Returns (options, args, parser) so callers can reuse the parser for
    error reporting.
    """
    parser = OptionParser()
    # Extend optparse's auto-generated usage line (minus its trailing
    # newline) with positional arguments and a descriptive paragraph.
    usage = parser.get_usage()[:-1] + " [date] file" + \
"""
Given a number, redux will attempt to construct a Gemini file name from the current date. An alternate date can be specified either as an argument or with the prefix (-p) option. The default directory is the operations directory; an alternate directory can be specified with the -d option. Files may also be specified directly with a full directory path.
"""
    parser.set_usage(usage)
    parser.add_option("-d", "--directory", action="store", dest="directory",
                      default=None,
                      help="Specify an input data directory, if not adata " + \
                           "and not included in name.")
    parser.add_option("-p", "--prefix", action="store", dest="prefix",
                      default=None,
                      help="Specify a file prefix if not auto " + \
                           "(eg. (N/S)YYYYMMDDS).")
    parser.add_option("-c", "--clean", action="store_true", dest="clean",
                      default=False,
                      help="Restore current directory to default state: " + \
                           "remove all caches and kill all " + \
                           "reduce and adcc instances")
    parser.add_option("-u", "--upload", action="store_true", dest="upload",
                      default=False,
                      help="Upload any generated calibrations to the " + \
                           "calibration service")
    parser.add_option("-s", "--stack", action="store_true", dest="stack",
                      default=False,
                      help="Perform stacking of all previously reduced "+ \
                           "images associated with the current image")
    (options, args) = parser.parse_args()
    return options, args, parser
def Options():
    """Build the PatchPcdValue command-line parser and parse sys.argv.

    Returns (Options, InputFile) where InputFile is the last positional
    argument; reports an error via EdkLogger when no input file is given.
    """
    option_list = [
        make_option("-f", "--offset", dest="PcdOffset", action="store", type="int",
                    help="Start offset to the image is used to store PCD value."),
        make_option("-u", "--value", dest="PcdValue", action="store",
                    help="PCD value will be updated into the image."),
        make_option("-t", "--type", dest="PcdTypeName", action="store",
                    help="The name of PCD data type may be one of VOID*,BOOLEAN, UINT8, UINT16, UINT32, UINT64."),
        make_option("-s", "--maxsize", dest="PcdMaxSize", action="store", type="int",
                    help="Max size of data buffer is taken by PCD value.It must be set when PCD type is VOID*."),
        make_option("-v", "--verbose", dest="LogLevel", action="store_const",
                    const=EdkLogger.VERBOSE, help="Run verbosely"),
        make_option("-d", "--debug", dest="LogLevel", type="int",
                    help="Run with debug information"),
        make_option("-q", "--quiet", dest="LogLevel", action="store_const",
                    const=EdkLogger.QUIET, help="Run quietly"),
        make_option("-?", action="help", help="show this help message and exit"),
    ]
    # use clearer usage to override default usage message
    parser = OptionParser(description=__copyright__, version=__version__,
                          option_list=option_list,
                          usage="%prog -f Offset -u Value -t Type [-s MaxSize] <input_file>")
    parser.set_defaults(LogLevel=EdkLogger.INFO)
    opts, args = parser.parse_args()
    # error check: an input file is mandatory
    if len(args) == 0:
        EdkLogger.error("PatchPcdValue", PARAMETER_INVALID, ExtraData=parser.get_usage())
    # The input file is the last positional argument.
    return opts, args[-1]
def parse_options(self, arguments=None):
    """Parse CLI options and prepare the build environment.

    Expects exactly one positional argument of the form
    <major>.<minor>.<patch>.<platform>; publishes the selected target
    through module-level globals and creates the build cache directory.
    """
    if arguments == None:
        arguments = sys.argv[1:]
    parser = OptionParser(usage='usage: %prog [options] moduleset')
    parser.add_option('-p', '--pretend', action='store_true', dest='pretend',
                      default=False, help='skips compiling and linking steps')
    (self.options, self.args) = parser.parse_args(arguments)
    if not len(self.args) == 1:
        error(parser.get_usage())
    # Prepare build environment
    # Split "<version>.<platform>" into its components.
    target = self.args[0].split('.')
    version = '%s.%s.%s' % (target[0], target[1], target[2])
    platform = target[3]
    if platform in PLATFORMS.keys():
        # Publish the selected target via module-level globals.
        global PRODUCT_VERSION
        global WIN_PLATFORM
        global WIX_PLATFORM
        PRODUCT_VERSION = version
        WIN_PLATFORM = platform
        WIX_PLATFORM = PLATFORMS[platform]
    else:
        error('Unknown platform (%s).' % platform)
    # Per-target cache directory for build artifacts.
    global CACHEDIR
    CACHEDIR = join(TMPDIR, 'cache', self.args[0])
    if not isdir(CACHEDIR):
        os.makedirs(CACHEDIR)
def execute(self):
    """Parse command-line options and dispatch the positional command.

    Writes the usage message to stderr when _execute() reports failure;
    prints usage and returns when no command was given.
    """
    def option_city(option, opt_str, value, parser):
        # optparse callback: accept only the known three-letter city codes.
        if value not in ("BDX", "LIL", "LYN", "MAR", "MPL", "NAN", "NCE", "NCY", "PAR", "REN", "STG", "TLS"):
            raise OptionValueError("Unknown city: %s" % value)
        setattr(parser.values, option.dest, value)
    parser = OptionParser(self.usage, version=VERSION)
    parser.add_option("-c", "--config", dest="filename",
                      help="using a specific config file FILE", metavar="FILE")
    parser.add_option("-j", "--json", action="store_true", dest="json",
                      help="display output as json", default=False)
    parser.add_option("-y", "--yes", action="store_true", dest="yes",
                      help="Assume Yes to all queries and do not prompt", default=False)
    parser.add_option("-q", "--quiet", action="store_false", dest="verbose",
                      default=True, help="don't print status messages to stdout")
    parser.add_option("-C", "--city", dest="city", action="callback", type="str",
                      nargs=1, help="restrict actions to a specific city",
                      default=None, callback=option_city)
    (self._options, args) = parser.parse_args()
    # A command is required; bail out with the usage text otherwise.
    if len(args) == 0:
        return parser.print_usage()
    #print("options: %s" % str(self._options))
    #print("args: %s" % str(args))
    if not self._execute(args):
        sys.stderr.write(parser.get_usage())
def main():
    """Convert a WURFL XML file into a Python database module."""
    option_parser = OptionParser(usage='%prog <WURFL XML file>')
    option_parser.add_option('-o', '--output', dest='output', default='wurfl.py',
                             help='Name of the database Python module to produce. Defaults to wurfl.py.')
    option_parser.add_option('-g', '--group', dest='groups', default=None, action='append',
                             help='Name of a capability group to be included in the output database. If no groups are specified, all input database capabilities groups are included in the output.')
    options, args = option_parser.parse_args()
    # The WURFL XML input file is a mandatory positional argument.
    if not args:
        sys.stderr.write(option_parser.get_usage())
        sys.exit(1)
    Processor(args[0], options.groups, options.output).process()
def main(argv):
    '''Main function.'''
    # Reopen stdout unbuffered so output is never lost on early exit.
    sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
    parser = OptionParser()
    parser.add_option('--description', dest='description', action='store',
                      help='description for service secret file')
    parser.add_option('--output-file', dest='output_file_name', action='store',
                      help='destination file for service secret')
    # Explicit reference to argv is for benefit of unittest.
    options, extra = parser.parse_args(argv[1:])
    # No positional arguments are accepted.
    if extra:
        print >> sys.stderr, parser.get_usage()
        sys.exit(1)
    write_hexdigest(generate_hexdigest(),
                    path_random=options.output_file_name,
                    description=options.description)
    sys.exit(0)
def main(): global task parser = OptionParser( "%prog --daemon_status_id <id> --queue_name <queue_name> \ [--nice <0>] [--stdout <file_name|DEFAULT>] [--stderr <file_name>|STDOUT>] [--debug]" ) parser.add_option( "--daemon_status_id", action="store", type="int", help="The id of the daemon status that launched this Task" ) parser.add_option("--queue_name", action="store", type="string", help="The name of the queue from which to read") parser.add_option("--nice", action="store", type="int", default=0, help="nice this process. defaults to 5.") parser.add_option( "--stdout", action="store", type="string", help="Send stdout to this file, or special value 'DEFAULT' \ sends it a the stream unique to this Task request", ) parser.add_option( "--stderr", action="store", type="string", help="Send stderr to this file, or special value 'STDOUT' sends it to stdout", ) parser.add_option("--debug", action="store_true", help="more messages") (options, args) = parser.parse_args() # option parsing if not options.daemon_status_id or not options.queue_name: sys.exit(parser.get_usage()) log.set_logging_debug(options.debug) if not options.nice == 0: os.nice(options.nice) console_stderr = None try: c = SQSConnection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY) q = c.get_queue(options.queue_name) boto_message = q.read() task = __get_task__(boto_message, options.queue_name) if task == None: log.debug("No task in queue '%s' pid:%s" % (options.queue_name, os.getpid())) sys.exit(133) else: log.debug("Starting SQS Queue '%s' Task:%s pid:%s" % (options.queue_name, task.get_id(), os.getpid())) q.delete_message(boto_message) console_stderr = __redirect_outputs__(task, options.stdout, options.stderr) daemon_status = __get_daemon_status__(options.daemon_status_id) __run_task__(task, daemon_status) ending_status = task.get_current_run_status() if ending_status == None: sys.exit(134) if not ending_status.was_successful(): sys.exit(1) except SystemExit, se: # in python 2.4, SystemExit 
extends Exception, this is changed in 2.5 to # extend BaseException, specifically so this check isn't necessary. But # we're using 2.4; upon upgrade, this check will be unecessary but ignorable. sys.exit(se.code)
def process_arguments(path_prefix):
    """Parse the rnuc2tab command line; return (result files, table type)."""
    parser = OptionParser(
        usage="usage: %prog [options] main_input_file.inp [specific files...]",
        description="Transform FLUKA RESNUCLe .tab.lis files in compact tables",
        epilog=("rnuc2tab takes all the RESNUCLe tab.lis files obtained "
                "as FLUKA output and orders them in csv format "
                "or in a simple .tex file ready to be used as LaTeX table."
                "\n\n"
                "Results will be output to files in the current "
                "directory."))
    parser.add_option("-t", "--table-type", dest="opt_table", default="csv",
                      choices=["csv", "tex"],
                      help="Type of output: csv or tex file")
    opts, positional = parser.parse_args()
    # The main input file is mandatory.
    if not positional:
        sys.exit(parser.get_usage())
    # Strip the .inp extension to obtain the base name.
    input_base = re.sub(r'\.inp$', '', positional[0])
    # Without explicitly listed files, discover them next to the input.
    explicit = positional[1:]
    if explicit:
        res_files = explicit
    else:
        res_files = find_files(path_prefix, input_base)
    return (res_files, opts.opt_table)
def main(): usage = "usage: %prog [-a admin_username] username" parser = OptionParser(usage=usage) parser.add_option("-a", "--admin", dest="admin", default="", help="Admin username") parser.add_option("-s", "--server", dest="server", default="localhost", help="External server name (default: localhost)") parser.add_option("-w", "--write", action="store_true", dest="write", default=False, help="Write authentication file for user (for superuser use)") (options, args) = parser.parse_args() if not args: print >> sys.stderr, parser.get_usage() sys.exit(1) user = args[0] if options.admin: admin_dir = os.path.join(os.path.expanduser("~"+options.admin), gterm.APP_DIRNAME) else: admin_dir = gterm.App_dir auth_code, port = gterm.read_auth_code(appdir=admin_dir, server=options.server) user_code = gterm.user_hmac(auth_code, user, key_version="1") if not options.write: print gterm.dashify(user_code) else: user_dir = os.path.join(os.path.expanduser("~"+user), gterm.APP_DIRNAME) gterm.create_app_directory(appdir=user_dir) gterm.write_auth_code(user_code, appdir=user_dir, user=user, server=options.server)
def process_arguments():
    """Parse the combiner command line; return (input_base, scorings_filename, options)."""
    epilog_text = ("Combines FLUKA results from split jobs using the "
                   "appropriate combiners. Processes ZIP files in the "
                   "current directory named main_input_file_<counter>.zip"
                   "\n\n"
                   "The desired FLUKA outputs and their interpretation "
                   "are read from scorings_file. For example,"
                   "\n"
                   "\t$ cat scorings"
                   "\t31 USRBIN"
                   "\t21 USRBDX"
                   "\t22 USRBDX"
                   "\t$ combine.py main_input_file.inp scorings"
                   "\n"
                   "will treat the *_fort.31 (respectively *_fort.21, "
                   "*_fort.22) files found in the ZIP files as USRBIN "
                   "(USRBDX, USRBDX) files, combining them using the "
                   "'usbsuw' ('usxsuw', 'ustsuw') tool."
                   "\n\n"
                   "Results will be output to files in the current "
                   "directory.")
    parser = OptionParser(usage="usage: %prog main_input_file.inp scorings_file",
                          version="%prog "+VERSION,
                          description="Submit split FLUKA simulation to LXBATCH",
                          epilog=epilog_text)
    opts, positional = parser.parse_args()
    # Both the input file and the scorings file are required.
    if len(positional) < 2:
        sys.exit(parser.get_usage())
    # Strip the .inp extension to obtain the base name.
    return (re.sub(r'\.inp$', '', positional[0]), positional[1], opts)
def __init__(self):
    """Build the vod-id option parser and expose its entry points as attributes."""
    parser = OptionParser()
    # Both time options funnel through the instance's toSeconds callback.
    for short_opt, long_opt, meta, default in (
            ('-s', '--start_time', 'START', 0),
            ('-e', '--end_time', 'END', sys.maxsize)):
        parser.add_option(short_opt, long_opt, metavar=meta, action='callback',
                          callback=self.toSeconds, type='string', default=default)
    parser.set_usage('%prog [options] vod_id')
    self.getUsage = parser.get_usage
    self.parseArgs = parser.parse_args
def main():
    """ Program entry point when run interactively. """
    # prepare option parser
    parser = OptionParser(usage="usage: %prog [options] filename",
                          description="Read a text file with jobs, execute them one by one.",
                          epilog="In the given file, each line beginning with a dot and a space (. ) will be executed. The file is modified to reflect the execution state of each job (r-running, d-done, !-failed, e-error).")
    parser.add_option("-r", "--retry", dest="retry", default=False, action="store_true",
                      help="retry failed jobs [default: no]")
    parser.add_option("-l", "--logfile", dest="logfile",
                      default=os.path.join(tempfile.gettempdir(), "runmaker4-server.log"),
                      help="log output to FILENAME [default: %default]", metavar="FILENAME")
    parser.add_option("-v", "--verbose", dest="count_verbose", default=0, action="count",
                      help="increase verbosity [default: don't log infos, debug]")
    parser.add_option("-q", "--quiet", dest="count_quiet", default=0, action="count",
                      help="decrease verbosity [default: log warnings, errors]")
    parser.add_option("-p", "--port", dest="port", type="int", default=9998, action="store",
                      help="TCP PORT the server has to listen to [default: %default]", metavar="PORT")
    parser.add_option("-d", "--daemon", dest="daemonize", default=False, action="store_true",
                      help="detach and run as daemon [default: no]")
    parser.add_option("-t", "--tokenfile", dest="tokenfile",
                      default=os.path.join(os.path.expanduser("~"), ".runmaker4.token"),
                      action="store",
                      help="string representing the file where the token is stored [default: %default]")
    # parse options
    (options, args) = parser.parse_args()
    # Map the -v/-q counts onto one of the four levels, clamped to the valid
    # range; the no-flags baseline is index 1 == WARN.
    _LOGLEVELS = (logging.ERROR, logging.WARN, logging.INFO, logging.DEBUG)
    loglevel = _LOGLEVELS[max(0, min(1 + options.count_verbose - options.count_quiet, len(_LOGLEVELS)-1))]
    # get file name
    if len(args) != 1:
        print "Need exactly one filename (a list of all jobs to run)"
        print ""
        print parser.get_usage()
        sys.exit(1)
    fname = args[0]
    logging.basicConfig(filename=options.logfile, level=loglevel)
    if not options.daemonize:
        # Foreground mode: also echo log records to the console.
        logging.getLogger().addHandler(logging.StreamHandler())
    else:
        print("The --daemon option is not implemented.")
    logging.debug("Logging to %s" % options.logfile)
    # Open the job file unbuffered read/write so state markers hit disk immediately.
    f = open(fname, 'rb+', 0)
    jobs = read_jobs(f)
    # Generate a short random client token.
    # NOTE(review): random.choice is not cryptographically strong; if this token
    # guards anything sensitive, os.urandom-based generation should be used.
    tokenSize=6
    tokenChars=string.ascii_uppercase + string.digits
    token = ''.join(random.choice(tokenChars) for _ in range(tokenSize))
    print "Token for runmaker4-client.py: %s (written to %s)" % (token, options.tokenfile)
    if (options.tokenfile != ""):
        #we need to write the token to a file
        # 0600: token file readable/writable by the owner only.
        with os.fdopen(os.open(options.tokenfile, os.O_WRONLY | os.O_CREAT, 0600), 'w') as handle:
            handle.write(token)
def main():
    """Generate project-specific 'gitwash' documentation.

    Clones the gitwash repository, substitutes the project/repo/user names
    into its files, copies the result under out_path, and rewrites link
    targets.  Positional arguments: out_path project_name.
    """
    parser = OptionParser()
    # Keep optparse's auto-generated usage line and append our own text.
    parser.set_usage(parser.get_usage().strip() + USAGE)
    parser.add_option("--repo-name", dest="repo_name",
                      help="repository name - e.g. nitime", metavar="REPO_NAME")
    parser.add_option("--github-user", dest="main_gh_user",
                      help="github username for main repo - e.g fperez", metavar="MAIN_GH_USER")
    parser.add_option("--gitwash-url", dest="gitwash_url",
                      help="URL to gitwash repository - default %s" % GITWASH_CENTRAL,
                      default=GITWASH_CENTRAL, metavar="GITWASH_URL")
    parser.add_option("--gitwash-branch", dest="gitwash_branch",
                      help="branch in gitwash repository - default %s" % GITWASH_BRANCH,
                      default=GITWASH_BRANCH, metavar="GITWASH_BRANCH")
    parser.add_option("--source-suffix", dest="source_suffix",
                      help="suffix of ReST source files - default '.rst'",
                      default='.rst', metavar="SOURCE_SUFFIX")
    parser.add_option("--project-url", dest="project_url",
                      help="URL for project web pages", default=None, metavar="PROJECT_URL")
    parser.add_option("--project-ml-url", dest="project_ml_url",
                      help="URL for project mailing list", default=None, metavar="PROJECT_ML_URL")
    (options, args) = parser.parse_args()
    if len(args) < 2:
        parser.print_help()
        sys.exit()
    # NOTE(review): more than two positional args will raise ValueError here;
    # only the too-few case is guarded above.
    out_path, project_name = args
    # Defaults: repo name falls back to the project name, GH user to the repo name.
    if options.repo_name is None:
        options.repo_name = project_name
    if options.main_gh_user is None:
        options.main_gh_user = options.repo_name
    repo_path = clone_repo(options.gitwash_url, options.gitwash_branch)
    try:
        # Substitute placeholder tokens in the cloned templates and copy them out.
        copy_replace((('PROJECTNAME', project_name),
                      ('REPONAME', options.repo_name),
                      ('MAIN_GH_USER', options.main_gh_user)),
                     repo_path, out_path,
                     cp_globs=(pjoin('gitwash', '*'),),
                     rep_globs=('*.rst',),
                     renames=(('\.rst$', options.source_suffix),))
        make_link_targets(project_name, options.main_gh_user, options.repo_name,
                          pjoin(out_path, 'gitwash', 'known_projects.inc'),
                          pjoin(out_path, 'gitwash', 'this_project.inc'),
                          options.project_url, options.project_ml_url)
    finally:
        # Always remove the temporary clone, even if substitution fails.
        shutil.rmtree(repo_path)
def main():
    """Report status for jobs/iterations selected on the command line."""
    global task, iteration, region
    parser = OptionParser("%prog --jobs | --job_name <name> \
[--iteration_id <id> | --iterations | --incomplete [--remaining]] \
[--resources] [--all_statuses] [--debug]")
    parser.add_option("--job_name", action="store", help="the name of the job")
    parser.add_option("--jobs", action="store_true", help="list all jobs")
    parser.add_option("--iteration_id", action="store", type="int",
                      help="show status for given iteration id")
    parser.add_option("--iterations", action="store_true",
                      help="show iterations for given job")
    parser.add_option("--incomplete", action="store_true",
                      help="show status of incomplete iterations for this job")
    parser.add_option("--remaining", action="store_true",
                      help="only show tasks that have completed")
    parser.add_option("--all_statuses", action="store_true",
                      help="show all statuses for each task/iteration, not just latest/running")
    parser.add_option("--resources", action="store_true",
                      help="show all resource demands for all tasks")
    parser.add_option("--debug", action="store_true", help="more messages")
    (options, args) = parser.parse_args()
    # BUG FIX: `raise parser.get_usage()` raised a bare string, which is a
    # TypeError on Python >= 2.6 (string exceptions were removed); exit with
    # the usage message instead, matching the sibling entry points.
    if not options.job_name and not options.jobs:
        sys.exit(parser.get_usage())
    if options.remaining and not options.iteration_id:
        sys.exit(parser.get_usage())
    if options.all_statuses:
        exclude_iter_statuses = []
    else:
        exclude_iter_statuses = ['DONE']
    if options.job_name:
        job = core.Job.get(options.job_name)
        show_iterations = []
        if options.iteration_id:
            iteration = core.Iteration.get(options.iteration_id)
            if not iteration.get_job() == job:
                raise Exception("Iteration doesn't match job!")
            show_iterations.append(iteration)
        elif options.incomplete:
            for iteration in get_iterations(job, exclude_iter_statuses):
                show_iterations.append(iteration)
        if options.iterations:
            print_job_iterations(job, exclude_iter_statuses)
    if options.jobs:
        print_jobs(get_jobs(), exclude_iter_statuses)
    elif len(show_iterations) == 0:
        print_job(job, options.remaining, show_resources=options.resources)
    else:
        for iteration in show_iterations:
            print_job(job, options.remaining, iteration,
                      show_all_statuses=options.all_statuses,
                      show_resources=options.resources)
def main():
    """Install the proteoseq pipeline: unpack a bundled wine tree into the
    home directory, copy the tool files into the install directory, and
    write the pipeline's config.ini.

    NOTE(review): every shell command below interpolates user-supplied paths
    unquoted into os.system; paths containing spaces or shell metacharacters
    will break (or worse). Consider subprocess with argument lists.
    """
    usage = 'usage: %prog --homedir /u/home/y/ybwang --install installdir'
    parser = OptionParser(usage)
    parser.add_option('--homedir', dest='homedir', help='HOME directory [Default %default]')
    parser.add_option('--install', dest='install', help='INSTALL directory [Default %default]')
    (options, args) = parser.parse_args()
    # check parameters
    if options.homedir is None or options.install is None:
        sys.exit("[ERROR] " + parser.get_usage())
    # warning information
    warnings.formatwarning = custom_formatwarning
    logging.basicConfig(filename='pipeline.log', level=logging.INFO)
    ## star install
    # 1. copy wine directory to home directory
    warnings.warn('[INSTALL] copy wine dir to home directory: ' + options.homedir)
    # Strip any trailing slash so later concatenations are well-formed.
    HOMEDIR = re.sub(r'\/$', '', options.homedir)
    INSTALLDIR = re.sub(r'\/$', '', options.install)
    os.system('tar -zxvf wine_dir.tgz -C ' + HOMEDIR)
    # Two escaped forms of the home path for the sed substitutions below:
    # homedir1 escapes '/' for the sed pattern, homedir2 produces doubled
    # backslashes — presumably for Windows-style paths in the .reg files
    # (TODO confirm against the placeholder contents).
    homedir1 = HOMEDIR.replace('/', '\/')
    homedir2 = HOMEDIR.replace('/', '\\\\\\\\')
    # Rewrite the placeholder tokens in the wine registry files to the real home path.
    cmd1 = 'sed -i s\'/homedirXXXXXB/' + homedir1 + '/g\' ' + HOMEDIR + '/.wine/*.reg'
    cmd2 = 'sed -i s\'/homedirXXXXXA/' + homedir2 + '/g\' ' + HOMEDIR + '/.wine/*.reg'
    os.system(cmd1)
    os.system(cmd2)
    # 2. copy files to install directory
    if not os.path.exists(INSTALLDIR):
        os.makedirs(INSTALLDIR)
    warnings.warn('[INSTALL] copy files to install dir: ' + options.install)
    os.system('cp -rf proteoseq/* ' + INSTALLDIR)
    # 3. ln wine
    warnings.warn('[INSTALL] ln wine to install dir')
    os.system('ln -s ' + HOMEDIR + '/wine-1.6.2/bin/wine64 ' + INSTALLDIR + '/bin/wine')
    # 4. change config.ini file
    warnings.warn('[INSTALL] generate configure file')
    # Write the pipeline configuration pointing at the freshly installed tools.
    with open(INSTALLDIR + '/config.ini', 'w') as fout:
        fout.write('[global]\n')
        fout.write('BINDIR = ' + INSTALLDIR + '/bin\n')
        fout.write('CHROMS = ' + '/u/home/f/frankwoe/nobackup/hg19/hg19_by_chrom/\n')
        fout.write('WINE = ' + INSTALLDIR + '/bin/wine\n')
        fout.write('COMETEXE = ' + INSTALLDIR + '/bin/comet/comet.2015025.win64.exe\n')
        fout.write('COMETPAR = ' + INSTALLDIR + '/bin/comet/comet.params.high-low\n')
        fout.write('CRUX = ' + INSTALLDIR + '/bin/crux\n')
        fout.write('BEDTOOLDIR = ' + INSTALLDIR + '/bin/bedtools2/\n')
        fout.write('PERCOLATOR = ' + INSTALLDIR + '/bin/percolator-2.08/bin/percolator')
def main(): from optparse import OptionParser parser = OptionParser('usage: %prog [options] name path') parser.add_option('-v', '--version', dest='version', default='default') parser.add_option('-p', '--platform', dest='platform', default='linux2') parser.add_option('-y', '--python', dest='python', default='default') parser.add_option('-s', '--server', dest='server', default=default_server) argv = sys.argv opts, args = parser.parse_args(argv[1:]) try: name, path = args except Exception, error: print parser.get_usage() sys.exit(1) return
def parse_parameters(args): """returns an option object with the values of the optional parameters and the mandatory arguments in the list""" from optparse import OptionParser usage = "usage: %prog [options] cathcode\n" # parser = OptionParser(usage=usage) # parser.add_option("-w", "--workingDirectory", dest="workingDirectory", default=".", help="directory where the resulting files will be stored") parser.add_option("-m", "--mmult", dest="mmultPath", default='', help="directory of installation for mmult") parser.add_option("-v", "--verbose", action="store_true", dest="verbose", default=False, help="display program status") # (options, args)= parser.parse_args(args) if len(args) != 1: #cathcode is the single mandatory parameter print parser.get_usage() sys.exit(2) return (options, args)
def main(): from optparse import OptionParser usage="usage: %prog [options] program_to_run findparams_configfile" parser = OptionParser(usage=usage) (options, args) = parser.parse_args() if len(args) != 2: print parser.get_usage() return program = args[0] configfile = args[1] #processor = UniqueCfgFileWriter(prefix = "gen.fp") processor = ProgramTester(program_name = program) pp = ParamProcessor(configfile = "configfiles/config.findparams", processor = processor) pp.run()
def main():
    """Run one Task inside an iteration on behalf of a daemon.

    All four id/library flags are required.  Returns True on success (a
    missing run status counts as success), False when the ending status
    reports failure.
    """
    global task, iteration, region
    parser = OptionParser("%prog --daemon_status_id <id> --iteration_id <id> \
--task_library <lib> --task_id <id> [--nice 5] [--stdout <file_name>] [--stderr <file_name>|STDOUT>] [--debug]")
    parser.add_option("--daemon_status_id", action="store", type="int",
                      help="The id of the daemon status that launched this Task")
    parser.add_option("--iteration_id", action="store", type="int",
                      help="The id of the iteration in which this Task runs")
    parser.add_option("--task_library", action="store", type="string",
                      help="The path Task (permalink.tms_impl.models.EnqueudArchiveRequest)")
    parser.add_option("--task_id", action="store", type="string",
                      help="The id of this Task in this library")
    parser.add_option("--nice", action="store", type="int", default=5,
                      help="nice this process. defaults to 5.")
    parser.add_option("--stdout", action="store", type="string",
                      help="Send stdout to this file")
    parser.add_option("--stderr", action="store", type="string",
                      help="Send stderr to this file, or special value 'STDOUT' sends it to stdout")
    parser.add_option("--debug", action="store_true", help="more messages")
    (options, args) = parser.parse_args()
    # option parsing: all four identifiers are mandatory.
    if not options.daemon_status_id or not options.iteration_id \
            or not options.task_library or not options.task_id:
        sys.exit(parser.get_usage())
    if options.debug:
        log.set_logging_debug(options.debug)
    # Keep a handle on the real stderr before any redirection below.
    console_stderr = sys.stderr
    if options.stdout:
        sys.stdout = open(options.stdout, 'a')
    if options.stderr:
        if options.stderr == 'STDOUT':
            # Must happen after the stdout redirection above so both streams merge.
            sys.stderr = sys.stdout
        else:
            sys.stderr = open(options.stderr, 'a')
    if not options.nice == 0:
        os.nice(options.nice)
    try:
        task_class = __get_task_class__(options.task_library)
        task = __get_task__(options.task_library, task_class, options.task_id)
        daemon_status = __get_daemon_status__(options.daemon_status_id)
        iteration = __get_iteration__(options.iteration_id)
        region = daemon_status.get_region()
        __run_task__(task, iteration, daemon_status)
        ending_status = task.get_current_run_status(iteration)
        if not ending_status == None and not ending_status.was_successful():
            # if there's no run status, assume success; resource management may have prevented it from working.
            return False
        return True
    except SystemExit, se:
        # in python 2.4, SystemExit extends Exception, this is changed in 2.5 to
        # extend BaseException, specifically so this check isn't necessary. But
        # we're using 2.4; upon upgrade, this check will be unecessary but ignorable.
        sys.exit(se.code)
def Options(): OptionList = [ make_option("-s", "--source-code", dest="FileType", const="SourceCode", action="store_const", help="The input file is preprocessed source code, including C or assembly code"), make_option("-r", "--vfr-file", dest="FileType", const="Vfr", action="store_const", help="The input file is preprocessed VFR file"), make_option("-a", "--asl-file", dest="FileType", const="Asl", action="store_const", help="The input file is ASL file"), make_option("-8", "--r8-source-code", dest="FileType", const="R8SourceCode", action="store_const", help="The input file is source code for R8 to be trimmed for ECP"), make_option("-c", "--convert-hex", dest="ConvertHex", action="store_true", help="Convert standard hex format (0xabcd) to MASM format (abcdh)"), make_option("-o", "--output", dest="OutputFile", help="File to store the trimmed content"), make_option("-v", "--verbose", dest="LogLevel", action="store_const", const=EdkLogger.VERBOSE, help="Run verbosely"), make_option("-d", "--debug", dest="LogLevel", type="int", help="Run with debug information"), make_option("-q", "--quiet", dest="LogLevel", action="store_const", const=EdkLogger.QUIET, help="Run quietly"), make_option("-?", action="help", help="show this help message and exit"), ] # use clearer usage to override default usage message UsageString = "%prog [-s|-r|-a] [-c] [-v|-d <debug_level>|-q] [-o <output_file>] <input_file>" Parser = OptionParser(description=__copyright__, version=__version__, option_list=OptionList, usage=UsageString) Parser.set_defaults(FileType="Vfr") Parser.set_defaults(ConvertHex=False) Parser.set_defaults(LogLevel=EdkLogger.INFO) Options, Args = Parser.parse_args() # error check if len(Args) == 0: EdkLogger.error("Trim", OPTION_MISSING, ExtraData=Parser.get_usage()) if len(Args) > 1: EdkLogger.error("Trim", OPTION_NOT_SUPPORTED, ExtraData=Parser.get_usage()) InputFile = Args[0] return Options, InputFile
def parseOptions(): parser = OptionParser() parser.add_option('-r', '--raster', type='string', dest='raster_name', help='Name of raster containing data') parser.add_option('--lat0', type='float', dest='lat0', help='N-W pixel center latitude') parser.add_option('--lon0', type='float', dest='lon0', help='N-W pixel center longitude') parser.add_option('-p', '--pixsiz', dest='pixsiz', action='callback', callback=check_pixsiz, type='float', help='Pixel size (in meter)') parser.add_option('-b', '--bbox', dest='bbox', action='callback', callback=parse_bbox, type='string', help='Bounding box. Value are separated by commas') parser.add_option('--srs', type='string', dest='srs', help='Spatial reference system string') (options, args) = parser.parse_args() if no_args_given(sys.argv[1:]): print parser.get_usage() raise ArgumentError('NOARGS') if options.pixsiz is None: raise ArgumentError('PIXSIZ') else: pass return options
def MigrationOptionParser(Source, Destinate, ToolName, VersionNumber = 1.0):
    """Build and evaluate the command line for a migration tool.

    Source/Destinate name the formats being converted between; ToolName and
    VersionNumber feed the generated usage/version strings.  Returns
    (Options, InputFile); raises MigrationError on any usage problem.
    """
    # use clearer usage to override default usage message
    UsageString = "%s [-a] [-v|-q] [-o <output_file>] <input_file>" % ToolName
    Version = "%s Version %.2f" % (ToolName, VersionNumber)
    Copyright = "Copyright (c) 2007, Intel Corporation. All rights reserved."

    Parser = OptionParser(description=Copyright, version=Version, usage=UsageString)
    Parser.add_option("-o", "--output", dest="OutputFile",
                      help="The name of the %s file to be created." % Destinate)
    Parser.add_option("-a", "--auto", dest="AutoWrite", action="store_true", default=False,
                      help="Automatically create the %s file using the name of the %s file and replacing file extension" % (Source, Destinate))
    # NOTE(review): type=None is redundant on store_true options.
    Parser.add_option("-q", "--quiet", action="store_true", type=None,
                      help="Disable all messages except FATAL ERRORS.")
    Parser.add_option("-v", "--verbose", action="store_true", type=None,
                      help="Turn on verbose output with informational messages printed.")

    Options, Args = Parser.parse_args()

    # Set logging level
    if Options.verbose:
        EdkLogger.setLevel(EdkLogger.VERBOSE)
    elif Options.quiet:
        EdkLogger.setLevel(EdkLogger.QUIET)
    else:
        EdkLogger.setLevel(EdkLogger.INFO)

    # error check: exactly one existing input file is required.
    if len(Args) == 0:
        raise MigrationError(PARAMETER_MISSING, name="Input file", usage=Parser.get_usage())
    if len(Args) > 1:
        raise MigrationError(PARAMETER_INVALID, name="Too many input files", usage=Parser.get_usage())

    InputFile = Args[0]
    if not os.path.exists(InputFile):
        raise MigrationError(FILE_NOT_FOUND, name=InputFile)

    # -o and -a are mutually exclusive; -a derives the output name from the
    # input name by swapping the extension for the destination format's.
    if Options.OutputFile:
        if Options.AutoWrite:
            raise MigrationError(OPTION_CONFLICT, arg1="-o", arg2="-a", usage=Parser.get_usage())
    else:
        if Options.AutoWrite:
            Options.OutputFile = os.path.splitext(InputFile)[0] + "." + Destinate.lower()
        else:
            raise MigrationError(OPTION_MISSING, name="-o", usage=Parser.get_usage())
    return Options, InputFile
def main():
    """Main Function

    Starts the sniffer and processes results.

    Positional arguments: capture interface and the client's source IP.
    """
    usage = "usage: %prog [-v] [--csv=OUTFILE] <interface> <ipaddress>"
    description = """%prog provides the server-side handling of the egressive egress filter toolset. Both the source IP and interface command line switches are required."""
    parser = OptionParser(usage=usage, version="%prog 1.0", description=description)
    parser.add_option("-v", "--verbose", default=False, action="store_true", help="Verbose mode.")
    # --csv stores the output file name; the False default doubles as "no CSV".
    parser.add_option("--csv", default=False, metavar="FILE", type=str, action="store")
    (options, args) = parser.parse_args()
    if len(args) != 2:
        print >> sys.stderr, parser.get_usage()
        sys.exit(1)
    interface = args[0]
    src = args[1]
    verbose = options.verbose
    csv = options.csv
    # Results accumulated by handle_packet; defaultdict(set) deduplicates entries.
    open_ports = collections.defaultdict(set)
    # Build the pcap filter:
    # Only examine packets from the client source IP
    # Process all UDP packets
    # Process only TCP packets with SYN flag set
    pcap_filter = "src {0} and (udp or (tcp and (tcp[13] & 2!=0)))".format(src)
    print "[+] Starting listener. Press Ctrl-C to quit..."
    if verbose:
        print "[DEBUG] Filter: {0}".format(pcap_filter)
        print "[DEBUG] Interface: {0}".format(interface)
    try:
        # Sniff until interrupted; each matching packet is fed to handle_packet.
        sniff(filter=pcap_filter, iface=interface,
              prn=lambda x: handle_packet(x, verbose, open_ports, src))
    except Scapy_Exception as e:
        print >> sys.stderr, "[!] Scapy error:{0}".format(e)
    except socket.error as e:
        print >> sys.stderr, "[!] Socket error, check interface: {0}".format(e)
    except KeyboardInterrupt:
        # Ctrl-C is the normal way to stop the capture; fall through to reporting.
        pass
    print "[+] Interrupt detected. Processing results..."
    output_ports(open_ports)
    if csv:
        print "[+] Writing port(s) to {0}...".format(csv)
        make_csv(open_ports, csv)
def main(): """ Program entry point when run interactively. """ # prepare option parser parser = OptionParser(usage="usage: %prog [options] filename", description="Read a text file with jobs, execute them one by one.", epilog="In the given file, each line beginning with a dot and a space (. ) will be executed. The file is modified to reflect the execution state of each job (r-running, d-done, !-failed, e-error).") parser.add_option("-j", "--jobs", dest="num_jobs", type="int", default=1, action="store", help="start NUMBER jobs in parallel, 0 meaning autodetect [default: %default]", metavar="NUMBER") parser.add_option("-r", "--retry", dest="retry", default=False, action="store_true", help="retry failed jobs [default: no]") parser.add_option("-l", "--logfile", dest="logfile", default="", help="log output to FILENAME [default: none]", metavar="FILENAME") parser.add_option("-n", "--loglines", dest="logfile_lines", type="int", default=3, action="store", help="if logging, log the last NUMBER lines of output [default: %default]", metavar="NUMBER") parser.add_option("-1", "--one-only", dest="one_only", default=False, action="store_true", help="run no more than a single job before exiting [default: no]") # parse options (options, args) = parser.parse_args() # get file name if len(args) != 1: print "Need exactly one filename (a list of all jobs to run)" print "" print parser.get_usage() sys.exit(1) fname = args[0] # autodetect number of cpus if options.num_jobs == 0: try: options.num_jobs = multiprocessing.cpu_count() except: pass # spawn children children = [] for i in range(options.num_jobs): child = multiprocessing.Process(target=process_file, args=(fname,options,)) child.start() children.append(child) for child in children: child.join()
def process_arguments(path_prefix):
    """Parse the submitter command line; return (options, input files)."""
    parser = OptionParser(
        usage="usage: %prog [options] main_input_file.inp [specific files...]",
        version="%prog " + VERSION,
        description="Submit split FLUKA simulation to LXBATCH",
        epilog=("Unless specific files are specified, the split files "
                " are assumed to reside in the current directory and be"
                " named main_input_file_<counter>.inp\n"
                "The jobs, when complete, will write their results into"
                " result files named results_main_input_file_<counter>.zip"))
    parser.add_option("-q", "--run-queue", dest="job_flavour", default="tomorrow",
                      choices=["espresso", "microcentury", "longlunch", "workday",
                               "tomorrow", "testmatch", "nextweek"],
                      help="submit to run queue QUEUE", metavar="QUEUE")
    parser.add_option("-e", "--executable", dest="executable", default="",
                      help="passed on to rfluka", metavar="FILE")
    parser.add_option("-L", "--run-locally", action="store_true", dest="run_locally",
                      help="run the job locally (for debugging)")
    parser.add_option("--unless-finished", action="store_true", dest="unless_finished",
                      help="run only jobs for which there is not a results file present")
    opts, positional = parser.parse_args()
    # The main input file is mandatory.
    if not positional:
        sys.exit(parser.get_usage())
    # Strip the .inp extension to obtain the base name.
    input_base = re.sub(r'\.inp$', '', positional[0])
    # Discover the split inputs unless they were listed explicitly.
    inputs = positional[1:] if len(positional) > 1 else ut.find_jobs(path_prefix, input_base)
    return (opts, inputs)
def main(): usage = "usage: %prog [-a admin_username] username" parser = OptionParser(usage=usage) parser.add_option("-a", "--admin", dest="admin", default="", help="Admin username") parser.add_option("-s", "--server", dest="server", default="localhost", help="External server name (default: localhost)") parser.add_option( "-w", "--write", action="store_true", dest="write", default=False, help="Write authentication file for user (for superuser use)") (options, args) = parser.parse_args() if not args: print >> sys.stderr, parser.get_usage() sys.exit(1) user = args[0] if options.admin: admin_dir = os.path.join(os.path.expanduser("~" + options.admin), gterm.APP_DIRNAME) else: admin_dir = gterm.App_dir auth_code, port = gterm.read_auth_code(appdir=admin_dir, server=options.server) user_code = gterm.user_hmac(auth_code, user, key_version="1") if not options.write: print gterm.dashify(user_code) else: user_dir = os.path.join(os.path.expanduser("~" + user), gterm.APP_DIRNAME) gterm.create_app_directory(appdir=user_dir) gterm.write_auth_code(user_code, appdir=user_dir, user=user, server=options.server)
def __init__(self, interactive=False):
    """Collect export options, either via the Binary Ninja UI or the CLI.

    interactive=True prompts with a BN form and sets only self.json_file;
    otherwise the command line must supply the two positional arguments.
    """
    # from BN UI
    if interactive:
        json_file = OpenFileNameField("Export json file")
        get_form_input([json_file], "BN Export Options")
        if json_file.result == '':
            self.json_file = None
        else:
            self.json_file = json_file.result
        return
    # headless
    usage = "usage: %prog <bn_database.bndb> <ida_export.json>"
    parser = OptionParser(usage=usage)
    (options, args) = parser.parse_args()
    # BUG FIX: indexing args without checking its length crashed with an
    # opaque IndexError on missing arguments; report usage and exit instead.
    if len(args) != 2:
        parser.error("expected exactly two arguments: <bn_database.bndb> <ida_export.json>")
    self.bn_database = args[0]
    self.json_file = args[1]
    self.usage = parser.get_usage()
def commandline_call(convert_class=RefreshMetaData):
    """Parse refresh options and run convert_class().process on them."""
    parser = OptionParser()
    parser.set_usage("refresh [--federation <fed_name>] [--log <file>] [--force-refresh]")
    parser.add_option("-l", "--log", type="string", dest="log",
                      help="The logger configuration file",
                      default=None, metavar="LOG")
    parser.add_option("-f", "--federation", type="string", dest="fed_name",
                      help="The federation to be updated (None for anyone)",
                      default=None, metavar="FED")
    parser.add_option("-r", "--force-refresh", action="store_true", dest="force_refresh",
                      help="Force refresh of metadata information (even if file has not changed)",
                      metavar="REF")
    options, _ = parser.parse_args()
    # A logger configuration file, when given, must exist.
    if options.log and not os.path.exists(options.log):
        print("File '%s' does not exist." % options.log)
        print(parser.get_usage())
        exit(1)
    convert_class().process(options)
def main():
    """Parse the MS quantification options, then run readseq + parse."""
    parser = OptionParser('usage: python %prog [options] -i MSresultfile -s seqfile -u proteincolumn -p pepcolumn -r ratiocolumn -o output')
    parser.add_option('-i', dest='msfile',
                      help='quantification file using MaxQuant or others [Default %default]')
    parser.add_option('-s', dest='seqfile',
                      help='fasta file used for the database search [Default %default]')
    parser.add_option('-o', dest='outdir', default='output',
                      help='output directory [Default %default]')
    parser.add_option('-u', dest='protein', default=0, type='int',
                      help='protein column, 0-based, like "IPI00021812.2" [Default %default]')
    parser.add_option('-p', dest='pep', default=1, type='int',
                      help='pep column, 0-based, like "_HRS(ph)NS(ph)FSDER_" [Default %default]')
    parser.add_option('-r', dest='ratio', default=4, type='int',
                      help='ratio column, 0-based, like "0.38957" [Default %default]')
    opts, _ = parser.parse_args()
    # Every one of these must be present (the column options have defaults,
    # so in practice only -i and -s can be missing).
    required = (opts.msfile, opts.seqfile, opts.protein, opts.pep, opts.ratio)
    if any(value is None for value in required):
        sys.exit("[ERROR] " + parser.get_usage())
    if not os.path.exists(opts.outdir):
        os.makedirs(opts.outdir)
    seqdict = readseq(opts.seqfile)
    parse(opts.msfile, seqdict, opts.protein, opts.pep, opts.ratio)
def __init__(self):
    """Set up a start/end-time option parser and expose its helpers.

    Both -s/--start_time and -e/--end_time arrive as strings and are fed
    through the private __to_seconds callback; defaults cover the whole
    range (0 .. sys.maxsize).
    """
    option_parser = OptionParser()
    option_parser.add_option('-s', '--start_time',
                             metavar='START',
                             action='callback',
                             callback=self.__to_seconds,
                             type='string',
                             default=0)
    option_parser.add_option('-e', '--end_time',
                             metavar='END',
                             action='callback',
                             callback=self.__to_seconds,
                             type='string',
                             default=sys.maxsize)
    option_parser.usage = '%prog [options] vod_id'
    # Re-export the parser's public surface as bound callables on self.
    self.get_usage = lambda: option_parser.get_usage()
    self.parse_args = lambda: option_parser.parse_args()
def process_arguments():
    """Parse the command line for the FLUKA job splitter.

    Returns:
        (input_base, nprimaries, nsplits, options) where input_base is the
        main input file name with its '.inp' suffix stripped, and the two
        counts fall back to DEFAULT_NPRIMARIES / DEFAULT_NSPLITS when the
        optional positional arguments are absent.

    Exits with the usage message when no input file is given; raises
    ValueError if NPRIMARIES/NSPLITS are not integers.
    """
    parser = OptionParser(
        usage="usage: %prog main_input_file.inp [NPRIMARIES [NSPLITS]]",
        version="%prog " + VERSION,
        description=
        "Split FLUKA simulation into jobs for submission to LXBATCH",
        epilog=("NPRIMARIES specifies the number of primaries to simulate "
                "in each job; NSPLITS specifies the number of jobs to create. "
                # BUG FIX: the next fragment previously ended "...from the"
                # with no trailing space, so the help text rendered "theseed".
                "The jobs get consecutive random seeds counting up from the "
                "seed found in main_input_file.inp."))
    (options, args) = parser.parse_args()
    if len(args) < 1:
        sys.exit(parser.get_usage())
    input_base = re.sub(r'\.inp$', '', args[0])
    nprimaries = int(args[1]) if len(args) > 1 else DEFAULT_NPRIMARIES
    nsplits = int(args[2]) if len(args) > 2 else DEFAULT_NSPLITS
    return (input_base, nprimaries, nsplits, options)
def main():
    """CLI entry point: parse options, then run the iGPS kinase analysis.

    Both the ratio-elm file and the iGPS result file are mandatory; the
    output directory is created on demand before parseiGPS()/ka() run.
    """
    usage = 'usage: python %prog -i ratioElmfile -g iGPSResultfile -o output'
    opt_parser = OptionParser(usage)
    opt_parser.add_option('-i', dest='elmfile',
                          help='quantification ratio elm file [Default %default]')
    opt_parser.add_option('-g', dest='igpsfile',
                          help='result file from iGPS [Default %default]')
    opt_parser.add_option('-o', dest='outdir', default='output',
                          help='output directory [Default %default]')
    options, _ = opt_parser.parse_args()

    # Bail out with the usage text when either input file is missing.
    if options.elmfile is None or options.igpsfile is None:
        sys.exit("[ERROR] " + opt_parser.get_usage())

    if not os.path.exists(options.outdir):
        os.makedirs(options.outdir)
    datalist = parseiGPS(options.elmfile, options.igpsfile)
    ka(datalist, options.outdir)
import os
from optparse import OptionParser

# Command-line driver: parse a version string and emit it in the chosen
# format (sh / xml / python), optionally normalizing missing fields first.
op = OptionParser()
op.add_option('-f', '--format',
              type='choice',
              choices=('sh', 'xml', 'python'),
              dest='format',
              default='sh',
              help='format of output information')
op.add_option('-n', '--normalize',
              action='store_true',
              dest='normalize',
              default=False,
              help='replace missing values with defaults')
# Append the positional-argument hint to the autogenerated usage line.
op.set_usage(op.get_usage().strip() + ' "version to parse"')

co, ca = op.parse_args()
if len(ca) != 1:
    # OptionParser.error prints the usage plus this message and exits.
    op.error('requires exactly one argument, the version')
else:
    v = split(ca[0])
    if co.normalize:
        v = normalize(v)
    # Look up the formatter function named by --format in this module.
    sys.stdout.write(getattr(sys.modules[__name__], co.format)(v))
    sys.stdout.write(os.linesep)
"--debug", action="count", help="prints additional debug information while sending the programm") parser.add_option( "-t", "--type", dest="type", default="ccan.usb", help= "Select type of CAN adapter ('ccan.usb' or 'shell' or 'tiny' or 'socket')" ) (options, args) = parser.parse_args() if not options.filename or not options.id: print(parser.get_usage()) exit(1) board_id = int(options.id, 0) debug_mode = True if (options.debug) else False print("CAN Bootloader\n") print("Port : %s" % options.port) print("Board Id : %i (0x%02x)" % (board_id, board_id)) if debug_mode: print("debug mode active!") print("File : %s" % options.filename) hexfile = intelhex.IntelHexParser(options.filename) if len(hexfile.segments) > 1:
conf_module_name = win32api.GetLongPathName(conf_module_name) except win32api.error, exc: log( 2, "Couldn't determine the long name for %r: %s" % (conf_module_name, exc)) if opt_parser is None: # Build our own parser. parser = OptionParser(usage='') else: # The caller is providing their own filter, presumably with their # own options all setup. parser = opt_parser # build a usage string if we don't have one. if not parser.get_usage(): all_handlers = standard_arguments.copy() all_handlers.update(custom_arg_handlers) parser.set_usage(build_usage(all_handlers)) # allow the user to use uninstall as a synonym for remove if it wasn't # defined by the custom arg handlers. all_handlers.setdefault('uninstall', all_handlers['remove']) parser.add_option("-q", "--quiet", action="store_false", dest="verbose", default=True, help="don't print status messages to stdout") parser.add_option("-v",
sys.exit(1) return firstCommit, finalCommit if not options.until_commit: print("no value specified with --until, try --until=xxxxx (without space after =)", file=sys.stderr) sys.exit(1) if not options.from_commit: print("no value specified with --from, try --from=xxxx (without space after =)", file=sys.stderr) sys.exit(1) firstCommit, commit = find_commits(options.from_commit, options.until_commit) if commit == firstCommit: print("Commit range is empty!", file=sys.stderr) print(parser.get_usage(), file=sys.stderr) print("Example use:", file=sys.stderr) print(" %s --help" % sys.argv[0], file=sys.stderr) print(" %s --from xxx >output.md" % sys.argv[0], file=sys.stderr) print(" %s --from xxx --until yyy >output.md" % sys.argv[0], file=sys.stderr) print("Note: the first commit is excluded. Use e.g.: --from <prev-release-tag> --until <new-release-candidate-sha>", file=sys.stderr) sys.exit(0) excludedFirst, excludedLast = None, None if options.exclude_from_commit or options.exclude_until_commit: if not options.exclude_from_commit or not options.exclude_until_commit: print("Both -xf and -xt must be specified, or not at all.") sys.exit(1) excludedFirst, excludedLast = find_commits(options.exclude_from_commit, options.exclude_until_commit) #