def main(argv): optparse_usage = 'reaction_table.py -i <input_paths> -g <deltaG_path> -f <table_format> -r <root_dir>' parser = OptionParser(usage=optparse_usage) parser.add_option("-i", "--inputpaths", action="store", type="string", dest="input_path", help='The input file is the kegg_reaction.tsv') parser.add_option("-g", "--deltaGpaths", action="store", type="string", dest="deltaG_path", help='The input file is the Info_deltaG.csv') parser.add_option("-r", "--rootDir", action="store", type="string", dest="root_dir", help='The root directory. All files are generated here.') parser.add_option("-f", "--tableFormat", action="store", type="string", dest="format_path", help='The table format file that you have given before.') (options, args) = parser.parse_args() if options.input_path: input_path = os.path.abspath(options.input_path) else: print 'Error: please provide proper input file name' if options.format_path: format_path = os.path.abspath(options.format_path) else: print 'Error: please provide proper table format file name' if options.deltaG_path: deltaG_path = os.path.abspath(options.deltaG_path) else: print 'Error: please provide proper deltaG file name' if options.root_dir: root_dir = os.path.abspath(options.root_dir) else: print 'ERROR: please provide proper root directory' # Run the function make_file(input_path, deltaG_path, format_path, root_dir)
def main(): PROG = os.path.basename(os.path.splitext(__file__)[0]) description = """Scan claims files""" parser = OptionParser(option_class=MultipleOption, usage='usage: %prog claims_file, claims_file, ...', version='%s %s' % (PROG, VERSION), description=description) if len(sys.argv) == 1: parser.parse_args(['--help']) args = parser.parse_args() p2k = {} k2p = {} try: with open('claimants.csv') as csv_file: for line in csv.reader(csv_file, dialect="excel"): p2k[line[0]] = line[1] k2p[line[1]] = line[0] except IOError: pass for filename in args[1]: with open(filename+'_masked.csv', 'wb') as cf: outfile = csv.writer(cf, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL) analyze_file(filename, outfile, p2k, k2p) print len(p2k), len(k2p) with open('claimants.csv', 'wb') as cf: cout = csv.writer(cf, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL) for p in p2k: cout.writerow([p, p2k[p]])
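# Hedged sketch (assumption): analyze_file is called above but not shown in this
# excerpt. A minimal version consistent with the surrounding code might copy each
# input CSV row to the masked output, replacing the claimant name (assumed to be
# column 0) with a stable key and updating the shared p2k/k2p maps. The "C%05d"
# key format is hypothetical, not taken from the source.
import csv

def analyze_file(filename, outfile, p2k, k2p):
    """Copy rows from filename to outfile, masking the claimant column."""
    with open(filename) as csv_file:
        for row in csv.reader(csv_file, dialect="excel"):
            if not row:
                continue
            name = row[0]
            if name not in p2k:
                key = "C%05d" % (len(p2k) + 1)  # hypothetical key format
                p2k[name] = key
                k2p[key] = name
            outfile.writerow([p2k[name]] + row[1:])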
def main(argv): """Flag options, automatically adds a -h/--help flag with this information.""" parser = OptionParser() parser.add_option("-f", "--file", dest="filename", help="read mountpoints from FILE", metavar="FILE") parser.add_option("-o", "--output", dest="output", help="dump drive information to FILE", metavar="FILE") parser.add_option("-p", "--prompt", dest="prompt", help="specify drive information before adding to database", action="store_true") parser.add_option("-l", "--location", dest="location", help="add hard drives to groups (boxes)", action="store_true") (options, args) = parser.parse_args() filename = options.filename try: if options.location: o = Organization() o.prompt() if filename == None: # default file to read drives from filename = "drives.in" #TODO: add username/password table username = raw_input("Enter username: ") password = raw_input("Enter password: ") # NOTE: the credential prompts and the IOError message below were masked in the source; this is a best-guess reconstruction except IOError: print "No file to read drives from. Please create a 'drives.in' file to place the drives in, or specify a file with the -f flag."
def main(): # Setup the command line arguments. optp = OptionParser() # Output verbosity options optp.add_option('-q', '--quiet', help='set logging to ERROR', action='store_const', dest='loglevel', const=logging.ERROR, default=logging.INFO) optp.add_option('-d', '--debug', help='set logging to DEBUG', action='store_const', dest='loglevel', const=logging.DEBUG, default=logging.INFO) optp.add_option('-v', '--verbose', help='set logging to COMM', action='store_const', dest='loglevel', const=5, default=logging.INFO) # Option for the IOC to check optp.add_option("-i", "--ioc", dest="ioc", help="The hash, ip, or domain of the ioc you want to check") opts, args = optp.parse_args() # Prompt if the user didn't give an ioc if opts.ioc is None: opts.ioc = raw_input("What's your IOC (Hash, ip, domain)? ") results = checkSS(opts.ioc) print results
def process_options(debugger_name, pkg_version, sys_argv, option_list=None): """Handle debugger options. Set `option_list' if you are writing another main program and want to extend the existing set of debugger options. The options dictionary from opt_parser is returned. sys_argv is also updated.""" usage_str="""%prog [debugger-options] [python-script [script-options...]] Runs the extended python debugger""" # serverChoices = ('TCP','FIFO', None) optparser = OptionParser(usage=usage_str, option_list=option_list, version="%%prog version %s" % pkg_version) optparser.add_option("-F", "--fntrace", dest="fntrace", action="store_true", default=False, help="Show functions before executing them. " + "This option also sets --batch") optparser.add_option("--basename", dest="basename", action="store_true", default=False, help="Filenames strip off basename, " "(e.g. for regression tests)") optparser.add_option("--different", dest="different", action="store_true", default=True, help="Consecutive stops should have different " "positions") optparser.disable_interspersed_args() sys.argv = list(sys_argv) (opts, sys.argv) = optparser.parse_args() dbg_opts = {} return opts, dbg_opts, sys.argv
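# Hedged usage sketch (assumption): a wrapper program extending the debugger's
# option set via option_list, as the docstring of process_options suggests.
# The "--my-flag" option and the "mydbg"/"1.0" arguments are illustrative only.
import sys
from optparse import make_option

extra_opts = [
    make_option("--my-flag", dest="my_flag", action="store_true", default=False,
                help="example option added by the embedding program"),
]
opts, dbg_opts, script_argv = process_options("mydbg", "1.0", sys.argv,
                                              option_list=extra_opts)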
def main(): description = ( "This application generates .h and .ld files for symbols defined in input file. " "The input symbols file can be generated using nm utility like this: " "esp32-ulp-nm -g -f posix <elf_file> > <symbols_file>" ); parser = OptionParser(description=description) parser.add_option("-s", "--symfile", dest="symfile", help="symbols file name", metavar="SYMFILE") parser.add_option("-o", "--outputfile", dest="outputfile", help="destination .h and .ld files name prefix", metavar="OUTFILE") (options, args) = parser.parse_args() if options.symfile is None: parser.print_help() return 1 if options.outputfile is None: parser.print_help() return 1 with open(options.outputfile + ".h", 'w') as f_h, \ open(options.outputfile + ".ld", 'w') as f_ld, \ open(options.symfile) as f_sym: \ gen_ld_h_from_sym(f_sym, f_ld, f_h) return 0
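# Hedged sketch (assumption): gen_ld_h_from_sym is not shown in the excerpt above.
# A minimal version would read posix-nm lines of the form "name type address" and
# emit a PROVIDE entry for the linker script plus an extern declaration for the
# header; the "ulp_" prefix and the uint32_t type are assumptions, not confirmed
# by the source.
def gen_ld_h_from_sym(f_sym, f_ld, f_h):
    for line in f_sym:
        parts = line.split()
        if len(parts) < 3:
            continue
        name, addr = parts[0], int(parts[2], 16)
        f_ld.write("PROVIDE ( ulp_%s = 0x%08x );\n" % (name, addr))
        f_h.write("extern uint32_t ulp_%s;\n" % name)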
def getProgOptions(): from optparse import OptionParser, make_option option_list = [ make_option("-i", "--in-seq", action="append", type="string",dest="inSeq"), make_option("-o", "--out-name", action="store", type="string",dest="outName"), make_option("-s", "--num-splits", action="store", type="int",dest="nSplits",default=3), make_option("-m", "--min-samp-count", action="store", type="int",dest="minSampCount",default=100), make_option("-t", "--max-samp-seq", action="store", type="int",dest="maxSampCountPerSeq"), make_option("-l", "--samp-len", action="store", type="int",dest="sampLen",default=1000), make_option("-f", "--samp-offset", action="store", type="int",dest="sampOffset",default=0), make_option("-d", "--make-other", action="store_true", dest="makeOther",default=False), make_option("-a", "--alphabet", action="store", type="choice",choices=("dna","protein"), dest="alphabet",default="dna"), make_option("-e", "--degen-len", action="store", type="int",dest="degenLen",default=1), ] parser = OptionParser(usage = "usage: %prog [options]",option_list=option_list) (options, args) = parser.parse_args() return options,args
def main () : usage = "./app/python/arc2warc.py [OPTIONS] ARC-FILES " parser = OptionParser(usage) parser.add_option("-c", "--compressed", dest="cmode", action="store_true", help="compressed output WARC file") parser.add_option("-t", "--tempdir", dest="tmpdir", help="Temporary working directory", default="./") parser.add_option("-v", "--verbose", action="store_true", dest="verbose", default=False, help="print more information") (options, args) = parser.parse_args() if not ( len (args) > 0 ): parser.error(" Please give one or more arcs to convert") for fname in args: ofname = guessname( fname , options.cmode ) if options.verbose: print 'Converting %s to %s' % ( fname , ofname ) convert( fname , ofname , options.tmpdir , options.cmode ) if options.verbose: print 'Done' return
def main(args=None): # parse the command line from optparse import OptionParser parser = OptionParser(description=__doc__) for key in choices: parser.add_option('--%s' % key, dest=key, action='store_true', default=False, help="display choices for %s" % key) options, args = parser.parse_args() # args are JSON blobs to override info if args: # lazy import import json for arg in args: if _os.path.exists(arg): string = file(arg).read() else: string = arg update(json.loads(string)) # print out choices if requested flag = False for key, value in options.__dict__.items(): if value is True: print '%s choices: %s' % (key, ' '.join([str(choice) for choice in choices[key]])) flag = True if flag: return # otherwise, print out all info for key, value in info.items(): print '%s: %s' % (key, value)
def main(self,args=None): """ meant to be called if __name__ == '__main__': MyApplication().main() but lino.runscript calls it with args=sys.argv[:2] (command-line arguments are shifted by one) """ if args is None: args = sys.argv[1:] p = OptionParser( usage=self.usage, description=self.description) self.setupOptionParser(p) try: options,args = p.parse_args(args) self.applyOptions(options,args) return self.run(self.console) except UsageError,e: p.print_help() return -1
def main(): """Run the application from outside the module - used for deploying as frozen app""" import sys, os from optparse import OptionParser parser = OptionParser() parser.add_option("-f", "--file", dest="msgpack", help="Open a dataframe as msgpack", metavar="FILE") parser.add_option("-p", "--project", dest="projfile", help="Open a dataexplore project file", metavar="FILE") parser.add_option("-i", "--csv", dest="csv", help="Open a csv file by trying to import it", metavar="FILE") parser.add_option("-t", "--test", dest="test", action="store_true", default=False, help="Run a basic test app") opts, remainder = parser.parse_args() if opts.test == True: app = TestApp() else: if opts.projfile != None: app = DataExplore(projfile=opts.projfile) elif opts.msgpack != None: app = DataExplore(msgpack=opts.msgpack) elif opts.csv != None: app = DataExplore() t = app.getCurrentTable() t.importCSV(opts.csv) else: app = DataExplore() app.mainloop()
def main(): from optparse import OptionParser parser = OptionParser() parser.add_option('-c', type='int', dest='concurrency', default=DEFAULT_CONCURRENCY, help='Number of multiple requests to make') parser.add_option('-s', type='int', dest='seconds', default=DEFAULT_SECONDS, help='Number of seconds to perform') parser.add_option("--mode", dest="mode", default=None) options, args = parser.parse_args() #run tests if 'test' == options.mode: test_parse_http_load_result() return if 1 != len(args): parser.print_help() return assert 1 <= options.concurrency <= 100 assert 1 <= options.seconds <= 100 #run bench old_sys_stderr = sys.stderr sys.stderr = StringIO() try: run_bench(args[0], options.seconds, options.concurrency) except Exception, e: print e import traceback print traceback.format_exc()
def main(argv=None): """Main entry point for the script""" ret_code = 0 if argv is None: argv = sys.argv parser = OptionParser() parser.add_option("-f", "--file", action="store", type="string", dest="filepath", default="/var/log/asterisk/refs", help="The full path to the refs file to process") (options, args) = parser.parse_args(argv) if not os.path.isfile(options.filepath): print("File not found: %s" % options.filepath, file=sys.stderr) return -1 try: process_file(options) except (KeyboardInterrupt, SystemExit, IOError): print("File processing cancelled", file=sys.stderr) return -1 return ret_code
def parse_options(): """ Options parser. """ parser = OptionParser(option_class=eng_option, usage="%prog: [options]") parser.add_option("-s", "--servername", type="string", default="localhost", help="Server hostname") (options, args) = parser.parse_args() return options
def _getArgs(): parser = OptionParser(usage="Train HTM Spatial Pooler") parser.add_option("-d", "--dataSet", type=str, default='randomSDR', dest="dataSet", help="DataSet Name, choose from sparse, correlated-input, " "bar, cross, image") parser.add_option("-b", "--boosting", type=int, default=1, dest="boosting", help="Whether to use boosting") parser.add_option("-e", "--numEpochs", type=int, default=100, dest="numEpochs", help="number of epochs") parser.add_option("--spatialImp", type=str, default="cpp", dest="spatialImp", help="spatial pooler implementations: py, c++, or " "monitored_sp") (options, remainder) = parser.parse_args() print options return options, remainder
def main(): atexit.register(fabric_cleanup, True) parser = OptionParser(usage="%prog RELEASE_DIR DESTINATION") (options, args) = parser.parse_args(sys.argv[1:]) comm_obj = _CommObj() if len(args) != 2: parser.print_help() sys.exit(-1) if not os.path.isdir(args[0]): print "release directory %s not found" % args[0] sys.exit(-1) destparts = args[1].split(':', 1) if len(destparts)==1: # it's a local release test area if not os.path.isdir(args[1]): _setup_local_release_dir(args[1]) comm_obj.put = shutil.copy comm_obj.put_dir = shutil.copytree comm_obj.run = local _push_release(args[0], args[1], comm_obj) else: # assume args[1] is a remote host:destdir comm_obj.put = put comm_obj.put_dir = put_dir comm_obj.run = run home = destparts[1] with settings(host_string=destparts[0]): _push_release(args[0], home, comm_obj)
def __init__(self): gr.top_block.__init__(self) usage="%prog: [options] output_filename" parser = OptionParser(option_class=eng_option, usage=usage) parser.add_option("-I", "--audio-input", type="string", default="", help="pcm input device name. E.g., hw:0,0 or /dev/dsp") parser.add_option("-r", "--sample-rate", type="eng_float", default=48000, help="set sample rate to RATE (48000)") parser.add_option("-N", "--nsamples", type="eng_float", default=None, help="number of samples to collect [default=+inf]") (options, args) = parser.parse_args () if len(args) != 1: parser.print_help() raise SystemExit, 1 filename = args[0] sample_rate = int(options.sample_rate) src = audio.source (sample_rate, options.audio_input) dst = gr.file_sink (gr.sizeof_float, filename) if options.nsamples is None: self.connect((src, 0), dst) else: head = gr.head(gr.sizeof_float, int(options.nsamples)) self.connect((src, 0), head, dst)
def main(): os.chdir(os.path.dirname(os.path.realpath(__file__))) parser = OptionParser() parser.add_option( "-d", dest="debug", action="store_true", default=False, help="Enable debug mode (different starting board configuration)", ) parser.add_option("-t", dest="text", action="store_true", default=False, help="Use text-based GUI") parser.add_option("-o", dest="old", action="store_true", default=False, help="Use old graphics in pygame GUI") parser.add_option( "-p", dest="pauseSeconds", metavar="SECONDS", action="store", default=0, help="Sets time to pause between moves in AI vs. AI games (default = 0)", ) (options, args) = parser.parse_args() game = PythonChessMain(options) game.SetUp(options) game.MainLoop()
def main(): p = OptionParser() options, args = p.parse_args() if len(args) != 1: p.error("no valid directory given") inp = args[0] outp = inp + ".npz" files = [] for dirpath, dirnames, filenames in os.walk(inp): for fn in filenames: if fn.endswith('.txt'): files.append( (dirpath[len(inp)+1:] + '/' + fn[:-4], os.path.join(dirpath, fn))) data = {} for key, fn in files: key = key.replace('/', '-').strip('-') try: data[key] = np.loadtxt(fn) except ValueError: print("Failed to load", fn) savez_compress(outp, **data)
def __init__(self, **kwargs): OptionParser.__init__(self, **kwargs) self.add_option('--dz-url', action='store', dest='datazilla_url', default='https://datazilla.mozilla.org', metavar='str', help='datazilla server url (default: %default)') self.add_option('--dz-project', action='store', dest='datazilla_project', metavar='str', help='datazilla project name') self.add_option('--dz-branch', action='store', dest='datazilla_branch', metavar='str', help='datazilla branch name') self.add_option('--dz-key', action='store', dest='datazilla_key', metavar='str', help='oauth key for datazilla server') self.add_option('--dz-secret', action='store', dest='datazilla_secret', metavar='str', help='oauth secret for datazilla server') self.add_option('--sources', action='store', dest='sources', metavar='str', help='path to sources.xml containing project revisions')
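# Hedged usage sketch (assumption): the __init__ above belongs to an OptionParser
# subclass; "DatazillaOptionParser" is a hypothetical name for it, and the
# required-option check is illustrative rather than taken from the source.
parser = DatazillaOptionParser(usage='%prog [options]')
options, args = parser.parse_args()
for required in ('datazilla_project', 'datazilla_branch', 'datazilla_key', 'datazilla_secret'):
    if getattr(options, required) is None:
        flag = '--dz-' + required.replace('datazilla_', '').replace('_', '-')
        parser.error('%s is required' % flag)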
def cli_main(self,argv): with self: # so the sys.path was modified appropriately # I believe there's no performance hit loading these here when # CLI--it would load everytime anyway. from StringIO import StringIO from calibre.library import db from calibre_plugins.fanficfare_plugin.fanficfare.cli import main as fff_main from calibre_plugins.fanficfare_plugin.prefs import PrefsFacade from calibre.utils.config import prefs as calibre_prefs from optparse import OptionParser parser = OptionParser('%prog --run-plugin '+self.name+' -- [options] <storyurl>') parser.add_option('--library-path', '--with-library', default=None, help=_('Path to the calibre library. Default is to use the path stored in the settings.')) # parser.add_option('--dont-notify-gui', default=False, action='store_true', # help=_('Do not notify the running calibre GUI (if any) that the database has' # ' changed. Use with care, as it can lead to database corruption!')) pargs = [x for x in argv if x.startswith('--with-library') or x.startswith('--library-path') or not x.startswith('-')] opts, args = parser.parse_args(pargs) fff_prefs = PrefsFacade(db(path=opts.library_path, read_only=True)) fff_main(argv[1:], parser=parser, passed_defaultsini=StringIO(get_resources("fanficfare/defaults.ini")), passed_personalini=StringIO(fff_prefs["personal.ini"]))
def ParseInputs(): global DOMAIN_PAIRS usage = """usage: %prog -d <domain> -u <admin_user> -p <admin_pass>\n""" parser = OptionParser() parser.add_option("-u", dest="admin_user", help="admin user") parser.add_option("-p", dest="admin_pass", help="admin pass") parser.add_option("-d", dest="domain", help="Domain name") parser.add_option("-n", dest="nick_domain", help="The domain of the nick that should exist.",) parser.add_option("--apply", action="store_true", dest="apply", help="""If present, changes will be applied, otherwise will run in dry run mode with no changes made""") (options, args) = parser.parse_args() if options.admin_user is None: print "-u (admin user) is required" sys.exit(1) if options.admin_pass is None: print "-p (admin password) is required" sys.exit(1) if (options.domain and not options.nick_domain) or (options.nick_domain and not options.domain): print "Both -d and -n need to be given" sys.exit(1) if not options.domain and not options.nick_domain and not DOMAIN_PAIRS: print "No domain pairs given" sys.exit(1) return options
def main(): """gterm-aware matplotlib demo""" setup() import matplotlib.pyplot as plt from optparse import OptionParser usage = "usage: %prog [--animate]" parser = OptionParser(usage=usage) parser.add_option("", "--animate", action="store_true", dest="animate", default=False, help="Simple animation demo") (options, args) = parser.parse_args() fmt = "png" if options.animate: plt.plot([1,2,3,2,3,1]) show(overwrite=False, format=fmt, title="Simple animation") n = 10 dx = 5.0/n for j in range(1,n): time.sleep(0.5) plt.plot([1,2,3,2,3,1+j*dx]) show(overwrite=True, format=fmt) else: plt.plot([1,2,3,2,3,0]) show(overwrite=False, format=fmt, title="Simple plot")
def setupParser(self, argv): usage = "usage: %prog [options]" parser = OptionParser(usage) parser.add_option('-o', '--output-dir', action='store', dest='outputDir', default='./restdoc-output', help='The output directory for the API documentation files.') parser.add_option('-f', '--format', action='store', dest='format', default=0, help='The output format (MediaWiki). [Default: MediaWiki].') parser.add_option('-p', '--private-api', action='store_true', dest='privateApi', default=False, help='Generates private API documentation. [Default: False]') parser.add_option('-u', '--upload-documentation', action='store_true', dest='uploadDocumentation', default=False, help='Uploads the documentation to the Bitmunk wiki. ' + \ '[Default: False]') self.options, args = parser.parse_args(argv) largs = parser.largs return (self.options, args, largs)
def main(): parser = OptionParser(usage="%prog [options] jobfile|-", description=__doc__, version='%prog version 0.1') parser.add_option("-n", "--nb-procs", dest="nb_procs", type="int", default=DEFAULT_PROCESSES, help=("By default the number of concurent processes to " "run is equal to the number of CPUs " "(Default: %default)") ) (opts, spillover) = parser.parse_args() if len(spillover) != 1: parser.error('Invalid arguments.') if opts.nb_procs < 2: parser.error("There is no point of using that program if you are not " "running anything in parallel.") if spillover[0] == '-': jobfd = sys.stdin else: try: jobfd = open(spillover[0]) except IOError: parser.error("Job file '%s' open error" % spillover[0]) run_pexec(jobfd, opts.nb_procs)
def main(argv=None): if argv is None: argv = sys.argv parser = OptionParser() parser.add_option("-k", "--key", dest="key", help="your Giant Bomb API key") opts, args = parser.parse_args() if opts.key is None: print >>sys.stderr, "Option --key is required" return 1 query = ' '.join(args) Bombject.api_key = opts.key search = GameResult.get('/search/').filter(resources='game') search = search.filter(query=query) if len(search.results) == 0: print "No results for %r" % query elif len(search.results) == 1: (game,) = search.results print "## %s ##" % game.name print print game.summary else: print "## Search results for %r ##" % query for game in search.results: print game.name return 0
def parse_options(): parser = OptionParser() parser.add_option("-f", "--file", dest="filename", default="/etc/sssd/sssd.conf", help="Set input file to FILE", metavar="FILE") parser.add_option("-o", "--outfile", dest="outfile", default=None, help="Set output file to OUTFILE", metavar="OUTFILE") parser.add_option("", "--no-backup", action="store_false", dest="backup", default=True, help="""Do not provide backup file after conversion. The script copies the original file with the suffix .bak by default""") parser.add_option("-v", "--verbose", action="store_true", dest="verbose", default=False, help="Be verbose") (options, args) = parser.parse_args() if len(args) > 0: print >>sys.stderr, "Stray arguments: %s" % ' '.join([a for a in args]) return None # do the conversion in place by default if not options.outfile: options.outfile = options.filename return options
def make_cli_parser(): """ Creates the command line interface parser. """ usage = "\n".join([ """\ python %prog [OPTIONS] FASTAFILE1 FASTAFILE2 ... ARGUMENTS: FASTAFILE1, FASTAFILE2, ...: paths to one or more FASTA formatted files """, __doc__ ]) cli_parser = OptionParser(usage) cli_parser.add_option( '-o', '--outfile', default=OUTFILE_NAME, help="path to output file [Default: %default]" ) cli_parser.add_option( '-b', '--barrelon', action='store_true', help="ignore possible errors in parsing" ) return cli_parser
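# Hedged usage sketch (assumption): a typical main() driving the parser built by
# make_cli_parser(); process_fasta_files is a hypothetical helper, not part of
# the source.
def main(argv=None):
    cli_parser = make_cli_parser()
    opts, args = cli_parser.parse_args(argv)
    if not args:
        cli_parser.error("Please provide at least one FASTA file.")
    with open(opts.outfile, 'w') as out_fh:
        process_fasta_files(args, out_fh, ignore_errors=opts.barrelon)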
def __init__(self, **kwargs): OptionParser.__init__(self, **kwargs) self.add_option("-m", "--masterMode", action="store_true", dest="masterMode", help="Run the script in master mode.", default=False) self.add_option("--noPrompts", action="store_true", dest="noPrompts", help="Uses default answers (intended for CLOUD TESTS only!).", default=False) self.add_option("--manifestFile", action="store", type="string", dest="manifestFile", help="A JSON file in the form of test_manifest.json (the default).") self.add_option("-b", "--browser", action="store", type="string", dest="browser", help="The path to a single browser (right now, only Firefox is supported).") self.add_option("--browserManifestFile", action="store", type="string", dest="browserManifestFile", help="A JSON file in the form of those found in resources/browser_manifests") self.add_option("--reftest", action="store_true", dest="reftest", help="Automatically start reftest showing comparison test failures, if there are any.", default=False) self.add_option("--port", action="store", dest="port", type="int", help="The port the HTTP server should listen on.", default=8080) self.add_option("--unitTest", action="store_true", dest="unitTest", help="Run the unit tests.", default=False) self.add_option("--fontTest", action="store_true", dest="fontTest", help="Run the font tests.", default=False) self.add_option("--noDownload", action="store_true", dest="noDownload", help="Skips test PDFs downloading.", default=False) self.add_option("--statsFile", action="store", dest="statsFile", type="string", help="The file where to store stats.", default=None) self.add_option("--statsDelay", action="store", dest="statsDelay", type="int", help="The amount of time in milliseconds the browser should wait before starting stats.", default=10000) self.set_usage(USAGE_EXAMPLE)
def main(): parser = OptionParser(usage="usage:%prog [options] filepath") parser.add_option("-p", "--port", action="store", type="string", default="", dest="port") (options, args) = parser.parse_args() port = options.port print port if not port: port = utils.config.get("global", "port") debug_mode = int(utils.config.get('global', 'debug')) sys.stderr.write("listen server on port %s ...\n" % port) settings = dict( debug=True if debug_mode else False, cookie_secret="e446976943b4e8442f099fed1f3fea28462d5832f483a0ed9a3d5d3859f==78d", session_secret="3cdcb1f00803b6e78ab50b466a40b9977db396840c28307f428b25e2277f1bcc", session_timeout=600, store_options={ 'redis_host': '127.0.0.1', 'redis_port': 6379, 'redis_pass': '', } ) application = HandlersApplication(handler, **settings) server = tornado.httpserver.HTTPServer(application) server.bind(port) server.start(1 if debug_mode else 15) tornado.ioloop.IOLoop.instance().start()
## These params contain everything we should need to weight our sources. ## src_ra, src_dec, redshift, gamma, flux, lum = params['ra'], params['dec'], params['redshift'], params['gamma'], params['flux'], params['lum'] #We need to switch the source locations to radians for background_scrambles (I think). src_ra = np.radians(src_ra) src_dec = np.radians(src_dec) ## There are three modelweights I can use, so lets put them in a dictionary for easy access. ## modelweights = {'flux':flux, 'redshift': list(np.power(redshift,-2))} import data_multi ##Here I add an argument for the sole purpose of having separate background files differently named. ## parser = OptionParser (usage = '%prog [options]') parser.add_option ('--batch', dest = 'batch', type = int, default = 0, metavar = 'BATCH', help = 'Assigns a number to each batch of background trials.') parser.add_option ('--batchsize', dest = 'batchsize', type = int, default = 1000, metavar = 'BATCHSIZE', help = 'Assigns how many background trials are used in each batch.') parser.add_option ('--llhweight', dest = 'llhweight', type = str, default = 'uniform', metavar = 'LLHWEIGHT', help = 'Sets the weighting used in the llh model for point source searches.') parser.add_option ('--years', dest = 'years', type = int, default = 4, metavar = 'YEARS', help = 'Number of years of data')
def main(argv=None): # for interactive mode if not argv: argv = sys.argv[1:] # handle cmd line arguments parser = OptionParser() parser.add_option("-p", "--plot", dest="plot", action="store_true", help="generates plot after data collection is finished") parser.add_option("-f", "--file", dest="fsave", action="store_true", help="save plot to file in the current directory") parser.add_option("-H", "--host", dest="host", default="google.com", help="the url or ip address to ping [default: %default]") parser.add_option("-n", "--num", dest="n", default=1, type="int", help="the number of packets to send on each ping iteration [default: %default]") parser.add_option("-t", "--dt", dest="dt", default=0.5, type="float", help="the time interval (seconds) in which successive pings are sent [default: %default s]") parser.add_option("-l", "--log", dest="log", action="store_true", help="save a logfile of the event in the current directory") parser.add_option("-s", "--size", dest="size", default="1280x640", help="If plotting/saving a plot, this is the plot's dimensions"\ "in pixels (at 80 DPI) in the format XxY [default: 1280x640]") # unpack and initialize data opts, args = parser.parse_args(argv) ping = np.array([]) loss = np.array([]) t = np.array([]) now = np.array([]) cnt = 0 # write log if specified if opts.log or opts.fsave: # get timestamp nowtime = datetime.datetime.now() datestr = nowtime.isoformat()[:-7][:10] timestr = "{0}h{1}m{2}s".format(nowtime.hour, nowtime.minute, nowtime.second) stamp = datestr + "_" + timestr logname = "pingplot_v{vers:0.1f}_{0}_{1}.log".format(opts.host, stamp, vers=__version__) # remove all '.' plotname = "pingplot_v{vers:0.1f}_{0}_{1}.png".format(opts.host, stamp, vers=__version__) # remove all '.' if opts.log: logfile = open(logname, 'w') logfile.write("PingPlot Version {0:0.1} - Log File\n\n\n".format(__version__)) # start the main loop print("PingPlot Version {0} -- by ccampo\n".format(__version__)) print("{0:^23}\n=======================".format("Run Parameters")) print("{0:>17} {1}".format("Hostname:", opts.host)) print("{0:>17} {1}".format("Ping interval:", str(opts.dt) + " s")) print("{0:>17} {1}".format("Packets per ping:", opts.n)) print("\n\nPress CTRL+C to quit...\n") print("{0:^15} {1:^15} {2:^15} {3:^15} {4:^15}\n" .format( "AVG. PING", "PACKET LOSS", "NUM. PINGS", "NUM. TIMEOUTS", "TIME ELAPSED" )) while True: # quit on ctrl+c try: # ping and parse; print results ping, loss, t, out = call_pinger(opts.host, opts.n, ping, loss, t) now = np.append(now, datetime.datetime.now()) cnt += 1 # get ping data mloss = loss.mean() nans = np.isnan(ping) if len(ping[~nans]) > 0: mping = ping[~np.isnan(ping)].mean() else: mping = np.nan # write log if specified if opts.log: write_log(logfile, out) # only ping after time dt time.sleep(opts.dt) # print results deltat = datetime.timedelta(seconds=(round(time.time() - t[0], 0))) sys.stdout.write("\r{0:^15.8} {1:^15.10} {2:^15} {3:^15} {4:^15}" .format( str(round(mping, 2))+" ms", str(round(mloss, 2))+" %", cnt*opts.n, len(ping[nans]), str(deltat) )) sys.stdout.flush() except KeyboardInterrupt: break print("\n") # close log file if opts.log: print("Saved log file %s" % logname) logfile.close() # make plot to save if opts.fsave or opts.plot: # check if any data was collected if len(ping[~nans]) == 0: print("Error: cannot generate plot; no data collected. 
Please check your connection.") return 2 plt = plot_gen(ping, now, t, nans, opts.host, opts.plot, opts.size) # save if applicable if opts.fsave: print("Saved plot %s" % plotname) plt.savefig(plotname) # show plot if specified if opts.plot: plt.show() return 2 # exit
#!/usr/bin/env python import matplotlib.pyplot as plt from matplotlib.dates import DateFormatter, MinuteLocator, SecondLocator import numpy as np from StringIO import StringIO import os import re import sys from optparse import OptionParser import subprocess parser = OptionParser (usage = "Usage: %prog [options] BINARY-PROTOCOL") parser.add_option ('--histogram', action = 'store_true', dest = 'histogram', help = "pause time histogram") parser.add_option ('--minor', action = 'store_true', dest = 'minor', help = "only show minor collections in histogram") parser.add_option ('--major', action = 'store_true', dest = 'major', help = "only show major collections in histogram") (options, files) = parser.parse_args () show_histogram = False show_minor = True show_major = True if options.minor: show_histogram = True show_major = False if options.major: show_histogram = True show_minor = False if options.histogram: show_histogram = True script_path = os.path.realpath (__file__) sgen_grep_path = os.path.join (os.path.dirname (script_path), 'sgen-grep-binprot')
if c > 0.: adjH.travelspeed = H.travelspeed / c elif index == "travellength": H.travellength = 12. / \ (totalsamples * (totalsamples + 1)) * \ subtotal - 3. * (totalsamples + 1) if c > 0.: adjH.travellength = H.travellength / c elif index == "waittime": H.waittime = 12. / \ (totalsamples * (totalsamples + 1)) * \ subtotal - 3. * (totalsamples + 1) if c > 0.: adjH.waittime = H.waittime / c optParser = OptionParser() optParser.add_option("-t", "--tripinform-file", dest="vehfile", help="read vehicle information generated by the DUA assignment from FILE (mandatory)", metavar="FILE") optParser.add_option("-o", "--output-file", dest="outputfile", default="Global_MOE.txt", help="write output to FILE", metavar="FILE") optParser.add_option("-g", "--SGToutput-file", dest="sgtoutputfile", default="significanceTest.txt", help="write output to FILE", metavar="FILE") optParser.add_option("-v", "--verbose", action="store_true", dest="verbose", default=False, help="tell me what you are doing") optParser.add_option("-e", "--tTest", action="store_true", dest="ttest", default=False, help="perform the t-Test") optParser.add_option("-k", "--kruskalWallisTest", action="store_true", dest="kwtest", default=False, help="perform the Kruskal-Wallis-Test") (options, args) = optParser.parse_args()
def main(): usage = "usage: %prog [options]\n Check MySQL Function" parser = OptionParser(usage) parser.add_option("-H", "--host", action="store", dest="host", type="string", default='localhost', help="Connect to mysql host.") parser.add_option("-d", "--debug", action="store_true", dest="debug", default=False, help="if output all process") parser.add_option("-i", "--item", action="store", dest="item", type="string", default='Uptime', help="which item to fetch") (options, args) = parser.parse_args() if 1 >= len(sys.argv): parser.print_help() return hostname = options.host mysql = Mysql(hostname,debug=options.debug) try: item = options.item if item == "is_can_write": print mysql.is_mysql_can_write() #print mysql.get_item_val(options.item) except Exception as expt: import traceback tb = traceback.format_exc() mysql.get_logger().error(tb)
class PyjamasTester(object): parser = OptionParser() parser.add_option( "--verbose", "-v", dest="verbose", action="store_true", default=False, help="Show detailed information") parser.add_option( "--no-tracker", dest="tracker", action="store_false", default=True, help="Do not load data from issue tracker") parser.add_option( "--cpython", dest="cpython", action="store", default="/usr/bin/python", help="Path to CPython executable" ) parser.add_option( "--pyv8", dest="pyv8", action="store", default="pyv8/pyv8run.py", help="Path to PyV8-based interpreter" ) parser.add_option( "--no-libtest", dest="libtest_run", action="store_false", default=True, help="Do not run any LibTest tests" ) parser.add_option( "--no-pyv8", dest="pyv8_run", action="store_false", default=True, help="Do not run any PyV8 tests" ) parser.add_option( "--no-browsers", dest="browsers_run", action="store_false", default=True, help="Do not run any browsers tests" ) parser.add_option( "--no-pyjd", dest="pyjd_run", action="store_false", default=True, help="Do not run any browsers tests" ) parser.add_option( "--no-utils", dest="utils_run", action="store_false", default=True, help="Do not test utilities" ) parser.add_option( "--no-examples", dest="examples_run", action="store_false", default=False, help="Do not test examples" ) parser.add_option( "--examples", dest="examples_run", action="store_true", help="Test examples" ) parser.add_option( "--examples-path", dest="examples_path", action="store", default="examples/", help="Path to examples dir" ) parser.add_option( "--tracker-report", dest="tracker_report", action="store_true", default=False, help="Provide report on tracker issues lacking failing tests" ) def __init__(self): self.options, args = self.parser.parse_args() self.tmpdir = mkdtemp(prefix='pyjs') self.root = path.dirname(__file__) print "Output will be produced in %s" % self.tmpdir self.tracker_url = "http://code.google.com/p/pyjamas/issues/csv" if not path.isabs(self.options.pyv8): self.options.pyv8 = path.join(currentdir, self.options.pyv8) if not path.isabs(self.options.examples_path): self.options.examples_path = path.join(currentdir, self.options.examples_path) self.testsresults = [] self.testsknown = [] if self.options.libtest_run: self._test(self.test_libtest_cpython) if self.options.libtest_run: if self.options.pyv8_run: self._test(self.test_libtest_pyv8) if self.options.utils_run: self._test(self.test_pyjsbuild) self._test(self.test_pyjscompile) self._test(self.test_pyjampiler) if self.options.examples_run: self._test(self.test_examples) self.issues = {} if self.options.tracker: self.get_tracker_issues() self.print_results() def run_cmd(self, cmd=None, opts=None, cwd=None): if not cmd: cmd = self.options.cpython cmd = path.abspath(cmd) if not cwd: cwd = '.' 
cwd = path.abspath(cwd) if opts: if not isinstance(opts, list): opts = [opts] cmd = ' '.join([cmd] + opts) print "Running `%s` at \"%s\"" % (cmd, cwd) proc = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True, cwd=cwd, env=os.environ ) stdout_value, stderr_value = proc.communicate('') return stdout_value, stderr_value def test_libtest_cpython(self, output): return self.parse_cmd_output( *self.run_cmd(opts="LibTest.py", cwd=path.join(self.options.examples_path, 'libtest')) ) def test_libtest_pyv8(self, output): return self.parse_cmd_output( *self.run_cmd(cmd=self.options.pyv8, opts=["-o %s" % output, "--strict", "--dynamic '^I18N[.].*.._..'", "LibTest", "`find I18N -name ??_??.py`"], cwd=path.join(self.options.examples_path, 'libtest')) ) def test_examples(self, output): return self.check_stderr(*self.run_cmd( opts=["__main__.py", "--", "-o %s" % output,], cwd=self.options.examples_path)) def test_pyjsbuild(self, output): return self.check_stderr(*(self.run_cmd( path.join(self.root, 'bin', 'pyjsbuild'), opts=["-o %s" % output, "--strict", "LibTest", ], cwd=path.join(self.options.examples_path, 'libtest')) + ('libtest', 'compile'))) def test_pyjscompile(self, output): return self.check_stderr(*(self.run_cmd( path.join(self.root, 'bin', 'pyjscompile'), opts=["-o %s/LibTest.js" % output, "--strict", "LibTest.py", ], cwd=path.join(self.options.examples_path, 'libtest')) + ('libtest', 'compile'))) def test_pyjampiler(self, output): cmd = path.join(self.root, 'bin', 'pyjampiler') r = self.check_stderr(*(self.run_cmd( cmd, opts=["-r Hello", "-o %s/hello.js" % output, ], cwd=path.join(self.options.examples_path, 'helloworld')) + ('helloworld', 'compile'))) r += self.check_stderr(*(self.run_cmd( cmd, opts=["-r LibTest", "-o %s/libtest.js" % output, ], cwd=path.join(self.options.examples_path, 'libtest')) + ('libtest', 'compile'))) return r def check_stderr(self, stdout, stderr, cls='cmd', name=''): if not name: name = cls if stderr: return [dict(cls = cls, status = 'failed', name = name, message = "Error happened during execution\n" "Last STDOUT lines:\n" "------------------\n" "%(stdout)s\n" "STDERR:\n" "------------------\n" "%(stderr)s" % {'stdout':'\n'.join(stdout.split('\n')[-5:]), 'stderr':stderr} )] else: return [dict(cls = cls, status = "passed", name = name, count = 1)] def parse_cmd_output(self, stdout_value, stderr_value=None): """ Parse stdout/stderr into list of dicts """ res = [] for line in stdout_value.split('\n'): m = test_msg_re.match(line) if m: groups = m.groups() test = dict( cls = groups[0], status = (groups[1] == 'Known issue') and 'known' or 'failed', name = groups[2], message = groups[3] ) if test['status'] == 'known': test['issues'] = issue_no_re.findall(test['message']) res.append(test) else: m = test_passed_re.match(line) if m: groups = m.groups() test = dict( cls = groups[0], status = "passed", name = groups[0], count = int(groups[1]) ) res.append(test) if stderr_value: res.append(dict( cls = 'command', status = 'failed', name = 'execution', message = stderr_value )) return res def _test(self, method): name = method.im_func.func_name d = dict(name=name, tests=[], failed_list=[], known_list=[], total=0, passed=0, failed=0, known=0, err=None) self.testsresults.append(d) output = path.join(self.tmpdir, method.func_name) if not path.exists(output): mkdir(output) try: d['tests'] = method(output=output) except Exception, e: print e d['err'] = e return False for test in d['tests']: if test['status'] == 'passed': d['passed'] 
+= test['count'] elif test['status'] == 'failed': d['failed'] += 1 d['failed_list'].append(test) elif test['status'] == 'known': d['known'] += 1 d['known_list'].append(test) self.testsknown.append((d, test)) d['total'] = d['passed'] + d['failed'] + d['known']
def parse_args(): essentia_version = '%s\n'\ 'python version: %s\n'\ 'numpy version: %s' % (essentia.__version__, # full version sys.version.split()[0], # python major version numpy.__version__) # numpy version from optparse import OptionParser parser = OptionParser(usage=essentia_usage, version=essentia_version) parser.add_option("-v","--verbose", action="store_true", dest="verbose", default=False, help="verbose mode") parser.add_option("-s","--segmentation", action="store_true", dest="segmentation", default=False, help="do segmentation") parser.add_option("-p","--profile", action="store", type="string", dest="profile", default="music", help="computation mode: 'music', 'sfx' or 'broadcast'") parser.add_option("--start", action="store", dest="startTime", default="0.0", help="time in seconds from which the audio is computed") parser.add_option("--end", action="store", dest="endTime", default="1.0e6", help="time in seconds till which the audio is computed, 'end' means no time limit") parser.add_option("--svmpath", action="store", dest="svmpath", default=join('..', 'svm_models'), help="path to svm models") (options, args) = parser.parse_args() return options, args
else: #print ("Attempting to decrypt: '%s'..." % file_name), decrypt.decrypt([file_name], key, extension) if is_funf_database(file_name): #print "Success!" return True else: print "FAILED!!!!" print "File is either encrypted with another method, another key, or is not a valid sqlite3 db file." print "Keeping original file." shutil.move(decrypt.backup_file(file_name, extension), file_name) return False if __name__ == '__main__': usage = "%prog [options] [sqlite_file1.db [sqlite_file2.db...]]" description = "Safely decrypt Sqlite3 db files. Checks to see if the file can be opened by Sqlite. If so, the file is left alone, otherwise the file is decrypted. Uses the decrypt script, so it always keeps a backup of the original encrypted files. " parser = OptionParser(usage="%s\n\n%s" % (usage, description)) parser.add_option("-i", "--inplace", dest="extension", default=None, help="The extension to rename the original file to. Will not overwrite file if it already exists. Defaults to '%s'." % decrypt.default_extension,) parser.add_option("-k", "--key", dest="key", default=None, help="The DES key used to decrypt the files. Uses the default hard coded one if one is not supplied.",) (options, args) = parser.parse_args() key = options.key if options.key else decrypt.key_from_password(decrypt.prompt_for_password()) try: for file_name in args: decrypt_if_not_db_file(file_name, key, options.extension) except Exception as e: import sys sys.exit("ERROR: " + str(e))
# ei = pickle.load(open(ei_file, 'rb')) si = pickle.load(open(si_file, 'rb')) logger.info("=" * 40) logger.info("Executing EvaluationRunMonteCarlo:") logger.info(str(si)) logger.info("=" * 40) mce = MonteCarloEvaluation() mce.execute(si, MONTE_CARLO_ITERATIONS) if __name__ == "__main__": try: optparser = OptionParser() optparser.add_option("-e", "--executable_info", help="Pickled executable information filename") optparser.add_option("-s", "--scenario_info", help="Pickled scenario information filename") (optionsp, argsp) = optparser.parse_args() if not (optionsp.executable_info or optionsp.scenario_info): optparser.print_help() sys.exit(-1) ermc = EvaluationRunMonteCarlo()
import sys import pickle from optparse import OptionParser import time from keras_frcnn import config from keras import backend as K from keras.layers import Input from keras.models import Model from keras_frcnn import roi_helpers import json import matplotlib.pyplot as plt sys.setrecursionlimit(40000) parser = OptionParser() parser.add_option("-p", "--path", dest="test_path", help="Path to test data.") parser.add_option( "-n", "--num_rois", type="int", dest="num_rois", help="Number of ROIs per iteration. Higher means more memory use.", default=32) parser.add_option( "--config_filename", dest="config_filename", help= "Location to read the metadata related to the training (generated when training).", default="config.pickle")
#!/usr/bin/env python from __future__ import print_function from sys import stderr, exit import commands from optparse import OptionParser parser = OptionParser(usage="usage: %prog [options] Trigger_Path") parser.add_option("--firstRun", dest="firstRun", help="first run", type="int", metavar="RUN", default=1) parser.add_option("--lastRun", dest="lastRun", help="last run", type="int", metavar="RUN", default=9999999) parser.add_option("--groupName", dest="groupName", help="select runs of name like NAME", metavar="NAME", default="Collisions%") parser.add_option("--jsonOut", dest="jsonOut", help="dump prescales in JSON format on FILE", metavar="FILE") (options, args) = parser.parse_args() if len(args) != 1: parser.print_usage()
import threading import netaddr import json import os import datetime import signal from optparse import OptionParser from collections import defaultdict from multiprocessing.dummy import Pool host_status = {} hosts_to_skip = [] counter = defaultdict(int) lock = threading.Lock() options = OptionParser(usage='%prog <network> [network2] [network3] ...', description='Test for SSL heartbleed vulnerability (CVE-2014-0160) on multiple domains') options.add_option('--port', '-p', dest="port", default=443, help="Port to scan on all hosts or networks, default 443") options.add_option('--input', '-i', dest="input_file", default=[], action="append", help="Optional input file of networks or ip addresses, one address per line") options.add_option('--logfile', '-o', dest="log_file", default="results.txt", help="Optional logfile destination") options.add_option('--resume', dest="resume", action="store_true", default=False, help="Do not rescan hosts that are already in the logfile") options.add_option('--timeout', '-t', dest="timeout", default=5, help="How long to wait for remote host to respond before timing out") options.add_option('--threads', dest="threads", default=100, help="If specific, run X concurrent threads") options.add_option('--json', dest="json_file", default=None, help="Save data as json into this file") options.add_option('--only-vulnerable', dest="only_vulnerable", action="store_true", default=False, help="Only scan hosts that have been scanned before and were vulnerable") options.add_option('--only-unscanned', dest="only_unscanned", action="store_true", default=False, help="Only scan hosts that appear in the json file but have not been scanned") options.add_option('--summary', dest="summary", action="store_true", default=False, help="Useful with --json. Don't scan, just print old results") options.add_option('--verbose', dest="verbose", action="store_true", default=False, help="Print verbose information to screen") options.add_option('--max', dest="max", default=None, help="Exit program after scanning X hosts. Useful with --only-unscanned") opts, args = options.parse_args() threadpool = Pool(processes=int(opts.threads))
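# Hedged note (assumption): none of the numeric options above declare type="int",
# so values supplied on the command line arrive as strings (only opts.threads is
# coerced, inside the Pool call). Coercing the rest once up front avoids mixed
# str/int comparisons later; the variable names below are illustrative.
port = int(opts.port)
timeout = float(opts.timeout)
max_hosts = int(opts.max) if opts.max is not None else None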
else: self.add(BoneControl(cp, skinpos)) class Editor(cocos.scene.Scene): def __init__(self, skeleton, savefile_name, skin=False): super(Editor, self).__init__() self.ui = BoneUILayer(skeleton, savefile_name, skin) self.add(self.ui) if __name__ == "__main__": import sys, imp director.init() parser = OptionParser() parser.add_option("-b", "--background", dest="background", help="use file as background", default=False, metavar="FILE") parser.add_option("-s", "--scale", dest="scale", help="scale image by", default=1, metavar="SCALE") parser.add_option("-k", "--skin", dest="skin",
""" Export scan data for provided date, in CSV form. """ import csv import datetime import os import sys import time from optparse import OptionParser os.environ['DJANGO_SETTINGS_MODULE'] = "settings" from infobase.models import Scan if __name__ == '__main__': parser = OptionParser() parser.add_option("-d", "--date", dest="date", help="YYYY-MM-DD") (options, args) = parser.parse_args() if not options.date: print "Error: '--date' argument is required. Use '-h' option for help." sys.exit() date = datetime.date(*time.strptime(options.date, "%Y-%m-%d")[:3]) writer = csv.writer(sys.stdout) writer.writerow(["Student ID", "Last Name", "First Name", "Timestamp"]) rows = Scan.barcode_scans_for_date(date).order_by('timestamp')
def parseCmdLine(): # Parse command-line parameters parser = OptionParser(description="Generate a token for authorization") parser.add_option("-a", "--ip", dest="host", help="server name or IP (default: %default)", metavar="ipAddress", default="localhost") parser.add_option("-p", dest="port", type="int", help="server port (default: %default)", metavar="tcpPort", default=8194) parser.add_option("-s", dest="securities", help="security (default: IBM US Equity)", metavar="security", action="append", default=[]) parser.add_option("-f", dest="fields", help="field to subscribe to (default: PX_LAST)", metavar="field", action="append", default=[]) parser.add_option("-d", dest="dirSvcProperty", help="dirSvcProperty", metavar="dirSvcProperty") (options, args) = parser.parse_args() if not options.securities: options.securities = ["IBM US Equity"] if not options.fields: options.fields = ["PX_LAST"] return options
and aligns it. WARNING: THIS WILL PRODUCE TENS OF THOUSANDS OF FILES WHICH WILL SLOW YOUR COMPUTER DOWN, AND MAKE YOU UNPOPULAR WITH YOUR SYS ADMIN. YOU ASKED FOR THIS FEATURE!!! The BLAST and alignment also make it very slow. $ python parse_clusters.py -i clustering_outfile_already_decoded_from_temp_names -n 3 -a all_sequences.fasta --read_prefix M01157 -o summarise_clusters.out command line options """ parser = OptionParser(usage=usage) parser.add_option("-f","--fasta", dest="fasta", default=None, help="assembled fasta file to get stats on", metavar="FILE") parser.add_option("-a","--all_fasta", dest="all_fasta", default=None, help="both the assembled fasta and database fasta." " this is used to get the sequences for the clusters folder", metavar="FILE") parser.add_option("-n","--min_novel_cluster_threshold", dest="min_novel_cluster_threshold", default=4, help="min_novel_cluster_threshold to determine if this is a" " novel cluster to output. Default = 4 ") parser.add_option("-l", "--left", dest="left", default="temp_not_trimmedr1.fq",
def main(): # Setup option parser p = OptionParser(usage="%prog arg1 [options]") p.add_option("-a", "--address", dest="address", default=DEFAULT_ADDRESS, action="store") p.add_option("-p", "--port", dest="port", default=DEFAULT_PORT, action="store", type="int") p.add_option("-m", "--message", dest="message", default=DEFAULT_MSG, action="store") p.add_option("-n", "--numMessages", dest="numMessages", default=DEFAULT_NUM_MESSAGES, action="store", type="int") p.add_option("-t", "--numThreads", dest="numThreads", default=DEFAULT_NUM_THREADS, action="store", type="int") p.add_option("-d", "--debug", dest="debug", default=True, action="store_true") # Run option parser (opts, args) = p.parse_args(sys.argv[1:]) global SHOULD_OUTPUT SHOULD_OUTPUT = opts.debug handle_client(opts)
import array SANDYLINES = [] def sandy_callback(option, opt_str,value,parser): SANDYLINES.append(int(value)) SIG2FILES_ALL = {} def siggy_callback(option, opt_str,value,parser): key,val = value.split(":") SIG2FILES_ALL[int(key)] = val TEXLABELS = {} def labels_callback(option, opt_str,value,parser): key,val = value.split(":") TEXLABELS[key] = val from optparse import OptionParser parser=OptionParser() parser.add_option("","--xl",default="",type='str') parser.add_option("","--xr",default="",type='str') parser.add_option("","--labels",default="",type='str') parser.add_option("","--groups",default="",type='str') parser.add_option("","--smcen",default="",type='str') parser.add_option("","--colors",default="",type='str') parser.add_option("","--negs",default="",type='str') parser.add_option("","--styles",default="",type='str') parser.add_option("","--markers",default="",type='str') parser.add_option("","--widths",default="",type='str') parser.add_option("","--msizes",default="",type='str') parser.add_option("","--combined",default="",type='str') parser.add_option("","--longStyle",default=False,action='store_true') parser.add_option("","--sm",default="",type='str') parser.add_option("-b","--batch",default=False,action='store_true')
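# Hedged sketch (assumption): the callbacks defined above (sandy_callback,
# siggy_callback, labels_callback) are never registered in this excerpt; with
# optparse they would be wired up via action="callback". The option names below
# are hypothetical, chosen only to illustrate the registration pattern.
parser.add_option("", "--sandyline", action="callback", callback=sandy_callback,
                  type="str", help="record a vertical line position (may be repeated)")
parser.add_option("", "--sig2file", action="callback", callback=siggy_callback,
                  type="str", help="key:value mapping of signal id to file")
parser.add_option("", "--texlabel", action="callback", callback=labels_callback,
                  type="str", help="key:value mapping of name to TeX label")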
def process_options(): """Do what was asked of us""" parser = OptionParser() parser.add_option("-m", "--map", action="store", default=None, dest="map", help="Call with a date to draw a chorpleth for one date") parser.add_option("-g", "--gemeente", action="store_true", default=False, dest="gemeente", help="Draw Gemeente (municipal area) graph, requires -c") parser.add_option("-a", "--animation", action="store_true", default=False, dest="animation", help="Make animated chopleth") parser.add_option("-r", "--regions", action="store", default=None, dest="regions", help="A comma separated list of regions to graph") parser.add_option("-c", "--country", action="store", default=None, dest="country", help="A comma separated list of countries to animate") parser.add_option("-l", "--league", action="store_true", default=False, dest="league", help="League table by Gemeente") parser.add_option("-n", "--nation", action="store_true", default=False, dest="nation", help="Data by Nation") parser.add_option("-s", "--start", action="store", default=None, dest="startdate", help="Start date from animation yyyymmdd") parser.add_option("-e", "--end", action="store", default=None, dest="enddate", help="Start date from animation yyyymmdd") parser.add_option("-t", "--type", action="store", default='c', dest="type", help="choose (c)ases, (h)ospital (d)eaths") parser.add_option("-p", "--pivot", action="store_true", default=False, dest="pivot", help="Pivot Data Set") parser.add_option("-A", "--absolute", action="store_true", default=False, dest="absolute", help="Do not calculate per-capita (with -n)") (options, _) = parser.parse_args() regions = parse_regions(options.regions) combined = Combine(options) combined.parse_countries(options.country) combined.process() plot = Plot(combined, regions) if options.pivot: plot.pivot_graph() return if options.gemeente: if len(regions) == 0: print('You must specify a region to graph with -g') sys.exit(1) plot.gemeente_graph() if options.league: combined.project_for_date(None) if options.nation: if options.animation: plot.nations_animate() return plot.nations() if options.animation: clear_images() plot.make_frames() animate() if options.map is not None: date = datetime.datetime.strptime(options.map, '%Y-%m-%d') print('Drawing for %s' % date) subset = plot.get_one_date(date) print(subset) plot.one_plot(subset, date)
def readCommand(argv): "Processes the command used to run from the command line." from optparse import OptionParser parser = OptionParser(USAGE_STRING) parser.add_option('-c', '--classifier', help=default('The type of classifier'), choices=[ 'mostFrequent', 'nb', 'naiveBayes', 'perceptron', 'mira', 'minicontest' ], default='mostFrequent') parser.add_option('-d', '--data', help=default('Dataset to use'), choices=['digits', 'faces', 'pacman'], default='digits') parser.add_option('-t', '--training', help=default('The size of the training set'), default=100, type="int") parser.add_option('-f', '--features', help=default('Whether to use enhanced features'), default=False, action="store_true") parser.add_option('-o', '--odds', help=default('Whether to compute odds ratios'), default=False, action="store_true") parser.add_option('-1', '--label1', help=default("First label in an odds ratio comparison"), default=0, type="int") parser.add_option('-2', '--label2', help=default("Second label in an odds ratio comparison"), default=1, type="int") parser.add_option('-w', '--weights', help=default('Whether to print weights'), default=False, action="store_true") parser.add_option( '-k', '--smoothing', help=default("Smoothing parameter (ignored when using --autotune)"), type="float", default=2.0) parser.add_option( '-a', '--autotune', help=default("Whether to automatically tune hyperparameters"), default=False, action="store_true") parser.add_option('-i', '--iterations', help=default("Maximum iterations to run training"), default=3, type="int") parser.add_option('-s', '--test', help=default("Amount of test data to use"), default=TEST_SET_SIZE, type="int") parser.add_option('-g', '--agentToClone', help=default("Pacman agent to copy"), default=None, type="str") options, otherjunk = parser.parse_args(argv) if len(otherjunk) != 0: raise Exception('Command line input not understood: ' + str(otherjunk)) args = {} # Set up variables according to the command line input. 
print "Doing classification" print "--------------------" print "data:\t\t" + options.data print "classifier:\t\t" + options.classifier if not options.classifier == 'minicontest': print "using enhanced features?:\t" + str(options.features) else: print "using minicontest feature extractor" print "training set size:\t" + str(options.training) if (options.data == "digits"): printImage = ImagePrinter(DIGIT_DATUM_WIDTH, DIGIT_DATUM_HEIGHT).printImage if (options.features): featureFunction = enhancedFeatureExtractorDigit else: featureFunction = basicFeatureExtractorDigit if (options.classifier == 'minicontest'): featureFunction = contestFeatureExtractorDigit elif (options.data == "faces"): printImage = ImagePrinter(FACE_DATUM_WIDTH, FACE_DATUM_HEIGHT).printImage if (options.features): featureFunction = enhancedFeatureExtractorFace else: featureFunction = basicFeatureExtractorFace elif (options.data == "pacman"): printImage = None if (options.features): featureFunction = enhancedFeatureExtractorPacman else: featureFunction = basicFeatureExtractorPacman else: print "Unknown dataset", options.data print USAGE_STRING sys.exit(2) if (options.data == "digits"): legalLabels = range(10) else: legalLabels = ['Stop', 'West', 'East', 'North', 'South'] if options.training <= 0: print "Training set size should be a positive integer (you provided: %d)" % options.training print USAGE_STRING sys.exit(2) if options.smoothing <= 0: print "Please provide a positive number for smoothing (you provided: %f)" % options.smoothing print USAGE_STRING sys.exit(2) if options.odds: if options.label1 not in legalLabels or options.label2 not in legalLabels: print "Didn't provide a legal labels for the odds ratio: (%d,%d)" % ( options.label1, options.label2) print USAGE_STRING sys.exit(2) if (options.classifier == "mostFrequent"): classifier = mostFrequent.MostFrequentClassifier(legalLabels) elif (options.classifier == "naiveBayes" or options.classifier == "nb"): classifier = naiveBayes.NaiveBayesClassifier(legalLabels) classifier.setSmoothing(options.smoothing) if (options.autotune): print "using automatic tuning for naivebayes" classifier.automaticTuning = True else: print "using smoothing parameter k=%f for naivebayes" % options.smoothing elif (options.classifier == "perceptron"): if options.data != 'pacman': classifier = perceptron.PerceptronClassifier( legalLabels, options.iterations) else: classifier = perceptron_pacman.PerceptronClassifierPacman( legalLabels, options.iterations) elif (options.classifier == "mira"): if options.data != 'pacman': classifier = mira.MiraClassifier(legalLabels, options.iterations) if (options.autotune): print "using automatic tuning for MIRA" classifier.automaticTuning = True else: print "using default C=0.001 for MIRA" elif (options.classifier == 'minicontest'): import minicontest classifier = minicontest.contestClassifier(legalLabels) else: print "Unknown classifier:", options.classifier print USAGE_STRING sys.exit(2) args['agentToClone'] = options.agentToClone args['classifier'] = classifier args['featureFunction'] = featureFunction args['printImage'] = printImage return args, options
def run_dialog(klass): """Create the QApplication (before modifying sys.argv) then parse sys.argv for acceptable command line parameters. """ app = QtGui.QApplication(sys.argv) # Create an OptionParser usage = '%prog [OPTIONS] [FILE...]' description = 'Create, Edit or Delete a %s.' % (klass.object_name, ) epilog = """Examples: Create a new %(object_name)s in the given SQLite database python %(prog_name)s pyfueldb.db Edit an existing %(object_name)s in the given SQLite database python %(prog_name)s -e 1 pyfueldb.db Delete an existing %(object_name)s from the given SQLite database python %(prog_name)s -d 1 pyfueldb.db The SQLite database file should be specified otherwise changes will not be recorded. """ % { 'object_name': klass.object_name.lower(), 'prog_name': sys.argv[0], } parser = OptionParser(usage=usage, description=description, epilog=epilog, formatter=RawIndentedHelpFormatter()) parser.add_option('-e', action='store', type='int', dest='edit', metavar='ID', help='Edit an existing %s' % (klass.object_name.lower(), )) parser.add_option('-d', action='store', type='int', dest='delete', metavar='ID', help='Delete an existing %s' % (klass.object_name.lower(), )) (options, args) = parser.parse_args(sys.argv[1:]) if not args: parser.error('No database file was specified.') db_file = args[0] if options.edit and options.delete: parser.error('Cannot edit and delete at the same time!') # Create a database connection sessionmaker = get_sessionmaker(get_engine(db_file)) myapp = klass(sessionmaker, edit=options.edit, delete=options.delete) myapp.show() sys.exit(app.exec_())
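# RawIndentedHelpFormatter is referenced above but not defined in this snippet.
# A minimal sketch of the usual way such a formatter is written (an assumption,
# not necessarily the project's class): keep the epilog's manual line breaks
# instead of letting optparse re-wrap them into one paragraph.
from optparse import IndentedHelpFormatter

class RawIndentedHelpFormatter(IndentedHelpFormatter):
    def format_epilog(self, epilog):
        # Return the epilog verbatim rather than re-flowing it.
        return epilog or ""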
#!/usr/bin/env python # -*- coding: utf-8 -*- import os import pandas as pd from glob import glob from optparse import OptionParser parser = OptionParser() parser.add_option('--store_best', dest='store_best', type='str') parser.add_option('--output', dest='output', type='str') (options, args) = parser.parse_args() CSV = glob('*.csv') df_list = [] for f in CSV: df = pd.read_csv(f, index_col=False) name = os.path.splitext(f)[0] df.index = [name] df_list.append(df) table = pd.concat(df_list) best_index = table['F1'].idxmax() table.to_csv(options.output, header=True, index=True) model = "_".join(best_index.split('_')[:-2]) n_feat = model.split('__')[1].split('_')[0] name = options.store_best os.mkdir(name) os.rename(model, os.path.join(name, model))
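# Minimal illustration of why idxmax() is used above (assumption: each per-run
# CSV holds one row of metrics and the run name becomes the index label).
# idxmax() returns the row label of the best score, which the later split('_')
# calls rely on being a string. The values below are made up.
import pandas as pd

scores = pd.DataFrame({'F1': [0.71, 0.84, 0.80]},
                      index=['run_a__8_feat_fold_1',
                             'run_b__16_feat_fold_1',
                             'run_c__16_feat_fold_2'])
print(scores['F1'].idxmax())  # -> 'run_b__16_feat_fold_1'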
return vertical_region_stmt stencilsglobals = GlobalVariableMap() stencilsglobals.map['dx'].double_value = 0.0 hir = make_sir("laplacian_stencil_from_python.cpp", stencils=[ make_stencil( "laplacian_stencil", make_ast([create_vertical_region_stmt()]), [make_field("out"), make_field("in")] ) ], global_variables=stencilsglobals) parser = OptionParser() parser.add_option("-v", "--verbose", action="store_true", dest="verbose", default=False, help="print the SIR") (options, args) = parser.parse_args() # Print the SIR to stdout T = textwrap.TextWrapper( initial_indent=' ' * 1, width=120, subsequent_indent=' ' * 1) des = sir_printer.SIRPrinter() des.visit_global_variables(stencilsglobals) for stencil in hir.stencils: des.visit_stencil(stencil)
#!/usr/bin/env python from optparse import OptionParser import os.path from pyPdf import PdfFileWriter, PdfFileReader from outline import OutlineCreator from units import * import defaults # Option parsing parser = OptionParser("Usage: %prog [options] filename\n\nAll lengths are expressed in millimeters") parser.add_option("-c", "--crop-margin", type="float", dest="crop_margin", default=defaults.crop, help="crop margin, defaults to %s" % defaults.crop) parser.add_option("-b", "--bleed-margin", type="float", dest="bleed_margin", default=defaults.bleed, help="bleed margin, defaults to %s" % defaults.bleed) parser.add_option("-o", "--output", type="string", dest="output_filename", default=defaults.output, help="output filename, defaults to %s" % defaults.output) parser.add_option("-n", "--no-bleed", action="store_true", dest="no_bleed", help="set this if the document doesn't have bleed margins, equivalent to setting the bleed margin to 0") (options, args) = parser.parse_args() if len(args) != 1 or not os.path.isfile(args[0]): parser.print_help() exit() # Read the input and prepare the output document filename = args[0] document = PdfFileReader(file(filename, "rb")) output = PdfFileWriter() for page_num in range(document.getNumPages()): # Get the page dimensions page = document.getPage(page_num) box = page.mediaBox # PDF dimensions are in points
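# The units module imported above is not shown. A sketch of the conversion it
# presumably provides (an assumption; only the arithmetic is certain): PDF user
# space is measured in points, and 1 pt = 1/72 inch while 1 inch = 25.4 mm.
def mm_to_pt(millimeters):
    return millimeters * 72.0 / 25.4

def pt_to_mm(points):
    return points * 25.4 / 72.0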
Generates random trips for the given network. Copyright (C) 2010 DLR/TS, Germany All rights reserved """ import os, sys, random, bisect, datetime, subprocess from optparse import OptionParser sys.path.append(os.path.join(os.path.abspath(os.path.dirname(sys.argv[0])), "../lib")) import sumonet def randomEdge(edges, cumWeights): r = random.random() * cumWeights[-1] return edges[bisect.bisect(cumWeights, r)] optParser = OptionParser() optParser.add_option("-n", "--net-file", dest="netfile", help="define the net file (mandatory)") optParser.add_option("-o", "--output-trip-file", dest="tripfile", default="trips.trips.xml", help="define the output trip filename") optParser.add_option("-r", "--route-file", dest="routefile", help="generates route file with duarouter") optParser.add_option("-t", "--trip-id-prefix", dest="tripprefix", default="t", help="prefix for the trip ids") optParser.add_option("-a", "--trip-parameters", dest="trippar", default="", help="additional trip parameters") optParser.add_option("-b", "--begin", type="int", default=0, help="begin time") optParser.add_option("-e", "--end", type="int", default=3600, help="end time") optParser.add_option("-p", "--period", type="int", default=1, help="repetition period") optParser.add_option("-s", "--seed", type="int", help="random seed") optParser.add_option("-l", "--length", action="store_true",
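# Illustration of the weighted sampling in randomEdge() above (assumption:
# cumWeights is the running sum of per-edge weights, e.g. edge lengths, built
# once before the trip loop). The edge names and weights are made up.
import bisect
import random

edges = ["edge_a", "edge_b", "edge_c"]
weights = [10.0, 30.0, 60.0]
cumWeights = []
total = 0.0
for weight in weights:
    total += weight
    cumWeights.append(total)
r = random.random() * cumWeights[-1]
print(edges[bisect.bisect(cumWeights, r)])  # edge_c is picked ~60% of the time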
def main(): try: usage = "usage: inst.py -i" opts_parser = OptionParser(usage=usage) opts_parser.add_option("-m", dest="mode", action="store", help="Specify mode") opts_parser.add_option("-s", dest="device", action="store", help="Specify device") opts_parser.add_option("-i", dest="binstpkg", action="store_true", help="Install package") opts_parser.add_option("-u", dest="buninstpkg", action="store_true", help="Uninstall package") opts_parser.add_option("-a", dest="user", action="store", help="User name") global PARAMETERS (PARAMETERS, args) = opts_parser.parse_args() except Exception as e: print "Got wrong option: %s, exiting ..." % e sys.exit(1) if not PARAMETERS.user: PARAMETERS.user = "******" global SRC_DIR, PKG_SRC_DIR SRC_DIR = "/home/%s/content" % PARAMETERS.user PKG_SRC_DIR = "%s/tct/opt/%s" % (SRC_DIR, PKG_NAME) if not PARAMETERS.mode: PARAMETERS.mode = "SDB" if PARAMETERS.mode == "SDB": if not PARAMETERS.device: (return_code, output) = doCMD("sdb devices") for line in output: if line.find("\tdevice") != -1: PARAMETERS.device = line.split("\t")[0] break else: PARAMETERS.mode = "SSH" if not PARAMETERS.device: print "No device provided" sys.exit(1) user_info = getUSERID() re_code = user_info[0] if re_code == 0: global XW_ENV userid = user_info[1][0] XW_ENV = "export DBUS_SESSION_BUS_ADDRESS=unix:path=/run/user/%s/dbus/user_bus_socket" % str(userid) else: print "[Error] command error: %s" % str(user_info[1]) sys.exit(1) if PARAMETERS.binstpkg and PARAMETERS.buninstpkg: print "-i and -u conflict with each other" sys.exit(1) if PARAMETERS.buninstpkg: if not uninstPKGs(): sys.exit(1) else: if not instPKGs(): sys.exit(1)
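# doCMD(), getUSERID(), instPKGs() and uninstPKGs() are used above but defined
# elsewhere. A hedged sketch of doCMD only (an assumption about its contract):
# run a shell command and return the exit code plus the captured output lines.
import subprocess

def doCMD(cmd):
    proc = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT, universal_newlines=True)
    output = [line.rstrip("\n") for line in proc.stdout.readlines()]
    return (proc.wait(), output)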
print print "NEW METHODS :" new_methods = d.get_new_methods() for i in new_methods: for elem in new_methods[i]: elem.show2(details) print print "DELETE METHODS :" del_methods = d.get_delete_methods() for i in del_methods: for elem in del_methods[i]: elem.show2(details) print elif options.version != None: print "Androdiff version %s" % misc.ANDRODIFF_VERSION if __name__ == "__main__": parser = OptionParser() for option in options: param = option['name'] del option['name'] parser.add_option(*param, **option) options, arguments = parser.parse_args() sys.argv[:] = arguments main(options, arguments)
def main(): usage = "%prog [options]" description = "Loads the interactive tonal space gui." parser = OptionParser(usage=usage, description=description) parser.add_option( '-d', '--device', dest="device", action="store", type="int", help="select midi device to output to (run devices.py for a list)", default=0) parser.add_option( '-i', '--instrument', dest="instrument", action="store", help= "midi instrument number (0-127). Default: 16 (organ). Use 'N' not to set any instrument", default="16") parser.add_option('-f', '--font-size', dest="font_size", action="store", type="float", help="set the font size of the text in the labels") parser.add_option( '--dims', '--dimensions', dest="dimensions", action="store", help="dimensions of the tonal space: left,bottom,right,top") parser.add_option('-c', '--chord-vocab', dest="chord_vocab", action="store", help="chord vocab file to load chord types from") parser.add_option( '--hs', '--horiz-space', dest="hspace", action="store_true", help="leave space to the sides of the space if the window's big enough", default=False) parser.add_option( '--vs', '--vert-space', dest="vspace", action="store_true", help= "leave space to the top and bottom of the space if the window's big enough", default=False) parser.add_option('-t', '--tuner', dest="tuner", action="store_true", help="run the tonal space tuner") parser.add_option( '--commands', dest="commands", action="store_true", help= "display a list of commands available in the UI and instructions for use" ) options, arguments = parser.parse_args() if options.commands: print TonalSpaceWindow.HELP sys.exit(0) kwargs = {} if options.font_size is not None: kwargs['font_size'] = options.font_size if options.chord_vocab is not None: # Load chord types vocab = HarmonicalInputFile.from_file(options.chord_vocab, ['chord-vocab']) kwargs['chord_types'] = vocab.chords kwargs['chord_type_order'] = vocab.chord_names if options.instrument.lower() != 'n': kwargs['instrument'] = int(options.instrument) if options.dimensions is not None: dims = [int(dim) for dim in options.dimensions.split(",")] if len(dims) != 4: print >> sys.stderr, "Dimensions must be specified as four values: left,bottom,right,top" sys.exit(1) x0, y0, x1, y1 = dims else: x0, y0, x1, y1 = -4, -3, 4, 3 # Output the midi device we're using midi_devs = get_midi_devices() if options.device >= len(midi_devs): print >> sys.stderr, "No midi device %d" % options.device sys.exit(1) else: print >> sys.stderr, "Sending midi events to device %d: %s" % ( options.device, ", ".join( str(x) for x in midi_devs[options.device])) window = create_window(x0, y0, x1, y1, options.device, hfill=(not options.hspace), vfill=(not options.vspace), tuner=options.tuner, **kwargs) gtk.main()
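# Hypothetical invocations of the tonal space GUI above (the script name is a
# placeholder); --dims is split on commas exactly as in the code, so a quick
# standalone check of that parsing:
#   python tonalspace.py --dims=-4,-3,4,3 -i 16
#   python tonalspace.py --commands
dims = [int(dim) for dim in "-4,-3,4,3".split(",")]
assert len(dims) == 4
x0, y0, x1, y1 = dims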
DEFAULT_STATE_STORE_SUBSCRIBER_PORT, DEFAULT_IMPALAD_WEBSERVER_PORT, DEFAULT_STATESTORED_WEBSERVER_PORT, DEFAULT_CATALOGD_WEBSERVER_PORT, DEFAULT_CATALOGD_JVM_DEBUG_PORT, DEFAULT_IMPALAD_JVM_DEBUG_PORT, find_user_processes, run_daemon) logging.basicConfig(level=logging.ERROR, format="%(asctime)s %(threadName)s: %(message)s", datefmt="%H:%M:%S") LOG = logging.getLogger(os.path.splitext(os.path.basename(__file__))[0]) LOG.setLevel(level=logging.DEBUG) KUDU_MASTER_HOSTS = os.getenv("KUDU_MASTER_HOSTS", "127.0.0.1") DEFAULT_IMPALA_MAX_LOG_FILES = os.environ.get("IMPALA_MAX_LOG_FILES", 10) # Options parser = OptionParser() parser.add_option( "-s", "--cluster_size", type="int", dest="cluster_size", default=3, help="Size of the cluster (number of impalad instances to start).") parser.add_option("-c", "--num_coordinators", type="int", dest="num_coordinators", default=3, help="Number of coordinators.") parser.add_option( "--use_exclusive_coordinators",
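# Note on DEFAULT_IMPALA_MAX_LOG_FILES above: os.environ.get() returns a string
# whenever the variable is set, so callers that expect an int may want to
# coerce it. A minimal sketch (an assumption, not taken from the script):
import os

DEFAULT_IMPALA_MAX_LOG_FILES = int(os.environ.get("IMPALA_MAX_LOG_FILES", 10))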
def main(): """Main""" parser = OptionParser(usage='Usage: %prog [options] eval_dir', version='Barnum Cluster ' + module_version) parser.add_option( '-c', '--csv', action='store', type='str', default=None, help='Save CSV of results to given filepath (default: no CSV)') parser.add_option( '-p', '--plot', action='store', type='str', default=None, help= 'Save plot as a PNG image to the given filepath (default: no plotting)' ) parser.add_option( '-w', '--workers', action='store', dest='workers', type='int', default=cpu_count(), help='Number of workers to use (default: number of cores)') parser.add_option('--max-classes', action='store', type='int', default=256, help='How many classes to use (default: 256)') parser.add_option( '--min-samples', action='store', type='int', default=4, help='Minimum samples to form a cluster in DBSCAN (default: 4)') parser.add_option('--eps', action='store', type='float', default=0.03, help='Epsilon parameter to DBSCAN (default: 0.03)') options, args = parser.parse_args() if len(args) != 1 or options.workers < 1: parser.print_help() sys.exit(ERROR_INVALID_ARG) logger.log_start(20) logger.log_info(module_name, 'Barnum Cluster %s' % module_version) idirpath = args[0] if not os.path.isdir(idirpath): logger.log_error(module_name, 'ERROR: %s is not a directory' % idirpath) logger.log_stop() sys.exit(ERROR_INVALID_ARG) files = [ os.path.join(idirpath, f) for f in os.listdir(idirpath) if os.path.isfile(os.path.join(idirpath, f)) ] # We only care about clustering malicious traces mal_files = [fp for fp in files if 'malicious' in os.path.basename(fp)] num_mal = len(mal_files) # Calculate clustering metrics logger.log_info(module_name, "Parsing " + idirpath) pool = Pool(options.workers) data = [ sample for sample in pool.map( parse_file, zip(mal_files, [options.max_classes] * num_mal)) if sample ] pool.close() xs = np.array([sample[0] for sample in data]) ns = [sample[1] for sample in data] # Clustering logger.log_info(module_name, "Calculating clusters") db = DBSCAN(eps=options.eps, min_samples=options.min_samples).fit(xs) core_samples_mask = np.zeros_like(db.labels_, dtype=bool) core_samples_mask[db.core_sample_indices_] = True labels = db.labels_ # Number of clusters in labels, ignoring noise if present. 
n_clusters = len(set(labels)) - (1 if -1 in labels else 0) n_noise = list(labels).count(-1) logger.log_info(module_name, ' Number of points: %d' % len(ns)) logger.log_info(module_name, ' Number of clusters: %d' % n_clusters) logger.log_info(module_name, 'Number of noise points: %d' % n_noise) # Saving results as CSV if options.csv is not None: logger.log_info(module_name, "Saving CSV to %s" % options.csv) try: with open(options.csv, 'w') as csv_file: csv_file.write("cluster,filename\n") for label, name in zip(labels, ns): csv_file.write(','.join([str(label), name]) + "\n") except Exception as ex: logger.log_error(module_name, "Failed to save CSV: %s" % str(ex)) # Saving results as plot image if options.plot is not None: logger.log_info(module_name, "Generating plot") theta = radar_factory(options.max_classes, frame='polygon') fig, axes = plt.subplots(subplot_kw=dict(projection='radar')) colors = ['b', 'r', 'g', 'm', 'y'] axes.set_varlabels([""]) # no varlabels, they aren't that meaningful axes.set_rgrids([0.2, 0.4, 0.6, 0.8]) legend_labels = list() for label_key in set(labels): if label_key == -1: continue # noise legend_labels.append(label_key) label_color = colors[label_key % len(colors)] # Calculate per-cluster average label_mask = (labels == label_key) label_points = xs[label_mask & core_samples_mask] label_means = np.mean(label_points, axis=0) axes.plot(theta, label_means, color=label_color) axes.fill(theta, label_means, facecolor=label_color, alpha=0.25) # Legend legend = axes.legend(legend_labels, loc=(0.9, .95), labelspacing=0.1, fontsize='small') try: plt.savefig(options.plot) except Exception as ex: logger.log_error(module_name, "Failed to save plot: %s" % str(ex)) logger.log_stop()
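# Minimal, self-contained illustration of the DBSCAN bookkeeping used above
# (the data points are made up): labels of -1 mark noise, so the cluster count
# excludes them, exactly as in the n_clusters line at the top of this block.
import numpy as np
from sklearn.cluster import DBSCAN

xs = np.array([[0.00, 0.00], [0.00, 0.01], [0.01, 0.00], [5.00, 5.00]])
db = DBSCAN(eps=0.05, min_samples=2).fit(xs)
n_clusters = len(set(db.labels_)) - (1 if -1 in db.labels_ else 0)
n_noise = list(db.labels_).count(-1)
print((n_clusters, n_noise))  # -> (1, 1): one tight cluster plus one noise point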