def main():
    """Entry point: parse CLI options and dispatch to execute_parse.

    With -f/--from-file, each line of the file is re-parsed as its own
    option/argument set and executed; otherwise the command-line
    arguments themselves are executed.
    """
    parser = Opt(usage="Usage: %prog [options] arguments", version="%prog 1.0")
    parser.add_option("-g", "--get-id", action="store_true", dest="gen_id",
                      default=False, help="Return the gdoc ID of url")
    parser.add_option("-f", "--from-file", dest="ifile",
                      help="execute comands with specific flags from file")
    parser.add_option("-l", "--link", dest="t_format", default=None)
    (options_p, args) = parser.parse_args()
    # BUG FIX: the original did eval(options_p.__str__()) to turn the
    # optparse Values object into a dict; vars() returns that dict
    # directly, without round-tripping through eval of a repr string.
    options = vars(options_p)
    if options['ifile'] is not None:
        for parse_line in get_lines_from_file(options['ifile']):
            p_line = parse_line.split(' ')
            # each file line is treated as a fresh command line
            op2, arg2 = parser.parse_args(p_line)
            execute_parse(op2, arg2)
    else:
        if len(args) != 0:
            execute_parse(options_p, args)
        else:
            parser.error("Ingrese argumentos")
def main():
    """Scan claims files: mask each input CSV and persist the
    person<->key mapping back to claimants.csv."""
    # program name (this file's basename) used in --version output
    PROG = os.path.basename(os.path.splitext(__file__)[0])
    description = """Scan claims files"""
    parser = OptionParser(option_class=MultipleOption, usage='usage: %prog claims_file, claims_file, ...', version='%s %s' % (PROG, VERSION), description=description)
    if len(sys.argv) == 1:
        # no arguments: show help (parse_args(['--help']) exits the process)
        parser.parse_args(['--help'])
    # NOTE: args is the full (options, positional_args) tuple here, so
    # args[1] below is the list of claims-file names.
    args = parser.parse_args()
    # p2k: person -> key, k2p: key -> person, seeded from claimants.csv
    p2k = {}
    k2p = {}
    try:
        with open('claimants.csv') as csv_file:
            for line in csv.reader(csv_file, dialect="excel"):
                p2k[line[0]] = line[1]
                k2p[line[1]] = line[0]
    except IOError:
        # a missing claimants.csv is fine: start with empty mappings
        pass
    for filename in args[1]:
        # each input file gets a <name>_masked.csv sibling
        with open(filename+'_masked.csv', 'wb') as cf:
            outfile = csv.writer(cf, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL)
            analyze_file(filename, outfile, p2k, k2p)
    print len(p2k), len(k2p)
    # rewrite claimants.csv with any mappings added by analyze_file
    with open('claimants.csv', 'wb') as cf:
        cout = csv.writer(cf, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL)
        for p in p2k:
            cout.writerow([p, p2k[p]])
def test_can_be_disabled(self):
    """Capture plugin: -s, --nocapture and NOSE_NOCAPTURE each disable
    it; with no flag and no env it stays enabled."""
    # short option -s turns capture off
    cap = Capture()
    parser = OptionParser()
    cap.addOptions(parser)
    opts, extra = parser.parse_args(['test_can_be_disabled', '-s'])
    cap.configure(opts, Config())
    assert not cap.enabled
    # long option --nocapture turns capture off (reusing the same parser)
    cap = Capture()
    opts, extra = parser.parse_args(['test_can_be_disabled_long', '--nocapture'])
    cap.configure(opts, Config())
    assert not cap.enabled
    # NOSE_NOCAPTURE in the environment turns capture off
    env = {'NOSE_NOCAPTURE': 1}
    cap = Capture()
    parser = OptionParser()
    cap.addOptions(parser, env)
    opts, extra = parser.parse_args(['test_can_be_disabled'])
    cap.configure(opts, Config())
    assert not cap.enabled
    # default case: capture remains enabled
    cap = Capture()
    parser = OptionParser()
    cap.addOptions(parser)
    opts, extra = parser.parse_args(['test_can_be_disabled'])
    cap.configure(opts, Config())
    assert cap.enabled
def parseArgs(): """Parse any command line options.""" parser = OptionParser(usage=usagestr) parser.add_option("-d", "--daemon", action="store_true", dest="daemon", help="Run as a daemon") parser.add_option( "-p", "--pidfile", type="string", dest="pidfile", help="Path to process ID file", default="/var/run/weewx.pid" ) parser.add_option("-v", "--version", action="store_true", dest="version", help="Display version number then exit") parser.add_option( "-x", "--exit", action="store_true", dest="exit", help="Exit on I/O and database errors instead of restarting" ) parser.add_option( "-r", "--loop-on-init", action="store_true", dest="loop_on_init", help="Retry forever if device is not ready on startup", ) parser.add_option( "-n", "--log-label", type="string", dest="log_label", help="Label to use in syslog entries", default="weewx" ) (options, args) = parser.parse_args() if options.version: print weewx.__version__ sys.exit(0) if len(args) < 1: sys.stderr.write("Missing argument(s).\n") sys.stderr.write(parser.parse_args(["--help"])) sys.exit(weewx.CMD_ERROR) return options, args
def parse_cmdline(argv=None):
    """Parse pony-build client command-line options.

    :param argv: list of argument strings; a falsy value (None or an
        empty list) means parse sys.argv instead.  (Was a mutable
        default ``[]``; ``None`` is the safe idiom and behaves
        identically under the existing ``if not argv`` check.)
    :returns: the (options, args) pair from OptionParser.
    """
    cmdline = OptionParser()
    cmdline.add_option('-f', '--force-build', dest='force_build',
                       action='store_true', default=False,
                       help="run a build whether or not it's stale")
    cmdline.add_option('-n', '--no-report', dest='report',
                       action='store_false', default=True,
                       help="do not report build results to server")
    cmdline.add_option('-N', '--no-clean-temp', dest='cleanup_temp',
                       action='store_false', default=True,
                       help='do not clean up the temp directory')
    cmdline.add_option('-s', '--server-url', dest='server_url',
                       action='store', default='default',
                       help='set pony-build server URL for reporting results')
    cmdline.add_option('-v', '--verbose', dest='verbose',
                       action='store_true', default=False,
                       help='set verbose reporting')
    if not argv:
        (options, args) = cmdline.parse_args()
    else:
        (options, args) = cmdline.parse_args(argv)
    return options, args
class CommandDispatch:
    """Registry mapping lower-cased command names to callables, with
    dispatch to a designated default command when none is named."""

    def __init__(self):
        # name -> callable; self.default holds the fallback command name
        self.commands = {}
        self.default = ''

    def register_command(self, command, default=False):
        """Register *command* under command.name.lower(); optionally
        mark it as the default command."""
        key = command.name.lower()
        self.commands[key] = command
        if default:
            self.default = key

    def __call__(self, command=False):
        """Invoke the named command, or fall back to the default."""
        if command:
            self.commands[command]()
        else:
            self.default_command()

    def default_command(self):
        """Run the default command; with none registered, show usage."""
        if self.default:
            self.commands[self.default]()
            return
        names = ' '.join(sorted(self.commands.keys()))
        usage = '%prog command [options]\n' + 'Available commands: ' + names
        self.parser = OptionParser(usage=usage, version=__version__)
        self.parser.parse_args()
        self.parser.print_help()
def parse_commandline_arguments(self): usage = "%prog [options] main_script [extra extra extra]" version = version="%s-rev%s" % (APPLICATION_VERSION, APPLICATION_REVISION) description='Pypack packages a script into a directory containing all needed depencies suitable for "folder deployment". Run `python pypack --readme` for more information.' parser = OptionParser(usage=usage, description = description, version = version) parser.add_option("--readme", action="store_true", dest="readme", help="show more information") parser.add_option("--verbose", action="store_true", dest="verbose", help="run in verbose mode, all messages are displayed") parser.add_option("--debug", action="store_true", dest="debug", help="print debug messages to stdout") parser.add_option("--print", action="store_true", dest="print_dependencies", help="print dependencies and exit") parser.add_option("--outputdir", dest="out_dir", default="./build", help="directory where the files will be built, if non existent, it will be created") parser.add_option("--bytecompile", action="store_true", dest="bytecompile", help="bytecompile the python files") options, args = parser.parse_args() self.options, args = parser.parse_args() if options.readme: print self.print_readme() sys.exit(0) if not len(args): parser.print_help() sys.exit(1) self.main_script = args[0] self.extras = args[1:] if self.options.debug: self.options.verbose = True if self.options.print_dependencies: self.print_dependencies() sys.exit(0)
def test_can_be_disabled(self):
    """Verify each way of disabling Capture, then the enabled default."""
    plugin = Capture()
    parser = OptionParser()
    plugin.addOptions(parser)
    # -s disables capture
    options, args = parser.parse_args(["test_can_be_disabled", "-s"])
    plugin.configure(options, Config())
    assert not plugin.enabled
    # --nocapture disables capture; the already-built parser is reused
    plugin = Capture()
    options, args = parser.parse_args(["test_can_be_disabled_long", "--nocapture"])
    plugin.configure(options, Config())
    assert not plugin.enabled
    # NOSE_NOCAPTURE in the environment disables capture
    plugin = Capture()
    parser = OptionParser()
    plugin.addOptions(parser, {"NOSE_NOCAPTURE": 1})
    options, args = parser.parse_args(["test_can_be_disabled"])
    plugin.configure(options, Config())
    assert not plugin.enabled
    # no flag, no env: capture remains enabled
    plugin = Capture()
    parser = OptionParser()
    plugin.addOptions(parser)
    options, args = parser.parse_args(["test_can_be_disabled"])
    plugin.configure(options, Config())
    assert plugin.enabled
def option():
    """Build the task option parser, echo the parsed result, and
    return the (options, args) tuple.

    FIX: the original called parser.parse_args() twice -- once for the
    print and once for the return; we parse once and reuse the result.
    """
    parser = OptionParser()
    parser.add_option("-t", "--timeout", dest="timeout", metavar="", default="5*60*60")
    parser.add_option("-i", "--taskid", dest="taskid", metavar="")
    parser.add_option("-c", "--cmd", dest="cmd", metavar="")
    parsed = parser.parse_args()
    # single-argument print form works identically on py2 and py3
    print(parsed)
    return parsed
def getOptionDict(*argument_tuple):
    """Combine the command line and the slapos configuration file into
    one option dict.

    Returns (option_dict, remaining_argument_list): the [slapos]
    section of the config file, with supervisord paths defaulted, plus
    every positional argument after the configuration file name.
    """
    usage = """ Typical usage: * %prog CONFIGURATION_FILE [arguments passed to supervisor] """.strip()
    parser = OptionParser(usage=usage)
    # Parses arguments
    if argument_tuple:
        (argument_option_instance, argument_list) = parser.parse_args(list(argument_tuple))
    else:
        # No arguments given to entry point : we parse sys.argv.
        (argument_option_instance, argument_list) = parser.parse_args()
    if not argument_list:
        parser.error("Configuration file is obligatory. Consult documentation by calling with -h.")
    # first positional argument is the mandatory configuration file
    configuration_file = argument_list[0]
    if not os.path.exists(configuration_file):
        parser.error("Could not read configuration file : %s" % configuration_file)
    slapgrid_configuration = ConfigParser.SafeConfigParser()
    slapgrid_configuration.read(configuration_file)
    # Merges the two dictionnaries
    # NOTE(review): assumes the config file has a [slapos] section that
    # defines 'instance_root' -- KeyError below otherwise; confirm.
    option_dict = dict(slapgrid_configuration.items("slapos"))
    # Supervisord configuration location
    option_dict.setdefault('supervisord_configuration_path',
                           os.path.join(option_dict['instance_root'], 'etc', 'supervisord.conf'))
    # Supervisord socket
    option_dict.setdefault('supervisord_socket',
                           os.path.join(option_dict['instance_root'], 'supervisord.socket'))
    return option_dict, argument_list[1:]
def main():
    """BBS crawler entry point: parse -c/--commands and hand off to
    process()."""
    # program name (this file's basename) used in --version output
    PROG = os.path.basename(os.path.splitext(__file__)[0])
    # NOTE(review): these two bindings are unused in this function
    long_commands = "commands"
    short_commands = {"cmds": "commands"}
    description = """BBS crawler, use """
    parser = OptionParser(
        option_class=MultipleOption,
        usage="usage: %prog [OPTIONS] ptt board_name index_number",
        version="%s %s" % (PROG, VERSION),
        description=description,
    )
    parser.add_option(
        "-c",
        "--commands",
        # the "extend" action comes from the custom MultipleOption class
        action="extend",
        type="string",
        dest="commands",
        metavar="COMMANDS",
        help="select the commands like fetch_index with board_name or fetch_page with index_number you want the crawler to fetch ",
    )
    if len(sys.argv) == 1:
        # bare invocation: show help (parse_args(['--help']) exits)
        parser.parse_args(["--help"])
    else:
        options, args = parser.parse_args()
        print "arguments:", args
        print "options:", options
        process(options, args)
def parse_args(self, *ar, **kwar):
    """this is the only overriden method. in practice a wrapper around OptionParser.parse_args"""
    # First pass: parse the actual command line.
    (startOptions, startArgs) = OptionParser.parse_args(self, *ar, **kwar)
    if not startOptions.optionsfromfile:
        return (startOptions, startArgs)
    else:
        # Second pass: parse the extra argv-style list loaded from the
        # file named by --optionsfromfile.
        optionsAndArgsFromFile = self.loadOptionsFromFile(startOptions.optionsfromfile)
        (newOptions, newArgs) = OptionParser.parse_args(self, optionsAndArgsFromFile)
        # self.filefirst selects which parse wins on conflicts: the
        # "first" options set takes precedence below.
        if self.filefirst:
            firstOptions = newOptions
            secondOptions = startOptions
        else:
            firstOptions = startOptions
            secondOptions = newOptions
        # Copy over only attributes missing from the winning set.
        # NOTE(review): optparse normally sets every known option (with
        # its default) on both Values objects, so this loop may never
        # copy anything -- confirm the intended precedence is achieved.
        for x in secondOptions.__dict__:
            if x not in firstOptions.__dict__:
                firstOptions.__dict__[x] = secondOptions.__dict__[x]
        # positional arguments from both parses are concatenated
        startArgs.extend(newArgs)
        return (firstOptions, startArgs)
def test_activation_key_forbids_destination_credentials(self):
    """--activation-key combined with either destination credential
    must make validate_options() bail out with SystemExit."""
    parser = OptionParser()
    migrate.add_parser_options(parser)
    forbidden_extras = (["--destination-user", "x"],
                        ["--destination-password", "y"])
    for extra in forbidden_extras:
        (options, args) = parser.parse_args(["--activation-key", "bar"] + extra)
        self.assertRaises(SystemExit, migrate.validate_options, options)
def main():
    """Parse (and discard) CLI arguments, then load config and metadata."""
    # the parser exists only so --help/--version behave; no options defined
    OptionParser(usage="Usage: %prog").parse_args()
    common.read_config(None)
    metadata.read_metadata(xref=True)
def test_cover_package_suppress_stdout(self):
    """regression test to check that nose can run coverage with and
    without suppression of stdout reporting"""
    expected = ['pkg1', 'pkg2', 'pkg3']
    # packages passed on the command line
    plugin = Coverage()
    parser = OptionParser()
    plugin.addOptions(parser)
    options, args = parser.parse_args(['test_can_be_disabled',
                                       '--cover-package=pkg1,pkg2,pkg3'])
    plugin.configure(options, Config())
    eq_(expected, plugin.coverPackages)
    # packages supplied via the environment
    plugin = Coverage()
    parser = OptionParser()
    plugin.addOptions(parser, {'NOSE_COVER_PACKAGE': 'pkg1,pkg2,pkg3'})
    options, args = parser.parse_args(['test_can_be_disabled'])
    plugin.configure(options, Config())
    eq_(expected, plugin.coverPackages)
    # environment again, now also suppressing stdout reporting
    plugin = Coverage()
    parser = OptionParser()
    plugin.addOptions(parser, {'NOSE_COVER_PACKAGE': 'pkg1,pkg2,pkg3',
                               'NOSE_COVER_SUPPRESS_STDOUT': True})
    options, args = parser.parse_args(['test_can_be_disabled'])
    plugin.configure(options, Config())
    eq_(expected, plugin.coverPackages)
def main(argv):
    ''' Commandline menu function '''
    description = "Utility function to check abbreviation against their longforms"
    parser = OptionParser(description=description, usage='usage: %prog [OPTIONS] <abbreviation> <longform>')
    # -m takes exactly two values (nargs=2): abbreviation and longform
    parser.add_option("-m", dest="manual", help="check abbreviation manually \t\t\t\t\t ex: 'is_abbv.py -m <abbreviation> <longform>' ", type="string",nargs=2)
    # NOTE(review): the argv parameter is never used -- parse_args()
    # reads sys.argv directly.  Confirm whether parse_args(argv[1:])
    # was intended.
    (options, args) = parser.parse_args()
    if options.manual:
        prettify_result(is_abbreviation(options.manual[0],options.manual[1]))
    if len(sys.argv) == 1:
        # bare invocation: show help and exit (parse_args(['--help']) exits)
        parser.parse_args(['--help'])
def main():
    """Command dispatcher: run a spider, generate one, or list them."""
    usage = "Usage: %prog [run|gen|list] [options] arg"
    parser = OptionParser(usage)
    try:
        cmd = sys.argv[1]
    except IndexError:
        parser.error("incorrect number of arguments")
    if cmd == "run":
        parser.add_option("-s", "--spider", dest="spider", help="give a spider")
        parser.add_option("-u", "--urls", dest="urls",
                          help="give some urls, split with ','")
        parser.add_option("-L", "--logfile", dest="logfile",
                          help="logging file for crawler")
        parser.add_option("-l", "--loglevel", dest="level", default="DEBUG",
                          help="logging level for crawler")
        # FIX: dropped the invalid empty short-option string "" that the
        # original passed alongside --forever (optparse silently
        # discards empty option strings, so behavior is unchanged).
        parser.add_option("--forever", dest="forever", action="store_true",
                          default=False, help="run crawler forever")
        (options, args) = parser.parse_args(args=sys.argv[1:])
        crawl = Crawler()
        crawl.install(options)
        crawl.run()
    elif cmd == "gen":
        parser.add_option("-s", "--spider", dest="spider", help="given a spider.")
        parser.add_option("-d", "--domain", dest="domain",
                          help="given a domain that you will crawl.")
        (options, args) = parser.parse_args(args=sys.argv[1:])
        gen_spider(options.spider, options.domain)
    elif cmd == "list":
        list_spiders()
    else:
        # FIX: corrected the typo "unknow" in the error message
        parser.error("unknown argument '%s'" % cmd)
def parsecli():
    """Parse CLI arguments and return an object containing values for
    all of our options."""
    # NOTE(review): this guard is True only when major < 3 AND minor < 7
    # (e.g. 2.6); Python 2.7 falls into the argparse branch below.
    # argparse entered the stdlib in 2.7 -- confirm the intended cutoff.
    if sys.version_info[0] < 3 and sys.version_info[1] < 7:
        parser = OptionParser()
        parser.add_option('-f', action='store', dest='csv', help='Path to a CSV file with names of the servers to update.')
        parser.add_option('-y', action='store_true', dest='yes', default=False, help='Auto answers \'yes\' to all questions.')
        parser.add_option('-g', action='store', dest='patching_group', help='Patching group to use. Should be one of the following: MSK.PROD1, MSK.PROD2, MSK.UAT1, MSK.UAT2')
        parser.add_option('-o', action='store_true', dest='report', default=False, help='Generate CSV with a report or prints to stdout otherwise.')
        parser.add_option('-r', action='store_true', dest='reboot', default=False, help='Reboot successfully updated systems.')
        # -s collects a space-separated server list via the custom
        # vararg_callback helper defined elsewhere in this module
        parser.add_option('-s', action='callback', callback=vararg_callback, dest="servers_list", help='Space separated list of servers to update.')
        (options, args) = parser.parse_args()
        # -s and -f are mutually exclusive, and exactly one is required
        if options.servers_list and options.csv:
            print("\n-s and -f options are mutual exclusive.\n")
            parser.print_help()
            sys.exit(-1)
        if not options.servers_list and not options.csv:
            print("\nEither -s or -f options must be specified.\n")
            parser.print_help()
            sys.exit(-1)
        # CSV mode additionally requires a patching group
        if options.csv and not options.patching_group:
            print("\nPatching group definition is missing.\n")
            parser.print_help()
            sys.exit(-1)
        return options
    else:
        parser = argparse.ArgumentParser(description='Update Linux servers using Spacewalk API.')
        parser.add_argument('-f', help='Path to a CSV file which contains names of the servers to update.')
        parser.add_argument('-y', action='store_const', dest='yes', const=0, help='Auto answers \'yes\' to all questions.')
        parser.add_argument('-g', action='store', dest='patching_group', help='Patching group to use. Should be one of the following: MSK.PROD1, MSK.PROD2, MSK.UAT1, MSK.UAT2')
        parser.add_argument('-s', help='Space separated list of servers to update.')
        # NOTE(review): this branch defines neither -o/-r nor the mutual
        # exclusion checks, and does not RETURN the parsed namespace --
        # callers receive None.  Looks unfinished; confirm before
        # relying on the argparse path.
        parser.parse_args()
def main(separator = '\t'):
    """Generate a synthetic transit light curve and print one
    separator-joined record: kic id, quarter, encoded data points."""
    parser = OptionParser()
    parser.add_option("-l","--periodlength", type='int')
    parser.add_option("-n","--datapoints", type='int')
    parser.add_option("-t","--transitlength", type='int')
    parser.add_option("-p","--phaseshift", type='int', default=0)
    parser.add_option("-d","--depth", type='float', default=1)
    parser.add_option("-s","--sigma", type='float', default=1)
    # First parse: needed so opts.depth can seed the default of -m below.
    opts, args = parser.parse_args()
    parser.add_option("-m","--noise", type='float', default=opts.depth)
    parser.add_option("-u","--timeperpoint", type='float', default=.02044)
    # Second parse picks up the two late-registered options.
    opts, args = parser.parse_args()
    l=opts.periodlength      # period length, in data points
    n=opts.datapoints        # total number of samples
    t=opts.transitlength     # transit duration, in data points
    p=opts.phaseshift        # phase offset of the first transit
    d=opts.depth             # transit depth
    s=opts.sigma             # gaussian noise sigma
    m=opts.noise             # noise scale factor (defaults to depth)
    u=opts.timeperpoint      # time step per sample
    kic = 'your mom lol'     # placeholder KIC identifier
    q = '1234567'            # placeholder quarter string
    # flux series initialised to zero at u-spaced timestamps
    data = numpy.array([[x * u, 0] for x in xrange(n)])
    # subtract the transit depth across each transit window
    for y in xrange(n/l + 1):
        for x in xrange(min(t,n-y*l-p)):
            data[(y*l + x + p)][1] -= d
    # add scaled gaussian noise to every sample
    for x in xrange(n):
        data[x][1] += gauss(0,s)*m
    print "%s%s%s%s%s" % (kic, separator, q, separator, encode_list(data))
def args_to_config(sysargs):
    """Translate the server command line into a Config.

    Leading options configure the rdb server itself; the first
    positional argument names a backend, and everything after it is
    handed to that backend's own option parser.
    """
    server_parser = OptionParser(usage="%prog [rdb options] backendname"
                                       " [ -- backendoptions ... ]")
    server_parser.add_option('-p', '--port', dest='port',
                             help='which TCP port to listen on',
                             metavar='PORT', type='int', default=6552)
    server_opts, positional = server_parser.parse_args(sysargs)
    if not positional:
        server_parser.error('no backend specified')
    name = positional[0]
    rest = positional[1:]
    if name not in backends:
        server_parser.error('unknown backend %r' % name)
    # each new process will need to build its own backend, since they
    # shouldn't share file descriptors or sockets
    cls = backends[name]
    sub_parser = OptionParser()
    cls.parse_arguments(sub_parser)
    sub_opts, sub_args = sub_parser.parse_args(rest)
    backend = cls(sub_opts, sub_args)
    return Config(backend=backend, port=server_opts.port)
def get_options(opt = None):
    """Build the LMM command-line parser and parse *opt* (or sys.argv
    when *opt* is None).

    Returns the (options, args) pair exactly as parse_args produces it.
    """
    parser = OptionParser(usage = 'usage: %prog [options] snps_file pheno_file')
    # one row per option: (short, long, dest, action, type, help, default)
    # store_true rows carry type None, which is simply omitted below.
    specs = [
        ('-f', '--file', 'file_test', 'store', 'string', 'Plink PED fileset', None),
        ('-b', '--bfile', 'bfile_test', 'store', 'string', 'Plink binary fileset', None),
        ('-t', '--tfile', 'tfile_test', 'store', 'string', 'Plink TPED fileset', None),
        ('-K', '--filesim', 'file_sim', 'store', 'string', 'Plink PED fileset', None),
        ('-L', '--bfilesim', 'bfile_sim', 'store', 'string', 'Plink binary fileset', None),
        ('-Q', '--tfilesim', 'tfile_sim', 'store', 'string', 'Plink TPED fileset', None),
        ('-N', '--n-snps', 'N', 'store', 'int', 'number of SNPs to use to estimate Kinship (realized relationship) matrix (default all)', None),
        ('-o', '--out', 'out_dir', 'store', 'string', 'Output directory (default ./tmp)', './tmp'),
        ('-d', '--delimiter', 'delimiter', 'store', 'string', 'delimiter between fields of the phenotype file', '\t'),
        ('-m', '--pheno-missing-value', 'missing', 'store', 'float', 'missing value for the phenotype file (default -9)', -9.0),
        ('-q', '--quiet', 'quiet', 'store_true', None, 'show output from calls', False),
        ('-C', '--covariates_file', 'covariates', 'store', 'string', 'covariates', None),
        ('-D', '--exclude_distance', 'excl_dist', 'store', 'float', 'exclude by distance (in cM)', None),
        ('-P', '--exclude_position', 'excl_pos', 'store', 'int', 'exclude by position (in bp)', None),
        ('-s', '--spaced', 'spaced', 'store_true', None, 'equally spaced regressors', None),
        ('-T', '--testing', 'testing', 'store_true', None, 'only run the final LMM', None),
        ('-R', '--one_chr', 'chr_only', 'store_true', None, 'test on one chr and build kernel on the rest', None),
        ('-p', '--pheno', 'pheno_file', 'store', 'string', 'phenotype file', None),
    ]
    for short_opt, long_opt, dest, action, type_, help_text, default in specs:
        kwargs = dict(dest=dest, action=action, help=help_text, default=default)
        if type_ is not None:
            kwargs['type'] = type_
        parser.add_option(short_opt, long_opt, **kwargs)
    if opt is not None:
        (options, args) = parser.parse_args(opt)
    else:
        (options, args) = parser.parse_args()
    return options, args
def inp_extract_cmdline_parser(self, opts, args):
    """Populate this list-like container from command-line arguments.

    Each argument either names an existing path (otherwise it is
    treated as a glob pattern and expanded back onto the work list) or
    is an inline "key: value"-style literal that gets rewritten into
    option syntax for the subtype's own parser.
    """
    import shlex
    args = self.inp_parser_extract(opts, None)
    if not args:
        return
    # Remove default values
    self[:] = []
    self.subobjects.clear()
    while len(args) > 0:
        arg = args.pop(0)
        if hasattr(self.datatype, "path") and not os.path.exists(arg):
            # non-existing path: expand as a glob and reprocess
            args = glob.glob(arg) + args
            # Remove duplicated items caused by symlinks
            # NOTE(review): list(set(...)) discards the original argument
            # order -- confirm order does not matter to callers.
            args = list(set([os.path.realpath(x) for x in args]))
            continue
        # Create Subtype and initialize its parser
        subtype = self.datatype()
        self.subobjects["%d" % len(self)] = subtype
        subtype_parser = OptionParser()
        subtype.inp_setup_cmdline_parser(subtype_parser)
        if not ":" in arg:
            # plain value: feed it to the subtype parser as --<name> <arg>
            (opts, sub_args) = subtype_parser.parse_args(["--" + subtype.name, arg])
        else:
            # "key: value" / "key:value" forms are rewritten into
            # --<name> value / --<name>-key option spellings
            arg = arg.replace(": ", "--" + subtype.name + " ")
            arg = arg.replace(":", "--" + subtype.name + "-")
            arg = shlex.split(arg)
            (opts, sub_args) = subtype_parser.parse_args(arg)
        # recurse so nested subtypes can consume their own arguments
        subtype.inp_extract_cmdline_parser(opts,sub_args)
        self.append(subtype)
def parse_args(CONFIG_PATH=''):
    """
    Parse the arguments from the command line
    """
    # Prefer argparse; fall back to optparse (with SUPPRESS_HELP standing
    # in for argparse.SUPPRESS) on interpreters that lack argparse.
    try:
        from argparse import ArgumentParser, SUPPRESS
    except ImportError:
        from optparse import OptionParser
        from optparse import SUPPRESS_HELP as SUPPRESS
        parser = OptionParser()
        parser.add_option("-f", "--file", dest="filename",
                          default=path.join(CONFIG_PATH, 'poezio.cfg'),
                          help=_("The config file you want to use"),
                          metavar="CONFIG_FILE")
        parser.add_option("-d", "--debug", dest="debug",
                          help=_("The file where debug will be written"),
                          metavar="DEBUG_FILE")
        # -v stores a version string; its help entry is suppressed
        parser.add_option("-v", "--version", dest="version",
                          help=SUPPRESS, metavar="VERSION",
                          default="0.9-dev")
        # optparse returns (options, leftover args); leftovers are ignored
        (options, __) = parser.parse_args()
    else:
        parser = ArgumentParser()
        parser.add_argument("-f", "--file", dest="filename",
                            default=path.join(CONFIG_PATH, 'poezio.cfg'),
                            help=_("The config file you want to use"),
                            metavar="CONFIG_FILE")
        parser.add_argument("-d", "--debug", dest="debug",
                            help=_("The file where debug will be written"),
                            metavar="DEBUG_FILE")
        parser.add_argument("-v", "--version", dest="version",
                            help=SUPPRESS, metavar="VERSION",
                            default="0.9-dev")
        options = parser.parse_args()
    return options
def parse_args():
    """ Parse command-line arguments """
    parser = OptionParser(usage='usage: %prog [-v|vv|vvv] [options]',
                          version='{0}: v.{1} by {2}'.format('%prog', __version__, __author__))
    ## Verbosity (want this first, so it's right after --help and --version)
    parser.add_option('-v', help='Set verbosity level', action='count',
                      default=0, dest='v')
    ## CLI arguments
    plugin_opts = OptionGroup(parser, 'Plugin Options')
    plugin_opts.add_option('-c', '--critical', help='Set the critical threshold.',
                           type=int, dest='crit', metavar='######')
    plugin_opts.add_option('-w', '--warning', help='Set the warning threshold.',
                           type=int, dest='warn', metavar='######')
    plugin_opts.add_option('--in', '--networkin', help='Get network in avarage.',
                           action='store_true', dest='neti')
    plugin_opts.add_option('--out', '--networkout', help='Get network out avarage.',
                           action='store_true', dest='neto')
    parser.add_option_group(plugin_opts)
    ## Try to parse based on the testargs variable. If doesn't exist, use args
    try:
        parsed, leftover = parser.parse_args(testargs.split())
    except NameError:
        parsed, leftover = parser.parse_args()
    ## Set the logging level based on the -v arg
    levels = [log.ERROR, log.WARN, log.INFO, log.DEBUG]
    log.getLogger().setLevel(levels[parsed.v])
    log.debug('Parsed arguments: {0}'.format(parsed))
    log.debug('Other arguments: {0}'.format(leftover))
    return parsed, leftover
def getopts(args): parser = OptionParser() parser.add_option("-i", "--input", dest="inp", help="the name of the input file", default=None) parser.add_option("-o", "--output", dest="outp", help="the name of the output file", default=None) parser.add_option("-s", "--start", dest="slen", help="count of characters to keep from left (start) of header string (initial '>' not included)", default=None) parser.add_option("-e", "--end", dest="elen", help="count of characters to keep from right (end) of header string (initial '>' not included)", default=None) (options, args) = parser.parse_args(args) fail = False if options.inp is None: print "You must provide an input file" fail = True if options.outp is None: print "You must provide an output file" fail = True if options.outp==options.inp: print "You must provide an output filename DIFFERENT from the input filename" fail = True if options.slen is None or options.elen is None: print "You must provide both -s [num] and -e [num] options" fail = True if len(args)>0: print "Trailing arguments in commandline:", ' '.join(args) fail=True if fail: print print "Errors were detected in command line. Aborting." print parser.parse_args(['-h']) sys.exit(1) options.elen = int(options.elen) options.slen = int(options.slen) return options, args
def get_options(self, default=False):
    '''handle testrunner options'''
    parser = OptionParser(option_list=self.option_list,
                          usage="usage: %prog [options]")
    if not default:
        # parse the real command line
        options, _unused = parser.parse_args()
        return options
    # default=True: parse an empty argv so only the defaults apply
    options, _unused = parser.parse_args(args=[])
    return options
def main():
    """Dispatch the swarm keys tool: 'create' or 'list' API keys."""
    server_info = swarmtoolscore.get_server_info()
    # NOTE(review): `keys` is never used in this function
    keys = swarmtoolscore.get_keys()
    if len(sys.argv) == 1:
        # no sub-command at all: print tool usage
        usage(sys.argv[0])
    elif sys.argv[1] == "create":
        opt_usage = "usage: \n %s PASSWORD [options]"%(sys.argv[1])
        opt_usage += "\n\n *PASSWORD: Your Bug Labs account password."
        parser = OptionParser(usage = opt_usage)
        parser.add_option("-t", "--type", dest="key_type", help="Specify the type of API key you wish to create. Valid types; 'configuration', 'participation' (both keys are created if no type specified).", metavar="KEY_TYPE")
        (options, args) = parser.parse_args()
        # args[0] is the sub-command itself, args[1] must be the password
        if len(args) != 2:
            print "Invalid number of args. See --help for correct usage."
            sys.exit()
        password = args[1]
        user_info = swarmtoolscore.get_user_info()
        create(server_info["hostname"], user_info["user_id"], password, options.key_type)
    elif sys.argv[1] == "list":
        opt_usage = "usage: \n %s PASSWORD [options]"%(sys.argv[1])
        opt_usage += "\n\n *PASSWORD: Your Bug Labs account password."
        parser = OptionParser(usage = opt_usage)
        parser.add_option("-t", "--type", dest="key_type", help="Specify the type of API key. Valid types; 'configuration', 'participation' (configuration is created by default).", metavar="KEY_TYPE")
        (options, args) = parser.parse_args()
        if len(args) != 2:
            print "Invalid number of args. See --help for correct usage."
            sys.exit()
        password = args[1]
        user_info = swarmtoolscore.get_user_info()
        # NOTE(review): `list` here is a project-level function that
        # shadows the builtin list -- confirm.
        list(server_info["hostname"], user_info["user_id"], password, options.key_type)
    else:
        # unrecognised sub-command: print tool usage
        usage(sys.argv[0])
def __init__(self, importkeys=False, progress=None):
    """
    Construct a customized instance of YumBase.
    This includes:
      - loading yum plugins.
      - custom configuration.
      - setting the progress bar for download progress reporting.
      - prime our progress report object.
    :param importkeys: Allow the import of GPG keys.
    :type importkeys: bool
    :param progress: A progress reporting object.
    :type progress: ProgressReport
    """
    # Hand yum an already-parsed, EMPTY option set so it never touches
    # the real command line of the hosting process.
    parser = OptionParser()
    parser.parse_args([])
    self.__parser = parser
    YumBase.__init__(self)
    self.preconf.optparser = self.__parser
    self.preconf.plugin_types = (TYPE_CORE, TYPE_INTERACTIVE)
    # importkeys doubles as the global assume-yes switch here
    self.conf.assumeyes = importkeys
    self.progress = progress or ProgressReport()
    # route yum's download callbacks into our progress reporter
    bar = DownloadCallback(self.progress)
    self.repos.setProgressBar(bar)
    self.progress.push_step('Refresh Repository Metadata')
    self.logfile = getLogger('yum.filelogging')
def opt_parse():
    """Parse duplicate-finder options; -p and -o are both required.

    Returns the parsed (opts, args) tuple.
    """
    parser = OptionParser()
    parser.add_option("-c", "--chunk", default=10, type='int',
                      help="Chunk size in MB. [DEFAULT: 10]")
    parser.add_option("-p", "--path", default=None, help="Path to crawl.")
    parser.add_option("-o", "--output", default=None,
                      help="File to store duplicate ")
    parser.add_option("-t", "--threads", default=4, type='int',
                      help=("Number of threads to read concurrent files."
                            " [DEFAULT: 4]"))
    (opts, args) = parser.parse_args()
    # BUG FIX: the original only errored when BOTH -p and -o were missing
    # (`and`); the message demands both flags, so either one missing
    # must be an error.
    if not opts.path or not opts.output:
        parser.error("Please add a -p and -o flag when executing or -h for "
                     "help.")
    # BUG FIX: return the result already parsed above instead of calling
    # parser.parse_args() a second time.
    return opts, args
def parseArgs(): """Parse any command line options.""" parser = OptionParser() group = OptionGroup(parser, "Basics Options") group.add_option("-c", "--conffile", dest="conffile", help="Config file") group.add_option("-p", "--period", dest="period", help="Period : hourly / daily / weekly / monthly / yearly", default='daily') group.add_option("-t", "--tmpdir", dest="tmp", help="Temporary working directory (by default /tmp)", default="/tmp") parser.add_option_group(group) group = OptionGroup(parser, "Program Options") group.add_option("--template", dest="template", help="Create config file example", default="False") group.add_option("-d", "--debug", dest="debug", action="store_true", help="Print debug information", default="False") group.add_option("-v", "--version", dest="version", action="store_true", help="Print version information and exit", default="False") parser.add_option_group(group) (options, args) = parser.parse_args() # Print version information if options.version == True: print "Backup v%s (modified %s)" %(VERSION, VERSION_DATE) print "Created by Steven Ducastelle" sys.exit(0) # Create template config file if options.template != "False": createTemplate(options) # Print error and help if no argument is passed if ((options.conffile is None) and (options.template == "False")): sys.stderr.write("Missing argument(s).\n") sys.stderr.write(parser.parse_args(["--help"])) return (options, args)
# --- command-line and configuration bootstrap for the analysis script ---
argv = sys.argv
parser = OptionParser()
#parser.add_option("-P", "--path", dest="path", default="",
#                  help="path to samples")
parser.add_option("-S", "--samples", dest="names", default="",
                  help="samples you want to run on")
# -C may be repeated; values accumulate into a list
parser.add_option("-C", "--config", dest="config", default=[], action="append",
                  help="configuration defining the plots to make")
# NOTE(review): argv is the FULL sys.argv (including the program name);
# optparse only skips argv[0] when parsing sys.argv itself, so the
# script name likely ends up in args here -- confirm.
(opts, args) = parser.parse_args(argv)
# NOTE(review): with default=[] this can never equal "" -- presumably
# dead fallback code; TODO confirm.
if opts.config == "":
    opts.config = "config"
from myutils import BetterConfigParser, ParseInfo, TreeCache
print opts.config
# read every -C file into one layered configuration
config = BetterConfigParser()
config.read(opts.config)
anaTag = config.get("Analysis", "tag")
# NOTE(review): eval of config-file text -- trusted input assumed
TrainFlag = eval(config.get('Analysis', 'TrainFlag'))
btagLibrary = config.get('BTagReshaping', 'library')
samplesinfo = config.get('Directories', 'samplesinfo')
VHbbNameSpace = config.get('VHbbNameSpace', 'library')
# load the VHbb namespace shared library into ROOT
ROOT.gSystem.Load(VHbbNameSpace)
class DASOptionParser:
    """
    DAS cache client option parser
    """
    def __init__(self):
        usage = "Usage: %prog [options]\n"
        usage += "For more help please visit https://cmsweb.cern.ch/das/faq"
        self.parser = OptionParser(usage=usage)
        self.parser.add_option("-v", "--verbose", action="store",
                               type="int", default=0, dest="verbose",
                               help="verbose output")
        self.parser.add_option("--query", action="store", type="string",
                               default=False, dest="query",
                               help="specify query for your request")
        msg = "host name of DAS cache server, default is https://cmsweb.cern.ch"
        self.parser.add_option("--host", action="store", type="string",
                               default='https://cmsweb.cern.ch', dest="host",
                               help=msg)
        msg = "start index for returned result set, aka pagination,"
        msg += " use w/ limit (default is 0)"
        self.parser.add_option("--idx", action="store", type="int",
                               default=0, dest="idx", help=msg)
        msg = "number of returned results (default is 10),"
        msg += " use --limit=0 to show all results"
        self.parser.add_option("--limit", action="store", type="int",
                               default=10, dest="limit", help=msg)
        msg = 'specify return data format (json or plain), default plain.'
        self.parser.add_option("--format", action="store", type="string",
                               default="plain", dest="format", help=msg)
        msg = 'query waiting threshold in sec, default is 5 minutes'
        self.parser.add_option("--threshold", action="store", type="int",
                               default=300, dest="threshold", help=msg)
        # x509() (defined elsewhere in this module) supplies the grid-proxy
        # path used as the default for both --key and --cert; it runs at
        # construction time.
        msg = 'specify private key file name, default $X509_USER_PROXY'
        self.parser.add_option("--key", action="store", type="string",
                               default=x509(), dest="ckey", help=msg)
        msg = 'specify private certificate file name, default $X509_USER_PROXY'
        self.parser.add_option("--cert", action="store", type="string",
                               default=x509(), dest="cert", help=msg)
        # NOTE(review): --retry is declared type="string" yet defaults to
        # the int 0 -- confirm what its consumers expect.
        msg = 'specify number of retries upon busy DAS server message'
        self.parser.add_option("--retry", action="store", type="string",
                               default=0, dest="retry", help=msg)
        msg = 'show DAS headers in JSON format'
        msg += ' (obsolete, keep for backward compatibility)'
        self.parser.add_option("--das-headers", action="store_true",
                               default=False, dest="das_headers", help=msg)
        msg = 'specify power base for size_format, default is 10 (can be 2)'
        self.parser.add_option("--base", action="store", type="int",
                               default=0, dest="base", help=msg)
        msg = 'a file which contains a cached json dictionary for query -> files mapping'
        self.parser.add_option("--cache", action="store", type="string",
                               default=None, dest="cache", help=msg)
        msg = 'List DAS key/attributes, use "all" or specific DAS key value, e.g. site'
        self.parser.add_option("--list-attributes", action="store", type="string",
                               default="", dest="keys_attrs", help=msg)

    def get_opt(self):
        """
        Returns parse list of options
        """
        return self.parser.parse_args()
def main(): usage = "usage: %prog [ options ] COMMAND product [ filename ... ]" description = "Operate on artifacts stored in a product bucket. " + \ "Valid commands are 'get', 'put' and 'list'. You may optionally " + \ "store the objects in a separate section of the bucket for " + \ "classification (e.g. for different environments)." parser = OptionParser(usage=usage, description=description, version=None, target=None) parser.set_defaults(quiet=False, prefix='', section='') parser.add_option("-c", "--config", dest="config", default="artifacts.ini", metavar="FILE", help="read config from FILE") parser.add_option("-b", "--bucket", dest="bucket", metavar="BUCKET", help="use S3 bucket BUCKET") parser.add_option("-p", "--prefix", dest="prefix", metavar="PREFIX", help="Store artifacts prefixed with path PREFIX") parser.add_option("", "--access-key", dest="access_key", help="S3 access key") parser.add_option("", "--secret-key", dest="secret_key", help="S3 secret key") parser.add_option("-v", "--version", dest="version", help="specify artifact version") parser.add_option( "-s", "--section", dest="section", metavar="SECTION", help="Store objects in subsection SECTION of the product bucket.") parser.add_option("-t", "--target", dest="target", metavar="FILENAME", help="Use FILENAME for destination") parser.add_option("-q", "--quiet", dest="quiet", action="store_true", help="execute quietly") (options, args) = parser.parse_args() config = configobj.ConfigObj(options.config) for option in parser.defaults.keys(): if getattr(options, option): config[option] = getattr(options, option) missing_opts = [] for option in ['bucket', 'access_key', 'secret_key', 'product']: if not option in config: missing_opts.append(option) if missing_opts: log.error("Missing configuration: %s", ", ".join(missing_opts)) sys.exit(1) if len(args) < 1: log.error("No product specified. 
A product is a grouping of " + "artifacts in your artifact store.") sys.exit(1) config['product'] = args[0] args = args[1:] if len(args) < 1: log.error("Must specify a command: get, put or list") sys.exit(1) command = args[0].lower() args = args[1:] if command != 'list': if len(args) < 1: log.error("You must specify filenames for 'get' or 'put'") sys.exit(1) if config.get('target') and len(args) > 1: log.error( "Cannot specify a target filename when %sting " + "multiple files", command) sys.exit(1) if not config['quiet']: log.setLevel(logging.INFO) a = artifacts.S3Artifacts(config['bucket'], config['access_key'], config['secret_key'], config['prefix']) if command == 'put': for f in args: log.info("Uploading %s.", f) a.upload(f, config['product'], config['section'], config['version'], config['target'], config['quiet']) if command == 'list': for f in args: vs = a.get_versions(f, config['product'], config['section']) for v in vs: print "%s : %s (external version %s) at %s" % ( base64.b64encode(v.version_id + " " * ((3 - len(v.version_id) % 3) % 3)), v.name, v.get_metadata('version'), v.last_modified) if command == 'put': for f in args: log.info("Downloading %s.", f) a.download(f, config['product'], config['section'], config['version'], config['target'], config['quiet'])
# Provider-type switches: group vs institutional extraction modes.
parser.add_option("-g", "--group_provider", dest="non_sole_provider",
                  help="Extract providers that are group based.",
                  action="store_true", default=False)
parser.add_option("-i", "--institutional_provider", dest="blank_sole_provider",
                  help="Extract providers that are institutional based.",
                  action="store_true", default=False)

(options, args) = parser.parse_args()

selection_fields_sql = {}

if options.taxonomy_selection_fields:
    # Translate the user-supplied taxonomy field string into a SQL
    # LIKE-based selection clause, keyed for later query assembly.
    taxonomy_fields = string_list_to_python_list(
        options.taxonomy_selection_fields)
    taxonomy_fields_sql = field_selection_with_like(
        "flattened_taxonomy_string", taxonomy_fields,
        padding_left_side=True, wild_card_right_side=True,
        wild_card_left_side=True, padder="|")
    selection_fields_sql["taxonomy_field_selection_list"] = taxonomy_fields_sql
def main(): parser = OptionParser() parser.add_option('-d', '--detail', dest='detail', default=False, action='store_true', help="show detailed status") parser.add_option('-x', '--debug', dest='debug', default=False, action='store_true', help="show debugging information") parser.add_option('-t', '--timeout', dest='timeout', type="float", default=2, help="timeout in seconds to use for HTTP requests to services") (options, args) = parser.parse_args() if args: parser.error("No arguments are permitted") control = package_installed('contrail-control') analytics = package_installed('contrail-analytics') agent = package_installed('contrail-vrouter') capi = package_installed('contrail-config') cwebui = package_installed('contrail-web-controller') cwebstorage = package_installed('contrail-web-storage') database = (package_installed('contrail-openstack-database') or package_installed('contrail-database')) storage = package_installed('contrail-storage') vr = False lsmodout = subprocess.Popen('lsmod', stdout=subprocess.PIPE).communicate()[0] lsofvrouter = (subprocess.Popen(['lsof', '-ni:{0}'.format(DPDK_NETLINK_TCP_PORT), '-sTCP:LISTEN'], stdout=subprocess.PIPE).communicate()[0]) if lsmodout.find('vrouter') != -1: vr = True elif lsofvrouter: vr = True if agent: if not vr: print "vRouter is NOT PRESENT\n" supervisor_status('compute', options) else: if vr: print "vRouter is PRESENT\n" if control: supervisor_status('control', options) if analytics: supervisor_status('analytics', options) if capi: supervisor_status('config', options) if cwebui or cwebstorage: supervisor_status('webui', options) if database: supervisor_status('database', options) if capi: supervisor_status('support-service', options) if storage: print "== Contrail Storage ==" check_svc('contrail-storage-stats') if len(glob.glob('/var/crashes/core.*')) != 0: print "========Run time service failures=============" for file in glob.glob('/var/crashes/core.*'): print file
def main(): # rename this thread threading.currentThread().name = "MAIN" # Set paths if hasattr(sys, 'frozen'): lazylibrarian.FULL_PATH = os.path.abspath(sys.executable) else: lazylibrarian.FULL_PATH = os.path.abspath(__file__) lazylibrarian.PROG_DIR = os.path.dirname(lazylibrarian.FULL_PATH) lazylibrarian.ARGS = sys.argv[1:] lazylibrarian.SYS_ENCODING = None try: locale.setlocale(locale.LC_ALL, "") lazylibrarian.SYS_ENCODING = locale.getpreferredencoding() except (locale.Error, IOError): pass # for OSes that are poorly configured I'll just force UTF-8 if not lazylibrarian.SYS_ENCODING or lazylibrarian.SYS_ENCODING in ( 'ANSI_X3.4-1968', 'US-ASCII', 'ASCII'): lazylibrarian.SYS_ENCODING = 'UTF-8' # Set arguments from optparse import OptionParser p = OptionParser() p.add_option('-d', '--daemon', action="store_true", dest='daemon', help="Run the server as a daemon") p.add_option('-q', '--quiet', action="store_true", dest='quiet', help="Don't log to console") p.add_option('--debug', action="store_true", dest='debug', help="Show debuglog messages") p.add_option('--nolaunch', action="store_true", dest='nolaunch', help="Don't start browser") p.add_option('--port', dest='port', default=None, help="Force webinterface to listen on this port") p.add_option('--datadir', dest='datadir', default=None, help="Path to the data directory") p.add_option('--config', dest='config', default=None, help="Path to config.ini file") p.add_option('-p', '--pidfile', dest='pidfile', default=None, help="Store the process id in the given file") options, args = p.parse_args() if options.debug: lazylibrarian.LOGLEVEL = 2 if options.quiet: lazylibrarian.LOGLEVEL = 0 if options.daemon: if not sys.platform == 'win32': lazylibrarian.DAEMON = True lazylibrarian.LOGLEVEL = 0 lazylibrarian.daemonize() else: print "Daemonize not supported under Windows, starting normally" if options.nolaunch: lazylibrarian.LAUNCH_BROWSER = False if options.datadir: lazylibrarian.DATADIR = str(options.datadir) else: 
lazylibrarian.DATADIR = lazylibrarian.PROG_DIR if options.config: lazylibrarian.CONFIGFILE = str(options.config) else: lazylibrarian.CONFIGFILE = os.path.join(lazylibrarian.DATADIR, "config.ini") if options.pidfile: if lazylibrarian.DAEMON: lazylibrarian.PIDFILE = str(options.pidfile) # create and check (optional) paths if not os.path.exists(lazylibrarian.DATADIR): try: os.makedirs(lazylibrarian.DATADIR) except OSError: raise SystemExit('Could not create data directory: ' + lazylibrarian.DATADIR + '. Exit ...') if not os.access(lazylibrarian.DATADIR, os.W_OK): raise SystemExit('Cannot write to the data directory: ' + lazylibrarian.DATADIR + '. Exit ...') # create database and config lazylibrarian.DBFILE = os.path.join(lazylibrarian.DATADIR, 'lazylibrarian.db') lazylibrarian.CFG = ConfigObj(lazylibrarian.CONFIGFILE, encoding='utf-8') lazylibrarian.initialize() if options.port: HTTP_PORT = int(options.port) logger.info('Starting LazyLibrarian on forced port: %s' % HTTP_PORT) else: HTTP_PORT = int(lazylibrarian.HTTP_PORT) logger.info('Starting LazyLibrarian on port: %s' % lazylibrarian.HTTP_PORT) if lazylibrarian.DAEMON: lazylibrarian.daemonize() # Try to start the server. webStart.initialize({ 'http_port': HTTP_PORT, 'http_host': lazylibrarian.HTTP_HOST, 'http_root': lazylibrarian.HTTP_ROOT, 'http_user': lazylibrarian.HTTP_USER, 'http_pass': lazylibrarian.HTTP_PASS, }) if lazylibrarian.LAUNCH_BROWSER and not options.nolaunch: lazylibrarian.launch_browser(lazylibrarian.HTTP_HOST, lazylibrarian.HTTP_PORT, lazylibrarian.HTTP_ROOT) lazylibrarian.start() while True: if not lazylibrarian.SIGNAL: try: time.sleep(1) except KeyboardInterrupt: lazylibrarian.shutdown() else: if lazylibrarian.SIGNAL == 'shutdown': lazylibrarian.shutdown() elif lazylibrarian.SIGNAL == 'restart': lazylibrarian.shutdown(restart=True) else: lazylibrarian.shutdown(restart=True, update=True) lazylibrarian.SIGNAL = None return
def main(): picklefile = '/var/tmp/module-data.pkl' parser = OptionParser() parser.add_option("-v", "--verbose", action="store_true", dest="verbose", default=False, help="print debug info") parser.add_option("-p", "--print", action="store_true", dest="printt", default=False, help="print output of called method") parser.add_option( "-f", "--file", action="store", dest="tempfile", default=picklefile, help= "use another file for pickling objects (it is important when you need to setup more machines)" ) (options, args) = parser.parse_args() picklefile = options.tempfile if len(args) == 0: raise ValueError( "Unable to call bash helper without function, there is possible to use: ", [ a[0] for a in inspect.getmembers( module_framework.get_correct_backend(), predicate=inspect.ismethod) if '__' not in a[0] ]) method = args[0] def printIfVerbose(*sargs): if options.verbose: print sargs if os.path.isfile(picklefile) and os.stat(picklefile).st_size > 100: printIfVerbose("reading from pickfile", picklefile) pkl_file = open(picklefile, 'rb') helper = pickle.load(pkl_file) printIfVerbose("reading from pickled object", helper) pkl_file.close() else: (helper, moduletype) = module_framework.get_correct_backend() printIfVerbose("created new instance for module") pkl_file = open(picklefile, 'wb') if "tearDown" != method: if options.printt: if len(args) == 1: print getattr(helper, method)() else: print getattr(helper, method)(" ".join(args[1:])) else: if len(args) == 1: getattr(helper, method)() else: getattr(helper, method)(" ".join(args[1:])) pickle.dump(helper, pkl_file) pkl_file.close() else: pkl_file.close() os.remove(picklefile) if os.path.exists(picklefile) else None helper.tearDown()
def cmdLineParser(argv=None): """ This function parses the command line parameters and arguments """ if not argv: argv = sys.argv checkSystemEncoding() # Reference: https://stackoverflow.com/a/4012683 (Note: previously used "...sys.getfilesystemencoding() or UNICODE_ENCODING") _ = getUnicode(os.path.basename(argv[0]), encoding=sys.stdin.encoding) usage = "%s%s [options]" % ("%s " % os.path.basename(sys.executable) if not IS_WIN else "", "\"%s\"" % _ if " " in _ else _) parser = OptionParser(usage=usage) try: parser.add_option("--hh", dest="advancedHelp", action="store_true", help="Show advanced help message and exit") parser.add_option("--version", dest="showVersion", action="store_true", help="Show program's version number and exit") parser.add_option("-v", dest="verbose", type="int", help="Verbosity level: 0-6 (default %d)" % defaults.verbose) # Target options target = OptionGroup(parser, "Target", "At least one of these " "options has to be provided to define the target(s)") target.add_option("-d", dest="direct", help="Connection string " "for direct database connection") target.add_option("-u", "--url", dest="url", help="Target URL (e.g. \"http://www.site.com/vuln.php?id=1\")") target.add_option("-l", dest="logFile", help="Parse target(s) from Burp " "or WebScarab proxy log file") target.add_option("-x", dest="sitemapUrl", help="Parse target(s) from remote sitemap(.xml) file") target.add_option("-m", dest="bulkFile", help="Scan multiple targets given " "in a textual file ") target.add_option("-r", dest="requestFile", help="Load HTTP request from a file") target.add_option("-g", dest="googleDork", help="Process Google dork results as target URLs") target.add_option("-c", dest="configFile", help="Load options from a configuration INI file") # Request options request = OptionGroup(parser, "Request", "These options can be used " "to specify how to connect to the target URL") request.add_option("--method", dest="method", help="Force usage of given HTTP method (e.g. 
PUT)") request.add_option("--data", dest="data", help="Data string to be sent through POST (e.g. \"id=1\")") request.add_option("--param-del", dest="paramDel", help="Character used for splitting parameter values (e.g. &)") request.add_option("--cookie", dest="cookie", help="HTTP Cookie header value (e.g. \"PHPSESSID=a8d127e..\")") request.add_option("--cookie-del", dest="cookieDel", help="Character used for splitting cookie values (e.g. ;)") request.add_option("--load-cookies", dest="loadCookies", help="File containing cookies in Netscape/wget format") request.add_option("--drop-set-cookie", dest="dropSetCookie", action="store_true", help="Ignore Set-Cookie header from response") request.add_option("--user-agent", dest="agent", help="HTTP User-Agent header value") request.add_option("--random-agent", dest="randomAgent", action="store_true", help="Use randomly selected HTTP User-Agent header value") request.add_option("--host", dest="host", help="HTTP Host header value") request.add_option("--referer", dest="referer", help="HTTP Referer header value") request.add_option("-H", "--header", dest="header", help="Extra header (e.g. \"X-Forwarded-For: 127.0.0.1\")") request.add_option("--headers", dest="headers", help="Extra headers (e.g. \"Accept-Language: fr\\nETag: 123\")") request.add_option("--auth-type", dest="authType", help="HTTP authentication type (Basic, Digest, NTLM or PKI)") request.add_option("--auth-cred", dest="authCred", help="HTTP authentication credentials (name:password)") request.add_option("--auth-file", dest="authFile", help="HTTP authentication PEM cert/private key file") request.add_option("--ignore-code", dest="ignoreCode", type="int", help="Ignore (problematic) HTTP error code (e.g. 
401)") request.add_option("--ignore-proxy", dest="ignoreProxy", action="store_true", help="Ignore system default proxy settings") request.add_option("--ignore-redirects", dest="ignoreRedirects", action="store_true", help="Ignore redirection attempts") request.add_option("--ignore-timeouts", dest="ignoreTimeouts", action="store_true", help="Ignore connection timeouts") request.add_option("--proxy", dest="proxy", help="Use a proxy to connect to the target URL") request.add_option("--proxy-cred", dest="proxyCred", help="Proxy authentication credentials (name:password)") request.add_option("--proxy-file", dest="proxyFile", help="Load proxy list from a file") request.add_option("--tor", dest="tor", action="store_true", help="Use Tor anonymity network") request.add_option("--tor-port", dest="torPort", help="Set Tor proxy port other than default") request.add_option("--tor-type", dest="torType", help="Set Tor proxy type (HTTP, SOCKS4 or SOCKS5 (default))") request.add_option("--check-tor", dest="checkTor", action="store_true", help="Check to see if Tor is used properly") request.add_option("--delay", dest="delay", type="float", help="Delay in seconds between each HTTP request") request.add_option("--timeout", dest="timeout", type="float", help="Seconds to wait before timeout connection (default %d)" % defaults.timeout) request.add_option("--retries", dest="retries", type="int", help="Retries when the connection timeouts (default %d)" % defaults.retries) request.add_option("--randomize", dest="rParam", help="Randomly change value for given parameter(s)") request.add_option("--safe-url", dest="safeUrl", help="URL address to visit frequently during testing") request.add_option("--safe-post", dest="safePost", help="POST data to send to a safe URL") request.add_option("--safe-req", dest="safeReqFile", help="Load safe HTTP request from a file") request.add_option("--safe-freq", dest="safeFreq", type="int", help="Test requests between two visits to a given safe URL") 
request.add_option("--skip-urlencode", dest="skipUrlEncode", action="store_true", help="Skip URL encoding of payload data") request.add_option("--csrf-token", dest="csrfToken", help="Parameter used to hold anti-CSRF token") request.add_option("--csrf-url", dest="csrfUrl", help="URL address to visit for extraction of anti-CSRF token") request.add_option("--force-ssl", dest="forceSSL", action="store_true", help="Force usage of SSL/HTTPS") request.add_option("--chunked", dest="chunked", action="store_true", help="Use HTTP chunked transfer encoded (POST) requests") request.add_option("--hpp", dest="hpp", action="store_true", help="Use HTTP parameter pollution method") request.add_option("--eval", dest="evalCode", help="Evaluate provided Python code before the request (e.g. \"import hashlib;id2=hashlib.md5(id).hexdigest()\")") # Optimization options optimization = OptionGroup(parser, "Optimization", "These options can be used to optimize the performance of sqlmap") optimization.add_option("-o", dest="optimize", action="store_true", help="Turn on all optimization switches") optimization.add_option("--predict-output", dest="predictOutput", action="store_true", help="Predict common queries output") optimization.add_option("--keep-alive", dest="keepAlive", action="store_true", help="Use persistent HTTP(s) connections") optimization.add_option("--null-connection", dest="nullConnection", action="store_true", help="Retrieve page length without actual HTTP response body") optimization.add_option("--threads", dest="threads", type="int", help="Max number of concurrent HTTP(s) " "requests (default %d)" % defaults.threads) # Injection options injection = OptionGroup(parser, "Injection", "These options can be used to specify which parameters to test for, provide custom injection payloads and optional tampering scripts") injection.add_option("-p", dest="testParameter", help="Testable parameter(s)") injection.add_option("--skip", dest="skip", help="Skip testing for given 
parameter(s)") injection.add_option("--skip-static", dest="skipStatic", action="store_true", help="Skip testing parameters that not appear to be dynamic") injection.add_option("--param-exclude", dest="paramExclude", help="Regexp to exclude parameters from testing (e.g. \"ses\")") injection.add_option("--dbms", dest="dbms", help="Force back-end DBMS to provided value") injection.add_option("--dbms-cred", dest="dbmsCred", help="DBMS authentication credentials (user:password)") injection.add_option("--os", dest="os", help="Force back-end DBMS operating system to provided value") injection.add_option("--invalid-bignum", dest="invalidBignum", action="store_true", help="Use big numbers for invalidating values") injection.add_option("--invalid-logical", dest="invalidLogical", action="store_true", help="Use logical operations for invalidating values") injection.add_option("--invalid-string", dest="invalidString", action="store_true", help="Use random strings for invalidating values") injection.add_option("--no-cast", dest="noCast", action="store_true", help="Turn off payload casting mechanism") injection.add_option("--no-escape", dest="noEscape", action="store_true", help="Turn off string escaping mechanism") injection.add_option("--prefix", dest="prefix", help="Injection payload prefix string") injection.add_option("--suffix", dest="suffix", help="Injection payload suffix string") injection.add_option("--tamper", dest="tamper", help="Use given script(s) for tampering injection data") # Detection options detection = OptionGroup(parser, "Detection", "These options can be used to customize the detection phase") detection.add_option("--level", dest="level", type="int", help="Level of tests to perform (1-5, default %d)" % defaults.level) detection.add_option("--risk", dest="risk", type="int", help="Risk of tests to perform (1-3, default %d)" % defaults.risk) detection.add_option("--string", dest="string", help="String to match when query is evaluated to True") 
detection.add_option("--not-string", dest="notString", help="String to match when query is evaluated to False") detection.add_option("--regexp", dest="regexp", help="Regexp to match when query is evaluated to True") detection.add_option("--code", dest="code", type="int", help="HTTP code to match when query is evaluated to True") detection.add_option("--text-only", dest="textOnly", action="store_true", help="Compare pages based only on the textual content") detection.add_option("--titles", dest="titles", action="store_true", help="Compare pages based only on their titles") # Techniques options techniques = OptionGroup(parser, "Techniques", "These options can be used to tweak testing of specific SQL injection techniques") techniques.add_option("--technique", dest="tech", help="SQL injection techniques to use (default \"%s\")" % defaults.tech) techniques.add_option("--time-sec", dest="timeSec", type="int", help="Seconds to delay the DBMS response (default %d)" % defaults.timeSec) techniques.add_option("--union-cols", dest="uCols", help="Range of columns to test for UNION query SQL injection") techniques.add_option("--union-char", dest="uChar", help="Character to use for bruteforcing number of columns") techniques.add_option("--union-from", dest="uFrom", help="Table to use in FROM part of UNION query SQL injection") techniques.add_option("--dns-domain", dest="dnsDomain", help="Domain name used for DNS exfiltration attack") techniques.add_option("--second-url", dest="secondUrl", help="Resulting page URL searched for second-order response") techniques.add_option("--second-req", dest="secondReq", help="Load second-order HTTP request from file") # Fingerprint options fingerprint = OptionGroup(parser, "Fingerprint") fingerprint.add_option("-f", "--fingerprint", dest="extensiveFp", action="store_true", help="Perform an extensive DBMS version fingerprint") # Enumeration options enumeration = OptionGroup(parser, "Enumeration", "These options can be used to enumerate the 
back-end database management system information, structure and data contained in the tables. Moreover you can run your own SQL statements") enumeration.add_option("-a", "--all", dest="getAll", action="store_true", help="Retrieve everything") enumeration.add_option("-b", "--banner", dest="getBanner", action="store_true", help="Retrieve DBMS banner") enumeration.add_option("--current-user", dest="getCurrentUser", action="store_true", help="Retrieve DBMS current user") enumeration.add_option("--current-db", dest="getCurrentDb", action="store_true", help="Retrieve DBMS current database") enumeration.add_option("--hostname", dest="getHostname", action="store_true", help="Retrieve DBMS server hostname") enumeration.add_option("--is-dba", dest="isDba", action="store_true", help="Detect if the DBMS current user is DBA") enumeration.add_option("--users", dest="getUsers", action="store_true", help="Enumerate DBMS users") enumeration.add_option("--passwords", dest="getPasswordHashes", action="store_true", help="Enumerate DBMS users password hashes") enumeration.add_option("--privileges", dest="getPrivileges", action="store_true", help="Enumerate DBMS users privileges") enumeration.add_option("--roles", dest="getRoles", action="store_true", help="Enumerate DBMS users roles") enumeration.add_option("--dbs", dest="getDbs", action="store_true", help="Enumerate DBMS databases") enumeration.add_option("--tables", dest="getTables", action="store_true", help="Enumerate DBMS database tables") enumeration.add_option("--columns", dest="getColumns", action="store_true", help="Enumerate DBMS database table columns") enumeration.add_option("--schema", dest="getSchema", action="store_true", help="Enumerate DBMS schema") enumeration.add_option("--count", dest="getCount", action="store_true", help="Retrieve number of entries for table(s)") enumeration.add_option("--dump", dest="dumpTable", action="store_true", help="Dump DBMS database table entries") enumeration.add_option("--dump-all", 
dest="dumpAll", action="store_true", help="Dump all DBMS databases tables entries") enumeration.add_option("--search", dest="search", action="store_true", help="Search column(s), table(s) and/or database name(s)") enumeration.add_option("--comments", dest="getComments", action="store_true", help="Check for DBMS comments during enumeration") enumeration.add_option("-D", dest="db", help="DBMS database to enumerate") enumeration.add_option("-T", dest="tbl", help="DBMS database table(s) to enumerate") enumeration.add_option("-C", dest="col", help="DBMS database table column(s) to enumerate") enumeration.add_option("-X", dest="exclude", help="DBMS database identifier(s) to not enumerate") enumeration.add_option("-U", dest="user", help="DBMS user to enumerate") enumeration.add_option("--exclude-sysdbs", dest="excludeSysDbs", action="store_true", help="Exclude DBMS system databases when enumerating tables") enumeration.add_option("--pivot-column", dest="pivotColumn", help="Pivot column name") enumeration.add_option("--where", dest="dumpWhere", help="Use WHERE condition while table dumping") enumeration.add_option("--start", dest="limitStart", type="int", help="First dump table entry to retrieve") enumeration.add_option("--stop", dest="limitStop", type="int", help="Last dump table entry to retrieve") enumeration.add_option("--first", dest="firstChar", type="int", help="First query output word character to retrieve") enumeration.add_option("--last", dest="lastChar", type="int", help="Last query output word character to retrieve") enumeration.add_option("--sql-query", dest="sqlQuery", help="SQL statement to be executed") enumeration.add_option("--sql-shell", dest="sqlShell", action="store_true", help="Prompt for an interactive SQL shell") enumeration.add_option("--sql-file", dest="sqlFile", help="Execute SQL statements from given file(s)") # Brute force options brute = OptionGroup(parser, "Brute force", "These options can be used to run brute force checks") 
brute.add_option("--common-tables", dest="commonTables", action="store_true", help="Check existence of common tables") brute.add_option("--common-columns", dest="commonColumns", action="store_true", help="Check existence of common columns") # User-defined function options udf = OptionGroup(parser, "User-defined function injection", "These options can be used to create custom user-defined functions") udf.add_option("--udf-inject", dest="udfInject", action="store_true", help="Inject custom user-defined functions") udf.add_option("--shared-lib", dest="shLib", help="Local path of the shared library") # File system options filesystem = OptionGroup(parser, "File system access", "These options can be used to access the back-end database management system underlying file system") filesystem.add_option("--file-read", dest="fileRead", help="Read a file from the back-end DBMS file system") filesystem.add_option("--file-write", dest="fileWrite", help="Write a local file on the back-end DBMS file system") filesystem.add_option("--file-dest", dest="fileDest", help="Back-end DBMS absolute filepath to write to") # Takeover options takeover = OptionGroup(parser, "Operating system access", "These options can be used to access the back-end database management system underlying operating system") takeover.add_option("--os-cmd", dest="osCmd", help="Execute an operating system command") takeover.add_option("--os-shell", dest="osShell", action="store_true", help="Prompt for an interactive operating system shell") takeover.add_option("--os-pwn", dest="osPwn", action="store_true", help="Prompt for an OOB shell, Meterpreter or VNC") takeover.add_option("--os-smbrelay", dest="osSmb", action="store_true", help="One click prompt for an OOB shell, Meterpreter or VNC") takeover.add_option("--os-bof", dest="osBof", action="store_true", help="Stored procedure buffer overflow " "exploitation") takeover.add_option("--priv-esc", dest="privEsc", action="store_true", help="Database process user 
privilege escalation") takeover.add_option("--msf-path", dest="msfPath", help="Local path where Metasploit Framework is installed") takeover.add_option("--tmp-path", dest="tmpPath", help="Remote absolute path of temporary files directory") # Windows registry options windows = OptionGroup(parser, "Windows registry access", "These options can be used to access the back-end database management system Windows registry") windows.add_option("--reg-read", dest="regRead", action="store_true", help="Read a Windows registry key value") windows.add_option("--reg-add", dest="regAdd", action="store_true", help="Write a Windows registry key value data") windows.add_option("--reg-del", dest="regDel", action="store_true", help="Delete a Windows registry key value") windows.add_option("--reg-key", dest="regKey", help="Windows registry key") windows.add_option("--reg-value", dest="regVal", help="Windows registry key value") windows.add_option("--reg-data", dest="regData", help="Windows registry key value data") windows.add_option("--reg-type", dest="regType", help="Windows registry key value type") # General options general = OptionGroup(parser, "General", "These options can be used to set some general working parameters") general.add_option("-s", dest="sessionFile", help="Load session from a stored (.sqlite) file") general.add_option("-t", dest="trafficFile", help="Log all HTTP traffic into a textual file") general.add_option("--batch", dest="batch", action="store_true", help="Never ask for user input, use the default behavior") general.add_option("--binary-fields", dest="binaryFields", help="Result fields having binary values (e.g. 
\"digest\")") general.add_option("--check-internet", dest="checkInternet", action="store_true", help="Check Internet connection before assessing the target") general.add_option("--crawl", dest="crawlDepth", type="int", help="Crawl the website starting from the target URL") general.add_option("--crawl-exclude", dest="crawlExclude", help="Regexp to exclude pages from crawling (e.g. \"logout\")") general.add_option("--csv-del", dest="csvDel", help="Delimiting character used in CSV output (default \"%s\")" % defaults.csvDel) general.add_option("--charset", dest="charset", help="Blind SQL injection charset (e.g. \"0123456789abcdef\")") general.add_option("--dump-format", dest="dumpFormat", help="Format of dumped data (CSV (default), HTML or SQLITE)") general.add_option("--encoding", dest="encoding", help="Character encoding used for data retrieval (e.g. GBK)") general.add_option("--eta", dest="eta", action="store_true", help="Display for each output the estimated time of arrival") general.add_option("--flush-session", dest="flushSession", action="store_true", help="Flush session files for current target") general.add_option("--forms", dest="forms", action="store_true", help="Parse and test forms on target URL") general.add_option("--fresh-queries", dest="freshQueries", action="store_true", help="Ignore query results stored in session file") general.add_option("--har", dest="harFile", help="Log all HTTP traffic into a HAR file") general.add_option("--hex", dest="hexConvert", action="store_true", help="Use hex conversion during data retrieval") general.add_option("--output-dir", dest="outputDir", action="store", help="Custom output directory path") general.add_option("--parse-errors", dest="parseErrors", action="store_true", help="Parse and display DBMS error messages from responses") general.add_option("--preprocess", dest="preprocess", help="Use given script(s) for preprocessing of response data") general.add_option("--repair", dest="repair", action="store_true", 
help="Redump entries having unknown character marker (%s)" % INFERENCE_UNKNOWN_CHAR) general.add_option("--save", dest="saveConfig", help="Save options to a configuration INI file") general.add_option("--scope", dest="scope", help="Regexp to filter targets from provided proxy log") general.add_option("--test-filter", dest="testFilter", help="Select tests by payloads and/or titles (e.g. ROW)") general.add_option("--test-skip", dest="testSkip", help="Skip tests by payloads and/or titles (e.g. BENCHMARK)") general.add_option("--update", dest="updateAll", action="store_true", help="Update sqlmap") # Miscellaneous options miscellaneous = OptionGroup(parser, "Miscellaneous") miscellaneous.add_option("-z", dest="mnemonics", help="Use short mnemonics (e.g. \"flu,bat,ban,tec=EU\")") miscellaneous.add_option("--alert", dest="alert", help="Run host OS command(s) when SQL injection is found") miscellaneous.add_option("--answers", dest="answers", help="Set predefined answers (e.g. \"quit=N,follow=N\")") miscellaneous.add_option("--beep", dest="beep", action="store_true", help="Beep on question and/or when SQL injection is found") miscellaneous.add_option("--cleanup", dest="cleanup", action="store_true", help="Clean up the DBMS from sqlmap specific UDF and tables") miscellaneous.add_option("--dependencies", dest="dependencies", action="store_true", help="Check for missing (optional) sqlmap dependencies") miscellaneous.add_option("--disable-coloring", dest="disableColoring", action="store_true", help="Disable console output coloring") miscellaneous.add_option("--gpage", dest="googlePage", type="int", help="Use Google dork results from specified page number") miscellaneous.add_option("--identify-waf", dest="identifyWaf", action="store_true", help="Make a thorough testing for a WAF/IPS protection") miscellaneous.add_option("--list-tampers", dest="listTampers", action="store_true", help="Display list of available tamper scripts") miscellaneous.add_option("--mobile", dest="mobile", 
action="store_true", help="Imitate smartphone through HTTP User-Agent header") miscellaneous.add_option("--offline", dest="offline", action="store_true", help="Work in offline mode (only use session data)") miscellaneous.add_option("--purge", dest="purge", action="store_true", help="Safely remove all content from sqlmap data directory") miscellaneous.add_option("--skip-waf", dest="skipWaf", action="store_true", help="Skip heuristic detection of WAF/IPS protection") miscellaneous.add_option("--smart", dest="smart", action="store_true", help="Conduct thorough tests only if positive heuristic(s)") miscellaneous.add_option("--sqlmap-shell", dest="sqlmapShell", action="store_true", help="Prompt for an interactive sqlmap shell") miscellaneous.add_option("--tmp-dir", dest="tmpDir", help="Local directory for storing temporary files") miscellaneous.add_option("--web-root", dest="webRoot", help="Web server document root directory (e.g. \"/var/www\")") miscellaneous.add_option("--wizard", dest="wizard", action="store_true", help="Simple wizard interface for beginner users") # Hidden and/or experimental options parser.add_option("--base64", dest="base64Parameter", help=SUPPRESS_HELP) # help="Parameter(s) containing Base64 encoded values") parser.add_option("--crack", dest="hashFile", help=SUPPRESS_HELP) # help="Load and crack hashes from a file (standalone)") parser.add_option("--dummy", dest="dummy", action="store_true", help=SUPPRESS_HELP) parser.add_option("--murphy-rate", dest="murphyRate", type="int", help=SUPPRESS_HELP) parser.add_option("--debug", dest="debug", action="store_true", help=SUPPRESS_HELP) parser.add_option("--disable-precon", dest="disablePrecon", action="store_true", help=SUPPRESS_HELP) parser.add_option("--disable-stats", dest="disableStats", action="store_true", help=SUPPRESS_HELP) parser.add_option("--profile", dest="profile", action="store_true", help=SUPPRESS_HELP) parser.add_option("--force-dbms", dest="forceDbms", help=SUPPRESS_HELP) 
parser.add_option("--force-dns", dest="forceDns", action="store_true", help=SUPPRESS_HELP) parser.add_option("--force-pivoting", dest="forcePivoting", action="store_true", help=SUPPRESS_HELP) parser.add_option("--force-threads", dest="forceThreads", action="store_true", help=SUPPRESS_HELP) parser.add_option("--smoke-test", dest="smokeTest", action="store_true", help=SUPPRESS_HELP) parser.add_option("--live-test", dest="liveTest", action="store_true", help=SUPPRESS_HELP) parser.add_option("--vuln-test", dest="vulnTest", action="store_true", help=SUPPRESS_HELP) parser.add_option("--stop-fail", dest="stopFail", action="store_true", help=SUPPRESS_HELP) parser.add_option("--run-case", dest="runCase", help=SUPPRESS_HELP) # API options parser.add_option("--api", dest="api", action="store_true", help=SUPPRESS_HELP) parser.add_option("--taskid", dest="taskid", help=SUPPRESS_HELP) parser.add_option("--database", dest="database", help=SUPPRESS_HELP) parser.add_option_group(target) parser.add_option_group(request) parser.add_option_group(optimization) parser.add_option_group(injection) parser.add_option_group(detection) parser.add_option_group(techniques) parser.add_option_group(fingerprint) parser.add_option_group(enumeration) parser.add_option_group(brute) parser.add_option_group(udf) parser.add_option_group(filesystem) parser.add_option_group(takeover) parser.add_option_group(windows) parser.add_option_group(general) parser.add_option_group(miscellaneous) # Dirty hack to display longer options without breaking into two lines def _(self, *args): retVal = parser.formatter._format_option_strings(*args) if len(retVal) > MAX_HELP_OPTION_LENGTH: retVal = ("%%.%ds.." 
% (MAX_HELP_OPTION_LENGTH - parser.formatter.indent_increment)) % retVal return retVal parser.formatter._format_option_strings = parser.formatter.format_option_strings parser.formatter.format_option_strings = type(parser.formatter.format_option_strings)(_, parser) # Dirty hack for making a short option '-hh' option = parser.get_option("--hh") option._short_opts = ["-hh"] option._long_opts = [] # Dirty hack for inherent help message of switch '-h' option = parser.get_option("-h") option.help = option.help.capitalize().replace("this help", "basic help") _ = [] prompt = False advancedHelp = True extraHeaders = [] tamperIndex = None # Reference: https://stackoverflow.com/a/4012683 (Note: previously used "...sys.getfilesystemencoding() or UNICODE_ENCODING") for arg in argv: _.append(getUnicode(arg, encoding=sys.stdin.encoding)) argv = _ checkDeprecatedOptions(argv) prompt = "--sqlmap-shell" in argv if prompt: parser.usage = "" cmdLineOptions.sqlmapShell = True _ = ["x", "q", "exit", "quit", "clear"] for option in parser.option_list: _.extend(option._long_opts) _.extend(option._short_opts) for group in parser.option_groups: for option in group.option_list: _.extend(option._long_opts) _.extend(option._short_opts) autoCompletion(AUTOCOMPLETE_TYPE.SQLMAP, commands=_) while True: command = None try: # Note: in Python2 command should not be converted to Unicode before passing to shlex (Reference: https://bugs.python.org/issue1170) command = _input("sqlmap-shell> ").strip() except (KeyboardInterrupt, EOFError): print() raise SqlmapShellQuitException if not command: continue elif command.lower() == "clear": clearHistory() dataToStdout("[i] history cleared\n") saveHistory(AUTOCOMPLETE_TYPE.SQLMAP) elif command.lower() in ("x", "q", "exit", "quit"): raise SqlmapShellQuitException elif command[0] != '-': dataToStdout("[!] 
invalid option(s) provided\n") dataToStdout("[i] proper example: '-u http://www.site.com/vuln.php?id=1 --banner'\n") else: saveHistory(AUTOCOMPLETE_TYPE.SQLMAP) loadHistory(AUTOCOMPLETE_TYPE.SQLMAP) break try: for arg in shlex.split(command): argv.append(getUnicode(arg, encoding=sys.stdin.encoding)) except ValueError as ex: raise SqlmapSyntaxException("something went wrong during command line parsing ('%s')" % getSafeExString(ex)) for i in xrange(len(argv)): if argv[i] == "-hh": argv[i] = "-h" elif len(argv[i]) > 1 and all(ord(_) in xrange(0x2018, 0x2020) for _ in ((argv[i].split('=', 1)[-1].strip() or ' ')[0], argv[i][-1])): dataToStdout("[!] copy-pasting illegal (non-console) quote characters from Internet is, well, illegal (%s)\n" % argv[i]) raise SystemExit elif len(argv[i]) > 1 and u"\uff0c" in argv[i].split('=', 1)[-1]: dataToStdout("[!] copy-pasting illegal (non-console) comma characters from Internet is, well, illegal (%s)\n" % argv[i]) raise SystemExit elif re.search(r"\A-\w=.+", argv[i]): dataToStdout("[!] 
potentially miswritten (illegal '=') short option detected ('%s')\n" % argv[i]) raise SystemExit elif argv[i].startswith("--tamper"): if tamperIndex is None: tamperIndex = i if '=' in argv[i] else (i + 1 if i + 1 < len(argv) and not argv[i + 1].startswith('-') else None) else: argv[tamperIndex] = "%s,%s" % (argv[tamperIndex], argv[i].split('=')[1] if '=' in argv[i] else (argv[i + 1] if i + 1 < len(argv) and not argv[i + 1].startswith('-') else "")) argv[i] = "" elif argv[i] == "-H": if i + 1 < len(argv): extraHeaders.append(argv[i + 1]) elif argv[i] == "-r": for j in xrange(i + 2, len(argv)): value = argv[j] if os.path.isfile(value): argv[i + 1] += ",%s" % value argv[j] = '' else: break elif re.match(r"\A\d+!\Z", argv[i]) and argv[max(0, i - 1)] == "--threads" or re.match(r"\A--threads.+\d+!\Z", argv[i]): argv[i] = argv[i][:-1] conf.skipThreadCheck = True elif argv[i] == "--version": print(VERSION_STRING.split('/')[-1]) raise SystemExit elif argv[i] in ("-h", "--help"): advancedHelp = False for group in parser.option_groups[:]: found = False for option in group.option_list: if option.dest not in BASIC_HELP_ITEMS: option.help = SUPPRESS_HELP else: found = True if not found: parser.option_groups.remove(group) for verbosity in (_ for _ in argv if re.search(r"\A\-v+\Z", _)): try: if argv.index(verbosity) == len(argv) - 1 or not argv[argv.index(verbosity) + 1].isdigit(): conf.verbose = verbosity.count('v') + 1 del argv[argv.index(verbosity)] except (IndexError, ValueError): pass try: (args, _) = parser.parse_args(argv) except UnicodeEncodeError as ex: dataToStdout("\n[!] %s\n" % getUnicode(ex.object.encode("unicode-escape"))) raise SystemExit except SystemExit: if "-h" in argv and not advancedHelp: dataToStdout("\n[!] to see full list of options run with '-hh'\n") raise if extraHeaders: if not args.headers: args.headers = "" delimiter = "\\n" if "\\n" in args.headers else "\n" args.headers += delimiter + delimiter.join(extraHeaders) # Expand given mnemonic options (e.g. 
-z "ign,flu,bat") for i in xrange(len(argv) - 1): if argv[i] == "-z": expandMnemonics(argv[i + 1], parser, args) if args.dummy: args.url = args.url or DUMMY_URL if not any((args.direct, args.url, args.logFile, args.bulkFile, args.googleDork, args.configFile, args.requestFile, args.updateAll, args.smokeTest, args.vulnTest, args.liveTest, args.wizard, args.dependencies, args.purge, args.sitemapUrl, args.listTampers, args.hashFile)): errMsg = "missing a mandatory option (-d, -u, -l, -m, -r, -g, -c, -x, --list-tampers, --wizard, --update, --purge or --dependencies). " errMsg += "Use -h for basic and -hh for advanced help\n" parser.error(errMsg) return args except (OptionError, TypeError) as ex: parser.error(ex) except SystemExit: # Protection against Windows dummy double clicking if IS_WIN: dataToStdout("\nPress Enter to continue...") _input() raise debugMsg = "parsing command line" logger.debug(debugMsg)
def process_cmd_line(argv, pkg):
    """Parse command line arguments for the dump utility.

    Arguments:
        argv -- argument list to parse (without the program name).
        pkg  -- package object; only its ``code_name`` attribute is read
                to build the usage text.

    Returns (options, args) where ``args`` holds the single positional
    file name, or None when the command line is invalid (help has then
    already been printed).
    """

    # The usage line differs depending on whether the tool is run as a
    # standalone .py script or through the front-end launcher.
    if sys.argv[0][-3:] == '.py':
        usage = "usage: %prog [options] <file_name>"
    else:
        usage = "usage: %prog bdump [options] <file_name>"

    usage += """

Dump headers and optionnaly content of a """ + pkg.code_name + """
Preprocessor, Partitioner, or restart file.
"""

    parser = OptionParser(usage=usage)

    parser.add_option("-e", "--extract", dest="extract", action="store_true",
                      help="extract mode (extract full section data, with "
                           "no metadata).")

    parser.add_option("--f-format", dest="f_format", type="string",
                      metavar="<fmt>",
                      help="define format for floating-point numbers (default: "
                           "\"15.9e\" for floats, \"22.15e\" for doubles).")

    parser.add_option("--location", dest="location", type="string",
                      metavar="<id>",
                      help="only output section(s) with given location id.")

    parser.add_option("-n", dest="level", type="int",
                      metavar="<level>",
                      help="number of first and last elements of each section "
                           "to output (default: print headers only).")

    parser.add_option("--section", dest="section", type="string",
                      metavar="<name>",
                      help="only consider section matching given criteria.")

    # All defaults in one call rather than five separate ones.
    parser.set_defaults(extract=False, f_format=None, location=None,
                        level=None, section=None)

    options, args = parser.parse_args(argv)

    # Exactly one positional argument is expected, and it must be the last
    # token on the command line (i.e. all options must come before it).
    valid = len(args) == 1 and args[0] == argv[-1]
    if not valid:
        args = None
        parser.print_help()

    return options, args
def getOptions(args=None):
    """Parse command line options for the FCD trajectory plotting tool.

    Arguments:
        args -- argument list to parse (defaults to sys.argv[1:]).

    Returns the optparse options object, extended with ``fcdfiles`` (the
    positional FCD input files) and with ``filterRoute`` split from a
    comma-separated string into a list. Exits via sys.exit() when no
    input file is given.
    """
    optParser = OptionParser()
    optParser.add_option(
        "-t", "--trajectory-type", dest="ttype", default="ds",
        help="select two letters from [t, s, d, a, i, x, y] to plot" +
             " Time, Speed, Distance, Acceleration, Angle, x-Position, y-Position." +
             " Default 'ds' plots Distance vs. Speed")
    optParser.add_option("-s", "--show", action="store_true",
                         default=False, help="show plot directly")
    optParser.add_option("-o", "--output", help="outputfile for saving plots",
                         default="plot.png")
    optParser.add_option("--csv-output", dest="csv_output",
                         help="write plot as csv", metavar="FILE")
    optParser.add_option("-b", "--ballistic", action="store_true",
                         default=False,
                         help="perform ballistic integration of distance")
    optParser.add_option(
        "--filter-route", dest="filterRoute",
        help="only export trajectories that pass the given list of edges (regardless of gaps)")
    optParser.add_option(
        "-p", "--pick-distance", dest="pickDist", type="float", default=1,
        help="pick lines within the given distance in interactive plot mode")
    optParser.add_option(
        "-i", "--invert-distance-angle", dest="invertDistanceAngle", type="float",
        # FIX: help text grammar ("a average" -> "an average").
        help="invert distance for trajectories with an average angle near FLOAT")
    # FIX: help string was missing its closing parenthesis.
    optParser.add_option("--label", help="plot label (default: input file name)")
    optParser.add_option("--invert-yaxis", dest="invertYAxis",
                         action="store_true", default=False,
                         help="Invert the Y-Axis")
    optParser.add_option("--legend", action="store_true", default=False,
                         help="Add legend")
    optParser.add_option("-v", "--verbose", action="store_true", default=False,
                         help="tell me what you are doing")
    options, args = optParser.parse_args(args=args)
    if len(args) < 1:
        sys.exit("mandatory argument FCD_FILE missing")
    options.fcdfiles = args
    if options.filterRoute is not None:
        # Comma-separated edge list -> list of edge ids.
        options.filterRoute = options.filterRoute.split(',')
    return options
# Command-line entry point: compute and display a CSS (curvature scale
# space) plot for a single curve slice file. (Python 2 syntax.)
if __name__ == '__main__':
    parser = OptionParser()
    parser.add_option("-p", "--path", dest="cpath",
                      help="PATH to a single curve slice", metavar="CPATH")
    # NOTE(review): -d/--dir is parsed but never used in this block —
    # confirm whether directory mode was ever implemented.
    parser.add_option("-d", "--dir", dest="dpath",
                      help="PATH to object curve slices", metavar="DPATH")
    parser.add_option("-q", "--quiet", action="store_false", dest="verbose",
                      default=True,
                      help="don't print status messages to stdout")
    parser.add_option("-t", "--type", default="css", dest="csstype",
                      help="types allowed: css, eigencss, [default: %default]")
    parser.add_option("-n", "--name", dest="scss_name",
                      help="Name for saving scss image", metavar="NAME")
    # parse_args(sys.argv) includes the program name itself, so exactly one
    # leftover argument (argv[0]) means "no extra positionals were given".
    (options, args) = parser.parse_args(sys.argv)
    if len(args) != 1:
        print len(args), args
        print options
        parser.error('Incorrect number of arguiments, path and method needed')
    else:
        if options.cpath and options.csstype:
            if options.csstype == 'css':
                # NOTE(review): the numeric arguments (400, 0.1, 5) are
                # assumed to be sampling/scale parameters — confirm against
                # exp_simple_css's definition.
                css = exp_simple_css(options.cpath, 400, 0.1, 5)
                plt.plot(css)
                plt.show(block=True)
            else:
                # Any type other than 'css' falls through to the eigen-CSS
                # variant (no validation of the value itself).
                css = exp_eigen_css(options.cpath, 400, 0.1, False)
                plt.plot(css)
                plt.show(block=True)
def main(args):
    """Run the ad-auction simulation over sampled value draws.

    Parses command line options, instantiates the requested agent
    classes, simulates every (or a sampled subset of) permutation of
    per-click values, and logs average per-agent spend/utility and the
    average daily revenue. (Python 2 syntax.)
    """
    usage_msg = "Usage: %prog [options] PeerClass1[,cnt] PeerClass2[,cnt2] ..."
    parser = OptionParser(usage=usage_msg)

    def usage(msg):
        # Print an error, the option help, and abort.
        print "Error: %s\n" % msg
        parser.print_help()
        sys.exit()

    parser.add_option("--loglevel", dest="loglevel", default="info",
                      help="Set the logging level: 'debug' or 'info'")
    parser.add_option("--mech", dest="mechanism", default="gsp",
                      help="Set the mechanim: 'gsp' or 'vcg' or 'switch'")
    parser.add_option("--num-rounds", dest="num_rounds", default=48, type="int",
                      help="Set number of rounds")
    parser.add_option("--min-val", dest="min_val", default=25, type="int",
                      help="Min per-click value, in cents")
    parser.add_option("--max-val", dest="max_val", default=175, type="int",
                      help="Max per-click value, in cents")
    parser.add_option("--budget", dest="budget", default=500000, type="int",
                      help="Total budget, in cents")
    parser.add_option("--reserve", dest="reserve", default=0, type="int",
                      help="Reserve price, in cents")
    parser.add_option("--perms", dest="max_perms", default=120, type="int",
                      help="Max number of value permutations to run. Set to 1 for debugging.")
    parser.add_option("--iters", dest="iters", default=1, type="int",
                      help="Number of different value draws to sample. Set to 1 for debugging.")
    parser.add_option("--seed", dest="seed", default=None, type="int",
                      help="seed for random numbers")

    # NOTE(review): the ``args`` parameter is shadowed here — parse_args()
    # reads sys.argv, not the argument passed to main(). Confirm intended.
    (options, args) = parser.parse_args()

    # leftover args are class names:
    # e.g. "Truthful BBAgent CleverBidder Fred"
    if len(args) == 0:
        # default
        agents_to_run = ['Truthful', 'Truthful', 'Truthful']
    else:
        agents_to_run = parse_agents(args)

    configure_logging(options.loglevel)

    if options.seed != None:
        random.seed(options.seed)

    # Add some more config options (the options object doubles as the
    # simulation configuration passed to sim()).
    options.agent_class_names = agents_to_run
    options.agent_classes = load_modules(options.agent_class_names)
    options.dropoff = 0.75

    logging.info("Starting simulation...")
    n = len(agents_to_run)

    # Accumulated utility per agent id across all permutations/iterations.
    totals = dict((id, 0) for id in range(n))
    total_revenues = []

    # If n! exceeds --perms, sample permutations instead of enumerating.
    approx = math.factorial(n) > options.max_perms
    if approx:
        num_perms = options.max_perms
        logging.warning(
            "Running approximation: taking %d samples of value permutations"
            % options.max_perms)
    else:
        num_perms = math.factorial(n)

    # NOTE(review): av_value is never used below.
    av_value=range(0,n)

    total_spent = [0 for i in range(n)]

    ## iters = no. of samples to take
    for i in range(options.iters):
        values = get_utils(n, options)
        logging.info("==== Iteration %d / %d. Values %s ====" %
                     (i, options.iters, values))
        ## Create permutations (permutes the random values, and assigns
        ## them to agents)
        if approx:
            perms = [shuffled(values) for i in range(options.max_perms)]
        else:
            perms = itertools.permutations(values)
        total_rev = 0
        ## Iterate over permutations
        for vals in perms:
            options.agent_values = list(vals)
            values = dict(zip(range(n), list(vals)))
            ## Runs simulation ###
            history = sim(options)
            ### simulation ends.
            stats = Stats(history, values)
            # Print stats in console?
            # logging.info(stats)
            for id in range(n):
                totals[id] += stats.total_utility(id)
                total_spent[id] += history.agents_spent[id]
            total_rev += stats.total_revenue()
        # Per-iteration average revenue over the permutations run.
        total_revenues.append(total_rev / float(num_perms))

    ## total_spent = total amount of money spent by agents, for all
    ## iterations, all permutations, all rounds

    # Averages are over all the value permutations considered
    N = float(num_perms) * options.iters

    logging.info("%s\t\t%s\t\t%s" % ("#" * 15, "RESULTS", "#" * 15))
    logging.info("")
    for a in range(n):
        logging.info("Stats for Agent %d, %s" % (a, agents_to_run[a]))
        # Cents -> dollars via the 0.01 factor.
        logging.info("Average spend $%.2f (daily)" % (0.01 *total_spent[a]/N))
        logging.info("Average utility $%.2f (daily)" % (0.01 * totals[a]/N))
        logging.info("-" * 40)
        logging.info("\n")

    m = mean(total_revenues)
    std = stddev(total_revenues)
    logging.warning("Average daily revenue (stddev): $%.2f ($%.2f)" %
                    (0.01 * m, 0.01*std))
help= "Choices: 'passingrate' for a plot of veto passing rates, 'response' for a plot of the neutrino response function and associated muon yields" ) group.add_option( "--energy", type=float, default=1e4, help= "Evaluate response function for this neutrino energy [%default GeV]") group.add_option( "--zenith", type=float, default=60, help="Evaluate response for this zenith angle [%default degrees]") parser.add_option_group(group) opts, args = parser.parse_args() if opts.plot == 'passingrate': plot_passing_rate(opts.depth) sys.exit(0) elif opts.plot == 'response': plot_response_function(opts.energy, opts.depth, numpy.cos(numpy.radians(opts.zenith)), [opts.flavor, 'charm'][opts.charm]) sys.exit(0) # Calculate passing rate on a grid of energies and zenith angles enu = numpy.logspace(3, 7, 101) cos_theta = numpy.arange(0, 1, .05) + .05 enu, cos_theta = numpy.meshgrid(enu, cos_theta)
action="store_const", const=logging.INFO) optp.add_option("-d", "--debug", help="logging level DEBUG", dest="loglevel", action="store_const", const=logging.DEBUG) optp.add_option("-D", "--Debug", help="logging level ALL", dest="loglevel", action="store_const", const=0) opts, args = optp.parse_args() rootlog.setLevel(opts.loglevel) use_color(opts.color) if len(args) == 0: rootlog.critical( "Je treba zadat alespon jedno OSM ID relace, nebo LAU2 kod obce.") raise SystemExit(1) filename = os.path.join(os.path.dirname(__file__), "credentials.txt") if not os.path.isfile(filename): rootlog.critical( "Soubor s prihlasovacimi udaji pro OSM nenalezen. Vytvor soubor credentials.txt, na prvni radek zadej svoje uzivatelske jmeno a na druhy heslo." ) raise SystemExit(1) with open(filename) as fp: uirzsj2osm.api.username = fp.readline().rstrip("\r\n")
def __init__(self):
    """Build the tor2web configuration storage.

    Reads the command line options, fills ``self.__dict__`` with
    hard-coded defaults for every supported setting, then calls
    ``self.load()`` so the configuration file can override them.
    SSL file paths left unset after loading fall back to paths under
    ``datadir``.
    """
    Storage.__init__(self)

    # INI section the ConfigParser reads settings from.
    self._section = 'main'
    self._parser = ConfigParser.ConfigParser()

    # Only identity/paths/daemon behaviour can be set on the command
    # line; everything else comes from the configuration file.
    parser = OptionParser()
    parser.add_option("-c", "--configfile", dest="configfile", default="/etc/tor2web.conf")
    parser.add_option("-p", "--pidfile", dest="pidfile", default='/var/run/tor2web/t2w.pid')
    parser.add_option("-u", "--uid", dest="uid", default='')
    parser.add_option("-g", "--gid", dest="gid", default='')
    parser.add_option("-n", "--nodaemon", dest="nodaemon", default=False, action="store_true")
    parser.add_option("-d", "--rundir", dest="rundir", default='/var/run/tor2web/')
    parser.add_option("-x", "--command", dest="command", default='start')
    options, _ = parser.parse_args()

    self._file = options.configfile

    # Values taken from the command line.
    self.__dict__['configfile'] = options.configfile
    self.__dict__['pidfile'] = options.pidfile
    self.__dict__['uid'] = options.uid
    self.__dict__['gid'] = options.gid
    self.__dict__['nodaemon'] = options.nodaemon
    self.__dict__['command'] = options.command

    # Hard-coded defaults, possibly overridden later by self.load().
    self.__dict__['nodename'] = 'tor2web'
    self.__dict__['datadir'] = '/home/tor2web'
    self.__dict__['sysdatadir'] = '/usr/share/tor2web/data'
    # SSL material: None means "derive from datadir after load()".
    self.__dict__['ssl_key'] = None
    self.__dict__['ssl_cert'] = None
    self.__dict__['ssl_intermediate'] = None
    self.__dict__['ssl_dh'] = None
    self.__dict__['rundir'] = options.rundir
    self.__dict__['logreqs'] = False
    self.__dict__['debugmode'] = False
    self.__dict__['debugtostdout'] = False
    self.__dict__['processes'] = 1
    self.__dict__['requests_per_process'] = 1000000
    self.__dict__['transport'] = 'BOTH'
    self.__dict__['listen_ipv4'] = '127.0.0.1'
    self.__dict__['listen_ipv6'] = None
    self.__dict__['listen_port_http'] = 80
    self.__dict__['listen_port_https'] = 443
    self.__dict__['basehost'] = 'AUTO'
    # Tor SOCKS proxy settings.
    self.__dict__['sockshost'] = '127.0.0.1'
    self.__dict__['socksport'] = 9050
    self.__dict__['socksoptimisticdata'] = True
    self.__dict__['sockmaxpersistentperhost'] = 5
    self.__dict__['sockcachedconnectiontimeout'] = 240
    self.__dict__['sockretryautomatically'] = True
    self.__dict__['cipher_list'] = 'ECDHE-RSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-SHA384:' \
                                   'ECDHE-RSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-SHA256:' \
                                   'ECDHE-RSA-AES256-SHA:DHE-DSS-AES256-SHA:DHE-RSA-AES128-SHA:'
    self.__dict__['mode'] = 'BLOCKLIST'
    self.__dict__['onion'] = None
    self.__dict__['blockhotlinking'] = True
    self.__dict__['blockhotlinking_exts'] = ['jpg', 'png', 'gif']
    self.__dict__['extra_http_response_headers'] = None
    self.__dict__['disable_disclaimer'] = False
    self.__dict__['disable_banner'] = False
    self.__dict__['disable_tor_redirection'] = False
    self.__dict__['disable_gettor'] = False
    self.__dict__['avoid_rewriting_visible_content'] = False
    # SMTP placeholders (sanitized); real values come from the config file.
    self.__dict__['smtpuser'] = '******'
    self.__dict__['smtppass'] = '******'
    self.__dict__['smtpmail'] = '*****@*****.**'
    self.__dict__[
        'smtpmailto_exceptions'] = '*****@*****.**'
    self.__dict__[
        'smtpmailto_notifications'] = '*****@*****.**'
    self.__dict__['smtpdomain'] = 'demo.globaleaks.org'
    self.__dict__['smtpport'] = 9267
    self.__dict__['smtpsecurity'] = 'TLS'
    self.__dict__['exit_node_list_refresh'] = 600
    self.__dict__['automatic_blocklist_updates_source'] = ''
    self.__dict__['automatic_blocklist_updates_refresh'] = 600
    self.__dict__['automatic_blocklist_updates_mode'] = "MERGE"
    self.__dict__['publish_lists'] = False
    self.__dict__['mirror'] = []
    self.__dict__['dummyproxy'] = None
    # NOTE(review): evaluated against the hard-coded default ('BOTH'), so
    # proto is always 'https://' here regardless of the transport later
    # loaded from the config file — confirm this is intended.
    self.__dict__['proto'] = 'http://' if self.__dict__[
        'transport'] == 'HTTP' else 'https://'
    self.__dict__['bufsize'] = 4096

    # Development VS. Production: prefer a ../data directory next to the
    # script when present (source checkout) over the system data dir.
    localpath = os.path.abspath(
        os.path.join(os.path.dirname(sys.argv[0]), "..", "data"))
    if os.path.exists(localpath):
        self.__dict__['sysdatadir'] = localpath

    # Override defaults from the configuration file.
    self.load()

    # Fill in any SSL paths the config file did not provide.
    if self.__dict__['ssl_key'] is None:
        self.__dict__['ssl_key'] = os.path.join(self.__dict__['datadir'],
                                                "certs/tor2web-key.pem")
    if self.__dict__['ssl_cert'] is None:
        self.__dict__['ssl_cert'] = os.path.join(self.__dict__['datadir'],
                                                 "certs/tor2web-cert.pem")
    if self.__dict__['ssl_intermediate'] is None:
        self.__dict__['ssl_intermediate'] = os.path.join(
            self.__dict__['datadir'], "certs/tor2web-intermediate.pem")
    if self.__dict__['ssl_dh'] is None:
        self.__dict__['ssl_dh'] = os.path.join(self.__dict__['datadir'],
                                               "certs/tor2web-dh.pem")
def main():
    """Process snowmelt source grids for one or more offices and dates.

    Parses command line options, resolves which office/project extents to
    process, expands the requested date or date range, runs the grid
    processing per date, and optionally scp's updated files to the
    configured target.
    """

    def get_src_dir_by_date(process_date):
        # Helper function to find the source data, which is stored in
        # a few different places on rsgis-base.
        if options.src_dir:
            return options.src_dir
        elif process_date.year > 2012:
            return config.SRC_DIR
        elif process_date.year == 2012:
            return config.ARCHIVE_DIR_2012
        else:
            month_path = process_date.strftime('%Y/%B')
            return os.path.join(config.ARCHIVE_DIR, month_path)

    def verbose_print(to_print):
        # Dry runs always print, as if --verbose were set.
        if options.verbose or options.dry_run:
            print(to_print)

    def parse_date(date_string):
        # YYYYMMDD -> datetime.
        return datetime.datetime.strptime(date_string, '%Y%m%d')

    # Track our script run time.
    start = timeit.default_timer()

    # Create an OptionParser to handle any command-line arguments and options.
    usage = ("usage: %prog [options] office")
    parser = OptionParser(usage=usage)

    # Command line options added here.
    parser.add_option('-v', '--verbose', dest='verbose',
                      action='store_true', default=False)
    parser.add_option(
        '-d', '--date', dest='process_date',
        default=datetime.datetime.now().strftime('%Y%m%d'),
        help='Date should be in YYYYMMDD format. Can also provide a range '
             'of dates with YYYYMMDD-YYYYMMDD format.')
    parser.add_option('-a', '--all', dest='all', action='store_true',
                      default=False,
                      help='Parse all exents defined in config.py')
    parser.add_option('--office', dest='office', default=None,
                      help='Parse all basins for a given office.')
    parser.add_option('--scp', dest='run_scp', action='store_true',
                      default=False,
                      help='Copy files to target location specfied in config '
                           'file upon completion of processing.')
    parser.add_option(
        '-p', '--project', dest='project', default=None,
        help='Parse extents for a given project, defined in PROJECT_EXTENTS. '
             '--scp is currently disabled for this option.')
    # Debugging options.
    parser.add_option('--dry-run', dest='dry_run', action='store_true',
                      default=False, help='Dry run of the script.')
    parser.add_option('-k', '--keep', dest='keep_tmp_dir', action='store_true',
                      default=False,
                      help='Keep all intermediate files. The script '
                           'removes intermediate files by default.')
    parser.add_option('-s', '--srcdir', dest='src_dir', default=None,
                      help='Use a custom directory for source data. Otherwise '
                           'uses directory defined in config file. ')

    options, args = parser.parse_args()

    # Only one of the following options may be used at a time.
    # The options disable the use of the office argument.
    no_arg_opts = ('all', 'project', 'office')
    no_arg_opt_count = 0
    for opt in no_arg_opts:
        if options.__dict__[opt]:
            no_arg_opt_count += 1
            # These options take no positional argument.
            if len(args) != 0:
                print('Error: Wrong number of arguments for --{0} option.\n'.
                      format(opt))
                parser.print_help()
                sys.exit(1)
    if no_arg_opt_count > 1:
        print(
            'Error: Only one of the following options may be used at a time: {0}'
            .format(' '.join(['--' + opt for opt in no_arg_opts])))
        parser.print_help()
        sys.exit(1)
    if no_arg_opt_count == 0 and len(args) != 1:
        print('Error: Script requires one office argument.\n')
        parser.print_help()
        sys.exit(1)

    # Grab parameters based on office inputs.
    inputs_list = []
    if options.all:
        for office in config.EXTENTS:
            inputs_list += [(office, config.EXTENTS[office])]
    elif options.office:
        office = options.office
        try:
            inputs_list += [(office, config.EXTENTS[office])]
        except KeyError:
            print(
                'Could not find extents list for office "{0}"'.format(office))
            sys.exit(1)
    elif options.project:
        try:
            extents_list = config.PROJECT_EXTENTS[options.project]
            # NOTE(review): this branch builds a 3-tuple while the
            # processing loop below unpacks 2-tuples — confirm the
            # --project path actually works end to end.
            inputs_list = [('projects', options.project, extents_list)]
        except KeyError:
            print('Could not find extents list for project "{0}"'.format(
                options.project))
            sys.exit(1)
    else:
        office = args[0]
        try:
            extents_list = config.EXTENTS[office]
            inputs_list = [(office, extents_list)]
        except KeyError:
            print(
                'Could not find extents list for office "{0}"'.format(office))
            sys.exit(1)

    # Parse out our processing date(s).
    process_dates = []
    try:
        match = DATE_REGEX.match(options.process_date)
        if match is not None:
            # Build out a list of dates to process.
            match_dict = match.groupdict()
            process_date = parse_date(match_dict['start_date'])
            end_date = parse_date(match_dict['end_date'])
            assert process_date < end_date
            while process_date <= end_date:
                process_dates += [process_date]
                process_date += datetime.timedelta(days=1)
        else:
            process_dates = [parse_date(options.process_date)]
    except:
        # Deliberate broad catch: any parse/assert failure becomes a
        # user-facing message (re-raised under --verbose for debugging).
        if options.verbose:
            raise
        print('Couldn\'t parse time input. Please use YYYYMMDD format, or '
              'YYYYMMDD-YYYYMMDD for a date range.')
        sys.exit(1)

    verbose_print('Process date(s): {0}'.format(options.process_date))

    # Run the actual grid processing for each set of inputs and dates.
    transfer_list = set()
    # Loop through by date, so we only need to do the source data
    # manipulation once per date.
    for process_date in process_dates:
        # Fetch and transform source data.
        unzip_dir = snowmelt.prepare_source_data_for_date(
            process_date, get_src_dir_by_date(process_date))
        if unzip_dir is None:
            print('Skipping date: {}'.format(
                process_date.strftime('%Y.%m.%d')))
            continue
        for input_list in inputs_list:
            office, extents_list = input_list
            verbose_print('-' * 64)
            verbose_print('{0} Watersheds:'.format(office.upper()))
            for extent in extents_list:
                verbose_print('{0}: {1}'.format(extent[0], extent[1]))
            verbose_print(
                'Processing extents for location {0}, date {1}'.format(
                    office, process_date))
            if not options.dry_run:
                new_data = snowmelt.process_extents(
                    office,
                    process_date + datetime.timedelta(hours=2),  # 2am.
                    unzip_dir,
                    extents_list,
                    options,
                )
                if new_data is not None:
                    transfer_list.add((office, new_data))
            else:
                # NOTE(review): dry runs add a bare office string, which the
                # (office, new_data) unpack in the scp loop below cannot
                # handle — confirm --dry-run and --scp are never combined.
                transfer_list.add(office)
        if not config.KEEP_PROCESSED_SRC_DATA and not options.dry_run:
            verbose_print('Removing temp unzipped dir: {0}'.format(unzip_dir))
            shutil.rmtree(unzip_dir)

    finish = timeit.default_timer()
    print('Finished Processing {0} {1} (Duration = {2})'.format(
        os.path.basename(__file__),
        datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
        str(datetime.timedelta(seconds=finish - start))))

    # Transfer any files we've updated during this run.
    if options.run_scp:
        if not transfer_list:
            print('No new files to transfer.')
        else:
            print('-' * 64)
            print('Transferring updated files:')
            for (office, new_data) in transfer_list:
                target_dir = config.SCP_TARGET_STR.format(office)
                command = 'scp {0} {1}'.format(new_data, target_dir)
                print(command)
                proc = subprocess.Popen(command,
                                        shell=True,
                                        stdout=subprocess.PIPE,
                                        stderr=subprocess.PIPE)
                stdout, stderr = proc.communicate()
                exit_code = proc.wait()
                print(stdout)
                if exit_code:
                    print('ERROR - could not transfer: {0}'.format(new_data))
def main(): """ Main entry point """ # Variables Globale global port global ip_bts global ip_home global HumanGreeter global essaie HumanGreeter = None # faceInfo = None # faceShapeInfo = None # faceExtraInfo = None # nom_personne = None # timeStamp = None # global ValReq ip_home = "192.168.1.43" ip_bts = "192.168.0.115" port = 9559 parser = OptionParser() parser.add_option("--pip", help=ip_bts, dest="pip") parser.add_option("--pport", help=port, dest="pport", type="int") parser.set_defaults( pip=ip_bts, pport=port) (opts, args_) = parser.parse_args() pip = opts.pip pport = opts.pport # We need this broker to be able to construct # NAOqi modules and subscribe to other modules # The broker must stay alive until the program exists myBroker = ALBroker("myBroker", "0.0.0.0", # listen to anyone 0, # find a free port and use it pip, # parent broker IP pport) # parent broker port # Warning: HumanGreeter must be a global variable # The name given to the constructor must be the name of the # variable HumanGreeter = HumanGreeterModule("HumanGreeter") try: while True: time.sleep(1) except KeyboardInterrupt: print print "Interrupted by user, shutting down" myBroker.shutdown() sys.exit(0)
if __name__ == '__main__':
    import sys

    # Without the NaCl bindings none of this can work; fail fast.
    if not HAS_CRYPTOSIGN:
        print('NaCl library must be installed for this to function.', file=sys.stderr)
        sys.exit(1)

    from optparse import OptionParser

    opt_parser = OptionParser()
    opt_parser.add_option('-f', '--file', dest='keyfile', help='file containing ssh key')
    opt_parser.add_option('-p', action='store_true', dest='printpub', default=False, help='print public key information')
    opts, _extra = opt_parser.parse_args()

    if opts.printpub:
        # Load the signing key from the given ssh key file and show its
        # public half.
        signing_key = SigningKey.from_ssh_key(opts.keyfile)
        print(signing_key.public_key())
    else:
        # -p is currently the only supported mode of operation.
        print("Print public key must be specified as it's the only option.")
        opt_parser.print_usage()
        sys.exit(1)
def main():
    """Command-line entry point for in-place processing of very large volumes.

    Parses e2proc3d-style options for operating on a single huge volume
    (e.g. a tomogram) fragment-by-fragment on disk.  The program is
    currently a stub: it always prints a notice and exits with status 1
    before touching any file.
    """
    progname = os.path.basename(sys.argv[0])
    usage = progname + """ [options] <inputfile>
This is a specialized version of e2proc3d.py targeted at performing a limited set of operations on
very large volumes in-place (such as tomograms) which may not readily fit into system memory.
Operations are performed by reading portions of the image, processing, then writing the portion
back to disk. Unlike e2proc3d.py you may pass only a single operation to the program for each
invocation, or behavior will be undefined. It will process a single volume in a single file
in-place.
"""
    parser = OptionParser(usage)
    parser.add_option(
        "--streaksubtract",
        type="string",
        help="This will subtract the histogram peak value along a single axis in the volume.",
        default=None)
    parser.add_option(
        "--process",
        metavar="processor_name:param1=value1:param2=value2",
        type="string",
        action="append",
        help="apply a processor named 'processorname' with all its parameters/values. WARNING: this works by operating on fragments of the overall image at a time, and some processors won't work properly this way.")
    parser.add_option(
        "--mult",
        metavar="f",
        type="float",
        help="Scales the densities by a fixed number in the output")
    parser.add_option(
        "--multfile",
        type="string",
        action="append",
        help="Multiplies the volume by another volume of identical size. This can be used to apply masks, etc.")
    parser.add_option("--add",
                      metavar="f",
                      type="float",
                      help="Adds a constant 'f' to the densities")
    parser.add_option("--trans",
                      metavar="dx,dy,dz",
                      type="string",
                      default=0,
                      help="Translate map by dx,dy,dz ")
    parser.add_option(
        "--ppid",
        type=int,
        help="Set the PID of the parent process, used for cross platform PPID",
        default=-1)
    parser.add_option(
        "--verbose",
        "-v",
        dest="verbose",
        action="store",
        metavar="n",
        type="int",
        default=0,
        help="verbose level [0-9], higher number means higher level of verboseness")
    (options, args) = parser.parse_args()

    # Program is not functional yet; bail out before any file access.
    print("Sorry, this program still under development. Not functional yet.")
    sys.exit(1)

    # --- unreachable until the stub exit above is removed ---
    try:
        # BUG FIX: optparse strips the program name from args, so the
        # input file is args[0], not args[1].
        hdr = EMData(args[0], 0, 1)
    except Exception:
        print("ERROR: Can't read input file header")
        sys.exit(1)

    # BUG FIX: the original tested options.mediansubtract, an option that
    # is never defined (AttributeError); the declared option is
    # --streaksubtract.
    if options.streaksubtract is not None:
        pass  # Added this to make python happy
def main(argv=None):
    """Raster-scan a sample with the nPoint piezo stages while logging
    diode/BPM readings at each point to a dated log file.

    Motor moves, shutter/beam checks and detector triggering are done
    through EPICS PVs; progress is both printed and appended to the log.
    """
    global simulate
    global fp
    global shut_open
    global current
    global x3h5capture
    #parse command line options
    usage = "usage: %prog [options]\nData files are written to /data/<year>/<month>/<day>/"
    parser = OptionParser(usage)
    parser.add_option("--detname", action="store", type="string", dest="detname", help="detector PV base")
    parser.add_option("--xstart", action="store", type="float", dest="xo", help="starting X position")
    parser.add_option("--xnumstep", action="store", type="int", dest="Nx", help="number of steps in X")
    parser.add_option("--xstepsize", action="store", type="float", dest="dx", help="step size in X")
    parser.add_option("--ystart", action="store", type="float", dest="yo", help="starting Y position")
    parser.add_option("--ynumstep", action="store", type="int", dest="Ny", help="number of steps in Y")
    parser.add_option("--ystepsize", action="store", type="float", dest="dy", help="step size in Y")
    parser.add_option("--wait", action="store", type="float", dest="stall", help="wait at each step [seconds]")
    parser.add_option("--simulate", action="store_true", dest="sim", default=False, help="simulate motor moves")
    parser.add_option("--checkbeam", action="store_true", dest="checkbeam", default=False, help="only acquire when beam is on")
    parser.add_option("--acqtime", action="store", type="float", dest="acqt", default=1, help="image integration time [sec]")
    parser.add_option("--acqnum", action="store", type="int", dest="acqn", default=1, help="frames per scan point")
    (options,args) = parser.parse_args()
    #open log file
    # Year/month/day/hour/minute used to build the dated log path.
    D0=time.localtime()[0]
    D1=time.localtime()[1]
    D2=time.localtime()[2]
    D3=time.localtime()[3]
    D4=time.localtime()[4]
    cd=os.getcwd()
    filedir = '/nfs/xf05id1/data/'
    # Log name ends with a component of the script's own path; the [5]
    # index assumes a fixed absolute-path depth -- fragile, kept as-is.
    if sys.argv[0][0]=='.':
        out_filename=filedir+repr(D0)+'/'+repr(D1)+'/'+repr(D2)+'/'+'log_'+repr(D3)+'_'+repr(D4)+'_'+\
            string.split(string.strip(sys.argv[0],'./'),'/')[0]+'.txt'
    else:
        out_filename=filedir+repr(D0)+'/'+repr(D1)+'/'+repr(D2)+'/'+'log_'+repr(D3)+'_'+repr(D4)+'_'+\
            string.split(string.strip(sys.argv[0],'./'),'/')[5]+'.txt'
    # Create the year/month/day directory hierarchy on demand.
    try:
        os.chdir(filedir+repr(D0))
    except OSError:
        try:
            os.mkdir(filedir+repr(D0))
        except Exception:
            print 'cannot create directory: '+'/data/'+repr(D0)
            sys.exit()
    try:
        os.chdir(filedir+repr(D0)+'/'+repr(D1))
    except OSError:
        try:
            os.mkdir(filedir+repr(D0)+'/'+repr(D1))
        except Exception:
            print 'cannot create directory: '+'/data/'+repr(D0)+'/'+repr(D1)
            sys.exit()
    try:
        os.chdir(filedir+repr(D0)+'/'+repr(D1)+'/'+repr(D2))
    except OSError:
        try:
            os.mkdir(filedir+repr(D0)+'/'+repr(D1)+'/'+repr(D2))
        except Exception:
            print 'cannot create directory: '+filedir+repr(D0)+'/'+repr(D1)+'/'+repr(D2)
            sys.exit()
    try:
        fp=open(out_filename,'a')
    except Exception:
        print 'cannot open file: '+out_filename
        sys.exit()
    os.chdir(cd)
    # First log line records the exact invocation.
    fp.write('#'+', '.join(sys.argv))
    fp.write('\n')
    H5path='/epics/data/201507/300124'
    #initialize PVs and callbacks
    if options.detname == None:
        detstr='XF:05IDA{IM:1}'
    else:
        detstr=options.detname
    ##### original script for Aerotech stages
    # xmotname='XF:05IDD-ES:1{Stg:Smpl1-Ax:X}'
    # ymotname='XF:05IDD-ES:1{Stg:Smpl1-Ax:Y}'
    # xmot=PV(xmotname+'Mtr.VAL')
    # xmot_cur=PV(xmotname+'Mtr.RBV')
    # ymot=PV(ymotname+'Mtr.VAL')
    # ymot_cur=PV(ymotname+'Mtr.RBV')
    ######
    #####modified for nPoint Stages
    xmot=PV('NPOINT:CH1:SET_POSITION.A')
    xmot_cur=PV('NPOINT:CH1:GET_POSITION')
    ymot=PV('NPOINT:CH2:SET_POSITION.A')
    ymot_cur=PV('NPOINT:CH2:GET_POSITION')
    #####
    shut_status=PV('SR:C05-EPS{PLC:1}Shutter:Sum-Sts')
    beam_current=PV('SR:C03-BI{DCCT:1}I:Total-I')
    bmot_cur=PV('XF:05IDA-OP:1{Mono:HDCM-Ax:P}Mtr.RBV')
    #transmission
    #check command line options
    if options.yo == None:
        print "must provide a starting point in the vertical"
        sys.exit()
    else:
        yo = options.yo
    if options.xo == None:
        print "must provide a starting point in the horizontal"
        sys.exit()
    else:
        xo = options.xo
    # Step sizes default to a near-zero value rather than zero,
    # presumably to avoid degenerate linspace ranges -- TODO confirm.
    if options.dx == None:
        dx = 0.00000001
    else:
        dx = options.dx
    if options.dy == None:
        dy = 0.00000001
    else:
        dy = options.dy
    if options.Nx == None:
        Nx = 0
    else:
        Nx = options.Nx
    if options.Ny == None:
        Ny = 0
    else:
        Ny = options.Ny
    if options.stall == None:
        twait = 0.
    else:
        twait = options.stall
    # Four diode current readbacks from the chosen detector IOC.
    diode0=PV(detstr+'Cur:I0-I')
    diode1=PV(detstr+'Cur:I1-I')
    diode2=PV(detstr+'Cur:I2-I')
    diode3=PV(detstr+'Cur:I3-I')
    #EVR output 2
    trigpv0='XF:05IDD-ES:1{EVR:1-Out:FP2}Src:Scale-SP'
    dett0=PV(trigpv0)
    #EVR output 3
    trigpv1='XF:05IDD-ES:1{EVR:1-Out:FP3}Src:Scale-SP'
    dett1=PV(trigpv1)
    deti=PV('XF:05IDA{IM:1}Per-SP')
    detinit=PV('XF:05IDA{IM:1}Cmd:Init')
    # White-beam, pink-beam and secondary-source-aperture slit wrappers.
    wb=srxslit.nsls2slit(tb='XF:05IDA-OP:1{Slt:1-Ax:T}',bb='XF:05IDA-OP:1{Slt:1-Ax:B}',ib='XF:05IDA-OP:1{Slt:1-Ax:I}',ob='XF:05IDA-OP:1{Slt:1-Ax:O}')
    pb=srxslit.nsls2slit(ib='XF:05IDA-OP:1{Slt:2-Ax:I}',ob='XF:05IDA-OP:1{Slt:2-Ax:O}')
    ssa=srxslit.nsls2slit(tb='XF:05IDB-OP:1{Slt:SSA-Ax:T}', bb='XF:05IDB-OP:1{Slt:SSA-Ax:B}', ob='XF:05IDB-OP:1{Slt:SSA-Ax:O}',ib='XF:05IDB-OP:1{Slt:SSA-Ax:I}')
    # Xspress3 acquisition / HDF5 plugin PVs.
    x3acq=PV('XSPRESS3-EXAMPLE:Acquire')
    x3erase=PV('XSPRESS3-EXAMPLE:ERASE')
    x3acqtime=PV('XSPRESS3-EXAMPLE:AcquireTime')
    x3acqnum=PV('XSPRESS3-EXAMPLE:NumImages')
    x3tmode=PV('XSPRESS3-EXAMPLE:TriggerMode')
    x3h5path=PV('XSPRESS3-EXAMPLE:HDF5:FilePath')
    x3h5fname=PV('XSPRESS3-EXAMPLE:HDF5:FileName')
    x3h5fnum=PV('XSPRESS3-EXAMPLE:HDF5:FileNumber')
    x3h5vdim=PV('XSPRESS3-EXAMPLE:HDF5:NumExtraDims')
    x3h5size=PV('XSPRESS3-EXAMPLE:HDF5:ExtraDimSizeN')
    x3h5d1=PV('XSPRESS3-EXAMPLE:HDF5:ExtraDimSizeX')
    x3h5d2=PV('XSPRESS3-EXAMPLE:HDF5:ExtraDimSizeY')
    #report ROIs for channels and counts at each point
    x3ch1roi0min=PV('XSPRESS3-EXAMPLE:C1_MCA_ROI1_LLM')
    x3ch1roi0max=PV('XSPRESS3-EXAMPLE:C1_MCA_ROI1_HLM')
    x3ch1roi0ct=PV('XSPRESS3-EXAMPLE:C1_ROI1:Value_RBV')
    x3ch1roi1min=PV('XSPRESS3-EXAMPLE:C1_MCA_ROI2_LLM')
    x3ch1roi1max=PV('XSPRESS3-EXAMPLE:C1_MCA_ROI2_HLM')
    x3ch1roi1ct=PV('XSPRESS3-EXAMPLE:C1_ROI2:Value_RBV')
    x3ch1roi2min=PV('XSPRESS3-EXAMPLE:C1_MCA_ROI3_LLM')
    x3ch1roi2max=PV('XSPRESS3-EXAMPLE:C1_MCA_ROI3_HLM')
    x3ch1roi2ct=PV('XSPRESS3-EXAMPLE:C1_ROI3:Value_RBV')
    x3ch2roi0min=PV('XSPRESS3-EXAMPLE:C2_MCA_ROI1_LLM')
    x3ch2roi0max=PV('XSPRESS3-EXAMPLE:C2_MCA_ROI1_HLM')
    x3ch2roi0ct=PV('XSPRESS3-EXAMPLE:C2_ROI1:Value_RBV')
    x3ch2roi1min=PV('XSPRESS3-EXAMPLE:C2_MCA_ROI2_LLM')
    x3ch2roi1max=PV('XSPRESS3-EXAMPLE:C2_MCA_ROI2_HLM')
    x3ch2roi1ct=PV('XSPRESS3-EXAMPLE:C2_ROI2:Value_RBV')
    x3ch2roi2min=PV('XSPRESS3-EXAMPLE:C2_MCA_ROI3_LLM')
    x3ch2roi2max=PV('XSPRESS3-EXAMPLE:C2_MCA_ROI3_HLM')
    x3ch2roi2ct=PV('XSPRESS3-EXAMPLE:C2_ROI3:Value_RBV')
    x3ch3roi0min=PV('XSPRESS3-EXAMPLE:C3_MCA_ROI1_LLM')
    x3ch3roi0max=PV('XSPRESS3-EXAMPLE:C3_MCA_ROI1_HLM')
    x3ch3roi0ct=PV('XSPRESS3-EXAMPLE:C3_ROI1:Value_RBV')
    x3ch3roi1min=PV('XSPRESS3-EXAMPLE:C3_MCA_ROI2_LLM')
    x3ch3roi1max=PV('XSPRESS3-EXAMPLE:C3_MCA_ROI2_HLM')
    x3ch3roi1ct=PV('XSPRESS3-EXAMPLE:C3_ROI2:Value_RBV')
    x3ch3roi2min=PV('XSPRESS3-EXAMPLE:C3_MCA_ROI3_LLM')
    x3ch3roi2max=PV('XSPRESS3-EXAMPLE:C3_MCA_ROI3_HLM')
    x3ch3roi2ct=PV('XSPRESS3-EXAMPLE:C3_ROI3:Value_RBV')
    #claim ROI 4 for our own use. we will integrate over all 4096 channels.
    x3ch1roi3min=PV('XSPRESS3-EXAMPLE:C1_MCA_ROI4_LLM')
    x3ch1roi3max=PV('XSPRESS3-EXAMPLE:C1_MCA_ROI4_HLM')
    x3ch1roi3ct=PV('XSPRESS3-EXAMPLE:C1_ROI4:Value_RBV')
    x3ch2roi3min=PV('XSPRESS3-EXAMPLE:C2_MCA_ROI4_LLM')
    x3ch2roi3max=PV('XSPRESS3-EXAMPLE:C2_MCA_ROI4_HLM')
    x3ch2roi3ct=PV('XSPRESS3-EXAMPLE:C2_ROI4:Value_RBV')
    x3ch3roi3min=PV('XSPRESS3-EXAMPLE:C3_MCA_ROI4_LLM')
    x3ch3roi3max=PV('XSPRESS3-EXAMPLE:C3_MCA_ROI4_HLM')
    x3ch3roi3ct=PV('XSPRESS3-EXAMPLE:C3_ROI4:Value_RBV')
    # Prime the motor readbacks so later .get() calls return promptly.
    xmot_cur.get()
    ymot_cur.get()
    norm0=PV('XF:05IDD-BI:1{BPM:01}.S20')
    norm1=PV('XF:05IDD-BI:1{BPM:01}.S21')
    norm2=PV('XF:05IDD-BI:1{BPM:01}.S22')
    norm3=PV('XF:05IDD-BI:1{BPM:01}.S23')
    # Callbacks update module-level state (e.g. shut_open, current, tar
    # deadband flags) -- defined elsewhere in this file; TODO confirm.
    xmot_cur.add_callback(cbfx)
    ymot_cur.add_callback(cbfy)
    shut_status.add_callback(cbf_shut)
    beam_current.add_callback(cbf_curr)
    xmot_cur.run_callbacks()
    ymot_cur.run_callbacks()
    shut_status.run_callbacks()
    beam_current.run_callbacks()
    # x3h5path.put(H5path)
    # x3h5fname.put(repr(D3)+'_'+repr(D4)+'_')
    # x3h5fnum.put(0)
    # x3acqtime.put(options.acqt)
    # x3acqnum.put(options.acqn)
    # x3tmode.put(1)
    # x3ch1roi3min.put(0)
    # x3ch2roi3min.put(0)
    # x3ch3roi3min.put(0)
    # x3ch1roi3max.put(4096)
    # x3ch2roi3max.put(4096)
    # x3ch3roi3max.put(4096)
    #h5 set up
    # x3h5vdim.put(2)
    # x3h5size.put(options.acqn)
    # x3h5d1.put(options.Nx+1)
    # x3h5d2.put(options.Ny+1)
    dett0.put(3)
    dett1.put(3)
    #overhead on triggering F460
    deti.put(float(options.acqn)*options.acqt*1.)
    detinit.put(1)
    # ---- write the scan header (screen + log file) ----
    # NOTE: 'str' shadows the builtin throughout; kept as-is.
    str='#NSLS-II SRX'+time.asctime()
    fp.write(str)
    fp.write('\n')
    str='#Start time is '+time.asctime()
    print str
    fp.write(str)
    fp.write('\n')
    str='# x: %(hs)6.4f ; y: %(vs)6.4f ; ROI1 %(roi1i)d:%(roi1a)d ; ROI2 %(roi2i)d:%(roi2a)d ; ROI3 %(roi3i)d:%(roi3a)d'%\
        {"hs":xmot_cur.get(),"vs":ymot_cur.get(), 'roi1i':x3ch1roi0min.get(), 'roi1a':x3ch1roi0max.get(), 'roi2i':x3ch1roi1min.get(), 'roi2a':x3ch1roi1max.get(), 'roi3i':x3ch1roi2min.get(), 'roi3a':x3ch1roi2max.get()}
    print str
    fp.write(str)
    fp.write('\n')
    roits=x3ch3roi3ct.timestamp
    str='# SSA HCEN: %(WBHC)f ; SSA HSIZE: %(WBHS)f ; SSA VCEN: %(WBVC)f ; SSA VSIZE: %(WBVS)f'%\
        {"WBHC":ssa.hcen(), "WBHS":ssa.hsize(), "WBVC":ssa.vcen(), "WBVS":ssa.vsize()}
    print str
    fp.write(str)
    fp.write('\n')
    str='# Bragg: %(B)6.4f ; Energy: %(E)6.4f ; WB HCEN: %(WBHC)f ; WB HSIZE: %(WBHS)f ; WB VCEN: %(WBVC)f ; WB VSIZE: %(WBVS)f'%\
        {"B":bmot_cur.get(), "E": 12398. / (2 * 3.1355 * math.sin(bmot_cur.get()/180.*3.1416)), "WBHC":wb.hcen(), "WBHS":wb.hsize(), "WBVC":wb.vcen(), "WBVS":wb.vsize()}
    print str
    fp.write(str)
    fp.write('\n')
    str="# -------------------------------------------------------------------- "
    print str
    fp.write(str)
    fp.write('\n')
    str='#[point #]\tX pos\t\tY pos\tch 1\t\tch 2\t\tch 3\t\tch 4\tdBPM1\t\tdBPM2\t\tdBPM3\t\tdBPM4\t\troi0\t\troi1\t\troi2\t\troi3\t\ttime'
    print str
    fp.write(str)
    fp.write('\n')
    if options.sim is True:
        str=" -----simulating motor moves and bursts-----"
        print str
        fp.write(str)
        fp.write('\n')
    else:
        time.sleep(2)
        x3h5capture.put(1)
    #number of rows and columns completed by scan
    Ncol=Nrow=0
    LN=0
    #diode readback is now limiting factor for scan speed
    oldsig=0.
    #when the cryocooler kicks in, the beam is unusable for ~3200sec
    cryo=PV('XF:05IDA-OP:1{Mono:HDCM}T:LN2Out-I')
    # Block until the cryo temperature PV connects and returns a value.
    ct=cryo.get()
    while( ct is None):
        time.sleep(0.05)
        ct=cryo.get()
    t0=time.time()
    cryocounter=0
    shut_toggle=False
    #nested loops for scanning z,x,y
    # 'tar' and 'dbd' are module-level state shared with the motor
    # callbacks (target positions + deadband) -- TODO confirm.
    for y in np.linspace(yo,yo+((Ny)*dy),Ny+1):
        tar[1][0]=y
        tar[1][1]=1
        if options.sim is False:
            ymot.put(tar[1][0])
        if indeadband(float(tar[1][0]),float(ymot_cur.get()),dbd)==1:
            tar[1][1] = 0
        # Serpentine scan: even rows go left-to-right, odd rows reverse.
        if Nrow%2==0:
            xs=0.+xo
            xe=((Nx+1)*dx)+xo-dx
            xi=dx
        else:
            xs=((Nx)*dx)+xo
            xe=0.+xo
            xi=-dx
        for x in np.linspace(xs,xe,Nx+1):
            tar[0][0]=x
            tar[0][1]=1
            if indeadband(float(tar[0][0]),float(xmot_cur.get()),dbd)==1:
                tar[0][1]=0
            if options.sim is False:
                xmot.put(tar[0][0])
            # Wait until both axes report in-position (flags cleared by
            # the position callbacks).
            while ((tar[0][1] == 1) or (tar[1][1] == 1)):
                time.sleep(0.01)
            signal0=signal1=signal2=signal3=0.
            nsig0=nsig1=nsig2=nsig3=0.
            sig0=sig1=sig2=sig3=0.
            time.sleep(twait)
            # Pause while the cryocooler appears active (temperature dip).
            while ( options.checkbeam and (cryo.get() < (ct - 0.1)) ):
                print "Stopped. Detected possible cryocooler activation."
                time.sleep(1)
                cryocounter=cryocounter+1
            #if the above is true for five cycles, the cryocooler was on, wait another 10min
            if ( options.checkbeam and cryocounter > 300 ):
                print "Detected cryocooler activation, waiting 10min"
                time.sleep(600)
                cryocounter=0
            # NOTE(review): beam_current is a PV object and is never ==
            # False; only shut_open can end this wait -- looks like a
            # latent bug, kept as-is.
            while ( options.checkbeam and (shut_open == False or beam_current == False)):
                print "Stopped. Waiting for scan conditions to return to normal."
                if shut_open==False:
                    shut_toggle=True
                time.sleep(10.)
            if shut_toggle==True:
                print "Entering optics conditioning period. Waiting 5min"
                time.sleep(300)
                shut_toggle=False
            if options.sim is False:
                dett1.put(4)
                # x3erase.put(1)
                # Spin until the BPM normalization channels report data.
                while nsig0==0.:
                    nsig0=float(norm0.get())
                while nsig1==0.:
                    nsig1=float(norm1.get())
                while nsig2==0.:
                    nsig2=float(norm2.get())
                while nsig3==0.:
                    nsig3=float(norm3.get())
                sig0=0
                sig1=0
                sig2=0
                sig3=0
                # Trigger acqn frames; gate high for 25% of the dwell.
                for i in range(0,options.acqn):
                    dett0.put(4)
                    time.sleep(options.acqt*.25)
                    dett0.put(3)
                    # x3acq.put(1)
                    # while ( x3ch3roi3ct.get()==0.0 or x3ch3roi3ct.timestamp==roits):
                    #     time.sleep(0.02)
                    # sig0=sig0+x3ch1roi0ct.get()+x3ch2roi0ct.get()+x3ch3roi0ct.get()
                    # sig1=sig1+x3ch1roi1ct.get()+x3ch2roi1ct.get()+x3ch3roi1ct.get()
                    # sig2=sig2+x3ch1roi2ct.get()+x3ch2roi2ct.get()+x3ch3roi2ct.get()
                    # sig3=sig3+x3ch1roi3ct.get()+x3ch2roi3ct.get()+x3ch3roi3ct.get()
                    # roits=x3ch3roi3ct.timestamp
                    time.sleep(options.acqt*.75)
                # Re-read if the diode has not updated since last point.
                signal0=diode0.get()
                if signal0==oldsig:
                    time.sleep(0.05)
                    signal0=diode0.get()
                oldsig=signal0
                signal1=diode1.get()
                signal2=diode2.get()
                signal3=diode3.get()
                dett1.put(3)
            tn=time.time()-t0
            if options.sim is False:
                str='%(X)06d %(XC)9.4f %(YC)9.4f %(d1)10.7e %(d2)10.7e %(d3)10.7e %(d4)10.7e %(n0)10.7e %(n1)10.7e %(n2)10.7e %(n3)10.7e %(s0)10.7e %(s1)10.7e %(s2)10.7e %(s3)10.7e %(time)9.2f'%{ 'X':Ncol, 'XC':xmot_cur.get(),"YC":ymot_cur.get(), "d1":float(signal0), "d2":float(signal1), "d3":float(signal2),"d4":float(signal3), 'n0':nsig0, 'n1':nsig1, 'n2':nsig2, 'n3':nsig3, "s0":sig0,"s1":sig1,"s2":sig2,"s3":sig3, "time":tn}
                print str
                fp.write(str)
                fp.write('\n')
            else:
                str='%(X)06d %(XC)8.4f %(YC)8.4f %(d1)10.7e %(d2)10.7e %(d3)10.7e %(d4)10.7e'%{"X":int(Ncol),"XC":tar[0][0], "YC":tar[1][0], "d1":float(signal0), "d2":float(signal1), "d3":float(signal2),"d4":float(signal3)}
                print str
                fp.write(str)
                fp.write('\n')
            Ncol=Ncol+1
        Nrow=Nrow+1
    str='End time is '+time.asctime()
    print str
    fp.write(str)
    fp.write('\n')
    fp.close()
    return 0
""" parser = OptionParser(usage=usage) parser.add_option('-v', '--verbose', dest='verbose', action="store", type='int', default=1, help='Verbose mode [default: %default (semi-quiet)]') parser.add_option('-j', '--jobs', dest='jobs', action="store", type='int', default=1, help=('Number of jobs to run in parallel ' '[default: single]')) parser.add_option('-o', '--outDir', dest='outDir', default='svlplots', help='Output directory [default: %default]') parser.add_option('-c', '--cache', dest='cache', action="store_true", help='Read from cache') (opt, args) = parser.parse_args() exit(main(args, opt))
def _rosnode_cmd_kill(argv):
    """
    Implements rosnode 'kill' command.

    Kills the nodes named on the command line (after name resolution),
    all nodes with -a/--all, or - with no arguments - one node chosen
    from an interactive numbered menu of running nodes.

    @param argv: full command-line argument vector (node names start at argv[2])
    @return: 0 on success, 1 if any node failed to be killed
    @raise ROSNodeException: if user enters in unrecognized nodes
    """
    args = argv[2:]
    parser = OptionParser(usage="usage: %prog kill [node]...", prog=NAME)
    parser.add_option("-a",
                      "--all",
                      dest="kill_all",
                      default=False,
                      action="store_true",
                      help="kill all nodes")
    (options, args) = parser.parse_args(args)
    if options.kill_all:
        if args:
            parser.error("invalid arguments with kill all (-a) option")
        args = get_node_names()
        args.sort()
    elif not args:
        # Interactive mode: present a numbered menu of running nodes.
        node_list = get_node_names()
        node_list.sort()
        if not node_list:
            print("No nodes running", file=sys.stderr)
            return 0
        sys.stdout.write('\n'.join(
            ["%s. %s" % (i + 1, n) for i, n in enumerate(node_list)]))
        sys.stdout.write(
            "\n\nPlease enter the number of the node you wish to kill.\n> ")
        selection = ''
        while not selection:
            selection = sys.stdin.readline().strip()
            try:
                selection = int(selection)
                # BUG FIX: the original only printed an error for an
                # out-of-range number without resetting `selection`, so a
                # negative entry escaped the loop and silently killed the
                # wrong node via node_list[selection - 1]. Reset so we
                # re-prompt, and also reject numbers past the menu end.
                if selection <= 0 or selection > len(node_list):
                    print(
                        "ERROR: invalid selection. Please enter a number (ctrl-C to cancel)"
                    )
                    selection = ''
            except ValueError:
                # Non-numeric input.
                print("ERROR: please enter a number (ctrl-C to cancel)")
                sys.stdout.flush()
                selection = ''
        args = [node_list[selection - 1]]
    else:
        # validate args
        args = [rosgraph.names.script_resolve_name(ID, n) for n in args]
        node_list = get_node_names()
        unknown = [n for n in args if not n in node_list]
        if unknown:
            raise ROSNodeException("Unknown node(s):\n" +
                                   '\n'.join([" * %s" % n for n in unknown]))
    if len(args) > 1:
        print("killing:\n" + '\n'.join([" * %s" % n for n in args]))
    else:
        print("killing %s" % (args[0]))
    success, fail = kill_nodes(args)
    if fail:
        print("ERROR: Failed to kill:\n" +
              '\n'.join([" * %s" % n for n in fail]),
              file=sys.stderr)
        return 1
    print("killed")
    return 0
def main():
    """Sanger primer design driver.

    Reads a tab-separated variant table (-i), fetches a 500 bp flanking
    window around each variant from the reference/sequence data (-s),
    runs primer design per variant with the given parameter file (-p),
    and writes one Excel sheet per variant plus cumulative text outputs
    named after the run name (-n).
    """
    usage = "usage: %prog [options] -i "
    description = "automatically Sanger primer design version1.0. "
    optparser = OptionParser(version="%prog 0.1",
                             description=description,
                             usage=usage,
                             add_help_option=False)
    optparser.add_option("-h",
                         "--help",
                         action="help",
                         help="Show this help message and exit.")
    optparser.add_option(
        "-i",
        "--input",
        dest="INPUT",
        type="string",
        action='store',
        help="Input files for primer design provided by clinical team.")
    optparser.add_option("-p",
                         "--para",
                         dest="PARA",
                         type="string",
                         action='store',
                         help="Iuput PARAMETERS file.")
    optparser.add_option("-n",
                         "--name",
                         dest="NAME",
                         type="string",
                         action='store',
                         help="Name for this run.")
    optparser.add_option('-s',
                         "--sequence",
                         dest='SEQ',
                         type='string',
                         action='store',
                         help='Input fastq/fasta sequence data.')
    (options, args) = optparser.parse_args()
    INPUT = options.INPUT
    NAME = options.NAME
    PARA = options.PARA
    SEQ = options.SEQ
    # Check input: all four options are mandatory.
    if not PARA or not INPUT or not NAME or not SEQ:
        optparser.print_help()
        sys.exit(1)
    # Universal M13 sequencing adaptors prepended to designed primers.
    M13F = 'GTAAAACGACGGCCAG'
    M13R = 'CAGGAAACAGCTATGAC'
    adeptor = [M13F, M13R]
    # read input file
    data = pd.read_csv(INPUT, sep='\t')
    print(data.to_string())
    path = os.getcwd()
    result = []
    for index, row in data.iterrows():
        print(index)
        chrom = row['CHROM']
        pos = row['POS']
        snpLen = len(row['REF'])
        # get genomic sequence by chrom:start,end; window_size=500
        window_size = 500
        start = int(pos) - window_size
        end = int(pos) + window_size
        seqs = fetchSequence(str(chrom), str(start - 1), str(end), SEQ)
        seqs_input = seqs.split('\n')[1]
        # Bracket the REF allele so the design tool can see the target.
        seqs_output = seqs_input[0:500] + '[' + seqs_input[500:(
            500 + snpLen)] + ']' + seqs_input[(500 + snpLen):]
        # BUG FIX: DataFrame.ix was removed in pandas 1.0; .loc is the
        # label-based equivalent for the iterrows() index.
        data.loc[index, 'FLANK_SEQ'] = seqs_output
        data_sub = data.iloc[[index]]
        result = primerDesign2(window_size + 1, snpLen, seqs_input, PARA,
                               NAME, adeptor, data_sub)
        pathfile = path + '/' + NAME + '_primer_design_output.xlsx'
        pathfiletxt = path + '/' + NAME + '_primer_design_output.txt'
        if index == 0:
            # First variant creates the workbook and the text file.
            result.to_excel(pathfile, sheet_name=str(index), index=False)
            result.to_csv(pathfiletxt, index=None, sep='\t')
        else:
            # Later variants append a new sheet and append to the text file.
            # (Simplified from the original redundant
            # `continue` + `if index != 0` pair -- behavior unchanged.)
            book = load_workbook(pathfile)
            writer = pd.ExcelWriter(pathfile, engine='openpyxl')
            writer.book = book
            result.to_excel(writer, sheet_name=str(index), index=False)
            result.to_csv(pathfiletxt,
                          index=None,
                          sep='\t',
                          mode='a',
                          header=None)
            writer.save()
            writer.close()
    print(data.to_string())
    data.to_csv(NAME + '_primer_design.flanks500.txt', index=None, sep='\t')
def readCommand( argv ):
    """Processes the command used to run from the command line."""
    from optparse import OptionParser
    opt_parser = OptionParser(USAGE_STRING)

    # (flags, keyword arguments) for every supported option, in the same
    # order as before so --help output is unchanged.
    option_specs = [
        (('-c', '--classifier'), dict(help=default('The type of classifier'), choices=['mostFrequent', 'nb', 'naiveBayes', 'perceptron', 'mira', 'minicontest'], default='mostFrequent')),
        (('-d', '--data'), dict(help=default('Dataset to use'), choices=['digits', 'faces'], default='digits')),
        (('-t', '--training'), dict(help=default('The size of the training set'), default=100, type="int")),
        (('-f', '--features'), dict(help=default('Whether to use enhanced features'), default=False, action="store_true")),
        (('-o', '--odds'), dict(help=default('Whether to compute odds ratios'), default=False, action="store_true")),
        (('-1', '--label1'), dict(help=default("First label in an odds ratio comparison"), default=0, type="int")),
        (('-2', '--label2'), dict(help=default("Second label in an odds ratio comparison"), default=1, type="int")),
        (('-w', '--weights'), dict(help=default('Whether to print weights'), default=False, action="store_true")),
        (('-k', '--smoothing'), dict(help=default("Smoothing parameter (ignored when using --autotune)"), type="float", default=2.0)),
        (('-a', '--autotune'), dict(help=default("Whether to automatically tune hyperparameters"), default=False, action="store_true")),
        (('-i', '--iterations'), dict(help=default("Maximum iterations to run training"), default=3, type="int")),
        (('-s', '--test'), dict(help=default("Amount of test data to use"), default=TEST_SET_SIZE, type="int")),
    ]
    for flags, kwargs in option_specs:
        opt_parser.add_option(*flags, **kwargs)

    options, otherjunk = opt_parser.parse_args(argv)
    if otherjunk:
        raise Exception('Command line input not understood: ' + str(otherjunk))
    args = {}

    # Set up variables according to the command line input.
    print ("Doing classification")
    print ("--------------------")
    print ("data:\t\t" + options.data)
    print ("classifier:\t\t" + options.classifier)
    if options.classifier == 'minicontest':
        print ("using minicontest feature extractor")
    else:
        print ("using enhanced features?:\t" + str(options.features))
    print ("training set size:\t" + str(options.training))

    # Pick the image printer, feature extractor and label set per dataset.
    if options.data == "digits":
        printImage = ImagePrinter(DIGIT_DATUM_WIDTH, DIGIT_DATUM_HEIGHT).printImage
        featureFunction = enhancedFeatureExtractorDigit if options.features else basicFeatureExtractorDigit
        if options.classifier == 'minicontest':
            featureFunction = contestFeatureExtractorDigit
        legalLabels = range(10)
    elif options.data == "faces":
        printImage = ImagePrinter(FACE_DATUM_WIDTH, FACE_DATUM_HEIGHT).printImage
        featureFunction = enhancedFeatureExtractorFace if options.features else basicFeatureExtractorFace
        legalLabels = range(2)
    else:
        print ("Unknown dataset", options.data)
        print (USAGE_STRING)
        sys.exit(2)

    # Sanity-check numeric options.
    if options.training <= 0:
        print ("Training set size should be a positive integer (you provided: %d)" % options.training)
        print (USAGE_STRING)
        sys.exit(2)

    if options.smoothing <= 0:
        print ("Please provide a positive number for smoothing (you provided: %f)" % options.smoothing)
        print (USAGE_STRING)
        sys.exit(2)

    if options.odds:
        if options.label1 not in legalLabels or options.label2 not in legalLabels:
            print ("Didn't provide a legal labels for the odds ratio: (%d,%d)" % (options.label1, options.label2))
            print (USAGE_STRING)
            sys.exit(2)

    # Instantiate the requested classifier.
    if options.classifier == "mostFrequent":
        classifier = mostFrequent.MostFrequentClassifier(legalLabels)
    elif options.classifier in ("naiveBayes", "nb"):
        classifier = naiveBayes.NaiveBayesClassifier(legalLabels)
        classifier.setSmoothing(options.smoothing)
        if options.autotune:
            print ("using automatic tuning for naivebayes")
            classifier.automaticTuning = True
        else:
            print ("using smoothing parameter k=%f for naivebayes" % options.smoothing)
    elif options.classifier == "perceptron":
        classifier = perceptron.PerceptronClassifier(legalLabels, options.iterations)
    elif options.classifier == "mira":
        classifier = mira.MiraClassifier(legalLabels, options.iterations)
        if options.autotune:
            print ("using automatic tuning for MIRA")
            classifier.automaticTuning = True
        else:
            print ("using default C=0.001 for MIRA")
    elif options.classifier == 'minicontest':
        import minicontest
        classifier = minicontest.contestClassifier(legalLabels)
    else:
        print ("Unknown classifier:", options.classifier)
        print (USAGE_STRING)
        sys.exit(2)

    args['classifier'] = classifier
    args['featureFunction'] = featureFunction
    args['printImage'] = printImage

    return args, options
def main(self):
    """Decompiler entry point.

    Parses command-line options, extends sys.path so the version-specific
    LuaJIT opcode modules can be imported, then decompiles either a single
    file (-f) or a whole directory tree (-r).  Returns 0 in all cases.
    """
    # Parser arguments
    parser = OptionParser()
    # Single file input target. Not to be used with -r
    parser.add_option("-f", "--file", type="string", dest="file_name", default="", help="input file name", metavar="FILE")
    # Single file output destination. Not to be used with -r
    parser.add_option("-o", "--output", type="string", dest="output_file", default="", help="output file for writing", metavar="FILE")
    # Directory in which to recurse and process all files. Not to be used with -f
    parser.add_option("-r", "--recursive", type="string", dest="folder_name", default="", help="recursively decompile lua files", metavar="FOLDER")
    # Directory to output processed files during recursion. Not to be used with -f
    parser.add_option("-d", "--dir_out", type="string", dest="folder_output", default="", help="directory to output decompiled lua scripts", metavar="FOLDER")
    # Global override of LuaJIT version, ignores -j
    parser.add_option("-j", "--jit_version", type="string", dest="luajit_version", default="", help="override LuaJIT version, default 2.1, now supports 2.0, 2.1")
    # 'Profiles' that hardcode LuaJIT versions per file
    parser.add_option("-v", "--version_config_list", type="string", dest="version_config_list", default="version_default", help="LuaJIT version config list to use")
    # Prevent most integrity asserts from canceling decompilation
    parser.add_option("-c", "--catch_asserts", action="store_true", dest="catch_asserts", default=False, help="attempt inline error reporting without breaking decompilation")
    # Output a log of exceptions and information during decompilation
    parser.add_option("-l", "--enable_logging", action="store_true", dest="enable_logging", default=False, help="log info and exceptions to external file while decompiling")
    (self.options, args) = parser.parse_args()

    # Initialize opcode set for required LuaJIT version
    basepath = os.path.dirname(sys.argv[0])
    if basepath == "":
        basepath = "."
    if self.options.luajit_version == "":
        # No explicit override: look the version up in the config list
        # keyed by the input file name.
        version_required = self.check_for_version_config(self.options.file_name)
        sys.path.append(basepath + "/ljd/rawdump/luajit/" + str(version_required) + "/")
    else:
        self.set_version_config(float(self.options.luajit_version))
        sys.path.append(basepath + "/ljd/rawdump/luajit/" + self.options.luajit_version + "/")

    # LuaJIT version is known after the argument is parsed, so delay module import.
    import ljd.rawdump.parser
    import ljd.pseudoasm.writer
    import ljd.ast.builder
    import ljd.ast.validator
    import ljd.ast.locals
    import ljd.ast.slotworks
    import ljd.ast.unwarper
    import ljd.ast.mutator
    import ljd.lua.writer

    # Send assert catch argument to modules
    if self.options.catch_asserts:
        ljd.ast.unwarper.catch_asserts = True
        ljd.ast.slotworks.catch_asserts = True
        ljd.ast.validator.catch_asserts = True
    # Keep a handle on the package so later stages can reach the writer.
    self.ljd = ljd

    # Start logging if required
    if self.options.enable_logging:
        logger = logging.getLogger('LJD')
        logger.setLevel(logging.INFO)
        # One time-stamped log file per run, plus console INFO output.
        fh = MakeFileHandler(f'logs/{datetime.now().strftime("%Y_%m_%d_%H_%M_%S")}.log')
        fh.setLevel(logging.DEBUG)
        formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
        fh.setFormatter(formatter)
        logger.addHandler(fh)
        console = logging.StreamHandler()
        console.setLevel(logging.INFO)
        formatter = logging.Formatter('%(name)-12s: %(levelname)-8s %(message)s')
        console.setFormatter(formatter)
        logger.addHandler(console)
    else:
        logger = None

    # Recursive batch processing
    if self.options.folder_name:
        if self.options.version_config_list != "version_default":
            print(self.options)
            print("Version config lists are not supported in recursive directory mode.")
            if self.options.enable_logging:
                logger.info("Exit")
            return 0
        for path, _, filenames in os.walk(self.options.folder_name):
            for file in filenames:
                if file.endswith('.lua'):
                    full_path = os.path.join(path, file)
                    if self.options.enable_logging:
                        logger.info(full_path)
                    try:
                        self.decompile(full_path)
                        # Mirror the input tree layout under the output dir.
                        new_path = os.path.join(self.options.folder_output,
                                                os.path.relpath(full_path, self.options.folder_name))
                        os.makedirs(os.path.dirname(new_path), exist_ok=True)
                        self.write_file(new_path)
                        if self.options.enable_logging:
                            logger.info("Success")
                    except KeyboardInterrupt:
                        if self.options.enable_logging:
                            logger.info("Exit")
                        return 0
                    except:
                        # Best-effort batch mode: log and continue with the
                        # next file.
                        if self.options.enable_logging:
                            logger.info("Exception")
                            logger.debug('', exc_info=True)
        return 0

    # Single file processing
    if self.options.file_name == "":
        print(self.options)
        parser.error("Options -f or -r are required.")
        return 0
    self.decompile(self.options.file_name)
    if self.options.output_file:
        self.write_file(self.options.output_file)
    else:
        # No -o given: dump the decompiled source to stdout.
        self.ljd.lua.writer.write(sys.stdout, self.ast)
    return 0
import sys
import csv
from optparse import OptionParser

# Command-line interface: input measurement file + ETX acceptance limit.
optParser = OptionParser()
optParser.add_option("-f", "--file", dest="raw_links_path", type="string", help="Path to unfiltered Links XML file")
optParser.add_option("-e", "--metric_etx_limit", type="float", dest="etx_metric_limit", help="ETX-metric limit for accepted links")
(options, args) = optParser.parse_args()

if not options.raw_links_path or not options.etx_metric_limit:
    # NOTE(review): a falsy ETX limit (0.0) is treated as missing here,
    # and the message only mentions the input file even when -e is the
    # one absent -- confirm intended.
    print("Failed: Missing input measurement file")
    optParser.print_help()
    sys.exit(-1)

# Accumulators for the filtering pass.
used_content = list()
num_filtered = 0
num_used = 0
header = str()

with open(options.raw_links_path, 'r') as f:
    reader = csv.reader(f, delimiter=',', quoting=csv.QUOTE_ALL)
    # First row is the column header.
    header = next(reader)
def main(args=None): """command line front-end function""" # parse command line arguments args = args or sys.argv[1:] usage = "Usage: %prog [options] [destination]" parser = OptionParser(usage=usage) parser.add_option('--develop', dest='develop', action='store_true', default=False, help='setup in development mode') options, args = parser.parse_args(args) # Print the python version print 'Python: %s' % sys.version # The data is kept in the same directory as the script source = os.path.abspath(os.path.dirname(__file__)) # directory to install to if not len(args): destination = source elif len(args) == 1: destination = os.path.abspath(args[0]) else: parser.print_usage() parser.exit(1) os.chdir(source) # check for existence of necessary files if not os.path.exists('virtualenv'): print "File not found: virtualenv" sys.exit(1) PACKAGES_FILE = 'PACKAGES' if not os.path.exists(PACKAGES_FILE) and destination != source: PACKAGES_FILE = os.path.join(destination, PACKAGES_FILE) if not os.path.exists(PACKAGES_FILE): print "File not found: PACKAGES" # packages to install in dependency order PACKAGES = file(PACKAGES_FILE).read().split() assert PACKAGES # create the virtualenv and install packages env = os.environ.copy() env.pop('PYTHONHOME', None) returncode = call([ sys.executable, os.path.join('virtualenv', 'virtualenv.py'), destination ], env=env) if returncode: print 'Failure to install virtualenv' sys.exit(returncode) if options.develop: python = entry_point_path(destination, 'python') for package in PACKAGES: oldcwd = os.getcwd() os.chdir(package) returncode = call([python, 'setup.py', 'develop']) os.chdir(oldcwd) if returncode: break else: pip = entry_point_path(destination, 'pip') returncode = call([pip, 'install'] + PACKAGES, env=env) if returncode: print 'Failure to install packages' sys.exit(returncode) # create a front end runner that is path-independent template = """#!/bin/bash unset PYTHONHOME %(PYTHON)s %(MOZMILL)s $@ """ variables = {'PYTHON': 
esc(entry_point_path(destination, 'python'))} for script in 'mozmill', 'mozmill-restart': path = os.path.join(destination, script + '.sh') f = file(path, 'w') variables['MOZMILL'] = esc(python_script_path(destination, script)) print >> f, template % variables f.close() if not is_windows(): os.chmod(path, 0755)
continue for dn in sorted(os.listdir(envs_dir)): if dn.startswith('.'): continue prefix = join(envs_dir, dn) if isdir(prefix): prefix = join(envs_dir, dn) yield prefix yield config.root_dir if __name__ == '__main__': from optparse import OptionParser p = OptionParser(usage="usage: %prog [options] DIST/FN [ADDITIONAL ARGS]") p.add_option('-p', '--prefix', action="store", default=sys.prefix, help="prefix (defaults to %default)") opts, args = p.parse_args() if len(args) == 0: p.error('at least one argument expected') fn = args[0] if not fn.endswith('.tar.bz2'): fn += '.tar.bz2' p = launch(fn, opts.prefix, args[1:]) print('PID:', p.pid)
op.add_option( '--run-slow-tests', dest='run_slow_tests', action='store_true', help='run particularly slow tests as well as average-speed tests') op.add_option( '--xul-info', dest='xul_info_src', help='config data for xulRuntime (avoids search for config/autoconf.mk)' ) op.add_option( '--no-extensions', dest='no_extensions', action='store_true', help='run only tests conforming to the ECMAScript 5 standard') (OPTIONS, args) = op.parse_args() if len(args) < 1: if not OPTIONS.check_manifest: op.error('missing JS_SHELL argument') JS, args = None, [] else: JS, args = args[0], args[1:] # Convert to an absolute path so we can run JS from a different directory. if JS is not None: JS = os.path.abspath(JS) if OPTIONS.debug: if OPTIONS.valgrind: print >> sys.stderr, "--debug and --valgrind options are mutually exclusive" sys.exit(2) debugger_prefix = ['gdb', '-q', '--args']
def main(): print 'here starts main program' #parse options parser = OptionParser() parser.add_option("--cv", dest="cv", action="store_true", default=False, help="cross validate methods with dataset") parser.add_option("--test", dest="test", action="store_true", default=False, help="test methods with specific events") (options, args) = parser.parse_args() #load parameters pars = load_pars() cut_pars = [60, 4, 24] pars = pars[:1] output = '\n\nResult:\n\n' for par in pars: #init analysis object analysis = Analysis(limits=par, max_price=5, cut_pars=cut_pars) if options.cv: #perform cross validation analysis.cross_validation() else: #get filenames if len(args) < 1: raise NameError("Usage: %s /path_some_file") else: fnames = args #load event data from fnames dh = Data_Handle().cut_raw_data(fnames=fnames, analysis=True) linklist = dh.get_linklist() datalist = dh.get_datalist() if options.test: analysis.test(datalist, num=50) else: #predict outcome events print 'Predicting outcome events:\n' #train clf with existing data analysis.fit() outputs = [] for i in xrange(len(datalist)): data = datalist[i] link = linklist[i] out = analysis.predict(link, [data]) if out != '': outputs.append(out) output += 'limits=[%.2f,%.2f]\n%d of %d events with prediction:\n\n' % ( par[0], par[1], len(outputs), len(datalist)) if len(outputs) > 0: output += analysis.performance() for i, item in enumerate(outputs): output += ' event #%d:\n%s\n' % (i + 1, item) output += '\n' if options.cv == False and options.test == False: print output