def main():
    """Command-line entry point: run a script under the yappi profiler.

    Usage: yappi.py [-b] [-s] [scriptfile] args ...

    The profiled script is executed with the caller's globals/locals so it
    behaves as if run directly; stats are printed with default parameters.
    """
    from optparse import OptionParser
    usage = "yappi.py [-b] [-s] [scriptfile] args ..."
    parser = OptionParser(usage=usage)
    parser.allow_interspersed_args = False  # stop option parsing at the script name
    parser.add_option("-b", "--builtins", action="store_true",
                      dest="profile_builtins", default=False,
                      help="Profiles builtin functions when set. [default: False]")
    parser.add_option("-s", "--single_thread", action="store_true",
                      dest="profile_single_thread", default=False,
                      help="Profiles only the thread that calls start(). [default: False]")
    if not sys.argv[1:]:
        parser.print_usage()
        sys.exit(2)
    (options, args) = parser.parse_args()
    sys.argv[:] = args  # the profiled script sees only its own arguments
    if len(sys.argv) > 0:
        sys.path.insert(0, os.path.dirname(sys.argv[0]))
        start(options.profile_builtins, not options.profile_single_thread)
        if sys.version_info >= (3, 0):
            # BUGFIX: previously `open(...).read()` leaked the file handle;
            # close it deterministically before executing the script.
            with open(sys.argv[0]) as script_file:
                source = script_file.read()
            exec(compile(source, sys.argv[0], 'exec'),
                 sys._getframe(1).f_globals, sys._getframe(1).f_locals)
        else:
            execfile(sys.argv[0], sys._getframe(1).f_globals, sys._getframe(1).f_locals)
        stop()
        # we will currently use default params for these
        get_func_stats().print_all()
        get_thread_stats().print_all()
    else:
        parser.print_usage()
def main():
    """CLI entry point: configure logging, then run worldgeo on the four inputs."""
    log.addHandler(logging.StreamHandler())
    usage = """%prog SHP ISO_CSV BORDERS_CSV
SHP World admin shapefile.
ISO_CSV CSV of "ISO2, ISO3, ISON".
BORDERS_SWITCH_CSV CSV of ISO2 codes of borders to switch.
BORDERS_DENY_CSV CSV of ISO2 codes of borders to inhibit.
"""
    parser = OptionParser(usage=usage)
    parser.add_option("-v", "--verbose", action="count", dest="verbose",
                      help="Print verbose information for debugging.", default=0)
    parser.add_option("-q", "--quiet", action="count", dest="quiet",
                      help="Suppress warnings.", default=0)
    options, args = parser.parse_args()
    # Decode filesystem-encoded byte arguments into text paths (Python 2).
    fs_encoding = sys.getfilesystemencoding()
    args = [arg.decode(fs_encoding) for arg in args]
    # Map (verbose - quiet) onto a log level, clamped to the available range.
    levels = (logging.ERROR, logging.WARNING, logging.INFO, logging.DEBUG)
    log.setLevel(levels[max(0, min(3, 1 + options.verbose - options.quiet))])
    if len(args) != 4:
        parser.print_usage()
        sys.exit(1)
    shp_path, iso_csv_path, border_switch_csv_path, border_deny_csv_path = args
    worldgeo(shp_path, iso_csv_path, border_switch_csv_path, border_deny_csv_path)
def main():
    """Command-line front end: profile the given script file via run().

    Everything after the script name is passed through to the profiled
    program.  Returns the OptionParser so callers can reuse its usage text.
    """
    parser = OptionParser(
        usage="profile.py [-o output_file_path] [-s sort] scriptfile [arg] ...")
    parser.allow_interspersed_args = False
    parser.add_option("-o", "--outfile", dest="outfile", default=None,
                      help="Save stats to <outfile>")
    parser.add_option("-s", "--sort", dest="sort", default=-1,
                      help="Sort order when printing to stdout, based on pstats.Stats class")
    if not sys.argv[1:]:
        parser.print_usage()
        sys.exit(2)
    options, args = parser.parse_args()
    if args:
        sys.argv[:] = args
        sys.path.insert(0, os.path.dirname(sys.argv[0]))
        with open(sys.argv[0]) as fp:
            script = fp.read()
        run("exec(%r)" % script, options.outfile, options.sort)
    else:
        parser.print_usage()
    return parser
def main(argv=None):
    """Scrape the local-news feed URL given on the command line.

    argv defaults to sys.argv[1:] so the function can also be driven
    programmatically.
    """
    import sys
    from optparse import OptionParser
    if argv is None:
        argv = sys.argv[1:]
    parser = OptionParser(usage="usage: %prog [options] <feed url>")
    parser.add_option(
        "--schema",
        default="local-news",
        help="which news item type to create when scraping"
    )
    parser.add_option(
        "--http-cache",
        action='store',
        help='location to use as an http cache. If a cached value is seen, no update is performed.'
    )
    from ebpub.utils.script_utils import add_verbosity_options, setup_logging_from_opts
    add_verbosity_options(parser)
    options, args = parser.parse_args(argv)
    setup_logging_from_opts(options, logger)
    if len(args) < 1:
        parser.print_usage()
        sys.exit(0)
    scraper = LocalNewsScraper(url=args[0], schema_slug=options.schema,
                               http_cache=options.http_cache)
    scraper.update()
def main():
    """
    Geocodes a location given on the command line.

    Usage:
        pygeocoder.py "1600 amphitheatre mountain view ca" [YOUR_API_KEY]
        pygeocoder.py 37.4219720,-122.0841430 [YOUR_API_KEY]

    When providing a latitude and longitude on the command line, ensure
    they are separated by a comma and no space.
    """
    parser = OptionParser("usage: %prog [options] address", version=VERSION)
    parser.add_option("-k", "--key", dest="key", help="Your Google Maps API key")
    options, args = parser.parse_args()
    if len(args) != 1:
        parser.print_usage()
        sys.exit(1)
    gcoder = Geocoder(options.key)
    try:
        result = gcoder.geocode(args[0])
    except GeocoderError as err:
        # Dump the failing URL and the raw response to stderr for debugging.
        sys.stderr.write('%s\n%s\nResponse:\n' % (err.url, err))
        json.dump(err.response, sys.stderr, indent=4)
        sys.exit(1)
    print(result)
    print(result.coordinates)
def parse_args(): parser = OptionParser(description='Get the IP of a running VM') parser.add_option('--debug', action='store_true', default=False, help='debug mode') parser.add_option('--api_host', default=None, help='oVirt API IP Address/Hostname') parser.add_option( '--api_user', default=DEFAULT_API_USER, help='oVirt API Username, defaults to "%s"' % (DEFAULT_API_USER)) parser.add_option('--api_pass', default=None, help='oVirt API Password') parser.add_option('--vm_id', default=None, help='ID of an existing VM to add a disk to') (opts, args) = parser.parse_args() for optname in ["api_host", "api_pass", "api_user", "vm_id"]: optvalue = getattr(opts, optname) if not optvalue: parser.print_help() parser.print_usage() print "Please re-run with an option specified for: '%s'" % (optname) sys.exit(1) return opts
def main():
    """Profile the script named on the command line via runctx().

    Returns the OptionParser so callers can reuse its usage text.
    """
    parser = OptionParser(
        usage="profile.py [-o output_file_path] [-s sort] scriptfile [arg] ...")
    parser.allow_interspersed_args = False
    parser.add_option('-o', '--outfile', dest="outfile", default=None,
                      help="Save stats to <outfile>")
    parser.add_option('-s', '--sort', dest="sort", default=-1,
                      help="Sort order when printing to stdout, based on pstats.Stats class")
    if not sys.argv[1:]:
        parser.print_usage()
        sys.exit(2)
    options, args = parser.parse_args()
    sys.argv[:] = args
    if args:
        progname = args[0]
        sys.path.insert(0, os.path.dirname(progname))
        with open(progname, 'rb') as fp:
            code = compile(fp.read(), progname, 'exec')
        # Emulate a fresh __main__ namespace for the profiled script.
        globs = {
            '__file__': progname,
            '__name__': '__main__',
            '__package__': None,
        }
        runctx(code, globs, None, options.outfile, options.sort)
    else:
        parser.print_usage()
    return parser
def main(tester_lambda=None, pop_lambda=None, hlconfig_lambda=None, config_lambda=None):
    # Entry point for the sgatuner autotuner CLI.
    #
    # The optional *_lambda hooks let callers substitute custom tester /
    # population / high-level-config / config factories; they are forwarded
    # unchanged to autotune().
    from optparse import OptionParser
    parser = OptionParser(usage="usage: sgatuner.py [options] Benchmark")
    parser.add_option("--check", action="store_true", dest="check", default=False,
                      help="check for correctness")
    parser.add_option("--debug", action="store_true", dest="debug", default=False,
                      help="enable debugging options")
    parser.add_option("-n", type="int", help="input size to train for")
    # The remaining options are all routed through option_callback rather
    # than stored on `options`.
    parser.add_option("--max_time", type="float", action="callback", callback=option_callback)
    parser.add_option("--rounds_per_input_size", type="int", action="callback", callback=option_callback)
    parser.add_option("--mutations_per_mutator", type="int", action="callback", callback=option_callback)
    parser.add_option("--output_dir", type="string", action="callback", callback=option_callback)
    parser.add_option("--population_size", type="int", action="callback", callback=option_callback)
    parser.add_option("--min_input_size", type="int", action="callback", callback=option_callback)
    parser.add_option("--offset", type="int", action="callback", callback=option_callback)
    parser.add_option("--threads", type="int", action="callback", callback=option_callback)
    parser.add_option("--name", type="string", action="callback", callback=option_callback)
    parser.add_option("--abort_on", type="string", action="callback", callback=option_callback)
    parser.add_option("--accuracy_target", type="float", action="callback", callback=option_callback)
    (options, args) = parser.parse_args()
    # Exactly one positional argument (the benchmark name) is required.
    if len(args)!=1:
        parser.print_usage()
        sys.exit(1)
    # Apply optional tunerconfig patches (check / debug / input size) before tuning.
    if options.check:
        tunerconfig.applypatch(tunerconfig.patch_check)
    if options.debug:
        tunerconfig.applypatch(tunerconfig.patch_debug)
    if options.n:
        tunerconfig.applypatch(tunerconfig.patch_n(options.n))
    config.benchmark=args[0]
    # Recompile only when no test/population/config hook is injected.
    if tester_lambda is None and pop_lambda is None and hlconfig_lambda is None:
        recompile()
    autotune(config.benchmark, None, tester_lambda, pop_lambda, hlconfig_lambda,
             config_lambda)
def main():
    """Run a script with TraceWriter tracing installed; returns the parser."""
    parser = OptionParser(
        usage="mactrace.py [-o output_file_path] scriptfile [arg] ...")
    parser.allow_interspersed_args = False
    parser.add_option('-o', '--outfile', dest="outfile", default=None,
                      help="Save trace to <outfile>")
    if not sys.argv[1:]:
        parser.print_usage()
        sys.exit(2)
    options, args = parser.parse_args()
    sys.argv[:] = args
    # Trace either to the requested file or to the writer's default target.
    twriter = TraceWriter(open(options.outfile, "w")) if options.outfile else TraceWriter()
    sys.settrace(twriter.trace)
    if args:
        progname = args[0]
        sys.path.insert(0, os.path.dirname(progname))
        with open(progname, 'rb') as fp:
            code = compile(fp.read(), progname, 'exec')
        # Emulate a fresh __main__ namespace for the traced script.
        globs = {
            '__file__': progname,
            '__name__': '__main__',
            '__package__': None,
        }
        eval(code, globs)
    else:
        parser.print_usage()
    return parser
def parse_options():
    """Build usage text from Client.command_list(), then parse the CLI.

    Exits with status 1 when no command is given; otherwise returns
    (options, args).
    """
    usage_lines = ['Usage: %prog [options] <command>', '', 'Commands:']
    usage_lines += [' %s' % c for c in Client.command_list()]
    parser = OptionParser('\n'.join(usage_lines))
    parser.add_option('-v', '--verbose', dest='verbose', action='store_true',
                      help='More verbose output')
    parser.add_option('--text_to_speech', dest='text_to_speech', action='store_true',
                      help="Use the shell `say' command to enable text-to-speech for messages")
    parser.add_option('-l', '--live', dest='live', action='store_true',
                      help='Use the "live" chat stream instead of transcript polling')
    options, args = parser.parse_args()
    if not args:
        parser.print_usage()
        sys.exit(1)
    return options, args
def parse_options(): parser = OptionParser(usage="usage: %prog [options] <version>") parser.add_option("-f", "--from", dest="from_tag", metavar="FROM_TAG", help="The tag which should be used as start tag") parser.add_option("-t", "--to", dest="to_tag", metavar="TO_TAG", help="The tag which should be used as end tag") parser.add_option("-p", "--preview-url", dest="preview_url", metavar="PREVIEW_URL", help="The URL which is able to preview release notes") parser.add_option("-b", "--batch-mode", dest="batch_mode", action="store_true", default=False) (options, args) = parser.parse_args() if len(args) > 1: print __doc__ parser.print_usage() sys.exit(1) current_tag = None try: current_tag = current_git_tag() except: current_tag = "" options.from_tag = options.from_tag or current_tag if options.from_tag is None: parser.error("Couldn't find start tag. Please specify one manually") options.to_tag = options.to_tag or "HEAD" options.preview_url = options.preview_url or os.getenv("PREVIEW_URL", PREVIEW_URL % nname(tool_config("name"))) return (options, args)
def main():
    """CLI entry point: configure logging, then run missinggeo on one CSV."""
    log.addHandler(logging.StreamHandler())
    usage = """%prog CSV
CSV CSV of missing country coordinates.
"""
    parser = OptionParser(usage=usage)
    parser.add_option("-v", "--verbose", action="count", dest="verbose",
                      help="Print verbose information for debugging.", default=0)
    parser.add_option("-q", "--quiet", action="count", dest="quiet",
                      help="Suppress warnings.", default=0)
    options, args = parser.parse_args()
    # Decode filesystem-encoded byte arguments into text paths (Python 2).
    fs_encoding = sys.getfilesystemencoding()
    args = [arg.decode(fs_encoding) for arg in args]
    # Map (verbose - quiet) onto a log level, clamped to the available range.
    levels = (logging.ERROR, logging.WARNING, logging.INFO, logging.DEBUG)
    log.setLevel(levels[max(0, min(3, 1 + options.verbose - options.quiet))])
    if len(args) != 1:
        parser.print_usage()
        sys.exit(1)
    (csv_path,) = args
    missinggeo(csv_path)
class Attach():
    """mgit `attach` command: register an existing sub-project module.

    NOTE(review): depends on project-level Git and Project helpers — their
    semantics are assumed from usage here.
    """

    def __init__(self):
        self.git = Git()
        self.parser = OptionParser()
        self.parser.set_usage("mgit attach <subproject name>")

    def print_usage(self):
        """Print the command's usage line."""
        self.parser.print_usage()

    def run(self, args):
        """Attach the module named by the command arguments.

        args[0] (after the command word) is the subproject name; an optional
        second argument overrides the derived module name.  Silently returns
        when no module name can be determined.
        """
        options, args = self.parser.parse_args(args[1:])
        if not args:  # IDIOM: was `len(args) <= 0`
            self.parser.print_usage()
            return
        if len(args) > 1:
            module = args[1]
        else:
            module = self.git.module_name(args[0])
        # BUGFIX(idiom): compare with `is None`, not `== None`.
        if module is None or len(module) <= 0:
            return
        if not os.path.exists(module):
            print("module %s not exists." % module)
            return
        project = Project()
        project.append(module)
def main():
    """Main function code.

    Implementing:
    1. parsing args (-i, --in-file)
    2. iterating and transforming input data (results stored in table)
    3. printing out results in required format
    """
    parser = OptionParser(usage="usage: %prog [options] arg")
    parser.add_option("-i", "--in-file", dest="in_filename",
                      help="provide input data by specifing filename")
    (options, args) = parser.parse_args()
    # The input file is mandatory; exit quietly after showing usage.
    if options.in_filename is None:
        parser.print_usage()
        exit(0)
    results = []
    for engines, phrases in gen_next_case(options.in_filename):
        engine_in_use = None
        score = 0
        # No phrases means no switches are ever needed for this case.
        if len(phrases) == 0:
            results.append(score)
            continue
        for i, phrase in enumerate(phrases):
            if engine_in_use is None:
                # Pick an initial engine; excluding phrases[0] — presumably to
                # avoid an immediate forced switch (TODO confirm intent).
                engine_in_use = suggest(engines, phrases, exclude=phrases[0])
            elif engine_in_use == phrase:
                # The current engine's name came up: switch engines and count it.
                engine_in_use = suggest(engines, phrases[i:], exclude=engine_in_use)
                # NOTE(review): placing `score += 1` inside this elif is the most
                # plausible reading of the collapsed source — confirm.
                score += 1
        results.append(score)
    for i, result in enumerate(results):
        print "Case #%i: %i" % (i+1, result)
def main(): option_parser = OptionParser(usage='%prog [options] input.xml, output.xml') option_parser.add_option('-s', '--start', type='float') option_parser.add_option('-e', '--end', type='float') option_parser.add_option('--src-start', '--os', type='float') option_parser.add_option('--src-end', '--oe', type='float') option_parser.add_option('-r', '--rate', type='float', default=1.0) option_parser.add_option('-v', '--verbose', action='count', default=0) option_parser.add_option('-f', '--farm', action='store_true') option_parser.add_option('-w', '--workers', type='int', default=20) option_parser.add_option('-a', '--advect', type='float', default=0.0) opts, args = option_parser.parse_args() if len(args) != 2: option_parser.print_usage() exit(1) res = schedule_retime(*args, src_start=opts.src_start, src_end=opts.src_end, dst_start=opts.start, dst_end=opts.end, sampling_rate=opts.rate, verbose=opts.verbose, farm=opts.farm, workers=opts.workers, advect=opts.advect ) if opts.farm: print 'Qube job ID', res
def parse_command_line():
    """Parse options for the photo-histogram tool.

    Requires at least one positional argument (the input CSV); exits with
    status 1 otherwise.  Returns (options, args).
    """
    parser = OptionParser(usage='%prog [options] uploaded.csv')
    parser.add_option('-o', '--output', dest='output_path', default='hist.csv',
                      help='path to output CSV file')
    parser.add_option('-n', '--num-samples', dest='num_samples', default=None,
                      help='maximum number of samples from input CSV')
    parser.add_option('-t', '--cluster-threshold', dest='cluster_thresh', default=25,
                      help='number of photos per day considered significant when clustering')
    options, args = parser.parse_args()
    if not args:
        parser.print_usage()
        sys.exit(1)
    return (options, args)
def main(): usage = "%prog [options] /path/to/error/file" parser = OptionParser(usage=usage) parser.add_option("-v", "--verbose", action="store_true", dest="verbose", default=False, help="print messages to stdout") (options, args) = parser.parse_args() if not len(args) > 0: parser.print_usage() quit(code=1) for file in args: fh = open(file) data = cPickle.load(fh) fh.close() for k in data.keys(): print "%s: %s" % ( k, data[k]) quit(code=0)
def main(args): parser = OptionParser(version='modifyrepo version %s' % __version__) # query options parser.add_option("--mdtype", dest='mdtype', help="specific datatype of the metadata, will be derived from the filename if not specified") parser.add_option("--remove", action="store_true", help="remove specified file from repodata") parser.add_option("--compress", action="store_true", default=True, help="compress the new repodata before adding it to the repo (default)") parser.add_option("--no-compress", action="store_false", dest="compress", help="do not compress the new repodata before adding it to the repo") parser.add_option("--compress-type", dest='compress_type', help="compression format to use") parser.add_option("-s", "--checksum", dest='sumtype', help="specify the checksum type to use") parser.add_option("--unique-md-filenames", dest="unique_md_filenames", help="include the file's checksum in the filename, helps with proxies", action="store_true") parser.add_option("--simple-md-filenames", dest="unique_md_filenames", help="do not include the file's checksum in the filename", action="store_false") parser.usage = "modifyrepo [options] [--remove] <input_metadata> <output repodata>" (opts, argsleft) = parser.parse_args(args) if len(argsleft) != 2: parser.print_usage() return 0 metadata = argsleft[0] repodir = argsleft[1] try: repomd = RepoMetadata(repodir) except MDError, e: print "Could not access repository: %s" % str(e) return 1
def main():
    """Handle command-line munging, and pass off control to the interesting stuff."""
    parser = OptionParser(
        usage='usage: %prog [options] <staging | prod>',
        description='Deploy a new version of DXR.')
    parser.add_option('-b', '--base', dest='base_path',
                      help='Path to the dir containing the builds, '
                           'instances, and deployment links')
    parser.add_option('-c', '--branch', dest='branch',
                      help='Deploy the revision from this branch which '
                           'last passed Jenkins.')
    parser.add_option('-p', '--python', dest='python_path',
                      help='Path to the Python executable on which to '
                           'base the virtualenvs')
    parser.add_option('-e', '--repo', dest='repo',
                      help='URL of the git repo from which to download '
                           'DXR. Use HTTPS if possible to ward off spoofing.')
    parser.add_option('-r', '--rev', dest='manual_rev',
                      help='A hash of the revision to deploy. Defaults to '
                           'the last successful Jenkins build on the '
                           'branch specified by -c (or master, by default).')
    options, args = parser.parse_args()
    if len(args) != 1:
        parser.print_usage()
        return
    # Forward only the options the user actually supplied.
    overrides = {}
    for opt in parser.option_list:
        dest = opt.dest
        if dest and getattr(options, dest):
            overrides[dest] = getattr(options, dest)
    Deployment(args[0], **overrides).deploy_if_appropriate()
def main():
    """CLI entry: print a dependency tree for each EPICS pv link argument."""
    from optparse import OptionParser
    parser = OptionParser(
        usage='%prog [options] <link> ...',
        description='Shows a tree of link dependencies of the given EPICS pv link.')
    parser.add_option('-m', '--nms', dest='check_ms', default=False, action='store_true',
                      help='Check for missing maximise severity (MS) links')
    parser.add_option('-q', '--quiet', dest='quiet', default=False, action='store_true',
                      help='Only show errors, suppress normal output')
    # -r / -c share dest='raw' as a store_true / store_false pair; the default
    # comes from whether the destination looks like a dumb terminal.
    parser.add_option('-r', '--raw', dest='raw', default=dumb_terminal(), action='store_true',
                      help='Print raw text without colour codes')
    parser.add_option('-c', '--colour', dest='raw', action='store_false',
                      help='Force colour coded output on unsupported destination')
    global options
    options, args = parser.parse_args()
    if not args:
        parser.print_usage()
        return
    for arg in args:
        follow_link(0, arg)
def parse_command_line(): usage = """%prog <mmfile> -o [<htmloutput>] Create a FreeMind (.mm) document (see http://freemind.sourceforge.net/wiki/index.php/Main_Page) the main node will be the title page and the lower nodes will be pages. """ parser = OptionParser(usage) parser.add_option('-o', '--output', dest="outfile") parser.add_option('-m', '--minutes', dest="order_by_time", action='store_true', help="Order the minutes by time and show the time") (options, args) = parser.parse_args() if len(args) == 0: parser.print_usage() sys.exit(-1) infile = args[0] if not infile.endswith('.mm'): print "Input file must end with '.mm'" parser.print_usage() sys.exit(-1) outfile = sys.stdout if options.outfile: # Writing out the HTML in correct UTF-8 format is a little tricky. print "Outputting to '%s'" % (options.outfile) outfile = codecs.open(options.outfile, 'w', 'utf-8') mm2notes = Mm2Notes() lines = mm2notes.open(infile) mm2notes.set_order_by_time(options.order_by_time) mm2notes.write(outfile, lines)
def main():
    """ cmd executes a command on multiple hosts """
    p = OptionParser("python deploy_utils cmd [script] --hosts=[host1,host2]]")
    p.add_option('--verbose', action='store_true', dest='verbose', help='Verbose mode')
    p.add_option('--hosts', dest='hosts', help='Hosts')
    p.add_option('--ssh_key', dest='ssh_key', help='ssh key')
    p.add_option('--ssh_user', dest='ssh_user', help='ssh user')
    (options, args) = p.parse_args()
    args = args[1:]
    # BUGFIX: guard before indexing — previously `args[0]` raised IndexError
    # when no script argument followed the command word, instead of showing
    # the usage message.
    script = args[0] if args else None
    if not script or not options.hosts:
        p.print_usage()
        return 0
    env.user = options.ssh_user or os.environ.get('DEPLOY_UTILS_SSH_USER') or 'root'
    env.key_filename = options.ssh_key or os.environ.get('DEPLOY_UTILS_SSH_KEY')
    #env.rootpath = '/var/apps/'
    env.hosts = (options.hosts or '').split(',')
    with settings(warn_only=True, host_string=options.hosts):
        # Create the directory if needed.
        run('{0}'.format(script))
def main():
    """cProfile CLI front end (Python 2): profile a script file via run().

    Returns the OptionParser so callers can reuse its usage text.
    """
    import os, sys
    from optparse import OptionParser
    parser = OptionParser(
        usage="cProfile.py [-o output_file_path] [-s sort] scriptfile [arg] ...")
    parser.allow_interspersed_args = False
    parser.add_option("-o", "--outfile", dest="outfile", default=None,
                      help="Save stats to <outfile>")
    parser.add_option("-s", "--sort", dest="sort", default=-1,
                      help="Sort order when printing to stdout, based on pstats.Stats class")
    if not sys.argv[1:]:
        parser.print_usage()
        sys.exit(2)
    options, args = parser.parse_args()
    sys.argv[:] = args  # the profiled script sees only its own arguments
    if sys.argv:
        sys.path.insert(0, os.path.dirname(sys.argv[0]))
        run("execfile(%r)" % (sys.argv[0],), options.outfile, options.sort)
    else:
        parser.print_usage()
    return parser
def parse_args(): parser = OptionParser(description='Create a VM in oVirt from an existing VM Template') parser.add_option('--debug', action='store_true', default=False, help='debug mode') parser.add_option('--api_host', default=None, help='oVirt API IP Address/Hostname') parser.add_option('--api_user', default=DEFAULT_API_USER, help='oVirt API Username, defaults to "%s"' % (DEFAULT_API_USER)) parser.add_option('--api_pass', default=None, help='oVirt API Password') parser.add_option('--vm_template_name', default=None, help='VM template name to create VM from') parser.add_option('--cluster_name', default=None, help='Cluster name to create VM in') parser.add_option('--vm_name', default=None, help='VM name to be created') (opts, args) = parser.parse_args() for optname in ["api_host", "api_pass", "api_user", "vm_template_name", "cluster_name", "vm_name"]: optvalue = getattr(opts, optname) if not optvalue: parser.print_help() parser.print_usage() print "Please re-run with an option specified for: '%s'" % (optname) sys.exit(1) return opts
def cmd_line():
    """Parse command-line options for yum-debug-restore.

    Exits with status 1 (after printing usage) when no dump-file argument is
    given; otherwise returns (opts, args).
    """
    opt_parser = OptionParser()
    opt_parser.set_usage("yum-debug-restore [options]")
    opt_parser.add_option("-C", "--cache", action="store_true",
                          help="run from cache only")
    opt_parser.add_option("-c", dest="conffile", help="config file location")
    opt_parser.add_option("--enablerepo", action="append", dest="enablerepos",
                          help="specify additional repoids to query, can be specified multiple times")
    opt_parser.add_option("--disablerepo", action="append", dest="disablerepos",
                          help="specify repoids to disable, can be specified multiple times")
    opt_parser.add_option("-y", dest="assumeyes", action="store_true",
                          help="answer yes for all questions")
    opt_parser.add_option("--skip-broken", action="store_true",
                          help="skip packages with depsolving problems")
    opt_parser.add_option("--output", action="store_true",
                          help="output the yum shell commands")
    opt_parser.add_option("--shell",
                          help="output the yum shell commands to a file")
    opt_parser.add_option("--install-latest", action="store_true",
                          help="install the latest instead of specific versions")
    opt_parser.add_option("--ignore-arch", action="store_true",
                          help="ignore arch of packages, so you can dump on .i386 and restore on .x86_64")
    opt_parser.add_option("--filter-types",
                          help="Limit to: install, remove, update, downgrade")
    opts, args = opt_parser.parse_args()
    if not args:
        opt_parser.print_usage()
        sys.exit(1)
    return (opts, args)
def main():
    """CLI entry point: set up logging and run worldgeo on a single shapefile."""
    log.addHandler(logging.StreamHandler())
    usage = """%prog SHP
SHP World admin shapefile.
"""
    parser = OptionParser(usage=usage)
    parser.add_option("-v", "--verbose", action="count", dest="verbose", default=0,
                      help="Print verbose information for debugging.")
    parser.add_option("-q", "--quiet", action="count", dest="quiet", default=0,
                      help="Suppress warnings.")
    parser.add_option("-l", "--list", action="store_true", dest="dump", default=None,
                      help="List countries as CSV of iso2, name.")
    options, args = parser.parse_args()
    # Decode filesystem-encoded byte arguments into text paths (Python 2).
    fs_encoding = sys.getfilesystemencoding()
    args = [arg.decode(fs_encoding) for arg in args]
    # Map (verbose - quiet) onto a log level, clamped to the available range.
    levels = (logging.ERROR, logging.WARNING, logging.INFO, logging.DEBUG)
    log.setLevel(levels[max(0, min(3, 1 + options.verbose - options.quiet))])
    if len(args) != 1:
        parser.print_usage()
        sys.exit(1)
    (shp_path,) = args
    worldgeo(shp_path, options.dump)
def main():
    """Fetch the raw fastq reads referenced by tabular BLAST hits (Python 2)."""
    program_name = os.path.basename(sys.argv[0])
    program_version = "v0.1"
    program_build_date = "%s" % __updated__
    program_version_string = '%%prog %s (%s)' % (program_version, program_build_date)
    program_license = "Copyright 2014 Steven Hill Hendrix Lab Oregon State University Licensed under the Apache License 2.0\nhttp://www.apache.org/licenses/LICENSE-2.0"
    program_longdesc = "Takes tabular blast output and fetches the raw reads from a paired fastq file. Supports interleaved paired end reads. Expects an index to be present that has the same prefix as the fastq file."
    argv = sys.argv[1:]
    # setup option parser
    parser = OptionParser(version=program_version_string, epilog=program_longdesc,
                          description=program_license)
    parser.add_option("-b", "--blast", dest="blast",
                      help="Tabular output from BLAST to a blastdb of raw reads.",
                      metavar="FILE")
    parser.add_option("-t", "--num-threads", dest="num_threads",
                      help="Number of threads to use when searching for reads.",
                      metavar="INT")
    parser.add_option("-r", "--raw-reads", dest="reads",
                      help="A single fastq file containing the source reads. If the reads are paired they should be interleaved.",
                      metavar="FILE")
    # process options
    (opts, args) = parser.parse_args(argv)
    blast, rawReads = opts.blast, opts.reads
    # BUGFIX: `int(opts.num_threads)` raised TypeError when -t was omitted,
    # which made the old `if num_threads is None: num_threads = 1` fallback
    # unreachable.  Apply the documented default of 1 thread instead.
    num_threads = int(opts.num_threads) if opts.num_threads is not None else 1
    if blast is None or rawReads is None:
        parser.print_usage()
        sys.exit(-1)
    # MAIN BODY #
    toFind = {}
    print >> sys.stderr, "reading blast hits", "\r",
    readBlast(blast, toFind)
    print >> sys.stderr, "finding reads", "\r",
    findReads(rawReads, toFind, num_threads)
def main(): usage = "usage: %prog [options] filename1 filename2" parser = OptionParser(usage=usage) parser.add_option("-r", "--relative-error", dest="relerr", default=0.0, help="the maximum relative error") parser.add_option("-q", "--quiet", action="store_true", dest="quiet", default=False, help="don't print a line per numerical difference found") parser.add_option("-s", "--silent", action="store_true", dest="silent", default=False, help="don't print anything" " this is useful if you just want to use the exit status") (options, args) = parser.parse_args() if len(args) != 2: print "too few arguments" parser.print_usage() sys.exit(2) if options.silent: options.quiet = True same = ndiff(args[0], args[1], options.relerr, options.quiet) if same: sys.exit(0) else: sys.exit(1)
def main():
    """Profile-visualize a script named on the command line; returns the parser."""
    from optparse import OptionParser
    parser = OptionParser()
    parser.allow_interspersed_args = False
    parser.add_option('-o', '--outfile', dest="outfile", default='/tmp/profile.tmp',
                      help="Save stats to <outfile>")
    if not sys.argv[1:]:
        parser.print_usage()
        sys.exit(2)
    options, args = parser.parse_args()
    #viz = kcachegrind
    viz = profile_viz
    # Make the current directory importable for the profiled script.
    sys.path = [os.getcwd()] + sys.path
    if args:
        sys.argv[:] = args
        sys.path.insert(0, os.path.dirname(sys.argv[0]))
        viz('execfile(%r)' % sys.argv[0], out=options.outfile)
    else:
        parser.print_usage()
    return parser
def parse_args():
    """Parse command line arguments for the parser runner.

    Prints usage and exits when no positional parser name is supplied;
    otherwise returns (options, args).
    """
    parser = OptionParser(usage=PARSER_USAGE)
    # (flags, keyword arguments) pairs, registered in display order.
    option_specs = [
        (('-m', '--method'), dict(default='main', help='Which parser method should be run')),
        (('-f', '--force'), dict(action="store_true", help='Re-parse all data whether or not it has changed.')),
        (('-l', '--level'), dict(default='info', help='Default logging level')),
        (('--disable-events',), dict(action='store_true', default=False, help='Disable events processing')),
        (('--disable-indexing',), dict(action='store_true', default=False, help='Disable synchronous indexing')),
        (('--congress',), dict(help='Limit parsing to the specified congress')),
        (('--slow',), dict(action='store_true', help='Slow down parsing so we don\'t interfere with other processes.')),
        (('--filter',), dict(help='Only process files matching a regex.')),
    ]
    for flags, kwargs in option_specs:
        parser.add_option(*flags, **kwargs)
    options, args = parser.parse_args()
    if not args:
        parser.print_usage()
        sys.exit()
    return options, args
def ParseOptions():
    # Parse command-line options for the RDM responder test runner.
    # Exits(2) when no UID argument is supplied (unless --list-tests) or when
    # the UID fails to parse; otherwise returns options with `uid` attached.
    usage = 'Usage: %prog [options] <uid>'
    # NOTE(review): the exact line wrapping of this dedented description was
    # lost in extraction; the wording below matches the visible source.
    description = textwrap.dedent("""\
    Run a series of tests on a RDM responder to check the behaviour.
    This requires the OLA server to be running, and the RDM device to have been
    detected. You can confirm this by running ola_rdm_discover -u UNIVERSE.
    This will send SET commands to the broadcast UIDs which means the start
    address, device label etc. will be changed for all devices connected to the
    responder. Think twice about running this on your production lighting rig.
    """)
    parser = OptionParser(usage, description=description)
    parser.add_option('-c', '--slot-count', default=10,
                      help='Number of slots to send when sending DMX.')
    parser.add_option('-d', '--debug', action='store_true',
                      help='Print debug information to assist in diagnosing '
                           'failures.')
    parser.add_option('-f', '--dmx-frame-rate', default=0, type='int',
                      help='Send DMX frames at this rate in the background.')
    parser.add_option('-l', '--log', metavar='FILE',
                      help='Also log to the file named FILE.uid.timestamp.')
    parser.add_option('--list-tests', action='store_true',
                      help='Display a list of all tests')
    parser.add_option('-p', '--pid-location', metavar='DIR',
                      help='The location of the PID definitions.')
    parser.add_option('-s', '--skip-check', action='store_true',
                      help='Skip the check for multiple devices.')
    parser.add_option('-t', '--tests', metavar='TEST1,TEST2',
                      help='A comma separated list of tests to run.')
    parser.add_option('--timestamp', action='store_true',
                      help='Add timestamps to each test.')
    parser.add_option('--timing', action='store_true',
                      help='Display summary timing information')
    parser.add_option('--no-factory-defaults', action='store_true',
                      help="Don't run the SET factory defaults tests")
    parser.add_option('-w', '--broadcast-write-delay', default=0, type='int',
                      help='The time in ms to wait after sending broadcast set '
                           'commands.')
    parser.add_option('-u', '--universe', default=0, type='int',
                      help='The universe number to use, default is universe 0.')
    parser.add_option('--inter-test-delay', default=0, type='int',
                      help='The delay in ms to wait between tests, defaults to '
                           '0.')
    options, args = parser.parse_args()
    # Listing tests does not require a UID argument.
    if options.list_tests:
        return options
    if not args:
        parser.print_help()
        sys.exit(2)
    uid = UID.FromString(args[0])
    if uid is None:
        parser.print_usage()
        print 'Invalid UID: %s' % args[0]
        sys.exit(2)
    options.uid = uid
    return options
def write_fmt(self, fmt, *args):
    # Pack *args with a converted struct format string and write the bytes.
    # NOTE(review): relies on self._convert_format / self.write from the
    # enclosing class, whose definition is outside this view.
    fmt = self._convert_format(fmt)
    self.write(struct.pack(fmt, *args))

# Parse command line
parser = OptionParser(usage='%prog [options] filename tag_number')
parser.add_option('-d', '--directory', dest='directory', type=int, default=0,
                  help='Directory number (default 0)')
opts, args = parser.parse_args()
if len(args) != 2:
    parser.print_usage()
    sys.exit(2)
# Get arguments
filename, tag = args
directory = opts.directory
# Base 0 accepts decimal, 0x-hex, and octal tag numbers alike.
tag = int(tag, 0)
with TiffFile(filename) as fh:
    entry_size = fh.fmt_size('HHZZ')
    # Seek to correct directory
    dir_base = fh.read_fmt('Z')
    fh.seek(dir_base)
    # NOTE(review): this loop appears truncated in the visible source —
    # nothing here decrements `directory`; confirm against the full file.
    while directory > 0:
        count = fh.read_fmt('Y')
def main():
    """Mine a burp session file for candidate passwords, emails and metadata.

    Parses the single burp-session-file argument, converts it to XML, then
    walks every request/response pair dispatching the response body to a
    per-content-type word extractor.  Results accumulate in the module
    globals Options / Dictionary and are printed at the end.
    """
    # NOTE(review): 'usage' is built but never passed to OptionParser(),
    # so -h does not show it -- confirm whether OptionParser(usage) was
    # intended.
    usage = ("%prog [options] burp-session-file\n"
             " mine a burp session file for possible passwords\n"
             " motiviated by digininja's cewl\n")
    parser = OptionParser()
    ## Depth doesn't make sense since we're not spidering #
    # parser.add_option("-d","--depth",
    #                   action="store", type="int", default = 2,
    #                   dest="depth",
    #                   help="depth to spider to, default 2")
    parser.add_option("-m", "--min_word_length",
                      action="store", type="int", default=3,
                      dest="min_word_length",
                      help="minimum word length, default 3")
    parser.add_option("-e", "--email",
                      action="store_true", dest="EMAIL", default=False,
                      help="output email addresses")
    parser.add_option("--email_file",
                      action="store", dest="email_file",
                      help="output file for email addresses")
    parser.add_option("-a", "--meta",
                      action="store_true", dest="META", default=False,
                      help="output metadata")
    parser.add_option("--meta-file",
                      action="store", dest="meta_file",
                      help="output file for meta data")
    parser.add_option("-n", "--no-words",
                      action="store_true", dest="no_words", default=False,
                      help="do not output the wordlist")
    parser.add_option("-u", "--urls",
                      action="store_true", dest="list_urls", default=False,
                      help="list visited urls to stderr")
    parser.add_option("-w", "--write",
                      dest="output_file",
                      help="write the words to file")
    parser.add_option("-c", "--count",
                      action="store_true", dest="count", default=False,
                      help="show the count for each of the words found")
    parser.add_option("-v", "--verbose",
                      action="store_true", dest="VERBOSE", default=False,
                      help="verbose")
    # NOTE(review): "ot lineexiftool" looks like extraction garbling of
    # "exiftool" in this runtime help string -- confirm against upstream.
    parser.add_option("--meta-temp-dir",
                      action="store", dest="meta_temp_dir",
                      help="temporary directory used by " +
                      "ot lineexiftool when parsing file, default /tmp")

    # Parsed options and the word dictionary are shared with the extractor
    # helpers through module globals.
    global Options
    global Dictionary
    (Options, args) = parser.parse_args()
    Dictionary = {}
    untyped_documents = 0

    if len(args) != 1:
        parser.print_usage()
    else:
        vprint("Converting burp session file to xml\n")
        try:
            burp_xml_str = burp_to_xml(args[0], True)
        except:
            # best-effort diagnostic before re-raising the original error
            print 'burp2xml.py failed to parse session file'
            raise
        # content_verify_map cross-checks libmagic's sniffed type against
        # the declared Content-Type before acting on the body
        content_verify_map = {'text/plain': check_plain}
        # dispatch table: declared content type -> word extractor
        action_map = {
            'application/msword': doc_get_words,
            'application/pdf': pdf_get_words,
            'application/vnd.openxmlformats-officedocument'
            '.wordprocessingml.document': doc_get_words,
            'application/x-gzip': do_pass,
            'application/x-shockwave-flash': do_pass,
            'audio/x-wav': do_pass,
            'image/jpeg': image_get_words,
            'image/gif': do_pass,
            'image/png': do_pass,
            'image/x-bitmap': do_pass,
            'text/css': do_pass,
            'text/html': html_get_words,
            'text/plain': text_get_words,
            'none': do_pass,
        }
        # First Get the Request
        while 1:
            try:
                (burp_xml_str, rq_str) = get_tag_content(burp_xml_str,
                                                         'request')
                request = HTTPRequest(rq_str)
            except LookupError:
                # no more <request> tags: session exhausted
                break
            try:
                (burp_xml_str, rsp_str) = get_tag_content(burp_xml_str,
                                                          'response')
                rsp_str = remove_CDATA(rsp_str)
                response = http_parse(rsp_str)
                try:
                    url = request.headers['Host'] + request.path
                except:
                    # missing Host header; keep going with a placeholder
                    url = '?!Unknown url!?'
                type_str = response.getheader('Content-Type', 'none')
                if type_str == 'none':
                    vprint('Document has no content-type ' + url + '\n')
                    untyped_documents = untyped_documents + 1
                # strip any "; charset=..." suffix and normalise case
                semi_index = type_str.find(';')
                if semi_index >= 0:
                    type_str = type_str[0:semi_index].lower()
                body_str = response.read()
                # libmagic's opinion of the body, used for verification
                magic_str = magic.from_buffer(body_str)
                if Options.list_urls:
                    sys.stderr.write(url + ':' + type_str + '(' +
                                     magic_str + ')\n')
                try:
                    # Check content. If not verified, TypeError is returned
                    try:
                        content_verify_map[type_str](magic_str)
                    except KeyError:
                        # no verifier registered for this type: accept as-is
                        pass
                    action_map[type_str](body_str, url)
                except:
                    # best-effort: unknown types or extractor failures are
                    # silently skipped so one bad document can't stop the run
                    pass
            except LookupError:
                # print "Missing response"
                raise
        if not Options.no_words:
            # highest-frequency words first
            sorted_words = sorted(Dictionary.iteritems(),
                                  key=operator.itemgetter(1), reverse=True)
            for word_tuple in sorted_words:
                sys.stdout.write(word_tuple[0])
                sys.stdout.write((', ' + str(word_tuple[1]))
                                 if Options.count else '')
                sys.stdout.write('\n')
        if untyped_documents > 0:
            sys.stderr.write('Ignored ' + str(untyped_documents) +
                             ' documents with missing Content-Type Header')
def parseOptions():
    """
    Parse the command line and return the populated options object.

    Mandatory inputs (a representation file and a coordinates file) are
    validated after parsing; when one is missing an error message and the
    usage text are printed and the process exits with status 1.

    Returns:
        the optparse options object.  options.indices, when given, is
        converted from the raw comma-separated string into a list of
        integer model indices.
    """
    parser = OptionParser(usage=usage)
#    parser.add_option("-r","--representation-file", action="store", type="string",
#                      dest="representationFile",
#                      help="representation file explaning HGM representation of the complex")
    parser.add_option("-c", "--coordinates-file", action="store", type="string",
                      dest="coordinatesFile",
                      help="path to the file hosting model coordinates for the given representation")
    parser.add_option("-P", "--path", "--pdb-path", action="store", type="string",
                      dest="pdbPath", default=".",
                      help="path to a directory where to dump the pdb files")
    parser.add_option("-p", "--prefix", "--pdb-prefix", action="store", type="string",
                      dest="pdbPrefix", default="HGM-model",
                      help="filename prefix for the pdb files to output")
    parser.add_option("-m", "--map", "--chain-map-file", action="store", type="string",
                      dest="cplxChainMapFile",
                      help="Path to a chain map file that allows for mapping of subunits or domains to a chain ID")
    # BUGFIX: the default used to be the *string* 'False', which is truthy,
    # so options.verbose evaluated as True even without -v.  Use the real
    # boolean False.
    parser.add_option("-v", "--verbose", action="store_true", default=False,
                      dest="verbose",
                      help="verbose mode")
    parser.add_option("-l", "--level", "--chain-level", action="store", type="string",
                      dest="level", default="subunit",
                      help="control the chain 'level' in the produced pdb file (a chain per 'subunit', per 'domain', or for the whole 'complex')")
    parser.add_option("-i", "--indices", action="store", type="string",
                      dest="indices",
                      help="allows one to select a specific subset of indices in the coordinates file (beware, starts with index 0)")

    options, args = parser.parse_args(sys.argv[1:])

    # Mandatory options #
    class MandatoryOptionException(Exception):
        def __init__(self, message):
            Exception.__init__(self, message)

    try:
        # getattr() keeps this check safe even while the -r option above is
        # commented out (the 'representationFile' attribute then does not
        # exist on the options object).
        if getattr(options, "representationFile", None) is None:
            raise MandatoryOptionException("Should provide a representation file")
        if options.coordinatesFile is None:
            raise MandatoryOptionException("Should provide a coordinates file")
    except MandatoryOptionException as e:
        sys.stderr.write(str(e) + "\n\n")
        parser.print_usage()
        sys.exit(1)

    # let's replace the nasty index specification string with a list of integers
    if options.indices is not None:
        options.indices = HGM2.files.samples.get_indices_from_string(options.indices)

    return options
def cli(): parser = OptionParser(usage='%prog [options] <test_file> <test_file> ...') parser.add_option("--binary", action="store", dest="binary", default=None, help="path to B2G desktop build binary") parser.add_option("--profile", action="store", dest="profile", default=None, help="path to gaia profile directory") options, tests = parser.parse_args() if not options.binary or not options.profile: parser.print_usage() parser.exit('--binary and --profile required') if not tests: # Read in a list of tests to skip from disabled.json, if it exists; # disabled.json should contain filenames with paths relative to the # apps directory, e.g., "wallpaper/test/unit/pick_test.js". disabled = [] disabled_file = os.path.join(os.path.dirname(__file__), 'disabled.json') if os.access(disabled_file, os.F_OK): with open(disabled_file, 'r') as f: disabled_contents = f.read() try: disabled = json.loads(disabled_contents) except: traceback.print_exc() print "Error while decoding disabled.json; please make sure this file has valid JSON syntax." sys.exit(1) # build a list of tests appsdir = os.path.join( os.path.dirname(os.path.abspath(options.profile)), 'apps') for root, dirs, files in os.walk(appsdir): for file in files: # only include tests in a 'unit' directory if os.path.basename(root) == 'unit': full_path = os.path.relpath(os.path.join(root, file), appsdir) if full_path.endswith( '_test.js') and full_path not in disabled: tests.append(full_path) runner = GaiaUnitTestRunner(binary=options.binary, profile=options.profile) runner.run() # Lame but necessary hack to prevent tornado's logger from duplicating # every message from mozlog. 
logger = logging.getLogger() handler = logging.NullHandler() logger.addHandler(handler) print 'starting WebSocket Server' application = tornado.web.Application([ (r"/", TestAgentServer, { 'tests': tests, 'runner': runner, 'logger': mozlog.getLogger('gaia-unit-tests') }), ]) http_server = tornado.httpserver.HTTPServer(application) http_server.listen(8789) tornado.ioloop.IOLoop.instance().start()
    def __init__(self):
        """Parse the command line, build the Qt application, the HAL
        component and the VCP window, then run the Qt event loop.

        Handles both launch modes: a full linuxcnc screen (linuxcnc appends
        '-ini <path>' to the command line) and a standalone VCP panel.
        Blocks in app.exec_() until shutdown.
        """
        sys.excepthook = self.excepthook
        INIPATH = None

        usage = "usage: %prog [options] myfile.ui"
        parser = OptionParser(usage=usage)
        parser.disable_interspersed_args()
        # 'options' here is the module-level list of optparse option
        # objects defined elsewhere in this file.
        parser.add_options(options)
        # remove [-ini filepath] that linuxcnc adds if being launched as a screen
        # keep a reference of that path
        for i in range(len(sys.argv)):
            if sys.argv[i] == '-ini':
                # delete -ini
                del sys.argv[i]
                # pop out the ini path
                INIPATH = sys.argv.pop(i)
                break
        (opts, args) = parser.parse_args()

        # initialize QApp so we can pop up dialogs now.
        self.app = QtWidgets.QApplication(sys.argv)

        # we import here so that the QApp is initialized before
        # the Notify library is loaded because it uses DBusQtMainLoop
        # DBusQtMainLoop must be initialized after to work properly
        from qtvcp import qt_makepins, qt_makegui

        # ToDo: pass specific log levels as an argument, or use an INI setting
        if not opts.debug:
            # Log level defaults to DEBUG, so set higher if not debug
            logger.setGlobalLevel(logger.INFO)

        # a specific path has been set to load from or...
        # no path set but -ini is present: default qtvcp screen...or
        # oops error
        if args:
            basepath = args[0]
        elif INIPATH:
            basepath = "qt_cnc"
        else:
            # NOTE(review): this branch leaves 'basepath' unbound, so the
            # unconditional set_paths(basepath, ...) call below would raise
            # NameError -- confirm the intended no-args behaviour.
            PATH.set_paths()
        # set paths using basename
        PATH.set_paths(basepath, bool(INIPATH))

        #################
        # Screen specific
        #################
        if INIPATH:
            LOG.info('green<Building A Linuxcnc Main Screen>')
            import linuxcnc
            # internationalization and localization
            import locale, gettext
            # pull info from the INI file
            self.inifile = linuxcnc.ini(INIPATH)
            self.inipath = INIPATH
            # screens require more path info
            PATH.add_screen_paths()

            # International translation
            locale.setlocale(locale.LC_ALL, '')
            locale.bindtextdomain(PATH.DOMAIN, PATH.LOCALEDIR)
            gettext.install(PATH.DOMAIN, localedir=PATH.LOCALEDIR, unicode=True)
            gettext.bindtextdomain(PATH.DOMAIN, PATH.LOCALEDIR)

            # if no handler file specified, use stock test one
            if not opts.usermod:
                LOG.info('No handler file specified on command line')
                target = os.path.join(PATH.CONFIGPATH,
                                      '%s_handler.py' % PATH.BASENAME)
                source = os.path.join(PATH.SCREENDIR,
                                      'tester/tester_handler.py')
                if PATH.HANDLER is None:
                    # NOTE(review): "handker" below is a typo in a
                    # user-visible dialog string (left untouched here).
                    message = ("""
Qtvcp encountered an error; No handler file was found.
Would you like to copy a basic handler file into your config folder?
This handker file will allow display of your screen and basic keyboard jogging.

The new handlerfile's path will be:
%s

Pressing cancel will close linuxcnc.""" % target)
                    rtn = QtWidgets.QMessageBox.critical(
                        None, "QTVCP Error", message,
                        QtWidgets.QMessageBox.Ok | QtWidgets.QMessageBox.Cancel)
                    if rtn == QtWidgets.QMessageBox.Ok:
                        try:
                            shutil.copy(source, target)
                        except IOError as e:
                            LOG.critical("Unable to copy handler file. %s" % e)
                            sys.exit(0)
                        except:
                            LOG.critical(
                                "Unexpected error copying handler file:",
                                sys.exc_info())
                            sys.exit(0)
                        # from now on behave as if the user passed the
                        # freshly-copied handler on the command line
                        opts.usermod = PATH.HANDLER = target
                    else:
                        LOG.critical(
                            'No handler file found or specified. User requested stopping')
            else:
                opts.usermod = PATH.HANDLER

            # specify the HAL component name if missing
            if opts.component is None:
                LOG.info('No HAL component base name specified on command line using: {}'.format(PATH.BASENAME))
                opts.component = PATH.BASENAME

        #################
        # VCP specific
        #################
        else:
            LOG.info('green<Building A VCP Panel>')
            # if no handler file specified, use stock test one
            if not opts.usermod:
                LOG.info('No handler file specified - using {}'.format(PATH.HANDLER))
                opts.usermod = PATH.HANDLER
            # specify the HAL component name if missing
            if opts.component is None:
                LOG.info('No HAL component base name specified - using: {}'.format(PATH.BASENAME))
                opts.component = PATH.BASENAME

        ##############
        # Build ui
        ##############
        #if there was no component name specified use the xml file name
        if opts.component is None:
            opts.component = PATH.BASENAME

        # initialize HAL
        try:
            self.halcomp = hal.component(opts.component)
            self.hal = QComponent(self.halcomp)
        except:
            LOG.critical(
                "Asking for a HAL component using a name that already exists?")
            sys.exit(0)

        # initialize the window
        window = qt_makegui.VCPWindow(self.hal, PATH)

        # load optional user handler file
        if opts.usermod:
            LOG.debug('Loading the handler file')
            window.load_extension(opts.usermod)
            # do any class patching now
            if "class_patch__" in dir(window.handler_instance):
                window.handler_instance.class_patch__()

        # add filter to catch keyboard events
        LOG.debug('Adding the key events filter')
        myFilter = qt_makegui.MyEventFilter(window)
        self.app.installEventFilter(myFilter)

        # actually build the widgets
        window.instance()

        # make QT widget HAL pins
        self.panel = qt_makepins.QTPanel(self.hal, PATH, window, opts.debug)

        # call handler file's initialized function
        if opts.usermod:
            if "initialized__" in dir(window.handler_instance):
                LOG.debug('''Calling the handler file's initialized__ function''')
                window.handler_instance.initialized__()

        # All Widgets should be added now - synch them to linuxcnc
        STATUS.forced_update()

        # User components are set up so report that we are ready
        LOG.debug('Set HAL ready')
        self.halcomp.ready()

        # embed us into an X11 window (such as AXIS)
        if opts.parent:
            window = xembed.reparent_qt_to_x11(window, opts.parent)
            forward = os.environ.get('AXIS_FORWARD_EVENTS_TO', None)
            LOG.critical('Forwarding events to AXIS is not well tested yet')
            if forward:
                xembed.XEmbedFowarding(window, forward)

        # push the window id for embedment into an external program
        if opts.push_XID:
            wid = int(window.winId())
            print >> sys.stdout, wid
            sys.stdout.flush()

        # for window resize and or position options
        if "+" in opts.geometry:
            LOG.debug('-g option: moving window')
            try:
                j = opts.geometry.partition("+")
                pos = j[2].partition("+")
                window.move(int(pos[0]), int(pos[2]))
            except:
                LOG.critical("With window position data")
                parser.print_usage()
                sys.exit(1)
        if "x" in opts.geometry:
            LOG.debug('-g option: resizing')
            try:
                if "+" in opts.geometry:
                    j = opts.geometry.partition("+")
                    t = j[0].partition("x")
                else:
                    # NOTE(review): 'window_geometry' is not defined in this
                    # method (probably meant opts.geometry); the bare except
                    # below masks the resulting NameError as a resize error.
                    t = window_geometry.partition("x")
                window.resize(int(t[0]), int(t[2]))
            except:
                LOG.critical("With window resize data")
                parser.print_usage()
                sys.exit(1)

        # always on top
        if opts.always_top:
            window.setWindowFlags(QtCore.Qt.WindowStaysOnTopHint)

        # theme (styles in QT speak) specify a qss file
        if opts.theme:
            window.apply_styles(opts.theme)
        # appy qss file or default theme
        else:
            window.apply_styles()

        # title
        if INIPATH:
            title = 'QTvcp-Screen-%s' % opts.component
        else:
            title = 'QTvcp-Panel-%s' % opts.component
        window.setWindowTitle(title)

        LOG.debug('Show window')
        # maximize
        if opts.maximum:
            window.showMaximized()
        # fullscreen
        elif opts.fullscreen:
            window.showFullScreen()
        else:
            self.panel.set_preference_geometry()
            window.show()

        if INIPATH:
            self.postgui()

        # catch control c and terminate signals
        signal.signal(signal.SIGTERM, self.shutdown)
        signal.signal(signal.SIGINT, self.shutdown)

        if opts.usermod and "before_loop__" in dir(window.handler_instance):
            LOG.debug('''Calling the handler file's before_loop__ function''')
            window.handler_instance.before_loop__()

        LOG.info('Preference path: {}'.format(PATH.PREFS_FILENAME))
        # start loop
        self.app.exec_()

        # now shut it all down
        self.shutdown()
def print_usage(self, file=None): from calibre.utils.terminal import ANSIStream s = ANSIStream(file) _OptionParser.print_usage(self, file=s)
action='store_true', help="Perform a destructive change") option_parser.add_option( '--newfile', default=None, action='store', help="Create a new database file without removing the existing one") option_parser.add_option( '--nobackup', action='store_true', help='Delete the .bak file after successful execution') (options, args) = option_parser.parse_args() if len(args) < 2: option_parser.print_usage() sys.exit(1) path = args[0] new_archives = [parseRetentionDef(retentionDef) for retentionDef in args[1:]] info = whisper.info(path) old_archives = info['archives'] old_archives.sort(key=lambda a: a['secondsPerPoint'], reverse=True) #sort by precision, lowest to highest if options.xFilesFactor is None: xff = info['xFilesFactor'] else: xff = options.xFilesFactor
def parse_options():
    """Parse and validate the Marionette test-runner command line.

    Returns:
        (options, tests): the optparse options object and the list of
        positional test files/dirs.  Exits via parser.exit() when no tests
        are given or when none of --binary/--emulator/--address is set.

    Raises:
        ValueError: when --emulator-res is not of the form "<int>x<int>".
    """
    parser = OptionParser(
        usage='%prog [options] test_file_or_dir <test_file_or_dir> ...')
    parser.add_option("--autolog",
                      action="store_true", dest="autolog",
                      default=False,
                      help="send test results to autolog")
    parser.add_option("--revision",
                      action="store", dest="revision",
                      help="git revision for autolog/perfdata submissions")
    parser.add_option("--testgroup",
                      action="store", dest="testgroup",
                      help="testgroup names for autolog submissions")
    parser.add_option(
        "--emulator",
        action="store", dest="emulator",
        default=None, choices=["x86", "arm"],
        help="If no --address is given, then the harness will launch a B2G emulator "
        "on which to run emulator tests. If --address is given, then the harness assumes you are "
        "running an emulator already, and will run the emulator tests using that emulator. "
        "You need to specify which architecture to emulate for both cases.")
    parser.add_option("--emulator-binary",
                      action="store", dest="emulatorBinary",
                      default=None,
                      help="Launch a specific emulator binary rather than "
                      "launching from the B2G built emulator")
    parser.add_option('--emulator-img',
                      action='store', dest='emulatorImg',
                      default=None,
                      help="Use a specific image file instead of a fresh one")
    parser.add_option('--emulator-res',
                      action='store', dest='emulator_res',
                      default=None, type='str',
                      help='Set a custom resolution for the emulator. '
                      'Example: "480x800"')
    parser.add_option("--no-window",
                      action="store_true", dest="noWindow",
                      default=False,
                      help="when Marionette launches an emulator, start it "
                      "with the -no-window argument")
    parser.add_option('--logcat-dir', dest='logcat_dir',
                      action='store',
                      help='directory to store logcat dump files')
    parser.add_option('--address', dest='address',
                      action='store',
                      help='host:port of running Gecko instance to connect to')
    parser.add_option('--device', dest='device',
                      action='store',
                      help='serial ID of a device to use for adb / fastboot')
    parser.add_option(
        '--type', dest='type',
        action='store', default='browser+b2g',
        help="The type of test to run, can be a combination "
        "of values defined in unit-tests.ini; individual values "
        "are combined with '+' or '-' chars. Ex: 'browser+b2g' "
        "means the set of tests which are compatible with both "
        "browser and b2g; 'b2g-qemu' means the set of tests "
        "which are compatible with b2g but do not require an "
        "emulator. This argument is only used when loading "
        "tests from .ini files.")
    parser.add_option('--homedir', dest='homedir',
                      action='store',
                      help='home directory of emulator files')
    parser.add_option('--app', dest='app',
                      action='store', default=None,
                      help='application to use')
    parser.add_option(
        '--binary', dest='bin',
        action='store',
        help='gecko executable to launch before running the test')
    parser.add_option(
        '--profile', dest='profile',
        action='store',
        help='profile to use when launching the gecko process. If not '
        'passed, then a profile will be constructed and used.')
    parser.add_option('--perf', dest='perf',
                      action='store_true', default=False,
                      help='send performance data to perf data server')
    parser.add_option(
        '--perf-server', dest='perfserv',
        action='store', default=None,
        help='dataserver for perf data submission. Entering this value '
        'will overwrite the perfserv value in any passed .ini files.')
    parser.add_option('--repeat', dest='repeat',
                      action='store', type=int, default=0,
                      help='number of times to repeat the test(s).')
    parser.add_option('-x', '--xml-output',
                      action='store', dest='xml_output',
                      help='XML output.')
    parser.add_option('--gecko-path', dest='gecko_path',
                      action='store', default=None,
                      help='path to B2G gecko binaries that should be '
                      'installed on the device or emulator')
    parser.add_option('--testvars', dest='testvars',
                      action='store', default=None,
                      help='path to a JSON file with any test data required')
    # NOTE(review): 'revsion' below is a typo in a runtime help string
    # (left untouched here).
    parser.add_option('--tree', dest='tree',
                      action='store', default='b2g',
                      help='the tree that the revsion parameter refers to')
    parser.add_option('--symbols-path', dest='symbols_path',
                      action='store', default=None,
                      help='absolute path to directory containing breakpad '
                      'symbols, or the URL of a zip file containing symbols')

    options, tests = parser.parse_args()

    if not tests:
        parser.print_usage()
        parser.exit()

    if not options.emulator and not options.address and not options.bin:
        parser.print_usage()
        print "must specify --binary, --emulator or --address"
        parser.exit()

    # default to storing logcat output for emulator runs
    if options.emulator and not options.logcat_dir:
        options.logcat_dir = 'logcat'

    # fail early (ImportError) when perf submission is requested but the
    # datazilla client is not installed
    if options.perf:
        import datazilla

    # check for valid resolution string, strip whitespaces
    # NOTE(review): the bare except converts *any* failure (including
    # KeyboardInterrupt) into ValueError -- consider narrowing.
    try:
        if options.emulator_res:
            dims = options.emulator_res.split('x')
            assert len(dims) == 2
            width = str(int(dims[0]))
            height = str(int(dims[1]))
            options.emulator_res = 'x'.join([width, height])
    except:
        raise ValueError('Invalid emulator resolution format. '
                         'Should be like "480x800".')

    return (options, tests)
optparser.add_option("-n", "--num-occults", dest="numOccults", help="Number of occults to visit", default=10, type="int") optparser.add_option("-t", "--obs-time", dest="obsTime", help="Epidemic observation time", default=1e10, type="float") (options, args) = optparser.parse_args() if len(args) != 3: print optparser.print_usage() sys.exit(1) # First get top n occults occFile = open(args[1], 'r') occults = [] for line in occFile: toks = line.split() if float(toks[1] ) < 1.0: # Ignore occProb == 1 as these are known infections occults.append(Occult(toks[0], toks[1])) occFile.close() occults = sorted(occults, reverse=True) occults = occults[0:options.numOccults]
def main():
    """
    Main CLI handler.

    Parses the command line, decides where Syrupy's own output, the raw ps
    log and the profiled command's stdout/stderr go, then either polls an
    existing process (-p/-c/-m) via profile_process() or launches and
    profiles COMMAND via profile_command().
    """
    default_title = "syrupy_" + pretty_timestamp(style=1)
    parser = OptionParser(usage=_program_usage,
        add_help_option=True,
        version=_program_version,
        description=_program_description)
    parser.add_option('-q', '--quiet',
        action='store_true',
        dest='quiet',
        default=False,
        help='do not report miscellaneous run information to stderr')
    parser.add_option('-r', '--replace',
        action='store_true',
        dest='replace',
        default=False,
        help='replace output file(s) without asking if already exists')
    parser.add_option('-t', '--title',
        action='store',
        dest='title',
        metavar="PROCESS-TITLE",
        default=default_title,
        help="name for this run (will be used as prefix for all output files); defaults to 'syrupy_<TIMESTAMP>'")
    parser.add_option('-v', '--debug-level',
        action='store',
        type='int',
        dest='debug',
        metavar="#",
        default=0,
        help='debugging information level (0, 1, 2, 3; default=%default)')
    parser.add_option('--explain',
        action='store_true',
        dest='explain',
        default=False,
        help='show detailed information on the meaning of each of the columns, ' \
            +'and then exit')

    process_opts = OptionGroup(parser, 'Process Selection', """\
By default, Syrupy tracks the process resulting from executing COMMAND.
You can also instruct Syrupy to track external processes by using the
following options, each of which specify a criteria that a particular
process must meet so as to be monitored. Syrupy will report the resource
usage of any and all processes that meet the specified criteria, and will
exit when no processes matching all the criteria are found. If no
processes matching all the criteria are actually already running when
Syrupy starts, then Syrupy exits immediately. Note that an instance of
Syrupy automatically excludes its own process from being tracked by
itself.
        """
        )
    parser.add_option_group(process_opts)
    process_opts.add_option('-p', '--poll-pid', '--pid',
        action='store',
        dest='poll_pid',
        default=None,
        metavar='PID',
        type=int,
        help='ignore COMMAND if given, and poll external process with ' \
            +'specified PID')
    process_opts.add_option('-s', '--ssh',
        action='store',
        dest='ssh',
        default=None,
        metavar='SSH',
        type=str,
        help='use SSH to remote view PS with syrupy')
    process_opts.add_option('-m', '--poll-top-memory', '--mem',
        action='store',
        dest='poll_mem',
        default=None,
        metavar='MEM',
        type=int,
        help='ignore COMMAND if given and poll top MEM processes by memory usage')
    process_opts.add_option('-c', '--poll-command',
        action='store',
        dest='poll_command',
        default=None,
        metavar='REG-EXP',
        help='ignore COMMAND if given, and poll external process with ' \
            +'command matching specified regular expression pattern')

    polling_opts = OptionGroup(parser, 'Polling Regime')
    parser.add_option_group(polling_opts)
    polling_opts.add_option('-i', '--interval',
        action='store',
        dest='poll_interval',
        default=1,
        metavar='#.##',
        type=float,
        help='polling interval in seconds (default=%default)')

    run_output_opts = OptionGroup(parser, 'Output Modes', """\
By default, Syrupy redirects the standard output and standard error of
COMMAND, as well as its own output, to log files. The following options
allow you to change this behavior, either having Syrupy write to standard
output and standard error ('-S'), COMMAND write to standard output and
standard error ('-C'), or suppress all COMMAND output altogether ('-N').
        """
        )
    parser.add_option_group(run_output_opts)
    run_output_opts.add_option('-S', '--syrupy-in-front',
        action='store_true',
        dest='syrupy_in_front',
        default=False,
        help='redirect Syrupy output and miscellaneous information to ' \
            +'standard output and standard error instead of logging to files')
    run_output_opts.add_option('-C', '--command-in-front',
        action='store_true',
        dest='command_in_front',
        default=False,
        help='run COMMAND in foreground: send output and error stream of' \
            +' COMMAND to standard output and standard error, respectively')
    run_output_opts.add_option('-N', '--no-command-output',
        action='store_true',
        dest='suppress_command_output',
        default=False,
        help='suppress all output from COMMAND')
    run_output_opts.add_option('--flush-output',
        action='store_true',
        dest='flush_output',
        default=False,
        help='force flushing of stream buffers after every write')
    run_output_opts.add_option('--no-raw-process-log',
        action='store_true',
        dest='suppress_raw_process_log',
        default=False,
        help='suppress writing of raw results from process sampling')

    formatting_opts = OptionGroup(parser, 'Output Formatting')
    parser.add_option_group(formatting_opts)
    formatting_opts.add_option('--show-command',
        action='store_true',
        dest='show_command',
        default=False,
        help='show command column in output')
    formatting_opts.add_option('--separator',
        action='store',
        dest='separator',
        default=" ",
        metavar="SEPARATOR",
        help='character(s) to used to separate columns in results')
    formatting_opts.add_option('--no-align',
        action='store_false',
        dest='align',
        default=True,
        help='do not align/justify columns')
    formatting_opts.add_option('--no-headers',
        action='store_false',
        dest='headers',
        default=True,
        help='do not output column headers')

    # we need to do this to prevent options meant for COMMAND
    # being consumed by Syrupy
    parser.disable_interspersed_args()

    (opts, args) = parser.parse_args()

    if opts.explain:
        sys.stdout.write(column_help())
        sys.stdout.write("\n")
        sys.exit(0)

    # nothing to profile: no COMMAND and no external-process selector
    if len(args) == 0 \
            and opts.poll_pid is None \
            and opts.poll_command is None \
            and opts.poll_mem is None:
        parser.print_usage()
        sys.exit(1)

    # NOTE(review): opts.title defaults to default_title above and is never
    # None, so the first branch here appears to be dead code.
    if opts.title is None and len(args) > 0:
        base_title = os.path.splitext(os.path.basename(args[0]))[0]
    else:
        base_title = opts.title

    # Where Syrupy's own sample table goes: stdout (-S) or <title>.ps.log.
    if opts.syrupy_in_front:
        syrupy_output = sys.stdout
    else:
        fname = base_title + ".ps.log"
        if not opts.quiet:
            sys.stderr.write("SYRUPY: Writing process resource usage samples to '%s'\n" % fname)
        syrupy_output = open_file(fname, "w", replace=opts.replace)

    # Optional raw ps output log.
    if opts.suppress_raw_process_log:
        raw_ps_log = None
    else:
        fname = base_title + ".ps.raw"
        if not opts.quiet:
            sys.stderr.write("SYRUPY: Writing raw process resource usage logs to '%s'\n" % fname)
        # NOTE(review): recomputes the filename instead of reusing fname --
        # harmless but redundant.
        raw_ps_log = open_file(base_title + ".ps.raw", "w", replace=opts.replace)

    # External-process polling mode (-p / -c / -m): no COMMAND is run.
    if opts.poll_pid is not None or opts.poll_command is not None or opts.poll_mem is not None:
        if not opts.quiet:
            if opts.poll_pid is not None:
                sys.stderr.write("SYRUPY: sampling process %d\n" % opts.poll_pid)
            elif opts.poll_mem is not None:
                sys.stderr.write("SYRUPY: sampling top %d processes by memory usage\n" % opts.poll_mem)
            else:
                sys.stderr.write("SYRUPY: sampling process with command pattern '%s'\n" % opts.poll_command)
        profile_process(pid=opts.poll_pid,
            command_pattern=opts.poll_command,
            top_mem=opts.poll_mem,
            syrupy_output=syrupy_output,
            raw_ps_log=raw_ps_log,
            poll_interval=opts.poll_interval,
            quit_poll_func=None,
            ssh_id=opts.ssh,
            has_ssh=True if opts.ssh else False,
            quit_if_none=True if opts.poll_pid else False,
            quit_at_time=None,
            show_command=opts.show_command,
            output_separator=opts.separator,
            align=opts.align,
            headers=opts.headers,
            flush_output=opts.flush_output,
            debug_level=opts.debug)
    else:
        # COMMAND mode: all remaining args form the command line to run.
        command = args
        if not opts.quiet:
            sys.stderr.write("SYRUPY: Executing command '%s'\n" % (" ".join(command)))
        # Route COMMAND's stdout/stderr per the output-mode options.
        if opts.suppress_command_output:
            command_stdout = open(os.devnull, "w")
            command_stderr = open(os.devnull, "w")
            if not opts.quiet:
                sys.stderr.write("SYRUPY: Suppressing output of command\n")
        elif opts.command_in_front:
            command_stdout = sys.stdout
            command_stderr = sys.stderr
        else:
            cout = base_title + ".out.log"
            cerr = base_title + ".err.log"
            if not opts.quiet:
                sys.stderr.write("SYRUPY: Redirecting command output stream to '%s'\n" % cout)
            command_stdout = open_file(cout, 'w', replace=opts.replace)
            if not opts.quiet:
                sys.stderr.write("SYRUPY: Redirecting command error stream to '%s'\n" % cerr)
            command_stderr = open_file(cerr, 'w', replace=opts.replace)
        start_time, end_time = profile_command(command=command,
            command_stdout=command_stdout,
            command_stderr=command_stderr,
            syrupy_output=syrupy_output,
            raw_ps_log=raw_ps_log,
            poll_interval=opts.poll_interval,
            show_command=opts.show_command,
            output_separator=opts.separator,
            align=opts.align,
            headers=opts.headers,
            flush_output=opts.flush_output,
            debug_level=opts.debug)
        if not opts.quiet:
            # final wall-clock summary on stderr
            final_run_report = []
            final_run_report.append("SYRUPY: Completed running: %s" % (" ".join(command)))
            final_run_report.append("SYRUPY: Started at %s" % (start_time.isoformat(' ')))
            final_run_report.append("SYRUPY: Ended at %s" % (end_time.isoformat(' ')))
            hours, mins, secs = str(end_time-start_time).split(":")
            run_time = "SYRUPY: Total run time: %s hour(s), %s minute(s), %s second(s)" % (hours, mins, secs)
            final_run_report.append(run_time)
            report = "\n".join(final_run_report) + "\n"
            sys.stderr.write(report)
def main(): """Check if VMs are still valid.""" parser = OptionParser(usage='Usage: %%prog [options] [uri]') parser.add_option( '-v', '--verbose', action='count', dest='verbose', default=0, help='Increase verbosity') parser.add_option( '-g', '--dot', action='store_true', dest='dot', default=False, help='Generate dot graph') parser.add_option( '-a', '--all', action='store_true', dest='show_all', default=False, help='Show all resources') parser.add_option( '-u', '--unused', action='store_true', dest='show_unused', default=False, help='Show unused resources') options, arguments = parser.parse_args() logging.basicConfig(level={ 0: logging.CRITICAL, 1: logging.ERROR, 2: logging.WARNING, 3: logging.INFO, 4: logging.DEBUG, 5: logging.NOTSET, }.get(options.verbose, logging.NOTSET)) try: url = arguments[0] except IndexError: if os.path.exists('/dev/kvm'): url = 'qemu:///system' else: parser.print_usage(sys.stderr) sys.exit(2) libvirt.registerErrorHandler(lambda f, ctx: None, None) conn = libvirt.open(url) try: # volumes first because this is more detailed check_storage_pools(conn) check_virtual_machines(conn) check_storage_volumes(conn) finally: conn.close() # Validate all resources for res in list(Resource.all.values()): res.check_valid() # Print all resources filtered = set() for res in Resource.all.values(): if options.show_all or \ options.show_unused and not res.used or \ not res.valid: filtered.add(res) text = '// %s' % (res.console(),) print(text) if options.dot: if not options.show_all: filtered = resource_closure(filtered) print_dot(filtered)
def main():
    """Command-line entry point for profiling a script with yappi.

    Usage: yappi.py [-b] [-o output_file] [-f output_format] [-s]
           [scriptfile] args ...

    Starts the profiler, executes the target script in the caller's
    global/local namespace, then writes the collected statistics either
    to a file (-o/-f) or to stdout.
    """
    from optparse import OptionParser
    usage = "yappi.py [-b] [-o output_file] [-f output_format] [-s] [scriptfile] args ..."
    parser = OptionParser(usage=usage)
    parser.allow_interspersed_args = False
    parser.add_option(
        "-b",
        "--builtins",
        action="store_true",
        dest="profile_builtins",
        default=False,
        help="Profiles builtin functions when set. [default: False]")
    parser.add_option("-o",
                      "--output-file",
                      metavar="output_file",
                      help="Write stats to output_file.")
    parser.add_option(
        "-f",
        "--output-format",
        default="pstat",
        choices=("pstat", "callgrind", "ystat"),
        metavar="output_format",
        help="Write stats in the specified"
        "format (\"pstat\", \"callgrind\" or \"ystat\", default is "
        "\"pstat\").")
    parser.add_option(
        "-s",
        "--single_thread",
        action="store_true",
        dest="profile_single_thread",
        default=False,
        help="Profiles only the thread that calls start(). [default: False]")
    if not sys.argv[1:]:
        parser.print_usage()
        sys.exit(2)

    (options, args) = parser.parse_args()
    # Make the profiled script see only its own arguments.
    sys.argv[:] = args
    if (len(sys.argv) > 0):
        sys.path.insert(0, os.path.dirname(sys.argv[0]))
        start(options.profile_builtins, not options.profile_single_thread)
        try:
            if sys.version_info >= (3, 0):
                # BUGFIX: close the script file deliberately; the original
                # open(...).read() leaked the file handle.
                with open(sys.argv[0]) as script_file:
                    code = compile(script_file.read(), sys.argv[0], 'exec')
                exec(code, sys._getframe(1).f_globals,
                     sys._getframe(1).f_locals)
            else:
                execfile(sys.argv[0], sys._getframe(1).f_globals,
                         sys._getframe(1).f_locals)
        finally:
            # Always stop profiling, even if the script raises.
            stop()
        if options.output_file:
            stats = get_func_stats()
            stats.save(options.output_file, options.output_format)
        else:
            # we will currently use default params for these
            get_func_stats().print_all()
            get_thread_stats().print_all()
    else:
        parser.print_usage()
def main(args=None): """command line front-end function""" # parse command line arguments args = args or sys.argv[1:] usage = "Usage: %prog [options] [destination]" parser = OptionParser(usage=usage) parser.add_option('--develop', dest='develop', action='store_true', default=False, help='setup in development mode') options, args = parser.parse_args(args) # Print the python version print 'Python: %s' % sys.version # The data is kept in the same directory as the script source=os.path.abspath(os.path.dirname(__file__)) # directory to install to if not len(args): destination = source elif len(args) == 1: destination = os.path.abspath(args[0]) else: parser.print_usage() parser.exit(1) os.chdir(source) # check for existence of necessary files if not os.path.exists('virtualenv'): print "File not found: virtualenv" sys.exit(1) PACKAGES_FILE = 'PACKAGES' if not os.path.exists(PACKAGES_FILE) and destination != source: PACKAGES_FILE = os.path.join(destination, PACKAGES_FILE) if not os.path.exists(PACKAGES_FILE): print "File not found: PACKAGES" # packages to install in dependency order PACKAGES=file(PACKAGES_FILE).read().split() assert PACKAGES # create the virtualenv and install packages env = os.environ.copy() env.pop('PYTHONHOME', None) returncode = call([sys.executable, os.path.join('virtualenv', 'virtualenv.py'), destination], env=env) if returncode: print 'Failure to install virtualenv' sys.exit(returncode) if options.develop: python = entry_point_path(destination, 'python') for package in PACKAGES: oldcwd = os.getcwd() os.chdir(package) returncode = call([python, 'setup.py', 'develop']) os.chdir(oldcwd) if returncode: break else: pip = entry_point_path(destination, 'pip') returncode = call([pip, 'install'] + PACKAGES, env=env) if returncode: print 'Failure to install packages' sys.exit(returncode)
def main():
    """Entry point for the package-push tool: parse options, then list,
    sync-check, copy or upload packages via an UploadClass instance."""
    # Initialize a command-line processing object with a table of options
    optionsTable = [
        Option('-v', '--verbose', action='count',
               help='Increase verbosity'),
        Option('-d', '--dir', action='store',
               help='Process packages from this directory'),
        Option('-L', '--cache-locally', action='store_true',
               help='Locally cache packages so that Proxy will not ever need to ' +
                    'download them. Changes nothing on the upstream server.'),
        Option('-e', '--from-export', action='store', dest='export_location',
               help='Process packages from this channel export. Can only be used ' +
                    'with --cache-locally or --copyonly.'),
        Option('-c', '--channel', action='append',
               help='Channel to operate on. When used with --from-export ' +
                    'specifies channels to cache rpms for, else specifies channels ' +
                    'that we will be pushing into.'),
        Option('-n', '--count', action='store',
               help='Process this number of headers per call', type='int'),
        Option('-l', '--list', action='store_true',
               help='Only list the specified channels'),
        Option('-s', '--sync', action='store_true',
               help='Check if in sync with the server'),
        Option('-p', '--printconf', action='store_true',
               help='Print the configuration and exit'),
        Option('-X', '--exclude', action="append",
               help="Exclude packages that match this glob expression"),
        Option('--newest', action='store_true',
               help='Only push the files that are newer than the server ones'),
        Option('--stdin', action='store_true',
               help='Read the package names from stdin'),
        Option('--nosig', action='store_true',
               help="Push unsigned packages"),
        Option('--username', action='store',
               help='Use this username to connect to RHN'),
        Option('--password', action='store',
               help='Use this password to connect to RHN'),
        Option('--source', action='store_true',
               help='Upload source package headers'),
        Option('--dontcopy', action='store_true',
               help='Do not copy packages to the local directory'),
        Option('--copyonly', action='store_true',
               help="Only copy packages; don't reimport. Same as --cache-locally"),
        Option('--test', action='store_true',
               help='Only print the packages to be pushed'),
        Option('-N', '--new-cache', action='store_true',
               help='Create a new username/password cache'),
        Option('--no-ssl', action='store_true',
               help='Turn off SSL (not recommended).'),
        Option('--no-session-caching', action='store_true',
               help='Disables session-token authentication.'),
        Option('-?', '--usage', action='store_true',
               help="Briefly describe the options"),
    ]
    # Process the command line arguments
    optionParser = OptionParser(option_list=optionsTable,
                                usage="USAGE: %prog [OPTION] [<package>]")
    options, files = optionParser.parse_args()
    upload = UploadClass(options, files=files)

    # Informational modes return (or exit) without pushing anything.
    if options.usage:
        optionParser.print_usage()
        sys.exit(0)
    if options.printconf:
        CFG.show()
        return
    if options.list:
        upload.list()
        return
    if options.sync:
        upload.checkSync()
        return

    # It's just an alias to copyonly
    if options.cache_locally:
        options.copyonly = True
    # remeber to process dir option before export, export can overwrite dir
    if options.dir:
        upload.directory()
    if options.export_location:
        if not options.copyonly:
            upload.die(0, "--from-export can only be used with --cache-locally" +
                          " or --copyonly")
        if options.source:
            upload.die(0, "--from-export cannot be used with --source")
        upload.from_export()
    if options.stdin:
        upload.readStdin()

    # if we're going to allow the user to specify packages by dir *and* export
    # *and* stdin *and* package list (why not?) then we have to uniquify
    # the list afterwards. Sort just for user-friendly display.
    upload.files = sorted(list(set(upload.files)))

    if options.copyonly:
        if not upload.files:
            upload.die(0, "Nothing to do; exiting. Try --help")
        if options.test:
            upload.test()
            return
        upload.copyonly()
        return

    if options.exclude:
        upload.filter_excludes()
    if options.newest:
        upload.newest()
    if not upload.files:
        upload.die(0, "Nothing to do; exiting. Try --help")
    if options.test:
        upload.test()
        return
    try:
        upload.uploadHeaders()
    except UploadError, e:
        sys.stderr.write("Upload error: %s\n" % e)
def main(): """ creates a HAL component. parsees a glade XML file with gtk.builder or libglade calls gladevcp.makepins with the specified XML file to create pins and register callbacks. main window must be called "window1" """ global gladevcp_debug (progdir, progname) = os.path.split(sys.argv[0]) usage = "usage: %prog [options] myfile.ui" parser = OptionParser(usage=usage) parser.disable_interspersed_args() parser.add_options(options) (opts, args) = parser.parse_args() if not args: parser.print_help() sys.exit(1) gladevcp_debug = debug = opts.debug xmlname = args[0] #if there was no component name specified use the xml file name if opts.component is None: opts.component = os.path.splitext(os.path.basename(xmlname))[0] #try loading as a libglade project try: builder = gtk.Builder() builder.add_from_file(xmlname) except: try: # try loading as a gtk.builder project dbg("**** GLADE VCP INFO: Not a builder project, trying to load as a lib glade project" ) builder = gtk.glade.XML(xmlname) builder = GladeBuilder(builder) except Exception as e: print("**** GLADE VCP ERROR: With xml file: %s : %s" % (xmlname, e), file=sys.stderr) sys.exit(0) window = builder.get_object("window1") window.set_title(opts.component) try: halcomp = hal.component(opts.component) except: print( "*** GLADE VCP ERROR: Asking for a HAL component using a name that already exists.", file=sys.stderr) sys.exit(0) panel = gladevcp.makepins.GladePanel(halcomp, xmlname, builder, None) # at this point, any glade HL widgets and their pins are set up. 
handlers, mod, obj = load_handlers(opts.usermod, halcomp, builder, opts.useropts) # so widgets can call handler functions - give them refeence to the handler object panel.set_handler(obj) builder.connect_signals(handlers) # This option puts the gladevcp panel into a plug and pushed the plug's # X window id number to standard output - so it can be reparented exterally # it also forwards events to qtvcp if opts.push_XID: if not opts.debug: # supress warnings when x window closes warnings.filterwarnings("ignore") # block X errors since gdk error handling silently exits the # program without even the atexit handler given a chance gtk.gdk.error_trap_push() forward = os.environ.get('QTVCP_FORWARD_EVENTS_TO', None) if forward: xembed.keyboard_forward(window, forward) # This option reparents gladevcp in a given X window id. # it also forwards keyboard events from gladevcp to AXIS if opts.parent: if not opts.debug: # supress warnings when x window closes warnings.filterwarnings("ignore") # block X errors since gdk error handling silently exits the # program without even the atexit handler given a chance gtk.gdk.error_trap_push() window = xembed.reparent(window, opts.parent) forward = os.environ.get('AXIS_FORWARD_EVENTS_TO', None) if forward: xembed.keyboard_forward(window, forward) window.connect("destroy", on_window_destroy) window.show() # for window resize and or position options if "+" in opts.geometry: try: j = opts.geometry.partition("+") pos = j[2].partition("+") window.move(int(pos[0]), int(pos[2])) except: print("**** GLADE VCP ERROR: With window position data", file=sys.stderr) parser.print_usage() sys.exit(1) if "x" in opts.geometry: try: if "+" in opts.geometry: j = opts.geometry.partition("+") t = j[0].partition("x") else: t = window_geometry.partition("x") window.resize(int(t[0]), int(t[2])) except: print("**** GLADE VCP ERROR: With window resize data", file=sys.stderr) parser.print_usage() sys.exit(1) if opts.gtk_workaround: # work around 
https://bugs.launchpad.net/ubuntu/+source/pygtk/+bug/507739 # this makes widget and widget_class matches in gtkrc and theme files actually work dbg("activating GTK bug workaround for gtkrc files") for o in builder.get_objects(): if isinstance(o, gtk.Widget): # retrieving the name works only for GtkBuilder files, not for # libglade files, so be cautious about it name = gtk.Buildable.get_name(o) if name: o.set_name(name) if opts.gtk_rc: dbg("**** GLADE VCP INFO: %s reading gtkrc file '%s'" % (opts.component, opts.gtk_rc)) gtk.rc_add_default_file(opts.gtk_rc) gtk.rc_parse(opts.gtk_rc) if opts.theme: dbg("**** GLADE VCP INFO: Switching %s to '%s' theme" % (opts.component, opts.theme)) settings = gtk.settings_get_default() settings.set_string_property("gtk-theme-name", opts.theme, "") # This needs to be done after geometry moves so on dual screens the window maxumizes to the actual used screen size. if opts.maximum: window.window.maximize() if opts.always_above_flag: window.set_keep_above(True) if opts.halfile: if opts.halfile[-4:] == ".tcl": cmd = ["haltcl", opts.halfile] else: cmd = ["halcmd", "-f", opts.halfile] res = subprocess.call(cmd, stdout=sys.stdout, stderr=sys.stderr) if res: print("'%s' exited with %d" % (' '.join(cmd), res), file=sys.stderr) sys.exit(res) # User components are set up so report that we are ready halcomp.ready() GSTAT.forced_update() # push the XWindow id number to standard out if opts.push_XID or opts.parent: gdkwin = window.get_window() w_id = gdkwin.xid print(w_id, file=sys.stdout) sys.stdout.flush() if signal_func in handlers: dbg("Register callback '%s' for SIGINT and SIGTERM" % (signal_func)) signal.signal(signal.SIGTERM, handlers[signal_func]) signal.signal(signal.SIGINT, handlers[signal_func]) try: gtk.main() except KeyboardInterrupt: sys.exit(0) finally: halcomp.exit() if opts.parent or opts.push_XID: gtk.gdk.flush() error = gtk.gdk.error_trap_pop() if error and opts.debug: print("**** GLADE VCP ERROR: X Protocol Error: %s" % 
str(error), file=sys.stderr)
op.add_option("--raw", action='store_true', default=False, help="Just dump raw SPARQL, don't do any treeification.") op.add_option("--pprint", action='store_true', default=False, help="Pretty print tree, instead of JSON serialization.") op.add_option("--time", action='store_true', default=False, help="Add measured time as comments.") opts, args = op.parse_args() if len(args) < 2: op.print_usage() op.exit() endpoint_url = args[0] fpath = args[1] if fpath == "-": import sys query = sys.stdin.read() else: with open(fpath) as f: query = f.read() from time import time start = time()
def process_options():
    """Parse ArcLink Fetch command-line options.

    Returns a tuple: (SSLpasswordDict, address, request_format,
    data_format, label, resp_dict, rebuild_volume, proxymode, user,
    timeout, retries, output_file, request_file, spfr).  Exits after
    printing help, or when errors were found on the command line.
    """
    parser = OptionParser(
        usage="usage: %prog [-h|--help] [OPTIONS] -u USER -o OUTPUTFILE [REQUEST]",
        version="%prog v" + VERSION,
        add_help_option=False)

    # Defaults for every option that is not required on the command line.
    parser.set_defaults(address="eida.gfz-potsdam.de:18001",
                        request_format="native",
                        data_format="mseed",
                        spfr=None,
                        label=None,
                        no_resp_dict=False,
                        rebuild_volume=False,
                        proxymode=False,
                        timeout=300,
                        retries=5,
                        SSLpasswordFile="dcidpasswords.txt")

    parser.add_option("-h", "--help", action="store_true",
                      dest="showhelp", default=False)
    parser.add_option("-l", "--longhelp", action="store_true",
                      dest="showlonghelp", default=False)
    parser.add_option("-w", "--password-file", type="string",
                      dest="SSLpasswordFile",
                      help="file containing passwords used for decryption of encrypted data (default %default)")
    parser.add_option("-a", "--address", type="string", dest="address",
                      help="address of primary ArcLink node (default %default)")
    foptions = ("native", "breqfast")
    parser.add_option("-f", "--request-format", type="choice",
                      dest="request_format", choices=foptions,
                      help="request format: breqfast, native (default %default)")
    koptions = ("mseed", "mseed4k", "fseed", "dseed", "inv", "inventory")
    parser.add_option("-k", "--data-format", type="choice",
                      dest="data_format", choices=koptions,
                      help="data format: mseed, mseed4k, fseed, dseed, inv[entory] (default %default)")
    parser.add_option("-s", "--preferred-sample-rate", type="float",
                      dest="spfr",
                      help="preferred sample rate")
    parser.add_option("-L", "--label", type="string", dest="label",
                      help="label of SEED volume")
    parser.add_option("-n", "--no-resp-dict", action="store_true",
                      dest="no_resp_dict",
                      help="avoid using response dictionary (default %default)")
    parser.add_option("-g", "--rebuild-volume", action="store_true",
                      dest="rebuild_volume",
                      help="rebuild SEED volume (default %default)")
    parser.add_option("-p", "--proxy", action="store_true",
                      dest="proxymode",
                      help="proxy mode, no routing (default %default)")
    parser.add_option("-t", "--timeout", type="int", dest="timeout",
                      help="timeout in seconds (default %default)")
    parser.add_option("-x", "--retries", type="int", dest="retries",
                      help="download retries (default %default)")
    # -v/-q use callbacks so they can be given repeatedly.
    parser.add_option("-v", action="callback", callback=add_verbosity,
                      help="increase verbosity level")
    parser.add_option("-q", action="callback", callback=add_quietness,
                      help="decrease verbosity level")
    parser.add_option("-u", "--user", type="string", dest="user",
                      help="user's e-mail address")
    parser.add_option("-o", "--output-file", type="string",
                      dest="output_file",
                      help="file where downloaded data is written")

    (options, args) = parser.parse_args()

    if options.showhelp or options.showlonghelp:
        parser.print_help()
        if options.showlonghelp:
            # NOTE(review): line layout of this help text reconstructed from a
            # whitespace-squashed source -- confirm against the original.
            print """
About ArcLink Protocol
======================

The ArcLink is a protocol used to request distributed archive seismological
data. Today it gives you access to several European data archives (European
Integrated Data Archive - EIDA) that are supporting the protocol developed by
GEOFON ([email protected]) at the GeoForschungZentrum, Potsdam, Germany.

You can find more information about it at the SeisComp3 and GEOFON web pages:

 * http://www.seiscomp3.org/
 * http://geofon.gfz-potsdam.de/

ArcLink Password File (for decryption)
======================================

In this file (default: dcidpasswords.txt) you can store your private
passwords given by different data centers. Each data center that you request
encrypted data will send you a different password. The format of the file is
really simple: just the data center ID followed by the password that you
received. One data center ID and password per line. Any empty lines or lines
starting with # are ignored. Example:

gfz password1
odc password2
ipgp password3

The data center ID and password can be found on the automatically generated
e-mail that you received from each data center. (You will only receive this
email if you have been authorized to download encrypted data, and you have
tried to download it.)

Input File Format
=================

ArcLink Fetch program supports two different input formats for the request
file. It supports the traditional BREQ FAST format, and its own native
format. Both formats contains the same information and they differ slightly.

Native Format:
--------------

The native format has the following format:

YYYY,MM,DD,HH,MM,SS YYYY,MM,DD,HH,MM,SS Network Station Channel [Location]

the Channel, Station and Location, can contains wildcards (*) and the
Location field is optional. For matching all locations please use the '*'
symbol. Example:

2010,02,18,12,00,00 2010,02,18,12,10,00 GE WLF BH*
2010,02,18,12,00,00 2010,02,18,12,10,00 GE VSU BH* 00

BREQ FAST Format:
-----------------

The BREQ FAST format is a standard format used on seismology to request
data. Each header line start with '.' and the request lines have the
following format:

Station Network {Time Start} {Time End} {Number of Channels} N x Channels Location

Time Specification should have the following format:

YYYY MM DD HH MM SS.TTTT

Please read more about the BREQ FAST format at:

http://www.iris.edu/manuals/breq_fast.htm
"""
        sys.exit()

    # Collect every problem before reporting, so the user sees all of them.
    errors = []
    warnings = []

    if options.user is None:
        errors.append("Username required")
    if options.output_file is None:
        errors.append("Output file required")
    if options.data_format.upper() != "FSEED" and options.rebuild_volume:
        errors.append("-g is only applicable to FSEED format")

    if len(args) > 1:
        errors.append(
            "invalid command line options or multiple files supplied")
    elif len(args) == 1:
        if not os.path.exists(args[0]):
            errors.append("request file '%s' not found." % args[0])
        request_file = args[0]
    else:
        # No request file: input presumably comes from elsewhere (caller).
        request_file = None

    # Load the data-center-id -> password map, if the file exists.
    SSLpasswordDict = {}
    if os.path.exists(options.SSLpasswordFile):
        fd = open(options.SSLpasswordFile)
        line = fd.readline()
        while line:
            line = line.strip()
            if line and line[0] != "#":
                try:
                    (dcid, password) = line.split()
                    SSLpasswordDict[dcid] = password
                except ValueError:
                    # Malformed line: report it and stop immediately.
                    logs.error(options.SSLpasswordFile +
                               " invalid line: " + line)
                    fd.close()
                    sys.exit()
            line = fd.readline()
    else:
        # A missing explicit file is an error; a missing default is only a warning.
        if options.SSLpasswordFile != parser.defaults['SSLpasswordFile']:
            errors.append("Supplied password file (%s) not found" %
                          options.SSLpasswordFile)
        else:
            warnings.append("Default password file (%s) not found" %
                            options.SSLpasswordFile)

    if len(errors) > 0:
        logs.error("\n** ArcLink Fetch %s **\n" % VERSION)
        parser.print_usage()
        logs.error("Errors detected on the command line:")
        for item in errors:
            logs.error("\t%s" % item)
        print ""

    if len(warnings) > 0:
        logs.info("Warnings detected on the command line:")
        for item in warnings:
            logs.info("\t%s" % item)
        print ""

    if len(errors) > 0:
        sys.exit()

    return (SSLpasswordDict, options.address, options.request_format,
            options.data_format, options.label, not options.no_resp_dict,
            options.rebuild_volume, options.proxymode, options.user,
            options.timeout, options.retries, options.output_file,
            request_file, options.spfr)
############################################################ parser = OptionParser() parser.add_option("-o", "--output", help="Filename of MSI to create") parser.add_option("-d", "--debug", action="store_true", help="Enable debugging messages") parser.add_option("-v", "--verbose", action="store_true", help="Enable verbose messages") options, args = parser.parse_args() if args: parser.print_usage(sys.stderr) sys.exit(1) if options.debug: logging.basicConfig(level=logging.DEBUG) elif options.verbose: logging.basicConfig(level=logging.INFO) _logger = logging.getLogger("build_msi") output = options.output if output is None: output = os.path.join(os.getcwd(), "Reinteract-%(version)s.msi") script = os.path.abspath(sys.argv[0]) scriptdir = os.path.dirname(script)
def main(stream=None):
    """Locate the target ELF binary and dump its PMD hardware-support info."""
    global raw_output
    global pcidb

    # Choose a platform-appropriate default location for the PCI id database.
    pcifile_default = "./pci.ids"  # for unknown OS's assume local file
    detected_os = platform.system()
    if detected_os == 'Linux':
        pcifile_default = "/usr/share/hwdata/pci.ids"
    elif detected_os == 'FreeBSD':
        pcifile_default = "/usr/local/share/pciids/pci.ids"
        if not os.path.exists(pcifile_default):
            pcifile_default = "/usr/share/misc/pci_vendors"

    optparser = OptionParser(
        usage='usage: %prog [-hrtp] [-d <pci id file] <elf-file>',
        description="Dump pmd hardware support info",
        add_help_option=True)
    optparser.add_option('-r', '--raw',
                         action='store_true',
                         dest='raw_output',
                         help='Dump raw json strings')
    optparser.add_option("-d", "--pcidb",
                         dest="pcifile",
                         default=pcifile_default,
                         metavar="FILE",
                         help="specify a pci database "
                              "to get vendor names from")
    optparser.add_option("-t", "--table",
                         dest="tblout",
                         action='store_true',
                         help="output information on hw support as a hex table")
    optparser.add_option("-p", "--plugindir",
                         dest="pdir",
                         action='store_true',
                         help="scan dpdk for autoload plugins")

    options, args = optparser.parse_args()

    if options.raw_output:
        raw_output = True

    if options.pcifile:
        pcidb = PCIIds(options.pcifile)
        if pcidb is None:
            print("Pci DB file not found")
            exit(1)

    if options.tblout:
        # Table output bypasses the vendor-name database entirely.
        options.pcifile = None
        pcidb = None

    if not args:
        optparser.print_usage()
        exit(1)

    if options.pdir is True:
        exit(scan_for_autoload_pmds(args[0]))

    search_path = os.environ.get('LD_LIBRARY_PATH')
    if search_path is None:
        search_path = ""

    # Resolve the ELF path: use it directly if it exists, otherwise
    # search LD_LIBRARY_PATH plus the usual system library directories.
    target = args[0]
    if os.path.exists(target):
        elf_path = target
    else:
        elf_path = search_file(
            target, search_path + ":/usr/lib64:/lib64:/usr/lib:/lib")
    if elf_path is None:
        print("File not found")
        sys.exit(1)

    with open(elf_path, 'rb') as elf_fh:
        try:
            readelf = ReadElf(elf_fh, sys.stdout)
            readelf.process_dt_needed_entries()
            readelf.display_pmd_info_strings(".rodata")
            sys.exit(0)
        except ELFError as ex:
            sys.stderr.write('ELF error: %s\n' % ex)
            sys.exit(1)
def main():
    """Command-line front end for the cache module.

    Dispatches on a subcommand (help / cached / cached-dir / read /
    touch / write / dur / durs / secs / time / userappcachedir) and
    prints the result.
    """
    global _conf_debug
    from optparse import OptionParser
    parser = OptionParser()
    # NOTE(review): line layout of this usage text reconstructed from a
    # whitespace-squashed source -- confirm against the original.
    parser.usage = """\
%prog [options] <cmd> [arg]

Commands:
    help
    cached <path>
    cached-dir <path>
    read <path>
    touch <path>
    write data <path>
    dur <secs>
    durs <secs>
    secs <time>
    time <secs>
    userappcachedir"""
    parser.add_option("-a", "--autocleanup",
                      help="Automatically remove non-cache files",
                      default=False, action="store_true")
    parser.add_option("", "--debug",
                      help="Debug output",
                      default=_conf_debug, action="store_true")
    parser.add_option("", "--max",
                      help="Maximum seconds to cache for",
                      default=None)
    parser.add_option("", "--min",
                      help="Minimum seconds to cache for",
                      default=None)

    (options, args) = parser.parse_args()

    if options.debug:
        _conf_debug = True

    if len(args) < 1:
        parser.error("No command specified")

    # Normalize human-readable time specs into seconds.
    options.min = parse_time(options.min)
    options.max = parse_time(options.max)

    cmd = args[0]
    # 'if False: pass' lets every real command read as a uniform elif.
    if False:
        pass
    elif cmd == "help":
        parser.print_usage()
    elif cmd == "time":
        if len(args) < 2:
            parser.error("No time specified")
        print("time:", format_time(int(args[1])))
    elif cmd == "dur":
        if len(args) < 2:
            parser.error("No time specified")
        print("dur:", format_duration(int(args[1])))
    elif cmd == "durs":
        if len(args) < 2:
            parser.error("No time specified")
        print("dur:", format_duration(int(args[1]), static=True))
    elif cmd == "secs":
        if len(args) < 2:
            parser.error("No time specified")
        print("secs:", parse_time(args[1]))
    elif cmd == "userappcachedir":
        # NOTE(review): this line is corrupted in the source (an automated
        # scrubber appears to have replaced part of it with '******'); it is
        # not valid Python as written -- recover the original call, likely
        # printing the per-user application cache directory for "<app>".
        print("user:"******"<app>"))
    elif cmd == "cached":
        if len(args) < 2:
            parser.error("No path specified")
        c = Cache(args[1], options.min, options.max)
        print("cached:", c.cached(options.autocleanup))
    elif cmd == "cached-dir":
        if len(args) < 2:
            parser.error("No path specified")
        for c in cache_dir(args[1], options.min, options.max):
            print("name:", os.path.basename(c.path))
            print("cached:", c.cached(options.autocleanup))
    elif cmd == "read":
        if len(args) < 2:
            parser.error("No path specified")
        c = Cache(args[1], options.min, options.max)
        print("cached:", c.cached(options.autocleanup))
        if c.cached():
            print("data:", c.read())
    elif cmd == "touch":
        if len(args) < 2:
            parser.error("No path specified")
        c = Cache(args[1], options.min, options.max)
        print("touch")
        c.touch()
        print("cached:", c.cached(autocleanup=False))
    elif cmd == "write":
        if len(args) < 3:
            parser.error("No data/path specified")
        c = Cache(args[2], options.min, options.max)
        print("touch")
        c.touch(args[1])
        print("cached:", c.cached(autocleanup=False))
        print("data:", c.read())
    else:
        parser.error("invalid command: " + cmd)
def mllp_send():
    """Command line tool to send messages to an MLLP server.

    Reads HL7 messages from a file (or stdin), sends each to the given
    <server> over MLLP, and echoes each server response unless -q is given.
    Returns early (after printing a hint) on usage errors.
    """
    # set up the command line options
    script_name = os.path.basename(sys.argv[0])
    parser = OptionParser(usage=script_name + ' [options] <server>')
    parser.add_option('--version',
                      action='store_true', dest='version', default=False,
                      help='print current version and exit')
    parser.add_option('-p', '--port',
                      action='store', type='int', dest='port', default=6661,
                      help='port to connect to')
    parser.add_option('-f', '--file', dest='filename',
                      help='read from FILE instead of stdin', metavar='FILE')
    # BUGFIX: was action='store_true', which stored True into 'verbose'
    # (already the default), making -q/--quiet a no-op.
    parser.add_option('-q', '--quiet',
                      action='store_false', dest='verbose', default=True,
                      help='do not print status messages to stdout')
    parser.add_option('--loose',
                      action='store_true', dest='loose', default=False,
                      help='allow file to be a HL7-like object (\\r\\n instead '
                           'of \\r). Requires that messages start with '
                           '"MSH|^~\\&|". Requires --file option (no stdin)')

    (options, args) = parser.parse_args()

    if options.version:
        import hl7
        stdout(hl7.__version__)
        return

    if len(args) == 1:
        host = args[0]
    else:
        # server not present
        parser.print_usage()
        stderr().write('server required\n')
        return

    if options.filename is not None:
        stream = open(options.filename, 'rb')
    else:
        # --loose relies on rewriting the whole input, so stdin won't do.
        if options.loose:
            stderr().write('--loose requires --file\n')
            return
        stream = stdin()

    try:
        with MLLPClient(host, options.port) as client:
            message_stream = read_stream(stream) \
                if not options.loose \
                else read_loose(stream)

            for message in message_stream:
                result = client.send_message(message)
                if options.verbose:
                    stdout(result)
    finally:
        # BUGFIX (was '#FIXME with_statement'): close the input file we
        # opened; never close stdin.
        if options.filename is not None:
            stream.close()
def cli():
    """Main function for the downloader: parse options, pick the matching
    scraper class and download the requested build."""
    # Map of supported build types to their scraper implementations.
    BUILD_TYPES = {'release': ReleaseScraper,
                   'candidate': ReleaseCandidateScraper,
                   'daily': DailyScraper,
                   'tinderbox': TinderboxScraper}

    usage = 'usage: %prog [options]'
    parser = OptionParser(usage=usage, description=__doc__)
    parser.add_option('--application', '-a',
                      dest='application',
                      choices=APPLICATIONS,
                      default='firefox',
                      metavar='APPLICATION',
                      help='The name of the application to download, '
                           'default: "%default"')
    parser.add_option('--directory', '-d',
                      dest='directory',
                      default=os.getcwd(),
                      metavar='DIRECTORY',
                      help='Target directory for the download, default: '
                           'current working directory')
    parser.add_option('--build-number',
                      dest='build_number',
                      type="int",
                      metavar='BUILD_NUMBER',
                      help='Number of the build (for candidate, daily, '
                           'and tinderbox builds)')
    parser.add_option('--locale', '-l',
                      dest='locale',
                      metavar='LOCALE',
                      help='Locale of the application, default: "en-US or '
                           'multi"')
    parser.add_option('--platform', '-p',
                      dest='platform',
                      choices=PLATFORM_FRAGMENTS.keys(),
                      metavar='PLATFORM',
                      help='Platform of the application')
    parser.add_option('--stub',
                      dest='is_stub_installer',
                      action='store_true',
                      help='Stub installer. '
                           'Only applicable to Windows builds.')
    parser.add_option('--type', '-t',
                      dest='type',
                      choices=BUILD_TYPES.keys(),
                      default='release',
                      metavar='BUILD_TYPE',
                      help='Type of build to download, default: "%default"')
    parser.add_option('--url',
                      dest='url',
                      metavar='URL',
                      help='URL to download. Note: Reserved characters (such '
                           'as &) must be escaped or put in quotes otherwise '
                           'CLI output may be abnormal.')
    parser.add_option('--version', '-v',
                      dest='version',
                      metavar='VERSION',
                      help='Version of the application to be used by release\
 and candidate builds, i.e. "3.6"')
    parser.add_option('--extension',
                      dest='extension',
                      metavar='EXTENSION',
                      help='File extension of the build (e.g. "zip"), default:\
 the standard build extension on the platform.')
    parser.add_option('--username',
                      dest='username',
                      metavar='USERNAME',
                      help='Username for basic HTTP authentication.')
    parser.add_option('--password',
                      dest='password',
                      metavar='PASSWORD',
                      help='Password for basic HTTP authentication.')
    parser.add_option('--retry-attempts',
                      dest='retry_attempts',
                      default=0,
                      type=int,
                      metavar='RETRY_ATTEMPTS',
                      help='Number of times the download will be attempted in '
                           'the event of a failure, default: %default')
    parser.add_option('--retry-delay',
                      dest='retry_delay',
                      default=10.,
                      type=float,
                      metavar='RETRY_DELAY',
                      help='Amount of time (in seconds) to wait between retry '
                           'attempts, default: %default')
    parser.add_option('--timeout',
                      dest='timeout',
                      type=float,
                      metavar='TIMEOUT',
                      help='Amount of time (in seconds) until a download times'
                           ' out')
    parser.add_option('--log-level',
                      action='store',
                      dest='log_level',
                      default='INFO',
                      metavar='LOG_LEVEL',
                      help='Threshold for log output (default: %default)')

    # Option group for candidate builds
    group = OptionGroup(parser, "Candidate builds",
                        "Extra options for candidate builds.")
    group.add_option('--no-unsigned',
                     dest='no_unsigned',
                     action="store_true",
                     help="Don't allow to download unsigned builds if signed\
 builds are not available")
    parser.add_option_group(group)

    # Option group for daily builds
    group = OptionGroup(parser, "Daily builds",
                        "Extra options for daily builds.")
    group.add_option('--branch',
                     dest='branch',
                     default='mozilla-central',
                     metavar='BRANCH',
                     help='Name of the branch, default: "%default"')
    group.add_option('--build-id',
                     dest='build_id',
                     metavar='BUILD_ID',
                     help='ID of the build to download')
    group.add_option('--date',
                     dest='date',
                     metavar='DATE',
                     help='Date of the build, default: latest build')
    parser.add_option_group(group)

    # Option group for tinderbox builds
    group = OptionGroup(parser, "Tinderbox builds",
                        "Extra options for tinderbox builds.")
    group.add_option('--debug-build',
                     dest='debug_build',
                     action="store_true",
                     help="Download a debug build")
    parser.add_option_group(group)

    # TODO: option group for nightly builds
    (options, args) = parser.parse_args()

    # Gives instructions to user when no arguments were passed
    if len(sys.argv) == 1:
        print __doc__
        parser.print_usage()
        print "Specify --help for more information on options. " \
              "Please see the README for examples."
        return

    # Check for required options and arguments
    # Note: Will be optional when ini file support has been landed
    if not options.url \
       and options.type not in ['daily', 'tinderbox'] \
       and not options.version:
        parser.error('The version of the application to download has not'
                     ' been specified.')

    # Instantiate scraper and download the build
    scraper_keywords = {'application': options.application,
                        'locale': options.locale,
                        'platform': options.platform,
                        'version': options.version,
                        'directory': options.directory,
                        'extension': options.extension,
                        'authentication': (options.username, options.password),
                        'retry_attempts': options.retry_attempts,
                        'retry_delay': options.retry_delay,
                        'is_stub_installer': options.is_stub_installer,
                        'timeout': options.timeout,
                        'log_level': options.log_level}
    # Extra keyword arguments that only apply to specific build types.
    scraper_options = {
        'candidate': {'build_number': options.build_number,
                      'no_unsigned': options.no_unsigned},
        'daily': {'branch': options.branch,
                  'build_number': options.build_number,
                  'build_id': options.build_id,
                  'date': options.date},
        'tinderbox': {'branch': options.branch,
                      'build_number': options.build_number,
                      'date': options.date,
                      'debug_build': options.debug_build}
    }

    kwargs = scraper_keywords.copy()
    kwargs.update(scraper_options.get(options.type, {}))

    if options.application == 'b2g' and \
       options.type in ('candidate', 'release'):
        error_msg = "%s build is not yet supported for B2G" % options.type
        raise NotSupportedError(error_msg)
    # An explicit URL bypasses the type-specific scrapers entirely.
    if options.url:
        build = DirectScraper(options.url, **kwargs)
    else:
        build = BUILD_TYPES[options.type](**kwargs)

    try:
        build.download()
    except KeyboardInterrupt:
        print "\nDownload interrupted by the user"
def main(arguments=None):
    """Command-line entry point for the Swift CLI.

    Builds the option parser, parses ``arguments`` (or the process argv),
    and dispatches to the ``st_<subcommand>`` handler named by the first
    positional argument.

    :param arguments: optional argv-style list (program name at index 0);
        falls back to ``sys_argv`` when falsy.
    """
    if arguments:
        argv = arguments
    else:
        argv = sys_argv

    version = '0.0.3'
    parser = OptionParser(version='%%prog %s' % version, usage='''
usage: %%prog [--version] [--help]
             [--os-username <auth-user-name>]
             [--os-password <auth-password>]
             [--os-tenant-name <auth-tenant-name>]
             [--os-auth-url <auth-url>]
             <subcommand> [--help]

Command-line interface to the OpenStack Swift API.

Positional arguments:
  <subcommand>
    upload               Uploads files or directories to the given container.
'''.strip('\n') % globals())
    # NOTE(review): default=True makes --insecure a no-op and contradicts the
    # help text ("Defaults to env[SWIFTCLIENT_INSECURE]"); left as-is because
    # changing the default would change runtime behavior for callers.
    parser.add_option('--insecure',
                      action="store_true", dest="insecure",
                      default=True,
                      help='Allow swiftclient to access servers without '
                           'having to verify the SSL certificate. '
                           'Defaults to env[SWIFTCLIENT_INSECURE] '
                           '(set to \'true\' to enable).')

    os_grp = OptionGroup(parser, "OpenStack authentication options")
    os_grp.add_option('--os-username',
                      metavar='<auth-user-name>',
                      default=environ.get('OS_USERNAME',
                                          environ.get('SWIFT_USER')),
                      help='OpenStack username. Defaults to env[OS_USERNAME].')
    os_grp.add_option('--os_username',
                      help=SUPPRESS_HELP)
    os_grp.add_option('--os-password',
                      metavar='<auth-password>',
                      default=environ.get('OS_PASSWORD',
                                          environ.get('SWIFT_PASSWORD')),
                      help='OpenStack password. Defaults to env[OS_PASSWORD].')
    os_grp.add_option('--os-tenant-name',
                      metavar='<auth-tenant-name>',
                      default=environ.get('OS_TENANT_NAME',
                                          environ.get('SWIFT_TENANT')),
                      help='OpenStack tenant name. '
                           'Defaults to env[OS_TENANT_NAME].')
    os_grp.add_option('--os_tenant_name',
                      help=SUPPRESS_HELP)
    os_grp.add_option('--os-auth-url',
                      metavar='<auth-url>',
                      default=environ.get('OS_AUTH_URL',
                                          environ.get('SWIFT_AUTH_URL')),
                      help='OpenStack auth URL. Defaults to env[OS_AUTH_URL].')
    os_grp.add_option('--os_auth_url',
                      help=SUPPRESS_HELP)
    os_grp.add_option('--os-storage-url',
                      metavar='<storage-url>',
                      default=environ.get('OS_STORAGE_URL',
                                          environ.get('SWIFT_ADMIN_URL')),
                      help='OpenStack storage URL. '
                           'Defaults to env[OS_STORAGE_URL]. '
                           'Overrides the storage url returned during auth. '
                           'Will bypass authentication when used with '
                           '--os-auth-token.')
    os_grp.add_option('--os_storage_url',
                      help=SUPPRESS_HELP)
    # BUG FIX: the group was built but never attached to the parser, so none
    # of the authentication options appeared in --help output. Parsing itself
    # is unaffected (optparse groups share the parser's option tables).
    parser.add_option_group(os_grp)

    (options, args) = parser.parse_args(argv[1:])

    # Unknown or missing subcommand: show usage and exit (nonzero message
    # only when a bogus command was actually given).
    if not args or args[0] not in commands:
        parser.print_usage()
        if args:
            exit('no such command: %s' % args[0])
        exit()

    try:
        # Dispatch to the module-level handler, e.g. st_upload(...).
        globals()['st_%s' % args[0]](options=options, args=args)
    except Exception as err:
        print(str(err))
def main(): usage = "usage: %prog [-h] [-o output_file_path] scriptfile [arg] ..." parser = OptionParser(usage=usage) parser.allow_interspersed_args = False parser.add_option('', '--html', dest="output_html", action='store_true', help="output HTML instead of text", default=False) parser.add_option('-o', '--outfile', dest="outfile", action='store', help="save stats to <outfile>", default=None) if not sys.argv[1:]: parser.print_usage() sys.exit(2) (options, args) = parser.parse_args() sys.argv[:] = args if len(args) > 0: progname = args[0] sys.path.insert(0, os.path.dirname(progname)) with open(progname, 'rb') as fp: code = compile(fp.read(), progname, 'exec') globs = { '__file__': progname, '__name__': '__main__', '__package__': None, } profiler = Profiler() profiler.start() try: exec code in globs, None except SystemExit, KeyboardInterrupt: pass profiler.stop() if options.outfile: f = codecs.open(options.outfile, 'w', 'utf-8') unicode = True color = False else: f = sys.stdout unicode = stdout_supports_unicode() color = stdout_supports_color() if options.output_html: f.write(profiler.output_html()) else: f.write(profiler.output_text(unicode=unicode, color=color)) f.close()
"--exclude", type="string", dest="exclude", default="none", help="List of frames to exclude (default is 'none')") option_parser.add_option("-o", "--output", type="string", dest="output", help="The target PDF file name") options, args = option_parser.parse_args() if len(args) == 0: option_parser.print_usage(sys.stderr) sys.exit() # Set page dimensions based on "format" and "landscape" options page_format = options.format.lower() if page_format not in PAGE_FORMATS: sys.stderr.write("Unknown page format: " + page_format + "\n") sys.exit() width = PAGE_FORMATS[page_format]["width"] height = PAGE_FORMATS[page_format]["height"] resolution = PAGE_FORMATS[page_format]["resolution"] if not options.landscape: width, height = height, width
def main(): from optparse import OptionParser cli = OptionParser( usage="%prog [options] <base-file>", description= "Generate OpenVZ container configuration files based on an existing file" ) cli.add_option("-m", "--multiply", dest="multiply", type="float", metavar="FACTOR", help="multiply by given factor") cli.add_option( "-a", "--add", dest="add", type="string", action="append", metavar="FILE", help= "add (as in sum) given file, you can add as many files as you need by specifying this option multiple times" ) cli.add_option( "-s", "--substract", dest="substract", type="string", action="append", metavar="FILE", help= "substract given file, you can add as many files as you need by specifying this option multiple times" ) cli.add_option("-d", "--debug", dest="debug", action="store_true", help="do not catch python exceptions, useful for debugging") (options, args) = cli.parse_args() if not len(args): cli.error("No base file provided") try: # Require Python >= 2.4 import sys if sys.version_info[0] < 2 or sys.version_info[1] < 4: cli.error("Python 2.4.0 or higher is required") c = CTConfig(args[0]) # Multiply if options.multiply: if options.multiply <= 0: cli.error("Invalid multiplication factor %s" % str(options.multiply)) c.multiply(options.multiply) # Add if options.add is not None: for f in options.add: c.add(CTConfig(f)) # Substract if options.substract is not None: for f in options.substract: c.substract(CTConfig(f)) # Output results print c except Exception, e: if options.debug: raise else: cli.print_usage() cli.exit(2, "%s: %s\n" % (cli.get_prog_name(), e))
def main():
    """Command-line audio tag editor built on mutagen (Python 2 tool).

    For every target file: edit tags (-t/-a/-r/--clear/-j), derive tags
    from the file name (-p/--fn2tag), or rename the file from its tags
    (--tag2fn). MP3 files get a dedicated code path; other formats go
    through mutagen's generic File interface.

    NOTE(review): relies on module-level helpers (Confirmer, Subster,
    Speaker, MP3, File, __version__) defined elsewhere in this file.
    """
    args = sys.argv
    err = 0
    # 'id3help' anywhere on the command line dumps EasyID3's key names.
    if 'id3help' in args:
        from mutagen.easyid3 import EasyID3
        for key in EasyID3.valid_keys.keys():
            print(key, )
    from optparse import OptionParser as OP
    OP = OP()  # NOTE: rebinds the imported class name to the parser instance
    OP.usage = ("%prog [options] filenames")
    OP.epilog = '%s id3help: for help with id3 tags' % os.path.basename(
        args[0])
    OP.add_option('-t', '--tag', dest='tag', action='append',
                  help="set a tag", metavar='tag=value')
    OP.add_option(
        '-a', '--add', dest='add', action='append',
        help='set/add values to a tag, without removing any existing values',
        metavar='tag=value')
    # -p/--pattern and --fn2tag share dest='pattern' (aliases).
    OP.add_option('-p', '--pattern', dest='pattern', action='store',
                  help='substitution pattern from filename',
                  metavar="'%n %t.flac'")
    OP.add_option('--fn2tag', dest='pattern', action='store',
                  help='same as -p | --pattern')
    OP.add_option('-r', '--remove', dest='remove', action='append',
                  help='remove a tag value or entire tag',
                  metavar="'tag' or 'tag=value'")
    OP.add_option('-j', '--justify', dest='justify', action='store_true',
                  help='zero-justify tracknumbers')
    OP.add_option('--clear', dest='clear', action='store_true',
                  help='clear all tags')
    OP.add_option('-n', '--noact', dest='noact', action='store_true',
                  help="just show what changes would be made")
    OP.add_option('-c', '--confirm', dest='confirm', action='store_true',
                  help='show changes and prompt for confirmation to save')
    OP.add_option('-f', '--files', dest='filenames', action='append',
                  help='one or more filenames/globs')
    OP.add_option('-q', '--quiet', dest='quiet', action='store_true',
                  help='no output to stdout')
    OP.add_option('--tag2fn', dest='tag2fn', action='store',
                  help='substitution pattern from tags', metavar="'%n %t.flac'")
    OP.add_option(
        '-s', '--filter', dest='symbols', action='store',
        help=
        'one or more characters to filter from tags used to build filenames',
        metavar="'!@$&*/\?'")
    OP.add_option(
        '-m', '--map', dest='map', action='store',
        help=
        'replace all instances of a char with another char\nin conjunction with --tag2fn',
        metavar="/ -")
    OP.add_option('-i', '--index', dest='idx', action='store_true',
                  help='index files by filename order (persistent file order)')
    OP.add_option('-v', '--version', dest='vers', action='store_true',
                  help='show version')
    argstr = ' '.join(args)
    if len(args) < 2:
        OP.print_usage()
        # print("version %s" % __version__)
        print('-h|--help for help')
        sys.exit(1)
    # Reject an option that expects a value being immediately followed by
    # another option (e.g. "-t -r"), which optparse would swallow silently.
    p = '(-t|--tag|-a|--add|-p|--pattern|-r|--remove|-f|--files)\ +?\-[^\ ]*'
    mo = re.search(p, argstr)
    if mo:
        print('illegal option combination: ', mo.group())
        sys.exit(1)
    (opt, fnames) = OP.parse_args()
    if opt.vers:
        print('%s %s' % (OP.get_prog_name(), __version__))
    if opt.filenames:
        fnames += opt.filenames
    # Validate all paths up front; bail with the number of missing files.
    for fname in fnames:
        if not os.path.exists(fname):
            print('%s: no such file' % fname)
            err += 1
    if err:
        sys.exit(err)
    cfmr = Confirmer(opt)
    fnum = 0
    idx = 0  # NOTE(review): unused
    # Build the filename<->tag substitution engine for whichever direction
    # was requested (empty Subster when neither -p nor --tag2fn given).
    if opt.pattern:
        subster = Subster(opt.pattern)
    elif opt.tag2fn:
        subster = Subster(opt.tag2fn, 'tag2fn')
    else:
        subster = Subster('', '')
    # True when any tag-modifying option was given at all.
    modded = any(
        [opt.clear, opt.remove, opt.add, opt.tag, opt.pattern, opt.justify])
    spkr = Speaker(opt.quiet)
    # Longest basename, used later to align the rename output column.
    top_length = 0
    for fname in fnames:
        bfname = os.path.basename(fname)
        top_length = len(bfname) if len(bfname) > top_length else top_length
    for fname in fnames:
        fnum += 1
        vals = {}
        keys = []  # NOTE(review): unused
        origfn = fname
        if os.path.splitext(fname)[1] == '.mp3':
            # --- MP3 path: accumulate all edits in `vals`, apply at the end.
            try:
                mf = MP3(fname)
            except IOError:
                spkr.speak("\ncan't open %s" % fname)
                continue
            spkr.speak("processing %s" % fname)
            if opt.clear:
                mf.clear()
            for action in opt.remove or []:
                # "tag" removes the whole tag; "tag=value" removes one value.
                k, v = (action.split('=', 1) + [''])[:2]
                vals[k] = mf.pop(k, [])
                if k and not v:
                    vals[k] = []
                elif v and v in vals[k]:
                    vals[k].remove(v)
            for action in opt.tag or []:
                k, v = (action.split('=', 1) + [''])[:2]
                vals[k] = [v]
            for action in opt.add or []:
                k, v = (action.split('=', 1) + [''])[:2]
                if vals.get(k, []):
                    vals[k] += mf.pop(k, [])
                else:
                    vals[k] = mf.pop(k, [])
                vals[k].extend([v])
            if subster.pattern:
                # Merge tags parsed out of the filename pattern.
                d = subster.getdict(fname)
                for k in d:
                    values = d.get(k, [])
                    if not isinstance(values, list):
                        values = [values]
                    try:
                        vals[k].extend(values)
                    except KeyError:
                        vals[k] = values
            if opt.justify:
                # Zero-pad tracknumber to the width of the file count.
                if not vals.get('tracknumber'):
                    vals['tracknumber'] = fnum
                width = len(str(len(fnames)))
                n = width - len(str(vals['tracknumber']))
                vals['tracknumber'] = [n * '0' + str(vals['tracknumber'])]
            if not modded:
                # Read-only invocation: just pretty-print existing tags.
                if not opt.quiet:
                    print(mf.pprint())
                continue
            if opt.noact or opt.confirm:
                for k in vals:
                    print(k + '=' + str(vals[k]))
                if opt.noact:
                    continue
                if opt.confirm and not cfmr.confirm():
                    continue
            for k in vals:
                try:
                    mf.update({k: vals[k]})
                    # mf.save( )
                except ValueError:
                    pass
            mf.save()
        else:
            # --- Generic (non-MP3) path: edits are applied immediately.
            try:
                # print(fname)
                mf = File(fname)
            except IOError:
                spkr.speak("can't open %s" % fname)
                continue
            spkr.speak(os.path.basename(fname))
            if opt.idx:
                # Persist the current file order in a custom 'idx' tag.
                trn = mf.get('tracknumber', None)
                mf['idx'] = unicode(fnum)
                # NOTE(review): trn is whatever mf.get returns (likely a
                # list); `+=` here relies on mutagen's coercion -- confirm.
                if trn:
                    mf['idx'] += trn
                mf.save()
                print(' indexed')
            if opt.clear:
                mf.clear()
                spkr.speak('\n\ttags cleared..')
            for action in opt.remove or []:
                k, v = (action.split('=', 1) + [''])[:2]
                t = mf.pop(k, [])
                if v and v in t:
                    t.remove(v)
                    spkr.speak(str(k) + ' removes ' + str(v))
                if v and t:
                    # Put back the remaining values when only one was removed.
                    mf.update({k: t})
            for action in opt.tag or []:
                if '=' in action:
                    k, v = action.split('=', 1)
                    if k and v:
                        mf.update({k: [v]})
                        spkr.speak('\t\ttag set: ' + k + '=' + v)
            for action in opt.add or []:
                if '=' in action:
                    k, v = action.split('=', 1)
                    mf.update({k: mf.get(k, []) + [v]})
                    spkr.speak('\n\ttag appended: ' + k + '=' + v)
            if subster.mode == 'fn2tag':
                d = subster.getdict(fname)
                for k in d:
                    mf.update({k: d[k]})
                    spkr.speak('\n\tfrom filename: ' + k + '=' + d[k])
            if subster.mode == 'tag2fn':
                # Rebuild the file name from tag values: the pattern list
                # alternates literal text and tag names (tracked via `lit`).
                fname = ''
                fnlist = subster.getfnlist()
                if 'tracknumber' in fnlist:
                    tn = 1
                else:
                    tn = 0
                lit = True
                for item in fnlist:
                    lit = not lit
                    if lit:
                        # Normalize 'track' vs 'tracknumber' to whichever
                        # name the pattern actually uses.
                        if not tn and item == 'tracknumber':
                            item = 'track'
                        if tn and item == 'track':
                            item = 'tracknumber'
                        if item.startswith('track') and opt.justify:
                            subst = mf[item][0].rjust(2, '0')
                        else:
                            subst = mf[item][0]
                        if opt.symbols:
                            # Strip characters the user declared unsafe.
                            pat = '[' + opt.symbols + ']'
                            subst = re.sub(pat, '', subst)
                        subst = subst.strip()
                        fname += subst
                    else:
                        fname += item
                # '/' would change the directory; replace with '-'.
                if '/' in fname:
                    fname = re.sub('/', '-', fname)
                # if opt.map:
                #     fname = map(fname,opt.map)
            if opt.noact or opt.confirm:
                pass  # NOTE(review): dry-run display for non-MP3 files
                      # appears unimplemented here
            if not any([modded, opt.tag2fn, opt.quiet]):
                print(mf.pprint(), )
            if cfmr.confirm():
                if opt.tag2fn:
                    if opt.map:
                        # "-m 'a b'": replace every 'a' with 'b' in the name.
                        a, b = opt.map.split()
                        fname = re.sub(a, b, fname)
                    pth = os.path.join(os.path.dirname(origfn), fname)
                    # Align the "-->" column using the longest basename.
                    second_column = top_length + 2
                    tab = (second_column - len(os.path.basename(origfn))) * ' '
                    try:
                        os.rename(origfn, pth)
                        print(tab + '--> ' + fname),
                        # spkr.speak( 'renamed... ' + fname )
                    except IOError:
                        raise IOError
                else:
                    mf.save()
                    spkr.speak('\tsaved!')
def main():
    """Command-line driver mirroring ``python -m cProfile``.

    Builds the option parser, resolves the output path, compiles the target
    (script file or, with -m, a library module) and hands it to ``runctx``.
    Returns the parser so callers/tests can inspect it.
    """
    import os
    import sys
    import runpy
    import pstats
    from optparse import OptionParser

    usage = "cProfile.py [-o output_file_path] [-s sort] [-m module | scriptfile] [arg] ..."
    parser = OptionParser(usage=usage)
    parser.allow_interspersed_args = False
    parser.add_option('-o', '--outfile', dest="outfile",
                      help="Save stats to <outfile>", default=None)
    parser.add_option('-s', '--sort', dest="sort",
                      help="Sort order when printing to stdout, based on pstats.Stats class",
                      default=-1,
                      choices=sorted(pstats.Stats.sort_arg_dict_default))
    parser.add_option('-m', dest="module", action="store_true",
                      help="Profile a library module", default=False)

    if not sys.argv[1:]:
        parser.print_usage()
        sys.exit(2)

    options, args = parser.parse_args()
    sys.argv[:] = args

    # The profiled script may chdir(); pin the output file to an absolute
    # path up front so it still lands where the user asked.
    if options.outfile is not None:
        options.outfile = os.path.abspath(options.outfile)

    if not args:
        parser.print_usage()
        return parser

    if options.module:
        code = "run_module(modname, run_name='__main__')"
        globs = {'run_module': runpy.run_module, 'modname': args[0]}
    else:
        target = args[0]
        sys.path.insert(0, os.path.dirname(target))
        with open(target, 'rb') as source:
            code = compile(source.read(), target, 'exec')
        globs = {
            '__file__': target,
            '__name__': '__main__',
            '__package__': None,
            '__cached__': None,
        }

    try:
        runctx(code, globs, None, options.outfile, options.sort)
    except BrokenPipeError as exc:
        # A downstream consumer (e.g. `| head`) closed the pipe; drop stdout
        # to prevent "Exception ignored" noise during interpreter shutdown.
        sys.stdout = None
        sys.exit(exc.errno)
    return parser
def Main():
    """Entry point of UPT (UEFI Packaging Tool).

    Builds the command line, copies the raw option values onto canonical
    attribute names on ``Opt``, locates the workspace, opens the
    distribution-package database, dispatches to the selected sub-command
    module (create/install/remove/list/replace/test-install), and commits
    or rolls back the database and file changes on the way out.

    Returns 0 on success or a UPT error code.
    """
    Logger.Initialize()

    Parser = OptionParser(version=(MSG_VERSION + ' Build ' + gBUILD_VERSION),
                          description=MSG_DESCRIPTION,
                          prog="UPT.exe", usage=MSG_USAGE)

    Parser.add_option("-d", "--debug", action="store", type="int",
                      dest="debug_level", help=ST.HLP_PRINT_DEBUG_INFO)
    Parser.add_option("-v", "--verbose", action="store_true",
                      dest="opt_verbose",
                      help=ST.HLP_PRINT_INFORMATIONAL_STATEMENT)
    # NOTE(review): dest "opt_slient" (sic) is kept -- other code reads it.
    Parser.add_option("-s", "--silent", action="store_true",
                      dest="opt_slient", help=ST.HLP_RETURN_NO_DISPLAY)
    Parser.add_option("-q", "--quiet", action="store_true",
                      dest="opt_quiet", help=ST.HLP_RETURN_AND_DISPLAY)
    Parser.add_option("-i", "--install", action="append", type="string",
                      dest="Install_Distribution_Package_File",
                      help=ST.HLP_SPECIFY_PACKAGE_NAME_INSTALL)
    Parser.add_option("-c", "--create", action="store", type="string",
                      dest="Create_Distribution_Package_File",
                      help=ST.HLP_SPECIFY_PACKAGE_NAME_CREATE)
    Parser.add_option("-r", "--remove", action="store", type="string",
                      dest="Remove_Distribution_Package_File",
                      help=ST.HLP_SPECIFY_PACKAGE_NAME_REMOVE)
    Parser.add_option("-t", "--template", action="store", type="string",
                      dest="Package_Information_Data_File",
                      help=ST.HLP_SPECIFY_TEMPLATE_NAME_CREATE)
    Parser.add_option("-p", "--dec-filename", action="append", type="string",
                      dest="EDK2_DEC_Filename",
                      help=ST.HLP_SPECIFY_DEC_NAME_CREATE)
    Parser.add_option("-m", "--inf-filename", action="append", type="string",
                      dest="EDK2_INF_Filename",
                      help=ST.HLP_SPECIFY_INF_NAME_CREATE)
    Parser.add_option("-l", "--list", action="store_true",
                      dest="List_Dist_Installed",
                      help=ST.HLP_LIST_DIST_INSTALLED)
    Parser.add_option("-f", "--force", action="store_true", dest="Yes",
                      help=ST.HLP_DISABLE_PROMPT)
    Parser.add_option("-n", "--custom-path", action="store_true",
                      dest="CustomPath", help=ST.HLP_CUSTOM_PATH_PROMPT)
    Parser.add_option("-x", "--free-lock", action="store_true",
                      dest="SkipLock", help=ST.HLP_SKIP_LOCK_CHECK)
    Parser.add_option("-u", "--replace", action="store", type="string",
                      dest="Replace_Distribution_Package_File",
                      help=ST.HLP_SPECIFY_PACKAGE_NAME_REPLACE)
    Parser.add_option("-o", "--original", action="store", type="string",
                      dest="Original_Distribution_Package_File",
                      help=ST.HLP_SPECIFY_PACKAGE_NAME_TO_BE_REPLACED)
    Parser.add_option("--use-guided-paths", action="store_true",
                      dest="Use_Guided_Paths", help=ST.HLP_USE_GUIDED_PATHS)
    Parser.add_option("-j", "--test-install", action="append", type="string",
                      dest="Test_Install_Distribution_Package_Files",
                      help=ST.HLP_TEST_INSTALL)

    # Positional arguments are ignored; only the options object is used.
    Opt = Parser.parse_args()[0]

    # Map raw option dests to the canonical attribute names the sub-command
    # modules expect on Opt.
    Var2Var = [
        ("PackageInformationDataFile", Opt.Package_Information_Data_File),
        ("PackFileToInstall", Opt.Install_Distribution_Package_File),
        ("PackFileToCreate", Opt.Create_Distribution_Package_File),
        ("PackFileToRemove", Opt.Remove_Distribution_Package_File),
        ("PackageFileList", Opt.EDK2_DEC_Filename),
        ("ModuleFileList", Opt.EDK2_INF_Filename),
        ("InventoryWs", Opt.List_Dist_Installed),
        ("PackFileToReplace", Opt.Replace_Distribution_Package_File),
        ("PackFileToBeReplaced", Opt.Original_Distribution_Package_File),
        ("UseGuidedPkgPath", Opt.Use_Guided_Paths),
        ("TestDistFiles", Opt.Test_Install_Distribution_Package_Files)
    ]

    for Var in Var2Var:
        setattr(Opt, Var[0], Var[1])

    try:
        GlobalData.gWORKSPACE, GlobalData.gPACKAGE_PATH = GetWorkspace()
    except FatalError as XExcept:
        if Logger.GetLevel() <= Logger.DEBUG_9:
            Logger.Quiet(ST.MSG_PYTHON_ON % (python_version(),
                                             platform) + format_exc())
        return XExcept.args[0]

    # Support WORKSPACE is a long path
    # Only works for windows system
    if pf.system() == 'Windows':
        # Find a free drive letter (Z: down to B:) and `subst` it to the
        # workspace, sidestepping Windows MAX_PATH limitations.
        Vol = 'B:'
        for Index in range(90, 65, -1):
            Vol = chr(Index) + ':'
            if not os.path.isdir(Vol):
                os.system('subst %s "%s"' % (Vol, GlobalData.gWORKSPACE))
                break
        GlobalData.gWORKSPACE = '%s\\' % Vol

    WorkspaceDir = GlobalData.gWORKSPACE

    SetLogLevel(Opt)

    # Recovery manager lets file-system changes be rolled back on failure.
    Mgr = FileHook.RecoverMgr(WorkspaceDir)
    FileHook.SetRecoverMgr(Mgr)

    GlobalData.gDB = IpiDatabase(os.path.normpath(os.path.join(WorkspaceDir, \
        "Conf/DistributionPackageDatabase.db")), WorkspaceDir)
    GlobalData.gDB.InitDatabase(Opt.SkipLock)

    #
    # Make sure the Db will get closed correctly
    #
    try:
        ReturnCode = 0
        CheckConflictOption(Opt)

        RunModule = None
        if Opt.PackFileToCreate:
            # --create: requires -t template; resolve the template path
            # relative to the workspace if it is not found as given.
            if Opt.PackageInformationDataFile:
                if not os.path.exists(Opt.PackageInformationDataFile):
                    if not os.path.exists(
                            os.path.join(WorkspaceDir,
                                         Opt.PackageInformationDataFile)):
                        Logger.Error(
                            "\nUPT", FILE_NOT_FOUND,
                            ST.ERR_NO_TEMPLATE_FILE %
                            Opt.PackageInformationDataFile)
                    else:
                        Opt.PackageInformationDataFile = os.path.join(
                            WorkspaceDir, Opt.PackageInformationDataFile)
            else:
                Logger.Error("UPT", OPTION_MISSING,
                             ExtraData=ST.ERR_REQUIRE_T_OPTION)
            if not Opt.PackFileToCreate.endswith('.dist'):
                Logger.Error("CreatePkg", FILE_TYPE_MISMATCH,
                             ExtraData=ST.ERR_DIST_EXT_ERROR %
                             Opt.PackFileToCreate)
            RunModule = MkPkg.Main
        elif Opt.PackFileToInstall:
            # --install: validate each .dist file and resolve to a full path.
            AbsPath = []
            for Item in Opt.PackFileToInstall:
                if not Item.endswith('.dist'):
                    Logger.Error("InstallPkg", FILE_TYPE_MISMATCH,
                                 ExtraData=ST.ERR_DIST_EXT_ERROR % Item)
                AbsPath.append(GetFullPathDist(Item, WorkspaceDir))
                if not AbsPath:
                    Logger.Error("InstallPkg", FILE_NOT_FOUND,
                                 ST.ERR_INSTALL_DIST_NOT_FOUND % Item)
            Opt.PackFileToInstall = AbsPath
            setattr(Opt, 'PackageFile', Opt.PackFileToInstall)
            RunModule = InstallPkg.Main
        elif Opt.PackFileToRemove:
            # --remove: must be a bare .dist file name (no directory part).
            if not Opt.PackFileToRemove.endswith('.dist'):
                Logger.Error("RemovePkg", FILE_TYPE_MISMATCH,
                             ExtraData=ST.ERR_DIST_EXT_ERROR %
                             Opt.PackFileToRemove)
            head, tail = os.path.split(Opt.PackFileToRemove)
            if head or not tail:
                Logger.Error("RemovePkg", FILE_TYPE_MISMATCH,
                             ExtraData=ST.ERR_DIST_FILENAME_ONLY_FOR_REMOVE %
                             Opt.PackFileToRemove)
            setattr(Opt, 'DistributionFile', Opt.PackFileToRemove)
            RunModule = RmPkg.Main
        elif Opt.InventoryWs:
            RunModule = InventoryWs.Main
        elif Opt.PackFileToBeReplaced and not Opt.PackFileToReplace:
            # -o without -u is an error: nothing to replace with.
            Logger.Error("ReplacePkg", OPTION_MISSING,
                         ExtraData=ST.ERR_REQUIRE_U_OPTION)
        elif Opt.PackFileToReplace:
            # --replace: both new (-u) and original (-o) must be .dist files;
            # the original must be a bare file name.
            if not Opt.PackFileToReplace.endswith('.dist'):
                Logger.Error("ReplacePkg", FILE_TYPE_MISMATCH,
                             ExtraData=ST.ERR_DIST_EXT_ERROR %
                             Opt.PackFileToReplace)
            if not Opt.PackFileToBeReplaced:
                Logger.Error("ReplacePkg", OPTION_MISSING,
                             ExtraData=ST.ERR_REQUIRE_O_OPTION)
            if not Opt.PackFileToBeReplaced.endswith('.dist'):
                Logger.Error("ReplacePkg", FILE_TYPE_MISMATCH,
                             ExtraData=ST.ERR_DIST_EXT_ERROR %
                             Opt.PackFileToBeReplaced)
            head, tail = os.path.split(Opt.PackFileToBeReplaced)
            if head or not tail:
                Logger.Error(
                    "ReplacePkg", FILE_TYPE_MISMATCH,
                    ExtraData=ST.ERR_DIST_FILENAME_ONLY_FOR_REPLACE_ORIG %
                    Opt.PackFileToBeReplaced)
            AbsPath = GetFullPathDist(Opt.PackFileToReplace, WorkspaceDir)
            if not AbsPath:
                Logger.Error("ReplacePkg", FILE_NOT_FOUND,
                             ST.ERR_REPLACE_DIST_NOT_FOUND %
                             Opt.PackFileToReplace)
            Opt.PackFileToReplace = AbsPath
            RunModule = ReplacePkg.Main
        elif Opt.Test_Install_Distribution_Package_Files:
            # --test-install: dry-run install of one or more .dist files.
            for Dist in Opt.Test_Install_Distribution_Package_Files:
                if not Dist.endswith('.dist'):
                    Logger.Error("TestInstall", FILE_TYPE_MISMATCH,
                                 ExtraData=ST.ERR_DIST_EXT_ERROR % Dist)
            setattr(Opt, 'DistFiles',
                    Opt.Test_Install_Distribution_Package_Files)
            RunModule = TestInstall.Main
        else:
            Parser.print_usage()
            return OPTION_MISSING

        ReturnCode = RunModule(Opt)
    except FatalError as XExcept:
        ReturnCode = XExcept.args[0]
        if Logger.GetLevel() <= Logger.DEBUG_9:
            Logger.Quiet(ST.MSG_PYTHON_ON % (python_version(), platform) + \
                format_exc())
    finally:
        # Roll back DB and file-system changes on failure (except the
        # "already installed" case); otherwise commit both.
        try:
            if ReturnCode != 0 and ReturnCode != UPT_ALREADY_INSTALLED_ERROR:
                Logger.Quiet(ST.MSG_RECOVER_START)
                GlobalData.gDB.RollBack()
                Mgr.rollback()
                Logger.Quiet(ST.MSG_RECOVER_DONE)
            else:
                GlobalData.gDB.Commit()
                Mgr.commit()
        except Exception:
            Logger.Quiet(ST.MSG_RECOVER_FAIL)
        GlobalData.gDB.CloseDb()

        # Undo the drive-letter substitution created above (Windows only).
        if pf.system() == 'Windows':
            os.system('subst %s /D' % GlobalData.gWORKSPACE.replace('\\', ''))

    return ReturnCode