Example #1
def test_preprocr():
    # pre-process the code and make it suitable for scanner
    parser = OptionParser()
    parser.add_option("-i", "--infile", dest="infnm",
                      help="Path to input C file", metavar="FILE", default="")
    parser.add_option("-o", "--outfile", dest="outfnm",
                      help="Path to output C file", metavar="FILE", default="")
    #parser.add_option("-q", "--quiet",
    #                  action="store_False", dest="verbose", default=True,
    #                                    help="don't print status messages to stdout")

    (options, args) = parser.parse_args()

    if options.infnm=="":
        parser.print_help()
        exit(-1)
    if os.path.isfile(options.infnm):       
        J=preprocr(options.infnm,options.outfnm)
        J.labeling()
    else:
        print "preprocessor: file not found: "+options.infnm
        exit(-1)
Example #2
def main():
    description = ("This application generates .h and .ld files for symbols defined in input file. "
                   "The input symbols file can be generated using nm utility like this: "
                   "esp32-ulp-nm -g -f posix <elf_file> > <symbols_file>")

    parser = OptionParser(description=description)
    parser.add_option("-s", "--symfile", dest="symfile",
                      help="symbols file name", metavar="SYMFILE")
    parser.add_option("-o", "--outputfile", dest="outputfile",
                      help="destination .h and .ld files name prefix", metavar="OUTFILE")

    (options, args) = parser.parse_args()
    if options.symfile is None:
        parser.print_help()
        return 1

    if options.outputfile is None:
        parser.print_help()
        return 1

    with open(options.outputfile + ".h", 'w') as f_h, \
         open(options.outputfile + ".ld", 'w') as f_ld, \
         open(options.symfile) as f_sym:
        gen_ld_h_from_sym(f_sym, f_ld, f_h)
    return 0
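
The three-file with statement above keeps all handles in one scope and closes them together on exit; a minimal standalone sketch of the same multi-context idiom (hypothetical file names, not part of the ESP-IDF script):

with open("out.h", "w") as f_h, \
     open("out.ld", "w") as f_ld:
    # Both handles are open here and both are closed automatically,
    # even if an exception is raised inside the block.
    f_h.write("// generated header\n")
    f_ld.write("/* generated linker script */\n")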
Example #3
def generate_from_command_line_options(argv, msg_template_dict, srv_template_dict, module_template_dict = {}):
    from optparse import OptionParser
    parser = OptionParser("[options] <srv file>")
    parser.add_option("-p", dest='package',
                      help="ros package the generated msg/srv files belongs to")
    parser.add_option("-o", dest='outdir',
                      help="directory in which to place output files")
    parser.add_option("-I", dest='includepath',
                      help="include path to search for messages",
                      action="append")
    parser.add_option("-m", dest='module',
                      help="write the module file",
                      action='store_true', default=False)
    parser.add_option("-e", dest='emdir',
                      help="directory containing template files",
                      default=sys.path[0])

    (options, argv) = parser.parse_args(argv)

    if( not options.package or not options.outdir or not options.emdir):
        parser.print_help()
        exit(-1)

    if( options.module ):
        generate_module(options.package, options.outdir, options.emdir, module_template_dict)
    else:
        if len(argv) > 1:
            generate_from_file(argv[1], options.package, options.outdir, options.emdir, options.includepath, msg_template_dict, srv_template_dict)
        else:
            parser.print_help()
            exit(-1)
Example #4
def main():
    """ Handles command line args. """
    debug = False
    usage = dedent("""\
        %prog [options]
        Updates a server's sources, vendor libraries, packages CSS/JS
        assets, migrates the database, and other nifty deployment tasks.
        """.rstrip())

    options = OptionParser(usage=usage)
    e_help = ("Type of environment. One of (%s) Example: update_site.py "
              "-e stage" % '|'.join(ENV_BRANCH.keys()))
    options.add_option("-e", "--environment", help=e_help)
    options.add_option("-v", "--verbose",
                       help="Echo actions before taking them.",
                       action="store_true", dest="verbose")
    (opts, _) = options.parse_args()

    if opts.verbose:
        debug = True
    if opts.environment in ENV_BRANCH.keys():
        update_site(opts.environment, debug)
    else:
        sys.stderr.write("Invalid environment!\n")
        options.print_help(sys.stderr)
        sys.exit(1)
Example #5
def main():
	usage = "usage: %prog [options] arg1 [arg2]"
	desc = """arg1: file with the output of the baseline search engine (ex: svm.test.res) 
	arg2: predictions file from svm (ex: test/svm.pred)
	if arg2 is omitted only the search engine is evaluated"""

	parser = OptionParser(usage=usage, description=desc)
	parser.add_option("-t", "--threshold", dest="th", default=15, type=int, 
	                  help="supply a value for computing Precision up to a given threshold "
	                  "[default: %default]", metavar="VALUE")
	parser.add_option("-r", "--reranking_threshold", dest="reranking_th", default=None, type=float, 
	                  help="if maximum prediction score for a set of candidates is below this threshold, do not re-rank the candidate list. "
	                  "[default: %default]", metavar="VALUE")
	parser.add_option("-f", "--format", dest="format", default="trec", 
	                  help="format of the result file (trec, answerbag): [default: %default]", 
	                  metavar="VALUE")	 	  
	parser.add_option("-v", "--verbose", dest="verbose", default=False, action="store_true",
	                  help="produce verbose output [default: %default]")	 	  
	parser.add_option("--ignore_noanswer", dest="ignore_noanswer", default=False, action="store_true",
	                  help="ignore questions with no correct answer [default: %default]")	 	  
	
	(options, args) = parser.parse_args()

	if len(args) == 1:
		res_fname = args[0]
		eval_search_engine(res_fname, options.format, options.th)
	elif len(args) == 2:
		res_fname = args[0]
		pred_fname = args[1]	
		eval_reranker(res_fname, pred_fname, options.format, options.th, 
		              options.verbose, options.reranking_th, options.ignore_noanswer)
	else:
		parser.print_help()
		sys.exit(1)
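
optparse substitutes the literal %default marker in a help string with the option's default value when the help text is rendered, which is how the thresholds above are documented; a minimal sketch:

from optparse import OptionParser

parser = OptionParser()
parser.add_option("-t", "--threshold", dest="th", default=15, type="int",
                  help="precision cut-off [default: %default]")
parser.print_help()  # the -t line is printed as "... [default: 15]"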
Example #6
def main():
    parser = OptionParser()
    parser.add_option("-f", "--file", dest="filename",
                      help="Database file to load", metavar="FILE")
    parser.add_option("-d", "--dest", dest="destfile",
                      help="Database file to write to. If not supplied will use input database", metavar="FILE")
    parser.add_option("-n", "--name", dest="name",
                      help="Attribute Name. If 'all' then all attributes will be changed", metavar="name")
    parser.add_option("-e", "--enable", dest="enabled", action="store_true", default=False, help="If set enable the meter. Otherwise disable")
    parser.add_option("-p", "--protocols", action="append", dest="protocols",
                      help="Protocols to enable / disable the Attribute to. Not adding this means all")

    (options, args) = parser.parse_args()

    if options.filename is None or options.filename == "":
        print "ERROR: No Database file supplied\n"
        parser.print_help()
        sys.exit(1)

    if options.destfile is None or options.destfile == "":
        options.destfile = options.filename

    if options.name is None or options.name == "":
        print "ERROR: No Name\n"
        parser.print_help()
        sys.exit(1)

    toggle_attribute_meter(options.filename, options.destfile, options.name, options.enabled, options.protocols)
Example #7
    def main(self, args=None):
        """
        meant to be called

            if __name__ == '__main__':
                MyApplication().main()

        but lino.runscript calls it with args=sys.argv[:2] (command-line
        arguments are shifted by one)

        """
        if args is None:
            args = sys.argv[1:]

        p = OptionParser(
            usage=self.usage,
            description=self.description)

        self.setupOptionParser(p)

        try:
            options, args = p.parse_args(args)
            self.applyOptions(options, args)
            return self.run(self.console)

        except UsageError, e:
            p.print_help()
            return -1
Example #8
def get_options():
    optParser = OptionParser(usage=os.path.basename(sys.argv[0]) + " [<options>] <input_file_or_port>")
    optParser.add_option("-s", "--separator", default=";",
                         help="separating character for fields")
    optParser.add_option("-q", "--quotechar", default='',
                         help="quoting character for fields")
    optParser.add_option("-x", "--xsd", help="xsd schema to use")
    optParser.add_option("-a", "--validation", action="store_true",
                         default=False, help="enable schema validation")
    optParser.add_option("-p", "--split", action="store_true",
                         default=False, help="split in different files for the first hierarchy level")
    optParser.add_option("-o", "--output", help="base name for output")
    options, args = optParser.parse_args()
    if len(args) != 1:
        optParser.print_help()
        sys.exit()
    if options.validation and not haveLxml:
        print("lxml not available, skipping validation", file=sys.stderr)
        options.validation = False
    if args[0].isdigit():
        if not options.xsd:
            print("a schema is mandatory for stream parsing", file=sys.stderr)
            sys.exit()
        options.source = getSocketStream(int(args[0]))
    else:
        options.source = args[0]
    if options.output and options.output.isdigit() and options.split:
        print("it is not possible to use splitting together with stream output", file=sys.stderr)
        sys.exit()
    return options 
Example #9
def main():
  from optparse import OptionParser
  
  parser = OptionParser()
  parser.add_option('-c', type='int', dest='concurrency', default=DEFAULT_CONCURRENCY,
    help='Number of multiple requests to make')
  parser.add_option('-s', type='int', dest='seconds', default=DEFAULT_SECONDS,
    help='Number of seconds to perform')
  parser.add_option("--mode", dest="mode", default=None)
  options, args = parser.parse_args()
  
  #run tests
  if 'test' == options.mode:
    test_parse_http_load_result()
    return
  
  if 1 != len(args):
    parser.print_help()
    return
  
  assert 1 <= options.concurrency <= 100
  assert 1 <= options.seconds <= 100
  
  #run bench
  old_sys_stderr = sys.stderr
  sys.stderr = StringIO()
  try:
    run_bench(args[0], options.seconds, options.concurrency)
  except Exception, e:
    print e
    import traceback
    print traceback.format_exc()
Example #10
def main():
    atexit.register(fabric_cleanup, True)
    parser = OptionParser(usage="%prog RELEASE_DIR DESTINATION")
    (options, args) = parser.parse_args(sys.argv[1:])
    
    comm_obj = _CommObj()
    
    if len(args) != 2:
        parser.print_help()
        sys.exit(-1)
        
    if not os.path.isdir(args[0]):
        print "release directory %s not found" % args[0]
        sys.exit(-1)
    
    destparts = args[1].split(':', 1)
    if len(destparts)==1: # it's a local release test area
        if not os.path.isdir(args[1]):
            _setup_local_release_dir(args[1])
        comm_obj.put = shutil.copy
        comm_obj.put_dir = shutil.copytree
        comm_obj.run = local
        
        _push_release(args[0], args[1], comm_obj)
    else: # assume args[1] is a remote host:destdir
        comm_obj.put = put
        comm_obj.put_dir = put_dir
        comm_obj.run = run
        
        home = destparts[1]

        with settings(host_string=destparts[0]):
            _push_release(args[0], home, comm_obj)
Example #11
    def __init__(self):
        gr.top_block.__init__(self)

        usage="%prog: [options] output_filename"
        parser = OptionParser(option_class=eng_option, usage=usage)
        parser.add_option("-I", "--audio-input", type="string", default="",
                          help="pcm input device name.  E.g., hw:0,0 or /dev/dsp")
        parser.add_option("-r", "--sample-rate", type="eng_float", default=48000,
                          help="set sample rate to RATE (48000)")
        parser.add_option("-N", "--nsamples", type="eng_float", default=None,
                          help="number of samples to collect [default=+inf]")
     
        (options, args) = parser.parse_args ()
        if len(args) != 1:
            parser.print_help()
            raise SystemExit, 1
        filename = args[0]

        sample_rate = int(options.sample_rate)
        src = audio.source (sample_rate, options.audio_input)
        dst = gr.file_sink (gr.sizeof_float, filename)

        if options.nsamples is None:
            self.connect((src, 0), dst)
        else:
            head = gr.head(gr.sizeof_float, int(options.nsamples))
            self.connect((src, 0), head, dst)
Example #12
def main():
    from optparse import OptionParser

    from mrec import load_sparse_matrix, save_sparse_matrix

    parser = OptionParser()
    parser.add_option('--input_format',dest='input_format',help='format of input dataset tsv | csv | mm (matrixmarket) | csr (scipy.sparse.csr_matrix) | fsm (mrec.sparse.fast_sparse_matrix)')
    parser.add_option('--input',dest='input',help='filepath to input')
    parser.add_option('--output_format',dest='output_format',help='format of output dataset(s) tsv | csv | mm (matrixmarket) | csr (scipy.sparse.csr_matrix) | fsm (mrec.sparse.fast_sparse_matrix)')
    parser.add_option('--output',dest='output',help='filepath for output')

    (opts,args) = parser.parse_args()
    if not opts.input or not opts.output or not opts.input_format or not opts.output_format:
        parser.print_help()
        raise SystemExit

    if opts.output_format == opts.input_format:
        raise SystemExit('input and output format are the same, not doing anything')

    if opts.input_format == 'tsv' and opts.output_format == 'mm':
        # we can do this without loading the data
        tsv2mtx(opts.input,opts.output)
    else:
        data = load_sparse_matrix(opts.input_format,opts.input)
        save_sparse_matrix(data,opts.output_format,opts.output)
Example #13
    def __init__(self):
        gr.top_block.__init__(self)

        usage = "%prog: [options] filename"
        parser = OptionParser(option_class=eng_option, usage=usage)
        parser.add_option("-r", "--sample-rate", type="eng_float", default=48000,
                          help="set sample rate to RATE (48000)")
        parser.add_option("-N", "--samples", type="eng_float", default=None,
                          help="number of samples to record")
        (options, args) = parser.parse_args ()
        if len(args) != 1 or options.samples is None:
            parser.print_help()
            raise SystemExit, 1

        sample_rate = int(options.sample_rate)
        ampl = 0.1

        src0 = analog.sig_source_f(sample_rate, analog.GR_SIN_WAVE, 350, ampl)
        src1 = analog.sig_source_f(sample_rate, analog.GR_SIN_WAVE, 440, ampl)
        head0 = blocks.head(gr.sizeof_float, int(options.samples))
        head1 = blocks.head(gr.sizeof_float, int(options.samples))
        dst = blocks.wavfile_sink(args[0], 2, int(options.sample_rate), 16)

        self.connect(src0, head0, (dst, 0))
        self.connect(src1, head1, (dst, 1))
Example #14
class VideoGenOptionParser(object):
    def __init__(self):
        self._parser = OptionParser()
        
        self._parser.add_option("-c", "--conf", help="input file in XML format", dest="conf")
        self._parser.add_option("-o", "--output", help="place where to generate output file (default is ./output.avi)", dest="output")
        self._parser.add_option("-t", "--tmp", help="directory for intermediate files (default is /tmp/)", dest="tmp")
        self._parser.add_option("-a", "--attach", help="a string which should be added before each file that is loaded", dest="attach")
        self._parser.add_option("-e", "--encoder", help="location of ffmpeg encoder", dest="encoder")
        self._parser.add_option("-y", action="store_true", help="do not ask to overwrite files", dest="overwrite")
        self._parser.add_option("-s", "--subtitles", help="Adds *.srt soft subtitles file to the output directory", dest="softsubtitles")
        
        self._options = {}
        (self._options, args) = self._parser.parse_args()
        
        if self._options.output == None:
            self._options.output = "./output.avi"
            
        if self._options.tmp == None:
            self._options.tmp = "/tmp/"
            
        if self._options.attach == None:
            self._options.attach = ""
            
        if self._options.encoder == None:
            self._options.encoder = "ffmpeg"
            
        if self._options.overwrite == None:
            self._options.overwrite = False
        
    def get_options(self):
        return self._options
    
    def print_help(self):
        self._parser.print_help()
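
The None checks in __init__ can usually be folded into the parser itself, since add_option accepts a default value; a small sketch of an equivalent setup (same option names as above, trimmed to three of them):

from optparse import OptionParser

parser = OptionParser()
parser.add_option("-o", "--output", dest="output", default="./output.avi",
                  help="place where to generate output file")
parser.add_option("-t", "--tmp", dest="tmp", default="/tmp/",
                  help="directory for intermediate files")
parser.add_option("-y", dest="overwrite", action="store_true", default=False,
                  help="do not ask to overwrite files")
(options, args) = parser.parse_args([])
# options.output == "./output.avi", options.tmp == "/tmp/", options.overwrite is False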
Example #15
def main():
    try:
        cpus = multiprocessing.cpu_count()
    except NotImplementedError:
        cpus = 1
    parser = OptionParser(usage=helptext)
    parser.add_option("-p", "--processes", dest="procs", help="How many chunks to render in parallel. A good number for this is the number of cores in your computer. Default %s" % cpus, default=cpus, action="store", type="int")
    parser.add_option("-z", "--zoom", dest="zoom", help="Sets the zoom level manually instead of calculating it. This can be useful if you have outlier chunks that make your world too big. This value will make the highest zoom level contain (2**ZOOM)^2 tiles", action="store", type="int")
    parser.add_option("-d", "--delete", dest="delete", help="Clear all caches. Next time you render your world, it will have to start completely over again. This is probably not a good idea for large worlds. Use this if you change texture packs and want to re-render everything.", action="store_true")

    options, args = parser.parse_args()

    if len(args) < 1:
        print "You need to give me your world directory"
        parser.print_help()
        sys.exit(1)
    worlddir = args[0]

    if len(args) != 2:
        parser.error("Where do you want to save the tiles?")
    destdir = args[1]

    if options.delete:
        return delete_all(worlddir, destdir)

    # First generate the world's chunk images
    w = world.WorldRenderer(worlddir)
    w.go(options.procs)

    # Now generate the tiles
    q = quadtree.QuadtreeGen(w, destdir, depth=options.zoom)
    q.go(options.procs)
Example #16
def main():
    parser = OptionParser(epilog="***Required option")
    parser.add_option("-r", "--rows", help="***Number of rows in word search", dest='rows', metavar="ROWS", type="int")
    parser.add_option("-c", "--columns", help="***Number of columns in word search", dest='columns', metavar="COLUMNS", type="int")
    parser.add_option("-w", "--words", help="***File location of words or space-separated list of words, pass True if list, False if not", dest='words', metavar="WORDS", nargs=2)
    parser.add_option("-o", "--output", help="File location of the output file", dest='output', metavar="OUTPUT") 
    parser.add_option("-g", "--challenge", help="Make the word search only out of letters from the words", dest="challenge", action="store_true", default=False)
    parser.add_option("-v", "--verbose", help="Print status messages", action="store_true", dest="verbose",default=False)
    (options, args) = parser.parse_args()
    mandatory = ['rows', 'columns', 'words']
    for m in mandatory:
        if not options.__dict__[m]:
            print "Mandatory option is missing\n"
            parser.print_help()
            exit(-1)
    if options.words[1].lower() == "true":
        options.words = options.words[0].split()
    else:
        input = open(options.words[0], "r")
        words = []
        for line in input:
            if not line == '':
                words.append(line.split("\n")[0])
        options.words = words
        input.close()
    puzzle = createPuzzle(options.words, options.rows, options.columns, options.challenge, options.verbose)    
    printArray(puzzle[0])
Example #17
def get_options(demods):
    parser = OptionParser(option_class=eng_option, conflict_handler="resolve")
    parser.add_option("","--from-file", default=None,
                      help="input file of samples to demod")
    parser.add_option("-m", "--modulation", type="choice", choices=demods.keys(), 
                      default='psk',
                      help="Select modulation from: %s [default=%%default]"
                            % (', '.join(demods.keys()),))
    parser.add_option("-r", "--bitrate", type="eng_float", default=250e3,
                      help="Select modulation bit rate (default=%default)")
    parser.add_option("-S", "--samples-per-symbol", type="float", default=2,
                      help="set samples/symbol [default=%default]")
    if not parser.has_option("--verbose"):
        parser.add_option("-v", "--verbose", action="store_true", default=False)
    if not parser.has_option("--log"):
        parser.add_option("", "--log", action="store_true", default=False,
                      help="Log all parts of flow graph to files (CAUTION: lots of data)")

    uhd_receiver.add_options(parser)

    demods = digital.modulation_utils.type_1_demods()
    for mod in demods.values():
        mod.add_options(parser)
		      
    (options, args) = parser.parse_args()
    if len(args) != 0:
        parser.print_help()
        sys.exit(1)
	
    return (options, args)
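
The has_option guard above avoids an OptionConflictError when uhd_receiver and the demodulator classes both try to register the same flag; combined with conflict_handler="resolve" in the constructor, a later definition can also replace an earlier one. A minimal sketch of both mechanisms:

from optparse import OptionParser

parser = OptionParser(conflict_handler="resolve")
parser.add_option("-v", "--verbose", action="store_true", default=False)

# Only register the option if no other component has added it yet.
if not parser.has_option("--verbose"):
    parser.add_option("-v", "--verbose", action="store_true", default=False)

# With conflict_handler="resolve", re-adding the flag replaces the old
# definition instead of raising OptionConflictError.
parser.add_option("-v", "--verbose", action="store_false", default=True,
                  help="later definition wins")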
Example #18
def main():
    usage = './stem_stem_orientations.py temp.comp'
    usage += '''
    Output the orientations of the stems as a set of
    three coordinates.
    1. The distance between the closest points of the
    two stems.
    2. The angle between the two stems in the plane defined
    by the axis of stem1 and the vector between the closest
    points between the two stems.
    3. The angle of the second stem out of the plane.
    '''

    parser = OptionParser()

    #parser.add_option('-o', '--options', dest='some_option', default='yo', help="Place holder for a real option", type='str')
    #parser.add_option('-u', '--useless', dest='uselesss', default=False, action='store_true', help='Another useless option')
    parser.add_option('-l', '--loops', dest='loops', default=False, action='store_true', help="Compute the statistics for the loop regions rather than the stems.")

    (options, args) = parser.parse_args()

    if len(args) < 1:
        parser.print_help()
        sys.exit(1)

    for arg in args:
        bg = ttmc.CoarseGrainRNA(arg)
        if options.loops:
            loop_loop_orientations(bg)
        else:
            stem_stem_orientations(bg)
Example #19
def main():
    parser = OptionParser(usage=usage, version="%prog " + sfepy.__version__)
    parser.add_option("-o", "", metavar='filename', action="store",
                      dest="output_filename_trunk",
                      default=None, help=help['filename'])

    (options, args) = parser.parse_args()

    if (len(args) == 1):
        filename_in = args[0]
    else:
        parser.print_help()
        return

    required, other = get_standard_keywords()
    required.remove('equations')

    conf = ProblemConf.from_file(filename_in, required, other)

    app = HomogenizationApp(conf, options, 'homogen:')
    opts = conf.options
    if hasattr(opts, 'parametric_hook'):  # Parametric study.
        parametric_hook = conf.get_function(opts.parametric_hook)
        app.parametrize(parametric_hook)
    app()
Example #20
def main():
    parser = OptionParser(usage="""\
Unpack a MIME message into a directory of files.

Usage: %prog [options] msgfile
""")
    parser.add_option('-d', '--directory',
                      type='string', action='store',
                      help="""Unpack the MIME message into the named
                      directory, which will be created if it doesn't already
                      exist.""")
    opts, args = parser.parse_args()
    if not opts.directory:
        parser.print_help()
        sys.exit(1)

    try:
        msgfile = args[0]
    except IndexError:
        parser.print_help()
        sys.exit(1)

    try:
        os.mkdir(opts.directory)
    except OSError, e:
        # Ignore directory exists error
        if e.errno != errno.EEXIST:
            raise
Example #21
def command():
    parser = OptionParser(usage="%prog DIR [HOST][:][PORT]",
                          version="static 0.3.6")
    options, args = parser.parse_args()
    if len(args) in (1, 2):
        if len(args) == 2:
            parts = args[1].split(":")
            if len(parts) == 1:
                host = parts[0]
                port = None
            elif len(parts) == 2:
                host, port = parts
            else:
                sys.exit("Invalid host:port specification.")
        elif len(args) == 1:
            host, port = None, None
        if not host:
            host = '0.0.0.0'
        if not port:
            port = 9999
        try:
            port = int(port)
        except:
            sys.exit("Invalid host:port specification.")
        app = Cling(args[0])
        try:
            make_server(host, port, app).serve_forever()
        except KeyboardInterrupt:
            print("Cio, baby!")
        except:
            sys.exit("Problem initializing server.")
    else:
        parser.print_help(sys.stderr)
        sys.exit(1)
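
print_help takes an optional output stream, which is what lets the error path above send the usage text to stderr instead of stdout; a short sketch:

import sys
from optparse import OptionParser

parser = OptionParser(usage="%prog DIR [HOST][:][PORT]")
parser.print_help()            # usage text on stdout (the normal case)
parser.print_help(sys.stderr)  # same text on stderr, useful on error paths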
Example #22
def configure():
	global server, srcPath, dstPath, repository
	parser = OptionParser(usage="""\
	Update Doxygen doc on server and send an email to admins.

	Usage: %prog [options]
	""")
	parser.add_option('-s', '--server',
							type='string', action='store',
							help="""Server where the documentation will be uploaded.""")
	parser.add_option('--dest-path',
							type='string', action='store',
							help="""Directory on the server where the documentation will be uploaded.""")
	parser.add_option('--repository',
							type='string', action='store',
							help="""Repository where the project is located.""")
	opts, args = parser.parse_args()

	if not opts.server or not opts.dest_path or not opts.repository:
		parser.print_help()
		sys.exit(1)

	server = opts.server
	dstPath = opts.dest_path
	repository = opts.repository
Example #23
def main():
    usage = """usage: python dedupe.py <graphdb_filename>"""
    parser = OptionParser(usage=usage)
    
    (options, args) = parser.parse_args()
    
    if len(args) != 1:
        parser.print_help()
        exit(-1)
        
    graphdb_filename = args[0]    
    
    gtfsdb = GTFSDatabase( graphdb_filename )

    query = """
    SELECT count(*), monday, tuesday, wednesday, thursday, friday, saturday, sunday, start_date, end_date 
    FROM calendar
    GROUP BY monday, tuesday, wednesday, thursday, friday, saturday, sunday, start_date, end_date"""

    duped_periods = gtfsdb.execute( query )

    equivilants = []

    for count, m,t,w,th,f,s,su,start_date,end_date in duped_periods:
        # no need to check for dupes if there's only one
        if count==1:
            continue
        
        #print count, m, t, w, th, f, s, su, start_date, end_date
        
        # get service_ids for this dow/start_date/end_date combination
        service_ids = [x[0] for x in list(  gtfsdb.execute( "SELECT service_id FROM calendar where monday=? and tuesday=? and wednesday=? and thursday=? and friday=? and saturday=? and sunday=? and start_date=? and end_date=?", (m,t,w,th,f,s,su,start_date,end_date) ) ) ]
        
        # group by service periods with the same set of exceptions
        exception_set_grouper = {}
        for service_id in service_ids:
            exception_set = list(gtfsdb.execute( "SELECT date, exception_type FROM calendar_dates WHERE service_id=?", (service_id,) ) )
            exception_set.sort()
            exception_set = tuple(exception_set)
            
            exception_set_grouper[exception_set] = exception_set_grouper.get(exception_set,[])
            exception_set_grouper[exception_set].append( service_id )
        
        # extend list of equivilants
        for i, exception_set_group in enumerate( exception_set_grouper.values() ):
            equivilants.append( ("%d%d%d%d%d%d%d-%s-%s-%d"%(m,t,w,th,f,s,su,start_date,end_date,i), exception_set_group) )
        
    for new_name, old_names in equivilants:
        for old_name in old_names:
            print old_name, new_name
            
            c = gtfsdb.conn.cursor()
            
            c.execute( "UPDATE calendar SET service_id=? WHERE service_id=?", (new_name, old_name) )
            c.execute( "UPDATE calendar_dates SET service_id=? WHERE service_id=?", (new_name, old_name) )
            c.execute( "UPDATE trips SET service_id=? WHERE service_id=?", (new_name, old_name) )

            gtfsdb.conn.commit()
            
            c.close()
Example #24
def get_options(mods):
    parser = OptionParser(option_class=eng_option, conflict_handler="resolve")
    parser.add_option("-m", "--modulation", type="choice", choices=mods.keys(),
                      default='psk',
                      help="Select modulation from: %s [default=%%default]"
                            % (', '.join(mods.keys()),))
    parser.add_option("", "--amplitude", type="eng_float", default=0.2,
                      help="set Tx amplitude (0-1) (default=%default)")
    parser.add_option("-r", "--bitrate", type="eng_float", default=250e3,
                      help="Select modulation bit rate (default=%default)")
    parser.add_option("-S", "--samples-per-symbol", type="float", default=2,
                      help="set samples/symbol [default=%default]")
    parser.add_option("","--to-file", default=None,
                      help="Output file for modulated samples")
    if not parser.has_option("--verbose"):
        parser.add_option("-v", "--verbose", action="store_true", default=False)
    if not parser.has_option("--log"):
        parser.add_option("", "--log", action="store_true", default=False)

    uhd_transmitter.add_options(parser)

    for mod in mods.values():
        mod.add_options(parser)
		      
    (options, args) = parser.parse_args()
    if len(args) != 0:
        parser.print_help()
        sys.exit(1)
	
    return (options, args)
Example #25
def main():
    parser = OptionParser(usage='usage: %prog [options] message\nexample: %prog hello world')
    parser.add_option('--host', dest='host',
                        help='AMQP server to connect to (default: %default)',
                        default='localhost')
    parser.add_option('-u', '--userid', dest='userid',
                        help='userid to authenticate as (default: %default)',
                        default='guest')
    parser.add_option('-p', '--password', dest='password',
                        help='password to authenticate with (default: %default)',
                        default='guest')
    parser.add_option('--ssl', dest='ssl', action='store_true',
                        help='Enable SSL (default: not enabled)',
                        default=False)

    options, args = parser.parse_args()

    if not args:
        parser.print_help()
        sys.exit(1)

    msg_body = ' '.join(args)

    conn = amqp.Connection(options.host, userid=options.userid, password=options.password, ssl=options.ssl)

    ch = conn.channel()
    ch.exchange_declare('myfan', 'fanout')

    msg = amqp.Message(msg_body, content_type='text/plain', application_headers={'foo': 7, 'bar': 'baz'})

    ch.basic_publish(msg, 'myfan')

    ch.close()
    conn.close()
Example #26
def main(args=sys.argv[1:]):
    
    # parse command line options
    from optparse import OptionParser
    parser = OptionParser()
    parser.add_option('-p', '--port', dest='port', 
                      type="int", default=8888,
                      help="port to run the server on [DEFAULT: %default]")
    parser.add_option('-H', '--host', dest='host',
                      default='127.0.0.1',
                      help="host [DEFAULT: %default]")
    parser.add_option('-d', '--docroot', dest='docroot',
                      default=os.getcwd(),
                      help="directory to serve files from [DEFAULT: %default]")
    options, args = parser.parse_args(args)
    if args:
        parser.print_help()
        parser.exit()

    # create the server
    kwargs = options.__dict__.copy()
    server = MozHttpd(**kwargs)

    print "Serving '%s' at %s:%s" % (server.docroot, server.host, server.port)
    server.start(block=True)
Example #27
def main():
    "Option parsing and launch latex_dir"
    usage = "%prog [-c nb_of_columns -w outtexfile] directory_list"
    parser = OptionParser(usage = usage)
    parser.add_option('-w', dest='outtexfile', type='string',
                      help='output latex file (default is dir/latex_dir.tex)')
    parser.add_option('-c', dest='column', type='int', default = 2,
                      help='number of columns of latex file: 1 or 2')
    parser.add_option('--eps', dest='eps', default=False, action='store_true',
                      help='use eps files instead of pdf')
    (options, args) = parser.parse_args()
    if not args:
        parser.print_help()
        exit(5)
    for directory in args:
        if not options.outtexfile:
            outfile_name = os.sep.join((directory,
                                        'latex_dir_%s.tex' % directory))
        else:
            outfile_name = options.outtexfile
        if options.column not in (1, 2):
            print("invalid number of columns")
            parser.print_help()
            exit(5)
        latex_dir(outfile_name, directory, options.column, eps=options.eps)
        #compile the tex file
        if options.eps:
            os.execlp('latex', 'latex', '-interaction=nonstopmode',
                      '-output-directory', directory, outfile_name)
        else:
            os.execlp('pdflatex', 'pdflatex', '-interaction=nonstopmode',
                      '-output-directory', directory, outfile_name)
Example #28
def main():
	"""
	Launch gamepad2midi with command line options and an empty binding.
	Call the function ``gamepad2midi(api, mapping)`` to custom it.
	"""
	# TODO read a config file or generate an auto config instead of an empty one
	print "WARNING: Gamepad mapping is empty. Create your own mapping using the template mygamepad2midi.py "

	from optparse import OptionParser

	parser = OptionParser()

	parser.add_option("-O", "--output", dest="api", help="MIDI output interface",
		choices=available_midi_apis.keys(), type="choice")

	# TODO rework "print" with logger
	#parser.add_option("-v", "--verbose", dest="verbose", action="store_true", default=False,
	#	help="Print more information to stdout")

	# it is quite a hack to display choices options, but it works
	g = parser.add_option_group("Available output interfaces")
	keys = available_midi_apis.keys()
	keys.sort()
	for api in keys:
		comment = available_midi_apis[api]
		g.add_option("--" + api, help=comment, dest="api", action="store_const", const=api)

	(options, args) = parser.parse_args()

	if options.api == None:
		parser.print_help()
		return

	mapping = InputMapping()
	gamepad2midi(options.api, mapping)
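
The option-group trick above simply attaches one store_const flag per MIDI backend so they appear under their own heading in --help; a small self-contained sketch of the same pattern, using hypothetical backend names in place of available_midi_apis:

from optparse import OptionParser

available = {"alsa": "ALSA sequencer", "jack": "JACK MIDI"}  # hypothetical backends

parser = OptionParser()
parser.add_option("-O", "--output", dest="api", type="choice",
                  choices=list(available.keys()), help="MIDI output interface")

# One store_const flag per backend, all writing into the same destination,
# listed under their own heading in --help.
group = parser.add_option_group("Available output interfaces")
for api in sorted(available):
    group.add_option("--" + api, help=available[api], dest="api",
                     action="store_const", const=api)

(options, args) = parser.parse_args(["--jack"])
# options.api == "jack"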
Example #29
def command_line_run(args_in):
    """ Runs everything needed to execute from the command line, so main method is callable without arg parsing """
    parser = OptionParser(usage="usage: %prog base_url test_filename.yaml [options] ")
    parser.add_option(u"--print-bodies", help="Print all response bodies", action="store", type="string", dest="print_bodies")
    parser.add_option(u"--log", help="Logging level", action="store", type="string")
    parser.add_option(u"--interactive", help="Interactive mode", action="store", type="string")
    parser.add_option(u"--url", help="Base URL to run tests against", action="store", type="string")
    parser.add_option(u"--test", help="Test file to use", action="store", type="string")
    parser.add_option(u'--import_extensions', help='Extensions to import, separated by semicolons', action="store", type="string")
    parser.add_option(u'--vars', help='Variables to set, as a YAML dictionary', action="store", type="string")
    parser.add_option(u'--verbose', help='Put cURL into verbose mode for extra debugging power', action='store_true', default=False, dest="verbose")
    parser.add_option(u'--ssl-insecure', help='Disable cURL host and peer cert verification', action='store_true', default=False, dest="ssl_insecure")
    parser.add_option(u'--absolute-urls', help='Enable absolute URLs in tests instead of relative paths', action="store_true", dest="absolute_urls")

    (args, unparsed_args) = parser.parse_args(args_in)
    args = vars(args)

    # Handle url/test as named, or, failing that, positional arguments
    if not args['url'] or not args['test']:
        if len(unparsed_args) == 2:
            args[u'url'] = unparsed_args[0]
            args[u'test'] = unparsed_args[1]
        elif len(unparsed_args) == 1 and args['url']:
            args['test'] = unparsed_args[0]
        elif len(unparsed_args) == 1 and args['test']:
            args['url'] = unparsed_args[0]
        else:
            parser.print_help()
            parser.error("wrong number of arguments, need both url and test filename, either as 1st and 2nd parameters or via --url and --test")

    args['cwd'] = os.path.realpath(os.path.abspath(os.getcwd()))  # So modules can be loaded from current folder
    main(args)
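
vars(args) works because the optparse Values object stores every parsed option in its __dict__, so the test runner can treat the settings as a plain dictionary; a minimal sketch:

from optparse import OptionParser

parser = OptionParser()
parser.add_option("--url", action="store", type="string")
parser.add_option("--test", action="store", type="string")
(options, extra) = parser.parse_args(["--url", "http://localhost", "case.yaml"])

args = vars(options)      # Values -> plain dict keyed by option dest
args['test'] = extra[0]   # fill the positional argument in by hand
# args == {'url': 'http://localhost', 'test': 'case.yaml'}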
Example #30
def arguments_parse():
    """Handle the command line arguments."""
    parser = OptionParser(usage='usage: %prog [options] arguments')
    parser.add_option('-f', '--file',
                      dest='filename',
                      metavar='FILE',
                      help='specify the input XML testcase file for testing (REQUIRED)')
    parser.add_option('-p', '--packages',
                      dest='custompackages',
                      metavar='FILE',
                      help='specify the input file for custom package processing')
    parser.add_option('-o', '--out',
                      dest='allpackages',
                      metavar='FILE',
                      help='specify the output file to print the list of packages in the input XML')
    (options, args) = parser.parse_args()
    # If no arguments are passed just print the help message
    if options.filename is None:
        print colorize("The input file (specified by the '-f' option) is mandatory.", 'red')
        parser.print_help()
        sys.exit(1)
    if len(args) != 0:
        parser.error('Invalid number of arguments.')
        sys.exit(1)
    # Either call -p or -o, but not both
    if options.custompackages and options.allpackages:
        print colorize("Error: Specify either the '-p' or the '-o' option, but not both.", 'red')
        sys.exit(1)
    # Since both cannot be true, check which is and return accordingly
    return options.filename, options.custompackages, options.allpackages
Example #31
    def __init__(self):
        gr.top_block.__init__(self)

        usage="%prog: [options] tx-freq0 tx-freq1"
        parser = OptionParser (option_class=eng_option, usage=usage)
        parser.add_option("-a", "--args", type="string", default="",
                          help="UHD device address args [default=%default]")
        parser.add_option("", "--spec", type="string", default=None,
                          help="Subdevice of UHD device where appropriate")
        parser.add_option("-A", "--antenna", type="string", default=None,
                          help="select Rx Antenna where appropriate")
        parser.add_option("-s", "--samp-rate", type="eng_float", default=320e3,
                          help="set sample rate [default=%default]")
        parser.add_option("-g", "--gain", type="eng_float", default=None,
                          help="set gain in dB (default is midpoint)")
        (options, args) = parser.parse_args ()

        if len(args) != 2:
            parser.print_help()
            raise SystemExit
        else:
            freq0 = str_to_num(args[0])
            freq1 = str_to_num(args[1])

        # ----------------------------------------------------------------
        # Set up USRP to transmit on both daughterboards

        d = uhd.find_devices(uhd.device_addr(options.args))
        uhd_type = d[0].get('type')

        stream_args = uhd.stream_args('fc32', channels=range(2))
        self.u = uhd.usrp_sink(device_addr=options.args, stream_args=stream_args)

        # Set up USRP system based on type
        if(uhd_type == "usrp"):
            self.u.set_subdev_spec("A:0 B:0")
            tr0 = uhd.tune_request(freq0)
            tr1 = uhd.tune_request(freq1)

        else:
            if abs(freq0 - freq1) > 5.5e6:
                sys.stderr.write("\nError: When not using two separate d'boards, frequencies must be within 5.5 MHz of each other.\n")
                raise SystemExit

            self.u.set_subdev_spec("A:0 A:0")

            mid_freq = (freq0 + freq1)/2.0
            tr0 = uhd.tune_request(freq0, rf_freq=mid_freq,
                                   rf_freq_policy=uhd.tune_request.POLICY_MANUAL)

            tr1 = uhd.tune_request(freq1, rf_freq=mid_freq,
                                   rf_freq_policy=uhd.tune_request.POLICY_MANUAL)

        # Use the tune requests to tune each channel
        self.set_freq(tr0, 0)
        self.set_freq(tr1, 1)

        self.usrp_rate  = options.samp_rate

        self.u.set_samp_rate(self.usrp_rate)
        dev_rate = self.u.get_samp_rate()

        # ----------------------------------------------------------------
        # build two signal sources, interleave them, amplify and
        # connect them to usrp

        sig0 = example_signal_0(self.usrp_rate)
        sig1 = example_signal_1(self.usrp_rate)

        intl = gr.interleave(gr.sizeof_gr_complex)
        self.connect(sig0, (intl, 0))
        self.connect(sig1, (intl, 1))

        # Correct for any difference in requested and actual rates
        rrate = self.usrp_rate / dev_rate
        resamp = blks2.pfb_arb_resampler_ccf(rrate)

        # and wire them up
        self.connect(intl, resamp, self.u)

        if options.gain is None:
            # if no gain was specified, use the mid-point in dB
            g = self.u.get_gain_range()
            options.gain = float(g.start()+g.stop())/2.0

        self.set_gain(options.gain, 0)
        self.set_gain(options.gain, 1)

        # Set the subdevice spec
        if(options.spec):
            self.u.set_subdev_spec(options.spec, 0)

        # Set the antenna
        if(options.antenna):
            self.u.set_antenna(options.antenna, 0)
            self.u.set_antenna(options.antenna, 1)
Example #32
if __name__ == "__main__":
    parser = OptionParser(usage='usage: %prog inFile outConfig\n' +
                          __description__,
                          version='%prog ' + __version__)
    parser.add_option("-r",
                      "--recursive",
                      action='store_true',
                      default=False,
                      help="Recursive Mode")
    (options, args) = parser.parse_args()
    # If we don't have args, quit with the help page
    if len(args) == 0:
        parser.print_help()
        sys.exit()
    # if we want a recursive extract run this function
    if options.recursive == True:
        if len(args) == 2:
            runRecursive(args[0], args[1])
            sys.exit()
        else:
            print("[+] You need to specify Both Dir to read AND Output File")
            parser.print_help()
            sys.exit()

    # If not recursive, try to open the file
    try:
        print("[+] Reading file")
        fileData = open(args[0], 'rb').read()
Example #33
op = OptionParser()
op.add_option("--filtered",
              action="store_true", dest="filtered",
              help="Remove newsgroup information that is easily overfit: "
                   "headers, signatures, and quoting.")


def is_interactive():
    return not hasattr(sys.modules['__main__'], '__file__')

# work-around for Jupyter notebook and IPython console
argv = [] if is_interactive() else sys.argv[1:]
(opts, args) = op.parse_args(argv)
if len(args) > 0:
    op.error("this script takes no arguments.")
    sys.exit(1)

print(__doc__)
op.print_help()
print()




if opts.filtered:
    remove = ('headers', 'footers', 'quotes')
else:
    remove = ()


data_train=base.load_files("news")
data_test=base.load_files("test")
data = data_train
Example #34
    def __init__(self, frame, panel, vbox, argv):
        MAX_CHANNELS = 7
        stdgui2.std_top_block.__init__ (self, frame, panel, vbox, argv)

        parser = OptionParser (option_class=eng_option)
        parser.add_option("-a", "--args", type="string", default="",
                          help="UHD device address args [default=%default]")
        parser.add_option("", "--spec", type="string", default=None,
                          help="Subdevice of UHD device where appropriate")
        parser.add_option("-A", "--antenna", type="string", default=None,
                          help="select Rx Antenna where appropriate")
        parser.add_option("-s", "--samp-rate", type="eng_float", default=400e3,
                          help="set sample rate (bandwidth) [default=%default]")
        parser.add_option("-f", "--freq", type="eng_float", default=None,
                          help="set frequency to FREQ", metavar="FREQ")
        parser.add_option("-g", "--gain", type="eng_float", default=None,
                          help="set gain in dB (default is midpoint)")
        parser.add_option("-n", "--nchannels", type="int", default=4,
                           help="number of Tx channels [1,4]")
        #parser.add_option("","--debug", action="store_true", default=False,
        #                  help="Launch Tx debugger")
        (options, args) = parser.parse_args ()

        if len(args) != 0:
            parser.print_help()
            sys.exit(1)

        if options.nchannels < 1 or options.nchannels > MAX_CHANNELS:
            sys.stderr.write ("fm_tx4: nchannels out of range.  Must be in [1,%d]\n" % MAX_CHANNELS)
            sys.exit(1)

        if options.freq is None:
            sys.stderr.write("fm_tx4: must specify frequency with -f FREQ\n")
            parser.print_help()
            sys.exit(1)

        # ----------------------------------------------------------------
        # Set up constants and parameters

        self.u = uhd.usrp_sink(device_addr=options.args, stream_args=uhd.stream_args('fc32'))

        # Set the subdevice spec
        if(options.spec):
            self.u.set_subdev_spec(options.spec, 0)

        # Set the antenna
        if(options.antenna):
            self.u.set_antenna(options.antenna, 0)

        self.usrp_rate = options.samp_rate
        self.u.set_samp_rate(self.usrp_rate)
        self.usrp_rate = self.u.get_samp_rate()

        self.sw_interp = 10
        self.audio_rate = self.usrp_rate / self.sw_interp    # e.g. 40 kS/s at the 400 kS/s default

        if options.gain is None:
            # if no gain was specified, use the mid-point in dB
            g = self.u.get_gain_range()
            options.gain = float(g.start()+g.stop())/2

        self.set_gain(options.gain)
        self.set_freq(options.freq)

        self.sum = blocks.add_cc ()

        # Instantiate N NBFM channels
        step = 25e3
        offset = (0 * step, 1 * step, -1 * step,
                  2 * step, -2 * step, 3 * step, -3 * step)

        for i in range (options.nchannels):
            t = pipeline("audio-%d.dat" % (i % 4), offset[i],
                         self.audio_rate, self.usrp_rate)
            self.connect(t, (self.sum, i))

        self.gain = blocks.multiply_const_cc (1.0 / options.nchannels)

        # connect it all
        self.connect (self.sum, self.gain)
        self.connect (self.gain, self.u)

        # plot an FFT to verify we are sending what we want
        if 1:
            post_mod = fftsink2.fft_sink_c(panel, title="Post Modulation",
                                           fft_size=512,
                                           sample_rate=self.usrp_rate,
                                           y_per_div=20,
                                           ref_level=40)
            self.connect (self.gain, post_mod)
            vbox.Add (post_mod.win, 1, wx.EXPAND)
Example #35
def main():
    parser = OptionParser(usage=USAGE, description=DESCRIPTION)

    parser.add_option('-s', '--subject_dir', metavar='PATH',
                      type='str', default=os.getenv('SUBJECTS_DIR'),
                      help='Freesurfer subject dir, default is ' \
                           'env. variable SUBJECTS_DIR')

    parser.add_option('-p', '--parcellations', type='str',
                      metavar='LIST OF STR',
                      default='dk,dkt,destrieux',
                      help='Parcellation(s) for which to '
                      'produce mappable metrics, as a comma-separated list ' \
                      '(no space). Choices: dk, dkt, destrieux. '\
                      'See https://surfer.nmr.mgh.harvard.edu/fswiki/CorticalParcellation.')

    parser.add_option('-m', '--metrics', type='str',
                      metavar='LIST OF STR', default=None,
                      help='List of metrics to map, as a comma-separated '\
                      'list (no space). Eg.: NumVert SurfArea GrayVol '\
                      'ThickAvg ThickStd MeanCurv GausCurv FoldInd CurvInd.')
    
    parser.add_option('-v', '--verbose', dest='verbose',
                      metavar='VERBOSELEVEL',
                      type='int', default=0, help='Verbose level')

    (options, args) = parser.parse_args()

    logger.setLevel(options.verbose)

    nba = len(args)
    if nba < MIN_ARGS or (MAX_ARGS >= 0 and nba > MAX_ARGS):
        parser.print_help()
        sys.exit(1)

    set_fs_subject_dir(options.subject_dir)
        
    subjects = args[0].split(',')
    parcellation_tags = get_parcellations_from_opt(options.parcellations)

    metrics = None
    if options.metrics is not None:
        metrics = options.metrics.split(',')
        
        
    # tmp_dir = tempfile.mkdtemp(prefix='nipic_')

    for subject in subjects:
        subject_dir = op.join(options.subject_dir, subject)
        logger.info('Processing subject %s...' % subject)
 
        for parcellation in parcellation_tags:
            for hemi in ['l','r']:
                annotation_fn = op.join(subject_dir, 'label',
                                        '%sh.aparc%s.annot' %
                                        (hemi, parcellation))
                logger.info('Reading annotation file %s ...' % annotation_fn)
                annotation = nibabel.freesurfer.read_annot(annotation_fn)
                label_mask = annotation[0]
                parcel_ids = dict( (an, i)
                                   for i,an in  enumerate(annotation[2]) )
                
                stats_fn = op.join(subject_dir, 'stats',
                                   '%sh.aparc%s.stats'%
                                   (hemi, parcellation))
                logger.info('Reading stats file %s ...' % stats_fn)
                stats = read_stats(stats_fn)

                stat_names = [n for n in stats.dtype.fields.keys()
                              if n != 'StructName']
                if metrics is not None:
                    stat_names_set = set(stat_names)
                    assert all([m in stat_names_set for m in metrics])
                else:
                    metrics = stat_names
                    
                mapped_stats = dict( (m, np.zeros(label_mask.shape))
                                     for m in metrics )
                for iparcel in range(stats.shape[0]):
                    parcel_id = parcel_ids[stats['StructName'][iparcel]]
                    parcel_mask = np.where(label_mask==parcel_id)
                    for metric in metrics:
                        mapped_stats[metric][parcel_mask] = \
                                                    stats[metric][iparcel]
                            
                for metric in metrics:
                    mapped_stats_fn = op.join(subject_dir, 'surf',
                                              '%sh.aparc%s.%s'%
                                              (hemi, parcellation, metric))
                    logger.info('Writing mapped stats file %s ...' %
                                mapped_stats_fn)
                    nibabel.freesurfer.write_morph_data(mapped_stats_fn,
                                                        mapped_stats[metric])
Example #36
def get_options(args=None):
    optParser = OptionParser()
    optParser.add_option("-n",
                         "--net-file",
                         dest="netfile",
                         help="network file")
    optParser.add_option("-l",
                         "--ptlines-file",
                         dest="ptlines",
                         help="public transit lines file")
    optParser.add_option("-s",
                         "--ptstops-file",
                         dest="ptstops",
                         help="public transit stops file")
    optParser.add_option("-o",
                         "--output-file",
                         dest="outfile",
                         default="flows.rou.xml",
                         help="output flows file")
    optParser.add_option("-i",
                         "--stopinfos-file",
                         dest="stopinfos",
                         default="stopinfos.xml",
                         help="file from '--stop-output'")
    optParser.add_option("-r",
                         "--routes-file",
                         dest="routes",
                         default="vehroutes.xml",
                         help="file from '--vehroute-output'")
    optParser.add_option("-t",
                         "--trips-file",
                         dest="trips",
                         default="trips.trips.xml",
                         help="output trips file")
    optParser.add_option(
        "-p",
        "--period",
        type=float,
        default=600,
        help=
        "the default service period (in seconds) to use if none is specified in the ptlines file"
    )
    optParser.add_option("-b",
                         "--begin",
                         type=float,
                         default=0,
                         help="start time")
    optParser.add_option("-e",
                         "--end",
                         type=float,
                         default=3600,
                         help="end time")
    optParser.add_option(
        "--min-stops",
        type=int,
        default=2,
        help="only import lines with at least this number of stops")
    optParser.add_option("-f",
                         "--flow-attributes",
                         dest="flowattrs",
                         default="",
                         help="additional flow attributes")
    optParser.add_option("--use-osm-routes",
                         default=False,
                         action="store_true",
                         dest='osmRoutes',
                         help="use osm routes")
    optParser.add_option("--random-begin",
                         default=False,
                         action="store_true",
                         dest='randomBegin',
                         help="randomize begin times within period")
    optParser.add_option("--seed", type="int", help="random seed")
    optParser.add_option("--ignore-errors",
                         default=False,
                         action="store_true",
                         dest='ignoreErrors',
                         help="ignore problems with the input data")
    optParser.add_option("--no-vtypes",
                         default=False,
                         action="store_true",
                         dest='novtypes',
                         help="do not write vtypes for generated flows")
    optParser.add_option(
        "--types",
        help="only export the given list of types (using OSM nomenclature)")
    optParser.add_option("--bus.parking",
                         default=False,
                         action="store_true",
                         dest='busparking',
                         help="let busses clear the road while stopping")
    optParser.add_option("--vtype-prefix",
                         default="",
                         dest='vtypeprefix',
                         help="prefix for vtype ids")
    optParser.add_option("-d",
                         "--stop-duration",
                         default=30,
                         type="float",
                         dest='stopduration',
                         help="Configure the minimum stopping duration")
    optParser.add_option("-H",
                         "--human-readable-time",
                         dest="hrtime",
                         default=False,
                         action="store_true",
                         help="write times as h:m:s")
    optParser.add_option("-v",
                         "--verbose",
                         action="store_true",
                         default=False,
                         help="tell me what you are doing")
    (options, args) = optParser.parse_args(args=args)

    if options.netfile is None or options.ptlines is None or options.ptstops is None:
        sys.stderr.write(
            "Error: net-file, ptlines-file and ptstops-file must be set\n")
        optParser.print_help()
        sys.exit(1)

    if options.begin >= options.end:
        sys.stderr.write("Error: end time must be larger than begin time\n")
        optParser.print_help()
        sys.exit(1)

    if options.types is not None:
        options.types = options.types.split(',')

    return options
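
# A hedged invocation sketch (option names taken from the parser above; the
# script name and the -n/--net-file option are defined outside this snippet,
# so treat them as assumptions):
#   <script>.py -n net.net.xml -l ptlines.xml -s ptstops.xml \
#               -o flows.rou.xml -t trips.trips.xml -p 600 -b 0 -e 3600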
Пример #37
0
def main():
    usage = ("usage: %prog <tunnels_file> <output_dir> <vp_name>\n"
             "  where:\n"
             "      - tunnels_file is the file containing all the tunnels"
             " to be classified.\n"
             "      - output_dir is the output directory.\n"
             "      - vp_name is the name of the VP to consider. 'all' means"
             " all VPs.\n")
    parser = OptionParser(usage=usage)

    (options, args) = parser.parse_args()
    # Check arg number
    if not len(args) == 3:
        parser.print_help()
        sys.exit(1)

    tunnels_file = args[0]
    output_dir = args[1]
    tgt_vp = args[2]

    os.system("rm -r " + output_dir + "; mkdir " + output_dir)

    # Structures
    types = ["EXP", "IMP", "OPA", "INV", "HET", "CHA"]
    all_tuns = set()
    # Tunnels classified according to their type
    type2tuns = defaultdict(set)
    # Trigger and revelation technique for each tunnel
    tun2trig_revtechs = defaultdict(set)
    # Tunnel with at least one LSR revealed with the buddy technique
    buddytuns = set()
    # Initialize the type structure
    for t in types:
        type2tuns[t] = set()

    # Read the input file
    input_file = open(tunnels_file)
    for line in input_file:
        line = line.strip()
        # Skip empty lines and lines containing '#' (comments)
        if line == "" or "#" in line:
            continue

        # Get needed fields in line
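        # Assumed line shape (reconstructed from the indices used below; the
        # real trace format may differ):
        #   <vp> <type> <trigger> <revtech> <buddy> ... = ... = ip1:x ip2:y ...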
        line_split = line.split()
        vp_name = line_split[0]
        if tgt_vp != "all" and vp_name != tgt_vp:
            continue
        tun_type = line_split[1]
        tun_trig = line_split[2]
        tun_revtech = line_split[3]
        tun_buddy = line_split[4]
        ip_tun = list()

        # Get the tunnel in terms of IP addresses
        comp_tunnel = line.split("=")[2]
        tun_split = comp_tunnel.split()
        for hop in tun_split:
            hop_split = hop.split(":")
            ip = hop_split[0]
            ip_tun.append(ip)
        tunnel = " ".join(ip_tun)
        all_tuns.add(tunnel)

        # Check if tunnel already seen previously
        tunseen = False
        for t in types:
            if tunnel in type2tuns[t]:
                if t != tun_type and t != "CHA":
                    type2tuns[t].remove(tunnel)
                    type2tuns["CHA"].add(tunnel)
                tunseen = True

        # Add to the right sets
        if not tunseen:
            type2tuns[tun_type].add(tunnel)
        if tun_buddy != "-":
            buddytuns.add(tunnel)
        if tun_trig != "-":
            trig_revtech = (tun_trig, tun_revtech)
            tun2trig_revtechs[tunnel].add(trig_revtech)
    input_file.close()

    # Write the output files

    # 1. Tunnel count according to the type
    output_file_name = output_dir + "/tunnel_count.txt"
    output_file = open(output_file_name, 'w')
    output_file.write("#Type ntun\n")
    output_file.write("ALL " + str(len(all_tuns)) + "\n")
    for t in types:
        output_file.write(t + " " + str(len(type2tuns[t])) + "\n")
        # For implicit tunnels, separate according to QTTL and UTURN
        if t == "IMP":
            count = {"QTTL": 0, "UTURN": 0, "MULTI": 0}
            for tunnel in type2tuns[t]:
                for trig_revtech in tun2trig_revtechs[tunnel]:
                    trig = trig_revtech[0]
                    count[trig] += 1
            output_file.write("IMP_QTTL " + str(count["QTTL"]) + "\n")
            output_file.write("IMP_UTURN " + str(count["UTURN"]) + "\n")
            output_file.write("IMP_BOTH " + str(count["MULTI"]) + "\n")
        # For opaque tunnels, separate according to whether LSRs were revealed or not
        elif t == "OPA":
            count = {"REV": 0, "NOREV": 0}
            for tunnel in type2tuns[t]:
                for trig_revtech in tun2trig_revtechs[tunnel]:
                    revtech = trig_revtech[1]
                    if revtech == "-":
                        count["NOREV"] += 1
                    else:
                        count["REV"] += 1
            output_file.write("OPA_REV " + str(count["REV"]) + "\n")
            output_file.write("OPA_NOREV " + str(count["NOREV"]) + "\n")
    output_file.close()
    os.system("column -t " + output_file_name +
              " > tmp_col.txt; mv tmp_col.txt " + output_file_name)

    # 2. Opaque revelation techniques
    revtech2ntun = {}
    for tunnel in type2tuns["OPA"]:
        cur_revtechs = set()
        for trig_revtech in tun2trig_revtechs[tunnel]:
            revtech = trig_revtech[1]
            if revtech != "-":
                cur_revtechs.add(revtech)
        revtech = "_".join(list(sorted(cur_revtechs)))
        # Update distribution
        if revtech:
            if revtech in revtech2ntun:
                revtech2ntun[revtech] += 1
            else:
                revtech2ntun[revtech] = 1
    output_file_name = output_dir + "/opaque_revtech.txt"
    output_file = open(output_file_name, 'w')
    output_file.write("#revtech ntun\n")
    for revtech in sorted(revtech2ntun):
        output_file.write(revtech + " " + str(revtech2ntun[revtech]) + "\n")
    output_file.close()
    # Arrange output file
    os.system("column -t " + output_file_name + " > tmp_col.txt")
    os.system("mv tmp_col.txt " + output_file_name)

    # 3. Triggers and revelation techniques for invisible tunnels
    trig2revtech2ntun = defaultdict(lambda: defaultdict(int))
    trig2ntun = {"DUPIPBUD": 0, "DUPIPNOBUD": 0}
    for tunnel in type2tuns["INV"]:
        cur_revtechs = set()
        cur_triggers = set()
        for trig_revtech in tun2trig_revtechs[tunnel]:
            trig = trig_revtech[0]
            revtech = trig_revtech[1]
            cur_triggers.add(trig)
            cur_revtechs.add(revtech)
        # Combine multiple triggers and revelation techniques if necessary
        revtech = "_".join(list(sorted(cur_revtechs)))
        trigger = "_".join(list(sorted(cur_triggers)))
        # Update distributions
        # According to the trigger
        if trigger in trig2ntun:
            trig2ntun[trigger] += 1
        else:
            trig2ntun[trigger] = 1
        # Check if the buddy technique was needed
        if trigger == "DUPIP":
            if tunnel in buddytuns:
                trig2ntun["DUPIPBUD"] += 1
            else:
                trig2ntun["DUPIPNOBUD"] += 1
        # According to the trigger and the revelation technique
        if revtech in trig2revtech2ntun[trigger]:
            trig2revtech2ntun[trigger][revtech] += 1
        else:
            trig2revtech2ntun[trigger][revtech] = 1

    # Output the trigger results
    output_file_name = output_dir + "/inv_triggers.txt"
    output_file = open(output_file_name, 'w')
    output_file.write("#trigger ntun\n")
    for trigger in sorted(trig2ntun):
        output_file.write(trigger + " " + str(trig2ntun[trigger]) + "\n")
    output_file.close()
    os.system("column -t " + output_file_name + " > tmp_col.txt")
    os.system("mv tmp_col.txt " + output_file_name)

    # Output the trigger and revelation technique results
    for trigger in sorted(trig2revtech2ntun):
        output_file_name = output_dir + "/inv_" + trigger + "_revtech.txt"
        output_file = open(output_file_name, 'w')
        output_file.write("#revtech ntun\n")
        for revtech in sorted(trig2revtech2ntun[trigger]):
            output_file.write(revtech + " " +
                              str(trig2revtech2ntun[trigger][revtech]) + "\n")
        output_file.close()
        os.system("column -t " + output_file_name + " > tmp_col.txt")
        os.system("mv tmp_col.txt " + output_file_name)

    return
Пример #38
0
def main(args):
    """
    %prog database.fasta query.fasta


    Run LAST by calling LASTDB, LASTAL and LASTEX.
    """

    supported_formats = ("tab", "maf", "blast")

    p = OptionParser(main.__doc__)
    p.add_option("-a", "-A", dest="cpus", default=1, type="int",
            help="parallelize job to multiple cpus [default: %default]")
    p.add_option("--path", help="specify LAST path")
    p.add_option("--dbpath", help="build db in which dir [default: same as fasta]")
    p.add_option("--format", default="blast", choices=supported_formats,
                 help="Output format, one of {0} [default: %default]".\
                      format("|".join(supported_formats)))
    p.add_option("--eval", default=False, action="store_true",
                 help="Use lastex to recalculate E-value [default: %default]")
    set_outfile(p)

    opts, args = p.parse_args(args)

    if len(args) != 2:
        sys.exit(not p.print_help())

    subject, query = args
    if opts.eval and opts.cpus > 1:
        raise Exception, "Option --eval cannot work with multiple threads"

    path = opts.path
    dbpath = opts.dbpath

    getpath = lambda x: op.join(path, x) if path else x
    lastdb_bin = getpath("lastdb")
    lastal_bin = getpath("lastal")
    lastex_bin = getpath("lastex")

    subject = get_abs_path(subject)
    head, tail = op.split(subject)
    head = dbpath or head
    subjectdb = tail.rsplit(".", 1)[0] + "a"  # sentinel (1a.prj and 10a.prj)

    mkdir(head)
    subjectdb = op.join(head, subjectdb)
    run_lastdb(infile=subject, outfile=subjectdb + ".prj", lastdb_bin=lastdb_bin)

    cpus = opts.cpus
    logging.debug("Dispatch job to {0} cpus".format(cpus))

    if opts.format == "maf":
        cmd = 'echo "##maf version=1"'
        sh(cmd)

    cmd = "{0} -u 0".format(lastal_bin)
    f = supported_formats.index(opts.format)
    cmd += " -f {0}".format(f)
    cmd += " {0} -".format(subjectdb)

    if opts.eval:
        querydb = query.rsplit(".", 1)[0]
        run_lastdb(infile=query, outfile=querydb + ".prj")

        cmd += " | {0} {1}.prj {2}.prj -".format(lastex_bin, subjectdb, querydb)

    out_fh = must_open(opts.outfile, "w")
    lock = Lock()

    args = [(k + 1, cpus, out_fh, cmd, query, lock) \
                    for k in xrange(cpus)]
    g = Jobs(target=last, args=args)
    g.run()
Пример #39
0
def main():
    """
compares a designed protein's sequence to the native sequence and reports the 
number of mutations
	"""

    parser = OptionParser()
    parser.add_option("-p", dest="pdbfile", help="pdbfile")
    parser.add_option("-P", dest="pdblist", help="pdblist")
    parser.add_option("-n", dest="native", help="native")
    parser.set_description(main.__doc__)
    (options, args) = parser.parse_args()

    pdbfiles = []
    if options.pdblist:
        pdbfiles = files_from_list(options.pdblist)
    elif options.pdbfile:
        pdbfiles.append(options.pdbfile)
    else:
        parser.print_help()
        sys.exit()

    if not options.native:
        parser.print_help()
        sys.exit()

    enz = Enzyme()
    nat = Enzyme()
    nat.readPDB(options.native)
    natss = getSecondaryStructure(options.native)
    natseq = nat.protein.sequence()

    if natss != "" and (len(natss) != len(natseq)):
        print natss
        print natseq
        print "PROBLEM WITH NATSS"
        sys.exit()

    resprop = ResidueProperties()

    for pdbfile in pdbfiles:
        n_gly_ss = 0
        n_np_pol = 0
        n_np_chg = 0

        enz.readPDB(pdbfile)

        catlist = []
        for cres in enz.catalytic:
            catlist.append(int(cres.file_id))

        myseq = enz.protein.sequence()

        mismatch = []
        if len(myseq) != len(natseq):
            print "sequence lengths differ"
            sys.exit()

        mm_ss = ""
        mm_ns = ""
        mm_ps = ""

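        # Walk both sequences (skipping catalytic residues), record each
        # mismatch, and count designed glycines placed at native helix/strand
        # positions plus native nonpolar residues mutated to polar or charged.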
        for i in range(len(myseq)):
            if i in catlist:
                continue

            if myseq[i] != natseq[i]:
                mm_ss += natss[i]
                mm_ns += natseq[i]
                mm_ps += myseq[i]

                if myseq[i] == "G" and (natss[i] == "H" or natss[i] == "E"):
                    n_gly_ss += 1

                if resprop.isNonPolar(code=natseq[i]) or natseq[i] == "F":
                    if resprop.isPolar(code=myseq[i]):
                        n_np_pol += 1
                    if resprop.isCharged(code=myseq[i]):
                        n_np_chg += 1

        print mm_ss
        print mm_ns
        print mm_ps

        print pdbfile
        print "# glycines in ss:", n_gly_ss
        print "# np to pol:", n_np_pol
        print "# np to chg:", n_np_chg
        print "--------------"

        enz.clear()
Пример #40
0
import os
import sys
from optparse import OptionParser
# assuming Python 3 here (print() calls below); on Python 2 this module is ConfigParser
from configparser import ConfigParser

import psycopg2

from lib.varnish import VarnishPurger

if __name__ == "__main__":
    optparser = OptionParser()
    optparser.add_option('-m',
                         '--msgid',
                         dest='msgid',
                         help='Messageid to load')

    (opt, args) = optparser.parse_args()

    if (len(args)):
        print("No bare arguments accepted")
        optparser.print_help()
        sys.exit(1)

    if not opt.msgid:
        print("Message-id must be specified")
        optparser.print_help()
        sys.exit(1)

    cfg = ConfigParser()
    cfg.read('%s/archives.ini' %
             os.path.realpath(os.path.dirname(sys.argv[0])))
    try:
        connstr = cfg.get('db', 'connstr')
    except Exception:
        connstr = 'need_connstr'
Пример #41
0
def main():
  args = argv[1:]

  parser = OptionParser(usage="parsebruker <input|stdin> [opts]", description=__doc__)

  parser.add_option("--verbose", action="store_true", \
    help="print status messages")
  parser.add_option("--array", action="store_true", \
    help="store array values by entry, e.g. value[0], ...")
  parser.add_option("--write-header", metavar="<fn>", \
    help="write the header to a json file")
  parser.add_option("--write-header-attr", metavar="<fn>", \
    help="write the header attributes to a json file")
  parser.add_option("--write-param", metavar="<fn>", \
    help="write the parameter values to a json file")
  parser.add_option("--write-param-attr", metavar="<fn>", \
    help="write the parameter attributes to a json file")
  parser.add_option("--print-header", action="store_true", \
    help="print the header to a json file")
  parser.add_option("--print-header-attr", action="store_true", \
    help="print the header attributes to a json file")
  parser.add_option("--print-param", action="store_true", \
    help="print the parameter values to a json file")
  parser.add_option("--print-param-attr", action="store_true", \
    help="print the parameter attributes to a json file")
  parser.add_option("--format", metavar="<pattern>", action="store", \
    help="print a string formatted with parameter values, e.g. ${param1}_${param2}_$${attr1}.txt")

  (opts, pos) = parser.parse_args()

  if len(args) == 0 or len(pos) > 1:
    parser.print_help()
    return

  logger = Logger(opts.verbose)
  logger.info("started")

  logger.info("reading input")
  
  body = None
  if len(pos) > 0:
    input_fn = pos[0]
    logger.assume(exists(input_fn), "input file not found: %s" % input_fn)
    logger.info("reading file: %s" % input_fn)
    body = open(input_fn, 'r').read()
  else:
    logger.info("reading standard input")
    body = stdin.read() 

  method = Method(array=opts.array).parse(body)

  logger.info("writing output")

  def dumpit(lookup, fn):
    info("writing output: %s" % fn)
    open(fn, 'w').write(json.dumps(lookup, indent=2, sort_keys=False) + "\n")

  def printit(lookup):
    print json.dumps(lookup, indent=2, sort_keys=True)

  if opts.write_header:
    dumpit(method.header, opts.write_header)

  if opts.write_header_attr:
    dumpit(method.header_attr, opts.write_header_attr)

  if opts.write_param:
    dumpit(method.param, opts.write_param)

  if opts.write_param_attr:
    dumpit(method.param_attr, opts.write_param_attr)

  if opts.print_header:
    printit(method.header)

  if opts.print_header_attr:
    printit(method.header_attr)

  if opts.print_param:
    printit(method.param)

  if opts.print_param_attr:
    printit(method.param_attr)

  if opts.format:
    print method.format(opts.format)

  logger.info("finished")
Пример #42
0
import sys

import numpy as np
from optparse import OptionParser

# parse command line
p = OptionParser(usage="""usage: %prog [options] <file_prefix> <start> <end>

Converts binary files to ASCII format

<start>  starting file
<end>    finishing file
<file_prefix> either "out_0par" or "out_0disk"

""")
p.add_option("-v", action="store_true", dest="verbose", help="Verbose")

(opts, args) = p.parse_args()
# get the com filename
if len(args) != 3:
    p.print_help()
    sys.exit(1)
(file_prefix, start, end) = args

for no_of_files in range(int(start), int(end)):
    file_name = file_prefix + str(no_of_files) + ".dat"
    f = open(file_name, 'rb')

    fout = open("_" + file_name, 'w')
    x = np.fromfile(f, dtype=np.float)  # read the data into numpy array

    size = len(x) / 8

    if file_prefix == "out_0par":
        if opts.verbose: print "Writing particle file ", no_of_files
        index = 1
Пример #43
0
def main(argv=None, qtGUI=None):
    global options, GUI, splitWorkerPool, splitWorkerOutput, mergeWorkerPool, mergeWorkerOutput
    parser = OptionParser(usage="Usage: kcc-c2p [options] comic_folder",
                          add_help_option=False)
    mainOptions = OptionGroup(parser, "MANDATORY")
    otherOptions = OptionGroup(parser, "OTHER")
    mainOptions.add_option("-y",
                           "--height",
                           type="int",
                           dest="height",
                           default=0,
                           help="Height of the target device screen")
    mainOptions.add_option("-i",
                           "--in-place",
                           action="store_true",
                           dest="inPlace",
                           default=False,
                           help="Overwrite source directory")
    mainOptions.add_option(
        "-m",
        "--merge",
        action="store_true",
        dest="merge",
        default=False,
        help="Combine every directory into a single image before splitting")
    otherOptions.add_option("-d",
                            "--debug",
                            action="store_true",
                            dest="debug",
                            default=False,
                            help="Create debug file for every splitted image")
    otherOptions.add_option("-h",
                            "--help",
                            action="help",
                            help="Show this help message and exit")
    parser.add_option_group(mainOptions)
    parser.add_option_group(otherOptions)
    options, args = parser.parse_args(argv)
    if qtGUI:
        GUI = qtGUI
    else:
        GUI = None
    if len(args) != 1:
        parser.print_help()
        return 1
    if options.height > 0:
        options.sourceDir = args[0]
        options.targetDir = args[0] + "-Splitted"
        if os.path.isdir(options.sourceDir):
            rmtree(options.targetDir, True)
            copytree(options.sourceDir, options.targetDir)
            work = []
            pagenumber = 1
            splitWorkerOutput = []
            splitWorkerPool = Pool(maxtasksperchild=10)
            if options.merge:
                print("Merging images...")
                directoryNumber = 1
                mergeWork = []
                mergeWorkerOutput = []
                mergeWorkerPool = Pool(maxtasksperchild=10)
                mergeWork.append([options.targetDir])
                for root, dirs, files in os.walk(options.targetDir, False):
                    dirs, files = walkSort(dirs, files)
                    for directory in dirs:
                        directoryNumber += 1
                        mergeWork.append([os.path.join(root, directory)])
                if GUI:
                    GUI.progressBarTick.emit('Combining images')
                    GUI.progressBarTick.emit(str(directoryNumber))
                for i in mergeWork:
                    mergeWorkerPool.apply_async(func=mergeDirectory,
                                                args=(i, ),
                                                callback=mergeDirectoryTick)
                mergeWorkerPool.close()
                mergeWorkerPool.join()
                if GUI and not GUI.conversionAlive:
                    rmtree(options.targetDir, True)
                    raise UserWarning("Conversion interrupted.")
                if len(mergeWorkerOutput) > 0:
                    rmtree(options.targetDir, True)
                    raise RuntimeError(
                        "One of workers crashed. Cause: " +
                        mergeWorkerOutput[0][0], mergeWorkerOutput[0][1])
            print("Splitting images...")
            for root, _, files in os.walk(options.targetDir, False):
                for name in files:
                    if getImageFileName(name) is not None:
                        pagenumber += 1
                        work.append([root, name, options])
                    else:
                        os.remove(os.path.join(root, name))
            if GUI:
                GUI.progressBarTick.emit('Splitting images')
                GUI.progressBarTick.emit(str(pagenumber))
                GUI.progressBarTick.emit('tick')
            if len(work) > 0:
                for i in work:
                    splitWorkerPool.apply_async(func=splitImage,
                                                args=(i, ),
                                                callback=splitImageTick)
                splitWorkerPool.close()
                splitWorkerPool.join()
                if GUI and not GUI.conversionAlive:
                    rmtree(options.targetDir, True)
                    raise UserWarning("Conversion interrupted.")
                if len(splitWorkerOutput) > 0:
                    rmtree(options.targetDir, True)
                    raise RuntimeError(
                        "One of workers crashed. Cause: " +
                        splitWorkerOutput[0][0], splitWorkerOutput[0][1])
                if options.inPlace:
                    rmtree(options.sourceDir)
                    move(options.targetDir, options.sourceDir)
            else:
                rmtree(options.targetDir, True)
                raise UserWarning("Source directory is empty.")
        else:
            raise UserWarning("Provided path is not a directory.")
    else:
        raise UserWarning("Target height is not set.")
Пример #44
0
# use simplejson in place of json for python < 2.6
try:
    import json
except ImportError:
    import simplejson
    json = simplejson

#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888

#   Main logic

#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888

# get help string
f = StringIO.StringIO()
parser.print_help(f)
helpstr = f.getvalue()
(options, remaining_args) = parser.parse_args()

species_list = defaultdict(list)
species_list["mammals"].append("cow")
species_list["mammals"].append("horse")
species_list["mammals"].append("sheep")
species_list["reptiles"].append("snake")
species_list["reptiles"].append("lizard")
species_list["reptiles"].append("crocodile")
species_list["fish"].append("pufferfish")

tempdir = "temp_filesre_combine/"

#88888888888888888888888888888888888888888888888888888888888888888888888888888888888888888
Пример #45
0
def main():
    usage = '%prog [ -U rest url (required) | -i ifName pattern | -a alias pattern | -e endpoint -e endpoint (multiple ok) ]'
    parser = OptionParser(usage=usage)
    parser.add_option(
        '-U',
        '--url',
        metavar='ESMOND_REST_URL',
        type='string',
        dest='api_url',
        help='URL for the REST API (default=%default) - required.',
        default='http://localhost')
    parser.add_option('-i',
                      '--ifname',
                      metavar='IFNAME',
                      type='string',
                      dest='ifname_pattern',
                      help='Pattern to apply to interface ifname search.')
    parser.add_option('-a',
                      '--alias',
                      metavar='ALIAS',
                      type='string',
                      dest='alias_pattern',
                      help='Pattern to apply to interface alias search.')
    parser.add_option(
        '-e',
        '--endpoint',
        metavar='ENDPOINT',
        dest='endpoint',
        action='append',
        default=[],
        help='Endpoint type to query (required) - can specify more than one.')
    parser.add_option('-m',
                      '--month',
                      metavar='MONTH',
                      type='string',
                      dest='month',
                      default='',
                      help='Specify month in YYYY-MM format.')
    parser.add_option('-v',
                      '--verbose',
                      dest='verbose',
                      action='count',
                      default=False,
                      help='Verbose output - -v, -vv, etc.')
    parser.add_option(
        '-P',
        '--post',
        dest='post',
        action='store_true',
        default=False,
        help=
        'Switch to actually post data to the backend - otherwise it will just query and give output.'
    )
    parser.add_option('-u',
                      '--user',
                      metavar='USER',
                      type='string',
                      dest='user',
                      default='',
                      help='POST interface username.')
    parser.add_option('-k',
                      '--key',
                      metavar='API_KEY',
                      type='string',
                      dest='key',
                      default='',
                      help='API key for POST operation.')
    options, args = parser.parse_args()

    if not options.month:
        print 'No -m arg, defaulting to last month'
        now = datetime.datetime.utcnow()
        start_year, start_month = lastmonth((now.year, now.month))
        start_point = datetime.datetime.strptime(
            '{0}-{1}'.format(start_year, start_month), '%Y-%m')
    else:
        print 'Parsing -m input {0}'.format(options.month)
        try:
            start_point = datetime.datetime.strptime(options.month, '%Y-%m')
        except ValueError:
            print 'Unable to parse -m arg {0} - expecting YYYY-MM format'.format(
                options.month)
            return -1

    print 'Generating monthly summary starting on: {0}'.format(start_point)

    start, end = get_month_start_and_end(start_point)

    if options.verbose:
        print 'Scanning from {0} to {1}'.format(
            datetime.datetime.utcfromtimestamp(start),
            datetime.datetime.utcfromtimestamp(end))

    filters = ApiFilters()

    filters.verbose = options.verbose
    filters.endpoint = options.endpoint
    filters.agg = AGG_FREQUENCY
    filters.cf = 'raw'

    filters.begin_time = start
    filters.end_time = end

    if not options.ifname_pattern and not options.alias_pattern:
        # Don't grab *everything*.
        print 'Specify an ifname or alias filter option.'
        parser.print_help()
        return -1
    elif options.ifname_pattern and options.alias_pattern:
        print 'Specify only one filter option.'
        parser.print_help()
        return -1
    else:
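        # Build a Django-style "__contains" field lookup that is passed through
        # as keyword filters to conn.get_interface_bulk_data() below.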
        if options.ifname_pattern:
            interface_filters = {'ifName__contains': options.ifname_pattern}
        elif options.alias_pattern:
            interface_filters = {'ifAlias__contains': options.alias_pattern}

    conn = ApiConnect(options.api_url, filters, options.user, options.key)

    data = conn.get_interface_bulk_data(**interface_filters)

    print data

    aggs = aggregate_to_device_interface_endpoint(data, options.verbose)

    # Generate the grand total
    total_aggs = {}

    for d, i, endpoint, val in iterate_device_interface_endpoint(aggs):
        if not total_aggs.has_key(endpoint): total_aggs[endpoint] = 0
        total_aggs[endpoint] += val

    if options.verbose: print 'Grand total:', total_aggs

    # Roll everything up before posting
    summary_name = get_summary_name(interface_filters)

    post_data = {}

    for device, interface, endpoint, val in iterate_device_interface_endpoint(
            aggs):
        path = (MONTHLY_NS, summary_name, device, interface, endpoint)
        payload = {'ts': start * 1000, 'val': val}
        if options.verbose > 1: print path, '\n\t', payload
        post_data[path] = payload

    for endpoint, val in total_aggs.items():
        path = (MONTHLY_NS, summary_name, endpoint)
        payload = {'ts': start * 1000, 'val': val}
        if options.verbose > 1: print path, '\n\t', payload
        post_data[path] = payload

    if not options.post:
        print 'Not posting (use -P flag to write to backend).'
        return

    if not options.user or not options.key:
        print 'user and key args must be supplied to POST summary data.'
        return

    for path, payload in post_data.items():
        args = {
            'api_url': options.api_url,
            'path': list(path),
            'freq': AGG_FREQUENCY * 1000
        }

        p = PostRawData(username=options.user, api_key=options.key, **args)
        p.add_to_payload(payload)
        p.send_data()

        if options.verbose:
            print 'verifying write for', path
            p = {'begin': start * 1000, 'end': start * 1000}
            g = GetRawData(params=p, **args)
            result = g.get_data()
            print result, '\n\t', result.data[0]

    return
Пример #46
0
def call_main():

    usage = """\npython peakfinder.py -b <bamfile> -s <hg18/hg19/mm9>\n OR 
    \npython peakfinder.py -b <bamfile> --customBED <BEDfile> --customMRNA 
    <mRNA lengths> --customPREMRNA <premRNA lengths>"""
    description = """CLIPper. Michael Lovci, Gabriel Pratt 2012. 
                     CLIP peakfinder that uses fitted smoothing splines to 
                     define clusters of binding.  Computation is performed in
                     parallel using parallelPython. 
                     Refer to: https://github.com/YeoLab/clipper/wiki for instructions. 
                     Questions should be directed to [email protected]."""

    parser = OptionParser(usage=usage, description=description)

    parser.add_option("--bam",
                      "-b",
                      dest="bam",
                      help="A bam file to call peaks on",
                      type="string",
                      metavar="FILE.bam")
    parser.add_option("--input_bam",
                      dest="input_bam",
                      help="input bam to control for peak calling",
                      type="string",
                      default=None,
                      metavar="FILE.bam")

    parser.add_option(
        "--species",
        "-s",
        dest="species",
        help="A species for your peak-finding, either hg19 or mm9")
    parser.add_option("--gtfFile",
                      dest="gtfFile",
                      help="use a gtf file instead of the AS structure data")
    parser.add_option("--outfile",
                      "-o",
                      dest="outfile",
                      default="fitted_clusters",
                      help="a bed file output, default:%default")
    parser.add_option("--gene",
                      "-g",
                      dest="gene",
                      action="append",
                      help="A specific gene you'd like try",
                      metavar="GENENAME")
    parser.add_option(
        "--minreads",
        dest="minreads",
        help=
        "minimum reads required for a section to start the fitting process.  Default:%default",
        default=3,
        type="int",
        metavar="NREADS")
    parser.add_option("--premRNA",
                      dest="premRNA",
                      action="store_true",
                      help="use premRNA length cutoff, default:%default",
                      default=False)
    parser.add_option("--poisson-cutoff",
                      dest="poisson_cutoff",
                      type="float",
                      help="p-value cutoff for poisson test, Default:%default",
                      default=0.05,
                      metavar="P")
    parser.add_option(
        "--disable_global_cutoff",
        dest="use_global_cutoff",
        action="store_false",
        help=
        "disables global transcriptome level cutoff to CLIP-seq peaks, Default:On",
        default=True,
        metavar="P")
    parser.add_option(
        "--FDR",
        dest="FDR_alpha",
        type="float",
        default=0.05,
        help="FDR cutoff for significant height estimation, default=%default")
    parser.add_option(
        "--threshold-method",
        dest="method",
        default="random",
        help=
        "Method used for determining height threshold, Can use default=random or binomial"
    )
    parser.add_option(
        "--binomial",
        dest="binom",
        type="float",
        default=0.05,
        help=
        "Alpha significance threshold for using Binomial distribution for determining height threshold, default=%default"
    )
    parser.add_option("--threshold",
                      dest="threshold",
                      type="int",
                      default=None,
                      help="Skip FDR calculation and set a threshold yourself")
    parser.add_option(
        "--maxgenes",
        dest="maxgenes",
        default=None,
        type="int",
        help="stop computation after this many genes, for testing",
        metavar="NGENES")
    parser.add_option(
        "--processors",
        dest="np",
        default="autodetect",
        help="Number of processors to use. Default: All processors on machine",
        type="str",
        metavar="NP")
    parser.add_option(
        "--superlocal",
        action="store_true",
        dest="SloP",
        default=False,
        help=
        "Use super-local p-values, counting reads in a 1KB window around peaks"
    )
    parser.add_option("--plot",
                      "-p",
                      dest="plotit",
                      action="store_true",
                      help="make figures of the fits",
                      default=False)
    parser.add_option("--verbose",
                      "-v",
                      dest="verbose",
                      action="store_true",
                      default=False)
    parser.add_option("--quiet",
                      "-q",
                      dest="quiet",
                      action="store_true",
                      default=False,
                      help="suppress notifications")
    parser.add_option("--save-pickle",
                      dest="save_pickle",
                      default=False,
                      action="store_true",
                      help="Save a pickle file containing the analysis")
    parser.add_option(
        "--debug",
        dest="debug",
        default=False,
        action="store_true",
        help="disables multipcoressing in order to get proper error tracebacks"
    )
    parser.add_option(
        "--max_width",
        dest="max_width",
        type="int",
        default=75,
        help="Defines max width for classic algorithm, default: %default")
    parser.add_option(
        "--min_width",
        dest="min_width",
        type="int",
        default=50,
        help="Defines min width for classic algorithm, default: %default")
    parser.add_option(
        "--max_gap",
        dest="max_gap",
        type="int",
        default=15,
        help=
        "defines maximum gap between reads before calling a region a new section, default: %default"
    )
    parser.add_option("--bonferroni",
                      dest="bonferroni_correct",
                      action="store_true",
                      default=False,
                      help="Perform Bonferroni on data before filtering")
    parser.add_option(
        "--algorithm",
        dest="algorithm",
        default="spline",
        help="Defines algorithm to run, currently spline, classic, gaussian")
    parser.add_option("--reverse_strand",
                      dest="reverse_strand",
                      default=False,
                      action="store_true",
                      help="adds option to reverse strand")

    (options, args) = parser.parse_args()

    if options.plotit:
        options.debug = True

    #enforces required usage
    if not (options.bam and ((options.species) or (options.gtfFile))):
        parser.print_help()
        exit()

    logging.info("Starting peak calling")
    main(options)
Пример #47
0
    optParser.add_option("-f",
                         "--scale-factor",
                         dest="demandscale",
                         type="float",
                         default=1.,
                         help="scale demand by ")
    optParser.add_option(
        "-D",
        "--depart-pos",
        dest="departpos",
        type="choice",
        choices=('random', 'free', 'random_free'),
        default='free',
        help="choose departure position: random, free, random_free")
    optParser.add_option(
        "-C",
        "--get-connections",
        action="store_true",
        dest="getconns",
        default=True,
        help=
        "generate the OD connection directory, if set as False, a odConnTables.py should be available in the defined data directory"
    )
    (options, args) = optParser.parse_args()

    if not options.netfile or not options.mtxfile or not options.districtfile:
        optParser.print_help()
        sys.exit()

    main(options)
Пример #48
0
def main():

    argParser = OptionParser(usage="%prog [options]")
    argParser.add_option(
        "-e",
        dest="directories",
        help=
        "(required) Text file containing the directory name and ground truth directory and aoi"
    )
    argParser.add_option("-d",
                         dest="directory",
                         help="The directory to run in",
                         default=".")
    argParser.add_option("-t",
                         dest="tag",
                         help="Tag to prefix all test output with",
                         default="")
    argParser.add_option("-b",
                         dest="bin_dir",
                         help="Bin directory containing the VIDTK binaries")
    argParser.add_option("-v",
                         dest="gen_video",
                         help="enables the eneration of check videos")
    argParser.add_option("-a",
                         dest="adaboost_prefix",
                         help="adaboost prefix",
                         default="adaboost")
    options, args = argParser.parse_args()

    # Process command line options
    if options.directories == None:
        argParser.print_help()
        return 1
    if options.bin_dir != None:
        if platform.system() == "Windows":
            os.environ["PATH"] = options.bin_dir + ";" + os.environ["PATH"]
        else:
            os.environ["PATH"] = options.bin_dir + ":" + os.environ["PATH"]

    # Generate a config file
    os.chdir(options.directory)

    output_experiment_driver = open("trainer_driver.txt", 'w')

    experiments_in = open(options.directories, 'r')
    while 1:
        line = experiments_in.readline()
        if not line:
            break
        line = line.strip()
        exp_elements = line.split(' ')
        if (len(exp_elements) != 3):
            continue
        base_directory = exp_elements[1]
        experiment_name = exp_elements[0]
        aoi = exp_elements[2]
        CreateTrainingSet(base_directory, experiment_name,
                          output_experiment_driver, options.gen_video)
    output_experiment_driver.close()
    TrainAdaboostClassifer(options.adaboost_prefix)

    return 0
Пример #49
0
def parse_arguments(args):
    """ Parse command-line arguments, get values """
    from optparse import OptionParser

    kw = {}
    kw["usage"] = "%prog [options]"
    kw["description"] = \
        "%prog configures and runs a Hadoop n-gram (with a value range for n) computation job."

    parser = OptionParser(**kw)
    parser.disable_interspersed_args()

    parser.add_option(
        "--from",
        action="store",
        type="int",
        dest="start",  # opts.from is n/a
        help="The starting value for n",
        default=2)

    parser.add_option("--to",
                      action="store",
                      type="int",
                      dest="to",
                      help="The ending value for n",
                      default=2)

    parser.add_option("--reducers",
                      action="store",
                      type="int",
                      dest="reduce",
                      help="Number of reduce tasks",
                      default=1)

    parser.add_option("--threshold",
                      action="store",
                      type="int",
                      dest="threshold",
                      help="Threshold to discard #frequencies",
                      default=1)

    parser.add_option("--inputdir",
                      action="store",
                      type="string",
                      dest="inputdir",
                      help="Input HDFS directory",
                      default="wiki/in")

    (opts, args) = parser.parse_args(args)

    # Verify arguments

    if opts.start < 2:
        print >> sys.stderr, "Invalid From argument."
        parser.print_help()
        sys.exit(1)

    if opts.to < 2:
        print >> sys.stderr, "Invalid To argument."
        parser.print_help()
        sys.exit(1)

    if opts.reduce < 0:
        print >> sys.stderr, "Invalid Reduce argument."
        parser.print_help()
        sys.exit(1)

    if opts.threshold < -2:
        print >> sys.stderr, "Invalid Threshold argument."
        parser.print_help()
        sys.exit(1)

    if opts.start > opts.to:
        print >> sys.stderr, "From argument must be less or equal then To argument."
        parser.print_help()
        sys.exit(1)

    return (opts, args)
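
# A minimal usage sketch (not part of the original module): feed argv-style
# tokens to parse_arguments and read back the validated values. Assumes the
# surrounding module already has "import sys" (the function above writes to
# sys.stderr).
if __name__ == "__main__":
    opts, extra = parse_arguments(["--from", "2", "--to", "4",
                                   "--reducers", "3", "--inputdir", "wiki/in"])
    print >> sys.stderr, "n-gram range: %d..%d on %s" % (opts.start, opts.to,
                                                         opts.inputdir)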
Пример #50
0
                or1_chrom = or1.reference_name
                or2_chrom = or2.reference_name
                or1_pos = get_read_start(or1) + 1
                or2_pos = get_read_start(or2) + 1
                or1_resfrag = get_pos_range(res_frag, or1_chrom, or1)
                or2_resfrag = get_pos_range(res_frag, or2_chrom, or2)

                if or1_resfrag is not None:
                    rup1 = or1_resfrag.start
                    rdn1 = or1_resfrag.end
                if or2_resfrag is not None:
                    rup2 = or2_resfrag.start
                    rdn2 = or2_resfrag.end

                result = (or1.qname, or1_chrom, or1_pos, get_read_strand(or1),
                          or1.alen, rup1, rdn1, or2_chrom, or2_pos, get_read_strand(or2),
                          or2.alen, rup2, rdn2)
                outtsv.write("\t".join(map(str, result)) + "\n")
    outtsv.close()

if __name__ == "__main__":
    from optparse import OptionParser
    p = OptionParser(__doc__)

    opts, args = p.parse_args()
    if len(args) != 3:
        sys.exit(p.print_help())

    bamfile, enzyme_bed, outtsv = args
    hicpro2binless(bamfile, enzyme_bed, outtsv)
Пример #51
0
def scan(argv):
    parser = OptionParser(
        usage="usage: %prog [options] [ip]...",
        description="Scan IP range for PLC devices, Support MODBUS and...")
    parser.add_option("--host-list",
                      dest="hosts_file",
                      help="Scan hosts from file",
                      metavar="FILE")
    parser.add_option("--ports",
                      dest="ports",
                      help="Scan ports",
                      metavar="PORTS",
                      default=502)
    parser.add_option("--timeout",
                      dest="connect_timeout",
                      help="Connection timeout (seconds)",
                      metavar="TIMEOUT",
                      type="float",
                      default=1)
    AddModOptions(parser)  #for modbus protocol

    (options, args) = parser.parse_args(argv)

    scan_hosts = []
    if options.hosts_file:
        try:
            scan_hosts = [
                file.strip() for file in open(options.hosts_file, 'r')
            ]
        except IOError:
            print "Can't open file %s" % options.hosts_file

    for ip in args:
        scan_hosts.extend(get_ip_list(ip) if '/' in ip else [ip])

    scan_ports = options.ports

    if not scan_hosts:
        print "No targets to scan\n\n"
        parser.print_help()
        exit()

    status("Scan start...\n")
    for host in scan_hosts:
        splitted = host.split(':')
        host = splitted[0]
        if len(splitted) == 2:
            ports = [int(splitted[1])]
        else:
            ports = scan_ports

        # --ports may arrive as the int default, a string from the command
        # line, or a one-element list from a "host:port" target
        port = int(ports[0]) if isinstance(ports, (list, tuple)) else int(ports)
        status("%s:%d...\r" % (host, port))
        try:
            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            sock.settimeout(options.connect_timeout)
            sock.connect((host, port))
            sock.close()
        except socket.error:
            continue

        if port == 502:
            res = ModScan(host, port, options)
        else:
            print "port not support"
            exit()

        if not res:
            print "%s:%d unknown protocol" % (host, port)

    status("Scan complete\n")
Пример #52
0
parser = OptionParser(description=disc)
parser.add_option('--header',
                  dest='header',
                  metavar='FILE',
                  help='output version header file [required]')
parser.add_option('-q',
                  '--quiet',
                  action='store_true',
                  dest='quiet',
                  default=False,
                  help='do not output detailed status information')
(options, args) = parser.parse_args()

# the header option is required
if options.header is None:
    parser.print_help(sys.stdout)
    sys.exit()


def write_version_header(header):
    """ Creates the header file for the current revision and Chrome version information
       if the information has changed or if the file doesn't already exist. """

    if not git.is_checkout('.'):
        raise Exception('Not a valid checkout')

    if path_exists(header):
        oldcontents = read_file(header)
    else:
        oldcontents = ''
Пример #53
0
def model_plotter(model, logfile=None, **kwds):
    """
generate surface contour plots for model, specified by full import path
generate model trajectory from logfile (or solver restart file), if provided

Available from the command shell as:
  mystic_model_plotter.py model (filename) [options]

or as a function call as:
  mystic.model_plotter(model, filename=None, **options)

The option "bounds" takes an indicator string, where the bounds should
be given as comma-separated slices. For example, using bounds = "-1:10, 0:20"
will set the lower and upper bounds for x to be (-1,10) and y to be (0,20).
The "step" can also be given, to control the number of lines plotted in the
grid. Thus "-1:10:.1, 0:20" would set the bounds as above, but use increments
of .1 along x and the default step along y.  For models with > 2D, the bounds
can be used to specify 2 dimensions plus fixed values for remaining dimensions.
Thus, "-1:10, 0:20, 1.0" would plot the 2D surface where the z-axis was fixed
at z=1.0.  When called from a script, slice objects can be used instead of a
string, thus "-1:10:.1, 0:20, 1.0" becomes (slice(-1,10,.1), slice(20), 1.0).

The option "label" takes comma-separated strings. For example, label = "x,y,"
will place 'x' on the x-axis, 'y' on the y-axis, and nothing on the z-axis.
LaTeX is also accepted. For example, label = "$ h $, $ {\\alpha}$, $ v$" will
label the axes with standard LaTeX math formatting. Note that the leading
space is required, while a trailing space aligns the text with the axis
instead of the plot frame.

The option "reduce" can be given to reduce the output of a model to a scalar,
thus converting 'model(params)' to 'reduce(model(params))'. A reducer is given
by the import path (e.g. 'numpy.add'). The option "scale" will convert the plot
to log-scale, and scale the cost by 'z=log(4*z*scale+1)+2'. This is useful for
visualizing small contour changes around the minimum. If using log-scale
produces negative numbers, the option "shift" can be used to shift the cost
by 'z=z+shift'. Both shift and scale are intended to help visualize contours.

Required Inputs:
  model               full import path for the model (e.g. mystic.models.rosen)

Additional Inputs:
  filename            name of the convergence logfile (e.g. log.txt)
"""
    #FIXME: should be able to:
    # - apply a constraint as a region of NaN -- apply when 'xx,yy=x[ij],y[ij]'
    # - apply a penalty by shifting the surface (plot w/alpha?) -- as above
    # - build an appropriately-sized default grid (from logfile info)
    # - move all multi-id param/cost reading into read_history
    #FIXME: current issues:
    # - 1D slice and projection work for 2D function, but aren't "pretty"
    # - 1D slice and projection for 1D function, is it meaningful and correct?
    # - should be able to plot from solver.genealogy (multi-monitor?) [1D,2D,3D?]
    # - should be able to scale 'z-axis' instead of scaling 'z' itself
    #   (see https://github.com/matplotlib/matplotlib/issues/209)
    # - if trajectory outside contour grid, will increase bounds
    #   (see support_hypercube.py for how to fix bounds)
    import shlex
    global __quit
    __quit = False
    _model = None
    _reducer = None
    _solver = None

    # handle the special case where list is provided by sys.argv
    if isinstance(model, (list,tuple)) and not logfile and not kwds:
        cmdargs = model # (above is used by script to parse command line)
    elif isinstance(model, basestring) and not logfile and not kwds:
        cmdargs = shlex.split(model)
    # 'everything else' is essentially the functional interface
    else:
        out = kwds.get('out', None)
        bounds = kwds.get('bounds', None)
        label = kwds.get('label', None)
        nid = kwds.get('nid', None)
        iter = kwds.get('iter', None)
        reduce = kwds.get('reduce', None)
        scale = kwds.get('scale', None)
        shift = kwds.get('shift', None)
        fill = kwds.get('fill', False)
        depth = kwds.get('depth', False)
        dots = kwds.get('dots', False)
        join = kwds.get('join', False)

        # special case: bounds passed as list of slices
        if not isinstance(bounds, (basestring, type(None))):
            cmdargs = ''
            for b in bounds:
                if isinstance(b, slice):
                    cmdargs += "{}:{}:{}, ".format(b.start, b.stop, b.step)
                else:
                    cmdargs += "{}, ".format(b)
            bounds = cmdargs[:-2]

        # special case: model passed as model instance
       #model.__doc__.split('using::')[1].split()[0].strip()
        if callable(model): _model, model = model, "None"
        if callable(reduce): _reducer, reduce = reduce, None

        # handle logfile if given
        if logfile: model += ' ' + logfile

        # process "commandline" arguments
        cmdargs = ''
        cmdargs += '' if out is None else '--out={} '.format(out)
        cmdargs += '' if bounds is None else '--bounds="{}" '.format(bounds)
        cmdargs += '' if label is None else '--label={} '.format(label)
        cmdargs += '' if nid is None else '--nid={} '.format(nid)
        cmdargs += '' if iter is None else '--iter={} '.format(iter)
        cmdargs += '' if reduce is None else '--reduce={} '.format(reduce)
        cmdargs += '' if scale is None else '--scale={} '.format(scale)
        cmdargs += '' if shift is None else '--shift={} '.format(shift)
        cmdargs += '' if fill == False else '--fill '
        cmdargs += '' if depth == False else '--depth '
        cmdargs += '' if dots == False else '--dots '
        cmdargs += '' if join == False else '--join '
        cmdargs = model.split() + shlex.split(cmdargs)

    #XXX: note that 'argparse' is new as of python2.7
    from optparse import OptionParser
    def _exit(self, **kwds):
      global __quit
      __quit = True
    OptionParser.exit = _exit

    parser = OptionParser(usage=model_plotter.__doc__.split('\n\nOptions:')[0])
    parser.add_option("-u","--out",action="store",dest="out",\
                      metavar="STR",default=None,
                      help="filepath to save generated plot")
    parser.add_option("-b","--bounds",action="store",dest="bounds",\
                      metavar="STR",default="-5:5:.1, -5:5:.1",
                      help="indicator string to set plot bounds and density")
    parser.add_option("-l","--label",action="store",dest="label",\
                      metavar="STR",default=",,",
                      help="string to assign label to axis")
    parser.add_option("-n","--nid",action="store",dest="id",\
                      metavar="INT",default=None,
                      help="id # of the nth simultaneous points to plot")
    parser.add_option("-i","--iter",action="store",dest="stop",\
                      metavar="STR",default=":",
                      help="string for smallest:largest iterations to plot")
    parser.add_option("-r","--reduce",action="store",dest="reducer",\
                      metavar="STR",default="None",
                      help="import path of output reducer function")
    parser.add_option("-x","--scale",action="store",dest="zscale",\
                      metavar="INT",default=0.0,
                      help="scale plotted cost by z=log(4*z*scale+1)+2")
    parser.add_option("-z","--shift",action="store",dest="zshift",\
                      metavar="INT",default=0.0,
                      help="shift plotted cost by z=z+shift")
    parser.add_option("-f","--fill",action="store_true",dest="fill",\
                      default=False,help="plot using filled contours")
    parser.add_option("-d","--depth",action="store_true",dest="surface",\
                      default=False,help="plot contours showing depth in 3D")
    parser.add_option("-o","--dots",action="store_true",dest="dots",\
                      default=False,help="show trajectory points in plot")
    parser.add_option("-j","--join",action="store_true",dest="line",\
                      default=False,help="connect trajectory points in plot")
    parsed_opts, parsed_args = parser.parse_args(cmdargs)

#   import sys
#   if 'mystic_model_plotter.py' not in sys.argv:
    from StringIO import StringIO
    f = StringIO()
    parser.print_help(file=f)
    f.seek(0)
    if 'Options:' not in model_plotter.__doc__:
      model_plotter.__doc__ += '\nOptions:%s' % f.read().split('Options:')[-1]
    f.close()

    if __quit: return

    # get the import path for the model
    model = parsed_args[0]  # e.g. 'mystic.models.rosen'
    if "None" == model: model = None

    try: # get the name of the parameter log file
      source = parsed_args[1]  # e.g. 'log.txt'
    except:
      source = None

    try: # select the bounds
      options = parsed_opts.bounds  # format is "-1:10:.1, -1:10:.1, 1.0"
    except:
      options = "-5:5:.1, -5:5:.1"

    try: # plot using filled contours
      fill = parsed_opts.fill
    except:
      fill = False

    try: # plot contours showing depth in 3D
      surface = parsed_opts.surface
    except:
      surface = False

    #XXX: can't do '-x' with no argument given  (use T/F instead?)
    try: # scale plotted cost by z=log(4*z*scale+1)+2
      scale = float(parsed_opts.zscale)
      if not scale: scale = False
    except:
      scale = False

    #XXX: can't do '-z' with no argument given
    try: # shift plotted cost by z=z+shift
      shift = float(parsed_opts.zshift)
      if not shift: shift = False
    except:
      shift = False

    try: # import path of output reducer function
      reducer = parsed_opts.reducer  # e.g. 'numpy.add'
      if "None" == reducer: reducer = None
    except:
      reducer = None

    style = '-' # default linestyle
    if parsed_opts.dots:
      mark = 'o' # marker=mark
      # when using 'dots', also can turn off 'line'
      if not parsed_opts.line:
        style = '' # linestyle='None'
    else:
      mark = ''
    color = 'w' if fill else 'k'
    style = color + style + mark

    try: # select labels for the axes
      label = parsed_opts.label.split(',')  # format is "x, y, z"
    except:
      label = ['','','']

    try: # select which 'id' to plot results for
      ids = (int(parsed_opts.id),) #XXX: allow selecting more than one id ?
    except:
      ids = None # i.e. 'all'

    try: # select which iteration to stop plotting at
      stop = parsed_opts.stop  # format is "1:10:1"
      stop = stop if ":" in stop else ":"+stop
    except:
      stop = ":"

    #################################################
    solver = None  # set to 'mystic.solvers.fmin' (or similar) for 'live' fits
    #NOTE: 'live' runs constrain params explicitly in the solver, then reduce
    #      dimensions appropriately so results can be 2D contour plotted.
    #      When working with legacy results that have more than 2 params,
    #      the trajectory WILL NOT follow the masked surface generated
    #      because the masked params were NOT fixed when the solver was run.
    #################################################

    from mystic.tools import reduced, masked, partial

    # process inputs
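    # if actual model/reducer/solver objects were supplied (held in _model,
    # _reducer, _solver), they override the import-path strings parsed above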
    if _model: model = _model
    if _reducer: reducer = _reducer
    if _solver: solver = _solver
    select, spec, mask = _parse_input(options)
    x,y = _parse_axes(spec, grid=True) # grid=False for 1D plots
    #FIXME: does grid=False still make sense here...?
    if reducer: reducer = _reducer or _get_instance(reducer)
    if solver and (not source or not model):
        raise RuntimeError('a model and results filename are required')
    elif not source and not model:
        raise RuntimeError('a model or a results file is required')
    if model:
        model = _model or _get_instance(model)
        # need a reducer if model returns an array
        if reducer: model = reduced(reducer, arraylike=False)(model)

    if solver:
        # if 'live'... pick a solver
        solver = 'mystic.solvers.fmin'
        solver = _solver or _get_instance(solver)
        xlen = len(select)+len(mask)
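        # differential evolution solvers are seeded with per-parameter bounds,
        # while the other solvers start from a single initial point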
        if solver.__name__.startswith('diffev'):
            initial = [(-1,1)]*xlen
        else:
            initial = [0]*xlen
        from mystic.monitors import VerboseLoggingMonitor
        itermon = VerboseLoggingMonitor(filename=source, new=True)
        # explicitly constrain parameters
        model = partial(mask)(model)
        # solve
        sol = solver(model, x0=initial, itermon=itermon)

        #-OVERRIDE-INPUTS-#
        import numpy
        # read trajectories from monitor (comment out to use logfile)
        source = itermon
        # if negative minimum, shift by the 'solved minimum' plus an epsilon
        shift = max(-numpy.min(itermon.y), 0.0) + 0.5 # a good guess
        #-----------------#

    if model: # for plotting, implicitly constrain by reduction
        model = masked(mask)(model)

       ## plot the surface in 1D
       #if solver: v=sol[-1]
       #elif source: v=cost[-1]
       #else: v=None
       #fig0 = _draw_slice(model, x=x, y=v, scale=scale, shift=shift)
        # plot the surface in 2D or 3D
        fig = _draw_contour(model, x, y, surface=surface, fill=fill, scale=scale, shift=shift)
    else:
       #fig0 = None
        fig = None

    if source:
        # params are the parameter trajectories
        # cost is the solution trajectory
        params, cost = _get_history(source, ids)
        if len(cost) > 1: style = style[1:] # 'auto-color' #XXX: or grayscale?

        for p,c in zip(params, cost):
           ## project trajectory on a 1D slice of model surface #XXX: useful?
           #s = select[0] if len(select) else 0
           #px = p[int(s)] # _draw_projection requires one parameter
           ## ignore everything after 'stop'
           #_c = eval('c[%s]' % stop)
           #_x = eval('px[%s]' % stop)
           #fig0 = _draw_projection(_x,_c, style=style, scale=scale, shift=shift, figure=fig0)

            # plot the trajectory on the model surface (2D or 3D)
            # get two selected params #XXX: what if len(select)<2? or len(p)<2?
            p = [p[int(i)] for i in select[:2]]
            px,py = p # _draw_trajectory requires two parameters
            # ignore everything after 'stop'
            _x = eval('px[%s]' % stop)
            _y = eval('py[%s]' % stop)
            _c = eval('c[%s]' % stop) if surface else None
            fig = _draw_trajectory(_x,_y,_c, style=style, scale=scale, shift=shift, figure=fig)

    # add labels to the axes
    if surface: kwds = {'projection':'3d'} # 3D
    else: kwds = {}                        # 2D
    ax = fig.gca(**kwds)
    ax.set_xlabel(label[0])
    ax.set_ylabel(label[1])
    if surface: ax.set_zlabel(label[2])

    if not parsed_opts.out:
        plt.show()
    else:
        fig.savefig(parsed_opts.out)
Example #54
0
def main():
    parser = OptionParser(
        description="Finds a consensus dendrogram from an HRG model of a network.  Saves the consensus dendrogram to a graph markup language (GML) file.  Saves the histogram of splits in the consensus dendrogram to a file in Python's pickle format.",
        prog='hrg-consensus.py',
        usage='%prog [options] GRAPH_EDGELIST_FILE DENDROGRAM_GML_FILE')

    parser.add_option('-s', '--num-samples', action='store', type=int,
        default=10000, help='The number of times to sample the dendrogram\'s splits (default=10000).')

    parser.add_option('-t', '--temperature', action='store', type=float,
        default=2.0, help='The temperature at which to run (default=2.0).')

    (options, args) = parser.parse_args()

    if len(args) != 2:
        parser.print_help()
        return 1

    graph_edgelist=args[0]
    G=nx.read_edgelist(graph_edgelist, nodetype=int)
    filename=os.path.basename(graph_edgelist)
    G.name=os.path.splitext(filename)[0]

    gml_file=args[1]
    D=Dendrogram.from_gml_file(gml_file, G)

    bestL=initL=D.graph['L']
    bestI=0

    print_status("step", "L", "best L", "% complete", "consensus size")

    threshold = 1/(50.0*G.number_of_nodes())
    burnin = 200*G.number_of_nodes()
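    # heuristic: skip the first 200*N Monte Carlo steps as burn-in, then sample the
    # dendrogram's splits with probability 1/(50*N) per step (N = number of nodes)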
    i=1

    out = os.path.splitext(graph_edgelist)[0]
    out += '-consensus-temp-%0.2f' % options.temperature
    dendro_file = out + '-dendrogram.gml'
    hist_file = out + '-histogram.dat'
    print("HRG consensus dendrogram will be saved as " + dendro_file)
    print("Split histogram will be saved as " + hist_file)

    while D.num_samples < options.num_samples:
        taken=D.monte_carlo_move(T=options.temperature, debug=False)

        if i > burnin and random.random() < threshold:
            D.sample_splits()

        t = ''
        if taken:
            t = '*'
        if D.graph['L'] > bestL:
            bestL=D.graph['L']

        if i % 4096 == 0:
            nsplits = D.num_samples
            pct_complete = 100 * D.num_samples / float(options.num_samples)
            print_status(
                "[" + str(i) + "]",
                "%.3f" % D.graph['L'],
                "%.3f" % bestL,
                "%8.2f" % pct_complete,
                "%10d" % nsplits)

        if i % 10 == 0:
            sys.stdout.flush()

        i+=1

    # Save the histogram to a file.
    D.split_histogram['num_samples'] = D.num_samples
    pickle.dump(D.split_histogram, open(hist_file, mode='wb'))
    del D.split_histogram['num_samples']
    print("Saved split histogram to " + hist_file)

    # Build the consensus dendrogram, save it to a file.
    builder = ConsensusDendrogramBuilder()
    C = builder.build(D.graph_nodes_list, D.split_histogram, D.num_samples)
    nx.write_gml(C, dendro_file)
    print("Saved consensus dendrogram to " + dendro_file)

    return 0
Example #55
0
def main():

    parser = OptionParser()
    parser.add_option(
        '-b',
        '--region-bed',
        help='Limit variant calling to regions in this BED file (optional)',
        dest='bedfile')
    parser.add_option(
        '-s',
        '--hotspot-vcf',
        help='VCF file specifying exact hotspot positions (optional)',
        dest='hotspot_vcf')
    parser.add_option('-i',
                      '--input-bam',
                      help='BAM file containing aligned reads (required)',
                      dest='bamfile')
    parser.add_option('-r',
                      '--reference-fasta',
                      help='FASTA file containing reference genome (required)',
                      dest='reference')
    parser.add_option('-o',
                      '--output-dir',
                      help='Output directory (default: current)',
                      dest='outdir',
                      default='.')
    parser.add_option(
        '-p',
        '--parameters-file',
        help='JSON file containing variant calling parameters (recommended)',
        dest='paramfile')
    parser.add_option(
        '-B',
        '--bin-dir',
        help=
        'Directory path to location of variant caller programs. Defaults to the directory where this script is located',
        dest='rundir',
        default=os.path.dirname(os.path.realpath(__file__)))
    parser.add_option('-n',
                      '--num-threads',
                      help='Set TVC number of threads (default: 32)',
                      dest='numthreads',
                      default='32')
    parser.add_option(
        '--primer-trim-bed',
        help='Perform primer trimming using provided BED file. (optional)',
        dest='ptrim_bed')
    parser.add_option(
        '--postprocessed-bam',
        help=
        'Perform primer trimming, storing the results in provided BAM file name (optional)',
        dest='postprocessed_bam')

    (options, args) = parser.parse_args()

    if not options.bamfile or not options.reference:
        parser.print_help()
        exit(1)

    if not os.path.isdir(options.outdir):
        printtime('ERROR: No output directory found at: ' + options.outdir)
        sys.exit(1)
    if not os.path.exists(options.reference):
        printtime('ERROR: No reference file found at: ' + options.reference)
        sys.exit(1)
    if not os.path.exists(options.reference + '.fai'):
        printtime('ERROR: No reference index file found at: ' +
                  options.reference + '.fai')
        sys.exit(1)
    if not os.path.exists(options.bamfile):
        printtime('ERROR: No bam file found at: ' + options.bamfile)
        sys.exit(1)
    if not os.path.exists(options.bamfile + '.bai'):
        printtime('ERROR: No bam index file found at: ' + options.bamfile +
                  '.bai')
        sys.exit(1)

    if options.hotspot_vcf:
        if not os.path.exists(options.hotspot_vcf):
            printtime('ERROR: No hotspots vcf file found at: ' +
                      options.hotspot_vcf)
            sys.exit(1)

    parameters = {}
    if options.paramfile:
        try:
            json_file = open(options.paramfile, 'r')
            parameters = json.load(json_file)
            json_file.close()
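            # some parameter files nest the settings under a 'pluginconfig'
            # section; unwrap it if present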
            if parameters.has_key('pluginconfig'):
                parameters = parameters['pluginconfig']
        except:
            printtime('ERROR: No parameter file found at: ' +
                      options.paramfile)
            sys.exit(1)

    if not os.path.exists(options.outdir):
        os.makedirs(options.outdir)

    # New way of handling hotspots: single call to tvc

    # This logic might go to variant_caller_plugin.py
    meta_tvc_args = parameters.get('meta', {}).get('tvcargs', 'tvc')
    if meta_tvc_args == 'tvc' and os.path.exists(
            options.rundir +
            '/tvc'):  # try local binary first, then go to global one
        tvc_command = '%s/tvc' % options.rundir
    else:
        tvc_command = meta_tvc_args
    tvc_command += '   --output-dir %s' % options.outdir
    tvc_command += '   --output-vcf small_variants.vcf'
    tvc_command += '   --reference %s' % options.reference
    tvc_command += '   --input-bam %s' % options.bamfile
    if options.ptrim_bed:
        tvc_command += '   --target-file %s' % options.ptrim_bed
        tvc_command += '   --trim-ampliseq-primers on'
    elif options.bedfile:
        tvc_command += '   --target-file %s' % options.bedfile
    if options.postprocessed_bam:
        postprocessed_bam_tmp = options.postprocessed_bam + '.tmp.bam'
        tvc_command += '   --postprocessed-bam %s' % postprocessed_bam_tmp
    if options.hotspot_vcf:
        tvc_command += '   --input-vcf %s' % options.hotspot_vcf
    if options.paramfile:
        tvc_command += '   --parameters-file %s' % options.paramfile
    tvc_command += '   --num-threads %s' % options.numthreads
    tvc_command += '   --error-motifs %s' % os.path.join(
        options.rundir, 'TVC/sse/motifset.txt')
    RunCommand(tvc_command, 'Call small indels and SNPs')

    if options.postprocessed_bam:
        bamsort_command = 'samtools sort %s %s' % (
            postprocessed_bam_tmp, options.postprocessed_bam[:-4])
        RunCommand(bamsort_command, 'Sort postprocessed bam')
        bamindex_command = 'samtools index %s' % options.postprocessed_bam
        RunCommand(bamindex_command, 'Index postprocessed bam')
        RunCommand('rm -f ' + postprocessed_bam_tmp,
                   'Remove unsorted postprocessed bam')

    vcfsort_command = '%s/scripts/sort_vcf.py' % options.rundir
    vcfsort_command += '   --input-vcf %s/small_variants.vcf' % options.outdir
    vcfsort_command += '   --output-vcf %s/small_variants.sorted.vcf' % options.outdir
    vcfsort_command += '   --index-fai %s.fai' % options.reference
    RunCommand(vcfsort_command, 'Sort small variant vcf')

    left_align_command = 'java -Xmx8G -jar %s/TVC/jar/GenomeAnalysisTK.jar' % options.rundir
    left_align_command += '   -T LeftAlignVariants'
    left_align_command += '   -R %s' % options.reference
    left_align_command += '   --variant %s/small_variants.sorted.vcf' % options.outdir
    left_align_command += '   -o %s/small_variants.left.vcf' % options.outdir
    RunCommand(left_align_command, 'Ensure left-alignment of indels')

    # create command for long indel assembly and run
    long_indel_command = 'java -Xmx8G -cp %s/TVC/jar/ -jar %s/TVC/jar/GenomeAnalysisTK.jar' % (
        options.rundir, options.rundir)
    long_indel_command += '   -T IndelAssembly --bypassFlowAlign'
    long_indel_command += '   -R %s' % options.reference
    long_indel_command += '   -I %s' % options.bamfile
    if options.bedfile:
        long_indel_command += '   -L %s' % options.bedfile
    long_indel_command += '   -o %s/indel_assembly.vcf' % options.outdir
    long_indel_command += '   -S SILENT -U ALL -filterMBQ'
    cmdoptions = parameters.get('long_indel_assembler', {})
    for k, v in cmdoptions.iteritems():
        long_indel_command += '   --%s %s' % (k, str(v))
    if not cmdoptions:
        long_indel_command += '   -nt 1'
    #long_indel_command +=       ' > %s/indel_assembly.log' % options.outdir
    RunCommand(long_indel_command, 'Assemble long indels')

    # Perform variant unification step.

    unify_command = '%s/scripts/unify_variants_and_annotations.py' % options.rundir
    #unify_command +=            '   --novel-tvc-vcf %s/small_variants.vcf' % options.outdir
    unify_command += '   --novel-tvc-vcf %s/small_variants.left.vcf' % options.outdir
    if os.path.exists("%s/indel_assembly.vcf" % options.outdir):
        unify_command += '   --novel-assembly-vcf %s/indel_assembly.vcf' % options.outdir
    if options.hotspot_vcf:
        unify_command += '   --hotspot-annotation-vcf %s' % options.hotspot_vcf
    unify_command += '   --output-vcf %s/all.merged.vcf' % options.outdir
    unify_command += '   --index-fai %s.fai' % options.reference
    if os.path.exists(options.outdir + '/tvc_metrics.json'):
        unify_command += '   --tvc-metrics %s/tvc_metrics.json' % options.outdir

    RunCommand(
        unify_command,
        'Unify variants and annotations from all sources (tvc,IndelAssembly,hotspots)'
    )

    # Scan through the merged vcf and count the number of lines.
    num_variants_before_bed = 0
    input = open('%s/all.merged.vcf' % options.outdir, 'r')
    for line in input:
        if line and line[0] != '#':
            num_variants_before_bed += 1
    input.close()

    # BED filtering
    if options.bedfile and num_variants_before_bed > 0:
        # This command merely prepends a fake header line to the bed file if it doesn't have one
        bedtmp = options.outdir + '/' + os.path.basename(
            options.bedfile) + "tmp.bed"
        RunCommand(
            'awk \'{++c;if(c==1&&$1!~"^#"){print "track name=header";print}else{print}}\' %s > %s'
            % (options.bedfile, bedtmp), 'Append header line to bed file')

        bedfilter_command = 'vcftools'
        bedfilter_command += '   --vcf %s/all.merged.vcf' % options.outdir
        bedfilter_command += '   --bed %s' % bedtmp
        bedfilter_command += '   --out %s/all' % options.outdir
        bedfilter_command += '   --recode  --keep-INFO-all'
        #bedfilter_command +=    ' > /dev/null'
        RunCommand(bedfilter_command, 'Filter merged VCF using region BED')

        if os.path.exists(options.outdir + '/all.recode.vcf'):
            RunCommand(
                "cp   %s/all.recode.vcf   %s/TSVC_variants.vcf" %
                (options.outdir, options.outdir), 'Move final VCF into place')
        else:
            # Temporary workaround for cases where there are no variants left after filtering.
            # Just physically copy the header
            input = open('%s/all.merged.vcf' % options.outdir, 'r')
            output = open('%s/TSVC_variants.vcf' % options.outdir, 'w')
            for line in input:
                if line and line[0] == '#':
                    output.write(line)
            input.close()
            output.close()

    else:
        RunCommand(
            "cp   %s/all.merged.vcf   %s/TSVC_variants.vcf" %
            (options.outdir, options.outdir), 'Move final VCF into place')

    # Generate .gz and .tbi
    vcfout = '%s/TSVC_variants.vcf' % options.outdir
    RunCommand('bgzip   -c "%s"   > "%s.gz"' % (vcfout, vcfout),
               'Generate compressed vcf')
    RunCommand('tabix   -p vcf   "%s.gz"' % (vcfout),
               'Generate index for compressed vcf')

    SplitVcf("%s/TSVC_variants.vcf" % options.outdir,
             "%s/SNP_variants.vcf" % options.outdir,
             "%s/indel_variants.vcf" % options.outdir)
Example #56
0
def log_reader(filename, **kwds):
    """
plot parameter convergence from file written with 'LoggingMonitor'

Available from the command shell as:
  mystic_log_reader.py filename [options]

or as a function call as:
  mystic.log_reader(filename, **options)

The option "param" takes an indicator string. The indicator string is built
from comma-separated array slices. For example, params = ":" will plot all
parameters.  Alternatively, params = ":2, 3:" will plot all parameters except
for the third parameter, while params = "0" will only plot the first parameter.

Required Inputs:
  filename            name of the convergence logfile (e.g log.txt)
"""
    import shlex
    global __quit
    __quit = False

    # handle the special case where list is provided by sys.argv
    if isinstance(filename, (list,tuple)) and not kwds:
        cmdargs = filename # (above is used by script to parse command line)
    elif isinstance(filename, basestring) and not kwds:
        cmdargs = shlex.split(filename)
    # 'everything else' is essentially the functional interface
    else:
        out = kwds.get('out', None)
        dots = kwds.get('dots', False)
        line = kwds.get('line', False)
        iter = kwds.get('iter', None)
        legend = kwds.get('legend', False)
        nid = kwds.get('nid', None)
        param = kwds.get('param', None)

        # process "commandline" arguments
        cmdargs = ''
        cmdargs += '' if out is None else '--out={} '.format(out)
        cmdargs += '' if dots == False else '--dots '
        cmdargs += '' if line == False else '--line '
        cmdargs += '' if iter is None else '--iter={} '.format(iter)
        cmdargs += '' if legend == False else '--legend '
        cmdargs += '' if nid is None else '--nid={} '.format(nid)
        cmdargs += '' if param is None else '--param="{}" '.format(param)
        cmdargs = filename.split() + shlex.split(cmdargs)

    #XXX: note that 'argparse' is new as of python2.7
    from optparse import OptionParser
    def _exit(self, **kwds):
      global __quit
      __quit = True
    OptionParser.exit = _exit

    parser = OptionParser(usage=log_reader.__doc__.split('\n\nOptions:')[0])
    parser.add_option("-u","--out",action="store",dest="out",\
                      metavar="STR",default=None,
                      help="filepath to save generated plot")
    parser.add_option("-d","--dots",action="store_true",dest="dots",\
                      default=False,help="show data points in plot")
    parser.add_option("-l","--line",action="store_true",dest="line",\
                      default=False,help="connect data points in plot with a line")
    parser.add_option("-i","--iter",action="store",dest="stop",metavar="INT",\
                      default=None,help="the largest iteration to plot")
    parser.add_option("-g","--legend",action="store_true",dest="legend",\
                      default=False,help="show the legend")
    parser.add_option("-n","--nid",action="store",dest="id",\
                      metavar="INT",default=None,
                      help="id # of the nth simultaneous points to plot")
    parser.add_option("-p","--param",action="store",dest="param",\
                      metavar="STR",default=":",
                      help="indicator string to select parameters")
    #parser.add_option("-f","--file",action="store",dest="filename",metavar="FILE",\
    #                  default='log.txt',help="log file name")
    parsed_opts, parsed_args = parser.parse_args(cmdargs)

#   import sys
#   if 'mystic_log_reader.py' not in sys.argv:
    from StringIO import StringIO
    f = StringIO()
    parser.print_help(file=f)
    f.seek(0)
    if 'Options:' not in log_reader.__doc__:
      log_reader.__doc__ += '\nOptions:%s' % f.read().split('Options:')[-1]
    f.close()

    style = '-' # default linestyle
    if parsed_opts.dots:
      mark = 'o'
      # when using 'dots', also can turn off 'line'
      if not parsed_opts.line:
        style = 'None'
    else:
      mark = ''

    if __quit: return

    try: # get logfile name
      filename = parsed_args[0]
    except:
      raise IOError, "please provide log file name"

    try: # select which iteration to stop plotting at
      stop = int(parsed_opts.stop)
    except:
      stop = None

    try: # select which 'id' to plot results for
      runs = (int(parsed_opts.id),) #XXX: allow selecting more than one id ?
    except:
      runs = None # i.e. 'all' **or** use id=0, which should be 'best' energy ?

    try: # select which parameters to plot
      select = parsed_opts.param.split(',')  # format is ":2, 2:4, 5, 6:"
    except:
      select = [':']

    # ensure all terms of select have a ":"
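    # (a bare index 'n' becomes the slice 'n:n+1'; '-1' selects the last parameter)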
    for i in range(len(select)):
      if isinstance(select[i], int): select[i] = str(select[i])
      if select[i] == '-1': select[i] = 'len(params)-1:len(params)'
      elif not select[i].count(':'):
        select[i] += ':' + str(int(select[i])+1)


    # == Possible results ==
    # iter = (i,id) or (i,) 
    # split => { (i,) then (i+1,) } or { (i,) then (0,) }
    # y|x = { float list } or { list [list1, ...] }

    # == Use Cases ==
    # (i,id) + { (i,) then (i+1,) } + { float list }
    # (i,) + { (i,) then (i+1,) } + { float list }
    # (i,id) + { (i,) then (i+1,) } + { list [list1, ...] }
    # (i,) + { (i,) then (i+1,) } + { list [list1, ...] }
    # (i,id) + { (i,) then (0,) } + { float list }
    # (i,) + { (i,) then (0,) } + { float list }
    # (i,id) + { (i,) then (0,) } + { list [list1, ...] }
    # (i,) + { (i,) then (0,) } + { list [list1, ...] }
    # NOTES:
    #   Legend is different for list versus [list1,...]
    #   Plot should be discontinuous for (i,) then (0,)

    # parse file contents to get (i,id), cost, and parameters
    from mystic.munge import logfile_reader, read_raw_file
    try:
        step, param, cost = logfile_reader(filename)
    except SyntaxError:
        read_raw_file(filename)
        msg = "incompatible file format, try 'support_convergence.py'"
        raise SyntaxError(msg)

    # ignore everything after 'stop'
    step = step[:stop]
    cost = cost[:stop]
    param = param[:stop]

    # split (i,id) into iteration and id
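    # (each step is (iteration, id) when several runs are logged simultaneously,
    #  otherwise just (iteration,))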
    multinode = len(step[0]) - 1  #XXX: what if step = []?
    iter = [i[0] for i in step]
    if multinode:
      id = [i[1] for i in step]
    else:
      id = [0 for i in step]

    # build the list of selected parameters
    params = range(len(param[0]))
    selected = []
    for i in select:
      selected.extend(eval("params[%s]" % i))
    selected = list(set(selected))

    results = [[] for i in range(max(id) + 1)]

    # populate results for each id with the corresponding (iter,cost,param)
    for i in range(len(id)):
      if runs is None or id[i] in runs: # take only the selected 'id'
        results[id[i]].append((iter[i],cost[i],param[i]))
    # NOTE: for example...  results = [[(0,...)],[(0,...),(1,...)],[],[(0,...)]]

    # build list of parameter (and cost) convergences for each id
    conv = []; cost_conv = []; iter_conv = []
    for i in range(len(results)):
      conv.append([])#; cost_conv.append([]); iter_conv.append([])
      if len(results[i]):
        for k in range(len(results[i][0][2])):
          conv[i].append([results[i][j][2][k] for j in range(len(results[i]))])
        cost_conv.append([results[i][j][1] for j in range(len(results[i]))])
        iter_conv.append([results[i][j][0] for j in range(len(results[i]))])
      else:
        conv[i] = [[] for k in range(len(param[0]))]
        cost_conv.append([])
        iter_conv.append([])

    #print "iter_conv = %s" % iter_conv
    #print "cost_conv = %s" % cost_conv
    #print "conv = %s" % conv

    import matplotlib.pyplot as plt

    fig = plt.figure()

    #FIXME: These may fail when conv[i][j] = [[],[],[]] and cost = []. Verify this.
    ax1 = fig.add_subplot(2,1,1)
    for i in range(len(conv)):
      if runs is None or i in runs: # take only the selected 'id'
        for j in range(len(param[0])):
          if j in selected: # take only the selected 'params'
            tag = "%d,%d" % (j,i) # label is 'parameter,id'
            ax1.plot(iter_conv[i],conv[i][j],label="%s" % tag,marker=mark,linestyle=style)
    if parsed_opts.legend: plt.legend()

    ax2 = fig.add_subplot(2,1,2)
    for i in range(len(conv)):
      if runs is None or i in runs: # take only the selected 'id'
        tag = "%d" % i # label is 'cost id'
        ax2.plot(iter_conv[i],cost_conv[i],label='cost %s' % tag,marker=mark,linestyle=style)
    if parsed_opts.legend: plt.legend()

    if not parsed_opts.out:
        plt.show()
    else:
        fig.savefig(parsed_opts.out)
Example #57
0
                          dest="chinese",
                          default=False,
                          help="Create Chinese font")
    cmd_parser.add_option("-b",
                          "--barcode",
                          action="store_true",
                          dest="barcode",
                          default=False,
                          help="Create Barcode font")
    cmd_parser.add_option("-v",
                          "--save_glyph_image",
                          action="store_true",
                          dest="save_glyph_image",
                          default=False,
                          help="Save individual glyph to files.")
    (options, args) = cmd_parser.parse_args()

    font_size = getattr(options, 'font_size')
    chinese = getattr(options, 'chinese')
    barcode = getattr(options, 'barcode')
    save_glyph_image = getattr(options, 'save_glyph_image')
    if not font_size or (not chinese and not barcode):
        cmd_parser.print_help()
        sys.exit()

    if chinese:
        make_chinese_font(font_size, save_glyph_image)

    if barcode:
        make_barcode_font(font_size, save_glyph_image)
Example #58
0
def parse_options():
  """Process command line arguments"""

  

  def tallies_callback(option, opt, value, parser):
    """Option parser function for list of tallies"""
    global err
    try:
      setattr(parser.values, option.dest, [int(v) for v in value.split(',')])
    except:
      p.print_help()
      err = True

  def scores_callback(option, opt, value, parser):
    """Option parser function for list of scores"""
    global err
    try:
      scores = {}
      entries = value.split(',')
      for e in entries:
        tally,score = [int(i) for i in e.split('.')]
        if not tally in scores: scores[tally] = []
        scores[tally].append(score)
      setattr(parser.values, option.dest, scores)
    except:
      p.print_help()
      err = True
  
  def filters_callback(option, opt, value, parser):
    """Option parser function for list of filters"""
    global err
    try:
      filters = {}
      entries = value.split(',')
      for e in entries:
        tally,filter_,bin = [i for i in e.split('.')]
        tally,bin = int(tally),int(bin)
        if not tally in filters: filters[tally] = {}
        if not filter_ in filters[tally]: filters[tally][filter_] = []
        filters[tally][filter_].append(bin)
      setattr(parser.values, option.dest, filters)
    except:
      p.print_help()
      err = True
  
  from optparse import OptionParser
  usage = r"""%prog [options] <statepoint_file>

The default is to process all tallies and all scores into one file. Subsets
can be chosen using the options.  For example, to only process tallies 2 and 4
with all scores on tally 2 and only scores 1 and 3 on tally 4:

%prog -t 2,4 -s 4.1,4.3 <statepoint_file>

Likewise if you have additional filters on a tally you can specify a subset of
bins for each filter for that tally. For example to process all tallies and
scores, but only energyin bin #1 in tally 2:

%prog -f 2.energyin.1 <statepoint_file>

You can list the available tallies, scores, and filters with the -l option:

%prog -l <statepoint_file>"""
  p = OptionParser(usage=usage)
  p.add_option('-t', '--tallies', dest='tallies', type='string', default=None,
                action='callback', callback=tallies_callback,
                help='List of tally indices to process, separated by commas.' \
                     ' Default is to process all tallies.')
  p.add_option('-s', '--scores', dest='scores', type='string', default=None,
               action='callback', callback=scores_callback,
               help='List of score indices to process, separated by commas, ' \
                 'specified as {tallyid}.{scoreid}.' \
                 ' Default is to process all scores in each tally.')
  p.add_option('-f', '--filters', dest='filters', type='string', default=None,
               action='callback', callback=filters_callback,
               help='List of filter bins to process, separated by commas, ' \
                 'specified as {tallyid}.{filter}.{binid}. ' \
                 'Default is to process all filter combinations for each score.')
  p.add_option('-l', '--list', dest='list', action='store_true',
               help='List the tally and score indices available in the file.')
  p.add_option('-o', '--output', action='store', dest='output',
               default='tally', help='path to output SILO file.')
  p.add_option('-e', '--error', dest='valerr', default=False,
               action='store_true', 
               help='Flag to extract errors instead of values.')
  p.add_option('-v', '--vtk', action='store_true', dest='vtk',
               default=False, help='Flag to convert to VTK instead of SILO.')
  parsed = p.parse_args()
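  # parse_args() returns an (options, args) tuple; parsed[1] holds the
  # positional arguments (the statepoint file)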
  
  if not parsed[1]:
    p.print_help()
    return parsed, err

  if parsed[0].valerr:
    parsed[0].valerr = 1
  else:
    parsed[0].valerr = 0

  return parsed, err
Example #59
0
def main():

    argParser = OptionParser(usage="%prog [options]")
    argParser.add_option("-c",
                         dest="config_file",
                         help="(required) The master config file to edit")
    argParser.add_option(
        "-e",
        dest="experiments",
        help="(required) File containing the set of experiments to run")
    argParser.add_option("-t",
                         dest="tag",
                         help="Tag to prefix all test output with",
                         default="")
    argParser.add_option("-b",
                         dest="bin_dir",
                         help="Bin directory containing the VIDTK binaries")
    argParser.add_option("-s",
                         dest="html_script",
                         help="The HTML generation script")

    options, args = argParser.parse_args()

    # Process command line options
    if options.config_file == None:
        argParser.print_help()
        return 1
    if options.experiments == None:
        argParser.print_help()
        return 1
    if options.bin_dir != None:
        if platform.system() == "Windows":
            os.environ["PATH"] = options.bin_dir + ";" + os.environ["PATH"]
        else:
            os.environ["PATH"] = options.bin_dir + ":" + os.environ["PATH"]

    # Generate the test name
    strtime = time.strftime("%Y_%m_%d__%H_%M_%S", time.localtime())
    testName = options.tag + strtime

    # Generate a config file
    os.mkdir(testName)
    os.chdir(testName)

    experiments_in = open(options.experiments, 'r')
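    # each experiment line holds four space-separated fields:
    #   <experiment_name> <base_directory> <aoi> <adaboost_file>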
    while 1:
        line = experiments_in.readline()
        if not line:
            break
        line = line.strip()
        exp_elements = line.split(' ')
        if (len(exp_elements) != 4):
            continue
        base_director = exp_elements[1]
        experiment_name = exp_elements[0]
        aoi = exp_elements[2]
        adaboost_file = exp_elements[3]
        RunExperiment(options.config_file, base_director, experiment_name, aoi,
                      adaboost_file)
    if options.html_script == None:
        print("Cannot generate website")
        return 0
    os.chdir(testName + "/..")
    run_generate_html(options.html_script)
    return 0
Example #60
0
def main():
    import cli
    usagestr = "Usage: %prog [OPTIONS] <command> [args]"
    PARSER = OptionParser(usage=usagestr)
    PARSER.add_option("-H",
                      "--host",
                      dest="host",
                      type="string",
                      default="127.0.0.1",
                      help="ip address of api host")
    PARSER.add_option("-p",
                      "--port",
                      dest="port",
                      type="int",
                      default=9696,
                      help="api poort")
    PARSER.add_option("-s",
                      "--ssl",
                      dest="ssl",
                      action="store_true",
                      default=False,
                      help="use ssl")
    PARSER.add_option("-v",
                      "--verbose",
                      dest="verbose",
                      action="store_true",
                      default=False,
                      help="turn on verbose logging")
    PARSER.add_option("-f",
                      "--logfile",
                      dest="logfile",
                      type="string",
                      default="syslog",
                      help="log file path")
    options, args = PARSER.parse_args()

    if options.verbose:
        LOG.setLevel(logging.DEBUG)
    else:
        LOG.setLevel(logging.WARN)

    if options.logfile == "syslog":
        LOG.addHandler(logging.handlers.SysLogHandler(address='/dev/log'))
    else:
        LOG.addHandler(logging.handlers.WatchedFileHandler(options.logfile))
        os.chmod(options.logfile, 0644)

    if len(args) < 1:
        PARSER.print_help()
        qcli.help()
        help()
        sys.exit(1)

    CMD = args[0]
    if CMD in qcli.commands.keys():
        qcli.main()
        sys.exit(1)
    if CMD not in COMMANDS.keys():
        LOG.error("Unknown command: %s" % CMD)
        qcli.help()
        help()
        sys.exit(1)

    args = build_args(CMD, COMMANDS[CMD]["args"], args[1:])

    LOG.info("Executing command \"%s\" with args: %s" % (CMD, args))

    HOST = options.host
    PORT = options.port
    USE_SSL = options.ssl
    COMMANDS[CMD]["func"](*args)

    LOG.info("Command execution completed")
    sys.exit(0)