Beispiel #1
0
def main(): 
    usage = """usage: %prog [options] /path/to/dump.rdb

Example 1 : %prog -k "user-" -k "friends-" /var/redis/6379/dump.rdb
Example 2 : %prog /var/redis/6379/dump.rdb"""

    parser = OptionParser(usage=usage)

    parser.add_option("-k", "--key", dest="keys", action="append",
                  help="Keys that should be grouped together. Multiple prefix can be provided")
    
    (options, args) = parser.parse_args()
    
    if len(args) == 0:
        parser.error("Redis RDB file not specified")
    dump_file = args[0]
    if not options.keys:
        options.keys = []

    stat2d = MyStatsAggregator(options.keys)
    callback = MemoryCallback(stat2d, 64)
    parser = RdbParser(callback)
    try:
        parser.parse(dump_file)
    except Exception, e:
        print 'error: ', e
def main():
    """Generate an HTML memory report for a Redis RDB dump and print it.

    Parses the dump, aggregates per-key memory statistics, renders them into
    the bundled report template, and writes the resulting HTML to stdout.
    """
    usage = """usage: %prog [options] /path/to/dump.rdb

Example 1 : %prog -k "user.*" -k "friends.*" -f memoryreport.html /var/redis/6379/dump.rdb
Example 2 : %prog /var/redis/6379/dump.rdb"""

    parser = OptionParser(usage=usage)

    parser.add_option("-f", "--file", dest="output",
                      help="Output file", metavar="FILE")
    parser.add_option("-k", "--key", dest="keys", action="append",
                      help="Keys that should be grouped together. Multiple regexes can be provided")

    (options, args) = parser.parse_args()

    if len(args) == 0:
        parser.error("Redis RDB file not specified")
    dump_file = args[0]

    # NOTE(review): `output` is computed but never used below -- the report
    # always goes to stdout. Preserved for interface compatibility; confirm
    # whether the HTML was meant to be written to this path.
    if not options.output:
        output = "redis_memory_report.html"
    else:
        output = options.output

    stats = StatsAggregator()
    callback = MemoryCallback(stats, 64)
    parser = RdbParser(callback)
    parser.parse(dump_file)
    stats_as_json = stats.get_json()

    # BUG FIX: the template file handle was leaked via open(...).read();
    # a context manager closes it promptly.
    template_path = os.path.join(os.path.dirname(__file__), "report.html.template")
    with open(template_path) as template_file:
        report_template = Template(template_file.read())
    html = report_template.substitute(REPORT_JSON = stats_as_json)
    print(html)
Beispiel #3
0
def main():
    """Export a Redis RDB dump via the command given with -c/--command.

    Supports the 'diff', 'json', and 'memory' callbacks; output goes to the
    -f file (binary mode) or stdout. Filters on database number, key regex,
    and data type can be supplied on the command line.
    """
    usage = """usage: %prog [options] /path/to/dump.rdb

Example : %prog --command json -k "user.*" /var/redis/6379/dump.rdb"""

    parser = OptionParser(usage=usage)
    parser.add_option("-c", "--command", dest="command",
                  help="Command to execute. Valid commands are json or diff", metavar="FILE")

    parser.add_option("-f", "--file", dest="output",
                  help="Output file", metavar="FILE")
    parser.add_option("-n", "--db", dest="dbs", action="append",
                  help="Database Number. Multiple databases can be provided. If not specified, all databases will be included.")
    parser.add_option("-k", "--key", dest="keys", default=None,
                  help="Keys to export. This can be a regular expression")
    parser.add_option("-t", "--type", dest="types", action="append",
                  help="""Data types to include. Possible values are string, hash, set, sortedset, list. Multiple typees can be provided. 
                    If not specified, all data types will be returned""")

    (options, args) = parser.parse_args()

    if len(args) == 0:
        parser.error("Redis RDB file not specified")
    dump_file = args[0]

    filters = {}
    if options.dbs:
        filters['dbs'] = []
        for x in options.dbs:
            try:
                filters['dbs'].append(int(x))
            except ValueError:
                raise Exception('Invalid database number %s' %x)

    if options.keys:
        filters['keys'] = options.keys

    if options.types:
        filters['types'] = []
        for x in options.types:
            if not x in VALID_TYPES:
                raise Exception('Invalid type provided - %s. Expected one of %s' % (x, (", ".join(VALID_TYPES))))
            else:
                filters['types'].append(x)

    destination = sys.stdout
    if options.output:
        # BUG FIX: this was assigned to a misspelled name ('desitnaion'),
        # so -f/--file never actually redirected the output.
        destination = open(options.output, 'wb')

    cmds = { 'diff': DiffCallback,
             'json': JSONCallback,
             'memory': lambda r : MemoryCallback(r, 64) }
    for key, cb in cmds.items():
        if key != options.command:
            continue
        with destination as f:
            parser = RdbParser(cb(f), filters=filters)
            parser.parse(dump_file)
        return True
    # BUG FIX: the message misspelled 'Command' and interpolated
    # options.output instead of the offending command.
    raise Exception('Invalid Command %s' % options.command)
Beispiel #4
0
def main():
    parser = OptionParser(usage="Usage: %prog [options] [[--] <application> [<args>]]")
    collection_group = OptionGroup(parser, "Collection options")
    collection_group.add_option("--file",
                                metavar="FILE",
                                dest="file",
                                help="")

    collection_group.add_option("--func",
                                metavar="OUT",
                                dest="function",
                                help="")

    collection_group.add_option("--out",
                                metavar="OUT",
                                dest="out",
                                help="")

    collection_group.add_option("--cache",
                                metavar="CACHE",
                                dest="cache",
                                help="")

    collection_group.add_option("--format",
                                metavar="FORMAT",
                                dest="format",
                                help="")

    parser.add_option_group(collection_group)

    (options, arguments) = parser.parse_args()
    if options.file:
        if not os.path.exists(options.file):
            parser.error("file not found")
    else:
        parser.error("file not specified")

    info = HpccInfo('xstftool', options)
    parser = Parser(options.format, info, options.out, options.cache)

    print datetime.datetime.now().ctime() + ' Start'
    if options.cache is None:
        parser.parse()
    else:
        if not parser.read_cache():
            parser.parse()

    parser.to_print()
    print "\n" + datetime.datetime.now().ctime() + ' End'
    return 0
Beispiel #5
0
def main():
    from optparse import OptionParser
    parser = OptionParser()
    parser.add_option("-p", "--port", dest="port", type="int", default=8822,
                      help="AMGA instance port")
    parser.add_option("-i", "--interval", dest="interval", type="int", default=1,
                      help="Poll interval")

    (opts, args) = parser.parse_args()    
    if len(args) == 0:
        host = "localhost"
    else:
        host = args[0]

    print "Querying: " + host + ":" + str(opts.port)
    parser = Parser()
    log = None
#    count = 0
    while True:
        (serverType, xmlData) = pollAMGA(host, opts.port)
        #print xmlData
        oldLog = log
        log = parser.parse(serverType, xmlData)
        print log
#        count += 1
#        print "Queries:",count
        time.sleep(opts.interval)
Beispiel #6
0
def main():
    from optparse import OptionParser
    parser = OptionParser()
    parser.add_option("-p", "--port", dest="port", type="int", default=8822,
                      help="AMGA instance port")
    parser.add_option("-i", "--interval", dest="interval", type="int", default=1,
                      help="Poll interval")

    (opts, args) = parser.parse_args()
    if len(args) == 0:
        host = "localhost"
    else:
        host = args[0]

    print "Querying: " + host + ":" + str(opts.port)

    parser = amgaMonitor.Parser()
    log = None
    (serverType, xmlData) = amgaMonitor.pollAMGA(host, opts.port)
    if serverType == amgaMonitor.ServerType.MDSERVER:
        miner = ServerMiner()
    elif serverType == amgaMonitor.ServerType.MDREPDAEMON:
        miner = RepDaemonMiner()
    else:
        raise Exception("Bad id: " + str(serverType))

    while True:
#        print xmlData
        log = parser.parse(serverType, xmlData)        
        print log
        miner.processLog(log)
        time.sleep(opts.interval)
        (serverType, xmlData) = amgaMonitor.pollAMGA(host, opts.port)
def main():
	"""Generate LV2 plugin artifacts (manifest, ttl files, or main C++ file)
	for the CLAM networks given on the command line."""
	from optparse import OptionParser
	parser = OptionParser(
		usage="usage: %prog [options] network1 network2...",
		version="%prog 1.4\n"+_copyright,
		description=_description
		)
	parser.add_option("-m", "--manifest", dest='createManifest', action='store_true',
		help="Generates the manifest file")
	parser.add_option("-t", "--ttl", dest='createTtls', action='store_true',
		help="Generates ttl files for each network")
	parser.add_option("-i", "--main", dest='createMain', action='store_true',
		help="Generates the main C++ file for the plugin library")
	parser.add_option("-u", "--uribase", dest='uribase', default="http://clam-project.org/examples/lv2",
		help="Specifies the uri base for the plugins", metavar="URIBASE")
	parser.add_option("-d", "--doap", dest='doapfile',
		help="Specifies a doapfile with additional info when generating a ttl", metavar="DOAPFILE")
	parser.add_option("-y", "--binary", dest='binary',
		help="Provides a name for the library binary (required by --ttls)", metavar="LIBBINARY")
	parser.add_option("-g", "--gui-binary", dest='guibinary', default=None,
		help="Provides a name for a ui library binary for the plugin in the ttl", metavar="UIBINARY")

	options, networks = parser.parse_args()

	# Plugin name = network file basename without extension; uri = base + name.
	names = [os.path.splitext(os.path.basename(network))[0] for network in networks]
	uris  = [os.path.join(options.uribase,name) for name in names ]

	if options.createTtls:
		# BUG FIX: the `return` used to sit inside this loop, so only the
		# first network ever got a ttl despite --ttl promising one per network.
		for network, uri, name in zip(networks, uris, names) :
			ttl_parser = make_parser()
			curHandler = ExporterHandler()
			ttl_parser.setContentHandler(curHandler)
			ttl_parser.parse(open(network))
			curHandler.printTTL(network,uri,name,options.doapfile,options.binary,options.guibinary)
		return

	if options.createManifest:
		printManifest(uris,names)
		return

	if options.createMain:
		printCLAM_PLUGIN(networks,uris)
		return
Beispiel #8
0
def main():
    """Mine repeated patterns from a score and write each match as a MIDI file.

    Parses args[0], quantizes the score, extracts patterns, and dumps every
    match into <output_folder>/<infname stem>/pat_<i>/match_<j>.mid.
    """
    usage= 'usage: %prog [options] infname'
    parser= OptionParser(usage=usage)
    parser.add_option('-o', '--output', 
                        dest='output_folder', default='patterns',
                        help='output folder')
    # NOTE(review): options.cache is parsed but never read below -- confirm
    # whether the flag should be forwarded to parser.parse().
    parser.add_option('-c', '--no-cache', 
                        dest='cache', action='store_false', default=True, 
                        help='discard cache')

    options, args= parser.parse_args(argv[1:])
    if len(args) < 1: parser.error('not enaught args')

    # `parser` is rebound from the OptionParser to the score parser here.
    parser= parserclass('models_cache')

    infname= args[0]
    score= parser.parse(infname)
    score= quantize(score)

    output_folder_prefix= options.output_folder

    # Mining parameters: pattern sizes, allowed margins, a threshold, and the
    # key used to compare notes (duration plus silence flag).
    pat_sizes= [4]
    margins= range(3)
    f= 2
    key= lambda n:('%s|%s' % (n.duration, n.is_silence),)

    writer= writerclass()

    # NOTE(review): `results` is never used below.
    results= {}
    patterns= get_score_patterns(score, pat_sizes, margins, f, key)
    # Python 2 dict iteration: take the first (instrument, notes) pair only.
    instr, all_notes= score.notes_per_instrument.iteritems().next()

    # Output root: <prefix>/<input name minus its 4-character extension>.
    output_folder= os.path.join(output_folder_prefix, infname[:-4])
    if not os.path.exists(output_folder):
        os.mkdir(output_folder)

    for i, (pat, matches) in enumerate(patterns.iteritems()):
        pattern_folder= os.path.join(output_folder, 'pat_%s' % i)
        if not os.path.exists(pattern_folder):
            os.mkdir(pattern_folder)
        for j, (start, end) in enumerate(matches):
            notes= all_notes[start:end]
            notes= [n.copy() for n in notes]

            # Rebase the excerpt: first note starts at 0 and each following
            # note starts exactly where the previous one ends.
            notes[0].start= 0
            for prev, next in zip(notes, notes[1:]):
                next.start= prev.start+prev.duration

            # Copy the score but keep only this excerpt for the instrument.
            s= score.copy()
            s.notes_per_instrument={instr:notes}

            writer.dump(s, os.path.join(pattern_folder, 'match_%s.mid' % j))
Beispiel #9
0
def run():
    """Compile every .proto file in a directory into a Python module.

    args[0] is the directory holding .proto files, args[1] the destination
    package directory. Prints a per-file [OKAY]/[FAIL] status line and
    returns 0 on full success, 1 if any file failed.
    """
    usage = "usage: %prog source dest [options]"

    # BUG FIX: `usage` was defined but never handed to the OptionParser.
    parser = OptionParser(usage)
    parser.add_option("-s", "--with-slots",
                      dest="with_slots",
                      action="store_true",
                      help="generate code using __slots__")
    (options, args) = parser.parse_args()

    if len(args) != 2:
        parser.error("exactly two arguments required: path to directory with .proto files and path to destination package")

    d = args[0]
    od = args[1]
    exit_status = 0

    protos = [f for f in os.listdir(d) if f.endswith(".proto")]

    for p in protos:
        sys.stdout.write(("%s..." % p).ljust(70))
        sys.stdout.flush()

        # BUG FIX: close the source handle instead of leaking it.
        with open(os.path.join(d, p)) as src:
            source = src.read()

        try:
            parser = make_parser()
            r = parser.parse(source)

            # A parse that stops before the end of the file is a syntax error;
            # report the line and a snippet of the offending text.
            _, res, l = r
            if l != len(source):
                raise SyntaxError("Syntax error on line %s near %r" % (
                    source[:l].count('\n') + 1,
                    source[l:l+10]))
            s = gen_module([m for m in res if type(m) is tuple],
                    [m for m in res if type(m) is str],
                    [m for m in res if type(m) is list],
                    options.with_slots,
                    )
            # BUG FIX: close the output handle instead of leaking it.
            with open(os.path.join(od, convert_proto_name(p) + ".py"), 'wb') as out:
                out.write(s)
        except Exception:
            # BUG FIX: was a bare `except:`, which also swallowed
            # SystemExit and KeyboardInterrupt.
            sys.stdout.write("[FAIL]\n")
            sys.stdout.flush()
            traceback.print_exc()
            exit_status = 1
        else:
            sys.stdout.write("[OKAY]\n")
            sys.stdout.flush()

    return exit_status
Beispiel #10
0
def main():
    """Cluster LSA-reduced score vectors under many Pycluster configurations.

    Parses all input scores, then for k in {2,4,6,8} applies LSA and runs
    clustering for each (nclusters, npass, method, dist) combination,
    pickling every result to clusters_results.pickle.
    """
    usage= 'usage: %prog [options] infname1 [infname2 ...]'
    parser= OptionParser(usage=usage)
    parser.add_option('-o', '--output', 
                        dest='output_fname', default='cluster.out',
                        help='output fname')
    parser.add_option('-c', '--no-cache', 
                        dest='cache', action='store_false', default=True, 
                        help='discard cache')
    # NOTE(review): -k is parsed but the loop below hard-codes
    # range(2,10,2) for k -- confirm which is intended.
    parser.add_option('-k', '--resulting-dimensionality', 
                        dest='k', default=150, 
                        help='el numero de dimensiones resultantes')

    options, args= parser.parse_args(argv[1:])
    if len(args) < 1: parser.error('not enaught args')

    # `parser` is rebound from the OptionParser to the score parser here.
    parser= parserclass('models_cache')

    infnames= args
    # NOTE(review): outfname is computed but never used; results always go
    # to 'clusters_results.pickle'.
    outfname= options.output_fname

    print 'parsing scores'
    scores= [parser.parse(infname, cache=options.cache) for infname in infnames]

    # Parameter grid for the clustering sweep.
    nclusterss= [2] # range(2,5)
    npasss= [5,10, 15, 20, 25]
    methods= ['a', 'm']
    dists= ['e', 'b', 'c', 'a', 'u', 'x', 's', 'k']
    configs= list(combine(nclusterss, npasss, methods, dists))

    results= {}
    for k in range(2,10,2):
        print 'k=', k
        concept_vectors= apply_lsa(scores, k)
        # Progress step relies on Python 2 integer division.
        step= len(configs)/10
        for i, (nclusters, npass, method, dist) in enumerate(configs):
            if (i+1)%step == 0: print '\t', ((i+1)*100)/len(configs)
            # NOTE(review): npass varies across configs but is not passed to
            # kcluster, so every npass value reruns an identical clustering --
            # confirm whether npass=npass was intended here.
            r= Pycluster.kcluster(concept_vectors, 
                                  nclusters= nclusters, 
                                  method= method, dist= dist)
            results[(k, nclusters, npass, method, dist)]= r

    # Persist the whole sweep with pickle protocol 2.
    f= open('clusters_results.pickle', 'w')
    pickle.dump(results, f, 2)
    f.close()
Beispiel #11
0
def main():
    usage= 'usage: %prog [options] infname'
    parser= OptionParser(usage=usage)
    parser.add_option('-c', '--no-cache', 
                        dest='cache', action='store_false', default=True, 
                        help='discard cache')


    options, args= parser.parse_args(argv[1:])
    if len(args) < 1: parser.error('not enaught args')

    parser= parserclass('models_cache')
    
    infname= args[0]
    score= parser.parse(infname, cache=options.cache)
    #import ipdb;ipdb.set_trace()
    notes= score.notes_per_instrument.values()[0]
    for n in notes: print n
Beispiel #12
0
def main():
    """Render a MIDI score as a graph, written next to the input as .svg."""
    usage= 'usage: %prog [options] infname outfname'
    parser= OptionParser(usage=usage)
    # NOTE(review): --patch is parsed but not read anywhere below.
    parser.add_option('-p', '--patch', dest='patch', type='int', help='patch to select')

    options, args= parser.parse_args(argv[1:])
    if len(args) < 1:
        parser.error('not enaught args')

    infname= args[0]
    outfname= infname.replace('.mid', '.svg')

    midi_parser= MidiScoreParser()
    score= midi_parser.parse(infname)

    # Rank the score, build the graph, and draw it with graphviz dot.
    rank(score)
    graph= build_graph(score)
    graph.draw(outfname, prog='dot')
Beispiel #13
0
def main():
    """Stitch frames of a VideoStitch panorama from a .ptv configuration.

    Expects exactly two positional arguments: the .ptv file and the plugin
    directory. Builds the pano pipeline, stitches frames [0, 100), and
    prints whether stitching succeeded.
    """
    usage = "vs_cmd.py PTV PATH_TO_PLUGINS"
    parser = OptionParser(usage)
    (_, args) = parser.parse_args()
    if len(args) != 2:
        parser.error(usage)
    ptv, path = args

    # Use arguments here
    vs.Logger_setLevel(4)

    vs.loadPlugins(path)
    parser = vs.Parser_create()
    if not parser.parse(ptv):
        raise Exception('could not parse the configuration')
    # Build each pipeline stage from its section of the parsed configuration.
    pano = vs.PanoDefinition_create(parser.getRoot().has("pano"))
    merger = vs.ImageMergerFactory_createMergerFactory(
        parser.getRoot().has("merger"))
    warper = vs.ImageWarperFactory_createWarperFactory(
        parser.getRoot().has("warper"))
    flow = vs.ImageFlowFactory_createFlowFactory(parser.getRoot().has("flow"))
    print("Panorama size: {}x{}".format(pano.width, pano.height))
    output = parser.getRoot().has("output")
    first_frame = 0
    last_frame = 100
    input_factory = vs.DefaultReaderFactory(first_frame, last_frame)
    # BUG FIX: 'wraper' was a typo for 'warper' and raised NameError at runtime.
    controller = vs.createController(vs.PanoDeviceDefinition(), pano,
                                     merger.object(), warper.object(),
                                     flow.object(), input_factory)
    writer = vs.create(output, "test", pano.width, pano.height)
    device = vs.StitcherDevice()
    device.device = 0
    stitcher = controller.createStitcher(device)
    stitch_output = controller.createAsyncStitchOutput(
        device,
        writer.object().getVideoWriter())
    status = stitcher.stitch(stitch_output.object())
    print("CAN I HAZ STITCH ? {}".format(status.ok()))
Beispiel #14
0
def main():
    """Slice a score to the [--from, --to) window and write it as MIDI.

    The end of the window may instead be given as --from plus --duration.
    The output name defaults to <infname stem>-<from>-<to>.mid.
    """
    usage= 'usage: %prog [options] infname [outfname]'
    parser= OptionParser(usage=usage)
    parser.add_option('-f', '--from', dest='moment_from', help='start moment in ticks', default=0)
    parser.add_option('-t', '--to', dest='moment_to', help='end moment in ticks')
    parser.add_option('-d', '--duration', dest='duration', help='duration of the slice')
    parser.add_option('-c', '--no-cache', dest='cache', action='store_false', default=True, help='discard cache')

    options, args= parser.parse_args(argv[1:])
    if len(args) < 1: parser.error('not enaught args')

    # `parser` is rebound from the OptionParser to the score parser here.
    parser= parserclass('models_cache')
    moment_from= int(options.moment_from)
    # Either --to or --duration must be provided; --to wins when both are.
    if options.moment_to is None and options.duration is None:
        parser.error('falta --to y --duration')
    elif options.moment_to is None:
        moment_to= int(options.moment_from)+int(options.duration)
    else:
        moment_to= int(options.moment_to)

    infname= args[0]
    # Default output name: "<input minus 4-char extension>-<from>-<to>.mid".
    if len(args) == 1: outfname= '%s-%s-%s.mid' % (infname[:-4], moment_from, moment_to)
    else: outfname= args[1]

    print 'creating score'
    score= parser.parse(infname, cache=options.cache)
    # Tempo appears to be microseconds per beat, so beat_duration is seconds.
    beat_duration= float(score.tempo)/1e6
    divisions= score.divisions
    # NOTE(review): n.start/divisions*beat_duration converts ticks into
    # seconds, yet --from/--to are documented as "in ticks" -- confirm the
    # intended units. Bounds are asymmetric: start >= from, end < to.
    for instrument, notes in score.notes_per_instrument.iteritems():
        new_notes= [n for n in notes  \
                      if n.start/divisions*beat_duration >= moment_from and \
                         (n.start + n.duration)/divisions*beat_duration< moment_to]
        score.notes_per_instrument[instrument]= new_notes

    print 'dumping'
    writer= writerclass()
    writer.dump(score, outfname)
Beispiel #15
0
def main():
    usage = """usage: %prog [options] /path/to/dump.rdb

Example : %prog --command json -k "user.*" /var/redis/6379/dump.rdb"""

    parser = OptionParser(usage=usage)
    parser.add_option("-c", "--command", dest="command",
                  help="Command to execute. Valid commands are json or diff", metavar="FILE")
                  
    parser.add_option("-f", "--file", dest="output",
                  help="Output file", metavar="FILE")
    parser.add_option("-n", "--db", dest="dbs", action="append",
                  help="Database Number. Multiple databases can be provided. If not specified, all databases will be included.")
    parser.add_option("-k", "--key", dest="keys", default=None,
                  help="Keys to export. This can be a regular expression,"
                       "When picking this mode, we will dump a deep copy of the key")
    parser.add_option("-t", "--type", dest="types", action="append",
                  help="""Data types to include. Possible values are string, hash, set, sortedset, list. Multiple typees can be provided. 
                    If not specified, all data types will be returned""")

    parser.add_option("-p", "--pos", dest="pos",
                  help="""Position in RDB file to skip to, after generated an memory index file this can be used to speed up.
                          The data starts reading at 9 bytes""",
                  default = 9)

    parser.add_option("-m", "--max", dest="max",
                  help=""" Read maximum number of keys, to limit search.""",
                  default = 2e31)


    parser.add_option("-v", "--verbose", dest="verbose",
                  help="""If true dump a deep copy of the data structure""",
                  action = "store_true",
                  default = False)

    parser.add_option("-q", "--quick", dest="quick",
                  help="""If true dump a deep copy of the data structure""",
                  action = "store_true",
                  default = False)
    
    (options, args) = parser.parse_args()
    
    if len(args) == 0:
        parser.error("Redis RDB file not specified")
    dump_file = args[0]
    
    filters = {}
    if options.dbs:
        filters['dbs'] = []
        for x in options.dbs:
            try:
                filters['dbs'].append(int(x))
            except ValueError:
                raise Exception('Invalid database number %s' %x)
    
    if options.keys:
        filters['keys'] = options.keys
    
    if options.types:
        filters['types'] = []
        for x in options.types:
            if not x in VALID_TYPES:
                raise Exception('Invalid type provided - %s. Expected one of %s' % (x, (", ".join(VALID_TYPES))))
            else:
                filters['types'].append(x)


    filters['pos'] = options.pos
    filters['max'] = options.max
    
    # TODO : Fix this ugly if-else code
    callback = None
    if options.output:
        f = open(options.output, "wb")
        if 'diff' == options.command:
            callback = DiffCallback(f)
        elif 'json' == options.command:
            callback = JSONCallback(f)
        elif 'memory' == options.command:
            reporter = PrintAllKeys(f)
            callback = MemoryCallback(reporter, 64, options.verbose)
        else:
            raise Exception('Invalid Command %s' % options.output)
    else:
        if 'diff' == options.command:
            callback = DiffCallback(sys.stdout)
        elif 'json' == options.command:
            callback = JSONCallback(sys.stdout)
        elif 'memory' == options.command:
            reporter = PrintAllKeys(sys.stdout)
            callback = MemoryCallback(reporter, 64, options.verbose)
        else:
            raise Exception('Invalid Command %s' % options.output)


    start = time.clock()
    parser = RdbParser(callback, filters=filters, quick=options.quick)
    parser.parse(dump_file)
    end =  time.clock()

    print "time=%s seconds" % (end-start)
Beispiel #16
0
    # Fragment: the enclosing "def" header is not visible in this chunk.
    # The statements below parse RST module docs and inject javadoc comments
    # into copies of the given Java sources.
    parser = OptionParser()
    parser.add_option("-v", "--verbose", dest="verbose", help="Print verbose log to stdout", action="store_true", default=False)
    parser.add_option("", "--no-warnings", dest="warnings", help="Hide warning messages", action="store_false", default=True)
    parser.add_option("", "--no-errors", dest="errors", help="Hide error messages", action="store_false", default=True)
    parser.add_option("", "--modules", dest="modules", help="comma-separated list of modules to generate comments", metavar="MODS", default=",".join(rst_parser.allmodules))

    # NOTE(review): sys.argv is passed whole, so args[0] is the script name
    # and the path loop below will also visit it -- confirm intended.
    (options, args) = parser.parse_args(sys.argv)
    options.modules = options.modules.split(",")

    if len(args) < 2 or len(options.modules) < 1:
        parser.print_help()
        exit(0)

    # Parse the RST docs of every requested module, located relative to the
    # script's own directory (selfpath is defined elsewhere in the file).
    parser = rst_parser.RstParser(hdr_parser.CppHeaderParser())
    for m in options.modules:
        parser.parse(m, os.path.join(selfpath, "../../" + m))

    parser.printSummary()

    generator = JavadocGenerator(parser.definitions, options.modules)
    generator.verbose = options.verbose
    generator.show_warnings = options.warnings
    generator.show_errors = options.errors

    # For every *.java under each given folder (skipping already-generated
    # *-jdoc.java files), write a documented copy named <name>-jdoc.java
    # into the current working directory.
    for path in args:
        folder = os.path.abspath(path)
        for jfile in [f for f in glob.glob(os.path.join(folder,"*.java")) if not f.endswith("-jdoc.java")]:
            outfile = os.path.abspath(os.path.basename(jfile).replace(".java", "-jdoc.java"))
            generator.document(jfile, outfile)

    generator.printSummary()
Beispiel #17
0
    # Fragment: the enclosing "def" header is not visible in this chunk.
    # (This block duplicates the previous example verbatim.) It parses RST
    # module docs and injects javadoc comments into copies of Java sources.
    parser = OptionParser()
    parser.add_option("-v", "--verbose", dest="verbose", help="Print verbose log to stdout", action="store_true", default=False)
    parser.add_option("", "--no-warnings", dest="warnings", help="Hide warning messages", action="store_false", default=True)
    parser.add_option("", "--no-errors", dest="errors", help="Hide error messages", action="store_false", default=True)
    parser.add_option("", "--modules", dest="modules", help="comma-separated list of modules to generate comments", metavar="MODS", default=",".join(rst_parser.allmodules))

    # NOTE(review): sys.argv is passed whole, so args[0] is the script name
    # and the path loop below will also visit it -- confirm intended.
    (options, args) = parser.parse_args(sys.argv)
    options.modules = options.modules.split(",")

    if len(args) < 2 or len(options.modules) < 1:
        parser.print_help()
        exit(0)

    # Parse the RST docs of every requested module, located relative to the
    # script's own directory (selfpath is defined elsewhere in the file).
    parser = rst_parser.RstParser(hdr_parser.CppHeaderParser())
    for m in options.modules:
        parser.parse(m, os.path.join(selfpath, "../../" + m))

    parser.printSummary()

    generator = JavadocGenerator(parser.definitions, options.modules)
    generator.verbose = options.verbose
    generator.show_warnings = options.warnings
    generator.show_errors = options.errors

    # For every *.java under each given folder (skipping already-generated
    # *-jdoc.java files), write a documented copy named <name>-jdoc.java
    # into the current working directory.
    for path in args:
        folder = os.path.abspath(path)
        for jfile in [f for f in glob.glob(os.path.join(folder,"*.java")) if not f.endswith("-jdoc.java")]:
            outfile = os.path.abspath(os.path.basename(jfile).replace(".java", "-jdoc.java"))
            generator.document(jfile, outfile)

    generator.printSummary()
Beispiel #18
0
    # Fragment: the enclosing function header is not visible here; `code`,
    # `parser`, and `args` come from the surrounding (unseen) scope.
    lines = []
    end_exists = False
    # Truncate `code` at the first line whose second token is "end"; any
    # malformed line (missing second token) or a missing terminator both
    # land in the except branch and abort the program.
    try:
        for i in range(0, len(code)):
            if code[i].split()[1] == r'end':
                code = code[0:i + 1]
                end_exists = True
                break
        if not end_exists:
            raise AssertionError
    except Exception:
        print("EOF (end) expected.")
        sys.exit(-1)
    for line in code:  # run the code
        parser.parse("\n" + line)

    # NOTE(review): `lines` is still empty when handed to DependencyGraph --
    # confirm whether it was meant to be filled during the parse loop above.
    if args.noforward == "yes":
        DG = DependencyGraph(lines)
        noper = Nope()
        noper.add_nops(DG.dependencies, DG.lines, forwarding=False)
    elif args.forward == "yes":
        DG = DependencyGraph(lines)
        noper = Nope()
        noper.add_nops(DG.dependencies, DG.lines, forwarding=True)
    elif args.schedule == "yes":
        DG = DependencyGraph(lines, out_of_order=True)
        DG.get_edges()
        DG.get_dependency_tree()
        #DG.print_dependency_graph() # comment out for debugging
        #DG.print_dependencies()
Beispiel #19
0
def main():
    """CLI entry point: read computational-chemistry output files (.log/.out)
    with cclib, count imaginary frequencies, and either generate displaced
    quick-reaction-coordinate (QRC) structures or, with --qcoord, drive
    automatic single-point stability calculations along normal modes.

    Relies on names defined elsewhere in this file/module: gen_qrc, run_irc,
    Logger, and the `freq` module (LEVELOFTHEORY, CHARGE, MULT).
    """
    # get command line inputs. Use -h to list all possible arguments and default values
    parser = OptionParser(
        usage="Usage: %prog [options] <input1>.log <input2>.log ...")
    parser.add_option("--amp",
                      dest="amplitude",
                      action="store",
                      help="amplitude (default 0.2)",
                      default="0.2",
                      type="float",
                      metavar="AMPLITUDE")
    parser.add_option("--nproc",
                      dest="nproc",
                      action="store",
                      help="number of processors (default 1)",
                      default="1",
                      type="int",
                      metavar="NPROC")
    parser.add_option("--mem",
                      dest="mem",
                      action="store",
                      help="memory (default 4GB)",
                      default="4GB",
                      type="string",
                      metavar="MEM")
    parser.add_option(
        "--route",
        dest="route",
        action="store",
        help="calculation route (defaults to same as original file)",
        default=None,
        type="string",
        metavar="ROUTE")
    # NOTE(review): default=True with action="store_true" means -v can never
    # turn verbosity *off*; confirm this is intentional.
    parser.add_option("-v",
                      dest="verbose",
                      action="store_true",
                      help="verbose output",
                      default=True,
                      metavar="VERBOSE")
    parser.add_option("--auto",
                      dest="auto",
                      action="store_true",
                      help="turn on automatic batch processing",
                      default=False,
                      metavar="AUTO")
    parser.add_option("--name",
                      dest="suffix",
                      action="store",
                      help="append to file name (defaults to QRC)",
                      default="QRC",
                      type="string",
                      metavar="SUFFIX")
    parser.add_option(
        "-f",
        "--freq",
        dest="freq",
        action="store",
        help="request motion along a particular frequency (cm-1)",
        default=None,
        type="float",
        metavar="FREQ")
    parser.add_option(
        "--freqnum",
        dest="freqnum",
        action="store",
        help="request motion along a particular frequency (number)",
        default=None,
        type="int",
        metavar="FREQNUM")

    #arguments for running calcs
    parser.add_option(
        "--qcoord",
        dest="qcoord",
        action="store_true",
        help=
        "request automatic single point calculation along a particular normal mode (number)",
        default=False,
        metavar="QCOORD")
    parser.add_option(
        "--nummodes",
        dest="nummodes",
        action="store",
        help="number of modes for automatic single point calculation",
        default='all',
        type='string',
        metavar="NUMMODES")

    (options, args) = parser.parse_args()

    # Collect input files from raw sys.argv (not from `args`), expanding
    # shell-style globs; only .out/.log extensions are accepted.
    # NOTE(review): os.path.splitext never raises IndexError, so the except
    # clause below looks vestigial — confirm.
    files = []
    if len(sys.argv) > 1:
        for elem in sys.argv[1:]:
            try:
                if os.path.splitext(elem)[1] in [".out", ".log"]:
                    for file in glob(elem):
                        files.append(file)
            except IndexError:
                pass

    for file in files:
        # parse compchem output with cclib & count imaginary frequencies
        parser = cclib.io.ccopen(file)
        data = parser.parse()

        #for i in range(1,len(data.atomcoords)):
        #    mw_distance = mwdist(data.atomcoords[i], data.atomcoords[1], data.atomnos)
        #    print(mw_distance)

        # Imaginary modes show up as negative vibrational frequencies.
        if hasattr(data, 'vibfreqs'):
            im_freq = len([val for val in data.vibfreqs if val < 0])
        else:
            print('o   {} has no frequency information: exiting'.format(file))
            sys.exit()

        if not options.qcoord:
            # QRC generation mode: skip minima in batch mode, otherwise
            # distort along imaginary modes / a requested frequency.
            if im_freq == 0 and options.auto != False:
                print('x   {} has no imaginary frequencies: skipping'.format(
                    file))
            else:
                if options.freq == None and options.freqnum == None:
                    print('o   {} has {} imaginary frequencies: processing'.
                          format(file, im_freq))
                elif options.freq != None:
                    print(
                        'o   {} will be distorted along the frequency of {} cm-1: processing'
                        .format(file, options.freq))
                elif options.freqnum != None:
                    print(
                        'o   {} will be distorted along the frequency number {}: processing'
                        .format(file, options.freqnum))
                qrc = gen_qrc(file, options.amplitude, options.nproc,
                              options.mem, options.route, options.verbose,
                              options.suffix, options.freq, options.freqnum)

        #doing automatic calculations (single points for stability check)
        else:
            log_output = Logger("RUNIRC", 'dat', options.nummodes)
            if im_freq == 0:
                log_output.Writeonlyfile(
                    'o   {} has no imaginary frequencies: check for stability'.
                    format(file))
            # Amplitudes 0.0 .. 0.9 in steps of 0.1 (forward direction only).
            amp_base = [round(elem, 2) for elem in np.arange(0, 1, 0.1)]
            # amp_base_backward =  [round(elem, 2) for elem in np.arange(-1,0,0.1) ]
            energy_base = []
            root_dir = os.getcwd()
            parent_dir = os.getcwd() + '/' + file.split('.')[0]
            if not os.path.exists(parent_dir):
                os.makedirs(parent_dir)
            log_output.Writeonlyfile(
                'o  Entering directory {}'.format(parent_dir))

            # getting energetics of the current molecule
            energy_base.append(data.freeenergy)
            #creating folders for number of normal modes
            if options.nummodes == 'all':
                freq_range = range(1, len(data.vibfreqs) + 1)
            else:
                freq_range = range(1, len(data.vibfreqs) + 1)
                freq_range = freq_range[:int(options.nummodes)]
            # One sub-directory per normal mode; one run_irc call per amplitude.
            for num in freq_range:
                num_dir = parent_dir + '/' + 'num_' + str(num)
                if not os.path.exists(num_dir):
                    os.makedirs(num_dir)
                log_output.Writeonlyfile(
                    'o  Entering directory {}'.format(num_dir))
                shutil.copyfile(root_dir + '/' + file, num_dir + '/' + file)
                os.chdir(num_dir)
                for amp in amp_base:
                    # e.g. amp 0.3 -> suffix "num_1_amp_03"
                    suffix = 'num_' + str(num) + '_amp_' + str(amp).split(
                        '.')[0] + str(amp).split('.')[1]
                    run_irc(file, options, num, amp, freq.LEVELOFTHEORY,
                            suffix, freq.CHARGE, freq.MULT, log_output)
                    log_output.Writeonlyfile('o  Writing to file {}'.format(
                        file.split('.')[0] + '_' + suffix))
                os.chdir(parent_dir)
    # (Fragment: these `continue` statements belong to a per-filename loop
    # that starts above this excerpt.)
    # These are duplicates, only found via a contents page still
    # linked to via a "Back to contents" link.
    if re.search('day-wa-01_war0816\.htm',filename):
        continue
    if re.search('day-wa-03_war1110\.htm',filename):
        continue
    if re.search('day-wa-03_war0922\.htm',filename):
        continue    
    if re.search('day-wa-03_war0805\.htm',filename):
        continue

    # This is a misnamed file, a duplicate of wa-02_wa1004.htm
    if re.search('day-wa-02_wa0104\.htm',filename):
        continue

    parser = Parser()

    parser.parse(filename)

    # try:
    #     parser.parse(filename)
    # except Exception, e:
    #     if verbose: print "An unhandled exception occured in parsing: "+filename
    #     if verbose: print "The exception was: "+str(e)
    #     traceback.print_exc(file=sys.stdout)
    #     files_and_exceptions.append( (filename,e) )

# if verbose: print "All exceptions:"
# for e in files_and_exceptions:
#     if verbose: print "%s: %s" % e
Beispiel #21
0
    parser.add_option("-d", "--key-directory", default="", dest="key_directory",
                      help="Specify a parent directory for keys")

    (options, args) = parser.parse_args()

    # args[0] is the keys.conf config; args[1:] are mac_permissions.xml files.
    if len(args) < 2:
        parser.error("Must specify a config file (keys.conf) AND mac_permissions.xml file(s)!")

    logging.basicConfig(level=logging.INFO if options.verbose == True else logging.WARN)

    # Read the config file
    config = ParseConfig()
    config.read(args[0])

    os.chdir(options.root)

    # "stdout" is a sentinel value; anything else is treated as a file path.
    output_file = sys.stdout if options.output_file == "stdout" else open(options.output_file, "w")
    logging.info("Setting output file to: " + options.output_file)

    # Generate the key list
    key_map = config.generateKeyMap(options.target_build_variant.lower(), options.key_directory)
    logging.info("Generate key map:")
    for k in key_map:
        logging.info(k + " : " + str(key_map[k]))
    # Generate the XML file with markup replaced with keys
    parser = make_parser()
    parser.setContentHandler(ReplaceTags(key_map, output_file))
    for f in args[1:]:
        parser.parse(f)
Beispiel #22
0
    sys.exit(1)

# (Fragment: the argument-count check guarding the sys.exit above is cut off.)
data_folder   = args[0]
output_folder = args[1]


# Import the data report
report = DataReport()
if not(report.parse(data_folder)):
    print 'ERROR: Failed to parse the data report'
    sys.exit(1)


# Parse the data written by the instrument
parser = DataParser()
(data_format, classification_results) = parser.parse(report.data(INSTRUMENT_NAME))


# Only keep the images from the test set
# (Python 2 script - note the print statements above; filter() returns a list here.)
classification_results = filter(lambda x: report.image(x.image_index).test_index is not None, classification_results)


# Complete the data if it was saved in an old format
if data_format != '1.1':
    label_names = []
    for label in range(0, report.nbLabels()):
        label_names.append(report.labelName(label))

    database_name = report.getExperimentSettingDatabaseName()

    for classification_result in classification_results:
Beispiel #23
0
        # (Fragment: tail of a SAX element-close handler whose def is above
        # this excerpt — presumably endElement; confirm.)
        if name == "msg":
            self.outFile.write("\n")
        else:
            self.outFile.write(",")

    def characters(self, ch):
        # SAX callback: write element text with newlines/tabs/commas stripped
        # so the output stays one CSV record per "msg" element.
        self.outFile.write(ch.strip("\n\t,"))


if __name__ == "__main__":
    # Command line: a single -f/--file option naming the XML input file.
    opt_parser = OptionParser()
    opt_parser.add_option(
        "-f", "--file",
        dest="filename",
        help="write report to FILE",
        metavar="FILE",
        default="none")
    options, _args = opt_parser.parse_args()

    # "none" is the sentinel default: no input file given, nothing to do.
    if options.filename == "none":
        exit()

    # Stream the XML through a SAX parser, converting it to CSV on the fly.
    sax_parser = xml.sax.make_parser()
    fileCSV = open("dump.csv", "w")
    sax_parser.setContentHandler(XML2CSV(fileCSV))
    sax_parser.parse(options.filename)
Beispiel #24
0
#
# cleanup / preparation
#

# Recreate the per-corpus output directory from scratch.
cmd = 'rm -rf %s/%s' % (QASRC_DIRFN, corpusname)
logging.info(cmd)
os.system(cmd)

cmd = 'mkdir -p %s/%s' % (QASRC_DIRFN, corpusname)
logging.info(cmd)
os.system(cmd)

#
# parse XML
#

# Stream the bz2-compressed wiki dump through a SAX content handler for the
# selected language.
parser = make_parser()
parser.setContentHandler(WikiContentHandler(WIKIXML[options.lang]))

article_cnt = 0
article_tot = 0

wikisize = os.path.getsize(WIKIXML[options.lang])

with open(WIKIXML[options.lang], 'rb') as wikibz2f:

    # with codecs.getreader('utf8')(BZ2File(WIKIXML[options.lang], 'r')) as wikif:
    # Decompress on the fly and decode as UTF-8 before handing to SAX.
    with codecs.getreader('utf8')(BZ2File(wikibz2f)) as wikif:

        parser.parse(wikif)
Beispiel #25
0
  # (Fragment: the opening `try:` lies above this excerpt. Python 2 code.)
  tarball = tarfile.open(args[0], 'r')
except Exception, e:
  print "Error: %s" % str(e)
  sys.exit(1)

queue = Queue.Queue()

#start the workers
# Daemon threads: they die with the main process once the queue is drained.
for i in range(int(options.cores)):
  worker = Worker(queue)
  worker.setDaemon(True)
  worker.start()

#fill the queue
parser=ConfigParser(args[0])
requests=parser.parse() 

for request in requests:
  

  # Skip requests whose target directory already exists, unless --force.
  if os.path.exists(request._dir) and options.force is False:
    print 'Directory '+request._dir+' exists already, doing nothing.'
    print 'You can overwrite with the --force option'
    continue
  
  if not os.path.exists(request._dir):
    os.mkdir(request._dir)
 

  commandBuilder = LocalCommandBuilder(request, options.events) #, options.batch)
  command = commandBuilder.build()
Beispiel #26
0
                      help="don't delete the temporary files created "\
                      "(useful for finding problems)",
                      default=False)

    options, files = parser.parse_args(argv[1:])

    # (Python 2 script: print statements below.)
    if not files:
        print "Error: no files to process"
        print >> sys.stderr, __doc__
        return 1

    # gccxml flags pass straight through; --quiet inverts verbosity.
    options.flags = options.gccxml_options
    options.verbose = not options.quiet

    parser = cparser.IncludeParser(options)
    parser.parse(files)

def main(argv=None):
    """Run compile_to_xml over argv, mapping CompilerError to exit status 1.

    argv defaults to sys.argv so the function can be called with no
    arguments from the __main__ guard below. (Python 2 except syntax.)
    """
    if argv is None:
        argv = sys.argv

    try:
        compile_to_xml(argv)
    except cparser.CompilerError, detail:
        print >> sys.stderr, "CompilerError:", detail
        return 1


if __name__ == "__main__":
    sys.exit(main())
Beispiel #27
0
def main():
    """CLI entry point: export a Redis RDB dump via a --command callback.

    Supported commands: json, diff, memory. The repeatable -n/-k/-t options
    build database/key/type filters; output goes to --file when given,
    otherwise to stdout.

    Raises:
        Exception: on an invalid database number, data type, or command.
    """
    usage = """usage: %prog [options] /path/to/dump.rdb

Example : %prog --command json -k "user.*" /var/redis/6379/dump.rdb"""

    parser = OptionParser(usage=usage)
    parser.add_option(
        "-c",
        "--command",
        dest="command",
        help="Command to execute. Valid commands are json or diff",
        metavar="FILE")

    parser.add_option("-f",
                      "--file",
                      dest="output",
                      help="Output file",
                      metavar="FILE")
    parser.add_option(
        "-n",
        "--db",
        dest="dbs",
        action="append",
        help=
        "Database Number. Multiple databases can be provided. If not specified, all databases will be included."
    )
    parser.add_option("-k",
                      "--key",
                      dest="keys",
                      default=None,
                      help="Keys to export. This can be a regular expression")
    parser.add_option(
        "-t",
        "--type",
        dest="types",
        action="append",
        help=
        """Data types to include. Possible values are string, hash, set, sortedset, list. Multiple typees can be provided. 
                    If not specified, all data types will be returned""")

    (options, args) = parser.parse_args()

    if len(args) == 0:
        parser.error("Redis RDB file not specified")
    dump_file = args[0]

    # Translate the repeatable -n/-k/-t options into RdbParser filters.
    filters = {}
    if options.dbs:
        filters['dbs'] = []
        for x in options.dbs:
            try:
                filters['dbs'].append(int(x))
            except ValueError:
                raise Exception('Invalid database number %s' % x)

    if options.keys:
        filters['keys'] = options.keys

    if options.types:
        filters['types'] = []
        for x in options.types:
            if x not in VALID_TYPES:
                raise Exception(
                    'Invalid type provided - %s. Expected one of %s' %
                    (x, (", ".join(VALID_TYPES))))
            else:
                filters['types'].append(x)

    def _make_callback(out):
        # Map --command to its parser callback; 'memory' samples key sizes
        # via a MemoryCallback (64-byte buckets) wrapping PrintAllKeys.
        if 'diff' == options.command:
            return DiffCallback(out)
        elif 'json' == options.command:
            return JSONCallback(out)
        elif 'memory' == options.command:
            return MemoryCallback(PrintAllKeys(out), 64)
        # BUGFIX: report the offending command (was `options.output`).
        raise Exception('Invalid Command %s' % options.command)

    if options.output:
        with open(options.output, "wb") as f:
            # BUGFIX: this branch used to build RdbParser WITHOUT `filters`,
            # so -n/-k/-t silently had no effect whenever -f was given.
            parser = RdbParser(_make_callback(f), filters=filters)
            parser.parse(dump_file)
    else:
        parser = RdbParser(_make_callback(sys.stdout), filters=filters)
        parser.parse(dump_file)
Beispiel #28
0
def main():
    """CLI entry point for exporting/inspecting a Redis RDB dump file.

    --command selects the parser callback (json, diff, justkeys,
    justkeyvals, memory, protocol); -n/-k/-o/-t build filters; output goes
    to --file, or to a binary wrapper around stdout.

    Raises:
        Exception: on an invalid database number, data type, or command.
    """
    usage = """usage: %prog [options] /path/to/dump.rdb

Example : %prog --command json -k "user.*" /var/redis/6379/dump.rdb"""

    parser = OptionParser(usage=usage)
    parser.add_option("-c", "--command", dest="command",
                  help="Command to execute. Valid commands are json, diff, justkeys, justkeyvals, memory and protocol", metavar="FILE")
    parser.add_option("-f", "--file", dest="output",
                  help="Output file", metavar="FILE")
    parser.add_option("-n", "--db", dest="dbs", action="append",
                  help="Database Number. Multiple databases can be provided. If not specified, all databases will be included.")
    parser.add_option("-k", "--key", dest="keys", default=None,
                  help="Keys to export. This can be a regular expression")
    parser.add_option("-o", "--not-key", dest="not_keys", default=None,
                  help="Keys Not to export. This can be a regular expression")
    parser.add_option("-t", "--type", dest="types", action="append",
                  help="""Data types to include. Possible values are string, hash, set, sortedset, list. Multiple typees can be provided. 
                    If not specified, all data types will be returned""")
    parser.add_option("-b", "--bytes", dest="bytes", default=None,
                  help="Limit memory output to keys greater to or equal to this value (in bytes)")
    parser.add_option("-l", "--largest", dest="largest", default=None,
                  help="Limit memory output to only the top N keys (by size)")
    parser.add_option("-e", "--escape", dest="escape", choices=ESCAPE_CHOICES,
                      help="Escape strings to encoding: %s (default), %s, %s, or %s." % tuple(ESCAPE_CHOICES))

    (options, args) = parser.parse_args()

    if len(args) == 0:
        parser.error("Redis RDB file not specified")
    dump_file = args[0]

    # Translate the repeatable/optional options into RdbParser filters.
    filters = {}
    if options.dbs:
        filters['dbs'] = []
        for x in options.dbs:
            try:
                filters['dbs'].append(int(x))
            except ValueError:
                raise Exception('Invalid database number %s' % x)

    if options.keys:
        filters['keys'] = options.keys

    if options.not_keys:
        filters['not_keys'] = options.not_keys

    if options.types:
        filters['types'] = []
        for x in options.types:
            if x not in VALID_TYPES:
                raise Exception('Invalid type provided - %s. Expected one of %s' % (x, (", ".join(VALID_TYPES))))
            else:
                filters['types'].append(x)

    out_file_obj = None
    try:
        if options.output:
            out_file_obj = open(options.output, "wb")
        else:
            # Prefer not to depend on Python stdout implementation for writing binary.
            out_file_obj = os.fdopen(sys.stdout.fileno(), 'wb')

        try:
            callback = {
                'diff': lambda f: DiffCallback(f, string_escape=options.escape),
                'json': lambda f: JSONCallback(f, string_escape=options.escape),
                'justkeys': lambda f: KeysOnlyCallback(f, string_escape=options.escape),
                'justkeyvals': lambda f: KeyValsOnlyCallback(f, string_escape=options.escape),
                'memory': lambda f: MemoryCallback(PrintAllKeys(f, options.bytes, options.largest),
                                                   64, string_escape=options.escape),
                'protocol': lambda f: ProtocolCallback(f, string_escape=options.escape)
            }[options.command](out_file_obj)
        except KeyError:
            # BUGFIX: this was a bare `except:`, which also swallowed genuine
            # errors raised while constructing a *valid* callback and
            # re-labelled them "Invalid Command". Only an unknown --command
            # (a KeyError on the dispatch dict) should produce that error.
            raise Exception('Invalid Command %s' % options.command)

        if not PYTHON_LZF_INSTALLED:
            eprint("WARNING: python-lzf package NOT detected. " +
                "Parsing dump file will be very slow unless you install it. " +
                "To install, run the following command:")
            eprint("")
            eprint("pip install python-lzf")
            eprint("")

        parser = RdbParser(callback, filters=filters)
        parser.parse(dump_file)
    finally:
        # Only close what we opened; never close the stdout wrapper here.
        if options.output and out_file_obj is not None:
            out_file_obj.close()
Beispiel #29
0
def main(argv):
    """CLI entry point: convert a tagged PDF (PDF/A) or a pdfminer XML dump
    into an hOCR document.

    argv[1] is the input file and argv[2] the output file (argv[0] is the
    program name, hence the ``len(args) != 3`` check). Layout can come from
    the PDF's structural tags (default), from pdfminer (-p), or from a
    custom columnizer module (-l/-c). Python 2 code (print statement below).
    """
    #global globalX, globalY
    usage = "%prog [OPTIONS] INPUT_FILE OUTPUT_FILE"
    parser = OptionParser(usage = usage, version = "pdfa2hocr 0.2")
    parser.add_option("-m", "--mapping", help="mapping from PDF/A tags to hOCR elements", dest="mapping", default=None)
    parser.add_option("-c", "--columns", help="divide text into columns", dest="columns", default=None)
    parser.add_option("-p", "--pdfminer", action="store_true", help="analyze layout using pdfminer", dest="pdfminer", default=False)
    parser.add_option("-l", "--module", help="custom layout analyzing module", dest="module", default=None)    
    parser.add_option("-v", "--verbose", action="store_true", help="write information about progress", dest="verbose", default=False)
    parser.add_option("-i", "--ignore-text-groups", action="store_true", help="ignore textgroups from pdfminer (textboxes only - simpler layout)", dest="ignore", default=False)
    parser.add_option("-g", "--ignore-pages", help="pages which should not be divided into columns", dest="pages", default="")
    parser.add_option("-f", "--font-mapping", help="mapping of PostScript font names into html style", dest="fontMap", default=None)
    parser.add_option("-u", "--icu", help="use ICU word segmentation algorithm", dest="icu", default=None)
    parser.add_option("-t", "--special-font-tags", action="store_true", help="use special font style hOCR tags", dest="tags", default=False)
    parser.add_option("-r", "--resolution", help="resolution of page in pixels", dest="resolution", default=None)
    (options, args) = parser.parse_args(argv)
    if len(args) != 3:
        #print args
        parser.print_help()
        exit()
    #print args
    #print "a", options.columns, options.ignore, options.module
    #fin = "C:\\Users\\to\\Documents\\Próbka2.pdf"
    #fin = "C:\\Users\\to\\Desktop\\linde\\Linde2edK_1_0133-0222.pdf"
    #fin = "C:\\Users\\to\\Desktop\\pliki\\FRiLindeJSBkor1-50.pdf"
    #fin = "C:\\Users\\to\\Desktop\\linde\\wyn.xml"
    #fout = "C:\\Users\\to\\Documents\\a.html"
    fin = args[1]
    fout = args[2]
    if not os.path.exists(fin):
        sys.stderr.write("File " + fin + " does not exist\n")
        exit()
    # TODO: E check whether the path to fout is valid
    map = None
    if options.fontMap != None:
        if not os.path.exists(options.fontMap):
            sys.stderr.write("File " + options.fontMap + " does not exist\n")	
            exit()
        map = FontMap(options.fontMap) # load the mapping of PostScript font
            # names to font families from the file
    if options.module != None:
        if not os.path.exists(options.module):
            sys.stderr.write("File " + options.module + " does not exist\n")
            exit()
        if options.module[-3:] != ".py":
            sys.stderr.write("File " + options.module + " should have .py " +
                             "extension\n")
            exit()
    if options.mapping != None:
        if not os.path.exists(options.mapping):
            sys.stderr.write("File " + options.mapping + " does not exist\n")
            exit()
    if options.resolution != None:
        # NOTE(review): the digit/zero validation below runs only AFTER the
        # int() conversions, so a malformed -r value raises ValueError here
        # instead of printing the friendly error message — confirm intended.
        dims = options.resolution.split("x")
        utils.globalX = int(dims[0])        
        utils.globalY = int(dims[1])
        if (not str.isdigit(dims[0])) or int(dims[0]) == 0 or (not str.isdigit(dims[1])) or int(dims[1]) == 0:
            sys.stderr.write("Invalid parameters of -r/--resolution option")
            exit()
    if options.pdfminer: # read the PDF and analyze page layout with PDFMiner
        lib = XMLLib(map=map) # create an object that mimics TagLib (see
            # TagLib and XMLLib)
        lib.loadPTree(fin) # load the page resource dictionaries
        parser = PDFMinerParser()
        (columnizer, cols) = processModuleAndColumnsOptions(options)
        print options.columns
        if columnizer != None:
            columnizer.setCols(cols) # set the number of columns to split into
        if options.mapping == None and columnizer == None: # export without
                # additional page-layout analysis
            if not parser.extractHOCRFromPDFDirect(fin, lib, fout, options.ignore, options.verbose, map, options.icu, options.tags):
                    # see the description of extractHOCRFromPDFDirect
                sys.stderr.write("File " + fin + " is not a PDF file.\n")
            exit()
        else:
            hocr = HOCRExporter(None, fout, lib, mapping=options.mapping, xml=True, verbose=options.verbose, fontMap=map, icu=options.icu, tags=options.tags)
                # create the hOCR exporter object
            if not parser.extractHOCRFromPDF(fin, lib, hocr, columnizer, options.ignore):
                    # see the description of extractHOCRFromPDF
                sys.stderr.write("File " + fin + " is not a PDF file.\n")
            exit()
    else:
        try:
            lib = TagLib(map=map) # create the object serving as the pdfminer
                # interface for a specific file
            if not lib.initialize(fin): # open the PDF file with pdfminer
                # it turned out to be an XML file after all:
                # TODO: F convert page straight from SAX to hOCR
                if options.fontMap == None:
                    sys.stderr.write("Warning: pdfmimer results conversion to hOCR without font map specified. In resulting hOCR fonts won't display properly in browser.\n")
                parser = PDFMinerParser()
                lib = XMLLib(map=map) # create an object that mimics TagLib
                    # (see TagLib and XMLLib)
                parser.parse(fin, options.ignore, lib=lib) # read the XML file
                root = parser.getResult() # get the root of the structure tree
                (columnizer, cols) = processModuleAndColumnsOptions(options)
                if columnizer != None:
                    columnizer.columnize(root, cols) # analyze the page layout
                    if columnizer.isFatal(): # fatal columnizer error -
                            # nothing could be split
                        sys.stderr.write(columnizer.getLastMessage())
                        sys.exit()
                    elif columnizer.isError(): # some pages could not be split
                        for m in columnizer.getMessages():
                            sys.stderr.write("Warning: " + m + "\n")
                hocr = HOCRExporter(root, fout, lib, mapping=options.mapping, xml=True, verbose=options.verbose, fontMap=map, icu=options.icu, tags=options.tags)
                    # create the hOCR converter object
            else: # it really is a PDF file
                if options.columns != None:
                    sys.stderr.write("Warning: --columns option specified for" +
                                     " export using PDF structural tags. Option" +
                                     " ignored.\n")
                if options.module != None:
                    sys.stderr.write("Warning: --module option specified for" +
                                     " export using PDF structural tags. Option" +
                                     " ignored.\n")
                if options.ignore:
                    sys.stderr.write("Warning: --ignore option specified for" +
                                     " export using PDF structural tags. Option" +
                                     " ignored.\n")
                if options.pages != "":
                    sys.stderr.write("Warning: --ignore-pages option specified for" +
                                     " export using PDF structural tags. Option" +
                                     " ignored.\n")
                if not lib.hasRoot(): # there is no StructTreeRoot
                    sys.stderr.write("PDF file does not contain logical strucutre.\n")
                    exit()
                root = lib.getRoot()          
                hocr = HOCRExporter(root, fout, lib, mapping=options.mapping, verbose=options.verbose, fontMap=map, icu=options.icu, tags=options.tags)
                    # create the PDF exporter object
        except MyException:
            exit()
    hocr.export() # export the PDF file
    hocr.save()
Beispiel #30
0
def main():
    """CLI entry point: dump a Redis RDB file as json/diff/memory output.

    The repeatable -n/-k/-t options build database/key/type filters.
    Output goes to --file when given (opened in binary mode and closed on
    exit), otherwise to stdout.

    Raises:
        Exception: on an invalid database number, data type, or command.
    """
    usage = """usage: %prog [options] /path/to/dump.rdb

Example : %prog --command json -k "user.*" /var/redis/6379/dump.rdb"""

    parser = OptionParser(usage=usage)
    parser.add_option(
        "-c",
        "--command",
        dest="command",
        help="Command to execute. Valid commands are json or diff",
        metavar="FILE")

    parser.add_option("-f",
                      "--file",
                      dest="output",
                      help="Output file",
                      metavar="FILE")
    parser.add_option(
        "-n",
        "--db",
        dest="dbs",
        action="append",
        help=
        "Database Number. Multiple databases can be provided. If not specified, all databases will be included."
    )
    parser.add_option("-k",
                      "--key",
                      dest="keys",
                      default=None,
                      help="Keys to export. This can be a regular expression")
    parser.add_option(
        "-t",
        "--type",
        dest="types",
        action="append",
        help=
        """Data types to include. Possible values are string, hash, set, sortedset, list. Multiple typees can be provided. 
                    If not specified, all data types will be returned""")

    (options, args) = parser.parse_args()

    if len(args) == 0:
        parser.error("Redis RDB file not specified")
    dump_file = args[0]

    # Translate the repeatable -n/-k/-t options into RdbParser filters.
    filters = {}
    if options.dbs:
        filters['dbs'] = []
        for x in options.dbs:
            try:
                filters['dbs'].append(int(x))
            except ValueError:
                raise Exception('Invalid database number %s' % x)

    if options.keys:
        filters['keys'] = options.keys

    if options.types:
        filters['types'] = []
        for x in options.types:
            if x not in VALID_TYPES:
                raise Exception(
                    'Invalid type provided - %s. Expected one of %s' %
                    (x, (", ".join(VALID_TYPES))))
            else:
                filters['types'].append(x)

    cmds = {
        'diff': DiffCallback,
        'json': JSONCallback,
        'memory': lambda r: MemoryCallback(r, 64)
    }
    callback_factory = cmds.get(options.command)
    if callback_factory is None:
        # BUGFIX: fixed the "Caommand" typo and report the command itself
        # (was `options.output`, the output *file name*).
        raise Exception('Invalid Command %s' % options.command)

    # BUGFIX: the opened file used to be assigned to a misspelled name
    # ("desitnaion"), so --file was silently ignored and output always went
    # to stdout.
    destination = sys.stdout
    if options.output:
        destination = open(options.output, 'wb')

    try:
        parser = RdbParser(callback_factory(destination), filters=filters)
        parser.parse(dump_file)
        return True
    finally:
        # BUGFIX: only close what we opened; the original `with destination`
        # block also closed sys.stdout when no --file was given.
        if options.output:
            destination.close()
Beispiel #31
0
            # (Fragment: exception-handler tail of a method whose start is
            # above this excerpt; `e` and `file_name` are bound there.)
            exc_type, exc_value, exc_traceback = sys.exc_info()
            stdlogger.error("Erreur parsing file:" + file_name)
            stdlogger.error(
                "Error in convert_url_to_html method, stack %s" % (e))
            stdlogger.error(e, exc_info=True)
        # On any failure the method falls through to an empty result.
        return ''


if __name__ == "__main__":
    # Command line: debug/quiet logging switches plus an output file name.
    cli = OptionParser(usage="usage: %prog http://www.server.com/page.html",
                       description="Parse favori_file")
    cli.add_option("-d", "--debug",
                   action="store_true",
                   dest="debug",
                   help="Turn on debug logging")
    cli.add_option("-q", "--quiet", action="store_true", dest="quiet",
                   help="turn off all logging")
    cli.add_option("-o", "--output", action="store", dest="output",
                   default="output.html",
                   help="output file name, defaults to output.html")
    options, args = cli.parse_args()

    # Pick the log level: -d wins over -q; the default is INFO.
    if options.debug:
        log_level = logging.DEBUG
    elif options.quiet:
        log_level = logging.ERROR
    else:
        log_level = logging.INFO
    logging.basicConfig(level=log_level)

    # Parse every favourites file named on the command line.
    for file_name in args:
        stdlogger.info("Parsing file_name " + file_name)
        parser = FavoriteParser()
        parser.parse(file_name)
Beispiel #32
0
def main():
    """Train one HarmonyHMM per input MIDI file, draw each model as a PNG
    graph, and write a CSV matrix of pairwise model (dis)similarities to the
    -o output file.

    Usage: one or more MIDI filenames as positional args; -o is required.
    NOTE(review): relies on module-level `argv`, `MidiScoreParser`,
    `measure_interval_size`, `HarmonyHMM`, `defaultdict` and Python 2
    semantics (iteritems, xrange, dict.keys() indexing).
    """
    usage= 'usage: %prog [options] midis'
    parser= OptionParser(usage=usage)
    parser.add_option('--n-measures', dest='n_measures', default=1, type='int', help='if the partition algorithm is MEASURE, especifies the number of measures to take as a unit')
    parser.add_option('-o', dest='outfname', help='the output file')


    options, args= parser.parse_args(argv[1:])
    if len(args) < 1: parser.error('not enaught args')

    outfname= options.outfname
    if outfname is None:
        parser.error('missing outfname')

    infnames= args[:]

    # NOTE: `parser` is rebound from the OptionParser to the MIDI parser here.
    parser= MidiScoreParser()
    models= []
    for infname in infnames: 
        score= parser.parse(infname)
        # Drops into the debugger on a failed parse rather than failing cleanly.
        if not score: import ipdb;ipdb.set_trace() # why could this return None?

        #########
        # PARSING ADJUSTMENTS: keep only the first voice's notes with a
        # positive duration, and collapse the score to a single instrument.
        notes= score.get_first_voice()
        notes= [n for n in notes if n.duration > 0]
        instr= score.notes_per_instrument.keys()[0]
        patch= instr.patch
        channel= instr.channel
        score.notes_per_instrument= {instr:notes}
        #########


        # Train one HMM per input file and keep its model for comparison.
        interval_size= measure_interval_size(score, options.n_measures)
        algorithm= HarmonyHMM(interval_size, instrument=patch, channel=channel)
        algorithm.train(score)
        algorithm.create_model()
        models.append(algorithm.model)


    def sim(m1, m2):
        """
        Computes H(m1|m2): a cross-entropy-style score of m1's transition
        probabilities weighted by m2's stationary distribution.
        """
        from math import log
        ans= 0
        for s1 in m2.state_transition:
            for s2, prob in m1.state_transition.get(s1, {}).iteritems():
                ans+= m2.pi[s1]*prob*log(prob)
        
        return -ans


    from os import path
    def get_node_name(ticks):
        # Render a tick count as "<quarters> + <fraction>" relative to the
        # last parsed score's divisions (closes over `score` from the loop).
        n_quarters= 0
        while ticks >= score.divisions:
            ticks-= score.divisions
            n_quarters+= 1

        if ticks > 0:
            f= Fraction(ticks, score.divisions)
            if n_quarters > 0: return "%s + %s" % (n_quarters, f)
            else: return repr(f)
        return "%s" % n_quarters 
    # Draw each model's state-transition graph to <input-basename>.png.
    for i, m in enumerate(models):
        m.pi= m.calc_stationationary_distr()

        from pygraphviz import AGraph
        from utils.fraction import Fraction
            
           
        g= AGraph(directed=True, strict=False)
        for n1, adj in m.state_transition.iteritems():
            n1_name= get_node_name(n1)
            for n2, prob in adj.iteritems():
                n2_name= get_node_name(n2)
                g.add_edge(n1_name, n2_name)
                e= g.get_edge(n1_name, n2_name)
                e.attr['label']= str(prob)[:5]
        model_fname= path.basename(infnames[i]).replace('mid', 'png')
        g.draw(model_fname, prog='dot', args='-Grankdir=LR')                

    # Pairwise similarity matrix, computed in both directions (sim is not
    # symmetric).
    sims= defaultdict(dict)
    for i, m1 in enumerate(models):
        for j in xrange(i+1, len(models)):
            m2= models[j]
            sims[path.basename(infnames[i])][path.basename(infnames[j])]= sim(m1, m2)
            sims[path.basename(infnames[j])][path.basename(infnames[i])]= sim(m2, m1)
    
    import csv

    # Write the similarity matrix as CSV with filenames as row/column heads.
    f= open(outfname, 'w')
    writer= csv.writer(f)
    head= sorted(sims)
    head.insert(0, "-")
    writer.writerow(head)
    for (k, row) in sorted(sims.iteritems(), key=lambda x:x[0]):
        row= sorted(row.iteritems(), key=lambda x:x[0])
        row= [t[1] for t in row]
        row.insert(0, k)
        writer.writerow(row)

    f.close()
    return

    # NOTE(review): everything below is unreachable (dead code after the
    # `return` above) — it would have drawn a graph of the similarity matrix.
    from pygraphviz import AGraph
    from utils.fraction import Fraction
        
       

    g= AGraph(directed=True, strict=False)
    for infname1, adj in sims.iteritems():
        for infname2, sim in adj.iteritems():
            if sim>0.2 and sim!=0: continue
            g.add_edge(infname1, infname2)
            e= g.get_edge(infname1, infname2)
            e.attr['label']= str(sim)[:5]
    g.draw(outfname, prog='dot')
Beispiel #33
0
        print 'usage: ingestpipeline.py <path_to_dir_containing_set_of_jobfiles>'

    logging.basicConfig(format="%(asctime)-15s %(levelname)s %(message)s",
                        level=logging.DEBUG)
    logger = logging.getLogger('Slave')

    if options.user == 'ingest':
        import ingest_config as config
    else:
        import ingest_config_test as config

    path = args[0]

    if os.path.isfile(path):
        parser = job_parser.parser(logger)
        job = parser.parse(path)
        job['filename'] = path
        logger.info("Parsed jobfile %s: %s" % (path, str(job)))
        jobPipeline = IngestPipeline(None, jobToRun, config.momClient,
                                     config.ltaClient, config.ipaddress,
                                     config.ltacpport, config.mailCommand,
                                     config.momRetry, config.ltaRetry,
                                     config.srmRetry, config.srmInit)
        jobPipeline.run()
        exit(0)

    if os.path.isdir(path):
        dirEntries = [os.path.join(path, x) for x in os.listdir(path)]
        jobfilepaths = [p for p in dirEntries if os.path.isfile(p)]

        try:
Beispiel #34
0
def main():
    """Submit a SkyWriting script to a master node and block for its result.

    Expects exactly one script-file argument plus --master (or the SW_MASTER
    environment variable).  Prints timestamped progress lines tagged with the
    job id.  Returns the job's result reference on success, None if the job
    reports an error; exits the process on usage errors or parse failure.
    Python 2 code (print statements, `print >>`).
    """
    parser = OptionParser()
    parser.add_option("-m", "--master", action="store", dest="master", help="Master URI", metavar="MASTER", default=os.getenv("SW_MASTER"))
    parser.add_option("-i", "--id", action="store", dest="id", help="Job ID", metavar="ID", default="default")
    parser.add_option("-e", "--env", action="store_true", dest="send_env", help="Set this flag to send the current environment with the script as _env", default=False)
    (options, args) = parser.parse_args()
   
    if not options.master:
        parser.print_help()
        print >> sys.stderr, "Must specify master URI with --master"
        sys.exit(1)

    if len(args) != 1:
        parser.print_help()
        print >> sys.stderr, "Must specify one script file to execute, as argument"
        sys.exit(1)

    script_name = args[0]
    master_uri = options.master
    id = options.id
    
    print id, "STARTED", now_as_timestamp()

    # NOTE: `parser` is rebound from the OptionParser to the script parser.
    parser = SWScriptParser()
    
    script = parser.parse(open(script_name, 'r').read())

    print id, "FINISHED_PARSING", now_as_timestamp()
    
    if script is None:
        print "Script did not parse :("
        exit()
    
    cont = SWContinuation(script, SimpleContext())
    if options.send_env:
        # Expose the caller's environment to the script as `env`.
        cont.context.bind_identifier('env', os.environ)
    
    http = httplib2.Http()
    
    # Upload the pickled continuation to the master's data store.
    master_data_uri = urlparse.urljoin(master_uri, "/data/")
    pickled_cont = pickle.dumps(cont)
    (_, content) = http.request(master_data_uri, "POST", pickled_cont)
    cont_id = simplejson.loads(content)
    
    print id, "SUBMITTED_CONT", now_as_timestamp()
    
    #print continuation_uri
    
    # Submit a task whose only dependency is the uploaded continuation.
    master_netloc = urlparse.urlparse(master_uri).netloc
    task_descriptor = {'dependencies': {'_cont' : SW2_ConcreteReference(cont_id, SWNoProvenance(), len(pickled_cont), [master_netloc])}, 'handler': 'swi'}
    
    master_task_submit_uri = urlparse.urljoin(master_uri, "/job/")
    (_, content) = http.request(master_task_submit_uri, "POST", simplejson.dumps(task_descriptor, cls=SWReferenceJSONEncoder))
    
    print id, "SUBMITTED_JOB", now_as_timestamp() 
    
    
    out = simplejson.loads(content)
    
    notify_url = urlparse.urljoin(master_uri, "/job/%s/completion" % out['job_id'])
    job_url = urlparse.urljoin(master_uri, "/browse/job/%s" % out['job_id'])

    print id, "JOB_URL", job_url
    
    # This request blocks until the master reports job completion.
    (_, content) = http.request(notify_url)
    completion_result = simplejson.loads(content, object_hook=json_decode_object_hook)
    if "error" in completion_result.keys():
        print id, "ERROR", completion_result["error"]
        return None
    else:
        print id, "GOT_RESULT", now_as_timestamp()
        #print content
        return completion_result["result_ref"]
Beispiel #35
0
                      action="store_true",
                      help="don't delete the temporary files created "\
                      "(useful for finding problems)",
                      default=False)

    options, files = parser.parse_args(argv[1:])

    if not files:
        print "Error: no files to process"
        print >> sys.stderr, __doc__
        return 1

    options.flags = options.gccxml_options
    options.verbose = not options.quiet

    parser = cparser.IncludeParser(options)
    parser.parse(files)

def main(argv=None):
    """Drive compile_to_xml over *argv* (defaults to sys.argv).

    Returns 1 when the compiler reports an error, None otherwise.
    """
    arguments = sys.argv if argv is None else argv

    try:
        compile_to_xml(arguments)
    except cparser.CompilerError as detail:
        sys.stderr.write("CompilerError: %s\n" % (detail,))
        return 1


if __name__ == "__main__":
    sys.exit(main())
Beispiel #36
0
    dest="reformat",
    default=False,
    help="run reformat scripts for the observations according to namelist")
options, args = parser.parse_args()
if len(args) == 0:
    parser.print_help()
    sys.exit(0)

# Get command arguments.
input_xml_full_path = args[0]

# Parse input namelist into project_info-dictionary.
Project = xml_parsers.namelistHandler()
parser = xml.sax.make_parser()
parser.setContentHandler(Project)
parser.parse(input_xml_full_path)

# Project_info is a dictionary with all info from the namelist.
project_info = Project.project_info

if options.reformat:
    if 'REFORMAT' not in project_info.keys():
        error('No REFORMAT tag specified in {0}'.format(input_xml_full_path))
    if len(project_info['REFORMAT']) == 0:
        info('No reformat script specified', 1, 1)
    print_header({}, options.reformat)
    for k, v in project_info['REFORMAT'].iteritems():
        if not os.path.exists(v):
            error('Path {0} does not exist'.format(v))
        projects.run_executable(v, project_info, 1, False, write_di=False)
    sys.exit(0)
    # These are duplicates, only found via a contents page still
    # linked to via a "Back to contents" link.
    if re.search('day-wa-01_war0816\.htm', filename):
        continue
    if re.search('day-wa-03_war1110\.htm', filename):
        continue
    if re.search('day-wa-03_war0922\.htm', filename):
        continue
    if re.search('day-wa-03_war0805\.htm', filename):
        continue

    # This is a misnamed file, a duplicate of wa-02_wa1004.htm
    if re.search('day-wa-02_wa0104\.htm', filename):
        continue

    parser = Parser()

    parser.parse(filename)

    # try:
    #     parser.parse(filename)
    # except Exception, e:
    #     if verbose: print "An unhandled exception occurred in parsing: "+filename
    #     if verbose: print "The exception was: "+str(e)
    #     traceback.print_exc(file=sys.stdout)
    #     files_and_exceptions.append( (filename,e) )

# if verbose: print "All exceptions:"
# for e in files_and_exceptions:
#     if verbose: print "%s: %s" % e
	
	def endElement(self, name):
		if name == "ch1" or name == "ch2" or name == "watts" or name == "data":
			print "no"
		if name == "msg":
			self.outFile.write("\n")
		else:
			self.outFile.write(",")
	
	def characters(self, ch):
		# SAX hook: write element text, stripped of newlines, tabs and commas
		# so it cannot collide with the CSV separators we emit.
		cleaned = ch.strip("\n\t,")
		self.outFile.write(cleaned)


if __name__ == "__main__":
    # Convert the XML file named by -f/--file into dump.csv via the
    # XML2CSV SAX handler.
    opt_parser = OptionParser()
    opt_parser.add_option("-f", "--file", dest="filename",
                          help="write report to FILE", metavar="FILE",
                          default="none")
    options, args = opt_parser.parse_args()

    # The sentinel default "none" means no input file was supplied.
    if options.filename == "none":
        exit()

    sax_parser = xml.sax.make_parser()
    out_csv = open("dump.csv", "w")
    sax_parser.setContentHandler(XML2CSV(out_csv))
    sax_parser.parse(options.filename)
Beispiel #39
0
        options.osmInput = options.osmInput.rstrip("." + archiveType)
        print "File uncompressed: ", options.osmInput

    if options.kmlOutput == None:
        options.kmlOutput = os.path.basename(options.osmInput).rstrip(".osc")
    if options.kmlVersion == None: options.kmlVersion = "1"

    myAwareness = OSMaware(debug=False,
                           verbose=False,
                           ele=options.linesElevation,
                           kml_version=options.kmlVersion)
    parser = make_parser()
    parser.setContentHandler(myAwareness)
    t0 = datetime.now()
    print "Starting parsing OSM file..."
    parser.parse(options.osmInput)

    #for userName, userStat in self.statsUsers.iteritems(): print userName, userStat
    print "Number of contributors (users):", len(myAwareness.statsUsers)
    print "Number of Nodes created, deleted or modified:", myAwareness.nodeCount
    print "Number of Ways created, deleted or modified:", myAwareness.wayCount
    print "Number of Realations created, deleted or modified:", myAwareness.relationCount

    if options.kmlVersion == "2":
        myAwareness.createKmlV2(options.kmlOutput,
                                heightFactor=myAwareness.linesElevation)
    if options.kmlVersion == "1":
        myAwareness.createKmlV1(options.kmlOutput)
    if options.kmlVersion == "0":
        myAwareness.createKmlV0(options.kmlOutput)
Beispiel #40
0
def run_standalone():
    """Command-line driver: parse a (GNU-dialect) C source file and re-emit it.

    Reads the file named on the command line, optionally runs it through the
    C preprocessor (-E, with -I/-D/--cpp forwarded), parses it, then either
    dumps the syntax tree (--ast) or regenerates C source to --output (or
    stdout).  Exits with status 1 if no input file is given.
    """
    from optparse import OptionParser

    parser = OptionParser("usage: %prog [options] source.c")
    parser.add_option("-o",
                      "--output",
                      help="write output to FILE (default stdout)",
                      metavar="FILE")
    parser.add_option("-E", "--preprocess", action="store_true")
    parser.add_option("-I",
                      "--include",
                      action="append",
                      help="include path, passed on to C preprocessor",
                      metavar="PATH")
    parser.add_option("-D",
                      "--define",
                      action="append",
                      help="C macro definition, passed on to C preprocessor",
                      metavar="NAME[=VALUE]")
    parser.add_option("--cpp", help="C preprocessor to use", metavar="COMMAND")
    parser.add_option("--ast",
                      action="store_true",
                      help="print syntax tree, quit")

    (options, args) = parser.parse_args()

    if not args:
        parser.print_help()
        sys.exit(1)

    in_file = args[0]

    # Use a context manager so the source file is closed deterministically
    # (the previous version leaked the handle).
    with open(in_file, "rt") as src_file:
        src = src_file.read()
    src = insert_parens_in_brackets(in_file, src)

    # Merged the two consecutive `if options.preprocess:` blocks into one.
    if options.preprocess:
        # Prepend the preamble plus a "# 1" line marker so preprocessor
        # diagnostics point back at the real input file.
        extra_lines = PREAMBLE + [
            "# 1 \"%s\"" % in_file,
        ]
        src = "\n".join(extra_lines) + "\n" + src

        cpp_options = []
        #cpp_options = ["-P"]
        if options.include:
            for inc_dir in options.include:
                cpp_options.extend(["-I", inc_dir])

        if options.define:
            for define in options.define:
                cpp_options.extend(["-D", define])

        src = preprocess_source(src, options.cpp, cpp_options)

    #print "preprocessed source in ", write_temp_file(src, ".c")

    parser = GnuCndParser()
    ast = parser.parse(src, filename=in_file)
    if options.ast:
        ast.show()
        return

    generator = GnuCGenerator()

    # Regenerated source goes to --output if given, else stdout; only close
    # handles we opened ourselves.
    if options.output is not None:
        outf = open(options.output, "wt")
    else:
        outf = sys.stdout

    try:
        outf.write(generator.visit(ast))
    finally:
        if options.output is not None:
            outf.close()
    if not options.direct_resync:
        parser.error(
            "The option --direct-resync is required to run this module directly"
        )
        sys.exit(1)

    univention.debug.init("stderr", univention.debug.NO_FLUSH,
                          univention.debug.NO_FUNCTION)
    ucr = ConfigRegistry()
    ucr.load()
    univention.debug.set_level(univention.debug.LISTENER,
                               int(ucr.get('listener/debug/level', 2)))

    cmd = ['/usr/bin/univention-ldapsearch', '-LLL', filter, 'objectClass']
    cmd.extend(attributes)
    p1 = subprocess.Popen(cmd, stdout=subprocess.PIPE)
    (stdout, stderr) = p1.communicate()

    class ListenerHandler(LDIFParser):
        def __init__(self, input):
            LDIFParser.__init__(self, input)

        def handle(self, dn, entry):
            handler(dn, entry, {}, 'a')

    parser = ListenerHandler(StringIO.StringIO(stdout))
    parser.parse()

    subprocess.call(['net', 'cache', 'flush'])
      if status == BuildStatus.Success:
        pass

      # Parse log to save 'TestFailed' results.
      elif status == BuildStatus.TestFailed :
        logging.info("Checking build log for '%s' at %d (%s)" % (name, starttime, ctime(starttime)))
        try:
          failures = []
          # Grab the build log.
          log, headers = urllib.urlretrieve("http://tinderbox.mozilla.org/%s/%s" % (options.tree, build['logfile']))
          gz = GzipFile(log) # I need a list of lines from the build log

          # assured to match because we search for this above
          harness_type = re.match(harness_regex, logname).groups()[0]
          parser = parsers.get(harness_type, logparser.LogParser)()
          failures = parser.parse(gz)

          # add the failures to the database
          for failure in failures:

            # convenience variables; can probably delete
            test = failure['test']
            text = failure['text']
            reason = failure['reason']
            
            testnames_id=GetOrInsertTest(conn,test)
            if HaveFailRecord(conn,buildid,  reason, testnames_id):
              logging.info("Skipping already recorded failure '%s' in build with id '%s' with failure record '%s' " % (test, buildid, text))
            else:  
              InsertTest(conn, buildid, reason, testnames_id, text)
                      
Beispiel #43
0
        option_session = options.session
        option_cookie = options.cookie

        setOptions(options)
    else:
        try:
            f = open("grabber.conf.xml", 'r')
        except IOError:
            print "No arguments ? You need to setup the XML configuration file or using the inline arguments"
            print "Look at the doc to start..."
            sys.exit(1)
        parser = make_parser()
        conf_handler = ConfHandler()
        # Tell the parser to use our handler
        parser.setContentHandler(conf_handler)
        parser.parse("grabber.conf.xml")

        option_url = confUrl
        option_spider = confSpider
        option_sql = "sql" in confActions
        option_bsql = "bsql" in confActions
        option_xss = "xss" in confActions
        option_backup = "backup" in confActions
        option_include = "include" in confActions
        option_js = "javascript" in confActions
        option_crystal = "crystal" in confActions
        option_session = "session" in confActions

    # default to localhost ?
    archives_url = "http://localhost"
    if option_url:
Beispiel #44
0
def main():
    """Submit a SkyWriting script to a master node and block for its result.

    Variant that also pre-declares the job output id ('joboutput:<cont_id>')
    in the task descriptor's expected_outputs.  Expects one script-file
    argument plus --master (or SW_MASTER).  Returns the result reference on
    success, None if the job reports an error; exits on usage errors or a
    script parse failure.  Python 2 code (print statements, `print >>`).
    """
    parser = OptionParser()
    parser.add_option("-m",
                      "--master",
                      action="store",
                      dest="master",
                      help="Master URI",
                      metavar="MASTER",
                      default=os.getenv("SW_MASTER"))
    parser.add_option("-i",
                      "--id",
                      action="store",
                      dest="id",
                      help="Job ID",
                      metavar="ID",
                      default="default")
    parser.add_option(
        "-e",
        "--env",
        action="store_true",
        dest="send_env",
        help=
        "Set this flag to send the current environment with the script as _env",
        default=False)
    (options, args) = parser.parse_args()

    if not options.master:
        parser.print_help()
        print >> sys.stderr, "Must specify master URI with --master"
        sys.exit(1)

    if len(args) != 1:
        parser.print_help()
        print >> sys.stderr, "Must specify one script file to execute, as argument"
        sys.exit(1)

    script_name = args[0]
    master_uri = options.master
    id = options.id

    print id, "STARTED", now_as_timestamp()

    # NOTE: `parser` is rebound from the OptionParser to the script parser.
    parser = SWScriptParser()

    script = parser.parse(open(script_name, 'r').read())

    print id, "FINISHED_PARSING", now_as_timestamp()

    if script is None:
        print "Script did not parse :("
        exit()

    cont = SWContinuation(script, SimpleContext())
    if options.send_env:
        # Expose the caller's environment to the script as `env`.
        cont.context.bind_identifier('env', os.environ)

    http = httplib2.Http()

    # Upload the pickled continuation to the master's data store.
    master_data_uri = urlparse.urljoin(master_uri, "/data/")
    pickled_cont = pickle.dumps(cont)
    (_, content) = http.request(master_data_uri, "POST", pickled_cont)
    cont_id = simplejson.loads(content)

    out_id = 'joboutput:%s' % cont_id

    print id, "SUBMITTED_CONT", now_as_timestamp()

    #print continuation_uri

    # Submit a task whose only dependency is the uploaded continuation and
    # whose single expected output is out_id.
    master_netloc = urlparse.urlparse(master_uri).netloc
    task_descriptor = {
        'dependencies': {
            '_cont':
            SW2_ConcreteReference(cont_id, len(pickled_cont), [master_netloc])
        },
        'handler': 'swi',
        'expected_outputs': [out_id]
    }

    master_task_submit_uri = urlparse.urljoin(master_uri, "/job/")
    (_, content) = http.request(
        master_task_submit_uri, "POST",
        simplejson.dumps(task_descriptor, cls=SWReferenceJSONEncoder))

    print id, "SUBMITTED_JOB", now_as_timestamp()

    out = simplejson.loads(content)

    notify_url = urlparse.urljoin(master_uri,
                                  "/job/%s/completion" % out['job_id'])
    job_url = urlparse.urljoin(master_uri, "/browse/job/%s" % out['job_id'])

    print id, "JOB_URL", job_url

    # This request blocks until the master reports job completion.
    (_, content) = http.request(notify_url)
    completion_result = simplejson.loads(content,
                                         object_hook=json_decode_object_hook)
    if "error" in completion_result.keys():
        print id, "ERROR", completion_result["error"]
        return None
    else:
        print id, "GOT_RESULT", now_as_timestamp()
        #print content
        return completion_result["result_ref"]
Beispiel #45
0
def main():
    """Command-line entry point for parsing a Redis RDB dump file.

    Builds db/key/type filters from the command line, wires the requested
    command (json, diff, justkeys, justkeyvals, memory, protocol) to the
    matching parser callback, and streams the results to --file or stdout.
    Raises Exception on an unknown command, bad database number, or bad type.
    """
    usage = """usage: %prog [options] /path/to/dump.rdb

Example : %prog --command json -k "user.*" /var/redis/6379/dump.rdb"""

    parser = OptionParser(usage=usage)
    parser.add_option(
        "-c", "--command", dest="command",
        help="Command to execute. Valid commands are json, diff, justkeys, justkeyvals, memory and protocol",
        metavar="COMMAND")  # fixed copy-pasted metavar="FILE"
    parser.add_option("-f", "--file", dest="output",
                      help="Output file", metavar="FILE")
    parser.add_option(
        "-n", "--db", dest="dbs", action="append",
        help="Database Number. Multiple databases can be provided. If not specified, all databases will be included."
    )
    parser.add_option("-k", "--key", dest="keys", default=None,
                      help="Keys to export. This can be a regular expression")
    parser.add_option(
        "-o", "--not-key", dest="not_keys", default=None,
        help="Keys Not to export. This can be a regular expression")
    parser.add_option(
        "-t", "--type", dest="types", action="append",
        help="""Data types to include. Possible values are string, hash, set, sortedset, list. Multiple typees can be provided. 
                    If not specified, all data types will be returned""")
    parser.add_option(
        "-b", "--bytes", dest="bytes", default=None,
        help="Limit memory output to keys greater to or equal to this value (in bytes)"
    )
    parser.add_option(
        "-l", "--largest", dest="largest", default=None,
        help="Limit memory output to only the top N keys (by size)")
    parser.add_option(
        "-e", "--escape", dest="escape", choices=ESCAPE_CHOICES,
        help="Escape strings to encoding: %s (default), %s, %s, or %s." %
        tuple(ESCAPE_CHOICES))

    (options, args) = parser.parse_args()

    if len(args) == 0:
        parser.error("Redis RDB file not specified")
    dump_file = args[0]

    # Translate CLI options into the filter dict understood by RdbParser.
    filters = {}
    if options.dbs:
        filters['dbs'] = []
        for x in options.dbs:
            try:
                filters['dbs'].append(int(x))
            except ValueError:
                raise Exception('Invalid database number %s' % x)

    if options.keys:
        filters['keys'] = options.keys

    if options.not_keys:
        filters['not_keys'] = options.not_keys

    if options.types:
        filters['types'] = []
        for x in options.types:
            if x not in VALID_TYPES:
                raise Exception(
                    'Invalid type provided - %s. Expected one of %s' %
                    (x, (", ".join(VALID_TYPES))))
            filters['types'].append(x)

    out_file_obj = None
    try:
        if options.output:
            out_file_obj = open(options.output, "wb")
        else:
            # Prefer not to depend on Python stdout implementation for writing binary.
            out_file_obj = os.fdopen(sys.stdout.fileno(), 'wb')

        # Map each command name to a factory producing its parser callback.
        callback_factories = {
            'diff':
            lambda f: DiffCallback(f, string_escape=options.escape),
            'json':
            lambda f: JSONCallback(f, string_escape=options.escape),
            'justkeys':
            lambda f: KeysOnlyCallback(f, string_escape=options.escape),
            'justkeyvals':
            lambda f: KeyValsOnlyCallback(f, string_escape=options.escape),
            'memory':
            lambda f: MemoryCallback(PrintAllKeys(f, options.bytes, options
                                                  .largest),
                                     64,
                                     string_escape=options.escape),
            'protocol':
            lambda f: ProtocolCallback(f, string_escape=options.escape)
        }
        # Bug fix: the original wrapped the lookup AND the callback
        # construction in a bare `except:`, so any error raised while
        # constructing a callback was swallowed and misreported as an
        # invalid command.  Only an unknown command is a usage error.
        if options.command not in callback_factories:
            raise Exception('Invalid Command %s' % options.command)
        callback = callback_factories[options.command](out_file_obj)

        if not PYTHON_LZF_INSTALLED:
            eprint(
                "WARNING: python-lzf package NOT detected. " +
                "Parsing dump file will be very slow unless you install it. " +
                "To install, run the following command:")
            eprint("")
            eprint("pip install python-lzf")
            eprint("")

        parser = RdbParser(callback, filters=filters)
        parser.parse(dump_file)
    finally:
        # Close only files we opened; the stdout wrapper is closed too,
        # matching the original behavior of guarding on options.output.
        if options.output and out_file_obj is not None:
            out_file_obj.close()
    )

    (options, args) = parser.parse_args()

    if not options.input:
        print "--mapcss parameter is required"
        raise SystemExit

    if options.name:
        style_name = options.name
    else:
        style_name = re.sub("\..*", "", options.input)

    content = open(options.input).read()
    parser = MapCSSParser(debug=False)
    mapcss = parser.parse(content)

    mapcss_js = mapcss.as_js()
    subparts_var = ", ".join(
        map(lambda subpart: "s_%s = {}" % subpart, subparts))
    subparts_var = "        var %s;" % subparts_var
    subparts_fill = "\n".join(
        map(
            lambda subpart:
            "        if (Object.keys(s_%s).length) {\n            style['%s'] = s_%s; }"
            % (subpart, subpart, subpart), subparts))
    js = """
(function (MapCSS) {
    'use strict';

    function restyle(style, tags, zoom, type, selector) {
Beispiel #47
0
 else:
     stats = FortranStaticAnalysis()
     for name in args:
         stats.scan(name)
     stats.analyze()
     stats.summary()
     stats.saveStateToFile()
 
 if options.prof:
     theme = gprof2dot.TEMPERATURE_COLORMAP
     if options.theme_skew:
         theme.skew = options.theme_skew
     theme.rankdir = options.rankdir
     fh = open(options.prof, 'rt')
     parser = gprof2dot.GprofParser(fh)
     profile = parser.parse()
     profile.prune(options.node_thres/100.0, options.edge_thres/100.0)
     root, ext = os.path.splitext(options.prof)
     if not options.output:
         options.output = "%s.dot" % root
     fh = open(options.output, 'wb')
     dot = FortranDotWriter(fh)
     dot.graph(profile, theme, stats, exclude, title=root)
     fh.close()
     print(options.output)
     root, ext = os.path.splitext(options.output)
     print("%s.ps" % root)
 elif options.isolate:
     print "Isolating %s" % options.isolate
     i = stats.isolate(options.isolate)
     i.summary()
def main():
    if_recording = 'r'    
    supported_parser_list = [
               Parser(if_recording,
                      'recording file',
                      parsers.recording_file.RecordingFileMapParser),
               ]
    
    of_castle_fortify = 'cf'
    of_grid = 'g'
    supported_writer_list = [
               Writer(of_castle_fortify,
                      'Castle Fortify (see http://castlefortify.com/)',
                      writers.castle_fortify.Writer),
               Writer(of_grid,
                      'simple grid',
                      writers.simple.Writer),
               ]

    valid_input_formats = [parser.name for parser in supported_parser_list] 
    valid_output_formats = [writer.name for writer in supported_writer_list]

    parser = OptionParser()
    parser.add_option('-f', '--input-file', type = 'string',
                      dest = 'input_filename', default = '',
                      help = 'Input filename')
    parser.add_option('-i', '--input-format', type = 'choice',
                      dest = 'input_format', default = if_recording, choices = valid_input_formats,
                      help = 'Input format: ' +
                             ', '.join(p.name + ' - ' + p.desc for p in supported_parser_list))
    parser.add_option('-o', '--output-format', type = 'choice',
                      dest = 'output_format', default = of_grid, choices = valid_output_formats,
                      help = 'Output format: ' +
                             ', '.join(w.name + ' - ' + w.desc for w in supported_writer_list))
    parser.add_option('-O', '--output-format-list', type = 'string',
                      dest = 'output_formats', default = '',
                      help = 'Output formats separated by a comma, see --output-format')
    (options, args) = parser.parse_args()

    input_format = options.input_format
    output_formats = []
    for output_format in options.output_formats.split(','):
        if output_format in valid_output_formats and not output_format in output_formats:
            output_formats += [output_format]
    if len(output_formats) == 0:
        output_formats = [options.output_format]
        
    try:
        input = io.input.FileInput(options.input_filename)
    except:
        print 'Could not open: \'' + options.input_filename + '\''
        return

    parser = [parser.parser(input) for parser in supported_parser_list if parser.name == input_format][0]

    converting = True
    while converting:
        parsed_map = core.map.Map()
        try:
            parsed = parser.parse(parsed_map)
    
            if parsed:
                writer_list = []
                for format in output_formats:
                    writer_list += [writer.writer() for writer in supported_writer_list if writer.name == format]
                for writer in writer_list:
                    writer.write(parsed_map)
                
        except parsers.recording_file.ParseError as e:
            print 'Failed to parse file:', e.message
            
        converting = not input.eof()
Beispiel #49
0
                        dest='outfname', 
                        help='output fname, default=midi_fname.intervals')
    parser.add_option('-c', '--no-cache', 
                        dest='cache', action='store_false', default=True, 
                        help='discard cache')

    options, args= parser.parse_args(argv[1:])
    if len(args) < 1: parser.error('faltan argumentos')

    infame= args[0]
    outfname= options.outfname
    if outfname is None:
        outfname= infame[:-3] + 'intervals'

    parser= parserclass()
    score= parser.parse(infame)

    notes= score.notes_per_instrument.itervalues().next()
    # que no haya dos silencios consecutivos
    assert not any((prev.is_silence and next.is_silence for (prev, next) in zip(notes, notes[1:])))

    for prev, next in zip(notes, notes[1:]):
        if next.is_silence: prev.duration += next.duration

    notes= [n for n in notes if not n.is_silence]
    def f(e):
        if e < 0:
            return 'm%s' % abs(e)
        else:
            return str(e)
    intervals= [f(next.pitch - prev.pitch) for (prev, next) in zip(notes, notes[1:])]
Beispiel #50
0
    def main(self, args=None):
        """
        Main entry point for the Application.

        Parameters
        ----------
        :param args: command line arguments.

        Returns
        ----------
        :return: N/A

        Examples
        --------
        >>> python TestVectorDirectoryParserApp.py -dir /data/asc_dir -out Files.txt

        """
        # ****************************************
        #         Execution information
        # ****************************************

        print(__doc__)

        # ****************************************
        #    Command line argument processing
        # ****************************************

        # Python 2.4 argument processing.
        parser = OptionParser()

        # REQUIRED ARGUMENTS
        parser.add_option("--dir",
                          action="store",
                          dest="dir",
                          help='Path to the directory to parse (required).',
                          default=None)
        parser.add_option("--out",
                          action="store",
                          dest="out",
                          help='Path to the output file (required).',
                          default=None)
        parser.add_option("--ext",
                          action="store",
                          dest="ext",
                          help='File extension to look for (required).',
                          default='.fil')
        parser.add_option("-f",
                          type="int",
                          dest="format",
                          help='The file output format (optional).',
                          default=1)
        parser.add_option("-v",
                          action="store_true",
                          dest="verbose",
                          help='Verbose debugging flag (optional).',
                          default=False)

        (args, options) = parser.parse_args()

        # Update variables with command line parameters.
        directory = args.dir
        output_file = args.out
        output_format = args.format
        verbose = args.verbose

        ############################################################
        #              Check user supplied parameters              #
        ############################################################

        # Check the directory is valid...
        if directory is None:
            print "No valid directory supplied, exiting."
            sys.exit()
        elif not Common.dir_exists(directory):
            print "No valid directory supplied, exiting."
            sys.exit()

        # Check the output file is valid...
        if output_file is None:
            print "No valid output file supplied, exiting."
            sys.exit()
        elif not Common.is_path_valid(output_file):
            print "No valid output file supplied, exiting."
            sys.exit()

        # Check the output file is valid...
        if args.ext is None:
            print "No valid file extension supplied, exiting."
            sys.exit()
        else:
            extension = [args.ext]

        if output_format < 1 or output_format > 2:
            print "You must supply a valid output format via the -f flag."
            print "1    -    CSV."
            print "2    -    JSON (not implemented yet)."

            sys.exit()

        ############################################################
        #               Start parsing the directory                #
        ############################################################

        print "\tSearching: ", directory

        # Used to measure feature generation time.
        start = datetime.datetime.now()

        parser = TestVectorDirectoryParser()
        parser.parse(directory, extension, output_file, output_format)

        # Finally get the time that the procedure finished.
        end = datetime.datetime.now()

        ############################################################
        #                    Summarise outcome                     #
        ############################################################

        print "\tFinished parsing"
        print "\tExecution time: ", str(end - start)
        print "Done."
Beispiel #51
0
def main():
    usage = """usage: %prog [options] /path/to/dump.rdb

Example : %prog --command json -k "user.*" /var/redis/6379/dump.rdb"""

    parser = OptionParser(usage=usage)
    parser.add_option("-c", "--command", dest="command",
                  help="Command to execute. Valid commands are json, diff, and protocol", metavar="FILE")
    parser.add_option("-f", "--file", dest="output",
                  help="Output file", metavar="FILE")
    parser.add_option("-n", "--db", dest="dbs", action="append",
                  help="Database Number. Multiple databases can be provided. If not specified, all databases will be included.")
    parser.add_option("-k", "--key", dest="keys", default=None,
                  help="Keys to export. This can be a regular expression")
    parser.add_option("-t", "--type", dest="types", action="append",
                  help="""Data types to include. Possible values are string, hash, set, sortedset, list. Multiple typees can be provided. 
                    If not specified, all data types will be returned""")
    
    (options, args) = parser.parse_args()
    
    if len(args) == 0:
        parser.error("Redis RDB file not specified")
    dump_file = args[0]
    
    filters = {}
    if options.dbs:
        filters['dbs'] = []
        for x in options.dbs:
            try:
                filters['dbs'].append(int(x))
            except ValueError:
                raise Exception('Invalid database number %s' %x)
    
    if options.keys:
        filters['keys'] = options.keys
    
    if options.types:
        filters['types'] = []
        for x in options.types:
            if not x in VALID_TYPES:
                raise Exception('Invalid type provided - %s. Expected one of %s' % (x, (", ".join(VALID_TYPES))))
            else:
                filters['types'].append(x)
    
    # TODO : Fix this ugly if-else code
    if options.output:
        with open(options.output, "wb") as f:
            if 'diff' == options.command:
                callback = DiffCallback(f)
            elif 'json' == options.command:
                callback = JSONCallback(f)
            elif 'csv' == options.command:
                callback = CSVCallback(f)
            elif 'memory' == options.command:
                reporter = PrintAllKeys(f)
                callback = MemoryCallback(reporter, 64)
            elif 'protocol' == options.command:
                callback = ProtocolCallback(f)
            else:
                raise Exception('Invalid Command %s' % options.command)
            parser = RdbParser(callback)
            parser.parse(dump_file)
    else:
        if 'diff' == options.command:
            callback = DiffCallback(sys.stdout)
        elif 'json' == options.command:
            callback = JSONCallback(sys.stdout)
        elif 'csv' == options.command:
            callback = CSVCallback(sys.stdout)
        elif 'memory' == options.command:
            reporter = PrintAllKeys(sys.stdout)
            callback = MemoryCallback(reporter, 64)
        elif 'protocol' == options.command:
            callback = ProtocolCallback(sys.stdout)
        else:
            raise Exception('Invalid Command %s' % options.command)

        parser = RdbParser(callback, filters=filters)
        parser.parse(dump_file)
Beispiel #52
0
    dest="reformat",
    default=False,
    help="run reformat scripts for the observations according to namelist")
options, args = parser.parse_args()
# Without a namelist argument there is nothing to do: show usage and stop.
if len(args) == 0:
    parser.print_help()
    sys.exit(0)

# Get command arguments.
input_xml_full_path = args[0]

# Parse input namelist into project_info-dictionary.
Project = xml_parsers.namelistHandler()
parser = xml.sax.make_parser()
parser.setContentHandler(Project)
parser.parse(input_xml_full_path)

# Project_info is a dictionary with all info from the namelist.
project_info = Project.project_info

# --reformat mode: run every executable registered under the REFORMAT
# tag of the namelist, then exit without doing the normal processing.
if options.reformat:
    if 'REFORMAT' not in project_info.keys():
        error('No REFORMAT tag specified in {0}'.format(input_xml_full_path))
    if len(project_info['REFORMAT']) == 0:
        info('No reformat script specified', 1, 1)
    print_header({}, options.reformat)
    # NOTE(review): iteritems() is Python 2 only; values appear to be
    # paths to reformat executables -- verify against the namelist schema.
    for k, v in project_info['REFORMAT'].iteritems():
        if not os.path.exists(v):
            error('Path {0} does not exist'.format(v))
        projects.run_executable(v, project_info, 1, False, write_di=False)
    sys.exit(0)
    parser.add_option("-s", "--output-sprite",
        dest="sprite", 
        help="Filename of generated CSS sprite. If not specified, [stylename].png will be uesed")    

    (options, args) = parser.parse_args()    
    
    if not options.input:
        print "--mapcss parameter is required"
        raise SystemExit

    style_name = re.sub("\..*", "", options.input)

    content = open(options.input).read()
    parser = MapCSSParser(debug=False)
    mapcss = parser.parse(content) 
    
    mapcss_js = mapcss.as_js()
    subparts_var = "\n".join(map(lambda subpart: "        var s_%s = {};" % subpart, subparts))
    subparts_fill = "\n".join(map(lambda subpart: "        if (s_%s) {\n            style['%s'] = s_%s;\n        }" % (subpart, subpart, subpart), subparts))

    js = """
(function(MapCSS) {
    function restyle(tags, zoom, type, selector) {
%s
%s
        var style = {};
%s        
        return style;
    }
    """ % (subparts_var, mapcss_js, subparts_fill)
Beispiel #54
0
def weblab_start(directory):
    """Start the WebLab-Deusto instance stored in *directory*.

    First verifies that the instance's database schema is up to date.
    Then either executes a user-supplied script (-s), or resolves which
    machine to start from the instance's global configuration and
    executes that machine's runner script.  The process working
    directory is switched to *directory* while the runner executes and
    restored afterwards.
    """
    parser = OptionParser(usage="%prog create DIR [options]")

    parser.add_option("-m", "--machine",           dest="machine", default=None, metavar="MACHINE",
                                                   help = "If there is more than one machine in the configuration, which one should be started.")
    parser.add_option("-l", "--list-machines",     dest="list_machines", action='store_true', default=False, 
                                                   help = "List machines.")

    parser.add_option("-s", "--script",            dest="script", default=None, metavar="SCRIPT",
                                                   help = "If the runner option is not available, which script should be used.")

    options, args = parser.parse_args()

    check_dir_exists(directory, parser)

    # Returns True when both the regular and the coordination databases
    # are at the schema version this installation expects.
    def on_dir(directory, configuration_files):
        db_conf = DbConfiguration(configuration_files)
        regular_url = db_conf.build_url()
        coord_url   = db_conf.build_coord_url()
        upgrader = DbUpgrader(regular_url, coord_url)
        return upgrader.check_updated()

    # Refuse to start an outdated instance: the user must upgrade first.
    if not run_with_config(directory, on_dir):
        print >> sys.stderr, "Error: WebLab-Deusto instance outdated! You may have updated WebLab-Deusto recently. Run: weblab-admin.py upgrade %s" % directory
        sys.exit(-1)


    # Everything below runs from inside the instance directory; the
    # finally clause guarantees the caller's cwd is restored.
    old_cwd = os.getcwd()
    os.chdir(directory)
    try:
        if options.script: # If a script is provided, ignore the rest
            # The path may be relative to the original cwd (which was
            # just changed), so try both locations before giving up.
            if os.path.exists(options.script):
                execfile(options.script)
            elif os.path.exists(os.path.join(old_cwd, options.script)):
                execfile(os.path.join(old_cwd, options.script))
            else:
                print >> sys.stderr, "Provided script %s does not exist" % options.script
                sys.exit(-1)
        else:
            parser = GlobalParser()
            global_configuration = parser.parse('.')
            # -l: just list the configured machines and exit.
            if options.list_machines:
                for machine in global_configuration.machines:
                    print ' - %s' % machine
                sys.exit(0)

            # Without -m the machine can only be inferred when exactly
            # one is configured.
            machine_name = options.machine
            if machine_name is None: 
                if len(global_configuration.machines) == 1:
                    machine_name = global_configuration.machines.keys()[0]
                else:
                    print >> sys.stderr, "System has more than one machine (see -l). Please detail which machine you want to start with the -m option."
                    sys.exit(-1)

            if not machine_name in global_configuration.machines:
                print >> sys.stderr, "Error: %s machine does not exist. Use -l to see the list of existing machines." % machine_name
                sys.exit(-1)

            # Prefer the machine's configured runner; fall back to a
            # conventional run.py in the instance directory.
            machine_config = global_configuration.machines[machine_name]
            if machine_config.runner is None:
                if os.path.exists('run.py'):
                    execfile('run.py')
                else:
                    print >> sys.stderr, "No runner was specified, and run.py was not available. Please the -s argument to specify the script or add the <runner file='run.py'/> option in %s." % machine_name
                    sys.exit(-1)
            else:
                if os.path.exists(machine_config.runner):
                    execfile(machine_config.runner)
                else:
                    print >> sys.stderr, "Misconfigured system. Machine %s points to %s which does not exist." % (machine_name, os.path.abspath(machine_config.runner))
                    sys.exit(-1)
    finally:
        os.chdir(old_cwd)
Beispiel #55
0
def main():
    #################### args parsing ############
        """Generate and stage downscaling experiment scripts from an XML template.

        Reads the template given with -i, extracts all experiment
        settings via listVars(), then shells out to three generator
        utilities under $BASEDIR/utils/bin (create_runcode,
        create_runscript, create_master_runscript), exiting on the
        first failed step.  Finally copies the XML into the
        experiment's config directory.
        """
        help = "#################Usage:##################\n dsTemplater -i <input XML> \n "
        help = help + "Example 1: dsTemplater -i dstemplate.xml \n "
        help = help + "Example 2: dsTemplater -i examples/dstemplate.60lo0FUTURE.xml \n"
        #print usage
	basedir = os.environ.get('BASEDIR')
        if(basedir is None):
	   print "Warning: BASEDIR environment variable not set"	
        parser = OptionParser(usage=help)
        parser.add_option("-i", "--file", dest="uinput",
        help="pass location of XML template", metavar="FILE")
        parser.add_option("-f", "--force",action="store_true",default=False, help="Force override existing output. Default is set to FALSE")
        #parser.add_option("-v", "--version",action="store_true", dest="version",default="v20120422", help="Assign version for downscaled data. Default is set to v20120422")        
        
        (options, args) = parser.parse_args()
        verOpt = True #default 
        forOpt = True #default
        inter = 'on' #for debugging
        # 'off' is a debugging path with hard-coded inputs; normal runs
        # take the 'on' branch below.
        if (inter == 'off'):
		uinput = 'dstemplate.xml'
	        dversion = "v20120422"
                force = False
                if os.path.exists(uinput):
                                        parser = xml.sax.make_parser(  )
                                        handler = temphandler.TempHandler(  )
                                        parser.setContentHandler(handler)
                                        parser.parse(uinput)
        # Validate -i (must be an existing file) and capture -f.
        if (inter == 'on'):  
        	for opts,vals in options.__dict__.items():
           	# print opts,vals
            		if(opts == 'uinput'):
                		uinput = vals
                		if os.path.exists(uinput):
				        print "XML input:",uinput
                    			#parser = xml.sax.make_parser(  )
                    			#handler = temphandler.TempHandler(  )
                    			#parser.setContentHandler(handler)
                    			#parser.parse(uinput)
                		else:
                    			print "Please pass a valid input XML filename with the -i argument and try again. See -h for syntax. Quitting now.."
                    			sys.exit()
            		if(opts == 'force'):
                		if (vals == True):
                    			forOpt = False
                		force = vals 
        #########  call listVars() #############################################################
        # NOTE(review): 'late' appears twice in this unpack while 'lats'
        # and 'lons' appear once -- verify the ordering against listVars().
	output_grid,kfold,lone,region,fut_train_start_time,fut_train_end_time,file_j_range,hist_file_start_time,hist_file_end_time,hist_train_start_time,hist_train_end_time,lats,late,lons,late, basedir,method,target_file_start_time,target_file_end_time,target_train_start_time,target_train_end_time,spat_mask,fut_file_start_time,fut_file_end_time,predictor,target,params,outdir,dversion,dexper,target_scenario,target_model,target_freq,hist_scenario,hist_model,hist_freq,fut_scenario,fut_model,fut_freq,hist_pred_dir,fut_pred_dir,target_dir,expconfig,target_time_window,hist_time_window,fut_time_window,tstamp,ds_region,target_ver,auxcustom= listVars(uinput,basedir,force)
        ######### call script creators..  #######################################################
        ############################### 1 ###############################################################
        #  make.code.tmax.sh 1 748 756 /vftmp/Aparna.Radhakrishnan/pid15769 outdir 1979 2008 tasmax

	# If the runscript directory already exists, please quit
	#/home/a1r/gitlab/cew/fudge2014///scripts/tasmax/35txa-CDFt-A00X01K02
	scriptdir = basedir+"/scripts/"+target+"/"+expconfig

	if(os.path.exists(scriptdir)):
		print "Warning: script directory already exists.",scriptdir
		#print "ERROR: Directory already exists. Clean up and try again please:",scriptdir 
        # Step 1: build the create_runcode command line from the template
        # settings, then run it through the shell.
        script1Loc = basedir+"/utils/bin/create_runcode"
        make_code_cmd = script1Loc+" "+str(predictor)+" "+str(target)+" "+str(output_grid)+" "+str(spat_mask)+" "+str(region)
        make_code_cmd = make_code_cmd+" "+str(file_j_range)+" "+str(lons)+" "+str(lats)+" "+str(late)
	make_code_cmd = make_code_cmd+" "+str(hist_file_start_time)+" "+str(hist_file_end_time)+" "+str(hist_train_start_time)+" "+str(hist_train_end_time)+" "+str(hist_scenario)+" "+str(hist_model)+" "+hist_freq+" "+str(hist_pred_dir)+" "+str(hist_time_window)
        make_code_cmd = make_code_cmd+" "+str(fut_file_start_time)+" "+str(fut_file_end_time)+" "+str(fut_train_start_time)+" "+str(fut_train_end_time)+" "+str(fut_scenario)+" "+str(fut_model)+" "+fut_freq+" "+str(fut_pred_dir)+" "+str(fut_time_window)
        make_code_cmd = make_code_cmd+" "+str(target_file_start_time)+" "+str(target_file_end_time)+" "+str(target_train_start_time)+" "+str(target_train_end_time)+" "+str(target_scenario)+" "+str(target_model)+" "+target_freq+" "+str(target_dir)+" "+str(target_time_window)
        make_code_cmd = make_code_cmd+" "+str(method)+" "+str(expconfig)+" "+str(kfold)+" "+str(outdir)+" "+str(tstamp)+" "+'na'+" "+basedir+" "+str(lone)

        print "Step 1: R code starters generation ..in progress"
        #p = subprocess.Popen('tcsh -c "'+make_code_cmd+'"',shell=True,stdout=PIPE,stdin=PIPE, stderr=PIPE)
        
        params_new =  '"'+str(params)+'\"'
        make_code_cmd = make_code_cmd +" "+params_new+" "+"'"+str(ds_region)+"'"
        make_code_cmd = make_code_cmd+" "+str(auxcustom)
	#cprint make_code_cmd
        #p = subprocess.Popen(make_code_cmd +" "+params_new,shell=True,stdout=PIPE,stdin=PIPE, stderr=PIPE)
        p = subprocess.Popen(make_code_cmd,shell=True,stdout=PIPE,stdin=PIPE, stderr=PIPE)
        output, errors = p.communicate() 
        if(p.returncode != 0):
         print "Step1:!!!! FAILED !!!!, please contact developer."
         print output, errors
         sys.exit(0)
        #cprint output, errors
	#cprint "----Log-----"
        #cprint output,errors
        ###############################################################################################
        print "1- completed\n"
        ############################### 2 ################################################################
	#target_time_window,hist_time_window,fut_time_window
        # Step 2: generate the individual per-experiment runscripts.
        script2Loc = basedir+"/utils/bin/"+"create_runscript"
        create_runscript_cmd = script2Loc+" "+str(lons)+" "+str(lone)+" "+str(expconfig)+" "+str(basedir)+" "+target+" "+method+" "+target_dir+" "+hist_pred_dir+" "+fut_pred_dir+" "+outdir+" "+str(file_j_range)+" "+tstamp+" "+str(target_file_start_time)+" "+str(target_file_end_time)+" "+str(hist_file_start_time)+" "+str(hist_file_end_time)+" "+str(fut_file_start_time)+" "+str(fut_file_end_time)+" "+str(spat_mask)+" "+str(region)+" "+auxcustom+" "+target_time_window+" "+hist_time_window+" "+fut_time_window
        print "Step 2: Individual Runscript generation: \n"+create_runscript_cmd
        p1 = subprocess.Popen('tcsh -c "'+create_runscript_cmd+'"',shell=True,stdout=PIPE,stdin=PIPE, stderr=PIPE)
        print "Step 2: Individual runscript  creation.. in progress"
        output1, errors1 = p1.communicate()
	#cprint output1,errors1
        print "2- completed\n"
        if(p1.returncode != 0):
         print "Step2:!!!! FAILED !!!!, please contact developer."
         print output1, errors1
         sys.exit(0)
        #print output1, errors1
        #c print errors1
        
        ###################################### 3 ################################################################
        # Step 3: generate the master runscript that drives the others.
        # NOTE(review): 'ppn' is not defined anywhere in this function --
        # presumably a module-level global; verify before relying on it.
        script3Loc = basedir+"/utils/bin/"+"create_master_runscript"
        create_master_cmd= script3Loc+" "+str(lons)+" "+str(lone)+" "+str(predictor)+" "+method+" "+basedir+" "+expconfig+" "+file_j_range+" "+tstamp+" "+str(ppn)
        print "Step 3: --------------MASTER SCRIPT GENERATION-----------------------"#+create_master_cmd
        p2 = subprocess.Popen('tcsh -c "'+create_master_cmd+'"',shell=True,stdout=PIPE,stdin=PIPE, stderr=PIPE)
        #cprint create_master_cmd
        #print "Create master script .. in progress"
        output2, errors2 = p2.communicate()
        #######
        if(p2.returncode != 0):
         print "Step3:!!!! FAILED !!!!, please contact developer."
         print output2, errors2
         sys.exit(0)
        print output2, errors2
        print "3- completed"
	##################################### 4 ############################################
        # copy xml to configs dir 
        cdir = basedir+"/"+"/scripts/"+predictor+"/"+expconfig+"/config/"
        # NOTE(review): bare except below deliberately treats mkdir
        # failure (e.g. directory exists) as non-fatal.
        try:
	  os.mkdir(cdir)
	except:
	  print "Unable to create dir. Dir may exist already", cdir 
	shutil.copy2(uinput, cdir)
        print "Config XML saved in ",cdir 
	print "----See readMe in fudge2014 directory for the next steps----"
Beispiel #56
0
def main():
        """Drive end-to-end generation of downscaling experiment scripts.

        Parses ``-i <XML template>`` (and ``--msub``) from the command line,
        checks that we are on an analysis node, reads the experiment
        configuration via ``listVars()``, then shells out (through tcsh) to a
        chain of generator utilities:

          1. create_runcode           -- R code starters
          2. create_runscript         -- per-slice runscripts
          3. create_master_runscript  -- master submission script
          4. create_postProc          -- post-processing job script
          5. fudgeList.py             -- experiment summary log

        Exits with a non-zero (or, in some failure paths, zero -- see NOTEs
        below) status when any step fails.

        NOTE(review): this function uses several names that are never defined
        in its body and are not visible here (``overwrite``, ``preexist_glob``,
        ``ppn``, ``xml``, ``temphandler``) -- presumably module-level globals
        or imports defined elsewhere in the file; confirm before refactoring.
        """
    #################### args parsing ############
        # NOTE: ``help`` shadows the ``help`` builtin; it is only the usage text.
        help = "#################Usage:(run from AN nodes)##################\n dsTemplater -i <input XML> \n "
        help = help + "Example 1: expergen -i dstemplate.xml \n "
        help = help + "Example 2: expergen -i examples/dstemplate.60lo0FUTURE.xml \n"
        #print usage
	basedir = os.environ.get('BASEDIR')
        if(basedir is None):
	   print "Warning: BASEDIR environment variable not set"	
	##### get version BRANCH info ######################
        branch = os.environ.get('BRANCH')
        if(branch is None):
           print "Warning: BRANCH env variable not set. BRANCH will be set to undefined"
           branch = "undefined"
	####################################################
        parser = OptionParser(usage=help)
        parser.add_option("-i", "--file", dest="uinput",
        help="pass location of XML template", metavar="FILE")
      #since we now have an XML tag  parser.add_option("-f", "--force",action="store_true",default=False, help="Force override existing output. Default is set to FALSE")
        parser.add_option("--msub", "--msub",action="store_true",default=False, help="Automatically submit the master runscripts using msub.Default is set to FALSE")
        #parser.add_option("-v", "--version",action="store_true", dest="version",default="v20120422", help="Assign version for downscaled data. Default is set to v20120422")        
        
        (options, args) = parser.parse_args()
        # verOpt/forOpt are set but never read below -- presumably legacy flags.
        verOpt = True #default
	msub = False 
        forOpt = True #default
        inter = 'on' #for debugging
#########if platform is not PAN, disable expergen at this time 11/14/2014 ###############
        system,node,release,version,machine = os.uname()
        if(node.startswith('pp')):
                print "Running on PP(PAN) node", node
        # NOTE(review): the next if/else is NOT chained to the 'pp' check above,
        # so a 'pp' node prints the message above and then still falls into the
        # else branch and exits(-5). Likely the second test was meant to be
        # ``elif`` -- confirm intended behavior on PP nodes.
        if(node.startswith('an')):
                print "Running on AN(PAN) node", node
        else:
                 print "\033[1;41mERROR code -5: Running on a workstation not tested for end-to-end runs yet. Please login to analysis nodes to run expergen. \033[1;m",node
		 sys.exit(-5)

   
######################################################################################### 
        # Non-interactive path: hard-coded template; parses the XML via SAX.
        # (Dead in practice because ``inter`` is hard-coded to 'on' above.)
        if (inter == 'off'):
		uinput = 'dstemplate.xml'
	        dversion = "v20120422"
                force = False
                if os.path.exists(uinput):
                                        parser = xml.sax.make_parser(  )
                                        handler = temphandler.TempHandler(  )
                                        parser.setContentHandler(handler)
                                        parser.parse(uinput)
        # Interactive path: pull uinput/msub out of the parsed options.
        if (inter == 'on'):  
        	for opts,vals in options.__dict__.items():
           	# print opts,vals
            		if(opts == 'uinput'):
                		uinput = vals
                		if os.path.exists(uinput):
				        print "XML input:",uinput
                    			#parser = xml.sax.make_parser(  )
                    			#handler = temphandler.TempHandler(  )
                    			#parser.setContentHandler(handler)
                    			#parser.parse(uinput)
                		else:
                    			print "Please pass a valid input XML filename with the -i argument and try again. See -h for syntax. Quitting now.."
                    			sys.exit()
            		if(opts == 'msub'):
                		if (vals == True):
                    			msub = True 
				else:
					msub = False
                		# NOTE(review): this assignment makes the
                		# if/else just above dead code.
                		msub = vals 
        #########  call listVars() #############################################################
	# NOTE(review): ``late`` appears twice in this unpack (positions 13 and
	# 15); the second occurrence overwrites the first -- one of them is
	# presumably meant to be a different variable. Verify against listVars().
	output_grid,kfold,lone,region,fut_train_start_time,fut_train_end_time,file_j_range,hist_file_start_time,hist_file_end_time,hist_train_start_time,hist_train_end_time,lats,late,lons,late, basedir,method,target_file_start_time,target_file_end_time,target_train_start_time,target_train_end_time,spat_mask,fut_file_start_time,fut_file_end_time,predictor,target,params,outdir,dversion,dexper,target_scenario,target_model,target_freq,hist_scenario,hist_model,hist_freq,fut_scenario,fut_model,fut_freq,hist_pred_dir,fut_pred_dir,target_dir,expconfig,target_time_window,hist_time_window,fut_time_window,tstamp,ds_region,target_ver,auxcustom,qc_mask,qc_varname,qc_type,adjust_out,sbase,pr_opts,masklists= listVars(uinput,basedir,msub)
        ######### call script creators..  #######################################################
        ############################### 1 ###############################################################
        #  make.code.tmax.sh 1 748 756 /vftmp/Aparna.Radhakrishnan/pid15769 outdir 1979 2008 tasmax

	# If the runscript directory already exists, please quit
	#/home/a1r/gitlab/cew/fudge2014///scripts/tasmax/35txa-CDFt-A00X01K02
	scriptdir = [sbase+"/master",sbase+"/runcode",sbase+"/runscript"]
        for sd in scriptdir:
	    if(os.path.exists(sd)):
       		 if os.listdir(sd):
		    # NOTE(review): ``overwrite`` is not defined in this
		    # function -- assumed to be a global set from the XML
		    # <ifpreexist> tag; confirm.
		    if (overwrite == False):    
			print '\033[1;41mERROR: Scripts Directory already exists. Clean up using cleanup_script and try again please -or- (Use <ifpreexist>erase</ifpreexist> if output/scripts already exist and you want to override this and let expergen run the cleanup_script for you; Use <ifpreexist>move</ifpreexist> to move existing output)\033[1;m',sd
                        print "\033[1;41mERROR code -6: script directory already exists.Check --\033[1;m",scriptdir
                	sys.exit(-6)
		    if (overwrite == True):
			print "\033[1;43mWarning: Scripts Directory already exists. But, since <ifpreexist> has different settings, cleanup utility will handle this\033[1;m "
			print "Now invoking cleanup utility..........."
		        cleaner_script = basedir+"/utils/bin/"+"cleanup_script.csh"
			# ``preexist_glob``: presumably the <ifpreexist> value
			# ('erase' | 'move' | 'exit') -- not defined locally.
			if(preexist_glob == 'erase'):
				print "Lets erase it ........... ifpreexist"
		        	cleaner_cmd = cleaner_script+" d "+uinput 
			else: 
				# NOTE(review): bitwise & between comparisons;
				# works here only because both sides are
				# parenthesized bools -- should be ``and``.
				if(preexist_glob != 'move') & (preexist_glob != 'exit'):
					print "CHECK ifpreexist settings, quitting now"
					sys.exit(1)
				print "Lets move it............... if preexist" 
                               	cleaner_cmd = cleaner_script+" m "+uinput
	                print "cleaner_cmd"
			print cleaner_cmd		
	                pclean = subprocess.Popen(cleaner_cmd,shell=True,stdout=PIPE,stdin=PIPE, stderr=PIPE)
			#check return code
        		output0, errors0 = pclean.communicate()
        		if(pclean.returncode != 0):
				# NOTE(review): "033[1;41m" is missing the
				# backslash escape (should be "\033") -- the
				# raw text is printed instead of a color code.
         			print "033[1;41mCleaner Step:!!!! FAILED !!!!, please contact developer.033[1;m"
         			print output0, errors0
        			sys.exit(1)
			else:
				print output0,errors0
			print "continue expergen run...................." 
			# Cleanup ran once for the whole experiment tree; no
			# need to examine the remaining scriptdir entries.
			break 
	    else:
		   print "scriptdir "+sd+" does not exist. Looks like a clean slate "
        #print  "hist_freq"+hist_freq
        # ---- Step 1: build the create_runcode command line (positional args) ----
        script1Loc = basedir+"/utils/bin/create_runcode"
        make_code_cmd = script1Loc+" "+str(predictor)+" "+str(target)+" "+str(output_grid)+" "+str(spat_mask)+" "+str(region)
        make_code_cmd = make_code_cmd+" "+str(file_j_range)+" "+str(lons)+" "+str(lats)+" "+str(late)
	make_code_cmd = make_code_cmd+" "+str(hist_file_start_time)+" "+str(hist_file_end_time)+" "+str(hist_train_start_time)+" "+str(hist_train_end_time)+" "+str(hist_scenario)+" "+str(hist_model)+" "+hist_freq+" "+str(hist_pred_dir)+" "+str(hist_time_window)
        make_code_cmd = make_code_cmd+" "+str(fut_file_start_time)+" "+str(fut_file_end_time)+" "+str(fut_train_start_time)+" "+str(fut_train_end_time)+" "+str(fut_scenario)+" "+str(fut_model)+" "+fut_freq+" "+str(fut_pred_dir)+" "+str(fut_time_window)
        make_code_cmd = make_code_cmd+" "+str(target_file_start_time)+" "+str(target_file_end_time)+" "+str(target_train_start_time)+" "+str(target_train_end_time)+" "+str(target_scenario)+" "+str(target_model)+" "+target_freq+" "+str(target_dir)+" "+str(target_time_window)
        make_code_cmd = make_code_cmd+" "+str(method)+" "+str(expconfig)+" "+str(kfold)+" "+str(outdir)+" "+str(tstamp)+" "+'na'+" "+basedir+" "+str(lone)

        print "Step 1: R code starters generation ..in progress"
        #p = subprocess.Popen('tcsh -c "'+make_code_cmd+'"',shell=True,stdout=PIPE,stdin=PIPE, stderr=PIPE)
        
        # Quote params/pr_opts/masklists so each survives shell word-splitting
        # as a single argument.
        params_new =  '"'+str(params)+'\"'
	params_pr_opts = '"'+str(pr_opts)+'\"'
        make_code_cmd = make_code_cmd +" "+params_new+" "+"'"+str(ds_region)+"'"
        make_code_cmd = make_code_cmd+" "+str(auxcustom)+" "+str(qc_mask)+" "+str(qc_varname)+" "+str(qc_type)+" "+str(adjust_out)+" "+str(sbase)+" "+str(params_pr_opts)+" "+str(branch)+" "+'"'+str(masklists)+'"' 
	#cprint make_code_cmd
        #p = subprocess.Popen(make_code_cmd +" "+params_new,shell=True,stdout=PIPE,stdin=PIPE, stderr=PIPE)
        p = subprocess.Popen(make_code_cmd,shell=True,stdout=PIPE,stdin=PIPE, stderr=PIPE)
        output, errors = p.communicate() 
        if(p.returncode != 0):
         # NOTE(review): exits with status 0 on failure -- callers/shell see
         # success. Probably should be a non-zero code like steps 4/cleanup.
         print "Step1:!!!! FAILED !!!!, please contact developer."
         print output, errors
         sys.exit(0)
        #cprint output, errors
	#cprint "----Log-----"
        #cprint output,errors
        ###############################################################################################
        print "1- completed\n"
        #print "debug............msub turned ",msub
        ############################### 2 ################################################################
	#target_time_window,hist_time_window,fut_time_window
        script2Loc = basedir+"/utils/bin/"+"create_runscript"
        create_runscript_cmd = script2Loc+" "+str(lons)+" "+str(lone)+" "+str(expconfig)+" "+str(basedir)+" "+target+" "+method+" "+target_dir+" "+hist_pred_dir+" "+fut_pred_dir+" "+outdir+" "+str(file_j_range)+" "+tstamp+" "+str(target_file_start_time)+" "+str(target_file_end_time)+" "+str(hist_file_start_time)+" "+str(hist_file_end_time)+" "+str(fut_file_start_time)+" "+str(fut_file_end_time)+" "+str(spat_mask)+" "+str(region)+" "+auxcustom+" "+target_time_window+" "+hist_time_window+" "+fut_time_window+" "+sbase
        print "Step 2: Individual Runscript generation: \n"+create_runscript_cmd
        p1 = subprocess.Popen('tcsh -c "'+create_runscript_cmd+'"',shell=True,stdout=PIPE,stdin=PIPE, stderr=PIPE)
        print "Step 2: Individual runscript  creation.. in progress"
        output1, errors1 = p1.communicate()
	#cprint output1,errors1
        # NOTE(review): "2- completed" is printed before the return code is
        # checked, so it appears even when step 2 failed.
        print "2- completed\n"
        if(p1.returncode != 0):
         print "Step2:!!!! FAILED !!!!, please contact developer."
         print output1, errors1
         sys.exit(0)
        #print output1, errors1
        #c print errors1
        
        ###################################### 3 ################################################################
        script3Loc = basedir+"/utils/bin/"+"create_master_runscript"
        # NOTE(review): ``ppn`` is not defined in this function -- presumably a
        # global (processors-per-node); confirm.
        create_master_cmd= script3Loc+" "+str(lons)+" "+str(lone)+" "+str(predictor)+" "+method+" "+sbase+" "+expconfig+" "+file_j_range+" "+tstamp+" "+str(ppn)+" "+str(msub)
        print "Step 3: --------------MASTER SCRIPT GENERATION-----------------------"#+create_master_cmd
        p2 = subprocess.Popen('tcsh -c "'+create_master_cmd+'"',shell=True,stdout=PIPE,stdin=PIPE, stderr=PIPE)
        #cprint create_master_cmd
        print "Create master script .. in progress"
        output2, errors2 = p2.communicate()
        #######
        if(p2.returncode != 0):
         print "Step3:!!!! FAILED !!!!, please contact developer."
         print output2, errors2
         sys.exit(0)
        print output2, errors2
        print "3- completed"
	##################################### 4 ############################################
        # copy xml to configs dir 
        cdir = sbase+"/config/"
        try:
	  os.mkdir(cdir)
	# NOTE(review): bare except -- swallows everything, not just EEXIST.
	except:
	  print "Unable to create dir. Dir may exist already", cdir 
	shutil.copy2(uinput, cdir)
        print "Config XML saved in ",cdir 
        print "RunScripts will be saved under:",sbase
	print "----See readMe in fudge2014 directory for the next steps----"
	print "Use submit_job to submit scripts"

############### crte ppscript #################
	# dev toggle: "off" writes postProc_source without a timestamp suffix;
	# anything else appends tstamp (dev mode).
	dev = "off" 
        print(sbase+"/postProc/aux/")
        if not os.path.exists(sbase+"/postProc/aux/"):
        	os.makedirs(sbase+"/postProc/aux/")
        if (dev == "off"):
		tsuffix = ""
        	ppbase = sbase+"/postProc/aux/"+"/postProc_source"
	else:
		tsuffix = "_"+tstamp
                ppbase = sbase+"/postProc/aux/"+"/postProc_source"+tstamp
	try:
  		ppfile = open(ppbase, 'w')
#check if qc_mask is relevant  
		if(qc_mask != 'off'):
  			pp_cmnd = "python $BASEDIR/bin/postProc -i "+os.path.abspath(uinput)+" -v "+target+","+target+"_qcmask\n"
	 	else:
                        pp_cmnd = "python $BASEDIR/bin/postProc -i "+os.path.abspath(uinput)+" -v "+target+"\n"
  		ppfile.write(pp_cmnd)
  		ppfile.close()
	# NOTE(review): bare except hides the real I/O error.
	except:
  		print "Unable to create postProc command file. You may want to check your settings."
	#c	print create_pp_cmd
        if(os.path.exists(ppbase)):
######################### write postProc_job to be used ############################
                ppLoc = basedir+"/utils/bin/"+"create_postProc"
                create_pp_cmd= ppLoc+" "+ppbase+" "+sbase+"  "+basedir+" "+tstamp+" "+branch
                print "Step 4: --------------PP postProc SCRIPT GENERATION-----------------------"
                p4 = subprocess.Popen('tcsh -c "'+create_pp_cmd+'"',shell=True,stdout=PIPE,stdin=PIPE, stderr=PIPE)
                output4, error4 = p4.communicate()
                if(p4.returncode != 0):
                        print "Step4:!!!! FAILED !!!!, please contact developer."
                        print output4, error4
                        sys.exit(-4)
                print output4, error4
                print "4- completed"
                print "NOTE: postProc will succeed only if you're running the model for the full downscaled region. (it will fail if you're running downscaling for a single slice for example)"
                print "----------------------------------------"
		# NOTE(review): ``sbase+"postProc/..."`` is missing a "/"
		# separator unless sbase ends with one -- verify.
		print "\033[1;42mPlease use this script to run post post-processing (or msub this script), postProc when downscaling jobs are complete \033[1;m",sbase+"postProc/postProc_command"+tsuffix
                try:  
   			NEMSemail = os.environ["NEMSemail"]
                        print "msub -m ae -M "+os.environ.get('NEMSemail')+" "+sbase+"postProc/postProc_command"+tsuffix
		except KeyError: 
   			print "NEMSemail not set. Please use your email for notification in the following msub command i.e msub -m ae -M <email> script " 
			print "msub "+sbase+"postProc/postProc_command"+tsuffix
	else:
		print "postProc_command cannot be created. postProc_source does not exist"
################ step 5 fudgeList invocation ##############################################
        # Step 5: record an experiment summary via fudgeList.py.
        slogloc = sbase+"/"+"experiment_info.txt"
        fcmd = "python "+basedir+"/bin/fudgeList.py -f -i "+uinput+" -o "+slogloc
        f = subprocess.Popen(fcmd, stdout=subprocess.PIPE, shell=True)
        out, err = f.communicate()
        #print "fudgeList out", out
        #if err is not None:
        #       print "fudgeList err", err
        print "Summary Log File: ", slogloc