示例#1
0
def main():
    """Submit a batch of simulation jobs, one per initial time.

    Command-line arguments override file-specified ones. Jobs may run in
    parallel up to a configured limit; every `parallel` submissions a
    scheduler dependency on the previous simulation is enforced.
    """
    # merge command-line and file-specified arguments
    config = conf.config(__doc__, sys.argv[1:])

    logger = loghelper.create(LOGGER,
                              log_level=config.get('log.level'),
                              log_fmt=config.get('log.format'),
                              log_file=config.get('log.file'))

    dry_run = config.get('dry-run')
    after_job = config.get('after-job')
    max_dom = config['max_dom']

    # either the start time is exactly specified, or else we calculate it from base time, delay and cycles
    if config.get('start'):
        init_time = config['start']
    else:
        init_time = shared.get_time(base_time=config.get('base-time'),
                                    delay=config.get('delay'),
                                    round=config.get('cycles'))

    # expand into a series of initial times if an end time is given
    if config.get('end'):
        end_init = config['end']
        init_interval = config['init_interval']
        init_times = list(
            rrule.rrule(freq=rrule.HOURLY,
                        interval=init_interval,
                        dtstart=init_time,
                        until=end_init))
    else:
        init_times = [init_time]

    jobs = config['jobs']

    # only jobs whose 'run' entry is truth-like get submitted
    run_jobs = [j for j in jobs if true_like(j['run'])]

    parallel = int(config.get('parallel'))
    logger.debug("allow %d parallel simulations" % parallel)

    # seed the dependency chain with the configured 'after-job'; previously
    # this was reset to None here, silently discarding the config value
    previous_sim = after_job
    working_dir = config.get('working-dir')
    for n, init_time in enumerate(init_times):
        # one-argument function to do initial-time substitution in strings
        expand = lambda s: substitute.sub_date(str(s), init_time=init_time)
        wdir = expand(working_dir)
        if not os.path.exists(wdir):
            logger.error("could not find %s, skipping" % wdir)
            continue
        # allow individual simulations to run in parallel, but up to a limit of N
        # every time N reaches parallel limit, enforce a dependency
        if parallel == 0 or (n % parallel) == 0:
            after_job = previous_sim
            logger.debug("dependency: %s" % after_job)
        previous_sim = submit(run_jobs,
                              expand,
                              after_job=after_job,
                              array_job=max_dom,
                              dry_run=dry_run)
示例#2
0
def main():
    """Print the computed initial time in the configured strftime format."""
    # merge command-line and file-specified arguments
    config = conf.config(__doc__, sys.argv[1:])

    init_time = shared.get_time(config.get('start'), config.get('delay'),
                                config.get('cycles'))
    # avoid shadowing the builtin `format`; default to a WRF-style timestamp
    fmt = config.get('format') or '%Y-%m-%d_%H:%M:%S'

    # parenthesised print is valid under both Python 2 and Python 3,
    # unlike the original Python-2-only print statement
    print(init_time.strftime(fmt))
示例#3
0
def main():
    """Fill job templates into target files for each initial time.

    For every initial time in the configured range, compute the date
    replacements and render each enabled job's template to its target.
    """
    # merge command-line and file-specified arguments
    config = conf.config(__doc__, sys.argv[1:])

    logger = loghelper.create(LOGGER,
                              log_level=config.get('log.level'),
                              log_fmt=config.get('log.format'))

    if config.get('log.file'):
        log_file = config['log.file']
        logger.addHandler(
            loghelper.file_handler(log_file, config['log.level'],
                                   config['log.format']))

    # either the start time is exactly specified, or else we calculate it from base time, delay and cycles
    if config.get('start'):
        init_time = config['start']
    else:
        init_time = shared.get_time(base_time=config.get('base-time'),
                                    delay=config.get('delay'),
                                    round=config.get('cycles'))

    if config.get('end'):
        end_init = config['end']
        init_interval = config['init_interval']
        init_times = list(
            rrule.rrule(freq=rrule.HOURLY,
                        interval=init_interval,
                        dtstart=init_time,
                        until=end_init))
    else:
        init_times = [init_time]

    # the job list does not depend on init_time, so build it once here
    # instead of rebuilding it on every loop iteration as before
    jobs = config['jobs']
    # get an ordered list of all the ones which we will run
    run_jobs = [
        jobs[j] for j in sorted(jobs.keys()) if jobs[j]['run'] == True
    ]

    for init_time in init_times:
        # one-argument function to do initial-time substitution in strings
        expand = lambda s: substitute.sub_date(str(s), init_time=init_time)
        replacements = substitute.date_replacements(init_time=init_time)

        for entry in run_jobs:
            template = expand(entry['template'])
            target = expand(entry['target'])
            tm.fill_template(template, target, replacements)
示例#4
0
def main():
    """Recursively copy `source` to `target`, applying date substitutions.

    For each initial time in the configured range, expand the source and
    target paths and run the recursive replacement.
    """
    # merge command-line and file-specified arguments
    config = conf.config(__doc__, sys.argv[1:])

    logger = loghelper.create(LOGGER,
                              log_level=config.get('log.level'),
                              log_fmt=config.get('log.format'))

    if config.get('log.file'):
        log_file = config['log.file']
        logger.addHandler(
            loghelper.file_handler(log_file, config['log.level'],
                                   config['log.format']))
        logger.debug('now logging to file')

    # either the start time is exactly specified, or else we calculate it
    if config.get('start'):
        init_time = config['start']
    else:
        init_time = shared.get_time(base_time=config.get('base-time'),
                                    delay=config.get('delay'),
                                    round=config.get('cycles'))

    if config.get('end'):
        end_init = config['end']
        init_interval = config['init_interval']
        init_times = list(
            rrule.rrule(freq=rrule.HOURLY,
                        interval=init_interval,
                        dtstart=init_time,
                        until=end_init))
    else:
        init_times = [init_time]

    for init_time in init_times:
        # one-argument function to do initial-time substitution in strings;
        # isinstance is the idiomatic form of the old type(s) == type("") check
        expand = lambda s: substitute.sub_date(s, init_time=init_time) if isinstance(s, str) else s

        # dictionary of replacements e.g. %iY : 2015
        date_replacements = substitute.date_replacements(init_time=init_time)

        source = expand(config['source'])
        target = expand(config['target'])

        # NOTE(review): assert is stripped under -O; kept for compatibility
        assert (_are_compatible(source, target))

        _recursive_replace(source, target, date_replacements)
示例#5
0
def main():
    """Submit a batch of simulation jobs, one per initial time.

    Jobs may run in parallel up to a configured limit; every `parallel`
    submissions a dependency on the previous simulation is enforced.
    """
    # merge command-line and file-specified arguments
    config = conf.config(__doc__, sys.argv[1:])

    logger = loghelper.create(LOGGER, log_level=config.get('log.level'), log_fmt=config.get('log.format'), log_file=config.get('log.file'))

    dry_run = config.get('dry-run')
    after_job = config.get('after-job')
    max_dom = config['max_dom']

    # either the start time is exactly specified, or else we calculate it from base time, delay and cycles
    if config.get('start'):
        init_time = config['start']
    else:
        init_time = shared.get_time(base_time=config.get('base-time'), delay=config.get('delay'), round=config.get('cycles'))

    if config.get('end'):
        end_init = config['end']
        init_interval = config['init_interval']
        init_times = list(rrule.rrule(freq=rrule.HOURLY, interval=init_interval, dtstart=init_time, until=end_init))
    else:
        init_times = [init_time]

    jobs = config['jobs']

    # only jobs whose 'run' entry is truth-like get submitted
    run_jobs = [j for j in jobs if true_like(j['run'])]

    parallel = int(config.get('parallel'))
    logger.debug("allow %d parallel simulations" % parallel)

    # seed the dependency chain with the configured 'after-job'; previously
    # this was reset to None here, silently discarding the config value
    previous_sim = after_job
    working_dir = config.get('working-dir')
    for n, init_time in enumerate(init_times):
        # one-argument function to do initial-time substitution in strings
        expand = lambda s: substitute.sub_date(str(s), init_time=init_time)
        wdir = expand(working_dir)
        if not os.path.exists(wdir):
            logger.error("could not find %s, skipping" % wdir)
            continue
        # allow individual simulations to run in parallel, but up to a limit of N
        # every time N reaches parallel limit, enforce a dependency
        if parallel == 0 or (n % parallel) == 0:
            after_job = previous_sim
            logger.debug("dependency: %s" % after_job)
        previous_sim = submit(run_jobs, expand, after_job=after_job, array_job=max_dom, dry_run=dry_run)
示例#6
0
def main():
    """Recursively copy `source` to `target`, applying date substitutions."""
    # merge command-line and file-specified arguments
    config = conf.config(__doc__, sys.argv[1:])

    logger = loghelper.create(LOGGER, log_level=config.get('log.level'), log_fmt=config.get('log.format'))

    if config.get('log.file'):
        log_file = config['log.file']
        logger.addHandler(loghelper.file_handler(log_file, config['log.level'], config['log.format']))
        logger.debug('now logging to file')

    # either the start time is exactly specified, or else we calculate it
    if config.get('start'):
        init_time = config['start']
    else:
        init_time = shared.get_time(base_time=config.get('base-time'), delay=config.get('delay'), round=config.get('cycles'))

    if config.get('end'):
        end_init = config['end']
        init_interval = config['init_interval']
        init_times = list(rrule.rrule(freq=rrule.HOURLY, interval=init_interval, dtstart=init_time, until=end_init))
    else:
        init_times = [init_time]

    for init_time in init_times:
        # one-argument function to do initial-time substitution in strings;
        # isinstance is the idiomatic form of the old type(s)==type("") check
        expand = lambda s: substitute.sub_date(s, init_time=init_time) if isinstance(s, str) else s

        # dictionary of replacements e.g. %iY : 2015
        date_replacements = substitute.date_replacements(init_time=init_time)

        source = expand(config['source'])
        target = expand(config['target'])

        # NOTE(review): assert is stripped under -O; kept for compatibility
        assert (_are_compatible(source, target))

        _recursive_replace(source, target, date_replacements)
示例#7
0
def main():
    """Fill job templates into target files for each initial time."""
    # merge command-line and file-specified arguments
    config = conf.config(__doc__, sys.argv[1:])

    logger = loghelper.create(LOGGER, log_level=config.get("log.level"), log_fmt=config.get("log.format"))

    if config.get("log.file"):
        log_file = config["log.file"]
        logger.addHandler(loghelper.file_handler(log_file, config["log.level"], config["log.format"]))

    # either the start time is exactly specified, or else we calculate it from base time, delay and cycles
    if config.get("start"):
        init_time = config["start"]
    else:
        init_time = shared.get_time(
            base_time=config.get("base-time"), delay=config.get("delay"), round=config.get("cycles")
        )

    if config.get("end"):
        end_init = config["end"]
        init_interval = config["init_interval"]
        init_times = list(rrule.rrule(freq=rrule.HOURLY, interval=init_interval, dtstart=init_time, until=end_init))
    else:
        init_times = [init_time]

    # the job list does not depend on init_time, so build it once here
    # instead of rebuilding it on every loop iteration as before
    jobs = config["jobs"]
    # get an ordered list of all the ones which we will run
    run_jobs = [jobs[j] for j in sorted(jobs.keys()) if jobs[j]["run"] == True]

    for init_time in init_times:
        # one-argument function to do initial-time substitution in strings
        expand = lambda s: substitute.sub_date(str(s), init_time=init_time)
        replacements = substitute.date_replacements(init_time=init_time)

        for entry in run_jobs:
            template = expand(entry["template"])
            target = expand(entry["target"])
            tm.fill_template(template, target, replacements)
示例#8
0
File: init.py  Project: zhenkunl/wrftools
def main():
    """Initialise a run directory according to the configuration.

    Applies, in order, the initialise.create / remove / copy / link
    settings, then prints the next-steps banner.
    """
    # merge command-line and file-specified arguments
    config = conf.config(__doc__, sys.argv[1:])
    logger = loghelper.create(LOGGER,
                              log_level=config.get('log.level'),
                              log_fmt=config.get('log.format'),
                              log_file=config.get('log.file'))

    base_dir = config['base_dir']
    wrftools_dir = config['wrftools_dir']
    dry_run = config.get('dry_run')
    jobs = config.get('jobs')

    # dispatch table: each initialise.* option maps to its helper,
    # applied in the same order as the original if-blocks
    actions = [(config.get('initialise.create'), shared.create),
               (config.get('initialise.remove'), shared.remove),
               (config.get('initialise.copy'), shared.copy),
               (config.get('initialise.link'), shared.link)]

    for entries, action in actions:
        if entries:
            for entry in entries:
                action(entry, dry_run=dry_run)

    logger.debug("init.py done")
    print("\n\n")
    print("************************************************")
    print(NEXT_STEPS % (base_dir, base_dir))
    print("************************************************")
示例#9
0
def main():
    """Render every configured job template into its target file."""
    # merge command-line and file-specified arguments
    config = conf.config(__doc__, sys.argv[1:])

    logger = loghelper.create(LOGGER,
                              log_level=config.get('log.level'),
                              log_fmt=config.get('log.format'))

    if config.get('log.file'):
        log_file = config['log.file']
        logger.addHandler(
            loghelper.file_handler(log_file, config['log.level'],
                                   config['log.format']))

    jobs = config['jobs']

    # process jobs in deterministic (sorted) key order
    for name in sorted(jobs.keys()):
        job = jobs[name]
        template = job['template']
        target = job['target']
        logger.debug("filling template  %s ----> %s" % (template, target))
        # make sure the target directory exists before writing to it
        path, _ = os.path.split(target)
        if not os.path.exists(path):
            os.makedirs(path)

        tm.fill_template(template, target, job['replacements'])
示例#10
0
def main():
    """Initialise a run directory according to the configuration settings."""
    # merge command-line and file-specified arguments
    config = conf.config(__doc__, sys.argv[1:])
    logger = loghelper.create(LOGGER, log_level=config.get('log.level'),
                              log_fmt=config.get('log.format'),
                              log_file=config.get('log.file'))

    base_dir = config['base_dir']
    wrftools_dir = config['wrftools_dir']
    dry_run = config.get('dry_run')
    jobs = config.get('jobs')

    # apply each configured initialisation step in the original order:
    # create, then remove, then copy, then link
    for option, helper in (('initialise.create', shared.create),
                           ('initialise.remove', shared.remove),
                           ('initialise.copy', shared.copy),
                           ('initialise.link', shared.link)):
        patterns = config.get(option)
        if patterns:
            for pattern in patterns:
                helper(pattern, dry_run=dry_run)

    logger.debug("init.py done")
    print("\n\n")
    print("************************************************")
    print(NEXT_STEPS % (base_dir, base_dir))
    print("************************************************")
示例#11
0
def main():
    """Build the merged configuration and run the power calculation."""
    cfg = conf.config(__doc__, sys.argv[1:])
    power(cfg)
示例#12
0
def main():
    """Pass command line arguments to NCL script.

    Extracts time series from wrfout netCDF files by shelling out to an
    NCL script, either once per file ("loop" mode) or over all files at
    once ("lump" mode).
    """
    config = conf.config(__doc__, sys.argv[1:])

    t0 = time.time()

    # default the output directory to the current directory
    if config["out-dir"] is None:
        out_dir = "."
    else:
        out_dir = config["out-dir"]

    # if ncl-script not specified, fall back to the bundled NCL_SCRIPT.
    # Fixed: the else-branch previously assigned the unused name
    # ncl_code_dir, leaving ncl_script undefined (NameError) whenever
    # the option was actually supplied.
    if config["ncl-script"] is None:
        ncl_script = NCL_SCRIPT
    else:
        ncl_script = config["ncl-script"]

    cmd_files = config["<file>"]
    # Add nc extension if needed
    nc_files = [f if f.endswith(".nc") else f + ".nc" for f in cmd_files]

    # Create height array literal in NCL syntax, e.g. (/10,20/)
    hgts = config["height"]
    hgts = "(/%s/)" % ",".join(map(str, hgts))

    mode = config["mode"]

    dry_run = config["dry-run"]

    loc = config["loc"]
    opt = config["opt"]

    # parenthesised print works under both Python 2 and Python 3
    print("\n*****************************************************")
    print("extract.py")

    if mode not in SUPPORTED_MODES:
        raise ConfigError("specified mode not supported")

    if mode == "loop":

        # This will loop over each file separately
        for f in sorted(nc_files):
            path, name = os.path.split(f)
            out_file = out_dir + "/" + name.replace("wrfout", "tseries")

            if os.path.exists(out_file):
                os.remove(out_file)

            in_file = f
            cmd = """NCL_OPT_FILE=%s ncl 'in_file="%s"' 'out_file="%s"' 'extract_heights=%s' 'loc_file="%s"' %s""" % (
                opt,
                in_file,
                out_file,
                hgts,
                loc,
                ncl_script,
            )
            print(cmd)
            # We could either aggregate all files together or loop over files
            if not dry_run:
                subprocess.call(cmd, shell=True)

    elif mode == "lump":
        f = nc_files[0]
        path, name = os.path.split(f)
        out_file = out_dir + "/" + name.replace("wrfout", "tseries")
        if os.path.exists(out_file):
            # fixed: os.rm does not exist; os.remove is the correct call
            os.remove(out_file)

        # Create NCL file array literal from all input files
        files = '","'.join(sorted(nc_files))
        in_file = '(/"%s"/)' % files
        cmd = """NCL_OPT_FILE=%s ncl 'in_file=%s' 'out_file="%s"' 'extract_heights=%s' 'loc_file="%s"' %s""" % (
            opt,
            in_file,
            out_file,
            hgts,
            loc,
            ncl_script,
        )
        print(cmd)
        if not dry_run:
            subprocess.call(cmd, shell=True)

    te = time.time() - t0
    print("elapsed time: %0.1f " % te)
示例#13
0
File: ncdump.py  Project: qingu/wrftools
def main():
    """Build a flattened configuration, log it, and run ncdump."""
    cfg = conf.config(__doc__, sys.argv[1:], flatten=True)
    log = loghelper.create_logger(cfg)
    log.debug(cfg)
    ncdump(cfg)
def test_config():
    # smoke test: building a config from a bare docstring must not raise
    confighelper.config("docstring", None)
    assert True
示例#15
0
def main():
    """Prepare working directories and namelists for a batch of WRF runs.

    For each initial time in the configured range: build the working
    directory structure, fill templates, update namelist.wps and
    namelist.input, apply any extra namelist updates, and link in the
    ungrib input files. An IOError for one initial time is logged and
    the remaining initial times are still processed.
    """
    # merge command-line and file-specified arguments
    config = conf.config(__doc__, sys.argv[1:])

    logger = loghelper.create(LOGGER, log_level=config.get('log.level'),
                              log_fmt=config.get('log.format'),
                              log_file=config.get('log.file'))

    if not os.path.exists(config['namelist_wps']):
        logger.error("No namelist.wps found, %s was specifed as template, but does not exist" % config['namelist_wps'])
        sys.exit(1)  # exit non-zero so callers can detect the failure

    if not os.path.exists(config['namelist_input']):
        # fixed: the message previously read config['namlist_input'] (typo),
        # which raised a KeyError instead of logging the real problem
        logger.error("No namelist.input found, %s was specifed as template, but does not exist" % config['namelist_input'])
        sys.exit(1)

    dry_run = config.get('dry-run')
    rmtree = config.get('rmtree')
    max_dom = config['max_dom']
    bdy_interval = config['bdy_interval']
    fcst_hours = config['fcst_hours']
    logger.debug(fcst_hours)

    history_interval = config['history_interval']
    link_boundaries = config.get('link-boundaries')

    # either the start time is exactly specified, or else we calculate it
    if config.get('start'):
        init_time = config['start']
    else:
        init_time = shared.get_time(base_time=config.get('base-time'), delay=config.get('delay'), round=config.get('cycles'))

    if config.get('end'):
        end_init = config['end']
        logger.debug(end_init)
        init_interval = config['init_interval']
        init_times = list(rrule.rrule(freq=rrule.HOURLY, interval=init_interval, dtstart=init_time, until=end_init))
    else:
        init_times = [init_time]

    for init_time in init_times:
        try:
            logger.info("**** Running simulation for %s *****" % init_time)

            # one-argument function to do initial-time substitution in strings;
            # isinstance is the idiomatic form of the old type(s)==type("") check
            expand = lambda s: substitute.sub_date(s, init_time=init_time) if isinstance(s, str) else s

            date_replacements = substitute.date_replacements(init_time=init_time)

            working_dir = expand(config['working_dir'])
            logger.info("working dir: %s " % working_dir)

            if rmtree:
                safe_remove(working_dir, dry_run)

            create_directory_structure(expand, remove=config.get('prepare.remove'), create=config.get('prepare.create'), copy=config.get('prepare.copy'), link=config.get('prepare.link'), dry_run=dry_run)
            if config.get('prepare.template'):
                for entry in config['prepare.template']:
                    # entries are "source [target]"; target defaults to source
                    tokens = expand(entry).split()
                    source = tokens[0]
                    target = tokens[1] if len(tokens) > 1 else tokens[0]
                    templater.fill_template(source, target, date_replacements)

            bdy_times = shared.get_bdy_times(init_time, fcst_hours, bdy_interval)

            working_namelist = working_dir + "/namelist.wps"

            # this can be made cleaner
            prefix = ''
            update_namelist_wps(config['namelist_wps'], working_namelist, config['max_dom'],
                                init_time, fcst_hours, config['bdy_interval'],
                                config['geo_em_dir'], config['met_em_dir'], config['geogrid_run_dir'], config['metgrid_run_dir'], prefix,
                                config.get('constants_name'))

            working_namelist = working_dir + "/namelist.input"
            logger.debug(fcst_hours)

            update_namelist_input(config['namelist_input'], working_namelist, max_dom, init_time, fcst_hours, history_interval, bdy_interval * 60 * 60, metadata=config.get('metadata'))
            logger.debug(fcst_hours)

            # apply any additional specified namelist updates (consider getting rid of this section)
            namelist_updates = config.get('namelist_updates')
            if namelist_updates:
                for key in sorted(namelist_updates.keys()):
                    entry = namelist_updates[key]
                    logger.debug('processing namelist update entry %s' % key)

                    template = expand(entry['template'])
                    target = expand(entry['target'])
                    logger.debug('%s\t---->\t%s' % (template.ljust(20), target.ljust(20)))
                    namelist = shared.read_namelist(template)
                    if entry.get('update'):
                        for old, new in entry['update'].items():
                            logger.debug('\t%s\t:\t%s' % (old.ljust(20), expand(new).ljust(20)))
                            namelist.update(old, expand(new))
                    namelist.to_file(target)

            # link in input files for all ungrib jobs
            # update namelist.wps to modify start and end time
            if config.get('ungrib'):
                for key, entry in config['ungrib'].items():
                    # apply any delay and rounding to the init_time to get correct time for dataset
                    # note that sometimes it is necessary to use a different time e.g. for SST field is delayed by one day
                    run_dir = expand(entry['run_dir'])
                    base_time = shared.get_time(init_time, delay=entry.get('delay'), round=entry.get('cycles'))
                    ungrib_len = int(entry['ungrib_len'])
                    bdy_times = shared.get_bdy_times(base_time, ungrib_len, bdy_interval)
                    namelist = shared.read_namelist(run_dir + "/namelist.wps")

                    start_str = base_time.strftime("%Y-%m-%d_%H:%M:%S")
                    end_str = bdy_times[-1].strftime("%Y-%m-%d_%H:%M:%S")

                    namelist.update('start_date', [start_str] * max_dom)
                    namelist.update('end_date', [end_str] * max_dom)
                    namelist.to_file(run_dir + "/namelist.wps")

                    # link in vtable
                    vtable = entry['vtable']
                    cmd = "%s %s/Vtable" % (vtable, run_dir)
                    shared.link(cmd, dry_run=dry_run)

                    if link_boundaries:

                        file_pattern = entry['files']

                        # create an ordered set to ensure filenames only appear once
                        filenames = shared.ordered_set([substitute.sub_date(file_pattern, init_time=base_time, valid_time=t) for t in bdy_times])
                        missing_files = []
                        for f in filenames:
                            if not os.path.exists(f):
                                missing_files.append(f)
                                logger.error("%s \t missing" % f)

                        if missing_files != []:
                            # clean up a partly-built working directory before bailing out
                            if rmtree:
                                safe_remove(working_dir, dry_run)
                            raise IOError("some files could not be found")

                        args = ' '.join(filenames)
                        cmd = '%s/link_grib.csh %s' % (run_dir, args)
                        shared.run_cmd(cmd, dry_run=dry_run, cwd=run_dir, log=False)

        except IOError as e:
            logger.error(e)
def test_config():
    # smoke test: building a config from a bare docstring must not raise
    confighelper.config("docstring", None)
    assert True 
示例#17
0
"""example.py docstring in docopt recognised format see docopt

	Usage: 
	example.py [--config=<file>]
			[--option1=arg1]
			[--option2=arg2]
			[--option3=arg3]
			[--option4=arg4]
			[--option5=arg5]

	Options:
		--config=<file>    configuration file to specify options
		--option1=arg1     anything specified here will ovveride the config file 
		--option2=arg2
		--option3=arg3
		--option4=arg4
		--option5=arg5
"""
    
import confighelper as conf
import sys
import pprint

# get configuration by passing in docstring and command-line arguments
config = conf.config(__doc__, sys.argv[1:] )

# config will be a merged dictionary of file and command-line args
pp = pprint.PrettyPrinter(indent=4)
pp.pprint(config)
示例#18
0
def main():
    """Build a flattened configuration, attach logging, and run ncdump."""
    cfg = conf.config(__doc__, sys.argv[1:], flatten=True)
    logger = loghelper.create(LOGGER, cfg['log.level'], cfg['log.fmt'],
                              cfg['log.file'])
    ncdump(cfg)
示例#19
0
def main():
    """Pass command line arguments to NCL script.

    Extracts time series from wrfout netCDF files by shelling out to an
    NCL script, either once per file ('loop' mode) or over all files at
    once ('lump' mode).
    """
    config = conf.config(__doc__, sys.argv[1:])

    t0 = time.time()

    # default the output directory to the current directory
    if config['out-dir'] is None:
        out_dir = '.'
    else:
        out_dir = config['out-dir']

    # if ncl-script not specified, fall back to the bundled NCL_SCRIPT.
    # Fixed: the else-branch previously assigned the unused name
    # ncl_code_dir, leaving ncl_script undefined (NameError) whenever
    # the option was actually supplied.
    if config['ncl-script'] is None:
        ncl_script = NCL_SCRIPT
    else:
        ncl_script = config['ncl-script']

    cmd_files = config['<file>']
    # Add nc extension if needed
    nc_files = [f if f.endswith('.nc') else f + '.nc' for f in cmd_files]

    # Create height array literal in NCL syntax, e.g. (/10,20/)
    hgts = config['height']
    hgts = '(/%s/)' % ','.join(map(str, hgts))

    mode = config['mode']

    dry_run = config['dry-run']

    loc = config['loc']
    opt = config['opt']

    # parenthesised print works under both Python 2 and Python 3
    print('\n*****************************************************')
    print('extract.py')

    if mode not in SUPPORTED_MODES:
        raise ConfigError("specified mode not supported")

    if mode == 'loop':

        # This will loop over each file separately
        for f in sorted(nc_files):
            path, name = os.path.split(f)
            out_file = out_dir + '/' + name.replace('wrfout', 'tseries')

            if os.path.exists(out_file):
                os.remove(out_file)

            in_file = f
            cmd = """NCL_OPT_FILE=%s ncl 'in_file="%s"' 'out_file="%s"' 'extract_heights=%s' 'loc_file="%s"' %s""" % (
                opt, in_file, out_file, hgts, loc, ncl_script)
            print(cmd)
            # We could either aggregate all files together or loop over files
            if not dry_run:
                subprocess.call(cmd, shell=True)

    elif mode == 'lump':
        f = nc_files[0]
        path, name = os.path.split(f)
        out_file = out_dir + '/' + name.replace('wrfout', 'tseries')
        if os.path.exists(out_file):
            # fixed: os.rm does not exist; os.remove is the correct call
            os.remove(out_file)

        # Create NCL file array literal from all input files
        files = '","'.join(sorted(nc_files))
        in_file = '(/"%s"/)' % files
        cmd = """NCL_OPT_FILE=%s ncl 'in_file=%s' 'out_file="%s"' 'extract_heights=%s' 'loc_file="%s"' %s""" % (
            opt, in_file, out_file, hgts, loc, ncl_script)
        print(cmd)
        if not dry_run:
            subprocess.call(cmd, shell=True)

    te = time.time() - t0
    print('elapsed time: %0.1f ' % te)
示例#20
0
def main():
    """Build a flattened configuration, log it, then run ncdump."""
    settings = conf.config(__doc__, sys.argv[1:], flatten=True)
    log = loghelper.create_logger(settings)
    log.debug(settings)
    ncdump(settings)
示例#21
0
def main():
    """Build a flattened configuration, attach logging, and run ncdump."""
    cfg = conf.config(__doc__, sys.argv[1:], flatten=True)
    logger = loghelper.create(LOGGER, cfg['log.level'], cfg['log.fmt'],
                              cfg['log.file'])
    ncdump(cfg)
示例#22
0
File: power.py  Project: qingu/wrftools
def main():
    """Build a flattened configuration and run the power calculation."""
    cfg = conf.config(__doc__, sys.argv[1:], flatten=True)
    power(cfg)
示例#23
0
"""example.py docstring in docopt recognised format see docopt

	Usage: 
	example.py [--config=<file>]
			[--option1=arg1]
			[--option2=arg2]
			[--option3=arg3]
			[--option4=arg4]
			[--option5=arg5]

	Options:
		--config=<file>    configuration file to specify options
		--option1=arg1     anything specified here will ovveride the config file 
		--option2=arg2
		--option3=arg3
		--option4=arg4
		--option5=arg5
"""

import confighelper as conf
import sys
import pprint

# get configuration by passing in docstring and command-line arguments
config = conf.config(__doc__, sys.argv[1:])

# config will be a merged dictionary of file and command-line args
pp = pprint.PrettyPrinter(indent=4)
pp.pprint(config)
示例#24
0
def main():
    """Parse the YAML-backed configuration and dispatch it."""
    cfg = conf.config(__doc__, sys.argv[1:], flatten=True, format="yaml")
    dispatch(cfg)
示例#25
0
def main():
    """Flatten the merged configuration and run the power calculation."""
    settings = conf.config(__doc__, sys.argv[1:], flatten=True)
    power(settings)
示例#26
0
def main():
    """Parse the YAML-backed configuration and hand it to dispatch."""
    settings = conf.config(__doc__, sys.argv[1:], flatten=True, format="yaml")
    dispatch(settings)
示例#27
0
def main():
    """Merge file and command-line options, then run the power calculation."""
    options = conf.config(__doc__, sys.argv[1:])
    power(options)