Example #1
import os, fnmatch, batchJobArgs

Opts = batchJobArgs.batchArgs('Find chains which have failed or not converged.', importance=True, converge=True)

Opts.parser.add_argument('--exist', action='store_true')
Opts.parser.add_argument('--checkpoint', action='store_true')
Opts.parser.add_argument('--running', action='store_true')
Opts.parser.add_argument('--not_running', action='store_true')
Opts.parser.add_argument('--stuck', action='store_true')

(batch, args) = Opts.parseForBatch()

notExist = []
converge = []

if args.running: args.checkpoint = True

if args.stuck:
    for jobItem in Opts.filteredBatchItems():
        if jobItem.chainExists() and jobItem.chainsDodgy():
            print 'Chain stuck?...' + jobItem.name
elif args.checkpoint:
    print 'Convergence from checkpoint files...'
    for jobItem in Opts.filteredBatchItems():
        R, done = jobItem.convergeStat()
        if R is not None and not done:
            if (not args.not_running or jobItem.notRunning()) and (not args.running or not jobItem.notRunning()):
                print '...', jobItem.chainRoot, R
            if args.running and jobItem.chainExists() and jobItem.chainsDodgy():
                print 'Chain stuck?...' + jobItem.name
else:
    for jobItem in Opts.filteredBatchItems():
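        # NOTE: the original example is truncated at this point; the lines below are a
        # hypothetical minimal sketch (not the original code) that fills the notExist
        # and converge lists declared above, using only calls seen elsewhere in these examples.
        if not jobItem.chainExists():
            notExist.append(jobItem.name)
            print 'Missing chain...', jobItem.name
        elif args.converge != 0 and not jobItem.hasConvergeBetterThan(args.converge, returnNotExist=True):
            converge.append(jobItem.name)
            print 'Not converged...', jobItem.name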
Example #2
#!/usr/bin/env python
import os, batchJobArgs, jobQueue

Opts = batchJobArgs.batchArgs('Submit jobs to run chains or importance sample', notExist=True, converge=True)
Opts.parser.add_argument('--nodes', type=int, default=2)
Opts.parser.add_argument('--script', default='runMPI_HPCS.pl')

Opts.parser.add_argument('--dryrun', action='store_true')
Opts.parser.add_argument('--subitems', action='store_true')
Opts.parser.add_argument('--minimize', action='store_true')
Opts.parser.add_argument('--importance_minimize', action='store_true')
Opts.parser.add_argument('--minimize_failed', action='store_true')
Opts.parser.add_argument('--checkpoint_run', action='store_true')
Opts.parser.add_argument('--importance_ready', action='store_true')
Opts.parser.add_argument('--not_queued', action='store_true')


(batch, args) = Opts.parseForBatch()

if args.not_queued: 
    print 'Getting queued names...'
    queued = jobQueue.queued_jobs()
    
def notQueued(name):
    for job in queued:
        if name in job: 
#            print 'Already running:', name
            return False
    return True
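
# The original example is truncated here; what follows is a hypothetical sketch
# (not the original code) of how notQueued() might be used to skip already-queued
# items; the actual submission command is omitted:
for jobItem in Opts.filteredBatchItems():
    if args.not_queued and not notQueued(jobItem.name):
        continue
    print 'Submitting...', jobItem.name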
        
Example #3
import os, iniFile, batchJobArgs

def checkDir(fname):
    if not os.path.exists(fname): os.makedirs(fname)


Opts = batchJobArgs.batchArgs('Run getdist over the grid of models', notExist=True)
Opts.parser.add_argument('--plots', action='store_true')
Opts.parser.add_argument('--norun', action='store_true')
Opts.parser.add_argument('--plot_data', default=None)
Opts.parser.add_argument('--burn_removed', action='store_true')


(batch, args) = Opts.parseForBatch()

base_ini = 'getdist_common_batch1.ini'


plot_ext = 'py'
plot_cmd = 'python'

# plot_cmd = 'matlab'

# the matlab plotting run is optional, and only needed if you are using plot_ext=m in getdist
plot_types = ['.', '_2D.']
# you don't need these for python plots generated separately
# '_tri.m' is very slow for so many

if args.plot_data is None: data_dir = batch.batchPath + 'plot_data' + os.sep
else: data_dir = os.path.abspath(args.plot_data) + os.sep
ini_dir = batch.batchPath + 'getdist' + os.sep
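
# The original example is truncated here; a plausible next step (hypothetical, not
# in the original) is to create the output directories with checkDir() defined above:
checkDir(data_dir)
checkDir(ini_dir)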
Example #4
import os
import batchJobArgs
from getdist import ResultObjs, paramNames


Opts = batchJobArgs.batchArgs('Compare parameter constraints over set of models')
Opts.parser.add_argument('--params', nargs='+')
Opts.parser.add_argument('--chain_name_params', nargs='+')

Opts.parser.add_argument('--compare', nargs='+', default=None)
Opts.parser.add_argument('--nobestfits', action='store_true')
Opts.parser.add_argument('--single_extparam', action='store_true')
Opts.parser.add_argument('--limit', type=int, default=2)
Opts.parser.add_argument('--latex_filename', default=None)
Opts.parser.add_argument('--mathColumns', action='store_true')
Opts.parser.add_argument('--endline', default='\\cr')
Opts.parser.add_argument('--paramNameFile', default='clik_latex.paramnames')


(batch, args) = Opts.parseForBatch()
formatter = ResultObjs.tableFormatter()

names = paramNames.paramNames(args.paramNameFile)

if args.chain_name_params is None: args.chain_name_params = args.params

if args.compare: args.compare = [batch.normalizeDataTag(dat) for dat in args.compare]

table = dict()
paramtag_for_param = dict()
for par in args.params:
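    # The original example is truncated here; a hypothetical placeholder (not the
    # original code) so the loop parses -- the real loop presumably fills `table`
    # and `paramtag_for_param` for each requested parameter:
    table[par] = dict()
    paramtag_for_param[par] = []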
Example #5
import os, batchJobArgs, paramNames, GetDistPlots


Opts = batchJobArgs.batchArgs('Make plots from getdist outputs', importance=True, converge=True)
Opts.parser.add_argument('out_dir')

Opts.parser.add_argument('--plot_data', default=None)
Opts.parser.add_argument('--paramNameFile', default='clik_latex.paramnames')
Opts.parser.add_argument('--paramList', default=None)
Opts.parser.add_argument('--compare_data', nargs='+', default=None)
Opts.parser.add_argument('--compare_importance', nargs='*', default=None)
Opts.parser.add_argument('--compare_paramtag', nargs='+', default=None)
Opts.parser.add_argument('--nx', default=None)
Opts.parser.add_argument('--legend_labels', default=None)
Opts.parser.add_argument('--D2_param', default=None)
Opts.parser.add_argument('--outputs', nargs='+', default=['pdf'])
Opts.parser.add_argument('--filled', action='store_true')
Opts.parser.add_argument('--allhave', action='store_true')

(batch, args) = Opts.parseForBatch()

if args.paramList is not None: args.paramList = paramNames.paramNames(args.paramList)

outdir = args.out_dir
if not os.path.exists(outdir): os.makedirs(outdir)
outdir = os.path.abspath(outdir) + os.sep

if args.plot_data is None: data = batch.batchPath + '/plot_data'
else: data = args.plot_data

g = GetDistPlots.GetDistPlotter(data)
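
# The original example is truncated here; a hypothetical minimal use of the plotter
# (not the original code), assuming the usual GetDistPlots plots_1d/export methods:
for jobItem in Opts.filteredBatchItems():
    g.plots_1d([jobItem.name])
    g.export(outdir + jobItem.name + '.' + args.outputs[0])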
Example #6
import os, fnmatch, batchJobArgs

Opts = batchJobArgs.batchArgs('delete failed chains, files etc.', importance=True, converge=True)

Opts.parser.add_argument('--dist', action='store_true')
Opts.parser.add_argument('--ext', nargs='+', default=['*'])
Opts.parser.add_argument('--empty', action='store_true')
Opts.parser.add_argument('--confirm', action='store_true')
Opts.parser.add_argument('--chainnum', default=None)

(batch, args) = Opts.parseForBatch()

def fsizestr(fname):
    sz = os.path.getsize(fname) / 1024
    if (sz < 1024): return str(sz) + 'KB'
    if (sz < 1024 * 1024): return str(sz / 1024) + 'MB'
    if (sz < 1024 * 1024 * 1024): return str(sz / 1024 / 1024) + 'GB'

if args.chainnum is not None:
    args.ext = ['_' + args.chainnum + '.' + ext for ext in args.ext]
else: args.ext = ['.' + ext for ext in args.ext] + ['_*.' + ext for ext in args.ext]

for jobItem in Opts.filteredBatchItems():
    if (args.converge == 0 or not jobItem.hasConvergeBetterThan(args.converge, returnNotExist=True)) and os.path.exists(jobItem.chainPath):
        dirs = [jobItem.chainPath]
        if args.dist: dirs = []
        if os.path.exists(jobItem.distPath): dirs += [jobItem.distPath]
        for adir in dirs:
            files = sorted(os.listdir(adir))
            for f in files:
                for ext in args.ext:
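                    # The original example is truncated here; a hypothetical sketch
                    # (not the original code): report files matching the pattern and
                    # delete them only when --confirm is given.
                    if fnmatch.fnmatch(f, jobItem.name + ext):
                        fname = os.path.join(adir, f)
                        print fname, fsizestr(fname)
                        if args.confirm: os.remove(fname)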
Example #7
import os, re
import batchJobArgs
from getdist import paramNames

Opts = batchJobArgs.batchArgs('rename parameter in all .paramnames files in grid', importance=True)
Opts.parser.add_argument('--old_new', nargs='+', help="list of oldname newname oldname2 newname2...")
Opts.parser.add_argument('--labelNames', default=None, help=".paramnames file for new param labels")
Opts.parser.add_argument('--map_file', help="file with rows of oldname newname label")
Opts.parser.add_argument('--confirm', action='store_true', help="true to replace .paramnames files")


(batch, args) = Opts.parseForBatch()

if args.old_new and len(args.old_new) < 2: raise Exception('Must have at least one pair of parameters to rename')

if args.labelNames:
    labels = paramNames.paramNames(args.labelNames)
else:
    labels = None

mapper = dict()
if args.map_file:
    with open(args.map_file) as f:
        for line in f:
            if line.strip():
                old, new, label = [s.strip() for s in line.split(None, 2)]
                mapper[old] = (new, label)
if args.old_new:
    for old, new in zip(args.old_new[::2], args.old_new[1::2]):
        mapper[old] = (new, None)
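
# The original example is truncated here; a hypothetical helper (not the original
# code) showing how the mapping built above could be applied to a single name:
def renamedParam(name):
    if name in mapper: return mapper[name][0]
    return name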
Example #8
import os, fnmatch, shutil, batchJobArgs

Opts = batchJobArgs.batchArgs('copy all files of a given type from all getdist output directories in the batch', importance=True, converge=True)

Opts.parser.add_argument('target_dir')
Opts.parser.add_argument('file_extension', nargs='+')
Opts.parser.add_argument('--normalize_names', action='store_true', help='replace actual name tags with normalized names')
Opts.parser.add_argument('--tag_replacements', nargs='+', help="XX YY XX2 YY2 replaces name XX with YY, XX2 with YY2 etc.")


(batch, args) = Opts.parseForBatch()

target_dir = os.path.abspath(args.target_dir) + os.sep
if not os.path.exists(target_dir): os.makedirs(target_dir)

if args.tag_replacements is not None:
    replacements = dict()
    for i, val in enumerate(args.tag_replacements[::2]):
        replacements[val] = args.tag_replacements[i * 2 + 1]
else: replacements = None

for ext in args.file_extension:
    if '.' not in ext: pattern = '.' + ext
    else: pattern = ext
    for jobItem in Opts.filteredBatchItems():
        if os.path.exists(jobItem.distPath) and (args.converge == 0 or jobItem.hasConvergeBetterThan(args.converge)):
            for f in os.listdir(jobItem.distPath):
                if fnmatch.fnmatch(f, jobItem.name + pattern):
                    print jobItem.distPath + f
                    if args.normalize_names:
                        fout = jobItem.makeNormedName(replacements)[0] + os.path.splitext(f)[1]
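                    # The example is truncated just above; the else branch and copy
                    # below are a hypothetical completion (not the original code).
                    else:
                        fout = f
                    shutil.copyfile(jobItem.distPath + f, target_dir + fout)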
Example #9
import jobQueue, batchJobArgs, subprocess

Opts = batchJobArgs.batchArgs("Delete running or queued jobs", importance=True, batchPathOptional=True)

group = Opts.parser.add_mutually_exclusive_group()
group.add_argument("--queued", action="store_true")
group.add_argument("--running", action="store_true")

Opts.parser.add_argument("--delete_id_min", type=int)
Opts.parser.add_argument("--delete_id_range", nargs=2, type=int)
Opts.parser.add_argument("--delete_ids", nargs="+", type=int)

Opts.parser.add_argument("--confirm", action="store_true")


(batch, args) = Opts.parseForBatch()


if batch:
    if args.delete_id_range is not None:
        jobQueue.deleteJobs(args.batchPath, jobId_minmax=args.delete_id_range, confirm=args.confirm)
    elif args.delete_id_min is not None:
        jobQueue.deleteJobs(args.batchPath, jobId_min=args.delete_id_min, confirm=args.confirm)
    elif args.delete_ids is not None:
        jobQueue.deleteJobs(args.batchPath, args.delete_ids, confirm=args.confirm)
    else:
        items = [jobItem for jobItem in Opts.filteredBatchItems()]
        batchNames = set([jobItem.name for jobItem in items])
        jobQueue.deleteJobs(args.batchPath, rootNames=batchNames, confirm=args.confirm)

    if not args.confirm:
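        # The original example is truncated here; presumably (hypothetical, not the
        # original code) it reminds the user that nothing is deleted without --confirm:
        print 'Dry run: add --confirm to actually delete jobs'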
Example #10
import os, batchJobArgs, ResultObjs, paramNames, planckStyle


Opts = batchJobArgs.batchArgs('Make pdf tables from latex generated from getdist outputs', importance=True, converge=True)
Opts.parser.add_argument('latex_filename')
Opts.parser.add_argument('--limit', type=int, default=2)
Opts.parser.add_argument('--bestfitonly', action='store_true')
Opts.parser.add_argument('--nobestfit', action='store_true')

# this is just for the latex labels; set None to use those in the chain .paramnames
Opts.parser.add_argument('--paramNameFile', default='clik_latex.paramnames')
Opts.parser.add_argument('--paramList', default=None)
Opts.parser.add_argument('--blockEndParams', default=None)
Opts.parser.add_argument('--columns', type=int, nargs=1, default=3)
Opts.parser.add_argument('--compare', nargs='+', default=None)
Opts.parser.add_argument('--titles', default=None)  # for compare plots
Opts.parser.add_argument('--forpaper', action='store_true')
Opts.parser.add_argument('--separate_tex', action='store_true')
Opts.parser.add_argument('--header_tex', default=None)
Opts.parser.add_argument('--height', default="8in")
Opts.parser.add_argument('--width', default="10in")


(batch, args) = Opts.parseForBatch()

if args.blockEndParams is not None: args.blockEndParams = args.blockEndParams.split(';')
outfile = args.latex_filename

if args.paramList is not None: args.paramList = paramNames.paramNames(args.paramList)

if args.forpaper: formatter = planckStyle.planckStyleTableFormatter()
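# The original example is truncated here; a hypothetical fallback (not the original
# code), mirroring the default formatter used in Example #4:
else: formatter = ResultObjs.tableFormatter()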
Example #11
import jobQueue, batchJobArgs

Opts = batchJobArgs.batchArgs(
    "List details of running or queued jobs; gives job stats, then current R-1 and job/chain names",
    importance=True,
    batchPathOptional=True,
)

group = Opts.parser.add_mutually_exclusive_group()
group.add_argument("--queued", action="store_true")
group.add_argument("--running", action="store_true")

(batch, args) = Opts.parseForBatch()

if batch:
    items = [jobItem for jobItem in Opts.filteredBatchItems()]
    batchNames = set([jobItem.name for jobItem in items] + [jobItem.name + "_minimize" for jobItem in items])

ids, jobNames, nameslist, infos = jobQueue.queue_job_details(
    args.batchPath, running=not args.queued, queued=not args.running
)
for jobId, jobName, names, info in zip(ids, jobNames, nameslist, infos):
    if not batch or batchNames.intersection(set(names)):
        stats = dict()
        if batch:
            for name in names:
                for jobItem in items:
                    if jobItem.name == name:
                        R = jobItem.convergeStat()[0]
                        if R:
                            stats[name] = "%6.3f" % (R)
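        # The original example is truncated here; a hypothetical summary print
        # (not the original code) using the job details and the stats collected above:
        print jobId, jobName, info
        for name in names:
            if name in stats: print '   R-1 =', stats[name], name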
Example #12
import os, fnmatch, shutil, batchJobArgs, batchJob, zipfile

Opts = batchJobArgs.batchArgs('copy or zip chains and optionally other files', importance=True, converge=True)

Opts.parser.add_argument('target_dir', help="output root directory or zip file name")

Opts.parser.add_argument('--dist', action='store_true', help="include getdist outputs")
Opts.parser.add_argument('--chains', action='store_true', help="include chain files")
Opts.parser.add_argument('--sym_link', action='store_true', help="just make symbolic links to source directories")
Opts.parser.add_argument('--no_config', action='store_true', help="don't copy grid config info")

Opts.parser.add_argument('--remove_burn_fraction', default=0.0, type=float, help="fraction at start of chain to remove as burn in")

Opts.parser.add_argument('--file_extensions', nargs='+', default=['.*'], help='extensions to include')
Opts.parser.add_argument('--skip_extensions', nargs='+', default=['.data', '.chk', '.chk_tmp', '.log', '.corr', '.py', '.m', '.py_mcsamples', '.pysamples'])
Opts.parser.add_argument('--dryrun', action='store_true')
Opts.parser.add_argument('--verbose', action='store_true')
Opts.parser.add_argument('--zip', action='store_true', help='make a zip file. Not needed if target_dir is a filename ending in .zip')


(batch, args) = Opts.parseForBatch()

if '.zip' in args.target_dir: args.zip = True

sizeMB = 0

if args.zip:
    zipper = zipfile.ZipFile(args.target_dir, 'w', compression=zipfile.ZIP_DEFLATED, allowZip64=True)
else:
    target_dir = os.path.abspath(args.target_dir) + os.sep
    batchJob.makePath(target_dir)
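
# The original example is truncated here; a hypothetical helper (not the original
# code) sketching how a single file might be copied, linked, or zipped according
# to the flags defined above:
def copyFile(source, destName):
    global sizeMB
    sizeMB += os.path.getsize(source) / 1024. ** 2
    if args.dryrun:
        if args.verbose: print 'Would copy:', source
        return
    if args.zip:
        zipper.write(source, destName)
    elif args.sym_link:
        os.symlink(os.path.abspath(source), target_dir + destName)
    else:
        shutil.copyfile(source, target_dir + destName)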
Example #13
import batchJobArgs


Opts = batchJobArgs.batchArgs('List items in a grid', importance=True, converge=True, notExist=True)
Opts.parser.add_argument('--exists', action='store_true', help='chain must exist')
Opts.parser.add_argument('--normed', action='store_true', help='Output normed names')


(batch, args) = Opts.parseForBatch()
items = Opts.sortedParamtagDict(chainExist=args.exists)

for paramtag, parambatch in items:
    for jobItem in parambatch:
        if hasattr(jobItem, 'group'):
            tag = '(%s)' % jobItem.group
        else:
            tag = ''
        if args.normed:
            print jobItem.normed_name, tag
        else:
            print jobItem.name, tag