예제 #1
0
from __future__ import absolute_import
from __future__ import print_function
import os
import re
from paramgrid import batchjob_args
from getdist import paramnames


# Build the grid-batch argument parser for renaming parameters across all
# .paramnames files in a grid (importance-sampled runs included).
Opts = batchjob_args.batchArgs('rename parameter in all .paramnames files in grid', importance=True)
Opts.parser.add_argument('--old_new', nargs='+', help="list of oldname newname oldname2 newname2...")
Opts.parser.add_argument('--labelNames', default=None, help=".paramnames file for new param labels")
Opts.parser.add_argument('--map_file', help="file with rows of oldname newname label")
Opts.parser.add_argument('--confirm', action='store_true', help="true to replace .paramnames files")


(batch, args) = Opts.parseForBatch()

# --old_new is consumed in (old, new) pairs, so a single entry is unusable.
if args.old_new and len(args.old_new) < 2: raise Exception('Must have at least one pair of parameters to rename')

# Optional file supplying replacement LaTeX labels for the renamed parameters.
if args.labelNames:
    labels = paramnames.ParamNames(args.labelNames)
else:
    labels = None

# mapper: old name -> (new name, label), read from a whitespace-separated
# map file with one "oldname newname label" row per line (label may itself
# contain spaces because of the maxsplit=2).
mapper = dict()
if args.map_file:
    with open(args.map_file) as f:
        for line in f:
            if line.strip():
                old, new, label = [s.strip() for s in line.split(None, 2)]
                mapper[old] = (new, label)
예제 #2
0
from __future__ import absolute_import
from __future__ import print_function
import os

from . import plik_postprocess
from getdist import inifile
from paramgrid import batchjob_args


Opts = batchjob_args.batchArgs('add plik params and bestfits', importance=True)

Opts.parser.add_argument('--finished', action='store_true', help='only run on completed chains')

(batch, args) = Opts.parseForBatch()


for jobItem in Opts.filteredBatchItems():
    if args.finished and not jobItem.chainFinished(): continue
    name = jobItem.chainRoot + '.paramnames'
    properties = jobItem.propertiesIni()
    if 'plik' in name and os.path.exists(name) and not properties.bool('plik_foregrounds', False):

        ini = inifile.IniFile(jobItem.chainRoot + '.inputparams')
        dat = ini.string('clik_data_plik', '')
        params = ini.string('clik_params_plik', '')
        hasderived = dat
        if not dat:
            dat = ini.string('clik_data_plikTE', '')
            params = ini.string('clik_params_plikTE', '')
        if not dat:
            dat = ini.string('clik_data_plikEE', '')
예제 #3
0
from __future__ import absolute_import
from __future__ import print_function
import subprocess
from paramgrid import batchjob_args, jobqueue

Opts = batchjob_args.batchArgs('Delete running or queued jobs',
                               importance=True,
                               batchPathOptional=True)

group = Opts.parser.add_mutually_exclusive_group()
group.add_argument('--queued', action='store_true')
group.add_argument('--running', action='store_true')

Opts.parser.add_argument('--delete_id_min', type=int)
Opts.parser.add_argument('--delete_id_range', nargs=2, type=int)
Opts.parser.add_argument('--delete_ids', nargs='+', type=int)
Opts.parser.add_argument('--confirm', action='store_true')

(batch, args) = Opts.parseForBatch()

if batch:
    # The id-selection options are alternatives: keep them in one
    # if/elif chain so that passing only --delete_id_range does not fall
    # through the later branches (the chain continues below and ends in a
    # delete-by-name branch that would otherwise also run).
    if args.delete_id_range is not None:
        jobqueue.deleteJobs(args.batchPath,
                            jobId_minmax=args.delete_id_range,
                            confirm=args.confirm)
    elif args.delete_id_min is not None:
        jobqueue.deleteJobs(args.batchPath,
                            jobId_min=args.delete_id_min,
                            confirm=args.confirm)
    elif args.delete_ids is not None:
        jobqueue.deleteJobs(args.batchPath,
예제 #4
0
from __future__ import absolute_import
from __future__ import print_function
import os
import fnmatch
from paramgrid import batchjob_args


Opts = batchjob_args.batchArgs('delete failed chains, files etc.', importance=True, converge=True)

Opts.parser.add_argument('--dist', action='store_true')
Opts.parser.add_argument('--ext', nargs='+', default=['*'])
Opts.parser.add_argument('--empty', action='store_true')
Opts.parser.add_argument('--confirm', action='store_true')
Opts.parser.add_argument('--chainnum', default=None)

(batch, args) = Opts.parseForBatch()

sizeMB = 0

def fsizestr(fname):
    """Return a human-readable size string (KB/MB/GB/TB) for *fname*.

    Side effect: accumulates the file's size (in MB) into the
    module-level ``sizeMB`` running total.
    """
    global sizeMB
    # Integer division: under Python 3, plain / would make sz a float and
    # the output would read e.g. "2.0KB" instead of "2KB".
    sz = os.path.getsize(fname) // 1024
    sizeMB += sz / 1024.
    if sz < 1024: return str(sz) + 'KB'
    if sz < 1024 * 1024: return str(sz // 1024) + 'MB'
    if sz < 1024 * 1024 * 1024: return str(sz // 1024 // 1024) + 'GB'
    # Previously fell through returning None for files >= 1TB, which would
    # break callers concatenating the result into a message.
    return str(sz // 1024 ** 3) + 'TB'

if args.chainnum is not None:
    args.ext = ['_' + args.chainnum + '.' + ext for ext in args.ext]
else: args.ext = ['.' + ext for ext in args.ext] + ['_*.' + ext for ext in args.ext]
예제 #5
0
from __future__ import absolute_import
from __future__ import print_function
import os
import fnmatch
from paramgrid import batchjob_args


Opts = batchjob_args.batchArgs('delete failed chains, files etc.', importance=True, converge=True)

Opts.parser.add_argument('--dist', action='store_true')
Opts.parser.add_argument('--ext', nargs='+', default=['*'])
Opts.parser.add_argument('--empty', action='store_true')
Opts.parser.add_argument('--confirm', action='store_true')
Opts.parser.add_argument('--chainnum', default=None)

(batch, args) = Opts.parseForBatch()

sizeMB = 0

def fsizestr(fname):
    """Format the size of *fname* as a KB/MB/GB string, while adding the
    size (in MB) to the running module-level ``sizeMB`` total."""
    global sizeMB
    kb = os.path.getsize(fname) // 1024
    sizeMB += kb / 1024.
    # Pick the first unit bucket the size fits into.
    for limit, divisor, unit in ((1024, 1, 'KB'),
                                 (1024 ** 2, 1024, 'MB'),
                                 (1024 ** 3, 1024 ** 2, 'GB')):
        if kb < limit:
            return str(kb // divisor) + unit

if args.chainnum is not None:
    args.ext = ['_' + args.chainnum + '.' + ext for ext in args.ext]
else: args.ext = ['.' + ext for ext in args.ext] + ['_*.' + ext for ext in args.ext]
예제 #6
0
from __future__ import absolute_import
from __future__ import print_function
import os
from paramgrid import batchjob_args
from getdist import types, paramnames


Opts = batchjob_args.batchArgs('Compare parameter constraints over set of models')
Opts.parser.add_argument('--params', nargs='+')
Opts.parser.add_argument('--chain_name_params', nargs='+')

Opts.parser.add_argument('--compare', nargs='+', default=None)
Opts.parser.add_argument('--nobestfits', action='store_true')
Opts.parser.add_argument('--single_extparam', action='store_true')
Opts.parser.add_argument('--limit', type=int, default=2)
Opts.parser.add_argument('--latex_filename', default=None)
Opts.parser.add_argument('--mathColumns', action='store_true')
Opts.parser.add_argument('--endline', default='\\cr')
Opts.parser.add_argument('--paramNameFile', default='clik_latex.paramnames')

(batch, args) = Opts.parseForBatch()
formatter = types.TableFormatter()

names = paramnames.ParamNames(args.paramNameFile)

if args.chain_name_params is None: args.chain_name_params = args.params

if args.compare: args.compare = [batch.normalizeDataTag(dat) for dat in args.compare]

table = dict()
paramtag_for_param = dict()
예제 #7
0
#!/usr/bin/env python

from __future__ import absolute_import
from __future__ import print_function
import hashlib
import os
from paramgrid import batchjob_args, jobqueue

Opts = batchjob_args.batchArgs(
    'Submit jobs to run chains or importance sample',
    notExist=True,
    notall=True,
    converge=True)

jobqueue.addArguments(Opts.parser, combinedJobs=True)

Opts.parser.add_argument('--subitems',
                         action='store_true',
                         help='include sub-grid items')
Opts.parser.add_argument('--not_queued', action='store_true')
Opts.parser.add_argument(
    '--filters',
    action='store_true',
    help='run any python importance filters on grid (no submission)')
Opts.parser.add_argument('--minimize',
                         action='store_true',
                         help='Run minimization jobs')
Opts.parser.add_argument(
    '--importance_minimize',
    action='store_true',
    help='Run minimization jobs for chains that are importance sampled')
예제 #8
0
from __future__ import absolute_import
from __future__ import print_function
import os
import copy
import planckStyle
from paramgrid import batchjob, batchjob_args
from getdist import types, paramnames
from getdist.mcsamples import loadMCSamples, MCSamples
from getdist.paramnames import ParamNames
import numpy as np

Opts = batchjob_args.batchArgs('Make pdf tables from latex generated from getdist outputs', importance=True,
                               converge=True)
Opts.parser.add_argument('latex_filename', help="name of latex/PDF file to produce")
Opts.parser.add_argument('--limit', type=int, default=2, help="sigmas of quoted confidence intervals")
Opts.parser.add_argument('--all_limits', action='store_true')

Opts.parser.add_argument('--bestfitonly', action='store_true')
Opts.parser.add_argument('--nobestfit', action='store_true')
Opts.parser.add_argument('--no_delta_chisq', action='store_true')
Opts.parser.add_argument('--delta_chisq_paramtag', default=None,
                         help="parameter tag to give best-fit chi-squared differences")
Opts.parser.add_argument('--changes_from_datatag', default=None,
                         help="give fractional sigma shifts compared to a given data combination tag")
Opts.parser.add_argument('--changes_from_paramtag', default=None,
                         help="give fractional sigma shifts compared to a given parameter combination tag")
Opts.parser.add_argument('--changes_adding_data', nargs='+', default=None,
                         help="give fractional sigma shifts when adding given data")
Opts.parser.add_argument('--changes_replacing', nargs='+', default=None,
                         help='give sigma shifts for results with data x, y, z replacing data y, z.. with x')
Opts.parser.add_argument('--changes_only', action='store_true',
예제 #9
0
from __future__ import absolute_import
from __future__ import print_function
import subprocess
from paramgrid import batchjob_args, jobqueue

Opts = batchjob_args.batchArgs('Delete running or queued jobs', importance=True, batchPathOptional=True)

group = Opts.parser.add_mutually_exclusive_group()
group.add_argument('--queued', action='store_true')
group.add_argument('--running', action='store_true')

Opts.parser.add_argument('--delete_id_min', type=int)
Opts.parser.add_argument('--delete_id_range', nargs=2, type=int)
Opts.parser.add_argument('--delete_ids', nargs='+', type=int)
Opts.parser.add_argument('--confirm', action='store_true')

(batch, args) = Opts.parseForBatch()


if batch:
    # The id-selection options are mutually exclusive alternatives: chain
    # them with elif so that, e.g., passing only --delete_id_range does not
    # fall through (delete_id_min and delete_ids are both None then) into
    # the final else branch, which deletes jobs for *all* filtered items.
    if args.delete_id_range is not None:
        jobqueue.deleteJobs(args.batchPath, jobId_minmax=args.delete_id_range, confirm=args.confirm)
    elif args.delete_id_min is not None:
        jobqueue.deleteJobs(args.batchPath, jobId_min=args.delete_id_min, confirm=args.confirm)
    elif args.delete_ids is not None:
        jobqueue.deleteJobs(args.batchPath, args.delete_ids, confirm=args.confirm)
    else:
        # No explicit ids: delete by root names derived from the filtered
        # grid items, including their _minimize job variants.
        items = [jobItem for jobItem in Opts.filteredBatchItems()]
        batchNames = set([jobItem.name for jobItem in items] + [jobItem.name + '_minimize' for jobItem in items])
        jobqueue.deleteJobs(args.batchPath, rootNames=batchNames, confirm=args.confirm)
예제 #10
0
from __future__ import absolute_import
from __future__ import print_function
import os
import re
from paramgrid import batchjob_args
from getdist import paramnames

Opts = batchjob_args.batchArgs(
    'rename parameter in all .paramnames files in grid', importance=True)
Opts.parser.add_argument('--old_new',
                         nargs='+',
                         help="list of oldname newname oldname2 newname2...")
Opts.parser.add_argument('--labelNames',
                         default=None,
                         help=".paramnames file for new param labels")
Opts.parser.add_argument('--map_file',
                         help="file with rows of oldname newname label")
Opts.parser.add_argument('--confirm',
                         action='store_true',
                         help="true to replace .paramnames files")

(batch, args) = Opts.parseForBatch()

if args.old_new and len(args.old_new) < 2:
    raise Exception('Must have at least one pair of parameters to rename')

if args.labelNames:
    labels = paramnames.ParamNames(args.labelNames)
else:
    labels = None
from __future__ import absolute_import
from __future__ import print_function
import os
from paramgrid import batchjob_args
from getdist import types, paramnames

Opts = batchjob_args.batchArgs(
    'Compare parameter constraints over set of models')
Opts.parser.add_argument('--params', nargs='+')
Opts.parser.add_argument('--chain_name_params', nargs='+')

Opts.parser.add_argument('--compare', nargs='+', default=None)
Opts.parser.add_argument('--nobestfits', action='store_true')
Opts.parser.add_argument('--single_extparam', action='store_true')
Opts.parser.add_argument('--limit', type=int, default=2)
Opts.parser.add_argument('--latex_filename', default=None)
Opts.parser.add_argument('--mathColumns', action='store_true')
Opts.parser.add_argument('--endline', default='\\cr')
Opts.parser.add_argument('--paramNameFile', default='clik_latex.paramnames')

(batch, args) = Opts.parseForBatch()
formatter = types.TableFormatter()

names = paramnames.ParamNames(args.paramNameFile)

if args.chain_name_params is None: args.chain_name_params = args.params

if args.compare:
    args.compare = [batch.normalizeDataTag(dat) for dat in args.compare]

table = dict()
예제 #12
0
from __future__ import absolute_import
from __future__ import print_function
import os
from paramgrid import batchjob_args

Opts = batchjob_args.batchArgs('Make plots from getdist outputs',
                               importance=True,
                               converge=True,
                               plots=True)
Opts.parser.add_argument('out_dir',
                         help='directory to put the produced plots in')

Opts.parser.add_argument(
    '--compare_data',
    nargs='+',
    default=None,
    help='data tags to compare for each parameter combination (data1_data2)')
Opts.parser.add_argument('--compare_importance', nargs='*', default=None)
Opts.parser.add_argument(
    '--compare_paramtag',
    nargs='+',
    default=None,
    help=
    'list of parameter tags to compare for each data combination (param1_param2)'
)
Opts.parser.add_argument(
    '--compare_alldata',
    action='store_true',
    help='compare all data combinations for each parameter combination')
Opts.parser.add_argument(
    '--compare_replacing',
예제 #13
0
#!/usr/bin/env python

from __future__ import absolute_import
from __future__ import print_function
import hashlib
import os
from paramgrid import batchjob_args, jobqueue

Opts = batchjob_args.batchArgs('Submit jobs to run chains or importance sample', notExist=True, notall=True,
                               converge=True)

jobqueue.addArguments(Opts.parser, combinedJobs=True)

Opts.parser.add_argument('--subitems', action='store_true', help='include sub-grid items')
Opts.parser.add_argument('--not_queued', action='store_true')
Opts.parser.add_argument('--filters', action='store_true',
                         help='run any python importance filters on grid (no submission)')
Opts.parser.add_argument('--minimize', action='store_true', help='Run minimization jobs')
Opts.parser.add_argument('--importance_minimize', action='store_true',
                         help='Run minimization jobs for chains that are importance sampled')
Opts.parser.add_argument('--minimize_failed', action='store_true', help='run where minimization previously failed')
Opts.parser.add_argument('--checkpoint_run', nargs='?', default=None, const=0, type=float,
                         help='run if stopped and not finished; if optional value given then only run chains with convergence worse than the given value')
Opts.parser.add_argument('--importance_ready', action='store_true', help='where parent chain has converged and stopped')
Opts.parser.add_argument('--importance_changed', action='store_true',
                         help='run importance jobs where the parent chain has changed since last run')
Opts.parser.add_argument('--parent_converge', type=float, default=0,
                         help='minimum R-1 convergence for importance job parent')
Opts.parser.add_argument('--parent_stopped', action='store_true', help='only run if parent chain is not still running')
Opts.parser.add_argument('--chain_exists', action='store_true', help='Only run if chains already exist')
예제 #14
0
from __future__ import absolute_import
from __future__ import print_function
from paramgrid import batchjob_args, jobqueue

Opts = batchjob_args.batchArgs(
    'List details of running or queued jobs; gives job stats, then current R-1 and job/chain names', importance=True,
    batchPathOptional=True)

group = Opts.parser.add_mutually_exclusive_group()
group.add_argument('--queued', action='store_true')
group.add_argument('--running', action='store_true')

(batch, args) = Opts.parseForBatch()

if batch:
    items = [jobItem for jobItem in Opts.filteredBatchItems()]
    batchNames = set([jobItem.name for jobItem in items] + [jobItem.name + '_minimize' for jobItem in items])
else:
    batchNames= set()

ids, jobNames, nameslist, infos = jobqueue.queue_job_details(args.batchPath, running=not args.queued,
                                                             queued=not args.running)
for jobId, jobName, names, info in zip(ids, jobNames, nameslist, infos):
    if batchNames.intersection(set(names)):
        stats = dict()
        if batch:
            for name in names:
                for jobItem in items:
                    if jobItem.name == name:
                        R = jobItem.convergeStat()[0]
                        if R: stats[name] = "%6.3f" % R
예제 #15
0
from __future__ import absolute_import
from __future__ import print_function
from paramgrid import batchjob_args

Opts = batchjob_args.batchArgs('List items in a grid',
                               importance=True,
                               converge=True,
                               notExist=True)
Opts.parser.add_argument('--exists',
                         action='store_true',
                         help='chain must exist')
Opts.parser.add_argument('--normed',
                         action='store_true',
                         help='Output normed names')

(batch, args) = Opts.parseForBatch()
items = Opts.sortedParamtagDict(chainExist=args.exists)

for paramtag, parambatch in items:
    for jobItem in parambatch:
        # Show the grid group in parentheses when the item carries one.
        tag = '(%s)' % jobItem.group if hasattr(jobItem, 'group') else ''
        # --normed switches output to the normalized chain name.
        label = jobItem.normed_name if args.normed else jobItem.name
        print(label, tag)
예제 #16
0
from __future__ import absolute_import
from __future__ import print_function
from paramgrid import batchjob_args, jobqueue

Opts = batchjob_args.batchArgs(
    'List details of running or queued jobs; gives job stats, then current R-1 and job/chain names',
    importance=True,
    batchPathOptional=True)

group = Opts.parser.add_mutually_exclusive_group()
group.add_argument('--queued', action='store_true')
group.add_argument('--running', action='store_true')

(batch, args) = Opts.parseForBatch()

if batch:
    items = [jobItem for jobItem in Opts.filteredBatchItems()]
    batchNames = set([jobItem.name for jobItem in items] +
                     [jobItem.name + '_minimize' for jobItem in items])
else:
    batchNames = set()

ids, jobNames, nameslist, infos = jobqueue.queue_job_details(
    args.batchPath, running=not args.queued, queued=not args.running)
for jobId, jobName, names, info in zip(ids, jobNames, nameslist, infos):
    if batchNames.intersection(set(names)):
        stats = dict()
        if batch:
            for name in names:
                for jobItem in items:
                    if jobItem.name == name:
예제 #17
0
from __future__ import absolute_import
from __future__ import print_function
import os
import subprocess
import getdist
from getdist import IniFile
import time
from paramgrid import batchjob_args


def checkDir(fname):
    """Create directory *fname* (and any missing parents) if absent.

    Uses exist_ok=True to avoid the check-then-create race of the former
    ``os.path.exists`` guard when several getdist processes start at once.
    """
    os.makedirs(fname, exist_ok=True)


Opts = batchjob_args.batchArgs('Run getdist over the grid of models', notExist=True)
Opts.parser.add_argument('--update_only', action='store_true')
Opts.parser.add_argument('--make_plots', action='store_true', help='run generated script plot files to make PDFs')
Opts.parser.add_argument('--norun', action='store_true')
Opts.parser.add_argument('--plot_data', default=None,
                         help="directory to store the plot_data in for each chain. Default None to generate on the fly.")
Opts.parser.add_argument('--burn_removed', action='store_true', help="if burn in has already been removed from chains")
Opts.parser.add_argument('--burn_remove', type=float,
                         help="fraction of chain to remove as burn in (if not importance sampled or already done)")

Opts.parser.add_argument('--no_plots', action='store_true',
                         help="just make non-plot outputs (faster if using old plot_data)")
Opts.parser.add_argument('--delay', type=int, help="run after delay of some number of seconds")
Opts.parser.add_argument('--procs', type=int, default=1, help="number of getdist instances to run in parallel")
Opts.parser.add_argument('--base_ini', default=getdist.default_getdist_settings, help="default getdist settings")
Opts.parser.add_argument('--command', default='python', help="program to run")
Opts.parser.add_argument('--command_params', nargs='*',
예제 #18
0
from __future__ import absolute_import
from __future__ import print_function
import os
import fnmatch
import shutil
from paramgrid import batchjob_args

Opts = batchjob_args.batchArgs('copy all files of a given type from all getdist output directories in the batch',
                               importance=True, converge=True)

Opts.parser.add_argument('target_dir')
Opts.parser.add_argument('file_extension', nargs='+')
Opts.parser.add_argument('--normalize_names', action='store_true', help='replace actual name tags with normalized names')
Opts.parser.add_argument('--tag_replacements', nargs='+', help="XX YY XX2 YY2 replaces name XX with YY, XX2 with YY2 etc.")


(batch, args) = Opts.parseForBatch()

# Normalize the destination to an absolute path ending in the OS separator,
# creating it if needed.
target_dir = os.path.abspath(args.target_dir) + os.sep
if not os.path.exists(target_dir): os.makedirs(target_dir)

# Build tag replacement map from alternating "old new" entries:
# even-indexed items are keys, the following odd-indexed item is the value.
# NOTE(review): an odd-length --tag_replacements list would raise IndexError
# on the final pair — presumably callers always pass pairs; verify.
if args.tag_replacements is not None:
    replacements = dict()
    for i, val in enumerate(args.tag_replacements[::2]):
        replacements[val] = args.tag_replacements[i * 2 + 1]
else: replacements = None

for ext in args.file_extension:
    if not '.' in ext: pattern = '.' + ext
    else: pattern = ext
    for jobItem in Opts.filteredBatchItems():
예제 #19
0
from __future__ import absolute_import
from __future__ import print_function
import os
from paramgrid import batchjob_args


Opts = batchjob_args.batchArgs('Make plots from getdist outputs', importance=True, converge=True, plots=True)
Opts.parser.add_argument('out_dir', help='directory to put the produced plots in')

Opts.parser.add_argument('--compare_data', nargs='+', default=None,
                         help='data tags to compare for each parameter combination (data1_data2)')
Opts.parser.add_argument('--compare_importance', nargs='*', default=None)
Opts.parser.add_argument('--compare_paramtag', nargs='+', default=None,
                         help='list of parameter tags to compare for each data combination (param1_param2)')
Opts.parser.add_argument('--compare_alldata', action='store_true',
                         help='compare all data combinations for each parameter combination')
Opts.parser.add_argument('--compare_replacing', nargs='*', default=None,
                         help='compare results replacing data combination x with y, z..(datavar1 datavar2 ...)')

Opts.parser.add_argument('--legend_labels', default=None, nargs='+',
                         help='labels to replace full chain names in legend')
Opts.parser.add_argument('--D2_param', default=None, help='x-parameter for 2D plots')
Opts.parser.add_argument('--D2_y_params', nargs='+', default=None, help='list of y parameter names for 2D plots')
Opts.parser.add_argument('--filled', action='store_true', help='for 2D plots, output filled contours')

Opts.parser.add_argument('--tri_params', nargs='+', default=None, help='list of parameters for triangle plots')

Opts.parser.add_argument('--legend_ncol', type=int, default=None, help='numnber of columns to draw legends')
Opts.parser.add_argument('--allhave', action='store_true', help='only include plots where all combinations exist')
Opts.parser.add_argument('--outtag', default=None, help='tag to add to output filenames to distinguish output')
예제 #20
0
from __future__ import absolute_import
from __future__ import print_function
import os
import fnmatch
import shutil
import zipfile

from paramgrid import batchjob, batchjob_args


Opts = batchjob_args.batchArgs('copy or zip chains and optionally other files', importance=True, converge=True)

Opts.parser.add_argument('target_dir', help="output root directory or zip file name")

Opts.parser.add_argument('--dist', action='store_true', help="include getdist outputs")
Opts.parser.add_argument('--chains', action='store_true', help="include chain files")
Opts.parser.add_argument('--sym_link', action='store_true', help="just make symbolic links to source directories")
Opts.parser.add_argument('--no_config', action='store_true', help="don't copy grid config info")

Opts.parser.add_argument('--remove_burn_fraction', default=0.0, type=float, help="fraction at start of chain to remove as burn in")

Opts.parser.add_argument('--file_extensions', nargs='+', default=['.*'], help='extensions to include')
Opts.parser.add_argument('--skip_extensions', nargs='+', default=['.data', '.chk', '.chk_tmp', '.log', '.corr', '.py', '.m', '.py_mcsamples', '.pysamples'])
Opts.parser.add_argument('--dryrun', action='store_true')
Opts.parser.add_argument('--verbose', action='store_true')
Opts.parser.add_argument('--zip', action='store_true', help='make a zip file. Not needed if target_dir is a filename ending in .zip')


(batch, args) = Opts.parseForBatch()

if '.zip' in args.target_dir: args.zip = True
예제 #21
0
from __future__ import absolute_import
from __future__ import print_function
import os
import fnmatch
import shutil
from paramgrid import batchjob_args

Opts = batchjob_args.batchArgs(
    'copy all files of a given type from all getdist output directories in the batch',
    importance=True,
    converge=True)

Opts.parser.add_argument('target_dir')
Opts.parser.add_argument('file_extension', nargs='+')
Opts.parser.add_argument('--normalize_names',
                         action='store_true',
                         help='replace actual name tags with normalized names')
Opts.parser.add_argument(
    '--tag_replacements',
    nargs='+',
    help="XX YY XX2 YY2 replaces name XX with YY, XX2 with YY2 etc.")

(batch, args) = Opts.parseForBatch()

target_dir = os.path.abspath(args.target_dir) + os.sep
if not os.path.exists(target_dir): os.makedirs(target_dir)

if args.tag_replacements is not None:
    replacements = dict()
    for i, val in enumerate(args.tag_replacements[::2]):
        replacements[val] = args.tag_replacements[i * 2 + 1]
예제 #22
0
from __future__ import absolute_import
from __future__ import print_function
from paramgrid import batchjob_args

Opts = batchjob_args.batchArgs('Find chains which have failed or not converged.', importance=True, converge=True)

Opts.parser.add_argument('--exist', action='store_true')
Opts.parser.add_argument('--checkpoint', action='store_true')
Opts.parser.add_argument('--running', action='store_true')
Opts.parser.add_argument('--not_running', action='store_true')
Opts.parser.add_argument('--stuck', action='store_true')

(batch, args) = Opts.parseForBatch()

notExist = []
converge = []

if args.running:args.checkpoint = True

if args.stuck:
        for jobItem in Opts.filteredBatchItems():
            if jobItem.chainExists() and jobItem.chainsDodgy():
                print('Chain stuck?...' + jobItem.name)
elif args.checkpoint:
    print('Convergence from checkpoint files...')
    for jobItem in Opts.filteredBatchItems():
        R, done = jobItem.convergeStat()
        if R is not None and not done:
            if (not args.not_running or jobItem.notRunning()) and (not args.running or not jobItem.notRunning()): print('...', jobItem.chainRoot, R)
            if args.running and jobItem.chainExists() and jobItem.chainsDodgy():
                print('Chain stuck?...' + jobItem.name)
예제 #23
0
from __future__ import absolute_import
from __future__ import print_function
from paramgrid import batchjob_args


# Argument parser for listing all items in a grid batch (including
# importance runs), with converge/notExist filtering support.
Opts = batchjob_args.batchArgs('List items in a grid', importance=True, converge=True, notExist=True)
Opts.parser.add_argument('--exists', action='store_true', help='chain must exist')
Opts.parser.add_argument('--normed', action='store_true', help='Output normed names')

(batch, args) = Opts.parseForBatch()
# Items grouped by parameter tag; optionally restricted to existing chains.
items = Opts.sortedParamtagDict(chainExist=args.exists)

# Print one line per job item: its (possibly normalized) name plus the
# grid group in parentheses when the item has one.
for paramtag, parambatch in items:
    for jobItem in parambatch:
        if hasattr(jobItem, 'group'):
            tag = '(%s)' % jobItem.group
        else:
            tag = ''
        if args.normed:
            print(jobItem.normed_name, tag)
        else:
            print(jobItem.name, tag)
예제 #24
0
from __future__ import absolute_import
from __future__ import print_function
import os
import fnmatch
import shutil
import zipfile
from datetime import datetime, timedelta

from paramgrid import batchjob, batchjob_args

Opts = batchjob_args.batchArgs('copy or zip chains and optionally other files',
                               importance=True,
                               converge=True)

Opts.parser.add_argument('target_dir',
                         help="output root directory or zip file name")

Opts.parser.add_argument('--dist',
                         action='store_true',
                         help="include getdist outputs")
Opts.parser.add_argument('--chains',
                         action='store_true',
                         help="include chain files")
Opts.parser.add_argument('--sym_link',
                         action='store_true',
                         help="just make symbolic links to source directories")
Opts.parser.add_argument('--no_config',
                         action='store_true',
                         help="don't copy grid config info")

Opts.parser.add_argument(
예제 #25
0
from __future__ import absolute_import
from __future__ import print_function
import os
import copy
import planckStyle
from paramgrid import batchjob, batchjob_args
from getdist import types, paramnames
from getdist.mcsamples import loadMCSamples, MCSamples
from getdist.paramnames import ParamNames
import numpy as np

Opts = batchjob_args.batchArgs(
    'Make pdf tables from latex generated from getdist outputs',
    importance=True,
    converge=True)
Opts.parser.add_argument('latex_filename',
                         help="name of latex/PDF file to produce")
Opts.parser.add_argument('--limit',
                         type=int,
                         default=2,
                         help="sigmas of quoted confidence intervals")
Opts.parser.add_argument('--all_limits', action='store_true')

Opts.parser.add_argument('--bestfitonly', action='store_true')
Opts.parser.add_argument('--nobestfit', action='store_true')
Opts.parser.add_argument('--no_delta_chisq', action='store_true')
Opts.parser.add_argument(
    '--delta_chisq_paramtag',
    default=None,
    help="parameter tag to give best-fit chi-squared differences")
Opts.parser.add_argument(
예제 #26
0
from __future__ import absolute_import
from __future__ import print_function
from paramgrid import batchjob_args

Opts = batchjob_args.batchArgs(
    'Find chains which have failed or not converged.',
    importance=True,
    converge=True)

Opts.parser.add_argument('--exist', action='store_true')
Opts.parser.add_argument('--checkpoint', action='store_true')
Opts.parser.add_argument('--running', action='store_true')
Opts.parser.add_argument('--not_running', action='store_true')
Opts.parser.add_argument('--stuck', action='store_true')

(batch, args) = Opts.parseForBatch()

notExist = []
converge = []

if args.running: args.checkpoint = True

if args.stuck:
    for jobItem in Opts.filteredBatchItems():
        if jobItem.chainExists() and jobItem.chainsDodgy():
            print('Chain stuck?...' + jobItem.name)
elif args.checkpoint:
    print('Convergence from checkpoint files...')
    for jobItem in Opts.filteredBatchItems():
        R, done = jobItem.convergeStat()
        if R is not None and not done: