Example #1
def pcload_single_entries(bn, gp):
    # pickle and the progress-bar helper gh are expected at module level;
    # pickle is imported here so the snippet runs standalone
    import pickle
    import re
    import gi_collection as glc
    pc = glc.ProfileCollection(gp.pops, gp.nepol)
    tmp = re.split('/DT', bn)[-1]
    path = str.join('/', re.split('/', tmp)[:-1])
    # get number of iterations from run_info file
    import gi_base as gb
    bp = gb.get_basepath()
    fil = open(bp + "/run_info", "r")
    for line in fil:
        if re.search(path, line):
            line2 = re.sub(r'\n', '', line)
            if not re.search("File ", line2):
                runparams = line2
    fil.close()
    numofmodels = int(re.split('\t', runparams)[2])
    current = 0
    with open(bn + 'pc2.save', 'rb') as fi:
        dum = pickle.load(fi)  # dummy variable, was used to create file
        try:
            while True:  # if needing to cut to a max no. of iterations, bound the loop here
                current += 1
                if current % 100 == 0:
                    gh.progressbar((1.0 * current) / numofmodels)
                MODEL = pickle.load(fi)
                pc.add(MODEL)
        except EOFError:
            pass
    print("")
    return pc
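
A short usage sketch, not from the source: it assumes the run() helper shown in Example #2 below and a gi_params.Params instance gp.

# hypothetical driver: pick the newest output directory and load its profiles
timestamp, dirpath = run('gaia', 1, latest=True)    # 'gaia' and case 1 are placeholder values
pc = pcload_single_entries(dirpath + timestamp + '/', gp)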
Example #2
def run(investigate="", case=-1, latest=False):
    if investigate == "":
        investigate = get_investigate()
    if case == -1:
        case = get_case(investigate)
    basepath = gb.get_basepath()
    basedir = os.path.abspath(basepath+'DT'+investigate+'/'+str(case)+'/')

    print(' - searching directory ', basedir, ' for output files')
    if latest:
        fdl = list_files_readout(basedir, investigate, case)
        sel = -1
    else:
        action = 'k'
        while action == 'k':
            fdl = list_files_readout(basedir, investigate, case)
            sel = get_run(len(fdl))
            action = get_action()
            if action == 'k':
                import shutil
                basename = re.split('\t', fdl[sel])[1]
                shutil.rmtree(basepath+basename)

    line = fdl[sel] # full directory path, without '/'
    basename = re.split('\t', line)[1]
    info = re.split('/', basename)
    timestamp = info[2]
    basename = info[0]+'/'+info[1]
    return timestamp, basepath+basename+'/'
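
A brief usage sketch (placeholder investigation and case, not part of the source):

# non-interactive: take the latest run of investigation 'gaia', case 1
timestamp, dirpath = run('gaia', 1, latest=True)
print('analysing', dirpath + timestamp + '/')
# interactive: investigation, case, run and action are prompted for via
# get_investigate(), get_case(), get_run() and get_action()
timestamp, dirpath = run()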
Example #3
def pcload_single_entries(basename, gp):
    # pickle and the progress-bar helper gh are expected at module level;
    # pickle is imported here so the snippet runs standalone
    import pickle
    import re
    import gi_collection as glc
    pc = glc.ProfileCollection(gp.pops, gp.nepol)
    tmp = re.split('/DT', basename)[-1]
    path = str.join('/', re.split('/', tmp)[:-1])
    # get number of iterations from run_info file
    import gi_base as gb
    bp = gb.get_basepath()
    fil = open(bp+"/run_info", "r")
    for line in fil:
        if re.search(path, line):
            line2 = re.sub(r'\n', '', line)
            if not re.search("File ", line2):
                runparams = line2
    fil.close()
    numofmodels = int(re.split('\t', runparams)[2])
    current = 0
    with open(basename+'pc2.save', 'rb') as fi:
        dum = pickle.load(fi) # dummy variable, was used to create file
        try:
            while True:
                # and current<22360: # if needing to cut to max no. iterations
                current += 1
                if current%100 == 0:
                    gh.progressbar((1.0*current)/numofmodels)
                MODEL = pickle.load(fi)
                pc.add(MODEL)
        except EOFError:
            pass
    print("")
    return pc
Example #4
def list_files_readout(basedir, investigate, case):
    bp = gb.get_basepath()
    fil = open(bp + "/run_info", "r")
    fdl = []
    for line in fil:
        if re.search("DT" + str(investigate) + "/" + str(case) + "/", line):
            line2 = re.sub(r'\n', '', line)
            print(line2)
            #exclude "File not found" errors
            if not re.search("File ", line2):
                fdl.append(line2)
    fil.close()
    return fdl
Example #5
def list_files_readout(basedir, investigate, case):
    bp = gb.get_basepath()
    fil = open(bp+"/run_info", "r")
    fdl=[]
    for line in fil:
        if re.search("DT"+str(investigate)+"/"+str(case)+"/", line):
            line2 = re.sub(r'\n', '', line)
            print(line2)
            #exclude "File not found" errors
            if not re.search("File ", line2):
                fdl.append(line2)
    fil.close()
    return fdl
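
A usage sketch with hypothetical values; note that basedir is only echoed by the caller and not read here.

bp = gb.get_basepath()
fdl = list_files_readout(bp + 'DTgaia/1/', 'gaia', 1)
for entry in fdl:
    print(re.split('\t', entry)[1])    # field [1] is the run's output path, as used in run()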
Example #6
# increment NICEness of process by 1, if CPU usage shall not block others
# import os
# os.nice(1)

# optionally start with -i and -c switches, to batch start gaia and walk runs
from optparse import OptionParser
parser = OptionParser()
parser.add_option("-i", "--investigation", dest="investigation", default="", help="investigation to run: gaia, walk, hern, triax, discmock")
parser.add_option("-c", "--case", dest="case", default='-1', help="case: 1, 2, ..")
parser.add_option("-t", "--timestamp", dest="timestamp", default='-1', help="timestamp: 201501221224")
(options, args) = parser.parse_args()
print('gravimage.py '+options.investigation+' '+str(options.case)+' '+str(options.timestamp))
if options.timestamp != '-1':
    import gi_base as gb
    basepath = gb.get_basepath()
    import import_path as ip
    ip.insert_sys_path(basepath+"DT"+options.investigation+"/"+options.case+"/"+options.timestamp+"/programs/")
import gi_params
import warnings
warnings.simplefilter('ignore') # set to 'error' when debugging
gp = gi_params.Params(options.timestamp, options.investigation, int(options.case))
if options.timestamp != '-1':
    ip.remove_third()
    #import os
    #os.system('cd '+basepath+'DT'+options.investigation+'/'+options.case+'/'+options.timestamp)
    gp.restart = True
    gp.chi2_Sig_converged = 0
import gi_file as gf
import subprocess

def show(filepath):
    subprocess.call(('xdg-open', filepath))
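
For reference, a standalone sketch (placeholder argument values, not from the source) of how the three switches resolve through optparse:

# equivalent of: python gravimage.py -i gaia -c 1 -t 201501221224
from optparse import OptionParser
p = OptionParser()
p.add_option("-i", "--investigation", dest="investigation", default="")
p.add_option("-c", "--case", dest="case", default='-1')
p.add_option("-t", "--timestamp", dest="timestamp", default='-1')
opts, args = p.parse_args(['-i', 'gaia', '-c', '1', '-t', '201501221224'])
print(opts.investigation, opts.case, opts.timestamp)    # -> gaia 1 201501221224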
Example #7
                nbeta = int(re.split('[=\n]', line)[-2])
    return nbeta


## \fn get_nbeta(basename)
# return number of beta parameters from gi_params stored in output directory
# @param basename string of output dir
# @return integer number of beta* parameters
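
Only the tail of get_nbeta survives above; the following is a hedged reconstruction that assumes the value is read from the gi_params.py copy stored under the output directory. The file location and the match pattern are guesses; only the int(re.split(...)) parse and the return are taken from the fragment.

def get_nbeta(basename):
    import re
    nbeta = -1
    # assumed location of the parameter-file snapshot inside the output directory
    with open(basename + 'programs/gi_params.py', 'r') as fil:
        for line in fil:
            if re.search(r'nbeta\s*=', line):
                nbeta = int(re.split('[=\n]', line)[-2])
    return nbeta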


def run(investigate="", case=-1, latest=False):
    if investigate == "":
        investigate = get_investigate()
    if case == -1:
        case = get_case(investigate)
    basepath = gb.get_basepath()
    basedir = os.path.abspath(basepath + 'DT' + investigate + '/' + str(case) +
                              '/')

    print(' - searching directory ', basedir, ' for output files')
    if latest:
        fdl = list_files_readout(basedir, investigate, case)
        sel = -1
    else:
        action = 'k'
        while action == 'k':
            fdl = list_files_readout(basedir, investigate, case)
            sel = get_run(len(fdl))
            action = get_action()
            if action == 'k':
                import shutil
                basename = re.split('\t', fdl[sel])[1]
                shutil.rmtree(basepath + basename)

    line = fdl[sel]  # full directory path, without '/'
    basename = re.split('\t', line)[1]
    info = re.split('/', basename)
    timestamp = info[2]
    basename = info[0] + '/' + info[1]
    return timestamp, basepath + basename + '/'
Example #8
def set_geometry(geom, machine):
    print('Machine = ', machine)
    basepath = gb.get_basepath() + 'programs/'
    insert_sys_path(basepath + 'reducedata/')
    insert_sys_path(basepath + geom)
Example #9
def set_geometry(geom, machine):
    print('Machine = ', machine)
    basepath = gb.get_basepath()+'programs/'
    insert_sys_path(basepath + 'reducedata/')
    insert_sys_path(basepath + geom)
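
A usage sketch; 'sphere/' and 'mymachine' are placeholder values, not names confirmed by the source.

# makes <basepath>/programs/reducedata/ and <basepath>/programs/sphere/ importable
set_geometry('sphere/', 'mymachine')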
Example #10
class Files:
    # we have the convention to use
    # pop==0 for all tracer populations together
    # pop==1 for first tracer population
    # pop==2 for second tracer population, and so on
    def __init__(self, gp, timestamp='-1'):
        ## set which computer we are working on
        self.basepath = ''
        ## set base directory, link version for short filenames
        self.shortdir = ''
        self.longdir = ''
        self.dir = ''
        ## relative path to the 'programs' directory
        self.progdir = ''
        self.modedir = ''
        self.set_dir(gp.machine, gp.case, gp.investigate) # changes self.shortdir
        ## file with 2D summed masses
        self.massfiles = []
        ## file with analytic values for Walker models
        self.analytic = ''
        ## file with 2D surface density
        self.surfdenfiles = []
        ## files with 2D tracer surface densities
        self.Sigfiles  = []
        ## files with 3D tracer densities
        self.nufiles = []
        ## files with velocity dispersions
        self.sigfiles = []
        ## files with centered positions and velocities for the tracer particles
        self.posvelfiles = []
        ## files for the fourth order moment of the LOS velocity
        self.kappafiles = []
        ## file for zeta_A, zeta_B parameters
        self.zetafiles = []
        ## [beta_star1, r_DM, gamma_star1, r_star1, r_a1, gamma_DM, rho0]
        self.params = []
        if gp.investigate == 'hern':
            self.set_hern(gp, timestamp)
        elif gp.investigate == 'walk':
            self.set_walk(gp, timestamp)
        elif gp.investigate == 'gaia':
            self.set_gaia(gp, timestamp)
        elif gp.investigate == 'coll':
            self.set_coll(gp, timestamp)
        elif gp.investigate == 'triax':
            self.set_triax(gp, timestamp)
        elif gp.investigate == 'obs':
            self.set_obs(gp, timestamp)
        elif gp.investigate == 'discsim':
            self.set_discsim(gp, timestamp)
        elif gp.investigate == 'discmock':
            self.set_discmock(gp, timestamp)
        else:
            print(' wrong investigation in Files()')
            pdb.set_trace()
        ## directory and basename of all output files
        if timestamp == '-1':
            import datetime
            self.timestamp = datetime.datetime.now().strftime("%Y%m%d%H%M")
        else:
            self.timestamp = str(timestamp)
        self.outdir = self.shortdir+self.timestamp+'/'
        # shorter dir names in Multinest (bound to <= 100 total)
        #os.system('ln -sf '+ self.dir+' '+self.modedir + str(gp.case))
        os.system('mkdir -p '+self.outdir)
        newdir(self.dir + 'M/')
        newdir(self.dir + 'Sigma/')
        newdir(self.dir + 'siglos/')
        newdir(self.dir + 'kappalos/')
        newdir(self.dir + 'nu/')
        # create new pc2.save file for profile storage that is appended to during the run
        if timestamp == '-1':
            with open(self.outdir+'pc2.save', 'wb') as fi:
                pickle.dump(-1, fi) # dummy data, to get file written new
        return
    ## \fn __init__(self, gp, timestamp='-1')
    # constructor
    # @param gp parameters
    # @param timestamp = '-1' used for output analysis

    def set_dir(self, machine, case, inv):
        self.basepath = gb.get_basepath()
        self.progdir = self.basepath + 'programs/'
        self.modedir = self.basepath + 'DT' + inv + '/'
        self.shortdir = self.modedir + str(case) + '/'
        return
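
A construction sketch; gp is assumed to be a gi_params.Params instance, and storing the object on gp.files is an assumption, not confirmed by the excerpt.

# fresh run: a timestamp is generated from the current time and outdir is created
gp.files = Files(gp)
print(gp.files.outdir)    # <basepath>/DT<investigate>/<case>/<timestamp>/

# re-open an existing output directory for analysis (placeholder timestamp)
gp.files = Files(gp, timestamp='201501221224')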
Example #11
0
    def set_dir(self, machine, case, inv):
        self.basepath = gb.get_basepath()
        self.progdir = self.basepath + "programs/"
        self.modedir = self.basepath + "DT" + inv + "/"
        self.shortdir = self.modedir + str(case) + "/"
        return