# return a list of SkirtRun objects for all completed or archived SKIRT-runs in the database,
# optionally omitting runs that already have visualization files with the specified name endings
def completed_skirtruns(unless_filenames=None):
    db = Database()
    runids = sorted([ row['runid'] for row in db.select("runstatus='completed' or runstatus='archived'") ])
    runs = [ SkirtRun(runid) for runid in runids ]
    if unless_filenames is not None:
        runs = filter(lambda run: not has_visualization_files(run, unless_filenames), runs)
    db.close()
    return runs
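# The helper has_visualization_files() used above is not defined in this listing; the sketch below
# is only an illustration of how it might behave, assuming it reports whether the run's vis
# directory already contains a file for each of the given filename endings.
def has_visualization_files(skirtrun, filenames):
    visfiles = os.listdir(skirtrun.vispath())
    return all(any(visfile.endswith(ending) for visfile in visfiles) for ending in filenames)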
# backup and open the local database
backup()
db = Database()

# remove SKIRT-runs that have been archived from the do-list
records = filter(lambda record: db.select("runid="+str(record['runid']))[0]['runstatus'] != 'archived', records)
print "--> .. of which {} have not yet been archived".format(len(records))

# synchronize the results from each completed run
for record in records:
    runid = record['runid']
    print "--> Synchronizing results for run-id {}...".format(runid)

    # get the local and remote paths (creating the local directories if needed)
    skirtrun = SkirtRun(runid, create=True)
    local_runpath = skirtrun.runpath()
    local_inpath = skirtrun.inpath()
    local_outpath = skirtrun.outpath()
    local_vispath = skirtrun.vispath()
    remote_runpath = local_runpath.replace(config.results_path, config.cosma["results_path"])
    remote_inpath = local_inpath.replace(config.results_path, config.cosma["results_path"])
    remote_outpath = local_outpath.replace(config.results_path, config.cosma["results_path"])
    remote_vispath = local_vispath.replace(config.results_path, config.cosma["results_path"])

    # synchronize the files in each directory
    #  - skip subdirectories and symbolic links
    #  - for the vis directory, do not overwrite newer local versions
    error = subprocess.call(("rsync", "-htvz", cosma_prefix+remote_runpath+"/*", local_runpath+"/"))
    if error: raise ValueError("Error in rsync for run: " + str(error))
    error = subprocess.call(("rsync", "-htvz", cosma_prefix+remote_inpath+"/*", local_inpath+"/"))
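    # the rsync calls for the out and vis directories are not shown above; the lines below are a
    # minimal sketch assuming the same options, where the standard --update flag skips files that
    # are newer on the receiving side, so newer local versions in the vis directory are preserved
    error = subprocess.call(("rsync", "-htvz", cosma_prefix+remote_outpath+"/*", local_outpath+"/"))
    if error: raise ValueError("Error in rsync for run: " + str(error))
    error = subprocess.call(("rsync", "-htvz", "--update", cosma_prefix+remote_vispath+"/*", local_vispath+"/"))
    if error: raise ValueError("Error in rsync for run: " + str(error))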
buildrange = sys.argv[2].lower()
update = "update".startswith(buildrange)
rebuild = "rebuild".startswith(buildrange)
runidrange = runids_in_range(buildrange)
if not update and not rebuild and not runidrange:
    raise ValueError("Unknown build range: " + buildrange)

# -----------------------------------------------------------------

# get a list of relevant filename endings depending on the visualization type
filenames = filenames_for_vistype[vistype]

# construct the list of SKIRT-runs to be processed
if runidrange:
    skirtruns = [ SkirtRun(runid) for runid in runidrange ]
else:
    skirtruns = completed_skirtruns(filenames if update else None)

# =================================================================

# build density curves
if vistype == 'densities':
    from pts.eagle.plotdensitycurves import plotdensitycurves
    print "Building density curves for {} SKIRT-runs".format(len(skirtruns))
    for skirtrun in skirtruns:
        print "Building density curves for SKIRT-run {}...".format(skirtrun.runid())
        plotdensitycurves(skirtrun)

# -----------------------------------------------------------------
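# The function runids_in_range() used above is not defined in this listing; the sketch below is
# only an illustration of how it might work, assuming a build range given as a single run-id
# (e.g. "6001") or as an inclusive range (e.g. "6001-6008"); the actual syntax may differ.
def runids_in_range(buildrange):
    try:
        if "-" in buildrange:
            first, last = map(int, buildrange.split("-"))
            return range(first, last + 1)
        return [ int(buildrange) ]
    except ValueError:
        return []

# -----------------------------------------------------------------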
# get the eligible records
db = database.Database()
records = db.select("((stage='simulate' and status='succeeded') or (stage in ('observe', 'store', 'completed')))"
                    " and label=?", (label,))
db.close()
size = len(records)
if size == 0:
    raise ValueError("There are no simulated records with label " + label)

# assemble the statistics for all records
log.info("Assembling statistics for {} SKIRT simulations...".format(size))
time = np.zeros(size)
memory = np.zeros(size)
for index in range(size):
    logfilepath = SkirtRun(records[index]["runid"]).simulation().logfilepath()
    for line in open(logfilepath):
        if " Finished simulation " in line:
            segments = line.split()
            processes = float(segments[segments.index("processes") - 1])
            timeindex = segments.index("s") if "s" in segments else segments.index("s.")
            walltime = float(segments[timeindex - 1])
            time[index] = processes * walltime
        if " Available memory: " in line:
            segments = line.split()
            memory[index] = float(segments[segments.index("usage:") + 1])

# construct the time histogram
log.info("Constructing plots...")
figure = plt.figure(figsize=(8, 8))
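# the remainder of the plotting code is not shown above; the lines below are a minimal sketch of
# how the time histogram might be filled in (the bin count and axis labels are assumptions),
# plotting the wall time of each simulation summed over processes, expressed in core hours
plt.hist(time / 3600, bins=20)
plt.xlabel("Total wall time (core hours)")
plt.ylabel("Number of simulations")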
# dump it into file
outfile = open(outfilepath, 'w')
pickle.dump(collection, outfile)
outfile.close()
print "Created info collection " + outfilepath

# -----------------------------------------------------------------

# chain the command-line arguments into a query list
if len(sys.argv) <= 1:
    raise ValueError("This script expects one or more command-line arguments")
querylist = "('{}')".format("','".join(sys.argv[1:]))
namelist = "_".join(sys.argv[1:])

# get a list of SkirtRun objects for which to collect statistics, in order of run-id
db = Database()
query = "runstatus in ('completed','archived') and label in {0} and eaglesim in {0}".format(querylist)
runids = sorted([ row['runid'] for row in db.select(query) ])
skirtruns = [ SkirtRun(runid) for runid in runids ]
db.close()

# perform the collection
if len(skirtruns) > 0:
    print "Collecting statistics from {} SKIRT-runs with label and eaglesim fields in {}...".format(len(skirtruns), querylist)
    collect_info(skirtruns, os.path.join(config.collections_path, "{}_info_collection.dat".format(namelist)))
else:
    print "There are no SKIRT-runs with label and eaglesim fields in {}.".format(querylist)

# -----------------------------------------------------------------
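# a collection written with pickle.dump() as above can be read back with pickle.load(); this is a
# minimal sketch, reusing the file name constructed above
infilepath = os.path.join(config.collections_path, "{}_info_collection.dat".format(namelist))
infile = open(infilepath, 'r')
collection = pickle.load(infile)
infile.close()
print "Loaded info collection " + infilepath

# -----------------------------------------------------------------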
# verify that there is something to do and that the offsite archive server is mounted
if len(runids) == 0:
    print "No SKIRT-runs to be archived."
    exit()
if not os.path.isdir(archive_path):
    print "Please mount the offsite archive server and try again."
    exit()

# open the database and process each SKIRT-run
db = Database()
for runid in runids:
    print "--> Creating archives for SKIRT-run {}...".format(runid)

    # get the paths for the in and out directories, and the corresponding zip archives
    skirtrun = SkirtRun(runid)
    indir = skirtrun.inpath()
    outdir = skirtrun.outpath()
    inzip = indir + ".zip"
    outzip = outdir + ".zip"

    # export the database record to a text file in the in directory
    info = db.select("runid=" + str(runid))[0]
    infofile = open(os.path.join(indir, "database_record.txt"), 'w')
    infofile.write("# SKIRT-run database record\n")
    for key in info.keys():
        infofile.write("{} = {}\n".format(key, info[key]))
    infofile.close()

    # create the archives, removing any previous versions first
    if os.path.isfile(inzip):
        os.remove(inzip)
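    # the actual archive creation is not shown above; the lines below are a minimal sketch using
    # the standard library (shutil is assumed to be imported at the top of the script): create
    # fresh zip archives for the in and out directories and copy them to the offsite archive server
    shutil.make_archive(indir, "zip", os.path.dirname(indir), os.path.basename(indir))
    shutil.make_archive(outdir, "zip", os.path.dirname(outdir), os.path.basename(outdir))
    shutil.copy(inzip, archive_path)
    shutil.copy(outzip, archive_path)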