else:
    raise ValueError('catalog name not found')

if injweight is None:
    injweight = llhweight

print("The llhweight is: {}".format(llhweight))
print("The injweight is: {}".format(injweight))

# get unique job id
job_id = '{0}_nixtime_{2:.0f}_job_{1}'.format(socket.gethostname(), os.getpid(),
                                              time.time())

# set up submitter
submitter = Submitter(
    job_dir='/data/user/brelethford/Output/stacking_sensitivity/{0}/mstacking_{1}yr/jobs/{2}_{3}'.format(
        catalog, str(n), job_id, llhweight))
submitter.memory = 3

# figure out what dir we're in, and get the path to the actual job script
this_dir = os.path.dirname(os.path.abspath(__file__))
job_script = this_dir + '/background_trials.py'

# get the metaproject path
env_shell = os.getenv('I3_BUILD') + '/env-shell.sh'

# build the lists of commands and labels
commands, labels = [], []

## For this we'll need to do the background trials in batches of `batchsize`. So...
batchsize = 100
total_bckg_trials = 30000
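# --- hypothetical continuation (not part of the original script) ---
# A minimal sketch of how the per-batch commands could be assembled from the
# variables defined above, mirroring the batch arithmetic used in the sibling
# script below. The option names passed to background_trials.py (--batch,
# --batchsize, --llhweight, --injweight) are assumptions for illustration,
# not the script's actual interface.
batches = int(np.round(total_bckg_trials / batchsize))
for batch in range(batches):
    # one cluster job per batch of background trials
    commands.append('{0} {1} --batch {2} --batchsize {3} --llhweight {4} --injweight {5}'.format(
        env_shell, job_script, batch, batchsize, llhweight, injweight))
    labels.append('background_batch_{0:04d}'.format(batch))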
opts, args = parser.parse_args()
llhweight = opts.llhweight
injweight = opts.injweight

print("The llhweight is: {}".format(llhweight))
print("The injweight is: {}".format(injweight))

# get unique job id
job_id = '{0}_nixtime_{2:.0f}_job_{1}_{3}inj'.format(socket.gethostname(), os.getpid(),
                                                     time.time(), injweight)

# set up submitter
submitter = Submitter(
    job_dir='/data/user/brelethford/Output/stacking_sensitivity/SwiftBAT70m_mc_cut/{0}/jobs/{1}'.format(
        llhweight, job_id))

# figure out what dir we're in, and get the path to the actual job script
this_dir = os.path.dirname(os.path.abspath(__file__))
job_script = this_dir + '/n_inj_test_2.py'

# get the metaproject path
env_shell = os.getenv('I3_BUILD') + '/env-shell.sh'

# build the lists of commands and labels
commands, labels = [], []

## However, I don't know how I'd split this up, so I'm just gonna hope that I can do it all at once...
bins = 1
gammarange = np.linspace(2.0, 2.0, bins)
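# --- hypothetical continuation (not part of the original script) ---
# A minimal sketch of building one command per spectral index in gammarange
# for n_inj_test_2.py. The --gamma, --llhweight, and --injweight options are
# assumed here purely for illustration; with bins = 1 this produces a single
# job at gamma = 2.0.
for gamma in gammarange:
    commands.append('{0} {1} --gamma {2:.2f} --llhweight {3} --injweight {4}'.format(
        env_shell, job_script, gamma, llhweight, injweight))
    labels.append('gamma_{0:.2f}'.format(gamma))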
if injweight is None:
    injweight = llhweight

print("The llhweight is: {}".format(llhweight))
print("The injweight is: {}".format(injweight))

# get unique job id
job_id = '{0}_nixtime_{2:.0f}_job_{1}'.format(socket.gethostname(), os.getpid(),
                                              time.time())
jobdir = misc.ensure_dir(
    '/data/user/brelethford/Output/stacking_sensitivity/{0}/jstacking_{1}year/jobs/'.format(
        catalog, n_year))

# set up submitter
submitter = Submitter(job_dir=jobdir + '{0}_{1}'.format(job_id, llhweight))
submitter.memory = 3

# figure out what dir we're in, and get the path to the actual job script
this_dir = os.path.dirname(os.path.abspath(__file__))
job_script = this_dir + '/background_trials.py'

# get the metaproject path
env_shell = os.getenv('I3_BUILD') + '/env-shell.sh'

# build the lists of commands and labels
commands, labels = [], []

## For this we'll need to do the background trials in batches of `batchsize`. So...
batchsize = 100
total_bckg_trials = 10000
batches = int(np.round(total_bckg_trials / batchsize))
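# --- hypothetical continuation (not part of the original script) ---
# Sketch of filling the command/label lists for each background batch and
# handing them to the submitter. The submit_npx4(commands, labels) call is
# assumed from the umdtools Submitter interface; the background_trials.py
# options are illustrative only.
for batch in range(batches):
    commands.append('{0} {1} --batch {2} --batchsize {3}'.format(
        env_shell, job_script, batch, batchsize))
    labels.append('batch_{0:04d}'.format(batch))
submitter.submit_npx4(commands, labels)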
import os
import socket
import time

import numpy as np

from icecube.umdtools.submitter import Submitter

# get unique job id
job_id = '{0}_nixtime_{2:.0f}_job_{1}'.format(socket.gethostname(), os.getpid(),
                                              time.time())

# set up submitter
submitter = Submitter(
    job_dir='/data/user/brelethford/Output/all_sky_sensitivity/results/box/jobs/{0}'.format(
        job_id))

# figure out what dir we're in, and get the path to the actual job script
this_dir = os.path.dirname(os.path.abspath(__file__))
job_script = this_dir + '/all_sky_sensitivity_default.py'

# get the metaproject path
env_shell = os.getenv('I3_BUILD') + '/env-shell.sh'

# build the lists of commands and labels
commands, labels = [], []

bins = 31
# sindecrange = np.linspace(np.sin(np.radians(-85.0)), np.sin(np.radians(85.0)), bins)
# degdecrange = np.degrees(np.arcsin(sindecrange))
# Note - I used to do it with equal spacing in degrees, but I think I want equal
# spacing in sin_dec, for the purpose of graphing.
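# --- hypothetical continuation (not part of the original script) ---
# Sketch of the declination scan hinted at by the commented-out lines above:
# bin edges equally spaced in sin(dec), one job per declination. The --dec
# option for all_sky_sensitivity_default.py is an assumption made for
# illustration.
sindecrange = np.linspace(np.sin(np.radians(-85.0)), np.sin(np.radians(85.0)), bins)
degdecrange = np.degrees(np.arcsin(sindecrange))
for dec in degdecrange:
    commands.append('{0} {1} --dec {2:.2f}'.format(env_shell, job_script, dec))
    labels.append('dec_{0:+08.3f}'.format(dec))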