def big_sky(nside=32, weights=None):
    """Generate a 'big sky' survey footprint weight map, per filter.

    Based on the Olsen et al. Cadence White Paper: the WFD area spans
    declinations -72.25 to +12.4 deg while avoiding the galactic plane
    (|b| < 15 deg), plus a lower-weight northern extension up to +30 deg.

    Parameters
    ----------
    nside : int, optional
        Healpix nside resolution of the returned maps.
    weights : dict, optional
        Mapping of filter name -> ``[wfd_weight, north_weight]`` or
        ``[wfd_weight, north_weight, extend_north]``.  When the optional
        third element is present and falsy, the northern extension is
        zeroed out for that filter.  Defaults to the white-paper weights.

    Returns
    -------
    dict
        Filter name -> healpix array of per-pixel weights.
    """
    # Avoid the mutable-default-argument pitfall: build the default per call.
    if weights is None:
        weights = {
            'u': [0.31, 0.15, False],
            'g': [0.44, 0.15],
            'r': [1., 0.3],
            'i': [1., 0.3],
            'z': [0.9, 0.3],
            'y': [0.9, 0.3, False],
        }
    wfd_north = 12.4
    wfd_south = -72.25
    gal_lat_limit = 15.
    full_north = 30.
    # WFD in big sky = dec range -72.25 to 12.4, avoiding galactic plane |b| < 15 deg.
    bigsky = utils.WFD_no_gp_healpixels(
        nside, dec_min=wfd_south, dec_max=wfd_north,
        center_width=gal_lat_limit, gal_long1=0, gal_long2=360)
    # Add the extension to the north, up to 30 deg.  Those pixels are tagged
    # with a tiny sentinel (1e-6) so they can be told apart from the WFD
    # pixels (value 1) when assigning per-filter weights below.
    ra, dec = utils.ra_dec_hp_map(nside=nside)
    bigsky = np.where(
        (dec > np.radians(wfd_north)) & (dec < np.radians(full_north)),
        1.e-6, bigsky)
    # Now break the footprint down by filter.
    result = {}
    for key in weights:
        result[key] = bigsky + 0.  # copy, so filters don't share one array
        result[key][np.where(result[key] == 1)] = weights[key][0]
        result[key][np.where(result[key] == 1e-6)] = weights[key][1]
        # A falsy third element disables the northern extension for this
        # filter.  (Previously any 3-element list disabled it, ignoring the
        # flag's value; checking the flag generalizes without changing the
        # behavior of the default weights, which all use False.)
        if len(weights[key]) == 3 and not weights[key][2]:
            result[key][np.where(dec > np.radians(wfd_north))] = 0.
    return result
def ResultadosNtotBolV2(FBS, mod):
    """Run the bolometric NtotMetricV2 metric over all opsim runs of one FBS release.

    Builds one metric bundle for the standard WFD footprint and one for the
    DDF fields, then runs both over every opsim database found under
    ``./lsst_cadence/FBS_{FBS}/`` (skipping "noddf" variants), writing
    results under ``/data/agonzalez/output_FBS_{FBS}/bolNtot_mod{mod}_FINAL/``.

    Parameters
    ----------
    FBS : str
        Feature-Based Scheduler release tag, e.g. "1.5".
    mod : str
        Model identifier passed through to NtotMetricV2, e.g. "A".
    """
    print("FBS usado:", FBS)
    print("mod:", mod)

    # ------------------------------- bundles -------------------------------
    metric = NtotMetricV2(mod, f1f2diff=2)

    # WFD: restrict the healpix slicer to the standard WFD footprint pixels.
    constraint1 = "note NOT LIKE '%DD%'"
    wfd_standard = schedUtils.WFD_no_gp_healpixels(64)
    slicer1 = slicers.HealpixSubsetSlicer(64, np.where(wfd_standard == 1)[0])
    bundle1 = mb.MetricBundle(metric, slicer1, constraint1)

    # DDF: full-sky slicer, deep-drilling visits only.
    constraint2 = "note LIKE '%DD%'"
    slicer2 = slicers.HealpixSlicer(nside=64)
    bundle2 = mb.MetricBundle(metric, slicer2, constraint2)

    print("==============================================")
    print("constraint WFD:" + constraint1)
    print("constraint DDF:" + constraint2)

    # ----------------------------- directories -----------------------------
    show_fbs_dirs()  # list the available database directories
    dbDir = './lsst_cadence/FBS_{}/'.format(FBS)
    outDir = '/data/agonzalez/output_FBS_{}/bolNtot_mod{}_FINAL/'.format(
        FBS, mod)
    # exist_ok=True already tolerates a pre-existing directory; no need for
    # a racy os.path.exists() check first.
    os.makedirs(os.path.abspath(outDir), exist_ok=True)
    opSimDbs, resultDbs = connect_dbs(dbDir, outDir)
    # Keep the metric data alongside the other outputs (derived from outDir
    # rather than re-formatting the same hard-coded path twice).
    metricDataPath = os.path.join(outDir, 'MetricData/')
    os.makedirs(os.path.abspath(metricDataPath), exist_ok=True)
    print("===================================================")
    print("dbDir :", dbDir)
    print("outDir :", outDir)
    print("metricDataPath :", metricDataPath)
    print("===================================================")

    # ---------------------------- bundle group -----------------------------
    dbRuns = show_opsims(dbDir)
    print(dbRuns)
    # Skip the no-deep-drilling simulation variants.
    dbRuns = [x for x in dbRuns if "noddf" not in x]
    for run in dbRuns:
        bDict = {"WFD": bundle1, "DDF": bundle2}
        bundle1.setRunName(run)
        bundle2.setRunName(run)
        bgroup = mb.MetricBundleGroup(bDict, opSimDbs[run], metricDataPath,
                                      resultDbs[run])
        bgroup.runAll()
def run_por_filtro(filtros_considerados, FBS, pruebas=""):
    """Run the NpairsMetric for a filter set over all opsim runs of one FBS release.

    Builds one metric bundle for the standard WFD footprint and one for the
    DDF fields, runs both over every opsim database found under
    ``./lsst_cadence/FBS_{FBS}/``, and prints per-run progress with an ETA.

    Parameters
    ----------
    filtros_considerados : sequence of str
        Filter names; fed to ``inicio_constraint`` and joined into the
        output directory names.
    FBS : str
        Feature-Based Scheduler release tag, e.g. "1.5".
    pruebas : str, optional
        Set to "PRUEBAS" to write into a test directory; leave empty ("")
        for the production output directory.
    """
    import time

    print("FBS usado:", FBS)
    metric = NpairsMetric()

    # WFD: restrict the healpix slicer to the standard WFD footprint pixels.
    constraint1 = inicio_constraint(
        filtros_considerados) + "note NOT LIKE '%DD%'"
    wfd_standard = schedUtils.WFD_no_gp_healpixels(64)
    slicer1 = slicers.HealpixSubsetSlicer(64, np.where(wfd_standard == 1)[0])
    bundle1 = mb.MetricBundle(metric, slicer1, constraint1)

    # DDF: full-sky slicer, deep-drilling visits only.  (Selecting on the
    # note column; "proposalId > 1" used to cause problems here.)
    constraint2 = inicio_constraint(filtros_considerados) + "note LIKE '%DD%'"
    slicer2 = slicers.HealpixSlicer(nside=64)
    bundle2 = mb.MetricBundle(metric, slicer2, constraint2)

    print("==============================================")
    print("constraint WFD:" + constraint1)
    print("constraint DDF:" + constraint2)

    # Directories.  Must be run from the directory next to ./lsst_cadence.
    dbDir = './lsst_cadence/FBS_{}/'.format(FBS)
    outDir = '/data/agonzalez/NpairSECONDS/output_FBS_{0}{2}/Npairs_{1}/'.format(
        FBS, "".join(filtros_considerados), pruebas)
    # exist_ok=True already tolerates a pre-existing directory; no need for
    # a racy os.path.exists() check first.
    os.makedirs(os.path.abspath(outDir), exist_ok=True)
    opSimDbs, resultDbs = connect_dbs(dbDir, outDir)
    # Derive the metric-data path from outDir instead of re-formatting the
    # same hard-coded path a second time.
    metricDataPath = os.path.join(outDir, 'MetricData/')
    os.makedirs(os.path.abspath(metricDataPath), exist_ok=True)

    print("===================================================")
    print("dbDir :", dbDir)
    print("outDir :", outDir)
    print("metricDataPath :", metricDataPath)
    print("===================================================")

    # Bundle group: run every opsim database ("noddf" runs included here).
    dbRuns = show_opsims(dbDir)

    start = time.perf_counter()
    cantidad_runs = len(dbRuns)
    for n, run in enumerate(dbRuns, start=1):
        bDict = {"WFD": bundle1, "DDF": bundle2}
        bundle1.setRunName(run)
        bundle2.setRunName(run)
        bgroup = mb.MetricBundleGroup(bDict, opSimDbs[run], metricDataPath,
                                      resultDbs[run])
        bgroup.runAll()

        # Progress / ETA bookkeeping: scale elapsed time by remaining runs.
        tiempo_transcurrido = time.perf_counter() - start
        tiempo_faltante_aprox = tiempo_transcurrido * (cantidad_runs - n) / n
        m, s = divmod(tiempo_transcurrido, 60)
        transcurrido_con_formato = "{0:.0f}m:{1:.0f}s".format(m, s)
        m, s = divmod(tiempo_faltante_aprox, 60)
        faltante_con_formato = "{0:.0f}m:{1:.0f}s".format(m, s)
        print(
            "________________________________________________________________________"
        )
        print("Terminado dbRun {}/{}, tiempo transcurrido: {}".format(
            n, cantidad_runs, transcurrido_con_formato))
        print("approx ETA: {}".format(faltante_con_formato))
        print(
            "________________________________________________________________________"
        )

    print(
        " ===================================================================================================== "
    )
    print(
        " =========================================== FINISHED ================================================ "
    )
    print(
        " ===================================================================================================== "
    )
type=str, help='sqlite file of observations (full path).') parser.add_argument( '--wfd', type=str, default='standard', help='Type of wfd footprint [standard, extended, dust]') args = parser.parse_args() # There are some standard WFD footprints: the standard version, the extendded version (with gal-lat cut), # and an extended version with dust extinction cuts. # If you're using something else (non-standard), this script will have to be modified. # Note these "footprints" are standard (full) length healpix arrays for nside, but with values of 0/1 wfd_defaults = ['standard', 'extended', 'dust', 'extended with dust'] nside = 64 if args.wfd.lower() == 'standard': wfd_footprint = schedUtils.WFD_no_gp_healpixels(nside) elif args.wfd.lower() == 'extended': wfd_footprint = schedUtils.WFD_bigsky_healpixels(nside) elif args.wfd.lower() == 'dust' or args.wfd.lower( ) == 'extended with dust': wfd_footprint = schedUtils.WFD_no_dust_healpixels(nside) else: raise ValueError( f'This script understands wfd footprints of types {wfd_defaults}') visits = get_visits(args.dbfile) visits, propTags, propId = label_visits(visits, wfd_footprint) update_database(args.dbfile, visits, propTags, propId)