# --- Script fragment (Python 2): unpack command-line options and, when
# --debug-lofar-pulse is set, read the LOFAR-calibrated pulse results for
# this event from the CR database. `options`, `eventid` and `os` are
# defined earlier in the file. ---
datadir = options.datadir
doTestProcessFunction = options.test
force_reprocess = options.force_reprocess
lorafile_suffix = options.lorafile_suffix
writedir = options.writedir
# NOTE(review): this unconditionally clobbers the lorafile_suffix option
# read three lines above, making that option a no-op -- confirm intentional.
lorafile_suffix = ''
# read in LOFAR calibrated pulse block, if desired with debug-lofar-pulse option
lofar_pulse = None
if options.debug_lofar_pulse:
    options.debug_test_pulse = True  # if not set, set to True
    # Read database info, for crp_plotfiles key which contains the most actual results directory (i.e. the one used in  the database)
    import psycopg2  # for changing status in database
    from pycrtools import crdatabase as crdb
    # Database credentials are masked in this copy of the source.
    dbManager = crdb.CRDatabase("crdb",
                                host="coma00.science.ru.nl",
                                user="******",
                                password="******",
                                dbname="crdb")
    db = dbManager.db
    print '### Replacing simulated data by LOFAR data processed by pipeline (calibrated-xyz) ###'
    print 'Reading event data, id = %d ...' % eventid
    event_data = crdb.Event(db=db, id=eventid)
    #event_data.stations[0].polarization['0']["crp_integrated_pulse_power"]
    # Pick the CS002 station record; raises IndexError if the event has no
    # CS002 station.
    station = [st for st in event_data.stations
               if st.stationname == 'CS002'][0]
    # Integrated pulse power from the '0' and 'xyz' polarizations.
    crp_integrated_pulse_power_01 = station.polarization['0'][
        'crp_integrated_pulse_power']
    crp_integrated_pulse_power_xyz = station.polarization['xyz'][
        'crp_integrated_pulse_power']
    crp_plotfiles = event_data["crp_plotfiles"]
    # Directory part of the first plot-file path = results directory actually
    # used in the database.
    results_dir = os.path.split(crp_plotfiles[0])[0]
Example #2
0
# --- Script fragment (Python 2): fetch LDF-fit and LORA energies for a
# list of simulated events from the CR database. `parser`, `np` and `crdb`
# are defined earlier in the file. ---
(options, args) = parser.parse_args()

list_id = int(options.list_id)  # NOTE(review): parsed but not used in this fragment

# The first column of the dataset file holds the event ids.
sim_list_file = open('xmax_dataset_nop_bias_1e17_1e18.txt', 'r')
list1 = np.genfromtxt(sim_list_file, usecols=(0))

nEvents = len(list1)

# Per-event result arrays, sized for the full event list.
ldf_fit_energy = np.zeros([nEvents])
ldf_fit_energy_particle = np.zeros([nEvents])
lora_energy = np.zeros([nEvents])
lora_elevation = np.zeros([nEvents])

# Database credentials are masked in this copy of the source.
dbManager = crdb.CRDatabase(host='coma00.science.ru.nl',
                            user='******',
                            password='******',
                            dbname='crdb')
db = dbManager.db

# NOTE(review): loop is hard-coded to the first 15 events even though the
# arrays above are sized nEvents -- confirm the truncation is intentional.
for i in np.arange(15):
    event_id = int(list1[i])
    print event_id

    # (fragment ends here: the `except` clause matching this `try` lies
    # outside the visible source)
    try:
        event = crdb.Event(db=db, id=event_id)

        ldf_fit_energy[i] = event["ldf_fit_energy"]
        ldf_fit_energy_particle[i] = event["ldf_fit_energy_particle"]
        # Convert elevation (degrees) to zenith angle (radians).
        lora_elevation[i] = (90 - event["lora_elevation"]) * np.pi / 180.0
        lora_energy[i] = event["lora_energy"]
Example #3
0
def GetLDF(eventno):
    # Open CR Database connection to read out data pipeline parameters for this event
    #    nofAttempts = 3
    #thisAttempt = 0
    #database_connection = False
    #while not database_connection and (thisAttempt < nofAttempts):
    #    try:
    #            database_connection = True
    #    except:
    #        thisAttempt += 1
    #        print 'Database connection failed at attempt %d' % thisAttempt
    #        import time
    #        time.sleep(20 + 100*np.random.rand() ) # sleep for 20 to 120 seconds before retrying

    #    if not database_connection:
    #        raise ValueError("No database connection after {0} attempts!".format(nofAttempts))

    dbManager = crdb.CRDatabase(host='coma00.science.ru.nl',
                                user='******',
                                password='******',
                                dbname='crdb')

    db = dbManager.db
    event = crdb.Event(db=db, id=eventno)
    """gotEvent = False
        nofAttempts = 3
        thisAttempt = 0
        while not gotEvent and (thisAttempt < nofAttempts):
            try:
                gotEvent = True
            except:
                thisAttempt += 1
                print 'Get event failed at attempt %d' % thisAttempt
                import time
                time.sleep(20 + 100*np.random.rand() )

        if not gotEvent:
            raise ValueError("Could not read event data after {0} attempts!".format(nofAttempts))
    """
    lora_elevation = 0
    try:
        energy = event["lora_energy"]
        core_x = event["lora_core_x"]
        core_y = event["lora_core_y"]
        azimuth = event["lora_azimuth"]
        lora_elevation = event["lora_elevation"]
        moliere = event["lora_moliere"]
        lora_x = event["lora_posx"]
        lora_y = event["lora_posy"]
        lora_dens = event["lora_particle_density__m2"]

        if LoraQualityPassed(moliere, core_x, core_y, lora_elevation):
            print "GOOD quality event (LORA)", energy, azimuth, lora_elevation, core_x, core_y, moliere
        else:
            print "LORA Quality not passed", energy, azimuth, lora_elevation, core_x, core_y, moliere
    except:
        print "Skipping event, no LORA data"
        return -1  # This should be tested for when calling the function... Nothing to return for a file without LORA info.

    # Loop over all stations in event
    stations = []
    for f in event.datafiles:
        stations.extend(f.stations)

    positions = []
    selected_dipoles = []
    delays = []
    amplitude = []
    rms = []
    power11 = []
    power21 = []
    power41 = []
    noisepower = []
    stationname = []
    pulse_direction = []
    pulse_delay_fit_residual = []
    time = []
    for station in stations:
        if station.status == "GOOD":
            try:
                p = station.polarization["xyz"]
                p0 = station.polarization["0"]
                positions.append(station["local_antenna_positions"])
                selected_dipoles.append(station["crp_selected_dipoles"])
                amplitude.append(p["crp_pulse_peak_amplitude"])
                power11.append(p0["crp_integrated_pulse_power"])
                power21.append(p0["crp_integrated_pulse_power_wide"])
                power41.append(p0["crp_integrated_pulse_power_double_wide"])
                noisepower.append(p0["crp_integrated_noise_power"])
                rms.append(p["crp_rms"])
                stationname.append([station.stationname] * len(p["crp_rms"]))
                pulse_direction.append(station["crp_pulse_direction"])
                pulse_delay_fit_residual.append(
                    station["crp_pulse_delay_fit_residual"])
                time.append(station["crp_pulse_time"])
            except:
                print "Do not have all pulse parameters for station", station.stationname

    #print positions
    positions = np.vstack(positions)
    selected_dipoles = np.hstack(selected_dipoles)
    amplitude = np.vstack(amplitude)
    rms = np.vstack(rms)
    power11 = np.vstack(power11)
    power21 = np.vstack(power21)
    power41 = np.vstack(power41)
    noisepower = np.vstack(noisepower)
    pulse_delay_fit_residual = np.hstack(pulse_delay_fit_residual)
    #stationname = np.array(stationname)
    stationname = np.array(sum(stationname, []))
    time = np.vstack(time)
    #print time[:,0]
    lof_azimuth = np.mean(pulse_direction, axis=0)[0]
    lof_elevation = np.mean(pulse_direction, axis=0)[1]

    shape = positions.shape
    positions = positions.reshape((shape[0] / 2, 2, shape[1]))[:, 0]
    positions = positions.copy()

    #uncer = np.sqrt(rms[:,0]**2+rms[:,1]**2 + rms[:,2]**2)
    #total = np.sqrt(amplitude[:,0]*signals[:,0]+signals[:,1]*signals[:,1]+signals[:,2]*signals[:,2])

    #print "Returning event:", eventno
    dist = GetDistance([core_x, core_y, 0], [azimuth, lora_elevation],
                       positions)
    x_err = GetTotalDistanceUncertainty([core_x, core_y, 0], [5., 5., 0],
                                        positions, [azimuth, lora_elevation],
                                        [2, 2, 0], dist)

    dist = np.array(dist)
    x_err = np.array(x_err)
    #print positions.shape, amplitude.shape, power.shape, rms.shape
    return core_x, core_y, stationname, selected_dipoles, positions, dist, x_err, amplitude, power11, power21, power41, rms, noisepower, pulse_delay_fit_residual, time[:, 0], lora_x, lora_y, lora_dens, lof_azimuth, lof_elevation, float(
        lora_elevation)
Example #4
0
# --- Script fragment (Python 2): command-line options and scheduling guard
# for CoREAS/CONEX air-shower simulation jobs. `parser`, `crdb`, `simhelp`,
# `sys` and `process_event` are defined/imported earlier in the file. ---
parser.add_option("--xmax", type=float, default=None, help="If given use this xmax, otherwise fall back to database value")
parser.add_option("--nof-conex-proton", type=int, default=150)
parser.add_option("--nof-conex-iron", type=int, default=50)
parser.add_option("--showers-around-xmax-estimate", type=int, default=11, help="Number of showers to simulate in a narrow region around the estimated Xmax. Default 11.")
parser.add_option("--width-around-xmax-estimate", type=float, default=20.0, help="Width of densely simulated region around Xmax estimate. Default 20 g/cm2")
parser.add_option("--no-atmosphere", default=False, action="store_true")
parser.add_option("--skip-conex", default=False, action="store_true")
parser.add_option("--skip-coreas", default=False, action="store_true")
parser.add_option("--skip-analysis", default=False, action="store_true")
parser.add_option("--ignore-suspended-jobs", default=False, action="store_true", help="Ignore suspended jobs in queue, i.e. submit duplicate jobs for these (if no other jobs RUNNING or PENDING for the same event)")

parser.add_option("--hadronic-interaction-model", type=str, default="QGSII")
(options, args) = parser.parse_args()

# Database location and credentials come from the command line here.
db_filename = options.database
dbManager = crdb.CRDatabase(db_filename, host=options.host, user=options.user, password=options.password, dbname=options.dbname)
db = dbManager.db

# Simulation states an event may be in to be (re)processed.
valid_status = ["DESIRED", "CONEX_STARTED", "CONEX_DONE", "COREAS_STARTED", "COREAS_DONE"]

print "skipping conex:", options.skip_conex
print "skipping coreas:", options.skip_coreas
print "skipping analysis:", options.skip_analysis

# Ignore if simulations are already scheduled
if options.id in simhelp.running_jobs(ignore_suspended=options.ignore_suspended_jobs):
    print "Event {0} already scheduled, skipping...".format(options.id)
    sys.exit(0)

# Get event from database and run pipeline on it
# (the body of this `with` block lies outside the visible source)
with process_event(crdb.Event(db=db, id=options.id)) as event:
Example #5
0
def find_trigger(event_id):

    #print 'running event: {0}'.format(event_id)
    
    detector=np.zeros([nDet])
    ymd=np.zeros([nDet])
    gps=np.zeros([nDet])
    ctd=np.zeros([nDet])
    nsec=np.zeros([nDet])
    trigg_condition=-1*np.ones([nDet])
    trigg_pattern=np.zeros([nDet])
    total_counts=np.zeros([nDet])
    pulse_height=np.zeros([nDet])
    pulse_width=np.zeros([nDet])
    counts=np.zeros([nDet,nTrace])
    on_off=np.zeros([nDet])
    local_trigger=-1*np.ones([nLasa])
    trigger_setting=-1
    dbManager = crdb.CRDatabase("crdb", host="coma00.science.ru.nl",user="******", password="******", dbname="crdb")
    db = dbManager.db

    event = crdb.Event(db = db, id = event_id)
    lora_nsec=event["lora_nsecs"]
    lora_utc=event["lora_utc_time_secs"]
    #print 'lora utc: {0}'.format(lora_utc)
    
    
    
    data=np.genfromtxt(open(on_off_filename,'r'))
    logUTC=data.T[0]
    on_off_log=data.T[1:21].T
    
    ## find closest day to lora utc
    idx = (np.abs(logUTC - lora_utc)).argmin()
    if logUTC[idx]>lora_utc:
        idx=idx-1
    on_off= on_off_log[idx]

    ## find detectors that are on/off daily with
    
    
    
    
    
    
    
    
    
    


    year=time.gmtime(lora_utc).tm_year
    month=time.gmtime(lora_utc).tm_mon
    day=time.gmtime(lora_utc).tm_mday

    # find correct log file for event (daily, sometimes not with a standard name)
    log_list= glob.glob(data_dir+'{0:04d}{1:02d}{2:02d}*.log'.format(year,month,day))
    log_list.extend(glob.glob(data_dir+'{0:04d}{1:02d}{2:02d}*.log'.format(year,month,day-1)))

    if day==1:
        log_list.extend(glob.glob(data_dir+'{0:04d}{1:02d}{2:02d}*.log'.format(year,month-1,30)))
        log_list.extend(glob.glob(data_dir+'{0:04d}{1:02d}{2:02d}*.log'.format(year,month-1,31)))
    if day==1 and month==1:
        log_list.extend(glob.glob(data_dir+'{0:04d}{1:02d}{2:02d}*.log'.format(year-1,12,31)))
        log_list.extend(glob.glob(data_dir+'{0:04d}{1:02d}{2:02d}*.log'.format(year-1,12,30)))


    found_utc=0
    for l in np.arange(len(log_list)):
        if str(int(lora_utc)) in open(log_list[l]).read():
            log_file_name=log_list[l]
            root_file_name=log_file_name.split('.')[0]+'.root'
            found_utc=1

    if found_utc==1:

        log_file=open(log_file_name,'r')
        root_file=ROOT.TFile.Open(root_file_name)

        tree_sec = root_file.Get("Tree_sec")
        tree_event = root_file.Get("Tree_event")
        tree_log = root_file.Get("Tree_log")
        tree_noise = root_file.Get("Tree_noise")

        active_lasas,trigger_setting=find_active_stations(log_file)
        
        event_index=find_entry_number(lora_utc,lora_nsec,tree_event)


        for d in np.arange(nDet):
            detname='Det'+str(d+1)
            det=tree_event.GetBranch(detname)

            detector[d],ymd[d],gps[d],ctd[d],nsec[d],trigg_condition[d],trigg_pattern[d],total_counts[d],pulse_height[d],pulse_width[d],counts[d]=getData(det,event_index)
        
            if np.max(counts[d])>0.0:
                on_off[d]=1

        # to get the lasa local trigger condition
        #katie: question- how can only 2 detectors have condition 2/4??
        lasa1=np.min(trigg_condition[0:4])
        lasa2=np.min(trigg_condition[4:8])
        lasa3=np.min(trigg_condition[8:12])
        lasa4=np.min(trigg_condition[12:16])
        lasa5=np.min(trigg_condition[16:20])
        local_trigger=np.asarray([lasa1,lasa2,lasa3,lasa4,lasa5])

        ## change to stricter condition in case of broken files
        for t in np.arange(len(local_trigger)):
            if local_trigger[t]<0.5:
                local_trigger[t]=3.0
        print 'event okay'


    else:
        print 'didn\'t find matching file'




    ## if there is a problem reading local trigger setting, change everything to strictest condition (3/4)
    if (-1 in local_trigger)==True:
        local_trigger=3*np.ones([nLasa])

    ## check if station or detector trigger
    ## catch weird settings

    trigger_type='d'
    if (int(trigger_setting)<6.0 and int(trigger_setting)>0.0):
        print 'station trigger'
        trigger_type='s'
    elif (int(trigger_setting)>5.0 and int(trigger_setting)<21.0):
        print 'detector trigger'
        trigger_type='d'
    else:
        if lora_utc<(jan2012+6*30*utc_day):
            trigger_setting=5.0
            trigger_type='s'
        elif lora_utc>(jan2012+6*30*utc_day) and lora_utc<jan2013:
            trigger_setting=4.0
            trigger_type='s'
        else:
            trigger_setting=13
            trigger_type='d'


    return on_off,int(trigger_setting),local_trigger,trigger_type