# NOTE(review): Python 2 source (print statements; `raise StandardError, ...`
# appears later in this file).  Relies on names defined outside this chunk:
# trigger_dict, opts, idq, event, numpy, sys, main_channel, gps_start_time,
# gps_end_time.
# Nothing to do if upstream produced no triggers -- exit cleanly.
if not trigger_dict:
	print "Warning: No triggers in the input files, exiting without doing anything."
	sys.exit(0)

# Optionally load data-quality segments from the file named on the command
# line; dq_segments stays None when no segment filtering was requested.
if opts.dq_segments:
	# load dq segments
	(dq_segments, covered_segments) = idq.extract_dq_segments(open(opts.dq_segments, "r"), opts.dq_segments_name)
	# sort and merge segments
	dq_segments = event.fixsegments(dq_segments)
else:
	dq_segments = None

# construct auxmvc feature vectors
# build_auxmvc_vectors writes the feature vectors to opts.output_file and
# returns them; science_segments=dq_segments applies the DQ filtering above.
auxmvc_vectors = idq.build_auxmvc_vectors(trigger_dict, main_channel, opts.time_window, opts.signif_threshold, opts.output_file,\
  gps_start_time=gps_start_time, gps_end_time=gps_end_time, channels=opts.channels, unsafe_channels=opts.unsafe_channels,\
  science_segments = dq_segments, clean_samples_rate=opts.clean_samples_rate, filter_out_unclean=opts.filter_out_unclean,\
  max_clean_samples = opts.max_clean_samples, max_glitch_samples = opts.max_glitch_samples)
#  gps_start_time=gps_start_time, gps_end_time=gps_end_time, channels=opts.channels, unsafe_channels=opts.unsafe_channels,\
#  science_segments = dq_segments, clean_samples_rate=opts.clean_samples_rate, filter_out_unclean=opts.filter_out_unclean,\
#  max_clean_samples = opts.max_clean_samples, max_glitch_samples = opts.max_glitch_samples)

# Partition rows on the label column 'i': 0 = clean sample, 1 = glitch sample.
clean_samples = auxmvc_vectors[numpy.nonzero(auxmvc_vectors['i']==0)[0],:]
glitch_samples = auxmvc_vectors[numpy.nonzero(auxmvc_vectors['i']==1)[0],:]

if opts.verbose:
	print "total number of glitch samples in the set: " + str(len(glitch_samples))
	print "total number of clean samples in the set: " +str(len(clean_samples))



# NOTE(review): fragment -- the enclosing scope (apparently a training loop)
# starts before this chunk, and the first two lines carry inconsistent
# indentation (16 vs 12 spaces).  Code left byte-identical; comments only.
                clean_gps = sorted(event.randomrate(clean_rate, [[gpsstart-lookback, gpsstart + stride]])) ### generate random clean times as a poisson time series within analysis range
            clean_gps = [ l[0] for l in event.exclude( [[gps] for gps in clean_gps], dirtyseg, tcent=0)] ### keep only those gps times that are outside of dirtyseg

            ### keep only the most relevant cleans
            # cap at max_cln_samples, keeping the latest (largest) gps times
            if len(clean_gps) > max_cln_samples:
                clean_gps = clean_gps[-max_cln_samples:]

            ### keep only times that are within science time
            if not opts.ignore_science_segments:
                logger.info('  filtering trigger_dict through scisegs')
                trigger_dict.include(scisegs) ### already loaded into memory above here

            ### build vectors, also writes them into pat
            logger.info('  writting %s'%pat)
            idq.build_auxmvc_vectors(trigger_dict, gwchannel, auxmvc_coinc_window, auxmc_gw_signif_thr, pat, gps_start_time=gpsstart-lookback,
                                gps_end_time=gpsstart + stride,  channels=auxmvc_selected_channels, unsafe_channels=auxmvc_unsafe_channels, clean_times=clean_gps,
                                clean_window=clean_window, filter_out_unclean=False )

            # hard-coded success flag: the failure branch below can only fire
            # if the commented-out execute_build_auxmvc_vectors call is restored
            ptas_exit_status = 0 ### used to check for success

#            (ptas_exit_status, _) = idq.execute_build_auxmvc_vectors( config, output_dir, AUXkwtrgdir, gwchannel, pat, gpsstart - lookback, gpsstart + stride, channels=auxmvc_selected_channels, unsafe_channels=auxmvc_unsafe_channels, dq_segments=seg_file, dq_segments_name=dq_name )
#            os.chdir(cwd) ### go back to starting directory
 
        # check if process has been executed correctly
        if ptas_exit_status != 0: ### check that process executed correctly
            logger.warning('WARNING: Preparing training auxmvc samples failed')
            if opts.force:
                raise StandardError, "auxmvc samples required for successful training"
            else:
                logger.warning('WARNING: skipping re-training the MLA classifiers')
        else:
                # NOTE(review): from here the chunk repeats the block above in
                # autopep8-style formatting -- a duplicated scrape, not new logic
                clean_gps = clean_gps[-max_cln_samples:]

            ### keep only times that are within science time
            if not opts.ignore_science_segments:
                logger.info('  filtering trigger_dict through scisegs')
                trigger_dict.include(
                    scisegs)  ### already loaded into memory above here

            ### build vectors, also writes them into pat
            logger.info('  writting %s' % pat)
            idq.build_auxmvc_vectors(trigger_dict,
                                     gwchannel,
                                     auxmvc_coinc_window,
                                     auxmc_gw_signif_thr,
                                     pat,
                                     gps_start_time=gpsstart - lookback,
                                     gps_end_time=gpsstart + stride,
                                     channels=auxmvc_selected_channels,
                                     unsafe_channels=auxmvc_unsafe_channels,
                                     clean_times=clean_gps,
                                     clean_window=clean_window,
                                     filter_out_unclean=False)

            ptas_exit_status = 0  ### used to check for success

#            (ptas_exit_status, _) = idq.execute_build_auxmvc_vectors( config, output_dir, AUXkwtrgdir, gwchannel, pat, gpsstart - lookback, gpsstart + stride, channels=auxmvc_selected_channels, unsafe_channels=auxmvc_unsafe_channels, dq_segments=seg_file, dq_segments_name=dq_name )
#            os.chdir(cwd) ### go back to starting directory

# check if process has been executed correctly
        if ptas_exit_status != 0:  ### check that process executed correctly
            logger.warning('WARNING: Preparing training auxmvc samples failed')
            if opts.force:
# NOTE(review): fragment cut off here -- the `if opts.force:` body continues
# beyond this chunk in the original source.
# --- scrape artifact: example separator (original text: "예제 #4" / "0") ---
# NOTE(review): fragment -- the first line is indented as if inside a larger
# scope while the rest runs at module level; code left byte-identical.
    clean_gps = sorted(event.randomrate(clean_rate, [[gpsstart-lookback, gpsstart + stride]])) ### generate random clean times as a poisson time series within analysis range
clean_gps = [ l[0] for l in event.exclude( [[gps] for gps in clean_gps], dirtyseg, tcent=0)] ### keep only those gps times that are outside of dirtyseg
### keep only the most relevant cleans
# cap at max_cln_samples, keeping the latest gps times
if len(clean_gps) > max_cln_samples:
    clean_gps = clean_gps[-max_cln_samples:]

### keep only times that are within science time
if not opts.ignore_science_segments:
    logger.info('  filtering trigger_dict through scisegs')
    trigger_dict.include(scisegs) ### already loaded into memory above here
# NOTE(review): print() call form here vs `print` statements elsewhere --
# this scrape mixes Python 2/3 styles
print("Number of trigs in scisegs: " + str(len(trigger_dict[gwchannel])))

### build vectors, also writes them into pat
logger.info('  writing %s'%pat)
# this variant additionally passes locked_segments / include_time_* options
idq.build_auxmvc_vectors(trigger_dict, gwchannel, auxmvc_coinc_window, auxmc_gw_signif_thr, pat, gps_start_time=gpsstart,
    gps_end_time=gpsstop,  channels=auxmvc_selected_channels, unsafe_channels=auxmvc_unsafe_channels, clean_times=clean_gps,
    clean_window=clean_window, filter_out_unclean=False, locked_segments = "segment_times.txt", include_time_locked = opts.include_time_locked, 
    include_time_until_end = opts.include_time_until_end)

# hard-coded to 0, so the failure branch below is effectively dead here
ptas_exit_status = 0 ### used to check for success

if ptas_exit_status != 0: ### check that process executed correctly
    logger.warning('WARNING: Preparing training auxmvc samples failed')
    if opts.force:
	raise StandardError, "auxmvc samples required for successful training"
    else:
	logger.warning('WARNING: skipping re-training the MLA classifiers')
else:
    ### figure out training set size
    ### load auxmvc vector samples
    auxmvc_samples = auxmvc_utils.ReadMVSCTriggers([pat], Classified=False)
        # NOTE(review): fragment -- indented continuation of an unseen
        # enclosing scope; near-duplicate of the patfile-generation chunk
        # later in this file.  Code left byte-identical.
        pat = idq.pat(
            opts.outdir, ifo, usertag, gps_start, twopadding
        )  #"%s/%s_%d-%d.pat"%(opts.outdir, ifo, gps_start, twopadding)

        # generating auxmvc vector samples. result is saved into pat file
        # FIXME: depending how padding is done we should adjust behavior of build_auxmvc_vectors
        # Currently it keeps gw trigger from [t, t + stride] and uses time_window to pad this segment for auxiliary triggers
        # we do not filter out unclean beacuse it is already done when clean_gps times are formed
        auxmvc_vectors = idq.build_auxmvc_vectors(
            trgdict,
            gwchannel,
            auxmvc_coinc_window,
            auxmc_gw_signif_thr,
            pat,
            gps_start_time=gps_start,
            gps_end_time=gps_padd,
            channels=auxmvc_selected_channels,
            unsafe_channels=auxmvc_unsafe_channels,
            clean_times=clean_gps,
            clean_window=clean_window,
            filter_out_unclean=False)

    #=============================================
    # predictions
    #=============================================
    dats = {}
    # evaluate each configured classifier; 'flavor' selects the algorithm
    for classifier in classifiers:
        flavor = classifiersD[classifier]['flavor']

        print '%s cycle -> flavor=%s' % (classifier, flavor)
    # NOTE(review): fragment -- restricts triggers to the current analysis
    # stride for the GW channel only (trgdict, t, stride defined outside
    # this chunk).  Code left byte-identical.
    trgdict.include([[t, t + stride]], channels=[gwchannel])

    #====================
    # generate patfiles for mla classifiers
    #====================
    if mla:  # only build patfiles if machine-learning algorithms are present
        print 'building auxmvc feature vectors ...'

        pat = idq.pat(opts.outdir, ifo, usertag, gps_start, twopadding) #"%s/%s_%d-%d.pat"%(opts.outdir, ifo, gps_start, twopadding)

        # generating auxmvc vector samples. result is saved into pat file
        # FIXME: depending how padding is done we should adjust behavior of build_auxmvc_vectors
        # Currently it keeps gw trigger from [t, t + stride] and uses time_window to pad this segment for auxiliary triggers
                # we do not filter out unclean beacuse it is already done when clean_gps times are formed
        auxmvc_vectors = idq.build_auxmvc_vectors(trgdict, gwchannel, auxmvc_coinc_window, auxmc_gw_signif_thr, pat, gps_start_time=gps_start,
                                gps_end_time=gps_padd,  channels=auxmvc_selected_channels, unsafe_channels=auxmvc_unsafe_channels, clean_times=clean_gps,
                                clean_window=clean_window, filter_out_unclean=False )

    #=============================================
    # predictions
    #=============================================
    dats = {}
    # evaluate each configured classifier; 'flavor' selects the algorithm
    for classifier in classifiers:
        flavor = classifiersD[classifier]['flavor']

        print '%s cycle -> flavor=%s'%(classifier, flavor)

        ### find best training data
        cache = train_cache[classifier]
        ### the following is a stupid fudge to allow for multi-line cache file formats
            # NOTE(review): fragment -- indentation jumps from 8 to 12 spaces
            # here; the enclosing loop/conditional is outside this chunk.
            if not opts.ignore_science_segments:
                clean_gps = sorted(event.randomrate(clean_rate, scisegs)) ### generate random clean times as a poisson time series within scisegs
            else:
                clean_gps = sorted(event.randomrate(clean_rate, [[gpsstart-lookback, gpsstart + stride]])) ### generate random clean times as a poisson time series within analysis range
            clean_gps = [ l[0] for l in event.exclude( [[gps] for gps in clean_gps], dirtyseg, tcent=0)] ### keep only those gps times that are outside of dirtyseg

            ### keep only times that are within science time
            if not opts.ignore_science_segments:
                logger.info('  filtering trigger_dict through scisegs')
                trigger_dict.include(scisegs) ### already loaded into memory above here

            ### build vectors, also writes them into pat
            logger.info('  writting %s'%pat)
            idq.build_auxmvc_vectors(trigger_dict, gwchannel, auxmvc_coinc_window, auxmc_gw_signif_thr, pat, gps_start_time=gpsstart-lookback,
                                gps_end_time=gpsstart + stride,  channels=auxmvc_selected_channels, unsafe_channels=auxmvc_unsafe_channels, clean_times=clean_gps,
                                clean_window=clean_window, filter_out_unclean=False, max_glitch_samples=max_gch_samples, max_clean_samples=max_cln_samples ,
                                science_segments=None ) ### we handle scisegs in this script rather than delegating to idq.build_auxmvc_vectors, so science_segments=None is appropriate

            # hard-coded success flag; a real status is only produced by the
            # commented-out execute_build_auxmvc_vectors call below
            ptas_exit_status = 0 ### used to check for success

#            (ptas_exit_status, _) = idq.execute_build_auxmvc_vectors( config, output_dir, AUXkwtrgdir, gwchannel, pat, gpsstart - lookback, gpsstart + stride, channels=auxmvc_selected_channels, unsafe_channels=auxmvc_unsafe_channels, dq_segments=seg_file, dq_segments_name=dq_name )
#            os.chdir(cwd) ### go back to starting directory
 
        # check if process has been executed correctly
        if ptas_exit_status != 0: ### check that process executed correctly
            logger.warning('WARNING: Preparing training auxmvc samples failed')
            if opts.force:
                raise StandardError, "auxmvc samples required for successful training"
            else:
                logger.warning('WARNING: skipping re-training the MLA classifiers')
        else:
# NOTE(review): fragment cut off -- the else-branch body lies beyond this
# chunk in the original source.
# --- scrape artifact: example separator (original text: "예제 #8" / "0") ---
    # NOTE(review): fragment -- these two lines are the tail of an unseen
    # `if not trigger_dict:` guard (cf. the identical chunk at file top).
    print "Warning: No triggers in the input files, exiting without doing anything."
    sys.exit(0)

# Optionally load data-quality segments; dq_segments stays None when no
# segment filtering was requested.
if opts.dq_segments:
    # load dq segments
    (dq_segments,
     covered_segments) = idq.extract_dq_segments(open(opts.dq_segments, "r"),
                                                 opts.dq_segments_name)
    # sort and merge segments
    dq_segments = event.fixsegments(dq_segments)
else:
    dq_segments = None

# construct auxmvc feature vectors
# build_auxmvc_vectors writes the feature vectors to opts.output_file and
# returns them; science_segments=dq_segments applies the DQ filtering above.
auxmvc_vectors = idq.build_auxmvc_vectors(trigger_dict, main_channel, opts.time_window, opts.signif_threshold, opts.output_file,\
  gps_start_time=gps_start_time, gps_end_time=gps_end_time, channels=opts.channels, unsafe_channels=opts.unsafe_channels,\
  science_segments = dq_segments, clean_samples_rate=opts.clean_samples_rate, filter_out_unclean=opts.filter_out_unclean,\
  max_clean_samples = opts.max_clean_samples, max_glitch_samples = opts.max_glitch_samples)
#  gps_start_time=gps_start_time, gps_end_time=gps_end_time, channels=opts.channels, unsafe_channels=opts.unsafe_channels,\
#  science_segments = dq_segments, clean_samples_rate=opts.clean_samples_rate, filter_out_unclean=opts.filter_out_unclean,\
#  max_clean_samples = opts.max_clean_samples, max_glitch_samples = opts.max_glitch_samples)

# Partition rows on the label column 'i': 0 = clean sample, 1 = glitch sample.
clean_samples = auxmvc_vectors[numpy.nonzero(auxmvc_vectors['i'] == 0)[0], :]
glitch_samples = auxmvc_vectors[numpy.nonzero(auxmvc_vectors['i'] == 1)[0], :]

if opts.verbose:
    print "total number of glitch samples in the set: " + str(
        len(glitch_samples))
    print "total number of clean samples in the set: " + str(
        len(clean_samples))
# --- scrape artifact: example separator (original text: "예제 #9" / "0") ---
            # NOTE(review): fragment -- near-duplicate of an earlier chunk in
            # this file; the enclosing scope starts before this chunk and the
            # trailing `else:` is cut off.  Code left byte-identical.
            if not opts.ignore_science_segments:
                clean_gps = sorted(event.randomrate(clean_rate, scisegs)) ### generate random clean times as a poisson time series within scisegs
            else:
                clean_gps = sorted(event.randomrate(clean_rate, [[gpsstart-lookback, gpsstart + stride]])) ### generate random clean times as a poisson time series within analysis range
            clean_gps = [ l[0] for l in event.exclude( [[gps] for gps in clean_gps], dirtyseg, tcent=0)] ### keep only those gps times that are outside of dirtyseg

            ### keep only times that are within science time
            if not opts.ignore_science_segments:
                logger.info('  filtering trigger_dict through scisegs')
                trigger_dict.include(scisegs) ### already loaded into memory above here

            ### build vectors, also writes them into pat
            logger.info('  writting %s'%pat)
            idq.build_auxmvc_vectors(trigger_dict, gwchannel, auxmvc_coinc_window, auxmc_gw_signif_thr, pat, gps_start_time=gpsstart-lookback,
                                gps_end_time=gpsstart + stride,  channels=auxmvc_selected_channels, unsafe_channels=auxmvc_unsafe_channels, clean_times=clean_gps,
                                clean_window=clean_window, filter_out_unclean=False, max_glitch_samples=max_gch_samples, max_clean_samples=max_cln_samples ,
                                science_segments=None ) ### we handle scisegs in this script rather than delegating to idq.build_auxmvc_vectors, so science_segments=None is appropriate

            # hard-coded success flag; a real status is only produced by the
            # commented-out execute_build_auxmvc_vectors call below
            ptas_exit_status = 0 ### used to check for success

#            (ptas_exit_status, _) = idq.execute_build_auxmvc_vectors( config, output_dir, AUXkwtrgdir, gwchannel, pat, gpsstart - lookback, gpsstart + stride, channels=auxmvc_selected_channels, unsafe_channels=auxmvc_unsafe_channels, dq_segments=seg_file, dq_segments_name=dq_name )
#            os.chdir(cwd) ### go back to starting directory
 
        # check if process has been executed correctly
        if ptas_exit_status != 0: ### check that process executed correctly
            logger.warning('WARNING: Preparing training auxmvc samples failed')
            if opts.force:
                raise StandardError, "auxmvc samples required for successful training"
            else:
                logger.warning('WARNING: skipping re-training the MLA classifiers')
        else:
# NOTE(review): fragment ends on a dangling `else:` -- its body lies beyond
# this chunk in the original source.