Example #1
0
            # Cap the clean-sample set at the max_num_cln most recent entries
            # (relies on cleans being sorted by GPS time upstream).
            logger.info('  downselecting to the %d most recent cleans' %
                        max_num_cln)
            cleans = cleans[-max_num_cln:]
        # Re-merge the separately downselected glitch and clean samples into
        # a single output structure keyed by these_columns.
        output = idq.combine_separated_output(these_columns,
                                              [glitches, cleans])

        ### define weights over time
        # "uniform" weighting: every sample contributes equally regardless of
        # its GPS time.
        output['weight'] = calibration.weights(output['GPS'],
                                               weight_type="uniform")

        # Both branches write the same data; only the "clustered" vs
        # "unclustered" tag in the output filename differs.
        if not opts.dont_cluster:
            cluster_dat = idq.dat(output_dir, classifier, ifo, "clustered",
                                  usertag, gpsstart - lookback, lookback +
                                  stride)  ### write clustered dat file
            logger.info('  writing %s' % cluster_dat)
            idq.output_to_datfile(output, cluster_dat)
        else:
            cluster_dat = idq.dat(output_dir, classifier, ifo, "unclustered",
                                  usertag, gpsstart - lookback,
                                  lookback + stride)
            logger.info('  writing %s' % cluster_dat)
            idq.output_to_datfile(output, cluster_dat)

        ### compute rcg from output
        # NOTE(review): per the N_gch/N_cln log line below, c and g appear to
        # be cumulative clean/glitch counts (last element = total) -- confirm
        # against idq.dat_to_rcg.
        r, c, g = idq.dat_to_rcg(output)

        logger.info('    N_gch = %d , N_cln = %d' % (g[-1], c[-1]))

        ### dump into roc file
        # ROC file covers the analysis window [gpsstart-lookback,
        # gpsstart+stride).
        roc = idq.roc(output_dir, classifier, ifo, usertag,
                      gpsstart - lookback, lookback + stride)
                # Unrecognized combiner configuration: fail loudly rather than
                # silently producing an undefined joint statistic.
                raise ValueError(
                    "combiner=%s joint_p(c)=%s not understood" %
                    (combiner, combinersD[combiner]['joint_p(c)']))

            ### compute combined statistics
            logger.info('  computing approximation to joint likelihood ratio')
            # Likelihood ratio (presumably p(glitch)/p(clean) per the variable
            # names -- confirm); the rank maps L into [0, 1) via L/(1+L).
            L_joint = pofg_joint / pofc_joint  ### compute likelihood
            r_joint = L_joint / (1 + L_joint)  ### compute rank

            ### put them into output
            output['rank'] = r_joint
            output['Likelihood'] = L_joint

            ### write datfile
            logger.info('  writing %s' % dat)
            idq.output_to_datfile(output, dat)

            ### convert datfiles to xml tables
            logger.info('  Begin: converting %s dat file into xml files' %
                        combiner)
            logger.info('    converting %s to xml tables' % dat)

            ### read dafile -> xml docs
            # Produces one xml document for glitches and one for cleans;
            # Lmap=False so only the FAP/efficiency maps are applied.
            (gchxml_doc,
             clnxml_doc) = idq.datfile2xmldocs(dat,
                                               ifo,
                                               fapmap,
                                               Lmap=False,
                                               Effmap=effmap,
                                               flavor=flavor,
                                               gwchan=gwchannel,
        # Sort cleans by GPS time (ascending) so that the [-max_num_cln:]
        # slices below keep the most recent samples.
        cleans.sort(key=lambda l: l[these_columns['GPS']])
        if len(glitches) > max_num_gch:
            logger.info('  downselecting to the %d most recent glitches'%max_num_gch)
            glitches = glitches[-max_num_gch:]
        if len(cleans) > max_num_cln:
            logger.info('  downselecting to the %d most recent cleans'%max_num_cln)
            cleans = cleans[-max_num_cln:]
        # Re-merge the downselected glitch and clean samples into one output
        # structure keyed by these_columns.
        output = idq.combine_separated_output( these_columns, [glitches, cleans] )

        ### define weights over time
        # "uniform" weighting: every sample contributes equally regardless of
        # its GPS time.
        output['weight'] = calibration.weights( output['GPS'], weight_type="uniform" )

        # Both branches write the same data; only the "clustered" vs
        # "unclustered" filename tag differs.
        if not opts.dont_cluster:
            cluster_dat = idq.dat(output_dir, classifier, ifo, "clustered", usertag, gpsstart-lookback, lookback+stride) ### write clustered dat file
            logger.info('  writing %s'%cluster_dat)
            idq.output_to_datfile( output, cluster_dat )
        else:
            cluster_dat = idq.dat(output_dir, classifier, ifo, "unclustered", usertag, gpsstart-lookback, lookback+stride)
            logger.info('  writing %s'%cluster_dat)
            idq.output_to_datfile( output, cluster_dat )

        ### compute rcg from output
        # NOTE(review): per the N_gch/N_cln log line below, c and g appear to
        # be cumulative clean/glitch counts (last element = total) -- confirm
        # against idq.dat_to_rcg.
        r, c, g = idq.dat_to_rcg( output )

        logger.info('    N_gch = %d , N_cln = %d'%(g[-1], c[-1]))


        ### dump into roc file
        roc = idq.roc(output_dir, classifier, ifo, usertag, gpsstart-lookback, lookback+stride)
        # NOTE(review): typo "writting" in the log message -- harmless, but
        # worth fixing in a behavior-changing pass.
        logger.info('  writting %s'%roc)
        idq.rcg_to_file(roc, r, c, g)
                # Combine per-classifier p(clean) values with a product over
                # axis 0 -- presumably an independence approximation; confirm.
                pofc_joint = numpy.prod( pofc_joint, axis=0 )
            else:
                # Unrecognized combiner configuration: fail loudly rather than
                # silently producing an undefined joint statistic.
                raise ValueError("combiner=%s joint_p(c)=%s not understood"%(combiner, combinersD[combiner]['joint_p(c)']))

            ### compute combined statistics
            logger.info('  computing approximation to joint likelihood ratio')
            # Likelihood ratio (presumably p(glitch)/p(clean) per the variable
            # names -- confirm); the rank maps L into [0, 1) via L/(1+L).
            L_joint = pofg_joint / pofc_joint ### compute likelihood
            r_joint = L_joint / ( 1 + L_joint ) ### compute rank

            ### put them into output
            output['rank'] = r_joint
            output['Likelihood'] = L_joint

            ### write datfile
            logger.info('  writing %s'%dat)
            idq.output_to_datfile( output, dat )

            ### convert datfiles to xml tables
            logger.info('  Begin: converting %s dat file into xml files'%combiner)
            logger.info('    converting %s to xml tables' % dat)

            ### read dafile -> xml docs
            # Produces one xml document for glitches and one for cleans;
            # Lmap=False so only the FAP/efficiency maps are applied.
            (gchxml_doc, clnxml_doc) = idq.datfile2xmldocs(dat, ifo, fapmap, Lmap=False, Effmap=effmap, flavor=flavor,
                                         gwchan=gwchannel, gwtrigs=gw_trig, prog=__prog__, options=opts.__dict__, version=__version__ )

            ### write documents
            # gz= flag gzips the output when the target filename ends in .gz.
            logger.info('    --> writing ' + gchxml)
            idq.ligolw_utils.write_filename(gchxml_doc, gchxml, gz=gchxml.endswith('.gz'))

            logger.info('    --> writing ' + clnxml)
            idq.ligolw_utils.write_filename(clnxml_doc, clnxml, gz=clnxml.endswith('.gz'))