def arraystat_2 (table, sid, season=0, rob=True, per=True, flags=0) :
    """ Calculates a complicated number of parameters for a given star.

    Inputs:
      table -- an ATpy table with time-series photometry
      sid -- a WFCAM source ID.
      
    Optional inputs:
      season -- which season to select (1,2,3, or other=All)
      rob -- also use Robust statistics? (takes longer, default True)
      per -- run period-finding? (takes longer, default True)
      flags -- Maximum ppErrBit quality flags to use (default 0)

    Returns:
      ret -- a data structure containing the computed values.
      """
    
    s_table = data_cut( table, [sid], season=season, flags=flags )

    if len(s_table) < 1:
        print "no data for %d!" % sid
        return None
    
    jcol = s_table.JAPERMAG3; jerr = s_table.JAPERMAG3ERR
    hcol = s_table.HAPERMAG3; herr = s_table.HAPERMAG3ERR
    kcol = s_table.KAPERMAG3; kerr = s_table.KAPERMAG3ERR
    jmhcol=s_table.JMHPNT   ; jmherr = s_table.JMHPNTERR
    hmkcol=s_table.HMKPNT   ; hmkerr = s_table.HMKPNTERR
    racol= s_table.RA
    decol= s_table.DEC

    date = s_table.MEANMJDOBS 

    messy_table = data_cut( table, [sid], season=-1 )
    jppcol=messy_table.JPPERRBITS
    hppcol=messy_table.HPPERRBITS
    kppcol=messy_table.KPPERRBITS

    # make an empty data structure and just assign it information, then return 
    # the object itself!!! then there's no more worrying about indices.
    class Empty():
        pass

    ret = Empty()
    
    ret.N = len(s_table)
    ret.RA = racol.mean()
    ret.DEC = decol.mean()
    
    ret.chip = get_chip(date[0], np.degrees(racol[0]), np.degrees(decol[0]))
    if ret.N > 4:
        ret.one_chip = ( get_chip(date[0], racol[0], decol[0]) ==
                         get_chip(date[1], racol[1], decol[1]) ==
                         get_chip(date[2], racol[2], decol[2]) ==
                         get_chip(date[3], racol[3], decol[3]) )
    else:
        ret.one_chip = True
    
    ret.Stetson = stetson.S(jcol, jerr, hcol, herr, kcol, kerr)
    
    ret.j = Empty();   ret.j.data = jcol;   ret.j.err = jerr
    ret.h = Empty();   ret.h.data = hcol;   ret.h.err = herr
    ret.k = Empty();   ret.k.data = kcol;   ret.k.err = kerr
    ret.jmh = Empty(); ret.jmh.data=jmhcol; ret.jmh.err = jmherr
    ret.hmk = Empty(); ret.hmk.data=hmkcol; ret.hmk.err = hmkerr

    bands = [ ret.j, ret.h, ret.k, ret.jmh, ret.hmk ]

    for b in bands:
        # use b.data, b.err
        
        b.rchi2 = reduced_chisq( b.data, b.err )

        b.mean = b.data.mean()
        b.rms = b.data.std()
        b.min = b.data.min()
        b.max = b.data.max()
        b.peak_trough = b.max - b.min

        b.mean_err = b.err.mean()

        # Robust quantifiers simply have an "r" at the end of their names
        if rob:
            b.datar = rb.removeoutliers(b.data, 3, niter=2)
            
            b.meanr = rb.meanr(b.data)
            b.rmsr = rb.stdr(b.data)
            b.minr = b.datar.min()
            b.maxr = b.datar.max()
            b.peak_troughr = b.maxr - b.minr

        # Period finding... is a little dodgy still, and might take forever
        if per:
            
            b.lsp = lsp(date, b.data, 6., 6.) # apologies if this is cluttered
            Jmax = lsp_mask(b.lsp[0], b.lsp[1])
            b.lsp_per = 1./ b.lsp[0][Jmax]
            b.lsp_pow = b.lsp[1][Jmax]
            b.fx2_per = 1./ test_analyze( date, b.data, b.err )

    # Finally we'll want to do the whole slope, distance on the JMH graph
    # (until I get the fitting done, we'll have to use hmk and jmh naively)
    ret.color_slope = (ret.jmh.peak_trough / ret.hmk.peak_trough)
    


    # and the pp_max, using the messy table
    ret.jpp_max = jppcol.max()
    ret.hpp_max = hppcol.max()
    ret.kpp_max = kppcol.max()

    return ret
# --- (extraction artifact: "Example #2" / "0" separator between code samples) ---
def happy_chipnights ( data_table, corrections_table, max_correction=.0125 ):
    ''' Removes bad chip-nights from a data table.
    
    Inputs:
      data_table -- an ATpy table with WFCAM time-series photometry.
      corrections_table -- an ATpy table with per-night per-chip corrections.
      
    Optional inputs:
      max_correction -- the minimum "chip deviation" to flag a chip_night as
                        needing removal.
    '''

    # So, we'll load up the table, and... for every datapoint we'll 
    # filter stuff? That might take forever!
    # Alternatively, we could check every source for whether it's on only
    # one chip

    # How do I even delete a row from an atpy table? 
    # Ah... I have a clever way to do it I think. 
    # The best way to do this seems to be to add the j,h,k corrections columns to this table, then populate them row-by-row, and filter it all at the end! Okay, I'll do that.
    
    ctable = corrections_table
    

    # I think I just learned how to copy tables! 
    # it uses really silly "where" calls like where something is positive or negative or zero
    if atpy.__version__ == '0.9.3':
        try:
            # deleting the columns we need to create in a sec
            data_table.remove_columns( ['j_correction',
                                        'h_correction',
                                        'k_correction'])
        except: 
            pass
        table = data_table
    else:
        # making a friggin copy so we can use it, and calling it table
        table = data_table.copy()
    
    table.add_empty_column('j_correction', np.float64)
    table.add_empty_column('h_correction', np.float64)
    table.add_empty_column('k_correction', np.float64)
   
    for row in data_table:
        chip = n2.get_chip( row['MEANMJDOBS'], 
                            np.degrees(row['RA']),
                            np.degrees(row['DEC']) )
        
        local_corrections = ctable.where( (ctable.date == row['MEANMJDOBS']) &
                                          (ctable.chip == chip) )
        
        row['j_correction'] = local_corrections.j_correction
        row['h_correction'] = local_corrections.h_correction
        row['k_correction'] = local_corrections.k_correction

    
    print "Made corrections rows! They look like this (j, h, k):"
    print table.j_correction[0]
    print table.h_correction[0]
    print table.k_correction[0]

    # Now I guess it's time to filter stuff... though the above seems 
    # pretty useful on its own... like, that's basically the implementation
    # for apply_corrections...
    
    f_table = table.where( (np.abs(table.j_correction) < max_correction) &
                           (np.abs(table.h_correction) < max_correction) &
                           (np.abs(table.k_correction) < max_correction) )

    print "done filtering!"
    
    return f_table