Example #1
def compute_residuals(model):
    """
    """
    res = []
    for i in range(len(Globals.times[0])):
        for j in range(len(model)-1):
            if model[j][0] <= Globals.times[0][i] <= model[j+1][0]:
                res.append([Globals.times[0][i], Globals.rvs[0][i] - model[j][1], Globals.rv_errs[0][i] ])
    
    mgls_io.write_file('residuals.dat', res, ' ', '')
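The double loop above is an O(N*M) left-endpoint lookup into a time-sorted model grid. A minimal NumPy sketch of the same residual computation (residuals_numpy is a hypothetical helper, not part of mgls; it takes the arrays explicitly instead of reading them from Globals):

import numpy as np

def residuals_numpy(times, rvs, rv_errs, model):
    """Hypothetical vectorized equivalent of compute_residuals():
    subtract the model value of the grid point immediately to the left
    of each observation time."""
    model = np.asarray(model)              # rows [t, y_model, err], sorted by t
    idx = np.searchsorted(model[:, 0], times, side='right') - 1
    idx = np.clip(idx, 0, len(model) - 2)  # keep observations inside the grid
    return np.column_stack([times, np.asarray(rvs) - model[idx, 1], rv_errs])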
Example #2
def multiset_model(opt_freqs, opt_jitters, fitting_coeffs):
    """model
    """ 
    model = list()
    for s in range(Globals.n_sets):
        offset = fitting_coeffs[s]
        #recenter each dataset
        data_file = []
        for j in range(len(Globals.times[s])):
            if Globals.jitter:
                data_file.append( [Globals.times[s][j], Globals.rvs[s][j]-offset, sqrt(Globals.rv_errs[s][j]**2 + opt_jitters[s]**2)] ) 
            else:
                data_file.append( [Globals.times[s][j], Globals.rvs[s][j]-offset, Globals.rv_errs[s][j]] )
        mgls_io.write_file('offset_' + str(s) + '.dat', data_file, ' ', '')
        
        t_0, t_f = Globals.times[s][0] - 10.0, Globals.times[s][-1] + 10.0
        for i in range(12*int(t_f-t_0)):
            t = t_0 + i*(t_f-t_0)/(12*int(t_f-t_0))
            y_model = 0.0
            if Globals.ndim > 0:
                for k in range(Globals.n_sets, len(fitting_coeffs)):
                    if k >= Globals.n_sets and k < Globals.ndim + Globals.n_sets:
                        y_model += fitting_coeffs[k]*cos(2.0*pi*(t-Globals.times[0][0])*opt_freqs[k-Globals.n_sets])
                    elif (k >= (Globals.ndim + Globals.n_sets)) and (k < (2*Globals.ndim+Globals.n_sets)):
                        y_model += fitting_coeffs[k]*sin(2.0*pi*(t-Globals.times[0][0])*opt_freqs[k-(2*Globals.ndim+Globals.n_sets)])    
                    elif k == (2*Globals.ndim + Globals.n_sets):
                        y_model += fitting_coeffs[k]*(t-Globals.times[0][0])
            #append model point [t, y_model, err]
            model.append( [t, y_model, 1.0] )
        #sort points
        model_s = sorted(model, key=lambda row: row[0])
    #write on file
    mgls_io.write_file('model.dat', model_s, ' ', '')
    
    print_message("Datasets w/ offset written in ./offset_*", 6, 95)
    print_message("Model written in ./model.dat", 6, 95)

    compute_residuals(model_s)

    return model_s
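The index arithmetic in the y_model loop implies a fixed layout of fitting_coeffs: per-dataset offsets first, then the cosine amplitudes, then the sine amplitudes (reached above through a negative index into opt_freqs), and finally an optional linear-trend coefficient. A minimal sketch of the same evaluation at a single epoch (eval_model_point is a hypothetical helper, not part of mgls), assuming that layout and len(opt_freqs) == ndim:

from math import cos, sin, pi

def eval_model_point(t, t_ref, freqs, coeffs, n_sets, ndim):
    """Hypothetical helper mirroring the y_model loop in multiset_model().

    Assumed layout of coeffs:
      coeffs[0:n_sets]                        per-dataset offsets
      coeffs[n_sets:n_sets + ndim]            cosine amplitudes
      coeffs[n_sets + ndim:n_sets + 2*ndim]   sine amplitudes
      coeffs[n_sets + 2*ndim]                 linear trend (if present)
    """
    y = 0.0
    for k in range(ndim):
        phase = 2.0*pi*(t - t_ref)*freqs[k]
        y += coeffs[n_sets + k]*cos(phase)
        y += coeffs[n_sets + ndim + k]*sin(phase)
    if len(coeffs) > n_sets + 2*ndim:   # optional linear trend term
        y += coeffs[n_sets + 2*ndim]*(t - t_ref)
    return y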
Example #3
File: mgls.py  Project: rosich/mgls
                v.append([p_y, pwr_y])
                pwry.append(pwr_y)

            for j in range(bb.shape[0]):
                b = bb[j][0]
                p_x = 1. / b
                if i == j:
                    pwr_x = zz[j - 1, i]
                else:
                    pwr_x = zz[j, i]

                h.append([p_x, pwr_x])
                periods.append(p_x)
                pwrx.append(pwr_x)

            mgls_io.write_file('cut_v.dat', v, ' ', '')
            mgls_io.write_file('cut_h.dat', h, ' ', '')

            import warnings
            warnings.filterwarnings("ignore")

            #fig1 = figure()
            #plt.subplots_adjust(bottom=0.11, right=0.76)
            fig1 = plt.figure(1, figsize=(8, 6))
            plt.subplots_adjust(left=0.07,
                                right=0.73,
                                top=0.96,
                                bottom=0.1,
                                hspace=0.05)

            plt.rc('font', serif='Helvetica Neue')
Example #4
File: mgls_mc.py  Project: rosich/mgls
def _gls_instance_bootstrapping(n):
    """
    """
    #l = mp.Lock()
    max_pows = []
    for _j in range(n):
        rv_0_seq, rv_err_0_seq = Globals.rvs_seq, Globals.rv_errs_seq
        rv_0, rv_err_0 = Globals.rvs, Globals.rv_errs
        t_0 = time.time()
        #shuffle the data together with their errors
        rv_sets = []
        rv_err_sets = []
        rv_sh = []
        rv_err_sh = []
        for ii in range(Globals.n_sets):
            rv_sets_s, rv_err_sets_s = shuffle_multiset_data(
                Globals.rvs[ii], Globals.rv_errs[ii])
            rv_sets.append(rv_sets_s)
            rv_err_sets.append(rv_err_sets_s)
            rv_sh.extend(rv_sets_s)
            rv_err_sh.extend(rv_err_sets_s)

        Globals.rvs, Globals.rv_errs = rv_sets[:], rv_err_sets[:]

        Globals.rvs_seq, Globals.rv_errs_seq = rv_sh[:], rv_err_sh[:]

        #compute new jitters
        max_logL_0 = -np.inf
        for _ in range(2):
            Globals.logL_0 = logL_NullModel()
            if Globals.logL_0 > max_logL_0:
                max_logL_0 = Globals.logL_0

        Globals.logL_0 = max_logL_0
        #compute GLS
        Globals.ndim = 1
        #freqs, pwr, max_pow = gls_1D()
        #if max_pow != 0:
        #   max_pows.append(max_pow)
        Globals.inhibit_msg = True

        max_logL = -np.inf
        for _i in range(6):
            #MC optimization
            opt_state = run_MGLS(Globals.ncpus, N_CANDIDATES=4)
            #compute coefficients and A matrix, given the optimal configuration
            pwr, fitting_coeffs, A, logL = mgls_multiset(opt_state)
            if logL > max_logL:
                max_logL = logL

        if (max_logL - Globals.logL_0) > 15.0:
            print "logL > 15 found"
            data_ = zip(Globals.times_seq, Globals.rvs_seq,
                        Globals.rv_errs_seq)
            mgls_io.write_file('_' + str(max_logL - max_logL_0) + '.dat',
                               data_, ' ', '')

        if max_logL - Globals.logL_0 > 0.0:
            max_pows.append(max_logL - Globals.logL_0)

        q = (time.time() - t_0)
        chain = "\tIteration: " + str(_j) + "/" + str(n)
        sys.stdout.write('\r' + chain)
        sys.stdout.flush()

        #re-establish the original time series
        Globals.rvs_seq, Globals.rv_errs_seq = rv_0_seq[:], rv_err_0_seq[:]
        Globals.rvs, Globals.rv_errs = rv_0[:], rv_err_0[:]

    return max_pows
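The list returned above can be turned into empirical false-alarm thresholds with percentiles, in the same spirit as the values returned by noise_analysis() in Example #5. A minimal usage sketch (the iteration count is illustrative):

import numpy as np

# Delta-logL maxima from the bootstrapped, shuffled datasets
max_pows = _gls_instance_bootstrapping(1000)

# empirical thresholds at roughly 1% and 0.1% false-alarm probability
thr_1pc, thr_01pc = np.percentile(max_pows, [99.0, 99.9])
print "Delta-logL thresholds (1%%, 0.1%%): %.3f, %.3f" % (thr_1pc, thr_01pc)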
Example #5
File: mgls_mc.py  Project: rosich/mgls
def noise_analysis(init_dim, opt_freqs_base, fitting_coeffs_base,
                   opt_jitters_base):
    """
    """
    Globals.ndim = init_dim
    counter = 0
    histogram = []
    NSAMPLES = 50

    rvs_seq, rv_errs_seq = copy.deepcopy(Globals.rvs_seq), copy.deepcopy(
        Globals.rv_errs_seq)
    rvs_cp, rv_errs_cp = copy.deepcopy(Globals.rvs), copy.deepcopy(
        Globals.rv_errs)

    for iter in range(NSAMPLES):
        try:
            #do n times to compute 1% percentile
            if not Globals.jitter:
                jitters = [0.0 for iter in range(Globals.n_sets)]

            Globals.ndim = init_dim
            #generate a model with the resulting parameters
            is_model = gen_synthetic_model(1. / opt_freqs_base,
                                           fitting_coeffs_base,
                                           opt_jitters_base)
            #model_0
            max_logL_0 = -np.inf
            for k in range(4):
                opt_state_0 = parallel_optimization_multiset(Globals.ncpus,
                                                             N_CANDIDATES=4)
                #compute coefficients and A matrix, given the optimal configuration
                pwr, fitting_coeffs, A, logL_0 = mgls_multiset(opt_state_0)

                if logL_0 > max_logL_0:
                    max_logL_0 = logL_0
                    Globals.logL_0 = max_logL_0

            #model
            max_logL = -np.inf
            Globals.ndim += 1
            for k in range(4):
                opt_state = parallel_optimization_multiset(Globals.ncpus,
                                                           N_CANDIDATES=4)
                #compute coefficients and A matrix, given the optimal configuration
                pwr, fitting_coeffs, A, logL = mgls_multiset(opt_state)

                if logL > max_logL:
                    max_logL = logL
            #print max_logL_0, max_logL

            DLog = max_logL - max_logL_0
            if DLog < 0.0: DLog = 0.0

            if (max_logL - Globals.logL_0) > 15.0:
                print "logL > 15 found"

                data_ = zip(Globals.times_seq, Globals.rvs_seq,
                            Globals.rv_errs_seq)
                mgls_io.write_file(
                    '_' + str(max_logL - Globals.logL_0) + '.dat', data_, ' ',
                    '')

            histogram.append(DLog)
            counter += 1

            if counter % 5 == 0:
                sys.stdout.write('\r\t                             ')
                sys.stdout.write(
                    '\r\t' + " >> Completed " +
                    str(round((100. * float(counter) / float(NSAMPLES)), 2)) +
                    ' %')
                sys.stdout.flush()

            Globals.rvs_seq, Globals.rv_errs_seq = rvs_seq[:], rv_errs_seq[:]
            Globals.rvs, Globals.rv_errs = rvs_cp[:], rv_errs_cp[:]

        except KeyboardInterrupt:
            exit()

        except Exception:
            print "Exception occurred."

    #histogram = np.array(histogram)
    sys.stdout.write('\r                              ')
    sys.stdout.write('\n')
    mgls_io.write_file_onecol(
        'H_' + str(Globals.ndim) + '_' + str(time.time()) + '.dat', histogram,
        ' ', '')

    print "DlogL max.", max(histogram)

    return (np.percentile(histogram, 90.0),
            np.percentile(histogram, 99.0),
            np.percentile(histogram, 99.9))