Example #1
    def run(self, modelcard=None, testcard=None, stokes='I'):
        """
        Method that cross-validates set of image-plane models obtained by
        modelling training samples on corresponding set of testing samples.

        :param modelfiles:
            Wildcard of file names ~ 'model_0i_0jofN.txt', where model in
            'model_0i_0jofN.txt' file is from modelling ``0j``-th training
            sample ('train_0jofN.FITS') with ``0i``-th model.

        :param testfiles:
            Wildcard of file names ~ 'test_0jofN.FITS'.

        :return:
            List of lists [modelfilename, CV-score, sigma_cv_score].
        """

        modelfiles = sorted(glob.glob(modelcard))
        testfiles = sorted(glob.glob(testcard))
        ntest = len(testfiles)
        # Each model set must pair one model file with each testing sample
        assert len(modelfiles) % ntest == 0
        nmodels = len(modelfiles) // ntest

        print "modelfiles : " + str(modelfiles)
        print "testfiles : " + str(testfiles)

        result = list()

        for i in range(nmodels):
            print "using models " + str(modelfiles[ntest * i: ntest * (i + 1)]) \
                  + " and testing sample " + str(testfiles)
            models = modelfiles[ntest * i:ntest * (i + 1)]
            cv_scores = list()
            for j, testfile in enumerate(testfiles):
                model = Model()
                model.add_from_txt(models[j], stoke=stokes)
                print "using test file " + str(testfile)
                data = create_uvdata_from_fits_file(testfile)
                cv_score = data.cv_score(model, stokes=stokes)
                print "cv_score for one testing sample is " + str(cv_score)
                cv_scores.append(cv_score)

            mean_cv_score = np.mean(cv_scores)
            std_cv_score = np.std(cv_scores)
            print mean_cv_score, std_cv_score

            result.append(["model#" + str(i + 1), mean_cv_score, std_cv_score])

        return result
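
# Usage sketch (the enclosing class is not shown in this snippet; here
# ``CrossValidation`` is a hypothetical name for it). The wildcards follow
# the file-naming convention described in the docstring.
cv = CrossValidation()
scores = cv.run(modelcard='model_*_*of10.txt', testcard='test_*of10.FITS',
                stokes='I')
for name, mean_score, std_score in scores:
    print name, mean_score, std_score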
Example #2
for i, (fl, x, y, w) in enumerate(zip(fs, xs, ys, ws)):
    if mask[i]:
        k1_model.add_component(CGComponent(fl * 0.8, x + sk1[0], y + sk1[1],
                                           w))
        q1_model.add_component(
            CGComponent(fl, x + shift[0] + sq1[0], y + shift[1] + sq1[1], w))
        print i, fl, x, y, w

# Display model
# image = create_clean_image_from_fits_file(os.path.join(path, k1_image))
# image._image = np.zeros(image._image.shape, dtype=float)
# image.add_model(k1_model)

# Move model to UV-plane
k1_uvdata = create_uvdata_from_fits_file(os.path.join(path, k1_uvfile))
noise = k1_uvdata.noise(average_freq=True)
for baseline, std in noise.items():
    noise[baseline] = noise_factor * std
k1_uvdata.substitute([k1_model])
k1_uvdata.noise_add(noise)
k1_uvdata.save(k1_uvdata.data, os.path.join(path, 'k1_uv.fits'))

q1_uvdata = create_uvdata_from_fits_file(os.path.join(path, q1_uvfile))
noise = q1_uvdata.noise(average_freq=True)
for baseline, std in noise.items():
    noise[baseline] = noise_factor * std
q1_uvdata.substitute([q1_model])
q1_uvdata.noise_add(noise)
q1_uvdata.save(q1_uvdata.data, os.path.join(path, 'q1_uv.fits'))
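
# Optional sanity check (sketch): re-open the simulated data sets saved above
# and compare their estimated per-baseline noise with the scaled values that
# were added.
k1_check = create_uvdata_from_fits_file(os.path.join(path, 'k1_uv.fits'))
print k1_check.noise(average_freq=True)
q1_check = create_uvdata_from_fits_file(os.path.join(path, 'q1_uv.fits'))
print q1_check.noise(average_freq=True)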
Example #3
ccmodel_c1 = Model(stokes='I')
for comp in ccmodel_x1._components:
    r = np.sqrt(comp.p[1]**2. + comp.p[2]**2.)
    if r < 3. * (abs(pixsize_c1[0]) / mas_to_rad):
        print "removing component position ", comp.p
        continue
    comp._p[0] = comp._p[0] * sp_steeper(r)
    # Some shift
    comp._p[1] = comp._p[1] + 0.9
    comp._p[2] = comp._p[2] - 0.1
    ccmodel_c1.add_component(comp)
ccmodel_c1.add_component(CGComponent(0.35, 0.0 + sc1[0], 0.0 + sc1[1], 0.15))

# Overall shift = sqrt((0.9 + 0.1)**2 + (0.1 + 0.1)**2) = 1.02 mas
# Move model to UV-plane
c1_uvdata = create_uvdata_from_fits_file(
    os.path.join(base_path, '1458+718.C1.2007_03_01.PINAL'))
noise = c1_uvdata.noise(average_freq=True)
for baseline, std in noise.items():
    noise[baseline] = noise_factor * std
c1_uvdata.substitute([ccmodel_c1])
c1_uvdata.noise_add(noise)
c1_uvdata.save(c1_uvdata.data, os.path.join(base_path, 'c1_uv_real.fits'))

x1_uvdata = create_uvdata_from_fits_file(
    os.path.join(base_path, '1458+718.X1.2007_03_01.PINAL'))
noise = x1_uvdata.noise(average_freq=True)
for baseline, std in noise.items():
    noise[baseline] = 0.05 * std
x1_uvdata.substitute([ccmodel_x1])
# FIXME: !!!
noise[264] = 0.05 * noise[noise.keys()[0]]
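
# The original example is cut off here. Judging by the C1 block above (and by
# Example #2), the continuation would presumably add the scaled noise and save
# the data; the output file name below is only a guess.
x1_uvdata.noise_add(noise)
x1_uvdata.save(x1_uvdata.data, os.path.join(base_path, 'x1_uv_real.fits'))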
Example #4
im_data_dir = '/home/ilya/Dropbox/Zhenya/to_ilya/clean_images/'
# Path to project's root directory
base_path = '/home/ilya/sandbox/heteroboot/'
path_to_script = '/home/ilya/Dropbox/Zhenya/to_ilya/clean/final_clean_nw'

# Workflow for one source
source = '0952+179'
epoch = '2007_04_30'
band = 'c1'
n_boot = 10
stoke = 'i'
image_fname = '0952+179.c1.2007_04_30.i.fits'
uv_fname = '0952+179.C1.2007_04_30.PINAL'

ccmodel = create_ccmodel_from_fits_file(os.path.join(im_data_dir, image_fname))
# ``uv_data_dir`` is not defined in this snippet; it should point to the
# directory holding the UV FITS files.
uvdata = create_uvdata_from_fits_file(os.path.join(uv_data_dir, uv_fname))
uvdata_m = create_uvdata_from_fits_file(os.path.join(uv_data_dir, uv_fname))
uvdata_m.substitute([ccmodel])
uvdata_r = uvdata - uvdata_m

baseline = uvdata.baselines[0]
print "baseline {}".format(baseline)
i, indxs_i = uvdata._choose_uvdata(baselines=[baseline], IF=1, stokes='I')
rl, indxs_rl = uvdata._choose_uvdata(baselines=[baseline], IF=1, stokes='RL')
lr, indxs_lr = uvdata._choose_uvdata(baselines=[baseline], IF=1, stokes='LR')
i = i[:, 0, 0]
rl = rl[:, 0, 0]
lr = lr[:, 0, 0]

import astropy.io.fits as pf
hdus = pf.open(os.path.join(uv_data_dir, uv_fname))
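
# Sketch: list the random-group parameters of the opened UV FITS file.
# ``PCOUNT`` and ``PTYPEn`` are standard random-groups FITS keywords.
hdu = hdus[0]
for n in range(1, hdu.header['PCOUNT'] + 1):
    print n, hdu.header['PTYPE' + str(n)]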
Example #5
import emcee
import triangle
import scipy as sp
import numpy as np
from from_fits import create_uvdata_from_fits_file
from components import CGComponent
from model import Model, CCModel
from stats import LnPost

if __name__ == '__main__':
    uv_fname = '1633+382.l22.2010_05_21.uvf'
    map_fname = '1633+382.l22.2010_05_21.icn.fits'
    uvdata = create_uvdata_from_fits_file(uv_fname)
    # Create several components
    cg1 = CGComponent(1.0, 0.0, 0.0, 1.)
    cg1.add_prior(flux=(sp.stats.uniform.logpdf, [0., 3.], dict()),
                  bmaj=(sp.stats.uniform.logpdf, [0, 10.], dict()))
    # Create model
    mdl1 = Model(stokes='I')
    # Add components to model
    mdl1.add_component(cg1)
    # Create posterior for data & model
    lnpost = LnPost(uvdata, mdl1)
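    # Sampling sketch. Assumptions (conventions of this code base, not shown
    # above): ``lnpost`` is callable with a parameter vector and ``mdl1.p``
    # returns the model's current parameter values.
    ndim = len(mdl1.p)
    nwalkers = 50
    p0 = np.array(mdl1.p) + 0.01 * np.random.randn(nwalkers, ndim)
    sampler = emcee.EnsembleSampler(nwalkers, ndim, lnpost)
    sampler.run_mcmc(p0, 500)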
Example #6
        v = hdu.data[hdu.header['PTYPE2']] / hdu.header['PSCAL2'] - \
            hdu.header['PZERO2']
        w = hdu.data[hdu.header['PTYPE3']] / hdu.header['PSCAL3'] - \
            hdu.header['PZERO3']
        # ``DATE`` can appear at a different position among the group
        # parameters, so look up its index in ``par_dict``
        indx_date = par_dict['DATE']
        time = hdu.data[hdu.header['PTYPE' + str(indx_date)]] / \
            hdu.header['PSCAL' + str(indx_date)] - hdu.header['PZERO' +
                                                              str(indx_date)]

        # Filling structured array by fields
        _data['uvw'] = np.column_stack((u, v, w))
        _data['time'] = time
        indx_bl = par_dict['BASELINE']
        _data['baseline'] = \
            vec_int(hdu.data[hdu.header['PTYPE' + str(indx_bl)]] /
                    hdu.header['PSCAL' + str(indx_bl)] -
                    hdu.header['PZERO' + str(indx_bl)])
        _data['hands'] = hands
        _data['weights'] = weights

        return _data


if __name__ == '__main__':
    from from_fits import create_uvdata_from_fits_file
    import os
    os.chdir('/home/ilya/code/vlbi_errors/data/misha')
    uvdata = create_uvdata_from_fits_file('1308+326.U1.2009_08_28.UV_CAL')
    uvdata.cv(15, 'CV_TEST')
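    # Sketch (standard UVFITS/AIPS convention, not specific to this code):
    # a baseline code packs two antenna numbers as 256 * ant1 + ant2, so the
    # values filled into ``_data['baseline']`` above can be split back:
    bl = uvdata.baselines[0]
    ant1, ant2 = divmod(int(bl), 256)
    print bl, ant1, ant2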