Example 1
    def scale(self, ny=1, nx=1):
        m = sp.profile(nt=self.nt, ny=ny, nx=nx, nw=self.nw, ns=self.ns)

        xx = np.arange(self.nx, dtype='float32')
        yy = np.arange(self.ny, dtype='float32')

        xx1 = np.arange(nx, dtype='float32') / np.maximum(1,
                                                          nx - 1.0) * self.nx
        yy1 = np.arange(ny, dtype='float32') / np.maximum(1,
                                                          ny - 1.0) * self.ny

        me = np.mean(self.dat[:, :, :, :, 0])
        if (me < 1.e-19): me = 1.0
        me1 = 1.0 / me

        for tt in range(self.nt):
            inte = interp2d(xx, yy, self.pweights[tt, :, :], kind='linear')
            # interpolate the per-pixel weights onto the new grid for this time step
            m.pweights[tt, :, :] = inte(xx1, yy1)

            for ww in range(self.nw):
                for ss in range(self.ns):
                    inte = interp2d(xx,
                                    yy,
                                    self.dat[tt, :, :, ww, ss].squeeze() * me1,
                                    kind='linear')
                    m.dat[tt, :, :, ww, ss] = inte(xx1, yy1) * me

        m.wav[:] = self.wav
        m.weights[:, :] = self.weights[:, :]

        return m
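The scale method returns a new profile resampled onto an ny x nx pixel grid with bilinear interpolation, leaving the wavelength grid, weights and Stokes dimension untouched. A minimal usage sketch (the file name is a placeholder):

import sparsetools as sp

p = sp.profile('observed.nc')        # placeholder input file in STiC format
p_small = p.scale(ny=256, nx=256)    # resample the field of view to 256 x 256 pixels
print(p_small.dat.shape)             # (nt, 256, 256, nw, ns)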
Example 2
    def __add__(self, o):

        if(self.nt == o.nt and\
           self.nx == o.nx and\
           self.ny == o.ny and\
           self.ns == o.ns):
            nw = o.nw + self.nw
        else:
            return None

        n = sp.profile(nx=self.nx, ny=self.ny, nw=nw, ns=self.ns, nt=self.nt)

        for ss in range(self.ns):
            n.weights[:, ss] = np.append(self.weights[:, ss].squeeze(),
                                         o.weights[:, ss].squeeze())
            for tt in range(self.nt):
                for yy in range(self.ny):
                    for xx in range(self.nx):
                        n.dat[tt, yy, xx, :, ss] = np.append(
                            self.dat[tt, yy, xx, :, ss].squeeze().copy(),
                            o.dat[tt, yy, xx, :, ss].squeeze().copy())

        n.wav[:] = np.append(self.wav, o.wav)
        n.pweights[:, :, :] = 0.5 * (self.pweights + o.pweights)

        return (n)
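__add__ concatenates two profiles along the wavelength axis, provided their time, spatial and Stokes dimensions match; otherwise it returns None. A sketch of merging two spectral regions (file names are illustrative):

import sparsetools as sp

fe = sp.profile('fe6302.nc')         # hypothetical Fe I region
ca = sp.profile('ca8542.nc')         # hypothetical Ca II region

both = fe + ca                       # nw = fe.nw + ca.nw, wavelengths appended
if both is None:
    raise ValueError('profiles have incompatible nt/ny/nx/ns dimensions')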
Example 3
 def skip(self, fx=2, fy=2):
     nt, ny, nx, nw, ns = self.dat[:, 0::fy, 0::fx, :, :].shape[:]
     m = sp.profile(nx=nx, ny=ny, nt=nt, nw=nw, ns=ns)
     m.dat[:, :, :, :, :] = self.dat[:, 0::fy, 0::fx, :, :]
     m.weights[:, :] = self.weights[:, :]
     m.wav[:] = self.wav
     m.pweights[:] = self.pweights[:, 0::fy, 0::fx]
     return m
Example 4
 def initObs(self):
     self.o = sp.profile(self.fname_obs)
     self.wsel = np.where(
         self.o.dat[0, self.o.ny // 2, self.o.nx // 2, :, 0] > 0)[0]
     self.wav = self.o.wav[self.wsel]
     self.plot_iwav = np.diff(self.wav).max() > 50.
     if self.plot_iwav:
         self.plot_wav = np.arange(self.wsel.size)
     else:
         self.plot_wav = self.wav
     self.obsprof = self.o.dat[:, :, :, self.wsel, :]
Example 5
    def create_dataset(self, model_train_list, stokes_train_list,
                       logtau_train_list):
        """Creates a dataset for training

        Parameters
        ----------
        model_train_list : list of strings
            List of models in STiC format used for training
        stokes_train_list : list of strings
            List of observed or synthetic profiles for training
        logtau_train_list : list
            List of logtau values included in the training

        """
        self.logtau = np.array(logtau_train_list)

        stokelist, cubelist = [], []
        for simu in range(len(model_train_list)):
            m = sp.model(model_train_list[simu])
            s = sp.profile(stokes_train_list[simu])
            idx = np.where(s.weights[:, 0] < 1.0)[0]
            indices = sorted(
                gentools.findindex(self.logtau, m.ltau[0, 0, 0, :]))
            ni = len(indices)

            # Physical parameters
            supercube = np.zeros(
                (ni * self.num_params, m.temp.shape[1], m.temp.shape[2]))
            supercube[:ni] = m.temp[0, :, :, indices] / 1e3
            supercube[ni:2 * ni] = m.vlos[0, :, :, indices] / 1e5
            supercube[ni * 2:3 * ni] = m.vturb[0, :, :, indices] / 1e5
            supercube[ni * 3:4 * ni] = m.Bln[0, :, :, indices] / 1e3
            supercube[ni * 4:5 * ni] = m.Bho[0, :, :, indices] / 1e3
            supercube[ni * 5:6 * ni] = m.azi[0, :, :, indices]

            # Stokes parameters
            stokes = np.concatenate([
                s.dat[0, :, :, idx, 0], 1e0 * s.dat[0, :, :, idx, 1],
                1e0 * s.dat[0, :, :, idx, 2], 1e0 * s.dat[0, :, :, idx, 3]
            ])

            stokelist.append(stokes)
            cubelist.append(supercube)

        self.cubelist = cubelist
        self.stokelist = stokelist
        self.nl = len(stokes)
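A minimal call consistent with the docstring above could look like the following; the file names and log(tau) nodes are placeholders, and myestimator stands for an instance of the class that owns create_dataset:

model_train_list  = ['atmos_snapshot1.nc', 'atmos_snapshot2.nc']   # STiC model files (placeholders)
stokes_train_list = ['synth_snapshot1.nc', 'synth_snapshot2.nc']   # matching synthetic profiles
logtau_train_list = [-6.0, -4.5, -3.0, -1.5, 0.0]                  # log(tau) nodes kept for training

myestimator.create_dataset(model_train_list, stokes_train_list, logtau_train_list)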
Example 6
    def extractPix(self, x0=0, x1=-1, y0=0, y1=-1, t0=0, t1=-1):
        if (x1 == -1): x1 = self.nx
        if (y1 == -1): y1 = self.ny
        if (t1 == -1): t1 = self.nt

        nx = x1 - x0
        ny = y1 - y0
        nt = t1 - t0

        n = sp.profile(nx=nx, ny=ny, nt=nt, nw=self.nw, ns=self.ns)

        n.dat[:, :, :, :, :] = self.dat[t0:t1, y0:y1, x0:x1, :, :]
        n.wav[:] = self.wav
        n.weights[:, :] = self.weights
        n.pweights[:, :, :] = self.pweights[t0:t1, y0:y1, x0:x1]

        return (n)
Example 7
    def extractWav(self, w0=0, w1=-1):
        if (w1 == -1): w1 = self.nw

        nw = w1 - w0

        n = sp.profile(nx=self.nx,
                       ny=self.ny,
                       nw=nw,
                       ns=self.ns,
                       nt=self.nt)

        n.dat[:, :, :, :, :] = self.dat[:, :, :, w0:w1, :].copy()
        n.wav[:] = self.wav[w0:w1].copy()
        n.weights[:, :] = self.weights[w0:w1, :].copy()
        n.pweights[:, :, :] = self.pweights.copy()

        if (self.rf is not None):
            n.rf = self.rf[:, :, :, :, :, w0:w1, :].copy()
            n.rf_type = self.rf_type.copy()
            n.nder = self.nder
        return (n)
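extractPix and extractWav crop the time/space and wavelength axes respectively (the upper bounds are exclusive, and -1 means "up to the end"). They can be chained to cut out a small test region, e.g. (sketch, placeholder file name):

import sparsetools as sp

p = sp.profile('observed.nc')
patch = p.extractPix(x0=100, x1=164, y0=200, y1=264)   # 64 x 64 pixel patch
line = patch.extractWav(w0=0, w1=41)                   # first 41 wavelength points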
Example 8
    wc8, ic8 = findgrid(ca8[0,:], (ca8[0,10]-ca8[0,9])*0.5, extra=8)
    
    # Ca II K: the observations are recorded on a 3 km/s grid plus 2 external points.
    # These profiles are in theory critically sampled, but we need to add extra points
    # for the outer points and for the continuum (last point in the array).

    wck, ick = findgrid(ck[0,0:39], (ck[0,10]-ck[0,9]), extra=8)
    
    
    #
    # Now we create a container for each spectral region in STIC format
    # We will add the fine grid, but all points that were not observed will
    # be given weight zero, so they don't contribute to the inversion.
    # 

    fe_1 = sp.profile(nx=1, ny=1, ns=4, nw=wfe.size)
    ca_8 = sp.profile(nx=1, ny=1, ns=4, nw=wc8.size)
    ca_k = sp.profile(nx=1, ny=1, ns=4, nw=wck.size+1) # add 1 continuum point!


    # Now fill in the profiles, weights and wavelength

    fe_1.wav[:] = wfe[:]
    ca_8.wav[:] = wc8[:]
    ca_k.wav[0:-1] = wck[:]
    ca_k.wav[-1]    = ck[0,-1] # Continuum point

    # Fill arrays with the observed points. The rest can be left zeroed.
    # To scale the problem to numbers close to 1, the code lets you
    # choose a normalization factor. I normally choose the quiet-Sun
    # continuum. It does not really matter as long as we tell the code
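The comments above describe the central trick: each container carries the full fine wavelength grid, and any point that was not actually observed is excluded from the fit. A hedged sketch of that step for the Ca II K region, assuming the usual STiC convention that the weights array holds noise estimates (a very large value effectively removes a point); obs_ck, obs_ck_cont and norm are hypothetical names for the observed Ca II K profile, its continuum intensity and the chosen normalization factor:

    # All points start excluded: a huge noise estimate means zero weight (assumed convention)
    ca_k.weights[:, :] = 1.e16
    # Give the observed points a finite (illustrative) noise level so they constrain the fit
    ca_k.weights[ick, :] = 4.e-3
    ca_k.weights[-1, :] = 4.e-3                       # the appended continuum point is also observed

    # Copy the observed, normalized intensities into the observed positions of the fine grid
    ca_k.dat[0, 0, 0, ick, 0] = obs_ck / norm         # obs_ck: hypothetical observed Ca II K profile
    ca_k.dat[0, 0, 0, -1, 0] = obs_ck_cont / norm     # obs_ck_cont: hypothetical continuum intensity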
Example 9
    def predict(self,
                name,
                inputdata,
                logtau,
                original_logtau,
                nameoutput='model_neuralnetwork.nc',
                pgastop=1.0):
        """It uses a pre-trained neural network with new observed data

        Parameters
        ----------
        name : str
            name of the network to load
        inputdata : str
            path to the input profile file (netCDF) in STiC format
        logtau : list
            logtau scale used to train the network
        original_logtau : list
            Final stratification of the model to do the interpolation
        nameoutput : str, optional
            name of the output model, by default 'model_neuralnetwork.nc'
        pgastop : float, optional
            value used to fill the gas pressure of the output model, by default 1.0

        Example
        -------
        >>> dataprediction = 'newprofiles.nc'
        >>> original_logtau = sp.model(model_train_list[0],0,0,0).ltau[0,0,0,:]
        >>> myestimator.predict('network1', dataprediction, logtau, original_logtau, nameoutput="model_output.nc")
        
        """

        print('[INFO] Sending the data to the network')
        o = sp.profile(inputdata)
        idx = np.where(o.weights[:, 0] < 1.0)[0]
        stokelist = np.array([
            np.concatenate([
                o.dat[0, :, :, idx, 0], 1e0 * o.dat[0, :, :, idx, 1],
                1e0 * o.dat[0, :, :, idx, 2], 1e0 * o.dat[0, :, :, idx, 3]
            ])
        ])
        print(stokelist.shape, '...')
        self.nl = stokelist.shape[1]
        self.deepl = deep_network(name, logtau, self.nl)
        prediction = self.deepl.read_and_predict(stokelist)
        nx, ny, dum = prediction[0, :, :, :].shape
        prediction = np.reshape(prediction[0, :, :, :],
                                (nx, ny, 6, len(logtau)))
        noriginaltau = len(original_logtau)

        # Fill the model with the prediction
        print('[INFO] Writing in STiC format')
        m = sp.model(nx=nx, ny=ny, nt=1, ndep=noriginaltau)
        from tqdm import tqdm
        for ix in tqdm(range(nx)):
            for iy in range(ny):
                temp = np.interp(original_logtau, logtau,
                                 np.abs(prediction[ix, iy, 0, :]))
                vlos = np.interp(original_logtau, logtau, prediction[ix, iy,
                                                                     1, :])
                vturb = np.interp(original_logtau, logtau,
                                  np.abs(prediction[ix, iy, 2, :]))
                Bln = np.interp(original_logtau, logtau, prediction[ix, iy,
                                                                    3, :])
                Bho = np.interp(original_logtau, logtau,
                                np.abs(prediction[ix, iy, 4, :]))
                Bazi = np.interp(original_logtau, logtau, prediction[ix, iy,
                                                                     5, :])

                m.ltau[0, iy, ix, :] = original_logtau
                m.temp[0, iy, ix, :] = temp * 1e3
                m.vlos[0, iy, ix, :] = vlos * 1e5
                m.pgas[0, iy, ix, :] = pgastop
                m.vturb[0, iy, ix, :] = vturb * 1e5
                m.Bln[0, iy, ix, :] = Bln * 1e3
                m.Bho[0, iy, ix, :] = Bho * 1e3
                m.azi[0, iy, ix, :] = Bazi

        # Write the model
        m.write(nameoutput)
Example 10
 def initSynth(self):
     self.s = sp.profile(self.fname_synth)
     self.synprof = self.s.dat[:, :, :, self.wsel, :]
     self.nw = self.wsel.size
     self.ww = 0
     self.istokes = 0
Example 11
File: plot.py Project: vlslv/stic
       r'\sisetup{detect-all}',   # ...this to force siunitx to actually use your fonts
       r'\usepackage{helvet}',    # set the normal font here
       r'\usepackage{sansmath}',  # load up the sansmath so that math -> helvet
       r'\sansmath'               # <- tricky! -- gotta actually tell tex to use!
]
m.rcParams['mathtext.fontset'] = 'custom'
m.rcParams['mathtext.rm'] = font
m.rcParams['mathtext.it'] = font+':italic'
m.rcParams['mathtext.bf'] = font+':bold'


import sparsetools as sp
import matplotlib.pyplot as plt
#import spectral as s

i = sp.profile('observed.nc')
o = sp.profile('synthetic_cycle1.nc')
m = sp.model('atmosout_cycle1.nc')

plt.close("all")

f = plt.figure(figsize=(7,7))
ax1 = plt.subplot2grid((7,6), (0,0), colspan=6, rowspan=3)
ax2 = plt.subplot2grid((7,6), (3,0), colspan=2, rowspan=2)
ax3 = plt.subplot2grid((7,6), (3,2), colspan=2, rowspan=2)
ax4 = plt.subplot2grid((7,6), (3,4), colspan=2, rowspan=2)
ax5 = plt.subplot2grid((7,6), (5,0), colspan=2, rowspan=2)
ax6 = plt.subplot2grid((7,6), (5,2), colspan=2, rowspan=2)
ax7 = plt.subplot2grid((7,6), (5,4), colspan=2, rowspan=2)

ss = 0