def __init__(self, gpu=0, checkpoint=None):
        self.cuda = torch.cuda.is_available()
        self.gpu = gpu
        self.device = torch.device(f"cuda:{self.gpu}" if self.cuda else "cpu")

        self.model = model.Network(161 * 3, 100, 86 * 3).to(self.device)

        root = '/Users/ferran_2020/TFG/Neural_RH_Inversion/'
        print("Reading Enhanced_tau_530 - tau")
        tmp = io.readsav(f'{root}Enhanced_tau_530.save')
        self.T_tau = tmp['tempi'].reshape((86, 504 * 504))
        self.Pe_tau = tmp['epresi'].reshape((86, 504 * 504))

        print("Reading Enhanced_tau_530 - z")
        tmp = io.readsav(f'{root}Enhanced_530_optiona_rh.save')
        self.T_z = tmp['tg']  #.reshape((161, 504*504))
        self.Pg_z = tmp['pg']  #.reshape((161, 504*504))
        self.z = tmp['z'] / 1e3

        import pickle
        filename = '/Users/ferran_2020/TFG/Neural_RH_Inversion/checkpoint_Whole.dict'

        with open(filename, 'rb') as f:
            s = pickle.load(f)
        self.model.load_state_dict(s[0])
Example #2
def main():
    postx = readsav('samples_x1.sav', python_dict=True)
    postt = readsav('samples_t2.sav', python_dict=True)
    # alpha is the intercept and beta is the slope

    alpha = postx['postx1']['alpha']
    beta = postx['postx1']['beta']
Example #3
def find_photo_z(type, file=None, file_z=None, z=None, mu=None,
                 sigma=None):
    """Type: file or gauss
       Note: file_z implementation NOT tested.
    """

    if type == 'file':
        try:
            pz_file = io.readsav(file)
            pz = pz_file['p_z']
        except Exception:
            pz_file = np.genfromtxt(file, dtype=None, names=True)
            pz = pz_file['pdf']
        if file_z is not None:
            try:
                my_z = io.readsav(file_z)
                my_z = my_z['z']
            except Exception:
                my_z = np.genfromtxt(file_z, dtype=None, names=True)
                my_z = my_z['z']
        else:
            my_z = np.arange(0, 5, .01)
        if np.shape(pz) != np.shape(my_z):
            raise ValueError("pz array and z array are different sizes!")
        func_my_photo_z = interpolate.interp1d(my_z, pz)
        my_photo_z = func_my_photo_z(z)
        my_photo_z = np.asarray(my_photo_z/my_photo_z.max())

    elif type == 'gauss':
        my_photo_z = stats.norm.pdf(z, mu, sigma)
        my_photo_z = np.asarray(my_photo_z/my_photo_z.max())
    return my_photo_z
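
A minimal usage sketch for both modes, assuming the numpy/scipy imports used above; 'my_pz.sav' is a hypothetical file containing a 'p_z' array:

z_grid = np.arange(0, 5, .01)
# 'gauss' mode needs only the grid, a mean, and a width:
pz_gauss = find_photo_z('gauss', z=z_grid, mu=1.2, sigma=0.3)
# 'file' mode reads 'p_z' from an IDL save file instead:
pz_file = find_photo_z('file', file='my_pz.sav', z=z_grid)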
Example #4
def get_dict_from_file(date, prefix="eis3"):
    """
    Reads an IDL .sav file containing EIS housekeeping data and returns its
    contents as a python dictionary. For speed, if the file has already been
    read, it may return the contents from a hidden memo. If the file is not
    found in the location specified it will attempt to download it once and
    save the file in the location originally specified.

    Parameters
    ----------
    date: date or datetime object
        Date of the observation required. If the file is present in the sunpy
        data directory, it will be read from there, or downloaded to that
        location if it isn't.
    prefix: str
        file prefix (eis3 for thermal correction, fpp1 for doppler shift)
    """
    key = '{0}_{1:%Y%m}.sav'.format(prefix, date)
    if key in __housekeeping_memo__:
        file_dict = __housekeeping_memo__[key]
    else:
        download_dir = os.path.join(sunpy.util.config._get_home(), 'EISpy',
                                    'eispy', 'data', key)
        try:
            file_dict = readsav(download_dir, python_dict=True)
        except IOError:
            url = "http://sdc.uio.no/eis_wave_corr_hk_data/" + key
            urllib.urlretrieve(url, filename=download_dir)
            file_dict = readsav(download_dir, python_dict=True)
            warnings.warn(
                "File was not found, so it was downloaded and " +
                "placed at the given location", UserWarning)
        __housekeeping_memo__.update({key: file_dict})
    return file_dict
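
A usage sketch, assuming the module-level __housekeeping_memo__ dict and the sunpy/urllib imports the snippet relies on; the date is an arbitrary example:

import datetime

# First call reads (or downloads) eis3_201402.sav; later calls hit the memo.
hk = get_dict_from_file(datetime.date(2014, 2, 7), prefix="eis3")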
Example #5
def load_vlf():
    print('LOAD VLF: Currently low flyer only')
    vlf12_lf = readsav(path + "Low-Flyer/efield_VLF/" + '35040_LFDSP_S1_VLF12_mvm_AaronB.sav') #Low flyer (35.040)
    vlf34_hf = readsav(path + "High-Flyer/efield_VLF/" + '35039_Main_VLF34B_VLF34Boosted.sav') 

    data = {"vlf12_lf":vlf12_lf, "vlf34_hf":vlf34_hf}
    return data
Example #7
def load_quats(loc_q_sc=os.getcwd() + '/data/q_sc.sav',
               loc_q_global=os.getcwd() + '/data/q_global.sav'):
    '''
    Load all quaternions from a file location
    
    Input:
        loc_q_sc: location of spacecraft quaternion file q_sc.sav
        loc_q_global: location of global quaternions file q_global.sav
        
    Output:
        quats: dict containing quaternion objects q_sc and q_global
        
    Example Usage:
        1) quats = load_quats()
    '''
    from pyquaternion import Quaternion
    from scipy.io import readsav

    q_sc = readsav(loc_q_sc, python_dict=True)
    for cam in list(q_sc.keys()):
        q_sc[cam] = Quaternion(
            (q_sc[cam][3], q_sc[cam][0], q_sc[cam][1], q_sc[cam][2]))

    q_global = readsav(loc_q_global, python_dict=True)
    for img in list(q_global.keys()):
        q_global[img] = Quaternion((q_global[img][3], q_global[img][0],
                                    q_global[img][1], q_global[img][2]))

    quats = {'q_sc': q_sc, 'q_global': q_global}

    return quats
Example #9
def loadfhdsav(savfile):
    # get the other polarization
    pol = savfile.split('_')[-1][:2]
    if pol == 'xx':
        uvfile_altpol = savfile.replace('xx', 'yy')
        pols = [0, 1]
    elif pol == 'yy':
        uvfile_altpol = savfile.replace('yy', 'xx')
        pols = [1, 0]
    else:
        print("polarization not found in filename. skipping")
        raise RuntimeError("polarization not found in filename")
    if not os.path.exists(uvfile_altpol):
        print("pol file", uvfile_altpol, "not found. please find")
        raise RuntimeError("alternate polarization file not found")
    #paramfile = savfile.split('_')[0]+'_params.sav'
    paramfile = savfile.replace('vis_%s' % (['xx', 'yy'][pols[0]]), 'params')
    if not os.path.exists(paramfile):
        print("error: paramfile=", paramfile, "not found. please find")
        raise RuntimeError("params file not found")
    #weightfile = savfile.split('_')[0]+'_flags.sav'
    weightfile = savfile.replace('vis_%s' % (['xx', 'yy'][pols[0]]), 'flags')
    if not os.path.exists(weightfile):
        print("error: weightfile", weightfile, "not found, please find")
        raise RuntimeError("flags file not found")
    print("loading:", savfile)
    uvfile = readsav(savfile)
    ant1 = uvfile['obs']['baseline_info'][0]['tile_a'][0] - 1
    print("min(ant1)=", ant1.min(), "should be 0")
    ant2 = uvfile['obs']['baseline_info'][0]['tile_b'][0] - 1
    print("max(ant2)=", ant2.max(), "should be 127")
    data = uvfile['vis_ptr']
    #times = uvfile['obs']['baseline_info'][0]['jdate'][0]
    baselines = ant2 * 256 + ant1
    freqs = uvfile['obs']['baseline_info'][0]['freq'][0]

    print("loading alternate polarization", uvfile_altpol)
    uv_altpol = readsav(uvfile_altpol)
    data_altpol = uv_altpol['vis_ptr']

    print("loading baselines from params file:", paramfile)
    params = readsav(paramfile)
    U = params['params']['uu'][0] * 1e9
    V = params['params']['vv'][0] * 1e9
    W = params['params']['ww'][0] * 1e9
    uvw = n.array(list(zip(U, V, W)))
    times = params['params']['time'][0]
    print("loading weights from:", weightfile)
    flags = readsav(weightfile)
    mask = n.dstack([flags['flag_arr'][0], flags['flag_arr'][1]]) == 0  # the zeros are the flags
    # create the new fits file
    outdata = n.zeros((data.shape[0], data.shape[1], 2)).astype(n.complex64)
    outdata[:, :, pols[0]] = data
    outdata[:, :, pols[1]] = data_altpol
    ant1, ant2 = i2a(ant1), i2a(ant2)
    return uvw, ant1, ant2, baselines, times, freqs, outdata, mask
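
A usage sketch; the file name is hypothetical, and the matching *_yy, *_params and *_flags .sav files must sit alongside it:

uvw, ant1, ant2, bls, times, freqs, data, mask = loadfhdsav('1061316296_vis_xx.sav')
print(data.shape, mask.shape)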
Example #10
def run_master(file_int, file_vv, db_images, db_vel, zero):

    print(" * MASTER : reading {0}...".format(file_int))
    im = io.readsav('/net/duna/scratch1/aasensio/deepLearning/opticalFlow/database/{0}.save'.format(file_int))['int']
    print(" * MASTER : reading {0}...".format(file_vv))
    vel = io.readsav('/net/duna/scratch1/aasensio/deepLearning/opticalFlow/database/{0}.save'.format(file_vv))

    n_timesteps, nx_orig, ny_orig = im.shape

    tasks = [i for i in range(n_timesteps)]

    task_index = 0
    num_workers = size - 1
    closed_workers = 0
    print("*** Master starting with {0} workers".format(num_workers))
    while closed_workers < num_workers:
        dataReceived = comm.recv(source=MPI.ANY_SOURCE, tag=MPI.ANY_TAG, status=status)                
        source = status.Get_source()
        tag = status.Get_tag()
        if tag == tags.READY:
            # Worker is ready, so send it a task
            if task_index < len(tasks):
                dataToSend = {'index': task_index + zero,
                              'image': im[task_index, 0:1008, 0:1008],
                              'vx1': vel['vx1'][task_index, 0:1008, 0:1008],
                              'vz1': vel['vz1'][task_index, 0:1008, 0:1008],
                              'vx01': vel['vx01'][task_index, 0:1008, 0:1008],
                              'vz01': vel['vz01'][task_index, 0:1008, 0:1008],
                              'vx001': vel['vx001'][task_index, 0:1008, 0:1008],
                              'vz001': vel['vz001'][task_index, 0:1008, 0:1008]}
                comm.send(dataToSend, dest=source, tag=tags.START)
                print(" * MASTER : sending task {0} to worker {1}".format(task_index, source), flush=True)
                task_index += 1
            else:
                print("Sending termination")
                comm.send(None, dest=source, tag=tags.EXIT)
        elif tag == tags.DONE:
            index = dataReceived['index']
            im_r = dataReceived['image']
            vx1_r = dataReceived['vx1']
            vz1_r = dataReceived['vz1']
            vx01_r = dataReceived['vx01']
            vz01_r = dataReceived['vz01']
            vx001_r = dataReceived['vx001']
            vz001_r = dataReceived['vz001']
                        
            db_images[index,:,:] = im_r
            db_vel[0,index,:,:] = vx1_r
            db_vel[1,index,:,:] = vz1_r
            db_vel[2,index,:,:] = vx01_r
            db_vel[3,index,:,:] = vz01_r
            db_vel[4,index,:,:] = vx001_r
            db_vel[5,index,:,:] = vz001_r
    
            print(" * MASTER : got block {0} from worker {1}".format(index, source), flush=True)
            
        elif tag == tags.EXIT:
            print(" * MASTER : worker {0} exited.".format(source))
            closed_workers += 1

    print("Master block finished")
    return len(tasks)
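
run_master is the master half of a standard MPI master/worker loop. A minimal sketch of the matching worker, assuming the same comm, status and tags objects; process() is a hypothetical stand-in for the per-frame computation:

def run_worker():
    while True:
        comm.send(None, dest=0, tag=tags.READY)            # ask the master for work
        task = comm.recv(source=0, tag=MPI.ANY_TAG, status=status)
        if status.Get_tag() == tags.EXIT:                  # no tasks left
            comm.send(None, dest=0, tag=tags.EXIT)
            break
        task['image'] = process(task['image'])             # process() is hypothetical
        comm.send(task, dest=0, tag=tags.DONE)             # return the result dict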
Example #12
def read_pfs(spectrum_path, wvlen_soln, verbose=False, is_template=False):
    """
    Duplicate from cdips_followup.spectools because of py27 compatibility.
    """

    if not is_template:
        sp = readsav(spectrum_path, python_dict=True, verbose=verbose)
        wvlen = readsav(wvlen_soln, python_dict=True, verbose=verbose)
        return sp['sp'], wvlen['w']

    else:
        s = readsav(spectrum_path, python_dict=True, verbose=verbose)
        return s['star'], s['w']
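
A usage sketch with hypothetical file names for the spectrum and wavelength solution:

flux, wav = read_pfs('spectrum.sav', 'wvlen_soln.sav')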
Example #13
    def __init__(self, filename, cubeInfoFilename):
        """
        Reads in the polarised-differential calibrated VAMPIRES data produced by the IDL pipeline
        :param filename: The full filename of the data file. E.g. diffdata_vega_.......idlvar
        """

        dObj = io.readsav(filename, python_dict=False, verbose=False)
        self.vhvv = dObj.vhvv
        self.vhvverr = dObj.vhvverr
        self.vhvvu = dObj.vhvvu
        self.vhvvuerr = dObj.vhvvuerr
        self.blengths = dObj.blengths
        self.bazims = dObj.bazims
        self.inFilename = filename
        try:
            self.diffCP = dObj.cp
            self.diffCPerr = dObj.cperr
            self.diffCPu = dObj.cpu
            self.diffCPuerr = dObj.cpuerr
            self.BL2H_IX = dObj.BL2H_IX
            self.H2BL_IX = dObj.H2BL_IX
            self.BL2BS_IX = dObj.BL2BS_IX
            self.BS2BL_IX = dObj.BS2BL_IX
        except (AttributeError, KeyError):
            print("Couldn't find diff CP data for " + filename)
        try:
            self.u_coords = dObj.u_coords
            self.v_coords = dObj.v_coords
        except (AttributeError, KeyError):
            # Some older files didn't have these saved
            self.u_coords = ()
            self.v_coords = ()
        del (dObj)

        # Get useful metadata from cubeinfo file
        cubeinfoObj = io.readsav(cubeInfoFilename,
                                 python_dict=False,
                                 verbose=False)
        self.UTCs = cubeinfoObj.olog.utc[0]
        self.filters = cubeinfoObj.olog.filter[0]
        self.ras = cubeinfoObj.olog.ra[0]
        self.decs = cubeinfoObj.olog.dec[0]
        self.mask = cubeinfoObj.olog.mask[0]
        self.adate = cubeinfoObj.olog.adate[0]
        self.emgains = cubeinfoObj.olog.emgain[0]
        self.mffile = cubeinfoObj.plog.mf_file[0]
        self.pkflux = cubeinfoObj.framestats.pkflx[0]
        self.totflux = cubeinfoObj.framestats.totflx[0]
        self.cubename = cubeinfoObj.olog.cube_fname[0][0]
        del (cubeinfoObj)
        print "Read " + filename
Example #14
def load(filePath, biasOffset=True):
    '''
Loads data into python.  Currently supports formats: 3ds, sxm, dat, nvi, nvl, mat.
For 3ds and dat file types there is an optional flag to correct for bias offset
that is true by default.  This does not correct for a current offset, and
should not be used in cases where there is a significant current offset.
Note: mat files are supported as exports from STMView only.
Please include the file extension in the path, e.g. 'file.3ds'

Usage: data = load('file.3ds', biasOffset=True)
    '''
    if filePath.endswith('.3ds'):
        if biasOffset:
            return _correct_bias_offset(Nanonis3ds(filePath), '.3ds')
        else:
            return Nanonis3ds(filePath)

    elif filePath.endswith('.sxm'):
        return NanonisSXM(filePath)

    elif filePath.endswith('.dat'):
        if biasOffset:
            return _correct_bias_offset(NanonisDat(filePath), '.dat')
        else:
            return NanonisDat(filePath)

    elif filePath[-3:] == 'NVI' or filePath[-3:] == 'nvi':
        return NISTnvi(sio.readsav(filePath))

    elif filePath[-3:] == 'NVL' or filePath[-3:] == 'nvl':
        return NISTnvl(sio.readsav(filePath))

    elif filePath.endswith('.mat'):
        raw_mat = matio.loadmat(filePath)
        mappy_dict = {}
        for key in raw_mat:
            try:
                mappy_dict[key] = matio.Mappy()
                mappy_dict[key].mat2mappy(raw_mat[key])
                print('Created channel: {:}'.format(key))
            except Exception:
                del mappy_dict[key]
                print('Could not convert: {:}'.format(key))
        if len(mappy_dict) == 1:
            return mappy_dict[list(mappy_dict)[0]]
        else:
            return mappy_dict

    elif filePath.endswith('.asc'):
        return AsciiFile(filePath)

    else:
        raise IOError('ERR - Wrong file type.')
Example #15
    def __init__(self):
        super(Dataset, self).__init__()

        print("Reading database with models...")
        tmp = io.readsav('params.idl')

        params = np.transpose(tmp['params'], axes=(2,1,0))
        [self.nx,self.ny,npars]=params.shape

        self.phys = params.reshape((self.nx*self.ny, npars))
        print('Number of parameters={}'.format(npars))

        # Disambiguation
        ibx=6
        ind = np.where(self.phys[:,ibx] < 0.0)[0]
        if len(ind) > 0: print("Disambiguating {} profiles".format(len(ind)))
        self.phys[ind,ibx] = -self.phys[ind,ibx]
        self.phys[ind,ibx+1] = -self.phys[ind,ibx+1]
        
        normalization = np.array([1e3, 1e3, 1e3, 1e3, 1e3, 1e3, 1e3, 1e3, 1e5]) 
        normalization_mean = np.array([5e3, 5e3, 5e3, 5e3, 5e3, 0, 0, 0, 0])
        
        self.phys = (self.phys - normalization_mean[None,:]) / normalization[None,:]

        self.n_training, self.n_par = self.phys.shape

        print("Reading database with Stokes profiles...")
        tmp=io.readsav('database.prof.idl')
        [self.nlam,self.ny2,self.nx2]=tmp['stki'].shape
        self.stokes=np.zeros((self.nx2,self.ny2,self.nlam,4))
        self.stokes[:,:,:,0]=np.transpose(tmp['stki'], axes=(2,1,0) )
        self.stokes[:,:,:,1]=np.transpose(tmp['stkq'], axes=(2,1,0) )
        self.stokes[:,:,:,2]=np.transpose(tmp['stku'], axes=(2,1,0) )
        self.stokes[:,:,:,3]=np.transpose(tmp['stkv'], axes=(2,1,0) )     

        # print("Normalizing Stokes parameters...")

        #[self.nx2,self.ny2,self.nlam,nstokes]=self.stokes.shape
        if self.nx2 != self.nx or self.ny2 != self.ny:
            print('params.idl is {}x{} but database.prof.idl is {}x{}'.format(self.nx, self.ny, self.nx2, self.ny2))
            sys.exit(1)
        self.stokes[:,:,:,1] /= self.Weights[1]
        self.stokes[:,:,:,2] /= self.Weights[2]
        self.stokes[:,:,:,3] /= self.Weights[3]
        
        print("Reshaping Stokes...")
        self.stokes = np.transpose(self.stokes, axes=(0,1,3,2))
        self.stokes = self.stokes.reshape((self.nx*self.ny, 4 * self.nlam))
        
        self.n_training, self.n_spectral = self.stokes.shape
Example #16
def idl2python(idlsavefile):
    '''
    PURPOSE :
        Read IDL save file and return a dictionary with stored values
    '''
    returndict = io.readsav(idlsavefile)
    return returndict
Example #17
    def calc_specifics(self, Temp):
        """A separate method to calculate the specific line list properties based on an input T."""
        if self.specs_calced == 0:
            # make sure we don't inadvertently try and do this twice
            if self.ll_name == 'HITRAN04':
                self.Temp = Temp
                self.specs_calced = 1
                # let's make sure the relevant temperature is now carried around with the linelist.

                props = HT04_globals(self.spec, self.iso)

                if Temp == 296.0 and self.ll_name == 'HITRAN04':
                    Q = props.Q296
                else:
                    Q = getQ(self.spec, self.iso, self.ll_name, Temp)

                E_temp = -1.0 * self.epp * c2 / Temp
                # print(E_temp)
                w_temp = -1.0 * self.wave * c2 / Temp
                # print(w_temp)
                self.strength = self.strength * (props.abund / Q) * (np.exp(E_temp) * (1.0 - np.exp(w_temp))) * apc.c.cgs.value
                # I have no idea why Jan multiplies by c here, but he does, so let's copy it.

                strengths_jan = readsav('/home/dstock/sf/idl/code/ff.xdr')
Example #18
def grabdate(d):
  global rundir, srcdir, outdir
  # Limit output to one line per date. 
  status(d)
  # Loop over both probes. 
  for p in ('a', 'b'):
    status(p)
    # Nuke the run directory. Leave stdout and stderr. 
    [ os.remove(x) for x in os.listdir(rundir) if x not in ('stdoe.txt',) ]
    # Create and execute an IDL script to grab position, electric field, and
    # magnetic field data for the day and dump it into a sav file. 
    out, err = spedas( idlcode(probe=p, date=d) )
    # Make sure there's somewhere to store the pickles. 
    pkldir = outdir + d.replace('-', '') + '/' + p + '/'
    if not os.path.exists(pkldir):
      os.makedirs(pkldir)
    # Read in the IDL output. 
    if not os.path.exists('temp.sav'):
      status('X')
      continue
    else:
      temp = io.readsav('temp.sav')
    # Rewrite the data as pickles. (Pickles are Python data files. They are
    # reasonably efficient in terms of both storage size and load time.)
    for key, arr in temp.items():
      with open(pkldir + key + '.pkl', 'wb') as handle:
        pickle.dump(arr, handle, protocol=-1)
    # Acknowledge successful date access. 
    status('OK')
  # Move to the next line. 
  return status()
Example #19
    def load(filename="sme.npy"):
        """
        Load SME data from disk

        Currently supported file formats:
            * ".npy": Numpy save file of an SME_Struct
            * ".sav", ".inp", ".out": IDL save file with an sme structure
            * ".ech": Echelle file from (Py)REDUCE

        Parameters
        ----------
        filename : str, optional
            name of the file to load (default: 'sme.npy')

        Returns
        -------
        sme : SME_Struct
            Loaded SME structure

        Raises
        ------
        ValueError
            If the file format extension is not recognized
        """
        logging.info("Loading SME file %s", filename)
        ext = Path(filename).suffix
        if ext == ".npy":
            # Numpy Save file
            s = np.load(filename, allow_pickle=True)
            s = np.atleast_1d(s)[0]
        elif ext == ".npz":
            s = np.load(filename, allow_pickle=True)
            s = s["sme"][()]
        elif ext in [".sav", ".out", ".inp"]:
            # IDL save file (from SME)
            s = readsav(filename)["sme"]
            s = {name.casefold(): s[name][0] for name in s.dtype.names}
            s = SME_Struct(**s)
        elif ext == ".ech":
            # Echelle file (from REDUCE)
            ech = echelle.read(filename)
            s = SME_Struct()
            if hasattr(ech, "columns"):
                s.wind = np.cumsum([0, *np.diff(ech.columns, axis=1).ravel()])
            s.wave = np.ma.compressed(ech.wave)
            s.spec = np.ma.compressed(ech.spec)
            s.uncs = np.ma.compressed(ech.sig)
            s.mask = np.full(s.sob.size, 1)
            s.wran = [[w[0], w[-1]] for w in s.wave]
            try:
                s.object = ech.head["OBJECT"]
            except KeyError:
                pass
        else:
            options = [".npy", ".sav", ".out", ".inp", ".ech"]
            raise ValueError(
                f"File format not recognised, expected one of {options} but got {ext}"
            )

        return s
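
A usage sketch, assuming load is exposed as a static method of SME_Struct (file names are hypothetical):

sme = SME_Struct.load("observation.inp")   # IDL sme structure -> SME_Struct
sme = SME_Struct.load("sme.npy")           # numpy save of an SME_Struct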
Example #20
def readsnrsav(name, extsavdir=None):
    """read the extinction data from .sav files of Chen. return its xygrid ar data and disbins
    Args:
        name: name of target SNR, eg.'snr169'
        extsavdir:a path of the dir storing .sav files
    Returns:
        xgrid, ygrid: 1-d array, coordinates of the centers of every grid cell
        ar: 1-d array with 2-d arrays as elements, extinction data loaded from .sav files
        realdis: 1-d array, distance of centers of each bin
    """
    #set the datapath
    if extsavdir is None:
        data_file = pjoin('..', '..', 'Data', 'extin3d',
                          '{0}extin3d015.sav'.format(name))
    else:
        data_file = pjoin(extsavdir, '{0}extin3d015.sav'.format(name))
    #read cordinate, distance and extinction for each star from sav files
    res = readsav(data_file, python_dict=True)
    res = res['exta']
    xgrid = res.gl[0]
    ygrid = res.gb[0]
    ar = res.dar[0]
    dismo = res.dis[0]  # distance modulus
    realdis = 10.**(dismo / 5. + 1) / 1000.
    return xgrid, ygrid, ar, realdis
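
A usage sketch with the target named in the docstring, assuming the default data layout:

xgrid, ygrid, ar, realdis = readsnrsav('snr169')
print(len(realdis), 'distance bins')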
Example #21
def load_nvi(filePath):
    '''UNTESTED - Load NISTview image data into python. '''
    nviData = sio.readsav(filePath)
    self = Spy()
    self._raw = nviData['imagetosave']
    self.map = self._raw.currentdata[0]
    self.header = {
        name: self._raw.header[0][name][0]
        for name in self._raw.header[0].dtype.names
    }
    self.info = {
        'FILENAME': self._raw.filename[0],
        'FILSIZE': int(self._raw.header[0].filesize[0]),
        'CHANNELS': self._raw.header[0].scan_channels[0],
        'XSIZE': self._raw.xsize[0],
        'YSIZE': self._raw.ysize[0],
        'TEMPERATURE': self._raw.header[0].temperature[0],
        'LOCKIN_AMPLITUDE': self._raw.header[0].lockin_amplitude[0],
        'LOCKIN_FREQUENCY': self._raw.header[0].lockin_frequency[0],
        'DATE': self._raw.header[0].date[0],
        'TIME': self._raw.header[0].time[0],
        'BIAS_SETPOINT': self._raw.header[0].bias_setpoint[0],
        'BIAS_OFFSET': self._raw.header[0].bias_offset[0],
        'BFIELD': self._raw.header[0].bfield[0],
        'ZUNITS': self._raw.zunits[0],
    }
    return self
Example #22
def import_cosmology(filename, structure_name="fid"):
    r""" Loads an icosmo cosmology from a fiducial structure stored in an
    idl save file into a cosmicpy cosmology.

    Parameters
    ----------
    filename : str
        Name of the idl save from which to load the cosmology.
    structure_name : str, optional
        Name of the icosmo fiducial structure stored in the save file.

    Returns
    -------
    cosmo : cosmology
        cosmicpy cosmology corresponding to the icosmo input.
    """
    icosmo_file = readsav(filename)
    icosmo = icosmo_file.get(structure_name)

    h = icosmo['cosmo'][0]['h'][0]
    Omega_m = icosmo['cosmo'][0]['omega_m'][0]
    Omega_de = icosmo['cosmo'][0]['omega_l'][0]
    Omega_b = icosmo['cosmo'][0]['omega_b'][0]
    w0 = icosmo['cosmo'][0]['w0'][0]
    wa = icosmo['cosmo'][0]['wa'][0]
    tau = icosmo['cosmo'][0]['tau'][0]
    n = icosmo['cosmo'][0]['n'][0]
    sigma8 = icosmo['cosmo'][0]['sigma8'][0]

    cosmo = cosmicpy.cosmology(h=h, Omega_m=Omega_m, Omega_de=Omega_de,
                               Omega_b=Omega_b, w0=w0, wa=wa, tau=tau,
                               n=n, sigma8=sigma8)
    return cosmo
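
A usage sketch; the save file name is hypothetical:

cosmo = import_cosmology('icosmo_fiducial.sav', structure_name='fid')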
Example #23
def makeWeightsArray(X):
    savfile = readsav(X)
    wuv = savfile['weights_uv']
    wxy = n.fft.fft2(wuv)
    wuv = n.abs(wuv)
    wxy = n.abs(wxy)
    return wuv,wxy
Example #24
    def test_arrays_replicated_3d(self):
        pth = path.join(DATA_PATH, 'struct_pointer_arrays_replicated_3d.sav')
        s = readsav(pth, verbose=False)

        # Check column types
        assert_(s.arrays_rep.g.dtype.type is np.object_)
        assert_(s.arrays_rep.h.dtype.type is np.object_)

        # Check column shapes
        assert_equal(s.arrays_rep.g.shape, (4, 3, 2))
        assert_equal(s.arrays_rep.h.shape, (4, 3, 2))

        # Check values
        for i in range(4):
            for j in range(3):
                for k in range(2):
                    assert_array_identical(
                        s.arrays_rep.g[i, j, k],
                        np.repeat(np.float32(4.), 2).astype(np.object_))
                    assert_array_identical(
                        s.arrays_rep.h[i, j, k],
                        np.repeat(np.float32(4.), 3).astype(np.object_))
                    assert_(
                        np.all(
                            vect_id(s.arrays_rep.g[i, j, k]) == id(
                                s.arrays_rep.g[0, 0, 0][0])))
                    assert_(
                        np.all(
                            vect_id(s.arrays_rep.h[i, j, k]) == id(
                                s.arrays_rep.h[0, 0, 0][0])))
Example #25
    def test_arrays_replicated(self):
        s = readsav(path.join(DATA_PATH,
                              'struct_pointer_arrays_replicated.sav'),
                    verbose=False)

        # Check column types
        assert_(s.arrays_rep.g.dtype.type is np.object_)
        assert_(s.arrays_rep.h.dtype.type is np.object_)

        # Check column shapes
        assert_equal(s.arrays_rep.g.shape, (5, ))
        assert_equal(s.arrays_rep.h.shape, (5, ))

        # Check values
        for i in range(5):
            assert_array_identical(
                s.arrays_rep.g[i],
                np.repeat(np.float32(4.), 2).astype(np.object_))
            assert_array_identical(
                s.arrays_rep.h[i],
                np.repeat(np.float32(4.), 3).astype(np.object_))
            assert_(
                np.all(vect_id(s.arrays_rep.g[i]) == id(s.arrays_rep.g[0][0])))
            assert_(
                np.all(vect_id(s.arrays_rep.h[i]) == id(s.arrays_rep.h[0][0])))
Example #26
    def test_scalars(self):
        s = readsav(path.join(DATA_PATH, 'struct_pointers.sav'), verbose=False)
        assert_identical(s.pointers.g,
                         np.array(np.float32(4.), dtype=np.object_))
        assert_identical(s.pointers.h,
                         np.array(np.float32(4.), dtype=np.object_))
        assert_(id(s.pointers.g[0]) == id(s.pointers.h[0]))
Example #27
    def test_7d(self):
        s = readsav(path.join(DATA_PATH, 'array_float32_pointer_7d.sav'),
                    verbose=False)
        assert_equal(s.array7d.shape, (2, 1, 2, 3, 4, 3, 2))
        assert_(np.all(s.array7d == np.float32(4.)))
        assert_(
            np.all(vect_id(s.array7d) == id(s.array7d[0, 0, 0, 0, 0, 0, 0])))
Example #28
    def test_arrays_replicated_3d(self):
        s = readsav(path.join(DATA_PATH, 'struct_arrays_replicated_3d.sav'),
                    verbose=False)

        # Check column types
        assert_(s.arrays_rep.a.dtype.type is np.object_)
        assert_(s.arrays_rep.b.dtype.type is np.object_)
        assert_(s.arrays_rep.c.dtype.type is np.object_)
        assert_(s.arrays_rep.d.dtype.type is np.object_)

        # Check column shapes
        assert_equal(s.arrays_rep.a.shape, (4, 3, 2))
        assert_equal(s.arrays_rep.b.shape, (4, 3, 2))
        assert_equal(s.arrays_rep.c.shape, (4, 3, 2))
        assert_equal(s.arrays_rep.d.shape, (4, 3, 2))

        # Check values
        for i in range(4):
            for j in range(3):
                for k in range(2):
                    assert_array_identical(s.arrays_rep.a[i, j, k],
                                           np.array([1, 2, 3], dtype=np.int16))
                    assert_array_identical(
                        s.arrays_rep.b[i, j, k],
                        np.array([4., 5., 6., 7.], dtype=np.float32))
                    assert_array_identical(
                        s.arrays_rep.c[i, j, k],
                        np.array([np.complex64(1 + 2j),
                                  np.complex64(7 + 8j)]))
                    assert_array_identical(
                        s.arrays_rep.d[i, j, k],
                        np.array([b"cheese", b"bacon", b"spam"], dtype=object))
Example #29
    def __init__(self, sav_fileloc = '/home/wilcoxr/m3dc1/profile_savs/dens_xsection.sav',
                 linfac = -2.5 * 4 / np.pi):
        """
        I'm only saving this in case I have trouble making class M3DC1_xsection backwards compatible

        Requires previously saved IDL .sav file containing profile variables
        I generate this data with this (on venus): /u/wilcoxr/idl/load_m3dc1_single_n_fullxsection.pro

        ** This is only OK to do simply because I'm using the toroidal location at the peak of the density perturbation!
           Any other location requires more processing

        For original plot_dens_xsection: sav_fileloc = '/home/wilcoxr/m3dc1/profile_savs/dens_xsection.sav',
                                         linfac = -2.5*4/np.pi
        """
    
        idlsave_dict = si.readsav(sav_fileloc, python_dict = True, verbose = False)
    
        # fields = ['te','den','ti','pe','p'] (usually ordered like this in arrays)
    
    
        self.fields = idlsave_dict[
            'fields']  # fields that are in the IDL sav file, and which index corresponds to which
        self.r = idlsave_dict['r']  # grid locations (401)
        self.z = idlsave_dict['z']
        self.dens_pert = linfac * np.real(
            idlsave_dict['res'])  # Not correct unless just looking at the peak of the dens pert
Example #30
def sav_to_fits(savfile):
    """
    Simple method to convert the DEIMOS .sav files, which contain the
    optical model maps, to .fits files.
    TODO: This is specific for keck_deimos `static_calib` data, since the path
    is explicitly mentioned. If needed, this method could be generalized.
    TODO: move it to `pypeit.io`.
    Args:
        savfile: path to the .sav file

    Returns:
        Saves the content of the .sav file into a .fits file in `data/static_calibs/keck_deimos/`

    """
    savfile_name = os.path.splitext(os.path.basename(savfile))[0]
    sav = readsav(savfile, python_dict=True)

    list_keys = list(sav.keys())
    for k in list_keys:
        if type(sav[k]) is not np.ndarray:
            sav[k] = np.asarray([sav[k]])

    to_path = resource_filename('pypeit', 'data/static_calibs/keck_deimos/')
    io.write_to_fits(sav, to_path + savfile_name + '.fits', overwrite=True)

    return
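
A usage sketch; the input path is hypothetical:

sav_to_fits('/path/to/deimos_optical_model.sav')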
Example #32
def read_padsav(file_name, disable_UserWarnings=True):
    """
        Reads data from an idl_dict object into XPadDataItem objects

        Parameters
        ----------
        file_name : str
            Path to XPad*.padsav file


        Returns
        -------
        items : list
            A list of XPadDataItems
    """

    warning_action = "default"
    if (disable_UserWarnings):
        warning_action = "ignore"

    with catch_warnings():
        simplefilter(warning_action, UserWarning)
        idl_dict = readsav(file_name)

    return parse_padsav(idl_dict)
Example #33
    def __init__(self, infile):
        mypath = os.getcwd()
        if mypath.find('/Users/rfinn') > -1:
            print("Running on Rose's mac pro or laptop")
            homedir = '/Users/rfinn/'
        elif mypath.find('Users/kellywhalen') > -1:
            print("Running on Kelly's Laptop")
            homedir = '/Users/kellywhalen/Github/Virgo/'

        cefile=readsav(infile)
        self.nulnu_iras25=cefile['nulnu_iras25']
        self.nulnu_iras100=cefile['nulnu_iras100']
        self.nulnu_iras12=cefile['nulnu_iras12']
        self.nulnu_iras60=cefile['nulnu_iras60']
        self.nulnuinlsun=cefile['nulnuinlsun']
        self.lir_sanders=cefile['lir_sanders']
        self.lir=cefile['lir']
        self.nulnu_lw3=cefile['nulnu_lw3']
        self.nulnu_lw2=cefile['nulnu_lw2']
        self.lamb=cefile['lambda']
        #
        # convert all to double-precision arrays
        #
        self.lamb=array(self.lamb,'d')
        self.nulnu_iras25=array(self.nulnu_iras25,'d')
        self.nulnu_iras100=array(self.nulnu_iras100,'d')
        self.nulnu_iras12=array(self.nulnu_iras12,'d')
        self.nulnu_iras60=array(self.nulnu_iras60,'d')
        self.nulnuinlsun=array(self.nulnuinlsun,'d')
        self.lir_sanders=array(self.lir_sanders,'d')
        self.lir=array(self.lir,'d')
        self.nulnu_lw3=array(self.nulnu_lw3,'d')
        self.nulnu_lw2=array(self.nulnu_lw2,'d')
Example #34
def dgplot1(ax1, snrNum, level, lC, bC, hl, num):
    # plot the extinction data only
    # prepare the extinction data
    res = readsav('../../data/map2018/IDLs/extin3d015{0}.sav'.format(snrNum))
    midMum = res.exta.dis[0]  # midpoints of the distance bins, in distance modulus
    gl = res.exta.gl[0]  # Galactic longitude of the grid points
    gb = res.exta.gb[0]  # Galactic latitude of the grid points
    # debr = np.stack(res.exta.debr[0].dar)  # E(B-V) values within each bin: 3-d array of 23 layers, each a 2-d matrix
    degk = np.stack(res.exta.degk[0].dar)
    AG = degk    # + 1.987 * dehk  # G-band extinction
    disBins = [[i - 0.125, i + 0.125] for i in midMum]
    AG_new = np.zeros([int(AG.shape[0] / 2), AG.shape[1], AG.shape[2]])
    for i in range(int(AG.shape[0] / 2)):
        AG_new[i, :, :] = np.sum(AG[2 * i:2 * i + 2, :, :], axis=0)
    AG = AG_new
    # draw the extinction/distance contour map
    zmax = np.percentile(AG, 98)
    levs = zmax / level * (np.arange(level) + 1)[1:]
    extin = gaussian_filter(AG[num, :, :], 0.68)
    cont = ax1.contour(gl, gb, extin, levs)
    ax1.set_xlim(lC + hl, lC - hl)
    ax1.set_ylim(bC - hl, bC + hl)
    tit = r'{0:4.2f}-{1:4.2f}'.format(disBins[num][0] * 2, disBins[num][1] * 2)
    ax1.text(0.02, 1.05, tit, va='top', ha='left', transform=ax1.transAxes, color='red', fontsize=12, zorder=10)
    # adjust the tick marks
    ax1.xaxis.set_major_locator(ticker.MultipleLocator(1))
    ax1.xaxis.set_minor_locator(ticker.MultipleLocator(0.2))
    ax1.yaxis.set_major_locator(ticker.MultipleLocator(1))
    ax1.yaxis.set_minor_locator(ticker.MultipleLocator(0.2))
    ax1.tick_params(labelsize=10)
    # adjust the x/y labels (x is longitude, y is latitude)
    ax1.set_xlabel('Galactic Longitude', fontsize=12)
    ax1.set_ylabel('Galactic Latitude', fontsize=12)
Example #35
def xidl_arcspec(xidl_file, slit):
    xidl_dict = readsav(xidl_file)
    if xidl_dict['archive_arc'].ndim == 2:
        nspec = xidl_dict['archive_arc'].shape[0]
        npix = xidl_dict['archive_arc'].shape[1]
    else:
        npix = xidl_dict['archive_arc'].shape[0]
    # This is the best one (well-centered)
    calib = xidl_dict['calib'][slit]
    # Generate the wavelengths
    if calib['FUNC'] == b'CHEBY':
        wv_air = cheby_val(calib['FFIT'], np.arange(npix),
                           calib['NRM'], calib['NORD'])
    elif calib['FUNC'] == b'POLY':
        wv_air = poly_val(calib['FFIT'], np.arange(npix), calib['NRM'])
    else:
        raise ValueError('Unknown wavelength function: {}'.format(calib['FUNC']))

    wv_vac = airtovac(wv_air * units.AA)
    if xidl_dict['archive_arc'].ndim == 2:
        spec = xidl_dict['archive_arc'][slit]
    else:
        spec = xidl_dict['archive_arc']
    # Flip to blue to red?
    if wv_vac[1] < wv_vac[0]:
        wv_vac = wv_vac[::-1]
        spec = spec[::-1]
    # Return
    return wv_vac.value, spec
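
A usage sketch; the archive file name is hypothetical:

wv_vac, spec = xidl_arcspec('lris_blue_600.sav', slit=0)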
Example #36
    def __init__(self, filename):
        idl = readsav(filename)
        idlout = idl['output']

        self.x = idlout['x'][0]
        self.y = idlout['y'][0]
        self.z = idlout['z'][0]
        self.vx = idlout['vx'][0]
        self.vy = idlout['vy'][0]
        self.vz = idlout['vz'][0]
        self.frac = idlout['frac'][0]
        self.time = idlout['time'][0]
        self.index = idlout['index'][0]

        index = sorted(list(set(idlout['index'][0])))
        x0 = [idlout['x0'][0][idlout['index'][0] == i][0] for i in index]
        y0 = [idlout['z0'][0][idlout['index'][0] == i][0] for i in index]
        z0 = [idlout['y0'][0][idlout['index'][0] == i][0] for i in index]
        vx0 = [idlout['vx0'][0][idlout['index'][0] == i][0] for i in index]
        vy0 = [idlout['vy0'][0][idlout['index'][0] == i][0] for i in index]
        vz0 = [idlout['vz0'][0][idlout['index'][0] == i][0] for i in index]

        self.x0 = np.array(x0)
        self.y0 = np.array(y0)
        self.z0 = np.array(z0)
        self.vx0 = np.array(vx0)
        self.vy0 = np.array(vy0)
        self.vz0 = np.array(vz0)
        del idlout
Example #37
def z_from_photo_z(photo_z_file, n, my_z_array=None):
    my_p_z = io.readsav(photo_z_file)
    pz = my_p_z['p_z']
    if my_z_array is None:
        z = np.arange(0, 5, .01)
    else:
        z = my_z_array
    if np.shape(pz) != np.shape(z):
        raise ValueError("p_z array and z array are different sizes")
    dz = z[1] - z[0]
    pz /= (dz * pz).sum()
    ecdf = np.cumsum(pz * dz)
    cdf = interpolate.interp1d(z, ecdf)

    def func(x, *args):
        my_cdf = args[0]
        cdf = args[1]
        return abs(my_cdf - cdf(x))
    out_z = []
    for i in range(n):
        my_cdf = np.random.uniform(0, 1)
        my_z = optimize.fmin(func, (1.5), args=(my_cdf, cdf), disp=0)
        out_z.append(my_z[0])
    out_z = np.asarray(out_z)
    return out_z
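
A usage sketch that draws redshift samples by inverting the cumulative p(z); the save file name is hypothetical:

samples = z_from_photo_z('my_pz.sav', 1000)
print(samples.mean(), samples.std())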
Example #38
    def __init__(self, root, noise):

        self.root = root
        self.noise = noise
        self.batch_size = 256

        self.dataFile = "/net/duna/scratch1/aasensio/deepLearning/milne/database/database_6301_hinode_1component.h5"

        f = h5py.File(self.dataFile, 'r')
        self.pars = f.get("parameters")                
        self.lower = np.min(self.pars, axis=0)
        self.upper = np.max(self.pars, axis=0)
        f.close()

        
        self.root_hinode = "/net/nas4/fis/aasensio/scratch/HINODE/SUNSPOT/"

        self.label_files = ["sunspot_stokesI_512x512.sav", "sunspot_stokesQ_512x512.sav", "sunspot_stokesU_512x512.sav", "sunspot_stokesV_512x512.sav"]

        self.std_values = np.load('{0}_normalization.npy'.format(self.root))

        labels_data = ['data_ii', 'data_qq', 'data_uu', 'data_vv']

        self.stokes = np.zeros((512,512,50,4))

        for i in range(4):
            print("Reading file {0}".format(self.label_files[i]))
            stokes = io.readsav("/net/nas4/fis/aasensio/scratch/HINODE/SUNSPOT/{0}".format(self.label_files[i]))[labels_data[i]]
            if (i == 0):
                mean_stokesi = np.mean(stokes[400:500,0:100,0])

            stokes = stokes[:,:,0:50] / mean_stokesi
            self.stokes[:,:,:,i] = stokes / self.std_values[None,None,:,i]

        self.stokes = self.stokes.reshape((512*512,50,4))
Example #39
def get_sav_data(file_name, name):
    data = scio.readsav(file_name,
                        idict=None,
                        python_dict=False,
                        uncompressed_file_name=None,
                        verbose=False)
    return data[name]
Example #40
def read_match_file(file, *args, **kwargs):
    try:
        match = readsav(file)['match']
    except Exception as e:
        raise Exception("cannot read match data from file: %s" %
                        (file, )) from e
    return match, 'ROTSE1'
Example #41
def read_wic_si_data(dtm, read_si=True, read_wic=False):
    """ Reads IMAGE SI and WIC image data
    """
    from scipy.io import readsav
    import glob
    import numpy as np

    if read_si:
        fname_image = "../from_harald_frey/IMF12LSI_2002_0318_" + dtm.strftime(
            "%H%M") + "*.sav"
    if read_wic:
        fname_image = "../from_harald_frey/IMFHWIC_2002_0318_" + dtm.strftime(
            "%H%M") + "*.sav"
    fname_image = glob.glob(fname_image)[0]
    dat = readsav(fname_image, python_dict=True, verbose=False)
    data_dict = {}

    # Note: the data is originally saved in IDL
    # and IDL considers the first dimension to be the column.
    # Therefore, we do the transpose to fix it. Also we need to
    # rotate the image so that the dayside is on the top

    #    data_dict["mlt_img"] = dat['imageinfo'].mlt_img[0]
    #    data_dict["image"] = dat['imageinfo'].image[0]
    #    data_dict["mlat"] = dat['imageinfo'].mlat[0]
    #    data_dict["mlon"] = dat['imageinfo'].mlon[0]
    #    data_dict["mlt"] = dat['imageinfo'].mlt[0]

    data_dict["mlt_img"] = np.flip(dat['imageinfo'].mlt_img[0], axis=0)
    data_dict["image"] = np.flip(dat['imageinfo'].image[0], axis=0)
    data_dict["mlat"] = np.flip(dat['imageinfo'].mlat[0], axis=0)
    data_dict["mlon"] = np.flip(dat['imageinfo'].mlon[0], axis=0)
    data_dict["mlt"] = np.flip(dat['imageinfo'].mlt[0], axis=0)

    return fname_image, data_dict
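
A usage sketch, assuming the ../from_harald_frey/ data directory from the snippet; the data files are tied to 2002-03-18, so only the HHMM part of dtm varies:

import datetime

fname, si_data = read_wic_si_data(datetime.datetime(2002, 3, 18, 10, 30), read_si=True)
print(fname, si_data['mlt_img'].shape)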
Example #42
def traceLength(inst):
    """ For fine-tuning the trace lengths in the contamVerify output
    figures"""

    # Getting example trace to calculate rough estimate of trace lengths
    if 'NIRCam' in inst:
        FILE = 'rot_o1_6000.0.fits'
    elif 'MIRI' in inst:
        FILE = 'LOWbg_6000.0.fits'
    elif 'NIRISS' in inst:
        FILE = 'modelOrder12_teff6000.sav'

    trFile = os.path.join(TRACES_PATH, inst.replace(' ', '_'), FILE)
    if 'NIRCam' in inst:
        trFile = trFile.replace('NIRCam', 'NIRCam_F444W')
    trData = readsav(trFile)['modelo12'] if 'NIRISS' in inst \
        else fits.getdata(trFile, 1)
    trData = trData[0]

    ax = 1 if 'NIRCam' in inst else 0
    peak = trData.max()

    # the length of the trace
    targ_trace_start = np.where(trData > 0.0001 * peak)[ax].min()
    targ_trace_stop = np.where(trData > 0.0001 * peak)[ax].max()

    return targ_trace_start, targ_trace_stop
Example #43
def load_telem(datestring, filenum, nirc2_mjd):
    """ Loads and aggregates telemetry info from one file """
    acceptable_dt = .0001 # precision of mjd match (~10 seconds or so)
    
    # Telemetry file will match:
    telem_pattern = f"/g/lu/data/keck_telemetry/{datestring}*/**/n?{filenum}_*.sav"
    
    # Get all matching files
    telem_files = glob.glob(telem_pattern, recursive=True)
    if len(telem_files) == 0: # No matches
        return
    
    for telem_file in telem_files:
        # Read telemetry file
        telem = readsav(telem_file)
        
        # Get mjd
        telem_mjd = get_telem_mjd(telem)
        
        if np.abs(telem_mjd-nirc2_mjd) < acceptable_dt:
            # Get mean and std of rms residuals
            rms_mean = np.mean(telem.a.residualrms[0][0])
            rms_std = np.std(telem.a.residualrms[0][0])
            return telem_file, telem_mjd, rms_mean, rms_std
            
        del telem
    
    return None
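
A usage sketch with hypothetical date string, file number, and NIRC2 MJD:

result = load_telem('20170101', '0123', 57754.5)
if result is not None:
    telem_file, telem_mjd, rms_mean, rms_std = result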
Example #44
def _get_dusty_array(y_window, x_window):
    """
    Returns the sliced array of dusty pixels
    """
    url = darts + 'data/cal/dp/dusty_pixels.sav'
    http_down = urllib.urlretrieve(url)
    dusties = readsav(http_down[0]).dp_data
    return dusties[y_window[0]:y_window[1], x_window[0]: x_window[1]]
Example #45
    def __init__(self, root, output, name_of_variable):

        # Only allocate needed memory
        config = tf.ConfigProto()
        config.gpu_options.allow_growth=True
        session = tf.Session(config=config)
        ktf.set_session(session)

        self.root = root
        self.nx = 576
        self.ny = 576
        self.n_times = 2
        self.n_filters = 64
        self.batch_size = 1
        self.n_conv_layers = 20
        self.stride = 1
        self.skip_frequency = 2
        self.n_frames = 1        
        self.output = output
        self.name_of_variable = name_of_variable

        telescope_radius = 0.5 * 0.965 * u.meter
        pixel_size = 0.02759 * u.arcsec / u.pixel
        fov = 1152 * u.pixel
        lambda0 = 500 * u.nm
        imax = imax_degradation(telescope_radius, pixel_size, fov)
        imax.compute_psf(lambda0)


        res = io.readsav('/net/viga/scratch1/deepLearning/opticalFlow/mancha/c3d_1152_cont4_4bin_012000_continuum.sav')['continuum']

        self.images = np.zeros((2,576,576), dtype='float32')

        # 576 pixels are obtained by resampling 1152 pixels of 0.02759 "/px to 0.0545 "/px for IMaX
        self.images[0,:,:] = congrid.resample(imax.apply_psf(res[0,:,:]), (576, 576))
        self.images[1,:,:] = congrid.resample(imax.apply_psf(res[1,:,:]), (576, 576))

        res = io.readsav('/net/viga/scratch1/deepLearning/opticalFlow/mancha/c3d_1152_cont4_4bin_012000.isotau.sav')

        self.vx = np.zeros((3,576,576), dtype='float32')
        self.vy = np.zeros((3,576,576), dtype='float32')

        for i in range(3):
            self.vx[i,:,:] = congrid.resample(imax.apply_psf(res['vx'][i,:,:]), (576, 576))
            self.vy[i,:,:] = congrid.resample(imax.apply_psf(res['vy'][i,:,:]), (576, 576))
Example #46
def find_sol_mask(shotnr, frame_info=None, rz_array=None,
                  datadir='/Users/ralph/source/blob_tracking/test_data'):
    """
    Returns a mask for the pixels between the separatrix and the limiter radius.
    """
    s = readsav('%s/separatrix.sav' % (datadir), verbose=False)

    return ((s['rmid'].reshape(64, 64) > s['rmid_sepx']) &
            (s['rmid'].reshape(64, 64) < s['rmid_lim']))
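
A usage sketch, assuming the default datadir contains separatrix.sav; the shot number is a hypothetical example:

mask = find_sol_mask(1120711021)
print(mask.sum(), 'pixels between separatrix and limiter')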
Example #47
def get_kordopatis_comparisons():
    data = readsav(os.path.join(DATA_PATH, "RAVE_DR5_calibration_data.save"))
    
    return Table(data={
        "TEFF": data["calibration_data"]["TEFF"][0],
        "LOGG": data["calibration_data"]["LOGG"][0],
        "FEH": data["calibration_data"]["FEH"][0],
        "REF": data["calibration_data"]["REF"][0],
        "Name": [each.strip() for each in data["calibration_data"]["DR5_OBS_ID"][0]]
        })
Example #48
def wotta16():
    """ Generate sys files from IDL save files
    Returns
    -------
    """
    from scipy.io import readsav
    # Non-excluded
    all = readsav(pyigm_path+'/data/LLS/Literature/wotta16_final.save')

    # Build Lehner+13
    assert False # Need RA/DEC
Example #49
def fetch_meta(fhd_run, obsids=None):
    '''
    Return meta data needed for the FHD deconvolved source components.

    Parameters
    ----------
    fhd_run: string
        The name identifier of the FHD run, e.g. \'pac_decon_eor1_June2016\'.
    obsids: list-like, optional
        Obsids (as strings) to fetch data from. Defaults to all deconvolved.
    '''
    decon_dir='%sfhd_%s/deconvolution/'%(fhd_base(),fhd_run)
    meta_dir='%sfhd_%s/metadata/'%(fhd_base(),fhd_run)
    if obsids is None: obsids = fp.get_obslist(decon_dir)
    meta = {'clustered':False}
    for o in obsids:
        params = readsav(decon_dir+o+'_fhd_params.sav')['fhd_params']       
        metaobs = readsav('%s%s_obs.sav'%(meta_dir,o))['obs']
        meta[o] = {'n_iter': params.n_iter[0],
                   'det_thresh': params.detection_threshold[0],
                   'beam_thresh': params.beam_threshold[0],
                   'max_bl': metaobs.max_baseline[0],
                   'freq': metaobs.freq_center[0],
                   'degpix': metaobs.degpix[0]}
        meta[o]['beam_width'] = meta[o]['max_bl']**-1 * 180./np.pi
    return meta
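
A usage sketch with the run name quoted in the docstring, assuming fhd_base and fp are available as in the snippet:

meta = fetch_meta('pac_decon_eor1_June2016')
print(meta['clustered'], len(meta) - 1, 'obsids')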
Example #50
def makeVisArray(X):
    #savfile = []
    #vis_array = []
    savfile = readsav(X)
    for m in X.split('_'):
        if m == 'model':
            vis_array = savfile['vis_model_ptr']
            print('Model found!')
            break
    if m != 'model':
        vis_array = savfile['vis_ptr']
    try:
        flag = readsav(X.split('_')[0] + '_flags.sav')
    except IOError:
        flag = readsav(('_').join(X.split('_')[0:3]) + '_flags.sav')
    print(flag.keys())
    flag = flag['flag_arr'][0]
    #vis_array[flag==1] = 0
    #vis_array[flag==1] = 0
    print(n.max(vis_array))
    #vis_array[flag==-1]=0
    obs = savfile['obs']
    times = obs['baseline_info'][0]['JDATE'][0]
    #ants1 = obs['baseline_info'][0]['TILE_A'][0]
    #ants2 = obs['baseline_info'][0]['TILE_B'][0]
    #print n.where(vis_array[5000,:]==0)[0].shape
    ntimes = len(times)
    nbls = obs['NBASELINES'][0]
    time_order = n.argsort(times)
    ant1,ant2 = options.baseline.split('_')
    ind1 = (obs[0]['baseline_info']['tile_a'][0]==int(ant1)).astype(int)
    ind2 = (obs[0]['baseline_info']['tile_b'][0]==int(ant2)).astype(int)
    intersect = ((ind1+ind2)/2).astype(bool)
    print(intersect.max())
    bsl_array = vis_array[intersect]
    print(bsl_array.shape)
    bsl_array = bsl_array[time_order]
    return bsl_array
Example #51
def load_nvl(filePath):
    '''UNTESTED - Load NISTview layer data into python. '''
    nvlData = sio.readsav(filePath)
    self = Spy()
    self._raw = nvlData['savestructure']
    self.en = self._raw.energies[0]
    self.map = self._raw.fwddata[0]
    self.ave = [np.mean(layer) for layer in self.map]
    self.header = {name:self._raw.header[0][name][0] for name in self._raw.header[0].dtype.names}
    for name in self._raw.dtype.names:
        if name not in self.header.keys():
            self.header[name] = self._raw[name][0]
    return self
Example #52
def makeespec(name, options=None):
    data = readsav(name+".sav")
    try:
        return _espec_v4(data, options)
    except KeyError:
        try:
            print("Reading version 4 failed. Falling back on version 3.")
            return _espec_v3(data, options)
        except KeyError:
            try:
                print("Reading version 3 failed. Falling back on version 2.")
                return _espec_v2(data, identify=_ident_any)
            except IndexError:
                print("Reading version 2 failed. Falling back on version 1.")
                return _espec_v1(data)
Example #53
def read_rave_data(filename):
    inputf = readsav(filename)
    items = list(inputf.items())
    data = items[0][1]
    wl = data['lambda'][0]  # assuming they're all the same...
    sp = data['obs_sp']  # (75437, 839)
    test_flux = np.zeros((len(sp), len(sp[0])))
    for jj in range(len(sp)):
        test_flux[jj, :] = sp[jj]
    snr = np.array(data['snr'])
    test_ivar = (snr[:, None] / test_flux)**2
    bad = np.logical_or(np.isnan(test_ivar), np.isnan(test_flux))
    test_ivar[bad] = 0.
    test_flux[bad] = 0.
    return (test_flux, test_ivar, wl)
Example #54
def read_tr_data():
    inputf = readsav('RAVE_DR4_calibration_data.save')
    items = list(inputf.items())
    data = items[0][1]
    tr_flux = data['spectrum'][0].T # shape (807, 839) = (nstars, npix)
    npix = tr_flux.shape[1]
    nstars = tr_flux.shape[0]
    teff = data['teff'][0] # length 807
    logg = data['logg'][0] # length 807
    feh = data['feh'][0]
    tr_label = np.vstack((teff, logg, feh)).T
    snr = np.zeros(nstars)
    snr.fill(100) # a guess for what the SNR could be
    tr_ivar = (snr[:,None]/tr_flux)**2
    return tr_flux, tr_ivar, tr_label
Example #55
    def validation_generator(self):
        f = io.readsav(self.observations)
        out = f[self.name_of_variable]

        self.median_i = np.median(out[:,100:-100,100:-100])

        input_validation = np.zeros((self.batch_size,self.nx,self.ny,2), dtype='float32')

        while 1:
            for i in range(self.n_frames):

                print('{0}/{1}'.format(i,self.n_frames))

                input_validation[:,:,:,0] = out[i*self.batch_size:(i+1)*self.batch_size,100:100+self.nx,100:100+self.ny] / self.median_i
                input_validation[:,:,:,1] = out[i*self.batch_size+1:(i+1)*self.batch_size+1,100:100+self.nx,100:100+self.ny] / self.median_i

                yield input_validation

Example #56
def losint_image(filein,fileout,a,e,i,asc,wbar,meanlong,
                 nlon=50,nlat=17,lon1=35.,lon2=60.,lat1=-8.5,lat2=8.5,nstep=200):

    if isfile(fileout):
        print("ERROR: Output file exists already, exiting.")
        return

    # run IDL to generate a save file
    args = 'ring_image,"'+filein+'","'+fileout+'",'+str(a)+','+str(e)+','+str(i)+','+str(asc)+','+str(wbar)+','+str(meanlong)+',lon1='+str(lon1)+',lon2='+str(lon2)+',lat1='+str(lat1)+',lat2='+str(lat2)+',nlon='+str(nlon)+',nlat='+str(nlat)
    p1 = Popen(['echo',args],stdout=PIPE)
    p2 = Popen('idl',stdin=p1.stdout,stdout=PIPE)
    p1.stdout.close()
    aout = p2.communicate()
    print(aout)

    # grab desired output from save file and then delete it
    out = readsav(fileout)
    remove(fileout)
    return (out.im,out.lons,out.lats)
Example #57
def read_sourcelist(fhdsav, tag='component_array'):
    '''
    Read a source list structure from an FHD IDL save file into a dictionary.

    Parameters
    ----------
    fhdsav: string
        Full path to the IDL save file containing a source list structure.
    tag: string, optional
        The tag name of the source list in the IDL structure. Defaults to 'component_array'.
    '''
    cat = readsav(fhdsav)[tag]
    items = [cat.id, cat.x, cat.y, cat.ra, cat.dec, 
             np.vstack(cat.flux).T['i'][0],
             cat.gain,cat.alpha, cat.freq, cat.flag]
    items = [item.astype(np.float64) for item in items]
    cat = dict(zip(['id','x','y','ra','dec','flux','gain','alpha','freq',
                    'flag'],items))
    return cat
Example #58
def read_geo(orbit):
    """Output data from a GEO file created from IDL/GDL

    Arguments
    ---------
    orbit : string
        orbit number
    """
    fil = geo_path+orbit.zfill(7)+'_001.GEO.sav'
    a = readsav(fil)
    d = {'x': a.geo.x.item(),
         'j2000': a.geo.j2000.item(),
         'km': a.geo.km.item(),
         'lon': a.geo.lon.item(),
         'lat': a.geo.lat.item(),
         'sza': a.geo.sza.item(),
         'roll': a.geo.roll.item(),
         'az_res': a.geo.az_res.item(),
         'n_pre': a.geo.n_pre.item(),
        }
    return pd.DataFrame(d)
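
A usage sketch, assuming geo_path points at the GEO directory; the orbit number is a hypothetical example:

geo_df = read_geo('3453')
print(geo_df.columns.tolist())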
Example #59
    def predict_validation(self):
        print("Predicting validation data...")

        tmp = np.load('/net/duna/scratch1/aasensio/deepLearning/opticalFlow/database/normalization.npz')
        min_i, max_i, min_v, max_v = tmp['arr_0'], tmp['arr_1'], tmp['arr_2'], tmp['arr_3']

        f = io.readsav(self.observations)
        out = f[self.name_of_variable]

        self.median_i = np.median(out[:,100:-100,100:-100])

        input_validation = np.zeros((1,self.nx,self.ny,2), dtype='float32')
        input_validation[0,:,:,0] = out[0:1,100:100+self.nx,100:100+self.ny] / self.median_i
        input_validation[0,:,:,1] = out[1:2,100:100+self.nx,100:100+self.ny] / self.median_i


        # ff = io.readsav(self.observations)
        # im = ff['cont']

        # x = np.arange(self.nx)
        # y = np.arange(self.ny)

        start = time.time()
        out = self.model.predict_generator(self.validation_generator(), self.n_frames, max_q_size=1)
        end = time.time()

        print("Prediction took {0} seconds...".format(end-start))

        fun = ktf.function([self.model.layers[0].input],[self.model.layers[1].output])
        output = np.squeeze(fun([input_validation])[0][0,200:300,200:300,:]).reshape((100,100,8,8))
        f, ax = pl.subplots(nrows=2, ncols=2, figsize=(12,12))
        ax[0,0].imshow(output[:,:,0,0] / np.median(output[:,:,0,0]))
        ax[0,1].imshow(output[:,:,4,0] / np.median(output[:,:,4,0]))
        ax[1,0].imshow(output[:,:,3,4] / np.median(output[:,:,3,4]))
        ax[1,1].imshow(output[:,:,2,2] / np.median(output[:,:,2,2]))
        pl.show()


        stop()
Example #60
def _try_download_nearest_cal(date, pix_type, detector, top_bot, left_right):
    """
    Tries to download the requested calibration data, looking for up to one
    month before and after to do so.
    """
    key = _construct_hot_warm_pix_url(date, pix_type, detector, top_bot,
                                      left_right)
    if key in __pix_memo__:
        return __pix_memo__[key]
    dates = _get_cal_dates(pix_type)
    dates.sort(key=lambda d: d - date if d > date else date - d)
    # dates is now a sorted list of the dates closest to the specified date
    for cal_date in dates:
        url = _construct_hot_warm_pix_url(cal_date, pix_type, detector,
                                          top_bot, left_right)
        http_response = urllib.urlopen(url)
        http_response.close()
        if http_response.code == 200:  # HTTP OK
            http_down = urllib.urlretrieve(url)
            arr = readsav(http_down[0]).ccd_data
            __pix_memo__[key] = arr
            return arr