Example #1
def rt_pipe(model, logger=get_logger(__name__)):
    """Configure and run the required RTs.

    Paramters:
        model (model): input model.
    """
    # Get RTs to run
    rts = model.get_rt_list()

    # Run RTs in the correct order
    # Dust first
    continuum = AVAILABLE_DUST_RT.intersection(rts)
    file_track = {}
    if continuum:
        for cont in continuum:
            logger.info('Running continuum RT: %s', cont)
            pipe = cont.lower() + '_pipe'
            file_track.update(REGISTERED_FUNCTIONS[pipe](model))
    else:
        logger.info('No continuum RT requested')

    # Line
    line = AVAILABLE_LINE_RT.intersection(rts)
    if line:
        for rt in line:
            logger.info('Running line RT: %s', rt)
            pipe = rt.lower() + '_pipe'
            file_track.update(REGISTERED_FUNCTIONS[pipe](model, logger=logger))
    else:
        logger.info('No line RT requested')

    return file_track
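# The sketch below illustrates the registry rt_pipe dispatches on. It is a
# hypothetical minimal setup, not this codebase's actual registration code:
# each pipe is a plain function stored under a '<rt name in lower case>_pipe'
# key, and the AVAILABLE_* sets list the RT codes handled by each family.
AVAILABLE_DUST_RT = {'HYPERION'}
AVAILABLE_LINE_RT = {'MOLLIE'}

def hyperion_pipe(model):
    # Placeholder dust RT pipe: map output labels to the files it wrote
    return {'dust': model.name + '_dust.fits'}

def mollie_pipe(model, logger=None):
    # Placeholder line RT pipe
    return {'line': model.name + '_line.fits'}

REGISTERED_FUNCTIONS = {'hyperion_pipe': hyperion_pipe,
                        'mollie_pipe': mollie_pipe}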
Example #2
    def __init__(self, address, nhdu=0):
        """Defines a new data object.

        Parameters:
            address (str): file name.
            nhdu (int, default=0): HDU number.
        """
        super(Data2D, self).__init__(address)
        self.nhdu = nhdu
        self.logger = get_logger(__name__)

        # Check dimensions
        if len(self.array.shape) > 2:
            self.logger.warning('Reducing data dimensions: %r', self.array.shape)
            for i in range(len(self.array.shape) - 2):
                self.array = self.array[0]
            self.logger.warning('New dimensions: %r', self.array.shape)

            # Update header
            self.header['NAXIS'] = 2
            keys = ['CDELT%i', 'CRVAL%i', 'CTYPE%i', 'CROTA%i', 'NAXIS%i']
            for i, key in product([3, 4], keys):
                try:
                    self.logger.debug('Deleting %s', key % i)
                    del self.data[self.nhdu].header[key % i]
                except KeyError:
                    self.logger.debug('%s could not be deleted', key % i)
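# Illustration of the dimension reduction above as a standalone snippet:
# leading degenerate axes, e.g. the Stokes and frequency axes of many
# interferometric FITS cubes, are peeled off until the array is 2-D.
import numpy as np

arr = np.zeros((1, 1, 64, 64))
for _ in range(arr.ndim - 2):
    arr = arr[0]
print(arr.shape)  # (64, 64)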
Example #3
def mollie_casa_line_cube(config, model_file, source, PA=0.*u.deg,
        vlsr=0.*u.km/u.s, logger=get_logger(__name__)):
    """Run the Mollie-CASA line cube pipeline."""
    logger.info('Running Mollie-CASA line cube pipeline')
    # Run casa
    filename = casa_base_pipe(config, model_file, source, PA)

    # Load cube (not implemented yet)
    cube = None

    return cube
Example #4
    def __init__(self, file_name, wlg=None):
        """Creates a new profile object.

        Parameters:
            file_name (str): file name of the profile.
            wlg (float, default=None): wavelength.
        """
        self.wlg = wlg
        super(Profile, self).__init__(file_name)
        self.logger = get_logger(__name__, __package__ + '.log')
Example #5
def mollie_casa_line_pv(config, model_file, source, PA=0.*u.deg,
        vlsr=0.*u.km/u.s, logger=get_logger(__name__)):
    """Run the Mollie-CASA line position-velocity (pv) pipeline."""
    logger.info('Running Mollie-CASA line pv pipeline')
    # Run casa
    filename = casa_base_pipe(config, model_file, source, PA)
    logger.debug('CASA output: %s', filename)

    # Get PV maps (not implemented yet)
    data = None

    return data
Example #6
class Image(Data2D):
    """Defines an image object.

    Attributes:
        address: file name.
        data: the data.
        nhdu: HDU number to work with.
        logger: logging manager.
    """
    logger = get_logger(__name__, __package__ + '.log')

    @property
    def wcs(self):
        """Celestial (longitude, latitude) WCS of the image."""
        return WCS(self.header).sub(['longitude', 'latitude'])
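# Standalone illustration of the wcs property above: WCS.sub with axis-type
# strings keeps only the celestial axes of a 3-D header (astropy also exposes
# this as WCS.celestial).
from astropy.wcs import WCS

w = WCS(naxis=3)
w.wcs.ctype = ['RA---SIN', 'DEC--SIN', 'VELO-LSR']
print(w.sub(['longitude', 'latitude']).naxis)  # 2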
Example #7
def log_likelihood(data, model, outconfig, logger=get_logger(__name__)):
    """Compute the Gaussian log-likelihood of the data given the model."""
    # Setup and run RTs
    files = rt_pipe(model, logger=logger)

    # Process output
    modelout = modelOutput(model.name, config=outconfig)
    logger.debug('Model files: %r', files.keys())
    modelout.load_all(files, data, PA=model.get_pa(), 
            vlsr=model.get_vlsr())

    # Compare
    likelihood = 0.
    for d, mod in zip_str(data, modelout):
        # e is the uncertainty of each data set (assumed defined elsewhere)
        likelihood += -0.5*np.sum((d-mod)**2/e**2 + np.log(2.*np.pi*e**2))

    return likelihood
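# Hedged standalone version of the likelihood term above, for one data/model
# pair with a constant rms noise e (the arrays and noise are illustrative):
import numpy as np

def gaussian_loglike(d, mod, e):
    """ln L = -0.5 * sum((d - mod)**2 / e**2 + ln(2*pi*e**2))."""
    return -0.5 * np.sum((d - mod)**2 / e**2 + np.log(2. * np.pi * e**2))

print(gaussian_loglike(np.array([1., 2.]), np.array([1.1, 1.9]), 0.1))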
Example #8
def mollie_base_pipe(model_file, source, filename=None, PA=0.*u.deg,
        logger=get_logger(__name__)):
    """Convert a Mollie model to a FITS cube and rotate it if needed."""
    # Load & save FITS file
    if filename is None:
        filename = model_file + '_raw.fits'
        rotated = model_file + '_rotated.fits'
    else:
        rotated = filename.replace('.fits', '_rotated.fits')
    cube = load_model_cube(model_file, source, filename, 
        logger=logger, pa=PA)

    # Rotate & save
    if PA!=0.*u.deg:
        logger.info('Rotating cube')
        rotate(filename, rotated, z=1.2, cube=True)
        logger.info('Cube written: %s', os.path.basename(rotated))
        return rotated
    else:
        return filename
Example #9
class modelOutput(Container):
    """Model output class

    Attributes:
        name (str): model name
        config (myConfigParser): output configuration file
        data (OrderedDict): output data
        logger: logging manager
    """
    logger = get_logger(__name__, __package__+'.log')

    def load_data(self, data_name, model_file, source, PA=0., vlsr=0.):
        """Load one model output through its registered pipe."""
        assert data_name in self.config.sections()

        pipe = self.config[data_name]['pipe']
        fn = REGISTERED_FUNCTIONS[pipe.lower()]
        self.data[data_name] = fn(self.config[data_name], model_file, source, 
                PA=PA, vlsr=vlsr)

    def load_all(self, model_files, source, PA=0., vlsr=0.):
        """Load every output listed in the configuration.

        Sections without their own model file can point to a shared one
        through the *group* option.
        """
        for key in self.config.sections():
            if key not in model_files:
                if 'group' in self.config[key] and \
                        self.config[key]['group'] in model_files:
                    realkey = self.config[key]['group']
                else:
                    raise KeyError('No models with key: %s' % key)
            else:
                realkey = key

            self.load_data(key, model_files[realkey], source, PA=PA, vlsr=vlsr)
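# Minimal sketch of the key-resolution rule in load_all above (a hypothetical
# helper, not part of the codebase): a section maps to its own model file, or
# to its 'group' entry when no file matches the section name.
def resolve_key(section_cfg, key, model_files):
    if key in model_files:
        return key
    if 'group' in section_cfg and section_cfg['group'] in model_files:
        return section_cfg['group']
    raise KeyError('No models with key: %s' % key)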
Example #10
def load_model_cube(model_file, source, filename, pa=0*u.deg, 
        logger=get_logger(__name__,__package__+'.log'), velocity=True, 
        bunit=u.Jy):
    """Load a Mollie model.

    Written by: K. G. Johnston.
    Modified by: F. Olguin.
    References to older versions were removed.

    Parameters:
        model_file (str): model file name.
        source (astroSource): source information.
        filename (str): output file name.
        pa (astropy.quantity, default=0.): source position angle.
        logger (logging, optional): logger.
        velocity (bool, default=True): output cube 3rd axis in velocity or frequency.
        bunit (astropy.unit, default=Jy): output flux unit.
    """

    # Open file
    logger.info('Opening file: %s', os.path.basename(model_file))
    f = open(model_file, "rb")
    endian = '>'
    byte = f.read(4)
    nlines = struct.unpack(endian+'l', byte)[0]
    
    # Swap bytes
    swap_bytes = False
    if nlines > 200:
        swap_bytes = True
        endian = '<'
    logger.info('Swapping bytes? %s', swap_bytes)

    # Number of lines
    nlines = struct.unpack(endian+'l', byte)[0]
    logger.info('Number of lines: %i', nlines)

    # Number of viewing angles
    nviews = struct.unpack(endian+'l',f.read(4))[0]
    logger.info('Number of viewing angles: %i', nviews)

    # Number of channels
    nchan = np.zeros(nlines,dtype=int)
    for line in range(nlines):
        nch = struct.unpack(endian+'l',f.read(4))[0]
        nchan[line] = nch
    logger.info('Number of channels: %r', nchan)

    # Grid parameters
    nx = struct.unpack(endian+'l',f.read(4))[0]
    ny = struct.unpack(endian+'l',f.read(4))[0]
    cellx = struct.unpack(endian+'f',f.read(4))[0] * u.pc
    celly = struct.unpack(endian+'f',f.read(4))[0] * u.pc
    beamx = struct.unpack(endian+'f',f.read(4))[0] * u.pc
    beamy = struct.unpack(endian+'f',f.read(4))[0] * u.pc
    logger.info('Grid size nx=%i, ny=%i', nx, ny)
    logger.info('Cell size %sx%s', cellx, celly)

    # Line names
    maxname = 20
    linename = (nlines)*['']
    for i in range(nlines):
        for j in range(maxname):
            bytvar = struct.unpack(endian+'c',f.read(1))[0]
            linename[i] += bytvar.decode('ascii')
        linename[i] = linename[i].strip()

    # Rest frequencies
    restfreq = np.zeros(nlines) * u.Hz
    logger.info('The lines are:')
    for i in range(nlines):
        restfreq[i] = struct.unpack(endian+'d',f.read(8))[0]*u.Hz
        logger.info('%s at %s', linename[i], restfreq[i].to(u.GHz))

    # Channel velocity ranges
    maxch = max(nchan)
    chvel = np.zeros((nlines,maxch)) * u.cm/u.s
    for i in range(nlines):
        for n in range(nchan[i]):
            chvel[i,n] = struct.unpack(endian+'f',f.read(4))[0] * u.cm/u.s
        logger.info('Velocity range for line %s:  %s, %s', linename[i],
                    chvel[i,0].to(u.km/u.s), chvel[i,nchan[i]-1].to(u.km/u.s))

    # Viewing angles
    lng = np.zeros(nviews) * u.deg
    lat = np.zeros(nviews) * u.deg
    for i in range(nviews):
        lng[i] = struct.unpack(endian+'f',f.read(4))[0] * u.deg
        lat[i] = struct.unpack(endian+'f',f.read(4))[0] * u.deg
    logger.info('Longitudes: %s', lng)

    # Fix inclination convention to Hyperion
    lat = 90.*u.deg - lat
    logger.info('Latitudes: %s', lat)

    # RA axis
    xc = np.zeros(nx)
    for i in range(nx):
        xc[i] = struct.unpack(endian+'f',f.read(4))[0]

    # Dec axis
    yc = np.zeros(ny)
    for i in range(ny):
        yc[i] = struct.unpack(endian+'f',f.read(4))[0]

    # Data
    data = np.ones((nlines,nviews,nx,ny,maxch)) * np.nan
    for l in range(nlines):
        data_bytes = f.read(4 * nviews * nx * ny * nchan[l])
        data[l,:,:,:,:nchan[l]] = np.frombuffer(data_bytes,
                dtype=endian + 'f4').reshape(nviews, nx, ny, nchan[l])
        logger.info('Max in line %i is %.3e', l, np.nanmax(data[l]))
    f.close()
    logger.info('Min and max brightness in data set: %.3e, %.3e',
                np.nanmin(data), np.nanmax(data))

    # Set up header common to all files
    ra, dec = source.position.ra, source.position.dec
    distance = source.distance.to(u.pc)
    header_template = fits.Header()
    header_template['OBJECT'] = 'MODEL'
    header_template['TELESCOP'] = 'MOLLIE'
    header_template['INSTRUME'] = 'MOLLIE'
    header_template['OBSERVER'] = 'MOLLIE'
    header_template['CTYPE1'] = 'RA---SIN'
    header_template['CTYPE2'] = 'DEC--SIN'
    header_template['CUNIT1'] = 'degree'
    header_template['CUNIT2'] = 'degree'
    header_template['CRPIX1'] = nx / 2.
    header_template['CRPIX2'] = ny / 2. + 1.
    header_template['CDELT1'] = -1.*np.abs(np.degrees((cellx.si/distance.si).value))
    header_template['CDELT2'] = np.degrees((celly.si/distance.si).value)
    header_template['CRVAL1'] = ra.to(u.deg).value
    header_template['CRVAL2'] = dec.to(u.deg).value
    header_template['EPOCH'] = 2000
    header_template['EQUINOX'] = 2000.
    if pa or source.get_quantity('pa') is not None:
        header_template['CROTA1'] = 0
        if not pa:
            pa = source.get_quantity('pa')
        header_template['CROTA2'] = (360*u.deg - pa.to(u.deg)).value

    # Line indices
    minimum_line = nlines - 1
    minimum_velocity = chvel[nlines - 1,0]

    # Now figure out the velocity shift due to line frequencies
    velocity_shift = -1. * ct.c.cgs * (restfreq - restfreq[minimum_line]) / \
            restfreq[minimum_line]
    for line in range(nlines):
        logger.info('Velocity shift for line %s: %s', linename[line],
                velocity_shift[line].to(u.km/u.s))
    
    # Maximum velocity
    maximum_velocity = chvel[0,nchan[0]-1] + velocity_shift[0]
    logger.info('Min and max velocities: %s, %s',
                minimum_velocity.to(u.km/u.s), maximum_velocity.to(u.km/u.s))

    # Make a new velocity array starting at the minimum velocity
    dv = (chvel[0,1]-chvel[0,0])
    logger.info('Channel width %s', dv.to(u.km/u.s))
    number_channels = int(((maximum_velocity.cgs - minimum_velocity.cgs) /\
            dv.cgs).value) + 1
    logger.info('Number of channels: %i', number_channels)
    velo = np.linspace(0., number_channels-1., number_channels)*dv + \
            minimum_velocity
    logger.info('New velocity range [%s:%s]', velo[0].to(u.km/u.s),
                velo[number_channels-1].to(u.km/u.s))

    # New array to hold 1 line with all the spectra
    cube = np.zeros((nviews,nx,ny,number_channels))
    for line in range(nlines):
        for i in range(nchan[line]):
            j = int(((chvel[line,i].cgs + velocity_shift[line].cgs -\
                    velo[0].cgs)/dv.cgs).value)
            cube[:,:,:,j] = (data[line,:,:,:,i] + cube[:,:,:,j])
    nchan[0] = number_channels

    # Save images per viewing angle
    images = []
    for v in range(nviews):
        # Header
        header = header_template.copy()
        header['LINE'] = '%s (all)' % linename[0].split('(')[0]

        # Save velocity or frequency
        cdelt3 = dv.to(u.km/u.s)
        if velocity:
            header['CTYPE3'] = 'VELO-LSR'
            header['CUNIT3'] = 'KM/S'
            crval3 = minimum_velocity.to(u.km/u.s)
            logger.info("Channel width %s", cdelt3)
        else:
            header['CTYPE3'] = 'FREQ'
            header['CUNIT3'] = 'Hz'
            crval3 = (restfreq[0]*(1. - cdelt3.cgs/ct.c.cgs)).to(u.Hz)
            cdelt3 = (restfreq[0]*cdelt3.cgs/ct.c.cgs).to(u.Hz)
            logger.info("Channel width %s", cdelt3.to(u.MHz))
        header['CRPIX3'] = 1.
        header['CDELT3'] = cdelt3.value
        header['CRVAL3'] = crval3.value
        # The combined cube is referenced to the first line
        header['RESTFREQ'] = restfreq[0].to(u.Hz).value

        # Save file
        logger.info('Cube file name: %s', os.path.basename(filename))
        if bunit is u.K:
            header['BUNIT'] = 'K'
            logger.info('Saving cube with units: K')
            fits.writeto(filename, cube[v,:,:,:nchan[0]].transpose(),
                         header, overwrite=True)
            images += [Data3D(filename)]
        elif bunit is u.Jy:
            pixel_area_deg = np.abs(header['CDELT1']) * header['CDELT2']
            K_to_Jy = (header['RESTFREQ']*u.Hz).to(u.GHz).value**2 * \
                    3600**2 / 1.224e6 / 1.1331 * pixel_area_deg
            header['BUNIT'] = 'JY/PIXEL'
            logger.info('Saving cube with units: Jy/pixel')
            fits.writeto(filename,
                    cube[v,:,:,:nchan[0]].transpose() * K_to_Jy, header, 
                    overwrite=True)
            images += [Data3D(filename)]
        else:
            raise NotImplementedError

    return images
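# Standalone sketch of the K -> Jy/pixel factor used above (a hypothetical
# helper): nu[GHz]^2 * Omega[arcsec^2] / 1.224e6 is the standard Jy/K
# brightness-temperature relation for a Gaussian beam, and dividing by
# 1.1331 (= pi / (4 ln 2)) converts the FWHM-product convention to a plain
# solid angle, here a square pixel.
def k_to_jy_pixel(restfreq_ghz, cdelt1_deg, cdelt2_deg):
    pixel_area_arcsec2 = abs(cdelt1_deg) * cdelt2_deg * 3600**2
    return restfreq_ghz**2 * pixel_area_arcsec2 / 1.224e6 / 1.1331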
Example #11
#!/usr/bin/env python
import os, argparse

import numpy as np
import matplotlib.pyplot as plt
from myutils.logger import get_logger
from scipy.ndimage.morphology import binary_dilation
from scipy.stats import linregress
from astropy.stats import sigma_clip
from myutils.argparse_actions import LoadFITS, LoadTXTArray
from extract_spectra import find_peak

# Start settings
logger = get_logger(__name__, filename='continuum_iterative.log')


def group_chans(inds):
    """ Group contiguous channels.
    Notes:
        Taken from:
        https://stackoverflow.com/questions/7352684/how-to-find-the-groups-of-consecutive-elements-from-an-array-in-numpy
    """
    return np.split(inds, np.where(np.diff(inds) != 1)[0] + 1)


def filter_min_width(mask, min_width=2):
    """Set to False the mask channel groups narrower than min_width."""
    if np.all(mask):
        return mask
    ind = np.arange(mask.size)
    groups = group_chans(ind[mask])
    for g in groups:
        if g.size < min_width:
            mask[g] = False
    return mask
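# Usage sketch for the helpers above (the mask is illustrative):
if __name__ == '__main__':
    mask = np.array([True, True, False, True, False, True, True, True])
    print(group_chans(np.arange(mask.size)[mask]))
    # [array([0, 1]), array([3]), array([5, 6, 7])]
    print(filter_min_width(mask.copy(), min_width=2))
    # [ True  True False False False  True  True  True]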
Example #12
class Source(Container):
    """Defines an astronomical source, its properties and data.

    Attributes:
        name: name of the source.
        config: configuration file of the source.
        data: the data belonging to the source.
        logger: logging manager.
    """
    logger = get_logger(__name__, __package__+'.log')

    def __init__(self, name=None, config=None, otf=False):
        """Creates a new Source.

        Parameters:
            name: the name of the source.
            config: the configuration file.
            otf: load data on-the-fly.
        """
        assert name is not None or config is not None

        # Initialize
        if name is not None:
            self.logger.info('Initializing source: %s', name)
        else:
            self.logger.info('Initializing source from configuration')
        super(Source, self).__init__(name, config_file=config)

        # Load data
        if not otf:
            self.logger.info('Loading all data')
            self.load_all_data()

    def __str__(self):
        """String representation"""
        line = '%s\n%s\n' % (self.name, '-'*len(self.name))
        fmt = '%s = %s\n'
        for item in self.config.items('INFO'):
            line += fmt % item
        if self.data:
            line += 'Loaded data:\n\t'
            for key in self.data:
                line += '%s, ' % key
        return line.strip().strip(',')

    def get_quantity(self, prop, section='INFO'):
        return self.config.getquantity(section, prop, fallback=None)

    @property
    def distance(self):
        return self.get_quantity('distance')

    @property
    def luminosity(self):
        return self.get_quantity('luminosity')

    @property
    def position(self):
        ra = self.config.get('INFO','ra')
        dec = self.config.get('INFO','dec')
        frame = self.config.get('INFO','frame',fallback='icrs')
        return SkyCoord(ra, dec, frame=frame)

    @property
    def ra(self):
        return self.position.ra

    @property
    def dec(self):
        return self.position.dec

    def get_type(self, section):
        """Get the type of data.

        Parameters:
            section (str): the data key.
        """
        assert section in self.config.sections()
        return self.config[section]['type'].lower()

    def load_data(self, section, file_name=None):
        """Load the data.

        It uses the classes registered in *REGISTERED_CLASSES* to identify
        which type of data it has to load. The data information (e.g. file
        name) has to be in the source configuration file.

        Parameters:
            section (str): the data to be loaded.
            file_name (str, optional): file name overriding the one in the
                configuration.
        """
        if file_name is not None:
            data_file = os.path.expanduser(file_name)
        else:
            data_file = os.path.expanduser(self.config[section]['file'])
        assert os.path.isfile(data_file)

        self.data[section] = load_data_by_type(data_file, 
                self.config[section]['type'].lower(), REGISTERED_CLASSES)

    def load_all_data(self):
        """Load all the data with information in the configuration file."""
        for section in self.config.sections():
            if section=='INFO' or 'type' not in self.config.options(section):
                continue
            else:
                self.logger.info('Loading: %s', section)
                self.load_data(section)

    def load_config(self, config_file):
        """Load a configuration file.

        Parameters:
            config_file (str): name of the configuration file
        """
        super(Source, self).load_config(config_file)
        
        # Check INFO section
        if 'INFO' not in self.config:
            self.logger.warning('Source does not have INFO: some functions will not work')

        # Check source name
        if self.name is None:
            self.name = self.config.get('INFO', 'name', fallback='NN')
            self.logger.info('Source name: %s', self.name)
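# Hedged sketch of the kind of configuration file Source expects. The option
# names follow the accessors above; the values and the quantity syntax are
# illustrative assumptions about the parser:
EXAMPLE_SOURCE_CONFIG = """
[INFO]
name = mysource
distance = 1000 pc
ra = 18h00m00s
dec = -30d00m00s

[continuum]
type = image
file = ~/data/continuum.fits
"""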
Example #13
class YSO(Distribution):
    """Create a YSO object for the model.

    The YSO object manages the pgysical properties of the yso. When the YSO
    pbject is called with the coordinates of a grid point, the total density is
    returned.

    Attributes:
        __params (myConfigParser): physical properties of the YSO.
        loc (iterable): location of the source in the grid.
    """

    logger = get_logger(__name__, __package__+'.log')

    def __init__(self, params, loc=(0,0,0)):
        """Initialize the YSO.

        Parameters:
            params (str): YSO configuration file.
            loc (iterable): location of the source.
        """
        assert len(loc)==3
        super(YSO, self).__init__(params)

        # For backwards compatibility:
        if 'Star' in self.sections:
            self.loc = self.params.getquantity('Star', 'loc')
            self.logger.info('Replacing location from parameters: %s', self.loc)
        else:
            self.loc = loc

    def __call__(self, x, y, z, component='dust'):
        """Density distribution.

        Parameters:
            x, y, z (floats): position where the density is evaluated.
        """
        disc, envelope, cavity = self.density(x, y, z, component=component)

        return disc + envelope + cavity

    def update(self, section, param, value, tie_rc=True):
        """Change the value for a given parameter
        
        If the envelope density is an Ulrich profile and the stellar mass is
        updated, then the infall rate is also updated to keep the density
        distribution unchanged. Note that if the stellar mass parameter in the
        envelope is updated, only the stellar mass is changed, i.e. the
        envelope infall rate does not change.

        If the input value does not have units, the units of the current
        parameter are assigned.

        Parameters:
            section (str): density structure name
            param (str): name of the parameter
            value (float or astropy.quantity): new value
            tie_rc (boolean, optional): it ties the value of the centrifugal
                radius with the disc value (only if envelope is ulrich)
        """
        env_type = self.params['Envelope']['type'].lower()
        if section.lower()=='star' and param.lower()=='m' and env_type=='ulrich':
            self.logger.info('Updating stellar mass and scaling envelope ' +\
                    'infall rate')
            value = self._convert_units(section, param, value)
            mstar = self[section, param]
            mdotold = self['Envelope', 'mdot']
            mdotnew = np.sqrt(value/mstar) * mdotold
            super(YSO, self).update(section, param, value)
            super(YSO, self).update('Envelope', 'mdot',
                    mdotnew.to(mdotold.unit))
        elif (section.lower()=='envelope' or section.lower()=='disc') and \
                env_type=='ulrich' and \
                (param.lower()=='rdisc' or param.lower()=='rc') and tie_rc:
            super(YSO, self).update('Envelope', 'rc', value)
            super(YSO, self).update('Disc', 'rdisc', value)
        else:
            super(YSO, self).update(section, param, value)

    def flatten(self, ignore_params=[], get_db_format=False):
        """Return 2 arrays containg parameters and values

        Parameters names in the DEFAULT are renamed *<parameter>_<section>*.
        Parameters in list format are passed as a string

        Parameters:
            get_db_format (bool, optional): get an array with formats for
                databases
        """
        return super(YSO, self).flatten(
                ignore_params=['dust_dir']+ignore_params, 
                get_db_format=get_db_format)

    def from_fits(self, quantity, section='DEFAULT'):
        """Load a quantity from a FITS file.

        The FITS files have a standard name for each of the quantities.

        Parameters:
            quantity (str): quantity to load.
        """
        dirname = os.path.expanduser(self.params.get(section, 'grids_library'))
        if 'density' in quantity:
            fname = '{0}_rc{1:d}.fits'.format(quantity,
                    int(self.params.getquantity('Envelope','rc').to(u.au).value))
        fname = os.path.join(dirname, fname)
        assert os.path.isfile(fname)
        self.logger.info('Loading: %s', os.path.basename(fname))

        return fits.open(fname)[0]

    def to_fits(self, quantity, value, section='DEFAULT'):
        """Save a quantity grid into a fits file.

        The FITS files have a standard name for each of the quantities.

        Parameters:
            quantity (str): quantity to save.
            value (ndarray): values to save.
            section (str): section with the directory name.
        """
        assert value.ndim==3
        if 'density' in quantity:
            rc = int(self.params.getquantity('Envelope','rc').to(u.au).value)
            nz, ny, nx = value.shape
            fname = '{0}_nx{1}_ny{2}_nz{3}_rc{4:d}.fits'.format(quantity, nx, ny,
                    nz, rc)
        dirname = os.path.expanduser(self.params.get(section, 'grids_library'))
        fname = os.path.join(dirname, fname)
        hdu = fits.PrimaryHDU(value)
        hdu.writeto(fname, overwrite=True)

    def density(self, x, y, z, component='dust', save_components=False,
            from_file=False):
        """Calculates and returns the density distribution by components.

        Parameters:
            x, y, z (floats): position where the density is evaluated.
        """
        # Load from file if needed
        if from_file:
            try:
                dims = 'nx%i_ny%i_nz%i' % x.shape[::-1]
                disc = self.from_fits('density_disc_%s' % dims)
                envelope = self.from_fits('density_envelope_%s' % dims)
                cavity = self.from_fits('density_cavity_%s' % dims)
                return disc, envelope, cavity
            except AssertionError:
                pass

        # Convert coordinates to spherical
        r, th, phi = cart_to_sph(x, y, z, pos0=self.loc)

        # Disk
        if 'Disc' in self.params:
            disc = discs.flared(r, th, self.params, component=component)
        else:
            disc = 0.

        # Envelope and cavity
        if 'Envelope' in self.params:
            envelope = ulrich.density(r, th, self.params, component=component)
            if 'Cavity' in self.params:
                cavity, mask = outflow.density(r, th, self.params,
                        component=component)
                cavity[cavity>envelope] = envelope[cavity>envelope]
                envelope[~mask] = 0.
        else:
            envelope = cavity = 0.

        if save_components:
            self.to_fits('density_disc', disc)
            self.to_fits('density_envelope', envelope)
            self.to_fits('density_cavity', cavity)

        return disc, envelope, cavity

    def velocity(self, x, y, z, component='dust', disc_dens=None, 
            env_dens=None, cav_dens=None):
        """Velocity distribution.

        Parameters:
            x, y, z (floats): position where the density is evaluated.
            min_height_to_disc (float): the velocity of points below this
                height are set to the disc velocity.
        """
        # Convert coordinates to spherical
        r, th, phi = cart_to_sph(x, y, z, pos0=self.loc)

        # Disc radius
        rdisc = self.params.getquantity('Disc', 'rdisc')

        # Velocity in cavity
        vr_out, vth_out, vphi_out, mask = outflow.velocity(r, th, self.params,
                component=component)

        # Velocity in envelope
        if self.params.get('Velocity', 'envelope', fallback='').lower() == 'ulrich':
            vr_env, vth_env, vphi_env = ulrich.velocity(r, th, self.params,
                    component=component)
        else:
            # Assume a static envelope if no envelope velocity model is given
            vr_env = np.zeros(vr_out.shape) * vr_out.unit
            vth_env = np.zeros(vth_out.shape) * vth_out.unit
            vphi_env = np.zeros(vphi_out.shape) * vphi_out.unit
        vr_env[~mask] = 0.
        vth_env[~mask] = 0.
        vphi_env[~mask] = 0.

        # Disc
        vr_disc, vth_disc, vphi_disc = discs.keplerian_rotation(r, th, 
                self.params, component=component)
        ind = r.cgs<=rdisc.cgs
        vr_disc[~mask] = 0.
        vth_disc[~mask] = 0.
        vphi_disc[~mask] = 0.
        vr_env[ind] = 0.
        vth_env[ind] = 0.
        vphi_env[ind] = 0.
        
        # Combine
        if disc_dens is None and env_dens is None and cav_dens is None:
            disc, envelope, cavity = self.density(x, y, z, component=component)
        else:
            disc, envelope, cavity = disc_dens, env_dens, cav_dens
        envelope[ind] = 0.
        dens = envelope + cavity
        vr = vr_env + vr_out
        vth = vth_env + vth_out
        vphi = vphi_env + vphi_out
        vr = (vr*dens + vr_disc*disc) / (dens+disc)
        vth = (vth*dens + vth_disc*disc) / (dens+disc)
        vphi = (vphi*dens + vphi_disc*disc) / (dens+disc)
        vr[np.isnan(vr.value)] = 0.
        vth[np.isnan(vth.value)] = 0.
        vphi[np.isnan(vphi.value)] = 0.
        vr[np.isinf(vr.value)] = 0.
        vth[np.isinf(vth.value)] = 0.
        vphi[np.isinf(vphi.value)] = 0.

        return vel_sph_to_cart(vr, vth, vphi, th, phi)

    @timed
    def get_all(self, x, y, z, temperature=None, component='dust', nquad=1):
        """Optimized function for obtaining the 3-D density and the velocity
        simultaneously.

        This avoid recalculating the density when the velocity function is
        called. The function can also be evaluated in different quadrants to
        avoid slowing the code with larger grids.

        Parameters:
            x, y, z (floats): position where the density is evaluated.
            temperature (callable, optional): temperature distribution to
                evaluate on the same grid.
            nquad (int, default=1): number of quadrants to divide the x, y, z
                grids.
        """
        assert x.shape==y.shape==z.shape
        assert x.ndim==3

        # Initialize grids
        disc = np.zeros(x.shape)
        envelope = np.zeros(x.shape)
        cavity = np.zeros(x.shape)
        vx = np.zeros(x.shape)
        vy = np.zeros(x.shape)
        vz = np.zeros(x.shape)
        if temperature is not None:
            temp = np.zeros(x.shape) * u.K

        # Indices of divisions
        xinds = np.arange(-1, x.shape[2]+1, x.shape[2]//nquad)
        xinds[0] = 0
        xinds = zip(xinds[:-1], xinds[1:])
        yinds = np.arange(-1, x.shape[1]+1, x.shape[1]//nquad)
        yinds[0] = 0
        yinds = zip(yinds[:-1], yinds[1:])
        zinds = np.arange(-1, x.shape[0]+1, x.shape[0]//nquad)
        zinds[0] = 0
        zinds = zip(zinds[:-1], zinds[1:])

        for xind, yind, zind in product(xinds, yinds, zinds):
            # Sub axes
            subx = x[zind[0]:zind[1]+1,yind[0]:yind[1]+1,xind[0]:xind[1]+1] 
            suby = y[zind[0]:zind[1]+1,yind[0]:yind[1]+1,xind[0]:xind[1]+1]
            subz = z[zind[0]:zind[1]+1,yind[0]:yind[1]+1,xind[0]:xind[1]+1]

            # Evaluate
            dens = self.density(subx, suby, subz, component=component)
            vel = self.velocity(subx, suby, subz, component=component,
                    disc_dens=dens[0], env_dens=dens[1], cav_dens=dens[2])
            if temperature is not None:
                t = temperature(subx, suby, subz)
                temp[zind[0]:zind[1]+1,yind[0]:yind[1]+1,xind[0]:xind[1]+1] = \
                        t

            # Put units
            if not hasattr(disc, 'unit'):
                disc = disc * dens[0].unit
                envelope = envelope * dens[1].unit
                cavity = cavity * dens[2].unit
                vx = vx * vel[0].unit
                vy = vy * vel[1].unit
                vz = vz * vel[2].unit

            # replace
            disc[zind[0]:zind[1]+1,yind[0]:yind[1]+1,xind[0]:xind[1]+1] = \
                    dens[0]
            envelope[zind[0]:zind[1]+1,yind[0]:yind[1]+1,xind[0]:xind[1]+1] = \
                    dens[1]
            cavity[zind[0]:zind[1]+1,yind[0]:yind[1]+1,xind[0]:xind[1]+1] = \
                    dens[2]
            vx[zind[0]:zind[1]+1,yind[0]:yind[1]+1,xind[0]:xind[1]+1] = vel[0]
            vy[zind[0]:zind[1]+1,yind[0]:yind[1]+1,xind[0]:xind[1]+1] = vel[1]
            vz[zind[0]:zind[1]+1,yind[0]:yind[1]+1,xind[0]:xind[1]+1] = vel[2]

        if temperature is not None:
            return disc+envelope+cavity, vx, vy, vz, temp
        else:
            return disc+envelope+cavity, vx, vy, vz

    def abundance(self, x, y, z, temperature, key='Abundance', index='',
            ignore_min=False):
        """Molecular abundance.

        Parameters:
            temperature: the temperature distribution.
        """
        # Option formats
        if index!='':
            key = key + '%s' % index 
            t_fmt = 't%s%s' % (index, '%i')
            abn_fmt = 'abn%s%s' % (index, '%i')
        else:
            t_fmt = 't%i'
            abn_fmt = 'abn%i'
    
        # Get temperature steps and abundances
        nsteps = self.params.getint(key, 'nsteps')
        if nsteps==0:
            abn = abn_min = self.params.getfloat(key, 'abn')
            abundance = np.ones(temperature.shape) * abn
        else:
            abundance = np.zeros(temperature.shape)
            for i in range(nsteps):
                t = self.params.getquantity(key, t_fmt % i)
                abn = self.params.getfloat(key, abn_fmt % (i+1))
                abundance[temperature>=t] = abn
                if i==0:
                    abn_min = self.params.getfloat(key, abn_fmt % i)
                    if not ignore_min:
                        abundance[temperature<t] = abn_min
            abundance[abundance==0.] = abn_min
        
        if ignore_min:
            return abundance, abn_min
        else:
            return abundance

    def linewidth(self, x, y, z, temperature, minwidth=0.*u.km/u.s):
        """Thermal plus turbulent linewidth distribution."""
        amu = 1.660531e-24 * u.g
        atoms = self.params.getfloat('Velocity', 'atoms')
        c_s2 = ct.k_B * temperature / (atoms * amu)
        linewidth = np.sqrt(self.params.getquantity('Velocity', 'linewidth')**2
                + c_s2)

        return linewidth
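# Standalone sketch of the step-function abundance profile implemented in
# YSO.abundance above (thresholds and values are illustrative): abn[0]
# applies below the first temperature step and abn[i+1] at temperatures
# >= t_steps[i].
import numpy as np

def step_abundance(temperature, t_steps, abn):
    abundance = np.full(temperature.shape, abn[0])
    for t, a in zip(t_steps, abn[1:]):
        abundance[temperature >= t] = a
    return abundance

print(step_abundance(np.array([10., 50., 150.]), [100.], [1e-9, 1e-7]))
# [1.e-09 1.e-09 1.e-07]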
Example #14
def load_logger(name):
    """Return a logger for *name* (thin wrapper around get_logger)."""
    return get_logger(name)
Example #15
class Distribution(object):
    """Base distribution class.

    Attributes:
        __params (myConfigParser): model parameters
    """
    __metaclass__ = ABCMeta
    logger = get_logger(__name__, __package__ + '.log')

    def __init__(self, param_file):
        """Initialize distribution.

        Parameters:
            param_file (str): distribution parameter file.
        """
        self.logger.info('Loading parameters from: %s', param_file)
        self.__params = self.load_config(param_file)

    def __getitem__(self, key):
        """Get a parameter as a quantity.
        
        Only keys with a section and parameter are allowed."""
        if len(key) != 2:
            raise KeyError('Key must be length 2: %r' % key)
        self._validate_keys(*key)
        try:
            # Load a quantity by default
            value = self.__params.getquantity(*key)

            # For debugging
            assert hasattr(value, 'unit')

        except ValueError:
            # If value is a string
            value = self.__params.get(*key)

        return value

    @abstractmethod
    def update(self, section, param, value):
        """Update the value of a parameter"""
        # Check units
        value = self._convert_units(section, param, value)

        # Update
        newval = '%.8e %s' % (value.value, value.unit.to_string(format='cds'))
        self.__params[section][param] = newval

    @property
    def params(self):
        return self.__params

    @property
    def sections(self):
        return self.params.sections()

    @staticmethod
    def load_config(filename):
        """Load the parameter configuration file.

        Parameters:
            filename (str): YSO configuration file name.
        """
        # Verify file
        name = os.path.realpath(os.path.expanduser(filename))
        assert os.path.isfile(name)

        # Load file
        parser = myConfigParser(interpolation=ExtendedInterpolation())
        parser.read(name)

        return parser

    def _convert_units(self, section, param, value):
        """Check and convert units for an input value.

        Parameters:
            section (str): distribution section.
            param (str): parameter name
            value (quantity or float): value to convert
        """
        self._validate_keys(section, param)

        # Check unit compatibility of new value
        old = self[section, param]
        if not hasattr(value, 'unit') and hasattr(old, 'unit'):
            return value * old.unit
        elif hasattr(value, 'unit') and hasattr(old, 'unit'):
            return value.to(old.unit)
        elif hasattr(value, 'unit') and not hasattr(old, 'unit'):
            raise ValueError('The parameter *%s* in %s does not have unit' % \
                    (param, section))
        else:
            return value

    def _validate_keys(self, section, param=None):
        """Validate keys"""
        # Check section
        if section not in self.sections:
            raise KeyError('Section *%s* does not exist' % section)
        elif param is None:
            return True
        else:
            pass

        # Check param
        if param not in self.__params.options(section):
            raise KeyError('Param *%s* does not exist' % param)
        else:
            return True

    def flatten(self, ignore_params=[], get_db_format=False):
        """Return 2 arrays containg parameters and values

        Parameters names in the DEFAULT are renamed *<parameter>_<section>*.
        Parameters in list format are passed as a string

        Parameters:
            ignore_params (list, optional): list of prameters to ignore
            get_db_format (bool, optional): get an array with formats for
                databases
        """
        self.logger.debug('Flattening parameters:')
        keys, vals, fmts = [], [], []
        for section in self.sections:
            for param in self.params[section]:
                # Ignores
                if param in ignore_params:
                    self.logger.debug('Ignoring parameter: %s', param)
                    continue

                # Keys
                if param in self.params['DEFAULT']:
                    self.logger.debug('Renaming parameter: %s', param)
                    keys += ['%s_%s' % (param, section.lower())]
                else:
                    keys += [param]

                # Values
                val = self[section, param]
                if hasattr(val, 'unit'):
                    try:
                        # Sequence quantities are stored as strings
                        len(val)
                        vals += ['%s' % (list(val.value), )]
                        fmts += ['TEXT']
                    except TypeError:
                        vals += [val.value]
                        fmts += ['REAL']
                else:
                    vals += [val]
                    fmts += ['TEXT']

        if get_db_format:
            return (keys, vals, fmts)
        else:
            return (keys, vals)
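# Behavior sketch of Distribution._convert_units above (hypothetical values):
# bare numbers inherit the stored unit, quantities are converted to it, and a
# quantity is rejected when the stored parameter is unitless.
import astropy.units as u

def convert_like(old, value):
    if not hasattr(value, 'unit') and hasattr(old, 'unit'):
        return value * old.unit
    elif hasattr(value, 'unit') and hasattr(old, 'unit'):
        return value.to(old.unit)
    elif hasattr(value, 'unit'):
        raise ValueError('The stored parameter does not have unit')
    return value

print(convert_like(10. * u.au, 2.))          # 2.0 AU
print(convert_like(10. * u.au, 0.1 * u.pc))  # ~20626.5 AU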
Example #16
#!/usr/bin/env python
import os, argparse, sys

import numpy as np
import matplotlib.pyplot as plt
from myutils.logger import get_logger
from scipy.ndimage.filters import maximum_filter
from scipy.ndimage.morphology import binary_dilation
from scipy.stats import linregress
from astropy.io import fits
from astropy.stats import sigma_clip
from astropy.wcs import WCS
from myutils.argparse_actions import LoadFITS

# Start settings
logger = get_logger(__name__, filename='extract_spectra.log')


def new_fits(data, hdr=None, filename=None):
    """Create a PrimaryHDU from data and optionally write it to filename."""
    hdu = fits.PrimaryHDU(data, header=hdr)
    hdul = fits.HDUList([hdu])
    if filename:
        hdul.writeto(filename, overwrite=True)

    return hdul[0]


def sum_collapse(cube, rms=None, filename=None):
    """Collapse a cube along the spectral axis by summing the channels."""
    return _sum_collapse(cube, rms=rms, filename=filename)[0]
Example #17
class BaseModel(object):
    """Model class.

    Attributes:
        config (myConfigParser): model configuration.
        params (OrderedDict): model physical parameters.
        setup (myConfigParser): model rt setup.
        images (myConfigParser): model images setup.
        grids (dict): grids by rt.
        logger: logging system.
    """

    __metaclass__ = ABCMeta
    logger = get_logger(__name__, __package__+'.log')

    def __init__(self, name=None, config=None, params=None, setup=None,
            images=None, source_names=['source'], locs=[(0,0,0)]):
        """Initialize the model.

        A model can be created with just a name. A generic name is provided,
        thus if none of the parameters are given no exception is raised.

        If a model *name* and a configuration file are given, the name in the
        configuration file will be replaced by *name*.

        The configuration file is standard for each model and contains all the
        information to load/recover a model. Therefore, it includes the params
        and setup file names. If all of the latter and the model name are
        given, the parameter *config* is ignored.

        Parameters:
            name (str): model name.
            config (str): model configuration file.
            params (str or list): model parameter file or files for each model 
                source.
            setup (str): model setup file.
            images (str, optional): model image setup file.
            source_names (list): names of the model sources.
            locs (list): location of each model source.
        """
        # Initialize 
        self.config = myConfigParser(interpolation=ExtendedInterpolation())
        self.params = OrderedDict()
        self.setup = None
        self.images = None
        self.grids = {}

        # Load configuration
        if config and not params and not setup:
            self.load_config(config, name)
        elif params:
            assert len(source_names)==len(locs)
            self.config['DEFAULT'] = {'name': name or 'model', 'setup': setup}
            if isinstance(params, str):
                params = [params] * len(source_names)
            for i,(src_name,loc) in enumerate(zip(source_names, locs)):
                assert len(loc)==3
                self.config[src_name] = {'loc': loc}
                pname = os.path.realpath(os.path.expanduser(params[i]))
                if os.path.isfile(pname):
                    self.config[src_name]['params'] = pname
            self.load_params()
            if setup:
                self.load_setup(setup)
            if images:
                self.load_image_setup(images)

    @abstractmethod
    def fill_grids(self):
        return True

    @abstractmethod
    def load_params(self):
        """Load parameter file for each model source."""
        for section in self.config.sections():
            self.logger.info('Loading parameters for: %s', section)
            self.params[section] = YSO(self.config[section]['params'],
                    loc=self.config.getfloatlist(section, 'loc'))

    @property
    def name(self):
        return self.config['DEFAULT']['name']

    @name.setter
    def name(self, value):
        self.config['DEFAULT']['name'] = value

    def _load_configparser(self, parser, filename):
        # Verify file
        name = os.path.realpath(os.path.expanduser(filename))
        assert os.path.isfile(name)

        # Load file
        parser.read(name)

        return parser

    def _load_parser(self, filename):
        parser = myConfigParser(interpolation=ExtendedInterpolation())
        parser = self._load_configparser(parser, filename)

        return parser

    def load_config(self, config, name=None):
        """Load configuration file.

        Parameters:
            config: configuration file name.
            name: model name.
        """
        # Load file and update name
        self.logger.info('Loading model configuration file')
        self.config = self._load_configparser(self.config, config)
        if name:
            self.name = name
        self.logger.info('Model name: %s', self.name)

        # Load setup and params
        if self.config.get('DEFAULT', 'setup', fallback=None):
            self.load_setup(self.config.get('DEFAULT', 'setup'))
        if self.config.get('DEFAULT', 'images', fallback=None):
            self.load_image_setup(self.config.get('DEFAULT', 'images'))
        self.load_params()

    def load_setup(self, filename):
        """Load setup file.

        Parameters:
            filename: name of the setup file.
        """
        self.logger.info('Loading model setup')
        self.setup = self._load_parser(filename)

    def load_image_setup(self, filename):
        """Load the image setup file.

        Parameters:
            filename: name of the image setup file.
        """
        self.logger.info('Loading image setup')
        self.images = self._load_parser(filename)

    def load_grids(self, rt):
        """Load the grids from file.
        
        Parameters:
            rt: RT transfer config section.
        """
        if rt in self.grids:
            return self.grids[rt]

        else:
            grids = []
            for grid in self.setup.getlist(rt, 'grids'):
                self.logger.info('Loading grid: %s', os.path.basename(grid))
                fname = os.path.realpath(os.path.expanduser(grid))
                grid = load_struct_array(fname, usecols=None)
                grids += [grid]
            
            # other values
            cell_sizes = self.setup.getintlist(rt, 'cell_sizes')
            oversample = self.setup.getintlist(rt, 'oversample')

            # Sort grids by cell size (smallest first)
            self.logger.info('Sorting grids by cell size')
            ind = np.argsort(cell_sizes)
            grids = [grids[i] for i in ind]
            cell_sizes = [cell_sizes[i] for i in ind]
            oversample = [oversample[i] for i in ind]

            # Cache to save time
            self.grids[rt] = grids, cell_sizes, oversample

            return self.grids[rt]
Example #18
class Container(object):
    """Defines a container ABC.

    Attributes:
        name: name of the container
        config: configuration 
        data: all the data contained
    """
    __metaclass__ = ABCMeta
    logger = get_logger(__name__, __package__ + '.log')

    def __init__(self, name, config_file=None, config=None):
        """Defines a new container.

        Parameters:
            name: container name
        """
        assert not (config_file is not None and config is not None)

        self.name = name
        self.data = {}

        if config_file:
            # Load configuration
            self.logger.debug('Loading configuration file: %s', config_file)
            if not os.path.isfile(config_file):
                self.logger.error('File %s does not exist', config_file)
                raise IOError('File %s does not exist' % config_file)
            self.load_config(config_file)
            self.logger.info('Configuration file loaded')
        elif config is not None:
            self.config = config
            self.logger.info('Configuration assigned')
        else:
            self.config = None

    @abstractmethod
    def load_data(self, key, file_name=None):
        """Load *data* from file and save it in *key*"""
        pass

    def __getitem__(self, key):
        if key not in self.data:
            self.load_data(key, None)
        return self.data[key]

    def __setitem__(self, key, value):
        self.data[key] = value

    def load_data_from_keys(self, keys, file_names):
        """Load all the data in each file in *file_names* and store it in
        *key*
        
        Parameters:
            keys (iterable): list of keys for each file
            file_names (iterable): list of files to load
        """
        for k, f in zip(keys, file_names):
            self.load_data(k, f)

    def load_config(self, config_file):
        """Load a configuration file.

        Parameters:
            config_file (str): name of the configuration file
        """
        self.config = myConfigParser(interpolation=ExtendedInterpolation())
        self.config.read(config_file)
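# Minimal concrete Container for illustration (hypothetical, modeled on
# Source.load_data above): load_data is the only abstract method, so a
# FITS-backed container can be as small as this.
from astropy.io import fits

class FITSContainer(Container):

    def load_data(self, key, file_name=None):
        """Load the HDU named by *key* from the configured file."""
        file_name = file_name or self.config[key]['file']
        self.data[key] = fits.open(file_name)[0]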
Example #19
def rebin_2Dsph_to_cart(val, new_pos, yso, pos=None, rebin=False, interp=False,
        min_height_to_disc=None, logger=get_logger(__name__), **kwargs):
    """Function for rebin or interpolate an input grid into a new grid.

    Parameters:
        val: 2-D spherically symmetric grid of values for 1st and 4th quadrant.
        new_pos: new grid centres.
        yso (YSO): YSO object containg the physical parameters.
        pos: original positions in the grid in spherical coords.
        rebin: rebin original grid
        interp: interpolate original grid
        kwargs: keyword arguments for the rebin or interpolate functions.
    """
    # Checks
    kwargs.setdefault('statistic', 'average')

    # Rebin if needed
    if rebin and pos is not None:
        logger.info('Rebinning grid')
        # Current position mesh
        R, TH = np.meshgrid(pos[0], pos[1])
        YI = R * np.sin(TH)
        ZI = R * np.cos(TH)

        # Extend new positions until maximum radial distance
        maxr = np.nanmax(np.sqrt(new_pos[1].value**2+new_pos[0].value**2))
        delta = np.abs(new_pos[1][0].value-new_pos[1][1].value)
        nymax = int(np.ceil(maxr/delta))
        extra = np.nanmax(new_pos[1].value) + \
                np.arange(1,abs(nymax-int(np.sum(new_pos[1]>0)))+1)*delta
        new_pos0 = np.append(-1.*extra, new_pos[1].value)
        new_pos0 = np.append(new_pos0, extra)
        new_pos0 = np.sort(new_pos0) * new_pos[1].unit

        # Meshes
        YN, ZN = np.meshgrid(new_pos0, new_pos[2])

        # Evaluate the function
        if interp:
            val1 = rebin_2Dsph_to_cart(val, (new_pos0, np.array([0.])*new_pos[1].unit,
                new_pos[2]), yso, pos=pos, interp=True, logger=logger)
            val1 = val1[:,0,:]
        else:
            val1 = yso(YN, np.zeros(YN.shape), ZN)
            assert val1.cgs.unit == u.g/u.cm**3
            val1 = val1.cgs.value

        # Walls
        walls = get_walls(new_pos0, new_pos[-1])
        walls = [wall.to(pos[0].unit).value for wall in walls]

        # Rebin 2-D
        val2 = rebin_irregular_nd(val[0,:,:],
                walls[::-1], ZI, YI, statistic=kwargs['statistic'],
                weights=kwargs.get('weights'))

        # Replace nans
        ind = np.isnan(val2)
        val2[ind] = val1[ind]

        # Interpolate
        ZN3, YN3, XN = np.meshgrid(*new_pos[::-1], indexing='ij')
        NEWX = np.sqrt(XN**2+YN3**2)
        val2 = griddata((YN.value.flatten(), ZN.value.flatten()),
                val2.flatten(), (NEWX.to(ZN.unit).value.flatten(), 
                    ZN3.to(ZN.unit).value.flatten()),
                method=kwargs.get('method', 'nearest'))

        return val2.reshape(XN.shape)
    elif interp and pos is not None:
        logger.info('Interpolating grid')
        val1 = map_sph_to_cart_axisym(val[0,:,:], pos[0], pos[1], *new_pos)
        return val1
    else:
        logger.info('Evaluating grid')
        # Meshes
        ZN, YN, XN = np.meshgrid(*new_pos[::-1], indexing='ij')

        # Evaluate the function
        val1 = yso(XN, YN, ZN, ignore_rim=True,
                min_height_to_disc=min_height_to_disc)
        assert val1.cgs.unit == u.g/u.cm**3
        val1 = val1.cgs.value

        return val1
Example #20
def set_physical_props(yso, grids, cell_sizes, save_dir, oversample=3,
        dust_out=None, logger=get_logger(__name__)):
    """Calculate and write the physical properties of the model.

    Parameters:
        yso: the model parameters.
        grids: grids where the model will be evaluated
        template: filename of the *define_model.c* file.
        logger: logging system.
    """
    # Load temperature function
    if yso.params.get('DEFAULT', 'quantities_from', fallback=None):
        hmodel = yso.params.get('DEFAULT', 'quantities_from')
        logger.info('Loading Hyperion model: %s', os.path.basename(hmodel))
        hmodel = ModelOutput(os.path.expanduser(hmodel))
        q = hmodel.get_quantities()
        temperature = np.sum(q['temperature'].array[1:], axis=0)
        r, th = q.r*u.cm, q.t*u.rad
        temp_func = get_temp_func(yso.params, temperature, r, th)
    elif dust_out is not None:
        logger.info('Loading Hyperion model: %s', os.path.basename(dust_out))
        hmodel = ModelOutput(os.path.expanduser(dust_out))
        q = hmodel.get_quantities()
        temperature = np.sum(q['temperature'].array[1:], axis=0)
        r, th = q.r*u.cm, q.t*u.rad
        temp_func = get_temp_func(yso.params, temperature, r, th)
    else:
        raise NotImplementedError

    # Open template
    fitslist = []
    # Start from smaller to larger grid
    for i,grid,cellsz in zip(range(len(grids))[::-1], grids, cell_sizes):
        print('='*80)
        logger.info('Working on grid: %i', i)
        logger.info('Grid cell size: %i', cellsz)
        x = grid[0]['x'] * grid[1]['x']
        y = grid[0]['y'] * grid[1]['y']
        z = grid[0]['z'] * grid[1]['z']
        xi = np.unique(x)
        yi = np.unique(y)
        zi = np.unique(z)

        # Density and velocity
        dens, (vx, vy, vz), temp = phys_oversampled_cart(xi, yi, zi, yso,
                temp_func, oversample=oversample if i!=2 else 5, logger=logger)
        dens[dens.cgs<=0./u.cm**3] = 10./u.cm**3
        temp[np.isnan(temp.value)] = 2.7 * u.K

        # Replace the inner region by rebbining the previous grid
        if i<len(grids)-1:
            # Walls of central cells
            j = cell_sizes.index(cellsz)
            xlen = cell_sizes[j-1] * len(xprev) * u.au
            nxmid = int(xlen.value) // cellsz
            xw = np.linspace(-0.5*xlen.value, 0.5*xlen.value, nxmid+1) * u.au
            ylen = cell_sizes[j-1] * len(yprev) * u.au
            nymid = int(ylen.value) // cellsz
            yw = np.linspace(-0.5*ylen.value, 0.5*ylen.value, nymid+1) * u.au
            zlen = cell_sizes[j-1] * len(zprev) * u.au
            nzmid = int(zlen.value) // cellsz
            zw = np.linspace(-0.5*zlen.value, 0.5*zlen.value, nzmid+1) * u.au
            if nxmid==nymid==nzmid==0:
                logger.warning('The inner grid is smaller than current grid size')
            else:
                logger.info('The inner %ix%ix%i cells will be replaced', nxmid,
                        nymid, nzmid)

                # Rebin previous grid
                # Density
                vol_prev = (cell_sizes[j-1]*u.au)**3
                vol = (cellsz * u.au)**3
                N_cen = vol_prev.cgs * rebin_regular_nd(dens_prev.cgs.value, 
                        zprev.cgs.value, yprev.cgs.value, xprev.cgs.value, 
                        bins=(zw.cgs.value,yw.cgs.value,xw.cgs.value), 
                        statistic='sum') * dens_prev.cgs.unit
                dens_cen = N_cen / vol
                dens_cen = dens_cen.to(dens.unit)
                # Temperature
                T_cen = rebin_regular_nd(temp_prev.value * dens_prev.cgs.value,
                        zprev.cgs.value, yprev.cgs.value, xprev.cgs.value, 
                        bins=(zw.cgs.value,yw.cgs.value, xw.cgs.value), 
                        statistic='sum') * temp_prev.unit * dens_prev.cgs.unit
                T_cen = vol_prev.cgs * T_cen / N_cen.cgs
                T_cen = T_cen.to(temp.unit)

                # Replace
                dens[len(zi)//2-nzmid//2:len(zi)//2+nzmid//2,
                        len(yi)//2-nymid//2:len(yi)//2+nymid//2,
                        len(xi)//2-nxmid//2:len(xi)//2+nxmid//2] = dens_cen
                temp[len(zi)//2-nzmid//2:len(zi)//2+nzmid//2,
                        len(yi)//2-nymid//2:len(yi)//2+nymid//2,
                        len(xi)//2-nxmid//2:len(xi)//2+nxmid//2] = T_cen
        dens_prev = dens
        temp_prev = temp
        xprev = xi
        yprev = yi
        zprev = zi

        # Abundance
        abundance = yso.abundance(temp)
        
        # Linewidth: add the thermal sound speed, c_s^2 = k_B T / (atoms amu),
        # in quadrature to the input non-thermal linewidth
        amu = 1.660531e-24 * u.g
        atoms = yso.params.getfloat('Velocity', 'atoms')
        c_s2 = ct.k_B * temp / (atoms * amu)
        linewidth = np.sqrt(yso.params.getquantity('Velocity', 'linewidth')**2
                + c_s2)

        # Write FITS
        fitsnames = write_fits(os.path.expanduser(save_dir), 
                **{'temp%i'%i: temp.value, 'dens%i'%i: dens.cgs.value, 
                'vx%i'%i: vx.cgs.value, 'vy%i'%i: vy.cgs.value, 'vz%i'%i:
                vz.cgs.value, 'abn%i'%i: abundance,
                'lwidth%i'%i: linewidth.cgs.value})
        fitslist += fitsnames

    return fitslist
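The property writers above lean on `rebin_regular_nd` to conserve the total particle number when regridding; the helper itself is not shown. A minimal sketch of what it could look like, assuming it wraps `scipy.stats.binned_statistic_dd` (the actual implementation may differ):

import numpy as np
from scipy.stats import binned_statistic_dd

def rebin_regular_nd(values, *axes, **kwargs):
    """Rebin an n-D array sampled on regular axes onto coarser bins.

    Parameters:
        values: n-D array of cell values.
        axes: cell-center coordinates, one array per array dimension.
        bins (keyword): sequence of bin-wall arrays, one per dimension.
        statistic (keyword, default='sum'): aggregation statistic.
    """
    bins = kwargs.pop('bins')
    statistic = kwargs.pop('statistic', 'sum')
    # Build one sample point per cell from the axis centers
    points = np.meshgrid(*axes, indexing='ij')
    sample = np.array([p.ravel() for p in points]).T
    stat, _, _ = binned_statistic_dd(sample, values.ravel(),
            statistic=statistic, bins=bins)
    return stat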
Example #21
def get_physical_props_single(yso, grids, cell_sizes, save_dir, oversample=[3],
        dust_out=None, logger=get_logger(__name__)):
    """Calculate and write the physical properties a model with one source.

    Parameters:
        yso: the model parameters.
        grids: grids where the model will be evaluated.
        cell_sizes: cell size of each grid.
        save_dir: directory where the output FITS files are written.
        oversample (list, default=[3]): oversampling factor for each grid.
        dust_out (str, default=None): Hyperion output with the dust
            temperature.
        logger: logging system.
    """
    # Models with more than one source should be treated in another function
    # because the oversampling should be different.

    # FITS list
    fitslist = []

    # Validate oversample
    if len(oversample)==1 and len(oversample)!=len(cell_sizes):
        oversample = oversample * len(cell_sizes)
    elif len(oversample)==len(cell_sizes):
        pass
    else:
        raise ValueError('The length of oversample != number of grids')

    # Temperature function
    if yso.params.get('DEFAULT', 'quantities_from'):
        hmodel = yso.params.get('DEFAULT', 'quantities_from')
        logger.info('Loading Hyperion model: %s', os.path.basename(hmodel))
        hmodel = ModelOutput(os.path.expanduser(hmodel))
        q = hmodel.get_quantities()
        temperature = np.sum(q['temperature'].array[1:], axis=0)
        r, th = q.r*u.cm, q.t*u.rad
        temp_func = get_temp_func(yso.params, temperature, r, th)
    elif dust_out is not None:
        logger.info('Loading Hyperion model: %s', os.path.basename(dust_out))
        hmodel = ModelOutput(os.path.expanduser(dust_out))
        q = hmodel.get_quantities()
        temperature = np.sum(q['temperature'].array[1:], axis=0)
        r, th = q.r*u.cm, q.t*u.rad
        temp_func = get_temp_func(yso.params, temperature, r, th)
    else:
        raise NotImplementedError

    # Start from smaller to larger grid
    inv_i = len(grids) - 1
    for i,(grid,cellsz) in enumerate(zip(grids, cell_sizes)):
        # Initialize grid axes
        print('='*80)
        logger.info('Working on grid: %i', i)
        logger.info('Oversampling factor: %i', oversample[i])
        logger.info('Grid cell size: %i', cellsz)
        # Multiply by units
        x = grid[0]['x'] * grid[1]['x']
        y = grid[0]['y'] * grid[1]['y']
        z = grid[0]['z'] * grid[1]['z']
        xi = np.unique(x)
        yi = np.unique(y)
        zi = np.unique(z)

        # Density and velocity
        dens, (vx, vy, vz), temp = phys_oversampled_cart(
                xi, yi, zi, yso, temp_func, oversample=oversample[i], 
                logger=logger)

        # Replace out of range values
        dens[dens.cgs<=0./u.cm**3] = 10./u.cm**3
        temp[np.logical_or(np.isnan(temp.value), temp.value<2.7)] = 2.7 * u.K

        # Replace the inner region by rebinning the previous grid
        if i>0:
            # Walls of central cells
            j = cell_sizes.index(cellsz)
            xlen = cell_sizes[j-1] * len(xprev) * u.au
            nxmid = int(xlen.value) // cellsz
            xw = np.linspace(-0.5*xlen.value, 0.5*xlen.value, nxmid+1) * u.au
            ylen = cell_sizes[j-1] * len(yprev) * u.au
            nymid = int(ylen.value) // cellsz
            yw = np.linspace(-0.5*ylen.value, 0.5*ylen.value, nymid+1) * u.au
            zlen = cell_sizes[j-1] * len(zprev) * u.au
            nzmid = int(zlen.value) // cellsz
            zw = np.linspace(-0.5*zlen.value, 0.5*zlen.value, nzmid+1) * u.au
            if nxmid==nymid==nzmid==0:
                logger.warning('The inner grid is smaller than one cell of the current grid')
            else:
                logger.info('The inner %ix%ix%i cells will be replaced', nxmid,
                        nymid, nzmid)

                # Rebin previous grid
                # Density
                vol_prev = (cell_sizes[j-1]*u.au)**3
                vol = (cellsz * u.au)**3
                N_cen = vol_prev.cgs * rebin_regular_nd(dens_prev.cgs.value, 
                        zprev.cgs.value, yprev.cgs.value, xprev.cgs.value, 
                        bins=(zw.cgs.value,yw.cgs.value,xw.cgs.value), 
                        statistic='sum') * dens_prev.cgs.unit
                dens_cen = N_cen / vol
                dens_cen = dens_cen.to(dens.unit)
                # Temperature
                T_cen = rebin_regular_nd(temp_prev.value * dens_prev.cgs.value,
                        zprev.cgs.value, yprev.cgs.value, xprev.cgs.value, 
                        bins=(zw.cgs.value,yw.cgs.value, xw.cgs.value), 
                        statistic='sum') * temp_prev.unit * dens_prev.cgs.unit
                T_cen = vol_prev.cgs * T_cen / N_cen.cgs
                T_cen = T_cen.to(temp.unit)

                # Replace
                dens[len(zi)//2-nzmid//2:len(zi)//2+nzmid//2,
                        len(yi)//2-nymid//2:len(yi)//2+nymid//2,
                        len(xi)//2-nxmid//2:len(xi)//2+nxmid//2] = dens_cen
                temp[len(zi)//2-nzmid//2:len(zi)//2+nzmid//2,
                        len(yi)//2-nymid//2:len(yi)//2+nymid//2,
                        len(xi)//2-nxmid//2:len(xi)//2+nxmid//2] = T_cen
        dens_prev = dens
        temp_prev = temp
        xprev = xi
        yprev = yi
        zprev = zi

        # Linewidth and abundance
        linewidth = yso.linewidth(x, y, z, temp).to(u.cm/u.s)

        # Abundance per molecule
        abns = {}
        abn_fmt = 'abn_%s_%i'
        j = 1
        for section in yso.params.sections():
            if not section.lower().startswith('abundance'):
                continue
            mol = yso[section, 'molecule']
            abns[abn_fmt % (mol, inv_i)] = yso.abundance(x, y, z, temp, 
                    index=j, ignore_min=False)
            j = j+1

        # Write FITS
        kw = {'temp%i'%inv_i: temp.value, 'dens%i'%inv_i: dens.cgs.value, 
                'vx%i'%inv_i: vx.cgs.value, 'vy%i'%inv_i: vy.cgs.value, 
                'vz%i'%inv_i: vz.cgs.value, 
                'lwidth%i'%inv_i: linewidth.cgs.value}
        kw.update(abns)
        fitsnames = write_fits(os.path.expanduser(save_dir), 
                **kw)
        fitslist += fitsnames
        inv_i = inv_i - 1

    return fitslist
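As a sanity check of the nested-grid arithmetic above, take hypothetical cell sizes of 50 au (previous grid, 128 cells per axis) and 100 au (current grid); the inner 64 cells per axis of the current grid get replaced:

import numpy as np
import astropy.units as u

# Hypothetical nested pair: 128 cells of 50 au inside a grid of 100 au cells
cell_prev, cellsz, ncells_prev = 50, 100, 128
xlen = cell_prev * ncells_prev * u.au   # the inner grid spans 6400 au
nxmid = int(xlen.value) // cellsz       # 64 coarse cells to replace
xw = np.linspace(-0.5*xlen.value, 0.5*xlen.value, nxmid+1) * u.au
print(nxmid, xw[0], xw[-1])             # 64, walls from -3200 au to 3200 au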
Example #22
def phys_oversampled_cart(x, y, z, yso, temp_func, oversample=5, 
        logger=get_logger(__name__)):
    """Calculate the density and velocity distributions by oversampling the
    grid.

    This function first creates a new oversampled grid and then rebin this grid
    to the input one by taking weighted averages of the physical quantities.

    Parameters:
        yso (YSO object): the object containing the model parameters.
        xlim (tuple): lower and upper limits of the x-axis.
        ylim (tuple): lower and upper limits of the y-axis.
        zlim (tuple): lower and upper limits of the z-axis.
        cellsize (float): physical size of the cell in the coarse grid.
        oversample (int, default=5): oversampling factor.
        logger (logging): logger manager.
    """
    # Hydrogen mass
    mH = ct.m_p + ct.m_e

    # Special case
    if oversample==1:
        logger.info('Evaluating the grid')
        ZN, YN, XN = np.meshgrid(z, y, x, indexing='ij')
        n, vx, vy, vz, temp = yso.get_all(XN, YN, ZN,
                temperature=temp_func, component='gas')
        n = n / (2.33 * mH)
        temp[temp<2.7*u.K] = 2.7*u.K
        assert n.cgs.unit == 1/u.cm**3
        assert temp.unit == u.K
        assert vx.cgs.unit == u.cm/u.s
        assert vy.cgs.unit == u.cm/u.s
        assert vz.cgs.unit == u.cm/u.s
        return n, (vx, vy, vz), temp

    logger.info('Resampling grid')
    # Create new grid
    dx = np.abs(x[0]-x[1])
    dy = np.abs(y[0]-y[1])
    dz = np.abs(z[0]-z[1])
    xw,xstep = np.linspace(np.min(x)-dx/2., np.max(x)+dx/2., num=len(x)*oversample+1,
            endpoint=True, retstep=True)
    xover = (xw[:-1] + xw[1:]) / 2.
    logger.info('Resampled grid x-step = %s', xstep.to(u.au))
    yw,ystep = np.linspace(np.min(y)-dy/2., np.max(y)+dy/2., num=len(y)*oversample+1,
            endpoint=True, retstep=True)
    yover = (yw[:-1] + yw[1:]) / 2.
    logger.info('Resampled grid y-step = %s', ystep.to(u.au))
    zw,zstep = np.linspace(np.min(z)-dz/2., np.max(z)+dz/2., num=len(z)*oversample+1,
            endpoint=True, retstep=True)
    zover = (zw[:-1] + zw[1:]) / 2.
    logger.info('Resampled grid z-step = %s', zstep.to(u.au))
    ZN, YN, XN = np.meshgrid(zover, yover, xover, indexing='ij')

    # Volumes
    vol = dx*dy*dz
    vol_over = xstep*ystep*zstep

    # Number density
    bins = get_walls(z, y, x)
    n_over, vx_over, vy_over, vz_over, temp = yso.get_all(XN, YN, ZN,
            temperature=temp_func, component='gas', nquad=oversample)
    n_over = n_over / (2.33 * mH)
    assert n_over.cgs.unit == 1/u.cm**3
    N = vol_over * rebin_regular_nd(n_over.cgs.value, zover, yover, xover, bins=bins,
            statistic='sum') * n_over.cgs.unit
    dens = N / vol
    assert dens.cgs.unit == 1/u.cm**3

    # Temperature
    temp = rebin_regular_nd(temp.value*n_over.cgs.value, zover, yover, 
            xover, bins=bins, statistic='sum') * \
                    temp.unit * n_over.cgs.unit
    temp = vol_over * temp / N
    temp[temp<2.7*u.K] = 2.7*u.K
    assert temp.unit == u.K

    # Velocity
    assert vx_over.cgs.unit == u.cm/u.s
    assert vy_over.cgs.unit == u.cm/u.s
    assert vz_over.cgs.unit == u.cm/u.s
    v = []
    for vi in (vx_over, vy_over, vz_over):
        vsum = rebin_regular_nd(vi.cgs.value*n_over.cgs.value, zover, yover, 
                xover, bins=bins, statistic='sum') * \
                        vi.cgs.unit * n_over.cgs.unit
        vsum = vol_over * vsum / N
        vsum[np.isnan(vsum)] = 0.
        assert vsum.cgs.unit == u.cm/u.s
        v += [vsum]

    return dens, v, temp
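The density-weighted rebinning above uses bin walls derived from the coarse cell centers via `get_walls`. A minimal sketch of such a helper, assuming uniformly spaced centers (the real one may also handle irregular axes):

import numpy as np

def get_walls(*axes):
    """Compute bin walls from the cell centers of each axis."""
    walls = []
    for ax in axes:
        ax = np.atleast_1d(ax)
        step = ax[1] - ax[0]
        # Midpoints between centers, extended by half a step at each end
        walls.append(np.concatenate(([ax[0] - step/2.],
                (ax[:-1] + ax[1:]) / 2., [ax[-1] + step/2.])))
    return walls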
示例#23
0
def mollie_pipe(model, from_file=False, logger=get_logger(__name__)):
    """Run the line RT.

    """
    MOLLIE = os.path.abspath(os.path.join(THIS, '../mollie'))
    rt = 'mollie'

    # Load grid 
    grids, cell_sizes, oversample = model.load_grids(rt)

    # Configure and write model
    if len(model.params.keys())==1:
        key = model.params.keys()[0]
        fitslist = get_physical_props_single(model.params[key], grids, 
                cell_sizes, MOLLIE, oversample=oversample, logger=logger)
    else:
        raise NotImplementedError
    logger.debug('FITS files: %r', fitslist)

    # Other configurations
    nproc = model.setup.getint(rt, 'nproc')
    server = model.setup.get(rt, 'server', fallback=None)
    logger.info('Number of processes: %i', nproc)
    if server:
        logger.info('Will run in server: %s', server)
    model_dir = os.path.expanduser(model.config.get('DEFAULT', 'model_dir',
        fallback='./'))
    model_dir = os.path.join(model_dir, 'Model_%s' % model.name)
    logger.info('Model directory: %s', model_dir)

    # Check the model directory tree exists or create it
    try:
        os.makedirs(model_dir)
        logger.info('Model directory created: %s', os.path.basename(model_dir))
    except OSError:
        logger.error('Model directory already exists')
        exit()

    # Run model
    mollie_file = os.path.join(MOLLIE, 'ModelCube0')
    setup_template = os.path.join(MOLLIE, 'src/setup_template.c')
    original_file = mollie_file
    file_track = {}
    for sect in model.images.sections():
        print('='*80)
        logger.info('Running model for: %s', sect)
        model_name = rt.capitalize()+'_'+sect.upper()

        # Move abundances
        for i in range(len(cell_sizes)):
            abn_fmt = 'abn_%s_%i.fits'
            orig_abn = [f for f in fitslist if (abn_fmt % (sect, i)) in f]
            dest_abn = orig_abn[0].replace(abn_fmt % (sect, i),
                    'abn%i.fits' % i)
            logger.info('Copying: %s -> %s', os.path.basename(orig_abn[0]),
                    os.path.basename(dest_abn))
            shutil.copy(orig_abn[0], dest_abn)

        # Setup RT for line
        # When several sources are defined, the observed inclination angle is
        # set by the first source in the list. The definition of the source
        # distributions must account for the relative angles between the
        # sources.
        mname = model.params.keys()[0]
        incl = model.params[mname]['Geometry','incl']
        phi = model.params[mname]['Geometry','phi']
        write_setup(sect, model, setup_template, phi=phi, incl=incl)

        # line RT
        if from_file:
            mollie_file = os.path.join(model_dir, model_name)

            # Run if it does not exist
            if not os.path.isfile(mollie_file):
                run_mollie(MOLLIE, logger=logger, np=nproc, server=server,
                        from_email='*****@*****.**',
                        to_email='*****@*****.**')

                # Move to model directory
                logger.info('Moving model %s -> %s',
                        os.path.basename(original_file), mollie_file)
                shutil.move(original_file, mollie_file)
            file_track[sect] = mollie_file
        else:
            run_mollie(MOLLIE, logger=logger, np=nproc, server=server, 
                    from_email='*****@*****.**', 
                    to_email='*****@*****.**')

            # Move to model directory
            logger.info('Moving model %s -> %s', os.path.basename(mollie_file),
                    model_name)
            shutil.move(mollie_file, os.path.join(model_dir, model_name))
            file_track[sect] = os.path.join(model_dir, model_name)

    # Move FITS files
    for ffile in fitslist:
        shutil.move(ffile, model_dir)

    return file_track
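`run_mollie` itself is not shown. A purely illustrative sketch of what it might do, assuming MOLLIE compiles to an executable launched with `mpirun` (the real helper also handles remote servers and the e-mail notifications seen above, which are omitted here):

import subprocess

def run_mollie(mollie_dir, logger=None, np=4, server=None, **kwargs):
    """Launch a compiled MOLLIE binary with MPI (hypothetical layout)."""
    cmd = ['mpirun', '-np', str(np), './mollie']
    if server is not None:
        # Run the same command on a remote server instead
        cmd = ['ssh', server] + cmd
    if logger is not None:
        logger.info('Running: %s', ' '.join(cmd))
    subprocess.check_call(cmd, cwd=mollie_dir)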
Example #24
class modelBayes(BaseModel):
    """Manages the models for a bayesian fit

    Attributes:
        config (myConfigParser): model configuration.
        params (OrderedDict): model physical parameters.
        setup (myConfigParser): model rt setup.
        images (myConfigParser): model images setup.
        priors (OrderedDict): parameters for the prior calculation.
        db (sqlite3): database.
        logger: logging system.
    """

    logger = get_logger(__name__, __package__+'.log')

    def __init__(self, config, database='models_db.sqlite'):
        """Initialize model.

        A timestamp will be assigned as the model name suffix.

        Parameters:
            config (str): configuration file name.
        """
        # Initialize model
        name = get_timestamp()
        super(modelBayes, self).__init__(name=name, config=config)
        #self.config = self.config['source']

        # Database
        self.db = None
        self.logger.info('Model database: %s', database)
        self._load_database(database)

        # Likelihood
        self.priors = OrderedDict.fromkeys(self.config.sections())
        self.load_priors()

    def load_params(self):
        """Load parameter file for each model source."""
        print('-'*50)
        for section in self.config.sections():
            self.logger.info('Loading parameters for: %s', section)
            self.params[section] = YSO(self.config[section]['params'])
            print('-'*50)

    def fill_grids(self):
        super(modelBayes, self).fill_grids()

    @property
    def param_list(self):
        pars = []
        for yso in self.params.values():
            pars += yso.param_list
        return pars

    def _filter_params(self, param):
        return param not in self.priors and \
                param not in self.config['db_ignore_params']

    def load_priors(self):
        """Load the prior for the parameters of each source"""
        print('-'*50)
        for key in self.priors.keys():
            self.logger.info('Loading priors for: %s', key)
            self.logger.info('Priors file: %s', self.config[key]['priors'])
            self.priors[key] = self._load_parser(self.config[key]['priors'])
            print('-'*50)

    def get_logprior(self, source, param):
        """Get the natural logarithm of the prior value for a parameter.

        Parameters:
            source (str): model source name
            param (str): parameter name
        """
        if self.priors[source][param]['type']=='uniform':
            section = self.priors[source][param]['section']
            val = self.params[source][section, param]
            vmin = self.priors[source].getquantity(param, 'min')
            vmax = self.priors[source].getquantity(param, 'max')
            if vmin < val < vmax:
                return 0.
            else:
                return -np.inf
        else:
            raise NotImplementedError

    def get_logpriors(self, source=None):
        """Get the natural logarithm of the model parameter priors.

        If a source is not specified, the log-priors of all the sources are
        added together.

        Parameters:
            source (str, optional): source name
        Returns:
            priors (float): the natural logarithm of all the priors.
        """
        assert source is None or source in self.config

        prior = 0.

        for src, pr in self.priors.items():
            if source and src==source:
                prior = sum(self.get_logprior(src, key) \
                        for key in pr.sections())
                break
            elif source:
                continue
            else:
                prior += sum(self.get_logprior(src, key) \
                        for key in pr.sections())

        return prior

    # This only works for a model with one source
    def get_pa(self):
        src = self.params.keys()[0]
        if len(self.params.keys())>1:
            self.logger.warn('Will use PA of %s', src)
        return self.params[src]['Geometry','pa']

    # This only works for a model with one source
    def get_vlsr(self):
        src = self.params.keys()[0]
        if len(self.params.keys())>1:
            self.logger.warn('Will use v_LSR of %s', src)
        return self.params[src]['Velocity','vlsr']

    def get_dimensions(self):
        """Get the number of parameters to fit"""
        ndim = 0
        for item in self.priors.values():
            ndim += len(item.sections())
        return ndim

    def update_params(self, values):
        """Update the parameters of the model

        The order of the parameters in values follows the model source order
        in the config file first and then the parameter order in the prior
        file. Therefore, only parameters in the prior can be changed.

        If the model is already in the database, the model name is updated to
        that of the model in the database and its ID is returned.

        Parameters:
            values (list): new values of the parameters.

        Returns:
            validate (bool or str): the ID of the stored model if it is
                already in the database, True otherwise.

        Notes:
            If the values do not have units, the units of the current parameter
            are assigned by the class managing the distribution.
        """
        # Get timestamp
        self.name = get_timestamp()

        # Change model; the index runs over all sources so that the values
        # are consumed in the documented order
        i = 0
        for src in self.params.keys():
            for key in self.priors[src].sections():
                section = self.priors[src].get(key, 'section', fallback=None)
                if section is None or section.lower()=='all':
                    val = values[i]
                    for section in self.params[src].sections():
                        self.params[src].update(section, key, val)
                else:
                    self.params[src].update(section, key, values[i])
                i = i + 1

            # Update database
            if len(self.params.keys())==1:
                validate = self.update_database(table='models')
            else:
                validate = self.update_database(table=src, validate=False)

        return validate

    def _load_database(self, filename, table='models', update=False):
        """Load models database

        Parameters:
            filename (str): database file name
            table (str, optional): table name
        """
        if len(self.params.keys())==1:
            key = self.params.keys()[0]
            keys, vals, fmts = self.params[key].flatten(
                    ignore_params=self.config.getlist(key, 'db_ignore_params'),
                    get_db_format=True)
            keys = ['ID'] + keys
            vals = [self.name] + vals
            fmts = ['TEXT PRIMARY KEY'] + fmts
            self.db = load_database(filename, table, keys, fmts)
            if update:
                self.update_database(table=table, keys=keys, vals=vals,
                        validate=False)
        else:
            # Models with more than 1 source are exceptional and probably unique
            for src, yso in self.params.items():
                keys, vals, fmts = yso.flatten()
                if self.db is None:
                    self.db = load_database(filename, src, keys, fmts)
                else:
                    self.db.create_table(src, keys, fmts)

                self.update_database(table=src, keys=keys, vals=vals,
                        validate=False)

    def update_database(self, table='models', keys=None, vals=None, 
            validate=True):
        """Update the database with the current model parameters
        
        Parameters:
            table (str, optional): table name
            keys (list, optional): table column names
            vals (list, optional): values to store
            validate (bool, optional): whether to validate the parameters
        """

        if vals is None or keys is None:
            if len(self.params.keys())==1:
                source = self.params.keys()[0]
            else:
                source = None
            keys, vals = self.params[source or table].flatten(
                    ignore_params=self.config.getlist(source or table,
                        'db_ignore_params'),
                    get_db_format=False)
            keys = ['ID'] + keys
            vals = [self.name] + vals

        if validate:
            valid = self._validate_db_params(table, keys, vals)
            if valid:
                self.db.update(table, vals)
            return valid
        else:
            self.db.update(table, vals)
            return True

    def _validate_db_params(self, table, keys, vals):
        """Query the database to find suplicates.

        Parameters:
            keys (list): column names. First comlumn is used as ID.
            vals (list): values to test
        """
        cond = ' AND '.join('{0}=:{0}'.format(key) for key in keys)
        self.db.query(table, 'ID', cond, dict(zip(keys, vals)))
        res = self.db.fetchone()

        if res is None:
            return True
        else:
            self.logger.warn('Found a duplicate model: %s', res['ID'])
            return res['ID']

    def get_random_initial(self, n):
        """Create an initial guess based on the input parameters
        
        Parameters:
            n (int): number of walkers
        """
        p0 = []
        
        for src, val in self.priors.items():
            for prior in val.sections():
                section = val[prior]['section']
                inval = self.params[src][section,prior]
                if val[prior]['type'].lower() == 'uniform':
                    low = val.getquantity(prior, 'min').to(inval.unit)
                    high = val.getquantity(prior, 'max').to(inval.unit)
                    p0 += [np.random.uniform(low.value, high.value, n)]
                else:
                    raise NotImplementedError

        return np.array(p0).T

    def get_rt_list(self):
        """Get a list with all the RTs used by the images"""
        rts = []
        for sec in self.images.sections():
            rt = self.images.get(sec, 'rt')
            if rt not in self.setup.sections():
                self.logger.warn('Dropping rt: %s', rt)
            else:
                rts += [rt]
        
        return set(rts)
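`modelBayes` exposes everything an ensemble sampler needs: the number of dimensions, initial walker positions, prior evaluation, and parameter updates. A minimal sketch of how it could drive emcee together with a `log_likelihood` like the one defined earlier; `data`, `outconfig`, and the configuration file name are assumptions, not part of this codebase:

import numpy as np
import emcee  # assumed sampler; any driver with the same interface works

def log_posterior(values, model, data, outconfig):
    """Combine the log-priors with the log-likelihood for one walker."""
    model.update_params(values)
    lp = model.get_logpriors()
    if not np.isfinite(lp):
        return -np.inf
    return lp + log_likelihood(data, model, outconfig)

model = modelBayes('model_config.cfg')  # hypothetical configuration file
ndim, nwalkers = model.get_dimensions(), 32
sampler = emcee.EnsembleSampler(nwalkers, ndim, log_posterior,
        args=(model, data, outconfig))  # data/outconfig defined elsewhere
sampler.run_mcmc(model.get_random_initial(nwalkers), 500)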