Example #1
def getInterpModels(interpolatedDimensions, SQL, boundaryValues, deckShape):

    conn = modeldb.getModelDBConn()

    dimensions = ', '.join(interpolatedDimensions)

    if len(dimensions) > 0:
        positions = conn.execute('select %s %s' % (
            dimensions,
            SQL,
        ), boundaryValues).fetchall()

        # Workaround: if each position is a length-one tuple, unpack it to a
        # plain float rather than keeping the tuple.

        for i, position in enumerate(positions):
            if len(position) == 1:
                positions[i] = position[0]

    else:
        positions = []

    modelGrid = conn.execute('select deck %s' % (SQL, ),
                             boundaryValues).fetchall()

    conn.close()
    return positions, np.array(
        [item.reshape(deckShape) for item in zip(*modelGrid)[0]])
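A minimal usage sketch for this helper, assuming the SQL fragment and boundary values come from getNearestNeighbours and the deck shape from ATMOSPHY_CONF as in the later examples; the model name and stellar parameters below are hypothetical:

# Hypothetical parameters; getNearestNeighbours and modeldb are shown in the other examples.
interpolatedDimensions, SQL, boundaryValues = getNearestNeighbours(
    'castelli-kurucz', 5750.0, 4.5, 0.0, k=2.0, alpha=0.0, level=1)

conn = modeldb.getModelDBConn()
deckShape = conn.execute('select ROWS, COLS from ATMOSPHY_CONF '
                         'where MODEL_NAME=?', ('castelli-kurucz',)).fetchone()
conn.close()

positions, decks = getInterpModels(interpolatedDimensions, SQL, boundaryValues, deckShape)
# positions holds the grid coordinates of the neighbouring models (empty when no
# interpolation is needed); decks holds their decks reshaped to deckShape.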
Example #2
def getInterpModels(interpolatedDimensions, SQL, boundaryValues, deckShape):

    conn = modeldb.getModelDBConn()

    dimensions = ', '.join(interpolatedDimensions)

    if len(dimensions) > 0:
        positions = conn.execute('select %s %s' % (dimensions, SQL,),
                                 boundaryValues).fetchall()

        # Workaround: if each position is a length-one tuple, unpack it to a
        # plain float rather than keeping the tuple.
        for i, position in enumerate(positions):
            if len(position) == 1:
                positions[i] = position[0]

    else:
        positions = []

    modelGrid = conn.execute('select deck %s' % (SQL,),
                             boundaryValues).fetchall()

    conn.close()
    return positions, np.array([item.reshape(deckShape) for item in zip(*modelGrid)[0]])
Example #3
def installedModels():
    """
    
    Reads from the database and returns the models on disk
    
    """
    conn = modeldb.getModelDBConn()
    modelData = conn.execute("SELECT model_name FROM atmosphy_conf WHERE installed=1")
    modelNames = [str(item[0]) for item in modelData]

    #modelNames.remove('models')
    
    return modelNames
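Since installedModels only reads the ATMOSPHY_CONF table, the query it relies on can be exercised against a throwaway in-memory database. A minimal, self-contained sketch, assuming only the two columns the query touches (the rows inserted here are hypothetical):

import sqlite3

conn = sqlite3.connect(':memory:')
conn.execute('create table atmosphy_conf (model_name text, installed integer)')
conn.executemany('insert into atmosphy_conf values (?, ?)',
                 [('castelli-kurucz', 1), ('kurucz-odfnew', 0)])

modelData = conn.execute("SELECT model_name FROM atmosphy_conf WHERE installed=1")
print([str(item[0]) for item in modelData])   # ['castelli-kurucz']
conn.close()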
Example #4
def importModel(modelName, modelID):
    
    "importing model into the database"
    
    
    atmosphy_path = os.path.expanduser('~/.atmosphy')
    
    conn = modeldb.getModelDBConn()
    deckShape = conn.execute('select ROWS, COLS from ATMOSPHY_CONF '
                             'where MODEL_NAME=?', (modelName,)).fetchone()
    for fname in glob(os.path.join(atmosphy_path, 'models', modelName, '*.dat')):
        modelSrc = open(fname).read()
        modelsRawData = re.split(r'B?EGIN\s+ITERATION\s+\d+\s+COMPLETED', modelSrc)
        
        for model in modelsRawData:
            # re.split leaves empty strings / bare newlines between matches; skip them
            if model == '\n' or model == '': continue
            
            teffLoggMatch = re.search(r'T?EFF\s+(\d+\.\d*)\s+GRAVITY\s+(\d+\.\d*)', model)
            
            # Searching for metallicity, alpha, microturbulence, mixing length, and PRADK
            metalAlphaMatch = re.search(r'\[([\s+-]?\d+\.\d+)([aAbB]?)\]?', model)
            microMatch = re.search(r'VTURB[ =]?(\d+[\.\d+]?)', model)
            mixLengthMatch = re.search(r'ONVECTION (OFF|ON)\s+(\d+\.\d+)', model)
            pradkMatch = re.search(r'P?RADK (\d+\.\d+E[+-]?\d+)', model)
            
            #Checking the integrity of the model

            if teffLoggMatch is None:
                raise casKurImportException(
                    "Current Model does not contain effective temperature:"
                    "\n\n--------\n\n%s" % (model,))
    
                
                
            if metalAlphaMatch is None:
                # A few known files are missing the metallicity header; skip those decks
                knownProblemFiles = ['ap00k2.dat', 'ap00k4.dat', 'asun.dat']
                if os.path.basename(fname) in knownProblemFiles:
                    continue
                raise casKurImportException(
                    "Current Model does not contain metallicity information:"
                    "\n\n--------\n\n%s" % (model,))
                
            if mixLengthMatch is None:
                raise casKurImportException(
                    "Current Model does not contain mixing length information:"
                    "\n\n--------\n\n%s" % (model,))
    
            
            #reading in the model parameters
            convertAlpha = {'':0.0, 'a':0.4, 'b':1.0}
            
            teff   = float(teffLoggMatch.groups()[0])
            logg   = float(teffLoggMatch.groups()[1])
            feh    = float(metalAlphaMatch.groups()[0])
            alpha  = convertAlpha[metalAlphaMatch.groups()[1].lower()]
            micro  = float(microMatch.groups()[0])
            mixing = float(mixLengthMatch.groups()[1])
            pradk  = float(pradkMatch.groups()[0])
            
            #reading model, pickling it and compressing it
            deck = readDeck(model)
            
            
            #fix for deck with only 71 points (there seems to be only one in ap05k2odfnew.dat)
            if modelName == 'castelli-kurucz' and deck.shape[0] == 71:
                continue
            if deck.shape != deckShape:
                raise ValueError('Deck shape mismatch: expected %s, got %s\n'
                                 'This should not happen; please contact the developers of atmosphy' %
                                 (deckShape, deck.shape))
            
            
            #writing to db
            modeldb.insertModelData(conn, modelName, [modelID, teff, logg, feh, alpha, micro, mixing, deck])
    
    logging.info('Added all decks from model %s to db' % modelName)
    logging.info('Updating the atmosphy_conf table')
    conn.execute('update ATMOSPHY_CONF set rows=?,'
                 'cols=?, INSTALLED=? where ID=?',
                 (deck.shape[0], deck.shape[1], 1, modelID))
    conn.commit()
    conn.close()
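The heart of importModel is the set of regular expressions that pull the stellar parameters out of each deck header. A short, self-contained sketch of that parsing applied to a synthetic (hypothetical) Kurucz-style header string:

import re

header = ('TEFF   5750.  GRAVITY 4.50000 LTE [0.0a] VTURB=2.0  '
          'CONVECTION ON   1.25 PRADK 1.4828E-02')

teffLoggMatch   = re.search(r'T?EFF\s+(\d+\.\d*)\s+GRAVITY\s+(\d+\.\d*)', header)
metalAlphaMatch = re.search(r'\[([\s+-]?\d+\.\d+)([aAbB]?)\]?', header)
microMatch      = re.search(r'VTURB[ =]?(\d+[\.\d+]?)', header)
mixLengthMatch  = re.search(r'ONVECTION (OFF|ON)\s+(\d+\.\d+)', header)
pradkMatch      = re.search(r'P?RADK (\d+\.\d+E[+-]?\d+)', header)

convertAlpha = {'': 0.0, 'a': 0.4, 'b': 1.0}
teff   = float(teffLoggMatch.groups()[0])                   # 5750.0
logg   = float(teffLoggMatch.groups()[1])                   # 4.5
feh    = float(metalAlphaMatch.groups()[0])                 # 0.0
alpha  = convertAlpha[metalAlphaMatch.groups()[1].lower()]  # 0.4
micro  = float(microMatch.groups()[0])                      # 2.0
mixing = float(mixLengthMatch.groups()[1])                  # 1.25
pradk  = float(pradkMatch.groups()[0])                      # 0.014828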
Example #5
def download(modelName, clobber=False, verbose=True, database='~/.atmosphy/atmosphy.db3'):

    """
    
    Download the given model(s) from the Kurucz website and load them into your database.
    
    
    Parameters:
    ===========
    
    modelName   :   string
                    Downloads the model name supplied, or any models matching the wildmask supplied.
    
                    
    Available models:
    =================
    
        Kurucz          :   Kurucz's grids of model atmospheres, as described in X
        
        Kurucz-NOVER    :   Kurucz models with no convective overshooting, computed by Fiorella Castelli
                            [[email protected]] in Trieste. The convective treatment is described in
                            Castelli, Gratton, and Kurucz 1997, A&A 328, 841.
                            
        Kurucz-ODFNEW   :   Kurucz models as NOVER but with newly computed ODFs with better opacities
                            and better abundances.
                            
        Kurucz-AODFNEW  :   Kurucz models as per ODFNEW but with alpha enhancement. The alpha-process
                            elements (O, Ne, Mg, Si, S, Ar, Ca, and Ti) are enhanced by +0.4 in the log,
                            with an Fe abundance of -4.53.

    
    
    Examples:
    =========
    
        download('Kurucz')          :   Download the standard Kurucz grid models.
        
        download('Kurucz-*ODFNEW')  :   This will download both the Kurucz-AODFNEW and the Kurucz-ODFNEW
                                        models, as they both match the wildmask given.

    """

    
    atmosphy_path = os.path.expanduser('~/.atmosphy/')
    models_path = os.path.join(atmosphy_path, 'models')

    conn = modeldb.getModelDBConn()
    
    
    
    modelID = conn.execute('select ID from ATMOSPHY_CONF '
                           'where MODEL_NAME = ?',
                           (modelName,)).fetchall()
    
    
    # Check that this model exists and is not already installed
    if len(modelID) == 1:
        modelID = modelID[0][0]
        installed = conn.execute('select INSTALLED from ATMOSPHY_CONF '
                                 'where ID = ?', (modelID,)).fetchone()[0]
        if installed == 1:
            raise ValueError('Model %s is already installed.' % modelName)
    elif len(modelID) == 0:
        availableModels = conn.execute('select MODEL_NAME from ATMOSPHY_CONF '
                                       'where INSTALLED = 0').fetchall()
        print "Available models:\n %s" % ','.join(availableModels)
        raise ValueError('Model %s does not exist' % modelName)
        
    
    
    
    # Generate the models directory if it doesn't exist
    if not os.path.exists(models_path):
        logging.info('Creating %s' % models_path)
        os.makedirs(models_path)
    
    # Generate this specific model directory if it doesn't exist
    modelDir = os.path.join(models_path, modelName)
    if not os.path.exists(modelDir):
        logging.info('Creating %s' % modelDir)
        os.makedirs(modelDir)
    
    
    #Getting models and checking MD5s
    
    curs = conn.cursor()
    
    
    
    for url, md5_hash in curs.execute('select URL, MD5_HASH from ATMOSPHY_URLS where MODEL_ID = ?', (modelID,)):
        filename = url.split('/')[-1]
        if os.path.exists(os.path.join(modelDir, filename)):
            print "Checking MD5 for %s" % os.path.join(modelDir, filename),
            curMD5 = md5_file(os.path.join(modelDir, filename))
            if md5_hash == curMD5:
                print "....Verified"
            else:
                print "....Failed. Redownloading."
                stream = urllib2.urlopen(url)
                data = stream.read()
                stream.close()
                
                newFile = open(os.path.join(modelDir, filename), 'wb')
                newFile.write(data)
                newFile.close()
                
        else:
            print "Downloading from %s" % url
            stream = urllib2.urlopen(url)
            data = stream.read()
            stream.close()
            
            newFile = open(os.path.join(modelDir, filename), 'wb')
            newFile.write(data)
            newFile.close()

    
    logging.info('Importing models ...')
    importModel(modelName, modelID)
    logging.info('Successfully imported %s model' % (modelName,))
    logging.info('Done!')
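download verifies previously fetched files with an md5_file helper that is not part of these examples. A minimal sketch of what such a helper might look like (an assumption about its behaviour, not the project's actual implementation):

import hashlib

def md5_file(path, blockSize=65536):
    # Assumed behaviour: return the hex MD5 digest of the file at `path`.
    digest = hashlib.md5()
    with open(path, 'rb') as fileObj:
        for block in iter(lambda: fileObj.read(blockSize), b''):
            digest.update(block)
    return digest.hexdigest()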
Example #6
def interpModelGrid(modelName,
                    Teff,
                    logg,
                    FeH,
                    k=2.0,
                    alpha=0.0,
                    level=1,
                    method='linear'):
    """
    
    Interpolates in N-dimensional space to find the given point
    (Teff, logg, FeH, k, alpha) given the model name.
    
    Parameters:
    ===========
    
    modelName   : string
                  The name of the model to lookup in your local SQL database.

    Teff        : float
                  The effective temperature (Teff in Kelvin) of the star you plan to interpolate for.
              
    FeH         : float
                  The metallicity ([Fe/H]) of the star you plan to interpolate for.
                
    logg        : float
                  The surface gravity (log g) of the star you plan to interpolate for.
            
    k           : float, optional (default = 2.0)
                  The turbulence in the atmosphere of the star (km/s).
                  
    alpha       : float, optional (default = 0.0)
                  The level of alpha enhancement of the star ([alpha/Fe] in dex).
            
    level       : integer, optional (default = 1)
                  The maximum number of levels on either side of the point you wish to return in each
                  dimension.
                  
    method      : string, optional (default = 'linear')
                  The interpolation method which is passed to scipy.interpolate.griddata.
                  
                  Options are 'linear', 'nearest', or 'cubic'. 
    
    """

    # Our grid point in N-dimensional space
    gridSpace = {
        'teff': Teff,
        'logg': logg,
        'feh': FeH,
        'k': k,
        'alpha': alpha
    }

    conn = modeldb.getModelDBConn()
    # Find the nearest neighbours in N dimensions, and get the SQL
    interpolatedDimensions, SQL, boundaryValues = getNearestNeighbours(
        modelName, Teff, logg, FeH, k, alpha, level=level)

    # Populate the model grid

    deckShape = conn.execute(
        'select ROWS, COLS from ATMOSPHY_CONF where MODEL_NAME=?',
        (modelName, )).fetchone()
    modelGridCoord, modelGrid = getInterpModels(interpolatedDimensions, SQL,
                                                boundaryValues, deckShape)

    # If there are no grid points to interpolate between then no interpolation is necessary
    if len(modelGridCoord) == 0: return modelGrid[0]

    # Update our grid point based on interpolatedDimensions
    gridPoint = []
    for interpolatedDimension in interpolatedDimensions:
        gridPoint.append(gridSpace[interpolatedDimension])

    #pdb.set_trace()
    # If len(interpolatedDimensions) == 1 we have a one-dimensional interpolation.
    # griddata returns NaN values if the coordinates are not sorted; this was reported to the scipy mailing list.

    if len(interpolatedDimensions) == 1:
        modelGrid = [modelGrid[i] for i in np.argsort(modelGridCoord)]
        modelGridCoord = np.sort(modelGridCoord)

    # Return the interpolated grid deck
    # Fix for griddata in scipy 0.9RC1; inspect this at a later time.
    gridPoint = np.array(gridPoint).reshape(1, len(gridPoint))

    #if modelGridCoord.ndim == 1: modelGridCoord = [modelGridCoord]
    return interpolate.griddata(modelGridCoord,
                                modelGrid,
                                gridPoint,
                                method=method)
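A minimal usage sketch for interpModelGrid; the model name and stellar parameters below are hypothetical and assume the model has already been downloaded and imported:

deck = interpModelGrid('castelli-kurucz', Teff=5777.0, logg=4.44, FeH=0.0,
                       k=2.0, alpha=0.0, level=1, method='linear')
# `deck` is the model deck interpolated to the requested point; when the point
# falls exactly on the grid, the matching grid deck is returned directly.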
Example #7
def getNearestNeighbours(model, Teff, logg, FeH, k=2.0, alpha=0.0, level=1):
    """
    
    Finds the nearest neighbours to a point in a multi-dimensional grid.
    
    
    Parameters:
    ===========
    
    model   : string
              The name of the model grid stored in your local database.

    FeH     : float
              The metallicity ([Fe/H]) of the star you plan to interpolate for.
            
    Teff    : float
              The effective temperature (Teff in Kelvin) of the star you plan to interpolate for.
    
    logg    : float
              The surface gravity (log g) of the star you plan to interpolate for.
            
    k       : float, optional
              The turbulence in the atmosphere of the star (km/s).

    alpha   : float, optional
              The level of alpha enhancement of the star ([alpha/Fe] in dex).
            
    level   : integer, optional
              The maximum number of levels on either side of the point you wish to return in each
              dimension.
    """

    if (1 > level): raise ValueError('level must be a positive integer')
    if (Teff < 0): raise ValueError('Teff must be a positive float')
    if (logg < 0): raise ValueError('logg must be a positive float')
    if (k < 0): raise ValueError('k must be a positive float')

    connection = modeldb.getModelDBConn()
    modelID = connection.execute(
        'select id from atmosphy_conf '
        'where model_name = ?', (model, )).fetchone()[0]
    result = connection.execute(
        'select Teff, logg, FeH, k, alpha from models'
        ' where model_id = ?', (modelID, ))

    # todo - consider rewriting following section into a loop?
    Teff_grid, logg_grid, FeH_grid, k_grid, alpha_grid = zip(
        *result.fetchall())
    connection.close()

    grid = zip(Teff_grid, logg_grid, FeH_grid, k_grid, alpha_grid)

    # Find the nearest N levels of indexedFeHs
    FeH_neighbours = get1Dneighbours(FeH_grid, FeH, level=level)

    # Find the Teff available for our FeH possibilities
    Teff_available = [point[0] for point in grid if point[2] in FeH_neighbours]
    Teff_neighbours = get1Dneighbours(Teff_available, Teff, level=level)

    # Find the logg available for our FeH and Teff possibilities
    logg_available = [
        point[1] for point in grid
        if point[2] in FeH_neighbours and point[0] in Teff_neighbours
    ]
    logg_neighbours = get1Dneighbours(logg_available, logg, level=level)

    # Find the k available for our FeH, Teff, and logg restricted
    k_available = [
        point[3] for point in grid if point[2] in FeH_neighbours
        and point[0] in Teff_neighbours and point[1] in logg_neighbours
    ]
    k_neighbours = get1Dneighbours(k_available, k, level=level)

    # Find the alpha available for our FeH, Teff, logg, and k restricted
    alpha_available = [
        point[4] for point in grid
        if point[2] in FeH_neighbours and point[0] in Teff_neighbours
        and point[1] in logg_neighbours and point[3] in k_neighbours
    ]
    alpha_neighbours = get1Dneighbours(alpha_available, alpha, level=level)

    # Build the dimensions we want back from the SQL table

    SQL = 'from models where MODEL_ID=%d and ' % modelID

    boundaryValues = []
    interpolatedDimensions = []

    availableDimensions = {
        'feh': FeH_neighbours,
        'teff': Teff_neighbours,
        'logg': logg_neighbours,
        'k': k_neighbours,
        'alpha': alpha_neighbours,
    }

    for dimension, value in zip(['teff', 'logg', 'feh', 'k', 'alpha'],
                                [Teff, logg, FeH, k, alpha]):
        neighbours = availableDimensions[dimension]

        # If only one 'neighbour' is present, then this dimension does not need to be interpolated upon
        if (len(neighbours) > 1):
            interpolatedDimensions.append(dimension)

            # Add these limits for the sql query
            boundaryValues.append(min(neighbours))
            boundaryValues.append(max(neighbours))

            SQL += ' %s between ? and ? and' % dimension

        else:

            boundaryValues.append(value)

            SQL += ' %s = ? and' % dimension

    if SQL[-3:] == 'and': SQL = SQL[:-3]

    # Return the SQL
    return (interpolatedDimensions, SQL, tuple(boundaryValues))
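getNearestNeighbours leans on a get1Dneighbours helper that is not included in these examples. A minimal sketch of the behaviour the calls above appear to rely on (an assumption, not the project's actual code): an exact grid hit returns a single value, so the caller skips that dimension; otherwise up to `level` grid values are returned on each side of the requested point.

def get1Dneighbours(available, value, level=1):
    # Assumed behaviour sketch, not the project's implementation.
    values = sorted(set(available))
    if value in values:
        # Exact grid hit: one "neighbour", so the dimension is not interpolated.
        return [value]
    lower = [v for v in values if v < value][-level:]
    upper = [v for v in values if v > value][:level]
    return lower + upper

The returned SQL fragment is then prefixed with a column list by getInterpModels, e.g. 'select deck %s' % SQL, and executed with boundaryValues as the query parameters.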
Example #8
def interpModelGrid(modelName, Teff, logg, FeH, k=2.0, alpha=0.0, level=1, method='linear'):

    """
    
    Interpolates in N-dimensional space to find the given point
    (Teff, logg, FeH, k, alpha) given the model name.
    
    Parameters:
    ===========
    
    modelName   : string
                  The name of the model to lookup in your local SQL database.

    Teff        : float
                  The effective temperature (Teff in Kelvin) of the star you plan to interpolate for.
              
    FeH         : float
                  The metallicity ([Fe/H]) of the star you plan to interpolate for.
                
    logg        : float
                  The surface gravity (log g) of the star you plan to interpolate for.
            
    k           : float, optional (default = 2.0)
                  The turbulence in the atmosphere of the star (km/s).
                  
    alpha       : float, optional (default = 0.0)
                  The level of alpha enhancement of the star ([alpha/Fe] in dex).
            
    level       : integer, optional (default = 1)
                  The maximum number of levels on either side of the point you wish to return in each
                  dimension.
                  
    method      : string, optional (default = 'linear')
                  The interpolation method which is passed to scipy.interpolate.griddata.
                  
                  Options are 'linear', 'nearest', or 'cubic'. 
    
    """

    # Our grid point in N-dimensional space
    gridSpace = {
                    'teff' : Teff,
                    'logg' : logg,
                    'feh'  : FeH,
                    'k'    : k,
                    'alpha': alpha
                 }
                
    conn = modeldb.getModelDBConn()
    # Find the nearest neighbours in N dimensions, and get the SQL
    interpolatedDimensions, SQL, boundaryValues = getNearestNeighbours(modelName, Teff, logg, FeH, k, alpha, level=level)

    # Populate the model grid
    
    deckShape = conn.execute('select ROWS, COLS from ATMOSPHY_CONF where MODEL_NAME=?', (modelName,)).fetchone()
    modelGridCoord, modelGrid = getInterpModels(interpolatedDimensions, SQL, boundaryValues, deckShape)
    
    # If there are no grid points to interpolate between then no interpolation is necessary
    if len(modelGridCoord) == 0: return modelGrid[0]

    # Update our grid point based on interpolatedDimensions
    gridPoint = []
    for interpolatedDimension in interpolatedDimensions:
        gridPoint.append(gridSpace[interpolatedDimension])
        
    
    
    #pdb.set_trace()
    # If len(interpolatedDimensions) == 1 we have a one-dimensional interpolation.
    # griddata returns NaN values if the coordinates are not sorted; this was reported to the scipy mailing list.
    
    if len(interpolatedDimensions) == 1:
        modelGrid = [modelGrid[i] for i in np.argsort(modelGridCoord)]
        modelGridCoord = np.sort(modelGridCoord)
        
    # Return the interpolated grid deck
    # Fix for griddata in scipy 0.9RC1; inspect this at a later time.
    gridPoint = np.array(gridPoint).reshape(1, len(gridPoint))
    
    #if modelGridCoord.ndim == 1: modelGridCoord = [modelGridCoord]
    return interpolate.griddata(modelGridCoord, modelGrid, gridPoint, method=method)
Example #9
def getNearestNeighbours(model, Teff, logg, FeH, k=2.0, alpha=0.0, level=1):

    """
    
    Finds the nearest neighbours to a point in a multi-dimensional grid.
    
    
    Parameters:
    ===========
    
    FeH     : float
              The metallicity ([Fe/H]) of the star you plan to interpolate for.
            
    Teff    : float
              The effective temperature (Teff in Kelvin) of the star you plan to interpolate for.
    
    logg    : float
              The surface gravity (log g) of the star you plan to interpolate for.
            
    k       : float, optional
              The turbulence in the atmosphere of the star (km/s).
            
    level   : integer, optional
              The maximum number of levels on either side of the point you wish to return in each
              dimension.
    """
    

    if (1 > level): raise ValueError('level must be a positive integer')
    if (Teff < 0): raise ValueError('Teff must be a positive float')
    if (logg < 0): raise ValueError('logg must be a positive float')
    if (k < 0): raise ValueError('k must be a positive float')

    

    connection = modeldb.getModelDBConn()
    modelID = connection.execute('select id from atmosphy_conf '
                                'where model_name = ?', (model,)).fetchone()[0]
    result = connection.execute('select Teff, logg, FeH, k, alpha from models'
                                ' where model_id = ?', (modelID,))
    
    # todo - consider rewriting following section into a loop?
    Teff_grid, logg_grid, FeH_grid, k_grid, alpha_grid = zip(*result.fetchall())
    connection.close()
    
    grid = zip(Teff_grid, logg_grid, FeH_grid, k_grid, alpha_grid)
    
    
    # Find the nearest N levels of indexedFeHs
    FeH_neighbours  = get1Dneighbours(FeH_grid, FeH, level=level)

    # Find the Teff available for our FeH possibilities
    Teff_available = [point[0] for point in grid if point[2] in FeH_neighbours]
    Teff_neighbours = get1Dneighbours(Teff_available, Teff, level=level)
    
    # Find the logg available for our FeH and Teff possibilities
    logg_available = [point[1] for point in grid if point[2] in FeH_neighbours and point[0] in Teff_neighbours]
    logg_neighbours = get1Dneighbours(logg_available, logg, level=level)
    
    # Find the k available for our FeH, Teff, and logg restricted 
    k_available = [point[3] for point in grid if point[2] in FeH_neighbours and point[0] in Teff_neighbours and point[1] in logg_neighbours]
    k_neighbours = get1Dneighbours(k_available, k, level=level)
    
    # Find the alpha available for our FeH, Teff, logg, and k restricted
    alpha_available = [point[4] for point in grid if point[2] in FeH_neighbours and point[0] in Teff_neighbours and point[1] in logg_neighbours and point[3] in k_neighbours]
    alpha_neighbours = get1Dneighbours(alpha_available, alpha, level=level)
    
    
    # Build the dimensions we want back from the SQL table
    
    SQL = 'from models where MODEL_ID=%d and ' % modelID
    
    boundaryValues = []
    interpolatedDimensions = []
    
    availableDimensions = {
        'feh'   : FeH_neighbours,
        'teff'  : Teff_neighbours,
        'logg'  : logg_neighbours,
        'k'     : k_neighbours,
        'alpha' : alpha_neighbours,
    }
                            
    for dimension, value in zip(['teff', 'logg', 'feh', 'k', 'alpha'], [Teff, logg, FeH, k, alpha]):
        neighbours = availableDimensions[dimension]
            
        # If only one 'neighbour' is present, then this dimension does not need to be interpolated upon
        if (len(neighbours) > 1):
            interpolatedDimensions.append(dimension)
        
            # Add these limits for the sql query
            boundaryValues.append(min(neighbours))
            boundaryValues.append(max(neighbours))
            
            SQL += ' %s between ? and ? and' % dimension 
        
        else:
        
            boundaryValues.append(value)
            
            SQL += ' %s = ? and' % dimension
    
    if SQL[-3:] == 'and': SQL = SQL[:-3]

    # Return the SQL
    return (interpolatedDimensions, SQL, tuple(boundaryValues))