Example No. 1
def load_database(connection_string):
    # Create and load the database
    create_database(connection_string)

    # Now that the database is created, connect to it and load up the JSON data
    db = Database(connection_string, reference_tables=REFERENCE_TABLES)
    db.load_database(DB_PATH, verbose=False)

    print('New database generated.')

    # Close all connections
    db.session.close()
    db.engine.dispose()
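This helper assumes the usual SIMPLE ingest context; a minimal sketch of the surrounding imports and constants (the values of REFERENCE_TABLES and DB_PATH are assumptions based on the other examples in this listing), followed by an illustrative call:

from astrodbkit2.astrodb import create_database, Database

DB_PATH = 'data'  # assumed location of the JSON data files
REFERENCE_TABLES = ['Publications', 'Telescopes', 'Instruments']  # assumed; the real list lives in the SIMPLE scripts

load_database('sqlite:///SIMPLE.db')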
Example No. 2
def db():
    # Create a fresh temporary database and assert it exists
    # Because we've imported simple.schema, we will be using that schema for the database

    if os.path.exists(DB_NAME):
        os.remove(DB_NAME)
    connection_string = 'sqlite:///' + DB_NAME
    create_database(connection_string)
    assert os.path.exists(DB_NAME)

    # Connect to the new database and confirm it has the Sources table
    db = Database(connection_string)
    assert db
    assert 'source' in [c.name for c in db.Sources.columns]

    # Load the database contents
    # This should take care of finding serious issues (key/column violations)
    db.load_database(DB_PATH, verbose=False)

    return db
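In the SIMPLE test suite a function like this is typically wired up as a pytest fixture; a minimal sketch of that usage (the decorator and the test below are illustrative, not part of the original file):

import pytest

@pytest.fixture(scope='module')
def db():
    ...  # body as in the example above

def test_database_loaded(db):
    # hypothetical check: the loaded database should contain at least one source
    assert db.query(db.Sources).count() > 0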
Example No. 3
def db():
    # Create a fresh temporary database and assert it exists
    # Because we've imported simple.schema, we will be using that schema for the database

    if os.path.exists(DB_NAME):
        os.remove(DB_NAME)
    connection_string = 'sqlite:///' + DB_NAME
    create_database(connection_string)
    assert os.path.exists(DB_NAME)

    # Connect to the new database and confirm it has the Sources table
    db = Database(connection_string)
    assert db
    assert 'source' in [c.name for c in db.Sources.columns]

    return db
Example No. 4
def load_postgres(connection_string):
    # For Postgres, we connect and drop all database tables

    # Fix for SQLAlchemy 1.4.x
    if connection_string.startswith("postgres://"):
        connection_string = connection_string.replace("postgres://", "postgresql://", 1)

    try:
        db = Database(connection_string)
        db.base.metadata.drop_all()
        db.session.close()
        db.engine.dispose()
    except RuntimeError:
        # Database already empty or doesn't yet exist
        pass

    # Proceed to load the database
    load_database(connection_string)
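A minimal usage sketch, assuming the connection string arrives via an environment variable such as DATABASE_URL (a common source of the legacy postgres:// prefix this function rewrites); the variable name and fallback URL are illustrative:

import os

load_postgres(os.environ.get('DATABASE_URL', 'postgresql://localhost/SIMPLE'))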
Example No. 5
def load_db():
    # Utility function to load the database

    db_file = 'SIMPLE.db'
    db_file_path = Path(db_file)
    db_connection_string = 'sqlite:///SIMPLE.db'  # SQLite browser

    if RECREATE_DB and db_file_path.exists():
        os.remove(db_file)  # removes the current .db file if one already exists

    if not db_file_path.exists():
        create_database(db_connection_string)  # creates empty database based on the simple schema
        db = Database(db_connection_string)  # connects to the empty database
        db.load_database('data/')  # loads the data from the data files into the database
    else:
        db = Database(db_connection_string)  # if database already exists, connects to .db file

    return db
Example No. 6
# SQLite settings
DB_TYPE = 'sqlite'
DB_NAME = 'SIMPLE.db'

# Set correct connection string
if DB_TYPE == 'sqlite':
    # First, remove the existing database in order to recreate it from the schema
    # If the schema has not changed, this part can be skipped
    if os.path.exists(DB_NAME):
        os.remove(DB_NAME)
    connection_string = 'sqlite:///' + DB_NAME
elif DB_TYPE == 'postgres':
    # For Postgres, we connect and drop all database tables
    connection_string = 'postgresql://' + DB_NAME
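    # NOTE: a full Postgres URL usually takes the form postgresql://user:password@host/dbname; adjust as needed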
    try:
        db = Database(connection_string)
        db.base.metadata.drop_all()
        db.session.close()
        db.engine.dispose()
    except RuntimeError:
        # Database already empty or doesn't yet exist
        pass

create_database(connection_string)

# Now that the database is created, connect to it and load up the JSON data
db = Database(connection_string)
db.load_database(DB_PATH, verbose=False)

print('New database generated.')
Example No. 7
# Script to add Y dwarfs spectral types

from astrodbkit2.astrodb import create_database
from astrodbkit2.astrodb import Database
#from simple.schema import *
from astropy.table import Table
import numpy as np
import re
from utils import convert_spt_string_to_code

connection_string = 'sqlite:///../../SIMPLE.db'  # SQLite
create_database(connection_string)
db = Database(connection_string)
db.load_database('../../data')

# load table
ingest_table = Table.read('Y-dwarf_table.csv', data_start=2)
names = ingest_table['source']
n_sources = len(names)
regime = ['infrared'] * n_sources  # all sources have IR spectral classifications
spectral_types = ingest_table['SpT']
spt_refs = ingest_table['spt_ref']

# source names in database Names table
db_names = []
for name in names:
    db_name = db.search_object(name, output_table='Sources')[0].source
    db_names.append(db_name)

# Convert SpT string to code
Example No. 8
from astrodbkit2.astrodb import create_database
from astrodbkit2.astrodb import Database
from astropy.table import Table
from utils import ingest_parallaxes

connection_string = 'sqlite:///../../SIMPLE.db'  # SQLite
create_database(connection_string)
db = Database(connection_string)
db.load_database('../../data')

# load table
ingest_table = Table.read('Y-dwarf_table.csv', data_start=2)
sources = ingest_table['source']
plx = ingest_table['plx_mas']
plx_unc = ingest_table['plx_err']
plx_ref = ingest_table['astrometry_ref']

ingest_parallaxes(db, sources, plx, plx_unc, plx_ref, verbose=True)

# Save modified JSON files
db.save_db('../../data')
Example No. 9
from astrodbkit2.astrodb import Database, and_
from sqlalchemy import types  # for BDNYC column overrides
from collections import Counter

# Establish connection to databases

# Note that special parameters have to be passed to allow the BDNYC schema to work properly
connection_string = 'sqlite:///../BDNYCdevdb/bdnycdev.db'
bdnyc = Database(connection_string,
                 reference_tables=[
                     'changelog', 'data_requests', 'publications', 'ignore',
                     'modes', 'systems', 'telescopes', 'versions',
                     'instruments'
                 ],
                 primary_table='sources',
                 primary_table_key='id',
                 foreign_key='source_id',
                 column_type_overrides={
                     'spectra.spectrum': types.TEXT(),
                     'spectra.local_spectrum': types.TEXT()
                 })

# SIMPLE
connection_string = 'sqlite:///SIMPLE.db'
db = Database(connection_string)

# Copy first publications that are not already in SIMPLE
temp = db.query(db.Publications.c.name).all()
existing_simple = [s[0] for s in temp]
temp = bdnyc.query(bdnyc.publications)\
Example No. 10
RECREATE_DB = True  #recreates the .db file from the data files
VERBOSE = False

verboseprint = print if VERBOSE else lambda *a, **k: None

db_file = 'SIMPLE.db'
db_file_path = Path(db_file)
db_connection_string = 'sqlite:///SIMPLE.db'  # SQLite browser

if RECREATE_DB and db_file_path.exists():
    os.remove(db_file)  # removes the current .db file if one already exists

if not db_file_path.exists():
    create_database(db_connection_string)  # creates empty database based on the simple schema
    db = Database(db_connection_string)  # connects to the empty database
    db.load_database('data/')  # loads the data from the data files into the database
else:
    db = Database(db_connection_string)  # if database already exists, connects to .db file

#Adding missing publications to publication table
add_publication(db, name='Sahl16', bibcode='J/MNRAS/455/357', dryrun=False)
add_publication(db, name='Liu16', bibcode='J/ApJ/833/96', dryrun=False)
add_publication(db, name='Wang18', bibcode='2018PASP..130f4402W', dryrun=False)
add_publication(db, name='Bedi17', bibcode='2017MNRAS.470.1140B', dryrun=False)
add_publication(db, name='Delo17', bibcode='2017A&A...608A..79D', dryrun=False)
add_publication(db, name='Luhm16', bibcode='2016AJ....152...78L', dryrun=False)

# load table of sources to ingest
Example No. 11
#------------------------------------------------------------------------------------------------

from astrodbkit2.astrodb import create_database
from astrodbkit2.astrodb import Database
#from simple.schema import *
from astropy.table import Table
import numpy as np
from astropy.coordinates import SkyCoord
import astropy.units as u
from astroquery.simbad import Simbad
import warnings
warnings.filterwarnings("ignore", module='astroquery.simbad')

connection_string = 'sqlite:///../../SIMPLE.db'  # SQLite
create_database(connection_string)
db = Database(connection_string)
db.load_database('../../data')

# load table of sources to ingest
input_file = "ATLAS_table.vot"
ATLAS = Table.read(input_file)

# Making sure all target names are SIMBAD-resolvable:

resolved_name = []

for j in range(len(ATLAS)):
    identifier_result_table = Simbad.query_object(ATLAS['Name'][j], verbose=False)
    if identifier_result_table is not None and len(identifier_result_table) == 1:
        # Successfully identified
Example No. 12
def load_simpledb(db_file, recreatedb=True):
    # Utility function to load the database

    db_file_path = Path(db_file)
    db_connection_string = 'sqlite:///' + db_file

    if recreatedb and db_file_path.exists():
        os.remove(db_file)  # removes the current .db file if one already exists

    if not db_file_path.exists():
        try:  # Use fancy in-memory database, if supported by astrodbkit2
            db = Database('sqlite://')  # creates and connects to a temporary in-memory database
            db.load_database('data/')  # loads the data from the data files into the database
            db.dump_sqlite(db_file)  # dump in-memory database to file
            db = Database(db_connection_string)  # replace database object with new file version
        except RuntimeError:
            # use in-file database
            create_database(db_connection_string)  # creates empty database based on the simple schema
            db = Database(db_connection_string)  # connects to the empty database
            db.load_database('data/')  # loads the data from the data files into the database
    else:
        db = Database(db_connection_string)  # if database already exists, connects to .db file

    return db
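A minimal usage sketch for this helper; the file name follows the other examples in this listing, and the follow-up query is only a hypothetical sanity check:

db = load_simpledb('SIMPLE.db', recreatedb=True)
print(db.query(db.Sources).count(), 'sources loaded')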
Example No. 13
from sqlalchemy import types  # for BDNYC column overrides

verbose = True

# --------------------------------------------------------------------------------------
# Establish connection to databases

# Note that special parameters have to be passed to allow the BDNYC schema to work properly
connection_string = 'sqlite:///../BDNYCdevdb/bdnycdev.db'
bdnyc = Database(connection_string,
                 reference_tables=[
                     'changelog', 'data_requests', 'publications', 'ignore',
                     'modes', 'systems', 'telescopes', 'versions',
                     'instruments'
                 ],
                 primary_table='sources',
                 primary_table_key='id',
                 foreign_key='source_id',
                 column_type_overrides={
                     'spectra.spectrum': types.TEXT(),
                     'spectra.local_spectrum': types.TEXT()
                 })

# SIMPLE
connection_string = 'sqlite:///SIMPLE.db'
db = Database(connection_string)

# --------------------------------------------------------------------------------------
# Reload from directory, if needed
db.load_database('data', verbose=False)
Example No. 14
# Example on how to load a single, manually created object into the database

from astrodbkit2.astrodb import Database, create_database

# Establish connection to database
connection_string = 'postgresql://localhost/SIMPLE'  # Postgres
connection_string = 'sqlite:///SIMPLE.db'  # SQLite
db = Database(connection_string)

# If brand new database, run the following
# NOTE: Some databases, like Postgres, will need an empty database created first before running this
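# (e.g., for the Postgres connection above, something like "createdb SIMPLE" from the shell; the exact command depends on your setup)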
from simple.schema import *
create_database(connection_string)

# Adding information for 2MASS J13571237+1428398

# Add references
publications_data = [{
    'name': 'Schm10',
    'bibcode': '2010AJ....139.1808S',
    'doi': '10.1088/0004-6256/139/5/1808',
    'description': 'Colors and Kinematics of L Dwarfs From the Sloan Digital Sky Survey'
}, {
    'name': 'Cutr12',
    'bibcode': '2012yCat.2311....0C',
    'doi': None,
    'description': 'WISE All-Sky Data Release'
Example No. 15
# ---------------------------------------------------------------------------------
# Establish connection to database
db_file = 'SIMPLE.db'
db_file_path = Path(db_file)
db_connection_string = 'sqlite:///SIMPLE.db'

# Remove existing database if it exists and we are recreating it
if RECREATE_DB and db_file_path.exists():
    os.remove(db_file)

# Connect to the database; if creating it, also populate it
# Note that we need to specify this is a new reference table (until AstrodbKit2 gets updated for it)
if not db_file_path.exists():
    create_database(db_connection_string)
    db = Database(db_connection_string, reference_tables=REFERENCE_TABLES)
    db.load_database('data/')
else:
    db = Database(db_connection_string, reference_tables=REFERENCE_TABLES)

# ---------------------------------------------------------------------------------
# Add telescope/instrument information first

# Fetch existing telescopes, add if missing
telescopes = list(set([s['telescope'] for s in filters_to_add]))
existing = db.query(db.Telescopes).filter(db.Telescopes.c.name.in_(telescopes)).table()
if len(existing) > 0:
    existing = existing['name'].tolist()
new_telescopes = list(set(telescopes)-set(existing))
insert_data = [{'name': s} for s in new_telescopes]
if len(insert_data) > 0:
Example No. 16
# Query examples against the database

from astrodbkit2.astrodb import Database, and_  # and_ is needed for the combined filter below

# Establish connection to database
connection_string = 'sqlite:///SIMPLE.db'
db = Database(connection_string)

# Query for all sources
results = db.query(db.Sources).all()
print(results)

# Query for all publications
db.query(db.Publications).all()

# Query for sources with declinations larger than 0
db.query(db.Sources).filter(db.Sources.c.dec > 0).all()

# Query and sort sources by declination
db.query(db.Sources.c.source).order_by(db.Sources.c.dec).all()

# Query for join Sources and Publications and return just several of the columns
results = db.query(db.Sources.c.source, db.Sources.c.reference, db.Publications.c.name)\
            .join(db.Publications, db.Sources.c.reference == db.Publications.c.name)\
            .all()
print(results)

# Query with AND
db.query(db.Sources).filter(and_(db.Sources.c.dec > 0,
                                 db.Sources.c.ra > 200)).all()
Example No. 17
#app_onc.vars['source_id'] = ''

app_simple.vars = dict()
app_simple.vars['query'] = ''
app_simple.vars['search'] = ''
app_simple.vars['specid'] = ''
app_simple.vars['source_id'] = ''

from astrodbkit2.astrodb import Database
#connection_string = 'sqlite:///bdnyc_database.db'   #Had been 'sqlite:///SIMPLE.db'
#db_dir = r'C:\Users\danie\SIMPLE-web'
#db = Database(connection_string)
#db.load_database(db_dir)

db_file = 'sqlite:///SIMPLE.db'
db = Database(db_file)
pd.set_option('max_colwidth', -1)

#db_file_OLD = 'bdnyc_database.db'
#db_OLD = astrodb.Database(db_file)
#pd.set_option('max_colwidth', -1)

# Redirect to the main page
#@app_onc.route('/')
#@app_onc.route('/index')


@app_simple.route('/')
@app_simple.route('/index')
# Page with a text box to take the SQL query
#@app_onc.route('/index', methods=['GET', 'POST'])
Example No. 18
save_db = True #modifies .db file but not the data files
RECREATE_DB = True #recreates the .db file from the data files
VERBOSE = False

verboseprint = print if VERBOSE else lambda *a, **k: None

db_file = 'SIMPLE.db'
db_file_path = Path(db_file)
db_connection_string = 'sqlite:///SIMPLE.db'  # SQLite browser

if RECREATE_DB and db_file_path.exists():
    os.remove(db_file) #removes the current .db file if one already exists

if not db_file_path.exists():
    create_database(db_connection_string) #creates empty database based on the simple schema
    db = Database(db_connection_string) #connects to the empty database
    db.load_database('data/') #loads the data from the data files into the database
else:
    db = Database(db_connection_string) #if database already exists, connects to .db file


add_Arti_bibcode = db.Publications.update().where(db.Publications.c.name == 'Arti15').\
	values(bibcode='2015ApJ...806..254A')
db.engine.execute(add_Arti_bibcode)
db.Publications.delete().where(db.Publications.c.name == 'Barm10').execute()
db.Publications.delete().where(db.Publications.c.name == 'Bouy06').execute()
update_Deac11 = db.Publications.update().where(db.Publications.c.name == 'Deac11').\
	values(name='Deac11b')
db.engine.execute(update_Deac11)
update_Delo17 = db.Publications.update().where(db.Publications.c.name == 'Delo17').\
	values(name='Delo17b')
Example No. 19
# Example adding a single spectrum to the database

from astrodbkit2.astrodb import Database
from datetime import datetime

connection_string = 'sqlite:///SIMPLE.db'  # SQLite
db = Database(connection_string)

# Add missing telescopes, etc
db.Telescopes.insert().execute([{'name': 'IRTF'}])
db.Instruments.insert().execute([{'name': 'SpeX'}])
db.Modes.insert().execute([{
    'name': 'Prism',
    'instrument': 'SpeX',
    'telescope': 'IRTF'
}])

# Add actual Spectra
spec_data = [{
    'source': '2MASS J00192626+4614078',
    'regime': 'infrared',
    'spectrum': 'https://s3.amazonaws.com/bdnyc/SpeX/Prism/U10013_SpeX.fits',
    'telescope': 'IRTF',
    'instrument': 'SpeX',
    'mode': 'Prism',
    'reference': 'Cruz18',
    'wavelength_units': 'um',
    'flux_units': 'erg s-1 cm-2 A-1',
    'observation_date': datetime.fromisoformat('2004-11-08')
}]
db.Spectra.insert().execute(spec_data)
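As in the other examples in this listing, the in-database changes can then be written back out to the JSON data files with save_db; the target directory below is an assumption:

db.save_db('data/')  # directory is illustrative; other scripts here use 'data' or '../../data'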
Example No. 20
# Script to ingest Y dwarfs from Kirkpatrick+2019

from astrodbkit2.astrodb import create_database
from astrodbkit2.astrodb import Database
#from simple.schema import *
from astropy.table import Table
import numpy as np

connection_string = 'sqlite:///../../SIMPLE.db'  # SQLite
create_database(connection_string)
db = Database(connection_string)
db.load_database('../../data')

# load table of sources to ingest
Ydwarfs = Table.read('Y-dwarf_table.csv', data_start=2)

# find sources already in database
existing_sources = []
missing_sources = []
db_names = []
for i, name in enumerate(Ydwarfs['source']):
    if len(db.search_object(name, resolve_simbad=True)) != 0:
        existing_sources.append(i)
        db_names.append(db.search_object(name, resolve_simbad=True)[0].source)
    else:
        missing_sources.append(i)
        db_names.append(Ydwarfs['source'][i])

# add missing references
ref_list = Ydwarfs['reference'].tolist()
included_ref = db.query(db.Publications.c.name).filter(
Example No. 21
# Example on how to use the BDNYC database with Astrodbkit2

from astrodbkit2.astrodb import Database, copy_database_schema
from sqlalchemy import types  # for BDNYC column overrides

# Establish connection to database
# Note that special parameters have to be passed to allow the BDNYC schema to work properly
connection_string = 'sqlite:///../BDNYCdb-1/bdnyc_database.db'
db = Database(connection_string,
              reference_tables=['changelog', 'data_requests', 'publications', 'ignore', 'modes',
                                'systems', 'telescopes', 'versions', 'instruments'],
              primary_table='sources',
              primary_table_key='id',
              foreign_key='source_id',
              column_type_overrides={'spectra.spectrum': types.TEXT(),
                                     'spectra.local_spectrum': types.TEXT()})

# Query similarly to SIMPLE
results = db.query(db.sources).limit(10).all()
for row in results: print(row)

# The spectra table contains columns of type SPECTRUM; the column_type_overrides parameter allows us to work with them as text
for c in db.spectra.columns: print(c, c.type)
db.query(db.spectra).limit(10).all()

_ = db.inventory(11, pretty_print=True)

# Can output the full contents of BDNYC as json files
db.save_db('bdnyc')

# Copy to another database
Example No. 22
# Query examples against the database

from astrodbkit2.astrodb import Database, and_, or_

# Establish connection to database
connection_string = 'sqlite:///SIMPLE.db'
db = Database(connection_string)

# Query for all sources
results = db.query(db.Sources).all()
print(results)

# Alternative output formats
db.query(db.Sources).astropy()
db.query(db.Sources).table()  # equivalent to astropy
db.query(db.Sources).pandas()

# Query for all publications
db.query(db.Publications).all()

# Query for sources with declinations larger than 0
db.query(db.Sources).filter(db.Sources.c.dec > 0).all()

# Query and sort sources by declination
db.query(db.Sources.c.source).order_by(db.Sources.c.dec).all()

# Query for join Sources and Publications and return just several of the columns
results = db.query(db.Sources.c.source, db.Sources.c.reference, db.Publications.c.name)\
            .join(db.Publications, db.Sources.c.reference == db.Publications.c.name)\
            .all()
print(results)
#------------------------------------------------------------------------------------------------

from astrodbkit2.astrodb import create_database
from astrodbkit2.astrodb import Database
from astropy.table import Table
import numpy as np
import re
from utils import convert_spt_string_to_code

connection_string = 'sqlite:///../../SIMPLE.db'  # SQLite
create_database(connection_string)
db = Database(connection_string)
db.load_database('../../data')

# load table of sources to ingest
ingest_table = Table.read("ATLAS_table.vot")
names = ingest_table['Name']
n_sources = len(names)
# regime = ['infrared'] * n_sources  # all sources have IR spectral classifications
spectral_types_optical = ingest_table['SpType']
spectral_types_nir = ingest_table['SpTSpeX']
#spt_refs = ingest_table['spt_ref']
# need to adjust this to only add SpT for sources lacking one

# source names in database Names table
db_names = []
for name in names:
    print(name)
    db_name = db.search_object(name, output_table='Sources')[0].source
    print(db_name)
    db_names.append(db_name)
DRY_RUN = True
RECREATE_DB = True
VERBOSE = False

verboseprint = print if VERBOSE else lambda *a, **k: None

db_file = 'SIMPLE.db'
db_file_path = Path(db_file)
db_connection_string = 'sqlite:///SIMPLE.db'  # SQLite

if RECREATE_DB and db_file_path.exists():
    os.remove(db_file)

if not db_file_path.exists():
    create_database(db_connection_string)
    db = Database(db_connection_string)
    db.load_database('data')

# try:
# 	db_file_path = db_file.resolve(strict=True)
# except:
# 	# SIMPLE.db file does not exist so create it
# 	create_database(db_connection_string)
# 	db = Database(db_connection_string)
# 	db.load_database('data')
# else:
# 	# SIMPLE.db file does exist
# 	if RECREATE_DB: # Recreate database anyway
# 		os.remove(db_file)
# 		create_database(db_connection_string)
# 		db = Database(db_connection_string)