def load_database(connection_string):
    # Create and load the database
    create_database(connection_string)

    # Now that the database is created, connect to it and load up the JSON data
    db = Database(connection_string, reference_tables=REFERENCE_TABLES)
    db.load_database(DB_PATH, verbose=False)

    print('New database generated.')

    # Close all connections
    db.session.close()
    db.engine.dispose()

def load_db():
    # Utility function to load the database

    db_file = 'SIMPLE.db'
    db_file_path = Path(db_file)
    db_connection_string = 'sqlite:///SIMPLE.db'  # SQLite browser

    if RECREATE_DB and db_file_path.exists():
        os.remove(db_file)  # removes the current .db file if one already exists

    if not db_file_path.exists():
        create_database(db_connection_string)  # creates empty database based on the simple schema
        db = Database(db_connection_string)  # connects to the empty database
        db.load_database('data/')  # loads the data from the data files into the database
    else:
        db = Database(db_connection_string)  # if database already exists, connects to .db file

    return db
Example #3
def db():
    # Create a fresh temporary database and assert it exists
    # Because we've imported simple.schema, we will be using that schema for the database

    if os.path.exists(DB_NAME):
        os.remove(DB_NAME)
    connection_string = 'sqlite:///' + DB_NAME
    create_database(connection_string)
    assert os.path.exists(DB_NAME)

    # Connect to the new database and confirm it has the Sources table
    db = Database(connection_string)
    assert db
    assert 'source' in [c.name for c in db.Sources.columns]

    # Load the database contents
    # This should take care of finding serious issues (key/column violations)
    db.load_database(DB_PATH, verbose=False)

    return db
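
# Hypothetical test consuming the function above as a pytest fixture (a sketch,
# assuming it is decorated with @pytest.fixture; test name and assertion are illustrative):
def test_database_loaded(db):
    # The fixture returns a Database object with the JSON contents already loaded
    assert db.query(db.Sources).count() > 0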
Example #4
def load_simpledb(db_file, recreatedb=True):
    # Utility function to load the database

    db_file_path = Path(db_file)
    db_connection_string = 'sqlite:///' + db_file

    if recreatedb and db_file_path.exists():
        os.remove(db_file)  # removes the current .db file if one already exists

    if not db_file_path.exists():
        try:
            # Use fancy in-memory database, if supported by astrodbkit2
            db = Database('sqlite://')  # creates and connects to a temporary in-memory database
            db.load_database('data/')  # loads the data from the data files into the database
            db.dump_sqlite(db_file)  # dump in-memory database to file
            db = Database(db_connection_string)  # replace database object with new file version
        except RuntimeError:
            # use in-file database
            create_database(db_connection_string)  # creates empty database based on the SIMPLE schema
            db = Database(db_connection_string)  # connects to the empty database
            db.load_database('data/')  # loads the data from the data files into the database
    else:
        db = Database(db_connection_string)  # if database already exists, connects to .db file

    return db
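
# Usage sketch (assumes a local data/ directory with the JSON files);
# recreatedb=False reuses an existing SIMPLE.db instead of rebuilding it.
db = load_simpledb('SIMPLE.db', recreatedb=False)
print(db.query(db.Sources).count(), 'sources in the database')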
Example #5
for row in results: print(row)

# The spectra table contains columns of type SPECTRUM; column_type_overrides allows us to work with them as text
for c in db.spectra.columns: print(c, c.type)
db.query(db.spectra).limit(10).all()

_ = db.inventory(11, pretty_print=True)

# Can output the full contents of BDNYC as json files
db.save_db('bdnyc')

# Copy to another database
source_connection_string = 'sqlite:///../BDNYCdb-1/bdnyc_database.db'  # SQLite
destination_connection_string = 'postgresql://localhost/BDNYC'  # Postgres
copy_database_schema(source_connection_string, destination_connection_string)

# Load database contents from JSON
db = Database(destination_connection_string,
              reference_tables=['changelog', 'data_requests', 'publications', 'ignore', 'modes',
                                'systems', 'telescopes', 'versions', 'instruments'],
              primary_table='sources',
              primary_table_key='id',
              foreign_key='source_id',
              column_type_overrides={'spectra.spectrum': types.TEXT(),
                                     'spectra.local_spectrum': types.TEXT()})
db.load_database('bdnyc')
# NOTE: This currently does not work since there are inconsistencies
# in the sqlite bdnyc contents (strings in integer fields for example)
# Also: some manual editing was required since SPECTRUM is not a valid Postgres type
# and keeps getting interpreted as numeric as opposed to text
# Script to add Y-dwarf spectral types

from astrodbkit2.astrodb import create_database
from astrodbkit2.astrodb import Database
#from simple.schema import *
from astropy.table import Table
import numpy as np
import re
from utils import convert_spt_string_to_code

connection_string = 'sqlite:///../../SIMPLE.db'  # SQLite
create_database(connection_string)
db = Database(connection_string)
db.load_database('../../data')

# load table
ingest_table = Table.read('Y-dwarf_table.csv', data_start=2)
names = ingest_table['source']
n_sources = len(names)
regime = ['infrared'] * n_sources  # all sources have IR spectral classifications
spectral_types = ingest_table['SpT']
spt_refs = ingest_table['spt_ref']

# Source names as they appear in the database Names table
db_names = []
for name in names:
    db_name = db.search_object(name, output_table='Sources')[0].source
    db_names.append(db_name)

# Convert SpT string to code
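# A minimal sketch of the conversion, assuming the numeric convention M0=60, L0=70,
# T0=80, Y0=90; the actual utils.convert_spt_string_to_code may differ.
def spt_string_to_code(spt):
    classes = {'M': 60., 'L': 70., 'T': 80., 'Y': 90.}
    match = re.match(r'([MLTY])(\d+\.?\d*)', str(spt).strip())
    if match is None:
        return None
    return classes[match.group(1)] + float(match.group(2))

spt_codes = [spt_string_to_code(s) for s in spectral_types]
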
# Set correct connection string
if DB_TYPE == 'sqlite':
    # First, remove the existing database in order to recreate it from the schema
    # If the schema has not changed, this part can be skipped
    if os.path.exists(DB_NAME):
        os.remove(DB_NAME)
    connection_string = 'sqlite:///' + DB_NAME
elif DB_TYPE == 'postgres':
    # For Postgres, we connect and drop all database tables
    connection_string = 'postgresql://' + DB_NAME
    try:
        db = Database(connection_string)
        db.base.metadata.drop_all()
        db.session.close()
        db.engine.dispose()
    except RuntimeError:
        # Database already empty or doesn't yet exist
        pass

create_database(connection_string)

# Now that the database is created, connect to it and load up the JSON data
db = Database(connection_string)
db.load_database(DB_PATH, verbose=False)

print('New database generated.')

# Close all connections
db.session.close()
db.engine.dispose()
VERBOSE = False

verboseprint = print if VERBOSE else lambda *a, **k: None

db_file = 'SIMPLE.db'
db_file_path = Path(db_file)
db_connection_string = 'sqlite:///SIMPLE.db'  # SQLite browser

if RECREATE_DB and db_file_path.exists():
    os.remove(db_file)  # removes the current .db file if one already exists

if not db_file_path.exists():
    create_database(db_connection_string)  # creates empty database based on the SIMPLE schema
    db = Database(db_connection_string)  # connects to the empty database
    db.load_database('data/')  # loads the data from the data files into the database
else:
    db = Database(db_connection_string)  # if database already exists, connects to .db file

# Add missing publications to the Publications table
add_publication(db, name='Sahl16', bibcode='J/MNRAS/455/357', dryrun=False)
add_publication(db, name='Liu16', bibcode='J/ApJ/833/96', dryrun=False)
add_publication(db, name='Wang18', bibcode='2018PASP..130f4402W', dryrun=False)
add_publication(db, name='Bedi17', bibcode='2017MNRAS.470.1140B', dryrun=False)
add_publication(db, name='Delo17', bibcode='2017A&A...608A..79D', dryrun=False)
add_publication(db, name='Luhm16', bibcode='2016AJ....152...78L', dryrun=False)

# load table of sources to ingest
input_file = 'scripts/ingests/ATLAS_table.vot'
ATLAS = Table.read(input_file, format='votable')
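
# Sketch of an ingestion loop that could follow; the ATLAS column names ('Name',
# 'RAJ2000', 'DEJ2000') and the 'Sahl16' reference are illustrative assumptions.
new_sources = []
for row in ATLAS:
    new_sources.append({'source': row['Name'],
                        'ra': float(row['RAJ2000']),
                        'dec': float(row['DEJ2000']),
                        'reference': 'Sahl16'})
db.Sources.insert().execute(new_sources)
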
RECREATE_DB = True
VERBOSE = False

verboseprint = print if VERBOSE else lambda *a, **k: None

db_file = 'SIMPLE.db'
db_file_path = Path(db_file)
db_connection_string = 'sqlite:///SIMPLE.db'  # SQLite

if RECREATE_DB and db_file_path.exists():
    os.remove(db_file)

if not db_file_path.exists():
    create_database(db_connection_string)
    db = Database(db_connection_string)
    db.load_database('data')

# try:
# 	db_file_path = db_file.resolve(strict=True)
# except:
# 	# SIMPLE.db file does not exist so create it
# 	create_database(db_connection_string)
# 	db = Database(db_connection_string)
# 	db.load_database('data')
# else:
# 	# SIMPLE.db file does exist
# 	if RECREATE_DB: # Recreate database anyway
# 		os.remove(db_file)
# 		create_database(db_connection_string)
# 		db = Database(db_connection_string)
# 		db.load_database('data')
Example #10
print(results)

# Direct SQL queries
results = db.sql_query('select * from sources')
print(results)
print(results[0].keys())

# Use inventory to check a single object (output is dictionary)
data = db.inventory('2MASS J13571237+1428398', pretty_print=True)
print(type(data))

# Delete a row
for row in db.query(db.Photometry).all():
    print(row)
db.Photometry.delete().where(db.Photometry.c.band == 'WISE_W1').execute()
db.query(db.Photometry).all()
db.query(db.Photometry).count()

# Delete entire table
# NOTE: data linked via foreign keys are also deleted
db.query(db.Telescopes).all()
db.Telescopes.delete().execute()
db.query(db.Telescopes).count()

# Reload via json file
db.load_table('Telescopes', 'data')

# Reload everything
# NOTE: this clears existing DB values to replace them with on-disk file data
db.load_database('data', verbose=True)
                 ],
                 primary_table='sources',
                 primary_table_key='id',
                 foreign_key='source_id',
                 column_type_overrides={
                     'spectra.spectrum': types.TEXT(),
                     'spectra.local_spectrum': types.TEXT()
                 })

# SIMPLE
connection_string = 'sqlite:///SIMPLE.db'
db = Database(connection_string)

# --------------------------------------------------------------------------------------
# Reload from directory, if needed
db.load_database('data', verbose=False)

# --------------------------------------------------------------------------------------
# For each source in SIMPLE, search in BDNYC and grab specified photometry

# We will only be grabbing WISE data for now
telescope = 'WISE'
band_list = ['WISE_W1', 'WISE_W2', 'WISE_W3', 'WISE_W4']

# Don't include sources that already have photometry in these bands
temp = db.query(db.Photometry.c.source).filter(
    db.Photometry.c.band.in_(band_list)).distinct().all()
sources_with_photometry = [s[0] for s in temp]

sources = db.query(db.Sources).\
    filter(db.Sources.c.source.notin_(sources_with_photometry)).\