def main():
    """Read a single VOEvent packet from stdin and insert it into the database.

    Returns:
        0 on success, 1 if the packet could not be inserted.

    Raises:
        RuntimeError: If the target database does not exist.
    """
    args = handle_args()
    logger = setup_logging(args.logfile_path)
    dburl = dbconfig.make_db_url(dbconfig.default_admin_db_params, args.dbname)
    if not db_utils.check_database_exists(dburl):
        raise RuntimeError("Database not found")
    # Read raw bytes on Py3 (text-mode stdin would hand the parser a ``str``);
    # plain read suffices on Py2.
    if six.PY3:
        stdin = sys.stdin.buffer.read()
    else:
        stdin = sys.stdin.read()  # Py2
    v = voeventparse.loads(stdin)
    session = Session(bind=create_engine(dburl))
    try:
        conv.safe_insert_voevent(session, v)
        session.commit()
    except Exception:
        # Narrowed from a bare ``except:`` so KeyboardInterrupt / SystemExit
        # still propagate; log the traceback and signal failure instead of
        # falling through to the success log below.
        logger.exception(
            "Could not insert packet with ivorn {} into {}".format(
                v.attrib['ivorn'], args.dbname))
        return 1
    finally:
        session.close()
    logger.info("Loaded packet with ivorn {} into {}".format(
        v.attrib['ivorn'], args.dbname))
    return 0
def main():
    """Create the target database if missing, then build tables and indexes.

    Returns:
        0 on completion.
    """
    args = handle_args()
    dburl = dbconfig.make_db_url(dbconfig.default_admin_db_params, args.dbname)
    if not db_utils.check_database_exists(dburl):
        db_utils.create_empty_database(dbconfig.default_admin_db_url,
                                       args.dbname)
        logger.info('Database "{}" created.'.format(dburl.database))
    engine = create_engine(dburl)
    # Hold the connection in a variable and close it explicitly; the original
    # ``engine.connect()`` call leaked the connection.
    connection = engine.connect()
    try:
        db_utils.create_tables_and_indexes(connection)
    finally:
        connection.close()
    return 0
def main():
    """Dump VOEvent packets from the database into (optionally split) tarballs.

    Packets are fetched in batches of ``args.nsplit`` (or all at once when
    unset) and written via ``write_tarball``.

    Returns:
        0 on completion.

    Raises:
        RuntimeError: If the target database does not exist.
    """
    args = handle_args()
    dburl = dbconfig.make_db_url(dbconfig.default_admin_db_params, args.dbname)
    if not db_utils.check_database_exists(dburl):
        raise RuntimeError("Database not found")
    filecount = 1
    n_packets_written = 0

    def get_tarfile_path():
        # Numbered suffix when splitting output across multiple archives.
        if args.nsplit:
            suffix = '.{0:03d}.tar.bz2'.format(filecount)
        else:
            suffix = '.tar.bz2'
        return args.tarfile_pathstem + suffix

    session = Session(bind=create_engine(dburl))
    try:
        if args.prefetch:
            # Fetch only the needed columns instead of full ORM objects.
            qry = session.query(Voevent.ivorn, Voevent.xml)
        else:
            qry = session.query(Voevent)
        if args.all:
            logger.info("Dumping **all** packets currently in database")
        else:
            qry = qry.filter(Voevent.author_datetime < args.end)
            if args.start is not None:
                qry = qry.filter(Voevent.author_datetime >= args.start)
                logger.info("Fetching packets from {}".format(args.start))
            else:
                logger.info("Fetching packets from beginning of time")
            logger.info("...until: {}".format(args.end))
        qry = qry.order_by(Voevent.id)
        n_matching = qry.count()
        logger.info("Dumping {} packets".format(n_matching))
        start_time = datetime.datetime.now()
        while n_packets_written < n_matching:
            logger.debug(
                "Fetching batch of up to {} packets".format(args.nsplit))
            voevents = qry.limit(args.nsplit).offset(n_packets_written).all()
            n_packets_written += write_tarball(voevents, get_tarfile_path())
            elapsed = (datetime.datetime.now() - start_time).total_seconds()
            # Guard the division: a batch can finish within clock resolution,
            # making ``elapsed`` zero.
            rate = n_packets_written / elapsed if elapsed else float('inf')
            # NOTE(review): ``rate`` is packets/s, but the message says
            # "kilopacket/s" -- confirm which unit was intended.
            logger.info(
                "{} packets dumped so far, in {} ({:.0f} kilopacket/s)".format(
                    n_packets_written, elapsed, rate))
            filecount += 1
    finally:
        # Ensure the session is released even if a batch write fails.
        session.close()
    logger.info("Wrote {} packets".format(n_packets_written))
    return 0
def main(dbname, check, tarballs):
    """Load VOEvent packets from one or more tarballs into the database.

    Args:
        dbname: Name of the target database.
        check: If True, check for (and skip) duplicate packets on insert.
        tarballs: Iterable of tarball file paths to ingest.

    Returns:
        0 on completion.

    Raises:
        RuntimeError: If the target database does not exist.
    """
    dburl = dbconfig.make_db_url(dbconfig.default_admin_db_params, dbname)
    if not db_utils.check_database_exists(dburl):
        raise RuntimeError("Database not found")
    # Create the engine once, outside the loop -- the original rebuilt engine
    # and connection pool for every tarball.  A fresh session per tarball
    # keeps the identity map bounded.
    engine = create_engine(dburl)
    with click.progressbar(tarballs) as tarball_bar:
        for tbpath in tarball_bar:
            session = Session(bind=engine)
            try:
                n_parsed, n_loaded = ingest.load_from_tarfile(
                    session, tarfile_path=tbpath, check_for_duplicates=check)
                logger.info("Loaded {} packets into {} from {}".format(
                    n_loaded, dbname, tbpath))
            finally:
                # Release the session even if ingestion raises.
                session.close()
    return 0
def main():
    """Dump VOEvent packets from the database into (optionally split) tarballs.

    Packets are fetched in batches of ``args.nsplit`` (or all at once when
    unset) and written via ``write_tarball``.

    Returns:
        0 on completion.

    Raises:
        RuntimeError: If the target database does not exist.
    """
    args = handle_args()
    dburl = dbconfig.make_db_url(dbconfig.default_admin_db_params, args.dbname)
    if not db_utils.check_database_exists(dburl):
        raise RuntimeError("Database not found")
    filecount = 1
    n_packets_written = 0

    def get_tarfile_path():
        # Numbered suffix when splitting output across multiple archives.
        if args.nsplit:
            suffix = '.{0:03d}.tar.bz2'.format(filecount)
        else:
            suffix = '.tar.bz2'
        return args.tarfile_pathstem + suffix

    session = Session(bind=create_engine(dburl))
    try:
        if args.prefetch:
            # Fetch only the needed columns instead of full ORM objects.
            qry = session.query(Voevent.ivorn, Voevent.xml)
        else:
            qry = session.query(Voevent)
        if args.all:
            logger.info("Dumping **all** packets currently in database")
        else:
            qry = qry.filter(Voevent.author_datetime < args.end)
            if args.start is not None:
                qry = qry.filter(Voevent.author_datetime >= args.start)
                logger.info("Fetching packets from {}".format(args.start))
            else:
                logger.info("Fetching packets from beginning of time")
            logger.info("...until: {}".format(args.end))
        qry = qry.order_by(Voevent.id)
        n_matching = qry.count()
        logger.info("Dumping {} packets".format(n_matching))
        start_time = datetime.datetime.now()
        while n_packets_written < n_matching:
            logger.debug(
                "Fetching batch of up to {} packets".format(args.nsplit))
            voevents = qry.limit(args.nsplit).offset(n_packets_written).all()
            n_packets_written += write_tarball(voevents, get_tarfile_path())
            elapsed = (datetime.datetime.now() - start_time).total_seconds()
            # Guard the division: a batch can finish within clock resolution,
            # making ``elapsed`` zero.
            rate = n_packets_written / elapsed if elapsed else float('inf')
            # NOTE(review): ``rate`` is packets/s, but the message says
            # "kilopacket/s" -- confirm which unit was intended.
            logger.info(
                "{} packets dumped so far, in {} ({:.0f} kilopacket/s)".format(
                    n_packets_written, elapsed, rate))
            filecount += 1
    finally:
        # Ensure the session is released even if a batch write fails.
        session.close()
    logger.info("Wrote {} packets".format(n_packets_written))
    return 0
def cli(dbname, direct_store, hashdb_path, logfile, sleeptime):
    """Command-line entry point that wraps ``main``.

    Keeping ``main`` as a plain function preserves its use elsewhere
    (e.g. in tests), while this wrapper performs the one-time setup:
    configuring the session registry against the target database.
    """
    db_url = dbconfig.make_db_url(dbconfig.default_admin_db_params, dbname)
    engine = sqlalchemy.engine.create_engine(db_url, echo=False)
    session_registry.configure(bind=engine)
    if not direct_store:
        main(hashdb_path, logfile, sleeptime)
    else:
        # Direct-store mode: no pause between packets, custom processor.
        main(hashdb_path,
             logfile,
             voevent_pause_secs=0.0,
             process_function=direct_store_voevent)
def main():
    """Read a single VOEvent packet from stdin and insert it into the database.

    Returns:
        0 on success, 1 if the packet could not be inserted.

    Raises:
        RuntimeError: If the target database does not exist.
    """
    args = handle_args()
    logger = setup_logging(args.logfile_path)
    dburl = dbconfig.make_db_url(dbconfig.default_admin_db_params, args.dbname)
    if not db_utils.check_database_exists(dburl):
        raise RuntimeError("Database not found")
    # Read raw bytes rather than text: lxml (via voeventparse.loads) rejects a
    # ``str`` carrying an XML encoding declaration, so leave decoding to the
    # parser.  (Matches the sibling six-compatible loader's PY3 branch.)
    stdin = sys.stdin.buffer.read()
    v = voeventparse.loads(stdin)
    session = Session(bind=create_engine(dburl))
    try:
        conv.safe_insert_voevent(session, v)
        session.commit()
    except Exception:
        # Narrowed from a bare ``except:`` so KeyboardInterrupt / SystemExit
        # still propagate; log the traceback and signal failure instead of
        # falling through to the success log below.
        logger.exception(
            "Could not insert packet with ivorn {} into {}".format(
                v.attrib['ivorn'], args.dbname))
        return 1
    finally:
        session.close()
    logger.info("Loaded packet with ivorn {} into {}".format(
        v.attrib['ivorn'], args.dbname))
    return 0
import voeventdb.server.database.convenience as dbconv
import fourpisky as fps

# Use the Celery task logger rather than a plain module-level logger so log
# records carry task context.
logger = get_task_logger(__name__)

# When the dummy-mode environment variable is set, monkeypatch the outgoing
# email and Comet-broadcast functions with no-op stubs.
dummy_email_mode = os.environ.get(fps_env_vars.use_dummy_mode, None)
if dummy_email_mode is not None:
    fps.comms.email.send_email = fps.comms.email.dummy_email_send_function
    fps.comms.comet.send_voevent = fps.comms.comet.dummy_send_to_comet_stub
    logger.warning("Dummy stub-functions engaged!")

# Target database name comes from the environment, falling back to the test
# corpus database.  Fail fast at import time if it does not exist.
voeventdb_dbname = os.environ.get(fps_env_vars.voeventdb_dbname,
                                  dbconfig.testdb_corpus_url.database)
dburl = dbconfig.make_db_url(dbconfig.default_admin_db_params,
                             voeventdb_dbname)
if not db_utils.check_database_exists(dburl):
    raise RuntimeError(
        "voeventdb database not found: {}".format(voeventdb_dbname))
dbengine = create_engine(dburl)


@fps_app.task()
def process_voevent_celerytask(bytestring):
    """
    Process the voevent using the 'voevent_logic' i.e. the function defined
    in `fourpisky.scripts.process_voevent`.
    """
    # NOTE(review): only the parse step is visible in this chunk -- the task
    # body presumably continues beyond it; verify against the full file.
    v = voeventparse.loads(bytestring)
from twisted.plugin import IPlugin
from comet.icomet import IHandler
import comet.log as log
import os
import voeventparse
from sqlalchemy import create_engine
from sqlalchemy.orm import Session
import voeventdb.server.database.config as dbconfig
from voeventdb.server.database import db_utils
import voeventdb.server.database.convenience as dbconv

# Target database name comes from the environment, falling back to the test
# corpus database.
voeventdb_dbname = os.environ.get("VOEVENTDB_DBNAME",
                                  dbconfig.testdb_corpus_url.database)
dburl = dbconfig.make_db_url(dbconfig.default_admin_db_params,
                             voeventdb_dbname)
if not db_utils.check_database_exists(dburl):
    # Warns rather than raises -- presumably so Comet can still start when
    # the database is unavailable at plugin-load time; confirm intent.
    log.warn("voeventdb database not found: {}".format(
        voeventdb_dbname))
dbengine = create_engine(dburl)


# NOTE(review): ``implementer`` is not imported in this chunk; it requires
# ``from zope.interface import implementer`` -- confirm against the full file.
@implementer(IPlugin, IHandler)
class VoeventdbInserter(object):
    # Handler name under which Comet registers this plugin.
    name = "voeventdb-insert"

    # When the handler is called, it is passed an instance of
    # comet.utility.xml.xml_document.
    def __call__(self, event):
        """
        Add an event to the celery processing queue
        """