if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('--config', help='CATCH configuration file.')
    parser.add_argument('--source', default='observation',
                        help=('limit analysis to this data source '
                              '(default: show all data)'))
    parser.add_argument('--nside', type=int, default=2048,
                        help=('Healpix nside parameter, default is 2048'
                              ' for 1.7 arcmin resolution'))
    parser.add_argument(
        '-o', default=None,
        help='output file name prefix, default based on --source')
    parser.add_argument('--format', default='png', help='plot file format')
    parser.add_argument('--dpi', type=int, default=200)
    args = parser.parse_args()

    prefix = args.source if args.o is None else args.o

    config = Config.from_file(args.config)
    with Catch.with_config(config) as catch:
        catch.source = args.source
        source_name = catch.source.__data_source_name__

        cov = make_sky_coverage_map(catch, args.nside)
        hp.write_map('.'.join((prefix, 'fits')), cov, overwrite=True)

        plot(cov, source_name)
        plt.savefig('.'.join((prefix, args.format)), dpi=args.dpi)
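# Example invocation (a sketch; the script file name is hypothetical, and
# --config must point to a valid CATCH configuration file):
#
#   python sky_coverage.py --config catch.config --source skymapper \
#       --nside 1024 -o skymapper_coverage --format pdf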
import argparse

from sqlalchemy.orm.session import make_transient

from catch import Catch, Config
from catch.model import NEATMauiGEODSS, NEATPalomarTricam, SkyMapper
from sbsearch.model import ObservationSpatialTerm
from sbsearch.logging import ProgressBar

parser = argparse.ArgumentParser()
parser.add_argument('source_config')
parser.add_argument('destination_config')
args = parser.parse_args()

src = Catch.with_config(Config.from_file(args.source_config))
dest = Catch.with_config(Config.from_file(args.destination_config))

# for this example, just copy the observation tables and the spatial index
for table in (NEATMauiGEODSS, NEATPalomarTricam, SkyMapper,
              ObservationSpatialTerm):
    n_obs = src.db.session.query(table).count()
    with ProgressBar(n_obs, src.logger, scale='log') as bar:
        n_obs = 0
        while True:
            rows = (
                src.db.session.query(table)
                .offset(n_obs)
                .limit(1000)
                .all())
            if len(rows) == 0:
                break
            for row in rows:
                n_obs += 1
                bar.update()
                src.db.session.expunge(row)
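                # Assumed continuation (the original is truncated here, but
                # the make_transient import above suggests this pattern):
                # detach the row from the source session's identity map and
                # stage it for insert into the destination database.
                make_transient(row)
                dest.db.session.add(row)
            dest.db.session.commit()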
import uuid

from astropy.time import Time

from catch import Catch, Config
from catch.model import SkyMapper

# Find 65P in SkyMapper DR2
#
# Catch v0 result:
#
# * JD: 2457971.9152
# * Product ID: 20170806095706-22
# * https://api.skymapper.nci.org.au/public/siap/dr2/get_image?IMAGE=20170806095706-22&SIZE=0.08333333333333333&POS=237.22441,-23.40757&FORMAT=fits
#
# For CATCH with min_edge_length = 3e-4 rad, spatial index terms are:
# $9e8c1,9e8c1,9e8c4,9e8d,9e8c,9e9,9ec,$9e8c7,9e8c7,$9e8ea04,9e8ea04,9e8ea1,9e8ea4,9e8eb,9e8ec,9e8f,$9e8ea0c,9e8ea0c,$9e8ea74,9e8ea74,9e8ea7

config = Config.from_file('../catch.config', debug=True)
with Catch.with_config(config) as catch:
    catch.db.engine.echo = False  # set to True to see SQL statements
    expected = (catch.db.session.query(SkyMapper)
                .filter(SkyMapper.product_id == '20170806095706-22')
                .all())[0]

    # benchmark queries
    t = []

    # full survey search
    t.append(Time.now())
    job_id = uuid.uuid4()
    count = catch.query('65P', job_id, sources=['skymapper'],
                        cached=False, debug=True)
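    # Assumed continuation (the original is truncated here): close out the
    # timer and report the elapsed wall-clock time for the search.
    t.append(Time.now())
    print(f'full survey search: {(t[-1] - t[-2]).sec:.2f} s, '
          f'{count} observation(s) found')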
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.collections import PolyCollection
from astropy.time import Time
from spherical_geometry.polygon import (
    SphericalPolygon, great_circle_arc, vector)

from catch import Catch, Config, model
from sbsearch.core import line_to_segment_query_terms
from sbsearch.spatial import term_to_cell_vertices

target = '65P'
dates = ('2017-07-15', '2017-08-15')
# dates = ('2017-01-01', '2017-12-31')
# dates = ('2014-03-15', '2018-03-15')
view = (10, -110)  # elevation, azimuth for 3D plot
config = Config(database='postgresql://@/catch_dev',
                log='/dev/null', debug=True)

file_suffix = (f'{target.lower().replace(" ", "").replace("/", "")}'
               f'-{dates[0].replace("-", "")}'
               f'-{dates[1].replace("-", "")}')

with Catch.with_config(config) as catch:
    # get 65P query terms for Jul/Aug 2017
    comet = catch.get_designation(target)
    eph = comet.ephemeris(model.SkyMapper, start=Time(dates[0]),
                          stop=Time(dates[1]))
    ra = np.array([e.ra for e in eph])
    dec = np.array([e.dec for e in eph])
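    # A quick-look sketch (an assumed addition, not part of the original
    # script): plot the ephemeris sky track that the query terms will cover.
    fig, ax = plt.subplots()
    ax.plot(ra, dec, marker='.', color='k')
    ax.set_xlabel('RA (deg)')
    ax.set_ylabel('Dec (deg)')
    fig.savefig(f'eph-{file_suffix}.png')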
from contextlib import contextmanager
from typing import Iterator

import sqlalchemy
from sqlalchemy.engine import Engine
from sqlalchemy.exc import DBAPIError, SQLAlchemyError
from sqlalchemy.orm import Session, scoped_session, sessionmaker
from sqlalchemy.pool import NullPool

from catch import Catch, Config
from env import ENV

# Build URI and instantiate data-provider service
db_engine_URI: str = (
    f"{ENV.DB_DIALECT}://{ENV.DB_USERNAME}:{ENV.DB_PASSWORD}@{ENV.DB_HOST}"
    f"/{ENV.DB_DATABASE}")

db_engine: Engine = sqlalchemy.create_engine(db_engine_URI,
                                             poolclass=NullPool,
                                             pool_recycle=3600,
                                             pool_pre_ping=True)
db_session: scoped_session = scoped_session(sessionmaker(bind=db_engine))

# catch library configuration
catch_config: Config = Config(log=ENV.CATCH_LOG)


@contextmanager
def data_provider_session() -> Iterator[Session]:
    """Provide a transactional scope around a series of operations."""
    session: Session = db_session()
    try:
        yield session
        session.commit()
    except (SQLAlchemyError, DBAPIError):
        session.rollback()
        raise
    finally:
        db_session.remove()
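# A minimal usage sketch of the transactional scope above, assuming the
# catch models are mapped to this database (SkyMapper is one such model;
# any mapped class works the same way):
if __name__ == "__main__":
    from catch.model import SkyMapper

    with data_provider_session() as session:
        print(session.query(SkyMapper).count(), "SkyMapper observations")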
db.close()

if not os.path.exists("ps1dr2.db") or args.db_only:
    print("building temporary database")
    build_db(args.warp_meta, args.warp_files)
    print("completed")

if args.db_only:
    sys.exit(0)

test_time_agreement = False

# connect to catch database
config = Config.from_args(args)
with Catch.with_config(config) as catch:
    # setup WCS object to calculate image corners
    w = WCS(naxis=2)
    w.wcs.ctype = "RA---TAN", "DEC--TAN"
    w.wcs.cdelt = -6.94444461e-05, 6.94444461e-05

    catch.db.drop_spatial_index()

    observations = []
    tri = ProgressTriangle(1, catch.logger, base=2)
    bad_dt = []

    # iterate over rows in temporary database
    for row in get_rows(args.start_offset):
        # PS1DR2 object inherits sbsearch.model.Observation columns:
        # observation_id, source, mjd_start, mjd_stop, fov, spatial_terms,
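        # A hedged sketch of the likely next step (the original is truncated
        # mid-comment; `row` field names and warp dimensions here are
        # hypothetical): center the WCS on the warp and evaluate the sky
        # coordinates of the four image corners to build the field of view.
        #
        #   w.wcs.crval = row["ra"], row["dec"]
        #   nx, ny = row["naxis1"], row["naxis2"]
        #   corners = w.pixel_to_world([0, nx, nx, 0], [0, 0, ny, ny])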
def catch_cli(*args):
    """CATCH command-line script."""
    import sys
    import argparse
    import uuid

    from astropy.time import Time
    from astropy.table import Table

    from catch import Catch, Config
    from catch.config import _config_example

    parser = argparse.ArgumentParser(
        "catch",
        epilog=f"Configuration files are JSON-formatted:\n{_config_example}")
    parser.add_argument("--config", help="CATCH configuration file")
    parser.add_argument("--database", help="use this database URI")
    parser.add_argument("--log", help="save log messages to this file")
    parser.add_argument("--arc-limit", type=float,
                        help="maximum arc length to search, radians")
    parser.add_argument("--time-limit", type=float,
                        help="maximum time span to search, days")
    parser.add_argument("--debug", action="store_true", help="debug mode")

    subparsers = parser.add_subparsers(help="sub-command help")

    verify = subparsers.add_parser(
        "verify", help="connect to the database and verify or create tables")
    verify.set_defaults(command="verify")

    list_sources = subparsers.add_parser(
        "sources", help="show available data sources")
    list_sources.set_defaults(command="sources")

    search = subparsers.add_parser("search", help="search for an object")
    search.set_defaults(command="search")
    search.add_argument("desg", help="object designation")
    search.add_argument(
        "--source",
        dest="sources",
        action="append",
        help="search this observation source (may be used multiple times)",
    )
    search.add_argument("--force", dest="cached", action="store_false",
                        help="do not use cached results")
    search.add_argument("-o", help="write table to this file")

    args = parser.parse_args()

    if not hasattr(args, "command"):
        parser.print_help()
        sys.exit()

    if args.command == "verify":
        print("Verify databases and create as needed.\n")

    rows = []
    config = Config.from_args(args)
    with Catch.with_config(config) as catch:
        if args.command == "verify":
            pass
        elif args.command == "sources":
            print("Available sources:\n *",
                  "\n * ".join(catch.sources.keys()))
        elif args.command == "search":
            job_id = uuid.uuid4()
            catch.query(args.desg, job_id, sources=args.sources,
                        cached=args.cached)
            columns = set()
            # catch.caught returns a list of rows.
            for row in catch.caught(job_id):
                r = {}
                # Each row consists of a Found and an Observation object.
                # The Observation object will be a subclass, e.g.,
                # NeatPalomarTricam, or SkyMapper.
                for data_object in row:
                    # Aggregate fields and values from each data object
                    for k, v in _serialize_object(data_object):
                        r[k] = v
                columns = columns.union(set(r.keys()))

                r["cutout_url"] = row.Observation.cutout_url(
                    row.Found.ra, row.Found.dec)
                r["date"] = Time(row.Found.mjd, format="mjd").iso
                rows.append(r)

    if args.command == "search":
        if rows == []:
            print("# none found")
        else:
            # make sure all rows have all columns
            for i in range(len(rows)):
                for col in columns:
                    rows[i][col] = rows[i].get(col)

            tab = Table(rows=rows)
            if args.o:
                tab.write(args.o, format="ascii.fixed_width_two_line",
                          overwrite=True)
            else:
                tab.pprint(-1, -1)
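# Example invocations (a sketch; assumes catch_cli is wired up as a `catch`
# console entry point, and that file names are hypothetical):
#
#   catch --config catch.config verify
#   catch --config catch.config sources
#   catch --config catch.config search 65P --source skymapper -o found.txt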