def test_msgs():
    from pypit import ardebug
    from pypit import armsgs as pyparm
    debug = ardebug.init()

    msgs = pyparm.Messages(None, debug, 1)
    msgs.info("test 123")
    msgs.warn("test 123")
    msgs.bug("test 123")
    msgs.work("test 123")
    msgs.close()
def get_dummy_logger():
    """ Useful for testing

    Returns
    -------
    pypit_logger : Messages
        The module-level logger instance
    """
    from pypit import ardebug
    from pypit import armsgs as pyparm
    debug = ardebug.init()

    pyparm.pypit_logger = pyparm.Messages(None, debug, 1)
    return pyparm.pypit_logger
def get_dummy_logger(develop=False):
    """ Useful for testing

    Parameters
    ----------
    develop : bool, optional
        If True, set debug['develop']

    Returns
    -------
    pypit_logger : Messages
        The module-level logger instance
    """
    from pypit import ardebug
    from pypit import armsgs as pyparm
    debug = ardebug.init()
    debug['develop'] = develop

    pyparm.pypit_logger = pyparm.Messages(None, debug, 0)
    return pyparm.pypit_logger
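# Illustrative only (not part of the original test module): a minimal sketch of how a
# test might use the dummy logger defined above, exercising only the Messages methods
# already called in test_msgs(). The function name below is hypothetical.
def example_dummy_logger_usage():
    msgs = get_dummy_logger(develop=True)
    msgs.info("dummy logger is active")
    msgs.warn("this warning goes through the same Messages instance")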
def test_log_write():
    from pypit import ardebug
    from pypit import armsgs as pyparm
    debug = ardebug.init()

    outfil = 'tst.log'
    msgs = pyparm.Messages(outfil, debug, 1)
    msgs.close()

    # Ensure the scipy, numpy, and astropy versions are written to the log
    with open(outfil, 'r') as f:
        lines = f.readlines()
    pckgs = ['scipy', 'numpy', 'astropy']
    flgs = [False]*len(pckgs)
    for line in lines:
        for jj, pckg in enumerate(pckgs):
            if pckg in line:
                flgs[jj] = True
    for flg in flgs:
        assert flg is True
from pypit.core.artraceslits import trace_fweight, trace_gweight
from pypit.arutils import robust_polyfit, func_val
from pypit import msgs
from pypit import ardebug as debugger
from pypit import ginga
#from pypit import arload
#from pypit import arproc
#from pypit import arcomb
#from pypit import ardeimos
#from pypit import arlris
#from pypit import arpixels
#from pypit import arsave
#from pypit import traceslits

debug = debugger.init()
debug['develop'] = True
msgs.reset(debug=debug, verbosity=2)

import sys

from scipy.io import readsav
import scipy
#from pydl.pydlutils.trace import traceset2xy, xy2traceset
from pydl.pydlutils.image import djs_maskinterp

from specobj import SpecObj
#from pypit.idl_stats import djs_iterstat
#from pypit.extract_boxcar import extract_boxcar

from matplotlib import pyplot as plt
from astropy.stats import sigma_clipped_stats
# See top-level LICENSE file for Copyright information
#
# -*- coding: utf-8 -*-
"""
This script runs PYPIT
"""
from __future__ import (print_function, absolute_import, division, unicode_literals)

import pdb as debugger

# Globals
from pypit import ardebug
debug = ardebug.init()
#debug['develop'] = True
#debug['arc'] = True
#debug['sky_sub'] = True
#debug['trace'] = True
#debug['obj_profile'] = True
#debug['trace_obj'] = True
#debug['tilts'] = True
#debug['flexure'] = True
#debug['no_qa'] = True

from pypit.armsgs import Messages as Initmsg
initmsgs = Initmsg(None, debug, 1)


def parser(options=None):
""" Port the global sky sub routine from LowRedux """ import numpy as np import sys import os from astropy.io import fits from pypit import msgs from pypit import ardebug as debugger debug = debugger.init() debug['develop'] = True msgs.reset(debug=debug, verbosity=2) from pypit import ginga from pydl.pydlutils.bspline import bspline sys.path.append(os.path.abspath("./")) import dev_extract maskval=-999999.9, def global_skysub(sciimg, sciivar, piximg, slitmask, edgmask, skymask=None, bsp=0.6, islit=None, sigrej=3., debug=False): # Python indexing
""" This script pushes a FITS file to ginga """ from __future__ import print_function, absolute_import, division, unicode_literals try: from xastropy.xutils import xdebug as debugger except: import pdb as debugger # Globals from pypit import ardebug debug = ardebug.init() # debug['develop'] = True # debug['arc'] = True # debug['sky_sub'] = True # debug['trace'] = True # debug['obj_profile'] = True # debug['tilts'] = True # debug['flexure'] = True from pypit.armsgs import Messages as Initmsg initmsgs = Initmsg(None, debug, 1) def parser(options=None): import argparse
def PYPIT(redname, debug=None, progname=__file__, quick=False, ncpus=1,
          verbosity=1, use_masters=False, devtest=False, logname=None):
    """
    Main driver of the PYPIT code. Default settings and
    user-specified changes are made, and passed to
    the appropriate code for data reduction.

    Parameters
    ----------
    redname : string
        Input reduction script
    debug : dict, optional
        Debug dict
    progname : string
        Name of the program
    quick : bool
        If True, a quick reduction (but possibly less accurate) will be performed.
        This flag is most useful for observing at a telescope, but not for
        publication quality results.
    ncpus : int
        Number of CPUs to use for multiprocessing the data reduction
        (sometimes not used)
    verbosity : int (0, 1, 2)
        Level of verbosity:
          0 = No output
          1 = Minimal output (default - suitable for the average user)
          2 = All output
    use_masters : bool, optional
        Load calibration files from the MasterFrames directory, if they exist
    devtest : bool, optional
        Running the PYPIT Development suite; turns on instrument-specific options
    logname : str or None
        The name of an ascii log file which is used to
        save the output details of the reduction
    debug : dict
        A PYPIT debug dict (from ardebug.init)
    version : str
    last_updated : str
    ---------------------------------------------------
    """
    from pypit import ardebug
    # Init logger
    if debug is None:
        debug = ardebug.init()
    msgs = armsgs.get_logger((logname, debug, verbosity))
    msgs.pypit_file = redname

    # This needs to be loaded after msgs
    from pypit import arparse

    # version checking
    try:
        archeck.version_check()
    except archeck.VersionError as err:
        msgs.error(err.message)

    # First send all signals to messages to be dealt with (i.e. someone hits ctrl+c)
    sigsignal(SIGINT, msgs.signal_handler)

    # Ignore all warnings given by python
    resetwarnings()
    simplefilter("ignore")

    # Record the starting time
    tstart = time()

    # Load the input file
    pyp_dict = load_input(redname, msgs)
    parlines, datlines, spclines = [pyp_dict[ii] for ii in ['par', 'dat', 'spc']]

    # Initialize the arguments and flags
    # argflag = arload.argflag_init()
    # settings.argflag['run']['ncpus'] = ncpus
    # settings.argflag['output']['verbosity'] = verbosity

    # Determine the name of the spectrograph
    specname = None
    for i in range(len(parlines)):
        parspl = parlines[i].split()
        if len(parspl) < 3:
            msgs.error("There appears to be a missing argument on the following input line" + msgs.newline() +
                       parlines[i])
        if (parspl[0] == 'run') and (parspl[1] == 'spectrograph'):
            specname = parspl[2]
            break
    if specname is None:
        msgs.error("Please specify the spectrograph settings to be used with the command" + msgs.newline() +
                   "run spectrograph <name>")
    msgs.info("Reducing data from the {0:s} spectrograph".format(specname))

    # Determine the type of reduction used for this spectrograph
    redtype = None
    # Get the software path
    prgn_spl = progname.split('/')
    tfname = "/".join(prgn_spl[:-1]) + "/"
    # Settings file
    fname = tfname + 'data/settings/settings.' + specname
    try:
        spl = open(fname, 'r').readlines()
    except IOError:
        msgs.error("The following instrument settings file cannot be found:" + msgs.newline() +
                   fname + msgs.newline() +
                   "Please check the settings file exists, and that the instrument name is spelt correctly.")
    for i in range(len(spl)):
        parspl = spl[i].split()
        if len(parspl) < 3:
            continue
        if (parspl[0] == 'mosaic') and (parspl[1] == 'reduction'):
            redtype = parspl[2]
            break
    if redtype is None:
        msgs.bug("The {0:s} instrument settings file must contain the reduction type".format(specname))
        msgs.error("Please specify the reduction type with the command" + msgs.newline() +
                   "mosaic reduction <type>")

    # Load default reduction arguments/flags, and set any command line arguments
    argf = arparse.get_argflag_class((redtype.upper(), ".".join(redname.split(".")[:-1])))
    argf.init_param()
    # Run specific
    argf.set_param('run pypitdir {0:s}'.format(tfname))
    argf.set_param('run progname {0:s}'.format(progname))
    argf.set_param('run redname {0:s}'.format(redname))
    # Load user changes to the arguments/flags
    plines = argf.load_lines(parlines)
    argf.set_paramlist(plines)
    # If the user wishes to load a settings file, do that now
    if argf.__dict__['_argflag']['run']['load']['settings'] is not None:
        lines = argf.load_file(argf.__dict__['_argflag']['run']['load']['settings'])
        argf.set_paramlist(lines)

    # Load default spectrograph settings
    spect = arparse.get_spect_class((redtype.upper(), specname, ".".join(redname.split(".")[:-1])))
    lines = spect.load_file(base=True)  # Base spectrograph settings
    spect.set_paramlist(lines)
    lines = spect.load_file()  # Instrument specific
    spect.set_paramlist(lines)
    # Load frametype numbers, as relevant
    if len(pyp_dict['ftype']) > 0:
        ftlines = spect.load_ftype(pyp_dict['ftype'])
        plines = spect.load_lines(ftlines)
        spect.set_paramlist(plines)
    # Load user changes to the arguments/flags
    plines = spect.load_lines(spclines)
    spect.set_paramlist(plines)
    if argf.__dict__['_argflag']['run']['load']['spect'] is not None:
        lines = spect.load_file(argf.__dict__['_argflag']['run']['load']['spect'])
        spect.set_paramlist(lines)
    # If the instrument settings file sets some argflag settings, implement those changes now
    if len(spect.__dict__['_settings']) != 0:
        argf.set_paramlist(spect.__dict__['_settings'])
    # Load command line changes
    argf.set_param('run ncpus {0:d}'.format(ncpus))
    argf.set_param('output verbosity {0:d}'.format(verbosity))
    if use_masters:
        argf.set_param('reduce masters reuse True')
    msgs.work("Make appropriate changes to quick reduction")
    # Load Development suite changes
    if devtest:
        msgs.info("Loading instrument specific arguments for Development Suite tests")
        from pypit import ardevtest
        ardevtest.set_param(argf, specname)
    if quick:
        # If a quick reduction has been requested, make sure the requested pipeline
        # is the quick implementation (if it exists), otherwise run the standard pipeline.
        msgs.work("QUICK REDUCTION TO STILL BE DONE")

    # Setup from PYPIT file?
    if len(pyp_dict['setup']['name']) == 1:
        argf.set_param('setup name {:s}'.format(pyp_dict['setup']['name'][0]))

    # Finally, save the arguments/flags and spectrograph settings used for this reduction
    argf.save()
    spect.save()

    # Now that all of the relevant settings are loaded, globalize the settings
    arparse.init(argf, spect)

    '''
    # Test that a maximum of one .setup files is present
    from pypit import arsort
    setup_file, nexist = arsort.get_setup_file()
    if nexist == 1:
        msgs.info("Found setup_file: {:s}".format(setup_file))
        msgs.info("Will use this to guide the data reduction.")
    '''

    # Load the important information from the fits headers
    from pypit.arload import load_headers
    fitsdict, updates = load_headers(datlines)

    # If some settings were updated because of the fits headers, globalize the settings again
    if len(updates) != 0:
        spect.set_paramlist(updates)
        arparse.init(argf, spect)

    # If the dispersion direction is 1, flip the axes
    if arparse.argflag['trace']['dispersion']['direction'] == 1:
        # Update the keywords of all fits files
        for ff in range(len(fitsdict['naxis0'])):
            temp = fitsdict['naxis0'][ff]
            fitsdict['naxis0'][ff] = fitsdict['naxis1'][ff]
            fitsdict['naxis1'][ff] = temp
        # Update the spectrograph settings for all detectors in the mosaic
        for dd in range(arparse.spect['mosaic']['ndet']):
            ddnum = arparse.get_dnum(dd + 1)
            # Change the user-specified (x,y) pixel sizes
            tmp = arparse.spect[ddnum]['xgap']
            arparse.spect[ddnum]['xgap'] = arparse.spect[ddnum]['ygap']
            arparse.spect[ddnum]['ygap'] = tmp
            arparse.spect[ddnum]['ysize'] = 1.0 / arparse.spect[ddnum]['ysize']
            # Update the amplifier/data/overscan sections
            for i in range(arparse.spect[ddnum]['numamplifiers']):
                # Flip the order of the sections
                arparse.spect[ddnum]['datasec{0:02d}'.format(i + 1)] = \
                    arparse.spect[ddnum]['datasec{0:02d}'.format(i + 1)][::-1]
                arparse.spect[ddnum]['oscansec{0:02d}'.format(i + 1)] = \
                    arparse.spect[ddnum]['oscansec{0:02d}'.format(i + 1)][::-1]

    # Reduce the data!
    status = 0
    # Send the data away to be reduced
    if spect.__dict__['_spect']['mosaic']['reduction'] == 'ARMLSD':
        msgs.info("Data reduction will be performed using PYPIT-ARMLSD")
        from pypit import armlsd
        status = armlsd.ARMLSD(fitsdict)
    elif spect.__dict__['_spect']['mosaic']['reduction'] == 'ARMED':
        msgs.info("Data reduction will be performed using PYPIT-ARMED")
        from pypit import armed
        status = armed.ARMED(fitsdict)
    # Check for successful reduction
    if status == 0:
        from pypit import arqa
        msgs.info("Data reduction complete")
        # QA HTML
        msgs.info("Generating QA HTML")
        arqa.gen_mf_html(redname)
        arqa.gen_exp_html()
    elif status == 1:
        msgs.info("Setup complete")
    elif status == 2:
        msgs.info("Calcheck complete")
    else:
        msgs.error("Data reduction failed with status ID {0:d}".format(status))

    # Capture the end time and print it to user
    tend = time()
    codetime = tend - tstart
    if codetime < 60.0:
        msgs.info("Data reduction execution time: {0:.2f}s".format(codetime))
    elif codetime / 60.0 < 60.0:
        mns = int(codetime / 60.0)
        scs = codetime - 60.0 * mns
        msgs.info("Data reduction execution time: {0:d}m {1:.2f}s".format(mns, scs))
    else:
        hrs = int(codetime / 3600.0)
        mns = int(60.0 * (codetime / 3600.0 - hrs))
        scs = codetime - 60.0 * mns - 3600.0 * hrs
        msgs.info("Data reduction execution time: {0:d}h {1:d}m {2:.2f}s".format(hrs, mns, scs))

    return
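# Hedged usage sketch (not part of the original module): PYPIT() above is normally
# driven by the command-line run script, but per its signature it can also be called
# directly. The reduction-file name below is hypothetical.
if __name__ == '__main__':
    PYPIT('my_reduction.pypit', verbosity=2, use_masters=True)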
def PYPIT(redname, debug=None, progname=__file__, quick=False, ncpus=1, verbose=1,
          use_masters=False, logname=None):
    """
    Main driver of the PYPIT code. Default settings and
    user-specified changes are made, and passed to
    the appropriate code for data reduction.

    Parameters
    ----------
    redname : string
        Input reduction script
    debug : dict, optional
        Debug dict
    progname : string
        Name of the program
    quick : bool
        If True, a quick reduction (but possibly less accurate) will be performed.
        This flag is most useful for observing at a telescope, but not for
        publication quality results.
    ncpus : int
        Number of CPUs to use for multiprocessing the data reduction
        (sometimes not used)
    verbose : int (0, 1, 2)
        Level of verbosity:
          0 = No output
          1 = Minimal output (default - suitable for the average user)
          2 = All output
    use_masters : bool, optional
        Load calibration files from MasterFrames directory, if they exist
    logname : str or None
        The name of an ascii log file which is used to
        save the output details of the reduction
    debug : dict
        A PYPIT debug dict (from ardebug.init)
    version : str
    last_updated : str
    ---------------------------------------------------
    """
    from pypit import ardebug
    # Init logger
    if debug is None:
        debug = ardebug.init()
    msgs = armsgs.get_logger((logname, debug, verbose))
    from pypit import arload  # This needs to be after msgs is defined!

    # First send all signals to messages to be dealt with (i.e. someone hits ctrl+c)
    sigsignal(SIGINT, msgs.signal_handler)

    # Ignore all warnings given by python
    resetwarnings()
    simplefilter("ignore")

    # Record the starting time
    tstart = time()

    # Load the input file
    parlines, datlines, spclines = arload.load_input(redname)

    # Initialize the arguments and flags
    argflag = arload.argflag_init()
    argflag['run']['ncpus'] = ncpus
    argflag['out']['verbose'] = verbose

    # Determine the name of the spectrograph
    specname = None
    for i in range(len(parlines)):
        parspl = parlines[i].split()
        if len(parspl) < 3:
            msgs.error("There appears to be a missing argument on the following input line" + msgs.newline() +
                       parlines[i])
        if (parspl[0] == 'run') and (parspl[1] == 'spectrograph'):
            specname = parspl[2]
    if specname is None:
        msgs.error("Please specify the spectrograph settings to be used with the command" + msgs.newline() +
                   "run spectrograph <name>")
    msgs.info("Reducing data from the {0:s} spectrograph".format(specname))

    # Load the Spectrograph settings
    spect = arload.load_spect(progname, specname)

    # Load default reduction arguments/flags, and set any command line arguments
    #argflag = arload.optarg(argflag, cmdlnarg, spect['mosaic']['reduction'].lower())

    # Load the default settings
    prgn_spl = progname.split('/')
    tfname = ""
    for i in range(0, len(prgn_spl)-1):
        tfname += prgn_spl[i] + "/"
    #fname = tfname + prgn_spl[-2] + '/settings.' + spect['mosaic']['reduction'].lower()
    fname = tfname + '/settings.' + spect['mosaic']['reduction'].lower()
    argflag = arload.load_settings(fname, argflag)
    argflag['run']['prognm'] = progname
    argflag['run']['pypitdir'] = tfname

    # Now update the settings based on the user input file
    argflag = arload.set_params(parlines, argflag, setstr="Input ")
    # Check the input file
    arload.check_argflag(argflag)

    # Load any changes to the spectrograph settings based on the user input file
    spect = arload.load_spect(progname, specname, spect=spect, lines=spclines)

    # Command line arguments
    if use_masters:
        argflag['masters']['use'] = True

    # If a quick reduction has been requested, make sure the requested pipeline
    # is the quick implementation (if it exists), otherwise run the standard pipeline.
    if quick:
        # Change to a "quick" settings file
        msgs.work("QUICK REDUCTION TO STILL BE DONE")

    # Load the important information from the fits headers
    fitsdict = arload.load_headers(argflag, spect, datlines)

    # Reduce the data!
    status = 0
    msgs.work("Make appropriate changes to quick reduction")
    if quick:
        msgs.work("define what is needed here for quick reduction")
    # Send the data away to be reduced
    if spect['mosaic']['reduction'] == 'ARMLSD':
        msgs.info("Data reduction will be performed using PYPIT-ARMLSD")
        from pypit import armlsd
        status = armlsd.ARMLSD(argflag, spect, fitsdict)
    elif spect['mosaic']['reduction'] == 'ARMED':
        msgs.info("Data reduction will be performed using PYPIT-ARMED")
        from pypit import armed
        status = armed.ARMED(argflag, spect, fitsdict)
    # Check for successful reduction
    if status == 0:
        msgs.info("Data reduction complete")
    else:
        msgs.error("Data reduction failed with status ID {0:d}".format(status))

    # Capture the end time and print it to user
    tend = time()
    codetime = tend - tstart
    if codetime < 60.0:
        msgs.info("Data reduction execution time: {0:.2f}s".format(codetime))
    elif codetime/60.0 < 60.0:
        mns = int(codetime/60.0)
        scs = codetime - 60.0*mns
        msgs.info("Data reduction execution time: {0:d}m {1:.2f}s".format(mns, scs))
    else:
        hrs = int(codetime/3600.0)
        mns = int(60.0*(codetime/3600.0 - hrs))
        scs = codetime - 60.0*mns - 3600.0*hrs
        msgs.info("Data reduction execution time: {0:d}h {1:d}m {2:.2f}s".format(hrs, mns, scs))

    return