Example No. 1
def main(args=None):
    # parse command-line options
    parser = argparse.ArgumentParser()
    # required arguments
    parser.add_argument("--input_rectwv_coeff",
                        required=True,
                        help="Input JSON file with rectification and "
                        "wavelength calibration polynomials "
                        "corresponding to a longslit observation",
                        type=argparse.FileType('rt'))
    parser.add_argument("--output_rectwv_coeff",
                        required=True,
                        help="Output JSON file with updated longslit_model "
                        "coefficients",
                        type=lambda x: arg_file_is_new(parser, x, mode='wt'))

    # optional arguments
    parser.add_argument("--geometry",
                        help="tuple x,y,dx,dy (default 0,0,640,480)",
                        default="0,0,640,480")
    parser.add_argument("--debugplot",
                        help="Integer indicating plotting & debugging options"
                        " (default=0)",
                        default=0,
                        type=int,
                        choices=DEBUGPLOT_CODES)
    parser.add_argument("--echo",
                        help="Display full command line",
                        action="store_true")
    args = parser.parse_args(args)

    if args.echo:
        print('\033[1m\033[31m% ' + ' '.join(sys.argv) + '\033[0m\n')

    # ---

    logging_from_debugplot(args.debugplot)
    logger = logging.getLogger(__name__)

    # geometry
    if args.geometry is None:
        geometry = None
    else:
        tmp_str = args.geometry.split(",")
        x_geom = int(tmp_str[0])
        y_geom = int(tmp_str[1])
        dx_geom = int(tmp_str[2])
        dy_geom = int(tmp_str[3])
        geometry = x_geom, y_geom, dx_geom, dy_geom

    # generate RectWaveCoeff object
    rectwv_coeff = RectWaveCoeff._datatype_load(args.input_rectwv_coeff.name)

    # update longslit_model parameters
    rectwv_coeff_updated = rectwv_coeff_add_longslit_model(
        rectwv_coeff=rectwv_coeff, geometry=geometry, debugplot=args.debugplot)

    # save updated RectWaveCoeff object into JSON file
    rectwv_coeff_updated.writeto(args.output_rectwv_coeff.name)
    logger.info('>>> Saving file ' + args.output_rectwv_coeff.name)
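Note: the example above parses the "--geometry" string by hand after argument parsing. A minimal self-contained sketch (not part of pyemir; geometry_tuple is a made-up name) of how the same parsing could be delegated to a custom argparse type:

import argparse

def geometry_tuple(value):
    """Parse an 'x,y,dx,dy' string into a tuple of four integers."""
    fields = value.split(",")
    if len(fields) != 4:
        raise argparse.ArgumentTypeError("expected 4 comma-separated integers")
    return tuple(int(field) for field in fields)

parser = argparse.ArgumentParser()
# argparse also passes string defaults through the type callable,
# so the default below ends up as the tuple (0, 0, 640, 480)
parser.add_argument("--geometry", type=geometry_tuple, default="0,0,640,480")
print(parser.parse_args(["--geometry", "10,20,640,480"]).geometry)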
Example No. 2
def main(args=None):

    # parse command-line options
    parser = argparse.ArgumentParser(
        description='description: compute median spectrum for each slitlet')

    # positional arguments
    parser.add_argument("fitsfile",
                        help="Input FITS file name",
                        type=argparse.FileType('rb'))
    parser.add_argument("outfile",
                        help="Output FITS file name",
                        type=lambda x: arg_file_is_new(parser, x, mode='wb'))

    # optional arguments
    parser.add_argument("--mode",
                        help="Output type: 0 -> full frame (default), "
                        "1 -> individual slitlets, "
                        "2 -> collapsed single spectrum)",
                        default=0,
                        type=int,
                        choices=[0, 1, 2])
    parser.add_argument("--minimum_slitlet_width_mm",
                        help="Minimum slitlet width (mm) for --mode 2 "
                        "(default=0)",
                        default=EMIR_MINIMUM_SLITLET_WIDTH_MM,
                        type=float)
    parser.add_argument("--maximum_slitlet_width_mm",
                        help="Maximum slitlet width (mm) for --mode 2 "
                        "(default=" + str(EMIR_MAXIMUM_SLITLET_WIDTH_MM) + ")",
                        default=EMIR_MAXIMUM_SLITLET_WIDTH_MM,
                        type=float)
    parser.add_argument("--debugplot",
                        help="Integer indicating plotting/debugging" +
                        " (default=0)",
                        default=0,
                        type=int,
                        choices=DEBUGPLOT_CODES)
    parser.add_argument("--echo",
                        help="Display full command line",
                        action="store_true")

    args = parser.parse_args(args=args)

    if args.echo:
        print('\033[1m\033[31mExecuting: ' + ' '.join(sys.argv) + '\033[0m\n')

    # read input FITS file
    hdulist = fits.open(args.fitsfile)

    image_median = median_slitlets_rectified(
        hdulist,
        mode=args.mode,
        minimum_slitlet_width_mm=args.minimum_slitlet_width_mm,
        maximum_slitlet_width_mm=args.maximum_slitlet_width_mm,
        debugplot=args.debugplot)

    # save result
    image_median.writeto(args.outfile, overwrite=True)
Example No. 3
def main(args=None):

    # parse command-line options
    parser = argparse.ArgumentParser(
        description='description: compute median spectrum for each slitlet'
    )

    # positional arguments
    parser.add_argument("fitsfile",
                        help="Input FITS file name",
                        type=argparse.FileType('rb'))
    parser.add_argument("outfile",
                        help="Output FITS file name",
                        type=lambda x: arg_file_is_new(parser, x, mode='wb'))

    # optional arguments
    parser.add_argument("--mode",
                        help="Output type: 0 -> full frame (default), "
                             "1 -> individual slitlets, "
                             "2 -> collapsed single spectrum)",
                        default=0, type=int,
                        choices=[0, 1, 2])
    parser.add_argument("--minimum_slitlet_width_mm",
                        help="Minimum slitlet width (mm) for --mode 2 "
                             "(default=0)",
                        default=EMIR_MINIMUM_SLITLET_WIDTH_MM, type=float)
    parser.add_argument("--maximum_slitlet_width_mm",
                        help="Maximum slitlet width (mm) for --mode 2 "
                             "(default=" +
                             str(EMIR_MAXIMUM_SLITLET_WIDTH_MM) + ")",
                        default=EMIR_MAXIMUM_SLITLET_WIDTH_MM, type=float)
    parser.add_argument("--debugplot",
                        help="Integer indicating plotting/debugging" +
                             " (default=0)",
                        default=0, type=int,
                        choices=DEBUGPLOT_CODES)
    parser.add_argument("--echo",
                        help="Display full command line",
                        action="store_true")

    args = parser.parse_args(args=args)

    if args.echo:
        print('\033[1m\033[31mExecuting: ' + ' '.join(sys.argv) + '\033[0m\n')

    # read input FITS file
    hdulist = fits.open(args.fitsfile)

    image_median = median_slitlets_rectified(
        hdulist,
        mode=args.mode,
        minimum_slitlet_width_mm=args.minimum_slitlet_width_mm,
        maximum_slitlet_width_mm=args.maximum_slitlet_width_mm,
        debugplot=args.debugplot
    )

    # save result
    image_median.writeto(args.outfile, overwrite=True)
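Note: the output file names in these examples are validated with arg_file_is_new() from numina, whose implementation is not reproduced here. Purely as a hypothetical stand-in illustrating the pattern (a type callable that refuses to overwrite an existing file), something along these lines could be used; refuse_existing_file is a made-up name, not the numina helper:

import argparse
import os

def refuse_existing_file(parser, filename, mode='w'):
    """Hypothetical validator: abort if the file already exists, else open it."""
    if os.path.exists(filename):
        parser.error("output file '{}' already exists".format(filename))
    return open(filename, mode)

parser = argparse.ArgumentParser()
parser.add_argument("outfile",
                    type=lambda x: refuse_existing_file(parser, x, mode='wb'))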
Example No. 4
def main(args=None):
    # parse command-line options
    parser = argparse.ArgumentParser()

    # positional arguments
    parser.add_argument("filename",
                        help="TXT with list of bounddict files",
                        type=argparse.FileType('rt'))
    parser.add_argument("--outfile",
                        required=True,
                        help="Output merged JSON file",
                        type=lambda x: arg_file_is_new(parser, x))

    # optional arguments
    parser.add_argument("--echo",
                        help="Display full command line",
                        action="store_true")

    args = parser.parse_args(args)

    if args.echo:
        print('\033[1m\033[31mExecuting: ' + ' '.join(sys.argv) + '\033[0m\n')

    # initialize empty output
    bounddict = {}

    # read list of JSON files to be merged
    file_content = args.filename.read().splitlines()
    next_file_is_first = True
    for line in file_content:
        if len(line) > 0:
            if line[0] != '#':
                tmpfile = line.split()[0]
                if not os.path.isfile(tmpfile):
                    raise ValueError("File " + tmpfile + " not found!")
                with open(tmpfile) as jsonfile:
                    tmpbounddict = json.load(jsonfile)
                if next_file_is_first:
                    bounddict = deepcopy(tmpbounddict)
                    # update some values
                    bounddict['meta_info']['creation_date'] = \
                        datetime.now().isoformat()
                    bounddict['uuid'] = str(uuid4())
                    next_file_is_first = False
                else:
                    for islitlet in range(EMIR_NBARS):
                        cslitlet = "slitlet" + str(islitlet).zfill(2)
                        if cslitlet in tmpbounddict['contents']:
                            for dateobs in tmpbounddict['contents'][cslitlet]:
                                bounddict['contents'][cslitlet][dateobs] = \
                                    tmpbounddict['contents'][cslitlet][dateobs]

    # save merged JSON file
    with open(args.outfile.name, 'w') as fstream:
        json.dump(bounddict, fstream, indent=2, sort_keys=True)
        print('>>> Saving file ' + args.outfile.name)
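Note: the merging loop above copies per-slitlet, per-dateobs entries from each additional bounddict into the first one; it indexes bounddict['contents'][cslitlet] directly, which assumes the first file already defines that slitlet. A self-contained sketch of the same idea on plain dictionaries with made-up data, using setdefault to also cover slitlets missing from the first file:

from copy import deepcopy

first = {'contents': {'slitlet01': {'2020-01-01T00:00:00': {'p': 1}}}}
second = {'contents': {'slitlet01': {'2020-01-02T00:00:00': {'p': 2}},
                       'slitlet02': {'2020-01-02T00:00:00': {'p': 3}}}}

merged = deepcopy(first)
for cslitlet, per_date in second['contents'].items():
    # add the slitlet key if the first file did not contain it
    merged['contents'].setdefault(cslitlet, {})
    for dateobs, value in per_date.items():
        merged['contents'][cslitlet][dateobs] = value

print(sorted(merged['contents']['slitlet01']))  # both observing dates kept
print(sorted(merged['contents']))               # ['slitlet01', 'slitlet02']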
Example No. 5
def main(args=None):
    # parse command-line options
    parser = argparse.ArgumentParser()

    # positional arguments
    parser.add_argument("filename",
                        help="TXT with list of bounddict files",
                        type=argparse.FileType('rt'))
    parser.add_argument("--outfile", required=True,
                        help="Output merged JSON file",
                        type=lambda x: arg_file_is_new(parser, x))

    # optional arguments
    parser.add_argument("--echo",
                        help="Display full command line",
                        action="store_true")

    args = parser.parse_args(args)

    if args.echo:
        print('\033[1m\033[31mExecuting: ' + ' '.join(sys.argv) + '\033[0m\n')

    # initialize empty output
    bounddict = {}

    # read list of JSON files to be merged
    file_content = args.filename.read().splitlines()
    next_file_is_first = True
    for line in file_content:
        if len(line) > 0:
            if line[0] != '#':
                tmpfile = line.split()[0]
                if not os.path.isfile(tmpfile):
                    raise ValueError("File " + tmpfile + " not found!")
                with open(tmpfile) as jsonfile:
                    tmpbounddict = json.load(jsonfile)
                if next_file_is_first:
                    bounddict = deepcopy(tmpbounddict)
                    # update some values
                    bounddict['meta_info']['creation_date'] = \
                        datetime.now().isoformat()
                    bounddict['uuid'] = str(uuid4())
                    next_file_is_first = False
                else:
                    for islitlet in range(EMIR_NBARS):
                        cslitlet = "slitlet" + str(islitlet).zfill(2)
                        if cslitlet in tmpbounddict['contents']:
                            for dateobs in tmpbounddict['contents'][cslitlet]:
                                bounddict['contents'][cslitlet][dateobs] = \
                                    tmpbounddict['contents'][cslitlet][dateobs]

    # save merged JSON file
    with open(args.outfile.name, 'w') as fstream:
        json.dump(bounddict, fstream, indent=2, sort_keys=True)
        print('>>> Saving file ' + args.outfile.name)
Example No. 6
def main(args=None):
    # parse command-line options
    parser = argparse.ArgumentParser(
        description='description: convert JSON file with refined multislit '
        'parameters to new JSON format')

    # required arguments
    parser.add_argument("input_json",
                        help="Input JSON with refined boundary parameters",
                        type=argparse.FileType('rt'))
    parser.add_argument("output_json",
                        help="Output JSON with fitted boundary parameters",
                        type=lambda x: arg_file_is_new(parser, x, mode='wt'))

    # optional arguments
    parser.add_argument("--echo",
                        help="Display full command line",
                        action="store_true")
    args = parser.parse_args(args)

    if args.echo:
        print('\033[1m\033[31m% ' + ' '.join(sys.argv) + '\033[0m\n')

    # ---

    # read input JSON file
    with open(args.input_json.name) as jsonfile:
        input_json = json.load(jsonfile)

    # generate object of type RefinedBoundaryModelParam from input JSON file
    refined_boundary_model = RefinedBoundaryModelParam(instrument='EMIR')
    refined_boundary_model.tags = {
        'grism': input_json['tags']['grism'],
        'filter': input_json['tags']['filter']
    }
    refined_boundary_model.contents = input_json['contents']
    refined_boundary_model.meta_info['dtu_configuration'] = \
        input_json['dtu_configuration']
    refined_boundary_model.meta_info['dtu_configuration_maxdiff'] = \
        input_json['dtu_configuration_maxdiff']
    for item in [
            'function_evaluations', 'global_residual', 'maxDTUoffset',
            'numresolution', 'parmodel', 'tolerance'
    ]:
        refined_boundary_model.meta_info[item] = input_json['meta_info'][item]
    refined_boundary_model.meta_info['origin']['bounddict'] = \
        'uuid:' + input_json['meta_info']['origin']['bounddict_uuid']
    refined_boundary_model.meta_info['origin']['init_bound_param'] = \
        'uuid:' + input_json['meta_info']['origin']['init_bound_param_uuid']
    refined_boundary_model.quality_control = numina.types.qc.QC.GOOD
    refined_boundary_model.writeto(args.output_json.name)
Example No. 7
def main(args=None):
    # parse command-line options
    parser = argparse.ArgumentParser(
        description='description: convert JSON file with refined multislit '
                    'parameters to new JSON format'
    )

    # required arguments
    parser.add_argument("input_json",
                        help="Input JSON with refined boundary parameters",
                        type=argparse.FileType('rt'))
    parser.add_argument("output_json",
                        help="Output JSON with fitted boundary parameters",
                        type=lambda x: arg_file_is_new(parser, x, mode='wt'))

    # optional arguments
    parser.add_argument("--echo",
                        help="Display full command line",
                        action="store_true")
    args = parser.parse_args(args)

    if args.echo:
        print('\033[1m\033[31m% ' + ' '.join(sys.argv) + '\033[0m\n')

    # ---

    # read input JSON file
    with open(args.input_json.name) as jsonfile:
        input_json = json.load(jsonfile)

    # generate object of type RefinedBoundaryModelParam from input JSON file
    refined_boundary_model = RefinedBoundaryModelParam(instrument='EMIR')
    refined_boundary_model.tags = {
        'grism': input_json['tags']['grism'],
        'filter': input_json['tags']['filter']
    }
    refined_boundary_model.contents = input_json['contents']
    refined_boundary_model.meta_info['dtu_configuration'] = \
        input_json['dtu_configuration']
    refined_boundary_model.meta_info['dtu_configuration_maxdiff'] = \
        input_json['dtu_configuration_maxdiff']
    for item in ['function_evaluations', 'global_residual', 'maxDTUoffset',
                 'numresolution', 'parmodel', 'tolerance']:
        refined_boundary_model.meta_info[item] = input_json['meta_info'][item]
    refined_boundary_model.meta_info['origin']['bounddict'] = \
        'uuid:' + input_json['meta_info']['origin']['bounddict_uuid']
    refined_boundary_model.meta_info['origin']['init_bound_param'] = \
        'uuid:' + input_json['meta_info']['origin']['init_bound_param_uuid']
    refined_boundary_model.quality_control = numina.types.qc.QC.GOOD
    refined_boundary_model.writeto(args.output_json.name)
Example No. 8
def main(args=None):

    # parse command-line options
    parser = argparse.ArgumentParser(
        description='description: merge 2 EMIR images averaging the common'
                    ' region'
    )

    # positional arguments
    parser.add_argument("infile1",
                        help="Input FITS file name #1",
                        type=argparse.FileType('rb'))
    parser.add_argument("infile2",
                        help="Input FITS file name #2",
                        type=argparse.FileType('rb'))
    parser.add_argument("outfile",
                        help="Output FITS file name",
                        type=lambda x: arg_file_is_new(parser, x, mode='wb'))

    # optional arguments
    parser.add_argument("--debugplot",
                        help="Integer indicating plotting/debugging" +
                             " (default=0)",
                        default=0, type=int,
                        choices=DEBUGPLOT_CODES)
    parser.add_argument("--echo",
                        help="Display full command line",
                        action="store_true")

    args = parser.parse_args(args=args)

    if args.echo:
        print('\033[1m\033[31mExecuting: ' + ' '.join(sys.argv) + '\033[0m\n')

    # read input FITS files
    hdulist1 = fits.open(args.infile1)
    hdulist2 = fits.open(args.infile2)

    image_merged = merge2images(
        hdulist1,
        hdulist2,
        debugplot=args.debugplot
    )

    # save result
    image_merged.writeto(args.outfile, overwrite=True)
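Note: merge2images() is provided by pyemir and its exact behaviour is not shown in this example. Purely as an illustration of what "averaging the common region" of two images can look like, here is a self-contained numpy sketch under the assumption (made only for this sketch) that pixels without data are flagged with NaN:

import numpy as np

def average_common_region(image1, image2):
    """Average the two images where both have data (non-NaN);
    keep the single available value elsewhere."""
    # nanmean ignores NaNs, so overlapping pixels are averaged and
    # pixels covered by only one image keep that image's value
    return np.nanmean(np.stack([image1, image2]), axis=0)

a = np.array([[1.0, np.nan], [3.0, 4.0]])
b = np.array([[3.0, 2.0], [np.nan, 6.0]])
print(average_common_region(a, b))  # [[2. 2.] [3. 5.]]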
Example No. 9
def main(args=None):

    # parse command-line options
    parser = argparse.ArgumentParser(
        description='description: merge 2 EMIR images averaging the common'
        ' region')

    # positional arguments
    parser.add_argument("infile1",
                        help="Input FITS file name #1",
                        type=argparse.FileType('rb'))
    parser.add_argument("infile2",
                        help="Input FITS file name #2",
                        type=argparse.FileType('rb'))
    parser.add_argument("outfile",
                        help="Output FITS file name",
                        type=lambda x: arg_file_is_new(parser, x, mode='wb'))

    # optional arguments
    parser.add_argument("--debugplot",
                        help="Integer indicating plotting/debugging" +
                        " (default=0)",
                        default=0,
                        type=int,
                        choices=DEBUGPLOT_CODES)
    parser.add_argument("--echo",
                        help="Display full command line",
                        action="store_true")

    args = parser.parse_args(args=args)

    if args.echo:
        print('\033[1m\033[31mExecuting: ' + ' '.join(sys.argv) + '\033[0m\n')

    # read input FITS files
    hdulist1 = fits.open(args.infile1)
    hdulist2 = fits.open(args.infile2)

    image_merged = merge2images(hdulist1, hdulist2, debugplot=args.debugplot)

    # save result
    image_merged.writeto(args.outfile, overwrite=True)
Example No. 10
def main(args=None):
    # parse command-line options
    parser = argparse.ArgumentParser(
        description='description: evaluate rectification and wavelength '
                    'calibration polynomials for the CSU configuration of a '
                    'particular image'
    )

    # required arguments
    parser.add_argument("fitsfile",
                        help="Input FITS file",
                        type=argparse.FileType('rb'))
    parser.add_argument("--rect_wpoly_MOSlibrary", required=True,
                        help="Input JSON file with library of rectification "
                             "and wavelength calibration coefficients",
                        type=argparse.FileType('rt'))
    parser.add_argument("--out_json", required=True,
                        help="Output JSON file with calibration computed for "
                             "the input FITS file",
                        type=lambda x: arg_file_is_new(parser, x, mode='wt'))
    # optional arguments
    parser.add_argument("--global_integer_offset_x_pix",
                        help="Global integer offset in the X direction "
                             "(default=0)",
                        default=0, type=int)
    parser.add_argument("--global_integer_offset_y_pix",
                        help="Global integer offset in the Y direction "
                             "(default=0)",
                        default=0, type=int)
    parser.add_argument("--ignore_dtu_configuration",
                        help="Ignore DTU configurations differences between "
                             "model and input image",
                        action="store_true")
    parser.add_argument("--debugplot",
                        help="Integer indicating plotting & debugging options"
                             " (default=0)",
                        default=0, type=int,
                        choices=DEBUGPLOT_CODES)
    parser.add_argument("--echo",
                        help="Display full command line",
                        action="store_true")
    args = parser.parse_args(args)

    if args.echo:
        print('\033[1m\033[31m% ' + ' '.join(sys.argv) + '\033[0m\n')

    # ---

    # generate HDUList object
    hdulist = fits.open(args.fitsfile)

    # generate MasterRectWave object
    master_rectwv = MasterRectWave._datatype_load(
        args.rect_wpoly_MOSlibrary.name)

    # compute rectification and wavelength calibration coefficients
    rectwv_coeff = rectwv_coeff_from_mos_library(
        hdulist,
        master_rectwv,
        ignore_dtu_configuration=args.ignore_dtu_configuration,
        debugplot=args.debugplot
    )

    # set global offsets
    rectwv_coeff.global_integer_offset_x_pix = \
        args.global_integer_offset_x_pix
    rectwv_coeff.global_integer_offset_y_pix = \
        args.global_integer_offset_y_pix

    # save RectWaveCoeff object into JSON file
    rectwv_coeff.writeto(args.out_json.name)
    print('>>> Saving file ' + args.out_json.name)
Example No. 11
def main(args=None):
    # parse command-line options
    parser = argparse.ArgumentParser(
        description='description: compute pixel-to-pixel flatfield'
    )

    # required arguments
    parser.add_argument("fitsfile",
                        help="Input FITS file (flat ON-OFF)",
                        type=argparse.FileType('rb'))
    parser.add_argument("--rectwv_coeff", required=True,
                        help="Input JSON file with rectification and "
                             "wavelength calibration coefficients",
                        type=argparse.FileType('rt'))
    parser.add_argument("--minimum_slitlet_width_mm", required=True,
                        help="Minimum slitlet width in mm",
                        type=float)
    parser.add_argument("--maximum_slitlet_width_mm", required=True,
                        help="Maximum slitlet width in mm",
                        type=float)
    parser.add_argument("--minimum_fraction", required=True,
                        help="Minimum allowed flatfielding value",
                        type=float, default=0.01)
    parser.add_argument("--minimum_value_in_output",
                        help="Minimum value allowed in output file: pixels "
                             "below this value are set to 1.0 (default=0.01)",
                        type=float, default=0.01)
    parser.add_argument("--maximum_value_in_output",
                        help="Maximum value allowed in output file: pixels "
                             "above this value are set to 1.0 (default=10.0)",
                        type=float, default=10.0)
    parser.add_argument("--nwindow_median",
                        help="Window size to smooth median spectrum in the "
                             "spectral direction",
                        type=int)
    parser.add_argument("--outfile", required=True,
                        help="Output FITS file",
                        type=lambda x: arg_file_is_new(parser, x, mode='wb'))

    # optional arguments
    parser.add_argument("--delta_global_integer_offset_x_pix",
                        help="Delta global integer offset in the X direction "
                             "(default=0)",
                        default=0, type=int)
    parser.add_argument("--delta_global_integer_offset_y_pix",
                        help="Delta global integer offset in the Y direction "
                             "(default=0)",
                        default=0, type=int)
    parser.add_argument("--resampling",
                        help="Resampling method: 1 -> nearest neighbor, "
                             "2 -> linear interpolation (default)",
                        default=2, type=int,
                        choices=(1, 2))
    parser.add_argument("--ignore_DTUconf",
                        help="Ignore DTU configurations differences between "
                             "model and input image",
                        action="store_true")
    parser.add_argument("--debugplot",
                        help="Integer indicating plotting & debugging options"
                             " (default=0)",
                        default=0, type=int,
                        choices=DEBUGPLOT_CODES)
    parser.add_argument("--echo",
                        help="Display full command line",
                        action="store_true")
    args = parser.parse_args(args)

    if args.echo:
        print('\033[1m\033[31m% ' + ' '.join(sys.argv) + '\033[0m\n')

    # This code is obsolete
    raise ValueError('This code is obsolete: use recipe in '
                     'emirdrp/recipes/spec/flatpix2pix.py')

    # read calibration structure from JSON file
    rectwv_coeff = RectWaveCoeff._datatype_load(args.rectwv_coeff.name)

    # modify (when requested) global offsets
    rectwv_coeff.global_integer_offset_x_pix += \
        args.delta_global_integer_offset_x_pix
    rectwv_coeff.global_integer_offset_y_pix += \
        args.delta_global_integer_offset_y_pix

    # read FITS image and its corresponding header
    hdulist = fits.open(args.fitsfile)
    header = hdulist[0].header
    image2d = hdulist[0].data
    hdulist.close()

    # apply global offsets
    image2d = apply_integer_offsets(
        image2d=image2d,
        offx=rectwv_coeff.global_integer_offset_x_pix,
        offy=rectwv_coeff.global_integer_offset_y_pix
    )

    # protections
    naxis2, naxis1 = image2d.shape
    if naxis1 != header['naxis1'] or naxis2 != header['naxis2']:
        print('>>> NAXIS1:', naxis1)
        print('>>> NAXIS2:', naxis2)
        raise ValueError('Something is wrong with NAXIS1 and/or NAXIS2')
    if abs(args.debugplot) >= 10:
        print('>>> NAXIS1:', naxis1)
        print('>>> NAXIS2:', naxis2)

    # check that the input FITS file grism and filter match
    filter_name = header['filter']
    if filter_name != rectwv_coeff.tags['filter']:
        raise ValueError("Filter name does not match!")
    grism_name = header['grism']
    if grism_name != rectwv_coeff.tags['grism']:
        raise ValueError("Grism name does not match!")
    if abs(args.debugplot) >= 10:
        print('>>> grism.......:', grism_name)
        print('>>> filter......:', filter_name)

    # check that the DTU configurations are compatible
    dtu_conf_fitsfile = DtuConfiguration.define_from_fits(args.fitsfile)
    dtu_conf_jsonfile = DtuConfiguration.define_from_dictionary(
        rectwv_coeff.meta_info['dtu_configuration'])
    if dtu_conf_fitsfile != dtu_conf_jsonfile:
        print('DTU configuration (FITS file):\n\t', dtu_conf_fitsfile)
        print('DTU configuration (JSON file):\n\t', dtu_conf_jsonfile)
        if args.ignore_DTUconf:
            print('WARNING: DTU configuration differences found!')
        else:
            raise ValueError('DTU configurations do not match')
    else:
        if abs(args.debugplot) >= 10:
            print('>>> DTU Configuration match!')
            print(dtu_conf_fitsfile)

    # load CSU configuration
    csu_conf_fitsfile = CsuConfiguration.define_from_fits(args.fitsfile)
    if abs(args.debugplot) >= 10:
        print(csu_conf_fitsfile)

    # valid slitlet numbers
    list_valid_islitlets = list(range(1, EMIR_NBARS + 1))
    for idel in rectwv_coeff.missing_slitlets:
        print('-> Removing slitlet (not defined):', idel)
        list_valid_islitlets.remove(idel)
    # filter out slitlets with widths outside valid range
    list_outside_valid_width = []
    for islitlet in list_valid_islitlets:
        slitwidth = csu_conf_fitsfile.csu_bar_slit_width(islitlet)
        if (slitwidth < args.minimum_slitlet_width_mm) or \
                (slitwidth > args.maximum_slitlet_width_mm):
            list_outside_valid_width.append(islitlet)
            print('-> Removing slitlet (invalid width):', islitlet)
    if len(list_outside_valid_width) > 0:
        for idel in list_outside_valid_width:
            list_valid_islitlets.remove(idel)
    print('>>> valid slitlet numbers:\n', list_valid_islitlets)

    # ---

    # compute and store median spectrum (and masked region) for each
    # individual slitlet
    image2d_sp_median = np.zeros((EMIR_NBARS, EMIR_NAXIS1))
    image2d_sp_mask = np.zeros((EMIR_NBARS, EMIR_NAXIS1), dtype=bool)
    for islitlet in list(range(1, EMIR_NBARS + 1)):
        if islitlet in list_valid_islitlets:
            if args.debugplot == 0:
                islitlet_progress(islitlet, EMIR_NBARS, ignore=False)
            # define Slitlet2D object
            slt = Slitlet2D(islitlet=islitlet,
                            rectwv_coeff=rectwv_coeff,
                            debugplot=args.debugplot)

            if abs(args.debugplot) >= 10:
                print(slt)

            # extract (distorted) slitlet from the initial image
            slitlet2d = slt.extract_slitlet2d(
                image_2k2k=image2d,
                subtitle='original image'
            )

            # rectify slitlet
            slitlet2d_rect = slt.rectify(
                slitlet2d=slitlet2d,
                resampling=args.resampling,
                subtitle='original rectified'
            )
            naxis2_slitlet2d, naxis1_slitlet2d = slitlet2d_rect.shape

            if naxis1_slitlet2d != EMIR_NAXIS1:
                print('naxis1_slitlet2d: ', naxis1_slitlet2d)
                print('EMIR_NAXIS1.....: ', EMIR_NAXIS1)
                raise ValueError("Unexpected naxis1_slitlet2d")

            sp_mask = np.zeros(naxis1_slitlet2d, dtype=bool)

            # for grism LR set to zero data beyond useful wavelength range
            if grism_name == 'LR':
                wv_parameters = set_wv_parameters(filter_name, grism_name)
                x_pix = np.arange(1, naxis1_slitlet2d + 1)
                wl_pix = polyval(x_pix, slt.wpoly)
                lremove = wl_pix < wv_parameters['wvmin_useful']
                sp_mask[lremove] = True
                slitlet2d_rect[:, lremove] = 0.0
                lremove = wl_pix > wv_parameters['wvmax_useful']
                slitlet2d_rect[:, lremove] = 0.0
                sp_mask[lremove] = True

            # get useful slitlet region (use boundaries instead of frontiers;
            # note that nscan_minmax_frontiers() works well independently
            # of whether frontiers or boundaries are passed as arguments)
            nscan_min, nscan_max = nscan_minmax_frontiers(
                slt.y0_reference_lower,
                slt.y0_reference_upper,
                resize=False
            )
            ii1 = nscan_min - slt.bb_ns1_orig
            ii2 = nscan_max - slt.bb_ns1_orig + 1

            # median spectrum
            sp_collapsed = np.median(slitlet2d_rect[ii1:(ii2 + 1), :], axis=0)

            # smooth median spectrum along the spectral direction
            sp_median = ndimage.median_filter(
                sp_collapsed,
                args.nwindow_median,
                mode='nearest'
            )

            """
                nremove = 5
                spl = AdaptiveLSQUnivariateSpline(
                    x=xaxis1[nremove:-nremove],
                    y=sp_collapsed[nremove:-nremove],
                    t=11,
                    adaptive=True
                )
                xknots = spl.get_knots()
                yknots = spl(xknots)
                sp_median = spl(xaxis1)

                # compute rms within each knot interval
                nknots = len(xknots)
                rms_array = np.zeros(nknots - 1, dtype=float)
                for iknot in range(nknots - 1):
                    residuals = []
                    for xdum, ydum, yydum in \
                            zip(xaxis1, sp_collapsed, sp_median):
                        if xknots[iknot] <= xdum <= xknots[iknot + 1]:
                            residuals.append(abs(ydum - yydum))
                    if len(residuals) > 5:
                        rms_array[iknot] = np.std(residuals)
                    else:
                        rms_array[iknot] = 0

                # determine in which knot interval falls each pixel
                iknot_array = np.zeros(len(xaxis1), dtype=int)
                for idum, xdum in enumerate(xaxis1):
                    for iknot in range(nknots - 1):
                        if xknots[iknot] <= xdum <= xknots[iknot + 1]:
                            iknot_array[idum] = iknot

                # compute new fit removing deviant points (with fixed knots)
                xnewfit = []
                ynewfit = []
                for idum in range(len(xaxis1)):
                    delta_sp = abs(sp_collapsed[idum] - sp_median[idum])
                    rms_tmp = rms_array[iknot_array[idum]]
                    if idum == 0 or idum == (len(xaxis1) - 1):
                        lok = True
                    elif rms_tmp > 0:
                        if delta_sp < 3.0 * rms_tmp:
                            lok = True
                        else:
                            lok = False
                    else:
                        lok = True
                    if lok:
                        xnewfit.append(xaxis1[idum])
                        ynewfit.append(sp_collapsed[idum])
                nremove = 5
                splnew = AdaptiveLSQUnivariateSpline(
                    x=xnewfit[nremove:-nremove],
                    y=ynewfit[nremove:-nremove],
                    t=xknots[1:-1],
                    adaptive=False
                )
                sp_median = splnew(xaxis1)
            """

            ymax_spmedian = sp_median.max()
            y_threshold = ymax_spmedian * args.minimum_fraction
            lremove = np.where(sp_median < y_threshold)
            sp_median[lremove] = 0.0
            sp_mask[lremove] = True

            image2d_sp_median[islitlet - 1, :] = sp_median
            image2d_sp_mask[islitlet - 1, :] = sp_mask

            if abs(args.debugplot) % 10 != 0:
                xaxis1 = np.arange(1, naxis1_slitlet2d + 1)
                title = 'Slitlet#' + str(islitlet) + ' (median spectrum)'
                ax = ximplotxy(xaxis1, sp_collapsed,
                               title=title,
                               show=False, label='collapsed spectrum')
                ax.plot(xaxis1, sp_median, label='fitted spectrum')
                ax.plot([1, naxis1_slitlet2d], 2*[y_threshold],
                        label='threshold')
                # ax.plot(xknots, yknots, 'o', label='knots')
                ax.legend()
                ax.set_ylim(-0.05*ymax_spmedian, 1.05*ymax_spmedian)
                pause_debugplot(args.debugplot,
                                pltshow=True, tight_layout=True)
        else:
            if args.debugplot == 0:
                islitlet_progress(islitlet, EMIR_NBARS, ignore=True)

    # ToDo: compute "average" spectrum for each pseudo-longslit, scaling
    #       with the median signal in each slitlet; derive a particular
    #       spectrum for each slitlet (scaling properly)

    image2d_sp_median_masked = np.ma.masked_array(
        image2d_sp_median,
        mask=image2d_sp_mask
    )
    ycut_median = np.ma.median(image2d_sp_median_masked, axis=1).data
    ycut_median_2d = np.repeat(ycut_median, EMIR_NAXIS1).reshape(
        EMIR_NBARS, EMIR_NAXIS1)
    image2d_sp_median_eq = image2d_sp_median_masked / ycut_median_2d
    image2d_sp_median_eq = image2d_sp_median_eq.data

    if True:
        ximshow(image2d_sp_median, title='sp_median', debugplot=12)
        ximplotxy(np.arange(1, EMIR_NBARS + 1), ycut_median, 'ro',
                  title='median value of each spectrum', debugplot=12)
        ximshow(image2d_sp_median_eq, title='sp_median_eq', debugplot=12)

    csu_conf_fitsfile.display_pseudo_longslits(
        list_valid_slitlets=list_valid_islitlets)
    dict_longslits = csu_conf_fitsfile.pseudo_longslits()

    # compute median spectrum for each longslit and insert (properly
    # scaled) that spectrum in each slitlet belonging to that longslit
    image2d_sp_median_longslit = np.zeros((EMIR_NBARS, EMIR_NAXIS1))
    islitlet = 1
    loop = True
    while loop:
        if islitlet in list_valid_islitlets:
            imin = dict_longslits[islitlet].imin()
            imax = dict_longslits[islitlet].imax()
            print('--> imin, imax: ', imin, imax)
            sp_median_longslit = np.median(
                image2d_sp_median_eq[(imin - 1):imax, :], axis=0)
            for i in range(imin, imax+1):
                print('----> i: ', i)
                image2d_sp_median_longslit[(i - 1), :] = \
                    sp_median_longslit * ycut_median[i - 1]
            islitlet = imax
        else:
            print('--> ignoring: ', islitlet)
        if islitlet == EMIR_NBARS:
            loop = False
        else:
            islitlet += 1
    if True:
        ximshow(image2d_sp_median_longslit, debugplot=12)

    # initialize rectified image
    image2d_flatfielded = np.zeros((EMIR_NAXIS2, EMIR_NAXIS1))

    # main loop
    for islitlet in list(range(1, EMIR_NBARS + 1)):
        if islitlet in list_valid_islitlets:
            if args.debugplot == 0:
                islitlet_progress(islitlet, EMIR_NBARS, ignore=False)
            # define Slitlet2D object
            slt = Slitlet2D(islitlet=islitlet,
                            rectwv_coeff=rectwv_coeff,
                            debugplot=args.debugplot)

            # extract (distorted) slitlet from the initial image
            slitlet2d = slt.extract_slitlet2d(
                image_2k2k=image2d,
                subtitle='original image'
            )

            # rectify slitlet
            slitlet2d_rect = slt.rectify(
                slitlet2d=slitlet2d,
                resampling=args.resampling,
                subtitle='original rectified'
            )
            naxis2_slitlet2d, naxis1_slitlet2d = slitlet2d_rect.shape

            sp_median = image2d_sp_median_longslit[islitlet - 1, :]

            # generate rectified slitlet region filled with the median spectrum
            slitlet2d_rect_spmedian = np.tile(sp_median, (naxis2_slitlet2d, 1))
            if abs(args.debugplot) > 10:
                slt.ximshow_rectified(
                    slitlet2d_rect=slitlet2d_rect_spmedian,
                    subtitle='rectified, filled with median spectrum'
                )

            # unrectified image
            slitlet2d_unrect_spmedian = slt.rectify(
                slitlet2d=slitlet2d_rect_spmedian,
                resampling=args.resampling,
                inverse=True,
                subtitle='unrectified, filled with median spectrum'
            )

            # normalize initial slitlet image (avoid division by zero)
            slitlet2d_norm = np.zeros_like(slitlet2d)
            for j in range(naxis1_slitlet2d):
                for i in range(naxis2_slitlet2d):
                    den = slitlet2d_unrect_spmedian[i, j]
                    if den == 0:
                        slitlet2d_norm[i, j] = 1.0
                    else:
                        slitlet2d_norm[i, j] = slitlet2d[i, j] / den

            if abs(args.debugplot) > 10:
                slt.ximshow_unrectified(
                    slitlet2d=slitlet2d_norm,
                    subtitle='unrectified, pixel-to-pixel'
                )

            # check for pseudo-longslit with previous slitlet
            if islitlet > 1:
                if (islitlet - 1) in list_valid_islitlets:
                    c1 = csu_conf_fitsfile.csu_bar_slit_center(islitlet - 1)
                    w1 = csu_conf_fitsfile.csu_bar_slit_width(islitlet - 1)
                    c2 = csu_conf_fitsfile.csu_bar_slit_center(islitlet)
                    w2 = csu_conf_fitsfile.csu_bar_slit_width(islitlet)
                    if abs(w1-w2)/w1 < 0.25:
                        wmean = (w1 + w2) / 2.0
                        if abs(c1 - c2) < wmean/4.0:
                            same_slitlet_below = True
                        else:
                            same_slitlet_below = False
                    else:
                        same_slitlet_below = False
                else:
                    same_slitlet_below = False
            else:
                same_slitlet_below = False

            # check for pseudo-longslit with next slitlet
            if islitlet < EMIR_NBARS:
                if (islitlet + 1) in list_valid_islitlets:
                    c1 = csu_conf_fitsfile.csu_bar_slit_center(islitlet)
                    w1 = csu_conf_fitsfile.csu_bar_slit_width(islitlet)
                    c2 = csu_conf_fitsfile.csu_bar_slit_center(islitlet + 1)
                    w2 = csu_conf_fitsfile.csu_bar_slit_width(islitlet + 1)
                    if abs(w1-w2)/w1 < 0.25:
                        wmean = (w1 + w2) / 2.0
                        if abs(c1 - c2) < wmean/4.0:
                            same_slitlet_above = True
                        else:
                            same_slitlet_above = False
                    else:
                        same_slitlet_above = False
                else:
                    same_slitlet_above = False
            else:
                same_slitlet_above = False

            for j in range(EMIR_NAXIS1):
                xchannel = j + 1
                y0_lower = slt.list_frontiers[0](xchannel)
                y0_upper = slt.list_frontiers[1](xchannel)
                n1, n2 = nscan_minmax_frontiers(y0_frontier_lower=y0_lower,
                                                y0_frontier_upper=y0_upper,
                                                resize=True)
                # note that n1 and n2 are scans (ranging from 1 to NAXIS2)
                nn1 = n1 - slt.bb_ns1_orig + 1
                nn2 = n2 - slt.bb_ns1_orig + 1
                image2d_flatfielded[(n1 - 1):n2, j] = \
                    slitlet2d_norm[(nn1 - 1):nn2, j]

                # force to 1.0 region around frontiers
                if not same_slitlet_below:
                    image2d_flatfielded[(n1 - 1):(n1 + 2), j] = 1
                if not same_slitlet_above:
                    image2d_flatfielded[(n2 - 5):n2, j] = 1
        else:
            if args.debugplot == 0:
                islitlet_progress(islitlet, EMIR_NBARS, ignore=True)

    if args.debugplot == 0:
        print('OK!')

    # restore global offsets
    image2d_flatfielded = apply_integer_offsets(
        image2d=image2d_flatfielded,
        offx=-rectwv_coeff.global_integer_offset_x_pix,
        offy=-rectwv_coeff.global_integer_offset_y_pix
    )

    # set pixels below minimum value to 1.0
    filtered = np.where(image2d_flatfielded < args.minimum_value_in_output)
    image2d_flatfielded[filtered] = 1.0

    # set pixels above maximum value to 1.0
    filtered = np.where(image2d_flatfielded > args.maximum_value_in_output)
    image2d_flatfielded[filtered] = 1.0

    # save output file
    save_ndarray_to_fits(
        array=image2d_flatfielded,
        file_name=args.outfile,
        main_header=header,
        overwrite=True
    )
    print('>>> Saving file ' + args.outfile.name)
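Note: the nested i/j loop that builds slitlet2d_norm in the example above (ratio of data to model, forced to 1.0 wherever the model is zero) can also be written without explicit Python loops. A minimal numpy sketch with made-up arrays:

import numpy as np

slitlet2d = np.array([[2.0, 4.0], [6.0, 8.0]])
model = np.array([[2.0, 0.0], [3.0, 4.0]])

# divide only where the model is non-zero; elsewhere keep the 1.0
# values pre-filled in the output array
slitlet2d_norm = np.divide(slitlet2d, model,
                           out=np.ones_like(slitlet2d),
                           where=(model != 0))
print(slitlet2d_norm)  # [[1. 1.] [2. 2.]]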
Example No. 12
def main(args=None):

    # parse command-line options
    parser = argparse.ArgumentParser(prog='rect_wpoly_for_mos')
    # required arguments
    parser.add_argument("input_list",
                        help="TXT file with list of JSON files derived from "
                             "longslit data")
    parser.add_argument("--fitted_bound_param", required=True,
                        help="Input JSON with fitted boundary parameters",
                        type=argparse.FileType('rt'))
    parser.add_argument("--out_MOSlibrary", required=True,
                        help="Output JSON file with results",
                        type=lambda x: arg_file_is_new(parser, x))
    # optional arguments
    parser.add_argument("--debugplot",
                        help="Integer indicating plotting & debugging options"
                             " (default=0)",
                        default=0, type=int,
                        choices=DEBUGPLOT_CODES)
    parser.add_argument("--echo",
                        help="Display full command line",
                        action="store_true")
    args = parser.parse_args(args)

    if args.echo:
        print('\033[1m\033[31m% ' + ' '.join(sys.argv) + '\033[0m\n')

    # ---

    # Read input TXT file with list of JSON files
    list_json_files = list_fileinfo_from_txt(args.input_list)
    nfiles = len(list_json_files)
    if abs(args.debugplot) >= 10:
        print('>>> Number of input JSON files:', nfiles)
        for item in list_json_files:
            print(item)
    if nfiles < 2:
        raise ValueError("Insufficient number of input JSON files")

    # read fitted boundary parameters and check that all the longslit JSON
    # files have been computed using the same fitted boundary parameters
    refined_boundary_model = RefinedBoundaryModelParam._datatype_load(
        args.fitted_bound_param.name)
    for ifile in range(nfiles):
        coef_rect_wpoly = RectWaveCoeff._datatype_load(
            list_json_files[ifile].filename)
        uuid_tmp = coef_rect_wpoly.meta_info['origin']['bound_param']
        if uuid_tmp[4:] != refined_boundary_model.uuid:
            print('Expected uuid:', refined_boundary_model.uuid)
            print('uuid for ifile #' + str(ifile + 1) + ": " + uuid_tmp)
            raise ValueError("Fitted boundary parameter uuid's do not match")

    # check consistency of grism, filter, DTU configuration and list of
    # valid slitlets
    coef_rect_wpoly_first_longslit = RectWaveCoeff._datatype_load(
        list_json_files[0].filename)
    filter_name = coef_rect_wpoly_first_longslit.tags['filter']
    grism_name = coef_rect_wpoly_first_longslit.tags['grism']
    dtu_conf = DtuConfiguration.define_from_dictionary(
        coef_rect_wpoly_first_longslit.meta_info['dtu_configuration']
    )
    list_valid_islitlets = list(range(1, EMIR_NBARS + 1))
    for idel in coef_rect_wpoly_first_longslit.missing_slitlets:
        list_valid_islitlets.remove(idel)
    for ifile in range(1, nfiles):
        coef_rect_wpoly = RectWaveCoeff._datatype_load(
            list_json_files[ifile].filename)
        filter_tmp = coef_rect_wpoly.tags['filter']
        if filter_name != filter_tmp:
            print(filter_name)
            print(filter_tmp)
            raise ValueError("Unexpected different filter found")
        grism_tmp = coef_rect_wpoly.tags['grism']
        if grism_name != grism_tmp:
            print(grism_name)
            print(grism_tmp)
            raise ValueError("Unexpected different grism found")
        coef_rect_wpoly = RectWaveCoeff._datatype_load(
            list_json_files[ifile].filename)
        dtu_conf_tmp = DtuConfiguration.define_from_dictionary(
            coef_rect_wpoly.meta_info['dtu_configuration']
        )
        if dtu_conf != dtu_conf_tmp:
            print(dtu_conf)
            print(dtu_conf_tmp)
            raise ValueError("Unexpected different DTU configurations found")
        list_valid_islitlets_tmp = list(range(1, EMIR_NBARS + 1))
        for idel in coef_rect_wpoly.missing_slitlets:
            list_valid_islitlets_tmp.remove(idel)
        if list_valid_islitlets != list_valid_islitlets_tmp:
            print(list_valid_islitlets)
            print(list_valid_islitlets_tmp)
            raise ValueError("Unexpected different list of valid slitlets")

    # check consistency of horizontal bounding box limits (bb_nc1_orig and
    # bb_nc2_orig) and ymargin_bb, and store the values for each slitlet
    dict_bb_param = {}
    print("Checking horizontal bounding box limits and ymargin_bb:")
    for islitlet in list(range(1, EMIR_NBARS + 1)):
        if islitlet in list_valid_islitlets:
            islitlet_progress(islitlet, EMIR_NBARS, ignore=False)
            cslitlet = 'slitlet' + str(islitlet).zfill(2)
            dict_bb_param[cslitlet] = {}
            for par in ['bb_nc1_orig', 'bb_nc2_orig', 'ymargin_bb']:
                value_initial = \
                    coef_rect_wpoly_first_longslit.contents[islitlet - 1][par]
                for ifile in range(1, nfiles):
                    coef_rect_wpoly = RectWaveCoeff._datatype_load(
                        list_json_files[ifile].filename)
                    value_tmp = coef_rect_wpoly.contents[islitlet - 1][par]
                    if value_initial != value_tmp:
                        print(islitlet, value_initial, value_tmp)
                        print(value_tmp)
                        raise ValueError("Unexpected different " + par)
                    dict_bb_param[cslitlet][par] = value_initial
        else:
            islitlet_progress(islitlet, EMIR_NBARS, ignore=True)
    print('OK!')

    # ---

    # Read and store all the longslit data
    list_coef_rect_wpoly = []
    for ifile in range(nfiles):
        coef_rect_wpoly = RectWaveCoeff._datatype_load(
            list_json_files[ifile].filename)
        list_coef_rect_wpoly.append(coef_rect_wpoly)

    # ---

    # Initialize structure to save results into an output JSON file
    outdict = {}
    outdict['refined_boundary_model'] = refined_boundary_model.__getstate__()
    outdict['instrument'] = 'EMIR'
    outdict['meta_info'] = {}
    outdict['meta_info']['creation_date'] = datetime.now().isoformat()
    outdict['meta_info']['description'] = \
        'rectification and wavelength calibration polynomial coefficients ' \
        'as a function of csu_bar_slit_center for MOS'
    outdict['meta_info']['recipe_name'] = 'undefined'
    outdict['meta_info']['origin'] = {}
    outdict['meta_info']['origin']['wpoly_longslits'] = {}
    for ifile in range(nfiles):
        cdum = 'longslit_' + str(ifile + 1).zfill(3) + '_uuid'
        outdict['meta_info']['origin']['wpoly_longslits'][cdum] = \
            list_coef_rect_wpoly[ifile].uuid
    outdict['tags'] = {}
    outdict['tags']['grism'] = grism_name
    outdict['tags']['filter'] = filter_name
    outdict['dtu_configuration'] = dtu_conf.outdict()
    outdict['uuid'] = str(uuid4())
    outdict['contents'] = {}

    # include bb_nc1_orig, bb_nc2_orig and ymargin_bb for each slitlet
    # (note that the values of bb_ns1_orig and bb_ns2_orig cannot be
    # computed at this stage because they depend on csu_bar_slit_center)
    for islitlet in list_valid_islitlets:
        cslitlet = 'slitlet' + str(islitlet).zfill(2)
        outdict['contents'][cslitlet] = dict_bb_param[cslitlet]

    # check that order for rectification transformations is the same for all
    # the slitlets and longslit configurations
    order_check_list = []
    for ifile in range(nfiles):
        tmpdict = list_coef_rect_wpoly[ifile].contents
        for islitlet in list_valid_islitlets:
            ttd_order = tmpdict[islitlet - 1]['ttd_order']
            if ttd_order is not None:
                order_check_list.append(ttd_order)
            ttd_order_modeled = \
                tmpdict[islitlet - 1]['ttd_order_longslit_model']
            order_check_list.append(ttd_order_modeled)
    # remove duplicates in list
    order_no_duplicates = list(set(order_check_list))
    if len(order_no_duplicates) != 1:
        print('order_no_duplicates:', order_no_duplicates)
        raise ValueError('ttd_order is not constant!')
    ttd_order = int(order_no_duplicates[0])
    ncoef_rect = ncoef_fmap(ttd_order)
    if abs(args.debugplot) >= 10:
        print('>>> ttd_order........:', ttd_order)
        print('>>> ncoef_rect.......:', ncoef_rect)

    # check that polynomial degree in frontiers and spectrails are the same
    poldeg_check_list = []
    for ifile in range(nfiles):
        tmpdict = list_coef_rect_wpoly[ifile].contents
        for islitlet in list_valid_islitlets:
            tmppoly = tmpdict[islitlet - 1]['frontier']['poly_coef_lower']
            poldeg_check_list.append(len(tmppoly) - 1)
            tmppoly = tmpdict[islitlet - 1]['frontier']['poly_coef_upper']
            poldeg_check_list.append(len(tmppoly) - 1)
            tmppoly = tmpdict[islitlet - 1]['spectrail']['poly_coef_lower']
            poldeg_check_list.append(len(tmppoly) - 1)
            tmppoly = tmpdict[islitlet - 1]['spectrail']['poly_coef_middle']
            poldeg_check_list.append(len(tmppoly) - 1)
            tmppoly = tmpdict[islitlet - 1]['spectrail']['poly_coef_upper']
            poldeg_check_list.append(len(tmppoly) - 1)
    # remove duplicates in list
    poldeg_no_duplicates = list(set(poldeg_check_list))
    if len(poldeg_no_duplicates) != 1:
        print('poldeg_no_duplicates:', poldeg_no_duplicates)
        raise ValueError('poldeg is not constant in frontiers and '
                         'spectrails!')
    poldeg_spectrails = int(poldeg_no_duplicates[0])
    if abs(args.debugplot) >= 10:
        print('>>> poldeg spectrails:', poldeg_spectrails)

    # check that polynomial degree of wavelength calibration is the same for
    # all the slitlets
    poldeg_check_list = []
    for ifile in range(nfiles):
        tmpdict = list_coef_rect_wpoly[ifile].contents
        for islitlet in list_valid_islitlets:
            tmppoly = tmpdict[islitlet - 1]['wpoly_coeff']
            poldeg_check_list.append(len(tmppoly) - 1)
            tmppoly = tmpdict[islitlet - 1]['wpoly_coeff_longslit_model']
            poldeg_check_list.append(len(tmppoly) - 1)
    # remove duplicates in list
    poldeg_no_duplicates = list(set(poldeg_check_list))
    if len(poldeg_no_duplicates) != 1:
        print('poldeg_no_duplicates:', poldeg_no_duplicates)
        raise ValueError('poldeg is not constant in wavelength calibration '
                         'polynomials!')
    poldeg_wavecal = int(poldeg_no_duplicates[0])
    if abs(args.debugplot) >= 10:
        print('>>> poldeg wavecal...:', poldeg_wavecal)

    # ---

    # csu_bar_slit_center values for each slitlet
    print("CSU_bar_slit_center values:")
    for islitlet in list(range(1, EMIR_NBARS + 1)):
        if islitlet in list_valid_islitlets:
            islitlet_progress(islitlet, EMIR_NBARS, ignore=False)
            cslitlet = 'slitlet' + str(islitlet).zfill(2)
            list_csu_bar_slit_center = []
            for ifile in range(nfiles):
                tmpdict = list_coef_rect_wpoly[ifile].contents[islitlet - 1]
                csu_bar_slit_center = tmpdict['csu_bar_slit_center']
                list_csu_bar_slit_center.append(csu_bar_slit_center)
            # check that list_csu_bar_slit_center is properly sorted
            if not np.all(np.diff(list_csu_bar_slit_center) >= 0):
                print('cslitlet: ', cslitlet)
                print('list_csu_bar_slit_center: ', list_csu_bar_slit_center)
                raise ValueError('Unsorted list_csu_bar_slit_center')
            outdict['contents'][cslitlet]['list_csu_bar_slit_center'] = \
                list_csu_bar_slit_center
        else:
            islitlet_progress(islitlet, EMIR_NBARS, ignore=True)
    print('OK!')

    # ---

    # rectification polynomial coefficients

    # note: when aij and bij have not been computed, we use the modeled
    # version aij_longslit_model and bij_longslit_model
    print("Rectification polynomial coefficients:")
    for islitlet in list(range(1, EMIR_NBARS + 1)):
        if islitlet in list_valid_islitlets:
            islitlet_progress(islitlet, EMIR_NBARS, ignore=False)
            cslitlet = 'slitlet' + str(islitlet).zfill(2)
            outdict['contents'][cslitlet]['ttd_order'] = ttd_order
            outdict['contents'][cslitlet]['ncoef_rect'] = ncoef_rect
            for keycoef in ['ttd_aij', 'ttd_bij', 'tti_aij', 'tti_bij']:
                for icoef in range(ncoef_rect):
                    ccoef = str(icoef).zfill(2)
                    list_cij = []
                    for ifile in range(nfiles):
                        tmpdict = \
                            list_coef_rect_wpoly[ifile].contents[islitlet - 1]
                        cij = tmpdict[keycoef]
                        if cij is not None:
                            list_cij.append(cij[icoef])
                        else:
                            cij_modeled = tmpdict[keycoef + '_longslit_model']
                            if cij_modeled is None:
                                raise ValueError("Unexpected cij_modeled=None!")
                            else:
                                list_cij.append(cij_modeled[icoef])
                            if abs(args.debugplot) >= 10:
                                print("Warning: using " + keycoef +
                                      "_longslit_model for " + cslitlet +
                                      " in file " +
                                      list_json_files[ifile].filename)
                    cdum = 'list_' + keycoef + '_' + ccoef
                    outdict['contents'][cslitlet][cdum] = list_cij
        else:
            islitlet_progress(islitlet, EMIR_NBARS, ignore=True)

    print('OK!')

    # ---

    # wavelength calibration polynomial coefficients

    # note: when wpoly_coeff have not been computed, we use the
    # wpoly_coeff_longslit_model
    print("Wavelength calibration polynomial coefficients:")
    for islitlet in list(range(1, EMIR_NBARS + 1)):
        if islitlet in list_valid_islitlets:
            islitlet_progress(islitlet, EMIR_NBARS, ignore=False)
            cslitlet = 'slitlet' + str(islitlet).zfill(2)
            outdict['contents'][cslitlet]['wpoly_degree'] = poldeg_wavecal
            for icoef in range(poldeg_wavecal + 1):
                ccoef = str(icoef).zfill(2)
                list_cij = []
                for ifile in range(nfiles):
                    tmpdict = list_coef_rect_wpoly[ifile].contents[islitlet - 1]
                    cij = tmpdict['wpoly_coeff']
                    if cij is not None:
                        list_cij.append(cij[icoef])
                    else:
                        cij_modeled = tmpdict['wpoly_coeff_longslit_model']
                        if cij_modeled is None:
                            raise ValueError("Unexpected cij_modeled=None!")
                        else:
                            list_cij.append(cij_modeled[icoef])
                        if abs(args.debugplot) >= 10:
                            print("Warning: using wpoly_coeff_longslit_model" +
                                  " for " + cslitlet +
                                  " in file " +
                                  list_json_files[ifile].filename)
                outdict['contents'][cslitlet]['list_wpoly_coeff_' + ccoef] = \
                    list_cij
        else:
            islitlet_progress(islitlet, EMIR_NBARS, ignore=True)
    print('OK!')

    # ---

    # OBSOLETE
    # Save resulting JSON structure
    '''
    with open(args.out_MOSlibrary.name + '_old', 'w') as fstream:
        json.dump(outdict, fstream, indent=2, sort_keys=True)
        print('>>> Saving file ' + args.out_MOSlibrary.name + '_old')
    '''

    # --

    # Create object of type MasterRectWave with library of coefficients
    # for rectification and wavelength calibration
    master_rectwv = MasterRectWave(instrument='EMIR')
    master_rectwv.quality_control = numina.types.qc.QC.GOOD
    master_rectwv.tags['grism'] = grism_name
    master_rectwv.tags['filter'] = filter_name
    master_rectwv.meta_info['dtu_configuration'] = outdict['dtu_configuration']
    master_rectwv.meta_info['refined_boundary_model'] = {
        'parmodel': refined_boundary_model.meta_info['parmodel']
    }
    master_rectwv.meta_info['refined_boundary_model'].update(
        outdict['refined_boundary_model']['contents']
    )
    master_rectwv.total_slitlets = EMIR_NBARS
    master_rectwv.meta_info['origin'] = {
        'bound_param': 'uuid' + refined_boundary_model.uuid,
        'longslit_frames': ['uuid:' + list_coef_rect_wpoly[ifile].uuid
                            for ifile in range(nfiles)]
    }
    for i in range(EMIR_NBARS):
        islitlet = i + 1
        dumdict = {'islitlet': islitlet}
        cslitlet = 'slitlet' + str(islitlet).zfill(2)
        if cslitlet in outdict['contents']:
            dumdict.update(outdict['contents'][cslitlet])
        else:
            dumdict.update({
                'bb_nc1_orig': 0,
                'bb_nc2_orig': 0,
                'ymargin_bb': 0,
                'list_csu_bar_slit_center': [],
                'ttd_order': 0,
                'ncoef_rect': 0,
                'wpolydegree': 0
            })
            master_rectwv.missing_slitlets.append(islitlet)
        master_rectwv.contents.append(dumdict)
    master_rectwv.writeto(args.out_MOSlibrary.name)
    print('>>> Saving file ' + args.out_MOSlibrary.name)
Exemplo n.º 13
0
def main(args=None):
    # parse command-line options
    parser = argparse.ArgumentParser(
        description='description: apply rectification polynomials '
                    'for the CSU configuration of a particular image'
    )

    # required arguments
    parser.add_argument("fitsfile",
                        help="Input FITS file",
                        type=argparse.FileType('rb'))
    parser.add_argument("--rectwv_coeff", required=True,
                        help="Input JSON file with rectification and "
                             "wavelength calibration coefficients",
                        type=argparse.FileType('rt'))
    parser.add_argument("--outfile", required=True,
                        help="Output FITS file with rectified image",
                        type=lambda x: arg_file_is_new(parser, x, mode='wb'))

    # optional arguments
    parser.add_argument("--resampling",
                        help="Resampling method: 1 -> nearest neighbor, "
                             "2 -> linear interpolation (default)",
                        default=2, type=int,
                        choices=(1, 2))
    parser.add_argument("--ignore_dtu_configuration",
                        help="Ignore DTU configurations differences between "
                             "transformation and input image",
                        action="store_true")
    parser.add_argument("--debugplot",
                        help="Integer indicating plotting & debugging options"
                             " (default=0)",
                        default=0, type=int,
                        choices=DEBUGPLOT_CODES)
    parser.add_argument("--echo",
                        help="Display full command line",
                        action="store_true")
    args = parser.parse_args(args)

    if args.echo:
        print('\033[1m\033[31m% ' + ' '.join(sys.argv) + '\033[0m\n')

    # read calibration structure from JSON file
    rectwv_coeff = RectWaveCoeff._datatype_load(
        args.rectwv_coeff.name)

    # read FITS image and its corresponding header
    hdulist = fits.open(args.fitsfile)
    header = hdulist[0].header
    image2d = hdulist[0].data
    hdulist.close()

    # protections
    naxis2, naxis1 = image2d.shape
    if naxis1 != header['naxis1'] or naxis2 != header['naxis2']:
        print('>>> NAXIS1:', naxis1)
        print('>>> NAXIS2:', naxis2)
        raise ValueError('Something is wrong with NAXIS1 and/or NAXIS2')
    if abs(args.debugplot) >= 10:
        print('>>> NAXIS1:', naxis1)
        print('>>> NAXIS2:', naxis2)

    # check that the input FITS file grism and filter match
    filter_name = header['filter']
    if filter_name != rectwv_coeff.tags['filter']:
        raise ValueError("Filter name does not match!")
    grism_name = header['grism']
    if grism_name != rectwv_coeff.tags['grism']:
        raise ValueError("Filter name does not match!")
    if abs(args.debugplot) >= 10:
        print('>>> grism.......:', grism_name)
        print('>>> filter......:', filter_name)

    # check that the DTU configurations are compatible
    dtu_conf_fitsfile = DtuConfiguration.define_from_fits(args.fitsfile)
    dtu_conf_jsonfile = DtuConfiguration.define_from_dictionary(
        rectwv_coeff.meta_info['dtu_configuration'])
    if dtu_conf_fitsfile != dtu_conf_jsonfile:
        print('DTU configuration (FITS file):\n\t', dtu_conf_fitsfile)
        print('DTU configuration (JSON file):\n\t', dtu_conf_jsonfile)
        if args.ignore_dtu_configuration:
            print('WARNING: DTU configuration differences found!')
        else:
            raise ValueError("DTU configurations do not match!")
    else:
        if abs(args.debugplot) >= 10:
            print('>>> DTU Configuration match!')
            print(dtu_conf_fitsfile)

    # valid slitlet numbers
    list_valid_islitlets = list(range(1, EMIR_NBARS + 1))
    for idel in rectwv_coeff.missing_slitlets:
        list_valid_islitlets.remove(idel)
    if abs(args.debugplot) >= 10:
        print('>>> valid slitlet numbers:\n', list_valid_islitlets)

    naxis2_enlarged = EMIR_NBARS * EMIR_NPIXPERSLIT_RECTIFIED
    image2d_rectified = np.zeros((naxis2_enlarged, EMIR_NAXIS1))
    image2d_unrectified = np.zeros((EMIR_NAXIS2, EMIR_NAXIS1))

    for islitlet in list_valid_islitlets:
        if args.debugplot == 0:
            islitlet_progress(islitlet, EMIR_NBARS)

        # define Slitlet2D object
        slt = Slitlet2D(islitlet=islitlet,
                        rectwv_coeff=rectwv_coeff,
                        debugplot=args.debugplot)

        # extract 2D image corresponding to the selected slitlet: note that
        # in this case we are not using select_unrectified_slitlets()
        # because it introduces extra zero pixels in the slitlet frontiers
        slitlet2d = slt.extract_slitlet2d(image2d)

        # rectify image
        slitlet2d_rect = slt.rectify(slitlet2d,
                                     resampling=args.resampling)

        # minimum and maximum useful row in the full 2d rectified image
        # (starting from 0)
        i1 = slt.iminslt - 1
        i2 = slt.imaxslt

        # minimum and maximum scan in the rectified slitlet
        # (in pixels, from 1 to NAXIS2)
        ii1 = slt.min_row_rectified
        ii2 = slt.max_row_rectified + 1

        # save rectified slitlet in its corresponding location within
        # the full 2d rectified image
        image2d_rectified[i1:i2, :] = slitlet2d_rect[ii1:ii2, :]
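        # illustrative note (hypothetical numbers, not taken from any real
        # frame): if slt.iminslt=101 and slt.imaxslt=138, then i1=100 and
        # i2=138, so rows 100..137 (0-based) of image2d_rectified receive the
        # rectified slitlet rows slt.min_row_rectified..slt.max_row_rectified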

        # ---

        # unrectify image
        slitlet2d_unrect = slt.rectify(slitlet2d_rect,
                                       resampling=args.resampling,
                                       inverse=True)

        # minimum and maximum useful scan (pixel in the spatial direction)
        # for the rectified slitlet
        nscan_min, nscan_max = nscan_minmax_frontiers(
            slt.y0_frontier_lower,
            slt.y0_frontier_upper,
            resize=False
        )
        ii1 = nscan_min - slt.bb_ns1_orig
        ii2 = nscan_max - slt.bb_ns1_orig + 1

        j1 = slt.bb_nc1_orig - 1
        j2 = slt.bb_nc2_orig
        i1 = slt.bb_ns1_orig - 1 + ii1
        i2 = i1 + ii2 - ii1

        image2d_unrectified[i1:i2, j1:j2] = slitlet2d_unrect[ii1:ii2, :]

    if args.debugplot == 0:
        print('OK!')

    save_ndarray_to_fits(
        array=[image2d_rectified, image2d_unrectified],
        file_name=args.outfile,
        cast_to_float=[True] * 2,
        overwrite=True
    )
    print('>>> Saving file ' + args.outfile.name)
Exemplo n.º 14
0
def main(args=None):
    # parse command-line options
    parser = argparse.ArgumentParser(
        description='description: display arrangement of EMIR CSU bars',
        formatter_class=argparse.RawTextHelpFormatter)

    # positional arguments
    parser.add_argument("filename",
                        help="TXT file with list of ABBA FITS files",
                        type=argparse.FileType('rt'))
    parser.add_argument("--step",
                        required=True,
                        help=textwrap.dedent("""\
                        0: preliminary rectwv_coeff.json
                        1: refined rectwv_coeff.json
                        2: ABBA fast reduction
                        3: ABBA careful reduction"""),
                        type=int,
                        choices=[0, 1, 2, 3])
    parser.add_argument("--outfile",
                        required=True,
                        help="Output YAML file name",
                        type=lambda x: arg_file_is_new(parser, x))

    # optional arguments
    parser.add_argument("--pattern",
                        help="Observation pattern",
                        default='ABBA',
                        choices=['A', 'AB', 'ABBA'])
    parser.add_argument("--repeat",
                        help="Repetitions at each position",
                        default=1,
                        type=int)
    parser.add_argument("--npreliminary",
                        help="number of images to be combined to compute "
                        "preliminary rectwv_coeff.json",
                        type=int,
                        default=1)
    parser.add_argument("--refine_wavecalib_mode",
                        help=textwrap.dedent("""\
                        0: no refinement
                        1: global offset to all the slitlets (ARC lines)
                        2: individual offset to each slitlet (ARC lines)
                        11: global offset to all the slitlets (OH lines)
                        12: individual offset to each slitlet (OH lines)"""),
                        type=int,
                        choices=[0, 1, 2, 11, 12])
    parser.add_argument("--minimum_slitlet_width_mm", type=float)
    parser.add_argument("--maximum_slitlet_width_mm", type=float)
    parser.add_argument("--global_integer_offset_x_pix", type=int)
    parser.add_argument("--global_integer_offset_y_pix", type=int)
    parser.add_argument("--obsid_prefix", type=str)
    parser.add_argument("--rectwv_combined",
                        help="Generate single rectwv_coeff.json",
                        action="store_true")
    parser.add_argument("--echo",
                        help="Display full command line",
                        action="store_true")
    args = parser.parse_args(args)

    if args.echo:
        print('\033[1m\033[31mExecuting: ' + ' '.join(sys.argv) + '\033[0m\n')

    # read TXT file
    with args.filename as f:
        file_content = f.read().splitlines()
    list_fileinfo = []
    for line in file_content:
        if len(line) > 0:
            if line[0] not in ['#', '@']:
                tmplist = line.split()
                tmpfile = tmplist[0]
                if len(tmplist) > 1:
                    tmpinfo = tmplist[1:]
                else:
                    tmpinfo = None
                list_fileinfo.append(FileInfo(tmpfile, tmpinfo))
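    # note on the expected TXT layout (inferred from the parsing above and
    # shown only as an illustration with hypothetical file names): one FITS
    # file name per line, optionally followed by extra whitespace-separated
    # fields, e.g.
    #     abba_0001.fits A
    #     abba_0002.fits B
    # empty lines and lines starting with '#' or '@' are ignored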

    # check consistency of pattern, repeat and number of images
    nimages = len(list_fileinfo)
    pattern_sequence = ''
    for i in range(len(args.pattern)):
        pattern_sequence += args.pattern[i] * args.repeat
    if nimages % len(pattern_sequence) != 0:
        raise ValueError('Unexpected number of images')
    nsequences = nimages // len(pattern_sequence)
    full_set = pattern_sequence * nsequences

    print('Expected sequence pattern: {}'.format(pattern_sequence))
    print('Number of sequences......: {}'.format(nsequences))
    print('Full set of images.......: {}'.format(full_set))
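    # worked example (hypothetical): with --pattern ABBA and --repeat 2 the
    # expected pattern_sequence is 'AABBBBAA' (length 8); a list of 16 images
    # then gives nsequences=2 and full_set='AABBBBAAAABBBBAA'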

    output = generate_yaml_content(args, list_fileinfo)

    # generate YAML file
    with args.outfile as f:
        f.write(output)
    print('--> File {} generated!'.format(args.outfile.name))
Exemplo n.º 15
0
def main(args=None):

    # parse command-line options
    parser = argparse.ArgumentParser(prog='rect_wpoly_for_mos')
    # required arguments
    parser.add_argument("input_list",
                        help="TXT file with list JSON files derived from "
                        "longslit data")
    parser.add_argument("--fitted_bound_param",
                        required=True,
                        help="Input JSON with fitted boundary parameters",
                        type=argparse.FileType('rt'))
    parser.add_argument("--out_MOSlibrary",
                        required=True,
                        help="Output JSON file with results",
                        type=lambda x: arg_file_is_new(parser, x))
    # optional arguments
    parser.add_argument("--debugplot",
                        help="Integer indicating plotting & debugging options"
                        " (default=0)",
                        default=0,
                        type=int,
                        choices=DEBUGPLOT_CODES)
    parser.add_argument("--echo",
                        help="Display full command line",
                        action="store_true")
    args = parser.parse_args(args)

    if args.echo:
        print('\033[1m\033[31m% ' + ' '.join(sys.argv) + '\033[0m\n')

    # ---

    # Read input TXT file with list of JSON files
    list_json_files = list_fileinfo_from_txt(args.input_list)
    nfiles = len(list_json_files)
    if abs(args.debugplot) >= 10:
        print('>>> Number of input JSON files:', nfiles)
        for item in list_json_files:
            print(item)
    if nfiles < 2:
        raise ValueError("Insufficient number of input JSON files")

    # read fitted boundary parameters and check that all the longslit JSON
    # files have been computed using the same fitted boundary parameters
    refined_boundary_model = RefinedBoundaryModelParam._datatype_load(
        args.fitted_bound_param.name)
    for ifile in range(nfiles):
        coef_rect_wpoly = RectWaveCoeff._datatype_load(
            list_json_files[ifile].filename)
        uuid_tmp = coef_rect_wpoly.meta_info['origin']['bound_param']
        if uuid_tmp[4:] != refined_boundary_model.uuid:
            print('Expected uuid:', refined_boundary_model.uuid)
            print('uuid for ifile #' + str(ifile + 1) + ": " + uuid_tmp)
            raise ValueError("Fitted boundary parameter uuid's do not match")

    # check consistency of grism, filter, DTU configuration and list of
    # valid slitlets
    coef_rect_wpoly_first_longslit = RectWaveCoeff._datatype_load(
        list_json_files[0].filename)
    filter_name = coef_rect_wpoly_first_longslit.tags['filter']
    grism_name = coef_rect_wpoly_first_longslit.tags['grism']
    dtu_conf = DtuConfiguration.define_from_dictionary(
        coef_rect_wpoly_first_longslit.meta_info['dtu_configuration'])
    list_valid_islitlets = list(range(1, EMIR_NBARS + 1))
    for idel in coef_rect_wpoly_first_longslit.missing_slitlets:
        list_valid_islitlets.remove(idel)
    for ifile in range(1, nfiles):
        coef_rect_wpoly = RectWaveCoeff._datatype_load(
            list_json_files[ifile].filename)
        filter_tmp = coef_rect_wpoly.tags['filter']
        if filter_name != filter_tmp:
            print(filter_name)
            print(filter_tmp)
            raise ValueError("Unexpected different filter found")
        grism_tmp = coef_rect_wpoly.tags['grism']
        if grism_name != grism_tmp:
            print(grism_name)
            print(grism_tmp)
            raise ValueError("Unexpected different grism found")
        coef_rect_wpoly = RectWaveCoeff._datatype_load(
            list_json_files[ifile].filename)
        dtu_conf_tmp = DtuConfiguration.define_from_dictionary(
            coef_rect_wpoly.meta_info['dtu_configuration'])
        if dtu_conf != dtu_conf_tmp:
            print(dtu_conf)
            print(dtu_conf_tmp)
            raise ValueError("Unexpected different DTU configurations found")
        list_valid_islitlets_tmp = list(range(1, EMIR_NBARS + 1))
        for idel in coef_rect_wpoly.missing_slitlets:
            list_valid_islitlets_tmp.remove(idel)
        if list_valid_islitlets != list_valid_islitlets_tmp:
            print(list_valid_islitlets)
            print(list_valid_islitlets_tmp)
            raise ValueError("Unexpected different list of valid slitlets")

    # check consistency of horizontal bounding box limits (bb_nc1_orig and
    # bb_nc2_orig) and ymargin_bb, and store the values for each slitlet
    dict_bb_param = {}
    print("Checking horizontal bounding box limits and ymargin_bb:")
    for islitlet in list_valid_islitlets:
        islitlet_progress(islitlet, EMIR_NBARS)
        cslitlet = 'slitlet' + str(islitlet).zfill(2)
        dict_bb_param[cslitlet] = {}
        for par in ['bb_nc1_orig', 'bb_nc2_orig', 'ymargin_bb']:
            value_initial = \
                coef_rect_wpoly_first_longslit.contents[islitlet - 1][par]
            for ifile in range(1, nfiles):
                coef_rect_wpoly = RectWaveCoeff._datatype_load(
                    list_json_files[ifile].filename)
                value_tmp = coef_rect_wpoly.contents[islitlet - 1][par]
                if value_initial != value_tmp:
                    print(islitlet, value_initial, value_tmp)
                    raise ValueError("Unexpected different " + par)
                dict_bb_param[cslitlet][par] = value_initial
    print('OK!')

    # ---

    # Read and store all the longslit data
    list_coef_rect_wpoly = []
    for ifile in range(nfiles):
        coef_rect_wpoly = RectWaveCoeff._datatype_load(
            list_json_files[ifile].filename)
        list_coef_rect_wpoly.append(coef_rect_wpoly)

    # ---

    # Initialize structure to save results into an output JSON file
    outdict = {}
    outdict['refined_boundary_model'] = refined_boundary_model.__getstate__()
    outdict['instrument'] = 'EMIR'
    outdict['meta_info'] = {}
    outdict['meta_info']['creation_date'] = datetime.now().isoformat()
    outdict['meta_info']['description'] = \
        'rectification and wavelength calibration polynomial coefficients ' \
        'as a function of csu_bar_slit_center for MOS'
    outdict['meta_info']['recipe_name'] = 'undefined'
    outdict['meta_info']['origin'] = {}
    outdict['meta_info']['origin']['wpoly_longslits'] = {}
    for ifile in range(nfiles):
        cdum = 'longslit_' + str(ifile + 1).zfill(3) + '_uuid'
        outdict['meta_info']['origin']['wpoly_longslits'][cdum] = \
            list_coef_rect_wpoly[ifile].uuid
    outdict['tags'] = {}
    outdict['tags']['grism'] = grism_name
    outdict['tags']['filter'] = filter_name
    outdict['dtu_configuration'] = dtu_conf.outdict()
    outdict['uuid'] = str(uuid4())
    outdict['contents'] = {}

    # include bb_nc1_orig, bb_nc2_orig and ymargin_bb for each slitlet
    # (note that the values of bb_ns1_orig and bb_ns2_orig cannot be
    # computed at this stage because they depend on csu_bar_slit_center)
    for islitlet in list_valid_islitlets:
        cslitlet = 'slitlet' + str(islitlet).zfill(2)
        outdict['contents'][cslitlet] = dict_bb_param[cslitlet]

    # check that order for rectification transformations is the same for all
    # the slitlets and longslit configurations
    order_check_list = []
    for ifile in range(nfiles):
        tmpdict = list_coef_rect_wpoly[ifile].contents
        for islitlet in list_valid_islitlets:
            ttd_order = tmpdict[islitlet - 1]['ttd_order']
            if ttd_order is not None:
                order_check_list.append(ttd_order)
            ttd_order_modeled = \
                tmpdict[islitlet - 1]['ttd_order_longslit_model']
            order_check_list.append(ttd_order_modeled)
    # remove duplicates in list
    order_no_duplicates = list(set(order_check_list))
    if len(order_no_duplicates) != 1:
        print('order_no_duplicates:', order_no_duplicates)
        raise ValueError('ttd_order is not constant!')
    ttd_order = int(order_no_duplicates[0])
    ncoef_rect = ncoef_fmap(ttd_order)
    if abs(args.debugplot) >= 10:
        print('>>> ttd_order........:', ttd_order)
        print('>>> ncoef_rect.......:', ncoef_rect)
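    # note (assumption about ncoef_fmap): for a 2-D polynomial transformation
    # of order n the number of coefficients is expected to be (n+1)*(n+2)/2,
    # e.g. ttd_order=2 -> ncoef_rect=6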

    # check that polynomial degree in frontiers and spectrails are the same
    poldeg_check_list = []
    for ifile in range(nfiles):
        tmpdict = list_coef_rect_wpoly[ifile].contents
        for islitlet in list_valid_islitlets:
            tmppoly = tmpdict[islitlet - 1]['frontier']['poly_coef_lower']
            poldeg_check_list.append(len(tmppoly) - 1)
            tmppoly = tmpdict[islitlet - 1]['frontier']['poly_coef_upper']
            poldeg_check_list.append(len(tmppoly) - 1)
            tmppoly = tmpdict[islitlet - 1]['spectrail']['poly_coef_lower']
            poldeg_check_list.append(len(tmppoly) - 1)
            tmppoly = tmpdict[islitlet - 1]['spectrail']['poly_coef_middle']
            poldeg_check_list.append(len(tmppoly) - 1)
            tmppoly = tmpdict[islitlet - 1]['spectrail']['poly_coef_upper']
            poldeg_check_list.append(len(tmppoly) - 1)
    # remove duplicates in list
    poldeg_no_duplicates = list(set(poldeg_check_list))
    if len(poldeg_no_duplicates) != 1:
        print('poldeg_no_duplicates:', poldeg_no_duplicates)
        raise ValueError('poldeg is not constant in frontiers and '
                         'spectrails!')
    poldeg_spectrails = int(poldeg_no_duplicates[0])
    if abs(args.debugplot) >= 10:
        print('>>> poldeg spectrails:', poldeg_spectrails)

    # check that polynomial degree of wavelength calibration is the same for
    # all the slitlets
    poldeg_check_list = []
    for ifile in range(nfiles):
        tmpdict = list_coef_rect_wpoly[ifile].contents
        for islitlet in list_valid_islitlets:
            tmppoly = tmpdict[islitlet - 1]['wpoly_coeff']
            poldeg_check_list.append(len(tmppoly) - 1)
            tmppoly = tmpdict[islitlet - 1]['wpoly_coeff_longslit_model']
            poldeg_check_list.append(len(tmppoly) - 1)
    # remove duplicates in list
    poldeg_no_duplicates = list(set(poldeg_check_list))
    if len(poldeg_no_duplicates) != 1:
        print('poldeg_no_duplicates:', poldeg_no_duplicates)
        raise ValueError('poldeg is not constant in wavelength calibration '
                         'polynomials!')
    poldeg_wavecal = int(poldeg_no_duplicates[0])
    if abs(args.debugplot) >= 10:
        print('>>> poldeg wavecal...:', poldeg_wavecal)

    # ---

    # csu_bar_slit_center values for each slitlet
    print("CSU_bar_slit_center values:")
    for islitlet in list_valid_islitlets:
        islitlet_progress(islitlet, EMIR_NBARS)
        cslitlet = 'slitlet' + str(islitlet).zfill(2)
        list_csu_bar_slit_center = []
        for ifile in range(nfiles):
            tmpdict = list_coef_rect_wpoly[ifile].contents[islitlet - 1]
            csu_bar_slit_center = tmpdict['csu_bar_slit_center']
            list_csu_bar_slit_center.append(csu_bar_slit_center)
        # check that list_csu_bar_slit_center is properly sorted
        # (use element-wise differences; comparing the two list slices
        # directly would only perform a lexicographic comparison)
        if not np.all(np.diff(list_csu_bar_slit_center) >= 0):
            print('cslitlet: ', cslitlet)
            print('list_csu_bar_slit_center: ', list_csu_bar_slit_center)
            raise ValueError('Unsorted list_csu_bar_slit_center')
        outdict['contents'][cslitlet]['list_csu_bar_slit_center'] = \
            list_csu_bar_slit_center
    print('OK!')

    # ---

    # rectification polynomial coefficients

    # note: when aij and bij have not been computed, we use the modeled
    # version aij_longslit_model and bij_longslit_model
    print("Rectification polynomial coefficients:")
    for islitlet in list_valid_islitlets:
        islitlet_progress(islitlet, EMIR_NBARS)
        cslitlet = 'slitlet' + str(islitlet).zfill(2)
        outdict['contents'][cslitlet]['ttd_order'] = ttd_order
        outdict['contents'][cslitlet]['ncoef_rect'] = ncoef_rect
        for keycoef in ['ttd_aij', 'ttd_bij', 'tti_aij', 'tti_bij']:
            for icoef in range(ncoef_rect):
                ccoef = str(icoef).zfill(2)
                list_cij = []
                for ifile in range(nfiles):
                    tmpdict = \
                        list_coef_rect_wpoly[ifile].contents[islitlet - 1]
                    cij = tmpdict[keycoef]
                    if cij is not None:
                        list_cij.append(cij[icoef])
                    else:
                        cij_modeled = tmpdict[keycoef + '_longslit_model']
                        if cij_modeled is None:
                            raise ValueError("Unexpected cij_modeled=None!")
                        else:
                            list_cij.append(cij_modeled[icoef])
                        if abs(args.debugplot) >= 10:
                            print("Warning: using " + keycoef +
                                  "_longslit_model for " + cslitlet +
                                  " in file " +
                                  list_json_files[ifile].filename)
                cdum = 'list_' + keycoef + '_' + ccoef
                outdict['contents'][cslitlet][cdum] = list_cij
    print('OK!')

    # ---

    # wavelength calibration polynomial coefficients

    # note: when wpoly_coeff have not been computed, we use the
    # wpoly_coeff_longslit_model
    print("Wavelength calibration polynomial coefficients:")
    for islitlet in list_valid_islitlets:
        islitlet_progress(islitlet, EMIR_NBARS)
        cslitlet = 'slitlet' + str(islitlet).zfill(2)
        outdict['contents'][cslitlet]['wpoly_degree'] = poldeg_wavecal
        for icoef in range(poldeg_wavecal + 1):
            ccoef = str(icoef).zfill(2)
            list_cij = []
            for ifile in range(nfiles):
                tmpdict = list_coef_rect_wpoly[ifile].contents[islitlet - 1]
                cij = tmpdict['wpoly_coeff']
                if cij is not None:
                    list_cij.append(cij[icoef])
                else:
                    cij_modeled = tmpdict['wpoly_coeff_longslit_model']
                    if cij_modeled is None:
                        raise ValueError("Unexpected cij_modeled=None!")
                    else:
                        list_cij.append(cij_modeled[icoef])
                    if abs(args.debugplot) >= 10:
                        print("Warning: using wpoly_coeff_longslit_model" +
                              " for " + cslitlet + " in file " +
                              list_json_files[ifile].filename)
            outdict['contents'][cslitlet]['list_wpoly_coeff_' + ccoef] = \
                list_cij
    print('OK!')
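    # at this point each outdict['contents']['slitletNN'] entry gathers
    # (sketch of the keys built above): bb_nc1_orig, bb_nc2_orig, ymargin_bb,
    # list_csu_bar_slit_center, ttd_order, ncoef_rect,
    # list_ttd_aij_NN / list_ttd_bij_NN / list_tti_aij_NN / list_tti_bij_NN,
    # wpoly_degree and list_wpoly_coeff_NN, where each list_* item collects
    # one value per input longslit JSON file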

    # ---

    # OBSOLETE
    # Save resulting JSON structure
    '''
    with open(args.out_MOSlibrary.name + '_old', 'w') as fstream:
        json.dump(outdict, fstream, indent=2, sort_keys=True)
        print('>>> Saving file ' + args.out_MOSlibrary.name + '_old')
    '''

    # --

    # Create object of type MasterRectWave with library of coefficients
    # for rectification and wavelength calibration
    master_rectwv = MasterRectWave(instrument='EMIR')
    master_rectwv.quality_control = numina.types.qc.QC.GOOD
    master_rectwv.tags['grism'] = grism_name
    master_rectwv.tags['filter'] = filter_name
    master_rectwv.meta_info['dtu_configuration'] = outdict['dtu_configuration']
    master_rectwv.meta_info['refined_boundary_model'] = {
        'parmodel': refined_boundary_model.meta_info['parmodel']
    }
    master_rectwv.meta_info['refined_boundary_model'].update(
        outdict['refined_boundary_model']['contents'])
    master_rectwv.total_slitlets = EMIR_NBARS
    master_rectwv.meta_info['origin'] = {
        'bound_param':
        'uuid' + refined_boundary_model.uuid,
        'longslit_frames': [
            'uuid:' + list_coef_rect_wpoly[ifile].uuid
            for ifile in range(nfiles)
        ]
    }
    for i in range(EMIR_NBARS):
        islitlet = i + 1
        dumdict = {'islitlet': islitlet}
        cslitlet = 'slitlet' + str(islitlet).zfill(2)
        if cslitlet in outdict['contents']:
            dumdict.update(outdict['contents'][cslitlet])
        else:
            dumdict.update({
                'bb_nc1_orig': 0,
                'bb_nc2_orig': 0,
                'ymargin_bb': 0,
                'list_csu_bar_slit_center': [],
                'ttd_order': 0,
                'ncoef_rect': 0,
                'wpolydegree': 0
            })
            master_rectwv.missing_slitlets.append(islitlet)
        master_rectwv.contents.append(dumdict)
    master_rectwv.writeto(args.out_MOSlibrary.name)
    print('>>> Saving file ' + args.out_MOSlibrary.name)
Exemplo n.º 16
0
def main(args=None):
    # parse command-line options
    parser = argparse.ArgumentParser()
    # required arguments
    parser.add_argument("--input_rectwv_coeff", required=True,
                        help="Input JSON file with rectification and "
                             "wavelength calibration polynomials "
                             "corresponding to a longslit observation",
                        type=argparse.FileType('rt'))
    parser.add_argument("--output_rectwv_coeff", required=True,
                        help="Output JSON file with updated longslit_model "
                             "coefficients",
                        type=lambda x: arg_file_is_new(parser, x, mode='wt'))

    # optional arguments
    parser.add_argument("--geometry",
                        help="tuple x,y,dx,dy (default 0,0,640,480)",
                        default="0,0,640,480")
    parser.add_argument("--debugplot",
                        help="Integer indicating plotting & debugging options"
                             " (default=0)",
                        default=0, type=int,
                        choices=DEBUGPLOT_CODES)
    parser.add_argument("--echo",
                        help="Display full command line",
                        action="store_true")
    args = parser.parse_args(args)

    if args.echo:
        print('\033[1m\033[31m% ' + ' '.join(sys.argv) + '\033[0m\n')

    # ---

    logging_from_debugplot(args.debugplot)
    logger = logging.getLogger(__name__)

    # geometry
    if args.geometry is None:
        geometry = None
    else:
        tmp_str = args.geometry.split(",")
        x_geom = int(tmp_str[0])
        y_geom = int(tmp_str[1])
        dx_geom = int(tmp_str[2])
        dy_geom = int(tmp_str[3])
        geometry = x_geom, y_geom, dx_geom, dy_geom
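    # equivalent compact parsing (sketch only, same result as the lines above):
    #     geometry = tuple(int(s) for s in args.geometry.split(","))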

    # generate RectWaveCoeff object
    rectwv_coeff = RectWaveCoeff._datatype_load(
        args.input_rectwv_coeff.name)

    # update longslit_model parameters
    rectwv_coeff_updated = rectwv_coeff_add_longslit_model(
        rectwv_coeff=rectwv_coeff,
        geometry=geometry,
        debugplot=args.debugplot
    )

    # save updated RectWaveCoeff object into JSON file
    rectwv_coeff_updated.writeto(args.output_rectwv_coeff.name)
    logger.info('>>> Saving file ' + args.output_rectwv_coeff.name)
Exemplo n.º 17
0
def main(args=None):

    # parse command-line options
    parser = argparse.ArgumentParser()

    # positional arguments
    parser.add_argument("fitsfile",
                        help="FITS file name to be displayed",
                        type=argparse.FileType('rb'))
    parser.add_argument("--fitted_bound_param", required=True,
                        help="JSON file with fitted boundary coefficients "
                             "corresponding to the multislit model",
                        type=argparse.FileType('rt'))
    parser.add_argument("--slitlets", required=True,
                        help="Slitlet selection: string between double "
                             "quotes providing tuples of the form "
                             "n1[,n2[,step]]",
                        type=str)

    # optional arguments
    parser.add_argument("--outfile",
                        help="Output FITS file name",
                        type=lambda x: arg_file_is_new(parser, x, mode='wb'))
    parser.add_argument("--maskonly",
                        help="Generate mask for the indicated slitlets",
                        action="store_true")
    parser.add_argument("--debugplot",
                        help="Integer indicating plotting/debugging" +
                             " (default=0)",
                        type=int, default=0,
                        choices=DEBUGPLOT_CODES)
    parser.add_argument("--echo",
                        help="Display full command line",
                        action="store_true")

    args = parser.parse_args(args)

    if args.echo:
        print('\033[1m\033[31mExecuting: ' + ' '.join(sys.argv) + '\033[0m\n')

    # read input FITS file
    hdulist_image = fits.open(args.fitsfile.name)
    image_header = hdulist_image[0].header
    image2d = hdulist_image[0].data

    naxis1 = image_header['naxis1']
    naxis2 = image_header['naxis2']

    if image2d.shape != (naxis2, naxis1):
        raise ValueError("Unexpected error with NAXIS1, NAXIS2")

    if image2d.shape != (EMIR_NAXIS2, EMIR_NAXIS1):
        raise ValueError("NAXIS1, NAXIS2 unexpected for EMIR detector")

    # remove path from fitsfile
    if args.outfile is None:
        sfitsfile = os.path.basename(args.fitsfile.name)
    else:
        sfitsfile = os.path.basename(args.outfile.name)

    # check that the FITS file has been obtained with EMIR
    instrument = image_header['instrume']
    if instrument != 'EMIR':
        raise ValueError("INSTRUME keyword is not 'EMIR'!")

    # read GRISM, FILTER and ROTANG from FITS header
    grism = image_header['grism']
    spfilter = image_header['filter']
    rotang = image_header['rotang']

    # read fitted_bound_param JSON file
    fittedpar_dict = json.load(args.fitted_bound_param)
    params = bound_params_from_dict(fittedpar_dict)
    if abs(args.debugplot) in [21, 22]:
        params.pretty_print()

    parmodel = fittedpar_dict['meta_info']['parmodel']
    if parmodel != 'multislit':
        raise ValueError("Unexpected parameter model: ", parmodel)

    # define slitlet range
    islitlet_min = fittedpar_dict['tags']['islitlet_min']
    islitlet_max = fittedpar_dict['tags']['islitlet_max']
    list_islitlet = list_slitlets_from_string(
        s=args.slitlets,
        islitlet_min=islitlet_min,
        islitlet_max=islitlet_max
    )

    # read CsuConfiguration object from FITS file
    csu_config = CsuConfiguration.define_from_fits(args.fitsfile)

    # define csu_bar_slit_center associated to each slitlet
    list_csu_bar_slit_center = []
    for islitlet in list_islitlet:
        list_csu_bar_slit_center.append(
            csu_config.csu_bar_slit_center(islitlet))

    # initialize output data array
    image2d_output = np.zeros((naxis2, naxis1))

    # main loop
    for islitlet, csu_bar_slit_center in \
            zip(list_islitlet, list_csu_bar_slit_center):
        image2d_tmp = select_unrectified_slitlet(
            image2d=image2d,
            islitlet=islitlet,
            csu_bar_slit_center=csu_bar_slit_center,
            params=params,
            parmodel=parmodel,
            maskonly=args.maskonly
        )
        image2d_output += image2d_tmp

    # update the array of the output file
    hdulist_image[0].data = image2d_output

    # save output FITS file (only when an output file name has been given)
    if args.outfile is not None:
        hdulist_image.writeto(args.outfile)

    # close original image
    hdulist_image.close()

    # display full image
    if abs(args.debugplot) % 10 != 0:
        ax = ximshow(image2d=image2d_output,
                     title=sfitsfile + "\n" + args.slitlets,
                     image_bbox=(1, naxis1, 1, naxis2), show=False)

        # overplot boundaries
        overplot_boundaries_from_params(
            ax=ax,
            params=params,
            parmodel=parmodel,
            list_islitlet=list_islitlet,
            list_csu_bar_slit_center=list_csu_bar_slit_center
        )

        # overplot frontiers
        overplot_frontiers_from_params(
            ax=ax,
            params=params,
            parmodel=parmodel,
            list_islitlet=list_islitlet,
            list_csu_bar_slit_center=list_csu_bar_slit_center,
            micolors=('b', 'b'), linetype='-',
            labels=False    # already displayed with the boundaries
        )

        # show plot
        pause_debugplot(12, pltshow=True)
Exemplo n.º 18
0
def main(args=None):
    # parse command-line options
    parser = argparse.ArgumentParser(
        description='description: compute pixel-to-pixel flatfield'
    )

    # required arguments
    parser.add_argument("fitsfile",
                        help="Input FITS file (flat ON-OFF)",
                        type=argparse.FileType('rb'))
    parser.add_argument("--rectwv_coeff", required=True,
                        help="Input JSON file with rectification and "
                             "wavelength calibration coefficients",
                        type=argparse.FileType('rt'))
    parser.add_argument("--minimum_fraction", required=True,
                        help="Minimum allowed flatfielding value",
                        type=float, default=0.01)
    parser.add_argument("--minimum_value_in_output",
                        help="Minimum value allowed in output file: pixels "
                             "below this value are set to 1.0 (default=0.01)",
                        type=float, default=0.01)
    parser.add_argument("--nwindow_median", required=True,
                        help="Window size to smooth median spectrum in the "
                             "spectral direction",
                        type=int)
    parser.add_argument("--outfile", required=True,
                        help="Output FITS file",
                        type=lambda x: arg_file_is_new(parser, x, mode='wb'))

    # optional arguments
    parser.add_argument("--delta_global_integer_offset_x_pix",
                        help="Delta global integer offset in the X direction "
                             "(default=0)",
                        default=0, type=int)
    parser.add_argument("--delta_global_integer_offset_y_pix",
                        help="Delta global integer offset in the Y direction "
                             "(default=0)",
                        default=0, type=int)
    parser.add_argument("--resampling",
                        help="Resampling method: 1 -> nearest neighbor, "
                             "2 -> linear interpolation (default)",
                        default=2, type=int,
                        choices=(1, 2))
    parser.add_argument("--ignore_DTUconf",
                        help="Ignore DTU configurations differences between "
                             "model and input image",
                        action="store_true")
    parser.add_argument("--debugplot",
                        help="Integer indicating plotting & debugging options"
                             " (default=0)",
                        default=0, type=int,
                        choices=DEBUGPLOT_CODES)
    parser.add_argument("--echo",
                        help="Display full command line",
                        action="store_true")
    args = parser.parse_args(args)

    if args.echo:
        print('\033[1m\033[31m% ' + ' '.join(sys.argv) + '\033[0m\n')

    # read calibration structure from JSON file
    rectwv_coeff = RectWaveCoeff._datatype_load(args.rectwv_coeff.name)

    # modify (when requested) global offsets
    rectwv_coeff.global_integer_offset_x_pix += \
        args.delta_global_integer_offset_x_pix
    rectwv_coeff.global_integer_offset_y_pix += \
        args.delta_global_integer_offset_y_pix

    # read FITS image and its corresponding header
    hdulist = fits.open(args.fitsfile)
    header = hdulist[0].header
    image2d = hdulist[0].data
    hdulist.close()

    # apply global offsets
    image2d = apply_integer_offsets(
        image2d=image2d,
        offx=rectwv_coeff.global_integer_offset_x_pix,
        offy=rectwv_coeff.global_integer_offset_y_pix
    )
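    # note: these global offsets are only used while building the flatfield;
    # they are undone with apply_integer_offsets (negative offsets) just
    # before the result is written out, near the end of this script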

    # protections
    naxis2, naxis1 = image2d.shape
    if naxis1 != header['naxis1'] or naxis2 != header['naxis2']:
        print('>>> NAXIS1:', naxis1)
        print('>>> NAXIS2:', naxis2)
        raise ValueError('Something is wrong with NAXIS1 and/or NAXIS2')
    if abs(args.debugplot) >= 10:
        print('>>> NAXIS1:', naxis1)
        print('>>> NAXIS2:', naxis2)

    # check that the input FITS file grism and filter match
    filter_name = header['filter']
    if filter_name != rectwv_coeff.tags['filter']:
        raise ValueError("Filter name does not match!")
    grism_name = header['grism']
    if grism_name != rectwv_coeff.tags['grism']:
        raise ValueError("Filter name does not match!")
    if abs(args.debugplot) >= 10:
        print('>>> grism.......:', grism_name)
        print('>>> filter......:', filter_name)

    # check that the DTU configurations are compatible
    dtu_conf_fitsfile = DtuConfiguration.define_from_fits(args.fitsfile)
    dtu_conf_jsonfile = DtuConfiguration.define_from_dictionary(
        rectwv_coeff.meta_info['dtu_configuration'])
    if dtu_conf_fitsfile != dtu_conf_jsonfile:
        print('DTU configuration (FITS file):\n\t', dtu_conf_fitsfile)
        print('DTU configuration (JSON file):\n\t', dtu_conf_jsonfile)
        if args.ignore_DTUconf:
            print('WARNING: DTU configuration differences found!')
        else:
            raise ValueError('DTU configurations do not match')
    else:
        if abs(args.debugplot) >= 10:
            print('>>> DTU Configuration match!')
            print(dtu_conf_fitsfile)

    # valid slitlet numbers
    list_valid_islitlets = list(range(1, EMIR_NBARS + 1))
    for idel in rectwv_coeff.missing_slitlets:
        list_valid_islitlets.remove(idel)
    if abs(args.debugplot) >= 10:
        print('>>> valid slitlet numbers:\n', list_valid_islitlets)

    # ---

    # initialize rectified image
    image2d_flatfielded = np.zeros((EMIR_NAXIS2, EMIR_NAXIS1))

    # main loop
    for islitlet in list_valid_islitlets:
        if args.debugplot == 0:
            islitlet_progress(islitlet, EMIR_NBARS)

        # define Slitlet2D object
        slt = Slitlet2D(islitlet=islitlet,
                        rectwv_coeff=rectwv_coeff,
                        debugplot=args.debugplot)

        if abs(args.debugplot) >= 10:
            print(slt)

        # extract (distorted) slitlet from the initial image
        slitlet2d = slt.extract_slitlet2d(image2d)

        # rectify slitlet
        slitlet2d_rect = slt.rectify(
            slitlet2d,
            resampling=args.resampling
        )
        naxis2_slitlet2d, naxis1_slitlet2d = slitlet2d_rect.shape

        if naxis1_slitlet2d != EMIR_NAXIS1:
            print('naxis1_slitlet2d: ', naxis1_slitlet2d)
            print('EMIR_NAXIS1.....: ', EMIR_NAXIS1)
            raise ValueError("Unexpected naxis1_slitlet2d")

        # get useful slitlet region (use boundaries instead of frontiers;
        # note that nscan_minmax_frontiers() works well independently of
        # whether frontiers or boundaries are passed as arguments)
        nscan_min, nscan_max = nscan_minmax_frontiers(
            slt.y0_reference_lower,
            slt.y0_reference_upper,
            resize=False
        )
        ii1 = nscan_min - slt.bb_ns1_orig
        ii2 = nscan_max - slt.bb_ns1_orig + 1

        # median spectrum
        sp_collapsed = np.median(slitlet2d_rect[ii1:(ii2 + 1), :], axis=0)

        # smooth median spectrum along the spectral direction
        sp_median = ndimage.median_filter(sp_collapsed, args.nwindow_median,
                                          mode='nearest')
        ymax_spmedian = sp_median.max()
        y_threshold = ymax_spmedian * args.minimum_fraction
        sp_median[np.where(sp_median < y_threshold)] = 0.0
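        # worked example (hypothetical numbers): if the smoothed median
        # spectrum peaks at 1000 counts and --minimum_fraction=0.01, every
        # channel below 10 counts is set to 0 and is later protected against
        # division (the corresponding normalized pixels are forced to 1.0)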

        if abs(args.debugplot) > 10:
            title = 'Slitlet#' + str(islitlet) + ' (median spectrum)'
            xdum = np.arange(1, naxis1_slitlet2d + 1)
            ax = ximplotxy(xdum, sp_collapsed, title=title,
                           show=False, label='collapsed spectrum')
            ax.plot(xdum, sp_median, label='filtered spectrum')
            ax.plot([1, naxis1_slitlet2d], 2*[y_threshold],
                    label='threshold')
            ax.legend()
            ax.set_ylim(-0.05*ymax_spmedian, 1.05*ymax_spmedian)
            pause_debugplot(args.debugplot,
                            pltshow=True, tight_layout=True)

        # generate rectified slitlet region filled with the median spectrum
        slitlet2d_rect_spmedian = np.tile(sp_median, (naxis2_slitlet2d, 1))
        if abs(args.debugplot) > 10:
            slt.ximshow_rectified(slitlet2d_rect_spmedian)

        # unrectified image
        slitlet2d_unrect_spmedian = slt.rectify(
            slitlet2d_rect_spmedian,
            resampling=args.resampling,
            inverse=True
        )

        # normalize initial slitlet image (avoid division by zero)
        slitlet2d_norm = np.zeros_like(slitlet2d)
        for j in range(naxis1_slitlet2d):
            for i in range(naxis2_slitlet2d):
                den = slitlet2d_unrect_spmedian[i, j]
                if den == 0:
                    slitlet2d_norm[i, j] = 1.0
                else:
                    slitlet2d_norm[i, j] = slitlet2d[i, j] / den
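        # vectorized sketch of the same normalization (assumes float arrays):
        #     slitlet2d_norm = np.divide(
        #         slitlet2d, slitlet2d_unrect_spmedian,
        #         out=np.ones_like(slitlet2d, dtype=float),
        #         where=(slitlet2d_unrect_spmedian != 0))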

        if abs(args.debugplot) > 10:
            slt.ximshow_unrectified(slitlet2d_norm)

        for j in range(EMIR_NAXIS1):
            xchannel = j + 1
            y0_lower = slt.list_frontiers[0](xchannel)
            y0_upper = slt.list_frontiers[1](xchannel)
            n1, n2 = nscan_minmax_frontiers(y0_frontier_lower=y0_lower,
                                            y0_frontier_upper=y0_upper,
                                            resize=True)
            # note that n1 and n2 are scans (ranging from 1 to NAXIS2)
            nn1 = n1 - slt.bb_ns1_orig + 1
            nn2 = n2 - slt.bb_ns1_orig + 1
            image2d_flatfielded[(n1 - 1):n2, j] = \
                slitlet2d_norm[(nn1 - 1):nn2, j]

            # force to 1.0 region around frontiers
            image2d_flatfielded[(n1 - 1):(n1 + 2), j] = 1
            image2d_flatfielded[(n2 - 5):n2, j] = 1
    if args.debugplot == 0:
        print('OK!')

    # set pixels below minimum value to 1.0
    filtered = np.where(image2d_flatfielded < args.minimum_value_in_output)
    image2d_flatfielded[filtered] = 1.0

    # restore global offsets
    image2d_flatfielded = apply_integer_offsets(
        image2d=image2d_flatfielded,
        offx=-rectwv_coeff.global_integer_offset_x_pix,
        offy=-rectwv_coeff.global_integer_offset_y_pix
    )

    # save output file
    save_ndarray_to_fits(
        array=image2d_flatfielded,
        file_name=args.outfile,
        main_header=header,
        overwrite=True
    )
    print('>>> Saving file ' + args.outfile.name)
Exemplo n.º 19
0
def main(args=None):
    # parse command-line options
    parser = argparse.ArgumentParser(
        description='description: evaluate rectification and wavelength '
        'calibration polynomials for the CSU configuration of a '
        'particular image')

    # required arguments
    parser.add_argument("fitsfile",
                        help="Input FITS file",
                        type=argparse.FileType('rb'))
    parser.add_argument("--rect_wpoly_MOSlibrary",
                        required=True,
                        help="Input JSON file with library of rectification "
                        "and wavelength calibration coefficients",
                        type=argparse.FileType('rt'))
    parser.add_argument("--out_json",
                        required=True,
                        help="Output JSON file with calibration computed for "
                        "the input FITS file",
                        type=lambda x: arg_file_is_new(parser, x, mode='wt'))
    # optional arguments
    parser.add_argument("--global_integer_offset_x_pix",
                        help="Global integer offset in the X direction "
                        "(default=0)",
                        default=0,
                        type=int)
    parser.add_argument("--global_integer_offset_y_pix",
                        help="Global integer offset in the Y direction "
                        "(default=0)",
                        default=0,
                        type=int)
    parser.add_argument("--ignore_dtu_configuration",
                        help="Ignore DTU configurations differences between "
                        "model and input image",
                        action="store_true")
    parser.add_argument("--debugplot",
                        help="Integer indicating plotting & debugging options"
                        " (default=0)",
                        default=0,
                        type=int,
                        choices=DEBUGPLOT_CODES)
    parser.add_argument("--echo",
                        help="Display full command line",
                        action="store_true")
    args = parser.parse_args(args)

    if args.echo:
        print('\033[1m\033[31m% ' + ' '.join(sys.argv) + '\033[0m\n')

    # ---

    # generate HDUList object
    hdulist = fits.open(args.fitsfile)

    # generate MasterRectWave object
    master_rectwv = MasterRectWave._datatype_load(
        args.rect_wpoly_MOSlibrary.name)

    # compute rectification and wavelength calibration coefficients
    rectwv_coeff = rectwv_coeff_from_mos_library(
        hdulist,
        master_rectwv,
        ignore_dtu_configuration=args.ignore_dtu_configuration,
        debugplot=args.debugplot)

    # set global offsets
    rectwv_coeff.global_integer_offset_x_pix = \
        args.global_integer_offset_x_pix
    rectwv_coeff.global_integer_offset_y_pix = \
        args.global_integer_offset_y_pix

    # save RectWaveCoeff object into JSON file
    rectwv_coeff.writeto(args.out_json.name)
    print('>>> Saving file ' + args.out_json.name)
Exemplo n.º 20
0
def main(args=None):
    # parse command-line options
    parser = argparse.ArgumentParser(
        description='description: apply rectification and wavelength '
                    'calibration polynomials for the CSU configuration of a '
                    'particular image'
    )

    # required arguments
    parser.add_argument("fitsfile",
                        help="Input FITS file",
                        type=argparse.FileType('rb'))
    parser.add_argument("--rectwv_coeff", required=True,
                        help="Input JSON file with rectification and "
                             "wavelength calibration coefficients",
                        type=argparse.FileType('rt'))
    parser.add_argument("--outfile", required=True,
                        help="Output FITS file with rectified and "
                             "wavelength calibrated image",
                        type=lambda x: arg_file_is_new(parser, x, mode='wb'))

    # optional arguments
    parser.add_argument("--delta_global_integer_offset_x_pix",
                        help="Delta global integer offset in the X direction "
                             "(default=0)",
                        default=0, type=int)
    parser.add_argument("--delta_global_integer_offset_y_pix",
                        help="Delta global integer offset in the Y direction "
                             "(default=0)",
                        default=0, type=int)
    parser.add_argument("--resampling",
                        help="Resampling method: 1 -> nearest neighbor, "
                             "2 -> linear interpolation (default)",
                        default=2, type=int,
                        choices=(1, 2))
    parser.add_argument("--ignore_dtu_configuration",
                        help="Ignore DTU configurations differences between "
                             "transformation and input image",
                        action="store_true")
    parser.add_argument("--debugplot",
                        help="Integer indicating plotting & debugging options"
                             " (default=0)",
                        default=0, type=int,
                        choices=DEBUGPLOT_CODES)
    parser.add_argument("--echo",
                        help="Display full command line",
                        action="store_true")
    args = parser.parse_args(args)

    if args.echo:
        print('\033[1m\033[31m% ' + ' '.join(sys.argv) + '\033[0m\n')

    # ---

    logging_from_debugplot(args.debugplot)

    # generate RectWaveCoeff object
    rectwv_coeff = RectWaveCoeff._datatype_load(args.rectwv_coeff.name)

    # modify (when requested) global offsets
    rectwv_coeff.global_integer_offset_x_pix += \
        args.delta_global_integer_offset_x_pix
    rectwv_coeff.global_integer_offset_y_pix += \
        args.delta_global_integer_offset_y_pix

    # generate HDUList object
    # read FITS image and its corresponding header
    hdulist = fits.open(args.fitsfile)

    # rectification and wavelength calibration
    reduced_arc = apply_rectwv_coeff(
        hdulist,
        rectwv_coeff,
        args_resampling=args.resampling,
        args_ignore_dtu_configuration=args.ignore_dtu_configuration,
        debugplot=args.debugplot
    )

    # save result
    reduced_arc.writeto(args.outfile, overwrite=True)
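
The --resampling option selects between nearest-neighbour (1) and linear (2) interpolation when the image is placed on the rectified grid. A 1-D numpy sketch of the difference, purely illustrative and not the code used inside apply_rectwv_coeff:

import numpy as np

x_old = np.arange(5, dtype=float)              # original pixel grid
flux_old = np.array([0.0, 1.0, 4.0, 9.0, 16.0])
x_new = np.linspace(0.0, 4.0, 9)               # finer output grid

# resampling = 2 -> linear interpolation between neighbouring pixels
flux_linear = np.interp(x_new, x_old, flux_old)

# resampling = 1 -> nearest neighbour: copy the closest original pixel
idx_nearest = np.rint(x_new).astype(int)
flux_nearest = flux_old[idx_nearest]

print(flux_linear)
print(flux_nearest)
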
Exemplo n.º 21
0
def main(args=None):
    # parse command-line options
    parser = argparse.ArgumentParser(
        description='description: compute pixel-to-pixel flatfield'
    )

    # required arguments
    parser.add_argument("fitsfile",
                        help="Input FITS file (flat ON-OFF)",
                        type=argparse.FileType('rb'))
    parser.add_argument("--rectwv_coeff", required=True,
                        help="Input JSON file with rectification and "
                             "wavelength calibration coefficients",
                        type=argparse.FileType('rt'))
    parser.add_argument("--minimum_slitlet_width_mm", required=True,
                        help="Minimum slitlet width in mm",
                        type=float)
    parser.add_argument("--maximum_slitlet_width_mm", required=True,
                        help="Maximum slitlet width in mm",
                        type=float)
    parser.add_argument("--minimum_fraction", required=True,
                        help="Minimum allowed flatfielding value",
                        type=float, default=0.01)
    parser.add_argument("--minimum_value_in_output",
                        help="Minimum value allowed in output file: pixels "
                             "below this value are set to 1.0 (default=0.01)",
                        type=float, default=0.01)
    parser.add_argument("--maximum_value_in_output",
                        help="Maximum value allowed in output file: pixels "
                             "above this value are set to 1.0 (default=10.0)",
                        type=float, default=10.0)
    # parser.add_argument("--nwindow_median", required=True,
    #                     help="Window size to smooth median spectrum in the "
    #                          "spectral direction",
    #                     type=int)
    parser.add_argument("--outfile", required=True,
                        help="Output FITS file",
                        type=lambda x: arg_file_is_new(parser, x, mode='wb'))

    # optional arguments
    parser.add_argument("--delta_global_integer_offset_x_pix",
                        help="Delta global integer offset in the X direction "
                             "(default=0)",
                        default=0, type=int)
    parser.add_argument("--delta_global_integer_offset_y_pix",
                        help="Delta global integer offset in the Y direction "
                             "(default=0)",
                        default=0, type=int)
    parser.add_argument("--resampling",
                        help="Resampling method: 1 -> nearest neighbor, "
                             "2 -> linear interpolation (default)",
                        default=2, type=int,
                        choices=(1, 2))
    parser.add_argument("--ignore_DTUconf",
                        help="Ignore DTU configurations differences between "
                             "model and input image",
                        action="store_true")
    parser.add_argument("--debugplot",
                        help="Integer indicating plotting & debugging options"
                             " (default=0)",
                        default=0, type=int,
                        choices=DEBUGPLOT_CODES)
    parser.add_argument("--echo",
                        help="Display full command line",
                        action="store_true")
    args = parser.parse_args(args)

    if args.echo:
        print('\033[1m\033[31m% ' + ' '.join(sys.argv) + '\033[0m\n')

    # read calibration structure from JSON file
    rectwv_coeff = RectWaveCoeff._datatype_load(args.rectwv_coeff.name)

    # modify (when requested) global offsets
    rectwv_coeff.global_integer_offset_x_pix += \
        args.delta_global_integer_offset_x_pix
    rectwv_coeff.global_integer_offset_y_pix += \
        args.delta_global_integer_offset_y_pix

    # read FITS image and its corresponding header
    hdulist = fits.open(args.fitsfile)
    header = hdulist[0].header
    image2d = hdulist[0].data
    hdulist.close()

    # apply global offsets
    image2d = apply_integer_offsets(
        image2d=image2d,
        offx=rectwv_coeff.global_integer_offset_x_pix,
        offy=rectwv_coeff.global_integer_offset_y_pix
    )

    # protections
    naxis2, naxis1 = image2d.shape
    if naxis1 != header['naxis1'] or naxis2 != header['naxis2']:
        print('>>> NAXIS1:', naxis1)
        print('>>> NAXIS2:', naxis2)
        raise ValueError('Something is wrong with NAXIS1 and/or NAXIS2')
    if abs(args.debugplot) >= 10:
        print('>>> NAXIS1:', naxis1)
        print('>>> NAXIS2:', naxis2)

    # check that the input FITS file grism and filter match
    filter_name = header['filter']
    if filter_name != rectwv_coeff.tags['filter']:
        raise ValueError("Filter name does not match!")
    grism_name = header['grism']
    if grism_name != rectwv_coeff.tags['grism']:
        raise ValueError("Filter name does not match!")
    if abs(args.debugplot) >= 10:
        print('>>> grism.......:', grism_name)
        print('>>> filter......:', filter_name)

    # check that the DTU configurations are compatible
    dtu_conf_fitsfile = DtuConfiguration.define_from_fits(args.fitsfile)
    dtu_conf_jsonfile = DtuConfiguration.define_from_dictionary(
        rectwv_coeff.meta_info['dtu_configuration'])
    if dtu_conf_fitsfile != dtu_conf_jsonfile:
        print('DTU configuration (FITS file):\n\t', dtu_conf_fitsfile)
        print('DTU configuration (JSON file):\n\t', dtu_conf_jsonfile)
        if args.ignore_DTUconf:
            print('WARNING: DTU configuration differences found!')
        else:
            raise ValueError('DTU configurations do not match')
    else:
        if abs(args.debugplot) >= 10:
            print('>>> DTU Configuration match!')
            print(dtu_conf_fitsfile)

    # load CSU configuration
    csu_conf_fitsfile = CsuConfiguration.define_from_fits(args.fitsfile)
    if abs(args.debugplot) >= 10:
        print(csu_conf_fitsfile)

    # valid slitlet numbers
    list_valid_islitlets = list(range(1, EMIR_NBARS + 1))
    for idel in rectwv_coeff.missing_slitlets:
        print('-> Removing slitlet (not defined):', idel)
        list_valid_islitlets.remove(idel)
    # filter out slitlets with widths outside valid range
    list_outside_valid_width = []
    for islitlet in list_valid_islitlets:
        slitwidth = csu_conf_fitsfile.csu_bar_slit_width(islitlet)
        if (slitwidth < args.minimum_slitlet_width_mm) or \
                (slitwidth > args.maximum_slitlet_width_mm):
            list_outside_valid_width.append(islitlet)
            print('-> Removing slitlet (invalid width):', islitlet)
    if len(list_outside_valid_width) > 0:
        for idel in list_outside_valid_width:
            list_valid_islitlets.remove(idel)
    print('>>> valid slitlet numbers:\n', list_valid_islitlets)

    # ---

    # initialize rectified image
    image2d_flatfielded = np.zeros((EMIR_NAXIS2, EMIR_NAXIS1))

    # main loop
    for islitlet in list(range(1, EMIR_NBARS + 1)):
        if islitlet in list_valid_islitlets:
            if args.debugplot == 0:
                islitlet_progress(islitlet, EMIR_NBARS, ignore=False)
            # define Slitlet2D object
            slt = Slitlet2D(islitlet=islitlet,
                            rectwv_coeff=rectwv_coeff,
                            debugplot=args.debugplot)

            if abs(args.debugplot) >= 10:
                print(slt)

            # extract (distorted) slitlet from the initial image
            slitlet2d = slt.extract_slitlet2d(
                image_2k2k=image2d,
                subtitle='original image'
            )

            # rectify slitlet
            slitlet2d_rect = slt.rectify(
                slitlet2d=slitlet2d,
                resampling=args.resampling,
                subtitle='original rectified'
            )
            naxis2_slitlet2d, naxis1_slitlet2d = slitlet2d_rect.shape

            if naxis1_slitlet2d != EMIR_NAXIS1:
                print('naxis1_slitlet2d: ', naxis1_slitlet2d)
                print('EMIR_NAXIS1.....: ', EMIR_NAXIS1)
                raise ValueError("Unexpected naxis1_slitlet2d")

            # get useful slitlet region (use boundaries instead of frontiers;
            # note that nscan_minmax_frontiers() works well independently
            # of using frontiers or boundaries as arguments)
            nscan_min, nscan_max = nscan_minmax_frontiers(
                slt.y0_reference_lower,
                slt.y0_reference_upper,
                resize=False
            )
            ii1 = nscan_min - slt.bb_ns1_orig
            ii2 = nscan_max - slt.bb_ns1_orig + 1

            # median spectrum
            sp_collapsed = np.median(slitlet2d_rect[ii1:(ii2 + 1), :], axis=0)

            # smooth median spectrum along the spectral direction
            # sp_median = ndimage.median_filter(
            #     sp_collapsed,
            #     args.nwindow_median,
            #     mode='nearest'
            # )
            xaxis1 = np.arange(1, naxis1_slitlet2d + 1)
            nremove = 5
            spl = AdaptiveLSQUnivariateSpline(
                x=xaxis1[nremove:-nremove],
                y=sp_collapsed[nremove:-nremove],
                t=11,
                adaptive=True
            )
            xknots = spl.get_knots()
            yknots = spl(xknots)
            sp_median = spl(xaxis1)

            # compute rms within each knot interval
            nknots = len(xknots)
            rms_array = np.zeros(nknots - 1, dtype=float)
            for iknot in range(nknots - 1):
                residuals = []
                for xdum, ydum, yydum in \
                        zip(xaxis1, sp_collapsed, sp_median):
                    if xknots[iknot] <= xdum <= xknots[iknot + 1]:
                        residuals.append(abs(ydum - yydum))
                if len(residuals) > 5:
                    rms_array[iknot] = np.std(residuals)
                else:
                    rms_array[iknot] = 0

            # determine in which knot interval falls each pixel
            iknot_array = np.zeros(len(xaxis1), dtype=int)
            for idum, xdum in enumerate(xaxis1):
                for iknot in range(nknots - 1):
                    if xknots[iknot] <= xdum <= xknots[iknot + 1]:
                        iknot_array[idum] = iknot

            # compute new fit removing deviant points (with fixed knots)
            xnewfit = []
            ynewfit = []
            for idum in range(len(xaxis1)):
                delta_sp = abs(sp_collapsed[idum] - sp_median[idum])
                rms_tmp = rms_array[iknot_array[idum]]
                if idum == 0 or idum == (len(xaxis1) - 1):
                    lok = True
                elif rms_tmp > 0:
                    if delta_sp < 3.0 * rms_tmp:
                        lok = True
                    else:
                        lok = False
                else:
                    lok = True
                if lok:
                    xnewfit.append(xaxis1[idum])
                    ynewfit.append(sp_collapsed[idum])
            nremove = 5
            splnew = AdaptiveLSQUnivariateSpline(
                x=xnewfit[nremove:-nremove],
                y=ynewfit[nremove:-nremove],
                t=xknots[1:-1],
                adaptive=False
            )
            sp_median = splnew(xaxis1)

            ymax_spmedian = sp_median.max()
            y_threshold = ymax_spmedian * args.minimum_fraction
            sp_median[np.where(sp_median < y_threshold)] = 0.0

            if abs(args.debugplot) > 10:
                title = 'Slitlet#' + str(islitlet) + ' (median spectrum)'
                ax = ximplotxy(xaxis1, sp_collapsed,
                               title=title,
                               show=False, **{'label': 'collapsed spectrum'})
                ax.plot(xaxis1, sp_median, label='fitted spectrum')
                ax.plot([1, naxis1_slitlet2d], 2*[y_threshold],
                        label='threshold')
                ax.plot(xknots, yknots, 'o', label='knots')
                ax.legend()
                ax.set_ylim(-0.05*ymax_spmedian, 1.05*ymax_spmedian)
                pause_debugplot(args.debugplot,
                                pltshow=True, tight_layout=True)

            # generate rectified slitlet region filled with the median spectrum
            slitlet2d_rect_spmedian = np.tile(sp_median, (naxis2_slitlet2d, 1))
            if abs(args.debugplot) > 10:
                slt.ximshow_rectified(
                    slitlet2d_rect=slitlet2d_rect_spmedian,
                    subtitle='rectified, filled with median spectrum'
                )

            # unrectified image
            slitlet2d_unrect_spmedian = slt.rectify(
                slitlet2d=slitlet2d_rect_spmedian,
                resampling=args.resampling,
                inverse=True,
                subtitle='unrectified, filled with median spectrum'
            )

            # normalize initial slitlet image (avoid division by zero)
            slitlet2d_norm = np.zeros_like(slitlet2d)
            for j in range(naxis1_slitlet2d):
                for i in range(naxis2_slitlet2d):
                    den = slitlet2d_unrect_spmedian[i, j]
                    if den == 0:
                        slitlet2d_norm[i, j] = 1.0
                    else:
                        slitlet2d_norm[i, j] = slitlet2d[i, j] / den

            if abs(args.debugplot) > 10:
                slt.ximshow_unrectified(
                    slitlet2d=slitlet2d_norm,
                    subtitle='unrectified, pixel-to-pixel'
                )
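
            # Note: two adjacent slitlets are treated below as a single
            # pseudo-longslit when their CSU bar widths agree to better than
            # 25% and their bar centres differ by less than a quarter of the
            # mean width; in that case the frontier region between them is
            # not forced to 1.0 further down.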

            # check for pseudo-longslit with previous slitlet
            if islitlet > 1:
                if (islitlet - 1) in list_valid_islitlets:
                    c1 = csu_conf_fitsfile.csu_bar_slit_center(islitlet - 1)
                    w1 = csu_conf_fitsfile.csu_bar_slit_width(islitlet - 1)
                    c2 = csu_conf_fitsfile.csu_bar_slit_center(islitlet)
                    w2 = csu_conf_fitsfile.csu_bar_slit_width(islitlet)
                    if abs(w1-w2)/w1 < 0.25:
                        wmean = (w1 + w2) / 2.0
                        if abs(c1 - c2) < wmean/4.0:
                            same_slitlet_below = True
                        else:
                            same_slitlet_below = False
                    else:
                        same_slitlet_below = False
                else:
                    same_slitlet_below = False
            else:
                same_slitlet_below = False

            # check for pseudo-longslit with next slitlet
            if islitlet < EMIR_NBARS:
                if (islitlet + 1) in list_valid_islitlets:
                    c1 = csu_conf_fitsfile.csu_bar_slit_center(islitlet)
                    w1 = csu_conf_fitsfile.csu_bar_slit_width(islitlet)
                    c2 = csu_conf_fitsfile.csu_bar_slit_center(islitlet + 1)
                    w2 = csu_conf_fitsfile.csu_bar_slit_width(islitlet + 1)
                    if abs(w1-w2)/w1 < 0.25:
                        wmean = (w1 + w2) / 2.0
                        if abs(c1 - c2) < wmean/4.0:
                            same_slitlet_above = True
                        else:
                            same_slitlet_above = False
                    else:
                        same_slitlet_above = False
                else:
                    same_slitlet_above = False
            else:
                same_slitlet_above = False

            for j in range(EMIR_NAXIS1):
                xchannel = j + 1
                y0_lower = slt.list_frontiers[0](xchannel)
                y0_upper = slt.list_frontiers[1](xchannel)
                n1, n2 = nscan_minmax_frontiers(y0_frontier_lower=y0_lower,
                                                y0_frontier_upper=y0_upper,
                                                resize=True)
                # note that n1 and n2 are scans (ranging from 1 to NAXIS2)
                nn1 = n1 - slt.bb_ns1_orig + 1
                nn2 = n2 - slt.bb_ns1_orig + 1
                image2d_flatfielded[(n1 - 1):n2, j] = \
                    slitlet2d_norm[(nn1 - 1):nn2, j]

                # force to 1.0 region around frontiers
                if not same_slitlet_below:
                    image2d_flatfielded[(n1 - 1):(n1 + 2), j] = 1
                if not same_slitlet_above:
                    image2d_flatfielded[(n2 - 5):n2, j] = 1
        else:
            if args.debugplot == 0:
                islitlet_progress(islitlet, EMIR_NBARS, ignore=True)

    if args.debugplot == 0:
        print('OK!')

    # restore global offsets
    image2d_flatfielded = apply_integer_offsets(
        image2d=image2d_flatfielded,
        offx=-rectwv_coeff.global_integer_offset_x_pix,
        offy=-rectwv_coeff.global_integer_offset_y_pix
    )

    # set pixels below minimum value to 1.0
    filtered = np.where(image2d_flatfielded < args.minimum_value_in_output)
    image2d_flatfielded[filtered] = 1.0

    # set pixels above maximum value to 1.0
    filtered = np.where(image2d_flatfielded > args.maximum_value_in_output)
    image2d_flatfielded[filtered] = 1.0

    # save output file
    save_ndarray_to_fits(
        array=image2d_flatfielded,
        file_name=args.outfile,
        main_header=header,
        overwrite=True
    )
    print('>>> Saving file ' + args.outfile.name)
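
The pixel-by-pixel loop used above to normalize each slitlet by the unrectified median-spectrum model (with zero-valued model pixels mapped to 1.0) can also be written with vectorized numpy operations. A minimal sketch of that equivalent form, assuming both arrays share the same shape:

import numpy as np

def normalize_slitlet(slitlet2d, model2d):
    """Divide data by model; where the model is zero, return 1.0."""
    out = np.ones_like(slitlet2d, dtype=float)
    np.divide(slitlet2d, model2d, out=out, where=(model2d != 0))
    return out

data = np.array([[2.0, 4.0], [0.0, 3.0]])
model = np.array([[2.0, 0.0], [1.0, 3.0]])
print(normalize_slitlet(data, model))   # -> [[1. 1.] [0. 1.]]
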
Exemplo n.º 22
0
def main(args=None):

    # parse command-line options
    parser = argparse.ArgumentParser(
        description='description: determine rectification and wavelength '
        'calibration polynomials from arc image')

    # required arguments
    parser.add_argument("fitsfile",
                        help="Input FITS file with longslit data",
                        type=argparse.FileType('rb'))
    parser.add_argument("--bound_param",
                        required=True,
                        help="Input JSON with fitted boundary parameters",
                        type=argparse.FileType('rt'))
    parser.add_argument("--order_fmap",
                        required=True,
                        help="Order of the 2D rectification transformation "
                        "(default=2)",
                        default=2,
                        type=int)
    parser.add_argument("--wv_master_file",
                        required=True,
                        help="TXT file containing wavelengths")
    parser.add_argument("--poldeg_initial",
                        required=True,
                        help="Polynomial degree for initial calibration",
                        type=int)
    parser.add_argument("--poldeg_refined",
                        required=True,
                        help="Polynomial degree for refined calibration "
                        "(0=do not refine)",
                        type=int)
    parser.add_argument("--out_json",
                        required=True,
                        help="Output JSON file with results",
                        type=lambda x: arg_file_is_new(parser, x))

    # optional arguments
    parser.add_argument("--interactive",
                        help="Ask the user for confirmation before updating "
                        "the wavelength calibration polynomial",
                        action="store_true")
    parser.add_argument("--ymargin_bb",
                        help="Number of pixels above and below frontiers to "
                        "determine the vertical bounding box of each "
                        "undistorted slitlet (default=2)",
                        type=int,
                        default=2)
    parser.add_argument("--remove_sp_background",
                        help="Remove background spectrum prior to arc line "
                        "detection",
                        action="store_true")
    parser.add_argument("--times_sigma_threshold",
                        help="Times sigma above threshold to detect unknown"
                        " arc lines (default=10)",
                        type=float,
                        default=10)
    parser.add_argument("--margin_npix",
                        help="Number of pixels before and after expected "
                        "wavelength calibrated spectrum to trim the "
                        "wv_master table in the wavelength direction "
                        "(default=50)",
                        type=int,
                        default=50)
    parser.add_argument("--nbrightlines",
                        help="tuple with number of brightlines to "
                        "be employed in the initial wavelength "
                        "calibration (e.g. \"10,5,4\")")
    parser.add_argument("--threshold_wv",
                        help="Minimum signal in the line peaks (default=0)",
                        default=0,
                        type=float)
    parser.add_argument("--sigma_gaussian_filtering",
                        help="Sigma of the gaussian filter to be applied to "
                        "the spectrum in order to avoid problems with "
                        "saturated lines in the wavelength calibration "
                        "process",
                        default=0,
                        type=float)
    parser.add_argument("--out_55sp",
                        help="FITS file containing the set of averaged "
                        "spectra employed to derive the wavelength "
                        "calibration",
                        type=lambda x: arg_file_is_new(parser, x, mode='wb'))
    parser.add_argument("--ylogscale",
                        help="Display spectrum signal in logarithmic units",
                        action="store_true")
    parser.add_argument("--geometry",
                        help="tuple x,y,dx,dy (default 0,0,640,480)",
                        default="0,0,640,480")
    parser.add_argument("--pdffile",
                        help="output PDF file name",
                        type=lambda x: arg_file_is_new(parser, x, mode='wb'))
    parser.add_argument("--debugplot",
                        help="Integer indicating plotting & debugging options"
                        " (default=0)",
                        default=0,
                        type=int,
                        choices=DEBUGPLOT_CODES)
    parser.add_argument("--echo",
                        help="Display full command line",
                        action="store_true")
    args = parser.parse_args(args)

    if args.echo:
        print('\033[1m\033[31m% ' + ' '.join(sys.argv) + '\033[0m\n')

    # ---

    logger = logging.getLogger(__name__)

    logging_from_debugplot(args.debugplot)

    # read pdffile
    if args.pdffile is not None:
        if args.interactive:
            raise ValueError('--interactive is not compatible with --pdffile')
        from matplotlib.backends.backend_pdf import PdfPages
        pdf = PdfPages(args.pdffile.name)
    else:
        pdf = None

    # geometry
    if args.geometry is None:
        geometry = None
    else:
        tmp_str = args.geometry.split(",")
        x_geom = int(tmp_str[0])
        y_geom = int(tmp_str[1])
        dx_geom = int(tmp_str[2])
        dy_geom = int(tmp_str[3])
        geometry = x_geom, y_geom, dx_geom, dy_geom

    # generate HDUList object
    hdulist = fits.open(args.fitsfile)

    # generate RefinedBoundaryModelParam object
    bound_param = RefinedBoundaryModelParam._datatype_load(
        args.bound_param.name)

    # generate lines_catalog
    lines_catalog = np.genfromtxt(args.wv_master_file)

    rectwv_coeff, reduced_55sp = rectwv_coeff_from_arc_image(
        hdulist,
        bound_param,
        lines_catalog,
        args_nbrightlines=args.nbrightlines,
        args_ymargin_bb=args.ymargin_bb,
        args_remove_sp_background=args.remove_sp_background,
        args_times_sigma_threshold=args.times_sigma_threshold,
        args_order_fmap=args.order_fmap,
        args_sigma_gaussian_filtering=args.sigma_gaussian_filtering,
        args_margin_npix=args.margin_npix,
        args_poldeg_initial=args.poldeg_initial,
        args_poldeg_refined=args.poldeg_refined,
        args_interactive=args.interactive,
        args_threshold_wv=args.threshold_wv,
        args_ylogscale=args.ylogscale,
        args_pdf=pdf,
        args_geometry=geometry,
        debugplot=args.debugplot)

    # save image with collapsed spectra employed to determine the
    # wavelength calibration
    if args.out_55sp is not None:
        reduced_55sp.writeto(args.out_55sp, overwrite=True)

    # save RectWaveCoeff object into JSON file
    rectwv_coeff.writeto(args.out_json.name)
    logger.info('>>> Saving file ' + args.out_json.name)
    # debugging __getstate__ and __setstate__
    # check_setstate_getstate(rectwv_coeff, args.out_json.name)

    if pdf is not None:
        pdf.close()
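
The --wv_master_file catalogue is read with np.genfromtxt, i.e. a plain whitespace-separated table. A small self-contained sketch with an in-memory dummy catalogue; the two-column layout (wavelength, relative flux) is only an assumption for illustration:

from io import StringIO

import numpy as np

dummy_catalogue = StringIO(
    "10000.0  1.0\n"
    "10100.0  0.8\n"
    "10200.0  0.5\n"
)
lines_catalog = np.genfromtxt(dummy_catalogue)
print(lines_catalog[:, 0])   # wavelengths (dummy values)
print(lines_catalog[:, 1])   # relative fluxes (dummy values)
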
Exemplo n.º 23
0
def main(args=None):
    # parse command-line options
    parser = argparse.ArgumentParser(
        description='description: apply rectification and wavelength '
        'calibration polynomials for the CSU configuration of a '
        'particular image')

    # required arguments
    parser.add_argument("fitsfile",
                        help="Input FITS file",
                        type=argparse.FileType('rb'))
    parser.add_argument("--rectwv_coeff",
                        required=True,
                        help="Input JSON file with rectification and "
                        "wavelength calibration coefficients",
                        type=argparse.FileType('rt'))
    parser.add_argument("--outfile",
                        required=True,
                        help="Output FITS file with rectified and "
                        "wavelength calibrated image",
                        type=lambda x: arg_file_is_new(parser, x, mode='wb'))

    # optional arguments
    parser.add_argument("--delta_global_integer_offset_x_pix",
                        help="Delta global integer offset in the X direction "
                        "(default=0)",
                        default=0,
                        type=int)
    parser.add_argument("--delta_global_integer_offset_y_pix",
                        help="Delta global integer offset in the Y direction "
                        "(default=0)",
                        default=0,
                        type=int)
    parser.add_argument("--resampling",
                        help="Resampling method: 1 -> nearest neighbor, "
                        "2 -> linear interpolation (default)",
                        default=2,
                        type=int,
                        choices=(1, 2))
    parser.add_argument("--ignore_dtu_configuration",
                        help="Ignore DTU configurations differences between "
                        "transformation and input image",
                        action="store_true")
    parser.add_argument("--debugplot",
                        help="Integer indicating plotting & debugging options"
                        " (default=0)",
                        default=0,
                        type=int,
                        choices=DEBUGPLOT_CODES)
    parser.add_argument("--echo",
                        help="Display full command line",
                        action="store_true")
    args = parser.parse_args(args)

    if args.echo:
        print('\033[1m\033[31m% ' + ' '.join(sys.argv) + '\033[0m\n')

    # ---

    logging_from_debugplot(args.debugplot)

    # generate RectWaveCoeff object
    rectwv_coeff = RectWaveCoeff._datatype_load(args.rectwv_coeff.name)

    # modify (when requested) global offsets
    rectwv_coeff.global_integer_offset_x_pix += \
        args.delta_global_integer_offset_x_pix
    rectwv_coeff.global_integer_offset_y_pix += \
        args.delta_global_integer_offset_y_pix

    # generate HDUList object
    # read FITS image and its corresponding header
    hdulist = fits.open(args.fitsfile)

    # rectification and wavelength calibration
    reduced_arc = apply_rectwv_coeff(
        hdulist,
        rectwv_coeff,
        args_resampling=args.resampling,
        args_ignore_dtu_configuration=args.ignore_dtu_configuration,
        debugplot=args.debugplot)

    # save result
    reduced_arc.writeto(args.outfile, overwrite=True)
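
Several of these scripts follow the same astropy pattern: open a FITS file, process it, and write the result with overwrite=True. A minimal round-trip sketch with a dummy in-memory image (the file name is illustrative):

import numpy as np
from astropy.io import fits

# build a dummy primary HDU with a small image and a HISTORY keyword
image2d = np.zeros((10, 10), dtype=float)
hdu = fits.PrimaryHDU(data=image2d)
hdu.header['history'] = 'dummy frame for illustration'

# write to disk, clobbering any previous file with the same name
fits.HDUList([hdu]).writeto('dummy_rectified.fits', overwrite=True)

# read it back, as the scripts above do with fits.open(...)
with fits.open('dummy_rectified.fits') as hdul:
    print(hdul[0].data.shape, hdul[0].header['NAXIS1'])
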
Exemplo n.º 24
0
def main(args=None):
    # parse command-line options
    parser = argparse.ArgumentParser(
        description='description: display arrangement of EMIR CSU bars',
        formatter_class=argparse.RawTextHelpFormatter)

    # positional or required arguments
    parser.add_argument("filename",
                        help="TXT file with list of ABBA FITS files",
                        type=argparse.FileType('rt'))
    parser.add_argument("--step",
                        required=True,
                        help=textwrap.dedent("""\
                        0: preliminary STARE_IMAGE
                        1: combination with FULL_DITHERED_IMAGE"""),
                        type=int,
                        choices=[0, 1])
    parser.add_argument("--outfile",
                        required=True,
                        help="Output YAML file name",
                        type=lambda x: arg_file_is_new(parser, x))

    # optional arguments
    parser.add_argument("--repeat",
                        help="Repetitions at each position",
                        default=1,
                        type=int)
    parser.add_argument("--obsid_combined", type=str)
    parser.add_argument("--echo",
                        help="Display full command line",
                        action="store_true")
    args = parser.parse_args(args)

    if args.echo:
        print('\033[1m\033[31mExecuting: ' + ' '.join(sys.argv) + '\033[0m\n')

    # read TXT file
    with args.filename as f:
        file_content = f.read().splitlines()
    list_fileinfo = []
    for line in file_content:
        if len(line) > 0:
            if line[0] not in ['#', '@']:
                tmplist = line.split()
                tmpfile = tmplist[0]
                if len(tmplist) > 1:
                    tmpinfo = tmplist[1:]
                else:
                    tmpinfo = None
                list_fileinfo.append(FileInfo(tmpfile, tmpinfo))

    # check consistency of pattern, repeat and number of images
    nimages = len(list_fileinfo)
    if nimages % args.repeat != 0:
        raise ValueError('Unexpected number of images')

    output = generate_yaml_content(args, list_fileinfo)

    # generate YAML file
    with args.outfile as f:
        f.write(output)
    print('--> File {} generated!'.format(args.outfile.name))
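
The TXT reader above keeps only non-empty lines that do not start with '#' or '@' and splits each one into a file name plus optional extra fields. A compact sketch of the same filtering, using plain tuples instead of the FileInfo class:

file_content = [
    "# comment line, ignored",
    "abba_0001.fits  A",
    "",
    "@ another ignored line",
    "abba_0002.fits  B  extra",
    "abba_0003.fits",
]

list_fileinfo = []
for line in file_content:
    if line and line[0] not in ('#', '@'):
        fields = line.split()
        # (file name, extra fields or None), mirroring FileInfo(tmpfile, tmpinfo)
        list_fileinfo.append((fields[0], fields[1:] or None))

print(list_fileinfo)
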
Exemplo n.º 25
0
def main(args=None):
    # parse command-line options
    parser = argparse.ArgumentParser(
        description='description: compute pixel-to-pixel flatfield')

    # required arguments
    parser.add_argument("fitsfile",
                        help="Input FITS file (flat ON-OFF)",
                        type=argparse.FileType('rb'))
    parser.add_argument("--rectwv_coeff",
                        required=True,
                        help="Input JSON file with rectification and "
                        "wavelength calibration coefficients",
                        type=argparse.FileType('rt'))
    parser.add_argument("--minimum_fraction",
                        required=True,
                        help="Minimum allowed flatfielding value",
                        type=float,
                        default=0.01)
    parser.add_argument("--minimum_value_in_output",
                        help="Minimum value allowed in output file: pixels "
                        "below this value are set to 1.0 (default=0.01)",
                        type=float,
                        default=0.01)
    parser.add_argument("--nwindow_median",
                        required=True,
                        help="Window size to smooth median spectrum in the "
                        "spectral direction",
                        type=int)
    parser.add_argument("--outfile",
                        required=True,
                        help="Output FITS file",
                        type=lambda x: arg_file_is_new(parser, x, mode='wb'))

    # optional arguments
    parser.add_argument("--delta_global_integer_offset_x_pix",
                        help="Delta global integer offset in the X direction "
                        "(default=0)",
                        default=0,
                        type=int)
    parser.add_argument("--delta_global_integer_offset_y_pix",
                        help="Delta global integer offset in the Y direction "
                        "(default=0)",
                        default=0,
                        type=int)
    parser.add_argument("--resampling",
                        help="Resampling method: 1 -> nearest neighbor, "
                        "2 -> linear interpolation (default)",
                        default=2,
                        type=int,
                        choices=(1, 2))
    parser.add_argument("--ignore_DTUconf",
                        help="Ignore DTU configurations differences between "
                        "model and input image",
                        action="store_true")
    parser.add_argument("--debugplot",
                        help="Integer indicating plotting & debugging options"
                        " (default=0)",
                        default=0,
                        type=int,
                        choices=DEBUGPLOT_CODES)
    parser.add_argument("--echo",
                        help="Display full command line",
                        action="store_true")
    args = parser.parse_args(args)

    if args.echo:
        print('\033[1m\033[31m% ' + ' '.join(sys.argv) + '\033[0m\n')

    # read calibration structure from JSON file
    rectwv_coeff = RectWaveCoeff._datatype_load(args.rectwv_coeff.name)

    # modify (when requested) global offsets
    rectwv_coeff.global_integer_offset_x_pix += \
        args.delta_global_integer_offset_x_pix
    rectwv_coeff.global_integer_offset_y_pix += \
        args.delta_global_integer_offset_y_pix

    # read FITS image and its corresponding header
    hdulist = fits.open(args.fitsfile)
    header = hdulist[0].header
    image2d = hdulist[0].data
    hdulist.close()

    # apply global offsets
    image2d = apply_integer_offsets(
        image2d=image2d,
        offx=rectwv_coeff.global_integer_offset_x_pix,
        offy=rectwv_coeff.global_integer_offset_y_pix)

    # protections
    naxis2, naxis1 = image2d.shape
    if naxis1 != header['naxis1'] or naxis2 != header['naxis2']:
        print('>>> NAXIS1:', naxis1)
        print('>>> NAXIS2:', naxis2)
        raise ValueError('Something is wrong with NAXIS1 and/or NAXIS2')
    if abs(args.debugplot) >= 10:
        print('>>> NAXIS1:', naxis1)
        print('>>> NAXIS2:', naxis2)

    # check that the input FITS file grism and filter match
    filter_name = header['filter']
    if filter_name != rectwv_coeff.tags['filter']:
        raise ValueError("Filter name does not match!")
    grism_name = header['grism']
    if grism_name != rectwv_coeff.tags['grism']:
        raise ValueError("Filter name does not match!")
    if abs(args.debugplot) >= 10:
        print('>>> grism.......:', grism_name)
        print('>>> filter......:', filter_name)

    # check that the DTU configurations are compatible
    dtu_conf_fitsfile = DtuConfiguration.define_from_fits(args.fitsfile)
    dtu_conf_jsonfile = DtuConfiguration.define_from_dictionary(
        rectwv_coeff.meta_info['dtu_configuration'])
    if dtu_conf_fitsfile != dtu_conf_jsonfile:
        print('DTU configuration (FITS file):\n\t', dtu_conf_fitsfile)
        print('DTU configuration (JSON file):\n\t', dtu_conf_jsonfile)
        if args.ignore_DTUconf:
            print('WARNING: DTU configuration differences found!')
        else:
            raise ValueError('DTU configurations do not match')
    else:
        if abs(args.debugplot) >= 10:
            print('>>> DTU Configuration match!')
            print(dtu_conf_fitsfile)

    # valid slitlet numbers
    list_valid_islitlets = list(range(1, EMIR_NBARS + 1))
    for idel in rectwv_coeff.missing_slitlets:
        list_valid_islitlets.remove(idel)
    if abs(args.debugplot) >= 10:
        print('>>> valid slitlet numbers:\n', list_valid_islitlets)

    # ---

    # initialize rectified image
    image2d_flatfielded = np.zeros((EMIR_NAXIS2, EMIR_NAXIS1))

    # main loop
    for islitlet in list_valid_islitlets:
        if args.debugplot == 0:
            islitlet_progress(islitlet, EMIR_NBARS)

        # define Slitlet2D object
        slt = Slitlet2D(islitlet=islitlet,
                        rectwv_coeff=rectwv_coeff,
                        debugplot=args.debugplot)

        if abs(args.debugplot) >= 10:
            print(slt)

        # extract (distorted) slitlet from the initial image
        slitlet2d = slt.extract_slitlet2d(image2d)

        # rectify slitlet
        slitlet2d_rect = slt.rectify(slitlet2d, resampling=args.resampling)
        naxis2_slitlet2d, naxis1_slitlet2d = slitlet2d_rect.shape

        if naxis1_slitlet2d != EMIR_NAXIS1:
            print('naxis1_slitlet2d: ', naxis1_slitlet2d)
            print('EMIR_NAXIS1.....: ', EMIR_NAXIS1)
            raise ValueError("Unexpected naxis1_slitlet2d")

        # get useful slitlet region (use boundaries instead of frontiers;
        # note that nscan_minmax_frontiers() works well independently
        # of using frontiers or boundaries as arguments)
        nscan_min, nscan_max = nscan_minmax_frontiers(slt.y0_reference_lower,
                                                      slt.y0_reference_upper,
                                                      resize=False)
        ii1 = nscan_min - slt.bb_ns1_orig
        ii2 = nscan_max - slt.bb_ns1_orig + 1

        # median spectrum
        sp_collapsed = np.median(slitlet2d_rect[ii1:(ii2 + 1), :], axis=0)

        # smooth median spectrum along the spectral direction
        sp_median = ndimage.median_filter(sp_collapsed,
                                          args.nwindow_median,
                                          mode='nearest')
        ymax_spmedian = sp_median.max()
        y_threshold = ymax_spmedian * args.minimum_fraction
        sp_median[np.where(sp_median < y_threshold)] = 0.0

        if abs(args.debugplot) > 10:
            title = 'Slitlet#' + str(islitlet) + ' (median spectrum)'
            xdum = np.arange(1, naxis1_slitlet2d + 1)
            ax = ximplotxy(xdum,
                           sp_collapsed,
                           title=title,
                           show=False,
                           **{'label': 'collapsed spectrum'})
            ax.plot(xdum, sp_median, label='filtered spectrum')
            ax.plot([1, naxis1_slitlet2d],
                    2 * [y_threshold],
                    label='threshold')
            ax.legend()
            ax.set_ylim(-0.05 * ymax_spmedian, 1.05 * ymax_spmedian)
            pause_debugplot(args.debugplot, pltshow=True, tight_layout=True)

        # generate rectified slitlet region filled with the median spectrum
        slitlet2d_rect_spmedian = np.tile(sp_median, (naxis2_slitlet2d, 1))
        if abs(args.debugplot) > 10:
            slt.ximshow_rectified(slitlet2d_rect_spmedian)

        # unrectified image
        slitlet2d_unrect_spmedian = slt.rectify(slitlet2d_rect_spmedian,
                                                resampling=args.resampling,
                                                inverse=True)

        # normalize initial slitlet image (avoid division by zero)
        slitlet2d_norm = np.zeros_like(slitlet2d)
        for j in range(naxis1_slitlet2d):
            for i in range(naxis2_slitlet2d):
                den = slitlet2d_unrect_spmedian[i, j]
                if den == 0:
                    slitlet2d_norm[i, j] = 1.0
                else:
                    slitlet2d_norm[i, j] = slitlet2d[i, j] / den

        if abs(args.debugplot) > 10:
            slt.ximshow_unrectified(slitlet2d_norm)

        for j in range(EMIR_NAXIS1):
            xchannel = j + 1
            y0_lower = slt.list_frontiers[0](xchannel)
            y0_upper = slt.list_frontiers[1](xchannel)
            n1, n2 = nscan_minmax_frontiers(y0_frontier_lower=y0_lower,
                                            y0_frontier_upper=y0_upper,
                                            resize=True)
            # note that n1 and n2 are scans (ranging from 1 to NAXIS2)
            nn1 = n1 - slt.bb_ns1_orig + 1
            nn2 = n2 - slt.bb_ns1_orig + 1
            image2d_flatfielded[(n1 - 1):n2, j] = \
                slitlet2d_norm[(nn1 - 1):nn2, j]

            # force to 1.0 region around frontiers
            image2d_flatfielded[(n1 - 1):(n1 + 2), j] = 1
            image2d_flatfielded[(n2 - 5):n2, j] = 1
    if args.debugplot == 0:
        print('OK!')

    # set pixels below minimum value to 1.0
    filtered = np.where(image2d_flatfielded < args.minimum_value_in_output)
    image2d_flatfielded[filtered] = 1.0

    # restore global offsets
    image2d_flatfielded = apply_integer_offsets(
        image2d=image2d_flatfielded,
        offx=-rectwv_coeff.global_integer_offset_x_pix,
        offy=-rectwv_coeff.global_integer_offset_y_pix)

    # save output file
    save_ndarray_to_fits(array=image2d_flatfielded,
                         file_name=args.outfile,
                         main_header=header,
                         overwrite=True)
    print('>>> Saving file ' + args.outfile.name)
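
The --nwindow_median smoothing above is a running median along the spectral direction computed with scipy.ndimage.median_filter. A tiny sketch on a 1-D spectrum with a single outlier (the window size is illustrative):

import numpy as np
from scipy import ndimage

sp_collapsed = np.array([1.0, 1.1, 0.9, 25.0, 1.0, 1.2, 0.8])

# running median over a 3-pixel window; edges reuse the nearest valid value
sp_median = ndimage.median_filter(sp_collapsed, size=3, mode='nearest')
print(sp_median)   # the 25.0 spike is replaced by a local median
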
Exemplo n.º 26
0
def main(args=None):

    # parse command-line options
    parser = argparse.ArgumentParser(
        description='description: overplot boundary model over FITS image')

    # positional arguments
    parser.add_argument("fitsfile",
                        help="FITS file name to be displayed",
                        type=argparse.FileType('rb'))
    parser.add_argument("--rect_wpoly_MOSlibrary",
                        required=True,
                        help="Input JSON file with library of rectification "
                        "and wavelength calibration coefficients",
                        type=argparse.FileType('rt'))

    # optional arguments
    parser.add_argument("--global_integer_offset_x_pix",
                        help="Global integer offset in the X direction "
                        "(default=0)",
                        default=0,
                        type=int)
    parser.add_argument("--global_integer_offset_y_pix",
                        help="Global integer offset in the Y direction "
                        "(default=0)",
                        default=0,
                        type=int)
    parser.add_argument("--arc_lines",
                        help="Overplot arc lines",
                        action="store_true")
    parser.add_argument("--oh_lines",
                        help="Overplot OH lines",
                        action="store_true")
    parser.add_argument("--ds9_frontiers",
                        help="Output ds9 region file with slitlet frontiers",
                        type=lambda x: arg_file_is_new(parser, x))
    parser.add_argument("--ds9_boundaries",
                        help="Output ds9 region file with slitlet boundaries",
                        type=lambda x: arg_file_is_new(parser, x))
    parser.add_argument("--ds9_lines",
                        help="Output ds9 region file with arc/oh lines",
                        type=lambda x: arg_file_is_new(parser, x))
    parser.add_argument("--debugplot",
                        help="Integer indicating plotting/debugging" +
                        " (default=12)",
                        type=int,
                        default=12,
                        choices=DEBUGPLOT_CODES)
    parser.add_argument("--echo",
                        help="Display full command line",
                        action="store_true")

    args = parser.parse_args(args)

    if args.echo:
        print('\033[1m\033[31mExecuting: ' + ' '.join(sys.argv) + '\033[0m\n')

    # ---

    # avoid incompatible options
    if args.arc_lines and args.oh_lines:
        raise ValueError("--arc_lines and --oh_lines cannot be used "
                         "simultaneously")

    # --ds9_lines requires --arc_lines or --oh_lines
    if args.ds9_lines:
        if not (args.arc_lines or args.oh_lines):
            raise ValueError("--ds9_lines requires the use of either "
                             "--arc_lines or --oh_lines")

    # read input FITS file
    hdulist = fits.open(args.fitsfile)
    image_header = hdulist[0].header
    image2d = hdulist[0].data
    hdulist.close()

    naxis1 = image_header['naxis1']
    naxis2 = image_header['naxis2']

    if image2d.shape != (naxis2, naxis1):
        raise ValueError("Unexpected error with NAXIS1, NAXIS2")
    if image2d.shape != (EMIR_NAXIS2, EMIR_NAXIS1):
        raise ValueError("Unexpected values for NAXIS1, NAXIS2")

    # remove path from fitsfile
    sfitsfile = os.path.basename(args.fitsfile.name)

    # check that the FITS file has been obtained with EMIR
    instrument = image_header['instrume']
    if instrument != 'EMIR':
        raise ValueError("INSTRUME keyword is not 'EMIR'!")

    # read GRISM, FILTER and ROTANG from FITS header
    grism = image_header['grism']
    spfilter = image_header['filter']
    rotang = image_header['rotang']

    # ---

    # generate MasterRectWave object
    master_rectwv = MasterRectWave._datatype_load(
        args.rect_wpoly_MOSlibrary.name)

    # check that grism and filter are the expected ones
    grism_ = master_rectwv.tags['grism']
    if grism_ != grism:
        raise ValueError('Unexpected grism: ' + str(grism_))
    spfilter_ = master_rectwv.tags['filter']
    if spfilter_ != spfilter:
        raise ValueError('Unexpected filter: ' + str(spfilter_))

    # valid slitlet numbers
    list_valid_islitlets = list(range(1, EMIR_NBARS + 1))
    for idel in master_rectwv.missing_slitlets:
        list_valid_islitlets.remove(idel)

    # read CsuConfiguration object from FITS file
    csu_config = CsuConfiguration.define_from_fits(args.fitsfile)

    # list with csu_bar_slit_center for valid slitlets
    list_csu_bar_slit_center = []
    for islitlet in list_valid_islitlets:
        list_csu_bar_slit_center.append(
            csu_config.csu_bar_slit_center(islitlet))

    # define parmodel and params
    fitted_bound_param_json = {
        'contents': master_rectwv.meta_info['refined_boundary_model']
    }
    parmodel = fitted_bound_param_json['contents']['parmodel']
    fitted_bound_param_json.update({'meta_info': {'parmodel': parmodel}})
    params = bound_params_from_dict(fitted_bound_param_json)
    if parmodel != "multislit":
        raise ValueError('parmodel = "multislit" not found')

    # ---

    # define lines to be overplotted
    if args.arc_lines or args.oh_lines:

        rectwv_coeff = rectwv_coeff_from_mos_library(hdulist, master_rectwv)
        rectwv_coeff.global_integer_offset_x_pix = \
            args.global_integer_offset_x_pix
        rectwv_coeff.global_integer_offset_y_pix = \
            args.global_integer_offset_y_pix
        # rectwv_coeff.writeto('xxx.json')

        if args.arc_lines:
            if grism == 'LR':
                catlines_file = 'lines_argon_neon_xenon_empirical_LR.dat'
            else:
                catlines_file = 'lines_argon_neon_xenon_empirical.dat'
            dumdata = pkgutil.get_data('emirdrp.instrument.configs',
                                       catlines_file)
            arc_lines_tmpfile = StringIO(dumdata.decode('utf8'))
            catlines = np.genfromtxt(arc_lines_tmpfile)
            # define wavelength and flux as separate arrays
            catlines_all_wave = catlines[:, 0]
            catlines_all_flux = catlines[:, 1]
        elif args.oh_lines:
            dumdata = pkgutil.get_data('emirdrp.instrument.configs',
                                       'Oliva_etal_2013.dat')
            oh_lines_tmpfile = StringIO(dumdata.decode('utf8'))
            catlines = np.genfromtxt(oh_lines_tmpfile)
            # define wavelength and flux as separate arrays
            catlines_all_wave = np.concatenate((catlines[:, 1],
                                                catlines[:, 0]))
            catlines_all_flux = np.concatenate((catlines[:, 2],
                                                catlines[:, 2]))
        else:
            raise ValueError("This should not happen!")

    else:
        rectwv_coeff = None
        catlines_all_wave = None
        catlines_all_flux = None

    # ---

    # generate output ds9 region file with slitlet boundaries
    if args.ds9_boundaries is not None:
        save_boundaries_from_params_ds9(
            params=params,
            parmodel=parmodel,
            list_islitlet=list_valid_islitlets,
            list_csu_bar_slit_center=list_csu_bar_slit_center,
            uuid=master_rectwv.uuid,
            grism=grism,
            spfilter=spfilter,
            ds9_filename=args.ds9_boundaries.name,
            global_offset_x_pix=-args.global_integer_offset_x_pix,
            global_offset_y_pix=-args.global_integer_offset_y_pix)

    # generate output ds9 region file with slitlet frontiers
    if args.ds9_frontiers is not None:
        save_frontiers_from_params_ds9(
            params=params,
            parmodel=parmodel,
            list_islitlet=list_valid_islitlets,
            list_csu_bar_slit_center=list_csu_bar_slit_center,
            uuid=master_rectwv.uuid,
            grism=grism,
            spfilter=spfilter,
            ds9_filename=args.ds9_frontiers.name,
            global_offset_x_pix=-args.global_integer_offset_x_pix,
            global_offset_y_pix=-args.global_integer_offset_y_pix)

    # ---

    # display full image
    if abs(args.debugplot) % 10 != 0:
        ax = ximshow(image2d=image2d,
                     title=sfitsfile + "\ngrism=" + grism + ", filter=" +
                     spfilter + ", rotang=" + str(round(rotang, 2)),
                     image_bbox=(1, naxis1, 1, naxis2),
                     show=False)

        # overplot boundaries
        overplot_boundaries_from_params(
            ax=ax,
            params=params,
            parmodel=parmodel,
            list_islitlet=list_valid_islitlets,
            list_csu_bar_slit_center=list_csu_bar_slit_center,
            global_offset_x_pix=-args.global_integer_offset_x_pix,
            global_offset_y_pix=-args.global_integer_offset_y_pix)

        # overplot frontiers
        overplot_frontiers_from_params(
            ax=ax,
            params=params,
            parmodel=parmodel,
            list_islitlet=list_valid_islitlets,
            list_csu_bar_slit_center=list_csu_bar_slit_center,
            micolors=('b', 'b'),
            linetype='-',
            labels=False,  # already displayed with the boundaries
            global_offset_x_pix=-args.global_integer_offset_x_pix,
            global_offset_y_pix=-args.global_integer_offset_y_pix)

    else:
        ax = None

    # overplot lines
    if catlines_all_wave is not None:

        if args.ds9_lines is None:
            ds9_file = None
        else:
            ds9_file = open(args.ds9_lines.name, 'w')
            ds9_file.write('# Region file format: DS9 version 4.1\n')
            ds9_file.write('global color=#00ffff dashlist=0 0 width=2 '
                           'font="helvetica 10 normal roman" select=1 '
                           'highlite=1 dash=0 fixed=0 edit=1 '
                           'move=1 delete=1 include=1 source=1\n')
            ds9_file.write('physical\n#\n')

            ds9_file.write('#\n# uuid..: {0}\n'.format(master_rectwv.uuid))
            ds9_file.write('# filter: {0}\n'.format(spfilter))
            ds9_file.write('# grism.: {0}\n'.format(grism))
            ds9_file.write('#\n# global_offset_x_pix: {0}\n'.format(
                args.global_integer_offset_x_pix))
            ds9_file.write('# global_offset_y_pix: {0}\n#\n'.format(
                args.global_integer_offset_y_pix))
            if parmodel == "longslit":
                for dumpar in EXPECTED_PARAMETER_LIST:
                    parvalue = params[dumpar].value
                    ds9_file.write('# {0}: {1}\n'.format(dumpar, parvalue))
            else:
                for dumpar in EXPECTED_PARAMETER_LIST_EXTENDED:
                    parvalue = params[dumpar].value
                    ds9_file.write('# {0}: {1}\n'.format(dumpar, parvalue))

        overplot_lines(ax, catlines_all_wave, list_valid_islitlets,
                       rectwv_coeff, args.global_integer_offset_x_pix,
                       args.global_integer_offset_y_pix, ds9_file,
                       args.debugplot)

        if ds9_file is not None:
            ds9_file.close()

    if ax is not None:
        # show plot
        pause_debugplot(12, pltshow=True)
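
The ds9 output above writes a standard region-file header followed by one entry per line in physical coordinates. A minimal sketch that produces a valid file with a single line region (coordinates and file name are dummies):

# write a minimal ds9 region file with one line segment in physical coordinates
with open('dummy_frontiers.reg', 'w') as ds9_file:
    ds9_file.write('# Region file format: DS9 version 4.1\n')
    ds9_file.write('global color=#00ffff dashlist=0 0 width=2 '
                   'font="helvetica 10 normal roman" select=1 '
                   'highlite=1 dash=0 fixed=0 edit=1 '
                   'move=1 delete=1 include=1 source=1\n')
    ds9_file.write('physical\n')
    # a single segment from (x1, y1) to (x2, y2); the values are dummies
    ds9_file.write('line(100,512,1900,512) # text={illustrative frontier}\n')
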
Exemplo n.º 27
0
def main(args=None):

    # parse command-line options
    parser = argparse.ArgumentParser(
        description='description: determine rectification and wavelength '
                    'calibration polynomials from arc image'
    )

    # required arguments
    parser.add_argument("fitsfile",
                        help="Input FITS file with longslit data",
                        type=argparse.FileType('rb'))
    parser.add_argument("--bound_param", required=True,
                        help="Input JSON with fitted boundary parameters",
                        type=argparse.FileType('rt'))
    parser.add_argument("--order_fmap", required=True,
                        help="Order of the 2D rectification transformation "
                             "(default=2)",
                        default=2, type=int)
    parser.add_argument("--wv_master_file", required=True,
                        help="TXT file containing wavelengths")
    parser.add_argument("--poldeg_initial", required=True,
                        help="Polynomial degree for initial calibration",
                        type=int)
    parser.add_argument("--poldeg_refined", required=True,
                        help="Polynomial degree for refined calibration "
                             "(0=do not refine)",
                        type=int)
    parser.add_argument("--out_json", required=True,
                        help="Output JSON file with results",
                        type=lambda x: arg_file_is_new(parser, x))

    # optional arguments
    parser.add_argument("--interactive",
                        help="Ask the user for confirmation before updating "
                             "the wavelength calibration polynomial",
                        action="store_true")
    parser.add_argument("--ymargin_bb",
                        help="Number of pixels above and below frontiers to "
                             "determine the vertical bounding box of each "
                             "undistorted slitlet (default=2)",
                        type=int, default=2)
    parser.add_argument("--remove_sp_background",
                        help="Remove background spectrum prior to arc line "
                             "detection",
                        action="store_true")
    parser.add_argument("--times_sigma_threshold",
                        help="Times sigma above threshold to detect unknown"
                             " arc lines (default=10)",
                        type=float, default=10)
    parser.add_argument("--margin_npix",
                        help="Number of pixels before and after expected "
                             "wavelength calibrated spectrum to trim the "
                             "wv_master table in the wavelength direction "
                             "(default=50)",
                        type=int, default=50)
    parser.add_argument("--nbrightlines",
                        help="tuple with number of brightlines to "
                             "be employed in the initial wavelength "
                             "calibration (e.g. \"10,5,4\")")
    parser.add_argument("--threshold_wv",
                        help="Minimum signal in the line peaks (default=0)",
                        default=0, type=float)
    parser.add_argument("--sigma_gaussian_filtering",
                        help="Sigma of the gaussian filter to be applied to "
                             "the spectrum in order to avoid problems with "
                             "saturated lines in the wavelength calibration "
                             "process",
                        default=0, type=float)
    parser.add_argument("--out_55sp",
                        help="FITS file containing the set of averaged "
                             "spectra employed to derive the wavelength "
                             "calibration",
                        type=lambda x: arg_file_is_new(parser, x, mode='wb'))
    parser.add_argument("--ylogscale",
                        help="Display spectrum signal in logarithmic units",
                        action="store_true")
    parser.add_argument("--geometry",
                        help="tuple x,y,dx,dy (default 0,0,640,480)",
                        default="0,0,640,480")
    parser.add_argument("--pdffile",
                        help="output PDF file name",
                        type=lambda x: arg_file_is_new(parser, x, mode='wb'))
    parser.add_argument("--debugplot",
                        help="Integer indicating plotting & debugging options"
                             " (default=0)",
                        default=0, type=int,
                        choices=DEBUGPLOT_CODES)
    parser.add_argument("--echo",
                        help="Display full command line",
                        action="store_true")
    args = parser.parse_args(args)

    if args.echo:
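        # echo the full command line in bold red (ANSI escape sequences)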
        print('\033[1m\033[31m% ' + ' '.join(sys.argv) + '\033[0m\n')

    # ---

    logger = logging.getLogger(__name__)

    logging_from_debugplot(args.debugplot)

    # set up the optional PDF output file
    if args.pdffile is not None:
        if args.interactive:
            raise ValueError('--interactive is not compatible with --pdffile')
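        # imported here so the matplotlib PDF backend is only loaded
        # when --pdffile is actually given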
        from matplotlib.backends.backend_pdf import PdfPages
        pdf = PdfPages(args.pdffile.name)
    else:
        pdf = None

    # geometry
    if args.geometry is None:
        geometry = None
    else:
        tmp_str = args.geometry.split(",")
        x_geom = int(tmp_str[0])
        y_geom = int(tmp_str[1])
        dx_geom = int(tmp_str[2])
        dy_geom = int(tmp_str[3])
        geometry = x_geom, y_geom, dx_geom, dy_geom
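    # note (not part of the original example): --geometry has a string
    # default, so args.geometry is only None if that default is removed;
    # a more defensive variant of the parsing above could be, for instance:
    #     fields = [int(s) for s in args.geometry.split(",")]
    #     if len(fields) != 4:
    #         parser.error("--geometry expects x,y,dx,dy")
    #     geometry = tuple(fields)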

    # open the input FITS file (HDUList object)
    hdulist = fits.open(args.fitsfile)

    # load the RefinedBoundaryModelParam object from the input JSON file
    bound_param = RefinedBoundaryModelParam._datatype_load(
        args.bound_param.name)

    # read the master wavelength list (lines catalog)
    lines_catalog = np.genfromtxt(args.wv_master_file)

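    # compute the rectification and wavelength calibration coefficients
    # from the arc image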
    rectwv_coeff, reduced_55sp = rectwv_coeff_from_arc_image(
        hdulist,
        bound_param,
        lines_catalog,
        args_nbrightlines=args.nbrightlines,
        args_ymargin_bb=args.ymargin_bb,
        args_remove_sp_background=args.remove_sp_background,
        args_times_sigma_threshold=args.times_sigma_threshold,
        args_order_fmap=args.order_fmap,
        args_sigma_gaussian_filtering=args.sigma_gaussian_filtering,
        args_margin_npix=args.margin_npix,
        args_poldeg_initial=args.poldeg_initial,
        args_poldeg_refined=args.poldeg_refined,
        args_interactive=args.interactive,
        args_threshold_wv=args.threshold_wv,
        args_ylogscale=args.ylogscale,
        args_pdf=pdf,
        args_geometry=geometry,
        debugplot=args.debugplot
    )

    # save image with collapsed spectra employed to determine the
    # wavelength calibration
    if args.out_55sp is not None:
        reduced_55sp.writeto(args.out_55sp, overwrite=True)

    # save RectWaveCoeff object into JSON file
    rectwv_coeff.writeto(args.out_json.name)
    logger.info('>>> Saving file ' + args.out_json.name)
    # debugging __getstate__ and __setstate__
    # check_setstate_getstate(rectwv_coeff, args.out_json.name)

    if pdf is not None:
        pdf.close()
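A minimal usage sketch (not part of the original example): the main() above accepts an optional argument list, so it can be driven programmatically, e.g. from a test or a small wrapper script, instead of only from the shell. All file names below are placeholders, and the sketch assumes the function lives in a module that already provides the imports the example relies on (argparse, sys, logging, numpy, astropy.io.fits and the project-specific helpers such as arg_file_is_new and rectwv_coeff_from_arc_image).

if __name__ == "__main__":
    # placeholder file names; the helper name arg_file_is_new suggests the
    # output JSON must not exist beforehand
    main([
        "arc_longslit.fits",
        "--bound_param", "bound_param.json",
        "--wv_master_file", "wv_master.txt",
        "--order_fmap", "2",
        "--poldeg_initial", "3",
        "--poldeg_refined", "5",
        "--out_json", "rectwv_coeff.json",
    ])

The same tokens, placed after the script name, would form the equivalent command-line invocation.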