def t120_mkflat(flat_dir=t120.t120_flat_dir,
                master_name_root='master',
                master_offset=t120.t120_ofst_dir + t120.t120_master_name):
    # read offset
    hdu_offset_list = fits.open(master_offset)
    offset = CCDData(hdu_offset_list[0].data, unit=u.adu)

    dict_ccd_data = {}
    for fit_file in glob.glob(flat_dir + '*.fit'):
        t120.log.info('now opening file: ' + fit_file)
        hdu = fits.open(fit_file)
        filter_name = hdu[0].header['FILTER']
        t120.log.info('filter=' + filter_name)
        # group frames by filter: create the list on first occurrence, then
        # always append (Python 3: use "in" instead of dict.has_key)
        if filter_name not in dict_ccd_data:
            dict_ccd_data[filter_name] = []
        dict_ccd_data[filter_name].append(
            subtract_overscan(CCDData(hdu[0].data, unit=u.adu), offset))

    t120.log.info('now loop over the filters')
    for filter_name in dict_ccd_data:
        combiner = Combiner(dict_ccd_data[filter_name])
        master_flat = combiner.median_combine()
        hdu = master_flat.to_hdu()
        master_file = flat_dir + master_name_root + '-' + filter_name + '.fits'
        hdu.writeto(master_file, overwrite=True)
        t120.log.info('Master flat saved in ' + master_file)
    return
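A minimal call sketch for the routine above; the directories and master-offset path are placeholders, not values taken from the t120 configuration module:
# Hypothetical usage (paths are placeholders; by default the t120 module
# supplies them, as in the signature above).
t120_mkflat(flat_dir='/data/t120/flat/',
            master_name_root='master',
            master_offset='/data/t120/offset/master.fits')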
Example #2
 def test_register_ccddata(self):
     ccd_image = CCDData(
         self.image,
         mask=self.image_mask,
         meta={
             "object": "fake galaxy",
             "filter": "R"
         },
         unit="adu",
     )
     ccd_image_ref = CCDData(
         self.image_ref,
         mask=self.image_ref_mask,
         meta={
             "object": "fake galaxy",
             "filter": "R"
         },
         unit="adu",
     )
     registered_img, footp = aa.register(source=ccd_image,
                                         target=ccd_image_ref)
     self.assertIsInstance(registered_img, np.ndarray)
     self.assertIsInstance(footp, np.ndarray)
     self.assertIs(footp.dtype, np.dtype("bool"))
     fraction = self.compare_image(registered_img)
     self.assertGreater(fraction, 0.85)
Example #3
    def test_register_ccddata(self):
        from ccdproc import CCDData
        from skimage.transform import SimilarityTransform

        transf = SimilarityTransform(rotation=np.pi / 2.0, translation=(1, 0))

        cd = CCDData(
            [[0.0, 1.0], [2.0, 3.0]],
            mask=[[True, False], [False, False]],
            unit="adu",
        )
        registered_img, footp = aa.apply_transform(transf,
                                                   cd,
                                                   cd,
                                                   propagate_mask=True)
        err = np.linalg.norm(registered_img -
                             np.array([[2.0, 0.0], [3.0, 1.0]]))
        self.assertLess(err, 1e-6)
        err_mask = footp == np.array([[False, True], [False, False]])
        self.assertTrue(all(err_mask.flatten()))

        cd = CCDData([[0.0, 1.0], [2.0, 3.0]], unit="adu")
        registered_img, footp = aa.apply_transform(transf,
                                                   cd,
                                                   cd,
                                                   propagate_mask=True)
        err = np.linalg.norm(registered_img -
                             np.array([[2.0, 0.0], [3.0, 1.0]]))
        self.assertLess(err, 1e-6)
        err_mask = footp == np.array([[False, False], [False, False]])
        self.assertTrue(all(err_mask.flatten()))
    def setUp(self):
        self.fake_image = CCDData(data=np.ones((100, 100)),
                                  meta=fits.Header(),
                                  unit='adu')
        self.fake_image.header.set('NAXIS', value=2)
        self.fake_image.header.set('NAXIS1', value=100)
        self.fake_image.header.set('NAXIS2', value=100)
        self.fake_image.header.set('OBSTYPE', value='COMP')
        self.fake_image.header['GSP_FNAM'] = 'fake-image.fits'

        # Create model aligned with pixels - represents the trace
        self.target_trace = models.Linear1D(slope=0, intercept=50.3)

        # Calculate the STDDEV
        self.stddev = 8.4

        # Calculate how many STDDEV will be extracted - N_STDDEV
        self.n_stddev = 2

        # Calculate how far the background is from the center.
        self.distance = 1

        self.target_profile = models.Gaussian1D(amplitude=1,
                                                mean=50.3,
                                                stddev=self.stddev)

        self.reference_result = np.ones(100) * self.stddev * self.n_stddev
    def setUp(self):
        # create a master flat
        self.master_flat = CCDData(data=np.ones((100, 100)),
                                   meta=fits.Header(),
                                   unit='adu')
        self.master_flat.header.set('GRATING', value='RALC_1200-BLUE')
        self.master_flat.header.set('SLIT', value='0.84" long slit')
        self.master_flat.header.set('FILTER2', value='<NO FILTER>')
        self.master_flat.header.set('WAVMODE', value='1200 m2')
        self.master_flat_name = 'master_flat_1200m2.fits'
        # expected master flat to be retrieved by get_best_flat
        self.reference_flat_name = 'master_flat_1200m2_0.84_dome.fits'
        # location of sample flats
        self.flat_path = 'goodman_pipeline/data/test_data/master_flat'
        slit = re.sub('[A-Za-z" ]', '', self.master_flat.header['SLIT'])
        self.flat_name_base = re.sub('.fits', '_' + slit + '*.fits',
                                     self.master_flat_name)

        # save a master flat with some random structure.

        self.master_flat_name_norm = 'flat_to_normalize.fits'
        # add a bias level
        self.master_flat.data += 300.
        # add noise
        self.master_flat.data += np.random.random_sample(
            self.master_flat.data.shape)

        self.master_flat.write(os.path.join(self.flat_path,
                                            self.master_flat_name_norm),
                               overwrite=False)
    def setUp(self):
        self.fake_image = CCDData(data=np.ones((100, 100)),
                                  meta=fits.Header(),
                                  unit='adu')

        self.file_name = 'sample_file.fits'
        self.target_non_zero = 4
        self.current_directory = os.getcwd()
        self.full_path = os.path.join(self.current_directory, self.file_name)
        self.parent_file = 'parent_file.fits'

        self.fake_image.header.set('CCDSUM',
                                   value='1 1',
                                   comment='Fake values')

        self.fake_image.header.set('OBSTYPE',
                                   value='OBJECT',
                                   comment='Fake values')

        self.fake_image.header.set('GSP_FNAM',
                                   value=self.file_name,
                                   comment='Fake values')

        self.fake_image.header.set('GSP_PNAM',
                                   value=self.parent_file,
                                   comment='Fake values')

        self.fake_image.write(self.full_path, overwrite=False)
 def setUp(self):
     self.create = GenerateDcrParFile()
     self.ccd = CCDData(data=np.ones((100, 100)),
                        meta=fits.Header(),
                        unit='adu')
     self.ccd.header.set('INSTCONF', value='Red')
     self.ccd.header.set('CCDSUM', value='1 1')
Example #8
def swarp(hdus, reference_hdu, rate, hdu_idx=None, stacking_mode="MEAN"):
    """
    Use the WCS to project all images onto the 'reference_hdu' grid, shifting the CRVAL of each image by rate*dt.
    :param stacking_mode: what process to use for combining images MEAN or MEDIAN
    :param hdu_idx: which HDU in each HDUList listed in hdus is the ImageData in?
    :param hdus: list of HDUList
    :param reference_hdu: reference HDUList in hdus
    :param rate: dictionary with the ra/dec shift rates.
    :return: fits.HDUList
    """
    # Project the input images to the same grid using interpolation
    if stacking_mode not in ['MEDIAN', 'MEAN']:
        logging.warning(
            f'{stacking_mode} not available for swarp stack. Setting to MEAN')
        stacking_mode = 'MEAN'
    if hdu_idx is None:
        hdu_idx = HSC_HDU_MAP
    reference_date = mid_exposure_mjd(reference_hdu[0])
    stack_input = []
    logging.info(f'stacking at rate/angle set: {rate}')
    ccd_data = {}
    for hdu in hdus:
        wcs_header = hdu[1].header.copy()
        dt = (mid_exposure_mjd(hdu[0]) - reference_date)
        if rate is not None:
            wcs_header['CRVAL1'] += (rate['dra'] * dt)
            wcs_header['CRVAL2'] += (rate['ddec'] * dt)
        for layer in hdu_idx:
            data = hdu[hdu_idx[layer]].data
            if layer == 'variance':
                data = VarianceUncertainty(data)
            elif layer == 'mask':
                data = bitfield_to_boolean_mask(data,
                                                ignore_flags=STACK_MASK,
                                                flip_bits=True)
            ccd_data[layer] = data
        logging.info(f'Adding {hdu[0]} to projected stack.')
        stack_input.append(
            wcs_project(
                CCDData(ccd_data['image'],
                        mask=ccd_data['mask'],
                        header=wcs_header,
                        wcs=WCS(wcs_header),
                        unit='adu',
                        uncertainty=ccd_data['variance']),
                WCS(reference_hdu[1].header)))
        logging.debug(f'{stack_input[-1].header}')
    if rate is not None:
        combiner = Combiner(stack_input)
        if stacking_mode == 'MEDIAN':
            stacked_image = combiner.median_combine()
        else:
            stacked_image = combiner.average_combine()
        return fits.HDUList([
            fits.PrimaryHDU(header=reference_hdu[0].header),
            fits.ImageHDU(data=stacked_image.data,
                          header=reference_hdu[1].header)
        ])
    else:
        return stack_input
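A hedged usage sketch for swarp; the file names and shift rate below are placeholders, and the helpers it relies on (mid_exposure_mjd, HSC_HDU_MAP, STACK_MASK) are assumed to be defined in the same module as the function:
# Hypothetical usage: stack three exposures at a fixed ra/dec rate.
from astropy.io import fits

hdus = [fits.open(name) for name in ('exp1.fits', 'exp2.fits', 'exp3.fits')]
rate = {'dra': 1.0 / 3600.0, 'ddec': -0.5 / 3600.0}   # deg per day, illustrative
stack = swarp(hdus, reference_hdu=hdus[0], rate=rate, stacking_mode='MEDIAN')
stack.writeto('median_stack.fits', overwrite=True)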
    def setUp(self):
        argument_list = [
            '--data-path',
            os.getcwd(),
            '--proc-path',
            os.getcwd(),
            '--search-pattern',
            'cfzsto',
            '--output-prefix',
            'w',
            '--extraction',
            'fractional',
            '--reference-files',
            'data/ref_comp',
            '--max-targets',
            '3',
        ]
        arguments = get_args(argument_list)
        self.wc = WavelengthCalibration(args=arguments)

        self.ccd = CCDData(data=np.random.random_sample(200),
                           meta=fits.Header(),
                           unit='adu')
        self.ccd = add_wcs_keys(ccd=self.ccd)
        self.ccd.header.set('SLIT',
                            value='1.0" long slit',
                            comment="slit [arcsec]")
Example #10
def reproject(filename, ref_header, skylist, explist):
    # Open the image.
    hdulist = fits.open(filename)
    datacube = hdulist[0].data
    header = hdulist[0].header

    # Create a list with zeros for the reprojected datacube.
    repro_datacube = [0] * len(datacube)

    # Loop over the different frames in the datacube.
    for i, data in enumerate(datacube):
        # Create a CCDData class object.
        data_ccd = CCDData(data, header=header, unit="count",
                           wcs=wcs.WCS(header).celestial)
        # Reproject the data to the reference data.
        repro_datacube[i] = np.asarray(
            wcs_project(data_ccd, wcs.WCS(ref_header).celestial,
                        target_shape=(ref_header['NAXIS2'],
                                      ref_header['NAXIS1'])))

    # Temporary workaround to update the header of the image.
    new_data = wcs_project(data_ccd, wcs.WCS(ref_header).celestial,
                           target_shape=(ref_header['NAXIS2'],
                                         ref_header['NAXIS1']))
    new_data.write(filename.replace(".img", "_r.img"),
                   format="fits", overwrite=True)
    temp_hdu = fits.open(filename.replace(".img", "_r.img"))
    new_header = temp_hdu[0].header

    # Append the reprojected datacube to the list and write it to a new image.
    skylist.append(np.array(repro_datacube))
    new_hdu = fits.PrimaryHDU(repro_datacube, new_header)
    new_hdu.writeto(filename.replace(".img", "_r.img"), overwrite=True)

    # Reproject the exposure map.
    data_exp_ccd = CCDData.read(filename.replace("nm_coilsszp_c", "ex"),
                                unit="count", hdu=1)
    repro_data_exp = wcs_project(data_exp_ccd, wcs.WCS(ref_header).celestial,
                                 target_shape=(ref_header['NAXIS2'],
                                               ref_header['NAXIS1']))

    # Append the reprojected data to a list and write it to a new image.
    explist.append(np.array(repro_data_exp))
    repro_data_exp.write(filename.replace("nm_coilsszp_c", "ex_r"),
                         format="fits", overwrite=True)
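A hypothetical call sketch for reproject; the UVOT-style file names are placeholders and the first image serves as its own reference:
# Hypothetical usage: reproject two sky images onto the WCS of the first one.
sky_frames, exp_frames = [], []
ref_header = fits.open('sw001_nm_coilsszp_c.img')[0].header   # placeholder file
for name in ('sw001_nm_coilsszp_c.img', 'sw002_nm_coilsszp_c.img'):
    reproject(name, ref_header, sky_frames, exp_frames)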
Example #11
    def setUp(self):
        self.ccd = CCDData(data=np.ones((800, 2000)),
                           meta=fits.Header(),
                           unit='adu')

        self.all_keywords = [
            'GSP_TMOD', 'GSP_TORD', 'GSP_TC00', 'GSP_TC01', 'GSP_TC02',
            'GSP_TERR'
        ]

        self.trace_info = collections.OrderedDict()

        self.trace_info['GSP_TMOD'] = [
            'Polinomial1D', 'Model name used to fit trace'
        ]

        self.trace_info['GSP_TORD'] = [
            2, 'Degree of the model used to fit '
            'target trace'
        ]

        self.trace_info['GSP_TC00'] = [500, 'Parameter c0']
        self.trace_info['GSP_TC01'] = [1, 'Parameter c1']
        self.trace_info['GSP_TC02'] = [2, 'Parameter c2']
        self.trace_info['GSP_TERR'] = [0.5, 'RMS error of target trace']
Example #12
def create_ccd(size=50, scale=1.0, mean=0.0, seed=123):
    """Create a fake ccd for data testing data processing
    """
    with NumpyRNGContext(seed):
        data = np.random.normal(loc=mean, size=[size, size], scale=scale)

    ccd = CCDData(data, unit=u.adu)
    return ccd
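A quick usage sketch of the helper above; the numbers are illustrative only:
# Build a 50x50 fake frame and check its basic statistics.
ccd = create_ccd(size=50, scale=2.0, mean=100.0, seed=42)
print(ccd.data.shape)             # (50, 50)
print(round(ccd.data.mean(), 1))  # close to 100.0 for this mean/scale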
Example #13
 def setUp(self):
     self.sm = SpectroscopicMode()
     self.ccd = CCDData(data=np.ones((800, 2000)),
                        meta=fits.Header(),
                        unit='adu')
     self.ccd.header.set('GRATING', value='SYZY_400')
     self.ccd.header.set('CAM_TARG', value='16.1')
     self.ccd.header.set('GRT_TARG', value='7.5')
     self.ccd.header.set('FILTER2', value='GG455')
Example #14
def reduce_image(imagefile, dark=None, flat=None):
    im = CCDData.read(imagefile, unit='adu')
    if dark is not None:
        dark = CCDData.read(dark, unit='adu')
        im = im.subtract(dark)
    if flat is not None:
#         masterflat = CCDData.read(flat, unit='adu')
        hdul = fits.open(flat)
        masterflat = CCDData(data=hdul[0].data, uncertainty=None, meta=hdul[0].header, unit='adu')
        im = flat_correct(im, masterflat)
    return im
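A minimal sketch of calling reduce_image; the file names are hypothetical, and both calibration frames are optional:
# Hypothetical usage: dark-subtract and flat-correct a single science frame.
reduced = reduce_image('science.fits',
                       dark='master_dark.fits',
                       flat='master_flat.fits')
reduced.write('science_reduced.fits', overwrite=True)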
Example #15
    def test_combine(self):

        ccd1 = CCDData(np.random.normal(size=(10, 10)), unit='adu')
        ccd2 = ccd1.copy()
        ccd3 = ccd1.copy()

        combiner = Combiner([ccd1, ccd2, ccd3])
        combiner.sigma_clipping(low_thresh=2, high_thresh=5)
        combined_data = combiner.median_combine()

        np.testing.assert_equal(combined_data.data, ccd1.data)
Example #16
    def setUp(self):
        self.ccd = CCDData(data=np.ones((100, 100)),
                           meta=fits.Header(),
                           unit='adu')
        self.ccd.header.set('INSTCONF', value='Red')
        self.ccd.header.set('GAIN', value=1.48)
        self.ccd.header.set('RDNOISE', value=3.89)

        self.half_full_well = 69257

        self.saturation_values = SaturationValues(ccd=self.ccd)
Example #17
    def _create_stack(self, images_list, stack_name):

        CCD_data_table = [CCDData(im.data, unit='adu') for im in images_list]
        combiner = Combiner(CCD_data_table)
        median = combiner.median_combine()

        master_hdr = self._create_stack_hdr(
            images_list, self.config_section.get('datetime_key'),
            self.config_section.get('jd_key'))

        self.info('Processing stack {} finished'.format(stack_name))
        self._save_stack(median, stack_name, master_hdr)
Example #18
    def setUp(self):
        self.ccd = CCDData(data=np.ones((800, 2000)),
                           meta=fits.Header(),
                           unit='adu')

        self.profile_1 = models.Gaussian1D(amplitude=200, mean=100,
                                           stddev=10).rename('Profile_1')
        self.profile_2 = models.Gaussian1D(amplitude=200, mean=600,
                                           stddev=10).rename('Profile_2')

        profile_sum = self.profile_1 + self.profile_2
        for i in range(self.ccd.data.shape[1]):
            self.ccd.data[:, i] *= profile_sum(range(self.ccd.data.shape[0]))
def t120_mkdark(dark_dir=t120.t120_dark_dir,
                master_offset=t120.t120_ofst_dir + t120.t120_master_name,
                master_file_name=t120.t120_master_name):
    # read offset
    hdu_offset_list = fits.open(master_offset)
    offset = CCDData(hdu_offset_list[0].data, unit=u.adu)

    master_file = dark_dir + master_file_name
    listimg = ImageFileCollection(
        dark_dir)  #,glob_include='*.fit',glob_exclude='*.fits')
    dict_ccd_data = {}
    for fit_file in glob.glob(dark_dir + '*.fit'):
        t120.log.info('now opening file: ' + fit_file)
        hdu = fits.open(fit_file)
        exp_time = hdu[0].header['EXPTIME']
        strexptime = "%3.1f" % exp_time
        t120.log.info('EXPTIME=' + str(exp_time) + ' strexptime=' + strexptime)
        # group frames by exposure time: create the list on first occurrence,
        # then always append (Python 3: use "in" instead of dict.has_key)
        if strexptime not in dict_ccd_data:
            dict_ccd_data[strexptime] = []
        dict_ccd_data[strexptime].append(
            subtract_overscan(CCDData(hdu[0].data, unit=u.adu), offset))

    t120.log.info('now loop over the exp_time')
    for strexp_time in dict_ccd_data:
        t120.log.info('exp_time: ' + strexp_time)
        combiner = Combiner(dict_ccd_data[strexp_time])
        master_dark = combiner.median_combine()
        master_file = dark_dir + master_file_name.replace(
            '.fits', '') + '-' + strexp_time + '.fits'
        hdu = master_dark.to_hdu()
        #hdu[0].header.set('EXPTIME',value=exp_time,comment='Exposure time in sec')
        #hdu[0].header.set('EXPOSURE',value=exp_time,comment='Exposure time in sec')
        #hdu.writeto(master_file,overwrite=True)
        fits_ccddata_writer(master_dark, master_file)
        t120.log.info('Master dark saved in ' + master_file)
    return
Example #20
    def setUp(self):
        self.rd = ReferenceData(reference_dir=os.path.join(
            os.getcwd(), 'goodman_pipeline/data/ref_comp'))
        self.ccd = CCDData(data=np.ones((800, 2000)),
                           meta=fits.Header(),
                           unit='adu')

        self.columns = ['object', 'grating', 'grt_targ', 'cam_targ']

        self.data_exist = [['HgArNe', 'SYZY_400', 7.5, 16.1],
                           ['HgAr', 'SYZY_400', 7.5, 16.1]]

        self.data_does_not_exist = [['HgArNe', 'SYZY_800', 7.5, 16.1],
                                    ['HgAr', 'SYZY_800', 7.5, 16.1]]
Example #21
    def setUp(self):
        self.ccd = CCDData(data=np.ones((100, 100)),
                           meta=fits.Header(),
                           unit='adu')
        self.file_name = 'cr_test.fits'

        self.ccd.header.set('CCDSUM', value='1 1')
        self.ccd.header.set('OBSTYPE', value='OBJECT')
        self.ccd.header.set('INSTCONF', value='Red')
        self.ccd.header.set('GSP_FNAM', value=self.file_name)
        self.ccd.header.set('GSP_COSM', value='none')

        self.red_path = os.getcwd()
        self.out_prefix = 'prefix'
Example #22
    def setUp(self):
        # Create fake image
        self.fake_image = CCDData(data=np.ones((100, 100)),
                                  meta=fits.Header(),
                                  unit='adu')

        # define
        self.slit_low_limit = 5
        self.slit_high_limit = 95

        self.reference_slit_trim = '[1:100,{:d}:{:d}]'.format(
            self.slit_low_limit + 10 + 1, self.slit_high_limit - 10)

        # make a flat-like structure
        self.fake_image.data[self.slit_low_limit:self.slit_high_limit, :] = 100
Example #23
    def test_combine_masked(self):

        x = np.random.normal(size=(10, 10))
        x[5, :] = 0
        x = np.ma.masked_where(x == 0, x)

        ccd1 = CCDData(x, unit='adu')
        ccd2 = ccd1.copy()
        ccd3 = ccd1.copy()

        combiner = Combiner([ccd1, ccd2, ccd3])
        combiner.sigma_clipping(low_thresh=2, high_thresh=5)
        combined_data = combiner.median_combine()

        np.testing.assert_equal(combined_data.data, ccd1.data)
    def run(self):

        list_of_data = []
        for f in self.input_list:

            self.debug('Processing file: {:s}'.format(f))
            hdr = pyfits.getheader(f)
            data = pyfits.getdata(f)

            x_center = data.shape[1] // 2
            x_bsize = int(0.05 * data.shape[1])
            x1, x2 = x_center - x_bsize, x_center + x_bsize
            x_where = np.zeros_like(data)
            x_where[:, x1:x2] = 1

            y_center = data.shape[0] // 2
            y_bsize = int(0.05 * data.shape[0])
            y1, y2 = y_center - y_bsize, y_center + y_bsize
            y_where = np.zeros_like(data)
            y_where[y1:y2, :] = 1

            where = np.where(x_where * y_where == 1, True, False)
            norm_factor = np.median(data[where])
            data /= norm_factor

            data = CCDData(data, unit=u.adu)
            list_of_data.append(data)

        # Parameter obtained from PySOAR, written by Luciano Fraga
        master_flat = combine(list_of_data,
                              method='median',
                              mem_limit=6.4e7,
                              sigma_clip=True)

        master_flat.header = hdr
        if self.output_filename is None:

            filter_name = hdr['FILTERS'].strip()
            binning = int(hdr['CCDSUM'].strip().split(' ')[0])
            self.debug('Binning: {:d}'.format(binning))

            filename = '1NSFLAT{0:d}x{0:d}_{1:s}.fits'.format(
                binning, filter_name)

            master_flat.write(filename, overwrite=True)

        else:
            master_flat.write(self.output_filename, overwrite=True)
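The core combination step of this routine can be sketched in isolation with ccdproc.combine; this is a minimal, self-contained illustration rather than part of the original class:
# Minimal sketch: sigma-clipped median combination of normalized frames.
import numpy as np
import astropy.units as u
from astropy.nddata import CCDData
from ccdproc import combine

frames = [CCDData(np.random.normal(1.0, 0.01, size=(100, 100)), unit=u.adu)
          for _ in range(5)]
master_flat = combine(frames, method='median', sigma_clip=True, mem_limit=6.4e7)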
Example #25
    def combinaImagensBias(self, numeroImagens=10):
        newlist, dados = [], []
        step = round(len(self.listaImagensBias) / numeroImagens)
        for i in range(numeroImagens):
            newlist.append(self.cwd + '\\' + self.listaImagensBias[i * step])
        for img in newlist:
            dados.append(fits.getdata(img, 0))
        # build another list holding the frames as CCDData objects
        x = []
        for img in dados:
            x.append(CCDData(img, unit='adu'))
        combinedImage = Combiner(x)
        combinedImageMedian = combinedImage.average_combine()  # alternative: median_combine
        # keep the combined bias as a plain numpy array
        self.NPcombinedImage = np.asarray(combinedImageMedian)
Example #26
    def _computeScienceImage(self):
        print('\n MASTER SCIENCE: \n')
        #        self.sciTrim = self._overscanAndtrim(self.science)
        # TODO: use ccd_process?
        if type(self._science) == list:
            scisCorrected = []
            for sci in self._science:
                darkCorrection = self._subtractDark(sci)
                flatCorrection = self._correctForFlat(darkCorrection)
                skyCorrection = self._subtractSky(flatCorrection)
                #                 sciFinal = self._trimImage(skyCorrection)
                scisCorrected.append(skyCorrection)
            print('Sigma clipping...')
            sciCombiner = Combiner(scisCorrected)
            sciCombiner.sigma_clipping(low_thresh=3.,
                                       high_thresh=3.,
                                       func=np.ma.median,
                                       dev_func=np.ma.std)
            print('Median combine...')
            medianSci = sciCombiner.median_combine()
            mask = self.getBadPixelMask() + medianSci.mask
            print('Getting master science frame...')
            self.masterSci = CCDData(medianSci, mask=mask, unit='adu')
            print('Writing the header...')
            self.masterSci.header = self._science[0].meta
            # TODO: risky header?
#             self.masterSci.header['FRAMETYP'] = \
#                 self._science[0].header['FRAMETYP']
#             self.masterSci.header['OBJECT'] = self._science[0].header['OBJECT']
#             self.masterSci.header['DIT'] = self._science[0].header['DIT']
#             self.masterSci.header['FILTER'] = \
#                 self._science[0].header['FILTER']
#             self.masterSci.header['OBJRA'] = self._science[0].header['OBJRA']
#             self.masterSci.header['OBJDEC'] = self._science[0].header['OBJDEC']
#             self.masterSci.header['DATE'] = self._science[0].header['DATE']
#             self.masterSci.header['GAIN'] = self._science[0].header['GAIN']
        else:
            sci_dark = self._subtractDark(self._science)
            sciFlat = self._correctForFlat(sci_dark)
            print('Getting master science frame...')
            self.masterSci = self._subtractSky(sciFlat)
            print('Writing the header...')
            self.masterSci.header = self._science.header

        if self._unit == 'electron':
            self.masterSci = self._adu2Electron(self.masterSci)
            self.masterSci.header['UNIT'] = 'electrons'
    def setUp(self):
        self.file_list = []
        argument_list = [
            '--data-path',
            os.getcwd(),
            '--proc-path',
            os.getcwd(),
            '--search-pattern',
            'cfzsto',
            '--output-prefix',
            'w',
            '--extraction',
            'fractional',
            '--reference-files',
            'data/ref_comp',
            '--max-targets',
            '3',
        ]
        arguments = get_args(argument_list)
        self.wc = WavelengthCalibration()

        self.ccd = CCDData(data=np.random.random_sample(200),
                           meta=fits.Header(),
                           unit='adu')
        self.ccd = add_wcs_keys(ccd=self.ccd)
        self.ccd.header.set('SLIT',
                            value='1.0_LONG_SLIT',
                            comment="slit [arcsec]")
        self.ccd.header.set('GSP_FNAM',
                            value='some_name.fits',
                            comment='Name of the current file')
        self.ccd.header.set('OBSTYPE', value='SPECTRUM', comment='Obstype')
        self.ccd.header.set('OBJECT',
                            value='An X Object',
                            comment='Some random object name')
        self.ccd.header.set('GSP_FLAT',
                            value='some_flat_file.fits',
                            comment='The name of the flat')
        self.ccd.header.set('CCDSUM', value='1 1', comment='Binning')
        self.ccd.header.set('WAVMODE', value='400 M1', comment='wavmode')

        self.lamp = self.ccd.copy()
        self.lamp.header.set('OBSTYPE',
                             value='COMP',
                             comment='Comparison lamp obstype')
        self.lamp.header.set('OBJECT', value='HgArNe')
    def setUp(self):
        arguments = ['--saturation', '1']
        args = get_args(arguments=arguments)
        data_container = NightDataContainer(path='/fake',
                                            instrument='Red',
                                            technique='Spectroscopy')
        self.image_processor = ImageProcessor(args=args,
                                              data_container=data_container)

        self.ccd = CCDData(data=np.ones((100, 100)),
                           meta=fits.Header(),
                           unit='adu')
        self.ccd.header.set('INSTCONF', value='Red')
        self.ccd.header.set('GAIN', value=1.48)
        self.ccd.header.set('RDNOISE', value=3.89)

        self.half_full_well = 69257
Example #29
def criaImgBias_Reduction(listaImgBias):
    bias = []
    for img in listaImgBias:
        bias.append(fits.getdata(img))

    # build another list holding the frames as CCDData objects
    x = []
    for frame in bias:
        x.append(CCDData(frame, unit='adu'))

    combinedImage = Combiner(x)
    combinedImageMedian = combinedImage.average_combine()  # alternative: median_combine
    NPcombinedImage = np.asarray(combinedImageMedian)
    return NPcombinedImage
Example #30
def geraArquivo(inputlist, n):
    # collect the input data arrays
    scidata = []
    for img in inputlist:
        scidata.append(img)

    # build another list holding the frames as CCDData objects
    x = []
    for frame in scidata:
        x.append(CCDData(frame, unit='adu'))

    combinedImage = Combiner(x)
    combinedImageMedian = combinedImage.median_combine()  # alternative: average_combine
    NPcombinedImage = np.asarray(combinedImageMedian)
    return NPcombinedImage
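A small usage sketch for geraArquivo with in-memory arrays; note that the n argument is not used by the function as written:
# Hypothetical usage: median-combine three noisy frames held in memory.
import numpy as np

frames = [np.random.normal(100.0, 5.0, size=(64, 64)) for _ in range(3)]
stacked = geraArquivo(frames, n=3)
print(stacked.shape)  # (64, 64)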