Example no. 1
def test_save_a_layer_to_file():
    metad = metadata.MetaData()

    metad.create_a_new_layer(
        'dummy_layer',
        [['OHOHOH', 'IHIHIH'], ['S150', 'S10'], ['km/s', 'h/(2pi)']],
        [['0', '1'], ['59', '41']])
    metad.save_a_layer_to_file('./', 'dummy_metadata.fits', 'dummy_layer')

    metad2 = metadata.MetaData()
    metad2.load_all_metadata('./', 'dummy_metadata.fits')

    assert metad2.dummy_layer[1].keys() == ['OHOHOH', 'IHIHIH']
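
In these pyDANDIA snippets a layer behaves as a two-element list: index 0 carries header-style information about the layer (e.g. its NAME, see Example no. 3) and index 1 the table whose columns take the names, formats and units passed to create_a_new_layer. A minimal round-trip sketch using only the calls shown in these examples; the import path is an assumption, since the snippets never show it:

from pyDANDIA import metadata  # assumed import path; the examples only show metadata.MetaData()

metad = metadata.MetaData()
# column names, then formats, then units, followed by one list per data column
metad.create_a_new_layer(
    'dummy_layer',
    [['OHOHOH', 'IHIHIH'], ['S150', 'S10'], ['km/s', 'h/(2pi)']],
    [['0', '1'], ['59', '41']])
metad.save_a_layer_to_file('./', 'dummy_metadata.fits', 'dummy_layer')

# reload just that layer and inspect the table held at index 1
metad2 = metadata.MetaData()
metad2.load_a_layer_from_file('./', 'dummy_metadata.fits', 'dummy_layer')
print(metad2.dummy_layer[1]['OHOHOH'].unit)  # 'km/s', per Example no. 28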
Example no. 2
def test_update_reduction_metadata_headers_summary_with_new_images():
    setup = mock.MagicMock()

    reduction_metadata = metadata.MetaData()

    pipeline_config = stage0.read_the_config_file('../../Config/', log=None)

    stage0.update_reduction_metadata_with_config_file(reduction_metadata, pipeline_config, log=None)
    inst_config_dictionnary = {'OBSTYPE': {"comment": "tres bon",
                                           "value": "OBJECT",
                                           "format": "S200",
                                           "unit": ""}}

    stage0.update_reduction_metadata_with_inst_config_file(reduction_metadata, inst_config_dictionnary, log=None)
    reduction_metadata.data_architecture[1] = {'IMAGES_PATH': ['./']}

    image_bad_pixel_mask = np.zeros((3, 3))
    image_bad_pixel_mask += 89
    header = fits.Header([('OBJECT', 'HUNGRY')])
    header['OBJECT'] = 'NDG'

    image = fits.PrimaryHDU(image_bad_pixel_mask, header=header)
    hdulist = fits.HDUList([image])

    hdulist.writeto('Leia.fits', overwrite=True)

    stage0.update_reduction_metadata_headers_summary_with_new_images(setup,
                                                                     reduction_metadata,
                                                                     ['Leia.fits'], log=None)

    assert reduction_metadata.headers_summary[1]['IMAGES'][0] == 'Leia.fits'
    assert reduction_metadata.headers_summary[1]['OBSTYPE'][0] == 'NDG'
    os.remove('Leia.fits')
Example no. 3
def test_update_reduction_metadata_stamps():
    setup = mock.MagicMock()

    reduction_metadata = metadata.MetaData()

    image = np.zeros((300, 300))
    image = fits.PrimaryHDU(image)

    stage0.update_reduction_metadata_stamps(setup, reduction_metadata, image,
                                            stamp_size=None, arcseconds_stamp_size=(60, 60),
                                            pixel_scale=0.51, number_of_overlaping_pixels=25,
                                            log=None)

    expected_values = np.array([[0., 0., 142., 0., 142.],
                                [1., 0., 142., 92., 259.],
                                [2., 0., 142., 300., 376.],
                                [3., 92., 259., 0., 142.],
                                [4., 92., 259., 92., 259.],
                                [5., 92., 259., 300., 376.],
                                [6., 300., 376., 0., 142.],
                                [7., 300., 376., 92., 259.],
                                [8., 300., 376., 300., 376.]])

    assert reduction_metadata.stamps[0]['NAME'] == 'stamps'
    assert np.allclose(reduction_metadata.stamps[1]['PIXEL_INDEX'].data.astype(float), expected_values[:,0])
    assert np.allclose(reduction_metadata.stamps[1]['Y_MIN'].data.astype(float), expected_values[:,1])
    assert np.allclose(reduction_metadata.stamps[1]['Y_MAX'].data.astype(float), expected_values[:, 2])
    assert np.allclose(reduction_metadata.stamps[1]['X_MIN'].data.astype(float), expected_values[:, 3])
    assert np.allclose(reduction_metadata.stamps[1]['X_MAX'].data.astype(float), expected_values[:, 4])
Example no. 4
def convert_red_metadata():
    """Function to convert the configuration parameter files and trendlogs
    from an (IDL) DanDIA reduction into the metadata structure used by
    pyDANDIA."""
    
    config = get_config()

    meta = metadata.MetaData()
    
    event_info = read_event_info(config)
    meta.set_pars(event_info)
    
    meta.set_reduction_paths(config['red_dir'])
    
    red_config = read_red_config(config)
    meta.set_pars(red_config)
    
    meta.inventory = read_data_inventory(config)
    
    meta.imred = read_imred_trendlog(config['imred'])
    
    meta.gimred = read_gimred_trendlog(config['gimred'])
    
    meta.write()
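
convert_red_metadata() only works if get_config() supplies at least the keys dereferenced above; a hypothetical minimal configuration illustrating that shape (the paths are placeholders, not real files):

# hypothetical shape of the dictionary returned by get_config()
config = {
    'red_dir': '/path/to/reduction',       # used by meta.set_reduction_paths()
    'imred': '/path/to/trendlog_imred',    # passed to read_imred_trendlog()
    'gimred': '/path/to/trendlog_gimred',  # passed to read_gimred_trendlog()
}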
Example no. 5
def read_star_catalog(red_dir, log):
    """Function to extract the star catalog from a given reduction of a single
    dataset, using the information in that reduction's metadata file.

    Inputs:
        :param str red_dir: Path to the reduction directory
        :param logger log: Script's own logging object
    
    Returns:
        :param Table catalog: Catalog of objects from a single reduction
    """

    meta_file = os.path.join(red_dir, 'pyDANDIA_metadata.fits')

    catalog = None

    if os.path.isfile(meta_file):

        m = metadata.MetaData()

        m.load_a_layer_from_file(red_dir, 'pyDANDIA_metadata.fits',
                                 'star_catalog')

        catalog = m.star_catalog[1]

        log.info('Read star catalog from metadata for '+\
                    os.path.basename(red_dir))

    return catalog
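
A usage sketch for read_star_catalog(); the reduction directory is a placeholder and a stdlib logging.Logger stands in for the pipeline's own log object, since only its info() method is called here:

import logging

logging.basicConfig(level=logging.INFO)
log = logging.getLogger('star_catalog_reader')

red_dir = '/path/to/reduction'  # hypothetical directory containing pyDANDIA_metadata.fits
catalog = read_star_catalog(red_dir, log)
if catalog is None:
    log.info('No metadata file found in ' + red_dir)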
Example no. 6
def test_plot_ref_mag_errors():
    """Function to test the plotting function"""

    setup = pipeline_setup.pipeline_setup({'red_dir': TEST_DIR})

    reduction_metadata = metadata.MetaData()
    reduction_metadata.load_a_layer_from_file(setup.red_dir,
                                              'pyDANDIA_metadata.fits',
                                              'star_catalog')

    idx = reduction_metadata.star_catalog[1]['star_index'].data
    x = reduction_metadata.star_catalog[1]['x_pixel'].data
    y = reduction_metadata.star_catalog[1]['y_pixel'].data
    ra = reduction_metadata.star_catalog[1]['RA_J2000'].data
    dec = reduction_metadata.star_catalog[1]['DEC_J2000'].data
    mag = reduction_metadata.star_catalog[1]['Instr_mag'].data
    merr = reduction_metadata.star_catalog[1]['Instr_mag_err'].data

    ref_star_catalog = []

    for i in range(0, len(idx), 1):

        ref_star_catalog.append(
            [idx[i], x[i], y[i], ra[i], dec[i], mag[i], merr[i]])

    ref_star_catalog = np.array(ref_star_catalog)

    photometry.plot_ref_mag_errors(setup, ref_star_catalog)

    plot_file = os.path.join(setup.red_dir, 'ref', 'ref_image_phot_errors.png')

    assert os.path.isfile(plot_file)
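
The per-star append loop above can also be written as a single column stack of the arrays already pulled from the star_catalog layer; this produces the same (N, 7) array, since the inputs are equal-length columns of one table:

import numpy as np

# one row per star, columns in the same order as the loop version
ref_star_catalog = np.column_stack([idx, x, y, ra, dec, mag, merr])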
Example no. 7
def test_model_sky_background():
    """Function to test the function to model the sky background of an image"""

    setup = pipeline_setup.pipeline_setup({'red_dir': TEST_DIR})

    log = logs.start_stage_log(cwd, 'test_sky_background')

    detected_sources_file = path.join(
        TEST_DATA, 'lsc1m005-fl15-20170701-0144-e91_cropped_sources.txt')

    detected_sources = catalog_utils.read_source_catalog(detected_sources_file)

    ref_star_catalog = np.zeros([len(detected_sources), 13])
    ref_star_catalog[:, 0] = detected_sources[:, 0]
    ref_star_catalog[:, 1] = detected_sources[:, 1]
    ref_star_catalog[:, 2] = detected_sources[:, 2]

    reduction_metadata = metadata.MetaData()
    reduction_metadata.load_a_layer_from_file(setup.red_dir,
                                              'pyDANDIA_metadata.fits',
                                              'reduction_parameters')
    reduction_metadata.ref_image_path = path.join(
        TEST_DATA, 'lsc1m005-fl15-20170701-0144-e91_cropped.fits')

    log.info('Read metadata')

    sky_background.model_sky_background(setup, reduction_metadata, log,
                                        ref_star_catalog)

    logs.close_log(log)
Example no. 8
def test_update_reduction_metadata_with_inst_config_file():
    reduction_metadata = metadata.MetaData()

    pipeline_config = stage0.read_the_config_file('../../Config/', log=None)

    stage0.update_reduction_metadata_with_config_file(reduction_metadata, pipeline_config, log=None)
    inst_config_dictionnary = {'fromage': {"comment": "tres bon",
                                           "value": "camembert",
                                           "format": "S200",
                                           "unit": ""}
        , 'dessert': {"comment": "moins bon",
                      "value": "pomme",
                      "format": "S200",
                      "unit": ""}}

    stage0.update_reduction_metadata_with_inst_config_file(reduction_metadata, inst_config_dictionnary, log=None)

    assert 'FROMAGE' in reduction_metadata.reduction_parameters[1].keys()
    assert reduction_metadata.reduction_parameters[1]['FROMAGE'] == 'camembert'
    assert reduction_metadata.reduction_parameters[1]['FROMAGE'].dtype == 'S200'
    assert reduction_metadata.reduction_parameters[1]['FROMAGE'].unit == ''

    assert 'DESSERT' in reduction_metadata.reduction_parameters[1].keys()
    assert reduction_metadata.reduction_parameters[1]['DESSERT'] == 'pomme'
    assert reduction_metadata.reduction_parameters[1]['DESSERT'].dtype == 'S200'
    assert reduction_metadata.reduction_parameters[1]['DESSERT'].unit == ''
Example no. 9
def add(url):
    if contains(url):
        return get_by_url(url).id
    m = metadata.MetaData(url)
    db[url] = m
    id_to_url[m.id] = url
    return m.id
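
add() deduplicates by URL before building a new MetaData record. The module-level state it relies on is not shown in the snippet; a minimal sketch of how those pieces could fit together, where the dictionaries and the contains/get_by_url helpers are hypothetical stand-ins consistent with the calls above:

# hypothetical registry backing add()
db = {}          # url -> MetaData instance
id_to_url = {}   # MetaData.id -> url

def contains(url):
    return url in db

def get_by_url(url):
    return db[url]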
Example no. 10
def test_id_crowded_stars():
    """Function to test the exclusion of crowded stars from the PSF star 
    selection process"""
    
    setup = pipeline_setup.pipeline_setup({'red_dir': TEST_DIR})
    
    log = logs.start_stage_log( cwd, 'test_psf_selection' )
    
    reduction_metadata = metadata.MetaData()
    reduction_metadata.load_a_layer_from_file(setup.red_dir, 
                                              'pyDANDIA_metadata.fits', 
                                              'reduction_parameters')
    log.info('Read metadata')
    
    nstars = 10
    bright = 16.0
    faint = 23.0
    
    ref_star_catalog = np.zeros([nstars,13])
    ref_star_catalog[:,0] = range(0,nstars,1)
    
    istar = -1
    for j in range(0,7,1):
        istar += 1
        ref_star_catalog[istar,1] = abs(random.normalvariate(100.0,100.0))
        ref_star_catalog[istar,2] = abs(random.normalvariate(100.0,100.0))
        ref_star_catalog[istar,5] = abs(random.normalvariate(16.0,0.5))
    
    exclude = []
    
    for j in range(istar,nstars,1):
        i = random.randint(0,2)
        xstar = ref_star_catalog[i,1]
        ystar = ref_star_catalog[i,2]
        ref_star_catalog[j,1] = random.normalvariate(xstar,1.0)
        ref_star_catalog[j,2] = random.normalvariate(ystar,1.0)
        ref_star_catalog[j,5] = abs(random.normalvariate(17.0,0.5))
        
        exclude.append(i)
    
    ref_star_catalog[:,3] = random.normalvariate(17.0*15.0,20.0)
    ref_star_catalog[:,4] = random.normalvariate(-27.0,10.0)
    ref_star_catalog[:,6] = 0.005 + ref_star_catalog[:,4]*0.05
    
    psf_stars_idx = np.array([1]*nstars)
    
    psf_stars_idx = psf_selection.id_crowded_stars(setup,reduction_metadata,log,
                                    ref_star_catalog,psf_stars_idx)

    psf_selection.plot_ref_star_catalog_positions(setup,reduction_metadata,log,
                                    ref_star_catalog, psf_stars_idx)
    
    star_index = np.where(psf_stars_idx == 1)[0]
    
    for j in exclude:
        
        assert j not in star_index
    
    logs.close_log(log)
Example no. 11
def test_update_reduction_metadata_with_config_file():
    reduction_metadata = metadata.MetaData()

    pipeline_config = stage0.read_the_config_file('../../Config/', log=None)

    stage0.update_reduction_metadata_with_config_file(reduction_metadata, pipeline_config, log=None)

    assert len(reduction_metadata.reduction_parameters[1]) != 0
Example no. 12
def test_update_column_to_layer():
    metad = metadata.MetaData()

    metad.create_a_new_layer('dummy_layer', [['OHOHOH'], ['S150'], ['km/s']],
                             [['0', '59']])

    metad.update_row_to_layer('dummy_layer', 'OHOHOH', ['89', '-98'])
    assert metad.dummy_layer[1]['OHOHOH'][0] == '89'
    assert metad.dummy_layer[1]['OHOHOH'][1] == '-98'
Example no. 13
def test_load_a_layer_from_file():
    metad = metadata.MetaData()
    metad.load_a_layer_from_file('./', 'dummy_metadata.fits', 'data_inventory')

    assert metad.data_inventory[1].keys() == [
        'IMAGES', 'STAGE_0', 'STAGE_1', 'STAGE_2', 'STAGE_3', 'STAGE_4',
        'STAGE_5', 'STAGE_6', 'STAGE_7'
    ]

    assert len(metad.data_inventory[1]) == 0
Example no. 14
def main():
    print("Read values for IMG_0613.mp4")
    file = "./IMG_0613.mp4"
    md = metadata.MetaData(file)

    # Print metadata values
    print("codecs: ", md.get_codecs())
    print("height: ", md.get_height())
    print("width: ", md.get_width())
    print("fps: ", md.get_fps())

    print("Read values for IMG_0753.MOV")
    file = "./IMG_0753.MOV"
    md = metadata.MetaData(file)

    # Print metadata values
    print("codecs: ", md.get_codecs())
    print("height: ", md.get_height())
    print("width: ", md.get_width())
    print("fps: ", md.get_fps())
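
Since the two blocks differ only in the file name, the same reads fold naturally into a loop; a sketch using only the getters shown above:

for file in ("./IMG_0613.mp4", "./IMG_0753.MOV"):
    print("Read values for " + file)
    md = metadata.MetaData(file)
    print("codecs: ", md.get_codecs())
    print("height: ", md.get_height())
    print("width: ", md.get_width())
    print("fps: ", md.get_fps())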
Example no. 15
def test_transform_2D_table_to_dictionary():
    metad = metadata.MetaData()

    metad.create_a_new_layer(
        'dummy_layer',
        [['OHOHOH', 'IHIHIH'], ['S150', 'S10'], ['km/s', 'h/(2pi)']],
        [['0'], ['59']])

    dico = metad.transform_2D_table_to_dictionary('dummy_layer')

    assert len(dico._fields) == 2
    assert getattr(dico, 'OHOHOH') == '0'
    assert getattr(dico, 'IHIHIH') == '59'
Example no. 16
def test_id_mid_range_stars():
    """Function to test the selection of stars in the reference image, excluding
    the brightest and faintest N% of those detected"""
    
    setup = pipeline_setup.pipeline_setup({'red_dir': TEST_DIR})
    
    log = logs.start_stage_log( cwd, 'test_psf_selection' )
    
    reduction_metadata = metadata.MetaData()
    reduction_metadata.load_a_layer_from_file(setup.red_dir, 
                                              'pyDANDIA_metadata.fits', 
                                              'reduction_parameters')
    log.info('Read metadata')
    
    # Generating test catalog with columns:
    # idx x  y  ra  dec  inst_mag inst_mag_err J  Jerr  H Herr   K   Kerr
    nstars = 10
    bright = 16.0
    faint = 23.0
    
    ref_star_catalog = np.zeros([nstars,13])
    ref_star_catalog[:,0] = range(0,nstars,1)
    ref_star_catalog[:,1] = random.normalvariate(100.0,100.0)
    ref_star_catalog[:,2] = random.normalvariate(100.0,100.0)
    ref_star_catalog[:,3] = random.normalvariate(17.0*15.0,20.0)
    ref_star_catalog[:,4] = random.normalvariate(-27.0,10.0)
    ref_star_catalog[:,5] = np.arange(bright,faint,(faint-bright)/float(nstars))
    ref_star_catalog[:,6] = 0.005 + ref_star_catalog[:,4]*0.05
    
    psf_stars_idx = np.array([1]*nstars)
    
    psf_range_thresh = reduction_metadata.reduction_parameters[1]['PSF_RANGE_THRESH'][0]
    
    log.info('Read psf range threshold = '+str(psf_range_thresh))
    
    nstar_cut = int(float(nstars) * (psf_range_thresh/100.0))
    istart = nstar_cut
    iend = len(ref_star_catalog) - nstar_cut
    
    test_psf_stars_idx = np.ones(nstars)
    test_psf_stars_idx[0:nstar_cut] = 0
    test_psf_stars_idx[(-1*nstar_cut):] = 0
    
    psf_stars_idx = psf_selection.id_mid_range_stars(setup,reduction_metadata,
                                                     log,
                                                     ref_star_catalog,
                                                     psf_stars_idx)
    
    assert psf_stars_idx.all() == test_psf_stars_idx.all()
    
    logs.close_log(log)
Example no. 17
    def __init__(self):
        self.mData = metadata.MetaData()
        self.mData.load()
        size = (880, 600)
        self.screen = pygame.display.set_mode(size)
        pygame.display.set_caption("Painter")
        self.clock = pygame.time.Clock()
        self.im = CImage(self.mData)
        self.brush = Brush(self.screen, self.im)
        self.menu = Menu(self.screen)
        self.menu.set_brush(self.brush)
        self.board = Board(self.screen)
        self.process = None
        self.queue = Queue()
Example no. 18
def test_add_row_to_layer():
    metad = metadata.MetaData()

    metad.create_a_new_layer(
        'dummy_layer',
        [['OHOHOH', 'IHIHIH'], ['S150', 'S10'], ['km/s', 'h/(2pi)']],
        [['0'], ['59']])

    new_row = ['purple', 'orange']
    metad.add_row_to_layer('dummy_layer', new_row)

    assert metad.dummy_layer[1]['OHOHOH'][0] == '0'
    assert metad.dummy_layer[1]['OHOHOH'][1] == 'purple'

    assert metad.dummy_layer[1]['IHIHIH'][0] == '59'
    assert metad.dummy_layer[1]['IHIHIH'][1] == 'orange'
Example no. 19
def test_extract_parameters_stage3():

    setup = pipeline_setup.pipeline_setup({'red_dir': TEST_DIR})

    reduction_metadata = metadata.MetaData()
    reduction_metadata.load_a_layer_from_file(setup.red_dir,
                                              'pyDANDIA_metadata.fits',
                                              'reduction_parameters')
    reduction_metadata.load_a_layer_from_file(setup.red_dir,
                                              'pyDANDIA_metadata.fits',
                                              'images_stats')
    reduction_metadata.reference_image_path = os.path.join(
        setup.red_dir, 'lsc1m005-fl15-20170418-0131-e91_cropped.fits')

    meta_pars = stage3.extract_parameters_stage3(reduction_metadata)

    print(meta_pars)
Example no. 20
def test_update_2D_table_with_dictionary():
    metad = metadata.MetaData()

    metad.create_a_new_layer(
        'dummy_layer',
        [['OHOHOH', 'IHIHIH'], ['S150', 'S10'], ['km/s', 'h/(2pi)']],
        [['0'], ['59']])

    dictionary = collections.namedtuple('dummy_dictionary',
                                        ['OHOHOH', 'IHIHIH'])
    setattr(dictionary, 'OHOHOH', 'monalisa')
    setattr(dictionary, 'IHIHIH', 'batistuta')

    metad.update_2D_table_with_dictionary('dummy_layer', dictionary)

    assert metad.dummy_layer[1]['OHOHOH'] == 'monalisa'
    assert metad.dummy_layer[1]['IHIHIH'] == 'batistuta'
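
Note that the test sets OHOHOH and IHIHIH on the namedtuple class itself rather than on an instance, which works here because update_2D_table_with_dictionary evidently only needs getattr() to succeed for each field. The conventional construction, which transform_2D_table_to_dictionary in Example no. 15 also appears to return, is an instance; a sketch (whether MetaData accepts it identically is an assumption):

import collections

DummyDict = collections.namedtuple('dummy_dictionary', ['OHOHOH', 'IHIHIH'])
dictionary = DummyDict(OHOHOH='monalisa', IHIHIH='batistuta')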
Example no. 21
def test_build_psf():

    setup = pipeline_setup.pipeline_setup({'red_dir': TEST_DIR})

    log = logs.start_stage_log(cwd, 'test_build_psf')

    log.info(setup.summary())

    reduction_metadata = metadata.MetaData()
    reduction_metadata.load_a_layer_from_file(setup.red_dir,
                                              'pyDANDIA_metadata.fits',
                                              'reduction_parameters')

    reduction_metadata.reference_image_path = os.path.join(
        TEST_DATA, 'lsc1m005-fl15-20170701-0144-e91_cropped.fits')
    reduction_metadata.background_type = 'constant'

    star_catalog_file = os.path.join(TEST_DATA, 'star_catalog.fits')

    ref_star_catalog = catalog_utils.read_ref_star_catalog_file(
        star_catalog_file)

    log.info('Read in catalog of ' + str(len(ref_star_catalog)) + ' stars')

    psf_stars_idx = np.zeros(len(ref_star_catalog))
    psf_stars_idx[400:500] = 1
    ref_star_catalog[:, 13] = psf_stars_idx

    ref_image = fits.getdata(reduction_metadata.reference_image_path)

    log.info('Loaded reference image')

    sky_model = psf.ConstantBackground()
    sky_model.constant = 1345.0
    sky_model.background_parameters.constant = 1345.0

    (psf_model, status) = psf.build_psf(setup,
                                        reduction_metadata,
                                        log,
                                        ref_image,
                                        ref_star_catalog,
                                        sky_model,
                                        diagnostics=True)

    logs.close_log(log)
Example no. 22
    def readAngularDistFile(self):
        angleRotLabel = md.MDL_ANGLE_ROT
        angleTiltLabel = md.MDL_ANGLE_TILT
        anglePsiLabel = md.MDL_ANGLE_PSI
        mdAngDist = md.MetaData(self.angularDistFile)
        if not mdAngDist.containsLabel(md.MDL_ANGLE_PSI):
            anglePsiLabel = None
            if mdAngDist.containsLabel(md.RLN_ORIENT_PSI):
                angleRotLabel = md.RLN_ORIENT_ROT
                angleTiltLabel = md.RLN_ORIENT_TILT
                anglePsiLabel = md.RLN_ORIENT_PSI

        if not mdAngDist.containsLabel(md.MDL_WEIGHT):
            mdAngDist.fillConstant(md.MDL_WEIGHT, 1.)

        maxweight = mdAngDist.aggregateSingle(md.AGGR_MAX, md.MDL_WEIGHT)
        minweight = mdAngDist.aggregateSingle(md.AGGR_MIN, md.MDL_WEIGHT)
        interval = maxweight - minweight

        self.angulardist = []
        x2 = self.xdim / 2
        y2 = self.ydim / 2
        z2 = self.zdim / 2
        #cofr does not seem to work!
        #self.angulardist.append('cofr %d,%d,%d'%(x2,y2,z2))
        for id in mdAngDist:
            rot = mdAngDist.getValue(angleRotLabel, id)
            tilt = mdAngDist.getValue(angleTiltLabel, id)
            psi = mdAngDist.getValue(anglePsiLabel, id) if anglePsiLabel else 0
            weight = mdAngDist.getValue(md.MDL_WEIGHT, id)
            weight = 0 if interval == 0 else (
                weight - minweight) / interval  # avoid zero division
            weight = weight + 0.5  # add 0.5 to avoid zero weight
            x, y, z = xmipp.Euler_direction(rot, tilt, psi)
            radius = weight * self.spheresMaxRadius

            x = x * self.spheresDistance + x2
            y = y * self.spheresDistance + y2
            z = z * self.spheresDistance + z2
            command = 'shape sphere radius %s center %s,%s,%s color %s ' % (
                radius, x, y, z, self.spheresColor)
            self.angulardist.append(command)
Example no. 23
def test_model_sky_background():
    """Function to test the fitting of a sky background model to a masked
    real star image."""

    setup = pipeline_setup.pipeline_setup({'red_dir': TEST_DIR})

    log = logs.start_stage_log(cwd, 'test_sky_background')

    reduction_metadata = metadata.MetaData()
    reduction_metadata.load_a_layer_from_file(setup.red_dir,
                                              'pyDANDIA_metadata.fits',
                                              'reduction_parameters')
    reduction_metadata.load_a_layer_from_file(setup.red_dir,
                                              'pyDANDIA_metadata.fits',
                                              'images_stats')

    log.info('Read metadata')

    # Need to check where these parameters come from
    reduction_metadata.reference_image_path = os.path.join(
        cwd, 'data', 'lsc1m005-fl15-20170701-0144-e91_cropped.fits')
    reduction_metadata.background_type = 'constant'

    ref_star_catalog_file = os.path.join(cwd, 'data', 'star_catalog.fits')

    ref_star_catalog = catalog_utils.read_ref_star_catalog_file(
        ref_star_catalog_file)

    log.info('Read reference image star catalog from ' + ref_star_catalog_file)

    sky_model = sky_background.model_sky_background(setup, reduction_metadata,
                                                    log, ref_star_catalog)

    log.info('Fit image sky background with '+\
            reduction_metadata.background_type+' model, parameters:')

    for key in sky_model.model:

        log.info(key + ' = ' +
                 str(getattr(sky_model.background_parameters, key)))

    logs.close_log(log)
Example no. 24
def test_add_column_to_layer():

    metad = metadata.MetaData()

    metad.create_a_new_layer(
        'dummy_layer',
        [['OHOHOH', 'IHIHIH'], ['S150', 'S10'], ['km/s', 'h/(2pi)']],
        [['0'], ['59']])

    new_column_name = 'LOL'
    new_column_data = [42]
    new_column_format = 'float64'
    new_column_unit = 'N/c'

    metad.add_column_to_layer('dummy_layer', new_column_name, new_column_data,
                              new_column_format, new_column_unit)

    assert metad.dummy_layer[1]['LOL'] == 42
    assert metad.dummy_layer[1]['LOL'].dtype == 'float64'
    assert metad.dummy_layer[1]['LOL'].unit == 'N/c'
Example no. 25
    def plotAngularDistributionFromMd(self, mdFile, title, **kwargs):
        """ Read the values of rot, tilt and weights from
        the metadata and plot the angular distribution.
        ANGLES are in DEGREES
        In the metadata:
            rot: MDL_ANGLE_ROT
            tilt: MDL_ANGLE_TILT
            weight: MDL_WEIGHT
        """

        angMd = md.MetaData(mdFile)
        rot = []
        tilt = []
        weight = []

        for row in md.iterRows(angMd):
            rot.append(radians(row.getValue(md.MDL_ANGLE_ROT)))
            tilt.append(row.getValue(md.MDL_ANGLE_TILT))
            weight.append(row.getValue(md.MDL_WEIGHT))
        return self.plotAngularDistribution(title, rot, tilt, weight, **kwargs)
Example no. 26
def test_parse_the_image_header():
    setup = mock.MagicMock()

    reduction_metadata = metadata.MetaData()

    pipeline_config = stage0.read_the_config_file('../../Config/', log=None)

    stage0.update_reduction_metadata_with_config_file(reduction_metadata, pipeline_config, log=None)
    inst_config_dictionnary = {'OBSTYPE': {"comment": "tres bon",
                                           "value": "OBJECT",
                                           "format": "S200",
                                           "unit": ""}}

    stage0.update_reduction_metadata_with_inst_config_file(reduction_metadata, inst_config_dictionnary, log=None)

    image = stage0.open_an_image(setup, '../tests/data/proc/ROME-FIELD-0002_lsc-doma-1m0-05-fl15_ip/data',
                                 'lsc1m005-fl15-20170418-0131-e91_cropped.fits')

    values = stage0.parse_the_image_header(reduction_metadata, image)

    assert values[0][1] == 'ROME-FIELD-02'
Example no. 27
def test_load_metadata_from_file():
    metad = metadata.MetaData()
    metad.load_all_metadata('./', 'dummy_metadata.fits')

    assert metad.stamps == [None, None]
    assert metad.reduction_parameters == [None, None]
    assert metad.headers_summary == [None, None]

    assert metad.data_inventory[1].keys() == [
        'IMAGES', 'STAGE_0', 'STAGE_1', 'STAGE_2', 'STAGE_3', 'STAGE_4',
        'STAGE_5', 'STAGE_6', 'STAGE_7'
    ]
    assert len(metad.data_inventory[1]) == 0

    assert metad.data_architecture[1].keys() == [
        'METADATA_NAME', 'OUTPUT_DIRECTORY'
    ]

    assert metad.data_architecture[1]['METADATA_NAME'] == 'dummy_metadata.fits'
    assert metad.data_architecture[1]['OUTPUT_DIRECTORY'] == './'

    assert len(metad.data_architecture[1]) == 1
Example no. 28
def test_create_a_new_layer():
    metad = metadata.MetaData()

    metad.create_a_new_layer(
        'dummy_layer',
        [['OHOHOH', 'IHIHIH'], ['S150', 'S10'], ['km/s', 'h/(2pi)']],
        [['0', '1'], ['59', '41']])

    new_layer = metad.dummy_layer

    assert new_layer[1].keys() == ['OHOHOH', 'IHIHIH']

    assert new_layer[1]['OHOHOH'].dtype == 'S150'
    assert new_layer[1]['IHIHIH'].dtype == 'S10'

    assert new_layer[1]['OHOHOH'].unit == 'km/s'
    assert new_layer[1]['IHIHIH'].unit == 'h/(2pi)'

    assert new_layer[1]['OHOHOH'][0] == '0'
    assert new_layer[1]['OHOHOH'][1] == '1'
    assert new_layer[1]['IHIHIH'][0] == '59'
    assert new_layer[1]['IHIHIH'][1] == '41'
Example no. 29
    def add_package(self, path, repo_uri):
        package = Package(path, 'r')
        # extract control files
        util.clean_dir(ctx.config.install_dir())
        package.extract_PISI_files(ctx.config.install_dir())

        md = metadata.MetaData()
        md.read(os.path.join(ctx.config.install_dir(), ctx.const.metadata_xml))
        md.package.packageSize = long(os.path.getsize(path))
        if ctx.config.options and ctx.config.options.absolute_uris:
            # FIXME: the name "absolute_uris" does not seem to fit below :/
            md.package.packageURI = os.path.realpath(path)
        else:                           # create relative path by default
            # TODO: in the future we'll do all of this with purl/pfile/&helpers
            # really? heheh -- future exa
            md.package.packageURI = util.removepathprefix(repo_uri, path)
        # check package semantics
        errs = md.errors()
        if md.errors():
            ctx.ui.error(_('Package %s: metadata corrupt, skipping...') % md.package.name)
            ctx.ui.error(unicode(Error(*errs)))
        else:
            self.packages.append(md.package)
Example no. 30
def test_run_iterative_PSF_photometry():
    """Function to test the PSF-fitting photometry module for a single image"""

    setup = pipeline_setup.pipeline_setup({'red_dir': TEST_DIR})

    log = logs.start_stage_log(cwd, 'test_photometry')

    reduction_metadata = metadata.MetaData()
    reduction_metadata.load_a_layer_from_file(setup.red_dir,
                                              'pyDANDIA_metadata.fits',
                                              'reduction_parameters')
    reduction_metadata.load_a_layer_from_file(setup.red_dir,
                                              'pyDANDIA_metadata.fits',
                                              'images_stats')

    log.info('Read metadata')

    # NOTE: Once stage 2 is complete, the reference image path should be
    # extracted directly from the metadata.
    reduction_metadata.reference_image_path = os.path.join(
        TEST_DIR, 'data', 'lsc1m005-fl15-20170418-0131-e91_cropped.fits')
    image_path = reduction_metadata.reference_image_path

    log.info('Performing PSF fitting photometry on ' +
             os.path.basename(image_path))

    phot_data = photometry.run_iterative_PSF_photometry(setup,
                                                        reduction_metadata,
                                                        image_path,
                                                        log,
                                                        diagnostics=True)

    test_output = Table()

    assert type(phot_data) == type(test_output)

    logs.close_log(log)