Code example #1
import glob
from os import path
# log_utils and review_dataset are assumed to be project-internal helpers
# imported at module level.

def search_invalid_data(params):

    log = log_utils.start_day_log(params, 'data_validator')

    # Each subdirectory of the top-level reduction directory is a candidate dataset
    red_dir_list = glob.glob(path.join(params['top_level_dir'], '*'))
    exclude_dirs = ['logs', 'config', 'downloads']

    for red_dir in red_dir_list:
        # glob returns full paths, so compare the basename against the exclusion list
        if path.isdir(red_dir) and path.basename(red_dir) not in exclude_dirs:
            review_dataset(red_dir, log)

    log_utils.close_log(log)
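
For reference, search_invalid_data only needs a params dictionary giving the top-level data directory plus whatever settings log_utils.start_day_log expects. A minimal, hypothetical invocation might look like the following; the paths and the 'log_dir' key are assumptions (based on the config used in Code example #5), not values from the actual pipeline.

params = {'top_level_dir': '/data/reductions',   # assumed location of the reduced datasets
          'log_dir': '/data/logs'}               # assumed key read by log_utils.start_day_log
search_invalid_data(params)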
Code example #2
import glob
from os import path

def count_image_data(params):

    log = log_utils.start_day_log(params, 'count_images')

    # Select dataset directories by their naming pattern
    datasets = glob.glob(
        path.join(params['top_dir'], '*_???-dom?-1m0-??-f???_?p'))

    nimages = 0
    log.info('# Dataset        N images')
    for subdir in datasets:
        # Count the FITS frames in each dataset's data/ subdirectory
        image_list = glob.glob(path.join(subdir, 'data', '*.fits'))
        log.info(path.basename(subdir) + ' ' + str(len(image_list)))
        nimages += len(image_list)
    log.info('Total number of images: ' + str(nimages))

    log_utils.close_log(log)
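
The wildcard pattern above encodes the expected dataset directory naming scheme (target name, then site, dome, 1m telescope, camera and filter codes). As a rough illustration, the invented directory name below matches the pattern; glob.glob applies the same fnmatch-style matching to real directory names.

from fnmatch import fnmatch

# The directory name is made up purely to illustrate the wildcard pattern
name = 'TARGET01_cpt-doma-1m0-05-fa15_ip'
print(fnmatch(name, '*_???-dom?-1m0-??-f???_?p'))   # True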
Code example #3
def search_archive_for_data(CONFIG_FILE):

    config = config_utils.get_config(CONFIG_FILE)

    log = log_utils.start_day_log(config, 'data_download')

    # Frames already downloaded in previous runs
    downloaded_frames = read_frame_list(config, log)

    # Time window to query the archive over
    (start_time, end_time) = set_date_range(config, log)

    new_frames = fetch_new_frames(config, start_time, end_time, log)

    downloaded_frames = download_new_frames(config, new_frames,
                                            downloaded_frames, log)

    # Record the updated list of downloaded frames
    framelist_utils.output_frame_list(config, downloaded_frames, log)

    log_utils.close_log(log)
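
The helpers called here (read_frame_list, set_date_range, fetch_new_frames, download_new_frames) are defined elsewhere in the project and are not shown in these examples. As one illustration, here is a minimal sketch of what set_date_range could look like, assuming the config simply holds a lookback window in days; the 'lookback_days' key and the one-day default are assumptions for illustration, not the project's actual implementation.

from datetime import datetime, timedelta

def set_date_range(config, log):
    # Hypothetical sketch: query the archive from 'lookback_days' ago up to now.
    # The real logic and config keys may differ.
    end_time = datetime.utcnow()
    start_time = end_time - timedelta(days=float(config.get('lookback_days', 1.0)))
    log.info('Searching for data taken between ' + start_time.isoformat() +
             ' and ' + end_time.isoformat())
    return (start_time, end_time)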
Code example #4
def prepare_data_for_reduction(CONFIG_FILE):

    config = config_utils.get_config(CONFIG_FILE)

    log = log_utils.start_day_log(config, 'data_preparation')

    compressed_frames = check_for_new_frames(config, log)

    if len(compressed_frames) > 0:

        decompressed_frames = decompress_new_frames(config, log, compressed_frames)

        #transform_frames(decompressed_frames, log)

        # Sort the decompressed frames into per-dataset directories
        sort_data.sort_data(config['data_download_dir'],
                            config['separate_instruments'], log=log)

        datasets = get_dataset_list(config, log)

        for dataset_dir in datasets:
            transfer_to_reduction_directory(config, log, dataset_dir)

    log_utils.close_log(log)
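
check_for_new_frames is likewise defined elsewhere. Here is a minimal sketch consistent with how its result is used above, assuming new frames arrive as fpack-compressed FITS files in config['data_download_dir']; the *.fits.fz extension and the simple glob of the download directory are assumptions for illustration only.

import glob
from os import path

def check_for_new_frames(config, log):
    # Hypothetical sketch: treat every compressed FITS file in the download
    # directory as a new frame awaiting preparation. The real selection
    # criteria may be more involved.
    compressed_frames = glob.glob(path.join(config['data_download_dir'], '*.fits.fz'))
    log.info('Found ' + str(len(compressed_frames)) + ' compressed frames to prepare')
    return compressed_frames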
Code example #5
def test_build_frame_list():

    proposal = 'KEY2020B-003'
    config = {'log_dir': '.', 'proposal_ids': '[KEY2020B-003, LCO2020A-002]'}
    log = log_utils.start_day_log(config, 'test_data_download')

    default_params = {'url': 'https://archive.lco.global',
                      'DATE_OBS': '2020-07-06 12:30:00',
                      'PROPID': 'KEY2020B-003',
                      'INSTRUME': 'fa12',
                      'OBJECT': 'TEST',
                      'SITEID': 'cpt',
                      'TELID': '1m003',
                      'EXPTIME': 30.0,
                      'FILTER': 'gp',
                      'REQNUM': 2168224}

    # frame1 is a reduced science exposure; frames 2-4 are calibration frames
    # (skyflat, dark and bias)
    frame1 = default_params.copy()
    frame1['filename'] = 'cpt1m003-fa12-20200607-0123-e91.fits'

    frame2 = default_params.copy()
    frame2['filename'] = 'cpt1m003-fa12-20200607-skyflat-bin2x2-B.fits'

    frame3 = default_params.copy()
    frame3['filename'] = 'cpt1m003-fa12-20200607-dark-bin1x1.fits'

    frame4 = default_params.copy()
    frame4['filename'] = 'cpt1m003-fa12-20200607-bias-bin1x1.fits'

    query_results = {'results': [ frame1, frame2, frame3, frame4 ]}

    new_frames = []

    new_frames = framelist_utils.build_frame_list(config, query_results,
                                                  proposal, new_frames, log)

    # Only the reduced science exposure (frame1) is expected to be selected
    assert len(new_frames) == 1

    log_utils.close_log(log)
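
For context, the assertion above expects only one of the four frames, the reduced science exposure, to be selected. Below is a minimal sketch of build_frame_list that would satisfy this test, assuming frames are kept when their proposal ID matches and their filename does not mark them as calibration data; the actual criteria in framelist_utils may well differ.

def build_frame_list(config, query_results, proposal, new_frames, log):
    # Hypothetical sketch consistent with the test above: keep frames that belong
    # to the requested proposal and are not calibration frames (skyflat, dark,
    # bias), skipping any frame already in the list.
    for frame in query_results['results']:
        filename = frame['filename']
        is_calibration = any(tag in filename for tag in ('skyflat', 'dark', 'bias'))
        if frame['PROPID'] == proposal and not is_calibration and frame not in new_frames:
            new_frames.append(frame)
            log.info('Queued ' + filename + ' for download')
    return new_frames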