Example #1
def test_logger_list_debug():
    mylog = logger.getChild('testing')
    logs = []
    log_to_list(mylog, logs)
    mylog.setLevel('DEBUG')
    mylog.warning('Warning test')
    mylog.error('Error test')
    mylog.info('Info test')
    mylog.debug('Debug test')
    check.equal(mylog.name, 'astropop.testing')
    check.equal(len(logs), 4)
Example #2
def test_logger_list_only_messagens(level, expected):
    mylog = logger.getChild('testing')
    logs = []
    log_to_list(mylog, logs, full_record=False)
    mylog.setLevel(level)
    mylog.error('Error test')
    mylog.warning('Warning test')
    mylog.info('Info test')
    mylog.debug('Debug test')
    check.equal(mylog.name, 'astropop.testing')
    check.equal(len(logs), expected)
    for i, k in zip(['Error test', 'Warning test', 'Info test',
                     'Debug test'][0:expected], logs):
        check.equal(i, k)
Example #3
    def test_logger_list_only_messagens(self, level, expected):
        mylog = logger.getChild('testing')
        logs = []
        log_to_list(mylog, logs, full_record=False)
        mylog.setLevel(level)
        mylog.error('Error test %i', 40)
        mylog.warning('Warning test %i', 30)
        mylog.info('Info test %i', 20)
        mylog.debug('Debug test %i', 10)
        assert_equal(mylog.name, 'astropop.testing')
        assert_equal(len(logs), expected)
        for i, k in zip(['Error test 40', 'Warning test 30', 'Info test 20',
                         'Debug test 10'][0:expected], logs):
            assert_equal(i, k)
Example #4
    def test_logger_list_defaults(self, level, expected):
        mylog = logger.getChild('testing')
        logs = []
        log_to_list(mylog, logs)
        mylog.setLevel(level)
        mylog.error('Error test')
        mylog.warning('Warning test')
        mylog.info('Info test')
        mylog.debug('Debug test')
        assert_equal(mylog.name, 'astropop.testing')
        assert_equal(len(logs), expected)
        for i, k in zip(['Error test', 'Warning test', 'Info test',
                         'Debug test'][0:expected], logs):
            assert_equal(i, k)
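
The `(level, expected)` pairs used by the two tests above (and by `test_logger_list_full_record` further down) come from a pytest parametrization that is not shown in these snippets. As a reference, here is a minimal, self-contained sketch of the assumed mapping from level name to number of captured messages, written against the standard library only; `_ListHandler` and `test_level_filtering` are illustrative names, not astropop's:

import logging

import pytest


class _ListHandler(logging.Handler):
    """Illustrative handler: append every accepted LogRecord to a list."""

    def __init__(self, records):
        super().__init__()
        self.records = records

    def emit(self, record):
        self.records.append(record)


@pytest.mark.parametrize('level, expected',
                         [('ERROR', 1), ('WARNING', 2),
                          ('INFO', 3), ('DEBUG', 4)])
def test_level_filtering(level, expected):
    records = []
    log = logging.getLogger('demo.testing')
    handler = _ListHandler(records)
    log.addHandler(handler)
    log.setLevel(level)
    log.error('Error test')
    log.warning('Warning test')
    log.info('Info test')
    log.debug('Debug test')
    log.removeHandler(handler)
    # only messages at or above the configured level are collected
    assert len(records) == expected
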
Example #5
    def test_image_loading_fitshdu(self, disk_cache):
        n = 10
        d = np.ones((10, 10))
        l = [fits.PrimaryHDU(d) for i in range(n)]

        logs = []
        lh = log_to_list(logger, logs, full_record=True)
        comb = ImCombiner(use_disk_cache=disk_cache)
        comb._load_images(l)

        # check that the expected log message is properly emitted.
        log = [
            i for i in logs if i.msg == 'The images to combine are not '
            'FrameData. Some features may be disabled.'
        ]
        assert_equal(len(log), 1)
        assert_equal(log[0].levelname, 'WARNING')
        logger.removeHandler(lh)

        assert_equal(len(comb._images), n)
        assert_is_none(comb._buffer)
        for i, v in enumerate(comb._images):
            assert_is_instance(v, FrameData)
            if disk_cache:
                assert_true(v._memmapping)

        comb._clear()
        # must be empty after clearing
        assert_equal(len(comb._images), 0)
        assert_is_none(comb._buffer)
Example #6
    def test_image_loading_fitsfile(self, tmpdir, disk_cache):
        tmp = tmpdir.strpath
        n = 10
        d = np.ones((10, 10))
        l = [os.path.join(tmp, f'fits_test{i}') for i in range(n)]
        for f in l:
            fits.PrimaryHDU(d).writeto(f)

        logs = []
        lh = log_to_list(logger, logs, full_record=True)
        comb = ImCombiner(use_disk_cache=disk_cache)
        comb._load_images(l)

        # check that the expected log message is properly emitted.
        log = [
            i for i in logs if i.msg == 'The images to combine are not '
            'FrameData. Some features may be disabled.'
        ]
        assert_equal(len(log), 1)
        assert_equal(log[0].levelname, 'WARNING')

        assert_equal(len(comb._images), n)
        assert_is_none(comb._buffer)
        for i, v in enumerate(comb._images):
            assert_is_instance(v, FrameData)
            if disk_cache:
                assert_true(v._memmapping)

        comb._clear()
        # must be empty after clearing
        assert_equal(len(comb._images), 0)
        assert_is_none(comb._buffer)
Example #7
def test_logger_list_full_record(level, expected):
    mylog = logger.getChild('testing')
    logs = []
    log_to_list(mylog, logs, full_record=True)
    mylog.setLevel(level)
    mylog.error('Error test')
    mylog.warning('Warning test')
    mylog.info('Info test')
    mylog.debug('Debug test')
    check.equal(mylog.name, 'astropop.testing')
    check.equal(len(logs), expected)
    for i, k, n in zip(['Error', 'Warning', 'Info', 'Debug'][0:expected], logs,
                       [40, 30, 20, 10][0:expected]):
        check.equal(f'{i} test', k.msg)
        check.equal(k.name, 'astropop.testing')
        check.equal(k.levelno, n)
        check.equal(k.levelname, i.upper())
Example #8
def test_logger_remove_handler():
    mylog = logger.getChild('testing')
    msg = 'Some error happened here.'
    logs = []
    lh = log_to_list(mylog, logs)
    mylog.setLevel('DEBUG')
    mylog.error(msg)
    check.is_instance(lh, ListHandler)
    check.is_in(lh, mylog.handlers)
    mylog.removeHandler(lh)
    check.is_not_in(lh, mylog.handlers)
    check.equal(logs[0], msg)
    check.equal(lh.log_list[0], msg)
    check.equal(lh.log_list, logs)
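
Example #8 is the only snippet that inspects the returned ListHandler directly: log_to_list attaches a handler to the logger, the handler keeps a reference to the supplied list in its log_list attribute, and the caller is responsible for logger.removeHandler(lh) afterwards. Judging only from the behaviour visible in these tests (this is an inference, not astropop's actual source), the helper plausibly boils down to something like:

import logging


class ListHandler(logging.Handler):
    """Sketch of a list-collecting handler, inferred from the tests above."""

    def __init__(self, log_list, full_record=False):
        super().__init__()
        self.log_list = log_list
        self.full_record = full_record

    def emit(self, record):
        if self.full_record:
            # keep the whole LogRecord (.msg, .levelno, .levelname, ...)
            self.log_list.append(record)
        else:
            # keep only the interpolated message string
            self.log_list.append(record.getMessage())


def log_to_list(logger, log_list, full_record=False):
    """Attach a ListHandler to `logger` and return it for later removal."""
    handler = ListHandler(log_list, full_record)
    logger.addHandler(handler)
    return handler

With full_record=False (the apparent default, since Examples #4 and #8 compare the stored entries to plain strings) the list holds message strings; with full_record=True (Examples #5-#7 and #12) it holds LogRecord objects whose msg, levelno and levelname are asserted on.
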
Example #9
    def test_logging_err(self, com):
        logl = []
        # stdout messages must not appear due to loglevel
        expect_log = ['this is an error']
        lh = log_to_list(logger, logl)
        stdout = []
        stderr = []
        _, out, err = run_command(com, stdout=stdout, stderr=stderr,
                                  stdout_loglevel='DEBUG',
                                  stderr_loglevel='ERROR')
        assert_is(out, stdout)
        assert_is(err, stderr)
        assert_equal(stdout, [])
        assert_equal(stderr, ['this is an error'])
        assert_equal(logl, expect_log)
        logger.removeHandler(lh)
Example #10
    def test_logging(self, com):
        logl = []
        logcmd = com if isinstance(com, list) else shlex.split(com)
        logcmd = " ".join(logcmd)
        expect_log = [f"Runing: {logcmd}"]
        expect_log += list(range(1, 11))
        expect_log += [f"Done with process: {logcmd}"]

        lh = log_to_list(logger, logl)
        stdout = []
        stderr = []
        _, out, err = run_command(com, stdout=stdout, stderr=stderr,
                                  stdout_loglevel='WARN')
        assert_is(out, stdout)
        assert_is(err, stderr)
        assert_equal(stdout, [str(i) for i in range(1, 11)])
        assert_equal(stderr, [])
        assert_equal(logl, expect_log)
        logger.removeHandler(lh)
Example #11
    def test_chunk_yielder_uncertainty(self):
        n = 100
        d = np.random.random((100, 100)).astype(np.float64)
        u = np.random.random((100, 100)).astype(np.float64)
        l = [FrameData(d, uncertainty=u, unit='adu') for i in range(n)]

        # simple sum with uncertainties
        comb = ImCombiner(max_memory=2e6, dtype=np.float64)
        comb._load_images(l)
        i = 0
        for chunk, unct, slc in comb._chunk_yielder(method='sum'):
            i += 1
            for k, un in zip(chunk, unct):
                assert_in(k.shape, ((7, 100), (2, 100)))
                assert_almost_equal(k, d[slc])
                assert_almost_equal(un, u[slc])
                assert_is_instance(un, np.ma.MaskedArray)
        assert_equal(i, 15)

        # if a single frame has an empty uncertainty, uncertainties are disabled
        logs = []
        lh = log_to_list(logger, logs, False)
        level = logger.getEffectiveLevel()
        logger.setLevel('DEBUG')

        l[5].uncertainty = None
        comb = ImCombiner(max_memory=2e6, dtype=np.float64)
        comb._load_images(l)
        i = 0
        for chunk, unct, slc in comb._chunk_yielder(method='sum'):
            i += 1
            for k in chunk:
                assert_in(k.shape, ((7, 100), (2, 100)))
                assert_almost_equal(k, d[slc])
                assert_equal(unct, None)
        assert_equal(i, 15)
        assert_in(
            'One or more frames have empty uncertainty. '
            'Some features are disabled.', logs)
        logs.clear()
        logger.setLevel(level)
        logger.removeHandler(lh)
Example #12
    def test_data_in_other_hdu(self, tmpdir):
        tbl = Table(np.ones(10).reshape(5, 2))
        data = 100 * np.ones(self.shape)
        hdul = fits.HDUList(hdus=[
            fits.PrimaryHDU(),
            fits.TableHDU(tbl.as_array()),
            fits.ImageHDU(data)
        ])
        fname = tmpdir.join('test_table.fits').strpath
        hdul.writeto(fname)

        logs = []
        lh = log_to_list(logger, logs, full_record=True)
        f = _extract_fits(fname)
        assert_equal(f['data'], 100 * np.ones(self.shape))
        assert_equal(f['unit'], None)

        # ensure the expected log message was emitted
        logs = [i for i in logs if i.message == 'First hdu with image data: 2']
        assert_equal(len(logs), 1)
        assert_equal(logs[0].levelname, 'INFO')

        logger.removeHandler(lh)
Example #13
    def test_chunk_yielder_f32(self):
        # using float32, the number of chunks is almost halved
        n = 100
        d = np.random.random((100, 100)).astype(np.float64)
        l = [FrameData(d, unit='adu') for i in range(n)]
        # data size = 4 000 000 = 4 bytes * 100 * 100 * 100
        # mask size = 1 000 000 = 1 bytes * 100 * 100 * 100
        # total size = 5 000 000

        comb = ImCombiner(max_memory=1e6, dtype=np.float32)
        comb._load_images(l)

        logs = []
        lh = log_to_list(logger, logs, False)
        level = logger.getEffectiveLevel()
        logger.setLevel('DEBUG')

        # for median, tot_size=5*4.5=22.5
        # xstep = 4, so n_chunks=25
        i = 0
        for chunk, unct, slc in comb._chunk_yielder(method='median'):
            i += 1
            for k in chunk:
                assert_equal(k.shape, (4, 100))
                assert_almost_equal(k, d[slc])
                assert_is_none(unct)
                assert_is_instance(k, np.ma.MaskedArray)
        assert_equal(i, 25)
        assert_in('Splitting the images into 25 chunks.', logs)
        logs.clear()

        # for mean and sum, tot_size=5*3=15
        # xstep = 6, so n_chunks=16+1
        i = 0
        for chunk, unct, slc in comb._chunk_yielder(method='mean'):
            i += 1
            for k in chunk:
                assert_in(k.shape, [(6, 100), (4, 100)])
                assert_almost_equal(k, d[slc])
                assert_is_none(unct)
                assert_is_instance(k, np.ma.MaskedArray)
        assert_equal(i, 17)
        assert_in('Splitting the images into 17 chunks.', logs)
        logs.clear()

        i = 0
        for chunk, unct, slc in comb._chunk_yielder(method='sum'):
            i += 1
            for k in chunk:
                assert_in(k.shape, [(6, 100), (4, 100)])
                assert_almost_equal(k, d[slc])
                assert_is_none(unct)
                assert_is_instance(k, np.ma.MaskedArray)
        assert_equal(i, 17)
        assert_in('Splitting the images into 17 chunks.', logs)
        logs.clear()

        # this should not split into chunks
        comb = ImCombiner(max_memory=1e8, dtype=np.float32)
        comb._load_images(l)
        i = 0
        for chunk, unct, slc in comb._chunk_yielder(method='median'):
            i += 1
            for k in chunk:
                assert_equal(k.shape, (100, 100))
                assert_almost_equal(k, d)
                assert_is_none(unct)
                assert_is_instance(k, np.ma.MaskedArray)
        assert_equal(i, 1)
        assert_equal(len(logs), 0)
        logs.clear()

        # this should split into 300 chunks!
        # total_size = 4.5*5e6 = 22.5e6 -> at least 225 chunks
        # x_step = 1, y_step = 45: 3 column blocks per row * 100 rows = 300 chunks
        comb = ImCombiner(max_memory=1e5, dtype=np.float32)
        comb._load_images(l)
        i = 0
        for chunk, unct, slc in comb._chunk_yielder(method='median'):
            i += 1
            for k in chunk:
                assert_in(k.shape, ((1, 45), (1, 10)))
                assert_almost_equal(k, d[slc])
                assert_is_none(unct)
                assert_is_instance(k, np.ma.MaskedArray)
        assert_equal(i, 300)
        assert_in('Splitting the images into 300 chunks.', logs)
        logs.clear()

        logger.setLevel(level)
        logger.removeHandler(lh)
Example #14
    def test_chunk_yielder_f64(self):
        n = 100
        d = np.random.random((100, 100)).astype(np.float64)
        l = [FrameData(d, unit='adu') for i in range(n)]
        # data size = 8 000 000 = 8 bytes * 100 * 100 * 100
        # mask size = 1 000 000 = 1 bytes * 100 * 100 * 100
        # total size = 9 000 000

        comb = ImCombiner(max_memory=1e6, dtype=np.float64)
        comb._load_images(l)

        logs = []
        lh = log_to_list(logger, logs, False)
        level = logger.getEffectiveLevel()
        logger.setLevel('DEBUG')

        # for median, tot_size=9*4.5=40.5
        # xstep = 2, so n_chunks=50
        i = 0
        for chunk, unct, slc in comb._chunk_yielder(method='median'):
            i += 1
            for k in chunk:
                assert_equal(k.shape, (2, 100))
                assert_equal(k, d[slc])
                assert_is_none(unct)
                assert_is_instance(k, np.ma.MaskedArray)
        assert_equal(i, 50)
        assert_in('Splitting the images into 50 chunks.', logs)
        logs.clear()

        # for mean and sum, tot_size=9*3=27
        # xstep = 3, so n_chunks=33+1
        i = 0
        for chunk, unct, slc in comb._chunk_yielder(method='mean'):
            i += 1
            for k in chunk:
                assert_in(k.shape, [(3, 100), (1, 100)])
                assert_equal(k, d[slc])
                assert_is_none(unct)
                assert_is_instance(k, np.ma.MaskedArray)
        assert_equal(i, 34)
        assert_in('Splitting the images into 34 chunks.', logs)
        logs.clear()

        i = 0
        for chunk, unct, slc in comb._chunk_yielder(method='sum'):
            i += 1
            for k in chunk:
                assert_in(k.shape, [(3, 100), (1, 100)])
                assert_equal(k, d[slc])
                assert_is_none(unct)
                assert_is_instance(k, np.ma.MaskedArray)
        assert_equal(i, 34)
        assert_in('Splitting the images into 34 chunks.', logs)
        logs.clear()

        # this should not split into chunks
        comb = ImCombiner(max_memory=1e8)
        comb._load_images(l)
        i = 0
        for chunk, unct, slc in comb._chunk_yielder(method='median'):
            i += 1
            for k in chunk:
                assert_equal(k.shape, (100, 100))
                assert_equal(k, d)
                assert_is_none(unct)
                assert_is_instance(k, np.ma.MaskedArray)
        assert_equal(i, 1)
        assert_equal(len(logs), 0)
        logs.clear()

        # this should split into 400 chunks!
        comb = ImCombiner(max_memory=1e5)
        comb._load_images(l)
        i = 0
        for chunk, unct, slc in comb._chunk_yielder(method='median'):
            i += 1
            for k in chunk:
                assert_equal(k.shape, (1, 25))
                assert_equal(k, d[slc])
                assert_is_none(unct)
                assert_is_instance(k, np.ma.MaskedArray)
        assert_equal(i, 400)
        assert_in('Splitting the images into 400 chunks.', logs)
        logs.clear()

        logger.setLevel(level)
        logger.removeHandler(lh)
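
Several of the combiner tests above repeat the same bookkeeping: remember the logger's effective level, attach the list handler, lower the level to DEBUG, run the code under test, then restore the level and remove the handler. One way to package that pattern is a context manager; the following is a self-contained sketch using only the standard library (captured_logs and _ListHandler are hypothetical helpers, not part of astropop):

import logging
from contextlib import contextmanager


class _ListHandler(logging.Handler):
    """Illustrative handler that appends accepted records to a list."""

    def __init__(self, records):
        super().__init__()
        self.records = records

    def emit(self, record):
        self.records.append(record)


@contextmanager
def captured_logs(logger, level='DEBUG'):
    """Collect records emitted by `logger` while the block runs, then clean up."""
    records = []
    handler = _ListHandler(records)
    old_level = logger.getEffectiveLevel()
    logger.addHandler(handler)
    logger.setLevel(level)
    try:
        yield records
    finally:
        logger.setLevel(old_level)
        logger.removeHandler(handler)


# usage, mirroring the chunking tests above
log = logging.getLogger('demo')
with captured_logs(log) as records:
    log.debug('Splitting the images into 25 chunks.')
assert records[0].getMessage() == 'Splitting the images into 25 chunks.'
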