Example #1
def test_load_skypy_yaml():

    # Read empty config file
    filename = get_pkg_data_filename('data/empty_config.yml')
    assert load_skypy_yaml(filename) == {}

    # Read config file and check entries are parsed to the correct types
    filename = get_pkg_data_filename('data/test_config.yml')
    config = load_skypy_yaml(filename)
    assert isinstance(config['test_int'], int)
    assert isinstance(config['test_float'], float)
    assert isinstance(config['test_str'], str)
    assert isinstance(config['test_func'], Call)
    assert isinstance(config['test_func_with_arg'], Call)
    assert isinstance(config['test_object'], Cosmology)
    assert isinstance(config['cosmology'], Call)
    assert isinstance(config['tables']['test_table_1']['test_column_3'], Call)

    # Bad function
    filename = get_pkg_data_filename('data/bad_function.yml')
    with pytest.raises(ImportError):
        load_skypy_yaml(filename)

    # Bad module
    filename = get_pkg_data_filename('data/bad_module.yml')
    with pytest.raises(ImportError):
        load_skypy_yaml(filename)

    # Bad object
    filename = get_pkg_data_filename('data/bad_object.yml')
    with pytest.raises(ValueError):
        load_skypy_yaml(filename)
Example #2
def main(args=None):

    parser = argparse.ArgumentParser(description="SkyPy pipeline driver")
    parser.add_argument('--version', action='version', version=skypy_version)
    parser.add_argument('config', help='Config file name')
    parser.add_argument('-f',
                        '--format',
                        required=False,
                        choices=['fits', 'hdf5'],
                        help='Table file format')
    parser.add_argument('-o',
                        '--overwrite',
                        action='store_true',
                        help='Whether to overwrite existing files')

    # get system args if none passed
    if args is None:
        args = sys.argv[1:]

    args = parser.parse_args(args or ['--help'])
    config = load_skypy_yaml(args.config)

    if 'lightcone' in config:
        pipeline = Lightcone(config)
    else:
        pipeline = Pipeline(config)
    pipeline.execute()
    pipeline.write(file_format=args.format, overwrite=args.overwrite)
    return 0
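
As a usage note, the parser above takes a config file plus optional --format and --overwrite flags. A minimal sketch of driving this main() programmatically, mirroring how the tests below call it ('my_config.yml' is a hypothetical file name used only for illustration):

# Hedged usage sketch: 'my_config.yml' is hypothetical; main() is the driver
# defined above (referenced as skypy.main in the test examples further down).
exit_code = main(['my_config.yml', '--format', 'fits', '--overwrite'])  # returns 0 on success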
Example #3
def test_yaml_quantities():
    # config with quantities
    filename = get_pkg_data_filename('data/quantities.yml')
    config = load_skypy_yaml(filename)

    assert config['42_km'] == units.Quantity('42 km')
    assert config['1_deg2'] == units.Quantity('1 deg2')
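
The equality checks above rely on astropy's unit-aware Quantity comparison. The same comparisons can be reproduced directly with astropy.units, independent of the YAML file:

from astropy import units

# 'deg2' parses as deg**2, so both comparisons are unit-aware equalities.
assert units.Quantity('42 km') == 42 * units.km
assert units.Quantity('1 deg2') == 1 * units.deg**2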
Example #4
def test_logging(capsys, tmp_path):

    # Run skypy with default verbosity and check log is empty
    config_filename = get_pkg_data_filename('data/test_config.yml')
    output_filename = str(tmp_path / 'logging.fits')
    skypy.main([config_filename, output_filename])
    out, err = capsys.readouterr()
    assert (not err)

    # Run again with increased verbosity and capture log. Force an exception by
    # not using the "--overwrite" flag when the output file already exists.
    with pytest.raises(SystemExit):
        skypy.main([config_filename, output_filename, '--verbose'])
    out, err = capsys.readouterr()

    # Determine all DAG jobs and function calls from config
    config = load_skypy_yaml(config_filename)
    cosmology = config.pop('cosmology', None)
    tables = config.pop('tables', {})
    config.update({k: v.pop('.init', Call(Table)) for k, v in tables.items()})
    columns = [f'{t}.{c}' for t, cols in tables.items() for c in cols]
    functions = [f for f in config.values() if isinstance(f, Call)]
    functions += [
        f for t, cols in tables.items() for f in cols.values()
        if isinstance(f, Call)
    ]

    # Check all jobs appear in the log
    for job in list(config) + list(tables) + columns:
        log_string = f"[INFO] skypy.pipeline: Generating {job}"
        assert (log_string in err)

    # Check all functions appear in the log
    for f in functions:
        log_string = f"[INFO] skypy.pipeline: Calling {f.function.__name__}"
        assert (log_string in err)

    # Check cosmology appears in the log
    if cosmology:
        assert ("[INFO] skypy.pipeline: Setting cosmology" in err)

    # Check writing output file is in the log
    assert (f"[INFO] skypy: Writing {output_filename}" in err)

    # Check error for existing output file is in the log
    try:
        # New error message introduced in astropy PR #12179
        from astropy.utils.misc import NOT_OVERWRITING_MSG
        error_string = NOT_OVERWRITING_MSG.format(output_filename)
    except ImportError:
        # Fallback on old error message from astropy v4.x
        error_string = f"[ERROR] skypy: File {output_filename!r} already exists."
    assert (error_string in err)

    # Run again with decreased verbosity and check the log is empty
    with pytest.raises(SystemExit):
        skypy.main([config_filename, output_filename, '-qq'])
    out, err = capsys.readouterr()
    assert (not err)
Example #5
def test_logging(capsys):

    # Run skypy with default verbosity and check log is empty
    filename = get_pkg_data_filename('data/test_config.yml')
    output_file = 'logging.fits'
    skypy.main([filename, output_file])
    out, err = capsys.readouterr()
    assert (not err)

    # Run again with increased verbosity and capture log. Force an exception by
    # not using the "--overwrite" flag when the output file already exists.
    with pytest.raises(SystemExit):
        skypy.main([filename, output_file, '--verbose'])
    out, err = capsys.readouterr()

    # Determine all DAG jobs and function calls from config
    config = load_skypy_yaml(filename)
    cosmology = config.pop('cosmology', None)
    tables = config.pop('tables', {})
    config.update({k: v.pop('.init', Call(Table)) for k, v in tables.items()})
    columns = [f'{t}.{c}' for t, cols in tables.items() for c in cols]
    functions = [f for f in config.values() if isinstance(f, Call)]
    functions += [
        f for t, cols in tables.items() for f in cols.values()
        if isinstance(f, Call)
    ]

    # Check all jobs appear in the log
    for job in list(config) + list(tables) + columns:
        log_string = f"[INFO] skypy.pipeline: Generating {job}"
        assert (log_string in err)

    # Check all functions appear in the log
    for f in functions:
        log_string = f"[INFO] skypy.pipeline: Calling {f.function.__name__}"
        assert (log_string in err)

    # Check cosmology appears in the log
    if cosmology:
        assert ("[INFO] skypy.pipeline: Setting cosmology" in err)

    # Check writing output file is in the log
    assert (f"[INFO] skypy: Writing {output_file}" in err)

    # Check error for existing output file is in the log
    assert (f"[ERROR] skypy: File '{output_file}' already exists." in err)

    # Run again with decreased verbosity and check the log is empty
    with pytest.raises(SystemExit):
        skypy.main([filename, output_file, '-qq'])
    out, err = capsys.readouterr()
    assert (not err)
Example #6
def main(args=None):

    parser = argparse.ArgumentParser(description="SkyPy pipeline driver")
    parser.add_argument('--version', action='version', version=skypy_version)
    parser.add_argument('config', help='Config file name')
    parser.add_argument('output', help='Output file name')
    parser.add_argument('-o', '--overwrite', action='store_true',
                        help='Whether to overwrite existing files')
    parser.add_argument("-v", "--verbose", action="count", default=0,
                        help="Increase logging verbosity")
    parser.add_argument("-q", "--quiet", action="count", default=0,
                        help="Decrease logging verbosity")

    # get system args if none passed
    if args is None:
        args = sys.argv[1:]

    args = parser.parse_args(args or ['--help'])

    # Setup skypy logger
    default_level = logging._nameToLevel['WARNING']
    logging_level = default_level + 10 * (args.quiet - args.verbose)
    formatter = logging.Formatter('%(asctime)s [%(levelname)s] %(name)s: %(message)s')
    stream_handler = logging.StreamHandler()
    stream_handler.setLevel(logging_level)
    stream_handler.setFormatter(formatter)
    logger = logging.getLogger('skypy')
    logger.setLevel(logging_level)
    logger.addHandler(stream_handler)

    try:
        config = load_skypy_yaml(args.config)
        pipeline = Pipeline(config)
        pipeline.execute()
        if args.output:
            logger.info(f"Writing {args.output}")
            pipeline.write(args.output, overwrite=args.overwrite)
    except Exception as e:
        logger.exception(e)
        raise SystemExit(2) from e

    return 0
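
For reference, the verbosity handling above adjusts the logging threshold in steps of 10 around WARNING (the private logging._nameToLevel['WARNING'] lookup is equivalent to the public logging.WARNING constant). A quick check of that mapping:

import logging

# Default threshold is WARNING (30); each --verbose subtracts 10, each --quiet adds 10.
assert logging.WARNING - 10 == logging.INFO   # one -v lets INFO messages through
assert logging.WARNING + 10 == logging.ERROR  # one -q suppresses WARNING, keeps ERROR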
Example #7
def test_keys_must_be_strings(config):
    filename = get_pkg_data_filename(f'data/{config}.yml')
    with pytest.raises(ValueError, match='key ".*" is of non-string type ".*"'):
        load_skypy_yaml(filename)
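
The config argument here is presumably supplied by a pytest parametrization over several invalid config files. A hedged sketch of that pattern, with a hypothetical parameter value:

import pytest

# Hypothetical parametrization; the real config file names are not shown in this
# excerpt, and get_pkg_data_filename / load_skypy_yaml are the same helpers used above.
@pytest.mark.parametrize('config', ['non_string_keys'])
def test_keys_must_be_strings(config):
    filename = get_pkg_data_filename(f'data/{config}.yml')
    with pytest.raises(ValueError, match='key ".*" is of non-string type ".*"'):
        load_skypy_yaml(filename)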
Example #8
def test_empty_ref():
    filename = get_pkg_data_filename('data/test_empty_ref.yml')
    with pytest.raises(ValueError, match='empty reference'):
        load_skypy_yaml(filename)