Code example #1
import numpy as np
from astropy.io.misc.hdf5 import read_table_hdf5
from skypy.pipeline import Pipeline
# Call and Ref are SkyPy's internal config items; the exact module path
# (assumed here to be skypy.pipeline._items) may differ between versions.
from skypy.pipeline._items import Call, Ref


def test_hdf5():
    size = 100
    string = size * 'a'
    # column2 depends on column1 via a Ref; column3 is a plain list of characters.
    config = {'tables': {
              'test_table': {
                'column1': Call(np.random.uniform, [], {
                  'size': size}),
                'column2': Call(np.random.uniform, [], {
                  'low': Ref('test_table.column1')}),
                'column3': Call(list, [string], {})}}}

    pipeline = Pipeline(config)
    pipeline.execute()
    pipeline.write('output.hdf5')
    # Read the table back from the HDF5 file and check it round-trips unchanged.
    hdf_table = read_table_hdf5('output.hdf5', 'tables/test_table', character_as_bytes=False)
    assert np.all(hdf_table == pipeline['test_table'])
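
For comparison, the same table can presumably also be declared with the tuple and '$' reference shorthand used in test_pipeline (Code example #3 below) instead of explicit Call/Ref objects; a minimal sketch, with a hypothetical output file name:

import numpy as np
from skypy.pipeline import Pipeline

size = 100
string = size * 'a'
# Same table as above, written with (function, kwargs) tuples and a '$' reference
# instead of explicit Call/Ref objects (see Code example #3 below).
config = {'tables': {
          'test_table': {
            'column1': (np.random.uniform, {'size': size}),
            'column2': (np.random.uniform, {'low': '$test_table.column1'}),
            'column3': (list, [string])}}}

pipeline = Pipeline(config)
pipeline.execute()
pipeline.write('output_shorthand.hdf5')  # hypothetical file name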
Code example #2
import argparse
import logging
import sys

from skypy import __version__ as skypy_version
from skypy.pipeline import Pipeline, load_skypy_yaml


def main(args=None):

    parser = argparse.ArgumentParser(description="SkyPy pipeline driver")
    parser.add_argument('--version', action='version', version=skypy_version)
    parser.add_argument('config', help='Config file name')
    parser.add_argument('output', help='Output file name')
    parser.add_argument('-o', '--overwrite', action='store_true',
                        help='Whether to overwrite existing files')
    parser.add_argument("-v", "--verbose", action="count", default=0,
                        help="Increase logging verbosity")
    parser.add_argument("-q", "--quiet", action="count", default=0,
                        help="Decrease logging verbosity")

    # Get system args if none were passed; fall back to '--help' if empty.
    if args is None:
        args = sys.argv[1:]

    args = parser.parse_args(args or ['--help'])

    # Set up the skypy logger: each -v lowers the threshold by one level,
    # each -q raises it, starting from WARNING.
    default_level = logging.WARNING
    logging_level = default_level + 10 * (args.quiet - args.verbose)
    formatter = logging.Formatter('%(asctime)s [%(levelname)s] %(name)s: %(message)s')
    stream_handler = logging.StreamHandler()
    stream_handler.setLevel(logging_level)
    stream_handler.setFormatter(formatter)
    logger = logging.getLogger('skypy')
    logger.setLevel(logging_level)
    logger.addHandler(stream_handler)

    try:
        config = load_skypy_yaml(args.config)
        pipeline = Pipeline(config)
        pipeline.execute()
        if args.output:
            logger.info(f"Writing {args.output}")
            pipeline.write(args.output, overwrite=args.overwrite)
    except Exception as e:
        logger.exception(e)
        raise SystemExit(2) from e

    return 0
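
The driver can also be invoked in-process by passing an argument list directly to main(); a minimal sketch, where the config and output file names are hypothetical:

# Hypothetical in-process invocation; equivalent to running
#   skypy example.yml example.fits --overwrite -v
# on the command line (file names are placeholders).
if __name__ == '__main__':
    raise SystemExit(main(['example.yml', 'example.fits', '--overwrite', '-v']))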
Code example #3
File: test_pipeline.py  Project: rrjbca/skypy
import networkx
import numpy as np
import pytest
from astropy.cosmology import default_cosmology
from astropy.io import fits
from astropy.table import Table
from skypy.pipeline import Pipeline


def test_pipeline():

    # Evaluate and store the default astropy cosmology.
    config = {'test_cosmology': (default_cosmology.get, )}

    pipeline = Pipeline(config)
    pipeline.execute()
    assert pipeline['test_cosmology'] == default_cosmology.get()

    # Generate a simple two-column table with a dependency. Also write the
    # table to a FITS file and check its contents.
    size = 100
    string = size * 'a'
    config = {
        'tables': {
            'test_table': {
                'column1': (np.random.uniform, {
                    'size': size
                }),
                'column2': (np.random.uniform, {
                    'low': '$test_table.column1'
                }),
                'column3': (list, [string])
            }
        }
    }

    pipeline = Pipeline(config)
    pipeline.execute()
    pipeline.write(file_format='fits')
    assert len(pipeline['test_table']) == size
    assert np.all(
        pipeline['test_table.column1'] < pipeline['test_table.column2'])
    with fits.open('test_table.fits') as hdu:
        assert np.all(Table(hdu[1].data) == pipeline['test_table'])

    # Check for failure if output files already exist and overwrite is False
    pipeline = Pipeline(config)
    pipeline.execute()
    with pytest.raises(OSError):
        pipeline.write(file_format='fits', overwrite=False)

    # Check that the existing output files are modified if overwrite is True
    new_size = 2 * size
    new_string = new_size * 'a'
    config['tables']['test_table']['column1'][1]['size'] = new_size
    config['tables']['test_table']['column3'][1][0] = new_string
    pipeline = Pipeline(config)
    pipeline.execute()
    pipeline.write(file_format='fits', overwrite=True)
    with fits.open('test_table.fits') as hdu:
        assert len(hdu[1].data) == new_size

    # Check for failure if 'column1' requires itself, creating a cyclic
    # dependency graph.
    config['tables']['test_table']['column1'] = (list, '$test_table.column1')
    with pytest.raises(networkx.NetworkXUnfeasible):
        Pipeline(config).execute()

    # Check for failure if 'column1' and 'column2' both require each other,
    # creating a cyclic dependency graph.
    config['tables']['test_table']['column1'] = (list, '$test_table.column2')
    with pytest.raises(networkx.NetworkXUnfeasible):
        Pipeline(config).execute()

    # Check for failure if 'column1' is removed from the config so that the
    # requirements for 'column2' are not satisfied.
    del config['tables']['test_table']['column1']
    with pytest.raises(KeyError):
        Pipeline(config).execute()

    # Check variables initialised by value
    config = {
        'test_int': 1,
        'test_float': 1.0,
        'test_string': 'hello world',
        'test_list': [0, 'one', 2.],
        'test_dict': {
            'a': 'b'
        }
    }
    pipeline = Pipeline(config)
    pipeline.execute()
    assert isinstance(pipeline['test_int'], int)
    assert isinstance(pipeline['test_float'], float)
    assert isinstance(pipeline['test_string'], str)
    assert isinstance(pipeline['test_list'], list)
    assert isinstance(pipeline['test_dict'], dict)
    assert pipeline['test_int'] == 1
    assert pipeline['test_float'] == 1.0
    assert pipeline['test_string'] == 'hello world'
    assert pipeline['test_list'] == [0, 'one', 2.]
    assert pipeline['test_dict'] == {'a': 'b'}

    # Check variables initialised by function
    config = {
        'test_func': (list, 'hello world'),
        'len_of_test_func': (len, '$test_func'),
        'nested_references': (sum, [['$test_func', [' '], '$test_func'], []]),
        'nested_functions': (list, (range, (len, '$test_func')))
    }
    pipeline = Pipeline(config)
    pipeline.execute()
    assert pipeline['test_func'] == list('hello world')
    assert pipeline['len_of_test_func'] == len('hello world')
    assert pipeline['nested_references'] == list('hello world hello world')
    assert pipeline['nested_functions'] == list(range(len('hello world')))

    # Check parameter initialisation
    config = {'parameters': {'param1': 1.0}}
    pipeline = Pipeline(config)
    pipeline.execute()
    assert pipeline['param1'] == 1.0

    # Update parameter and re-run
    new_parameters = {'param1': 5.0}
    pipeline.execute(parameters=new_parameters)
    assert pipeline['param1'] == new_parameters['param1']
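
As a closing sketch (not part of the test file), the parameter-update mechanism above can presumably be combined with the '$' reference syntax so that dependent entries are recomputed when a parameter changes; this assumes parameters can be referenced like any other config entry, and the key names are hypothetical:

from skypy.pipeline import Pipeline

# Hypothetical config: 'scale' is a parameter; 'values' refers to it with a '$' reference.
config = {
    'parameters': {'scale': 2},
    'values': (list, (range, '$scale')),
}
pipeline = Pipeline(config)
pipeline.execute()
assert pipeline['values'] == [0, 1]

# Re-running with a new parameter value should update the dependent entry.
pipeline.execute(parameters={'scale': 4})
assert pipeline['values'] == [0, 1, 2, 3]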