Example #1

    # Imports (module paths assumed from the opus_core package layout).  The
    # option definitions below are reconstructed from the dest names used
    # later in the snippet; flag spellings and help text are assumptions.
    import sys
    from optparse import OptionParser
    from opus_core.logger import logger
    from opus_core.export_storage import ExportStorage
    from opus_core.simulation_state import SimulationState
    from opus_core.session_configuration import SessionConfiguration
    from opus_core.store.attribute_cache import AttributeCache
    from opus_core.store.hdf5_storage import hdf5_storage

    parser = OptionParser()
    parser.add_option('--hdf5_directory', dest='hdf5_directory', type='string',
        help='Directory containing the hdf5 file to import (required)')
    parser.add_option('--attribute_cache_directory', dest='attribute_cache_directory', type='string',
        help='Filesystem path of the attribute cache to write to (required)')
    parser.add_option('--table_name', dest='table_name', type='string',
        help='Name of the table to export into the cache (required)')
    parser.add_option('--cache_year', dest='cache_year', type='int',
        help='Year of the cache to which the table will be written (required)')
    (options, args) = parser.parse_args()

    directory = options.hdf5_directory
    attribute_cache_directory = options.attribute_cache_directory    
    table_name = options.table_name
    cache_year = options.cache_year
    
    if (directory is None or 
        attribute_cache_directory is None or 
        table_name is None or
        cache_year is None):
        
        parser.print_help()
        sys.exit(1)
        
    input_storage = hdf5_storage(storage_location = directory)
    
    attribute_cache = AttributeCache(cache_directory=attribute_cache_directory)
    output_storage = attribute_cache.get_flt_storage_for_year(cache_year)
    SimulationState().set_current_time(cache_year)
    SessionConfiguration(new_instance=True,
                         package_order=[],
                         in_storage=AttributeCache())
    
    logger.start_block("Exporting table '%s' to year %s of cache located at %s..." %
                   (table_name, cache_year, attribute_cache_directory))
    try:
        ExportStorage().export_dataset(
            dataset_name = table_name,
            in_storage = input_storage,
            out_storage = output_storage,
            )
    finally:
        logger.end_block()
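
As a quick sanity check, the exported table can be read straight back out of the year storage the snippet just wrote to. This is a minimal sketch meant to be appended after the try/finally block above; get_table_names() and load_table() are assumed to be the standard opus_core Storage interface methods.

    # Optional check (continuation of the snippet above): confirm the table is
    # now visible in the cache year that was written to.
    assert table_name in output_storage.get_table_names()
    data = output_storage.load_table(table_name)   # expected: dict of column name -> numpy array
    print('Exported columns:', sorted(data.keys()))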

Example #2

    # Imports (module paths assumed from the opus_core package layout).
    import os
    import sys
    from optparse import OptionParser
    from opus_core.export_storage import ExportStorage
    from opus_core.store.flt_storage import flt_storage
    from opus_core.store.hdf5_storage import hdf5_storage

    parser = OptionParser(description='Converts tables in Opus cache directory into hdf5 files. There is one file per table.')
    
    parser.add_option('-c', '--cache_path', dest='cache_path', type='string', 
        help='The filesystem path to the cache to export (required)')
    parser.add_option('-o', '--output_directory', dest='output_directory', 
        type='string', help='The filesystem path to which output will be written (required)')
    parser.add_option('-t', '--table_name', dest='table_name', 
        type='string', help='Name of table to be exported (optional). If not used, all tables are exported.')
    parser.add_option('--compression', dest='compression', type='string', default=None,
        help='Compression type. Available: gzip, lzf. Default is no compression.')
    (options, args) = parser.parse_args()
    
    cache_path = options.cache_path
    output_directory = options.output_directory
    table_name = options.table_name
    
    if None in (cache_path, output_directory):
        parser.print_help()
        sys.exit(1)

    in_storage = flt_storage(storage_location = cache_path)

    out_storage = hdf5_storage(storage_location = output_directory)
    
    if not os.path.exists(output_directory):
        os.makedirs(output_directory)

    if table_name is not None:
        ExportStorage().export_dataset(table_name, in_storage=in_storage, out_storage=out_storage, compression=options.compression)
    else:
        ExportStorage().export(in_storage=in_storage, out_storage=out_storage, compression=options.compression)
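
Because the script writes one hdf5 file per table, the result can be inspected directly with h5py. A minimal sketch, meant to run after the snippet above with a specific table_name given; the <table_name>.hdf5 file name and the one-dataset-per-column layout are assumptions about the exporter's output, not something the snippet itself guarantees.

    # Inspect an exported table with h5py (file name and internal layout assumed).
    import os
    import h5py

    path = os.path.join(output_directory, table_name + '.hdf5')
    with h5py.File(path, 'r') as f:
        for name, dataset in f.items():
            print(name, dataset.shape, dataset.dtype)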
Example #3

    # Same imports as Example #2.  The listing for this example starts mid-way
    # through the option definitions; the lines above '--compression' are
    # reconstructed from Example #2, which this example otherwise mirrors.
    parser = OptionParser(
        description='Converts tables in Opus cache directory into hdf5 files. There is one file per table.')
    parser.add_option('-c', '--cache_path', dest='cache_path', type='string',
        help='The filesystem path to the cache to export (required)')
    parser.add_option('-o', '--output_directory', dest='output_directory', type='string',
        help='The filesystem path to which output will be written (required)')
    parser.add_option('-t', '--table_name', dest='table_name', type='string',
        help='Name of table to be exported (optional). If not used, all tables are exported.')
    parser.add_option(
        '--compression',
        dest='compression',
        type='string',
        default=None,
        help='Compression type. Available: gzip, lzf. Default is no compression.')
    (options, args) = parser.parse_args()

    cache_path = options.cache_path
    output_directory = options.output_directory
    table_name = options.table_name

    if None in (cache_path, output_directory):
        parser.print_help()
        sys.exit(1)

    in_storage = flt_storage(storage_location=cache_path)

    out_storage = hdf5_storage(storage_location=output_directory)

    if not os.path.exists(output_directory):
        os.makedirs(output_directory)

    if table_name is not None:
        ExportStorage().export_dataset(table_name,
                                       in_storage=in_storage,
                                       out_storage=out_storage,
                                       compression=options.compression)
    else:
        ExportStorage().export(in_storage=in_storage,
                               out_storage=out_storage,
                               compression=options.compression)
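
The same conversion can also be driven programmatically, without OptionParser, by calling ExportStorage directly just as the snippets above do. A minimal sketch, assuming the opus_core import paths noted in the earlier examples; the paths below are placeholders.

    # Programmatic equivalent of Examples #2/#3 (paths are placeholders).
    from opus_core.export_storage import ExportStorage
    from opus_core.store.flt_storage import flt_storage
    from opus_core.store.hdf5_storage import hdf5_storage

    ExportStorage().export(
        in_storage=flt_storage(storage_location='/path/to/opus/cache/2000'),
        out_storage=hdf5_storage(storage_location='/path/to/hdf5/output'),
        compression='gzip')   # or 'lzf', or None for no compression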