def apm_generate_aperture_map():
    r"""
    Driver function to generate an aperture map from a TIF image.
    """
    # parsing commandline args
    namespace = parser.parse_args()
    if namespace.verbose:
        set_main_logger_level('debug')

    # setting a default map name if one was not provided
    if not namespace.aperture_map_name:
        map_name = os.path.basename(namespace.image_file)
        map_name = os.path.splitext(map_name)[0] + '-aperture-map.txt'
        namespace.aperture_map_name = map_name

    # checking the output path to prevent accidental overwriting
    map_path = os.path.join(namespace.output_dir, namespace.aperture_map_name)
    if os.path.exists(map_path) and not namespace.force:
        msg = '{} already exists, use "-f" option to overwrite'
        raise FileExistsError(msg.format(map_path))

    # loading image data
    data_array = load_image_data(namespace.image_file, namespace.invert)
    data_array = data_array.astype(sp.int8)

    # summing data array down into a 2-D map
    logger.info('creating 2-D aperture map...')
    aperture_map = sp.sum(data_array, axis=1, dtype=int)

    # saving map
    logger.info('saving aperture map as {}'.format(map_path))
    sp.savetxt(map_path, aperture_map.T, fmt='%d', delimiter='\t')
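
# A minimal sketch of the summation step above: for a binary stack, summing
# along axis 1 counts the open voxels in each column (values hypothetical):
#
#   >>> import numpy as np
#   >>> stack = np.array([[[1, 0], [1, 1]], [[0, 0], [1, 0]]], dtype=np.int8)
#   >>> np.sum(stack, axis=1, dtype=int)
#   array([[2, 1],
#          [1, 0]])

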
def apm_bulk_run():
    r"""
    Driver function to handle parsing of command line args and setting up
    the bulk run
    """
    # parsing commandline args
    namespace = parser.parse_args()
    if namespace.verbose:
        set_main_logger_level('debug')

    bulk_run = None
    for input_file in namespace.input_files:
        #
        # loading the yaml file and parsing the initial input file
        with open(input_file, 'r') as yaml_file:
            inputs = yaml.safe_load(yaml_file)
        inp_file = InputFile(inputs['initial_input_file'])

        # Creating class with provided kwargs
        if not bulk_run:
            bulk_run = BulkRun(inp_file, **inputs['bulk_run_keyword_args'])

        # Generating the InputFile list
        bulk_run.generate_input_files(inputs['default_run_parameters'],
                                      inputs['default_file_formats'],
                                      case_identifier=inputs['case_identifier'],
                                      case_params=inputs['case_parameters'],
                                      append=True)

    # starting or dry running sims
    if namespace.start:
        bulk_run.start()
    else:
        bulk_run.dry_run()
        print('Add "--start" flag to begin simulations')
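
# A hypothetical bulk-run YAML file showing the keys this function reads
# (key names come from the code above; all values are placeholders):
#
#   initial_input_file: path/to/initial-input-file.inp
#   bulk_run_keyword_args: {}      # kwargs forwarded to BulkRun()
#   default_run_parameters: {}     # parameter lists for generate_input_files
#   default_file_formats: {}
#   case_identifier: ''
#   case_parameters: {}

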
def apm_process_data_map():
    r"""
    Parses command line arguments and delegates tasks to helper functions
    for actual data processing
    """
    args = parser.parse_args()
    #
    if args.verbose:
        set_main_logger_level('debug')
    #
    process_files(args)


def apm_calculate_offset_map():
    r"""
    Driver program to load an image and generate an offset map. Arrays
    can be quite large and are explicitly deleted to conserve RAM
    """
    # parsing commandline args
    namespace = parser.parse_args()
    if namespace.verbose:
        set_main_logger_level('debug')

    # checking paths
    map_path = os.path.join(namespace.output_dir, namespace.offset_map_name)
    #
    if namespace.img_stack_dirname is not None:
        stack_path = os.path.join(namespace.output_dir,
                                  namespace.img_stack_dirname)
        if os.path.exists(stack_path) and not namespace.force:
            msg = 'Image Stack directory: {} already exists, '
            msg += 'use "-f" option to overwrite'
            raise FileExistsError(msg.format(stack_path))
    #
    if os.path.exists(map_path) and not namespace.force:
        msg = '{} already exists, use "-f" option to overwrite'
        raise FileExistsError(msg.format(map_path))

    # loading image data
    data_array = load_image_data(namespace.image_file, namespace.invert)
    img_dims = data_array.shape
    nonzero_locs = locate_nonzero_data(data_array)
    index_map = generate_index_map(nonzero_locs, img_dims)

    # determining connectivity and removing clusters
    conns = generate_node_connectivity_array(index_map, data_array)
    del data_array
    del index_map
    nonzero_locs = remove_isolated_clusters(conns,
                                            nonzero_locs,
                                            namespace.num_clusters)

    # saving processed image
    if namespace.img_stack_dirname is not None:
        save_image_stack(nonzero_locs, img_dims, stack_path)

    # creating offset map and filling gaps left from zero aperture regions
    offset_map = generate_offset_map(nonzero_locs, img_dims)
    offset_map = patch_holes(offset_map)

    # saving map
    sp.savetxt(map_path, offset_map.T, fmt='%d', delimiter='\t')
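
# For reference, isolated clusters in the raw binary image can also be found
# with scipy.ndimage.label (a sketch of the general technique only; the
# remove_isolated_clusters call above operates on the connectivity array):
#
#   >>> from scipy import ndimage
#   >>> import numpy as np
#   >>> image = np.array([[1, 0, 0], [0, 0, 1], [0, 0, 1]])
#   >>> labels, num_features = ndimage.label(image)
#   >>> num_features
#   2

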
def init_case_dir():
    r"""
    Parses command line arguments and delegates tasks to helper functions
    """
    # parsing commandline args
    args = parser.parse_args()
    arg_dict = args.__dict__

    # checking args
    if args.verbose:
        set_main_logger_level('debug')
    check_args()

    # creating job directory
    try:
        os.makedirs(args.job_dir)
    except FileExistsError as err:
        if args.force:
            rmtree(args.job_dir)
            os.makedirs(args.job_dir)
        else:
            raise

    # copying contents of template directory to job_dir
    os.system('cp -r {}/* {}'.format(args.template, args.job_dir))

    # copying over the aperture map and updating args
    os.system('cp {} {}'.format(args.aper_map, args.job_dir))
    args.aper_map = os.path.join(args.job_dir, os.path.basename(args.aper_map))

    #
    update_system_files(**arg_dict)
    update_transport_props(**arg_dict)
    update_u_file(**arg_dict)
    update_p_file(**arg_dict)

    #
    # updating run script
    if not args.no_hpcee:
        with open(os.path.join(args.job_dir, args.script_name), 'r+') as run:
            content = run.read()
            content = content.format(**arg_dict)
            run.seek(0)
            run.truncate()
            run.write(content)
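
# The run script is treated as a plain str.format template filled from the
# parsed arguments, e.g. (field names here are hypothetical):
#
#   >>> 'mpirun -np {np} {solver}'.format(**{'np': 4, 'solver': 'simpleFoam'})
#   'mpirun -np 4 simpleFoam'

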
def apm_subtract_data_maps():
    r"""
    Parses command line arguments and delegates tasks to helper functions
    for actual data processing
    """
    args = parser.parse_args()
    args.perc = [0, 1, 5, 10, 25, 50, 75, 90, 95, 99, 100]
    #
    if args.verbose:
        set_main_logger_level('debug')
    #
    # testing output map path
    if os.path.exists(args.out_name) and not args.force:
        msg = '{} already exists, use "-f" option to overwrite'
        raise FileExistsError(msg.format(args.out_name))
    #
    aper_map, data_map1, data_map2 = prepare_maps(args)
    result = process_maps(aper_map, data_map1, data_map2, args)
    #
    # writing out resultant data map
    sp.savetxt(args.out_name, result.data_map, delimiter='\t')
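
# args.perc above lists the percentiles reported for the difference data; a
# minimal sketch of that calculation, assuming process_maps relies on
# numpy-style percentile routines:
#
#   >>> import numpy as np
#   >>> np.percentile([1, 2, 3, 4], [0, 50, 100])
#   array([1. , 2.5, 4. ])
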
#
desc_str = r"""
Description: Generates 2-D data maps from OpenFOAM data saved by ParaView
as a CSV file. The data has to be saved as point data, and the following
fields are expected: p, points:0->2, u:0->2. An aperture map is the second
main input; it is used to generate the interpolation coordinates as well as
to convert the flow velocities into volumetric flow rates.

Written By: Matthew Stadelman
Date Written: 2016/09/29
Last Modified: 2016/09/29
"""

# setting up logger
set_main_logger_level('info')
logger = _get_logger('ApertureMapModelTools.Scripts')

# setting a few convenience globals
avg_fact = None
voxel_size = None
base_name = None

# creating arg parser
parser = argparse.ArgumentParser(description=desc_str, formatter_class=RawDesc)

# adding arguments
parser.add_argument('data_file', type=os.path.realpath,
                    help='paraview CSV data file')

parser.add_argument('map_file', type=os.path.realpath,
                    help='aperture map data file')


def apm_parallel_mesh_generation():
    r"""
    Processes the command line arguments and generates the mesh
    """
    #
    namespace = parser.parse_args()
    if namespace.verbose:
        set_main_logger_level('debug')
    #
    # initial mesh parameters
    mesh_params = {
        'convertToMeters': '2.680E-5',
        'numbersOfCells': '(1 1 1)',
        #
        'boundary.left.type': 'wall',
        'boundary.right.type': 'wall',
        'boundary.top.type': 'wall',
        'boundary.bottom.type': 'wall',
        'boundary.front.type': 'wall',
        'boundary.back.type': 'wall'
    }
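    #
    # note: the dotted keys above address nested blockMeshDict entries; a
    # minimal sketch of how one such key could expand into a nested dict
    # (illustrative only, ParallelMeshGen's own handling may differ):
    #
    #   >>> key, value = 'boundary.left.type', 'wall'
    #   >>> nested = {}
    #   >>> d = nested
    #   >>> *parents, leaf = key.split('.')
    #   >>> for part in parents:
    #   ...     d = d.setdefault(part, {})
    #   >>> d[leaf] = value
    #   >>> nested
    #   {'boundary': {'left': {'type': 'wall'}}}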
    #
    # reading params file if supplied
    if namespace.read_file:
        logger.info('Reading parameters file...')
        read_params_file(namespace.read_file, mesh_params)
    #
    # creating data field from aperture map
    logger.info('Processing aperture map...')
    map_field = DataField(namespace.map_file)
    #
    # reading offset file if provided
    offset_field = None
    if namespace.offset_file:
        offset_field = DataField(namespace.offset_file)
    #
    # setting up mesh generator
    system_dir = os.path.join(namespace.system_path, 'system')
    kwargs = {'nprocs': namespace.np,
              'avg_fact': namespace.avg_fact,
              'mesh_params': mesh_params,
              'offset_field': offset_field}
    #
    logger.info('Setting generator up...')
    pmg = ParallelMeshGen(map_field, system_dir, **kwargs)
    #
    # creating the mesh
    logger.info('Creating the mesh...')
    pmg.generate_mesh(namespace.mesh_type,
                      path=namespace.output_dir,
                      overwrite=namespace.force)
    #
    # moving mesh files out of region directory
    out_path = namespace.output_dir
    reg_dir = os.path.join(out_path, 'mesh-region0', '*')
    if namespace.force:
        os.system('cp -ralf {} {}'.format(reg_dir, out_path))
        os.system('rm -rf {}'.format(os.path.join(out_path, 'mesh-region0')))
    else:
        os.system('mv {} {}'.format(reg_dir, out_path))
        os.system('rmdir {}'.format(os.path.join(out_path, 'mesh-region0')))
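
# A hypothetical invocation (flag spellings are inferred from the namespace
# attributes used above; consult the parser definitions for the real ones):
#
#   apm_parallel_mesh_generation aperture-map.txt --np 8 -v -f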