Example #1
def output_percentile_set(data_field, args):
    r"""
    Does three sets of percentiles and stacks them as columns: raw data,
    absolute value data, normalized+absolute value
    """
    data = {}
    #
    # outputting percentiles of initial subtraction to screen
    field = DataField(data_field.data_map)
    pctle = Percentiles(field, percentiles=args.perc)
    pctle.process()
    data['raw'] = pctle.processed_data
    #
    # normalizing data
    field = DataField(data_field.data_map)
    field._data_map = field.data_map / sp.amax(sp.absolute(field.data_map))
    pctle = Percentiles(field, percentiles=args.perc)
    pctle.process()
    data['norm'] = pctle.processed_data
    #
    # taking absolute value of data
    field = DataField(data_field.data_map)
    field._data_map = sp.absolute(field.data_map)
    pctle = Percentiles(field, percentiles=args.perc)
    pctle.process()
    data['abs'] = pctle.processed_data
    #
    # normalizing the absolute-value data (reusing the abs field from above)
    field._data_map = field.data_map / sp.amax(field.data_map)
    pctle = Percentiles(field, percentiles=args.perc)
    pctle.process()
    data['abs+norm'] = pctle.processed_data
    #
    # outputting stacked percentiles
    fmt = '    {:>6.2f}\t{: 0.6e}\t{: 0.6e}\t{: 0.6e}\t{: 0.6e}\n'
    content = '\nPercentile\tRaw Data\tAbsolute\tNormalized\tNorm+abs\n'
    data = zip(args.perc, data['raw'].values(), data['abs'].values(),
               data['norm'].values(), data['abs+norm'].values())
    #
    for row in data:
        content += fmt.format(*row)
    content += '\n'
    logger.info(content)
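
A minimal sketch of the normalization used for the 'norm' column above, written with plain numpy rather than the examples' scipy alias `sp`: dividing by the largest absolute value maps the data into [-1, 1] while preserving sign.

import numpy as np

data = np.array([-4.0, -1.0, 0.0, 2.0])
normed = data / np.amax(np.absolute(data))   # divide by max absolute value
print(normed)                                # [-1.   -0.25  0.    0.5 ]
print(np.percentile(normed, [25, 50, 75]))   # percentiles of the normed data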
Example #2
def prepare_maps(args):
    r"""
    loads the aperture map and data maps and then masks zero aperture zones
    as well as performs pre-subtraction normalization if desired.
    """
    #
    # loading files
    aper_map = DataField(args.map_file)
    data_map1 = DataField(args.data_file1)
    data_map2 = DataField(args.data_file2)
    #
    # generating percentiles of each data field
    msg = 'Percentiles of data map: '
    logger.info(msg + os.path.basename(args.data_file1))
    output_percentile_set(data_map1, args)
    #
    logger.info(msg + os.path.basename(args.data_file2))
    output_percentile_set(data_map2, args)
    #
    # masking zero aperture zones
    data_map1 = data_map1.data_vector
    data_map2 = data_map2.data_vector
    data_map1[sp.where(aper_map.data_vector == 0)] = 0
    data_map2[sp.where(aper_map.data_vector == 0)] = 0
    #
    # normalizing data maps if desired
    if args.pre_normalize:
        data_map1 = data_map1 / sp.amax(sp.absolute(data_map1))
        data_map2 = data_map2 / sp.amax(sp.absolute(data_map2))
    #
    # reshaping data maps back into 2-D arrays
    data_map1 = sp.reshape(data_map1, aper_map.data_map.shape)
    data_map2 = sp.reshape(data_map2, aper_map.data_map.shape)
    #
    #
    return aper_map, data_map1, data_map2
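
The mask-and-reshape idiom in prepare_maps can be shown in isolation; a small sketch with plain numpy (the arrays are illustrative only):

import numpy as np

aper = np.array([[1.0, 0.0], [2.0, 3.0]])   # aperture map with one zero cell
data = np.array([[5.0, 6.0], [7.0, 8.0]])

vec = np.ravel(data).copy()                 # flatten to a vector
vec[np.where(np.ravel(aper) == 0)] = 0      # zero out cells with no aperture
data = np.reshape(vec, aper.shape)          # back to the 2-D map shape
print(data)                                 # [[5. 0.] [7. 8.]]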
def update_u_file(job_dir, **kwargs):
    r"""
    Updates the 0/U file
    """
    aper_map = DataField(kwargs['aper_map'])
    p_file = OpenFoamFile(os.path.join(job_dir, '0', 'p'))
    u_file = OpenFoamFile(os.path.join(job_dir, '0', 'U'))
    inlet_side = kwargs['inlet_side']
    outlet_side = kwargs['outlet_side']
    vox = kwargs['voxel_size']
    avg = kwargs['avg_fact']
    #
    area_dict = {
        'left': sum(aper_map.data_map[:, 0] * vox**2 * avg),
        'right': sum(aper_map.data_map[:, -1] * vox**2 * avg),
        'top': sum(aper_map.data_map[-1, :] * vox**2 * avg),
        'bottom': sum(aper_map.data_map[0, :] * vox**2 * avg)
    }

    # calculating SI velocities
    if kwargs['inlet_q']:
        vel = kwargs['inlet_q'][0:3]
        vel = [convert_value(float(v), kwargs['inlet_q'][3]) for v in vel]
        vel = [v / area_dict[inlet_side] for v in vel]
        vel = 'uniform ({} {} {})'.format(*vel)
        #
        u_file['boundaryField'][inlet_side]['type'] = 'fixedValue'
        u_file['boundaryField'][inlet_side]['value'] = vel
        #
        p_file['boundaryField'][inlet_side]['type'] = 'zeroGradient'
        p_file['boundaryField'][inlet_side].pop('value', None)
    #
    if kwargs['outlet_q']:
        vel = kwargs['outlet_q'][0:3]
        vel = [convert_value(float(v), kwargs['outlet_q'][3]) for v in vel]
        vel = [v / area_dict[outlet_side] for v in vel]
        vel = 'uniform ({} {} {})'.format(*vel)
        #
        u_file['boundaryField'][outlet_side]['type'] = 'fixedValue'
        u_file['boundaryField'][outlet_side]['value'] = vel
        p_file['boundaryField'][outlet_side]['type'] = 'zeroGradient'
        p_file['boundaryField'][outlet_side].pop('value', None)
    #
    p_file.write_foam_file(path=job_dir, overwrite=True)
    u_file.write_foam_file(path=job_dir, overwrite=True)
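
The inlet velocity above is the volumetric flow rate divided by the boundary area, which is summed from the aperture values along that side. A hedged sketch with made-up numbers (voxel size, averaging factor, apertures, and flow rate are all illustrative):

import numpy as np

vox = 2.68e-5                               # hypothetical voxel size [m]
avg = 10                                    # hypothetical averaging factor
inlet_apers = np.array([12.0, 15.0, 9.0])   # apertures along the inlet boundary

area = np.sum(inlet_apers * vox**2 * avg)   # inlet cross-sectional area [m^2]
flow = np.array([0.0, 1.0e-9, 0.0])         # hypothetical flow rate [m^3/s]
vel = flow / area                           # uniform inlet velocity [m/s]
print('uniform ({} {} {})'.format(*vel))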
def filter_high_gradients(data_map):
    r"""
    Filters the offset field to reduce the number of very steep gradients.
    The magnitude of the gradient is taken and all values less than or
    greater than +-99th percentile are removed and recalculated.
    """
    #
    logger.info('filtering offset map to remove steeply sloped cells')
    #
    zdir_grad, xdir_grad = sp.gradient(data_map)
    mag = sp.sqrt(zdir_grad**2 + xdir_grad**2)
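    # shifting values up by one so zero can be reserved to flag removed cells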
    data_map += 1
    data_vector = sp.ravel(data_map)
    #
    # setting regions outside of 99th percentile to 0 for cluster removal
    val = calc_percentile(99, sp.ravel(mag))
    data_map[zdir_grad < -val] = 0
    data_map[zdir_grad > val] = 0
    data_map[xdir_grad < -val] = 0
    data_map[xdir_grad > val] = 0
    #
    logger.debug('\tremoving clusters isolated by high gradients')
    offsets = DataField(data_map)
    adj_mat = offsets.create_adjacency_matrix()
    cs_num, cs_ids = csgraph.connected_components(csgraph=adj_mat,
                                                  directed=False)
    cs_num, counts = sp.unique(cs_ids, return_counts=True)
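    # keeping only the label of the largest connected cluster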
    cs_num = cs_num[sp.argsort(counts)][-1]
    #
    data_vector[sp.where(cs_ids != cs_num)[0]] = sp.nan
    data_map = sp.reshape(data_vector, data_map.shape)
    #
    # re-interpolating for the nan regions
    logger.debug('\tpatching holes left by cluster removal')
    patch_holes(data_map)
    #
    return data_map
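
A compact sketch of the keep-the-largest-cluster step, substituting scipy.ndimage.label for the adjacency-matrix/csgraph route used above (a simplification for illustration, not the example's own API):

import numpy as np
from scipy import ndimage

data = np.array([[1, 1, 0, 2],
                 [1, 0, 0, 2],
                 [0, 0, 0, 2],
                 [3, 0, 2, 2]], dtype=float)

labels, num = ndimage.label(data > 0)    # label connected nonzero regions
sizes = np.bincount(labels.ravel())[1:]  # region sizes, background excluded
keep = np.argmax(sizes) + 1              # label of the largest region
data[labels != keep] = np.nan            # everything else becomes a hole
print(data)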
def process_files(args):
    r"""
    Handles processing of the input maps based on the supplied arguments
    """
    for file in args.files:
        field = DataField(file)
        processor = args.func(field, **args.__dict__)
        processor.process()

        # printing data to screen if -s flag
        if args.screen:
            processor.gen_output(delim='\t')
            processor.print_data()

        # writing data if -W was not used
        if not args.no_write:
            processor.gen_output(delim=',')
            #
            filename = os.path.join(args.output_dir, processor.outfile_name)
            if os.path.exists(filename) and not args.force:
                msg = '{} already exists, use "-f" option to overwrite'
                raise FileExistsError(msg.format(filename))
            #
            processor.write_data(path=args.output_dir)
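
process_files expects an argparse namespace carrying the attributes it reads. A hedged sketch of what such a parser might look like; only -s, -W, and -f appear in the comments above, so the long option spellings and defaults here are assumptions:

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('files', nargs='+', help='input data map files')
parser.add_argument('-s', '--screen', action='store_true',
                    help='also print processed data to the screen')
parser.add_argument('-W', '--no-write', dest='no_write', action='store_true',
                    help='skip writing output files')
parser.add_argument('-f', '--force', action='store_true',
                    help='overwrite existing output files')
parser.add_argument('-o', '--output-dir', dest='output_dir', default='.',
                    help='directory to write processed data into')
# a subparser would attach the processor class via set_defaults(func=...)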
Example #6
def process_maps(aper_map, data_map1, data_map2, args):
    r"""
    subtracts the data maps and then calculates percentiles of the result
    before outputting a final map to file.
    """
    #
    # creating resultant map from the difference of the two data maps
    result = data_map1 - data_map2
    result = DataField(result)
    result.infile = args.out_name
    result.outfile = args.out_name
    #
    logger.info('Percentiles of data_map1 - data_map2')
    output_percentile_set(result, args)
    #
    # checking if data is to be normalized and/or absolute
    if args.post_abs:
        result._data_map = sp.absolute(result.data_map)
    #
    if args.post_normalize:
        result._data_map = result.data_map / sp.amax(
            sp.absolute(result.data_map))
    #
    return result
def read_data_files(para_file, map_file):
    r"""
    Reads in the paraview data file and aperture map file.
    """
    #
    # reading aperture map
    logger.info('reading aperture map...')
    aper_map = DataField(map_file)
    #
    # reading first line of paraview file to get column names
    logger.info('reading paraview data file')
    with open(para_file, 'r') as file:
        cols = file.readline()
        cols = cols.strip().replace('"', '').lower()
        cols = cols.split(',')

    #
    # reading entire dataset and splitting into column vectors
    data = sp.loadtxt(para_file, delimiter=',', dtype=float, skiprows=1)
    data_dict = {}
    for i, col in enumerate(cols):
        data_dict[col] = data[:, i]
    #
    return aper_map, data_dict
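
The header-then-columns idiom in read_data_files can be exercised on its own; a minimal sketch using an in-memory CSV and numpy in place of the scipy alias:

import io
import numpy as np

para = io.StringIO('"X","Y","P"\n0,1,101325\n1,2,101300\n')
cols = para.readline().strip().replace('"', '').lower().split(',')
data = np.loadtxt(para, delimiter=',', dtype=float)   # header already consumed
data_dict = {col: data[:, i] for i, col in enumerate(cols)}
print(data_dict['p'])                                 # [101325. 101300.]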
def load_inp_file():
    r"""
    Loads in an APM-MODEL input file if it was supplied and pulls out
    required information.
    """
    global namespace, export, apm_input_file, map_data_field, block_mesh
    #
    # loading file and getting all uncommented lines
    apm_input_file = InputFile(namespace.input_file)
    input_args = apm_input_file.get_uncommented_values()
    #
    # building actual path to map file based on input file location
    file_path = os.path.realpath(namespace.input_file)
    map_path = input_args['APER-MAP'].value
    map_path = os.path.join(os.path.split(file_path)[0], map_path)
    map_path = os.path.realpath(map_path)
    #
    try:
        map_data_field = DataField(map_path)
    except FileNotFoundError:
        logger.warn('Aperture map file was not found at path: '+map_path)
    #
    # setting transport and bc params from file
    input_params = [
        ('MAP', 'avg_fact', 1.0),
        ('HIGH-MASK', 'high_mask', 1.0E6),
        ('LOW-MASK', 'low_mask', 0.0),
        ('ROUGHNESS', 'roughness', 0.0),
        ('VOXEL', 'voxel_size', 1.0),
        ('INLET-PRESS', 'inlet_p', None),
        ('OUTLET-PRESS', 'outlet_p', None),
        ('INLET-RATE', 'inlet_rate', None),
        ('OUTLET-RATE', 'outlet_rate', None),
        ('FLUID-VISCOSITY', 'fluid_visc', 0.001),
        ('FLUID-DENSITY', 'fluid_dens', 1000)
    ]
    sim_params = {}
    for keyword, key, default in input_params:
        sim_params[key] = default
        # checking if keyword exists
        if keyword not in input_args:
            continue
        # setting value of keyword
        value = float(input_args[keyword].value)
        unit = input_args[keyword].unit
        sim_params[key] = value
        if not unit:
            continue
        # converting unit of value if needed
        try:
            sim_params[key] = value * get_conversion_factor(unit)
        except (KeyError, ValueError) as err:
            del sim_params[key]
            msg = 'Could not process input line: {} - Encountered {}: {}'
            msg = msg.format(apm_input_file[keyword].line,
                             err.__class__.__name__,
                             str(err))
            logger.warn(msg)
    #
    # getting inlet/outlet sides
    sides = {'left': 'right', 'right': 'left', 'top': 'bottom', 'bottom': 'top'}
    sim_params['inlet'] = sides[apm_input_file['OUTLET-SIDE'].value.lower()]
    sim_params['outlet'] = apm_input_file['OUTLET-SIDE'].value.lower()
    namespace.sim_params = sim_params
    #
    if map_data_field is None:
        return
    #
    # applying any geometric changes needed to the fracture data
    value = sim_params['roughness']
    map_data_field.data_map = map_data_field.data_map - value
    map_data_field.data_vector = map_data_field.data_vector - value
    #
    value = sim_params['high_mask']
    map_data_field.data_map[map_data_field.data_map > value] = value
    map_data_field.data_vector[map_data_field.data_vector > value] = value
    #
    value = sim_params['low_mask']
    map_data_field.data_map[map_data_field.data_map < value] = value
    map_data_field.data_vector[map_data_field.data_vector < value] = value
    #
    # setting mesh parameters
    mesh_params = {
        'convertToMeters': sim_params['voxel_size'],
        'numbersOfCells': '(5 5 5)',
        'boundary.'+sim_params['inlet']+'.type': 'patch',
        'boundary.'+sim_params['outlet']+'.type': 'patch'
    }
    #
    # creating blockMeshDict file
    block_mesh = BlockMeshDict(map_data_field,
                               sim_params['avg_fact'],
                               mesh_params)
    export.block_mesh_dict = block_mesh
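
The keyword loop above follows a default-then-override pattern: each parameter starts at its default, takes the file value when the keyword is present, and is scaled by a conversion factor when a unit is given. A stripped-down sketch, with a stand-in table for the real get_conversion_factor:

conversion = {'mm': 1.0e-3, 'um': 1.0e-6}    # hypothetical factors to metres

def load_param(input_args, keyword, default):
    # default when absent, raw value when unitless, converted value otherwise
    if keyword not in input_args:
        return default
    value, unit = input_args[keyword]
    return value * conversion[unit] if unit else value

args = {'VOXEL': (26.8, 'um')}
print(load_param(args, 'VOXEL', 1.0))        # -> 2.68e-05
print(load_param(args, 'ROUGHNESS', 0.0))    # -> 0.0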
Example #9
def test_parallel_mesh_gen():
    #
    infile = os.path.join(FIXTURE_DIR, 'maps',
                          'Fracture1ApertureMap-10avg.txt')
    field = DataField(infile)
    offset_field = DataField(sp.ones(field.data_map.shape))
    #
    # adding a fake boundary file to the mesh-region0 directory
    # this will be overwritten if testing with real OpenFoam programs
    bnd_file = OpenFoamFile(os.path.join('constant', 'polyMesh'), 'boundary')
    face_list = OpenFoamList('boundary')
    face_list.append(OpenFoamDict('top', {'numFaces': 10}))
    face_list.append(OpenFoamDict('mergeTB0', {'numFaces': 0}))
    face_list.append('non-dict-entry blah blah')
    bnd_file[face_list.name] = face_list
    out_path = os.path.join(TEMP_DIR, 'test-pmg', 'mesh-region0')
    bnd_file.write_foam_file(path=out_path, overwrite=True)
    #
    # initializing mesh generator
    sys_dir = os.path.join(FIXTURE_DIR, 'system')
    out_path = os.path.join(TEMP_DIR, 'test-pmg')
    parallel_mesh_gen = ParallelMeshGen(field,
                                        sys_dir,
                                        offset_field=offset_field)
    #
    # running each possible mesh type
    parallel_mesh_gen.generate_mesh(mesh_type='simple',
                                    path=out_path,
                                    ndivs=2,
                                    overwrite=True)
    #
    parallel_mesh_gen.generate_mesh(mesh_type='threshold',
                                    path=out_path,
                                    ndivs=4,
                                    overwrite=True)
    #
    parallel_mesh_gen.generate_mesh(mesh_type='symmetry',
                                    path=out_path,
                                    ndivs=2,
                                    overwrite=True)
    #
    # hitting error cases
    parallel_mesh_gen = ParallelMeshGen(field, sys_dir)
    #
    # adding a fake blockMeshDict file to throw an error in mesh gen
    bnd_file.name = 'blockMeshDict'
    out_path = os.path.join(TEMP_DIR, 'test-pmg2-fail', 'mesh-region3')
    bnd_file.write_foam_file(path=out_path, overwrite=True)
    out_path = os.path.join(TEMP_DIR, 'test-pmg2-fail')
    with pytest.raises(OSError):
        parallel_mesh_gen._create_subregion_meshes(4,
                                                   mesh_type='simple',
                                                   path=out_path)
    #
    pmg_submodule._blockMesh_error.clear()
    parallel_mesh_gen._create_subregion_meshes(4,
                                               mesh_type='simple',
                                               path=out_path,
                                               overwrite=True)
    grid = sp.arange(0, 16, dtype=int)
    grid = sp.reshape(grid, (4, 4))
    #
    # renaming a merge directory to throw an error
    parallel_mesh_gen.merge_groups[3].region_dir += '-mergemesh-exit1'
    with pytest.raises(OSError):
        parallel_mesh_gen._merge_submeshes(grid)
Example #10
def main():
    r"""
    Processes the command line arguments and generates the mesh
    """
    #
    namespace = parser.parse_args()
    if namespace.verbose:
        set_main_logger_level('debug')
    #
    # initial mesh parameters
    mesh_params = {
        'convertToMeters': '2.680E-5',
        'numbersOfCells': '(1 1 1)',
        #
        'boundary.left.type': 'wall',
        'boundary.right.type': 'wall',
        'boundary.top.type': 'wall',
        'boundary.bottom.type': 'wall',
        'boundary.front.type': 'wall',
        'boundary.back.type': 'wall'
    }
    #
    # reading params file if supplied
    if namespace.read_file:
        logger.info('Reading parameters file...')
        read_params_file(namespace.read_file, mesh_params)
    #
    # creating data field from aperture map
    logger.info('Processing aperture map...')
    map_field = DataField(namespace.map_file)
    #
    # reading offset file if provided
    offset_field = None
    if namespace.offset_file:
        offset_field = DataField(namespace.offset_file)
    #
    # setting up mesh generator
    system_dir = os.path.join(namespace.system_path, 'system')
    np = namespace.np
    kwargs = {'nprocs': np,
              'avg_fact': namespace.avg_fact,
              'mesh_params': mesh_params,
              'offset_field': offset_field}
    #
    logger.info('Setting generator up...')
    pmg = ParallelMeshGen(map_field, system_dir, **kwargs)
    #
    # creating the mesh
    logger.info('Creating the mesh...')
    pmg.generate_mesh(namespace.mesh_type,
                      path=namespace.output_dir,
                      overwrite=namespace.force)
    #
    # moving mesh files out of region directory
    out_path = namespace.output_dir
    reg_dir = os.path.join(out_path, 'mesh-region0', '*')
    if namespace.force:
        os.system('cp -ralf {} {}'.format(reg_dir, out_path))
        os.system('rm -rf {}'.format(os.path.join(out_path, 'mesh-region0')))
    else:
        os.system('mv {} {}'.format(reg_dir, out_path))
        os.system('rmdir {}'.format(os.path.join(out_path, 'mesh-region0')))
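
The trailing shell calls assume a Unix environment with cp/mv available. A portable sketch of the same move using only the standard library (shutil.copytree with dirs_exist_ok requires Python 3.8+):

import os
import shutil

def promote_region_dir(out_path, region='mesh-region0', force=False):
    reg_dir = os.path.join(out_path, region)
    if force:
        # copy over any existing files, then remove the region directory
        shutil.copytree(reg_dir, out_path, dirs_exist_ok=True)
        shutil.rmtree(reg_dir)
    else:
        # move the contents up one level, then drop the empty directory
        for name in os.listdir(reg_dir):
            shutil.move(os.path.join(reg_dir, name), out_path)
        os.rmdir(reg_dir)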