Example #1
def main():
    r"""
    Driver function to resize the TIF image.
    """
    # parsing commandline args
    args = parser.parse_args()
    if args.verbose:
        set_main_logger_level('debug')

    # checking path to prevent accidental overwriting
    if not args.outfile_name:
        filename = os.path.basename(args.image_file)
        filename = os.path.splitext(filename)[0]
        args.outfile_name = filename + '-resized.tif'

    #
    filename = os.path.join(args.output_dir, args.outfile_name)
    if os.path.exists(filename) and not args.force:
        msg = '{} already exists, use "-f" option to overwrite'
        raise FileExistsError(msg.format(filename))
    os.makedirs(os.path.split(filename)[0], exist_ok=True)

    # resizing the image
    image = resize_image(args.image_file, args.invert)

    # saving image
    logger.info('saving image data to file ' + filename)
    image.save(filename, overwrite=args.force)
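Most of these drivers repeat the same output-path idiom: derive a default name from the input file, guard against accidental overwrites, then create the destination directory. A minimal sketch of that shared pattern, with resolve_output_path as a hypothetical helper (it is not part of apmapflow):

import os


def resolve_output_path(in_file, out_dir, out_name=None, suffix='.txt',
                        force=False):
    r"""
    Derive an output path from the input filename and guard overwrites.
    """
    if out_name is None:
        # default the output name to the input basename plus a new suffix
        out_name = os.path.splitext(os.path.basename(in_file))[0] + suffix
    filename = os.path.join(out_dir, out_name)
    if os.path.exists(filename) and not force:
        msg = '{} already exists, use "-f" option to overwrite'
        raise FileExistsError(msg.format(filename))
    os.makedirs(os.path.dirname(filename) or '.', exist_ok=True)
    return filename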
Example #2
def main():
    r"""
    Driver function to process the stat file.
    """
    # parsing commandline args
    args = parser.parse_args()
    if args.verbose:
        set_main_logger_level('debug')

    # checking path to prevent accidental overwriting
    if not args.out_name:
        args.out_name = os.path.basename(args.stat_file)
        args.out_name = os.path.splitext(args.out_name)[0] + '.yaml'
    filename = os.path.join(args.output_dir, args.out_name)
    #
    if os.path.exists(filename) and not args.force:
        msg = '{} already exists, use "-f" option to overwrite'
        raise FileExistsError(msg.format(filename))

    # loading data
    logger.info('parsing csv stat file')
    data = StatFile(args.stat_file)

    # saving data
    logger.info('saving yaml file as {}'.format(filename))
    with open(filename, 'w') as outfile:
        yaml.dump(dict(data), outfile)
Example #3
def main():
    r"""
    Parses command line arguments and delegates tasks to helper functions
    for actual data processing
    """
    args = parser.parse_args()
    #
    if args.verbose:
        set_main_logger_level('debug')
    #
    process_files(args)
Example #4
def main():
    r"""
    Driver function to generate an aperture map from a TIF image.
    """
    # parsing commandline args
    args = parser.parse_args()
    if args.verbose:
        set_main_logger_level('debug')

    # checking path to prevent accidental overwriting
    image_file = os.path.basename(args.image_file)
    image_file = os.path.splitext(image_file)[0]
    args.aperture_map_name = args.aperture_map_name.format(
        image_file=image_file)
    #
    map_path = os.path.join(args.output_dir, args.aperture_map_name)
    if os.path.exists(map_path) and not args.force:
        msg = '{} already exists, use "-f" option to overwrite'
        raise FileExistsError(msg.format(map_path))
    os.makedirs(os.path.split(map_path)[0], exist_ok=True)

    # loading image data
    logger.info('loading image...')
    img_data = FractureImageStack(args.image_file)
    if args.invert:
        logger.debug('inverting image data')
        img_data = ~img_data
    logger.debug('image dimensions: {} {} {}'.format(*img_data.shape))

    # summing data array down into a 2-D map
    logger.info('creating 2-D aperture map...')
    aperture_map = img_data.create_aperture_map()

    # saving map
    logger.info('saving aperture map as {}'.format(map_path))
    sp.savetxt(map_path, aperture_map, fmt='%d', delimiter='\t')

    # generating colored stack if desired
    if args.gen_colored_stack:
        image = gen_colored_image_stack(img_data, aperture_map)
        # save the image data
        filename = image_file + '-colored.tif'
        filename = os.path.join(args.output_dir, filename)
        #
        logger.info('saving image data to file ' + filename)
        image.save(filename, overwrite=args.force)
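The "~img_data" inversion above relies on operator overloading; if FractureImageStack behaves like a boolean numpy array (an assumption about its internals, not the library's documented design), the idiom reduces to something like this sketch:

import numpy as np


class BoolImageStack(np.ndarray):
    r"""
    Hypothetical stand-in for a boolean TIFF image stack.
    """
    @classmethod
    def from_array(cls, data):
        return np.asarray(data, dtype=bool).view(cls)

    def create_aperture_map(self):
        # summing True voxels through the stack collapses it to a 2-D map
        return np.asarray(self.sum(axis=1), dtype=int)


stack = BoolImageStack.from_array([[[0, 1], [1, 1]],
                                   [[1, 0], [0, 0]]])
print((~stack).create_aperture_map())  # bool arrays support "~" natively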
Example #5
def init_case_dir():
    r"""
    Parses command line arguments and delegates tasks to helper functions
    """
    args = parser.parse_args()
    arg_dict = args.__dict__

    # checking args
    if args.verbose:
        set_main_logger_level('debug')
    check_args()

    # creating job directory
    try:
        os.makedirs(args.job_dir)
    except FileExistsError as err:
        if args.force:
            rmtree(args.job_dir)
            os.makedirs(args.job_dir)
        else:
            raise err

    # copying contents of template directory to job_dir
    os.system('cp -r {}/* {}'.format(args.template, args.job_dir))

    # copying over the aperture map and updating args
    os.system('cp {} {}'.format(args.aper_map, args.job_dir))
    args.aper_map = os.path.join(args.job_dir, os.path.basename(args.aper_map))

    #
    update_system_files(**arg_dict)
    update_transport_props(**arg_dict)
    update_u_file(**arg_dict)
    update_p_file(**arg_dict)

    #
    # updating run script
    if not args.no_hpcee:
        with open(os.path.join(args.job_dir, args.script_name), 'r+') as run:
            content = run.read()
            content = content.format(**arg_dict)
            run.seek(0)
            run.truncate()
            run.write(content)
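The os.system copy commands above are Unix-only; a portable alternative using the standard shutil module could look like the sketch below (an illustration, not the script's actual code):

import os
import shutil


def copy_case_files(template, job_dir, aper_map):
    r"""
    Copy template contents and the aperture map into the job directory.
    """
    for entry in os.listdir(template):
        src = os.path.join(template, entry)
        dst = os.path.join(job_dir, entry)
        if os.path.isdir(src):
            shutil.copytree(src, dst)
        else:
            shutil.copy2(src, dst)
    shutil.copy2(aper_map, job_dir)
    return os.path.join(job_dir, os.path.basename(aper_map))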
Example #6
def main():
    r"""
    Driver program to handle combining YAML stat files into a single
    CSV file.
    """
    #
    args = parser.parse_args()
    #
    if args.verbose:
        set_main_logger_level('debug')
    #
    # checking output file path
    filename = os.path.join(args.output_dir, args.outfile_name)
    if os.path.exists(filename) and not args.force:
        msg = '{} already exists, use "-f" option to overwrite'
        raise FileExistsError(msg.format(filename))
    #
    # finding files
    files = files_from_directory(directory=args.directory,
                                 pattern=args.pattern,
                                 deep=args.recursive)
    if not files:
        msg = 'Pattern: {} found no files in searched directory: {}'
        logger.fatal(msg.format(args.pattern, args.directory))
        return
    #
    # printing files found
    logger.debug('Found the following files:')
    for f in files:
        logger.debug(' - %s', os.path.relpath(f, start=args.directory))
    #
    # ordering CSV keys using the same order in first YAML file
    key_order = determine_key_order(files[0])
    #
    # reading all YAML files
    data_list = []
    for stat_file in files:
        with open(stat_file, 'r') as f:
            data_list.append(yaml.safe_load(f))
            data_list[-1]['_stat_file'] = stat_file
    #
    # outputting data
    output_stat_data(filename, key_order, data_list)
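output_stat_data is a helper defined elsewhere in the package; one plausible reduction of it to the standard csv module is sketched below (an assumption about its behavior, not the package's implementation):

import csv


def output_stat_data(filename, key_order, data_list):
    r"""
    Write a list of stat dictionaries out as rows of a CSV file.
    """
    with open(filename, 'w', newline='') as outfile:
        # keys outside key_order (e.g. '_stat_file') are silently dropped
        writer = csv.DictWriter(outfile, fieldnames=key_order,
                                extrasaction='ignore')
        writer.writeheader()
        writer.writerows(data_list)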
Example #7
def main():
    r"""
    Driver function to handle parsing of command line args and setting up
    the bulk run
    """
    # parsing commandline args
    namespace = parser.parse_args()
    if namespace.verbose:
        set_main_logger_level('debug')

    bulk_run = None
    msg = 'Processing {} run parameter files'
    logger.debug(msg.format(len(namespace.input_files)))
    for input_file in namespace.input_files:
        msg = 'Reading {1} parameter file.'
        logger.debug(msg.format(*os.path.split(input_file)))
        #
        # loading yaml file and parsing input file
        with open(input_file, 'r') as f:
            inputs = yaml.safe_load(f)
        inp_file = InputFile(inputs['initial_input_file'])

        # Creating class with provided kwargs
        if not bulk_run:
            logger.debug('Instantiating initial BulkRun class')
            bulk_run = BulkRun(inp_file, **inputs['bulk_run_keyword_args'])

        # Generating the InputFile list
        case_identifer = inputs.get('case_identifier', None)
        case_params = inputs.get('case_parameters', None)
        bulk_run.generate_input_files(inputs['default_run_parameters'],
                                      inputs.get('default_file_formats', None),
                                      case_identifer=case_identifer,
                                      case_params=case_params,
                                      append=True)

    # starting or dry running sims
    if namespace.start:
        bulk_run.start()
    else:
        bulk_run.dry_run()
        print('Add "--start" flag to begin simulations')
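Each parameter file consumed by the loop above must expose the keys the code reads; a hypothetical example, shown as the dict yaml.safe_load would return (all filenames and values below are made up):

example_inputs = {
    'initial_input_file': 'model-input-file.inp',  # passed to InputFile
    'bulk_run_keyword_args': {},                   # forwarded to BulkRun
    'default_run_parameters': {'OUTLET-PRESS': ['100', '200']},
    'default_file_formats': None,
    'case_identifier': None,
    'case_parameters': None,
}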
Example #8
def main():
    r"""
    Parses command line arguments and delegates tasks to helper functions
    for actual data processing
    """
    args = parser.parse_args()
    args.perc = [0, 1, 5, 10, 25, 50, 75, 90, 95, 99, 100]
    #
    if args.verbose:
        set_main_logger_level('debug')
    #
    # testing output map path
    filename = os.path.join(args.output_dir, args.out_name)
    if os.path.exists(filename) and not args.force:
        msg = '{} already exists, use "-f" option to overwrite'
        raise FileExistsError(msg.format(filename))
    #
    aper_map, data_map1, data_map2 = prepare_maps(args)
    result = process_maps(aper_map, data_map1, data_map2, args)
    #
    # writing out resultant data map
    sp.savetxt(filename, result.data_map, delimiter='\t')
Example #9
def main():
    r"""
    Processes commandline args and runs script
    """
    global avg_fact, voxel_size, base_name
    #
    args = parser.parse_args()
    if args.verbose:
        set_main_logger_level('debug')
    #
    # pulling values from the command-line args
    para_infile = args.data_file
    aper_infile = args.map_file
    avg_fact = args.avg_fact
    voxel_size = args.voxel_size
    #
    base_name = args.base_name
    if base_name is None:
        base_name = os.path.basename(para_infile).split('.')[0]
    base_name = os.path.join(args.output_dir, base_name)
    #
    aper_map, data_dict = read_data_files(para_infile, aper_infile)
    map_coords, data_coords = generate_coordinate_arrays(aper_map, data_dict)
    save_data_maps(map_coords, data_coords, aper_map, data_dict, args.rho)
Example #10
def main():
    r"""
    Driver function to handle parsing command line args and running the model.
    """
    # parsing commandline args
    args = parser.parse_args()
    if args.verbose:
        set_main_logger_level('debug')
    #
    for n, input_file in enumerate(args.input_files):
        msg = 'Processing input file {} of {}: {}'
        fname = os.path.split(input_file)[1]
        logger.debug(msg.format(n + 1, len(args.input_files), fname))
        input_file = InputFile(input_file)
        #
        ram_req = estimate_req_RAM([input_file['APER-MAP'].value])[0]
        msg = 'Map will require approximately {:0.6f} GBs of RAM'.format(
            ram_req)
        logger.info(msg)
        #
        if args.executable:
            input_file.executable = args.executable
        #
        run_model(input_file, synchronous=True, show_stdout=True)
Example #11
| Written By: Matthew Stadelman
| Date Written: 2016/09/13
| Last Modified: 2017/04/23

|

"""
import argparse
from argparse import RawDescriptionHelpFormatter as RawDesc
import os
from apmapflow import _get_logger, set_main_logger_level
from apmapflow import FractureImageStack


# setting up logger
set_main_logger_level('info')
logger = _get_logger('apmapflow.scripts')

# creating arg parser
parser = argparse.ArgumentParser(description=__doc__, formatter_class=RawDesc)

# adding arguments
parser.add_argument('-f', '--force', action='store_true',
                    help='allows program to overwrite existing files')

parser.add_argument('-v', '--verbose', action='store_true',
                    help='debug messages are printed to the screen')

parser.add_argument('-o', '--output-dir',
                    type=os.path.realpath, default=os.getcwd(),
                    help='outputs files to the specified directory')
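With only the three optional arguments shown in this fragment, the parser can be exercised directly; a hypothetical invocation:

args = parser.parse_args(['-v', '-f', '-o', './results'])
print(args.verbose, args.force, args.output_dir)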
Example #12
def main():
    r"""
    Driver program to load an image and process it to output hurst exponents
    """
    # parsing command line args
    args = parser.parse_args()
    if args.verbose:
        set_main_logger_level('debug')
    #
    # checking output file path and setting default if required
    if args.data_filename is None:
        args.data_filename = os.path.basename(args.image_file)
        args.data_filename = os.path.splitext(args.data_filename)[0]
        args.data_filename += '-df' + os.extsep + 'txt'
    args.data_filename = os.path.join(args.output_dir, args.data_filename)
    #
    if os.path.exists(args.data_filename) and not args.force:
        msg = 'File %s already exists, '
        msg += 'use "-f" option to overwrite'
        raise FileExistsError(msg % args.data_filename)
    #
    os.makedirs(os.path.split(args.data_filename)[0], exist_ok=True)
    #
    # setting up which traces to calculate
    if (args.bot or args.top) and not args.mid:
        traces = []
    else:
        traces = ['mid']
    #
    if args.bot:
        traces.append('bot')
    if args.top:
        traces.append('top')
    #
    # loading image data
    logger.info('loading image...')
    image_data = FractureImageStack(args.image_file)
    if args.invert:
        logger.debug('inverting image data')
        image_data = ~image_data
    logger.debug('image dimensions: {} {} {}'.format(*image_data.shape))
    #
    # processing data along each axis
    x_data = None
    if args.x_axis:
        logger.info('calculating the fractal dimension for the x-axis')
        x_data = []
        for i in range(image_data.shape[2]):
            logger.debug('Processing x axis slice %d', i)
            slice_data = image_data[:, :, i]
            fracture_slice = process_slice(slice_data, traces)
            x_data.append(fracture_slice)
    #
    z_data = None
    if args.z_axis:
        logger.info('calculating the fractal dimension for the z-axis')
        z_data = []
        for i in range(image_data.shape[0]):
            logger.debug('Processing z axis slice %d', i)
            slice_data = image_data[i, :, :].T
            fracture_slice = process_slice(slice_data, traces)
            z_data.append(fracture_slice)

    # saving data
    logger.info('saving fractal dimension exponent data to file')
    with open(args.data_filename, 'w') as outfile:
        output_data(outfile, traces, x_data=x_data, z_data=z_data)
Example #13
def main():
    r"""
    Driver program to load an image and generate maps. Memory
    requirements when processing a large TIFF stack can be very high.
    """
    # parsing commandline args
    args = parser.parse_args()
    if args.verbose:
        set_main_logger_level('debug')
    #
    # initializing output filenames as needed and prepending the output path
    img_basename = os.path.basename(args.image_file)
    img_basename = os.path.splitext(img_basename)[0]
    if args.aper_map_name is None:
        args.aper_map_name = img_basename + '-aperture-map.txt'
    #
    if args.offset_map_name is None:
        args.offset_map_name = img_basename + '-offset-map.txt'
    #
    if args.img_stack_name is None:
        args.img_stack_name = img_basename + '-processed.tif'
    #
    aper_map_file = os.path.join(args.output_dir, args.aper_map_name)
    offset_map_file = os.path.join(args.output_dir, args.offset_map_name)
    img_stack_file = os.path.join(args.output_dir, args.img_stack_name)
    #
    # checking paths
    if not args.no_aper_map:
        if os.path.exists(aper_map_file) and not args.force:
            msg = '{} already exists, use "-f" option to overwrite'
            raise FileExistsError(msg.format(aper_map_file))
    #
    if not args.no_offset_map:
        if os.path.exists(offset_map_file) and not args.force:
            msg = '{} already exists, use "-f" option to overwrite'
            raise FileExistsError(msg.format(offset_map_file))
    #
    if not args.no_img_stack:
        if os.path.exists(img_stack_file) and not args.force:
            msg = '{} already exists, use "-f" option to overwrite'
            raise FileExistsError(msg.format(img_stack_file))
    #
    # loading image data
    logger.info('loading image...')
    img_data = FractureImageStack(args.image_file)
    if args.invert:
        logger.debug('inverting image data')
        img_data = ~img_data
    logger.debug('image dimensions: {} {} {}'.format(*img_data.shape))
    #
    # processing image stack based on connectivity
    if args.num_clusters:
        kwargs = {
            'output_img': args.gen_cluster_img,
            'img_name': os.path.splitext(img_stack_file)[0] + '-clusters.tif',
            'img_shape': img_data.shape
        }
        img_data = process_image(img_data, args.num_clusters, **kwargs)

    #
    # outputting aperture map
    if not args.no_aper_map:
        aper_map = img_data.create_aperture_map()
        logger.info('saving aperture map file')
        sp.savetxt(aper_map_file, aper_map, fmt='%d', delimiter='\t')
        del aper_map
    #
    # outputting offset map
    if not args.no_offset_map:
        offset_map = calculate_offset_map(img_data)
        #
        # saving map
        logger.info('saving offset map file')
        sp.savetxt(offset_map_file, offset_map, fmt='%f', delimiter='\t')
        del offset_map
    #
    # saving image data
    if args.num_clusters and not args.no_img_stack:
        logger.info('saving copy of processed image data')
        img_data.save(img_stack_file, overwrite=args.force)
Example #14
def main():
    r"""
    Processes the command line arguments and generates the mesh
    """
    #
    namespace = parser.parse_args()
    if namespace.verbose:
        set_main_logger_level('debug')
    #
    # initial mesh parameters
    mesh_params = {
        'convertToMeters': '2.680E-5',
        'numbersOfCells': '(1 1 1)',
        #
        'boundary.left.type': 'wall',
        'boundary.right.type': 'wall',
        'boundary.top.type': 'wall',
        'boundary.bottom.type': 'wall',
        'boundary.front.type': 'wall',
        'boundary.back.type': 'wall'
    }
    #
    # reading params file if supplied
    if namespace.read_file:
        logger.info('Reading parameters file...')
        read_params_file(namespace.read_file, mesh_params)
    #
    # creating data field from aperture map
    logger.info('Processing aperture map...')
    map_field = DataField(namespace.map_file)
    #
    # reading offset file if provided
    offset_field = None
    if namespace.offset_file:
        offset_field = DataField(namespace.offset_file)
    #
    # setting up mesh generator
    system_dir = os.path.join(namespace.system_path, 'system')
    kwargs = {'nprocs': namespace.np,
              'avg_fact': namespace.avg_fact,
              'mesh_params': mesh_params,
              'offset_field': offset_field}
    #
    logger.info('Setting generator up...')
    pmg = ParallelMeshGen(map_field, system_dir, **kwargs)
    #
    # creating the mesh
    logger.info('Creating the mesh...')
    pmg.generate_mesh(namespace.mesh_type,
                      path=namespace.output_dir,
                      overwrite=namespace.force)
    #
    # moving mesh files out of region directory
    out_path = namespace.output_dir
    reg_dir = os.path.join(out_path, 'mesh-region0', '*')
    if namespace.force:
        os.system('cp -ralf {} {}'.format(reg_dir, out_path))
        os.system('rm -rf {}'.format(os.path.join(out_path, 'mesh-region0')))
    else:
        os.system('mv {} {}'.format(reg_dir, out_path))
        os.system('rmdir {}'.format(os.path.join(out_path, 'mesh-region0')))