def statgenerate(ctx, filename: str):
    """Generate particle statistics based on the data in the input file.

    :param ctx: CLI context object (unused here; supplied by the CLI framework).
    :param filename: Name of the input file expected in the current working
        directory.
    :raises ValueError: If *filename* is ``None``.
    """
    # PEP 8: compare against None with ``is``, not ``==`` (was ``filename == None``).
    if filename is None:
        raise ValueError(
            'Please provide the name of the input file available in the current directory!'
        )
    cwd = os.getcwd()
    particleStatGenerator(cwd + '/' + filename)
def statgenerate(ctx, filename: str):
    """Generate particle statistics based on the data in the input file.

    Prints an error message to stderr and exits when no filename is given or
    when the named file does not exist in the current working directory.

    :param ctx: CLI context object (unused here; supplied by click).
    :param filename: Name of the input file expected in the current working
        directory.
    """
    # PEP 8: compare against None with ``is``, not ``==`` (was ``filename == None``).
    if filename is None:
        click.echo('')
        click.echo('Please provide the name of the input file available in the current directory', err=True)
        click.echo('For more info. run: kanapy statgenerate --help\n', err=True)
        # Exit non-zero so shell callers can detect the failure
        # (previously exited with status 0 despite writing an error to stderr).
        sys.exit(1)
    else:
        cwd = os.getcwd()
        if not os.path.exists(cwd + '/{}'.format(filename)):
            click.echo('')
            click.echo("Mentioned file: '{}' does not exist in the current working directory!\n".format(filename), err=True)
            sys.exit(1)
        particleStatGenerator(cwd + '/' + filename)
def main():
    """Run the complete microstructure generation workflow.

    The process consists of 4 stages:

    * Particle data generation based on user-defined statistics.
    * Particle packing routine.
    * RVE voxelization routine.
    * Writing output files.

    Individual stages can be run by commenting out the remaining stages.
    """
    stat_file = os.getcwd() + '/stat_input.json'
    # Stage 1: generate data for the particle simulation
    particleStatGenerator(stat_file)
    # Stage 2: particle packing simulation
    packingRoutine()
    # Stage 3: RVE voxelization (meshing)
    voxelizationRoutine(750)
    # Stage 4: write the Abaqus input (.inp) file, then compare
    # input and output statistics
    write_abaqus_inp()
    write_output_stat()
def test_voxelizationRoutine():
    # A timestep for which no dump file exists must raise FileNotFoundError.
    with pytest.raises(FileNotFoundError):
        voxelizationRoutine(12597856985475)

    # Locations of the temporary files/directories the routine chain creates.
    cwd = os.getcwd()
    json_dir = cwd + '/json_files'
    dump_dir = cwd + '/dump_files'
    stat_inp = cwd + '/stat_input.json'

    # Write a temporary user-defined statistics input file.
    stats = {
        'Equivalent diameter': {'std': 0.531055, 'mean': 2.76736,
                                'cutoff_min': 1.0, 'cutoff_max': 2.0},
        'Aspect ratio': {'mean': 2.5},
        'Orientation': {'sigma': 28.8, 'mean': 87.4},
        'RVE': {'side_length': 3, 'voxel_per_side': 10},
        'Simulation': {'nsteps': 1000, 'periodicity': 'True',
                       'output_units': 'mm'},
    }
    with open(stat_inp, 'w') as fh:
        json.dump(stats, fh, indent=2)
    particleStatGenerator(stat_inp)

    # Read back the intermediate JSON files produced by the generator.
    with open(json_dir + '/RVE_data.json') as fh:
        RVE_data = json.load(fh)
    with open(json_dir + '/particle_data.json') as fh:
        particle_data = json.load(fh)

    # Build the simulation box and particles, then dump them for voxelization.
    size = RVE_data['RVE_size']
    sim_box = Simulation_Box(size, size, size)
    Particles = particle_generator(particle_data, sim_box)
    write_dump(Particles, sim_box, len(Particles))

    voxelizationRoutine(0)

    # The voxelizer must have written all three mesh dictionaries.
    for fname in ('nodeDict.json', 'elmtDict.json', 'elmtSetDict.json'):
        assert os.path.isfile(json_dir + '/' + fname)

    # Remove everything this test created.
    os.remove(stat_inp)
    shutil.rmtree(json_dir)
    shutil.rmtree(dump_dir)
def test_voxelizationRoutine():
    # Locations of the temporary files/directories the routine chain creates.
    cwd = os.getcwd()
    json_dir = cwd + '/json_files'
    dump_dir = cwd + '/dump_files'
    stat_inp = cwd + '/input_test.json'

    # Write a temporary user-defined statistics input file.
    stats = {
        'Equivalent diameter': {'std': 0.531055, 'mean': 2.76736,
                                'cutoff_min': 1.0, 'cutoff_max': 2.0},
        'Aspect ratio': {'mean': 2.5},
        'Tilt angle': {'sigma': 28.8, 'mean': 87.4},
        'RVE': {'sideX': 3, 'sideY': 3, 'sideZ': 3,
                'Nx': 10, 'Ny': 10, 'Nz': 10},
        'Simulation': {'periodicity': 'True', 'output_units': 'mm'},
    }
    with open(stat_inp, 'w') as fh:
        json.dump(stats, fh, indent=2)
    particleStatGenerator(stat_inp)

    # Read back the intermediate JSON files produced by the generator.
    with open(json_dir + '/RVE_data.json') as fh:
        RVE_data = json.load(fh)
    with open(json_dir + '/particle_data.json') as fh:
        particle_data = json.load(fh)

    # Build the simulation box and particles, then dump them for voxelization.
    sim_box = Simulation_Box(RVE_data['RVE_sizeX'],
                             RVE_data['RVE_sizeY'],
                             RVE_data['RVE_sizeZ'])
    sim_box.sim_ts = 580  # timestep tag — presumably used to locate the dump file; verify against write_dump
    Particles = particle_generator(particle_data, sim_box)
    write_dump(Particles, sim_box, len(Particles))

    voxelizationRoutine()

    # The voxelizer must have written all three mesh dictionaries.
    for fname in ('nodeDict.json', 'elmtDict.json', 'elmtSetDict.json'):
        assert os.path.isfile(json_dir + '/' + fname)

    # Remove everything this test created.
    os.remove(stat_inp)
    shutil.rmtree(json_dir)
    shutil.rmtree(dump_dir)