def create_network(self):
    ''' Execute dfnGen

    Parameters
    ----------
        self : object
            DFN Class

    Returns
    -------
        None

    Notes
    -----
        After generation is complete, this script checks whether the
        generation of the fracture network failed or succeeded based on the
        existence of the file params.txt.
    '''
    print('--> Running DFNGEN')
    # Run the dfnGen executable on the cleaned input file; output is written
    # into the job folder
    cmd = os.environ['DFNGEN_EXE'] + ' ' + \
        self.local_dfnGen_file[:-4] + '_clean.dat' + ' ' + self.jobname

    print("Running %s" % cmd)
    subprocess.call(cmd, shell=True)

    if os.path.isfile("params.txt") is False:
        error = "ERROR! Generation Failed\nExiting Program."
        sys.stderr.write(error)
        sys.exit(1)
    else:
        num_poly, h, _, _, _ = parse_params_file(quite=True)
        self.num_frac = num_poly
        self.h = h
        print('-' * 80)
        print("Generation Succeeded")
        print('-' * 80)
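# Illustrative usage sketch (not part of this module). The surrounding calls
# reflect the usual pydfnworks driver-script workflow; exact method names and
# ordering may differ between dfnWorks versions.
#
#   from pydfnworks import create_dfn
#
#   DFN = create_dfn()             # build the DFN object from input cards / options
#   DFN.make_working_directory()
#   DFN.check_input()
#   DFN.create_network()           # runs DFNGEN; sets DFN.num_frac and DFN.h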
def get_domain():
    ''' Return dictionary of domain x, y, z by calling parse_params_file

    Parameters
    ----------
        None

    Returns
    -------
        domain : dict
            Dictionary of domain sizes in x, y, z

    Notes
    -----
        parse_params_file() is in mesh_dfn_helper.py
    '''
    _, _, _, _, domain = parse_params_file(quite=True)
    return domain
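# Illustrative only (values made up): the returned dictionary holds the domain
# edge lengths parsed from params.txt, keyed by axis, e.g.
#
#   domain = get_domain()
#   # domain -> {'x': 100.0, 'y': 50.0, 'z': 25.0}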
def zone2ex(self, uge_file='', zone_file='', face='', boundary_cell_area=1.e-1):
    """
    Convert zone files from LaGriT into ex format used for boundary conditions

    Parameters
    ----------
        uge_file : string
            Name of uge file
        zone_file : string
            Name of zone file to work on. If 'all', all directions (top,
            bottom, left, right, front, back) are processed
        face : string
            Face of the plane corresponding to the zone file. One of: top,
            bottom, north, south, east, west
        boundary_cell_area : double
            Area assigned to each boundary cell (default 1e-1)

    Returns
    -------
        None

    Notes
    -----
        boundary_cell_area should be a function of h, the mesh resolution
    """

    print('--> Converting zone files to ex')

    if self.uge_file:
        uge_file = self.uge_file
    else:
        self.uge_file = uge_file

    uge_file = self.uge_file
    if uge_file == '':
        error = 'ERROR: Please provide uge filename!'
        sys.stderr.write(error)
        sys.exit(1)

    # Opening uge file
    print('\n--> Opening uge file')
    fuge = open(uge_file, 'r')

    # Reading cell ids, cell centers and cell volumes
    line = fuge.readline()
    line = line.split()
    NumCells = int(line[1])

    Cell_id = np.zeros(NumCells, 'int')
    Cell_coord = np.zeros((NumCells, 3), 'float')
    Cell_vol = np.zeros(NumCells, 'float')

    for cells in range(NumCells):
        line = fuge.readline()
        line = line.split()
        Cell_id[cells] = int(line.pop(0))
        line = [float(id) for id in line]
        Cell_vol[cells] = line.pop(3)
        Cell_coord[cells] = line

    fuge.close()

    print('--> Finished with uge file\n')

    # loop through zone files
    if zone_file == 'all':
        zone_files = ['pboundary_front_n.zone', 'pboundary_back_s.zone',
                      'pboundary_left_w.zone', 'pboundary_right_e.zone',
                      'pboundary_top.zone', 'pboundary_bottom.zone']
        face_names = ['north', 'south', 'west', 'east', 'top', 'bottom']
    else:
        if zone_file == '':
            error = 'ERROR: Please provide boundary zone filename!'
            sys.stderr.write(error)
            sys.exit(1)
        if face == '':
            error = 'ERROR: Please provide face name among: top, bottom, north, south, east, west !'
            sys.stderr.write(error)
            sys.exit(1)
        zone_files = [zone_file]
        face_names = [face]

    for iface, zone_file in enumerate(zone_files):
        face = face_names[iface]
        # Ex filename
        ex_file = zone_file.strip('zone') + 'ex'

        # Opening the input file
        print('--> Opening zone file: ', zone_file)
        fzone = open(zone_file, 'r')
        fzone.readline()
        fzone.readline()
        fzone.readline()

        # Read number of boundary nodes
        print('--> Calculating number of nodes')
        num_nodes = int(fzone.readline())
        Node_array = np.zeros(num_nodes, 'int')

        # Read the boundary node ids (written 10 per line in the zone file)
        print('--> Reading boundary node ids')

        if (num_nodes < 10):
            g = fzone.readline()
            node_array = g.split()
            # Convert string to integer array
            node_array = [int(id) for id in node_array]
            Node_array = np.asarray(node_array)
        else:
            for i in range(int(num_nodes / 10 + (num_nodes % 10 != 0))):
                g = fzone.readline()
                node_array = g.split()
                # Convert string to integer array
                node_array = [int(id) for id in node_array]
                if (num_nodes - 10 * i < 10):
                    for j in range(num_nodes % 10):
                        Node_array[i * 10 + j] = node_array[j]
                else:
                    for j in range(10):
                        Node_array[i * 10 + j] = node_array[j]
        fzone.close()
        print('--> Finished with zone file')

        if self.h == "":
            from pydfnworks.dfnGen.mesh_dfn_helper import parse_params_file
            _, self.h, _, _, _ = parse_params_file(quite=True)

        Boundary_cell_area = np.zeros(num_nodes, 'float')
        for i in range(num_nodes):
            # Assign the prescribed area to every boundary cell
            Boundary_cell_area[i] = boundary_cell_area

        print('--> Finished calculating boundary connections')

        boundary_cell_coord = [
            Cell_coord[Cell_id[i - 1] - 1] for i in Node_array
        ]
        epsilon = self.h * 10**-3

        if (face == 'top'):
            boundary_cell_coord = [[cell[0], cell[1], cell[2] + epsilon]
                                   for cell in boundary_cell_coord]
        elif (face == 'bottom'):
            boundary_cell_coord = [[cell[0], cell[1], cell[2] - epsilon]
                                   for cell in boundary_cell_coord]
        elif (face == 'north'):
            boundary_cell_coord = [[cell[0], cell[1] + epsilon, cell[2]]
                                   for cell in boundary_cell_coord]
        elif (face == 'south'):
            boundary_cell_coord = [[cell[0], cell[1] - epsilon, cell[2]]
                                   for cell in boundary_cell_coord]
        elif (face == 'east'):
            boundary_cell_coord = [[cell[0] + epsilon, cell[1], cell[2]]
                                   for cell in boundary_cell_coord]
        elif (face == 'west'):
            boundary_cell_coord = [[cell[0] - epsilon, cell[1], cell[2]]
                                   for cell in boundary_cell_coord]
        elif (face == 'none'):
            boundary_cell_coord = [[cell[0], cell[1], cell[2]]
                                   for cell in boundary_cell_coord]
        else:
            error = 'ERROR: unknown face. Select one of: top, bottom, east, west, north, south.'
            sys.stderr.write(error)
            sys.exit(1)

        with open(ex_file, 'w') as f:
            f.write('CONNECTIONS\t%i\n' % Node_array.size)
            for idx, cell in enumerate(boundary_cell_coord):
                f.write('%i\t%.6e\t%.6e\t%.6e\t%.6e\n' %
                        (Node_array[idx], cell[0], cell[1], cell[2],
                         Boundary_cell_area[idx]))
        print('--> Finished writing ex file "' + ex_file +
              '" corresponding to the zone file: ' + zone_file + '\n')

    print('--> Converting zone files to ex complete')
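# Standalone sketch of the face-offset step above (names and interface are
# illustrative, not part of this module): boundary cell centers are nudged a
# small distance epsilon = h * 1e-3 off the selected domain face before being
# written to the ex file.
import numpy as np

_FACE_DIRECTIONS = {
    'top': np.array([0.0, 0.0, 1.0]),
    'bottom': np.array([0.0, 0.0, -1.0]),
    'north': np.array([0.0, 1.0, 0.0]),
    'south': np.array([0.0, -1.0, 0.0]),
    'east': np.array([1.0, 0.0, 0.0]),
    'west': np.array([-1.0, 0.0, 0.0]),
    'none': np.array([0.0, 0.0, 0.0]),
}

def offset_boundary_cells(coords, face, h):
    """Shift (N, 3) boundary cell centers off the given face by h * 1e-3."""
    epsilon = h * 1.0e-3
    return np.asarray(coords, dtype=float) + epsilon * _FACE_DIRECTIONS[face]

# Example: offset_boundary_cells([[0.0, 0.0, 5.0]], 'top', h=0.1) -> [[0., 0., 5.0001]]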
def map_to_continuum(self, l, orl):
    """ This function generates an octree-refined continuum mesh using the
    reduced_mesh.inp as input. To generate the reduced_mesh.inp, one must
    turn visualization mode on in the DFN input card.

    Parameters
    ----------
        self : object
            DFN Class
        l : float
            Size (m) of level-0 mesh element in the continuum mesh
        orl : int
            Number of total refinement levels in the octree

    Returns
    -------
        None

    Notes
    -----
        octree_dfn.inp : Mesh file
            Octree-refined continuum mesh
        fracX.inp : Mesh files
            Octree-refined continuum meshes, which contain intersection areas
    """
    print('=' * 80)
    print("Meshing Continuum Using LaGrit : Starting")
    print('=' * 80)

    if type(orl) is not int or orl < 1:
        error = "ERROR: orl must be positive integer. Exiting"
        sys.stderr.write(error)
        sys.exit(1)

    # Read in normal vectors and points from dfnWorks output
    normal_vectors = np.genfromtxt('normal_vectors.dat', delimiter=' ')
    with open('translations.dat') as old, open('points.dat', 'w') as new:
        old.readline()
        for line in old:
            if 'R' not in line:
                new.write(line)
    points = np.genfromtxt('points.dat', skip_header=0, delimiter=' ')
    num_poly, _, _, _, domain = mh.parse_params_file(quite=True)

    # Extent of domain
    x0 = 0 - (domain['x'] / 2.0)
    x1 = 0 + (domain['x'] / 2.0)
    y0 = 0 - (domain['y'] / 2.0)
    y1 = 0 + (domain['y'] / 2.0)
    z0 = 0 - (domain['z'] / 2.0)
    z1 = 0 + (domain['z'] / 2.0)

    # Number of cell elements in each direction at the coarse level
    nx = domain['x'] / l + 1
    ny = domain['y'] / l + 1
    nz = domain['z'] / l + 1

    if nx * ny * nz > 1e8:
        error = "ERROR: Number of elements > 1e8. Exiting"
        sys.stderr.write(error)
        sys.exit(1)

    print("\nCreating *.lgi files for octree mesh\n")
    try:
        os.mkdir('octree')
    except OSError:
        rmtree('octree')
        os.mkdir('octree')

    lagrit_driver(nx, ny, nz, num_poly, normal_vectors, points)
    lagrit_parameters(orl, x0, x1, y0, y1, z0, z1, nx, ny, nz)
    lagrit_build()
    lagrit_intersect()
    lagrit_hex_to_tet()
    lagrit_remove()
    lagrit_run()
    lagrit_strip(num_poly)
    driver_parallel(self, num_poly)
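# Quick standalone check of the coarse-grid sizing used above (illustrative
# helper, not part of this module): number of level-0 cells per direction and
# the 1e8-element guard.
import sys

def coarse_grid_size(domain, l, max_elements=1e8):
    """Return (nx, ny, nz) for a level-0 cell size l, or exit if too large."""
    nx = domain['x'] / l + 1
    ny = domain['y'] / l + 1
    nz = domain['z'] / l + 1
    if nx * ny * nz > max_elements:
        sys.exit("ERROR: Number of elements > 1e8. Exiting")
    return nx, ny, nz

# Example: coarse_grid_size({'x': 100, 'y': 100, 'z': 100}, l=5.0) -> (21.0, 21.0, 21.0)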
def mesh_network(self,
                 prune=False,
                 uniform_mesh=False,
                 production_mode=True,
                 refine_factor=1,
                 slope=2,
                 visual_mode=None):
    '''
    Mesh fracture network using LaGriT

    Parameters
    ----------
        self : object
            DFN Class
        prune : bool
            If prune is False, mesh entire network. If prune is True, mesh only fractures in self.prune_file
        uniform_mesh : bool
            If True, mesh is uniform resolution. If False, mesh is spatially variable
        production_mode : bool
            If True, all working files created while meshing are cleaned up. If False, working files will not be deleted
        refine_factor : float
            Determines distance for mesh refinement (default=1)
        slope : float
            Slope of piecewise linear function determining rate of coarsening.
        visual_mode : bool, optional
            To run in a different meshing mode from what is in params.txt, set visual_mode = True/False to override the meshing mode

    Returns
    -------
        None

    Notes
    -----
        1. For a uniform resolution mesh, set slope = 0
        2. All fractures in self.prune_file must intersect at least 1 other fracture
    '''
    print('=' * 80)
    print("Meshing Network Using LaGriT : Starting")
    print('=' * 80)

    if uniform_mesh:
        slope = 0  # Setting slope = 0 results in a uniform mesh

    if prune:
        if self.prune_file == "":
            error = "ERROR!! User requested pruning in meshing but did not provide file of fractures to keep.\nExiting program."
            sys.stderr.write(error)
            sys.exit(1)

        mh.create_mesh_links(self.path)
        num_poly, h, params_visual_mode, dudded_points, domain = mh.parse_params_file(
        )
        if visual_mode is None:
            visual_mode = params_visual_mode
        print("Loading list of fractures to remain in network from %s" %
              self.prune_file)
        fracture_list = sort(genfromtxt(self.prune_file).astype(int))
        print(fracture_list)
        lagrit.edit_intersection_files(num_poly, fracture_list, self.path)
        num_poly = len(fracture_list)
    else:
        num_poly, h, params_visual_mode, dudded_points, domain = mh.parse_params_file(
        )
        if visual_mode is None:
            visual_mode = params_visual_mode
        fracture_list = range(1, num_poly + 1)

    # If the number of CPUs exceeds the number of fractures, only use
    # num_poly CPUs. This change is only made here, so ncpu is still used in
    # PFLOTRAN
    ncpu = min(self.ncpu, num_poly)
    lagrit.create_parameter_mlgi_file(fracture_list, h, slope=slope)
    lagrit.create_lagrit_scripts(visual_mode, ncpu)
    lagrit.create_user_functions()
    failure = run_mesh.mesh_fractures_header(fracture_list, ncpu, visual_mode)
    if failure:
        mh.cleanup_dir()
        error = "One or more fractures failed to mesh properly.\nExiting Program"
        sys.stderr.write(error)
        sys.exit(1)

    n_jobs = lagrit.create_merge_poly_files(ncpu, num_poly, fracture_list, h,
                                            visual_mode, domain,
                                            self.flow_solver)

    run_mesh.merge_the_meshes(num_poly, ncpu, n_jobs, visual_mode)

    if (not visual_mode and not prune):
        if not mh.check_dudded_points(dudded_points):
            mh.cleanup_dir()
            error = "ERROR!!! Incorrect Number of dudded points.\nExiting Program"
            sys.stderr.write(error)
            sys.exit(1)

    if production_mode:
        mh.cleanup_dir()

    if not visual_mode:
        lagrit.define_zones()

    if prune:
        mh.clean_up_files_after_prune(self)

    mh.output_meshing_report(self.local_jobname, visual_mode)
    print("--> Meshing Complete")
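# Illustrative usage sketch (not part of this module); the keyword values
# shown here are examples only.
#
#   DFN.mesh_network()                    # mesh full network, mode taken from params.txt
#   DFN.mesh_network(visual_mode=True)    # override the meshing mode in params.txt
#   DFN.mesh_network(prune=True)          # requires DFN.prune_file to be set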
def uncorrelated(self, mu, sigma, path='../'):
    """ Creates a fracture-based log-normal permeability field with mean mu
    and variance sigma. Aperture is derived using the cubic law.

    Parameters
    ----------
        mu : double
            Mean of log-normal permeability field
        sigma : double
            Variance of permeability field
        path : string
            Path to original network. Can be current directory

    Returns
    -------
        None

    Notes
    -----
        mu is the mean of perm not log(perm)
    """
    from pydfnworks.dfnGen.mesh_dfn_helper import parse_params_file

    print('--> Creating Uncorrelated Transmissivity Fields')
    print('Mean: ', mu)
    print('Variance: ', sigma)
    print('Running un-correlated')

    # Note: need to know the number of fractures; use parse_params_file from
    # mesh_dfn_helper to get n
    # JDH 5/8/2019
    n = parse_params_file(quite=True)[0]

    perm = np.log(mu) * np.ones(n)
    perturbation = np.random.normal(0.0, 1.0, n)
    perm = np.exp(perm + np.sqrt(sigma) * perturbation)
    aper = np.sqrt((12.0 * perm))

    print('\nPerm Stats')
    print('\tMean:', np.mean(perm))
    print('\tMean (log):', np.mean(np.log(perm)))
    print('\tVariance (log):', np.var(np.log(perm)))
    print('\tMinimum:', min(perm))
    print('\tMaximum:', max(perm))
    print('\tMinimum (log):', min(np.log(perm)))
    print('\tMaximum (log):', max(np.log(perm)))

    print('\nAperture Stats')
    print('\tMean:', np.mean(aper))
    print('\tVariance:', np.var(aper))
    print('\tMinimum:', min(aper))
    print('\tMaximum:', max(aper))

    # Write out new aperture.dat and perm.dat files
    output_filename = 'aperture_' + str(sigma) + '.dat'
    f = open(output_filename, 'w+')
    f.write('aperture\n')
    for i in range(n):
        f.write('-%d 0 0 %0.5e\n' % (i + 7, aper[i]))
    f.close()

    try:
        os.symlink(output_filename, 'aperture.dat')
    except:
        print("WARNING!!!! Could not make symlink to aperture.dat file")

    output_filename = 'perm_' + str(sigma) + '.dat'
    f = open(output_filename, 'w+')
    f.write('permeability\n')
    for i in range(n):
        f.write('-%d 0 0 %0.5e %0.5e %0.5e\n' %
                (i + 7, perm[i], perm[i], perm[i]))
    f.close()

    try:
        os.symlink(output_filename, 'perm.dat')
    except:
        print("WARNING!!!! Could not make symlink to perm.dat file")
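# Minimal self-contained sketch of the sampling performed above (assumption:
# the intent is a log-normal permeability field with log-mean log(mu) and
# log-variance sigma, with aperture from the cubic law b = sqrt(12 k)).
# Function name and seed handling are illustrative, not part of this module.
import numpy as np

def sample_uncorrelated_perm(mu, sigma, n, seed=None):
    """Return (perm, aperture) arrays for n fractures."""
    rng = np.random.default_rng(seed)
    log_perm = np.log(mu) + np.sqrt(sigma) * rng.standard_normal(n)
    perm = np.exp(log_perm)
    aperture = np.sqrt(12.0 * perm)  # cubic law
    return perm, aperture

# Example: perm, aper = sample_uncorrelated_perm(1e-12, 1.0, 100, seed=42)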
def upscale(self, mat_perm, mat_por):
    """ Generate permeabilities and porosities based on output of map2continuum.

    Parameters
    ----------
        self : object
            DFN Class
        mat_perm : float
            Matrix permeability (in m^2)
        mat_por : float
            Matrix porosity

    Returns
    -------
        perm_fehm.dat : text file
            Contains permeability data for FEHM input
        rock_fehm.dat : text file
            Contains rock properties data for FEHM input
        mesh_permeability.h5 : h5 file
            Contains permeabilities at each node for PFLOTRAN input
        mesh_porosity.h5 : h5 file
            Contains porosities at each node for PFLOTRAN input

    Notes
    -----
        None
    """
    print('=' * 80)
    print("Generating permeability and porosity for octree mesh: Starting")
    print('=' * 80)

    os.symlink("../params.txt", "params.txt")
    num_poly, _, _, _, domain = mh.parse_params_file()

    # Bring in all the relevant data
    os.symlink("../aperture.dat", "aperture.dat")
    os.symlink("../normal_vectors.dat", "normal_vectors.dat")
    aperture = np.genfromtxt('aperture.dat', skip_header=1)[:, -1]
    normal_vectors = np.genfromtxt('normal_vectors.dat', delimiter=' ')

    if self.flow_solver == "FEHM":
        with open("perm_fehm.dat", "w") as f:
            f.write("perm\n")
        with open("rock_fehm.dat", "w") as g:
            g.write("rock\n")

    # Make dictionary w/ cell IDs (keys) and intersecting fractures, areas (values)
    for i in range(1, num_poly + 1):
        if i == 1:
            with open('frac1.inp', 'r') as f:
                line = f.readline().strip().split()
                num_nodes = int(float(line[0]))
                num_cells = int(float(line[1]))
                f_dict = {}
                perm_var = np.zeros(num_nodes, 'float')
                por_var = np.zeros(num_nodes, 'float')
                cv_vol = np.zeros(num_nodes, 'float')
                iarray = np.zeros(num_nodes, '=i4')
                frac_vol = np.zeros(num_nodes, 'float')
                permX = np.zeros(num_nodes, 'float')
                permY = np.zeros(num_nodes, 'float')
                permZ = np.zeros(num_nodes, 'float')
            f.close()
        with open('frac{0}.inp'.format(i), 'r') as f:
            with open('area_sum{0}.inp'.format(i), 'r') as g:
                # Skip headers, node lines, and cell lines to reach node data
                for j in range(num_nodes):
                    f.readline()
                    g.readline()
                f.readline()
                g.readline()
                for j in range(num_cells):
                    f.readline()
                    g.readline()
                f.readline()
                f.readline()
                f.readline()
                g.readline()
                g.readline()
                g.readline()
                g.readline()
                g.readline()
                g.readline()
                g.readline()
                for j in range(num_nodes):
                    fline = f.readline().strip().split()
                    gline = g.readline().strip().split()
                    iarray[j] = int(float(gline[0]))
                    if int(float(gline[1])) != (num_poly + 1):
                        f_dict.setdefault(j + 1, []).append(
                            (i, float(gline[6])))
            g.close()
        f.close()

    with open('full_mesh.uge', 'r') as f:
        f.readline()
        for j in range(num_nodes):
            fline = f.readline().strip().split()
            cv_vol[j] = float(fline[4])
    f.close()

    # Populate permeability and porosity arrays here
    for i in range(1, num_nodes + 1):
        if i in f_dict:
            # Get porosity:
            for j in range(len(f_dict[i])):
                # Calculate total volume of fractures in cv cell i
                frac_vol[i - 1] += aperture[f_dict[i][j][0] -
                                            1] * f_dict[i][j][1]
            por_var[i - 1] = frac_vol[i - 1] / cv_vol[i - 1]
            if por_var[i - 1] == 0:
                por_var[i - 1] = mat_por
            if por_var[i - 1] > 1.0:
                por_var[i - 1] = 1.0
            # Get permeability:
            perm_tensor = np.zeros([3, 3])
            phi_sum = 0
            for j in range(len(f_dict[i])):
                phi = (aperture[f_dict[i][j][0] - 1] *
                       f_dict[i][j][1]) / cv_vol[i - 1]
                if phi > 1.0:
                    phi = 1.0
                phi_sum += phi
                if phi_sum > 1.0:
                    phi_sum = 1.0
                b = aperture[f_dict[i][j][0] - 1]
                # Construct tensor Omega
                Omega = np.zeros([3, 3])
                n1 = normal_vectors[f_dict[i][j][0] - 1][0]
                n2 = normal_vectors[f_dict[i][j][0] - 1][1]
                n3 = normal_vectors[f_dict[i][j][0] - 1][2]
                Omega[0][0] = (n2)**2 + (n3)**2
                Omega[0][1] = -n1 * n2
                Omega[0][2] = -n3 * n1
                Omega[1][0] = -n1 * n2
                Omega[1][1] = (n3)**2 + (n1)**2
                Omega[1][2] = -n2 * n3
                Omega[2][0] = -n3 * n1
                Omega[2][1] = -n2 * n3
                Omega[2][2] = (n1)**2 + (n2)**2
                perm_tensor += (phi * (b)**2 * Omega)
            perm_tensor *= 1. / 12

            # Calculate eigenvalues
            permX[i - 1] = np.linalg.eigvals(perm_tensor)[0]
            permY[i - 1] = np.linalg.eigvals(perm_tensor)[1]
            permZ[i - 1] = np.linalg.eigvals(perm_tensor)[2]

            # Arithmetic average of matrix perm
            permX[i - 1] += (1 - phi_sum) * mat_perm
            permY[i - 1] += (1 - phi_sum) * mat_perm
            permZ[i - 1] += (1 - phi_sum) * mat_perm

            # Correction factor
            # Actual value doesn't matter here, just needs to be high
            min_n1 = 1e6
            min_n2 = 1e6
            min_n3 = 1e6

            # See Sweeney et al. 2019 Computational Geoscience
            for j in range(len(f_dict[i])):
                n1_temp = normal_vectors[f_dict[i][j][0] - 1][0]
                theta1_t = m.degrees(m.acos(n1_temp)) % 90
                if abs(theta1_t - 45) <= min_n1:
                    theta1 = theta1_t
                    min_n1 = theta1_t
                n2_temp = normal_vectors[f_dict[i][j][0] - 1][1]
                theta2_t = m.degrees(m.acos(n2_temp)) % 90
                if abs(theta2_t - 45) <= min_n2:
                    theta2 = theta2_t
                    min_n2 = theta2_t
                n3_temp = normal_vectors[f_dict[i][j][0] - 1][2]
                theta3_t = m.degrees(m.acos(n3_temp)) % 90
                if abs(theta3_t - 45) <= min_n3:
                    theta3 = theta3_t
                    min_n3 = theta3_t

            sl = (2 * 2**(1. / 2) - 1) / -45.0
            b = 2 * 2**(1. / 2)

            cf_x = sl * abs(theta1 - 45) + b
            cf_y = sl * abs(theta2 - 45) + b
            cf_z = sl * abs(theta3 - 45) + b

            permX[i - 1] *= cf_x
            permY[i - 1] *= cf_y
            permZ[i - 1] *= cf_z

            # Correct 0 perm if it exists
            if permX[i - 1] == 0:
                permX[i - 1] += mat_perm
            if permY[i - 1] == 0:
                permY[i - 1] += mat_perm
            if permZ[i - 1] == 0:
                permZ[i - 1] += mat_perm

            perm_var[i - 1] = max(permX[i - 1], permY[i - 1], permZ[i - 1])
        else:
            # Assign matrix properties
            por_var[i - 1] = mat_por
            perm_var[i - 1] = mat_perm
            # Note these aren't needed if not using anisotropic perm
            permX[i - 1] = mat_perm
            permY[i - 1] = mat_perm
            permZ[i - 1] = mat_perm

        if self.flow_solver == "FEHM":
            with open("perm_fehm.dat", "a") as f:
                f.write(
                    str(i) + " " + str(i) + " " + "1" + " " +
                    str(permX[i - 1]) + " " + str(permY[i - 1]) + " " +
                    str(permZ[i - 1]) + "\n")
            with open("rock_fehm.dat", "a") as g:
                g.write(
                    str(i) + " " + str(i) + " " + "1" + " " + "2757." + " " +
                    "1180." + " " + str(por_var[i - 1]) + "\n")

    # Need an extra space at end for FEHM
    if self.flow_solver == "FEHM":
        with open("perm_fehm.dat", "a") as f:
            f.write("\n")
        with open("rock_fehm.dat", "a") as g:
            g.write("\n")

    if self.flow_solver == "PFLOTRAN":
        perm_filename = 'mesh_permeability.h5'
        poros_filename = 'mesh_porosity.h5'

        h5file = h5py.File(perm_filename, mode='w')
        dataset_name = 'Cell Ids'
        h5dset = h5file.create_dataset(dataset_name, data=iarray)
        dataset_name = 'Permeability'
        h5dset = h5file.create_dataset(dataset_name, data=perm_var)
        #dataset_name = 'Perm_X'
        #h5dset = h5file.create_dataset(dataset_name, data=permX)
        #dataset_name = 'Perm_Y'
        #h5set = h5file.create_dataset(dataset_name, data=permY)
        #dataset_name = 'Perm_Z'
        #h5set = h5file.create_dataset(dataset_name, data=permZ)
        h5file.close()

        h5file = h5py.File(poros_filename, mode='w')
        dataset_name = 'Cell Ids'
        h5dset = h5file.create_dataset(dataset_name, data=iarray)
        dataset_name = 'Porosity'
        h5dset = h5file.create_dataset(dataset_name, data=por_var)
        h5file.close()

    upscale_cleanup()
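# Self-contained sketch of the per-fracture contribution accumulated in the
# loop above (cf. Sweeney et al. 2019): for a fracture with unit normal n,
# aperture b, and volume fraction phi in a continuum cell, the contribution to
# the upscaled permeability tensor is (phi * b**2 / 12) * (I - n n^T), which is
# the Omega construction written out element-by-element above. Names here are
# illustrative, not part of this module.
import numpy as np

def fracture_perm_contribution(n, b, phi):
    """Permeability tensor contribution of one fracture in a continuum cell."""
    n = np.asarray(n, dtype=float)
    n = n / np.linalg.norm(n)               # ensure unit normal
    Omega = np.eye(3) - np.outer(n, n)      # projector onto the fracture plane
    return (phi * b**2 / 12.0) * Omega

# Example:
#   k = fracture_perm_contribution([0.0, 0.0, 1.0], 1e-4, 0.05)
#   np.linalg.eigvals(k)  # two in-plane values of phi*b**2/12 and one ~0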