def convert_solution_to_common_h5(self, filename):
    root = filename.rsplit('.', 1)[0]
    solution_filename = '{}_tough3.h5'.format(root)
    solution = QASolutionWriter(solution_filename)
    tough_obs = []
    for dirpath, dirnames, files in os.walk('.'):
        for name in files:
            if name.startswith('FOFT') and name.endswith('.csv'):
                tough_obs.append(name)
    self._process_output_time_slice('OUTPUT_ELEME.csv', solution)
    if len(tough_obs) > 0:
        self._process_output_observation_file(solution, tough_obs)
    solution.destroy()
    return solution_filename
def convert_solution_to_common_h5(self, filename):
    root = filename.rsplit('.', 1)[0]
    solution_filename = '{}_tdycore.h5'.format(root)
    solution = QASolutionWriter(solution_filename)
    #--------------------------------------------------
    # Assumptions:
    #   The results are on a 2D unit square. Calculate the discretization
    #   accordingly.
    #
    # fin = open('{}.sol'.format(root), 'r')
    fin = open('{}.vtk'.format(root), 'r')
    # Read the scalar values that follow the LOOKUP_TABLE header in the
    # legacy VTK file.
    all_values = []
    found = False
    for line in fin:
        if found:
            try:
                all_values.append(float(line))
            except ValueError:
                break
        if line.startswith("LOOKUP_TABLE"):
            found = True
    fin.close()
    # The grid is assumed square: n cells per side.
    n = int(math.sqrt(len(all_values)))
    all_values = np.asarray(all_values, dtype=np.float64)
    all_values = np.reshape(all_values, (n, n))
    x = np.linspace(0.5 / n, 1. - 0.5 / n, n, dtype=np.float64)
    z = np.zeros(1)
    z[0] = 0.5
    solution.write_coordinates(x, x, z)
    time = 0.  # time = 1.e6
    solution.set_time_unit('y')
    solution.write_dataset(time, all_values, 'Liquid Pressure', 'Time Slice')
    solution.destroy()
    return solution_filename
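# Illustrative sketch (hypothetical helper, not used by the converters above):
# it shows the unit-square discretization assumed by the TDycore converter,
# where an n x n grid has cell centers at 1/(2n), 3/(2n), ..., (2n-1)/(2n).
def _example_unit_square_cell_centers(n=4):
    import numpy as np
    # For n = 4 this returns [0.125, 0.375, 0.625, 0.875].
    return np.linspace(0.5 / n, 1. - 0.5 / n, n, dtype=np.float64)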
def convert_solution_to_common_h5(self, root):
    debug_push('QASimulatorPFLOTRAN convert_solution_to_common_h5')
    solution_filename = '{}_pflotran.h5'.format(root)
    solution = QASolutionWriter(solution_filename)
    h5_filename = '{}_pft.h5'.format(root)
    tec_filename = '{}_pft-obs-0.tec'.format(root)
    time_slice = False
    observation_file = False
    try:
        f = File(h5_filename, 'r')
        time_slice = True
    except (IOError, OSError):
        print('No time slice file found, checking for observation file')
    if time_slice:
        x, y, z = self.get_cell_centered_coordinates_h5(h5_filename)
        solution.write_coordinates(x, y, z)
        first = True
        group_name = 'Time Slice'
        for tkey in list(f.keys()):
            if tkey.startswith('Time'):
                w = tkey.split()
                time_value = float(w[1])
                if first:
                    first = False
                    if len(w) < 3:
                        raise Exception(
                            'Time without unit in {}.'.format(h5_filename))
                    solution.set_time_unit(w[2])
                for dkey in list(f[tkey].keys()):
                    if dkey in h5_mapping:
                        new_key = h5_mapping[dkey]
                    else:
                        new_key = dkey
                    solution.write_dataset(time_value,
                                           np.array(f[tkey + '/' + dkey]),
                                           new_key, group_name)
        f.close()
    try:
        fin = open(tec_filename, 'r')
        observation_file = True
    except (IOError, OSError):
        print('No observation file found')
    if observation_file:
        group_name = 'Observation'
        header = []
        all_values = []
        for line in fin:
            if 'Time' in line:
                header = [x.strip('"').strip().strip(' "')
                          for x in line.split(',')]
            else:
                all_values.append(line.strip().split())
        fin.close()
        all_values = np.asarray(all_values, dtype=np.float64).transpose()
        solution.set_time_unit(header[0].split()[1].strip('[').strip(']'))
        solution.write_time(all_values[0])
        for i in range(1, len(header)):
            # The last three entries of each column header are the
            # observation point coordinates.
            s = [x.strip(')').strip('(') for x in header[i].split()]
            location_floats = [float(v) for v in s[-3:]]
            variable = all_values[i]
            dkey = header[i].split(' obs_pt')[0]
            if dkey in obs_mapping:
                new_key = obs_mapping[dkey]
            else:
                new_key = dkey
            solution.write_dataset(location_floats, variable,
                                   new_key, group_name)
    solution.destroy()
    debug_pop()
    return solution_filename
def convert_solution_to_common_h5(self, filename):
    root = filename.rsplit('.', 1)[0]
    solution_filename = '{}_crunchflow.h5'.format(root)
    solution = QASolutionWriter(solution_filename)
    # Collect spatial output (conc*/saturation*) and observation files.
    c_out = []
    obs_out = []
    for dirpath, dirnames, files in os.walk('.'):
        for name in files:
            if (name.startswith('conc') or name.startswith('saturation')) \
                    and (name.endswith('.out') or  # FIXME: not gonna work
                         name.endswith('.tec')):
                c_out.append(name)
            if name.startswith('crunch_observation') and name.endswith('.out'):
                obs_out.append(name)
    first = True
    for i in range(len(c_out)):
        fin = open(c_out[i], 'r')
        log = False
        time = None
        variables = None
        all_values = []
        for line in fin:
            words = line.strip().split()
            if words == []:
                continue
            if 'Time' in words:
                time = float(words[-1].strip())
            if 'Distance' in words:
                variables = words
            if 'VARIABLES' in words:
                variables = [x.strip('"') for x in words]
                variables = variables[2:]
            if 'Log10' in words:
                log = True
            if words[0][0].isdigit():
                all_values.append(words)
        fin.close()
        all_values = np.asarray(all_values, dtype=np.float64).transpose()
        if not variables:
            print('ERROR: headers could not be found in {}'.format(c_out[i]))
            sys.exit(0)
        if time is None:
            # Tecplot output omits the time; look it (and its unit) up from
            # the input file based on the file index.
            num = re.findall(r'\d+', c_out[i])
            time, time_unit = self._find_time_tecplot(filename, int(num[0]))
        else:
            time_unit = self._find_time_unit_outfile(filename)
        solution.set_time_unit(time_unit)
        if variables[0] == 'Distance':
            # 1D output: the first column is the distance coordinate.
            if first:
                x = all_values[0]
                y = np.ones(len(all_values[0]))
                z = np.ones(len(all_values[0]))
                solution.write_coordinates(x, y, z)
                first = False
            for n in range(1, len(all_values)):
                if variables[n] in time_mapping:
                    new_key = time_mapping[variables[n]]
                else:
                    new_key = variables[n]
                # NOTE: Tecplot output does not record whether values are
                # log10-transformed.
                if log:
                    solution.write_dataset(time, 10**all_values[n],
                                           new_key, 'Time Slice')
                else:
                    solution.write_dataset(time, all_values[n],
                                           new_key, 'Time Slice')
        else:
            # 3D output: the first three columns are x, y, z coordinates.
            if first:
                x = all_values[0]
                y = all_values[1]
                z = all_values[2]
                first = False
                solution.write_coordinates(x, y, z)
            for n in range(3, len(all_values)):
                if variables[n] in time_mapping:
                    new_key = time_mapping[variables[n]]
                else:
                    new_key = variables[n]
                if log:
                    all_values[n] = 10**all_values[n]
                solution.write_dataset(time, all_values[n],
                                       new_key, 'Time Slice')
    first = True
    for i in range(len(obs_out)):
        counter = 0
        fin = open(obs_out[i], 'r')
        all_values = []
        for line in fin:
            words = line.strip().split()
            if counter == 0:
                # First line: the last three entries are the observation
                # point location.
                location = words[-3:]
                location = self._convert_location(filename, location)
            if counter == 1:
                if 'VARIABLES' in words:
                    variables = [x.strip('"').strip('(').strip(')')
                                 for x in words]
                    variables = [val for val in variables if val.isalnum()]
                    variables = variables[1:]
                    solution.set_time_unit(variables[1])
                    variables.pop(1)
                else:
                    variables = words
                    solution.set_time_unit(
                        variables[0].strip(')').split('(')[1])
            if words[0][0].isdigit():
                all_values.append(words)
            counter += 1
        fin.close()
        all_values = np.asarray(all_values, dtype=np.float64).transpose()
        if first:
            solution.write_time(all_values[0])
            first = False
        for n in range(1, len(all_values)):
            if variables[n] in obs_mapping:
                new_key = obs_mapping[variables[n]]
            else:
                new_key = variables[n]
            solution.write_dataset(location, all_values[n],
                                   new_key, 'Observation')
    solution.destroy()
    return solution_filename
def convert_to_h5_file(filename, x, y, z, values):
    solution = QASolutionWriter(filename)
    solution.write_coordinates(x, y, z)
    solution.write_dataset(0.0, values, 'Pressure', 'Time Slice')
    solution.write_time(x)
    solution.write_dataset((0.0, 0.0, 0.0), values, 'Pressure', 'Observation')
    solution.destroy()
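# A minimal usage sketch of convert_to_h5_file (hypothetical file name and
# grid; the array shapes assumed here may differ from what QASolutionWriter
# actually expects):
def _example_convert_to_h5_file():
    import numpy as np
    x = np.array([0.5, 1.5, 2.5])             # cell-center x coordinates
    y = np.array([0.5])                        # single cell in y
    z = np.array([0.5])                        # single cell in z
    values = np.array([1.0e5, 2.0e5, 3.0e5])   # made-up pressures
    convert_to_h5_file('example_pressure.h5', x, y, z, values)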
def convert_solution_to_common_h5(self, filename):
    debug_push('QASimulatorSTOMP convert_solution_to_common_h5')
    solution_filename = '{}_stomp.h5'.format(filename)
    solution = QASolutionWriter(solution_filename)
    tslice_out = []
    x = []
    y = []
    z = []
    dim = [None] * 3
    first_file = True
    for dirpath, dirnames, files in os.walk('.'):
        for name in files:
            if name.startswith('plot') and not name.endswith('.dat'):
                tslice_out.append(name)
    for i in range(len(tslice_out)):
        if first_file:
            fin = open(tslice_out[i], 'r')
            for line in fin:
                line = line.strip()
                if not line:
                    continue
                # get dimensions
                if 'Number of X' in line:
                    words = line.split('=')
                    n = words[1].split()
                    dim[0] = int(n[0])
                if 'Number of Y' in line:
                    words = line.split('=')
                    n = words[1].split()
                    dim[1] = int(n[0])
                if 'Number of Z' in line:
                    words = line.split('=')
                    n = words[1].split()
                    dim[2] = int(n[0])
                # get coordinates
                if 'X-Direction Nodal Vertices, m' in line:
                    for line in fin:
                        line = line.strip()
                        words = line.split()
                        if not words:
                            break
                        # get x-centroid
                        x_centroid = (float(words[0]) + float(words[1])) * 0.5
                        x.append(x_centroid)
                    x = x[:dim[0]]
                if 'Y-Direction Nodal Vertices, m' in line:
                    for line in fin:
                        line = line.strip()
                        words = line.split()
                        if not words:
                            break
                        # get y-centroid
                        y_centroid = (float(words[0]) + float(words[2])) * 0.5
                        y.append(y_centroid)
                    # y repeats every dim[0] cells; keep one value per row
                    start = 0
                    stop = dim[0] * dim[1]
                    step = dim[0]
                    keep_ind = np.arange(start, stop, step)
                    y = [y[ind] for ind in keep_ind]
                if 'Z-Direction Nodal Vertices, m' in line:
                    for line in fin:
                        line = line.strip()
                        words = line.split()
                        if not words:
                            break
                        # get z-centroid
                        z_centroid = (float(words[0]) + float(words[3])) * 0.5
                        z.append(z_centroid)
                    # z repeats every dim[0]*dim[1] cells; keep one per layer
                    start = 0
                    stop = dim[0] * dim[1] * dim[2]
                    step = dim[0] * dim[1]
                    keep_ind = np.arange(start, stop, step)
                    z = [z[ind] for ind in keep_ind]
            fin.close()
            # Check if one of the coordinate lists is empty and, if so,
            # populate it with a single 0.5 value.
            # Note: for 2D problems, STOMP outputs only 2 of the 3 dimensions.
            if not x:
                x = [0.5]
            if not y:
                y = [0.5]
            if not z:
                z = [0.5]
            solution.write_coordinates(x, y, z)
            first_file = False
        # read file
        fin = open(tslice_out[i], 'r')
        time = None
        all_values = []
        for line in fin:
            line = line.strip()
            if not line:
                continue
            # get time
            if 'Time = ' in line:
                words = line.split()
                time_units = words[5].split(',')
                time = float(time_units[0])
                t_units = time_units[1]
                solution.set_time_unit(t_units)
            for key, v_name in time_slice_mapping.items():
                all_values = []
                if key in line:
                    for line in fin:
                        line = line.strip()
                        words = line.split()
                        if not words:
                            break
                        for var_values in words:
                            all_values.append(float(var_values))
                    all_values_np = np.asarray(
                        all_values, dtype=np.float64).transpose()
                    all_values_np = np.reshape(all_values_np,
                                               (dim[0], dim[1], dim[2]),
                                               order='F')
                    solution.write_dataset(time, all_values_np,
                                           v_name, 'Time Slice')
        fin.close()
    solution.destroy()
    debug_pop()
    return solution_filename
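# Illustrative sketch (made-up numbers, hypothetical helper): demonstrates the
# stride-based selection used above to reduce STOMP's per-cell coordinate
# lists to one y value per row and one z value per layer.
def _example_stomp_stride_selection():
    import numpy as np
    dim = [3, 2, 2]                                   # nx, ny, nz
    y_all = [0.5, 0.5, 0.5, 1.5, 1.5, 1.5]            # y centroid per cell in an x-y plane
    keep_ind = np.arange(0, dim[0] * dim[1], dim[0])  # stride of nx picks one per row
    return [y_all[ind] for ind in keep_ind]           # -> [0.5, 1.5]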