def get_batch(n, bs, indices):
    """Assemble the n-th mini-batch of examples.

    Parameters
    ----------
    n : int
        Zero-based batch index; examples n*bs .. (n+1)*bs-1 of `indices`
        are used.
    bs : int
        Batch size.
    indices : numpy.ndarray
        Array of example IDs (shuffled ordering of the dataset).

    Returns
    -------
    tuple
        (data_batch, mesh_batch, soln_batch, domain_batch) where the first
        three are arrays of shape (bs, H, W, C) and domain_batch holds, per
        example, the count of mesh entries > 0.0 (interior points).

    NOTE(review): relies on module-level `DATA_dir`, `data_dir`, `mesh_dir`,
    `soln_dir`, `USE_HIRES` and the `read_*` helpers defined elsewhere in
    this file.
    """
    def _load(reader, subdir, ID):
        # Read one example and reshape to a batch of one:
        # presumably (C, H, W) -> (1, H, W, C); confirm against the
        # read_* helpers' output layout.
        raw = reader(ID, os.path.join(DATA_dir, subdir), USE_HIRES=USE_HIRES)
        return np.expand_dims(np.transpose(raw, (1, 2, 0)), 0)

    # Contiguous slice replaces the original index-list fancy indexing;
    # equivalent for a numpy array and avoids building a throwaway list.
    batch_IDs = indices[n * bs:(n + 1) * bs]

    data_list, mesh_list, soln_list, domain_list = [], [], [], []
    for ID in batch_IDs:
        data_list.append(_load(read_data, data_dir, ID))
        mesh_array = _load(read_mesh, mesh_dir, ID)
        mesh_list.append(mesh_array)
        soln_list.append(_load(read_soln, soln_dir, ID))
        # Count interior points directly instead of materializing the
        # masked copy (mesh_array[mesh_array > 0.0].size).
        domain_list.append(np.count_nonzero(mesh_array > 0.0))

    data_batch = np.concatenate(data_list, axis=0)
    mesh_batch = np.concatenate(mesh_list, axis=0)
    soln_batch = np.concatenate(soln_list, axis=0)
    domain_batch = np.array(domain_list)
    return data_batch, mesh_batch, soln_batch, domain_batch
#print(op.name) # Define input and output nodes data = graph.get_tensor_by_name('prefix/data_test:0') mesh = graph.get_tensor_by_name('prefix/mesh_test:0') soln = graph.get_tensor_by_name('prefix/soln_test:0') y_pred = graph.get_tensor_by_name('prefix/masked_pred_test:0') y_scale = graph.get_tensor_by_name('prefix/masked_scale_test:0') with tf.Session(graph=graph) as sess: # Run initial session to remove graph loading time # Read mesh and data files source = read_data(0, os.path.join(DATA_dir, data_dir), USE_HIRES=USE_HIRES) data_batch = np.expand_dims(np.transpose(source, (1, 2, 0)), 0) mesh_data = read_mesh(0, os.path.join(DATA_dir, mesh_dir), USE_HIRES=USE_HIRES) mesh_batch = np.expand_dims(np.transpose(mesh_data, (1, 2, 0)), 0) y_data = read_soln(0, os.path.join(DATA_dir, soln_dir), USE_HIRES=USE_HIRES) soln_batch = np.expand_dims(np.transpose(y_data, (1, 2, 0)), 0) # Compute network prediction y_out = sess.run(y_pred,
# NOTE(review): this chunk is truncated -- the final read_soln(...) call is
# cut off mid-statement; its remaining arguments live outside this view.
# Report which dataset split the example ID belongs to.
training = np.isin(ID, t_indices, assume_unique=True)
validation = np.isin(ID, v_indices, assume_unique=True)
if training:
    print("\n[ TRAINING SET ]")
elif validation:
    print("\n[ VALIDATION SET ]")
else:
    print("\n[*] Warning: ID not found in indices.")
with tf.Session(graph=graph) as sess:
    # Run an initial throwaway session so graph-loading time is excluded
    # from later timing.
    # Read mesh and data files for example ID 0.
    source = read_data(0, os.path.join(DATA_dir, data_dir), USE_HIRES=USE_HIRES)
    # Reshape to a batch of one: presumably (C, H, W) -> (1, H, W, C);
    # confirm against read_data's output layout.
    data_batch = np.expand_dims(np.transpose(source, (1, 2, 0)), 0)
    # STIFF=True presumably selects stiffness-coefficient data -- confirm
    # against read_data's definition.
    stiff = read_data(0, os.path.join(DATA_dir, data_dir), USE_HIRES=USE_HIRES, STIFF=True)
    coeff_batch = np.expand_dims(np.transpose(stiff, (1, 2, 0)), 0)
    mesh_data = read_mesh(0, os.path.join(DATA_dir, mesh_dir), USE_HIRES=USE_HIRES)
    mesh_batch = np.expand_dims(np.transpose(mesh_data, (1, 2, 0)), 0)
    # Solution read (call truncated in this view)
    y_data = read_soln(0,
# Look up the graph's input and output tensors (exported under the
# 'prefix/' scope of the imported frozen graph).
# Uncomment to list every operator the graph defines:
#   for op in graph.get_operations(): print(op.name)
_tensor = graph.get_tensor_by_name
data = _tensor('prefix/data_test:0')
mesh = _tensor('prefix/mesh_test:0')
soln = _tensor('prefix/soln_test:0')
y_pred = _tensor('prefix/masked_pred_test:0')


def _as_batch(arr):
    # Wrap one example as a batch of one: move the leading axis last and
    # prepend a batch dimension.
    return np.expand_dims(np.transpose(arr, (1, 2, 0)), 0)


with tf.Session(graph=graph) as sess:
    # First session run also absorbs one-time graph loading cost.
    # Load example ID 0 from each of the data / mesh / solution stores.
    source = read_data(0, data_dir, USE_HIRES=USE_HIRES)
    data_batch = _as_batch(source)
    mesh_data = read_mesh(0, mesh_dir, USE_HIRES=USE_HIRES)
    mesh_batch = _as_batch(mesh_data)
    y_data = read_soln(0, soln_dir, USE_HIRES=USE_HIRES)
    soln_batch = _as_batch(y_data)
    # Evaluate the network prediction for this single example.
    feed = {data: data_batch, mesh: mesh_batch, soln: soln_batch}
    y_out = sess.run(y_pred, feed_dict=feed)