Example #1
        def get_batch(n, bs, indices):
            """Assemble the n-th mini-batch of size bs from the data set.

            Parameters
            ----------
            n : int
                Zero-based batch index.
            bs : int
                Batch size.
            indices : np.ndarray
                Array of example IDs; positions n*bs .. (n+1)*bs - 1 are used.

            Returns
            -------
            tuple
                (data_batch, mesh_batch, soln_batch, domain_batch): the first
                three are the per-example arrays stacked along a new leading
                batch axis; domain_batch holds, per example, the count of
                mesh entries > 0 (interior/domain points).
            """
            # Fancy indexing (rather than a slice) preserves the original
            # behavior of raising IndexError on a short final batch.
            batch_IDs = indices[list(range(n * bs, (n + 1) * bs))]

            # Hoist the loop-invariant directory joins out of the read loop.
            data_path = os.path.join(DATA_dir, data_dir)
            mesh_path = os.path.join(DATA_dir, mesh_dir)
            soln_path = os.path.join(DATA_dir, soln_dir)

            data_list = []
            mesh_list = []
            soln_list = []
            domain_list = []

            for ID in batch_IDs:
                # Read mesh and data files.  Each reader returns a
                # channel-first array that is transposed to [H, W, C] and
                # given a leading batch axis of size 1.  (Assumes 3-D
                # source arrays — TODO confirm against the read_* helpers.)
                source = read_data(ID, data_path, USE_HIRES=USE_HIRES)
                data_list.append(np.expand_dims(np.transpose(source, (1, 2, 0)), 0))

                mesh_data = read_mesh(ID, mesh_path, USE_HIRES=USE_HIRES)
                mesh_array = np.expand_dims(np.transpose(mesh_data, (1, 2, 0)), 0)
                mesh_list.append(mesh_array)

                y_data = read_soln(ID, soln_path, USE_HIRES=USE_HIRES)
                soln_list.append(np.expand_dims(np.transpose(y_data, (1, 2, 0)), 0))

                # Count of positive-mesh points; equivalent to
                # mesh_array[mesh_array > 0.0].size without the copy.
                domain_list.append(np.count_nonzero(mesh_array > 0.0))

            data_batch = np.concatenate(data_list, axis=0)
            mesh_batch = np.concatenate(mesh_list, axis=0)
            soln_batch = np.concatenate(soln_list, axis=0)
            domain_batch = np.array(domain_list)

            return data_batch, mesh_batch, soln_batch, domain_batch
Example #2
        # Run initial session to remove graph loading time
        # (warm-up: the first sess.run pays a one-time setup cost, so it is
        # spent here on example 0 before any timed/real work).

        # Read mesh and data files
        source = read_data(0,
                           os.path.join(DATA_dir, data_dir),
                           USE_HIRES=USE_HIRES)
        # Channel-first -> [H, W, C], plus a leading batch axis of size 1.
        data_batch = np.expand_dims(np.transpose(source, (1, 2, 0)), 0)

        mesh_data = read_mesh(0,
                              os.path.join(DATA_dir, mesh_dir),
                              USE_HIRES=USE_HIRES)
        mesh_batch = np.expand_dims(np.transpose(mesh_data, (1, 2, 0)), 0)

        y_data = read_soln(0,
                           os.path.join(DATA_dir, soln_dir),
                           USE_HIRES=USE_HIRES)
        soln_batch = np.expand_dims(np.transpose(y_data, (1, 2, 0)), 0)

        # Compute network prediction (result of this warm-up run is unused).
        y_out = sess.run(y_pred,
                         feed_dict={
                             data: data_batch,
                             mesh: mesh_batch,
                             soln: soln_batch
                         })

        # Load training and validation indices
        # (example-ID arrays saved earlier with np.save — presumably by the
        # data-split step; verify against the writer of these files).
        t_indices = np.load(t_indices_file)
        v_indices = np.load(v_indices_file)
Example #3
    # Look up the frozen graph's input placeholders and prediction tensor
    # by name ('prefix/' is the import scope used when loading the graph).
    mesh = graph.get_tensor_by_name('prefix/mesh_test:0')
    soln = graph.get_tensor_by_name('prefix/soln_test:0')
    y_pred = graph.get_tensor_by_name('prefix/masked_pred_test:0')

    with tf.Session(graph=graph) as sess:

        # Run initial session to remove graph loading time
        # (warm-up on example 0; the first sess.run pays a one-time setup
        # cost, and this result is discarded).

        # Read mesh and data files
        source = read_data(0, data_dir, USE_HIRES=USE_HIRES)
        # Channel-first -> [H, W, C], plus a leading batch axis of size 1.
        data_batch = np.expand_dims(np.transpose(source, (1, 2, 0)), 0)

        mesh_data = read_mesh(0, mesh_dir, USE_HIRES=USE_HIRES)
        mesh_batch = np.expand_dims(np.transpose(mesh_data, (1, 2, 0)), 0)

        y_data = read_soln(0, soln_dir, USE_HIRES=USE_HIRES)
        soln_batch = np.expand_dims(np.transpose(y_data, (1, 2, 0)), 0)

        # Compute network prediction (warm-up run; output unused).
        y_out = sess.run(y_pred,
                         feed_dict={
                             data: data_batch,
                             mesh: mesh_batch,
                             soln: soln_batch
                         })

        # Read mesh and data files for the requested example ID
        # (the real prediction; continues past this excerpt).
        source = read_data(ID, data_dir, USE_HIRES=USE_HIRES)
        data_batch = np.expand_dims(np.transpose(source, (1, 2, 0)), 0)

        mesh_data = read_mesh(ID, mesh_dir, USE_HIRES=USE_HIRES)