def get_flowfront_bool_dryspot(self, filename, states=None):
    """
    Load the flow front for all (or the given) states of one run.

    Args:
        filename: Path to a ``RESULT.erfh5`` file. The matching
            ``meta_data.hdf5`` path is derived by string replacement.
        states: Optional iterable of single-state names. Defaults to
            every state in the result file.

    Returns:
        A list of ``(image, label)`` tuples — ``image`` is the rasterized
        flow front, ``label`` is 1 if the state appears in the meta file's
        dryspot states, else 0 — or ``None`` if a required dataset is
        missing (``KeyError``).
    """
    f = h5py.File(filename, 'r')
    meta_file = h5py.File(
        str(filename).replace("RESULT.erfh5", "meta_data.hdf5"), 'r')
    try:
        if self.ignore_useless_states:
            useless_states = meta_file["useless_states/singlestates"][()]
        array_of_states = meta_file["dryspot_states/singlestates"][()]
        set_of_states = set(array_of_states.flatten())
        coord_as_np_array = f[
            "post/constant/entityresults/NODE/COORDINATE/ZONE1_set0/"
            "erfblock/res"][()]
        # Cut off last column (z), since it is filled with 1s anyway
        _coords = normalize_coords(coord_as_np_array[:, :-1])
        if not states:
            states = f["post"]["singlestate"]

        states = list(states)[self.skip_indizes[0]:
                              self.skip_indizes[1]:
                              self.skip_indizes[2]]

        filling_factors_at_certain_times = [
            f["post"]["singlestate"][state]["entityresults"]["NODE"]
            ["FILLING_FACTOR"]["ZONE1_set1"]["erfblock"]["res"][()]
            for state in states
        ]

        # NOTE(review): squeeze also collapses the state axis when there is
        # exactly one state — confirm callers never hit that case.
        flat_fillings = np.squeeze(filling_factors_at_certain_times)
        instances = []
        for filling, state in zip(flat_fillings, states):
            # Everything from the first "useless" state onwards is dropped.
            if self.ignore_useless_states and len(useless_states) > 0 \
                    and state == f'state{useless_states[0]:012d}':
                break
            # "stateXXXXXXXXXXXX" -> int; dryspot iff listed in meta file.
            label = 1 if int(str(state).replace("state", "0")) \
                in set_of_states else 0
            instances.append((create_np_image(target_shape=self.image_size,
                                              norm_coords=_coords,
                                              data=filling), label))
        return instances
    except KeyError:
        logger = logging.getLogger()
        # Name the offending file (the message used to carry no information).
        logger.warning('Warning: missing dataset in %s', filename)
        return None
    finally:
        # Close both handles on every path — previously they leaked whenever
        # an exception other than KeyError escaped.
        f.close()
        meta_file.close()
# Example #2
    def __get_filling_data(self, file, states):
        """Read node coordinates and per-state filling factors from *file*.

        Returns a tuple ``(coords, flat_fillings)``: the normalized 2-d node
        coordinates and the squeezed filling-factor arrays, one entry per
        element of ``states``.
        """
        raw_coords = file[
            "post/constant/entityresults/NODE/COORDINATE/ZONE1_set0/erfblock/res"][
                ()]
        # Drop the last (z) column; it only ever contains 1s.
        coords = normalize_coords(raw_coords[:, :-1])

        fillings = []
        for state in states:
            fillings.append(file[
                f"post/singlestate/{state}/entityresults/NODE/FILLING_FACTOR/ZONE1_set1/erfblock/res"]
                [()])

        return coords, np.squeeze(fillings)
    def __calculate_mesh_components(self,
                                    sample_file,
                                    normalize_coordinates=True):
        """Load vertices, faces and edges of the mesh in *sample_file*.

        Sets ``self.vertices``, ``self.faces`` and ``self.edges`` on
        success.

        Args:
            sample_file: Path to an erfh5 result file.
            normalize_coordinates: If True, normalize the vertex
                coordinates via ``normalize_coords``.

        Raises:
            Exception: when a required dataset is missing (``KeyError``)
                or a node id cannot be resolved (``IndexError``).
        """
        f = h5py.File(sample_file, 'r')

        try:
            verts = f["post/constant/entityresults/NODE/COORDINATE/ZONE1_set0/"
                      "erfblock/res"][()]

            if normalize_coordinates:
                verts = normalize_coords(verts, third_dim=True)

            # Get internal indices of nodes
            hashes = f[
                "post/constant/entityresults/NODE/COORDINATE/ZONE1_set0/"
                "erfblock/entid"][()]
            hashes = {h: i for i, h in enumerate(hashes)}

            # Calculate faces based on internal indices
            faces = f["post/constant/connectivities/SHELL/erfblock/ic"][()]
            faces = faces[:, :-1]
            faces = np.vectorize(hashes.__getitem__)(faces)

            self.vertices = verts
            self.faces = faces
            self.edges = self.__calculate_edges()

        except KeyError:
            logging.getLogger().warning(
                'KeyError: Calculation of mesh failed.')
            raise Exception('Calculation of mesh failed because of a KeyError')
        except IndexError:
            # This branch previously logged "KeyError" as well, which made
            # the two failure modes indistinguishable in the logs.
            logging.getLogger().warning(
                'IndexError: Calculation of mesh failed.')
            raise Exception(
                'Calculation of mesh failed because of a IndexError')
        finally:
            # Close on every path — previously the handle leaked whenever an
            # unexpected exception escaped.
            f.close()
    def get_sensor_flowfront_mesh(self, filename):
        """Returns samples of shape (num_vertices, 1)  with following values:
           if nearest neighbor of sensor: sensorvalue else: 0
           The label is of shape (num_vertices, 1) containing the filling factor on each node.

           Returns None when a required dataset is missing in *filename*.
        """
        f = h5py.File(filename, 'r')
        folder = get_folder_of_erfh5(filename)
        instances = []

        if self.sensor_verts is None:
            print("Calculating sensor vertices from scratch.")
            self.sensor_verts = extract_nearest_mesh_nodes_to_sensors(
                folder,
                sensor_indices=self.sensor_indices,
                target_size=self.intermediate_target_size,
                third_dim=self.third_dim,
                subsampled_nodes=self.subsampled_nodes)
            if self.sensor_verts_path is not None:
                # Close the pickle file deterministically (it was previously
                # opened without ever being closed).
                with open(self.sensor_verts_path, 'wb') as pickle_file:
                    pickle.dump(self.sensor_verts, pickle_file)
                print(f"Saved sensor vertices in {self.sensor_verts_path}.")

            if self.third_dim:
                self.sensor_verts = self.sensor_verts[:-1]
            print(f"Calculated {len(self.sensor_verts)} sensor vertices.")

        try:
            verts = f["post/constant/entityresults/NODE/COORDINATE/ZONE1_set0/"
                      "erfblock/res"][()]

            if self.subsampled_nodes is not None:
                verts = verts[self.subsampled_nodes]

            verts = normalize_coords(verts, third_dim=True)

            states = f["post"]["singlestate"]
            all_inputs = []
            all_labels = []

            # Get all pressure and filling factor states
            for s in states:
                input_features = np.zeros((verts.shape[0]))
                pressure = f['post']['singlestate'][s]['entityresults'][
                    'NODE']['PRESSURE']['ZONE1_set1']['erfblock']['res'][()]

                flowfront = f['post']['singlestate'][s]['entityresults'][
                    'NODE']['FILLING_FACTOR']['ZONE1_set1']['erfblock']['res'][
                        ()]

                # Binarize: any partially filled node counts as filled.
                flowfront = np.squeeze(np.ceil(flowfront))

                if self.subsampled_nodes is not None:
                    pressure = pressure[self.subsampled_nodes]
                    flowfront = flowfront[self.subsampled_nodes]

                input_features[self.sensor_verts] = np.squeeze(
                    pressure[self.sensor_verts])
                if self.divide_by_100k:
                    # NOTE(review): despite the flag name this multiplies by
                    # 10 — the /100000 variant is commented out. Confirm the
                    # scaling is intentional before changing it.
                    # input_features = input_features / 100000
                    input_features = input_features * 10
                all_inputs.append(input_features)
                all_labels.append(flowfront)

            for sample, label in zip(all_inputs, all_labels):
                instances.append((sample, label))

            return instances

        except KeyError:
            logger = logging.getLogger()
            # Name the offending file (the message used to be a placeholder).
            logger.warning('KeyError: missing dataset in %s', filename)
            return None
        finally:
            # Close on every path — previously the handle leaked whenever an
            # exception other than KeyError escaped.
            f.close()
    def get_sensor_dryspot_mesh(self, filename):
        """Returns samples of shape (num_vertices, 1)  with following values:
           if nearest neighbor of sensor: sensorvalue else: 0
           The label is either 0 or 1, whether the sample contains a dryspot or not.

           Returns None when a required dataset is missing in either file.
        """

        f = h5py.File(filename, 'r')
        meta_file = h5py.File(
            str(filename).replace("RESULT.erfh5", "meta_data.hdf5"), 'r')
        folder = get_folder_of_erfh5(filename)
        instances = []
        all_labels = []

        if self.sensor_verts is None:
            print("calculating sensor vertices from scratch.")
            self.sensor_verts = extract_nearest_mesh_nodes_to_sensors(folder)
            print("Calculated sensor vertices.")

        try:
            verts = f["post/constant/entityresults/NODE/COORDINATE/ZONE1_set0/"
                      "erfblock/res"][()]
            verts = normalize_coords(verts, third_dim=True)

            array_of_states = meta_file["dryspot_states/singlestates"][()]
            if self.ignore_useless_states:
                useless_states = meta_file["useless_states/singlestates"][()]
            set_of_states = set(array_of_states.flatten())

            states = f["post"]["singlestate"]
            all_inputs = []

            # Get all pressure states and labels
            for s in states:
                # Everything from the first "useless" state onwards is
                # dropped.
                if self.ignore_useless_states and len(
                        useless_states
                ) > 0 and s == f'state{useless_states[0]:012d}':
                    break

                input_features = np.zeros((verts.shape[0]))
                pressure = f['post']['singlestate'][s]['entityresults'][
                    'NODE']['PRESSURE']['ZONE1_set1']['erfblock']['res'][()]

                input_features[self.sensor_verts] = np.squeeze(
                    pressure[self.sensor_verts])
                if self.divide_by_100k:
                    input_features = input_features / 100000
                all_inputs.append(input_features)

                # "stateXXXXXXXXXXXX" -> int; dryspot iff listed in the meta
                # file.
                label = 1 if int(str(s).replace("state", "0")) \
                    in set_of_states else 0
                all_labels.append(label)

            for sample, label in zip(all_inputs, all_labels):
                instances.append((sample, label))

            return instances

        except KeyError:
            logger = logging.getLogger()
            # Name the offending file (the message used to be a placeholder).
            logger.warning('KeyError: missing dataset in %s', filename)
            return None
        finally:
            # Close both handles on every path — meta_file was previously
            # never closed at all, and f leaked on unexpected exceptions.
            f.close()
            meta_file.close()