Example #1
0
    def get_flowfront_to_flowfront(self, filename):
        """
        Build (image, image) pairs of the flow front sampled at roughly 1 %
        fill-level increments from an ERFH5 result file.

        Args:
            filename: path to a "RESULT.erfh5" file.

        Returns:
            List of (img, img) tuples where img is a (143, 111) numpy image
            of the flow front, or None if the file is unreadable or missing
            the expected structure.
        """
        try:
            per_step = 0.01  # emit an image every additional 1 % of filling
            # Context manager closes the HDF5 handle on every path — the
            # original opened it and never closed it (resource leak).
            with h5py.File(filename, "r") as f:
                states = list(f["post"]["singlestate"])

                fillings = []
                for state in states:
                    try:
                        fillings.append(f["post"]["singlestate"][state]
                                        ["entityresults"]["NODE"]
                                        ["FILLING_FACTOR"]["ZONE1_set1"]
                                        ["erfblock"]["res"][()])
                    except KeyError:
                        return None
            # [()] reads eagerly, so all data is in memory after the file
            # is closed.
            fillings = np.stack(fillings).squeeze()
            activated_pixels = np.count_nonzero(fillings, axis=1)
            # 28464 is the number of individual mesh points in this model.
            percentage_of_all_sensors = activated_pixels / 28464
            returns = []
            current = 0
            coords = self.get_coords(filename)
            for i, sample in enumerate(percentage_of_all_sensors):
                if sample >= current:
                    img = create_np_image((143, 111), coords, fillings[i, :])
                    returns.append((img, img))
                    current += per_step
            return returns
        except Exception:
            # Best-effort loader: any unexpected file layout yields None so
            # the caller can skip this run.
            return None
    def _get_flowfront(self, f: h5py.File, meta_f: h5py.File, states=None):
        """
        Load the flow front for the given states, or for all available states
        when *states* is None.

        Returns a lazy generator of flow-front images, or None when the file
        is missing an expected HDF5 group.
        """
        first_useless = None
        try:
            norm_coords = self._get_coords(f)
            if not states:
                states = f["post"]["singlestate"]
            start, stop, step = self.skip_indizes
            selected = list(states)[start:stop:step]
            if meta_f is not None:
                marked = meta_f["useless_states/singlestates"][()]
                # Precompute the name of the first useless state once
                # (empty list means nothing is marked useless).
                if len(marked) > 0:
                    first_useless = f'state{marked[0]:012d}'
            fillings = []
            for state in selected:
                # Stop collecting at the first state flagged as useless.
                if first_useless is not None and state == first_useless:
                    break
                fillings.append(
                    f["post"]["singlestate"][state]["entityresults"]["NODE"]
                    ["FILLING_FACTOR"]["ZONE1_set1"]["erfblock"]["res"][()])

            flat_fillings = np.squeeze(fillings)
            # Return a generator so image creation stays lazy, exactly as in
            # the original implementation.
            return (create_np_image(target_shape=self.image_size,
                                    norm_coords=norm_coords,
                                    data=filling)
                    for filling in flat_fillings)
        except KeyError:
            return None
Example #3
0
    def get_flowfront_bool_dryspot(self, filename, states=None):
        """
        Load the flow front for all states or the given states and pair each
        flow-front image with a bool dryspot label (1 = dryspot state).

        Args:
            filename: path to a "RESULT.erfh5" file; the matching
                "meta_data.hdf5" path is derived from it.
            states: optional iterable of state names; defaults to all states.

        Returns:
            List of (image, label) tuples, or None when a required HDF5 key
            is missing.
        """
        f = h5py.File(filename, 'r')
        meta_file = h5py.File(
            str(filename).replace("RESULT.erfh5", "meta_data.hdf5"), 'r')
        try:
            states, set_of_states, useless_states = self.__get_dryspot_data(
                f, meta_file)
            _coords, flat_fillings = self.__get_filling_data(f, states)
            instances = []

            for filling, state in zip(flat_fillings, states):
                # Truncate at the first state flagged as useless.
                if self.ignore_useless_states and len(
                        useless_states
                ) > 0 and state == f'state{useless_states[0]:012d}':
                    break
                label = 0
                if int(str(state).replace("state", "0")) in set_of_states:
                    label = 1
                instances.append((create_np_image(target_shape=self.image_size,
                                                  norm_coords=_coords,
                                                  data=filling), label))
            return instances
        except KeyError:
            logger = logging.getLogger()
            # Name the offending file so failures are traceable (the original
            # message carried no information at all).
            logger.warning(f'Warning: missing expected data in {filename}')
            return None
        finally:
            # Close the HDF5 handles on every path — the original leaked both
            # on any exception other than KeyError.
            f.close()
            meta_file.close()
Example #4
0
    def load_aux_info_only(self, filename, single_state_indices):
        """
        Loads aux-info (currently only flowfronts) for all given timesteps of a given run.
        Intended for loading aux-info on demand during evaluation.

        Args:
            filename (pathlib.Path) : full path to an ERFH5-file to load aux-info from
            single_state_indices (1D torch.Tensor [int]): which time steps to load aux-info from

        Returns:
            flowfronts (3D numpy.ndarray): array of flowfront images
        """
        # Context managers guarantee both HDF5 handles are closed even when a
        # lookup raises — the original leaked both on error. The meta file is
        # still opened (though unused here) to preserve the original
        # fail-fast behavior when it is missing.
        with h5py.File(filename, "r") as file, \
                h5py.File(str(filename).replace("RESULT.erfh5",
                                                "meta_data.hdf5"), 'r'):
            # Keep the padded length for the output array; trailing zeros in
            # the index tensor are presumably padding — TODO confirm against
            # the caller.
            output_len = single_state_indices.size()[0]
            single_state_indices = np.trim_zeros(
                single_state_indices.numpy(), 'b')
            states = file["post"]["singlestate"]
            states = list(states)[self.skip_indizes[0]:
                                  self.skip_indizes[1]:
                                  self.skip_indizes[2]]
            _coords, flat_fillings = self.__get_filling_data(file, states)

            flowfronts = np.zeros(
                (output_len, self.image_size[0], self.image_size[1]))
            for i, ss_idx in enumerate(single_state_indices):
                flowfronts[i, :, :] = create_np_image(
                    target_shape=self.image_size,
                    norm_coords=_coords,
                    data=flat_fillings[ss_idx])
        return flowfronts
    def get_flowfront_bool_dryspot(self, filename, states=None):
        """
        Load the flow front for all states or the given states and pair each
        flow-front image with a bool dryspot label (1 = dryspot state).

        Args:
            filename: path to a "RESULT.erfh5" file; the matching
                "meta_data.hdf5" path is derived from it.
            states: optional iterable of state names; defaults to all states.

        Returns:
            List of (image, label) tuples, or None when a required HDF5 key
            is missing.
        """
        f = h5py.File(filename, 'r')
        meta_file = h5py.File(
            str(filename).replace("RESULT.erfh5", "meta_data.hdf5"), 'r')
        try:
            # Default to "no useless states" so the loop guard below never
            # touches an unbound name (the original relied on short-circuit
            # evaluation to avoid a NameError).
            useless_states = []
            if self.ignore_useless_states:
                useless_states = meta_file["useless_states/singlestates"][()]
            array_of_states = meta_file["dryspot_states/singlestates"][()]
            set_of_states = set(array_of_states.flatten())
            coord_as_np_array = f[
                "post/constant/entityresults/NODE/COORDINATE/ZONE1_set0/"
                "erfblock/res"][()]
            # Cut off last column (z), since it is filled with 1s anyway
            _coords = normalize_coords(coord_as_np_array[:, :-1])
            if not states:
                states = f["post"]["singlestate"]

            states = list(states)[self.skip_indizes[0]:
                                  self.skip_indizes[1]:
                                  self.skip_indizes[2]]

            filling_factors_at_certain_times = [
                f["post"]["singlestate"][state]["entityresults"]["NODE"]
                ["FILLING_FACTOR"]["ZONE1_set1"]["erfblock"]["res"][()]
                for state in states
            ]

            flat_fillings = np.squeeze(filling_factors_at_certain_times)
            instances = []
            for filling, state in zip(flat_fillings, states):
                # Truncate at the first state flagged as useless.
                if self.ignore_useless_states and len(
                        useless_states
                ) > 0 and state == f'state{useless_states[0]:012d}':
                    break
                label = 0
                if int(str(state).replace("state", "0")) in set_of_states:
                    label = 1
                instances.append((create_np_image(target_shape=self.image_size,
                                                  norm_coords=_coords,
                                                  data=filling), label))
            return instances
        except KeyError:
            logger = logging.getLogger()
            # Name the offending file so failures are traceable (the original
            # message carried no information at all).
            logger.warning(f'Warning: missing expected data in {filename}')
            return None
        finally:
            # Close the HDF5 handles on every path — the original leaked both
            # on any exception other than KeyError.
            f.close()
            meta_file.close()
Example #6
0
    def get_flowfront_sensor_and_flowfront_label(self, filename):
        """
        Load the flow front for the given states or all available states if states is None

        Returns a list of (sensor_values, flowfront_image) instances, or
        None when any of the required data could not be extracted.
        """
        states, fillings, velocities = self.extract_data_from_result_file(
            filename)
        meta_fn = str(filename).replace("RESULT.erfh5", "meta_data.hdf5")
        useless_states, set_of_dryspot_states = self.extract_data_from_meta_file(
            meta_fn)
        # Bail out if any of the extraction steps above failed.
        if states is None or \
                fillings is None or \
                velocities is None or \
                useless_states is None or \
                set_of_dryspot_states is None:
            return None
        instances = []
        # Sliding window over the most recent frames when frame stacking is
        # enabled (frame_count > 1).
        frame_q = deque(maxlen=self.frame_count)
        for i, (velocity, filling,
                state) in enumerate(zip(velocities, fillings, states)):
            # Truncate at the first state flagged as useless.
            if self.ignore_useless_states \
                    and len(useless_states) > 0 \
                    and state == f'state{useless_states[0]:012d}':
                break
            # Binarize the filling factors at the sensor positions: any
            # non-zero filling rounds up to 1.
            binary_ff_sensor_values = np.ceil(filling[self.indeces_of_sensors])
            if self.use_binary_sensor_only:
                values = binary_ff_sensor_values
            else:
                # NOTE(review): `values` is passed to add_velocity_factor
                # before it has been assigned on the first iteration of this
                # branch — this looks like an UnboundLocalError waiting to
                # happen. Confirm the intended second argument (previous
                # iteration's values?) before changing.
                values = self.add_velocity_factor(binary_ff_sensor_values,
                                                  values, velocity)
            label = create_np_image(target_shape=self.image_size,
                                    norm_coords=self.coords,
                                    data=filling)
            if self.frame_count <= 1:
                instances.append((values, label))
            else:
                frame_q.append(values)
                """ 
                Stack the current frames of the queue so that each frame is in one channel and start 
                using the data just after having enough data in the queue
                """
                if self.frame_count > i + 1:
                    continue
                else:
                    instances.append((np.stack(list(frame_q), axis=1), label))

        return instances
Example #7
0
    def get_flowfront_to_perm_map(self, filename):
        """
        Build the (flow-front sequence, permeability map) pair for one run.

        The flow front is sampled into a fixed-length sequence of 100 images,
        one per additional 1 % of filling.

        Args:
            filename: path to a "RESULT.erfh5" file.

        Returns:
            A single-element list [(sequence, perm_map)] where sequence is a
            (100, 143, 111) numpy array, or None if the file is unreadable
            or missing the expected structure.
        """
        try:
            per_step = 0.01  # emit an image every additional 1 % of filling
            # Context manager closes the HDF5 handle on every path — the
            # original opened it and never closed it (resource leak).
            with h5py.File(filename, "r") as f:
                perm_map = self._get_fiber_fraction(f)
                # np.float was deprecated in NumPy 1.20 and removed in 1.24;
                # it was an alias for the builtin float (-> float64).
                perm_map = perm_map.astype(float) / 255
                multi_state_pressure = f[
                    "/post/multistate/TIMESERIES1/multientityresults/SENSOR"
                    "/PRESSURE/ZONE1_set1/erfblock/res"][()]
                # NOTE(review): `m` is unused, but reading the pressure data
                # above still fails (-> None) for files lacking it — confirm
                # that is intended before removing.
                m = multi_state_pressure.squeeze()
                states = list(f["post"]["singlestate"])

                fillings = []
                for state in states:
                    try:
                        fillings.append(f["post"]["singlestate"][state]
                                        ["entityresults"]["NODE"]
                                        ["FILLING_FACTOR"]["ZONE1_set1"]
                                        ["erfblock"]["res"][()])
                    except KeyError:
                        return None
            fillings = np.stack(fillings).squeeze()
            activated_pixels = np.count_nonzero(fillings, axis=1)
            # 28464 is the number of individual mesh points in this model.
            percentage_of_all_sensors = activated_pixels / 28464
            sequence = np.zeros((100, 143, 111))
            current = 0
            coords = self.get_coords(filename)
            for i, sample in enumerate(percentage_of_all_sensors):
                if sample >= current:
                    sequence[int(round(current * 100)), :, :] = \
                        create_np_image((143, 111), coords, fillings[i, :])
                    current += per_step
            return [(sequence, np.array(perm_map))]
        except Exception:
            # Best-effort loader: any unexpected file layout yields None so
            # the caller can skip this run.
            return None