Example #1
    def load_train_data(self, files, layer_list=None, model_exists=False, load_test_data=False):
        err_tip = "\nTry clearing training before loading more data!"
        data = []
        stored_configs = []
        feature_lists = []
        frame_settings_list = []
        feature_map_dims = []
        files_loaded = []
        files_failed = []
        model_exists = self.model_data.loaded
        info = {
            "success": False,
            "message": "",
            "model_initialized": False,
        }
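        # Read every file and collect its sensor config, feature list, frame settings
        # and the shape of its first feature map.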
        for file in files:
            try:
                file_data = np.load(file, allow_pickle=True).item()
                conf = configs.load(file_data["sensor_config"])
                stored_configs.append(conf)
                feature_lists.append(file_data["feature_list"])
                frame_settings_list.append(file_data["frame_settings"])
                feature_map_dims.append(
                    file_data["frame_data"]["ml_frame_data"]["frame_list"][0]["feature_map"].shape
                )
                data.append(file_data)
            except Exception:
                print("File error in:\n", file)
                traceback.print_exc()
                files_failed.append(file)
            else:
                files_loaded.append(file)

        if not len(files_loaded):
            info["message"] = "No valid files found"
            return {"info": info}

        data_type = "training"
        if load_test_data:
            if self.labels_dict is None:
                info["message"] = "Load train data first"
                return {"info": info}
            data_type = "test"

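        # A singleton axis means 1D data; if the singleton is the first axis, flip the
        # stored shape and transpose the maps later so the layout matches the model.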
        transpose = False
        if feature_map_dims[0][0] == 1 or feature_map_dims[0][1] == 1:
            model_dimension = 1
            if feature_map_dims[0][0] == 1:
                feature_map_dims[0] = feature_map_dims[0][::-1]
                transpose = True
        else:
            model_dimension = 2

        if data_type == "training":
            if not model_exists:
                self.model_data.feature_dimension = feature_map_dims[0]
        else:
            if not self.training_data["loaded"]:
                info["message"] = "Load training data first!"
                return {"info": info}

        for i in range(1, len(files_loaded)):
            # TODO: Check that files are compatible
            map_dims = self.model_data.feature_dimension
            current_dim = feature_map_dims[i]
            if transpose:
                current_dim = current_dim[::-1]
            if map_dims != current_dim:
                message = "Input dimenions not matching:\nModel {} - Data {}".format(
                    map_dims,
                    feature_map_dims[i])
                message += err_tip
                info["message"] = message
                print(files_loaded[i])
                return {"info": info}

        if data_type == "training":
            if not model_exists:
                if layer_list is not None:
                    self.set_model_layers(layer_list)

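        # Flatten the frames of all files into one list of feature maps and labels.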
        raw_labels = []
        feature_maps = []
        frame_info = data[0]["frame_data"]["ml_frame_data"]["frame_info"]
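        # If the model is time-distributed, reconcile the data's time-series length
        # with the model's setting, preferring the model's value.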
        for data_index, d in enumerate(data):
            fdata = d["frame_data"]["ml_frame_data"]["frame_list"]
            if self.model_data.time_distributed:
                time_series = frame_info.get("time_series", 1)
                if self.model_data.time_distributed != time_series:
                    print("Inconsistent time series values found:")
                    print("Model: {}".format(self.model_data.time_distributed))
                    print("Data : {}".format(time_series))
                    frame_info["time_series"] = self.model_data.time_distributed
            for subdata_index, frame in enumerate(fdata):
                feature_map = frame["feature_map"]
                if self.model_data.time_distributed > 1:
                    feature_map = ml_helper.convert_time_series(feature_map, frame_info)
                if transpose:
                    feature_map = feature_map.T
                feature_maps.append(feature_map)
                raw_labels.append(frame["label"])

        feature_map_data = np.stack(feature_maps)
        if model_dimension == 2:
            feature_map_data = np.expand_dims(feature_map_data, -1)
        self.model_data.nr_of_training_maps = feature_map_data.shape[0]

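        # Convert string labels to numeric classes; build a fresh label dictionary
        # unless one already exists from a previously loaded model.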
        if data_type == "training":
            if not model_exists or not self.label_num:
                data_labels, self.label_num, self.labels_dict = self.label_conversion(raw_labels)
                self.model_data.output = self.label_num
            else:
                try:
                    data_labels = self.label_assignment(raw_labels, self.labels_dict)
                except Exception:
                    traceback.print_exc()
                    print("New labels found, clearing weights!")
                    model_exists = False
                    data_labels, self.label_num, self.labels_dict = self.label_conversion(
                        raw_labels
                    )
                    self.model_data.output = self.label_num
                output = self.model_data.output
                if self.label_num != output:
                    message = "Output dimensions not matching:\nModel {} - Data {}".format(
                        output,
                        self.label_num)
                    info["message"] = message + err_tip
                    return {"info": info}
        else:
            data_labels = self.label_assignment(raw_labels, self.labels_dict)

        label_categories = self.convert_to_categorical(data_labels, self.label_num)

        if data_type == "training":
            if not model_exists:
                if layer_list is not None:
                    self.set_model_layers(layer_list)
                model_status = self.clear_model(reinit=True)
            else:
                model_status = {"loaded": True, "model_message": ""}

        message = "Loaded {} data with shape {}\n".format(data_type, feature_map_data.shape)
        message += "Found labels:\n"
        for label in self.labels_dict:
            message += label + "\n"

        if files_failed:
            message += "Failed to load some files:\n"
            for f in files_failed:
                message += f + "\n"

        if data_type == "training":
            self.training_data = {
                "loaded": True,
                "x_data": feature_map_data,
                "raw_labels": raw_labels,
            }
            loaded_data = self.training_data
            model_data = {
                "loaded": model_status["loaded"],
                "y_labels": label_categories,
                "label_list": self.get_label_list(),
                "feature_list": feature_lists[0],
                "frame_settings": frame_settings_list[0],
                "sensor_config": stored_configs[0],
            }

            for key in model_data:
                self.model_data[key] = model_data[key]
        else:
            self.test_data = {
                "loaded": True,
                "test_data": feature_map_data,
                "raw_labels": raw_labels,
            }
            loaded_data = self.test_data

        info = {
            "success": True,
            "message": message,
            "model_initialized": model_status["loaded"],
            "model_status": model_status["model_message"],
        }

        if model_status["loaded"] is True:
            counted = self.count_variables()
            self.model_data.trainable = counted["trainable"]
            self.model_data.non_trainable = counted["non_trainable"]
            self.model_data.keras_layer_info = self.model.layers
        else:
            if not isinstance(model_status, str):
                info["model_status"] = "Model not initialized"

        return {"info": info, "model_data": self.model_data, "loaded_data": loaded_data}
Example #2
    def load_model(self, file):
        try:
            self.clear_model()
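            # The model file is a pickled dict written with np.save(); unpack its parts.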
            info = np.load(file, allow_pickle=True)
            self.model = info.item()["model"]
            self.labels_dict = info.item()["labels_dict"]
            model_dimensions = info.item()["model_dimensions"]
            self.label_num = model_dimensions["output"]
            feature_list = info.item()["feature_list"]
            sensor_config = configs.load(info.item()["sensor_config"])
            frame_settings = info.item()["frame_settings"]
            time_distributed = model_dimensions.get("time_distributed", 1)
            feature_dimension = model_dimensions.get(
                "feature_dimension", model_dimensions["input"][:-1])
            self.tf_session = K.get_session()
            self.tf_graph = tf.compat.v1.get_default_graph()
            with self.tf_session.as_default():
                with self.tf_graph.as_default():
                    self.model._make_predict_function()
        except Exception as e:
            error_text = self.error_to_text(e)
            message = "Error in load model:\n{}".format(error_text)
            return {"loaded": False}, message
        else:
            try:
                self.model_data.nr_of_training_maps = info.item()["nr_of_training_maps"]
            except KeyError:
                self.model_data.nr_of_training_maps = 0
            gui_layer_conf = layer_definitions.get_layers()
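            # Rebuild the GUI-format layer list by matching each Keras layer name
            # against the definitions in layer_definitions.py.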
            layer_list = []
            for l in self.model.layers:
                l_conf = l.get_config()
                l_name = l_conf["name"].rsplit("_", 1)[0]
                if "time_distributed" in l_name:
                    l_conf = l_conf["layer"]["config"]
                    l_name = l_conf["name"].rsplit("_", 1)[0]
                if l_name in gui_layer_conf:
                    g_conf = gui_layer_conf[l_name]["params"]
                    layer = {
                        "name": l_name,
                        "class": gui_layer_conf[l_name]["class_str"],
                        "params": {},
                    }
                    if g_conf is None:
                        layer["params"] = None
                    else:
                        for p in l_conf:
                            if p in g_conf:
                                if isinstance(l_conf[p], tuple):
                                    layer["params"][p] = list(l_conf[p])
                                else:
                                    layer["params"][p] = l_conf[p]
                    layer_list.append(layer)
                else:
                    if l_name != "input":
                        print(
                            "Keras layer {} not found in layer_definitions.py!"
                            .format(l_name))

        counted = self.count_variables()

        labels = self.get_label_list()
        label_categories = None
        if self.training_data["loaded"]:
            try:
                data_labels = self.label_assignment(
                    self.training_data["raw_labels"], self.labels_dict)
                label_categories = to_categorical(data_labels, self.label_num)
            except Exception as e:
                print("Loaded data incompatible with model data!\n", e)
                self.training_data = {"loaded": False}
                label_categories = None

        model_data = {
            "loaded": True,
            "y_labels": label_categories,
            "label_list": labels,
            "feature_list": feature_list,
            "sensor_config": sensor_config,
            "frame_settings": frame_settings,
            "layer_list": layer_list,  # GUI format layer list
            "keras_layer_info": self.model.layers,  # Keras format layer list
            "trainable": counted["trainable"],
            "non_trainable": counted["non_trainable"],
            "input": model_dimensions["input"],
            "output": model_dimensions["output"],
            "time_distributed": time_distributed,
            "feature_dimension": feature_dimension,
        }

        self.set_model_data(model_data)

        message = "Loaded model with:\n"
        message += "input shape    :{}\n".format(self.model_data.input)
        if self.model_data.time_distributed > 1:
            message += "time steps     :{}\n".format(
                self.model_data.time_distributed)
        message += "output shape   :{}\n".format(self.model_data.output)
        message += "nr of features :{}\n".format(len(feature_list))
        message += "labels         :{}\n".format(labels)
        message += "Trained with {} features".format(
            self.model_data.get("nr_of_training_maps", "N/A"))

        return self.model_data, message
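The dict below sketches the file layout load_model() appears to expect, inferred from the keys it reads above; all names and values are illustrative placeholders, not a confirmed save format:

import numpy as np

# Placeholders standing in for the real objects (hypothetical values):
trained_keras_model = None   # a keras.Model instance in a real file
feature_list = []            # GUI feature descriptors
sensor_config_dump = ""      # string dump parsed by configs.load()
frame_settings = {}

saved_model = {
    "model": trained_keras_model,
    "labels_dict": {"empty": 0, "presence": 1},   # label string -> class index
    "model_dimensions": {
        "input": (50, 60, 1),
        "output": 2,
        "time_distributed": 1,          # optional; load_model() defaults to 1
        "feature_dimension": (50, 60),  # optional; defaults to input[:-1]
    },
    "feature_list": feature_list,
    "sensor_config": sensor_config_dump,
    "frame_settings": frame_settings,
    "nr_of_training_maps": 1200,        # optional; defaults to 0 on load
}
np.save("trained_model.npy", saved_model)
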
Example #3
    def sensor_config(self):
        return configs.load(self.sensor_config_dump, self.mode)
    def load_train_data(self,
                        files,
                        layer_list=None,
                        model_exists=False,
                        load_test_data=False):
        err_tip = "\nTry clearing training before loading more data!"
        data = []
        stored_configs = []
        feature_lists = []
        frame_settings_list = []
        feature_map_dims = []
        files_loaded = 0
        files_failed = []
        info = {
            "success": False,
            "message": "",
            "model_initialized": False,
        }
        for file in files:
            try:
                file_data = np.load(file, allow_pickle=True).item()
                conf = configs.load(file_data["sensor_config"])
                stored_configs.append(conf)
                feature_lists.append(file_data["feature_list"])
                frame_settings_list.append(file_data["frame_settings"])
                feature_map_dims.append(
                    file_data["frame_data"]["ml_frame_data"]["frame_list"][0]
                    ["feature_map"].shape)
                data.append(file_data)
            except Exception:
                print("File error in:\n", file)
                traceback.print_exc()
                files_failed.append(file)
            else:
                files_loaded += 1

        if not files_loaded:
            info["message"] = "No valid files found"
            return {"info": info}

        data_type = "training"
        if load_test_data:
            if self.labels_dict is None:
                info["message"] = "Load train data first"
                return {"info": info}
            data_type = "test"

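        # A singleton second axis means 1D data; this variant does not transpose.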
        if feature_map_dims[0][1] == 1:
            model_dimension = 1
        else:
            model_dimension = 2

        if data_type == "training":
            if model_exists:
                self.model_data.input = self.model.input_shape[1:-1]
                self.model_data.output = self.model.output_shape[-1]
            else:
                self.model_data.input = feature_map_dims[0]
        else:
            if not self.training_data["loaded"]:
                info["message"] = "Load training data first!"
                return {"info": info}

        for i in range(1, files_loaded):
            # TODO: Check that files are compatible
            map_dims = self.model_data.input
            if map_dims != feature_map_dims[i]:
                message = "Input dimenions not matching:\nModel {} - Data {}".format(
                    map_dims, feature_map_dims[i])
                message += err_tip
                info["message"] = message
                return {"info": info}

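        # Flatten the frames of all files into feature maps and labels.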
        raw_labels = []
        feature_maps = []
        for d in data:
            fdata = d["frame_data"]["ml_frame_data"]["frame_list"]
            for data_idx in fdata:
                feature_maps.append(data_idx["feature_map"])
                raw_labels.append(data_idx["label"])

        feature_map_data = np.stack(feature_maps)
        if model_dimension == 2:
            feature_map_data = np.expand_dims(feature_map_data,
                                              model_dimension + 1)
        self.model_data.nr_of_training_maps = feature_map_data.shape[0]

        if data_type == "training":
            if not model_exists or not self.label_num:
                data_labels, self.label_num, self.labels_dict = self.label_conversion(
                    raw_labels)
                self.model_data.output = self.label_num
            else:
                data_labels = self.label_assignment(raw_labels,
                                                    self.labels_dict)
                output = self.model_data.output
                if self.label_num != output:
                    message = "Output dimensions not matching:\nModel {} - Data {}".format(
                        output, self.label_num)
                    info["message"] = message + err_tip
                    return {"info": info}
        else:
            data_labels = self.label_assignment(raw_labels, self.labels_dict)

        label_categories = to_categorical(data_labels, self.label_num)

        if data_type == "training":
            self.model_data.input = feature_map_data.shape[1:]
            if not model_exists:
                self.model_data.layer_list = layer_list
                model_status = self.clear_model(reinit=True)
            else:
                model_status = {"loaded": True, "model_message": ""}

        message = "Loaded {} data with shape {}\n".format(
            data_type, feature_map_data.shape)
        message += "Found labels:\n"
        for label in self.labels_dict:
            message += label + "\n"

        if files_failed:
            message += "Failed to load some files:\n"
            for f in files_failed:
                message += f + "\n"

        if data_type == "training":
            self.training_data = {
                "loaded": True,
                "x_data": feature_map_data,
                "raw_labels": raw_labels,
            }
            loaded_data = self.training_data
            model_data = {
                "loaded": model_status["loaded"],
                "y_labels": label_categories,
                "label_list": self.get_label_list(),
                "feature_list": feature_lists[0],
                "frame_settings": frame_settings_list[0],
                "sensor_config": stored_configs[0],
            }

            for key in model_data:
                self.model_data[key] = model_data[key]
        else:
            self.test_data = {
                "loaded": True,
                "test_data": feature_map_data,
                "raw_labels": raw_labels,
            }
            loaded_data = self.test_data

        info = {
            "success": True,
            "message": message,
            "model_initialized": model_status["loaded"],
            "model_status": model_status["model_message"],
        }

        if model_status["loaded"] is True:
            counted = self.count_variables()
            self.model_data.trainable = counted["trainable"]
            self.model_data.non_trainable = counted["non_trainable"]
            self.model_data.keras_layer_info = self.model.layers
            self.model_data.layer_list = layer_list
        else:
            if not isinstance(model_status, str):
                info["model_status"] = "Model not initialized"

        return {
            "info": info,
            "model_data": self.model_data,
            "loaded_data": loaded_data
        }
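
For reference, a sketch of the session-file layout this variant of load_train_data() reads, again inferred from the keys accessed above; values are illustrative placeholders:

import numpy as np

# Placeholders standing in for the real objects (hypothetical values):
sensor_config_dump = ""   # string dump parsed by configs.load()
feature_list = []         # GUI feature descriptors
frame_settings = {}

session = {
    "sensor_config": sensor_config_dump,
    "feature_list": feature_list,
    "frame_settings": frame_settings,
    "frame_data": {
        "ml_frame_data": {
            "frame_info": {"time_series": 1},   # read by the time-distributed path in Example #1
            "frame_list": [
                {"feature_map": np.zeros((50, 60)), "label": "empty"},
                {"feature_map": np.zeros((50, 60)), "label": "presence"},
            ],
        },
    },
}
np.save("session_0.npy", session)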