    def load_model(self, file):
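        """Load a saved Keras model and its ml_info metadata from disk.

        Returns (model_data, message); model_data reports "loaded": False
        on failure.
        """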
        self.clear_model()
        try:
            filename, file_extension = os.path.splitext(file)
            print(filename, file_extension)
            self.model = tf.keras.models.load_model(filename)
        except Exception as e:
            traceback.print_exc()
            return {"loaded": False}, "Error loading model:\n{}".format(e)

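        # Load the auxiliary info (labels, dimensions, feature list, sensor
        # and frame settings) that was saved next to the Keras model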
        try:
            ml_info_file = os.path.join(filename, "ml_info.npy")
            info = np.load(ml_info_file, allow_pickle=True)
            self.labels_dict = info.item()["labels_dict"]
            model_dimensions = info.item()["model_dimensions"]
            self.label_num = model_dimensions["output"]
            feature_list = info.item()["feature_list"]
            sensor_config = configs.load(info.item()["sensor_config"])
            frame_settings = info.item()["frame_settings"]
            time_distributed = model_dimensions.get("time_distributed", 1)
            feature_dimension = model_dimensions.get(
                "feature_dimension", model_dimensions["input"][:-1])
        except Exception as e:
            error_text = self.error_to_text(e)
            message = "Error in load model:\n{}".format(error_text)
            return {"loaded": False}, message
        else:
            nr_maps = info.item().get("nr_of_training_maps", 0)
            self.model_data.nr_of_training_maps = nr_maps
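            # Rebuild the GUI layer list from the loaded Keras layers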
            gui_layer_conf = layer_definitions.get_layers()
            layer_list = []
            for l in self.model.layers:
                l_conf = l.get_config()
                l_name = l_conf["name"].rsplit("_", 1)[0]
                if "time_distributed" in l_name:
                    l_conf = l_conf["layer"]["config"]
                    l_name = l_conf["name"].rsplit("_", 1)[0]
                if l_name in gui_layer_conf:
                    g_conf = gui_layer_conf[l_name]["params"]
                    layer = {
                        "name": l_name,
                        "class": gui_layer_conf[l_name]["class_str"],
                        "params": {},
                    }
                    if g_conf is None:
                        layer["params"] = None
                    else:
                        for p in l_conf:
                            if p in g_conf:
                                if isinstance(l_conf[p], tuple):
                                    layer["params"][p] = list(l_conf[p])
                                else:
                                    layer["params"][p] = l_conf[p]
                    layer_list.append(layer)
                else:
                    if l_name != "input":
                        print(
                            "Keras layer {} not found in layer_definitions.py!"
                            .format(l_name))

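        # Fetch the label list and, if training data is already loaded,
        # remap its labels to this model's label dictionary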
        labels = self.get_label_list()
        label_categories = None
        if self.training_data["loaded"]:
            try:
                data_labels = self.label_assignment(
                    self.training_data["raw_labels"], self.labels_dict)
                label_categories = to_categorical(data_labels, self.label_num)
            except Exception as e:
                print("Loaded data incompatible with model data!\n", e)
                self.training_data["loaded"] = False
                label_categories = None

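        # Collect everything the GUI needs to know about the loaded model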
        model_data = {
            "loaded": True,
            "y_labels": label_categories,
            "label_list": labels,
            "feature_list": feature_list,
            "sensor_config": sensor_config,
            "frame_settings": frame_settings,
            "layer_list": layer_list,  # GUI format layer list
            "keras_layer_info": self.model.layers,  # Keras format layer list
            "input": model_dimensions["input"],
            "output": model_dimensions["output"],
            "time_distributed": time_distributed,
            "feature_dimension": feature_dimension,
        }

        self.set_model_data(model_data)
        self.count_variables()

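        # Build a human-readable summary of the loaded model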
        message = "Loaded model with:\n"
        message += "input shape    :{}\n".format(self.model_data.input)
        if self.model_data.time_distributed > 1:
            message += "time steps     :{}\n".format(
                self.model_data.time_distributed)
        message += "output shape   :{}\n".format(self.model_data.output)
        message += "nr of features :{}\n".format(len(feature_list))
        message += "labels         :{}\n".format(labels)
        message += "Trained with {} features".format(
            self.model_data.get("nr_of_training_maps", "N/A"))

        return self.model_data, message

    def init_model(self):
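        """Build a Keras model from the GUI layer list in self.model_data.

        Returns a dict with "loaded" and "model_message" keys.
        """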
        input_dim = self.model_data.feature_dimension
        output_dim = self.model_data.output

        if not isinstance(input_dim, list):
            input_dim = list(input_dim)

        print("\nInitiating model with {:d}x{:d} inputs"
              " and {:d} outputs".format(*input_dim, output_dim))

        layer_list = self.model_data.layer_list
        if layer_list is None:
            print("No layers defined!")
            return {"loaded": False, "model_message": "No Layers defined"}

        nr_layers = len(layer_list)
        layer_callbacks = layer_definitions.get_layers()

        lstm_mode = False
        time_series = False
        steps = self.model_data.time_distributed
        print("Building model with {} layers...".format(nr_layers))
        if steps > 1:
            input_dim[-1] = input_dim[-1] - steps + 1
            input_dim = [steps] + input_dim
            lstm_mode = True
            time_series = True
            print(
                "Building TimeDistributed model with {} steps!".format(steps))

        # Add single channel dimension
        if input_dim[-1] != 1:
            input_dim = input_dim + [1]

        self.model_data.input = input_dim

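        # Build the graph with the Keras functional API, one GUI layer at a time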
        inputs = Input(shape=input_dim)

        x = None
        for idx, layer in enumerate(layer_list):
            if not layer.get("is_active", True):
                continue
            try:
                cb = layer_callbacks[layer['name']]['class']
            except KeyError:
                print("Layer {} not found in layer_definitions.py!".format(
                    layer['name']))
                return {
                    "loaded": False,
                    "model_message": "Unknown layer {}".format(layer['name']),
                }

            # Wrap layers in TimeDistributed until first LSTM layer
            if layer['name'] == "lstm":
                lstm_mode = False
                time_series = False
                layer["params"].pop("steps")
            if lstm_mode:
                if layer['class'] not in not_time_distributed:
                    time_series = True
                else:
                    time_series = False

            try:
                options = {}
                if layer["params"] is not None:
                    for entry in layer["params"]:
                        opt = layer["params"][entry]
                        if isinstance(opt, list):
                            options[entry] = tuple(opt)
                        else:
                            options[entry] = opt
                print("{}: Adding {} with\n{}".format(idx + 1, layer['name'],
                                                      options))
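                # First layer consumes the Input tensor; the final layer gets
                # output_dim units and produces the predictions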
                if idx == 0 and nr_layers > 1:
                    if time_series:
                        x = TimeDistributed(cb(**options))(inputs)
                    else:
                        x = cb(**options)(inputs)
                elif idx > 0 and idx < nr_layers - 1:
                    if time_series:
                        x = TimeDistributed(cb(**options))(x)
                    else:
                        x = cb(**options)(x)
                else:
                    options.pop("units", None)
                    predictions = cb(output_dim, **options)(x)
            except Exception as e:
                traceback.print_exc()
                return {
                    "loaded": False,
                    "model_message": (
                        "\nLayer nr. {} failed. Error adding {}\n{}".format(
                            idx + 1, layer['name'], e)),
                }

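            # Restore the steps parameter so the GUI layer list stays intact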
            if layer["name"] == "lstm":
                layer["params"]["steps"] = steps

        self.model = Model(inputs=inputs, outputs=predictions)

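        # When eager execution is enabled, wrap call() in a tf.function so
        # prediction runs as a graph, with relaxed shapes to limit retracing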
        if self.eager_execution:
            self.model.call = tf.function(self.model.call,
                                          experimental_relax_shapes=True)

        self.set_optimizer("Adam")

        self.count_variables()

        return {"loaded": True, "model_message": ""}