def __init__(self, arguments):
    logger.debug("Initializing %s: (args: %s)", self.__class__.__name__, arguments)
    self._args = arguments
    self.batch = list()
    self._serializer = get_serializer("json")
    self._pre_process = PostProcess(arguments)
    self._writer = self._get_writer()
    self._extractor = self._load_extractor()
    self._batchsize = self._get_batchsize(self._queue_size)
    self._model = self._load_model()
    self._output_indices = {"face": self._model.largest_face_index,
                            "mask": self._model.largest_mask_index}
    self._predictor = self._model.converter(False)
    configfile = self._args.configfile if hasattr(self._args, "configfile") else None
    self._converter = Converter(self.output_size,
                                self.coverage_ratio,
                                self.draw_transparent,
                                self.pre_encode,
                                arguments,
                                configfile=configfile)
    logger.debug("Initialized %s", self.__class__.__name__)
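# The snippets in this section all share one dependency: get_serializer(name) from
# faceswap's serializer library. The sketch below is an illustrative stand-in (not the
# library's actual code) for the interface those snippets rely on: load/save for files,
# marshal/unmarshal for in-memory data, and a file_extension attribute used when
# building state-file names.
import json


class _JsonSerializerSketch:
    """ Minimal stand-in mirroring the serializer interface used below. """
    file_extension = "json"

    def marshal(self, data):
        return json.dumps(data).encode("utf-8")

    def unmarshal(self, raw):
        return json.loads(raw)

    def save(self, filename, data):
        with open(filename, "wb") as out_file:
            out_file.write(self.marshal(data))

    def load(self, filename):
        with open(filename, "rb") as in_file:
            return self.unmarshal(in_file.read())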
def build_recent_menu(self):
    """ Load recent files into menu bar """
    logger.debug("Building Recent Files menu")
    serializer = get_serializer("json")
    menu_file = os.path.join(self._config.pathcache, ".recent.json")
    if not os.path.isfile(menu_file) or os.path.getsize(menu_file) == 0:
        self.clear_recent_files(serializer, menu_file)
    recent_files = serializer.load(menu_file)
    logger.debug("Loaded recent files: %s", recent_files)
    for recent_item in recent_files:
        filename, command = recent_item
        # Legacy project files didn't have a command stored
        command = command if command else "project"
        logger.debug("processing: ('%s', %s)", filename, command)
        if command.lower() == "project":
            load_func = self._config.project.load
            lbl = command
            kwargs = dict(filename=filename)
        else:
            load_func = self._config.tasks.load
            lbl = "{} Task".format(command)
            kwargs = dict(filename=filename, current_tab=False)
        self.recent_menu.add_command(
            label="{} ({})".format(filename, lbl.title()),
            command=lambda kw=kwargs, fn=load_func: fn(**kw))
    self.recent_menu.add_separator()
    self.recent_menu.add_command(
        label="Clear recent files",
        underline=0,
        command=lambda srl=serializer, mnu=menu_file: self.clear_recent_files(srl, mnu))
    logger.debug("Built Recent Files menu")
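# Why the menu callbacks above bind loop variables as lambda default arguments
# (kw=kwargs, fn=load_func): defaults are evaluated when the lambda is defined,
# whereas a plain closure captures the variable itself, so every menu entry would
# otherwise fire with the values from the final loop iteration.
callbacks = [lambda item=entry: print(item) for entry in ("a", "b", "c")]
for callback in callbacks:
    callback()  # prints "a", "b", "c" -- not "c" three times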
def _load_state_file(self) -> None:
    """ Load the current state file to :attr:`_state`. """
    state_file = os.path.join(self._model_dir, f"{self._model_name}_state.json")
    logger.debug("Loading State: '%s'", state_file)
    serializer = get_serializer("json")
    self._state = serializer.load(state_file)
    logger.debug("Loaded state: %s", self._state)
def build_recent_menu(self):
    """ Load recent files into menu bar """
    logger.debug("Building Recent Files menu")
    serializer = get_serializer("json")
    menu_file = os.path.join(self.config.pathcache, ".recent.json")
    if not os.path.isfile(menu_file) or os.path.getsize(menu_file) == 0:
        self.clear_recent_files(serializer, menu_file)
    recent_files = serializer.load(menu_file)
    logger.debug("Loaded recent files: %s", recent_files)
    for recent_item in recent_files:
        filename, command = recent_item
        logger.debug("processing: ('%s', %s)", filename, command)
        lbl_command = command if command else "All"
        self.recent_menu.add_command(
            label="{} ({})".format(filename, lbl_command.title()),
            command=lambda fnm=filename, cmd=command: self.config.load(cmd, fnm))
    self.recent_menu.add_separator()
    self.recent_menu.add_command(
        label="Clear recent files",
        underline=0,
        command=lambda srl=serializer, mnu=menu_file: self.clear_recent_files(srl, mnu))
    logger.debug("Built Recent Files menu")
def __init__(self, parent, top_level):
    logger.debug("Initializing: %s (top_level: %s)", self.__class__.__name__, top_level)
    self._parent = parent
    self._popup = top_level
    self._base_path = os.path.join(PATHCACHE, "presets")
    self._serializer = get_serializer("json")
    logger.debug("Initialized: %s", self.__class__.__name__)
def __init__(self, in_queue, queue_size, arguments):
    logger.debug("Initializing %s: (args: %s, queue_size: %s, in_queue: %s)",
                 self.__class__.__name__, arguments, queue_size, in_queue)
    self.batchsize = self.get_batchsize(queue_size)
    self.args = arguments
    self.in_queue = in_queue
    self.out_queue = queue_manager.get_queue("patch")
    self.serializer = get_serializer("json")
    self.faces_count = 0
    self.verify_output = False
    if arguments.allow_growth:
        self.set_tf_allow_growth()
    self.model = self.load_model()
    self.output_indices = {"face": self.model.largest_face_index,
                           "mask": self.model.largest_mask_index}
    self.predictor = self.model.converter(self.args.swap_model)
    self.queues = dict()
    self.thread = MultiThread(self.predict_faces, thread_count=1)
    self.thread.start()
    logger.debug("Initialized %s: (out_queue: %s)", self.__class__.__name__, self.out_queue)
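# Generic shape of the predictor set up above, sketched with the stdlib rather than
# faceswap's queue_manager/MultiThread helpers (the names and the "EOF" sentinel are
# illustrative): a single worker thread drains the input queue, runs the model on each
# batch, and feeds results to the patch queue.
import queue
import threading

in_queue = queue.Queue(maxsize=16)
out_queue = queue.Queue(maxsize=16)


def predict_faces():
    while True:
        batch = in_queue.get()
        if batch == "EOF":        # sentinel: propagate and stop the worker
            out_queue.put("EOF")
            break
        out_queue.put(batch)      # the real code would run model inference here


worker = threading.Thread(target=predict_faces, daemon=True)
worker.start()
in_queue.put("EOF")
worker.join()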
def _parse_outputs(self, event):
    """ Parse the outputs from the stored model structure for mapping loss names to
    model outputs.

    Loss names are added to :attr:`_loss_labels`

    Parameters
    ----------
    event: :class:`tensorflow.core.util.event_pb2`
        The event data containing the keras model structure to be parsed
    """
    serializer = get_serializer("json")
    struct = event.summary.value[0].tensor.string_val[0]

    outputs = np.array(serializer.unmarshal(struct)["config"]["output_layers"])
    logger.debug("Obtained model outputs: %s, shape: %s", outputs, outputs.shape)
    if outputs.ndim == 2:  # Insert extra dimension for non learn mask models
        outputs = np.expand_dims(outputs, axis=1)
        logger.debug("Expanded dimensions for non-learn_mask model. outputs: %s, shape: %s",
                     outputs, outputs.shape)

    for side_outputs, side in zip(outputs, ("a", "b")):
        logger.debug("side: '%s', outputs: '%s'", side, side_outputs)
        for idx in range(len(side_outputs)):
            # First output is always face. Subsequent outputs are masks
            loss_name = f"face_{side}" if idx == 0 else f"mask_{side}"
            loss_name = loss_name if idx < 2 else f"{loss_name}_{idx}"
            if loss_name not in self._loss_labels:
                logger.debug("Adding loss name: '%s'", loss_name)
                self._loss_labels.append(loss_name)
    logger.debug("Collated loss labels: %s", self._loss_labels)
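# Worked example of the dimension handling above. In a keras model config,
# "output_layers" holds [layer_name, node_index, tensor_index] triples (the layer names
# here are hypothetical). With no learned masks the array is 2D (one output per side),
# so an axis is inserted to normalise it to side x output x triple.
import numpy as np

outputs = np.array([["decoder_a", 0, 0], ["decoder_b", 0, 0]])
if outputs.ndim == 2:
    outputs = np.expand_dims(outputs, axis=1)
print(outputs.shape)  # (2, 1, 3)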
def __init__(self, default_font, root, path_cache):
    self._root = root
    self._font = default_font
    default = os.path.join(path_cache, "themes", "default.json")
    self._user_theme = get_serializer("json").load(default)
    self._style = ttk.Style()
    self._widgets = _Widgets(self._style)
    self._set_styles()
def __init__(self, folder, filename="alignments"):
    logger.debug("Initializing %s: (folder: '%s', filename: '%s')",
                 self.__class__.__name__, folder, filename)
    self._serializer = get_serializer("compressed")
    self._file = self._get_location(folder, filename)
    self._data = self._load()
    self._update_legacy()
    self._hashes_to_frame = dict()
    logger.debug("Initialized %s", self.__class__.__name__)
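# The "compressed" serializer requested above is assumed to be the library's
# zlib-compressed pickle variant used for alignments data; the round trip below
# illustrates the idea with stdlib pieces (the alignment record shown is hypothetical).
import pickle
import zlib

data = {"frame_0001.png": [{"x": 10, "y": 20, "w": 128, "h": 128}]}
raw = zlib.compress(pickle.dumps(data))
assert pickle.loads(zlib.decompress(raw)) == data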
def _get_session_names(self):
    """ Get the existing session names from a state file. """
    serializer = get_serializer("json")
    state_file = os.path.join(self.model_dir,
                              "{}_state.{}".format(self.model_name,
                                                   serializer.file_extension))
    state = serializer.load(state_file)
    session_names = ["session_{}".format(key) for key in state["sessions"].keys()]
    logger.debug("Sessions to restore: %s", session_names)
    return session_names
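# Hypothetical state-file layout implied by the lookup above: session ids are the keys
# of the "sessions" dict, each prefixed to build the session folder names to restore.
state = {"sessions": {"1": {"iterations": 1000}, "2": {"iterations": 2500}}}
session_names = ["session_{}".format(key) for key in state["sessions"]]
print(session_names)  # ['session_1', 'session_2']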
def build_recent_menu(self):
    """ Load recent files into menu bar """
    logger.debug("Building Recent Files menu")
    serializer = get_serializer("json")
    menu_file = os.path.join(self._config.pathcache, ".recent.json")
    if not os.path.isfile(menu_file) or os.path.getsize(menu_file) == 0:
        self.clear_recent_files(serializer, menu_file)
    try:
        recent_files = serializer.load(menu_file)
    except FaceswapError as err:
        if "Error unserializing data for type" in str(err):
            # Some reports of corruption breaking menus
            logger.warning("There was an error opening the recent files list so it has "
                           "been reset.")
            self.clear_recent_files(serializer, menu_file)
            recent_files = []
        else:
            raise  # Unrelated errors should propagate, not leave recent_files unbound
    logger.debug("Loaded recent files: %s", recent_files)
    removed_files = []
    for recent_item in recent_files:
        filename, command = recent_item
        if not os.path.isfile(filename):
            logger.debug("File does not exist. Flagging for removal: '%s'", filename)
            removed_files.append(recent_item)
            continue
        # Legacy project files didn't have a command stored
        command = command if command else "project"
        logger.debug("processing: ('%s', %s)", filename, command)
        if command.lower() == "project":
            load_func = self._config.project.load
            lbl = command
            kwargs = dict(filename=filename)
        else:
            load_func = self._config.tasks.load
            lbl = "{} Task".format(command)
            kwargs = dict(filename=filename, current_tab=False)
        self.recent_menu.add_command(
            label="{} ({})".format(filename, lbl.title()),
            command=lambda kw=kwargs, fn=load_func: fn(**kw))
    if removed_files:
        for recent_item in removed_files:
            logger.debug("Removing from recent files: `%s`", recent_item[0])
            recent_files.remove(recent_item)
        serializer.save(menu_file, recent_files)
    self.recent_menu.add_separator()
    self.recent_menu.add_command(
        label="Clear recent files",
        underline=0,
        command=lambda srl=serializer, mnu=menu_file: self.clear_recent_files(srl, mnu))
    logger.debug("Built Recent Files menu")
def __init__(self, model_dir=None, model_name=None):
    logger.debug("Initializing %s: (model_dir: %s, model_name: %s)",
                 self.__class__.__name__, model_dir, model_name)
    self.serializer = get_serializer("json")
    self.state = None
    self.modeldir = model_dir  # Set and reset by wrapper for training sessions
    self.modelname = model_name  # Set and reset by wrapper for training sessions
    self.tb_logs = None
    self.initialized = False
    self.session_id = None  # Set to specific session_id or current training session
    self.summary = SessionsSummary(self)
    logger.debug("Initialized %s", self.__class__.__name__)
def __init__(self, config, file_handler=None):
    # NB file_handler has to be passed in to avoid circular imports
    logger.debug("Initializing: %s: (config: %s, file_handler: %s)",
                 self.__class__.__name__, config, file_handler)
    self._serializer = get_serializer("json")
    self._config = config
    self._options = None
    self._file_handler = file_handler
    self._filename = None
    self._saved_tasks = None
    self._modified = False
    logger.debug("Initialized: %s", self.__class__.__name__)
def __init__(self, root, cli_opts, scaling_factor, pathcache, statusbar, session):
    logger.debug("Initializing %s: (root: %s, cli_opts: %s, scaling_factor: %s, "
                 "pathcache: %s, statusbar: %s, session: %s)", self.__class__.__name__,
                 root, cli_opts, scaling_factor, pathcache, statusbar, session)
    self.root = root
    self.cli_opts = cli_opts
    self.scaling_factor = scaling_factor
    self.pathcache = pathcache
    self.statusbar = statusbar
    self.serializer = get_serializer("json")
    self.tk_vars = self.set_tk_vars()
    self.user_config = UserConfig(None)
    self.user_config_dict = self.user_config.config_dict
    self.command_notebook = None  # set in command.py
    self.session = session
    logger.debug("Initialized %s", self.__class__.__name__)
def __init__(self, in_queue, queue_size, arguments):
    logger.debug("Initializing %s: (args: %s, queue_size: %s, in_queue: %s)",
                 self.__class__.__name__, arguments, queue_size, in_queue)
    self._batchsize = self._get_batchsize(queue_size)
    self._args = arguments
    self._in_queue = in_queue
    self._out_queue = queue_manager.get_queue("patch")
    self._serializer = get_serializer("json")
    self._faces_count = 0
    self._verify_output = False
    self._model = self._load_model()
    self._sizes = self._get_io_sizes()
    self._coverage_ratio = self._model.coverage_ratio
    self._thread = self._launch_predictor()
    logger.debug("Initialized %s: (out_queue: %s)", self.__class__.__name__, self._out_queue)
def __init__(self,
             model_dir: str,
             model_name: str,
             config_changeable_items: dict,
             no_logs: bool) -> None:
    logger.debug("Initializing %s: (model_dir: '%s', model_name: '%s', "
                 "config_changeable_items: '%s', no_logs: %s)", self.__class__.__name__,
                 model_dir, model_name, config_changeable_items, no_logs)
    self._serializer = get_serializer("json")
    filename = f"{model_name}_state.{self._serializer.file_extension}"
    self._filename = os.path.join(model_dir, filename)
    self._name = model_name
    self._iterations = 0
    self._sessions: Dict[int, dict] = {}
    self._lowest_avg_loss: Dict[str, float] = {}
    self._config = {}
    self._load(config_changeable_items)
    self._session_id = self._new_session_id()
    self._create_new_session(no_logs, config_changeable_items)
    logger.debug("Initialized %s:", self.__class__.__name__)
def _parse_outputs(self, event):
    """ Parse the outputs from the stored model structure for mapping loss names to
    model outputs.

    Loss names are added to :attr:`_loss_labels`

    Notes
    -----
    The master model does not actually contain the specified output name, so we dig
    into the sub-model to obtain the name of the output layers

    Parameters
    ----------
    event: :class:`tensorflow.core.util.event_pb2`
        The event data containing the keras model structure to be parsed
    """
    serializer = get_serializer("json")
    struct = event.summary.value[0].tensor.string_val[0]
    config = serializer.unmarshal(struct)["config"]
    model_outputs = self._get_outputs(config)

    for side_outputs, side in zip(model_outputs, ("a", "b")):
        logger.debug("side: '%s', outputs: '%s'", side, side_outputs)
        layer_name = side_outputs[0][0]

        output_config = next(layer for layer in config["layers"]
                             if layer["name"] == layer_name)["config"]
        layer_outputs = self._get_outputs(output_config)
        for output in layer_outputs:  # Drill into sub-model to get the actual output names
            loss_name = output[0][0]
            if loss_name[-2:] not in ("_a", "_b"):  # Rename losses to reflect the side output
                new_name = f"{loss_name.replace('_both', '')}_{side}"
                logger.debug("Renaming loss output from '%s' to '%s'", loss_name, new_name)
                loss_name = new_name
            if loss_name not in self._loss_labels:
                logger.debug("Adding loss name: '%s'", loss_name)
                self._loss_labels.append(loss_name)
    logger.debug("Collated loss labels: %s", self._loss_labels)
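# Worked example of the renaming branch above (the output names are hypothetical): a
# sub-model output shared by both sides gets its side suffix substituted in, while a
# name already ending in "_a"/"_b" passes through untouched.
for loss_name, side in (("face_out_both", "a"), ("mask_out_b", "b")):
    if loss_name[-2:] not in ("_a", "_b"):
        loss_name = f"{loss_name.replace('_both', '')}_{side}"
    print(loss_name)  # face_out_a, then mask_out_b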
def __init__(self, model_dir, model_name, config_changeable_items,
             no_logs, pingpong, training_image_size):
    logger.debug("Initializing %s: (model_dir: '%s', model_name: '%s', "
                 "config_changeable_items: '%s', no_logs: %s, pingpong: %s, "
                 "training_image_size: '%s')", self.__class__.__name__, model_dir,
                 model_name, config_changeable_items, no_logs, pingpong,
                 training_image_size)
    self.serializer = get_serializer("json")
    filename = "{}_state.{}".format(model_name, self.serializer.file_extension)
    self.filename = str(model_dir / filename)
    self.name = model_name
    self.iterations = 0
    self.session_iterations = 0
    self.training_size = training_image_size
    self.sessions = dict()
    self.lowest_avg_loss = dict()
    self.inputs = dict()
    self.config = dict()
    self.load(config_changeable_items)
    self.session_id = self.new_session_id()
    self.create_new_session(no_logs, pingpong, config_changeable_items)
    logger.debug("Initialized %s:", self.__class__.__name__)
def get_serializer(filename, serializer):
    """ Set the serializer to be used for loading and saving alignments

    If a filename with a valid extension is passed in this will be used as the
    serializer, otherwise the specified serializer will be used """
    logger.debug("Getting serializer: (filename: '%s', serializer: '%s')",
                 filename, serializer)
    extension = os.path.splitext(filename)[1]
    if extension in (".json", ".p", ".yaml", ".yml"):
        logger.debug("Serializer set from filename extension: '%s'", extension)
        retval = get_serializer_from_filename(filename)
    elif serializer not in ("json", "pickle", "yaml"):
        raise ValueError("Error: {} is not a valid serializer. Use "
                         "'json', 'pickle' or 'yaml'".format(serializer))
    else:
        logger.debug("Serializer set from argument: '%s'", serializer)
        retval = get_serializer(serializer)
    logger.verbose("Using '%s' serializer for alignments", retval.file_extension)
    return retval
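# Usage sketch for the selector above, assuming the bare get_serializer call in its
# else branch resolves to the serializer library's module-level helper (e.g. because
# this wrapper is a staticmethod) rather than recursing into itself. A recognised
# filename extension wins over the serializer argument.
serializer = get_serializer("alignments.json", "pickle")  # extension wins: json
serializer = get_serializer("alignments.dat", "pickle")   # falls back to the argument
try:
    get_serializer("alignments.dat", "csv")               # unknown serializer name
except ValueError as err:
    print(err)  # Error: csv is not a valid serializer. Use 'json', 'pickle' or 'yaml'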
def __init__(self, in_queue, queue_size, arguments):
    logger.debug("Initializing %s: (args: %s, queue_size: %s, in_queue: %s)",
                 self.__class__.__name__, arguments, queue_size, in_queue)
    self._batchsize = self._get_batchsize(queue_size)
    self._args = arguments
    self._in_queue = in_queue
    self._out_queue = queue_manager.get_queue("patch")
    self._serializer = get_serializer("json")
    self._faces_count = 0
    self._verify_output = False
    if arguments.allow_growth:
        self._set_tf_allow_growth()
    self._model = self._load_model()
    self._output_indices = {"face": self._model.largest_face_index,
                            "mask": self._model.largest_mask_index}
    self._predictor = self._model.converter(self._args.swap_model)
    self._thread = self._launch_predictor()
    logger.debug("Initialized %s: (out_queue: %s)", self.__class__.__name__, self._out_queue)