def __init__(self, context):
    """Build the dandere2x worker objects and read resume state from the config.

    Args:
        context: Shared configuration / runtime state for the whole session.
    """
    self.context = context

    # Worker objects. min_disk_demon is created later, once the context is final.
    self.min_disk_demon = None
    self.merge_thread = Merge(self.context)
    self.residual_thread = Residual(self.context)
    self.waifu2x = self._get_waifu2x_class(self.context.waifu2x_type)
    self.compress_frames_thread = CompressFrames(self.context)
    self.dandere2x_cpp_thread = Dandere2xCppWrapper(self.context)
    self.status_thread = Status(context)

    # Session state: a resumed session restarts from the last signalled merge count.
    resume_settings = self.context.config_yaml['resume_settings']
    if resume_settings['resume_session']:
        print("is resume session")
        self.resume_session = True
        self.first_frame = int(resume_settings['signal_merged_count'])
    else:
        print("is not resume session")
        self.resume_session = False
        self.first_frame = 1

    # Threading state.
    self.alive = True
    self.cancel_token = CancellationToken()
    self._stopevent = threading.Event()
    threading.Thread.__init__(self, name="dandere2x_thread")
def difference_only(self):
    """Run only the difference stage: the C++ block matcher plus the
    difference loop, then wait for both to finish."""
    self.pre_setup()

    cpp_worker = Dandere2xCppWrapper(self.workspace, self.dandere2x_cpp_dir,
                                     self.frame_count, self.block_size,
                                     self.tolerance, self.mse_max,
                                     self.mse_min, self.step_size,
                                     resume=False,
                                     extension_type=self.extension_type)

    diff_worker = threading.Thread(
        target=difference_loop,
        args=(self.workspace, self.differences_dir, self.inversion_data_dir,
              self.pframe_data_dir, self.input_frames_dir, 1,
              self.frame_count, self.block_size, self.extension_type))

    self.logger.info("Starting Threaded Processes..")

    # Same start order as the original: difference loop first, then the matcher.
    diff_worker.start()
    cpp_worker.start()

    cpp_worker.join()
    diff_worker.join()
def run(self):
    """Bring the session live: preprocess, rebuild every worker against the
    finalized context, extract + upscale the genesis frame, then start all
    workers."""
    self.log.info("Thread Started")
    self._pre_processing()

    # Workers are re-instantiated here because the context is only fully
    # configured after _pre_processing().
    self.min_disk_demon = MinDiskUsage(self.context)
    self.status_thread = Status(self.context)
    self.dandere2x_cpp_thread = Dandere2xCppWrapper(self.context)
    self.waifu2x = self._get_waifu2x_class(self.context.waifu2x_type)
    self.residual_thread = Residual(self.context)
    self.merge_thread = Merge(self.context)

    self.log.info("Dandere2x Threads Set.. going live with the following context file.")
    self.context.log_all_variables()

    self.__extract_frames()
    self.__upscale_first_frame()

    # Start order preserved from the original implementation.
    workers = (self.min_disk_demon, self.dandere2x_cpp_thread,
               self.merge_thread, self.residual_thread,
               self.waifu2x, self.status_thread)
    for worker in workers:
        worker.start()

    self.alive = True
def __init__(self, context: Context):
    """Set up the Dandere2x controller thread: except hook, logging, and
    placeholder worker objects.

    Args:
        context: Shared configuration / runtime state for the whole session.
    """
    # Administrative Stuff
    import sys
    sys.excepthook = show_exception_and_exit  # set a custom except hook to prevent window from closing.
    threading.Thread.__init__(self, name="Dandere2x Thread")

    # Log formats: colorized layout for the console handler, plain layout for
    # the file handler (self.fh) created later.
    self.color_log_format = "%(log_color)s%(asctime)-8s%(reset)s %(log_color)s%(levelname)-8s%(reset)s %(log_color)s%(filename)-8s%(reset)s %(log_color)s%(funcName)-8s%(reset)s: %(log_color)s%(message)s"
    self.file_log_format = "%(asctime)s %(levelname)s %(filename)s %(funcName)s %(message)s"
    self.set_console_logger()
    self.fh = None  # File Handler for log

    # Class Specific
    self.context = context
    self.alive = False  # flipped to True once run() has started every worker
    self.log = logging.getLogger()

    # Class Specific Future Declarations
    """
    These are re-set later, but due to lack of python member-variable declarations, they're initially
    set here so the IDE can do autocomplete corrections / predictions.

    It's important they're correctly re-assigned when self.run() is called.
    """
    self.min_disk_demon = MinDiskUsage(self.context)
    self.status_thread = Status(self.context)
    self.dandere2x_cpp_thread = Dandere2xCppWrapper(self.context)
    self.waifu2x = self._get_waifu2x_class(self.context.waifu2x_type)
    self.residual_thread = Residual(self.context)
    self.merge_thread = Merge(self.context)
def resume_concurrent(self): self.context.update_frame_count( ) # we need to count how many outputs there are after ffmpeg extracted stuff self.context.set_logger() if self.context.realtime_encoding_delete_files: print("CANNOT RESUME RUN ON DELETE FILES TYPED SESSION") sys.exit(1) if self.context.user_trim_video: trimed_video = os.path.join(self.context.workspace, "trimmed.mkv") self.context.input_file = trimed_video # get whatever waifu2x class we're using waifu2x = self.get_waifu2x_class(self.context.waifu2x_type) dandere2xcpp_thread = Dandere2xCppWrapper(self.context, resume=True) merge_thread = threading.Thread(target=merge_loop_resume, args=(self.context, )) difference_thread = threading.Thread(target=difference_loop_resume, args=(self.context, )) status_thread = threading.Thread(target=print_status, args=(self.context, )) compress_frames_thread = threading.Thread(target=compress_frames, args=(self.context, )) output_file = self.context.workspace + 'output.mkv' realtime_encode_thread = threading.Thread(target=run_realtime_encoding, args=(self.context, output_file)) self.context.logger.info("Starting Threaded Processes..") waifu2x.start() merge_thread.start() difference_thread.start() dandere2xcpp_thread.start() status_thread.start() compress_frames_thread.start() # these can obviously be combined but leaving them separate for readiability if self.context.realtime_encoding == 1: realtime_encode_thread.start() if self.context.realtime_encoding == 1: realtime_encode_thread.join() compress_frames_thread.join() merge_thread.join() dandere2xcpp_thread.join() difference_thread.join() waifu2x.join() status_thread.join() self.context.logger.info("Threaded Processes Finished successfully")
def resume_concurrent(self):
    """Resume an interrupted session using the legacy explicit-argument workers.

    Builds the waifu2x backend plus the C++ matcher, merge, and difference
    workers in resume mode, starts them, and waits for all of them.

    Raises:
        ValueError: if ``self.waifu2x_type`` is not a supported backend.
    """
    # Pick the waifu2x backend. Previously an unrecognized type left `waifu2x`
    # unbound and crashed later at waifu2x.start() with UnboundLocalError;
    # fail fast with a clear message instead.
    if self.waifu2x_type == "caffe":
        waifu2x = Waifu2xCaffe(self.workspace, self.frame_count,
                               self.waifu2x_caffe_cui_dir, self.model_dir,
                               self.differences_dir, self.upscaled_dir,
                               self.process_type, self.noise_level,
                               self.scale_factor)
    elif self.waifu2x_type == "conv":
        # NOTE(review): `waifu2x_conv_dir_dir` looks like a typo'd attribute
        # name — confirm against wherever this attribute is assigned.
        waifu2x = Waifu2xConv(self.workspace, self.frame_count,
                              self.waifu2x_conv_dir, self.waifu2x_conv_dir_dir,
                              self.differences_dir, self.upscaled_dir,
                              self.noise_level, self.scale_factor)
    else:
        raise ValueError("unsupported waifu2x_type: " + str(self.waifu2x_type))

    dandere2xcpp_thread = Dandere2xCppWrapper(
        self.workspace, self.dandere2x_cpp_dir, self.frame_count,
        self.block_size, self.tolerance, self.mse_max, self.mse_min,
        self.step_size, resume=True, extension_type=self.extension_type)

    merge_thread = threading.Thread(
        target=merge_loop_resume,
        args=(self.workspace, self.upscaled_dir, self.merged_dir,
              self.inversion_data_dir, self.pframe_data_dir,
              self.correction_data_dir, self.frame_count, self.block_size,
              self.scale_factor, self.extension_type))

    difference_thread = threading.Thread(
        target=difference_loop_resume,
        args=(self.workspace, self.upscaled_dir, self.differences_dir,
              self.inversion_data_dir, self.pframe_data_dir,
              self.input_frames_dir, self.frame_count, self.block_size,
              self.extension_type))

    self.logger.info("Starting Threaded Processes..")

    waifu2x.start()
    merge_thread.start()
    difference_thread.start()
    dandere2xcpp_thread.start()

    merge_thread.join()
    dandere2xcpp_thread.join()
    difference_thread.join()
    waifu2x.join()

    # Typo fix: message previously read "succcesfully".
    self.logger.info("Threaded Processes Finished successfully")
def difference_only(self):
    """Run only the difference stage (C++ matcher + difference loop) and
    block until both workers finish."""
    self.pre_setup()

    matcher = Dandere2xCppWrapper(self.context, resume=False)
    differ = threading.Thread(target=difference_loop, args=(self.context, 1))

    self.context.logger.info("Starting Threaded Processes..")

    # Same launch order as before: the difference loop, then the matcher.
    differ.start()
    matcher.start()

    matcher.join()
    differ.join()
def run_concurrent(self):
    """Run a fresh session: synchronously upscale the genesis frame, then
    drive the merge, difference, and C++ workers to completion.

    Raises:
        ValueError: if ``self.context.waifu2x_type`` is not a supported backend.
    """
    self.pre_setup()
    self.set_mse()

    # The genesis frame (frame1 -> merged_1) is treated specially and is
    # upscaled before any worker thread starts.
    first_input = self.context.input_frames_dir + "frame1" + self.context.extension_type
    first_output = self.context.merged_dir + "merged_1" + self.context.extension_type

    if self.context.waifu2x_type == "caffe":
        waifu2x = Waifu2xCaffe(self.context)
        Waifu2xCaffe.upscale_file(self.context,
                                  input_file=first_input,
                                  output_file=first_output)
    elif self.context.waifu2x_type == "conv":
        waifu2x = Waifu2xConv(self.context)
        Waifu2xConv.upscale_file(self.context,
                                 input_file=first_input,
                                 output_file=first_output)
    else:
        # Previously an unknown type left `waifu2x` unbound and crashed later
        # with UnboundLocalError; fail fast with a clear message instead.
        raise ValueError("unsupported waifu2x_type: " + str(self.context.waifu2x_type))

    dandere2xcpp_thread = Dandere2xCppWrapper(self.context, resume=False)
    merge_thread = threading.Thread(target=merge_loop, args=(self.context, 1))
    difference_thread = threading.Thread(target=difference_loop, args=(self.context, 1))

    self.context.logger.info("Starting Threaded Processes..")

    waifu2x.start()
    merge_thread.start()
    difference_thread.start()
    dandere2xcpp_thread.start()

    merge_thread.join()
    dandere2xcpp_thread.join()
    difference_thread.join()
    waifu2x.join()

    # Typo fix: message previously read "succcesfully".
    self.context.logger.info("Threaded Processes Finished successfully")
def __init__(self, context: Context):
    """Initialise the Dandere2x controller thread.

    Installs a custom excepthook (prevents the console window from closing on
    a crash), initialises the underlying Thread, and pre-builds every worker
    so IDEs can resolve the attributes. run() re-creates the workers once the
    context is fully configured.

    Args:
        context: Shared configuration / runtime state for the whole session.
    """
    # Administrative stuff.
    import sys
    sys.excepthook = show_exception_and_exit

    threading.Thread.__init__(self, name="Dandere2x Thread")

    # Class specific state.
    self.context = context
    self.alive = False  # run() flips this once the workers are live

    # Placeholder workers — these exist only for autocomplete / prediction
    # purposes and are re-assigned with the final context inside run().
    self.min_disk_demon = MinDiskUsage(self.context)
    self.status_thread = Status(self.context)
    self.dandere2x_cpp_thread = Dandere2xCppWrapper(self.context)
    self.waifu2x = self._get_waifu2x_class(self.context.waifu2x_type)
    self.residual_thread = Residual(self.context)
    self.merge_thread = Merge(self.context)
def run(self):
    """Preprocess, rebuild every worker against the finalized context,
    extract + upscale the genesis frame, then launch all workers."""
    self._pre_processing()

    # Workers must be re-instantiated now that the context holds its final
    # values (they were only placeholders before this point).
    self.min_disk_demon = MinDiskUsage(self.context)
    self.status_thread = Status(self.context)
    self.dandere2x_cpp_thread = Dandere2xCppWrapper(self.context)
    self.waifu2x = self._get_waifu2x_class(self.context.waifu2x_type)
    self.residual_thread = Residual(self.context)
    self.merge_thread = Merge(self.context)

    self.__extract_frames()
    self.__upscale_first_frame()

    # Start order preserved from the original implementation.
    for worker in (self.min_disk_demon, self.dandere2x_cpp_thread,
                   self.merge_thread, self.residual_thread,
                   self.waifu2x, self.status_thread):
        worker.start()

    self.alive = True
def run_concurrent(self):
    """
    Starts the dandere2x_python process at large.

    Inputs: - context

    Pre-Reqs: 'This is all the stuff that needs to be done before dandere2x can officially start'
    - creates workspaces needed for dandere2x to work
    - edits the video if it's needed to be trimmed or needs resolution needs to be resized.
    - extracts all the frames in the video into it's own folder.
    - upscales the first frame using waifu2x and ensuring the genesis image upscaled correctly.

    Threading Area:
    - calls a series of threads for dandere2x_python to work
      (residuals, merging, waifu2x, dandere2xcpp, realtime-encoding)
    """
    # load context
    output_file = self.context.output_file

    ############
    # PRE REQS #
    ############

    # The first thing to do is create the dirs we will need during runtime
    create_directories(self.context.directories)
    self.context.set_logger()

    # If the user wishes to trim the video, trim the video, then rename the
    # file_dir to point to the trimmed video
    if self.context.user_trim_video:
        trimed_video = os.path.join(self.context.workspace, "trimmed.mkv")
        trim_video(self.context, trimed_video)
        self.context.input_file = trimed_video

    # Before we extract all the frames, we need to ensure the settings are
    # valid. If not, resize the video to make the settings valid somehow.
    if not valid_input_resolution(self.context.width, self.context.height, self.context.block_size):
        self.append_video_resize_filter()

    # Extract all the frames. (String reconstructed: it was split mid-literal
    # by whitespace mangling in the previous revision.)
    print("extracting frames from video... this might take a while..")
    extract_frames(self.context, self.context.input_file)
    self.context.update_frame_count()

    # Assign the waifu2x object to whatever waifu2x we're using
    waifu2x = self.get_waifu2x_class(self.context.waifu2x_type)

    # Upscale the first file (the genesis file is treated different in Dandere2x)
    one_frame_time = time.time()  # This timer prints out how long it takes to upscale one frame
    waifu2x.upscale_file(input_file=self.context.input_frames_dir + "frame1" + self.context.extension_type,
                         output_file=self.context.merged_dir + "merged_1" + self.context.extension_type)

    # Ensure the first file was able to get upscaled. We literally cannot continue if it doesn't.
    if not file_exists(self.context.merged_dir + "merged_1" + self.context.extension_type):
        print("Could not upscale first file.. check logs file to see what's wrong")
        logging.info("Could not upscale first file.. check logs file to see what's wrong")
        logging.info("Exiting Dandere2x...")
        sys.exit(1)

    print("\n Time to upscale an uncompressed frame: " + str(round(time.time() - one_frame_time, 2)))

    ####################
    #  THREADING AREA  #
    ####################

    # Each core function runs in its own thread; start them all and let the
    # pipeline drain itself.
    compress_frames_thread = threading.Thread(target=compress_frames, args=(self.context,))
    dandere2xcpp_thread = Dandere2xCppWrapper(self.context)
    merge_thread = threading.Thread(target=merge_loop, args=(self.context,))
    residual_thread = threading.Thread(target=residual_loop, args=(self.context,))
    status_thread = threading.Thread(target=print_status, args=(self.context,))
    realtime_encode_thread = threading.Thread(target=run_realtime_encoding, args=(self.context, output_file))

    logging.info("starting new d2x process")

    waifu2x.start()
    merge_thread.start()
    residual_thread.start()
    dandere2xcpp_thread.start()
    status_thread.start()
    compress_frames_thread.start()

    if self.context.realtime_encoding_enabled:
        realtime_encode_thread.start()

    compress_frames_thread.join()
    merge_thread.join()
    dandere2xcpp_thread.join()
    residual_thread.join()
    waifu2x.join()
    status_thread.join()

    if self.context.realtime_encoding_enabled:
        realtime_encode_thread.join()

    # Typo fix: message previously read "succcesfully".
    self.context.logger.info("Threaded Processes Finished successfully")
def run_concurrent(self):
    """Run a fresh session with optional realtime encoding: upscale the genesis
    frame synchronously, then start the compress / merge / difference / C++ /
    status workers and wait for all of them.

    Raises:
        ValueError: if ``self.context.waifu2x_type`` is not a supported backend.
    """
    self.pre_setup()
    self.context.update_frame_count()
    verify_user_settings(self.context)

    start = time.time()  # This timer prints out how long it takes to upscale one frame

    # The genesis frame (frame1 -> merged_1) is upscaled before any worker starts.
    first_input = self.context.input_frames_dir + "frame1" + self.context.extension_type
    first_output = self.context.merged_dir + "merged_1" + self.context.extension_type

    # set waifu2x to be whatever waifu2x type we are using
    if self.context.waifu2x_type == "caffe":
        waifu2x = Waifu2xCaffe(self.context)
        Waifu2xCaffe.upscale_file(self.context,
                                  input_file=first_input,
                                  output_file=first_output)
    elif self.context.waifu2x_type == "conv":
        waifu2x = Waifu2xConv(self.context)
        Waifu2xConv.upscale_file(self.context,
                                 input_file=first_input,
                                 output_file=first_output)
    else:
        # Previously an unknown type left `waifu2x` unbound and crashed later
        # with UnboundLocalError; fail fast with a clear message instead.
        raise ValueError("unsupported waifu2x_type: " + str(self.context.waifu2x_type))

    print("\nTime to upscale an uncompressed frame: " + str(round(time.time() - start, 2)))

    output_file = self.context.workspace + 'output.mkv'

    # start all the threads needed for running
    compress_frames_thread = threading.Thread(target=compress_frames, args=(self.context, ))
    dandere2xcpp_thread = Dandere2xCppWrapper(self.context, resume=False)
    merge_thread = threading.Thread(target=merge_loop, args=(self.context, 1))
    difference_thread = threading.Thread(target=difference_loop, args=(self.context, 1))
    status_thread = threading.Thread(target=print_status, args=(self.context, ))
    realtime_encode_thread = threading.Thread(target=run_realtime_encoding,
                                              args=(self.context, output_file))

    self.context.logger.info("Starting Threaded Processes..")

    waifu2x.start()
    merge_thread.start()
    difference_thread.start()
    dandere2xcpp_thread.start()
    status_thread.start()
    compress_frames_thread.start()

    if self.context.realtime_encoding == 1:
        realtime_encode_thread.start()

    compress_frames_thread.join()
    merge_thread.join()
    dandere2xcpp_thread.join()
    difference_thread.join()
    waifu2x.join()
    status_thread.join()

    if self.context.realtime_encoding == 1:
        realtime_encode_thread.join()

    # Typo fix: message previously read "succcesfully".
    self.context.logger.info("Threaded Processes Finished successfully")
def resume_concurrent(self):
    """Resume an interrupted session: re-upscale the genesis frame, then run the
    merge / difference / C++ / status / compress workers in resume mode.

    Raises:
        ValueError: if ``self.context.waifu2x_type`` is not a supported backend.
    """
    self.context.update_frame_count(
    )  # we need to count how many outputs there are after ffmpeg extracted stuff
    verify_user_settings(self.context)

    first_input = self.context.input_frames_dir + "frame1" + self.context.extension_type
    first_output = self.context.merged_dir + "merged_1" + self.context.extension_type

    if self.context.waifu2x_type == "caffe":
        waifu2x = Waifu2xCaffe(self.context)
        Waifu2xCaffe.upscale_file(self.context,
                                  input_file=first_input,
                                  output_file=first_output)
    elif self.context.waifu2x_type == "conv":
        waifu2x = Waifu2xConv(self.context)
        Waifu2xConv.upscale_file(self.context,
                                 input_file=first_input,
                                 output_file=first_output)
    else:
        # Previously an unknown type left `waifu2x` unbound and crashed later
        # with UnboundLocalError; fail fast with a clear message instead.
        raise ValueError("unsupported waifu2x_type: " + str(self.context.waifu2x_type))

    dandere2xcpp_thread = Dandere2xCppWrapper(self.context, resume=True)
    merge_thread = threading.Thread(target=merge_loop_resume, args=(self.context, ))
    difference_thread = threading.Thread(target=difference_loop_resume, args=(self.context, ))
    status_thread = threading.Thread(target=print_status, args=(self.context, ))
    compress_frames_thread = threading.Thread(target=compress_frames, args=(self.context, ))

    output_file = self.context.workspace + 'output.mkv'
    realtime_encode_thread = threading.Thread(target=run_realtime_encoding,
                                              args=(self.context, output_file))

    self.context.logger.info("Starting Threaded Processes..")

    waifu2x.start()
    merge_thread.start()
    difference_thread.start()
    dandere2xcpp_thread.start()
    status_thread.start()
    compress_frames_thread.start()

    # these can obviously be combined but leaving them separate for readability
    if self.context.realtime_encoding == 1:
        realtime_encode_thread.start()

    if self.context.realtime_encoding == 1:
        realtime_encode_thread.join()

    compress_frames_thread.join()
    merge_thread.join()
    dandere2xcpp_thread.join()
    difference_thread.join()
    waifu2x.join()
    status_thread.join()

    self.context.logger.info("Threaded Processes Finished successfully")