def start(web_dir, file_dir, rmd_version, relaunched=False):
    """Launch the eel-based WebUI server.

    Records the given directories and version in module globals, then boots eel
    unless 'interface.start_server' is disabled in settings.

    :param web_dir: Directory containing the UI's static web files.
    :param file_dir: Base directory for downloaded files.
    :param rmd_version: Version string reported to the UI.
    :param relaunched: If True, suppress auto-opening a browser (a window is assumed open).
    :return: True if the server was started, False if disabled by settings.
    """
    global _file_dir, _web_dir, _rmd_version
    _file_dir = os.path.abspath(file_dir)
    _web_dir = os.path.abspath(web_dir)
    _rmd_version = rmd_version

    if not settings.get('interface.start_server'):
        print('WebUI is disabled by settings.')
        return False

    browser = settings.get('interface.browser').lower().strip()
    if browser == 'off' or relaunched:
        browser = None  # eel treats a None mode as "don't open a browser".

    options = {
        'mode': browser,
        'host': settings.get('interface.host'),
        'port': settings.get('interface.port'),
        'chromeFlags': []
    }

    eel.init(web_dir)
    # Non-blocking start; _websocket_close handles shutdown when clients disconnect.
    eel.start('index.html', options=options, block=False, callback=_websocket_close)
    print('Started WebUI!')

    if browser:
        print('Awaiting connection from browser...')
    else:
        print('Browser auto-opening is disabled! Please open a browser to http://%s:%s/index.html !' % (options['host'], options['port']))
    return True
def run(self):
    """Spawn the handler threads and monitor them until the download queue is finished.

    Redraws the progress display at the configured refresh rate while any handler
    is running, then aggregates per-thread counters into totals. The process is
    always stopped via stop_process(), even if monitoring raises.
    """
    max_threads = settings.get('threading.max_handler_threads')
    HandlerThread.reset()
    self.threads = []
    # Start the handler threads.
    for i in range(max_threads):
        ht = HandlerThread('Handler - %s' % (i+1), self._loader)
        ht.start()
        self.threads.append(ht)
    # Fix: the original wrapped this in `except Exception: raise`, a redundant
    # no-op re-raise; try/finally alone gives identical behavior.
    try:
        clear = settings.get('threading.display_clear_screen')
        refresh_rate = max(0.1, settings.get('threading.display_refresh_rate'))
        while self.is_running():
            # Threads are still running...
            self.redraw(clear)
            # Sleep...
            if refresh_rate > 5:  # !cover
                # Break the custom output delay into <=5s steps, so we can exit early if finished.
                # (Loop variable renamed from `t`, which the comprehension below shadowed.)
                steps = max(1, int(refresh_rate/5))
                for _ in range(steps):
                    time.sleep(refresh_rate/steps)
                    if not any(t.keep_running for t in self.threads):
                        break
                time.sleep(refresh_rate % 5)  # Add any extra time on there, to be precise.
            else:
                time.sleep(refresh_rate)
        self.total_posts = sum(th.total_new_posts for th in self.threads)
        self.total_urls = sum(th.total_new_urls for th in self.threads)
        self.failed_urls = sum(th.total_failed_urls for th in self.threads)
        self.redraw(clear)
        print("\r\nQueue finished! (Total Processed: %s)" % self._loader.count_total())
    finally:
        self.stop_process()
def check_duplicates(self, file_path):
    """
    Check the given file path to see if another file like it already exists. Purges worse copies.
    Returns the filename that the file now exists under.
    """
    if not file_path:
        return file_path  # !cover
    # Safety lock: concurrent threads must not race on deleting the same files.
    # In-progress downloads aren't in the hashjar yet, so none can be caught mid-write.
    with HandlerThread.ele_lock:
        if not settings.get('output.deduplicate_files'):
            # Deduplication disabled.
            return file_path  # !cover
        was_new, existing_path = hashjar.add_hash(file_path)  # Register & look up this file's hash.
        if was_new or existing_path == file_path:
            return file_path
        # A duplicate already exists. Keep whichever copy is larger
        # (quick and dirty: larger filesize assumed to mean better quality).
        if os.path.isfile(file_path) and os.path.isfile(existing_path):
            if os.path.getsize(file_path) > os.path.getsize(existing_path):
                manifest.remove_file_hash(existing_path)
                os.remove(existing_path)
                manifest.remap_filepath(existing_path, file_path)
                return file_path
            manifest.remove_file_hash(file_path)
            os.remove(file_path)
            return existing_path
        return file_path
def build_file_info(self, reddit_element):
    """ Generates a dict of file locations and element data that is passed down to every handler,
        so they can choose where best to save for themselves. """
    with HandlerThread.ele_lock:
        dir_pattern = './%s' % settings.save_subdir()
        file_pattern = '%s/%s' % (dir_pattern, settings.save_filename())
        basedir = stringutil.insert_vars(dir_pattern, reddit_element)
        basefile = stringutil.insert_vars(file_pattern, reddit_element)
        if basedir is None or basefile is None:
            # The generated file path is too long; this file cannot be downloaded.
            return None  # !cover
        # De-duplicate the base name with a numeric suffix. A local used-name list is
        # consulted too, since the manifest only learns filenames after downloads finish.
        original_name = basefile
        suffix = 2
        while basefile in HandlerThread.used_files or manifest.get_file_matching(basefile):
            basefile = stringutil.normalize_file(original_name + ' . ' + str(suffix))
            suffix += 1
        HandlerThread.used_files.append(basefile)  # Blacklist this base name while we download.
        self.release_filenames.append(basefile)
        # Pre-generated possible locations & important data for handlers to have access to.
        return {
            'parent_dir': basedir,           # Some handlers must build the parent directory for their single file first.
            'single_file': basefile + "%s",  # Template path used when a handler outputs one file.
            'multi_dir': basefile + "/",     # Save directory for multi-file downloads.
            'post_title': reddit_element.title,          # The title of the Reddit post.
            'post_subreddit': reddit_element.subreddit,  # The subreddit this post came from.
            'user_agent': settings.get('auth.user_agent'),
        }
def api_get_oauth_url():
    """Build the reddit OAuth URL for the premade app.

    Only works when the UI runs on the default port; otherwise returns a
    False url along with an explanatory message.
    """
    default_port = 7505
    url = False
    message = ''
    if settings.get('interface.port') == default_port:
        url = praw_wrapper.get_reddit_token_url()
    else:
        message = 'The UI is not using the default port (%s), and cannot use the premade App to authenticate!' % default_port
    return {'url': url, 'message': message}
def _authorize_rmd_token():
    """Endpoint hit by reddit's OAuth redirect: validates the state token and stores the refresh token."""
    query = eel.btl.request.query
    state = query.state
    print('New refresh code request: ', state, query.code)
    # Only accept the code if the returned state matches our stored OAuth key.
    if state.strip() == settings.get('auth.oauth_key').strip():
        print('Saving new reddit code.')
        refresh = praw_wrapper.get_refresh_token(query.code)
        if refresh:
            settings.put('auth.refresh_token', refresh)
            return 'Saved authorization token! Close this page to continue.'
    return 'Cannot save the new auth key, something went wrong.<br><a href="../index.html">Back</a>'
def _websocket_close(page, old_websockets):
    """Eel callback fired when a WebUI socket closes; shuts the server down if no clients remain."""
    print('A WebUI just closed. Checking for other connections... (%s)[%s]' % (page, len(old_websockets)))
    # Poll for up to ~8 seconds (80 x 0.1s) to let a browser reconnect, e.g. on page navigation.
    for _ in range(80):
        eel.sleep(.1)
        # noinspection PyProtectedMember
        if len(eel._websockets) > 0:
            print('Open connections still exist. Not stopping UI server.')
            return
    if settings.get('interface.keep_open'):
        print('Keeping UI server open...')
    else:
        print('WebUI keep_open is disabled, and all open clients have closed.\nExiting.')
        if _downloader:
            _downloader.stop()
        sys.exit(0)
) if console.confirm( "Are you sure you'd like to edit settings without the UI (if 'yes', these prompts will not show again)?" ): settings.put('interface.start_server', False) # Creates a save. print( 'A settings file has been created for you, at "%s". Please customize it.' % settings_file) else: print('Please re-run RMD to configure again.') sys.exit(1) else: mode = console.prompt_list( 'How would you like to open the UI?', settings.get('interface.browser', full_obj=True).opts) settings.put('interface.browser', mode, save_after=False) settings.put('interface.start_server', True) else: print( 'Skipping prompts is enabled, please edit the settings file yourself.' ) settings.put('interface.start_server', False) sys.exit(1) if settings.get( 'interface.start_server') and not args.no_restart and not args.test: # If run in UI mode, the initial script will stick here & reboot copies as needed. # A new RMD instance is only started if the last one exited with the special "restart" code. # This should always be performed before any DB or PRAW initialization, because it needs neither. sargs = list(filter(lambda x: not x.startswith('--update'),
def api_get_oauth_url():
    """Return the premade-app reddit OAuth URL, or False when the UI isn't on the default port (7505)."""
    if settings.get('interface.port') == 7505:
        return praw_wrapper.get_reddit_token_url()
    return False