def cleanup_cache(config: util.SearchConfig) -> None:
    """Delete cached web pages older than the configured TTL.

    Walks ``config.cache_web_dir`` recursively and removes every file whose
    modification time is more than ``config.cache_web_ttl_sec`` seconds old.
    Logs a summary when done, or an abort message when the directory does
    not exist.

    Args:
        config: search configuration providing ``cache_web_dir`` and
            ``cache_web_ttl_sec``.
    """
    cache_dir = config.cache_web_dir
    if not os.path.isdir(cache_dir):
        logging.debug('Cache cleanup aborted: "{}" doesn\'t exist'.format(cache_dir))
        return
    logging.debug('Cleanup cache at "{}"...'.format(cache_dir))
    now = time.time()
    deleted = 0
    for dirpath, _dirnames, filenames in os.walk(cache_dir):
        for filename in filenames:
            file = os.path.join(dirpath, filename)
            try:
                # Stat inside the try as well: the file may vanish between
                # os.walk() listing it and us touching it (previously a
                # failing getmtime() would abort the whole cleanup).
                mtime = os.path.getmtime(file)
                if now - mtime > config.cache_web_ttl_sec:
                    os.remove(file)
                    deleted += 1
            except OSError as e:
                logging.debug('Failed to delete "{}" from cache: {}'.format(file, e.strerror))
    total_time = time.time() - now
    logging.debug('Cache cleanup completed, deleted {} files from {}, {}'.format(
        deleted, cache_dir, util.format_time_ms(total_time)))
def download_webpage(self, url, headers=None):
    """Return the page at *url* as bytes, using the on-disk web cache.

    Serves the page from cache when present; otherwise downloads it with
    the configured timeout, stores it in the cache, and returns it.

    Args:
        url: address of the page to fetch.
        headers: optional extra request headers, merged via
            ``self.get_request_headers``.

    Returns:
        The page content as bytes, or ``None`` on any failure
        (HTTP error, URL error, connection reset, or timeout).
    """
    start = time.time()
    cache_file_name = self.get_cache_file_name(url)
    cached_page = self.get_page_from_cache(cache_file_name)
    if cached_page:
        logging.debug('Get web-page from cache "{}": "{}", {}, {}'.format(
            url, cache_file_name,
            util.format_file_size(self.get_psize(cached_page)),
            util.format_time_ms(time.time() - start)))
        return cached_page
    req = urllib.request.Request(url, headers=self.get_request_headers(headers))
    try:
        with urllib.request.urlopen(
                req, timeout=self.config.web_timeout_sec) as response:
            page = response.read()
        self.put_page_to_cache(cache_file_name, page, url)
        logging.debug('Download web-page from "{}", {}, {}'.format(
            url, util.format_file_size(self.get_psize(page)),
            util.format_time_ms(time.time() - start)))
        return page
    except urllib.error.HTTPError as err:
        logging.debug(
            'Failed to download web-page from "{}", HTTP error: {}, {}'.
            format(url, err.code, err.reason))
        return None
    except urllib.error.URLError as err:
        logging.debug(
            'Failed to download web-page from "{}", error: {}'.format(
                url, err.reason))
        return None
    except ConnectionResetError as err:
        logging.debug(
            'Failed to download web-page from "{}", error: {}, {}'.format(
                url, err.errno, err.strerror))
        return None
    except socket.timeout:
        logging.debug(
            'Failed to download web-page from "{}", timed out'.format(url))
        # Fix: was an implicit fall-through; make the failure result
        # explicit and consistent with the other error branches.
        return None
def do_search(plugin, artist, title):
    """Run one plugin's lyrics search and tag the result with plugin metadata.

    Args:
        plugin: search plugin exposing ``search_song``, ``plugin_name``,
            ``ID`` and ``RANK``.
        artist: artist name to search for.
        title: song title to search for.

    Returns:
        The found song with ``plugin_id``/``plugin_rank``/``plugin_name``
        set, or ``None`` when nothing was found or the plugin raised.
    """
    logging.info('Search lyrics on "{}" [{}]...'.format(plugin.plugin_name, plugin.ID))
    try:
        start_time = time.time()
        song = plugin.search_song(artist, title)
        total_time = time.time() - start_time
        if song:
            song.plugin_id = plugin.ID
            song.plugin_rank = plugin.RANK
            song.plugin_name = plugin.plugin_name
            logging.info('Found song info on "{}" [{}], {}'.format(
                plugin.plugin_name, plugin.ID, util.format_time_ms(total_time)))
            return song
        logging.info('Nothing was found on "{}" [{}], {}'.format(
            plugin.plugin_name, plugin.ID, util.format_time_ms(total_time)))
    except Exception:
        # Plugins are external code: log the full traceback but never let
        # one plugin's failure break the overall search.
        logging.exception('Failed to get info from "{}" [{}]'.format(plugin.plugin_name, plugin.ID))
    # Fix: removed the dead `pass` after logging.exception; make the
    # "not found / failed" result explicit.
    return None
def finish_search(self, worker_id, artist, title, songs, total_time, background):
    """Publish the results of a finished search to the UI.

    A background search (triggered by the player) only refreshes the
    results table and the status line. A foreground search additionally
    resets the layout and shows a tray notification — unless its worker
    has been superseded by a newer search, in which case the stale
    results are dropped.
    """
    if background:
        self.set_status_message('Listening to the player...')
        self.main_window.lyrics_table_model.update_data(songs)
        return
    active = self.worker_search
    # Drop results coming from a worker that is no longer the active one.
    if not active or active.worker_id != worker_id:
        return
    self.init_layout(State.waiting)
    self.set_status_message('Search completed in {}'.format(util.format_time_ms(total_time)))
    self.main_window.lyrics_table_model.update_data(songs)
    self.show_tray_notification(artist, title, songs)