def do_mythtv_update_autodownload(self, line):
    """Update feeds and auto-download"""
    logging.info("Starting auto downloader...")
    autodler.start_downloader()
    feed.expire_items()
    starttime = clock()
    logging.timing("Icon clear: %.3f", clock() - starttime)
    logging.info("Starting video updates")
    moviedata.movieDataUpdater.startThread()
    parse_command_line_args()
    # autoupdate.check_for_updates()
    # Wait a bit before starting the downloader daemon.  It can cause a bunch
    # of disk/CPU load, so try to avoid it slowing other stuff down.
    eventloop.addTimeout(5, downloader.startupDownloader,
                         "start downloader daemon")
    # ditto for feed updates
    eventloop.addTimeout(30, feed.start_updates, "start feed updates")
    # ditto for clearing stale icon cache files, except it's the very lowest
    # priority
    eventloop.addTimeout(10, iconcache.clear_orphans, "clear orphans")
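Every snippet in this section logs through logging.timing, which is not a standard method of Python's logging module; the application evidently registers its own helper. A minimal sketch of how such a helper could be wired up (the level name and the numeric value 15 are guesses, not the original values):

import logging

TIMING = 15  # assumed custom level between DEBUG (10) and INFO (20)
logging.addLevelName(TIMING, "TIMING")

def _timing(msg, *args, **kwargs):
    # Route timing messages through the standard logging machinery.
    logging.log(TIMING, msg, *args, **kwargs)

logging.timing = _timing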
def time_trap_call(when, function, *args, **kwargs):
    global cancel
    cancel = False
    start = clock()
    retval = trap_call(when, function, *args, **kwargs)
    end = clock()
    if cancel:
        return retval
    if end - start > 1.0:
        logging.timing("WARNING: %s too slow (%.3f secs)", when,
                       end - start)
    if TRACK_CUMULATIVE:
        try:
            total = cumulative[when]
        except KeyError:
            total = 0
        total += end - start
        cumulative[when] = total
        if total > 5.0:
            logging.timing("%s cumulative is too slow (%.3f secs)", when,
                           total)
            cumulative[when] = 0
        return retval
    cancel = True
    return retval
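time_trap_call leans on module-level state that the snippet does not show. A sketch of that scaffolding, under the assumption that trap_call runs the function and logs (rather than propagates) any exception; all definitions below are illustrative, not the original module:

import logging
from time import time as clock  # assumption: clock() returns seconds

TRACK_CUMULATIVE = True
cumulative = {}   # maps a "when" label to its running total of seconds
cancel = False    # a handler can set this to skip the slowness checks

def trap_call(when, function, *args, **kwargs):
    # Assumed behavior: invoke the function, log any exception with its
    # context label, and return None on failure.
    try:
        return function(*args, **kwargs)
    except Exception:
        logging.exception("exception %s", when)
        return None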
def _check_time(self, sql, query_time):
    SINGLE_QUERY_LIMIT = 0.5
    CUMULATIVE_LIMIT = 1.0
    if query_time > SINGLE_QUERY_LIMIT:
        logging.timing("query slow (%0.3f seconds): %s", query_time, sql)
    return
    # Comment out the return above to test cumulative query times:
    # more than a second spent on the same query in the last 5 seconds.
    old_times = self._query_times.setdefault(sql, [])
    now = time.time()
    dropoff_time = now - 5
    cumulative = query_time
    for i in reversed(xrange(len(old_times))):
        old_time, old_query_time = old_times[i]
        if old_time < dropoff_time:
            # Delete in place so the list stored in _query_times is
            # actually trimmed (rebinding old_times would not persist).
            del old_times[:i+1]
            break
        cumulative += old_query_time
    old_times.append((now, query_time))
    if cumulative > CUMULATIVE_LIMIT:
        logging.timing('query cumulatively slow: %0.2f '
                       '(%0.03f): %s', cumulative, query_time, sql)
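_check_time assumes self._query_times maps SQL strings to lists of (timestamp, duration) pairs. A hypothetical wrapper showing how it could be fed, with the _check_time method above attached to this class; the class and method names here are assumptions, not the original API:

import time
import sqlite3

class TimedConnection(object):
    def __init__(self, path):
        self._connection = sqlite3.connect(path)
        self._query_times = {}  # sql -> [(timestamp, duration), ...]

    # _check_time from above would be defined on this class.

    def execute(self, sql, *args):
        # Time each statement and hand the duration to _check_time.
        start = time.time()
        result = self._connection.execute(sql, *args)
        self._check_time(sql, time.time() - start)
        return result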
def dispatch(self):
    success = True
    if not self.canceled:
        when = "While handling %s" % self.name
        start = clock()
        success = trapcall.trap_call(when, self.function,
                                     *self.args, **self.kwargs)
        end = clock()
        if end - start > 0.5:
            logging.timing("%s too slow (%.3f secs)",
                           self.name, end - start)
        try:
            total = cumulative[self.name]
        except (KeyError, AttributeError):
            total = 0
        total += end - start
        cumulative[self.name] = total
        if total > 5.0:
            logging.timing("%s cumulative is too slow (%.3f secs)",
                           self.name, total)
            cumulative[self.name] = 0
    self._unlink()
    return success
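dispatch reads like a method of a scheduled-callback object in the event loop. A minimal stand-in showing the attributes it relies on; everything below beyond the attribute names dispatch itself uses is guesswork:

class DelayedCall(object):
    def __init__(self, function, name, args, kwargs):
        self.function = function
        self.name = name
        self.args = args
        self.kwargs = kwargs
        self.canceled = False

    # dispatch from above would be defined here.

    def cancel(self):
        self.canceled = True

    def _unlink(self):
        # Assumption: drop references so the event loop can release the
        # callback and its arguments once it has been dispatched.
        self.function = self.args = self.kwargs = None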
        description = _(
            "You have a database that was saved with a newer version of "
            "%(appname)s. You must download the latest version of "
            "%(appname)s and run that.",
            {"appname": app.config.get(prefs.SHORT_APP_NAME)},
        )
        raise StartupError(summary, description)
    except storedatabase.UpgradeErrorSendCrashReport, e:
        send_startup_crash_report(e.report)
        return
    except storedatabase.UpgradeError:
        raise StartupError(None, None)
    database.initialize()
    downloader.reset_download_stats()
    end = time.time()
    logging.timing("Database upgrade time: %.3f", end - start)
    if app.db.startup_version != app.db.current_version:
        databaselog.info("Upgraded database from version %s to %s",
                         app.db.startup_version, app.db.current_version)
        databaselog.print_old_log_entries()
    models.initialize()
    if DEBUG_DB_MEM_USAGE:
        util.db_mem_usage_test()
        mem_usage_test_event.set()
    dbupgradeprogress.upgrade_end()
    app.startup_timer.log_time("after db upgrade")
    app.icon_cache_updater = iconcache.IconCacheUpdater()
    setup_global_feeds()
def log_total_time(self):
    logging.timing("total time: %0.3f", clock() - self.start_time)
def log_time(self, msg):
    current_time = clock()
    logging.timing("%s: %0.4f", msg, current_time - self.last_time)
    self.last_time = current_time
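log_time and log_total_time look like methods of a small stopwatch helper; the app.startup_timer.log_time call in the startup fragment above suggests it instruments startup phases. A sketch of the class they could live on, with a usage example; the constructor and the sleeps below are assumptions, and logging.timing is the helper sketched near the top of this section:

import time
import logging
from time import time as clock  # assumption: clock() returns seconds

class Timer(object):
    def __init__(self):
        # Record both the overall start and the last checkpoint.
        self.start_time = self.last_time = clock()

    def log_time(self, msg):
        current_time = clock()
        logging.timing("%s: %0.4f", msg, current_time - self.last_time)
        self.last_time = current_time

    def log_total_time(self):
        logging.timing("total time: %0.3f", clock() - self.start_time)

timer = Timer()
time.sleep(0.2)  # stand-in for a real startup step
timer.log_time("database load")
time.sleep(0.1)  # stand-in for another startup step
timer.log_time("widget build")
timer.log_total_time()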
summary = _("Database too new") description = _( "You have a database that was saved with a newer version of " "%(appname)s. You must download the latest version of " "%(appname)s and run that.", {"appname": app.config.get(prefs.SHORT_APP_NAME)}, ) raise StartupError(summary, description) except storedatabase.UpgradeErrorSendCrashReport, e: send_startup_crash_report(e.report) return except storedatabase.UpgradeError: raise StartupError(None, None) database.initialize() end = time.time() logging.timing("Database upgrade time: %.3f", end - start) if app.db.startup_version != app.db.current_version: databaselog.info("Upgraded database from version %s to %s", app.db.startup_version, app.db.current_version) databaselog.print_old_log_entries() models.initialize() if DEBUG_DB_MEM_USAGE: util.db_mem_usage_test() mem_usage_test_event.set() # MetadataProgressUpdater needs to be installed before ItemInfoCache, # since ItemInfoCache may create items if it uses failsafe mode app.metadata_progress_updater = metadataprogress.MetadataProgressUpdater() app.item_info_cache = iteminfocache.ItemInfoCache() app.item_info_cache.load() dbupgradeprogress.upgrade_end()