def _DisplayMain(self):
  """Renders the Apple SUS admin listing page.

  Gathers active Apple SUS products (annotated with pending auto-promote
  dates), per-OS-version catalog mtimes, pending catalog-regeneration
  state, and cached install counts, then renders applesus_list.html.
  """
  product_query = models.AppleSUSProduct.AllActive().order('-apple_mtime')
  products = []
  # NOTE(user): the following adds about 700ms onto the request, so we may
  # want to pre-calculate this in a cron in the future.
  for product in gae_util.QueryIterator(product_query, step=100):
    # Annotate products not yet on a track with their auto-promote date.
    for track, date_attr in ((common.STABLE, 'stable_promote_date'),
                             (common.TESTING, 'testing_promote_date')):
      if track not in product.tracks:
        setattr(product, date_attr,
                applesus.GetAutoPromoteDate(track, product))
    products.append(product)

  # One row per OS version: catalog mtime for each track (None if missing).
  catalogs = []
  for os_version in applesus.OS_VERSIONS:
    row = {'os_version': os_version}
    for track in ['untouched'] + common.TRACKS:
      catalog_key = '%s_%s' % (os_version, track)
      catalog = models.AppleSUSCatalog.MemcacheWrappedGet(catalog_key)
      row[track] = catalog.mtime if catalog else None
    catalogs.append(row)

  # A track is "pending" if a regeneration lock exists for any OS version.
  catalogs_pending = {}
  for track in common.TRACKS:
    pending = False
    for os_version in applesus.OS_VERSIONS:
      lock_name = applesus.CatalogRegenerationLockName(track, os_version)
      # Always call LockExists (no short-circuit), as the original did.
      pending = gae_util.LockExists(lock_name) or pending
    catalogs_pending[track] = pending

  install_counts, counts_mtime = models.ReportsCache.GetInstallCounts()

  data = {
      'catalogs': catalogs,
      'catalogs_pending': catalogs_pending,
      'products': products,
      'install_counts': install_counts,
      'install_counts_mtime': counts_mtime,
      'tracks': common.TRACKS,
      'auto_promote_enabled': settings.APPLE_AUTO_PROMOTE_ENABLED,
      'auto_promote_stable_weekday':
          calendar.day_name[settings.APPLE_AUTO_PROMOTE_STABLE_WEEKDAY],
      'unstable_grace_period_days':
          settings.APPLE_UNSTABLE_GRACE_PERIOD_DAYS,
      'testing_grace_period_days': settings.APPLE_TESTING_GRACE_PERIOD_DAYS,
      'report_type': 'apple_applesus'
  }
  self.Render('applesus_list.html', data)
def _GenerateTrendingInstallsCache(since_hours=None):
  """Generates trending install and failure data.

  Args:
    since_hours: optional int; window of install logs to examine, in hours.
        Defaults to 1 when falsy.
  """
  success_counts = {}
  failure_counts = {}
  total_success = 0
  total_failure = 0

  if not since_hours:
    since_hours = 1
  cutoff = datetime.datetime.utcnow() - datetime.timedelta(
      minutes=since_hours * 60)

  query = models.InstallLog.all().filter('mtime >', cutoff)
  for install in gae_util.QueryIterator(query):
    pkg = install.package.encode('utf-8')
    if install.IsSuccess():
      success_counts[pkg] = success_counts.get(pkg, 0) + 1
      total_success += 1
    else:
      failure_counts[pkg] = failure_counts.get(pkg, 0) + 1
      total_failure += 1

  def _top(counts, total):
    """Returns top trending (pkg, count, percent) tuples, highest first."""
    ranked = sorted(counts.items(), key=lambda item: item[1], reverse=True)
    # Note: when total is 0 the list is empty, so no division occurs here.
    return [(pkg, count, float(count) / total * 100)
            for pkg, count in ranked[:TRENDING_INSTALLS_LIMIT]]

  trending = {
      'success': {
          'packages': _top(success_counts, total_success),
          'total': total_success,
      },
      'failure': {
          'packages': _top(failure_counts, total_failure),
          'total': total_failure,
      },
  }
  models.ReportsCache.SetTrendingInstalls(since_hours, trending)
def All(self, min_age_seconds=None):
  """Iterate through all session entities, yielding each.

  Args:
    min_age_seconds: int seconds of minimum age sessions to return.
  Yields:
    session entity object
  """
  query = self.model.all()
  if min_age_seconds:
    # Only return sessions whose mtime is older than the requested age.
    cutoff = datetime.datetime.utcnow() - datetime.timedelta(
        seconds=min_age_seconds)
    query = query.filter('mtime <', cutoff)
  for entity in gae_util.QueryIterator(query, step=100):
    yield entity
def get(self):
  """Handle GET.

  Appends average install duration text to each PackageInfo description
  (where ReportsCache has duration data), then kicks off asynchronous
  catalog regeneration so the updated plists are published.
  """
  pkgs, unused_dt = models.ReportsCache.GetInstallCounts()
  for p in gae_util.QueryIterator(models.PackageInfo.all()):
    if not p.plist:
      continue  # skip over pkginfos without plists.
    if p.munki_name not in pkgs:
      # Skip pkginfos that ReportsCache lacks.
      continue
    elif not pkgs[p.munki_name].get('duration_seconds_avg', None):
      # Skip pkginfos where there is no known average duration.
      continue

    # Obtain a lock on the PackageInfo entity for this package, or skip.
    lock = models.GetLockForPackage(p.filename)
    try:
      lock.Acquire(timeout=600, max_acquire_attempts=5)
    except datastore_locks.AcquireLockError:
      continue  # Skip; it'll get updated next time around.

    # FIX: release the lock even if put() (or anything below) raises;
    # previously an exception here leaked the lock until its 600s timeout.
    try:
      # Append the avg duration text to the description; in the future the
      # avg duration time and overall install count will be added to their
      # own pkginfo keys so the information can be displayed independently.
      # This requires MSU changes to read and display such values, so for now
      # simply append text to the description.
      old_desc = p.plist['description']
      avg_duration_text = models.PackageInfo.AVG_DURATION_TEXT % (
          pkgs[p.munki_name]['duration_count'],
          pkgs[p.munki_name]['duration_seconds_avg'])
      p.description = '%s\n\n%s' % (p.description, avg_duration_text)

      # Only write the entity when the description actually changed.
      if p.plist['description'] != old_desc:
        p.put(avoid_mtime_update=True)
    finally:
      lock.Release()

  # Asynchronously regenerate all Catalogs to include updated pkginfo plists.
  delay = 0
  for track in common.TRACKS:
    delay += 5
    models.Catalog.Generate(track, delay=delay)
def testIteration(self):
  """Test that QueryIterator pages through results until an empty fetch."""
  mock_query = self.mox.CreateMockAnything()
  step = 2
  first_page = [1, 2]
  second_page = [3, 4]

  # Record the expected fetch/cursor call sequence, in order.
  mock_query.fetch(step).AndReturn(first_page)
  mock_query.cursor().AndReturn('cursor1')
  mock_query.with_cursor('cursor1')
  mock_query.fetch(step).AndReturn(second_page)
  mock_query.cursor().AndReturn('cursor2')
  mock_query.with_cursor('cursor2')
  mock_query.fetch(step).AndReturn([])

  self.mox.ReplayAll()
  results = list(gae_util.QueryIterator(mock_query, step=step))
  self.assertEqual(results, first_page + second_page)
  self.mox.VerifyAll()