def Generate(cls, name, delay=0):
  """Generates a Catalog plist and entity from matching PackageInfo entities.

  Args:
    name: str, catalog name. all PackageInfo entities with this name in the
        "catalogs" property will be included in the generated catalog.
    delay: int, if > 0, Generate call is deferred this many seconds.
  """
  if delay:
    # Defer this same call via the taskqueue instead of running inline.
    # The microsecond suffix keeps the task name unique per invocation.
    now = datetime.datetime.utcnow()
    now_str = '%s-%d' % (now.strftime('%Y-%m-%d-%H-%M-%S'), now.microsecond)
    deferred_name = 'create-catalog-%s-%s' % (name, now_str)
    deferred.defer(cls.Generate, name, _name=deferred_name, _countdown=delay)
    return

  # Serialize catalog generation per catalog name.
  lock_name = 'catalog_lock_%s' % name
  lock = datastore_locks.DatastoreLock(lock_name)
  try:
    lock.Acquire(timeout=600, max_acquire_attempts=2)
  except datastore_locks.AcquireLockError:
    # If catalog creation for this name is already in progress then delay.
    logging.debug('Catalog creation for %s is locked. Delaying....', name)
    cls.Generate(name, delay=10)
    return

  package_names = []
  try:
    midnight = datetime.datetime.combine(
        datetime.date.today(), datetime.time(0, 0))
    # Seed mtimes with today's midnight so the catalog mtime advances at
    # least daily: new catalogs carry updated average install durations.
    mtimes = [midnight]
    pkgsinfo_dicts = []
    package_infos = PackageInfo.all().filter('catalogs =', name).fetch(None)
    if not package_infos:
      logging.warning('No PackageInfo entities with catalog: %s', name)
    for p in package_infos:
      package_names.append(p.name)
      pkgsinfo_dicts.append(p.plist.GetXmlContent(indent_num=1))
      mtimes.append(p.mtime)

    catalog = constants.CATALOG_PLIST_XML % '\n'.join(pkgsinfo_dicts)

    c = cls.get_or_insert(name)
    c.package_names = package_names
    c.name = name
    c.plist = catalog
    # mtime is set explicitly from the newest member (or midnight), so
    # suppress the entity's automatic mtime update on put.
    c.mtime = max(mtimes)
    c.put(avoid_mtime_update=True)
    cls.DeleteMemcacheWrap(name)
    # Generate manifest for newly generated catalog.
    Manifest.Generate(name, delay=1)
  except (db.Error, plist_lib.Error):
    logging.exception('Catalog.Generate failure for catalog: %s', name)
    raise
  finally:
    lock.Release()
def testGenerateAppleSUSCatalog(self):
  """Test GenerateAppleSUSCatalog()."""
  catalog_xml = self._GetTestData('applesus.sucatalog')
  track = 'testing'
  os_version = '10.6'

  # Two active products; only their product_ids should survive the prune.
  product_one = self.mox.CreateMockAnything()
  product_one.product_id = 'ID1'
  product_two = self.mox.CreateMockAnything()
  product_two.product_id = 'ID3'
  products = [product_one, product_two]

  mock_catalog_obj = self.mox.CreateMockAnything()
  mock_catalog_obj.plist = catalog_xml
  mock_query = self.mox.CreateMockAnything()
  mock_new_catalog_obj = self.mox.CreateMockAnything()

  self.mox.StubOutWithMock(
      applesus.models.AppleSUSCatalog, 'get_by_key_name')
  self.mox.StubOutWithMock(applesus.models, 'AppleSUSCatalog')
  self.mox.StubOutWithMock(applesus.models.AppleSUSProduct, 'AllActive')

  # Record phase: the untouched catalog is fetched, active products are
  # filtered by track, then a backup and the track catalog are written.
  applesus.models.AppleSUSCatalog.get_by_key_name(
      '%s_untouched' % os_version).AndReturn(mock_catalog_obj)
  applesus.models.AppleSUSProduct.AllActive().AndReturn(mock_query)
  mock_query.filter('tracks =', track).AndReturn(products)

  mock_datetime = self.mox.CreateMockAnything()
  utcnow = datetime.datetime(2010, 9, 2, 19, 30, 21, 377827)
  now_str = '2010-09-02-19-30-21'
  mock_datetime.utcnow().AndReturn(utcnow)
  applesus.models.AppleSUSCatalog(
      key_name='backup_%s_%s_%s' % (
          os_version, track, now_str)).AndReturn(mock_new_catalog_obj)
  mock_new_catalog_obj.put().AndReturn(None)
  applesus.models.AppleSUSCatalog(
      key_name='%s_%s' % (os_version, track)).AndReturn(mock_new_catalog_obj)
  mock_new_catalog_obj.put().AndReturn(None)

  # A pre-acquired lock is handed in; the function must release it.
  lock_name = 'lock_name'
  lock = datastore_locks.DatastoreLock(lock_name)
  lock.Acquire()

  self.mox.ReplayAll()
  _, new_plist = applesus.GenerateAppleSUSCatalog(
      os_version, track, mock_datetime, catalog_lock=lock)
  # Only products on the track remain in the generated catalog.
  self.assertTrue('ID1' in new_plist['Products'])
  self.assertTrue('ID2' not in new_plist['Products'])
  self.assertTrue('ID3' in new_plist['Products'])
  self.assertTrue('ID4' not in new_plist['Products'])
  self.mox.VerifyAll()
  self.assertFalse(gae_util.LockExists(lock_name))
def testUpdateWithObtainLockFailure(self):
  """Test Update() with a failure obtaining the lock."""
  p = models.PackageInfo()
  p.filename = 'foofile.dmg'
  # Pre-acquire the pkgsinfo lock so Update() cannot obtain it.
  datastore_locks.DatastoreLock('pkgsinfo_%s' % p.filename).Acquire()
  self.mox.ReplayAll()
  # Update() must surface the contention as PackageInfoLockError.
  self.assertRaises(models.PackageInfoLockError, p.Update)
  self.mox.VerifyAll()
def GenerateAppleSUSCatalogs(track=None, tracks=None, delay=0):
  """Generates Apple SUS catalogs for a given track, set of tracks, or all.

  Note: this generates tracks for all os_versions on the given track/tracks.

  Args:
    track: string track to generate catalog for. OR,
    tracks: list of string tracks.
    delay: int. if > 0, defer generating the catalogs by this many seconds.

  Raises:
    ValueError: both track and tracks were supplied.
  """
  if track and tracks:
    raise ValueError('only one of track and tracks is allowed')
  elif not tracks and not track:
    tracks = common.TRACKS
  elif track:
    tracks = [track]

  for track in tracks:
    for os_version in OS_VERSIONS:
      # One lock per (track, os_version); the lock is released by
      # GenerateAppleSUSCatalog, which receives it via catalog_lock.
      lock_name = CatalogRegenerationLockName(track, os_version)
      lock = datastore_locks.DatastoreLock(lock_name)
      try:
        lock.Acquire(timeout=600 + delay, max_acquire_attempts=1)
      except datastore_locks.AcquireLockError:
        # Regeneration already in progress; skip this combination.
        continue
      if delay:
        now_str = datetime.datetime.utcnow().strftime('%Y-%m-%d-%H-%M-%S')
        deferred_name = 'gen-applesus-catalog-%s-%s-%s' % (
            os_version, track, now_str)
        # Task names only permit word characters and dashes.
        deferred_name = re.sub(r'[^\w-]', '', deferred_name)
        try:
          deferred.defer(
              GenerateAppleSUSCatalog, os_version, track, catalog_lock=lock,
              _countdown=delay, _name=deferred_name)
        except taskqueue.TaskAlreadyExistsError:
          logging.info(
              'Skipping duplicate Apple SUS Catalog generation task.')
          # The pre-existing task holds its own lock object; release this
          # one so it doesn't stay acquired until its timeout expires.
          lock.Release()
      else:
        GenerateAppleSUSCatalog(os_version, track, catalog_lock=lock)

  # Regenerate the Apple update metadata catalog, deferred if requested.
  if delay:
    now_str = datetime.datetime.utcnow().strftime('%Y-%m-%d-%H-%M-%S')
    deferred_name = 'gen-sus-metadata-catalog-%s' % now_str
    deferred_name = re.sub(r'[^\w-]', '', deferred_name)
    try:
      deferred.defer(GenerateAppleSUSMetadataCatalog, _name=deferred_name)
    except taskqueue.TaskAlreadyExistsError:
      logging.info(
          'Skipping duplicate Apple SUS metadata catalog generation task.')
  else:
    GenerateAppleSUSMetadataCatalog()
def Generate(cls, name, delay=0):
  """Generates a Manifest plist and entity from matching PackageInfo entities.

  Args:
    name: str, manifest name. all PackageInfo entities with this name in the
        "manifests" property will be included in the generated manifest.
    delay: int. if > 0, Generate call is deferred this many seconds.
  """
  if delay:
    # Defer this same call via the taskqueue; microsecond suffix keeps the
    # task name unique per invocation.
    now = datetime.datetime.utcnow()
    now_str = '%s-%d' % (now.strftime('%Y-%m-%d-%H-%M-%S'), now.microsecond)
    deferred_name = 'create-manifest-%s-%s' % (name, now_str)
    deferred.defer(cls.Generate, name, _name=deferred_name, _countdown=delay)
    return

  # Serialize manifest generation per manifest name.
  lock_name = 'manifest_lock_%s' % name
  lock = datastore_locks.DatastoreLock(lock_name)
  try:
    lock.Acquire(timeout=30, max_acquire_attempts=1)
  except datastore_locks.AcquireLockError:
    # Another generation is in flight; retry via a deferred call.
    logging.debug('Manifest.Generate for %s is locked. Delaying....', name)
    cls.Generate(name, delay=5)
    return

  try:
    install_types = {}
    package_infos = PackageInfo.all().filter('manifests =', name).fetch(None)
    if not package_infos:
      logging.warning('No PackageInfo entities with manifest: %s', name)
    for p in package_infos:
      # Add all installs to their appropriate install type containers.
      for install_type in p.install_types:
        if install_type not in install_types:
          install_types[install_type] = []
        install_types[install_type].append(p.name)

    # Generate a dictionary of the manifest data.
    manifest_dict = {'catalogs': [name, 'apple_update_metadata']}
    for k, v in install_types.iteritems():
      manifest_dict[k] = v
    # Save the new manifest to Datastore.
    manifest_entity = cls.get_or_insert(name)
    # Turn the manifest dictionary into XML.
    manifest_entity.plist.SetContents(manifest_dict)
    manifest_entity.put()
    cls.DeleteMemcacheWrap(name)
  except (db.Error, plist_lib.Error):
    logging.exception('Manifest.Generate failure: %s', name)
    raise
  finally:
    lock.Release()
def testGetSuccessWhenHashLockFail(self):
  """Test get() when the hash header is requested but the lock fails."""
  filename = 'pkg name.dmg'
  filename_quoted = 'pkg%20name.dmg'
  self.MockDoAnyAuth()

  # Requesting hash=1 makes get() try to acquire the pkgsinfo lock.
  self.request.get('hash').AndReturn('1')
  # Pre-acquire the lock so the handler's acquire attempt fails.
  datastore_locks.DatastoreLock('pkgsinfo_%s' % filename).Acquire()
  # Lock contention must be reported as 403 with an explanatory body.
  self.response.set_status(httplib.FORBIDDEN).AndReturn(None)
  self.response.out.write('Could not lock pkgsinfo').AndReturn(None)

  self.mox.ReplayAll()
  self.c.get(filename_quoted)
  self.mox.VerifyAll()
def testGenerateLocked(self):
  """Tests Manifest.Generate() where name is locked."""
  name = 'lockedname'
  # Pre-acquire the manifest lock so Generate() hits the locked path.
  datastore_locks.DatastoreLock('manifest_lock_%s' % name).Acquire()
  # here is where Manifest.Generate calls itself; can't stub the method we're
  # testing, so mock the calls that happen as a result.
  self.stubs.Set(models.deferred, 'defer', self.mox.CreateMockAnything())
  models.deferred.defer(
      models.Manifest.Generate, name, _name=mox.IgnoreArg(), _countdown=5)
  self.mox.ReplayAll()
  models.Manifest.Generate(name)
  self.mox.VerifyAll()
def GetLockForPackage(filename):
  """Returns a DatastoreLock guarding the pkginfo for the given filename."""
  return datastore_locks.DatastoreLock(PACKAGE_LOCK_PREFIX + filename)
class PackagesInfo(handlers.AuthenticationHandler):
  """Handler for /pkgsinfo/"""

  def get(self, filename=None):
    """GET

    Args:
      filename: string like Firefox-1.0.dmg
    """
    auth_return = auth.DoAnyAuth()
    if hasattr(auth_return, 'email'):
      email = auth_return.email()
      # Non-service callers must be admin or support users.
      if not any((
          auth.IsAdminUser(email),
          auth.IsSupportUser(email),
      )):
        raise auth.IsAdminMismatch

    if filename:
      # Single-pkginfo fetch.
      filename = urllib.unquote(filename)
      hash_str = self.request.get('hash')
      if hash_str:
        # When the caller wants the hash header, hold the package lock so
        # the hash and plist returned are consistent with each other.
        lock = models.GetLockForPackage(filename)
        try:
          lock.Acquire(timeout=30, max_acquire_attempts=5)
        except datastore_locks.AcquireLockError:
          self.response.set_status(httplib.FORBIDDEN)
          self.response.out.write('Could not lock pkgsinfo')
          return

      pkginfo = models.PackageInfo.get_by_key_name(filename)
      if pkginfo:
        self.response.headers[
            'Content-Type'] = 'text/xml; charset=utf-8'
        if hash_str:
          self.response.headers['X-Pkgsinfo-Hash'] = self._Hash(
              pkginfo.plist)
        self.response.out.write(pkginfo.plist)
      else:
        # Not found: release the lock (if held) before erroring out.
        if hash_str:
          lock.Release()
        self.response.set_status(httplib.NOT_FOUND)
        return

      if hash_str:
        lock.Release()
    else:
      # Listing mode: query pkginfos filtered by optional request params.
      query = models.PackageInfo.all()
      filename = self.request.get('filename')
      if filename:
        query.filter('filename', filename)

      install_types = self.request.get_all('install_types')
      for install_type in install_types:
        query.filter('install_types =', install_type)

      catalogs = self.request.get_all('catalogs')
      for catalog in catalogs:
        query.filter('catalogs =', catalog)

      pkgs = []
      for p in query:
        pkg = {}
        # Copy all entity properties except the raw plist blob.
        for k in p.properties():
          if k != '_plist':
            pkg[k] = getattr(p, k)
        pkgs.append(pkg)
      self.response.out.write('<?xml version="1.0" encoding="UTF-8"?>\n')
      self.response.out.write(plist.GetXmlStr(pkgs))
      self.response.headers['Content-Type'] = 'text/xml; charset=utf-8'

  def _Hash(self, s):
    """Return a sha256 hash for a string.

    Args:
      s: str
    Returns:
      str, sha256 digest
    """
    h = hashlib.sha256(str(s))
    return h.hexdigest()

  def put(self, filename):
    """PUT

    Args:
      filename: string like Firefox-1.0.dmg
    """
    session = gaeserver.DoMunkiAuth(require_level=gaeserver.LEVEL_UPLOADPKG)
    filename = urllib.unquote(filename)
    hash_str = self.request.get('hash')
    catalogs = self.request.get('catalogs', None)
    manifests = self.request.get('manifests', None)
    install_types = self.request.get('install_types')
    # Empty string means "clear the list"; None means "leave unchanged".
    if catalogs == '':
      catalogs = []
    elif catalogs:
      catalogs = catalogs.split(',')
    if manifests == '':
      manifests = []
    elif manifests:
      manifests = manifests.split(',')
    if install_types:
      install_types = install_types.split(',')

    # Parse (and strictly validate) the uploaded pkginfo plist body.
    mpl = MunkiPackageInfoPlistStrict(self.request.body)
    try:
      mpl.Parse()
    except plist.PlistError, e:
      logging.exception(
          'Invalid pkginfo plist PUT: \n%s\n', self.request.body)
      self.response.set_status(httplib.BAD_REQUEST)
      self.response.out.write(str(e))
      return

    # Hold the per-package lock for the whole read-validate-write cycle.
    lock_name = 'pkgsinfo_%s' % filename
    lock = datastore_locks.DatastoreLock(lock_name)
    try:
      lock.Acquire(timeout=30, max_acquire_attempts=5)
    except datastore_locks.AcquireLockError:
      self.response.set_status(httplib.FORBIDDEN)
      self.response.out.write('Could not lock pkgsinfo')
      return

    # To avoid pkginfo uploads without corresponding packages, only allow
    # updates to existing PackageInfo entities, not creations of new ones.
    pkginfo = models.PackageInfo.get_by_key_name(filename)
    if pkginfo is None:
      logging.warning(
          'pkginfo "%s" does not exist; PUT only allows updates.', filename)
      self.response.set_status(httplib.FORBIDDEN)
      self.response.out.write('Only updates supported')
      lock.Release()
      return

    # If the pkginfo is not modifiable, ensure only manifests have changed.
    if not pkginfo.IsSafeToModify():
      if not mpl.EqualIgnoringManifestsAndCatalogs(pkginfo.plist):
        logging.warning(
            'pkginfo "%s" is in stable or testing; change prohibited.',
            filename)
        self.response.set_status(httplib.FORBIDDEN)
        self.response.out.write('Changes to pkginfo not allowed')
        lock.Release()
        return

    # If the update parameter asked for a careful update, by supplying
    # a hash of the last known pkgsinfo, then compare the hash to help
    # the client make a non destructive update.
    if hash_str:
      if self._Hash(pkginfo.plist) != hash_str:
        self.response.set_status(httplib.CONFLICT)
        self.response.out.write('Update hash does not match')
        lock.Release()
        return

    # All verification has passed, so let's create the PackageInfo entity.
    pkginfo.plist = mpl
    pkginfo.name = mpl.GetPackageName()
    if catalogs is not None:
      pkginfo.catalogs = catalogs
    if manifests is not None:
      pkginfo.manifests = manifests
    if install_types:
      pkginfo.install_types = install_types
    pkginfo.put()
    lock.Release()

    # Regenerate (deferred) every catalog this pkginfo belongs to.
    for track in pkginfo.catalogs:
      models.Catalog.Generate(track, delay=1)

    # Log admin pkginfo put to Datastore.
    user = session.uuid
    admin_log = models.AdminPackageLog(
        user=user, action='pkginfo', filename=filename,
        catalogs=pkginfo.catalogs, manifests=pkginfo.manifests,
        install_types=pkginfo.install_types, plist=pkginfo.plist.GetXml())
    admin_log.put()
def _GenerateMsuUserSummary(self, since_days=None, now=None):
  """Generate summary of MSU user data.

  Args:
    since_days: int, optional, only report on the last x days
    now: datetime.datetime, optional, supply an alternative value for the
        current date/time
  """
  lock_name = 'msu_user_summary_lock'
  cursor_name = 'msu_user_summary_cursor'
  if since_days is None:
    since = None
  else:
    since = '%dD' % since_days
    # Windowed summaries use their own lock and cursor namespaces.
    lock_name = '%s_%s' % (lock_name, since)
    cursor_name = '%s_%s' % (cursor_name, since)

  lock = datastore_locks.DatastoreLock(lock_name)
  try:
    lock.Acquire(timeout=RUNTIME_MAX_SECS + 10, max_acquire_attempts=2)
  except datastore_locks.AcquireLockError:
    logging.warning('GenerateMsuUserSummary lock found; exiting.')
    return

  # Release the lock on any exit, including datastore errors; previously
  # an exception mid-run left the lock held until its timeout.
  try:
    interested_events = self.USER_EVENTS

    lquery = models.ComputerMSULog.all()
    cursor = models.KeyValueCache.MemcacheWrappedGet(
        cursor_name, 'text_value')
    summary, unused_dt = models.ReportsCache.GetMsuUserSummary(
        since=since, tmp=True)

    if cursor and summary:
      # Resume the previous partial run from its stored cursor.
      lquery.with_cursor(cursor)
    else:
      # Fresh run: start an empty summary and persist it as the tmp copy.
      summary = {}
      for event in interested_events:
        summary[event] = 0
      summary['total_events'] = 0
      summary['total_users'] = 0
      summary['total_uuids'] = 0
      models.ReportsCache.SetMsuUserSummary(summary, since=since, tmp=True)

    begin = time.time()
    if now is None:
      now = datetime.datetime.utcnow()

    while True:
      reports = lquery.fetch(self.FETCH_LIMIT)
      if not reports:
        break

      userdata = {}
      last_user = None
      last_user_cursor = None
      prev_user_cursor = None

      n = 0
      for report in reports:
        # Bucket each report's event mtime under user -> uuid.
        userdata.setdefault(report.user, {})
        userdata[report.user].setdefault(report.uuid, {}).update(
            {report.event: report.mtime})
        if last_user != report.user:
          last_user = report.user
          prev_user_cursor = last_user_cursor
          last_user_cursor = str(lquery.cursor())
        n += 1

      if n == self.FETCH_LIMIT:
        # full fetch, might not have finished this user -- rewind
        del userdata[last_user]
        last_user_cursor = prev_user_cursor

      for user in userdata:
        events = 0
        for uuid in userdata[user]:
          # Only count uuids that actually launched MSU.
          if 'launched' not in userdata[user][uuid]:
            continue
          for event in userdata[user][uuid]:
            if since_days is None or IsTimeDelta(
                userdata[user][uuid][event], now, days=since_days):
              summary.setdefault(event, 0)
              summary[event] += 1
              summary['total_events'] += 1
              events += 1
          if events:
            summary['total_uuids'] += 1
        if events:
          summary['total_users'] += 1
          summary.setdefault('total_users_%d_events' % events, 0)
          summary['total_users_%d_events' % events] += 1

      # Continue from the last completely-processed user.
      lquery = models.ComputerMSULog.all()
      lquery.with_cursor(last_user_cursor)

      end = time.time()
      if (end - begin) > RUNTIME_MAX_SECS:
        break

    if reports:
      # Ran out of time: checkpoint tmp summary + cursor and requeue.
      models.ReportsCache.SetMsuUserSummary(summary, since=since, tmp=True)
      models.KeyValueCache.MemcacheWrappedSet(
          cursor_name, 'text_value', last_user_cursor)
      if since_days:
        args = '/%d' % since_days
      else:
        args = ''
      taskqueue.add(
          url='/cron/reports_cache/msu_user_summary%s' % args,
          method='GET',
          countdown=5)
    else:
      # Finished: publish the final summary and drop temp state.
      models.ReportsCache.SetMsuUserSummary(summary, since=since)
      models.KeyValueCache.DeleteMemcacheWrap(
          cursor_name, prop_name='text_value')
      models.ReportsCache.DeleteMsuUserSummary(since=since, tmp=True)
  finally:
    lock.Release()
def _GenerateInstallCounts():
  """Generates a dictionary of all installs names and the count of each."""
  # Obtain a lock.
  lock_name = 'pkgs_list_cron_lock'
  lock = datastore_locks.DatastoreLock(lock_name)
  try:
    lock.Acquire(timeout=600, max_acquire_attempts=1)
  except datastore_locks.AcquireLockError:
    logging.warning('GenerateInstallCounts: lock found; exiting.')
    return

  # Release the lock on any exit path; previously an exception during
  # processing or the cursor put left it held until its timeout.
  try:
    # Get a list of all packages that have previously been pushed.
    pkgs, unused_dt = models.ReportsCache.GetInstallCounts()

    # Generate a query of all InstallLog entites that haven't been read yet.
    query = models.InstallLog.all().order('server_datetime')
    cursor_obj = models.KeyValueCache.get_by_key_name('pkgs_list_cursor')
    if cursor_obj:
      query.with_cursor(cursor_obj.text_value)

    # Loop over new InstallLog entries.
    try:
      installs = query.fetch(1000)
    except db.Error:
      # Best effort: treat a fetch error like an empty batch.
      installs = None

    if not installs:
      # Nothing new: refresh the cache and stop (no re-queue).
      models.ReportsCache.SetInstallCounts(pkgs)
      return

    for install in installs:
      pkg_name = install.package
      if pkg_name not in pkgs:
        pkgs[pkg_name] = {
            'install_count': 0,
            'install_fail_count': 0,
            'applesus': install.applesus,
        }
      if install.IsSuccess():
        pkgs[pkg_name]['install_count'] = (
            pkgs[pkg_name].setdefault('install_count', 0) + 1)
        # (re)calculate avg_duration_seconds for this package.
        if 'duration_seconds_avg' not in pkgs[pkg_name]:
          pkgs[pkg_name]['duration_count'] = 0
          pkgs[pkg_name]['duration_total_seconds'] = 0
          pkgs[pkg_name]['duration_seconds_avg'] = None
        # only proceed if entity has "duration_seconds" property != None.
        if getattr(install, 'duration_seconds', None) is not None:
          pkgs[pkg_name]['duration_count'] += 1
          pkgs[pkg_name]['duration_total_seconds'] += (
              install.duration_seconds)
          pkgs[pkg_name]['duration_seconds_avg'] = int(
              pkgs[pkg_name]['duration_total_seconds'] /
              pkgs[pkg_name]['duration_count'])
      else:
        pkgs[pkg_name]['install_fail_count'] = (
            pkgs[pkg_name].setdefault('install_fail_count', 0) + 1)

    # Update any changed packages.
    models.ReportsCache.SetInstallCounts(pkgs)
    if not cursor_obj:
      cursor_obj = models.KeyValueCache(key_name='pkgs_list_cursor')
    cursor_txt = str(query.cursor())
    cursor_obj.text_value = cursor_txt
    cursor_obj.put()
  finally:
    # Delete the lock.
    lock.Release()

  # More entries may remain; re-queue ourselves to keep draining.
  deferred.defer(_GenerateInstallCounts)