def CheckAndSave(cls, data):
    """Persist *data* as a content entity keyed by its git blob hash.

    Returns (True, key) when a new entity was written, or
    (False, key) when an entity with the same hash already exists.
    """
    content_hash = getGitBlobHash(data)
    # Key-only probe: identical content always maps to the same key name.
    existing_key = cls.all(keys_only=True).filter(
        '__key__ =', db.Key.from_path(u'DB_FileContent', content_hash)).get()
    if existing_key:
        return False, existing_key
    saved_key = cls(key_name=content_hash, data=db.Blob(data)).put()
    return True, saved_key
def getCurCacheVer(filename, tag, forceRebuild=False):
    """Return the cache version number currently in effect for (filename, tag).

    tag is from the DB_FileVersion.tags field. right now it's 'z' or 'a'.

    Lookup order (unless forceRebuild): memcache -> DB_FileBuild entity ->
    rebuild from source.  On a rebuild, two DB_FileBuild rows are written:
    a version-specific one (with data) and a version-less "head" pointer.
    """
    fv = InAppMemConfig.Current().fileVersion
    buildkey = "%s:%s:" % (filename, tag)  # the front... no version...
    fvmckey = "fcv:%s:%s" % (fv, buildkey)  # file cache version
    res = _memcache.get(fvmckey)  # res is a curcachever
    if not forceRebuild and res:
        return res  # fast track.
    fb = datamodel.DB_FileBuild.get_by_key_name(buildkey)
    if not forceRebuild and fb:
        if fb.fileVerCheckTime == fv:
            # Build is already validated against this fileVersion;
            # it was simply evicted from memcache.  Re-prime and return.
            _memcache.set(fvmckey, fb.cacheVerNum)
            return fb.cacheVerNum
        data = getJsFileString(filename, tag)
        hashVal = getGitBlobHash(data)
        if fb.hashVal == hashVal:
            # nothing changed... update fileVerCheckTime to current version
            fb.fileVerCheckTime = fv
            _memcache.set(fvmckey, fb.cacheVerNum)
            db.put_async(fb)  # don't care when we do it since it is idempotent
            return fb.cacheVerNum
        # it changed... fall through to update it.
    else:
        # No usable build record (or rebuild forced): compute fresh content.
        data = getJsFileString(filename, tag)
        hashVal = getGitBlobHash(data)
    theKey = "%s:%s:%s" % (filename, tag, fv)
    memck = "datagen" + theKey
    # create a new build with the current specific version
    fb = datamodel.DB_FileBuild(key_name=theKey, hashVal=hashVal, cacheVerNum=fv, fileVerCheckTime=fv, data=data)
    # update the non-version one to point to the most current one.
    fb2 = datamodel.DB_FileBuild(key_name=buildkey, hashVal=hashVal, cacheVerNum=fv, fileVerCheckTime=fv)
    db.put([fb, fb2])
    _memcache.set(fvmckey, fv)  # fast track for this function...
    _memcache.set(memck, data)  # fast track for getCurCacheBlob
    return fv
def admin_RestoreFromBackupZip(req, user):
    """Restore file versions from the bundled zips/orig.zip backup archive.

    For each member of the zip: if its git blob hash differs from the current
    head version of the same filename, a new DB_FileVersion (tagged head) is
    created -- but only when the filename was requested via the 'forceFiles'
    request parameter ('__all__' or a comma-separated list).  Files not yet
    known to the system are always added.  Returns per-file status strings.
    """
    import zipfile
    file = os.path.join(os.path.dirname(__file__), 'zips/orig.zip')
    zf = zipfile.ZipFile(file, 'r')
    allFiles = getAllFromFromQuery(datamodel.DB_FileVersion.all().order('version'))
    byName = {}  # most recent version.
    for x in allFiles:
        # Ascending version order means later entries overwrite earlier ones,
        # so byName[filename] ends up holding the newest (head) version.
        byName[x.filename] = x
    extractStats = {}
    filesInZip = zf.namelist()
    forceFiles = req.get('forceFiles')
    if forceFiles == '__all__':
        forceFiles = set(filesInZip)
    elif forceFiles:
        forceFiles = set(forceFiles.split(','))
    else:
        forceFiles = set()
    for fiz in filesInZip:
        extractStats[fiz] = ''
        data = zf.read(fiz)
        new_hash = getGitBlobHash(data)
        if fiz in byName:
            curVer = byName[fiz]
            # The content hash is the name of the version's parent key.
            oldHash = curVer.key().parent().name()
            if oldHash == new_hash:
                extractStats[fiz] += 'Same version already the head version in the system. '
                continue
            else:
                extractStats[fiz] += 'Different version in zip file and system head. '
                if fiz not in forceFiles:
                    # Differs from head, but restore was not forced: skip.
                    continue
        # Ensure the content blob exists (content is deduped by hash).
        theDataKey = datamodel.DB_FileContent.all(keys_only=True).filter('__key__ =', db.Key.from_path(u'DB_FileContent', new_hash)).get()
        if not theDataKey:
            theDataKey = datamodel.DB_FileContent(key_name = new_hash, data = db.Blob(data)).put()
            extractStats[fiz] += 'Added to cache. '
        else:
            extractStats[fiz] += 'Already in cache. '
        nextVersionNum = datamodel.DB_JV_AtomicCounter.GetNextCounter(_fileVerKey)
        tags = ['z', 'a']  # make it head
        newFileVer = datamodel.DB_FileVersion(parent=theDataKey, key_name=str(nextVersionNum), filename=fiz,version=nextVersionNum,uid=user.key().id(),tags=tags)
        newFileVer.put()
        extractStats[fiz] += 'Added version %s with hash: %s ' % (nextVersionNum, new_hash)
        #forceCacheRebuild(fiz, 'z')
    zf.close()
    return RetType.JSONSUCCESS, {'stats':extractStats}
def internalAddFile(filename, content, userkey, extra_tags=None, genFromFile=None, genFromVer=None):
    """Store *content* as a new DB_FileVersion of *filename*.

    extra_tags is a set like {'a','m'} - but usually either None or {'a'} -
    'z' always gets added (and 'a' too for the very first version of a file).

    Args:
        filename: logical file name the version belongs to.
        content: raw file bytes/string; deduplicated by git blob hash.
        userkey: datastore key of the acting user (its id() is recorded).
        extra_tags: optional iterable of additional tag strings.
        genFromFile/genFromVer: optional provenance - the (file, version)
            this content was generated from; both must be given together.

    Returns:
        (True, info_dict) on success, or (False, {'text': reason}) when the
        head version already has this hash or the provenance doesn't exist.
    """
    fn = filename
    new_hash = getGitBlobHash(content)
    # Content entities are keyed by blob hash, so identical content is stored once.
    theDataKey = datamodel.DB_FileContent.all(keys_only=True).filter(
        '__key__ =', db.Key.from_path(u'DB_FileContent', new_hash)).get()
    hashAlreadyExists = theDataKey is not None
    if not hashAlreadyExists:
        theDataKey = datamodel.DB_FileContent(key_name=new_hash, data=db.Blob(content)).put()
    latestVersion = datamodel.DB_FileVersion.getMostRecent(fn, 'z', keys_only=True)
    # A version's parent key name is its content hash; identical head means no-op.
    if latestVersion and latestVersion.parent().name() == new_hash:
        return False, {'text':'Failed because the latest version has the same hash'}
    if genFromFile and genFromVer:
        genFrom = datamodel.DB_FileVersion.getSpecificVersion(genFromFile, 'z', int(genFromVer), keys_only=True)
        if not genFrom:
            return False, {'text':'Generated from version doesn\'t exist'}
    else:
        genFrom = None
    isFirstVersion = not latestVersion
    nextVersionNum = datamodel.DB_JV_AtomicCounter.GetNextCounter(_fileVerKey)
    # The first version of a file is also marked active ('a') immediately.
    tags = {'z', 'a'} if isFirstVersion else {'z'}
    if extra_tags:
        tags.update(extra_tags)
    newFileVer = datamodel.DB_FileVersion(parent=theDataKey, key_name=str(nextVersionNum),
                                          filename=fn, version=nextVersionNum, uid=userkey.id(),
                                          tags=list(tags), generatedFrom=genFrom)
    newFileVer.put()
    forceCacheRebuild(fn, 'z')
    return True, {'ver': nextVersionNum, 'hash': new_hash, 'isFirstVersion': isFirstVersion,
                  'hashAlreadyExists': hashAlreadyExists, 'filename': fn}