def getMetaData(self):
    """Load and return the package metadata for this repo.

    Refreshes the on-disk cache first, then prefers the sqlite primary_db
    format over the plain primary XML format.

    Returns the loaded metadata object, or None on failure.
    """
    if not self.makeCache(ALL_TYPE):
        Log.cout(Log.ERROR, 'Make cache failed: %s' % self.repoid)
        return None
    # Hoist the repeated directory lookup / path concatenation.
    repoDataDir = self._getRepoDataDir()
    primaryDbPath = repoDataDir + '/' + PRIMARYDB_FILE
    if file_util.exists(primaryDbPath):
        # BUGFIX: log message said 'primatydb'
        Log.cout(Log.INFO, 'Using primarydb format: %s' % self.repoid)
        return sqlite_loader.loadFrom(primaryDbPath)
    primaryPath = repoDataDir + '/' + PRIMARY_FILE
    if file_util.exists(primaryPath):
        # BUGFIX: log message said 'primaty'
        Log.cout(Log.INFO, 'Using primary format: %s' % self.repoid)
        return cache.loadFromFile(primaryPath)
    Log.cout(Log.ERROR, 'repodb not found: %s' % self.repoid)
    return None
def _linkPkgToRoot(self, rpmFileInfoList, linkList, mkdirList):
    """Link a package's files from its ainst pkg dir into the root tree.

    Directories are created under the root; regular non-config files are
    linked from self._ainstPkgDir.  Created directories are appended to
    mkdirList and linked relative paths to linkList so the caller can
    roll back.  Conflicting destination files are logged and skipped.

    Returns False on the first unrecoverable error, True otherwise.
    """
    for rpmFileInfo in rpmFileInfoList:
        items = rpmFileInfo.relativePath.split('/')
        if len(items) < 2:
            Log.cout(Log.ERROR, 'Invalid file path %s' % rpmFileInfo.relativePath)
            return False
        # disable dir check
        # if items[0] != 'ainst' and not self._ainstRoot.isInRootDir(items[0]):
        #     Log.cout(Log.ERROR, 'Invalid file path %s' % rpmFileInfo.relativePath)
        #     return False
        if rpmFileInfo.isDir:
            dirPath = self._ainstRoot.getRoot() + '/' + rpmFileInfo.relativePath
            ret, dirList = file_util.makeDir2(dirPath)
            if not ret:
                # BUGFIX: was '% dirName' — an undefined name that raised
                # NameError on this error path.
                Log.cout(Log.ERROR, 'Makedir %s failed' % dirPath)
                return False
            mkdirList.extend(dirList)
        else:
            srcPath = self._ainstPkgDir + '/' + rpmFileInfo.relativePath
            destPath = self._ainstRoot.getRoot() + '/' + rpmFileInfo.relativePath
            if not file_util.isLink(srcPath) and not file_util.exists(srcPath):
                Log.cout(Log.ERROR, 'Activate failed: not exists file %s' % srcPath)
                return False
            if file_util.exists(destPath):
                # An active file, or one we cannot remove, is a conflict:
                # keep going with the rest of the package.
                if self._isActiveFile(rpmFileInfo.relativePath) or\
                        not file_util.remove(destPath):
                    Log.cout(Log.ERROR,
                             'File conflict %s, ignore and continue.' % destPath)
                    continue
            dirPath = os.path.dirname(destPath)
            ret, dirList = file_util.makeDir2(dirPath)
            if not ret:
                # BUGFIX: was '% dirName' (undefined) here as well.
                Log.cout(Log.ERROR, 'Makedir %s failed' % dirPath)
                return False
            mkdirList.extend(dirList)
            if not rpmFileInfo.isConfigFile():
                if not file_util.link(srcPath, destPath):
                    return False
                linkList.append(rpmFileInfo.relativePath)
    return True
def _generateConfigToRoot(self, ainstPkgDir, aicfInfo, settingMap, confDict):
    """Regenerate every config file declared by the aicf info under the root.

    Each destination config is first moved aside into the tmp dir, and the
    (backup, dest) pair recorded in confDict so the caller can restore it.
    Returns False on the first failure, True otherwise.
    """
    if not aicfInfo:
        Log.cout(Log.DEBUG, 'No aicf file, so no config will be changed')
        return True
    for path, configInfo in aicfInfo.configs.iteritems():
        src = ainstPkgDir + '/' + path
        dest = self._ainstRoot.getRoot() + '/' + path
        if not file_util.isFile(src):
            Log.cout(Log.ERROR, 'Config file %s is not exists' % src)
            return False
        if not file_util.exists(dest):
            Log.cout(Log.ERROR, 'Dest config file %s is not exists' % dest)
            return False
        backupPath = '%s/%s.tmp.set' % (self._ainstRoot.getRootVarAinstDir('tmp'),
                                        os.path.basename(dest))
        if not file_util.move(dest, backupPath):
            Log.cout(Log.ERROR, 'Backup config file %s failed' % dest)
            return False
        confDict[path] = (backupPath, dest)
        if not ConfigGenerator().generateConfig(src, dest, configInfo.mode,
                                                configInfo.noReplace, settingMap):
            Log.cout(Log.ERROR, 'Generate Config file %s failed' % path)
            return False
    return True
def getPackage(self, pkg):
    """Return a local path for pkg's file, downloading into the cache dir.

    Cached copies younger than self.expireTime are reused; expired copies
    are removed and re-fetched.  Returns None on any failure or for
    non-http locations.
    """
    if self._getRepoDataDir() is None:
        return None
    pkgCacheDir = self._getRepoDataDir() + self.packageCacheDir
    # Create the cache dir with a cleared umask so its mode is exact.
    savedMask = os.umask(0)
    madeDir = file_util.makeDir(pkgCacheDir)
    os.umask(savedMask)
    if not madeDir:
        Log.cout(Log.ERROR, 'Make cache dir [%s] failed' % pkgCacheDir)
        return None
    uri = pkg.getLocation()
    if not uri.lower().startswith('http'):
        return None
    pkgFilePath = pkgCacheDir + os.path.basename(uri)
    if file_util.exists(pkgFilePath):
        age = time.time() - os.stat(pkgFilePath).st_ctime
        if age < self.expireTime:
            Log.cout(Log.DEBUG, "Get package [%s] from cache" % pkg)
            return pkgFilePath
        if not file_util.remove(pkgFilePath):
            Log.cout(Log.DEBUG, "Remove old package [%s] because of expired" % pkg)
            return None
    fetched = self.fileFetcher.fetch(uri, pkgFilePath)
    if not fetched or not file_util.chmod(pkgFilePath, 0o666):
        return None
    return pkgFilePath
def getPackage(self, pkg):
    """Return a local path for pkg's file, downloading into the cache dir.

    Cached copies younger than self.expireTime are reused; expired copies
    are removed and re-fetched.  Returns None on any failure or for
    non-http locations.

    BUGFIX: this duplicate of getPackage was missing the final
    'return pkgFilePath', so every fresh (non-cached) download
    succeeded on disk but returned None to the caller.
    """
    if self._getRepoDataDir() is None:
        return None
    pkgCacheDir = self._getRepoDataDir() + self.packageCacheDir
    # Create the cache dir with a cleared umask so its mode is exact.
    preUmask = os.umask(0)
    if not file_util.makeDir(pkgCacheDir):
        os.umask(preUmask)
        Log.cout(Log.ERROR, 'Make cache dir [%s] failed' % pkgCacheDir)
        return None
    os.umask(preUmask)
    uri = pkg.getLocation()
    if not uri.lower().startswith('http'):
        return None
    pkgFilePath = pkgCacheDir + os.path.basename(uri)
    if file_util.exists(pkgFilePath):
        ctime = os.stat(pkgFilePath).st_ctime
        if time.time() - ctime < self.expireTime:
            Log.cout(Log.DEBUG, "Get package [%s] from cache" % pkg)
            return pkgFilePath
        if not file_util.remove(pkgFilePath):
            Log.cout(Log.DEBUG, "Remove old package [%s] because of expired" % pkg)
            return None
    if not self.fileFetcher.fetch(uri, pkgFilePath) or \
            not file_util.chmod(pkgFilePath, 0o666):
        return None
    return pkgFilePath
def _checkMetaFile(self, repoMd, metaName, fileName):
    """Check that a repo metadata file exists and matches its repomd entry.

    Returns True only when fileName exists in the repo data dir, metaName
    is declared in repomd.xml, and the checksum validates.
    """
    metaFile = self._getRepoDataDir() + '/' + fileName
    if not file_util.exists(metaFile):
        return False
    # Idiom fix: 'in' instead of the deprecated dict.has_key().
    if metaName not in repoMd.repoMdDatas:
        Log.cout(Log.ERROR, '%s not found in repomd.xml' % metaName)
        return False
    metaObj = repoMd.repoMdDatas[metaName]
    return self._checkSumValid(metaObj, metaFile)
def _removeCrontabFile(self):
    """Move this package's crontab file aside into the tmp dir.

    Records the (original, backup) pair in self._crontabSrcDest so it can
    be restored later.  Returns False only when the move fails; a missing
    crontab file is treated as success.
    """
    cronFilePath = self._ainstRoot.getRootVarDir('cron') + self._pkg.name
    if not file_util.exists(cronFilePath):
        return True
    backupPath = '%s/%s.crontab.tmp' % (
        self._ainstRoot.getRootVarAinstDir('tmp'), self._pkg.name)
    if not file_util.move(cronFilePath, backupPath):
        return False
    self._crontabSrcDest = (cronFilePath, backupPath)
    return True
def _getRepoFileList(self, reposdir):
    """Collect the full paths of every '*.repo' file in the given dirs.

    Directories that do not exist (or cannot be listed) are skipped.
    """
    repoFiles = []
    for repodir in reposdir:
        if not file_util.exists(repodir):
            continue
        entries = file_util.listDir(repodir)
        if not entries:
            continue
        repoFiles.extend(repodir + '/' + name
                         for name in entries if name.endswith('.repo'))
    return repoFiles
def _needMakeCache(self, metatype):
    """Decide whether the repo metadata cache must be rebuilt.

    Returns (True, typeToFetch) when a rebuild is needed — typeToFetch may
    be narrowed to just the missing/invalid metadata type — otherwise
    (False, None).
    """
    # A missing or expired cache cookie always forces a refresh.
    cookiePath = self._getRepoDataDir() + '/' + 'cachecookie'
    if not file_util.exists(cookiePath):
        return (True, metatype)
    if time.time() - os.stat(cookiePath).st_ctime > self.expireTime:
        return (True, metatype)
    # So does a missing or unparsable repomd.xml.
    repomdFile = self._getRepoDataDir() + '/' + self.repomdLocation.split('/')[-1]
    if not file_util.exists(repomdFile):
        return (True, metatype)
    repoMd = RepoMdParser().parse(repomdFile)
    if repoMd is None:
        return (True, metatype)
    metatype = self._filterMetaTypeByRepoMD(metatype, repoMd)
    # Validate whichever metadata files the (filtered) type requires.
    if metatype == PRIMARY_TYPE:
        if not self._checkMetaFile(repoMd, PRIMARY_NAME, PRIMARY_FILE):
            return (True, metatype)
    elif metatype == PRIMARYDB_TYPE:
        if not self._checkMetaFile(repoMd, PRIMARYDB_NAME, PRIMARYDB_FILE):
            return (True, metatype)
    elif metatype == ALL_TYPE:
        primaryOk = self._checkMetaFile(repoMd, PRIMARY_NAME, PRIMARY_FILE)
        primarydbOk = self._checkMetaFile(repoMd, PRIMARYDB_NAME, PRIMARYDB_FILE)
        # Fetch only the piece that is missing; both missing -> fetch all.
        if primaryOk and not primarydbOk:
            return (True, PRIMARYDB_TYPE)
        if primarydbOk and not primaryOk:
            return (True, PRIMARY_TYPE)
        if not primaryOk and not primarydbOk:
            return (True, ALL_TYPE)
    return (False, None)
def _getRepoDataDir(self):
    """Return this repo's cache data directory, creating it if needed.

    The path is memoized in self.repoDataDir.  Returns None when cacheDir
    or repoid is unset, or when the directory cannot be created.

    BUGFIX: the original assigned self.repoDataDir before attempting the
    makeDir, so a failed creation left the memo set and every later call
    returned the never-created path as if it were valid.  Memoize only
    after the directory is known to exist.
    """
    if self.repoDataDir:
        return self.repoDataDir
    if not self.cacheDir or not self.repoid:
        return None
    repoDataDir = self.cacheDir + '/' + self.repoid
    if not file_util.exists(repoDataDir):
        # Clear the umask so the 0o777 mode is applied exactly.
        preUmask = os.umask(0)
        ret = file_util.makeDir(repoDataDir, True, 0o777)
        os.umask(preUmask)
        if not ret:
            Log.cout(Log.ERROR, 'make repo data dir %s failed' % repoDataDir)
            return None
    self.repoDataDir = repoDataDir
    return self.repoDataDir
def _needMakeCache(self, metatype):
    """Decide whether the repo metadata cache must be rebuilt.

    Returns (True, typeToFetch) when a rebuild is needed — typeToFetch may
    be narrowed to just the missing/invalid metadata type — otherwise
    (False, None).

    NOTE(review): this is a byte-for-byte duplicate of the other
    _needMakeCache in this file (modulo a stray trailing semicolon,
    removed here) — consider keeping only one copy.
    """
    # check cachecookie: missing or expired cookie forces a refresh
    cachecookie = self._getRepoDataDir() + '/' + 'cachecookie'
    if not file_util.exists(cachecookie):
        return (True, metatype)
    ctime = os.stat(cachecookie).st_ctime
    nowtime = time.time()
    if nowtime - ctime > self.expireTime:
        return (True, metatype)
    # check repomd.xml: missing or unparsable file forces a refresh
    repomdFile = self._getRepoDataDir() + '/' + self.repomdLocation.split('/')[-1]
    if not file_util.exists(repomdFile):
        return (True, metatype)
    mdParser = RepoMdParser()
    repoMd = mdParser.parse(repomdFile)
    if repoMd is None:
        return (True, metatype)
    metatype = self._filterMetaTypeByRepoMD(metatype, repoMd)
    # check metatype: validate whichever metadata files the type requires
    if metatype == PRIMARY_TYPE:
        if not self._checkMetaFile(repoMd, PRIMARY_NAME, PRIMARY_FILE):
            return (True, metatype)
    elif metatype == PRIMARYDB_TYPE:
        if not self._checkMetaFile(repoMd, PRIMARYDB_NAME, PRIMARYDB_FILE):
            return (True, metatype)
    elif metatype == ALL_TYPE:
        primaryRet = self._checkMetaFile(repoMd, PRIMARY_NAME, PRIMARY_FILE)
        primarydbRet = self._checkMetaFile(repoMd, PRIMARYDB_NAME, PRIMARYDB_FILE)
        # Fetch only the piece that is missing; both missing -> fetch all.
        if primaryRet and not primarydbRet:
            return (True, PRIMARYDB_TYPE)
        elif not primaryRet and primarydbRet:
            return (True, PRIMARY_TYPE)
        elif not primaryRet and not primarydbRet:
            return (True, ALL_TYPE)
    return (False, None)
def _initRoot(self, initTrace):
    """Create the ainst root directory layout and the initial root state.

    Every path actually created is appended to initTrace so clearInit can
    roll back on failure.  Returns False on the first failure (after
    rolling back), True on success.
    """
    # DRY: the original repeated the identical create-and-trace loop for
    # each of the three dir maps; iterate them in the same order instead.
    for dirMap in (self._rootDirs, self._rootVarDirs, self._rootVarAinstDirs):
        for name in dirMap:
            dirPath = dirMap[name]
            if file_util.isDir(dirPath):
                continue
            if not file_util.makeDir(dirPath):
                self.clearInit(initTrace)
                return False
            initTrace.append(dirPath)
    # Write the initial (empty) root state if it is not already present.
    initRootState = self._rootVarAinstDirs['save'] + 'root-state-0'
    if not file_util.exists(initRootState):
        state = RootState(time.time(), '', self._root,
                          common.AINST_VERSION, [], {})
        content = RootStateStreamer().toString(state)
        if not content or not file_util.writeToFile(initRootState, content):
            self.clearInit(initTrace)
            return False
        initTrace.append(initRootState)
    # Recreate the tmp dir once per process run.
    if not self._clearTmp:
        if not file_util.remove(self._rootVarAinstDirs['tmp']) or\
                not file_util.makeDir(self._rootVarAinstDirs['tmp']):
            self.clearInit(initTrace)
            return False
        self._clearTmp = True
    return True
def get_raw_tuples(tuple_id_range):
    """Return the raw implicitome tuples for the given tuple-id range.

    Serves the result from the on-disk cache when available; otherwise
    queries the database and writes a pipe-delimited cache file for
    next time.
    """
    fname = cache_filename(tuple_id_range)
    if not exists(fname):
        logging.debug(
            "Querying implicitome with tuple range {0}".format(tuple_id_range))
        raw_tuples = get_db_raw_tuples(tuple_id_range)
        with open(fname, "w") as out:
            lines = ("{0}|{1}|{2}\n".format(t[0], t[1], t[2])
                     for t in raw_tuples)
            out.writelines(lines)
    return read_cached_tuples(fname)
def parse(self, path):
    """Parse a 'key=value'-per-line settings file into a dict.

    Blank lines are skipped.  Returns {} when the file does not exist,
    None on a read failure or a line with no '='.
    """
    if not file_util.exists(path):
        Log.cout(Log.DEBUG, 'Path [%s] not exists' % path)
        return {}
    content = file_util.readFromFile(path)
    if content is None:
        Log.cout(Log.ERROR, 'Read setting file [%s] failed' % path)
        return None
    settingMap = {}
    for line in content.split('\n'):
        if line.strip() == '':
            continue
        # Split on the first '=' only; the value may itself contain '='.
        key, sep, value = line.partition('=')
        if not sep:
            # BUGFIX: message was the garbled 'Setting file [%s ]illega'.
            Log.cout(Log.ERROR, 'Setting file [%s] illegal' % path)
            return None
        settingMap[key] = value
    return settingMap
def _processTmpFile(self, cacheType, success):
    """Commit or discard downloaded metadata temp files.

    On success each '<file>.tmp' is renamed into place; on failure the
    '.tmp' and its '.gz' download are removed.  Returns False on the
    first file operation that fails.
    """
    filesByType = {
        PRIMARY_TYPE: [PRIMARY_FILE],
        PRIMARYDB_TYPE: [PRIMARYDB_FILE],
        ALL_TYPE: [PRIMARY_FILE, PRIMARYDB_FILE],
    }
    for name in filesByType.get(cacheType, []):
        finalPath = self._getRepoDataDir() + '/' + name
        tmpPath = finalPath + '.tmp'
        if not file_util.exists(tmpPath):
            continue
        if success:
            if not file_util.rename(tmpPath, finalPath):
                return False
        else:
            if not file_util.remove(tmpPath):
                return False
            if not file_util.remove(finalPath + '.gz'):
                return False
    return True
def checkRoot(self):
    """Verify the ainst root layout is complete and usable.

    Checks the root mark file, every expected directory, and the initial
    root-state file; on success exports the install-root env var and
    returns True.
    """
    if not self._root:
        return False
    if not file_util.isFile(self._root + '/' + self._ainstRootMarkFile):
        return False
    for dirMap in (self._rootDirs, self._rootVarDirs, self._rootVarAinstDirs):
        for name in dirMap:
            if not file_util.isDir(dirMap[name]):
                return False
    initRootState = self._rootVarAinstDirs['save'] + 'root-state-0'
    if not file_util.exists(initRootState):
        return False
    os.environ[self._installRootEnvKey] = self._root
    return True
def get_semmed_tuples():
    """Return SemMedDB tuple info, caching the DB query result on disk.

    NOTE(review): 'exists' here takes (filename, directory) — a project
    helper, not os.path.exists; confirm against its definition.
    """
    data_dir = "/home/toby/databases/semmed/data"
    if not exists("semmed_raw_info.txt", data_dir):
        cache_db_query(data_dir)
    return read_cached_info(data_dir)
def test_delete_recursively(self):
    """delete_recursively removes the target tree but not its ancestors."""
    root = self.create_tempdir().full_path
    file_util.makedirs('%s/my/favorite/multilevel/directory' % root)
    file_util.delete_recursively('%s/my/favorite' % root)
    # The parent survives; the deleted subtree does not.
    self.assertTrue(file_util.exists('%s/my' % root))
    self.assertFalse(file_util.exists('%s/my/favorite' % root))
def test_makedirs(self):
    """makedirs creates the whole multilevel path in one call."""
    root = self.create_tempdir().full_path
    target = '%s/my/multilevel/directory' % root
    file_util.makedirs(target)
    self.assertTrue(file_util.exists(target))