def _readDependentPackageData(self, line):
    """
    Parse a comma-separated dependency line (e.g. "foo >= 1.2, /bin/sh")
    into a list of dependentPackageData objects.

    Each whitespace-separated entry may carry an optional version
    constraint "<name> <op> <version>" where <op> is one of >=, <=, =,
    <, >.  File-path entries (starting with "/") are translated to the
    providing package via constants.providedBy.

    Returns: list of dependentPackageData.
    Raises: Exception when a file-path dependency has no known provider,
            so the providedBy map in constants.py can be extended.
    """
    strUtils = StringUtils()
    listPackages = line.split(",")
    listdependentpkgs = []
    for line in listPackages:
        # Unwrap conditional-bracket macros (project helper) — assumed to
        # return the plain dependency text for this entry.
        line = strUtils.getStringInConditionalBrackets(line)
        listContents = line.split()
        totalContents = len(listContents)
        i = 0
        while i < totalContents:
            dpkg = dependentPackageData()
            compare = None
            packageName = listContents[i]
            if listContents[i].startswith("/"):
                provider = constants.providedBy.get(listContents[i], None)
                if provider is not None:
                    packageName = provider
                else:
                    raise Exception('What package does provide %s? Please modify providedBy in constants.py' % (listContents[i]))
                # Bug fix: no extra "i += 1" here.  The shared advance
                # below already moves past this token; incrementing here
                # as well skipped the token following the path and
                # misread any version constraint attached to it.
            if i + 2 < len(listContents):
                if listContents[i + 1] in (">=", "<=", "=", "<", ">"):
                    compare = listContents[i + 1]
            if compare is not None:
                dpkg.package = packageName
                dpkg.compare = compare
                dpkg.version = listContents[i + 2]
                i = i + 3
            else:
                dpkg.package = packageName
                i = i + 1
            listdependentpkgs.append(dpkg)
    return listdependentpkgs
def _readDependentPackageData(self, line):
    """
    Parse a comma-separated dependency line into dependentPackageData
    objects (see the raising variant for the grammar).  Unlike that
    variant, file-path entries with no known provider in
    constants.providedBy are silently skipped instead of raising.

    Returns: list of dependentPackageData.
    """
    strUtils = StringUtils()
    listPackages = line.split(",")
    listdependentpkgs = []
    for line in listPackages:
        line = strUtils.getStringInConditionalBrackets(line)
        listContents = line.split()
        totalContents = len(listContents)
        i = 0
        while i < totalContents:
            dpkg = dependentPackageData()
            compare = None
            packageName = listContents[i]
            if listContents[i].startswith("/"):
                provider = constants.providedBy.get(listContents[i], None)
                if provider is None:
                    # Unknown provider: skip just this token and keep
                    # parsing the rest of the entry.
                    i += 1
                    continue
                packageName = provider
                # Bug fix: do NOT advance i on the provider-found path.
                # The shared advance below handles it; incrementing here
                # as well skipped the next token and misread any version
                # constraint attached to the path.
            if i + 2 < len(listContents):
                if listContents[i + 1] in (">=", "<=", "=", "<", ">"):
                    compare = listContents[i + 1]
            if compare is not None:
                dpkg.package = packageName
                dpkg.compare = compare
                dpkg.version = listContents[i + 2]
                i = i + 3
            else:
                dpkg.package = packageName
                i = i + 1
            listdependentpkgs.append(dpkg)
    return listdependentpkgs
def readChecksum(self,line,pkg): strUtils = StringUtils() line=pkg.decodeContents(line) data = line.strip(); words=data.split(" ") nrWords = len(words) if (nrWords != 3): print "Error: Unable to parse line: "+line return False value=words[2].split("=") if (len(value) != 2): print "Error: Unable to parse line: "+line return False matchedSources=[] for source in pkg.sources: sourceName=strUtils.getFileNameFromURL(source) if (sourceName.startswith(value[0])): matchedSources.append(sourceName) if (len(matchedSources) == 0): print "Error: Can not find match for sha1 "+value[0] return False if (len(matchedSources) > 1): print "Error: Too many matches in sources: "+matchedSources+" for sha1 "+value[0] return False pkg.checksums[sourceName] = value[1] return True;
def _buildPackage(self):
    """
    Build RPMs for self.package/self.version inside a freshly created
    chroot: install the toolchain, install build-time (and make-check)
    dependencies, adjust GCC specs, then build.

    On failure the chroot is intentionally left in place for debugging,
    the tail of the package build log is emitted, and the exception is
    re-raised.  On success the chroot is destroyed.
    """
    chroot = None
    try:
        chroot = Chroot(self.logger)
        chroot.create(self.package + "-" + self.version)
        tUtils = ToolChainUtils(self.logName, self.logPath)
        tUtils.installToolChainRPMS(chroot, self.package, self.version, self.logPath)
        listDependentPackages, listTestPackages, listInstalledPackages, listInstalledRPMs = (
            self._findDependentPackagesAndInstalledRPM(chroot))
        pkgUtils = PackageUtils(self.logName, self.logPath)
        if listDependentPackages:
            self.logger.debug("Installing the build time dependent packages......")
            for pkg in listDependentPackages:
                packageName, packageVersion = StringUtils.splitPackageNameAndVersion(pkg)
                self._installPackage(pkgUtils, packageName, packageVersion, chroot,
                                     self.logPath, listInstalledPackages, listInstalledRPMs)
            # Make-check-only packages: install those not already covered
            # by the build-time dependency list above.
            for pkg in listTestPackages:
                flag = False
                packageName, packageVersion = StringUtils.splitPackageNameAndVersion(pkg)
                for depPkg in listDependentPackages:
                    depPackageName, depPackageVersion = StringUtils.splitPackageNameAndVersion(depPkg)
                    if depPackageName == packageName:
                        flag = True
                        break
                if flag == False:
                    self._installPackage(pkgUtils, packageName, packageVersion, chroot,
                                         self.logPath, listInstalledPackages, listInstalledRPMs)
            # All installs above are queued; this flushes them in one go.
            pkgUtils.installRPMSInOneShot(chroot)
            self.logger.debug("Finished installing the build time dependent packages....")
        pkgUtils.adjustGCCSpecs(chroot, self.package, self.version)
        pkgUtils.buildRPMSForGivenPackage(chroot, self.package, self.version, self.logPath)
        self.logger.debug("Successfully built the package:" + self.package)
    except Exception as e:
        self.logger.error("Failed while building package:" + self.package)
        # NOTE(review): if Chroot()/create() itself raised, chroot may be
        # None here and getPath() would fail — confirm.
        self.logger.debug("Chroot: " + chroot.getPath() + " not deleted for debugging.")
        logFileName = os.path.join(self.logPath, self.package + ".log")
        fileLog = os.popen('tail -n 100 ' + logFileName).read()
        self.logger.info(fileLog)
        raise e
    # Reached only on success: clean up the build chroot.
    if chroot:
        chroot.destroy()
def _readChecksum(self, line, pkg):
    """
    Parse a checksum line of the form "<word> <word> <name>=<hash>"
    (exactly three whitespace-separated words, after macro expansion) and
    store the hash in pkg.checksums keyed by the single source of pkg
    whose file name starts with <name>.

    Returns True on success; prints a diagnostic and returns False on
    any parse or matching failure.
    """
    strUtils = StringUtils()
    line = self._replaceMacros(line)
    data = line.strip()
    words = data.split()
    if len(words) != 3:
        print("Error: Unable to parse line: " + line)
        return False
    value = words[2].split("=")
    if len(value) != 2:
        print("Error: Unable to parse line: "+line)
        return False
    matchedSources = []
    for source in pkg.sources:
        sourceName = strUtils.getFileNameFromURL(source)
        if sourceName.startswith(value[0]):
            matchedSources.append(sourceName)
    if not matchedSources:
        print("Error: Can not find match for sha1 " + value[0])
        return False
    if len(matchedSources) > 1:
        print("Error: Too many matched Sources:" + ' '.join(matchedSources) +
              " for sha1 " + value[0])
        return False
    # Bug fix: key on the single matched source.  The previous code used
    # the loop variable, i.e. whichever source happened to be iterated
    # last, which is wrong whenever the match is not the last source.
    pkg.checksums[matchedSources[0]] = value[1]
    return True
def buildSourcesList(specPath, yamlDir, singleFile=False):
    # Write "vmwsource" yaml entries for every http/ftp source URL found
    # in the .spec files under specPath.  With singleFile=True all
    # entries go to one sources_list.yaml; otherwise one yaml file is
    # written per source.  Local (non-URL) sources produce no entry.
    # NOTE(review): in per-file mode yamlFile is only opened inside the
    # URL branch — a spec with no http/ftp sources writes nothing.
    strUtils = StringUtils()
    if singleFile:
        yamlFile = open(yamlDir + "sources_list.yaml", "w")
    lst = os.listdir(specPath)
    lst.sort()
    for dirEntry in lst:
        specDir = os.path.join(specPath, dirEntry)
        if os.path.isdir(specDir):
            for specEntry in os.listdir(specDir):
                specFile = os.path.join(specDir, specEntry)
                if os.path.isfile(specFile) and specFile.endswith(".spec"):
                    spec = Specutils(specFile)
                    # A package counts as "modified" when it carries patches.
                    modified = len(spec.getPatchNames()) > 0
                    ss = spec.getSourceURLs()
                    for s in ss:
                        if (s.startswith("http") or s.startswith("ftp")):
                            # Name/version are derived from the URL itself.
                            ossname = strUtils.getPackageNameFromURL(s)
                            ossversion = strUtils.getPackageVersionFromURL(s)
                            if not singleFile:
                                yamlFile = open(yamlDir + ossname + "-" + ossversion + ".yaml", "w")
                            yamlFile.write("vmwsource:" + ossname + ":" + ossversion + ":\n")
                            yamlFile.write(" repository: VMWsource\n")
                            yamlFile.write(" name: '" + ossname + "'\n")
                            yamlFile.write(" version: '" + ossversion + "'\n")
                            yamlFile.write(" url: " + s + "\n")
                            yamlFile.write(" license: UNKNOWN\n")
                            if modified:
                                yamlFile.write(" modified: true\n")
                            yamlFile.write("\n")
                            if not singleFile:
                                yamlFile.close()
    if singleFile:
        yamlFile.close()
def _readChecksum(self, line, pkg):
    """
    Parse a checksum line of the form "<word> <word> <name>=<hash>"
    (exactly three whitespace-separated words, after macro expansion) and
    store the hash in pkg.checksums keyed by the single source of pkg
    whose file name starts with <name>.

    Returns True on success; prints a diagnostic and returns False on
    any parse or matching failure.
    """
    strUtils = StringUtils()
    line = self._replaceMacros(line)
    data = line.strip()
    words = data.split()
    if len(words) != 3:
        print("Error: Unable to parse line: " + line)
        return False
    value = words[2].split("=")
    if len(value) != 2:
        print("Error: Unable to parse line: " + line)
        return False
    matchedSources = []
    for source in pkg.sources:
        sourceName = strUtils.getFileNameFromURL(source)
        if sourceName.startswith(value[0]):
            matchedSources.append(sourceName)
    if not matchedSources:
        print("Error: Can not find match for sha1 " + value[0])
        return False
    if len(matchedSources) > 1:
        print("Error: Too many matched Sources:" + ' '.join(matchedSources) +
              " for sha1 " + value[0])
        return False
    # Bug fix: key on the single matched source.  The previous code used
    # the loop variable, i.e. whichever source happened to be iterated
    # last, which is wrong whenever the match is not the last source.
    pkg.checksums[matchedSources[0]] = value[1]
    return True
def readChecksum(self, line, pkg): strUtils = StringUtils() line = pkg.decodeContents(line) data = line.strip() words = data.split(" ") nrWords = len(words) if (nrWords != 3): print "Error: Unable to parse line: " + line return False value = words[2].split("=") if (len(value) != 2): print "Error: Unable to parse line: " + line return False matchedSources = [] for source in pkg.sources: sourceName = strUtils.getFileNameFromURL(source) if (sourceName.startswith(value[0])): matchedSources.append(sourceName) if (len(matchedSources) == 0): print "Error: Can not find match for sha1 " + value[0] return False if (len(matchedSources) > 1): print "Error: Too many matches in sources: " + matchedSources + " for sha1 " + value[ 0] return False pkg.checksums[sourceName] = value[1] return True
def _getPatchNames(self):
    """Return the bare file names of all patches of the default package."""
    util = StringUtils()
    defaultPkg = self.packages.get('default')
    return [util.getFileNameFromURL(patchUrl) for patchUrl in defaultPkg.patches]
def _getSourceNames(self):
    """Return the bare file names of all sources of the default package."""
    util = StringUtils()
    defaultPkg = self.packages.get('default')
    return [util.getFileNameFromURL(srcUrl) for srcUrl in defaultPkg.sources]
def getPatchNames(self):
    """Return the patch file names of the spec's default package, or
    None when the spec defines no default package."""
    pkg = self.spec.packages.get("default")
    if pkg is None:
        return None
    util = StringUtils()
    return [util.getFileNameFromURL(patchUrl) for patchUrl in pkg.patches]
def getSourceNames(self):
    """Return the source file names of the spec's default package, or
    None when the spec defines no default package."""
    pkg = self.spec.packages.get("default")
    if pkg is None:
        return None
    util = StringUtils()
    return [util.getFileNameFromURL(srcUrl) for srcUrl in pkg.sources]
def _installDependentRunTimePackages(self, pkgUtils, package, packageVersion, sandbox,
                                     destLogPath, listInstalledPackages,
                                     listInstalledRPMs, arch):
    """
    Install the run-time RPM dependencies of package-packageVersion (for
    the given arch) into the sandbox.

    Packages that belong to a dependency cycle are skipped (handled
    elsewhere), as are packages whose latest RPM is already installed.
    Raises Exception when a dependency's RPM file cannot be found.
    """
    runtimeDeps = self._findRunTimeRequiredRPMPackages(package, packageVersion, arch)
    if not runtimeDeps:
        return
    for dep in runtimeDeps:
        # Cycle members are resolved by the cycle-handling machinery.
        if dep in self.mapPackageToCycles:
            continue
        depName, depVersion = StringUtils.splitPackageNameAndVersion(dep)
        rpmfile = pkgUtils.findRPMFile(depName, depVersion, arch, True)
        if rpmfile is None:
            self.logger.error("No rpm file found for package: " + depName + "-" + depVersion)
            raise Exception("Missing rpm file")
        latestRPM = os.path.basename(rpmfile).replace(".rpm", "")
        # Skip when this exact latest RPM is already installed.
        if dep in listInstalledPackages and latestRPM in listInstalledRPMs:
            continue
        self._installPackage(pkgUtils, depName, depVersion, sandbox, destLogPath,
                             listInstalledPackages, listInstalledRPMs, arch)
def setParams(sortedList, listOfAlreadyBuiltPackages):
    # Initialize the Scheduler's class-level build state from the sorted
    # package list and the set of already-built packages, then compute
    # scheduling priorities (and optionally publish the dependency data).
    Scheduler.sortedList = sortedList
    Scheduler.listOfAlreadyBuiltPackages = listOfAlreadyBuiltPackages
    for pkg in Scheduler.sortedList:
        pkgName, pkgVersion = StringUtils.splitPackageNameAndVersion(pkg)
        # Queue a package for building when it is not built yet, or when
        # testing is forced for it via constants.testForceRPMS.
        if (pkg not in Scheduler.listOfAlreadyBuiltPackages
                or pkgName in constants.testForceRPMS):
            Scheduler.listOfPackagesToBuild.append(pkg)
    Scheduler.listOfPackagesCurrentlyBuilding = set()
    Scheduler.listOfPackagesNextToBuild = PriorityQueue()
    Scheduler.listOfFailedPackages = []
    # When performing (only) make-check, package dependencies are
    # irrelevant; i.e., all the packages can be "make-checked" in
    # parallel. So skip building the dependency graph. This is not
    # merely an optimization! A given package can define
    # additional packages to be installed in its build environment
    # when performing a make-check, under %if %{with_check}.
    # However, these are not really build-time-dependencies in the
    # usual sense; i.e., there is no ordering requirement when
    # building these packages; they only make sense when running a
    # `make check`. Hence, trying to build a dependency graph out
    # of them will result in anomalies such as cycles in the
    # graph. So skip building the graph altogether and schedule
    # all the `make check`s in parallel.
    skipGraphBuild = constants.rpmCheck
    Scheduler._setPriorities(skipGraphBuild)
    if constants.publishBuildDependencies:
        # This must be called only after calling _setPriorities(),
        # which builds the dependency graph.
        Scheduler._publishBuildDependencies()
def _calculateParams(self, listPackages):
    """
    Prepare build parameters for *listPackages*: reset the cycle maps,
    prune "already built" packages whose dependencies are themselves not
    built (fixed-point iteration), and read build data for the packages
    that still need building.

    Returns True on success, False when package build data cannot be read.
    """
    self.mapCyclesToPackageList.clear()
    self.mapPackageToCycle.clear()
    self.sortedPackageList = []
    self.listOfPackagesAlreadyBuilt = list(
        self._readAlreadyAvailablePackages())
    # Fixed-point loop: keep removing packages whose dependencies are not
    # (or no longer) in the already-built set until nothing changes.
    updateBuiltRPMSList = False
    while not updateBuiltRPMSList:
        updateBuiltRPMSList = True
        # Bug fix: iterate over a snapshot.  The original iterated the
        # very list it was calling .remove() on, which can skip elements
        # within a pass (the outer fixed-point loop masked, but did not
        # eliminate, the problem).
        for pkg in list(self.listOfPackagesAlreadyBuilt):
            packageName, packageVersion = StringUtils.splitPackageNameAndVersion(pkg)
            listDependentRpmPackages = SPECS.getData().getRequiresAllForPackage(
                packageName, packageVersion)
            needToRebuild = False
            for dependentPkg in listDependentRpmPackages:
                if dependentPkg not in self.listOfPackagesAlreadyBuilt:
                    needToRebuild = True
                    updateBuiltRPMSList = False
            if needToRebuild:
                self.listOfPackagesAlreadyBuilt.remove(pkg)
    listPackagesToBuild = copy.copy(listPackages)
    for pkg in listPackages:
        # Already-built packages are skipped unless rpm-check mode forces
        # every package through.
        if (pkg in self.listOfPackagesAlreadyBuilt and not constants.rpmCheck):
            listPackagesToBuild.remove(pkg)
    if not self._readPackageBuildData(listPackagesToBuild):
        return False
    return True
def _buildPackage(self):
    """
    Build self.package-self.version inside self.sandbox: install the
    toolchain (extra RPMs only when the sandbox already has one),
    install build-time and make-check dependencies, adjust GCC specs,
    then build the RPMs.

    On failure the sandbox is intentionally kept for debugging, the tail
    of the package build log is emitted, and the exception is re-raised.
    On success the sandbox is destroyed.
    """
    try:
        self.sandbox.create(self.package + "-" + self.version)
        tUtils = ToolChainUtils(self.logName, self.logPath)
        if self.sandbox.hasToolchain():
            tUtils.installExtraToolchainRPMS(self.sandbox, self.package, self.version)
        else:
            tUtils.installToolchainRPMS(self.sandbox, self.package, self.version,
                                        availablePackages=self.doneList)
        listDependentPackages, listTestPackages, listInstalledPackages, listInstalledRPMs = (
            self._findDependentPackagesAndInstalledRPM(self.sandbox))
        pkgUtils = PackageUtils(self.logName, self.logPath)
        if listDependentPackages:
            self.logger.debug("Installing the build time dependent packages......")
            for pkg in listDependentPackages:
                packageName, packageVersion = StringUtils.splitPackageNameAndVersion(pkg)
                self._installPackage(pkgUtils, packageName, packageVersion, self.sandbox,
                                     self.logPath, listInstalledPackages, listInstalledRPMs)
            # Make-check-only packages: install those not already covered
            # by the build-time dependency list above.
            for pkg in listTestPackages:
                flag = False
                packageName, packageVersion = StringUtils.splitPackageNameAndVersion(pkg)
                for depPkg in listDependentPackages:
                    depPackageName, depPackageVersion = StringUtils.splitPackageNameAndVersion(depPkg)
                    if depPackageName == packageName:
                        flag = True
                        break;
                if flag == False:
                    self._installPackage(pkgUtils, packageName, packageVersion, self.sandbox,
                                         self.logPath, listInstalledPackages, listInstalledRPMs)
            # Installs above are queued; this flushes them in one go.
            pkgUtils.installRPMSInOneShot(self.sandbox)
            self.logger.debug("Finished installing the build time dependent packages....")
        pkgUtils.adjustGCCSpecs(self.sandbox, self.package, self.version)
        pkgUtils.buildRPMSForGivenPackage(self.sandbox, self.package, self.version,
                                          self.logPath)
        self.logger.debug("Successfully built the package: " + self.package)
    except Exception as e:
        self.logger.error("Failed while building package: " + self.package)
        # The sandbox is deliberately left in place so the failure can be
        # inspected.
        self.logger.debug("Sandbox: " + self.sandbox.getID() + " not deleted for debugging.")
        logFileName = os.path.join(self.logPath, self.package + ".log")
        fileLog = os.popen('tail -n 100 ' + logFileName).read()
        self.logger.info(fileLog)
        raise e
    # Reached only on success: clean up the build sandbox.
    if self.sandbox:
        self.sandbox.destroy()
def getSourceURLs(self):
    """
    Return the list of source URLs of the spec's default package, or
    None when the spec defines no default package.
    """
    pkg = self.spec.packages.get('default')
    if pkg is None:
        return None
    # Return a fresh list so callers cannot mutate the spec's own data.
    # (Also drops an unused StringUtils instantiation from the original.)
    return list(pkg.sources)
def buildSourcesList(specPath, sourcePath, yamlDir, logger, singleFile=True):
    # Generate "vmwsource" yaml metadata for every .spec found under
    # specPath (one combined sources_list.yaml when singleFile is True,
    # otherwise one yaml per package), and pull each package's first
    # source archive into sourcePath when its sha1 is known.
    strUtils = StringUtils()
    if singleFile:
        yamlFile = open(yamlDir + "sources_list.yaml", "w")
    lst = os.listdir(specPath)
    lst.sort()
    import PullSources
    for dirEntry in lst:
        specDir = os.path.join(specPath, dirEntry)
        if os.path.isdir(specDir):
            for specEntry in os.listdir(specDir):
                specFile = os.path.join(specDir, specEntry)
                if os.path.isfile(specFile) and specFile.endswith(".spec"):
                    spec = Specutils(specFile)
                    # A package counts as "modified" when it carries patches.
                    modified = len(spec.getPatchNames()) > 0
                    listSourceURLs = spec.getSourceURLs()
                    ossname = spec.getBasePackageName()
                    ossversion = spec.getVersion()
                    url = None
                    listSourceNames = spec.getSourceNames()
                    sourceName = None
                    if len(listSourceNames) > 0:
                        sourceName = listSourceNames[0]
                        sha1 = spec.getChecksumForSource(sourceName)
                        if sha1 is not None:
                            # Download the source archive into sourcePath.
                            PullSources.get(sourceName, sha1, sourcePath,
                                            constants.pullsourcesConfig)
                    if len(listSourceURLs) > 0:
                        sourceURL = listSourceURLs[0]
                        # Prefer a real download URL; otherwise fall back to
                        # the package homepage URL from the spec.
                        if sourceURL.startswith("http") or sourceURL.startswith("ftp"):
                            url = sourceURL
                        else:
                            url = spec.getURL(ossname)
                    if not singleFile:
                        yamlFile = open(
                            yamlDir + "/" + ossname + "-" + ossversion + ".yaml", "w")
                    yamlFile.write("vmwsource:" + ossname + ":" + ossversion + ":\n")
                    yamlFile.write(" repository: VMWsource\n")
                    yamlFile.write(" name: '" + ossname + "'\n")
                    yamlFile.write(" version: '" + ossversion + "'\n")
                    yamlFile.write(" url: " + str(url) + "\n")
                    yamlFile.write(" license: UNKNOWN\n")
                    if sourceName is not None:
                        yamlFile.write(" vmwsource-distribution: " + str(sourceName) + "\n")
                    if modified:
                        yamlFile.write(" modified: true\n")
                    yamlFile.write("\n")
                    if not singleFile:
                        yamlFile.close()
    if singleFile:
        yamlFile.close()
    logger.info("Generated source yaml files for all packages")
def _readDependentPackageData(self, line):
    """
    Parse a comma-separated dependency line into dependentPackageData
    objects, mapping comparison operators to their symbolic names
    (gte/lte/eq/lt/gt).  File-path entries (starting with "/") are
    resolved via constants.providedBy; paths with no known provider are
    skipped silently.

    Returns: list of dependentPackageData.
    """
    strUtils = StringUtils()
    listPackages = line.split(",")
    listdependentpkgs = []
    # Symbolic names for the supported version-comparison operators.
    opNames = {">=": "gte", "<=": "lte", "==": "eq",
               "<": "lt", ">": "gt", "=": "eq"}
    for line in listPackages:
        line = strUtils.getStringInConditionalBrackets(line)
        listContents = line.split()
        totalContents = len(listContents)
        i = 0
        while i < totalContents:
            dpkg = dependentPackageData()
            compare = None
            packageName = listContents[i]
            if listContents[i].startswith("/"):
                provider = constants.providedBy.get(listContents[i], None)
                if provider is None:
                    # Unknown provider: skip just this token.
                    i += 1
                    continue
                packageName = provider
                # Bug fix: do NOT advance i on the provider-found path.
                # The shared advance below handles it; incrementing here
                # as well skipped the next token and misread any version
                # constraint attached to the path.
            if i + 2 < len(listContents):
                # dict lookup replaces the original elif ladder; returns
                # None (no constraint) for any other token.
                compare = opNames.get(listContents[i+1])
            if compare is not None:
                dpkg.package = packageName
                dpkg.compare = compare
                dpkg.version = listContents[i+2]
                i = i + 3
            else:
                dpkg.package = packageName
                i = i + 1
            listdependentpkgs.append(dpkg)
    return listdependentpkgs
def _createNodes():
    """Create one DependencyGraphNode per package in the sorted list and
    register it in mapPackagesToGraphNodes, marking nodes of packages
    that are already built."""
    for pkg in Scheduler.sortedList:
        name, version = StringUtils.splitPackageNameAndVersion(pkg)
        graphNode = DependencyGraphNode(name, version, Scheduler._getWeight(pkg))
        Scheduler.mapPackagesToGraphNodes[pkg] = graphNode
        if pkg in Scheduler.listOfAlreadyBuiltPackages:
            graphNode.built = 1
def _getWeight(package):
    """Return the scheduling weight of *package* ("name-version").

    Weights are keyed by bare package name, so every version of a
    multi-version package (Go, Kubernetes, ...) shares one weight.
    Unknown packages get the default weight 1.
    """
    name, _ = StringUtils.splitPackageNameAndVersion(package)
    try:
        return int(Scheduler.pkgWeights[name]) + 1
    except KeyError:
        return 1
def buildSRPMList(specPath, srpmPath, yamlDir, logger, singleFile=True):
    # Generate "baseos" yaml metadata for every .spec under specPath and
    # copy each package's SRPM from srpmPath into yamlDir.  With
    # singleFile=True all entries go into one srpm_list.yaml; otherwise
    # one yaml file is written per package.
    strUtils = StringUtils()
    if singleFile:
        yamlFile = open(yamlDir + "srpm_list.yaml", "w")
    lst = os.listdir(specPath)
    lst.sort()
    cmdUtils = CommandUtils()
    for dirEntry in lst:
        specDir = os.path.join(specPath, dirEntry)
        if os.path.isdir(specDir):
            for specEntry in os.listdir(specDir):
                specFile = os.path.join(specDir, specEntry)
                if os.path.isfile(specFile) and specFile.endswith(".spec"):
                    spec = Specutils(specFile)
                    ossname = spec.getBasePackageName()
                    ossversion = spec.getVersion()
                    ossrelease = spec.getRelease()
                    listFoundSRPMFiles = cmdUtils.findFile(
                        ossname + "-" + ossversion + "-" + ossrelease + ".src.rpm",
                        srpmPath)
                    srpmName = None
                    if len(listFoundSRPMFiles) == 1:
                        srpmFullPath = listFoundSRPMFiles[0]
                        srpmName = os.path.basename(srpmFullPath)
                        cpcmd = "cp " + srpmFullPath + " " + yamlDir + "/"
                        returnVal = cmdUtils.runCommandInShell(cpcmd)
                        if not returnVal:
                            logger.error(
                                "Copy SRPM File is failed for package:" + ossname)
                    else:
                        # Zero or multiple matches: log the error but keep
                        # going; srpmName stays None in the yaml below.
                        logger.error("SRPM file is not found:" + ossname)
                    if not singleFile:
                        yamlFile = open(
                            yamlDir + "/" + ossname + "-" + ossversion + "-" +
                            ossrelease + ".yaml", "w")
                    yamlFile.write("baseos:" + ossname + ":" + ossversion + "-" +
                                   ossrelease + ":\n")
                    yamlFile.write(" repository: BaseOS\n")
                    yamlFile.write(" name: '" + ossname + "'\n")
                    yamlFile.write(" version: '" + ossversion + "-" + ossrelease + "'\n")
                    yamlFile.write(" baseos-style: rpm\n")
                    yamlFile.write(" baseos-source: '" + str(srpmName) + "'\n")
                    yamlFile.write(" baseos-osname: 'photon'\n")
                    yamlFile.write("\n")
                    if not singleFile:
                        yamlFile.close()
    if singleFile:
        yamlFile.close()
    logger.info("Generated srpm yaml files for all packages")
def _installDependentRunTimePackages(self, pkgUtils, package, packageVersion, sandbox,
                                     destLogPath, listInstalledPackages, listInstalledRPMs):
    """
    Install the run-time RPM dependencies of package-packageVersion into
    the sandbox, skipping dependency-cycle members and packages whose
    latest RPM is already installed.

    Raises Exception when a dependency's RPM file cannot be found.
    """
    listRunTimeDependentPackages = self._findRunTimeRequiredRPMPackages(package, packageVersion)
    if listRunTimeDependentPackages:
        for pkg in listRunTimeDependentPackages:
            # Cycle members are resolved by the cycle-handling machinery.
            if pkg in self.mapPackageToCycles:
                continue
            packageName, packageVersion = StringUtils.splitPackageNameAndVersion(pkg)
            # Bug fix / consistency: findRPMFile can return None, which the
            # original passed straight into os.path.basename (TypeError).
            # Fail with a clear error instead, matching the sibling
            # variants of this method in this file.
            rpmfile = pkgUtils.findRPMFile(packageName, packageVersion)
            if rpmfile is None:
                self.logger.error("No rpm file found for package: " +
                                  packageName + "-" + packageVersion)
                raise Exception("Missing rpm file")
            latestPkgRPM = os.path.basename(rpmfile).replace(".rpm", "")
            if pkg in listInstalledPackages and latestPkgRPM in listInstalledRPMs:
                continue
            self._installPackage(pkgUtils, packageName, packageVersion, sandbox,
                                 destLogPath, listInstalledPackages, listInstalledRPMs)
def readDependentPackageData(self, line):
    """Parse comma-separated dependency entries ("name [op version]")
    into dependentPackageData objects.  Operators map to symbolic names
    (gte/lte/eq/lt/gt); file-path entries (starting with "/") are
    skipped entirely."""
    strUtils = StringUtils()
    entries = line.split(",")
    parsed = []
    # Symbolic names for the supported comparison operators.
    opNames = {">=": "gte", "<=": "lte", "==": "eq",
               "<": "lt", ">": "gt", "=": "eq"}
    for entry in entries:
        tokens = strUtils.getStringInBrackets(entry).split()
        count = len(tokens)
        idx = 0
        while idx < count:
            # File-path requirements are not package names; skip them.
            if tokens[idx].startswith("/"):
                idx = idx + 1
                continue
            dep = dependentPackageData()
            comparator = None
            if idx + 2 < count:
                comparator = opNames.get(tokens[idx + 1])
            if comparator is not None:
                dep.package = tokens[idx]
                dep.compare = comparator
                dep.version = tokens[idx + 2]
                idx = idx + 3
            else:
                dep.package = tokens[idx]
                idx = idx + 1
            parsed.append(dep)
    return parsed
def _installDependencies(self, arch, deps=[]): listDependentPackages, listTestPackages, listInstalledPackages, listInstalledRPMs = ( self._findDependentPackagesAndInstalledRPM(self.sandbox, arch)) # PackageUtils should be initialized here - as per arch basis # Do not move it to __init__ pkgUtils = PackageUtils(self.logName, self.logPath) if listDependentPackages: self.logger.debug( "Installing the build time dependent packages for " + arch) for pkg in listDependentPackages: packageName, packageVersion = StringUtils.splitPackageNameAndVersion( pkg) self._installPackage(pkgUtils, packageName, packageVersion, self.sandbox, self.logPath, listInstalledPackages, listInstalledRPMs, arch) for pkg in listTestPackages: flag = False packageName, packageVersion = StringUtils.splitPackageNameAndVersion( pkg) for depPkg in listDependentPackages: depPackageName, depPackageVersion = StringUtils.splitPackageNameAndVersion( depPkg) if depPackageName == packageName: flag = True break if flag == False: self._installPackage(pkgUtils, packageName, packageVersion, self.sandbox, self.logPath, listInstalledPackages, listInstalledRPMs, arch) pkgUtils.installRPMSInOneShot(self.sandbox, arch) self.logger.debug( "Finished installing the build time dependent packages for " + arch)
def _createGraphNodes():
    """Build the package dependency graph for Scheduler.sortedList:
    one node per package (already-built packages marked), with
    build-requires and install-requires edges recorded both as typed
    edge sets and as generic parent/child links."""
    # GRAPH-BUILD STEP 1: Initialize graph nodes for each package.
    #
    # Create a graph with a node to represent every package and all
    # its dependent packages in the given list.
    for package in Scheduler.sortedList:
        packageName, packageVersion = StringUtils.splitPackageNameAndVersion(package)
        node = DependencyGraphNode(packageName, packageVersion,
                                   Scheduler._getWeight(package))
        Scheduler.mapPackagesToGraphNodes[package] = node
        if package in Scheduler.listOfAlreadyBuiltPackages:
            node.built = 1
    for package in Scheduler.sortedList:
        pkgNode = Scheduler.mapPackagesToGraphNodes[package]
        for childPackage in Scheduler._getBuildRequiredPackages(package):
            childPkgNode = Scheduler.mapPackagesToGraphNodes[childPackage]
            pkgNode.buildRequiresPkgNodes.add(childPkgNode)
        for childPackage in Scheduler._getRequiredPackages(package):
            childPkgNode = Scheduler.mapPackagesToGraphNodes[childPackage]
            pkgNode.installRequiresPkgNodes.add(childPkgNode)
    # GRAPH-BUILD STEP 2: Mark package dependencies in the graph.
    #
    # Add parent-child relationships between dependent packages.
    # If a package 'A' build-requires or install-requires package 'B', then:
    #  - Mark 'B' as a child of 'A' in the graph.
    #  - Mark 'A' as a parent of 'B' in the graph.
    #
    #                     A
    #
    #                   /   \
    #                  v     v
    #
    #                 B       C
    #
    for package in Scheduler.sortedList:
        pkgNode = Scheduler.mapPackagesToGraphNodes[package]
        for childPkgNode in pkgNode.buildRequiresPkgNodes:
            pkgNode.childPkgNodes.add(childPkgNode)
            childPkgNode.parentPkgNodes.add(pkgNode)
        for childPkgNode in pkgNode.installRequiresPkgNodes:
            pkgNode.childPkgNodes.add(childPkgNode)
            childPkgNode.parentPkgNodes.add(pkgNode)
def buildSourcesList(yamlDir, logger, singleFile=True):
    # Generate "vmwsource" yaml metadata for every package known to
    # constants.specData, pulling each package's first source archive
    # into yamlDir when its sha1 is available.  singleFile selects one
    # combined sources_list.yaml versus one yaml per package.
    strUtils = StringUtils()  # NOTE(review): instantiated but apparently unused here
    if singleFile:
        yamlFile = open(yamlDir + "/sources_list.yaml", "w")
    listPackages = constants.specData.getListPackages()
    listPackages.sort()
    import PullSources
    for package in listPackages:
        ossname = package
        ossversion = constants.specData.getVersion(package)
        # A package counts as "modified" when it carries patches.
        modified = False
        listPatches = constants.specData.getPatches(package)
        if listPatches is not None and len(listPatches) > 0:
            modified = True
        # Prefer the source URL; fall back to the package homepage URL.
        url = constants.specData.getSourceURL(package)
        if url is None:
            url = constants.specData.getURL(package)
        sourceName = None
        listSourceNames = constants.specData.getSources(package)
        if len(listSourceNames) > 0:
            sourceName = listSourceNames[0]
            sha1 = constants.specData.getSHA1(package, sourceName)
            if sha1 is not None:
                # Download the source archive into yamlDir.
                PullSources.get(sourceName, sha1, yamlDir,
                                constants.pullsourcesConfig)
        if not singleFile:
            yamlFile = open(
                yamlDir + "/" + ossname + "-" + ossversion + ".yaml", "w")
        yamlFile.write("vmwsource:" + ossname + ":" + ossversion + ":\n")
        yamlFile.write(" repository: VMWsource\n")
        yamlFile.write(" name: '" + ossname + "'\n")
        yamlFile.write(" version: '" + ossversion + "'\n")
        yamlFile.write(" url: " + str(url) + "\n")
        yamlFile.write(" license: UNKNOWN\n")
        if sourceName is not None:
            yamlFile.write(" vmwsource-distribution: " + str(sourceName) + "\n")
        if modified:
            yamlFile.write(" modified: true\n")
        yamlFile.write("\n")
        if not singleFile:
            yamlFile.close()
    if singleFile:
        yamlFile.close()
    logger.info("Generated source yaml files for all packages")
def _installDependentRunTimePackages(self, pkgUtils, package, packageVersion, sandbox,
                                     destLogPath, listInstalledPackages, listInstalledRPMs):
    """
    Install the run-time RPM dependencies of package-packageVersion into
    the sandbox, skipping dependency-cycle members and packages whose
    latest RPM is already installed.
    Raises Exception when a dependency's RPM file cannot be found.
    """
    runtimeDeps = self._findRunTimeRequiredRPMPackages(package, packageVersion)
    if not runtimeDeps:
        return
    for dep in runtimeDeps:
        # Cycle members are resolved by the cycle-handling machinery.
        if dep in self.mapPackageToCycles:
            continue
        depName, depVersion = StringUtils.splitPackageNameAndVersion(dep)
        rpmfile = pkgUtils.findRPMFile(depName, depVersion)
        if rpmfile is None:
            self.logger.error("No rpm file found for package: " + depName + "-" + depVersion)
            raise Exception("Missing rpm file")
        newestRPM = os.path.basename(rpmfile).replace(".rpm", "")
        # Skip when this exact latest RPM is already installed.
        if dep in listInstalledPackages and newestRPM in listInstalledRPMs:
            continue
        self._installPackage(pkgUtils, depName, depVersion, sandbox, destLogPath,
                             listInstalledPackages, listInstalledRPMs)
def lines_of_code(self):
    """Count and print the lines across all Java files under the project
    directory.  Every physical line read is counted (the validity filter
    that once existed here is disabled)."""
    files_paths = StringUtils.search_java_files(self.project_directory)
    print(f"Files paths: {files_paths}")
    print(f"Files paths len: {len(files_paths)}")
    project_lines = 0
    for path in files_paths:
        with open(path, 'r') as f:
            # Iterate the file object directly instead of a manual
            # readline() loop — same lines, same order.
            for line in f:
                print(f"LINE: {line}")
                project_lines += 1
    print(f"PROJECT LINES: {project_lines}")
def buildSRPMList(srpmPath, yamlDir, logger, singleFile=True):
    # Generate "baseos" yaml metadata for every package known to
    # constants.specData and copy each package's SRPM from srpmPath into
    # yamlDir.  singleFile selects one combined srpm_list.yaml versus
    # one yaml per package.
    strUtils = StringUtils()  # NOTE(review): instantiated but apparently unused here
    if singleFile:
        yamlFile = open(yamlDir + "/srpm_list.yaml", "w")
    listPackages = constants.specData.getListPackages()
    listPackages.sort()
    cmdUtils = CommandUtils()
    for package in listPackages:
        ossname = package
        ossversion = constants.specData.getVersion(package)
        ossrelease = constants.specData.getRelease(package)
        listFoundSRPMFiles = cmdUtils.findFile(
            ossname + "-" + ossversion + "-" + ossrelease + ".src.rpm",
            srpmPath)
        srpmName = None
        if len(listFoundSRPMFiles) == 1:
            srpmFullPath = listFoundSRPMFiles[0]
            srpmName = os.path.basename(srpmFullPath)
            cpcmd = "cp " + srpmFullPath + " " + yamlDir + "/"
            returnVal = cmdUtils.runCommandInShell(cpcmd)
            if not returnVal:
                logger.error("Copy SRPM File is failed for package:" + ossname)
        else:
            # Zero or multiple matches: log the error but keep going;
            # srpmName stays None in the yaml below.
            logger.error("SRPM file is not found:" + ossname)
        if not singleFile:
            yamlFile = open(
                yamlDir + "/" + ossname + "-" + ossversion + "-" +
                ossrelease + ".yaml", "w")
        yamlFile.write("baseos:" + ossname + ":" + ossversion + "-" + ossrelease + ":\n")
        yamlFile.write(" repository: BaseOS\n")
        yamlFile.write(" name: '" + ossname + "'\n")
        yamlFile.write(" version: '" + ossversion + "-" + ossrelease + "'\n")
        yamlFile.write(" baseos-style: rpm\n")
        yamlFile.write(" baseos-source: '" + str(srpmName) + "'\n")
        yamlFile.write(" baseos-osname: 'photon'\n")
        yamlFile.write("\n")
        if not singleFile:
            yamlFile.close()
    if singleFile:
        yamlFile.close()
    logger.info("Generated srpm yaml files for all packages")
def build(self, pkg, doneList):
    """
    Build *pkg* ("name-version") unless it is already built.

    In rpm-check mode only packages listed in constants.testForceRPMS are
    tested; other packages skip the already-built short-circuit check.
    Build failures are logged (when a logger exists) and re-raised.
    """
    packageName, packageVersion = StringUtils.splitPackageNameAndVersion(pkg)
    # Do not build if the RPM is already built.
    # Test only if the package is in testForceRPMS with rpmCheck;
    # build only if the package is not in testForceRPMS with rpmCheck.
    if not constants.rpmCheck or packageName in constants.testForceRPMS:
        if self._checkIfPackageIsAlreadyBuilt(packageName, packageVersion, doneList):
            return
    self._buildPackagePrepareFunction(packageName, packageVersion, doneList)
    try:
        self._buildPackage()
    except Exception as e:
        # Bug fix (per the original TODO): self.logger may not be set up
        # yet; guard so the real build error is not masked by an
        # AttributeError while reporting it.
        logger = getattr(self, "logger", None)
        if logger is not None:
            logger.exception(e)
        raise e
def _createGraphNodes():
    """Build the package dependency graph for Scheduler.sortedList:
    one node per package, with build-requires and install-requires edges
    recorded both as typed edge sets and as generic parent/child links.

    NOTE(review): unlike the sibling variant of this function in this
    file, nodes are NOT marked as already built here — confirm intended.
    """
    # GRAPH-BUILD STEP 1: Initialize graph nodes for each package.
    #
    # Create a graph with a node to represent every package and all
    # its dependent packages in the given list.
    for package in Scheduler.sortedList:
        packageName, packageVersion = StringUtils.splitPackageNameAndVersion(package)
        node = DependencyGraphNode(packageName, packageVersion,
                                   Scheduler._getWeight(package))
        Scheduler.mapPackagesToGraphNodes[package] = node
    for package in Scheduler.sortedList:
        pkgNode = Scheduler.mapPackagesToGraphNodes[package]
        for childPackage in Scheduler._getBuildRequiredPackages(package):
            childPkgNode = Scheduler.mapPackagesToGraphNodes[childPackage]
            pkgNode.buildRequiresPkgNodes.add(childPkgNode)
        for childPackage in Scheduler._getRequiredPackages(package):
            childPkgNode = Scheduler.mapPackagesToGraphNodes[childPackage]
            pkgNode.installRequiresPkgNodes.add(childPkgNode)
    # GRAPH-BUILD STEP 2: Mark package dependencies in the graph.
    #
    # Add parent-child relationships between dependent packages.
    # If a package 'A' build-requires or install-requires package 'B', then:
    #  - Mark 'B' as a child of 'A' in the graph.
    #  - Mark 'A' as a parent of 'B' in the graph.
    #
    #                     A
    #
    #                   /   \
    #                  v     v
    #
    #                 B       C
    #
    for package in Scheduler.sortedList:
        pkgNode = Scheduler.mapPackagesToGraphNodes[package]
        for childPkgNode in pkgNode.buildRequiresPkgNodes:
            pkgNode.childPkgNodes.add(childPkgNode)
            childPkgNode.parentPkgNodes.add(pkgNode)
        for childPkgNode in pkgNode.installRequiresPkgNodes:
            pkgNode.childPkgNodes.add(childPkgNode)
            childPkgNode.parentPkgNodes.add(pkgNode)
def buildSourcesList(specPath, yamlDir, singleFile=False):
    # Generate "vmwsource" yaml metadata for every .spec found under
    # specPath, one entry per spec.  With singleFile=True all entries go
    # to one sources_list.yaml; otherwise one yaml file per package.
    strUtils = StringUtils()  # NOTE(review): instantiated but apparently unused here
    if singleFile:
        yamlFile = open(yamlDir + "sources_list.yaml", "w")
    lst = os.listdir(specPath)
    lst.sort()
    for dirEntry in lst:
        specDir = os.path.join(specPath, dirEntry)
        if os.path.isdir(specDir):
            for specEntry in os.listdir(specDir):
                specFile = os.path.join(specDir, specEntry)
                if os.path.isfile(specFile) and specFile.endswith(".spec"):
                    spec = Specutils(specFile)
                    # A package counts as "modified" when it carries patches.
                    modified = len(spec.getPatchNames()) > 0
                    listSourceURLs = spec.getSourceURLs()
                    ossname = spec.getBasePackageName()
                    ossversion = spec.getVersion()
                    url = None
                    if len(listSourceURLs) > 0:
                        sourceURL = listSourceURLs[0]
                        # Prefer a real download URL; otherwise fall back
                        # to the package homepage URL from the spec.
                        if sourceURL.startswith("http") or sourceURL.startswith("ftp"):
                            url = sourceURL
                        else:
                            url = spec.getURL(ossname)
                    if not singleFile:
                        yamlFile = open(
                            yamlDir + ossname + "-" + ossversion + ".yaml", "w")
                    yamlFile.write("vmwsource:" + ossname + ":" + ossversion + ":\n")
                    yamlFile.write(" repository: VMWsource\n")
                    yamlFile.write(" name: '" + ossname + "'\n")
                    yamlFile.write(" version: '" + ossversion + "'\n")
                    yamlFile.write(" url: " + str(url) + "\n")
                    yamlFile.write(" license: UNKNOWN\n")
                    if modified:
                        yamlFile.write(" modified: true\n")
                    yamlFile.write("\n")
                    if not singleFile:
                        yamlFile.close()
    if singleFile:
        yamlFile.close()
def getBasePkg(self, pkg):
    """Return the base package identifier, "<specname>-<version>", for pkg."""
    name, version = StringUtils.splitPackageNameAndVersion(pkg)
    specName = self.getSpecName(name)
    return specName + "-" + version
def getPackagesForPkg(self, pkg):
    """Return all packages produced by pkg's spec, each suffixed "-<version>"."""
    name, version = StringUtils.splitPackageNameAndVersion(pkg)
    return [subPkg + "-" + version for subPkg in self.getPackages(name, version)]
def getRequiresForPkg(self, pkg):
    """Return the requirements of a package given as a "name-version" string."""
    # splitPackageNameAndVersion yields (name, version), which is exactly
    # the argument order getRequiresForPackage expects.
    return self.getRequiresForPackage(*StringUtils.splitPackageNameAndVersion(pkg))
def getPackagesForPkg(self, pkg):
    """List every package built from pkg's spec, tagged with pkg's version."""
    name, version = StringUtils.splitPackageNameAndVersion(pkg)
    suffix = "-" + version
    result = []
    for subPackage in self.getPackages(name, version):
        result.append(subPackage + suffix)
    return result
def getBasePkg(self, pkg):
    """Return "<specname>-<version>" for the given "name-version" string."""
    name, version = StringUtils.splitPackageNameAndVersion(pkg)
    return "-".join([self.getSpecName(name), version])
def main():
    """Command-line entry point for the spec dependency generator.

    Dispatches on --input-type:
      * remove-upward-deps: delete staged RPMs of the given package and of
        everything that (transitively) depends on it.
      * print-upward-deps: log the packages that depend on the given one.
      * pkg / who-needs: display dependency info on the console.
      * json: expand package-list json files into install-time dependency
        lists under the output directory.

    Exits 0 on success, 1 on any failure (traceback goes to stderr).
    """
    usage = "Usage: %prog [options]"
    parser = ArgumentParser(usage)
    parser.add_argument("-i", "--input-type", dest="input_type", default=DEFAULT_INPUT_TYPE)
    parser.add_argument("-p", "--pkg", dest="pkg")
    parser.add_argument("-f", "--file", dest="json_file", default="packages_minimal.json")
    parser.add_argument("-d", "--display-option", dest="display_option", default=DEFAULT_DISPLAY_OPTION)
    parser.add_argument("-s", "--spec-path", dest="spec_path", default=SPEC_FILE_DIR)
    parser.add_argument("-l", "--log-path", dest="log_path", default=LOG_FILE_DIR)
    parser.add_argument("-y", "--log-level", dest="log_level", default="info")
    parser.add_argument("-t", "--stage-dir", dest="stage_dir", default="../../stage")
    parser.add_argument("-a", "--input-data-dir", dest="input_data_dir", default="../../common/data/")
    parser.add_argument("-o", "--output-dir", dest="output_dir", default="../../stage/common/data")
    options = parser.parse_args()

    constants.setSpecPath(options.spec_path)
    constants.setLogPath(options.log_path)
    constants.setLogLevel(options.log_level)
    constants.initialize()

    cmdUtils = CommandUtils()
    logger = Logger.getLogger("SpecDeps", options.log_path, options.log_level)

    # Create the output dir with the stdlib instead of shelling out to
    # "mkdir -p"; os.makedirs creates intermediate directories the same way.
    if not os.path.isdir(options.output_dir):
        os.makedirs(options.output_dir)

    if not options.input_data_dir.endswith('/'):
        options.input_data_dir += '/'

    try:
        specDeps = SpecDependencyGenerator(options.log_path, options.log_level)

        if options.input_type == "remove-upward-deps":
            # Every staged RPM that depends on the changed package is
            # stale, so remove it to force a rebuild.
            whoNeedsList = specDeps.process("get-upward-deps", options.pkg, options.display_option)
            logger.info("Removing upward dependencies: " + str(whoNeedsList))
            for pkg in whoNeedsList:
                package, version = StringUtils.splitPackageNameAndVersion(pkg)
                release = SPECS.getData().getRelease(package, version)
                for p in SPECS.getData().getPackages(package, version):
                    buildarch = SPECS.getData().getBuildArch(p, version)
                    # ".*" glob matches any dist tag between release and arch;
                    # rm must therefore go through the shell for expansion.
                    rpmFile = ("stage/RPMS/" + buildarch + "/" + p + "-" + version +
                               "-" + release + ".*" + buildarch + ".rpm")
                    cmdUtils.runCommandInShell2("rm -f " + rpmFile)
        elif options.input_type == "print-upward-deps":
            whoNeedsList = specDeps.process("get-upward-deps", options.pkg, options.display_option)
            logger.info("Upward dependencies: " + str(whoNeedsList))
        elif options.input_type in ("pkg", "who-needs"):
            # Display/print package dependencies on console.
            specDeps.process(options.input_type, options.pkg, options.display_option)
        elif options.input_type == "json":
            list_json_files = options.json_file.split("\n")
            # Generate the expanded package dependency json files.
            logger.info("Generating the install time dependency list for all json files")
            for json_file in list_json_files:
                shutil.copy2(json_file, options.output_dir)
                json_wrapper_option_list = JsonWrapper(json_file)
                option_list_json = json_wrapper_option_list.read()
                options_sorted = option_list_json.items()
                for install_option in options_sorted:
                    output_file = None
                    input_value = os.path.join(os.path.dirname(json_file), install_option[1]["file"])
                    # "ISO Packages" is deliberately skipped in tree mode.
                    if options.display_option == "tree" and install_option[1]["title"] == "ISO Packages":
                        continue
                    if options.display_option == "json":
                        output_file = os.path.join(options.output_dir, install_option[1]["file"])
                    specDeps.process(options.input_type, input_value, options.display_option, output_file)
    except Exception as e:
        traceback.print_exc()
        sys.stderr.write(str(e))
        sys.stderr.write("Failed to generate dependency lists from spec files\n")
        sys.exit(1)
    sys.exit(0)
def main():
    """Command-line entry point for the spec dependency generator.

    Dispatches on --input-type:
      * remove-upward-deps: delete staged RPMs of the given package and of
        everything that depends on it; if a toolchain package was modified,
        wipe all staged RPMs instead.
      * print-upward-deps: log the packages that depend on the given one.
      * pkg / who-needs: display dependency info on the console.
      * json: expand package-list json files into install-time dependency
        lists under the output directory.

    Exits 0 on success, 1 on any failure (traceback goes to stderr).
    """
    usage = "Usage: %prog [options]"
    parser = ArgumentParser(usage)
    parser.add_argument("-i", "--input-type", dest="input_type", default=DEFAULT_INPUT_TYPE)
    parser.add_argument("-p", "--pkg", dest="pkg")
    parser.add_argument("-f", "--file", dest="json_file", default="packages_minimal.json")
    parser.add_argument("-d", "--display-option", dest="display_option", default=DEFAULT_DISPLAY_OPTION)
    parser.add_argument("-s", "--spec-path", dest="spec_path", default=SPEC_FILE_DIR)
    parser.add_argument("-l", "--log-path", dest="log_path", default=LOG_FILE_DIR)
    parser.add_argument("-y", "--log-level", dest="log_level", default="info")
    parser.add_argument("-t", "--stage-dir", dest="stage_dir", default="../../stage")
    parser.add_argument("-a", "--input-data-dir", dest="input_data_dir", default="../../common/data/")
    parser.add_argument("-o", "--output-dir", dest="output_dir", default="../../stage/common/data")
    options = parser.parse_args()

    constants.setSpecPath(options.spec_path)
    constants.setLogPath(options.log_path)
    constants.setLogLevel(options.log_level)
    constants.initialize()

    cmdUtils = CommandUtils()
    logger = Logger.getLogger("SpecDeps", options.log_path, options.log_level)

    # Create the output dir with the stdlib instead of shelling out to
    # "mkdir -p"; os.makedirs creates intermediate directories the same way.
    if not os.path.isdir(options.output_dir):
        os.makedirs(options.output_dir)

    if not options.input_data_dir.endswith('/'):
        options.input_data_dir += '/'

    try:
        specDeps = SpecDependencyGenerator(options.log_path, options.log_level)

        if options.input_type == "remove-upward-deps":
            isToolChainPkg = specDeps.process("is-toolchain-pkg", options.pkg, options.display_option)
            if isToolChainPkg:
                # A toolchain change invalidates everything built so far.
                logger.info("Removing all staged RPMs since toolchain packages were modified")
                cmdUtils.runCommandInShell("rm -rf stage/RPMS/")
            else:
                # Only the RPMs that (transitively) depend on the changed
                # package are stale; remove them to force a rebuild.
                whoNeedsList = specDeps.process("get-upward-deps", options.pkg, options.display_option)
                logger.info("Removing upward dependencies: " + str(whoNeedsList))
                for pkg in whoNeedsList:
                    package, version = StringUtils.splitPackageNameAndVersion(pkg)
                    release = SPECS.getData().getRelease(package, version)
                    for p in SPECS.getData().getPackages(package, version):
                        buildarch = SPECS.getData().getBuildArch(p, version)
                        # ".*" glob matches any dist tag; rm must go through
                        # the shell for expansion.
                        rpmFile = ("stage/RPMS/" + buildarch + "/" + p + "-" + version +
                                   "-" + release + ".*" + buildarch + ".rpm")
                        cmdUtils.runCommandInShell("rm -f " + rpmFile)
        elif options.input_type == "print-upward-deps":
            whoNeedsList = specDeps.process("get-upward-deps", options.pkg, options.display_option)
            logger.info("Upward dependencies: " + str(whoNeedsList))
        elif options.input_type in ("pkg", "who-needs"):
            # Display/print package dependencies on console.
            specDeps.process(options.input_type, options.pkg, options.display_option)
        elif options.input_type == "json":
            list_json_files = options.json_file.split("\n")
            # Generate the expanded package dependency json files.
            logger.info("Generating the install time dependency list for all json files")
            if list_json_files:
                # Options file is expected next to the first json input.
                shutil.copy2(os.path.dirname(list_json_files[0]) + "/build_install_options_all.json",
                             options.output_dir)
            for json_file in list_json_files:
                output_file = None
                if options.display_option == "json":
                    output_file = os.path.join(options.output_dir, os.path.basename(json_file))
                specDeps.process(options.input_type, json_file, options.display_option, output_file)
    except Exception as e:
        traceback.print_exc()
        sys.stderr.write(str(e))
        sys.stderr.write("Failed to generate dependency lists from spec files\n")
        sys.exit(1)
    sys.exit(0)
def installToolchainRPMS(self, chroot, packageName=None, packageVersion=None,
                         usePublishedRPMS=True, availablePackages=None):
    """Install the toolchain RPMs into the given chroot.

    Resolves a version and an RPM file for every entry in
    constants.listToolChainRPMsToInstall (plus cross-binutils/gcc when
    cross compiling), then installs them all with a single rpm command.

    chroot            -- chroot object; chroot.getID() is used as the
                         rpm --root and for logging.
    packageName       -- package being built; used to pick dependency
                         versions and to honor toolchain list ordering.
                         Cleared when cross compiling (the target package
                         is handled by installTargetToolchain instead).
    packageVersion    -- version matching packageName.
    usePublishedRPMS  -- allow falling back to previously published RPMs
                         when no locally built RPM is found.
    availablePackages -- set of "specname-version" base packages known to
                         be built; when None, findRPMFile() is trusted.

    Raises Exception when a required RPM cannot be located or the rpm
    install command fails.
    """
    self.logger.debug("Installing toolchain RPMS.......")
    rpmFiles = ""       # space-separated RPM file paths passed to rpm -i
    packages = ""       # space-separated "name-version" list (logging only)
    listBuildRequiresPackages = []
    listRPMsToInstall = list(constants.listToolChainRPMsToInstall)
    if constants.crossCompiling:
        # When cross compiling, the target package's toolchain is set up
        # separately at the end; also add the cross binutils/gcc.
        targetPackageName = packageName
        packageName = None
        packageVersion = None
        listRPMsToInstall.extend([
            'binutils-' + constants.targetArch + '-linux-gnu',
            'gcc-' + constants.targetArch + '-linux-gnu'
        ])
    if packageName:
        listBuildRequiresPackages = self.getListDependentPackages(
            packageName, packageVersion)

    for package in listRPMsToInstall:
        pkgUtils = PackageUtils(self.logName, self.logPath)
        rpmFile = None
        version = None

        # Get proper package version: prefer the version pinned by the
        # build-requires of the package being built, else the highest known.
        for depPkg in listBuildRequiresPackages:
            depPkgName, depPkgVersion = StringUtils.splitPackageNameAndVersion(
                depPkg)
            if depPkgName == package:
                version = depPkgVersion
                break
        if not version:
            version = SPECS.getData(
                constants.buildArch).getHighestVersion(package)

        if availablePackages is not None:
            basePkg = SPECS.getData(
                constants.buildArch).getSpecName(package) + "-" + version
            isAvailable = basePkg in availablePackages
        else:
            # if availablePackages is not provided (rear case) it is safe
            # to use findRPMFile()
            isAvailable = True

        if constants.rpmCheck:
            rpmFile = pkgUtils.findRPMFile(package, version,
                                           constants.buildArch)

        if rpmFile is None:
            # Honor the toolchain list order.
            # if index of depended package ('package') is more
            # then index of the current package that we are
            # building ('packageName'), then we _must_ use published
            # `package` rpm.
            if (packageName and packageName in listRPMsToInstall and
                    listRPMsToInstall.index(packageName) <
                    listRPMsToInstall.index(package)):
                isAvailable = False
            if isAvailable:
                rpmFile = pkgUtils.findRPMFile(package, version,
                                               constants.buildArch)

        if rpmFile is None:
            # No locally built RPM; either fail or fall back to a
            # previously published one.
            if not usePublishedRPMS or isAvailable or constants.crossCompiling:
                raise Exception(
                    "%s-%s.%s not found in available packages" %
                    (package, version, constants.buildArch))

            # Safe to use published RPM
            # sqlite-autoconf package was renamed, but it still published
            # as sqlite-autoconf
            if (package == "sqlite") and (constants.buildArch == "x86_64"):
                package = "sqlite-autoconf"
            rpmFile = self._findPublishedRPM(package,
                                             constants.prevPublishRPMRepo)
            if rpmFile is None:
                if package in constants.listOfRPMsProvidedAfterBuild:
                    self.logger.debug(
                        "No old version of " + package +
                        " exists, skip until the new version is built")
                    continue
                self.logger.error("Unable to find published rpm " + package)
                raise Exception("Input Error")

        rpmFiles += " " + rpmFile
        packages += " " + package + "-" + version

    self.logger.debug(rpmFiles)
    self.logger.debug(packages)

    # Install everything in one rpm invocation inside the chroot.
    # --nodeps/--noorder/--force: the toolchain list itself defines the
    # order; dependency checking is deliberately bypassed here.
    cmd = (self.rpmCommand + " -i -v --nodeps --noorder --force --root " +
           chroot.getID() + " --define \'_dbpath /var/lib/rpm\' " + rpmFiles)
    retVal = CommandUtils.runCommandInShell(cmd, logfn=self.logger.debug)
    if retVal != 0:
        self.logger.debug("Command Executed:" + cmd)
        self.logger.error("Installing toolchain RPMS failed")
        raise Exception("RPM installation failed")
    self.logger.debug(
        "Successfully installed default toolchain RPMS in Chroot:" +
        chroot.getID())

    if packageName:
        self.installExtraToolchainRPMS(chroot, packageName, packageVersion)
    if constants.crossCompiling:
        self.installTargetToolchain(chroot, targetPackageName)