def BuildContentState(self, log: Log, pathFileRecord: PathRecord, allowCaching: bool, allowNew: bool, cachedSyncState: Optional['SyncState'] = None) -> ContentState:
    """
    Build a ContentState record for the file described by pathFileRecord.

    :param log: log used for verbose progress messages.
    :param pathFileRecord: the file to examine (RelativePath is used as the state name, ResolvedPath for disk access).
    :param allowCaching: when True, reuse the checksum from cachedSyncState if length and modification date are unchanged.
    :param allowNew: when True, a file with no cached entry is marked CacheState.New.
    :param cachedSyncState: optional previous sync state to compare/reuse against.
    :return: a fully populated ContentState (Checksum, TagChecksum and CacheState set).
    """
    fileState = ContentState()
    fileState.Name = pathFileRecord.RelativePath
    fileState.Length = os.path.getsize(pathFileRecord.ResolvedPath)
    fileState.ModifiedDate = self.__FileModificationDate(pathFileRecord.ResolvedPath)
    # '0' is the default tag checksum; it is replaced by the cached value when the cache hit path is taken.
    fileState.TagChecksum = '0'
    cachedState = cachedSyncState.TryGetFileState(fileState) if cachedSyncState is not None else None
    # Cache hit requires both the length and the modification date to be unchanged; only then is hashing skipped.
    if allowCaching and cachedState is not None and fileState.Length == cachedState.Length and fileState.ModifiedDate == cachedState.ModifiedDate:
        fileState.Checksum = cachedState.Checksum
        fileState.TagChecksum = cachedState.TagChecksum
        log.LogPrintVerbose(2, "Using cached checksum for '{0}'".format(fileState.Name))
    else:
        log.LogPrintVerbose(2, "Calculating checksum for '{0}'".format(fileState.Name))
        fileState.Checksum = IOUtil.HashFile(pathFileRecord.ResolvedPath)
    # Mark the entry as being new
    #if (cachedState is None or CacheState.New) and allowNew:
    if cachedState is None and allowNew:
        fileState.CacheState = CacheState.New
    elif cachedState is not None and not fileState.IsSameState(cachedState):
        # Cached entry exists but differs -> modified; record a human readable diff description.
        fileState.CacheState = CacheState.Modified
        fileState.ModificationComment = fileState.GetDifferenceString(cachedState)
    else:
        fileState.CacheState = CacheState.Unmodified
    return fileState
def BuildGeneratorCMakeConfig(log: Log, toolVersion: Version, platformName: str, buildVariantConfig: BuildVariantConfig,
                              userCMakeConfig: Optional[UserCMakeConfig], cmakeConfiguration: CMakeConfiguration,
                              defaultCompilerVersion: int, isCheckMode: bool) -> GeneratorCMakeConfig:
    """
    Build the CMake config based on the supplied parameters and the default settings from the toolconfig

    Precedence (lowest to highest): toolconfig defaults -> platform specific config -> user command line overrides.

    :param log: log used for verbose messages about config decisions.
    :param userCMakeConfig: optional user (command line) overrides; when None no overrides are applied.
    :param isCheckMode: when True the build directory is redirected to the check directory ('<buildDir>/fsl').
    :return: the fully resolved GeneratorCMakeConfig.
    """
    # Setup default configuration
    buildDir = IOUtil.Join(cmakeConfiguration.DefaultBuildDir, platformName)
    generatorName = ""
    installPrefix = cmakeConfiguration.DefaultInstallPrefix
    # Give the platform a chance to override the config
    platformConfig = cmakeConfiguration.TryGetPlatformConfig(platformName)
    allowFindPackage = True
    if platformConfig is not None:
        if platformConfig.DefaultGeneratorName is not None:
            generatorName = platformConfig.DefaultGeneratorName
        if platformConfig.DefaultInstallPrefix is not None:
            installPrefix = platformConfig.DefaultInstallPrefix
        if platformConfig.AllowFindPackage is not None:
            allowFindPackage = platformConfig.AllowFindPackage
            log.LogPrintVerbose(2, "project defined AllowFindPackage to {0}".format(allowFindPackage))
    # Apply the commandline overrides (so the user gets the final say)
    buildDirSetByUser = False
    if userCMakeConfig is not None:
        if userCMakeConfig.BuildDir is not None:
            buildDir = userCMakeConfig.BuildDir
            buildDirSetByUser = True
        if userCMakeConfig.GeneratorName is not None:
            generatorName = userCMakeConfig.GeneratorName
        if userCMakeConfig.InstallPrefix is not None:
            installPrefix = userCMakeConfig.InstallPrefix
        if userCMakeConfig.AllowFindPackage is not None:
            allowFindPackage = userCMakeConfig.AllowFindPackage
            log.LogPrintVerbose(2, "Command line set AllowFindPackage to {0}".format(allowFindPackage))
    # If we still dont have a generator name then try to select a good default
    if len(generatorName) <= 0:
        # Try to determine the default generator name for the platform
        generatorName = CMakeHelper.GetPlatformDefaultCMakeGenerator(platformName, defaultCompilerVersion)
    cmakeVersion = CMakeUtil.GetVersion()
    # User supplied extra cmake arguments are split shell-style (shlex) into argument lists.
    cmakeConfigGlobalArgs = [] if userCMakeConfig is None else shlex.split(userCMakeConfig.ConfigGlobalArgs)
    cmakeConfigAppArgs = [] if userCMakeConfig is None else shlex.split(userCMakeConfig.ConfigAppArgs)
    checkDir = IOUtil.Join(buildDir, 'fsl')
    # In check mode the check dir replaces the build dir so checks do not touch the normal build output.
    if isCheckMode:
        buildDir = checkDir
    return GeneratorCMakeConfig(toolVersion, platformName, buildVariantConfig, buildDir, buildDirSetByUser, checkDir,
                                generatorName, installPrefix, cmakeVersion, cmakeConfigGlobalArgs, cmakeConfigAppArgs,
                                allowFindPackage)
def Resolve(log: Log, allPackages: List[UnresolvedBasicPackage], dump: bool = False) -> List[ResolvedPackageInstance]:
    """
    Resolve the supplied unresolved packages into ResolvedPackageInstance objects.

    :param log: log used for progress/debug output (indented for the duration of the call).
    :param allPackages: the packages to resolve; TopLevel packages are rejected with an exception.
    :param dump: when True, write a 'AllDependencies' dot/png dump of the resolved graph.
    :return: all ResolvedPackageInstance nodes found in the built graph.

    Cleanup vs previous version: removed the 'allPackageDict' local that was populated but never
    read (only referenced by commented-out code) and the stale commented-out code itself.
    """
    log.LogPrintVerbose(LocalVerbosityLevel.Info, "Initial package resolve")
    log.PushIndent()
    try:
        res = []  # type: List[ResolvedPackageInstance]
        if len(allPackages) > 0:
            # Work on a sorted copy so the caller's list is not reordered.
            allPackages = list(allPackages)
            allPackages.sort(key=lambda s: s.Name.Value.upper())
            log.LogPrintVerbose(LocalVerbosityLevel.Debug, "Available packages")
            for package in allPackages:
                if package.Type == PackageType.TopLevel:
                    raise Exception("Unsupported package type")
                if log.Verbosity >= LocalVerbosityLevel.Debug:
                    log.LogPrint("- {0}".format(package))
        graph = PackageGraphBuilder.Build(log, allPackages)
        # Collect every node whose source is a fully resolved package instance.
        for node in graph.DebugNodes():
            packageInstance = node.Source
            if isinstance(packageInstance, ResolvedPackageInstance):
                res.append(packageInstance)
        if dump:
            DotUtil.ToFile(log, "AllDependencies", graph)
        return res
    finally:
        log.PopIndent()
def __init__(self, log: Log, xmlElement: ET.Element, defaultName: str) -> None:
    """
    Parse a recipe element.

    Reads Name/Version/Pipeline/ValidateInstallation/ExternalInstallDirectory and the
    Find/FindVersion/FindTargetName attributes, enforcing that Find=False discards
    FindVersion/FindTargetName and that a pipeline and a ExternalInstallDirectory are
    mutually exclusive.

    :raises Exception: if Version is incompatible with FindVersion, or if both a Pipeline
                       and a ExternalInstallDirectory are defined.

    Bug fix: the 'discarding' log messages previously printed the attribute AFTER it had
    been set to None, so they always logged 'None' instead of the discarded value; the
    value is now logged before it is cleared.
    """
    super().__init__(log, xmlElement)
    self.ShortName = self._ReadAttrib(xmlElement, 'Name', defaultName)
    self.Version = self._TryReadAttribAsVersion(xmlElement, 'Version')  # type: Optional[Version]
    self.Pipeline = self.__TryGetPipeline(xmlElement)
    self.ValidateInstallation = self.__TryGetValidateInstallation(log, xmlElement)
    self.ExternalInstallDirectory = self._TryReadAttrib(xmlElement, 'ExternalInstallDirectory')
    self.FindVersion = self._TryReadAttribAsVersion(xmlElement, 'FindVersion')  # type: Optional[Version]
    self.FindTargetName = self._TryReadAttrib(xmlElement, 'FindTargetName')
    findResult = self._TryReadBoolAttrib(xmlElement, "Find", None)
    self.Find = False if findResult is None else findResult
    if self.FindVersion is not None:
        if findResult is not None and not findResult:
            # Log BEFORE discarding so the message shows the actual discarded value.
            log.LogPrintVerbose(2, "Recipe specified Find=False, so discarding the specified FindVersion '{0}'".format(self.FindVersion))
            self.FindVersion = None
        else:
            # A FindVersion implies Find, even when the Find attribute was absent.
            self.Find = True
            if self.Version is not None and not self.FindVersion.IsCompatible(self.Version):
                raise Exception("Recipe '{0}' version {1} is not compatible with the specified FindVersion '{2}'.".format(self.ShortName, self.Version, self.FindVersion))
    if self.FindTargetName is not None:
        if findResult is not None and not findResult:
            # Log BEFORE discarding so the message shows the actual discarded value.
            log.LogPrintVerbose(2, "Recipe specified Find=False, so discarding the specified FindTargetName '{0}'".format(self.FindTargetName))
            self.FindTargetName = None
        else:
            self.Find = True
    # A pipeline and a external install directory are mutually exclusive.
    if not self.Pipeline is None and not self.ExternalInstallDirectory is None:
        raise Exception("Recipe '{0}' can only a pipeline or a ExternalInstallDirectory defined not both.".format(self.ShortName))
    self.FullName = self.__GenerateName(self.ShortName, self.Version)
def RunInAnotherThread(packageQueue: Any, cancellationToken: SimpleCancellationToken, mainLog: Log, toolConfig: ToolConfig,
                       customPackageFileFilter: Optional[CustomPackageFileFilter], clangFormatConfiguration: ClangFormatConfiguration,
                       clangExeInfo: ClangExeInfo, repairEnabled: bool) -> Tuple[int, int]:
    """
    Thread worker: drain packages from the preloaded queue and run clang-format on each.

    On any exception the shared cancellation token is cancelled (stopping sibling workers)
    and the exception is re-raised.

    :return: (examinedCount, processedCount) - packages looked at vs packages actually formatted.

    Bug fix: the cancellation error message previously printed the literal '{0}' because
    '.format(ex)' was never called; the exception is now included in the message.
    """
    threadId = threading.get_ident()
    mainLog.LogPrintVerbose(4, "Starting thread {0}".format(threadId))
    examinedCount = 0
    processedCount = 0
    keepWorking = True
    package = None  # type: Optional[Package]
    try:
        while keepWorking and not cancellationToken.IsCancelled():
            try:
                # Since the queue is preloaded this is ok; an empty queue means we are done.
                package = packageQueue.get_nowait()
            except Exception:
                package = None
            if package is None:
                keepWorking = False
            else:
                if mainLog.Verbosity >= 4:
                    mainLog.LogPrint("- clang-format on package '{0}' on thread {1}".format(package.Name, threadId))
                else:
                    mainLog.LogPrint("- clang-format on package '{0}'".format(package.Name))
                captureLog = CaptureLog(mainLog.Title, mainLog.Verbosity)
                try:
                    filteredFiles = None
                    if customPackageFileFilter is not None:
                        filteredFiles = customPackageFileFilter.TryLocateFilePatternInPackage(captureLog, package, clangFormatConfiguration.FileExtensions)
                    # A filter that matched nothing means the package is skipped (examined but not processed).
                    if customPackageFileFilter is None or filteredFiles is not None:
                        processedCount += 1
                        _RunClangFormat(captureLog, toolConfig, clangFormatConfiguration, clangExeInfo, package, filteredFiles, repairEnabled)
                    examinedCount += 1
                finally:
                    # Best effort: flush any captured output for the package; never let logging kill the worker.
                    try:
                        if len(captureLog.Captured) > 0:
                            capturedLog = "Package: '{0}' result:\n{1}".format(package.Name, "\n".join(captureLog.Captured))
                            mainLog.DoPrint(capturedLog)
                    except:
                        pass
    except Exception as ex:
        cancellationToken.Cancel()
        mainLog.DoPrintError("Cancelling tasks due to exception: {0}".format(ex))
        raise
    finally:
        mainLog.LogPrintVerbose(4, "Ending thread {0}".format(threadId))
    return (examinedCount, processedCount)
def __CreateTopLevelPackage(log: Log, graph: DependencyGraph) -> EvaluationPackage:
    """
    Create a synthetic 'top level' package that directly depends on every node nothing else
    depends on, so a single global build order can be determined from one root.

    :raises Exception: when no root nodes exist (no packages, or a circular dependency).
    """
    # Extract the top level nodes and add them as direct dependencies to a 'top level node' (so we can determine the global build order)
    topLevelNodes = graph.FindNodesWithNoIncomingDependencies()  # type: List[DependencyGraphNode]
    if len(topLevelNodes) < 1:
        # If there are no top level nodes, that means we are likely to have a circular dependency
        # so we just rely on the circular dependency detection to handle it
        raise Exception("Internal error, no top level nodes found, so either no packages are available or there is a circular dependency")
    log.LogPrintVerbose(LocalVerbosityLevel.Info, "Main packages: {0}".format(len(topLevelNodes)))
    packageDirectDependencies = [EvaluationPackage.DependencyRecord(rootNode.Source, None)
                                 for rootNode in topLevelNodes]  # type: List[EvaluationPackage.DependencyRecord]
    unresolvedDirectDependencies = [UnresolvedPackageDependency(rootNode.Source.Name, AccessType.Public)
                                    for rootNode in topLevelNodes]  # type: List[UnresolvedPackageDependency]
    newTopLevel = UnresolvedBasicPackage(UnresolvedPackageName(PackageNameMagicString.TopLevelName),
                                         PackageType.TopLevel, unresolvedDirectDependencies, [])
    return EvaluationPackage(newTopLevel.Name, newTopLevel, packageDirectDependencies)
def Build(log: Log, configBuildDir: str, configDisableWrite: bool, toolConfig: ToolConfig, packagePath: PackagePath, featureList: List[str], outputPath: Optional[str] = None) -> None:
    """
    Process the package's content build folder and emit the results to the content output folder.

    Does nothing (beyond a verbose message) when the package has no content build folder.
    When configDisableWrite is True, no directories are created on disk.
    """
    packageDir = packagePath.AbsoluteDirPath
    contentBuildFolderName = ToolSharedValues.CONTENT_BUILD_FOLDER_NAME
    srcContentPath = IOUtil.Join(packageDir, contentBuildFolderName)
    dstContentPath = outputPath if outputPath is not None else GetContentOutputPath(packagePath)
    if not IOUtil.IsDirectory(srcContentPath):
        log.LogPrintVerbose(1, "No '{0}' directory present at '{1}' so there is no content to process.".format(contentBuildFolderName, packageDir))
        return
    packageBuildDir = IOUtil.Join(packageDir, configBuildDir)
    if not configDisableWrite:
        IOUtil.SafeMakeDirs(packageBuildDir)
    processorManager = GetContentProcessorManager(log, toolConfig, featureList)
    Builder(log, configDisableWrite, toolConfig, packageBuildDir, srcContentPath, dstContentPath, processorManager)
def ValidateInstallationForPackages(log: Log, configSDKPath: str, generatorContext: GeneratorContext, resolvedBuildOrder: List[Package],
                                    builderSettings: Optional[BuilderSettings] = None,
                                    packageRecipeResultManager: Optional[PackageRecipeResultManager] = None) -> None:
    """
    Validate that every package with a experimental recipe has a valid installation.

    Claims the shared install directory first (preventing concurrent builds from clashing),
    filters the build order down to packages with a recipe and runs the validation engine
    over them.

    Bug fix: the previous default 'builderSettings=BuilderSettings()' was a shared mutable
    default created once at import time; it is now a None sentinel resolved per call
    (backward compatible - omitting the argument behaves the same).
    """
    if builderSettings is None:
        builderSettings = BuilderSettings()
    if packageRecipeResultManager is None:
        packageRecipeResultManager = PackageRecipeResultManager(log)
    if not generatorContext.RecipePathBuilder.IsEnabled:
        log.LogPrintVerbose(3, "External building has been disabled in the Project.gen file")
        return
    if generatorContext.RecipePathBuilder.TargetLocation is None:
        raise Exception("Invalid path builder")
    # Claim the 'package' install directory to prevent multiple builds from using the same
    # as it would give concurrency issues
    BuildAreaInfoFileUtil.ProcessInstallDirClaim(log, generatorContext.RecipePathBuilder.TargetLocation.ResolvedPath, configSDKPath,
                                                 builderSettings.ForceClaimInstallArea, __g_installAreaInformationFilename)
    if resolvedBuildOrder is None:
        return
    # Filter all packages that don't have a experimental recipe
    resolvedBuildOrder = [entry for entry in resolvedBuildOrder if not entry.ResolvedDirectExperimentalRecipe is None]
    if len(resolvedBuildOrder) == 0:
        return
    recipePackageStateCache = RecipePackageStateCache(log)
    # Here we basically run the installation validation engine and see if there is anything that triggers a exception
    validationEngine = ValidationEngine(log, generatorContext.VariableProcessor, packageRecipeResultManager, generatorContext.ErrorHelpManager)
    __FindMissingInstallations(log, validationEngine, resolvedBuildOrder, recipePackageStateCache, generatorContext.CMakeConfig)
def __TryCreateReadonlyCache(self, log: Log, basedUponXML: Optional[XmlExperimentalDefaultThirdPartyInstallReadonlyCacheDirectory]) -> Optional[ToolConfigExperimentalDefaultThirdPartyInstallDirectory]:
    """
    Create the readonly third-party-install cache directory config.

    Returns None (cache disabled) when the environment variable named by the xml entry is
    not set; raises when the xml entry is missing or does not name an environment variable.
    """
    entryName = "DefaultThirdPartyInstallReadonlyCacheDirectory"
    if basedUponXML is None:
        raise Exception("No '{0}' was defined in the xml".format(entryName))
    variableProcessor = VariableProcessor(log)
    envName = variableProcessor.TryExtractLeadingEnvironmentVariableName(basedUponXML.Name, False)
    if envName is None:
        raise Exception("The {0} is expected to contain a environment variable '{1}'".format(entryName, basedUponXML.Name))
    if IOUtil.TryGetEnvironmentVariable(envName) is None:
        # Unset variable just disables the cache instead of being an error.
        log.LogPrintVerbose(2, "Read only cache environment variable {0} not set, disabling cache".format(envName))
        return None
    return ToolConfigExperimentalDefaultThirdPartyInstallDirectory(log, basedUponXML, entryName, True)
def __AddParentFeatures(log: Log, featureNameList: List[str], requirementTree: Union[RequirementTree, AppInfoGlobalRequirementTree], useStrictFeatureWarning: bool) -> List[str]:
    """
    Expand the feature list with every ancestor feature each entry depends on.

    Unknown feature names are dropped (with a warning). A '*' wildcard returns the list
    unchanged. NOTE: featureNameList is sorted in place (observable to the caller).
    :return: a new sorted list of the expanded feature names.
    """
    if '*' in featureNameList:
        return featureNameList
    featureNameList.sort()
    if log.Verbosity > 1:
        log.LogPrint("Automatically adding features to supplied feature list {0}".format(featureNameList))
    expandedNames = set(featureNameList)
    for requestedName in featureNameList:
        if requestedName not in requirementTree.FeatureToNodeDict:
            # Unknown feature: remove it from the result and complain.
            expandedNames.remove(requestedName)
            if useStrictFeatureWarning:
                log.DoPrintWarning("Unknown feature name '{0}' in filterNameList {1}".format(requestedName, featureNameList))
            else:
                # For now just log a warning
                log.LogPrintVerbose(5, "Unknown feature name '{0}' in filterNameList {1}".format(requestedName, featureNameList))
            continue
        requirementNode = requirementTree.FeatureToNodeDict[requestedName]
        # Walk towards the root, collecting every parent feature not already present.
        currentNode = requirementNode  # type: Optional[Union[RequirementTreeNode, AppInfoGlobalRequirementTreeNode]]
        while currentNode is not None:
            if currentNode.Content is not None and currentNode.Content.Name not in expandedNames:
                expandedNames.add(currentNode.Content.Name)
                if log.Verbosity > 1 and requirementNode.Content is not None:
                    log.LogPrint("- '{0}' because '{1}' depends on it".format(currentNode.Content.Name, requirementNode.Content.Name))
            currentNode = currentNode.Parent
    return sorted(expandedNames)
def __BuildTargetLinkLibrariesForDirectExternalDependencies(log: Log, package: Package,
                                                            resolvedDirectExternalDependencies: Union[List[PackageExternalDependency], List[PackagePlatformExternalDependency]],
                                                            ignoreLibs: Optional[List[str]] = None) -> str:
    """
    Build the argument string for a CMake 'target_link_libraries' call covering the package's
    direct external dependencies.

    :param ignoreLibs: unix-style library names to skip entirely (logged at verbosity 2).
    :return: a string of newline-separated '<access> [optimized|debug] <lib>' entries
             (empty string when nothing is linked).
    """
    if ignoreLibs is None:
        ignoreLibs = []
    isExternalLibrary = package.Type == PackageType.ExternalLibrary
    deps = ""
    for entry in resolvedDirectExternalDependencies:
        libraryName = LibUtil.ToUnixLibName(entry.Name)
        if libraryName not in ignoreLibs:
            # Static libs and .so DLLs are linked via their (possibly located) path.
            if entry.Type == ExternalDependencyType.StaticLib or (entry.Type == ExternalDependencyType.DLL and entry.Name.lower().endswith(".so")):
                # Only use the location when it is managed or the package is not itself a external library
                # - assumption inferred from the condition; confirm against PackageExternalDependency semantics.
                location = entry.Location if entry.Location is not None and (entry.IsManaged or not isExternalLibrary) else ""
                # With a location the original (non unix-converted) name is used.
                libraryName = libraryName if len(location) <= 0 else entry.Name
                fullPathLinkDir = Util.ChangeToCMakeEnvVariables(IOUtil.Join(location, libraryName))
                if entry.DebugName != entry.Name:
                    # Separate optimized/debug entries when the debug library has a different name.
                    deps += "\n {0} optimized {1}".format(GetAccessTypeString(package, entry.Access, False), fullPathLinkDir)
                    libraryName = LibUtil.ToUnixLibName(entry.DebugName)
                    fullPathLinkDir = Util.ChangeToCMakeEnvVariables(IOUtil.Join(location, libraryName))
                    deps += "\n {0} debug {1}".format(GetAccessTypeString(package, entry.Access, False), fullPathLinkDir)
                else:
                    deps += "\n {0} {1}".format(GetAccessTypeString(package, entry.Access, False), fullPathLinkDir)
            # 'Find' dependencies link via the variable a find_package module sets.
            if entry.Type == ExternalDependencyType.Find:
                linkName = "${%s_LIBRARY}" % (libraryName)
                deps += "\n {0} {1}".format(GetAccessTypeString(package, entry.Access, False), linkName)
        else:
            log.LogPrintVerbose(2, "INFO: Force ignored '{0}'".format(libraryName))
    return deps
def Save(log: Log, cacheFilename: str, buildConfigureCache: 'BuildConfigureCache') -> None:
    """Serialize the configure cache to cacheFilename as stable, pretty-printed JSON (only writes when changed)."""
    log.LogPrintVerbose(4, "- Saving generated file hash cache")
    serializedCache = json.dumps(buildConfigureCache.__dict__, ensure_ascii=False, sort_keys=True, indent=2)
    IOUtil.WriteFileIfChanged(cacheFilename, serializedCache)
def Run(log: Log, createInfo: OpenProjectCreateInfo) -> None:
    """
    Patch the project's .vscode settings (and launch config, when executable info is
    available), then launch Visual Studio Code for the source path.
    """
    log.LogPrintVerbose(1, "Configuring and launching Visual Studio Code for path '{0}'".format(createInfo.SourcePath))
    buildPlatformType = PlatformUtil.DetectBuildPlatformType()
    configDirPath = IOUtil.Join(createInfo.SourcePath, ".vscode")
    launchJsonPath = IOUtil.Join(configDirPath, "launch.json")
    settingsJsonPath = IOUtil.Join(configDirPath, "settings.json")
    IOUtil.SafeMakeDirs(configDirPath)
    log.LogPrintVerbose(1, "- Patching settings at '{0}'".format(settingsJsonPath))
    log.PushIndent()
    try:
        VSCodeSettingsJsonUtil.Patch(log, settingsJsonPath, createInfo.CMakeInfo)
    finally:
        log.PopIndent()
    exeInfo = createInfo.ExeInfo
    if exeInfo is None:
        log.LogPrintVerbose(1, "- Launch: No executable information found")
    else:
        if log.Verbosity >= 1:
            log.LogPrint("- Patching launch settings at '{0}'".format(launchJsonPath))
            log.LogPrint(" - Exe: '{0}'".format(exeInfo.Executable))
            log.LogPrint(" - Cwd: '{0}'".format(exeInfo.CurrentWorkingDirectory))
        patchSucceeded = VSCodeLaunchJsonUtil.TryPatch(launchJsonPath, buildPlatformType, exeInfo.Executable, exeInfo.CurrentWorkingDirectory)
        if not patchSucceeded:
            log.LogPrintVerbose(1, "WARNING Failed to patch launch file '{0}'".format(launchJsonPath))
    log.PushIndent()
    try:
        OpenProjectUtil.__RunVSCode(log, buildPlatformType, createInfo.SourcePath)
    finally:
        log.PopIndent()
def Save(log: Log, cacheFilename: str, JsonProjectIdCache: 'JsonProjectIdCache') -> None:
    """Serialize the project-id cache to cacheFilename as stable, pretty-printed JSON (only writes when changed)."""
    # NOTE: the parameter name shadows the type name; kept for keyword-argument compatibility.
    log.LogPrintVerbose(LocalVerbosityLevel.Trace, "- Saving cache '{0}'".format(cacheFilename))
    serializedCache = json.dumps(JsonProjectIdCache.__dict__, ensure_ascii=False, sort_keys=True, indent=2)
    IOUtil.WriteFileIfChanged(cacheFilename, serializedCache)
def __BuildSyncState(log: Log, absoluteCacheFileName: str, content: Content, cachedSyncState: Optional[SyncState],
                     allowCaching: bool, allowNew: bool, addNewFilesAndDirs: bool, allowCacheLoad: bool) -> SyncState:
    """
    Build the current SyncState for the supplied content, reusing cached checksums where allowed.

    absoluteCacheFileName must be a absolute filename

    :param cachedSyncState: previous state; when None and allowCacheLoad is set, it is loaded from disk.
    :param addNewFilesAndDirs: when False, entries classified as CacheState.New are left out of the result.
    :return: the new SyncState, with removed files/dirs (present in the cache but not on disk) tagged.
    """
    # The cache file itself must not be treated as content.
    content.RemoveFileByResolvedSourcePath(absoluteCacheFileName)
    if cachedSyncState is None and allowCacheLoad:
        log.LogPrintVerbose(2, "Examining content of '{0}'".format(content.PrimaryFolder))
        cachedSyncState = SyncState(absoluteCacheFileName)
        cachedSyncState.Load(log)
    else:
        log.LogPrintVerbose(2, "Rescanning content of '{0}'".format(content.PrimaryFolder))
    syncState = SyncState(absoluteCacheFileName)
    # Classify directories: anything not in the cache is 'New' (when allowNew).
    for pathDirRecord in content.Dirs:
        dirState = CreateDirEntry(pathDirRecord.RelativePath)
        cachedState = cachedSyncState.TryGetDirState(dirState) if cachedSyncState is not None else None
        if cachedState is None and allowNew:
            dirState.CacheState = CacheState.New
        if addNewFilesAndDirs or dirState.CacheState != CacheState.New:
            syncState.AddDir(dirState)
    # Classify files (checksum reuse handled by BuildContentState).
    for pathFileRecord in content.Files:
        fileState = syncState.BuildContentState(log, pathFileRecord, allowCaching, allowNew, cachedSyncState)
        if addNewFilesAndDirs or fileState.CacheState != CacheState.New:
            syncState.Add(fileState)
    # Tag the sync state with information about which files were removed compared the the last time we synced
    if cachedSyncState is not None:
        for entry in list(cachedSyncState.Entries.values()):
            if not entry.Name in syncState.Entries:
                syncState.AddRemoved(entry)
        # Tag the sync state with information about which dirs were removed compared the the last time we synced
        for entry in list(cachedSyncState.Dirs.values()):
            if not entry.Name in syncState.Dirs:
                syncState.AddRemoved(entry)
    return syncState
def ValidatePlatform(log: Log, platformName: str, features: List[str], verbosityLevel: int = 1) -> None:
    """
    Run the platform-specific configuration checks for the named platform (case-insensitive).
    Platforms without checks just log a notice; a completion message is always logged.
    """
    platformCheckers = {
        PackageConfig.PlatformNameString.WINDOWS.lower(): CheckWindows,
        PackageConfig.PlatformNameString.UBUNTU.lower(): CheckUbuntu,
        PackageConfig.PlatformNameString.YOCTO.lower(): CheckYocto,
    }
    checker = platformCheckers.get(platformName.lower())
    if checker is not None:
        checker(log, features, verbosityLevel)
    else:
        log.LogPrintVerbose(verbosityLevel, "No configuration checks available for this platform")
    log.LogPrintVerbose(verbosityLevel, "Configuration check completed successfully.")
def ToFile(log: Log, filename: str, graph: ResolvedPackageGraph) -> None:
    """
    Render the graph to '<filename>.png' by writing a temporary '<filename>.dot' file and
    invoking graphviz 'dot'. The .dot file is removed in both the success and failure path;
    a failure to run 'dot' prints a warning and re-raises.
    """
    dotFilename = "{0}.dot".format(filename)
    log.LogPrintVerbose(LocalVerbosityLevel.Debug, "Writing dot file to '{0}'".format(dotFilename))
    IOUtil.WriteFileIfChanged(dotFilename, "\n".join(DotUtil.__ToDotFile(graph)))
    outputFile = "{0}.png".format(filename)
    try:
        log.LogPrintVerbose(LocalVerbosityLevel.Debug, "Writing png file to '{0}'".format(outputFile))
        subprocess.call(["dot", "-Tpng", "-o{0}".format(outputFile), dotFilename])
        os.remove(dotFilename)
    except Exception:
        print("WARNING: Failed to execute dot, is it part of the path?")
        os.remove(dotFilename)
        raise
def CheckYocto(log: Log, features: List[str], verbosityLevel: int) -> None:
    """
    Run the Yocto platform configuration checks (the GLES specific check is currently disabled).
    """
    log.LogPrintVerbose(verbosityLevel, "Running Yocto checks")
    CheckCommon(log, verbosityLevel)
    glesFeatureNames = {"EGL", "OpenGLES2", "OpenGLES3", "OpenGLES3.1"}
    checkedGLES = False
    for featureName in features:
        if not checkedGLES and featureName in glesFeatureNames:
            #CheckYoctoGLES(log)
            checkedGLES = True
def FindRequestedPackages(log: Log, allPackages: List[Package], requestedFiles: Optional[List[str]]) -> List[Package]:
    """
    Return the packages whose package file matches one of the requested file paths.
    Logs a verbose note when not every requested file could be matched to a package.
    """
    if requestedFiles is None or len(requestedFiles) <= 0:
        return []
    requestedFileSet = set(requestedFiles)
    matchedPackages = [candidate for candidate in allPackages
                       if candidate.TraceContext.PackageFile is not None
                       and candidate.TraceContext.PackageFile.AbsoluteFilePath in requestedFileSet]
    if len(matchedPackages) != len(requestedFiles):
        log.LogPrintVerbose(2, "Could not locate all requested files")
    return matchedPackages
def __TryValidateInstallation(log: Log, validationEngine: ValidationEngine, package: Package, packagesToBuild: List[Package],
                              recipePackageStateCache: RecipePackageStateCache, cmakeConfig: GeneratorCMakeConfig) -> bool:
    """
    Check whether the package's recipe installation is valid.

    Order of checks: install directory exists -> user build-override file -> validation
    engine -> (unless overridden) saved build information.
    :return: True when the installation is considered valid, False when a (re)build is needed.
    :raises Exception: when the package has no recipe, or a build override exists but validation fails.
    """
    if package.ResolvedDirectExperimentalRecipe is None:
        raise Exception("Invalid package")
    sourceRecipe = package.ResolvedDirectExperimentalRecipe
    installPath = sourceRecipe.ResolvedInstallLocation
    if installPath is not None:
        if not IOUtil.IsDirectory(installPath.ResolvedPath):
            log.LogPrintVerbose(2, "Installation directory not located: {0}".format(installPath.ResolvedPath))
            return False
        elif log.Verbosity >= 2:
            log.LogPrint("Installation directory located at '{0}'".format(installPath.ResolvedPath))
    # Check if the user decided to do a build override by creating the required file.
    # This allows the user to tell the system that it has been build and it should mind its own buisness
    packageHasUserBuildOverride = False
    if not installPath is None:
        overrideFilename = IOUtil.Join(installPath.ResolvedPath, __g_BuildPackageInformationOverrideFilename)
        packageHasUserBuildOverride = IOUtil.IsFile(overrideFilename)
        if packageHasUserBuildOverride:
            log.LogPrint("Package {0} contained a build override file '{1}'".format(package.Name, __g_BuildPackageInformationOverrideFilename))
    if not __RunValidationEngineCheck(validationEngine, package):
        # A override combined with failed validation is a hard error (overrideFilename is
        # always bound here since the override can only be set when installPath is not None).
        if packageHasUserBuildOverride:
            raise Exception("Package {0} contained a build override file '{1}', but it failed validation. Fix the issues or delete the override file '{2}'".format(package.Name, __g_BuildPackageInformationOverrideFilename, overrideFilename))
        log.LogPrintVerbose(2, "Install validation failed")
        return False
    # If there is a user build override we dont check the build dependency json file
    if packageHasUserBuildOverride:
        return True
    # If there is no build pipeline we consider the validation to be completed, else we need to check the saved build info
    if not PackageRecipeUtil.HasBuildPipeline(package):
        return True
    if not BuildInfoFileUtil.TryValidateBuildInformation(log, package, packagesToBuild, recipePackageStateCache, cmakeConfig,
                                                         __g_BuildPackageInformationFilename):
        log.LogPrintVerbose(2, "Install validator failed to load build information")
        return False
    return True
def __ResolveEvaluationPackages(log: Log, allPackages: List[UnresolvedBasicPackage]) -> List[EvaluationPackage]:
    """
    Create a EvaluationPackage per input package and resolve its direct dependencies
    (both plain dependencies and per-flavor-option dependencies) against the name lookup.

    Missing dependency names are collected and raised together at the end via
    __CheckForExceptions rather than aborting on the first failure.
    """
    # Build lookup dict
    packageNameToNodeDict = PackageBuildOrder.__CreatePackageNameToPackageDict(log, allPackages)
    log.LogPrintVerbose(LocalVerbosityLevel.Info, "Building evaluation packages")
    # then resolve all direct dependencies
    res = []  # type: List[EvaluationPackage]
    exceptionList = None  # type: Optional[List[Exception]]
    for record in packageNameToNodeDict.values():
        package = record.Unresolved
        # Plain direct dependencies (no flavor info attached).
        for dep in package.DirectDependencies:
            if dep.Name not in packageNameToNodeDict:
                exceptionList = PackageBuildOrder.__AddException(
                    exceptionList,
                    PackageBuildOrder.__CreatePackageDependencyNotFoundException(package, dep, allPackages))
            else:
                depRecord = packageNameToNodeDict[dep.Name]  # type: ResolveDepRecord
                record.Resolved.DirectDependencies.append(EvaluationPackage.DependencyRecord(depRecord.Resolved, None))
        # Flavor option dependencies carry a FlavorInfo describing which option pulled them in.
        for flavor in package.Flavors:
            for flavorOption in flavor.Options:
                for dep in flavorOption.DirectDependencies:
                    if dep.Name not in packageNameToNodeDict:
                        exceptionList = PackageBuildOrder.__AddException(
                            exceptionList,
                            PackageBuildOrder.__CreatePackageFlavorOptionDependencyNotFoundException(package, flavor, flavorOption, dep, allPackages))
                    else:
                        depRecord = packageNameToNodeDict[dep.Name]
                        record.Resolved.DirectDependencies.append(
                            EvaluationPackage.DependencyRecord(depRecord.Resolved, FlavorInfo(flavor.Name, flavorOption.Name)))
        record.Resolved.Seal()
        res.append(record.Resolved)
    PackageBuildOrder.__CheckForExceptions(exceptionList)
    return res
def ResolveBuildOrder(log: Log, allPackages: List[UnresolvedBasicPackage]) -> List[UnresolvedBasicPackage]:
    """
    Determine the order the packages must be built in (dependencies before dependants),
    ending with the synthetic top level package. An empty input yields a list containing
    only the synthetic top level package. Flavor constraints are validated before returning.
    """
    log.LogPrintVerbose(LocalVerbosityLevel.Info, "Resolve build order")
    log.PushIndent()
    try:
        orderedPackages = []  # type: List[UnresolvedBasicPackage]
        if len(allPackages) <= 0:
            log.LogPrintVerbose(LocalVerbosityLevel.Info, "No packages supplied")
            orderedPackages.append(UnresolvedBasicPackage.Create2(UnresolvedPackageName(PackageNameMagicString.TopLevelName),
                                                                  PackageType.TopLevel))
        else:
            evaluationPackages = PackageBuildOrder.__ResolveEvaluationPackages(log, allPackages)  # type: List[EvaluationPackage]
            PackageBuildOrder.__ValidateDependencies(log, evaluationPackages)
            log.LogPrintVerbose(LocalVerbosityLevel.Info,
                                "Determining build order for {0} packages".format(len(allPackages)))
            graph, topLevelNode = PackageBuildOrder.__CreateDependencyGraph(log, evaluationPackages)
            for orderedNode in graph.DetermineBuildOrder(topLevelNode.Source):
                orderedPackages.append(orderedNode.SourcePackage)
            orderedPackages.append(topLevelNode.Source.SourcePackage)
        PackageBuildOrder.__ValidateConstraints(log, orderedPackages)
        return orderedPackages
    finally:
        log.PopIndent()
def __CreateTopLevelNode(log: Log, graph: DependencyGraph) -> DependencyGraphNode:
    """
    Add a synthetic top level node (depending on every current root) to the graph and
    return it, so the global build order can be determined from a single root.
    """
    log.LogPrintVerbose(LocalVerbosityLevel.Info, "Creating top level node to determine global build order")
    topLevelPackage = PackageBuildOrder.__CreateTopLevelPackage(log, graph)  # type: EvaluationPackage
    topLevelNode = graph.AddNode(topLevelPackage)
    graph.AddPackageDirectDependencies(topLevelNode)
    return topLevelNode
def __ValidateConstraints(log: Log, finalBuildOrder: List[UnresolvedBasicPackage]) -> None:
    """Validate the flavor constraints of every package in the final build order."""
    log.LogPrintVerbose(LocalVerbosityLevel.Info, "Validating flavor constraints")
    lookupDict = {entry.Name: entry for entry in finalBuildOrder}  # type: Dict[UnresolvedPackageName, UnresolvedBasicPackage]
    depStack = []  # type: List[UnresolvedPackageName]
    for candidate in finalBuildOrder:
        PackageBuildOrder.__ValidatePackageConstraints(log, lookupDict, depStack, candidate)
def Build(log: Log, allPackages: List[UnresolvedBasicPackage]) -> ResolvedPackageGraph:
    """Resolve the build order for allPackages, then build and return the resolved instance graph."""
    buildOrder = PackageBuildOrder.ResolveBuildOrder(log, allPackages)  # type: List[UnresolvedBasicPackage]
    log.LogPrintVerbose(LocalVerbosityLevel.Info, "Building instance graph")
    log.PushIndent()
    try:
        resolveQueue = PackageResolveQueue(buildOrder)
        return PackageGraphBuilder.__BuildInstanceGraph(log, resolveQueue)
    finally:
        log.PopIndent()
def __ValidateDependencies(log: Log, evaluationPackages: List[EvaluationPackage]) -> None:
    """Verify each package's dependencies are acyclic; reports via __CheckForExceptions."""
    log.LogPrintVerbose(LocalVerbosityLevel.Info, "Validating that the dependencies are acyclic")
    exceptionList = None  # type: Optional[List[Exception]]
    for candidate in evaluationPackages:
        try:
            PackageBuildOrder.__ValidateDependenciesFor(candidate)
        except Exception as ex:  # pylint: disable=broad-except
            exceptionList = PackageBuildOrder.__AddException(exceptionList, ex)
            # for now just break on the first exception
            break
    PackageBuildOrder.__CheckForExceptions(exceptionList)
def FilterOnConditions(log: Log, generatorInfo: GeneratorInfo, sourceList: List[FilterElementType], debugHelp: str) -> List[FilterElementType]:
    """
    Return the elements whose IfCondition is absent or evaluates to True; skipped
    elements are logged at verbosity 2 using debugHelp as the element kind description.
    """
    result = []  # type: List[FilterElementType]
    for candidate in sourceList:
        conditionFailed = (candidate.IfCondition is not None and
                           not ElementIfConditionUtil.CheckCondition(candidate.IfCondition, generatorInfo, debugHelp))
        if conditionFailed:
            log.LogPrintVerbose(2, "Skipped {0} name '{1}' because of condition '{2}'".format(debugHelp, candidate.Name, candidate.IfCondition))
        else:
            result.append(candidate)
    return result
def FilterOnConditionsDependency(log: Log, generatorInfo: GeneratorInfo, sourceList: List[XmlGenFileDependency]) -> List[XmlGenFileDependency]:
    """
    Return the dependencies whose IfCondition is absent or evaluates to True; skipped
    dependencies are logged at verbosity 2.
    """
    result = []  # type: List[XmlGenFileDependency]
    for candidate in sourceList:
        conditionFailed = (candidate.IfCondition is not None and
                           not ElementIfConditionUtil.CheckCondition(candidate.IfCondition, generatorInfo))
        if conditionFailed:
            log.LogPrintVerbose(2, "Skipped Dependency name '{0}' because of condition '{1}'".format(candidate.Name, candidate.IfCondition))
        else:
            result.append(candidate)
    return result
def __ProcessSourceFile(log: Log, package: Package, fullPath: str, repairEnabled: bool, thirdpartyExceptionDir: Optional[str], disableWrite: bool) -> bool:
    """
    Scan a single source file for ASCII and tab issues, then run the repair pass when enabled.
    :return: True when no issues were found.
    """
    log.LogPrintVerbose(10, "- Scanning '{0}'".format(fullPath))
    sourceFile = SourceFile(package, fullPath)
    # The ASCII repair is not safe, so dont do it (the flag stays False even on ASCII errors).
    asciiRepair = False
    asciiOk = __CheckASCII(log, sourceFile, repairEnabled)
    tabsOk = __CheckTabs(log, sourceFile, repairEnabled, thirdpartyExceptionDir)
    if repairEnabled:
        __Repair(log, sourceFile, asciiRepair, disableWrite)
    return asciiOk and tabsOk
def __ProcessSyncFiles(self, log: Log, contentBuildPath: str, contentOutputPath: str, srcContent: Content,
                       syncState: BuildState.SyncState, outputSyncState: BuildState.SyncState) -> None:
    """
    Copy each source content file to the output folder unless both the source entry and the
    existing output entry are unmodified according to the sync states.

    On a copy failure both sync states are saved before re-raising, so already processed
    files are not reprocessed on the next run. Every file (copied or not) gets a fresh
    entry in outputSyncState.
    """
    dstRoot = GetContentOutputContentRootRecord(log, contentOutputPath)
    for contentFile in srcContent.Files:
        # Generate the output file record
        outputFileRecord = GetContentSyncOutputFilename(log, dstRoot, contentFile)
        outputFileName = contentFile.RelativePath
        ## Query the sync state of the content file
        syncStateFileName = self.__GetSyncStateFileName(contentFile.SourceRoot.ResolvedPath, contentFile.RelativePath)
        contentState = syncState.TryGetFileStateByFileName(syncStateFileName)
        # Copy when the source file is unknown or anything but 'Unmodified'.
        buildResource = contentState is None or contentState.CacheState != BuildState.CacheState.Unmodified
        if not buildResource:
            # It was unmodified, so we need to examine the state of the output file to
            # determine if its safe to skip the building
            syncStateOutputFileName = self.__GetSyncStateFileName(contentOutputPath, outputFileName)
            outputContentState = outputSyncState.TryGetFileStateByFileName(syncStateOutputFileName)
            # NOTE: truthiness test - a missing (None/falsy) output state also forces the copy.
            buildResource = not outputContentState or outputContentState.CacheState != BuildState.CacheState.Unmodified
        if buildResource:
            try:
                log.LogPrintVerbose(2, "Copying '{0}' to '{1}'".format(contentFile.ResolvedPath, outputFileRecord.ResolvedPath))
                dstDirPath = IOUtil.GetDirectoryName(outputFileRecord.ResolvedPath)
                IOUtil.SafeMakeDirs(dstDirPath)
                shutil.copy(contentFile.ResolvedPath, outputFileRecord.ResolvedPath)
            except:
                # Save if a exception occured to prevent reprocessing the working files
                outputSyncState.Save()
                syncState.Save()
                raise
        # Add a entry for the output file
        outputFileState = outputSyncState.BuildContentState(log, outputFileRecord, True, True)
        outputSyncState.Add(outputFileState)