def __AppendAdditional(self, log: Log, rPathDirRecords: List[PathRecord], pathFileRecords: List[PathRecord], additionalFiles: Optional[List[PathRecord]]) -> None:
    """Merge additionalFiles into the existing dir/file record lists in place.

    :param log: log context forwarded to new PathRecord instances.
    :param rPathDirRecords: existing directory records; new parent directories of
                            additional files are appended here (deduplicated by RelativeId).
    :param pathFileRecords: existing file records; each additional file is appended here.
    :param additionalFiles: extra file records to merge, or None for a no-op.
    :raises Exception: if an additional file collides with an already registered RelativeId.
    """
    if additionalFiles is None:
        return
    # Seed lookup structures from the current content so duplicates can be detected.
    uniqueDirs = {dirEntry.RelativeId for dirEntry in rPathDirRecords}
    uniqueFiles = {entry.RelativeId: entry for entry in pathFileRecords}
    for entry in additionalFiles:
        dirName = IOUtil.GetDirectoryName(entry.RelativePath)
        if len(dirName) > 0:
            # Register the parent directory if it has not been seen before.
            dirId = dirName.lower()
            if dirId not in uniqueDirs:
                uniqueDirs.add(dirId)
                rPathDirRecords.append(PathRecord(log, entry.SourceRoot, dirName))
        if entry.RelativeId in uniqueFiles:
            raise Exception("The relative file name '{0}' has already been added by '{1}' and '{2}' tried to add it again".format(uniqueFiles[entry.RelativeId].RelativePath, uniqueFiles[entry.RelativeId].ResolvedPath, entry.ResolvedPath))
        pathFileRecords.append(entry)
        uniqueFiles[entry.RelativeId] = entry
def __init__(self, log: Log, sourcePath: str, includeSourcePathContent: bool, additionalFiles: Optional[List[PathRecord]] = None) -> None:
    """Scan sourcePath (optionally) and build sorted dir/file PathRecord lists.

    :param log: log context forwarded to created records.
    :param sourcePath: the primary content folder; stored as PrimaryFolder.
    :param includeSourcePathContent: when True, the folder is scanned recursively.
    :param additionalFiles: optional extra file records merged in via __AppendAdditional.
    """
    super().__init__()
    folderRoot = ContentRootRecord(log, sourcePath)
    pathDirRecords = []   # type: List[PathRecord]
    pathFileRecords = []  # type: List[PathRecord]
    if includeSourcePathContent:
        dirs, files = self.__GetDirAndFilePaths(sourcePath)
        # Strip '<resolved root>/' from each absolute path to get the relative part.
        prefixLength = len(folderRoot.ResolvedPath) + 1
        pathDirRecords = [PathRecord(log, folderRoot, dirPath[prefixLength:]) for dirPath in dirs]
        pathFileRecords = [PathRecord(log, folderRoot, filePath[prefixLength:]) for filePath in files]
    self.__AppendAdditional(log, pathDirRecords, pathFileRecords, additionalFiles)
    self.PrimaryFolder = sourcePath
    self.Dirs = pathDirRecords
    self.Files = pathFileRecords
    self.Dirs.sort(key=lambda record: record.Id)
    self.Files.sort(key=lambda record: record.Id)
def __ProcessContentFiles(self, log: Log, configDisableWrite: bool, contentBuildPath: str, contentOutputPath: str, contentProcessorManager: ContentProcessorManager, srcContent: Content, syncState: BuildState.SyncState, outputSyncState: BuildState.SyncState) -> None:
    """Run the matching content processor over each source file that is out of date.

    For every file that has a processor, the source sync state and the output sync
    state (including the source-checksum tag) decide whether a rebuild is needed.
    Sync states are saved on processing failure so already-built files are not redone.

    :param log: log context.
    :param configDisableWrite: forwarded to the processor (dry-run support).
    :param contentBuildPath: root of the source content.
    :param contentOutputPath: root of the generated output.
    :param contentProcessorManager: resolves a processor per content file.
    :param srcContent: the content description whose Files are processed.
    :param syncState: sync state of the source files.
    :param outputSyncState: sync state of the generated files; updated per output.
    """
    dstRoot = ContentRootRecord(log, contentOutputPath)
    for contentFile in srcContent.Files:
        processor = contentProcessorManager.TryFindContentProcessor(contentFile)
        if processor is None:
            continue
        # Query the processor for the output filename
        outputFileName = processor.GetOutputFileName(log, contentOutputPath, contentFile)
        outputFileRecord = PathRecord(log, dstRoot, outputFileName[len(dstRoot.ResolvedPath) + 1:])
        # Query the sync state of the content file
        syncStateFileName = self.__GetSyncStateFileName(contentBuildPath, contentFile.RelativePath)
        contentState = syncState.TryGetFileStateByFileName(syncStateFileName)
        buildResource = contentState is None or contentState.CacheState != BuildState.CacheState.Unmodified
        # BUGFIX: was 'if buildResource is not None:' which is always True for a bool,
        # so the output-state check ran even for modified sources and could wrongly
        # cancel a rebuild. Match the intent (and __ProcessSyncFiles): only consult
        # the output state when the source itself was unmodified.
        if not buildResource:
            # It was unmodified, so we need to examine the state of the output file to
            # determine if its safe to skip the building
            syncStateOutputFileName = self.__GetSyncStateFileName(contentOutputPath, outputFileName)
            outputContentState = outputSyncState.TryGetFileStateByFileName(syncStateOutputFileName)
            buildResource = (outputContentState is None or
                             outputContentState.CacheState != BuildState.CacheState.Unmodified or
                             (contentState is None or contentState.Checksum != outputContentState.TagChecksum))
        if buildResource:
            try:
                processor.Process(log, configDisableWrite, contentBuildPath, contentOutputPath, contentFile)
            except:
                # Save if a exception occured to prevent reprocessing the working files, but we invalidate
                outputSyncState.Save()
                syncState.Save()
                raise
            # Add a entry for the output file
            outputFileState = outputSyncState.BuildContentState(log, outputFileRecord, True, True)
            # Tag it with the source file checksum so we have another way to detect changes
            if contentState is not None:
                outputFileState.TagChecksum = contentState.Checksum
            outputSyncState.Add(outputFileState)
def __ProcessSyncFiles(self, log: Log, contentBuildPath: str, contentOutputPath: str, srcContent: Content, syncState: BuildState.SyncState, outputSyncState: BuildState.SyncState) -> None:
    """Copy each source content file to the output folder unless both its source
    and output sync states report it as unmodified.

    Sync states are saved when a copy fails so completed work is not repeated.

    :param log: log context.
    :param contentBuildPath: root of the source content (unused for the state key;
                             the file's own SourceRoot is used instead).
    :param contentOutputPath: root the files are copied into.
    :param srcContent: the content description whose Files are synced.
    :param syncState: sync state of the source files.
    :param outputSyncState: sync state of the copied files; updated per output.
    """
    dstRoot = ContentRootRecord(log, contentOutputPath)
    for srcFile in srcContent.Files:
        # Build the record describing where this file lands in the output tree.
        relativeName = srcFile.RelativePath
        dstFileRecord = PathRecord(log, dstRoot, relativeName)
        # Look up the source file's sync state.
        srcStateKey = self.__GetSyncStateFileName(srcFile.SourceRoot.ResolvedPath, srcFile.RelativePath)
        srcState = syncState.TryGetFileStateByFileName(srcStateKey)
        mustCopy = srcState is None or srcState.CacheState != BuildState.CacheState.Unmodified
        if not mustCopy:
            # Source unchanged - the copy is only skippable if the output is also unchanged.
            dstStateKey = self.__GetSyncStateFileName(contentOutputPath, relativeName)
            dstState = outputSyncState.TryGetFileStateByFileName(dstStateKey)
            mustCopy = not dstState or dstState.CacheState != BuildState.CacheState.Unmodified
        if not mustCopy:
            continue
        try:
            log.LogPrintVerbose(2, "Copying '{0}' to '{1}'".format(srcFile.ResolvedPath, dstFileRecord.ResolvedPath))
            dstDirPath = IOUtil.GetDirectoryName(dstFileRecord.ResolvedPath)
            IOUtil.SafeMakeDirs(dstDirPath)
            shutil.copy(srcFile.ResolvedPath, dstFileRecord.ResolvedPath)
        except:
            # Save if a exception occured to prevent reprocessing the working files
            outputSyncState.Save()
            syncState.Save()
            raise
        # Add a entry for the output file
        dstFileState = outputSyncState.BuildContentState(log, dstFileRecord, True, True)
        outputSyncState.Add(dstFileState)
def __ProcessContentFiles(self, config: Config, contentBuildPath: str, contentOutputPath: str, toolFinder: ToolFinder, contentProcessors: List[BasicContentProcessor], srcContent: Content, syncState: BuildState.SyncState, outputSyncState: BuildState.SyncState) -> None:
    """Run the single matching content processor over each source file that is out of date.

    For every file with exactly one processor, the source sync state and the output
    sync state (including the source-checksum tag) decide whether a rebuild is needed.
    Sync states are saved on processing failure so already-built files are not redone.

    :param config: config/log context.
    :param contentBuildPath: root of the source content.
    :param contentOutputPath: root of the generated output.
    :param toolFinder: forwarded to the processor.
    :param contentProcessors: candidate processors matched by resolved path.
    :param srcContent: the content description whose Files are processed.
    :param syncState: sync state of the source files.
    :param outputSyncState: sync state of the generated files; updated per output.
    :raises Exception: if more than one processor claims the same file.
    """
    dstRoot = ContentRootRecord(config, contentOutputPath)
    for contentFile in srcContent.Files:
        processors = self.__FindProcessors(contentProcessors, contentFile.ResolvedPath)
        if len(processors) > 1:
            contentProcessorNames = [processor.Name for processor in processors]
            raise Exception("Multiple content processors '%s' available for '%s'" % (contentProcessorNames, contentFile.ResolvedPath))
        if len(processors) == 1:
            # Query the processor for the output filename
            outputFileName = processors[0].GetOutputFileName(config, contentOutputPath, contentFile)
            outputFileRecord = PathRecord(config, dstRoot, outputFileName[len(dstRoot.ResolvedPath) + 1:])
            # Query the sync state of the content file
            syncStateFileName = self.__GetSyncStateFileName(contentBuildPath, contentFile.RelativePath)
            contentState = syncState.TryGetFileStateByFileName(syncStateFileName)
            buildResource = contentState is None or contentState.CacheState != BuildState.CacheState.Unmodified
            # BUGFIX: was 'if buildResource is not None:' which is always True for a bool,
            # so the output-state check ran even for modified sources and could wrongly
            # cancel a rebuild. Match the intent (and __ProcessSyncFiles): only consult
            # the output state when the source itself was unmodified.
            if not buildResource:
                # It was unmodified, so we need to examine the state of the output file to
                # determine if its safe to skip the building
                syncStateOutputFileName = self.__GetSyncStateFileName(contentOutputPath, outputFileName)
                outputContentState = outputSyncState.TryGetFileStateByFileName(syncStateOutputFileName)
                buildResource = (outputContentState is None or
                                 outputContentState.CacheState != BuildState.CacheState.Unmodified or
                                 (contentState is None or contentState.Checksum != outputContentState.TagChecksum))
            if buildResource:
                try:
                    processors[0].Process(config, contentBuildPath, contentOutputPath, contentFile, toolFinder)
                except:
                    # Save if a exception occured to prevent reprocessing the working files, but we invalidate
                    outputSyncState.Save()
                    syncState.Save()
                    raise
                # Add a entry for the output file
                outputFileState = outputSyncState.BuildContentState(config, outputFileRecord, True, True)
                # Tag it with the source file checksum so we have another way to detect changes
                if contentState is not None:
                    outputFileState.TagChecksum = contentState.Checksum
                outputSyncState.Add(outputFileState)
def GetContentSyncOutputFilename(log: Log, dstRoot: ContentRootRecord, contentFile: PathRecord) -> PathRecord:
    """Build the output-side PathRecord for a synced content file: the same
    relative path as contentFile, rooted at dstRoot."""
    relativePath = contentFile.RelativePath
    return PathRecord(log, dstRoot, relativePath)