class ProjData (object) :

    def __init__(self, pid, gid = None) :
        '''Initialize the class and create the object.'''

        self.pid            = pid
        self.gid            = gid
        self.tools          = Tools()
        self.user           = UserConfig()
        self.userConfig     = self.user.userConfig
        self.local          = ProjLocal(pid)
        self.log            = ProjLog(pid)
        self.projHome       = os.path.join(os.path.expanduser(self.userConfig['Resources']['projects']), self.pid)
        self.projList       = self.tools.getProjIdList(os.path.expanduser(self.userConfig['Resources']['projects']))

        # Log messages for this module
        self.errorCodes     = {
            '1220' : ['LOG', 'Project [<<1>>] already registered in the system.'],
            '1240' : ['ERR', 'Could not find/open the Project configuration file for [<<1>>]. Project could not be registered!'],
            '3410' : ['LOG', 'Backup file cull skipping: [<<1>>] Not a recognized Rapuma backup file name format.'],
            '3510' : ['ERR', 'The path (or name) given is not valid: [<<1>>].'],
            '3530' : ['MSG', 'Project backup: [<<1>>] has been restored to: [<<2>>]. A backup of the original project remains and must be manually removed.'],
            '3550' : ['ERR', 'Project backup version request: [<<1>>] exceeds the maximum number that could be in storage, which is: [<<2>>]. Request an earlier (lesser) version.'],
            '3610' : ['ERR', 'The [<<1>>] project is not registered. No backup was done.'],
            '3620' : ['ERR', 'The path to the backup folder is not valid [<<1>>]. Please try again.'],
            '3622' : ['ERR', 'The path to the backup folder is not set. Please set it and try again.'],
            '3625' : ['ERR', 'The path given to the backup folder is not valid [<<1>>]. Please set the system backup path.'],
            '3630' : ['MSG', 'Backup for [<<1>>] created and saved to: [<<2>>]'],
            '4110' : ['MSG', 'Completed merging data.'],
            '4120' : ['MSG', 'No files updated.'],
            '4130' : ['MSG', 'Added: <<1>> file(s).'],
            '4140' : ['MSG', 'Updated: <<1>> file(s)'],
            '4150' : ['WRN', 'The project data in: [<<1>>] will be replaced with the data from: [<<2>>].'],
            '4210' : ['MSG', 'Completed pulling/restoring data from the cloud.'],
            '4220' : ['ERR', 'Cannot resolve path: [<<1>>]'],
            '4250' : ['ERR', 'The cloud project [<<1>>] you want to pull from is owned by [<<2>>]. Use force (-f) to pull the project and change the local owner ID.'],
            '4260' : ['ERR', 'The local project [<<1>>] is newer than the cloud copy. If you seriously want to overwrite it, use force (-f) to do so.'],
            '4270' : ['MSG', 'Restored the project [<<1>>] from the cloud copy. Local copy is owned by [<<2>>].'],
        }

###############################################################################
############################## General Functions ##############################
###############################################################################
####################### Error Code Block Series = 1000 ########################
###############################################################################

###############################################################################
######################### Archive Project Functions ##########################
###############################################################################
####################### Error Code Block Series = 2000 ########################
###############################################################################

    def makeExcludeFileList (self, source) :
        '''Return a list of files that do not need to be included in a backup,
        template or archive. These will be all auto-generated files that contain
        system-specific paths, etc.'''

        excludeFiles        = []
        excludeTypes        = ['delayed', 'log', 'notepages', 'parlocs', 'pdf', 'tex', 'piclist', 'adj', 'zip']
        excludeFolders      = ['Draft', 'Final', 'HelperScript', 'Proof']

        # Process the excluded folders
        for root, dirs, files in os.walk(source) :
            for fileName in files :
                if os.path.basename(root) in excludeFolders :
                    excludeFiles.append(os.path.join(root, fileName))
                else :
                    # Get rid of edited backup files
                    if fileName[-1] == '~' :
                        excludeFiles.append(os.path.join(root, fileName))
                        continue
                    ext = os.path.splitext(fileName)[1][1:]
                    if ext in excludeTypes :
                        # A special indicator for files we want to keep
                        if fileName.find('-ext.') > 0 :
                            continue
                        # Otherwise this file type is excluded
                        excludeFiles.append(os.path.join(root, fileName))

        return excludeFiles

    # FIXME: Should archiveProject() use self.pid instead of explicitly passing in a pid?
    def archiveProject (self, pid, path = None) :
        '''Archive a project. Send the compressed archive file to the
        user-specified archive folder. If none is specified, put the archive in
        cwd. If a valid path is specified, send it to that location. Like
        backup, this too will overwrite any existing file of the same name. The
        difference is that this will also disable the project so it cannot be
        accessed by Rapuma. When a project is archived, all work should cease
        on the project.'''

        # Make a private project object just for archiving
        aProject = Project(pid, self.gid)
        # Set some paths and file names
        archName = aProject.projectIDCode + '.rapuma'
        userArchives = self.userConfig['Resources']['archive']
        archTarget = ''
        if path :
            path = self.tools.resolvePath(path)
            if os.path.isdir(path) :
                archTarget = os.path.join(path, archName)
            else :
                self.tools.terminal('\nError: The path given is not valid: [' + path + ']\n')
                self.tools.dieNow()
        elif os.path.isdir(userArchives) :
            archTarget = os.path.join(userArchives, archName)
        elif os.path.isdir(os.path.dirname(aProject.local.projHome)) :
            # Default to the dir just above the project
            archTarget = os.path.dirname(aProject.local.projHome)
        else :
            self.tools.terminal('\nError: Cannot resolve a path to create the archive file!\n')
            self.tools.dieNow()

        # Get a list of files we don't want
        excludeFiles = self.makeExcludeFileList(aProject.local.projHome)

        self.zipUpProject(archTarget, excludeFiles)

        # Rename the source dir to indicate it was archived
        bakArchProjDir = aProject.local.projHome + '(archived)'
        if os.path.isdir(bakArchProjDir) :
            self.tools.terminal('\nError: Cannot complete archival process!\n')
            self.tools.terminal('\nAnother archived version of this project exists with the folder name of: ' + self.tools.fName(bakArchProjDir) + '\n')
            self.tools.terminal('\nPlease remove or rename it and then repeat the process.\n')
            self.tools.dieNow()
        else :
            os.rename(aProject.local.projHome, bakArchProjDir)

        # Finish here
        self.tools.terminal('Archive for [' + pid + '] created and saved to: ' + archTarget + '\n')

    def zipUpProject (self, target, excludeFiles = None) :
        '''Zip up a project and deposit it to target location. Be sure to strip
        out all auto-created, user-specific files that could mess up a transfer
        to another system.
This goes for archives and backups''' # import pdb; pdb.set_trace() # In case an exclude list is not given if not excludeFiles : excludeFiles = [] # Do the zip magic here root_len = len(self.local.projHome) with zipfile.ZipFile(target, 'w', compression=zipfile.ZIP_DEFLATED) as myzip : sys.stdout.write('Backing up files') sys.stdout.flush() for root, dirs, files in os.walk(self.local.projHome) : # Chop off the part of the path we do not need to store zip_root = os.path.abspath(root)[root_len:] for f in files : if os.path.join(root, f) in excludeFiles : continue if not f[-1] == '~' : fn, fx = os.path.splitext(f) fullpath = os.path.join(root, f) zip_name = os.path.join(zip_root, f) sys.stdout.write('.') sys.stdout.flush() myzip.write(fullpath, zip_name, zipfile.ZIP_DEFLATED) # Add a space before the next message print '\n' # FIXME: Should restoreArchive() use self.pid instead of explicitly passing in a pid? def restoreArchive (self, pid, targetPath, sourcePath = None) : '''Restore a project from the user specified storage area or sourcePath if specified. Use targetPath to specify where the project will be restored. Rapuma will register the project there.''' # Check to see if the user included the extension try : pid.split('.')[1] == 'rapuma' archName = pid pid = pid.split('.')[0] except : archName = pid + '.rapuma' archSource = '' archTarget = '' userArchives = '' # First look for the archive that is to be restored if sourcePath : if os.path.isdir(sourcePath) : archSource = os.path.join(sourcePath, archName) elif os.path.isdir(self.userConfig['Resources']['archive']) : userArchives = self.userConfig['Resources']['archive'] archSource = os.path.join(userArchives, archName) else : self.tools.terminal('\nError: The path (or name) given is not valid: [' + archSource + ']\n') self.tools.dieNow() # Now set the target params if targetPath : if not os.path.isdir(targetPath) : self.tools.terminal('\nError: The path given is not valid: [' + targetPath + ']\n') self.tools.dieNow() else : archTarget = os.path.join(targetPath, pid) # If we made it this far, extract the archive with zipfile.ZipFile(archSource, 'r') as myzip : myzip.extractall(archTarget) # Permission for executables is lost in the zip, fix it here for folder in ['Scripts', os.path.join('Macros', 'User')] : self.tools.fixExecutables(os.path.join(archTarget, folder)) # FIXME: This will need some work # Add project to local Rapuma project registry # To do this we need to open up the restored project config file # and pull out some settings. 
        local = ProjLocal(pid)
        pc = Config(pid)
        log = ProjLog(pid)
        aProject = Project(pid, self.gid)

#        import pdb; pdb.set_trace()

        # Finish here
        self.tools.terminal('\nRapuma archive [' + pid + '] has been restored to: ' + archTarget + '\n')


###############################################################################
########################### Backup Project Functions ##########################
###############################################################################
####################### Error Code Block Series = 3000 ########################
###############################################################################

    def cullBackups (self, maxBak, bakDir) :
        '''Remove any excess backups from the backup folder in this project.'''

        # Get the maximum number of backups to store
        maxStoreBackups = int(maxBak)
        if not maxStoreBackups or maxStoreBackups == 0 :
            maxStoreBackups = 1

        # Build the cullList
        cullList = []
        files = os.listdir(bakDir)
        for f in files :
            try :
                cullList.append(int(f.split('.')[0]))
            except :
                self.log.writeToLog(self.errorCodes['3410'], [f])
        # Remove oldest file(s)
        while len(cullList) > maxStoreBackups :
            fn = min(cullList)
            cullList.remove(min(cullList))
            os.remove(os.path.join(bakDir, str(fn) + '.zip'))

    def backupProject (self, targetPath=None) :
        '''Backup a project. Send the compressed backup file with a date-stamp
        file name to the user-specified backup folder. If a target path is
        specified, put the archive there but use the PID in the name. If other
        backups with the same name exist there, increment with a number.'''

        # First see if this is even a valid project
        if self.pid not in self.projList :
            self.log.writeToLog(self.errorCodes['3610'], [self.pid])

        # Set some paths and file names
        if not targetPath :
            # Now check for a valid location to backup to
            if self.local.userLibBackup == '' :
                self.log.writeToLog(self.errorCodes['3622'])
            elif not os.path.exists(self.local.userLibBackup) :
                self.log.writeToLog(self.errorCodes['3620'], [self.local.userLibBackup])
            projBackupFolder = os.path.join(self.local.userLibBackup, self.pid)
            backupTarget = os.path.join(projBackupFolder, self.tools.fullFileTimeStamp() + '.zip')
        else :
            projBackupFolder = self.tools.resolvePath(targetPath)
            # Now check for a valid target path
            if not os.path.exists(projBackupFolder) :
                self.log.writeToLog(self.errorCodes['3625'], [targetPath])
            backupTarget = self.tools.incrementFileName(os.path.join(projBackupFolder, self.pid + '.zip'))

        # Make sure the dir is there
        if not os.path.exists(projBackupFolder) :
            os.makedirs(projBackupFolder)

#        import pdb; pdb.set_trace()

        # Zip up but use a list of files we don't want
        self.zipUpProject(backupTarget, self.makeExcludeFileList(self.local.projHome))

        # Cull out any excess backups
        if not targetPath :
            self.cullBackups(self.userConfig['System']['maxStoreBackups'], projBackupFolder)

        # Finish here
        pc = Config(self.pid)
        pc.getProjectConfig()
        pc.projectConfig['Backup']['lastBackup'] = self.tools.fullFileTimeStamp()
        self.tools.writeConfFile(pc.projectConfig)
        self.log.writeToLog(self.errorCodes['3630'], [self.pid, backupTarget])
        return True

    def backupRestore (self, backup, target = None) :
        '''Restore a backup to the current or specified project.'''

#        import pdb; pdb.set_trace()

        if not target :
            target = self.local.projHome

        # Now remove the original
        if os.path.exists(target) :
            shutil.rmtree(target)
        # Create an empty folder to restore to
        os.makedirs(target)

        # If we made it this far, extract the archive
        with zipfile.ZipFile(backup, 'r') as myzip :
            myzip.extractall(target)

        return True

#    def restoreLocalBackup (self,
bakFile) : # '''Restore from a project backup. As a project may have multiple backups in # its backup folder, the user will need to provide a number from 1 to n (n being # the number of backups in the folder, 1 being the most recent and n being the # oldest). If no number is provided, 1, (the most recent) will be restored.''' # # Adjust bNum if needed # maxBak = int(self.userConfig['System']['maxStoreBackups']) # if not bNum : # bNum = 0 # else : # bNum = int(bNum) # if bNum <= 0 : # bNum = 0 # elif bNum > maxBak : # self.log.writeToLog(self.errorCodes['3550'], [str(bNum), str(maxBak)]) # else : # bNum = bNum-1 # # Get vals we need # projHome = self.getProjHome() # projBackupFolder = self.tools.resolvePath(os.path.join(self.userConfig['Resources']['backup'], self.pid)) # # Get the archive file name # files = os.listdir(projBackupFolder) # fns = [] # for f in files : # fns.append(int(f.split('.')[0])) # # Sort the list, last (latest) first # fns.sort(reverse=True) # # Make file path/name # backup = os.path.join(projBackupFolder, str(fns[bNum]) + '.zip') # if not os.path.exists(backup) : # self.log.writeToLog(self.errorCodes['3510'], [backup]) # # Restore the backup # self.backupRestore(backup, projHome) # # Permission for executables is lost in the zip, fix them here # self.tools.fixExecutables(projHome) # # Add helper scripts if needed # if self.tools.str2bool(self.userConfig['System']['autoHelperScripts']) : # ProjCommander(self.pid).updateScripts() # # Finish here (We will leave the project backup in place) # self.log.writeToLog(self.errorCodes['3530'], [self.tools.fName(backup),projHome]) def restoreExternalBackup (self, source, target = None, force = False) : '''Restore a non-existant project from an external backup to a target folder. If no target is provided the project will be installed in the default project folder. The source path and ZIP file must be valid''' # Get/make the (localized) project home reference projHome = self.getProjHome(target) # import pdb; pdb.set_trace() # Create the source backup file name # source = os.path.join(source, self.pid + '.zip') # FIXME: This needs some review and rework # Restore the backup if self.backupRestore(source, projHome) : # Permission for executables is lost in the zip, fix them here self.tools.fixExecutables(projHome) # If this is a new project we will need to register it now self.registerProject(projHome) # Add helper scripts if needed if self.tools.str2bool(self.userConfig['System']['autoHelperScripts']) : ProjCommander(self.pid).updateScripts() # Finish here (We will leave the backup-backup in place) self.tools.terminal('\nRapuma backup [' + self.pid + '] has been restored to: ' + projHome + '\n') return True ############################################################################### ############################ Cloud Backup Functions ########################### ############################################################################### ####################### Error Code Block Series = 4000 ######################## ############################################################################### def isNewerThanCloud (self, cloud, projectConfig) : '''Compare time stamps between the cloud and the local project. Return True if the local project is newer or the same age as the copy in the cloud. 
Return True if the project does not exist in the local copy of the cloud.''' # First see if it exists cConfig = self.getConfig(cloud) if not cConfig : return True elif not cConfig.has_key('Backup') : return True elif not cConfig['Backup'].has_key('lastCloudPush') : return True # Check local for key if not projectConfig.has_key('Backup') : return False elif not projectConfig['Backup'].has_key('lastCloudPush') : return False # Compare if we made it this far cStamp = cConfig['Backup']['lastCloudPush'] lStamp = projectConfig['Backup']['lastCloudPush'] if lStamp >= cStamp : return True def isNewerThanLocal (self, cloud, projectConfig) : '''Compare time stamps between the cloud and the local project. Return True if the cloud project is newer or the same age as the local copy. Return True if the project does not exist in as a local copy.''' # First see if the local exists if not projectConfig : return True # See if cloud is there and up-to-date cloudConfig = self.getConfig(cloud) if not cloudConfig : return False # Compare if we made it this far cStamp = cloudConfig['Backup']['lastCloudPush'] # It is possible the local has never been pushed # If that is the case, local is assumed older try : pStamp = projectConfig['Backup']['lastCloudPush'] except : return False if cStamp >= pStamp : return True def getConfig (self, projHome) : '''Return a valid config object from cloud project.''' # import pdb; pdb.set_trace() projectConfigFile = os.path.join(projHome, 'Config', 'project.conf') if os.path.exists(projectConfigFile) : return ConfigObj(projectConfigFile, encoding='utf-8') def getCloudOwner (self, cloud) : '''Return the owner of a specified cloud project.''' try : return self.getConfig(cloud)['Backup']['ownerID'] except : return None def getLocalOwner (self) : '''Return the owner of a specified cloud project.''' return self.userConfig['System']['userID'] def sameOwner (self, cloud) : '''Return True if the owner of a given cloud is the same as the system user. Also return True if the cloud owner is not present.''' # First check for existence if not self.getCloudOwner(cloud) : return True # Compare if we made it to this point if self.getCloudOwner(cloud) == self.getLocalOwner() : return True def setCloudPushTime (self, projectConfig) : '''Set/reset the lastPush time stamp setting.''' projectConfig['Backup']['lastCloudPush'] = self.tools.fullFileTimeStamp() self.tools.writeConfFile(projectConfig) def buyCloud (self, projectConfig) : '''Change the ownership on a project in the cloud by assigning your userID to the local project cloudOwnerID. Then, using force the next time the project is pushed to the cloud, you will own it.''' projOwnerID = self.userConfig['System']['userID'] projectConfig['Backup']['ownerID'] = projOwnerID self.tools.writeConfFile(projectConfig) def buyLocal (self, projectConfig) : '''Change the ownership on a local project by assigning your userID to it.''' projOwnerID = self.userConfig['System']['userID'] projectConfig['Backup']['ownerID'] = projOwnerID self.tools.writeConfFile(projectConfig) def replaceProject (self, source, target) : '''This will completly replace an existing project (target) with data from another project (source). 
This assumes source and target are valid.''' # We simply just get rid of the target before doing a merge shutil.rmtree(target) self.log.writeToLog(self.errorCodes['4150'], [target, source]) self.mergeProjects(source, target) def mergeProjects(self, source, target) : '''This will merge two Rapuma projects and try to preserve data in the target that is newer than the source. This assumes target and source are valid.''' # Get a list of files we do not want excludeFiles = self.makeExcludeFileList(source) # Get a total list of files from the project cn = 0 cr = 0 # Add space for output message sys.stdout.write('\n') sys.stdout.write('Merging files from: ' + source + ' to: ' + target) sys.stdout.flush() for folder, subs, files in os.walk(source): for fileName in files: # Do not include any backup files we find if fileName[-1] == '~' : continue if os.path.join(folder, fileName) not in excludeFiles : if not os.path.isdir(folder.replace(source, target)) : os.makedirs(folder.replace(source, target)) targetFile = os.path.join(folder, fileName).replace(source, target) sourceFile = os.path.join(folder, fileName) if not os.path.isfile(targetFile) : sys.stdout.write('.') sys.stdout.flush() shutil.copy(sourceFile, targetFile) cn +=1 # Otherwise if the cloud file is older than # the project file, refresh it elif self.tools.isOlder(targetFile, sourceFile) : if os.path.isfile(targetFile) : os.remove(targetFile) sys.stdout.write('.') sys.stdout.flush() shutil.copy(sourceFile, targetFile) cr +=1 # Add space for next message sys.stdout.write('\n') # Report what happened self.log.writeToLog(self.errorCodes['4110']) if cn == 0 and cr == 0 : self.log.writeToLog(self.errorCodes['4120']) else : if cn > 0 : self.log.writeToLog(self.errorCodes['4130'], [str(cn)]) if cr > 0 : self.log.writeToLog(self.errorCodes['4140'], [str(cr)]) return True def getProjHome (self, tPath = None) : '''Return a project home path by checking to see what the best path might be. Provided path gets first dibs, then ''' if tPath : if os.path.isfile(tPath) : return self.local.projHome elif self.tools.resolvePath(tPath) : tPath = self.tools.resolvePath(tPath) lastFolder = os.path.basename(tPath) if lastFolder == self.pid : return tPath else : return os.path.join(tPath, self.pid) else : self.log.writeToLog(self.errorCodes['4220'], [tPath]) elif self.local.projHome : return self.local.projHome else : return self.tools.resolvePath(os.path.join(self.userConfig['Resources']['projects'], self.pid))
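

# --- Added illustration (not part of the original module) --------------------
# A minimal, hedged usage sketch of ProjData's backup path. It assumes the
# Rapuma user configuration already points 'Resources/projects' and the backup
# library at real folders, and that a project with the hypothetical ID 'MYPROJ'
# is registered; the zip target path below is also hypothetical. It is wrapped
# in a function so that importing this module stays side-effect free.
def _exampleProjDataBackup () :
    data = ProjData('MYPROJ')
    # Write a time-stamped zip into the user backup library, skipping the
    # auto-generated files reported by makeExcludeFileList()
    data.backupProject()
    # The same exclusion list can feed an explicit zip target as well
    excludeFiles = data.makeExcludeFileList(data.local.projHome)
    data.zipUpProject('/tmp/MYPROJ-snapshot.zip', excludeFiles)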
class Template (object) :

    def __init__(self, pid) :
        '''Initialize the class and create the object.'''

        self.pid            = pid
        self.tools          = Tools()
        self.user           = UserConfig()
        self.userConfig     = self.user.userConfig
        self.local          = ProjLocal(pid)
        self.log            = ProjLog(pid)
        self.projData       = ProjData(pid)
        self.projList       = self.tools.getProjIdList(os.path.expanduser(self.userConfig['Resources']['projects']))

    def projectToTemplate (self, tid = None) :
        '''Preserve critical project information in a template. The pid is the
        project that the template will be based on. The template will go in the
        template lib.'''

        # Set needed vals
        if not tid :
            tid = self.pid
        tempDir = os.path.join(tempfile.mkdtemp(), tid)
        target = os.path.join(self.local.userLibTemplate, tid + '.zip')

        # Make a temp copy of the project that we can manipulate
        shutil.copytree(self.local.projHome, tempDir)

        # Now make the config files generic for use with any project
        tc = ConfigObj(os.path.join(tempDir, 'Config', 'project.conf'), encoding='utf-8')
        tc['ProjectInfo']['projectTitle'] = ''
        tc['ProjectInfo']['projectIDCode'] = ''
        tc['ProjectInfo']['projectCreateDate'] = ''
        # Remove unnecessary folders
        needNot = ['Component', 'Deliverable', 'Illustration']
        for f in needNot :
            fld = os.path.join(tempDir, f)
            if os.path.exists(fld) :
                shutil.rmtree(fld)
        # Remove unnecessary config files
        needNot = ['adjustment', 'illustration']
        for f in needNot :
            fl = os.path.join(tempDir, 'Config', f + '.conf')
            if os.path.exists(fl) :
                os.remove(fl)
        # Remove unnecessary project config stuff
        needNot = ['Groups', 'Backup']
        for s in needNot :
            if tc.has_key(s) :
                del tc[s]
        # Write out the new template project config file
        tc.filename = os.path.join(tempDir, 'Config', 'project.conf')
        tc.write()
        # Kill the log file
        os.remove(os.path.join(tempDir, 'rapuma.log'))
        # Exclude files
        excludeFiles = self.projData.makeExcludeFileList(tempDir)
        # Zip it up using the above params
        root_len = len(tempDir)
        with zipfile.ZipFile(target, 'w', compression=zipfile.ZIP_DEFLATED) as myzip :
            sys.stdout.write('Creating template')
            sys.stdout.flush()
            for root, dirs, files in os.walk(tempDir):
                # Chop off the part of the path we do not need to store
                zip_root = os.path.abspath(root)[root_len:]
                for f in files:
                    if f[-1] == '~' :
                        continue
                    elif os.path.join(root, f) in excludeFiles :
                        continue
                    elif f.rfind('.') != -1 :
                        fullpath = os.path.join(root, f)
                        zip_name = os.path.join(zip_root, f)
                        sys.stdout.write('.')
                        sys.stdout.flush()
                        myzip.write(fullpath, zip_name, zipfile.ZIP_DEFLATED)
        # Add space for next message
        sys.stdout.write('\n')
        # Remove the temp project dir we made
        self.tools.terminal('\nCompleted creating template: ' + target + '\n')

    def templateToProject (self, targetDir = None, source = None) :
        '''Create a new project based on the provided template ID. If a path to
        the template is not provided it will look in the user's template lib. A
        PID must be provided. That is checked with the system. If the same PID
        is found in the system, it must be removed before rerunning this
        function. If a non-default location is needed, a target path must be
        provided.'''

#        import pdb; pdb.set_trace()

        # Set a default target path
        projHome = os.path.join(self.local.projParentDefaultFolder, self.pid)
        # See if we can build a better target path
        if targetDir :
            projHome = os.path.join(targetDir, self.pid)
        elif self.local.projHome :
            projHome = os.path.join(self.local.projHome, self.pid)

#        self.tools.dieNow()

        # Test to see if the project already exists
        if self.pid in self.projList :
            self.tools.terminal('\nError: Project ID [' + self.pid + '] already exists on this system. Use the remove command to remove it.')
            self.tools.dieNow()
        # Test for source template file
        if not source :
            source = os.path.join(self.local.userLibTemplate, self.pid + '.zip')
        if not os.path.exists(source) :
            self.tools.terminal('\nError: No template can be found for [' + self.pid + ']\n')
            self.tools.dieNow()

        # Unzip the template in place to start the new project
        with zipfile.ZipFile(source, 'r') as myzip :
            myzip.extractall(projHome)

        # Peek into the project
        pc = ConfigObj(os.path.join(projHome, 'Config', 'project.conf'), encoding='utf-8')
        pc['ProjectInfo']['projectCreateDate'] = self.tools.tStamp()
        pc['ProjectInfo']['projectIDCode'] = self.pid
        pc.filename = os.path.join(projHome, 'Config', 'project.conf')
        pc.write()
        # Get the media type from the newly placed project for registration
        projectMediaIDCode = pc['ProjectInfo']['projectMediaIDCode']
        # Reset the local settings
        self.local = ProjLocal(self.pid)

        # Create any folders that might be needed
        for fld in self.local.projFolders :
            folder = os.path.join(self.local.projHome, fld)
            if not os.path.exists(folder) :
                os.makedirs(folder)

        # Report what happened
        self.tools.terminal('A new project [' + self.pid + '] has been created based on the [' + self.tools.fName(source) + '] template.')
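

# --- Added illustration (not part of the original module) --------------------
# A hedged sketch of the template round trip: save an existing project as a
# template, then seed a new project from the saved zip. 'MYPROJ' and 'NEWPROJ'
# are hypothetical project IDs; the template library location comes from
# ProjLocal.
def _exampleTemplateRoundTrip () :
    # Save MYPROJ as MYPROJ.zip in the user template library
    Template('MYPROJ').projectToTemplate()
    # Build NEWPROJ from that template, pointing at the zip explicitly since the
    # default lookup expects a template named after the new project ID
    newTpl = Template('NEWPROJ')
    newTpl.templateToProject(targetDir='', source=os.path.join(newTpl.local.userLibTemplate, 'MYPROJ.zip'))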
class ProjCommander (object) :

    def __init__(self, pid) :
        '''Initialize the class and create the object.'''

        self.pid                    = pid
        self.tools                  = Tools()
        self.user                   = UserConfig()
        self.userConfig             = self.user.userConfig
        self.projHome               = os.path.join(os.environ['RAPUMA_PROJECTS'], self.pid)
        self.local                  = ProjLocal(self.pid)
        self.proj_config            = Config(pid)
        self.proj_config.getProjectConfig()
        self.projectConfig          = self.proj_config.projectConfig
        self.projectMediaIDCode     = self.projectConfig['ProjectInfo']['projectMediaIDCode']

        # Log messages for this module
        self.errorCodes     = {
            '0000' : ['MSG', 'Placeholder message'],
        }

###############################################################################
########################## Command Creation Functions #########################
###############################################################################

    def removeScripts (self) :
        '''Remove any unnecessary group control scripts from the project.'''

        self.tools.dieNow('removeScripts() not implemented yet.')

    def updateScripts (self) :
        '''Update all the helper command scripts in a project.'''

        self.makeStaticScripts()
        self.makeGrpScripts()

    def makeGrpScripts (self) :
        '''Create scripts that process specific group components.'''

        if not os.path.isdir(self.local.projHelpScriptFolder) :
            os.mkdir(self.local.projHelpScriptFolder)

        # Output the scripts (If this is a new project we need to pass)
        if self.projectConfig.has_key('Groups') :
            for gid in self.projectConfig['Groups'].keys() :
                allScripts = self.getGrpScripInfo(gid)
                for key in allScripts.keys() :
                    fullFile = os.path.join(self.local.projHelpScriptFolder, key) + gid
                    with codecs.open(fullFile, "w", encoding='utf_8') as writeObject :
                        writeObject.write(self.makeScriptHeader(allScripts[key][0], allScripts[key][1]))
                        # Strip out extra spaces from command
                        cmd = re.sub(ur'\s+', ur' ', allScripts[key][1])
                        writeObject.write(cmd + '\n\n')
                    # Make the script executable
                    self.tools.makeExecutable(fullFile)
            self.tools.terminal('\nCompleted creating/recreating group helper scripts.\n')
        else :
            pass

    def makeStaticScripts (self) :
        '''Create helper scripts for a project to help with repetitive tasks. If
        any scripts are present with the same name they will be overwritten.
        Note: This is only for temporary use due to the lack of an interface at
        this time (20130306140636).
It assumes the cType is usfm which, at some point may not be the case.''' if not os.path.isdir(self.local.projHelpScriptFolder) : os.mkdir(self.local.projHelpScriptFolder) # Output the scripts allScripts = self.getStaticScripInfo() for key in allScripts.keys() : fullFile = os.path.join(self.local.projHelpScriptFolder, key) with codecs.open(fullFile, "w", encoding='utf_8') as writeObject : writeObject.write(self.makeScriptHeader(allScripts[key][0], allScripts[key][1])) writeObject.write(allScripts[key][1] + '\n\n') # Make the script executable self.tools.makeExecutable(fullFile) self.tools.terminal('\nCompleted creating/recreating static helper scripts.\n') def makeScriptHeader (self, desc, cmd) : '''Make a helper script header.''' return '#!/bin/sh\n\n# Description: ' + desc + '\n\necho \necho Rapuma helper script: ' + desc + '\n\necho \necho command: ' + self.echoClean(cmd) + '\n\n' def echoClean (self, cmdStr) : '''Clean up a string for an echo statement in a shell script.''' clean = re.sub(ur'\;', ur'\\;', cmdStr) clean = re.sub(ur'\s+', ur' ', clean) return clean def getStaticScripInfo (self) : '''Create a dictionary of all the static auxillary script information used in most projects.''' pid = self.pid mid = self.projectMediaIDCode return { 'addBible' : ['Add Scripture components for a Bible group.', 'rapuma group ' + pid + ' BIBLE group add --source_path $1 '], 'addNT' : ['Add Scripture components for an NT group.', 'rapuma group ' + pid + ' NT group add --source_path $1 '], 'addOT' : ['Add Scripture components for an OT group.', 'rapuma group ' + pid + ' OT group add --source_path $1 '], 'archive' : ['Archive this project', 'rapuma project ' + pid + ' archive save '], 'backup' : ['Backup this project', 'rapuma project ' + pid + ' backup save '], 'cloudPull' : ['Pull data for this project from the cloud', 'rapuma project ' + pid + ' cloud restore '], 'cloudPush' : ['Push data from this project to the cloud', 'rapuma project ' + pid + ' cloud save $1 '], 'restore' : ['Restore a backup.', 'rapuma project ' + pid + ' backup restore '], 'template' : ['Create a template of the project.', 'rapuma project ' + pid + ' template save --id $1 '], 'updateScripts' : ['Update the project scripts.', 'rapuma project ' + pid + ' project update --update_type helper '], 'bind' : ['Create the binding PDF file', 'if [ "$1" ]; then CMD=" $1"; fi; if [ "$2" ]; then CMD=" $1 $2"; fi; rapuma project ' + pid + ' project bind $CMD '], 'placeholdOff' : ['Turn off illustration placeholders.', 'rapuma settings ' + pid + ' ' + mid + '_layout Illustrations useFigurePlaceHolders False '], 'placeholdOn' : ['Turn on illustration placeholders.', 'rapuma settings ' + pid + ' ' + mid + '_layout Illustrations useFigurePlaceHolders True '] } def getGrpScripInfo (self, gid) : '''Create a dictionary of the auxillary group script information used in most projects.''' # import pdb; pdb.set_trace() # Set the vars for this function pid = self.pid cType = self.projectConfig['Groups'][gid]['cType'] Ctype = cType.capitalize() renderer = self.projectConfig['CompTypes'][Ctype]['renderer'] self.proj_macro = Macro(self.pid, gid) macroConfig = self.proj_macro.macroConfig font = '' if macroConfig and macroConfig['FontSettings'].has_key('primaryFont') : font = macroConfig['FontSettings']['primaryFont'] macro = self.projectConfig['CompTypes'][Ctype]['macroPackage'] mid = self.projectMediaIDCode # Return a dictionary of all the commands we generate return { 'compare' : ['Compare component working text with backup.', 'if [ "$1" ]; then 
CMD="--cid_list $1"; fi; rapuma group ' + pid + ' ' + gid + ' group compare --compare_type backup $CMD '], 'render' : ['Render ' + gid + ' group PDF file.', 'if [ "$1" ]; then CMD="--cid_list $1"; fi; if [ "$2" ]; then CMD="--cid_list $1 $2"; fi; if [ "$3" ]; then CMD="--cid_list $1 $2 $3"; fi; rapuma group ' + pid + ' ' + gid + ' group render $CMD '], 'update' : ['Update the ' + gid + ' group from its source.', 'if [ "$2" ]; then CMD="--cid_list $2"; fi; rapuma group ' + pid + ' ' + gid + ' group update --source_path $1 $CMD '], 'background' : ['Re/Create the project background.', 'rapuma project ' + pid + ' project update --update_type background '], 'transparency' : ['Re/Create the project diagnostic layer.', 'rapuma project ' + pid + ' project update --update_type diagnostic '], 'addFont' : ['Add a font to the ' + gid + ' group.', 'rapuma package ' + pid + ' ' + gid + ' $1 font add -f '], 'removeFont' : ['Remove a font from the ' + gid + ' group.', 'rapuma package ' + pid + ' ' + gid + ' $1 font remove -f '], 'primaryFont' : ['Make font primary for the ' + gid + ' group.', 'rapuma package ' + pid + ' ' + gid + ' $1 font primary -f '], 'updateFont' : ['Update the ' + gid + ' font.', 'rapuma package ' + pid + ' ' + gid + ' \"' + font + '\" font update '], 'updateMacro' : ['Update the ' + gid + ' macro package.', 'rapuma package ' + pid + ' ' + gid + ' \"' + macro + '\" macro update '] }
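

# --- Added illustration (not part of the original module) --------------------
# A hedged sketch of how the helper-script writer is normally driven. The
# project ID is hypothetical; updateScripts() assumes the project configuration
# is readable and rewrites one small shell wrapper per command (backup, render,
# cloud push/pull, etc.) in the project's HelperScript folder.
def _exampleHelperScriptRefresh () :
    ProjCommander('MYPROJ').updateScripts()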
class Xetex (Manager) : # Shared values xmlConfFile = 'xetex.xml' def __init__(self, project, cfg, cType) : '''Do the primary initialization for this manager.''' super(Xetex, self).__init__(project, cfg) # import pdb; pdb.set_trace() # Create all the values we can right now for this manager. # Others will be created at run time when we know the cid. self.tools = Tools() self.project = project self.local = project.local self.log = project.log self.cfg = cfg self.pid = project.projectIDCode self.gid = project.gid self.cType = cType self.Ctype = cType.capitalize() self.mType = project.projectMediaIDCode self.renderer = 'xetex' self.manager = self.cType + '_' + self.renderer.capitalize() self.managers = project.managers self.pg_back = ProjBackground(self.pid, self.gid) self.fmt_diagnose = ProjDiagnose(self.pid, self.gid) self.proj_config = Config(self.pid, self.gid) self.proj_config.getProjectConfig() self.proj_config.getLayoutConfig() self.proj_config.getFontConfig() self.proj_config.getMacroConfig() # Bring in some manager objects we will need self.proj_font = ProjFont(self.pid) self.proj_illustration = ProjIllustration(self.pid, self.gid) self.proj_hyphenation = ProjHyphenation(self.pid, self.gid) self.usfmData = UsfmData() self.cidChapNumDict = self.usfmData.cidChapNumDict() self.cidPtIdDict = self.usfmData.cidPtIdDict() # Get config objs self.projectConfig = self.proj_config.projectConfig self.layoutConfig = self.proj_config.layoutConfig self.fontConfig = self.proj_config.fontConfig self.macroConfig = self.proj_config.macroConfig self.userConfig = self.project.userConfig self.macPackId = self.projectConfig['CompTypes'][self.Ctype]['macroPackage'] # Some config settings self.pdfViewerCmd = self.tools.getPdfViewerCommand(self.userConfig, self.projectConfig) self.sourceEditor = self.projectConfig['CompTypes'][self.Ctype]['sourceEditor'] self.useBackground = self.tools.str2bool(self.layoutConfig['DocumentFeatures']['useBackground']) self.useDiagnostic = self.tools.str2bool(self.layoutConfig['DocumentFeatures']['useDiagnostic']) self.useDocInfo = self.tools.str2bool(self.layoutConfig['DocumentFeatures']['useDocInfo']) # Get settings for this component self.managerSettings = self.projectConfig['Managers'][self.manager] for k, v in self.managerSettings.iteritems() : if v == 'True' or v == 'False' : setattr(self, k, self.tools.str2bool(v)) else : setattr(self, k, v) # Set some Booleans (this comes after persistant values are set) # Setting hyphenation is a 2 step process, first check global, then group self.useHyphenation = False if self.tools.str2bool(self.projectConfig['ProjectInfo']['hyphenationOn']) : if self.tools.str2bool(self.projectConfig['Groups'][self.gid]['useHyphenation']) : self.useHyphenation = True # In case the macro is not installed we need to skip over this try : self.chapNumOffSingChap = self.tools.str2bool(self.macroConfig['Macros'][self.macPackId]['ChapterVerse']['omitChapterNumberOnSingleChapterBook']) except : self.chapNumOffSingChap = None # Make any dependent folders if needed if not os.path.isdir(self.local.projGidFolder) : os.makedirs(self.local.projGidFolder) # Record some error codes # FIXME: much more needs to be done with this self.xetexErrorCodes = { 0 : 'Rendering succeful.', 256 : 'Something really awful happened.' 
} # Log messages for this module self.errorCodes = { '1005' : ['ERR', 'PDF viewer failed with error: [<<1>>]'], '1010' : ['ERR', 'Style file [<<1>>] could not be created.'], '1040' : ['LOG', 'Created: [<<1>>]'], '0420' : ['WRN', 'TeX settings file has been frozen for debugging purposes.'], '0440' : ['LOG', 'Created: [<<1>>]'], '0460' : ['LOG', 'Settings changed in [<<1>>], [<<2>>] needed to be recreated.'], '0465' : ['LOG', 'File: [<<1>>] missing, created a new one.'], '0470' : ['ERR', 'Macro package [<<1>>] is not recognized by the system.'], '0600' : ['MSG', '<<1>> cannot be viewed, PDF viewer turned off.'], '0610' : ['LOG', 'Recorded [<<1>>] rendered pages in the [<<2>>] group.'], '0615' : ['ERR', 'XeTeX failed to execute with error: <<1>>'], '0617' : ['ERR', 'XeTeX failed to execute with this error: [<<1>>]'], '0620' : ['DBG', 'xetex command in <<1>>: <<2>> <<3>>'], '0625' : ['MSG', 'Rendering of [<<1>>] successful.'], '0630' : ['ERR', 'Rendering [<<1>>] was unsuccessful. <<2>> (<<3>>)'], '0635' : ['ERR', 'XeTeX error code [<<1>>] not understood by Rapuma.'], '0650' : ['ERR', 'Component type [<<1>>] not supported!'], '0690' : ['MSG', 'Dependent files unchanged, rerendering of [<<1>>] un-necessary.'], '0695' : ['MSG', 'Routing <<1>> to PDF viewer.'], '0700' : ['ERR', 'Rendered file not found: <<1>>'], '0710' : ['WRN', 'PDF viewing is disabled.'], '0720' : ['MSG', 'Saved rendered file to: [<<1>>]'], '0730' : ['ERR', 'Failed to save rendered file to: [<<1>>]'], '1000' : ['WRN', 'XeTeX debugging is set to [<<1>>]. These are the paths XeTeX is seeing: [<<2>>]'], '1090' : ['ERR', 'Invalid value [<<1>>] used for XeTeX debugging. Must use an integer of 0, 1, 2, 4, 8, 16, or 32'] } # FIXME: It would be good if we could do a check for dependent files here ############################################################################### ############################ Manager Level Functions ########################## ############################################################################### ######################## Error Code Block Series = 1000 ####################### ############################################################################### def checkStartPageNumber (self) : '''Adjust page number for the current group. The current logic is if there is no number in the startPageNumber setting, we can put one in there as a suggestion. 
If there is already one there, the user will be responsible for seeing that it is correct.''' # import pdb; pdb.set_trace() try : # Simply try to return anything that is in the field cStrPgNo = self.projectConfig['Groups'][self.gid]['startPageNumber'] if cStrPgNo != '' : return cStrPgNo except : # If nothing is there, we'll make a suggestion pGrp = str(self.projectConfig['Groups'][self.gid]['precedingGroup']) if pGrp == 'None' : self.projectConfig['Groups'][self.gid]['startPageNumber'] = 1 self.tools.writeConfFile(self.projectConfig) return '1' else : # Calculate the suggested number based on the preceeding group try : cStrPgNo = str(self.projectConfig['Groups'][self.gid]['startPageNumber']) except : cStrPgNo = 1 self.projectConfig['Groups'][self.gid]['startPageNumber'] = 1 try : pGrpPgs = int(self.projectConfig['Groups'][pGrp]['totalPages']) pGrpStrPgNo = int(self.projectConfig['Groups'][pGrp]['startPageNumber']) except : # FIXME: Maybe this could go out and find out exactly how many pages were in the preceeding group pGrpPgs = 1 pGrpStrPgNo = 1 self.projectConfig['Groups'][pGrp]['totalPages'] = 1 self.projectConfig['Groups'][pGrp]['startPageNumber'] = 1 # Whether this is right or wrong set it the way it is self.projectConfig['Groups'][self.gid]['startPageNumber'] = (pGrpStrPgNo + pGrpPgs) self.tools.writeConfFile(self.projectConfig) return self.projectConfig['Groups'][pGrp]['startPageNumber'] def makeExtFile (self, fileName, description) : '''Generic function to create an extension file if one does not already exist.''' if not os.path.exists(fileName) : with codecs.open(fileName, "w", encoding='utf_8') as writeObject : writeObject.write(self.tools.makeFileHeader(fileName, description, False)) self.log.writeToLog(self.errorCodes['1040'], [self.tools.fName(fileName)]) return True def makeCmpExtTexFileOn (self, fileName) : '''Create a component TeX extention macro "on" file for a specified component. A matching "off" file will be created as well.''' description = 'This is a component (on) TeX macro extension file which may override any macros \ which were loaded for this rendering process. This file is read just before the component \ working file. After the component is rendered, the accompanying off TeX file will be \ loaded which will turn off any modified macro commands that this TeX file has set. The \ user must edit this file in order for it to work right.' return self.makeExtFile(fileName, description) def makeCmpExtTexFileOff (self, fileName) : '''Create a component TeX extention macro "off" file for a specified component. This is to match the "on" file that was created.''' description = 'This is a component (off) style extension file which overrides the settings \ that were loaded for this rendering process just prior to loading the component working \ file. The commands in this style file will off-set the "on" settings causing the macro to \ render as it did before the "on" styles were loaded. The user must edit this file for it \ to work properly.' return self.makeExtFile(fileName, description) def makeCmpExtStyFileOn (self, fileName) : '''Create a component style extentions "on" file for a specified component. A matching "off" file will be created as well.''' description = 'This is a component (on) style extension file which overrides any settings \ which were loaded for this rendering process. This file is read just before the component \ working file. 
After the component is rendered, the accompanying off style file will be \ loaded which will turn off any modified style commands that this style file has set. The \ user must edit this file in order for it to work right.' return self.makeExtFile(fileName, description) def makeCmpExtStyFileOff (self, fileName) : '''Create a component style extentions "off" file for a specified component. This is to match the "on" file that was created.''' description = 'This is a component (off) style extension file which overrides the settings \ that were loaded for this rendering process just prior to loading the component working \ file. The commands in this style file will off-set the "on" settings causing the macro to \ render as it did before the "on" styles were loaded. The user must edit this file for it \ to work properly.' return self.makeExtFile(fileName, description) def makeGrpExtTexFile (self) : '''Create a group TeX extentions file for a specified group.''' description = 'This is the group TeX extention macro file which overrides settings in \ the global TeX extension macro file.' return self.makeExtFile(self.local.grpExtTexFile, description) def makeGrpExtStyFile (self) : '''Create a group Style extentions file to a specified group.''' description = 'This is the group style extention file which overrides settings in \ the main default component extentions settings style file.' return self.makeExtFile(self.local.grpExtStyFile, description) ############################################################################### ############################# DEPENDENCY FUNCTIONS ############################ ############################################################################### ######################## Error Code Block Series = 0400 ####################### ############################################################################### def makeSettingsTexFile (self) : '''Create the primary TeX settings file.''' # import pdb; pdb.set_trace() description = 'This is the primary TeX settings file for the ' + self.gid + ' group. \ It is auto-generated so editing can be a rather futile exercise. This is unless you \ set freezeTexSettings to True in the XeTeX manager configuration of the project.conf \ file. Doing that will prevent the file from being remade. However, no configuration \ changes will be reflected in the static settings file. Use this with care.' 
# Setting for internal testing outputTest = False # Check for freezeTexSettings in project.conf if self.projectConfig['Managers'][self.cType + '_Xetex'].has_key('freezeTexSettings') and \ self.tools.str2bool(self.projectConfig['Managers'][self.cType + '_Xetex']['freezeTexSettings']) : self.log.writeToLog(self.errorCodes['0420']) return False def appendLine(line, realVal) : '''Use this to shorten the code and look for listy things.''' if type(line) == list : for s in line : linesOut.append(self.proj_config.processNestedPlaceholders(s, realVal)) else : linesOut.append(self.proj_config.processNestedPlaceholders(line, realVal)) # Open a fresh settings file with codecs.open(self.local.macSettingsFile, "w", encoding='utf_8') as writeObject : writeObject.write(self.tools.makeFileHeader(self.local.macSettingsFileName, description)) # Build a dictionary from the default XML settings file # Create a dict that contains only the data we need here macPackDict = self.tools.xmlFileToDict(self.local.macPackConfXmlFile) for sections in macPackDict['root']['section'] : for section in sections : secItem = sections[section] linesOut = [] if type(secItem) is list : if outputTest : print sections['sectionID'] linesOut.append('% ' + sections['sectionID'].upper()) for setting in secItem : for k in setting.keys() : if k == 'texCode' : if outputTest : print '\t', setting['key'] realVal = self.macroConfig['Macros'][self.macPackId][sections['sectionID']][setting['key']] # Test any boolDepends that this setting might have if setting.has_key('boolDepend') : result = [] if type(setting['boolDepend']) == list : for i in setting['boolDepend'] : result.append(self.affirm(i)) else : result.append(self.affirm(setting['boolDepend'])) # If 'None' didn't end up in the list, that means # every bool tested good so we can output the line if None not in result : if outputTest : print '\t', setting.get(k) appendLine(setting['texCode'], realVal) # Normal setting output elif setting.get(k) : if setting.get(k) != None : # We filter out zero values here (But what if we need one of them?) 
if not self.proj_config.processNestedPlaceholders(realVal) == '0' : if outputTest : print '\t', setting.get(k) appendLine(setting['texCode'], realVal) # Only write out sections that have something in them if len(linesOut) > 1 : writeObject.write('\n') for line in linesOut : writeObject.write(line + '\n') # Continue here with injecting the font settings which are guided by # the config file because the XML source(s) could vary writeObject.write('\n% INSTALLED FONTS\n') installedFonts = self.fontConfig['Fonts'].keys() cTypeFont = self.projectConfig['CompTypes'][self.cType.capitalize()]['fontName'] for font in installedFonts : if font == cTypeFont : # Output the primary font for key in self.fontConfig['Fonts'][font]['TexMapping']['PrimaryFont'].keys() : writeObject.write(self.proj_config.processNestedPlaceholders(self.fontConfig['Fonts'][font]['TexMapping']['PrimaryFont'][key]) + '\n') # Output the seconday settings as well for this font for key in self.fontConfig['Fonts'][font]['TexMapping']['SecondaryFont'].keys() : writeObject.write(self.proj_config.processNestedPlaceholders(self.fontConfig['Fonts'][font]['TexMapping']['SecondaryFont'][key]) + '\n') else : # There can only be one primary font, this is not it for key in self.fontConfig['Fonts'][font]['TexMapping']['SecondaryFont'].keys() : writeObject.write(self.proj_config.processNestedPlaceholders(self.fontConfig['Fonts'][font]['TexMapping']['SecondaryFont'][key]) + '\n') writeObject.write('\n') # Die here if testing if outputTest : self.tools.dieNow() # Report finished if not return True def affirm (self, boolDependInfo) : '''Affirm by returning True if the actual bool matches its state setting. Returning 'None' will cause a setting to be skipped.''' realBool = self.returnConfRefValue(boolDependInfo['#text']).lower() if boolDependInfo['@state'].lower() == realBool.lower() : return True def returnConfRefValue (self, ref) : '''Return the value of a given config reference. The ref syntax is as follows: [config:configObj|section|key]. This should be able to recuse as deep as necessary.''' # import pdb; pdb.set_trace() ref = ref.lstrip('[').rstrip(']') (holderType, holderKey) = ref.split(':', 1) if holderType.lower() == 'config' : val = holderKey.split('|') dct = ['self.' + val[0]] val.remove(val[0]) for i in val : i = self.proj_config.processNestedPlaceholders(i, '') dct.append('["' + i + '"]') return eval(''.join(dct)) def makeGidTexFile (self, cidList) : '''Create the main gid TeX control file.''' description = 'This is the group TeX control file. XeTeX will \ read this file to get all of links to other instructions (macros) \ needed to render the group, or a component of a group.' # Since a render run could contain any number of components # in any order, we will remake this file on every run. No need # for dependency checking if os.path.exists(self.local.gidTexFile) : os.remove(self.local.gidTexFile) # Create the main TeX settings file (made on every run) self.makeSettingsTexFile() # Start writing out the gid.tex file. Check/make dependencies as we go. # If we fail to make a dependency it will die and report during that process. 
# We bring in each element in the order necessary with codecs.open(self.local.gidTexFile, "w", encoding='utf_8') as gidTexObject : # Write out the file header gidTexObject.write(self.tools.makeFileHeader(self.local.gidTexFileName, description)) # First bring in the main macro file gidTexObject.write('\\input \"' + self.local.primaryMacroFile + '\"\n') # Check for a preStyle extension file and load if it is there if os.path.exists(self.local.preStyTexExtFile) : gidTexObject.write('\\input \"' + self.local.preStyTexExtFile + '\"\n') ######## # FIXME? To avoid problems with the usfmTex marginalverses macro code, we bring # in the stylesheets now. Will this cause any problems with other macPacks? ######## # Load style files (default and extention come with the package) gidTexObject.write('\\stylesheet{' + self.local.defaultStyFile + '}\n') # Load the global style extensions gidTexObject.write('\\stylesheet{' + self.local.glbExtStyFile + '}\n') # Load the group style extensions (if needed) if self.projectConfig['Groups'][self.gid].has_key('useGrpStyOverride') and self.tools.str2bool(self.projectConfig['Groups'][self.gid]['useGrpStyOverride']) : self.makeGrpExtStyFile() gidTexObject.write('\\stylesheet{' + self.local.grpExtStyFile + '}\n') # Load the settings (usfmTex: if marginalverses, load code in this) gidTexObject.write('\\input \"' + self.local.macSettingsFile + '\"\n') # Load the TeX macro extensions for this macro package gidTexObject.write('\\input \"' + self.local.extTexFile + '\"\n') # Load the group TeX macro extensions (if needed) if self.projectConfig['Groups'][self.gid].has_key('useGrpTexOverride') and self.tools.str2bool(self.projectConfig['Groups'][self.gid]['useGrpTexOverride']) : self.makeGrpExtTexFile() gidTexObject.write('\\input \"' + self.local.grpExtTexFile + '\"\n') # Load hyphenation data if needed if self.useHyphenation : # This is the main hyphenation settings file, this must be loaded first gidTexObject.write('\\input \"' + self.proj_hyphenation.projHyphSetTexFile + '\"\n') # This is the character definition file for hyphenation, this should be loaded second gidTexObject.write('\\input \"' + self.proj_hyphenation.projHyphCharTexFile + '\"\n') # This is the exception words list (all the hyphenated words), this is loaded last gidTexObject.write('\\input \"' + self.proj_hyphenation.projHyphExcTexFile + '\"\n') # If this is less than a full group render, just go with default pg num (1) if cidList == self.projectConfig['Groups'][self.gid]['cidList'] : # Check if this setting is there startPageNumber = self.checkStartPageNumber() if startPageNumber > 1 : gidTexObject.write('\\pageno = ' + str(startPageNumber) + '\n') # Insert Document properties and x1a compliant info if needed gidTexObject.write(self.makeXoneACompliant()) # Now add in each of the components for cid in cidList : # Output files and commands for usfm cType if self.cType == 'usfm' : cidSource = os.path.join(self.local.projComponentFolder, cid, self.project.groups[self.gid].makeFileNameWithExt(cid)) cidTexFileOn = os.path.join(self.local.projTexFolder, self.gid + '-' + cid + '-On-ext.tex') cidTexFileOff = os.path.join(self.local.projTexFolder, self.gid + '-' + cid + '-Off-ext.tex') cidStyFileOn = os.path.join(self.local.projStyleFolder, self.gid + '-' + cid + '-On-ext.sty') cidStyFileOff = os.path.join(self.local.projStyleFolder, self.gid + '-' + cid + '-Off-ext.sty') # Check to see if a TeX macro override is needed if self.projectConfig['Groups'][self.gid].has_key('compTexOverrideList') and cid in 
self.projectConfig['Groups'][self.gid]['compTexOverrideList'] : self.makeCmpExtTexFileOn(cidTexFileOn) gidTexObject.write('\\input \"' + cidTexFileOn + '\"\n') # Check to see if a style override is needed (if so create "on" file) if self.projectConfig['Groups'][self.gid].has_key('compStyOverrideList') and cid in self.projectConfig['Groups'][self.gid]['compStyOverrideList'] : self.makeCmpExtStyFileOn(cidStyFileOn) gidTexObject.write('\\stylesheet{' + cidStyFileOn + '}\n') # Check for short books add omit statement if self.chapNumOffSingChap and self.cidChapNumDict[cid] == 1 : gidTexObject.write('\\OmitChapterNumbertrue\n') # Add the working file here gidTexObject.write('\\ptxfile{' + cidSource + '}\n') # Check again for short books turn off omit statement if self.chapNumOffSingChap and self.cidChapNumDict[cid] == 1 : gidTexObject.write('\\OmitChapterNumberfalse\n') # Check for for style override and add the "Off" style file here if self.projectConfig['Groups'][self.gid].has_key('compStyOverrideList') and cid in self.projectConfig['Groups'][self.gid]['compStyOverrideList'] : self.makeCmpExtStyFileOn(cidStyFileOff) gidTexObject.write('\\stylesheet{' + cidStyFileOff + '}\n') # Check for for TeX macro override and add the "Off" TeX file here if self.projectConfig['Groups'][self.gid].has_key('compTexOverrideList') and cid in self.projectConfig['Groups'][self.gid]['compTexOverrideList'] : self.makeCmpExtTexFileOff(cidTexFileOff) gidTexObject.write('\\input \"' + cidTexFileOff + '\"\n') else : self.log.writeToLog(self.errorCodes['0650'], [self.cType]) # This can only hapen once in the whole process, this marks the end gidTexObject.write('\\bye\n') return True def makeXoneACompliant (self) : '''Insert the necessary TeX code into the header to give the appearance of being PDF x1-a compliant. If the feature is turned off then it will only inject relevant document properties that are good to have included no mater what. XeTeX output is x1-a for the most part, it just doesn't brag about it. The output here mostly works. :-) ''' # import pdb; pdb.set_trace() allLines = '' # Set some of the vars for the output title = self.projectConfig['ProjectInfo']['projectTitle'] subject = self.projectConfig['ProjectInfo']['projectDescription'] author = ''.join(self.projectConfig['ProjectInfo']['translators']) creator = ''.join(self.projectConfig['ProjectInfo']['typesetters']) # I don't think this next bit is not right, what does +7 mean anyway? 
# It works for CDT time anyway, which I thought was -6 offSet = "+07\'00\'" # To get the date stamp right, we strip out all the non-number # characters so we are left with: yyyymmddhhmmss comp = self.projectConfig['ProjectInfo']['projectCreateDate'].replace('-', '').replace(':', '').replace(' ', '') cDate = 'D:' + comp + offSet mDate = 'D:' + self.tools.fullFileTimeStamp() + offSet lines = [ '\special{pdf:docinfo<<', '/Title(' + title + ')%', '/Subject(' + subject + ')%', '/Author(' + author + ')%', '/Creator(' + creator + ')%', '/CreationDate(' + cDate + ')%', '/ModDate(' + mDate + ')%', '/Producer(XeTeX with Rapuma)%', '/Trapped /False', '/GTS_PDFXVersion(PDF/X-1:2003)%', '/GTS_PDFXConformance(PDF/X-1a:2003)%', '>> }' ] # Add PDF header declairations for PDF X1-A:2003 compliance (default is True) if self.tools.str2bool(self.layoutConfig['DocumentFeatures']['pdfX1a']) : icc = os.path.join(self.local.rapumaConfigFolder, 'ps_cmyk.icc') # Now create the insert line list xtralines = [ '\special{pdf:fstream @OBJCVR (' + icc + ')}', '\special{pdf:put @OBJCVR <</N 4>>}', '%\special{pdf:close @OBJCVR}', '\special{pdf:docview <<', '/OutputIntents [ <<', '/Type/OutputIndent', '/S/GTS_PDFX', '/OutputCondition (An Unknown print device)', '/OutputConditionIdentifier (Custom)', '/DestOutputProfile @OBJCVR', '/RegistryName (http://www.color.og)', '>> ] >>}' ] lines = lines + xtralines # Whatever our output, process the lines for l in lines : allLines = allLines + l + ' \n' return allLines ############################################################################### ################################# Main Function ############################### ############################################################################### ######################## Error Code Block Series = 0600 ####################### ############################################################################### def run (self, gid, cidList, pgRange, override, save) : '''This will check all the dependencies for a group and then use XeTeX to render the whole group or a subset of components and even a page range in a single component.''' # import pdb; pdb.set_trace() # There must be a cidList. If one was not passed, default to # the group list cidListSubFileName = '' saveFile = '' saveFileName = '' if not cidList : cidList = self.projectConfig['Groups'][gid]['cidList'] else : # If there is a cidList, create an alternate ouput name. # This is so if the file is saved it will have a unique # name. the name needs to be ordered by ###-cid-gid. # We need to do this sooner than later. 
if len(cidList) > 1 : cidListSubFileName = '-'.join(cidList) else : cid = cidList[0] # Add a filler character to the ID cnid = "{:0>3}".format(self.cidPtIdDict[cid]) cidListSubFileName = cnid + '-' + cid # Create, if necessary, the gid.tex file # First, go through and make/update any dependency files self.makeSettingsTexFile() # Now make the gid main setting file self.makeGidTexFile(cidList) # Dynamically create a dependency list for the render process # Note: gidTexFile is remade on every run, do not test against that file dep = [self.local.extTexFile, self.local.projectConfFile, self.local.layoutConfFile, self.local.macroConfFile, self.local.illustrationConfFile, ] # Add component dependency files for cid in cidList : cidUsfm = self.project.groups[gid].getCidPath(cid) cidIlls = self.proj_illustration.getCidPiclistFile(cid) for f in [cidUsfm, cidIlls] : if os.path.exists(f) : dep.append(f) # Treat adjustment file separate if self.cType == 'usfm' : cidAdj = self.project.groups[gid].getCidAdjPath(cid) if os.path.exists(cidAdj) : dep.append(cidAdj) # Call the renderer # Create the environment that XeTeX will use. This will be temporarily set # by subprocess.call() just before XeTeX is run. texInputsLine = self.project.local.projHome + ':' \ + self.local.projStyleFolder + ':' \ + self.local.projTexFolder + ':' \ + self.local.projMacPackFolder + ':' \ + self.local.projMacroFolder + ':' \ + self.local.projGidFolder + ':.' # Create the environment dictionary that will be fed into subprocess.call() #envDict = dict(os.environ) envDict={} # These are project environment vars envDict['TEXINPUTS'] = texInputsLine # These are XeTeX environment vars that are run if the internal (fast) version # of XeTeX is being run, which is the default. If runExternalXetex is set to # False, the following special environment vars will be run. If set to true, # an external version of XeTeX, provided it is installed, will run with its own # environment vars set elsewhere runExternal = self.tools.str2bool(self.projectConfig['Managers'][self.cType + '_Xetex'].get('runExternalXetex', '')) if not runExternal : envDict['PATH'] = os.path.join(self.local.rapumaXetexFolder, 'bin', 'x86_64-linux') envDict['TEXMFCNF'] = os.path.join(self.local.rapumaXetexFolder, 'texmf-local', 'web2c') envDict['TEXFORMATS'] = os.path.join(self.local.rapumaXetexFolder, 'texmf-local', 'web2c', 'xetex') # To help with debugging the following hook has been added. This is not # something the user would ever use. It is only for developer diagnostics. 
# for infomation on what integers can be used refer to this URL: # http://www.dcs.ed.ac.uk/home/latex/Informatics/Obsolete/html/kpathsea/kpathsea.html debugXetex = self.projectConfig['Managers'][self.cType + '_Xetex'].get('debugKpse', None) if debugXetex : try : if int(debugXetex) > 0 : envDict['KPATHSEA_DEBUG'] = debugXetex self.log.writeToLog(self.errorCodes['1000'], [str(debugXetex), str(envDict)]) except : self.log.writeToLog(self.errorCodes['1090'], [debugXetex]) else : envDict.update(os.environ) # Create the XeTeX command argument list that subprocess.call() will run with # the environment vars we set above cmds = ['xetex', '-output-directory=' + self.local.projGidFolder, self.local.gidTexFile] # For debugging purposes, output the following DBG message if self.projectConfig['Managers'][self.cType + '_Xetex'].has_key('freezeTexSettings') and \ self.tools.str2bool(self.projectConfig['Managers'][self.cType + '_Xetex']['freezeTexSettings']) : self.log.writeToLog(self.errorCodes['0620'], [os.getcwd(), str(envDict), " ".join(cmds)]) # Run the XeTeX and collect the return code for analysis try : rCode = subprocess.call(cmds, env = envDict) # Analyse the return code if rCode == int(0) : self.log.writeToLog(self.errorCodes['0625'], [self.local.gidTexFileName]) elif rCode in self.xetexErrorCodes : self.log.writeToLog(self.errorCodes['0630'], [self.local.gidTexFileName, self.xetexErrorCodes[rCode], str(rCode)]) else : self.log.writeToLog(self.errorCodes['0635'], [str(rCode)]) except Exception as e : # If subprocess fails it might be because XeTeX did not execute # we will try to report back something useful self.log.writeToLog(self.errorCodes['0615'], [str(e)]) # Collect the page count and record in group (Write out at the end of the opp.) self.projectConfig['Groups'][gid]['totalPages'] = str(PdfFileReader(open(self.local.gidPdfFile)).getNumPages()) # Write out any changes made to the project.conf file that happened during this opp. self.tools.writeConfFile(self.projectConfig) # Pull out pages if requested (use the same file for output) if pgRange : self.tools.pdftkPullPages(self.local.gidPdfFile, self.local.gidPdfFile, pgRange) # The gidPdfFile is the residue of the last render and if approved, can be # used for the binding process. In regard to saving and file naming, the # gidPdfFile will be copied but never renamed. It must remain intact. 
# If the user wants to save this file or use a custom name, do that now if save and not override : saveFileName = self.pid + '_' + gid if cidListSubFileName : saveFileName = saveFileName + '_' + cidListSubFileName if pgRange : saveFileName = saveFileName + '_pg(' + pgRange + ')' # Add date stamp saveFileName = saveFileName + '_' + self.tools.ymd() # Add render file extention saveFileName = saveFileName + '.pdf' # Save this to the Deliverable folder (Make sure there is one) if not os.path.isdir(self.local.projDeliverableFolder) : os.makedirs(self.local.projDeliverableFolder) # Final file name and path saveFile = os.path.join(self.local.projDeliverableFolder, saveFileName) # Copy, no news is good news if shutil.copy(self.local.gidPdfFile, saveFile) : self.log.writeToLog(self.errorCodes['0730'], [saveFileName]) else : self.log.writeToLog(self.errorCodes['0720'], [saveFileName]) # If given, the override file name becomes the file name if override : saveFile = override # With shutil.copy(), no news is good news if shutil.copy(self.local.gidPdfFile, saveFile) : self.log.writeToLog(self.errorCodes['0730'], [saveFileName]) else : self.log.writeToLog(self.errorCodes['0720'], [saveFileName]) # Once we know the file is successfully generated, add a background if defined viewFile = '' if self.useBackground : if saveFile : viewFile = self.pg_back.addBackground(saveFile) else : viewFile = self.pg_back.addBackground(self.local.gidPdfFile) # Add a timestamp and doc info if requested in addition to background if self.useDocInfo : if saveFile : if os.path.isfile(viewFile) : viewFile = self.pg_back.addDocInfo(viewFile) else : viewFile = self.pg_back.addDocInfo(saveFile) else : if os.path.isfile(viewFile) : viewFile = self.pg_back.addDocInfo(viewFile) else : viewFile = self.pg_back.addDocInfo(self.local.gidPdfFile) # Add a diagnostic layer to the rendered output. Normally this is # not used with a normal background layer if self.useDiagnostic : if saveFile : viewFile = self.fmt_diagnose.addTransparency(saveFile) else : viewFile = self.fmt_diagnose.addTransparency(self.local.gidPdfFile) # To avoid confusion with file names, if this is a saved file, # and it has a background, we need to remove the original, non- # background file (remembering originals are kept in the group # Component folder), then rename the -view version to whatever # the saved name should be if save or override : if os.path.isfile(saveFile) and os.path.isfile(viewFile) : # First remove os.remove(saveFile) # Next rename os.rename(viewFile, saveFile) ##### Viewing ##### # First get the right file name to view if saveFile : # If there was a saveFile, that will be the viewFile viewFile = saveFile else : # The view file in this case is just temporary if not os.path.isfile(viewFile) : viewFile = self.local.gidPdfFile.replace(gid + '.pdf', gid + '-view.pdf') shutil.copy(self.local.gidPdfFile, viewFile) # Now view it if os.path.isfile(viewFile) : if self.pdfViewerCmd : # Add the file to the viewer command self.pdfViewerCmd.append(viewFile) # Run the XeTeX and collect the return code for analysis try : subprocess.Popen(self.pdfViewerCmd) return True except Exception as e : # If we don't succeed, we should probably quite here self.log.writeToLog(self.errorCodes['1005'], [str(e)]) else : self.log.writeToLog(self.errorCodes['0710']) else : self.log.writeToLog(self.errorCodes['0700'], [self.tools.fName(viewFile)]) # If we made it this far, return True return True
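# ---------------------------------------------------------------------------
# Illustrative sketch (not part of Rapuma): the core of run() above is
# "invoke xetex in a controlled environment and inspect the return code".
# The helper below shows that pattern in isolation. The folder names in the
# usage note are hypothetical stand-ins, and the error handling is reduced
# to a simple message; the subprocess/TEXINPUTS mechanics are the same ones
# run() relies on.
# ---------------------------------------------------------------------------
import os
import subprocess

def render_with_xetex(tex_file, out_dir, search_paths):
    '''Run xetex on tex_file, writing output to out_dir. search_paths is a
    list of folders TeX should search; it is passed through TEXINPUTS.'''
    env = dict(os.environ)
    # A trailing ':.' keeps the current directory on the search path
    env['TEXINPUTS'] = ':'.join(search_paths) + ':.'
    cmds = ['xetex', '-output-directory=' + out_dir, tex_file]
    try:
        rc = subprocess.call(cmds, env=env)
    except OSError as e:
        # xetex itself could not be started (not installed, bad PATH, etc.)
        print('Could not start xetex: ' + str(e))
        return False
    return rc == 0

# Example (hypothetical paths):
# render_with_xetex('/tmp/proj/mygroup.tex', '/tmp/proj/out', ['/tmp/proj/Styles'])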
class Text (Manager) : # Shared values xmlConfFile = 'text.xml' def __init__(self, project, cfg, cType) : '''Do the primary initialization for this manager.''' super(Text, self).__init__(project, cfg) # Set values for this manager self.gid = project.gid self.pid = project.projectIDCode self.tools = Tools() self.project = project self.projectConfig = project.projectConfig self.cfg = cfg self.cType = cType self.Ctype = cType.capitalize() self.log = project.log self.manager = self.cType + '_Text' self.managers = project.managers self.rapumaXmlTextConfig = os.path.join(self.project.local.rapumaConfigFolder, self.xmlConfFile) # import pdb; pdb.set_trace() # Get persistant values from the config if there are any newSectionSettings = self.tools.getPersistantSettings(self.project.projectConfig['Managers'][self.manager], self.rapumaXmlTextConfig) if newSectionSettings != self.project.projectConfig['Managers'][self.manager] : self.project.projectConfig['Managers'][self.manager] = newSectionSettings self.tools.writeConfFile(self.project.projectConfig) self.compSettings = self.project.projectConfig['Managers'][self.manager] for k, v in self.compSettings.iteritems() : setattr(self, k, v) # Log messages for this module self.errorCodes = { 'TEXT-000' : ['MSG', 'Text module messages'], 'TEXT-005' : ['ERR', 'Component type [<<1>>] is not supported by the text manager.'], 'TEXT-015' : ['MSG', 'TEXT-015 - Unassigned error message ID.'], 'TEXT-030' : ['LOG', 'Copied [<<1>>] to [<<2>>] in project.'], 'TEXT-040' : ['WRN', 'The [<<1>>] component is locked. It must be unlocked before any modifications can be made.'], 'TEXT-050' : ['LOG', 'Working text file for [<<1>>] has been completed.'], 'TEXT-055' : ['ERR', 'TEXT-055 - Unassigned error message ID.'], 'TEXT-080' : ['LOG', 'Validating text using the [<<1>>] style file.'], 'TEXT-150' : ['MSG', 'USFM file: [<<1>>] is valid.'], 'TEXT-160' : ['ERR', 'Unable to complete working text installation for [<<1>>]. May require \"force\" (-f).'], '0000' : ['MSG', 'Placeholder message'], } ############################################################################### ############################ Project Level Functions ########################## ############################################################################### # def setSourceEditor (self, editor) : # '''Set the source editor for the cType. It assumes the editor is valid. # This cannot fail.''' # se = '' # if self.project.projectConfig['CompTypes'][self.Ctype].has_key('sourceEditor') : # se = self.project.projectConfig['CompTypes'][self.Ctype]['sourceEditor'] # if se != editor : # self.project.projectConfig['CompTypes'][self.Ctype]['sourceEditor'] = editor # self.tools.writeConfFile(self.project.projectConfig) # FIXME: Get rid of the PT dependencies #def updateManagerSettings (self, gid) : #'''Update the settings for this manager if needed.''' ## import pdb; pdb.set_trace() #sourceEditor = self.pt_tools.getSourceEditor() ## If the source editor is PT, then a lot of information can be ## gleaned from the .ssf file. Otherwise we will go pretty much with ## the defaults and hope for the best. #if sourceEditor.lower() == 'paratext' : ## Do a compare on the settings #ptSet = self.pt_tools.getPTSettings() #oldCompSet = self.compSettings.dict() ## Don't overwrite manager settings (default sets reset to False) if ## there already is a setting present on the nameFormID. 
#if self.project.projectConfig['Managers'][self.cType + '_Text']['nameFormID'] : #newCompSet = self.pt_tools.mapPTTextSettings(self.compSettings.dict(), ptSet) #else : #newCompSet = self.pt_tools.mapPTTextSettings(self.compSettings.dict(), ptSet, True) #if not newCompSet == oldCompSet : #self.compSettings.merge(newCompSet) #self.tools.writeConfFile(self.project.projectConfig) ## Be sure to update the current session settings #for k, v in self.compSettings.iteritems() : #setattr(self, k, v) ## A generic editor means we really do not know where the text came ## from. In that case, we just do the best we can. #elif sourceEditor.lower() == 'generic' : #if not self.project.projectConfig['Managers'][self.cType + '_Text']['nameFormID'] or \ #not self.project.projectConfig['Managers'][self.cType + '_Text']['postPart'] : #self.project.projectConfig['Managers'][self.cType + '_Text']['nameFormID'] = 'USFM' #self.project.projectConfig['Managers'][self.cType + '_Text']['postPart'] = 'usfm' #self.tools.writeConfFile(self.project.projectConfig) #else : #self.project.log.writeToLog('TEXT-010', [sourceEditor]) #self.tools.dieNow() #return True def testCompTextFile (self, cName, source, projSty = None) : '''This will direct a request to the proper validator for testing the source of a component text file.''' if self.cType == 'usfm' : # If this fails it will die at the validation process if self.project.components[cName].usfmTextFileIsValid(source, projSty) : self.project.log.writeToLog('TEXT-150', [source]) return True else : self.project.log.writeToLog('TEXT-005', [self.cType]) self.tools.dieNow()
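# ---------------------------------------------------------------------------
# Illustrative sketch (not part of Rapuma): the Text manager's __init__
# follows a "merge in any new default settings, then write the config only
# if something actually changed" pattern via getPersistantSettings(). The
# helper below shows that idea with plain dicts; the real code works on
# ConfigObj sections populated from an XML-defined default set.
# ---------------------------------------------------------------------------
def merge_new_defaults(current, defaults):
    '''Return (merged, changed). Existing keys win; only missing keys are
    filled in from the defaults.'''
    merged = dict(current)
    changed = False
    for key, value in defaults.items():
        if key not in merged:
            merged[key] = value
            changed = True
    return merged, changed

# Example:
# settings, dirty = merge_new_defaults({'nameFormID': 'USFM'},
#                                      {'nameFormID': '', 'postPart': 'usfm'})
# if dirty:
#     pass  # this is the point where writeConfFile() would be called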
class ProjLog (object) : def __init__(self, pid) : '''Do the primary initialization for this manager.''' self.tools = Tools() self.pid = pid self.rapumaHome = os.environ.get('RAPUMA_BASE') self.userHome = os.environ.get('RAPUMA_USER') self.user = UserConfig() self.userConfig = self.user.userConfig self.local = ProjLocal(pid) ############################################################################### ############################### Logging Functions ############################# ############################################################################### # These have to do with keeping a running project log file. Everything done is # recorded in the log file and that file is trimmed to a length that is # specified in the system settings. Everything is channeled to the log file but # depending on what has happened, they are classed in three levels: # 1) [MSG] - Common event going to log and terminal # 2) [WRN] - Warning event going to log and terminal if debugging is turned on # 3) [ERR] - Error event going to the log and terminal and kills the process # 4) [LOG] - Messages that go only to the log file to help with debugging # FIXME: Following not implemented yet # 5) [TOD] - To do list. Output to a file that helps guide the user. def writeToLog (self, errCode, args=None, location=None) : '''Send an event to one of the log files or the terminal if specified. Everything gets written to a log. Where a message gets written to depends on what type code it is. The type code is in with the error code data. There are five type codes: MSG = General messages go to both the terminal and log file LOG = Messages that go only to the log file WRN = Warnings that go to the terminal and log file ERR = Errors that go to both the terminal and log file TOD = Messages that will go to a special todo file to guide the user The errCode points to a specific message that will be sent to a log file. The args parameter can contain extra information like file names to help the user better figure out what happened.''' # Get the message from the errorCode list the module sent if type(errCode) == list : if location : msg = errCode[1] + ' : (' + location + ')' else : msg = errCode[1] code = errCode[0] else : self.tools.terminal('\nThe code: [' + errCode + '] is not recognized by the Rapuma system.') return # If args were given, do s/r on them and add # args info that needs to be added to msg. # Look for a <<#>> pattern replace it with # the corresponding position in the args list. if args : for count, arg in enumerate(args) : msg = msg.replace('<<' + str(count+1) + '>>', arg) # Write out everything but LOG messages to the terminal if code != 'LOG' and code != 'TOD' : self.tools.terminal('\n' + code + ' - ' + msg) # Test to see if this is a live project by seeing if the project conf is # there. If it is, we can write out log files. Otherwise, why bother? if self.local.projectConfFile and os.path.exists(self.local.projectConfFile) : # Build the event line eventLine = '\"' + self.tools.tStamp() + '\", \"' + code + '\", \"' + msg + '\"' # Do we need a log file made? try : if not os.path.isfile(self.local.projLogFile) or os.path.getsize(self.local.projLogFile) == 0 : writeObject = codecs.open(self.local.projLogFile, "w", encoding='utf_8') writeObject.write('Rapuma event log file created: ' + self.tools.tStamp() + '\n') writeObject.close() # Now log the event to the top of the log file using preAppend(). 
self.preAppend(eventLine, self.local.projLogFile) # FIXME: Add the TOD list output here, also, output any TODs # to the error log as well as these are bad errors. # Write errors and warnings to the error log file if code == 'WRN' and self.userConfig['System']['debugging'] == 'True': self.writeToErrorLog(self.local.projErrorLogFile, eventLine) if code == 'ERR' : self.writeToErrorLog(self.local.projErrorLogFile, eventLine) except Exception as e : # If we don't succeed, we should probably quite here self.tools.terminal("Failed to write message to log file: " + msg) self.tools.terminal('Internal error: [' + str(e) + ']') self.tools.dieNow() # Halt the process if this was an 'ERR' level type code if code == 'ERR' : self.tools.dieNow('Sorry, I have to stop.') else : return def writeToErrorLog (self, errorLog, eventLine) : '''In a perfect world there would be no errors, but alas there are and we need to put them in a special file that can be accessed after the process is run. The error file from the previous session is deleted at the beginning of each new run.''' try : # Because we want to read errors from top to bottom, we don't pre append # them to the error log file. if not os.path.isfile(errorLog) : writeObject = codecs.open(errorLog, "w", encoding='utf_8') else : writeObject = codecs.open(errorLog, "a", encoding='utf_8') # Write and close writeObject.write(eventLine + '\n') writeObject.close() except : self.tools.terminal('Error writing this event to error log: ' + eventLine) return def preAppend (self, line, file_name) : '''Got the following code out of a Python forum. This will pre-append a line to the beginning of a file.''' # import pdb; pdb.set_trace() fobj = fileinput.FileInput(file_name, inplace=1) first_line = fobj.readline() sys.stdout.write("%s\n%s" % (line, first_line)) for line in fobj: sys.stdout.write("%s" % line) fobj.close()
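# ---------------------------------------------------------------------------
# Illustrative sketch (not part of Rapuma): writeToLog() fills error-code
# messages by replacing <<1>>, <<2>>, ... with the positional args it was
# given. This standalone helper shows just that substitution step.
# ---------------------------------------------------------------------------
def fill_placeholders(msg, args=None):
    '''Replace <<1>>, <<2>>, ... in msg with the corresponding args.'''
    if args:
        for count, arg in enumerate(args):
            msg = msg.replace('<<' + str(count + 1) + '>>', arg)
    return msg

# Example:
# fill_placeholders('Backup for [<<1>>] created and saved to: [<<2>>]',
#                   ['MYPROJ', '/tmp/MYPROJ.zip'])
# -> 'Backup for [MYPROJ] created and saved to: [/tmp/MYPROJ.zip]'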
class ProjProcess (object) : def __init__(self, pid, gid = None, projectConfig = None) : '''Intitate the whole class and create the object.''' self.pid = pid self.tools = Tools() self.user = UserConfig() self.userConfig = self.user.userConfig if projectConfig : self.projectConfig = projectConfig else : self.proj_config = Config(self.pid) self.proj_config.getProjectConfig() self.projectConfig = self.proj_config.projectConfig self.local = ProjLocal(pid, gid, self.projectConfig) self.log = ProjLog(pid) # Log messages for this module self.errorCodes = { '0000' : ['MSG', 'Placeholder message'], 'XPRT-000' : ['MSG', 'Messages for export issues (probably only in project.py)'], 'XPRT-005' : ['MSG', 'Unassigned error message ID.'], 'XPRT-010' : ['ERR', 'Export file name could not be formed with available configuration information.'], 'XPRT-020' : ['ERR', 'Unable to export: [<<1>>].'], 'XPRT-030' : ['MSG', 'Files exported to [<<1>>].'], 'XPRT-040' : ['MSG', 'Beginning export, please wait...'], 'XPRT-050' : ['MSG', 'Unassigned error message ID.'], '1210' : ['MSG', 'Processes completed successfully on: [<<1>>] by [<<2>>]'], '1220' : ['ERR', 'Processes for [<<1>>] failed. Script [<<2>>] returned this error: [<<3>>]'], '1240' : ['MSG', 'Component group preprocessing [<<1>>] for group [<<2>>].'], '1260' : ['ERR', 'Installed the default component preprocessing script. Editing will be required for it to work with your project.'], '1265' : ['LOG', 'Component preprocessing script is already installed.'], } ############################################################################### ############################### Export Functions ############################## ############################################################################### ####################### Error Code Block Series = 0200 ######################## ############################################################################### # FIXME: This needs to be rewritten #def export (self, cType, cName, path = None, script = None, bundle = False, force = False) : #'''Facilitate the exporting of project text. It is assumed that the #text is clean and ready to go and if any extraneous publishing info #has been injected into the text, it will be removed by an appropreate #post-process that can be applied by this function. No validation #will be initiated by this function.''' ## FIXME - Todo: add post processing script feature ## Probably need to create the component object now #self.createComponent(cName) ## Figure out target path #if path : #path = self.tools.resolvePath(path) #else : #parentFolder = os.path.dirname(self.local.projHome) #path = os.path.join(parentFolder, 'Export') ## Make target folder if needed #if not os.path.isdir(path) : #os.makedirs(path) ## Start a list for one or more files we will process #fList = [] ## Will need the stylesheet for copy #projSty = self.projectConfig['Managers'][cType + '_Style']['mainStyleFile'] #projSty = os.path.join(self.local.projStyleFolder, projSty) ## Process as list of components #self.log.writeToLog('XPRT-040') #for cid in self.components[cName].getSubcomponentList(cName) : #cidCName = self.components[cName].getRapumaCName(cid) #ptName = PT_Tools(self).formPTName(cName, cid) ## Test, no name = no success #if not ptName : #self.log.writeToLog('XPRT-010') #self.tools.dieNow() #target = os.path.join(path, ptName) #source = os.path.join(self.local.projComponentFolder, cidCName, cid + '.' 
+ cType) ## If shutil.copy() spits anything back its bad news #if shutil.copy(source, target) : #self.log.writeToLog('XPRT-020', [self.tools.fName(target)]) #else : #fList.append(target) ## Start the main process here #if bundle : #archFile = os.path.join(path, cName + '_' + self.tools.ymd() + '.zip') ## Hopefully, this is a one time operation but if force is not True, ## we will expand the file name so nothing is lost. #if not force : #if os.path.isfile(archFile) : #archFile = os.path.join(path, cName + '_' + self.tools.fullFileTimeStamp() + '.zip') #myzip = zipfile.ZipFile(archFile, 'w', zipfile.ZIP_DEFLATED) #for f in fList : ## Create a string object from the contents of the file #strObj = StringIO.StringIO() #for l in open(f, "rb") : #strObj.write(l) ## Write out string object to zip #myzip.writestr(self.tools.fName(f), strObj.getvalue()) #strObj.close() ## Close out the zip and report #myzip.close() ## Clean out the folder #for f in fList : #os.remove(f) #self.log.writeToLog('XPRT-030', [self.tools.fName(archFile)]) #else : #self.log.writeToLog('XPRT-030', [path]) #return True ############################################################################### ########################## Text Processing Functions ########################## ############################################################################### ######################## Error Code Block Series = 1200 ####################### ############################################################################### def turnOnOffPreprocess (self, gid, onOff) : '''Turn on or off preprocessing on incoming component text.''' self.projectConfig['Groups'][gid]['usePreprocessScript'] = onOff self.tools.writeConfFile(self.projectConfig) self.log.writeToLog(self.errorCodes['1240'], [str(onOff), gid]) def checkForPreprocessScript (self, gid) : '''Check to see if a preprocess script is installed. If not, install the default script and give a warning that the script is not complete.''' # First make sure the Scripts folder is there if not os.path.isdir(self.local.projScriptFolder) : os.makedirs(self.local.projScriptFolder) # Check and copy if needed if not os.path.isfile(self.local.groupPreprocessFile) : shutil.copy(self.local.rpmPreprocessFile, self.local.groupPreprocessFile) self.tools.makeExecutable(self.local.groupPreprocessFile) self.log.writeToLog(self.errorCodes['1260']) else : self.log.writeToLog(self.errorCodes['1265']) def runProcessScript (self, target, scriptFile) : '''Run a text processing script on a component. This assumes the component and the script are valid and the component lock is turned off. If not, you cannot expect any good to come of this.''' # subprocess will fail if permissions are not set on the # script we want to run. The correct permission should have # been set when we did the installation. err = subprocess.call([scriptFile, target]) if err == 0 : self.log.writeToLog(self.errorCodes['1210'], [self.tools.fName(target), self.tools.fName(scriptFile)]) else : self.log.writeToLog(self.errorCodes['1220'], [self.tools.fName(target), self.tools.fName(scriptFile), str(err)]) return False return True def scriptInstall (self, source, target) : '''Install a script. 
A script can be a collection of items in a zip file or a single .py script file.''' scriptTargetFolder, fileName = os.path.split(target) if self.tools.isExecutable(source) : shutil.copy(source, target) self.tools.makeExecutable(target) elif self.tools.fName(source).split('.')[1].lower() == 'zip' : myZip = zipfile.ZipFile(source, 'r') for f in myZip.namelist() : data = myZip.read(f, source) # Pretty sure zip represents directory separator char as "/" regardless of OS myPath = os.path.join(scriptTargetFolder, f.split("/")[-1]) try : myFile = open(myPath, "wb") myFile.write(data) myFile.close() except : pass myZip.close() return True else : self.tools.dieNow('Script is an unrecognized type: ' + self.tools.fName(source) + ' Cannot continue with installation.') def installPostProcess (self, cType, script, force = None) : '''Install a post process script into the main components processing folder for a specified component type. This script will be run on every file of that type that is imported into the project. Some projects will have their own specially developed post process script. Use the "script" var to specify a process (which should be bundled in a system compatable way). If "script" is not specified we will copy in a default script that the user can modify. This is currently limited to Python scripts only which do in-place processes on the target files. The script needs to have the same name as the zip file it is bundled in, except the extention is .py instead of the bundle .zip extention.''' # Define some internal vars Ctype = cType.capitalize() oldScript = '' scriptName = os.path.split(script)[1] scriptSourceFolder = os.path.split(script)[0] scriptTarget = os.path.join(self.local.projScriptFolder, self.tools.fName(script).split('.')[0] + '.py') if scriptName in self.projectConfig['CompTypes'][Ctype]['postprocessScripts'] : oldScript = scriptName # First check for prexsisting script record if not force : if oldScript : self.log.writeToLog('POST-080', [oldScript]) return False # In case this is a new project we may need to install a component # type and make a process (components) folder if not self.components[cType] : self.tools.addComponentType(self.projectConfig, self.local, cType) # Make the target folder if needed if not os.path.isdir(self.local.projScriptFolder) : os.makedirs(self.local.projScriptFolder) # First check to see if there already is a script file, return if there is if os.path.isfile(scriptTarget) and not force : self.log.writeToLog('POST-082', [self.tools.fName(scriptTarget)]) return False # No script found, we can proceed if not os.path.isfile(scriptTarget) : self.scriptInstall(script, scriptTarget) if not os.path.isfile(scriptTarget) : self.tools.dieNow('Failed to install script!: ' + self.tools.fName(scriptTarget)) self.log.writeToLog('POST-110', [self.tools.fName(scriptTarget)]) elif force : self.scriptInstall(script, scriptTarget) if not os.path.isfile(scriptTarget) : self.tools.dieNow('Failed to install script!: ' + self.tools.fName(scriptTarget)) self.log.writeToLog('POST-115', [self.tools.fName(scriptTarget)]) # Record the script with the cType post process scripts list scriptList = self.projectConfig['CompTypes'][Ctype]['postprocessScripts'] if self.tools.fName(scriptTarget) not in scriptList : self.projectConfig['CompTypes'][Ctype]['postprocessScripts'] = self.tools.addToList(scriptList, self.tools.fName(scriptTarget)) self.tools.writeConfFile(self.projectConfig) return True def removePostProcess (self, cType) : '''Remove (actually disconnect) a preprocess 
script from a component type. This will not actually remove the script. That would need to be done manually. Rather, this will remove the script name entry from the component type so the process cannot be accessed for this specific component type.''' Ctype = cType.capitalize() # Get old setting old = self.projectConfig['CompTypes'][Ctype]['postprocessScripts'] # Reset the field to '' if old != '' : self.projectConfig['CompTypes'][Ctype]['postprocessScripts'] = '' self.tools.writeConfFile(self.projectConfig) self.log.writeToLog('POST-130', [old,Ctype]) else : self.log.writeToLog('POST-135', [cType.capitalize()]) return True
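# ---------------------------------------------------------------------------
# Illustrative sketch (not part of Rapuma): scriptInstall() above unpacks a
# zipped script bundle "flat" into the project Scripts folder, keeping only
# each member's base name. This is the same idea with hypothetical paths.
# ---------------------------------------------------------------------------
import os
import zipfile

def unzip_flat(zip_path, target_folder):
    '''Extract every member of zip_path directly into target_folder,
    discarding any internal directory structure.'''
    if not os.path.isdir(target_folder):
        os.makedirs(target_folder)
    archive = zipfile.ZipFile(zip_path, 'r')
    try:
        for member in archive.namelist():
            # Zip members always use '/' as the separator, regardless of OS
            name = member.split('/')[-1]
            if not name:
                continue  # skip directory entries
            data = archive.read(member)
            out = open(os.path.join(target_folder, name), 'wb')
            out.write(data)
            out.close()
    finally:
        archive.close()

# Example (hypothetical): unzip_flat('/tmp/myscript.zip', '/tmp/proj/Scripts')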
class Usfm (Group) : '''This class contains information about a type of component used in a type of project.''' # Shared values xmlConfFile = 'usfm.xml' def __init__(self, project, cfg) : super(Usfm, self).__init__(project, cfg) # import pdb; pdb.set_trace() # Set values for this manager self.pid = project.projectIDCode self.gid = project.gid self.cType = 'usfm' self.Ctype = self.cType.capitalize() self.project = project self.local = project.local self.tools = Tools() self.proj_font = ProjFont(self.pid) self.proj_illustration = ProjIllustration(self.pid, self.gid) self.proj_config = Config(self.pid, self.gid) self.proj_config.getProjectConfig() self.proj_config.getAdjustmentConfig() self.projectConfig = self.proj_config.projectConfig self.adjustmentConfig = self.proj_config.adjustmentConfig self.log = project.log self.cfg = cfg self.mType = project.projectMediaIDCode self.renderer = project.projectConfig['CompTypes'][self.Ctype]['renderer'] self.sourceEditor = project.projectConfig['CompTypes'][self.Ctype]['sourceEditor'] self.macPackId = project.projectConfig['CompTypes'][self.Ctype]['macroPackage'] # Get the comp settings self.compSettings = project.projectConfig['CompTypes'][self.Ctype] # Build a tuple of managers this component type needs to use self.usfmManagers = ('text', self.renderer) # Init the general managers for self.mType in self.usfmManagers : self.project.createManager(self.mType) # Create the internal ref names we use in this module self.text = self.project.managers[self.cType + '_Text'] # File names # Folder paths self.projScriptFolder = self.local.projScriptFolder self.projComponentFolder = self.local.projComponentFolder self.gidFolder = os.path.join(self.projComponentFolder, self.gid) # File names with folder paths self.rapumaXmlCompConfig = os.path.join(self.project.local.rapumaConfigFolder, self.xmlConfFile) # Get persistant values from the config if there are any newSectionSettings = self.tools.getPersistantSettings(self.projectConfig['CompTypes'][self.Ctype], self.rapumaXmlCompConfig) if newSectionSettings != self.projectConfig['CompTypes'][self.Ctype] : self.projectConfig['CompTypes'][self.Ctype] = newSectionSettings # Set them here for k, v in self.compSettings.iteritems() : setattr(self, k, v) # Module Error Codes self.errorCodes = { #'USFM-000' : ['MSG', 'Messages for the USFM module.'], #'USFM-005' : ['MSG', 'Unassigned error message ID.'], #'USFM-010' : ['ERR', 'Could not process character pair. This error was found: [<<1>>]. Process could not complete. - usfm.pt_tools.getNWFChars()'], #'USFM-020' : ['ERR', 'Improper character pair found: [<<1>>]. Process could not complete. - usfm.pt_tools.getNWFChars()'], #'USFM-025' : ['WRN', 'No non-word-forming characters were found in the PT settings file. - usfm.pt_tools.getNWFChars()'], #'USFM-040' : ['ERR', 'Hyphenation source file not found: [<<1>>]. Process halted!'], #'USFM-080' : ['LOG', 'Normalizing Unicode text to the [<<1>>] form.'], #'USFM-090' : ['ERR', 'USFM file: [<<1>>] did NOT pass the validation test. Because of an encoding conversion, the terminal output is from the file [<<2>>]. Please only edit [<<1>>].'], #'USFM-095' : ['WRN', 'Validation for USFM file: [<<1>>] was turned off.'], #'USFM-100' : ['MSG', 'Source file editor [<<1>>] is not recognized by this system. Please double check the name used for the source text editor setting.'], #'USFM-110' : ['ERR', 'Source file name could not be built because the Name Form ID for [<<1>>] is missing or incorrect. 
Double check to see which editor created the source text.'], #'USFM-120' : ['ERR', 'Source file: [<<1>>] not found! Cannot copy to project. Process halting now.'], #'USFM-130' : ['ERR', 'Failed to complete preprocessing on component [<<1>>]'], #'USFM-140' : ['MSG', 'Completed installation on [<<1>>] component working text.'], #'USFM-150' : ['ERR', 'Unable to copy [<<1>>] to [<<2>>] - error in text.'], '0010' : ['LOG', 'Created the [<<1>>] master adjustment file.'], '0220' : ['ERR', 'Cannot find: [<<1>>] working file, unable to complete preprocessing for rendering.'], '0230' : ['LOG', 'Created the [<<1>>] component adjustment file.'], '0240' : ['LOG', 'Could not find adjustments section for [<<1>>], created place holder setting.'], '0245' : ['LOG', 'Could not find adjustments for [<<1>>]. No ajustment file has been output.'], '0255' : ['LOG', 'Illustrations not being used. The piclist file has been removed from the [<<1>>] illustrations folder.'], '0260' : ['LOG', 'Piclist file for [<<1>>] has been created.'], '0265' : ['ERR', 'Failed to create piclist file for [<<1>>]!'], '0300' : ['ERR', 'One or more illustration files are missing from the project. Please import these files before continuing.'] } ############################################################################### ############################ Functions Begin Here ############################# ############################################################################### ######################## Error Code Block Series = 0200 ####################### ############################################################################### def makeFileName(self, cid) : '''From what we know, return the full file name.''' # FIXME: We default this to "base" but for a diglot implementation # this is not going to work because we need to have a second # file name. Cross that bridge... return cid + '_base' def makeFileNameWithExt(self, cid) : '''From what we know, return the full file name.''' return self.makeFileName(cid) + '.' + self.cType def getCidPath (self, cid) : '''Return the full path of the cName working text file. This assumes the cid is valid.''' return os.path.join(self.local.projComponentFolder, cid, self.makeFileNameWithExt(cid)) def getCidAdjPath (self, cid) : '''Return the full path of the cName working text adjustments file. This assumes the cName is valid. Note that all macro packages that have a manual adjustment feature must use this naming scheme. The name syntax comes from the "mother" macro package which is ptx2pdf.''' return os.path.join(self.local.projComponentFolder, cid, self.makeFileNameWithExt(cid) + '.adj') def render(self, gid, cidList, pages, override, save) : '''Does USFM specific rendering of a USFM component''' # import pdb; pdb.set_trace() # If the whole group is being rendered, we need to preprocess it cids = [] if not cidList : cids = self.projectConfig['Groups'][gid]['cidList'] else : cids = cidList # Preprocess all subcomponents (one or more) # Stop if it breaks at any point for cid in cids : if not self.preProcessGroup(gid, [cid]) : return False # With everything in place we can render the component. 
# Note: We pass the cidList straight through self.project.managers['usfm_' + self.renderer.capitalize()].run(gid, cidList, pages, override, save) return True def preProcessGroup (self, gid, cidList) : '''This will prepare a component group for rendering by checking for and/or creating any dependents it needs to render properly.''' # import pdb; pdb.set_trace() # Get some relevant settings # FIXME: Note page border has not really been implemented yet. # It is different from backgound management useIllustrations = self.tools.str2bool(self.projectConfig['Groups'][gid]['useIllustrations']) useManualAdjustments = self.tools.str2bool(self.projectConfig['Groups'][gid]['useManualAdjustments']) # See if the working text is present for each subcomponent in the # component and try to install it if it is not for cid in cidList : cType = self.cfg['cType'] cidUsfm = self.getCidPath(cid) # Test for source here and die if it isn't there if not os.path.isfile(cidUsfm) : self.log.writeToLog(self.errorCodes['0220'], [cidUsfm], 'usfm.preProcessGroup():0220') # Add/manage the dependent files for this cid # FIXME: Some changes may be needed here to guide creation of adjustment files # Component adjustment file cidAdjFile = self.getCidAdjPath(cid) if useManualAdjustments : self.createCompAdjustmentFile(cid) else : # If no adjustments, remove any exsiting file if os.path.isfile(cidAdjFile) : os.remove(cidAdjFile) # Component piclist file cidPiclistFile = self.proj_illustration.getCidPiclistFile(cid) if useIllustrations : if self.proj_illustration.hasIllustrations(cid) : # Check for missing illustrations (die here if not found) if self.proj_illustration.missingIllustrations(cid) : self.log.writeToLog(self.errorCodes['0300']) # Create piclist file if not there or if the config has changed if not os.path.isfile(cidPiclistFile) or self.tools.isOlder(cidPiclistFile, self.local.illustrationConfFile) : # Now make a fresh version of the piclist file if self.proj_illustration.createPiclistFile(cid) : self.log.writeToLog(self.errorCodes['0260'], [cid]) else : self.log.writeToLog(self.errorCodes['0265'], [cid]) else : for f in [self.local.layoutConfFile, self.local.illustrationConfFile] : if self.tools.isOlder(cidPiclistFile, f) or not os.path.isfile(cidPiclistFile) : # Remake the piclist file if self.proj_illustration.createPiclistFile(cid) : self.log.writeToLog(self.errorCodes['0260'], [cid]) else : self.log.writeToLog(self.errorCodes['0265'], [cid]) else : # Does not seem to be any illustrations for this cid # clean out any piclist file that might be there if os.path.isfile(cidPiclistFile) : os.remove(cidPiclistFile) else : # If we are not using illustrations then any existing piclist file will be removed if os.path.isfile(cidPiclistFile) : os.remove(cidPiclistFile) self.log.writeToLog(self.errorCodes['0255'], [cid]) # Any more stuff to run? return True # FIXME: Moved this to xetex.py as that was the only place it was called from #def checkStartPageNumber (self) : #'''Adjust page number for the current group. The current logic is #if there is no number in the startPageNumber setting, we can put #one in there as a suggestion. 
If there is already one there, the #user will be responsible for seeing that it is correct.''' ## import pdb; pdb.set_trace() #try : ## Simply try to return anything that is in the field #cStrPgNo = self.projectConfig['Groups'][self.gid]['startPageNumber'] #if cStrPgNo != '' : #return cStrPgNo #except : ## If nothing is there, we'll make a suggestion #pGrp = str(self.projectConfig['Groups'][self.gid]['precedingGroup']) #if pGrp == 'None' : #self.projectConfig['Groups'][self.gid]['startPageNumber'] = 1 #self.tools.writeConfFile(self.projectConfig) #return '1' #else : ## Calculate the suggested number based on the preceeding group #try : #cStrPgNo = str(self.projectConfig['Groups'][self.gid]['startPageNumber']) #except : #cStrPgNo = 1 #self.projectConfig['Groups'][self.gid]['startPageNumber'] = 1 #try : #pGrpPgs = int(self.projectConfig['Groups'][pGrp]['totalPages']) #pGrpStrPgNo = int(self.projectConfig['Groups'][pGrp]['startPageNumber']) #except : ## FIXME: Maybe this could go out and find out exactly how many pages were in the preceeding group #pGrpPgs = 1 #pGrpStrPgNo = 1 #self.projectConfig['Groups'][pGrp]['totalPages'] = 1 #self.projectConfig['Groups'][pGrp]['startPageNumber'] = 1 ## Whether this is right or wrong set it the way it is #self.projectConfig['Groups'][self.gid]['startPageNumber'] = (pGrpStrPgNo + pGrpPgs) #self.tools.writeConfFile(self.projectConfig) #return self.projectConfig['Groups'][pGrp]['startPageNumber'] def createCompAdjustmentFile (self, cid) : '''Create an adjustment file for this cid. If entries exsist in the adjustment.conf file.''' description = 'Auto-generated text adjustments file for: ' + cid + '\n' # import pdb; pdb.set_trace() # Check for a master adj conf file if os.path.exists(self.local.adjustmentConfFile) : adjFile = self.getCidAdjPath(cid) # Clean up old file if there is one so we can start fresh if os.path.exists(adjFile) : os.remove(adjFile) # Nothing to do if no gid section is found if not self.adjustmentConfig.has_key(self.gid) : self.tools.buildConfSection(self.adjustmentConfig, self.gid) if not self.adjustmentConfig[self.gid].has_key(cid) : self.tools.buildConfSection(self.adjustmentConfig[self.gid], cid) self.adjustmentConfig[self.gid][cid]['%1.1'] = '1' self.tools.writeConfFile(self.adjustmentConfig) self.log.writeToLog(self.errorCodes['0240'], [cid]) return False # Sort through commented adjustment lines () if self.adjustmentConfig[self.gid].has_key(cid) : c = False for k in self.adjustmentConfig[self.gid][cid].keys() : if not re.search(r'%|#', k) : c = True if not c : self.log.writeToLog(self.errorCodes['0245'], [cid]) return False # If we make it this far, create the new adjustment file with codecs.open(adjFile, "w", encoding='utf_8') as writeObject : writeObject.write(self.tools.makeFileHeader(adjFile, description, True)) # Output like this: JAS 1.13 +1 for k, v in self.adjustmentConfig[self.gid][cid].iteritems() : if re.search(r'%|#', k) : continue adj = v if int(v) > 0 : adj = '+' + str(v) writeObject.write(cid.upper() + ' ' + k + ' ' + adj + '\n') self.log.writeToLog(self.errorCodes['0230'], [self.tools.fName(adjFile)]) return True def createProjAdjustmentConfFile (self) : '''Create a project master component adjustment file that group component ajustment files will be created automatically from. 
This will run every time preprocess is run but after the first time it will only add a sections for new groups or components.''' if not os.path.exists(self.adjustmentConfFile) : self.adjustmentConfig = ConfigObj(self.adjustmentConfFile, encoding='utf-8') self.adjustmentConfig.filename = self.adjustmentConfFile self.updateCompAdjustmentConf() return True def updateCompAdjustmentConf (self) : '''Update an adjustmentConfig based on changes in the projectConfig.''' for gid in self.projectConfig['Groups'].keys() : if gid not in self.adjustmentConfig.keys() : self.tools.buildConfSection(self.adjustmentConfig, gid) for comp in self.projectConfig['Groups'][gid]['cidList'] : if not self.adjustmentConfig[gid].has_key(comp) : self.tools.buildConfSection(self.adjustmentConfig[gid], comp) self.adjustmentConfig[gid][comp]['%1.1'] = '1' self.tools.writeConfFile(self.adjustmentConfig) return True ############################################################################### ######################## USFM Component Text Functions ######################## ############################################################################### ######################## Error Code Block Series = 0400 ####################### ############################################################################### def getComponentType (self, gid) : '''Return the cType for a component.''' # import pdb; pdb.set_trace() try : cType = self.projectConfig['Groups'][gid]['cType'] except Exception as e : # If we don't succeed, we should probably quite here self.log.writeToLog('COMP-200', ['Key not found ' + str(e)]) self.tools.dieNow() return cType def isCompleteComponent (self, gid, cid) : '''A two-part test to see if a component has a config entry and a file.''' if self.hasCidFile(gid, cid) : return True def hasUsfmCidInfo (self, cid) : '''Return True if this cid is in the PT USFM cid info dictionary.''' if cid in self.usfmCidInfo().keys() : return True def hasCidFile (self, gid, cid) : '''Return True or False depending on if a working file exists for a given cName.''' cType = self.projectConfig['Groups'][gid]['cType'] return os.path.isfile(os.path.join(self.local.projComponentFolder, cid, cid + '.' + cType)) def usfmCidInfo (self) : '''Return a dictionary of all valid information about USFMs used in PT. 
Note that a couple special non-standard IDs have been added at the top of the list.''' # ID Comp Name Comp ID PT ID Chps return { '_z_' : ['USFM InternalCaller', 'usfm_internal_caller', '00', 0], 'gen' : ['Genesis', 'genesis', '01', 50], 'exo' : ['Exodus', 'exodus', '02', 40], 'lev' : ['Leviticus', 'leviticus', '03', 27], 'num' : ['Numbers', 'numbers', '04', 36], 'deu' : ['Deuteronomy', 'deuteronomy', '05', 34], 'jos' : ['Joshua', 'joshua', '06', 24], 'jdg' : ['Judges', 'judges', '07', 21], 'rut' : ['Ruth', 'ruth', '08', 4], '1sa' : ['1 Samuel', '1_samuel', '09', 31], '2sa' : ['2 Samuel', '2_samuel', '10', 24], '1ki' : ['1 Kings', '1_kings', '11', 22], '2ki' : ['2 Kings', '2_kings', '12', 25], '1ch' : ['1 Chronicles', '1_chronicles', '13', 29], '2ch' : ['2 Chronicles', '2_chronicles', '14', 36], 'ezr' : ['Ezra', 'ezra', '15', 10], 'neh' : ['Nehemiah', 'nehemiah', '16', 13], 'est' : ['Esther', 'esther', '17', 10], 'job' : ['Job', 'job', '18', 42], 'psa' : ['Psalms', 'psalms', '19', 150], 'pro' : ['Proverbs', 'proverbs', '20', 31], 'ecc' : ['Ecclesiastes', 'ecclesiastes', '21', 12], 'sng' : ['Song of Songs', 'song_of_songs', '22', 8], 'isa' : ['Isaiah', 'isaiah', '23', 66], 'jer' : ['Jeremiah', 'jeremiah', '24', 52], 'lam' : ['Lamentations', 'lamentations', '25', 5], 'ezk' : ['Ezekiel', 'ezekiel', '26', 48], 'dan' : ['Daniel', 'daniel', '27', 12], 'hos' : ['Hosea', 'hosea', '28', 14], 'jol' : ['Joel', 'joel', '29', 3], 'amo' : ['Amos', 'amos', '30', 9], 'oba' : ['Obadiah', 'obadiah', '31', 1], 'jon' : ['Jonah', 'jonah', '32', 4], 'mic' : ['Micah', 'micah', '33', 7], 'nam' : ['Nahum', 'nahum', '34', 3], 'hab' : ['Habakkuk', 'habakkuk', '35', 3], 'zep' : ['Zephaniah', 'zephaniah', '36', 3], 'hag' : ['Haggai', 'haggai', '37', 2], 'zec' : ['Zechariah', 'zechariah', '38', 14], 'mal' : ['Malachi', 'malachi', '39', 4], 'mat' : ['Matthew', 'matthew', '41', 28], 'mrk' : ['Mark', 'mark', '42', 16], 'luk' : ['Luke', 'luke', '43', 24], 'jhn' : ['John', 'john', '44', 21], 'act' : ['Acts', 'acts', '45', 28], 'rom' : ['Romans', 'romans', '46', 16], '1co' : ['1 Corinthians', '1_corinthians', '47', 16], '2co' : ['2 Corinthians', '2_corinthians', '48', 13], 'gal' : ['Galatians', 'galatians', '49', 6], 'eph' : ['Ephesians', 'ephesians', '50', 6], 'php' : ['Philippians', 'philippians', '51', 4], 'col' : ['Colossians', 'colossians', '52', 4], '1th' : ['1 Thessalonians', '1_thessalonians', '53', 5], '2th' : ['2 Thessalonians', '2_thessalonians', '54', 3], '1ti' : ['1 Timothy', '1_timothy', '55', 6], '2ti' : ['2 Timothy', '2_timothy', '56', 4], 'tit' : ['Titus', 'titus', '57', 3], 'phm' : ['Philemon', 'philemon', '58', 1], 'heb' : ['Hebrews', 'hebrews', '59', 13], 'jas' : ['James', 'james', '60', 5], '1pe' : ['1 Peter', '1_peter', '61', 5], '2pe' : ['2 Peter', '2_peter', '62', 3], '1jn' : ['1 John', '1_john', '63', 5], '2jn' : ['2 John', '2_john', '64', 1], '3jn' : ['3 John', '3_john', '65', 1], 'jud' : ['Jude', 'jude', '66', 1], 'rev' : ['Revelation', 'revelation', '67', 22], 'tob' : ['Tobit', 'tobit', '68', '?'], 'jdt' : ['Judith', 'judith', '69', '?'], 'esg' : ['Esther', 'esther', '70', '?'], 'wis' : ['Wisdom of Solomon', 'wisdom_of_solomon', '71', '?'], 'sir' : ['Sirach', 'sirach', '72', '?'], 'bar' : ['Baruch', 'baruch', '73', '?'], 'lje' : ['Letter of Jeremiah', 'letter_of_jeremiah', '74', '?'], 's3y' : ['Song of the Three Children', 'song_3_children', '75', '?'], 'sus' : ['Susanna', 'susanna', '76', '?'], 'bel' : ['Bel and the Dragon', 'bel_dragon', '77', '?'], '1ma' : ['1 Maccabees', '1_maccabees', '78', 
'?'], '2ma' : ['2 Maccabees', '2_maccabees', '79', '?'], '3ma' : ['3 Maccabees', '3_maccabees', '80', '?'], '4ma' : ['4 Maccabees', '4_maccabees', '81', '?'], '1es' : ['1 Esdras', '1_esdras', '82', '?'], '2es' : ['2 Esdras', '2_esdras', '83', '?'], 'man' : ['Prayer of Manasses', 'prayer_of_manasses', '84', '?'], 'ps2' : ['Psalms 151', 'psalms_151', '85', '?'], 'oda' : ['Odae', 'odae', '86', '?'], 'pss' : ['Psalms of Solomon', 'psalms_of_solomon', '87', '?'], 'jsa' : ['Joshua A', 'joshua_a', '88', '?'], 'jdb' : ['Joshua B', 'joshua_b', '89', '?'], 'tbs' : ['Tobit S', 'tobit_s', '90', '?'], 'sst' : ['Susannah (Theodotion)', 'susannah_t', '91', '?'], 'dnt' : ['Daniel (Theodotion)', 'daniel_t', '92', '?'], 'blt' : ['Bel and the Dragon (Theodotion)', 'bel_dragon_t', '93', '?'], 'frt' : ['Front Matter', 'front_matter', 'A0', 0], 'int' : ['Introductions', 'introductions', 'A7', 0], 'bak' : ['Back Matter', 'back_matter', 'A1', 0], 'cnc' : ['Concordance', 'concordance', 'A8', 0], 'glo' : ['Glossary', 'glossary', 'A9', 0], 'tdx' : ['Topical Index', 'topical_index', 'B0', 0], 'ndx' : ['Names Index', 'names_index', 'B1', 0], 'xxa' : ['Extra A', 'extra_a', '94', 0], 'xxb' : ['Extra B', 'extra_b', '95', 0], 'xxc' : ['Extra C', 'extra_c', '96', 0], 'xxd' : ['Extra D', 'extra_d', '97', 0], 'xxe' : ['Extra E', 'extra_e', '98', 0], 'xxf' : ['Extra F', 'extra_f', '99', 0], 'xxg' : ['Extra G', 'extra_g', '100', 0], 'oth' : ['Other', 'other', 'A2', 0], 'eza' : ['Apocalypse of Ezra', 'apocalypse_of_ezra', 'A4', '?'], '5ez' : ['5 Ezra', '5_ezra_lp', 'A5', '?'], '6ez' : ['6 Ezra (Latin Epilogue)', '6_ezra_lp', 'A6', '?'], 'dag' : ['Daniel Greek', 'daniel_greek', 'B2', '?'], 'ps3' : ['Psalms 152-155', 'psalms_152-155', 'B3', '?'], '2ba' : ['2 Baruch (Apocalypse)', '2_baruch_apocalypse', 'B4', '?'], 'lba' : ['Letter of Baruch', 'letter_of_baruch', 'B5', '?'], 'jub' : ['Jubilees', 'jubilees', 'B6', '?'], 'eno' : ['Enoch', 'enoch', 'B7', '?'], '1mq' : ['1 Meqabyan', '1_meqabyan', 'B8', '?'], '2mq' : ['2 Meqabyan', '2_meqabyan', 'B9', '?'], '3mq' : ['3 Meqabyan', '3_meqabyan', 'C0', '?'], 'rep' : ['Reproof (Proverbs 25-31)', 'reproof_proverbs_25-31', 'C1', '?'], '4ba' : ['4 Baruch (Rest of Baruch)', '4_baruch', 'C2', '?'], 'lao' : ['Laodiceans', 'laodiceans', 'C3', '?'] }
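# ---------------------------------------------------------------------------
# Illustrative sketch (not part of Rapuma): the usfmCidInfo() table maps a
# component ID to [display name, component name, Paratext book number,
# chapter count]. The snippet below shows one way such a table can be used
# to build the zero-padded "###-cid" prefix seen in the renderer's saved
# file names; the tiny table here is a hypothetical excerpt.
# ---------------------------------------------------------------------------
CID_INFO = {
    'mat': ['Matthew', 'matthew', '41', 28],
    'jas': ['James', 'james', '60', 5],
}

def ordered_file_prefix(cid):
    '''Return a sortable prefix like "041-mat" from the Paratext book ID.'''
    pt_id = CID_INFO[cid][2]
    return '{:0>3}-{}'.format(pt_id, cid)

# Example: ordered_file_prefix('jas') -> '060-jas'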
class ProjEdit (object) : def __init__(self, pid) : '''Intitate the whole class and create the object.''' self.pid = pid self.tools = Tools() self.rapumaHome = os.environ.get('RAPUMA_BASE') self.userHome = os.environ.get('RAPUMA_USER') self.user = UserConfig(self.rapumaHome, self.userHome) self.userConfig = self.user.userConfig self.projectConfig = Config(pid).projectConfig self.projHome = None self.local = None self.finishInit() # Log messages for this module self.errorCodes = { 'EDIT-000' : ['MSG', 'Messages for editing project and setting files.'], 'EDIT-005' : ['MSG', 'Unassigned error message ID.'], 'EDIT-010' : ['ERR', 'The component [<<1>>] has multiple subcomponents and cannot be opened for editing. Please work with the individual subcomponents.'], 'EDIT-020' : ['ERR', 'Working text file [<<1>>] not found.'], 'EDIT-030' : ['ERR', 'No files found to edit with the commands supplied.'], 'EDIT-040' : ['MSG', 'Component files for [<<1>>] have been opened in your file editor.'], '0000' : ['MSG', 'Placeholder message'], } def finishInit (self, projHome = None) : '''Finishing collecting settings that would be needed for most functions in this module.''' # Look for an existing project home path if self.tools.isProject(self.pid) : localProjHome = os.path.join(self.userConfig['Resources']['projects'], self.pid) else : localProjHome = '' # Testing: The local project home wins over a user provided one if localProjHome and not projHome : self.projHome = localProjHome elif projHome : self.projHome = projHome # If a projHome was succefully found, we can go on if self.projHome : self.local = ProjLocal(self.rapumaHome, self.userHome, self.projHome) ############################################################################### ################################ Edit Functions ############################### ############################################################################### ####################### Error Code Block Series = 0200 ######################## ############################################################################### # FIXME: Still lots to do on this next function def edit (self, gid, cName = None, glob = False, sys = False) : '''Call editing application to edit various project and system files.''' editDocs = ['gedit'] # If a subcomponent is called, pull it up and its dependencies # This will not work with components that have more than one # subcomponent. 
if cName : # Probably need to create the component object now self.createComponent(cName) cid = self.components[cName].getUsfmCid(cName) cType = self.groups[gid].getComponentType(gid) self.buildComponentObject(cType, cid) cidList = self.groups[gid].getSubcomponentList(gid) if len(cidList) > 1 : self.log.writeToLog('EDIT-010', [cid]) self.tools.dieNow() self.createManager('text') compWorkText = self.groups[gid].getCidPath(cid) if os.path.isfile(compWorkText) : editDocs.append(compWorkText) compTextAdj = self.components[cName].getCidAdjPath(cid) compTextIlls = self.components[cName].getCidPiclistPath(cid) dep = [compTextAdj, compTextIlls] for d in dep : if os.path.isfile(d) : editDocs.append(d) else : self.log.writeToLog('EDIT-020', [self.tools.fName(compWorkText)]) self.tools.dieNow() # Look at project global settings if glob : for files in os.listdir(self.local.projConfFolder): if files.endswith(".conf"): editDocs.append(os.path.join(self.local.projConfFolder, files)) globSty = os.path.join(self.local.projStyleFolder, self.projectConfig['Managers']['usfm_Style']['mainStyleFile']) custSty = os.path.join(self.local.projStyleFolder, self.projectConfig['Managers']['usfm_Style']['customStyleFile']) if os.path.isfile(globSty) : editDocs.append(globSty) if os.path.isfile(custSty) : editDocs.append(custSty) # FIXME: This next part is hard-wired, be nice to do better fileName = 'xetex_settings_usfm-ext.tex' macExt = os.path.join(self.local.projMacroFolder, 'usfmTex', fileName) editDocs.append(macExt) # Look at system setting files if sys : editDocs.append(self.local.userConfFile) # Pull up our docs in the editor if len(editDocs) > 1 : subprocess.call(editDocs) self.log.writeToLog('EDIT-040', [cName]) else : self.log.writeToLog('EDIT-030')
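# ---------------------------------------------------------------------------
# Illustrative note (not part of Rapuma): edit() launches the editor with
# subprocess.call(), which blocks until the editor is closed, while the
# renderer launches the PDF viewer with subprocess.Popen(), which returns
# immediately. A minimal sketch of a launcher that supports either choice,
# using hypothetical file paths:
# ---------------------------------------------------------------------------
import os
import subprocess

def open_in_editor(files, editor='gedit', block=False):
    '''Open the files that actually exist in the given editor command.'''
    cmd = [editor] + [f for f in files if os.path.isfile(f)]
    if len(cmd) < 2:
        return False  # nothing to open
    if block:
        return subprocess.call(cmd) == 0
    subprocess.Popen(cmd)
    return True

# Example (hypothetical): open_in_editor(['/tmp/proj/Config/project.conf'])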