def procCustFiles(self):
    tgtFiles = []
    fnl = self.ib.custFileName.split(",")
    for fnp in fnl:
        fs = self.checkIncFiles(fnp)
        if len(fs) == 1:
            fnOff = len("_YYYYMMDD.csv")
            self.log.info("Processing %s " % fs[0])
            fn = fu.getFileBaseName(fs[0])
            d = "%s/%s" % (self.ib.archDir, fn)
            rc = fu.copyFile(fs[0], d)
            self.log.info("Archiving %s to %s\trc=%s" % (fs[0], d, rc))
            d = "%s/%s.csv" % (self.ib.workDir, fn[:-fnOff])
            rc = fu.moveFile(fs[0], d)
            self.log.info("Moving %s to %s\trc=%s" % (fs[0], d, rc))
            tgtFiles.append(d)
        else:
            fnOff = len("_*[0-9].csv")
            fn = fu.getFileBaseName(fnp)
            d = "%s/%s.csv" % (self.ib.workDir, fn[:-fnOff])
            self.log.debug("fn = %s, dest = %s" % (fn, d))
            self.log.warn("File %s was not found" % fnp)
            rc = fu.crtEmpFile(d)
            self.log.info("Creating 0 byte file %s\trc=%s" % (d, rc))
    # Verify column numbers:
    if self.checkFileColsFlag is True:
        rc = self.checkFileCols(tgtFiles, ",")
        if rc != 0:
            self.log.error("Issue with column number. Please check bad directory under %s" % self.ib.badDir)
            # Implement email notification.
            # return rc
    return 0
def chkCompleteSet(self, dayr, fL):
    rc = 0
    for fnp in self.incFiles:
        fn = fu.getFileBaseName(fnp)
        idx = fn.find('_', 3, len(fn))
        if idx < 3:
            rc += 1
            self.log.error('fn = %s bad idx = %d' % (fn, idx))
            continue
        ns = fn[idx + 1:-4]
        if dayr != ns:
            rc += 1
            self.log.error('fn = %s bad pattern = %s dayr = %s ' % (fn, ns, dayr))
            continue
        srcf = '%s.txt' % fn[0:idx]
        self.log.debug('Key SrcFile %s ' % srcf)
        # Check if file is a member of the pre-defined set.
        try:
            self.ib.srcFile.index(srcf)
        except ValueError:
            self.log.error('filename %s not found in set list: %s' % (srcf, self.ib.srcFile))
            rc += 1
        self.log.debug('fn = %s ns = %s' % (fn, ns))
    return rc
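# Illustrative walk-through of the slicing in chkCompleteSet(), using one of the
# example filenames from the incFileSet comment further below (values are
# illustrative only):
#   fn   = 'Ap_sce5100_20120812_1.txt'
#   idx  = fn.find('_', 3)      -> 10   (first '_' at or after position 3)
#   ns   = fn[idx + 1:-4]       -> '20120812_1'   (must equal dayr)
#   srcf = '%s.txt' % fn[0:idx] -> 'Ap_sce5100.txt' (must appear in self.ib.srcFile)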
def procIncFiles(self):
    if len(self.workFiles) < 1:
        self.log.error('No files to process')
        return 1
    self.workFiles.sort()
    rc = 0
    # Files in the working directory:
    i = 1
    self.log.debug('Will Process a total of %d file(s) ' % len(self.workFiles))
    for fnp in self.workFiles:
        self.log.info('\nProcessing File (%d) => %s ' % (i, fnp))
        fn = fu.getFileBaseName(fnp)
        if self.verifyCSVFlg is True:
            r, b = fu.readCSV(fnp, FLDxROW, SEP)
            if len(b) > 0:
                fbad = '%s/%s.bad' % (self.ib.badDir, fn)
                rc = fu.createFile(fbad, b)
                self.log.error("No of %d bad row(s) on %s" % (len(b), fnp))
                self.log.error("Creating file %s rc = %s" % (fbad, rc))
                self.log.debug("Bad rows = %s" % b)
                return 5
            if len(r) == 0:
                self.log.error("No rows to process on file %s" % fnp)
                return 6
        t = '%s/%s' % (self.ib.workDir, self.ib.srcFile[0])
        rc = fu.moveFile(fnp, t)
        self.log.info('Renaming %s to %s rc = %s' % (fnp, t, rc))
        if rc != 0:
            self.log.error('Could not move File %s to %s' % (fnp, t))
            return 7
        # Invoke workflow.
        rc = self._wkfCodeOvrd()
        if rc != 0:
            self.log.error('Running %s.%s rc = %s' % (self.ib.fld, self.ib.wkf, rc))
            if self.exitOnError:
                self.log.debug('ExitOnError is TRUE rc = %s' % (rc))
                return rc
        else:
            self.log.info('Ran %s.%s rc = %s' % (self.ib.fld, self.ib.wkf, rc))
        r = fu.delFile(t)
        self.log.debug('Deleting File %s rc = %s' % (t, r))
        i += 1
    return rc
def mvFileToWorkDir(self):
    self.workFiles = []
    for src in self.incFiles:
        fn = fu.getFileBaseName(src)
        d = '%s/%s' % (self.ib.workDir, fn)
        rc = fu.moveFile(src, d)
        if rc == 0:
            self.workFiles.append(d)
        self.log.info('Moving file %s to %s rc= %s' % (src, d, rc))
    if len(self.workFiles) < 1:
        return 1
    else:
        return 0
def archFiles(self):
    rc = 0
    for src in self.incFiles:
        fn = fu.getFileBaseName(src)
        d = '%s/%s' % (self.ib.archDir, fn)
        r = fu.moveFile(src, d)
        if r != 0:
            rc += 1
        self.log.info('Moving file %s to %s rc= %s' % (src, d, rc))
        zf = '%s/%s.%s' % (self.ib.archDir, fn, self.ib.archSuff)
        r = fu.compressFile(zf, d)
        if r != 0:
            self.log.warn("Cannot compress %s r = %s. " % (zf, r))
        else:
            self.log.info("Compressed %s r = %s " % (zf, r))
    return rc
def mvTrigFileToArchDir(self):
    rc = 0
    for src in self.trigFiles:
        fn = fu.getFileBaseName(src)
        d = '%s/%s.%s' % (self.ib.archDir, fn, su.getTimeSTamp())
        r = fu.moveFile(src, d)
        if r != 0:
            rc += 1
            self.log.error("Error moving trigger file %s r = %s. " % (src, r))
        else:
            self.log.info('Moving Trigger file %s to %s rc= %s' % (src, d, r))
    return rc
    # Only reached if the trigger file had been removed externally to the program!
def procXMLFiles(self):
    rc = 0
    r = False
    x = []
    cdrRes = []  # List that contains all the responses.
    if len(self.cdrFilestoProc) < 1:
        self.log.error('No incoming CDR files to process : cdrFilestoProc')
        return 1
    pxmlCDR = pxml.ProcXMLCDR(self.log)
    self.log.debug("Files to process %s" % ''.join(self.cdrFilestoProc))
    for fn in self.cdrFilestoProc:
        self.log.debug("processing %s" % fn)
        r = pxmlCDR.isValidXML(fn)
        if r is False:
            rc += 1
        else:
            # parseCDR() is expected to return [rfId, rCode, desc].
            x = pxmlCDR.parseCDR(fn)
            ftn = fu.getFileBaseName(fn)
            if x is None or len(x) != 3:
                self.log.error('Error Parsing %s ' % fn)
                tgt = '%s/%s.%s' % (self.ib.badDir, ftn, self.ts)
            else:
                self.log.debug('fn=%s rfId=%s rCode=%s desc=%s' % (fn, x[0], x[1], x[2]))
                r = su.toInt(x[1])
                if r != 0:
                    self.log.error('fn=%s rfId=%s rCode=%s desc=%s' % (fn, x[0], x[1], x[2]))
                    tgt = '%s/%s.%s' % (self.ib.badDir, ftn, self.ts)
                    rc += 1
                else:
                    cdrRes.append('%s %s %s\n' % (x[0], self.ib.FS, x[1]))
                    tgt = '%s/%s.%s' % (self.ib.archDir, ftn, self.ts)
            # Move the file to the bad or archive directory.
            r = fu.moveFile(fn, tgt)
            if r == 0:
                self.log.info('mv %s to %s' % (fn, tgt))
            else:
                self.log.error('mv %s to %s r = %s ' % (fn, tgt, r))
    self.log.debug("cdrRes No of elements %d " % len(cdrRes))
    if len(cdrRes) < 1:
        self.log.error('cdrRes Did not find any valid element !')
        return 2
    fn = '%s/CDR_%s.txt' % (self.ib.cdrOutDir, self.ts)
    r = fu.createFile(fn, cdrRes)
    if r == 0:
        self.log.info('Created file %s with CDR responses' % fn)
    else:
        self.log.error('Could not create file %s with CDR responses' % fn)
        rc = 1
    return rc
def mvBillFiles(self):
    rc = 0
    r = 1
    bf = self.procIncFiles(self.ib.billDir, self.ib.billFileName)
    flen = len(bf)
    self.log.debug(' %d Files to process %s %s' % (flen, self.ib.billFileName, ''.join(bf)))
    for fsrc in bf:
        fn = fu.getFileBaseName(fsrc)
        ftgt = '%s/%s' % (self.ib.ebillDir, fn)
        r = fu.moveFile(fsrc, ftgt)
        self.log.debug('r = %d mv %s %s' % (r, fsrc, ftgt))
        if r != 0:
            self.log.error('r = %d mv %s %s' % (r, fsrc, ftgt))
            rc += r
    return rc
def archGenFiles(self, fls, ts, cf=False):
    rc = 0
    for src in fls:
        fn = fu.getFileBaseName(src)
        if ts == '':
            d = '%s/%s' % (self.ib.archDir, fn)
        else:
            d = '%s/%s.%s' % (self.ib.archDir, ts, fn)
        r = fu.moveFile(src, d)
        if r != 0:
            rc += 1
        self.log.info('Moving file %s to %s rc= %s' % (src, d, rc))
        if cf is True:
            zf = '%s/%s.%s' % (self.ib.archDir, fn, self.ib.archSuff)
            r = fu.compressFile(zf, d)
            if r != 0:
                self.log.warn("Cannot compress %s r = %s. " % (zf, r))
            else:
                self.log.info("Compressed %s r = %s " % (zf, r))
    return rc
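# Hypothetical call sites for archGenFiles(), for illustration only (the actual
# callers live elsewhere in the application):
#   rc = self.archGenFiles(self.incFiles, '')                   # archive under original names
#   rc = self.archGenFiles(self.trigFiles, su.getTimeSTamp())   # prefix archived names with a timestamp
#   rc = self.archGenFiles(self.incFiles, self.ts, cf=True)     # also compress each archived file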
def _procFiles(self, filestoProc):
    rc = 0
    r = 1
    for ef in filestoProc:
        cmd = '%s/ArchivoBatch.exe %s' % (self.ib.scriptDir, ef)
        r, rmsg = p.runSync(cmd, self.log)
        ftn = fu.getFileBaseName(ef)
        self.log.debug('r = %s cmd = %s ftn = %s' % (r, cmd, ftn))
        # Return code 95 is treated the same as success (non-fatal).
        if r != 0 and r != 95:
            tgt = '%s/%s.%s' % (self.ib.badDir, ftn, self.ts)
            self.log.error('r = %s cmd = %s rmsg=%s' % (r, cmd, rmsg))
            rc += r
        else:
            self.log.info('r = %s cmd = %s rmsg=%s' % (r, cmd, rmsg))
            tgt = '%s/%s.%s' % (self.ib.archDir, ftn, self.ts)
        r = fu.moveFile(ef, tgt)
        if r == 0:
            self.log.info('mv %s to %s' % (ef, tgt))
        else:
            self.log.error('mv %s to %s r = %s ' % (ef, tgt, r))
    return rc
def unzipCDRFiles(self):
    rc = 0
    r = 1
    path = '%s/%s' % (self.ib.shareDir, self.ib.workDirName)
    for fzp in self.cdrZipFiles:
        r = fu.uncompressFile(fzp, path, self.log)
        if r != 0:
            self.log.error('Error unzipping %s ' % fzp)
            rc += r
        else:
            self.log.info('Uncompressed %s to %s' % (fzp, path))
            fn = fu.getFileBaseName(fzp)
            tgt = '%s/%s%s' % (self.ib.archDir, fn, self.ts)
            r = fu.moveFile(fzp, tgt)
            if r == 0:
                self.log.info('mv %s to %s' % (fzp, tgt))
            else:
                self.log.error('mv %s to %s r = %s ' % (fzp, tgt, r))
                rc += r
    return rc
def checkFileCols(self, flst, FLD_SEP, strp=' \t\n\r'):
    rc = 0
    for fnp in flst:
        bfnm = fu.getFileBaseName(fnp)
        colc = self.ib.FileColCnt.get(bfnm)
        self.log.debug('fkey = %s colc= %s fp = %s ' % (bfnm, colc, fnp))
        if colc is not None:
            x, b = fu.readCSV(fnp, colc, FLD_SEP, strp)
            if len(b) < 1:
                self.log.debug('Columns number [%d] match on file %s ' % (colc, fnp))
            else:
                rc += 1
                badf = '%s/%s%s.bad' % (self.ib.badDir, bfnm, su.getTimeSTamp())
                f = fu.createFile(badf, b)
                if f == 0:
                    self.log.error('Columns number (%d) did not match on %s.\nPlease see Bad file %s .' % (colc, fnp, badf))
                else:
                    self.log.error('Columns number (%d) did not match on %s.\nCOULD NOT create file %s .' % (colc, fnp, badf))
                    self.log.error('BAD Rows===========\n%s' % b)
        else:
            rc += 1
            self.log.error("Did not find Column Count for %s. Unable to verify Column Numbers !" % bfnm)
    return rc
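# Typical call site: procCustFiles() above invokes this as
#   rc = self.checkFileCols(tgtFiles, ",")
# self.ib.FileColCnt is assumed to map each base filename to its expected column
# count; files without an entry are counted as errors rather than skipped.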
def procIncFiles(self):
    if len(self.workFiles) < 1:
        self.log.error('No files to process')
        return 1
    ctlFile = '%s/%s.ctl' % (self.ib.ctlDir, self.appName)
    self.workFiles.sort()
    rc = 0
    # Files in the working directory:
    i = 1
    self.log.debug('Will Process a total of %d file(s) ' % len(self.workFiles))
    for fnp in self.workFiles:
        self.log.info('\nProcessing File (%d) => %s ' % (i, fnp))
        # Get date run from the filename.
        fn = fu.getFileBaseName(fnp)
        cur_dayr = self._getDateRunStr(fn)
        if cur_dayr is None:
            self.log.error('No Date String %s ' % cur_dayr)
            return 1
        date = '%s/%s/%s' % (cur_dayr[4:6], cur_dayr[6:8], cur_dayr[:4])
        rc = su.isValidDate(date)
        if rc is False:
            self.log.error('Invalid Date %s on file %s ' % (date, fn))
            return 2
        self.fileDate = date
        self.log.debug('self.fileDate = %s' % (self.fileDate))
        if self.checkNextRunFlg is True:
            # Get Previous Run Info. The control file should contain one line only: YYYYMMDD.
            prev_dayr = self._getCtlFile()
            if prev_dayr is None:
                return 3
            pd, pr = self._getDay(prev_dayr, DP_LEN)
            rc = self._chkNextRun(cur_dayr, prev_dayr, pd, pr, RUN_PER_DAY)
            if rc != 0:
                self.log.error("self._chkNextRun rc = %s" % rc)
                return rc
        procFiles = self.chkTrailer([fnp, ], fn, cur_dayr)
        if len(procFiles) != self.FILE_SET_LEN:
            self.log.error("chkTrailer Files that were OK %s" % (procFiles,))
            return 4
        t = '%s/%s' % (self.ib.workDir, self.ib.srcFile)
        rc = fu.moveFile(fnp, t)
        if rc != 0:
            self.log.error('Could not move File %s to %s' % (fnp, t))
            return 5
        # Invoke workflow.
        rc = pi.runWkflWait(self.ib, self.log)
        if rc != 0:
            self.log.error('Running %s.%s rc = %s' % (self.ib.fld, self.ib.wkf, rc))
            if self.exitOnError:
                self.log.debug('ExitOnError is TRUE rc = %s' % (rc))
                return rc
        else:
            self.log.info('Ran %s.%s rc = %s' % (self.ib.fld, self.ib.wkf, rc))
        # Post-check every file before updating the flag:
        if self.postCheckFlg is True:
            rc = self._postCheck()
            if rc != 0:
                self.log.warn('Post Check Failed !!! Did not Update Load Date %s, Control File %s rc = %s' % (cur_dayr, ctlFile, rc))
                return rc
        # Loading Staging Succeeded. Update the control file.
        rc = fu.updFile(ctlFile, cur_dayr)
        if rc == 0:
            if self.checkNextRunFlg:
                self.log.info('Updated Cur Load Date from %s to %s , Control File %s' % (prev_dayr, cur_dayr, ctlFile))
            else:
                self.log.info('Overwriting Cur Load Date to %s , Control File %s' % (cur_dayr, ctlFile))
        else:
            self.log.error('Could not Update Load Date %s, Control File %s rc = %s' % (cur_dayr, ctlFile, rc))
            return rc
        # r = fu.delFile(t)
        i += 1
    return rc
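# Sketch of the daily control-file handshake used above (values are illustrative):
#   <appName>.ctl holds the previous load date, e.g. '20120812'
#   cur_dayr is parsed from the incoming filename, e.g. '20120813'
#   _chkNextRun(cur_dayr, prev_dayr, pd, pr, RUN_PER_DAY) accepts the file only
#   if it is the expected next run; on success fu.updFile() overwrites the
#   control file with cur_dayr.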
def procIncFiles(self):
    if len(self.workFiles) < 1:
        self.log.error('No files to process')
        return 1
    ctlFile = '%s/%s.ctl' % (self.ib.ctlDir, self.appName)
    self.workFiles.sort()
    rc = 0
    # Files in the working directory:
    i = 1
    self.log.debug('Will Process a total of %d file(s) ' % len(self.workFiles))
    for fnp in self.workFiles:
        self.log.info('\nProcessing File (%d) => %s ' % (i, fnp))
        # Get date run from the filename.
        fn = fu.getFileBaseName(fnp)
        cur_dayr = self._getDateRunStr(fn)
        if cur_dayr is None:
            self.log.error('No Date String %s ' % cur_dayr)
            return 1
        date = '%s%s' % (cur_dayr[:4], cur_dayr[4:6])
        rc = su.isValidDate(date, '%Y%m')
        if rc is False:
            self.log.error('Invalid Date %s on file %s ' % (date, fn))
            return 2
        self.fileDate = date
        self.log.debug('self.fileDate = %s' % (self.fileDate))
        self.log.debug('self.checkNextRunFlg is %s' % self.checkNextRunFlg)
        prev_dayr = self._getCtlFile()
        if self.checkNextRunFlg is True:
            if prev_dayr is None or prev_dayr.strip() == '':
                self.log.error("Could not find control file or No Data")
                return -1
            rc = psc.getNextRunDate(prev_dayr, cur_dayr, SCH_FREQ, self.log, sch)
            if rc != 0:
                self.log.error("self._chkNextRun rc = %s" % rc)
                return rc
        procFiles = self.chkTrailer([fnp, ], fn, cur_dayr)
        if len(procFiles) != self.FILE_SET_LEN:
            self.log.error("chkTrailer Files that were OK %s" % (procFiles,))
            return 4
        t = '%s/%s' % (self.ib.workDir, self.ib.srcFile)
        self.log.debug('fnp =%s Move to %s ' % (fnp, t))
        rc = fu.moveFile(fnp, t)
        if rc != 0:
            self.log.error('Could not move File %s to %s' % (fnp, t))
            return 5
        self.log.info('mv src file fnp %s -> t %s' % (fnp, t))
        # Invoke workflow.
        rc = pi.runWkflWait(self.ib, self.log)
        if rc != 0:
            self.log.error('Running %s.%s rc = %s' % (self.ib.fld, self.ib.wkf, rc))
            if self.exitOnError:
                self.log.debug('ExitOnError is TRUE rc = %s' % (rc))
                return rc
        else:
            self.log.info('Ran %s.%s rc = %s' % (self.ib.fld, self.ib.wkf, rc))
        # Loading Staging Succeeded. Update the control file.
        rc = fu.updFile(ctlFile, cur_dayr)
        if rc == 0:
            if self.checkNextRunFlg:
                self.log.info('Updated Cur Load Date from %s to %s , Control File %s' % (prev_dayr, cur_dayr, ctlFile))
            else:
                self.log.info('Overwriting Cur Load Date to %s , Control File %s' % (cur_dayr, ctlFile))
        else:
            self.log.error('Could not Update Load Date %s, Control File %s rc = %s' % (cur_dayr, ctlFile, rc))
            return rc
        i += 1
    return rc
def procIncFiles(self):
    # 'B' : self.getIncSetFiles, populates self.incFileSet, which is [filename][sets]
    if len(self.incFileSet) != self.FILE_SET_LEN:
        self.log.error("Invalid Len for incFileSet = %d " % len(self.incFileSet))
        return 1
    ctlFile = '%s/%s.ctl' % (self.ib.ctlDir, self.appName)
    i = 0
    # Get the complete number of file sets to process. incFileSet is an array of 5 elements.
    # Each element is a bucket containing a list of similar files (n number of runs).
    # FileSet[i] is a bucket that contains file(s) for the same table.
    # FileSet[i] is already sorted.
    # e.g.
    #   [ '$PATH:/Ap_sce5100_20120811_3.txt',
    #     '$PATH:/Ap_sce5100_20120812_1.txt',
    #     '$PATH:/Ap_sce5100_20120812_2.txt',
    #     '$PATH:/Ap_sce5100_20120813_1.txt' ]
    self.log.debug("self.incFileSet %s len = %d" % (self.incFileSet, len(self.incFileSet)))
    # Get the minimum length across the buckets (self.incFileSet). It tells us how many complete file sets we have.
    setn = min(len(self.incFileSet[0]), len(self.incFileSet[1]), len(self.incFileSet[2]), len(self.incFileSet[3]), len(self.incFileSet[4]))
    self.log.info(' ---- Starting Processing. Total of %d iteration(s) ----' % setn)
    while i < setn:
        self.incFiles = self.incFileSet[0][i], self.incFileSet[1][i], self.incFileSet[2][i], self.incFileSet[3][i], self.incFileSet[4][i]
        self.log.debug(' iter= %d\tincFiles = %s' % (i, (self.incFiles,)))
        i += 1
        if len(self.incFiles) != self.FILE_SET_LEN:
            self.log.error('Invalid FileSet len = %d should be = %d' % (len(self.incFiles), self.FILE_SET_LEN))
            return 1
        # Get date run from the 1st filename.
        fn = fu.getFileBaseName(self.incFiles[0])
        cur_dayr = self._getDateRunStr(fn)
        if cur_dayr is None:
            self.log.error('No Date Run String %s ' % cur_dayr)
            return 1
        rc = self.chkCompleteSet(cur_dayr, self.incFiles)
        if rc != 0:
            self.log.error("chkCompleteSet() rc = %s" % rc)
            return rc
        self.log.debug('self.checkNextRunFlg is %s' % self.checkNextRunFlg)
        if self.checkNextRunFlg is True:
            # Get Previous Run Info. The control file should contain one line only: YYYYMMDD_R.
            prev_dayr = self._getCtlFile()
            # pd,pr = self._getDay(prev_dayr,DP_LEN)
            if prev_dayr is None or prev_dayr.strip() == '':
                self.log.error("Could not find control file or No Data")
                return -1
            # rc = self._chkNextRun(cur_dayr,prev_dayr,pd,pr,RUN_PER_DAY)
            rc = psc.getNextRunDate(prev_dayr, cur_dayr, SCH_FREQ, self.log, sch)
            if rc != 0:
                self.log.error("self._chkNextRun rc = %s" % rc)
                return rc
        rc = self.cpFileToWorkDir()
        if rc != 0:
            self.log.error(" cpFileToWorkDir() rc = %s" % rc)
            return rc
        rc = self.archFiles()
        if rc != 0:
            self.log.error(" archFiles() rc = %s" % rc)
            return rc
        procFiles = self.chkTrailer(self.workFiles, fn, cur_dayr)
        if len(procFiles) != self.FILE_SET_LEN:
            self.log.error("chkTrailer Files that were OK %s" % (procFiles,))
            return 1
        # At this point all files are valid for processing and filenames are sorted.
        for fnp in procFiles:
            fn = fu.getFileBaseName(fnp)
            f = self._getFileProcName(fn)
            t = '%s/%s' % (self.ib.workDir, f)
            rc = fu.moveFile(fnp, t)
            if rc == 0:
                self.log.info('Renaming File %s to %s' % (fnp, f))
            else:
                self.log.error('Could not rename File %s to %s' % (fnp, t))
                continue
        # rc = 0  # Remove after testing
        # Invoke workflow(s).
        self.log.debug('self.runWkfFlowFlg is %s' % self.runWkfFlowFlg)
        if self.runWkfFlowFlg == True:
            rc = self.wFinDetailStg()
            if rc != 0:
                return rc
            rc = self.wFinDetail()
            if rc != 0:
                return rc
        # End-to-End Loading Succeeded. Update the control file.
        rc = fu.updFile(ctlFile, cur_dayr)
        if rc == 0:
            self.log.info('Updated Cur Load Date %s, Control File %s' % (cur_dayr, ctlFile))
        else:
            self.log.error('Could not Update Load Date %s, Control File %s rc = %s' % (cur_dayr, ctlFile, rc))
            return rc
        # r = fu.delFile(t)
    return rc
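# Illustration of the complete-set logic above (hypothetical bucket sizes):
#   len(incFileSet[0..4]) = 3, 3, 2, 3, 3  ->  setn = min(...) = 2
# i.e. only two complete file sets are processed this run; the extra files in
# the longer buckets wait until their matching files arrive.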
def procIncFiles(self):
    if len(self.workFiles) < 1:
        self.log.error("No files to process")
        return 1
    ctlFile = "%s/%s.ctl" % (self.ib.ctlDir, self.appName)
    self.workFiles.sort()
    rc = 0
    # Files in the working directory:
    i = 1
    self.log.debug("Will Process a total of %d file(s) " % len(self.workFiles))
    for fnp in self.workFiles:
        self.log.info("\nProcessing File (%d) => %s " % (i, fnp))
        # Get date run from the filename.
        fn = fu.getFileBaseName(fnp)
        cur_dayr = self._getDateRunStr(fn)
        if cur_dayr is None:
            self.log.error("No Date String %s " % cur_dayr)
            return 1
        fmt = "%Y%m"
        date = "%s%s" % (cur_dayr[0:4], cur_dayr[4:6])
        rc = su.isValidDate(date, fmt)
        if rc is False:
            self.log.error("Invalid Date %s on file %s " % (date, fn))
            return 2
        self.fileDate = date
        self.log.debug("self.fileDate = %s" % (self.fileDate))
        if self.checkNextRunFlg is True:
            # Get Previous Run Info. The control file should contain one line only: YYYYMM.
            prev_dayr = self._getCtlFile()
            if prev_dayr is None:
                return 3
            pd, pr = self._getMonth(prev_dayr, DP_LEN)
            if pd is None:
                return 4
            # rc = self._chkNextRun(cur_dayr,prev_dayr,pd,pr,RUN_PER_MTH)
            rc = psch.getNextRunDate(pd, cur_dayr, "Mthly", self.log)
            if rc != 0:
                self.log.error("self._chkNextRun rc = %s" % rc)
                return rc
        if self.verifyCSVFlg is True:
            r, b = fu.readCSV(fnp, FLDxROW, SEP)
            if len(b) > 0:
                fbad = "%s/%s.bad" % (self.ib.badDir, fn)
                rc = fu.createFile(fbad, b)
                self.log.error("No of %d bad row(s) on %s" % (len(b), fnp))
                self.log.error("Creating file %s rc = %s" % (fbad, rc))
                self.log.debug("Bad rows = %s" % b)
                return 5
            if len(r) == 0:
                self.log.error("No rows to process on file %s" % fnp)
                return 6
        t = "%s/%s" % (self.ib.workDir, self.ib.srcFile[0])
        rc = fu.moveFile(fnp, t)
        self.log.info("Renaming %s to %s rc = %s" % (fnp, t, rc))
        if rc != 0:
            self.log.error("Could not move File %s to %s" % (fnp, t))
            return 7
        # Invoke workflow.
        rc = self._wkfIMSSftyLoc()
        if rc != 0:
            self.log.error("Running %s.%s rc = %s" % (self.ib.fld, self.ib.wkf, rc))
            if self.exitOnError:
                self.log.debug("ExitOnError is TRUE rc = %s" % (rc))
                return rc
        else:
            self.log.info("Ran %s.%s rc = %s" % (self.ib.fld, self.ib.wkf, rc))
        # Loading Staging Succeeded. Update the control file.
        rc = fu.updFile(ctlFile, cur_dayr)
        if rc == 0:
            if self.checkNextRunFlg:
                self.log.info("Updated Cur Load Date from %s to %s , Control File %s" % (prev_dayr, cur_dayr, ctlFile))
            else:
                self.log.info("Overwriting Cur Load Date to %s , Control File %s" % (cur_dayr, ctlFile))
        else:
            self.log.error("Could not Update Load Date %s, Control File %s rc = %s" % (cur_dayr, ctlFile, rc))
            return rc
        r = fu.delFile(t)
        self.log.debug("Deleting File %s rc = %s" % (t, r))
        i += 1
    return rc