def __init__(self, configPath, defaultDict={}): if os.path.isdir(configPath): raise "config file dict can only be created with file." self.configPath = configPath try: f = open(self.configPath, "r") self.config = json.load(f) f.close() except IOError: print "no config file found:", self.configPath self.config = defaultDict
def __init__(self, srcRoot, storageRoot, stateStoragePath = 'd:/state.txt'): print "src root is:", srcRoot self.srcRoot = transform.transformDirToInternal(srcRoot) self.storageRoot = transform.transformDirToInternal(storageRoot) self.stateStoragePath = stateStoragePath try: f = open(self.stateStoragePath,'r') self.config = json.load(f) f.close() except IOError: self.config = {}
def readEncryptedZipLog(self, encryptedZipLogPath):
    """Decrypt *encryptedZipLogPath* into the working dir and parse it.

    The decrypted copy is named after the source with '.enclog'
    replaced by '.log'.  Returns the parsed JSON dict, or {} when the
    decrypted log cannot be opened.
    """
    zipLogPath = os.path.join(
        self.decryptionWorkingDir,
        os.path.basename(encryptedZipLogPath).replace('.enclog', '.log'))
    # decCopier decrypts while copying (implementation not in view).
    self.decCopier.copy(encryptedZipLogPath, zipLogPath)
    try:
        # 'with' fixes a handle leak when json.load raised.
        with open(zipLogPath, 'r') as f:
            return json.load(f)
    except IOError:
        return {}
def processItem(self, job, item): monitoringFullPath = transform.transformDirToInternal(item['monitoringPath']) archiveId = gZipFolderCollectionPrefix + monitoringFullPath if not self.collectionInDbForMonitoringPath.has_key(monitoringFullPath): self.collectionInDbForMonitoringPath[monitoringFullPath] = collectionDatabase.collectionOnMongoDbBase(archiveId, self.dbInst.getCollectionDb()) objUuid = self.dbInst.addVirtualObj({"monitoringPath": monitoringFullPath, "zippedInfoCollectionId": archiveId}); idInCol = objUuid self.zippedInfoCollectionList.addObj(idInCol, objUuid) #Save the item in the archive collection: zippedInfoColllection://D:/tmp/ fullPath = transform.transformDirToInternal(item["fullPath"]) relativePath = transform.getRelativePathFromFull(fullPath, monitoringFullPath) if not os.path.exists(fullPath): job.delete() return False#No job release, job was deleted. ################################################################# # Start process the ################################################################# if not self.collectionInDbForMonitoringPath[monitoringFullPath].exists(relativePath): #This item is not in the collection, so we need to extract info from this item newObj = self.dbInst.getFsObjFromFullPath(fullPath) self.collectionInDbForMonitoringPath[monitoringFullPath].addObj(relativePath, newObj["uuid"]) zipFilePath = transform.transformDirToInternal( fileTools.getTimestampWithFreeName(self.workingDir, gInfoFileDecryptedExt, gInfoFilePrefix)) self.decCopier.copy(fullPath, zipFilePath) for i in zippedInfo(self.workingDir).enumItems(zipFilePath): print '--------------------------------------------------' print i fp = open(i, 'r') loadedFileInfo = json.load(fp) print loadedFileInfo for i in zippedInfo(self.workingDir).enumZippedFiles(zipFilePath): fp = open(i, 'r') print 'data file extracted:', i ''' else: #This item is not in the collection, so we need to extract info from this item newObj = self.dbInst.getFsObjFromFullPath(fullPath) 
self.collectionInDbForMonitoringPath[monitoringFullPath].addObj(relativePath, newObj["uuid"]) zipFilePath = transform.transformDirToInternal( fileTools.getTimestampWithFreeName(self.workingDir, gInfoFileDecryptedExt, gInfoFilePrefix)) self.decCopier.copy(fullPath, zipFilePath) for i in zippedInfo(self.workingDir).enumItems(zipFilePath): print '--------------------------------------------------' print i fp = open(i, 'r') loadedFileInfo = json.load(fp) print loadedFileInfo for i in zippedInfo(self.workingDir).enumZippedFiles(zipFilePath): fp = open(i, 'r') print 'data file extracted:', i ''' return True#Release job
def __init__(self, srcRoot, storageRoot, stateStoragePath='d:/state.txt'):
    """Normalize roots, load persisted JSON state, reset archive counters.

    A missing/unreadable state file yields an empty config.  The current
    archive tracking starts empty (no archive, zero bytes archived).
    """
    self.srcRoot = transform.transformDirToInternal(srcRoot)
    self.storageRoot = transform.transformDirToInternal(storageRoot)
    self.stateStoragePath = stateStoragePath
    try:
        # 'with' fixes a handle leak: a json.load failure left the
        # file open in the original.
        with open(self.stateStoragePath, 'r') as f:
            self.config = json.load(f)
    except IOError:
        self.config = {}
    # Running size of the archive currently being filled.
    self.curArchivedSize = 0
    # No archive is open yet.
    self.curArchive = None
def processJob(self, job, item): monitoringFullPath = transform.transformDirToInternal(item['monitoringPath']) archiveId = "zippedInfoColllection://" + monitoringFullPath if not self.collectionInDbForMonitoringPath.has_key(monitoringFullPath): self.collectionInDbForMonitoringPath[monitoringFullPath] = collectionDatabase.collectionOnMongoDbBase(archiveId, self.dbInst.getCollectionDb()) #Save the item in the archive collection: zippedInfoColllection://D:/tmp/ fullPath = transform.transformDirToInternal(item["fullPath"]) relativePath = transform.getRelativePathFromFull(fullPath, monitoringFullPath) if not self.collectionInDbForMonitoringPath[monitoringFullPath].exists(relativePath): #This item is not in the collection, so we need to extract info from this item newObj = self.dbInst.getFsObjFromFullPath(fullPath) self.collectionInDbForMonitoringPath[monitoringFullPath].addObj(relativePath, newObj["uuid"]) for i in zippedInfo(self.workingDir).enumItems(fullPath): fp = open(i, 'r') loadedFileInfo = json.load(fp) print loadedFileInfo return True
def importDb(input, password = None): dbSysInst = dbSys.dbSysSmart() sf = open(input, 'r') package = json.load(sf) en = enc.encryptorBase64Out(password) l = package["add"] if (password is None) and package.has_key("encPass"): print 'need password for importing' return if (not (password is None)): if unicode(str(md5.new(password).hexdigest())) != package["encPass"]: print 'pass not match:', unicode(str(md5.new(password+package["time-duration"]).hexdigest())), package["encPass"] return else: l = listEncryptor.dictListDecryptor(l, en) l = listEncryptor.dictListEncryptor(l) dbSysInst.importDb(l)
def decryptBackup(sourceFullPath, target, password = None): sf = open(sourceFullPath, 'r') l = json.load(sf) if l.has_key("encPass"): if password is None: print 'need password' return else: en = enc.encryptorBase64Out(password) if str(md5.new(password+l["time-duration"]).hexdigest()) != l["encPass"]: print 'pass not match:', str(md5.new(password+l["time-duration"]).hexdigest()), l["encPass"] return res = dbExporterV3.dictListDecryptor(l["add"], en) else: res = l["add"] l["add"] = res del l["encPass"] s = json.dumps(l, sort_keys=True, indent=4) f = open(target,'w') f.write(s) f.close()
def importDb(input, username, targetPasswd, password = None): dbSysInst = dbSys.dbSysSmart() sf = open(input, 'r') package = json.load(sf) en = enc.encryptorBase64Out(password) l = package["add"] if (password is None) and package.has_key("encPass"): print 'need password for importing' return if (not (password is None)): if unicode(str(md5.new(password).hexdigest())) != package["encPass"]: print 'pass not match:', unicode(str(md5.new(password+package["time-duration"]).hexdigest())), package["encPass"] return else: l = listEncryptor.dictListDecryptor(l, en) sysUser = service.ufsUser(u'system.user', u'system.pass') if userMan.userManager().verifyPasswd(username, targetPasswd, dbSys.dbSysSmart(sysUser).getDb("passwdDb")): print 'importing' l = listEncryptor.dictListEncryptor(l, xorEnc.encryptorTxtOut(targetPasswd)) #print l l = listEncryptor.setUser(l, username) #print l dbSysInst.importDb(l)
def subClassProcessItem(self, processingObj): encZipFileFullPath = processingObj["fullPath"] encryptedZipLogPath = encZipFileFullPath ncl(encryptedZipLogPath) zipLogPath = os.path.join(self.decryptionWorkingDir, os.path.basename(encryptedZipLogPath).replace(gTargetEncLog, gZipLogExt)) cl(zipLogPath) self.decCopier.copy(encryptedZipLogPath, zipLogPath) #Extract log file from zip logZip = zipClass.ZFile(zipLogPath, 'r') #logPath = zipLogPath.replace(gZipLogExt, gLogExt) logLoaded = False for logFileName in logZip.list(): logZip.extract(logFileName, self.decryptionWorkingDir) logPath = os.path.join(self.decryptionWorkingDir, logFileName) #cl(logPath) #Read log file try: f = open(logPath,'r') newLog = json.load(f) f.close() except IOError: newLog = None raise 'log not read' if not (newLog is None): ######################### #Log loaded, update collection ######################### logLoaded = True print 'extracting info from log', encZipFileFullPath for i in newLog: relaPath = transform.formatRelativePath(i) ncl(newLog[i]) #Remove the uuid in log file if newLog[i].has_key("uuid"): newLog[i]["originalUuid"] = newLog[i]["uuid"] del newLog[i]["uuid"] ################# #Get collection ################# if self.logCollection.exists(relaPath): itemUuid = self.logCollection.getObjUuid(relaPath) ncl("returned uuid:", itemUuid) item = self.objDb.getObjFromUuid(itemUuid) #Conflict, check if update needed ncl(newLog[i]["timestamp"]) ncl(item["timestamp"]) if newLog[i]["timestamp"] > item["timestamp"]: #The new item is newer, replace the old one ncl('updating duplicated item to 1st one:', newLog[i]["timestamp"], item["timestamp"]) objUuid = self.objDb.addVirtualObj(newLog[i]) self.logCollection.updateObjUuid(relaPath, objUuid) else: ncl("no update, ignore") else: #Add object to obj objDb objUuid = self.objDb.addVirtualObj(newLog[i]) #Add obj to collection self.logCollection.addObj(relaPath, objUuid) ncl("added new item", relaPath, newLog[i]) ''' encZipFileFullPath = 
transform.transformDirToInternal(encZipFileFullPath) zipFileFullPath = self.getZipFile(encZipFileFullPath.replace(gTargetEncLog, gTargetEnc)) extractedItemInfo = self.getItemState(relaPath) extractedItemFullPath = os.path.join(self.workingDir, i) extractedItem = extractedZipStorageItem(self.workingDir, extractedItemFullPath, extractedItemInfo, zipFileFullPath, None, relaPath) ''' yield newLog[i] '''
def subClassProcessItem(self, processingObj):
    """Process one cached item: merge its zip-log, then store its data.

    Only '.enclog' items whose matching '.enc' data file exists are
    processed: the log is decrypted and merged into self.logCollection
    (newer timestamp wins), then every file in the decrypted data zip is
    stored into self.targetCollection.  Non-'.enclog' items are ignored;
    items that could not be processed (missing data file, unreadable
    log) are pushed back via the base-class handler.
    """
    # Check if the log file and data file are both OK.
    encZipFileFullPath = processingObj["fullPath"]
    # Raw string avoids the invalid-escape pitfall in regex literals.
    if re.search(r'\.enclog$', encZipFileFullPath) is not None:
        # An encrypted zip log; require the matching data file.
        if os.path.exists(encZipFileFullPath.replace('.enclog', '.enc')):
            # Log and data are both present.
            encryptedZipLogPath = encZipFileFullPath
            ncl(encryptedZipLogPath)
            zipLogPath = os.path.join(
                self.decryptionWorkingDir,
                os.path.basename(encryptedZipLogPath).replace('.enclog', '.log'))
            ncl(zipLogPath)
            self.decCopier.copy(encryptedZipLogPath, zipLogPath)
            try:
                # 'with' fixes a handle leak when json.load raised.
                with open(zipLogPath, 'r') as f:
                    newLog = json.load(f)
            except IOError:
                newLog = None
            if newLog is not None:
                #########################
                # Log loaded, update collection
                #########################
                for i in newLog:
                    relaPath = transform.formatRelativePath(i)
                    ncl(newLog[i])
                    if self.logCollection.exists(relaPath):
                        itemUuid = self.logCollection.getObjUuid(relaPath)
                        ncl("returned uuid:", itemUuid)
                        item = self.db.getObjFromUuid(itemUuid)
                        # Conflict: keep the entry with the newer timestamp.
                        ncl(newLog[i]["timestamp"])
                        ncl(item["timestamp"])
                        if newLog[i]["timestamp"] > item["timestamp"]:
                            ncl('updating duplicated item to 1st one:', newLog[i]["timestamp"], item["timestamp"])
                            objUuid = self.db.addVirtualObj(newLog[i])
                            self.logCollection.updateObjUuid(relaPath, objUuid)
                        else:
                            ncl("no update, ignore")
                    else:
                        # New path: add object and register it.
                        objUuid = self.db.addVirtualObj(newLog[i])
                        self.logCollection.addObj(relaPath, objUuid)
                        ncl("added new item", relaPath, newLog[i])
                #################################
                # Process data
                #################################
                encZipFileFullPath = transform.transformDirToInternal(encZipFileFullPath)
                ncl(encZipFileFullPath)
                zipFileFullPath = self.getZipFile(encZipFileFullPath.replace(".enclog", ".enc"))
                # Enumerate all files in the decrypted zip file.
                zf = zipClass.ZFile(zipFileFullPath, 'r')
                for i in zf.list():
                    extractedItemFullPath = os.path.join(self.workingDir, i)
                    relaPath = transform.formatRelativePath(i)
                    extractedItemInfo = self.getItemState(relaPath)
                    extractedItem = encZipStorage.extractedZipStorageItem(
                        self.workingDir, extractedItemFullPath, extractedItemInfo, zf, relaPath)
                    # Store the file.
                    self.targetCollection.store(extractedItem)
                # Everything went OK -- done, do not push back.
                return
            else:
                cl('Load log file failed', encZipFileFullPath)
        else:
            # Data file not exist, push it back.
            ncl('Data file not exist, push it back: ', encZipFileFullPath)
    else:
        ncl('not a encrypted zip file: ', encZipFileFullPath)
        return
    ############
    # Item not processed, push it back.
    ############
    processorBase.cacheCollectionProcessorBase.subClassProcessItem(self, processingObj)
def subClassProcessItem(self, processingObj): encZipFileFullPath = processingObj["fullPath"] encryptedZipLogPath = encZipFileFullPath ncl(encryptedZipLogPath) zipLogPath = os.path.join( self.decryptionWorkingDir, os.path.basename(encryptedZipLogPath).replace(gTargetEncLog, gZipLogExt) ) cl(zipLogPath) self.decCopier.copy(encryptedZipLogPath, zipLogPath) # Extract log file from zip logZip = zipClass.ZFile(zipLogPath, "r") # logPath = zipLogPath.replace(gZipLogExt, gLogExt) logLoaded = False for logFileName in logZip.list(): logZip.extract(logFileName, self.decryptionWorkingDir) logPath = os.path.join(self.decryptionWorkingDir, logFileName) # cl(logPath) # Read log file try: f = open(logPath, "r") newLog = json.load(f) f.close() except IOError: newLog = None raise "log not read" if not (newLog is None): ######################### # Log loaded, update collection ######################### logLoaded = True print "extracting info from log", encZipFileFullPath for i in newLog: relaPath = transform.formatRelativePath(i) ncl(newLog[i]) # Remove the uuid in log file if newLog[i].has_key("uuid"): newLog[i]["originalUuid"] = newLog[i]["uuid"] del newLog[i]["uuid"] ################# # Get collection ################# if self.logCollection.exists(relaPath): itemUuid = self.logCollection.getObjUuid(relaPath) ncl("returned uuid:", itemUuid) item = self.objDb.getObjFromUuid(itemUuid) # Conflict, check if update needed ncl(newLog[i]["timestamp"]) ncl(item["timestamp"]) if newLog[i]["timestamp"] > item["timestamp"]: # The new item is newer, replace the old one ncl("updating duplicated item to 1st one:", newLog[i]["timestamp"], item["timestamp"]) objUuid = self.objDb.addVirtualObj(newLog[i]) self.logCollection.updateObjUuid(relaPath, objUuid) else: ncl("no update, ignore") else: # Add object to obj objDb objUuid = self.objDb.addVirtualObj(newLog[i]) # Add obj to collection self.logCollection.addObj(relaPath, objUuid) ncl("added new item", relaPath, newLog[i]) if logLoaded: 
################################# # Process data ################################# print "extracting info from log complete", encZipFileFullPath encZipFileFullPath = transform.transformDirToInternal(encZipFileFullPath) ncl(encZipFileFullPath) zipFileFullPath = self.getZipFile(encZipFileFullPath.replace(gTargetEncLog, gTargetEnc)) # For all element in the zip file # Enumerate all files in the decrypted zip file zf = zipClass.ZFile(zipFileFullPath, "r") # Generate a log file if it does not exist for i in zf.list(): # zf.extract(i, self.workingDir) extractedItemFullPath = os.path.join(self.workingDir, i) relaPath = transform.formatRelativePath(i) ncl(relaPath) extractedItemInfo = self.getItemState(relaPath) extractedItem = extractedZipStorageItem( self.workingDir, extractedItemFullPath, extractedItemInfo, zipFileFullPath, zf, relaPath ) """ ########################### #Store the file ########################### #self.targetCollection.store(extractedItem) """ ########################### # Returning object ########################### yield extractedItem ########################## # Everything goes OK # Quit ########################## return else: cl("Load log file failed", encZipFileFullPath)