def callback(self, pathToWatch, relativePath, changeType):
    """File-change callback: package the change info as JSON and queue it
    onto this object's beanstalkd target tube (self.targetTube).

    pathToWatch  -- the monitored root directory
    relativePath -- path of the changed item, relative to pathToWatch
    changeType   -- change kind reported by the watcher (passed through as-is)
    """
    fullPath = transform.transformDirToInternal(os.path.join(pathToWatch, relativePath))
    itemDict = {
        "monitoringPath": transform.transformDirToInternal(pathToWatch),
        "fullPath": fullPath,
        "changeType": changeType,
        "timestamp": time.time(),
    }
    # NOTE: the original serialized itemDict twice; serialize once.
    s = json.dumps(itemDict, sort_keys=True, indent=4)
    beanstalk = beanstalkc.Connection(host=gBeanstalkdServerHost, port=gBeanstalkdServerPort)
    beanstalk.use(self.targetTube)
    job = beanstalk.put(s)
def backupDbSingle(dbName, targetDir, password, user): if user is None: dbSysInst = dbSys.dbSysSmart() else: dbSysInst = dbSys.dbSysSmart(sessionBase.sessionInstanceBase(user)) db = dbSysInst.getDb(dbName) res = [] e = enc.encryptor() if not os.path.exists(targetDir): misc.ensureDir(targetDir) for i in db.keys(): #print i #values = db.testFunc(i) values = db[i] encryptedValues = [] for i in values: print i.encode('gbk', 'replace') encryptedValues.append(e.en(i, password)) j = {"key":e.en(i, password), "value":encryptedValues, "encHash":unicode(md5.new(password).hexdigest())} res.append(j) #print res s = json.dumps(res, sort_keys=True, indent=4) f = open(os.path.join(targetDir, dbName+'_for_user_'+str(user)+'.json'),'w') f.write(s) f.close()
def exportDb(targetDir, password=None, hostname=None): dbSysInst = dbSys.dbSysSmart() startTime = findLastTimestamp(targetDir, hostname) endTime = dbSysInst.getTimeStamp() finalRes = dbSysInst.exportDb(startTime, endTime, hostname) if len(finalRes) == 0: print "no update need to be exported" return finalRes = dictListDecryptor(finalRes) timeStampName = "%s-%s" % (startTime, endTime) package = {"backup-id": unicode(str(uuid.uuid4())), "time-duration": timeStampName} if password is None: package["add"] = finalRes fullname = "%s_%s.noenc.json" % (hostname, timeStampName) else: fullname = "%s_%s.json" % (hostname, timeStampName) en = enc.encryptorBase64Out(password) finalRes = dictListEncryptor(finalRes, en) package["encPass"] = unicode(str(md5.new(password + timeStampName).hexdigest())) package["add"] = finalRes targetFullPath = os.path.join(targetDir, fullname) s = json.dumps(package, sort_keys=True, indent=4) # s = json.dumps(package) f = open(targetFullPath, "w") f.write(s) f.close()
def backupDbAuto(dbName, targetDir, password, user, timeStamp): finalRes = [] if user is None: dbSysInst = dbSys.dbSysSmart() else: dbSysInst = dbSys.dbSysSmart(sessionBase.sessionInstanceBase(user)) beforeTimeStamp = dbSysInst.getTimeStamp() if dbName is None: dbList = dbSysInst.getDbNameList() else: dbList = [dbName] for i in dbList: print i res = backupDbSingle(i, targetDir, password, user, timeStamp, beforeTimeStamp) finalRes.extend(res) #res = dictListEncryptor(res, password) #res = dictListDecryptor(res) s = json.dumps(finalRes, sort_keys=True, indent=4) #s = json.dumps(finalRes) f = open(os.path.join(targetDir, str(dbName)+'_for_user_'+str(user)+'.json'),'w') f.write(s) f.close() dbSysInst = dbSys.dbSysSmart() c = cfg.configuration(dbSysInst) c[u"mongoBackupTimeStamp"] = beforeTimeStamp
def encZip(self): #Must close the zip before encrypt it, otherwise, the file are not integrate if self.curArchive is None: return self.curArchive.close() self.curArchive = None ############################ # Encrypt the zip file ############################ targetPath = transform.transformDirToInternal( fileTools.getTimestampWithFreeName(self.zipStorageDir, '.enc')) print 'copying "%s" to "%s"'%(self.curArchiveName, targetPath) #import shutil #shutil.copy(self.curArchiveName, targetPath+'.backup.zip') self.encCopier.copy(self.curArchiveName, targetPath) ############################ # Save info for zipped files ############################ s = json.dumps(self.zippedFileInfo, sort_keys=True, indent=4) f = open(self.curArchiveName.replace('.zip', '.log'),'w') f.write(s) f.close() self.encCopier.copy(self.curArchiveName.replace('.zip', '.log'), targetPath.replace('.enc', '.enclog')) ############################ # Update state in storage state ############################ self.updateZipLog(self.zippedFileInfo) #Clean the current zipped file info self.zippedFileInfo = {} zipFileFolderStorageItem = folderStorage.folderStorageItem(self.zipStorageDir, targetPath) self.lastState.zipFileUpdate(zipFileFolderStorageItem)
def stop_beanstalkd_service(tube_name, beanstalk=None):
    """Ask the worker listening on *tube_name* to shut down by queuing a
    high-priority {"command": "quit"} message.

    BUG FIX: the original assigned the return value of Connection.use()
    (which in beanstalkc is the tube-name string, not the connection), so
    the later .put() call failed. Connect first, then select the tube.
    """
    if beanstalk is None:
        beanstalk = beanstalkc.Connection(host=gBeanstalkdServerHost, port=gBeanstalkdServerPort)
        beanstalk.use(tube_name)
    item_dict = {"command": "quit"}
    s = json.dumps(item_dict, sort_keys=True, indent=4)
    # In beanstalkd a LOWER number means HIGHER priority; 1000 is far more
    # urgent than the ~2**31 default, so the quit command jumps the queue.
    beanstalk.put(s, priority=1000)
def encInfoZip(self, pendingCollection):
    """Dump the zipped-file info to a .log file, zip that log, encrypt-copy
    it into a YYYY/MM/DD subdirectory of the zip storage dir, then update
    the stored state and reset the per-archive info."""
    # Write the info of the files zipped so far.
    logFilePath = transform.transformDirToInternal(
        fileTools.getTimestampWithFreeName(self.workingDir, '.log'))
    logFile = open(logFilePath, 'w')
    logFile.write(json.dumps(self.zippedFileInfo, sort_keys=True, indent=4))
    logFile.close()
    # Wrap the log in a zip of its own.
    logZipPath = logFilePath.replace(u'.log', u'.log.zip')
    logZip = zipClass.ZFile(logZipPath, 'w')
    logZip.addfile(unicode(logFilePath), os.path.basename(logFilePath))
    logZip.close()
    # Store under a UTC date-based directory, e.g. 2014/07/31.
    nowUtc = time.gmtime()
    dateTimeDir = "/".join(time.strftime(fmt, nowUtc) for fmt in ("%Y", "%m", "%d"))
    newEncDir = unicode(os.path.join(self.zipStorageDir, dateTimeDir))
    misc.ensureDir(newEncDir)
    targetPath = transform.transformDirToInternal(
        fileTools.getTimestampWithFreeName(newEncDir, '.enc'))
    self.encCopier.copy(logZipPath, targetPath.replace('.enc', '.encziplog'))
    # Record the log in storage state and reset the per-archive info.
    self.updateZipLog(self.zippedFileInfo, pendingCollection)
    self.zippedFileInfo = {}
def saveRegeneratedState(self, encZipFileFullPath, zipFileFullPath):
    """Write the regenerated zipped-file info next to the zip as a .log file,
    encrypt-copy it beside the .enc file as .enclog, then reset the info."""
    logPath = zipFileFullPath.replace('.zip', '.log')
    logFile = open(logPath, 'w')
    logFile.write(json.dumps(self.zippedFileInfoRegenerating, sort_keys=True, indent=4))
    logFile.close()
    self.encCopier.copy(logPath, encZipFileFullPath.replace('.enc', '.enclog'))
    self.zippedFileInfoRegenerating = {}
def put_item(self, item_dict, target_tube, priority = beanstalkc.DEFAULT_PRIORITY): beanstalk = beanstalkc.Connection(host=gBeanstalkdServerHost, port=gBeanstalkdServerPort) try: beanstalk.use(target_tube) except: print 'using: "%s"',target_tube s = json.dumps(item_dict, sort_keys=True, indent=4) print "add item:", s, self.tubeName, priority job = beanstalk.put(s, priority = priority) return job
def main(): d = testDbSys.testDbSys() res = submitter.packagePathRecurse("d:/tmp", d) s = json.dumps(res, sort_keys=True, indent=4) jsonRes = u"\n".join([l.rstrip() for l in s.splitlines()]) print jsonRes f = open("d:/tmp/dirJson.json", "w") f.write(jsonRes) f.close() r = json.loads(jsonRes) print r
def finalizeZipFile(self):
    """Embed the accumulated collection info into the current zip, close it,
    reset the per-zip state, and return the finished zip's full path."""
    # Attach the per-collection content info to the archive metadata.
    self.additionalInfoDict["collectionContentInfo"] = self.collectionInfoDict
    ncl(self.collectionInfoDict)
    infoFilePath = transform.transformDirToInternal(
        fileTools.getTimestampWithFreeName(self.workingDir, "." + gInfoFileExt, gInfoFilePrefix))
    logFile = open(infoFilePath, 'w')
    logFile.write(json.dumps(self.additionalInfoDict, sort_keys=True, indent=4))
    logFile.close()
    self.getZipFile().addfile(unicode(infoFilePath), unicode(infoFilePath))
    self.package_file.close()
    # Drop the handle so a fresh zip is created if this object is reused.
    self.package_file = None
    self.additionalInfoDict = {}
    return self.package_file_full_path
def encZip(self, pendingCollection):
    """Close the current archive, encrypt-copy it to self.targetPath, write
    and zip the per-archive log, copy that alongside, then update state."""
    # The zip must be closed before encrypting it; otherwise the bytes on
    # disk are not complete yet.
    if self.curArchive is None:
        return
    self.curArchive.close()
    self.curArchive = None
    cl('copying "%s" to "%s"'%(self.curArchiveName, self.targetPath))
    # TODO: update the processed item list so this newly created item is
    # not processed by the extractor again.
    self.encCopier.copy(self.curArchiveName, self.targetPath)
    # Persist the info of the files that went into this archive.
    logFilePath = self.curArchiveName.replace(u'.zip', u'.log')
    logFile = open(logFilePath, 'w')
    logFile.write(json.dumps(self.zippedFileInfo, sort_keys=True, indent=4))
    logFile.close()
    # Zip the log and ship it next to the encrypted archive.
    logZipPath = self.curArchiveName.replace(u'.zip', u'.log.zip')
    logZip = zipClass.ZFile(logZipPath, 'w')
    logZip.addfile(unicode(logFilePath), os.path.basename(logFilePath))
    logZip.close()
    self.encCopier.copy(logZipPath, self.targetPath.replace('.enc', '.encziplog'))
    # Record the archive in storage state and reset the per-archive info.
    self.updateZipLog(self.zippedFileInfo, pendingCollection)
    self.zippedFileInfo = {}
def decryptBackup(sourceFullPath, target, password = None): sf = open(sourceFullPath, 'r') l = json.load(sf) if l.has_key("encPass"): if password is None: print 'need password' return else: en = enc.encryptorBase64Out(password) if str(md5.new(password+l["time-duration"]).hexdigest()) != l["encPass"]: print 'pass not match:', str(md5.new(password+l["time-duration"]).hexdigest()), l["encPass"] return res = dbExporterV3.dictListDecryptor(l["add"], en) else: res = l["add"] l["add"] = res del l["encPass"] s = json.dumps(l, sort_keys=True, indent=4) f = open(target,'w') f.write(s) f.close()
def finalize(self): #print self.info_dict #print len(self.info_dict) if len(self.info_dict) == 0: print "finalize without any content, return directly" return s = json.dumps(self.info_dict, sort_keys=True, indent=4) infoFilePath = transform.transformDirToInternal( fileTools.getTimestampWithFreeName(self.working_dir, "."+gInfoFileExt, gInfoFilePrefix)) logFile = open(infoFilePath, 'w') logFile.write(s) logFile.close() #print s info(infoFilePath) self.storage.add_file(infoFilePath) self.storage.finalize_one_trunk() for i in self.saving_items: self.collection.addObj(i, self.saving_items[i]) self.saving_items = {} self.info_dict = {} info("trunk finalized")
def addItem(self, fullPath):
    """Queue *fullPath* onto the monitor-service beanstalkd tube as JSON."""
    conn = beanstalkc.Connection(host=gBeanstalkdServerHost, port=gBeanstalkdServerPort)
    conn.use(gMonitorServiceTubeName)
    payload = json.dumps({"fullPath": fullPath}, sort_keys=True, indent=4)
    conn.put(payload)
def addItem(self, itemDict):
    """Serialize *itemDict* to JSON and queue it onto this object's input tube."""
    conn = beanstalkc.Connection(host=gBeanstalkdServerHost, port=gBeanstalkdServerPort)
    conn.use(self.inputTubeName)
    payload = json.dumps(itemDict, sort_keys=True, indent=4)
    conn.put(payload)
def saveState(self):
    """Persist self.config to self.stateStoragePath as pretty-printed JSON.

    Uses a with-block so the file handle is closed even if the write
    raises (the original leaked the handle on error).
    """
    s = json.dumps(self.config, sort_keys=True, indent=4)
    with open(self.stateStoragePath, 'w') as f:
        f.write(s)
def store(self):
    """Persist self.config to self.configPath as pretty-printed JSON.

    Uses a with-block so the file handle is closed even if the write
    raises (the original leaked the handle on error).
    """
    s = json.dumps(self.config, sort_keys=True, indent=4)
    with open(self.configPath, 'w') as f:
        f.write(s)