def InitDirList():
    '''Initialize the list of backup directories'''

    Mkdir(Config.get_WorkingFolder())
    DirListAdd(Config.get_WorkingFolder())
    DebugPrint(1, 'List of backup directories: ', backupDirList)

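# Usage sketch (illustrative only; the extra path below is hypothetical):
# InitDirList() seeds backupDirList with the configured working folder, and
# further backup locations can be appended with DirListAdd().
#
#   InitDirList()
#   DirListAdd('/var/spool/gratia/backup')
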
def CompressOutbox(probe_dir, outbox, outfiles):

    # Compress the probe_dir/outbox and store the resulting tar.bz2 file
    # in probe_dir/staged

    global outstandingStagedTarCount

    staged_store = os.path.join(probe_dir, 'staged', 'store')
    Mkdir(staged_store)

    staging_name = GenerateFilename('tz.', staged_store)
    DebugPrint(1, 'Compressing outbox in tar.bz2 file: ' + staging_name)

    try:
        tar = tarfile.open(staging_name, 'w:bz2')
    except KeyboardInterrupt:
        raise
    except SystemExit:
        raise
    except Exception as e:
        DebugPrint(0, 'Warning: Exception caught while opening tar.bz2 file: '
                   + staging_name + ':')
        DebugPrint(0, 'Caught exception: ', e)
        DebugPrintTraceback()
        return False

    try:
        for f in outfiles:
            # Reduce the size of the file name in the archive
            arcfile = f.replace(Config.getFilenameFragment(), r'')
            arcfile = arcfile.replace('..', '.')
            tar.add(os.path.join(outbox, f), arcfile)
    except KeyboardInterrupt:
        raise
    except SystemExit:
        raise
    except Exception as e:
        DebugPrint(0, 'Warning: Exception caught while adding ' + f + ' from '
                   + outbox + ' to tar.bz2 file: ' + staging_name + ':')
        DebugPrint(0, 'Caught exception: ', e)
        DebugPrintTraceback()
        return False

    try:
        tar.close()
    except KeyboardInterrupt:
        raise
    except SystemExit:
        raise
    except Exception as e:
        DebugPrint(0, 'Warning: Exception caught while closing tar.bz2 file: '
                   + staging_name + ':')
        DebugPrint(0, 'Caught exception: ', e)
        DebugPrintTraceback()
        return False

    outstandingStagedTarCount += 1
    return True

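# Inspection sketch (illustrative only; the archive path is hypothetical): the
# archives written above are ordinary bz2-compressed tar files named
# 'tz.<unique>' under <probe_dir>/staged/store, so they can be listed with the
# standard tarfile module.
#
#   import tarfile
#   with tarfile.open('/tmp/probe/staged/store/tz.example', 'r:bz2') as t:
#       for member in t.getnames():
#           print(member)
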
def QuarantineTransientInputFiles(self):
    '''Copy any transient input files to a quarantine directory'''

    quarantinedir = os.path.join(Config.get_DataFolder(), "quarantine")
    Mkdir(quarantinedir)
    for filename in self.TransientInputFiles:
        DebugPrint(1, 'Moving transient input file: ' + filename
                   + ' to quarantine in ' + quarantinedir)
        shutil.copy2(filename, quarantinedir)
        RemoveFile(filename)
    self.TransientInputFiles = []

def QuarantineFile(filename, isempty):

    # If we have trouble with a file, let's quarantine it.
    # If the quarantine reason is 'only' that the file is empty,
    # list the file as such.

    dirname = os.path.dirname(filename)
    pardirname = os.path.dirname(dirname)
    if os.path.basename(dirname) != 'outbox':
        toppath = dirname
    else:
        if os.path.basename(pardirname) == 'staged':
            toppath = os.path.dirname(pardirname)
        else:
            toppath = pardirname
    quarantine = os.path.join(toppath, 'quarantine')
    Mkdir(quarantine)
    DebugPrint(0, 'Putting a quarantine file in: ' + quarantine)
    DebugPrint(3, 'Putting a file in quarantine: ' + os.path.basename(filename))
    if isempty:
        try:
            emptyfiles = open(os.path.join(quarantine, 'emptyfile'), 'a')
            emptyfiles.write(filename + '\n')
            emptyfiles.close()
        except:
            DebugPrint(
                0,
                'failed to record that file was empty: ',
                filename,
                '--',
                sys.exc_info(),
                '--',
                sys.exc_info()[0],
                '++',
                sys.exc_info()[1],
                )
    else:
        dest = os.path.join(quarantine, os.path.basename(filename))
        try:
            shutil.copy2(filename, dest)
        except IOError as ie:
            DebugPrint(1, "Unable to copy file %s to dest %s due to error: %s; ignoring"
                       % (filename, dest, ie.strerror))
            return

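# Path-derivation sketch (hypothetical paths): the quarantine directory is
# placed next to the probe data rather than inside the outbox, so all of the
# following examples land in the same place:
#
#   /data/gratiafiles/subdir.X/outbox/r.123        -> /data/gratiafiles/subdir.X/quarantine
#   /data/gratiafiles/subdir.X/staged/outbox/r.123 -> /data/gratiafiles/subdir.X/quarantine
#   /data/gratiafiles/subdir.X/r.123               -> /data/gratiafiles/subdir.X/quarantine
#
#   QuarantineFile('/data/gratiafiles/subdir.X/outbox/r.123', False)
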
def LogToFile(message):
    '''Write a message to the Gratia log file'''

    global __logFileIsWriteable__
    current_file = None
    filename = 'none'

    try:
        # Ensure the 'logs' folder exists
        if os.path.exists(getGratiaConfig().get_LogFolder()) == 0:
            Mkdir(getGratiaConfig().get_LogFolder())

        filename = LogFileName()
        if os.path.exists(filename) and not os.access(filename, os.W_OK):
            os.chown(filename, os.getuid(), os.getgid())
            os.chmod(filename, 0755)

        # Open/Create a log file for today's date
        current_file = open(filename, 'a')

        # Append the message to the log file
        current_file.write(message + '\n')

        __logFileIsWriteable__ = True
    except:
        if __logFileIsWriteable__:
            # Print the error message only once
            print >> sys.stderr, 'Gratia: Unable to log to file: ', filename, ' ', sys.exc_info(), '--', \
                sys.exc_info()[0], '++', sys.exc_info()[1]
        __logFileIsWriteable__ = False

    if current_file != None:
        # Close the log file
        current_file.close()

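# Usage sketch (illustrative only; the messages are hypothetical): LogToFile
# appends one line per call to the file returned by LogFileName(), and reports
# write failures to stderr only once thanks to the __logFileIsWriteable__ flag.
#
#   LogToFile('probe started')
#   LogToFile('sent 5 records to the collector')
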
def __createCertificateFile(self, keyfile, certfile):

    # Get a fresh certificate.

    # if (False):
    #    cakey = createKeyPair(crypto.TYPE_RSA, 1024)
    #    careq = createCertRequest(cakey, CN='Certificate Authority')
    #    cacert = createCertificate(careq, (careq, cakey), 0, (0, 60*60*24*365*1))  # one year
    #    open(keyfile, 'w').write(crypto.dump_privatekey(crypto.FILETYPE_PEM, cakey))
    #    open(certfile, 'w').write(crypto.dump_certificate(crypto.FILETYPE_PEM, cacert))
    #    return True
    # else:

    # Download it from the server.

    # Try this only once per run

    if isCertrequestRejected():
        return False

    # qconnection = ProxyUtil.HTTPConnection(self.get_SSLRegistrationHost(),
    #                                        http_proxy = ProxyUtil.findHTTPProxy())

    qconnection = httplib.HTTPConnection(self.get_SSLRegistrationHost())
    qconnection.connect()
    queryString = urllib.urlencode([('command', 'request'), ('from', self.get_ProbeName()),
                                    ('arg1', 'not really')])
    headers = {'Content-type': 'application/x-www-form-urlencoded'}
    qconnection.request('POST', self.get_RegistrationService(), queryString, headers)
    responseString = utils.bytes2str(qconnection.getresponse().read())
    resplist = responseString.split(':')
    if len(resplist) == 3 and resplist[0] == 'ok':

        # We received the info, let's store it
        # cert = crypto.load_certificate(crypto.FILETYPE_PEM,resplist[1])
        # key = crypto.load_privatekey(crypto.FILETYPE_PEM,resplist[1])

        # First create any sub-directory if needed.

        keydir = os.path.dirname(keyfile)
        if keydir != r'' and os.path.exists(keydir) == 0:
            Mkdir(keydir)
        certdir = os.path.dirname(certfile)
        if certdir != r'' and os.path.exists(certdir) == 0:
            Mkdir(certdir)

        # and then save the pem files

        open(keyfile, 'w').write(resplist[2])
        open(certfile, 'w').write(resplist[1])
    else:

        # We could do
        # os.chmod(keyfile,0600)

        DebugPrint(4, 'DEBUG: Connect: FAILED')
        DebugPrint(0, 'Error: while getting new certificate: ' + responseString)
        DebugPrintTraceback()
        setCertrequestRejected()
        return False
    return True

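# Response-format sketch (as implied by the parsing above, not an independent
# specification): the registration service is expected to reply with three
# colon-separated fields, 'ok:<certificate PEM>:<key PEM>'. The literal reply
# below is hypothetical.
#
#   resplist = 'ok:CERT-PEM-DATA:KEY-PEM-DATA'.split(':')
#   assert len(resplist) == 3 and resplist[0] == 'ok'
#   # resplist[1] is written to certfile, resplist[2] to keyfile
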
def OpenNewRecordFile(dirIndex):

    global outstandingRecordCount

    # The file name will be r$pid.ConfigFragment.gratia.xml__UNIQUE

    DebugPrint(3, 'Open request: ', dirIndex)
    index = 0
    toomanyfiles = outstandingRecordCount >= Config.get_MaxPendingFiles()
    toomanystaged = outstandingStagedTarCount >= Config.get_MaxStagedArchives()

    if not toomanyfiles or not toomanystaged:
        for current_dir in backupDirList:
            index = index + 1
            if index <= dirIndex or not os.path.exists(current_dir):
                continue
            DebugPrint(3, 'Open request: looking at ', current_dir)
            current_dir = os.path.join(current_dir, 'gratiafiles')
            probe_dir = os.path.join(current_dir, 'subdir.' + Config.getFilenameFragment())
            working_dir = os.path.join(probe_dir, 'outbox')
            if toomanyfiles:
                if not os.path.exists(working_dir):
                    continue

                # Need to find and pack the full outbox

                outfiles = os.listdir(working_dir)
                if len(outfiles) == 0:
                    continue

                if CompressOutbox(probe_dir, working_dir, outfiles):
                    # then delete the content
                    for f in os.listdir(working_dir):
                        RemoveRecordFile(os.path.join(working_dir, f))

                    # And reset the Bundle if needed.
                    if global_state.CurrentBundle.nItems > 0:
                        hasHandshake = global_state.CurrentBundle.nHandshakes > 0
                        global_state.CurrentBundle.clear()
                        if hasHandshake:
                            # Done to break circular dependency between send and sandbox_mgmt
                            __import__("gratia.common.send").common.send.Handshake()
                else:
                    continue

                # and retry
                toomanyfiles = outstandingRecordCount >= Config.get_MaxPendingFiles()
                if toomanyfiles:
                    # We did not free up enough files, let's move on
                    continue

            if not os.path.exists(working_dir):
                try:
                    Mkdir(working_dir)
                except:
                    continue

            if not os.path.exists(working_dir):
                continue

            if not os.access(working_dir, os.W_OK):
                continue

            try:
                filename = GenerateFilename('r.', working_dir)
                DebugPrint(3, 'Creating file:', filename)
                outstandingRecordCount += 1
                f = open(filename, 'w')
                dirIndex = index
                return (f, dirIndex)
            except:
                continue
    else:
        DebugPrint(0, 'DEBUG: Too many pending files, the record has not been backed up')
    f = sys.stdout
    dirIndex = index
    return (f, dirIndex)

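# Usage sketch (illustrative only; the record payload is hypothetical): callers
# get back an open file handle plus the index of the backup directory that
# accepted the record; if no directory can take it, the handle falls back to
# sys.stdout.
#
#   f, dirIndex = OpenNewRecordFile(0)
#   f.write('<JobUsageRecord/>')
#   if f is not sys.stdout:
#       f.close()
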
def SearchOutstandingRecord():
    '''Search the list of backup directories for outstanding records'''

    global hasMoreOutstandingRecord
    global outstandingRecordCount
    global outstandingStagedTarCount
    global outstandingStagedRecordCount

    outstandingRecord.clear()
    outstandingRecordCount = 0
    outstandingStagedTarCount = 0
    outstandingStagedRecordCount = 0

    fragment = Config.getFilenameFragment()

    DebugPrint(4, 'DEBUG: Starting SearchOutstandingRecord')
    for current_dir in backupDirList:
        DebugPrint(4, 'DEBUG: SearchOutstandingRecord ' + current_dir)
        DebugPrint(4, 'DEBUG: Middle of SearchOutstandingRecord outbox:' + str(outstandingRecordCount)
                   + ' staged outbox:' + str(outstandingStagedRecordCount)
                   + ' tarfiles:' + str(outstandingStagedTarCount))

        gratiapath = os.path.join(current_dir, 'gratiafiles')
        subpath = os.path.join(gratiapath, 'subdir.' + fragment)
        outbox = os.path.join(subpath, 'outbox')
        staged = os.path.join(subpath, 'staged')
        stagedoutbox = os.path.join(subpath, 'staged', 'outbox')

        # For backward compatibility still look for the records in the top level
        # gratiafiles directories.

        path = os.path.join(gratiapath, 'r*.' + Config.get_GratiaExtension())
        files = glob.glob(path) + glob.glob(path + '__*')
        DebugPrint(4, 'DEBUG: Search add ' + str(len(files)) + ' for ' + path)
        outstandingRecordCount += len(files)
        for f in files:

            # Legacy reprocess files or ones with the correct fragment

            if re.search(r'/?r(?:[0-9]+)?\.?[0-9]+(?:\.' + fragment + r')?\.'
                         + Config.get_GratiaExtension() + r'(?:__.{10})?$', f):
                AddOutstandingRecord(f)

            if len(outstandingRecord) >= __maxFilesToReprocess__:
                break

        # Record the number of tar files already on disk.

        stagedfiles = glob.glob(os.path.join(staged, 'store', 'tz.*'))
        outstandingStagedTarCount += len(stagedfiles)

        if len(outstandingRecord) >= __maxFilesToReprocess__:
            break

        # Now look for the record in the probe specific subdirectory.

        if ListOutstandingRecord(outbox, False):
            break
        prevOutstandingStagedRecordCount = outstandingStagedRecordCount
        if ListOutstandingRecord(stagedoutbox, True):
            break

        # If the total number of outstanding files is less than the number of files already in the bundle,
        # let's decompress one of the tar files (if any).

        needmorefiles = outstandingStagedRecordCount == 0 or \
            outstandingRecordCount + outstandingStagedRecordCount <= global_state.CurrentBundle.nFiles
        if needmorefiles and len(stagedfiles) > 0:

            # the staged/outbox is low on files and we have some staged tar files

            in_stagedoutbox = outstandingStagedRecordCount - prevOutstandingStagedRecordCount
            if in_stagedoutbox != 0 and global_state.CurrentBundle.nFiles > 0:
                # This staged outbox is not empty, so let's first empty it.
                # NOTE: import statement is here to break circular dependency between bundle and sandbox_mgmt
                responseString, _ = __import__("gratia.common.bundle").common.bundle.ProcessBundle(
                    global_state.CurrentBundle)
                DebugPrint(0, responseString)
                DebugPrint(0, '***********************************************************')
                if global_state.CurrentBundle.nItems > 0:
                    # The upload did not work, there is no need to proceed with the record collection
                    break

            # The staged outbox is empty, we can safely untar the file without risking over-writing
            # a file.

            stagedfile = stagedfiles[0]
            if UncompressOutbox(stagedfile, stagedoutbox):
                RemoveFile(stagedfile)
            else:
                Mkdir(os.path.join(staged, 'quarantine'))
                os.rename(stagedfile, os.path.join(staged, 'quarantine', os.path.basename(stagedfile)))

            outstandingStagedTarCount += -1
            outstandingStagedRecordCount = prevOutstandingStagedRecordCount
            if ListOutstandingRecord(stagedoutbox, True):
                break

    # Mark that we probably have more outstanding records to look at.

    hasMoreOutstandingRecord = outstandingStagedTarCount > 0 or len(outstandingRecord) >= __maxFilesToReprocess__

    DebugPrint(4, 'DEBUG: List of Outstanding records: ', outstandingRecord.keys())
    DebugPrint(4, 'DEBUG: After SearchOutstandingRecord outbox:' + str(outstandingRecordCount)
               + ' staged outbox:' + str(outstandingStagedRecordCount)
               + ' tarfiles:' + str(outstandingStagedTarCount))

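# Directory-layout sketch (as assumed by the globs above; <backup_dir>, <ext>
# and <fragment> stand for the configured values):
#
#   <backup_dir>/gratiafiles/r*.<ext>                              legacy top-level records
#   <backup_dir>/gratiafiles/subdir.<fragment>/outbox/             current record files
#   <backup_dir>/gratiafiles/subdir.<fragment>/staged/outbox/      records unpacked from archives
#   <backup_dir>/gratiafiles/subdir.<fragment>/staged/store/tz.*   compressed archives of old outboxes
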