def __cleanupSpool(self):
	"""Remove expired XML spool files from META and DELETED and age out ARCHIVE.

	Files older than 30 days are removed from both directories; the
	ARCHIVE directory tree is compressed after 7 and removed after 30
	days.  The two removal passes were duplicated in the original code
	and are folded into one local helper here.
	"""
	def removeAged(directory):
		# remove every selected XML file, counting successes and failures
		count = [0, 0]
		for path in self.__selectXML(directory, 30, True):
			if self.remove(path):
				count[0] += 1
			else:
				count[1] += 1
		self.log(
			agn.LV_INFO,
			'%d files found, %d successful, %d failed: %s' %
			(sum(count), count[0], count[1], 'removed from %s' % directory))
	removeAged(agn.mkpath(agn.base, 'var', 'spool', 'META'))
	removeAged(agn.mkpath(agn.base, 'var', 'spool', 'DELETED'))
	#
	archive = agn.mkpath(agn.base, 'var', 'spool', 'ARCHIVE')
	if os.path.isdir(archive):
		self.cleanupTimestampDirectories(archive, 7, 30)
def __init__ (self):
	"""Set up the spool directories, the shared queue counter and the MTA wrapper."""
	spool = lambda name: agn.mkpath (agn.base, 'var', 'spool', name)
	self.incoming = spool ('DIRECT')
	self.archive = spool ('ARCHIVE')
	self.recover = spool ('RECOVER')
	self.queues = [spool ('QUEUE')]
	# shared integer across worker processes, selects the next queue
	self.cur = agn.multiprocessing.Value ('i', 0)
	self.mta = agn.MTA ()
def __init__(self, manager=None, ns=None):
	"""Create and bootstrap a plugin manager for this process.

	Plugins are searched in the distribution directory first, then in
	the local plugin directory; an optional pluginVersion attribute of
	the instance is forwarded as API version.
	"""
	apiVersion = getattr(self, 'pluginVersion', None)
	searchPaths = [
		agn.mkpath(agn.base, 'scripts', 'plugins', agn.logname),
		agn.mkpath(agn.base, 'plugins', agn.logname),
	]
	factory = Manager if manager is None else manager
	self.mgr = factory(paths=searchPaths, apiVersion=apiVersion)
	self.mgr.bootstrap(configFile='%s.cfg' % agn.logname, ns=ns)
class Mailing(agn.mutable): #{{{
	# well-known spool and tracking directories, shared by all instances
	meta = agn.mkpath(agn.base, 'var', 'spool', 'META')
	archive = agn.mkpath(agn.base, 'var', 'spool', 'ARCHIVE')
	track = agn.mkpath(agn.base, 'var', 'track')
	def __init__(self, statusID, statusField, mailingID, companyID, check): #{{{
		"""Collect recovery state for one maildrop status entry."""
		self.statusID = statusID
		self.statusField = statusField
		self.mailingID = mailingID
		self.companyID = companyID
		self.check = check
		# filled later by collectSeen()
		self.seen = None
		self.pattern = None
		# scratch and result files for the recovery list, keyed by status id
		self.tempFile = agn.mkpath(self.meta, '.recover-%d.temp' % self.statusID)
		self.recoverFile = agn.mkpath(self.meta, 'recover-%d.list' % self.statusID)
		self.count = 0
		self.active = True
		self.current = 0
		self.last = 0
	#}}}
	def done(self): #{{{
		"""Remove the temporary and recovery list files, if present."""
		for path in [self.tempFile, self.recoverFile]:
			if os.path.isfile(path):
				os.unlink(path)
	#}}}
	def __cmp__(self, other): #{{{
		# order mailings by their status id (Python 2 rich-compare fallback)
		return cmp(self.statusID, other.statusID)
	#}}}
	def __parseXML(self, path): #{{{
		"""Extract customer ids from a gzipped XML file into self.seen.

		Only scans the part after the <receivers> marker; collects
		every customer_id attribute found there.
		"""
		pattern = re.compile('<receiver customer_id="([0-9]+)"')
		fd = gzip.open(path, 'r')
		try:
			current = set()
			mode = 0
			for line in fd:
				if mode == 0:
					# skip the prologue until the receivers section starts
					if '<receivers>' in line:
						mode = 1
				elif mode == 1:
					mtch = pattern.search(line)
					if not mtch is None:
						current.add(int(mtch.groups()[0]))
			self.seen.update(current)
		except IOError, e:
			agn.log(agn.LV_WARNING, 'parse', 'Failed to parse "%s": %r' % (path, e.args))
		fd.close()
class Rule:
	# number of seconds a cached rule instance stays valid
	lifetime = 180
	# per-rule-id file pattern and global fallback rule file
	rulePattern = agn.mkpath (agn.base, 'var', 'lib', 'bav_%s.rule')
	ruleFile = agn.mkpath (agn.base, 'lib', 'bav.rule')
	enrichPattern = agn.mkpath (agn.base, 'var', 'lib', 'bav_%s.enrich')
	# patterns to extract a DSN status code from bounce text, in priority order
	DSNRE = (re.compile ('[45][0-9][0-9] +([0-9]\\.[0-9]\\.[0-9]) +(.*)'),
		 re.compile ('\\(#([0-9]\\.[0-9]\\.[0-9])\\)'),
		 re.compile ('^([0-9]\\.[0-9]\\.[0-9])'))
	def __init__ (self, rid, now):
		"""Load the rule sections for rule id rid, plus an optional enrich template."""
		self.rid = rid
		self.created = now
		self.sections = {}
		# try the id specific rule file first, fall back to the global one
		for fname in [Rule.rulePattern % rid, Rule.ruleFile]:
			try:
				fd = open (fname, 'r')
				agn.log (agn.LV_DEBUG, 'rule', 'Reading rules from %s' % fname)
			except IOError, e:
				agn.log (agn.LV_DEBUG, 'rule', 'Unable to open %s %s' % (fname, `e.args`))
				fd = None
			if fd:
				break
		if fd:
			# NOTE(review): fd is not closed after reading — confirm whether
			# this relies on interpreter cleanup
			cur = None
			# ini-style format: [section] headers followed by rule lines
			for line in [l.rstrip ('\r\n') for l in fd if len (l) > 0 and not l[0] in '\n#']:
				if line[0] == '[' and line[-1] == ']':
					name = line[1:-1]
					if self.sections.has_key (name):
						cur = self.sections[name]
					else:
						cur = Section (name)
						self.sections[name] = cur
				elif cur:
					cur.append (line)
		# optional enrichment template for this rule id
		fname = Rule.enrichPattern % rid
		try:
			fd = open (fname)
			data = fd.read ()
			fd.close ()
			self.enrich = agn.Template (data, """
import codecs
def dbstr (s):
	return codecs.encode (unicode (s, 'UTF-8'), mailinfo.charset)
""")
			try:
				self.enrich.compile ()
			except agn.error as e:
				self.enrich = None
				agn.log (agn.LV_ERROR, 'enrich', 'Failed to compile template for rule %s: %s' % (rid, e))
		except IOError:
			# no enrich file for this rule id — that is fine
			self.enrich = None
def __init__ (self, statusID, statusField, mailingID, companyID, check):
	"""Record the identification and recovery state of one maildrop entry."""
	# identification of the mailing to recover
	self.statusID = statusID
	self.statusField = statusField
	self.mailingID = mailingID
	self.companyID = companyID
	self.check = check
	# lazily filled while collecting already generated mails
	self.seen = None
	self.pattern = None
	# scratch and result files for the recovery list, keyed by status id
	self.tempFile = agn.mkpath (self.meta, '.recover-%d.temp' % statusID)
	self.recoverFile = agn.mkpath (self.meta, 'recover-%d.list' % statusID)
	# bookkeeping used while writing the recovery list
	self.count = 0
	self.active = True
	self.current = 0
	self.last = 0
class Data (object):
	# configuration files: distributed default and local overrides
	configFilename = agn.mkpath (agn.base, 'var', 'lib', 'bav.conf')
	localFilename = agn.mkpath (agn.base, 'var', 'lib', 'bav.conf-local')
	# per-company rule files live here, named bav_<rid>.rule
	ruleDirectory = agn.mkpath (agn.base, 'var', 'lib')
	ruleFile = agn.mkpath (agn.base, 'lib', 'bav.rule')
	rulePattern = re.compile ('bav_([0-9]+).rule')
	ruleFormat = agn.mkpath (ruleDirectory, 'bav_%d.rule')
	updateLog = agn.mkpath (agn.base, 'var', 'run', 'bav-update.log')
	# sendmail control helpers
	controlSendmail = agn.mkpath (agn.base, 'bin', 'smctrl')
	restartSendmail = agn.mkpath (agn.base, 'var', 'run', 'sendmail-control.sh')
	sendmailBase = '/etc/mail'
	def __init__ (self):
		"""Read system configuration, mailertable and existing autoresponder files."""
		self.fixdomain = agn._syscfg.get ('filter-name', 'localhost')
		self.mta = agn.MTA ()
		self.domains = []
		self.prefix = 'aml_'
		self.last = None
		self.autoresponder = []
		self.mtdom = {}
		self.readMailertable ()
		# pick up autoresponders already materialized as ar_<rid>.mail files
		try:
			files = os.listdir (Autoresponder.directory)
			for fname in files:
				if len (fname) > 8 and fname[:3] == 'ar_' and fname[-5:] == '.mail':
					rid = fname[3:-5]
					self.autoresponder.append (Autoresponder (rid, 0, None, None, None, None, None, None))
		except OSError, e:
			agn.log (agn.LV_ERROR, 'data', 'Unable to read directory %s %s' % (Autoresponder.directory, `e.args`))
		self.updateCount = 0
def collectSeen(self): #{{{
	"""Gather customer ids of mails already generated for this mailing.

	Consumes (removes) matching files in META, inspects archived
	copies read-only and merges in any tracking files.
	"""
	self.seen = set()
	self.pattern = re.compile(
		'^AgnMail(-[0-9]+)?=D[0-9]{14}=%d=%d=[^=]+=liaMngA\\.(stamp|final|xml\\.gz)$'
		% (self.companyID, self.mailingID))
	self.__collect(self.meta, True)
	for subdir in self.check:
		candidate = agn.mkpath(self.archive, subdir)
		if os.path.isdir(candidate):
			self.__collect(candidate, False)
	trackdir = agn.mkpath(self.track, str(self.companyID), str(self.mailingID))
	if os.path.isdir(trackdir):
		self.__collectTrack(trackdir)
def __init__ (self, *args, **kws):
	"""Initialize queue bookkeeping and reload persisted pending generations."""
	super (Sending, self).__init__ (*args, **kws)
	self.in_queue = collections.OrderedDict ()	# waiting, in submission order
	self.in_progress = {}				# currently being generated
	self.pending = None
	pendingName = 'generate-%s.pending' % self.name
	self.pending_path = agn.mkpath (agn.base, 'var', 'run', pendingName)
	self.load_pending ()
def __setupDefault (self):
	"""Create default service activation entries for the current user.

	Determines the user from the password database (falling back to
	$USER), reads optional overrides from activation.defaults and
	creates one entry per matching user:service id.
	"""
	try:
		user = pwd.getpwuid (os.getuid ()).pw_name
	except KeyError:
		try:
			user = os.environ['USER']
		except KeyError:
			user = None
	if user is not None:
		defaults = {}
		dpath = agn.mkpath (agn.base, 'var', 'lib', 'activation.defaults')
		if os.path.isfile (dpath):
			fd = open (dpath)
			# lines look like "activate=<id>" or "deactivate=<id>";
			# malformed lines are silently skipped
			for line in fd:
				try:
					(statusKey, id) = line.strip ().split ('=')
					defaults[id] = {'activate': True, 'deactivate': False}[statusKey]
				except (ValueError, KeyError):
					pass
			fd.close ()
		# file based defaults override the built-in ones (processed last)
		for mapping in self.__defaults, defaults:
			for (id, status) in mapping.items ():
				try:
					(duser, dservice) = id.split (':')
					if duser == user:
						# normalize boolean status to 0/1, leave other values alone
						try:
							status = {True: 1, False: 0}[status]
						except KeyError:
							pass
						self.__create (dservice, status)
				except ValueError:
					# id without a colon — not a user:service entry
					pass
def main():
	"""Entry point: scan the mail log for delivery status lines.

	Command line options: -v verbose, -m maillog path, -s save file,
	-b bounce log, -p provider log.  Selects the scanner matching the
	configured MTA and loops until a termination signal arrives.
	"""
	global term
	maillog = '/var/log/maillog'
	saveFile = agn.mkpath(agn.base, 'var', 'run', 'slrtscn.save')
	bounceLog = agn.mkpath(agn.base, 'log', 'extbounce.log')
	providerLog = agn.normalize_path(None)
	(opts, param) = getopt.getopt(sys.argv[1:], 'vm:s:b:p:')
	for opt in opts:
		if opt[0] == '-v':
			agn.outlevel = agn.LV_DEBUG
			agn.outstream = sys.stderr
		elif opt[0] == '-m':
			maillog = opt[1]
		elif opt[0] == '-s':
			saveFile = opt[1]
		elif opt[0] == '-b':
			bounceLog = opt[1]
		elif opt[0] == '-p':
			providerLog = opt[1]
	# choose the scanner implementation matching the active MTA
	scanners = {
		None: ScannerSendmail,
		'sendmail': ScannerSendmail,
		'postfix': ScannerPostfix
	}
	mta = agn.MTA()
	scanner = scanners.get(mta.mta, scanners[None])(maillog, saveFile, bounceLog, providerLog)
	#
	signal.signal(signal.SIGINT, handler)
	signal.signal(signal.SIGTERM, handler)
	signal.signal(signal.SIGHUP, signal.SIG_IGN)
	signal.signal(signal.SIGPIPE, signal.SIG_IGN)
	#
	agn.lock()
	agn.log(agn.LV_INFO, 'main', 'Starting up')
	agn.log(agn.LV_INFO, 'main', 'Scanning for %s using %s' % (mta.mta, scanner.__class__.__name__))
	# poll the logfile once a second until a termination signal sets term
	while not term:
		time.sleep(1)
		agn.mark(agn.LV_INFO, 'loop', 180)
		scanner.scan()
	#
	scanner.done()
	agn.log(agn.LV_INFO, 'main', 'Going down')
	agn.unlock()
def __init__ (self):
	"""Prepare the activator database path and determine the default status."""
	self.path = agn.mkpath (agn.base, 'var', 'run', 'activator.db')
	self.db = None
	self.cursor = None
	# honour ACTIVATOR_DEFAULT_STATUS from the environment, default to active
	try:
		envStatus = agn.atob (os.environ['ACTIVATOR_DEFAULT_STATUS'])
		self.defaultStatus = {True: 1, False: 0}[envStatus]
	except KeyError:
		self.defaultStatus = 1
def collectSeen (self): #{{{
	"""Collect customer ids of mails already generated for this mailing.

	Matching files in META are consumed (removed); archived copies in
	the checked subdirectories are only inspected.
	"""
	self.seen = set ()
	self.pattern = re.compile ('^AgnMail(-[0-9]+)?=D[0-9]{14}=%d=%d=[^=]+=liaMngA\\.(stamp|final|xml\\.gz)$' % (self.companyID, self.mailingID))
	self.__collect (self.meta, True)
	for subdirectory in self.check:
		archived = agn.mkpath (self.archive, subdirectory)
		if os.path.isdir (archived):
			self.__collect (archived, False)
def main ():
	"""Synchronize DKIM configuration with the database and process sendmail logs."""
	db = agn.DBaseID ('db.name')
	db.open ()
	c = db.cursor ()
	ip_checker(c)
	create_logs()
	reconfig_dkim(c)
	# NOTE(review): local name shadows the stdlib pwd module if imported
	pwd = agn.mkpath (agn.base, 'var', 'spool', 'sendmail-log')
	list_of_logs = filter(lambda x: x.endswith('.log'), os.listdir(pwd))
	x_dmn = c.queryc ('SELECT client_id, domain FROM dkim_client_key_tbl WHERE %s' % ACTIVE)
	x_ip = c.queryc ('SELECT client_id, ip FROM dkim_client_ip_tbl WHERE %s' % ACTIVE)
	agn.log (agn.LV_DEBUG, 'main', 'x_dmn = %s' % x_dmn.data)
	agn.log (agn.LV_DEBUG, 'main', 'x_ip = %s' % x_ip.data)
	if list_of_logs:
		# map domain -> client_id for the log parser
		domains = {}
		for dmn in x_dmn:
			domains[dmn[1]] = dmn[0]
		count = 0
		for i in list_of_logs:
			path = agn.mkpath(pwd,i)
			fd = open(path, "r")
			log = fd.read()
			fd.close()
			client_info = parsing(log)
			# parsing() signals "no domain found" with the literal string "None"
			if client_info[0] != "None":
				client_checker_domains(c, client_info, domains)
			else:
				client_checker_ips(c, client_info, x_ip.data)
			count += 1
			# flush the database periodically while processing many logs
			if count % 1000 == 0:
				c.sync ()
		c.sync ()
		# all logs processed successfully — remove them
		for i in list_of_logs:
			path = agn.mkpath(pwd,i)
			os.unlink (path)
	c.close ()
	db.close ()
def main ():
	"""Entry point of the bav daemon: dispatch mailspool workspaces to children.

	Options: -c max children, -d poll delay, -s spool directory,
	-w workspace size, -m store size.  Extra arguments trigger a
	one-shot debug run instead of daemon mode.
	"""
	global running
	setupSignals ()
	maxChildren = 10
	delay = 10
	spooldir = agn.mkpath (agn.base, 'var', 'spool', 'mail')
	worksize = None
	size = 65536
	(opts, parm) = getopt.getopt (sys.argv[1:], 'c:d:s:w:m:')
	for opt in opts:
		if opt[0] == '-c':
			maxChildren = int (opt[1])
		elif opt[0] == '-d':
			delay = int (opt[1])
		elif opt[0] == '-s':
			spooldir = opt[1]
		elif opt[0] == '-w':
			worksize = int (opt[1])
		elif opt[0] == '-m':
			size = int (opt[1])
	if len (parm) > 0:
		# non-option arguments: run in debug mode and exit
		bavDebug (parm)
		sys.exit (0)
	agn.lock ()
	agn.log (agn.LV_INFO, 'bavd', 'Starting up')
	lastCheck = -1
	children = []
	spool = eagn.Mailspool (spooldir, worksize = worksize, scan = False, storeSize = size)
	while running:
		now = time.localtime ()
		# once per day verify the procmailrc is in place
		if now.tm_yday != lastCheck:
			checkProcmailrc (now, spool.store)
			lastCheck = now.tm_yday
		if len (children) < maxChildren:
			# only rescan for new workspaces when no child is active
			if len (children) == 0:
				spool.scanWorkspaces ()
			for ws in spool:
				agn.log (agn.LV_VERBOSE, 'bavd', 'New child starting in %s' % ws.ws)
				ch = Child (ws)
				ch.start (size)
				children.append (ch)
				if len (children) >= maxChildren:
					break
		# sleep in one second steps so signals and child exits are noticed quickly
		n = delay
		while running and n > 0:
			time.sleep (1)
			if children:
				children = waitForChildren (children)
			n -= 1
	# shutdown: signal remaining children until all have terminated
	while children:
		agn.log (agn.LV_VERBOSE, 'bavd', 'Wait for %d children to terminate' % len (children))
		for child in children:
			child.signal ()
		time.sleep (1)
		children = waitForChildren (children)
	agn.log (agn.LV_INFO, 'bavd', 'Going down')
	agn.unlock ()
def __cleanupFilter(self):
	"""Archive yesterday's filter files and expire old archive directories.

	Files in var/spool/filter matching the filename pattern are moved
	into a YYYYMMDD subdirectory for yesterday, that directory's
	contents are compressed, and dated directories are compressed
	after 30 and removed after 180 days.
	"""
	fpath = agn.mkpath(agn.base, 'var', 'spool', 'filter')
	if os.path.isdir(fpath):
		# fromordinal is a classmethod; the original routed it through a
		# throwaway now() instance for no benefit
		yday = datetime.datetime.fromordinal(self.today - 1)
		apath = agn.mkpath(
			fpath, '%04d%02d%02d' % (yday.year, yday.month, yday.day))
		if not os.path.isdir(apath):
			agn.createPath(apath)
		for fname in [
			_f for _f in os.listdir(fpath)
			if self.__filePattern.match(_f)
		]:
			path = agn.mkpath(fpath, fname)
			if os.path.isfile(path):
				self.move(path, agn.mkpath(apath, fname))
		self.compress(
			[agn.mkpath(apath, _f) for _f in os.listdir(apath)])
		self.cleanupTimestampDirectories(fpath, 30, 180)
def checkProcmailrc (now, destpath):
	"""Read the current content of ~/.procmailrc for verification.

	NOTE(review): this chunk appears truncated — ocontent is read but
	not used within the visible code; presumably the function continues
	by comparing/rewriting the file.
	"""
	prc = agn.mkpath (agn.base, '.procmailrc')
	try:
		fd = open (prc, 'r')
		ocontent = fd.read ()
		fd.close ()
	except IOError, e:
		# a missing file is expected; anything else is worth a warning
		if e.args[0] != errno.ENOENT:
			agn.log (agn.LV_WARNING, 'chk', 'Failed to read "%s": %r' % (prc, e.args))
		ocontent = ''
def __collect (self, path, remove): #{{{ files = os.listdir (path) for fname in [_f for _f in files if not self.pattern.match (_f) is None]: fpath = agn.mkpath (path, fname) if remove: try: os.unlink (fpath) agn.log (agn.LV_DEBUG, 'collect', 'File "%s" removed' % fpath) except OSError, e: agn.log (agn.LV_ERROR, 'collect', 'Failed to remove file "%s": %r' % (fpath, e.args)) elif fname.endswith ('.xml.gz'): self.__parseXML (fpath)
def __init__(self, cmd, output=False):
	"""Wrap cmd; optionally redirect its output into a per-process log file."""
	super(EWatchdog, self).__init__(cmd)
	self.prior = None
	if not output:
		self.output = None
	else:
		logname = '%s-%06d.log' % (agn.logname, os.getpid())
		self.output = agn.mkpath(agn.base, 'var', 'tmp', logname)
		# start from a clean log file
		if os.path.isfile(self.output):
			os.unlink(self.output)
	self.limit = None
def doit (self, basename):
	"""Unpack one incoming mail package and dispose of its files.

	A package consists of basename.xml.gz plus basename.stamp and
	basename.final control files.  On successful unpacking the data
	file is archived, otherwise it goes to the recover directory.
	"""
	(src, stamp, final) = [agn.mkpath (self.incoming, '%s.%s' % (basename, _e)) for _e in 'xml.gz', 'stamp', 'final']
	if os.path.isfile (src):
		# default disposal target; switched to the archive on success
		target = self.recover
		ok = True
		# both control files must exist and be removable before unpacking
		for path in stamp, final:
			if os.path.isfile (path):
				try:
					os.unlink (path)
				except OSError, e:
					agn.log (agn.LV_ERROR, 'doit', 'Failed to remove %s: %s' % (path, str (e)))
					ok = False
			else:
				agn.log (agn.LV_ERROR, 'doit', 'Failed to find file %s' % path)
				ok = False
		if ok:
			queue = self.__nextQueue ()
			if self.mta (src, targetDirectory = queue, flushCount = '2'):
				agn.log (agn.LV_INFO, 'doit', 'Unpacked %s in %s' % (src, queue))
				try:
					target = agn.mkArchiveDirectory (self.archive)
				except agn.error as e:
					agn.log (agn.LV_ERROR, 'unpack', 'Failed to setup archive directory %s: %s' % (self.archive, e))
					target = self.archive
			else:
				agn.log (agn.LV_ERROR, 'doit', 'Failed to unpack %s in %s' % (src, queue))
				target = self.recover
		else:
			agn.log (agn.LV_INFO, 'doit', 'Do not process %s as control file(s) is/are missing' % src)
		# move the data file to its disposal target; on failure remove it
		dst = agn.mkpath (target, os.path.basename (src))
		try:
			shutil.move (src, dst)
		except (shutil.Error, IOError, OSError), e:
			agn.log (agn.LV_ERROR, 'doit', 'Failed to move %s to %s: %s' % (src, dst, str (e)))
			try:
				os.unlink (src)
			except OSError, e:
				agn.log (agn.LV_ERROR, 'doit', 'Failed to remove file %s: %s' % (src, str (e)))
def move(self, path, destination):
	"""Move path into destination/<day>, never overwriting existing files.

	The day subdirectory is created on demand; name collisions are
	resolved by appending ~<n>~.  If the rename fails, the file is
	retried into the recover directory; if that fails as well, the
	error is logged as fatal and re-raised.
	"""
	# remember the caller's directory: the fallback check below must
	# compare against it, not against the day-suffixed path (the
	# original compared the suffixed path to self.recover, which can
	# never match and recursed forever on persistent failure)
	requested = destination
	n = 0
	filename = os.path.basename(path)
	destination = agn.mkpath(destination, self.day)
	if destination not in self.existing:
		agn.createPath(destination)
		self.existing.add(destination)
	target = agn.mkpath(destination, filename)
	while os.path.isfile(target):
		n += 1
		target = agn.mkpath(destination, '%s~%d~' % (filename, n))
	try:
		os.rename(path, target)
		agn.log(agn.LV_INFO, 'move', 'Moved %s to %s' % (path, target))
	except OSError as e:
		agn.log(agn.LV_ERROR, 'move',
			'Failed to move %s to %s: %s' % (path, target, e))
		if requested != self.recover:
			self.move(path, self.recover)
		else:
			agn.log(agn.LV_FATAL, 'move',
				'Giving up to move %s to %s' % (path, target))
			raise
def __init__(self):
	"""Set up spool subdirectories and runtime state for the pickdist service."""
	super(Pickdist, self).__init__(name='pickdist', method=self.run, args=())
	self.spool = agn.mkpath(agn.base, 'var', 'spool')
	for attribute, directory in (
		('meta', 'META'),
		('archive', 'ARCHIVE'),
		('recover', 'RECOVER'),
		('deleted', 'DELETED'),
		('queue', 'QUEUE'),
	):
		setattr(self, attribute, agn.mkpath(self.spool, directory))
	self.mta = agn.MTA()
	self.day = None		# current day stamp, set per processing round
	self.existing = set()	# directories already known to exist
def scan(self):
	"""Scan new mail log lines since the last checkpoint and parse them.

	If the logfile cannot be opened, access is (re)gained via smctrl
	and a corrupt empty save file is removed so the next round can
	start fresh.
	"""
	global term
	self.expireTracker()
	try:
		fp = agn.Filepos(self.maillog, self.saveFile, checkpoint=1000)
	except agn.error as e:
		agn.log(
			agn.LV_INFO, 'main',
			'Unable to open %s: %s, try to gain access' % (self.maillog, e))
		n = agn.call([agn.mkpath(agn.base, 'bin', 'smctrl'), 'logaccess'])
		if n != 0:
			agn.log(agn.LV_ERROR, 'main',
				'Failed to gain access to %s (%d)' % (self.maillog, n))
		# an empty save file is considered corrupt — remove it
		with agn.Ignore(OSError):
			st = os.stat(self.saveFile)
			if st.st_size == 0:
				agn.log(agn.LV_ERROR, 'main',
					'Remove corrupt empty file %s' % self.saveFile)
				os.unlink(self.saveFile)
		return
	self.mtrack.open()
	try:
		sp = SyslogParser()
		for line in fp:
			try:
				info = sp(line)
				if info is not None:
					if not self.parse(info, line):
						agn.log(agn.LV_WARNING, 'scan',
							'Unparsable line: %s (%r)' % (line, info))
				else:
					agn.log(agn.LV_WARNING, 'scan',
						'Unparsable format: %s' % line)
			except Exception as e:
				# never let a single bad line abort the whole scan
				agn.logexc(agn.LV_ERROR, 'scan',
					'Failed to parse line: %s: %s' % (line, e))
			if term:
				break
	finally:
		fp.close()
		self.mtrack.close()
	self.processCompleted()
def reconfig_dkim(c):
	"""Install DKIM keys for active domains missing from the MTA configuration.

	Compares the active domains in dkim_client_key_tbl against the
	output of the domains_dkim helper and calls bin/reconf_dkim for
	every domain not yet configured.
	"""
	proc = sp.Popen("domains_dkim", stdout=sp.PIPE)
	configured = proc.communicate()[0]
	agn.log (agn.LV_DEBUG, 'reconfig_dkim', 'domains = \n%s' % configured)
	# renamed from "list" which shadowed the builtin
	active = []
	DOM = c.queryc ('SELECT domain FROM dkim_client_key_tbl WHERE %s' % ACTIVE)
	for dmn in DOM:
		active.append(dmn[0])
	agn.log (agn.LV_DEBUG, 'reconfig_dkim', 'list = %s' % active)
	# keep only domains not mentioned in the helper output (substring check)
	missing = [_d for _d in active if _d not in configured]
	agn.log (agn.LV_DEBUG, 'reconfig_dkim', 'list (after filter) = %s' % missing)
	for dmn in missing:
		for key in c.queryc ('SELECT private_key, selector FROM dkim_client_key_tbl WHERE domain = :domain', {'domain': dmn}):
			# fixed: the original applied % to dmn only and passed
			# key[0]/key[1] as stray arguments to agn.log, raising a
			# TypeError ("not enough arguments for format string")
			agn.log (agn.LV_DEBUG, 'reconfig_dkim', 'I am going to call: reconf_dkim %s %s %s' % (dmn, key[0], key[1]))
			rc = agn.call ([agn.mkpath (agn.base, 'bin', 'reconf_dkim'), dmn, key[0], key[1]])
def scan_ready_to_run(self):
	"""Scan META and classify its files; return packages ready to process.

	Returns a tuple (ready, stamps, finals) where ready is the sorted
	list of data files that have both their stamp and final file.
	Orphaned final and stamp files are moved to the archive.
	"""
	finals = collections.defaultdict(list)
	stamps = {}
	availables = set()
	basenames = set()
	data = []
	for filename in (agn.Stream(os.listdir(
			self.meta)).filter(lambda f: f.startswith('AgnMail') and (
				f.endswith('.xml.gz') or f.endswith('.stamp')
				or f.endswith('.final')))):
		info = agn.METAFile(agn.mkpath(self.meta, filename))
		if not info.valid:
			continue
		#
		# bucket each file by its role within the package
		if info.extension == 'final':
			finals[info.mailid].append(info)
		elif info.extension == 'stamp':
			stamps[info.basename] = info
		elif info.extension.startswith('xml'):
			availables.add(info.mailid)
			basenames.add(info.basename)
			if info.isReady():
				data.append(info)
	# final files whose data file is gone are archived
	for info in agn.Stream(finals.values()).chain().filter(
			lambda i: i.mailid not in availables):
		agn.log(
			agn.LV_INFO, 'final',
			'No more data file for final %s found, archive it' % info.filename)
		self.move(info.path, self.archive)
	# stamp files without a matching data file are archived as well
	for info in agn.Stream(stamps.values()).filter(
			lambda i: i.basename not in basenames):
		agn.log(agn.LV_INFO, 'stamp',
			'Move dangeling stamp file %s to archive' % info.filename)
		self.move(info.path, self.archive)
	# when the queue is full only single (non-blocked) packages pass
	isfull = self.queue_is_full()
	ready = (agn.Stream(data).filter(
		lambda i: i.basename in stamps and i.mailid in finals and (
			not isfull or i.single)).sorted(
				key=lambda i: (not i.single, i.timestamp, i.blocknr)).list())
	return (ready, stamps, finals)
class Autoresponder:
	# directory where ar_<rid>.mail / ar_<rid>.limit files live
	directory = agn.mkpath (agn.base, 'var', 'lib')
	def __init__ (self, rid, timestamp, sender, subject, text, html, armid, arst):
		"""Hold one autoresponder definition and its associated file paths."""
		self.rid = rid
		self.timestamp = timestamp
		self.sender = sender
		self.subject = subject
		# text and html bodies are normalized to UTF-8 on construction
		self.text = self._encode (text)
		self.html = self._encode (html)
		self.armid = armid
		self.arst = arst
		self.fname = self.directory + os.sep + 'ar_%s.mail' % rid
		self.limit = self.directory + os.sep + 'ar_%s.limit' % rid
	def _encode (self, s):
		"""Convert s from the configured charset to UTF-8, if necessary."""
		global charset
		if s and charset != 'UTF-8':
			try:
				s = agn.toutf8 (s, charset)
			except Exception, e:
				agn.log (agn.LV_ERROR, 'auto', 'Failed to convert autoresponder text for %s %s' % (self.rid, str (e)))
		return s
def ip_checker(c):
	"""Synchronize active DKIM client IPs with the opendkim trusted hosts.

	Writes the active IPs to list_of_ips.txt, compares them with the
	TrustedHosts file and triggers add_ips / smctrl access load as
	needed.
	"""
	list = []
	with open("/home/transact/bin/list_of_ips.txt", "w") as ip_file:
		for ip in c.query ('SELECT ip FROM dkim_client_ip_tbl WHERE %s' % ACTIVE):
			ip_file.write ('%s\n' % ip[0])
			list.append(ip[0])
	fd = open("/home/opendkim/etc/opendkim/TrustedHosts", "r")
	log = fd.read()
	fd.close()
	agn.log (agn.LV_DEBUG, 'ip_checker', 'list = %s' % list)
	agn.log (agn.LV_DEBUG, 'ip_checker', 'log = %s' % log)
	# lines in TrustedHosts that are no longer active in the database
	rest_log = filter(lambda x: x not in list, log.split("\n"))
	agn.log (agn.LV_DEBUG, 'ip_checker', 'log (after filter) = %s' % rest_log)
	if len(rest_log) > 1:
		agn.log (agn.LV_DEBUG, 'ip_checker', 'we should remove these IPs: %s' % rest_log[1:-1])
		if rest_log[1:-1] != []:
			agn.log (agn.LV_DEBUG, 'ip_checker', 'ok, good... now we are going to delete these IPs: %s' % rest_log[1:-1])
	# active IPs missing from TrustedHosts trigger add_ips
	list = filter(lambda x: x not in log.split("\n"), list)
	agn.log (agn.LV_DEBUG, 'ip_checker', 'list (after filter) = %s' % list)
	if len(list) > 0:
		agn.log (agn.LV_DEBUG, 'ip_checker', 'I am going to call add_ips()')
		agn.call (['add_ips'])
	if os.path.isfile("/home/transact/bin/list_of_ips.txt"):
		rc = agn.call ([agn.mkpath (agn.base, 'bin', 'smctrl'), 'access', 'load', '/home/transact/bin/list_of_ips.txt'])
	else:
		agn.log (agn.LV_DEBUG, 'ip_checker', 'File list_of_ips.txt is not there...')
	# NOTE(review): unconditional removal raises OSError if the file is
	# already gone (the else branch above just logged that) — confirm intent
	os.remove("/home/transact/bin/list_of_ips.txt")
def __collectTrack(self, path): #{{{
	"""Add every customer id recorded in the tracking files below path."""
	for entry in os.listdir(path):
		trackfile = agn.mkpath(path, entry)
		with open(trackfile, 'r') as fd:
			for line in fd:
				self.seen.add(int(line.strip()))
def reportMailings(self): #{{{
	"""Collect active/broken maildrop entries and mail a recovery report.

	Builds the report from scripts/recovery.tmpl; with doit set the
	report is sent by mail, otherwise it is printed to stdout.
	"""
	mails = []
	query = 'SELECT status_id, mailing_id, genstatus, genchange, status_field, senddate FROM maildrop_status_tbl WHERE '
	query += 'genstatus IN (1, 2) AND status_field IN (\'W\', \'R\', \'D\')'
	for (statusID, mailingID, genStatus, genChange, statusField, sendDate) in self.cursor.queryc(query):
		# rule/date based entries still waiting for generation are fine
		if statusField in ('R', 'D') and genStatus == 1:
			continue
		info = self.__mail(mailingID)
		mail = agn.mutable(statusID=statusID,
				   statusField=statusField,
				   mailingID=mailingID,
				   mailingName=info.name,
				   companyID=info.companyID,
				   deleted=info.deleted,
				   genChange=genChange,
				   sendDate=sendDate)
		mails.append(mail)
	if self.report or mails:
		template = agn.mkpath(agn.base, 'scripts', 'recovery.tmpl')
		try:
			fd = open(template, 'r')
			content = fd.read()
			fd.close()
		except IOError, e:
			agn.log(
				agn.LV_ERROR, 'report',
				'Unable to find template "%s": %r' % (template, e.args))
			content = None
		if content:
			ns = {
				'host': socket.getfqdn(),
				'report': self.report,
				'mails': mails
			}
			tmpl = agn.Template(content)
			try:
				body = tmpl.fill(ns)
			except agn.error as e:
				agn.log(agn.LV_ERROR, 'report',
					'Failed to fill template "%s": %s' % (template, e))
				body = None
			if body:
				# resolve subject/sender/receiver from template properties
				# with sensible fallbacks
				charset = tmpl.property('charset')
				subject = tmpl.property('subject')
				if not subject:
					subject = tmpl['subject']
				if not subject:
					subject = 'Recovery report for %s' % ns['host']
				else:
					# a configured subject is itself a template
					stmpl = agn.Template(subject)
					subject = stmpl.fill(ns)
				sender = tmpl.property('sender')
				if not sender:
					sender = 'openemm'
				receiver = tmpl.property('receiver')
				if not receiver:
					receiver = 'openemm'
				if self.doit:
					if charset:
						eagn.EMail.forceEncoding(charset, 'qp')
					mail = eagn.EMail()
					mail.setSender(sender)
					for recv in [_r.strip() for _r in receiver.split(',')]:
						if recv:
							mail.addTo(recv)
					if charset:
						mail.setCharset(charset)
					mail.setSubject(subject)
					mail.setText(body)
					mail.sendMail()
				else:
					# dry run: show what would have been sent
					print('From: %s' % sender)
					print('To: %s' % receiver)
					print('Subject: %s' % subject)
					print('')
					print(body)
fd.write ('[%s]\n' % agn.toutf8 (sect)) for line in rule[sect]: fd.write ('%s\n' % agn.toutf8 (line)) fd.close () except IOError, e: agn.log (agn.LV_ERROR, 'rule', 'Failed to open "%s" for writing: %r' % (fname, e.args)) todel = [] try: for fname in os.listdir (self.ruleDirectory): m = self.rulePattern.match (fname) if m is not None: rid = int (m.groups ()[0]) if rid not in inuse: todel.append (fname) for fname in todel: path = agn.mkpath (self.ruleDirectory, fname) try: os.unlink (path) except OSError, e: agn.log (agn.LV_ERROR, 'rule', 'Failed to remove "%s": %r' % (fname, e.args)) except OSError, e: agn.log (agn.LV_ERROR, 'rule', 'Failed to access ruleDirectory "%s": %r' % (self.ruleDirectory, e.args)) def readDatabase (self, auto): rc = [] db = agn.DBaseID () if not db: agn.log (agn.LV_ERROR, 'data', 'Unable to create database connection') raise agn.error ('readDatabase.open') try: i = db.cursor ()
def __logfilename (self):
	"""Return today's logfile path: <logpath>/YYYYMMDD-<base>."""
	today = time.localtime (time.time ())
	stamp = '%04d%02d%02d' % (today.tm_year, today.tm_mon, today.tm_mday)
	return agn.mkpath (agn.logpath, '%s-%s' % (stamp, self.base))
class ScannerPostfix(Scanner): messageidLog = agn.mkpath(agn.base, 'log', 'messageid.log') trackerPath = agn.mkpath(agn.base, 'var', 'run', 'scanner-postfix.track') SEC_MESSAGEID = 'message-id' def __init__(self, *args, **kws): super(ScannerPostfix, self).__init__(*args, **kws) self.uid = agn.UID() def processCompleted(self): self.__handleMessageIDs() super(ScannerPostfix, self).processCompleted() def __handleMessageIDs(self): if os.path.isfile(self.messageidLog): pfname = '%s.%d' % (self.messageidLog, int(time.time())) nfname = pfname n = 0 while os.path.isfile(nfname): n += 1 nfname = '%s.%d' % (pfname, n) try: os.rename(self.messageidLog, nfname) time.sleep(2) except OSError, e: agn.log( agn.LV_ERROR, 'mid', 'Failed to rename %s to %s: %s' % (self.messageidLog, nfname, str(e))) return agn.log(agn.LV_DEBUG, 'mid', 'Scanning input file %s' % nfname) try: with open(nfname, 'r') as fdi, open(agn.logdataname('messageid'), 'a') as fdo: for line in fdi: fdo.write(line) line = line.strip() try: parts = line.split(';', 5) if len(parts) == 6: rec = { 'licenceID': int(parts[0]), 'companyID': int(parts[1]), 'mailinglistID': int(parts[2]), 'mailingID': int(parts[3]), 'customerID': int(parts[4]) } self.mtrack.put(self.SEC_MESSAGEID, parts[5], rec) agn.log( agn.LV_DEBUG, 'mid', 'Saved licenceID=%s, companyID=%s, mailinglistID=%s, mailingID=%s, customerID=%s for message-id %s' % (parts[0], parts[1], parts[2], parts[3], parts[4], parts[5])) else: raise ValueError( 'expect 6 elements, got only %d' % len(parts)) except ValueError, e: agn.log(agn.LV_ERROR, 'mid', 'Failed to parse %s: %s' % (line, str(e))) except IOError, e: agn.log(agn.LV_ERROR, 'mid', 'Failed to write messagid file: %s' % str(e)) finally:
class ScannerSendmail(Scanner):
	# persistent tracking database for this scanner
	trackerPath = agn.mkpath(agn.base, 'var', 'run', 'scanner-sendmail.track')
	# matches a queue id (6 hex mailing + 3 base36 licence + 8 hex customer)
	# followed by the delivery status
	isstat = re.compile(
		'([0-9A-F]{6}[0-9A-Z]{3}[0-9A-F]{8})[G-Zg-z]?:.*stat=(.*)$')
	# log lines that are known to carry no status and may be ignored
	acceptable = re.compile('|'.join(_s for _s in (
		'starting daemon',
		'[0-9]*[a-z][0-9A-Za-z]+: (from|to)=',
		'STARTTLS',
		'ruleset=tls_server,',
		'(runqueue|grew WorkList for)',
		'[0-9A-Fa-f]+T[0-9A-Fa-f]+:',
		'did not issue MAIL/EXPN/VRFY/ETRN during connection to MTA',
		'timeout waiting for input from [^ ]+ during client',
		'error connecting to filter',
		'Milter .*: to error state',
		'sender notify:',
	)))
	def __unparsable_as_expected(self, line):
		"""Return True if line is a known harmless log entry."""
		return self.acceptable.search(line) is not None
	def parse(self, info, line):
		"""Parse one sendmail log line; return False only for unexpected lines."""
		if info.service not in ('sendmail', 'sm-msp-queue'):
			agn.log(agn.LV_DEBUG, 'parse', 'Skip non sendmail line: %s' % line)
			return True
		mtch = self.isstat.search(info.content)
		if mtch is None:
			if self.__unparsable_as_expected(info.content):
				return True
			return False
		#
		# decode mailing, licence and customer id from the queue id
		(qid, stat) = mtch.groups()
		mailing = int(qid[:6], 16)
		licence = int(qid[6:9], 16)
		if len(qid) == 17:
			customer = int(qid[9:], 16)
		else:
			customer = int(qid[10:], 16)
		#
		# ids in the top range are test recipients — ignore them
		if customer >= 0xf0000000:
			agn.log(
				agn.LV_VERBOSE, 'parse',
				'Line leads to test customerID 0x%x: %s' % (customer, line))
			return True
		#
		self.line(qid, line)
		dsn = info.items.get('dsn')
		record = self.mtrack.get(self.SEC_MTAID, qid)
		update = {'timestamp': info.timestamp}
		# a final DSN (2.x.x success or 5.x.x hard failure) completes the record
		if dsn is not None and (dsn.startswith('2') or dsn.startswith('5')):
			update['complete'] = True
		if 'envelopeFrom' not in record:
			envelopeFrom = info.items.get('ctladdr', '').strip('<>')
			if envelopeFrom:
				update['envelopeFrom'] = envelopeFrom
		if 'to' in info.items and 'envelopeTo' not in record:
			update['envelopeTo'] = info.items['to'].strip('<>')
		for key in 'to', 'dsn', 'status', 'relay':
			if key in info.items:
				update[key] = info.items[key]
		record.update(update)
		self.mtrack.put(self.SEC_MTAID, qid, record)
		if dsn:
			self.writeBounce(dsn, licence, mailing, customer, info.timestamp, stat,
					 info.items.get('relay', ''), info.items.get('to', ''))
		else:
			agn.log(agn.LV_WARNING, 'parse', 'Line has no DSN: %s' % line)
		return True
def __cleanupLogDone(self):
	"""Remove already processed logfiles below log/done."""
	donePath = agn.mkpath(agn.base, 'log', 'done')
	candidates = self.select(donePath, False, self.__compareLogDone)[0]
	for candidate in candidates:
		self.remove(candidate)
def cleanupMailspool(self, compressAfterDays, removeAfterDays):
	"""Compress and eventually remove aged files in the mail store spool."""
	storePath = agn.mkpath(agn.base, 'var', 'spool', 'mail', 'store')
	if not os.path.isdir(storePath):
		return
	self.cleanupTimestampFiles(storePath,
				   [(None, compressAfterDays)],
				   [(None, removeAfterDays)])