def write_deliveries(self, record: Dict[str, Any], lines: List[Tuple[datetime, str]]) -> bool:
    with Ignore(KeyError):
        lines = record.get(self.KEY_LINES, []) + lines
        if lines:
            licence_id = record['licence_id']
            mailing_id = record['mailing_id']
            customer_id = record['customer_id']
            enabled = self.fsdb.get(f'delivery:{licence_id}:{mailing_id}')
            if enabled is not None and atob(enabled):
                with open(self.deliver_log, 'a') as fd:
                    for (timestamp, line) in lines:
                        fd.write(
                            '{licence_id};{mailing_id};{customer_id};{timestamp};{line}\n'
                            .format(
                                licence_id=licence_id,
                                mailing_id=mailing_id,
                                customer_id=customer_id,
                                timestamp=(
                                    f'{timestamp.year:04d}-{timestamp.month:02d}-{timestamp.day:02d} '
                                    f'{timestamp.hour:02d}:{timestamp.minute:02d}:{timestamp.second:02d}'
                                ),
                                line=line))
            with Ignore(KeyError):
                del record[self.KEY_LINES]
            return True
    return False
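# The helpers used above are not defined in this excerpt. A minimal sketch, assuming
# `Ignore` behaves like contextlib.suppress (swallow the listed exception types) and
# `atob` is a lenient string-to-boolean conversion; the real implementations may differ.
from contextlib import suppress


class Ignore(suppress):
    """Context manager that silently ignores the given exception types (assumed)."""
    pass


def atob(value: str) -> bool:
    """Interpret a stored string value as a boolean (assumed semantics)."""
    return value.strip().lower() in ('1', 'y', 'yes', 'true', 'on', '+')


# Usage example:
# with Ignore(KeyError):
#     del {}['missing']        # the KeyError does not escape the block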
def remover (basepath: str) -> bool:
    with Ignore (OSError):
        files = []
        directories = []
        count = 0
        for filename in os.listdir (basepath):
            count += 1
            path = os.path.join (basepath, filename)
            st = os.lstat (path)
            if stat.S_ISREG (st.st_mode):
                if st.st_ctime < expire and st.st_mtime < expire:
                    files.append (path)
            elif stat.S_ISDIR (st.st_mode):
                directories.append (path)
        for path in files:
            with Ignore (OSError):
                if self.doit:
                    os.unlink (path)
                else:
                    self.show ('REMOVE: %s' % path)
                stats.files += 1
                count -= 1
        for path in directories:
            if remover (path):
                with Ignore (OSError):
                    if self.doit:
                        os.rmdir (path)
                    else:
                        self.show ('RMDIR: %s' % path)
                    count -= 1
                    stats.directories += 1
        return count == 0
    return False
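# `expire` and `stats` come from the enclosing scope and are not shown in this excerpt.
# A minimal sketch of how such a cutoff is typically derived, assuming an age-based
# retention in days; the surrounding method may compute it differently.
import time

retention_days = 30                                      # hypothetical retention period
expire = time.time () - retention_days * 24 * 60 * 60
# remover ('/path/to/spool')  # hypothetical call; True means the tree below is now empty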
def parse_rfc3164(self, line: str) -> Optional[SyslogParser.Info]:
    m = self.pattern_rfc3164.match(line)
    if m is not None:
        with Ignore(ValueError, KeyError):
            (month_name, day, hour, minute, second, server, service, content) = m.groups()
            month = {
                'Jan': 1, 'Feb': 2, 'Mar': 3, 'Apr': 4, 'May': 5, 'Jun': 6,
                'Jul': 7, 'Aug': 8, 'Sep': 9, 'Oct': 10, 'Nov': 11, 'Dec': 12
            }[month_name]
            now = datetime.now()
            year = now.year
            if now.month < month:
                year -= 1
            return self.new_info(
                timestamp=datetime(year, month, int(day), int(hour), int(minute), int(second)),
                server=server,
                service=service,
                content=content)
    return None
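# RFC 3164 timestamps carry no year, so the method above assumes the current year and
# steps back one year when the logged month lies ahead of the current month (e.g. a
# December line read in January). A self-contained sketch of that rule:
from datetime import datetime
from typing import Optional


def guess_year(month: int, now: Optional[datetime] = None) -> int:
    now = now or datetime.now()
    return now.year - 1 if now.month < month else now.year


assert guess_year(12, datetime(2024, 1, 5)) == 2023   # December line seen in January
assert guess_year(1, datetime(2024, 1, 5)) == 2024    # same-month line keeps the year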
def parse(self, pid: str) -> bool:
    """Parses an agnPUBID

    If parsing is successful, the instance variables contain the parsed content and
    the method returns True; otherwise the content of the instance variables is
    undefined and the method returns False."""
    rc = False
    dst = self.__decode(pid)
    if dst is not None:
        parts = dst.split(';', 3)
        if len(parts) in (3, 4):
            with Ignore(ValueError):
                mailing_id = int(parts[0])
                customer_id = int(parts[1])
                source = parts[2]
                if len(parts) > 3:
                    selector: Optional[str] = parts[3]
                else:
                    selector = None
                if mailing_id > 0 and customer_id > 0:
                    self.mailing_id = mailing_id
                    self.customer_id = customer_id
                    if source:
                        self.source = source
                    else:
                        self.source = None
                    self.selector = selector
                    rc = True
    return rc
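# The decoded agnPUBID payload is a semicolon separated string of the form
# 'mailing_id;customer_id;source[;selector]'. A small illustration with made-up
# values, showing how the split above maps onto the parsed fields:
sample = '12345;67890;newsletter;button1'        # hypothetical decoded payload
parts = sample.split(';', 3)
assert parts == ['12345', '67890', 'newsletter', 'button1']
assert int(parts[0]) == 12345 and int(parts[1]) == 67890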
def setup(self) -> None:
    EMail.force_encoding('UTF-8', 'quoted-printable')
    self.delay = self.unit.parse('3m')
    self.fqdn = socket.getfqdn().lower()
    if not self.fqdn:
        self.fqdn = fqdn
    self.fixdomain = syscfg.get_str('filter-name', 'localhost')
    self.mta = MTA()
    self.domains: List[str] = []
    self.mtdom: Dict[str, int] = {}
    self.prefix = 'aml_'
    self.last = ''
    self.autoresponder: List[Autoresponder] = []
    self.read_mailertable()
    try:
        files = os.listdir(Autoresponder.directory)
        for fname in files:
            if len(fname) > 8 and fname[:3] == 'ar_' and fname[-5:] == '.mail':
                with Ignore(ValueError, OSError):
                    rid = int(fname[3:-5])
                    st = os.stat(os.path.join(Autoresponder.directory, fname))
                    self.autoresponder.append(
                        Autoresponder(rid, datetime.fromtimestamp(st.st_ctime), None, None))
    except OSError as e:
        logger.error('Unable to read directory %s: %s' % (Autoresponder.directory, e))
def unlock (self, key: int) -> None:
    with Ignore (KeyError):
        self.locks[key] -= 1
        if self.locks[key] <= 0:
            del self.locks[key]
            logger.debug ('%r removed' % key)
        else:
            logger.debug ('%r decreased' % key)
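# A minimal sketch of the counterpart that unlock() implies: self.locks is assumed to
# be a Dict[int, int] reference counter, incremented on lock and decremented on unlock
# until the entry disappears. The real lock() implementation may differ.
def lock (self, key: int) -> None:
    try:
        self.locks[key] += 1
    except KeyError:
        self.locks[key] = 1
    logger.debug ('%r locked (%d)' % (key, self.locks[key]))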
def process_completed(self) -> None:
    now = int(time.time())
    if self.last_processed + 60 < now:
        max_count = 25000        # max. number of records to delete per batch
        outdated = 25 * 60 * 60  # if a record reaches this age, it is assumed to be done; add one hour to compensate for postfix's delay
        to_remove: List[Key] = []
        logger.debug('Start processing completed records')
        for (key, value) in self.mtrack.items():
            diff = int(time.time()) - value[self.mtrack.key_created]
            if diff < 3600:
                diffstr = '%d:%02d' % (diff // 60, diff % 60)
            else:
                diffstr = '%d:%02d:%02d' % (diff // 3600, (diff // 60) % 60, diff % 60)
            #
            if key.section != self.SEC_MTAID:
                logger.debug('Ignore non %s record: %s' % (self.SEC_MTAID, key))
                continue
            #
            if not value.get('complete'):
                if diff < outdated:
                    logger.debug(f'{key}: Ignore not (yet) completed record since {diffstr}')
                    continue
                logger.info(f'{key}: Found outdated incomplete record since {diffstr}')
                value['outdated'] = True
            #
            self.completed(key, value, to_remove)
            #
            self.write_deliveries(value, [])
            to_remove.append(key)
            if len(to_remove) >= max_count:
                logger.info(f'Reached limit of {max_count:,d}, defer further processing')
                break
        #
        if to_remove:
            logger.debug('Remove {:,d} processed keys'.format(len(to_remove)))
            for key in to_remove:
                with Ignore(KeyError):
                    del self.mtrack[key]
                    logger.debug(f'Removed {key}')
            logger.debug('Removed processed keys done')
        else:
            self.last_processed = now
            logger.debug('No records found to remove')
def find(key: str, default_value: str) -> BavUpdate.Filecontent:
    rc = BavUpdate.Filecontent(path=None, content=[], modified=False, hash=None)
    with Ignore(KeyError):
        for element in self.mta.getlist(key):
            hash: Optional[str]
            path: str
            try:
                (hash, path) = element.split(':', 1)
            except ValueError:
                (hash, path) = (None, element)
            if path.startswith(base):
                if rc.path is None:
                    rc.path = path
                    rc.hash = hash
                if not os.path.isfile(path):
                    create_path(os.path.dirname(path))
                    open(path, 'w').close()
                    if hash is not None:
                        self.mta.postfix_make(path)
    if rc.path is not None:
        try:
            with open(rc.path) as fd:
                for line in (_l.strip() for _l in fd):
                    try:
                        (var, val) = [_v.strip() for _v in line.split(None, 1)]
                    except ValueError:
                        var = line
                        val = default_value
                        rc.modified = True
                    if var not in [_c.name for _c in rc.content]:
                        rc.content.append(BavUpdate.Domain(name=var, value=val))
                    else:
                        rc.modified = True
            logger.debug('Read %d lines from %s' % (len(rc.content), rc.path))
        except OSError as e:
            logger.error('Failed to read %s: %s' % (rc.path, e))
    else:
        logger.warning('No path for postfix parameter %s found' % key)
    return rc
def __mail(self, mailing_id: int) -> Recovery.MailingInfo: #{{{
    with Ignore(KeyError):
        return self.mailing_info[mailing_id]
    #
    rq = self.db.querys(
        'SELECT company_id, shortname, deleted FROM mailing_tbl WHERE mailing_id = :mid',
        {'mid': mailing_id})
    self.mailing_info[mailing_id] = rc = Recovery.MailingInfo(
        company_id=rq.company_id if rq is not None else 0,
        name=rq.shortname if rq is not None else f'#{mailing_id} not found',
        deleted=bool(rq.deleted) if rq is not None else False)
    return rc
def parse_rfc5424 (self, line: str) -> Optional[SyslogParser.Info]:
    m = self.pattern_rfc5424.match (line)
    if m is not None:
        with Ignore (ValueError):
            (year, month, day, hour, minute, second, fraction, timezone, _, server, service, content) = m.groups ()
            microseconds = int (float (fraction) * 1000 * 1000) if fraction else 0
            #
            # ignore TZ for now as it cannot be pickled to be stored in tracker
            # tz = self.parse_tz (timezone) if timezone else None
            return self.new_info (
                timestamp = datetime (int (year), int (month), int (day), int (hour), int (minute), int (second), microseconds),
                server = server,
                service = service,
                content = content
            )
    return None
def scan(self, is_active: Callable[[], bool]) -> None:
    self.fsdb.clear()
    self.expire_tracker()
    try:
        fp = Filepos(self.maillog, self.save_file, checkpoint=1000)
    except error as e:
        logger.info('Unable to open %s: %s, try to gain access' % (self.maillog, e))
        n = call([os.path.join(base, 'bin', 'smctrl'), 'logaccess'])
        if n != 0:
            logger.error('Failed to gain access to %s (%d)' % (self.maillog, n))
        with Ignore(OSError):
            st = os.stat(self.save_file)
            if st.st_size == 0:
                logger.error('Remove corrupt empty file %s' % self.save_file)
                os.unlink(self.save_file)
        return
    self.mtrack.open()
    try:
        sp = SyslogParser()
        for line in fp:
            try:
                info = sp(line)
                if info is not None:
                    with log('parse') as log_id:
                        if not self.parse(log_id, info, line):
                            logger.warning('Unparsable line: %s (%r)' % (line, info))
                else:
                    logger.warning('Unparsable format: %s' % line)
            except Exception as e:
                logger.exception('Failed to parse line: %s: %s' % (line, e))
            if not is_active():
                break
    finally:
        fp.close()
        self.mtrack.close()
    self.process_completed()
def setup_parameter(self) -> None: #{{{
    logger.info('Reading parameter from company info table')
    cc = CompanyConfig(self.db)
    cc.read()
    for (company_id, company_config) in cc.company_info.items():
        with Ignore(KeyError):
            parameter = company_config[self.conversion_name]
            try:
                self.config[company_id] = Parameter(parameter)
            except Exception:
                logger.exception('Failed to parse parameter %r for company_id %d' % (parameter, company_id))
                raise error(f'failed to parse parameter {parameter} for {company_id}')
    logger.info('%d parameter read' % len(self.config))
    for row in self.db.query('SELECT company_id, status, mailtracking FROM company_tbl'):
        self.companies[row.company_id] = Softbounce.Company(
            active=row.status == 'active',
            mailtracking=row.mailtracking == 1)
def setup(self) -> None:
    self.delay = BavUpdate.unit.parse('3m')
    self.fqdn = socket.getfqdn().lower()
    if not self.fqdn:
        self.fqdn = fqdn
    self.filter_domain = syscfg.get_str('filter-name', BavUpdate.default_filter_domain)
    if self.filter_domain == BavUpdate.default_filter_domain:
        with DB() as db:
            rq = db.querys('SELECT mailloop_domain FROM company_tbl WHERE company_id = 1')
            if rq is not None and rq.mailloop_domain:
                self.filter_domain = rq.mailloop_domain
    self.mta = MTA()
    self.domains: List[str] = []
    self.mtdom: Dict[str, int] = {}
    self.prefix = 'aml_'
    self.last = ''
    self.autoresponder: List[Autoresponder] = []
    self.read_mailertable()
    try:
        files = os.listdir(Autoresponder.directory)
        for fname in files:
            if len(fname) > 8 and fname[:3] == 'ar_' and fname[-5:] == '.mail':
                with Ignore(ValueError, OSError):
                    rid = int(fname[3:-5])
                    st = os.stat(os.path.join(Autoresponder.directory, fname))
                    self.autoresponder.append(
                        Autoresponder(rid, datetime.fromtimestamp(st.st_ctime), None, None))
    except OSError as e:
        logger.error(f'Unable to read directory {Autoresponder.directory}: {e}')
def __state_check(self, t: Processentry) -> Optional[str]:
    err: Optional[str] = 'Unspecified'
    path = '/proc/%d/status' % t.stats.pid
    try:
        with open(path, 'r') as fd:
            status = fd.read()
        for line in status.split('\n'):
            with Ignore(ValueError):
                (var, val) = [_v.strip() for _v in line.split(':', 1)]
                if var == 'State' and val:
                    state = val[0]
                    if state == 'Z':
                        err = 'is zombie'
                    elif state == 'T':
                        err = 'is stopped'
                    else:
                        err = None
                    break
        else:
            err = 'Failed to find state for process %s in\n%s' % (t.stats, status)
    except IOError as e:
        err = 'Failed to read process path %s: %s' % (path, str(e))
    return err
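# For reference, the relevant part of /proc/<pid>/status that the loop above scans
# looks like the (abbreviated) sample below; only the first letter of the 'State'
# value is inspected ('Z' zombie, 'T' stopped, anything else counts as healthy).
sample_status = 'Name:\tpython3\nState:\tS (sleeping)\nPid:\t4242\n'
for line in sample_status.split('\n'):
    try:
        (var, val) = [_v.strip() for _v in line.split(':', 1)]
    except ValueError:
        continue
    if var == 'State' and val:
        assert val[0] == 'S'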
def remove_from_queue (self, entry: Entry) -> None:
    with Ignore (KeyError):
        removed_entry = entry
        del self.in_queue[entry.statusID]
        logger.debug ('%s: removed from queue' % removed_entry.name)
def read_database(self, auto: List[Autoresponder]) -> List[str]:
    rc: List[str] = []
    with DB() as db:
        company_list: List[int] = []
        new_domains: Dict[str, BavUpdate.RID] = {}
        forwards: List[BavUpdate.Forward] = []
        seen_domains: Set[str] = set()
        accepted_forwards: Set[str] = set()
        ctab: Dict[int, str] = {}
        #
        rc.append('fbl@%s\taccept:rid=unsubscribe' % self.fixdomain)
        for domain in self.domains:
            if domain not in seen_domains:
                rc.append('fbl@%s\talias:fbl@%s' % (domain, self.fixdomain))
                seen_domains.add(domain)
        if self.fixdomain not in seen_domains:
            new_domains[self.fixdomain] = BavUpdate.RID(rid=0, domain=self.fixdomain)
            seen_domains.add(self.fixdomain)
        #
        missing = []
        for row in db.query(
            'SELECT company_id, mailloop_domain FROM company_tbl WHERE status = :status',
            {'status': 'active'}
        ):
            if row.mailloop_domain:
                ctab[row.company_id] = row.mailloop_domain
                if row.mailloop_domain not in seen_domains:
                    rc.append('fbl@%s\talias:fbl@%s' % (row.mailloop_domain, self.fixdomain))
                    if row.mailloop_domain not in self.mtdom and row.mailloop_domain.lower() != self.fqdn:
                        new_domains[row.mailloop_domain] = BavUpdate.RID(rid=0, domain=row.mailloop_domain)
                    seen_domains.add(row.mailloop_domain)
            else:
                missing.append(row.company_id)
            company_list.append(row.company_id)
        if missing:
            missing.sort()
            logger.debug('Missing mailloop_domain for %s' % ', '.join([str(m) for m in missing]))
        #
        seen_rids: Set[int] = set()
        for row in db.query(
            'SELECT rid, shortname, company_id, filter_address, '
            '       forward_enable, forward, ar_enable, '
            '       subscribe_enable, mailinglist_id, form_id, timestamp, '
            '       spam_email, spam_required, spam_forward, '
            '       autoresponder_mailing_id, security_token '
            'FROM mailloop_tbl'
        ):
            if row.company_id not in company_list or row.rid is None:
                continue
            seen_rids.add(row.rid)
            domains: List[str] = [self.fixdomain]
            aliases: List[str] = []
            if row.filter_address is not None:
                for alias in listsplit(row.filter_address):
                    if not alias.startswith(self.prefix):
                        with Ignore(ValueError):
                            domain_part = alias.split('@', 1)[-1]
                            if domain_part not in domains:
                                domains.append(domain_part)
                                if domain_part not in self.mtdom and domain_part not in new_domains:
                                    new_domains[domain_part] = BavUpdate.RID(rid=row.rid, domain=domain_part)
                            aliases.append(alias)
            #
            ar_enable = False
            if row.ar_enable and row.autoresponder_mailing_id:
                if not row.security_token:
                    logger.error('%s: Autoresponder has mailing id, but no security token' % row.rid)
                else:
                    auto.append(
                        Autoresponder(
                            row.rid,
                            row.timestamp if row.timestamp is not None else datetime.now(),
                            row.autoresponder_mailing_id,
                            row.security_token))
                    ar_enable = True
            #
            try:
                cdomain = ctab[row.company_id]
                if cdomain not in domains:
                    if cdomain in self.domains:
                        domains.append(cdomain)
                    else:
                        logger.debug('Company\'s domain "%s" not found in mailertable' % cdomain)
            except KeyError:
                logger.debug('No domain for company found, further processing')
            extra = ['rid=%s' % row.rid]
            if row.company_id:
                extra.append('cid=%d' % row.company_id)
            if row.forward_enable and row.forward:
                extra.append('fwd=%s' % row.forward)
                forwards.append(BavUpdate.Forward(rid=row.rid, address=row.forward))
            if row.spam_email:
                extra.append('spam_email=%s' % row.spam_email)
            if row.spam_forward:
                extra.append('spam_fwd=%d' % row.spam_forward)
            if row.spam_required:
                extra.append('spam_req=%d' % row.spam_required)
            if ar_enable:
                extra.append('ar=%s' % row.rid)
                if row.autoresponder_mailing_id:
                    extra.append('armid=%d' % row.autoresponder_mailing_id)
            if row.subscribe_enable and row.mailinglist_id and row.form_id:
                extra.append('sub=%d:%d' % (row.mailinglist_id, row.form_id))
            for domain in domains:
                line = '%s%s@%s\taccept:%s' % (self.prefix, row.rid, domain, ','.join(extra))
                logger.debug(f'Add line: {line}')
                rc.append(line)
            if aliases and domains:
                for alias in aliases:
                    rc.append('%s\talias:%s%s@%s' % (alias, self.prefix, row.rid, domains[0]))
                    accepted_forwards.add(alias)
        #
        if seen_rids:
            rules: Dict[int, Dict[str, List[str]]] = {}
            for row in db.query('SELECT rid, section, pattern FROM mailloop_rule_tbl'):
                if row.rid in seen_rids:
                    try:
                        rule = rules[row.rid]
                    except KeyError:
                        rule = rules[row.rid] = {}
                    try:
                        sect = rule[row.section]
                    except KeyError:
                        sect = rule[row.section] = []
                    sect.append(row.pattern)
            self.update_rules(rules)
        #
        for forward in forwards:
            with Ignore(ValueError):
                fdomain = (forward.address.split('@', 1)[-1]).lower()
                for domain in self.mtdom:
                    if domain == fdomain and forward.address not in accepted_forwards:
                        logger.warning(
                            '%s: using address "%s" with local handled domain "%s"'
                            % (forward.rid, forward.address, domain))
                refuse = []
                for (domain, new_domain) in ((_d, _n) for (_d, _n) in new_domains.items() if _d == fdomain):
                    logger.warning(
                        '%s: try to add new domain for already existing forward address "%s" in %s, refused'
                        % (new_domain.rid, forward.address, forward.rid))
                    refuse.append(domain)
                for domain in refuse:
                    del new_domains[domain]
        #
        if new_domains:
            if self.mta.mta == 'sendmail':
                cmd = [self.control_sendmail, 'add']
                for domain in new_domains:
                    cmd.append(domain)
                logger.info(f'Found new domains, add them using {cmd}')
                silent_call(*cmd)
                logger.info('Restarting sendmail due to domain update')
                silent_call(self.restart_sendmail)
            self.read_mailertable(new_domains)
    return rc
def parse(self, log_id: LogID, info: SyslogParser.Info, line: str) -> bool:
    if not info.queue_id:
        if info.service == 'postfix/smtpd':
            if (info.content.startswith('connect from ') or
                info.content.startswith('disconnect from ') or
                info.content.startswith('lost connection ') or
                info.content.startswith('timeout after ') or
                info.content == 'initializing the server-side TLS engine' or
                info.content.startswith('setting up TLS connection from ') or
                info.content.startswith('SSL_accept:') or
                info.content == 'SSL3 alert read:fatal:certificate unknown' or
                info.content == 'SSL3 alert write:fatal:handshake failure' or
                info.content.startswith('too many errors after RCPT from') or
                info.content.startswith('Anonymous TLS connection established from')):
                return True
        elif info.service == 'postfix/smtp':
            if (info.content.startswith('connect to ') or
                info.content.startswith('SSL_connect error to ') or
                info.content.startswith('Untrusted TLS connection established to') or
                info.content.startswith('Anonymous TLS connection established to')):
                return True
        elif info.service in ('postfix/postfix-script', 'postfix/tlsproxy', 'postfix/master'):
            return True
        return False
    #
    if info.queue_id in self.ignore_ids or info.service in self.ignore_services:
        return True
    #
    log_id.push(info.queue_id)
    self.line(info, line)
    key = Key(self.SEC_MTAID, info.queue_id)
    if info.service == 'postfix/pickup':
        match = self.pattern_envelope_from.search(info.content)
        if match is not None:
            envelopeFrom = match.group(1)
            self.mtrack.update(key, envelopeFrom=envelopeFrom)
            logger.debug('Found envelopeFrom=%s' % envelopeFrom)
    elif info.service == 'postfix/cleanup':
        match = self.pattern_message_id.search(info.content)
        if match is not None:
            message_id = match.group(1)
            if message_id:
                rec = self.mtrack.get(key)
                mid_key = Key(self.SEC_MESSAGEID, message_id)
                try:
                    midinfo = self.mtrack[mid_key]
                    rec.update(midinfo)
                except KeyError:
                    try:
                        self.uid.parse(message_id.split('@')[0], validate=False)
                        rec.update({
                            'licence_id': self.uid.licence_id,
                            'mailing_id': self.uid.mailing_id,
                            'customer_id': self.uid.customer_id
                        })
                    except error as e:
                        logger.info(f'Failed to parse message_id <{message_id}>: {e}')
                rec['message_id'] = message_id
                self.mtrack[key] = rec
                logger.debug('Found message_id=<%s>' % message_id)
    elif info.service == 'postfix/qmgr' and info.content == 'removed':
        with Ignore(KeyError):
            self.mtrack.update(key, complete=True)
            logger.debug('postfix processing completed')
    elif info.service in ('postfix/qmgr', 'postfix/smtp', 'postfix/error', 'postfix/local'):
        rec = self.mtrack.get(key)
        update: Dict[str, Any] = {'timestamp': info.timestamp}

        def host_said() -> Optional[Match[str]]:
            if info.service == 'postfix/smtp':
                return self.pattern_host_said.match(info.content)
            return None

        match = host_said()
        if match is not None:
            if 'status' not in rec:
                update['status'] = match.group(1)
        else:
            if 'from' in info.items:
                update['envelopeFrom'] = info.items['from']
            if 'to' in info.items and 'envelopeTo' not in rec:
                update['envelopeTo'] = info.items['to']
            for available in 'to', 'dsn', 'status', 'relay':
                if available in info.items:
                    update[available] = info.items[available]
        if update:
            rec.update(update)
            self.mtrack[key] = rec
            logger.debug('Update tracking entry: %s' % str(rec))
            if 'dsn' in update:
                self.__write_bounce(rec)
    else:
        logger.info('Not used: %s' % line)
    #
    return True
def read_database(self, auto: List[Autoresponder]) -> List[str]:
    rc: List[str] = []
    with DB() as db:
        company_list: List[int] = []
        new_domains: Dict[str, BavUpdate.RID] = {}
        forwards: List[BavUpdate.Forward] = []
        seen_domains: Set[str] = set()
        accepted_forwards: Set[str] = set()
        ctab: Dict[int, str] = {}
        #
        rc.append(f'fbl@{self.filter_domain}\taccept:rid=unsubscribe')
        for domain in self.domains:
            if domain not in seen_domains:
                rc.append(f'fbl@{domain}\talias:fbl@{self.filter_domain}')
                seen_domains.add(domain)
        if self.filter_domain not in seen_domains:
            new_domains[self.filter_domain] = BavUpdate.RID(rid=0, domain=self.filter_domain)
            seen_domains.add(self.filter_domain)
        #
        missing = []
        for row in db.query(
            'SELECT company_id, mailloop_domain FROM company_tbl WHERE status = :status',
            {'status': 'active'}
        ):
            if row.mailloop_domain:
                ctab[row.company_id] = row.mailloop_domain
                if row.mailloop_domain not in seen_domains:
                    rc.append(f'fbl@{row.mailloop_domain}\talias:fbl@{self.filter_domain}')
                    if row.mailloop_domain not in self.mtdom and row.mailloop_domain.lower() != self.fqdn:
                        new_domains[row.mailloop_domain] = BavUpdate.RID(rid=0, domain=row.mailloop_domain)
                    seen_domains.add(row.mailloop_domain)
            else:
                missing.append(row.company_id)
            company_list.append(row.company_id)
        if missing:
            logger.debug('Missing mailloop_domain for companies {companies}'.format(
                companies=Stream(missing).sorted().join(', ')))
        #
        seen_rids: Set[int] = set()
        seen_filter_addresses: Dict[str, str] = {}
        for row in db.query(
            'SELECT rid, shortname, company_id, filter_address, '
            '       forward_enable, forward, ar_enable, '
            '       subscribe_enable, mailinglist_id, form_id, timestamp, '
            '       spam_email, spam_required, spam_forward, '
            '       autoresponder_mailing_id, security_token '
            'FROM mailloop_tbl '
            'ORDER BY rid'
        ):
            if row.company_id not in company_list or row.rid is None:
                if row.company_id not in company_list:
                    logger.debug(f'{row}: ignore due to inactive company')
                elif row.rid is None:
                    logger.error(f'{row}: ignore due to empty rid, should never happen!')
                continue
            #
            row_id = f'{row.rid} {row.shortname} [{row.company_id}]'
            seen_rids.add(row.rid)
            domains: List[str] = [self.filter_domain]
            aliases: List[str] = []
            if row.filter_address is not None:
                for alias in listsplit(row.filter_address):
                    if not alias.startswith(self.prefix):
                        with Ignore(ValueError):
                            (local_part, domain_part) = alias.split('@', 1)
                            normalized_alias = '{local_part}@{domain_part}'.format(
                                local_part=local_part,
                                domain_part=domain_part.lower())
                            if normalized_alias in seen_filter_addresses:
                                logger.warning(
                                    f'{row_id}: already seen "{alias}" as "{normalized_alias}" before ({seen_filter_addresses[normalized_alias]})')
                            else:
                                seen_filter_addresses[normalized_alias] = row_id
                            if domain_part not in domains:
                                domains.append(domain_part)
                                if domain_part not in self.mtdom and domain_part not in new_domains:
                                    new_domains[domain_part] = BavUpdate.RID(rid=row.rid, domain=domain_part)
                            aliases.append(alias)
            #
            ar_enable = False
            if row.ar_enable and row.autoresponder_mailing_id:
                if not row.security_token:
                    logger.error(f'{row_id}: Autoresponder has mailing id, but no security token, not used')
                else:
                    auto.append(
                        Autoresponder(
                            row.rid,
                            row.timestamp if row.timestamp is not None else datetime.now(),
                            row.autoresponder_mailing_id,
                            row.security_token))
                    ar_enable = True
            #
            try:
                cdomain = ctab[row.company_id]
                if cdomain not in domains:
                    if cdomain in self.domains:
                        domains.append(cdomain)
                    else:
                        logger.debug(f'{row_id}: company\'s domain "{cdomain}" not found in mailertable')
            except KeyError:
                logger.debug(f'{row_id}: no domain for company found, further processing')
            extra = [f'rid={row.rid}']
            if row.company_id:
                extra.append(f'cid={row.company_id}')
            if row.forward_enable and row.forward:
                forward = row.forward.strip()
                if forward:
                    extra.append(f'fwd={forward}')
                    forwards.append(BavUpdate.Forward(rid=row.rid, address=forward))
            if row.spam_email:
                extra.append(f'spam_email={row.spam_email}')
            if row.spam_forward:
                forward = row.spam_forward.strip()
                if forward:
                    extra.append(f'spam_fwd={forward}')
            if row.spam_required:
                extra.append(f'spam_req={row.spam_required}')
            if ar_enable:
                extra.append(f'ar={row.rid}')
                if row.autoresponder_mailing_id:
                    extra.append(f'armid={row.autoresponder_mailing_id}')
            if row.subscribe_enable and row.mailinglist_id and row.form_id:
                extra.append(f'sub={row.mailinglist_id}:{row.form_id}')
            line = '{prefix}{rid}@{domain}\taccept:{extra}'.format(
                prefix=self.prefix,
                rid=row.rid,
                domain=self.filter_domain,
                extra=','.join([escape(_e) for _e in extra]))
            logger.debug(f'{row_id}: add line: {line}')
            rc.append(line)
            if aliases:
                for alias in aliases:
                    rc.append(f'{alias}\talias:{self.prefix}{row.rid}@{self.filter_domain}')
                    accepted_forwards.add(alias)
        #
        if seen_rids:
            rules: Dict[int, Dict[str, List[str]]] = {}
            for row in db.query('SELECT rid, section, pattern FROM mailloop_rule_tbl'):
                if row.rid in seen_rids:
                    try:
                        rule = rules[row.rid]
                    except KeyError:
                        rule = rules[row.rid] = {}
                    try:
                        sect = rule[row.section]
                    except KeyError:
                        sect = rule[row.section] = []
                    sect.append(row.pattern)
            self.update_rules(rules)
        #
        for forward in forwards:
            with Ignore(ValueError):
                fdomain = (forward.address.split('@', 1)[-1]).lower()
                for domain in self.mtdom:
                    if domain == fdomain and forward.address not in accepted_forwards:
                        logger.warning(f'{forward.rid}: using address "{forward.address}" with local handled domain "{domain}"')
                refuse = []
                for (domain, new_domain) in ((_d, _n) for (_d, _n) in new_domains.items() if _d == fdomain):
                    logger.warning(f'{new_domain.rid}: try to add new domain for already existing forward address "{forward.address}" in {forward.rid}, refused')
                    refuse.append(domain)
                for domain in refuse:
                    del new_domains[domain]
        #
        if new_domains:
            if self.mta.mta == 'sendmail':
                if os.access(BavUpdate.control_sendmail, os.X_OK):
                    cmd = [BavUpdate.control_sendmail, 'add']
                    for domain in new_domains:
                        cmd.append(domain)
                    logger.info(f'Found new domains, add them using {cmd}')
                    silent_call(*cmd)
                    if os.access(BavUpdate.restart_sendmail, os.X_OK):
                        logger.info('Restarting sendmail due to domain update')
                        silent_call(BavUpdate.restart_sendmail)
                    else:
                        logger.warning(f'Missing {BavUpdate.restart_sendmail}, no restart of mta performed')
                else:
                    logger.warning(f'Missing {BavUpdate.control_sendmail}, no new domains are added')
            self.read_mailertable(new_domains)
    return rc
def get(self, key: int, default: int) -> int:
    if key in self.mem:
        return self.mem[key]
    with Ignore(KeyError):
        return int(self.gdb[str(key).encode('UTF-8')].decode('UTF-8'))
    return default
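# get() above consults an in-memory dict first and falls back to a gdbm file where
# both keys and values are stored as UTF-8 encoded strings. A hedged sketch of a
# matching writer, assuming self.gdb is an open dbm/gdbm handle; the real code may
# keep the two stores in sync differently.
def put(self, key: int, value: int) -> None:
    self.mem[key] = value
    self.gdb[str(key).encode('UTF-8')] = str(value).encode('UTF-8')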
def parse (self, log_id: LogID, info: SyslogParser.Info, line: str) -> bool:
    if info.service in self.ignore_services:
        return True
    #
    if not info.queue_id:
        if info.service == 'postfix/smtp':
            if (
                info.content.startswith ('connect to ') or
                info.content.startswith ('SSL_connect error to ') or
                info.content.startswith ('Untrusted TLS connection established to') or
                info.content.startswith ('Untrusted TLS connection reused to') or
                info.content.startswith ('Anonymous TLS connection established to') or
                info.content.startswith ('Anonymous TLS connection reused to') or
                info.content.startswith ('Verified TLS connection established to') or
                info.content.startswith ('Verified TLS connection reused to') or
                info.content.startswith ('Trusted TLS connection established to') or
                info.content.startswith ('Trusted TLS connection reused to') or
                'offers SMTPUTF8 support, but not 8BITMIME' in info.content
            ):
                return True
        return False
    #
    if info.queue_id in self.ignore_ids:
        return True
    #
    log_id.push (info.queue_id)
    self.line (info, line)
    key = Key (self.SEC_MTAID, info.queue_id)
    if info.service == 'postfix/pickup':
        match = self.pattern_envelope_from.search (info.content)
        if match is not None:
            envelopeFrom = match.group (1)
            self.mtrack.update (key, envelopeFrom = envelopeFrom)
            logger.debug ('Found envelopeFrom=%s' % envelopeFrom)
    elif info.service == 'postfix/cleanup':
        match = self.pattern_message_id.search (info.content)
        if match is not None:
            message_id = match.group (1)
            if message_id:
                rec = self.mtrack.get (key)
                mid_key = Key (self.SEC_MESSAGEID, message_id)
                try:
                    midinfo = self.mtrack[mid_key]
                    rec.update (midinfo)
                except KeyError:
                    try:
                        uid = self.uid.parse (message_id.split ('@')[0], validate = False)
                        rec.update ({
                            'licence_id': uid.licence_id,
                            'mailing_id': uid.mailing_id,
                            'customer_id': uid.customer_id
                        })
                    except error as e:
                        logger.info (f'Failed to parse message_id <{message_id}>: {e}')
                rec['message_id'] = message_id
                self.mtrack[key] = rec
                logger.debug ('Found message_id=<%s>' % message_id)
    elif info.service == 'postfix/qmgr' and info.content == 'removed':
        with Ignore (KeyError):
            self.mtrack.update (key, complete = True)
            logger.debug ('postfix processing completed')
    elif info.service in ('postfix/qmgr', 'postfix/smtp', 'postfix/error', 'postfix/local'):
        rec = self.mtrack.get (key)
        update: Dict[str, Any] = {
            'timestamp': info.timestamp
        }

        def host_said () -> Optional[Match[str]]:
            if info.service == 'postfix/smtp':
                return self.pattern_host_said.match (info.content)
            return None

        match = host_said ()
        if match is not None:
            if 'status' not in rec:
                update['status'] = match.group (1)
        else:
            if 'from' in info.items:
                update['envelopeFrom'] = info.items['from']
            if 'to' in info.items and 'envelopeTo' not in rec:
                update['envelopeTo'] = info.items['to']
            for available in 'to', 'dsn', 'status', 'relay':
                if available in info.items:
                    update[available] = info.items[available]
        if update:
            rec.update (update)
            self.mtrack[key] = rec
            logger.debug ('Update tracking entry: %s' % str (rec))
            if 'dsn' in update:
                self.__write_bounce (info, rec)
    else:
        logger.info ('Not used: %s' % line)
    #
    return True
def __cfg(self, company_id: int, var: str, default: int) -> int: #{{{
    with Ignore(KeyError, ValueError):
        return int(self.config[company_id][var])
    if company_id != 0:
        return self.__cfg(0, var, default)
    return default
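# Lookup order illustrated: a per-company value wins, otherwise the global entry under
# company_id 0 is consulted, otherwise the passed default is returned. Hypothetical
# config contents and results:
#
#   self.config = {0: {'max-retries': '5'}, 17: {'max-retries': '10'}}
#   self.__cfg(17, 'max-retries', 3)   -> 10   (company specific value)
#   self.__cfg(23, 'max-retries', 3)   -> 5    (falls back to company 0)
#   self.__cfg(23, 'timeout', 30)      -> 30   (neither found, default used)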