def mail_aliases():
	"""Return the configured mail aliases, as JSON or as tab-separated text."""
	if request.args.get("format", "") == "json":
		return json_response(get_mail_aliases_ex(env))
	# Plain-text fallback: one alias per line with tab-separated
	# address / receivers / permitted-senders fields.
	rows = []
	for address, receivers, senders in get_mail_aliases(env):
		rows.append(address + "\t" + receivers + "\t" + (senders or "") + "\n")
	return "".join(rows)
def check_mail_domain(domain, env, output):
	"""Check that public DNS routes *domain*'s mail to this box.

	Verifies the MX record points at PRIMARY_HOSTNAME (or that the A-record
	fallback applies when there is no MX), that a postmaster@ contact exists
	unless a catch-all/domain alias covers it, and that the domain is not on
	the Spamhaus Domain Block List. Findings are reported through *output*.
	"""

	# Check the MX record.

	recommended_mx = "10 " + env['PRIMARY_HOSTNAME']
	mx = query_dns(domain, "MX", nxdomain=None)

	if mx is None:
		mxhost = None
	else:
		# query_dns returns a semicolon-delimited list
		# of priority-host pairs.
		mxhost = mx.split('; ')[0].split(' ')[1]

	# Idiom fix: compare to None with 'is' / 'is not', not '==' / '!='.
	if mxhost is None:
		# A missing MX record is okay on the primary hostname because
		# the primary hostname's A record (the MX fallback) is... itself,
		# which is what we want the MX to be.
		if domain == env['PRIMARY_HOSTNAME']:
			output.print_ok("Domain's email is directed to this domain. [%s has no MX record, which is ok]" % (domain,))

		# And a missing MX record is okay on other domains if the A record
		# matches the A record of the PRIMARY_HOSTNAME. Actually this will
		# probably confuse DANE TLSA, but we'll let that slide for now.
		else:
			domain_a = query_dns(domain, "A", nxdomain=None)
			primary_a = query_dns(env['PRIMARY_HOSTNAME'], "A", nxdomain=None)
			if domain_a is not None and domain_a == primary_a:
				output.print_ok("Domain's email is directed to this domain. [%s has no MX record but its A record is OK]" % (domain,))
			else:
				output.print_error("""This domain's DNS MX record is not set. It should be '%s'. Mail will not be delivered to this box. It may take several hours for public DNS to update after a change. This problem may result from other issues listed here.""" % (recommended_mx,))

	elif mxhost == env['PRIMARY_HOSTNAME']:
		good_news = "Domain's email is directed to this domain. [%s ↦ %s]" % (domain, mx)
		if mx != recommended_mx:
			good_news += "  This configuration is non-standard.  The recommended configuration is '%s'." % (recommended_mx,)
		output.print_ok(good_news)

	else:
		output.print_error("""This domain's DNS MX record is incorrect. It is currently set to '%s' but should be '%s'. Mail will not be delivered to this box. It may take several hours for public DNS to update after a change. 
This problem may result from other issues listed here.""" % (mx, recommended_mx))

	# Check that the postmaster@ email address exists. Not required if the domain has a
	# catch-all address or domain alias.
	if "@" + domain not in [address for address, *_ in get_mail_aliases(env)]:
		check_alias_exists("Postmaster contact address", "postmaster@" + domain, env, output)

	# Stop if the domain is listed in the Spamhaus Domain Block List.
	# The user might have chosen a domain that was previously in use by a spammer
	# and will not be able to reliably send mail.
	dbl = query_dns(domain+'.dbl.spamhaus.org', "A", nxdomain=None)
	if dbl is None:
		output.print_ok("Domain is not blacklisted by dbl.spamhaus.org.")
	else:
		output.print_error("""This domain is listed in the Spamhaus Domain Block List (code %s), which may prevent recipients from receiving your mail. See http://www.spamhaus.org/dbl/ and http://www.spamhaus.org/query/domain/%s.""" % (dbl, domain))
def check_alias_exists(alias_name, alias, env, output):
	"""Verify that *alias* is configured as a mail alias with a destination.

	Reports OK (with the forwarding addresses) or an actionable error
	through *output*. *alias_name* is the human-readable label shown in
	the messages (e.g. "Postmaster contact address").
	"""
	# Map each alias address to its forwarding addresses. The *_ absorbs
	# any extra fields (e.g. permitted senders) from get_mail_aliases.
	# Idiom fix: dict comprehension instead of dict([...]) (ruff C404).
	mail_aliases = {address: receivers for address, receivers, *_ in get_mail_aliases(env)}

	if alias in mail_aliases:
		if mail_aliases[alias]:
			output.print_ok("%s exists as a mail alias. [%s ↦ %s]" % (alias_name, alias, mail_aliases[alias]))
		else:
			# The alias exists but forwards nowhere.
			output.print_error("""You must set the destination of the mail alias for %s to direct email to you or another administrator.""" % alias)
	else:
		output.print_error("""You must add a mail alias for %s which directs email to you or another administrator.""" % alias)
def check_alias_exists(alias, env):
	"""Verify that *alias* exists as a mail alias; print an OK or error line."""
	# Fix: local variable was misspelled "mail_alises".
	mail_aliases = dict(get_mail_aliases(env))
	if alias in mail_aliases:
		print_ok("%s exists as a mail alias [=> %s]" % (alias, mail_aliases[alias]))
	else:
		print_error("""You must add a mail alias for %s and direct email to you or another administrator.""" % alias)
def mail_aliases():
	"""Return mail aliases, rendered as JSON or as tab-separated plain text."""
	if request.args.get("format", "") == "json":
		return json_response(get_mail_aliases(env, as_json=True))
	# One "source<TAB>destination" pair per output line.
	lines = [source + "\t" + destination + "\n"
	         for source, destination in get_mail_aliases(env)]
	return "".join(lines)
def mail_aliases():
	"""Return all mail aliases as "source<TAB>destination" lines."""
	out = []
	for source, destination in get_mail_aliases(env):
		out.append(source + "\t" + destination + "\n")
	return "".join(out)
def scan_mail_log(logger, env):
	""" Scan the system's mail log files and collect interesting data

	This function scans the 2 most recent mail log files in /var/log/ and
	reports IMAP/POP3 logins, greylisted mail, rejected mail, and hourly
	activity through *logger*.

	Args:
		logger (ConsoleOutput): Object used for writing messages to the console
		env (dict): Dictionary containing MiaB settings

	"""
	collector = {
		"other-services": set(),
		"imap-logins": {},
		"pop3-logins": {},
		"postgrey": {},
		"rejected-mail": {},
		"activity-by-hour": {
			"imap-logins": defaultdict(int),
			"pop3-logins": defaultdict(int),
			"smtp-sends": defaultdict(int),
			"smtp-receives": defaultdict(int),
		},
		# All addresses this box handles: users plus alias addresses.
		"real_mail_addresses": (
			set(mailconfig.get_mail_users(env)) |
			set(alias[0] for alias in mailconfig.get_mail_aliases(env))
		)
	}

	# Scan the rotated log first so records come out in chronological order.
	for fn in ('/var/log/mail.log.1', '/var/log/mail.log'):
		if not os.path.exists(fn):
			continue
		# Open in binary and decode leniently: log lines may contain bytes
		# that are not valid UTF-8.
		with open(fn, 'rb') as log:
			for line in log:
				line = line.decode("utf8", errors='replace')
				scan_mail_log_line(line.strip(), collector)

	if collector["imap-logins"]:
		logger.add_heading("Recent IMAP Logins")
		# Typo fix in user-visible message: "adddress" -> "address".
		logger.print_block("The most recent login from each remote IP address is shown.")
		for k in utils.sort_email_addresses(collector["imap-logins"], env):
			for ip, date in sorted(collector["imap-logins"][k].items(), key=lambda kv: kv[1]):
				logger.print_line(k + "\t" + str(date) + "\t" + ip)

	if collector["pop3-logins"]:
		logger.add_heading("Recent POP3 Logins")
		logger.print_block("The most recent login from each remote IP address is shown.")
		for k in utils.sort_email_addresses(collector["pop3-logins"], env):
			for ip, date in sorted(collector["pop3-logins"][k].items(), key=lambda kv: kv[1]):
				logger.print_line(k + "\t" + str(date) + "\t" + ip)

	if collector["postgrey"]:
		logger.add_heading("Greylisted Mail")
		logger.print_block("The following mail was greylisted, meaning the emails were temporarily rejected. "
		                   "Legitimate senders will try again within ten minutes.")
		logger.print_line("recipient" + "\t" + "received" + 3 * "\t" + "sender" + 6 * "\t" + "delivered")
		for recipient in utils.sort_email_addresses(collector["postgrey"], env):
			# Sort entries by when the mail was first seen (greylisted).
			sorted_recipients = sorted(collector["postgrey"][recipient].items(), key=lambda kv: kv[1][0])
			for (client_address, sender), (first_date, delivered_date) in sorted_recipients:
				logger.print_line(
					recipient + "\t" + str(first_date) + "\t" + sender + "\t" +
					(("delivered " + str(delivered_date)) if delivered_date else "no retry yet")
				)

	if collector["rejected-mail"]:
		logger.add_heading("Rejected Mail")
		logger.print_block("The following incoming mail was rejected.")
		for k in utils.sort_email_addresses(collector["rejected-mail"], env):
			for date, sender, message in collector["rejected-mail"][k]:
				logger.print_line(k + "\t" + str(date) + "\t" + sender + "\t" + message)

	logger.add_heading("Activity by Hour")
	logger.print_block("Dovecot logins and Postfix mail traffic per hour.")
	logger.print_block("Hour\tIMAP\tPOP3\tSent\tReceived")
	for h in range(24):
		logger.print_line(
			"%d\t%d\t\t%d\t\t%d\t\t%d" % (
				h,
				collector["activity-by-hour"]["imap-logins"][h],
				collector["activity-by-hour"]["pop3-logins"][h],
				collector["activity-by-hour"]["smtp-sends"][h],
				collector["activity-by-hour"]["smtp-receives"][h],
			)
		)

	if len(collector["other-services"]) > 0:
		logger.add_heading("Other")
		logger.print_block("Unrecognized services in the log: " + ", ".join(collector["other-services"]))
def scan_mail_log(logger, env):
	"""Scan the system's mail log files and collect interesting data.

	Scans /var/log/mail.log.1 and /var/log/mail.log and reports IMAP logins,
	greylisted mail, and rejected mail through *logger*.

	Args:
		logger (ConsoleOutput): Object used for writing messages to the console
		env (dict): Dictionary containing MiaB settings
	"""
	collector = {
		"other-services": set(),
		"imap-logins": { },
		"postgrey": { },
		"rejected-mail": { },
	}

	# All addresses this box handles: users plus alias addresses.
	collector["real_mail_addresses"] = set(mailconfig.get_mail_users(env)) | set(alias[0] for alias in mailconfig.get_mail_aliases(env))

	# Scan the rotated log first so entries come out in chronological order.
	for fn in ('/var/log/mail.log.1', '/var/log/mail.log'):
		if not os.path.exists(fn):
			continue
		# Robustness fix (consistent with the newer scan_mail_log variants):
		# read in binary and decode leniently so non-UTF-8 bytes in the log
		# do not abort the scan with a UnicodeDecodeError.
		with open(fn, 'rb') as log:
			for line in log:
				line = line.decode("utf8", errors='replace')
				scan_mail_log_line(line.strip(), collector)

	if collector["imap-logins"]:
		logger.add_heading("Recent IMAP Logins")
		# Typo fixes in user-visible message: "adddress" -> "address",
		# "is show." -> "is shown."
		logger.print_block("The most recent login from each remote IP address is shown.")
		for k in utils.sort_email_addresses(collector["imap-logins"], env):
			for ip, date in sorted(collector["imap-logins"][k].items(), key = lambda kv : kv[1]):
				logger.print_line(k + "\t" + str(date) + "\t" + ip)

	if collector["postgrey"]:
		logger.add_heading("Greylisted Mail")
		logger.print_block("The following mail was greylisted, meaning the emails were temporarily rejected. Legitimate senders will try again within ten minutes.")
		logger.print_line("recipient" + "\t" + "received" + "\t" + "sender" + "\t" + "delivered")
		for recipient in utils.sort_email_addresses(collector["postgrey"], env):
			for (client_address, sender), (first_date, delivered_date) in sorted(collector["postgrey"][recipient].items(), key = lambda kv : kv[1][0]):
				logger.print_line(recipient + "\t" + str(first_date) + "\t" + sender + "\t" + (("delivered " + str(delivered_date)) if delivered_date else "no retry yet"))

	if collector["rejected-mail"]:
		logger.add_heading("Rejected Mail")
		logger.print_block("The following incoming mail was rejected.")
		for k in utils.sort_email_addresses(collector["rejected-mail"], env):
			for date, sender, message in collector["rejected-mail"][k]:
				logger.print_line(k + "\t" + str(date) + "\t" + sender + "\t" + message)

	if len(collector["other-services"]) > 0:
		logger.add_heading("Other")
		logger.print_block("Unrecognized services in the log: " + ", ".join(collector["other-services"]))
def scan_mail_log(env):
	""" Scan the system's mail log files and collect interesting data

	This function scans the 2 most recent mail log files in /var/log/.

	Args:
		env (dict): Dictionary containing MiaB settings

	"""
	collector = {
		"scan_count": 0,  # Number of lines scanned
		"parse_count": 0,  # Number of lines parsed (i.e. that had their contents examined)
		"scan_time": time.time(),  # The time in seconds the scan took
		"sent_mail": OrderedDict(),  # Data about email sent by users
		"received_mail": OrderedDict(),  # Data about email received by users
		"dovecot": OrderedDict(),  # Data about Dovecot activity
		"postgrey": {},  # Data about greylisting of email addresses
		"rejected": OrderedDict(),  # Emails that were blocked
		"known_addresses": None,  # Addresses handled by the Miab installation
		"other-services": set(),
	}

	# Import lazily so this tool still runs outside a MiaB installation;
	# without mailconfig, known_addresses simply stays None.
	try:
		import mailconfig
		collector["known_addresses"] = (set(mailconfig.get_mail_users(env)) |
		                                set(alias[0] for alias in mailconfig.get_mail_aliases(env)))
	except ImportError:
		pass

	print("Scanning from {:%Y-%m-%d %H:%M:%S} back to {:%Y-%m-%d %H:%M:%S}".format(
		START_DATE, END_DATE)
	)

	# Scan the lines in the log files until the date goes out of range
	scan_files(collector)

	if not collector["scan_count"]:
		print("No log lines scanned...")
		return

	# Convert the start timestamp into an elapsed duration.
	collector["scan_time"] = time.time() - collector["scan_time"]

	print("{scan_count} Log lines scanned, {parse_count} lines parsed in {scan_time:.2f} "
	      "seconds\n".format(**collector))

	# Print Sent Mail report

	if collector["sent_mail"]:
		msg = "Sent email between {:%Y-%m-%d %H:%M:%S} and {:%Y-%m-%d %H:%M:%S}"
		print_header(msg.format(END_DATE, START_DATE))

		data = OrderedDict(sorted(collector["sent_mail"].items(), key=email_sort))

		print_user_table(
			data.keys(),
			data=[
				("sent", [u["sent_count"] for u in data.values()]),
				("hosts", [len(u["hosts"]) for u in data.values()]),
			],
			sub_data=[
				("sending hosts", [u["hosts"] for u in data.values()]),
			],
			activity=[
				("sent", [u["activity-by-hour"] for u in data.values()]),
			],
			earliest=[u["earliest"] for u in data.values()],
			latest=[u["latest"] for u in data.values()],
		)

		# Aggregate per-hour activity across all users.
		accum = defaultdict(int)
		data = collector["sent_mail"].values()
		for h in range(24):
			accum[h] = sum(d["activity-by-hour"][h] for d in data)

		print_time_table(
			["sent"],
			[accum]
		)

	# Print Received Mail report

	if collector["received_mail"]:
		msg = "Received email between {:%Y-%m-%d %H:%M:%S} and {:%Y-%m-%d %H:%M:%S}"
		print_header(msg.format(END_DATE, START_DATE))

		data = OrderedDict(sorted(collector["received_mail"].items(), key=email_sort))

		print_user_table(
			data.keys(),
			data=[
				("received", [u["received_count"] for u in data.values()]),
			],
			activity=[
				("sent", [u["activity-by-hour"] for u in data.values()]),
			],
			earliest=[u["earliest"] for u in data.values()],
			latest=[u["latest"] for u in data.values()],
		)

		accum = defaultdict(int)
		for h in range(24):
			accum[h] = sum(d["activity-by-hour"][h] for d in data.values())

		print_time_table(
			["received"],
			[accum]
		)

	# Print Dovecot (IMAP/POP3 login) report

	if collector["dovecot"]:
		msg = "Email client logins between {:%Y-%m-%d %H:%M:%S} and {:%Y-%m-%d %H:%M:%S}"
		print_header(msg.format(END_DATE, START_DATE))

		data = OrderedDict(sorted(collector["dovecot"].items(), key=email_sort))

		print_user_table(
			data.keys(),
			data=[
				("imap", [u["imap"] for u in data.values()]),
				("pop3", [u["pop3"] for u in data.values()]),
			],
			sub_data=[
				("IMAP IP addresses", [[k + " (%d)" % v for k, v in u["imap-logins"].items()]
				                       for u in data.values()]),
				("POP3 IP addresses", [[k + " (%d)" % v for k, v in u["pop3-logins"].items()]
				                       for u in data.values()]),
			],
			activity=[
				("imap", [u["activity-by-hour"]["imap"] for u in data.values()]),
				("pop3", [u["activity-by-hour"]["pop3"] for u in data.values()]),
			],
			earliest=[u["earliest"] for u in data.values()],
			latest=[u["latest"] for u in data.values()],
		)

		accum = {"imap": defaultdict(int), "pop3": defaultdict(int), "both": defaultdict(int)}
		for h in range(24):
			accum["imap"][h] = sum(d["activity-by-hour"]["imap"][h] for d in data.values())
			accum["pop3"][h] = sum(d["activity-by-hour"]["pop3"][h] for d in data.values())
			accum["both"][h] = accum["imap"][h] + accum["pop3"][h]

		print_time_table(
			["imap", "pop3", " +"],
			[accum["imap"], accum["pop3"], accum["both"]]
		)

	if collector["postgrey"]:
		msg = "Greylisted Email {:%Y-%m-%d %H:%M:%S} and {:%Y-%m-%d %H:%M:%S}"
		print_header(msg.format(END_DATE, START_DATE))

		print(textwrap.fill(
			"The following mail was greylisted, meaning the emails were temporarily rejected. "
			"Legitimate senders will try again within ten minutes.",
			width=80, initial_indent=" ", subsequent_indent=" "
		), end='\n\n')

		data = OrderedDict(sorted(collector["postgrey"].items(), key=email_sort))
		users = []
		received = []
		senders = []
		sender_clients = []
		delivered_dates = []
		for recipient in data:
			# Order by first-seen date, falling back to the delivered date.
			sorted_recipients = sorted(data[recipient].items(), key=lambda kv: kv[1][0] or kv[1][1])
			for (client_address, sender), (first_date, delivered_date) in sorted_recipients:
				if first_date:
					users.append(recipient)
					received.append(first_date)
					senders.append(sender)
					delivered_dates.append(delivered_date)
					sender_clients.append(client_address)

		print_user_table(
			users,
			data=[
				("received", received),
				("sender", senders),
				# NOTE(review): str(d) is always truthy (str(None) == "None"),
				# so "no retry yet" is unreachable here; likely intended
				# (str(d) if d else "no retry yet") — confirm before changing.
				("delivered", [str(d) or "no retry yet" for d in delivered_dates]),
				("sending host", sender_clients)
			],
			delimit=True,
		)

	if collector["rejected"]:
		msg = "Blocked Email {:%Y-%m-%d %H:%M:%S} and {:%Y-%m-%d %H:%M:%S}"
		print_header(msg.format(END_DATE, START_DATE))

		data = OrderedDict(sorted(collector["rejected"].items(), key=email_sort))

		rejects = []
		if VERBOSE:
			for user_data in data.values():
				user_rejects = []
				for date, sender, message in user_data["blocked"]:
					if len(sender) > 64:
						# Keep long sender addresses readable by eliding the middle.
						sender = sender[:32] + "…" + sender[-32:]
					user_rejects.append("%s - %s " % (date, sender))
					user_rejects.append(" %s" % message)
				rejects.append(user_rejects)

		print_user_table(
			data.keys(),
			data=[
				("blocked", [len(u["blocked"]) for u in data.values()]),
			],
			sub_data=[
				("blocked emails", rejects),
			],
			earliest=[u["earliest"] for u in data.values()],
			latest=[u["latest"] for u in data.values()],
		)

	# NOTE(review): the trailing "and False" disables this report
	# unconditionally — presumably intentional; confirm before removing.
	if collector["other-services"] and VERBOSE and False:
		print_header("Other services")
		print("The following unkown services were found in the log file.")
		print(" ", *sorted(list(collector["other-services"])), sep='\n│ ')
def __init__(self, start_date=None, end_date=None, filters=None, no_filter=False, sent=True,
             received=True, imap=False, pop3=False, grey=False, rejected=False):
	"""Initialize the log-scan state container (a dict-like object).

	Args:
		start_date (datetime): Most recent moment to scan; defaults to now.
		end_date (datetime): Oldest moment to scan; defaults to start_date
			minus 52 weeks (the scan runs backwards in time).
		filters (list): Strings matched against addresses to limit the scan;
			None means no per-user filtering.
		no_filter (bool): If True, skip loading the box's known addresses.
		sent, received, imap, pop3, grey, rejected (bool): Which services'
			log lines to scan and report on.
	"""
	super().__init__()

	# Try and get all the email addresses known to this box
	known_addresses = []
	if not no_filter:
		try:
			env_vars = utils.load_environment()
			import mailconfig
			known_addresses = sorted(
				set(mailconfig.get_mail_users(env_vars)) |
				set(alias[0] for alias in mailconfig.get_mail_aliases(env_vars)),
				key=email_sort
			)
		except (FileNotFoundError, ImportError):
			# Not running on a MiaB box (or mailconfig unavailable);
			# proceed without a known-address list.
			pass

	start_date = start_date or datetime.now()
	end_date = end_date or start_date - timedelta(weeks=52)

	self.update({
		'end_of_file': False,  # Indicates whether the end of the log files was reached
		'start_date': start_date,
		'end_date': end_date,
		'line_count': 0,  # Number of lines scanned
		'parse_count': 0,  # Number of lines parsed (i.e. that had their contents examined)
		'scan_time': time.time(),  # The time in seconds the scan took
		'unknown services': set(),  # Services encountered that were not recognized
		'known_addresses': known_addresses,  # Addresses handled by MiaB
		'services': {},  # What services to scan for
		'data': OrderedDict(),  # Scan data, per service
	})

	# Caching is only useful with longer filter lists, but doesn't seem to hurt performance in shorter ones
	user_match = lru_cache(maxsize=None)(partial(filter_match, [f.lower() for f in filters] if filters else None))

	# For each enabled report, register a scanner callback bound (via partial)
	# to its own result dict, keyed both by report name in self['data'] and by
	# the syslog service name in self['services'] used to match log lines.
	if sent:
		data = {}
		self['data']['sent mail'] = {
			'scan': partial(scan_postfix_submission, data, user_match),
			'data': data,
		}
		self['services']['postfix/submission/smtpd'] = self['data']['sent mail']

	if received:
		data = {}
		self['data']['received mail'] = {
			'scan': partial(scan_postfix_lmtp, data, user_match),
			'data': data,
		}
		self['services']['postfix/lmtp'] = self['data']['received mail']

	if imap:
		data = {}
		self['data']['imap login'] = {
			'scan': partial(scan_login, data, user_match),
			'data': data,
		}
		self['services']['imap-login'] = self['data']['imap login']

	if pop3:
		data = {}
		self['data']['pop3 login'] = {
			'scan': partial(scan_login, data, user_match),
			'data': data,
		}
		self['services']['pop3-login'] = self['data']['pop3 login']

	if grey:
		data = {}
		self['data']['grey-listed mail'] = {
			'scan': partial(scan_greylist, data, user_match),
			'data': data,
		}
		self['services']['postgrey'] = self['data']['grey-listed mail']

	if rejected:
		data = {}
		self['data']['blocked mail'] = {
			'scan': partial(scan_rejects, data, self['known_addresses'], user_match),
			'data': data,
		}
		self['services']['postfix/smtpd'] = self['data']['blocked mail']
def scan_mail_log(env):
	""" Scan the system's mail log files and collect interesting data

	This function scans the 2 most recent mail log files in /var/log/.

	Args:
		env (dict): Dictionary containing MiaB settings

	"""
	collector = {
		"scan_count": 0,  # Number of lines scanned
		"parse_count": 0,  # Number of lines parsed (i.e. that had their contents examined)
		"scan_time": time.time(),  # The time in seconds the scan took
		"sent_mail": OrderedDict(),  # Data about email sent by users
		"received_mail": OrderedDict(),  # Data about email received by users
		"logins": OrderedDict(),  # Data about login activity
		"postgrey": {},  # Data about greylisting of email addresses
		"rejected": OrderedDict(),  # Emails that were blocked
		"known_addresses": None,  # Addresses handled by the Miab installation
		"other-services": set(),
	}

	# Import lazily so this tool still runs outside a MiaB installation;
	# without mailconfig, known_addresses simply stays None.
	try:
		import mailconfig
		collector["known_addresses"] = (set(mailconfig.get_mail_users(env)) |
		                                set(alias[0] for alias in mailconfig.get_mail_aliases(env)))
	except ImportError:
		pass

	print("Scanning logs from {:%Y-%m-%d %H:%M:%S} to {:%Y-%m-%d %H:%M:%S}".format(
		END_DATE, START_DATE)
	)

	# Scan the lines in the log files until the date goes out of range
	scan_files(collector)

	if not collector["scan_count"]:
		print("No log lines scanned...")
		return

	# Convert the start timestamp into an elapsed duration.
	collector["scan_time"] = time.time() - collector["scan_time"]

	print("{scan_count} Log lines scanned, {parse_count} lines parsed in {scan_time:.2f} "
	      "seconds\n".format(**collector))

	# Print Sent Mail report

	if collector["sent_mail"]:
		msg = "Sent email"
		print_header(msg)

		data = OrderedDict(sorted(collector["sent_mail"].items(), key=email_sort))

		print_user_table(
			data.keys(),
			data=[
				("sent", [u["sent_count"] for u in data.values()]),
				("hosts", [len(u["hosts"]) for u in data.values()]),
			],
			sub_data=[
				("sending hosts", [u["hosts"] for u in data.values()]),
			],
			activity=[
				("sent", [u["activity-by-hour"] for u in data.values()]),
			],
			earliest=[u["earliest"] for u in data.values()],
			latest=[u["latest"] for u in data.values()],
		)

		# Aggregate per-hour activity across all users.
		accum = defaultdict(int)
		data = collector["sent_mail"].values()
		for h in range(24):
			accum[h] = sum(d["activity-by-hour"][h] for d in data)

		print_time_table(
			["sent"],
			[accum]
		)

	# Print Received Mail report

	if collector["received_mail"]:
		msg = "Received email"
		print_header(msg)

		data = OrderedDict(sorted(collector["received_mail"].items(), key=email_sort))

		print_user_table(
			data.keys(),
			data=[
				("received", [u["received_count"] for u in data.values()]),
			],
			activity=[
				("sent", [u["activity-by-hour"] for u in data.values()]),
			],
			earliest=[u["earliest"] for u in data.values()],
			latest=[u["latest"] for u in data.values()],
		)

		accum = defaultdict(int)
		for h in range(24):
			accum[h] = sum(d["activity-by-hour"][h] for d in data.values())

		print_time_table(
			["received"],
			[accum]
		)

	# Print login report

	if collector["logins"]:
		msg = "User logins per hour"
		print_header(msg)

		data = OrderedDict(sorted(collector["logins"].items(), key=email_sort))

		# Get a list of all of the protocols seen in the logs in reverse count order.
		all_protocols = defaultdict(int)
		for u in data.values():
			for protocol_name, count in u["totals_by_protocol"].items():
				all_protocols[protocol_name] += count
		all_protocols = [k for k, v in sorted(all_protocols.items(), key=lambda kv : -kv[1])]

		print_user_table(
			data.keys(),
			data=[
				# Per-user logins-per-hour rate over the user's active window.
				(protocol_name, [
					round(u["totals_by_protocol"][protocol_name] / (u["latest"]-u["earliest"]).total_seconds() * 60*60, 1)
					if (u["latest"]-u["earliest"]).total_seconds() > 0
					else 0  # prevent division by zero
					for u in data.values()])
				for protocol_name in all_protocols
			],
			sub_data=[
				("Protocol and Source", [[
					"{} {}: {} times".format(protocol_name, host, count)
					for (protocol_name, host), count in sorted(u["totals_by_protocol_and_host"].items(), key=lambda kv:-kv[1])
				] for u in data.values()])
			],
			activity=[
				(protocol_name, [u["activity-by-hour"][protocol_name] for u in data.values()])
				for protocol_name in all_protocols
			],
			earliest=[u["earliest"] for u in data.values()],
			latest=[u["latest"] for u in data.values()],
			numstr=lambda n : str(round(n, 1)),
		)

		accum = { protocol_name: defaultdict(int) for protocol_name in all_protocols }
		for h in range(24):
			for protocol_name in all_protocols:
				accum[protocol_name][h] = sum(d["activity-by-hour"][protocol_name][h] for d in data.values())

		print_time_table(
			all_protocols,
			[accum[protocol_name] for protocol_name in all_protocols]
		)

	if collector["postgrey"]:
		msg = "Greylisted Email {:%Y-%m-%d %H:%M:%S} and {:%Y-%m-%d %H:%M:%S}"
		print_header(msg.format(END_DATE, START_DATE))

		print(textwrap.fill(
			"The following mail was greylisted, meaning the emails were temporarily rejected. "
			"Legitimate senders must try again after three minutes.",
			width=80, initial_indent=" ", subsequent_indent=" "
		), end='\n\n')

		data = OrderedDict(sorted(collector["postgrey"].items(), key=email_sort))
		users = []
		received = []
		senders = []
		sender_clients = []
		delivered_dates = []
		for recipient in data:
			# Order by first-seen date, falling back to the delivered date.
			sorted_recipients = sorted(data[recipient].items(), key=lambda kv: kv[1][0] or kv[1][1])
			for (client_address, sender), (first_date, delivered_date) in sorted_recipients:
				if first_date:
					users.append(recipient)
					received.append(first_date)
					senders.append(sender)
					delivered_dates.append(delivered_date)
					sender_clients.append(client_address)

		print_user_table(
			users,
			data=[
				("received", received),
				("sender", senders),
				# NOTE(review): str(d) is always truthy (str(None) == "None"),
				# so "no retry yet" is unreachable here; likely intended
				# (str(d) if d else "no retry yet") — confirm before changing.
				("delivered", [str(d) or "no retry yet" for d in delivered_dates]),
				("sending host", sender_clients)
			],
			delimit=True,
		)

	if collector["rejected"]:
		msg = "Blocked Email {:%Y-%m-%d %H:%M:%S} and {:%Y-%m-%d %H:%M:%S}"
		print_header(msg.format(END_DATE, START_DATE))

		data = OrderedDict(sorted(collector["rejected"].items(), key=email_sort))

		rejects = []
		if VERBOSE:
			for user_data in data.values():
				user_rejects = []
				for date, sender, message in user_data["blocked"]:
					if len(sender) > 64:
						# Keep long sender addresses readable by eliding the middle.
						sender = sender[:32] + "…" + sender[-32:]
					user_rejects.append("%s - %s " % (date, sender))
					user_rejects.append(" %s" % message)
				rejects.append(user_rejects)

		print_user_table(
			data.keys(),
			data=[
				("blocked", [len(u["blocked"]) for u in data.values()]),
			],
			sub_data=[
				("blocked emails", rejects),
			],
			earliest=[u["earliest"] for u in data.values()],
			latest=[u["latest"] for u in data.values()],
		)

	# NOTE(review): the trailing "and False" disables this report
	# unconditionally — presumably intentional; confirm before removing.
	if collector["other-services"] and VERBOSE and False:
		print_header("Other services")
		print("The following unkown services were found in the log file.")
		print(" ", *sorted(list(collector["other-services"])), sep='\n│ ')
def scan_mail_log(logger, env):
	""" Scan the system's mail log files and collect interesting data

	This function scans the 2 most recent mail log files in /var/log/ and
	reports IMAP/POP3 logins, greylisted mail, rejected mail, and hourly
	activity through *logger*.

	Args:
		logger (ConsoleOutput): Object used for writing messages to the console
		env (dict): Dictionary containing MiaB settings
	"""
	collector = {
		"other-services": set(),
		"imap-logins": {},
		"pop3-logins": {},
		"postgrey": {},
		"rejected-mail": {},
		"activity-by-hour": {
			"imap-logins": defaultdict(int),
			"pop3-logins": defaultdict(int),
			"smtp-sends": defaultdict(int),
			"smtp-receives": defaultdict(int),
		},
		# All addresses this box handles: users plus alias addresses.
		"real_mail_addresses": (set(mailconfig.get_mail_users(env)) |
		                        set(alias[0] for alias in mailconfig.get_mail_aliases(env)))
	}

	# Scan the rotated log first so records appear in chronological order.
	for fn in ('/var/log/mail.log.1', '/var/log/mail.log'):
		if not os.path.exists(fn):
			continue
		# Open in binary and decode leniently: log lines may contain bytes
		# that are not valid UTF-8.
		with open(fn, 'rb') as log:
			for line in log:
				line = line.decode("utf8", errors='replace')
				scan_mail_log_line(line.strip(), collector)

	if collector["imap-logins"]:
		logger.add_heading("Recent IMAP Logins")
		# Typo fix in user-visible message: "adddress" -> "address".
		logger.print_block(
			"The most recent login from each remote IP address is shown.")
		for k in utils.sort_email_addresses(collector["imap-logins"], env):
			for ip, date in sorted(collector["imap-logins"][k].items(), key=lambda kv: kv[1]):
				logger.print_line(k + "\t" + str(date) + "\t" + ip)

	if collector["pop3-logins"]:
		logger.add_heading("Recent POP3 Logins")
		logger.print_block(
			"The most recent login from each remote IP address is shown.")
		for k in utils.sort_email_addresses(collector["pop3-logins"], env):
			for ip, date in sorted(collector["pop3-logins"][k].items(), key=lambda kv: kv[1]):
				logger.print_line(k + "\t" + str(date) + "\t" + ip)

	if collector["postgrey"]:
		logger.add_heading("Greylisted Mail")
		logger.print_block(
			"The following mail was greylisted, meaning the emails were temporarily rejected. "
			"Legitimate senders will try again within ten minutes.")
		logger.print_line("recipient" + "\t" + "received" + 3 * "\t" + "sender" + 6 * "\t" + "delivered")
		for recipient in utils.sort_email_addresses(collector["postgrey"], env):
			# Sort entries by when the mail was first seen (greylisted).
			sorted_recipients = sorted(
				collector["postgrey"][recipient].items(), key=lambda kv: kv[1][0])
			for (client_address, sender), (first_date, delivered_date) in sorted_recipients:
				logger.print_line(recipient + "\t" + str(first_date) + "\t" + sender + "\t" +
				                  (("delivered " + str(delivered_date)) if delivered_date else "no retry yet"))

	if collector["rejected-mail"]:
		logger.add_heading("Rejected Mail")
		logger.print_block("The following incoming mail was rejected.")
		for k in utils.sort_email_addresses(collector["rejected-mail"], env):
			for date, sender, message in collector["rejected-mail"][k]:
				logger.print_line(k + "\t" + str(date) + "\t" + sender + "\t" + message)

	logger.add_heading("Activity by Hour")
	logger.print_block("Dovecot logins and Postfix mail traffic per hour.")
	logger.print_block("Hour\tIMAP\tPOP3\tSent\tReceived")
	for h in range(24):
		logger.print_line("%d\t%d\t\t%d\t\t%d\t\t%d" % (
			h,
			collector["activity-by-hour"]["imap-logins"][h],
			collector["activity-by-hour"]["pop3-logins"][h],
			collector["activity-by-hour"]["smtp-sends"][h],
			collector["activity-by-hour"]["smtp-receives"][h],
		))

	if len(collector["other-services"]) > 0:
		logger.add_heading("Other")
		logger.print_block("Unrecognized services in the log: " + ", ".join(collector["other-services"]))