Example #1
	def __init__(self, main_instance):
		AbstractRole.__init__(self, main_instance)
		self.dialog = Dialog(self)
		self.has_run = False
		self.shares = {}
		self.wm = None
		Config.read()
Example #2
	def __init__(self):
		Gtk.Window.__init__(self, title="Tumba Todo!!")

		grid = Gtk.Grid(row_spacing=15,column_spacing=15)
		self.add(grid)
		self.set_resizable(False)
		self.set_position(Gtk.WindowPosition.CENTER)

		lblNB = Gtk.Label("Ruta del cache de Netbeans")     # "Netbeans cache path"
		lblGF = Gtk.Label("Ruta del dominio de glassfish")  # "GlassFish domain path"

		# Read-only entries that display the two configured paths.
		self.txtNB = Gtk.Entry()
		self.txtGF = Gtk.Entry()
		self.txtNB.set_editable(False)
		self.txtGF.set_editable(False)

		btnTumba = Gtk.Button("Guardar")
		btnExaminaNb = Gtk.Button("...")
		btnExaminaGf = Gtk.Button("...")
		btnExaminaGf.connect("clicked", self.on_nb_clicked)
		btnExaminaNb.connect("clicked", self.on_gf_clicked)
		btnTumba.connect("clicked", self.on_guardar_clicked)

		grid.add(lblNB)
		grid.attach(self.txtNB,1,0,2,1)
		grid.attach_next_to(lblGF,lblNB,Gtk.PositionType.BOTTOM,1,2)
		grid.attach_next_to(self.txtGF,self.txtNB,Gtk.PositionType.BOTTOM,2,2)
		grid.attach_next_to(btnExaminaNb,self.txtGF,Gtk.PositionType.RIGHT,2,2)
		grid.attach_next_to(btnExaminaGf,self.txtNB,Gtk.PositionType.RIGHT,2,1)

		button2 = Gtk.Button("Choose Folder")
		#button2.connect("clicked", self.on_folder_clicked)
		grid.attach_next_to(btnTumba,self.txtGF,Gtk.PositionType.BOTTOM,2,2)
		config = Config()
		values = config.read()
		self.txtGF.set_text(values[0])
		self.txtNB.set_text(values[1])
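
The snippet above is the __init__ body of a Gtk.Window subclass whose class declaration and signal handlers are not shown. A minimal sketch of how such a window is typically wrapped and run with PyGObject/GTK 3 follows; the class name ConfigWindow and the three on_*_clicked handlers are assumptions, not part of the original example.

import gi
gi.require_version("Gtk", "3.0")
from gi.repository import Gtk

class ConfigWindow(Gtk.Window):
    def __init__(self):
        Gtk.Window.__init__(self, title="Tumba Todo!!")
        # ... grid, labels, entries and buttons built as in Example #2 ...

    def on_nb_clicked(self, button):
        pass  # would open a folder chooser for the Netbeans cache path

    def on_gf_clicked(self, button):
        pass  # would open a folder chooser for the GlassFish domain path

    def on_guardar_clicked(self, button):
        pass  # would persist both paths via Config

win = ConfigWindow()
win.connect("destroy", Gtk.main_quit)
win.show_all()
Gtk.main()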
Example #3
class Framework():
    def __init__(self, executable_path, basepath):
        self.db = None
        self.config = Config(self)
        self.configuration = {}
        self.webapp = None
        self.executable_path = executable_path
        self.basepath = basepath
        self.hmac_key = None
        self.allow_bad_hmac = False

        self.re_results = re.compile(r'<Results\s+type="(.*?)"\s+version="(.*?)"', re.S)
        self.re_tracking = re.compile(r'<Tracking\s+trackid="(.*?)"\s+existing="(.*?)"', re.S)
        self.re_fingerprints = re.compile(r'<(\w+)\s*>(.*?)</\1\s*>', re.S)

    def version(self):
        return '0.0.2-alpha'

    def web_application(self):
        if self.webapp is None:
            self.webapp = WebApplication(self)
        return self.webapp

    def get_db(self):
        return self.db

    def debug(self, source, msg, *args):
        sys.stderr.write('%s: %s (%s)\n' % (source, msg, args))

    def warn(self, msg):
        sys.stderr.write('%s\n' % (msg))

    def log_exception(self, ex):
        sys.stderr.write('FIX ME! ERROR: %s\n' % (traceback.format_exc()))

    def read_config(self, config_filename):
        self.config.read(config_filename)

    def get_config(self, name):
        return self.configuration[name]

    def set_config(self, name, value):
        # set framework specific
        if 'hmac_key' == name:
            if value and value != 'changeme':
                self.hmac_key = value
        elif 'allow_bad_hmac' == name:
            if value and value.lower() == 'true':
                self.allow_bad_hmac = True

        self.configuration[name] = value

    def make_socket(self, family, sock_type, proto):
        sock = socket.socket(family, sock_type, proto)
        sock.setblocking(0)
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_LINGER, NOLINGER)
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        if socket.SOCK_STREAM == sock_type:
            sock.setsockopt(socket.SOL_TCP, socket.TCP_NODELAY, 1)
        return sock
        
    def make_tcp_server_socket(self, port):
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.setblocking(0)
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_LINGER, NOLINGER)
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        sock.setsockopt(socket.SOL_TCP, socket.TCP_NODELAY, 1)
        sock.bind(('', port))
        sock.listen(128)
        return sock

    def make_udp_server_socket(self, port):
        sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        sock.setblocking(0)
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        sock.bind(('', port))
        return sock

    def get_hmac_key(self):
        if self.hmac_key:
            return self.hmac_key
        return self.unique_id

    def generate_tracking_id(self):
        tracking_id = uuid.uuid4().hex
        h = hmac.new(self.get_hmac_key(), digestmod = hashlib.md5) # MD5 is okay
        h.update(tracking_id)
        tracking_digest = h.hexdigest()
        return ('%s.%s' % (tracking_id, tracking_digest), tracking_id, tracking_digest)

    def extract_tracking_data(self, tracking_data):
        if ':' in tracking_data:
            versioning, tracking_data = tracking_data.split(':', 1)
        else:
            versioning = None
        if '.' in tracking_data:
            tracking_id, tracking_digest = tracking_data.split('.', 1)
            h = hmac.new(self.get_hmac_key(), digestmod = hashlib.md5) # MD5 is okay
            h.update(tracking_id)
            if h.hexdigest() == tracking_digest:
                self.debug('framework', 'extracted tracking_id', tracking_id, tracking_digest)
                return tracking_id, tracking_digest, versioning
            elif self.allow_bad_hmac:
                self.debug('framework', 'allowing bad hmac; extracted tracking_id', tracking_id, tracking_digest)
                return tracking_id, h.hexdigest(), versioning
            else:
                self.debug('framework', 'bad digest', tracking_data)
                return None, None, None
        else:
            self.debug('framework', 'bad tracking', tracking_data)
            return None, None, None

    def extract_results(self, data):
        # TODO: benchmark if XML processing would be faster
        results = {
            'type' : None,
            'version' : None,
            'trackid' : None,
            'existing' : None,
            'fingerprints' : None,
            }

        m = self.re_results.search(data)
        if m:
            results['type'] = m.group(1)
            results['version'] = m.group(2)

        m = self.re_tracking.search(data)
        if m:
            results['trackid'] = m.group(1)
            results['existing'] = m.group(2)

        # TODO: extract date/time info

        n1 = data.find('<Fingerprints')
        n2 = data.find('</Fingerprints')
        if n1 > -1 and n2 > n1:
            # fingerprints
            fingerprints = []
            tmp = data[n1+14:n2]
            matches = self.re_fingerprints.findall(tmp)
            for match in matches:
                fingerprints.append(match)
            results['fingerprints'] = fingerprints

        return results

    def process_results(self, address, request):
        db = self.db

        request_dt = int(time.time())

        results = self.extract_results(request)

        print(results)

        rtype = results['type']
        version = results['version']
        trackid = results['trackid']
        existing = results['existing']

        tracking_id, etracking_id = None, None
        if trackid:
            tracking_id, tracking_digest, junk = self.extract_tracking_data(trackid)
        if existing:
            etracking_id, tracking_digest, junk = self.extract_tracking_data(existing)

        if tracking_id:
            db.insert_tracking_seen(tracking_id, address[0], request_dt)
            if etracking_id and etracking_id != tracking_id:
                db.insert_tracking_correlation(tracking_id, etracking_id, address[0], request_dt)

        if etracking_id:
            db.insert_tracking_seen(etracking_id, address[0], request_dt)
            if not tracking_id:
                tracking_id = etracking_id

        fingerprints = results['fingerprints']
        if fingerprints:
            for name, content in fingerprints:
                content = content.strip()
                moduleid = '%s-%s-%s' % (rtype, version, name)
                hasher = hashlib.md5()
                hasher.update(content)
                digest = hasher.hexdigest()
                db.insert_results(address[0], address[1], tracking_id, moduleid, '', digest)
                db.insert_results_data(digest, content)


    def initialize_db(self):
        self.db = dbinterface.create_instance(self)
        self.db.initialize()
        self.unique_id = self.db.get_unique_id()

    def close_db(self):
        self.db.close()
        self.db = None

    def get_flash_crossdomain_policy(self):
        return '''<?xml version="1.0"?>
<cross-domain-policy> 
   <site-control permitted-cross-domain-policies="master-only"/>
   <allow-access-from domain="*" to-ports="*" />
</cross-domain-policy>'''

    def get_flash_crossdomain_xml(self):
        return '''<?xml version="1.0"?>
<cross-domain-policy>
<allow-access-from domain="*" secure="false"/>
<allow-http-request-headers-from domain="*" headers="*" secure="false"/>
</cross-domain-policy>
'''
    def get_java_crossdomain_xml(self):
        return '''<?xml version="1.0"?>
<cross-domain-policy>
<allow-access-from domain="*" />
</cross-domain-policy>
'''

    def get_silverlight_clientaccess_policy(self):
        return '''<?xml version="1.0" encoding ="utf-8"?>'''  # the rest of the policy XML is truncated in the source snippet
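
The tracking ids produced by generate_tracking_id() and verified by extract_tracking_data() above follow a simple scheme: a random UUID hex string plus an HMAC of that string, so the server can later recognise ids it issued itself. A minimal standalone sketch of the same scheme follows, written for Python 3 bytes/str semantics (the class above relies on Python 2 string behaviour); the key is a placeholder standing in for get_hmac_key().

import hashlib
import hmac
import uuid

HMAC_KEY = b"example-key"  # placeholder for the value returned by get_hmac_key()

def generate_tracking_id():
    tracking_id = uuid.uuid4().hex
    digest = hmac.new(HMAC_KEY, tracking_id.encode(), hashlib.md5).hexdigest()
    return "%s.%s" % (tracking_id, digest)

def extract_tracking_id(tracking_data):
    tracking_id, _, digest = tracking_data.partition(".")
    expected = hmac.new(HMAC_KEY, tracking_id.encode(), hashlib.md5).hexdigest()
    return tracking_id if hmac.compare_digest(expected, digest) else None

token = generate_tracking_id()
assert extract_tracking_id(token) is not None
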
Example #4

def main():
  DISCLAIMER = """WARNING: the policy herein built is based on a set
                  of common features found among the current layout and
                  configuration of the MX hostnames associated with the input
                  mail domains. There is no warranty that the current
                  settings will be kept by mail servers' owners in the
                  future nor that these settings are the correct ones that
                  really identify the recipient domain's mail servers.
                  A bad policy could result in message delivery failures.
                  USE THIS POLICY DEFINITIONS FILE AT YOUR OWN RISK."""

  parser = argparse.ArgumentParser(
    description="Guess STARTTLS policies on the basis of current MX "
      "hostnames settings.",
    epilog="""Consume a target list of mail domains and output a \
    policy definitions file for those domains. %s""" % DISCLAIMER)

  parser.add_argument("-c", "--cfg", default=Config.default_cfg_path,
                      help="general configuration file path", metavar="file",
                      dest="cfg_path")

  parser.add_argument("inputfile", type=argparse.FileType("r"),
                      default=sys.stdin, metavar="domains_list_file",
                      help="""file containing the list of domains to consume;
                      one domain on each line;
                      use "-" to read from stdin""")

  parser.add_argument("-o", metavar="file", type=argparse.FileType("w"),
                      help="path where policy definitions file will be "
                      "written to; default: stdout", dest="outputfile")

  parser.add_argument("-v", "--verbose", dest="verbose", action="store_true",
                      help="print some explanatory messages")

  parser.add_argument("--hash-alg", default="sha256",
                      choices=["sha1","sha256","sha512"],
                      help="hash algorithm used for fingerprints matching",
                      dest="hash_alg")

  parser.add_argument("--no-cache", dest="nocache", action="store_true",
                      help="ignore any cached data")

  parser.add_argument("--expires", dest="expires", type=int,
                      metavar="minutes",
                      help="policy expiration time, in minutes "
                      "(default: 10080, 1 week)", default=10080)

  avoid_choices = ["ta", "ee_pubkey", "ee_certificate", "valid"]
  parser.add_argument("--avoid-cert-matching", metavar="policy_type",
                      choices=avoid_choices,
                      dest="avoid",
                      help="do not use these policy types for certificate "
                      "matching; allowed values: " + \
                      ", ".join(avoid_choices), nargs="*")
  args = parser.parse_args()

  Config.read(args.cfg_path)

  global CACHE_DIR
  CACHE_DIR = Config.get("general", "guessstarttlspolicy_cache_dir")

  if not os.path.isdir(CACHE_DIR):
    mkdirp(CACHE_DIR)
  if not os.access(CACHE_DIR, os.W_OK):
    raise InsufficientPermissionError("Insufficient permissions to write "
                                      "into GuessSTARTTLSPolicies cache dir "
                                      "(guessstarttlspolicy_cache_dir): %s" %
                                      CACHE_DIR)

  hash_alg = args.hash_alg

  if args.avoid:
    avoid_cert_matching = args.avoid
  else:
    avoid_cert_matching = []

  check_ko = []

  expires = datetime.datetime.utcnow() + \
            datetime.timedelta(minutes=args.expires)

  output = {
    "version": "0.1",
    "timestamp": datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S"),
    "author": "GuessSTARTTLSPolicies on %s - "
              "USE THIS POLICY AT YOUR OWN RISK" % platform.node(),
    "expires": expires.strftime("%Y-%m-%dT%H:%M:%S"), 
    "tls-policies": {}
  }

  def verbose(s):
    if args.verbose:
      print(s)

  for domain in args.inputfile.readlines():
    mail_domain = domain.strip()

    verbose("Analysing domain %s..." % mail_domain)
    verbose("")

    check_ko, domain_data = collect(mail_domain, args.nocache)

    if len(check_ko) > 0:
      verbose(" One or more MX hosts can't be analysed:")
      for mx_hostname, failure in check_ko:
        verbose("  %s: %s" % (mx_hostname,failure))
      verbose("")
      continue

    if not domain_data:
      verbose(" Can't get information about any MX host for %s" % mail_domain)
      verbose("")
      continue

    common = {}

    # ----------------------------------------------

    verbose(" Highest common TLS version...")
    common["min-tls-version"] = None
    for mx_hostname in domain_data["mx-hostnames"]:
      mx_host = domain_data["mx-hostnames"][mx_hostname]
      tls_ver = mx_host["tls-version"]
      verbose("  %s supports %s" % (mx_hostname,tls_ver))
      if not common["min-tls-version"]:
        common["min-tls-version"] = tls_ver
      else:
        if not tls_ver in tls_protocols_higher_than(common["min-tls-version"]):
          common["min-tls-version"] = tls_ver

    verbose(" min-tls-version: %s" % common["min-tls-version"])
    verbose("")

    # ----------------------------------------------

    common["ta_certificate"] = None
    common["ta_pubkey"] = None
    common["ee_certificate"] = None
    common["ee_pubkey"] = None

    for descr, ee_ta, dest, pem, fp in [("trust anchor certificate",
                                         "ta",
                                         "ta_certificate",
                                         "certificate_pem",
                                         "certificate_fingerprints"),
                                        ("trust anchor public key",
                                         "ta",
                                         "ta_pubkey",
                                         "pubkey_pem",
                                         "pubkey_fingerprints"),
                                        ("leaf certificate",
                                         "ee",
                                         "ee_certificate",
                                         "certificate_pem",
                                         "certificate_fingerprints"),
                                        ("leaf certificate public key",
                                         "ee",
                                         "ee_pubkey",
                                         "pubkey_pem",
                                         "pubkey_fingerprints")]:

      verbose(" Common %s..." % descr)
      for mx_hostname in domain_data["mx-hostnames"]:
        mx_host = domain_data["mx-hostnames"][mx_hostname]
        if not ee_ta in mx_host["certificates"]:
          verbose("  no %s certificate found for %s" % (ee_ta.upper(),
                                                      mx_hostname))
          common[dest] = None
          break

        cert = mx_host["certificates"][ee_ta]
        verbose("  %s %s's fingerprint: %s" % (mx_hostname, descr,
                                             cert[fp][hash_alg]))
        if not common[dest]:
          common[dest] = {}
          common[dest][pem] = cert[pem]
          common[dest][fp] = cert[fp]
        elif common[dest][pem] != cert[pem]:
          common[dest] = None
          break

      if common[dest]:
        verbose(" Common %s found: fingerprint %s" %
              (descr,common[dest][fp][hash_alg]))
      else:
        verbose(" No common %s found" % descr)
      verbose("")

    # ----------------------------------------------

    verbose(" Any invalid EE certificates...")

    common["any-invalid-EE-cert"] = False

    for mx_hostname in domain_data["mx-hostnames"]:
      mx_host = domain_data["mx-hostnames"][mx_hostname]

      if not mx_host["certificates"]["ee"]["verify_ok"]:
        verbose("  %s: not valid (%s)" %
                (mx_hostname, mx_host["certificates"]["ee"]["verify_res"]))
        common["any-invalid-EE-cert"] = True
      else:
        verbose("  %s: valid" % mx_hostname)

    if common["any-invalid-EE-cert"]:
      verbose(" Invalid EE certificates found")
    else:
      verbose(" No invalid EE certificates found")
    verbose("")

    # ----------------------------------------------

    verbose(" Common names in EE certificates...")

    common["shortest_names"] = []

    pdoms = {}

    for mx_hostname in domain_data["mx-hostnames"]:
      mx_host = domain_data["mx-hostnames"][mx_hostname]
      verbose("  %s: %s" % (mx_hostname,
                          ", ".join(mx_host["certificates"]["ee"]["names"])))
      for name in mx_host["certificates"]["ee"]["names"]:
        lbls = name.split(".")

        for dom_len in range(2, len(lbls)+1):
          pdom = ".".join(lbls[-dom_len:])
          if dom_len != len(lbls):
            pdom = "." + pdom

          if not str(dom_len) in pdoms:
            pdoms[str(dom_len)] = {}

          if not pdom in pdoms[str(dom_len)]:
            pdoms[str(dom_len)][pdom] = [mx_hostname]
          elif not mx_hostname in pdoms[str(dom_len)][pdom]:
            pdoms[str(dom_len)][pdom].append(mx_hostname)

    common_names = {}
    for dom_len in pdoms.keys():
      for name in pdoms[dom_len].keys():
        if len(pdoms[dom_len][name]) == len(domain_data["mx-hostnames"]):
          if not dom_len in common_names:
            common_names[dom_len] = []
          common_names[dom_len].append(name)

    if len(common_names.keys()) > 0:
      min_len = sorted([int(x) for x in common_names.keys()])[0]
      common["shortest_names"] = common_names[str(min_len)]
      verbose(" Common shortest names: " + ", ".join(common["shortest_names"]))
    else:
      verbose(" No common names found in EE certificates")

    # ----------------------------------------------
    # Decisions follow

    policy = {}

    def add_tlsas(ee_ta,entity):
      assert(ee_ta in [ "ee", "ta" ])
      assert(entity in [ "pubkey", "certificate" ])

      # add both the full entity (base64 PEM) and its fingerprint
      policy["%s-tlsa" % ee_ta].append({
        "entity": entity,
        "data_format": "b64",
        "data": common["%s_%s" %
                       (ee_ta,entity)]["%s_pem" % entity]
      })
      policy["%s-tlsa" % ee_ta].append({
        "entity": entity,
        "hash_alg": hash_alg,
        "data_format": "hex",
        "data": common["%s_%s" %
                       (ee_ta,entity)]["%s_fingerprints" % entity][hash_alg]
      })

    verbose("")

    if common["ta_certificate"] or common["ta_pubkey"]:

      if len(common["shortest_names"]) > 0:
        if "ta" in avoid_cert_matching:
          verbose(" Common trust anchor found "
                  "but forbidden by user's choice: "
                  "--avoid-cert-matching ta")
        else:
          verbose(" Certificate matching based on common trust anchor.")
          policy["certificate-matching"] = "TA"
          policy["ta-tlsa"] = []

          if common["ta_certificate"]:
            add_tlsas("ta", "certificate")
          if common["ta_pubkey"]:
            add_tlsas("ta", "pubkey")
      else:
        verbose(" WARNING: even if domain's MX hosts share a common "
                "trust anchor it can't be used for certificate "
                "matching because no common EE certificate names have "
                "be found. ")

    if "certificate-matching" not in policy and common["ee_pubkey"]:

      if "ee_pubkey" in avoid_cert_matching:
        verbose(" Common EE certificates' public keys found "
                "but forbidden by user's choice: "
                "--avoid-cert-matching ee_pubkey")
      else:
        verbose(" Certificate matching based on the common EE certificates' "
                "public key.")
        policy["certificate-matching"] = "EE"
        policy["ee-tlsa"] = []

        add_tlsas("ee", "pubkey")

    if "certificate-matching" not in policy and common["ee_certificate"]:

      if "ee_certificate" in avoid_cert_matching:
        verbose(" Common EE certificates found "
                "but forbidden by user's choice: "
                "--avoid-cert-matching ee_certificate")
      else:
        verbose(" Certificate matching based on common EE certificates.")
        policy["certificate-matching"] = "EE"
        policy["ee-tlsa"] = []

        add_tlsas("ee", "certificate")

    if "certificate-matching" not in policy and \
      common["shortest_names"] != [] and not common["any-invalid-EE-cert"]:

      verbose(" No common TA or EE certificate have been found among domain's "
              "MX hosts.")

      if "valid" in avoid_cert_matching:
        verbose(" Certificate matching based on any valid certificate "
                "would be used "
                "but it's forbidden by user's choice: "
                "--avoid-cert-matching valid")
      else:
        verbose(" Certificate matching based on any valid certificate "
                "with a matching name.")
        policy["certificate-matching"] = "valid"
    
    if "certificate-matching" not in policy:
      verbose(" WARNING: no common certificates' trust anchors nor common "
              "EE valid certificates have been found. TLS will be enforced "
              "but no authentication will be provided.")

    if "certificate-matching" in policy:
      if policy["certificate-matching"] in [ "TA", "valid"]:
        policy["allowed-cert-names"] = copy.copy(common["shortest_names"])
        if mail_domain in policy["allowed-cert-names"]:
          policy["allowed-cert-names"].remove(mail_domain)
          if policy["allowed-cert-names"] == []:
            policy.pop("allowed-cert-names", None)

    if common["min-tls-version"]:
      policy["min-tls-version"] = common["min-tls-version"]

    output["tls-policies"][mail_domain] = copy.deepcopy(policy)
    verbose("")

#    print(json.dumps(common,indent=2))
#    print json.dumps(domain_data,indent=2)
#    print(json.dumps(policy,indent=2))

  if args.outputfile:
    args.outputfile.write(json.dumps(output,indent=2))
    verbose("Policy definitions written to the output file.")
    args.outputfile.close()
  else:
    verbose("Policy definitions follow:")
    verbose("")
    print(json.dumps(output,indent=2))
    verbose("")
Example #5

def main():
    parser = argparse.ArgumentParser(
      description="""MTA log watcher""",
      formatter_class=argparse.RawDescriptionHelpFormatter,
      epilog="""
Incremental reading is not available when logfile = "-" (stdin).

If a policy definitions file is supplied (-p argument) the output counters are
incremented only for logfile lines that match one of the mail domains covered
by the policy.

Output type:
- matched-lines: only the lines that have been analysed will be shown.
- unmatched-lines: only the lines that have not been included in the analysis
  will be shown; this option can be useful to evaluate the effectiveness of 
  log parsing patterns and to display log lines that have been ignored.
- domains: all the domains that have been analysed are shown, with counters
  of successful and failed delivery attempts.
- warnings: like for 'domains', but only mail domains with a failure rate that
  is higher than the configured threshold are shown.""")

    parser.add_argument("-c", "--cfg", default=Config.default_cfg_path,
                        help="general configuration file path", metavar="file",
                        dest="cfg_path")

    parser.add_argument("-m", default="Postfix",
                        help="MTA flavor", choices=["Postfix"],
                        dest="mta_flavor")

    parser.add_argument("logfile", help="MTA's log file to analyze; "
                        "a dash ('-') means read from stdin")

    parser.add_argument("-i", "--incremental", action="store_true",
                        dest="incremental", help="read file incrementally")

    parser.add_argument("--remove-cursor", action="store_true",
                        dest="remove_cursor",
                        help="remove the file containing the cursor used for "
                             "incrementally reading the logfile")

    parser.add_argument("--show-cursor", action="store_true",
                        dest="show_cursor",
                        help="show the file containing the cursor used for "
                             "incrementally reading the logfile")

    output_choices = ["warnings", "domains", "summary",
                      "matched-lines", "unmatched-lines"]

    parser.add_argument("-o", default="warnings", dest="output",
                        choices=output_choices,
                        metavar="output-type",
                        help="requested output: " + 
                        " | ".join("'" + c + "'" for c in output_choices))

    parser.add_argument("-p",
                        help="JSON policy definitions file",
                        dest="policy_defs",
                        metavar="policy_defs.json")

    args = parser.parse_args()

    Config.read(args.cfg_path)

    if args.output == "warnings":
      # Reporting facilities initialization

      reports_dir = Config.get("general","logwatcher_reports_dir")
      if not os.path.isdir(reports_dir):
        mkdirp(reports_dir)
        #raise MissingFileError("Logwatcher's reports directory "
        #                        "(logwatcher_reports_dir) not found: %s" %
        #                        reports_dir)
      if not os.access(reports_dir, os.W_OK):
        raise InsufficientPermissionError("Insufficient permissions to write "
                                          "into logwatcher's reports "
                                          "directory "
                                          "(logwatcher_reports_dir): %s" %
                                          reports_dir)

      Config.get_logger()

    # failure_threshold = failure_threshold_percent / 100
    #   1 = 100%
    #   0.001 = 0.1%
    failure_threshold = Config.get("general","failure_threshold_percent")
    try:
      failure_threshold = float(failure_threshold)/100
    except (TypeError, ValueError):
      raise TypeError("Invalid failure threshold: %s" % failure_threshold)

    if failure_threshold < 0 or failure_threshold > 1:
      raise ValueError("Failure threshold must be between 0 and 100: %s" %
                        failure_threshold)

    if args.logfile == "-":
      if args.incremental:
        print("Can't use incremental reading on stdin.")
        return
      if args.remove_cursor or args.show_cursor:
        print("Can't manage cursors for stdin.")
        return

    if args.policy_defs:
      policy_defs = DefsParser.Defs(args.policy_defs)
    else:
      policy_defs = None

    if args.mta_flavor == "Postfix":
      logwatcher = PostfixLogWatcher(args.logfile,args.incremental,policy_defs)
    else:
      print("Unexpected MTA flavor: {}".format(args.mta_flavor))
      return

    if args.remove_cursor:
      print(logwatcher.remove_cursor())
      return
    if args.show_cursor:
      print(logwatcher.show_cursor())
      return

    res = logwatcher.analyze_lines(logwatcher.get_newlines())

    if args.output == "summary":
      print("Displaying the summary accordingly to logfile parsing results")
      print("")

      for s in logwatcher.status_tags:
        if s in res:
          print("%s:" % s)
          print(json.dumps(res[s],indent=2))
      print("Domains:")
      print(json.dumps(res["domains"],indent=2))

    elif args.output == "matched-lines":
      print("Displaying the logfile's lines that matched configured patterns")
      print("")

      for l in res["matched_lines"]:
        print(l.rstrip("\n"))

    elif args.output == "unmatched-lines":
      print("Displaying the logfile's lines that did not match "
            "configured patterns")
      print("")

      for l in res["unmatched_lines"]:
        print(l.rstrip("\n"))

    elif args.output in [ "domains", "warnings" ]:
      print("Displaying successful/failed delivery attempts for %s" %
            ("every domain" if args.output == "domains" else
             "domains with an high failure rate (%s%%)" %
             (failure_threshold*100)))
      print("")

      warning_domains = []

      for domainname in res["domains"]:
        domain = res["domains"][domainname]
        
        if not "attempted" in domain:
          continue

        #TODO: implement results for "log-only = true" status.
        if "sent_ko" in domain and domain["attempted"] > 0:
          failure_rate = float(domain["sent_ko"]) / domain["attempted"]
        else:
          failure_rate = None

        if args.output == "domains" or \
          ( args.output == "warnings" and failure_rate is not None
            and failure_rate >= failure_threshold ):
          succeeded = domain["sent_ok"] if "sent_ok" in domain else "none"
          failed = domain["sent_ko"] if "sent_ko" in domain else "none"

          s = "{d}: {t} delivery attempts, {s} succeeded, {f} failed"
          if failure_rate:
            s = s + ", {r:.2%} failure rate"
            if failure_rate >= failure_threshold:
              s = s + " - WARNING"
              warning_domains.append(domainname)

          print(s.format(d=domainname, r=failure_rate,
                         t=domain["attempted"], s=succeeded,
                         f=failed))

      if args.output == "warnings" and len(warning_domains) > 0:
        report_format = Config.get("general","logwatcher_reports_fmt")
        report_filename = datetime.datetime.now().strftime(report_format)
        report_file = "%s/%s" % (reports_dir,report_filename)
        with open(report_file, "w") as r:
          r.write("domainname,attempts,ko,ok\n")
          for domainname in warning_domains:
            r.write("{domainname},{attempted},{sent_ko},{sent_ok}\n".format(
                    domainname=domainname,
                    attempted=res["domains"][domainname]["attempted"],
                    sent_ko=res["domains"][domainname].get("sent_ko",0),
                    sent_ok=res["domains"][domainname].get("sent_ok",0)))

        notification_t = "Delivery errors found for {domains} for a " + \
                         "total of {fail} failures over {tot} total " + \
                         "attempts. More details on {report_file}"

        if len(warning_domains) > 3:
          notification_domains = ", ".join(warning_domains[:3]) + \
                                 "and " + str(len(warning_domains)-3) + \
                                 " more domains"
        else:
          notification_domains = ", ".join(warning_domains)

        fail = 0
        tot = 0
        for domainname in warning_domains:
          fail = fail + res["domains"][domainname]["sent_ko"]
          tot = tot + res["domains"][domainname]["attempted"]

        notification = notification_t.format(domains=notification_domains,
                                             fail=fail,
                                             tot=tot,
                                             report_file=report_file)

        Config.get_logger().error(notification)
Example #6

def main():
    parser = argparse.ArgumentParser(
      description="""MTA configuration generator""")

    parser.add_argument("-c", "--cfg", default=Config.default_cfg_path,
                        help="general configuration file path", metavar="file",
                        dest="cfg_path")

    parser.add_argument("-m", default="Postfix",
                        help="MTA flavor", choices=["Postfix"],
                        dest="mta_flavor")

    parser.add_argument("-f", "--fix", action="store_true",
                        help="fix MTA general configuration; "
                        "by default, only STARTTLS policies are updated "
                        "while the main MTA configuration is kept unchanged. "
                        "Changes are saved only if -s | --save arguments are "
                        "given.",
                        dest="fixup")

    parser.add_argument("--show-ignore-list", action="store_true",
                        help="show the list of exceptions that can be "
                        "ignored for the given MTA", dest="show_ignore_list")

    parser.add_argument("--ignore", nargs="*",
                        help="ignore errors due to features not implemented",
                        metavar="error_type", dest="ignore_list")

    parser.add_argument("-s", "--save", action="store_true",
                        help="really write changes to disk (both for general "
                        "configuration changes and for policy definitions).",
                        dest="save")

    parser.add_argument("policy_def", help="JSON policy definitions file",
                        metavar="policy_defs.json")

    args = parser.parse_args()

    Config.read(args.cfg_path)

    import DefsParser
    c = DefsParser.Defs(args.policy_def)

    if args.ignore_list:
      ignore_list = args.ignore_list
    else:
      ignore_list = []

    if args.mta_flavor == "Postfix":
      from PostfixConfigGenerator import PostfixConfigGenerator

      postfix_dir = Config.get("postfix","cfg_dir")

      cfg_gen = PostfixConfigGenerator(c, postfix_dir, fixup=args.fixup,
                                       ignore_list=ignore_list)
    else:
      print("Unexpected MTA flavor: {}".format(args.mta_flavor))
      return

    if args.show_ignore_list:
      print("List of exceptions that can be ignored by "
            "%s config generator:" % args.mta_flavor)
      for err in cfg_gen.allowed_ignore_list:
        print(" - %s: %s" % (err,cfg_gen.allowed_ignore_list[err]))
      return

    if args.fixup:
      if cfg_gen.build_general_config():

        if args.save:
          if cfg_gen.fix_general_config():
            print("MTA general configuration changes saved and used by MTA!")
          else:
            print("MTA general configuration changes saved.")
            print("Ensure your MTA is using the new configuration; "
                  "reload it if needed.")
        else:
          print("MTA general configuration changes are needed.")

          try:
            cfg_gen.show_new_general_config_diff()
          except OSError:
            print("Error while showing configuration differences. "
            "The whole new configuration follows:")
            cfg_gen.show_new_general_config()

          print("\nMTA general configuration changes NOT saved: "
                "use -s | --save to save them.")
      else:
        print("No MTA general configuration changes are needed.")
    else:
      cfg_gen.build_defs()
      
      if args.save:
        if cfg_gen.update_defs():
          print("Policy definitions updated and used by MTA!")
        else:
          print("Policy definitions updated but not used yet by MTA: consider "
                "to reload it.")
      else:
        cfg_gen.show_defs()
        print("\nPolicy definitions NOT updated: "
              "use -s | --save to save them.")
                      dest="cfg_path")

  parser.add_argument("-j", nargs="?", const="*",
                      metavar="domain",
                      help="print the resultant JSON policy, optionally "
                      "limited to domain", dest="print_json")

  parser.add_argument("policy_def", help="""JSON policy definitions file; """
                      """"-" to read from stdin""",
                      metavar="policy_defs.json",
                      type=argparse.FileType("r"))

  args = parser.parse_args()

  try:
    Config.read(args.cfg_path)

    c = Defs(args.policy_def)
    print("Validation OK")
    if args.print_json:
      j = c.to_json()
      domain = args.print_json
      if domain == "*":
        print(json.dumps(j,indent=2))
      else:
        if domain in j["tls-policies"]:
          print(json.dumps(j["tls-policies"][domain],indent=2))
        else:
          print("The selected domain %s has not been found." % domain)
  except (ValueError, TypeError) as e:
    print("Validation failure: %s" % str(e))