def get_public_suffix_list() -> PublicSuffixList:
    """Initialize Public Suffix List.

    Tries to fetch a fresh copy of the list; on any failure it falls back
    to the snapshot bundled with the ``publicsuffix2`` package.

    :return: A ready-to-use :class:`PublicSuffixList` instance.
    """
    import logging  # local import so this fix stands alone

    try:
        psl_file = fetch()
        psl = PublicSuffixList(psl_file=psl_file)
    except Exception as e:
        # Previously the failure was swallowed silently; log it so operators
        # know the bundled (possibly stale) list is being used instead.
        logging.getLogger(__name__).warning(
            'Unable to fetch the PublicSuffixList: %s', e)
        psl = PublicSuffixList()
    return psl
def get_public_suffix_list() -> PublicSuffixList:
    """Build the Public Suffix List, preferring a freshly downloaded copy.

    Falls back to the list bundled with ``publicsuffix2`` when the
    download (or parsing of the downloaded file) fails, logging a warning.
    """
    try:
        # Both the download and the parse are guarded: a bad download file
        # should also trigger the bundled-list fallback.
        return PublicSuffixList(psl_file=fetch())
    except Exception as e:
        logging.getLogger(__name__).warning(f'Unable to fetch the PublicSuffixList: {e}')
        return PublicSuffixList()
def issDotCom(inputDomain):
    """Return True if 'com' appears in the TLD of the certificate issuer's CN.

    :param inputDomain: Domain whose certificate is read via ``readDomain``.
    :return: ``True`` when the public-suffix TLD of the issuer common name
        contains the substring ``'com'`` (note: this also matches e.g.
        ``'com.br'`` — kept as-is to preserve the original semantics).
    """
    pem_data = readDomain(inputDomain)
    x509_one = crypto.load_certificate(crypto.FILETYPE_PEM, pem_data)
    psl = PublicSuffixList(idna=True)
    # BUG FIX: get_issuer() must be called on the parsed X509 object, not on
    # the raw PEM data returned by readDomain() (which has no such method).
    return 'com' in psl.get_tld(x509_one.get_issuer().CN)
def subDotCom(inputDomain):
    """Return True if 'com' appears in the TLD of the certificate subject's CN.

    :param inputDomain: Domain whose certificate is read via ``readDomain``.
    :return: ``True`` when the public-suffix TLD of the subject common name
        contains the substring ``'com'`` (note: this also matches e.g.
        ``'com.br'`` — kept as-is to preserve the original semantics).
    """
    pem_data = readDomain(inputDomain)
    x509_one = crypto.load_certificate(crypto.FILETYPE_PEM, pem_data)
    psl = PublicSuffixList(idna=True)
    # BUG FIX: get_subject() must be called on the parsed X509 object, not on
    # the raw PEM data returned by readDomain() (which has no such method).
    return 'com' in psl.get_tld(x509_one.get_subject().CN)
def issueriscom():
    """Check each loaded certificate's issuer CN for a '.com'-like TLD.

    :return: A list of booleans, one per certificate from ``readCert()``,
        ``True`` when ``'com'`` is a substring of the public-suffix TLD of
        the issuer common name.
    """
    psl = PublicSuffixList(idna=True)
    certificates = readCert()
    # Membership test already yields the boolean the original computed via
    # an explicit if/else — build the result list in one pass.
    return ['com' in psl.get_tld(c.get_issuer().CN) for c in certificates]
def subjectiscom():
    """Check each loaded certificate's subject CN for a '.com'-like TLD.

    :return: A list of booleans, one per certificate from ``readCert()``,
        ``True`` when ``'com'`` is a substring of the public-suffix TLD of
        the subject common name.
    """
    psl = PublicSuffixList(idna=True)
    certificates = readCert()
    # Membership test already yields the boolean the original computed via
    # an explicit if/else — build the result list in one pass.
    return ['com' in psl.get_tld(c.get_subject().CN) for c in certificates]
def __init__(self) -> None:
    """Initialise information probes."""
    # Construct each collaborator in the same order as before (the probe
    # constructors may perform setup work, so ordering is preserved).
    for attr_name, factory in (
        ("psl", PublicSuffixList),
        ("reg", RegistryProbe),
        ("dns", DnsProbe),
        ("hosts", HostsProbe),
    ):
        setattr(self, attr_name, factory())
class DomainReport:
    """Inspect the state of a domain name and report on possible issues."""

    def __init__(self) -> None:
        """Initialise information probes."""
        # Public-suffix data plus the registry, DNS and local-hosts probes
        # used by the report methods below.
        self.psl = PublicSuffixList()
        self.reg = RegistryProbe()
        self.dns = DnsProbe()
        self.hosts = HostsProbe()

    def full_report(self, fqdn: str) -> str:
        """Grab information about `fqdn` and produce a report about it.

        :param fqdn: The fully qualified domain name for which a report is
            produced.

        :raises ValueError: If `fqdn` is not using a known public suffix.
            Indeed, we'll be prodding some public services for information
            about the domain, so it doesn't make much sense to run the
            information gathering for a domain name that won't have any
            valid information on those services.

        :return: a JSON-serialized data structure

        .. note::
            If `fqdn` is not a second-level domain (e.g. the name that would
            be registered with a registry), the report will be run on the
            second-level domain part of it instead.

        This method inspects data returned from all probes and also adds
        errors for the following correlations:

            the domain name uses one of the known public suffixes
                if not, fail early
            the DNS servers in the zone match the ones in the registry
            check reported resolved hosts for presence in local hosts
            database
        """
        # TODO decide exactly what structure the report should take
        report = {}
        # strict=True makes get_sld() return None for unknown suffixes,
        # which is our early-failure signal.
        domain_name = self.psl.get_sld(fqdn, strict=True)
        if domain_name is None:
            raise ValueError(
                f"{fqdn} is not using a known public suffix or TLD")
        report["domain"] = domain_name
        report["registry"] = self.registry_report(fqdn)
        report["dns"] = self.dns_report(fqdn)
        # TODO extract portion of report with resolved hosts and give that to
        # the next report method instead of fqdn
        hostnames: Set[str] = set()
        report["hosts"] = self.local_hosts_report(hostnames)
        return json.dumps(report)

    def registry_report(self, domain_name: str) -> dict:
        """Run a full inspection and produce a report about what was found.

        The registry should be checked for:
            domain is registered
            not expired
            not in a problematic status
            the DNS hosts in the registry have glue records

        :param domain_name: The domain name for which we'll be gathering
            information into a report.

        :return: A dictionary containing report information.
        """
        info = self.reg.domain_name(domain_name)
        report = {}
        report["status"] = info["status"]
        # str() because the expiration date is presumably a datetime-like
        # object that isn't directly JSON-serializable — TODO confirm.
        report["expiration_date"] = str(info["expiration_date"])
        report["registrar"] = info["entities"]["registrar"][0]["name"]
        report["nameservers"] = info["nameservers"]
        return report

    def dns_report(self, fqdn: str) -> dict:
        """Run all DNS inspections and produce report as a dictionary.

        To produce a full report we want to inspect the following details
        about a domain name:

        * List out NS entries
          * Grab the SOA and report the serial
          * Get the SOA from all NS entries and compare the serials. If
            there is a mismatch, add an error in the report about a mismatch
            in the SOA and which nameservers disagree
          * If any of the NS servers fail to respond, add an error about
            each one that failed
          * If no NS server responded, raise an exception to fail early
        * Details about email setup
          * MX is present.
            all values have a PTR corresponding to the same hostname
            * check all hosts in the same way as resolving tests down below
              and add results to report
          * SPF is present
          * DKIM is present (we'll need a configuration option for a set of
            DKIM sub-domains to search for)
          * DMARC is present
          * MTA-STS is present
          * onionmx SRV field exists
          * SRV records exist for IMAP/POP3
          * autodiscover/autoconfig TXT entries exist
        * general security fields
          * DNSSEC: DS and DNSKEY
          * CAA
        * Resolve a series of hosts
          * check for CNAME first and report if any is found
          * A and AAAA, also check for PTR on found values
          * always check if there are NS entries for subdomains and report
            the delegations that were found
          * at least:
            * NS servers
            * top of domain
            * www subdomain
            * hosts found in SRV records
          * it would be a good idea to have a parameter for extra hosts to
            include in the report

        :param fqdn: The domain name for which we'll gather DNS information
            into a report.

        :return: A dictionary containing report information.
        """
        report = {}
        try:
            nameservers = self.dns.name_servers(fqdn)
        except (DomainError, NoAnswer):
            # Without at least one nameserver there is nothing to query.
            raise DomainError(
                f"No nameserver was found for {fqdn}. Cannot go further.")
        ns_data = []
        for ns in nameservers:
            # Collect every address of the nameserver; v6 only when the
            # local resolver environment supports it.
            ns_ips = self.dns.v4_address(ns)
            if self.dns.ipv6_enabled:
                ns_ips.extend(self.dns.v6_address(ns))
            for ns_ip in ns_ips:
                # One entry per (hostname, address) pair so SOA serials can
                # later be compared across individual server addresses.
                ns_struct: Dict[str, Union[str, Dict[str, str]]] = {
                    "hostname": ns,
                    "ip_address": ns_ip,
                }
                # TODO catch errors from this
                soa = self.dns.soa(fqdn, ns_ip)
                ns_struct["soa"] = soa
                ns_data.append(ns_struct)
        report["nameservers"] = ns_data
        return report

    def local_hosts_report(self, hosts: Set[str]) -> dict:
        """Produce a report about the presence of hosts in the local
        database.

        Host names will not be verified for validity, only whether or not
        they are in the local hosts database.

        :param hosts: Set of unique host names

        :return: A dictionary with host names as keys and a boolean as
            values to indicate if the corresponding host name was found in
            the local database.
        """
        report = {}
        for h in hosts:
            report[h] = self.hosts.in_database(h)
        return report
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import urllib.parse
import warnings

try:
    from publicsuffix2 import PublicSuffixList
    psl = PublicSuffixList(idna=False)
except ImportError:
    # publicsuffix2 is an optional dependency: warn and degrade gracefully.
    # FIX: the warning previously said "nodule" instead of "module".
    warnings.warn("Unable to import the module 'publicsuffix2'. "
                  "Will be unable to parse domain names.")
    psl = None

# Graph-styling attributes for edges produced by URL-parsing functions.
urlparse_edge = {
    'color': {
        'color': '#4d4d4d'
    },
    'title': 'URL Parsing Functions',
    'label': 'u'
}
from urllib import unquote as urllib_unquote

try:
    # NOTE(review): this span looks garbled in the source; token-faithful
    # reconstruction below. Presumably: probe for the Python 2 `unicode`
    # builtin (raises NameError on Python 3) — confirm against upstream.
    unicode
    str = unicode
except NameError:
    unicode = str

# Python versions
_sys_v0 = sys.version_info[0]
py2 = _sys_v0 == 2
py3 = _sys_v0 == 3

# For publicsuffix utilities
from publicsuffix2 import PublicSuffixList
psl = PublicSuffixList()

# Some codecs that we'll need
IDNA = codecs.lookup('idna')
UTF8 = codecs.lookup('utf-8')
ASCII = codecs.lookup('ascii')
W1252 = codecs.lookup('windows-1252')

# The default ports associated with each scheme
PORTS = {'http': 80, 'https': 443}


def parse(url):
    '''Parse the provided url string and return an URL object'''
    return URL.parse(url)
"+00:00").astimezone( LOCAL_TIMEZONE).strftime('%Y-%m-%d') if date in totals: totals[date] = totals[date] + duration['value'] else: totals[date] = duration['value'] for date in totals: values.append({ 'date': date, 'name': 'gaming_min', 'value': int(totals[date] / 60) }) tags.append({'date': date, 'value': 'gaming'}) elif RESCUETIME_DATABASE != '': psl = PublicSuffixList() totals = {} client.switch_database(RESCUETIME_DATABASE) durations = client.query( 'SELECT "duration","activity" FROM "activity" WHERE category = \'Games\' AND activity != \'Steam\' AND activity != \'steamwebhelper\' AND activity != \'origin\' AND activity != \'mixedrealityportal\' AND activity != \'holoshellapp\' AND activity != \'vrmonitor\' AND activity != \'vrserver\' AND activity != \'oculusclient\' AND activity != \'vive\' AND activity != \'obs64\' AND time >= ' + start_time) for duration in list(durations.get_points()): date = datetime.fromisoformat(duration['time'].strip('Z') + "+00:00").astimezone( LOCAL_TIMEZONE).strftime('%Y-%m-%d') if psl.get_public_suffix(duration['activity'], strict=True) is None: if date in totals: totals[date] = totals[date] + duration['duration'] else: totals[date] = duration['duration']
def get_org_domain(domain):
    """Return the organizational (registered) domain of ``domain``.

    Loads the public suffix list from the file named by
    ``get_suffix_list_file_name()`` and resolves the public suffix.
    """
    suffix_file = get_suffix_list_file_name()
    with open(suffix_file) as handle:
        suffix_list = PublicSuffixList(handle)
    return suffix_list.get_public_suffix(domain)
from publicsuffix2 import PublicSuffixList

# Demo: print the registered domain and public suffix for a set of sample
# names, covering ICANN suffixes (*.jp) and private-section suffixes
# (platform.sh, the amazonaws.com wildcard entries).
psl = PublicSuffixList(psl_file="/tmp/public_suffix_list.dat")

SAMPLE_DOMAINS = (
    "www.google.co.jp",
    "foo.bar.yokohama.jp",
    "bar.yokohama.jp",
    "yokohama.jp",
    "jp",
    "foo.bar.platform.sh",
    "bar.platform.sh",
    "platform.sh",
    "sh",
    "foo.s3.amazonaws.com",
    "s3.amazonaws.com",
    "foo.s1.amazonaws.com",
    "s1.amazonaws.com",
    "amazonaws.com",
)

for domain in SAMPLE_DOMAINS:
    print(f"[{domain}]")
    print(f"root: {psl.get_public_suffix(domain)}")
    print(f"public suffix: {psl.get_tld(domain)}")
    print()