Example #1
def checklinkfunc(link):
    """
    A user-defined function that uses Google's Safe
    Browsing API to check whether a given link is malicious
    or not.
    """

    key = Settings().SECRETS['GOOGLE_SAFE_BROWSING_API_KEY']
    checker = SafeBrowsing(key)
    regex = Settings().SECRETS['VALID_LINK_REGEX']
    url = findall(regex, link)

    try:
        link = url[0][0]
        response = checker.lookup_urls([link])
        if response[link]["malicious"] == False:
            return "{link} **is safe!**".format(link=link)
        elif response[link]["malicious"] == True:
            return "{link} **is malicious!!!**".format(link=link)
        else:
            return "Something's wrong"
    except:
        message = "There was no link in your command\n"
        message += "Example command: ``checklink <pastethelinkhere>``"
        return message
Example #2
 def run(self, conf, args, plugins):
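     # Look up a single URL or a file of domains with Google Safe Browsing and print the verdicts.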
     sb = SafeBrowsing(conf['SafeBrowsing']['key'])
     if 'subcommand' in args:
         if args.subcommand == 'url':
             try:
                 if args.URL.startswith("http"):
                     res = sb.lookup_url(args.URL)
                 else:
                     res = sb.lookup_url("http://" + args.URL + "/")
             except SafeBrowsingInvalidApiKey:
                 print("Invalid API key!")
                 sys.exit(1)
             except SafeBrowsingWeirdError:
                 print("Weird Error!")
                 sys.exit(1)
             else:
                 if args.json:
                     print(json.dumps(res, sort_keys=True, indent=4))
                 else:
                     if res["malicious"]:
                         print("Malicious: Yes")
                         print("Platforms: %s" %
                               ", ".join(res["platforms"]))
                         print("Threats: %s" % ", ".join(res["threats"]))
                     else:
                         print("Malicious: No")
         elif args.subcommand == 'file':
             with open(args.FILE, 'r') as f:
                 data = f.read()
             domains = [d.strip() for d in data.split()]
             res = sb.lookup_urls([
                 "http://" + d + "/" if not d.startswith("http") else d
                 for d in domains
             ])
             if args.format == "txt":
                 for domain in res:
                     if res[domain]["malicious"]:
                         print("%s\tMalicious" % domain)
                     else:
                         print("%s\tOk" % domain)
             elif args.format == "json":
                 print(json.dumps(res, sort_keys=True, indent=4))
             else:
                 print("Url|Malicious|Threat|Platform")
                 for domain in res:
                     if res[domain]["malicious"]:
                         print("%s|%s|%s|%s" % (domain, "Yes", ",".join(
                             res[domain]["threats"]), ",".join(
                                 res[domain]["platforms"])))
                     else:
                         print("%s|No||" % domain)
         else:
             self.parser.print_help()
     else:
         self.parser.print_help()
Example #3
 def google_sb(self):
     try:
         if self.GOOGLE_SAFE_BROWSING_API_KEY == '':
             return False, None
         s = SafeBrowsing(self.GOOGLE_SAFE_BROWSING_API_KEY)
         r = s.lookup_urls([self.url])
         return r[self.url]['malicious'], None
     except Exception as e:
         ex = 'ERROR: Google Safe Browsing, ' + str(e)
         print(ex)
         return None, ex
Example #4
def run(
    analyzer_name,
    job_id,
    observable_name,
    observable_classification,
    additional_config_params,
):
    logger.info("started analyzer {} job_id {} observable {}"
                "".format(analyzer_name, job_id, observable_name))
    report = general.get_basic_report_template(analyzer_name)
    try:
        api_key = secrets.get_secret("GSF_KEY")
        if not api_key:
            raise AnalyzerRunException(
                "no api key retrieved. job_id {}, analyzer {}".format(
                    job_id, analyzer_name))

        sb_instance = SafeBrowsing(api_key)
        response = sb_instance.lookup_urls([observable_name])
        if observable_name in response and isinstance(
                response[observable_name], dict):
            result = response[observable_name]
        else:
            raise AnalyzerRunException(
                "result not expected: {}".format(response))

        # pprint.pprint(result)
        report["report"] = result
    except AnalyzerRunException as e:
        error_message = (
            "job_id:{} analyzer:{} observable_name:{} Analyzer error {}"
            "".format(job_id, analyzer_name, observable_name, e))
        logger.error(error_message)
        report["errors"].append(error_message)
        report["success"] = False
    except Exception as e:
        traceback.print_exc()
        error_message = (
            "job_id:{} analyzer:{} observable_name:{} Unexpected error {}"
            "".format(job_id, analyzer_name, observable_name, e))
        logger.exception(error_message)
        report["errors"].append(str(e))
        report["success"] = False
    else:
        report["success"] = True

    general.set_report_and_cleanup(job_id, report)

    logger.info("ended analyzer {} job_id {} observable {}"
                "".format(analyzer_name, job_id, observable_name))

    return report
Example #5
class SafeBrowse(AbstractIntelPlugin):
    LEVEL_MAPPINGS = {
        'debug': logging.DEBUG,
        'info': logging.INFO,
        'warn': logging.WARNING,
        'error': logging.ERROR,
        'critical': logging.CRITICAL,
    }

    def __init__(self, config):
        levelname = config.get('level', 'debug')
        self.level = self.LEVEL_MAPPINGS.get(levelname, logging.DEBUG)
        self.api = SafeBrowsing('')

    def analyse_hash(self, value, type, node):
        # TODO(andrew-d): better message?
        current_app.logger.log(self.level, 'Triggered alert: ')

    def analyse_domain(self, value, type, node):
        # TODO(andrew-d): better message?
        r = self.api.lookup_url(
            'http://malware.testing.google.test/testing/malware/')
        if 'malicious' in r and r['malicious']:
            check_and_save_intel_alert(scan_type=type,
                                       scan_value=value,
                                       data=r,
                                       source="SafeBrowsing",
                                       severity="LOW")
        current_app.logger.log(self.level, 'Triggered alert: SafeBrowsing ')
Example #6
 def __init__(self, train_mails: List[Mail], train_labels: List[int],
              target_attribute: MailAttributes, config):
     super().__init__(train_mails, train_labels, target_attribute, config)
     api_token = config.google_api_token
     if api_token is None:
         logging.fatal(
             "Google API Token is not set. Unable to initialize URLClassifier."
         )
         raise ValueError(
             "Google API Token is not set. Unable to initialize URLClassifier."
         )
     self.target_attribute = target_attribute
     self.safe_browsing = SafeBrowsing(api_token)
     self.url_extractor = URLExtract()
     self.url_extractor.extract_email = False
     self.url_extractor.get_stop_chars_left().add(':')
     self.checked_urls: dict = {}
Example #7
def isSafeURL(url):
    result = {}
    if not validators.url(url):
        result['status'] = False
        result['reason'] = 'INVALID'
        return result
    safeBrowsingClient = SafeBrowsing(SAFE_BROWSING_KEY)
    result = safeBrowsingClient.lookup_urls([url])
    if not result[url]['malicious']:
        result['status'] = True
        return result
    else:
        result['status'] = False
        result['reason'] = 'THREAT'
        result['threats'] = result[url]['threats']
        return result
Example #8
    def run(self):
        api_key_name = "GSF_KEY"
        api_key = secrets.get_secret(api_key_name)
        if not api_key:
            raise AnalyzerRunException(
                f"No API key retrieved with name: '{api_key_name}'"
            )

        sb_instance = SafeBrowsing(api_key)
        response = sb_instance.lookup_urls([self.observable_name])
        if self.observable_name in response and isinstance(
            response[self.observable_name], dict
        ):
            result = response[self.observable_name]
        else:
            raise AnalyzerRunException(f"result not expected: {response}")

        return result
Example #9
def google_safe_browsing_lookup(url: str) -> Optional[dict]:
    """Lookup a url on GSB

    Arguments:
        url {str} -- A URL to lookup

    Returns:
        Optional[dict] -- A lookup result
    """
    key = str(settings.GOOGLE_SAFE_BROWSING_API_KEY)
    if key == "":
        return None

    try:
        s = SafeBrowsing(key)
        return s.lookup_url(url)
    except (SafeBrowsingInvalidApiKey, SafeBrowsingWeirdError):
        pass

    return None
Example #10
    def run(self):
        api_key = self._secrets["api_key_name"]

        sb_instance = SafeBrowsing(api_key)
        response = sb_instance.lookup_urls([self.observable_name])
        if self.observable_name in response and isinstance(
            response[self.observable_name], dict
        ):
            result = response[self.observable_name]
        else:
            raise AnalyzerRunException(f"result not expected: {response}")

        malicious = result["malicious"]
        googlesb_result = malicious_detector_response(self.observable_name, malicious)
        # append google extra data
        if malicious:
            googlesb_result["cache"] = result["cache"]
            googlesb_result["threats"] = result["threats"]
            googlesb_result["platforms"] = result["platforms"]
        return googlesb_result
Example #11
def controlrisk(url):
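    # Query Google Safe Browsing for the URL and convert the reported platform and threat type into a numeric risk score.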

    s = SafeBrowsing('GSBKEY')
    url = url['url']
    r = s.lookup_urls([url])
    r = r[url]

    # Arrange the data according to the threat list (the index position is the corresponding risk level)
    threatslist = ['THREAT_TYPE_UNSPECIFIED',
                   'UNWANTED_SOFTWARE',
                   'POTENTIALLY_HARMFUL_APPLICATION',
                   'SOCIAL_ENGINEERING',
                   'MALWARE']

    # Arrange the data according to platform (declared as a dictionary because there are many duplicate values)
    platforms = {'PLATFORM_TYPE_UNSPECIFIED': 1,
                 'ANDROID': 2,
                 'CHROME': 2,
                 'IOS': 2,
                 'LINUX': 2,
                 'OSX': 2,
                 'WINDOWS': 2,
                 'ANY_PLATFORM': 3,
                 'ALL_PLATFORMS': 4}

    riskrange = 0
    if r['malicious'] == False:
        riskrange = 0
    else:
        strplatforms = r['platforms'][0]
        if platforms[strplatforms] != 0:
            riskrange += platforms[strplatforms]
        r2 = r['threats']
        if r2[0] in threatslist:
            riskrange += threatslist.index(r2[0])
            global titleContext
            titleContext = threatslist[threatslist.index(r2[0])]
    return str(decide_risk(riskrange))
Example #12
def main():
    key = 'AIzaSyBZUltwuT2ApyBvBi4Yr9BF5gQTNP7nwgI'
    s = SafeBrowsing(key)
    url_list = [
        "malware.testing.google.test/testing/malware/",
        "cpcontacts.loveshackdiy.com", "beta.servicehub.com",
        "webmail.accantobeleza.com.br", "www.1plus1.sk", "www.raypal.com/",
        "lefcm.org", "forfriendsstore.com", "api.atux.com.ar",
        "www.mercercopywriting.co.uk",
        "verkehrspsychologische-untersuchung-schweiz.online",
        "bst-8b6d93ad-79da-40b9-8670-d837428ca3b1.bastion.azure.com",
        "enterprisevineyards.com", "wx.n3uc.com", "vitalbites.us",
        "labradortoscana.it", "entizolab.com", "www.hokkaido-select.com",
        "www.jacklete.ca", "46640.1602746166.cr-gke-boskos-40.cr-e2e.com",
        "web-ssoreporting-test.apps.beta.azure.cp-skoda-auto.com",
        "autodiscover.womanmoneyblog.com"
    ]

    for r in url_list:
        h = s.lookup_url('http://' + r)
        print(h)
Example #13
    def post(self):
        test_2_model = Test2Model()
        request_args = test_2_model.get_req_parser()
        website = request_args['website']
        KEY = 'AIzaSyBNdlz3cBi7YGJL-vZtcX53Z6Iq1qkbUew'
        s = SafeBrowsing(KEY)
        r = s.lookup_urls([website])
        print(r)
        status = r[website]['malicious']

        # Parameterised query avoids SQL injection from the user-supplied website value.
        query = "INSERT INTO exam_b(website, is_malicious) VALUES(%s, %s);"
        connection = pymysql.connect(host="localhost",
                                     user="******",
                                     passwd="",
                                     database="exam")
        cursor = connection.cursor()
        cursor.execute(query, (str(website), str(status)))
        connection.commit()
        resp = {'status': 200, 'message': 'Added ' + website + ' to db'}
        return resp
Example #14
def url_scan():
    """Performs a scan for a URL."""
    web_url = request.form['url']
    search = SafeBrowsing(API_KEY)
    #web_url = urllib.request.urlopen(web_url)
    #web_url = requests.get(web_url).json
    res = search.lookup_urls([web_url])
    scan_type = "Url Scan"
    user_id = session['user_id']
    scan_date = datetime.now()
    is_mal = []
    scan_item = web_url

    for key, val in res.items():
        urls = key
        results = val
    for t, threat in results.items():
        if t == 'threats':
            threat_type = threat
        if t == 'malicious':
            is_mal = threat
        if t == 'platforms':
            platforms = threat

    #is_mal = "".join(is_mal)
    if session.get("scanfile_id"):
        if session["scanfile_id"] != None:
            del session["scanfile_id"]

    if is_mal == False:
        findings = (f'[+] The following URL {urls} is free of malicious code.')
        mal_code = False
        new_scan = Scan(findings=findings,
                        scan_type=scan_type,
                        scan_date=scan_date,
                        user_id=user_id,
                        mal_code=mal_code,
                        scan_item=scan_item)

        db.session.add(new_scan)
        db.session.commit()
        session["scanurl_id"] = new_scan.scan_id

        return redirect("/scan_url")
        #return render_template("nonmalfile.html", web_url= web_url, scan_type=scan_type, user_id=user_id, is_mal=is_mal)

    if is_mal == True:

        findings = (
            f'[!] The following URL {urls} contains malicious code. Threat Type: {threat_type}!'
        )
        mal_code = True
        new_scan = Scan(findings=findings,
                        scan_type=scan_type,
                        scan_date=scan_date,
                        user_id=user_id,
                        mal_code=mal_code,
                        scan_item=scan_item)

        db.session.add(new_scan)
        db.session.commit()
        session["scanurl_id"] = new_scan.scan_id
        return redirect("/scan_url")
Example #15
def whois_lookup(domainname):
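    # Collect WHOIS, DNS/ASN and blocklist reputation data for the domain and append a semicolon-delimited row to WABBIT-LOOKUP-RESULTS.csv.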
    try:
        nameservers = []
        nameserver1 = ""
        nameserver2 = ""
        nameserver3 = ""
        nameserver4 = ""
        registrant_emails = []
        registrant_email1 = ""
        registrant_email2 = ""
        registrant_email3 = ""
        registrant_email4 = ""
        registrant_name1 = ""
        registrant_address = ""
        registrant_country = ""
        state = ""
        orgname1 = ""
        emailindex = 1
        nsindex = 1
        try:
            w = whois.whois(domainname)
        except:
            pass
        try:
            created_date = w.creation_date
            try:
                if len(created_date) == 2:
                    created_date = created_date[0]
            except:
                pass
        except:
            created_date = "1969-12-31 00:00:00"
            pass
        try:
            expired_date = w.expiration_date
            try:
                if len(expired_date) == 2:
                    expired_date = expired_date[0]
            except:
                pass
        except:
            expired_date = "1969-12-23 00:00:00"
            pass
        try:
            registrant_name1 = w.name
            if not registrant_name1:
                registrant_name1 = ""
        except:
            registrant_name1 = ""
            pass
        try:
            registrant_address = w.address
            if not registrant_address:
                registrant_address = ""
        except:
            pass
            registrant_address = ""
        try:
            registrant_country = w.country
            registrant_country = str(registrant_country)
            if not registrant_country:
                registrant_country = ""
        except:
            registrant_country = ""
            pass
        try:
            state = w.state
            if not state:
                state = ""
        except:
            state = ""
            pass
        try:
            orgname1 = w.org
            if not orgname1:
                orgname1 = ""
        except:
            orgname1 = ""
            pass
        try:
            whois_city = w.city
            if not whois_city:
                whois_city = ""
        except:
            whois_city = ""
            pass
        try:
            whois_zipcode = w.zipcode
            if not whois_zipcode:
                whois_zipcode = ""
        except:
            whois_zipcode = ""
            pass
        try:
            whois_registrar = w.registrar
            if len(whois_registrar) < 4:
                whois_registrar = whois_registrar[0]
            whois_ref_url = w.referral_url
        except:
            whois_registrar = ""
            whois_ref_url = ""
            pass
        try:
            nameservers = w.name_servers
            if not nameservers:
                nameservers = ""
        except:
            nameservers = ""
            pass
        try:
            nameserver1 = w.name_servers[0]
            if not nameserver1:
                nameserver1 = ""
        except:
            nameserver1 = ""
            pass
        try:
            nameserver2 = w.name_servers[1]
            if not nameserver2:
                nameserver2 = ""
        except:
            nameserver2 = ""
            pass
        try:
            nameserver3 = w.name_servers[2]
            if not nameserver3:
                nameserver3 = ""
        except:
            nameserver3 = ""
            pass
        try:
            nameserver4 = w.name_servers[3]
            if not nameserver4:
                nameserver4 = ""
        except:
            nameserver4 = ""
            pass
        try:
            registrant_emails = w.emails
            if len(registrant_emails) > 5:
                registrant_email1 = w.emails
                registrant_email_table.append(registrant_email1)
            else:
                for registrant_email in registrant_emails:
                    registrant_email_table.append(registrant_email)
                    if emailindex == 1:
                        registrant_email1 = registrant_email
                    if emailindex == 2:
                        registrant_email2 = registrant_email
                    if emailindex == 3:
                        registrant_email3 = registrant_email
                    if emailindex == 4:
                        registrant_email4 = registrant_email
                    emailindex = emailindex + 1
        except:
            pass
    except:
        pass
    try:
        try:
            domain_ipaddr = socket.gethostbyname(domainname)
        except:
            domain_ipaddr = "- -"
            pass
        if domain_ipaddr != "- -":
            obj = IPWhois(domain_ipaddr)
            results = obj.lookup_whois()
            domain_asnid = "AS" + results['asn']
            if domain_asnid == "":
                domain_asnid = "- -"
            try:
                domain_country = results['asn_country_code']
                if domain_country == "":
                    domain_country = "- -"
            except:
                domain_country = "- -"
                pass
            try:
                domain_asn_name = results['nets'][0]['name']
                if domain_asn_name == "" or domain_asn_name is None:
                    domain_asn_name = "- -"
            except:
                domain_asn_name = "- -"
                pass
        else:
            domain_asnid = "- -"
            domain_country = "- -"
            domain_asn_name = "- -"
    except:
        domain_asnid = "- -"
        domain_asnid = "- -"
        domain_country = "- -"
        domain_asn_name = "- -"
        pass
    #################### BLOCKLIST FUNCTION BELOW ##################
    try:
        url = "https://www.urlvoid.com/scan/" + domainname + "/"
        results = requests.get(url, headers=user_agent).content
        soup = BeautifulSoup(results, 'html.parser')
        t = soup.find('span', {'class': 'label-danger'})
        urlvoid_bl = "URLVOID: " + t.text
        print(urlvoid_bl)
        #return urlvoid_bl
    except:
        urlvoid_bl = ""
        pass
    try:
        url2 = "https://fortiguard.com/webfilter?q=" + domainname + "&version=8"
        results2 = requests.get(url2, headers=user_agent).content
        soup2 = BeautifulSoup(results2, 'html.parser')
        t2 = soup2.find("meta", property="description")
        fortiguard = "FORTIGUARD " + str(t2["content"])
        print(fortiguard)
        #return fortiguard
    except:
        fortiguard = ""
        pass
    try:
        url3 = "http://www.siteadvisor.com/sitereport.html?url=" + domainname
        results3 = requests.get(url3, headers=user_agent).content
        soup3 = BeautifulSoup(results3, 'html.parser')
        t3 = soup3.find('a').contents[0]
        siteadvisor_bl = "SITEADVISOR: " + str(t3)
        print(siteadvisor_bl)
        #return fortiguard
    except:
        siteadvisor_bl = ""
        pass
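    # Google Safe Browsing lookup for the domain.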
    try:
        gsb_lookup = SafeBrowsing(gsb_apikey)
        results4 = gsb_lookup.lookup_urls([domainname])
        gsb_status = str(results4[domainname]['malicious'])
        gsb_platforms = results4[domainname]['platforms'][0]
        gsb_threats = results4[domainname]['threats'][0]
        print("GOOGLE SAFE BROWSING API4: " + gsb_status + " || " +
              gsb_platforms + " || " + gsb_threats)
    except:
        gsb_status = ""
        gsb_platforms = ""
        gsb_threats = ""
        pass
    try:
        url5 = "https://www.abuseipdb.com/check/" + domainname
        results5 = requests.get(url5, headers=user_agent).content
        soup5 = BeautifulSoup(results5, 'html.parser')
        abusedb_status = soup5.find_all('h3')[0].contents[2].strip().strip(
            " <tr>")
        if abusedb_status == "was found in our database!":
            abusedb_reported = soup5.find('div', {
                'class': 'well'
            }).contents[3].contents[1].contents[0]
            abusedb_reported = str(abusedb_reported)
            abusedb_confidence = soup5.find('div', {
                'class': 'well'
            }).contents[3].contents[3].contents[0]
            abusedb_confidence = str(abusedb_confidence)
            print("ABUSEDB : " + abusedb_status + " || " + abusedb_reported +
                  " || " + abusedb_confidence)
        else:
            abusedb_status = ""
            abusedb_reported = ""
            abusedb_confidence = ""
    except:
        abusedb_status = ""
        abusedb_reported = ""
        abusedb_confidence = ""
        pass
    try:
        output = domainname + ";" + orgname1 + ";" + registrant_name1 + ";" + registrant_email1 + ";" + registrant_email2 + ";" + registrant_email3 + ";" + registrant_email4 + ";" + registrant_country + ";" + whois_city + ";" + whois_zipcode + ";" + nameserver1 + ";" + nameserver2 + ";" + domain_ipaddr + ";" + domain_asnid + ";" + domain_asn_name + ";" + domain_country + ";" + gsb_status + ";" + gsb_platforms + ";" + gsb_threats + ";" + fortiguard + ";" + urlvoid_bl + ";" + siteadvisor_bl + ";" + abusedb_status + ";" + abusedb_reported + ";" + abusedb_confidence + "\n"
        filename1 = "WABBIT-LOOKUP-RESULTS.csv"
        with open(filename1, "a") as outputfile:
            outputfile.write(output)
        output = ""
    except:
        print("!!!!     ERROR     !!!!")
        pass
Example #16
class URLClassifier(DelegatableClassifier, Serializable['URLClassifier']):
    def __init__(self, train_mails: List[Mail], train_labels: List[int],
                 target_attribute: MailAttributes, config):
        super().__init__(train_mails, train_labels, target_attribute, config)
        api_token = config.google_api_token
        if api_token is None:
            logging.fatal(
                "Google API Token is not set. Unable to initialize URLClassifier."
            )
            raise ValueError(
                "Google API Token is not set. Unable to initialize URLClassifier."
            )
        self.target_attribute = target_attribute
        self.safe_browsing = SafeBrowsing(api_token)
        self.url_extractor = URLExtract()
        self.url_extractor.extract_email = False
        self.url_extractor.get_stop_chars_left().add(':')
        self.checked_urls: dict = {}

    def train(self, mails: List[Mail] = None, labels: List[int] = None):
        pass

    def classify(self, mails: List[Mail]) -> List[float]:
        mail_dict: dict = {}
        new_urls: set = set()
        for mail in mails:
            text = self.target_attribute(mail)
            urls = set(self.url_extractor.find_urls(text, only_unique=True))
            new_urls.update(self.filter_new(urls))
            mail_dict[mail] = urls
        # empty list is false
        if new_urls:
            self.check_urls(new_urls)
        # checked urls should now contain every url
        # check if any url in each mail is malicious
        # float(True) is 1 float(False) is 0
        return [
            float(any(self.checked_urls[url] for url in mail_dict[mail]))
            for mail in mails
        ]

    def check_urls(self, urls: Set[str]):
        # api only supports 500 urls at a time
        needed_parts = int(len(urls) / 500) + 1
        response: dict = {}
        for batch in np.array_split(list(urls), needed_parts):
            try:
                response.update(self.safe_browsing.lookup_urls(batch))
            except KeyError as ex:
                # catch exception for bug in safe_browsing module
                if ex.args[0] == 'details':
                    for url in batch:
                        response.update({url: {"malicious": False}})
                else:
                    raise ex
        for url in urls:
            try:
                self.checked_urls[url] = response[url]['malicious']
            except KeyError:
                logging.info("URL could not be classified: " + url)
                self.checked_urls[url] = False

    @property
    def save_folder(self):
        return None

    def serialize(self, sub_folder: str = None):
        pass

    def deserialize(self, sub_folder: str = None) -> 'URLClassifier':
        return self

    def filter_new(self, urls: set) -> set:
        new_urls: set = set()
        for url in urls:
            if self.checked_urls.get(url) is None:
                new_urls.add(url)
        return new_urls
Example #17
 def __init__(self, config):
     levelname = config.get('level', 'debug')
     self.level = self.LEVEL_MAPPINGS.get(levelname, logging.DEBUG)
     self.api = SafeBrowsing('')
Example #18
def compare_with_google(url, api):
    s = SafeBrowsing(api)
    urls = [url]
    r = s.lookup_urls(urls)
    return r
Example #19
def setSafeBrowsingAPIKey(APIKey):
    global safeBrowsingAPIKey, safeBrowsingInstance
    safeBrowsingAPIKey = APIKey
    safeBrowsingInstance = SafeBrowsing(safeBrowsingAPIKey)
Example #20
# Author: Leo07866
# Date: 09 June 2019

from pysafebrowsing import SafeBrowsing
# from termcolor import colored
import os

# Create a Safe Browsing client using my Google API key.
google_api_key = SafeBrowsing("API-KEY-HERE")
'''

Creates a function that asks the user for the path of the urlList file.
If the user enters a wrong path, the programme reports "file does not exist".
If the path is correct, it opens the file, reads through the lines, loops through each line,
checks each URL with the lookup client and prints both malicious and safe URLs.

'''


def safe_browsing():
    global line
    user_input = input('[!][!] Enter path: ')

    if not os.path.exists(user_input):
        raise IOError('[X][X] file does not exist, please check path again!!')
    url_dir = open(user_input)
    line = url_dir.readline()
    while line:
        url = line.strip()
        threat_list = google_api_key.lookup_urls([url])
        print('==============================================')
        print('[x][x] URL: ' + url + ' ' + str(threat_list))
        line = url_dir.readline()  # advance to the next line; otherwise the loop never ends
    url_dir.close()
Example #21
def safb(Tester):
    KEY = '<Google Safe Browsing API here>'
    from pysafebrowsing import SafeBrowsing
    s = SafeBrowsing(KEY)
    r = s.lookup_url(Tester)
    print("\nGoogle safe browsing " + str(r))
Example #22
import re
from pysafebrowsing import SafeBrowsing
import sys

fileName = sys.argv[1]
s = SafeBrowsing("AIzaSyC5zRGDds2v6--ZlDDyM274sv1Ueg4jC9M")

f = open(fileName, "r", encoding='utf-8', errors='ignore')
lines = f.readlines()
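# Check each extracted URL with Safe Browsing and append the verdict to safelink.txt.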
for line in lines:
    link = re.findall(r'(https?://[^\s]+)', line.strip())
    r = s.lookup_urls(link)
    print(str(r))
    w = open("safelink.txt", "a+", encoding='utf-8', errors='ignore')
    w.write(str(r) + "\n")
    w.close()
f.close()
Example #23
  elif i in openphish_urls_list:
    print("This is a phishing site : " , i)
    try:
        print('The main domain:', data['domain'])
    except Exception as e:
       	pass
    try:
      reg = regiContacts[0]
      print("Organization name: ",reg['organization'])
    except:
      pass
    rcheck = 1

  else:
  	try:
  		s = SafeBrowsing("AIzaSyDZAITEKchXC5BStNgv3guaQY_IYIiKoKg") # Google Safebrowing API
  		print(i)
  		r = s.lookup_urls([i])
  		safebrowsing_check = r[i]['malicious']
  		if(safebrowsing_check == True):
  			print("This is a phishing site : " , i)
  			try:
  				print('The main domain:', data['domain'])
  			except Exception as e:
  				pass
  			try:
  				reg = regiContacts[0]
  				print("Organization name: ",reg['organization'])
  			except:
  				pass
  			rcheck = 1
Example #24
def whois_lookup(ipaddr):
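    # Collect ASN and blocklist reputation data for the IP address and append a semicolon-delimited row to WABBIT4IP-LOOKUP-RESULTS.csv.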
    try:
        obj = IPWhois(ipaddr)
        results = obj.lookup_whois()
        domain_asnid = "AS" + results['asn']
        if domain_asnid == "":
            domain_asnid = ""
        try:
            domain_country = results['asn_country_code']
            if domain_country == "":
                domain_country = "- -"
        except:
            domain_country = "- -"
            pass
        try:
            domain_asn_name = results['nets'][0]['name']
            if domain_asn_name == "":
                domain_asn_name = "- -"
        except:
            domain_asn_name = "- -"
            pass
    except:
        domain_asnid = "- -"
        domain_country = "- -"
        domain_asn_name = "- -"
    #################### BLOCKLIST FUNCTION BELOW ##################
    try:
        url = "https://www.urlvoid.com/scan/" + ipaddr + "/"
        results = requests.get(url, headers=user_agent).content
        soup = BeautifulSoup(results, 'html.parser')
        t = soup.find('span', {'class': 'label-danger'})
        urlvoid_bl = "URLVOID: " + t.text
        print(urlvoid_bl)
        #return urlvoid_bl
    except:
        urlvoid_bl = ""
        pass
    try:
        url2 = "https://fortiguard.com/webfilter?q=" + ipaddr + "&version=8"
        results2 = requests.get(url2, headers=user_agent).content
        soup2 = BeautifulSoup(results2, 'html.parser')
        t2 = soup2.find("meta", property="description")
        fortiguard = "FORTIGUARD " + str(t2["content"])
        print(fortiguard)
        #return fortiguard
    except:
        fortiguard = ""
        pass
    try:
        url3 = "http://www.siteadvisor.com/sitereport.html?url=" + ipaddr
        results3 = requests.get(url3, headers=user_agent).content
        soup3 = BeautifulSoup(results3, 'html.parser')
        t3 = soup3.find('a').contents[0]
        siteadvisor_bl = "SITEADVISOR: " + str(t3)
        print(siteadvisor_bl)
        #return fortiguard
    except:
        siteadvisor_bl = ""
        pass
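    # Google Safe Browsing lookup for the IP address.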
    try:
        gsb_lookup = SafeBrowsing(gsb_apikey)
        results4 = gsb_lookup.lookup_urls([ipaddr])
        gsb_status = str(results4[ipaddr]['malicious'])
        gsb_platforms = results4[ipaddr]['platforms'][0]
        gsb_threats = results4[ipaddr]['threats'][0]
        print("GOOGLE SAFE BROWSING API4: " + gsb_status + " || " +
              gsb_platforms + " || " + gsb_threats)
    except:
        gsb_status = ""
        gsb_platforms = ""
        gsb_threats = ""
        pass
    try:
        url5 = "https://www.abuseipdb.com/check/" + ipaddr
        results5 = requests.get(url5, headers=user_agent).content
        soup5 = BeautifulSoup(results5, 'html.parser')
        abusedb_status = soup5.find_all('h3')[0].contents[2].strip().strip(
            " <tr>")
        if abusedb_status == "was found in our database!":
            abusedb_reported = soup5.find('div', {
                'class': 'well'
            }).contents[3].contents[1].contents[0]
            abusedb_reported = str(abusedb_reported)
            abusedb_confidence = soup5.find('div', {
                'class': 'well'
            }).contents[3].contents[3].contents[0]
            abusedb_confidence = str(abusedb_confidence)
            print("ABUSEDB : " + abusedb_status + " || " + abusedb_reported +
                  " || " + abusedb_confidence)
        else:
            abusedb_status = ""
            abusedb_reported = ""
            abusedb_confidence = ""
    except:
        abusedb_status = ""
        abusedb_reported = ""
        abusedb_confidence = ""
        pass
    try:
        output = ipaddr + ";" + domain_asnid + ";" + domain_asn_name + ";" + domain_country + ";" + gsb_status + ";" + gsb_platforms + ";" + gsb_threats + ";" + fortiguard + ";" + siteadvisor_bl + ";" + abusedb_status + ";" + abusedb_reported + ";" + abusedb_confidence + "\n"
    except:
        output = ipaddr + ";" + "error\n"
    filename1 = "WABBIT4IP-LOOKUP-RESULTS.csv"
    with open(filename1, "a") as outputfile:
        outputfile.write(output)
    output = ""