def upload_file(self, file_path):
    """Upload a file to VirusTotal for scanning.

    :param file_path: path of the file to upload
    :return: parsed JSON response / None on any failure
    """
    try:
        url = self.base_url + "scan"
        headers = {"apikey": settings.VT_API_KEY}
        try:
            proxies, verify = upstream_proxy('https')
        except Exception:
            PrintException("[ERROR] Setting upstream proxy")
            # Fall back to a direct connection so `proxies`/`verify`
            # are always bound (a NameError lurked below before).
            proxies, verify = None, True
        try:
            # Context manager closes the upload handle; it leaked before.
            with open(file_path, 'rb') as upload_handle:
                files = {'file': upload_handle}
                response = requests.post(url,
                                         files=files,
                                         data=headers,
                                         proxies=proxies,
                                         verify=verify)
            if response.status_code == 403:
                print(
                    "[ERROR] VirusTotal Permission denied, wrong api key?")
                return None
        except Exception:
            print(
                "[ERROR] VirusTotal ConnectionError, check internet connectivity"
            )
            return None
        return response.json()
    except Exception:
        PrintException("[ERROR] in VirusTotal upload_file")
        return None
def get_report(self, file_hash):
    """Fetch the VirusTotal scan report for a hash.

    :param file_hash: md5/sha1/sha256
    :return: parsed JSON response / None on any failure
    """
    try:
        url = self.base_url + 'report'
        params = {
            'apikey': settings.VT_API_KEY,
            'resource': file_hash,
        }
        headers = {"Accept-Encoding": "gzip, deflate"}
        try:
            proxies, verify = upstream_proxy('https')
        except Exception:
            PrintException("[ERROR] Setting upstream proxy")
            # Fall back to a direct connection so `proxies`/`verify`
            # are always bound (a NameError lurked below before).
            proxies, verify = None, True
        try:
            response = requests.get(url,
                                    params=params,
                                    headers=headers,
                                    proxies=proxies,
                                    verify=verify)
            if response.status_code == 403:
                print("[ERROR] VirusTotal Permission denied, wrong api key?")
                return None
        except Exception:
            print(
                "[ERROR] VirusTotal ConnectionError, check internet connectivity")
            return None
        try:
            return response.json()
        except ValueError:
            # Body was not valid JSON.
            return None
    except Exception:
        PrintException("[ERROR] in VirusTotal get_report")
        return None
def update_malware_db():
    """Check for update in malware DB and refresh the local copy."""
    try:
        proxies, verify = upstream_proxy('http')
    except Exception:
        PrintException("[ERROR] Setting upstream proxy")
        # Fall back to a direct connection so `proxies`/`verify`
        # are always bound (a NameError lurked below before).
        proxies, verify = None, True
    try:
        url = "http://www.malwaredomainlist.com/mdlcsv.php"
        response = requests.get(url, timeout=3,
                                proxies=proxies, verify=verify)
        data = response.content
        mal_db = os.path.join(MALWARE_DB_DIR, 'malwaredomainlist')
        # Context manager guarantees the temp file is removed even on error.
        with tempfile.NamedTemporaryFile() as tmp_dwd:
            tmp_dwd.write(data)
            tmp_dwd.seek(0)
            # Check1: SHA256 Change
            if sha256(tmp_dwd.name) != sha256(mal_db):
                # DB needs update
                # Check2: DB Syntax Changed
                line = tmp_dwd.readline().decode("utf-8", "ignore")
                lst = line.split('",')
                if len(lst) == 10:
                    # DB Format is not changed. Let's update DB
                    print("\n[INFO] Updating Malware Database....")
                    shutil.copyfile(tmp_dwd.name, mal_db)
                else:
                    print("\n[WARNING] Malware Database format from malwaredomainlist.com changed. Database is not updated. Please report to: https://github.com/MobSF/Mobile-Security-Framework-MobSF/issues")
            else:
                print("\n[INFO] Malware Database is up-to-date.")
    except Exception:
        PrintException("[ERROR] Malware DB Update")
def update_malware_db():
    """Check for update in malware DB."""
    try:
        proxies, verify = upstream_proxy('http')
    except Exception:
        logger.exception('[ERROR] Setting upstream proxy')
        # Fall back to a direct connection so `proxies`/`verify`
        # are always bound (a NameError lurked below before).
        proxies, verify = None, True
    try:
        url = settings.MALWARE_DB_URL
        response = requests.get(url, timeout=3,
                                proxies=proxies, verify=verify)
        data = response.content
        mal_db = os.path.join(MALWARE_DB_DIR, 'malwaredomainlist')
        # Context manager guarantees the temp file is removed even on error.
        with tempfile.NamedTemporaryFile() as tmp_dwd:
            tmp_dwd.write(data)
            tmp_dwd.seek(0)
            # Check1: SHA256 Change
            if sha256(tmp_dwd.name) != sha256(mal_db):
                # DB needs update
                # Check2: DB Syntax Changed
                line = tmp_dwd.readline().decode('utf-8', 'ignore')
                lst = line.split('",')
                if len(lst) == 10:
                    # DB Format is not changed. Let's update DB
                    logger.info('Updating Malware Database....')
                    shutil.copyfile(tmp_dwd.name, mal_db)
                else:
                    logger.info('Malware Database format from '
                                'malwaredomainlist.com has changed.'
                                ' Database is not updated. '
                                'Please report to: https://github.com/'
                                'MobSF/Mobile-Security-Framework-MobSF/issues')
            else:
                logger.info('Malware Database is up-to-date.')
    except Exception:
        logger.exception('[ERROR] Malware DB Update')
def upload_file(self, file_path):
    """Upload a file to VirusTotal for scanning.

    :param file_path: file path to upload
    :return: json response / None
    """
    try:
        url = self.base_url + "scan"
        headers = {"apikey": settings.VT_API_KEY}
        try:
            proxies, verify = upstream_proxy('https')
        except Exception:
            PrintException("[ERROR] Setting upstream proxy")
            # Fall back to a direct connection so `proxies`/`verify`
            # are always bound (a NameError lurked below before).
            proxies, verify = None, True
        try:
            # Context manager closes the upload handle; it leaked before.
            with open(file_path, 'rb') as upload_handle:
                files = {'file': upload_handle}
                response = requests.post(url,
                                         files=files,
                                         data=headers,
                                         proxies=proxies,
                                         verify=verify)
            if response.status_code == 403:
                print("[ERROR] VirusTotal Permission denied, wrong api key?")
                return None
        except Exception:
            print(
                "[ERROR] VirusTotal ConnectionError, check internet connectivity")
            return None
        return response.json()
    except Exception:
        PrintException("[ERROR] in VirusTotal upload_file")
        return None
def get_report(self, file_hash):
    """Fetch the VirusTotal scan report for a hash.

    :param file_hash: md5/sha1/sha256
    :return: json response / None
    """
    try:
        url = self.base_url + 'report'
        params = {'apikey': settings.VT_API_KEY, 'resource': file_hash}
        headers = {"Accept-Encoding": "gzip, deflate"}
        try:
            proxies, verify = upstream_proxy('https')
        except Exception:
            PrintException("[ERROR] Setting upstream proxy")
            # Fall back to a direct connection so `proxies`/`verify`
            # are always bound (a NameError lurked below before).
            proxies, verify = None, True
        try:
            response = requests.get(url,
                                    params=params,
                                    headers=headers,
                                    proxies=proxies,
                                    verify=verify)
            if response.status_code == 403:
                logger.error(
                    "VirusTotal Permission denied, wrong api key?")
                return None
        except Exception:
            logger.error(
                "VirusTotal ConnectionError, check internet connectivity")
            return None
        try:
            return response.json()
        except ValueError:
            # Body was not valid JSON.
            return None
    except Exception:
        PrintException("[ERROR] in VirusTotal get_report")
        return None
def update_malware_db():
    """Check for update in malware DB."""
    try:
        proxies, verify = upstream_proxy('http')
    except Exception:
        PrintException("[ERROR] Setting upstream proxy")
        # Fall back to a direct connection so `proxies`/`verify`
        # are always bound (a NameError lurked below before).
        proxies, verify = None, True
    try:
        url = "http://www.malwaredomainlist.com/mdlcsv.php"
        response = requests.get(url, timeout=3,
                                proxies=proxies, verify=verify)
        data = response.content
        mal_db = os.path.join(MALWARE_DB_DIR, 'malwaredomainlist')
        # Context manager guarantees the temp file is removed even on error.
        with tempfile.NamedTemporaryFile() as tmp_dwd:
            tmp_dwd.write(data)
            tmp_dwd.seek(0)
            # Check1: SHA256 Change
            if sha256(tmp_dwd.name) != sha256(mal_db):
                # DB needs update
                # Check2: DB Syntax Changed
                line = tmp_dwd.readline().decode("utf-8", "ignore")
                lst = line.split('",')
                if len(lst) == 10:
                    # DB Format is not changed. Let's update DB
                    logger.info("Updating Malware Database....")
                    shutil.copyfile(tmp_dwd.name, mal_db)
                else:
                    logger.info("Malware Database format from malwaredomainlist.com changed. Database is not updated. "
                                "Please report to: https://github.com/MobSF/Mobile-Security-Framework-MobSF/issues")
            else:
                logger.info("Malware Database is up-to-date.")
    except Exception:
        PrintException("[ERROR] Malware DB Update")
def upload_file(self, file_path):
    """
    Upload File to VT.

    :param file_path: file path to upload
    :return: json response / None
    """
    try:
        url = self.base_url + 'scan'
        headers = {'apikey': settings.VT_API_KEY}
        try:
            proxies, verify = upstream_proxy('https')
        except Exception:
            logger.exception('Setting upstream proxy')
            # Fall back to a direct connection so `proxies`/`verify`
            # are always bound (a NameError lurked below before).
            proxies, verify = None, True
        try:
            # Context manager closes the upload handle; it leaked before.
            with open(file_path, 'rb') as upload_handle:
                files = {'file': upload_handle}
                response = requests.post(url,
                                         files=files,
                                         data=headers,
                                         proxies=proxies,
                                         verify=verify)
            if response.status_code == 403:
                logger.error(
                    'VirusTotal Permission denied, wrong api key?')
                return None
        except Exception:
            logger.error(
                'VirusTotal Connection Error, check internet connectivity')
            return None
        return response.json()
    except Exception:
        logger.exception('VirusTotal upload_file')
        return None
def start_proxy(port, project):
    """Start HTTPtools in Proxy Mode.

    :param port: capture port (converted to str for the CLI)
    :param project: project name passed to httptools
    """
    argz = ['httptools', '-m', 'capture', '-p', str(port), '-n', project]
    proxies, _ = upstream_proxy('http')
    if proxies['http']:
        argz.extend(['-u', proxies['http']])
    # subprocess.DEVNULL avoids the leaked os.devnull file handle that
    # open(os.devnull, 'w') created and never closed.
    subprocess.Popen(argz,
                     stdout=subprocess.DEVNULL,
                     stderr=subprocess.STDOUT)
def app_search(app_id):
    """IOS Get App Details from App Store.

    :param app_id: bundle id of the app to look up
    :return: dict of details with 'error': False on success,
             {'error': True} on lookup failure
    """
    logger.info('Fetching Details from App Store: %s', app_id)
    lookup_url = 'https://itunes.apple.com/lookup'
    req_url = '{}?bundleId={}&country={}&entity=software'.format(
        lookup_url, app_id, 'us')
    headers = {
        'User-Agent': ('Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) '
                       'AppleWebKit/537.36 (KHTML, like Gecko) '
                       'Chrome/39.0.2171.95 Safari/537.36')
    }
    try:
        proxies, verify = upstream_proxy('https')
        req = requests.get(req_url,
                           headers=headers,
                           proxies=proxies,
                           verify=verify)
        resp = req.json()
        if resp.get('results'):
            det = resp['results'][0]
            # .get() keeps a single missing field from raising KeyError
            # and discarding the whole successful lookup.
            return {
                'features': det.get('features') or [],
                'icon': (det.get('artworkUrl512')
                         or det.get('artworkUrl100')
                         or det.get('artworkUrl60')
                         or ''),
                'developer_id': det.get('artistId', ''),
                'developer': det.get('artistName', ''),
                'developer_url': det.get('artistViewUrl', ''),
                'developer_website': det.get('sellerUrl', ''),
                'supported_devices': det.get('supportedDevices', []),
                'title': det.get('trackName', ''),
                'app_id': det.get('bundleId', app_id),
                'category': det.get('genres') or [],
                'description': det.get('description', ''),
                'price': det.get('price', ''),
                'itunes_url': det.get('trackViewUrl', ''),
                'score': det.get('averageUserRating', ''),
                'error': False,
            }
        logger.warning('Unable to get app details.')
        return {'error': True}
    except Exception:
        logger.warning('Unable to get app details')
        return {'error': True}
def open_firebase(url):
    """Probe for an openly readable Firebase database.

    Builds `<scheme>://<host>/.json` from *url* and requests it; an HTTP
    200 means the database is world-readable.

    :return: (db_json_url, True) when open, otherwise (url, False)
    """
    try:
        parsed = urlparse(url)
        db_url = "{}://{}/.json".format(parsed.scheme, parsed.netloc)
        proxies, verify = upstream_proxy('https')
        headers = {
            'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36'}
        response = requests.get(db_url,
                                headers=headers,
                                proxies=proxies,
                                verify=verify)
        if response.status_code == 200:
            return db_url, True
    except Exception as exp:
        logger.warning('Open Firebase DB detection failed. %s', exp)
    return url, False
def app_search(app_id):
    """Get App Details from AppMonsta."""
    det = {'error': True}
    if not settings.APPMONSTA_API:
        return det
    logger.info('Fetching Details from AppMonsta: %s', app_id)
    req_url = '{}{}.json?country={}'.format(
        settings.APPMONSTA_URL, app_id, 'US')
    headers = {
        'User-Agent': ('Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) '
                       'AppleWebKit/537.36 (KHTML, like Gecko) '
                       'Chrome/39.0.2171.95 Safari/537.36'),
        'Accept-Encoding': 'deflate, gzip'
    }
    try:
        proxies, verify = upstream_proxy('https')
        req = requests.get(req_url,
                           auth=(settings.APPMONSTA_API, 'X'),
                           headers=headers,
                           proxies=proxies,
                           verify=verify,
                           stream=True)
        resp = req.json()
        # 'app_name' is mandatory; all other fields default to ''.
        det['title'] = resp['app_name']
        optional_fields = (
            ('score', 'all_rating'),
            ('installs', 'downloads'),
            ('price', 'price'),
            ('androidVersionText', 'requires_os'),
            ('genre', 'genre'),
            ('url', 'store_url'),
            ('developer', 'publisher_name'),
            ('developerId', 'publisher_id'),
            ('developerAddress', 'publisher_address'),
            ('developerWebsite', 'publisher_url'),
            ('developerEmail', 'publisher_email'),
            ('released', 'release_date'),
            ('privacyPolicy', 'privacy_url'),
        )
        for out_key, src_key in optional_fields:
            det[out_key] = resp.get(src_key, '')
        # Strip HTML markup from the description.
        soup = BeautifulSoup(resp.get('description', ''), features='lxml')
        det['description'] = soup.get_text()
        det['error'] = False
        return det
    except Exception:
        logger.warning('Unable to get app details')
        return det
def _update_tracker_db(self):
    """Update Trackers DB."""
    try:
        proxies, verify = upstream_proxy('http')
    except Exception:
        logger.exception('[ERROR] Setting upstream proxy')
        # Fall back to a direct connection so `proxies`/`verify`
        # are always bound (a NameError lurked below before).
        proxies, verify = None, True
    try:
        exodus_url = settings.TRACKERS_DB_URL
        res = requests.get(exodus_url,
                           timeout=3,
                           proxies=proxies,
                           allow_redirects=True,
                           verify=verify)
        data = res.content
        # Context manager guarantees the temp file is removed even on error.
        with tempfile.NamedTemporaryFile() as tmp_dwd:
            tmp_dwd.write(data)
            tmp_dwd.seek(0)
            # Check1: SHA256 Change
            if sha256(tmp_dwd.name) != sha256(self.tracker_db):
                # DB needs update
                # Check2: DB Syntax Changed
                data = json.loads(tmp_dwd.read().decode('utf-8', 'ignore'))
                is_db_format_good = False
                if 'trackers' in data:
                    if '1' in data['trackers']:
                        if 'code_signature' in data['trackers']['1']:
                            is_db_format_good = True
                if is_db_format_good:
                    # DB Format is not changed. Let's update DB
                    logger.info('Updating Tracker Database....')
                    shutil.copyfile(tmp_dwd.name, self.tracker_db)
                else:
                    logger.info('Tracker Database format from '
                                'reports.exodus-privacy.eu.org has changed.'
                                ' Database is not updated. '
                                'Please report to: https://github.com/MobSF/'
                                'Mobile-Security-Framework-MobSF/issues')
            else:
                logger.info('Tracker Database is up-to-date.')
    except Exception:
        logger.exception('[ERROR] Tracker DB Update')
def app_search(app_id):
    """Look up an iOS app's metadata via the App Store lookup API."""
    logger.info("Fetching Details from App Store: %s", app_id)
    req_url = '{}?bundleId={}&country={}&entity=software'.format(
        'https://itunes.apple.com/lookup', app_id, 'us')
    headers = {
        'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36'}
    try:
        details = {}
        proxies, verify = upstream_proxy('https')
        response = requests.get(req_url,
                                headers=headers,
                                proxies=proxies,
                                verify=verify)
        payload = response.json()
        if payload['results']:
            details = payload['results'][0]
            return {
                'features': details['features'] or [],
                'icon': (details['artworkUrl512']
                         or details['artworkUrl100']
                         or details['artworkUrl60']
                         or ''),
                'developer_id': details['artistId'],
                'developer': details['artistName'],
                'developer_url': details['artistViewUrl'],
                'developer_website': details['sellerUrl'],
                'supported_devices': details['supportedDevices'],
                'title': details['trackName'],
                'app_id': details['bundleId'],
                'category': details['genres'] or [],
                'description': details['description'],
                'price': details['price'],
                'itunes_url': details['trackViewUrl'],
                'score': details['averageUserRating'],
                'error': False,
            }
        logger.warning('Unable to get app details.')
        return {'error': True}
    except Exception as exp:
        logger.warning('Unable to get app details. %s', exp)
        return {'error': True}
def pdf(request, api=False, jsonres=False):
    """Generate a PDF (or JSON) report for a previously scanned app.

    :param request: Django request; hash from POST['hash'] (API) or
                    GET['md5'] (web).
    :param api: True when called from the REST API — returns dicts
                instead of HttpResponse objects.
    :param jsonres: with api, return the report context instead of a PDF.
    """
    try:
        if api:
            checksum = request.POST['hash']
        else:
            checksum = request.GET['md5']
        # Only a 32-char lowercase MD5 is accepted as a scan handle.
        hash_match = re.match(r'^[0-9a-f]{32}$', checksum)
        if not hash_match:
            if api:
                return {'error': 'Invalid scan hash'}
            return HttpResponse(
                json.dumps({'md5': 'Invalid scan hash'}),
                content_type=ctype, status=500)
        # Do Lookups
        android_static_db = StaticAnalyzerAndroid.objects.filter(MD5=checksum)
        ios_static_db = StaticAnalyzerIOS.objects.filter(MD5=checksum)
        win_static_db = StaticAnalyzerWindows.objects.filter(MD5=checksum)
        if android_static_db.exists():
            context, template = handle_pdf_android(android_static_db)
        elif ios_static_db.exists():
            context, template = handle_pdf_ios(ios_static_db)
        elif win_static_db.exists():
            context, template = handle_pdf_win(win_static_db)
        else:
            if api:
                return {'report': 'Report not Found'}
            return HttpResponse(
                json.dumps({'report': 'Report not Found'}),
                content_type=ctype, status=500)
        # Do VT Scan only on binaries
        context['virus_total'] = None
        ext = os.path.splitext(context['file_name'].lower())[1]
        if settings.VT_ENABLED and ext != '.zip':
            app_bin = os.path.join(settings.UPLD_DIR,
                                   checksum + '/',
                                   checksum + ext)
            vt = VirusTotal.VirusTotal()
            context['virus_total'] = vt.get_result(app_bin, checksum)
        # Get Local Base URL — wkhtmltopdf loads assets via file://
        proto = 'file://'
        host_os = 'nix'
        if platform.system() == 'Windows':
            proto = 'file:///'
            host_os = 'windows'
        context['base_url'] = proto + settings.BASE_DIR
        context['dwd_dir'] = proto + settings.DWD_DIR
        context['host_os'] = host_os
        try:
            if api and jsonres:
                return {'report_dat': context}
            options = {
                'page-size': 'Letter',
                'quiet': '',
                'no-collate': '',
                'margin-top': '0.50in',
                'margin-right': '0.50in',
                'margin-bottom': '0.50in',
                'margin-left': '0.50in',
                'encoding': 'UTF-8',
                'custom-header': [
                    ('Accept-Encoding', 'gzip'),
                ],
                'no-outline': None,
            }
            # Added proxy support to wkhtmltopdf
            proxies, _ = upstream_proxy('https')
            if proxies['https']:
                options['proxy'] = proxies['https']
            html = template.render(context)
            pdf_dat = pdfkit.from_string(html, False, options=options)
            if api:
                return {'pdf_dat': pdf_dat}
            return HttpResponse(pdf_dat, content_type='application/pdf')
        except Exception as exp:
            logger.exception('Error Generating PDF Report')
            if api:
                return {
                    'error': 'Cannot Generate PDF/JSON',
                    'err_details': str(exp)}
            return HttpResponse(
                json.dumps({'pdf_error': 'Cannot Generate PDF',
                            'err_details': str(exp)}),
                content_type=ctype, status=500)
    except Exception as exp:
        logger.exception('Error Generating PDF Report')
        msg = str(exp)
        exp = exp.__doc__
        if api:
            return print_n_send_error_response(request, msg, True, exp)
        return print_n_send_error_response(request, msg, False, exp)
def pdf(request, api=False, jsonres=False):
    """Generate a customized PDF (or JSON) report for a scanned app.

    :param request: Django request; hash from POST['hash'] (API) or
                    GET['md5'] (web).
    :param api: True when called from the REST API — returns dicts
                instead of HttpResponse objects.
    :param jsonres: with api, return the report context instead of a PDF.
    """
    try:
        if api:
            checksum = request.POST['hash']
        else:
            checksum = request.GET['md5']
        # Only a 32-char lowercase MD5 is accepted as a scan handle.
        hash_match = re.match(r'^[0-9a-f]{32}$', checksum)
        if not hash_match:
            if api:
                return {'error': 'Invalid scan hash'}
            return HttpResponse(
                json.dumps({'md5': 'Invalid scan hash'}),
                content_type=ctype, status=500)
        # Do Lookups
        android_static_db = StaticAnalyzerAndroid.objects.filter(MD5=checksum)
        ios_static_db = StaticAnalyzerIOS.objects.filter(MD5=checksum)
        win_static_db = StaticAnalyzerWindows.objects.filter(MD5=checksum)
        if android_static_db.exists():
            key = settings.MODEL_K % ('StaticAnalyzerAndroid', checksum)
            context, template = handle_pdf_android(android_static_db, key)
        elif ios_static_db.exists():
            context, template = handle_pdf_ios(ios_static_db)
        elif win_static_db.exists():
            context, template = handle_pdf_win(win_static_db)
        else:
            if api:
                return {'report': 'Report not Found'}
            return HttpResponse(
                json.dumps({'report': 'Report not Found'}),
                content_type=ctype, status=500)
        # Do VT Scan only on binaries
        context['virus_total'] = None
        ext = os.path.splitext(context['file_name'].lower())[1]
        if settings.VT_ENABLED and ext != '.zip':
            app_bin = os.path.join(settings.UPLD_DIR,
                                   checksum + '/',
                                   checksum + ext)
            vt = VirusTotal.VirusTotal()
            context['virus_total'] = vt.get_result(app_bin, checksum)
        # Get Local Base URL — wkhtmltopdf loads assets via file://
        proto = 'file://'
        host_os = 'nix'
        if platform.system() == 'Windows':
            proto = 'file:///'
            host_os = 'windows'
        context['base_url'] = proto + settings.BASE_DIR
        context['dwd_dir'] = proto + settings.DWD_DIR
        context['host_os'] = host_os
        # Added: scan timestamp for the report.
        # (local renamed from `time` to avoid shadowing the stdlib module)
        scan_time = RecentScansDB.objects.get(MD5=checksum).TIMESTAMP
        context['time'] = scan_time
        # Added: extract certificate Subject and Issuer.
        # NOTE(review): re.search(...).group() raises AttributeError when
        # the pattern is absent; the outer handler turns that into an error
        # response — confirm certificate_info always contains both fields.
        certificate_info = context['certificate_analysis']['certificate_info']
        subject = re.search(
            r'Subject:.*', certificate_info).group().split(':')[1]
        issuer = re.search(
            r'Issuer:.*', certificate_info).group().split(':')[1]
        context['cer_subject'] = subject
        context['cer_issuer'] = issuer
        # Added: sort permissions by their status.
        perm_dict_items = context['permissions'].items()
        sorted_perm = sorted(perm_dict_items, key=lambda x: x[1]['status'])
        context['sorted_perm'] = sorted_perm
        # Test URL fixture data.
        context['test_url'] = [
            {
                "urls": [
                    "http://192.168.1.201:12345/ad_monitor/uploadReceiver.php?os=android&platform=domob"
                ],
                "path": "cn/domob/android/ads/C0027o.java",
                "results": [{
                    "phones": ["18337258710", "15160578228"],
                    "cards": [],
                    "passports": ["E16728736"],
                    "gps_lng_lat": [(78.356287, 120.834628)],
                    "sources": "http://192.168.1.201:12345/ad_monitor/uploadReceiver.php?os=android&platform=domob"
                }]
            },
            {
                "urls": ["http://www.google.com/loc/json"],
                "path": "cn/domob/android/ads/C0029q.java",
                "results": [{
                    "phones": [],
                    "cards": [],
                    "passports": [],
                    "gps_lng_lat": [],
                    "sources": ""
                }]
            },
            {
                "urls": ["http://r.domob.cn/a/"],
                "path": "cn/domob/android/ads/C0032t.java",
                "results": [{
                    "phones": [],
                    "cards": [],
                    "passports": [],
                    "gps_lng_lat": [],
                    "sources": ""
                }]
            },
            {
                "urls": ["http://e.domob.cn/event_report",
                         "http://r.domob.cn/a/"],
                "path": "cn/domob/android/ads/C0017e.java",
                "results": [{
                    "phones": ["18337258710", "15160578228"],
                    "cards": [],
                    "passports": ["E16728736"],
                    "gps_lng_lat": [(78.356287, 120.834628)],
                    "sources": "http://e.domob.cn/event_report"
                }]
            },
            {
                "urls": ["http://r.domob.cn/a/"],
                "path": "cn/domob/android/ads/C0022j.java",
                "results": [{
                    "phones": ["18337258710", "15160578228"],
                    "cards": [],
                    "passports": ["E16233236"],
                    "gps_lng_lat": [(78.356287, 120.834628)],
                    "sources": "http://r.domob.cn/a/"
                }]
            },
        ]
        try:
            if api and jsonres:
                return {'report_dat': context}
            options = {
                'page-size': 'A4',
                'quiet': '',
                'no-collate': '',
                'margin-top': '15mm',
                'margin-right': '10mm',
                'margin-bottom': '15mm',
                'margin-left': '10mm',
                'encoding': 'UTF-8',
                'header-line': '',
                # Spacing between header/footer and the body (default 0).
                'header-spacing': 2,
                'footer-spacing': 2,
                # Page number in the footer (localized).
                'footer-center': '第 [page] 页',
                'outline': '',
                # Depth of the PDF outline.
                'outline-depth': '2',
            }
            # BUG FIX: a trailing comma previously made xsl_path a 1-tuple,
            # so wkhtmltopdf received a tuple for 'xsl-style-sheet'.
            xsl_path = os.path.join(settings.BASE_DIR, "templates/my.xsl")
            toc = {
                'toc-level-indentation': '100',
                'disable-dotted-lines': '',
                'xsl-style-sheet': xsl_path,
            }
            html = template.render(context)
            # Added proxy support to wkhtmltopdf
            proxies, _ = upstream_proxy('https')
            if proxies['https']:
                options['proxy'] = proxies['https']
            pdf_dat = pdfkit.from_string(html, False, options, toc)
            if api:
                return {'pdf_dat': pdf_dat}
            return HttpResponse(pdf_dat, content_type='application/pdf')
        except Exception as exp:
            logger.exception('Error Generating PDF Report')
            if api:
                return {
                    'error': 'Cannot Generate PDF/JSON',
                    'err_details': str(exp)}
            return HttpResponse(
                json.dumps({'pdf_error': 'Cannot Generate PDF',
                            'err_details': str(exp)}),
                content_type=ctype, status=500)
    except Exception as exp:
        logger.exception('Error Generating PDF Report')
        msg = str(exp)
        exp = exp.__doc__
        if api:
            return print_n_send_error_response(request, msg, True, exp)
        return print_n_send_error_response(request, msg, False, exp)