def update_frida_server(arch, version):
    """Download the Frida server binary for a given arch/version.

    Returns True when the binary is already present locally or was
    downloaded successfully, False otherwise.
    """
    download_dir = Path(settings.DWD_DIR)
    fserver = f'frida-server-{version}-android-{arch}'
    frida_bin = download_dir / fserver
    if frida_bin.is_file():
        # Already downloaded; nothing to do.
        return True
    if not is_internet_available():
        return False
    # Default to a direct connection so a failure in upstream_proxy()
    # does not leave proxies/verify unbound — the original code raised
    # NameError at the requests.get() call in that case.
    proxies = {'http': None, 'https': None}
    verify = True
    try:
        proxies, verify = upstream_proxy('https')
    except Exception:
        logger.exception('[ERROR] Setting upstream proxy')
    try:
        # Release metadata lookup; the asset list carries download URLs.
        response = requests.get(f'{settings.FRIDA_SERVER}{version}',
                                timeout=3,
                                proxies=proxies,
                                verify=verify)
        for item in response.json()['assets']:
            if item['name'] == f'{fserver}.xz':
                url = item['browser_download_url']
                return download_frida_server(url, version, fserver)
        # No matching asset found for this arch/version.
        return False
    except Exception:
        logger.exception('[ERROR] Fetching Frida Server Release')
        return False
def start_proxy(port, project):
    """Start HTTPtools in Proxy (capture) Mode.

    :param port: TCP port the proxy listens on
    :param project: capture/project name passed to httptools
    """
    argz = ['httptools', '-m', 'capture', '-p', str(port), '-n', project]
    proxies, _ = upstream_proxy('http')
    if proxies['http']:
        # Chain through the configured upstream HTTP proxy.
        argz.extend(['-u', proxies['http']])
    # subprocess.DEVNULL replaces the original open(os.devnull, 'w'),
    # whose file handle was never closed (resource leak).
    subprocess.Popen(argz,
                     stdout=subprocess.DEVNULL,
                     stderr=subprocess.STDOUT)
def app_search(app_id):
    """IOS Get App Details from App Store."""
    logger.info('Fetching Details from App Store: %s', app_id)
    lookup_url = settings.ITUNES_URL
    req_url = '{}?bundleId={}&country={}&entity=software'.format(
        lookup_url, app_id, 'us')
    headers = {
        'User-Agent': ('Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) '
                       'AppleWebKit/537.36 (KHTML, like Gecko) '
                       'Chrome/39.0.2171.95 Safari/537.36')
    }
    try:
        det = {}
        proxies, verify = upstream_proxy('https')
        response = requests.get(req_url,
                                headers=headers,
                                proxies=proxies,
                                verify=verify)
        resp = response.json()
        if resp['results']:
            det = resp['results'][0]
            # Prefer the largest artwork available.
            icon = (det['artworkUrl512']
                    or det['artworkUrl100']
                    or det['artworkUrl60']
                    or '')
            return {
                'features': det['features'] or [],
                'icon': icon,
                'developer_id': det['artistId'],
                'developer': det['artistName'],
                'developer_url': det['artistViewUrl'],
                'developer_website': det['sellerUrl'],
                'supported_devices': det['supportedDevices'],
                'title': det['trackName'],
                'app_id': det['bundleId'],
                'category': det['genres'] or [],
                'description': det['description'],
                'price': det['price'],
                'itunes_url': det['trackViewUrl'],
                'score': det['averageUserRating'],
                'error': False,
            }
        logger.warning('Unable to get app details.')
        return {'error': True}
    except Exception:
        logger.warning('Unable to get app details')
        return {'error': True}
def app_search(app_id):
    """Get App Details from AppMonsta."""
    det = {'error': True}
    if not settings.APPMONSTA_API:
        # No API key configured; skip the lookup entirely.
        return det
    logger.info('Fetching Details from AppMonsta: %s', app_id)
    lookup_url = settings.APPMONSTA_URL
    req_url = '{}{}.json?country={}'.format(lookup_url, app_id, 'US')
    headers = {
        'User-Agent': ('Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) '
                       'AppleWebKit/537.36 (KHTML, like Gecko) '
                       'Chrome/39.0.2171.95 Safari/537.36'),
        'Accept-Encoding': 'deflate, gzip'
    }
    # Output key -> AppMonsta response key; every one of these is
    # optional in the response and defaults to ''.
    optional_fields = {
        'score': 'all_rating',
        'installs': 'downloads',
        'price': 'price',
        'androidVersionText': 'requires_os',
        'genre': 'genre',
        'url': 'store_url',
        'developer': 'publisher_name',
        'developerId': 'publisher_id',
        'developerAddress': 'publisher_address',
        'developerWebsite': 'publisher_url',
        'developerEmail': 'publisher_email',
        'released': 'release_date',
        'privacyPolicy': 'privacy_url',
    }
    try:
        proxies, verify = upstream_proxy('https')
        response = requests.get(req_url,
                                auth=(settings.APPMONSTA_API, 'X'),
                                headers=headers,
                                proxies=proxies,
                                verify=verify,
                                stream=True)
        resp = response.json()
        # 'app_name' is required; a missing key aborts via the handler.
        det['title'] = resp['app_name']
        for out_key, src_key in optional_fields.items():
            det[out_key] = resp.get(src_key, '')
        # The description arrives as HTML; strip it to plain text.
        soup = BeautifulSoup(resp.get('description', ''), features='lxml')
        det['description'] = soup.get_text()
        det['error'] = False
        return det
    except Exception:
        logger.warning('Unable to get app details')
        return det
def open_firebase(url):
    # Detect Open Firebase Database
    try:
        parsed = urlparse(url)
        base_url = '{}://{}/.json'.format(parsed.scheme, parsed.netloc)
        proxies, verify = upstream_proxy('https')
        headers = {
            'User-Agent': ('Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1)'
                           ' AppleWebKit/537.36 (KHTML, like Gecko) '
                           'Chrome/39.0.2171.95 Safari/537.36')
        }
        resp = requests.get(base_url,
                            headers=headers,
                            proxies=proxies,
                            verify=verify)
        if resp.status_code == 200:
            # Readable without auth -> database is exposed.
            return base_url, True
    except Exception:
        logger.warning('Open Firebase DB detection failed.')
    # Non-200 response or any failure: report as not open.
    return url, False
def upload_file(self, file_path):
    """
    Upload File to VT.

    :param file_path: file path to upload
    :return: json response / None
    """
    try:
        url = self.base_url + 'scan'
        if file_size(file_path) > 31:
            # Public API rejects files above 32 MB.
            logger.warning('VirusTotal Public API does '
                           'not support files above 32 MB')
            return None
        # The API key is sent as form data per the VT v2 API.
        headers = {'apikey': settings.VT_API_KEY}
        # Default to a direct connection so a failure in
        # upstream_proxy() does not leave proxies/verify unbound —
        # the original code raised NameError at requests.post().
        proxies = {'http': None, 'https': None}
        verify = True
        try:
            proxies, verify = upstream_proxy('https')
        except Exception:
            logger.exception('Setting upstream proxy')
        try:
            # Context manager closes the handle that the original
            # open(file_path, 'rb') leaked.
            with open(file_path, 'rb') as fobj:
                files = {'file': fobj}
                response = requests.post(
                    url, files=files, data=headers,
                    proxies=proxies, verify=verify)
            if response.status_code == 403:
                logger.error(
                    'VirusTotal Permission denied, wrong api key?')
                return None
        except Exception:
            logger.error(
                'VirusTotal Connection Error, check internet connectivity')
            return None
        return response.json()
    except Exception:
        logger.exception('VirusTotal upload_file')
        return None
def get_report(self, file_hash):
    """
    Get Report from VT.

    :param file_hash: md5/sha1/sha256
    :return: json response / None
    """
    try:
        url = self.base_url + 'report'
        params = {
            'apikey': settings.VT_API_KEY,
            'resource': file_hash}
        headers = {'Accept-Encoding': 'gzip, deflate'}
        # Default to a direct connection so a failure in
        # upstream_proxy() does not leave proxies/verify unbound —
        # the original code raised NameError at requests.get().
        proxies = {'http': None, 'https': None}
        verify = True
        try:
            proxies, verify = upstream_proxy('https')
        except Exception:
            logger.exception('Setting upstream proxy')
        try:
            response = requests.get(
                url, params=params, headers=headers,
                proxies=proxies, verify=verify)
            if response.status_code == 403:
                logger.error(
                    'VirusTotal Permission denied, wrong api key?')
                return None
        except Exception:
            logger.error(
                'VirusTotal ConnectionError, check internet connectivity')
            return None
        try:
            return response.json()
        except ValueError:
            # Body was not valid JSON (e.g. rate-limit empty response).
            return None
    except Exception:
        logger.exception('VirusTotal get_report')
        return None
def pdf(request, api=False, jsonres=False):
    """Generate a PDF (or JSON) report for a previously scanned app.

    :param request: Django request; hash comes from POST['hash'] (API)
                    or GET['md5'] (web UI)
    :param api: True when invoked via the REST API (returns dicts
                instead of HttpResponse objects)
    :param jsonres: with api=True, return the raw report context
                    instead of rendering a PDF
    """
    try:
        if api:
            checksum = request.POST['hash']
        else:
            checksum = request.GET['md5']
        # Scan hashes are 32-char lowercase hex MD5 strings.
        hash_match = re.match('^[0-9a-f]{32}$', checksum)
        if not hash_match:
            if api:
                return {'error': 'Invalid scan hash'}
            else:
                return HttpResponse(json.dumps({'md5': 'Invalid scan hash'}),
                                    content_type=ctype, status=500)
        # Do Lookups — probe each platform's static-analysis table for
        # the hash; the first match decides which PDF handler runs.
        android_static_db = StaticAnalyzerAndroid.objects.filter(MD5=checksum)
        ios_static_db = StaticAnalyzerIOS.objects.filter(MD5=checksum)
        win_static_db = StaticAnalyzerWindows.objects.filter(MD5=checksum)
        if android_static_db.exists():
            context, template = handle_pdf_android(android_static_db)
        elif ios_static_db.exists():
            context, template = handle_pdf_ios(ios_static_db)
        elif win_static_db.exists():
            context, template = handle_pdf_win(win_static_db)
        else:
            if api:
                return {'report': 'Report not Found'}
            else:
                return HttpResponse(json.dumps({'report': 'Report not Found'}),
                                    content_type=ctype, status=500)
        # Do VT Scan only on binaries (.zip uploads are source, not
        # binaries, so they are skipped).
        context['virus_total'] = None
        ext = os.path.splitext(context['file_name'].lower())[1]
        if settings.VT_ENABLED and ext != '.zip':
            app_bin = os.path.join(settings.UPLD_DIR,
                                   checksum + '/',
                                   checksum + ext)
            vt = VirusTotal.VirusTotal()
            context['virus_total'] = vt.get_result(app_bin, checksum)
        # Get Local Base URL — file:// scheme differs per host OS so
        # wkhtmltopdf can resolve local assets.
        proto = 'file://'
        host_os = 'nix'
        if platform.system() == 'Windows':
            proto = 'file:///'
            host_os = 'windows'
        context['base_url'] = proto + settings.BASE_DIR
        context['dwd_dir'] = proto + settings.DWD_DIR
        context['host_os'] = host_os
        context['timestamp'] = RecentScansDB.objects.get(
            MD5=checksum).TIMESTAMP
        try:
            if api and jsonres:
                # API JSON mode: hand back the raw report context.
                return {'report_dat': context}
            else:
                # wkhtmltopdf rendering options (via pdfkit).
                options = {
                    'page-size': 'Letter',
                    'quiet': '',
                    'enable-local-file-access': '',
                    'no-collate': '',
                    'margin-top': '0.50in',
                    'margin-right': '0.50in',
                    'margin-bottom': '0.50in',
                    'margin-left': '0.50in',
                    'encoding': 'UTF-8',
                    'custom-header': [
                        ('Accept-Encoding', 'gzip'),
                    ],
                    'no-outline': None,
                }
                # Added proxy support to wkhtmltopdf
                proxies, _ = upstream_proxy('https')
                if proxies['https']:
                    options['proxy'] = proxies['https']
                html = template.render(context)
                pdf_dat = pdfkit.from_string(html, False, options=options)
                if api:
                    return {'pdf_dat': pdf_dat}
                return HttpResponse(pdf_dat,
                                    content_type='application/pdf')
        except Exception as exp:
            logger.exception('Error Generating PDF Report')
            if api:
                return {
                    'error': 'Cannot Generate PDF/JSON',
                    'err_details': str(exp)
                }
            else:
                err = {
                    'pdf_error': 'Cannot Generate PDF',
                    'err_details': str(exp)
                }
                return HttpResponse(
                    json.dumps(err),  # lgtm [py/stack-trace-exposure]
                    content_type=ctype,
                    status=500)
    except Exception as exp:
        # Outer catch-all: any lookup/DB failure ends up here.
        logger.exception('Error Generating PDF Report')
        msg = str(exp)
        exp = exp.__doc__
        if api:
            return print_n_send_error_response(request, msg, True, exp)
        else:
            return print_n_send_error_response(request, msg, False, exp)