def scan_url(url):
    # Scan the url
    global pub_vt, virustotal_api
    if virustotal_api == "":
        # get a random virustotal api
        virustotal_api = random.choice(pub_vt)
    core.updatelog('Using api: ' + virustotal_api)
    vturl = 'https://www.virustotal.com/vtapi/v2/url/scan'
    params = {'apikey': virustotal_api, 'url': url}
    response = requests.post(vturl, data=params)
    response = response.json()
    if response['response_code'] == 1:
        core.updatelog('URL queued for scan! getting report after 10 seconds...')
        time.sleep(10)
        newurl = 'https://www.virustotal.com/vtapi/v2/url/report'
        newparams = {'apikey': virustotal_api, 'resource': url}
        newresponse = requests.get(newurl, params=newparams)
        finalresp = newresponse.json()
        if finalresp['response_code'] == 1:
            print('{0}/{1} - {2}'.format(finalresp['positives'], finalresp['total'], finalresp['permalink']))
        else:
            return [False, 'Reached maximum rate limit for virustotal api! If you are using your own key, please wait a minute and try again']
    else:
        return [False, 'Reached maximum rate limit for virustotal api! If you are using your own key, please wait a minute and try again']
def get_country(ip):
    '''
    Gets Country and country code from given IP.
    Parameters = ip = ip address for lookup
    Response = [True, {country_code}, {country_name}] or [False, {ERR_MSG}]
    Needs maxminddb for fast performance
    '''
    core.updatelog('Getting country from IP: ' + ip)
    try:
        # If maxminddb module is installed we don't have to query online services to get the country code, hence saving a lot of time
        import maxminddb
        try:
            core.updatelog('Getting country from local DB')
            reader = maxminddb.open_database(helper.fixpath(core.path + '/db/geoip.mmdb'))
            ip_info = reader.get(ip)
            iso_code = ip_info['country']['iso_code'].lower()
            country = ip_info['country']['names']['en']
            return [True, iso_code, country]
        except Exception as e:
            core.updatelog('Something went wrong while getting country from ip {0}! Error: {1}'.format(ip, str(e)))
            logging.error(traceback.format_exc())
            return [False, str(e)]
    except:
        core.updatelog('maxminddb module not installed! Using online service to get Country from IP')
        core.updatelog('To save time in future analysis; install maxminddb by: pip3 install maxminddb')
        import core.scans as scan
        gip = scan.geoip(ip)
        if gip[0]:
            geoip = gip[1]
            return [True, geoip['country'].lower(), geoip['country_name']]
        else:
            return [False, gip[1]]
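# Illustrative sketch (not part of the original module): how the two possible return
# shapes of get_country() might be consumed. The IP address below is only an example.
def _example_get_country():
    result = get_country('8.8.8.8')
    if result[0]:
        # [True, iso_code, country_name]
        print('Country: {0} ({1})'.format(result[2], result[1]))
    else:
        # [False, error_message]
        print('Lookup failed: ' + result[1])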
def copysource(self, result_directory):
    # copies all the json, html, css and js files and saves them to the result directory
    # create content for the source.json file
    source_json = {}
    # Copies all the json, css, js files to the result directory for future reference
    for file in self.files:
        if file['type'] in ('json', 'html', 'css', 'js'):
            file_path = file['path']
            new_path = helper.fixpath(result_directory + '/' + file['name'] + '.src')
            file_name = file['name']
            if os.path.isfile(file_path):
                # Checks if file present
                shutil.copyfile(file_path, new_path)
                core.updatelog('Copied ' + file_path + ' to ' + new_path)
                # append this to source_json dict
                rel_path = os.path.relpath(file_path, self.directory)
                file_size = str(os.path.getsize(file_path) >> 10) + ' KB'
                if file['type'] == 'js':
                    # Retire js scan
                    core.updatelog('Running retirejs vulnerability scan on: ' + file_name)
                    try:
                        with open(file_path, 'r') as fc:
                            file_content = fc.read()
                            rjs_scan = retirejs.scan_file_content(file_content)
                        core.updatelog('Scan complete!')
                    except Exception as e:
                        core.updatelog('Error {0} while running retirejs scan on {1}'.format(str(e), file_name))
                        rjs_scan = []
                    source_json[file['id']] = ({
                        'id': file['id'],
                        'file_name': file_name,
                        'location': new_path,
                        'relative_path': rel_path,
                        'file_size': file_size,
                        'retirejs_result': rjs_scan
                    })
                else:
                    source_json[file['id']] = ({
                        'id': file['id'],
                        'file_name': file_name,
                        'location': new_path,
                        'relative_path': rel_path,
                        'file_size': file_size
                    })
    # write all the changes to source.json
    source_file = helper.fixpath(result_directory + '/source.json')
    sf = open(source_file, 'w+')
    sf.write(json.dumps(source_json, indent=4, sort_keys=True))
    sf.close()
    core.updatelog('Saved sources to: ' + source_file)
    return True
def extract_chromium_plugins(self, parent_dir):
    ret_list = []
    extension_dirs = os.listdir(parent_dir)
    for extension_dir in extension_dirs:
        extension_path = os.path.join(parent_dir, extension_dir)
        if not os.path.isdir(extension_path):
            core.updatelog("Invalid extension directory: " + extension_path)
            continue
        extension_vers = os.listdir(extension_path)
        for ver in extension_vers:
            manifest_file = helper.fixpath(extension_path + "/" + ver + "/manifest.json")
            if not os.path.isfile(manifest_file):
                core.updatelog("Invalid extension directory: " + extension_path)
                continue
            ext_name = core.GetNameFromManifest(manifest_file)
            if ext_name:
                ext_version = ver.split('_')[0]
                ext_name = ext_name + ' version ' + ext_version
                # small hack: strip commas from the name so they don't break the comma-delimited "name,path" format
                ext_name = ext_name.replace(",", " ")
                ret_list.append(ext_name + ',' + helper.fixpath(extension_path + "/" + ver))
    return ret_list
def change_vt_api(api):
    '''
    change virustotal api!
    parameters needed = api = new api
    '''
    if api != core.virustotal_api:
        # Not the same api
        core.updatelog('Setting new virustotal api!')
        settings = open(core.settings_file, 'r')
        settings = json.loads(settings.read())
        settings['virustotal_api'] = api
        try:
            ws = open(core.settings_file, 'w+')
            ws.write(json.dumps(settings, indent=4, sort_keys=False))
            ws.close()
            core.virustotal_api = api
            core.updatelog('New virustotal api set successfully! new api: ' + api)
            return [True, 'New virustotal api set successfully!']
        except Exception as e:
            logging.error(traceback.format_exc())
            return [False, 'Error while writing settings file: ' + str(e)]
    else:
        return [False, 'This api is already in use. Nothing changed!']
def googlechrome(self):
    # TODO: add support for mac os
    chrome_directory = ""
    if self.os == 'windows':
        chrome_directory = helper.fixpath(self.user_directory + '\\AppData\\Local\\Google\\Chrome\\User Data\\Default\\Extensions')
    elif self.os == 'linux':
        chrome_directory = helper.fixpath(self.user_directory + '/.config/google-chrome/Default/Extensions')
    if chrome_directory != "":
        if os.path.isdir(chrome_directory):
            core.updatelog('Found Google chrome extension directory: ' + chrome_directory)
            extension_dirs = os.listdir(chrome_directory)
            for extension_dir in extension_dirs:
                if os.path.isdir(os.path.join(chrome_directory, extension_dir)):
                    # Every extension directory is like this: Extension/<id>/<version>/{contents}
                    extension_path = os.path.join(chrome_directory, extension_dir)
                    extension_vers = os.listdir(extension_path)
                    for ver in extension_vers:
                        manifest_file = helper.fixpath(extension_path + "/" + ver + '/manifest.json')
                        if os.path.isfile(manifest_file):
                            ext_name = core.GetNameFromManifest(manifest_file)
                            if ext_name != False and ext_name != None:
                                # append version with name
                                ext_version = ver.split('_')[0]
                                ext_name = ext_name + ' version ' + ext_version
                                self.chrome_extensions.append(ext_name + ',' + helper.fixpath(extension_path + "/" + ver))
                            else:
                                core.updatelog('Could not determine extension name.. skipping local chrome extension')
                        else:
                            core.updatelog('Invalid extension directory: ' + extension_path)
            return self.chrome_extensions
        else:
            core.updatelog('Could not find google chrome directory!')
            return False
    else:
        core.updatelog('Unsupported OS')
def home():
    core.updatelog('Accessed Main page')
    lic = open(helper.fixpath(core.path + '/LICENSE'), 'r')
    license_text = lic.read()
    cred = open(helper.fixpath(core.path + '/CREDITS'), 'r')
    credits_text = cred.read()
    return render_template("index.html",
                           report_dir=core.reports_path,
                           lab_dir=core.lab_path,
                           license_text=license_text,
                           credits_text=credits_text,
                           virustotal_api=core.virustotal_api)
def __init__(self):
    if sys.platform == 'win32':
        self.os = 'windows'
    elif sys.platform == 'darwin':
        self.os = 'osx'
    elif sys.platform == 'linux' or sys.platform == 'linux2':
        self.os = 'linux'
    else:
        self.os = 'unknown'
    self.user_directory = os.path.expanduser('~')
    core.updatelog('User Directory: ' + self.user_directory)
    self.chrome_extensions = []
    self.firefox_extensions = []
def path_changed(old_path, new_path):
    # Change '<reports_path>' to absolute path in results file
    if core.reportids == {}:
        ri = open(core.report_index, 'r')
        ri = ri.read()
        core.reportids = json.loads(ri)
    reports = core.reportids
    for report in reports['reports']:
        if '<reports_path>' in report['report_directory']:
            core.updatelog('[Updating reports index] Changing <reports_path> to: ' + old_path)
            report['report_directory'] = report['report_directory'].replace('<reports_path>', old_path)
    core.reportids = reports
    ri = open(core.report_index, 'w+')
    ri.write(json.dumps(reports, indent=4, sort_keys=True))
    ri.close()
    core.updatelog('Report index updated successfully')
    core.updatelog('Updating settings.json')
    sj = open(core.settings_file, 'r')
    sj = json.loads(sj.read())
    sj['old_result_directory'] = new_path
    wsj = open(core.settings_file, 'w+')
    wsj.write(json.dumps(sj, indent=4, sort_keys=False))
    wsj.close()
    core.updatelog('Updated settings.json successfully')
def upload_file():
    if request.method == 'POST':
        if 'file' not in request.files:
            return('error: No File uploaded')
        file = request.files['file']
        if file.filename == '':
            return('error: Empty File!')
        if file and allowed_file(file.filename):
            filename = secure_filename(file.filename)
            file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
            core.updatelog('File Uploaded.. Filename: ' + filename)
            # saveas = filename.split('.')[0]
            anls = analysis.analyze(filename)
            return(anls)
        else:
            return('error: Invalid file format! only .crx files allowed. If you\'re trying to upload a zip file rename it to crx instead')
def creategraphdata(self):
    if self.list_status != False:
        # Extract urls from all html, json, js files
        for file in self.files:
            if file['type'] == 'json' or file['type'] == 'html' or file['type'] == 'js':
                file_path = file['path']
                file_id = file['id']
                file_urls = []
                try:
                    core.updatelog('Trying to extract urls from: ' + file_path)
                    file_urls = core.extract_urls(file_path)
                    if file_urls != [] and file_urls != False:
                        for file_url in file_urls:
                            self.urls.append({'id': 'EXTAU' + str(self.current_url_number), 'parent': file_id, 'type': 'url', 'name': file_url})
                            self.current_url_number += 1
                except Exception as e:
                    core.updatelog('Skipped getting URL from file: ' + file_path + ' Error: ' + str(e))
                    logging.error(traceback.format_exc())
        # TODO: Clean this mess and use format
        for file in self.files:
            prepare_node = '\n{id: "' + file['id'] + '", label: "' + file['name'] + '", group: "' + file['type'] + '", cid: "' + file['parent'] + '"},'
            self.nodes += prepare_node
            prepare_edge = '\n{from: "' + file['parent'] + '", to: "' + file['id'] + '", color:{color:\'#fff\', highlight:\'#89ff00\'}},'
            self.edges += prepare_edge
        for file in self.dirs:
            if file['parent'] == 'none' and file['id'] == 'EXTAD0':
                # this is the parent directory i.e the extension
                prepare_node = '\n{id: "' + file['id'] + '", label: "' + file['name'] + '", group: "' + file['type'] + '"},'
                self.nodes += prepare_node
            else:
                prepare_node = '\n{id: "' + file['id'] + '", label: "' + file['name'] + '", group: "' + file['type'] + '", cid: "' + file['parent'] + '"},'
                self.nodes += prepare_node
                prepare_edge = '\n{from: "' + file['parent'] + '", to: "' + file['id'] + '", color:{color:\'#fff\', highlight:\'#89ff00\'}},'
                self.edges += prepare_edge
        for url in self.urls:
            prepare_node = '\n{id: "' + url['id'] + '", label: "' + url['name'] + '", group: "' + url['type'] + '", cid: "' + url['parent'] + '"},'
            self.nodes += prepare_node
            prepare_edge = '\n{from: "' + url['parent'] + '", to: "' + url['id'] + '", color:{color:\'#fff\', highlight:\'#89ff00\'}},'
            self.edges += prepare_edge
        self.nodes += '\n]);'
        self.edges += '\n]);'
        core.updatelog('Graph data creation complete!')
    else:
        core.updatelog('Graph data creation unsuccessful!')
        return False
def scan_domain(domain):
    global pub_vt
    # get a random virustotal api
    tvirustotal_api = random.choice(pub_vt)
    core.updatelog('Using api: ' + tvirustotal_api)
    try:
        url = 'https://www.virustotal.com/vtapi/v2/domain/report'
        params = {'apikey': tvirustotal_api, 'domain': domain}
        response = requests.get(url, params=params)
        response = response.json()
        if response['response_code'] == 1:
            return [True, response]
        else:
            return [False, 'Either rate limited or something else went wrong while getting domain report from virustotal']
    except Exception as e:
        logging.error(traceback.format_exc())
        return [False, str(e)]
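# Illustrative sketch (not part of the original module): consuming scan_domain()'s
# [True, report_dict] / [False, error_message] return convention. The domain and the
# 'detected_urls' field are only examples of what a virustotal domain report may contain.
def _example_scan_domain():
    status = scan_domain('example.com')
    if status[0]:
        report = status[1]
        print('Detected URLs in report: ' + str(len(report.get('detected_urls', []))))
    else:
        print('Scan failed: ' + status[1])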
def changelabdir(newpath):
    '''
    change the lab_directory_path in settings.json
    response [True/False, 'message']
    '''
    if os.path.isdir(newpath):
        core.updatelog('Setting lab directory to: ' + newpath)
        settings = open(core.settings_file, 'r')
        settings = json.loads(settings.read())
        old_lab_path = settings['lab_directory_path']
        if old_lab_path == '':
            old_lab_path = core.lab_path
        if newpath == old_lab_path:
            return [False, 'Please provide a different path, not the current one!']
        settings['lab_directory_path'] = newpath
        core.updatelog('Updating settings.json')
        try:
            ws = open(core.settings_file, 'w+')
            ws.write(json.dumps(settings, indent=4, sort_keys=False))
            ws.close()
            core.updatelog('File successfully updated! rewriting variables and fixing old paths...')
            core.lab_path = newpath
            return [True, 'Lab directory updated successfully!']
        except Exception as e:
            logging.error(traceback.format_exc())
            return [False, 'Error while writing settings file: ' + str(e)]
    else:
        return [False, 'invalid path']
def googlechrome(self):
    chrome_directory = ""
    if self.os == 'windows':
        chrome_directory = helper.fixpath(self.user_directory + '\\AppData\\Local\\Google\\Chrome\\User Data\\Default\\Extensions')
    elif self.os == 'linux':
        chrome_directory = helper.fixpath(self.user_directory + '/.config/google-chrome/Default/Extensions')
    elif self.os == 'osx':
        chrome_directory = helper.fixpath(self.user_directory + '/Library/Application Support/Google/Chrome/Profile 1/Extensions')
    if chrome_directory != "":
        if os.path.isdir(chrome_directory):
            core.updatelog('Found Google chrome extension directory: ' + chrome_directory)
            return self.extract_chromium_plugins(chrome_directory)
        else:
            core.updatelog('Could not find google chrome directory!')
            return False
    else:
        core.updatelog('Unsupported OS')
def geoip(ip):
    '''
    Geo-IP Lookup via ipapi.co
    needed parameter = ip = the ip address
    response = [True/False, JSON_RESULT/ERROR_MSG]
    '''
    core.updatelog('Initiating Geo-IP Lookup for address: ' + ip)
    try:
        lookup_url = 'https://ipapi.co/{0}/json'.format(ip)
        lookup = requests.get(lookup_url)
        lookup = lookup.json()
        try:
            if lookup['error']:
                core.updatelog('Geo-IP Lookup failed: ' + lookup['reason'])
                return [False, lookup['reason']]
        except:
            # no 'error' key in the response means the lookup succeeded
            core.updatelog('Geo-IP Lookup successful')
            return [True, lookup]
    except Exception as e:
        logging.error(traceback.format_exc())
        core.updatelog('Geo-IP Lookup failed: ' + str(e))
        return [False, str(e)]
def domain_batch_scan(domains):
    # used only when there is only one virustotal api and the pub_vt list is empty
    batch_result = {}
    total_domains = len(domains)
    if total_domains > 4:
        # virustotal has a limitation of 4 scans per minute per api, so if the domain count is 4 or fewer there is nothing to wait for
        gotta_wait = True
    else:
        gotta_wait = False
    if core.virustotal_api != "":
        # Do batch scan
        for index, domain in enumerate(domains):
            real_index = index + 1
            if gotta_wait and real_index % 4 == 0:
                core.updatelog('Sleeping for 1 minute... virustotal api limit reached!')
                # Sleep for 60 seconds.. I really hate it but there seems to be no other way around it other than adding a bunch of different apis to the above list
                time.sleep(60)
            core.updatelog('Getting virustotal report for: ' + domain)
            try:
                url = 'https://www.virustotal.com/vtapi/v2/domain/report'
                params = {'apikey': core.virustotal_api, 'domain': domain}
                response = requests.get(url, params=params)
                response = response.json()
                if response['response_code'] == 1:
                    batch_result[domain] = [True, response]
                else:
                    batch_result[domain] = [False, {"error": "Either rate limited or something else went wrong while getting domain report from virustotal"}]
            except Exception as e:
                logging.error(traceback.format_exc())
                batch_result[domain] = [False, str(e)]
    else:
        for _domain in domains:
            core.updatelog('Skipping virustotal domain scan for {0}. Reason: No virustotal api added!'.format(_domain))
            batch_result[_domain] = [False, "No virustotal api found"]
    return batch_result
def vivaldi_local_extensions_check(self):
    vivaldi_dir = ""
    if self.os == 'windows':
        vivaldi_dir = helper.fixpath(self.user_directory + '\\AppData\\Local\\Vivaldi\\User Data\\Default\\Extensions')
    if vivaldi_dir == "":
        core.updatelog('Unsupported OS')
        return
    if not os.path.isdir(vivaldi_dir):
        core.updatelog("Couldn't find Vivaldi Extension directory!")
        return
    core.updatelog('Found Vivaldi extension directory: ' + vivaldi_dir)
    return self.extract_chromium_plugins(vivaldi_dir)
def http_headers(url):
    '''
    HTTP Headers lookup
    needed parameter = url = the url to get the http headers of
    response = [True/False, HEADERS_LIST/ERROR_MSG]
    '''
    core.updatelog('Getting HTTP Headers of: ' + url)
    try:
        req = requests.get(url)
        headers = req.headers
        core.updatelog('HTTP Headers successfully acquired!')
        return [True, headers]
    except Exception as e:
        core.updatelog('Error while getting HTTP Headers of {0}! Error: {1}'.format(url, str(e)))
        logging.error(traceback.format_exc())
        return [False, str(e)]
def source_code(url):
    '''
    GET Source Code
    needed parameter = url = the url to get the source code of
    response = [True/False, SOURCE_CODE/ERROR_MSG]
    '''
    core.updatelog('Getting Source code of: ' + url)
    try:
        req = requests.get(url)
        source = req.text
        core.updatelog('Source code successfully acquired!')
        return [True, source]
    except Exception as e:
        core.updatelog('Error while getting Source code of {0}! Error: {1}'.format(url, str(e)))
        logging.error(traceback.format_exc())
        return [False, str(e)]
def download(id, name=""):
    ext_id = id
    if name == "":
        save_name = ext_id
    else:
        save_name = name
    save_path = helper.fixpath(core.lab_path + '/' + save_name + '.crx')
    core.updatelog("Downloader says: save_path is " + save_path)
    dl_url = "https://clients2.google.com/service/update2/crx?response=redirect&x=id%3D" + ext_id + "%26uc&prodversion=32"
    print("Download URL: " + dl_url)
    try:
        urllib.request.urlretrieve(dl_url, save_path)
        core.updatelog("Extension downloaded successfully: " + save_path)
        return save_name
    except Exception as e:
        core.updatelog("Error in downloader.py")
        print(e)
        return False
def download(id, name=""):
    ext_id = id
    if name == "":
        save_name = ext_id
    else:
        save_name = name
    save_path = helper.fixpath(core.lab_path + '/' + save_name + '.crx')
    core.updatelog("Downloader says: save_path is " + save_path)
    # dl_url = "https://clients2.google.com/service/update2/crx?response=redirect&x=id%3D" + ext_id + "%26uc&prodversion=32"
    # new download URL, issue #13
    dl_url = "https://clients2.google.com/service/update2/crx?response=redirect&os=win&arch=x86-64&os_arch=x86-64&nacl_arch=x86-64&prod=chromecrx&prodchannel=unknown&prodversion=81.0.4044.138&acceptformat=crx2,crx3&x=id%3D" + ext_id + "%26uc"
    print("Download URL: " + dl_url)
    try:
        urllib.request.urlretrieve(dl_url, save_path)
        core.updatelog("Extension downloaded successfully: " + save_path)
        return save_name
    except Exception as e:
        core.updatelog("Error in downloader.py")
        print(e)
        return False
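# Illustrative sketch (not part of the original module): downloading an extension by its
# Chrome Web Store ID and noting where the .crx lands. The ID and name are made-up examples.
def _example_download():
    saved_name = download('abcdefghijklmnopabcdefghijklmnop', name='sample_extension')
    if saved_name:
        print('Saved as ' + saved_name + '.crx inside the lab directory')
    else:
        print('Download failed, check the log')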
def download(id, name=""):  # chrome
    ext_id = id
    if name == "":
        save_name = ext_id
    else:
        save_name = name
    save_path = helper.fixpath(core.lab_path + '/' + save_name + '.crx')
    core.updatelog("Downloader says: save_path is " + save_path)
    if sys.platform == 'win32':
        os_name = 'windows'
        # path or command to find where the latest installed windows version is located (wmi or registry seems to be easiest)
        file = open(' ')
        version = file.readline()  # grabs the chrome version
        # dl_url = 'http://clients2.google.com/service/update2/crx?response=redirect&prodversion=' + version + '&acceptformat=crx2,crx3&x=id%3D' + ext_id + '%26uc'
    elif sys.platform == 'darwin':
        os_name = 'osx'
        username = getpass.getuser()  # this gets the current user's username as it is in the system path on MAC
        # combining the username into the macOS path so we can open the file
        file = open('/Users/' + username + '/Library/Application Support/Google/Chrome/Last Version', 'r')
        version = file.readline()  # this grabs the version of chrome out of the local file
        dl_url = 'http://clients2.google.com/service/update2/crx?response=redirect&prodversion=' + version + '&acceptformat=crx2,crx3&x=id%3D' + ext_id + '%26uc'
    elif sys.platform == 'linux' or sys.platform == 'linux2':
        os_name = 'linux'
        dl_url = ''  # need to craft linux url specific to finding the latest version of chrome
    print("Download URL: " + dl_url)
    try:
        urllib.request.urlretrieve(dl_url, save_path)
        core.updatelog("Extension downloaded successfully: " + save_path)
        return save_name
    except Exception as e:
        core.updatelog("Error in downloader.py")
        print(e)
        return False
def braveLocalExtensionsCheck(self):
    brave_directory = ""
    if self.os == 'windows':
        brave_directory = helper.fixpath(self.user_directory + '\\AppData\\Local\\BraveSoftware\\Brave-Browser\\User Data\\Default\\Extensions')
    elif self.os == 'linux':
        brave_directory = helper.fixpath(self.user_directory + '/.config/BraveSoftware/Brave-Browser/Default/Extensions')
    if brave_directory != "":
        if os.path.isdir(brave_directory):
            core.updatelog('Found Brave extension directory: ' + brave_directory)
            return self.extract_chromium_plugins(brave_directory)
        else:
            core.updatelog('Could not find Brave extension directory!')
            return False
    else:
        core.updatelog('Unsupported OS')
def analyzelocalfirefoxextension(path):
    if os.path.isfile(path) and path.endswith('.xpi'):
        # Extract the .xpi file to a temp directory in lab directory
        # Analyze the extracted directory
        # delete the temp directory
        extract_directory = helper.fixpath(core.lab_path + '/temp_extract_directory')
        try:
            core.updatelog('Unzipping ' + path + ' to: ' + extract_directory)
            zip_contents = zipfile.ZipFile(path, 'r')
            zip_contents.extractall(extract_directory)
            zip_contents.close()
            core.updatelog('Unzipping complete')
        except Exception as e:
            core.updatelog('Something went wrong while unzipping ' + path + ' to ' + extract_directory)
            logging.error(traceback.format_exc())
            return False
        analysis_status = analysis.analyze(extract_directory, 'Local Firefox Extension')
        if 'error:' in analysis_status:
            core.updatelog('Something went wrong during analysis... deleting temporary extract directory')
        else:
            core.updatelog('Scanning complete... Deleting temporary extract directory')
        shutil.rmtree(extract_directory)
        core.updatelog('Successfully deleted: ' + extract_directory)
        return analysis_status
    else:
        core.updatelog('[analyzelocalfirefoxextension] Invalid local firefox extension path: ' + path)
def createFirefoxListing(self, extension_directory, xpi_file):
    list_file = os.path.join(extension_directory, 'extanalysis.json')
    xpi_directory = os.path.join(extension_directory, xpi_file)
    if os.path.isfile(xpi_directory) and os.path.isfile(list_file):
        # extract the xpi file, get the name from manifest and delete the extract directory
        extract_directory = os.path.join(extension_directory, 'extanalysis_temp_directory_delete_if_not_done_automatically')
        try:
            core.updatelog('Trying to unzip xpi: ' + xpi_file)
            zip_contents = zipfile.ZipFile(xpi_directory, 'r')
            zip_contents.extractall(extract_directory)
            zip_contents.close()
            core.updatelog('Unzipped xpi successfully: ' + xpi_directory)
            xpi_manifest = os.path.join(extract_directory, 'manifest.json')
            if os.path.isfile(xpi_manifest):
                ext_name = core.GetNameFromManifest(xpi_manifest)
                if ext_name != False and ext_name != None:
                    core.updatelog(xpi_file + ' has the name: ' + ext_name + ' adding it to the list')
                    list_content = open(list_file, 'r')
                    list_content = list_content.read()
                    list_content = json.loads(list_content)
                    list_content['extensions'][xpi_file] = ({
                        "name": ext_name,
                        "file": xpi_directory
                    })
                    list_write = open(list_file, 'w+')
                    list_write.write(json.dumps(list_content, indent=4, sort_keys=True))
                    list_write.close()
                    core.updatelog('List updated! Deleting temp extract directory')
                    shutil.rmtree(extract_directory)
                    core.updatelog('Removed temp extract directory')
                    return True
                else:
                    core.updatelog('Could not find extension name hence it will not be added to the list')
            else:
                core.updatelog('No manifest file found after extracting xpi! Deleting temp extract directory')
                shutil.rmtree(extract_directory)
                core.updatelog('Removed temp extract directory')
                return False
        except Exception as e:
            core.updatelog('Error unzipping xpi file: ' + xpi_directory)
            logging.error(traceback.format_exc())
            return False
def firefox(self):
    # read the profiles.ini
    # check for previous list and create new if not found [list = extanalysis.json]
    # Get a list of all the xpi files
    # Unzip them
    # Get all their names from manifest.json
    # update the list
    firefox_directory = ""
    default_profile_path = ""
    if self.os == 'windows':
        firefox_directory = helper.fixpath(self.user_directory + '\\AppData\\Roaming\\Mozilla\\Firefox')
    elif self.os == 'linux':
        firefox_directory = helper.fixpath(self.user_directory + '/.mozilla/firefox/')
    if firefox_directory != "" and os.path.isdir(firefox_directory):
        # firefox installed
        firefox_profile = os.path.join(firefox_directory, 'profiles.ini')
        if os.path.isfile(firefox_profile):
            # found firefox profiles.ini
            try:
                firefox_config = configparser.ConfigParser()
                with open(firefox_profile, 'r') as ini_source:
                    firefox_config.read_file(ini_source)
                default_profile_path = os.path.normpath(os.path.join(firefox_directory, firefox_config.get('Profile0', 'Path')))
                core.updatelog('Found firefox profile path: ' + default_profile_path)
            except Exception as e:
                core.updatelog('Something went wrong while reading firefox profiles.ini')
                logging.error(traceback.format_exc())
                return False
        else:
            core.updatelog('Could not find profiles.ini ExtAnalysis can\'t analyze local firefox extensions')
            return False
    else:
        # Could not find firefox directory
        core.updatelog('Firefox installation could not be detected')
        return False
    if default_profile_path != "":
        if os.path.isdir(default_profile_path):
            # profile path is valid
            firefox_extension_directory = os.path.join(default_profile_path, 'extensions')
            if os.path.isdir(firefox_extension_directory):
                unfiltered_files = os.listdir(firefox_extension_directory)
                xpi_files = []
                for afile in unfiltered_files:
                    if afile.endswith('.xpi') and os.path.isfile(os.path.join(firefox_extension_directory, afile)):
                        xpi_files.append(afile)
                core.updatelog('xpi list generated')
            else:
                core.updatelog('extensions directory could not be found inside firefox default profile')
                return False
        else:
            core.updatelog('Invalid firefox profile path... Can\'t get local firefox extensions')
            return False
    else:
        core.updatelog('Could not find default profile path for firefox')
        return False
    if xpi_files != []:
        exta_firefox_list = os.path.join(firefox_extension_directory, 'extanalysis.json')
        if os.path.isfile(exta_firefox_list):
            # found previous list
            core.updatelog('Found previous analysis log.. updating with current extensions')
            listed_extensions = []
            list_file = open(exta_firefox_list, 'r')
            list_files = json.loads(list_file.read())
            for list_file in list_files['extensions']:
                listed_extensions.append(list_file)
            for xpi_file in xpi_files:
                if xpi_file not in listed_extensions:
                    core.updatelog('Inserting ' + xpi_file + ' into list')
                    self.createFirefoxListing(firefox_extension_directory, xpi_file)
        else:
            core.updatelog('Creating ExtAnalysis list file')
            list_file = open(exta_firefox_list, 'w+')
            list_file.write('{"extensions":{}}')
            list_file.close()
            core.updatelog('Updating list file with all xpi file infos')
            for xpi_file in xpi_files:
                core.updatelog('Inserting ' + xpi_file + ' into list')
                self.createFirefoxListing(firefox_extension_directory, xpi_file)
    else:
        core.updatelog('No installed firefox extensions found!')
        return False
    # Read the final list, build the return list and return it
    firefox_extensions_list = []
    read_list = open(exta_firefox_list, 'r')
    read_list = json.loads(read_list.read())
    if read_list['extensions'] != {}:
        # There are some extensions
        for fext in read_list['extensions']:
            prepare_to_insert = read_list['extensions'][fext]['name'] + ',' + read_list['extensions'][fext]['file']
            firefox_extensions_list.append(prepare_to_insert)
        return firefox_extensions_list
    else:
        core.updatelog('ExtAnalysis could not find any local firefox extensions')
def view(query, allargs):
    if query == 'dlanalysis':
        try:
            extension_id = allargs.get('extid')
            saveas = ""
            try:
                saveas = allargs.get('savedir')
                if saveas == "" or saveas == " ":
                    saveas = extension_id
            except Exception as e:
                print('Save name not specified')
            try:
                download_log = download_extension.download(extension_id, saveas)
                if download_log:
                    aok = analysis.analyze(saveas + '.crx', 'Remote Google Chrome Extension')
                    return (aok)
                else:
                    return ('error: Something went wrong while downloading extension')
            except Exception as e:
                core.updatelog('Something went wrong while downloading extension: ' + str(e))
                return ('error: Something went wrong while downloading extension, check log for more information')
        except Exception as e:
            core.updatelog('Something went wrong: ' + str(e))
            return ('error: Something went wrong while downloading extension, check log for more information')
    elif query == 'firefoxaddon':
        try:
            addonurl = allargs.get('addonurl')
            try:
                download_log = download_extension.downloadFirefox(addonurl)
                if download_log:
                    aok = analysis.analyze(download_log + '.xpi', 'Remote Firefox Addon')
                    return (aok)
                else:
                    return ('error: Something went wrong while downloading extension')
            except Exception as e:
                core.updatelog('Something went wrong while downloading extension: ' + str(e))
                return ('error: Something went wrong while downloading extension, check log for more information')
        except Exception as e:
            core.updatelog('Something went wrong: ' + str(e))
            return ('error: Something went wrong while downloading extension, check log for more information')
    elif query == 'results':
        reportids = core.reportids
        if reportids == {}:
            # Result index not loaded so let's load it and show the results
            core.updatelog('Reading report index and loading json')
            ridfile = core.report_index
            ridcnt = open(ridfile, 'r', encoding='utf8')
            ridcnt = ridcnt.read()
            reportids = json.loads(ridcnt)
        rd = "<table class='result-table' id='result-table'><thead><tr><th>Name</th><th>Version</th><th>Date</th><th>Actions</th></tr></thead><tbody>"
        for areport in reportids['reports']:
            report_name = areport['name']
            report_id = areport['id']
            report_date = areport['time']
            report_version = areport['version']
            rd += '<tr><td>' + report_name + '</td><td>' + report_version + '</td><td>' + report_date + '</td><td><button class="bttn-fill bttn-xs bttn-primary" onclick=viewResult(\'' + report_id + '\')><i class="fas fa-eye"></i> View</button> <button class="bttn-fill bttn-xs bttn-danger" onclick=deleteResult(\'' + report_id + '\')><i class="fas fa-trash"></i> Delete</button></td></tr>'
        return (rd + '</tbody></table><br>')
    elif query == 'getlocalextensions':
        try:
            browser = allargs.get('browser')
            if browser == 'googlechrome':
                import core.localextensions as localextensions
                lexts = localextensions.GetLocalExtensions()
                exts = lexts.googlechrome()
                if exts != False and exts != [] and exts != None:
                    return_html = "<table class='result-table' id='result-table'><thead><tr><th>Extension Name</th><th>Action</th></tr></thead><tbody>"
                    for ext in exts:
                        ext_info = ext.split(',')
                        return_html += '<tr><td>' + ext_info[0] + '</td><td><button class="bttn-fill bttn-xs bttn-success" onclick="analyzeLocalExtension(\'' + ext_info[1].replace('\\', '\\\\') + '\', \'googlechrome\')"><i class="fas fa-bolt"></i> Analyze</button></td></tr>'
                    return (return_html + '</tbody></table>')
                else:
                    return ('error: Something went wrong while getting local Google Chrome extensions! Check log for more information')
            elif browser == 'firefox':
                import core.localextensions as localextensions
                lexts = localextensions.GetLocalExtensions()
                exts = lexts.firefox()
                if exts != False and exts != [] and exts != None:
                    return_html = "<table class='result-table' id='result-table'><thead><tr><th>Extension Name</th><th>Action</th></tr></thead><tbody>"
                    for ext in exts:
                        ext_info = ext.split(',')
                        return_html += '<tr><td>' + ext_info[0] + '</td><td><button class="bttn-fill bttn-xs bttn-success" onclick="analyzeLocalExtension(\'' + ext_info[1].replace('\\', '\\\\') + '\', \'firefox\')"><i class="fas fa-bolt"></i> Analyze</button></td></tr>'
                    return (return_html + '</tbody></table>')
                else:
                    return ('error: Something went wrong while getting local firefox extensions! Check log for more information')
            else:
                return ('error: Invalid Browser!')
        except Exception:
            logging.error(traceback.format_exc())
            return ('error: Incomplete Query')
    elif query == 'analyzelocalextension':
        try:
            browser = allargs.get('browser')
            path_to_local = allargs.get('path')
            path = helper.fixpath(path_to_local)
            if browser == 'firefox' and os.path.isfile(path):
                # valid firefox extension
                import core.localextensions as localextensions
                analysis_stat = localextensions.analyzelocalfirefoxextension(path)
                return (analysis_stat)
            elif browser == 'googlechrome' and os.path.isdir(path):
                if os.path.isfile(os.path.join(path, 'manifest.json')):
                    analysis_stat = analysis.analyze(path, 'Local Google Chrome Extension')
                    return (analysis_stat)
                else:
                    return ('error: Invalid Google Chrome Extension Directory')
            else:
                return ('error: Malformed Query')
        except Exception:
            logging.error(traceback.format_exc())
            return ('error: Incomplete Query')
    elif query == 'deleteAll':
        '''
        DELETES ALL RESULTS
        RESPONSE = SUCCESS / ERROR
        '''
        import core.result as result
        delete_status = result.clearAllResults()
        if delete_status:
            return "success"
        else:
            return ('There were some errors while deleting all analysis reports... refer to log for more information')
    elif query == 'clearLab':
        '''
        Deletes all the contents of lab
        RESPONSE = SUCCESS / ERROR
        '''
        clear_lab = core.clear_lab()
        if clear_lab[0]:
            # Successful
            return (clear_lab[1])
        else:
            # Unsuccessful
            return ('error: ' + clear_lab[1])
    elif query == 'deleteResult':
        '''
        DELETES A SPECIFIC RESULT
        PARAMETER = resultID
        RESPONSE = SUCCESS_MSG / 'error: ERROR_MSG'
        '''
        try:
            result_id_to_delete = allargs.get('resultID')
            import core.result as result
            delete_status = result.clearResult(result_id_to_delete)
            if delete_status:
                return "success"
            else:
                return "Something went wrong while deleting result! Check log for more information"
        except Exception:
            return ('Invalid Query')
    elif query == 'vtDomainReport':
        try:
            domain = allargs.get('domain')
            analysis_id = allargs.get('analysis_id')
            ranalysis = core.get_result_info(analysis_id)
            if ranalysis[0]:
                # if ranalysis[0] is True then ranalysis[1] contains the details
                analysis_dir = ranalysis[1]['report_directory']
                analysis_report = os.path.join(analysis_dir, 'extanalysis_report.json')
                if os.path.isfile(analysis_report):
                    report = open(analysis_report, 'r')
                    domains = json.loads(report.read())['domains']
                    for adomain in domains:
                        if adomain['name'] == domain:
                            vtjson = json.dumps(adomain['virustotal'], indent=4, sort_keys=False)
                            return vtjson
                    return ('error: Domain info not found in analysis report!')
                else:
                    return ('error: Analysis report for #{0} not found'.format(analysis_id))
            else:
                # ranalysis[1] is the error msg when ranalysis[0] = False
                return ('error: ' + ranalysis[1])
        except:
            logging.error(traceback.format_exc())
            return ('error: Malformed api call')
    elif query == 'retirejsResult':
        '''
        GET RETIREJS SCAN RESULTS FOR FILE
        REQUIRED PARAMETER: file = FILE_ID
        '''
        try:
            file_id = allargs.get('file')
            analysis_id = allargs.get('analysis_id')
            ranalysis = core.get_result_info(analysis_id)
            if ranalysis[0]:
                # if ranalysis[0] is True then ranalysis[1] contains the details
                analysis_dir = ranalysis[1]['report_directory']
                source_json = os.path.join(analysis_dir, 'source.json')
                if os.path.isfile(source_json):
                    report = open(source_json, 'r')
                    files = json.loads(report.read())
                    for _file in files:
                        if _file == file_id:
                            retirejs_result = files[_file]['retirejs_result']
                            if retirejs_result == []:
                                ret = 'none'
                            else:
                                ret = json.dumps(retirejs_result, indent=4, sort_keys=False)
                            return ret
                    return ('error: File ID not found in report!')
                else:
                    return ('error: Analysis report for #{0} not found'.format(analysis_id))
            else:
                # ranalysis[1] is the error msg when ranalysis[0] = False
                return ('error: ' + ranalysis[1])
        except:
            logging.error(traceback.format_exc())
            return ('error: Malformed api call')
    elif query == 'whois':
        '''
        GET WHOIS REPORT OF DOMAIN
        REQUIRES 'python-whois' module
        RESPONSE = HTML DIV WITH FORMATTED WHOIS INFO
        '''
        try:
            domain = allargs.get('domain')
            try:
                import whois
            except:
                return ("error: python-whois module not installed! install it using `pip3 install python-whois` or `pip3 install -r requirements.txt`")
            whois_result = whois.whois(domain)
            whois_html = '<div class="whois-data" style="overflow-y: scroll; max-height:500px; text-align: left;">'
            for data in whois_result:
                proper_data = data.replace('_', ' ').capitalize()
                if isinstance(whois_result[data], list):
                    for subdata in whois_result[data]:
                        whois_html += '<b style="color:#89ff00;">{0} : </b>{1}<br>'.format(proper_data, subdata)
                else:
                    whois_html += '<b style="color:#89ff00;">{0} : </b>{1}<br>'.format(proper_data, whois_result[data])
            whois_html += '</div>'
            if whois_result:
                return ('<center><h4>Whois Results For {0}</h4></center><br>{1}'.format(domain, whois_html))
            else:
                return ("error: Something went wrong while checking whois information of: " + domain)
        except Exception:
            logging.error(traceback.format_exc())
            return ('error: Invalid Query')
    elif query == 'geoip':
        '''
        GEO-IP LOOKUP OF AN IP ADDRESS
        PARAMETERS -> IP = CONTAINS IP ADDRESS TO BE LOOKED UP
        RETURNS A HTML TO BE SHOWN
        '''
        try:
            ip_address = allargs.get('ip')
            geo_ip = scan.geoip(ip_address)
            if geo_ip[0]:
                gip = geo_ip[1]
                rethtml = '<div class="whois-data" style="overflow-y: scroll; max-height:500px; text-align: left;">'
                for g in gip:
                    name = str(g).replace('_', ' ').capitalize()
                    val = str(gip[g])
                    rethtml += '<b style="color:#89ff00;">{0} : </b>{1}<br>'.format(name, val)
                rethtml += '</div>'
                return ('<center><h4>Geo-IP Lookup Results For {0}</h4></center><br>{1}'.format(ip_address, rethtml))
            else:
                # in case of geo_ip[0] being false element 1 has the error msg
                return ('error: ' + geo_ip[1])
        except Exception as e:
            logging.error(traceback.format_exc())
            return ('error: Invalid Query')
    elif query == 'HTTPHeaders':
        '''
        HTTP HEADERS OF AN URL
        PARAMETERS -> URL -> BASE64 ENCODED URL
        RETURNS HTML
        '''
        try:
            url = allargs.get('url')
            url = base64.b64decode(url).decode('ascii')
            headers_status = scan.http_headers(url)
            if headers_status[0]:
                rethtml = '<div class="whois-data" style="overflow-y: scroll; max-height:500px; text-align: left;">'
                headers = headers_status[1]
                for header in headers:
                    hval = headers[header]
                    rethtml += '<b style="color:#89ff00;">{0} : </b>{1}<br>'.format(header, hval)
                rethtml += '</div>'
                return ('<center><h4>Showing HTTP Headers of: {0}</h4></center><br>{1}'.format(url, rethtml))
            else:
                return ('error: ' + headers_status[1])
        except Exception as e:
            logging.error(traceback.format_exc())
            return ('error: Invalid Query')
    elif query == 'SourceCode':
        '''
        GET SOURCE CODE OF AN URL
        PARAMETERS -> URL -> BASE64 ENCODED URL
        RETURNS HTML
        '''
        try:
            url = allargs.get('url')
            rurl = base64.b64decode(url).decode('ascii')
            headers_status = scan.source_code(rurl)
            if headers_status[0]:
                rethtml = '<textarea id="src_code" class="source_code" autocomplete="off" autocorrect="off" autocapitalize="off" spellcheck="false">'
                rethtml += headers_status[1]
                rethtml += '</textarea><br><br><center><a href="{0}" target="_blank" class="start_scan"><i class="fas fa-external-link-alt"></i> View Full Screen</a>'.format('/source-code/' + url)
                return ('<center><h4>Source Code of: {0}</h4></center><br>{1}'.format(rurl, rethtml))
            else:
                return ('error: ' + headers_status[1])
        except Exception as e:
            logging.error(traceback.format_exc())
            return ('error: Invalid Query')
    elif query == 'clearlogs':
        '''
        CLEARS LOG
        '''
        core.clearlog()
        return ('Logs cleared successfully!')
    elif query == 'changeReportsDir':
        '''
        CHANGES THE REPORT DIRECTORY
        RESPONSE = SUCCESS / 'error: ERROR_MSG'
        '''
        try:
            newpath = allargs.get('newpath')
            if os.path.isdir(newpath):
                # valid directory.. let's get the absolute path and set it
                absolute_path = os.path.abspath(newpath)
                import core.settings as settings
                change = settings.changedir(absolute_path)
                if change[0]:
                    return (change[1])
                else:
                    return ('error: ' + change[1])
            else:
                return ('error: Invalid directory path!')
        except:
            logging.error(traceback.format_exc())
            return ('error: Invalid request for directory change!')
    elif query == 'changeVTapi':
        '''
        CHANGE VIRUSTOTAL API
        RESPONSE = SUCCESS_MSG / 'error: ERROR_MSG'
        '''
        try:
            new_api = allargs.get('api')
            import core.settings as settings
            change = settings.change_vt_api(new_api)
            if change[0]:
                return (change[1])
            else:
                return ('error: ' + change[1])
        except:
            logging.error(traceback.format_exc())
            return ('error: Invalid request!')
    elif query == 'changelabDir':
        '''
        CHANGES THE LAB DIRECTORY
        RESPONSE = SUCCESS / 'error: ERROR_MSG'
        '''
        try:
            newpath = allargs.get('newpath')
            if os.path.isdir(newpath):
                # valid directory.. let's get the absolute path and set it
                absolute_path = os.path.abspath(newpath)
                import core.settings as settings
                change = settings.changelabdir(absolute_path)
                if change[0]:
                    return (change[1])
                else:
                    return ('error: ' + change[1])
            else:
                return ('error: Invalid directory path!')
        except:
            logging.error(traceback.format_exc())
            return ('error: Invalid request for directory change!')
    elif query == 'updateIntelExtraction':
        '''
        UPDATES INTELS TO BE EXTRACTED
        RESPONSE = SUCCESS_MSG / 'error: ' + ERROR_MSG
        '''
        try:
            # Create the dict with all values and keys
            parameters = {}
            parameters["extract_comments"] = str(allargs.get('extract_comments'))
            parameters["extract_btc_addresses"] = str(allargs.get('extract_btc_addresses'))
            parameters["extract_base64_strings"] = str(allargs.get('extract_base64_strings'))
            parameters["extract_email_addresses"] = str(allargs.get('extract_email_addresses'))
            parameters["extract_ipv4_addresses"] = str(allargs.get('extract_ipv4_addresses'))
            parameters["extract_ipv6_addresses"] = str(allargs.get('extract_ipv6_addresses'))
            parameters["ignore_css"] = str(allargs.get('ignore_css'))
            import core.settings as settings
            status_code = settings.update_settings_batch(parameters)
            # 0 = failed, 1 = success, 2 = some updated some not!
            if status_code == '0':
                return ('error: Settings could not be updated! Check log for more information')
            elif status_code == '1':
                return ('Settings updated successfully... Please restart ExtAnalysis for them to take effect!')
            elif status_code == '2':
                return ('Some settings were updated and some were not... Please restart ExtAnalysis for them to take effect!')
            else:
                return ('error: Invalid response from "update_settings_batch". please report it here: https://github.com/Tuhinshubhra/ExtAnalysis/issues/new')
        except:
            logging.error(traceback.format_exc())
            return ('error: Incomplete Request!')
    else:
        return ('error: Invalid Query!')
def update_settings_batch(settings_dict):
    '''
    FUNCTION TO UPDATE SETTINGS KEYS THAT HAVE TRUE/FALSE VALUES
    NEEDED PARAMETERS: settings_dict = DICT WITH NAMES AND VALUES.. ex: {"extract_comments":"true"}
    '''
    update_type = ''  # 0 = failed, 1 = success, 2 = some updated some not!
    try:
        settings = open(core.settings_file, 'r')
        settings = json.loads(settings.read())
        for the_setting in settings_dict:
            try:
                if type(settings[the_setting]) == bool:
                    # okay, settings key is good...
                    if str(settings_dict[the_setting]).lower() == 'true':
                        # set to true
                        settings[the_setting] = True
                        core.updatelog('Set the value of {0} to True successfully'.format(the_setting))
                        # set update type
                        if update_type == '':
                            update_type = '1'
                        elif update_type == '0':
                            update_type = '2'
                    elif str(settings_dict[the_setting]).lower() == 'false':
                        # set to false
                        settings[the_setting] = False
                        core.updatelog('Set the value of {0} to False successfully'.format(the_setting))
                        # set update type
                        if update_type == '':
                            update_type = '1'
                        elif update_type == '0':
                            update_type = '2'
                    else:
                        core.updatelog('Invalid value: {1} for setting {0}'.format(the_setting, str(settings_dict[the_setting])))
                        # set update type
                        if update_type == '':
                            update_type = '0'
                        elif update_type == '1':
                            update_type = '2'
            except Exception as e:
                logging.error(traceback.format_exc())
                if update_type == '':
                    update_type = '0'
                elif update_type == '1':
                    update_type = '2'
        try:
            ws = open(core.settings_file, 'w+')
            ws.write(json.dumps(settings, indent=4, sort_keys=False))
            ws.close()
            core.updatelog('Settings written to file successfully! Restart ExtAnalysis for them to take effect')
        except Exception as e:
            core.updatelog('Error {0} occurred while writing settings.json file'.format(str(e)))
            logging.error(traceback.format_exc())
            return '0'
        return update_type
    except Exception as e:
        core.updatelog('Error {0} occurred while updating settings'.format(str(e)))
        logging.error(traceback.format_exc())
        return '0'
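# Illustrative sketch (not part of the original module): calling update_settings_batch()
# with the "true"/"false" string values the web UI sends, then checking the status code.
def _example_update_settings_batch():
    status = update_settings_batch({"extract_comments": "true", "ignore_css": "false"})
    if status == '1':
        print('All settings updated')
    elif status == '2':
        print('Some settings updated, some failed')
    else:
        print('Update failed, check the log')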
def extract(contents, relpath):
    '''
    EXTRACTS THE FOLLOWING:
    -> URL
    -> EMAIL
    -> BTC ADDRESS
    -> IPV4, IPV6 ADDRESSES
    -> BASE64 ENCODED STRINGS
    CONTENTS = FILE CONTENT
    RELPATH = RELATIVE PATH (FOR JSON ENTRY IN RESULT)
    '''
    found_urls = []  # URLS
    found_mail = []  # emails
    found_btcs = []  # bitcoin addresses
    found_ipv4 = []  # IPv4 addresses
    found_ipv6 = []  # IPv6 addresses
    found_b64s = []  # base64 encoded strings
    found_cmnt = []  # Comments

    # Check if the file is css and if ignore css is set to true
    if core.ignore_css and relpath.endswith('.css'):
        # return empty result
        core.updatelog('ignore css set to true... ignoring: ' + relpath)
        result = {
            "urls": found_urls,
            "mails": found_mail,
            "ipv4": found_ipv4,
            "ipv6": found_ipv6,
            "base64": found_b64s,
            "btc": found_btcs,
            "comments": found_cmnt
        }
        return result

    ''' EXTRACT URLS FROM JS, HTML, CSS AND JSON FILES '''
    curls = re.findall('(http|ftp|https)://([\w_-]+(?:(?:\.[\w_-]+)+))([\w.,@?^=%&:/~+#-]*[\w@?^=%&/~+#-])?', contents)
    for url in curls:
        urlresult = {"file": relpath, "url": url[0] + '://' + url[1] + url[2]}
        if urlresult not in found_urls:
            found_urls.append(urlresult)

    ''' EXTRACT EMAIL IDs FROM JS, HTML, JSON AND CSS FILES '''
    if core.extract_email_addresses:
        cmails = re.findall('([a-zA-Z0-9\.\-_]+(?:@| ?\[(?:at)\] ?)[a-zA-Z0-9\.\-]+(?:\.| ?\[(?:dot)\] ?)[a-zA-Z]+)', contents)
        for mail in cmails:
            mail = mail.replace('[at]', '@').replace('[dot]', '.')
            core.updatelog('Found email address: ' + mail)
            mailarray = {"mail": mail, "file": relpath}
            if mailarray not in found_mail:
                found_mail.append(mailarray)

    ''' EXTRACT BITCOIN ADDRESSES '''
    if core.extract_btc_addresses:
        btc_addresses = re.findall('[^a-zA-Z0-9]([13][a-km-zA-HJ-NP-Z1-9]{26,33})[^a-zA-Z0-9]', contents)
        for btc_address in btc_addresses:
            core.updatelog('Found BTC address: ' + btc_address)
            btcarr = {"address": btc_address, "file": relpath}
            if btcarr not in found_btcs:
                found_btcs.append(btcarr)

    ''' EXTRACT IPV6 ADDRESSES '''
    if core.extract_ipv6_addresses:
        ipv6s = re.findall('(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))(?=\s|$)', contents)
        for ipv6 in ipv6s:
            addr = ipv6[0]
            core.updatelog('Found IP v6 Address: ' + addr)
            v6arr = {"address": addr, "file": relpath}
            if v6arr not in found_ipv6:
                found_ipv6.append(v6arr)

    ''' EXTRACT IPV4 ADDRESSES '''
    if core.extract_ipv4_addresses:
        ipv4s = re.findall('[^a-zA-Z0-9]([0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3})[^a-zA-Z0-9]', contents)
        for ipv4 in ipv4s:
            core.updatelog('Found IP v4 Address: ' + ipv4)
            iparr = {"address": ipv4, "file": relpath}
            if iparr not in found_ipv4:
                found_ipv4.append(iparr)

    ''' EXTRACT BASE64 ENCODED STRINGS '''
    if core.extract_base64_strings:
        base64_strings = re.findall('(?:[A-Za-z0-9+/]{4}){2,}(?:[A-Za-z0-9+/]{2}[AEIMQUYcgkosw048]=|[A-Za-z0-9+/][AQgw]==)', contents)
        for base64_string in base64_strings:
            core.updatelog('Found base64 encoded string: ' + base64_string)
            stringarr = {"string": base64_string, "file": relpath}
            if stringarr not in found_b64s:
                found_b64s.append(stringarr)

    ''' EXTRACT COMMENTS FROM JS AND HTML FILES '''
    if core.extract_comments:
        if relpath.endswith(('.html', '.js', '.htm', '.css')):
            c1 = re.findall('\/\*.*?\*\/|\/\/(.*?)\n|\$', contents)
            c2 = re.findall('\/\* *([^\"\']+?) *\*\/', contents)
            c3 = re.findall('<!-- *(.+?) *-->', contents)
            c1.extend(c2)
            c1.extend(c3)
            comments = c1
            for comment in comments:
                if comment != "" and comment != " ":
                    comment = helper.escape(comment)  # escape html
                    core.updatelog('Extracted comment: ' + comment[:30] + ' ...')
                    cmarray = {"comment": comment, "file": relpath}
                    if cmarray not in found_cmnt:
                        found_cmnt.append(cmarray)

    result = {
        "urls": found_urls,
        "mails": found_mail,
        "ipv4": found_ipv4,
        "ipv6": found_ipv6,
        "base64": found_b64s,
        "btc": found_btcs,
        "comments": found_cmnt
    }
    return result
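# Illustrative sketch (not part of the original module): running extract() over a small
# in-memory snippet. The snippet and file name are made up; which intel types get picked
# up depends on the extract_* toggles loaded by core from settings.json.
def _example_extract():
    sample = 'fetch("https://example.com/api"); // contact admin@example.com'
    intel = extract(sample, 'background.js')
    print('URLs found: ' + str(len(intel['urls'])))
    print('Emails found: ' + str(len(intel['mails'])))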
def check():
    '''
    Check for update
    '''
    print('==== ExtAnalysis Update Check ====')
    core.updatelog('Current Version: ' + core.version)
    current_version = int(core.version.replace('.', ''))
    core.updatelog('Getting new version from github')
    v = scan.source_code(core.version_url)
    if v[0]:
        # Successfully acquired source code
        try:
            # validate version
            latest_version = int(v[1].replace('.', '').replace('\n', ''))
            core.updatelog('Latest version: ' + v[1])
            if latest_version > current_version:
                # Update available
                update_prompt = input('New Version available! Update Now? (y/n): ').lower()
                if update_prompt == 'y':
                    # update it
                    update()
                else:
                    core.updatelog('Update cancelled! Make sure to update the app later')
                    core.handle_exit()
            elif latest_version == current_version:
                print("you're already on the latest version!")
                core.handle_exit()
            else:
                print('The script was tampered with and i don\'t like it!')
                core.handle_exit()
        except Exception as e:
            core.updatelog('Invalid response from github')
            logging.error(traceback.format_exc())
            core.handle_exit()
    else:
        core.updatelog('Something went wrong while getting version from github')
        core.handle_exit()