def run(self):
    """Scan a stored file with a selected Yara rule and render any matches.

    Reads 'file_id' and 'rule_file' from the POST data. When at least one
    rule matches, the match rows are persisted to the datastore.

    Raises:
        IOError: if the requested rule file does not exist on disk.
    """
    db = Database()
    file_id = rule_file = False
    # Fix: ensure `results` is defined even when the POST params are missing
    # or the yara library is unavailable (previously a NameError).
    results = None
    if 'file_id' in self.request.POST:
        file_id = self.request.POST['file_id']
    if 'rule_file' in self.request.POST:
        rule_file = self.request.POST['rule_file']
    if rule_file and file_id and YARA:
        file_object = db.get_filebyid(file_id)
        file_data = file_object.read()
        # SECURITY fix: rule_file is user supplied; strip any directory
        # components so a crafted value (e.g. '../../etc/passwd') cannot
        # escape the yararules directory.
        rule_file = os.path.join('yararules', os.path.basename(rule_file))
        if os.path.exists(rule_file):
            rules = yara.compile(rule_file)
            matches = rules.match(data=file_data)
            results = {'rows': [], 'columns': ['Rule', 'Offset', 'Data']}
            for match in matches:
                for item in match.strings:
                    # item is an (offset, identifier, data) tuple.
                    results['rows'].append([match.rule, item[0], string_clean_hex(item[2])])
        else:
            raise IOError("Unable to locate rule file: {0}".format(rule_file))
        if len(results['rows']) > 0:
            # Store the results in datastore
            store_data = {'file_id': file_id, 'yara': results}
            db.create_datastore(store_data)
        else:
            results = 'NoMatch'
    self.render_type = 'file'
    self.render_data = {'YaraScanner': {'yara_list': sorted(os.listdir('yararules')),
                                        'yara_results': results}}
def run(self):
    """Parse a PST file stored in the DB and render its folder/email tree.

    Walks every folder starting at the PST root, stores the resulting
    structure in the datastore, then renders it.

    Raises:
        IOError: if the file_id is not present in the database.
    """
    db = Database()
    # https://github.com/williballenthin/python-registry
    file_id = self.request.POST['file_id']
    pst_file = db.get_filebyid(file_id)
    if not pst_file:
        raise IOError("File not found in DB")
    # Fix: removed a no-op `try/except Exception as e: raise` wrapper —
    # it caught nothing and left `e` unused; errors propagate identically.
    self.pst = pypff.file()
    self.pst.open_file_object(pst_file)
    base_path = u""
    root_node = self.pst.get_root_folder()
    self.email_dict = {}
    self.recursive_walk_folders(root_node, base_path)
    # Store in DB now so display() can serve it without re-parsing.
    store_data = {'file_id': file_id, 'pst': self.email_dict}
    db.create_datastore(store_data)
    self.render_type = 'file'
    self.render_data = {'PSTViewer': {'email_dict': self.email_dict, 'file_id': file_id}}
def run(self):
    """Open a stored PST file, walk its folder tree and render the emails."""
    db = Database()
    # https://github.com/williballenthin/python-registry
    file_id = self.request.POST['file_id']
    pst_file = db.get_filebyid(file_id)
    if not pst_file:
        raise IOError("File not found in DB")
    try:
        self.pst = pypff.file()
        self.pst.open_file_object(pst_file)
    except Exception as e:
        raise
    # Recursively collect every folder/message starting at the root.
    self.email_dict = {}
    self.recursive_walk_folders(self.pst.get_root_folder(), u"")
    # Persist the parsed structure so later display() calls can reuse it.
    db.create_datastore({'file_id': file_id, 'pst': self.email_dict})
    self.render_type = 'file'
    self.render_data = {'PSTViewer': {'email_dict': self.email_dict,
                                      'file_id': file_id}}
def run(self): global v db = Database() session_id = self.request.POST['session_id'] shell_input = self.request.POST['shell_input'] if shell_input == 'resetvolshellsession': v = {'volshell_id': None, 'volshell_object': None} session = db.get_session(session_id) # Shell type if session['session_profile'].lower().startswith('linux'): shell_type = 'linux_volshell' elif session['session_profile'].lower().startswith('mac'): shell_type = 'mac_volshell' else: shell_type = 'volshell' vol_shell_cmd = 'vol.py --profile={0} -f {1} {2}'.format(session['session_profile'], session['session_path'], shell_type ) # Determine if ipython is installed as this will change the expect regex try: import IPython expect_regex = '.*In .*\[[0-9]{1,3}.*\]:' except ImportError: expect_regex = '.*>>>' # Start or restore a shell if session_id in v: voll_shell = v[session_id]['volshell_object'] else: voll_shell = pexpect.spawn(vol_shell_cmd) voll_shell.expect(expect_regex) v[session_id] = {'volshell_object': None} # Now run the inputs voll_shell.sendline(shell_input) voll_shell.expect(expect_regex, timeout=60) v[session_id]['volshell_object'] = voll_shell before_data = self.strip_ansi_codes(voll_shell.before) after_data = self.strip_ansi_codes(voll_shell.after) #print "Before Data: ", before_data #print "After Data: ", after_data # lets start by getting input and returning it self.render_type = 'html' self.render_data = '<pre>{0}</pre>'.format(str(after_data)) self.render_javascript = open(os.path.join('extensions', self.extra_js), 'rb').read()
def run(self): global v db = Database() session_id = self.request.POST['session_id'] shell_input = self.request.POST['shell_input'] if shell_input == 'resetvolshellsession': v = {'volshell_id': None, 'volshell_object': None} session = db.get_session(session_id) # Shell type if session['session_profile'].lower().startswith('linux'): shell_type = 'linux_volshell' elif session['session_profile'].lower().startswith('mac'): shell_type = 'mac_volshell' else: shell_type = 'volshell' vol_shell_cmd = 'vol.py --profile={0} -f {1} {2}'.format( session['session_profile'], session['session_path'], shell_type) # Determine if ipython is installed as this will change the expect regex try: import IPython expect_regex = '.*In .*\[[0-9]{1,3}.*\]:' except ImportError: expect_regex = '.*>>>' # Start or restore a shell if session_id in v: voll_shell = v[session_id]['volshell_object'] else: voll_shell = pexpect.spawn(vol_shell_cmd) voll_shell.expect(expect_regex) v[session_id] = {'volshell_object': None} # Now run the inputs voll_shell.sendline(shell_input) voll_shell.expect(expect_regex, timeout=60) v[session_id]['volshell_object'] = voll_shell before_data = self.strip_ansi_codes(voll_shell.before) after_data = self.strip_ansi_codes(voll_shell.after) #print "Before Data: ", before_data #print "After Data: ", after_data # lets start by getting input and returning it self.render_type = 'html' self.render_data = '<pre>{0}</pre>'.format(str(after_data)) self.render_javascript = open( os.path.join('extensions', self.extra_js), 'rb').read()
def display(self):
    """Render previously-stored PST parse results for a file."""
    db = Database()
    file_id = self.request.POST['file_id']
    stored_rows = db.search_datastore({'file_id': file_id})
    pst_results = None
    # Last matching datastore row wins.
    for stored in stored_rows:
        if 'pst' in stored:
            pst_results = stored['pst']
    self.render_data = {'PSTViewer': {'email_dict': pst_results,
                                      'file_id': file_id}}
def run(self):
    """Submit a stored file to Cuckoo Sandbox and render the queued task.

    Supports both the 'modified' Cuckoo fork and stock Cuckoo API layouts.
    Optional POST params (machine / package / options) are passed through
    when non-empty.
    """
    db = Database()
    # Get correct API URIS
    cuckoo_modified = self.config['cuckoo']['modified']
    cuckoo_host = self.config['cuckoo']['host']
    if cuckoo_modified:
        submit_file_url = '{0}/api/tasks/create/file/'.format(cuckoo_host)
        status_url = '{0}/api/cuckoo/status'.format(cuckoo_host)
    else:
        submit_file_url = '{0}/tasks/create/file'.format(cuckoo_host)
        status_url = '{0}/cuckoo/status'.format(cuckoo_host)
    params = {}
    if 'file_id' in self.request.POST:
        file_id = self.request.POST['file_id']
        file_object = db.get_filebyid(file_id)
        file_data = file_object.read()
        files = {'file': (file_object.filename, file_data)}
    if 'machine' in self.request.POST:
        if self.request.POST['machine'] != '':
            params['machine'] = self.request.POST['machine']
    if 'package' in self.request.POST:
        if self.request.POST['package'] != '':
            params['package'] = self.request.POST['package']
    if 'options' in self.request.POST:
        if self.request.POST['options'] != '':
            params['options'] = self.request.POST['options']
    submit_file = self.api_query('post', submit_file_url, files=files, params=params)
    response_json = submit_file.json()
    # The two API flavours report the new task id in different shapes.
    try:
        print("Task Submitted ID: {0}".format(response_json['task_id']))
        task_id = response_json['task_id']
    except KeyError:
        try:
            print("Task Submitted ID: {0}".format(response_json['data']['task_ids'][0]))
            task_id = response_json['data']['task_ids'][0]
        except KeyError:
            print(response_json)
            task_id = 0
    # Fix: the first column previously held the literal string 'ID' instead
    # of the real task id (the display() table expects the id here).
    rows = [[task_id, 'Pending', 'Running', '',
             '{0}/analysis/{1}'.format(cuckoo_host, task_id)]]
    self.render_type = 'file'
    self.render_data = {'CuckooSandbox': {'machine_list': None, 'results': rows, 'file_id': file_id}}
def display(self):
    """Render whether a pre-extracted strings file exists for this file.

    string_id is the GridFS id of the stored strings file, or False when
    strings have not been extracted yet.
    """
    db = Database()
    if 'file_id' in self.request.POST:
        file_id = self.request.POST['file_id']
        # Check to see if we already have strings stored.
        new_strings = db.get_strings(file_id)
        if new_strings:
            string_id = new_strings._id
        else:
            string_id = False
        # Fix: removed leftover debug `print string_id`.
        self.render_data = {'ExtractStrings': {'string_id': string_id}}
def run(self):
    """Submit a stored file to Cuckoo Sandbox and render the queued task row."""
    db = Database()
    # Build the submission URL for whichever API flavour is configured.
    cuckoo_modified = self.config['cuckoo']['modified']
    cuckoo_host = self.config['cuckoo']['host']
    if cuckoo_modified == 'True':
        submit_file_url = '{0}/api/tasks/create/file/'.format(cuckoo_host)
    else:
        submit_file_url = '{0}/tasks/create/file'.format(cuckoo_host)
    params = {}
    if 'file_id' in self.request.POST:
        file_id = self.request.POST['file_id']
        file_object = db.get_filebyid(file_id)
        file_data = file_object.read()
        files = {'file': (file_object.filename, file_data)}
    # Optional submission parameters, forwarded only when non-empty.
    for option in ('machine', 'package', 'options'):
        if option in self.request.POST and self.request.POST[option] != '':
            params[option] = self.request.POST[option]
    submit_file = self.api_query('post', submit_file_url, files=files, params=params)
    response_json = submit_file.json()
    if 'error' in response_json and response_json['error']:
        rows = [['ID', 'Error', response_json['error_value'], '', '']]
    else:
        # The two API flavours report the task id in different shapes.
        try:
            print("Task Submitted ID: {0}".format(response_json['task_id']))
            task_id = response_json['task_id']
        except KeyError:
            try:
                print("Task Submitted ID: {0}".format(response_json['data']['task_ids'][0]))
                task_id = response_json['data']['task_ids'][0]
            except KeyError:
                task_id = 0
        rows = [[task_id, 'Pending', 'Running', '',
                 '{0}/analysis/{1}'.format(cuckoo_host, task_id)]]
    self.render_type = 'file'
    self.render_data = {'CuckooSandbox': {'machine_list': None,
                                          'results': rows,
                                          'file_id': file_id}}
def display(self):
    """Render cached VirusTotal results for a file, if any exist."""
    db = Database()
    file_id = self.request.POST['file_id']
    records = db.search_datastore({'file_id': file_id})
    vt_results = None
    state = 'Not Checked'
    for record in records:
        if 'vt' in record:
            vt_results = record['vt']
            # An empty/None result means the scan was submitted but is
            # still running.
            state = 'complete' if vt_results else 'pending'
    self.render_data = {'VirusTotalSearch': {'state': state,
                                             'vt_results': vt_results,
                                             'file_id': file_id}}
def display(self):
    """Render the PST structure previously stored by run()."""
    db = Database()
    file_id = self.request.POST['file_id']
    pst_results = None
    # Scan every datastore row for this file; the last 'pst' entry wins.
    for entry in db.search_datastore({'file_id': file_id}):
        if 'pst' in entry:
            pst_results = entry['pst']
    payload = {'email_dict': pst_results, 'file_id': file_id}
    self.render_data = {'PSTViewer': payload}
def run(self):
    """Extract EXIF metadata from a stored file with exiftool.

    If the file is an image, its bytes are also base64-encoded so the
    template can render an inline preview. Errors are reported through
    metadata['error'] rather than raised.
    """
    db = Database()
    metadata = {}
    img_src = None
    file_id = None  # Fix: defined even when the POST param is missing
    if 'file_id' in self.request.POST:
        file_id = self.request.POST['file_id']
        file_object = db.get_filebyid(file_id)
        file_data = file_object.read()
        # exiftool can only read from disk, so spool to a temp file.
        with tempfile.NamedTemporaryFile() as tmp:
            tmp.write(file_data)
            # Fix: flush so exiftool sees the whole file, not a buffered
            # partial write.
            tmp.flush()
            try:
                with exiftool.ExifTool() as et:
                    metadata = et.get_metadata(tmp.name)
                if 'File:MIMEType' in metadata:
                    if 'image' in metadata['File:MIMEType']:
                        img_src = b64encode(file_data)
                # Clean up the metadata to remove things we don't need.
                # (Fix: removed leftover debug prints from this loop.)
                remove = ['File:Directory', 'File:FileInodeChangeDate',
                          'File:FileModifyDate', 'File:FileAccessDate',
                          'SourceFile', 'File:FilePermissions']
                for item in remove:
                    if item in metadata:
                        del metadata[item]
            except OSError:
                metadata['error'] = "Exiftool is not installed. 'sudo apt-get install libimage-exiftool-perl'"
            except Exception as e:
                # Fix: typo in the error message ('colleting' -> 'collecting').
                metadata['error'] = "Error collecting EXIF data: {0}".format(e)
    self.render_type = 'file'
    self.render_data = {'ExifData': {'results': metadata,
                                     'file_id': file_id,
                                     'img_src': img_src}}
def run(self):
    """Pull EXIF metadata for a stored file via exiftool and render it."""
    db = Database()
    metadata = {}
    img_src = None
    if 'file_id' in self.request.POST:
        file_id = self.request.POST['file_id']
        file_object = db.get_filebyid(file_id)
        file_data = file_object.read()
        # exiftool reads from disk, so write the blob to a temp file first.
        with tempfile.NamedTemporaryFile() as tmp:
            tmp.write(file_data)
            try:
                with exiftool.ExifTool() as et:
                    metadata = et.get_metadata(tmp.name)
                if 'File:MIMEType' in metadata:
                    if 'image' in metadata['File:MIMEType']:
                        img_src = b64encode(file_data)
                # Strip filesystem-level keys that are not useful to show.
                remove = ['File:Directory', 'File:FileInodeChangeDate',
                          'File:FileModifyDate', 'File:FileAccessDate',
                          'SourceFile', 'File:FilePermissions']
                print(metadata)
                for unwanted in remove:
                    if unwanted in metadata:
                        print(metadata[unwanted])
                        print("Dropping")
                        del metadata[unwanted]
            except OSError:
                metadata['error'] = "Exiftool is not installed. 'sudo apt-get install libimage-exiftool-perl'"
            except Exception as e:
                metadata['error'] = "Error colleting EXIF data: {0}".format(e)
    self.render_type = 'file'
    self.render_data = {'ExifData': {'results': metadata,
                                     'file_id': file_id,
                                     'img_src': img_src}}
def run(self):
    """Render a hex-dump view of a byte range of the stored file as HTML."""
    db = Database()
    if 'file_id' in self.request.POST:
        file_id = self.request.POST['file_id']
        file_object = db.get_filebyid(file_id)
        file_data = file_object.read()
        html_string = ''
        start_offset = int(self.request.POST['start_offset'])
        end_offset = int(self.request.POST['end_offset'])
        # Clamp the requested window to the file size.
        if start_offset >= len(file_data):
            start_offset = 0
        if end_offset > len(file_data):
            end_offset = len(file_data)
        hex_data = file_data[start_offset:end_offset]
        row_template = ('\n<div class="row"><span class="text-info mono">{0}</span> '
                        '<span class="text-primary mono">{1}</span> <span class="text-success mono">'
                        '|{2}|</span></div>')
        offset_counter = start_offset
        # One output row per 16-byte chunk.
        for chunk_start in range(0, len(hex_data), 16):
            chunk = hex_data[chunk_start:chunk_start + 16]
            encoded = chunk.encode('hex')
            hex_chars = " ".join(encoded[pos:pos + 2]
                                 for pos in range(0, len(encoded), 2))
            ascii_chars = ''.join(c if c in string.printable else '.'
                                  for c in chunk)
            html_string += row_template.format(
                "{0:#0{1}x}".format(offset_counter, 8), hex_chars, ascii_chars)
            offset_counter += 16
        self.render_type = 'html'
        self.render_data = html_string
def run(self):
    """Build an HTML hex dump (offset / hex bytes / ASCII) for a file slice."""
    db = Database()
    if 'file_id' in self.request.POST:
        file_id = self.request.POST['file_id']
        file_object = db.get_filebyid(file_id)
        file_data = file_object.read()
        html_string = ''
        start_offset = int(self.request.POST['start_offset'])
        end_offset = int(self.request.POST['end_offset'])
        # Keep the window inside the file bounds.
        if start_offset >= len(file_data):
            start_offset = 0
        if end_offset > len(file_data):
            end_offset = len(file_data)
        hex_data = file_data[start_offset:end_offset]
        chunks = [hex_data[i:i + 16] for i in range(0, len(hex_data), 16)]
        for index, chunk in enumerate(chunks):
            offset = start_offset + index * 16
            hex_encoded = chunk.encode('hex')
            byte_pairs = [hex_encoded[j:j + 2]
                          for j in range(0, len(hex_encoded), 2)]
            hex_chars = " ".join(byte_pairs)
            printable = []
            for ch in chunk:
                printable.append(ch if ch in string.printable else '.')
            ascii_chars = ''.join(printable)
            html_string += '\n<div class="row"><span class="text-info mono">{0}</span> ' \
                           '<span class="text-primary mono">{1}</span> <span class="text-success mono">' \
                           '|{2}|</span></div>'.format("{0:#0{1}x}".format(offset, 8),
                                                       hex_chars, ascii_chars)
        self.render_type = 'html'
        self.render_data = html_string
try: from vt_key import API_KEY VT_KEY = True except ImportError: VT_KEY = False logger.warning("Unable to import VirusTotal API Key from vt_key.py") ## # Import The volatility Interface and DB Class ## import vol_interface from vol_interface import RunVol try: from web.database import Database db = Database() except Exception as e: logger.error("Unable to access mongo database: {0}".format(e)) sys.exit() ## # Helpers ## volutility_version = '0.1' volrc_file = os.path.join(os.path.expanduser('~'), '.volatilityrc') def string_clean_hex(line): line = str(line)
def run(self):
    """Look up (or upload) a stored file on VirusTotal and render the state.

    State values rendered: 'error', 'pending', 'missing', 'complete'.
    Completed results are persisted to the datastore.
    """
    db = Database()
    if 'file_id' in self.request.POST:
        # Get file object from DB
        file_id = self.request.POST['file_id']
        file_object = db.get_filebyid(file_id)
        sha256 = file_object.sha256
        # Fix: removed a debug print that wrote the API key to the console.
        # Fix: initialise both so every code path below has them defined
        # (upload-success previously left vt_results unset -> NameError,
        # and a non-200 response left state unset).
        vt_results = None
        state = 'error'
        if self.config['virustotal']['api_key'] == 'None':
            vt_results = 'No API Key set in volutility.conf'
        else:
            # Init the API with key from config
            vt = PublicApi(self.config['virustotal']['api_key'])
            # If we upload
            if 'upload' in self.request.POST:
                response = vt.scan_file(file_object.read(), filename=file_object.filename, from_disk=False)
                if response['results']['response_code'] == 1 and \
                        'Scan request successfully queued' in response['results']['verbose_msg']:
                    print("File Uploaded and pending")
                    state = 'pending'
                else:
                    print(response)
                    state = 'error'
                    vt_results = None
            # Else just get the results
            else:
                # get results from VT
                response = vt.get_file_report(sha256)
                vt_results = {}
                # Valid response
                if response['response_code'] == 200:
                    print("Valid Response from server")
                    # Not present in data set: prompt to upload
                    if response['results']['response_code'] == 0:
                        state = 'missing'
                    elif response['results']['response_code'] == -2:
                        # Still Pending
                        state = 'pending'
                    elif response['results']['response_code'] == 1:
                        # Results available
                        vt_results['permalink'] = response['results']['permalink']
                        vt_results['total'] = response['results']['total']
                        vt_results['positives'] = response['results']['positives']
                        vt_results['scandate'] = response['results']['scan_date']
                        vt_results['scans'] = response['results']['scans']
                        # Store the results in datastore
                        state = 'complete'
                        store_data = {'file_id': file_id, 'vt': vt_results}
                        db.create_datastore(store_data)
        self.render_type = 'file'
        self.render_data = {'VirusTotalSearch': {'state': state,
                                                 'vt_results': vt_results,
                                                 'file_id': file_id}}
def display(self):
    """Render existing Cuckoo task rows and the sandbox machine list.

    Queries the configured Cuckoo API (modified fork or stock) for tasks
    matching the file's sha256 and for available analysis machines.
    """
    db = Database()
    cuckoo_modified = self.config['cuckoo']['modified']
    cuckoo_host = self.config['cuckoo']['host']
    if cuckoo_modified:
        search_url = '{0}/api/tasks/search/sha256'.format(cuckoo_host)
        status_url = '{0}/api/cuckoo/status'.format(cuckoo_host)
        machine_url = '{0}/api/machines/list/'.format(cuckoo_host)
    else:
        search_url = '{0}/tasks/list'.format(cuckoo_host)
        status_url = '{0}/cuckoo/status'.format(cuckoo_host)
        machine_url = '{0}/api/machines/list/'.format(cuckoo_host)
    # Get a list of machines from the API to populate a dropdown
    machine_list = []
    json_response = self.api_query('get', machine_url)
    if json_response:
        json_response = json_response.json()
        json_data = json_response['data']
        for machine in json_data:
            machine_string = '{0}: {1}'.format(machine['name'], ','.join(machine['tags']))
            machine_dict = {'name': machine['name'], 'display': machine_string}
            machine_list.append(machine_dict)
    else:
        machine_list.append('Unable to connect to Cuckoo')
    file_id = False
    if 'file_id' in self.request.POST:
        file_id = self.request.POST['file_id']
        file_object = db.get_filebyid(file_id)
        file_hash = file_object.sha256
    else:
        file_hash = 'None'
    # Check for existing Session.
    # Fix: rows is now always defined — previously a "Sample not found"
    # reply from the modified API left it unset and crashed with NameError.
    rows = []
    if cuckoo_modified:
        search_results = self.api_query('get', '{0}/{1}'.format(search_url, file_hash)).json()
        if search_results['data'] != "Sample not found in database":
            print("Found {0} Results".format(len(search_results['data'])))
            for result in search_results['data']:
                rows.append([result['id'], result['started_on'], result['status'],
                             result['completed_on'],
                             '{0}/analysis/{1}'.format(cuckoo_host, result['id'])])
    else:
        search_results = self.api_query('get', search_url).json()
        count = 0
        if 'tasks' in search_results:
            for result in search_results['tasks']:
                # Best effort: skip malformed task entries from the API.
                try:
                    if result['sample']['sha256'] == file_hash:
                        rows.append([result['id'], result['started_on'],
                                     result['status'], result['completed_on']])
                        count += 1
                except Exception:
                    pass
    self.render_type = 'file'
    self.render_data = {'CuckooSandbox': {'machine_list': machine_list,
                                          'results': rows,
                                          'file_id': file_id}}
def run(self):
    """Open a stored SQLite database file and render its tables and indexes.

    Raises:
        IOError: if the file_id is not present in the database.
    """
    db = Database()
    # https://github.com/williballenthin/python-registry
    file_id = self.request.POST['file_id']
    db_file = db.get_filebyid(file_id)
    if not db_file:
        raise IOError("File not found in DB")
    # Sqlite can only operate on a real file, so spool to a temp file.
    sqlite_data = {'table_meta': [], 'index_meta': [], 'table_data': []}
    new_data = []
    first_table = None
    with tempfile.NamedTemporaryFile() as tmp:
        tmp.write(db_file.read())
        # Fix: flush so sqlite sees the full database, not a buffered
        # partial write.
        tmp.flush()
        # Now open in sqlite
        try:
            conn = sqlite3.connect(tmp.name)
            cursor = conn.cursor()
            # Get Table meta data
            cursor.execute("SELECT * FROM sqlite_master WHERE type='table';")
            table_data = cursor.fetchall()
            # Do everything under this for loop.
            for table in table_data:
                table_dict = {'Name': table[1], 'Meta': None, 'Data': None}
                table_meta_dict = {'type': table[0],
                                   'name': table[1],
                                   'int': table[3],
                                   'sqlquery': table[4]}
                table_dict['Meta'] = table_meta_dict
                # Set active table
                if not first_table:
                    first_table = table[1]
                # Get Table data.
                # Fix: quote (and escape) the identifier — table names come
                # from an untrusted uploaded file and can contain spaces or
                # quotes that would break/inject into the statement.
                cursor.execute('SELECT * FROM "{0}"'.format(table[1].replace('"', '""')))
                table_data = cursor.fetchall()
                table_rows = []
                for row in table_data:
                    new_row = []
                    for col in row:
                        # BLOB columns can fail str(); fall back to hex.
                        try:
                            new_row.append(str(col))
                        except Exception:
                            new_row.append(col.encode('hex'))
                    table_rows.append(new_row)
                col_names = [str(description[0]) for description in cursor.description]
                table_dict['Data'] = {'columns': col_names, 'rows': table_rows}
                new_data.append(table_dict)
        except Exception:
            raise
        # Get index meta data
        cursor.execute("SELECT * FROM sqlite_master WHERE type='index';")
        index_data = cursor.fetchall()
        for index in index_data:
            sqlite_data['index_meta'].append({'type': index[0],
                                              'name': index[2],
                                              'int': index[3],
                                              'sqlquery': index[4]})
    self.render_type = 'file'
    self.render_data = {'SqliteViewer': {'sqlite_data': new_data, 'file_id': file_id}}
    self.render_javascript = "$('#sqlitescan').remove();"
def display(self):
    """Render existing Cuckoo task results and the machine list for a file.

    Handles both the 'modified' Cuckoo fork ('True' string flag) and the
    stock API, which use different URLs and response shapes.
    """
    db = Database()
    cuckoo_modified = self.config['cuckoo']['modified']
    cuckoo_host = self.config['cuckoo']['host']
    if cuckoo_modified == 'True':
        search_url = '{0}/api/tasks/search/sha256'.format(cuckoo_host)
        machine_url = '{0}/api/machines/list/'.format(cuckoo_host)
    else:
        search_url = '{0}/tasks/list'.format(cuckoo_host)
        machine_url = '{0}/machines/list'.format(cuckoo_host)
    # Get a list of machines from the API to populate a dropdown
    machine_list = []
    json_response = self.api_query('get', machine_url)
    if json_response:
        json_response = json_response.json()
        if cuckoo_modified == 'True':
            json_data = json_response['data']
        else:
            json_data = json_response['machines']
        for machine in json_data:
            machine_string = '{0}: {1}'.format(machine['name'], ','.join(machine['tags']))
            machine_dict = {'name': machine['name'], 'display': machine_string,
                            'label': machine['label']}
            machine_list.append(machine_dict)
    else:
        machine_list.append('Unable to connect to Cuckoo')
    # Fix: dropped unused copy-pasted `rule_file` local.
    file_id = False
    if 'file_id' in self.request.POST:
        file_id = self.request.POST['file_id']
        file_object = db.get_filebyid(file_id)
        file_hash = file_object.sha256
    else:
        file_hash = 'None'
    # Check for existing Entry.
    # Fix: rows is now always defined — previously a "Sample not found"
    # reply from the modified API left it unset and crashed with NameError.
    rows = []
    if cuckoo_modified == 'True':
        search_results = self.api_query('get', '{0}/{1}'.format(search_url, file_hash)).json()
        if search_results['data'] != "Sample not found in database":
            print("Found {0} Results".format(len(search_results['data'])))
            for result in search_results['data']:
                rows.append([result['id'], result['started_on'], result['status'],
                             result['completed_on'],
                             '{0}/analysis/{1}'.format(cuckoo_host, result['id'])])
    else:
        search_results = self.api_query('get', search_url).json()
        count = 0
        if 'tasks' in search_results:
            for result in search_results['tasks']:
                # Best effort: skip malformed task entries from the API.
                try:
                    if result['sample']['sha256'] == file_hash:
                        rows.append([result['id'], result['started_on'],
                                     result['status'], result['completed_on']])
                        count += 1
                except Exception:
                    pass
    self.render_type = 'file'
    self.render_data = {'CuckooSandbox': {'machine_list': machine_list,
                                          'results': rows,
                                          'file_id': file_id}}
def run(self):
    """Look up (or upload) a stored file on VirusTotal and render the state.

    State values rendered: 'error', 'pending', 'missing', 'complete'.
    Results (and state) are persisted to the datastore when complete.
    """
    db = Database()
    self.render_javascript = ""
    if not self.config['virustotal']['api_key'] or not VT_LIB:
        self.render_type = 'error'
        self.render_data = "Unable to use Virus Total. No Key or Library Missing. Check the Console for details"
        # Fix: bail out — the original fell through and still called the API.
        return
    if 'file_id' in self.request.POST:
        # Get file object from DB
        file_id = self.request.POST['file_id']
        file_object = db.get_filebyid(file_id)
        sha256 = file_object.sha256
        # Fix: self.config is a dict (subscripted in the guard above);
        # `self.config.api_key` would raise AttributeError.
        vt = PublicApi(self.config['virustotal']['api_key'])
        # Fix: initialise both so every code path below has them defined
        # (upload-success previously left vt_results unset, and a non-200
        # response left state unset).
        vt_results = None
        state = 'error'
        # If we upload
        if 'upload' in self.request.POST:
            response = vt.scan_file(file_object.read(), filename=file_object.filename, from_disk=False)
            if response['results']['response_code'] == 1 and \
                    'Scan request successfully queued' in response['results']['verbose_msg']:
                print("File Uploaded and pending")
                state = 'pending'
            else:
                print(response)
                state = 'error'
                vt_results = None
        # Else just get the results
        else:
            # get results from VT
            response = vt.get_file_report(sha256)
            vt_results = {}
            # Valid response
            if response['response_code'] == 200:
                print("Valid Response from server")
                # Not present in data set: prompt to upload
                if response['results']['response_code'] == 0:
                    state = 'missing'
                elif response['results']['response_code'] == -2:
                    # Still Pending
                    state = 'pending'
                elif response['results']['response_code'] == 1:
                    # Results available
                    vt_results['permalink'] = response['results']['permalink']
                    vt_results['total'] = response['results']['total']
                    vt_results['positives'] = response['results']['positives']
                    vt_results['scandate'] = response['results']['scan_date']
                    vt_results['scans'] = response['results']['scans']
                    # Store the results in datastore
                    state = 'complete'
                    store_data = {'file_id': file_id, 'vt': vt_results, 'state': state}
                    db.create_datastore(store_data)
        self.render_type = 'file'
        self.render_data = {'VirusTotalSearch': {'state': state,
                                                 'vt_results': vt_results,
                                                 'file_id': file_id}}
def run(self):
    """Extract ASCII/Unicode strings from a stored file and save to GridFS.

    Renders a download button pointing at the stored strings file. Results
    are cached: if strings already exist for the file they are reused.
    """
    db = Database()
    # Get Options.
    # Fix: min_length was read from the request but never used — the
    # extraction always ran with 4. Honour it (POST values are strings).
    if "min_length" in self.request.POST:
        min_len = int(self.request.POST['min_length'])
    else:
        min_len = 4
    if 'file_id' in self.request.POST:
        file_id = self.request.POST['file_id']
        # Check to see if we already have strings stored.
        new_strings = db.get_strings(file_id)
        if new_strings:
            string_id = new_strings._id
        else:
            file_object = db.get_filebyid(file_id)
            # Always get ASCII and Unicode
            file_data = file_object.read()
            ascii_strings = self.ascii_strings(file_data, min_len)
            unicode_strings = self.unicode_strings(file_data, min_len)
            if HAVE_FLOSS:
                # Advanced Floss analysis needs a file on disk.
                with tempfile.NamedTemporaryFile() as tmp:
                    tmp.write(file_data)
                    # Fix: flush before anything reads the file by name.
                    tmp.flush()
                    file_path = tmp.name
                    if self.is_supported_file_type(file_path):
                        # Fix: removed debug print before the re-raise.
                        vw = viv_utils.getWorkspace(file_path, should_save=False)
                        # Decode Strings / Stack Strings: not implemented yet.
            # Generate the final output file
            string_list = '##### ASCII Strings #####\n {0} \n ##### Unicode Strings #####\n {1}'.format(
                ascii_strings, unicode_strings)
            # String lists can exceed the 16Mb bson limit, so the list is
            # stored in GridFS rather than the datastore.
            # (Fix: removed unused `store_data` dict and debug print.)
            string_id = db.create_file(string_list, 'session_id', 'sha256',
                                       '{0}_strings.txt'.format(file_id))
    self.render_type = 'html'
    self.render_data = '<td><a class="btn btn-success" role="button" href="/download/file/{0}">Download</a></td>'.format(
        string_id)
def run(self):
    """Extract ASCII/Unicode strings from a stored file into GridFS and
    render a download link."""
    db = Database()
    # Read the (currently unused) minimum length option.
    min_len = self.request.POST['min_length'] if "min_length" in self.request.POST else 4
    if 'file_id' in self.request.POST:
        file_id = self.request.POST['file_id']
        # Reuse previously extracted strings when available.
        existing = db.get_strings(file_id)
        if existing:
            string_id = existing._id
        else:
            file_object = db.get_filebyid(file_id)
            # Always extract both ASCII and Unicode strings.
            file_data = file_object.read()
            ascii_strings = self.ascii_strings(file_data, 4)
            unicode_strings = self.unicode_strings(file_data, 4)
            if HAVE_FLOSS:
                # Advanced Floss analysis needs a real file on disk.
                with tempfile.NamedTemporaryFile() as tmp:
                    tmp.write(file_data)
                    file_path = tmp.name
                    if self.is_supported_file_type(file_path):
                        try:
                            vw = viv_utils.getWorkspace(file_path, should_save=False)
                        except Exception:
                            print("ahhhhhhhhhhhhhh")
                            raise
                        # Decoded / stack string extraction not wired up yet.
            # Assemble the final output document.
            string_list = '##### ASCII Strings #####\n {0} \n ##### Unicode Strings #####\n {1}'.format(
                ascii_strings, unicode_strings)
            # String lists can exceed the 16Mb bson limit, so store the
            # blob in GridFS instead of the datastore.
            store_data = {'file_id': file_id, 'string_list': string_list}
            string_id = db.create_file(string_list, 'session_id', 'sha256',
                                       '{0}_strings.txt'.format(file_id))
            print(string_id)
    self.render_type = 'html'
    self.render_data = '<td><a class="btn btn-success" role="button" href="/download/file/{0}">Download</a></td>'.format(
        string_id)
def run(self):
    """Load a stored SQLite database and render table/index metadata and rows."""
    db = Database()
    # https://github.com/williballenthin/python-registry
    file_id = self.request.POST['file_id']
    db_file = db.get_filebyid(file_id)
    if not db_file:
        raise IOError("File not found in DB")
    # Sqlite needs a real file on disk, so spool the blob to a temp file.
    sqlite_data = {'table_meta': [], 'index_meta': [], 'table_data': []}
    new_data = []
    first_table = None
    with tempfile.NamedTemporaryFile() as tmp:
        tmp.write(db_file.read())
        # Now open in sqlite
        try:
            conn = sqlite3.connect(tmp.name)
            cursor = conn.cursor()
            # Enumerate the tables from the schema catalogue.
            cursor.execute("SELECT * FROM sqlite_master WHERE type='table';")
            table_data = cursor.fetchall()
            for tbl in table_data:
                entry = {'Name': tbl[1], 'Meta': None, 'Data': None}
                entry['Meta'] = {'type': tbl[0],
                                 'name': tbl[1],
                                 'int': tbl[3],
                                 'sqlquery': tbl[4]}
                # Remember the first table so the UI can select it.
                if not first_table:
                    first_table = tbl[1]
                # Dump the table contents.
                cursor.execute("SELECT * FROM {0}".format(tbl[1]))
                table_data = cursor.fetchall()
                dumped_rows = []
                for record in table_data:
                    converted = []
                    for field in record:
                        # BLOB fields may not stringify; fall back to hex.
                        try:
                            converted.append(str(field))
                        except:
                            converted.append(field.encode('hex'))
                    dumped_rows.append(converted)
                headers = [str(description[0]) for description in cursor.description]
                entry['Data'] = {'columns': headers, 'rows': dumped_rows}
                new_data.append(entry)
        except Exception as e:
            raise
        # Collect index metadata from the schema catalogue.
        cursor.execute("SELECT * FROM sqlite_master WHERE type='index';")
        index_data = cursor.fetchall()
        for idx in index_data:
            sqlite_data['index_meta'].append({'type': idx[0],
                                              'name': idx[2],
                                              'int': idx[3],
                                              'sqlquery': idx[4]})
    self.render_type = 'file'
    self.render_data = {'SqliteViewer': {'sqlite_data': new_data,
                                         'file_id': file_id}}
    self.render_javascript = "$('#sqlitescan').remove();"
def run(self):
    """Open a registry hive stored in the DB and return one key's children
    and values as JSON for the tree view.

    POST params: 'file_id' (hive in GridFS) and 'key' (URL-encoded key
    path, or 'root' for the hive root).
    """
    db = Database()
    # https://github.com/williballenthin/python-registry
    file_id = self.request.POST['file_id']
    key_request = urllib.unquote(self.request.POST['key'])
    reg_data = db.get_filebyid(file_id)
    reg = Registry.Registry(reg_data)
    if key_request == 'root':
        key = reg.root()
    else:
        try:
            key = reg.open(key_request)
        except Registry.RegistryKeyNotFoundException:
            # Key does not exist; fall through and render an empty response.
            key = False
    # Fix: json_response is now defined even when the key lookup failed
    # (previously json.dumps raised NameError on an unknown key).
    json_response = {}
    if key:
        # Get the parent key path, minus the hive root component.
        try:
            parent_path = "\\".join(
                key.parent().path().strip("\\").split('\\')[1:])
            # Fix: removed debug print of the parent path.
        except Registry.RegistryKeyHasNoParentException:
            parent_path = None
        json_response['parent_key'] = parent_path
        # Get Sub Keys
        child_keys = []
        for sub in self.reg_sub_keys(key):
            sub_path = "\\".join(sub.path().strip("\\").split('\\')[1:])
            child_keys.append(sub_path)
        # Get Values
        key_values = []
        for value in key.values():
            val_name = value.name()
            val_type = value.value_type_str()
            val_value = value.value()
            # Replace NUL chars so the value renders cleanly.
            try:
                val_value = val_value.replace('\x00', ' ')
            except AttributeError:
                pass
            # Hex-encode binary values that contain non-printable bytes.
            # (Fix: `not all(...)` instead of `== False`.)
            if val_type == 'RegBin' and not all(c in string.printable for c in val_value):
                val_value = val_value.encode('hex')
            if val_type == 'RegNone' and not all(c in string.printable for c in val_value):
                val_value = val_value.encode('hex')
            # Assemble and send
            key_values.append([val_name, val_type, val_value])
        json_response['child_keys'] = child_keys
        json_response['key_values'] = key_values
    json_response = json.dumps(json_response)
    self.render_type = 'json'
    self.render_data = json_response
    # Fix: close the javascript file instead of leaking the handle.
    with open(os.path.join('extensions', self.extra_js), 'rb') as js_file:
        self.render_javascript = js_file.read()