def batch_query_bit9(new_hash_list):
    """Query Bit9 for hash metadata in 1000-hash batches and store results.

    Bit9 accepts at most 1000 hashes per request, so the input is split
    into chunks. Each returned hash record is timestamped and inserted
    into the DB under the 'Bit9' key; a 404 chunk is recorded as explicit
    "not found" entries so those hashes are not re-queried.

    :param new_hash_list: iterable of MD5 hash strings to look up.
    """
    data = {}
    #: Break list into 1000 unit chunks for Bit9
    bit9_batch_hash_list = list(split_seq(new_hash_list, 1000))
    for thousand_hashes in bit9_batch_hash_list:
        result = bit9.lookup_hashinfo(thousand_hashes)
        if result['response_code'] == 200 and result['results']['hashinfos']:
            for hash_info in result['results']['hashinfos']:
                if hash_info['isfound']:
                    data['md5'] = hash_info['fileinfo']['md5'].upper()
                else:
                    data['md5'] = hash_info['requestmd5'].upper()
                hash_info['timestamp'] = r.now()  # datetime.utcnow()
                data['Bit9'] = hash_info
                db_insert(data)
                data.clear()
        elif result['response_code'] == 404:
            # BUG FIX: iterate only the current chunk, not the whole input
            # list -- the original re-inserted a "not found" record for
            # every hash in new_hash_list each time one chunk returned 404.
            for new_hash in thousand_hashes:
                data = {'md5': new_hash.upper(),
                        'Bit9': {'timestamp': r.now(),  # datetime.utcnow()
                                 'isfound': False,
                                 'requestmd5': new_hash.upper()}}
                db_insert(data)
                data.clear()
def update_upload_file_metadata(sample):
    """Create or refresh the DB record describing an uploaded sample.

    Static file metadata (hashes, size, type) is written onto the record,
    and the per-upload details are appended to its 'user_uploads' list.

    :param sample: dict with the upload's hashes, file info and uploader.
    """
    upload_entry = {
        'filename': sample['filename'],
        'upload_date': sample['upload_date'],
        'uploaded_by': sample['uploaded_by'],
        'detection_ratio': sample['detection_ratio'],
    }
    metadata_fields = ('sha1', 'sha256', 'ssdeep',
                       'filesize', 'filetype', 'filemime')
    record = is_hash_in_db(sample['md5'])
    if record:
        # Existing record: refresh the static metadata in place.
        for field in metadata_fields:
            record[field] = sample[field]
        record.setdefault('user_uploads', []).append(upload_entry)
    else:
        # No record yet: build a fresh one keyed by MD5.
        record = {'md5': sample['md5']}
        for field in metadata_fields:
            record[field] = sample[field]
        record['user_uploads'] = [upload_entry]
    db_insert(record)
def run_metascan(this_file, file_md5):
    """Scan a file stream with Metascan and persist the results.

    Connects to the Metascan server from the 'Metascan' config section
    (falling back to 127.0.0.1:8008), submits the file, normalises each
    engine's 'def_time' to a datetime, computes a detection ratio, and
    merges everything into the file's DB record.

    :param this_file: file content/stream accepted by MetaScan.
    :param file_md5: MD5 of the file, used as the DB key.
    :returns: parsed Metascan results dict, or None on failure.
    """
    # TODO : remove these hardcoded creds
    if config.has_section('Metascan'):
        meta_scan = MetaScan(ip=config.get('Metascan', 'IP'),
                             port=config.get('Metascan', 'Port'))
    else:
        meta_scan = MetaScan(ip='127.0.0.1', port='8008')
    if not meta_scan.connected:
        return None
    results = meta_scan.scan_file_stream_and_get_results(this_file)
    if results.status_code != 200:
        print_error("MetaScan can not be reached.")
        return None
    metascan_results = results.json()
    #: Calculate AV Detection Ratio (and parse def_time strings in place)
    detection_ratio = dict(infected=0, count=0)
    scan_details = metascan_results[u'scan_results'][u'scan_details']
    for av in scan_details:
        scan_details[av][u'def_time'] = parser.parse(scan_details[av][u'def_time'])
        detection_ratio['count'] += 1
        if scan_details[av]['scan_result_i'] == 1:
            detection_ratio['infected'] += 1
    found = is_hash_in_db(file_md5)
    if found:
        found['user_uploads'][-1].setdefault('metascan_results', []).append(metascan_results)
        found['user_uploads'][-1]['detection_ratio']['infected'] += detection_ratio['infected']
        found['user_uploads'][-1]['detection_ratio']['count'] += detection_ratio['count']
        data = found
    else:
        # BUG FIX: the original built data = dict(md5=file_md5) and then
        # indexed data['user_uploads'][-1], which always raised KeyError.
        # Seed a new record with one upload entry instead.
        data = {'md5': file_md5,
                'user_uploads': [{'metascan_results': [metascan_results],
                                  'detection_ratio': detection_ratio}]}
    db_insert(data)
    return metascan_results
def update_upload_file_metadata(sample):
    """Insert or update a file record from an uploaded sample's metadata.

    :param sample: dict carrying the upload's hashes, file info and uploader.
    """
    existing = is_hash_in_db(sample['md5'])
    if existing:
        target = existing
        # Overwrite the static metadata on the stored record.
        target.update(
            sha1=sample['sha1'],
            sha256=sample['sha256'],
            ssdeep=sample['ssdeep'],
            filesize=sample['filesize'],
            filetype=sample['filetype'],
            filemime=sample['filemime'],
        )
    else:
        # First sighting of this hash -- build a new record.
        # (renamed from 'file', which shadowed the builtin)
        target = {
            'md5': sample['md5'],
            'sha1': sample['sha1'],
            'sha256': sample['sha256'],
            'ssdeep': sample['ssdeep'],
            'filesize': sample['filesize'],
            'filetype': sample['filetype'],
            'filemime': sample['filemime'],
        }
    target.setdefault('user_uploads', []).append({
        'filename': sample['filename'],
        'upload_date': sample['upload_date'],
        'uploaded_by': sample['uploaded_by'],
        'detection_ratio': sample['detection_ratio'],
    })
    db_insert(target)
def batch_query_bit9(new_hash_list):
    """Look up hashes on Bit9 in 1000-hash chunks and persist the answers.

    Found/not-found info for each hash is timestamped and stored under the
    'Bit9' key; a 404 response marks every hash of that chunk as not found.

    :param new_hash_list: iterable of MD5 hash strings to look up.
    """
    data = {}
    #: Break list into 1000 unit chunks for Bit9
    bit9_batch_hash_list = list(split_seq(new_hash_list, 1000))
    for thousand_hashes in bit9_batch_hash_list:
        result = bit9.lookup_hashinfo(thousand_hashes)
        if result['response_code'] == 200 and result['results']['hashinfos']:
            for hash_info in result['results']['hashinfos']:
                if hash_info['isfound']:
                    data['md5'] = hash_info['fileinfo']['md5'].upper()
                else:
                    data['md5'] = hash_info['requestmd5'].upper()
                hash_info['timestamp'] = r.now()  # datetime.utcnow()
                data['Bit9'] = hash_info
                db_insert(data)
                data.clear()
        elif result['response_code'] == 404:
            # BUG FIX: record "not found" only for the hashes of THIS chunk.
            # The original looped over new_hash_list, duplicating inserts
            # for the entire input whenever any single chunk returned 404.
            for new_hash in thousand_hashes:
                data = {
                    'md5': new_hash.upper(),
                    'Bit9': {
                        'timestamp': r.now(),  # datetime.utcnow(),
                        'isfound': False,
                        'requestmd5': new_hash.upper()
                    }
                }
                db_insert(data)
                data.clear()
def exif_scan(file_stream, file_md5):
    """Run EXIF extraction on a stream and store the results keyed by MD5.

    :param file_stream: file content/stream handed to the Exif wrapper.
    :param file_md5: MD5 hash used as the DB key.
    :returns: the inserted/updated record, or None when analysis failed.
    """
    scanner = exif.Exif(file_stream)
    if not scanner:
        print_error("EXIF Analysis Failed.")
        return None
    key, exif_results = scanner.scan()
    # Merge into the existing record if we have one, else start fresh.
    record = is_hash_in_db(file_md5) or dict(md5=file_md5)
    record[key] = exif_results
    db_insert(record)
    return record
def pe_scan(this_file, file_md5):
    """Run PE-format analysis on a file and store the results keyed by MD5.

    :param this_file: file content handed to the PE wrapper.
    :param file_md5: MD5 hash used as the DB key.
    :returns: the inserted/updated record, or None when the file is not a PE.
    """
    analyzer = pe.PE(this_file)
    if not analyzer.pe:
        print_error("PE Analysis Failed - This file might not be a PE Executable.")
        return None
    key, pe_results = analyzer.scan()
    # Merge into the existing record if we have one, else start fresh.
    record = is_hash_in_db(file_md5) or dict(md5=file_md5)
    record[key] = pe_results
    db_insert(record)
    return record
def trid_scan(file_stream, file_md5):
    """Run TrID file-type identification and store the results keyed by MD5.

    :param file_stream: file content/stream handed to the TrID wrapper.
    :param file_md5: MD5 hash used as the DB key.
    :returns: the inserted/updated record, or None when analysis failed.
    """
    identifier = trid.TrID(file_stream)
    if not identifier:
        print_error("TrID Analysis Failed.")
        return None
    key, trid_results = identifier.scan()
    # Merge into the existing record if we have one, else start fresh.
    record = is_hash_in_db(file_md5) or dict(md5=file_md5)
    record[key] = trid_results
    db_insert(record)
    return record
def sophos_scan(this_file):
    """Scan raw file bytes with the Sophos engine and persist the verdict.

    Appends the Sophos result to the most recent upload entry of the
    file's record and updates that upload's detection ratio.

    :param this_file: raw file contents to scan (also hashed for the key).
    :returns: the DB record that was inserted/updated.
    """
    my_sophos = sophos_engine()
    results = my_sophos.scan(PickleableFileSample.string_factory(this_file))
    file_md5_hash = hashlib.md5(this_file).hexdigest().upper()
    av_entry = scan_to_dict(results, 'Sophos')
    found = is_hash_in_db(file_md5_hash)
    if found:
        last_upload = found['user_uploads'][-1]
        last_upload.setdefault('av_results', []).append(av_entry)
        if results.infected:
            last_upload['detection_ratio']['infected'] += 1
        last_upload['detection_ratio']['count'] += 1
        data = found
    else:
        # BUG FIX: the original built data = dict(md5=...) and then indexed
        # data['user_uploads'][-1], which always raised KeyError. Seed a new
        # record with one upload entry carrying the AV result and ratio.
        data = {'md5': file_md5_hash,
                'user_uploads': [{
                    'av_results': [av_entry],
                    'detection_ratio': {
                        'infected': 1 if results.infected else 0,
                        'count': 1}}]}
    db_insert(data)
    return data
def single_query_virustotal(new_hash):
    """Fetch one hash's VirusTotal report and store it in the DB.

    :param new_hash: hash string to look up on VirusTotal.
    """
    response = vt.get_file_report(new_hash)
    # error = vt.handle_response_status_code(response)
    if response['response_code'] != 200:
        flash(response['error'])
        return
    vt_result = response['results']
    record = {}
    if vt_result['response_code']:
        # print "Evilness: %d" % vt_result['positives']
        record['md5'] = vt_result['md5'].upper()
    else:
        # Hash unknown to VT -- key the record by the requested resource.
        record['md5'] = vt_result['resource'].upper()
    vt_result['timestamp'] = r.now()  # datetime.utcnow()
    record['VirusTotal'] = vt_result
    db_insert(record)
    record.clear()
def single_query_bit9(new_hash):
    """Look up one hash on Bit9 and store the outcome in the DB.

    A 200 response with hash info is stored as a found record; a 404 is
    stored as an explicit "not found" entry so the hash is not re-queried.

    :param new_hash: MD5 hash string to look up.
    """
    data = {}
    result = bit9.lookup_hashinfo(new_hash)
    if result['response_code'] == 200 and result['results']['hashinfo']:
        hash_info = result['results']['hashinfo']
        data['md5'] = hash_info['fileinfo']['md5'].upper()
        hash_info['isfound'] = True
        hash_info['timestamp'] = r.now()  # datetime.utcnow()
        data['Bit9'] = hash_info
    elif result['response_code'] == 404:
        data = {'md5': new_hash.upper(),
                'Bit9': {'timestamp': r.now(),  # datetime.utcnow(),
                         'isfound': False,
                         'requestmd5': new_hash.upper()}}
    # BUG FIX: insert on BOTH the found (200) and not-found (404) paths --
    # the original only reliably inserted the 404 record and could push an
    # empty dict for any other response code. Guard so only a built record
    # is stored.
    if data:
        db_insert(data)
        data.clear()
def batch_query_virustotal(new_hash_list):
    """Query VirusTotal for file reports in batches of 25 and store them.

    :param new_hash_list: iterable of hash strings to look up.
    """
    data = {}
    #: Break list into 25 unit chunks for VirusTotal
    vt_batch_hash_list = list(split_seq(new_hash_list, 25))
    for twentyfive_hashes in vt_batch_hash_list:
        response = vt.get_file_report(list_to_string(twentyfive_hashes))
        # BUG FIX: response is a dict (see single_query_virustotal), so
        # hasattr(response, 'error') was always False and the error branch
        # was unreachable -- an error response then crashed on ['results'].
        # Test for the key instead.
        if 'error' in response:
            flash(response['error'])
        else:
            vt_results = response['results']
            for result in vt_results:
                if result['response_code']:
                    # print "Evilness: %d" % result['positives']
                    data['md5'] = result['md5'].upper()
                else:
                    # Hash unknown to VT -- key by the requested resource.
                    data['md5'] = result['resource'].upper()
                result['timestamp'] = r.now()  # datetime.utcnow()
                data['VirusTotal'] = result
                db_insert(data)
                data.clear()
def avg_scan(this_file):
    """Scan raw file bytes with AVG and persist the verdict keyed by MD5.

    Appends the AVG result to the most recent upload entry of the file's
    record and updates that upload's detection ratio.

    :param this_file: raw file contents to scan (also hashed for the key).
    :returns: the inserted/updated DB record, or None when AVG errored.
    """
    my_avg = avg_engine.AVG(this_file)
    result = my_avg.scan()
    # result = my_avg.scan(PickleableFileSample.string_factory(file))
    if 'error' in result[1]:
        flash(result[1]['error'], 'error')
        return None
    file_md5_hash = hashlib.md5(this_file).hexdigest().upper()
    found = is_hash_in_db(file_md5_hash)
    if found:
        last_upload = found['user_uploads'][-1]
        last_upload.setdefault('av_results', []).append(result[1])
        if result[1]['infected']:
            last_upload['detection_ratio']['infected'] += 1
        last_upload['detection_ratio']['count'] += 1
        data = found
    else:
        # BUG FIX: the original built data = dict(md5=...) and then indexed
        # data['user_uploads'][-1], which always raised KeyError. Seed a new
        # record with one upload entry carrying the AV result and ratio.
        data = {'md5': file_md5_hash,
                'user_uploads': [{
                    'av_results': [result[1]],
                    'detection_ratio': {
                        'infected': 1 if result[1]['infected'] else 0,
                        'count': 1}}]}
    db_insert(data)
    return data