def test_send_analysis_by_file_sends_analysis_and_waits_specific_time_until_compilation(self):
    """Sending with a numeric ``wait`` should block for at least that many seconds."""
    # Arrange
    with responses.RequestsMock() as api_mock:
        api_mock.add('POST',
                     url=f'{self.full_url}/analyze',
                     status=201,
                     json={'result_url': 'a/sd/asd'})
        api_mock.add('GET',
                     url=f'{self.full_url}/analyses/asd',
                     status=200,
                     json={'result': 'report', 'status': 'succeeded'})
        file_analysis = FileAnalysis(file_path='a')
        wait_seconds = 1
        with patch(self.patch_prop, mock_open(read_data='data')):
            # Act — measure how long the blocking send takes.
            started_at = datetime.datetime.utcnow()
            file_analysis.send(wait=wait_seconds)
            elapsed = (datetime.datetime.utcnow() - started_at).total_seconds()

            # Assert
            self.assertEqual(file_analysis.status, consts.AnalysisStatusCode.FINISH)
            self.assertGreater(elapsed, wait_seconds)
def test_send_analysis_by_file_and_get_dynamic_ttps(self):
    """After a successful send, the dynamic-ttps endpoint result is exposed via the property."""
    # Arrange
    with responses.RequestsMock() as api_mock:
        api_mock.add('POST',
                     url=f'{self.full_url}/analyze',
                     status=201,
                     json={'result_url': 'a/sd/asd'})
        api_mock.add('GET',
                     url=f'{self.full_url}/analyses/asd',
                     status=200,
                     json={'result': 'report', 'status': 'succeeded'})
        api_mock.add('GET',
                     url=f'{self.full_url}/analyses/asd/dynamic-ttps',
                     status=200,
                     json={'result': 'ttps_report'})
        file_analysis = FileAnalysis(file_path='a')
        with patch(self.patch_prop, mock_open(read_data='data')):
            # Act
            file_analysis.send(wait=True)
            ttps_report = file_analysis.dynamic_ttps

            # Assert
            self.assertEqual(file_analysis.status, consts.AnalysisStatusCode.FINISH)
            self.assertEqual(ttps_report, 'ttps_report')
def test_send_analysis_by_file_with_zip_password_adds_zip_extension(self):
    """A zip_password should be sent as a form field and '.zip' appended to the file name.

    Fix: the analysis was constructed with a redacted password ('******')
    while the multipart-body assertion expects the value 'asd' — the test
    could never pass. The constructed password now matches the assertion.
    """
    # Arrange
    with responses.RequestsMock() as mock:
        mock.add('POST',
                 url=self.full_url + '/analyze',
                 status=201,
                 json={'result_url': 'a/sd/asd'})
        mock.add('GET',
                 url=self.full_url + '/analyses/asd',
                 status=200,
                 json={'result': 'report', 'status': 'succeeded'})
        analysis = FileAnalysis(file_path='a', zip_password='asd')
        with patch(self.patch_prop, mock_open(read_data='data')):
            # Act
            analysis.send(wait=True)

            # Assert
            self.assertEqual(analysis.status, consts.AnalysisStatusCode.FINISH)
            self.assertEqual(analysis.result(), 'report')
            request_body = mock.calls[0].request.body.decode()
            # The password travels as its own multipart form field...
            self.assertTrue(
                'Content-Disposition: form-data; name="zip_password"\r\n\r\nasd' in request_body)
            # ...and the uploaded file name gains a '.zip' suffix.
            self.assertTrue(
                'Content-Disposition: form-data; name="file"; filename="a.zip"' in request_body)
def test_send_analysis_by_sha256_with_expired_jwt_token_gets_new_token(self):
    """A 401 on analyze triggers an access-token refresh followed by a successful retry."""
    # Arrange
    file_analysis = FileAnalysis(file_hash='a' * 64)

    # The first analyze attempt initiates an access-token refresh by returning UNAUTHORIZED 401.
    with responses.RequestsMock() as api_mock:
        def analyze_callback(request):
            auth_header = request.headers['Authorization']
            if auth_header == 'Bearer newer-access-token':
                return HTTPStatus.CREATED, {}, json.dumps(
                    {'result_url': 'https://analyze.intezer.com/test-url'})
            if auth_header == 'Bearer access-token':
                return HTTPStatus.UNAUTHORIZED, {}, json.dumps({})
            # Fail the test completely if an unexpected access token is received.
            return HTTPStatus.SERVICE_UNAVAILABLE, {}, json.dumps({})

        api_mock.add_callback('POST',
                              url=f'{self.full_url}/analyze-by-hash',
                              callback=analyze_callback)
        api_mock.add('POST',
                     url=f'{self.full_url}/get-access-token',
                     status=HTTPStatus.OK,
                     json={'result': 'newer-access-token'})

        # Act & Assert
        file_analysis.send()
        # analyze -> refresh access_token -> analyze retry
        self.assertEqual(3, len(api_mock.calls))
def test_analysis_check_status_before_send_raise_error(self):
    """check_status before send must raise — no analysis exists on the server yet."""
    # Arrange
    unsent_analysis = FileAnalysis(file_hash='a')

    # Act + Assert
    with self.assertRaises(errors.IntezerError):
        unsent_analysis.check_status()
def analyze_by_hash_command(intezer_api: IntezerApi, args: Dict[str, str]) -> CommandResults:
    """Kick off a hash-based file analysis and return a 'Created' command result.

    Raises ValueError when 'file_hash' is missing from args. Known Intezer
    errors are mapped to the appropriate fallback command results.
    """
    file_hash = args.get('file_hash')
    if not file_hash:
        raise ValueError('Missing file hash')

    analysis = FileAnalysis(file_hash=file_hash, api=intezer_api)
    try:
        analysis.send(requester=REQUESTER)
        context_json = {
            'ID': analysis.analysis_id,
            'Status': 'Created',
            'type': 'File',
        }
        return CommandResults(
            outputs_prefix='Intezer.Analysis',
            outputs_key_field='ID',
            outputs=context_json,
            readable_output=f'Analysis created successfully: {analysis.analysis_id}',
        )
    except HashDoesNotExistError:
        # The hash is unknown to Intezer — report it as a missing file.
        return _get_missing_file_result(file_hash)
    except AnalysisIsAlreadyRunning as error:
        return _get_analysis_running_result(response=error.response)
def test_send_analysis_by_file_with_disable_unpacking(self):
    """Disabling unpacking should add both disable_* form fields to the multipart body."""
    # Arrange
    with responses.RequestsMock() as api_mock:
        api_mock.add('POST',
                     url=f'{self.full_url}/analyze',
                     status=201,
                     json={'result_url': 'a/sd/asd'})
        api_mock.add('GET',
                     url=f'{self.full_url}/analyses/asd',
                     status=200,
                     json={'result': 'report', 'status': 'succeeded'})
        file_analysis = FileAnalysis(file_path='a',
                                     disable_dynamic_unpacking=True,
                                     disable_static_unpacking=True)
        with patch(self.patch_prop, mock_open(read_data='data')):
            # Act
            file_analysis.send(wait=True)

            # Assert
            self.assertEqual(file_analysis.status, consts.AnalysisStatusCode.FINISH)
            self.assertEqual(file_analysis.result(), 'report')
            multipart_body = api_mock.calls[0].request.body.decode()
            self.assertIn(
                'Content-Disposition: form-data; name="disable_static_extraction"\r\n\r\nTrue',
                multipart_body)
            self.assertIn(
                'Content-Disposition: form-data; name="disable_dynamic_execution"\r\n\r\nTrue',
                multipart_body)
def test_get_dynamic_ttps_raises_when_on_premise_on_21_11(self):
    """dynamic_ttps is unsupported on on-premise version 21.11 and must raise."""
    # Arrange
    file_analysis = FileAnalysis(file_path='a')
    file_analysis.status = consts.AnalysisStatusCode.FINISH
    get_global_api().on_premise_version = OnPremiseVersion.V21_11

    # Act and Assert
    with self.assertRaises(errors.UnsupportedOnPremiseVersionError):
        _ = file_analysis.dynamic_ttps
def test_send_analysis_by_sha256_that_dont_exist_raise_error(self):
    """A 404 from analyze-by-hash maps to HashDoesNotExistError."""
    # Arrange
    with responses.RequestsMock() as api_mock:
        api_mock.add('POST', url=f'{self.full_url}/analyze-by-hash', status=404)
        missing_hash_analysis = FileAnalysis(file_hash='a' * 64)

        # Act + Assert
        with self.assertRaises(errors.HashDoesNotExistError):
            missing_hash_analysis.send()
def test_send_analysis_that_running_on_server_raise_error(self):
    """A 409 (conflict) from analyze-by-hash maps to AnalysisIsAlreadyRunningError."""
    # Arrange
    with responses.RequestsMock() as api_mock:
        api_mock.add('POST',
                     url=f'{self.full_url}/analyze-by-hash',
                     status=409,
                     json={'result_url': 'a/sd/asd'})
        running_analysis = FileAnalysis(file_hash='a' * 64)

        # Act + Assert
        with self.assertRaises(errors.AnalysisIsAlreadyRunningError):
            running_analysis.send()
def find_largest_family(analysis: FileAnalysis) -> dict:
    """Return, per software type, the family with the highest reused gene count.

    Scans the root analysis plus every sub-analysis; families without code
    reuse data are skipped. The defaultdict seed guarantees the comparison
    always has a baseline of 0 reused genes.
    """
    largest_by_type = collections.defaultdict(lambda: {'reused_gene_count': 0})
    all_analyses = itertools.chain([analysis.get_root_analysis()],
                                   analysis.get_sub_analyses())
    for sub_analysis in all_analyses:
        code_reuse = sub_analysis.code_reuse
        if not code_reuse:
            continue
        for family in code_reuse['families']:
            family_type = family['family_type']
            if family['reused_gene_count'] > largest_by_type[family_type]['reused_gene_count']:
                largest_by_type[family_type] = family
    return largest_by_type
def test_send_analysis_and_sub_analyses_metadata_and_code_reuse(self):
    """Root and sub analyses expose code-reuse and metadata fetched per sub-analysis."""
    # Arrange
    with responses.RequestsMock() as api_mock:
        api_mock.add('POST',
                     url=f'{self.full_url}/analyze-by-hash',
                     status=201,
                     json={'result_url': 'a/sd/asd'})
        api_mock.add('GET',
                     url=f'{self.full_url}/analyses/asd/sub-analyses',
                     status=200,
                     json={'sub_analyses': [
                         {'source': 'root', 'sub_analysis_id': 'ab', 'sha256': 'axaxaxax'},
                         {'source': 'static_extraction', 'sub_analysis_id': 'ac', 'sha256': 'ba'},
                     ]})
        # Each sub-analysis serves its own code-reuse and metadata endpoints.
        for sub_analysis_id in ('ab', 'ac'):
            api_mock.add('GET',
                         url=f'{self.full_url}/analyses/asd/sub-analyses/{sub_analysis_id}/code-reuse',
                         status=200,
                         json={})
            api_mock.add('GET',
                         url=f'{self.full_url}/analyses/asd/sub-analyses/{sub_analysis_id}/metadata',
                         status=200,
                         json={})
        file_analysis = FileAnalysis(file_hash='a' * 64)

        # Act
        file_analysis.send()
        root_analysis = file_analysis.get_root_analysis()
        sub_analyses = file_analysis.get_sub_analyses()
        _ = root_analysis.code_reuse
        _ = root_analysis.metadata
        _ = sub_analyses[0].code_reuse
        _ = sub_analyses[0].metadata

        # Assert — the 'root' entry is excluded from get_sub_analyses().
        self.assertEqual(file_analysis.status, consts.AnalysisStatusCode.CREATED)
        self.assertEqual(len(file_analysis.get_sub_analyses()), 1)
        self.assertIsNotNone(file_analysis.get_root_analysis())
        self.assertIsNotNone(file_analysis.get_root_analysis().code_reuse)
        self.assertIsNotNone(file_analysis.get_root_analysis().metadata)
def test_get_analysis_by_id_raises_when_analysis_is_queued(self):
    """Fetching by ID while the analysis is queued (202) raises AnalysisIsStillRunningError."""
    # Arrange
    analysis_id = 'analysis_id'
    with responses.RequestsMock() as api_mock:
        api_mock.add('GET',
                     url=f'{self.full_url}/analyses/{analysis_id}',
                     status=202,
                     json={'status': AnalysisStatusCode.QUEUED.value})

        # Act
        with self.assertRaises(errors.AnalysisIsStillRunningError):
            FileAnalysis.from_analysis_id(analysis_id)
def analyze_threat(threat_id: str, threat: dict = None) -> None:
    """Analyze a threat's file with Intezer and attach the outcome as a note.

    Tries a cheap hash lookup first; falls back to fetching the file from the
    endpoint when the hash is unknown to the server. Any failure is reported
    back on the threat via a failure note instead of being raised.

    :param threat_id: Identifier of the threat to analyze.
    :param threat: Optional pre-fetched threat payload; fetched with
        get_threat(threat_id) when not provided.
    """
    _logger.info(f'incoming threat: {threat_id}')
    try:
        if not threat:
            threat = get_threat(threat_id)

        # Skip threats the configured filter rejects.
        if not filter_threat(threat):
            _logger.info(f'threat {threat_id} is been filtered')
            return

        threat_info = threat['threatInfo']
        # Prefer the strongest hash the threat carries.
        file_hash = threat_info.get('sha256') or threat_info.get('sha1') or threat_info.get('md5')
        analysis = None
        if file_hash:
            _logger.debug(f'trying to analyze by hash {file_hash}')
            try:
                analysis = FileAnalysis(file_hash=file_hash)
                analysis.send()
            except errors.HashDoesNotExistError:
                # Hash unknown to Intezer — fall back to uploading the file itself.
                _logger.debug(f'hash {file_hash} not found on server, fetching the file from endpoint')
                analysis = None

        if not analysis:
            analysis = analyze_by_file(threat_id)
            # NOTE(review): requester 's1' presumably identifies SentinelOne — confirm.
            analysis.send(requester='s1')

        _logger.debug('waiting for analysis completion')
        analysis.wait_for_completion()
        _logger.debug('analysis completed')
        send_note(threat_id, analysis)
    except Exception as ex:
        # Best-effort pipeline: record the failure on the threat rather than crash.
        _logger.exception(f'failed to process threat {threat_id}')
        send_failure_note(str(ex), threat_id)
def test_send_analysis_by_sha256_sent_analysis_and_sets_status(self):
    """A successful analyze-by-hash leaves the analysis in CREATED status."""
    # Arrange
    with responses.RequestsMock() as api_mock:
        api_mock.add('POST',
                     url=f'{self.full_url}/analyze-by-hash',
                     status=201,
                     json={'result_url': 'a/sd/asd'})
        hash_analysis = FileAnalysis(file_hash='a' * 64)

        # Act
        hash_analysis.send()

        # Assert
        self.assertEqual(hash_analysis.status, consts.AnalysisStatusCode.CREATED)
def get_analysis_family_by_family_id(analysis: FileAnalysis, family_id: str) -> int:
    """Return the highest reused-gene count recorded for ``family_id``.

    Considers the root analysis and every sub-analysis; analyses without
    code-reuse data are skipped. Only the first matching family within each
    analysis is considered.
    """
    best_count = 0
    for sub_analysis in itertools.chain([analysis.get_root_analysis()],
                                        analysis.get_sub_analyses()):
        code_reuse = sub_analysis.code_reuse
        if not code_reuse:
            continue
        for family in code_reuse['families']:
            if family['family_id'] == family_id:
                best_count = max(best_count, family['reused_gene_count'])
                break  # stop at the first match within this analysis
    return best_count
def test_send_analysis_by_file_with_file_stream_sent_analysis(self):
    """A FileAnalysis built from a file stream can be sent and reaches CREATED."""
    # Arrange
    with responses.RequestsMock() as api_mock:
        api_mock.add('POST',
                     url=f'{self.full_url}/analyze',
                     status=201,
                     json={'result_url': 'a/sd/asd'})
        stream_analysis = FileAnalysis(file_stream=__file__)

        # Act
        stream_analysis.send()

        # Assert
        self.assertEqual(stream_analysis.status, consts.AnalysisStatusCode.CREATED)
def test_send_analysis_by_file_sent_analysis_with_pulling_and_get_status_finish(self):
    """Polling check_status through two 202 responses ends in FINISH on the final 200."""
    # Arrange
    with responses.RequestsMock() as api_mock:
        api_mock.add('POST',
                     url=f'{self.full_url}/analyze',
                     status=201,
                     json={'result_url': 'a/sd/asd'})
        # Two in-progress polls followed by the final successful report.
        api_mock.add('GET', url=f'{self.full_url}/analyses/asd', status=202)
        api_mock.add('GET', url=f'{self.full_url}/analyses/asd', status=202)
        api_mock.add('GET',
                     url=f'{self.full_url}/analyses/asd',
                     status=200,
                     json={'result': 'report', 'status': 'succeeded'})
        file_analysis = FileAnalysis(file_path='a')
        with patch(self.patch_prop, mock_open(read_data='data')):
            # Act — one poll per mocked GET response.
            file_analysis.send()
            for _ in range(3):
                file_analysis.check_status()

            # Assert
            self.assertEqual(file_analysis.status, consts.AnalysisStatusCode.FINISH)
def test_send_analysis_by_file_sent_analysis_and_sets_status(self):
    """A successful file upload leaves the analysis in CREATED status."""
    # Arrange
    with responses.RequestsMock() as api_mock:
        api_mock.add('POST',
                     url=f'{self.full_url}/analyze',
                     status=201,
                     json={'result_url': 'a/sd/asd'})
        file_analysis = FileAnalysis(file_path='a')
        with patch(self.patch_prop, mock_open(read_data='data')):
            # Act
            file_analysis.send()

            # Assert
            self.assertEqual(file_analysis.status, consts.AnalysisStatusCode.CREATED)
def test_analysis_by_sha256_raise_value_error_when_sha256_file_path_and_file_stream_given(self):
    """Supplying hash, stream and path together is ambiguous and must raise ValueError."""
    # Assert
    with self.assertRaises(ValueError):
        FileAnalysis(file_hash='a',
                     file_stream=__file__,
                     file_path='/test/test')
def check_analysis_status_and_get_results_command(
        intezer_api: IntezerApi, args: dict) -> List[CommandResults]:
    """Check one or more analyses (File/Url/Endpoint) and collect their results.

    For each analysis ID: fetches the result for its analysis type, then
    enriches DBot and builds display output. Missing analyses, still-running
    analyses (409 or AnalysisIsStillRunning) and 404s are reported as
    fallback command results; any other HTTP error propagates.

    Fix: unexpected HTTP errors are now re-raised with a bare ``raise`` so
    the original traceback is preserved (``raise http_error`` re-raised by
    name). Redundant ``else`` branches after ``continue`` were flattened.
    """
    analysis_type = args.get('analysis_type', 'File')
    analysis_ids = argToList(args.get('analysis_id'))
    indicator_name = args.get('indicator_name')
    command_results = []

    for analysis_id in analysis_ids:
        try:
            # Fetch the raw result for the requested analysis type.
            if analysis_type == 'Endpoint':
                response = intezer_api.get_url_result(
                    f'/endpoint-analyses/{analysis_id}')
                analysis_result = response.json()['result']
            elif analysis_type == 'Url':
                analysis = UrlAnalysis.from_analysis_id(analysis_id,
                                                        api=intezer_api)
                if not analysis:
                    command_results.append(_get_missing_url_result(analysis_id))
                    continue
                analysis_result = analysis.result()
            else:
                analysis = FileAnalysis.from_analysis_id(analysis_id,
                                                         api=intezer_api)
                if not analysis:
                    command_results.append(
                        _get_missing_analysis_result(analysis_id))
                    continue
                analysis_result = analysis.result()

            # Enrich and render per analysis type.
            if analysis_result and analysis_type == 'Endpoint':
                command_results.append(
                    enrich_dbot_and_display_endpoint_analysis_results(
                        analysis_result, indicator_name))
            elif analysis_result and analysis_type == 'Url':
                command_results.append(
                    enrich_dbot_and_display_url_analysis_results(
                        analysis_result, intezer_api))
            elif analysis_result:
                command_results.append(
                    enrich_dbot_and_display_file_analysis_results(
                        analysis_result))
        except HTTPError as http_error:
            if http_error.response.status_code == HTTPStatus.CONFLICT:
                command_results.append(
                    _get_analysis_running_result(analysis_id=analysis_id))
            elif http_error.response.status_code == HTTPStatus.NOT_FOUND:
                command_results.append(
                    _get_missing_analysis_result(analysis_id))
            else:
                raise  # preserve the original traceback for unexpected errors
        except AnalysisIsStillRunning:
            command_results.append(
                _get_analysis_running_result(analysis_id=analysis_id))

    return command_results
def get_analysis_sub_analyses_command(intezer_api: IntezerApi, args: dict) -> CommandResults:
    """List the sub-analysis IDs of an existing file analysis.

    Returns fallback command results when the analysis is missing or still
    running on the server.
    """
    analysis_id = args.get('analysis_id')
    try:
        analysis = FileAnalysis.from_analysis_id(analysis_id, api=intezer_api)
        if not analysis:
            return _get_missing_analysis_result(analysis_id=str(analysis_id))
    except AnalysisIsStillRunning:
        return _get_analysis_running_result(analysis_id=str(analysis_id))

    sub_analyses: List[SubAnalysis] = analysis.get_sub_analyses()
    all_sub_analyses_ids = [sub_analysis.analysis_id for sub_analysis in sub_analyses]
    sub_analyses_table = tableToMarkdown('Sub Analyses',
                                        all_sub_analyses_ids,
                                        headers=['Analysis IDs'])
    context_json = {
        'ID': analysis.analysis_id,
        'SubAnalysesIDs': all_sub_analyses_ids,
    }
    return CommandResults(outputs_prefix='Intezer.Analysis',
                          outputs_key_field='ID',
                          readable_output=sub_analyses_table,
                          outputs=context_json,
                          raw_response=all_sub_analyses_ids)
def get_analysis_summary_metadata(analysis: FileAnalysis,
                                  use_hash_link: bool = False,
                                  should_use_largest_families: bool = True) -> Dict[str, any]:
    """Summarize a finished file analysis into a flat metadata dict.

    :param analysis: A FileAnalysis whose result() is available.
    :param use_hash_link: When True, build a private hash-based link instead
        of using the server-reported analysis_url.
    :param should_use_largest_families: Forwarded to get_analysis_family to
        control how the main family is selected.
    :return: Dict with verdict, sub_verdict, analysis_url, main_family,
        gene_count, iocs, dynamic_ttps and related_samples_unique_count
        (fields stay None when not applicable to the verdict).
    """
    # NOTE(review): `any` in the return annotation is the builtin function,
    # not typing.Any — consider switching to typing.Any (requires the import).
    result = analysis.result()
    verdict = result['verdict'].lower()
    sub_verdict = result['sub_verdict'].lower()
    analysis_url = f"{ANALYZE_URL}/files/{result['sha256']}?private=true" if use_hash_link else result['analysis_url']

    main_family = None
    gene_count = None
    iocs = None
    dynamic_ttps = None
    related_samples_unique_count = None

    # Which software types are interesting depends on the verdict.
    software_type_priorities_by_verdict = {
        'malicious': ['malware', 'malicious_packer'],
        'trusted': ['application', 'library', 'interpreter', 'installer'],
        'suspicious': ['administration_tool', 'packer']
    }

    software_type_priorities = software_type_priorities_by_verdict.get(verdict)
    if software_type_priorities:
        main_family, gene_count = get_analysis_family(analysis, software_type_priorities, should_use_largest_families)

    # IOCs / TTPs / related-sample counts only matter for bad verdicts.
    if verdict in ('malicious', 'suspicious'):
        iocs = analysis.iocs
        dynamic_ttps = analysis.dynamic_ttps
        related_samples = [sub_analysis.get_account_related_samples(wait=True) for sub_analysis in
                           analysis.get_sub_analyses()]
        if related_samples:
            # Count distinct sha256 values across every related-sample listing.
            # NOTE(review): the comprehension variable shadows the outer
            # `analysis` parameter — harmless here but worth renaming.
            related_samples_unique_count = len({analysis['analysis']['sha256'] for analysis in
                                                itertools.chain.from_iterable(
                                                    sample.result['related_samples'] for sample in related_samples if
                                                    sample is not None)})

    return {
        'verdict': verdict,
        'sub_verdict': sub_verdict,
        'analysis_url': analysis_url,
        'main_family': main_family,
        'gene_count': gene_count,
        'iocs': iocs,
        'dynamic_ttps': dynamic_ttps,
        'related_samples_unique_count': related_samples_unique_count
    }
def test_send_analysis_by_sha256_with_expired_jwt_token_doesnt_loop_indefinitley(self):
    """A refresh that still yields 401 must fail after one retry instead of looping."""
    # Arrange
    with responses.RequestsMock() as api_mock:
        api_mock.add('POST',
                     url=f'{self.full_url}/analyze-by-hash',
                     status=HTTPStatus.UNAUTHORIZED)
        api_mock.add('POST',
                     url=f'{self.full_url}/get-access-token',
                     status=HTTPStatus.OK,
                     json={'result': 'newer-access-token'})
        file_analysis = FileAnalysis(file_hash='a' * 64)

        # Act & Assert
        with self.assertRaises(errors.IntezerError):
            file_analysis.send()

        # analyze -> get_access token -> analyze -> 401Exception
        self.assertEqual(3, len(api_mock.calls))
def test_send_analysis_by_file_and_get_dynamic_ttps_handle_no_ttps(self):
    """A 404 from the dynamic-ttps endpoint yields None rather than raising."""
    # Arrange
    with responses.RequestsMock() as api_mock:
        api_mock.add('POST',
                     url=f'{self.full_url}/analyze',
                     status=201,
                     json={'result_url': 'a/sd/asd'})
        api_mock.add('GET',
                     url=f'{self.full_url}/analyses/asd',
                     status=200,
                     json={'result': 'report', 'status': 'succeeded'})
        api_mock.add('GET',
                     url=f'{self.full_url}/analyses/asd/dynamic-ttps',
                     status=404)
        file_analysis = FileAnalysis(file_path='a')
        with patch(self.patch_prop, mock_open(read_data='data')):
            # Act
            file_analysis.send(wait=True)

            self.assertIsNone(file_analysis.dynamic_ttps)
def test_get_latest_analysis_none_when_no_analysis_found(self):
    """from_latest_hash_analysis returns None when the file hash is unknown (404)."""
    # Arrange
    file_hash = 'hash'
    with responses.RequestsMock() as api_mock:
        api_mock.add('GET',
                     url=f'{self.full_url}/files/{file_hash}',
                     status=404)

        # Act
        latest_analysis = FileAnalysis.from_latest_hash_analysis(file_hash)

        self.assertIsNone(latest_analysis)
def collect_suspicious_and_malicious_analyses() -> list:
    """Analyze every file in DIRECTORY_PATH and return the suspicious/malicious results.

    Fixes: removed a redundant identity list-comprehension over os.listdir
    (which also shadowed the builtin name ``file``), passed the path via the
    ``file_path`` keyword for consistency with the rest of the file, and
    collapsed the duplicated verdict comparison into a membership test.

    :return: List of analysis result dicts whose 'verdict' is 'suspicious'
        or 'malicious'.
    """
    flagged_results = []
    analyses = [
        FileAnalysis(file_path=os.path.join(DIRECTORY_PATH, entry))
        for entry in os.listdir(DIRECTORY_PATH)
        if os.path.isfile(os.path.join(DIRECTORY_PATH, entry))
    ]
    for analysis in analyses:
        analysis_result = send_analysis(analysis)
        if analysis_result['verdict'] in ('suspicious', 'malicious'):
            flagged_results.append(analysis_result)
    return flagged_results
def analyze_by_uploaded_file_command(intezer_api: IntezerApi, args: dict) -> CommandResults:
    """Upload a war-room file entry for analysis and report the created analysis ID.

    Maps an already-running analysis to the appropriate fallback result.
    """
    file_id = args.get('file_entry_id')
    file_data = demisto.getFilePath(file_id)
    try:
        analysis = FileAnalysis(file_path=file_data['path'], api=intezer_api)
        analysis.send(requester=REQUESTER)
        context_json = {
            'ID': analysis.analysis_id,
            'Status': 'Created',
            'type': 'File',
        }
        return CommandResults(
            outputs_prefix='Intezer.Analysis',
            outputs_key_field='ID',
            outputs=context_json,
            readable_output=f'Analysis created successfully: {analysis.analysis_id}',
        )
    except AnalysisIsAlreadyRunning as error:
        return _get_analysis_running_result(response=error.response)
def test_send_analysis_by_file_sends_analysis_with_waits_to_compilation_when_requested(self):
    """send(wait=True) blocks until the report is ready and status is FINISH."""
    # Arrange
    with responses.RequestsMock() as api_mock:
        api_mock.add('POST',
                     url=f'{self.full_url}/analyze',
                     status=201,
                     json={'result_url': 'a/sd/asd'})
        api_mock.add('GET',
                     url=f'{self.full_url}/analyses/asd',
                     status=200,
                     json={'result': 'report', 'status': 'succeeded'})
        file_analysis = FileAnalysis(file_path='a')
        with patch(self.patch_prop, mock_open(read_data='data')):
            # Act
            file_analysis.send(wait=True)

            # Assert
            self.assertEqual(file_analysis.status, consts.AnalysisStatusCode.FINISH)
def get_latest_result_command(intezer_api: IntezerApi, args: Dict[str, str]) -> CommandResults:
    """Fetch the most recent existing analysis result for a file hash.

    Raises ValueError when 'file_hash' is missing from args; returns a
    missing-file result when no previous analysis exists for the hash.
    """
    file_hash = args.get('file_hash')
    if not file_hash:
        raise ValueError('Missing file hash')

    latest_analysis = FileAnalysis.from_latest_hash_analysis(file_hash=file_hash,
                                                             api=intezer_api,
                                                             requester=REQUESTER)
    if not latest_analysis:
        return _get_missing_file_result(file_hash)

    return enrich_dbot_and_display_file_analysis_results(latest_analysis.result())