def test_get_MISP_Fitted_Value(self):
    """Hash-type prefixes and the ip-dst CIDR suffix are stripped; other values pass through."""
    cases = [
        ('123.456.789/32', 'ip-dst', '123.456.789'),
        ('md5:testtest', 'md5', 'testtest'),
        ('sha1:testtest', 'sha1', 'testtest'),
        ('sha256:testtest', 'sha256', 'testtest'),
        ('testtest', 'somethingelse', 'testtest'),
    ]
    for raw_value, attribute_type, expected in cases:
        assert MISPHelper.get_MISP_Fitted_Value(raw_value, attribute_type) == expected
def test_generate_Manifest_Entry(self):
    """A manifest entry must not carry attributes or publishing metadata."""
    ip_observation = {
        "data_type": "IPv4",
        "first_seen": "2019-08-21 08:38:29+02:00",
        "last_seen": "2019-08-21 13:38:28+02:00",
        "created_at": "2019-08-21 08:51:20.242089+02:00",
        "updated_at": "2019-08-21 13:51:10.270419+02:00",
        "max_confidence": 40,
        "min_confidence": 40,
        "max_severity": 1,
        "min_severity": 1,
        "n_occurrences": 1,
        "sources": [{"pseudonym": "testpseudo1", "name": "testname1"}],
        "value": "123.45.67.89/32",
        "categories": [],
        "actors": [],
        "families": [],
    }
    hash_observation = {
        "data_type": "ExactHash",
        "first_seen": "2019-08-21 13:38:17+02:00",
        "last_seen": "2019-08-21 13:38:26+02:00",
        "created_at": "2019-08-21 13:40:02.575150+02:00",
        "updated_at": "2019-08-21 13:40:02.575150+02:00",
        "max_confidence": 90,
        "min_confidence": 90,
        "max_severity": 1,
        "min_severity": 1,
        "n_occurrences": 1,
        "sources": [{"pseudonym": "testpseudo2", "name": "testname2"}],
        "value": "sha1:930A0029225AA4C28B8EF095B679285EAAE27078",
        "categories": [],
        "actors": [],
        "families": ["testfamily"],
    }
    test_obs = {
        "3ad54db13a7b6129902b0ee0acf3e2d1": ip_observation,
        "a3475b4484bed2a863720110e8099208": hash_observation,
    }

    conf = Config.parse("settings/config.yml")
    event, attr_hashes = MISPHelper.generate_MISP_Event(test_obs, conf, [])
    manifest_entry = MISPHelper.generate_Manifest_Entry(event)

    # These keys belong to the full event, not its manifest entry.
    for stripped_key in ('Attribute', 'publish_timestamp', 'published', 'uuid'):
        assert stripped_key not in manifest_entry.keys()
def test_get_Attribute_Type(self):
    """Each TIE data_type (plus hash-kind value for ExactHash) maps to a MISP attribute type."""
    expectations = (
        ('ExactHash', 'md5', 'md5'),
        ('ExactHash', 'sha1', 'sha1'),
        ('ExactHash', 'sha256', 'sha256'),
        ('EMail', 'EMail', 'email-dst'),
        ('DomainName', 'DomainName', 'domain'),
        ('URLVerbatim', 'URLVerbatim', 'url'),
        ('IPv4', 'IPv4', 'ip-dst'),
        ('IPv6', 'IPv6', 'ip-dst'),
    )
    for data_type, value, expected_type in expectations:
        attr = {'data_type': data_type, 'value': value}
        assert MISPHelper.get_Attribute_Type(attr) == expected_type
def test_generate_MISP_event(self):
    """generate_MISP_Event builds a published MISPEvent carrying one attribute per observation."""
    conf = Config.parse("settings/config.yml")
    ip_observation = {
        "data_type": "IPv4",
        "first_seen": "2019-08-21 08:38:29+02:00",
        "last_seen": "2019-08-21 13:38:28+02:00",
        "created_at": "2019-08-21 08:51:20.242089+02:00",
        "updated_at": "2019-08-21 13:51:10.270419+02:00",
        "max_confidence": 40,
        "min_confidence": 40,
        "max_severity": 1,
        "min_severity": 1,
        "n_occurrences": 1,
        "sources": [{"pseudonym": "testpseudo1", "name": "testname1"}],
        "value": "123.45.67.89/32",
        "categories": [],
        "actors": [],
        "families": [],
    }
    hash_observation = {
        "data_type": "ExactHash",
        "first_seen": "2019-08-21 13:38:17+02:00",
        "last_seen": "2019-08-21 13:38:26+02:00",
        "created_at": "2019-08-21 13:40:02.575150+02:00",
        "updated_at": "2019-08-21 13:40:02.575150+02:00",
        "max_confidence": 90,
        "min_confidence": 90,
        "max_severity": 1,
        "min_severity": 1,
        "n_occurrences": 1,
        "sources": [{"pseudonym": "testpseudo2", "name": "testname2"}],
        "value": "sha1:930A0029225AA4C28B8EF095B679285EAAE27078",
        "categories": [],
        "actors": [],
        "families": ["testfamily"],
    }
    test_obs = {
        "3ad54db13a7b6129902b0ee0acf3e2d1": ip_observation,
        "a3475b4484bed2a863720110e8099208": hash_observation,
    }

    event, attr_hashes = MISPHelper.generate_MISP_Event(test_obs, conf, [])
    dt = datetime.now()

    assert isinstance(event, MISPEvent)
    # Event info and timestamps are derived from the generation time.
    assert event.info == dt.strftime("%Y%m%d ") + 'TIE'
    assert event.publish_timestamp == dt.strftime("%s")
    assert event.timestamp == dt.strftime("%s")
    assert event['timestamp'] == dt.strftime("%s")
    assert event.analysis == 2
    assert event.published

    # The creator org is taken verbatim from the configuration.
    orgc = MISPOrganisation()
    orgc.from_json(json.dumps({'name': conf.org_name, 'uuid': conf.org_uuid}))
    assert event.orgc == orgc
    assert event.threat_level_id == conf.event_base_thread_level
    assert len(event['Attribute']) == 2
def test_get_Attribute_Category(self):
    """Hashes are categorised as payload delivery, domains as network activity."""
    hash_attr = {'data_type': 'ExactHash'}
    domain_attr = {'data_type': 'DomainName'}
    assert MISPHelper.get_Attribute_Category(hash_attr) == 'Payload delivery'
    assert MISPHelper.get_Attribute_Category(domain_attr) == 'Network activity'
def start(out_format, conf, tags, category, actor, family, source, first_seen,
          last_seen, min_confidence, min_severity, max_confindence,
          max_severity, proxy_tie_addr, no_filter=False,
          disable_cert_verify=False):
    """Fetch observations from the TIE API and export them.

    Pages through the TIE ``observations`` endpoint, filtering by the given
    category/actor/family/source/date/confidence/severity parameters,
    deduplicates the results, and — when ``out_format`` is ``'MISP'`` —
    serializes them as a MISP event plus manifest via MISPHelper/fileHelper.

    Retries up to 5 times on HTTP 5xx responses; aborts (and returns early)
    on connection errors.  NOTE(review): ``max_confindence`` is a typo for
    "max_confidence" but is part of the public signature — do not rename
    without updating callers.
    """
    # Building Auth Header
    conf_authHeader = {'Authorization': 'Bearer ' + conf.tie_api_key}
    # Building URL
    if first_seen:
        date_since = first_seen.strftime("%Y-%m-%d")
    if last_seen:
        date_until = last_seen.strftime("%Y-%m-%d")
    category = category  # NOTE(review): self-assignment is a no-op — presumably leftover; confirm and remove
    finished = False
    event = None  # NOTE(review): never read in this function
    connection_error = False
    # Building parameters
    payload = dict()
    if category:
        payload['category'] = category
    if first_seen:
        payload['first_seen_since'] = date_since
    if last_seen:
        # NOTE(review): last_seen is sent as 'first_seen_until', i.e. both
        # bounds constrain the first_seen field — verify against the TIE API.
        payload['first_seen_until'] = date_until
    if actor:
        payload['actor'] = actor
    if family:
        payload['family'] = family
    if source:
        payload['source_pseudonym'] = source
    # Confidence/severity are sent as "min-max" range strings; an open bound
    # is expressed by omitting one side ("min-" or "-max").
    if min_confidence and max_confindence:
        payload['confidence'] = str(min_confidence) + '-' + str(
            max_confindence)
    elif min_confidence:
        payload['confidence'] = str(min_confidence) + '-'
    elif max_confindence:
        payload['confidence'] = '-' + str(max_confindence)
    if min_severity and max_severity:
        payload['severity'] = str(min_severity) + '-' + str(max_severity)
    elif min_severity:
        payload['severity'] = str(min_severity) + '-'
    elif max_severity:
        payload['severity'] = '-' + str(max_severity)
    if not no_filter:
        payload['filter'] = 'default'
    payload['limit'] = 1000  # page size per request
    url = conf.tie_api_url + 'observations'
    index = 0  # running offset, for progress logging only
    connection_retrys = 1
    deduplicated_observations = dict()
    # Pagination loop: each iteration fetches one page until 'has_more' is false.
    while not finished:
        try:
            myResponse = requests.get(url,
                                      params=payload,
                                      headers=conf_authHeader,
                                      proxies=proxy_tie_addr,
                                      verify=not disable_cert_verify)
            # For successful API call, response code will be 200 (OK)
            if myResponse.ok:
                # print(myResponse.status_code)
                # Loading the response data into a dict variable
                # json.loads takes in only binary or string variables so using content to fetch binary content
                # Loads (Load String) takes a Json file and converts into python data structure
                # (dict or list, depending on JSON)
                try:
                    jsonResponse = myResponse.json()
                    # check is TIE Response is complete
                    response_has_more = None
                    response_observations = None
                    response_params = None  # NOTE(review): read from the response but never used afterwards
                    if 'has_more' in jsonResponse and 'observations' in jsonResponse and 'params' in jsonResponse:
                        response_has_more = jsonResponse['has_more']
                        response_observations = jsonResponse[
                            'observations']
                        response_params = jsonResponse['params']
                    else:
                        raise ValueError(
                            "Error: TIE answered with an invalid or empty JSON Response"
                        )
                    TIELoader.deduplicate_observations(
                        response_observations, deduplicated_observations)
                    # parsing received observations
                    logging.info("Parsing... - Offset: " + str(index) +
                                 " to " +
                                 str(index + len(response_observations)))
                    index += len(response_observations)
                    if response_has_more is not True:
                        finished = True
                        logging.info("There are no more attributes")
                        logging.info("#### Finished #####")
                        break
                    else:
                        # Follow the RFC 5988 'next' link for the next page;
                        # query params are baked into that URL, so clear payload.
                        if isinstance(myResponse.links, dict):
                            res = myResponse.links["next"]
                            url = res["url"]
                            payload = dict()
                            logging.info("#### Continue #####")
                except ValueError:
                    logging.error("Error: Invalid or empty JSON Response")
            elif myResponse.status_code >= 500 and myResponse.status_code <= 550:
                # Server-side error: retry the same request up to 5 times.
                logging.warning(
                    "It seems there are connection issues with TIE at the moment"
                )
                logging.warning("Status-Code: " +
                                str(myResponse.status_code) + " - Try: " +
                                str(connection_retrys) + " from 5")
                connection_retrys += 1
                if connection_retrys < 6:
                    continue
                else:
                    logging.error(
                        "TIE seems not to be available at the moment or connection is interrupted"
                    )
                    raise ConnectionError
            else:
                # If response code is not ok (200), print the resulting http error code with description
                logging.error("Error:")
                logging.error(myResponse.content)
                myResponse.raise_for_status()
        except (HTTPError, ConnectionError, ConnectTimeout) as e:
            logging.error("Error:")
            logging.error(
                "TIE seems not to be available at the moment or connection is interrupted"
            )
            logging.debug(e)
            connection_error = True  # NOTE(review): set but never read — the return below exits immediately
            finished = True
            return
    # TIE is available?
    if out_format == 'MISP':
        # Serialize event as MISP Event
        event, attr_hashes = MISPHelper.generate_MISP_Event(
            deduplicated_observations, conf, tags)
        event_json = event.to_json()
        event_from_json = json.loads(event_json)
        # Manifest consumers expect publish_timestamp as a string.
        event_from_json['publish_timestamp'] = str(
            event_from_json['publish_timestamp'])
        json_output = '{"Event" :' + json.dumps(event_from_json) + '}'
        event_no_attr = MISPHelper.generate_Manifest_Entry(event_from_json)
        manifest_output = {event['uuid']: event_no_attr}
        fileHelper.save_events_to_file(event['uuid'], json_output)
        fileHelper.save_manifest_to_file(manifest_output)
        fileHelper.save_hashes(attr_hashes)