def dotransform(request, response):
    """Emit a URL entity for the input entity's 'URL' field, when set."""
    url_value = request.fields['URL']
    if url_value:
        url_entity = URL(url_value)
        url_entity.url = url_value
        response += url_entity
    return response
def do_transform(self, request, response, config):
    """Search the 'twinttweets' Elasticsearch index for the input user's
    tweets and emit a URL entity for every hyperlink found in a tweet body.

    The result count is capped at Maltego's hard limit; the URL's title is
    set to the host component of the link.
    """
    user = request.entity
    _body = {
        'query': {
            'bool': {
                'must': [
                    {'match': {'username': user.value}},
                    {'regexp': {'tweet': '[a-zA-Z0-9]{1,63}'}},
                ]
            }
        },
        'size': request.limits.hard  # honour Maltego's hard result limit
    }
    res = es.search(index="twinttweets", body=_body)
    for hit in res['hits']['hits']:
        tweet = hit['_source']
        # FIX: the dot separating host and path was unescaped ('.'), so it
        # matched *any* character (e.g. the '/' after a dotless host).
        # Escape it to require a literal dot in the host part.
        _links = re.findall(r'[\w]{3,8}://[\w.]{2,63}\.[\w/]{1,63}',
                            tweet['tweet'])
        for l in _links:
            r = URL()
            r.url = l
            r.title = l.split('/')[2]  # host part of scheme://host/...
            response += r
    return response
def dotransform(request, response, config):
    """Emit a URL entity built from the input entity's 'malriq.url' field.

    `request` carries the input entity (value/fields/params serialized into
    `request.entities`), `response` collects output entities and UI
    messages, and `config` is the transform's key-value configuration store.
    """
    progress(10)
    debug('Extracting URL')
    field_value = request.entities[0].fields['malriq.url'].value
    url_entity = URL(field_value)
    url_entity.url = field_value
    response += [url_entity]
    progress(100)
    return response
def dotransform(request, response):
    """Parse the 'itw_urls' field (a Python-literal list) and emit one URL
    entity per in-the-wild URL."""
    for raw_url in ast.literal_eval(request.fields['itw_urls']):
        url_text = str(raw_url)
        entity = URL(url_text)
        entity.url = url_text
        response += entity
    return response
def dotransform(request, response):
    """Query msmodule for URLs whose pattern matches the input value and
    emit a URL entity (fqdn field) per hit."""
    pattern = '.*{0}(/|:)'.format(re.escape(request.value))
    json_dict = msmodule.query('/urls?url_regex={0}'.format(pattern))
    if len(json_dict['urls']) > 0:
        for record in json_dict['urls']:
            # 'woopsa' is the (placeholder) display value; the actual URL is
            # carried in the fqdn field.
            entity = URL('woopsa')
            entity.fqdn = record['url']
            response += entity
    return response
def dotransform(request, response):
    """Query msmodule for URLs whose pattern matches the input value and
    emit one URL entity per result."""
    pattern = '.*{0}(/|:)'.format(re.escape(request.value))
    json_dict = msmodule.query('urls?url_regex={0}'.format(pattern))
    for record in json_dict['urls']:
        entity = URL(record['url'])
        entity.url = record['url']
        response += entity
    return response
def dotransform(request, response):
    """Render sandbox behaviour data (DNS, HTTP, TCP) for the input value
    as Maltego entities.

    Each network section is best-effort: a missing or malformed section is
    silently skipped rather than aborting the transform.
    """
    data = getbehavior(request.value)
    try:
        try:
            network = data['network']
        except Exception:
            pass  # no network data
        try:
            for result in network['dns']:
                dom = result['hostname']
                ip = result['ip']
                response += Domain(dom)
                # FIX: was `IPv4Address['ip']` (subscripting the class),
                # which raised and silently dropped every resolved IP.
                response += IPv4Address(ip)
        except Exception:
            pass  # no dns data
        try:
            # NOTE: loop variable renamed from `request`, which shadowed the
            # transform's own request parameter.
            for http_req in network['http']:
                uri = URL(http_req['uri'])
                uri.url = http_req['uri']
                ua = UserAgent(http_req['user-agent'])
                req = HTTPRequest(http_req['data'])
                port = Port(http_req['port'])
                response += uri
                response += ua
                response += req
                response += port
        except Exception:
            pass  # no http data
        try:
            for entry in network['tcp']:
                dst = entry['dst']
                # skip sandbox-internal 10.x destinations
                if not dst.startswith('10.'):
                    response += IPv4Address(dst)
        except Exception:
            pass  # no tcp data
    except Exception:
        response += UIMessage(data['verbose_msg'])
    return response
def dotransform(request, response):
    """Render sandbox behaviour data (DNS, HTTP, TCP) for the input value
    as Maltego entities; HTTPRequest emission is intentionally disabled in
    this variant.

    Each network section is best-effort: a missing or malformed section is
    silently skipped rather than aborting the transform.
    """
    data = getbehavior(request.value)
    try:
        try:
            network = data['network']
        except Exception:
            pass  # no network data
        try:
            for result in network['dns']:
                dom = result['hostname']
                ip = result['ip']
                response += Domain(dom)
                # FIX: was `IPv4Address['ip']` (subscripting the class),
                # which raised and silently dropped every resolved IP.
                response += IPv4Address(ip)
        except Exception:
            pass  # no dns data
        try:
            # NOTE: loop variable renamed from `request`, which shadowed the
            # transform's own request parameter.
            for http_req in network['http']:
                uri = URL(http_req['uri'])
                uri.url = http_req['uri']
                ua = UserAgent(http_req['user-agent'])
                # HTTPRequest entity deliberately not emitted in this variant
                # (was commented out in the original).
                port = Port(http_req['port'])
                response += uri
                response += ua
                response += port
        except Exception:
            pass  # no http data
        try:
            for entry in network['tcp']:
                dst = entry['dst']
                # skip sandbox-internal 10.x destinations
                if not dst.startswith('10.'):
                    response += IPv4Address(dst)
        except Exception:
            pass  # no tcp data
    except Exception:
        response += UIMessage(data['verbose_msg'])
    return response
def dotransform(request, response, config):
    """Run a Google CSE query for the input value (excluding blockchain
    explorer sites) and emit one URL entity per search hit, labelled with
    title, snippet and the effective query.

    Raises MaltegoException on query or parse failure.
    """
    try:
        query = '%s -site:blockchain.info -site:blockexplorer.com' % request.value
        jsondata = json.loads(
            csequery(config['gcse/gapi'], config['gcse/gcseid'], query))
    except Exception as e:
        raise MaltegoException('An error occured: %s' % e)
    # parses the GCSE results
    if 'items' in jsondata:
        try:
            for item in jsondata['items']:
                e = URL(item['link'], url=item['link'])
                e += Label("Title", item['title'].encode('ascii', 'ignore'))
                e += Label("Snippet", item['snippet'].encode('ascii', 'ignore'))
                e += Label("Google Query",
                           jsondata['queries']['request'][0]['searchTerms'])
                response += e
            # TODO: Check to see if there are more than one page of results;
            # up to 100 results can be returned by the GCSE API
            # if 'nextPage' in jsondata['queries']
        except Exception as e:
            raise MaltegoException('An error occured: %s' % e)
    # FIX: previously fell off the end (returning None) when the query had
    # no 'items'; always hand the response container back to Maltego.
    return response
def attribute_to_entity(a, link_label=None, event_tags=None, only_self=False):
    """Generator: convert a MISP attribute dict into Maltego entities.

    Yields galaxy-cluster entities, Hashtag entities for attribute tags,
    and finally the entity (or entities, for combined 'x|y' types) mapped
    from the attribute type/value. `event_tags` are merged with attribute
    tags to build the entity notes. With `only_self` set, galaxies and tags
    are skipped.

    FIX: `event_tags` had a mutable default (`[]`) that was then mutated via
    `combined_tags.append(...)` — tags leaked across calls and into the
    caller's list. The default is now None and the input is copied.
    """
    # prepare some attributes to a better form
    a['data'] = None  # empty the file content as we really don't need this here
    if a['type'] == 'malware-sample':
        a['type'] = 'filename|md5'
    if a['type'] == 'regkey|value':
        # LATER regkey|value => needs to be a special non-combined object
        a['type'] = 'regkey'

    combined_tags = list(event_tags) if event_tags else []

    if 'Galaxy' in a and not only_self:
        for g in a['Galaxy']:
            for c in g['GalaxyCluster']:
                yield galaxycluster_to_entity(c)

    # complement the event tags with the attribute tags.
    if 'Tag' in a and not only_self:
        for t in a['Tag']:
            combined_tags.append(t['name'])
            # ignore all misp-galaxies
            if t['name'].startswith('misp-galaxy'):
                continue
            # ignore all those we add as notes
            if tag_matches_note_prefix(t['name']):
                continue
            yield Hashtag(t['name'], bookmark=Bookmark.Green)

    notes = convert_tags_to_note(combined_tags)

    # special cases
    if a['type'] in ('url', 'uri'):
        yield URL(url=a['value'], short_title=a['value'],
                  link_label=link_label, notes=notes, bookmark=Bookmark.Green)
        return

    # attribute is from an object, and a relation gives better understanding
    # of the type of attribute
    if a.get('object_relation') and mapping_misp_to_maltego.get(a['object_relation']):
        entity_obj = mapping_misp_to_maltego[a['object_relation']][0]
        yield entity_obj(a['value'], labels=[Label('comment', a.get('comment'))],
                         link_label=link_label, notes=notes, bookmark=Bookmark.Green)
    # combined attributes
    elif '|' in a['type']:
        t_1, t_2 = a['type'].split('|')
        v_1, v_2 = a['value'].split('|')
        if t_1 in mapping_misp_to_maltego:
            entity_obj = mapping_misp_to_maltego[t_1][0]
            labels = [Label('comment', a.get('comment'))]
            if entity_obj == File:
                labels.append(Label('hash', v_2))
            yield entity_obj_to_entity(entity_obj, v_1, t_1, labels=labels,
                                       link_label=link_label, notes=notes,
                                       bookmark=Bookmark.Green)
            # LATER change the comment to include the second part of the regkey
        if t_2 in mapping_misp_to_maltego:
            entity_obj = mapping_misp_to_maltego[t_2][0]
            labels = [Label('comment', a.get('comment'))]
            if entity_obj == Hash:
                labels.append(Label('filename', v_1))
            yield entity_obj_to_entity(entity_obj, v_2, t_2, labels=labels,
                                       link_label=link_label, notes=notes,
                                       bookmark=Bookmark.Green)
            # LATER change the comment to include the first part of the regkey
    # normal attributes
    elif a['type'] in mapping_misp_to_maltego:
        entity_obj = mapping_misp_to_maltego[a['type']][0]
        yield entity_obj_to_entity(entity_obj, a['value'], a['type'],
                                   labels=[Label('comment', a.get('comment'))],
                                   link_label=link_label, notes=notes,
                                   bookmark=Bookmark.Green)
def dotransform(request, response):
    """Emit a URL entity, link-labelled with its scan date, for every entry
    in the 'detected_urls' field; return silently when the field cannot be
    parsed as a Python literal."""
    try:
        detected = ast.literal_eval(request.fields['detected_urls'])
    except:
        return response
    for entry in detected:
        entity = URL(entry['url'])
        entity.url = entry['url']
        entity.linklabel = entry['scan_date']
        response += entity
    return response
def dotransform(request, response):
    """List downloads recorded for a Kippo session: one URL entity per row,
    annotated with timestamp, outfile and the source database host.

    On any database error a UIMessage with the error text is returned
    alongside the (possibly partial) response.
    """
    sess = request.value
    host = request.fields['kippodatabase']
    x = db_connect(host)
    try:
        cursor = x.cursor()
        query = "select timestamp, url, `outfile` from downloads where session like %s"
        cursor.execute(query, (sess,))
        for timestamp, url, outfile in cursor:
            e = URL(url)
            e.url = url
            e += Field('filetime', timestamp, displayname='Time Stamp')
            e += Field('fileout', outfile, displayname='Success')
            e += Field('kippodatabase', host, displayname='Kippo Database')
            response += e
        return response
    except Exception as e:
        return response + UIMessage(str(e))
    finally:
        # FIX: the connection was never closed, leaking one DB connection
        # per transform run.
        x.close()
def dotransform(request, response):
    """Emit one URL entity per download row recorded for the given Kippo
    session, carrying timestamp/outfile/database fields."""
    session_id = request.value
    db_host = request.fields['kippodatabase']
    connection = db_connect(db_host)
    try:
        cur = connection.cursor()
        cur.execute(
            "select timestamp, url, `outfile` from downloads where session like %s",
            (session_id, ))
        for ts, download_url, outfile in cur:
            entity = URL(download_url)
            entity.url = download_url
            entity += Field('filetime', ts, displayname='Time Stamp')
            entity += Field('fileout', outfile, displayname='Success')
            entity += Field('kippodatabase', db_host, displayname='Kippo Database')
            response += entity
        return response
    except Exception as e:
        return response + UIMessage(str(e))
def dotransform(request, response):
    """Expand VirusTotal behavioural data (DNS, TCP, UDP, HTTP) attached to
    the input entity into Domain/IPv4Address/URL entities, each link-labelled
    with its section (and port or HTTP method where applicable).
    """
    if request.fields['behavioral'] != "":
        try:
            behavior = ast.literal_eval(request.fields['behavior_data'])
        except Exception as e:
            debug("Entity has no behavioral data")
            return response
        # FIX/modernisation: dict.has_key() was removed in Python 3; the
        # `in` operator is equivalent and also works on Python 2.
        if "network" in behavior:
            net = behavior['network']
            if 'dns' in net:
                for item in net['dns']:
                    host = Domain(item['hostname'])
                    host.linklabel = "vt_behav->hosts"
                    response += host
                    if 'ip' in item:
                        ip = IPv4Address(item['ip'])
                        ip.linklabel = "vt_behav->hosts"
                        response += ip
            if 'tcp' in net:
                for item in net['tcp']:
                    conn = item.split(":")
                    r = IPv4Address(conn[0])
                    r.linklabel = "vt_behav->hosts_tcp (%s)" % str(conn[1])
                    response += r
            if 'udp' in net:
                for item in net['udp']:
                    conn = item.split(":")
                    r = IPv4Address(conn[0])
                    r.linklabel = "vt_behav->hosts_udp (%s)" % str(conn[1])
                    response += r
            if 'http' in net:
                for item in net['http']:
                    r = URL(item['url'])
                    r.url = item['url']
                    r.linklabel = "vt_behav->hosts_http (%s)" % item['method']
                    response += r
    else:
        debug("ripVT: No behavioral for %s" % request.value)
    return response
def do_transform(self, request, response, config):
    """Kick off a directory scan against the requested host and emit one
    URL entity (link-labelled 'dir_scan') per finding in the scan log."""
    scan_request = request.entity
    # random 32-hex-digit id ties this run to its log entries
    scan_id = "".join(random.choice("0123456789abcdef") for _ in range(32))
    if scan_request.ports is not None:
        scan_request.ports = scan_request.ports.split(', ')
    start(scan_request.host, [], [], scan_request.ports,
          scan_request.timeout_sec, scan_request.thread_no, 1, 1, 'abcd', 0,
          "en", scan_request.verbose, scan_request.socks_proxy,
          scan_request.retries, [], scan_id, "Through Maltego")
    for result in find_log(scan_id, "en"):
        # first whitespace-separated token of the description is the URL
        found_url = result["DESCRIPTION"].split()[0]
        response += URL(url=found_url, title=result["DESCRIPTION"],
                        short_title=found_url, link_label='dir_scan')
    return response
def dotransform(request, response):
    """List URLs the given Facebook user has liked (via FQL) as URL
    entities, truncating display values to 50 characters; Graph API errors
    are reported as a UI message.
    """
    graph = login()
    try:
        r = graph.fql('SELECT url FROM url_like WHERE user_id = %s'
                      % request.fields['uid'])
        for u in r:
            e = URL('%s ...' % (u['url'][:50] if len(u['url']) > 50 else u['url']))
            e += Field('url', u['url'])
            response += e
    # FIX: modernised the Python-2-only `except GraphAPIError, e` syntax.
    except GraphAPIError as e:
        response += UIMessage(str(e))
    # FIX: the transform fell off the end returning None; Maltego expects
    # the response container back.
    return response
def dotransform(request, response):
    """Scrape the report page for the input value and emit a URL entity for
    every URL the sample requested from the remote web server (the report
    uses different headings for one vs. many URLs)."""
    page = build(request.value)
    try:
        urls = page.find(
            text="The data identified by the following URLs was then requested from the remote web server:"
        ).findNext("ul")
    except:
        urls = None
    try:
        url = page.find(
            text="The data identified by the following URL was then requested from the remote web server:"
        ).findNext("ul")
    except:
        url = None
    # prefer the plural section; fall back to the singular one
    section = urls if urls is not None else url
    if section is not None:
        for item in section.findAll("li"):
            link_text = item.text
            entity = URL(link_text)
            entity.url = link_text
            response += entity
    return response
def dotransform(request, response):
    """Pull every monospaced entry out of the report's network_http table
    and emit each as a URL entity; best-effort when the section is absent."""
    page = build(request.value)
    try:
        table = page.find("div", {"id": "network_http"}).findNext('table')
        for span in table.findAll("span", {"class": "mono"}):
            response += URL(span.find(text=True))
    except:
        return response
    return response
def detType(in_val):
    """Classify a raw indicator string and wrap it in the matching Maltego
    entity (EmailAddress, IPv4Address, CIDR, Range, Domain or URL).

    Returns None when nothing matches. NOTE(review): the e-mail pattern
    requires a literal, defanged '[@]' separator — confirm inputs arrive
    defanged before relying on that branch.
    """
    val = str(in_val)
    # ordered (pattern, entity class) dispatch table — order matters, e.g.
    # e-mail and IP forms must win before the looser Domain pattern
    checks = [
        (re.compile(".*\[@\][a-z0-9\-]{1,}\.[a-z0-9\-]{1,}"), EmailAddress),  # ::Email
        (re.compile("^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$"), IPv4Address),    # ::IP
        (re.compile("^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}/\d{1,2}$"), CIDR),   # ::CIDR
        (re.compile(
            "^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\-\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$"
        ), Range),                                                            # ::Range
        (re.compile("([a-z0-9\-]{1,}\.?)+\.[a-z0-9\-]{1,}$"), Domain),        # ::Domain
    ]
    for pattern, entity_cls in checks:
        if pattern.match(val):
            return entity_cls(val)
    # anything with a scheme prefix becomes a URL entity
    if re.match("^([a-z]*)://", val, re.M | re.I):
        e = URL(val)
        e.url = val
        return e
def dotransform(request, response, config):
    """Enrich a ThreatCentral indicator: decorate the input entity with
    severity/confidence/type labels and emit a URL entity for each URI
    observable attached to the indicator. Errors surface as partial-error
    UI messages.
    """
    if 'ThreatCentral.resourceId' in request.fields:
        try:
            indicator = get_indicator(request.fields['ThreatCentral.resourceId'])
        except ThreatCentralError as err:
            indicator = None
            response += UIMessage(err.value, type='PartialError')
        if indicator:
            try:
                # Update Indicator entity ?
                e = Indicator(request.value)
                e.title = encode_to_utf8(indicator.get('title'))
                e.resourceId = indicator.get('resourceId')
                e.severity = indicator.get('severity', dict()).get('displayName')
                e.confidence = indicator.get('confidence', dict()).get('displayName')
                e.indicatorType = indicator.get('indicatorType', dict()).get('displayName')
                e += Label('Severity', indicator.get('severity', dict()).get('displayName'))
                e += Label('Confidence', indicator.get('confidence', dict()).get('displayName'))
                e += Label('Indicator Type', indicator.get('indicatorType', dict()).get('displayName'))
                if indicator.get('description'):
                    e += Label('Description',
                               '<br/>'.join(encode_to_utf8(indicator.get('description')).split('\n')))
                response += e
                # FIX: `len(...) is not 0` compared object identity, not
                # value — it only worked via CPython's small-int caching.
                if len(indicator.get('observables', list())) != 0:
                    for observable in indicator.get('observables'):
                        if upper(observable.get('type', dict()).get('value')) == 'URI':
                            e = URL(observable.get('value'))
                            e.url = observable.get('value')
                            e += Label('URI', observable.get('value'))
                            response += e
            except AttributeError as err:
                response += UIMessage('Error: {}'.format(err), type='PartialError')
            except ThreatCentralError as err:
                response += UIMessage(err.value, type='PartialError')
            except TypeError:
                return response
    return response
def do_transform(self, request, response, config):
    """Run the WordPress DoS (CVE-2018-6389) scan for the requested host
    and emit a URL entity per vulnerable endpoint found in the scan log."""
    scan_request = request.entity
    # random 32-hex-digit id ties this run to its log entries
    scan_id = "".join(random.choice("0123456789abcdef") for _ in range(32))
    if scan_request.ports is not None:
        scan_request.ports = scan_request.ports.split(', ')
    start(scan_request.host, [], [], scan_request.ports,
          scan_request.timeout_sec, scan_request.thread_no, 1, 1, 'abcd', 0,
          "en", scan_request.verbose, scan_request.socks_proxy,
          scan_request.retries, [], scan_id, "Through Maltego")
    for result in find_log(scan_id, "en"):
        target = result["HOST"] + ":" + result["PORT"]
        response += URL(url=target, title=result["DESCRIPTION"],
                        short_title="Wordpress DOS CVE 2018 6389 Found!",
                        link_label='weak_signature_algorithm_vuln')
    return response
def dotransform(request, response):
    """Scrape the report page for the input value and emit a URL entity for
    each URL requested from the remote web server (handles both the
    singular and plural section headings)."""
    page = build(request.value)
    try:
        urls = page.find(
            text='The data identified by the following URLs was then requested from the remote web server:'
        ).findNext('ul')
    except:
        urls = None
    try:
        url = page.find(
            text='The data identified by the following URL was then requested from the remote web server:'
        ).findNext('ul')
    except:
        url = None
    # prefer the plural section; fall back to the singular one
    section = urls if urls is not None else url
    if section is not None:
        for item in section.findAll("li"):
            link_text = item.text
            entity = URL(link_text)
            entity.url = link_text
            response += entity
    return response
def get_urls(hash):
    """Yield a URL entity for every URL msmodule associates with *hash*.

    (Parameter name kept as ``hash`` for caller compatibility, although it
    shadows the builtin.)
    """
    for record in msmodule.query('urls?hash={0}'.format(hash))['urls']:
        entity = URL(record['url'])
        entity.url = record['url']
        yield entity
def dotransform(request, response, config):
    """Fetch the ThreatCentral actor referenced by the input entity's
    'ThreatCentral.resourceId' field, mirror its fields onto an Actor
    entity (plus a Location for its country), and mine its free-text
    fields for Twitter/Facebook/URL/e-mail indicators.

    Errors surface as (partial-error) UI messages; the response container
    is always returned.
    """
    notes = list()  # NOTE(review): never used below — candidate for removal
    if 'ThreatCentral.resourceId' in request.fields:
        try:
            actor = get_actor(request.fields['ThreatCentral.resourceId'])
        except ThreatCentralError as err:
            response += UIMessage(err.value, type='PartialError')
        else:
            try:
                # Update entity?
                e = Actor(request.value)
                if actor.get('name'):
                    e.name = encode_to_utf8(actor.get('name'))
                    e.actor = encode_to_utf8(actor.get('name'))
                e.title = encode_to_utf8(actor.get('title'))
                e.resourceId = actor.get('resourceId')
                if actor.get('organization'):
                    e.organization = encode_to_utf8(actor.get('organization'))
                if actor.get('aliases'):
                    e.aliases = ', '.join([encode_to_utf8(_) for _ in actor.get('aliases')])
                if actor.get('country'):
                    e.country = encode_to_utf8(actor.get('country', dict()).get('displayName'))
                    # Also emit a Location entity for the actor's country.
                    l = Location(encode_to_utf8(actor.get('country', dict()).get('displayName')))
                    response += l
                if actor.get('score'):
                    e.score = actor.get('score')
                # Mirror the remaining fields as display labels on the entity.
                if actor.get('links'):
                    e += Label('Links', '<br/>'.join(['<a href="{}">{}</a>'.format(_.get('href'), _.get('href'))
                                                      for _ in actor.get('links')]))
                if actor.get('hyperlinks'):
                    e += Label('Hyperlinks', '<br/>'.join(['<a href="{}">{}</a>'.format(_.get('url'), _.get('title'))
                                                           for _ in actor.get('hyperlinks')]))
                if actor.get('title'):
                    e += Label('Title', encode_to_utf8(actor.get('title')))
                if actor.get('resourceId'):
                    e += Label('ResourceID', actor.get('resourceId'))
                if actor.get('aliases'):
                    e += Label('Aliases', '<br/>'.join([encode_to_utf8(_) for _ in actor.get('aliases', '')]))
                if actor.get('description'):
                    e += Label('Description', '<br/>'.join(encode_to_utf8(actor.get('description', '')).split('\n')))
                if actor.get('country'):
                    e += Label('Country', encode_to_utf8(actor.get('country', dict()).get('displayName')))
                if actor.get('organization'):
                    e += Label('Organization', encode_to_utf8(actor.get('organization')))
                if actor.get('types'):
                    e += Label('Types', '<br/>'.join([encode_to_utf8(_.get('displayName'))
                                                      for _ in actor.get('types')]))
                if actor.get('motivations'):
                    e += Label('Motivations', '<br/>'.join([encode_to_utf8(_.get('displayName'))
                                                            for _ in actor.get('motivations')]))
                if actor.get('intendedEffects'):
                    e += Label('Intended Effects', '<br/>'.join([encode_to_utf8(_.get('displayName'))
                                                                 for _ in actor.get('intendedEffects')]))
                if actor.get('sophistication'):
                    e += Label('Sophistication', encode_to_utf8(actor.get('sophistication', dict()).get('displayName')))
                if actor.get('socialMediaText'):
                    e += Label('Social Media', '<br/>'.join(encode_to_utf8(actor.get('socialMediaText', '')
                                                                           ).split('\n')))
                if actor.get('moreInfo'):
                    e += Label('More Info', '<br/>'.join(encode_to_utf8(actor.get('moreInfo', '')).split('\n')))
                if actor.get('score'):
                    e += Label('Score', actor.get('score'))
                response += e
                # Extract email addresses (and social/URL indicators) from
                # the actor's free-text fields.
                usable_info = search_for_usable_info(
                    '{} {} {}'.format(encode_to_utf8(actor.get('description')),
                                      encode_to_utf8(actor.get('socialMediaText')),
                                      encode_to_utf8(actor.get('moreInfo'))))
                if usable_info:
                    debug(usable_info)
                    try:
                        urls = usable_info.get('url', dict())
                        for twitter in urls.get('twitter', list()):
                            t = Twitter(twitter.get('name'))
                            t.uid = twitter.get('name')
                            t.set_field('affiliation.profile-url', twitter.get('url'))
                            response += t
                        for facebook in urls.get('facebook', list()):
                            f = Facebook(facebook.get('name'))
                            f.uid = facebook.get('name')
                            f.set_field('affiliation.profile-url', facebook.get('url'))
                            response += f
                        for other in urls.get('other', list()):
                            u = URL(other)
                            u.url = other
                            response += u
                        emailaddr = usable_info.get('email', list())
                        for email in emailaddr:
                            e = EmailAddress(email)
                            response += e
                    except AttributeError as err:
                        response += UIMessage('Error: {}'.format(err))
            except AttributeError as err:
                response += UIMessage('Error: {}'.format(err), type='PartialError')
            except ThreatCentralError as err:
                response += UIMessage(err.value, type='PartialError')
            except TypeError:
                return response
    return response
def dotransform(request, response, config):
    """Resolve a ThreatCentral actor for the input entity.

    Two paths:
      * the input carries a 'ThreatCentral.resourceId' field -> fetch that
        actor directly, mirror its fields onto an Actor entity (plus a
        Location for its country) and mine its free-text fields for
        Twitter/Facebook/URL/e-mail indicators;
      * otherwise (KeyError on the field) -> run a free-text actor search
        on the input value and emit one Actor entity per hit, the first
        hit link-coloured red.

    Errors surface as (partial-error) UI messages.
    """
    try:
        actor = get_actor(request.fields['ThreatCentral.resourceId'])
    except ThreatCentralError as err:
        response += UIMessage(err.value, type='PartialError')
        return response
    except KeyError:
        # No resourceId on the input entity -> fall back to a search.
        try:
            actors = search_actor(request.value)
        except ThreatCentralError as err:
            response += UIMessage(err.value, type='PartialError')
            return response
        else:
            i = 0  # counts emitted hits so only the first gets the red link
            for actor in actors:
                try:
                    rtype = lower(actor.get('type'))
                    actor = actor.get('resource')
                    # tcScore drives the Maltego entity weight when present.
                    if actor.get('tcScore'):
                        weight = int(actor.get('tcScore'))
                    else:
                        weight = 1
                    if len(actor) is not 0:
                        if rtype == 'actor':
                            if actor.get('name'):
                                e = Actor(encode_to_utf8(actor.get('name')), weight=weight)
                                e.name = encode_to_utf8(actor.get('name'))
                                e.actor = encode_to_utf8(actor.get('name'))
                            elif actor.get('title'):
                                e = Actor(encode_to_utf8(actor.get('title')))
                                e.title = encode_to_utf8(actor.get('title'))
                            e.resourceId = actor.get('resourceId')
                            if actor.get('organization'):
                                e.organization = encode_to_utf8(actor.get('organization'))
                            if actor.get('aliases'):
                                e.aliases = ', '.join([encode_to_utf8(_) for _ in actor.get('aliases')])
                            if actor.get('country'):
                                e.country = encode_to_utf8(actor.get('country', dict()).get('displayName'))
                            if actor.get('score'):
                                e.score = actor.get('score')
                            # Mirror the remaining fields as display labels.
                            if actor.get('links'):
                                e += Label('Links', '<br/>'.join([
                                    '<a href="{}">{}</a>'.format(_.get('href'), _.get('href'))
                                    for _ in actor.get('links')]))
                            if actor.get('hyperlinks'):
                                e += Label('Hyperlinks', '<br/>'.join([
                                    '<a href="{}">{}</a>'.format(_.get('url'), _.get('title'))
                                    for _ in actor.get('hyperlinks')]))
                            if actor.get('title'):
                                e += Label('Title', encode_to_utf8(actor.get('title')))
                            if actor.get('resourceId'):
                                e += Label('ResourceID', actor.get('resourceId'))
                            if actor.get('aliases'):
                                e += Label('Aliases', '<br/>'.join([
                                    encode_to_utf8(_) for _ in actor.get('aliases', '')]))
                            if actor.get('description'):
                                e += Label('Description', '<br/>'.join(
                                    encode_to_utf8(actor.get('description', '')).split('\n')))
                            if actor.get('country'):
                                e += Label('Country', encode_to_utf8(
                                    actor.get('country', dict()).get('displayName')))
                            if actor.get('organization'):
                                e += Label('Organization', encode_to_utf8(actor.get('organization')))
                            if actor.get('types'):
                                e += Label('Types', '<br/>'.join([
                                    encode_to_utf8(_.get('displayName')) for _ in actor.get('types')]))
                            if actor.get('motivations'):
                                e += Label('Motivations', '<br/>'.join([
                                    encode_to_utf8(_.get('displayName')) for _ in actor.get('motivations')]))
                            if actor.get('intendedEffects'):
                                e += Label('Intended Effects', '<br/>'.join([
                                    encode_to_utf8(_.get('displayName')) for _ in actor.get('intendedEffects')]))
                            if actor.get('sophistication'):
                                e += Label('Sophistication',
                                           actor.get('sophistication', dict()).get('displayName'))
                            if actor.get('socialMediaText'):
                                e += Label('Social Media', '<br/>'.join(
                                    encode_to_utf8(actor.get('socialMediaText', '')).split('\n')))
                            if actor.get('moreInfo'):
                                e += Label('More Info', '<br/>'.join(
                                    encode_to_utf8(actor.get('moreInfo', '')).split('\n')))
                            if actor.get('score'):
                                e += Label('Score', actor.get('score'))
                            # Highlight only the first search result.
                            if i < 1:
                                i += 1
                                e.linkcolor = "0xf90000"
                            response += e
                except AttributeError as err:
                    response += UIMessage(err, type='PartialError')
                    continue
                except ThreatCentralError as err:
                    response += UIMessage(err.value, type='PartialError')
                except TypeError:
                    return response
    else:
        # Direct lookup succeeded.
        if actor:
            try:
                # tcScore drives the Maltego entity weight when present.
                if actor.get('tcScore'):
                    weight = int(actor.get('tcScore'))
                else:
                    weight = 1
                # Update entity?
                e = Actor(request.value, weight=weight)
                if actor.get('name'):
                    e.name = encode_to_utf8(actor.get('name'))
                    e.actor = encode_to_utf8(actor.get('name'))
                e.title = encode_to_utf8(actor.get('title'))
                e.resourceId = actor.get('resourceId')
                if actor.get('organization'):
                    e.organization = encode_to_utf8(actor.get('organization'))
                if actor.get('aliases'):
                    e.aliases = ', '.join(
                        [encode_to_utf8(_) for _ in actor.get('aliases')])
                if actor.get('country'):
                    e.country = encode_to_utf8(
                        actor.get('country', dict()).get('displayName'))
                    # Also emit a Location entity for the actor's country.
                    l = Location(
                        encode_to_utf8(
                            actor.get('country', dict()).get('displayName')))
                    response += l
                if actor.get('score'):
                    e.score = actor.get('score')
                # Mirror the remaining fields as display labels.
                if actor.get('links'):
                    e += Label('Links', '<br/>'.join([
                        '<a href="{}">{}</a>'.format(_.get('href'), _.get('href'))
                        for _ in actor.get('links')]))
                if actor.get('hyperlinks'):
                    e += Label('Hyperlinks', '<br/>'.join([
                        '<a href="{}">{}</a>'.format(_.get('url'), _.get('title'))
                        for _ in actor.get('hyperlinks')]))
                if actor.get('title'):
                    e += Label('Title', encode_to_utf8(actor.get('title')))
                if actor.get('resourceId'):
                    e += Label('ResourceID', actor.get('resourceId'))
                if actor.get('aliases'):
                    e += Label('Aliases', '<br/>'.join([
                        encode_to_utf8(_) for _ in actor.get('aliases', '')]))
                if actor.get('description'):
                    e += Label('Description', '<br/>'.join(
                        encode_to_utf8(actor.get('description', '')).split('\n')))
                if actor.get('country'):
                    e += Label('Country', encode_to_utf8(
                        actor.get('country', dict()).get('displayName')))
                if actor.get('organization'):
                    e += Label('Organization',
                               encode_to_utf8(actor.get('organization')))
                if actor.get('types'):
                    e += Label('Types', '<br/>'.join([
                        encode_to_utf8(_.get('displayName'))
                        for _ in actor.get('types')]))
                if actor.get('motivations'):
                    e += Label('Motivations', '<br/>'.join([
                        encode_to_utf8(_.get('displayName'))
                        for _ in actor.get('motivations')]))
                if actor.get('intendedEffects'):
                    e += Label('Intended Effects', '<br/>'.join([
                        encode_to_utf8(_.get('displayName'))
                        for _ in actor.get('intendedEffects')]))
                if actor.get('sophistication'):
                    e += Label('Sophistication', encode_to_utf8(
                        actor.get('sophistication', dict()).get('displayName')))
                if actor.get('socialMediaText'):
                    e += Label('Social Media', '<br/>'.join(
                        encode_to_utf8(actor.get('socialMediaText', '')).split('\n')))
                if actor.get('moreInfo'):
                    e += Label('More Info', '<br/>'.join(
                        encode_to_utf8(actor.get('moreInfo', '')).split('\n')))
                if actor.get('score'):
                    e += Label('Score', actor.get('score'))
                response += e
                # Extract email addresses (and social/URL indicators) from
                # the actor's free-text fields.
                usable_info = search_for_usable_info('{} {} {}'.format(
                    encode_to_utf8(actor.get('description')),
                    encode_to_utf8(actor.get('socialMediaText')),
                    encode_to_utf8(actor.get('moreInfo'))))
                if usable_info:
                    debug(usable_info)
                    try:
                        urls = usable_info.get('url', dict())
                        for twitter in urls.get('twitter', list()):
                            t = Twitter(twitter.get('name'))
                            t.uid = twitter.get('name')
                            t.set_field('affiliation.profile-url', twitter.get('url'))
                            response += t
                        for facebook in urls.get('facebook', list()):
                            f = Facebook(facebook.get('name'))
                            f.uid = facebook.get('name')
                            f.set_field('affiliation.profile-url', facebook.get('url'))
                            response += f
                        for other in urls.get('other', list()):
                            u = URL(other)
                            u.url = other
                            response += u
                        emailaddr = usable_info.get('email', list())
                        for email in emailaddr:
                            e = EmailAddress(email)
                            response += e
                    except AttributeError as err:
                        response += UIMessage('Error: {}'.format(err))
            except AttributeError as err:
                response += UIMessage('Error: {}'.format(err), type='PartialError')
            except ThreatCentralError as err:
                response += UIMessage(err.value, type='PartialError')
            except TypeError:
                return response
    return response