def main(domain, username, password, host_identifier):
    """Acquire prefetch files from a node via a distributed query.

    Sends PREFETCH_QUERY to the node identified by *host_identifier*,
    waits for results, and downloads the resulting files.
    """
    global polylogyx_api
    polylogyx_api = PolylogyxApi(domain=domain, username=username,
                                 password=password)
    sql = PREFETCH_QUERY
    print("PolyLogyx")
    print("Acquiring prefetch files for the node : {0}".format(host_identifier))
    # Bug fix: use the host_identifier parameter instead of the global
    # args.host_identifier so the function also works when called
    # programmatically with an explicit argument.
    request = polylogyx_api.send_distributed_query(
        sql=sql, tags=[], host_identifiers=[host_identifier])
    if request['response_code'] and 'results' in request:
        if request['results']['status'] == 'success':
            try:
                query_data = polylogyx_api.get_distributed_query_results(
                    request['results']['query_id'])
                data = query_data.recv()
                sleep_and_download_file(host_identifier,
                                        request['results']['query_id'])
            except Exception as e:
                print(e)
        else:
            print(request['results']['message'])
    else:
        # Bug fix: the original format string had no placeholder, so the
        # SQL text was silently dropped from the error message.
        print("Error sending the query : {0}".format(sql))
def main(domain=None, username=None, password=None):
    """Fetch all nodes known to the PolyLogyx server and pretty-print them."""
    api = PolylogyxApi(domain=domain, username=username, password=password)
    nodes = api.get_nodes()
    print(json.dumps(nodes, sort_keys=False, indent=4))
def main(domain, username, password, host_identifier, file_path, limit):
    """Initialise the shared API client and process the CSV at *file_path*."""
    global polylogyx_api
    polylogyx_api = PolylogyxApi(
        domain=domain, username=username, password=password)
    return read_csv(file_path, host_identifier, limit)
def deploy_packs(threat_id, tags):
    """Deploy every osquery pack indicator attached to *threat_id*.

    Walks the graph from the threat id down to its indicators, filters for
    osquery-pack patterns, tags each pack with *tags* (which tells the plgx
    server which nodes to deploy to), and posts each pack to the server.

    Returns a list of per-pack response/error dicts, or False when the
    graph walk yields nothing.
    """
    # Get data from graph walk down to Indicators:
    # Note, this uses g4i molecule walk which requires some post-filtering.
    # Better to have 1-shot query if possible.
    res = g4i.get_molecule(user_id=g4i.identity['id'],
                           stix_ids=[threat_id],
                           schema_name="mitre",
                           objs=True,
                           pivot=False)
    if not res:
        return False

    # Vanilla plgx server setup...
    domain = '127.0.0.1'
    username = '******'
    password = '******'
    plgx = PolylogyxApi(domain=domain, username=username, password=password)

    # Post-filtering and deploy each pack...
    out = []
    for hit in hits_from_res(res):
        if hit['type'] != 'indicator':
            continue
        if hit['pattern_type'] != 'osquery-pack':
            continue
        # Add tags which is the way to tell plgx server to deploy to nodes
        # that also have that tag.
        new_pack = json.loads(hit['pattern'])
        new_pack['tags'] = tags
        headers = {
            'x-access-token': plgx.AUTH_TOKEN,
            'content-type': 'application/json'
        }
        url = plgx.base + "/distributed/add"
        try:
            # Bug fix: pass the dict via json= directly. The original dumped
            # it to a string first, which requests would re-encode as a JSON
            # string literal instead of a JSON object.
            response = requests.post(url, json=new_pack, headers=headers,
                                     verify=False, timeout=30)
        except requests.RequestException as e:
            out.append(dict(error=str(e)))
            # Bug fix: 'response' is undefined when the request raised; the
            # original fell through and hit a NameError on the append below.
            continue
        out.append(plgx._return_response_and_status_code(response))
    return out
def capture_nodes(org_id):
    """Register every active plgx node in the knowledge base.

    Queries the plgx server for its nodes, creates a STIX Identity plus a
    'deployed-by' Relationship for each active node, indexes both, and
    returns the combined list of node tags to select from for Rule/Pack
    deployment.
    """
    # Vanilla plgx server setup...
    plgx = PolylogyxApi(domain='127.0.0.1', username='******',
                        password='******')

    # Get nodes and create identities for them.
    nodes = plgx.get_nodes()
    pii_marking = g4i.pii_marking['id']
    all_tags = []

    for node in nodes['results']['data']:
        if not node['is_active']:
            continue
        all_tags.extend(node['tags'])

        # Identity object representing the node itself.
        stix_node = stix2.v21.Identity(
            created_by_ref=org_id,
            id='identity--' + node['node_key'],
            identity_class='system',
            labels=node['tags'],
            name=node['display_name'],
            object_marking_refs=[pii_marking])

        # 'deployed-by' relationship from the node back to the owning org.
        node_rel = stix2.v21.Relationship(
            created_by_ref=org_id,
            relationship_type='deployed-by',
            source_ref=stix_node.id,
            target_ref=org_id,
            object_marking_refs=[pii_marking])

        # Persist both objects to the knowledge base.
        print(g4i.index(user_id=g4i.identity['id'],
                        body=json.loads(stix_node.serialize())))
        print(g4i.index(user_id=g4i.identity['id'],
                        body=json.loads(node_rel.serialize())))

    # Return the list of all tags to be used in rule deployment.
    return all_tags
def main(domain=None, username=None, password=None):
    """Run the module-level `sql` query against args.host_identifier.

    Returns the query result payload on success; otherwise prints the
    error and returns None.
    """
    polylogyx_api = PolylogyxApi(domain=domain, username=username,
                                 password=password)
    request = polylogyx_api.send_distributed_query(
        sql=sql, tags=[], host_identifiers=[args.host_identifier])
    if request['response_code'] and 'results' in request:
        if request['results']['status'] == 'success':
            try:
                query_data = polylogyx_api.get_distributed_query_results(
                    request['results']['query_id'])
                data = query_data.recv()
                return data
            except Exception as e:
                print(e)
        else:
            print(request['results']['message'])
    else:
        # Bug fix: the original format string had no placeholder, so the
        # SQL text was silently dropped from the error message.
        print("Error sending the query : {0}".format(sql))
    return
def main(domain, username, password, host_identifier, vt_api_key):
    """Acquire the appcompat shim cache from each host and analyse it.

    For every comma-separated host in *host_identifier*: pulls the shim
    cache binary over a distributed query, writes it under
    ./appcompat/<host>/<timestamp>/appcompat.bin, then runs the binary
    analysis and VirusTotal scoring pipeline on it.
    """
    global polylogyx_api
    creds = {
        'username': username,
        'password': password,
        'domain': domain,
        'vt_api_key': vt_api_key
    }
    polylogyx_api = PolylogyxApi(domain=domain, username=username,
                                 password=password)
    for host in host_identifier.split(','):
        print("Acquiring binary file from host : {0}".format(host))
        response = get_distributed_query_data_over_websocket(
            APPCOMPAT_SHIM_QUERY, host)
        if response and len(response) > 0 and 'data' in response[0]:
            base_folder_path = (os.getcwd() + '/appcompat/' + host + '/' +
                                str(int(time.time())))
            # exist_ok=True replaces the original try/except-OSError-pass.
            os.makedirs(base_folder_path, exist_ok=True)
            file_path = base_folder_path + '/' + 'appcompat.bin'
            # The 'with' block closes the file; the original also called
            # f.close() redundantly inside it.
            with open(file_path, 'wb') as f:
                f.write(binascii.a2b_hex(response[0]['data']))
            print('Generated appcompat bin file for host : {0} at {1}'.
                  format(host, file_path))
            vt_score_path = analyse_binary_file(
                file_path, base_folder_path + '/' + "appcompat.csv",
                host, creds)
            anaylyse_vt_score_file(vt_score_path, host)
        else:
            print("Nothing to acquire from the host : {0}".format(host))
def main(domain, username, password, host_identifier):
    """Scan autorun locations on each host and score the hashes via VirusTotal.

    For every comma-separated host in *host_identifier*: runs each query in
    AUTORUN_QUERIES, parses registry paths out of the results, resolves the
    paths to file hashes, fetches VirusTotal reputation for them and
    analyses the resulting score file.  args.limit (when set) caps the
    number of paths collected.
    """
    global polylogyx_api
    polylogyx_api = PolylogyxApi(domain=domain, username=username,
                                 password=password)
    host_identifiers = host_identifier.split(',')
    path_parser = PathParser()
    finished = False
    for host_identifier in host_identifiers:
        print('Scanning for autoruns from the host : {0}'.format(
            host_identifier))
        hashes = []
        for name, queries in AUTORUN_QUERIES.items():
            print("Getting data for the {0}".format(name))
            if finished:
                break
            # Idiom fix: enumerate instead of range(len(...)).
            for i, query in enumerate(queries):
                print("Getting data for the query {0}".format(
                    str(i + 1) + "/" + str(len(queries)) + " " + name))
                query_results = get_distributed_query_data_over_websocket(
                    query, host_identifier)
                response = path_parser.parse_resgistry_paths(
                    query_results, name)
                hashes.extend(response)
                if args.limit and len(hashes) >= args.limit:
                    hashes = hashes[0:args.limit]
                    finished = True
                    break
        file_path = write_to_csv(hashes, host_identifier)
        print("Fetching hashes for the path obtained")
        # Consistency fix: use the credentials passed to this function
        # rather than re-reading them from the global args namespace.
        filepath = fetch_hashes(domain, username, password,
                                host_identifier, file_path)
        print("Fetching virustotal reputation for the hashes obtained")
        vt_score_path = fetch_vt_reputation.main(args.vt_api_key, filepath)
        anaylyse_vt_score_file(vt_score_path, host_identifier)
def main(domain=None, username=None, password=None):
    """Collect md5/sha1/sha256 hashes seen in the last 24 hours.

    Builds a search over all non-empty hash indicator columns (optionally
    restricted to args.pid), pages through the matching query results and
    maps each hash value to the space-separated host identifiers it was
    seen on.  args.limit (when set) caps the number of rows fetched.

    Returns a JSON string of the form
    {indicator: {hash_value: "host [host ...]"}}.
    """
    polylogyx_api = PolylogyxApi(domain=domain, username=username,
                                 password=password)
    current_time = int(time.time())
    start_time = current_time - 24 * 60 * 60
    indicators = ['md5', 'sha1', 'sha256']
    hash_list = {indicator: {} for indicator in indicators}
    indicator_not_empty_rules = [
        {"id": indicator, "field": indicator, "type": "string",
         "input": "text", "operator": "is_not_empty", "value": ''}
        for indicator in indicators]
    per_page_count = 10

    # DRY fix: the original duplicated the whole search_json literal for the
    # pid/no-pid cases; the two differ only by one rule, inserted here
    # between the two time bounds to mirror the original layout.
    and_rules = [
        {"id": "time", "field": "time", "type": "string", "input": "text",
         "operator": "less_or_equal", "value": current_time},
        {"id": "time", "field": "time", "type": "string", "input": "text",
         "operator": "greater_or_equal", "value": start_time}]
    if args.pid:
        and_rules.insert(1, {"id": 'pid', "field": 'pid', "type": "string",
                             "input": "text", "operator": "equal",
                             "value": args.pid})
    search_json = {"condition": "AND",
                   "rules": [{"condition": "OR",
                              "rules": indicator_not_empty_rules},
                             {"condition": "AND", "rules": and_rules}],
                   "valid": True}

    response = polylogyx_api.search_query_data({"conditions": search_json})
    acquired_results = 0
    if (response['response_code'] == 200 and 'results' in response
            and 'data' in response['results']):
        for key, value in response['results']['data'].items():
            if args.limit and acquired_results >= args.limit:
                break
            for query_result in value:
                total_results = query_result['count']
                if args.limit and acquired_results >= args.limit:
                    break
                pages = int(math.ceil(float(total_results) /
                                      float(per_page_count)))
                for x in range(0, pages):
                    response_query_result = polylogyx_api.search_query_data(
                        {"conditions": search_json,
                         "host_identifier": key,
                         "query_name": query_result['query_name'],
                         "start": x * per_page_count,
                         "limit": per_page_count})
                    if (response_query_result['response_code'] == 200
                            and 'results' in response_query_result
                            and 'data' in response_query_result['results']):
                        for entry in response_query_result['results']['data']:
                            for indicator in indicators:
                                if indicator in entry and entry[indicator]:
                                    if entry[indicator] in hash_list[indicator]:
                                        hash_list[indicator][entry[indicator]] = \
                                            hash_list[indicator][entry[indicator]] + " " + key
                                    else:
                                        hash_list[indicator][entry[indicator]] = key
                    # NOTE(review): counts a full page even when the page
                    # returned fewer rows — preserved from the original.
                    acquired_results = acquired_results + per_page_count
                    if args.limit and acquired_results >= args.limit:
                        break
    return json.dumps(hash_list)
def main(domain, username, password, host_identifier):
    """Set up the shared API client, then scan the host for suspicious processes."""
    global polylogyx_api
    polylogyx_api = PolylogyxApi(
        domain=domain, username=username, password=password)
    fetch_suspicous_process_data(host_identifier)