import argparse
import binascii
import json
import math
import os
import time

# Assumed import path for the PolyLogyx API client, based on the published
# polylogyx-apis package; adjust to match your installation.
from polylogyx_apis.api import PolylogyxApi


# List every node registered with the PolyLogyx server and pretty-print the response.
def main(domain=None, username=None, password=None):
    polylogyx_api = PolylogyxApi(domain=domain, username=username, password=password)
    response = polylogyx_api.get_nodes()
    print(json.dumps(response, sort_keys=False, indent=4))
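# A minimal sketch of invoking the node-listing main() from the command line;
# the flag names here are assumptions, not confirmed by this source.
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='List nodes registered with the PolyLogyx server.')
    parser.add_argument('--domain', required=True, help='IP or domain of the PolyLogyx server')
    parser.add_argument('--username', required=True, help='API username')
    parser.add_argument('--password', required=True, help='API password')
    args = parser.parse_args()
    main(domain=args.domain, username=args.username, password=args.password)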
# Scan autorun locations on each host, resolve registry entries to file paths,
# hash the files, and look the hashes up on VirusTotal. `args` is the
# module-level argparse namespace populated in __main__.
def main(domain, username, password, host_identifier):
    global polylogyx_api
    polylogyx_api = PolylogyxApi(domain=domain, username=username, password=password)
    host_identifiers = host_identifier.split(',')
    path_parser = PathParser()
    finished = False
    for host_identifier in host_identifiers:
        print('Scanning for autoruns from the host : {0}'.format(host_identifier))
        hashes = []
        for name, queries in AUTORUN_QUERIES.items():
            print("Getting data for the {0}".format(name))
            if finished:
                break
            for i in range(len(queries)):
                query = queries[i]
                print("Getting data for the query {0}".format(
                    str(i + 1) + "/" + str(len(queries)) + " " + name))
                query_results = get_distributed_query_data_over_websocket(query, host_identifier)
                response = path_parser.parse_resgistry_paths(query_results, name)
                hashes.extend(response)
                # Stop collecting once the optional --limit cap is reached.
                if args.limit and len(hashes) >= args.limit:
                    hashes = hashes[0:args.limit]
                    finished = True
                    break
        file_path = write_to_csv(hashes, host_identifier)
        print("Fetching hashes for the path obtained")
        filepath = fetch_hashes(args.domain, args.username, args.password, host_identifier, file_path)
        print("Fetching virustotal reputation for the hashes obtained")
        vt_score_path = fetch_vt_reputation.main(args.vt_api_key, filepath)
        anaylyse_vt_score_file(vt_score_path, host_identifier)
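# The scan above iterates AUTORUN_QUERIES, a mapping from an autorun category
# name to a list of osquery SQL strings. An illustrative, hypothetical shape
# (the real query set is defined elsewhere in the script):
AUTORUN_QUERIES = {
    'startup_items': ["select name, path, source from startup_items;"],
    'services': ["select name, path, start_type from services;"],
}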
def main(domain, username, password, host_identifier, file_path, limit):
    global polylogyx_api
    polylogyx_api = PolylogyxApi(domain=domain, username=username, password=password)
    return read_csv(file_path, host_identifier, limit)
# Pull the AppCompat (shim cache) registry blob from each host, write it to a
# local .bin file, then analyse the extracted binaries against VirusTotal.
def main(domain, username, password, host_identifier, vt_api_key):
    global polylogyx_api
    creds = {'username': username, 'password': password, 'domain': domain, 'vt_api_key': vt_api_key}
    polylogyx_api = PolylogyxApi(domain=domain, username=username, password=password)
    host_identifiers = host_identifier.split(',')
    for host_identifier in host_identifiers:
        print("Acquiring binary file from host : {0}".format(host_identifier))
        response = get_distributed_query_data_over_websocket(APPCOMPAT_SHIM_QUERY, host_identifier)
        if response and len(response) > 0 and 'data' in response[0]:
            try:
                os.makedirs(base_folder_path)
            except OSError:
                pass  # output directory already exists
            file_path = base_folder_path + '/' + 'appcompat.bin'
            with open(file_path, 'wb') as f:
                # The query returns the blob hex-encoded; decode it back to raw bytes.
                hb = binascii.a2b_hex(response[0]['data'])
                f.write(hb)
            print('Generated appcompat bin file for host : {0} at {1}'.format(host_identifier, file_path))
            vt_score_path = analyse_binary_file(file_path, base_folder_path + '/' + "appcompat.csv",
                                                host_identifier, creds)
            anaylyse_vt_score_file(vt_score_path, host_identifier)
        else:
            print("Nothing to acquire from the host : {0}".format(host_identifier))
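# For reference, binascii.a2b_hex used above is the inverse of hexlify; a
# quick standard-library round trip:
raw = b'\x00SDB\x01'                      # arbitrary bytes
encoded = binascii.hexlify(raw)           # b'0053444201'
assert binascii.a2b_hex(encoded) == raw   # decodes back to the original bytes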
# Send a distributed query to one host and block on the websocket until the
# results arrive. `sql` and `args` are module-level names.
def main(domain=None, username=None, password=None):
    polylogyx_api = PolylogyxApi(domain=domain, username=username, password=password)
    request = polylogyx_api.send_distributed_query(sql=sql, tags=[],
                                                   host_identifiers=[args.host_identifier])
    if request['response_code'] and 'results' in request:
        if request['results']['status'] == 'success':
            try:
                query_data = polylogyx_api.get_distributed_query_results(
                    request['results']['data']['query_id'])
                data = query_data.recv()
                return data
            except Exception as e:
                print(e)
        else:
            print(request['results']['message'])
    else:
        print("Error sending the query : {0}".format(sql))
    return
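# A minimal sketch of driving the send/receive round trip above. The values
# below are hypothetical placeholders; the real scripts populate `sql` and
# `args` through argparse:
sql = 'select pid, name, path from processes limit 5;'
args = argparse.Namespace(host_identifier='<host-identifier>')
result = main(domain='<server-ip>', username='<user>', password='<password>')
if result:
    print(result)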
# Count the prefetch files on the host first; only schedule the (expensive)
# acquisition query when there is something to fetch.
def main(domain, username, password, host_identifier):
    global polylogyx_api
    polylogyx_api = PolylogyxApi(domain=domain, username=username, password=password)
    distributed_result = exec_distributed_query(host_identifier, PREFETCH_QUERY_COUNT)
    if distributed_result:
        count_result = json.loads(distributed_result[0])
        if 'data' in count_result and int(count_result['data'][0]['count(*)']):
            sql = PREFETCH_QUERY
            print("PolyLogyx")
            print("Acquiring prefetch files for the node : {0}".format(host_identifier))
            distributed_result = exec_distributed_query(host_identifier, sql)
            if distributed_result:
                query_id = distributed_result[1]
                sleep_and_download_file(host_identifier, query_id)
        else:
            print("No prefetch file found to be scanned!")
    else:
        print("Host is offline or host identifier provided may be invalid!")
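# PREFETCH_QUERY_COUNT and PREFETCH_QUERY are module-level SQL constants; an
# illustrative, hypothetical pair against osquery's file table (which requires
# a path or directory constraint):
PREFETCH_QUERY_COUNT = "select count(*) from file where directory = 'C:\\Windows\\Prefetch';"
PREFETCH_QUERY = "select * from file where directory = 'C:\\Windows\\Prefetch';"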
# Collect every md5/sha1/sha256 seen in the last 24 hours of query results,
# optionally filtered by pid, and record which hosts each hash appeared on.
def main(domain=None, username=None, password=None):
    polylogyx_api = PolylogyxApi(domain=domain, username=username, password=password)
    current_time = int(time.time())
    start_time = current_time - 24 * 60 * 60  # look back over the last 24 hours
    indicators = ['md5', 'sha1', 'sha256']
    indicator_not_empty_rules = []
    hash_list = {}
    for indicator in indicators:
        hash_list[indicator] = {}
        indicator_not_empty_rules.append(
            {"id": indicator, "field": indicator, "type": "string", "input": "text",
             "operator": "is_not_empty", "value": ''})
    per_page_count = 10
    # Match events in the time window that carry at least one hash; pin to a
    # single pid when one was supplied.
    time_window_rules = [
        {"id": "time", "field": "time", "type": "string", "input": "text",
         "operator": "less_or_equal", "value": current_time},
        {"id": "time", "field": "time", "type": "string", "input": "text",
         "operator": "greater_or_equal", "value": start_time}]
    if args.pid:
        time_window_rules.insert(1, {"id": 'pid', "field": 'pid', "type": "string", "input": "text",
                                     "operator": "equal", "value": args.pid})
    search_json = {"condition": "AND",
                   "rules": [{"condition": "OR", "rules": indicator_not_empty_rules},
                             {"condition": "AND", "rules": time_window_rules}],
                   "valid": True}
    response = polylogyx_api.search_query_data({"conditions": search_json})
    acquired_results = 0
    if response['response_code'] == 200 and 'results' in response and 'data' in response['results']:
        for dict_item in response['results']['data']:
            if args.limit and acquired_results >= args.limit:
                break
            for query_result in dict_item['queries']:
                total_results = query_result['count']
                if args.limit and acquired_results >= args.limit:
                    break
                # Page through the matches for this (host, query) pair.
                for x in range(0, int(math.ceil(float(total_results) / float(per_page_count)))):
                    response_query_result = polylogyx_api.search_query_data(
                        {"conditions": search_json,
                         "host_identifier": dict_item['host_identifier'],
                         "query_name": query_result['query_name'],
                         "start": x * per_page_count,
                         "limit": per_page_count})
                    if response_query_result['response_code'] == 200 \
                            and 'results' in response_query_result \
                            and 'data' in response_query_result['results']:
                        for entry in response_query_result['results']['data']['results']:
                            for indicator in indicators:
                                if indicator in entry and entry[indicator]:
                                    # Append this host to the hash's list of sightings.
                                    if entry[indicator] in hash_list[indicator]:
                                        hash_list[indicator][entry[indicator]] = \
                                            hash_list[indicator][entry[indicator]] + " " + dict_item['host_identifier']
                                    else:
                                        hash_list[indicator][entry[indicator]] = dict_item['host_identifier']
                    acquired_results = acquired_results + per_page_count
                    if args.limit and acquired_results >= args.limit:
                        break
    return json.dumps(hash_list)
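# The returned JSON maps each indicator type to {hash value: space-separated
# host identifiers it was seen on}; an illustrative, made-up result:
# {
#     "md5": {"d41d8cd98f00b204e9800998ecf8427e": "host-a host-b"},
#     "sha1": {},
#     "sha256": {"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855": "host-a"}
# }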
def main(domain, username, password, host_identifier):
    global polylogyx_api
    polylogyx_api = PolylogyxApi(domain=domain, username=username, password=password)
    fetch_suspicous_process_data(host_identifier)