def form_valid(self, form):
    """
    Collect the user-submitted hash query values from the workflow, run
    them through the local hash processor, and render the results page.

    :param form: validated form instance (required by the FormView contract)
    :return: rendered pan_cnc/results.html response with the processor output
    """
    workflow = self.get_workflow()

    # values captured from the user submitted form
    payload = {
        'query_tag': workflow.get('query_tag'),
        'hashes': workflow.get('hashes'),
        'output_type': workflow.get('output_type'),
        'api_key': workflow.get('api_key'),
    }

    # FIX: removed unused TORT_HOST / TORT_PORT lookups and the dead
    # commented-out requests.post call - the hashes are processed locally
    resp = process_hashes(payload)
    print(f"The response is: {resp}")

    results = super().get_context_data()
    results['results'] = resp
    return render(self.request, 'pan_cnc/results.html', context=results)
def form_valid(self, form):
    """
    POST the rendered snippet payload to the content_downloader service
    and redirect to the completion page, surfacing download status to the
    user via Django messages.

    :param form: validated form instance (required by the FormView contract)
    :return: redirect to the 'complete' page
    """
    payload = self.render_snippet_template()

    # get the content_downloader host and port from the .panrc file,
    # environment, or default name lookup.
    # docker-compose will host content_downloader under the
    # 'content_downloader' domain name
    content_downloader_host = cnc_utils.get_config_value(
        'CONTENT_DOWNLOADER_HOST', 'content_downloader')
    content_downloader_port = cnc_utils.get_config_value(
        'CONTENT_DOWNLOADER_PORT', '5003')

    resp = requests.post(
        f'http://{content_downloader_host}:{content_downloader_port}/download_content',
        json=json.loads(payload))

    print(f'Download returned: {resp.status_code}')
    if resp.status_code != 200:
        messages.add_message(self.request, messages.ERROR,
                             'Could not download dynamic content!')

    if 'Content-Disposition' in resp.headers:
        filename = resp.headers['Content-Disposition'].split('=')[1]
        # FIX: filename was parsed but never shown to the user - include it
        # in the INFO message instead of a placeholder
        messages.add_message(
            self.request, messages.INFO,
            f'Downloaded Dynamic Content file: {filename}')

    return HttpResponseRedirect('complete')
def processHashList(hashList, outputType, queryTag, hashType, apiKey):
    """Run every hash in hashList through AutoFocus and persist the results.

    For 'text' output the path of the generated results file is returned;
    otherwise the merged per-hash result dict is returned.
    """
    timestamp = datetime.datetime.now().strftime("%y-%m-%d-%H-%M")
    resultFile = f"/tmp/{queryTag}_{timestamp}.json"

    # Define the default Elasticsearch client
    connections.create_connection(
        hosts=cnc_utils.get_config_value('ELASTICSEARCH_HOST', 'localhost'))

    # process each hash serially and fold the stored results together
    merged = dict()
    for singleHash in hashList:
        hashInfo = getHashInfo(singleHash, outputType, queryTag, apiKey)
        merged.update(storeResults(hashInfo, resultFile, outputType))

    if "text" in outputType:
        return f"{resultFile}"

    return merged
def get_context_data(self, **kwargs):
    """
    Override get_context_data so we can modify the context as necessary

    Ensures a VM auth key and Panorama IP are saved to the workflow, then
    rebuilds the form's 'minion' field from the live salt minion list.

    :param kwargs: passed through to the parent implementation
    :return: template context containing the rebuilt form
    """
    print('Getting vm_auth_key')
    vm_auth_key = self.get_value_from_workflow('vm_auth_key', '')
    if vm_auth_key == '':
        vm_auth_key = pan_utils.get_vm_auth_key_from_panorama()
        vakl = vm_auth_key.split(' ')
        # FIX: guard must match the index being read - the original
        # 'len(vakl) > 2' allowed vakl[3] to raise IndexError on a
        # 3-token response
        if len(vakl) > 3:
            vm_auth_key = vakl[3]
        print(vm_auth_key)

    panorama_ip = cnc_utils.get_config_value('PANORAMA_IP', '0.0.0.0')
    print(panorama_ip)
    self.save_value_to_workflow('vm_auth_key', vm_auth_key)
    self.save_value_to_workflow('panorama_ip', panorama_ip)

    # Ensure we capture FW_NAME in case that has been set previously (easy button case)
    fw_name = self.get_value_from_workflow('FW_NAME', '')
    if fw_name != '':
        self.save_value_to_workflow('vm_name', fw_name)

    context = super().get_context_data(**kwargs)
    form = context['form']

    # query salt for the list of registered minions
    salt_util = salt_utils.SaltUtil()
    minion_list = salt_util.get_minion_list()

    # we need to construct a new ChoiceField with the following basic format
    # snippet_name = forms.ChoiceField(choices=(('gold', 'Gold'), ('silver', 'Silver'), ('bronze', 'Bronze')))
    choices_list = list()
    # grab each minion and construct a simple tuple with name and label
    for minion in minion_list:
        minion_label = minion.split('.')[0]
        choices_list.append((minion, minion_label))

    # sort by the label attribute (index 1 in the tuple)
    choices_list = sorted(choices_list, key=lambda k: k[1])

    # make our new field from the tuple of choices and overwrite the
    # hardcoded version on the original form
    new_choices_field = forms.ChoiceField(choices=tuple(choices_list))
    form.fields['minion'] = new_choices_field

    # save to kwargs and call parent for additional processing
    context['form'] = form
    return context
def get_context_data(self, **kwargs):
    """Build a context whose 'redirect_link' points at the Panorama UI.

    Falls back from the PANORAMA_IP config value to the workflow's
    TARGET_IP; when neither is set, an error message is flashed and the
    redirect target becomes '/'.
    """
    address = cnc_utils.get_config_value('PANORAMA_IP', '0.0.0.0')
    if address == '0.0.0.0':
        # config had no value - try the workflow value instead
        address = self.get_value_from_workflow('TARGET_IP', '0.0.0.0')
        if address == '0.0.0.0':
            print('Could not load panorama ip')
            messages.add_message(self.request, messages.ERROR,
                                 'Could not locate Panorama Configuration')
            return {'redirect_link': '/'}

    return {'redirect_link': f'https://{address}'}
def get_query_results(af_ip, af_api_key, search_dict):
    """Poll AutoFocus until the search tracked by 'af_cookie' completes.

    check for a hit and then retrieve search results when hit = 1

    :param af_ip: AutoFocus host identifier
                  (NOTE(review): currently unused - the results URL comes
                  from config; confirm whether this parameter is still needed)
    :param af_api_key: AutoFocus API key sent in the request body
    :param search_dict: dict containing the 'af_cookie' for a submitted search
    :return: the final AutoFocus results JSON (as a dict)
    """
    autofocus_results = {}
    cookie = search_dict['af_cookie']
    print(f'Tracking cookie is {cookie}')
    query_status = ''
    # results endpoint; override via AUTOFOCUS_RESULTS_URL in config
    results_url = cnc_utils.get_config_value(
        'AUTOFOCUS_RESULTS_URL',
        'https://autofocus.paloaltonetworks.com/api/v1.0/samples/results/')
    cookie_url = results_url + cookie
    headers = {"Content-Type": "application/json"}
    results_values = {"apiKey": af_api_key}
    # poll every 5 seconds until AutoFocus reports the search finished
    while query_status != 'FIN':
        time.sleep(5)
        try:
            print(f"sending {cookie_url}")
            results = requests.post(url=cookie_url, headers=headers,
                                    data=json.dumps(results_values))
            results.raise_for_status()
        except requests.exceptions.HTTPError:
            # NOTE(review): exits the whole process on an HTTP error -
            # confirm this is acceptable when called from a web context
            print(results)
            print(results.text)
            print('\nCorrect errors and rerun the application\n')
            sys.exit()
        autofocus_results = results.json()
        if 'total' in autofocus_results:
            if autofocus_results['total'] == 0:
                # search has a cookie but no hits counted yet
                print('Now waiting for a hit...')
                print(autofocus_results)
            else:
                # at least one hit recorded - stop polling
                query_status = 'FIN'
        else:
            # no 'total' key yet means the search has not started returning
            print('Autofocus still queuing up the search...')
    return autofocus_results
def getHashInfo(thisHash, outputType, queryTag=None, apiKey=None):
    """
    Query AutoFocus for sample and signature coverage data on one hash.

    :param thisHash: MD5 hash string to look up
    :param outputType: when it contains 'text' the raw result dict is
                       returned; otherwise the result is bulk-indexed into
                       Elasticsearch and a per-hash status dict is returned
    :param queryTag: tag stored with the record; falls back to
                     app.config['QUERY_TAG'] when not supplied
    :param apiKey: AutoFocus API key; falls back to the configured
                   AUTOFOCUS_API_KEY when not supplied
    :return: hash data dict ('text' mode) or {hash: 'SUCCESS'/'FAILURE'/error}
    """
    # FIX: callers (e.g. processHashList) pass queryTag and apiKey
    # positionally; accept them with defaults so the old two-argument
    # call form keeps working
    if apiKey is None:
        apiKey = cnc_utils.get_config_value("AUTOFOCUS_API_KEY", "NOT-SET")
    if queryTag is None:
        queryTag = app.config['QUERY_TAG']

    hostname = "autofocus.paloaltonetworks.com"
    hashType = "MD5"
    now = datetime.datetime.now().replace(microsecond=0).isoformat('T')
    hashCounters = init_hash_counters()
    sampleData = {}
    hashDataDict = {}

    # query Autofocus to get sample and signature coverage data
    try:
        sampleData = get_sample_data(hostname, apiKey, thisHash,
                                     hashType, hashCounters)
    except Exception as e:
        logger.error(f"Unable to get sample data--ERROR: {e}")

    try:
        if sampleData['verdict'] != 'No sample found':
            hashDataDict, hashCounters = \
                get_sig_coverage(hostname, apiKey, sampleData, hashCounters)
    except Exception as e:
        logger.error(e)

    # Add some pertinent info to the data before saving
    hashDataDict['query_time'] = now
    hashDataDict['query_tag'] = queryTag

    if 'text' in outputType:
        return hashDataDict

    # index the record into Elasticsearch and report per-hash status
    try:
        result = helpers.bulk(es, loadJSON(hashDataDict))
        app.logger.debug(f"Result of indexing {thisHash} "
                         f"is {result}")
        if result[0] == 1:
            return {thisHash: "SUCCESS"}
        return {thisHash: "FAILURE"}
    except Exception as e:
        return {thisHash: f"Unknown error {e}"}
def init_application():
    """Perform TORT start-up configuration lookups.

    Check to make sure we have the API key(s) set first.
    """
    # resolve (and thereby validate presence of) the TORT endpoint config
    host = cnc_utils.get_config_value("TORT_HOST", "localhost")
    port = cnc_utils.get_config_value("TORT_PORT", 5010)
    print('Starting TORT')
def process_hashes(payload):
    '''
    Requires a dict payload with the following keys:
      query_tag   - name of search so you can query it later in Kibana
      hashes      - delimited string of hashes to go get info on
      output_type - contains 'text' to get the raw output file contents back
      api_key     - optional AutoFocus API key; falls back to .panrc config

    :return: output file contents ('text' mode), the per-hash result dict,
             or an error string / KeyError instance on failure
    '''
    init_application()
    try:
        postedJSON = payload
        print(f"Received the following JSON: {postedJSON}")

        queryTag = postedJSON['query_tag']
        hashListString = postedJSON['hashes']
        outputType = postedJSON['output_type']
        apiKey = postedJSON.get('api_key', '')
        hashType = 'MD5'  # postedJSON['hash_type']

        if apiKey == "":
            apiKey = cnc_utils.get_config_value("AUTOFOCUS_API_KEY",
                                                "NOT-SET")
        if apiKey == "NOT-SET":
            return (
                "There is no API key set in .panrc and there wasn't one entered"
            )

        # hashes should have newlines replaced by HTML char for newline
        # '%0A'; split on that here to get a list, falling back to other
        # common delimiters
        if len(hashListString) == 32:
            # this is a single entry (one MD5) instead of a list!
            hashList = [hashListString]
        elif '%0A' in hashListString:
            hashList = hashListString.strip('%0A').split('%0A')
        elif ',' in hashListString:
            hashList = hashListString.split(',')
        elif '\\n' in hashListString:
            # literal backslash-n sequences from an escaped payload
            hashList = hashListString.split('\\n')
        elif '\r\n' in hashListString:
            # CRLF separated input
            hashList = hashListString.split('\r\n')
        else:
            return 'Could not parse JSON payload ' + str(postedJSON)

        # now we should have a valid hash list to work with
        print(f"Hash list is {hashList}")

        if "text" in outputType:
            outFile = processHashList(hashList, outputType, queryTag,
                                      hashType, apiKey)
            path, fileName = os.path.split(f"{outFile}")
            print(f"returning fileName is {fileName}")
            print(f"path is {path}")
            with open(outFile, "r") as myFile:
                return myFile.read()

        hashListResult = processHashList(hashList, outputType, queryTag,
                                         hashType, apiKey)
        print(f"{hashListResult}")
        # FIX: the result was printed but never returned to the caller
        return hashListResult

    except KeyError as ke:
        print(ke)
        return ke
    except Exception as e:
        print(e)
        # FIX: the error message was built but the function returned None
        return f"Problem with query to Autofocus: {e}"
def form_valid(self, form):
    """Compile init-cfg.txt, request a bootstrap package, and return it.

    Renders the init-cfg template, saves it (base64 encoded) to the
    workflow, POSTs the rendered snippet payload to the bootstrapper
    service, then either streams the package back as a file download or
    renders the service's text/JSON response on the results page.

    :param form: validated form instance (required by the FormView contract)
    :return: HttpResponse file download or rendered pan_cnc/results.html
    """
    context = self.get_snippet_context()

    # fall back to TARGET_IP when no explicit panorama_ip was supplied
    if 'panorama_ip' not in context and 'TARGET_IP' in context:
        print('Setting panorama ip on context')
        context['panorama_ip'] = context['TARGET_IP']

    print('Compiling init-cfg.txt')
    ic = snippet_utils.render_snippet_template(self.service, self.app_dir, context, 'init_cfg.txt')
    print(ic)
    if ic is not None:
        # the workflow stores the init-cfg contents base64 encoded
        icb = bytes(ic, 'utf-8')
        encoded_init_cfg_string = urlsafe_b64encode(icb)
        self.save_value_to_workflow('init_cfg_string', encoded_init_cfg_string.decode('utf-8'))

    payload = self.render_snippet_template()

    # get the bootstrapper host and port from the .panrc file, environment, or default name lookup
    # docker-compose will host bootstrapper under the 'bootstrapper' domain name
    bootstrapper_host = cnc_utils.get_config_value('BOOTSTRAPPER_HOST', 'bootstrapper')
    bootstrapper_port = cnc_utils.get_config_value('BOOTSTRAPPER_PORT', '5000')

    print(f'Using bootstrapper_host: {bootstrapper_host}')
    print(f'Using bootstrapper_port: {bootstrapper_port}')

    resp = requests.post(f'http://{bootstrapper_host}:{bootstrapper_port}/generate_bootstrap_package',
                         json=json.loads(payload)
                         )

    content_type = ''
    if 'Content-Type' in resp.headers:
        content_type = resp.headers['Content-Type']

    # prefer the service-supplied filename; otherwise fall back to the
    # hostname from the snippet context
    if 'Content-Disposition' in resp.headers:
        filename = resp.headers['Content-Disposition'].split('=')[1]
    else:
        filename = context.get('hostname')

    print(resp.headers)

    if resp.status_code == 200:
        if 'json' in content_type:
            # a JSON body means the service returned a message rather
            # than a binary package - show it on the results page
            return_json = resp.json()
            if 'response' in return_json:
                result_text = return_json["response"]
            else:
                result_text = resp.text

            results = dict()
            results['results'] = str(resp.status_code)
            results['results'] += '\n'
            results['results'] += result_text
            return render(self.request, 'pan_cnc/results.html', context=results)
        else:
            # binary package - stream it to the browser as an attachment
            response = HttpResponse(content_type=content_type)
            response['Content-Disposition'] = 'attachment; filename=%s' % filename
            response.write(resp.content)
            return response
    else:
        # error from the bootstrapper - surface status code and body text
        results = super().get_context_data()
        results['results'] = str(resp.status_code)
        results['results'] += '\n'
        results['results'] += resp.text
        return render(self.request, 'pan_cnc/results.html', context=results)