def wait_for_analysis(self):
    """Poll the sandbox until the analysis finishes, then select the run with
    the highest detection score.

    The API returns per-run scores as a ';'-separated string (e.g. "0;3;1");
    the index of the best-scoring run is stored in self.joe_params['run'].

    Raises:
        ModuleExecutionError: if the analysis does not finish before
            self.wait_timeout seconds have elapsed.
    """
    url = self.base_url + 'analysis/check'
    waited_time = 0
    status = None  # guard against an unbound name when wait_timeout <= 0
    while waited_time < self.wait_timeout:
        response = requests.post(url, data=self.joe_params)
        status = response.json()['status']
        if status == 'finished':
            # Figure out which run is the most interesting.
            self.joe_params['run'] = 0
            detections = response.json()['detections'].rstrip(';').split(';')
            max_score = 0
            for i, score in enumerate(detections):
                # BUG FIX: scores arrive as strings; compare numerically
                # instead of lexicographically / str-vs-int.
                score = int(score)
                if score > max_score:
                    max_score = score
                    self.joe_params['run'] = i
            break
        time.sleep(self.wait_step)
        waited_time += self.wait_step
    if status != 'finished':
        raise ModuleExecutionError('could not get report before timeout.')
def submit(self, target, type): url = self.api + "submit" param = { "auth": HTTPBasicAuth(self.apikey, self.secret), "data": { "environmentId": self.environmentId, "hybridanalysis": ("false", "true")[self.hybridanalysis], "nosharevt": ("false", "true")[self.nosharevt], "torenabledanalysis": ("false", "true")[self.torenabledanalysis] }, "headers": self.headers, "verify": False } msg = "unsuccessful file submission" if type == "url": url += "url" param["data"]["analyzeurl"] = target # elif type == "apk": # nothing changes # pass else: # apk, windows param["files"] = {"file": open(target, 'rb')} data = self.post(url, param, msg, json=True) if data: try: self.state = data["sha256"] except KeyError: raise VxStreamAPIDataFormatError(url) self.inf("successful file submission") else: raise ModuleExecutionError(msg + ", exiting")
def query(self, url, param, msg):
    """GET `url` with requests kwargs `param`; return the API "response"
    payload on success, or None after logging a warning.

    Args:
        url: full API endpoint.
        param: keyword arguments forwarded to requests.get.
        msg: prefix used for every warning/error message.

    Raises:
        ModuleExecutionError: when the API key quota is exhausted (HTTP 429).
    """
    res = requests.get(url, **param)
    msg = msg + " - "
    if res.status_code == HTTP.OK:
        if res.headers["Content-Type"] == HTTP.json:
            # NOTE: removed the dead local `null = None` — res.json() already
            # maps JSON null to Python None on its own.
            data = res.json()
            if data["response_code"] == RESPONSE_ERROR:
                self.warn(msg + data["response"]["error"])
            elif data["response_code"] == RESPONSE_OK:
                return data["response"]
            else:
                self.warn(msg + "unexpected JSON response code " +
                          str(data["response_code"]))
        else:
            self.warn(msg + "unexpected response content type " +
                      str(res.headers["Content-Type"]))
    else:
        msg += "%s (HTTP" + str(res.status_code) + " " + str(res.reason) + ")"
        if res.status_code == HTTP.TooManyRequests:
            raise ModuleExecutionError(msg % "API key quota has been reached")
        else:
            self.error(msg % "unspecified error")
    return None
def each(self, target):
    """Run capa on `target` and collect MBC behaviors grouped by objective.

    Populates self.results as {objective: ["id: behavior::method", ...]} and
    returns True when at least one behavior was found.
    """
    self.results = {}

    try:
        rules = capa.rules.RuleSet(capa.main.get_rules(self.rules, disable_progress=True))
        extractor = capa.main.get_extractor(
            target, "auto", capa.main.BACKEND_VIV, [], False, disable_progress=True)
        capabilities, counts = capa.main.find_capabilities(rules, extractor, disable_progress=True)
    except Exception as error:
        raise ModuleExecutionError(self, 'Could not run capa on target with error: ' + str(error))

    meta = capa.main.collect_metadata('', target, self.rules, extractor)
    meta['analysis'].update(counts)
    doc = capa.render.result_document.convert_capabilities_to_result_document(meta, rules, capabilities)

    # extract all MBS behaviors
    # taken from https://github.com/mandiant/capa/blob/master/scripts/capa_as_library.py
    if doc:
        for rule in rutils.capability_rules(doc):
            mbc_entries = rule['meta'].get('mbc')
            if not mbc_entries:
                continue
            for mbc in mbc_entries:
                behavior = f"{mbc['id']}: {mbc['behavior']}::{mbc.get('method')}"
                self.results.setdefault(mbc['objective'], []).append(behavior)

    return len(self.results) > 0
def each(self, target):
    """Download `target` over HTTP and register it as an extracted file.

    The filename is taken from the Content-Disposition header when present,
    otherwise from the last URL path segment. Raises ModuleExecutionError on
    any non-200 response.
    """
    self.add_ioc(target)
    response = requests.get(target, stream=True)

    # Guard clause: anything but 200 is a hard failure.
    if response.status_code != 200:
        raise ModuleExecutionError("Could not download file. Status: {}".format(response.status_code))

    tmpdir = tempdir()

    try:
        filename = parse_header(response.headers['content-disposition'])[1]['filename']
    except KeyError:
        filename = target.split('/')[-1]
    if not filename:
        filename = "no_filename"

    filepath = os.path.join(tmpdir, filename)
    with open(filepath, 'wb') as fd:
        for chunk in response.iter_content(1024):
            fd.write(chunk)

    self.add_extracted_file(filepath)
    self.add_ioc(target, 'payload_delivery')
    return True
def process_report(self):
    """Fetch the Triage overview report for the submitted task and extract
    its findings, wrapping any failure in a ModuleExecutionError."""
    try:
        client = Client(self.apikey, root_url=self.api_endpoint)
        self.extract_info(client.overview_report(self.task_id))
    except Exception as error:
        raise ModuleExecutionError(
            'Error encountered while processing report:\n{}'.format(error))
def submit_url(self, target_url, options):
    """Submit `target_url` for analysis and remember the created task id.

    Raises:
        ModuleExecutionError: with the API's error_value when the expected
            task id is missing from the response.
    """
    url = urljoin(self.web_endpoint, '/api/tasks/create/url/')
    options['url'] = target_url
    response = requests.post(url, data=options)
    try:
        self.task_id = response.json()['data']['task_ids'][0]
    except (KeyError, IndexError, TypeError, ValueError):
        # BUG FIX: the bare `except:` also swallowed SystemExit and
        # KeyboardInterrupt; only payload-shape/decoding errors belong here.
        raise ModuleExecutionError('{0}'.format(response.json()['error_value']))
def _wait_for_completion(self, state):
    """Block until the VM reaches one of the given state(s), up to 120s.

    Args:
        state: a single state name or an iterable of acceptable names.

    Raises:
        ModuleExecutionError: on timeout.
    """
    state = iterify(state)

    def correct_state():
        return self._state() in state

    if with_timeout(correct_state, timedelta(seconds=120), 0.5) is None:
        # BUG FIX: the message previously interpolated `self.state`, which is
        # not the state being waited for; report the requested state(s).
        raise ModuleExecutionError(
            'Timeout while waiting for machine "{}" to be "{}"'.format(self.vm_label, state))
def submit_file(self, filepath, options):
    """Upload the file at `filepath` for analysis and remember the task id.

    Raises:
        ModuleExecutionError: with the API's error_value when the expected
            task id is missing from the response.
    """
    url = urljoin(self.web_endpoint, '/api/tasks/create/file/')
    # BUG FIX: close the file handle after the upload (it was leaked).
    with open(filepath, 'rb') as fp:
        response = requests.post(url, files={'file': fp}, data=options)
    try:
        self.task_id = response.json()['data']['task_ids'][0]
    except (KeyError, IndexError, TypeError, ValueError):
        # BUG FIX: narrowed the bare `except:` to the expected payload errors.
        raise ModuleExecutionError('{0}'.format(response.json()['error_value']))
def _init_vm(self):
    """Resolve the configured virtualization module, then initialize and
    prepare it for this analysis."""
    from fame.core.module_dispatcher import dispatcher

    vm = dispatcher.get_virtualization_module(self.virtualization)
    if vm is None:
        raise ModuleExecutionError(
            'missing (or disabled) virtualization module: {}'.format(self.virtualization))

    self._vm = vm
    self._vm.initialize(self.locked_label, self.base_url, self.snapshot)
    self._vm.prepare()
def heartbeat(self):
    """Poll the sandbox until the analysis reaches the SUCCESS state.

    Waits self.graceperiod seconds before the first check, then polls every
    self.interval seconds up to self.timeout seconds overall. Invalid
    timeout/graceperiod settings fall back to 600/300 seconds.

    Raises:
        VxStreamAPIDataFormatError: on malformed API data.
        ModuleExecutionError: when the overall timeout is exceeded.
    """
    url = self.api + "state/" + self.state
    param = {
        "params": {
            "apikey": self.apikey,
            "secret": self.secret,
            "environmentId": self.environmentId
        },
        "headers": self.headers
    }
    msg = "unsuccessful heartbeat check"

    def validated_seconds(value, name, default):
        # Coerce a configured duration to a non-negative int, warning and
        # falling back to `default` on invalid input. (Extracted: this logic
        # was duplicated for timeout and grace period.)
        try:
            value = int(value)
            if value < 0:
                raise ValueError
            return value
        except ValueError:
            self.warn("invalid %s (%s) value, "
                      "using default value of %d seconds" % (name, value, default))
            return default

    self.timeout = validated_seconds(self.timeout, "timeout", 600)
    self.graceperiod = validated_seconds(self.graceperiod, "grace period", 300)

    self.inf("waiting %s seconds before checking the analysis status" % self.graceperiod)
    sleep(self.graceperiod)

    stopwatch = 0
    while stopwatch < self.timeout:
        data = self.query(url, param, msg, json=True)
        try:
            if data and data["state"] == "SUCCESS":
                break
        except KeyError:
            raise VxStreamAPIDataFormatError(url)
        # Wait one polling interval, clamped to the time remaining.
        tmp = min(self.interval, self.timeout - stopwatch)
        # BUG FIX: the log previously reported the total remaining time while
        # actually sleeping only `tmp` seconds.
        self.inf("analysis has not finished yet, waiting " + str(tmp) + " more seconds")
        sleep(tmp)
        stopwatch += tmp
    if stopwatch >= self.timeout:
        raise ModuleExecutionError("report retrieval timed out")
    self.inf("analysis finished, retrieving report")
def _state(self, to_print=False):
    # Return the current VirtualBox machine state (e.g. "running",
    # "poweroff") by parsing `VBoxManage showvminfo --machinereadable`
    # output. When to_print is set, the raw output is echoed for debugging.
    # Raises ModuleExecutionError when no VMState line is present.
    output = self._vbox("showvminfo", self.vm_label, "--machinereadable")
    if to_print:
        # Python 2 print statement — this module targets py2.
        print output
    for line in output.splitlines():
        if line.startswith('VMState="'):
            # Strip the 'VMState="' prefix (9 chars) and the trailing quote.
            return line[9:-1]
    raise ModuleExecutionError('Could not determine machine state for "{}"'.format(self.vm_label))
def process_report(self):
    """Retrieve the finished analysis results over the REST API and hand
    them to extract_info, wrapping any failure in a ModuleExecutionError."""
    try:
        headers = {'Authorization': 'Key %s' % self.api_key}
        result = requests.get(self.api_endpoint + 'results/' + self.task_id,
                              headers=headers)
        self.extract_info(result.json())
    except Exception as error:
        raise ModuleExecutionError(
            'Error encountered while processing report:\n{}'.format(error))
def wait_for_analysis(self):
    """Poll the 'get_completed' endpoint until our task id shows up.

    Keeps a cursor (`after`) so each poll only fetches newly completed tasks,
    and pages through results while more_results_available is set.

    Raises:
        ModuleExecutionError: if the task is not seen within
            self.wait_timeout seconds.
    """
    # Cursor: only ask for tasks completed after "now" (formatted for the API).
    after = datetime.datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S")
    more_data = "moreData"
    json_headers = {'content-type': 'application/json'}
    waited_time = 0
    analyzed_uuids = []

    while True:
        # Drain every completed-task page the server currently has.
        while True:
            url = urljoin(self.api_endpoint, 'analysis/get_completed.json')
            if self.useDevTestServer == "true":
                # moreData is only needed for the dev server.
                # TODO: remove moreData
                response = requests.post(url, data=json.dumps({
                    more_data: 'nothing',
                    "after": after
                }), headers=json_headers)
                after = response.json()["data"]["before"]
                more_data = "noMore"
            else:
                response = requests.post(url,
                                         data=json.dumps({"after": after}),
                                         headers=json_headers)
                after = response.json()["data"]["before"]

            analyzed_uuids.extend(response.json()['data']['tasks'])

            # Are there more to fetch?
            if response.json()['data']['more_results_available'] != 1:
                break

        # BUG FIX: replaced the 'true'/'false' string flag and the manual
        # O(n) scan with a boolean membership test, and removed the leftover
        # `print("Break !!!")` debug output.
        if self.task_id in analyzed_uuids:
            break
        elif waited_time > self.wait_timeout:
            raise ModuleExecutionError(
                'could not get report before timeout.')
        else:
            time.sleep(self.wait_step)
            waited_time += self.wait_step

    self.log('info', "Found Task-UUID!")
def each(self, target):
    """Analyze `target` with capa and store the rendered capability report
    (plus run metadata) in self.results."""
    if self.updateall:
        self.updateCapaRules()

    self.log("info", "Begin Processing Sample")
    self.results = {}

    # Generic run metadata shown alongside the capa results.
    generic = [
        ("Rule Path", self.rule_path),
        ("Commit Date", self.getCommitDate()),
        ("GitCommit", self.getCommitHash()),
        ("Filetype", self.filetype),
    ]

    # Pseudo argv handed to capa's metadata collector.
    request = ['-r', self.rule_path, '-f', self.filetype, target]

    # BUG FIX: the bare `except:` clauses also caught SystemExit and
    # KeyboardInterrupt; narrowed to Exception.
    try:
        taste = get_file_taste(target)  # validates the target is readable/parsable
    except Exception:
        raise ModuleExecutionError(self, "Target Sample Error")

    try:
        rules = get_rules(self.rule_path)
    except Exception:
        raise ModuleExecutionError(self, "Rule Path Error")

    rules = RuleSet(rules)
    extractor = get_extractor(target, self.filetype)
    meta = collect_metadata(request, target, self.rule_path, self.filetype, extractor)
    capabilities, counts = find_capabilities(rules, extractor)
    meta['analysis'].update(counts)

    self.log("info", "Begin Process Analysis Results")
    self.results = render.render_fame(meta, rules, capabilities)
    self.results['generic'] = generic
    return True
def wait_for_analysis(self):
    """Poll Joe Sandbox until the submission is finished, then resolve the
    most relevant analysis webid and analysisid.

    Raises:
        ModuleExecutionError: on timeout or on any JoeException from the API.
    """
    waited_time = 0
    status = None  # BUG FIX: avoid NameError when wait_timeout <= 0
    while waited_time < self.wait_timeout:
        try:
            data = self.joe.submission_info(self.submission_id)
            status = data["status"]
        except JoeException as error:
            raise ModuleExecutionError("Error while waiting for analysis:\n{}".format(error))

        if status == 'finished':
            break

        time.sleep(self.wait_step)
        waited_time += self.wait_step

    if status != 'finished':
        raise ModuleExecutionError('Could not get report before timeout.')

    try:
        submission_info = self.joe.submission_info(self.submission_id)
        self.webid = submission_info["most_relevant_analysis"]["webid"]
        analysis_info = self.joe.analysis_info(self.webid)
        self.analysisid = analysis_info["analysisid"]
    except JoeException as error:
        raise ModuleExecutionError("Error while getting analysis details:\n{}".format(error))
def get_unpacked_executables(self):
    """Download the 'unpackpe' archive for this analysis and register every
    contained file as an unpacked executable.

    Raises:
        ModuleExecutionError: on any download/extraction error.
    """
    try:
        data = self.joe.analysis_download(self.webid, "unpackpe")
        unpackpe = io.BytesIO(data[1])
        tmpdir = tempdir()
        with ZipFile(unpackpe) as zf:
            # BUG FIX: ZipFile passwords must be bytes on Python 3; a str
            # pwd raises TypeError when the archive is actually encrypted.
            unpacked_files = [zf.extract(name, tmpdir, pwd=b'infected')
                              for name in zf.namelist()]
        self.register_files('unpacked_executable', unpacked_files)
    except Exception as err:
        raise ModuleExecutionError('Error encountered while processing unpacked executables:\n{}'.format(err))
def stop(self):
    """Power the VM off and wait until its VirtualBox session is fully gone."""
    self._vbox("controlvm", self.vm_label, "poweroff")
    self._wait_for_completion("poweroff")

    # For some reason, the restore fails in some cases if we do not wait
    # a little, so we are waiting for 'SessionName' to disapear from the
    # vminfo.
    def session_ended():
        info = self._vbox("showvminfo", self.vm_label, "--machinereadable")
        return 'SessionName="' not in info

    if with_timeout(session_ended, timedelta(seconds=30), 0.5) is None:
        raise ModuleExecutionError('Timeout while waiting for machine "{}" to poweroff properly.'.format(self.vm_label))
def _make_request(self, method, path, **kwargs):
    # Issue an HTTP request to the agent and return the response object.
    # method: "GET" issues a GET; anything else is sent as POST.
    # kwargs are forwarded to requests verbatim.
    # Any HTTP error status (raise_for_status) or connection problem is
    # surfaced as a ModuleExecutionError naming the path.
    try:
        url = self._url(path)

        if method == "GET":
            response = requests.get(url, **kwargs)
        else:
            response = requests.post(url, **kwargs)

        response.raise_for_status()
        return response
    # Python 2 except syntax — this module targets py2.
    except Exception, e:
        raise ModuleExecutionError("Error communicating with agent ({}): {}".format(path, e))
def restore_snapshot(self):
    """Revert the VM to the configured snapshot, or to its current snapshot
    when none is configured; fail if neither exists."""
    if self.snapshot is not None:
        target = self.vm.snapshotLookupByName(self.snapshot)
    elif self.vm.hasCurrentSnapshot():
        target = self.vm.snapshotCurrent()
    else:
        raise ModuleExecutionError(
            'Machine "{}" does not have a current snapshot. Please specify a snapshot name in the configuration.'
            .format(self.vm_label))

    self.vm.revertToSnapshot(target)
def authenticate(self):
    """Log into the Lastline API with the configured credentials.

    NOTE(review): the original code printed the username and password to
    stdout (the literals are redacted with ****** in the source, which also
    left the line syntactically broken). The debug prints were removed both
    to repair the syntax and because they leak credentials; the POST payload
    is reconstructed from the surrounding redacted fragments — confirm
    against the Lastline papi/login.json contract.

    Raises:
        ModuleExecutionError: when the login is rejected.
    """
    url = urljoin(self.api_endpoint, 'papi/login.json')
    response = requests.post(url,
                             data=json.dumps({
                                 "username": self.username,
                                 "password": self.password
                             }),
                             headers={'content-type': 'application/json'})
    if response.json()['success'] != "1":
        raise ModuleExecutionError('Could not Login in Lastlline')
def wait_for_analysis(self):
    """Poll the status endpoint until the task reports 'complete'.

    Raises:
        ModuleExecutionError: if not complete within self.wait_timeout.
    """
    # Loop-invariant auth header, hoisted out of the polling loop.
    auth_header = {'Authorization': 'Key %s' % self.api_key}
    waited_time = 0
    status = None  # BUG FIX: avoid NameError when wait_timeout <= 0
    while waited_time < self.wait_timeout:
        r = requests.get(self.api_endpoint + 'status/' + self.task_id,
                         headers=auth_header)
        status = r.json()['status']
        if status == 'complete':
            break
        time.sleep(self.wait_step)
        waited_time += self.wait_step
    if status != 'complete':
        raise ModuleExecutionError('could not get report before timeout.')
def wait_for_analysis(self):
    """Poll Triage until the sample reaches the 'reported' status.

    Raises:
        ModuleExecutionError: if not reported within self.wait_timeout.
    """
    triage_client = Client(self.apikey, root_url=self.api_endpoint)
    waited_time = 0
    status = None  # BUG FIX: avoid NameError when wait_timeout <= 0
    while waited_time < self.wait_timeout:
        response = triage_client.sample_by_id(self.task_id)
        status = response['status']
        if status == 'reported':
            break
        time.sleep(self.wait_step)
        waited_time += self.wait_step
    if status != 'reported':
        raise ModuleExecutionError('could not get report before timeout.')
def each_with_type(self, target, file_type):
    """Check `target` (a domain/URL) against the NotMining service.

    Sets self.results['verdict'] to "Suspicious" or "Clean". Returns False on
    network/transport errors; raises ModuleExecutionError on an unexpected
    report or an unexpected HTTP status.
    """
    # Set root URLs
    self.results = dict()
    if file_type == 'url':
        # The service expects a scheme; default to http://.
        if "http://" not in target and "https://" not in target:
            target = "http://" + target
    try:
        params = {'api': self.api_key, 'domain': target}
        response = requests.post(url=self.notmining_url, data=params)
        if response.status_code == 200:
            report = response.text
            if "Positive" in report:
                self.results['verdict'] = "Suspicious"
            elif "Negative" in report:
                self.results['verdict'] = "Clean"
            else:
                raise ModuleExecutionError(self, "Report error")
        else:
            raise ModuleExecutionError(self, "Network error")
        return True
    except ModuleExecutionError:
        # BUG FIX: the blanket `except Exception: return False` used to
        # swallow these deliberate errors, making the raises dead code.
        raise
    except Exception:
        return False
def each_with_type(self, target, type):
    """Validate that the configured VxStream environment supports this file
    type, then submit the target, wait for the analysis and fetch the report."""
    self.headers = {
        "User-agent": "FAME (https://github.com/certsocietegenerale/fame) "
                      "VxStream Sandbox Processing Module"
    }
    self.results = {}
    self.state = "module"

    url = self.url + "system/state"
    param = {
        "params": {
            "apikey": self.apikey,
            "secret": self.secret
        },
        "headers": self.headers
    }
    msg = "unsuccessful system state query"
    data = self.query(url, param, msg, json=True)

    if data:
        try:
            data = data["backend"]["nodes"][0]["environment"]
        except (KeyError, IndexError):
            raise VxStreamAPIDataFormatError(url)

        msg = "invalid or unavailable analysis environment(s)"
        if type == "apk":
            env = Environment.apk
        elif type == "application/x-executable":
            env = Environment.nix
        else:  # url, windows
            env = Environment.win

        # Environment ids whose architecture matches the needed one.
        matching = [node.get("ID") for node in data
                    if node.get("architecture") == env]
        if self.environmentId not in matching or not matching:
            raise ModuleExecutionError(msg)
        else:
            self.warn("using configured analysis environment")

    # submit file or url for analysis
    self.submit(target, type)
    # wait for the analysis to be over
    self.heartbeat()
    # retrieve the report and populate results
    self.report()
    return True
def process_report(self):
    """Download the Joe Sandbox reports: IOCs from the light JSON report,
    the behavior graph from the HTML report, and attach the HTML report as a
    support file."""
    try:
        light = self.joe.analysis_download(self.webid, type="lightjson")
        self.extract_iocs(io.BytesIO(light[1]))

        html = self.joe.analysis_download(self.webid, type="html")
        self.extract_graph(io.BytesIO(html[1]))

        filepath = os.path.join(tempdir(), 'joe_report.html')
        with open(filepath, 'w+b') as fd:
            fd.write(html[1])
        self.add_support_file('Report', filepath)
    except Exception as error:
        raise ModuleExecutionError(
            'Error encountered while processing report:\n{}'.format(error))
def wait_for_analysis(self):
    """Poll the Cuckoo task view until the task status is 'reported'.

    Raises:
        ModuleExecutionError: if not reported within self.wait_timeout.
    """
    url = urljoin(self.api_endpoint, '/tasks/view/{0}'.format(self.task_id))
    waited_time = 0
    status = None  # BUG FIX: avoid NameError when wait_timeout <= 0
    while waited_time < self.wait_timeout:
        response = requests.get(url)
        status = response.json()['task']['status']
        if status == 'reported':
            break
        time.sleep(self.wait_step)
        waited_time += self.wait_step
    if status != 'reported':
        raise ModuleExecutionError('could not get report before timeout.')
def restore(self, should_raise=True):
    """Restore the VM to its snapshot and wait until it is ready again.

    When should_raise is True, raise ModuleExecutionError if the machine is
    not ready within self.TIMEOUT seconds.
    """
    if self.is_running():
        self.stop()

    self.restore_snapshot()

    if not self.is_running():
        self.start()

    deadline = datetime.now() + timedelta(seconds=self.TIMEOUT)
    while datetime.now() < deadline:
        if self.is_ready():
            break
        sleep(5)
    else:
        # while/else: loop exhausted without the machine becoming ready.
        if should_raise:
            raise ModuleExecutionError("could not restore virtual machine '{}' before timeout.".format(self.vm_label))
def preload(self, target):
    """Try to fetch the sample with hash `target` from VirusTotal and
    register it as a preloaded file; warn (and do nothing) without an API key."""
    if not self.api_key:
        self.log("warning", "VirusTotal API key not set.")
        return

    response = requests.get(
        "https://www.virustotal.com/vtapi/v2/file/download",
        params={"apikey": self.api_key, "hash": target},
        stream=True)

    if response.status_code == 200:
        self.add_preloaded_file(fd=BytesIO(response.raw.read()))
    elif response.status_code == 400:
        self.log("warning", "API key not valid or file not found.")
    else:
        raise ModuleExecutionError(
            "Could not download file. Status: {}".format(
                response.status_code))
def submit_file(self, target, file_type):
    """Submit a file or URL to Joe Sandbox and store the returned webid.

    Args:
        target: file path, or URL when file_type == 'url'.
        file_type: FAME file type; 'apk' switches to the Android analyzer.

    Raises:
        ModuleExecutionError: on any non-200 submission response.
    """
    url = self.base_url + 'analysis'
    # Internet access toggles both the inet and ssl-inspection flags.
    inet = "1" if self.allow_internet_access else "0"

    params = {
        'apikey': (None, self.apikey),
        'tandc': (None, "1"),
        'type': (None, "file"),
        'auto': (None, "1"),
        'inet': (None, inet),
        'ssl': (None, inet),
        'scae': (None, "1"),
        'vbainstr': (None, "1"),
        'comments': (None, 'Submitted via FAME'),
    }

    sample = None
    try:
        if file_type == 'url':
            params['type'] = (None, "url")
            params['url'] = (None, target)
        else:
            if file_type == 'apk':
                # No automatic analyzer selection for APKs; force android.
                del params['auto']
                params['android1'] = (None, "1")
            # BUG FIX: keep a handle so the file can be closed after the
            # upload (it was previously leaked).
            sample = open(target, 'rb')
            params['sample'] = (os.path.basename(target), sample,
                                mimetypes.guess_type(target)[0] or 'application/octet-stream')

        r = requests.post(url, files=params)
    finally:
        if sample is not None:
            sample.close()

    if r.status_code != 200:
        raise ModuleExecutionError('could not submit: {0}'.format(r.text))

    self.joe_params['webid'] = r.json()['webid']