def RekallAction(self, request):
  """Fake Rekall action: record the request and report the memory file.

  Stashes the incoming request on the instance so the test can inspect it,
  then returns a canned response pointing at self.memory_file.
  """
  self.rekall_request = request

  # Pretend Rekall returned the memory file.
  fake_response = rdf_rekall_types.RekallResponse(
      json_messages="""
      [["file",{"path": "%s", "pathtype": "TMPFILE"}]]
      """ % self.memory_file,
      plugin="aff4acquire")
  end_marker = rdf_client.Iterator(state="FINISHED")
  return [fake_response, end_marker]
def RekallAction(self, _):
  """Stub Rekall action that replays a canned, gzipped pslist result.

  Reads the gzipped Rekall output named by self.result_filename from the
  test data directory and returns it as a single RekallResponse followed
  by the FINISHED iterator marker.

  Returns:
    A list of [RekallResponse, Iterator] messages, as a real client would
    send them.
  """
  # Generate this file with:
  # rekal --output data -f win7_trial_64bit.raw \
  # pslist | gzip - > rekall_pslist_result.dat.gz
  ps_list_file = os.path.join(config.CONFIG["Test.data_dir"],
                              self.result_filename)
  # Use a context manager so the gzip handle is closed deterministically
  # (the original leaked it), and pass "rb" explicitly for consistency
  # with the other RekallAction stubs in this file.
  with gzip.open(ps_list_file, "rb") as fd:
    result = rdf_rekall_types.RekallResponse(
        json_messages=fd.read(),
        plugin="pslist",
        client_urn=self.client_id)

  return [result, rdf_client.Iterator(state="FINISHED")]
def testOverwriting(self):
  """Reassigning client_state replaces the old contents outright."""
  req = rdf_client.Iterator(client_state=rdf_protodict.Dict({"A": 1}))

  # Walk through the same sequence of reassignments as before, checking
  # the element count after each step: start with one element, overwrite
  # with another single-element dict, then overwrite with an empty one.
  steps = [(None, 1), ({"B": 2}, 1), ({}, 0)]
  for new_state, expected_size in steps:
    if new_state is not None:
      req.client_state = rdf_protodict.Dict(new_state)
    self.assertEqual(len(list(req.client_state.items())), expected_size)
def RekallAction(self, _): ps_list_file = os.path.join(config.CONFIG["Test.data_dir"], "rekall_vad_result.dat.gz") response = rdf_rekall_types.RekallResponse( json_messages=gzip.open(ps_list_file, "rb").read(), plugin="pslist") # If we are given process names here we need to craft a Rekall result # containing them. This is so they point to valid files in the fixture. if self.process_list: json_data = json.loads(response.json_messages) template = json_data[7] if template[1]["filename"] != ur"\Windows\System32\ntdll.dll": raise RuntimeError("Test data invalid.") json_data = [] for process in self.process_list: new_entry = copy.deepcopy(template) new_entry[1]["filename"] = process json_data.append(new_entry) response.json_messages = json.dumps(json_data) return [response, rdf_client.Iterator(state="FINISHED")]
def __init__(self, request=None, responses=None): self.status = None # A GrrStatus rdfvalue object. self.success = True self.request = request if request: self.request_data = rdf_protodict.Dict(request.data) self._responses = [] self._dropped_responses = [] if responses: # This may not be needed if we can assume that responses are # returned in lexical order from the data_store. responses.sort(key=operator.attrgetter("response_id")) # The iterator that was returned as part of these responses. This should # be passed back to actions that expect an iterator. self.iterator = None # Filter the responses by authorized states for msg in responses: # Check if the message is authenticated correctly. if msg.auth_state != msg.AuthorizationState.AUTHENTICATED: logging.warning( "%s: Messages must be authenticated (Auth state %s)", msg.session_id, msg.auth_state) self._dropped_responses.append(msg) # Skip this message - it is invalid continue # Check for iterators if msg.type == msg.Type.ITERATOR: self.iterator = rdf_client.Iterator(msg.payload) continue # Look for a status message if msg.type == msg.Type.STATUS: # Our status is set to the first status message that we see in # the responses. We ignore all other messages after that. self.status = rdf_flows.GrrStatus(msg.payload) # Check this to see if the call succeeded self.success = self.status.status == self.status.ReturnedStatus.OK # Ignore all other messages break # Use this message self._responses.append(msg) if self.status is None: # This is a special case of de-synchronized messages. if self._dropped_responses: logging.error( "De-synchronized messages detected:\n %s", "\n".join([ utils.SmartUnicode(x) for x in self._dropped_responses ])) if responses: self._LogFlowState(responses) raise FlowError("No valid Status message.") # This is the raw message accessible while going through the iterator self.message = None